diff --git a/.github/workflows/run-all-tests.yml b/.github/workflows/run-all-tests.yml index 6ed310b2..e701d268 100644 --- a/.github/workflows/run-all-tests.yml +++ b/.github/workflows/run-all-tests.yml @@ -1,7 +1,7 @@ # This workflow will do a clean installation of node dependencies, cache/restore them, build the source code and run tests across different versions of node # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-nodejs -name: 200+ Unit Tests +name: 250+ Unit Tests on: push: diff --git a/README.md b/README.md index 2f899bea..bf982149 100644 --- a/README.md +++ b/README.md @@ -7,9 +7,8 @@ ## The JS / React Library For Building Conversational AI Interfaces ✨💬 NLUX _(for Natural Language User Experience)_ is an open-source Javascript library that makes it super simple to -integrate -powerful large language models (LLMs) like ChatGPT into your web app or website. With just a few lines of code, you -can add conversational AI capabilities and interact with your favourite LLM. +integrate powerful large language models (LLMs) like ChatGPT into your web app or website. With just a few lines +of code, you can add conversational AI capabilities and interact with your favourite LLM. ## Key Features 🌟 @@ -88,6 +87,8 @@ cross platforms, with a focus on performance and usability. ## Community & Support 🙏 * **Star The Repo** 🌟 ― If you like NLUX, please star the repo to show your support. + + * **[GitHub Discussions](https://github.com/nluxai/nlux/discussions)** ― Ask questions, report issues, and share your ideas with the community. 
* **[Discord Community](https://discord.gg/VY4TDaf4)** ― Join our Discord server to chat with the community and get diff --git a/packages/js/core/src/components/chat/chat-room/actions/cancelPerformOnceOnScroll.ts b/packages/js/core/src/components/chat/chat-room/actions/cancelPerformOnceOnScroll.ts deleted file mode 100644 index 2ecbe683..00000000 --- a/packages/js/core/src/components/chat/chat-room/actions/cancelPerformOnceOnScroll.ts +++ /dev/null @@ -1,7 +0,0 @@ -export const cancelPerformOnceOnScrollFactory = ( - scrollUpOnceListeners: Set<() => void>, -) => { - return (callback: () => void) => { - scrollUpOnceListeners.delete(callback); - }; -}; diff --git a/packages/js/core/src/components/chat/chat-room/actions/performOnceOnScroll.ts b/packages/js/core/src/components/chat/chat-room/actions/performOnceOnScroll.ts deleted file mode 100644 index 0445c68a..00000000 --- a/packages/js/core/src/components/chat/chat-room/actions/performOnceOnScroll.ts +++ /dev/null @@ -1,15 +0,0 @@ -export const performOnceOnScrollFactory = ( - scrollUpOnceListeners: Set<() => void>, -) => { - return (direction: 'up', callback: () => void) => { - if (direction !== 'up') { - throw new Error(`Unsupported scroll direction by listener: ${direction}`); - } - - scrollUpOnceListeners.add(callback); - - return () => { - scrollUpOnceListeners.delete(callback); - }; - }; -}; \ No newline at end of file diff --git a/packages/js/core/src/components/chat/chat-room/actions/submitPrompt.ts b/packages/js/core/src/components/chat/chat-room/actions/submitPrompt.ts index 606d5911..0850540a 100644 --- a/packages/js/core/src/components/chat/chat-room/actions/submitPrompt.ts +++ b/packages/js/core/src/components/chat/chat-room/actions/submitPrompt.ts @@ -1,6 +1,6 @@ import {Observable} from '../../../../core/bus/observable'; import {ExceptionId} from '../../../../exceptions/exceptions'; -import {DataTransferMode} from '../../../../types/adapter'; +import {AdapterExtras, DataTransferMode} from 
'../../../../types/adapter'; import {NluxContext} from '../../../../types/context'; import {warn} from '../../../../x/warn'; import {CompConversation} from '../../conversation/conversation.model'; @@ -70,7 +70,14 @@ export const submitPromptFactory = ({ // Set the default data transfer mode based on the adapter's capabilities const defaultDataTransferMode = supportedDataTransferModes.length === 1 ? supportedDataTransferModes[0] : 'stream'; + const dataTransferModeToUse = dataTransferMode ?? defaultDataTransferMode; + const extras: AdapterExtras = { + aiChatProps: context.aiChatProps, + conversationHistory: conversation.getConversationContentForAdapter( + context.aiChatProps?.conversationOptions?.historyPayloadSize, + ), + }; if (dataTransferModeToUse === 'stream') { if (!context.adapter.streamText) { @@ -78,7 +85,7 @@ export const submitPromptFactory = ({ } observable = new Observable(); - context.adapter.streamText(messageToSend, observable); + context.adapter.streamText(messageToSend, observable, extras); messageContentType = 'stream'; } else { if (!context.adapter.fetchText) { @@ -86,7 +93,7 @@ export const submitPromptFactory = ({ } observable = undefined; - sentResponse = context.adapter.fetchText(messageToSend); + sentResponse = context.adapter.fetchText(messageToSend, extras); messageContentType = 'promise'; } @@ -115,6 +122,11 @@ export const submitPromptFactory = ({ sentResponse.then((promiseContent) => { message.setContent(promiseContent); resetPromptBox(true); + + // Only add user message to conversation content (used for history, and not displayed) if the + // message was sent successfully and a response was received. 
+ conversation.updateConversationContent({role: 'user', message: messageToSend}); + conversation.updateConversationContent({role: 'ai', message: promiseContent}); context.emit('messageReceived', promiseContent); }).catch((error) => { message.setErrored(); @@ -159,7 +171,12 @@ export const submitPromptFactory = ({ complete: () => { message.commitContent(); resetPromptBox(true); + if (message.content) { + // Only add user message to conversation content (used for history, and not displayed) + // if the message was sent successfully and a response was received. + conversation.updateConversationContent({role: 'user', message: messageToSend}); + conversation.updateConversationContent({role: 'ai', message: message.content}); context.emit('messageReceived', message.content); } }, diff --git a/packages/js/core/src/components/chat/chat-room/chat-room.model.ts b/packages/js/core/src/components/chat/chat-room/chat-room.model.ts index 6637be35..4ef052cb 100644 --- a/packages/js/core/src/components/chat/chat-room/chat-room.model.ts +++ b/packages/js/core/src/components/chat/chat-room/chat-room.model.ts @@ -36,7 +36,7 @@ export class CompChatRoom extends BaseComp< promptBox, botPersona, userPersona, - conversationHistory, + initialConversationContent, }: CompChatRoomProps) { super(context, { visible, @@ -58,7 +58,7 @@ export class CompChatRoom extends BaseComp< getStreamingAnimationSpeed(streamingAnimationSpeed), botPersona, userPersona, - conversationHistory, + initialConversationContent, ); this.addPromptBox(promptBox?.placeholder, promptBox?.autoFocus); @@ -137,7 +137,7 @@ export class CompChatRoom extends BaseComp< streamingAnimationSpeed: number, botPersona?: BotPersona, userPersona?: UserPersona, - conversationHistory?: readonly ConversationItem[], + initialConversationContent?: readonly ConversationItem[], ) { this.conversation = comp(CompConversation) .withContext(this.context) @@ -146,7 +146,7 @@ export class CompChatRoom extends BaseComp< streamingAnimationSpeed, 
botPersona, userPersona, - messages: conversationHistory, + messages: initialConversationContent, }) .create(); diff --git a/packages/js/core/src/components/chat/chat-room/chat-room.types.ts b/packages/js/core/src/components/chat/chat-room/chat-room.types.ts index 3932dccf..a51dbc70 100644 --- a/packages/js/core/src/components/chat/chat-room/chat-room.types.ts +++ b/packages/js/core/src/components/chat/chat-room/chat-room.types.ts @@ -9,7 +9,7 @@ export type CompChatRoomProps = { visible?: boolean; botPersona?: BotPersona, userPersona?: UserPersona, - conversationHistory?: readonly ConversationItem[]; + initialConversationContent?: readonly ConversationItem[]; scrollWhenGenerating?: boolean; streamingAnimationSpeed?: number | null; containerMaxHeight?: number | string; diff --git a/packages/js/core/src/components/chat/conversation/conversation.model.ts b/packages/js/core/src/components/chat/conversation/conversation.model.ts index 6385510c..b8c6d796 100644 --- a/packages/js/core/src/components/chat/conversation/conversation.model.ts +++ b/packages/js/core/src/components/chat/conversation/conversation.model.ts @@ -2,7 +2,10 @@ import {BotPersona, UserPersona} from '@nlux/core'; import {BaseComp} from '../../../core/comp/base'; import {comp} from '../../../core/comp/comp'; import {CompEventListener, Model} from '../../../core/comp/decorators'; +import {HistoryPayloadSize} from '../../../core/options/conversationOptions'; import {NluxContext} from '../../../types/context'; +import {ConversationItem} from '../../../types/conversation'; +import {warnOnce} from '../../../x/warn'; import {CompList} from '../../miscellaneous/list/model'; import {messageInList, textMessage} from '../chat-room/utils/textMessage'; import {CompMessage} from '../message/message.model'; @@ -21,6 +24,7 @@ import {updateConversation} from './conversation.update'; export class CompConversation extends BaseComp< CompConversationProps, CompConversationElements, CompConversationEvents, 
CompConversationActions > { + private conversationContent: ConversationItem[] = []; private lastMessageId?: string; private lastMessageResizedListener?: Function; private messagesContainerRendered: boolean = false; @@ -32,6 +36,7 @@ export class CompConversation extends BaseComp< super(context, props); this.addConversation(); this.scrollWhenGeneratingUserOption = props.scrollWhenGenerating ?? true; + this.conversationContent = props.messages?.map((message) => ({...message})) ?? []; } public addMessage( @@ -107,6 +112,30 @@ export class CompConversation extends BaseComp< return message.id; } + public getConversationContentForAdapter( + historyPayloadSize: HistoryPayloadSize = 'max', + ): Readonly | undefined { + if (typeof historyPayloadSize === 'number' && historyPayloadSize <= 0) { + warnOnce( + `Invalid value provided for 'historyPayloadSize' : "${historyPayloadSize}"! ` + + `Value must be a positive integer or 'all'.`, + ); + + return undefined; + } + + if (historyPayloadSize === 'none') { + return undefined; + } + + if (historyPayloadSize === 'max') { + // We should return a new reference + return [...this.conversationContent]; + } + + return this.conversationContent.slice(-historyPayloadSize); + } + public getMessageById(messageId: string): CompMessage | undefined { return this.messagesList?.getComponentById(messageId); } @@ -150,6 +179,10 @@ export class CompConversation extends BaseComp< this.scrollWhenGeneratingUserOption = autoScrollToStreamingMessage; } + public updateConversationContent(newItem: ConversationItem) { + this.conversationContent.push(newItem); + } + private addConversation() { this.messagesList = comp(CompList).withContext(this.context).create(); this.addSubComponent(this.messagesList.id, this.messagesList, 'messagesContainer'); diff --git a/packages/js/core/src/core/aiChat.ts b/packages/js/core/src/core/aiChat.ts index decedf4a..6f7b3b3b 100644 --- a/packages/js/core/src/core/aiChat.ts +++ b/packages/js/core/src/core/aiChat.ts @@ -3,7 +3,7 
@@ import {Adapter} from '../types/adapter'; import {AdapterBuilder} from '../types/adapterBuilder'; import {ConversationItem} from '../types/conversation'; import {EventCallback, EventName, EventsMap} from '../types/event'; -import {NluxProps} from '../types/props'; +import {AiChatProps} from '../types/props'; import {StandardAdapter} from '../types/standardAdapter'; import {debug} from '../x/debug'; import {NluxController} from './controller/controller'; @@ -20,8 +20,8 @@ export class AiChat implements IAiChat { protected theAdapterBuilder: StandardAdapter | null = null; protected theAdapterType: 'builder' | 'instance' | null = null; protected theClassName: string | null = null; - protected theConversationHistory: ConversationItem[] | null = null; protected theConversationOptions: ConversationOptions | null = null; + protected theInitialConversation: ConversationItem[] | null = null; protected theLayoutOptions: LayoutOptions | null = null; protected thePersonasOptions: PersonaOptions | null = null; protected thePromptBoxOptions: PromptBoxOptions | null = null; @@ -77,7 +77,7 @@ export class AiChat implements IAiChat { themeId: this.theThemeId ?? undefined, adapter: adapterToUser, className: this.theClassName ?? undefined, - conversationHistory: this.theConversationHistory ?? undefined, + initialConversation: this.theInitialConversation ?? undefined, syntaxHighlighter: this.theSyntaxHighlighter ?? undefined, layoutOptions: this.theLayoutOptions ?? {}, conversationOptions: this.theConversationOptions ?? 
{}, @@ -171,7 +171,7 @@ export class AiChat implements IAiChat { this.unregisteredEventListeners.clear(); } - public updateProps(props: Partial) { + public updateProps(props: Partial) { if (!this.controller) { throw new NluxRenderingError({ source: this.constructor.name, @@ -249,41 +249,41 @@ export class AiChat implements IAiChat { return this; } - public withConversationHistory(conversationHistory: ConversationItem[]) { + public withConversationOptions(conversationOptions: ConversationOptions) { if (this.mounted) { throw new NluxUsageError({ source: this.constructor.name, - message: 'Unable to set conversation history. NLUX is already mounted.', + message: 'Unable to set conversation options. NLUX is already mounted.', }); } - if (this.theConversationHistory) { + if (this.theConversationOptions) { throw new NluxUsageError({ source: this.constructor.name, - message: 'Unable to change config. Conversation history was already set.', + message: 'Unable to change config. Conversation options were already set.', }); } - this.theConversationHistory = conversationHistory; + this.theConversationOptions = conversationOptions; return this; } - public withConversationOptions(conversationOptions: ConversationOptions) { + public withInitialConversation(initialConversation: ConversationItem[]) { if (this.mounted) { throw new NluxUsageError({ source: this.constructor.name, - message: 'Unable to set conversation options. NLUX is already mounted.', + message: 'Unable to set conversation history. NLUX is already mounted.', }); } - if (this.theConversationOptions) { + if (this.theInitialConversation) { throw new NluxUsageError({ source: this.constructor.name, - message: 'Unable to change config. Conversation options were already set.', + message: 'Unable to change config. 
Conversation history was already set.', }); } - this.theConversationOptions = conversationOptions; + this.theInitialConversation = initialConversation; return this; } diff --git a/packages/js/core/src/core/context.ts b/packages/js/core/src/core/context.ts index 68934bd9..63068cff 100644 --- a/packages/js/core/src/core/context.ts +++ b/packages/js/core/src/core/context.ts @@ -1,8 +1,10 @@ import {ContextProps, NluxContext} from '../types/context'; import {EventName, EventsMap} from '../types/event'; +import {AiChatProps} from '../types/props'; export const createContext = ( props: ContextProps, + getAiChatProps: () => Readonly, emitEvent: ( event: EventToEmit, ...params: Parameters @@ -19,6 +21,9 @@ export const createContext = ( ) => { emitEvent(eventName, ...params); }, + get aiChatProps() { + return getAiChatProps(); + }, }; return context; diff --git a/packages/js/core/src/core/controller/controller.ts b/packages/js/core/src/core/controller/controller.ts index 2074a860..a5d7fd85 100644 --- a/packages/js/core/src/core/controller/controller.ts +++ b/packages/js/core/src/core/controller/controller.ts @@ -1,7 +1,7 @@ import {ExceptionId, NluxExceptions} from '../../exceptions/exceptions'; import {NluxContext} from '../../types/context'; import {EventCallback, EventName} from '../../types/event'; -import {NluxProps} from '../../types/props'; +import {AiChatInternalProps, AiChatProps} from '../../types/props'; import {uid} from '../../x/uid'; import {warn} from '../../x/warn'; import {createContext} from '../context'; @@ -12,7 +12,7 @@ export class NluxController { private readonly eventManager = new EventManager(); private readonly nluxInstanceId = uid(); - private readonly props: NluxProps; + private props: AiChatInternalProps; private renderException = (exceptionId: string) => { if (!this.mounted || !this.renderer) { @@ -34,7 +34,7 @@ export class NluxController { constructor( rootElement: HTMLElement, - props: NluxProps, + props: AiChatInternalProps, ) { 
this.rootCompId = 'chat-room'; this.rootElement = rootElement; @@ -59,11 +59,34 @@ export class NluxController { } const newContext: NluxContext = createContext({ - instanceId: this.nluxInstanceId, - exception: this.renderException, - adapter: this.props.adapter, - syntaxHighlighter: this.props.syntaxHighlighter, - }, this.eventManager.emit); + instanceId: this.nluxInstanceId, + exception: this.renderException, + adapter: this.props.adapter, + syntaxHighlighter: this.props.syntaxHighlighter, + }, + () => { + return { + ...this.props, + conversationOptions: this.props.conversationOptions && Object.keys( + this.props.conversationOptions).length > 0 + ? this.props.conversationOptions + : undefined, + promptBoxOptions: this.props.promptBoxOptions && Object.keys( + this.props.promptBoxOptions).length > 0 + ? this.props.promptBoxOptions + : undefined, + layoutOptions: this.props.layoutOptions && Object.keys( + this.props.layoutOptions).length > 0 + ? this.props.layoutOptions + : undefined, + personaOptions: this.props.personaOptions && Object.keys( + this.props.personaOptions).length > 0 + ? 
this.props.personaOptions + : undefined, + }; + }, + this.eventManager.emit, + ); this.renderer = new NluxRenderer( newContext, @@ -108,8 +131,13 @@ export class NluxController { this.renderer = null; } - public updateProps(props: Partial) { + public updateProps(props: Partial) { this.renderer?.updateProps(props); + this.props = { + ...this.props, + ...props, + }; + if (props.events) { this.props.events = props.events; this.eventManager.updateEventListeners(props.events); diff --git a/packages/js/core/src/core/interface.ts b/packages/js/core/src/core/interface.ts index 9a7a4c32..e87ff74d 100644 --- a/packages/js/core/src/core/interface.ts +++ b/packages/js/core/src/core/interface.ts @@ -1,7 +1,7 @@ import {AdapterBuilder} from '../types/adapterBuilder'; import {ConversationItem} from '../types/conversation'; import {EventCallback, EventName, EventsMap} from '../types/event'; -import {NluxProps} from '../types/props'; +import {AiChatProps} from '../types/props'; import {HighlighterExtension} from './highlighter/highlighter'; import {ConversationOptions} from './options/conversationOptions'; import {LayoutOptions} from './options/layoutOptions'; @@ -19,7 +19,7 @@ export interface IAiChat { show(): void; unmount(): void; - updateProps(props: Partial): void; + updateProps(props: Partial): void; withAdapter( adapterBuilder: AdapterBuilder, @@ -27,12 +27,12 @@ export interface IAiChat { withClassName( className: string, ): IAiChat; - withConversationHistory( - conversationHistory: ConversationItem[], - ): IAiChat; withConversationOptions( conversationOptions: ConversationOptions, ): IAiChat; + withInitialConversation( + initialConversation: ConversationItem[], + ): IAiChat; withLayoutOptions( layoutOptions: LayoutOptions, ): IAiChat; diff --git a/packages/js/core/src/core/options/conversationOptions.ts b/packages/js/core/src/core/options/conversationOptions.ts index 80c33c75..067926a1 100644 --- a/packages/js/core/src/core/options/conversationOptions.ts +++ 
b/packages/js/core/src/core/options/conversationOptions.ts @@ -1,6 +1,22 @@ +export type HistoryPayloadSize = number | 'none' | 'max'; + export interface ConversationOptions { + /** + * Indicates the number of messages from conversation history that should be sent to the backend with each message. + * For custom adapters, the history will be available as part of `extras.conversationHistory` attribute. + * For standard adapters, the history will be automatically handled by the adapter. + * + * By default, the entire conversation history is sent with each message. + * Set to `'none'` to disable sending conversation history with each message. + * Or set to a positive integer to send a specific number of messages. + * + * @default 'max' + */ + historyPayloadSize?: HistoryPayloadSize; + /** * Indicates whether the conversation should be scrolled to the bottom when a new message is added. + * * @default true */ scrollWhenGenerating?: boolean; @@ -9,6 +25,7 @@ export interface ConversationOptions { * The interval in milliseconds at which new characters are added to the conversation when a message * is being generated and rendering in the UI. * Set to `null` to disable the streaming animation. 
+ * * @default 10 */ streamingAnimationSpeed?: number | null; diff --git a/packages/js/core/src/core/renderer/renderer.ts b/packages/js/core/src/core/renderer/renderer.ts index 0617dd40..e17c9e83 100644 --- a/packages/js/core/src/core/renderer/renderer.ts +++ b/packages/js/core/src/core/renderer/renderer.ts @@ -5,7 +5,7 @@ import {CompExceptionsBoxProps} from '../../components/miscellaneous/exceptions- import {NluxContext} from '../../types/context'; import {ConversationItem} from '../../types/conversation'; import {ExceptionType} from '../../types/exception'; -import {NluxProps} from '../../types/props'; +import {AiChatInternalProps, AiChatProps} from '../../types/props'; import {warn} from '../../x/warn'; import {comp} from '../comp/comp'; import {CompRegistry} from '../comp/registry'; @@ -29,8 +29,8 @@ export class NluxRenderer { private rootElement: HTMLElement | null = null; private rootElementInitialClassName: string | null; private theClassName: string | null = null; - private theConversationHistory: Readonly | null = null; private theConversationOptions: Readonly = {}; + private theInitialConversationContent: Readonly | null = null; private theLayoutOptions: Readonly = {}; private thePersonasOptions: Readonly = {}; private thePromptBoxOptions: Readonly = {}; @@ -40,7 +40,7 @@ export class NluxRenderer { context: NluxContext, rootCompId: string, rootElement: HTMLElement, - props: NluxProps | null = null, + props: AiChatInternalProps | null = null, ) { if (!rootCompId) { throw new NluxRenderingError({ @@ -61,7 +61,7 @@ export class NluxRenderer { this.theLayoutOptions = props?.layoutOptions ?? {}; this.theConversationOptions = props?.conversationOptions ?? {}; - this.theConversationHistory = props?.conversationHistory ?? null; + this.theInitialConversationContent = props?.initialConversation ?? null; this.thePromptBoxOptions = props?.promptBoxOptions ?? {}; this.thePersonasOptions = props?.personaOptions ?? 
{}; } @@ -146,7 +146,7 @@ export class NluxRenderer { visible: true, botPersona: this.thePersonasOptions?.bot ?? undefined, userPersona: this.thePersonasOptions?.user ?? undefined, - conversationHistory: this.theConversationHistory ?? undefined, + initialConversationContent: this.theInitialConversationContent ?? undefined, scrollWhenGenerating: this.theConversationOptions?.scrollWhenGenerating, streamingAnimationSpeed: this.theConversationOptions?.streamingAnimationSpeed, containerMaxHeight: this.theLayoutOptions?.maxHeight || undefined, @@ -260,7 +260,7 @@ export class NluxRenderer { this.isMounted = false; } - public updateProps(props: Partial) { + public updateProps(props: Partial) { if (props.hasOwnProperty('className')) { const newClassName = props.className || undefined; if (newClassName) { diff --git a/packages/js/core/src/index.ts b/packages/js/core/src/index.ts index 15de1f6c..5865ed98 100644 --- a/packages/js/core/src/index.ts +++ b/packages/js/core/src/index.ts @@ -24,7 +24,7 @@ export type { export type {IObserver} from './core/bus/observer'; export type {ExposedConfig} from './core/config'; -export type {NluxProps} from './types/props'; +export type {AiChatInternalProps} from './types/props'; export type { EventName, @@ -52,8 +52,13 @@ export type { OutputFormat, } from './types/standardAdapterConfig'; +export type { + AiChatProps, +} from './types/props'; + export type { Adapter, + AdapterExtras, StreamingAdapterObserver, DataTransferMode, } from './types/adapter'; diff --git a/packages/js/core/src/types/adapter.ts b/packages/js/core/src/types/adapter.ts index 3609fba6..e3d7ea39 100644 --- a/packages/js/core/src/types/adapter.ts +++ b/packages/js/core/src/types/adapter.ts @@ -1,3 +1,6 @@ +import {ConversationItem} from './conversation'; +import {AiChatProps} from './props'; + /** * This type is used to indicate the mode in which the adapter should request data from the API. 
*/ @@ -14,9 +17,10 @@ export interface Adapter { * Either this method or `streamText` (or both) should be implemented by any adapter. * * @param `string` message + * @param `AdapterExtras` extras * @returns Promise */ - fetchText?: (message: string) => Promise; + fetchText?: (message: string, extras: AdapterExtras) => Promise; /** * This method should be implemented by any adapter to be used with NLUX. @@ -24,15 +28,16 @@ * * @param {string} message * @param {StreamingAdapterObserver} observer + * @param {AdapterExtras} extras */ - streamText?: (message: string, observer: StreamingAdapterObserver) => void; + streamText?: (message: string, observer: StreamingAdapterObserver, extras: AdapterExtras) => void; } /** * This interface is used to capture the stream of data being generated by the API and send it to the AiChat * user interface as it's being generated. */ -export interface StreamingAdapterObserver { +export interface StreamingAdapterObserver { /** * This method should be called by the adapter when it has completed sending data to the AiChat user interface. * This will result in the AiChat component removing the loading indicator and resetting the conversation @@ -54,7 +59,23 @@ /** * This method should be called by the adapter when it has new data to send to the AiChat user interface. - * @param {DataType} message + * @param {string} message + */ + next(message: string): void; +} + +/** + * Additional data sent to the adapter when a message is sent. + */ +export type AdapterExtras = { + /** + * This attribute contains the properties used with the AiChat component. + */ + aiChatProps: AiChatProps; + + /** + * This attribute contains the conversation history. + * It's only included if the `conversationOptions.historyPayloadSize` is set to a positive number or 'max'. 
*/ - next(message: DataType): void; + conversationHistory?: Readonly; } diff --git a/packages/js/core/src/types/context.ts b/packages/js/core/src/types/context.ts index 85f9c615..0a1a8734 100644 --- a/packages/js/core/src/types/context.ts +++ b/packages/js/core/src/types/context.ts @@ -2,6 +2,7 @@ import {HighlighterExtension} from '../core/highlighter/highlighter'; import {ExceptionId} from '../exceptions/exceptions'; import {Adapter} from './adapter'; import {EventName, EventsMap} from './event'; +import {AiChatProps} from './props'; import {StandardAdapter} from './standardAdapter'; export type ContextProps = Readonly<{ @@ -14,4 +15,5 @@ export type ContextProps = Readonly<{ export type NluxContext = ContextProps & { update: (props: Partial) => void; emit: (eventName: EventToEmit, ...params: Parameters) => void; + get aiChatProps(): Readonly; }; diff --git a/packages/js/core/src/types/props.ts b/packages/js/core/src/types/props.ts index 6efd1412..13111e1b 100644 --- a/packages/js/core/src/types/props.ts +++ b/packages/js/core/src/types/props.ts @@ -8,15 +8,32 @@ import {ConversationItem} from './conversation'; import {EventsMap} from './event'; import {StandardAdapter} from './standardAdapter'; -export type NluxProps = { +export type AiChatInternalProps = { adapter: Adapter | StandardAdapter; events?: Partial; themeId?: string; className?: string; - conversationHistory?: ConversationItem[]; - promptBoxOptions: Partial; - conversationOptions: Partial; - personaOptions: Partial; - layoutOptions: Partial; + initialConversation?: ConversationItem[]; + promptBoxOptions: PromptBoxOptions; + conversationOptions: ConversationOptions; + personaOptions: PersonaOptions; + layoutOptions: LayoutOptions; syntaxHighlighter?: HighlighterExtension; }; + +/** + * These are the props that are exposed to the user of the AiChat component. 
+ * They can be updated using the `updateProps` method, and they are provided to certain adapter methods + * as part of the `AdapterExtras` attribute. + */ +export type AiChatProps = Readonly<{ + adapter: Adapter | StandardAdapter; + events?: Partial; + themeId?: string; + className?: string; + promptBoxOptions?: Readonly; + conversationOptions?: Readonly; + personaOptions?: Readonly + layoutOptions?: Readonly; + syntaxHighlighter?: HighlighterExtension; +}>; diff --git a/packages/js/core/src/types/standardAdapter.ts b/packages/js/core/src/types/standardAdapter.ts index 0a6a66f1..99cba0a2 100644 --- a/packages/js/core/src/types/standardAdapter.ts +++ b/packages/js/core/src/types/standardAdapter.ts @@ -1,4 +1,4 @@ -import {Adapter, DataTransferMode, StreamingAdapterObserver} from './adapter'; +import {Adapter, AdapterExtras, DataTransferMode, StreamingAdapterObserver} from './adapter'; import {StandardAdapterConfig, StandardAdapterInfo} from './standardAdapterConfig'; export type StandardAdapterStatus = 'disconnected' @@ -20,11 +20,11 @@ export interface StandardAdapter extends Adapte get dataTransferMode(): DataTransferMode; decode(payload: InboundPayload): Promise; encode(message: string): Promise; - fetchText(message: string): Promise; + fetchText(message: string, extras: AdapterExtras): Promise; get id(): string; get info(): StandardAdapterInfo; get status(): StandardAdapterStatus; - streamText(message: string, observer: StreamingAdapterObserver): void; + streamText(message: string, observer: StreamingAdapterObserver, extras: AdapterExtras): void; } export const isStandardAdapter = (adapter: Adapter): boolean => { diff --git a/packages/js/core/src/x/debug.ts b/packages/js/core/src/x/debug.ts index 4d8936f5..b0633177 100644 --- a/packages/js/core/src/x/debug.ts +++ b/packages/js/core/src/x/debug.ts @@ -6,9 +6,15 @@ export const debug = (...messages: any[]) => { for (const message of messages) { if (typeof message === 'string') { console.log(`[nlux] ${message}`); 
- } else { - console.log('[nlux] Debug:'); - console.log(JSON.stringify(message, null, 2)); + continue; } + + if (message && typeof message.toString === 'function') { + console.log(`[nlux] ${message.toString()}`); + continue; + } + + console.log('[nlux] Debug:'); + console.log(JSON.stringify(message, null, 2)); } }; diff --git a/packages/js/core/src/x/warn.ts b/packages/js/core/src/x/warn.ts index dc920d82..1b81323e 100644 --- a/packages/js/core/src/x/warn.ts +++ b/packages/js/core/src/x/warn.ts @@ -1,14 +1,16 @@ export const warn = (message: any) => { - const prefix = (process.env.NLUX_DEBUG_ENABLED === 'true') - ? '[nlux]: ' - : ''; - if (typeof message === 'string') { - console.warn(`${prefix}${message}`); - } else { - prefix && console.warn(prefix); - console.log(JSON.stringify(message, null, 2)); + console.warn(`[nlux] ${message}`); + return; + } + + if (message && typeof message.toString === 'function') { + console.warn(`[nlux] ${message.toString()}`); + return; } + + console.warn('[nlux]'); + console.log(JSON.stringify(message, null, 2)); }; const warnedMessages: string[] = []; diff --git a/packages/js/langchain/src/langserve/adapter/adapter.ts b/packages/js/langchain/src/langserve/adapter/adapter.ts index 4863fd3a..53b35fb7 100644 --- a/packages/js/langchain/src/langserve/adapter/adapter.ts +++ b/packages/js/langchain/src/langserve/adapter/adapter.ts @@ -1,4 +1,6 @@ import { + AdapterExtras, + ConversationItem, DataTransferMode, StandardAdapter, StandardAdapterConfig, @@ -136,7 +138,7 @@ export abstract class LangServeAbstractAdapter implements StandardAdapter; + abstract fetchText(message: string, extras: AdapterExtras): Promise; init() { if (!this.inputPreProcessor && this.useInputSchema) { @@ -146,7 +148,7 @@ export abstract class LangServeAbstractAdapter implements StandardAdapter { - const body = this.getRequestBody(message); + async fetchText(message: string, extras: AdapterExtras): Promise { + const body = this.getRequestBody(message, 
extras.conversationHistory); const response = await fetch(this.endpointUrl, { method: 'POST', body, @@ -29,7 +29,7 @@ export class LangServeFetchAdapter extends LangServeAbstractAdapter { return this.getDisplayableMessageFromAiOutput(output) ?? ''; } - streamText(message: string, observer: StreamingAdapterObserver): void { + streamText(message: string, observer: StreamingAdapterObserver, extras: AdapterExtras): void { throw new NluxUsageError({ source: this.constructor.name, message: 'Cannot stream text from the fetch adapter!', diff --git a/packages/js/langchain/src/langserve/adapter/stream.ts b/packages/js/langchain/src/langserve/adapter/stream.ts index 4c2f045f..7b2d068c 100644 --- a/packages/js/langchain/src/langserve/adapter/stream.ts +++ b/packages/js/langchain/src/langserve/adapter/stream.ts @@ -1,4 +1,4 @@ -import {NluxError, NluxUsageError, StreamingAdapterObserver, warn} from '@nlux/core'; +import {AdapterExtras, NluxError, NluxUsageError, StreamingAdapterObserver, warn} from '@nlux/core'; import {parseChunk} from '../parser/parseChunk'; import {adapterErrorToExceptionId} from '../utils/adapterErrorToExceptionId'; import {LangServeAbstractAdapter} from './adapter'; @@ -8,15 +8,15 @@ export class LangServeStreamAdapter extends LangServeAbstractAdapter { super(options); } - async fetchText(message: string): Promise { + async fetchText(message: string, extras: AdapterExtras): Promise { throw new NluxUsageError({ source: this.constructor.name, message: 'Cannot fetch text using the stream adapter!', }); } - streamText(message: string, observer: StreamingAdapterObserver): void { - const body = this.getRequestBody(message); + streamText(message: string, observer: StreamingAdapterObserver, extras: AdapterExtras): void { + const body = this.getRequestBody(message, extras.conversationHistory); fetch(this.endpointUrl, { method: 'POST', headers: {'Content-Type': 'application/json'}, diff --git a/packages/js/langchain/src/langserve/types/inputPreProcessor.ts 
b/packages/js/langchain/src/langserve/types/inputPreProcessor.ts index 6b1f0182..c16b04bd 100644 --- a/packages/js/langchain/src/langserve/types/inputPreProcessor.ts +++ b/packages/js/langchain/src/langserve/types/inputPreProcessor.ts @@ -1,3 +1,5 @@ +import {ConversationItem} from '@nlux/core'; + /** * A function that can be used to pre-process the input before sending it to the runnable. * Whatever this function returns will be sent to the runnable under the "input" property. @@ -8,7 +10,7 @@ * * For the following input processor: * ``` - * (message: string) => ({ message, year: 1999 }) + * (message) => ({ message, year: 1999 }) * ``` * The following input will be sent to the runnable when the user * types "Hello world": @@ -23,4 +25,5 @@ */ export type LangServeInputPreProcessor = ( input: string, + conversationHistory?: readonly ConversationItem[], ) => any; diff --git a/packages/js/langchain/src/langserve/utils/transformInputBasedOnSchema.ts b/packages/js/langchain/src/langserve/utils/transformInputBasedOnSchema.ts index 9617064e..fd6b3731 100644 --- a/packages/js/langchain/src/langserve/utils/transformInputBasedOnSchema.ts +++ b/packages/js/langchain/src/langserve/utils/transformInputBasedOnSchema.ts @@ -1,6 +1,14 @@ -import {warn} from '@nlux/core'; +import {ConversationItem, warn} from '@nlux/core'; + +export const transformInputBasedOnSchema = ( + message: string, + conversationHistory: readonly ConversationItem[] | undefined, + schema: any, + runnableName: string, +): any | undefined => { + // TODO - Attempt to include conversation history in the input + // if the schema allows it. 
-export const transformInputBasedOnSchema = (message: string, schema: any, runnableName: string): any | undefined => { if (!schema || typeof schema.properties !== 'object') { return message; } diff --git a/packages/js/openai/src/openai/gpt/adapters/adapter.ts b/packages/js/openai/src/openai/gpt/adapters/adapter.ts index 021fc392..86af558b 100644 --- a/packages/js/openai/src/openai/gpt/adapters/adapter.ts +++ b/packages/js/openai/src/openai/gpt/adapters/adapter.ts @@ -1,4 +1,5 @@ import { + AdapterExtras, DataTransferMode, StandardAdapter, StandardAdapterConfig, @@ -53,8 +54,8 @@ export abstract class OpenAiAbstractAdapter imp + 'The useUnsafeAdapter/createUnsafeAdapter are only intended for development and testing purposes.\n\n' + 'For production use, we recommend that you implement a server-side proxy and configure a customized ' + 'adapter for it. To learn more about how to create custom adapters for NLUX, visit:\n' - + 'https://nlux.dev/learn/adapters/custom-adapters' - ) + + 'https://nlux.dev/learn/adapters/custom-adapters', + ); } abstract get config(): StandardAdapterConfig; @@ -85,7 +86,7 @@ export abstract class OpenAiAbstractAdapter imp return encodeMessage(message); } - abstract fetchText(message: string): Promise; + abstract fetchText(message: string, extras: AdapterExtras): Promise; - abstract streamText(message: string, observer: StreamingAdapterObserver): void; + abstract streamText(message: string, observer: StreamingAdapterObserver, extras: AdapterExtras): void; } diff --git a/packages/js/openai/src/openai/gpt/adapters/fetch.ts b/packages/js/openai/src/openai/gpt/adapters/fetch.ts index 7462d598..5d3ef7f0 100644 --- a/packages/js/openai/src/openai/gpt/adapters/fetch.ts +++ b/packages/js/openai/src/openai/gpt/adapters/fetch.ts @@ -1,6 +1,7 @@ -import {NluxUsageError, StreamingAdapterObserver, warn} from '@nlux/core'; +import {AdapterExtras, NluxUsageError, StreamingAdapterObserver, warn} from '@nlux/core'; import OpenAI from 'openai'; -import 
{adapterErrorToExceptionId} from '../../../x/adapterErrorToExceptionId'; +import {adapterErrorToExceptionId} from '../../../utils/adapterErrorToExceptionId'; +import {conversationHistoryToMessagesList} from '../../../utils/conversationHistoryToMessagesList'; import {gptFetchAdapterConfig} from '../config'; import {OpenAiAdapterOptions} from '../types/adapterOptions'; import {OpenAiAbstractAdapter} from './adapter'; @@ -30,17 +31,24 @@ export class OpenAiFetchAdapter extends OpenAiAbstractAdapter< return gptFetchAdapterConfig; } - async fetchText(message: string): Promise { - const messagesToSend: { - role: 'system' | 'user', - content: string - }[] = this.systemMessage ? [ + async fetchText(message: string, extras: AdapterExtras): Promise { + const messagesToSend: Array< + OpenAI.Chat.Completions.ChatCompletionSystemMessageParam | + OpenAI.Chat.Completions.ChatCompletionUserMessageParam | + OpenAI.Chat.Completions.ChatCompletionAssistantMessageParam + > = this.systemMessage ? [ { role: 'system', content: this.systemMessage, }, ] : []; + if (extras.conversationHistory) { + messagesToSend.push( + ...conversationHistoryToMessagesList(extras.conversationHistory), + ); + } + messagesToSend.push({ role: 'user', content: message, @@ -71,7 +79,7 @@ export class OpenAiFetchAdapter extends OpenAiAbstractAdapter< } } - streamText(message: string, observer: StreamingAdapterObserver): void { + streamText(message: string, observer: StreamingAdapterObserver, extras: AdapterExtras): void { throw new NluxUsageError({ source: this.constructor.name, message: 'Cannot stream text from the fetch adapter!', diff --git a/packages/js/openai/src/openai/gpt/adapters/stream.ts b/packages/js/openai/src/openai/gpt/adapters/stream.ts index 261239ea..e037b922 100644 --- a/packages/js/openai/src/openai/gpt/adapters/stream.ts +++ b/packages/js/openai/src/openai/gpt/adapters/stream.ts @@ -1,6 +1,7 @@ -import {NluxUsageError, StreamingAdapterObserver, warn} from '@nlux/core'; +import {AdapterExtras, 
NluxUsageError, StreamingAdapterObserver, warn} from '@nlux/core'; import OpenAI from 'openai'; -import {adapterErrorToExceptionId} from '../../../x/adapterErrorToExceptionId'; +import {adapterErrorToExceptionId} from '../../../utils/adapterErrorToExceptionId'; +import {conversationHistoryToMessagesList} from '../../../utils/conversationHistoryToMessagesList'; import {gptStreamingAdapterConfig} from '../config'; import {OpenAiAdapterOptions} from '../types/adapterOptions'; import {OpenAiAbstractAdapter} from './adapter'; @@ -37,18 +38,24 @@ export class OpenAiStreamingAdapter extends OpenAiAbstractAdapter< }); } - streamText(message: string, observer: StreamingAdapterObserver): void { - // TODO - Only send system message once per conversation, when history is included - const messagesToSend: { - role: 'system' | 'user', - content: string - }[] = this.systemMessage ? [ + streamText(message: string, observer: StreamingAdapterObserver, extras: AdapterExtras): void { + const messagesToSend: Array< + OpenAI.Chat.Completions.ChatCompletionSystemMessageParam | + OpenAI.Chat.Completions.ChatCompletionUserMessageParam | + OpenAI.Chat.Completions.ChatCompletionAssistantMessageParam + > = this.systemMessage ? 
[ { role: 'system', content: this.systemMessage, }, ] : []; + if (extras.conversationHistory) { + messagesToSend.push( + ...conversationHistoryToMessagesList(extras.conversationHistory), + ); + } + messagesToSend.push({ role: 'user', content: message, diff --git a/packages/js/openai/src/x/adapterErrorToExceptionId.ts b/packages/js/openai/src/utils/adapterErrorToExceptionId.ts similarity index 100% rename from packages/js/openai/src/x/adapterErrorToExceptionId.ts rename to packages/js/openai/src/utils/adapterErrorToExceptionId.ts diff --git a/packages/js/openai/src/utils/conversationHistoryToMessagesList.ts b/packages/js/openai/src/utils/conversationHistoryToMessagesList.ts new file mode 100644 index 00000000..80b3b8a1 --- /dev/null +++ b/packages/js/openai/src/utils/conversationHistoryToMessagesList.ts @@ -0,0 +1,9 @@ +import {ConversationItem} from '@nlux/core'; +import {participantRoleToOpenAiRole} from './participantRoleToOpenAiRole'; + +export const conversationHistoryToMessagesList = ( + conversationHistory: readonly ConversationItem[], +) => conversationHistory.map((item) => ({ + role: participantRoleToOpenAiRole(item.role), + content: item.message, +})); diff --git a/packages/js/openai/src/utils/participantRoleToOpenAiRole.ts b/packages/js/openai/src/utils/participantRoleToOpenAiRole.ts new file mode 100644 index 00000000..9b4bfbb5 --- /dev/null +++ b/packages/js/openai/src/utils/participantRoleToOpenAiRole.ts @@ -0,0 +1,14 @@ +import {ParticipantRole} from '@nlux/core'; + +export const participantRoleToOpenAiRole = (participantRole: ParticipantRole): 'system' | 'user' | 'assistant' => { + switch (participantRole) { + case 'system': + return 'system'; + case 'user': + return 'user'; + case 'ai': + return 'assistant'; + default: + return 'user'; + } +}; diff --git a/packages/react/core/src/components/AiChat/handleNewPropsReceived.ts b/packages/react/core/src/components/AiChat/handleNewPropsReceived.ts index 6b12cbdf..9d0f7e61 100644 --- 
a/packages/react/core/src/components/AiChat/handleNewPropsReceived.ts +++ b/packages/react/core/src/components/AiChat/handleNewPropsReceived.ts @@ -1,13 +1,13 @@ -import {NluxProps, warn} from '@nlux/core'; +import {AiChatProps, warn} from '@nlux/core'; import {adapterParamToUsableAdapter} from '../../utils/adapterParamToUsableAdapter'; import {optionsUpdater} from '../../utils/optionsUpdater'; import {personaOptionsUpdater} from '../../utils/personasUpdater'; -import type {AiChatProps} from './props'; +import type {AiChatReactProps} from './props'; export const handleNewPropsReceived = async ( - currentProps: AiChatProps, - newProps: AiChatProps, -): Promise | undefined> => { + currentProps: AiChatReactProps, + newProps: AiChatReactProps, +): Promise | undefined> => { const eventListeners = optionsUpdater( currentProps.events, newProps.events, @@ -33,7 +33,11 @@ export const handleNewPropsReceived = async ( newProps.personaOptions, ); - const propsToUpdate: Partial = {}; + type MutableAiChatProps = { + -readonly [P in keyof AiChatProps]: AiChatProps[P]; + }; + + const propsToUpdate: Partial = {}; if (eventListeners !== undefined) { propsToUpdate.events = eventListeners ?? 
{}; diff --git a/packages/react/core/src/components/AiChat/index.tsx b/packages/react/core/src/components/AiChat/index.tsx index ad1742c7..35f01644 100644 --- a/packages/react/core/src/components/AiChat/index.tsx +++ b/packages/react/core/src/components/AiChat/index.tsx @@ -2,11 +2,11 @@ import {AiChat as AiChatType, createAiChat, warn} from '@nlux/core'; import React, {useEffect, useRef, useState} from 'react'; import {reactPersonasToCorePersonas} from '../../utils/reactPersonasToCorePersonas'; import {handleNewPropsReceived} from './handleNewPropsReceived'; -import {AiChatProps} from './props'; +import {AiChatReactProps} from './props'; -export const AiChat = (props: Readonly) => { +export const AiChat = (props: Readonly) => { const rootElement = useRef(null); - const [currentProps, setCurrentProps] = useState | null>(null); + const [currentProps, setCurrentProps] = useState | null>(null); const aiChat = useRef(null); useEffect(() => { @@ -25,7 +25,7 @@ export const AiChat = (props: Readonly) => { promptBoxOptions, personaOptions, events, - conversationHistory, + initialConversation, } = props; let newInstance = createAiChat().withAdapter(adapter); @@ -50,8 +50,8 @@ export const AiChat = (props: Readonly) => { newInstance = newInstance.withSyntaxHighlighter(syntaxHighlighter); } - if (conversationHistory) { - newInstance = newInstance.withConversationHistory(conversationHistory); + if (initialConversation) { + newInstance = newInstance.withInitialConversation(initialConversation); } if (events) { diff --git a/packages/react/core/src/components/AiChat/props.ts b/packages/react/core/src/components/AiChat/props.ts index e5f08cf3..8304cfdd 100644 --- a/packages/react/core/src/components/AiChat/props.ts +++ b/packages/react/core/src/components/AiChat/props.ts @@ -13,7 +13,7 @@ import {PersonaOptions} from './personaOptions'; /** * Properties for the AiChat React component. 
*/ -export type AiChatProps = { +export type AiChatReactProps = { /** * The adapter or adapter builder to use for the conversation. * This can be obtained via useAdapter() hook for standard adapters or by creating your own custom adapter @@ -32,11 +32,11 @@ export type AiChatProps = { className?: string; /** - * The initial conversation history to display. + * The initial conversation to display when the AiChat component is mounted. * This property is not reactive, which means that its value is only read once when the * component is mounted and any subsequent changes to it will be ignored. */ - conversationHistory?: ConversationItem[]; + initialConversation?: ConversationItem[]; /** * The syntax highlighter to use for any source code generated by the LLM diff --git a/packages/react/core/src/index.tsx b/packages/react/core/src/index.tsx index 4e3b83ce..3c94433f 100644 --- a/packages/react/core/src/index.tsx +++ b/packages/react/core/src/index.tsx @@ -1,6 +1,7 @@ export type { Adapter, AdapterBuilder, + AdapterExtras, StreamingAdapterObserver, StandardAdapter, DataTransferMode, @@ -28,7 +29,7 @@ export type { } from './components/AiChat/personaOptions'; export type { - AiChatProps, + AiChatReactProps, } from './components/AiChat/props'; export { diff --git a/pipeline/npm/core/README.md b/pipeline/npm/core/README.md index 65fce725..4c4ccdd7 100644 --- a/pipeline/npm/core/README.md +++ b/pipeline/npm/core/README.md @@ -1,12 +1,13 @@ # [NLUX JS](https://nlux.ai) 🌲✨💬 -[![Free And Open Source](https://img.shields.io/badge/Free%20%26%20Open%20Source-%2348c342)](https://github.com/nluxai/nlux) [![Docs NLUX.ai](https://img.shields.io/badge/Docs_Website-NLUX.dev-%23fa896b)](https://nlux.dev) +![Free And Open Source](https://img.shields.io/badge/Free%20%26%20Open%20Source-1ccb61) +[![Docs NLUX.ai](https://img.shields.io/badge/Docs_Website-NLUX.dev-fa896b)](https://nlux.dev) ## The Conversational AI UI Library For Any LLM -NLUX _(for Natural Language User Experience)_ is an 
open-source Javascript library that makes it simple to integrate -powerful large language models (LLMs) like ChatGPT into your web app or website. With just a few lines of code, you -can add conversational AI capabilities and interact with your favourite LLM. +NLUX _(for Natural Language User Experience)_ is an open-source Javascript library that makes it super simple to +integrate powerful large language models (LLMs) like ChatGPT into your web app or website. With just a few lines +of code, you can add conversational AI capabilities and interact with your favourite LLM. ## Key Features 🌟 @@ -19,9 +20,16 @@ can add conversational AI capabilities and interact with your favourite LLM. * **Event Listeners** - Listen to messages, errors, and other events to customize the UI and behaviour. * **Zero Dependencies** ― Lightweight codebase, with zero-dep except for LLM front-end libraries. +[![200+ Unit Tests](https://github.com/nluxai/nlux/actions/workflows/run-all-tests.yml/badge.svg)](https://github.com/nluxai/nlux/actions/workflows/run-all-tests.yml) + ## Docs & Examples 📖 -For developer documentation, examples, and API reference ― you can visit: **[NLUX.ai](https://nlux.ai/)** +* Developer portal ― [NLUX.dev](https://nlux.dev/) +* Examples and live code playgrounds ― [NLUX.dev/examples](https://nlux.dev/examples) + + +* [Standard LLM adapters available](https://nlux.dev/learn/adapters) +* [How to create your own adapter for NLUX](https://nlux.dev/learn/adapters/custom-adapters/create-custom-adapter) ## Vanilla JS 🟨 vs React JS ⚛️ @@ -29,40 +37,39 @@ This package `@nlux/core` is the vanilla JS version of NLUX. If you're looking for the React JS version, please check the [`@nlux/react`](https://www.npmjs.com/package/@nlux/react) package. 
-## Get Started With NLUX and ChatGPT 🚀 +## Get Started With NLUX 🚀 -To get started with NLUX JS and ChatGPT, install the `@nlux/core` and `@nlux/openai` packages: +The example below demonstrates how to create an AI chat interface using NLUX JS and LangChain, the open source +framework for building LLM backends. But you can use NLUX **with any LLM** ― either +via the [standard adapters](https://nlux.dev/learn/adapters) provided, or +by creating [your own adapter](https://nlux.dev/learn/adapters/custom-adapters/create-custom-adapter). + +To get started with NLUX JS and LangChain, install the `@nlux/core` and `@nlux/langchain` packages: ```sh -npm install @nlux/core @nlux/openai +npm install @nlux/core @nlux/langchain ``` -Configure the OpenAI adapter with your API key:
-_(You can [get an API key](https://help.openai.com/en/articles/4936850-where-do-i-find-my-secret-api-key) from your -OpenAI dashboard)_ +Configure the LangChain LangServe adapter to connect to your API endpoint: ```js import {createAiChat} from '@nlux/core'; -import {createAdapter} from '@nlux/openai'; - -const chatGptAdapter = createAdapter() - .withApiKey('YOUR_OPEN_AI_API_KEY') - // 👇 Instruct ChatGPT how to behave (optional) - .withSystemMessage( - 'Give sound, tailored financial advice. Explain concepts simply. When unsure, ask questions. ' + - 'Only recommend legal, ethical practices. Be friendly. Write concise answers under 5 sentences.' - ); +import {createAdapter} from '@nlux/langchain'; + +const langChainAdapter = createAdapter().withUrl('https://'); ``` Then render the `AiChat` component into your web page: ```js const aiChat = createAiChat() - .withAdapter(chatGptAdapter) + .withAdapter(langChainAdapter) .withConversationOptions({ - scrollWhenGenerating: true, + historyPayloadSize: 'max' + }) + .withPromptBoxOptions({ + placeholder: 'How can I help you today?' }) - .withPromptBoxOptions({placeholder: 'How can I help you today?'}) aiChat.mount(document.getElementById('root')); ``` @@ -71,7 +78,7 @@ You should also [include the NLUX theme CSS file](#theme-file-and-css-) in your ## And The Result Is ✨ -An AI chatbot, experienced in personal finance, that can give your users sound, tailored financial advice: +An AI chatbot, powered by LangChain, that can understand and respond to user messages: [![NLUX AiChat Component](https://nlux.ai/images/demos/chat-convo-demo-fin-advisor.gif)](https://nlux.ai) @@ -83,7 +90,7 @@ from [`@nlux/themes`](https://www.npmjs.com/package/@nlux/themes) or use the CDN hosted version from below: ```jsx - + ``` This CDN is provided for demo purposes only and it's not scalable. 
diff --git a/pipeline/npm/openai-react/README.md b/pipeline/npm/openai-react/README.md index 677144f3..2e0e626b 100644 --- a/pipeline/npm/openai-react/README.md +++ b/pipeline/npm/openai-react/README.md @@ -6,9 +6,20 @@ This package enables the integration between NLUX and OpenAI's API. More specifically ― the package include the adapter to connect to text generation models exposed via [OpenAI Text Generation APIs](https://platform.openai.com/docs/guides/text-generation). +Please note: This adapter connects to the OpenAI API directly from the browser and requires an API key +to be used from a web page. It's not recommended to use it in production environments, and it should only +be used for development and testing purposes. + +If you would like to use OpenAI's API in a production environment, you should use the OpenAI API from a server +(such us a simple Node.js proxy server that you build) and then connect to it from your web page. You can use +NLUX with any API or LLM by **creating a custom adapter for it**. + For more information on how to use this package, please visit: [https://docs.nlux.ai/api/adapters/open-ai](https://docs.nlux.ai/api/adapters/open-ai) +For more information on how to create custom adapters for your own LLM or API, please visit: +[https://docs.nlux.ai/learn/adapters/custom-adapters/create-custom-adapter](https://docs.nlux.ai/learn/adapters/custom-adapters/create-custom-adapter) + ### Vanilla JS 🟨 vs React JS ⚛️ This package `@nlux/openai-react` is meant for use with the React JS version of NLUX. @@ -33,6 +44,8 @@ can add conversational AI capabilities and interact with your favourite LLM. * **Event Listeners** - Listen to messages, errors, and other events to customize the UI and behaviour. * **Zero Dependencies** ― Lightweight codebase, with zero-dep except for LLM front-end libraries. 
+[![200+ Unit Tests](https://github.com/nluxai/nlux/actions/workflows/run-all-tests.yml/badge.svg)](https://github.com/nluxai/nlux/actions/workflows/run-all-tests.yml) + ### Docs & Examples 📖 For developer documentation, examples, and API reference ― you can visit: **[NLUX.ai](https://nlux.ai/)** diff --git a/pipeline/npm/openai/README.md b/pipeline/npm/openai/README.md index 2ac96894..9fda1623 100644 --- a/pipeline/npm/openai/README.md +++ b/pipeline/npm/openai/README.md @@ -6,10 +6,21 @@ This package enables the integration between NLUX and OpenAI's API. More specifically ― the package include the adapter to connect to text generation models exposed via [OpenAI Text Generation APIs](https://platform.openai.com/docs/guides/text-generation). +Please note: This adapter connects to the OpenAI API directly from the browser and requires an API key +to be used from a web page. It's not recommended to use it in production environments, and it should only +be used for development and testing purposes. + +If you would like to use OpenAI's API in a production environment, you should use the OpenAI API from a server +(such as a simple Node.js proxy server that you build) and then connect to it from your web page. You can use +NLUX with any API or LLM by **creating a custom adapter for it**. + For more information on how to use this package, please visit: [https://docs.nlux.ai/api/adapters/open-ai](https://docs.nlux.ai/api/adapters/open-ai) -### Vanilla JS 🟨 vs React JS ⚛️ +For more information on how to create custom adapters for your own LLM or API, please visit: +[https://docs.nlux.ai/learn/adapters/custom-adapters/create-custom-adapter](https://docs.nlux.ai/learn/adapters/custom-adapters/create-custom-adapter) + +## Vanilla JS 🟨 vs React JS ⚛️ This package `@nlux/openai` is meant for use with the vanilla JS version of NLUX. 
If you're looking for the React JS version, please check @@ -33,6 +44,8 @@ can add conversational AI capabilities and interact with your favourite LLM. * **Event Listeners** - Listen to messages, errors, and other events to customize the UI and behaviour. * **Zero Dependencies** ― Lightweight codebase, with zero-dep except for LLM front-end libraries. +[![200+ Unit Tests](https://github.com/nluxai/nlux/actions/workflows/run-all-tests.yml/badge.svg)](https://github.com/nluxai/nlux/actions/workflows/run-all-tests.yml) + ### Docs & Examples 📖 For developer documentation, examples, and API reference ― you can visit: **[NLUX.ai](https://nlux.ai/)** diff --git a/pipeline/npm/package-template.json b/pipeline/npm/package-template.json index 4a49586f..b5a3f1b9 100644 --- a/pipeline/npm/package-template.json +++ b/pipeline/npm/package-template.json @@ -34,6 +34,8 @@ "intelligence", "natural", "language", + "langchain", + "langserve", "processing", "nlp", "nlu", diff --git a/pipeline/npm/react/README.md b/pipeline/npm/react/README.md index d35e4a72..04cba4c7 100644 --- a/pipeline/npm/react/README.md +++ b/pipeline/npm/react/README.md @@ -1,12 +1,14 @@ # [NLUX REACT](https://nlux.ai) 🌲✨💬 -[![Free And Open Source](https://img.shields.io/badge/Free%20%26%20Open%20Source-%2348c342)](https://github.com/nluxai/nlux) [![Docs NLUX.ai](https://img.shields.io/badge/Docs_Website-NLUX.dev-%23fa896b)](https://nlux.dev) +![Free And Open Source](https://img.shields.io/badge/Free%20%26%20Open%20Source-1ccb61) +[![Docs NLUX.ai](https://img.shields.io/badge/Docs_Website-NLUX.dev-fa896b)](https://nlux.dev) ## The Conversational AI UI Library For Any LLM -NLUX _(for Natural Language User Experience)_ is an open-source Javascript library that makes it simple to integrate -powerful large language models (LLMs) like ChatGPT into your web app or website. With just a few lines of code, you -can add conversational AI capabilities and interact with your favourite LLM. 
+NLUX _(for Natural Language User Experience)_ is an open-source React and Javascript library that makes it super simple +to +integrate powerful large language models (LLMs) like ChatGPT into your web app or website. With just a few lines +of code, you can add conversational AI capabilities and interact with your favourite LLM. ## Key Features 🌟 @@ -20,16 +22,28 @@ can add conversational AI capabilities and interact with your favourite LLM. * **Event Listeners** - Listen to messages, errors, and other events to customize the UI and behaviour. * **Zero Dependencies** ― Lightweight codebase, with zero-dep except for LLM front-end libraries. +[![200+ Unit Tests](https://github.com/nluxai/nlux/actions/workflows/run-all-tests.yml/badge.svg)](https://github.com/nluxai/nlux/actions/workflows/run-all-tests.yml) + ## Docs & Examples 📖 -For developer documentation, examples, and API reference ― you can visit: **[NLUX.ai](https://nlux.ai/)** +* Developer portal ― [NLUX.dev](https://nlux.dev/) +* Examples and live code playgrounds ― [NLUX.dev/examples](https://nlux.dev/examples) + + +* [Standard LLM adapters available](https://nlux.dev/learn/adapters) +* [How to create your own adapter for NLUX](https://nlux.dev/learn/adapters/custom-adapters/create-custom-adapter) -## Get Started With NLUX React and ChatGPT 🚀 +## Get Started With NLUX React 🚀 -Install and import dependencies: +The example below demonstrates how to create an AI chat interface using NLUX JS and LangChain, the open source +framework for building LLM backends. But you can use NLUX **with any LLM** ― either +via the [standard adapters](https://nlux.dev/learn/adapters) provided, or +by creating [your own adapter](https://nlux.dev/learn/adapters/custom-adapters/create-custom-adapter). 
+ +To get started with NLUX JS and LangChain, install the `@nlux/react` and `@nlux/langchain-react` packages: ```sh -npm install @nlux/react @nlux/openai-react +npm install @nlux/react @nlux/langchain-react ``` Then include `` in your React app to get started.
@@ -37,15 +51,11 @@ Use the `useAdapter` hook to configure an adapter for your LLM. ```jsx import {AiChat} from '@nlux/react'; -import {useAdapter} from '@nlux/openai-react'; +import {useAdapter} from '@nlux/langchain-react'; const App = () => { const gptAdapter = useAdapter({ - apiKey: 'YOUR_OPEN_AI_API_KEY', - // 👇 Instruct ChatGPT how to behave (optional) - systemMessage: - 'Give sound, tailored financial advice. Explain concepts simply. When unsure, ask questions. ' + - 'Only recommend legal, ethical practices. Be friendly. Write concise answers under 5 sentences.' + url: 'https://' }); return ( @@ -54,6 +64,9 @@ const App = () => { promptBoxOptions={{ placeholder: 'How can I help you today?' }} + conversationOptions={{ + historyPayloadSize: 'max' + }} /> ); } @@ -64,7 +77,7 @@ or import it in your React app. ## And The Result Is ✨ -An AI chatbot, experienced in personal finance, that can give your users sound, tailored financial advice: +An AI chatbot, powered by LangChain, that can understand and respond to user messages: [![NLUX AiChat Component](https://nlux.ai/images/demos/chat-convo-demo-fin-advisor.gif)](https://nlux.ai) diff --git a/pipeline/npm/versions.json b/pipeline/npm/versions.json index 377d49cb..d3f97c97 100644 --- a/pipeline/npm/versions.json +++ b/pipeline/npm/versions.json @@ -1,6 +1,6 @@ { "inherit": true, - "nlux": "0.10.11", + "nlux": "0.11.0", "peerDependencies": { "react": "18.2.0", "react-dom": "18.2.0" diff --git a/samples/emulator/src/01-vanilla-js-with-adapters/customAdapter.ts b/samples/emulator/src/01-vanilla-js-with-adapters/customAdapter.ts index 5adb0238..0faedea8 100644 --- a/samples/emulator/src/01-vanilla-js-with-adapters/customAdapter.ts +++ b/samples/emulator/src/01-vanilla-js-with-adapters/customAdapter.ts @@ -1,10 +1,12 @@ -import {Adapter, StreamingAdapterObserver} from '@nlux/core'; +import {Adapter, AdapterExtras, StreamingAdapterObserver} from '@nlux/core'; export const myCustomStreamingAdapter: Adapter = { 
streamText: ( message: string, observer: StreamingAdapterObserver, + extras: AdapterExtras, ) => { + console.dir(extras, {depth: 3}); setTimeout(() => { const messageToStream = 'Lorem stream ipsum **dolor** sit amet, consectetur adipiscing elit. ' + 'Sed non risus. Suspendisse lectus tortor, dignissim sit amet, ' + @@ -21,7 +23,8 @@ export const myCustomStreamingAdapter: Adapter = { }; export const myCustomPromiseAdapter: Adapter = { - fetchText(message: string): Promise { + fetchText(message: string, extras): Promise { + console.dir(extras, {depth: 3}); return new Promise((resolve) => { setTimeout(() => { const messageToStream = 'Lorem promise ipsum **dolor** sit amet, consectetur adipiscing elit. ' + diff --git a/samples/emulator/src/01-vanilla-js-with-adapters/index.ts b/samples/emulator/src/01-vanilla-js-with-adapters/index.ts index 8920baf3..b3425afd 100644 --- a/samples/emulator/src/01-vanilla-js-with-adapters/index.ts +++ b/samples/emulator/src/01-vanilla-js-with-adapters/index.ts @@ -56,11 +56,14 @@ document.addEventListener('DOMContentLoaded', () => { .withDataTransferMode('stream'); aiChat = createAiChat() - .withAdapter(langServeAdapter) - // .withAdapter(openAiAdapter) + // .withAdapter(langServeAdapter) + .withAdapter(openAiAdapter) // .withAdapter(myCustomStreamingAdapter) // .withAdapter(myCustomPromiseAdapter) .withSyntaxHighlighter(highlighter) + .withConversationOptions({ + historyPayloadSize: 3, + }) .withLayoutOptions({ maxWidth: 500, maxHeight: 500, diff --git a/samples/emulator/src/04-react-js-with-langserve/adapters/einbotStream.ts b/samples/emulator/src/04-react-js-with-langserve/adapters/einbotStream.ts index 8de6311a..94d4cf24 100644 --- a/samples/emulator/src/04-react-js-with-langserve/adapters/einbotStream.ts +++ b/samples/emulator/src/04-react-js-with-langserve/adapters/einbotStream.ts @@ -68,8 +68,6 @@ data: {"content":"","additional_kwargs":{},"type":"AIMessageChunk","example":fal event: end `; - - // TODO - parse the result and feed 
it to the observer } observer.complete(); diff --git a/samples/emulator/src/05-react-js-with-adapters/customAdapter.ts b/samples/emulator/src/05-react-js-with-adapters/customAdapter.ts index 5adb0238..6f97dc28 100644 --- a/samples/emulator/src/05-react-js-with-adapters/customAdapter.ts +++ b/samples/emulator/src/05-react-js-with-adapters/customAdapter.ts @@ -1,10 +1,12 @@ -import {Adapter, StreamingAdapterObserver} from '@nlux/core'; +import {Adapter, AdapterExtras, StreamingAdapterObserver} from '@nlux/react'; export const myCustomStreamingAdapter: Adapter = { streamText: ( message: string, observer: StreamingAdapterObserver, + extras: AdapterExtras, ) => { + console.dir(extras, {depth: 3}); setTimeout(() => { const messageToStream = 'Lorem stream ipsum **dolor** sit amet, consectetur adipiscing elit. ' + 'Sed non risus. Suspendisse lectus tortor, dignissim sit amet, ' + @@ -21,7 +23,8 @@ export const myCustomStreamingAdapter: Adapter = { }; export const myCustomPromiseAdapter: Adapter = { - fetchText(message: string): Promise { + fetchText(message: string, extras: AdapterExtras): Promise { + console.dir(extras, {depth: 3}); return new Promise((resolve) => { setTimeout(() => { const messageToStream = 'Lorem promise ipsum **dolor** sit amet, consectetur adipiscing elit. 
' + diff --git a/samples/emulator/src/05-react-js-with-adapters/index.tsx b/samples/emulator/src/05-react-js-with-adapters/index.tsx index 75245f6d..85dab41b 100644 --- a/samples/emulator/src/05-react-js-with-adapters/index.tsx +++ b/samples/emulator/src/05-react-js-with-adapters/index.tsx @@ -3,6 +3,7 @@ import {useUnsafeAdapter} from '@nlux/openai-react'; import {AiChat} from '@nlux/react'; import React, {useCallback, useState} from 'react'; import {createRoot} from 'react-dom/client'; +import {myCustomStreamingAdapter} from '../01-vanilla-js-with-adapters/customAdapter'; import {personaOptions} from './personaOptions'; debugger; @@ -45,7 +46,7 @@ const ExampleWrapper = () => { { // tagline: 'Mischievously Making Magic With Mirthful AI!', // }, }} - // conversationHistory={[ + // initialConversationContent={[ // { // role: 'user', // message: 'Hello', diff --git a/samples/emulator/src/08-react-js-with-conv-history/index.tsx b/samples/emulator/src/08-react-js-with-conv-history/index.tsx index ca4c68d5..09a07334 100644 --- a/samples/emulator/src/08-react-js-with-conv-history/index.tsx +++ b/samples/emulator/src/08-react-js-with-conv-history/index.tsx @@ -66,7 +66,7 @@ const ExampleWrapper = () => { picture: 'https://nlux.ai/images/demos/persona-woman.jpeg', }, }} - conversationHistory={[ + initialConversation={[ { role: 'user', message: 'Hello', diff --git a/samples/emulator/src/x/debug.ts b/samples/emulator/src/x/debug.ts index 83a9bf4d..4823a7df 100644 --- a/samples/emulator/src/x/debug.ts +++ b/samples/emulator/src/x/debug.ts @@ -2,18 +2,30 @@ export const debug = (...messages: any[]) => { for (const message of messages) { if (typeof message === 'string') { console.log(`[nlux-emulator] ${message}`); - } else { - console.log('[nlux-emulator] Debug:'); - console.log(message); + continue; } + + if (message && typeof message.toString === 'function') { + console.log(`[nlux-emulator] ${message.toString()}`); + continue; + } + + console.log('[nlux-emulator] Debug:'); + 
console.log(JSON.stringify(message, null, 2)); } }; export const warn = (message: any) => { if (typeof message === 'string') { console.warn(`[nlux-emulator] ${message}`); - } else { - console.warn('[nlux-emulator] Debug:'); - console.log(JSON.stringify(message, null, 2)); + return; + } + + if (message && typeof message.toString === 'function') { + console.warn(`[nlux-emulator] ${message.toString()}`); + return; } + + console.warn('[nlux-emulator] Warn:'); + console.log(JSON.stringify(message, null, 2)); }; diff --git a/specs/specs/core/adapters/03-fetch-adapter-extras-props.spec.ts b/specs/specs/core/adapters/03-fetch-adapter-extras-props.spec.ts new file mode 100644 index 00000000..2424fefa --- /dev/null +++ b/specs/specs/core/adapters/03-fetch-adapter-extras-props.spec.ts @@ -0,0 +1,241 @@ +import {AiChat, createAiChat, PersonaOptions} from '@nlux/core'; +import {afterEach, beforeEach, describe, expect, it} from 'vitest'; +import {adapterBuilder} from '../../../utils/adapterBuilder'; +import {AdapterController} from '../../../utils/adapters'; +import {submit, type} from '../../../utils/userInteractions'; +import {waitForRenderCycle} from '../../../utils/wait'; + +describe('When a fetch adapter with extras attribute is provided', () => { + let adapterController: AdapterController; + let rootElement: HTMLElement; + let aiChat: AiChat; + + beforeEach(() => { + rootElement = document.createElement('div'); + document.body.append(rootElement); + adapterController = adapterBuilder().withFetchText().create(); + }); + + afterEach(() => { + aiChat?.unmount(); + rootElement?.remove(); + }); + + it('options should be provided to the adapter as part of extras attribute', async () => { + const testPersonaOptions: PersonaOptions = { + bot: { + name: 'Test Bot', + picture: 'https://example.com/test-bot-image.png', + tagline: 'Test Bot Tagline', + }, + user: { + name: 'Test User', + picture: 'https://example.com/test-user-image.png', + }, + }; + + aiChat = createAiChat() + 
.withAdapter(adapterController.adapter) + .withPersonaOptions(testPersonaOptions); + + aiChat.mount(rootElement); + await waitForRenderCycle(); + + await type('Hello'); + await submit(); + + expect(adapterController.getLastExtras()?.aiChatProps?.personaOptions) + .toEqual(testPersonaOptions); + }); + + it('when options change, new options should be provided to the adapter as part of extras attribute', + async () => { + const testPersonaOptions: PersonaOptions = { + bot: { + name: 'Test Bot', + picture: 'https://example.com/test-bot-image.png', + tagline: 'Test Bot Tagline', + }, + user: { + name: 'Test User', + picture: 'https://example.com/test-user-image.png', + }, + }; + + aiChat = createAiChat() + .withAdapter(adapterController.adapter) + .withPersonaOptions(testPersonaOptions); + + aiChat.mount(rootElement); + await waitForRenderCycle(); + + await type('Hello'); + await submit(); + + expect(adapterController.getLastExtras()?.aiChatProps.layoutOptions).toBeUndefined(); + expect(adapterController.getLastExtras()?.aiChatProps.className).toBeUndefined(); + expect(adapterController.getLastExtras()?.aiChatProps.personaOptions) + .toEqual(testPersonaOptions); + + adapterController.resolve(''); + aiChat.updateProps({ + className: 'new-class', + personaOptions: undefined, + layoutOptions: { + height: 500, + width: 500, + }, + }); + + await type('Hello'); + await submit(); + + expect(adapterController.getLastExtras()?.aiChatProps?.personaOptions).toBeUndefined(); + expect(adapterController.getLastExtras()?.aiChatProps?.className).toEqual('new-class'); + expect(adapterController.getLastExtras()?.aiChatProps?.layoutOptions).toEqual({ + height: 500, + width: 500, + }); + }, + ); + + it('initial conversation should be provided as part of extras.conversationHistory', async () => { + aiChat = createAiChat() + .withInitialConversation([ + {message: 'Hello', role: 'user'}, + {message: 'Hi there!', role: 'ai'}, + ]) + .withAdapter(adapterController.adapter); + + 
aiChat.mount(rootElement); + await waitForRenderCycle(); + + await type('Hello'); + await submit(); + + expect(adapterController.getLastExtras()?.conversationHistory) + .toEqual([ + {message: 'Hello', role: 'user'}, + {message: 'Hi there!', role: 'ai'}, + ]); + }); + + it('new messages should be added to the conversation history sent to adapter', async () => { + aiChat = createAiChat() + .withInitialConversation([ + {message: 'Hello', role: 'user'}, + {message: 'Hi there! What do you want to know?', role: 'ai'}, + ]) + .withAdapter(adapterController.adapter); + + aiChat.mount(rootElement); + await waitForRenderCycle(); + + await type('How is the weather today?'); + await submit(); + + adapterController.resolve('The weather is great!'); + await waitForRenderCycle(); + + await type('And what about the rain?'); + await submit(); + + expect(adapterController.getLastExtras()?.conversationHistory) + .toEqual([ + {message: 'Hello', role: 'user'}, + {message: 'Hi there! What do you want to know?', role: 'ai'}, + {message: 'How is the weather today?', role: 'user'}, + {message: 'The weather is great!', role: 'ai'}, + ]); + }); + + describe('when conversation history is disabled', () => { + it('initial conversation history should not be provided to the adapter', async () => { + aiChat = createAiChat() + .withInitialConversation([ + {message: 'Hello', role: 'user'}, + {message: 'Hi there! 
What do you want to know?', role: 'ai'}, + ]) + .withConversationOptions({ + historyPayloadSize: 0, + }) + .withAdapter(adapterController.adapter); + + aiChat.mount(rootElement); + await waitForRenderCycle(); + + await type('How is the weather today?'); + await submit(); + + expect(adapterController.getLastExtras()?.conversationHistory).toBeUndefined(); + }); + + it('new messages should not be added to the conversation history sent to adapter', async () => { + aiChat = createAiChat() + .withConversationOptions({ + historyPayloadSize: 'none', + }) + .withAdapter(adapterController.adapter); + + aiChat.mount(rootElement); + await waitForRenderCycle(); + + await type('How is the weather today?'); + await submit(); + + adapterController.resolve('The weather is great!'); + await waitForRenderCycle(); + + await type('And what about the rain?'); + await submit(); + await waitForRenderCycle(); + + expect(adapterController.getLastExtras()?.conversationHistory).toBeUndefined(); + }); + }); + + describe('when conversation history config is set to a specific value', () => { + it('conversation history should be limited to the specified value', async () => { + aiChat = createAiChat() + .withInitialConversation([ + {message: 'Hello', role: 'user'}, + {message: 'Hi there! What do you want to know?', role: 'ai'}, + {message: 'How is the weather today?', role: 'user'}, + {message: 'The weather is great!', role: 'ai'}, + ]) + .withConversationOptions({ + historyPayloadSize: 3, + }) + .withAdapter(adapterController.adapter); + + aiChat.mount(rootElement); + await waitForRenderCycle(); + + await type('And what about the rain?'); + await submit(); + + adapterController.resolve('The rain is also great!'); + await waitForRenderCycle(); + + expect(adapterController.getLastExtras()?.conversationHistory) + .toEqual([ + {message: 'Hi there! 
What do you want to know?', role: 'ai'}, + {message: 'How is the weather today?', role: 'user'}, + {message: 'The weather is great!', role: 'ai'}, + ]); + + await type('And what about the snow?'); + await submit(); + + adapterController.resolve('The snow is also great!'); + await waitForRenderCycle(); + + expect(adapterController.getLastExtras()?.conversationHistory) + .toEqual([ + {message: 'The weather is great!', role: 'ai'}, + {message: 'And what about the rain?', role: 'user'}, + {message: 'The rain is also great!', role: 'ai'}, + ]); + }); + }); +}); diff --git a/specs/specs/core/adapters/03-streaming-adapter.spec.ts b/specs/specs/core/adapters/04-streaming-adapter.spec.ts similarity index 100% rename from specs/specs/core/adapters/03-streaming-adapter.spec.ts rename to specs/specs/core/adapters/04-streaming-adapter.spec.ts diff --git a/specs/specs/core/adapters/05-streaming-adapter-extras-props.spec.ts b/specs/specs/core/adapters/05-streaming-adapter-extras-props.spec.ts new file mode 100644 index 00000000..697d3faa --- /dev/null +++ b/specs/specs/core/adapters/05-streaming-adapter-extras-props.spec.ts @@ -0,0 +1,246 @@ +// import {AiChat, createAiChat, PersonaOptions} from '@nlux/core'; +import {AiChat, createAiChat, PersonaOptions} from '@nlux-dev/core/src'; +import {afterEach, beforeEach, describe, expect, it} from 'vitest'; +import {adapterBuilder} from '../../../utils/adapterBuilder'; +import {AdapterController} from '../../../utils/adapters'; +import {submit, type} from '../../../utils/userInteractions'; +import {waitForRenderCycle} from '../../../utils/wait'; + +describe('When an streaming adapter with extras attribute is provided', () => { + let adapterController: AdapterController; + let rootElement: HTMLElement; + let aiChat: AiChat; + + beforeEach(() => { + rootElement = document.createElement('div'); + document.body.append(rootElement); + adapterController = adapterBuilder().withStreamText().create(); + }); + + afterEach(() => { + 
aiChat?.unmount(); + rootElement?.remove(); + }); + + it('options should be provided to the adapter as part of extras attribute', async () => { + const testPersonaOptions: PersonaOptions = { + bot: { + name: 'Test Bot', + picture: 'https://example.com/test-bot-image.png', + tagline: 'Test Bot Tagline', + }, + user: { + name: 'Test User', + picture: 'https://example.com/test-user-image.png', + }, + }; + + aiChat = createAiChat() + .withAdapter(adapterController.adapter) + .withPersonaOptions(testPersonaOptions); + + aiChat.mount(rootElement); + await waitForRenderCycle(); + + await type('Hello'); + await submit(); + + expect(adapterController.getLastExtras()?.aiChatProps?.personaOptions) + .toEqual(testPersonaOptions); + }); + + it('when options change, new options should be provided to the adapter as part of extras attribute', + async () => { + const testPersonaOptions: PersonaOptions = { + bot: { + name: 'Test Bot', + picture: 'https://example.com/test-bot-image.png', + tagline: 'Test Bot Tagline', + }, + user: { + name: 'Test User', + picture: 'https://example.com/test-user-image.png', + }, + }; + + aiChat = createAiChat() + .withAdapter(adapterController.adapter) + .withPersonaOptions(testPersonaOptions); + + aiChat.mount(rootElement); + await waitForRenderCycle(); + + await type('Hello'); + await submit(); + + expect(adapterController.getLastExtras()?.aiChatProps.layoutOptions).toBeUndefined(); + expect(adapterController.getLastExtras()?.aiChatProps.className).toBeUndefined(); + expect(adapterController.getLastExtras()?.aiChatProps.personaOptions) + .toEqual(testPersonaOptions); + + adapterController.complete(); + aiChat.updateProps({ + className: 'new-class', + personaOptions: undefined, + layoutOptions: { + height: 500, + width: 500, + }, + }); + + await type('Hello'); + await submit(); + + expect(adapterController.getLastExtras()?.aiChatProps?.personaOptions).toBeUndefined(); + 
expect(adapterController.getLastExtras()?.aiChatProps?.className).toEqual('new-class'); + expect(adapterController.getLastExtras()?.aiChatProps?.layoutOptions).toEqual({ + height: 500, + width: 500, + }); + }, + ); + + it('initial conversation should be provided as part of extras.conversationHistory', async () => { + aiChat = createAiChat() + .withInitialConversation([ + {message: 'Hello', role: 'user'}, + {message: 'Hi there!', role: 'ai'}, + ]) + .withAdapter(adapterController.adapter); + + aiChat.mount(rootElement); + await waitForRenderCycle(); + + await type('Hello'); + await submit(); + + expect(adapterController.getLastExtras()?.conversationHistory) + .toEqual([ + {message: 'Hello', role: 'user'}, + {message: 'Hi there!', role: 'ai'}, + ]); + }); + + it('new messages should be added to the conversation history sent to adapter', async () => { + aiChat = createAiChat() + .withInitialConversation([ + {message: 'Hello', role: 'user'}, + {message: 'Hi there! What do you want to know?', role: 'ai'}, + ]) + .withAdapter(adapterController.adapter); + + aiChat.mount(rootElement); + await waitForRenderCycle(); + + await type('How is the weather today?'); + await submit(); + + adapterController.next('The weather is great!'); + adapterController.complete(); + await waitForRenderCycle(); + + await type('And what about the rain?'); + await submit(); + + expect(adapterController.getLastExtras()?.conversationHistory) + .toEqual([ + {message: 'Hello', role: 'user'}, + {message: 'Hi there! What do you want to know?', role: 'ai'}, + {message: 'How is the weather today?', role: 'user'}, + {message: 'The weather is great!', role: 'ai'}, + ]); + }); + + describe('when conversation history is disabled', () => { + it('initial conversation history should not be provided to the adapter', async () => { + aiChat = createAiChat() + .withInitialConversation([ + {message: 'Hello', role: 'user'}, + {message: 'Hi there! 
What do you want to know?', role: 'ai'}, + ]) + .withConversationOptions({ + historyPayloadSize: 0, + }) + .withAdapter(adapterController.adapter); + + aiChat.mount(rootElement); + await waitForRenderCycle(); + + await type('How is the weather today?'); + await submit(); + + expect(adapterController.getLastExtras()?.conversationHistory).toBeUndefined(); + }); + + it('new messages should not be added to the conversation history sent to adapter', async () => { + aiChat = createAiChat() + .withConversationOptions({ + historyPayloadSize: 'none', + }) + .withAdapter(adapterController.adapter); + + aiChat.mount(rootElement); + await waitForRenderCycle(); + + await type('How is the weather today?'); + await submit(); + + adapterController.next('The weather is great!'); + adapterController.complete(); + await waitForRenderCycle(); + + await type('And what about the rain?'); + await submit(); + await waitForRenderCycle(); + + expect(adapterController.getLastExtras()?.conversationHistory).toBeUndefined(); + }); + }); + + describe('when conversation history config is set to a specific value', () => { + it('conversation history should be limited to the specified value', async () => { + aiChat = createAiChat() + .withInitialConversation([ + {message: 'Hello', role: 'user'}, + {message: 'Hi there! What do you want to know?', role: 'ai'}, + {message: 'How is the weather today?', role: 'user'}, + {message: 'The weather is great!', role: 'ai'}, + ]) + .withConversationOptions({ + historyPayloadSize: 3, + }) + .withAdapter(adapterController.adapter); + + aiChat.mount(rootElement); + await waitForRenderCycle(); + + await type('And what about the rain?'); + await submit(); + + adapterController.next('The rain is also great!'); + adapterController.complete(); + await waitForRenderCycle(); + + expect(adapterController.getLastExtras()?.conversationHistory) + .toEqual([ + {message: 'Hi there! 
What do you want to know?', role: 'ai'}, + {message: 'How is the weather today?', role: 'user'}, + {message: 'The weather is great!', role: 'ai'}, + ]); + + await type('And what about the snow?'); + await submit(); + + adapterController.next('The snow is also great!'); + adapterController.complete(); + await waitForRenderCycle(); + + expect(adapterController.getLastExtras()?.conversationHistory) + .toEqual([ + {message: 'The weather is great!', role: 'ai'}, + {message: 'And what about the rain?', role: 'user'}, + {message: 'The rain is also great!', role: 'ai'}, + ]); + }); + }); +}); diff --git a/specs/specs/core/ai-chat/06-conversation-history.spec.ts b/specs/specs/core/ai-chat/06-conversation-history.spec.ts index cca9002d..b301ec92 100644 --- a/specs/specs/core/ai-chat/06-conversation-history.spec.ts +++ b/specs/specs/core/ai-chat/06-conversation-history.spec.ts @@ -27,7 +27,7 @@ describe('When AiChat box is created with conversation history', () => { aiChat = new AiChat() .withAdapter(adapterController.adapter) .withLayoutOptions({height: '200px'}) - .withConversationHistory([ + .withInitialConversation([ {message: 'Hello AI!', role: 'user'}, {message: 'Hi user!', role: 'ai'}, ]); @@ -60,7 +60,7 @@ describe('When AiChat box is created with conversation history', () => { picture: 'https://i.imgur.com/7QuesI3.png', }, }) - .withConversationHistory([ + .withInitialConversation([ {message: 'Hello AI!', role: 'user'}, {message: 'Hi user!', role: 'ai'}, ]); diff --git a/specs/specs/core/personas/01-welcome-message.spec.ts b/specs/specs/core/personas/01-welcome-message.spec.ts index 16dc6d47..b7ce8500 100644 --- a/specs/specs/core/personas/01-welcome-message.spec.ts +++ b/specs/specs/core/personas/01-welcome-message.spec.ts @@ -88,7 +88,7 @@ describe('Personas config is provided', () => { aiChat = createAiChat() .withAdapter(adapterController.adapter) .withPersonaOptions(personasConfig) - .withConversationHistory([ + .withInitialConversation([ { message: 'Hello LLM', 
role: 'user', diff --git a/specs/specs/core/reactive-props/react/adapter-conversation-history.spec.tsx b/specs/specs/core/reactive-props/react/adapter-conversation-history.spec.tsx new file mode 100644 index 00000000..7caf5561 --- /dev/null +++ b/specs/specs/core/reactive-props/react/adapter-conversation-history.spec.tsx @@ -0,0 +1,261 @@ +import {AiChat} from '@nlux/react'; +import {render} from '@testing-library/react'; +import React from 'react'; +import {beforeEach, describe, expect, it} from 'vitest'; +import {adapterBuilder} from '../../../../utils/adapterBuilder'; +import {AdapterController} from '../../../../utils/adapters'; +import {submit, type} from '../../../../utils/userInteractions'; +import {delayBeforeSendingResponse, waitForMilliseconds, waitForRenderCycle} from '../../../../utils/wait'; + +describe('When the adapter consuming conversation history is used with a React component', () => { + let adapterController: AdapterController; + + beforeEach(() => { + adapterController = adapterBuilder().withFetchText().create(); + }); + + it('adapter should receive the initial conversation in extras', async () => { + render( + , + ); + + await type('How is the weather today?'); + await submit(); + await waitForMilliseconds(delayBeforeSendingResponse / 2); + + adapterController.resolve('The weather is great!'); + await waitForMilliseconds(delayBeforeSendingResponse); + + expect(adapterController.getLastExtras()?.conversationHistory).toEqual([ + {role: 'user', message: 'Hello'}, + {role: 'ai', message: 'Hi there! 
How can I help you today?'}, + ]); + }); + + it('should receive new messages in extras', async () => { + render( + , + ); + + await type('How is the weather today?'); + await submit(); + await waitForMilliseconds(delayBeforeSendingResponse / 2); + + adapterController.resolve('The weather is great!'); + await waitForMilliseconds(delayBeforeSendingResponse); + + await type('And tomorrow?'); + await submit(); + await waitForMilliseconds(delayBeforeSendingResponse / 2); + + expect(adapterController.getLastExtras()?.conversationHistory).toEqual([ + {role: 'ai', message: 'Hi there! How can I help you today?'}, + {role: 'user', message: 'How is the weather today?'}, + {role: 'ai', message: 'The weather is great!'}, + ]); + }); + + it('should not receive history when disabled', async () => { + render( + , + ); + + await type('How is the weather today?'); + await submit(); + await waitForMilliseconds(delayBeforeSendingResponse / 2); + + adapterController.resolve('The weather is great!'); + await waitForMilliseconds(delayBeforeSendingResponse); + + expect(adapterController.getLastExtras()?.conversationHistory).toEqual(undefined); + }); + + it('adapter should stop receiving conversation history when option changes', async () => { + const {rerender} = render( + , + ); + + await type('How is the weather today?'); + await submit(); + await waitForMilliseconds(delayBeforeSendingResponse / 2); + + adapterController.resolve('The weather is great!'); + await waitForMilliseconds(delayBeforeSendingResponse); + + expect(adapterController.getLastExtras()?.conversationHistory).toEqual([ + {role: 'user', message: 'Hello'}, + {role: 'ai', message: 'Hi there! 
How can I help you today?'}, + ]); + + rerender( + , + ); + + await type('How is the rain today?'); + await submit(); + await waitForMilliseconds(delayBeforeSendingResponse / 2); + + adapterController.resolve('The rain is great!'); + await waitForMilliseconds(delayBeforeSendingResponse); + + expect(adapterController.getLastExtras()?.conversationHistory).toBeUndefined(); + }); + + it('should get adjusted history when option changes', async () => { + const {rerender} = render( + , + ); + + await type('How is the weather today?'); + await submit(); + await waitForMilliseconds(delayBeforeSendingResponse / 2); + + adapterController.resolve('The weather is great!'); + await waitForMilliseconds(delayBeforeSendingResponse); + + expect(adapterController.getLastExtras()?.conversationHistory).toEqual([ + {role: 'ai', message: 'Hi there! How can I help you today?'}, + ]); + + rerender( + , + ); + + await type('And tomorrow?'); + await submit(); + await waitForMilliseconds(delayBeforeSendingResponse / 2); + + expect(adapterController.getLastExtras()?.conversationHistory).toEqual([ + {role: 'user', message: 'Hello'}, + {role: 'ai', message: 'Hi there! How can I help you today?'}, + {role: 'user', message: 'How is the weather today?'}, + {role: 'ai', message: 'The weather is great!'}, + ]); + }); + + it('should not be impacted by change in initial conversation', async () => { + const {rerender} = render( + , + ); + + await type('How is the weather today?'); + await submit(); + await waitForMilliseconds(delayBeforeSendingResponse / 2); + + adapterController.resolve('The weather is great!'); + await waitForMilliseconds(delayBeforeSendingResponse); + + expect(adapterController.getLastExtras()?.conversationHistory).toEqual([ + {role: 'user', message: 'Hello'}, + {role: 'ai', message: 'Hi there! 
How can I help you today?'}, + ]); + + rerender( + , + ); + + await waitForRenderCycle(); + + await type('Non, merci'); + await submit(); + await waitForMilliseconds(delayBeforeSendingResponse / 2); + + expect(adapterController.getLastExtras()?.conversationHistory).toEqual([ + {role: 'user', message: 'Hello'}, + {role: 'ai', message: 'Hi there! How can I help you today?'}, + {role: 'user', message: 'How is the weather today?'}, + {role: 'ai', message: 'The weather is great!'}, + ]); + + await waitForRenderCycle(); + adapterController.resolve('Ca marche!'); + + await type('Ok!'); + await submit(); + await waitForMilliseconds(delayBeforeSendingResponse / 2); + + expect(adapterController.getLastExtras()?.conversationHistory).toEqual([ + {role: 'user', message: 'Hello'}, + {role: 'ai', message: 'Hi there! How can I help you today?'}, + {role: 'user', message: 'How is the weather today?'}, + {role: 'ai', message: 'The weather is great!'}, + {role: 'user', message: 'Non, merci'}, + {role: 'ai', message: 'Ca marche!'}, + ]); + }); +}); diff --git a/specs/specs/core/reactive-props/react/adapter-extras.spec.tsx b/specs/specs/core/reactive-props/react/adapter-extras.spec.tsx new file mode 100644 index 00000000..a0b3689a --- /dev/null +++ b/specs/specs/core/reactive-props/react/adapter-extras.spec.tsx @@ -0,0 +1,121 @@ +import {AiChat} from '@nlux/react'; +import {render} from '@testing-library/react'; +import React from 'react'; +import {beforeEach, describe, expect, it} from 'vitest'; +import {adapterBuilder} from '../../../../utils/adapterBuilder'; +import {AdapterController} from '../../../../utils/adapters'; +import {submit, type} from '../../../../utils/userInteractions'; +import {delayBeforeSendingResponse, waitForMilliseconds} from '../../../../utils/wait'; + +describe('When the adapter consuming extra props is used with a React component', () => { + let adapterController: AdapterController; + + beforeEach(() => { + adapterController = 
adapterBuilder().withFetchText().create(); + }); + + it('adapter should receive props in extras', async () => { + const {getByTestId} = render( + , + ); + + await type('Hello'); + await submit(); + await waitForMilliseconds(delayBeforeSendingResponse / 2); + + adapterController.resolve('Yo!'); + await waitForMilliseconds(delayBeforeSendingResponse); + + expect(adapterController.getLastExtras()?.aiChatProps?.personaOptions).toEqual({ + bot: { + name: 'Test Bot', + picture: 'https://example.com/test-bot-image.png', + tagline: 'Test Bot Tagline', + }, + user: { + name: 'Test User', + picture: 'https://example.com/test-user-image.png', + }, + }); + }); + + describe('When the component props change', () => { + it('adapter should receive updated props in extras', async () => { + const {rerender} = render( + , + ); + + await type('Hello'); + await submit(); + await waitForMilliseconds(delayBeforeSendingResponse / 2); + + adapterController.resolve('Yo!'); + await waitForMilliseconds(delayBeforeSendingResponse); + expect(adapterController.getLastExtras()?.aiChatProps?.className).toEqual(undefined); + + rerender( + , + ); + + await type('Hello'); + await submit(); + await waitForMilliseconds(delayBeforeSendingResponse / 2); + + adapterController.resolve('Yo!'); + await waitForMilliseconds(delayBeforeSendingResponse); + + expect(adapterController.getLastExtras()?.aiChatProps?.className).toEqual('test-class'); + expect(adapterController.getLastExtras()?.aiChatProps?.personaOptions).toEqual({ + bot: { + name: 'Updated Bot', + picture: 'https://example.com/updated-bot-image.png', + tagline: 'Updated Bot Tagline', + }, + user: { + name: 'Updated User', + picture: 'https://example.com/updated-user-image.png', + }, + }); + }); + }); +}); diff --git a/specs/specs/markdown/12-hyperlink.spec.ts b/specs/specs/markdown/12-hyperlink.spec.ts index 4e5f6255..aba90496 100644 --- a/specs/specs/markdown/12-hyperlink.spec.ts +++ b/specs/specs/markdown/12-hyperlink.spec.ts @@ -1,6 +1,4 @@ -// 
import {createMdStreamRenderer, StandardStreamParserOutput} from '@nlux/core'; -import {createMdStreamRenderer} from '@nlux-dev/core/src/core/markdown/streamParser'; -import {StandardStreamParserOutput} from '@nlux-dev/core/src/types/markdown/streamParser'; +import {createMdStreamRenderer, StandardStreamParserOutput} from '@nlux/core'; import {beforeEach, describe, expect, it} from 'vitest'; import {waitForMdStreamToComplete} from '../../utils/wait'; diff --git a/specs/specs/standard-adapters/langchain/01-stream-output-parser.spec.ts b/specs/specs/standard-adapters/langchain/01-stream-output-parser.spec.ts index d2737dc4..964554c8 100644 --- a/specs/specs/standard-adapters/langchain/01-stream-output-parser.spec.ts +++ b/specs/specs/standard-adapters/langchain/01-stream-output-parser.spec.ts @@ -1,5 +1,5 @@ +import {parseChunk} from '@nlux-dev/langchain/src/langserve/parser/parseChunk'; import {describe, expect, it} from 'vitest'; -import {parseChunk} from '../../../../packages/js/langchain/src/langserve/parser/parseChunk'; import multipleMixedEvents from './data/multiple-mixed-events'; import multipleValidDataEvents from './data/multiple-valid-data-events'; import multipleValidDataEventsWithSpace from './data/multiple-valid-data-events-with-space'; diff --git a/specs/specs/standard-adapters/langchain/02-input-pre-processor.spec.ts b/specs/specs/standard-adapters/langchain/02-input-pre-processor.spec.ts new file mode 100644 index 00000000..d2b7c604 --- /dev/null +++ b/specs/specs/standard-adapters/langchain/02-input-pre-processor.spec.ts @@ -0,0 +1,131 @@ +import {createAdapter} from '@nlux-dev/langchain/src'; +import {AiChat, createAiChat} from '@nlux/core'; +import {afterEach, beforeEach, describe, expect, it} from 'vitest'; +import {submit, type} from '../../../utils/userInteractions'; +import {waitForRenderCycle} from '../../../utils/wait'; + +describe('When LangServe input pre-processor is used with streaming adapter', () => { + let rootElement: HTMLElement; + 
let aiChat: AiChat; + + beforeEach(() => { + rootElement = document.createElement('div'); + document.body.append(rootElement); + }); + + afterEach(() => { + aiChat?.unmount(); + rootElement?.remove(); + }); + + it('should receive the entire conversation history', async () => { + let conversationHistoryInPreProcessor: any; + let userMessage: any; + + const adapter = createAdapter() + .withUrl('http://localhost:8080') + .withDataTransferMode('stream') + .withInputPreProcessor(( + input, + conversationHistory, + ) => { + userMessage = input; + conversationHistoryInPreProcessor = conversationHistory; + }); + + aiChat = createAiChat() + .withAdapter(adapter) + .withInitialConversation([ + { + role: 'user', + message: 'Hello AI', + }, + { + role: 'ai', + message: 'Hi user', + }, + ]); + + aiChat.mount(rootElement); + + await waitForRenderCycle(); + await type('How is the weather today?'); + await submit(); + await waitForRenderCycle(); + + expect(userMessage).toEqual('How is the weather today?'); + expect(conversationHistoryInPreProcessor).toEqual([ + { + role: 'user', + message: 'Hello AI', + }, + { + role: 'ai', + message: 'Hi user', + }, + ]); + }); +}); + +describe('When LangServe input pre-processor is used with fetch adapter', () => { + let rootElement: HTMLElement; + let aiChat: AiChat; + + beforeEach(() => { + rootElement = document.createElement('div'); + document.body.append(rootElement); + }); + + afterEach(() => { + aiChat?.unmount(); + rootElement?.remove(); + }); + + it('should receive the entire conversation history', async () => { + let conversationHistoryInPreProcessor: any; + let userMessage: any; + + const adapter = createAdapter() + .withUrl('http://localhost:8080') + .withDataTransferMode('fetch') + .withInputPreProcessor(( + input, + conversationHistory, + ) => { + userMessage = input; + conversationHistoryInPreProcessor = conversationHistory; + }); + + aiChat = createAiChat() + .withAdapter(adapter) + .withInitialConversation([ + { + role: 'user', 
+ message: 'Hello AI', + }, + { + role: 'ai', + message: 'Hi user', + }, + ]); + + aiChat.mount(rootElement); + + await waitForRenderCycle(); + await type('How is the weather today?'); + await submit(); + await waitForRenderCycle(); + + expect(userMessage).toEqual('How is the weather today?'); + expect(conversationHistoryInPreProcessor).toEqual([ + { + role: 'user', + message: 'Hello AI', + }, + { + role: 'ai', + message: 'Hi user', + }, + ]); + }); +}); diff --git a/specs/utils/adapters.ts b/specs/utils/adapters.ts index f9be767e..b7d8fee0 100644 --- a/specs/utils/adapters.ts +++ b/specs/utils/adapters.ts @@ -1,4 +1,4 @@ -import {Adapter, StreamingAdapterObserver} from '@nlux/core'; +import {Adapter, AdapterExtras, StreamingAdapterObserver} from '@nlux/core'; import {vi} from 'vitest'; export const createAdapterController = ({ @@ -9,12 +9,16 @@ export const createAdapterController = ({ let rejectPromise: Function | null = null; let lastMessageSent: string | null = null; let streamTextObserver: StreamingAdapterObserver | null = null; + let extrasFromLastMessage: AdapterExtras | undefined | null = null; let fetchTextMock = vi.fn(); let streamTextMock = vi.fn(); - const createNewFetchTextMock = () => (message: string) => { + const createNewFetchTextMock = () => ( + message: string, extras: AdapterExtras, + ) => { lastMessageSent = message; + extrasFromLastMessage = extras; fetchTextMock(message); return new Promise((resolve, reject) => { @@ -23,9 +27,13 @@ export const createAdapterController = ({ }); }; - const createNewStreamTextMock = () => (message: string, observer: StreamingAdapterObserver) => { + const createNewStreamTextMock = () => ( + message: string, observer: StreamingAdapterObserver, extras: AdapterExtras, + ) => { lastMessageSent = message; streamTextObserver = observer; + extrasFromLastMessage = extras; + streamTextMock(message, observer); return new Promise((resolve, reject) => { @@ -41,6 +49,7 @@ export const createAdapterController = ({ return 
Object.freeze({ getLastMessage: () => lastMessageSent, + getLastExtras: () => extrasFromLastMessage, adapter: adapter, fetchTextMock, streamTextMock, diff --git a/yarn.lock b/yarn.lock index 134d6834..671ab8c1 100644 --- a/yarn.lock +++ b/yarn.lock @@ -725,16 +725,16 @@ form-data "^4.0.0" "@types/node@*": - version "20.11.16" - resolved "https://registry.yarnpkg.com/@types/node/-/node-20.11.16.tgz#4411f79411514eb8e2926f036c86c9f0e4ec6708" - integrity sha512-gKb0enTmRCzXSSUJDq6/sPcqrfCv2mkkG6Jt/clpn5eiCbKTY+SgZUxo+p8ZKMof5dCp9vHQUAB7wOUTod22wQ== + version "20.11.17" + resolved "https://registry.yarnpkg.com/@types/node/-/node-20.11.17.tgz#cdd642d0e62ef3a861f88ddbc2b61e32578a9292" + integrity sha512-QmgQZGWu1Yw9TDyAP9ZzpFJKynYNeOvwMJmaxABfieQoVoiVOS6MN1WSpqpRcbeA5+RW82kraAVxCCJg+780Qw== dependencies: undici-types "~5.26.4" "@types/node@^18.11.18": - version "18.19.14" - resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.14.tgz#1880ff1b3ac913f3877f711588e5ed227da01886" - integrity sha512-EnQ4Us2rmOS64nHDWr0XqAD8DsO6f3XR6lf9UIIrZQpUzPVdN/oPuEzfDWNHSyXLvoGgjuEm/sPwFGSSs35Wtg== + version "18.19.15" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.15.tgz#313a9d75435669a57fc28dc8694e7f4c4319f419" + integrity sha512-AMZ2UWx+woHNfM11PyAEQmfSxi05jm9OlkxczuHeEqmvwPkYj6MWv44gbzDPefYOLysTOFyI3ziiy2ONmUZfpA== dependencies: undici-types "~5.26.4" @@ -1506,9 +1506,9 @@ domutils@^2.8.0: domhandler "^4.2.0" electron-to-chromium@^1.4.648: - version "1.4.660" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.660.tgz#80be71d08c1224980e645904ab9155f3fa54a1ea" - integrity sha512-1BqvQG0BBQrAA7FVL2EMrb5A1sVyXF3auwJneXjGWa1TpN+g0C4KbUsYWePz6OZ0mXZfXGy+RmQDELJWwE8v/Q== + version "1.4.665" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.665.tgz#681700bd590b0e5a3be66e3e2874ce62abcf5da5" + integrity 
sha512-UpyCWObBoD+nSZgOC2ToaIdZB0r9GhqT2WahPKiSki6ckkSuKhQNso8V2PrFcHBMleI/eqbKgVQgVC4Wni4ilw== entities@^2.0.0: version "2.2.0" @@ -2494,9 +2494,9 @@ onetime@^6.0.0: mimic-fn "^4.0.0" openai@^4.26.0: - version "4.26.1" - resolved "https://registry.yarnpkg.com/openai/-/openai-4.26.1.tgz#7b7c0225c09922445f68f3c4cdbd5775ed31108c" - integrity sha512-DvWbjhWbappsFRatOWmu4Dp1/Q4RG9oOz6CfOSjy0/Drb8G+5iAiqWAO4PfpGIkhOOKtvvNfQri2SItl+U7LhQ== + version "4.27.0" + resolved "https://registry.yarnpkg.com/openai/-/openai-4.27.0.tgz#4ada66a19c369b202e8a939b8bd287e4dca036f4" + integrity sha512-j1ZEx9NiBpm31rxWqQTjQt1QvH/8001xHsc/pRoPjkRDYWONCb+qkR6L9C7Wl6ar72Mz1ybtn1bv6fqAoTPlKw== dependencies: "@types/node" "^18.11.18" "@types/node-fetch" "^2.6.4"