Commit
added some new params to bedrock adapter's builder
somebodyawesome-dev committed Jul 22, 2024
1 parent b137dfe commit cedb659
Showing 2 changed files with 220 additions and 145 deletions.
161 changes: 97 additions & 64 deletions packages/js/bedrock/src/bedrock/builder/builder.ts
import {
  BedrockRuntimeClientConfigType,
  InferenceConfiguration,
} from "@aws-sdk/client-bedrock-runtime";
import {
  ChatAdapterBuilder as CoreChatAdapterBuilder,
  DataTransferMode,
  StandardChatAdapter,
} from "@nlux/core";

export interface ChatAdapterBuilder<AiMsg>
  extends CoreChatAdapterBuilder<AiMsg> {
  /**
   * Create a new Bedrock Inference API adapter.
   * Adapter users don't need to call this method directly. It will be called by nlux when the adapter
   * is expected to be created.
   *
   * @returns {StandardChatAdapter}
   */
  create(): StandardChatAdapter<AiMsg>;

  /**
   * The AWS credentials to use for the Bedrock Inference API.
   * They are passed to the underlying Bedrock runtime client configuration.
   *
   * @optional
   * @param {BedrockRuntimeClientConfigType["credentials"]} cred
   * @returns {ChatAdapterBuilder}
   */
  withCredintial(
    cred: BedrockRuntimeClientConfigType["credentials"]
  ): ChatAdapterBuilder<AiMsg>;

  /**
   * Instruct the adapter to connect to the API and load data either in streaming mode or in batch mode.
   * The `stream` mode uses protocols such as WebSockets or server-sent events, and nlux displays data
   * as it is being generated by the server. The `batch` mode uses a single request to fetch data, and
   * the response is only displayed once the entire message is loaded.
   *
   * @optional
   * @default 'stream'
   * @returns {ChatAdapterBuilder}
   */
  withDataTransferMode(mode: DataTransferMode): ChatAdapterBuilder<AiMsg>;

  /**
   * Inference parameters to pass to the model. <code>Converse</code> supports a base
   * set of inference parameters. If you need to pass additional parameters that the model
   * supports, use the <code>additionalModelRequestFields</code> request field.
   *
   * @param {InferenceConfiguration} inferenceConfig
   * @returns {ChatAdapterBuilder}
   */
  withInferenceConfig(
    inferenceConfig: InferenceConfiguration
  ): ChatAdapterBuilder<AiMsg>;

  /**
   * The model or the endpoint to use for the Bedrock Inference API.
   * You should provide either a model or an endpoint, but not both.
   *
   * @param {string} model
   * @returns {ChatAdapterBuilder}
   */
  withModel(model: string): ChatAdapterBuilder<AiMsg>;

  /**
   * The AWS region to use for the Bedrock Inference API.
   *
   * @optional
   * @param {string} region
   * @returns {ChatAdapterBuilder}
   */
  withRegion(region: string): ChatAdapterBuilder<AiMsg>;

  /**
   * The endpoint to use for the Bedrock Inference API.
   *
   * @optional
   * @param {string} endpoint
   * @returns {ChatAdapterBuilder}
   */
  withEndpoint(endpoint: string): ChatAdapterBuilder<AiMsg>;

  /**
   * The maximum number of attempts when retrying calls to the Bedrock Inference API.
   *
   * @optional
   * @param {number} maxAttempts
   * @returns {ChatAdapterBuilder}
   */
  withMaxAttempts(maxAttempts: number): ChatAdapterBuilder<AiMsg>;

  /**
   * Unique service identifier for the Bedrock runtime client.
   *
   * @optional
   * @param {string} serviceId
   * @returns {ChatAdapterBuilder}
   */
  withServiceId(serviceId: string): ChatAdapterBuilder<AiMsg>;
}
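For illustration only, a minimal sketch of how the extended builder could be chained. It assumes the builder is instantiated directly from ChatAdapterBuilderImpl (in an application it would normally be obtained through the package's entry point), and the credential values, model ID, region, and inference settings are placeholders:

// Sketch, not part of the commit. Import path assumes this snippet sits next to the builder files.
import { ChatAdapterBuilderImpl } from "./builderImpl";

const adapter = new ChatAdapterBuilderImpl<string>()
  .withCredintial({
    accessKeyId: "AKIA...",    // placeholder AWS access key
    secretAccessKey: "...",    // placeholder AWS secret key
  })
  .withModel("anthropic.claude-3-sonnet-20240229-v1:0") // illustrative Bedrock model ID
  .withRegion("us-east-1")
  .withInferenceConfig({ maxTokens: 512, temperature: 0.7 }) // InferenceConfiguration fields from @aws-sdk/client-bedrock-runtime
  .withMaxAttempts(3)
  .withServiceId("bedrock-runtime") // illustrative value
  .create();
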
204 changes: 123 additions & 81 deletions packages/js/bedrock/src/bedrock/builder/builderImpl.ts
import {
  BedrockRuntimeClientConfigType,
  InferenceConfiguration,
} from "@aws-sdk/client-bedrock-runtime";
import { DataTransferMode } from "@nlux/core";
import { NluxUsageError, NluxValidationError } from "@shared/types/error";
import { BedrockChatAdapterImpl } from "../adapter/chatAdapter";
import { ChatAdapterBuilder } from "./builder";

export class ChatAdapterBuilderImpl<AiMsg>
  implements ChatAdapterBuilder<AiMsg>
{
  private credentials: BedrockRuntimeClientConfigType["credentials"] | null =
    null;
  private inferenceConfig: InferenceConfiguration | null = null;
  private region: string | null = null;
  private endpoint: string | null = null;
  private theDataTransferMode: DataTransferMode = "stream";
  private theModel: string | null = null;
  private serviceId: string | null = null;
  private maxAttempts: number | null = null;
  private withDataTransferModeCalled = false;

  create(): BedrockChatAdapterImpl<AiMsg> {
    if (!this.theModel) {
      throw new NluxValidationError({
        source: this.constructor.name,
        message:
          'You must provide a model or an endpoint using the "withModel()" method or the ' +
          '"withEndpoint()" method!',
      });
    }

    return new BedrockChatAdapterImpl({
      dataTransferMode: this.theDataTransferMode,
      model: this.theModel ?? undefined,
      credentials: this.credentials ?? undefined,
      region: this.region ?? undefined,
      inferenceConfig: this.inferenceConfig ?? undefined,
      endpoint: this.endpoint ?? undefined,
      maxAttempts: this.maxAttempts ?? undefined,
      serviceId: this.serviceId ?? undefined,
    });
  }

  withCredintial(
    cred: BedrockRuntimeClientConfigType["credentials"]
  ): ChatAdapterBuilder<AiMsg> {
    if (this.credentials !== null) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Cannot set the credentials more than once",
      });
    }

    this.credentials = cred;
    return this;
  }

  withDataTransferMode(mode: DataTransferMode): ChatAdapterBuilder<AiMsg> {
    if (this.withDataTransferModeCalled) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Cannot set the data loading mode more than once",
      });
    }

    this.theDataTransferMode = mode;
    this.withDataTransferModeCalled = true;
    return this;
  }

  withInferenceConfig(
    inferenceConfig: InferenceConfiguration
  ): ChatAdapterBuilder<AiMsg> {
    if (this.inferenceConfig !== null) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Cannot set the inference config more than once",
      });
    }

    this.inferenceConfig = inferenceConfig;
    return this;
  }

  withModel(model: string) {
    if (this.theModel !== null) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message:
          "Cannot set the model because a model or an endpoint has already been set",
      });
    }

    this.theModel = model;
    return this;
  }

  withRegion(region: string): ChatAdapterBuilder<AiMsg> {
    if (this.region !== null) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Cannot set the region because a region has already been set",
      });
    }

    this.region = region;
    return this;
  }

  withEndpoint(endpoint: string): ChatAdapterBuilder<AiMsg> {
    if (this.endpoint !== null) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message:
          "Cannot set the endpoint because an endpoint has already been set",
      });
    }

    this.endpoint = endpoint;
    return this;
  }

  withMaxAttempts(maxAttempts: number): ChatAdapterBuilder<AiMsg> {
    if (this.maxAttempts !== null) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Cannot set the max attempts because it has already been set",
      });
    }

    this.maxAttempts = maxAttempts;
    return this;
  }

  withServiceId(serviceId: string): ChatAdapterBuilder<AiMsg> {
    if (this.serviceId !== null) {
      throw new NluxUsageError({
        source: this.constructor.name,
        message: "Cannot set the serviceId because it has already been set",
      });
    }

    this.serviceId = serviceId;
    return this;
  }
}
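As a usage note, each setter guards against being called twice, and create() refuses to run until a model has been provided, so misconfiguration surfaces as an NluxUsageError or NluxValidationError rather than a failed request. A small sketch of that behavior, relying on the same direct ChatAdapterBuilderImpl import as the earlier example:

const builder = new ChatAdapterBuilderImpl<string>();
builder.withRegion("us-east-1");

// A second call throws an NluxUsageError:
// "Cannot set the region because a region has already been set"
// builder.withRegion("eu-west-1");

// Calling create() before withModel() throws an NluxValidationError asking for
// a model or an endpoint to be provided via withModel() / withEndpoint().
// builder.create();
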
