Remove @returns annotations
DennisTraub committed Mar 20, 2024
Commit 2ec21c1 · 1 parent 4d40a20
Showing 8 changed files with 0 additions and 11 deletions.

@@ -26,7 +26,6 @@ import {
  *
  * @param {string} prompt - The input text prompt for the model to complete.
  * @param {string} [modelId] - The ID of the model to use. Defaults to "ai21.j2-mid-v1".
- * @returns {Promise<string[]>} The inference response from the model.
  */
 export const invokeModel = async (prompt, modelId) => {
   // Create a new Bedrock Runtime client instance.

@@ -20,7 +20,6 @@ import {
  *
  * @param {string} prompt - The input text prompt for the model to complete.
  * @param {string} [modelId] - The ID of the model to use. Defaults to "amazon.titan-text-express-v1".
- * @returns {Promise<string[]>} The inference response from the model.
  */
 export const invokeModel = async (prompt, modelId) => {
   // Create a new Bedrock Runtime client instance.

@@ -33,7 +33,6 @@ import {
  *
  * @param {string} prompt - The input text prompt for the model to complete.
  * @param {string} [modelId] - The ID of the model to use. Defaults to "anthropic.claude-v2".
- * @returns {Promise<string[]>} The inference response from the model.
  */
 export const invokeMessagesApi = async (prompt, modelId) => {
   // Create a new Bedrock Runtime client instance.
@@ -80,7 +79,6 @@ export const invokeMessagesApi = async (prompt, modelId) => {
  *
  * @param {string} prompt - The input text prompt for the model to complete.
  * @param {string} [modelId] - The ID of the model to use. Defaults to "anthropic.claude-v2".
- * @returns {Promise<string>} The inference response from the model.
  */
 export const invokeTextCompletionsApi = async (prompt, modelId) => {
   // Create a new Bedrock Runtime client instance.

@@ -38,7 +38,6 @@ import {
  *
  * @param {string} prompt - The input text prompt for the model to complete.
  * @param {string} [modelId] - The ID of the model to use. Defaults to "anthropic.claude-3-haiku-20240307-v1:0".
- * @returns {Promise<string[]>} The inference response from the model.
  */
 export const invokeModel = async (prompt, modelId) => {
   // Create a new Bedrock Runtime client instance.
@@ -85,7 +84,6 @@ export const invokeModel = async (prompt, modelId) => {
  *
  * @param {string} prompt - The input text prompt for the model to complete.
  * @param {string} [modelId] - The ID of the model to use. Defaults to "anthropic.claude-3-haiku-20240307-v1:0".
- * @returns {Promise<Object[]>} The final response from the model.
  */
 export const invokeModelWithResponseStream = async (prompt, modelId) => {
   // Create a new Bedrock Runtime client instance.

@@ -31,7 +31,6 @@ import {
  *
  * @param {string} prompt - The input text prompt for the model to complete.
  * @param {string} [modelId] - The ID of the model to use. Defaults to "anthropic.claude-instant-v1".
- * @returns {Promise<string[]>} The inference response from the model.
  */
 export const invokeMessagesApi = async (prompt, modelId) => {
   // Create a new Bedrock Runtime client instance.
@@ -78,7 +77,6 @@ export const invokeMessagesApi = async (prompt, modelId) => {
  *
  * @param {string} prompt - The input text prompt for the model to complete.
  * @param {string} [modelId] - The ID of the model to use. Defaults to "anthropic.claude-instant-v1".
- * @returns {Promise<string>} The inference response from the model.
  */
 export const invokeTextCompletionsApi = async (prompt, modelId) => {
   // Create a new Bedrock Runtime client instance.

@@ -20,7 +20,6 @@ import {
  *
  * @param {string} prompt - The input text prompt for the model to complete.
  * @param {string} [modelId] - The ID of the model to use. Defaults to "meta.llama2-13b-chat-v1".
- * @returns {Promise<string>} The inference response from the model.
  */
 export const invokeModel = async (prompt, modelId) => {
   // Create a new Bedrock Runtime client instance.

@@ -23,7 +23,6 @@ import {
  *
  * @param {string} prompt - The input text prompt for the model to complete.
  * @param {string} [modelId] - The ID of the model to use. Defaults to "mistral.mistral-7b-instruct-v0:2".
- * @returns {Promise<string[]>} The inference response from the model.
  */
 export const invokeModel = async (prompt, modelId) => {
   // Create a new Bedrock Runtime client instance.

@@ -23,7 +23,6 @@ import {
  *
  * @param {string} prompt - The input text prompt for the model to complete.
  * @param {string} [modelId] - The ID of the model to use. Defaults to "mistral.mixtral-8x7b-instruct-v0:1".
- * @returns {Promise<string[]>} The inference response from the model.
  */
 export const invokeModel = async (prompt, modelId) => {
   // Create a new Bedrock Runtime client instance.
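
With the @returns tags removed, the return shape of each example is no longer stated in its docblock, so callers have to read the implementation to know whether they get back a string or an array of strings. Below is a minimal caller sketch for one of the changed examples; the module path and prompt are illustrative assumptions, not part of this commit.

// Hypothetical usage of one of the changed examples (ES module with top-level await).
// The import path is an assumption; adjust it to the repository layout.
import { invokeModel } from "./invoke_model.js";

const prompt = "Explain the event loop in one sentence.";

// When the second argument is omitted, invokeModel falls back to its default model ID.
const completions = await invokeModel(prompt);
console.log(completions);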
