From f8065394c166d47305e038e780e036dc5196c706 Mon Sep 17 00:00:00 2001 From: Jeongho Nam Date: Sun, 8 Sep 2024 02:42:30 +0900 Subject: [PATCH 1/3] Description comments of LLM function calling schemas. --- src/HttpLlm.ts | 115 ++++++++++++++++++++++++++ src/structures/IHttpLlmApplication.ts | 77 ++++++++++++++--- src/structures/IHttpLlmFunction.ts | 34 ++++---- src/structures/ILlmFunction.ts | 12 +-- 4 files changed, 206 insertions(+), 32 deletions(-) diff --git a/src/HttpLlm.ts b/src/HttpLlm.ts index 2d76154..9ff129c 100644 --- a/src/HttpLlm.ts +++ b/src/HttpLlm.ts @@ -11,7 +11,57 @@ import { ILlmFunction } from "./structures/ILlmFunction"; import { ILlmSchema } from "./structures/ILlmSchema"; import { LlmDataMerger } from "./utils/LlmDataMerger"; +/** + * LLM function calling application composer from OpenAPI document. + * + * `HttpLlm` is a module for composing LLM (Large Language Model) function calling + * application by the {@link OpenApi.IDocument OpenAPI document}, and also for + * LLM function call execution and parameter merging. + * + * At first, you can construct the LLM function calling application by the + * {@link HttpLlm.application HttpLlm.application()} function. And then the LLM + * has selected a {@link IHttpLlmFunction function} to call and composes its + * arguments, you can execute the function by + * {@link HttpLlm.execute HttpLlm.execute()} or + * {@link HttpLlm.propagate HttpLlm.propagate()}. + * + * By the way, if you have configured the {@link IHttpLlmApplication.IOptions.separate} + * option to separate the parameters into human and LLM sides, you can merge these + * human and LLM sides' parameters into one through + * {@link HttpLlm.mergeParameters HttpLlm.mergeParameters()} before the actual LLM + * function call execution. + * + * @author Jeongho Nam - https://github.com/samchon + */ export namespace HttpLlm { + /* ----------------------------------------------------------- + COMPOSERS + ----------------------------------------------------------- */ + /** + * Convert OpenAPI document to LLM function calling application. + * + * Converts {@link OpenApi.IDocument OpenAPI document} or + * {@link IHttpMigrateApplication migrated application} to the + * {@link IHttpLlmApplication LLM function calling application}. Every + * {@link OpenApi.IOperation API operations} in the OpenAPI document are converted + * to the {@link IHttpLlmFunction LLM function} type, and they would be used for + * the LLM function calling. + * + * If you have configured the {@link IHttpLlmApplication.IOptions.separate} option, + * every parameters in the {@link IHttpLlmFunction} would be separated into both + * human and LLM sides. In that case, you can merge these human and LLM sides' + * parameters into one through {@link HttpLlm.mergeParameters} before the actual + * LLM function call execution. + * + * Additionally, if you have configured the {@link IHttpLlmApplication.IOptions.keyword} + * as `true`, the number of {@link IHttpLlmFunction.parameters} are always 1 and the + * first parameter type is always {@link ILlmSchema.IObject}. I recommend this option + * because LLM can understand the keyword arguments more easily. 
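+   *
+   * For example, a minimal usage sketch may look like the below code. It is only an
+   * illustrative assumption: the swagger document URL is a placeholder, and whether
+   * to enable the `keyword` option is up to you.
+   *
+   * ```typescript
+   * import { HttpLlm, OpenApi } from "@samchon/openapi";
+   *
+   * // load and normalize the target OpenAPI document (placeholder URL)
+   * const swagger = await fetch("https://example.com/swagger.json").then((r) =>
+   *   r.json(),
+   * );
+   * const document: OpenApi.IDocument = OpenApi.convert(swagger);
+   *
+   * // compose the LLM function calling application
+   * const application = HttpLlm.application(document, { keyword: true });
+   * ```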
+ * + * @param document Target OpenAPI document to convert (or migrate application) + * @param options Options for the LLM function calling application conversion + * @returns LLM function calling application + */ export const application = < Schema extends ILlmSchema, Operation extends OpenApi.IOperation, @@ -35,11 +85,30 @@ export namespace HttpLlm { ); }; + /** + * Convert JSON schema to LLM schema. + * + * Converts {@link OpenApi.IJsonSchema JSON schema} to {@link ILlmSchema LLM schema}. + * + * By the way, if the target JSON schema has some recursive references, the + * conversion would be failed and `null` value would be returned. It's because + * the LLM schema does not support the reference type embodied by the + * {@link OpenApi.IJsonSchema.IReference} type. + * + * @param props Schema to convert and components to refer + * @returns LLM schema or null value + */ export const schema = (props: { components: OpenApi.IComponents; schema: OpenApi.IJsonSchema; }): ILlmSchema | null => HttpLlmConverter.schema(props); + /* ----------------------------------------------------------- + FETCHERS + ----------------------------------------------------------- */ + /** + * Properties for the LLM function call. + */ export interface IFetchProps { /** * Document of the OpenAI function call schemas. @@ -61,18 +130,64 @@ export namespace HttpLlm { */ arguments: any[]; } + + /** + * Execute the LLM function call. + * + * @param props + * @returns + */ export const execute = (props: IFetchProps): Promise => HttpLlmFunctionFetcher.execute(props); + + /** + * Propagate the LLM function call. + * + * @param props + * @returns + */ export const propagate = (props: IFetchProps): Promise => HttpLlmFunctionFetcher.propagate(props); + /* ----------------------------------------------------------- + MERGERS + ----------------------------------------------------------- */ + /** + * Properties for the parameters' merging. + */ export interface IMergeProps { + /** + * Metadata of the target function. + */ function: ILlmFunction; + + /** + * Arguments composed by the LLM. + */ llm: unknown[]; + + /** + * Arguments composed by the human. + */ human: unknown[]; } + + /** + * Merge the parameters. + * + * @param props + * @returns + */ export const mergeParameters = (props: IMergeProps): unknown[] => LlmDataMerger.parameters(props); + + /** + * Merge two values. + * + * @param x Value X to merge + * @param y Value Y to merge + * @returns Merged value + */ export const mergeValue = (x: unknown, y: unknown): unknown => LlmDataMerger.value(x, y); } diff --git a/src/structures/IHttpLlmApplication.ts b/src/structures/IHttpLlmApplication.ts index c974f48..98ba33d 100644 --- a/src/structures/IHttpLlmApplication.ts +++ b/src/structures/IHttpLlmApplication.ts @@ -3,6 +3,69 @@ import { IHttpLlmFunction } from "./IHttpLlmFunction"; import { IHttpMigrateRoute } from "./IHttpMigrateRoute"; import { ILlmSchema } from "./ILlmSchema"; +/** + * Application of LLM function call from OpenAPI document. + * + * `IHttpLlmApplication` is a data structure representing collection of + * {@link IHttpLlmFunction LLM function calling schemas} composed from the + * {@link OpenApi.IDocument OpenAPI document} and its {@link OpenApi.IOperation operation} + * metadata. It also contains {@link IHttpLlmApplication.errors failed operations}, and + * adjusted {@link IHttpLlmApplication.options options} during the `IHttpLlmApplication` + * construction. 
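+ *
+ * For example, a rough sketch enumerating the composed function schemas and the
+ * {@link errors} would look like below, assuming the schemas are exposed through
+ * the {@link functions} property and that the `document` variable is a prepared
+ * {@link OpenApi.IDocument}:
+ *
+ * ```typescript
+ * import { HttpLlm, IHttpLlmApplication } from "@samchon/openapi";
+ *
+ * const application: IHttpLlmApplication = HttpLlm.application(document);
+ * application.functions.forEach((func) => console.log(func.name));
+ * application.errors.forEach((error) => console.error(error));
+ * ```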
+ *
+ * About the {@link OpenApi.IOperation API operations}, they are converted to the
+ * {@link IHttpLlmFunction} type, which represents an LLM function calling schema.
+ * By the way, if there're some recursive types which can't escape the
+ * {@link OpenApi.IJsonSchema.IReference} type, the operation would fail and be
+ * pushed into the {@link IHttpLlmApplication.errors}. Otherwise, the operation
+ * would be successfully converted to {@link IHttpLlmFunction} and its type schemas
+ * are downgraded to {@link OpenApiV3.IJsonSchema} and converted to {@link ILlmSchema}.
+ *
+ * About the options, if you've configured {@link IHttpLlmApplication.options.keyword}
+ * (as `true`), the number of {@link IHttpLlmFunction.parameters} is always 1 and the first
+ * parameter type is always {@link ILlmSchema.IObject}. Otherwise, the parameters would
+ * be multiple, and the sequence of the parameters follows the rules below.
+ *
+ * - `pathParameters`: Path parameters of {@link IHttpMigrateRoute.parameters}
+ * - `query`: Query parameter of {@link IHttpMigrateRoute.query}
+ * - `body`: Body parameter of {@link IHttpMigrateRoute.body}
+ *
+ * ```typescript
+ * // KEYWORD TRUE
+ * {
+ *   ...pathParameters,
+ *   query,
+ *   body,
+ * }
+ *
+ * // KEYWORD FALSE
+ * [
+ *   ...pathParameters,
+ *   ...(query ? [query] : []),
+ *   ...(body ? [body] : []),
+ * ]
+ * ```
+ *
+ * By the way, there can be some parameters (or their nested properties) which must be
+ * composed by a human, not by the LLM. File uploading features or some sensitive information
+ * like secret keys (passwords) are such examples. In that case, you can separate the
+ * function parameters to both LLM and human sides by configuring the
+ * {@link IHttpLlmApplication.IOptions.separate} property. The separated parameters are
+ * assigned to the {@link IHttpLlmFunction.separated} property.
+ *
+ * For reference, the actual function call execution is not by the LLM, but by you.
+ * When the LLM selects the proper function and fills the arguments, you just call
+ * the function through {@link HttpLlm.execute} with the LLM-prepared arguments, and then
+ * inform the LLM of the return value through a system prompt. The LLM will continue the next
+ * conversation based on the return value.
+ *
+ * Additionally, if you've configured {@link IHttpLlmApplication.IOptions.separate},
+ * so that the parameters are separated into human and LLM sides, you can merge these
+ * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
+ * before the actual LLM function call execution.
+ *
+ * @author Jeongho Nam - https://github.com/samchon
+ */
 export interface IHttpLlmApplication<
   Schema extends ILlmSchema = ILlmSchema,
   Operation extends OpenApi.IOperation = OpenApi.IOperation,
@@ -91,25 +154,19 @@ export namespace IHttpLlmApplication {
      * If this property value is `true`, length of the
      * {@link IHttpLlmApplication.IFunction.parameters} is always 1, and type of
      * the pararameter is always {@link ILlmSchema.IObject} type.
-     * Also, its properties are following below rules:
      *
-     * - `pathParameters`: Path parameters of {@link IHttpMigrateRoute.parameters}
-     * - `query`: Query parameter of {@link IHttpMigrateRoute.query}
-     * - `body`: Body parameter of {@link IHttpMigrateRoute.body}
+     * Otherwise, the parameters would be multiple, and the sequence of the
+     * parameters follows the rules below.
* * ```typescript + * // KEYWORD TRUE * { * ...pathParameters, * query, * body, * } - * ``` - * - * Otherwise (this property value is `false`), length of the - * {@link IHttpLlmFunction.parameters} is variable, and sequence of the - * parameters are following below rules. * - * ```typescript + * // KEYWORD FALSE * [ * ...pathParameters, * ...(query ? [query] : []), diff --git a/src/structures/IHttpLlmFunction.ts b/src/structures/IHttpLlmFunction.ts index dc9c527..a36e99d 100644 --- a/src/structures/IHttpLlmFunction.ts +++ b/src/structures/IHttpLlmFunction.ts @@ -3,27 +3,25 @@ import { IHttpMigrateRoute } from "./IHttpMigrateRoute"; import { ILlmSchema } from "./ILlmSchema"; /** - * LLM function metadata from HTTP (OpenAPI) operation. + * LLM function calling schema from HTTP (OpenAPI) operation. * - * `IHttpLlmFunction` is a data structure representing a procedure converted - * from the OpenAPI operation, used for the LLM (Large Language Model) - * function calling. It's a typical RPC (Remote Procedure Call) structure - * containing the procedure {@link name}, {@link parameters}, and + * `IHttpLlmFunction` is a data structure representing a function converted + * from the {@link OpenApi.IOperation OpenAPI operation}, used for the LLM + * (Large Language Model) function calling. It's a typical RPC (Remote Procedure Call) + * structure containing the function {@link name}, {@link parameters}, and * {@link output return type}. * - * If you provide this `IHttpLlmFunction` data to the LLM like "OpenAI", - * the "OpenAI" will compose a function arguments by analyzing - * conversations with the user. With the LLM composed arguments, you can - * execute the procedure through {@link LlmFetcher.execute} and get the - * result. + * If you provide this `IHttpLlmFunction` data to the LLM provider like "OpenAI", + * the "OpenAI" will compose a function arguments by analyzing conversations with + * the user. With the LLM composed arguments, you can execute the function through + * {@link LlmFetcher.execute} and get the result. * * For reference, different between `IHttpLlmFunction` and its origin source - * {@link OpenApi.IOperation} is, `IHttpLlmFunction` has converted every type - * schema informations from {@link OpenApi.IJsonSchema} to {@link ILlmSchema} - * to escape {@link OpenApi.IJsonSchema.IReference reference types}, and - * downgrade the version of the JSON schema to OpenAPI 3.0. It's because - * LLM function call feature cannot understand both reference types and - * OpenAPI 3.1 specification. + * {@link OpenApi.IOperation} is, `IHttpLlmFunction` has converted every type schema + * informations from {@link OpenApi.IJsonSchema} to {@link ILlmSchema} to escape + * {@link OpenApi.IJsonSchema.IReference reference types}, and downgrade the version + * of the JSON schema to OpenAPI 3.0. It's because LLM function call feature cannot + * understand both reference types and OpenAPI 3.1 specification. * * Additionally, if you've composed `IHttpLlmFunction` with * {@link IHttpLlmApplication.IOptions.keyword} configuration as `true`, number of @@ -163,7 +161,7 @@ export interface IHttpLlmFunction< output?: Schema | undefined; /** - * Description of the procedure. + * Description of the function. * * `IHttpLlmFunction.description` is composed by below rule: * @@ -228,6 +226,8 @@ export namespace IHttpLlmFunction { export interface ISeparatedParameter { /** * Index of the parameter. 
+     *
+     * @type uint
      */
     index: number;

diff --git a/src/structures/ILlmFunction.ts b/src/structures/ILlmFunction.ts
index 7b13072..e0cef71 100644
--- a/src/structures/ILlmFunction.ts
+++ b/src/structures/ILlmFunction.ts
@@ -8,12 +8,12 @@ import { ILlmSchema } from "./ILlmSchema";
  * calling. Also, it's a function structure containing the function
  * {@link name}, {@link parameters} and {@link output return type}.
  *
- * If you provide this `ILlmFunction` data to the LLM like "OpenAI",
- * the "OpenAI" will compose a function arguments by analyzing
- * conversations with the user. With the LLM composed arguments, you can
- * execute the function and get the result.
+ * If you provide this `ILlmFunction` data to the LLM provider like "OpenAI",
+ * the "OpenAI" will compose the function arguments by analyzing conversations
+ * with the user. With the LLM-composed arguments, you can execute the function
+ * and get the result.
  *
- * By the way, do not sure that LLM will always provide the correct
+ * By the way, do not assume that the LLM will always provide the correct
  * arguments. The LLM of present age is not perfect, so that you would
  * better to validate the arguments before executing the function.
  * I recommend you to validate the arguments before execution by using
@@ -81,6 +81,8 @@ export namespace ILlmFunction {
   export interface ISeparatedParameter {
     /**
      * Index of the parameter.
+     *
+     * @type uint
      */
     index: number;

From 8620a64454284126da95e65eb9f283b8a3077345 Mon Sep 17 00:00:00 2001
From: Jeongho Nam
Date: Sun, 8 Sep 2024 02:56:39 +0900
Subject: [PATCH 2/3] Description about `HttpLlm` module.

---
 src/HttpLlm.ts | 64 +++++++++++++++++++++++++++++++++++++++++++++-----
 1 file changed, 58 insertions(+), 6 deletions(-)

diff --git a/src/HttpLlm.ts b/src/HttpLlm.ts
index 9ff129c..537b285 100644
--- a/src/HttpLlm.ts
+++ b/src/HttpLlm.ts
@@ -134,8 +134,27 @@ export namespace HttpLlm {
   /**
    * Execute the LLM function call.
    *
-   * @param props
-   * @returns
+   * `HttpLlm.execute()` is a function executing the target
+   * {@link OpenApi.IOperation API endpoint} with the connection information
+   * and arguments composed by a Large Language Model like OpenAI (+ human sometimes).
+   *
+   * By the way, if you've configured the {@link IHttpLlmApplication.IOptions.separate},
+   * so that the parameters are separated into human and LLM sides, you have to merge
+   * these human and LLM sides' parameters into one through the
+   * {@link HttpLlm.mergeParameters} function.
+   *
+   * About the {@link IHttpLlmApplication.IOptions.keyword} option, don't worry about anything.
+   * This `HttpLlm.execute()` function will automatically recognize the keyword arguments
+   * and convert them to the proper sequence.
+   *
+   * For reference, if the target API endpoint responds with a non-200/201 status, this
+   * would be considered an error and the {@link HttpError} would be thrown.
+   * If you don't want such a rule, you can use the {@link HttpLlm.propagate}
+   * function instead.
+   *
+   * @param props Properties for the LLM function call
+   * @returns Return value from the API endpoint
+   * @throws HttpError when the API endpoint responds with a non-200/201 status
    */
   export const execute = (props: IFetchProps): Promise =>
     HttpLlmFunctionFetcher.execute(props);
@@ -143,8 +162,26 @@ export namespace HttpLlm {
   /**
    * Propagate the LLM function call.
    *
-   * @param props
-   * @returns
+   * `HttpLlm.propagate()` is a function propagating the target
+   * {@link OpenApi.IOperation API endpoint} with the connection information
+   * and arguments composed by a Large Language Model like OpenAI (+ human sometimes).
+   *
+   * By the way, if you've configured the {@link IHttpLlmApplication.IOptions.separate},
+   * so that the parameters are separated into human and LLM sides, you have to merge
+   * these human and LLM sides' parameters into one through the
+   * {@link HttpLlm.mergeParameters} function.
+   *
+   * About the {@link IHttpLlmApplication.IOptions.keyword} option, don't worry about anything.
+   * This `HttpLlm.propagate()` function will automatically recognize the keyword arguments
+   * and convert them to the proper sequence.
+   *
+   * For reference, the propagation means always returning the response from the API
+   * endpoint, even if the status is not 200/201. This is useful when you want to
+   * handle the response by yourself.
+   *
+   * @param props Properties for the LLM function call
+   * @returns Response from the API endpoint
+   * @throws Error only when the connection fails
    */
   export const propagate = (props: IFetchProps): Promise =>
     HttpLlmFunctionFetcher.propagate(props);
@@ -175,8 +212,17 @@ export namespace HttpLlm {
   /**
    * Merge the parameters.
    *
-   * @param props
-   * @returns
+   * If you've configured the {@link IHttpLlmApplication.IOptions.separate} option,
+   * so that the parameters are separated into human and LLM sides, you can merge these
+   * human and LLM sides' parameters into one through this `HttpLlm.mergeParameters()`
+   * function before the actual LLM function call execution.
+   *
+   * On the contrary, if you haven't configured the
+   * {@link IHttpLlmApplication.IOptions.separate} option, this function would throw
+   * an error.
+   *
+   * @param props Properties for the parameters' merging
+   * @returns Merged parameter values
    */
   export const mergeParameters = (props: IMergeProps): unknown[] =>
     LlmDataMerger.parameters(props);
@@ -184,6 +230,12 @@ export namespace HttpLlm {
   /**
    * Merge two values.
    *
+   * If both values are objects, then it combines them at the property level.
+   *
+   * Otherwise, it returns the latter value if it is not null, and the former value if it is.
+   *
+   * - `return (y ?? x)`
+   *
    * @param x Value X to merge
    * @param y Value Y to merge
    * @returns Merged value

From 8217f7fb0da67365975d4015acc305634382a6fb Mon Sep 17 00:00:00 2001
From: Jeongho Nam
Date: Mon, 9 Sep 2024 00:57:38 +0900
Subject: [PATCH 3/3] Completion of description comments writing.
---
 package.json                                  |   2 +-
 src/HttpLlm.ts                                |   6 +-
 src/HttpMigration.ts                          | 131 +++++++++++++++++-
 src/http/HttpLlmFunctionFetcher.ts            |  39 ++----
 src/http/HttpMigrateRouteFetcher.ts           |  25 ++--
 src/structures/IHttpMigrateApplication.ts     |   5 +-
 src/structures/IHttpMigrateRoute.ts           |   4 +-
 src/structures/IHttpResponse.ts               |  20 +++
 ...t_http_migrate_fetch_keyword_parameters.ts |   2 +-
 .../test_http_migrate_fetch_multipart.ts      |   2 +-
 ...ttp_migrate_fetch_positional_parameters.ts |   2 +-
 .../migrate/test_http_migrate_fetch_query.ts  |   2 +-
 .../test_http_migrate_fetch_query_and_body.ts |   2 +-
 13 files changed, 181 insertions(+), 61 deletions(-)

diff --git a/package.json b/package.json
index d3a5906..ccde257 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@samchon/openapi",
-  "version": "0.5.0-dev.20240906-2",
+  "version": "1.0.0-dev.20240908",
   "description": "OpenAPI definitions and converters for 'typia' and 'nestia'.",
   "main": "./lib/index.js",
   "module": "./lib/index.mjs",
diff --git a/src/HttpLlm.ts b/src/HttpLlm.ts
index 537b285..de94dec 100644
--- a/src/HttpLlm.ts
+++ b/src/HttpLlm.ts
@@ -15,7 +15,7 @@ import { LlmDataMerger } from "./utils/LlmDataMerger";
  * LLM function calling application composer from OpenAPI document.
  *
  * `HttpLlm` is a module for composing LLM (Large Language Model) function calling
- * application by the {@link OpenApi.IDocument OpenAPI document}, and also for
+ * application from the {@link OpenApi.IDocument OpenAPI document}, and also for
  * LLM function call execution and parameter merging.
  *
  * At first, you can construct the LLM function calling application by the
@@ -121,7 +121,7 @@ export namespace HttpLlm {
     function: IHttpLlmFunction;

     /**
-     * Connection info to the server.
+     * Connection info to the HTTP server.
      */
     connection: IHttpConnection;

@@ -153,7 +153,7 @@ export namespace HttpLlm {
    * function instead.
    *
    * @param props Properties for the LLM function call
-   * @returns Return value from the API endpoint
+   * @returns Return value (response body) from the API endpoint
    * @throws HttpError when the API endpoint responds with a non-200/201 status
    */
   export const execute = (props: IFetchProps): Promise =>
diff --git a/src/HttpMigration.ts b/src/HttpMigration.ts
index b90f538..653808c 100644
--- a/src/HttpMigration.ts
+++ b/src/HttpMigration.ts
@@ -6,7 +6,72 @@ import { IHttpMigrateApplication } from "./structures/IHttpMigrateApplication";
 import { IHttpMigrateRoute } from "./structures/IHttpMigrateRoute";
 import { IHttpResponse } from "./structures/IHttpResponse";

+/**
+ * HTTP migration application composer from OpenAPI document.
+ *
+ * `HttpMigration` is a module for composing an HTTP migration application from the
+ * {@link OpenApi.IDocument OpenAPI document}. It is designed for helping the OpenAPI
+ * generator libraries, which convert {@link OpenApi.IOperation OpenAPI operations} into
+ * RPC (Remote Procedure Call) functions.
+ *
+ * The key feature of the `HttpMigration` module is the {@link HttpMigration.application} function.
+ * It converts the {@link OpenApi.IOperation OpenAPI operations} to the
+ * {@link IHttpMigrateRoute HTTP migration route} type, normalizing the OpenAPI operations
+ * into a route structure suitable for RPC function calling.
+ *
+ * The other functions, {@link HttpMigration.execute} and {@link HttpMigration.propagate},
+ * are for executing the HTTP request to the HTTP server. The {@link HttpMigration.execute}
+ * function returns the response body from the API endpoint when the status code is `200`
+ * or `201`.
Otherwise, it throws an {@link HttpError} when the status code is not `200`
+ * or `201`. The {@link HttpMigration.propagate} function returns the response information
+ * from the API endpoint, including the status code, headers, and response body.
+ *
+ * The {@link HttpLlm} module is a good example utilizing this `HttpMigration` module
+ * for composing an RPC function calling application. The {@link HttpLlm} module composes
+ * an LLM (Large Language Model) function calling application from the OpenAPI document,
+ * passing through the {@link IHttpLlmApplication} type.
+ *
+ * @author Jeongho Nam - https://github.com/samchon
+ */
 export namespace HttpMigration {
+  /* -----------------------------------------------------------
+    COMPOSER
+  ----------------------------------------------------------- */
+  /**
+   * Convert HTTP migration application from OpenAPI document.
+   *
+   * `HttpMigration.application()` is a function converting the
+   * {@link OpenApi.IDocument OpenAPI document} and its {@link OpenApi.IOperation operations}
+   * to the {@link IHttpMigrateApplication HTTP migration application}.
+   *
+   * The HTTP migration application is designed for helping the OpenAPI generator
+   * libraries, which convert OpenAPI operations into RPC (Remote Procedure Call)
+   * functions. To support the OpenAPI generator libraries, {@link IHttpMigrateRoute}
+   * takes the below normalization rules:
+   *
+   * - Path parameters are separated to the atomic level.
+   * - Query parameters are bound into one object.
+   * - Header parameters are bound into one object.
+   * - Allow only the below HTTP methods
+   *   - `head`
+   *   - `get`
+   *   - `post`
+   *   - `put`
+   *   - `patch`
+   *   - `delete`
+   * - Allow only the below content media types
+   *   - `application/json`
+   *   - `application/x-www-form-urlencoded`
+   *   - `multipart/form-data`
+   *   - `text/plain`
+   *
+   * If there're some {@link OpenApi.IOperation API operations} which cannot follow
+   * the above rules, or which are logically invalid, these operations would fail
+   * to migrate and be registered into the {@link IHttpMigrateApplication.errors}.
+   *
+   * @param document OpenAPI document to migrate.
+   * @returns Migrated application.
+   */
   export const application = <
     Schema extends OpenApi.IJsonSchema = OpenApi.IJsonSchema,
     Operation extends OpenApi.IOperation = OpenApi.IOperation,
   >(
     document: OpenApi.IDocument,
   ): IHttpMigrateApplication =>
     MigrateConverter.convert(document);

+  /**
+   * Properties for the request to the HTTP server.
+   */
   export interface IFetchProps {
+    /**
+     * Connection info to the HTTP server.
+     */
     connection: IHttpConnection;
+
+    /**
+     * Route information for the migration.
+     */
     route: IHttpMigrateRoute;
+
+    /**
+     * Path parameters.
+     *
+     * Path parameters as a sequenced array or a key-value paired object.
+     */
     parameters:
       | Array
       | Record;
+
+    /**
+     * Query parameters as a key-value paired object.
+     */
     query?: object | undefined;
+
+    /**
+     * Request body data.
+     */
     body?: object | undefined;
   }
-  export const request = (props: IFetchProps): Promise =>
-    HttpMigrateRouteFetcher.request(props);
+
+  /* -----------------------------------------------------------
+    FETCHERS
+  ----------------------------------------------------------- */
+  /**
+   * Execute the HTTP request.
+   *
+   * `HttpMigration.execute()` is a function executing the HTTP request to the HTTP server.
+   *
+   * It returns the response body from the API endpoint when the status code is `200`
+   * or `201`.
Otherwise, it throws an {@link HttpError} when the status code is not + * `200` or `201`. + * + * If you want to get more information than the response body, or get the detailed + * response information even when the status code is `200` or `201`, use the + * {@link HttpMigration.propagate} function instead. + * + * @param props Properties for the request. + * @returns Return value (response body) from the API endpoint. + * @throws HttpError when the API endpoint responds none 200/201 status. + */ + export const execute = (props: IFetchProps): Promise => + HttpMigrateRouteFetcher.execute(props); + + /** + * Propagate the HTTP request. + * + * `HttpMigration.propagate()` is a function propagating the request to the HTTP server. + * + * It returns the response information from the API endpoint, including the status code, + * headers, and response body. + * + * Even if the status code is not `200` or `201`, this function + * would return the response information. By the way, if the connection to the HTTP server + * is failed, this function would throw an {@link Error}. + * + * @param props Properties for the request. + * @returns Response from the API endpoint. + * @throws Error when the connection is failed. + */ export const propagate = (props: IFetchProps): Promise => HttpMigrateRouteFetcher.propagate(props); } diff --git a/src/http/HttpLlmFunctionFetcher.ts b/src/http/HttpLlmFunctionFetcher.ts index 7f08c35..100b37e 100644 --- a/src/http/HttpLlmFunctionFetcher.ts +++ b/src/http/HttpLlmFunctionFetcher.ts @@ -1,43 +1,22 @@ -import { IHttpConnection } from "../structures/IHttpConnection"; -import { IHttpLlmApplication } from "../structures/IHttpLlmApplication"; -import { IHttpLlmFunction } from "../structures/IHttpLlmFunction"; +import type { HttpLlm } from "../HttpLlm"; +import type { HttpMigration } from "../HttpMigration"; import { IHttpMigrateRoute } from "../structures/IHttpMigrateRoute"; import { IHttpResponse } from "../structures/IHttpResponse"; import { HttpMigrateRouteFetcher } from "./HttpMigrateRouteFetcher"; export namespace HttpLlmFunctionFetcher { - export interface IProps { - /** - * Application of the OpenAI function call schemas. - */ - application: IHttpLlmApplication; + export const execute = async (props: HttpLlm.IFetchProps): Promise => + HttpMigrateRouteFetcher.execute(getFetchArguments("execute", props)); - /** - * Function schema to call. - */ - function: IHttpLlmFunction; - - /** - * Connection info to the server. - */ - connection: IHttpConnection; - - /** - * Arguments for the function call. 
- */ - arguments: any[]; - } - - export const execute = async (props: IProps): Promise => - HttpMigrateRouteFetcher.request(getFetchArguments("execute", props)); - - export const propagate = async (props: IProps): Promise => + export const propagate = async ( + props: HttpLlm.IFetchProps, + ): Promise => HttpMigrateRouteFetcher.propagate(getFetchArguments("propagate", props)); const getFetchArguments = ( from: string, - props: IProps, - ): HttpMigrateRouteFetcher.IProps => { + props: HttpLlm.IFetchProps, + ): HttpMigration.IFetchProps => { const route: IHttpMigrateRoute = props.function.route(); if (props.application.options.keyword === true) { const input: Record = props.arguments[0]; diff --git a/src/http/HttpMigrateRouteFetcher.ts b/src/http/HttpMigrateRouteFetcher.ts index c2551cc..71bd69f 100644 --- a/src/http/HttpMigrateRouteFetcher.ts +++ b/src/http/HttpMigrateRouteFetcher.ts @@ -1,20 +1,12 @@ +import type { HttpMigration } from "../HttpMigration"; import { IHttpConnection } from "../structures/IHttpConnection"; -import { IHttpMigrateRoute } from "../structures/IHttpMigrateRoute"; import { IHttpResponse } from "../structures/IHttpResponse"; import { HttpError } from "./HttpError"; export namespace HttpMigrateRouteFetcher { - export interface IProps { - connection: IHttpConnection; - route: IHttpMigrateRoute; - parameters: - | Array - | Record; - query?: object | undefined; - body?: object | undefined; - } - - export const request = async (props: IProps): Promise => { + export const execute = async ( + props: HttpMigration.IFetchProps, + ): Promise => { const result: IHttpResponse = await _Propagate("request", props); props.route.success?.media; if (result.status !== 200 && result.status !== 201) @@ -28,13 +20,14 @@ export namespace HttpMigrateRouteFetcher { return result.body; }; - export const propagate = (props: IProps): Promise => - _Propagate("propagate", props); + export const propagate = ( + props: HttpMigration.IFetchProps, + ): Promise => _Propagate("propagate", props); } const _Propagate = async ( from: string, - props: HttpMigrateRouteFetcher.IProps, + props: HttpMigration.IFetchProps, ): Promise => { // VALIDATE PARAMETERS const error = (message: string) => @@ -142,7 +135,7 @@ const _Propagate = async ( }; const getPath = ( - props: Pick, + props: Pick, ): string => { let path: string = props.route.emendedPath; props.route.parameters.forEach((p, i) => { diff --git a/src/structures/IHttpMigrateApplication.ts b/src/structures/IHttpMigrateApplication.ts index e88fa11..737d1ed 100644 --- a/src/structures/IHttpMigrateApplication.ts +++ b/src/structures/IHttpMigrateApplication.ts @@ -5,8 +5,9 @@ import { IHttpMigrateRoute } from "./IHttpMigrateRoute"; * Document of migration. * * The `IHttpMigrateApplication` interface is an application migrated from - * {@link OpenAPI.IDocument OpenAPI document} to RPC (Remote Procedure Call) - * functions; {@link IHttpMigrateRoute}. + * {@link OpenAPI.IDocument OpenAPI document} for supporting the OpenAPI generator + * libraries which compose RPC (Remote Procedure Call) functions from the + * {@link OpenAPI.IOperation OpenAPI operations}. 
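+ *
+ * For example, a rough sketch composing this migration application and enumerating
+ * its routes would look like below, assuming this interface exposes the migrated
+ * {@link routes} and the failed {@link errors}, and that the `document` variable is
+ * a prepared {@link OpenAPI.IDocument} instance:
+ *
+ * ```typescript
+ * import { HttpMigration, IHttpMigrateApplication } from "@samchon/openapi";
+ *
+ * const app: IHttpMigrateApplication = HttpMigration.application(document);
+ * for (const route of app.routes) console.log(route.method, route.path);
+ * for (const error of app.errors) console.error(error);
+ * ```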
* * As the `IHttpMigrateApplication` and {@link IHttpMigrateRoute} have a lot of special * stories, when you're developing OpenAPI generator library, please read diff --git a/src/structures/IHttpMigrateRoute.ts b/src/structures/IHttpMigrateRoute.ts index c262fb0..5341273 100644 --- a/src/structures/IHttpMigrateRoute.ts +++ b/src/structures/IHttpMigrateRoute.ts @@ -4,8 +4,8 @@ import { OpenApi } from "../OpenApi"; * Route information for migration. * * The `IHttpMigrateRoute` is a structure representing a route information for - * OpenAPI generated RPC (Remote Procedure Call) function composed from the - * {@link OpenApi.IOperation OpenAPI operation}. + * OpenAPI generator libraries, which composes an RPC (Remote Procedure Call) function + * from the {@link OpenApi.IOperation OpenAPI operation}. * * As the `IHttpMigrateRoute` has a lot of speical stories, when you're developing * OpenAPI generator library, please read its description carefully including diff --git a/src/structures/IHttpResponse.ts b/src/structures/IHttpResponse.ts index cdd12e9..47684eb 100644 --- a/src/structures/IHttpResponse.ts +++ b/src/structures/IHttpResponse.ts @@ -1,5 +1,25 @@ +/** + * Represents an HTTP response. + * + * The `IHttpResponse` interface represents an HTTP response. + * + * It contains the {@link status} code, {@link headers}, and {@link body} of the response. + * + * @author Jeongho Nam - https://github.com/samchon + */ export interface IHttpResponse { + /** + * Status code of the response. + */ status: number; + + /** + * Headers of the response. + */ headers: Record; + + /** + * Body of the response. + */ body: unknown; } diff --git a/test/features/migrate/test_http_migrate_fetch_keyword_parameters.ts b/test/features/migrate/test_http_migrate_fetch_keyword_parameters.ts index 8c94ce9..6551139 100644 --- a/test/features/migrate/test_http_migrate_fetch_keyword_parameters.ts +++ b/test/features/migrate/test_http_migrate_fetch_keyword_parameters.ts @@ -19,7 +19,7 @@ export const test_http_migrate_fetch_keyword_parameters = async ( ); if (route === undefined) throw new Error("Route not found"); - await HttpMigration.request({ + await HttpMigration.execute({ connection, route, parameters: { diff --git a/test/features/migrate/test_http_migrate_fetch_multipart.ts b/test/features/migrate/test_http_migrate_fetch_multipart.ts index cfbae26..63f268e 100644 --- a/test/features/migrate/test_http_migrate_fetch_multipart.ts +++ b/test/features/migrate/test_http_migrate_fetch_multipart.ts @@ -19,7 +19,7 @@ export const test_http_migrate_fetch_multipart = async ( ); if (route === undefined) throw new Error("Route not found"); - await HttpMigration.request({ + await HttpMigration.execute({ connection, route, parameters: { diff --git a/test/features/migrate/test_http_migrate_fetch_positional_parameters.ts b/test/features/migrate/test_http_migrate_fetch_positional_parameters.ts index 5dcf8a6..f20d839 100644 --- a/test/features/migrate/test_http_migrate_fetch_positional_parameters.ts +++ b/test/features/migrate/test_http_migrate_fetch_positional_parameters.ts @@ -19,7 +19,7 @@ export const test_http_migrate_fetch_positional_parameters = async ( ); if (route === undefined) throw new Error("Route not found"); - await HttpMigration.request({ + await HttpMigration.execute({ connection, route, parameters: ["https://some.url/index.html", 2, true], diff --git a/test/features/migrate/test_http_migrate_fetch_query.ts b/test/features/migrate/test_http_migrate_fetch_query.ts index 46ad613..5df5243 100644 --- 
a/test/features/migrate/test_http_migrate_fetch_query.ts +++ b/test/features/migrate/test_http_migrate_fetch_query.ts @@ -18,7 +18,7 @@ export const test_http_migrate_fetch_query = async ( ); if (route === undefined) throw new Error("Route not found"); - await HttpMigration.request({ + await HttpMigration.execute({ connection, route, parameters: { diff --git a/test/features/migrate/test_http_migrate_fetch_query_and_body.ts b/test/features/migrate/test_http_migrate_fetch_query_and_body.ts index 453a623..8c1c1dd 100644 --- a/test/features/migrate/test_http_migrate_fetch_query_and_body.ts +++ b/test/features/migrate/test_http_migrate_fetch_query_and_body.ts @@ -19,7 +19,7 @@ export const test_http_migrate_fetch_query_and_body = async ( ); if (route === undefined) throw new Error("Route not found"); - await HttpMigration.request({ + await HttpMigration.execute({ connection, route, parameters: {