Commit
add a billion tests
tomfrenken committed Jan 10, 2025
1 parent 6746577 commit a5d65ef
Showing 9 changed files with 275 additions and 33 deletions.
95 changes: 74 additions & 21 deletions packages/orchestration/src/orchestration-client.test.ts
@@ -1,4 +1,6 @@
import nock from 'nock';
import { jest } from '@jest/globals';
import { createLogger } from '@sap-cloud-sdk/util';
import {
mockClientCredentialsGrantCall,
mockDeploymentsList,
@@ -29,6 +31,21 @@ describe('orchestration service client', () => {
nock.cleanAll();
});

const jsonConfig = `{
"module_configurations": {
"llm_module_config": {
"model_name": "gpt-35-turbo-16k",
"model_params": {
"max_tokens": 50,
"temperature": 0.1
}
},
"templating_module_config": {
"template": [{ "role": "user", "content": "What is the capital of France?" }]
}
}
}`;

it('calls chatCompletion with minimum configuration', async () => {
const config: OrchestrationModuleConfig = {
llm: {
@@ -66,22 +83,15 @@ describe('orchestration service client', () => {
expect(response.getTokenUsage().completion_tokens).toEqual(9);
});

it('calls chatCompletion with valid JSON configuration', async () => {
const jsonConfig = `{
"module_configurations": {
"llm_module_config": {
"model_name": "gpt-35-turbo-16k",
"model_params": {
"max_tokens": 50,
"temperature": 0.1
}
},
"templating_module_config": {
"template": [{ "role": "user", "content": "What is the capital of France?" }]
}
}
}`;
it('should throw an error when invalid JSON is provided', () => {
const invalidJsonConfig = '{ "module_configurations": {}, ';

expect(() => new OrchestrationClient(invalidJsonConfig)).toThrow(
'Could not parse JSON'
);
});

it('calls chatCompletion with valid JSON configuration', async () => {
const mockResponse = await parseMockResponse<CompletionPostResponse>(
'orchestration',
'orchestration-chat-completion-success-response.json'
@@ -428,13 +438,56 @@ describe('orchestration service client', () => {
}
});

it('should throw an error when invalid JSON is provided', () => {
const invalidJsonConfig = '{ "module_configurations": {}, ';
it('executes a streaming request with JSON config and logs warning for stream options', async () => {
const mockResponse = await parseFileToString(
'orchestration',
'orchestration-chat-completion-stream-chunks.txt'
);

expect(() => new OrchestrationClient(invalidJsonConfig)).toThrow(
'Could not parse JSON'
mockInference(
{
data: constructCompletionPostRequestFromJsonModuleConfig(
JSON.parse(jsonConfig),
undefined,
true
)
},
{
data: mockResponse,
status: 200
},
{
url: 'inference/deployments/1234/completion'
}
);
});

// add test for executing streaming with options with a JSON client, check for warning log
const logger = createLogger({
package: 'orchestration',
messageContext: 'orchestration-client'
});

const warnSpy = jest.spyOn(logger, 'warn');

const response = await new OrchestrationClient(jsonConfig).stream(
undefined,
undefined,
{
outputFiltering: { overlap: 100 }
}
);

expect(warnSpy).toHaveBeenCalledWith(
'Stream options are not supported when using a JSON module config.'
);

const initialResponse = await parseFileToString(
'orchestration',
'orchestration-chat-completion-stream-chunk-response-initial.json'
);

for await (const chunk of response.stream) {
expect(chunk.data).toEqual(JSON.parse(initialResponse));
break;
}
});
});
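
For orientation, the new streaming test above reduces to the following usage pattern. This is a minimal sketch, not code from the commit: the import path '@sap-ai-sdk/orchestration' is an assumption, and the call shapes mirror what the test exercises.

import { OrchestrationClient } from '@sap-ai-sdk/orchestration'; // assumed package name

// JSON module config, identical in shape to the jsonConfig fixture used in the test.
const jsonConfig = JSON.stringify({
  module_configurations: {
    llm_module_config: {
      model_name: 'gpt-35-turbo-16k',
      model_params: { max_tokens: 50, temperature: 0.1 }
    },
    templating_module_config: {
      template: [{ role: 'user', content: 'What is the capital of France?' }]
    }
  }
});

async function streamWithJsonConfig(): Promise<void> {
  // As the test asserts, stream options passed alongside a JSON module config
  // only trigger a warning ('Stream options are not supported when using a
  // JSON module config.') and are otherwise ignored.
  const response = await new OrchestrationClient(jsonConfig).stream(
    undefined,
    undefined,
    { outputFiltering: { overlap: 100 } }
  );

  for await (const chunk of response.stream) {
    console.log(chunk.data);
  }
}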
5 changes: 4 additions & 1 deletion packages/orchestration/src/orchestration-client.ts
@@ -22,7 +22,10 @@ import type {
import type { OrchestrationStreamChunkResponse } from './orchestration-stream-chunk-response.js';
import type { HttpDestinationOrFetchOptions } from '@sap-cloud-sdk/connectivity';

const logger = createLogger({ messageContext: 'orchestration-client' });
const logger = createLogger({
package: 'orchestration',
messageContext: 'orchestration-client'
});

/**
* Get the orchestration client.
@@ -1,7 +1,10 @@
import { constructCompletionPostRequest } from './orchestration-utils.js';
import { buildAzureContentFilter } from './orchestration-filter-utility.js';
import type { CompletionPostRequest } from './client/api/schema/index.js';
import type { OrchestrationModuleConfig } from './orchestration-types.js';
import type {
OrchestrationModuleConfig,
StreamOptions
} from './orchestration-types.js';

describe('construct completion post request', () => {
const defaultConfig: OrchestrationModuleConfig = {
@@ -199,4 +202,50 @@ describe('construct completion post request', () => {
constructCompletionPostRequest(config);
expect(completionPostRequest).toEqual(expectedCompletionPostRequest);
});

it('should construct completion post request with stream options', async () => {
const config: OrchestrationModuleConfig = {
...defaultConfig,
filtering: {
output: buildAzureContentFilter({ Hate: 4, SelfHarm: 0 })
}
};

const streamOptions: StreamOptions = {
global: { chunk_size: 100 },
outputFiltering: { overlap: 100 }
};

const expectedCompletionPostRequest: CompletionPostRequest = {
orchestration_config: {
stream: true,
stream_options: streamOptions.global,
module_configurations: {
templating_module_config: config.templating,
llm_module_config: {
...config.llm,
model_params: {
...config.llm.model_params,
stream_options: { include_usage: true }
}
},
filtering_module_config: {
output: {
...config.filtering!.output!,
stream_options: streamOptions.outputFiltering
}
}
}
},
input_params: { phrase: 'I hate you.' }
};
const completionPostRequest: CompletionPostRequest =
constructCompletionPostRequest(
config,
{ inputParams: { phrase: 'I hate you.' } },
true,
streamOptions
);
expect(completionPostRequest).toEqual(expectedCompletionPostRequest);
});
});
2 changes: 1 addition & 1 deletion packages/orchestration/src/orchestration-types.ts
@@ -89,7 +89,7 @@ export interface StreamOptions {
/**
* LLM specific stream options.
*/
llm?: { include_usage?: boolean; [key: string]: any };
llm?: { include_usage?: boolean; [key: string]: any } | null;
/**
* Output filtering stream options.
*/
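
The widened llm field above lets callers pass null to opt out of LLM stream options entirely. A hedged sketch of the shapes StreamOptions now accepts, based on the new unit tests in this commit (the local import path is assumed from the test files):

import type { StreamOptions } from './orchestration-types.js';

// Default: include_usage: true is added to the LLM module config implicitly.
const defaults: StreamOptions = { global: { chunk_size: 100 } };

// Explicitly disable usage reporting for the LLM module.
const noUsage: StreamOptions = { llm: { include_usage: false } };

// Pass null to skip adding stream_options to the LLM module config altogether,
// as exercised by the 'should not add any stream options' test below.
const skipLlm: StreamOptions = { llm: null, outputFiltering: { overlap: 100 } };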
4 changes: 0 additions & 4 deletions packages/orchestration/src/orchestration-utils.temp.ts

This file was deleted.

135 changes: 135 additions & 0 deletions packages/orchestration/src/orchestration-utils.test.ts
@@ -0,0 +1,135 @@
import { createLogger } from '@sap-cloud-sdk/util';
import { jest } from '@jest/globals';
import {
addStreamOptions,
addStreamOptionsToLlmModuleConfig,
addStreamOptionsToOutputFilteringConfig
} from './orchestration-utils.js';
import { buildAzureContentFilter } from './orchestration-filter-utility.js';
import type {
OrchestrationModuleConfig,
StreamOptions
} from './orchestration-types.js';
import type {
ModuleConfigs,
OrchestrationConfig
} from './client/api/schema/index.js';

describe('construct completion post request', () => {
const defaultConfig: OrchestrationModuleConfig = {
llm: {
model_name: 'gpt-35-turbo-16k',
model_params: { max_tokens: 50, temperature: 0.1 }
},
templating: {
template: [{ role: 'user', content: 'Create paraphrases of {{?phrase}}' }]
}
};

const defaultModuleConfigs: ModuleConfigs = {
llm_module_config: defaultConfig.llm,
templating_module_config: defaultConfig.templating
};

const defaultStreamOptions: StreamOptions = {
global: { chunk_size: 100 },
llm: { include_usage: false },
outputFiltering: { overlap: 100 }
};

it('should add include_usage to llm module config', () => {
const llmConfig = addStreamOptionsToLlmModuleConfig(defaultConfig.llm);
expect(llmConfig.model_params.stream_options).toEqual({
include_usage: true
});
});

it('should set include_usage to false in llm module config', () => {
const llmConfig = addStreamOptionsToLlmModuleConfig(
defaultConfig.llm,
defaultStreamOptions
);
expect(llmConfig.model_params.stream_options).toEqual({
include_usage: false
});
});

it('should not add any stream options to llm module config', () => {
const llmConfig = addStreamOptionsToLlmModuleConfig(defaultConfig.llm, {
llm: null
});
expect(
Object.keys(llmConfig.model_params).every(key => key !== 'stream_options')
).toBe(true);
});

it('should add stream options to output filtering config', () => {
const config: OrchestrationModuleConfig = {
...defaultConfig,
filtering: {
output: buildAzureContentFilter({ Hate: 4, SelfHarm: 0 })
}
};
const filteringConfig = addStreamOptionsToOutputFilteringConfig(
config.filtering!.output!,
defaultStreamOptions.outputFiltering!
);
expect(filteringConfig.filters).toEqual(config.filtering?.output?.filters);
expect(filteringConfig.stream_options).toEqual({
overlap: 100
});
});

it('should add stream options to orchestration config', () => {
const config: ModuleConfigs = {
...defaultModuleConfigs,
filtering_module_config: {
output: buildAzureContentFilter({ Hate: 4, SelfHarm: 0 })
}
};

// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { llm, ...streamOptions } = defaultStreamOptions;

const expectedOrchestrationConfig: OrchestrationConfig = {
stream: true,
stream_options: streamOptions.global,
module_configurations: {
...config,
llm_module_config: {
...config.llm_module_config,
model_params: {
...config.llm_module_config.model_params,
stream_options: { include_usage: true }
}
},
filtering_module_config: {
output: {
...config.filtering_module_config!.output!,
stream_options: streamOptions.outputFiltering
}
}
}
};
const orchestrationConfig = addStreamOptions(config, streamOptions);
expect(orchestrationConfig).toEqual(expectedOrchestrationConfig);
});

it('should warn if no filter config was set, but streaming options were set', () => {
const logger = createLogger({
package: 'orchestration',
messageContext: 'orchestration-utils'
});

const warnSpy = jest.spyOn(logger, 'warn');

const config = addStreamOptions(defaultModuleConfigs, defaultStreamOptions);

expect(warnSpy).toHaveBeenCalledWith(
'Output filter stream options are not applied because filtering module is not configured.'
);
expect(
config.module_configurations.filtering_module_config
).toBeUndefined();
});
});
11 changes: 7 additions & 4 deletions packages/orchestration/src/orchestration-utils.ts
@@ -13,7 +13,10 @@ import type {
OrchestrationModuleConfig
} from './orchestration-types.js';

const logger = createLogger({ messageContext: 'orchestration-utils' });
const logger = createLogger({
package: 'orchestration',
messageContext: 'orchestration-utils'
});

/**
* @internal
@@ -44,7 +47,7 @@ export function addStreamOptionsToLlmModuleConfig(
llmModuleConfig: LlmModuleConfig,
streamOptions?: StreamOptions
): LlmModuleConfig {
if (streamOptions?.llm === undefined) {
if (streamOptions?.llm === null) {
return llmModuleConfig;
}
return {
@@ -55,7 +58,7 @@
stream_options: {
include_usage: true,
...(llmModuleConfig.model_params.stream_options || {}),
...streamOptions.llm
...(streamOptions?.llm || {})
}
})
}
@@ -97,7 +100,7 @@ export function addStreamOptions(

return {
stream: true,
stream_options: globalOptions,
...(globalOptions && { stream_options: globalOptions }),
module_configurations: {
...moduleConfigs,
llm_module_config: addStreamOptionsToLlmModuleConfig(
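
The change from an unconditional stream_options: globalOptions to a conditional spread means the key is dropped from the request body when no global options are set. A standalone sketch of that pattern, illustrative only; the names here are not from the SDK:

interface GlobalStreamOptions {
  chunk_size?: number;
}

function buildConfig(globalOptions?: GlobalStreamOptions): Record<string, unknown> {
  return {
    stream: true,
    // The spread only adds stream_options when globalOptions is defined,
    // so the serialized request omits the key instead of sending undefined.
    ...(globalOptions && { stream_options: globalOptions })
  };
}

buildConfig();                    // { stream: true }
buildConfig({ chunk_size: 100 }); // { stream: true, stream_options: { chunk_size: 100 } }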
2 changes: 1 addition & 1 deletion packages/orchestration/tsconfig.json
@@ -6,7 +6,7 @@
"tsBuildInfoFile": "./dist/.tsbuildinfo",
"composite": true
},
"include": ["src/**/*.ts", "src/orchestration-utils.temp.ts"],
"include": ["src/**/*.ts", "src/orchestration-utils.test.ts"],
"exclude": ["dist/**/*", "test/**/*", "**/*.test.ts", "node_modules/**/*"],
"references": [{ "path": "../core" }, { "path": "../ai-api" }]
}