Commit 6d4b700

MatKuhr, cloud-sdk-js, marikaner, and tomfrenken authored
feat: Model Discovery (#75)
* Integrate changes from main
* Fix linting
* Fix tests
* fix: Changes from lint
* Fix type test
* Add alternative API variant
* Make models readonly
* Remove obsolete class
* Fix tests
* fix: Changes from lint
* add alternative proposal
* Minor improvements
* Refactoring from API discussion
* Linting
* More style
* fix: Changes from lint
* Fix type test
* fix: Changes from lint
* JS docs
* fix leftover
* Update packages/gen-ai-hub/src/orchestration/orchestration-client.ts
* Update packages/gen-ai-hub/src/utils/deployment-resolver.ts
* fix typescript issue
* update unit tests

---------

Co-authored-by: cloud-sdk-js <[email protected]>
Co-authored-by: Marika Marszalkowski <[email protected]>
Co-authored-by: Marika Marszalkowski <[email protected]>
Co-authored-by: Tom Frenken <[email protected]>
Co-authored-by: Tom Frenken <[email protected]>
1 parent 4df6fcb commit 6d4b700

26 files changed, +460 -332 lines changed
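
The headline change is the inference client API: instead of embedding a deploymentConfiguration object in every request payload, callers now pass a model name plus an optional deployment ID, and the payload itself is sent unchanged. A minimal sketch of the new call style, pieced together from the test changes further down; the package entry point, the constructor, and the deployment ID '1234' are assumptions, not confirmed by this diff:

import { OpenAiClient } from '@sap-ai-sdk/gen-ai-hub'; // entry point is an assumption

const client = new OpenAiClient();

async function main(): Promise<void> {
  // First argument selects the model; the optional last argument pins an
  // explicit deployment ID (the tests below always pass '1234').
  const response = await client.chatCompletion(
    'gpt-35-turbo',
    { messages: [{ role: 'user', content: 'Hello!' }] },
    '1234'
  );
  console.log(response);
}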

eslint.config.js

Lines changed: 1 addition & 1 deletion
@@ -8,7 +8,7 @@ export default [
     rules: { 'import/namespace': 'off'}
   },
   {
-    ignores: ['**/dist/**/*', '**/coverage/**/*', 'packages/ai-core/src/client/**/*'],
+    ignores: ['**/dist*/**/*', '**/coverage/**/*', 'packages/ai-core/src/client/**/*'],
   },
   {
     files: ['**/test-util/**/*.ts', '**/packages/gen-ai-hub/src/orchestration/client/**/*'],

packages/core/src/context.ts

Lines changed: 4 additions & 4 deletions
@@ -1,6 +1,6 @@
 import { createLogger } from '@sap-cloud-sdk/util';
 import {
-  Destination,
+  HttpDestination,
   Service,
   ServiceCredentials,
   getServiceBinding,
@@ -18,7 +18,7 @@ let aiCoreServiceBinding: Service | undefined;
  * Returns a destination object from AI Core service binding.
  * @returns The destination object.
  */
-export async function getAiCoreDestination(): Promise<Destination> {
+export async function getAiCoreDestination(): Promise<HttpDestination> {
   if (!aiCoreServiceBinding) {
     aiCoreServiceBinding =
       getAiCoreServiceKeyFromEnv() || getServiceBinding('aicore');
@@ -29,12 +29,12 @@ export async function getAiCoreDestination(): Promise<Destination> {
     }
   }

-  const aiCoreDestination = await transformServiceBindingToDestination(
+  const aiCoreDestination = (await transformServiceBindingToDestination(
     aiCoreServiceBinding,
     {
       useCache: true
     }
-  );
+  )) as HttpDestination;
   return aiCoreDestination;
 }
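
The narrowed return type (and the cast around transformServiceBindingToDestination) guarantees callers a url property, which the HTTP layer builds request URLs from. A minimal consumption sketch, assuming getAiCoreDestination is re-exported from the @sap-ai-sdk/core package root:

import { getAiCoreDestination } from '@sap-ai-sdk/core'; // export location is an assumption

async function resolveBaseUrl(): Promise<string> {
  const destination = await getAiCoreDestination();
  // HttpDestination, unlike the broader Destination type, always carries a url.
  return `${destination.url}/v2`;
}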

packages/core/src/http-client.ts

Lines changed: 3 additions & 5 deletions
@@ -54,19 +54,17 @@ export interface EndpointOptions {
  * @param requestConfig - The request configuration.
  * @returns The {@link HttpResponse} from the AI Core service.
  */
-export async function executeRequest<Data extends BaseLlmParameters>(
+export async function executeRequest(
   endpointOptions: EndpointOptions,
-  data: Data,
+  data: any,
   requestConfig?: CustomRequestConfig
 ): Promise<HttpResponse> {
   const aiCoreDestination = await getAiCoreDestination();
-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  const { deploymentConfiguration, ...body } = data;
   const { url, apiVersion } = endpointOptions;

   const mergedRequestConfig = {
     ...mergeWithDefaultRequestConfig(apiVersion, requestConfig),
-    data: JSON.stringify(body)
+    data: JSON.stringify(data)
   };

   const targetUrl = aiCoreDestination.url + `/v2/${removeLeadingSlashes(url)}`;
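
With the generic parameter gone, executeRequest no longer strips a deploymentConfiguration field; whatever object it receives is serialized as the request body. An illustrative call under that reading; the export location, endpoint, and payload values are assumptions:

import { executeRequest } from '@sap-ai-sdk/core'; // export location is an assumption

async function callInference(): Promise<void> {
  const response = await executeRequest(
    { url: 'inference/deployments/1234/chat/completions', apiVersion: '2024-02-01' },
    { messages: [{ role: 'user', content: 'Hello!' }] } // sent verbatim as the body
  );
  console.log(response.status);
}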

packages/core/src/openapi-request-builder.ts

Lines changed: 3 additions & 7 deletions
@@ -27,13 +27,9 @@ export class OpenApiRequestBuilder<
   async executeRaw(): Promise<HttpResponse> {
     const { url, data, ...rest } = await this.requestConfig();
     // TODO: Remove explicit url! once we updated the type in the Cloud SDK, since url is always defined.
-    return executeRequest(
-      { url: url! },
-      { deploymentConfiguration: {}, ...data },
-      {
-        ...rest
-      }
-    );
+    return executeRequest({ url: url! }, data, {
+      ...rest
+    });
   }

   /**

packages/gen-ai-hub/package.json

Lines changed: 1 addition & 0 deletions
@@ -29,6 +29,7 @@
   },
   "dependencies": {
     "@sap-ai-sdk/core": "workspace:^",
+    "@sap-ai-sdk/ai-core": "workspace:^",
     "@sap-cloud-sdk/http-client": "^3.18.1",
     "@sap-cloud-sdk/connectivity": "^3.18.1",
     "@sap-cloud-sdk/util": "^3.18.1",

packages/gen-ai-hub/src/client/interface.ts

Lines changed: 0 additions & 28 deletions
This file was deleted.
Lines changed: 26 additions & 32 deletions

@@ -1,29 +1,24 @@
 import nock from 'nock';
-import { BaseLlmParametersWithDeploymentId } from '@sap-ai-sdk/core';
 import {
   mockClientCredentialsGrantCall,
   mockInference,
   parseMockResponse
 } from '../../../../../test-util/mock-http.js';
 import {
   OpenAiChatCompletionOutput,
-  OpenAiChatCompletionParameters,
   OpenAiChatMessage,
   OpenAiEmbeddingOutput,
   OpenAiEmbeddingParameters
 } from './openai-types.js';
 import { OpenAiClient } from './openai-client.js';

 describe('openai client', () => {
-  const deploymentConfiguration: BaseLlmParametersWithDeploymentId = {
-    deploymentId: 'deployment-id'
-  };
   const chatCompletionEndpoint = {
-    url: `inference/deployments/${deploymentConfiguration.deploymentId}/chat/completions`,
+    url: 'inference/deployments/1234/chat/completions',
     apiVersion: '2024-02-01'
   };
   const embeddingsEndpoint = {
-    url: `inference/deployments/${deploymentConfiguration.deploymentId}/embeddings`,
+    url: 'inference/deployments/1234/embeddings',
     apiVersion: '2024-02-01'
   };
@@ -47,18 +42,15 @@ describe('openai client', () => {
         }
       ] as OpenAiChatMessage[]
     };
-    const request: OpenAiChatCompletionParameters = {
-      ...prompt,
-      deploymentConfiguration
-    };
+
     const mockResponse = parseMockResponse<OpenAiChatCompletionOutput>(
       'openai',
       'openai-chat-completion-success-response.json'
     );

     mockInference(
       {
-        data: request
+        data: prompt
       },
       {
         data: mockResponse,
@@ -67,24 +59,24 @@ describe('openai client', () => {
       chatCompletionEndpoint
     );

-    const response = await client.chatCompletion(request);
+    const response = await client.chatCompletion(
+      'gpt-35-turbo',
+      prompt,
+      '1234'
+    );
     expect(response).toEqual(mockResponse);
   });

   it('throws on bad request', async () => {
     const prompt = { messages: [] };
-    const request: OpenAiChatCompletionParameters = {
-      ...prompt,
-      deploymentConfiguration
-    };
     const mockResponse = parseMockResponse(
       'openai',
       'openai-error-response.json'
     );

     mockInference(
       {
-        data: request
+        data: prompt
       },
       {
         data: mockResponse,
@@ -93,50 +85,50 @@ describe('openai client', () => {
       chatCompletionEndpoint
     );

-    expect(client.chatCompletion(request)).rejects.toThrow();
+    await expect(
+      client.chatCompletion('gpt-4', prompt, '1234')
+    ).rejects.toThrow('status code 400');
   });
 });

 describe('embeddings', () => {
   it('parses a successful response', async () => {
-    const prompt = { input: ['AI is fascinating'] };
-    const request: OpenAiEmbeddingParameters = {
-      ...prompt,
-      deploymentConfiguration
-    };
+    const prompt = {
+      input: ['AI is fascinating']
+    } as OpenAiEmbeddingParameters;
     const mockResponse = parseMockResponse<OpenAiEmbeddingOutput>(
       'openai',
       'openai-embeddings-success-response.json'
     );

     mockInference(
       {
-        data: request
+        data: prompt
       },
       {
         data: mockResponse,
         status: 200
       },
       embeddingsEndpoint
     );
-    const response = await client.embeddings(request);
+    const response = await client.embeddings(
+      'text-embedding-ada-002',
+      prompt,
+      '1234'
+    );
     expect(response).toEqual(mockResponse);
   });

   it('throws on bad request', async () => {
     const prompt = { input: [] };
-    const request: OpenAiEmbeddingParameters = {
-      ...prompt,
-      deploymentConfiguration
-    };
     const mockResponse = parseMockResponse(
       'openai',
       'openai-error-response.json'
     );

     mockInference(
       {
-        data: request
+        data: prompt
       },
       {
         data: mockResponse,
@@ -145,7 +137,9 @@ describe('openai client', () => {
       embeddingsEndpoint
     );

-    expect(client.embeddings(request)).rejects.toThrow();
+    await expect(
+      client.embeddings('text-embedding-3-large', prompt, '1234')
+    ).rejects.toThrow('status code 400');
   });
 });
 });
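
The embeddings path follows the same pattern as chat completion: model name first, payload second, optional deployment ID last. A short usage sketch derived from the test above; the package entry point and the deployment ID are assumptions:

import { OpenAiClient } from '@sap-ai-sdk/gen-ai-hub'; // entry point is an assumption

const client = new OpenAiClient();

async function embed(): Promise<void> {
  const response = await client.embeddings(
    'text-embedding-ada-002',
    { input: ['AI is fascinating'] },
    '1234' // explicit deployment ID, mirroring the test fixture
  );
  console.log(response);
}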
