Skip to content

Commit 4c93940

Browse files
committed
Merge branch 'v3.8' of github.com:opensumi/core
2 parents 7038720 + 445496e commit 4c93940

File tree

96 files changed

+922
-436
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

96 files changed

+922
-436
lines changed

lerna.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
{
22
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
3-
"version": "3.8.1"
3+
"version": "3.8.2"
44
}

packages/addons/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@opensumi/ide-addons",
3-
"version": "3.8.1",
3+
"version": "3.8.2",
44
"files": [
55
"lib",
66
"src"

packages/ai-native/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@opensumi/ide-ai-native",
3-
"version": "3.8.1",
3+
"version": "3.8.2",
44
"files": [
55
"lib",
66
"src"

packages/ai-native/src/browser/ai-core.contribution.ts

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -360,6 +360,7 @@ export class AINativeBrowserContribution
360360
if (supportsCustomLLMSettings) {
361361
this.preferenceService.onSpecificPreferenceChange(AINativeSettingSectionsId.LLMModelSelection, (change) => {
362362
const model = this.getModelByName(change.newValue);
363+
// support modelIds
363364
const modelIds = model ? Object.keys(model) : [];
364365
const defaultModelId = modelIds.length ? modelIds[0] : '';
365366
const currentSchemas = this.preferenceSchemaProvider.getPreferenceProperty(AINativeSettingSectionsId.ModelID);
@@ -382,6 +383,25 @@ export class AINativeBrowserContribution
382383
modelIds.reduce((obj, item) => ({ ...obj, [item]: item }), {}),
383384
);
384385
});
386+
this.preferenceService.onSpecificPreferenceChange(AINativeSettingSectionsId.ModelID, (change) => {
387+
const model = this.preferenceService.get<string>(AINativeSettingSectionsId.LLMModelSelection);
388+
if (!model) {
389+
return;
390+
}
391+
const modelInfo = this.getModelByName(model);
392+
if (modelInfo && modelInfo[change.newValue]) {
393+
this.preferenceService.set(
394+
AINativeSettingSectionsId.MaxTokens,
395+
modelInfo[change.newValue].maxTokens,
396+
change.scope,
397+
);
398+
this.preferenceService.set(
399+
AINativeSettingSectionsId.ContextWindow,
400+
modelInfo[change.newValue].contextWindow,
401+
change.scope,
402+
);
403+
}
404+
});
385405
}
386406

387407
if (supportsMCP) {
@@ -546,6 +566,14 @@ export class AINativeBrowserContribution
546566
id: AINativeSettingSectionsId.OpenaiBaseURL,
547567
localized: 'preference.ai.native.openai.baseURL',
548568
},
569+
{
570+
id: AINativeSettingSectionsId.MaxTokens,
571+
localized: 'preference.ai.native.maxTokens',
572+
},
573+
{
574+
id: AINativeSettingSectionsId.ContextWindow,
575+
localized: 'preference.ai.native.contextWindow',
576+
},
549577
],
550578
});
551579
}

packages/ai-native/src/browser/chat/chat-manager.service.ts

Lines changed: 53 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,19 @@
11
import { Autowired, INJECTOR_TOKEN, Injectable, Injector } from '@opensumi/di';
2+
import { PreferenceService } from '@opensumi/ide-core-browser';
23
import {
4+
AINativeSettingSectionsId,
35
CancellationToken,
46
CancellationTokenSource,
57
Disposable,
68
DisposableMap,
79
Emitter,
810
IChatProgress,
11+
IDisposable,
912
IStorage,
13+
LRUCache,
1014
STORAGE_NAMESPACE,
1115
StorageProvider,
1216
debounce,
13-
formatLocalize,
1417
} from '@opensumi/ide-core-common';
1518
import { IHistoryChatMessage } from '@opensumi/ide-core-common/lib/types/ai-native';
1619

@@ -38,9 +41,26 @@ interface ISessionModel {
3841

3942
const MAX_SESSION_COUNT = 20;
4043

44+
class DisposableLRUCache<K, V extends IDisposable = IDisposable> extends LRUCache<K, V> implements IDisposable {
45+
disposeKey(key: K): void {
46+
const disposable = this.get(key);
47+
if (disposable) {
48+
disposable.dispose();
49+
}
50+
this.delete(key);
51+
}
52+
53+
dispose(): void {
54+
this.forEach((disposable) => {
55+
disposable.dispose();
56+
});
57+
this.clear();
58+
}
59+
}
60+
4161
@Injectable()
4262
export class ChatManagerService extends Disposable {
43-
#sessionModels = this.registerDispose(new DisposableMap<string, ChatModel>());
63+
#sessionModels = this.registerDispose(new DisposableLRUCache<string, ChatModel>(MAX_SESSION_COUNT));
4464
#pendingRequests = this.registerDispose(new DisposableMap<string, CancellationTokenSource>());
4565
private storageInitEmitter = new Emitter<void>();
4666
public onStorageInit = this.storageInitEmitter.event;
@@ -54,35 +74,39 @@ export class ChatManagerService extends Disposable {
5474
@Autowired(StorageProvider)
5575
private storageProvider: StorageProvider;
5676

77+
@Autowired(PreferenceService)
78+
private preferenceService: PreferenceService;
79+
5780
private _chatStorage: IStorage;
5881

5982
protected fromJSON(data: ISessionModel[]) {
60-
// TODO: 支持ApplyService恢复
61-
return data.map((item) => {
62-
const model = new ChatModel({
63-
sessionId: item.sessionId,
64-
history: new MsgHistoryManager(item.history),
83+
return data
84+
.filter((item) => item.history.messages.length > 0)
85+
.map((item) => {
86+
const model = new ChatModel({
87+
sessionId: item.sessionId,
88+
history: new MsgHistoryManager(item.history),
89+
});
90+
const requests = item.requests.map(
91+
(request) =>
92+
new ChatRequestModel(
93+
request.requestId,
94+
model,
95+
request.message,
96+
new ChatResponseModel(request.requestId, model, request.message.agentId, {
97+
responseContents: request.response.responseContents,
98+
isComplete: true,
99+
responseText: request.response.responseText,
100+
responseParts: request.response.responseParts,
101+
errorDetails: request.response.errorDetails,
102+
followups: request.response.followups,
103+
isCanceled: request.response.isCanceled,
104+
}),
105+
),
106+
);
107+
model.restoreRequests(requests);
108+
return model;
65109
});
66-
const requests = item.requests.map(
67-
(request) =>
68-
new ChatRequestModel(
69-
request.requestId,
70-
model,
71-
request.message,
72-
new ChatResponseModel(request.requestId, model, request.message.agentId, {
73-
responseContents: request.response.responseContents,
74-
isComplete: true,
75-
responseText: request.response.responseText,
76-
responseParts: request.response.responseParts,
77-
errorDetails: request.response.errorDetails,
78-
followups: request.response.followups,
79-
isCanceled: request.response.isCanceled,
80-
}),
81-
),
82-
);
83-
model.restoreRequests(requests);
84-
return model;
85-
});
86110
}
87111

88112
constructor() {
@@ -105,9 +129,6 @@ export class ChatManagerService extends Disposable {
105129
}
106130

107131
startSession() {
108-
if (this.#sessionModels.size >= MAX_SESSION_COUNT) {
109-
throw new Error(formatLocalize('aiNative.chat.session.max', MAX_SESSION_COUNT.toString()));
110-
}
111132
const model = new ChatModel();
112133
this.#sessionModels.set(model.sessionId, model);
113134
this.listenSession(model);
@@ -155,7 +176,8 @@ export class ChatManagerService extends Disposable {
155176
request.response.cancel();
156177
});
157178

158-
const history = model.messageHistory;
179+
const contextWindow = this.preferenceService.get<number>(AINativeSettingSectionsId.ContextWindow);
180+
const history = model.getMessageHistory(contextWindow);
159181

160182
try {
161183
const progressCallback = (progress: IChatProgress) => {

packages/ai-native/src/browser/chat/chat-model.ts

Lines changed: 22 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -276,15 +276,12 @@ export class ChatRequestModel implements IChatRequestModel {
276276
}
277277

278278
export class ChatModel extends Disposable implements IChatModel {
279-
private static requestIdPool = 0;
279+
private requestIdPool = 0;
280280

281-
constructor(initParams?: { sessionId?: string; history?: MsgHistoryManager; requests?: ChatRequestModel[] }) {
281+
constructor(initParams?: { sessionId?: string; history?: MsgHistoryManager }) {
282282
super();
283283
this.#sessionId = initParams?.sessionId ?? uuid();
284284
this.history = initParams?.history ?? new MsgHistoryManager();
285-
if (initParams?.requests) {
286-
this.#requests = new Map(initParams.requests.map((r) => [r.requestId, r]));
287-
}
288285
}
289286

290287
#sessionId: string;
@@ -299,11 +296,18 @@ export class ChatModel extends Disposable implements IChatModel {
299296

300297
restoreRequests(requests: ChatRequestModel[]): void {
301298
this.#requests = new Map(requests.map((r) => [r.requestId, r]));
299+
this.requestIdPool = requests.length;
302300
}
303301

304302
readonly history: MsgHistoryManager;
305303

306-
get messageHistory() {
304+
#slicedMessageCount = 0;
305+
306+
public get slicedMessageCount() {
307+
return this.#slicedMessageCount;
308+
}
309+
310+
getMessageHistory(contextWindow?: number) {
307311
const history: CoreMessage[] = [];
308312
for (const request of this.requests) {
309313
if (!request.response.isComplete) {
@@ -352,13 +356,23 @@ export class ChatModel extends Disposable implements IChatModel {
352356
}
353357
}
354358
}
355-
return history;
359+
if (contextWindow) {
360+
while (this.#slicedMessageCount < history.length) {
361+
// 简单的使用 JSON.stringify 计算 token 数量
362+
const tokenCount = JSON.stringify(history.slice(this.#slicedMessageCount)).length / 3;
363+
if (tokenCount <= contextWindow) {
364+
break;
365+
}
366+
this.#slicedMessageCount++;
367+
}
368+
}
369+
return history.slice(this.#slicedMessageCount);
356370
}
357371

358372
addRequest(message: IChatRequestMessage): ChatRequestModel {
359373
const msg = message;
360374

361-
const requestId = `${this.sessionId}_request_${ChatModel.requestIdPool++}`;
375+
const requestId = `${this.sessionId}_request_${this.requestIdPool++}`;
362376
const response = new ChatResponseModel(requestId, this, msg.agentId);
363377
const request = new ChatRequestModel(requestId, this, msg, response);
364378

packages/ai-native/src/browser/chat/chat-proxy.service.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -92,13 +92,15 @@ export class ChatProxyService extends Disposable {
9292
apiKey = this.preferenceService.get<string>(AINativeSettingSectionsId.OpenaiApiKey, '');
9393
baseURL = this.preferenceService.get<string>(AINativeSettingSectionsId.OpenaiBaseURL, '');
9494
}
95+
const maxTokens = this.preferenceService.get<number>(AINativeSettingSectionsId.MaxTokens);
9596
const agent = this.chatAgentService.getAgent(ChatProxyService.AGENT_ID);
9697
return {
9798
clientId: this.applicationService.clientId,
9899
model,
99100
modelId,
100101
apiKey,
101102
baseURL,
103+
maxTokens,
102104
system: agent?.metadata.systemPrompt,
103105
};
104106
}

0 commit comments

Comments (0)