
Commit de7fab4

Merge pull request #4846 from continuedev/nate/llm-info-fix
llm-info fix
2 parents da4a71c + cdbf7d5 commit de7fab4

File tree

9 files changed: +33 −20 lines changed


core/llm/index.ts

+12 −9

@@ -177,7 +177,11 @@ export abstract class BaseLLM implements ILLM {
 
     this.model = options.model;
     // Use @continuedev/llm-info package to autodetect certain parameters
-    const llmInfo = findLlmInfo(this.model);
+    const modelSearchString =
+      this.providerName === "continue-proxy"
+        ? this.model?.split("/").pop() || this.model
+        : this.model;
+    const llmInfo = findLlmInfo(modelSearchString);
 
     const templateType =
       options.template ?? autodetectTemplateType(options.model);
@@ -195,11 +199,11 @@
         options.completionOptions?.maxTokens ??
         (llmInfo?.maxCompletionTokens
           ? Math.min(
-            llmInfo.maxCompletionTokens,
-            // Even if the model has a large maxTokens, we don't want to use that every time,
-            // because it takes away from the context length
-            this.contextLength / 4,
-          )
+              llmInfo.maxCompletionTokens,
+              // Even if the model has a large maxTokens, we don't want to use that every time,
+              // because it takes away from the context length
+              this.contextLength / 4,
+            )
           : DEFAULT_MAX_TOKENS),
       };
       this.requestOptions = options.requestOptions;
@@ -840,7 +844,6 @@
       signal,
       completionOptions,
     )) {
-
       if (chunk.role === "assistant") {
         completion += chunk.content;
         yield chunk;
@@ -948,15 +951,15 @@
     );
   }
 
-  protected async * _streamComplete(
+  protected async *_streamComplete(
     prompt: string,
     signal: AbortSignal,
     options: CompletionOptions,
   ): AsyncGenerator<string> {
     throw new Error("Not implemented");
   }
 
-  protected async * _streamChat(
+  protected async *_streamChat(
     messages: ChatMessage[],
     signal: AbortSignal,
     options: CompletionOptions,
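A note on the first hunk above: when the provider is continue-proxy, the configured model identifier is a "/"-separated slug rather than a bare model name (that slug shape is an assumption inferred from the split-on-"/" logic), so passing it straight to findLlmInfo from @continuedev/llm-info would match none of the per-model regexes and autodetection would fall back to defaults. The fix looks up only the last path segment. Below is a minimal standalone sketch of that normalization; getModelSearchString is a hypothetical helper for illustration, not a function in the Continue codebase:

```typescript
// Hypothetical helper mirroring the ternary in the diff above.
// Assumption: continue-proxy model IDs look like "owner/package/claude-3-7-sonnet-latest".
function getModelSearchString(
  providerName: string,
  model: string | undefined,
): string | undefined {
  if (providerName === "continue-proxy") {
    // Keep only the trailing segment so llm-info's model regexes can match it.
    return model?.split("/").pop() || model;
  }
  return model;
}

// Example (illustrative values):
// getModelSearchString("continue-proxy", "acme/dev-assistant/claude-3-7-sonnet-latest")
//   -> "claude-3-7-sonnet-latest"
// getModelSearchString("anthropic", "claude-3-7-sonnet-latest")
//   -> "claude-3-7-sonnet-latest" (unchanged)
```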

core/package-lock.json

+4 −4

Some generated files are not rendered by default.

core/package.json

+1 −1

@@ -49,7 +49,7 @@
     "@continuedev/config-types": "^1.0.13",
     "@continuedev/config-yaml": "^1.0.67",
     "@continuedev/fetch": "^1.0.4",
-    "@continuedev/llm-info": "^1.0.2",
+    "@continuedev/llm-info": "^1.0.8",
     "@continuedev/openai-adapters": "^1.0.18",
     "@modelcontextprotocol/sdk": "^1.5.0",
     "@mozilla/readability": "^0.5.0",

extensions/intellij/gradle.properties

+1 −1

@@ -3,7 +3,7 @@ pluginGroup=com.github.continuedev.continueintellijextension
 pluginName=continue-intellij-extension
 pluginRepositoryUrl=https://github.com/continuedev/continue
 # SemVer format -> https://semver.org
-pluginVersion=1.0.7
+pluginVersion=1.0.8
 # Supported build number ranges and IntelliJ Platform versions -> https://plugins.jetbrains.com/docs/intellij/build-number-ranges.html
 pluginSinceBuild=223
 # IntelliJ Platform Properties -> https://plugins.jetbrains.com/docs/intellij/tools-gradle-intellij-plugin.html#configuration-intellij-extension

extensions/vscode/package-lock.json

+1 −1

Some generated files are not rendered by default.

extensions/vscode/package.json

+1 −1

@@ -2,7 +2,7 @@
   "name": "continue",
   "icon": "media/icon.png",
   "author": "Continue Dev, Inc",
-  "version": "1.1.16",
+  "version": "1.1.17",
   "repository": {
     "type": "git",
     "url": "https://github.com/continuedev/continue"

gui/package-lock.json

+1 −1

Some generated files are not rendered by default.

packages/llm-info/package.json

+1 −1

@@ -1,6 +1,6 @@
 {
   "name": "@continuedev/llm-info",
-  "version": "1.0.7",
+  "version": "1.0.8",
   "description": "",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",

packages/llm-info/src/providers/anthropic.ts

+11 −1

@@ -11,7 +11,17 @@ export const Anthropic: ModelProvider = {
     maxCompletionTokens: 8192,
     description:
       "Most intelligent model with the highest level of intelligence and capability.",
-    regex: /claude-3\.5-sonnet/i,
+    regex: /claude-3[.-]5-sonnet.*/i,
+    recommendedFor: ["chat"],
+  },
+  {
+    model: "claude-3-7-sonnet-latest",
+    displayName: "Claude 3.7 Sonnet",
+    contextLength: 200000,
+    maxCompletionTokens: 8192,
+    description:
+      "Most intelligent model with the highest level of intelligence and capability.",
+    regex: /claude-3[.-]7-sonnet.*/i,
     recommendedFor: ["chat"],
   },
   {
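The regex change here is the substance of the llm-info fix: in practice the version component of Sonnet model IDs shows up both with a dot and with a dash (e.g. "claude-3.5-sonnet..." vs "claude-3-5-sonnet-latest"), and the old pattern /claude-3\.5-sonnet/i matched only the dot form. The widened character class [.-] accepts both, and a parallel entry is added for Claude 3.7 Sonnet. A quick standalone check of the new patterns (illustrative only, not a test from the repo):

```typescript
// Illustrative check of the patterns introduced above; not code from the repo.
const sonnet35 = /claude-3[.-]5-sonnet.*/i;
const sonnet37 = /claude-3[.-]7-sonnet.*/i;

console.log(sonnet35.test("claude-3.5-sonnet-20241022")); // true (dot form, as before)
console.log(sonnet35.test("claude-3-5-sonnet-latest")); // true (dash form now matches)
console.log(sonnet37.test("claude-3-7-sonnet-latest")); // true (new 3.7 entry)
console.log(sonnet37.test("claude-3-5-sonnet-latest")); // false (does not cross versions)
```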

Comments (0)