
Commit 93e23dd

add local chat config for msty
1 parent: 21492af

1 file changed (+42, -0 lines)

docs/docs/customize/model-roles/chat.mdx

@@ -257,6 +257,14 @@ If your local machine can run an 8B parameter model, then we recommend running L
     model: llama3.1:8b
 ```
 </TabItem>
+<TabItem value="Msty">
+```yaml title="config.yaml"
+models:
+  - name: Llama 3.1 8B
+    provider: msty
+    model: llama3.1:8b
+```
+</TabItem>
 </Tabs>
 </TabItem>
 <TabItem value="json" label="JSON">
@@ -287,6 +295,19 @@ If your local machine can run an 8B parameter model, then we recommend running L
 }
 ```
 </TabItem>
+<TabItem value="Msty">
+```json title="config.json"
+{
+  "models": [
+    {
+      "title": "Llama 3.1 8B",
+      "provider": "msty",
+      "model": "llama3.1-8b"
+    }
+  ]
+}
+```
+</TabItem>
 </Tabs>
 </TabItem>
 </Tabs>
@@ -325,6 +346,14 @@ If your local machine can run a 16B parameter model, then we recommend running D
     model: deepseek-coder-v2:16b
 ```
 </TabItem>
+<TabItem value="Msty">
+```yaml title="config.yaml"
+models:
+  - name: DeepSeek Coder 2 16B
+    provider: msty
+    model: deepseek-coder-v2:16b
+```
+</TabItem>
 </Tabs>
 </TabItem>
 <TabItem value="json" label="JSON">
@@ -356,6 +385,19 @@ If your local machine can run a 16B parameter model, then we recommend running D
 }
 ```
 </TabItem>
+<TabItem value="Msty">
+```json title="config.json"
+{
+  "models": [
+    {
+      "title": "DeepSeek Coder 2 16B",
+      "provider": "msty",
+      "model": "deepseek-coder-v2:16b"
+    }
+  ]
+}
+```
+</TabItem>
 </Tabs>
 </TabItem>
 </Tabs>
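
Taken together, the two YAML additions above amount to the models block sketched below. This is only a consolidated illustration of the snippets in this commit, not part of the change itself; it assumes Msty is running locally with both models available, and it includes only the fields that appear in the diff.

```yaml
# Sketch of a combined config.yaml using Msty as the local provider,
# merging the two examples added in this commit. Only the fields shown
# in the diff (name, provider, model) are included; anything else
# (e.g. an API base URL or role assignments) would need to be added separately.
models:
  - name: Llama 3.1 8B
    provider: msty
    model: llama3.1:8b
  - name: DeepSeek Coder 2 16B
    provider: msty
    model: deepseek-coder-v2:16b
```

The JSON examples mirror these one-to-one, except that config.json uses "title" instead of "name" and spells the Llama entry "llama3.1-8b" rather than "llama3.1:8b".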
