docs/docs/customize/model-roles (1 file changed, +42 -0 lines)

@@ -257,6 +257,14 @@ If your local machine can run an 8B parameter model, then we recommend running L
       model: llama3.1:8b
   ```
 </TabItem>
+<TabItem value="Msty">
+  ```yaml title="config.yaml"
+  models:
+    - name: Llama 3.1 8B
+      provider: msty
+      model: llama3.1:8b
+  ```
+</TabItem>
 </Tabs>
 </TabItem>
 <TabItem value="json" label="JSON">
@@ -287,6 +295,19 @@ If your local machine can run an 8B parameter model, then we recommend running L
   }
   ```
 </TabItem>
+<TabItem value="Msty">
+  ```json title="config.json"
+  {
+    "models": [
+      {
+        "title": "Llama 3.1 8B",
+        "provider": "msty",
+        "model": "llama3.1-8b"
+      }
+    ]
+  }
+  ```
+</TabItem>
 </Tabs>
 </TabItem>
 </Tabs>
@@ -325,6 +346,14 @@ If your local machine can run a 16B parameter model, then we recommend running D
       model: deepseek-coder-v2:16b
   ```
 </TabItem>
+<TabItem value="Msty">
+  ```yaml title="config.yaml"
+  models:
+    - name: DeepSeek Coder 2 16B
+      provider: msty
+      model: deepseek-coder-v2:16b
+  ```
+</TabItem>
 </Tabs>
 </TabItem>
 <TabItem value="json" label="JSON">
@@ -356,6 +385,19 @@ If your local machine can run a 16B parameter model, then we recommend running D
   }
   ```
 </TabItem>
+<TabItem value="Msty">
+  ```json title="config.json"
+  {
+    "models": [
+      {
+        "title": "DeepSeek Coder 2 16B",
+        "provider": "msty",
+        "model": "deepseek-coder-v2:16b"
+      }
+    ]
+  }
+  ```
+</TabItem>
 </Tabs>
 </TabItem>
 </Tabs>
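
For quick reference, the two config.yaml snippets added in this change can live side by side in a single file. The block below is an illustrative sketch only (it is not part of the diff), assuming you want both recommended local models registered with the Msty provider at once:

```yaml
# Illustrative sketch only: both Msty entries from this change in one config.yaml.
models:
  - name: Llama 3.1 8B
    provider: msty
    model: llama3.1:8b
  - name: DeepSeek Coder 2 16B
    provider: msty
    model: deepseek-coder-v2:16b
```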