Ai-models-integration #100

Open · wants to merge 2 commits into master
35 changes: 35 additions & 0 deletions src/components/ai-model-options.tsx
@@ -0,0 +1,35 @@
+import React from 'react';
+
+const modelOptions = [
+  'deepseek-r1', 'llama3.3', 'phi4', 'llama3.2', 'llama3.1', 'nomic-embed-text', 'mistral', 'llama3', 'qwen',
+  'gemma', 'qwen2', 'qwen2.5', 'llama2', 'phi3', 'llava', 'gemma2', 'qwen2.5-coder', 'codellama', 'tinyllama',
+  'mxbai-embed-large', 'mistral-nemo', 'llama3.2-vision', 'starcoder2', 'snowflake-arctic-embed', 'mixtral',
+  'deepseek-coder-v2', 'dolphin-mixtral', 'phi', 'codegemma', 'deepseek-coder', 'llama2-uncensored', 'wizardlm2',
+  'dolphin-mistral', 'all-minilm', 'dolphin-llama3', 'bge-m3', 'llama2-chinese', 'smollm2', 'codegeex4', 'openchat',
+  'aya', 'codeqwen', 'nous-hermes2', 'mistral-large', 'command-r-plus', 'stable-code', 'openhermes', 'tinydolphin',
+  'deepseek-llm', 'glm4', 'wizardcoder', 'qwen2-math', 'bakllava', 'stablelm2', 'reflection', 'moondream', 'neural-chat',
+  'llama3-gradient', 'wizard-math', 'deepseek-v2', 'llama3-chatqa', 'minicpm-v', 'sqlcoder', 'xwinlm', 'mistral-small',
+  'nous-hermes', 'dolphincoder', 'phind-codellama', 'yarn-llama2', 'hermes3', 'solar', 'wizardlm', 'starling-lm',
+  'yi-coder', 'llava-phi3', 'internlm2', 'athene-v2', 'falcon', 'falcon3', 'notux', 'open-orca-platypus2', 'shieldgemma',
+  'notus', 'goliath', 'llama-guard3', 'bespoke-minicheck', 'nuextract', 'granite3.1-moe', 'opencoder', 'deepseek-v2.5',
+  'snowflake-arctic-embed2', 'firefunction-v2', 'dbrx', 'paraphrase-multilingual', 'alfred', 'olmo2', 'exaone3.5',
+  'tulu3', 'command-r7b', 'granite-embedding', 'granite3-guardian', 'sailor2'
+];
+
+const AiModelOptions = ({ searchTerm }: { searchTerm: string }) => {
+  const filteredOptions = modelOptions.filter((model) =>
+    model.toLowerCase().includes(searchTerm.toLowerCase())
+  );
+
+  const optionsToShow = filteredOptions.length > 0 ? filteredOptions.slice(0, 5) : modelOptions.slice(0, 5);
+
+  return (
+    <datalist id="model-options">
+      {optionsToShow.map((model, index) => (
+        <option key={index} value={model} />
+      ))}
+    </datalist>
+  );
+};
+
+export default AiModelOptions;
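
For context: the component only renders a <datalist>, so suggestions appear only for an input that opts in via list="model-options". A minimal usage sketch under that assumption — the ModelPicker wrapper below is illustrative and not part of this PR (the PR itself does this wiring in pull-model-form.tsx further down):

// Usage sketch (illustrative, not part of the diff).
import React, { useState } from "react";
import AiModelOptions from "./ai-model-options";

export default function ModelPicker() {
  const [name, setName] = useState("");

  return (
    <div className="flex flex-col">
      {/* The native `list` attribute links this input to the <datalist>
          rendered by AiModelOptions, so the browser shows its <option>
          entries as autocomplete suggestions. */}
      <input
        type="text"
        list="model-options"
        value={name}
        onChange={(e) => setName(e.target.value)}
        placeholder="llama3.2"
      />
      <AiModelOptions searchTerm={name} />
    </div>
  );
}

Because the fallback branch of optionsToShow returns the first five entries of the hard-coded list, the input still offers default suggestions when the current search term matches nothing.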
21 changes: 14 additions & 7 deletions src/components/chat/chat-topbar.tsx
@@ -42,15 +42,22 @@ export default function ChatTopbar({
   const setSelectedModel = useChatStore((state) => state.setSelectedModel);
 
   useEffect(() => {
-    const fetchModels = async () => {
-      const fetchedModels = await fetch("/api/tags");
-      const json = await fetchedModels.json();
-      const apiModels = json.models.map((model: any) => model.name);
-      setModels([...apiModels]);
-    };
-    fetchModels();
+    (async () => {
+      try {
+        const res = await fetch("/api/tags");
+        if (!res.ok) throw new Error(`HTTP Error: ${res.status}`);
+
+        const data = await res.json().catch(() => null);
+        if (!data?.models?.length) return;
+
+        setModels(data.models.map(({ name }: { name: string }) => name));
+      } catch (error) {
+        console.error("Error fetching models:", error);
+      }
+    })();
   }, []);
 
+
   const handleModelChange = (model: string) => {
     setSelectedModel(model);
     setOpen(false);
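
The rewritten effect assumes the Ollama-style /api/tags payload: an object with a models array whose entries carry a name field. A small typed sketch of that assumption — the TagsResponse type and fetchModelNames helper are illustrative only, not part of the diff:

// Illustrative sketch: the response shape the new effect relies on, and the
// same guarded fetch expressed as a reusable helper.
interface TagsResponse {
  models: { name: string }[];
}

async function fetchModelNames(): Promise<string[]> {
  const res = await fetch("/api/tags");
  if (!res.ok) throw new Error(`HTTP Error: ${res.status}`);

  // Tolerate an empty or non-JSON body instead of letting .json() throw.
  const data = (await res.json().catch(() => null)) as TagsResponse | null;
  return data?.models?.map(({ name }) => name) ?? [];
}

Compared with the removed fetchModels, a non-2xx status or malformed body now logs an error (or yields an empty list) instead of throwing out of the effect.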
24 changes: 13 additions & 11 deletions src/components/pull-model-form.tsx
@@ -1,5 +1,3 @@
"use client";

import React from "react";
import {
Form,
@@ -19,6 +17,7 @@ import { Input } from "./ui/input";
 import { throttle } from "lodash";
 import useChatStore from "@/app/hooks/useChatStore";
 import { useRouter } from "next/navigation";
+import AiModelOptions from "./ai-model-options";
 
 const formSchema = z.object({
   name: z.string().min(1, {
@@ -78,8 +77,7 @@ export default function PullModelForm() {
       router.refresh();
     } catch (error) {
       toast.error(
-        `Error: ${
-          error instanceof Error ? error.message : "Failed to pull model"
+        `Error: ${error instanceof Error ? error.message : "Failed to pull model"
         }`
       );
     } finally {
@@ -137,20 +135,24 @@ export default function PullModelForm() {

   return (
     <Form {...form}>
-      <form onSubmit={form.handleSubmit(onSubmit)} className="w-full space-y-6">
+      <form onSubmit={form.handleSubmit(onSubmit)} className="w-full space-y-6 max-h-48 overflow-y-hidden" >
         <FormField
           control={form.control}
           name="name"
           render={({ field }) => (
             <FormItem>
               <FormLabel>Model name</FormLabel>
               <FormControl>
-                <Input
-                  {...field}
-                  type="text"
-                  placeholder="llama2"
-                  value={field.value || ""}
-                />
+                <div className="flex flex-col">
+                  <Input
+                    {...field}
+                    type="text"
+                    placeholder="Write name or Search for a model to pull"
+                    value={field.value || ""}
+                    list="model-options"
+                  />
+                  <AiModelOptions searchTerm={field.value} />
+                </div>
               </FormControl>
               <p className="text-xs pt-1">
                 Check the{" "}