From f8a3f7973cfcc201322a9a8c30fa3630dd7c38c5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Victor=20Mu=C5=A1tar?=
Date: Fri, 9 May 2025 17:32:33 +0200
Subject: [PATCH 1/4] add MLX LM

---
 packages/tasks/src/local-apps.ts | 44 ++++++++++++++++++++++++++++++++
 1 file changed, 44 insertions(+)

diff --git a/packages/tasks/src/local-apps.ts b/packages/tasks/src/local-apps.ts
index 155d4bf401..26ed0e2f91 100644
--- a/packages/tasks/src/local-apps.ts
+++ b/packages/tasks/src/local-apps.ts
@@ -262,6 +262,43 @@ const snippetTgi = (model: ModelData): LocalAppSnippet[] => {
 	];
 };
 
+const snippetMlxLm = (model: ModelData): LocalAppSnippet[] => {
+	const openaiCurl = [
+		"# Calling the OpenAI-compatible server with curl",
+		`curl -X POST "http://localhost:8000/v1/chat/completions" \\`,
+		`	-H "Content-Type: application/json" \\`,
+		`	--data '{`,
+		`		"model": "${model.id}",`,
+		`		"messages": [`,
+		`			{"role": "user", "content": "Hello"}`,
+		`		]`,
+		`	}'`,
+	];
+
+	return [
+		{
+			title: "Generate or start a chat session",
+			setup: ["# Install MLX LM", "pip install mlx-lm"].join("\n"),
+			content: [
+				"# One-shot generation",
+				`mlx_lm.generate --model "${model.id}" --prompt "Hello"`,
+				...(model.tags.includes("conversational")
+					? ["# Interactive chat REPL", `mlx_lm.chat --model "${model.id}"`]
+					: []),
+			].join("\n"),
+		},
+		...(model.tags.includes("conversational")
+			? [
+					{
+						title: "Run an OpenAI-compatible server",
+						setup: ["# Install MLX LM", "pip install mlx-lm"].join("\n"),
+						content: ["# Start the server", `mlx_lm.server --model "${model.id}"`, ...openaiCurl].join("\n"),
+					},
+			  ]
+			: []),
+	];
+};
+
 /**
  * Add your new local app here.
  *
@@ -302,6 +339,13 @@ export const LOCAL_APPS = {
 			(model.pipeline_tag === "text-generation" || model.pipeline_tag === "image-text-to-text"),
 		snippet: snippetVllm,
 	},
+	"mlx-lm": {
+		prettyLabel: "MLX LM",
+		docsUrl: "https://github.com/ml-explore/mlx-lm",
+		mainTask: "text-generation",
+		displayOnModelPage: isMlxModel,
+		snippet: snippetMlxLm,
+	},
 	tgi: {
 		prettyLabel: "TGI",
 		docsUrl: "https://huggingface.co/docs/text-generation-inference/",

From d5cf085802189a9b3595b878b7addf5b9a21290a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Victor=20Mu=C5=A1tar?=
Date: Mon, 12 May 2025 13:43:31 +0200
Subject: [PATCH 2/4] pipeline_tag check

---
 packages/tasks/src/local-apps.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/tasks/src/local-apps.ts b/packages/tasks/src/local-apps.ts
index 26ed0e2f91..da8c973287 100644
--- a/packages/tasks/src/local-apps.ts
+++ b/packages/tasks/src/local-apps.ts
@@ -343,7 +343,7 @@ export const LOCAL_APPS = {
 		prettyLabel: "MLX LM",
 		docsUrl: "https://github.com/ml-explore/mlx-lm",
 		mainTask: "text-generation",
-		displayOnModelPage: isMlxModel,
+		displayOnModelPage: (model) => model.pipeline_tag === "text-generation" && isMlxModel(model),
 		snippet: snippetMlxLm,
 	},
 	tgi: {

From 4aecf603f7ccd761ee1aa9e0772bf3c352372643 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Victor=20Mu=C5=A1tar?=
Date: Mon, 12 May 2025 14:08:31 +0200
Subject: [PATCH 3/4] use uv + generate for non-conversational only

---
 packages/tasks/src/local-apps.ts | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/packages/tasks/src/local-apps.ts b/packages/tasks/src/local-apps.ts
index da8c973287..11b68e970f 100644
--- a/packages/tasks/src/local-apps.ts
+++ b/packages/tasks/src/local-apps.ts
@@ -278,20 +278,18 @@ const snippetMlxLm = (model: ModelData): LocalAppSnippet[] => {
 	return [
 		{
 			title: "Generate or start a chat session",
-			setup: ["# Install MLX LM", "pip install mlx-lm"].join("\n"),
+			setup: ["# Install MLX LM", "uv tool install mlx-lm"].join("\n"),
 			content: [
-				"# One-shot generation",
-				`mlx_lm.generate --model "${model.id}" --prompt "Hello"`,
 				...(model.tags.includes("conversational")
 					? ["# Interactive chat REPL", `mlx_lm.chat --model "${model.id}"`]
-					: []),
+					: ["# One-shot generation", `mlx_lm.generate --model "${model.id}" --prompt "Hello"`]),
 			].join("\n"),
 		},
 		...(model.tags.includes("conversational")
 			? [
 					{
 						title: "Run an OpenAI-compatible server",
-						setup: ["# Install MLX LM", "pip install mlx-lm"].join("\n"),
+						setup: ["# Install MLX LM", "uv tool install mlx-lm"].join("\n"),
 						content: ["# Start the server", `mlx_lm.server --model "${model.id}"`, ...openaiCurl].join("\n"),
 					},
 			  ]

From d090f67a64da3472ef89d5bc7b69adb6322bd5d5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Victor=20Mu=C5=A1tar?=
Date: Mon, 12 May 2025 14:11:15 +0200
Subject: [PATCH 4/4] wording

---
 packages/tasks/src/local-apps.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/tasks/src/local-apps.ts b/packages/tasks/src/local-apps.ts
index 11b68e970f..c0ff1f8249 100644
--- a/packages/tasks/src/local-apps.ts
+++ b/packages/tasks/src/local-apps.ts
@@ -282,7 +282,7 @@ const snippetMlxLm = (model: ModelData): LocalAppSnippet[] => {
 			content: [
 				...(model.tags.includes("conversational")
 					? ["# Interactive chat REPL", `mlx_lm.chat --model "${model.id}"`]
-					: ["# One-shot generation", `mlx_lm.generate --model "${model.id}" --prompt "Hello"`]),
+					: ["# Generate some text", `mlx_lm.generate --model "${model.id}" --prompt "Once upon a time"`]),
 			].join("\n"),
 		},
 		...(model.tags.includes("conversational")
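
For reference, a minimal sketch of what the final snippetMlxLm (after PATCH 4/4) returns for a conversational MLX model; the model id and tags below are hypothetical, invented for illustration, not taken from the patches:

// Hypothetical input: a conversational MLX model (id and tags are made up)
const model = {
	id: "mlx-community/Example-4bit",
	tags: ["mlx", "conversational"],
	pipeline_tag: "text-generation",
} as ModelData;

const snippets = snippetMlxLm(model);
// snippets[0].setup   -> "# Install MLX LM\nuv tool install mlx-lm"
// snippets[0].content -> "# Interactive chat REPL\nmlx_lm.chat --model \"mlx-community/Example-4bit\""
// snippets[1].title   -> "Run an OpenAI-compatible server"
// snippets[1].content starts with "# Start the server\nmlx_lm.server --model \"mlx-community/Example-4bit\""
// A model without the "conversational" tag instead yields a single snippet
// whose content is "# Generate some text\nmlx_lm.generate --model ... --prompt \"Once upon a time\"".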