diff --git a/packages/components/credentials/LitellmApi.credential.ts b/packages/components/credentials/LitellmApi.credential.ts
new file mode 100644
index 00000000000..6bf866f5cee
--- /dev/null
+++ b/packages/components/credentials/LitellmApi.credential.ts
@@ -0,0 +1,23 @@
+import { INodeParams, INodeCredential } from '../src/Interface'
+
+class LitellmApi implements INodeCredential {
+    label: string
+    name: string
+    version: number
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'Litellm API'
+        this.name = 'litellmApi'
+        this.version = 1.0
+        this.inputs = [
+            {
+                label: 'API Key',
+                name: 'litellmApiKey',
+                type: 'password'
+            }
+        ]
+    }
+}
+
+module.exports = { credClass: LitellmApi }
diff --git a/packages/components/nodes/chatmodels/ChatLitellm/ChatLitellm.ts b/packages/components/nodes/chatmodels/ChatLitellm/ChatLitellm.ts
new file mode 100644
index 00000000000..352f883c64b
--- /dev/null
+++ b/packages/components/nodes/chatmodels/ChatLitellm/ChatLitellm.ts
@@ -0,0 +1,135 @@
+import { OpenAIChatInput, ChatOpenAI } from '@langchain/openai'
+import { BaseCache } from '@langchain/core/caches'
+import { BaseLLMParams } from '@langchain/core/language_models/llms'
+import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
+import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
+
+class ChatLitellm_ChatModels implements INode {
+    label: string
+    name: string
+    version: number
+    type: string
+    icon: string
+    category: string
+    description: string
+    baseClasses: string[]
+    credential: INodeParams
+    inputs: INodeParams[]
+
+    constructor() {
+        this.label = 'ChatLitellm'
+        this.name = 'chatLitellm'
+        this.version = 1.0
+        this.type = 'ChatLitellm'
+        this.icon = 'litellm.jpg'
+        this.category = 'Chat Models'
+        this.description = 'Connect to a Litellm server using OpenAI-compatible API'
+        this.baseClasses = [this.type, 'BaseChatModel', ...getBaseClasses(ChatOpenAI)]
+        this.credential = {
+            label: 'Connect Credential',
+            name: 'credential',
+            type: 'credential',
+            credentialNames: ['litellmApi'],
+            optional: true
+        }
+        this.inputs = [
+            {
+                label: 'Cache',
+                name: 'cache',
+                type: 'BaseCache',
+                optional: true
+            },
+            {
+                label: 'Base URL',
+                name: 'basePath',
+                type: 'string',
+                placeholder: 'http://localhost:8000'
+            },
+            {
+                label: 'Model Name',
+                name: 'modelName',
+                type: 'string',
+                placeholder: 'model_name'
+            },
+            {
+                label: 'Temperature',
+                name: 'temperature',
+                type: 'number',
+                step: 0.1,
+                default: 0.9,
+                optional: true
+            },
+            {
+                label: 'Streaming',
+                name: 'streaming',
+                type: 'boolean',
+                default: true,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Max Tokens',
+                name: 'maxTokens',
+                type: 'number',
+                step: 1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Top P',
+                name: 'topP',
+                type: 'number',
+                step: 0.1,
+                optional: true,
+                additionalParams: true
+            },
+            {
+                label: 'Timeout',
+                name: 'timeout',
+                type: 'number',
+                step: 1,
+                optional: true,
+                additionalParams: true
+            }
+        ]
+    }
+
+    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
+        const cache = nodeData.inputs?.cache as BaseCache
+        const basePath = nodeData.inputs?.basePath as string
+        const modelName = nodeData.inputs?.modelName as string
+        const temperature = nodeData.inputs?.temperature as string
+        const streaming = nodeData.inputs?.streaming as boolean
+        const maxTokens = nodeData.inputs?.maxTokens as string
+        const topP = nodeData.inputs?.topP as string
+        const timeout = nodeData.inputs?.timeout as string
+
+        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
+        const apiKey = getCredentialParam('litellmApiKey', credentialData, nodeData)
+
+        const obj: Partial<OpenAIChatInput> &
+            BaseLLMParams & { openAIApiKey?: string } & { configuration?: { baseURL?: string; defaultHeaders?: ICommonObject } } = {
+            temperature: parseFloat(temperature),
+            modelName,
+            streaming: streaming ?? true
+        }
+
+        if (basePath) {
+            obj.configuration = {
+                baseURL: basePath
+            }
+        }
+
+        if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
+        if (topP) obj.topP = parseFloat(topP)
+        if (timeout) obj.timeout = parseInt(timeout, 10)
+        if (cache) obj.cache = cache
+        if (apiKey) obj.openAIApiKey = apiKey
+
+        const model = new ChatOpenAI(obj)
+
+        return model
+    }
+}
+
+module.exports = { nodeClass: ChatLitellm_ChatModels }
diff --git a/packages/components/nodes/chatmodels/ChatLitellm/litellm.jpg b/packages/components/nodes/chatmodels/ChatLitellm/litellm.jpg
new file mode 100644
index 00000000000..d6a77b2d105
Binary files /dev/null and b/packages/components/nodes/chatmodels/ChatLitellm/litellm.jpg differ
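For reviewers, here is a minimal standalone sketch of the `ChatOpenAI` instance that the node's `init()` assembles. The node reuses `ChatOpenAI` because a LiteLLM proxy exposes an OpenAI-compatible API, so only `configuration.baseURL` (from the Base URL input) and the API key change. The proxy URL, model alias, and key below are placeholder assumptions for illustration, not values from this PR.

```typescript
// Sketch of what init() builds, assuming a LiteLLM proxy listening on
// http://localhost:8000 with a model alias 'gpt-3.5-turbo' registered in
// its config (both hypothetical values).
import { ChatOpenAI } from '@langchain/openai'

async function main() {
    const model = new ChatOpenAI({
        modelName: 'gpt-3.5-turbo', // must match a model_name defined in the proxy config
        temperature: 0.9, // node default
        streaming: true, // node default
        openAIApiKey: 'sk-anything', // hypothetical; maps to the litellmApiKey credential
        configuration: {
            baseURL: 'http://localhost:8000' // routes OpenAI-compatible calls through the proxy
        }
    })

    const res = await model.invoke('Say hello from LiteLLM')
    console.log(res.content)
}

main()
```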