Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
105 changes: 105 additions & 0 deletions packages/components/nodes/chatmodels/ChatLitellm/ChatLitellm.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
import { INodeData } from '../../../src/Interface'
import { ChatOpenAI } from '../ChatOpenAI/FlowiseChatOpenAI'

// Import the target node class for testing. require() is used (rather than an
// ES import) so resolution happens in source order; jest hoists the jest.mock
// calls below above all imports, so the mocks are in place either way.
const { nodeClass } = require('./ChatLitellm')

// Mock external utility functions so init() never reads real credentials:
// - getBaseClasses: fixed base-class list for the node
// - getCredentialData: resolves to an empty credential object
// - getCredentialParam: always yields a fake API key ('sk-test-api-key')
jest.mock('../../../src/utils', () => ({
    getBaseClasses: jest.fn().mockReturnValue(['BaseChatModel', 'ChatLitellm']),
    getCredentialData: jest.fn().mockResolvedValue({}),
    getCredentialParam: jest.fn().mockReturnValue('sk-test-api-key')
}))

// Mock FlowiseChatOpenAI class to prevent actual API calls and verify passed parameters.
// The fake constructor captures its (id, obj) arguments on the returned object
// so assertions can inspect exactly what ChatLitellm forwarded to it.
jest.mock('../ChatOpenAI/FlowiseChatOpenAI', () => {
    return {
        ChatOpenAI: jest.fn().mockImplementation((id, obj) => {
            return {
                id,
                obj, // Expose the injected parameters for testing
                setMultiModalOption: jest.fn()
            }
        })
    }
})

/**
 * Unit tests for the ChatLitellm chat-model node.
 *
 * The ChatOpenAI constructor is mocked at module level, so every assertion
 * here inspects the arguments ChatLitellm forwarded to the constructor rather
 * than any live model behaviour.
 */
describe('ChatLitellm_ChatModels', () => {
    let chatNode: any

    beforeEach(() => {
        chatNode = new nodeClass()
        jest.clearAllMocks()
    })

    it('should initialize with correct properties and inputs', () => {
        // Static node metadata
        expect(chatNode.label).toBe('LiteLLM')
        expect(chatNode.name).toBe('chatLitellm')
        expect(chatNode.type).toBe('ChatLitellm')

        // The newly added 'user' field must be declared as an optional advanced input
        const userField = chatNode.inputs.find((field: any) => field.name === 'user')
        expect(userField).toBeDefined()
        expect(userField.type).toBe('string')
        expect(userField.additionalParams).toBe(true)
    })

    it('should initialize the model with basic required parameters', async () => {
        const basicData = {
            id: 'test-node-id',
            inputs: {
                modelName: 'vertex-gemini-2.5-flash',
                temperature: '0.7'
            }
        } as any as INodeData

        const instance = await chatNode.init(basicData, '', {})

        expect(ChatOpenAI).toHaveBeenCalledTimes(1)
        expect(instance.id).toBe('test-node-id')

        // Scalar inputs are parsed and forwarded onto the constructor options
        expect(instance.obj.modelName).toBe('vertex-gemini-2.5-flash')
        expect(instance.obj.temperature).toBe(0.7)
        expect(instance.obj.streaming).toBe(true) // streaming defaults to true when unset
        expect(instance.obj.openAIApiKey).toBe('sk-test-api-key')
    })

    it('should properly map advanced parameters including the new "user" parameter', async () => {
        const advancedData = {
            id: 'test-node-id',
            inputs: {
                modelName: 'vertex-gemini-2.5-flash',
                temperature: '0.5',
                streaming: false,
                maxTokens: '1024',
                topP: '0.9',
                timeout: '5000',
                user: 'test01', // Added User ID
                basePath: 'http://localhost:4000/v1',
                allowImageUploads: true
            }
        } as any as INodeData

        const instance = await chatNode.init(advancedData, '', {})

        expect(ChatOpenAI).toHaveBeenCalledTimes(1)

        // Numeric strings must be parsed; the user id passes through verbatim
        expect(instance.obj.user).toBe('test01')
        expect(instance.obj.maxTokens).toBe(1024)
        expect(instance.obj.topP).toBe(0.9)
        expect(instance.obj.timeout).toBe(5000)
        expect(instance.obj.streaming).toBe(false)

        // basePath should surface as the OpenAI-client baseURL configuration
        expect(instance.obj.configuration?.baseURL).toBe('http://localhost:4000/v1')

        // The image-upload toggle must be relayed via setMultiModalOption
        expect(instance.setMultiModalOption).toHaveBeenCalledWith({
            image: {
                allowImageUploads: true
            }
        })
    })
})
10 changes: 10 additions & 0 deletions packages/components/nodes/chatmodels/ChatLitellm/ChatLitellm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,14 @@ class ChatLitellm_ChatModels implements INode {
step: 1,
optional: true,
additionalParams: true
},
{
label: 'User',
name: 'user',
type: 'string',
placeholder: 'user id',
optional: true,
additionalParams: true
Comment on lines +105 to +110
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

Adding a description for the user parameter is recommended to clarify its purpose for end-users (e.g., for tracking or rate-limiting). Also, using a more descriptive placeholder like 'user-1234' is consistent with other LLM nodes in the repository.

                label: 'User',
                name: 'user',
                type: 'string',
                description: 'A unique identifier representing your end-user, which can help LiteLLM to monitor and detect abuse.',
                placeholder: 'user-1234',
                optional: true,
                additionalParams: true

}
]
}
Expand All @@ -114,6 +122,7 @@ class ChatLitellm_ChatModels implements INode {
const topP = nodeData.inputs?.topP as string
const timeout = nodeData.inputs?.timeout as string
const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean
const user = nodeData.inputs?.user as string

const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const apiKey = getCredentialParam('litellmApiKey', credentialData, nodeData)
Expand All @@ -135,6 +144,7 @@ class ChatLitellm_ChatModels implements INode {
if (topP) obj.topP = parseFloat(topP)
if (timeout) obj.timeout = parseInt(timeout, 10)
if (cache) obj.cache = cache
if (user) obj.user = user
if (apiKey) {
obj.openAIApiKey = apiKey
obj.apiKey = apiKey
Expand Down