Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 28 additions & 0 deletions packages/components/credentials/LmStudioApi.credential.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import { INodeParams, INodeCredential } from '../src/Interface'

class LMStudioApi implements INodeCredential {
label: string
name: string
version: number
description: string
inputs: INodeParams[]

constructor() {
this.label = 'LM Studio API'
Comment thread
0xi4o marked this conversation as resolved.
this.name = 'lmStudioApi'
this.version = 1.0
this.description =
'Refer to <a target="_blank" href="https://lmstudio.ai/docs/developer/core/authentication">official guide</a> on how to get access token on LmStudio'

this.inputs = [
{
label: 'LM Studio Api Key',
name: 'lmStudioApiKey',
type: 'password',
placeholder: '<LM_STUDIO_ACCESS_TOKEN>'
}
]
}
}

module.exports = { credClass: LMStudioApi }
149 changes: 149 additions & 0 deletions packages/components/nodes/chatmodels/ChatLmStudio/ChatLmStudio.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,149 @@
import { ChatOpenAI as LangchainChatLmStudio, ChatOpenAIFields as ChatLmStudioFields } from '@langchain/openai'
import { BaseCache } from '@langchain/core/caches'
import { ICommonObject, IMultiModalOption, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { FlowiseChatLmStudio } from './FlowiseChatLmStudio'

class ChatLmStudio_ChatModels implements INode {
label: string
name: string
version: number
type: string
icon: string
category: string
description: string
baseClasses: string[]
credential: INodeParams
inputs: INodeParams[]

constructor() {
this.label = 'LMStudio'
this.name = 'chatLmStudio'
this.version = 3.0
this.type = 'ChatLmStudio'
this.icon = 'lmstudio.png'
this.category = 'Chat Models'
this.description = 'Use local LLMs using LmStudio'
this.baseClasses = [this.type, 'BaseChatModel', ...getBaseClasses(LangchainChatLmStudio)]
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['lmStudioApi'],
optional: true
}
this.inputs = [
{
label: 'Cache',
name: 'cache',
type: 'BaseCache',
optional: true
},
{
label: 'Base URL',
name: 'baseURL',
type: 'string',
placeholder: 'http://localhost:1234/v1'
},
{
label: 'Model Name',
name: 'modelName',
type: 'string',
placeholder: 'gpt4all-lora-quantized.bin'
},
{
label: 'Temperature',
name: 'temperature',
type: 'number',
step: 0.1,
default: 0.9,
optional: true
},
{
label: 'Allow Image Uploads',
name: 'allowImageUploads',
type: 'boolean',
description:
'Allow image input. Refer to the <a href="https://docs.flowiseai.com/using-flowise/uploads#image" target="_blank">docs</a> for more details.',
default: false,
optional: true
},
{
label: 'Streaming',
name: 'streaming',
type: 'boolean',
default: true,
optional: true,
additionalParams: true
},
{
label: 'Max Tokens',
name: 'maxTokens',
type: 'number',
step: 1,
optional: true,
additionalParams: true
},
{
label: 'Top Probability',
name: 'topP',
type: 'number',
step: 0.1,
optional: true,
additionalParams: true
},
{
label: 'Timeout',
name: 'timeout',
type: 'number',
step: 1,
optional: true,
additionalParams: true
}
]
}

async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const temperature = nodeData.inputs?.temperature as string
const modelName = nodeData.inputs?.modelName as string
const maxTokens = nodeData.inputs?.maxTokens as string
const topP = nodeData.inputs?.topP as string
const timeout = nodeData.inputs?.timeout as string
const baseURL = nodeData.inputs?.baseURL as string
const streaming = nodeData.inputs?.streaming as boolean
const allowImageUploads = nodeData.inputs?.allowImageUploads as boolean

const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const lmStudioApiKey = getCredentialParam('lmStudioApiKey', credentialData, nodeData)

const cache = nodeData.inputs?.cache as BaseCache

const obj: ChatLmStudioFields = {
modelName,
streaming: streaming ?? true,
configuration: {
baseURL,
apiKey: lmStudioApiKey
}
}

if (temperature) obj.temperature = parseFloat(temperature)
if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
if (topP) obj.topP = parseFloat(topP)
if (timeout) obj.timeout = parseInt(timeout, 10)
if (cache) obj.cache = cache
Comment thread
sohaieb marked this conversation as resolved.

const multiModalOption: IMultiModalOption = {
image: {
allowImageUploads: allowImageUploads ?? false
}
}

const model = new FlowiseChatLmStudio(nodeData.id, obj)
model.setMultiModalOption(multiModalOption)

return model
}
}

module.exports = { nodeClass: ChatLmStudio_ChatModels }
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
import { ChatOpenAI as LangchainChatLmStudio, ChatOpenAIFields as ChatLmStudioFields } from '@langchain/openai'
import { IMultiModalOption, IVisionChatModal } from '../../../src'

/**
 * ChatOpenAI subclass used by Flowise to talk to an LM Studio server, with the
 * IVisionChatModal hooks so the surrounding flow can manage image-upload
 * options and revert model settings.
 */
export class FlowiseChatLmStudio extends LangchainChatLmStudio implements IVisionChatModal {
    configuredModel: string
    configuredMaxToken?: number
    multiModalOption: IMultiModalOption
    builtInTools: Record<string, any>[]
    id: string

    constructor(id: string, fields?: ChatLmStudioFields) {
        super(fields)
        this.id = id
        this.builtInTools = []
        // Capture the originally configured model/limit so they can be restored later.
        this.configuredModel = fields?.modelName ?? ''
        this.configuredMaxToken = fields?.maxTokens
    }

    /** Restore the model name and max-token limit captured at construction time. */
    revertToOriginalModel(): void {
        this.model = this.configuredModel
        this.maxTokens = this.configuredMaxToken
    }

    /** Store the multi-modal (image upload) configuration for this node. */
    setMultiModalOption(multiModalOption: IMultiModalOption): void {
        this.multiModalOption = multiModalOption
    }

    /** Intentionally a no-op: the configured model is used as-is for vision input. */
    setVisionModel(): void {
        // nothing to switch to
    }

    /** Append a built-in tool definition to this model's tool list. */
    addBuiltInTools(builtInTool: Record<string, any>): void {
        this.builtInTools.push(builtInTool)
    }
}
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
import {
ClientOptions,
OpenAIEmbeddings as LmStudioEmbeddings,
OpenAIEmbeddingsParams as LmStudioEmbeddingsParams
} from '@langchain/openai'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getCredentialData, getCredentialParam } from '../../../src/utils'

class LmStudioEmbedding_Embeddings implements INode {
label: string
name: string
version: number
type: string
icon: string
category: string
description: string
baseClasses: string[]
credential: INodeParams
inputs: INodeParams[]

constructor() {
this.label = 'LMStudio Embeddings'
this.name = 'lmStudioEmbeddings'
this.version = 1.0
this.type = 'LmStudio Embeddings'
this.icon = 'lmstudio.png'
this.category = 'Embeddings'
this.description = 'Use local embeddings from LMStudio'
this.baseClasses = [this.type, 'Embeddings']
this.credential = {
label: 'Connect Credential',
name: 'credential',
type: 'credential',
credentialNames: ['lmStudioApi'],
optional: true
}
this.inputs = [
{
label: 'Base URL',
name: 'baseURL',
type: 'string',
placeholder: 'http://localhost:1234/v1'
},
{
label: 'Model Name',
name: 'modelName',
type: 'string',
placeholder: 'text-embedding-ada-002'
}
]
}

async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
const modelName = nodeData.inputs?.modelName as string
const baseURL = nodeData.inputs?.baseURL as string

const credentialData = await getCredentialData(nodeData.credential ?? '', options)
const lmStudioApiKey = getCredentialParam('lmStudioApiKey', credentialData, nodeData)

const obj: Partial<LmStudioEmbeddingsParams> & { configuration?: ClientOptions } = {
modelName,
Comment thread
0xi4o marked this conversation as resolved.
encodingFormat: 'float',
configuration: {
apiKey: lmStudioApiKey,
baseURL
}
}

const model = new LmStudioEmbeddings(obj)

return model
}
}

module.exports = { nodeClass: LmStudioEmbedding_Embeddings }
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Loading