Node.js 18.20.4
I added a Baidu Qianfan chat model node as shown below, following:
https://js.langchain.com/v0.2/docs/integrations/chat/baidu_qianfan
import { BaseCache } from '@langchain/core/caches'
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ChatBaiduQianfan } from '@langchain/baidu-qianfan'

class ChatBaiduQianfan_ChatModels implements INode {
    label: string
    name: string
    version: number
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    credential: INodeParams
    inputs: INodeParams[]

    constructor() {
        this.label = 'ChatBaiduQianfan'
        this.name = 'chatBaiduQianfan'
        this.version = 1.0
        this.type = 'ChatBaiduQianfan'
        this.icon = 'baiduwenxin.svg'
        this.category = 'Chat Models'
        this.description = 'Wrapper around BaiduQianfan Chat Endpoints'
        this.baseClasses = [this.type, ...getBaseClasses(ChatBaiduQianfan)]
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['baiduApi']
        }
        this.inputs = [
            {
                label: 'Cache',
                name: 'cache',
                type: 'BaseCache',
                optional: true
            },
            {
                label: 'Model',
                name: 'modelName',
                type: 'string',
                placeholder: 'ERNIE-Bot-turbo'
            },
            {
                label: 'Temperature',
                name: 'temperature',
                type: 'number',
                step: 0.1,
                default: 0.9,
                optional: true
            }
        ]
    }

    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const cache = nodeData.inputs?.cache as BaseCache
        const temperature = nodeData.inputs?.temperature as string
        const modelName = nodeData.inputs?.modelName as string

        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const baiduApiKey = getCredentialParam('baiduApiKey', credentialData, nodeData)
        const baiduSecretKey = getCredentialParam('baiduSecretKey', credentialData, nodeData)

        const obj: Partial<ChatBaiduQianfan> = {
            streaming: true,
            qianfanAK: baiduApiKey,
            qianfanSK: baiduSecretKey,
            qianfanAccessKey: baiduApiKey,
            qianfanSecretKey: baiduSecretKey,
            model: modelName,
            temperature: temperature ? parseFloat(temperature) : undefined
        }
        if (cache) obj.cache = cache

        const model = new ChatBaiduQianfan(obj)
        return model
    }
}

module.exports = { nodeClass: ChatBaiduQianfan_ChatModels }
Then, when running the flow, I get the following error:
2024-07-10 15:54:12 [INFO]: Starting Flowise...
2024-07-10 15:54:12 [INFO]: 📦 [server]: Data Source is initializing...
2024-07-10 15:54:15 [INFO]: 📦 [server]: Data Source has been initialized!
2024-07-10 15:54:15 [INFO]: ⚡️ [server]: Flowise Server is listening at :3030
2024-07-10 15:54:16 [INFO]: ⬆️ POST /api/v1/node-load-method/documentStore
2024-07-10 15:54:33 [INFO]: ❌ DELETE /api/v1/chatmessage/2d760c06-b0de-45ed-9b74-f9dfe36656a9?chatId=fcf818bd-98f0-4780-a6ae-d977b46c6d8c&chatType=INTERNAL
2024-07-10 15:54:36 [INFO]: ⬆️ POST /api/v1/internal-prediction/2d760c06-b0de-45ed-9b74-f9dfe36656a9
2024-07-10 15:54:37 [INFO]: [server]: Chatflow 2d760c06-b0de-45ed-9b74-f9dfe36656a9 added into ChatflowPool
2024-07-10 15:54:38 [ERROR]: [server]: Error: stream.getReader is not a function
TypeError: stream.getReader is not a function
at getBytes (/Users/didi/WebstormProjects/Flowise/node_modules/.pnpm/@langchain+core@0.1.63_langchain@0.1.37_kov6ixubspoxa2akjvxho2sune__openai@4.51.0_encoding@0.1.13_/node_modules/@langchain/core/dist/utils/event_source_parse.cjs:19:27)
at Object.start (/Users/didi/WebstormProjects/Flowise/node_modules/.pnpm/@langchain+core@0.1.63_langchain@0.1.37_kov6ixubspoxa2akjvxho2sune__openai@4.51.0_encoding@0.1.13_/node_modules/@langchain/core/dist/utils/event_source_parse.cjs:207:19)
at setupReadableStreamDefaultController (node:internal/webstreams/readablestream:2333:23)
at setupReadableStreamDefaultControllerFromSource (node:internal/webstreams/readablestream:2366:3)
at new ReadableStream (node:internal/webstreams/readablestream:289:7)
at convertEventStreamToIterableReadableDataStream (/Users/didi/WebstormProjects/Flowise/node_modules/.pnpm/@langchain+core@0.1.63_langchain@0.1.37_kov6ixubspoxa2akjvxho2sune__openai@4.51.0_encoding@0.1.13_/node_modules/@langchain/core/dist/utils/event_source_parse.cjs:188:24)
at ChatBaiduQianfan.createStream (/Users/didi/WebstormProjects/Flowise/node_modules/.pnpm/@langchain+baidu-qianfan@0.0.1_@babel+core@7.24.7_encoding@0.1.13_langchain@0.1.37_kov6ixubsp_ccx72fmvccog47x7lvf257gdy4/node_modules/@langchain/baidu-qianfan/dist/chat_models.cjs:329:88)
at async RetryOperation._fn (/Users/didi/WebstormProjects/Flowise/node_modules/.pnpm/p-retry@4.6.2/node_modules/p-retry/index.js:50:12)
4 Answers
kupeojn61#
I think we may need to upgrade the langchain package versions to pick up the new changes; the update is here - #2798
igetnqfo2#
> I think we may need to upgrade the langchain package versions to pick up the new changes; the update is here - #2798

Thanks, that works now. But there is a new problem, like the following:
I'm not sure whether this is a langchain issue or a FlowiseAI issue.
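To help separate the two, here is a minimal standalone sketch of my own (not from the thread) that drives ChatBaiduQianfan directly with the same constructor fields the node builds in init() (streaming, qianfanAK, qianfanSK, model). The env var names QIANFAN_AK / QIANFAN_SK, the main() wrapper, and the prompt text are placeholders I chose. If this also throws "stream.getReader is not a function" outside Flowise, the bug sits in @langchain/baidu-qianfan / @langchain/core; if it streams fine, the problem is more likely in how Flowise drives the model.

    import { ChatBaiduQianfan } from '@langchain/baidu-qianfan'
    import { HumanMessage } from '@langchain/core/messages'

    async function main() {
        // Same fields the Flowise node passes; credentials read from placeholder env vars
        const model = new ChatBaiduQianfan({
            streaming: true,
            qianfanAK: process.env.QIANFAN_AK!,
            qianfanSK: process.env.QIANFAN_SK!,
            model: 'ERNIE-Bot-turbo'
        })

        // stream() exercises the streaming path that fails in createStream in the stack trace above
        const stream = await model.stream([new HumanMessage('你好')])
        for await (const chunk of stream) {
            process.stdout.write(String(chunk.content))
        }
    }

    main().catch(console.error)

If the standalone call streams correctly against the versions Flowise resolves, then upgrading the @langchain/* packages inside Flowise, as suggested above, is the more likely fix.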
8fq7wneg3#
@iceycc I don't have Qianfan credentials to try this with. If you have some, could you email me at henryheng@flowiseai.com so I can test it?
7rtdyuoh4#
Could this be related to langchain-ai/langchainjs#5970?
I'm not able to test it, 😅
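One more way to narrow it down, sketched here as my own assumption rather than something suggested in the thread: the stack trace fails inside ChatBaiduQianfan.createStream, so comparing a non-streaming invoke() against stream() shows whether only the streaming path is broken. The env var names are the same placeholders as in the sketch above, and it is an assumption that the non-streaming call avoids createStream in this package version.

    import { ChatBaiduQianfan } from '@langchain/baidu-qianfan'
    import { HumanMessage } from '@langchain/core/messages'

    async function main() {
        // Streaming disabled: the assumption is that this takes the plain completion path
        // instead of the SSE path where the TypeError is thrown
        const model = new ChatBaiduQianfan({
            streaming: false,
            qianfanAK: process.env.QIANFAN_AK!, // placeholder env var names
            qianfanSK: process.env.QIANFAN_SK!,
            model: 'ERNIE-Bot-turbo'
        })

        const res = await model.invoke([new HumanMessage('你好')])
        console.log(res.content)
    }

    main().catch(console.error)

If invoke() succeeds while stream() fails, the problem is confined to the streaming/SSE handling (consistent with the createStream frame in the stack trace), which points at the langchain packages rather than at the Flowise node code itself.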