Feature/Update LangchainJS version #4

Merged · 1 commit · Apr 13, 2023
2 changes: 1 addition & 1 deletion .github/workflows/main.yml
@@ -17,7 +17,7 @@ jobs:
         strategy:
             matrix:
                 platform: [ubuntu-latest]
-                node-version: [14.x, 16.x]
+                node-version: [18.15.0]
         runs-on: ${{ matrix.platform }}

         steps:
@@ -1,4 +1,9 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
+import { initializeAgentExecutor, AgentExecutor } from 'langchain/agents'
+import { Tool } from 'langchain/tools'
+import { BaseChatModel } from 'langchain/chat_models/base'
+import { BaseChatMemory } from 'langchain/memory'
+import { getBaseClasses } from '../../../src/utils'

 class ConversationalAgent_Agents implements INode {
     label: string
@@ -17,6 +22,7 @@ class ConversationalAgent_Agents implements INode {
         this.category = 'Agents'
         this.icon = 'agent.svg'
         this.description = 'Conversational agent for a chat model. It will utilize chat specific prompts'
+        this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
         this.inputs = [
             {
                 label: 'Allowed Tools',
@@ -37,24 +43,18 @@ class ConversationalAgent_Agents implements INode {
         ]
     }

-    async getBaseClasses(): Promise<string[]> {
-        return ['AgentExecutor']
-    }
-
     async init(nodeData: INodeData): Promise<any> {
-        const { initializeAgentExecutor } = await import('langchain/agents')
-
-        const model = nodeData.inputs?.model
-        const tools = nodeData.inputs?.tools
-        const memory = nodeData.inputs?.memory
+        const model = nodeData.inputs?.model as BaseChatModel
+        const tools = nodeData.inputs?.tools as Tool[]
+        const memory = nodeData.inputs?.memory as BaseChatMemory

         const executor = await initializeAgentExecutor(tools, model, 'chat-conversational-react-description', true)
         executor.memory = memory
         return executor
     }

     async run(nodeData: INodeData, input: string): Promise<string> {
-        const executor = nodeData.instance
+        const executor = nodeData.instance as AgentExecutor
         const result = await executor.call({ input })

         return result?.output
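Note on the pattern above: the async getBaseClasses() method is removed in favor of computing this.baseClasses eagerly in the constructor via getBaseClasses(AgentExecutor). As a rough sketch of what such a helper can do (hypothetical; the actual implementation lives in src/utils and may differ), it walks the class's prototype chain and collects ancestor names:

    // Hypothetical sketch of a getBaseClasses-style helper, not the src/utils code.
    const collectBaseClassNames = (targetClass: { name: string }): string[] => {
        const names: string[] = []
        let current: any = targetClass
        // Object.getPrototypeOf on a class constructor returns its parent class;
        // the chain ends at Function.prototype, whose `name` is the empty string.
        while (current && current.name) {
            names.push(current.name)
            current = Object.getPrototypeOf(current)
        }
        return names
    }

    // collectBaseClassNames(AgentExecutor) would then yield something like
    // ['AgentExecutor', 'BaseChain'], depending on the LangChain version.

This lets each node advertise every type it can satisfy, so a typed input such as 'AgentExecutor' can also match subclasses.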
@@ -1,6 +1,10 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
+import { initializeAgentExecutor, AgentExecutor } from 'langchain/agents'
+import { Tool } from 'langchain/tools'
+import { BaseChatModel } from 'langchain/chat_models/base'
+import { getBaseClasses } from '../../../src/utils'

-class MRLKAgentChat_Agents implements INode {
+class MRKLAgentChat_Agents implements INode {
     label: string
     name: string
     description: string
@@ -11,12 +15,13 @@ class MRLKAgentChat_Agents implements INode {
     inputs: INodeParams[]

     constructor() {
-        this.label = 'MRLK Agent for Chat Models'
-        this.name = 'mrlkAgentChat'
+        this.label = 'MRKL Agent for Chat Models'
+        this.name = 'mrklAgentChat'
         this.type = 'AgentExecutor'
         this.category = 'Agents'
         this.icon = 'agent.svg'
         this.description = 'Agent that uses the ReAct Framework to decide what action to take, optimized to be used with Chat Models'
+        this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
         this.inputs = [
             {
                 label: 'Allowed Tools',
@@ -32,27 +37,20 @@ class MRLKAgentChat_Agents implements INode {
         ]
     }

-    async getBaseClasses(): Promise<string[]> {
-        return ['AgentExecutor']
-    }
-
     async init(nodeData: INodeData): Promise<any> {
-        const { initializeAgentExecutor } = await import('langchain/agents')
-
-        const model = nodeData.inputs?.model
-        const tools = nodeData.inputs?.tools
+        const model = nodeData.inputs?.model as BaseChatModel
+        const tools = nodeData.inputs?.tools as Tool[]

         const executor = await initializeAgentExecutor(tools, model, 'chat-zero-shot-react-description', true)
-
         return executor
     }

     async run(nodeData: INodeData, input: string): Promise<string> {
-        const executor = nodeData.instance
+        const executor = nodeData.instance as AgentExecutor
         const result = await executor.call({ input })

         return result?.output
     }
 }

-module.exports = { nodeClass: MRLKAgentChat_Agents }
+module.exports = { nodeClass: MRKLAgentChat_Agents }
@@ -1,6 +1,10 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
+import { initializeAgentExecutor, AgentExecutor } from 'langchain/agents'
+import { Tool } from 'langchain/tools'
+import { BaseLLM } from 'langchain/llms/base'
+import { getBaseClasses } from '../../../src/utils'

-class MRLKAgentLLM_Agents implements INode {
+class MRKLAgentLLM_Agents implements INode {
     label: string
     name: string
     description: string
@@ -11,12 +15,13 @@ class MRLKAgentLLM_Agents implements INode {
     inputs: INodeParams[]

     constructor() {
-        this.label = 'MRLK Agent for LLMs'
-        this.name = 'mrlkAgentLLM'
+        this.label = 'MRKL Agent for LLMs'
+        this.name = 'mrklAgentLLM'
         this.type = 'AgentExecutor'
         this.category = 'Agents'
         this.icon = 'agent.svg'
         this.description = 'Agent that uses the ReAct Framework to decide what action to take, optimized to be used with LLMs'
+        this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)]
         this.inputs = [
             {
                 label: 'Allowed Tools',
@@ -27,32 +32,25 @@ class MRLKAgentLLM_Agents implements INode {
             {
                 label: 'LLM Model',
                 name: 'model',
-                type: 'BaseLanguageModel'
+                type: 'BaseLLM'
             }
         ]
     }

-    async getBaseClasses(): Promise<string[]> {
-        return ['AgentExecutor']
-    }
-
     async init(nodeData: INodeData): Promise<any> {
-        const { initializeAgentExecutor } = await import('langchain/agents')
-
-        const model = nodeData.inputs?.model
-        const tools = nodeData.inputs?.tools
+        const model = nodeData.inputs?.model as BaseLLM
+        const tools = nodeData.inputs?.tools as Tool[]

         const executor = await initializeAgentExecutor(tools, model, 'zero-shot-react-description', true)
-
         return executor
     }

     async run(nodeData: INodeData, input: string): Promise<string> {
-        const executor = nodeData.instance
+        const executor = nodeData.instance as AgentExecutor
         const result = await executor.call({ input })

         return result?.output
     }
 }

-module.exports = { nodeClass: MRLKAgentLLM_Agents }
+module.exports = { nodeClass: MRKLAgentLLM_Agents }
@@ -1,5 +1,8 @@
 import { ICommonObject, IMessage, INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses } from '../../../src/utils'
+import { ConversationalRetrievalQAChain } from 'langchain/chains'
+import { BaseLLM } from 'langchain/llms/base'
+import { BaseRetriever } from 'langchain/schema'

 class ConversationalRetrievalQAChain_Chains implements INode {
     label: string
@@ -18,11 +21,12 @@ class ConversationalRetrievalQAChain_Chains implements INode {
         this.icon = 'chain.svg'
         this.category = 'Chains'
         this.description = 'Document QA - built on RetrievalQAChain to provide a chat history component'
+        this.baseClasses = [this.type, ...getBaseClasses(ConversationalRetrievalQAChain)]
         this.inputs = [
             {
                 label: 'LLM',
                 name: 'llm',
-                type: 'BaseLanguageModel'
+                type: 'BaseLLM'
             },
             {
                 label: 'Vector Store Retriever',
@@ -32,23 +36,16 @@ class ConversationalRetrievalQAChain_Chains implements INode {
         ]
     }

-    async getBaseClasses(): Promise<string[]> {
-        const { ConversationalRetrievalQAChain } = await import('langchain/chains')
-        return getBaseClasses(ConversationalRetrievalQAChain)
-    }
-
     async init(nodeData: INodeData): Promise<any> {
-        const { ConversationalRetrievalQAChain } = await import('langchain/chains')
-
-        const llm = nodeData.inputs?.llm
-        const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever
+        const llm = nodeData.inputs?.llm as BaseLLM
+        const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as BaseRetriever

         const chain = ConversationalRetrievalQAChain.fromLLM(llm, vectorStoreRetriever)
         return chain
     }

     async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
-        const chain = nodeData.instance
+        const chain = nodeData.instance as ConversationalRetrievalQAChain
         let chatHistory = ''

         if (options && options.chatHistory) {
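The run() body continues past this excerpt by flattening options.chatHistory into a single string for the chain. Purely as an illustration of that step, a sketch under assumed message fields (the real IMessage shape in src/Interface may differ):

    // Illustrative only: fold prior messages into one chat-history string.
    // The 'type' and 'message' fields are assumptions for this sketch.
    interface IMessageSketch {
        message: string
        type: 'userMessage' | 'apiMessage'
    }

    const toChatHistoryString = (messages: IMessageSketch[]): string =>
        messages
            .map((m) => (m.type === 'userMessage' ? `Human: ${m.message}` : `Assistant: ${m.message}`))
            .join('\n')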
35 changes: 18 additions & 17 deletions packages/components/nodes/chains/LLMChain/LLMChain.ts
@@ -1,5 +1,8 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
 import { getBaseClasses } from '../../../src/utils'
+import { LLMChain } from 'langchain/chains'
+import { BaseLanguageModel } from 'langchain/base_language'
+import { BasePromptTemplate } from 'langchain/prompts'

 class LLMChain_Chains implements INode {
     label: string
@@ -18,10 +21,11 @@ class LLMChain_Chains implements INode {
         this.icon = 'chain.svg'
         this.category = 'Chains'
         this.description = 'Chain to run queries against LLMs'
+        this.baseClasses = [this.type, ...getBaseClasses(LLMChain)]
         this.inputs = [
             {
-                label: 'LLM',
-                name: 'llm',
+                label: 'Language Model',
+                name: 'model',
                 type: 'BaseLanguageModel'
             },
             {
@@ -43,24 +47,17 @@ class LLMChain_Chains implements INode {
         ]
     }

-    async getBaseClasses(): Promise<string[]> {
-        const { LLMChain } = await import('langchain/chains')
-        return getBaseClasses(LLMChain)
-    }
-
     async init(nodeData: INodeData): Promise<any> {
-        const { LLMChain } = await import('langchain/chains')
-
-        const llm = nodeData.inputs?.llm
-        const prompt = nodeData.inputs?.prompt
+        const model = nodeData.inputs?.model as BaseLanguageModel
+        const prompt = nodeData.inputs?.prompt as BasePromptTemplate

-        const chain = new LLMChain({ llm, prompt })
+        const chain = new LLMChain({ llm: model, prompt })
         return chain
     }

     async run(nodeData: INodeData, input: string): Promise<string> {
-        const inputVariables = nodeData.instance.prompt.inputVariables // ["product"]
-        const chain = nodeData.instance
+        const inputVariables = nodeData.instance.prompt.inputVariables as string[] // ["product"]
+        const chain = nodeData.instance as LLMChain

         if (inputVariables.length === 1) {
             const res = await chain.run(input)
@@ -71,7 +68,7 @@

         const promptValues = JSON.parse(promptValuesStr.replace(/\s/g, ''))

-        let seen = []
+        let seen: string[] = []

         for (const variable of inputVariables) {
             seen.push(variable)
@@ -81,13 +78,17 @@
             }

             if (seen.length === 1) {
+                const lastValue = seen.pop()
+                if (!lastValue) throw new Error('Please provide Prompt Values')
                 const options = {
                     ...promptValues,
-                    [seen.pop()]: input
+                    [lastValue]: input
                 }
                 const res = await chain.call(options)
                 return res?.text
-            } else throw new Error('Please provide Prompt Values')
+            } else {
+                throw new Error('Please provide Prompt Values')
+            }
         } else {
             const res = await chain.run(input)
             return res
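To make the multi-variable branch of run() concrete: when the prompt has several inputVariables, the promptValues JSON must cover all but one of them, and the single leftover variable receives the raw user input. A standalone sketch of that bookkeeping (hypothetical values; the diff's loop pushes each variable and pops it again when promptValues covers it, which amounts to the same check):

    // Self-contained illustration of the leftover-variable resolution.
    const inputVariables = ['tone', 'product'] // from prompt.inputVariables
    const promptValues: Record<string, string> = { tone: 'friendly' } // node input
    const input = 'an ice cream shop' // user's message

    // Collect template variables that promptValues does not supply.
    const seen: string[] = []
    for (const variable of inputVariables) {
        if (!(variable in promptValues)) seen.push(variable)
    }

    if (seen.length === 1) {
        // Exactly one unresolved variable: bind the user input to it.
        const options = { ...promptValues, [seen[0]]: input }
        console.log(options) // { tone: 'friendly', product: 'an ice cream shop' }
    } else {
        throw new Error('Please provide Prompt Values')
    }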
@@ -1,4 +1,8 @@
 import { INode, INodeData, INodeParams } from '../../../src/Interface'
+import { RetrievalQAChain } from 'langchain/chains'
+import { BaseLLM } from 'langchain/llms/base'
+import { BaseRetriever } from 'langchain/schema'
+import { getBaseClasses } from '../../../src/utils'

 class RetrievalQAChain_Chains implements INode {
     label: string
@@ -17,11 +21,12 @@ class RetrievalQAChain_Chains implements INode {
         this.icon = 'chain.svg'
         this.category = 'Chains'
         this.description = 'QA chain to answer a question based on the retrieved documents'
+        this.baseClasses = [this.type, ...getBaseClasses(RetrievalQAChain)]
         this.inputs = [
             {
                 label: 'LLM',
                 name: 'llm',
-                type: 'BaseLanguageModel'
+                type: 'BaseLLM'
             },
             {
                 label: 'Vector Store Retriever',
@@ -31,21 +36,16 @@ class RetrievalQAChain_Chains implements INode {
         ]
     }

-    async getBaseClasses(): Promise<string[]> {
-        return ['BaseChain']
-    }
-
     async init(nodeData: INodeData): Promise<any> {
-        const { RetrievalQAChain } = await import('langchain/chains')
-        const llm = nodeData.inputs?.llm
-        const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever
+        const llm = nodeData.inputs?.llm as BaseLLM
+        const vectorStoreRetriever = nodeData.inputs?.vectorStoreRetriever as BaseRetriever

         const chain = RetrievalQAChain.fromLLM(llm, vectorStoreRetriever)
         return chain
     }

     async run(nodeData: INodeData, input: string): Promise<string> {
-        const chain = nodeData.instance
+        const chain = nodeData.instance as RetrievalQAChain
         const obj = {
             query: input
         }