Skip to content

Commit

Permalink
Merge pull request FlowiseAI#1115 from vinodkiran/FEATURE/output-parsers
Browse files Browse the repository at this point in the history
New Feature - Output Parsers
  • Loading branch information
HenryHengZJ authored Nov 3, 2023
2 parents c86502e + 38791f9 commit cbd9837
Show file tree
Hide file tree
Showing 27 changed files with 2,174 additions and 768 deletions.
61 changes: 47 additions & 14 deletions packages/components/nodes/chains/LLMChain/LLMChain.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,10 @@ import { getBaseClasses, handleEscapeCharacters } from '../../../src/utils'
import { LLMChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language'
import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler'
import { BaseOutputParser } from 'langchain/schema/output_parser'
import { formatResponse, injectOutputParser } from '../../outputparsers/OutputParserHelpers'
import { BaseLLMOutputParser } from 'langchain/schema/output_parser'
import { OutputFixingParser } from 'langchain/output_parsers'

class LLMChain_Chains implements INode {
label: string
Expand All @@ -15,11 +19,12 @@ class LLMChain_Chains implements INode {
description: string
inputs: INodeParams[]
outputs: INodeOutputsValue[]
outputParser: BaseOutputParser

constructor() {
this.label = 'LLM Chain'
this.name = 'llmChain'
this.version = 1.0
this.version = 3.0
this.type = 'LLMChain'
this.icon = 'chain.svg'
this.category = 'Chains'
Expand All @@ -36,6 +41,12 @@ class LLMChain_Chains implements INode {
name: 'prompt',
type: 'BasePromptTemplate'
},
{
label: 'Output Parser',
name: 'outputParser',
type: 'BaseLLMOutputParser',
optional: true
},
{
label: 'Chain Name',
name: 'chainName',
Expand Down Expand Up @@ -63,12 +74,29 @@ class LLMChain_Chains implements INode {
const prompt = nodeData.inputs?.prompt
const output = nodeData.outputs?.output as string
const promptValues = prompt.promptValues as ICommonObject

const llmOutputParser = nodeData.inputs?.outputParser as BaseOutputParser
this.outputParser = llmOutputParser
if (llmOutputParser) {
let autoFix = (llmOutputParser as any).autoFix
if (autoFix === true) {
this.outputParser = OutputFixingParser.fromLLM(model, llmOutputParser)
}
}
if (output === this.name) {
const chain = new LLMChain({ llm: model, prompt, verbose: process.env.DEBUG === 'true' ? true : false })
const chain = new LLMChain({
llm: model,
outputParser: this.outputParser as BaseLLMOutputParser<string | object>,
prompt,
verbose: process.env.DEBUG === 'true'
})
return chain
} else if (output === 'outputPrediction') {
const chain = new LLMChain({ llm: model, prompt, verbose: process.env.DEBUG === 'true' ? true : false })
const chain = new LLMChain({
llm: model,
outputParser: this.outputParser as BaseLLMOutputParser<string | object>,
prompt,
verbose: process.env.DEBUG === 'true'
})
const inputVariables = chain.prompt.inputVariables as string[] // ["product"]
const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData)
// eslint-disable-next-line no-console
Expand All @@ -84,10 +112,15 @@ class LLMChain_Chains implements INode {
}
}

async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string> {
async run(nodeData: INodeData, input: string, options: ICommonObject): Promise<string | object> {
const inputVariables = nodeData.instance.prompt.inputVariables as string[] // ["product"]
const chain = nodeData.instance as LLMChain
const promptValues = nodeData.inputs?.prompt.promptValues as ICommonObject
let promptValues: ICommonObject | undefined = nodeData.inputs?.prompt.promptValues as ICommonObject
const outputParser = nodeData.inputs?.outputParser as BaseOutputParser
if (!this.outputParser && outputParser) {
this.outputParser = outputParser
}
promptValues = injectOutputParser(this.outputParser, chain, promptValues)
const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData)
// eslint-disable-next-line no-console
console.log('\x1b[93m\x1b[1m\n*****FINAL RESULT*****\n\x1b[0m\x1b[0m')
Expand All @@ -99,9 +132,9 @@ class LLMChain_Chains implements INode {

const runPrediction = async (
inputVariables: string[],
chain: LLMChain,
chain: LLMChain<string | object>,
input: string,
promptValuesRaw: ICommonObject,
promptValuesRaw: ICommonObject | undefined,
options: ICommonObject,
nodeData: INodeData
) => {
Expand Down Expand Up @@ -135,10 +168,10 @@ const runPrediction = async (
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)
const res = await chain.call(options, [loggerHandler, handler, ...callbacks])
return res?.text
return formatResponse(res?.text)
} else {
const res = await chain.call(options, [loggerHandler, ...callbacks])
return res?.text
return formatResponse(res?.text)
}
} else if (seen.length === 1) {
// If one inputVariable is not specify, use input (user's question) as value
Expand All @@ -151,10 +184,10 @@ const runPrediction = async (
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)
const res = await chain.call(options, [loggerHandler, handler, ...callbacks])
return res?.text
return formatResponse(res?.text)
} else {
const res = await chain.call(options, [loggerHandler, ...callbacks])
return res?.text
return formatResponse(res?.text)
}
} else {
throw new Error(`Please provide Prompt Values for: ${seen.join(', ')}`)
Expand All @@ -163,10 +196,10 @@ const runPrediction = async (
if (isStreaming) {
const handler = new CustomChainHandler(socketIO, socketIOClientId)
const res = await chain.run(input, [loggerHandler, handler, ...callbacks])
return res
return formatResponse(res)
} else {
const res = await chain.run(input, [loggerHandler, ...callbacks])
return res
return formatResponse(res)
}
}
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
import { getBaseClasses, INode, INodeData, INodeParams } from '../../../src'
import { BaseOutputParser } from 'langchain/schema/output_parser'
import { CommaSeparatedListOutputParser } from 'langchain/output_parsers'
import { CATEGORY } from '../OutputParserHelpers'

class CSVListOutputParser implements INode {
label: string
name: string
version: number
description: string
type: string
icon: string
category: string
baseClasses: string[]
inputs: INodeParams[]
credential: INodeParams

constructor() {
this.label = 'CSV Output Parser'
this.name = 'csvOutputParser'
this.version = 1.0
this.type = 'CSVListOutputParser'
this.description = 'Parse the output of an LLM call as a comma-separated list of values'
this.icon = 'csv.png'
this.category = CATEGORY
this.baseClasses = [this.type, ...getBaseClasses(BaseOutputParser)]
this.inputs = [
{
label: 'Autofix',
name: 'autofixParser',
type: 'boolean',
optional: true,
description: 'In the event that the first call fails, will make another call to the model to fix any errors.'
}
]
}

async init(nodeData: INodeData): Promise<any> {
const autoFix = nodeData.inputs?.autofixParser as boolean

const commaSeparatedListOutputParser = new CommaSeparatedListOutputParser()
Object.defineProperty(commaSeparatedListOutputParser, 'autoFix', {
enumerable: true,
configurable: true,
writable: true,
value: autoFix
})
return commaSeparatedListOutputParser
}
}

module.exports = { nodeClass: CSVListOutputParser }
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
import { getBaseClasses, INode, INodeData, INodeParams } from '../../../src'
import { BaseOutputParser } from 'langchain/schema/output_parser'
import { CustomListOutputParser as LangchainCustomListOutputParser } from 'langchain/output_parsers'
import { CATEGORY } from '../OutputParserHelpers'

/**
 * Flowise node wrapping LangChain's CustomListOutputParser.
 * Instructs the LLM to answer as a list with a configurable separator and
 * expected length, then parses that answer into an array of values.
 */
class CustomListOutputParser implements INode {
    label: string
    name: string
    version: number
    description: string
    type: string
    icon: string
    category: string
    baseClasses: string[]
    inputs: INodeParams[]
    credential: INodeParams

    constructor() {
        this.label = 'Custom List Output Parser'
        this.name = 'customListOutputParser'
        this.version = 1.0
        this.type = 'CustomListOutputParser'
        this.description = 'Parse the output of an LLM call as a list of values.'
        this.icon = 'list.png'
        this.category = CATEGORY
        this.baseClasses = [this.type, ...getBaseClasses(BaseOutputParser)]
        this.inputs = [
            {
                label: 'Length',
                name: 'length',
                type: 'number',
                default: 5,
                step: 1,
                description: 'Number of values to return'
            },
            {
                label: 'Separator',
                name: 'separator',
                type: 'string',
                description: 'Separator between values',
                default: ','
            },
            {
                label: 'Autofix',
                name: 'autofixParser',
                type: 'boolean',
                optional: true,
                description: 'In the event that the first call fails, will make another call to the model to fix any errors.'
            }
        ]
    }

    /**
     * Build the parser instance for this node.
     *
     * The `length` input arrives as a string from the UI; it is parsed
     * defensively so a non-numeric value falls back to the default (5)
     * instead of propagating NaN into the parser's format instructions.
     */
    async init(nodeData: INodeData): Promise<any> {
        const separator = nodeData.inputs?.separator as string
        const lengthStr = nodeData.inputs?.length as string
        const autoFix = nodeData.inputs?.autofixParser as boolean

        let length = 5
        if (lengthStr) {
            const parsed = parseInt(lengthStr, 10)
            // Guard against NaN: original code assigned parseInt's result
            // unchecked, so e.g. length="abc" silently produced NaN.
            if (!Number.isNaN(parsed)) length = parsed
        }

        const parser = new LangchainCustomListOutputParser({ length: length, separator: separator })
        // autoFix is not part of the LangChain parser's interface; attach it
        // dynamically so the consuming chain can wrap the parser for retries.
        Object.defineProperty(parser, 'autoFix', {
            enumerable: true,
            configurable: true,
            writable: true,
            value: autoFix
        })
        return parser
    }
}

module.exports = { nodeClass: CustomListOutputParser }
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
46 changes: 46 additions & 0 deletions packages/components/nodes/outputparsers/OutputParserHelpers.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
import { BaseOutputParser } from 'langchain/schema/output_parser'
import { LLMChain } from 'langchain/chains'
import { BaseLanguageModel } from 'langchain/base_language'
import { ICommonObject } from '../../src'
import { ChatPromptTemplate, FewShotPromptTemplate, PromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts'

export const CATEGORY = 'Output Parsers'

/**
 * Normalize a chain result for downstream consumers: object results
 * (e.g. parsed structured output) are wrapped under a `json` key, while
 * plain string results pass through unchanged.
 */
export const formatResponse = (response: string | object): string | object =>
    typeof response === 'object' ? { json: response } : response

/**
 * Wire an output parser's format instructions into the chain's prompt.
 *
 * Appends `{format_instructions}` to the prompt template (handling the
 * PromptTemplate, ChatPromptTemplate and FewShotPromptTemplate shapes) and
 * supplies the parser's instructions as a partial variable, so the LLM is
 * told how to structure its answer.
 *
 * @param outputParser parser whose getFormatInstructions() text is injected
 * @param chain        the LLMChain whose prompt is mutated in place
 * @param promptValues optional caller-supplied prompt values; returned with
 *                     format_instructions merged in when provided
 * @returns the (possibly augmented) promptValues
 */
export const injectOutputParser = (
    outputParser: BaseOutputParser<unknown>,
    chain: LLMChain<string, BaseLanguageModel>,
    promptValues: ICommonObject | undefined = undefined
) => {
    if (outputParser && chain.prompt) {
        const formatInstructions = outputParser.getFormatInstructions()
        if (chain.prompt instanceof PromptTemplate) {
            const pt = chain.prompt
            pt.template = pt.template + '\n{format_instructions}'
            // Merge rather than overwrite: the original assignment clobbered
            // any partial variables the prompt already carried.
            pt.partialVariables = { ...pt.partialVariables, format_instructions: formatInstructions }
        } else if (chain.prompt instanceof ChatPromptTemplate) {
            chain.prompt.promptMessages.forEach((msg) => {
                if (msg instanceof SystemMessagePromptTemplate) {
                    ;(msg.prompt as any).partialVariables = {
                        ...(msg.prompt as any).partialVariables,
                        format_instructions: formatInstructions
                    }
                    ;(msg.prompt as any).template = ((msg.prompt as any).template + '\n{format_instructions}') as string
                }
            })
        } else if (chain.prompt instanceof FewShotPromptTemplate) {
            const examplePrompt = chain.prompt.examplePrompt
            examplePrompt.partialVariables = { ...examplePrompt.partialVariables, format_instructions: formatInstructions }
            examplePrompt.template = examplePrompt.template + '\n{format_instructions}'
        }

        // Avoid registering the variable twice when the same chain instance is
        // run more than once. NOTE(review): the template append above is still
        // not idempotent across repeated calls — confirm chains are rebuilt
        // per prediction before relying on reuse.
        if (!chain.prompt.inputVariables.includes('format_instructions')) {
            chain.prompt.inputVariables.push('format_instructions')
        }
        if (promptValues) {
            promptValues = { ...promptValues, format_instructions: formatInstructions }
        }
    }
    return promptValues
}
Loading

0 comments on commit cbd9837

Please sign in to comment.