From 76c690269a88129528eeaf2d88c593183ac4e95c Mon Sep 17 00:00:00 2001 From: vinodkiran Date: Mon, 23 Oct 2023 13:16:12 +0530 Subject: [PATCH 01/13] New Feature - Output Parsers --- .../nodes/chains/LLMChain/LLMChain.ts | 70 +++++++++++++--- .../csvlist/CSVListOutputParser.ts | 35 ++++++++ .../nodes/outputparsers/csvlist/csv.png | Bin 0 -> 8498 bytes .../customlist/CustomListOutputParser.ts | 55 +++++++++++++ .../nodes/outputparsers/customlist/list.png | Bin 0 -> 5002 bytes .../structured/StructuredOutputParser.ts | 77 ++++++++++++++++++ .../outputparsers/structured/structure.png | Bin 0 -> 3849 bytes 7 files changed, 227 insertions(+), 10 deletions(-) create mode 100644 packages/components/nodes/outputparsers/csvlist/CSVListOutputParser.ts create mode 100644 packages/components/nodes/outputparsers/csvlist/csv.png create mode 100644 packages/components/nodes/outputparsers/customlist/CustomListOutputParser.ts create mode 100644 packages/components/nodes/outputparsers/customlist/list.png create mode 100644 packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts create mode 100644 packages/components/nodes/outputparsers/structured/structure.png diff --git a/packages/components/nodes/chains/LLMChain/LLMChain.ts b/packages/components/nodes/chains/LLMChain/LLMChain.ts index 63994b13230..0544365afab 100644 --- a/packages/components/nodes/chains/LLMChain/LLMChain.ts +++ b/packages/components/nodes/chains/LLMChain/LLMChain.ts @@ -3,6 +3,8 @@ import { getBaseClasses, handleEscapeCharacters } from '../../../src/utils' import { LLMChain } from 'langchain/chains' import { BaseLanguageModel } from 'langchain/base_language' import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler' +import { BaseOutputParser } from 'langchain/schema/output_parser' +import { ChatPromptTemplate, PromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts' class LLMChain_Chains implements INode { label: string @@ -19,7 +21,7 
@@ class LLMChain_Chains implements INode { constructor() { this.label = 'LLM Chain' this.name = 'llmChain' - this.version = 1.0 + this.version = 2.0 this.type = 'LLMChain' this.icon = 'chain.svg' this.category = 'Chains' @@ -36,6 +38,12 @@ class LLMChain_Chains implements INode { name: 'prompt', type: 'BasePromptTemplate' }, + { + label: 'Output Parser', + name: 'outputParser', + type: 'BaseLLMOutputParser', + optional: true + }, { label: 'Chain Name', name: 'chainName', @@ -87,8 +95,35 @@ class LLMChain_Chains implements INode { async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { const inputVariables = nodeData.instance.prompt.inputVariables as string[] // ["product"] const chain = nodeData.instance as LLMChain - const promptValues = nodeData.inputs?.prompt.promptValues as ICommonObject - const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData) + let promptValues = nodeData.inputs?.prompt.promptValues as ICommonObject + const outputParser = nodeData.inputs?.outputParser as BaseOutputParser + if (outputParser && chain.prompt) { + const formatInstructions = outputParser.getFormatInstructions() + chain.prompt.inputVariables.push('format_instructions') + if (chain.prompt instanceof PromptTemplate) { + let pt = chain.prompt + pt.template = pt.template + '\n{format_instructions}' + chain.prompt.partialVariables = { format_instructions: formatInstructions } + // eslint-disable-next-line no-console + console.log('prompt :: ', chain.prompt) + } else if (chain.prompt instanceof ChatPromptTemplate) { + let pt = chain.prompt + pt.promptMessages.forEach((msg) => { + if (msg instanceof SystemMessagePromptTemplate) { + ;(msg.prompt as any).partialVariables = { format_instructions: outputParser.getFormatInstructions() } + ;(msg.prompt as any).template = ((msg.prompt as any).template + '\n{format_instructions}') as string + // eslint-disable-next-line no-console + console.log(msg) + } + }) + //pt.template = 
pt.template + '\n{format_instructions}' + } + + promptValues = { ...promptValues, format_instructions: outputParser.getFormatInstructions() } + // eslint-disable-next-line no-console + console.log('promptValues :: ', promptValues) + } + const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData, outputParser) // eslint-disable-next-line no-console console.log('\x1b[93m\x1b[1m\n*****FINAL RESULT*****\n\x1b[0m\x1b[0m') // eslint-disable-next-line no-console @@ -103,7 +138,8 @@ const runPrediction = async ( input: string, promptValuesRaw: ICommonObject, options: ICommonObject, - nodeData: INodeData + nodeData: INodeData, + outputParser: BaseOutputParser | undefined = undefined ) => { const loggerHandler = new ConsoleCallbackHandler(options.logger) const callbacks = await additionalCallbacks(nodeData, options) @@ -135,10 +171,10 @@ const runPrediction = async ( if (isStreaming) { const handler = new CustomChainHandler(socketIO, socketIOClientId) const res = await chain.call(options, [loggerHandler, handler, ...callbacks]) - return res?.text + return runOutputParser(res?.text, outputParser) } else { const res = await chain.call(options, [loggerHandler, ...callbacks]) - return res?.text + return runOutputParser(res?.text, outputParser) } } else if (seen.length === 1) { // If one inputVariable is not specify, use input (user's question) as value @@ -151,10 +187,10 @@ const runPrediction = async ( if (isStreaming) { const handler = new CustomChainHandler(socketIO, socketIOClientId) const res = await chain.call(options, [loggerHandler, handler, ...callbacks]) - return res?.text + return runOutputParser(res?.text, outputParser) } else { const res = await chain.call(options, [loggerHandler, ...callbacks]) - return res?.text + return runOutputParser(res?.text, outputParser) } } else { throw new Error(`Please provide Prompt Values for: ${seen.join(', ')}`) @@ -163,12 +199,26 @@ const runPrediction = async ( if (isStreaming) { const handler = 
new CustomChainHandler(socketIO, socketIOClientId) const res = await chain.run(input, [loggerHandler, handler, ...callbacks]) - return res + return runOutputParser(res, outputParser) } else { const res = await chain.run(input, [loggerHandler, ...callbacks]) - return res + return runOutputParser(res, outputParser) + } + } +} + +const runOutputParser = async (response: string, outputParser: BaseOutputParser | undefined): Promise => { + if (outputParser) { + const parsedResponse = await outputParser.parse(response) + // eslint-disable-next-line no-console + console.log('**** parsedResponse ****', parsedResponse) + if (typeof parsedResponse === 'object') { + return JSON.stringify(parsedResponse) + } else { + return parsedResponse as string } } + return response } module.exports = { nodeClass: LLMChain_Chains } diff --git a/packages/components/nodes/outputparsers/csvlist/CSVListOutputParser.ts b/packages/components/nodes/outputparsers/csvlist/CSVListOutputParser.ts new file mode 100644 index 00000000000..04911fb89be --- /dev/null +++ b/packages/components/nodes/outputparsers/csvlist/CSVListOutputParser.ts @@ -0,0 +1,35 @@ +import { getBaseClasses, ICommonObject, INode, INodeData, INodeParams } from '../../../src' +import { BaseOutputParser } from 'langchain/schema/output_parser' +import { CommaSeparatedListOutputParser } from 'langchain/output_parsers' + +class CSVListOutputParser implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + + constructor() { + this.label = 'CSV Output Parser' + this.name = 'csvOutputParser' + this.version = 1.0 + this.type = 'CSVListOutputParser' + this.description = 'Parse the output of an LLM call as a comma-separated list of values' + this.icon = 'csv.png' + this.category = 'Output Parser' + this.baseClasses = [this.type, ...getBaseClasses(BaseOutputParser)] + this.inputs = [] + } + 
+ // eslint-disable-next-line unused-imports/no-unused-vars + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + return new CommaSeparatedListOutputParser() + } +} + +module.exports = { nodeClass: CSVListOutputParser } diff --git a/packages/components/nodes/outputparsers/csvlist/csv.png b/packages/components/nodes/outputparsers/csvlist/csv.png new file mode 100644 index 0000000000000000000000000000000000000000..41b84e16a04dd8b6b6eb1606f4fa9f5317bffc96 GIT binary patch literal 8498 zcmeHsWmJ?=+w~bb1Zf26lt!eHR7y%gLRz}JL&iWtL|Oqsx?$*!A*DgOLApV@d1rXm zdVjq?zMtQ>zIWD|b)9qOy7s>IzRx|g?lU1TRps$;C~*J)c!~Az|Vh$*&{!W&lr?(HDo5c5%>h*+d>lhtw4aI|pqeCuKkJUu$7iZeD&tVG*pjq_nKOqOz*GrnauWp|J_x+|t_C-toJ$tGlPSuYX|h z&(QG5=-Bwg)PYk$Q}khV|hU8fOlF2*Cz@#@wOC)kgY^@7nfmZ=c=j<+YIKz@+$EG?!(6uTC5a zX@lofUIwECpX%@Exjf7GqF`>0;V$Ey97m5Ey=|t}*z4ft4OnR;oln=q)wq)M$?b^7 z=hY@{7MB3k#uK^KCStzZsv$=q-rEvug)2!>t)A+u?{nYEe`OnW|G&Ez?F47@j1uCZ86`;a29UziY zNe^a%^f4b{#t5K7-z&HSe``4rz?dJzyt)q_-~nnF9tgvLp2VR8IaH}>98{1FNr@!{a4TTN4tO5| zN)*tG0UyUkCYz67BC~#P0c2Lg3V{9rgOv<-$(l@nml7mkAhMy!7{CH`q3({roFClP zz|fH4vKxXd-H}C>zLfb-4L-8wGYTk!r1S&09pFQ7B9e%;@51#81WBVUdlxnpkj?X> zf**JQ4;4WpWdNX)z)TPY3C~)D3=u*I7Iq+C&;bSB8x=WiJ*LK+UKNu#iqSvSA>DL@y>XSfCAiGn7j_i&G1>r!1J1de!)SU|%?#Cd=bghjLNWo!VXtFd=U#sA}AdR(o^NTDScSdOWU1-8!<|w4sevBvf7)^5IBc zMc%j}edAT>VZbED^HHTKr}&vGqcZZx$8jfyjK}|eg$DbmT$Ps5=UY2yBHrY@q{1Sr#fBt+ZKY@wyR_KC20oLuAjs}Pb<@ebC_}}cwkTew3 z>E8!CQUnbJ)PK1l>!A;P^b8D8ipl7YD#vPj0^C?+*6q#7qUXNmNM$z}2$*+X+b`}_ zr+TcY2b!{vhS*LVG#aW&91e{z*YPdai5(BejUKX!4>!49sf;R-6u#srdbL&L9~9eFI>f0+GH>=X8>0YJ70?LiF1QzRrX!A%jOtabwXN?pm62HHo zJ)B*ZoW~BdjJxQn_K2O#R$s4$^caLICd~?0iol#Jo$Ll=e}9;|@Zf#6H2c{Kx3x>= z{nsUj&1P-3o||NC$_dGm&cvW1v8^LttA)OkS~CU*nt#?>3u5GJhwKN{J&)W?h2y#S zTrv|S0CU&CB!B0*g86&7vLQR)UR|AmNinDVuJ>~|y9-oJg9a&nnG}USEs5oc7|oa@ z8Il`st7cz3{pe%tG9`h}0+P-}5js0u6Be)HZw zTTS58fYz&tHNURsRPKc3x4}E)Pj|3ExI}`{!BbMX#HlMHqHFu3CF389>YMQ>CWogH z?p*hg*&jd(7cvo6~<6*@XzT&{qwo!Qvm<#alpzRM>cldt#mv-1dHwwvE00R 
zRxFk-k(?(9a9?xk{V`l{(+JZKa6CIc5MCQB4DW%iv~^5J?gA)gSS1w(DJsO^E< z-XCfrK@wa1Zbp1zP1#}~v75cC>G;`8SUv-DT|Swy@<&&9A(5}M3LPot39FztOg;KG zRJy@|hhf@i4rb+^u% zdC3@{vM*r29V?)7P%~hgH84x@@g-X`IFzvbo5)hBe^k&9{Ipfx+!XqA`#!SUtc3Op zi;rjiEX*6%x2qwxz6Za*GP2T?kQVt(%7;6piuq@&keH1T$MRKI1}gAn&*^Cz6<4@6 zk{$i~1#ZpWEA00v8Wfquunr%iFRZ=5Ii$5iE2u#6Wd1PII?Wf})U1d`LHoc!K@Xg0 zzKRdv5OJE74y^P2I4VN%JBZqYpe59#uBQThK}s|897a5GXwQDtch4o4RU|gNYCCA3 zqhxfEq825>CwAyc&mYCdu&8Yw^4;`AHA`EZwX)k`0#5|&jIPf6NT#f9Z71Mx=%!AY ze`s;uNg)^ect{j!FZS8lBtPE`eZf4tKW}a;*PMbkj2hTbU?#g($x2uYOmGmX3z5-y$1wGHX6c7s)^ej`$EnXyet|TK*D3> zQZs$+!?PPq+0#Or`NR3O+i1hBMhv%=A0RX1`b!XhNXh9htzUl~b7tup*(%TJHn8E4 z%QA^P3I4ql_-3y8we9Wo;cwLIbu?Fkvt%Y|yrsgpH38EVN#d(ucXH-fI}M-)PZb%@Z0Hb4je5ClSGPF{<-<+T(<~6gIIKq>HjpH`|{cP&ynxcXLeGy2hI* zIG|j_W4C$r{%@6699Pw%%%=IIE(&k!=Z&CY#UhMQ{|@s=6z5ID%F5}9=3tr)EI8rK z>#&x?%r)tH6sem%&E;QmyhWbH)E`aTP9(;Np#64r6`tJ#AHC?R8=|f>h?YKVO+EJd zGN>Osl8PQ4}ZEvZHLP+Zyt~WW78T z`0pRVI$`1-tvXX>wANT~t>kt;4pMXJr*HITYsbIwazA}#B5s-<4tQB1+%IE4-N=mN z#X{h@(s;|159#b}&Q$4d?V7&*V^Q zL6up1MNzXpQ+jf!yhEmUq5KS}7LP&M4nCHW3@+&VT@CVew5!D)^XYXVAnRX2tv3$_ z-qailn)g$np3QtC_(xR!E&4NXqD(KCpb7-_J;LvKV!WHaJ71cLzYh-)+Z43L0@++0 zIG4(AZ#Ibgv5p>V&dtB|vMhAqFr01TLx{trGBtR;`b&V*u|2;QGs&VJwZvg|@%zSm zgewBAS|ie#;HQPb%TrF-T?*Twu~om9q1sdvc?gA0cDZxVBHpb~N-E8zq(7bZ*9@i# z`RoWxr|?1;2`Zc_tf0~$tKAP%EbP<-*s9T;jjr(#xPAeB_s~JE>KF^>dE05~5|2dAd%L$riuOZ< zP?~$s>SQ25#nUbFEgty`Z>h%?Zlw1v{?Qx0yP@EMSkYB#JmsHuwiGj9t?nJZWU7YI zdyu#}bkhmk%6CJIIjzTIWYS~9Vf^*pA8ZxcvG2_hv$c&+8qVcG6ii=B$$iEGs?vh{ zkD8UZaX*Ds{WboYD~bD!CX*S=^KI8ZWCu|=)z!NBa8x~g{~*5IKxqlI_Iy15Y-T{q zyKRS03it*7#nIW=z$p>e2~P=((|rDTVi_lu7jN&rq@(F0MBD zpbSPk+a3qv10#+qG#&dSEsWB3TM`mn;YTdVhDYhifR`n3EQNr*qR1A7!I#=)^^(41 z2YPt%Nohu3-Ur1ge?mF^C5qJLi#irKJO9ideufT9E4WQ#G0deP3-I3j)eRy6Q!J*i z#adO2(4lsXI@BG;v;6(eU;OAPwG+x~gD6tITMBe3@aLGx?FO@dA*(FCMATlW);Pcv zEz2?(@jUjGiNIK(?DpS5@`Md^$51Rd-1n6>X!8``DtU=26|e5`jiX}Z+E~Ttt^^fTyHUn{zpK{R7I4KnM-ji82&vNCC 
zlUVz0_b>c}lt)66%hl`B7mh=u(D?@zx+_G$tu*_S(EB>OLK}%URwXq=gIy&rLpjrOX)AsrnKwtoPChz9ow4B;ML@F(ln(nkN$e{M|dYciDqF?O(!m z7=2K#vYr602OeaRGzW!SKjz;)vMX@mG+bRuqjseszp969+>;9UoOF}%H9Q=)@`wce zTd_g#rTT9pFQ}DVCrKgVE+%^?u#xd?dc~Ch_a^z{Vj=OOz2-1uxD_7!@}AgRHeEug zuJj%4AQRP+KHRTH)w&T@sVf~B0$^JB^BiMeOc|%dj0JB;$yZ7C@>s8%0og<}mEnh= z->bO6zh5Tf7v{7rE%$%}9sx|xubJw;^nKgqJs(u5FjAABg?1_Lej2;H5&Lr%l}(j}70O%KiU!oV8uedIm&P4*i8#2qu`{_mmbjNI@eGkL~j zd;VrsqaVhiuNqBjQTNGytE&xZ_3)U=rDN6c&@t+o*^<>ckf9D_IC1b4P3&{IIYph8Ezlu%{@|!u?)2j5cAfy;wPn>81zPjeHoqLhICo~OGM7P5 zOxSS{UDX=}YFh75w^eaN(D{hIqGCtrkMWX`FSaEnk8F>Q9y7@$1|RwQ_Rg^-9OvPx z1Nnr{!VIi3Kfn-D?Mh-C zUAv?dVnV&Ne&ZXJ%Yst) zA$kw(cl=L{-d9Mj1!Me@*E$Qc1jhEX{;C7ExX|jR**T{1SrprV-Aqx|b)KncPw=Bd zfTBb}8+=;vt?O|g$9E`nibHI=Q*ktS`~yMAlcQcq(~4V*6V6bN(iu4tH`yKrYZ?@2 z^0J=4v=Vq=+u&Jd`D%psmXaDz6#R*-N0V}S+c^17G!Cuq%fR$*N5h9eHqk$&DFH@{ zY``i5udYNgPjOLw4c3SGV5ixmt_&?Rvu7jMIDk8DmkX%x98HiXWi#Ab9)5ptkl!i< zWeKYyuX8;T#JR^ZPqYmyjhjGx@h|>^;f|bX2z>jOQ>3Vh@1Nw{eY{QCN7#G5K|s0< z>#4_(s_c`UO0TG{PAc%8 zYj<8ZIQy^pk=tEapDLWF6RLVR+`ygJ z>mtb^Daoqhn%jc6hvMJMr7rOuehAbL_mR6%OOg+Ir|{IQ%RQ6WuNvFURT-F`Fxkan zZC2x;DjnQ{NZH`^d`;^fx1TAQ&rw|ZeBZ0spiIw(w0M~1I|ctlJNGyHsgi+U>_aX8 zf9F%DmTHuLYV#Um+gOD|-#rY9d=$T;0<2s!GIe56Yi{&&r(g7Mx*Ss1Mpw7XSV2-Y z@*u@1_6HJLx#PM0X#srQ@pTo+=ui=j!9WR|1!2R?;XGBxuKxbK*Oi_Sr+*u*8qiqX z(l2>#wKLXdNwb8$kwK77yBsnoXkFIRyl^v&6C?WDHZRJ9JZDtLk>a{F{LSX}rn#LR z@0jY0BWcGPtdDbOq7du`RJP6H7}Um(3OB!(%+<(ofdEmD{1+%!_)Kdg+G|eLiD!GB z3r-E=kQDyd+i%fV-ZMr0MtYvEh~2PLY!|);Nh!b_VQzDb_p9ggEaA?!3@h0w;qj&g zICFceFWzYukuV5OmEci}y3oT=V^&Kl&wyrhHh;eQ8$TAN6+J5M)_ToWfJp^34 zd`Zi&s~Z^?3e`7oo$$x_JbiTjXoV0w^7X2YDmZ)N>=2c3HSeY@gV9*J;ii!cfv!it zU>{msds0&vb#)pwoZ#p<(DI`3DH@>OgBkmsttwr|XhqyG95QDvVCy;1rq`o^KMv>V zZh_pnF}vy!xA~quFPd5O-U+Qe2EkgRQAvqOyq`4Y^)Ai_xw!79;?3(FFo6K8 zoq(1M=A6c+KPk+|%x{`ZE4Hc3VvWU`ejc!c=kup*49wDzT*Oym3Y%Jk75|F8(95(o z-c{a8dp`haWlFUj)qR7cJZO_jV)o|qHV$Fh0VkH~shQhPCgQRoXV2noeME09?l)Eb 
zI=ej#!tTQnE;URWXHm~6oAEUg=-vWX=jAtM%Sk7WXy=Z!rJ;4bW>b0Z+_yxH%F=<8m%ccr^|_g~cZmBg-r(HzJQ0TX(Jv)kKi=o7m@-lz4;nz ztTz!akowQFAIpiJoEa*%bB@{Lcr@Qi}kf0XhI60P8C*oU`Ty1`(f~7Fp!8cN zs(d6GNWDP+@BXb)(U_P7OkYFkx_#-1s0% zi{7c=mPN)5T33EH;K)2>vxy>-bD`m~;IH?tRkL)so8`yJ2Y u(oR+NV%^$;V!ERC6o&GwPsubR^KVgWi2vFX5{O|V#VX3GJ};6n{_tOAR;!Kx literal 0 HcmV?d00001 diff --git a/packages/components/nodes/outputparsers/customlist/CustomListOutputParser.ts b/packages/components/nodes/outputparsers/customlist/CustomListOutputParser.ts new file mode 100644 index 00000000000..db05117ec2d --- /dev/null +++ b/packages/components/nodes/outputparsers/customlist/CustomListOutputParser.ts @@ -0,0 +1,55 @@ +import { getBaseClasses, ICommonObject, INode, INodeData, INodeParams } from '../../../src' +import { BaseOutputParser } from 'langchain/schema/output_parser' +import { CustomListOutputParser as LangchainCustomListOutputParser } from 'langchain/output_parsers' + +class CustomListOutputParser implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + + constructor() { + this.label = 'Custom List Output Parser' + this.name = 'customListOutputParser' + this.version = 1.0 + this.type = 'CustomListOutputParser' + this.description = 'Parse the output of an LLM call as a list of values.' 
+ this.icon = 'list.png' + this.category = 'Output Parser' + this.baseClasses = [this.type, ...getBaseClasses(BaseOutputParser)] + this.inputs = [ + { + label: 'Length', + name: 'length', + type: 'number', + default: 5, + step: 1, + description: 'Number of values to return' + }, + { + label: 'Separator', + name: 'separator', + type: 'string', + description: 'Separator between values', + default: ',' + } + ] + } + + // eslint-disable-next-line unused-imports/no-unused-vars + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const separator = nodeData.inputs?.separator as string + const lengthStr = nodeData.inputs?.length as string + let length = 5 + if (lengthStr) length = parseInt(lengthStr, 10) + return new LangchainCustomListOutputParser({ length: length, separator: separator }) + } +} + +module.exports = { nodeClass: CustomListOutputParser } diff --git a/packages/components/nodes/outputparsers/customlist/list.png b/packages/components/nodes/outputparsers/customlist/list.png new file mode 100644 index 0000000000000000000000000000000000000000..acb4e5d68f200207a97e10ee63125eb4e040fcec GIT binary patch literal 5002 zcmd5<2{_c-`~QwX$Tk=|b*-5x5|dmh$v)YQu`66;%_w`J8`(7^WhzOw8AjF;6AD?9 zt+9+HvPP0U!u+R3|J(Ds&vWnd|3CNspZ}R>&ikG3`=0lG&pDs-dCyS$siOe5fsVco z00IF3i1q=fgTOU_Z4V2C2g<|4!_ULb&Br4I<$*xBAzVB>T!K(pA!(=}zo@i=lBhU* zpP-Ki5Tzx^00w=_^+(&o!obAL2>zzk;s!uq5CaSAUPcB65Ysmc5CfQz3Bt?+ z6;L!!?b^fti?XSOe@YoIpAzDf8RjCkr1YF$R<}gt5lu-cX_uSXG%tKVd#6YF_7?I} z%-y#XfE`TB2LgrwCxJT=0HBDfUIXvU1mzr=$?Av zr^3Z4(hY_R&KYZY3vo43i7l^BK>3|f0$736v|mlw7O&pcHT~^%iViW1(_*R9ap#Cq z4sZX2A)EEDY|Yb2s@Zn#6V48=WGjfsPNJZxUh^#8*~zX+FF&?3!)HeycQp?`a5JB9 z1=jrbi>(rtcUwXNG(g{w7lv%8Idl(d>-C6I{Arme*ec-ze#g~UsLrZ}ck1SF{E4QC zp0kGqBEpaZMkCmR)#mLy&}gj)|P46p`XO}6aW zGW)UsF8@9VbS;CA&|92K*tk>U#vlDtt`Q|cvh8xA{8wb3ccxkMB^zmb`CzWhkj?9_ zhIOZxxV;aV&pf5#LIvtI2s}+2b|0irp(B(2Lz5&_TdVWbq3sjbiRvsad#<;mo~YuB z{DW<#h_Huw6;Ne4_s0%J%cfmz661QCR`=I0lDZz^w|(K}EBs1eu}e$>t=eM`M4%M~ 
zsaUrtfeP2fw1P{Yy|YIw+1g-eeb|Po>Te;NDl@MfJHJc>BQRd?p6*5NnjEQ+f6h@J z+FF5~Ah(oV8KaBQL-Lk6ef5>eL9K6}eL32-0PEFFMa#5*ji!r;@+zBjM`nupINWS* zbWpd3U@dNFhg10^L>CO*lOFbABJS%6Mabgcy~US`T+2nF6!1r9O zIi4eU+s&)iMS6ry-PU*!IbOrr`F;PGdECv4ZJuWoPq{AMwGmR5Ll-$3qL-C|-Y_(d z(m{29&_FuO453_=~!60>4!hGIs8 z_KwL55ow5T74I4{vBs+r89{Dyb$D`nA{7JlxL{KZo56-DI{GU6IgaWJNex9P4@kNE zp=#GPj!EAa=x#14;BEqs7GBvaj|cjUqX zBPn#y|IP9GHq-OUF}+oDRDl26k&&J%X57e8aO!AS9fVVXt3EBXE|P_E7fvz8wi#iy znU7=(myB20hD+X^w1)*NWqEyF^x_SEt<_M)zG%XUPmT&jC3`+vGS`XUIkOZLH?@&7mpJ^gV`^!6{imGa>de!~~09Lzo8V=Mhnz!sShqtV&66&M|tRaNE`R2R> z=ee-tFy9m@44xpI=x>U;oIZ0rxH8#Z)-OV5p))6;ZpHC%_J?J?8s%fZ%5!AtR=9+v zC<>&-B|53Cpp4JSb!#Pf+QfWovCT0t>2?u12vHHU@hZPuT6KY^Z{cz*caq<3O!eYM za5cjX#F9LXCGQ!A1MNjBB$eRmXz%l+57q5M20Ml%aLbbiEZIlPM^=WXJ_%LiuIDbO z_`x@hu5ihWL`Wo1dO~{waZg=$Jrz$3b{@xt0C7s5h6{!xK4Ul=-w>bWK%UxE-@gpO z84fNyFymyEjjqB^25#VCBef-+)tc8l_6I~UJpB{avHbseAcDjme1|`~RA-Ju#bt!! z>-O zEVc!P7-lQdrs4K<YO~`ls1VK8muN?4Ax*CRuRwtGbPCr;sh5 z;iddhrlx*XQ2x1c!D-IsIn!{ph}rs9H~M4rg^x)U0?SW13q^BxA9pqXd!t4Q-{%4T zMs-_$8@EcUuGS&fcL7I2x%|ryKwtieSz{jmiGS11>6r}EvHm8zK*LS7+;^aZmM35&Em9-cW}7LYuFw9IR_zuI@%uBvgp07vO)n);Z2?L>kb+Ho@#Dvq z90J(f=)H?Ich4k?doXjHBo_s>yq{uBJ680=NN@=!@Mp5d1LcmD0;X+>8>4jX!}7!Y-Y_Wh^Nw|YP;6&S^phL7f@{-|t}c2u;55z1;L<{mumfh9{1 zmc641VU7)kszQH-a&;}`RUcRzE2qAL$sib!!IoC`Tb0@Q@tB(-(cAVlEZIqGeYWY<`&LB*E1wOK z`l&zxe32*F9`uMkI$#p>q! zk_YSJjxBg!stPTk0;aD*J&W3glQ&apAqQ4DHmigFKAGu-T~qPv#%xT6d$oH4WgiD$DG=4Uw@JRY&uH` zk8>28(2lOzDXVlW`$p{S^ekyMgtl zSEOF(Fm$xI?kO`W)2V6wfiQbx-epLs2TlIvpLa%EuBacbdu>JKD5x_R{VHKPRr3DX zy3cIW`;#zB;g4!KTm|*|`YcuPNgl7Cz%Y3d61;nu z81+3q`|eG%l-1{;0;?8F3C5Hii_MH0ikck`F=FTwOUPjl8@LB(4wMH}9rdhn?V?N! 
z5SQGST`zJSb<^N}?kz?%%vcbjYcq}y+yl4Y>10&E>3XKRRyWbLGRZVk+6d6zsqe!V z>DrcTnkj7rpt { + const structureType = nodeData.inputs?.structureType as string + const structure = nodeData.inputs?.structure as string + let parsedStructure: any | undefined = undefined + if (structure) { + try { + parsedStructure = JSON.parse(structure) + } catch (exception) { + throw new Error('Invalid JSON in StructuredOutputParser: ' + exception) + } + } + if (structureType === 'fromZodSchema') { + return LangchainStructuredOutputParser.fromZodSchema(parsedStructure) + } else { + return LangchainStructuredOutputParser.fromNamesAndDescriptions(parsedStructure) + } + } +} + +module.exports = { nodeClass: StructuredOutputParser } diff --git a/packages/components/nodes/outputparsers/structured/structure.png b/packages/components/nodes/outputparsers/structured/structure.png new file mode 100644 index 0000000000000000000000000000000000000000..c56b2dd7786b2002fe6374d1f00f56fd70df310c GIT binary patch literal 3849 zcmb7HX*3jW`yMkG>j;hIXKifRmF#<#22l-Jl3hZ!ED2+ol)=c7qEHjY(mta?4Py$8 zWM4A&L6Hnb#?E;4|NTF{U*7AS>$%T!J@>gkJm=ikeeTTjj%WCJ{^0=t0Q|OaE5u=J z|4S~;!}FEf>nDeSJ^Hk*GuI)ITmcz}ckW2Ido%#REBu#ifZ|dy008pB*2=>9X6e#I z9NzOHl(HJZ!>zz^nkW4Tmv!;27>lfv8651^1U5)EG*(iIw?fM$#wyS`=C}yEAl>b_ zIImT4`H3TEj-;WEJipI=?5n-((hj9VE|-!tKM}n#dKLR;xAWz+%p_@%PIO&1*`xzu z{{q1+hft!0a^h9-ym%cg2vyRJ`z4?d$crMP&_X|tBZ_UwL=+Z~3w$iOD#p;J?UM&< zg+((Lj??UD?th?d=Lwt*ocd4ZlAhaP8X;S%rq(B%$GIDYp;?3NXKD4V>e-*JtK!dz ziz*4t9Hny-J7<37#*#oMtxax*6W16C<+F zCPG%nX=a2I*r}S6m6!kt{!YcDV*zc#)E0Pt|HYqADvI8C48Yb=aMu;teR7ZRFi5U~ zdE43MVlru5xt(-1OrOiHd+uR8+aR}8o(t4iN*Kf>&s$-c;&e#pmi@A-$%1mX^*v3k?{XBEIKGGU3;fHRlL;9IcV;_KufJ90$d1Sex&7y1tb zSY5t(R@oiC6*c0|*p)ZZ)e;pXlX%qSLbW#767_#Z+xTeguku$TaX&W{kXp&3C$Ra{ zH=bn%`zigiJ<&A=k|;rnQ!{F{sgS zbO^)rkwIc>6Rl-f19WSi5$T4zOzS=w0e@^b>;XZU;eh-ea=SH_jDdN-fcv8T)?RdI zWFPo~*KkkXDxQ3qPbT6)(SD*|?C8fqxnZeRb~?bZ%>9r(fw{|kNRbj243X%xs+!Z% z=c+7@;|7(_4mt|&oGz_=vaEp!RymT-_rJ2jNlEtoNuGz9VSn@>VwhK?%WP_&bmcZL zC7{#82VvRk 
zl-FPZa?p%i5XxIH0#kXF@b*`n>f)`|8uQPg;Jt_X)_v;V{m_bGo?qHBdE{FY@e=gf zEPo$k?nV=qLKcsF^L4VOt&>>VEW&54#&s-8B@bBzsxKgEaelcR%Bz%V&e2bNM4h|& zjJMoF4H#%m{3GIoV$NApwX+8Awb*_zQXtzf?x8;SU)W3%5J8`%p|DYj-*OzQx-j^){J>J`zxd_~FY z76xwCYdItOuk?lcijwq*+Fs8&TfyD+w7Y{-52C2qif}((}`=cdz9~@$PTO#p4?P(R4vEDcfX`N>7J}VO-6V z!&V(j+K>-A*GZRS2)wwoq@^vukhf8j-t@4c93q@z@I%)G5pLP~6oAg|qB-HfXZ(Sh33YbK}yBVc+%N<=Z=na@$@f z)N!o`)iWhxp&5L2ad*Z%=Xz~4RE~(#-$TPT1w&J&5YNm5@bxpzFWSAxEL4~p11_)m zH&=miVP4my>c!MTuZ`-1RY_bJL8w`0M@j!DQ?VL(Non0DBt%ViFXL7b@SMSqT8Pnx z0}tFJuUPT`w^gv7#u$tGm43^BYXN*=z@Ix3(X*1SUTY}4+xArzyt1(Q>Rtliy}}%5 zg`2}AZP^$Y6YFI#{HDMVRk+~uElAW~Z%r5ACxR@T!Nez{2m} z-v$%6qZMu@4TqKYk!5UC4iNGpzVpR!Stz_NliKtC85sG*4J-+*&>n?zoNP3dY}VE4 z&Kcle!T}e$&eFX41z6KFo8%gJQm>s#-eebG8I~l-2aWFAzP)_-`)7fyvparbl565S z$8hZDQ9twwL!{{OCfwLUy~jMbhE0Do4X2i~#yutPaq_U~>9)n%gGK%H311aU4x#W< z4DQh3Jn9{S^p>gChx@9+(2R;;GRKzVMHx3S*O|8;9{+d>T-B4>QZc2}?2DGQN*w;@7?(SVrt`@i9b0%ct`P_sYB<2UbDau0`sVY5JA$(bF9)uw0=$Oy5n7 zb3WEFW<#TI^x?vscX+JI`$%puac9~8&UXg|Outij-i_2qkI=rMKx)Cp18Ul51?tC5 zBB8}W!h!=fkB${5gc3@H36{vmKU~s4>qf0baJ*`z$7@C%kgXQ{0LJ(Mh0I^VNtpQ3^3hMpD-pGC3P@b(7z!SIZ|0W;a$hv*)O=%kU%qJT3UBP$)+ffWk9;mKJ&CAEcttl}uS1u!yy%PPA& zsN(Hvq7&(S)H0#z0Ta{|#AGvS*o5;+4kS=zOB1QgPj@`A}9j0lIT*6xbA1~pBq;aA%r6b z5r@-h2o>KIeW)o;>$TM1U2WU*oGiEoTKlv0vB*O>&IKuynANBe3)5Mz&Z;wlBp!Ev zt|-Dq<@+vB?O0wVVimGE1qrJ$m-M_XC9k|~>^^risk zOl_Pv!h${=Zpr_1Ph$hbGT-^+*5-yXvHug3Vd>>C;at(bLD5Zev))0uSL*BGoQMx+ z^9JI(4QwI8sROC1C?&X=xf3NM)B%m&JakN(YGhRKXsDNf9A3O85x+_$WKs*pyPloF zHkC3)-U@);$kNewTL>dPWxLXED7phTRq#@RG)3NhiPzE z@bqTvH3f!pg14G3x4z8O<3Pu*DGu03W09=c?XZ{idYZ^&UCzXAraImI&itZA3u!W| z!BA`pWvJ!iOEKx8xVR)pS@fxsW+qO+_BE6y$?9@0fP#rc>wvT{d|EIHsv(Ii70L zlu~zJP3{5;?j?s`pPt2^UTCSR)U-7Ejmh{wz|{`G7g%)km)nEOhr2Am*4okP^=Y45 F{{fD Date: Wed, 25 Oct 2023 19:52:50 +0530 Subject: [PATCH 02/13] Addition of OutputParser for FewShotPromptTemplate --- .../components/nodes/chains/LLMChain/LLMChain.ts | 14 +++++--------- .../structured/StructuredOutputParser.ts | 10 +++++----- 2 files changed, 10 
insertions(+), 14 deletions(-) diff --git a/packages/components/nodes/chains/LLMChain/LLMChain.ts b/packages/components/nodes/chains/LLMChain/LLMChain.ts index 0544365afab..5eca8823ac1 100644 --- a/packages/components/nodes/chains/LLMChain/LLMChain.ts +++ b/packages/components/nodes/chains/LLMChain/LLMChain.ts @@ -4,7 +4,7 @@ import { LLMChain } from 'langchain/chains' import { BaseLanguageModel } from 'langchain/base_language' import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler' import { BaseOutputParser } from 'langchain/schema/output_parser' -import { ChatPromptTemplate, PromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts' +import { ChatPromptTemplate, FewShotPromptTemplate, PromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts' class LLMChain_Chains implements INode { label: string @@ -99,29 +99,25 @@ class LLMChain_Chains implements INode { const outputParser = nodeData.inputs?.outputParser as BaseOutputParser if (outputParser && chain.prompt) { const formatInstructions = outputParser.getFormatInstructions() - chain.prompt.inputVariables.push('format_instructions') if (chain.prompt instanceof PromptTemplate) { let pt = chain.prompt pt.template = pt.template + '\n{format_instructions}' chain.prompt.partialVariables = { format_instructions: formatInstructions } - // eslint-disable-next-line no-console - console.log('prompt :: ', chain.prompt) } else if (chain.prompt instanceof ChatPromptTemplate) { let pt = chain.prompt pt.promptMessages.forEach((msg) => { if (msg instanceof SystemMessagePromptTemplate) { ;(msg.prompt as any).partialVariables = { format_instructions: outputParser.getFormatInstructions() } ;(msg.prompt as any).template = ((msg.prompt as any).template + '\n{format_instructions}') as string - // eslint-disable-next-line no-console - console.log(msg) } }) - //pt.template = pt.template + '\n{format_instructions}' + } else if (chain.prompt instanceof 
FewShotPromptTemplate) { + chain.prompt.examplePrompt.partialVariables = { format_instructions: formatInstructions } + chain.prompt.examplePrompt.template = chain.prompt.examplePrompt.template + '\n{format_instructions}' } + chain.prompt.inputVariables.push('format_instructions') promptValues = { ...promptValues, format_instructions: outputParser.getFormatInstructions() } - // eslint-disable-next-line no-console - console.log('promptValues :: ', promptValues) } const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData, outputParser) // eslint-disable-next-line no-console diff --git a/packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts b/packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts index e935e5fba6d..ce10239b2a9 100644 --- a/packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts +++ b/packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts @@ -62,15 +62,15 @@ class StructuredOutputParser implements INode { if (structure) { try { parsedStructure = JSON.parse(structure) + if (structureType === 'fromZodSchema') { + return LangchainStructuredOutputParser.fromZodSchema(parsedStructure) + } else { + return LangchainStructuredOutputParser.fromNamesAndDescriptions(parsedStructure) + } } catch (exception) { throw new Error('Invalid JSON in StructuredOutputParser: ' + exception) } } - if (structureType === 'fromZodSchema') { - return LangchainStructuredOutputParser.fromZodSchema(parsedStructure) - } else { - return LangchainStructuredOutputParser.fromNamesAndDescriptions(parsedStructure) - } } } From 3687000298c0efd0f59ce25651387889b930a508 Mon Sep 17 00:00:00 2001 From: vinodkiran Date: Thu, 26 Oct 2023 10:12:51 +0530 Subject: [PATCH 03/13] Code cleanup and minor fixes. 
--- .../nodes/chains/LLMChain/LLMChain.ts | 57 ++++--------------- .../outputparsers/OutputParserHelpers.ts | 53 +++++++++++++++++ .../csvlist/CSVListOutputParser.ts | 3 +- .../customlist/CustomListOutputParser.ts | 3 +- .../structured/StructuredOutputParser.ts | 17 ++---- 5 files changed, 74 insertions(+), 59 deletions(-) create mode 100644 packages/components/nodes/outputparsers/OutputParserHelpers.ts diff --git a/packages/components/nodes/chains/LLMChain/LLMChain.ts b/packages/components/nodes/chains/LLMChain/LLMChain.ts index 5eca8823ac1..6d041e50479 100644 --- a/packages/components/nodes/chains/LLMChain/LLMChain.ts +++ b/packages/components/nodes/chains/LLMChain/LLMChain.ts @@ -4,7 +4,7 @@ import { LLMChain } from 'langchain/chains' import { BaseLanguageModel } from 'langchain/base_language' import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler' import { BaseOutputParser } from 'langchain/schema/output_parser' -import { ChatPromptTemplate, FewShotPromptTemplate, PromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts' +import { injectOutputParser, applyOutputParser } from '../../outputparsers/OutputParserHelpers' class LLMChain_Chains implements INode { label: string @@ -21,7 +21,7 @@ class LLMChain_Chains implements INode { constructor() { this.label = 'LLM Chain' this.name = 'llmChain' - this.version = 2.0 + this.version = 3.0 this.type = 'LLMChain' this.icon = 'chain.svg' this.category = 'Chains' @@ -95,30 +95,9 @@ class LLMChain_Chains implements INode { async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { const inputVariables = nodeData.instance.prompt.inputVariables as string[] // ["product"] const chain = nodeData.instance as LLMChain - let promptValues = nodeData.inputs?.prompt.promptValues as ICommonObject + let promptValues: ICommonObject | undefined = nodeData.inputs?.prompt.promptValues as ICommonObject const outputParser = nodeData.inputs?.outputParser as 
BaseOutputParser - if (outputParser && chain.prompt) { - const formatInstructions = outputParser.getFormatInstructions() - if (chain.prompt instanceof PromptTemplate) { - let pt = chain.prompt - pt.template = pt.template + '\n{format_instructions}' - chain.prompt.partialVariables = { format_instructions: formatInstructions } - } else if (chain.prompt instanceof ChatPromptTemplate) { - let pt = chain.prompt - pt.promptMessages.forEach((msg) => { - if (msg instanceof SystemMessagePromptTemplate) { - ;(msg.prompt as any).partialVariables = { format_instructions: outputParser.getFormatInstructions() } - ;(msg.prompt as any).template = ((msg.prompt as any).template + '\n{format_instructions}') as string - } - }) - } else if (chain.prompt instanceof FewShotPromptTemplate) { - chain.prompt.examplePrompt.partialVariables = { format_instructions: formatInstructions } - chain.prompt.examplePrompt.template = chain.prompt.examplePrompt.template + '\n{format_instructions}' - } - - chain.prompt.inputVariables.push('format_instructions') - promptValues = { ...promptValues, format_instructions: outputParser.getFormatInstructions() } - } + promptValues = injectOutputParser(outputParser, chain, promptValues) const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData, outputParser) // eslint-disable-next-line no-console console.log('\x1b[93m\x1b[1m\n*****FINAL RESULT*****\n\x1b[0m\x1b[0m') @@ -132,7 +111,7 @@ const runPrediction = async ( inputVariables: string[], chain: LLMChain, input: string, - promptValuesRaw: ICommonObject, + promptValuesRaw: ICommonObject | undefined, options: ICommonObject, nodeData: INodeData, outputParser: BaseOutputParser | undefined = undefined @@ -167,10 +146,10 @@ const runPrediction = async ( if (isStreaming) { const handler = new CustomChainHandler(socketIO, socketIOClientId) const res = await chain.call(options, [loggerHandler, handler, ...callbacks]) - return runOutputParser(res?.text, outputParser) + return 
applyOutputParser(res?.text, outputParser) } else { const res = await chain.call(options, [loggerHandler, ...callbacks]) - return runOutputParser(res?.text, outputParser) + return applyOutputParser(res?.text, outputParser) } } else if (seen.length === 1) { // If one inputVariable is not specify, use input (user's question) as value @@ -183,10 +162,10 @@ const runPrediction = async ( if (isStreaming) { const handler = new CustomChainHandler(socketIO, socketIOClientId) const res = await chain.call(options, [loggerHandler, handler, ...callbacks]) - return runOutputParser(res?.text, outputParser) + return applyOutputParser(res?.text, outputParser) } else { const res = await chain.call(options, [loggerHandler, ...callbacks]) - return runOutputParser(res?.text, outputParser) + return applyOutputParser(res?.text, outputParser) } } else { throw new Error(`Please provide Prompt Values for: ${seen.join(', ')}`) @@ -195,26 +174,12 @@ const runPrediction = async ( if (isStreaming) { const handler = new CustomChainHandler(socketIO, socketIOClientId) const res = await chain.run(input, [loggerHandler, handler, ...callbacks]) - return runOutputParser(res, outputParser) + return applyOutputParser(res, outputParser) } else { const res = await chain.run(input, [loggerHandler, ...callbacks]) - return runOutputParser(res, outputParser) - } - } -} - -const runOutputParser = async (response: string, outputParser: BaseOutputParser | undefined): Promise => { - if (outputParser) { - const parsedResponse = await outputParser.parse(response) - // eslint-disable-next-line no-console - console.log('**** parsedResponse ****', parsedResponse) - if (typeof parsedResponse === 'object') { - return JSON.stringify(parsedResponse) - } else { - return parsedResponse as string + return applyOutputParser(res, outputParser) } } - return response } module.exports = { nodeClass: LLMChain_Chains } diff --git a/packages/components/nodes/outputparsers/OutputParserHelpers.ts 
b/packages/components/nodes/outputparsers/OutputParserHelpers.ts new file mode 100644 index 00000000000..87a59170d44 --- /dev/null +++ b/packages/components/nodes/outputparsers/OutputParserHelpers.ts @@ -0,0 +1,53 @@ +import { BaseOutputParser } from 'langchain/schema/output_parser' +import { LLMChain } from 'langchain/chains' +import { BaseLanguageModel } from 'langchain/base_language' +import { ICommonObject } from '../../src' +import { ChatPromptTemplate, FewShotPromptTemplate, PromptTemplate, SystemMessagePromptTemplate } from 'langchain/prompts' + +export const CATEGORY = 'Output Parser (Experimental)' + +export const applyOutputParser = async (response: string, outputParser: BaseOutputParser | undefined): Promise => { + if (outputParser) { + const parsedResponse = await outputParser.parse(response) + // eslint-disable-next-line no-console + console.log('**** parsedResponse ****', parsedResponse) + if (typeof parsedResponse === 'object') { + return JSON.stringify(parsedResponse) + } else { + return parsedResponse as string + } + } + return response +} + +export const injectOutputParser = ( + outputParser: BaseOutputParser, + chain: LLMChain, + promptValues: ICommonObject | undefined = undefined +) => { + if (outputParser && chain.prompt) { + const formatInstructions = outputParser.getFormatInstructions() + if (chain.prompt instanceof PromptTemplate) { + let pt = chain.prompt + pt.template = pt.template + '\n{format_instructions}' + chain.prompt.partialVariables = { format_instructions: formatInstructions } + } else if (chain.prompt instanceof ChatPromptTemplate) { + let pt = chain.prompt + pt.promptMessages.forEach((msg) => { + if (msg instanceof SystemMessagePromptTemplate) { + ;(msg.prompt as any).partialVariables = { format_instructions: outputParser.getFormatInstructions() } + ;(msg.prompt as any).template = ((msg.prompt as any).template + '\n{format_instructions}') as string + } + }) + } else if (chain.prompt instanceof FewShotPromptTemplate) { + 
chain.prompt.examplePrompt.partialVariables = { format_instructions: formatInstructions } + chain.prompt.examplePrompt.template = chain.prompt.examplePrompt.template + '\n{format_instructions}' + } + + chain.prompt.inputVariables.push('format_instructions') + if (promptValues) { + promptValues = { ...promptValues, format_instructions: outputParser.getFormatInstructions() } + } + } + return promptValues +} diff --git a/packages/components/nodes/outputparsers/csvlist/CSVListOutputParser.ts b/packages/components/nodes/outputparsers/csvlist/CSVListOutputParser.ts index 04911fb89be..4bc87851a66 100644 --- a/packages/components/nodes/outputparsers/csvlist/CSVListOutputParser.ts +++ b/packages/components/nodes/outputparsers/csvlist/CSVListOutputParser.ts @@ -1,6 +1,7 @@ import { getBaseClasses, ICommonObject, INode, INodeData, INodeParams } from '../../../src' import { BaseOutputParser } from 'langchain/schema/output_parser' import { CommaSeparatedListOutputParser } from 'langchain/output_parsers' +import { CATEGORY } from '../OutputParserHelpers' class CSVListOutputParser implements INode { label: string @@ -21,7 +22,7 @@ class CSVListOutputParser implements INode { this.type = 'CSVListOutputParser' this.description = 'Parse the output of an LLM call as a comma-separated list of values' this.icon = 'csv.png' - this.category = 'Output Parser' + this.category = CATEGORY this.baseClasses = [this.type, ...getBaseClasses(BaseOutputParser)] this.inputs = [] } diff --git a/packages/components/nodes/outputparsers/customlist/CustomListOutputParser.ts b/packages/components/nodes/outputparsers/customlist/CustomListOutputParser.ts index db05117ec2d..1e9617c3fdb 100644 --- a/packages/components/nodes/outputparsers/customlist/CustomListOutputParser.ts +++ b/packages/components/nodes/outputparsers/customlist/CustomListOutputParser.ts @@ -1,6 +1,7 @@ import { getBaseClasses, ICommonObject, INode, INodeData, INodeParams } from '../../../src' import { BaseOutputParser } from 
'langchain/schema/output_parser' import { CustomListOutputParser as LangchainCustomListOutputParser } from 'langchain/output_parsers' +import { CATEGORY } from '../OutputParserHelpers' class CustomListOutputParser implements INode { label: string @@ -21,7 +22,7 @@ class CustomListOutputParser implements INode { this.type = 'CustomListOutputParser' this.description = 'Parse the output of an LLM call as a list of values.' this.icon = 'list.png' - this.category = 'Output Parser' + this.category = CATEGORY this.baseClasses = [this.type, ...getBaseClasses(BaseOutputParser)] this.inputs = [ { diff --git a/packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts b/packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts index ce10239b2a9..ef04de7de3d 100644 --- a/packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts +++ b/packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts @@ -1,6 +1,7 @@ import { getBaseClasses, ICommonObject, INode, INodeData, INodeParams } from '../../../src' import { BaseOutputParser } from 'langchain/schema/output_parser' import { StructuredOutputParser as LangchainStructuredOutputParser } from 'langchain/output_parsers' +import { CATEGORY } from '../OutputParserHelpers' class StructuredOutputParser implements INode { label: string @@ -21,8 +22,9 @@ class StructuredOutputParser implements INode { this.type = 'StructuredOutputParser' this.description = 'Parse the output of an LLM call into a given (JSON) structure.' 
this.icon = 'structure.png' - this.category = 'Output Parser' + this.category = CATEGORY this.baseClasses = [this.type, ...getBaseClasses(BaseOutputParser)] + //TODO: To extend the structureType to ZodSchema this.inputs = [ { label: 'Structure Type', @@ -32,10 +34,6 @@ class StructuredOutputParser implements INode { { label: 'Names And Descriptions', name: 'fromNamesAndDescriptions' - }, - { - label: 'Zod Schema', - name: 'fromZodSchema' } ], default: 'fromNamesAndDescriptions' @@ -59,18 +57,15 @@ class StructuredOutputParser implements INode { const structureType = nodeData.inputs?.structureType as string const structure = nodeData.inputs?.structure as string let parsedStructure: any | undefined = undefined - if (structure) { + if (structure && structureType === 'fromNamesAndDescriptions') { try { parsedStructure = JSON.parse(structure) - if (structureType === 'fromZodSchema') { - return LangchainStructuredOutputParser.fromZodSchema(parsedStructure) - } else { - return LangchainStructuredOutputParser.fromNamesAndDescriptions(parsedStructure) - } + return LangchainStructuredOutputParser.fromNamesAndDescriptions(parsedStructure) } catch (exception) { throw new Error('Invalid JSON in StructuredOutputParser: ' + exception) } } + throw new Error('Error creating OutputParser.') } } From e92d6b928ddc64ae0737ebc8bb021d64bd085017 Mon Sep 17 00:00:00 2001 From: vinodkiran Date: Sat, 28 Oct 2023 09:09:29 +0530 Subject: [PATCH 04/13] Removal of the custom output parsing. 
--- .../nodes/chains/LLMChain/LLMChain.ts | 42 +++++++++++-------- 1 file changed, 24 insertions(+), 18 deletions(-) diff --git a/packages/components/nodes/chains/LLMChain/LLMChain.ts b/packages/components/nodes/chains/LLMChain/LLMChain.ts index 6d041e50479..d1860f1b49c 100644 --- a/packages/components/nodes/chains/LLMChain/LLMChain.ts +++ b/packages/components/nodes/chains/LLMChain/LLMChain.ts @@ -4,7 +4,8 @@ import { LLMChain } from 'langchain/chains' import { BaseLanguageModel } from 'langchain/base_language' import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler' import { BaseOutputParser } from 'langchain/schema/output_parser' -import { injectOutputParser, applyOutputParser } from '../../outputparsers/OutputParserHelpers' +import { injectOutputParser } from '../../outputparsers/OutputParserHelpers' +import { BaseLLMOutputParser } from 'langchain/schema/output_parser' class LLMChain_Chains implements INode { label: string @@ -28,16 +29,16 @@ class LLMChain_Chains implements INode { this.description = 'Chain to run queries against LLMs' this.baseClasses = [this.type, ...getBaseClasses(LLMChain)] this.inputs = [ - { - label: 'Language Model', - name: 'model', - type: 'BaseLanguageModel' - }, { label: 'Prompt', name: 'prompt', type: 'BasePromptTemplate' }, + { + label: 'Language Model', + name: 'model', + type: 'BaseLanguageModel' + }, { label: 'Output Parser', name: 'outputParser', @@ -71,12 +72,18 @@ class LLMChain_Chains implements INode { const prompt = nodeData.inputs?.prompt const output = nodeData.outputs?.output as string const promptValues = prompt.promptValues as ICommonObject + const llmOutputParser = nodeData.inputs?.outputParser as BaseLLMOutputParser if (output === this.name) { - const chain = new LLMChain({ llm: model, prompt, verbose: process.env.DEBUG === 'true' ? 
true : false }) + const chain = new LLMChain({ llm: model, outputParser: llmOutputParser, prompt, verbose: process.env.DEBUG === 'true' }) return chain } else if (output === 'outputPrediction') { - const chain = new LLMChain({ llm: model, prompt, verbose: process.env.DEBUG === 'true' ? true : false }) + const chain = new LLMChain({ + llm: model, + outputParser: llmOutputParser, + prompt, + verbose: process.env.DEBUG === 'true' + }) const inputVariables = chain.prompt.inputVariables as string[] // ["product"] const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData) // eslint-disable-next-line no-console @@ -98,7 +105,7 @@ class LLMChain_Chains implements INode { let promptValues: ICommonObject | undefined = nodeData.inputs?.prompt.promptValues as ICommonObject const outputParser = nodeData.inputs?.outputParser as BaseOutputParser promptValues = injectOutputParser(outputParser, chain, promptValues) - const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData, outputParser) + const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData) // eslint-disable-next-line no-console console.log('\x1b[93m\x1b[1m\n*****FINAL RESULT*****\n\x1b[0m\x1b[0m') // eslint-disable-next-line no-console @@ -109,12 +116,11 @@ class LLMChain_Chains implements INode { const runPrediction = async ( inputVariables: string[], - chain: LLMChain, + chain: LLMChain, input: string, promptValuesRaw: ICommonObject | undefined, options: ICommonObject, - nodeData: INodeData, - outputParser: BaseOutputParser | undefined = undefined + nodeData: INodeData ) => { const loggerHandler = new ConsoleCallbackHandler(options.logger) const callbacks = await additionalCallbacks(nodeData, options) @@ -146,10 +152,10 @@ const runPrediction = async ( if (isStreaming) { const handler = new CustomChainHandler(socketIO, socketIOClientId) const res = await chain.call(options, [loggerHandler, handler, ...callbacks]) - 
return applyOutputParser(res?.text, outputParser) + return res?.text } else { const res = await chain.call(options, [loggerHandler, ...callbacks]) - return applyOutputParser(res?.text, outputParser) + return res?.text } } else if (seen.length === 1) { // If one inputVariable is not specify, use input (user's question) as value @@ -162,10 +168,10 @@ const runPrediction = async ( if (isStreaming) { const handler = new CustomChainHandler(socketIO, socketIOClientId) const res = await chain.call(options, [loggerHandler, handler, ...callbacks]) - return applyOutputParser(res?.text, outputParser) + return res?.text } else { const res = await chain.call(options, [loggerHandler, ...callbacks]) - return applyOutputParser(res?.text, outputParser) + return res?.text } } else { throw new Error(`Please provide Prompt Values for: ${seen.join(', ')}`) @@ -174,10 +180,10 @@ const runPrediction = async ( if (isStreaming) { const handler = new CustomChainHandler(socketIO, socketIOClientId) const res = await chain.run(input, [loggerHandler, handler, ...callbacks]) - return applyOutputParser(res, outputParser) + return res } else { const res = await chain.run(input, [loggerHandler, ...callbacks]) - return applyOutputParser(res, outputParser) + return res } } } From bc6db5bca7e3044274c870d801d42137701b9308 Mon Sep 17 00:00:00 2001 From: vinodkiran Date: Sat, 28 Oct 2023 13:59:32 +0530 Subject: [PATCH 05/13] AutoFixOutputParser: Addition of Autofix option for all output parsers --- .../nodes/chains/LLMChain/LLMChain.ts | 26 +++++++++++++++---- .../csvlist/CSVListOutputParser.ts | 21 +++++++++++++-- .../customlist/CustomListOutputParser.ts | 17 +++++++++++- .../structured/StructuredOutputParser.ts | 20 +++++++++++++- 4 files changed, 75 insertions(+), 9 deletions(-) diff --git a/packages/components/nodes/chains/LLMChain/LLMChain.ts b/packages/components/nodes/chains/LLMChain/LLMChain.ts index d1860f1b49c..7ec777be091 100644 --- a/packages/components/nodes/chains/LLMChain/LLMChain.ts +++ 
b/packages/components/nodes/chains/LLMChain/LLMChain.ts @@ -6,6 +6,7 @@ import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from import { BaseOutputParser } from 'langchain/schema/output_parser' import { injectOutputParser } from '../../outputparsers/OutputParserHelpers' import { BaseLLMOutputParser } from 'langchain/schema/output_parser' +import { OutputFixingParser } from 'langchain/output_parsers' class LLMChain_Chains implements INode { label: string @@ -18,6 +19,7 @@ class LLMChain_Chains implements INode { description: string inputs: INodeParams[] outputs: INodeOutputsValue[] + outputParser: BaseOutputParser constructor() { this.label = 'LLM Chain' @@ -72,15 +74,26 @@ class LLMChain_Chains implements INode { const prompt = nodeData.inputs?.prompt const output = nodeData.outputs?.output as string const promptValues = prompt.promptValues as ICommonObject - const llmOutputParser = nodeData.inputs?.outputParser as BaseLLMOutputParser - + const llmOutputParser = nodeData.inputs?.outputParser as BaseOutputParser + this.outputParser = llmOutputParser + if (llmOutputParser) { + let autoFix = (llmOutputParser as any).autoFix + if (autoFix === true) { + this.outputParser = OutputFixingParser.fromLLM(model, llmOutputParser) + } + } if (output === this.name) { - const chain = new LLMChain({ llm: model, outputParser: llmOutputParser, prompt, verbose: process.env.DEBUG === 'true' }) + const chain = new LLMChain({ + llm: model, + outputParser: this.outputParser as BaseLLMOutputParser, + prompt, + verbose: process.env.DEBUG === 'true' + }) return chain } else if (output === 'outputPrediction') { const chain = new LLMChain({ llm: model, - outputParser: llmOutputParser, + outputParser: this.outputParser as BaseLLMOutputParser, prompt, verbose: process.env.DEBUG === 'true' }) @@ -104,7 +117,10 @@ class LLMChain_Chains implements INode { const chain = nodeData.instance as LLMChain let promptValues: ICommonObject | undefined = 
nodeData.inputs?.prompt.promptValues as ICommonObject const outputParser = nodeData.inputs?.outputParser as BaseOutputParser - promptValues = injectOutputParser(outputParser, chain, promptValues) + if (!this.outputParser && outputParser) { + this.outputParser = outputParser + } + promptValues = injectOutputParser(this.outputParser, chain, promptValues) const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData) // eslint-disable-next-line no-console console.log('\x1b[93m\x1b[1m\n*****FINAL RESULT*****\n\x1b[0m\x1b[0m') diff --git a/packages/components/nodes/outputparsers/csvlist/CSVListOutputParser.ts b/packages/components/nodes/outputparsers/csvlist/CSVListOutputParser.ts index 4bc87851a66..31ada42e367 100644 --- a/packages/components/nodes/outputparsers/csvlist/CSVListOutputParser.ts +++ b/packages/components/nodes/outputparsers/csvlist/CSVListOutputParser.ts @@ -24,12 +24,29 @@ class CSVListOutputParser implements INode { this.icon = 'csv.png' this.category = CATEGORY this.baseClasses = [this.type, ...getBaseClasses(BaseOutputParser)] - this.inputs = [] + this.inputs = [ + { + label: 'Autofix', + name: 'autofixParser', + type: 'boolean', + rows: 4, + description: 'In the event that the first call fails, will make another call to the model to fix any errors.' 
+ } + ] } // eslint-disable-next-line unused-imports/no-unused-vars async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { - return new CommaSeparatedListOutputParser() + const autoFix = nodeData.inputs?.autofixParser as boolean + + const commaSeparatedListOutputParser = new CommaSeparatedListOutputParser() + Object.defineProperty(commaSeparatedListOutputParser, 'autoFix', { + enumerable: true, + configurable: true, + writable: true, + value: autoFix + }) + return commaSeparatedListOutputParser } } diff --git a/packages/components/nodes/outputparsers/customlist/CustomListOutputParser.ts b/packages/components/nodes/outputparsers/customlist/CustomListOutputParser.ts index 1e9617c3fdb..f9104f08e35 100644 --- a/packages/components/nodes/outputparsers/customlist/CustomListOutputParser.ts +++ b/packages/components/nodes/outputparsers/customlist/CustomListOutputParser.ts @@ -39,6 +39,13 @@ class CustomListOutputParser implements INode { type: 'string', description: 'Separator between values', default: ',' + }, + { + label: 'Autofix', + name: 'autofixParser', + type: 'boolean', + rows: 4, + description: 'In the event that the first call fails, will make another call to the model to fix any errors.' 
} ] } @@ -47,9 +54,17 @@ class CustomListOutputParser implements INode { async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { const separator = nodeData.inputs?.separator as string const lengthStr = nodeData.inputs?.length as string + const autoFix = nodeData.inputs?.autofixParser as boolean let length = 5 if (lengthStr) length = parseInt(lengthStr, 10) - return new LangchainCustomListOutputParser({ length: length, separator: separator }) + const parser = new LangchainCustomListOutputParser({ length: length, separator: separator }) + Object.defineProperty(parser, 'autoFix', { + enumerable: true, + configurable: true, + writable: true, + value: autoFix + }) + return parser } } diff --git a/packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts b/packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts index ef04de7de3d..30bf0bf564a 100644 --- a/packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts +++ b/packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts @@ -48,6 +48,13 @@ class StructuredOutputParser implements INode { ' answer: "answer to the question",\n' + ' source: "source used to answer the question, should be a website.",\n' + '}' + }, + { + label: 'Autofix', + name: 'autofixParser', + type: 'boolean', + rows: 4, + description: 'In the event that the first call fails, will make another call to the model to fix any errors.' 
} ] } @@ -56,11 +63,22 @@ class StructuredOutputParser implements INode { async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { const structureType = nodeData.inputs?.structureType as string const structure = nodeData.inputs?.structure as string + const autoFix = nodeData.inputs?.autofixParser as boolean + let parsedStructure: any | undefined = undefined if (structure && structureType === 'fromNamesAndDescriptions') { try { parsedStructure = JSON.parse(structure) - return LangchainStructuredOutputParser.fromNamesAndDescriptions(parsedStructure) + + // NOTE: When we change Flowise to return a json response, the following has to be changed to: JsonStructuredOutputParser + let structuredOutputParser = LangchainStructuredOutputParser.fromNamesAndDescriptions(parsedStructure) + Object.defineProperty(structuredOutputParser, 'autoFix', { + enumerable: true, + configurable: true, + writable: true, + value: autoFix + }) + return structuredOutputParser } catch (exception) { throw new Error('Invalid JSON in StructuredOutputParser: ' + exception) } From ca1f6971547972e1b007b564751a1813278e8d06 Mon Sep 17 00:00:00 2001 From: Henry Date: Sun, 29 Oct 2023 10:27:04 +0000 Subject: [PATCH 06/13] return JSON output in the chat --- .../nodes/chains/LLMChain/LLMChain.ts | 29 ++++++++++--------- .../outputparsers/OutputParserHelpers.ts | 13 ++------- .../vectorstores/Milvus/Milvus_Upsert.ts | 2 +- packages/components/src/handler.ts | 5 +++- packages/server/src/index.ts | 5 +++- .../ui/src/views/chatmessage/ChatMessage.js | 6 ++++ 6 files changed, 33 insertions(+), 27 deletions(-) diff --git a/packages/components/nodes/chains/LLMChain/LLMChain.ts b/packages/components/nodes/chains/LLMChain/LLMChain.ts index 7ec777be091..7d45082535a 100644 --- a/packages/components/nodes/chains/LLMChain/LLMChain.ts +++ b/packages/components/nodes/chains/LLMChain/LLMChain.ts @@ -4,7 +4,7 @@ import { LLMChain } from 'langchain/chains' import { BaseLanguageModel } from 
'langchain/base_language' import { ConsoleCallbackHandler, CustomChainHandler, additionalCallbacks } from '../../../src/handler' import { BaseOutputParser } from 'langchain/schema/output_parser' -import { injectOutputParser } from '../../outputparsers/OutputParserHelpers' +import { formatResponse, injectOutputParser } from '../../outputparsers/OutputParserHelpers' import { BaseLLMOutputParser } from 'langchain/schema/output_parser' import { OutputFixingParser } from 'langchain/output_parsers' @@ -98,7 +98,7 @@ class LLMChain_Chains implements INode { verbose: process.env.DEBUG === 'true' }) const inputVariables = chain.prompt.inputVariables as string[] // ["product"] - const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData) + const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData, this.outputParser) // eslint-disable-next-line no-console console.log('\x1b[92m\x1b[1m\n*****OUTPUT PREDICTION*****\n\x1b[0m\x1b[0m') // eslint-disable-next-line no-console @@ -112,7 +112,7 @@ class LLMChain_Chains implements INode { } } - async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { + async run(nodeData: INodeData, input: string, options: ICommonObject): Promise { const inputVariables = nodeData.instance.prompt.inputVariables as string[] // ["product"] const chain = nodeData.instance as LLMChain let promptValues: ICommonObject | undefined = nodeData.inputs?.prompt.promptValues as ICommonObject @@ -121,7 +121,7 @@ class LLMChain_Chains implements INode { this.outputParser = outputParser } promptValues = injectOutputParser(this.outputParser, chain, promptValues) - const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData) + const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData, this.outputParser) // eslint-disable-next-line no-console console.log('\x1b[93m\x1b[1m\n*****FINAL 
RESULT*****\n\x1b[0m\x1b[0m') // eslint-disable-next-line no-console @@ -136,7 +136,8 @@ const runPrediction = async ( input: string, promptValuesRaw: ICommonObject | undefined, options: ICommonObject, - nodeData: INodeData + nodeData: INodeData, + outputParser: BaseOutputParser ) => { const loggerHandler = new ConsoleCallbackHandler(options.logger) const callbacks = await additionalCallbacks(nodeData, options) @@ -166,12 +167,12 @@ const runPrediction = async ( // All inputVariables have fixed values specified const options = { ...promptValues } if (isStreaming) { - const handler = new CustomChainHandler(socketIO, socketIOClientId) + const handler = new CustomChainHandler(socketIO, socketIOClientId, undefined, undefined, outputParser ? true : undefined) const res = await chain.call(options, [loggerHandler, handler, ...callbacks]) - return res?.text + return formatResponse(res?.text) } else { const res = await chain.call(options, [loggerHandler, ...callbacks]) - return res?.text + return formatResponse(res?.text) } } else if (seen.length === 1) { // If one inputVariable is not specify, use input (user's question) as value @@ -182,24 +183,24 @@ const runPrediction = async ( [lastValue]: input } if (isStreaming) { - const handler = new CustomChainHandler(socketIO, socketIOClientId) + const handler = new CustomChainHandler(socketIO, socketIOClientId, undefined, undefined, outputParser ? true : undefined) const res = await chain.call(options, [loggerHandler, handler, ...callbacks]) - return res?.text + return formatResponse(res?.text) } else { const res = await chain.call(options, [loggerHandler, ...callbacks]) - return res?.text + return formatResponse(res?.text) } } else { throw new Error(`Please provide Prompt Values for: ${seen.join(', ')}`) } } else { if (isStreaming) { - const handler = new CustomChainHandler(socketIO, socketIOClientId) + const handler = new CustomChainHandler(socketIO, socketIOClientId, undefined, undefined, outputParser ? 
true : undefined) const res = await chain.run(input, [loggerHandler, handler, ...callbacks]) - return res + return formatResponse(res) } else { const res = await chain.run(input, [loggerHandler, ...callbacks]) - return res + return formatResponse(res) } } } diff --git a/packages/components/nodes/outputparsers/OutputParserHelpers.ts b/packages/components/nodes/outputparsers/OutputParserHelpers.ts index 87a59170d44..63dd4838284 100644 --- a/packages/components/nodes/outputparsers/OutputParserHelpers.ts +++ b/packages/components/nodes/outputparsers/OutputParserHelpers.ts @@ -6,16 +6,9 @@ import { ChatPromptTemplate, FewShotPromptTemplate, PromptTemplate, SystemMessag export const CATEGORY = 'Output Parser (Experimental)' -export const applyOutputParser = async (response: string, outputParser: BaseOutputParser | undefined): Promise => { - if (outputParser) { - const parsedResponse = await outputParser.parse(response) - // eslint-disable-next-line no-console - console.log('**** parsedResponse ****', parsedResponse) - if (typeof parsedResponse === 'object') { - return JSON.stringify(parsedResponse) - } else { - return parsedResponse as string - } +export const formatResponse = (response: string | object): string | object => { + if (typeof response === 'object') { + return { json: response } } return response } diff --git a/packages/components/nodes/vectorstores/Milvus/Milvus_Upsert.ts b/packages/components/nodes/vectorstores/Milvus/Milvus_Upsert.ts index ca69cb39531..40afe9a4824 100644 --- a/packages/components/nodes/vectorstores/Milvus/Milvus_Upsert.ts +++ b/packages/components/nodes/vectorstores/Milvus/Milvus_Upsert.ts @@ -252,7 +252,7 @@ class MilvusUpsert extends Milvus { collection_name: this.collectionName }) - if (descIndexResp.status.error_code === ErrorCode.INDEX_NOT_EXIST) { + if (descIndexResp.status.error_code === ErrorCode.IndexNotExist) { const resp = await this.client.createIndex({ collection_name: this.collectionName, field_name: this.vectorField, diff 
--git a/packages/components/src/handler.ts b/packages/components/src/handler.ts index 37075342f12..c0ee67e45cd 100644 --- a/packages/components/src/handler.ts +++ b/packages/components/src/handler.ts @@ -152,13 +152,15 @@ export class CustomChainHandler extends BaseCallbackHandler { skipK = 0 // Skip streaming for first K numbers of handleLLMStart returnSourceDocuments = false cachedResponse = true + isOutputParser = false - constructor(socketIO: Server, socketIOClientId: string, skipK?: number, returnSourceDocuments?: boolean) { + constructor(socketIO: Server, socketIOClientId: string, skipK?: number, returnSourceDocuments?: boolean, isOutputParser?: boolean) { super() this.socketIO = socketIO this.socketIOClientId = socketIOClientId this.skipK = skipK ?? this.skipK this.returnSourceDocuments = returnSourceDocuments ?? this.returnSourceDocuments + this.isOutputParser = isOutputParser ?? this.isOutputParser } handleLLMStart() { @@ -171,6 +173,7 @@ export class CustomChainHandler extends BaseCallbackHandler { if (!this.isLLMStarted) { this.isLLMStarted = true this.socketIO.to(this.socketIOClientId).emit('start', token) + if (this.isOutputParser) this.socketIO.to(this.socketIOClientId).emit('token', '```json') } this.socketIO.to(this.socketIOClientId).emit('token', token) } diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 9d3f7052400..1afb8396ac9 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -980,7 +980,7 @@ export class App { if (nodeToExecuteData.instance) checkMemorySessionId(nodeToExecuteData.instance, chatId) - const result = isStreamValid + let result = isStreamValid ? await nodeInstance.run(nodeToExecuteData, incomingInput.question, { chatHistory: incomingInput.history, socketIO, @@ -998,7 +998,10 @@ export class App { analytic: chatflow.analytic }) + result = typeof result === 'string' ? 
{ text: result } : result + logger.debug(`[server]: Finished running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`) + return res.json(result) } catch (e: any) { logger.error('[server]: Error:', e) diff --git a/packages/ui/src/views/chatmessage/ChatMessage.js b/packages/ui/src/views/chatmessage/ChatMessage.js index 3e96754148a..d199259bc42 100644 --- a/packages/ui/src/views/chatmessage/ChatMessage.js +++ b/packages/ui/src/views/chatmessage/ChatMessage.js @@ -165,6 +165,12 @@ export const ChatMessage = ({ open, chatflowid, isDialog }) => { ]) } addChatMessage(data.text, 'apiMessage', data.sourceDocuments) + } else if (typeof data === 'object' && data.json) { + const text = '```json' + JSON.stringify(data.json, null, 2) + if (!isChatFlowAvailableToStream) { + setMessages((prevMessages) => [...prevMessages, { message: text, type: 'apiMessage' }]) + } + addChatMessage(text, 'apiMessage') } else { if (!isChatFlowAvailableToStream) { setMessages((prevMessages) => [...prevMessages, { message: data, type: 'apiMessage' }]) From 31f7d1641d84ce3797fe5478d0d7c44dba4fe553 Mon Sep 17 00:00:00 2001 From: Henry Date: Tue, 31 Oct 2023 14:07:29 +0000 Subject: [PATCH 07/13] disabled output parser streaming --- .../components/nodes/chains/LLMChain/LLMChain.ts | 14 +++++++------- .../CSVListOutputParser.ts | 7 +++---- .../{csvlist => CSVListOutputParser}/csv.png | Bin .../CustomListOutputParser.ts | 8 ++++---- .../list.png | Bin .../StructuredOutputParser.ts | 7 +++---- .../structure.png | Bin packages/components/src/handler.ts | 5 +---- packages/server/src/index.ts | 3 --- packages/server/src/utils/index.ts | 11 ++++++++++- packages/ui/src/views/chatmessage/ChatMessage.js | 5 ++++- 11 files changed, 32 insertions(+), 28 deletions(-) rename packages/components/nodes/outputparsers/{csvlist => CSVListOutputParser}/CSVListOutputParser.ts (85%) rename packages/components/nodes/outputparsers/{csvlist => CSVListOutputParser}/csv.png (100%) rename 
packages/components/nodes/outputparsers/{customlist => CustomListOutputParser}/CustomListOutputParser.ts (89%) rename packages/components/nodes/outputparsers/{customlist => CustomListOutputParser}/list.png (100%) rename packages/components/nodes/outputparsers/{structured => StructuredOutputParser}/StructuredOutputParser.ts (92%) rename packages/components/nodes/outputparsers/{structured => StructuredOutputParser}/structure.png (100%) diff --git a/packages/components/nodes/chains/LLMChain/LLMChain.ts b/packages/components/nodes/chains/LLMChain/LLMChain.ts index 7d45082535a..4f8076b739b 100644 --- a/packages/components/nodes/chains/LLMChain/LLMChain.ts +++ b/packages/components/nodes/chains/LLMChain/LLMChain.ts @@ -98,7 +98,7 @@ class LLMChain_Chains implements INode { verbose: process.env.DEBUG === 'true' }) const inputVariables = chain.prompt.inputVariables as string[] // ["product"] - const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData, this.outputParser) + const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData) // eslint-disable-next-line no-console console.log('\x1b[92m\x1b[1m\n*****OUTPUT PREDICTION*****\n\x1b[0m\x1b[0m') // eslint-disable-next-line no-console @@ -121,7 +121,7 @@ class LLMChain_Chains implements INode { this.outputParser = outputParser } promptValues = injectOutputParser(this.outputParser, chain, promptValues) - const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData, this.outputParser) + const res = await runPrediction(inputVariables, chain, input, promptValues, options, nodeData) // eslint-disable-next-line no-console console.log('\x1b[93m\x1b[1m\n*****FINAL RESULT*****\n\x1b[0m\x1b[0m') // eslint-disable-next-line no-console @@ -136,8 +136,7 @@ const runPrediction = async ( input: string, promptValuesRaw: ICommonObject | undefined, options: ICommonObject, - nodeData: INodeData, - outputParser: BaseOutputParser + nodeData: 
INodeData ) => { const loggerHandler = new ConsoleCallbackHandler(options.logger) const callbacks = await additionalCallbacks(nodeData, options) @@ -167,7 +166,7 @@ const runPrediction = async ( // All inputVariables have fixed values specified const options = { ...promptValues } if (isStreaming) { - const handler = new CustomChainHandler(socketIO, socketIOClientId, undefined, undefined, outputParser ? true : undefined) + const handler = new CustomChainHandler(socketIO, socketIOClientId) const res = await chain.call(options, [loggerHandler, handler, ...callbacks]) return formatResponse(res?.text) } else { @@ -183,11 +182,12 @@ const runPrediction = async ( [lastValue]: input } if (isStreaming) { - const handler = new CustomChainHandler(socketIO, socketIOClientId, undefined, undefined, outputParser ? true : undefined) + const handler = new CustomChainHandler(socketIO, socketIOClientId) const res = await chain.call(options, [loggerHandler, handler, ...callbacks]) return formatResponse(res?.text) } else { const res = await chain.call(options, [loggerHandler, ...callbacks]) + console.log('formatResponse=', formatResponse(res?.text)) return formatResponse(res?.text) } } else { @@ -195,7 +195,7 @@ const runPrediction = async ( } } else { if (isStreaming) { - const handler = new CustomChainHandler(socketIO, socketIOClientId, undefined, undefined, outputParser ? 
true : undefined) + const handler = new CustomChainHandler(socketIO, socketIOClientId) const res = await chain.run(input, [loggerHandler, handler, ...callbacks]) return formatResponse(res) } else { diff --git a/packages/components/nodes/outputparsers/csvlist/CSVListOutputParser.ts b/packages/components/nodes/outputparsers/CSVListOutputParser/CSVListOutputParser.ts similarity index 85% rename from packages/components/nodes/outputparsers/csvlist/CSVListOutputParser.ts rename to packages/components/nodes/outputparsers/CSVListOutputParser/CSVListOutputParser.ts index 31ada42e367..8758d4f789e 100644 --- a/packages/components/nodes/outputparsers/csvlist/CSVListOutputParser.ts +++ b/packages/components/nodes/outputparsers/CSVListOutputParser/CSVListOutputParser.ts @@ -1,4 +1,4 @@ -import { getBaseClasses, ICommonObject, INode, INodeData, INodeParams } from '../../../src' +import { getBaseClasses, INode, INodeData, INodeParams } from '../../../src' import { BaseOutputParser } from 'langchain/schema/output_parser' import { CommaSeparatedListOutputParser } from 'langchain/output_parsers' import { CATEGORY } from '../OutputParserHelpers' @@ -29,14 +29,13 @@ class CSVListOutputParser implements INode { label: 'Autofix', name: 'autofixParser', type: 'boolean', - rows: 4, + optional: true, description: 'In the event that the first call fails, will make another call to the model to fix any errors.' 
} ] } - // eslint-disable-next-line unused-imports/no-unused-vars - async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + async init(nodeData: INodeData): Promise { const autoFix = nodeData.inputs?.autofixParser as boolean const commaSeparatedListOutputParser = new CommaSeparatedListOutputParser() diff --git a/packages/components/nodes/outputparsers/csvlist/csv.png b/packages/components/nodes/outputparsers/CSVListOutputParser/csv.png similarity index 100% rename from packages/components/nodes/outputparsers/csvlist/csv.png rename to packages/components/nodes/outputparsers/CSVListOutputParser/csv.png diff --git a/packages/components/nodes/outputparsers/customlist/CustomListOutputParser.ts b/packages/components/nodes/outputparsers/CustomListOutputParser/CustomListOutputParser.ts similarity index 89% rename from packages/components/nodes/outputparsers/customlist/CustomListOutputParser.ts rename to packages/components/nodes/outputparsers/CustomListOutputParser/CustomListOutputParser.ts index f9104f08e35..06523afba56 100644 --- a/packages/components/nodes/outputparsers/customlist/CustomListOutputParser.ts +++ b/packages/components/nodes/outputparsers/CustomListOutputParser/CustomListOutputParser.ts @@ -1,4 +1,4 @@ -import { getBaseClasses, ICommonObject, INode, INodeData, INodeParams } from '../../../src' +import { getBaseClasses, INode, INodeData, INodeParams } from '../../../src' import { BaseOutputParser } from 'langchain/schema/output_parser' import { CustomListOutputParser as LangchainCustomListOutputParser } from 'langchain/output_parsers' import { CATEGORY } from '../OutputParserHelpers' @@ -44,19 +44,19 @@ class CustomListOutputParser implements INode { label: 'Autofix', name: 'autofixParser', type: 'boolean', - rows: 4, + optional: true, description: 'In the event that the first call fails, will make another call to the model to fix any errors.' 
} ] } - // eslint-disable-next-line unused-imports/no-unused-vars - async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + async init(nodeData: INodeData): Promise { const separator = nodeData.inputs?.separator as string const lengthStr = nodeData.inputs?.length as string const autoFix = nodeData.inputs?.autofixParser as boolean let length = 5 if (lengthStr) length = parseInt(lengthStr, 10) + const parser = new LangchainCustomListOutputParser({ length: length, separator: separator }) Object.defineProperty(parser, 'autoFix', { enumerable: true, diff --git a/packages/components/nodes/outputparsers/customlist/list.png b/packages/components/nodes/outputparsers/CustomListOutputParser/list.png similarity index 100% rename from packages/components/nodes/outputparsers/customlist/list.png rename to packages/components/nodes/outputparsers/CustomListOutputParser/list.png diff --git a/packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts b/packages/components/nodes/outputparsers/StructuredOutputParser/StructuredOutputParser.ts similarity index 92% rename from packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts rename to packages/components/nodes/outputparsers/StructuredOutputParser/StructuredOutputParser.ts index 30bf0bf564a..a0927106da2 100644 --- a/packages/components/nodes/outputparsers/structured/StructuredOutputParser.ts +++ b/packages/components/nodes/outputparsers/StructuredOutputParser/StructuredOutputParser.ts @@ -1,4 +1,4 @@ -import { getBaseClasses, ICommonObject, INode, INodeData, INodeParams } from '../../../src' +import { getBaseClasses, INode, INodeData, INodeParams } from '../../../src' import { BaseOutputParser } from 'langchain/schema/output_parser' import { StructuredOutputParser as LangchainStructuredOutputParser } from 'langchain/output_parsers' import { CATEGORY } from '../OutputParserHelpers' @@ -53,14 +53,13 @@ class StructuredOutputParser implements INode { label: 'Autofix', name: 
'autofixParser', type: 'boolean', - rows: 4, + optional: true, description: 'In the event that the first call fails, will make another call to the model to fix any errors.' } ] } - // eslint-disable-next-line unused-imports/no-unused-vars - async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + async init(nodeData: INodeData): Promise { const structureType = nodeData.inputs?.structureType as string const structure = nodeData.inputs?.structure as string const autoFix = nodeData.inputs?.autofixParser as boolean diff --git a/packages/components/nodes/outputparsers/structured/structure.png b/packages/components/nodes/outputparsers/StructuredOutputParser/structure.png similarity index 100% rename from packages/components/nodes/outputparsers/structured/structure.png rename to packages/components/nodes/outputparsers/StructuredOutputParser/structure.png diff --git a/packages/components/src/handler.ts b/packages/components/src/handler.ts index c0ee67e45cd..37075342f12 100644 --- a/packages/components/src/handler.ts +++ b/packages/components/src/handler.ts @@ -152,15 +152,13 @@ export class CustomChainHandler extends BaseCallbackHandler { skipK = 0 // Skip streaming for first K numbers of handleLLMStart returnSourceDocuments = false cachedResponse = true - isOutputParser = false - constructor(socketIO: Server, socketIOClientId: string, skipK?: number, returnSourceDocuments?: boolean, isOutputParser?: boolean) { + constructor(socketIO: Server, socketIOClientId: string, skipK?: number, returnSourceDocuments?: boolean) { super() this.socketIO = socketIO this.socketIOClientId = socketIOClientId this.skipK = skipK ?? this.skipK this.returnSourceDocuments = returnSourceDocuments ?? this.returnSourceDocuments - this.isOutputParser = isOutputParser ?? 
this.isOutputParser } handleLLMStart() { @@ -173,7 +171,6 @@ export class CustomChainHandler extends BaseCallbackHandler { if (!this.isLLMStarted) { this.isLLMStarted = true this.socketIO.to(this.socketIOClientId).emit('start', token) - if (this.isOutputParser) this.socketIO.to(this.socketIOClientId).emit('token', '```json') } this.socketIO.to(this.socketIOClientId).emit('token', token) } diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 1afb8396ac9..31ad9277cd4 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -998,10 +998,7 @@ export class App { analytic: chatflow.analytic }) - result = typeof result === 'string' ? { text: result } : result - logger.debug(`[server]: Finished running ${nodeToExecuteData.label} (${nodeToExecuteData.id})`) - return res.json(result) } catch (e: any) { logger.error('[server]: Error:', e) diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts index 317bcc0c26d..31961c3ad25 100644 --- a/packages/server/src/utils/index.ts +++ b/packages/server/src/utils/index.ts @@ -804,7 +804,16 @@ export const isFlowValidForStream = (reactFlowNodes: IReactFlowNode[], endingNod isValidChainOrAgent = whitelistAgents.includes(endingNodeData.name) } - return isChatOrLLMsExist && isValidChainOrAgent + // If no output parser, flow is available to stream + let isOutputParserExist = false + for (const flowNode of reactFlowNodes) { + const data = flowNode.data + if (data.category.includes('Output Parser')) { + isOutputParserExist = true + } + } + + return isChatOrLLMsExist && isValidChainOrAgent && !isOutputParserExist } /** diff --git a/packages/ui/src/views/chatmessage/ChatMessage.js b/packages/ui/src/views/chatmessage/ChatMessage.js index d199259bc42..8c9a486daa7 100644 --- a/packages/ui/src/views/chatmessage/ChatMessage.js +++ b/packages/ui/src/views/chatmessage/ChatMessage.js @@ -164,17 +164,20 @@ export const ChatMessage = ({ open, chatflowid, isDialog }) => { { 
message: data.text, sourceDocuments: data.sourceDocuments, type: 'apiMessage' } ]) } + console.log('here1=', data.text) addChatMessage(data.text, 'apiMessage', data.sourceDocuments) } else if (typeof data === 'object' && data.json) { - const text = '```json' + JSON.stringify(data.json, null, 2) + const text = '```json\n' + JSON.stringify(data.json, null, 2) if (!isChatFlowAvailableToStream) { setMessages((prevMessages) => [...prevMessages, { message: text, type: 'apiMessage' }]) } + console.log('here2=', text) addChatMessage(text, 'apiMessage') } else { if (!isChatFlowAvailableToStream) { setMessages((prevMessages) => [...prevMessages, { message: data, type: 'apiMessage' }]) } + console.log('here3=', data) addChatMessage(data, 'apiMessage') } setLoading(false) From 229f7cfbc40a642e3bd4bb48835886dd946b324b Mon Sep 17 00:00:00 2001 From: Henry Date: Tue, 31 Oct 2023 14:11:11 +0000 Subject: [PATCH 08/13] remove console log --- packages/components/nodes/chains/LLMChain/LLMChain.ts | 1 - packages/server/src/index.ts | 2 +- packages/ui/src/views/chatmessage/ChatMessage.js | 3 --- 3 files changed, 1 insertion(+), 5 deletions(-) diff --git a/packages/components/nodes/chains/LLMChain/LLMChain.ts b/packages/components/nodes/chains/LLMChain/LLMChain.ts index 4f8076b739b..c180aeca792 100644 --- a/packages/components/nodes/chains/LLMChain/LLMChain.ts +++ b/packages/components/nodes/chains/LLMChain/LLMChain.ts @@ -187,7 +187,6 @@ const runPrediction = async ( return formatResponse(res?.text) } else { const res = await chain.call(options, [loggerHandler, ...callbacks]) - console.log('formatResponse=', formatResponse(res?.text)) return formatResponse(res?.text) } } else { diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index 31ad9277cd4..9d3f7052400 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -980,7 +980,7 @@ export class App { if (nodeToExecuteData.instance) checkMemorySessionId(nodeToExecuteData.instance, chatId) - let 
result = isStreamValid + const result = isStreamValid ? await nodeInstance.run(nodeToExecuteData, incomingInput.question, { chatHistory: incomingInput.history, socketIO, diff --git a/packages/ui/src/views/chatmessage/ChatMessage.js b/packages/ui/src/views/chatmessage/ChatMessage.js index 8c9a486daa7..3035163dfff 100644 --- a/packages/ui/src/views/chatmessage/ChatMessage.js +++ b/packages/ui/src/views/chatmessage/ChatMessage.js @@ -164,20 +164,17 @@ export const ChatMessage = ({ open, chatflowid, isDialog }) => { { message: data.text, sourceDocuments: data.sourceDocuments, type: 'apiMessage' } ]) } - console.log('here1=', data.text) addChatMessage(data.text, 'apiMessage', data.sourceDocuments) } else if (typeof data === 'object' && data.json) { const text = '```json\n' + JSON.stringify(data.json, null, 2) if (!isChatFlowAvailableToStream) { setMessages((prevMessages) => [...prevMessages, { message: text, type: 'apiMessage' }]) } - console.log('here2=', text) addChatMessage(text, 'apiMessage') } else { if (!isChatFlowAvailableToStream) { setMessages((prevMessages) => [...prevMessages, { message: data, type: 'apiMessage' }]) } - console.log('here3=', data) addChatMessage(data, 'apiMessage') } setLoading(false) From f2b92afc24812c9374e29e9da2a0422d46314693 Mon Sep 17 00:00:00 2001 From: Henry Date: Tue, 31 Oct 2023 22:12:09 +0000 Subject: [PATCH 09/13] add datagrid field type --- .../StructuredOutputParser.ts | 89 +++++++------- .../nodes/tools/CustomTool/CustomTool.ts | 24 +--- packages/components/src/Interface.ts | 2 + packages/components/src/utils.ts | 28 +++++ packages/ui/src/ui-component/grid/DataGrid.js | 111 ++++++++++++++++++ packages/ui/src/utils/genericHelper.js | 15 +++ .../ui/src/views/canvas/NodeInputHandler.js | 10 ++ packages/ui/src/views/tools/ToolDialog.js | 24 +--- 8 files changed, 217 insertions(+), 86 deletions(-) create mode 100644 packages/ui/src/ui-component/grid/DataGrid.js diff --git 
a/packages/components/nodes/outputparsers/StructuredOutputParser/StructuredOutputParser.ts b/packages/components/nodes/outputparsers/StructuredOutputParser/StructuredOutputParser.ts index a0927106da2..10a5f0bbe5b 100644 --- a/packages/components/nodes/outputparsers/StructuredOutputParser/StructuredOutputParser.ts +++ b/packages/components/nodes/outputparsers/StructuredOutputParser/StructuredOutputParser.ts @@ -1,7 +1,8 @@ -import { getBaseClasses, INode, INodeData, INodeParams } from '../../../src' +import { convertSchemaToZod, getBaseClasses, INode, INodeData, INodeParams } from '../../../src' import { BaseOutputParser } from 'langchain/schema/output_parser' import { StructuredOutputParser as LangchainStructuredOutputParser } from 'langchain/output_parsers' import { CATEGORY } from '../OutputParserHelpers' +import { z } from 'zod' class StructuredOutputParser implements INode { label: string @@ -24,65 +25,65 @@ class StructuredOutputParser implements INode { this.icon = 'structure.png' this.category = CATEGORY this.baseClasses = [this.type, ...getBaseClasses(BaseOutputParser)] - //TODO: To extend the structureType to ZodSchema this.inputs = [ - { - label: 'Structure Type', - name: 'structureType', - type: 'options', - options: [ - { - label: 'Names And Descriptions', - name: 'fromNamesAndDescriptions' - } - ], - default: 'fromNamesAndDescriptions' - }, - { - label: 'Structure', - name: 'structure', - type: 'string', - rows: 4, - placeholder: - '{' + - ' answer: "answer to the question",\n' + - ' source: "source used to answer the question, should be a website.",\n' + - '}' - }, { label: 'Autofix', name: 'autofixParser', type: 'boolean', optional: true, description: 'In the event that the first call fails, will make another call to the model to fix any errors.' 
+ }, + { + label: 'JSON Structure', + name: 'jsonStructure', + type: 'datagrid', + description: 'JSON structure for LLM to return', + datagrid: [ + { field: 'property', headerName: 'Property', editable: true }, + { + field: 'type', + headerName: 'Type', + type: 'singleSelect', + valueOptions: ['string', 'number', 'boolean'], + editable: true + }, + { field: 'description', headerName: 'Description', editable: true, flex: 1 } + ], + default: [ + { + property: 'answer', + type: 'string', + description: `answer to the user's question` + }, + { + property: 'source', + type: 'string', + description: `sources used to answer the question, should be websites` + } + ], + additionalParams: true } ] } async init(nodeData: INodeData): Promise { - const structureType = nodeData.inputs?.structureType as string - const structure = nodeData.inputs?.structure as string + const jsonStructure = nodeData.inputs?.jsonStructure as string const autoFix = nodeData.inputs?.autofixParser as boolean - let parsedStructure: any | undefined = undefined - if (structure && structureType === 'fromNamesAndDescriptions') { - try { - parsedStructure = JSON.parse(structure) + try { + const structuredOutputParser = LangchainStructuredOutputParser.fromZodSchema(z.object(convertSchemaToZod(jsonStructure))) - // NOTE: When we change Flowise to return a json response, the following has to be changed to: JsonStructuredOutputParser - let structuredOutputParser = LangchainStructuredOutputParser.fromNamesAndDescriptions(parsedStructure) - Object.defineProperty(structuredOutputParser, 'autoFix', { - enumerable: true, - configurable: true, - writable: true, - value: autoFix - }) - return structuredOutputParser - } catch (exception) { - throw new Error('Invalid JSON in StructuredOutputParser: ' + exception) - } + // NOTE: When we change Flowise to return a json response, the following has to be changed to: JsonStructuredOutputParser + Object.defineProperty(structuredOutputParser, 'autoFix', { + enumerable: true, + 
configurable: true, + writable: true, + value: autoFix + }) + return structuredOutputParser + } catch (exception) { + throw new Error('Invalid JSON in StructuredOutputParser: ' + exception) } - throw new Error('Error creating OutputParser.') } } diff --git a/packages/components/nodes/tools/CustomTool/CustomTool.ts b/packages/components/nodes/tools/CustomTool/CustomTool.ts index c070df3176f..541edcf07d9 100644 --- a/packages/components/nodes/tools/CustomTool/CustomTool.ts +++ b/packages/components/nodes/tools/CustomTool/CustomTool.ts @@ -1,5 +1,5 @@ import { ICommonObject, IDatabaseEntity, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface' -import { getBaseClasses } from '../../../src/utils' +import { convertSchemaToZod, getBaseClasses } from '../../../src/utils' import { DynamicStructuredTool } from './core' import { z } from 'zod' import { DataSource } from 'typeorm' @@ -87,26 +87,4 @@ class CustomTool_Tools implements INode { } } -const convertSchemaToZod = (schema: string) => { - try { - const parsedSchema = JSON.parse(schema) - const zodObj: any = {} - for (const sch of parsedSchema) { - if (sch.type === 'string') { - if (sch.required) z.string({ required_error: `${sch.property} required` }).describe(sch.description) - zodObj[sch.property] = z.string().describe(sch.description) - } else if (sch.type === 'number') { - if (sch.required) z.number({ required_error: `${sch.property} required` }).describe(sch.description) - zodObj[sch.property] = z.number().describe(sch.description) - } else if (sch.type === 'boolean') { - if (sch.required) z.boolean({ required_error: `${sch.property} required` }).describe(sch.description) - zodObj[sch.property] = z.boolean().describe(sch.description) - } - } - return zodObj - } catch (e) { - throw new Error(e) - } -} - module.exports = { nodeClass: CustomTool_Tools } diff --git a/packages/components/src/Interface.ts b/packages/components/src/Interface.ts index d0694d6f788..5008813be0b 100644 --- 
a/packages/components/src/Interface.ts +++ b/packages/components/src/Interface.ts @@ -6,6 +6,7 @@ export type NodeParamsType = | 'asyncOptions' | 'options' | 'multiOptions' + | 'datagrid' | 'string' | 'number' | 'boolean' @@ -60,6 +61,7 @@ export interface INodeParams { description?: string warning?: string options?: Array + datagrid?: Array credentialNames?: Array optional?: boolean | INodeDisplay step?: number diff --git a/packages/components/src/utils.ts b/packages/components/src/utils.ts index 8f33683cc75..9ef30a0e847 100644 --- a/packages/components/src/utils.ts +++ b/packages/components/src/utils.ts @@ -3,6 +3,7 @@ import { load } from 'cheerio' import * as fs from 'fs' import * as path from 'path' import { JSDOM } from 'jsdom' +import { z } from 'zod' import { DataSource } from 'typeorm' import { ICommonObject, IDatabaseEntity, IMessage, INodeData } from './Interface' import { AES, enc } from 'crypto-js' @@ -546,3 +547,30 @@ export const convertChatHistoryToText = (chatHistory: IMessage[] = []): string = }) .join('\n') } + +/** + * Convert schema to zod schema + * @param {string} schema + * @returns {ICommonObject} + */ +export const convertSchemaToZod = (schema: string) => { + try { + const parsedSchema = JSON.parse(schema) + const zodObj: ICommonObject = {} + for (const sch of parsedSchema) { + if (sch.type === 'string') { + if (sch.required) z.string({ required_error: `${sch.property} required` }).describe(sch.description) + zodObj[sch.property] = z.string().describe(sch.description) + } else if (sch.type === 'number') { + if (sch.required) z.number({ required_error: `${sch.property} required` }).describe(sch.description) + zodObj[sch.property] = z.number().describe(sch.description) + } else if (sch.type === 'boolean') { + if (sch.required) z.boolean({ required_error: `${sch.property} required` }).describe(sch.description) + zodObj[sch.property] = z.boolean().describe(sch.description) + } + } + return zodObj + } catch (e) { + throw new Error(e) + } +} 
diff --git a/packages/ui/src/ui-component/grid/DataGrid.js b/packages/ui/src/ui-component/grid/DataGrid.js new file mode 100644 index 00000000000..7c2a267017c --- /dev/null +++ b/packages/ui/src/ui-component/grid/DataGrid.js @@ -0,0 +1,111 @@ +import PropTypes from 'prop-types' +import { useState, useCallback } from 'react' +import { DataGrid as MUIDataGrid, GridActionsCellItem } from '@mui/x-data-grid' +import { IconPlus } from '@tabler/icons' +import { Button } from '@mui/material' +import DeleteIcon from '@mui/icons-material/Delete' +import { cloneDeep } from 'lodash' +import { formatDataGridRows } from 'utils/genericHelper' + +export const DataGrid = ({ columns, rows, style, disabled = false, hideFooter = false, onChange }) => { + const [rowValues, setRowValues] = useState(formatDataGridRows(rows) ?? []) + + const deleteItem = useCallback( + (id) => () => { + let updatedRows = [] + setRowValues((prevRows) => { + let allRows = [...cloneDeep(prevRows)] + allRows = allRows.filter((row) => row.id !== id) + updatedRows = allRows + return allRows + }) + onChange(JSON.stringify(updatedRows)) + }, + // eslint-disable-next-line react-hooks/exhaustive-deps + [] + ) + + const addCols = (columns) => { + return [ + ...columns, + { + field: 'actions', + type: 'actions', + width: 80, + getActions: (params) => [ + } label='Delete' onClick={deleteItem(params.id)} /> + ] + } + ] + } + + const colValues = addCols(columns) + + const handleProcessRowUpdate = (newRow) => { + let updatedRows = [] + setRowValues((prevRows) => { + let allRows = [...cloneDeep(prevRows)] + const indexToUpdate = allRows.findIndex((row) => row.id === newRow.id) + if (indexToUpdate >= 0) { + allRows[indexToUpdate] = { ...newRow } + } + updatedRows = allRows + return allRows + }) + onChange(JSON.stringify(updatedRows)) + return newRow + } + + const getEmptyJsonObj = () => { + const obj = {} + for (let i = 0; i < colValues.length; i += 1) { + obj[colValues[i]?.field] = '' + } + return obj + } + + const 
addNewRow = () => { + setRowValues((prevRows) => { + let allRows = [...cloneDeep(prevRows)] + const lastRowId = allRows.length ? allRows[allRows.length - 1].id + 1 : 1 + allRows.push({ + ...getEmptyJsonObj(), + id: lastRowId + }) + return allRows + }) + } + + return ( + <> + {rowValues && colValues && ( +
+ { + return !disabled + }} + hideFooter={hideFooter} + onProcessRowUpdateError={(error) => console.error(error)} + rows={rowValues} + columns={colValues} + /> +
+ )} + {!disabled && ( + + )} + + ) +} + +DataGrid.propTypes = { + rows: PropTypes.array, + columns: PropTypes.array, + style: PropTypes.any, + disabled: PropTypes.bool, + hideFooter: PropTypes.bool, + onChange: PropTypes.func +} diff --git a/packages/ui/src/utils/genericHelper.js b/packages/ui/src/utils/genericHelper.js index af7f43533cb..18230a553b0 100644 --- a/packages/ui/src/utils/genericHelper.js +++ b/packages/ui/src/utils/genericHelper.js @@ -43,6 +43,7 @@ export const initNode = (nodeData, newNodeId) => { 'asyncOptions', 'options', 'multiOptions', + 'datagrid', 'string', 'number', 'boolean', @@ -422,3 +423,17 @@ export const isValidURL = (url) => { return undefined } } + +export const formatDataGridRows = (rows) => { + try { + const parsedRows = typeof rows === 'string' ? JSON.parse(rows) : rows + return parsedRows.map((sch, index) => { + return { + ...sch, + id: index + } + }) + } catch (e) { + return [] + } +} diff --git a/packages/ui/src/views/canvas/NodeInputHandler.js b/packages/ui/src/views/canvas/NodeInputHandler.js index 3a13a3b5f5d..906ad1d8a48 100644 --- a/packages/ui/src/views/canvas/NodeInputHandler.js +++ b/packages/ui/src/views/canvas/NodeInputHandler.js @@ -14,6 +14,7 @@ import { Dropdown } from 'ui-component/dropdown/Dropdown' import { MultiDropdown } from 'ui-component/dropdown/MultiDropdown' import { AsyncDropdown } from 'ui-component/dropdown/AsyncDropdown' import { Input } from 'ui-component/input/Input' +import { DataGrid } from 'ui-component/grid/DataGrid' import { File } from 'ui-component/file/File' import { SwitchInput } from 'ui-component/switch/Switch' import { flowContext } from 'store/context/ReactFlowContext' @@ -258,6 +259,15 @@ const NodeInputHandler = ({ inputAnchor, inputParam, data, disabled = false, isA value={data.inputs[inputParam.name] ?? inputParam.default ?? 
false} /> )} + {inputParam.type === 'datagrid' && ( + (data.inputs[inputParam.name] = newValue)} + /> + )} {(inputParam.type === 'string' || inputParam.type === 'password' || inputParam.type === 'number') && ( { - try { - const parsedSchema = JSON.parse(schema) - return parsedSchema.map((sch, index) => { - return { - ...sch, - id: index - } - }) - } catch (e) { - return [] - } - } - useEffect(() => { if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) else dispatch({ type: HIDE_CANVAS_DIALOG }) @@ -167,7 +153,7 @@ const ToolDialog = ({ show, dialogProps, onUseTemplate, onCancel, onConfirm }) = setToolId(getSpecificToolApi.data.id) setToolName(getSpecificToolApi.data.name) setToolDesc(getSpecificToolApi.data.description) - setToolSchema(formatSchema(getSpecificToolApi.data.schema)) + setToolSchema(formatDataGridRows(getSpecificToolApi.data.schema)) if (getSpecificToolApi.data.func) setToolFunc(getSpecificToolApi.data.func) else setToolFunc('') } @@ -180,7 +166,7 @@ const ToolDialog = ({ show, dialogProps, onUseTemplate, onCancel, onConfirm }) = setToolName(dialogProps.data.name) setToolDesc(dialogProps.data.description) setToolIcon(dialogProps.data.iconSrc) - setToolSchema(formatSchema(dialogProps.data.schema)) + setToolSchema(formatDataGridRows(dialogProps.data.schema)) if (dialogProps.data.func) setToolFunc(dialogProps.data.func) else setToolFunc('') } else if (dialogProps.type === 'EDIT' && dialogProps.toolId) { @@ -191,7 +177,7 @@ const ToolDialog = ({ show, dialogProps, onUseTemplate, onCancel, onConfirm }) = setToolName(dialogProps.data.name) setToolDesc(dialogProps.data.description) setToolIcon(dialogProps.data.iconSrc) - setToolSchema(formatSchema(dialogProps.data.schema)) + setToolSchema(formatDataGridRows(dialogProps.data.schema)) if (dialogProps.data.func) setToolFunc(dialogProps.data.func) else setToolFunc('') } else if (dialogProps.type === 'TEMPLATE' && dialogProps.data) { @@ -199,7 +185,7 @@ const ToolDialog = ({ show, dialogProps, onUseTemplate, 
onCancel, onConfirm }) = setToolName(dialogProps.data.name) setToolDesc(dialogProps.data.description) setToolIcon(dialogProps.data.iconSrc) - setToolSchema(formatSchema(dialogProps.data.schema)) + setToolSchema(formatDataGridRows(dialogProps.data.schema)) if (dialogProps.data.func) setToolFunc(dialogProps.data.func) else setToolFunc('') } else if (dialogProps.type === 'ADD') { From 825fdc05f4691f556336b34aecdd8467f521b289 Mon Sep 17 00:00:00 2001 From: Henry Date: Tue, 31 Oct 2023 23:00:09 +0000 Subject: [PATCH 10/13] add fix to detect json or string --- packages/components/src/utils.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/components/src/utils.ts b/packages/components/src/utils.ts index 9ef30a0e847..45acdd0578a 100644 --- a/packages/components/src/utils.ts +++ b/packages/components/src/utils.ts @@ -512,7 +512,7 @@ export const getUserHome = (): string => { /** * Map incoming chat history to ChatMessageHistory - * @param {options} ICommonObject + * @param {ICommonObject} options * @returns {ChatMessageHistory} */ export const mapChatHistory = (options: ICommonObject): ChatMessageHistory => { @@ -550,12 +550,12 @@ export const convertChatHistoryToText = (chatHistory: IMessage[] = []): string = /** * Convert schema to zod schema - * @param {string} schema + * @param {string | object} schema * @returns {ICommonObject} */ -export const convertSchemaToZod = (schema: string) => { +export const convertSchemaToZod = (schema: string | object): ICommonObject => { try { - const parsedSchema = JSON.parse(schema) + const parsedSchema = typeof schema === 'string' ? 
JSON.parse(schema) : schema const zodObj: ICommonObject = {} for (const sch of parsedSchema) { if (sch.type === 'string') { From 2bf90d3c3c5b851b675c167a99227654d37789e7 Mon Sep 17 00:00:00 2001 From: Henry Heng Date: Tue, 31 Oct 2023 23:54:55 +0000 Subject: [PATCH 11/13] update LLMChain prompt --- packages/components/nodes/chains/LLMChain/LLMChain.ts | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/components/nodes/chains/LLMChain/LLMChain.ts b/packages/components/nodes/chains/LLMChain/LLMChain.ts index c180aeca792..0d55588415c 100644 --- a/packages/components/nodes/chains/LLMChain/LLMChain.ts +++ b/packages/components/nodes/chains/LLMChain/LLMChain.ts @@ -31,16 +31,16 @@ class LLMChain_Chains implements INode { this.description = 'Chain to run queries against LLMs' this.baseClasses = [this.type, ...getBaseClasses(LLMChain)] this.inputs = [ - { - label: 'Prompt', - name: 'prompt', - type: 'BasePromptTemplate' - }, { label: 'Language Model', name: 'model', type: 'BaseLanguageModel' }, + { + label: 'Prompt', + name: 'prompt', + type: 'BasePromptTemplate' + }, { label: 'Output Parser', name: 'outputParser', From 895bc54f0b8edd528f6081448ffdd02a6e6f9822 Mon Sep 17 00:00:00 2001 From: vinodkiran Date: Fri, 3 Nov 2023 08:41:16 +0530 Subject: [PATCH 12/13] Change of category name for output parsers --- packages/components/nodes/outputparsers/OutputParserHelpers.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/components/nodes/outputparsers/OutputParserHelpers.ts b/packages/components/nodes/outputparsers/OutputParserHelpers.ts index 63dd4838284..a94edddd3d7 100644 --- a/packages/components/nodes/outputparsers/OutputParserHelpers.ts +++ b/packages/components/nodes/outputparsers/OutputParserHelpers.ts @@ -4,7 +4,7 @@ import { BaseLanguageModel } from 'langchain/base_language' import { ICommonObject } from '../../src' import { ChatPromptTemplate, FewShotPromptTemplate, PromptTemplate, 
SystemMessagePromptTemplate } from 'langchain/prompts' -export const CATEGORY = 'Output Parser (Experimental)' +export const CATEGORY = 'Output Parsers' export const formatResponse = (response: string | object): string | object => { if (typeof response === 'object') { From 38791f9aa22cc331e37b37ec3e0ec5d128192b95 Mon Sep 17 00:00:00 2001 From: Henry Date: Fri, 3 Nov 2023 13:33:38 +0000 Subject: [PATCH 13/13] update marketplace llmchain templates --- .../marketplaces/chatflows/Antonym.json | 34 +- .../chatflows/HuggingFace LLM Chain.json | 196 +++---- .../chatflows/List Output Parser.json | 417 +++++++++++++++ .../Prompt Chaining with VectorStore.json | 288 ++++++----- .../chatflows/Prompt Chaining.json | 379 +++++++------- .../marketplaces/chatflows/Replicate LLM.json | 194 +++---- .../chatflows/Simple LLM Chain.json | 196 +++---- .../chatflows/Structured Output Parser.json | 477 ++++++++++++++++++ .../marketplaces/chatflows/Translator.json | 196 +++---- 9 files changed, 1671 insertions(+), 706 deletions(-) create mode 100644 packages/server/marketplaces/chatflows/List Output Parser.json create mode 100644 packages/server/marketplaces/chatflows/Structured Output Parser.json diff --git a/packages/server/marketplaces/chatflows/Antonym.json b/packages/server/marketplaces/chatflows/Antonym.json index 54342d8fd61..059ab6c11a3 100644 --- a/packages/server/marketplaces/chatflows/Antonym.json +++ b/packages/server/marketplaces/chatflows/Antonym.json @@ -13,8 +13,8 @@ "data": { "id": "fewShotPromptTemplate_1", "label": "Few Shot Prompt Template", - "name": "fewShotPromptTemplate", "version": 1, + "name": "fewShotPromptTemplate", "type": "FewShotPromptTemplate", "baseClasses": ["FewShotPromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], "category": "Prompts", @@ -115,8 +115,8 @@ "data": { "id": "promptTemplate_0", "label": "Prompt Template", - "name": "promptTemplate", "version": 1, + "name": "promptTemplate", "type": "PromptTemplate", "baseClasses": 
["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], "category": "Prompts", @@ -165,7 +165,7 @@ }, { "width": 300, - "height": 523, + "height": 574, "id": "chatOpenAI_0", "position": { "x": 1226.7977900193628, @@ -175,8 +175,8 @@ "data": { "id": "chatOpenAI_0", "label": "ChatOpenAI", - "name": "chatOpenAI", "version": 2, + "name": "chatOpenAI", "type": "ChatOpenAI", "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], "category": "Chat Models", @@ -327,20 +327,20 @@ }, { "width": 300, - "height": 405, + "height": 456, "id": "llmChain_0", "position": { - "x": 1573.7490072386481, - "y": 429.1905949837192 + "x": 1609.3428158423485, + "y": 409.3763727612179 }, "type": "customNode", "data": { "id": "llmChain_0", "label": "LLM Chain", + "version": 3, "name": "llmChain", - "version": 1, "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain"], + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], "category": "Chains", "description": "Chain to run queries against LLMs", "inputParams": [ @@ -365,11 +365,19 @@ "name": "prompt", "type": "BasePromptTemplate", "id": "llmChain_0-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" } ], "inputs": { "model": "{{chatOpenAI_0.data.instance}}", "prompt": "{{fewShotPromptTemplate_1.data.instance}}", + "outputParser": "", "chainName": "" }, "outputAnchors": [ @@ -379,10 +387,10 @@ "type": "options", "options": [ { - "id": "llmChain_0-output-llmChain-LLMChain|BaseChain", + "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", "name": "llmChain", "label": "LLM Chain", - "type": "LLMChain | BaseChain" + "type": "LLMChain | BaseChain | Runnable" }, { "id": "llmChain_0-output-outputPrediction-string|json", @@ -401,8 +409,8 @@ }, "selected": false, "positionAbsolute": { - "x": 1573.7490072386481, - "y": 429.1905949837192 + "x": 
1609.3428158423485, + "y": 409.3763727612179 }, "dragging": false } diff --git a/packages/server/marketplaces/chatflows/HuggingFace LLM Chain.json b/packages/server/marketplaces/chatflows/HuggingFace LLM Chain.json index e51e1ee0339..5e33b63ac38 100644 --- a/packages/server/marketplaces/chatflows/HuggingFace LLM Chain.json +++ b/packages/server/marketplaces/chatflows/HuggingFace LLM Chain.json @@ -1,87 +1,6 @@ { "description": "Simple LLM Chain using HuggingFace Inference API on falcon-7b-instruct model", "nodes": [ - { - "width": 300, - "height": 405, - "id": "llmChain_1", - "position": { - "x": 970.9254258940236, - "y": 320.56761595884564 - }, - "type": "customNode", - "data": { - "id": "llmChain_1", - "label": "LLM Chain", - "name": "llmChain", - "version": 1, - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "BaseLangChain"], - "category": "Chains", - "description": "Chain to run queries against LLMs", - "inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": "llmChain_1-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_1-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_1-input-prompt-BasePromptTemplate" - } - ], - "inputs": { - "model": "{{huggingFaceInference_LLMs_0.data.instance}}", - "prompt": "{{promptTemplate_0.data.instance}}", - "chainName": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|BaseLangChain", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | BaseLangChain" - }, - { - "id": "llmChain_1-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string 
| json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "llmChain" - }, - "selected": false - }, - "positionAbsolute": { - "x": 970.9254258940236, - "y": 320.56761595884564 - }, - "selected": false, - "dragging": false - }, { "width": 300, "height": 475, @@ -94,8 +13,8 @@ "data": { "id": "promptTemplate_0", "label": "Prompt Template", - "name": "promptTemplate", "version": 1, + "name": "promptTemplate", "type": "PromptTemplate", "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], "category": "Prompts", @@ -144,7 +63,7 @@ }, { "width": 300, - "height": 526, + "height": 577, "id": "huggingFaceInference_LLMs_0", "position": { "x": 498.8594464193537, @@ -154,8 +73,8 @@ "data": { "id": "huggingFaceInference_LLMs_0", "label": "HuggingFace Inference", - "name": "huggingFaceInference_LLMs", "version": 2, + "name": "huggingFaceInference_LLMs", "type": "HuggingFaceInference", "baseClasses": ["HuggingFaceInference", "LLM", "BaseLLM", "BaseLanguageModel"], "category": "LLMs", @@ -267,27 +186,116 @@ "y": -94.91050256311678 }, "dragging": false + }, + { + "width": 300, + "height": 456, + "id": "llmChain_0", + "position": { + "x": 909.6249320819859, + "y": 338.9520801783737 + }, + "type": "customNode", + "data": { + "id": "llmChain_0", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_0-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": 
"llmChain_0-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" + } + ], + "inputs": { + "model": "{{huggingFaceInference_LLMs_0.data.instance}}", + "prompt": "{{promptTemplate_0.data.instance}}", + "outputParser": "", + "chainName": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_0-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 909.6249320819859, + "y": 338.9520801783737 + }, + "dragging": false } ], "edges": [ { - "source": "promptTemplate_0", - "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", + "source": "huggingFaceInference_LLMs_0", + "sourceHandle": "huggingFaceInference_LLMs_0-output-huggingFaceInference_LLMs-HuggingFaceInference|LLM|BaseLLM|BaseLanguageModel", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-model-BaseLanguageModel", "type": "buttonedge", - "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", + "id": "huggingFaceInference_LLMs_0-huggingFaceInference_LLMs_0-output-huggingFaceInference_LLMs-HuggingFaceInference|LLM|BaseLLM|BaseLanguageModel-llmChain_0-llmChain_0-input-model-BaseLanguageModel", "data": { "label": "" } 
}, { - "source": "huggingFaceInference_LLMs_0", - "sourceHandle": "huggingFaceInference_LLMs_0-output-huggingFaceInference_LLMs-HuggingFaceInference|LLM|BaseLLM|BaseLanguageModel", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-model-BaseLanguageModel", + "source": "promptTemplate_0", + "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", "type": "buttonedge", - "id": "huggingFaceInference_LLMs_0-huggingFaceInference_LLMs_0-output-huggingFaceInference_LLMs-HuggingFaceInference|LLM|BaseLLM|BaseLanguageModel-llmChain_1-llmChain_1-input-model-BaseLanguageModel", + "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/List Output Parser.json b/packages/server/marketplaces/chatflows/List Output Parser.json new file mode 100644 index 00000000000..c96dd5301c4 --- /dev/null +++ b/packages/server/marketplaces/chatflows/List Output Parser.json @@ -0,0 +1,417 @@ +{ + "description": "Return response as a list (array) instead of a string/text", + "nodes": [ + { + "width": 300, + "height": 456, + "id": "llmChain_0", + "position": { + "x": 1490.4252662385359, + "y": 229.91198307750102 + }, + "type": "customNode", + "data": { + "id": "llmChain_0", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_0-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": 
"model", + "type": "BaseLanguageModel", + "id": "llmChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_0-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" + } + ], + "inputs": { + "model": "{{chatOpenAI_0.data.instance}}", + "prompt": "{{promptTemplate_0.data.instance}}", + "outputParser": "{{csvOutputParser_0.data.instance}}", + "chainName": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_0-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1490.4252662385359, + "y": 229.91198307750102 + }, + "dragging": false + }, + { + "width": 300, + "height": 276, + "id": "csvOutputParser_0", + "position": { + "x": 476.70884184429417, + "y": 346.38506209058426 + }, + "type": "customNode", + "data": { + "id": "csvOutputParser_0", + "label": "CSV Output Parser", + "version": 1, + "name": "csvOutputParser", + "type": "CSVListOutputParser", + "baseClasses": ["CSVListOutputParser", "BaseLLMOutputParser", "Runnable"], + "category": "Output Parsers", + "description": "Parse the output of an LLM call as a comma-separated list of values", + "inputParams": [ + { + "label": "Autofix", + "name": "autofixParser", + "type": "boolean", + "optional": true, + "description": "In the event that the first call fails, will make another call to the model to fix any errors.", + 
"id": "csvOutputParser_0-input-autofixParser-boolean" + } + ], + "inputAnchors": [], + "inputs": { + "autofixParser": true + }, + "outputAnchors": [ + { + "id": "csvOutputParser_0-output-csvOutputParser-CSVListOutputParser|BaseLLMOutputParser|Runnable", + "name": "csvOutputParser", + "label": "CSVListOutputParser", + "type": "CSVListOutputParser | BaseLLMOutputParser | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 476.70884184429417, + "y": 346.38506209058426 + }, + "dragging": false + }, + { + "width": 300, + "height": 475, + "id": "promptTemplate_0", + "position": { + "x": 804.3731431892371, + "y": 10.888147964487587 + }, + "type": "customNode", + "data": { + "id": "promptTemplate_0", + "label": "Prompt Template", + "version": 1, + "name": "promptTemplate", + "type": "PromptTemplate", + "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate", "Runnable"], + "category": "Prompts", + "description": "Schema to represent a basic prompt for an LLM", + "inputParams": [ + { + "label": "Template", + "name": "template", + "type": "string", + "rows": 4, + "placeholder": "What is a good name for a company that makes {product}?", + "id": "promptTemplate_0-input-template-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "json", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "promptTemplate_0-input-promptValues-json" + } + ], + "inputAnchors": [], + "inputs": { + "template": "Answer user's question as best you can: {question}", + "promptValues": "" + }, + "outputAnchors": [ + { + "id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", + "name": "promptTemplate", + "label": "PromptTemplate", + "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + 
"positionAbsolute": { + "x": 804.3731431892371, + "y": 10.888147964487587 + }, + "dragging": false + }, + { + "width": 300, + "height": 574, + "id": "chatOpenAI_0", + "position": { + "x": 1137.2591863882824, + "y": -204.50870351724768 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "version": 2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["openAIApi"], + "id": "chatOpenAI_0-input-credential-credential" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" + }, + { + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" + }, + { + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" + }, + { + "label": "gpt-3.5-turbo-16k-0613", + "name": "gpt-3.5-turbo-16k-0613" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "step": 0.1, + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + 
"id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "chatOpenAI_0-input-cache-BaseCache" + } + ], + "inputs": { + "cache": "", + "modelName": "gpt-3.5-turbo-16k", + "temperature": "0", + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "basepath": "", + "baseOptions": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1137.2591863882824, + "y": -204.50870351724768 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "csvOutputParser_0", + "sourceHandle": "csvOutputParser_0-output-csvOutputParser-CSVListOutputParser|BaseLLMOutputParser|Runnable", + "target": "llmChain_0", + "targetHandle": 
"llmChain_0-input-outputParser-BaseLLMOutputParser", + "type": "buttonedge", + "id": "csvOutputParser_0-csvOutputParser_0-output-csvOutputParser-CSVListOutputParser|BaseLLMOutputParser|Runnable-llmChain_0-llmChain_0-input-outputParser-BaseLLMOutputParser", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_0-llmChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "promptTemplate_0", + "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/chatflows/Prompt Chaining with VectorStore.json b/packages/server/marketplaces/chatflows/Prompt Chaining with VectorStore.json index 0f270c7b9c4..bca8db04836 100644 --- a/packages/server/marketplaces/chatflows/Prompt Chaining with VectorStore.json +++ b/packages/server/marketplaces/chatflows/Prompt Chaining with VectorStore.json @@ -661,50 +661,55 @@ }, { "width": 300, - "height": 405, - "id": "llmChain_0", + "height": 454, + "id": "vectorStoreToDocument_0", "position": { - "x": 747.1299875516488, - "y": -267.01184813798244 + "x": 1906.6871314089658, + "y": -157.0046189166955 }, "type": "customNode", "data": { - "id": "llmChain_0", - "label": "LLM Chain", - "version": 1, - "name": "llmChain", - "type": 
"LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "Chain to run queries against LLMs", + "id": "vectorStoreToDocument_0", + "label": "VectorStore To Document", + "version": 2, + "name": "vectorStoreToDocument", + "type": "Document", + "baseClasses": ["Document"], + "category": "Document Loaders", + "description": "Search documents with scores from vector store", "inputParams": [ { - "label": "Chain Name", - "name": "chainName", + "label": "Query", + "name": "query", "type": "string", - "placeholder": "Name Your Chain", + "description": "Query to retrieve documents from vector database. If not specified, user question will be used", + "optional": true, + "acceptVariable": true, + "id": "vectorStoreToDocument_0-input-query-string" + }, + { + "label": "Minimum Score (%)", + "name": "minScore", + "type": "number", "optional": true, - "id": "llmChain_0-input-chainName-string" + "placeholder": "75", + "step": 1, + "description": "Minumum score for embeddings documents to be included", + "id": "vectorStoreToDocument_0-input-minScore-number" } ], "inputAnchors": [ { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_0-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_0-input-prompt-BasePromptTemplate" + "label": "Vector Store", + "name": "vectorStore", + "type": "VectorStore", + "id": "vectorStoreToDocument_0-input-vectorStore-VectorStore" } ], "inputs": { - "model": "{{chatOpenAI_0.data.instance}}", - "prompt": "{{promptTemplate_0.data.instance}}", - "chainName": "RephraseQuestion" + "vectorStore": "{{pineconeExistingIndex_0.data.instance}}", + "query": "{{llmChain_2.data.instance}}", + "minScore": "" }, "outputAnchors": [ { @@ -713,46 +718,46 @@ "type": "options", "options": [ { - "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", - "name": "llmChain", - "label": "LLM Chain", 
- "type": "LLMChain | BaseChain | Runnable" + "id": "vectorStoreToDocument_0-output-document-Document", + "name": "document", + "label": "Document", + "type": "Document" }, { - "id": "llmChain_0-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", + "id": "vectorStoreToDocument_0-output-text-string|json", + "name": "text", + "label": "Text", "type": "string | json" } ], - "default": "llmChain" + "default": "document" } ], "outputs": { - "output": "outputPrediction" + "output": "text" }, "selected": false }, "selected": false, "positionAbsolute": { - "x": 747.1299875516488, - "y": -267.01184813798244 + "x": 1906.6871314089658, + "y": -157.0046189166955 }, "dragging": false }, { "width": 300, - "height": 405, - "id": "llmChain_1", + "height": 456, + "id": "llmChain_2", "position": { - "x": 2694.8707655351186, - "y": -308.59150355411236 + "x": 756.2678342825631, + "y": -244.07972550448233 }, "type": "customNode", "data": { - "id": "llmChain_1", + "id": "llmChain_2", "label": "LLM Chain", - "version": 1, + "version": 3, "name": "llmChain", "type": "LLMChain", "baseClasses": ["LLMChain", "BaseChain", "Runnable"], @@ -765,7 +770,7 @@ "type": "string", "placeholder": "Name Your Chain", "optional": true, - "id": "llmChain_1-input-chainName-string" + "id": "llmChain_2-input-chainName-string" } ], "inputAnchors": [ @@ -773,19 +778,27 @@ "label": "Language Model", "name": "model", "type": "BaseLanguageModel", - "id": "llmChain_1-input-model-BaseLanguageModel" + "id": "llmChain_2-input-model-BaseLanguageModel" }, { "label": "Prompt", "name": "prompt", "type": "BasePromptTemplate", - "id": "llmChain_1-input-prompt-BasePromptTemplate" + "id": "llmChain_2-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_2-input-outputParser-BaseLLMOutputParser" } ], "inputs": { - "model": "{{chatOpenAI_1.data.instance}}", - "prompt": 
"{{chatPromptTemplate_0.data.instance}}", - "chainName": "FinalResponse" + "model": "{{chatOpenAI_0.data.instance}}", + "prompt": "{{promptTemplate_0.data.instance}}", + "outputParser": "", + "chainName": "RephraseQuestion" }, "outputAnchors": [ { @@ -794,13 +807,13 @@ "type": "options", "options": [ { - "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|Runnable", + "id": "llmChain_2-output-llmChain-LLMChain|BaseChain|Runnable", "name": "llmChain", "label": "LLM Chain", "type": "LLMChain | BaseChain | Runnable" }, { - "id": "llmChain_1-output-outputPrediction-string|json", + "id": "llmChain_2-output-outputPrediction-string|json", "name": "outputPrediction", "label": "Output Prediction", "type": "string | json" @@ -810,68 +823,71 @@ } ], "outputs": { - "output": "llmChain" + "output": "outputPrediction" }, "selected": false }, "selected": false, "positionAbsolute": { - "x": 2694.8707655351186, - "y": -308.59150355411236 + "x": 756.2678342825631, + "y": -244.07972550448233 }, "dragging": false }, { "width": 300, - "height": 454, - "id": "vectorStoreToDocument_0", + "height": 456, + "id": "llmChain_1", "position": { - "x": 1906.6871314089658, - "y": -157.0046189166955 + "x": 2684.08901232628, + "y": -301.4742415779482 }, "type": "customNode", "data": { - "id": "vectorStoreToDocument_0", - "label": "VectorStore To Document", - "version": 2, - "name": "vectorStoreToDocument", - "type": "Document", - "baseClasses": ["Document"], - "category": "Document Loaders", - "description": "Search documents with scores from vector store", + "id": "llmChain_1", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", "inputParams": [ { - "label": "Query", - "name": "query", + "label": "Chain Name", + "name": "chainName", "type": "string", - "description": "Query to retrieve documents from vector database. 
If not specified, user question will be used", - "optional": true, - "acceptVariable": true, - "id": "vectorStoreToDocument_0-input-query-string" - }, - { - "label": "Minimum Score (%)", - "name": "minScore", - "type": "number", + "placeholder": "Name Your Chain", "optional": true, - "placeholder": "75", - "step": 1, - "description": "Minumum score for embeddings documents to be included", - "id": "vectorStoreToDocument_0-input-minScore-number" + "id": "llmChain_1-input-chainName-string" } ], "inputAnchors": [ { - "label": "Vector Store", - "name": "vectorStore", - "type": "VectorStore", - "id": "vectorStoreToDocument_0-input-vectorStore-VectorStore" + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_1-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_1-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_1-input-outputParser-BaseLLMOutputParser" } ], "inputs": { - "vectorStore": "{{pineconeExistingIndex_0.data.instance}}", - "query": "{{llmChain_0.data.instance}}", - "minScore": "" + "model": "{{chatOpenAI_1.data.instance}}", + "prompt": "{{chatPromptTemplate_0.data.instance}}", + "outputParser": "", + "chainName": "FinalResponse" }, "outputAnchors": [ { @@ -880,30 +896,30 @@ "type": "options", "options": [ { - "id": "vectorStoreToDocument_0-output-document-Document", - "name": "document", - "label": "Document", - "type": "Document" + "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" }, { - "id": "vectorStoreToDocument_0-output-text-string|json", - "name": "text", - "label": "Text", + "id": "llmChain_1-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", "type": "string | json" } 
], - "default": "document" + "default": "llmChain" } ], "outputs": { - "output": "text" + "output": "llmChain" }, "selected": false }, "selected": false, "positionAbsolute": { - "x": 1906.6871314089658, - "y": -157.0046189166955 + "x": 2684.08901232628, + "y": -301.4742415779482 }, "dragging": false } @@ -921,78 +937,78 @@ } }, { - "source": "promptTemplate_0", - "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", - "target": "llmChain_0", - "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", + "source": "pineconeExistingIndex_0", + "sourceHandle": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", + "target": "vectorStoreToDocument_0", + "targetHandle": "vectorStoreToDocument_0-input-vectorStore-VectorStore", "type": "buttonedge", - "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", + "id": "pineconeExistingIndex_0-pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore-vectorStoreToDocument_0-vectorStoreToDocument_0-input-vectorStore-VectorStore", "data": { "label": "" } }, { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "llmChain_0", - "targetHandle": "llmChain_0-input-model-BaseLanguageModel", + "source": "vectorStoreToDocument_0", + "sourceHandle": "vectorStoreToDocument_0-output-text-string|json", + "target": "chatPromptTemplate_0", + "targetHandle": "chatPromptTemplate_0-input-promptValues-json", "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_0-llmChain_0-input-model-BaseLanguageModel", + "id": "vectorStoreToDocument_0-vectorStoreToDocument_0-output-text-string|json-chatPromptTemplate_0-chatPromptTemplate_0-input-promptValues-json", 
"data": { "label": "" } }, { - "source": "chatPromptTemplate_0", - "sourceHandle": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "llmChain_2", + "targetHandle": "llmChain_2-input-model-BaseLanguageModel", "type": "buttonedge", - "id": "chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_2-llmChain_2-input-model-BaseLanguageModel", "data": { "label": "" } }, { - "source": "chatOpenAI_1", - "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-model-BaseLanguageModel", + "source": "promptTemplate_0", + "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", + "target": "llmChain_2", + "targetHandle": "llmChain_2-input-prompt-BasePromptTemplate", "type": "buttonedge", - "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_1-llmChain_1-input-model-BaseLanguageModel", + "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable-llmChain_2-llmChain_2-input-prompt-BasePromptTemplate", "data": { "label": "" } }, { - "source": "pineconeExistingIndex_0", - "sourceHandle": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", + "source": "llmChain_2", + "sourceHandle": 
"llmChain_2-output-outputPrediction-string|json", "target": "vectorStoreToDocument_0", - "targetHandle": "vectorStoreToDocument_0-input-vectorStore-VectorStore", + "targetHandle": "vectorStoreToDocument_0-input-query-string", "type": "buttonedge", - "id": "pineconeExistingIndex_0-pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore-vectorStoreToDocument_0-vectorStoreToDocument_0-input-vectorStore-VectorStore", + "id": "llmChain_2-llmChain_2-output-outputPrediction-string|json-vectorStoreToDocument_0-vectorStoreToDocument_0-input-query-string", "data": { "label": "" } }, { - "source": "vectorStoreToDocument_0", - "sourceHandle": "vectorStoreToDocument_0-output-text-string|json", - "target": "chatPromptTemplate_0", - "targetHandle": "chatPromptTemplate_0-input-promptValues-json", + "source": "chatOpenAI_1", + "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-model-BaseLanguageModel", "type": "buttonedge", - "id": "vectorStoreToDocument_0-vectorStoreToDocument_0-output-text-string|json-chatPromptTemplate_0-chatPromptTemplate_0-input-promptValues-json", + "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_1-llmChain_1-input-model-BaseLanguageModel", "data": { "label": "" } }, { - "source": "llmChain_0", - "sourceHandle": "llmChain_0-output-outputPrediction-string|json", - "target": "vectorStoreToDocument_0", - "targetHandle": "vectorStoreToDocument_0-input-query-string", + "source": "chatPromptTemplate_0", + "sourceHandle": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", "type": "buttonedge", - "id": "llmChain_0-llmChain_0-output-outputPrediction-string|json-vectorStoreToDocument_0-vectorStoreToDocument_0-input-query-string", + 
"id": "chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/Prompt Chaining.json b/packages/server/marketplaces/chatflows/Prompt Chaining.json index 77c238ad736..3181bc472b6 100644 --- a/packages/server/marketplaces/chatflows/Prompt Chaining.json +++ b/packages/server/marketplaces/chatflows/Prompt Chaining.json @@ -13,8 +13,8 @@ "data": { "id": "promptTemplate_0", "label": "Prompt Template", - "name": "promptTemplate", "version": 1, + "name": "promptTemplate", "type": "PromptTemplate", "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], "category": "Prompts", @@ -73,8 +73,8 @@ "data": { "id": "promptTemplate_1", "label": "Prompt Template", - "name": "promptTemplate", "version": 1, + "name": "promptTemplate", "type": "PromptTemplate", "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], "category": "Prompts", @@ -123,169 +123,7 @@ }, { "width": 300, - "height": 405, - "id": "llmChain_0", - "position": { - "x": 1192.835706086358, - "y": 367.49653955405995 - }, - "type": "customNode", - "data": { - "id": "llmChain_0", - "label": "LLM Chain", - "name": "llmChain", - "version": 1, - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain"], - "category": "Chains", - "description": "Chain to run queries against LLMs", - "inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": "llmChain_0-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_0-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": 
"llmChain_0-input-prompt-BasePromptTemplate" - } - ], - "inputs": { - "model": "{{openAI_1.data.instance}}", - "prompt": "{{promptTemplate_0.data.instance}}", - "chainName": "FirstChain" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_0-output-llmChain-LLMChain|BaseChain", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain" - }, - { - "id": "llmChain_0-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "outputPrediction" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1192.835706086358, - "y": 367.49653955405995 - }, - "dragging": false - }, - { - "width": 300, - "height": 405, - "id": "llmChain_1", - "position": { - "x": 1956.8236771865425, - "y": 359.10696865911547 - }, - "type": "customNode", - "data": { - "id": "llmChain_1", - "label": "LLM Chain", - "name": "llmChain", - "version": 1, - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain"], - "category": "Chains", - "description": "Chain to run queries against LLMs", - "inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": "llmChain_1-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_1-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_1-input-prompt-BasePromptTemplate" - } - ], - "inputs": { - "model": "{{openAI_2.data.instance}}", - "prompt": "{{promptTemplate_1.data.instance}}", - "chainName": "LastChain" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": 
"llmChain_1-output-llmChain-LLMChain|BaseChain", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain" - }, - { - "id": "llmChain_1-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "llmChain" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1956.8236771865425, - "y": 359.10696865911547 - }, - "dragging": false - }, - { - "width": 300, - "height": 523, + "height": 574, "id": "openAI_1", "position": { "x": 791.6102007244282, @@ -295,8 +133,8 @@ "data": { "id": "openAI_1", "label": "OpenAI", - "name": "openAI", "version": 3, + "name": "openAI", "type": "OpenAI", "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"], "category": "LLMs", @@ -445,18 +283,18 @@ }, { "width": 300, - "height": 523, + "height": 574, "id": "openAI_2", "position": { "x": 1571.148617508543, - "y": -90.372437481171687 + "y": -90.37243748117169 }, "type": "customNode", "data": { "id": "openAI_2", "label": "OpenAI", - "name": "openAI", "version": 3, + "name": "openAI", "type": "OpenAI", "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"], "category": "LLMs", @@ -573,7 +411,6 @@ "id": "openAI_2-input-cache-BaseCache" } ], - "default": "gpt-3.5-turbo-instruct", "inputs": { "modelName": "gpt-3.5-turbo-instruct", "temperature": 0.7, @@ -600,12 +437,201 @@ "selected": false, "positionAbsolute": { "x": 1571.148617508543, - "y": -90.372437481171687 + "y": -90.37243748117169 + }, + "dragging": false + }, + { + "width": 300, + "height": 456, + "id": "llmChain_0", + "position": { + "x": 1183.0899727188096, + "y": 385.0159960992951 + }, + "type": "customNode", + "data": { + "id": "llmChain_0", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries 
against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_0-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_0-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" + } + ], + "inputs": { + "model": "{{openAI_1.data.instance}}", + "prompt": "{{promptTemplate_0.data.instance}}", + "outputParser": "", + "chainName": "FirstChain" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_0-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "outputPrediction" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1183.0899727188096, + "y": 385.0159960992951 + }, + "dragging": false + }, + { + "width": 300, + "height": 456, + "id": "llmChain_1", + "position": { + "x": 1973.883197748518, + "y": 370.7937277714931 + }, + "type": "customNode", + "data": { + "id": "llmChain_1", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": 
"string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_1-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_1-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_1-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_1-input-outputParser-BaseLLMOutputParser" + } + ], + "inputs": { + "model": "{{openAI_2.data.instance}}", + "prompt": "{{promptTemplate_1.data.instance}}", + "outputParser": "", + "chainName": "LastChain" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_1-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1973.883197748518, + "y": 370.7937277714931 }, "dragging": false } ], "edges": [ + { + "source": "openAI_1", + "sourceHandle": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "openAI_1-openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_0-llmChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, { "source": "promptTemplate_0", "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", @@ -639,17 +665,6 @@ "label": "" } }, - { - "source": 
"openAI_1", - "sourceHandle": "openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", - "target": "llmChain_0", - "targetHandle": "llmChain_0-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "openAI_1-openAI_1-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_0-llmChain_0-input-model-BaseLanguageModel", - "data": { - "label": "" - } - }, { "source": "openAI_2", "sourceHandle": "openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", diff --git a/packages/server/marketplaces/chatflows/Replicate LLM.json b/packages/server/marketplaces/chatflows/Replicate LLM.json index 0049214c72b..bee565ce699 100644 --- a/packages/server/marketplaces/chatflows/Replicate LLM.json +++ b/packages/server/marketplaces/chatflows/Replicate LLM.json @@ -3,88 +3,7 @@ "nodes": [ { "width": 300, - "height": 405, - "id": "llmChain_1", - "position": { - "x": 967.581544453458, - "y": 320.56761595884564 - }, - "type": "customNode", - "data": { - "id": "llmChain_1", - "label": "LLM Chain", - "version": 1, - "name": "llmChain", - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "BaseLangChain"], - "category": "Chains", - "description": "Chain to run queries against LLMs", - "inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": "llmChain_1-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_1-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_1-input-prompt-BasePromptTemplate" - } - ], - "inputs": { - "model": "{{replicate_0.data.instance}}", - "prompt": "{{promptTemplate_0.data.instance}}", - "chainName": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": 
"llmChain_1-output-llmChain-LLMChain|BaseChain|BaseLangChain", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | BaseLangChain" - }, - { - "id": "llmChain_1-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "llmChain" - }, - "selected": false - }, - "positionAbsolute": { - "x": 967.581544453458, - "y": 320.56761595884564 - }, - "selected": false, - "dragging": false - }, - { - "width": 300, - "height": 474, + "height": 475, "id": "promptTemplate_0", "position": { "x": 269.2203229225663, @@ -144,7 +63,7 @@ }, { "width": 300, - "height": 526, + "height": 577, "id": "replicate_0", "position": { "x": 623.313978186024, @@ -260,27 +179,116 @@ "y": -142.92788335022428 }, "dragging": false + }, + { + "width": 300, + "height": 456, + "id": "llmChain_0", + "position": { + "x": 1013.8484815418046, + "y": 298.7146179121001 + }, + "type": "customNode", + "data": { + "id": "llmChain_0", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_0-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_0-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" + } + ], + "inputs": { + "model": 
"{{replicate_0.data.instance}}", + "prompt": "{{promptTemplate_0.data.instance}}", + "outputParser": "", + "chainName": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_0-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1013.8484815418046, + "y": 298.7146179121001 + }, + "dragging": false } ], "edges": [ { - "source": "promptTemplate_0", - "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", + "source": "replicate_0", + "sourceHandle": "replicate_0-output-replicate-Replicate|BaseChatModel|LLM|BaseLLM|BaseLanguageModel|Runnable", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-model-BaseLanguageModel", "type": "buttonedge", - "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", + "id": "replicate_0-replicate_0-output-replicate-Replicate|BaseChatModel|LLM|BaseLLM|BaseLanguageModel|Runnable-llmChain_0-llmChain_0-input-model-BaseLanguageModel", "data": { "label": "" } }, { - "source": "replicate_0", - "sourceHandle": "replicate_0-output-replicate-Replicate|BaseChatModel|LLM|BaseLLM|BaseLanguageModel|Runnable", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-model-BaseLanguageModel", + "source": "promptTemplate_0", + "sourceHandle": 
"promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", "type": "buttonedge", - "id": "replicate_0-replicate_0-output-replicate-Replicate|BaseChatModel|LLM|BaseLLM|BaseLanguageModel|Runnable-llmChain_1-llmChain_1-input-model-BaseLanguageModel", + "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/Simple LLM Chain.json b/packages/server/marketplaces/chatflows/Simple LLM Chain.json index f3db04ef2bb..b07124c0596 100644 --- a/packages/server/marketplaces/chatflows/Simple LLM Chain.json +++ b/packages/server/marketplaces/chatflows/Simple LLM Chain.json @@ -1,87 +1,6 @@ { "description": "Basic example of stateless (no memory) LLM Chain with a Prompt Template and LLM Model", "nodes": [ - { - "width": 300, - "height": 405, - "id": "llmChain_1", - "position": { - "x": 970.9254258940236, - "y": 320.56761595884564 - }, - "type": "customNode", - "data": { - "id": "llmChain_1", - "label": "LLM Chain", - "name": "llmChain", - "version": 1, - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "BaseLangChain"], - "category": "Chains", - "description": "Chain to run queries against LLMs", - "inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": "llmChain_1-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_1-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_1-input-prompt-BasePromptTemplate" - } - ], - "inputs": { - "model": "{{openAI_0.data.instance}}", - 
"prompt": "{{promptTemplate_0.data.instance}}", - "chainName": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|BaseLangChain", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | BaseLangChain" - }, - { - "id": "llmChain_1-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "llmChain" - }, - "selected": false - }, - "positionAbsolute": { - "x": 970.9254258940236, - "y": 320.56761595884564 - }, - "selected": false, - "dragging": false - }, { "width": 300, "height": 475, @@ -94,8 +13,8 @@ "data": { "id": "promptTemplate_0", "label": "Prompt Template", - "name": "promptTemplate", "version": 1, + "name": "promptTemplate", "type": "PromptTemplate", "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], "category": "Prompts", @@ -144,7 +63,7 @@ }, { "width": 300, - "height": 523, + "height": 574, "id": "openAI_0", "position": { "x": 513.3297923232442, @@ -154,8 +73,8 @@ "data": { "id": "openAI_0", "label": "OpenAI", - "name": "openAI", "version": 3, + "name": "openAI", "type": "OpenAI", "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"], "category": "LLMs", @@ -301,27 +220,116 @@ "y": -112.67554802812833 }, "dragging": false + }, + { + "width": 300, + "height": 456, + "id": "llmChain_0", + "position": { + "x": 919.263534910828, + "y": 318.465734712124 + }, + "type": "customNode", + "data": { + "id": "llmChain_0", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your 
Chain", + "optional": true, + "id": "llmChain_0-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_0-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" + } + ], + "inputs": { + "model": "{{openAI_0.data.instance}}", + "prompt": "{{promptTemplate_0.data.instance}}", + "outputParser": "", + "chainName": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_0-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 919.263534910828, + "y": 318.465734712124 + }, + "dragging": false } ], "edges": [ { - "source": "promptTemplate_0", - "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", + "source": "openAI_0", + "sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-model-BaseLanguageModel", "type": "buttonedge", - "id": 
"promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", + "id": "openAI_0-openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_0-llmChain_0-input-model-BaseLanguageModel", "data": { "label": "" } }, { - "source": "openAI_0", - "sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-model-BaseLanguageModel", + "source": "promptTemplate_0", + "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", "type": "buttonedge", - "id": "openAI_0-openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-llmChain_1-llmChain_1-input-model-BaseLanguageModel", + "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/Structured Output Parser.json b/packages/server/marketplaces/chatflows/Structured Output Parser.json new file mode 100644 index 00000000000..38947f93ede --- /dev/null +++ b/packages/server/marketplaces/chatflows/Structured Output Parser.json @@ -0,0 +1,477 @@ +{ + "description": "Return response as a specified JSON structure instead of a string/text", + "nodes": [ + { + "width": 300, + "height": 574, + "id": "chatOpenAI_0", + "position": { + "x": 845.3961479115309, + "y": -205.74401580699953 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "version": 2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use 
the Chat endpoint", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["openAIApi"], + "id": "chatOpenAI_0-input-credential-credential" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" + }, + { + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" + }, + { + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" + }, + { + "label": "gpt-3.5-turbo-16k-0613", + "name": "gpt-3.5-turbo-16k-0613" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "step": 0.1, + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "step": 1, + "optional": true, + 
"additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "chatOpenAI_0-input-cache-BaseCache" + } + ], + "inputs": { + "cache": "", + "modelName": "gpt-3.5-turbo", + "temperature": "0", + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "basepath": "", + "baseOptions": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 845.3961479115309, + "y": -205.74401580699953 + }, + "dragging": false + }, + { + "width": 300, + "height": 456, + "id": "llmChain_0", + "position": { + "x": 1229.1699649849293, + "y": 245.55173505632646 + }, + "type": "customNode", + "data": { + "id": "llmChain_0", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_0-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_0-input-model-BaseLanguageModel" + }, + { + 
"label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_0-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" + } + ], + "inputs": { + "model": "{{chatOpenAI_0.data.instance}}", + "prompt": "{{chatPromptTemplate_0.data.instance}}", + "outputParser": "{{structuredOutputParser_0.data.instance}}", + "chainName": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_0-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "positionAbsolute": { + "x": 1229.1699649849293, + "y": 245.55173505632646 + }, + "selected": false + }, + { + "width": 300, + "height": 652, + "id": "chatPromptTemplate_0", + "position": { + "x": 501.1597501123828, + "y": -154.43917602832562 + }, + "type": "customNode", + "data": { + "id": "chatPromptTemplate_0", + "label": "Chat Prompt Template", + "version": 1, + "name": "chatPromptTemplate", + "type": "ChatPromptTemplate", + "baseClasses": ["ChatPromptTemplate", "BaseChatPromptTemplate", "BasePromptTemplate", "Runnable"], + "category": "Prompts", + "description": "Schema to represent a chat prompt", + "inputParams": [ + { + "label": "System Message", + "name": "systemMessagePrompt", + "type": "string", + "rows": 4, + "placeholder": "You are a helpful assistant that translates {input_language} to {output_language}.", + "id": "chatPromptTemplate_0-input-systemMessagePrompt-string" + }, + { + "label": "Human Message", + "name": 
"humanMessagePrompt", + "type": "string", + "rows": 4, + "placeholder": "{text}", + "id": "chatPromptTemplate_0-input-humanMessagePrompt-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "json", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "chatPromptTemplate_0-input-promptValues-json" + } + ], + "inputAnchors": [], + "inputs": { + "systemMessagePrompt": "Answer user's question as best you can", + "humanMessagePrompt": "{text}", + "promptValues": "" + }, + "outputAnchors": [ + { + "id": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable", + "name": "chatPromptTemplate", + "label": "ChatPromptTemplate", + "type": "ChatPromptTemplate | BaseChatPromptTemplate | BasePromptTemplate | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 501.1597501123828, + "y": -154.43917602832562 + }, + "dragging": false + }, + { + "width": 300, + "height": 328, + "id": "structuredOutputParser_0", + "position": { + "x": 170.3869571939727, + "y": 343.9298288967859 + }, + "type": "customNode", + "data": { + "id": "structuredOutputParser_0", + "label": "Structured Output Parser", + "version": 1, + "name": "structuredOutputParser", + "type": "StructuredOutputParser", + "baseClasses": ["StructuredOutputParser", "BaseLLMOutputParser", "Runnable"], + "category": "Output Parsers", + "description": "Parse the output of an LLM call into a given (JSON) structure.", + "inputParams": [ + { + "label": "Autofix", + "name": "autofixParser", + "type": "boolean", + "optional": true, + "description": "In the event that the first call fails, will make another call to the model to fix any errors.", + "id": "structuredOutputParser_0-input-autofixParser-boolean" + }, + { + "label": "JSON Structure", + "name": "jsonStructure", + "type": "datagrid", + "description": "JSON structure for LLM to return", + "datagrid": [ + { + 
"field": "property", + "headerName": "Property", + "editable": true + }, + { + "field": "type", + "headerName": "Type", + "type": "singleSelect", + "valueOptions": ["string", "number", "boolean"], + "editable": true + }, + { + "field": "description", + "headerName": "Description", + "editable": true, + "flex": 1 + } + ], + "default": [ + { + "property": "answer", + "type": "string", + "description": "answer to the user's question" + }, + { + "property": "source", + "type": "string", + "description": "sources used to answer the question, should be websites" + } + ], + "additionalParams": true, + "id": "structuredOutputParser_0-input-jsonStructure-datagrid" + } + ], + "inputAnchors": [], + "inputs": { + "autofixParser": true, + "jsonStructure": [ + { + "property": "answer", + "type": "string", + "description": "answer to the user's question" + }, + { + "property": "source", + "type": "string", + "description": "sources used to answer the question, should be websites" + } + ] + }, + "outputAnchors": [ + { + "id": "structuredOutputParser_0-output-structuredOutputParser-StructuredOutputParser|BaseLLMOutputParser|Runnable", + "name": "structuredOutputParser", + "label": "StructuredOutputParser", + "type": "StructuredOutputParser | BaseLLMOutputParser | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 170.3869571939727, + "y": 343.9298288967859 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_0-llmChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "chatPromptTemplate_0", + "sourceHandle": 
"chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + }, + { + "source": "structuredOutputParser_0", + "sourceHandle": "structuredOutputParser_0-output-structuredOutputParser-StructuredOutputParser|BaseLLMOutputParser|Runnable", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-outputParser-BaseLLMOutputParser", + "type": "buttonedge", + "id": "structuredOutputParser_0-structuredOutputParser_0-output-structuredOutputParser-StructuredOutputParser|BaseLLMOutputParser|Runnable-llmChain_0-llmChain_0-input-outputParser-BaseLLMOutputParser", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/chatflows/Translator.json b/packages/server/marketplaces/chatflows/Translator.json index b552aceb863..7cbe5ac7a0a 100644 --- a/packages/server/marketplaces/chatflows/Translator.json +++ b/packages/server/marketplaces/chatflows/Translator.json @@ -1,87 +1,6 @@ { "description": "Language translation using LLM Chain with a Chat Prompt Template and Chat Model", "nodes": [ - { - "width": 300, - "height": 405, - "id": "llmChain_1", - "position": { - "x": 865.7775572410412, - "y": 543.9211372857111 - }, - "type": "customNode", - "data": { - "id": "llmChain_1", - "label": "LLM Chain", - "name": "llmChain", - "version": 1, - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "BaseLangChain"], - "category": "Chains", - "description": "Chain to run queries against LLMs", - "inputParams": [ - { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", - "optional": true, - "id": 
"llmChain_1-input-chainName-string" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_1-input-model-BaseLanguageModel" - }, - { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_1-input-prompt-BasePromptTemplate" - } - ], - "inputs": { - "model": "{{chatOpenAI_0.data.instance}}", - "prompt": "{{chatPromptTemplate_0.data.instance}}", - "chainName": "Language Translation" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|BaseLangChain", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | BaseLangChain" - }, - { - "id": "llmChain_1-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" - } - ], - "outputs": { - "output": "llmChain" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 865.7775572410412, - "y": 543.9211372857111 - }, - "dragging": false - }, { "width": 300, "height": 652, @@ -94,8 +13,8 @@ "data": { "id": "chatPromptTemplate_0", "label": "Chat Prompt Template", - "name": "chatPromptTemplate", "version": 1, + "name": "chatPromptTemplate", "type": "ChatPromptTemplate", "baseClasses": ["ChatPromptTemplate", "BaseChatPromptTemplate", "BasePromptTemplate"], "category": "Prompts", @@ -153,7 +72,7 @@ }, { "width": 300, - "height": 523, + "height": 574, "id": "chatOpenAI_0", "position": { "x": 436.97058562345904, @@ -163,8 +82,8 @@ "data": { "id": "chatOpenAI_0", "label": "ChatOpenAI", - "name": "chatOpenAI", "version": 2, + "name": "chatOpenAI", "type": "ChatOpenAI", "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], "category": "Chat Models", @@ -312,27 +231,116 @@ "y": 29.96180150605153 }, "dragging": false + }, + { + "width": 300, + "height": 456, + 
"id": "llmChain_0", + "position": { + "x": 836.089121144244, + "y": 510.07109938359963 + }, + "type": "customNode", + "data": { + "id": "llmChain_0", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_0-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_0-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" + } + ], + "inputs": { + "model": "{{chatOpenAI_0.data.instance}}", + "prompt": "{{chatPromptTemplate_0.data.instance}}", + "outputParser": "", + "chainName": "Language Translation" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_0-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "selected": false, + "dragging": false, + "positionAbsolute": { + "x": 836.089121144244, + "y": 510.07109938359963 + } } ], "edges": [ { - "source": "chatPromptTemplate_0", - "sourceHandle": 
"chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-model-BaseLanguageModel", "type": "buttonedge", - "id": "chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-llmChain_0-llmChain_0-input-model-BaseLanguageModel", "data": { "label": "" } }, { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-model-BaseLanguageModel", + "source": "chatPromptTemplate_0", + "sourceHandle": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-llmChain_1-llmChain_1-input-model-BaseLanguageModel", + "id": "chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", "data": { "label": "" }