From 44cadc1cc35041084c1a4d035a762fcbb8cb3e05 Mon Sep 17 00:00:00 2001 From: Henry Date: Thu, 16 Nov 2023 18:34:17 +0000 Subject: [PATCH 1/4] add vector upsert ability --- .../agents/MRKLAgentChat/MRKLAgentChat.ts | 4 +- .../nodes/agents/MRKLAgentLLM/MRKLAgentLLM.ts | 4 +- .../nodes/tools/ZapierNLA/ZapierNLA.ts | 2 + .../nodes/vectorstores/Chroma/Chroma.ts | 170 ++ .../vectorstores/Chroma/Chroma_Existing.ts | 2 + .../vectorstores/Chroma/Chroma_Upsert.ts | 2 + .../Elasticsearch/ElasticSearchBase.ts | 2 + .../Elasticsearch/Elasticsearch.ts | 232 +++ .../nodes/vectorstores/Faiss/Faiss.ts | 145 ++ .../Faiss_Existing.ts | 2 + .../{Faiss_Upsert => Faiss}/Faiss_Upsert.ts | 2 + .../{Faiss_Existing => Faiss}/faiss.svg | 0 .../nodes/vectorstores/Faiss_Upsert/faiss.svg | 10 - .../InMemory/InMemoryVectorStore.ts | 26 +- .../nodes/vectorstores/Milvus/Milvus.ts | 348 ++++ .../vectorstores/Milvus/Milvus_Existing.ts | 2 + .../vectorstores/Milvus/Milvus_Upsert.ts | 2 + .../vectorstores/OpenSearch/OpenSearch.ts | 139 ++ .../OpenSearch_Upsert.ts | 2 + .../OpenSearch_existing.ts | 2 + .../opensearch.png | Bin .../OpenSearch_Upsert/opensearch.png | Bin 5216 -> 0 bytes .../nodes/vectorstores/Pinecone/Pinecone.ts | 189 ++ .../Pinecone/Pinecone_Existing.ts | 2 + .../vectorstores/Pinecone/Pinecone_Upsert.ts | 6 +- .../nodes/vectorstores/Postgres/Postgres.ts | 268 +++ .../Postgres_Exisiting.ts | 2 + .../Postgres_Upsert.ts | 2 + .../postgres.svg | 0 .../vectorstores/Postgres_Upsert/postgres.svg | 1 - .../nodes/vectorstores/Qdrant/Qdrant.ts | 246 +++ .../Qdrant_Existing.ts | 2 + .../Qdrant_Upsert.ts | 2 + .../{Qdrant_Existing => Qdrant}/qdrant.png | Bin .../vectorstores/Qdrant_Upsert/qdrant.png | Bin 11663 -> 0 bytes .../nodes/vectorstores/Redis/Redis.ts | 326 ++++ .../vectorstores/Redis/RedisSearchBase.ts | 2 + .../vectorstores/Redis/Redis_Existing.ts | 4 +- .../nodes/vectorstores/Redis/Redis_Upsert.ts | 2 +- .../vectorstores/Singlestore/Singlestore.ts | 198 +++ .../Singlestore_Existing.ts | 2 + .../Singlestore_Upsert.ts | 2 + .../singlestore.svg | 0 .../Singlestore_Upsert/singlestore.svg | 20 - .../nodes/vectorstores/Supabase/Supabase.ts | 169 ++ .../Supabase_Exisiting.ts | 2 + .../Supabase_Upsert.ts | 2 + .../supabase.svg | 0 .../vectorstores/Supabase_Upsert/supabase.svg | 15 - .../nodes/vectorstores/Vectara/Vectara.ts | 240 +++ .../vectorstores/Vectara/Vectara_Existing.ts | 2 + .../vectorstores/Vectara/Vectara_Upload.ts | 2 + .../vectorstores/Vectara/Vectara_Upsert.ts | 2 + .../nodes/vectorstores/Weaviate/Weaviate.ts | 212 +++ .../Weaviate_Existing.ts | 2 + .../Weaviate_Upsert.ts | 2 + .../weaviate.png | Bin .../vectorstores/Weaviate_Upsert/weaviate.png | Bin 55728 -> 0 bytes .../components/nodes/vectorstores/Zep/Zep.ts | 281 +++ .../nodes/vectorstores/Zep/Zep_Existing.ts | 2 + .../nodes/vectorstores/Zep/Zep_Upsert.ts | 2 + packages/components/src/Interface.ts | 6 + .../chatflows/API Agent OpenAI.json | 22 +- .../marketplaces/chatflows/API Agent.json | 33 +- .../marketplaces/chatflows/Antonym.json | 11 +- .../marketplaces/chatflows/AutoGPT.json | 112 +- .../marketplaces/chatflows/BabyAGI.json | 429 ++--- .../marketplaces/chatflows/CSV Agent.json | 11 +- .../marketplaces/chatflows/ChatGPTPlugin.json | 11 +- .../chatflows/Conversational Agent.json | 11 +- .../Conversational Retrieval Agent.json | 639 +++---- .../Conversational Retrieval QA Chain.json | 642 +++---- .../chatflows/Flowise Docs QnA.json | 11 +- .../chatflows/Image Generation.json | 671 +++++++ .../marketplaces/chatflows/Local QnA.json | 564 +++--- 
.../chatflows/Long Term Memory.json | 673 +++---- .../chatflows/Metadata Filter Load.json | 501 ------ ...ilter Upsert.json => Metadata Filter.json} | 345 ++-- .../chatflows/Multi Prompt Chain.json | 11 +- .../chatflows/Multi Retrieval QA Chain.json | 821 +++++---- .../chatflows/Multiple VectorDB.json | 1540 +++++++++++------ .../marketplaces/chatflows/OpenAI Agent.json | 11 +- .../Prompt Chaining with VectorStore.json | 1078 ++++++------ .../{MRKLAgent.json => ReAct Agent.json} | 185 +- .../marketplaces/chatflows/SQL DB Chain.json | 11 +- .../chatflows/Simple Conversation Chain.json | 11 +- .../marketplaces/chatflows/Translator.json | 11 +- .../chatflows/Vectara LLM Chain Upload.json | 334 ++-- .../marketplaces/chatflows/WebBrowser.json | 22 +- .../marketplaces/chatflows/WebPage QnA.json | 632 +++---- .../marketplaces/chatflows/Zapier NLA.json | 290 ---- packages/server/src/Interface.ts | 1 + packages/server/src/index.ts | 43 +- packages/server/src/utils/index.ts | 41 +- packages/ui/src/api/vectorstore.js | 7 + .../src/assets/scss/_themes-vars.module.scss | 16 + .../ui/src/store/context/ReactFlowContext.js | 32 +- packages/ui/src/themes/palette.js | 6 + .../src/ui-component/dialog/NodeInfoDialog.js | 9 +- packages/ui/src/ui-component/table/Table.js | 15 +- packages/ui/src/utils/genericHelper.js | 154 ++ packages/ui/src/views/canvas/AddNodes.js | 35 +- packages/ui/src/views/canvas/index.js | 20 +- .../ui/src/views/chatflows/APICodeDialog.js | 79 +- .../ui/src/views/chatmessage/ChatMessage.js | 13 +- .../ui/src/views/chatmessage/ChatPopUp.js | 6 +- .../views/vectorstore/VectorStoreDialog.js | 556 ++++++ .../src/views/vectorstore/VectorStorePopUp.js | 114 ++ 108 files changed, 9699 insertions(+), 4406 deletions(-) create mode 100644 packages/components/nodes/vectorstores/Chroma/Chroma.ts create mode 100644 packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts create mode 100644 packages/components/nodes/vectorstores/Faiss/Faiss.ts rename packages/components/nodes/vectorstores/{Faiss_Existing => Faiss}/Faiss_Existing.ts (98%) rename packages/components/nodes/vectorstores/{Faiss_Upsert => Faiss}/Faiss_Upsert.ts (98%) rename packages/components/nodes/vectorstores/{Faiss_Existing => Faiss}/faiss.svg (100%) delete mode 100644 packages/components/nodes/vectorstores/Faiss_Upsert/faiss.svg create mode 100644 packages/components/nodes/vectorstores/Milvus/Milvus.ts create mode 100644 packages/components/nodes/vectorstores/OpenSearch/OpenSearch.ts rename packages/components/nodes/vectorstores/{OpenSearch_Upsert => OpenSearch}/OpenSearch_Upsert.ts (98%) rename packages/components/nodes/vectorstores/{OpenSearch_Existing => OpenSearch}/OpenSearch_existing.ts (98%) rename packages/components/nodes/vectorstores/{OpenSearch_Existing => OpenSearch}/opensearch.png (100%) delete mode 100644 packages/components/nodes/vectorstores/OpenSearch_Upsert/opensearch.png create mode 100644 packages/components/nodes/vectorstores/Pinecone/Pinecone.ts create mode 100644 packages/components/nodes/vectorstores/Postgres/Postgres.ts rename packages/components/nodes/vectorstores/{Postgres_Existing => Postgres}/Postgres_Exisiting.ts (99%) rename packages/components/nodes/vectorstores/{Postgres_Upsert => Postgres}/Postgres_Upsert.ts (99%) rename packages/components/nodes/vectorstores/{Postgres_Existing => Postgres}/postgres.svg (100%) delete mode 100644 packages/components/nodes/vectorstores/Postgres_Upsert/postgres.svg create mode 100644 packages/components/nodes/vectorstores/Qdrant/Qdrant.ts rename 
packages/components/nodes/vectorstores/{Qdrant_Existing => Qdrant}/Qdrant_Existing.ts (99%) rename packages/components/nodes/vectorstores/{Qdrant_Upsert => Qdrant}/Qdrant_Upsert.ts (99%) rename packages/components/nodes/vectorstores/{Qdrant_Existing => Qdrant}/qdrant.png (100%) delete mode 100644 packages/components/nodes/vectorstores/Qdrant_Upsert/qdrant.png create mode 100644 packages/components/nodes/vectorstores/Redis/Redis.ts create mode 100644 packages/components/nodes/vectorstores/Singlestore/Singlestore.ts rename packages/components/nodes/vectorstores/{Singlestore_Existing => Singlestore}/Singlestore_Existing.ts (99%) rename packages/components/nodes/vectorstores/{Singlestore_Upsert => Singlestore}/Singlestore_Upsert.ts (99%) rename packages/components/nodes/vectorstores/{Singlestore_Existing => Singlestore}/singlestore.svg (100%) delete mode 100644 packages/components/nodes/vectorstores/Singlestore_Upsert/singlestore.svg create mode 100644 packages/components/nodes/vectorstores/Supabase/Supabase.ts rename packages/components/nodes/vectorstores/{Supabase_Existing => Supabase}/Supabase_Exisiting.ts (98%) rename packages/components/nodes/vectorstores/{Supabase_Upsert => Supabase}/Supabase_Upsert.ts (98%) rename packages/components/nodes/vectorstores/{Supabase_Existing => Supabase}/supabase.svg (100%) delete mode 100644 packages/components/nodes/vectorstores/Supabase_Upsert/supabase.svg create mode 100644 packages/components/nodes/vectorstores/Vectara/Vectara.ts create mode 100644 packages/components/nodes/vectorstores/Weaviate/Weaviate.ts rename packages/components/nodes/vectorstores/{Weaviate_Existing => Weaviate}/Weaviate_Existing.ts (99%) rename packages/components/nodes/vectorstores/{Weaviate_Upsert => Weaviate}/Weaviate_Upsert.ts (99%) rename packages/components/nodes/vectorstores/{Weaviate_Existing => Weaviate}/weaviate.png (100%) delete mode 100644 packages/components/nodes/vectorstores/Weaviate_Upsert/weaviate.png create mode 100644 packages/components/nodes/vectorstores/Zep/Zep.ts create mode 100644 packages/server/marketplaces/chatflows/Image Generation.json delete mode 100644 packages/server/marketplaces/chatflows/Metadata Filter Load.json rename packages/server/marketplaces/chatflows/{Metadata Filter Upsert.json => Metadata Filter.json} (86%) rename packages/server/marketplaces/chatflows/{MRKLAgent.json => ReAct Agent.json} (78%) delete mode 100644 packages/server/marketplaces/chatflows/Zapier NLA.json create mode 100644 packages/ui/src/api/vectorstore.js create mode 100644 packages/ui/src/views/vectorstore/VectorStoreDialog.js create mode 100644 packages/ui/src/views/vectorstore/VectorStorePopUp.js diff --git a/packages/components/nodes/agents/MRKLAgentChat/MRKLAgentChat.ts b/packages/components/nodes/agents/MRKLAgentChat/MRKLAgentChat.ts index ed169e62b79..19835e36d54 100644 --- a/packages/components/nodes/agents/MRKLAgentChat/MRKLAgentChat.ts +++ b/packages/components/nodes/agents/MRKLAgentChat/MRKLAgentChat.ts @@ -18,13 +18,13 @@ class MRKLAgentChat_Agents implements INode { inputs: INodeParams[] constructor() { - this.label = 'MRKL Agent for Chat Models' + this.label = 'ReAct Agent for Chat Models' this.name = 'mrklAgentChat' this.version = 1.0 this.type = 'AgentExecutor' this.category = 'Agents' this.icon = 'agent.svg' - this.description = 'Agent that uses the ReAct Framework to decide what action to take, optimized to be used with Chat Models' + this.description = 'Agent that uses the ReAct logic to decide what action to take, optimized to be used with Chat Models' 
this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)] this.inputs = [ { diff --git a/packages/components/nodes/agents/MRKLAgentLLM/MRKLAgentLLM.ts b/packages/components/nodes/agents/MRKLAgentLLM/MRKLAgentLLM.ts index 74929af84c5..43a4dee2cac 100644 --- a/packages/components/nodes/agents/MRKLAgentLLM/MRKLAgentLLM.ts +++ b/packages/components/nodes/agents/MRKLAgentLLM/MRKLAgentLLM.ts @@ -18,13 +18,13 @@ class MRKLAgentLLM_Agents implements INode { inputs: INodeParams[] constructor() { - this.label = 'MRKL Agent for LLMs' + this.label = 'ReAct Agent for LLMs' this.name = 'mrklAgentLLM' this.version = 1.0 this.type = 'AgentExecutor' this.category = 'Agents' this.icon = 'agent.svg' - this.description = 'Agent that uses the ReAct Framework to decide what action to take, optimized to be used with LLMs' + this.description = 'Agent that uses the ReAct logic to decide what action to take, optimized to be used with LLMs' this.baseClasses = [this.type, ...getBaseClasses(AgentExecutor)] this.inputs = [ { diff --git a/packages/components/nodes/tools/ZapierNLA/ZapierNLA.ts b/packages/components/nodes/tools/ZapierNLA/ZapierNLA.ts index 49543136a27..31ac989b5ca 100644 --- a/packages/components/nodes/tools/ZapierNLA/ZapierNLA.ts +++ b/packages/components/nodes/tools/ZapierNLA/ZapierNLA.ts @@ -11,6 +11,7 @@ class ZapierNLA_Tools implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -23,6 +24,7 @@ class ZapierNLA_Tools implements INode { this.icon = 'zapier.svg' this.category = 'Tools' this.description = "Access to apps and actions on Zapier's platform through a natural language API interface" + this.badge = 'DEPRECATING' this.inputs = [] this.credential = { label: 'Connect Credential', diff --git a/packages/components/nodes/vectorstores/Chroma/Chroma.ts b/packages/components/nodes/vectorstores/Chroma/Chroma.ts new file mode 100644 index 00000000000..eef2db79df7 --- /dev/null +++ b/packages/components/nodes/vectorstores/Chroma/Chroma.ts @@ -0,0 +1,170 @@ +import { flatten } from 'lodash' +import { Chroma } from 'langchain/vectorstores/chroma' +import { Embeddings } from 'langchain/embeddings/base' +import { Document } from 'langchain/document' +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { ChromaExtended } from './core' + +class Chroma_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Chroma' + this.name = 'chroma' + this.version = 1.0 + this.type = 'Chroma' + this.icon = 'chroma.svg' + this.category = 'Vector Stores' + this.description = 'Upsert or Load data to Chroma Vector Database' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'NEW' + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + description: 'Only needed if you have chroma on cloud services with X-Api-key', + optional: true, + credentialNames: ['chromaApi'] + } + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' 
+ }, + { + label: 'Collection Name', + name: 'collectionName', + type: 'string' + }, + { + label: 'Chroma URL', + name: 'chromaURL', + type: 'string', + optional: true + }, + { + label: 'Chroma Metadata Filter', + name: 'chromaMetadataFilter', + type: 'json', + optional: true, + additionalParams: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Chroma Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Chroma Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(Chroma)] + } + ] + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData, options: ICommonObject): Promise { + const collectionName = nodeData.inputs?.collectionName as string + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + const chromaURL = nodeData.inputs?.chromaURL as string + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const chromaApiKey = getCredentialParam('chromaApiKey', credentialData, nodeData) + + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + finalDocs.push(new Document(flattenDocs[i])) + } + } + + const obj: { + collectionName: string + url?: string + chromaApiKey?: string + } = { collectionName } + if (chromaURL) obj.url = chromaURL + if (chromaApiKey) obj.chromaApiKey = chromaApiKey + + try { + await ChromaExtended.fromDocuments(finalDocs, embeddings, obj) + } catch (e) { + throw new Error(e) + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const collectionName = nodeData.inputs?.collectionName as string + const embeddings = nodeData.inputs?.embeddings as Embeddings + const chromaURL = nodeData.inputs?.chromaURL as string + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? parseFloat(topK) : 4 + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const chromaApiKey = getCredentialParam('chromaApiKey', credentialData, nodeData) + + const chromaMetadataFilter = nodeData.inputs?.chromaMetadataFilter + + const obj: { + collectionName: string + url?: string + chromaApiKey?: string + filter?: object | undefined + } = { collectionName } + if (chromaURL) obj.url = chromaURL + if (chromaApiKey) obj.chromaApiKey = chromaApiKey + if (chromaMetadataFilter) { + const metadatafilter = typeof chromaMetadataFilter === 'object' ? 
chromaMetadataFilter : JSON.parse(chromaMetadataFilter) + obj.filter = metadatafilter + } + + const vectorStore = await ChromaExtended.fromExistingCollection(embeddings, obj) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: Chroma_VectorStores } diff --git a/packages/components/nodes/vectorstores/Chroma/Chroma_Existing.ts b/packages/components/nodes/vectorstores/Chroma/Chroma_Existing.ts index ff929ef1a3b..62d3f8a232c 100644 --- a/packages/components/nodes/vectorstores/Chroma/Chroma_Existing.ts +++ b/packages/components/nodes/vectorstores/Chroma/Chroma_Existing.ts @@ -12,6 +12,7 @@ class Chroma_Existing_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -26,6 +27,7 @@ class Chroma_Existing_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Load existing index from Chroma (i.e: Document has been upserted)' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Chroma/Chroma_Upsert.ts b/packages/components/nodes/vectorstores/Chroma/Chroma_Upsert.ts index 951338bae87..e85644785cc 100644 --- a/packages/components/nodes/vectorstores/Chroma/Chroma_Upsert.ts +++ b/packages/components/nodes/vectorstores/Chroma/Chroma_Upsert.ts @@ -14,6 +14,7 @@ class ChromaUpsert_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -28,6 +29,7 @@ class ChromaUpsert_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Upsert documents to Chroma' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Elasticsearch/ElasticSearchBase.ts b/packages/components/nodes/vectorstores/Elasticsearch/ElasticSearchBase.ts index 59294b7ea1c..d5b8fbcafb0 100644 --- a/packages/components/nodes/vectorstores/Elasticsearch/ElasticSearchBase.ts +++ b/packages/components/nodes/vectorstores/Elasticsearch/ElasticSearchBase.ts @@ -21,6 +21,7 @@ export abstract class ElasticSearchBase { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -30,6 +31,7 @@ export abstract class ElasticSearchBase { this.type = 'Elasticsearch' this.icon = 'elasticsearch.png' this.category = 'Vector Stores' + this.badge = 'DEPRECATING' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] this.credential = { label: 'Connect Credential', diff --git a/packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts b/packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts new file mode 100644 index 00000000000..e7915b3e62e --- /dev/null +++ b/packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts @@ -0,0 +1,232 @@ +import { flatten } from 'lodash' +import { Client, ClientOptions } from '@elastic/elasticsearch' +import { Document } from 'langchain/document' +import { Embeddings } from 'langchain/embeddings/base' +import { 
ElasticClientArgs, ElasticVectorSearch } from 'langchain/vectorstores/elasticsearch' +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' + +class Elasticsearch_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Elasticsearch' + this.name = 'elasticsearch' + this.version = 1.0 + this.description = 'Upsert or Load data to Elasticsearch Vector Database' + this.type = 'Elasticsearch' + this.icon = 'elasticsearch.png' + this.category = 'Vector Stores' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'NEW' + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['elasticsearchApi', 'elasticSearchUserPassword'] + } + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Index Name', + name: 'indexName', + placeholder: '', + type: 'string' + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + }, + { + label: 'Similarity', + name: 'similarity', + description: 'Similarity measure used in Elasticsearch.', + type: 'options', + default: 'l2_norm', + options: [ + { + label: 'l2_norm', + name: 'l2_norm' + }, + { + label: 'dot_product', + name: 'dot_product' + }, + { + label: 'cosine', + name: 'cosine' + } + ], + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Elasticsearch Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Elasticsearch Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(ElasticVectorSearch)] + } + ] + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const endPoint = getCredentialParam('endpoint', credentialData, nodeData) + const cloudId = getCredentialParam('cloudId', credentialData, nodeData) + const indexName = nodeData.inputs?.indexName as string + const embeddings = nodeData.inputs?.embeddings as Embeddings + const similarityMeasure = nodeData.inputs?.similarityMeasure as string + + const docs = nodeData.inputs?.document as Document[] + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + finalDocs.push(new Document(flattenDocs[i])) + } + } + + // The following code is a workaround for a bug (Langchain Issue #1589) in the underlying library. 
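+            // Example of the nested metadata shape that triggers the issue (illustrative values only, not taken from this PR):
+            //   { source: 'example.pdf', pdf: { version: '1.10' }, loc: { lines: { from: 1, to: 10 } } }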
+ // Store does not support object in metadata and fail silently + finalDocs.forEach((d) => { + delete d.metadata.pdf + delete d.metadata.loc + }) + // end of workaround + + const elasticSearchClientArgs = prepareClientArgs(endPoint, cloudId, credentialData, nodeData, similarityMeasure, indexName) + const vectorStore = new ElasticVectorSearch(embeddings, elasticSearchClientArgs) + + try { + await vectorStore.addDocuments(finalDocs) + } catch (e) { + throw new Error(e) + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const endPoint = getCredentialParam('endpoint', credentialData, nodeData) + const cloudId = getCredentialParam('cloudId', credentialData, nodeData) + const indexName = nodeData.inputs?.indexName as string + const embeddings = nodeData.inputs?.embeddings as Embeddings + const topK = nodeData.inputs?.topK as string + const similarityMeasure = nodeData.inputs?.similarityMeasure as string + const k = topK ? parseFloat(topK) : 4 + const output = nodeData.outputs?.output as string + + const elasticSearchClientArgs = prepareClientArgs(endPoint, cloudId, credentialData, nodeData, similarityMeasure, indexName) + const vectorStore = await ElasticVectorSearch.fromExistingIndex(embeddings, elasticSearchClientArgs) + + if (output === 'retriever') { + return vectorStore.asRetriever(k) + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +const prepareConnectionOptions = ( + endPoint: string | undefined, + cloudId: string | undefined, + credentialData: ICommonObject, + nodeData: INodeData +) => { + let elasticSearchClientOptions: ClientOptions = {} + if (endPoint) { + let apiKey = getCredentialParam('apiKey', credentialData, nodeData) + elasticSearchClientOptions = { + node: endPoint, + auth: { + apiKey: apiKey + } + } + } else if (cloudId) { + let username = getCredentialParam('username', credentialData, nodeData) + let password = getCredentialParam('password', credentialData, nodeData) + elasticSearchClientOptions = { + cloud: { + id: cloudId + }, + auth: { + username: username, + password: password + } + } + } + return elasticSearchClientOptions +} + +const prepareClientArgs = ( + endPoint: string | undefined, + cloudId: string | undefined, + credentialData: ICommonObject, + nodeData: INodeData, + similarityMeasure: string, + indexName: string +) => { + let elasticSearchClientOptions = prepareConnectionOptions(endPoint, cloudId, credentialData, nodeData) + let vectorSearchOptions = {} + switch (similarityMeasure) { + case 'dot_product': + vectorSearchOptions = { + similarity: 'dot_product' + } + break + case 'cosine': + vectorSearchOptions = { + similarity: 'cosine' + } + break + default: + vectorSearchOptions = { + similarity: 'l2_norm' + } + } + const elasticSearchClientArgs: ElasticClientArgs = { + client: new Client(elasticSearchClientOptions), + indexName: indexName, + vectorSearchOptions: vectorSearchOptions + } + return elasticSearchClientArgs +} + +module.exports = { nodeClass: Elasticsearch_VectorStores } diff --git a/packages/components/nodes/vectorstores/Faiss/Faiss.ts b/packages/components/nodes/vectorstores/Faiss/Faiss.ts new file mode 100644 index 00000000000..e2c1512e951 --- /dev/null +++ b/packages/components/nodes/vectorstores/Faiss/Faiss.ts @@ -0,0 +1,145 @@ +import { flatten } from 'lodash' +import { Document } from 'langchain/document' +import { FaissStore } from 
'langchain/vectorstores/faiss' +import { Embeddings } from 'langchain/embeddings/base' +import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' + +class Faiss_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Faiss' + this.name = 'faiss' + this.version = 1.0 + this.type = 'Faiss' + this.icon = 'faiss.svg' + this.category = 'Vector Stores' + this.description = 'Upsert or Load data to Faiss Vector Store' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'NEW' + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Base Path to load', + name: 'basePath', + description: 'Path to load faiss.index file', + placeholder: `C:\\Users\\User\\Desktop`, + type: 'string' + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Faiss Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Faiss Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(FaissStore)] + } + ] + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData): Promise { + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + const basePath = nodeData.inputs?.basePath as string + + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + finalDocs.push(new Document(flattenDocs[i])) + } + } + + try { + const vectorStore = await FaissStore.fromDocuments(finalDocs, embeddings) + await vectorStore.save(basePath) + + // Avoid illegal invocation error + vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number) => { + return await similaritySearchVectorWithScore(query, k, vectorStore) + } + } catch (e) { + throw new Error(e) + } + } + } + + async init(nodeData: INodeData): Promise { + const embeddings = nodeData.inputs?.embeddings as Embeddings + const basePath = nodeData.inputs?.basePath as string + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? 
parseFloat(topK) : 4 + + const vectorStore = await FaissStore.load(basePath, embeddings) + + // Avoid illegal invocation error + vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number) => { + return await similaritySearchVectorWithScore(query, k, vectorStore) + } + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +const similaritySearchVectorWithScore = async (query: number[], k: number, vectorStore: FaissStore) => { + const index = vectorStore.index + + if (k > index.ntotal()) { + const total = index.ntotal() + console.warn(`k (${k}) is greater than the number of elements in the index (${total}), setting k to ${total}`) + k = total + } + + const result = index.search(query, k) + return result.labels.map((id, index) => { + const uuid = vectorStore._mapping[id] + return [vectorStore.docstore.search(uuid), result.distances[index]] as [Document, number] + }) +} + +module.exports = { nodeClass: Faiss_VectorStores } diff --git a/packages/components/nodes/vectorstores/Faiss_Existing/Faiss_Existing.ts b/packages/components/nodes/vectorstores/Faiss/Faiss_Existing.ts similarity index 98% rename from packages/components/nodes/vectorstores/Faiss_Existing/Faiss_Existing.ts rename to packages/components/nodes/vectorstores/Faiss/Faiss_Existing.ts index 15d476d8c9b..6f4e54d7f54 100644 --- a/packages/components/nodes/vectorstores/Faiss_Existing/Faiss_Existing.ts +++ b/packages/components/nodes/vectorstores/Faiss/Faiss_Existing.ts @@ -12,6 +12,7 @@ class Faiss_Existing_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] outputs: INodeOutputsValue[] @@ -25,6 +26,7 @@ class Faiss_Existing_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Load existing index from Faiss (i.e: Document has been upserted)' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.inputs = [ { label: 'Embeddings', diff --git a/packages/components/nodes/vectorstores/Faiss_Upsert/Faiss_Upsert.ts b/packages/components/nodes/vectorstores/Faiss/Faiss_Upsert.ts similarity index 98% rename from packages/components/nodes/vectorstores/Faiss_Upsert/Faiss_Upsert.ts rename to packages/components/nodes/vectorstores/Faiss/Faiss_Upsert.ts index a84b9da4709..9b658a37baa 100644 --- a/packages/components/nodes/vectorstores/Faiss_Upsert/Faiss_Upsert.ts +++ b/packages/components/nodes/vectorstores/Faiss/Faiss_Upsert.ts @@ -13,6 +13,7 @@ class FaissUpsert_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] outputs: INodeOutputsValue[] @@ -26,6 +27,7 @@ class FaissUpsert_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Upsert documents to Faiss' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.inputs = [ { label: 'Document', diff --git a/packages/components/nodes/vectorstores/Faiss_Existing/faiss.svg b/packages/components/nodes/vectorstores/Faiss/faiss.svg similarity index 100% rename from packages/components/nodes/vectorstores/Faiss_Existing/faiss.svg rename to packages/components/nodes/vectorstores/Faiss/faiss.svg diff --git a/packages/components/nodes/vectorstores/Faiss_Upsert/faiss.svg 
b/packages/components/nodes/vectorstores/Faiss_Upsert/faiss.svg deleted file mode 100644 index 5fbe98322bd..00000000000 --- a/packages/components/nodes/vectorstores/Faiss_Upsert/faiss.svg +++ /dev/null @@ -1,10 +0,0 @@ - - - - - - - - - - \ No newline at end of file diff --git a/packages/components/nodes/vectorstores/InMemory/InMemoryVectorStore.ts b/packages/components/nodes/vectorstores/InMemory/InMemoryVectorStore.ts index a827e3ee376..51394613e42 100644 --- a/packages/components/nodes/vectorstores/InMemory/InMemoryVectorStore.ts +++ b/packages/components/nodes/vectorstores/InMemory/InMemoryVectorStore.ts @@ -1,9 +1,9 @@ -import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { flatten } from 'lodash' import { MemoryVectorStore } from 'langchain/vectorstores/memory' import { Embeddings } from 'langchain/embeddings/base' import { Document } from 'langchain/document' +import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' import { getBaseClasses } from '../../../src/utils' -import { flatten } from 'lodash' class InMemoryVectorStore_VectorStores implements INode { label: string @@ -61,6 +61,28 @@ class InMemoryVectorStore_VectorStores implements INode { ] } + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData): Promise { + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + finalDocs.push(new Document(flattenDocs[i])) + } + } + + try { + await MemoryVectorStore.fromDocuments(finalDocs, embeddings) + } catch (e) { + throw new Error(e) + } + } + } + async init(nodeData: INodeData): Promise { const docs = nodeData.inputs?.document as Document[] const embeddings = nodeData.inputs?.embeddings as Embeddings diff --git a/packages/components/nodes/vectorstores/Milvus/Milvus.ts b/packages/components/nodes/vectorstores/Milvus/Milvus.ts new file mode 100644 index 00000000000..b937be1e7e5 --- /dev/null +++ b/packages/components/nodes/vectorstores/Milvus/Milvus.ts @@ -0,0 +1,348 @@ +import { flatten } from 'lodash' +import { DataType, ErrorCode, MetricType, IndexType } from '@zilliz/milvus2-sdk-node' +import { Document } from 'langchain/document' +import { MilvusLibArgs, Milvus } from 'langchain/vectorstores/milvus' +import { Embeddings } from 'langchain/embeddings/base' +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' + +interface InsertRow { + [x: string]: string | number[] +} + +class Milvus_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Milvus' + this.name = 'milvus' + this.version = 1.0 + this.type = 'Milvus' + this.icon = 'milvus.svg' + this.category = 'Vector Stores' + this.description = 'Upsert or Load data to Milvus Vector Database' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'NEW' + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + optional: 
true, + credentialNames: ['milvusAuth'] + } + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Milvus Server URL', + name: 'milvusServerUrl', + type: 'string', + placeholder: 'http://localhost:19530' + }, + { + label: 'Milvus Collection Name', + name: 'milvusCollection', + type: 'string' + }, + { + label: 'Milvus Filter', + name: 'milvusFilter', + type: 'string', + optional: true, + description: + 'Filter data with a simple string query. Refer Milvus docs for more details.', + placeholder: 'doc=="a"', + additionalParams: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Milvus Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Milvus Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(Milvus)] + } + ] + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData, options: ICommonObject): Promise { + // server setup + const address = nodeData.inputs?.milvusServerUrl as string + const collectionName = nodeData.inputs?.milvusCollection as string + + // embeddings + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + + // credential + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const milvusUser = getCredentialParam('milvusUser', credentialData, nodeData) + const milvusPassword = getCredentialParam('milvusPassword', credentialData, nodeData) + + // init MilvusLibArgs + const milVusArgs: MilvusLibArgs = { + url: address, + collectionName: collectionName + } + + if (milvusUser) milVusArgs.username = milvusUser + if (milvusPassword) milVusArgs.password = milvusPassword + + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + finalDocs.push(new Document(flattenDocs[i])) + } + } + + try { + const vectorStore = await MilvusUpsert.fromDocuments(finalDocs, embeddings, milVusArgs) + + // Avoid Illegal Invocation + vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: string) => { + return await similaritySearchVectorWithScore(query, k, vectorStore, undefined, filter) + } + } catch (e) { + throw new Error(e) + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + // server setup + const address = nodeData.inputs?.milvusServerUrl as string + const collectionName = nodeData.inputs?.milvusCollection as string + const milvusFilter = nodeData.inputs?.milvusFilter as string + + // embeddings + const embeddings = nodeData.inputs?.embeddings as Embeddings + const topK = nodeData.inputs?.topK as string + + // output + const output = nodeData.outputs?.output as string + + // format data + const k = topK ? parseInt(topK, 10) : 4 + + // credential + const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) + const milvusUser = getCredentialParam('milvusUser', credentialData, nodeData) + const milvusPassword = getCredentialParam('milvusPassword', credentialData, nodeData) + + // init MilvusLibArgs + const milVusArgs: MilvusLibArgs = { + url: address, + collectionName: collectionName + } + + if (milvusUser) milVusArgs.username = milvusUser + if (milvusPassword) milVusArgs.password = milvusPassword + + const vectorStore = await Milvus.fromExistingCollection(embeddings, milVusArgs) + + // Avoid Illegal Invocation + vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: string) => { + return await similaritySearchVectorWithScore(query, k, vectorStore, milvusFilter, filter) + } + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +const checkJsonString = (value: string): { isJson: boolean; obj: any } => { + try { + const result = JSON.parse(value) + return { isJson: true, obj: result } + } catch (e) { + return { isJson: false, obj: null } + } +} + +const similaritySearchVectorWithScore = async (query: number[], k: number, vectorStore: Milvus, milvusFilter?: string, filter?: string) => { + const hasColResp = await vectorStore.client.hasCollection({ + collection_name: vectorStore.collectionName + }) + if (hasColResp.status.error_code !== ErrorCode.SUCCESS) { + throw new Error(`Error checking collection: ${hasColResp}`) + } + if (hasColResp.value === false) { + throw new Error(`Collection not found: ${vectorStore.collectionName}, please create collection before search.`) + } + + const filterStr = milvusFilter ?? filter ?? '' + + await vectorStore.grabCollectionFields() + + const loadResp = await vectorStore.client.loadCollectionSync({ + collection_name: vectorStore.collectionName + }) + + if (loadResp.error_code !== ErrorCode.SUCCESS) { + throw new Error(`Error loading collection: ${loadResp}`) + } + + const outputFields = vectorStore.fields.filter((field) => field !== vectorStore.vectorField) + + const searchResp = await vectorStore.client.search({ + collection_name: vectorStore.collectionName, + search_params: { + anns_field: vectorStore.vectorField, + topk: k.toString(), + metric_type: vectorStore.indexCreateParams.metric_type, + params: vectorStore.indexSearchParams + }, + output_fields: outputFields, + vector_type: DataType.FloatVector, + vectors: [query], + filter: filterStr + }) + if (searchResp.status.error_code !== ErrorCode.SUCCESS) { + throw new Error(`Error searching data: ${JSON.stringify(searchResp)}`) + } + const results: [Document, number][] = [] + searchResp.results.forEach((result) => { + const fields = { + pageContent: '', + metadata: {} as Record + } + Object.keys(result).forEach((key) => { + if (key === vectorStore.textField) { + fields.pageContent = result[key] + } else if (vectorStore.fields.includes(key) || key === vectorStore.primaryField) { + if (typeof result[key] === 'string') { + const { isJson, obj } = checkJsonString(result[key]) + fields.metadata[key] = isJson ? 
obj : result[key] + } else { + fields.metadata[key] = result[key] + } + } + }) + results.push([new Document(fields), result.score]) + }) + return results +} + +class MilvusUpsert extends Milvus { + async addVectors(vectors: number[][], documents: Document[]): Promise { + if (vectors.length === 0) { + return + } + await this.ensureCollection(vectors, documents) + + const insertDatas: InsertRow[] = [] + + for (let index = 0; index < vectors.length; index++) { + const vec = vectors[index] + const doc = documents[index] + const data: InsertRow = { + [this.textField]: doc.pageContent, + [this.vectorField]: vec + } + this.fields.forEach((field) => { + switch (field) { + case this.primaryField: + if (!this.autoId) { + if (doc.metadata[this.primaryField] === undefined) { + throw new Error( + `The Collection's primaryField is configured with autoId=false, thus its value must be provided through metadata.` + ) + } + data[field] = doc.metadata[this.primaryField] + } + break + case this.textField: + data[field] = doc.pageContent + break + case this.vectorField: + data[field] = vec + break + default: // metadata fields + if (doc.metadata[field] === undefined) { + throw new Error(`The field "${field}" is not provided in documents[${index}].metadata.`) + } else if (typeof doc.metadata[field] === 'object') { + data[field] = JSON.stringify(doc.metadata[field]) + } else { + data[field] = doc.metadata[field] + } + break + } + }) + + insertDatas.push(data) + } + + const descIndexResp = await this.client.describeIndex({ + collection_name: this.collectionName + }) + + if (descIndexResp.status.error_code === ErrorCode.IndexNotExist) { + const resp = await this.client.createIndex({ + collection_name: this.collectionName, + field_name: this.vectorField, + index_name: `myindex_${Date.now().toString()}`, + index_type: IndexType.AUTOINDEX, + metric_type: MetricType.L2 + }) + if (resp.error_code !== ErrorCode.SUCCESS) { + throw new Error(`Error creating index`) + } + } + + const insertResp = await this.client.insert({ + collection_name: this.collectionName, + fields_data: insertDatas + }) + + if (insertResp.status.error_code !== ErrorCode.SUCCESS) { + throw new Error(`Error inserting data: ${JSON.stringify(insertResp)}`) + } + + await this.client.flushSync({ collection_names: [this.collectionName] }) + } +} + +module.exports = { nodeClass: Milvus_VectorStores } diff --git a/packages/components/nodes/vectorstores/Milvus/Milvus_Existing.ts b/packages/components/nodes/vectorstores/Milvus/Milvus_Existing.ts index 3e9b125f79f..bce5b9cb707 100644 --- a/packages/components/nodes/vectorstores/Milvus/Milvus_Existing.ts +++ b/packages/components/nodes/vectorstores/Milvus/Milvus_Existing.ts @@ -13,6 +13,7 @@ class Milvus_Existing_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -27,6 +28,7 @@ class Milvus_Existing_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Load existing collection from Milvus (i.e: Document has been upserted)' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Milvus/Milvus_Upsert.ts b/packages/components/nodes/vectorstores/Milvus/Milvus_Upsert.ts index 371fd16fc26..a0cae742b32 100644 --- a/packages/components/nodes/vectorstores/Milvus/Milvus_Upsert.ts +++ 
b/packages/components/nodes/vectorstores/Milvus/Milvus_Upsert.ts @@ -18,6 +18,7 @@ class Milvus_Upsert_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -32,6 +33,7 @@ class Milvus_Upsert_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Upsert documents to Milvus' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/OpenSearch/OpenSearch.ts b/packages/components/nodes/vectorstores/OpenSearch/OpenSearch.ts new file mode 100644 index 00000000000..66f04143b19 --- /dev/null +++ b/packages/components/nodes/vectorstores/OpenSearch/OpenSearch.ts @@ -0,0 +1,139 @@ +import { flatten } from 'lodash' +import { Client } from '@opensearch-project/opensearch' +import { Document } from 'langchain/document' +import { OpenSearchVectorStore } from 'langchain/vectorstores/opensearch' +import { Embeddings } from 'langchain/embeddings/base' +import { INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses } from '../../../src/utils' + +class OpenSearch_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'OpenSearch' + this.name = 'openSearch' + this.version = 1.0 + this.type = 'OpenSearch' + this.icon = 'opensearch.png' + this.category = 'Vector Stores' + this.description = 'Upsert or Load data to OpenSearch Vector Database' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'NEW' + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'OpenSearch URL', + name: 'opensearchURL', + type: 'string', + placeholder: 'http://127.0.0.1:9200' + }, + { + label: 'Index Name', + name: 'indexName', + type: 'string' + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'OpenSearch Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'OpenSearch Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(OpenSearchVectorStore)] + } + ] + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData): Promise { + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + const opensearchURL = nodeData.inputs?.opensearchURL as string + const indexName = nodeData.inputs?.indexName as string + + const flattenDocs = docs && docs.length ? 
flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + finalDocs.push(new Document(flattenDocs[i])) + } + } + + const client = new Client({ + nodes: [opensearchURL] + }) + + try { + await OpenSearchVectorStore.fromDocuments(finalDocs, embeddings, { + client, + indexName: indexName + }) + } catch (e) { + throw new Error(e) + } + } + } + + async init(nodeData: INodeData): Promise { + const embeddings = nodeData.inputs?.embeddings as Embeddings + const opensearchURL = nodeData.inputs?.opensearchURL as string + const indexName = nodeData.inputs?.indexName as string + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? parseFloat(topK) : 4 + + const client = new Client({ + nodes: [opensearchURL] + }) + + const vectorStore = new OpenSearchVectorStore(embeddings, { + client, + indexName + }) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: OpenSearch_VectorStores } diff --git a/packages/components/nodes/vectorstores/OpenSearch_Upsert/OpenSearch_Upsert.ts b/packages/components/nodes/vectorstores/OpenSearch/OpenSearch_Upsert.ts similarity index 98% rename from packages/components/nodes/vectorstores/OpenSearch_Upsert/OpenSearch_Upsert.ts rename to packages/components/nodes/vectorstores/OpenSearch/OpenSearch_Upsert.ts index da1235819c6..2eb47316020 100644 --- a/packages/components/nodes/vectorstores/OpenSearch_Upsert/OpenSearch_Upsert.ts +++ b/packages/components/nodes/vectorstores/OpenSearch/OpenSearch_Upsert.ts @@ -14,6 +14,7 @@ class OpenSearchUpsert_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] outputs: INodeOutputsValue[] @@ -27,6 +28,7 @@ class OpenSearchUpsert_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Upsert documents to OpenSearch' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.inputs = [ { label: 'Document', diff --git a/packages/components/nodes/vectorstores/OpenSearch_Existing/OpenSearch_existing.ts b/packages/components/nodes/vectorstores/OpenSearch/OpenSearch_existing.ts similarity index 98% rename from packages/components/nodes/vectorstores/OpenSearch_Existing/OpenSearch_existing.ts rename to packages/components/nodes/vectorstores/OpenSearch/OpenSearch_existing.ts index c8d09470ae1..a012a2e5348 100644 --- a/packages/components/nodes/vectorstores/OpenSearch_Existing/OpenSearch_existing.ts +++ b/packages/components/nodes/vectorstores/OpenSearch/OpenSearch_existing.ts @@ -12,6 +12,7 @@ class OpenSearch_Existing_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] outputs: INodeOutputsValue[] @@ -25,6 +26,7 @@ class OpenSearch_Existing_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Load existing index from OpenSearch (i.e: Document has been upserted)' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.inputs = [ { label: 'Embeddings', diff --git a/packages/components/nodes/vectorstores/OpenSearch_Existing/opensearch.png 
b/packages/components/nodes/vectorstores/OpenSearch/opensearch.png similarity index 100% rename from packages/components/nodes/vectorstores/OpenSearch_Existing/opensearch.png rename to packages/components/nodes/vectorstores/OpenSearch/opensearch.png diff --git a/packages/components/nodes/vectorstores/OpenSearch_Upsert/opensearch.png b/packages/components/nodes/vectorstores/OpenSearch_Upsert/opensearch.png deleted file mode 100644 index 3fdcfd3f09ed02c1478f690884ecafe844cb32ae..0000000000000000000000000000000000000000 Binary files a/packages/components/nodes/vectorstores/OpenSearch_Upsert/opensearch.png and /dev/null differ
diff --git a/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts b/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts new file mode 100644 + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData, options: ICommonObject): Promise { + const index = nodeData.inputs?.pineconeIndex as string + const pineconeNamespace = nodeData.inputs?.pineconeNamespace as string + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const pineconeApiKey = getCredentialParam('pineconeApiKey', credentialData, nodeData) + const pineconeEnv = getCredentialParam('pineconeEnv', credentialData, nodeData) + + const client = new Pinecone({ + apiKey: pineconeApiKey, + environment: pineconeEnv + }) + + const pineconeIndex = client.Index(index) + + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + finalDocs.push(new Document(flattenDocs[i])) + } + } + + const obj: PineconeLibArgs = { + pineconeIndex + } + + if (pineconeNamespace) obj.namespace = pineconeNamespace + + try { + await PineconeStore.fromDocuments(finalDocs, embeddings, obj) + } catch (e) { + throw new Error(e) + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const index = nodeData.inputs?.pineconeIndex as string + const pineconeNamespace = nodeData.inputs?.pineconeNamespace as string + const pineconeMetadataFilter = nodeData.inputs?.pineconeMetadataFilter + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? parseFloat(topK) : 4 + + const credentialData = await getCredentialData(nodeData.credential ??
'', options) + const pineconeApiKey = getCredentialParam('pineconeApiKey', credentialData, nodeData) + const pineconeEnv = getCredentialParam('pineconeEnv', credentialData, nodeData) + + const client = new Pinecone({ + apiKey: pineconeApiKey, + environment: pineconeEnv + }) + + const pineconeIndex = client.Index(index) + + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + finalDocs.push(new Document(flattenDocs[i])) + } + } + + const obj: PineconeLibArgs = { + pineconeIndex + } + + if (pineconeNamespace) obj.namespace = pineconeNamespace + if (pineconeMetadataFilter) { + const metadatafilter = typeof pineconeMetadataFilter === 'object' ? pineconeMetadataFilter : JSON.parse(pineconeMetadataFilter) + obj.filter = metadatafilter + } + + const vectorStore = await PineconeStore.fromExistingIndex(embeddings, obj) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: Pinecone_VectorStores } diff --git a/packages/components/nodes/vectorstores/Pinecone/Pinecone_Existing.ts b/packages/components/nodes/vectorstores/Pinecone/Pinecone_Existing.ts index e8536d8d932..ee2db071e4e 100644 --- a/packages/components/nodes/vectorstores/Pinecone/Pinecone_Existing.ts +++ b/packages/components/nodes/vectorstores/Pinecone/Pinecone_Existing.ts @@ -12,6 +12,7 @@ class Pinecone_Existing_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -26,6 +27,7 @@ class Pinecone_Existing_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Load existing index from Pinecone (i.e: Document has been upserted)' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Pinecone/Pinecone_Upsert.ts b/packages/components/nodes/vectorstores/Pinecone/Pinecone_Upsert.ts index 0851fa3e59b..0c63ce7bd5f 100644 --- a/packages/components/nodes/vectorstores/Pinecone/Pinecone_Upsert.ts +++ b/packages/components/nodes/vectorstores/Pinecone/Pinecone_Upsert.ts @@ -1,10 +1,10 @@ -import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { flatten } from 'lodash' import { Pinecone } from '@pinecone-database/pinecone' import { PineconeLibArgs, PineconeStore } from 'langchain/vectorstores/pinecone' import { Embeddings } from 'langchain/embeddings/base' import { Document } from 'langchain/document' +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' -import { flatten } from 'lodash' class PineconeUpsert_VectorStores implements INode { label: string @@ -14,6 +14,7 @@ class PineconeUpsert_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -28,6 +29,7 @@ class PineconeUpsert_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Upsert documents to Pinecone' this.baseClasses = 
[this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Postgres/Postgres.ts b/packages/components/nodes/vectorstores/Postgres/Postgres.ts new file mode 100644 index 00000000000..0609d0b5750 --- /dev/null +++ b/packages/components/nodes/vectorstores/Postgres/Postgres.ts @@ -0,0 +1,268 @@ +import { Pool } from 'pg' +import { flatten } from 'lodash' +import { DataSourceOptions } from 'typeorm' +import { Embeddings } from 'langchain/embeddings/base' +import { Document } from 'langchain/document' +import { TypeORMVectorStore, TypeORMVectorStoreDocument } from 'langchain/vectorstores/typeorm' +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' + +class Postgres_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Postgres' + this.name = 'postgres' + this.version = 1.0 + this.type = 'Postgres' + this.icon = 'postgres.svg' + this.category = 'Vector Stores' + this.description = 'Upsert or Load data to Postgres using pgvector' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'NEW' + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['PostgresApi'] + } + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Host', + name: 'host', + type: 'string' + }, + { + label: 'Database', + name: 'database', + type: 'string' + }, + { + label: 'Port', + name: 'port', + type: 'number', + placeholder: '6432', + optional: true + }, + { + label: 'Table Name', + name: 'tableName', + type: 'string', + placeholder: 'documents', + additionalParams: true, + optional: true + }, + { + label: 'Additional Configuration', + name: 'additionalConfig', + type: 'json', + additionalParams: true, + optional: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Postgres Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Postgres Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(TypeORMVectorStore)] + } + ] + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const user = getCredentialParam('user', credentialData, nodeData) + const password = getCredentialParam('password', credentialData, nodeData) + const _tableName = nodeData.inputs?.tableName as string + const tableName = _tableName ? 
_tableName : 'documents' + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + const additionalConfig = nodeData.inputs?.additionalConfig as string + + let additionalConfiguration = {} + if (additionalConfig) { + try { + additionalConfiguration = typeof additionalConfig === 'object' ? additionalConfig : JSON.parse(additionalConfig) + } catch (exception) { + throw new Error('Invalid JSON in the Additional Configuration: ' + exception) + } + } + + const postgresConnectionOptions = { + ...additionalConfiguration, + type: 'postgres', + host: nodeData.inputs?.host as string, + port: nodeData.inputs?.port as number, + username: user, + password: password, + database: nodeData.inputs?.database as string + } + + const args = { + postgresConnectionOptions: postgresConnectionOptions as DataSourceOptions, + tableName: tableName + } + + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + finalDocs.push(new Document(flattenDocs[i])) + } + } + + try { + const vectorStore = await TypeORMVectorStore.fromDocuments(finalDocs, embeddings, args) + + // Avoid Illegal invocation error + vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: any) => { + return await similaritySearchVectorWithScore(query, k, tableName, postgresConnectionOptions, filter) + } + } catch (e) { + throw new Error(e) + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const user = getCredentialParam('user', credentialData, nodeData) + const password = getCredentialParam('password', credentialData, nodeData) + const _tableName = nodeData.inputs?.tableName as string + const tableName = _tableName ? _tableName : 'documents' + const embeddings = nodeData.inputs?.embeddings as Embeddings + const additionalConfig = nodeData.inputs?.additionalConfig as string + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? parseFloat(topK) : 4 + + let additionalConfiguration = {} + if (additionalConfig) { + try { + additionalConfiguration = typeof additionalConfig === 'object' ? 
additionalConfig : JSON.parse(additionalConfig) + } catch (exception) { + throw new Error('Invalid JSON in the Additional Configuration: ' + exception) + } + } + + const postgresConnectionOptions = { + ...additionalConfiguration, + type: 'postgres', + host: nodeData.inputs?.host as string, + port: nodeData.inputs?.port as number, + username: user, + password: password, + database: nodeData.inputs?.database as string + } + + const args = { + postgresConnectionOptions: postgresConnectionOptions as DataSourceOptions, + tableName: tableName + } + + const vectorStore = await TypeORMVectorStore.fromDataSource(embeddings, args) + + // Rewrite the method to use pg pool connection instead of the default connection + /* Otherwise a connection error is displayed when the chain tries to execute the function + [chain/start] [1:chain:ConversationalRetrievalQAChain] Entering Chain run with input: { "question": "what the document is about", "chat_history": [] } + [retriever/start] [1:chain:ConversationalRetrievalQAChain > 2:retriever:VectorStoreRetriever] Entering Retriever run with input: { "query": "what the document is about" } + [ERROR]: uncaughtException: Illegal invocation TypeError: Illegal invocation at Socket.ref (node:net:1524:18) at Connection.ref (.../node_modules/pg/lib/connection.js:183:17) at Client.ref (.../node_modules/pg/lib/client.js:591:21) at BoundPool._pulseQueue (/node_modules/pg-pool/index.js:148:28) at .../node_modules/pg-pool/index.js:184:37 at process.processTicksAndRejections (node:internal/process/task_queues:77:11) + */ + vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: any) => { + return await similaritySearchVectorWithScore(query, k, tableName, postgresConnectionOptions, filter) + } + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +const similaritySearchVectorWithScore = async ( + query: number[], + k: number, + tableName: string, + postgresConnectionOptions: ICommonObject, + filter?: any +) => { + const embeddingString = `[${query.join(',')}]` + const _filter = filter ?? 
'{}' + + const queryString = ` + SELECT *, embedding <=> $1 as "_distance" + FROM ${tableName} + WHERE metadata @> $2 + ORDER BY "_distance" ASC + LIMIT $3;` + + const poolOptions = { + host: postgresConnectionOptions.host, + port: postgresConnectionOptions.port, + user: postgresConnectionOptions.username, + password: postgresConnectionOptions.password, + database: postgresConnectionOptions.database + } + const pool = new Pool(poolOptions) + const conn = await pool.connect() + + const documents = await conn.query(queryString, [embeddingString, _filter, k]) + + conn.release() + + const results = [] as [TypeORMVectorStoreDocument, number][] + for (const doc of documents.rows) { + if (doc._distance != null && doc.pageContent != null) { + const document = new Document(doc) as TypeORMVectorStoreDocument + document.id = doc.id + results.push([document, doc._distance]) + } + } + + return results +} + +module.exports = { nodeClass: Postgres_VectorStores } diff --git a/packages/components/nodes/vectorstores/Postgres_Existing/Postgres_Exisiting.ts b/packages/components/nodes/vectorstores/Postgres/Postgres_Exisiting.ts similarity index 99% rename from packages/components/nodes/vectorstores/Postgres_Existing/Postgres_Exisiting.ts rename to packages/components/nodes/vectorstores/Postgres/Postgres_Exisiting.ts index c0887432292..99794a0def9 100644 --- a/packages/components/nodes/vectorstores/Postgres_Existing/Postgres_Exisiting.ts +++ b/packages/components/nodes/vectorstores/Postgres/Postgres_Exisiting.ts @@ -14,6 +14,7 @@ class Postgres_Existing_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -28,6 +29,7 @@ class Postgres_Existing_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Load existing index from Postgres using pgvector (i.e: Document has been upserted)' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Postgres_Upsert/Postgres_Upsert.ts b/packages/components/nodes/vectorstores/Postgres/Postgres_Upsert.ts similarity index 99% rename from packages/components/nodes/vectorstores/Postgres_Upsert/Postgres_Upsert.ts rename to packages/components/nodes/vectorstores/Postgres/Postgres_Upsert.ts index 8ea3501d7ff..f706cbe827c 100644 --- a/packages/components/nodes/vectorstores/Postgres_Upsert/Postgres_Upsert.ts +++ b/packages/components/nodes/vectorstores/Postgres/Postgres_Upsert.ts @@ -15,6 +15,7 @@ class PostgresUpsert_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -29,6 +30,7 @@ class PostgresUpsert_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Upsert documents to Postgres using pgvector' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Postgres_Existing/postgres.svg b/packages/components/nodes/vectorstores/Postgres/postgres.svg similarity index 100% rename from packages/components/nodes/vectorstores/Postgres_Existing/postgres.svg rename to packages/components/nodes/vectorstores/Postgres/postgres.svg diff --git 
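[Editor's note] The Postgres node above swaps TypeORM's similaritySearchVectorWithScore for a plain pg Pool query to avoid the "Illegal invocation" error described in the code comment. The following is a minimal standalone sketch of the same query pattern, assuming the default documents table created by TypeORMVectorStore and placeholder connection values; OpenAIEmbeddings is used only as an example embedder.

import { Pool } from 'pg'
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'

// Placeholder connection values; substitute your own. The table layout matches what
// TypeORMVectorStore creates: pageContent, metadata (jsonb), embedding (vector).
const pool = new Pool({ host: 'localhost', port: 5432, user: 'postgres', password: 'postgres', database: 'flowise' })

const run = async () => {
    const queryVector = await new OpenAIEmbeddings().embedQuery('what is the document about')
    // <=> is pgvector's cosine distance operator; @> filters on the jsonb metadata column
    const { rows } = await pool.query(
        `SELECT *, embedding <=> $1 AS "_distance"
         FROM documents
         WHERE metadata @> $2
         ORDER BY "_distance" ASC
         LIMIT $3;`,
        [`[${queryVector.join(',')}]`, '{}', 4]
    )
    for (const row of rows) console.log(row._distance, row.pageContent)
    await pool.end()
}

run()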
a/packages/components/nodes/vectorstores/Postgres_Upsert/postgres.svg b/packages/components/nodes/vectorstores/Postgres_Upsert/postgres.svg deleted file mode 100644 index f631e7a842c..00000000000 --- a/packages/components/nodes/vectorstores/Postgres_Upsert/postgres.svg +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff --git a/packages/components/nodes/vectorstores/Qdrant/Qdrant.ts b/packages/components/nodes/vectorstores/Qdrant/Qdrant.ts new file mode 100644 index 00000000000..6de2c1869cc --- /dev/null +++ b/packages/components/nodes/vectorstores/Qdrant/Qdrant.ts @@ -0,0 +1,246 @@ +import { flatten } from 'lodash' +import { QdrantClient } from '@qdrant/js-client-rest' +import { VectorStoreRetrieverInput } from 'langchain/vectorstores/base' +import { Document } from 'langchain/document' +import { QdrantVectorStore, QdrantLibArgs } from 'langchain/vectorstores/qdrant' +import { Embeddings } from 'langchain/embeddings/base' +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' + +type RetrieverConfig = Partial> + +class Qdrant_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Qdrant' + this.name = 'qdrant' + this.version = 1.0 + this.type = 'Qdrant' + this.icon = 'qdrant.png' + this.category = 'Vector Stores' + this.description = 'Upsert or Load data to Qdrant Vector Database' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'NEW' + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + description: 'Only needed when using Qdrant cloud hosted', + optional: true, + credentialNames: ['qdrantApi'] + } + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Qdrant Server URL', + name: 'qdrantServerUrl', + type: 'string', + placeholder: 'http://localhost:6333' + }, + { + label: 'Qdrant Collection Name', + name: 'qdrantCollection', + type: 'string' + }, + { + label: 'Vector Dimension', + name: 'qdrantVectorDimension', + type: 'number', + default: 1536, + additionalParams: true + }, + { + label: 'Similarity', + name: 'qdrantSimilarity', + description: 'Similarity measure used in Qdrant.', + type: 'options', + default: 'Cosine', + options: [ + { + label: 'Cosine', + name: 'Cosine' + }, + { + label: 'Euclid', + name: 'Euclid' + }, + { + label: 'Dot', + name: 'Dot' + } + ], + additionalParams: true + }, + { + label: 'Additional Collection Cofiguration', + name: 'qdrantCollectionConfiguration', + description: + 'Refer to collection docs for more reference', + type: 'json', + optional: true, + additionalParams: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. 
Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + }, + { + label: 'Qdrant Search Filter', + name: 'qdrantFilter', + description: 'Only return points which satisfy the conditions', + type: 'json', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Qdrant Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Qdrant Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(QdrantVectorStore)] + } + ] + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData, options: ICommonObject): Promise { + const qdrantServerUrl = nodeData.inputs?.qdrantServerUrl as string + const collectionName = nodeData.inputs?.qdrantCollection as string + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + const qdrantSimilarity = nodeData.inputs?.qdrantSimilarity + const qdrantVectorDimension = nodeData.inputs?.qdrantVectorDimension + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const qdrantApiKey = getCredentialParam('qdrantApiKey', credentialData, nodeData) + + const client = new QdrantClient({ + url: qdrantServerUrl, + apiKey: qdrantApiKey + }) + + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + finalDocs.push(new Document(flattenDocs[i])) + } + } + + const dbConfig: QdrantLibArgs = { + client, + url: qdrantServerUrl, + collectionName, + collectionConfig: { + vectors: { + size: qdrantVectorDimension ? parseInt(qdrantVectorDimension, 10) : 1536, + distance: qdrantSimilarity ?? 'Cosine' + } + } + } + + try { + await QdrantVectorStore.fromDocuments(finalDocs, embeddings, dbConfig) + } catch (e) { + throw new Error(e) + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const qdrantServerUrl = nodeData.inputs?.qdrantServerUrl as string + const collectionName = nodeData.inputs?.qdrantCollection as string + let qdrantCollectionConfiguration = nodeData.inputs?.qdrantCollectionConfiguration + const embeddings = nodeData.inputs?.embeddings as Embeddings + const qdrantSimilarity = nodeData.inputs?.qdrantSimilarity + const qdrantVectorDimension = nodeData.inputs?.qdrantVectorDimension + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + let queryFilter = nodeData.inputs?.queryFilter + + const k = topK ? parseFloat(topK) : 4 + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const qdrantApiKey = getCredentialParam('qdrantApiKey', credentialData, nodeData) + + const client = new QdrantClient({ + url: qdrantServerUrl, + apiKey: qdrantApiKey + }) + + const dbConfig: QdrantLibArgs = { + client, + collectionName + } + + const retrieverConfig: RetrieverConfig = { + k + } + + if (qdrantCollectionConfiguration) { + qdrantCollectionConfiguration = + typeof qdrantCollectionConfiguration === 'object' + ? qdrantCollectionConfiguration + : JSON.parse(qdrantCollectionConfiguration) + dbConfig.collectionConfig = { + ...qdrantCollectionConfiguration, + vectors: { + ...qdrantCollectionConfiguration.vectors, + size: qdrantVectorDimension ? parseInt(qdrantVectorDimension, 10) : 1536, + distance: qdrantSimilarity ?? 
'Cosine' + } + } + } + + if (queryFilter) { + retrieverConfig.filter = typeof queryFilter === 'object' ? queryFilter : JSON.parse(queryFilter) + } + + const vectorStore = await QdrantVectorStore.fromExistingCollection(embeddings, dbConfig) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(retrieverConfig) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: Qdrant_VectorStores } diff --git a/packages/components/nodes/vectorstores/Qdrant_Existing/Qdrant_Existing.ts b/packages/components/nodes/vectorstores/Qdrant/Qdrant_Existing.ts similarity index 99% rename from packages/components/nodes/vectorstores/Qdrant_Existing/Qdrant_Existing.ts rename to packages/components/nodes/vectorstores/Qdrant/Qdrant_Existing.ts index c16e8f5440a..fb114402d85 100644 --- a/packages/components/nodes/vectorstores/Qdrant_Existing/Qdrant_Existing.ts +++ b/packages/components/nodes/vectorstores/Qdrant/Qdrant_Existing.ts @@ -15,6 +15,7 @@ class Qdrant_Existing_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -29,6 +30,7 @@ class Qdrant_Existing_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Load existing index from Qdrant (i.e., documents have been upserted)' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Qdrant_Upsert/Qdrant_Upsert.ts b/packages/components/nodes/vectorstores/Qdrant/Qdrant_Upsert.ts similarity index 99% rename from packages/components/nodes/vectorstores/Qdrant_Upsert/Qdrant_Upsert.ts rename to packages/components/nodes/vectorstores/Qdrant/Qdrant_Upsert.ts index 183271f75b3..c43a0c8d493 100644 --- a/packages/components/nodes/vectorstores/Qdrant_Upsert/Qdrant_Upsert.ts +++ b/packages/components/nodes/vectorstores/Qdrant/Qdrant_Upsert.ts @@ -17,6 +17,7 @@ class QdrantUpsert_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -31,6 +32,7 @@ class QdrantUpsert_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Upsert documents to Qdrant' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Qdrant_Existing/qdrant.png b/packages/components/nodes/vectorstores/Qdrant/qdrant.png similarity index 100% rename from packages/components/nodes/vectorstores/Qdrant_Existing/qdrant.png rename to packages/components/nodes/vectorstores/Qdrant/qdrant.png diff --git a/packages/components/nodes/vectorstores/Qdrant_Upsert/qdrant.png b/packages/components/nodes/vectorstores/Qdrant_Upsert/qdrant.png deleted file mode 100644 index ecb2a56d55fbaf6ccdc3640a3d8d3d6587dd701a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 11663 zcmb_?RZv`A)FlusxCD21htRlNf(Cc@;10p1(FQ^i2pS-0aCdiicZbH^nfv`SRa5gm z5B<`$@9oq3ti9H@C{<-SbQEF~7#J9I1$pT&Ffg!$|9y}Vfp5T&!&n#?HZ28d2@UV` z<5jP3`Zukj=lcUy!aG#@buDK4o-J})JyB#rp=b;Vf8YCkWV7gK&BIES%$~qMV4L_g z^_eRCR$B`-44=`G{zDb6wNN+9+UGT?x~~d#be{#=lch({d57!%mP4+6+KnCyX!nJc 
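[Editor's note] The Qdrant node above passes k and an optional filter to asRetriever through a VectorStoreRetrieverInput object rather than the bare number most other stores use. A rough usage sketch follows, assuming a hypothetical local Qdrant instance, collection name, and an illustrative payload filter; an apiKey is only needed for Qdrant cloud.

import { QdrantClient } from '@qdrant/js-client-rest'
import { QdrantVectorStore } from 'langchain/vectorstores/qdrant'
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'

const run = async () => {
    // Placeholder URL and collection name
    const client = new QdrantClient({ url: 'http://localhost:6333' })
    const vectorStore = await QdrantVectorStore.fromExistingCollection(new OpenAIEmbeddings(), {
        client,
        collectionName: 'my_collection'
    })
    // k plus a Qdrant payload filter, mirroring the retrieverConfig built in init()
    const retriever = vectorStore.asRetriever({
        k: 4,
        filter: { must: [{ key: 'metadata.source', match: { value: 'docs' } }] } // illustrative filter shape
    })
    const docs = await retriever.getRelevantDocuments('what is the document about')
    console.log(docs.length)
}

run()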
z(&g5xMde1Wgpmb|rT)?H5*`|{EWE1I=G8tF^VCR79Z2wLExiu$A4iRE5&vxV7S;$K*)dBuZ) zoN{#QohjCE+EjAGqmGAus~{g1jndcMnoo>e?zcblF-E>-C3AVV^G-1yz4a^=Zt>|; z-c)89J)9)Mb~drb%l8bh-l@IKtne5tL^yNNxQ5bnI9hVo7U?v!fsvUIb9%KP`Iy!5 z{;W#xrW?jAy+%Jq@#t#oDCcj1RX(#_dX35!T%n1ubbkl3=RUO4?TFO0*&IyId9mqI z@_;apUo2g@J4bN4WW)G$GnEj@;~12_QDLo>nhE!YL!Y2w%M43NZ$b^1Dgq;&%u5UD z6!$O>QZowRH**>r!B6|Ad&akgH|-`FmdxH8@{?T+XGVsctx+EM+#VX7aN2s)Wa?V8 zPcK!endqebbd_V~n8GHpKb`^wY52BoT8Px%FfcAFE5@Yh!6>pFnSnR=tr;(mVld}c z8NF&CRMF;rNV-r#fB1Fvm?S*}MK;Kj+)i=DNB%~*ques1SHRoD?CSvTe3|^C`VZj* zEgs-@XecdbMJ}>VioR*&cK5N6=5A<#)|^{x0*=b7r#~#ybpJIkxk?z$4Z#jVoDF)O zbzsQi_VHOAg*7U?M#WS ztNv&a;1Uhf;$a6cBcrR$#_>}9tZ-xTS=)MDd+UhY@P!gK(CXKLd#$4HuSSHkEd+5MmUETao-Q_1x;IMHV7 zG2}h*_{Am`ihXc`#Gt~C;Qc#}tgQ3xT~r+9O2}4Gp$ValEk&%)TTJSc-{bW<5>~HZ zd|GuR>q52N;&lPRXxwtYU@U+kH@Z#iGr(msb1gH8ZjvT)<$VEEJFDlRfzC=nM3t z>qdyqKGpvDb8rpO<-GQ&lCK!IqTq-iq&X+VC7~+yG_qb);EB6`8*xr|An*Jr-J+Mw z%OLB94oZ6(iP8s4&f=(NA-HmlXy}QLOm1EbTD(%>P!$ys`DhX8LFy%mU@n)(*SfhZ z%D#@`TwnDcZjhR+^1{lSx0^~m#KI7Zc@op`4P^1TW7b0dG6P@4=!ywek zzaqth5cW?5JFoR6(}C6q<}p|Czc!sZshl{R|hx0Tq^ZhOl$QuD5)XpjMg|0;3>`tEga#zE-{D zH$j5er%qlhqFM11cT1bKRMx-$EF(1`i{OiAPst=_)7G1>zt_>-Hl-5`l zM6`3V$Xr-yHRXUk2(^UkOg{bdn_7Eq*iwRQ?*TWrlG@2a_l(ru#tucFlh8SD2q`+8 zaP`BSDYw_K%`7N#?a>8g209()+=I*6Q%=>UuZ4?E0EQTPyHvfpB311WM7@yjj;gHM zFEh@(RFqDc=@5+N@s{p97F^)n*74al4;6eDr$#w)A@1Spc~JdtpT3mY+K&Z zkW&bgkN%X^(TALLu4-%x!{VJCJUY50o(jRzai}&!U$|(1zuHh1P1iG9$uuZr8Gj^~ zLh~!{{B0YWc$Z+z#)a^A=s#6&3yN=Oxi+8V`zBjtCJ-l+aBnHA4#ZmaArD@3eU0n9 zBK6FKqZ5T^1y98DKQ+A^b~;AQGvp3HF}+%>)nlZgUsbQ--mLJWRDY(%@d)@#AWg8; zoS1M*p1#H7g})!SEMPg(1y>T(Go={zDjgb=+Pxz6n<7G4Pbu?D5fJ)srPkKH292#Z zr3)3af|DAtf=8NIsYeW)&y!tsd3n>GkVDe5NA>XEjaRPg{-W;*V-tL4oUfFv9lUo zi*RZR-)A0HX5ZznX40hC`i3y3AZoT->}UUaBG%)JcjMJ`9O^Nb-N|5--CO(;nSOpJ z_JJ@qW4JA}?Hn0=HjI0|@gJF1`1C%9anI?p_vfMJH>+?g>aLuxTh<(K44?qAdkESx26)Pj`Pdb1-pG9lhIF)jsIJ(h$fQeCo7dQ9Kl
    DPc0O4`-5wY)ahbZgDg9BWaxSRC&VOx8SG)kiSlc=F7iu+W zV|5%`_bI%PfoxxQ>%+cj7NB3<&`voBz7aarFI*Vu8 zBP#bwX6YlgQf28{oLMJj`YwO|o>*^G3@{0~TMNghVBlG0h(@F+CxeprIep(?epv7R zVHd#de!c#}BXpoGizwpNU0hR|X!-?A@mZ+F#0H4DLBz)j_p^5FvfC1V(-avakVXT% z<{@ufx9IF(3H;KYnqQB#L`eh%Ihu|v zUvIwfj~QkWGLX3U+@2V58u~v5Wq|b^IZ98LD07Nj)%SP=Lt=}$|ITSqdJj;Ey$dnN zkD96r5zNf{Sa2KwJe0%J9ok080&_%P@&4#$r|u^U!%R5iV;q-VFsXeO%5T znsj?m-zlFrHN9YG)Ql_1MGleJ3Kfabo?6B@j&M@_->JSpma@lZb@r6*g62K{fIk7v zot(1uCQA~EX(-xc&v3zlY*5A!Gs960tGz$8^$Gchi@~)7scC6-t#1W!(EO?lCb2PM z=A?+xL1mPad7H_lMWl=X_u{l`kWoI@-x*D+(tOfrBMEd?HzMjxInQ3(zcX4(Lndvc)zxt|it!KJn`(+2orVy)T}U}_T#!-GLxgFu$X4H}*4}kl;J=dP zjbSaVgY*3Aq(uK%k@d3O6UGe`ve)BzDx3-t zs*GJBlT&k{W0|mij%%g&u@Dmg?{zHFV=2u0wmnw3dt@B4vYG-I20GAU>d8U7XIqLb~}<@bcL4&&&$$ya5Z&x5N7wn#3c zWAT%MY~%&5m$s>EJ`rHBn!bAI-Y+ZAQ2+=G`GAUYE+y;ADk~dTXln!38s7<4PNw8e zj)m*PaiiCl6h(7J4vu;}RMO{O4%g$o_N{#u2~WM2 zlk=#n4kyfZQxUD#C*uJY0qNZ?9cd?qvbTFO+K)3GxXH76rT9n~q!sAiZtjg8znvtn zFL9{2*l`l0v3Tp;O-&|jG2OY%4Ql}^>N=k8H}EY^FSy`~HG9ZBIRN<~%Aw%ra*VvJ zdz$bPs~H-G_w~tw$uJBfrwiDho*pS#_|`m)@@sQ3r3CMUzFulYppC^<7~jXz>-ccH`A z{=!al3^MPvTfKx9++KITc5xldg(Q;V#2Hlwzl199SMJ#RhiV9)CH{Y+Jv+BuUo?(eGE8>tQ{a4rZ`&hG2Nz$N4nJgD(g? zWAptaJkvi9PADY(KNBd@&)aOfsF9V{CHDA*5b7%;qd2P)}g8Wm4pkU3XdDKSQ(B%p0tOk>As*MH(f!2H&ChBLV?!ILmfc^8B%7M(Dn?Qp&9m z%;j(RY=G7!e$iF{s-~2jdXoCqJMEetxtU9z!qfdx}?tA zrV_G3vYwLZeg~;K+gVBW{(hSZe{8z>jM`Q76&$fu$1CfS@Gt86p^#h8=CAPx68Iw^ zF)<2S25B0qk8o@!tp^9ES=V*n(qz&X)oYYOPVrv1;zKPu8E$5N!Pm0x?=QyUW`B`z24~<8 zJQ$a%$)=a|R9j4#MvQQu80HohY*+~J&tkpc>lssATy-h;MiiU2Y<;)Yce(!!eNF!J zzB0LH2}RngZp-GCnwKewU1(=0QGHj(^@Kyk=1ueZ_Q#R3qz|nm0ybYsH`DgC{6f1{=Ursd~-XPq(7 zi=rZ%i6DcDh1>L`NgMImMP^<;{zl8nShwX6qWyiAO1#mIMLmo5$J7)3a4h@}t%h|- zOkxV-o&ShM`ubMhK94olQ*nPV=TdD{Ow^_AN0P$#4uwe2bt+0zx(fTSIZA{gM0Y1#UBHJiL*NeD$cRM1*1tINTc^%X zND}dIReNVkcn>Q|F?~%8CabgwyTBFo)%@VQURj6S{91784w_znLQD=G+5@7!ZcTsB zVtykZOA@%KO8L zr}n341LBO(3up7SwS`u9yjx?Bn;%2F--;Wgzqy~B*u+QI4&O#PPonfm-3;LsDvZ`B zjq=|=F8V(;1yO#hd!!G$^8}Q%>vj&o36bNK`m0)}Hz6uRSE5ghY9inAh0r$WnmOld z)=(@d>*}2V`lmZi4NmgEeDvgUC}myZkyplvOB|T}Nb}`VvYJGfDx_ic;FZCn!$Ol4 zQp`Vc*iq904|TToiq(R9?TE^Xs(~3QnfLV$lFDZE6FdUKsnp10& zARvsDx=P;iUV|H96$$>2{Hu_+l`<{N088cf9kfnSVV+KC+=^T!Jb zXoPD{E{kY40{ir1QWv#F8%?vA0R6JLgoIdsZA0{=`Y?|V%_P4N7uHf20MU-YI1O1) z*1Z=`%Y%N)XbT5^#RjPxbSe`6yv6T)&v^@2p)*gsj1yu^2^WLhY0UET1u zK7enji}f4IHEup+K20-YWnfR%C1pMaiO;DSL;z4_uy>SEK6F`z2CP0&sx1_n7)(5< z?-JvF?~FOV;`XTTg0*(NoJdItnMp-i=25<02!;Z^B#}KsmO0nB3x%%c-{Ha8l#ANa z*HhMd(V}n8ykDmG#|3d8#IB`|%~;L2;L4bA6liB&w4?WflTJ=@PRy++fMyW?q)O#K z?5LKIcQ0ICvfMMoYMXm>4PLZG+V% z2{*Ymi>WEcF=FMr#PLZvvjq4Z-wY{<)KRON+S;7PJcJiJJ}Af{1h^4-)>}z;X-0tWY#h%dti8lmF2;OH zm1A)GXZ#Gsd1w@eX zVoA>Lt|YYVNFVw13YVV#%*Izhc$v&)X^xJdlM4$b&G1TL@hE+9VMA~ZSrR9lb|lLP zZUKE}m4Di@$@ykyIVXn~S5Q_5p%H*P6O$5QeB`m9jLDX~#4bAnrW#rrs*Md)dYuhS zPEK6h-wPyo{>qtuCIE7gxx}{4COg}&U!^rQ@f7JzOKJvs)H)jkbQ3;QSK&A$*Xj#B za&p^Qr1tBvDk;SkmX`J&Fp9u!o&w!&n0}YK&r2vioW4)siHRwMq`^kqCm(VfCnqUO zON0pU;cbA51j4pNiHtehZ}B3EVv?n7x2akB>c!ZpDV{nZmndtXA1)5)|BeMC*IzPO z?eadLr0sd3my(jQv9*=8w`V~^oEJorP7ejBN>`hwQ{u}qgCO;X*|-zJ+UW-(d)*8X zeOsY%NnsK^GzpU+3fJoV+}zJ`zaU0BtVzbB*EcuWEjgsA!LZF%v|Z8&QeUO1F#u)8 z`fw1Rs<86c+OSqbTQj>Uf60_6>Kve7rwV*UN6+skm7!&#=!m?@0d2Mx?(oI4byvozuoMvd~(_yf*tC|Mq&i9=v@sZz&VZZ(Z z;5QE=neyq}mMYQ=UQLJrA|6^Jt5YSx_aADiVT0!n$xkjVJG7;G19;hBvOzo`Z|^=R z%p3|g;9@3}jO+#`9g?IGC=pm&0KfDlk&NFhieyAb|=GL4Pd z?CZnQ_|hd9u`w`8&&r8b4UuRI2b$*2!to1lcLdUJH%?fX=#>x}FFb4cDDX>QQr%vGj*<|Vo?ehe5PC9xz62OJd(6RAiq?f>dDfJq^7wMvq$ zqF_oE^gmiYQvQ>OyJt;^&qDKCqTT9yivgpL3f+318ZZnWCLq}BuQ1a8Ow;@2Gk<;%6kiW0Jz^EZ%WP85cadERnc>HC$ 
z*z#EcaoaQt?(|=kY$q@Q-EUl2!_}0V6^e-@ek8;2hY6;iB{WKrVf$(}6Weo%60|8I z(C6<$xgjnzM)bI57!>_Ax&4!g{7AK4D{zr5I{i&0zx%L-OQ4}EW>#e33q$}(Y{B$o zf)IS9aP7+ULtlR{me3d`a5#0cb%!=sS|kKut$uiEZK@0kmKb?}E+2gUB)xmW-;G5) zBse7?{0nka8Q=}GS!3s752&^H} zv6|&ETh;Zao8Dgx?x3o=9|&+&6iuYAw|jzX=51B&8u%C6TmLqu0kb|Y*;*aZVyPd! z7V{3cn}0$sOh`eDsruR8H%>O6RU~GYGzrl$vo1ZaV!OS?3-X7@!cZbQwD_irkFYdf zsK-_s5Xr7C>1**C7sDtHXHtYdn%#v-ryM+RUGi~c%AGz@(Co+6Z|0t6O`MTsG0W`& z!OKE;sYD817_yVjDwWliB|eJAmm(2532C1J*PHy70NcxY{1>EHK#)>$kVh7k$ZA;f z5$}&moE-f#y}f#RY-dP~_10NNr8D%TPZmdzVjuKH%W$uOuPpfFa_Lv!HE!1VBFLxo zlSUgjtN}K~HsN0D?MDhBZX0f5xfWn9(Y3*4EScXynF0r=(~rfA-J#(9KhYk~L?~rI zdKB=)>N{ryoui?D??Oke&Z*TR9n!{UKXLxA;E&2Dpx(u*-&aH6ab)HFz=kOrS=W3P z%r5foIt+~pvf0Ss(`$ydySpeF%%DR3Snkwe|HEiUCQ`&_2W4-$rm?Xw@W>X8RR?A* zj{XX_ix_W2U~j|3ea(j;mHdea4Lh-98*2-{(4@^$qeyytJPCi?^qmkma>VE*=|f+44P*l+U3o=cLGFi#z)*`Cl<9|O3x~!v{pax_ z^xVNwcH#B}SvwIFsmbviJA&CoXd-Im}o$(A>bjmHIX zsMrCCAeW{-sIJZ)ACf#zq)~l_8lRKTR4B_JySYbNsE~p?yCivqZ$2Qp?N=yEA-mOt zxWL4eb1c0<+y4|9!Rxwk!Kq|~3`lcO&}4#9`qV%ekJo6+a>AgB_iStjA-B!MC$GW%9?L_`dlZo z+8fi8oq~8&o&W;-&?+l!e0K_XmEs%niU`o+MPZ~o6}GckUtjBNeQ#7m;?`#WwO>SQ zxmvQq_{tcSm?t>>cJ2I24^PT6@IGqZj)B}N$?_m|h4mCc;$h&~xe7^7un0}Q)6yGNT-`V}m=U=}zJC9{B)5T-jG;?C zc+;Cn9D|LFxRJHlSOz6Y0nt&9u&J@tdZ%8=p*tP}?e{67xL2~$cP?Sln|M(;TSJi@ zFb37N_j_r4RH%;PIgDK|&bLk6pchftiRluHYdwJkfr#7 z2?hoUkNJ#3{~fB>h(Xju4cq$})>s z$XZr22_#;wmm^CYdSh>j?bqj^*iRd9AI#dP#VW0nev2!}@cGux`tG3M#6Lnv$WN9( zNUFH{_QbQLWy0eI1#M?UdSpxlnL_~KVu28<;b|#CD#&u%x&9KWCduH8FkA#3DMVgB z75Lpb&Yq5!NPe(?bKjUn!>teziOgHpOhz~UjJZTG>Xk-v1N_Zr5C{izD5uZ>9x@nQ zqKFva60lJI<;N5pOuw#$GlNy281K3-5A)e#5SKBU2tZ8zTQI`+9WU#;Cp$nH@Ti9N)FJ)0W=@z3k6{)Aydd>Dt9l$s%Q3NyW^rRAc` znDh2tzDQ>U>nenZp!ED-wAQ^y$sA#Tn$|ym1^%yV1Hk;$_U8p-v-P{@CclKoK~HJz z*-FSYdMhjQ%E6LHdv&U>dfjP0Iy z2D`T{A~;MR!+T81Iex}8yZy!+dH8?=W!TBGi*xbQExol|tvF<#`yEzxQ;eWmKudL% zJC3zD0gwMy^(268loVl7U#nB7D;zRhYBI)ouGwSHz~ON31M5R-6Z7zzD&;b5O1V@&=Tin z$_IOX$Z`!6j?e;_^dWvvQhlDVni(6ff3n+)T_7Nl%HQ-4B>EYH@-g`1Q2mc}_f)`Pus3p8WUj_B~!lnw8Ix z9OYDS$B(_`4QT4N1^0Qlz-8c$75sRvt#26US3llJgH5=TzoTQ1aO23+Q%x#W4;L`N zc6LlPGF3j%N3{wcLA$<~u^Gj3_KG zaRfJV0R5bSS_fw#FYkU$@`_02Ls0hM@OKsB*?*J#%EaMl4c?Yv0M;g?3kyxysa2Uj z=F`J}Yw#{B5ofByMukl$hd;l&3Z);Pi4D~^wikp)}SkxSFoVFB#Ig;(CwMJku!<5}%zY68IH_;7{Rfc;#d2`Qxv6I*5mk;cH4_q?u?3T+RbS|>2yQ*o~BA3EVB9$&)T$1dn1pv_3wH%)AraU2=uBWdkh7NV7a25$ws0s6g(F zKTzVm{X}vy0bD2dC&ZMt2UGqtvT%b%**E`BA?~6FmG8F9yfe?zjy?hXJ?O5MgN@mpw4in+|f$L!ruF- z^G>?z_bwJ;BaS@rpv(fzJQkLsW8>9Dp<90YyNWThn|d!%aa$ieDB(wi_lf1g7aOB@ zhR11jZ@pgl=wDx+%3(hUimmqg10uWsu3M)c1Qv69>(i`vSgRSLauUvyq9x#G;Bohp zP?oRi9QONnlNRAYPB57+CUld(AK8D6=6A(gInS5O_O^Xra+NNr4PT>h*nn90`f)ZzWR1Dl#{;hwAS#rx)a7&u;OlWbex3ta``~=ben0gZ+X$S zG|58DP-1dF0IVvMw{o3nW3D#-I`o$ScJ}h~b<>QOg92*?5nR~jtuNUurc%_>Gi zY0QlG_KA~!?22)*J7I;UehxE6vXGzP>$PXG za>zsMn#uTypb3KdWtTd1zEtnyvguv5BEDh1r#aJmSft#pZ+o7Fa>>PlvJ@nIdcUg1 z!peVLWUwTUzHSfE@w_z^GtOH&1zin~lSc|X7Nqe~92*p_tF<_m7noPT_mKMn_>`WugaKsnnAxg2VpVsu#M{=E zwM)q!-TBSwje+DgZGqc-Q1XSE#{`}`dllG!_!>y}Z!Qv0`|=V#awdOb(&XJ8WSWQS zq|C(@(^)s4#zEYsq~CaYo!*og*atS#V`r))AJGn#>YX>rycfly@J1Hxf9|oc-`$~8n}b}w zXfYuE_;uMD=H4A8t?Hcmip8n@TKustnd08xRkCI^Lab68EWbk{i)O{4**}mvRULsI z@MU_)$r!oeoC;?>k(B?$N#*T%uC%%eS1==OfzQ_H?Vvc@*m0Sgp2+aw1gys48p&%# zc)EVEqr(4c)qV0gUXZWkomyu?OtB-ma*_SiD;cR({F~D{E1^*qKg&bZXqr}AikOgP ztl^h=zI(9OkE7&upV0lr@_4+rTodmfU)!F9yjI#T?CSlhd)_c%VBX#yT9M{k4@5VC QMQa!Z8D;4*Nt2NO1;l09w*UYD diff --git a/packages/components/nodes/vectorstores/Redis/Redis.ts b/packages/components/nodes/vectorstores/Redis/Redis.ts new file mode 100644 index 00000000000..d857e225612 --- /dev/null +++ 
b/packages/components/nodes/vectorstores/Redis/Redis.ts @@ -0,0 +1,326 @@ +import { flatten } from 'lodash' +import { createClient, SearchOptions } from 'redis' +import { Embeddings } from 'langchain/embeddings/base' +import { RedisVectorStore, RedisVectorStoreConfig } from 'langchain/vectorstores/redis' +import { Document } from 'langchain/document' +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { escapeAllStrings, escapeSpecialChars, unEscapeSpecialChars } from './utils' + +class Redis_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Redis' + this.name = 'redis' + this.version = 1.0 + this.description = 'Upsert or Load data to Redis' + this.type = 'Redis' + this.icon = 'redis.svg' + this.category = 'Vector Stores' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'NEW' + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['redisCacheUrlApi', 'redisCacheApi'] + } + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Index Name', + name: 'indexName', + placeholder: '', + type: 'string' + }, + { + label: 'Replace Index on Upsert', + name: 'replaceIndex', + description: 'Selecting this option will delete the existing index and recreate a new one when upserting', + default: false, + type: 'boolean' + }, + { + label: 'Content Field', + name: 'contentKey', + description: 'Name of the field (column) that contains the actual content', + type: 'string', + default: 'content', + additionalParams: true, + optional: true + }, + { + label: 'Metadata Field', + name: 'metadataKey', + description: 'Name of the field (column) that contains the metadata of the document', + type: 'string', + default: 'metadata', + additionalParams: true, + optional: true + }, + { + label: 'Vector Field', + name: 'vectorKey', + description: 'Name of the field (column) that contains the vector', + type: 'string', + default: 'content_vector', + additionalParams: true, + optional: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Redis Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Redis Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(RedisVectorStore)] + } + ] + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) + const indexName = nodeData.inputs?.indexName as string + let contentKey = nodeData.inputs?.contentKey as string + let metadataKey = nodeData.inputs?.metadataKey as string + let vectorKey = nodeData.inputs?.vectorKey as string + const embeddings = nodeData.inputs?.embeddings as Embeddings + const replaceIndex = nodeData.inputs?.replaceIndex as boolean + + let redisUrl = getCredentialParam('redisUrl', credentialData, nodeData) + if (!redisUrl || redisUrl === '') { + const username = getCredentialParam('redisCacheUser', credentialData, nodeData) + const password = getCredentialParam('redisCachePwd', credentialData, nodeData) + const portStr = getCredentialParam('redisCachePort', credentialData, nodeData) + const host = getCredentialParam('redisCacheHost', credentialData, nodeData) + + redisUrl = 'redis://' + username + ':' + password + '@' + host + ':' + portStr + } + + const docs = nodeData.inputs?.document as Document[] + + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + const document = new Document(flattenDocs[i]) + escapeAllStrings(document.metadata) + finalDocs.push(document) + } + } + + const redisClient = createClient({ url: redisUrl }) + await redisClient.connect() + + try { + const storeConfig: RedisVectorStoreConfig = { + redisClient: redisClient, + indexName: indexName + } + const isIndexExists = await checkIndexExists(redisClient, indexName) + if (replaceIndex && isIndexExists) { + let response = await redisClient.ft.dropIndex(indexName) + if (process.env.DEBUG === 'true') { + // eslint-disable-next-line no-console + console.log(`Redis Vector Store :: Dropping index [${indexName}], Received Response [${response}]`) + } + } + const vectorStore = await RedisVectorStore.fromDocuments(finalDocs, embeddings, storeConfig) + + if (!contentKey || contentKey === '') contentKey = 'content' + if (!metadataKey || metadataKey === '') metadataKey = 'metadata' + if (!vectorKey || vectorKey === '') vectorKey = 'content_vector' + + // Avoid Illegal invocation error + vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: any) => { + return await similaritySearchVectorWithScore( + query, + k, + indexName, + metadataKey, + vectorKey, + contentKey, + redisClient, + filter + ) + } + } catch (e) { + throw new Error(e) + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const indexName = nodeData.inputs?.indexName as string + let contentKey = nodeData.inputs?.contentKey as string + let metadataKey = nodeData.inputs?.metadataKey as string + let vectorKey = nodeData.inputs?.vectorKey as string + const embeddings = nodeData.inputs?.embeddings as Embeddings + const topK = nodeData.inputs?.topK as string + const k = topK ? 
parseFloat(topK) : 4 + const output = nodeData.outputs?.output as string + + let redisUrl = getCredentialParam('redisUrl', credentialData, nodeData) + if (!redisUrl || redisUrl === '') { + const username = getCredentialParam('redisCacheUser', credentialData, nodeData) + const password = getCredentialParam('redisCachePwd', credentialData, nodeData) + const portStr = getCredentialParam('redisCachePort', credentialData, nodeData) + const host = getCredentialParam('redisCacheHost', credentialData, nodeData) + + redisUrl = 'redis://' + username + ':' + password + '@' + host + ':' + portStr + } + + const redisClient = createClient({ url: redisUrl }) + await redisClient.connect() + + const storeConfig: RedisVectorStoreConfig = { + redisClient: redisClient, + indexName: indexName + } + + const vectorStore = new RedisVectorStore(embeddings, storeConfig) + + if (!contentKey || contentKey === '') contentKey = 'content' + if (!metadataKey || metadataKey === '') metadataKey = 'metadata' + if (!vectorKey || vectorKey === '') vectorKey = 'content_vector' + + // Avoid Illegal invocation error + vectorStore.similaritySearchVectorWithScore = async (query: number[], k: number, filter?: any) => { + return await similaritySearchVectorWithScore(query, k, indexName, metadataKey, vectorKey, contentKey, redisClient, filter) + } + + if (output === 'retriever') { + return vectorStore.asRetriever(k) + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +const checkIndexExists = async (redisClient: ReturnType, indexName: string) => { + try { + await redisClient.ft.info(indexName) + } catch (err: any) { + if (err?.message.includes('unknown command')) { + throw new Error( + 'Failed to run FT.INFO command. Please ensure that you are running a RediSearch-capable Redis instance: https://js.langchain.com/docs/modules/data_connection/vectorstores/integrations/redis#setup' + ) + } + // index doesn't exist + return false + } + + return true +} + +const buildQuery = ( + query: number[], + k: number, + metadataKey: string, + vectorKey: string, + contentKey: string, + filter?: string[] +): [string, SearchOptions] => { + const vectorScoreField = 'vector_score' + + let hybridFields = '*' + // if a filter is set, modify the hybrid query + if (filter && filter.length) { + // `filter` is a list of strings, then it's applied using the OR operator in the metadata key + hybridFields = `@${metadataKey}:(${filter.map(escapeSpecialChars).join('|')})` + } + + const baseQuery = `${hybridFields} => [KNN ${k} @${vectorKey} $vector AS ${vectorScoreField}]` + const returnFields = [metadataKey, contentKey, vectorScoreField] + + const options: SearchOptions = { + PARAMS: { + vector: Buffer.from(new Float32Array(query).buffer) + }, + RETURN: returnFields, + SORTBY: vectorScoreField, + DIALECT: 2, + LIMIT: { + from: 0, + size: k + } + } + + return [baseQuery, options] +} + +const similaritySearchVectorWithScore = async ( + query: number[], + k: number, + indexName: string, + metadataKey: string, + vectorKey: string, + contentKey: string, + redisClient: ReturnType, + filter?: string[] +): Promise<[Document, number][]> => { + const results = await redisClient.ft.search(indexName, ...buildQuery(query, k, metadataKey, vectorKey, contentKey, filter)) + const result: [Document, number][] = [] + + if (results.total) { + for (const res of results.documents) { + if (res.value) { + const document = res.value + if (document.vector_score) { + const metadataString = 
unEscapeSpecialChars(document[metadataKey] as string) + result.push([ + new Document({ + pageContent: document[contentKey] as string, + metadata: JSON.parse(metadataString) + }), + Number(document.vector_score) + ]) + } + } + } + } + return result +} + +module.exports = { nodeClass: Redis_VectorStores } diff --git a/packages/components/nodes/vectorstores/Redis/RedisSearchBase.ts b/packages/components/nodes/vectorstores/Redis/RedisSearchBase.ts index 9d1c2ea13ef..b6aa6ebb0f1 100644 --- a/packages/components/nodes/vectorstores/Redis/RedisSearchBase.ts +++ b/packages/components/nodes/vectorstores/Redis/RedisSearchBase.ts @@ -23,6 +23,7 @@ export abstract class RedisSearchBase { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -34,6 +35,7 @@ export abstract class RedisSearchBase { this.icon = 'redis.svg' this.category = 'Vector Stores' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Redis/Redis_Existing.ts b/packages/components/nodes/vectorstores/Redis/Redis_Existing.ts index 9ad472a8057..e8848d3387e 100644 --- a/packages/components/nodes/vectorstores/Redis/Redis_Existing.ts +++ b/packages/components/nodes/vectorstores/Redis/Redis_Existing.ts @@ -14,8 +14,8 @@ class RedisExisting_VectorStores extends RedisSearchBase implements INode { this.version = 1.0 this.description = 'Load existing index from Redis (i.e: Document has been upserted)' - // Remove deleteIndex from inputs as it is not applicable while fetching data from Redis - let input = this.inputs.find((i) => i.name === 'deleteIndex') + // Remove replaceIndex from inputs as it is not applicable while fetching data from Redis + let input = this.inputs.find((i) => i.name === 'replaceIndex') if (input) this.inputs.splice(this.inputs.indexOf(input), 1) } diff --git a/packages/components/nodes/vectorstores/Redis/Redis_Upsert.ts b/packages/components/nodes/vectorstores/Redis/Redis_Upsert.ts index 9d1a4f4534e..4da58eaffbc 100644 --- a/packages/components/nodes/vectorstores/Redis/Redis_Upsert.ts +++ b/packages/components/nodes/vectorstores/Redis/Redis_Upsert.ts @@ -56,7 +56,7 @@ class RedisUpsert_VectorStores extends RedisSearchBase implements INode { } } - return super.init(nodeData, _, options, flattenDocs) + return super.init(nodeData, _, options, finalDocs) } } diff --git a/packages/components/nodes/vectorstores/Singlestore/Singlestore.ts b/packages/components/nodes/vectorstores/Singlestore/Singlestore.ts new file mode 100644 index 00000000000..3597f41e1e7 --- /dev/null +++ b/packages/components/nodes/vectorstores/Singlestore/Singlestore.ts @@ -0,0 +1,198 @@ +import { flatten } from 'lodash' +import { Embeddings } from 'langchain/embeddings/base' +import { SingleStoreVectorStore, SingleStoreVectorStoreConfig } from 'langchain/vectorstores/singlestore' +import { Document } from 'langchain/document' +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' + +class SingleStore_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + 
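[Editor's note] The Redis node above builds the RediSearch KNN query by hand so that an optional metadata filter can be folded into the hybrid part of the query. A condensed sketch of the query it ends up issuing, assuming a local Redis Stack instance and an index previously created by RedisVectorStore with the default field names; the index name and URL are placeholders.

import { createClient } from 'redis'
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'

const run = async () => {
    const client = createClient({ url: 'redis://localhost:6379' }) // placeholder URL
    await client.connect()

    const vector = await new OpenAIEmbeddings().embedQuery('what is the document about')
    // '*' means no metadata filter; with a filter the prefix becomes '@metadata:(term1|term2)'
    const results = await client.ft.search('my-index', '* => [KNN 4 @content_vector $vector AS vector_score]', {
        PARAMS: { vector: Buffer.from(new Float32Array(vector).buffer) },
        RETURN: ['metadata', 'content', 'vector_score'],
        SORTBY: 'vector_score',
        DIALECT: 2,
        LIMIT: { from: 0, size: 4 }
    })
    console.log(results.total)
    await client.quit()
}

run()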
constructor() { + this.label = 'SingleStore' + this.name = 'singlestore' + this.version = 1.0 + this.type = 'SingleStore' + this.icon = 'singlestore.svg' + this.category = 'Vector Stores' + this.description = 'Upsert or Load data to SingleStore Vector Database' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'NEW' + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + description: 'Needed when using SingleStore cloud hosted', + optional: true, + credentialNames: ['singleStoreApi'] + } + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Host', + name: 'host', + type: 'string' + }, + { + label: 'Database', + name: 'database', + type: 'string' + }, + { + label: 'Table Name', + name: 'tableName', + type: 'string', + placeholder: 'embeddings', + additionalParams: true, + optional: true + }, + { + label: 'Content Column Name', + name: 'contentColumnName', + type: 'string', + placeholder: 'content', + additionalParams: true, + optional: true + }, + { + label: 'Vector Column Name', + name: 'vectorColumnName', + type: 'string', + placeholder: 'vector', + additionalParams: true, + optional: true + }, + { + label: 'Metadata Column Name', + name: 'metadataColumnName', + type: 'string', + placeholder: 'metadata', + additionalParams: true, + optional: true + }, + { + label: 'Top K', + name: 'topK', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'SingleStore Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'SingleStore Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(SingleStoreVectorStore)] + } + ] + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const user = getCredentialParam('user', credentialData, nodeData) + const password = getCredentialParam('password', credentialData, nodeData) + + const singleStoreConnectionConfig = { + connectionOptions: { + host: nodeData.inputs?.host as string, + port: 3306, + user, + password, + database: nodeData.inputs?.database as string + }, + ...(nodeData.inputs?.tableName ? { tableName: nodeData.inputs.tableName as string } : {}), + ...(nodeData.inputs?.contentColumnName ? { contentColumnName: nodeData.inputs.contentColumnName as string } : {}), + ...(nodeData.inputs?.vectorColumnName ? { vectorColumnName: nodeData.inputs.vectorColumnName as string } : {}), + ...(nodeData.inputs?.metadataColumnName ? { metadataColumnName: nodeData.inputs.metadataColumnName as string } : {}) + } as SingleStoreVectorStoreConfig + + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + + const flattenDocs = docs && docs.length ? 
flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + finalDocs.push(new Document(flattenDocs[i])) + } + } + + try { + const vectorStore = new SingleStoreVectorStore(embeddings, singleStoreConnectionConfig) + vectorStore.addDocuments.bind(vectorStore)(finalDocs) + } catch (e) { + throw new Error(e) + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const user = getCredentialParam('user', credentialData, nodeData) + const password = getCredentialParam('password', credentialData, nodeData) + + const singleStoreConnectionConfig = { + connectionOptions: { + host: nodeData.inputs?.host as string, + port: 3306, + user, + password, + database: nodeData.inputs?.database as string + }, + ...(nodeData.inputs?.tableName ? { tableName: nodeData.inputs.tableName as string } : {}), + ...(nodeData.inputs?.contentColumnName ? { contentColumnName: nodeData.inputs.contentColumnName as string } : {}), + ...(nodeData.inputs?.vectorColumnName ? { vectorColumnName: nodeData.inputs.vectorColumnName as string } : {}), + ...(nodeData.inputs?.metadataColumnName ? { metadataColumnName: nodeData.inputs.metadataColumnName as string } : {}) + } as SingleStoreVectorStoreConfig + + const embeddings = nodeData.inputs?.embeddings as Embeddings + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? parseFloat(topK) : 4 + + let vectorStore: SingleStoreVectorStore + + vectorStore = new SingleStoreVectorStore(embeddings, singleStoreConnectionConfig) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: SingleStore_VectorStores } diff --git a/packages/components/nodes/vectorstores/Singlestore_Existing/Singlestore_Existing.ts b/packages/components/nodes/vectorstores/Singlestore/Singlestore_Existing.ts similarity index 99% rename from packages/components/nodes/vectorstores/Singlestore_Existing/Singlestore_Existing.ts rename to packages/components/nodes/vectorstores/Singlestore/Singlestore_Existing.ts index c5f6fbceb8c..34061764a9c 100644 --- a/packages/components/nodes/vectorstores/Singlestore_Existing/Singlestore_Existing.ts +++ b/packages/components/nodes/vectorstores/Singlestore/Singlestore_Existing.ts @@ -11,6 +11,7 @@ class SingleStoreExisting_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -25,6 +26,7 @@ class SingleStoreExisting_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Load existing document from SingleStore' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Singlestore_Upsert/Singlestore_Upsert.ts b/packages/components/nodes/vectorstores/Singlestore/Singlestore_Upsert.ts similarity index 99% rename from packages/components/nodes/vectorstores/Singlestore_Upsert/Singlestore_Upsert.ts rename to packages/components/nodes/vectorstores/Singlestore/Singlestore_Upsert.ts index d8edc8d440d..f158f9e88ca 100644 --- 
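[Editor's note] For the SingleStore node above, upsert instantiates the store and calls addDocuments, while init hands the same store (or a retriever over it) to the chain. A rough usage sketch of that flow, with placeholder connection details and the default column names shown in the node's inputs:

import { SingleStoreVectorStore } from 'langchain/vectorstores/singlestore'
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
import { Document } from 'langchain/document'

const run = async () => {
    const vectorStore = new SingleStoreVectorStore(new OpenAIEmbeddings(), {
        connectionOptions: {
            host: 'localhost', // placeholder connection details
            port: 3306,
            user: 'admin',
            password: 'password',
            database: 'vectors'
        },
        tableName: 'embeddings',
        contentColumnName: 'content',
        vectorColumnName: 'vector',
        metadataColumnName: 'metadata'
    })

    await vectorStore.addDocuments([new Document({ pageContent: 'hello world', metadata: { source: 'test' } })])
    const docs = await vectorStore.similaritySearch('hello', 4)
    console.log(docs)
    await vectorStore.end() // close the underlying connection pool
}

run()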
a/packages/components/nodes/vectorstores/Singlestore_Upsert/Singlestore_Upsert.ts +++ b/packages/components/nodes/vectorstores/Singlestore/Singlestore_Upsert.ts @@ -13,6 +13,7 @@ class SingleStoreUpsert_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -27,6 +28,7 @@ class SingleStoreUpsert_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Upsert documents to SingleStore' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Singlestore_Existing/singlestore.svg b/packages/components/nodes/vectorstores/Singlestore/singlestore.svg similarity index 100% rename from packages/components/nodes/vectorstores/Singlestore_Existing/singlestore.svg rename to packages/components/nodes/vectorstores/Singlestore/singlestore.svg diff --git a/packages/components/nodes/vectorstores/Singlestore_Upsert/singlestore.svg b/packages/components/nodes/vectorstores/Singlestore_Upsert/singlestore.svg deleted file mode 100644 index bd8dc8177ff..00000000000 --- a/packages/components/nodes/vectorstores/Singlestore_Upsert/singlestore.svg +++ /dev/null @@ -1,20 +0,0 @@ - - - SingleStore - - - - - - - - - - - - - - - - - diff --git a/packages/components/nodes/vectorstores/Supabase/Supabase.ts b/packages/components/nodes/vectorstores/Supabase/Supabase.ts new file mode 100644 index 00000000000..a7de3211842 --- /dev/null +++ b/packages/components/nodes/vectorstores/Supabase/Supabase.ts @@ -0,0 +1,169 @@ +import { flatten } from 'lodash' +import { createClient } from '@supabase/supabase-js' +import { Document } from 'langchain/document' +import { Embeddings } from 'langchain/embeddings/base' +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' +import { SupabaseLibArgs, SupabaseVectorStore } from 'langchain/vectorstores/supabase' + +class Supabase_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Supabase' + this.name = 'supabase' + this.version = 1.0 + this.type = 'Supabase' + this.icon = 'supabase.svg' + this.category = 'Vector Stores' + this.description = 'Upsert or Load data to Supabase using pgvector' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'NEW' + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['supabaseApi'] + } + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Supabase Project URL', + name: 'supabaseProjUrl', + type: 'string' + }, + { + label: 'Table Name', + name: 'tableName', + type: 'string' + }, + { + label: 'Query Name', + name: 'queryName', + type: 'string' + }, + { + label: 'Supabase Metadata Filter', + name: 'supabaseMetadataFilter', + type: 'json', + optional: true, + additionalParams: true + }, + { + label: 'Top K', + name: 'topK', + 
description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Supabase Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Supabase Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(SupabaseVectorStore)] + } + ] + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData, options: ICommonObject): Promise { + const supabaseProjUrl = nodeData.inputs?.supabaseProjUrl as string + const tableName = nodeData.inputs?.tableName as string + const queryName = nodeData.inputs?.queryName as string + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const supabaseApiKey = getCredentialParam('supabaseApiKey', credentialData, nodeData) + + const client = createClient(supabaseProjUrl, supabaseApiKey) + + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + finalDocs.push(new Document(flattenDocs[i])) + } + + try { + await SupabaseVectorStore.fromDocuments(finalDocs, embeddings, { + client, + tableName: tableName, + queryName: queryName + }) + } catch (e) { + throw new Error(e) + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const supabaseProjUrl = nodeData.inputs?.supabaseProjUrl as string + const tableName = nodeData.inputs?.tableName as string + const queryName = nodeData.inputs?.queryName as string + const embeddings = nodeData.inputs?.embeddings as Embeddings + const supabaseMetadataFilter = nodeData.inputs?.supabaseMetadataFilter + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? parseFloat(topK) : 4 + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const supabaseApiKey = getCredentialParam('supabaseApiKey', credentialData, nodeData) + + const client = createClient(supabaseProjUrl, supabaseApiKey) + + const obj: SupabaseLibArgs = { + client, + tableName, + queryName + } + + if (supabaseMetadataFilter) { + const metadatafilter = typeof supabaseMetadataFilter === 'object' ? 
supabaseMetadataFilter : JSON.parse(supabaseMetadataFilter) + obj.filter = metadatafilter + } + + const vectorStore = await SupabaseVectorStore.fromExistingIndex(embeddings, obj) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: Supabase_VectorStores } diff --git a/packages/components/nodes/vectorstores/Supabase_Existing/Supabase_Exisiting.ts b/packages/components/nodes/vectorstores/Supabase/Supabase_Exisiting.ts similarity index 98% rename from packages/components/nodes/vectorstores/Supabase_Existing/Supabase_Exisiting.ts rename to packages/components/nodes/vectorstores/Supabase/Supabase_Exisiting.ts index ed6febb5bd1..8f135cf7abf 100644 --- a/packages/components/nodes/vectorstores/Supabase_Existing/Supabase_Exisiting.ts +++ b/packages/components/nodes/vectorstores/Supabase/Supabase_Exisiting.ts @@ -12,6 +12,7 @@ class Supabase_Existing_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -26,6 +27,7 @@ class Supabase_Existing_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Load existing index from Supabase (i.e: Document has been upserted)' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Supabase_Upsert/Supabase_Upsert.ts b/packages/components/nodes/vectorstores/Supabase/Supabase_Upsert.ts similarity index 98% rename from packages/components/nodes/vectorstores/Supabase_Upsert/Supabase_Upsert.ts rename to packages/components/nodes/vectorstores/Supabase/Supabase_Upsert.ts index 90fe2121fb2..9e97f48f7f0 100644 --- a/packages/components/nodes/vectorstores/Supabase_Upsert/Supabase_Upsert.ts +++ b/packages/components/nodes/vectorstores/Supabase/Supabase_Upsert.ts @@ -14,6 +14,7 @@ class SupabaseUpsert_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -28,6 +29,7 @@ class SupabaseUpsert_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Upsert documents to Supabase' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Supabase_Existing/supabase.svg b/packages/components/nodes/vectorstores/Supabase/supabase.svg similarity index 100% rename from packages/components/nodes/vectorstores/Supabase_Existing/supabase.svg rename to packages/components/nodes/vectorstores/Supabase/supabase.svg diff --git a/packages/components/nodes/vectorstores/Supabase_Upsert/supabase.svg b/packages/components/nodes/vectorstores/Supabase_Upsert/supabase.svg deleted file mode 100644 index 884d6449f47..00000000000 --- a/packages/components/nodes/vectorstores/Supabase_Upsert/supabase.svg +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/packages/components/nodes/vectorstores/Vectara/Vectara.ts b/packages/components/nodes/vectorstores/Vectara/Vectara.ts new file mode 100644 index 00000000000..f12dc4a223a --- /dev/null +++ 
b/packages/components/nodes/vectorstores/Vectara/Vectara.ts @@ -0,0 +1,240 @@ +import { flatten } from 'lodash' +import { VectaraStore, VectaraLibArgs, VectaraFilter, VectaraContextConfig, VectaraFile } from 'langchain/vectorstores/vectara' +import { Document } from 'langchain/document' +import { Embeddings } from 'langchain/embeddings/base' +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' + +class Vectara_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Vectara' + this.name = 'vectara' + this.version = 1.0 + this.type = 'Vectara' + this.icon = 'vectara.png' + this.category = 'Vector Stores' + this.description = 'Upsert or Load data to Vectara Vector Database' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'NEW' + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['vectaraApi'] + } + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + label: 'File', + name: 'file', + description: + 'File to upload to Vectara. Supported file types: https://docs.vectara.com/docs/api-reference/indexing-apis/file-upload/file-upload-filetypes', + type: 'file', + optional: true + }, + { + label: 'Metadata Filter', + name: 'filter', + description: + 'Filter to apply to Vectara metadata. Refer to the documentation on how to use Vectara filters with Flowise.', + type: 'string', + additionalParams: true, + optional: true + }, + { + label: 'Sentences Before', + name: 'sentencesBefore', + description: 'Number of sentences to fetch before the matched sentence. Defaults to 2.', + type: 'number', + additionalParams: true, + optional: true + }, + { + label: 'Sentences After', + name: 'sentencesAfter', + description: 'Number of sentences to fetch after the matched sentence. Defaults to 2.', + type: 'number', + additionalParams: true, + optional: true + }, + { + label: 'Lambda', + name: 'lambda', + description: + 'Improves retrieval accuracy by adjusting the balance (from 0 to 1) between neural search and keyword-based search factors.', + type: 'number', + additionalParams: true, + optional: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Defaults to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Vectara Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Vectara Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(VectaraStore)] + } + ] + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) + const apiKey = getCredentialParam('apiKey', credentialData, nodeData) + const customerId = getCredentialParam('customerID', credentialData, nodeData) + const corpusId = getCredentialParam('corpusID', credentialData, nodeData).split(',') + + const docs = nodeData.inputs?.document as Document[] + const embeddings = {} as Embeddings + const vectaraMetadataFilter = nodeData.inputs?.filter as string + const sentencesBefore = nodeData.inputs?.sentencesBefore as number + const sentencesAfter = nodeData.inputs?.sentencesAfter as number + const lambda = nodeData.inputs?.lambda as number + const fileBase64 = nodeData.inputs?.file + + const vectaraArgs: VectaraLibArgs = { + apiKey: apiKey, + customerId: customerId, + corpusId: corpusId, + source: 'flowise' + } + + const vectaraFilter: VectaraFilter = {} + if (vectaraMetadataFilter) vectaraFilter.filter = vectaraMetadataFilter + if (lambda) vectaraFilter.lambda = lambda + + const vectaraContextConfig: VectaraContextConfig = {} + if (sentencesBefore) vectaraContextConfig.sentencesBefore = sentencesBefore + if (sentencesAfter) vectaraContextConfig.sentencesAfter = sentencesAfter + vectaraFilter.contextConfig = vectaraContextConfig + + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + finalDocs.push(new Document(flattenDocs[i])) + } + } + + let files: string[] = [] + if (fileBase64.startsWith('[') && fileBase64.endsWith(']')) { + files = JSON.parse(fileBase64) + } else { + files = [fileBase64] + } + + const vectaraFiles: VectaraFile[] = [] + for (const file of files) { + const splitDataURI = file.split(',') + splitDataURI.pop() + const bf = Buffer.from(splitDataURI.pop() || '', 'base64') + const blob = new Blob([bf]) + vectaraFiles.push({ blob: blob, fileName: getFileName(file) }) + } + + try { + if (finalDocs.length) await VectaraStore.fromDocuments(finalDocs, embeddings, vectaraArgs) + if (vectaraFiles.length) { + const vectorStore = new VectaraStore(vectaraArgs) + await vectorStore.addFiles(vectaraFiles) + } + } catch (e) { + throw new Error(e) + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const apiKey = getCredentialParam('apiKey', credentialData, nodeData) + const customerId = getCredentialParam('customerID', credentialData, nodeData) + const corpusId = getCredentialParam('corpusID', credentialData, nodeData).split(',') + + const vectaraMetadataFilter = nodeData.inputs?.filter as string + const sentencesBefore = nodeData.inputs?.sentencesBefore as number + const sentencesAfter = nodeData.inputs?.sentencesAfter as number + const lambda = nodeData.inputs?.lambda as number + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? 
parseInt(topK, 10) : 4 + + const vectaraArgs: VectaraLibArgs = { + apiKey: apiKey, + customerId: customerId, + corpusId: corpusId, + source: 'flowise' + } + + const vectaraFilter: VectaraFilter = {} + if (vectaraMetadataFilter) vectaraFilter.filter = vectaraMetadataFilter + if (lambda) vectaraFilter.lambda = lambda + + const vectaraContextConfig: VectaraContextConfig = {} + if (sentencesBefore) vectaraContextConfig.sentencesBefore = sentencesBefore + if (sentencesAfter) vectaraContextConfig.sentencesAfter = sentencesAfter + vectaraFilter.contextConfig = vectaraContextConfig + + const vectorStore = new VectaraStore(vectaraArgs) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k, vectaraFilter) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +const getFileName = (fileBase64: string) => { + let fileNames = [] + if (fileBase64.startsWith('[') && fileBase64.endsWith(']')) { + const files = JSON.parse(fileBase64) + for (const file of files) { + const splitDataURI = file.split(',') + const filename = splitDataURI[splitDataURI.length - 1].split(':')[1] + fileNames.push(filename) + } + return fileNames.join(', ') + } else { + const splitDataURI = fileBase64.split(',') + const filename = splitDataURI[splitDataURI.length - 1].split(':')[1] + return filename + } +} + +module.exports = { nodeClass: Vectara_VectorStores } diff --git a/packages/components/nodes/vectorstores/Vectara/Vectara_Existing.ts b/packages/components/nodes/vectorstores/Vectara/Vectara_Existing.ts index 4448aa5c767..dda6b8dae90 100644 --- a/packages/components/nodes/vectorstores/Vectara/Vectara_Existing.ts +++ b/packages/components/nodes/vectorstores/Vectara/Vectara_Existing.ts @@ -10,6 +10,7 @@ class VectaraExisting_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -24,6 +25,7 @@ class VectaraExisting_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Load existing index from Vectara (i.e: Document has been upserted)' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Vectara/Vectara_Upload.ts b/packages/components/nodes/vectorstores/Vectara/Vectara_Upload.ts index 39104b9d56c..0c1e6ef377a 100644 --- a/packages/components/nodes/vectorstores/Vectara/Vectara_Upload.ts +++ b/packages/components/nodes/vectorstores/Vectara/Vectara_Upload.ts @@ -10,6 +10,7 @@ class VectaraUpload_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -24,6 +25,7 @@ class VectaraUpload_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Upload files to Vectara' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Vectara/Vectara_Upsert.ts b/packages/components/nodes/vectorstores/Vectara/Vectara_Upsert.ts index 376668de1f7..6ce2ffad3ec 100644 --- a/packages/components/nodes/vectorstores/Vectara/Vectara_Upsert.ts +++ b/packages/components/nodes/vectorstores/Vectara/Vectara_Upsert.ts @@ -13,6 +13,7 @@ class 
VectaraUpsert_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -27,6 +28,7 @@ class VectaraUpsert_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Upsert documents to Vectara' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Weaviate/Weaviate.ts b/packages/components/nodes/vectorstores/Weaviate/Weaviate.ts new file mode 100644 index 00000000000..e54d122b22f --- /dev/null +++ b/packages/components/nodes/vectorstores/Weaviate/Weaviate.ts @@ -0,0 +1,212 @@ +import { flatten } from 'lodash' +import weaviate, { WeaviateClient, ApiKey } from 'weaviate-ts-client' +import { WeaviateLibArgs, WeaviateStore } from 'langchain/vectorstores/weaviate' +import { Document } from 'langchain/document' +import { Embeddings } from 'langchain/embeddings/base' +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' + +class Weaviate_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Weaviate' + this.name = 'weaviate' + this.version = 1.0 + this.type = 'Weaviate' + this.icon = 'weaviate.png' + this.category = 'Vector Stores' + this.description = 'Upsert or Load data to Weaviate Vector Database' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'NEW' + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + description: 'Only needed when using Weaviate cloud hosted', + optional: true, + credentialNames: ['weaviateApi'] + } + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Weaviate Scheme', + name: 'weaviateScheme', + type: 'options', + default: 'https', + options: [ + { + label: 'https', + name: 'https' + }, + { + label: 'http', + name: 'http' + } + ] + }, + { + label: 'Weaviate Host', + name: 'weaviateHost', + type: 'string', + placeholder: 'localhost:8080' + }, + { + label: 'Weaviate Index', + name: 'weaviateIndex', + type: 'string', + placeholder: 'Test' + }, + { + label: 'Weaviate Text Key', + name: 'weaviateTextKey', + type: 'string', + placeholder: 'text', + optional: true, + additionalParams: true + }, + { + label: 'Weaviate Metadata Keys', + name: 'weaviateMetadataKeys', + type: 'string', + rows: 4, + placeholder: `["foo"]`, + optional: true, + additionalParams: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. 
Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Weaviate Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Weaviate Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(WeaviateStore)] + } + ] + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData, options: ICommonObject): Promise { + const weaviateScheme = nodeData.inputs?.weaviateScheme as string + const weaviateHost = nodeData.inputs?.weaviateHost as string + const weaviateIndex = nodeData.inputs?.weaviateIndex as string + const weaviateTextKey = nodeData.inputs?.weaviateTextKey as string + const weaviateMetadataKeys = nodeData.inputs?.weaviateMetadataKeys as string + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const weaviateApiKey = getCredentialParam('weaviateApiKey', credentialData, nodeData) + + const clientConfig: any = { + scheme: weaviateScheme, + host: weaviateHost + } + if (weaviateApiKey) clientConfig.apiKey = new ApiKey(weaviateApiKey) + + const client: WeaviateClient = weaviate.client(clientConfig) + + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + finalDocs.push(new Document(flattenDocs[i])) + } + } + + const obj: WeaviateLibArgs = { + client, + indexName: weaviateIndex + } + + if (weaviateTextKey) obj.textKey = weaviateTextKey + if (weaviateMetadataKeys) obj.metadataKeys = JSON.parse(weaviateMetadataKeys.replace(/\s/g, '')) + + try { + await WeaviateStore.fromDocuments(finalDocs, embeddings, obj) + } catch (e) { + throw new Error(e) + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const weaviateScheme = nodeData.inputs?.weaviateScheme as string + const weaviateHost = nodeData.inputs?.weaviateHost as string + const weaviateIndex = nodeData.inputs?.weaviateIndex as string + const weaviateTextKey = nodeData.inputs?.weaviateTextKey as string + const weaviateMetadataKeys = nodeData.inputs?.weaviateMetadataKeys as string + const embeddings = nodeData.inputs?.embeddings as Embeddings + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? parseFloat(topK) : 4 + + const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) + const weaviateApiKey = getCredentialParam('weaviateApiKey', credentialData, nodeData) + + const clientConfig: any = { + scheme: weaviateScheme, + host: weaviateHost + } + if (weaviateApiKey) clientConfig.apiKey = new ApiKey(weaviateApiKey) + + const client: WeaviateClient = weaviate.client(clientConfig) + + const obj: WeaviateLibArgs = { + client, + indexName: weaviateIndex + } + + if (weaviateTextKey) obj.textKey = weaviateTextKey + if (weaviateMetadataKeys) obj.metadataKeys = JSON.parse(weaviateMetadataKeys.replace(/\s/g, '')) + + const vectorStore = await WeaviateStore.fromExistingIndex(embeddings, obj) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: Weaviate_VectorStores } diff --git a/packages/components/nodes/vectorstores/Weaviate_Existing/Weaviate_Existing.ts b/packages/components/nodes/vectorstores/Weaviate/Weaviate_Existing.ts similarity index 99% rename from packages/components/nodes/vectorstores/Weaviate_Existing/Weaviate_Existing.ts rename to packages/components/nodes/vectorstores/Weaviate/Weaviate_Existing.ts index e35a3917117..d11b351f59b 100644 --- a/packages/components/nodes/vectorstores/Weaviate_Existing/Weaviate_Existing.ts +++ b/packages/components/nodes/vectorstores/Weaviate/Weaviate_Existing.ts @@ -12,6 +12,7 @@ class Weaviate_Existing_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -26,6 +27,7 @@ class Weaviate_Existing_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Load existing index from Weaviate (i.e: Document has been upserted)' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Weaviate_Upsert/Weaviate_Upsert.ts b/packages/components/nodes/vectorstores/Weaviate/Weaviate_Upsert.ts similarity index 99% rename from packages/components/nodes/vectorstores/Weaviate_Upsert/Weaviate_Upsert.ts rename to packages/components/nodes/vectorstores/Weaviate/Weaviate_Upsert.ts index 0b7f23937e1..14adfabd781 100644 --- a/packages/components/nodes/vectorstores/Weaviate_Upsert/Weaviate_Upsert.ts +++ b/packages/components/nodes/vectorstores/Weaviate/Weaviate_Upsert.ts @@ -14,6 +14,7 @@ class WeaviateUpsert_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -28,6 +29,7 @@ class WeaviateUpsert_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Upsert documents to Weaviate' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Weaviate_Existing/weaviate.png b/packages/components/nodes/vectorstores/Weaviate/weaviate.png similarity index 100% rename from packages/components/nodes/vectorstores/Weaviate_Existing/weaviate.png rename to packages/components/nodes/vectorstores/Weaviate/weaviate.png diff --git a/packages/components/nodes/vectorstores/Weaviate_Upsert/weaviate.png b/packages/components/nodes/vectorstores/Weaviate_Upsert/weaviate.png 
deleted file mode 100644
index 25a39e33894ee2702ff1e569cf14971eb2bf45f3..0000000000000000000000000000000000000000
[55728-byte GIT binary patch for the deleted packages/components/nodes/vectorstores/Weaviate_Upsert/weaviate.png omitted]
zZhskT>~=itmNoGitv6o4Yl{Gajq1nbWL`!$s>+7weI=qN}q<0nY;AV26AJbd$Zl=5YN^+WcXhW%q& z{KIX(C*hpIisROXZm;1B%qb$sd6JF6Q?AEfsW}lQ2i)^~*m>9EfPfYDS1rr%&G1wP)S+|4zVK;A z+Zkj=v{P|l3Db^X6L|Y1^Zxtg%$OBz@cqmRk!8}8b(6(^uiLB|N_I#lngYB71S-rl zP(T()yyyuWEC34}3xzHx^L)i4Ai+7*ZQkMTJrWt|tM9l367d<-VKB?c3F&qlOMTeN`ZJ%vJqTdg&F^3F0mhXm^2gnIGjnKOGdfD^&kuxU@?C9?_! z{QBby_ElnlQA$~71>W{n#5dj>Rd&$M#Tu)ve&ZUnsS47A%!?CW$`v2-jr;6(8P0Eo zAY`8`tk!(?OirmMlzxHVLy}OTIXaZGU>1U+^It?o_(QuZc8yBiAO_Y45k>d~Jln%e zpnlQ{oXRW?Nm@RTIxso?WiRnfQm~JNx}}hTR|Pd@{yY_Ax+!;Jc2j=ZUadZ>#MyZN=_`J~1oDR+JEVi)g-*@8L~W{BNVAz%S-8D7w%`QPUG8@`ZL>z57BvcPi0HcJ|W z|IIFDxg3ILZme*HySVcI11lviR#+K-P~XAR@14){xv{!475hJ1&SD^$EiGsNRUe`WucpTCot%dP zNqa1@s|3Ot;gWW74W;%+$1VMIfD3Q-%9DYQz4kDt9Q*Rx=Egw1-->rvv+h|WU}bAD zUC2d!Pnn~4cDfn(IBc2&X{;h|<_Bm5?+d!xgx(31^6;eJW=m(3AMU(<{8B^?d#qUF z5Ni?)Hv(df%L5>rp@buH66G{}%t&jT?~~uly^Le5wctKdUtf#p3UEdNiQewGqr1yB zkCFHrxwWgiYq5%@NOGEk13PTyKCbB3-_LtW5}wTp?0*Ktt(%cHptx)!dgRMOjjHO8 zG?Gcu5QMz0FCNE;Aj2#!eD&7n_$N22(|SWEMklzp;;&5n#df3%pEZyevP5K$)Vh%q zkaH8KQsqD6lsx7CMz9Cyy(AJqxS!&FghiS8l+EaOyDTQ+7H#>h%b479yY%+%#gC`O zLNHeUS)bXVy`KN{KS%J}!k5PEeI>KFf6QGKKfMFty~>XUnJOoJ>o9qKK>ob&odLKRbixAvQWTR4#2x# zR{ZxWQ*Gwy9plaCPON70}UA=xrB{0W#U=mNf)LHp`GY7j5w<@_;9&@w7_mE`Kn^k_%7WbYt;8Qk1 znK1Oq9&Efm5;%(H+#3W}j&(C)x_TJHdY?31Laa0~9XE@k=J9JQG*OGPm z9-m@0;n%T>&KBM5`Q;C;-A~rKo5T7*vyFNDc;J?@kEPuU%mL+5@Zh-dwFcIhT)kdb zQ#0%JsRilrdchC5Y)~i zgtkC6Vp~SL!tFG3NSslCC_S0`d-v5eg{_^+Ky?J~r`+t&y0GE^nC z4(H_@xA+5KVd?v=dmigTJc-(Joic}ivJvPJFow|ieLXG0c|DI^%3`fgs=>su5cdH1 z#mH;mYzSN!aNQk{er{#T@h|uKTeQT4+KUdJ7f}*%HBAwM&W#(*MYJZ@{4_ry_)258 zQvi{t1U?vkRv;AMro|4A_F@rK|AU-c-`iw}$&=Mt9R{;+G8uS|RdvqWbX2A6i7z=) zsyWAztQKUy1uT_;fts(130Cl}!mF|=7$OL#S&Ke!&(&1(x@}U!{#pT@Xz=`#KO%Or zr`;Xa8)S>lo%;vV{M_7iuM&+>+%nWHHI4((aSuG(!;xC8i z=&Q|?Z8_~5M=pB{yJ)-{ z%L;c;n!;2Cohr>U1S4t_h@45(E*`&eJ5;-!ou(VeMi#z&>7{Y>qC%F&06%Tq#R!HT zj$JH1W}=fW#T_E%sFND3y#i>v-dXxI&FrP0kdt72HB-ie1~-D7mr^5Qwj(IYTT&Eu zRGlLwEK&qfJfXU`RrV$S$A3$?s2epKQpEVnJFZymTDl(bC1!ZCB$^}P-MD_5tuI%l zLuz1fgtd=OkWaE_IkXF9w7kvNuo)Kk&+k+;VMsw|+er$9@QN`miedsZRfc%S!!fHy3{k z+;i#vYqv@hcb%*{FWtLPNL!!%9@P-@_?&atKR`M;Cse}l#uM{>1JVRQ-+x6#(*A`| z`xnpaw%XD!XygL5+N z2~my$8oEprV zH3zMo6yj-+9J;@|wW(ZHjZ{7u=U6*e@%uz-I|Gu-K&|w5UBiG9cRId3ORhLJ=b!2I zX|=}!)T-+@K9YfPi?8hTcx70?0S4VzlRUN=7=_xTw*cTFW>IIz;OvK+#${)2o+1m1 zEgUwnX%3i?zl=F^(;q^9+K~B=rWZp_n8YcQMPSJQ4kPrHmGRJUk@wJWmFFcxX3CmO zm{sGV?;et(gN(fsb+`3=u++Z+a}1Z=nVvpH&F$Xg>#nw@*thQt3_>ean-RJn7C6~} zkvn$mun$Z~($<3(8B8mn7gRC`&N=m&xi@wgbSRWed=#NelP5($2zMT!w`fe~yamp& zH;FScqiZk|QG%hXVd99m$b?^K3|-)|cy(9#`f8%Q%-1tzS(C2=FaG|9za`UAY)~CT zKItJNKO9=l)Kl4hE497}D+5c~2V?`IiU;+S8{Os}<0`Q1nxgKc$PQHa)BQj{iE7^D z1Fxp@C9Db_xdV)*Oh@kseOu#6yutY#gk{?zSvFJ6p0D)7j&2Lj>KFVWWVG|# z4Z(;0$W|p6;*hi2xz(GR9%m zP_wE}m7S+wxwr#;HHY&^sS>P2q4cyD{)bcoU`bd$K~qU|_n{z0{JI;`VZ@WTS_SsK zsqr44S)78>e2tUs5^lGamKUZ^r77&Kim*8X?!qB%TDzhp(=leI2P*DKk+{!+z*L z_be$5lGp8y0;}fSRlrh+F?Z1E7}l4*Xv#z0tj^29SQ4B96&Vv`x8r)P;c zJMFQ)_V|yRQerd0h?q$Mta9ty^m3W4H{5)J?ucb1vlD$RmLfSfQ;l1r9yd4@B%BkB z&$bkvTeOo)p=L;-?llGVH37n?dNKXB#(P)M!to;iO&uvc)^uT827q?C+}1ERpBq3O znG(-j}_`UrCE%fxmyx`f<_i*N@_|4k_gS3d+4s8NX z-3R`q5zrs5WmuqglCdd=CLHbhE4MsPm2(=(&US$$CmIRr^r-tCsd z5zJkR{kpO<{0>;mm@8}kpjv7&f>P3jsh1ZL5qd7x$|BjFi$MC>e@{#Nq8~&{xw!eU zF7EKEBLceLdCl*KdN6>>^}3~E_{0-`_{2h49A=wRx~G=IG52WdXd#pOI_aN{y{_p) zfVjU6IoW<#P`Qp36zhzaeJiXy=Cg>D;#{C=>d@$rs_31pY~ht?D)g6sDbP#+>O)>Y z>&KHurw`#P+naB4y#HqJ(NywJDNt497n(CVfxX1cbVw+KpU)0z%AA)$8^{;vaf0&) zZs+1%MN>^3>r1%nKGiJ78Gv6gXAW6Pw#**IW?;c@Y z9v~kqTRmyzQ@i4M-_#i05cXu{1AK7kmsR)dp}to?5nn&l_m4p7Y;Ec5^U31dL=Y6tE;Eud@#mh6c;c{T24P+W*(@SPCgB6GmzfZcwZ 
zgZmF0xnz69(azq{_MnB0jiZfC@TnBT|Fs|rcJo#!;{RT-ZTpc*>84Jf^gdDJ7MS`U D(1YVr diff --git a/packages/components/nodes/vectorstores/Zep/Zep.ts b/packages/components/nodes/vectorstores/Zep/Zep.ts new file mode 100644 index 00000000000..ce863a9ee5b --- /dev/null +++ b/packages/components/nodes/vectorstores/Zep/Zep.ts @@ -0,0 +1,281 @@ +import { flatten } from 'lodash' +import { IDocument, ZepClient } from '@getzep/zep-js' +import { ZepVectorStore, IZepConfig } from 'langchain/vectorstores/zep' +import { Embeddings } from 'langchain/embeddings/base' +import { Document } from 'langchain/document' +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' + +class Zep_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'Zep' + this.name = 'zep' + this.version = 1.0 + this.type = 'Zep' + this.icon = 'zep.png' + this.category = 'Vector Stores' + this.description = 'Upsert or Load data to Zep Vector Database' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'NEW' + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + optional: true, + description: 'Configure JWT authentication on your Zep instance (Optional)', + credentialNames: ['zepMemoryApi'] + } + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Base URL', + name: 'baseURL', + type: 'string', + default: 'http://127.0.0.1:8000' + }, + { + label: 'Zep Collection', + name: 'zepCollection', + type: 'string', + placeholder: 'my-first-collection' + }, + { + label: 'Zep Metadata Filter', + name: 'zepMetadataFilter', + type: 'json', + optional: true, + additionalParams: true + }, + { + label: 'Embedding Dimension', + name: 'dimension', + type: 'number', + default: 1536, + additionalParams: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'Zep Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'Zep Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(ZepVectorStore)] + } + ] + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData, options: ICommonObject): Promise { + const baseURL = nodeData.inputs?.baseURL as string + const zepCollection = nodeData.inputs?.zepCollection as string + const dimension = (nodeData.inputs?.dimension as number) ?? 1536 + const docs = nodeData.inputs?.document as Document[] + const embeddings = nodeData.inputs?.embeddings as Embeddings + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const apiKey = getCredentialParam('apiKey', credentialData, nodeData) + + const flattenDocs = docs && docs.length ? 
flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + finalDocs.push(new Document(flattenDocs[i])) + } + } + + const zepConfig: IZepConfig = { + apiUrl: baseURL, + collectionName: zepCollection, + embeddingDimensions: dimension, + isAutoEmbedded: false + } + if (apiKey) zepConfig.apiKey = apiKey + + try { + await ZepVectorStore.fromDocuments(finalDocs, embeddings, zepConfig) + } catch (e) { + throw new Error(e) + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const baseURL = nodeData.inputs?.baseURL as string + const zepCollection = nodeData.inputs?.zepCollection as string + const zepMetadataFilter = nodeData.inputs?.zepMetadataFilter + const dimension = nodeData.inputs?.dimension as number + const embeddings = nodeData.inputs?.embeddings as Embeddings + const output = nodeData.outputs?.output as string + const topK = nodeData.inputs?.topK as string + const k = topK ? parseFloat(topK) : 4 + + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const apiKey = getCredentialParam('apiKey', credentialData, nodeData) + + const zepConfig: IZepConfig & Partial = { + apiUrl: baseURL, + collectionName: zepCollection, + embeddingDimensions: dimension, + isAutoEmbedded: false + } + if (apiKey) zepConfig.apiKey = apiKey + if (zepMetadataFilter) { + const metadatafilter = typeof zepMetadataFilter === 'object' ? zepMetadataFilter : JSON.parse(zepMetadataFilter) + zepConfig.filter = metadatafilter + } + + const vectorStore = await ZepExistingVS.fromExistingIndex(embeddings, zepConfig) + + if (output === 'retriever') { + const retriever = vectorStore.asRetriever(k) + return retriever + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +interface ZepFilter { + filter: Record +} + +function zepDocsToDocumentsAndScore(results: IDocument[]): [Document, number][] { + return results.map((d) => [ + new Document({ + pageContent: d.content, + metadata: d.metadata + }), + d.score ? d.score : 0 + ]) +} + +function assignMetadata(value: string | Record | object | undefined): Record | undefined { + if (typeof value === 'object' && value !== null) { + return value as Record + } + if (value !== undefined) { + console.warn('Metadata filters must be an object, Record, or undefined.') + } + return undefined +} + +class ZepExistingVS extends ZepVectorStore { + filter?: Record + args?: IZepConfig & Partial + + constructor(embeddings: Embeddings, args: IZepConfig & Partial) { + super(embeddings, args) + this.filter = args.filter + this.args = args + } + + async initalizeCollection(args: IZepConfig & Partial) { + this.client = await ZepClient.init(args.apiUrl, args.apiKey) + try { + this.collection = await this.client.document.getCollection(args.collectionName) + } catch (err) { + if (err instanceof Error) { + if (err.name === 'NotFoundError') { + await this.createNewCollection(args) + } else { + throw err + } + } + } + } + + async createNewCollection(args: IZepConfig & Partial) { + if (!args.embeddingDimensions) { + throw new Error( + `Collection ${args.collectionName} not found. 
You can create a new Collection by providing embeddingDimensions.` + ) + } + + this.collection = await this.client.document.addCollection({ + name: args.collectionName, + description: args.description, + metadata: args.metadata, + embeddingDimensions: args.embeddingDimensions, + isAutoEmbedded: false + }) + } + + async similaritySearchVectorWithScore( + query: number[], + k: number, + filter?: Record | undefined + ): Promise<[Document, number][]> { + if (filter && this.filter) { + throw new Error('cannot provide both `filter` and `this.filter`') + } + const _filters = filter ?? this.filter + const ANDFilters = [] + for (const filterKey in _filters) { + let filterVal = _filters[filterKey] + if (typeof filterVal === 'string') filterVal = `"${filterVal}"` + ANDFilters.push({ jsonpath: `$[*] ? (@.${filterKey} == ${filterVal})` }) + } + const newfilter = { + where: { and: ANDFilters } + } + await this.initalizeCollection(this.args!).catch((err) => { + console.error('Error initializing collection:', err) + throw err + }) + const results = await this.collection.search( + { + embedding: new Float32Array(query), + metadata: assignMetadata(newfilter) + }, + k + ) + return zepDocsToDocumentsAndScore(results) + } + + static async fromExistingIndex(embeddings: Embeddings, dbConfig: IZepConfig & Partial): Promise { + const instance = new this(embeddings, dbConfig) + return instance + } +} + +module.exports = { nodeClass: Zep_VectorStores } diff --git a/packages/components/nodes/vectorstores/Zep/Zep_Existing.ts b/packages/components/nodes/vectorstores/Zep/Zep_Existing.ts index 9be10c19851..5e9d7e1fbed 100644 --- a/packages/components/nodes/vectorstores/Zep/Zep_Existing.ts +++ b/packages/components/nodes/vectorstores/Zep/Zep_Existing.ts @@ -13,6 +13,7 @@ class Zep_Existing_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -27,6 +28,7 @@ class Zep_Existing_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Load existing index from Zep (i.e: Document has been upserted)' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/Zep/Zep_Upsert.ts b/packages/components/nodes/vectorstores/Zep/Zep_Upsert.ts index 915513b9ff9..40b08cbd4f3 100644 --- a/packages/components/nodes/vectorstores/Zep/Zep_Upsert.ts +++ b/packages/components/nodes/vectorstores/Zep/Zep_Upsert.ts @@ -13,6 +13,7 @@ class Zep_Upsert_VectorStores implements INode { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -27,6 +28,7 @@ class Zep_Upsert_VectorStores implements INode { this.category = 'Vector Stores' this.description = 'Upsert documents to Zep' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/src/Interface.ts b/packages/components/src/Interface.ts index 15b98770163..63ebdee159e 100644 --- a/packages/components/src/Interface.ts +++ b/packages/components/src/Interface.ts @@ -92,6 +92,7 @@ export interface INodeProperties { baseClasses: string[] description?: string filePath?: string + badge?: string } export interface INode extends INodeProperties { @@ -100,6 +101,11 @@ export interface INode extends 
INodeProperties { loadMethods?: { [key: string]: (nodeData: INodeData, options?: ICommonObject) => Promise } + vectorStoreMethods?: { + upsert: (nodeData: INodeData, options?: ICommonObject) => Promise + search: (nodeData: INodeData, options?: ICommonObject) => Promise + delete: (nodeData: INodeData, options?: ICommonObject) => Promise + } init?(nodeData: INodeData, input: string, options?: ICommonObject): Promise run?(nodeData: INodeData, input: string, options?: ICommonObject): Promise clearSessionMemory?(nodeData: INodeData, options?: ICommonObject): Promise diff --git a/packages/server/marketplaces/chatflows/API Agent OpenAI.json b/packages/server/marketplaces/chatflows/API Agent OpenAI.json index b2ff977be13..5498b4f3669 100644 --- a/packages/server/marketplaces/chatflows/API Agent OpenAI.json +++ b/packages/server/marketplaces/chatflows/API Agent OpenAI.json @@ -199,6 +199,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_1-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-baseOptions-json" } ], "inputAnchors": [ @@ -218,7 +226,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { @@ -510,6 +519,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_2-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-baseOptions-json" } ], "inputAnchors": [ @@ -529,7 +546,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { diff --git a/packages/server/marketplaces/chatflows/API Agent.json b/packages/server/marketplaces/chatflows/API Agent.json index 26c9b340b9c..d8fa22ad15a 100644 --- a/packages/server/marketplaces/chatflows/API Agent.json +++ b/packages/server/marketplaces/chatflows/API Agent.json @@ -507,6 +507,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_2-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-baseOptions-json" } ], "inputAnchors": [ @@ -526,7 +534,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { @@ -669,6 +678,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_1-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-baseOptions-json" } ], "inputAnchors": [ @@ -688,7 +705,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { @@ -831,6 +849,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_3-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_3-input-baseOptions-json" } ], "inputAnchors": [ @@ -850,7 +876,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { diff --git a/packages/server/marketplaces/chatflows/Antonym.json 
b/packages/server/marketplaces/chatflows/Antonym.json index 059ab6c11a3..85cd5e4c90a 100644 --- a/packages/server/marketplaces/chatflows/Antonym.json +++ b/packages/server/marketplaces/chatflows/Antonym.json @@ -286,6 +286,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -305,7 +313,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { diff --git a/packages/server/marketplaces/chatflows/AutoGPT.json b/packages/server/marketplaces/chatflows/AutoGPT.json index 4fb706ab1cd..5f388da6123 100644 --- a/packages/server/marketplaces/chatflows/AutoGPT.json +++ b/packages/server/marketplaces/chatflows/AutoGPT.json @@ -13,8 +13,8 @@ "data": { "id": "autoGPT_0", "label": "AutoGPT", - "name": "autoGPT", "version": 1, + "name": "autoGPT", "type": "AutoGPT", "baseClasses": ["AutoGPT"], "category": "Agents", @@ -69,7 +69,7 @@ "inputs": { "tools": ["{{readFile_0.data.instance}}", "{{writeFile_1.data.instance}}", "{{serpAPI_0.data.instance}}"], "model": "{{chatOpenAI_0.data.instance}}", - "vectorStoreRetriever": "{{pineconeExistingIndex_0.data.instance}}", + "vectorStoreRetriever": "{{pinecone_0.data.instance}}", "aiName": "", "aiRole": "", "maxLoop": 5 @@ -104,8 +104,8 @@ "data": { "id": "writeFile_1", "label": "Write File", - "name": "writeFile", "version": 1, + "name": "writeFile", "type": "WriteFile", "baseClasses": ["WriteFile", "Tool", "StructuredTool", "BaseLangChain"], "category": "Tools", @@ -154,8 +154,8 @@ "data": { "id": "readFile_0", "label": "Read File", - "name": "readFile", "version": 1, + "name": "readFile", "type": "ReadFile", "baseClasses": ["ReadFile", "Tool", "StructuredTool", "BaseLangChain"], "category": "Tools", @@ -204,8 +204,8 @@ "data": { "id": "serpAPI_0", "label": "Serp API", - "name": "serpAPI", "version": 1, + "name": "serpAPI", "type": "SerpAPI", "baseClasses": ["SerpAPI", "Tool", "StructuredTool"], "category": "Tools", @@ -241,7 +241,7 @@ }, { "width": 300, - "height": 523, + "height": 574, "id": "chatOpenAI_0", "position": { "x": 176.69787776192283, @@ -251,8 +251,8 @@ "data": { "id": "chatOpenAI_0", "label": "ChatOpenAI", - "name": "chatOpenAI", "version": 2, + "name": "chatOpenAI", "type": "ChatOpenAI", "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], "category": "Chat Models", @@ -362,6 +362,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -381,7 +389,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { @@ -413,8 +422,8 @@ "data": { "id": "openAIEmbeddings_0", "label": "OpenAI Embeddings", - "name": "openAIEmbeddings", "version": 1, + "name": "openAIEmbeddings", "type": "OpenAIEmbeddings", "baseClasses": ["OpenAIEmbeddings", "Embeddings"], "category": "Embeddings", @@ -487,35 +496,35 @@ }, { "width": 300, - "height": 505, - "id": "pineconeExistingIndex_0", + "height": 555, + "id": "pinecone_0", "position": { - "x": 1001.3784758268554, - "y": 415.24072209485803 + "x": 1061.413729190394, + "y": 
387.9611693492896 }, "type": "customNode", "data": { - "id": "pineconeExistingIndex_0", - "label": "Pinecone Load Existing Index", - "name": "pineconeExistingIndex", + "id": "pinecone_0", + "label": "Pinecone", "version": 1, + "name": "pinecone", "type": "Pinecone", "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Load existing index from Pinecone (i.e: Document has been upserted)", + "description": "Upsert or Load data to Pinecone Vector Database", "inputParams": [ { "label": "Connect Credential", "name": "credential", "type": "credential", "credentialNames": ["pineconeApi"], - "id": "pineconeExistingIndex_0-input-credential-credential" + "id": "pinecone_0-input-credential-credential" }, { "label": "Pinecone Index", "name": "pineconeIndex", "type": "string", - "id": "pineconeExistingIndex_0-input-pineconeIndex-string" + "id": "pinecone_0-input-pineconeIndex-string" }, { "label": "Pinecone Namespace", @@ -524,7 +533,7 @@ "placeholder": "my-first-namespace", "additionalParams": true, "optional": true, - "id": "pineconeExistingIndex_0-input-pineconeNamespace-string" + "id": "pinecone_0-input-pineconeNamespace-string" }, { "label": "Pinecone Metadata Filter", @@ -532,7 +541,7 @@ "type": "json", "optional": true, "additionalParams": true, - "id": "pineconeExistingIndex_0-input-pineconeMetadataFilter-json" + "id": "pinecone_0-input-pineconeMetadataFilter-json" }, { "label": "Top K", @@ -542,18 +551,27 @@ "type": "number", "additionalParams": true, "optional": true, - "id": "pineconeExistingIndex_0-input-topK-number" + "id": "pinecone_0-input-topK-number" } ], "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "optional": true, + "id": "pinecone_0-input-document-Document" + }, { "label": "Embeddings", "name": "embeddings", "type": "Embeddings", - "id": "pineconeExistingIndex_0-input-embeddings-Embeddings" + "id": "pinecone_0-input-embeddings-Embeddings" } ], "inputs": { + "document": "", "embeddings": "{{openAIEmbeddings_0.data.instance}}", "pineconeIndex": "", "pineconeNamespace": "", @@ -567,13 +585,13 @@ "type": "options", "options": [ { - "id": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "id": "pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", "name": "retriever", "label": "Pinecone Retriever", "type": "Pinecone | VectorStoreRetriever | BaseRetriever" }, { - "id": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", + "id": "pinecone_0-output-vectorStore-Pinecone|VectorStore", "name": "vectorStore", "label": "Pinecone Vector Store", "type": "Pinecone | VectorStore" @@ -588,11 +606,11 @@ "selected": false }, "selected": false, - "dragging": false, "positionAbsolute": { - "x": 1001.3784758268554, - "y": 415.24072209485803 - } + "x": 1061.413729190394, + "y": 387.9611693492896 + }, + "dragging": false } ], "edges": [ @@ -619,45 +637,45 @@ } }, { - "source": "pineconeExistingIndex_0", - "sourceHandle": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", "target": "autoGPT_0", - "targetHandle": "autoGPT_0-input-vectorStoreRetriever-BaseRetriever", + "targetHandle": "autoGPT_0-input-model-BaseChatModel", "type": "buttonedge", - "id": 
"pineconeExistingIndex_0-pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-autoGPT_0-autoGPT_0-input-vectorStoreRetriever-BaseRetriever", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-autoGPT_0-autoGPT_0-input-model-BaseChatModel", "data": { "label": "" } }, { - "source": "openAIEmbeddings_0", - "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "pineconeExistingIndex_0", - "targetHandle": "pineconeExistingIndex_0-input-embeddings-Embeddings", + "source": "serpAPI_0", + "sourceHandle": "serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool", + "target": "autoGPT_0", + "targetHandle": "autoGPT_0-input-tools-Tool", "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeExistingIndex_0-pineconeExistingIndex_0-input-embeddings-Embeddings", + "id": "serpAPI_0-serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool-autoGPT_0-autoGPT_0-input-tools-Tool", "data": { "label": "" } }, { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "target": "autoGPT_0", - "targetHandle": "autoGPT_0-input-model-BaseChatModel", + "source": "openAIEmbeddings_0", + "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "pinecone_0", + "targetHandle": "pinecone_0-input-embeddings-Embeddings", "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-autoGPT_0-autoGPT_0-input-model-BaseChatModel", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pinecone_0-pinecone_0-input-embeddings-Embeddings", "data": { "label": "" } }, { - "source": "serpAPI_0", - "sourceHandle": "serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool", + "source": "pinecone_0", + "sourceHandle": "pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", "target": "autoGPT_0", - "targetHandle": "autoGPT_0-input-tools-Tool", + "targetHandle": "autoGPT_0-input-vectorStoreRetriever-BaseRetriever", "type": "buttonedge", - "id": "serpAPI_0-serpAPI_0-output-serpAPI-SerpAPI|Tool|StructuredTool-autoGPT_0-autoGPT_0-input-tools-Tool", + "id": "pinecone_0-pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-autoGPT_0-autoGPT_0-input-vectorStoreRetriever-BaseRetriever", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/BabyAGI.json b/packages/server/marketplaces/chatflows/BabyAGI.json index 04410b82f2e..211e9e42c5c 100644 --- a/packages/server/marketplaces/chatflows/BabyAGI.json +++ b/packages/server/marketplaces/chatflows/BabyAGI.json @@ -13,8 +13,8 @@ "data": { "id": "babyAGI_1", "label": "BabyAGI", - "name": "babyAGI", "version": 1, + "name": "babyAGI", "type": "BabyAGI", "baseClasses": ["BabyAGI"], "category": "Agents", @@ -44,7 +44,7 @@ ], "inputs": { "model": "{{chatOpenAI_0.data.instance}}", - "vectorStore": "{{pineconeExistingIndex_0.data.instance}}", + "vectorStore": "{{pinecone_0.data.instance}}", "taskLoop": 3 }, "outputAnchors": [ @@ -65,168 +65,6 @@ "y": 66.00028106865324 } }, - { - "width": 300, - "height": 523, - "id": "chatOpenAI_0", - "position": { - "x": 587.1798180512677, - "y": -355.9845878719703 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_0", - "label": "ChatOpenAI", - "name": "chatOpenAI", - "version": 2, - "type": "ChatOpenAI", - 
"baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "options", - "options": [ - { - "label": "gpt-4", - "name": "gpt-4" - }, - { - "label": "gpt-4-0613", - "name": "gpt-4-0613" - }, - { - "label": "gpt-4-32k", - "name": "gpt-4-32k" - }, - { - "label": "gpt-4-32k-0613", - "name": "gpt-4-32k-0613" - }, - { - "label": "gpt-3.5-turbo", - "name": "gpt-3.5-turbo" - }, - { - "label": "gpt-3.5-turbo-0613", - "name": "gpt-3.5-turbo-0613" - }, - { - "label": "gpt-3.5-turbo-16k", - "name": "gpt-3.5-turbo-16k" - }, - { - "label": "gpt-3.5-turbo-16k-0613", - "name": "gpt-3.5-turbo-16k-0613" - } - ], - "default": "gpt-3.5-turbo", - "optional": true, - "id": "chatOpenAI_0-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "default": 0.9, - "optional": true, - "id": "chatOpenAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_0-input-cache-BaseCache" - } - ], - "inputs": { - "modelName": "gpt-3.5-turbo", - "temperature": 0.9, - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 587.1798180512677, - "y": -355.9845878719703 - }, - "dragging": false - }, { "width": 300, "height": 329, @@ -239,8 +77,8 @@ "data": { "id": "openAIEmbeddings_0", "label": "OpenAI Embeddings", - "name": "openAIEmbeddings", "version": 1, + "name": "openAIEmbeddings", "type": "OpenAIEmbeddings", "baseClasses": ["OpenAIEmbeddings", "Embeddings"], "category": "Embeddings", @@ -313,35 +151,35 @@ }, { "width": 300, - "height": 505, - "id": "pineconeExistingIndex_0", + "height": 555, + "id": "pinecone_0", "position": { - "x": 241.78764591331816, - "y": -38.438460915613945 
+ "x": 238.1350223788262, + "y": -133.38073692212225 }, "type": "customNode", "data": { - "id": "pineconeExistingIndex_0", - "label": "Pinecone Load Existing Index", - "name": "pineconeExistingIndex", + "id": "pinecone_0", + "label": "Pinecone", "version": 1, + "name": "pinecone", "type": "Pinecone", "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Load existing index from Pinecone (i.e: Document has been upserted)", + "description": "Upsert or Load data to Pinecone Vector Database", "inputParams": [ { "label": "Connect Credential", "name": "credential", "type": "credential", "credentialNames": ["pineconeApi"], - "id": "pineconeExistingIndex_0-input-credential-credential" + "id": "pinecone_0-input-credential-credential" }, { "label": "Pinecone Index", "name": "pineconeIndex", "type": "string", - "id": "pineconeExistingIndex_0-input-pineconeIndex-string" + "id": "pinecone_0-input-pineconeIndex-string" }, { "label": "Pinecone Namespace", @@ -350,7 +188,7 @@ "placeholder": "my-first-namespace", "additionalParams": true, "optional": true, - "id": "pineconeExistingIndex_0-input-pineconeNamespace-string" + "id": "pinecone_0-input-pineconeNamespace-string" }, { "label": "Pinecone Metadata Filter", @@ -358,7 +196,7 @@ "type": "json", "optional": true, "additionalParams": true, - "id": "pineconeExistingIndex_0-input-pineconeMetadataFilter-json" + "id": "pinecone_0-input-pineconeMetadataFilter-json" }, { "label": "Top K", @@ -368,20 +206,29 @@ "type": "number", "additionalParams": true, "optional": true, - "id": "pineconeExistingIndex_0-input-topK-number" + "id": "pinecone_0-input-topK-number" } ], "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "optional": true, + "id": "pinecone_0-input-document-Document" + }, { "label": "Embeddings", "name": "embeddings", "type": "Embeddings", - "id": "pineconeExistingIndex_0-input-embeddings-Embeddings" + "id": "pinecone_0-input-embeddings-Embeddings" } ], "inputs": { + "document": "", "embeddings": "{{openAIEmbeddings_0.data.instance}}", - "pineconeIndex": "newindex", + "pineconeIndex": "", "pineconeNamespace": "", "pineconeMetadataFilter": "", "topK": "" @@ -393,13 +240,13 @@ "type": "options", "options": [ { - "id": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "id": "pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", "name": "retriever", "label": "Pinecone Retriever", "type": "Pinecone | VectorStoreRetriever | BaseRetriever" }, { - "id": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", + "id": "pinecone_0-output-vectorStore-Pinecone|VectorStore", "name": "vectorStore", "label": "Pinecone Vector Store", "type": "Pinecone | VectorStore" @@ -415,42 +262,232 @@ }, "selected": false, "positionAbsolute": { - "x": 241.78764591331816, - "y": -38.438460915613945 + "x": 238.1350223788262, + "y": -133.38073692212225 + }, + "dragging": false + }, + { + "width": 300, + "height": 574, + "id": "chatOpenAI_0", + "position": { + "x": 600.5963052289515, + "y": -359.24280496678995 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "version": 2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": 
"Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["openAIApi"], + "id": "chatOpenAI_0-input-credential-credential" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" + }, + { + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" + }, + { + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" + }, + { + "label": "gpt-3.5-turbo-16k-0613", + "name": "gpt-3.5-turbo-16k-0613" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "step": 0.1, + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "chatOpenAI_0-input-cache-BaseCache" + } + ], + "inputs": { + "cache": "", + "modelName": "gpt-3.5-turbo", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "basepath": "", + "baseOptions": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 600.5963052289515, + "y": -359.24280496678995 }, "dragging": false } ], "edges": [ { - "source": "pineconeExistingIndex_0", - "sourceHandle": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", - "target": "babyAGI_1", - "targetHandle": 
"babyAGI_1-input-vectorStore-VectorStore", + "source": "openAIEmbeddings_0", + "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "pinecone_0", + "targetHandle": "pinecone_0-input-embeddings-Embeddings", "type": "buttonedge", - "id": "pineconeExistingIndex_0-pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore-babyAGI_1-babyAGI_1-input-vectorStore-VectorStore", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pinecone_0-pinecone_0-input-embeddings-Embeddings", "data": { "label": "" } }, { "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", "target": "babyAGI_1", "targetHandle": "babyAGI_1-input-model-BaseChatModel", "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-babyAGI_1-babyAGI_1-input-model-BaseChatModel", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-babyAGI_1-babyAGI_1-input-model-BaseChatModel", "data": { "label": "" } }, { - "source": "openAIEmbeddings_0", - "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "pineconeExistingIndex_0", - "targetHandle": "pineconeExistingIndex_0-input-embeddings-Embeddings", + "source": "pinecone_0", + "sourceHandle": "pinecone_0-output-vectorStore-Pinecone|VectorStore", + "target": "babyAGI_1", + "targetHandle": "babyAGI_1-input-vectorStore-VectorStore", "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeExistingIndex_0-pineconeExistingIndex_0-input-embeddings-Embeddings", + "id": "pinecone_0-pinecone_0-output-vectorStore-Pinecone|VectorStore-babyAGI_1-babyAGI_1-input-vectorStore-VectorStore", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/CSV Agent.json b/packages/server/marketplaces/chatflows/CSV Agent.json index 37764a53f63..61d97c4ddce 100644 --- a/packages/server/marketplaces/chatflows/CSV Agent.json +++ b/packages/server/marketplaces/chatflows/CSV Agent.json @@ -180,6 +180,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -199,7 +207,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { diff --git a/packages/server/marketplaces/chatflows/ChatGPTPlugin.json b/packages/server/marketplaces/chatflows/ChatGPTPlugin.json index 1f00ff5f8a6..b8e2cf01cd7 100644 --- a/packages/server/marketplaces/chatflows/ChatGPTPlugin.json +++ b/packages/server/marketplaces/chatflows/ChatGPTPlugin.json @@ -325,6 +325,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -344,7 +352,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { diff --git 
a/packages/server/marketplaces/chatflows/Conversational Agent.json b/packages/server/marketplaces/chatflows/Conversational Agent.json index d18f2ac068e..2232ade06b9 100644 --- a/packages/server/marketplaces/chatflows/Conversational Agent.json +++ b/packages/server/marketplaces/chatflows/Conversational Agent.json @@ -267,6 +267,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -286,7 +294,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { diff --git a/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json b/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json index ede7215d22e..dd4cf3b16ea 100644 --- a/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json +++ b/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json @@ -2,168 +2,6 @@ "description": "Agent optimized for vector retrieval during conversation and answering questions based on previous dialogue.", "badge": "POPULAR", "nodes": [ - { - "width": 300, - "height": 523, - "id": "chatOpenAI_0", - "position": { - "x": 1381.867549919116, - "y": 212.76900895393834 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_0", - "label": "ChatOpenAI", - "version": 2, - "name": "chatOpenAI", - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "options", - "options": [ - { - "label": "gpt-4", - "name": "gpt-4" - }, - { - "label": "gpt-4-0613", - "name": "gpt-4-0613" - }, - { - "label": "gpt-4-32k", - "name": "gpt-4-32k" - }, - { - "label": "gpt-4-32k-0613", - "name": "gpt-4-32k-0613" - }, - { - "label": "gpt-3.5-turbo", - "name": "gpt-3.5-turbo" - }, - { - "label": "gpt-3.5-turbo-0613", - "name": "gpt-3.5-turbo-0613" - }, - { - "label": "gpt-3.5-turbo-16k", - "name": "gpt-3.5-turbo-16k" - }, - { - "label": "gpt-3.5-turbo-16k-0613", - "name": "gpt-3.5-turbo-16k-0613" - } - ], - "default": "gpt-3.5-turbo", - "optional": true, - "id": "chatOpenAI_0-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "default": 0.9, - "optional": true, - "id": "chatOpenAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": 
"chatOpenAI_0-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_0-input-cache-BaseCache" - } - ], - "inputs": { - "modelName": "gpt-3.5-turbo-16k", - "temperature": "0", - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1381.867549919116, - "y": 212.76900895393834 - }, - "dragging": false - }, { "width": 300, "height": 329, @@ -248,115 +86,6 @@ }, "dragging": false }, - { - "width": 300, - "height": 505, - "id": "pineconeExistingIndex_0", - "position": { - "x": 1362.0018461011314, - "y": -334.0373537488481 - }, - "type": "customNode", - "data": { - "id": "pineconeExistingIndex_0", - "label": "Pinecone Load Existing Index", - "version": 1, - "name": "pineconeExistingIndex", - "type": "Pinecone", - "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Load existing index from Pinecone (i.e: Document has been upserted)", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["pineconeApi"], - "id": "pineconeExistingIndex_0-input-credential-credential" - }, - { - "label": "Pinecone Index", - "name": "pineconeIndex", - "type": "string", - "id": "pineconeExistingIndex_0-input-pineconeIndex-string" - }, - { - "label": "Pinecone Namespace", - "name": "pineconeNamespace", - "type": "string", - "placeholder": "my-first-namespace", - "additionalParams": true, - "optional": true, - "id": "pineconeExistingIndex_0-input-pineconeNamespace-string" - }, - { - "label": "Pinecone Metadata Filter", - "name": "pineconeMetadataFilter", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "pineconeExistingIndex_0-input-pineconeMetadataFilter-json" - }, - { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. 
Default to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "pineconeExistingIndex_0-input-topK-number" - } - ], - "inputAnchors": [ - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "pineconeExistingIndex_0-input-embeddings-Embeddings" - } - ], - "inputs": { - "embeddings": "{{openAIEmbeddings_0.data.instance}}", - "pineconeIndex": "newindex", - "pineconeNamespace": "", - "pineconeMetadataFilter": "", - "topK": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Pinecone Retriever", - "type": "Pinecone | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", - "name": "vectorStore", - "label": "Pinecone Vector Store", - "type": "Pinecone | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { - "output": "retriever" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1362.0018461011314, - "y": -334.0373537488481 - }, - "dragging": false - }, { "width": 300, "height": 383, @@ -478,7 +207,7 @@ "inputs": { "name": "search_website", "description": "Searches and return documents regarding Jane - a culinary institution that offers top quality coffee, pastries, breakfast, lunch, and a variety of baked goods. They have multiple locations, including Jane on Fillmore, Jane on Larkin, Jane the Bakery, Toy Boat By Jane, and Little Jane on Grant. They emphasize healthy eating with a focus on flavor and quality ingredients. They bake everything in-house and work with local suppliers to source ingredients directly from farmers. 
They also offer catering services and delivery options.", - "retriever": "{{pineconeExistingIndex_0.data.instance}}" + "retriever": "{{pinecone_0.data.instance}}" }, "outputAnchors": [ { @@ -554,31 +283,317 @@ "y": 216.94151328212496 }, "dragging": false - } - ], - "edges": [ - { - "source": "openAIEmbeddings_0", - "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "pineconeExistingIndex_0", - "targetHandle": "pineconeExistingIndex_0-input-embeddings-Embeddings", - "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeExistingIndex_0-pineconeExistingIndex_0-input-embeddings-Embeddings", - "data": { - "label": "" - } }, { - "source": "pineconeExistingIndex_0", - "sourceHandle": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", - "target": "retrieverTool_0", - "targetHandle": "retrieverTool_0-input-retriever-BaseRetriever", - "type": "buttonedge", - "id": "pineconeExistingIndex_0-pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-retrieverTool_0-retrieverTool_0-input-retriever-BaseRetriever", + "width": 300, + "height": 555, + "id": "pinecone_0", + "position": { + "x": 1376.1277183738853, + "y": -366.1202264629863 + }, + "type": "customNode", "data": { - "label": "" - } + "id": "pinecone_0", + "label": "Pinecone", + "version": 1, + "name": "pinecone", + "type": "Pinecone", + "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Upsert or Load data to Pinecone Vector Database", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["pineconeApi"], + "id": "pinecone_0-input-credential-credential" + }, + { + "label": "Pinecone Index", + "name": "pineconeIndex", + "type": "string", + "id": "pinecone_0-input-pineconeIndex-string" + }, + { + "label": "Pinecone Namespace", + "name": "pineconeNamespace", + "type": "string", + "placeholder": "my-first-namespace", + "additionalParams": true, + "optional": true, + "id": "pinecone_0-input-pineconeNamespace-string" + }, + { + "label": "Pinecone Metadata Filter", + "name": "pineconeMetadataFilter", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "pinecone_0-input-pineconeMetadataFilter-json" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. 
Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "pinecone_0-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "optional": true, + "id": "pinecone_0-input-document-Document" + }, + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "pinecone_0-input-embeddings-Embeddings" + } + ], + "inputs": { + "document": "", + "embeddings": "{{openAIEmbeddings_0.data.instance}}", + "pineconeIndex": "", + "pineconeNamespace": "", + "pineconeMetadataFilter": "", + "topK": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Pinecone Retriever", + "type": "Pinecone | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "pinecone_0-output-vectorStore-Pinecone|VectorStore", + "name": "vectorStore", + "label": "Pinecone Vector Store", + "type": "Pinecone | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "retriever" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1376.1277183738853, + "y": -366.1202264629863 + }, + "dragging": false }, + { + "width": 300, + "height": 574, + "id": "chatOpenAI_0", + "position": { + "x": 1379.1468417698134, + "y": 220.8323181063672 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "version": 2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["openAIApi"], + "id": "chatOpenAI_0-input-credential-credential" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" + }, + { + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" + }, + { + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" + }, + { + "label": "gpt-3.5-turbo-16k-0613", + "name": "gpt-3.5-turbo-16k-0613" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "step": 0.1, + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency 
Penalty", + "name": "frequencyPenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "chatOpenAI_0-input-cache-BaseCache" + } + ], + "inputs": { + "cache": "", + "modelName": "gpt-3.5-turbo-16k", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "basepath": "", + "baseOptions": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1379.1468417698134, + "y": 220.8323181063672 + }, + "dragging": false + } + ], + "edges": [ { "source": "retrieverTool_0", "sourceHandle": "retrieverTool_0-output-retrieverTool-RetrieverTool|DynamicTool|Tool|StructuredTool|Runnable", @@ -591,23 +606,45 @@ } }, { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", + "source": "bufferMemory_0", + "sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", "target": "conversationalRetrievalAgent_0", - "targetHandle": "conversationalRetrievalAgent_0-input-model-ChatOpenAI", + "targetHandle": "conversationalRetrievalAgent_0-input-memory-BaseChatMemory", "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalRetrievalAgent_0-conversationalRetrievalAgent_0-input-model-ChatOpenAI", + "id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationalRetrievalAgent_0-conversationalRetrievalAgent_0-input-memory-BaseChatMemory", "data": { "label": "" } }, { - "source": "bufferMemory_0", - "sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", + "source": "openAIEmbeddings_0", + "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "pinecone_0", + "targetHandle": "pinecone_0-input-embeddings-Embeddings", + "type": "buttonedge", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pinecone_0-pinecone_0-input-embeddings-Embeddings", + "data": { + "label": "" + } + }, + { + "source": "pinecone_0", + "sourceHandle": "pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "target": "retrieverTool_0", + "targetHandle": "retrieverTool_0-input-retriever-BaseRetriever", + "type": 
"buttonedge", + "id": "pinecone_0-pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-retrieverTool_0-retrieverTool_0-input-retriever-BaseRetriever", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", "target": "conversationalRetrievalAgent_0", - "targetHandle": "conversationalRetrievalAgent_0-input-memory-BaseChatMemory", + "targetHandle": "conversationalRetrievalAgent_0-input-model-ChatOpenAI", "type": "buttonedge", - "id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationalRetrievalAgent_0-conversationalRetrievalAgent_0-input-memory-BaseChatMemory", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalRetrievalAgent_0-conversationalRetrievalAgent_0-input-model-ChatOpenAI", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/Conversational Retrieval QA Chain.json b/packages/server/marketplaces/chatflows/Conversational Retrieval QA Chain.json index ba6c90b76bc..e775846cc2d 100644 --- a/packages/server/marketplaces/chatflows/Conversational Retrieval QA Chain.json +++ b/packages/server/marketplaces/chatflows/Conversational Retrieval QA Chain.json @@ -4,169 +4,7 @@ "nodes": [ { "width": 300, - "height": 522, - "id": "chatOpenAI_0", - "position": { - "x": 1184.1176114500388, - "y": -74.15535835370571 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_0", - "label": "ChatOpenAI", - "name": "chatOpenAI", - "version": 2, - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "options", - "options": [ - { - "label": "gpt-4", - "name": "gpt-4" - }, - { - "label": "gpt-4-0613", - "name": "gpt-4-0613" - }, - { - "label": "gpt-4-32k", - "name": "gpt-4-32k" - }, - { - "label": "gpt-4-32k-0613", - "name": "gpt-4-32k-0613" - }, - { - "label": "gpt-3.5-turbo", - "name": "gpt-3.5-turbo" - }, - { - "label": "gpt-3.5-turbo-0613", - "name": "gpt-3.5-turbo-0613" - }, - { - "label": "gpt-3.5-turbo-16k", - "name": "gpt-3.5-turbo-16k" - }, - { - "label": "gpt-3.5-turbo-16k-0613", - "name": "gpt-3.5-turbo-16k-0613" - } - ], - "default": "gpt-3.5-turbo", - "optional": true, - "id": "chatOpenAI_0-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "default": 0.9, - "optional": true, - "id": "chatOpenAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "optional": true, - 
"additionalParams": true, - "id": "chatOpenAI_0-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_0-input-cache-BaseCache" - } - ], - "inputs": { - "modelName": "gpt-3.5-turbo", - "temperature": "0", - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel" - } - ], - "outputs": {}, - "selected": false - }, - "positionAbsolute": { - "x": 1184.1176114500388, - "y": -74.15535835370571 - }, - "selected": false, - "dragging": false - }, - { - "width": 300, - "height": 328, + "height": 329, "id": "openAIEmbeddings_0", "position": { "x": 795.6162477805387, @@ -176,8 +14,8 @@ "data": { "id": "openAIEmbeddings_0", "label": "OpenAI Embeddings", - "name": "openAIEmbeddings", "version": 1, + "name": "openAIEmbeddings", "type": "OpenAIEmbeddings", "baseClasses": ["OpenAIEmbeddings", "Embeddings"], "category": "Embeddings", @@ -250,115 +88,7 @@ }, { "width": 300, - "height": 554, - "id": "pineconeUpsert_0", - "position": { - "x": 1191.1792786926865, - "y": 514.2126330994578 - }, - "type": "customNode", - "data": { - "id": "pineconeUpsert_0", - "label": "Pinecone Upsert Document", - "name": "pineconeUpsert", - "version": 1, - "type": "Pinecone", - "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Upsert documents to Pinecone", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["pineconeApi"], - "id": "pineconeUpsert_0-input-credential-credential" - }, - { - "label": "Pinecone Index", - "name": "pineconeIndex", - "type": "string", - "id": "pineconeUpsert_0-input-pineconeIndex-string" - }, - { - "label": "Pinecone Namespace", - "name": "pineconeNamespace", - "type": "string", - "placeholder": "my-first-namespace", - "additionalParams": true, - "optional": true, - "id": "pineconeUpsert_0-input-pineconeNamespace-string" - }, - { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. 
Default to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "pineconeUpsert_0-input-topK-number" - } - ], - "inputAnchors": [ - { - "label": "Document", - "name": "document", - "type": "Document", - "list": true, - "id": "pineconeUpsert_0-input-document-Document" - }, - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "pineconeUpsert_0-input-embeddings-Embeddings" - } - ], - "inputs": { - "document": ["{{textFile_0.data.instance}}"], - "embeddings": "{{openAIEmbeddings_0.data.instance}}", - "pineconeIndex": "", - "pineconeNamespace": "", - "topK": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Pinecone Retriever", - "type": "Pinecone | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "pineconeUpsert_0-output-vectorStore-Pinecone|VectorStore", - "name": "vectorStore", - "label": "Pinecone Vector Store", - "type": "Pinecone | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { - "output": "retriever" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1191.1792786926865, - "y": 514.2126330994578 - }, - "dragging": false - }, - { - "width": 300, - "height": 376, + "height": 429, "id": "recursiveCharacterTextSplitter_0", "position": { "x": 406.08456707531263, @@ -368,8 +98,8 @@ "data": { "id": "recursiveCharacterTextSplitter_0", "label": "Recursive Character Text Splitter", - "name": "recursiveCharacterTextSplitter", "version": 2, + "name": "recursiveCharacterTextSplitter", "type": "RecursiveCharacterTextSplitter", "baseClasses": ["RecursiveCharacterTextSplitter", "TextSplitter"], "category": "Text Splitters", @@ -427,7 +157,7 @@ }, { "width": 300, - "height": 410, + "height": 419, "id": "textFile_0", "position": { "x": 786.5497697231324, @@ -437,8 +167,8 @@ "data": { "id": "textFile_0", "label": "Text File", - "name": "textFile", "version": 3, + "name": "textFile", "type": "Document", "baseClasses": ["Document"], "category": "Document Loaders", @@ -509,7 +239,7 @@ }, { "width": 300, - "height": 479, + "height": 480, "id": "conversationalRetrievalQAChain_0", "position": { "x": 1558.6564094656787, @@ -519,8 +249,8 @@ "data": { "id": "conversationalRetrievalQAChain_0", "label": "Conversational Retrieval QA Chain", - "name": "conversationalRetrievalQAChain", "version": 1, + "name": "conversationalRetrievalQAChain", "type": "ConversationalRetrievalQAChain", "baseClasses": ["ConversationalRetrievalQAChain", "BaseChain"], "category": "Chains", @@ -593,7 +323,7 @@ ], "inputs": { "model": "{{chatOpenAI_0.data.instance}}", - "vectorStoreRetriever": "{{pineconeUpsert_0.data.instance}}", + "vectorStoreRetriever": "{{pinecone_0.data.instance}}", "memory": "", "returnSourceDocuments": "", "systemMessagePrompt": "", @@ -615,20 +345,317 @@ "y": 386.60217819991124 }, "selected": false - } - ], - "edges": [ + }, { - "source": "openAIEmbeddings_0", - "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "pineconeUpsert_0", - "targetHandle": "pineconeUpsert_0-input-embeddings-Embeddings", - "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeUpsert_0-pineconeUpsert_0-input-embeddings-Embeddings", + "width": 300, + "height": 574, + "id": "chatOpenAI_0", + 
"position": { + "x": 1194.3554779412727, + "y": -46.74877201166788 + }, + "type": "customNode", "data": { - "label": "" - } + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "version": 2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["openAIApi"], + "id": "chatOpenAI_0-input-credential-credential" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" + }, + { + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" + }, + { + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" + }, + { + "label": "gpt-3.5-turbo-16k-0613", + "name": "gpt-3.5-turbo-16k-0613" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "step": 0.1, + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "chatOpenAI_0-input-cache-BaseCache" + } + ], + "inputs": { + "cache": "", + "modelName": "gpt-3.5-turbo-16k", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "basepath": "", + "baseOptions": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + 
"name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1194.3554779412727, + "y": -46.74877201166788 + }, + "dragging": false }, + { + "width": 300, + "height": 555, + "id": "pinecone_0", + "position": { + "x": 1192.4771449209463, + "y": 552.43946147251 + }, + "type": "customNode", + "data": { + "id": "pinecone_0", + "label": "Pinecone", + "version": 1, + "name": "pinecone", + "type": "Pinecone", + "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Upsert or Load data to Pinecone Vector Database", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["pineconeApi"], + "id": "pinecone_0-input-credential-credential" + }, + { + "label": "Pinecone Index", + "name": "pineconeIndex", + "type": "string", + "id": "pinecone_0-input-pineconeIndex-string" + }, + { + "label": "Pinecone Namespace", + "name": "pineconeNamespace", + "type": "string", + "placeholder": "my-first-namespace", + "additionalParams": true, + "optional": true, + "id": "pinecone_0-input-pineconeNamespace-string" + }, + { + "label": "Pinecone Metadata Filter", + "name": "pineconeMetadataFilter", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "pinecone_0-input-pineconeMetadataFilter-json" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "pinecone_0-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "optional": true, + "id": "pinecone_0-input-document-Document" + }, + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "pinecone_0-input-embeddings-Embeddings" + } + ], + "inputs": { + "document": ["{{textFile_0.data.instance}}"], + "embeddings": "{{openAIEmbeddings_0.data.instance}}", + "pineconeIndex": "", + "pineconeNamespace": "", + "pineconeMetadataFilter": "", + "topK": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Pinecone Retriever", + "type": "Pinecone | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "pinecone_0-output-vectorStore-Pinecone|VectorStore", + "name": "vectorStore", + "label": "Pinecone Vector Store", + "type": "Pinecone | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "retriever" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1192.4771449209463, + "y": 552.43946147251 + }, + "dragging": false + } + ], + "edges": [ { "source": "recursiveCharacterTextSplitter_0", "sourceHandle": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter", @@ -642,33 +669,44 @@ }, { "source": "textFile_0", - "sourceHandle": "textFile_0-output-textFile-Document", - "target": "pineconeUpsert_0", - "targetHandle": "pineconeUpsert_0-input-document-Document", + "sourceHandle": "textFile_0-output-document-Document", + "target": "pinecone_0", + "targetHandle": "pinecone_0-input-document-Document", "type": "buttonedge", - 
"id": "textFile_0-textFile_0-output-textFile-Document-pineconeUpsert_0-pineconeUpsert_0-input-document-Document", + "id": "textFile_0-textFile_0-output-document-Document-pinecone_0-pinecone_0-input-document-Document", "data": { "label": "" } }, { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "target": "conversationalRetrievalQAChain_0", - "targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "source": "openAIEmbeddings_0", + "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "pinecone_0", + "targetHandle": "pinecone_0-input-embeddings-Embeddings", "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pinecone_0-pinecone_0-input-embeddings-Embeddings", "data": { "label": "" } }, { - "source": "pineconeUpsert_0", - "sourceHandle": "pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "source": "pinecone_0", + "sourceHandle": "pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", "target": "conversationalRetrievalQAChain_0", "targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", "type": "buttonedge", - "id": "pineconeUpsert_0-pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "id": "pinecone_0-pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/Flowise Docs QnA.json b/packages/server/marketplaces/chatflows/Flowise Docs QnA.json index 31a7df07b33..ac84cf56490 100644 --- a/packages/server/marketplaces/chatflows/Flowise Docs QnA.json +++ b/packages/server/marketplaces/chatflows/Flowise Docs QnA.json @@ -497,6 +497,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -516,7 +524,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { diff --git a/packages/server/marketplaces/chatflows/Image Generation.json b/packages/server/marketplaces/chatflows/Image Generation.json new file mode 100644 index 00000000000..98d12238f3b --- /dev/null +++ b/packages/server/marketplaces/chatflows/Image Generation.json @@ -0,0 +1,671 @@ +{ + 
"description": "Generate image using Replicate Stability text-to-image generative AI model", + "badge": "NEW", + "nodes": [ + { + "width": 300, + "height": 475, + "id": "promptTemplate_0", + "position": { + "x": 366.28009688480114, + "y": 183.05394484895152 + }, + "type": "customNode", + "data": { + "id": "promptTemplate_0", + "label": "Prompt Template", + "version": 1, + "name": "promptTemplate", + "type": "PromptTemplate", + "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate"], + "category": "Prompts", + "description": "Schema to represent a basic prompt for an LLM", + "inputParams": [ + { + "label": "Template", + "name": "template", + "type": "string", + "rows": 4, + "placeholder": "What is a good name for a company that makes {product}?", + "id": "promptTemplate_0-input-template-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "json", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "promptTemplate_0-input-promptValues-json" + } + ], + "inputAnchors": [], + "inputs": { + "template": "{query}", + "promptValues": "{\"query\":\"{{question}}\"}" + }, + "outputAnchors": [ + { + "id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "name": "promptTemplate", + "label": "PromptTemplate", + "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 366.28009688480114, + "y": 183.05394484895152 + }, + "dragging": false + }, + { + "width": 300, + "height": 475, + "id": "promptTemplate_1", + "position": { + "x": 1391.1872909364881, + "y": 274.0360952991433 + }, + "type": "customNode", + "data": { + "id": "promptTemplate_1", + "label": "Prompt Template", + "version": 1, + "name": "promptTemplate", + "type": "PromptTemplate", + "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate", "Runnable"], + "category": "Prompts", + "description": "Schema to represent a basic prompt for an LLM", + "inputParams": [ + { + "label": "Template", + "name": "template", + "type": "string", + "rows": 4, + "placeholder": "What is a good name for a company that makes {product}?", + "id": "promptTemplate_1-input-template-string" + }, + { + "label": "Format Prompt Values", + "name": "promptValues", + "type": "json", + "optional": true, + "acceptVariable": true, + "list": true, + "id": "promptTemplate_1-input-promptValues-json" + } + ], + "inputAnchors": [], + "inputs": { + "template": "Reply with nothing else but the following:\n![]({text})", + "promptValues": "{\"text\":\"{{llmChain_0.data.instance}}\"}" + }, + "outputAnchors": [ + { + "id": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", + "name": "promptTemplate", + "label": "PromptTemplate", + "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1391.1872909364881, + "y": 274.0360952991433 + }, + "dragging": false + }, + { + "width": 300, + "height": 577, + "id": "replicate_0", + "position": { + "x": 700.5657822436667, + "y": -192.57827891379952 + }, + "type": "customNode", + "data": { + "id": "replicate_0", + "label": "Replicate", + "version": 2, + "name": "replicate", + "type": "Replicate", + "baseClasses": ["Replicate", "BaseChatModel", "LLM", "BaseLLM", "BaseLanguageModel", "Runnable"], 
+ "category": "LLMs", + "description": "Use Replicate to run open source models on cloud", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["replicateApi"], + "id": "replicate_0-input-credential-credential" + }, + { + "label": "Model", + "name": "model", + "type": "string", + "placeholder": "a16z-infra/llama13b-v2-chat:df7690f1994d94e96ad9d568eac121aecf50684a0b0963b25a41cc40061269e5", + "optional": true, + "id": "replicate_0-input-model-string" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "step": 0.1, + "description": "Adjusts randomness of outputs, greater than 1 is random and 0 is deterministic, 0.75 is a good starting value.", + "default": 0.7, + "optional": true, + "id": "replicate_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "step": 1, + "description": "Maximum number of tokens to generate. A word is generally 2-3 tokens", + "optional": true, + "additionalParams": true, + "id": "replicate_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "step": 0.1, + "description": "When decoding text, samples from the top p percentage of most likely tokens; lower to ignore less likely tokens", + "optional": true, + "additionalParams": true, + "id": "replicate_0-input-topP-number" + }, + { + "label": "Repetition Penalty", + "name": "repetitionPenalty", + "type": "number", + "step": 0.1, + "description": "Penalty for repeated words in generated text; 1 is no penalty, values greater than 1 discourage repetition, less than 1 encourage it. (minimum: 0.01; maximum: 5)", + "optional": true, + "additionalParams": true, + "id": "replicate_0-input-repetitionPenalty-number" + }, + { + "label": "Additional Inputs", + "name": "additionalInputs", + "type": "json", + "description": "Each model has different parameters, refer to the specific model accepted inputs. 
For example: llama13b-v2", + "additionalParams": true, + "optional": true, + "id": "replicate_0-input-additionalInputs-json" + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "replicate_0-input-cache-BaseCache" + } + ], + "inputs": { + "cache": "", + "model": "stability-ai/sdxl:af1a68a271597604546c09c64aabcd7782c114a63539a4a8d14d1eeda5630c33", + "temperature": 0.7, + "maxTokens": "", + "topP": "", + "repetitionPenalty": "", + "additionalInputs": "" + }, + "outputAnchors": [ + { + "id": "replicate_0-output-replicate-Replicate|BaseChatModel|LLM|BaseLLM|BaseLanguageModel|Runnable", + "name": "replicate", + "label": "Replicate", + "type": "Replicate | BaseChatModel | LLM | BaseLLM | BaseLanguageModel | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 700.5657822436667, + "y": -192.57827891379952 + }, + "dragging": false + }, + { + "width": 300, + "height": 456, + "id": "llmChain_0", + "position": { + "x": 1045.7783277092838, + "y": 242.08205161173464 + }, + "type": "customNode", + "data": { + "id": "llmChain_0", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_0-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_0-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_0-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_0-input-outputParser-BaseLLMOutputParser" + } + ], + "inputs": { + "model": "{{replicate_0.data.instance}}", + "prompt": "{{promptTemplate_0.data.instance}}", + "outputParser": "", + "chainName": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_0-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_0-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "outputPrediction" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1045.7783277092838, + "y": 242.08205161173464 + }, + "dragging": false + }, + { + "width": 300, + "height": 456, + "id": "llmChain_1", + "position": { + "x": 1769.7463380379868, + "y": 194.56291579865376 + }, + "type": "customNode", + "data": { + "id": "llmChain_1", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_1-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + 
"type": "BaseLanguageModel", + "id": "llmChain_1-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_1-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_1-input-outputParser-BaseLLMOutputParser" + } + ], + "inputs": { + "model": "{{chatOpenAI_0.data.instance}}", + "prompt": "{{promptTemplate_1.data.instance}}", + "outputParser": "", + "chainName": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_1-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1769.7463380379868, + "y": 194.56291579865376 + }, + "dragging": false + }, + { + "width": 300, + "height": 574, + "id": "chatOpenAI_0", + "position": { + "x": 1390.9908731749008, + "y": -332.0609187416074 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "version": 2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["openAIApi"], + "id": "chatOpenAI_0-input-credential-credential" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" + }, + { + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" + }, + { + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" + }, + { + "label": "gpt-3.5-turbo-16k-0613", + "name": "gpt-3.5-turbo-16k-0613" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "step": 0.1, + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": 
"chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "chatOpenAI_0-input-cache-BaseCache" + } + ], + "inputs": { + "cache": "", + "modelName": "gpt-3.5-turbo", + "temperature": "0", + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "basepath": "", + "baseOptions": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1390.9908731749008, + "y": -332.0609187416074 + }, + "dragging": false + } + ], + "edges": [ + { + "source": "promptTemplate_0", + "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "promptTemplate_0-promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate-llmChain_0-llmChain_0-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + }, + { + "source": "replicate_0", + "sourceHandle": "replicate_0-output-replicate-Replicate|BaseChatModel|LLM|BaseLLM|BaseLanguageModel|Runnable", + "target": "llmChain_0", + "targetHandle": "llmChain_0-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "replicate_0-replicate_0-output-replicate-Replicate|BaseChatModel|LLM|BaseLLM|BaseLanguageModel|Runnable-llmChain_0-llmChain_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "promptTemplate_1", + "sourceHandle": "promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "promptTemplate_1-promptTemplate_1-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_1-llmChain_1-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "llmChain_0", + "sourceHandle": 
"llmChain_0-output-outputPrediction-string|json", + "target": "promptTemplate_1", + "targetHandle": "promptTemplate_1-input-promptValues-json", + "type": "buttonedge", + "id": "llmChain_0-llmChain_0-output-outputPrediction-string|json-promptTemplate_1-promptTemplate_1-input-promptValues-json", + "data": { + "label": "" + } + } + ] +} diff --git a/packages/server/marketplaces/chatflows/Local QnA.json b/packages/server/marketplaces/chatflows/Local QnA.json index 78ce16f6b75..fcf8593cb45 100644 --- a/packages/server/marketplaces/chatflows/Local QnA.json +++ b/packages/server/marketplaces/chatflows/Local QnA.json @@ -1,20 +1,21 @@ { - "description": "QnA chain using local LLM, Embedding models, and Faiss local vector store", + "description": "QnA chain using Ollama local LLM, LocalAI embedding model, and Faiss local vector store", + "badge": "POPULAR", "nodes": [ { "width": 300, - "height": 376, + "height": 429, "id": "recursiveCharacterTextSplitter_1", "position": { - "x": 422.81091375202413, + "x": 424.5721426652516, "y": 122.99825010325736 }, "type": "customNode", "data": { "id": "recursiveCharacterTextSplitter_1", "label": "Recursive Character Text Splitter", - "name": "recursiveCharacterTextSplitter", "version": 2, + "name": "recursiveCharacterTextSplitter", "type": "RecursiveCharacterTextSplitter", "baseClasses": ["RecursiveCharacterTextSplitter", "TextSplitter"], "category": "Text Splitters", @@ -65,25 +66,25 @@ }, "selected": false, "positionAbsolute": { - "x": 422.81091375202413, + "x": 424.5721426652516, "y": 122.99825010325736 }, "dragging": false }, { "width": 300, - "height": 428, + "height": 480, "id": "conversationalRetrievalQAChain_0", "position": { - "x": 1634.455879160561, - "y": 428.77742668929807 + "x": 1604.8865818627112, + "y": 329.6333122200366 }, "type": "customNode", "data": { "id": "conversationalRetrievalQAChain_0", "label": "Conversational Retrieval QA Chain", - "name": "conversationalRetrievalQAChain", "version": 1, + "name": "conversationalRetrievalQAChain", "type": "ConversationalRetrievalQAChain", "baseClasses": ["ConversationalRetrievalQAChain", "BaseChain", "BaseLangChain"], "category": "Chains", @@ -155,8 +156,8 @@ } ], "inputs": { - "model": "{{chatLocalAI_0.data.instance}}", - "vectorStoreRetriever": "{{faissUpsert_0.data.instance}}", + "model": "{{chatOllama_0.data.instance}}", + "vectorStoreRetriever": "{{faiss_0.data.instance}}", "memory": "" }, "outputAnchors": [ @@ -172,69 +173,58 @@ }, "selected": false, "positionAbsolute": { - "x": 1634.455879160561, - "y": 428.77742668929807 + "x": 1604.8865818627112, + "y": 329.6333122200366 }, "dragging": false }, { "width": 300, - "height": 456, - "id": "faissUpsert_0", + "height": 419, + "id": "textFile_0", "position": { - "x": 1204.6898035516715, - "y": 521.0933926644659 + "x": 809.5432731751458, + "y": 55.85095796777051 }, "type": "customNode", "data": { - "id": "faissUpsert_0", - "label": "Faiss Upsert Document", - "name": "faissUpsert", - "version": 1, - "type": "Faiss", - "baseClasses": ["Faiss", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Upsert documents to Faiss", + "id": "textFile_0", + "label": "Text File", + "version": 3, + "name": "textFile", + "type": "Document", + "baseClasses": ["Document"], + "category": "Document Loaders", + "description": "Load data from text files", "inputParams": [ { - "label": "Base Path to store", - "name": "basePath", - "description": "Path to store faiss.index file", - "placeholder": "C:\\Users\\User\\Desktop", - "type": 
"string", - "id": "faissUpsert_0-input-basePath-string" + "label": "Txt File", + "name": "txtFile", + "type": "file", + "fileType": ".txt, .html, .aspx, .asp, .cpp, .c, .cs, .css, .go, .h, .java, .js, .less, .ts, .php, .proto, .python, .py, .rst, .ruby, .rb, .rs, .scala, .sc, .scss, .sol, .sql, .swift, .markdown, .md, .tex, .ltx, .vb, .xml", + "id": "textFile_0-input-txtFile-file" }, { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. Default to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, + "label": "Metadata", + "name": "metadata", + "type": "json", "optional": true, - "id": "faissUpsert_0-input-topK-number" + "additionalParams": true, + "id": "textFile_0-input-metadata-json" } ], "inputAnchors": [ { - "label": "Document", - "name": "document", - "type": "Document", - "list": true, - "id": "faissUpsert_0-input-document-Document" - }, - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "faissUpsert_0-input-embeddings-Embeddings" + "label": "Text Splitter", + "name": "textSplitter", + "type": "TextSplitter", + "optional": true, + "id": "textFile_0-input-textSplitter-TextSplitter" } ], "inputs": { - "document": ["{{textFile_0.data.instance}}"], - "embeddings": "{{localAIEmbeddings_0.data.instance}}", - "basePath": "C:\\Users\\your-folder", - "topK": "" + "textSplitter": "{{recursiveCharacterTextSplitter_1.data.instance}}", + "metadata": "" }, "outputAnchors": [ { @@ -243,97 +233,263 @@ "type": "options", "options": [ { - "id": "faissUpsert_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Faiss Retriever", - "type": "Faiss | VectorStoreRetriever | BaseRetriever" + "id": "textFile_0-output-document-Document", + "name": "document", + "label": "Document", + "type": "Document" }, { - "id": "faissUpsert_0-output-vectorStore-Faiss|SaveableVectorStore|VectorStore", - "name": "vectorStore", - "label": "Faiss Vector Store", - "type": "Faiss | SaveableVectorStore | VectorStore" + "id": "textFile_0-output-text-string|json", + "name": "text", + "label": "Text", + "type": "string | json" } ], - "default": "retriever" + "default": "document" } ], "outputs": { - "output": "retriever" + "output": "document" }, "selected": false }, "selected": false, "positionAbsolute": { - "x": 1204.6898035516715, - "y": 521.0933926644659 + "x": 809.5432731751458, + "y": 55.85095796777051 }, "dragging": false }, { "width": 300, - "height": 526, - "id": "chatLocalAI_0", + "height": 376, + "id": "localAIEmbeddings_0", "position": { - "x": 1191.9512064167336, - "y": -94.05401001663306 + "x": 809.5432731751458, + "y": 507.4586304746849 }, "type": "customNode", "data": { - "id": "chatLocalAI_0", - "label": "ChatLocalAI", - "name": "chatLocalAI", - "version": 2, - "type": "ChatLocalAI", - "baseClasses": ["ChatLocalAI", "BaseChatModel", "LLM", "BaseLLM", "BaseLanguageModel", "BaseLangChain"], - "category": "Chat Models", - "description": "Use local LLMs like llama.cpp, gpt4all using LocalAI", + "id": "localAIEmbeddings_0", + "label": "LocalAI Embeddings", + "version": 1, + "name": "localAIEmbeddings", + "type": "LocalAI Embeddings", + "baseClasses": ["LocalAI Embeddings", "Embeddings"], + "category": "Embeddings", + "description": "Use local embeddings models like llama.cpp", "inputParams": [ { "label": "Base Path", "name": "basePath", "type": "string", "placeholder": "http://localhost:8080/v1", - "id": "chatLocalAI_0-input-basePath-string" + "id": 
"localAIEmbeddings_0-input-basePath-string" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "string", + "placeholder": "text-embedding-ada-002", + "id": "localAIEmbeddings_0-input-modelName-string" + } + ], + "inputAnchors": [], + "inputs": { + "basePath": "http://localhost:8080/v1", + "modelName": "text-embedding-ada-002" + }, + "outputAnchors": [ + { + "id": "localAIEmbeddings_0-output-localAIEmbeddings-LocalAI Embeddings|Embeddings", + "name": "localAIEmbeddings", + "label": "LocalAI Embeddings", + "type": "LocalAI Embeddings | Embeddings" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 809.5432731751458, + "y": 507.4586304746849 + }, + "dragging": false + }, + { + "width": 300, + "height": 578, + "id": "chatOllama_0", + "position": { + "x": 1198.006914501795, + "y": -78.92345253481488 + }, + "type": "customNode", + "data": { + "id": "chatOllama_0", + "label": "ChatOllama", + "version": 2, + "name": "chatOllama", + "type": "ChatOllama", + "baseClasses": ["ChatOllama", "SimpleChatModel", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Chat completion using open-source LLM on Ollama", + "inputParams": [ + { + "label": "Base URL", + "name": "baseUrl", + "type": "string", + "default": "http://localhost:11434", + "id": "chatOllama_0-input-baseUrl-string" }, { "label": "Model Name", "name": "modelName", "type": "string", - "placeholder": "gpt4all-lora-quantized.bin", - "id": "chatLocalAI_0-input-modelName-string" + "placeholder": "llama2", + "id": "chatOllama_0-input-modelName-string" }, { "label": "Temperature", "name": "temperature", "type": "number", + "description": "The temperature of the model. Increasing the temperature will make the model answer more creatively. (Default: 0.8). Refer to docs for more details", + "step": 0.1, "default": 0.9, "optional": true, - "id": "chatLocalAI_0-input-temperature-number" + "id": "chatOllama_0-input-temperature-number" }, { - "label": "Max Tokens", - "name": "maxTokens", + "label": "Top P", + "name": "topP", "type": "number", + "description": "Works together with top-k. A higher value (e.g., 0.95) will lead to more diverse text, while a lower value (e.g., 0.5) will generate more focused and conservative text. (Default: 0.9). Refer to docs for more details", + "step": 0.1, "optional": true, "additionalParams": true, - "id": "chatLocalAI_0-input-maxTokens-number" + "id": "chatOllama_0-input-topP-number" }, { - "label": "Top Probability", - "name": "topP", + "label": "Top K", + "name": "topK", + "type": "number", + "description": "Reduces the probability of generating nonsense. A higher value (e.g. 100) will give more diverse answers, while a lower value (e.g. 10) will be more conservative. (Default: 40). Refer to docs for more details", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOllama_0-input-topK-number" + }, + { + "label": "Mirostat", + "name": "mirostat", + "type": "number", + "description": "Enable Mirostat sampling for controlling perplexity. (default: 0, 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0). Refer to docs for more details", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOllama_0-input-mirostat-number" + }, + { + "label": "Mirostat ETA", + "name": "mirostatEta", "type": "number", + "description": "Influences how quickly the algorithm responds to feedback from the generated text. 
A lower learning rate will result in slower adjustments, while a higher learning rate will make the algorithm more responsive. (Default: 0.1) Refer to docs for more details", + "step": 0.1, "optional": true, "additionalParams": true, - "id": "chatLocalAI_0-input-topP-number" + "id": "chatOllama_0-input-mirostatEta-number" }, { - "label": "Timeout", - "name": "timeout", + "label": "Mirostat TAU", + "name": "mirostatTau", "type": "number", + "description": "Controls the balance between coherence and diversity of the output. A lower value will result in more focused and coherent text. (Default: 5.0) Refer to docs for more details", + "step": 0.1, "optional": true, "additionalParams": true, - "id": "chatLocalAI_0-input-timeout-number" + "id": "chatOllama_0-input-mirostatTau-number" + }, + { + "label": "Context Window Size", + "name": "numCtx", + "type": "number", + "description": "Sets the size of the context window used to generate the next token. (Default: 2048) Refer to docs for more details", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOllama_0-input-numCtx-number" + }, + { + "label": "Number of GQA groups", + "name": "numGqa", + "type": "number", + "description": "The number of GQA groups in the transformer layer. Required for some models, for example it is 8 for llama2:70b. Refer to docs for more details", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOllama_0-input-numGqa-number" + }, + { + "label": "Number of GPU", + "name": "numGpu", + "type": "number", + "description": "The number of layers to send to the GPU(s). On macOS it defaults to 1 to enable metal support, 0 to disable. Refer to docs for more details", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOllama_0-input-numGpu-number" + }, + { + "label": "Number of Thread", + "name": "numThread", + "type": "number", + "description": "Sets the number of threads to use during computation. By default, Ollama will detect this for optimal performance. It is recommended to set this value to the number of physical CPU cores your system has (as opposed to the logical number of cores). Refer to docs for more details", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOllama_0-input-numThread-number" + }, + { + "label": "Repeat Last N", + "name": "repeatLastN", + "type": "number", + "description": "Sets how far back for the model to look back to prevent repetition. (Default: 64, 0 = disabled, -1 = num_ctx). Refer to docs for more details", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOllama_0-input-repeatLastN-number" + }, + { + "label": "Repeat Penalty", + "name": "repeatPenalty", + "type": "number", + "description": "Sets how strongly to penalize repetitions. A higher value (e.g., 1.5) will penalize repetitions more strongly, while a lower value (e.g., 0.9) will be more lenient. (Default: 1.1). Refer to docs for more details", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOllama_0-input-repeatPenalty-number" + }, + { + "label": "Stop Sequence", + "name": "stop", + "type": "string", + "rows": 4, + "placeholder": "AI assistant:", + "description": "Sets the stop sequences to use. Use comma to seperate different sequences. 
Refer to docs for more details", + "optional": true, + "additionalParams": true, + "id": "chatOllama_0-input-stop-string" + }, + { + "label": "Tail Free Sampling", + "name": "tfsZ", + "type": "number", + "description": "Tail free sampling is used to reduce the impact of less probable tokens from the output. A higher value (e.g., 2.0) will reduce the impact more, while a value of 1.0 disables this setting. (Default: 1). Refer to docs for more details", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOllama_0-input-tfsZ-number" } ], "inputAnchors": [ @@ -342,23 +498,34 @@ "name": "cache", "type": "BaseCache", "optional": true, - "id": "chatLocalAI_0-input-cache-BaseCache" + "id": "chatOllama_0-input-cache-BaseCache" } ], "inputs": { - "basePath": "http://localhost:8080/v1", - "modelName": "ggml-gpt4all-j.bin", + "cache": "", + "baseUrl": "http://localhost:11434", + "modelName": "llama2", "temperature": 0.9, - "maxTokens": "", "topP": "", - "timeout": "" + "topK": "", + "mirostat": "", + "mirostatEta": "", + "mirostatTau": "", + "numCtx": "", + "numGqa": "", + "numGpu": "", + "numThread": "", + "repeatLastN": "", + "repeatPenalty": "", + "stop": "", + "tfsZ": "" }, "outputAnchors": [ { - "id": "chatLocalAI_0-output-chatLocalAI-ChatLocalAI|BaseChatModel|LLM|BaseLLM|BaseLanguageModel|BaseLangChain", - "name": "chatLocalAI", - "label": "ChatLocalAI", - "type": "ChatLocalAI | BaseChatModel | LLM | BaseLLM | BaseLanguageModel | BaseLangChain" + "id": "chatOllama_0-output-chatOllama-ChatOllama|SimpleChatModel|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOllama", + "label": "ChatOllama", + "type": "ChatOllama | SimpleChatModel | BaseChatModel | BaseLanguageModel | Runnable" } ], "outputs": {}, @@ -366,58 +533,70 @@ }, "selected": false, "positionAbsolute": { - "x": 1191.9512064167336, - "y": -94.05401001663306 + "x": 1198.006914501795, + "y": -78.92345253481488 }, "dragging": false }, { "width": 300, - "height": 410, - "id": "textFile_0", + "height": 458, + "id": "faiss_0", "position": { - "x": 809.5432731751458, - "y": 55.85095796777051 + "x": 1199.3135683364685, + "y": 520.9300176396024 }, "type": "customNode", "data": { - "id": "textFile_0", - "label": "Text File", - "name": "textFile", - "version": 3, - "type": "Document", - "baseClasses": ["Document"], - "category": "Document Loaders", - "description": "Load data from text files", + "id": "faiss_0", + "label": "Faiss", + "version": 1, + "name": "faiss", + "type": "Faiss", + "baseClasses": ["Faiss", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Upsert or Load data to Faiss Vector Store", "inputParams": [ { - "label": "Txt File", - "name": "txtFile", - "type": "file", - "fileType": ".txt, .html, .aspx, .asp, .cpp, .c, .cs, .css, .go, .h, .java, .js, .less, .ts, .php, .proto, .python, .py, .rst, .ruby, .rb, .rs, .scala, .sc, .scss, .sol, .sql, .swift, .markdown, .md, .tex, .ltx, .vb, .xml", - "id": "textFile_0-input-txtFile-file" + "label": "Base Path to load", + "name": "basePath", + "description": "Path to load faiss.index file", + "placeholder": "C:\\Users\\User\\Desktop", + "type": "string", + "id": "faiss_0-input-basePath-string" }, { - "label": "Metadata", - "name": "metadata", - "type": "json", - "optional": true, + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. 
Default to 4", + "placeholder": "4", + "type": "number", "additionalParams": true, - "id": "textFile_0-input-metadata-json" + "optional": true, + "id": "faiss_0-input-topK-number" } ], "inputAnchors": [ { - "label": "Text Splitter", - "name": "textSplitter", - "type": "TextSplitter", + "label": "Document", + "name": "document", + "type": "Document", + "list": true, "optional": true, - "id": "textFile_0-input-textSplitter-TextSplitter" + "id": "faiss_0-input-document-Document" + }, + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "faiss_0-input-embeddings-Embeddings" } ], "inputs": { - "textSplitter": "{{recursiveCharacterTextSplitter_1.data.instance}}", - "metadata": "" + "document": ["{{textFile_0.data.instance}}"], + "embeddings": "{{localAIEmbeddings_0.data.instance}}", + "basePath": "C:\\Users\\your-folder", + "topK": "" }, "outputAnchors": [ { @@ -426,143 +605,86 @@ "type": "options", "options": [ { - "id": "textFile_0-output-document-Document", - "name": "document", - "label": "Document", - "type": "Document" + "id": "faiss_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Faiss Retriever", + "type": "Faiss | VectorStoreRetriever | BaseRetriever" }, { - "id": "textFile_0-output-text-string|json", - "name": "text", - "label": "Text", - "type": "string | json" + "id": "faiss_0-output-vectorStore-Faiss|SaveableVectorStore|VectorStore", + "name": "vectorStore", + "label": "Faiss Vector Store", + "type": "Faiss | SaveableVectorStore | VectorStore" } ], - "default": "document" + "default": "retriever" } ], "outputs": { - "output": "document" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 809.5432731751458, - "y": 55.85095796777051 - }, - "dragging": false - }, - { - "width": 300, - "height": 376, - "id": "localAIEmbeddings_0", - "position": { - "x": 809.5432731751458, - "y": 507.4586304746849 - }, - "type": "customNode", - "data": { - "id": "localAIEmbeddings_0", - "label": "LocalAI Embeddings", - "name": "localAIEmbeddings", - "version": 1, - "type": "LocalAI Embeddings", - "baseClasses": ["LocalAI Embeddings", "Embeddings"], - "category": "Embeddings", - "description": "Use local embeddings models like llama.cpp", - "inputParams": [ - { - "label": "Base Path", - "name": "basePath", - "type": "string", - "placeholder": "http://localhost:8080/v1", - "id": "localAIEmbeddings_0-input-basePath-string" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "string", - "placeholder": "text-embedding-ada-002", - "id": "localAIEmbeddings_0-input-modelName-string" - } - ], - "inputAnchors": [], - "inputs": { - "basePath": "http://localhost:8080/v1", - "modelName": "text-embedding-ada-002" + "output": "retriever" }, - "outputAnchors": [ - { - "id": "localAIEmbeddings_0-output-localAIEmbeddings-LocalAI Embeddings|Embeddings", - "name": "localAIEmbeddings", - "label": "LocalAI Embeddings", - "type": "LocalAI Embeddings | Embeddings" - } - ], - "outputs": {}, "selected": false }, "selected": false, "positionAbsolute": { - "x": 809.5432731751458, - "y": 507.4586304746849 + "x": 1199.3135683364685, + "y": 520.9300176396024 }, "dragging": false } ], "edges": [ { - "source": "faissUpsert_0", - "sourceHandle": "faissUpsert_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever", - "target": "conversationalRetrievalQAChain_0", - "targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "source": "recursiveCharacterTextSplitter_1", + 
"sourceHandle": "recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter", + "target": "textFile_0", + "targetHandle": "textFile_0-input-textSplitter-TextSplitter", "type": "buttonedge", - "id": "faissUpsert_0-faissUpsert_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "id": "recursiveCharacterTextSplitter_1-recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter-textFile_0-textFile_0-input-textSplitter-TextSplitter", "data": { "label": "" } }, { - "source": "chatLocalAI_0", - "sourceHandle": "chatLocalAI_0-output-chatLocalAI-ChatLocalAI|BaseChatModel|LLM|BaseLLM|BaseLanguageModel|BaseLangChain", + "source": "chatOllama_0", + "sourceHandle": "chatOllama_0-output-chatOllama-ChatOllama|SimpleChatModel|BaseChatModel|BaseLanguageModel|Runnable", "target": "conversationalRetrievalQAChain_0", "targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", "type": "buttonedge", - "id": "chatLocalAI_0-chatLocalAI_0-output-chatLocalAI-ChatLocalAI|BaseChatModel|LLM|BaseLLM|BaseLanguageModel|BaseLangChain-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "id": "chatOllama_0-chatOllama_0-output-chatOllama-ChatOllama|SimpleChatModel|BaseChatModel|BaseLanguageModel|Runnable-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", "data": { "label": "" } }, { - "source": "recursiveCharacterTextSplitter_1", - "sourceHandle": "recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter", - "target": "textFile_0", - "targetHandle": "textFile_0-input-textSplitter-TextSplitter", + "source": "textFile_0", + "sourceHandle": "textFile_0-output-document-Document", + "target": "faiss_0", + "targetHandle": "faiss_0-input-document-Document", "type": "buttonedge", - "id": "recursiveCharacterTextSplitter_1-recursiveCharacterTextSplitter_1-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter-textFile_0-textFile_0-input-textSplitter-TextSplitter", + "id": "textFile_0-textFile_0-output-document-Document-faiss_0-faiss_0-input-document-Document", "data": { "label": "" } }, { - "source": "textFile_0", - "sourceHandle": "textFile_0-output-textFile-Document", - "target": "faissUpsert_0", - "targetHandle": "faissUpsert_0-input-document-Document", + "source": "localAIEmbeddings_0", + "sourceHandle": "localAIEmbeddings_0-output-localAIEmbeddings-LocalAI Embeddings|Embeddings", + "target": "faiss_0", + "targetHandle": "faiss_0-input-embeddings-Embeddings", "type": "buttonedge", - "id": "textFile_0-textFile_0-output-textFile-Document-faissUpsert_0-faissUpsert_0-input-document-Document", + "id": "localAIEmbeddings_0-localAIEmbeddings_0-output-localAIEmbeddings-LocalAI Embeddings|Embeddings-faiss_0-faiss_0-input-embeddings-Embeddings", "data": { "label": "" } }, { - "source": "localAIEmbeddings_0", - "sourceHandle": "localAIEmbeddings_0-output-localAIEmbeddings-LocalAI Embeddings|Embeddings", - "target": "faissUpsert_0", - "targetHandle": "faissUpsert_0-input-embeddings-Embeddings", + "source": "faiss_0", + "sourceHandle": "faiss_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": 
"conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", "type": "buttonedge", - "id": "localAIEmbeddings_0-localAIEmbeddings_0-output-localAIEmbeddings-LocalAI Embeddings|Embeddings-faissUpsert_0-faissUpsert_0-input-embeddings-Embeddings", + "id": "faiss_0-faiss_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/Long Term Memory.json b/packages/server/marketplaces/chatflows/Long Term Memory.json index 6f22c00ae3e..f5ff2dca9a8 100644 --- a/packages/server/marketplaces/chatflows/Long Term Memory.json +++ b/packages/server/marketplaces/chatflows/Long Term Memory.json @@ -1,20 +1,20 @@ { - "description": "Use long term memory Zep to differentiate conversations between users with sessionId", + "description": "Use long term memory like Zep to differentiate conversations between users with sessionId", "nodes": [ { "width": 300, "height": 480, "id": "conversationalRetrievalQAChain_0", "position": { - "x": 1999.7302950816731, - "y": 365.33064907894243 + "x": 2001.2622706097407, + "y": 360.7347224947406 }, "type": "customNode", "data": { "id": "conversationalRetrievalQAChain_0", "label": "Conversational Retrieval QA Chain", - "name": "conversationalRetrievalQAChain", "version": 1, + "name": "conversationalRetrievalQAChain", "type": "ConversationalRetrievalQAChain", "baseClasses": ["ConversationalRetrievalQAChain", "BaseChain", "BaseLangChain"], "category": "Chains", @@ -87,7 +87,7 @@ ], "inputs": { "model": "{{chatOpenAI_0.data.instance}}", - "vectorStoreRetriever": "{{pineconeExistingIndex_0.data.instance}}", + "vectorStoreRetriever": "{{qdrant_0.data.instance}}", "memory": "{{ZepMemory_0.data.instance}}", "returnSourceDocuments": true }, @@ -104,118 +104,52 @@ }, "selected": false, "positionAbsolute": { - "x": 1999.7302950816731, - "y": 365.33064907894243 + "x": 2001.2622706097407, + "y": 360.7347224947406 }, "dragging": false }, { "width": 300, - "height": 523, - "id": "chatOpenAI_0", + "height": 329, + "id": "openAIEmbeddings_0", "position": { - "x": 1554.3875781165111, - "y": -74.792508259787212 + "x": 789.6839176356616, + "y": 167.70165941305987 }, "type": "customNode", "data": { - "id": "chatOpenAI_0", - "label": "ChatOpenAI", - "name": "chatOpenAI", - "version": 2, - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "id": "openAIEmbeddings_0", + "label": "OpenAI Embeddings", + "version": 1, + "name": "openAIEmbeddings", + "type": "OpenAIEmbeddings", + "baseClasses": ["OpenAIEmbeddings", "Embeddings"], + "category": "Embeddings", + "description": "OpenAI API to generate embeddings for a given text", "inputParams": [ { "label": "Connect Credential", "name": "credential", "type": "credential", "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "options", - "options": [ - { - "label": "gpt-4", - "name": "gpt-4" - }, - { - "label": "gpt-4-0613", - "name": "gpt-4-0613" - }, - { - "label": "gpt-4-32k", - "name": "gpt-4-32k" - }, - { - "label": "gpt-4-32k-0613", - "name": "gpt-4-32k-0613" - }, - { - "label": "gpt-3.5-turbo", - "name": "gpt-3.5-turbo" - }, - { - "label": "gpt-3.5-turbo-0613", - "name": "gpt-3.5-turbo-0613" - }, 
- { - "label": "gpt-3.5-turbo-16k", - "name": "gpt-3.5-turbo-16k" - }, - { - "label": "gpt-3.5-turbo-16k-0613", - "name": "gpt-3.5-turbo-16k-0613" - } - ], - "default": "gpt-3.5-turbo", - "optional": true, - "id": "chatOpenAI_0-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "default": 0.9, - "optional": true, - "id": "chatOpenAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-topP-number" + "id": "openAIEmbeddings_0-input-credential-credential" }, { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", + "label": "Strip New Lines", + "name": "stripNewLines", + "type": "boolean", "optional": true, "additionalParams": true, - "id": "chatOpenAI_0-input-frequencyPenalty-number" + "id": "openAIEmbeddings_0-input-stripNewLines-boolean" }, { - "label": "Presence Penalty", - "name": "presencePenalty", + "label": "Batch Size", + "name": "batchSize", "type": "number", "optional": true, "additionalParams": true, - "id": "chatOpenAI_0-input-presencePenalty-number" + "id": "openAIEmbeddings_0-input-batchSize-number" }, { "label": "Timeout", @@ -223,7 +157,7 @@ "type": "number", "optional": true, "additionalParams": true, - "id": "chatOpenAI_0-input-timeout-number" + "id": "openAIEmbeddings_0-input-timeout-number" }, { "label": "BasePath", @@ -231,34 +165,22 @@ "type": "string", "optional": true, "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_0-input-cache-BaseCache" + "id": "openAIEmbeddings_0-input-basepath-string" } ], + "inputAnchors": [], "inputs": { - "modelName": "gpt-3.5-turbo", - "temperature": "0", - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", + "stripNewLines": "", + "batchSize": "", "timeout": "", "basepath": "" }, "outputAnchors": [ { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel" + "id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "name": "openAIEmbeddings", + "label": "OpenAIEmbeddings", + "type": "OpenAIEmbeddings | Embeddings" } ], "outputs": {}, @@ -266,83 +188,140 @@ }, "selected": false, "positionAbsolute": { - "x": 1554.3875781165111, - "y": -74.792508259787212 + "x": 789.6839176356616, + "y": 167.70165941305987 }, "dragging": false }, { "width": 300, - "height": 329, - "id": "openAIEmbeddings_0", + "height": 623, + "id": "ZepMemory_0", "position": { - "x": 789.6839176356616, - "y": 167.70165941305987 + "x": 420.8032935700942, + "y": 92.41976641951993 }, "type": "customNode", "data": { - "id": "openAIEmbeddings_0", - "label": "OpenAI Embeddings", - "name": "openAIEmbeddings", + "id": "ZepMemory_0", + "label": "Zep Memory", "version": 1, - "type": "OpenAIEmbeddings", - "baseClasses": ["OpenAIEmbeddings", "Embeddings"], - "category": "Embeddings", - "description": "OpenAI API to generate embeddings for a given text", + "name": "ZepMemory", + "type": "ZepMemory", + "baseClasses": ["ZepMemory", "BaseChatMemory", "BaseMemory"], + 
"category": "Memory", + "description": "Summarizes the conversation and stores the memory in zep server", "inputParams": [ { "label": "Connect Credential", "name": "credential", "type": "credential", - "credentialNames": ["openAIApi"], - "id": "openAIEmbeddings_0-input-credential-credential" + "optional": true, + "description": "Configure JWT authentication on your Zep instance (Optional)", + "credentialNames": ["zepMemoryApi"], + "id": "ZepMemory_0-input-credential-credential" }, { - "label": "Strip New Lines", - "name": "stripNewLines", + "label": "Base URL", + "name": "baseURL", + "type": "string", + "default": "http://127.0.0.1:8000", + "id": "ZepMemory_0-input-baseURL-string" + }, + { + "label": "Auto Summary", + "name": "autoSummary", "type": "boolean", - "optional": true, + "default": true, + "id": "ZepMemory_0-input-autoSummary-boolean" + }, + { + "label": "Session Id", + "name": "sessionId", + "type": "string", + "description": "if empty, chatId will be used automatically", + "default": "", "additionalParams": true, - "id": "openAIEmbeddings_0-input-stripNewLines-boolean" + "optional": true, + "id": "ZepMemory_0-input-sessionId-string" }, { - "label": "Batch Size", - "name": "batchSize", + "label": "Size", + "name": "k", "type": "number", - "optional": true, + "default": "10", + "step": 1, + "description": "Window of size k to surface the last k back-and-forths to use as memory.", + "id": "ZepMemory_0-input-k-number" + }, + { + "label": "Auto Summary Template", + "name": "autoSummaryTemplate", + "type": "string", + "default": "This is the summary of the following conversation:\n{summary}", "additionalParams": true, - "id": "openAIEmbeddings_0-input-batchSize-number" + "id": "ZepMemory_0-input-autoSummaryTemplate-string" }, { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, + "label": "AI Prefix", + "name": "aiPrefix", + "type": "string", + "default": "ai", "additionalParams": true, - "id": "openAIEmbeddings_0-input-timeout-number" + "id": "ZepMemory_0-input-aiPrefix-string" }, { - "label": "BasePath", - "name": "basepath", + "label": "Human Prefix", + "name": "humanPrefix", "type": "string", - "optional": true, + "default": "human", "additionalParams": true, - "id": "openAIEmbeddings_0-input-basepath-string" + "id": "ZepMemory_0-input-humanPrefix-string" + }, + { + "label": "Memory Key", + "name": "memoryKey", + "type": "string", + "default": "chat_history", + "additionalParams": true, + "id": "ZepMemory_0-input-memoryKey-string" + }, + { + "label": "Input Key", + "name": "inputKey", + "type": "string", + "default": "input", + "additionalParams": true, + "id": "ZepMemory_0-input-inputKey-string" + }, + { + "label": "Output Key", + "name": "outputKey", + "type": "string", + "default": "text", + "additionalParams": true, + "id": "ZepMemory_0-input-outputKey-string" } ], "inputAnchors": [], "inputs": { - "stripNewLines": "", - "batchSize": "", - "timeout": "", - "basepath": "" + "baseURL": "http://127.0.0.1:8000", + "autoSummary": true, + "sessionId": "", + "k": "10", + "autoSummaryTemplate": "This is the summary of the following conversation:\n{summary}", + "aiPrefix": "ai", + "humanPrefix": "human", + "memoryKey": "chat_history", + "inputKey": "input", + "outputKey": "text" }, "outputAnchors": [ { - "id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "name": "openAIEmbeddings", - "label": "OpenAIEmbeddings", - "type": "OpenAIEmbeddings | Embeddings" + "id": 
"ZepMemory_0-output-ZepMemory-ZepMemory|BaseChatMemory|BaseMemory", + "name": "ZepMemory", + "label": "ZepMemory", + "type": "ZepMemory | BaseChatMemory | BaseMemory" } ], "outputs": {}, @@ -350,59 +329,91 @@ }, "selected": false, "positionAbsolute": { - "x": 789.6839176356616, - "y": 167.70165941305987 + "x": 420.8032935700942, + "y": 92.41976641951993 }, "dragging": false }, { "width": 300, - "height": 505, - "id": "pineconeExistingIndex_0", + "height": 654, + "id": "qdrant_0", "position": { - "x": 1167.128201355349, - "y": 71.89355115516406 + "x": 1186.2560075381377, + "y": -86.38901299105441 }, "type": "customNode", "data": { - "id": "pineconeExistingIndex_0", - "label": "Pinecone Load Existing Index", - "name": "pineconeExistingIndex", + "id": "qdrant_0", + "label": "Qdrant", "version": 1, - "type": "Pinecone", - "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], + "name": "qdrant", + "type": "Qdrant", + "baseClasses": ["Qdrant", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Load existing index from Pinecone (i.e: Document has been upserted)", + "description": "Upsert or Load data to Qdrant Vector Database", "inputParams": [ { "label": "Connect Credential", "name": "credential", "type": "credential", - "credentialNames": ["pineconeApi"], - "id": "pineconeExistingIndex_0-input-credential-credential" + "description": "Only needed when using Qdrant cloud hosted", + "optional": true, + "credentialNames": ["qdrantApi"], + "id": "qdrant_0-input-credential-credential" }, { - "label": "Pinecone Index", - "name": "pineconeIndex", + "label": "Qdrant Server URL", + "name": "qdrantServerUrl", "type": "string", - "id": "pineconeExistingIndex_0-input-pineconeIndex-string" + "placeholder": "http://localhost:6333", + "id": "qdrant_0-input-qdrantServerUrl-string" }, { - "label": "Pinecone Namespace", - "name": "pineconeNamespace", + "label": "Qdrant Collection Name", + "name": "qdrantCollection", "type": "string", - "placeholder": "my-first-namespace", + "id": "qdrant_0-input-qdrantCollection-string" + }, + { + "label": "Vector Dimension", + "name": "qdrantVectorDimension", + "type": "number", + "default": 1536, "additionalParams": true, - "optional": true, - "id": "pineconeExistingIndex_0-input-pineconeNamespace-string" + "id": "qdrant_0-input-qdrantVectorDimension-number" + }, + { + "label": "Similarity", + "name": "qdrantSimilarity", + "description": "Similarity measure used in Qdrant.", + "type": "options", + "default": "Cosine", + "options": [ + { + "label": "Cosine", + "name": "Cosine" + }, + { + "label": "Euclid", + "name": "Euclid" + }, + { + "label": "Dot", + "name": "Dot" + } + ], + "additionalParams": true, + "id": "qdrant_0-input-qdrantSimilarity-options" }, { - "label": "Pinecone Metadata Filter", - "name": "pineconeMetadataFilter", + "label": "Additional Collection Cofiguration", + "name": "qdrantCollectionConfiguration", + "description": "Refer to collection docs for more reference", "type": "json", "optional": true, "additionalParams": true, - "id": "pineconeExistingIndex_0-input-pineconeMetadataFilter-json" + "id": "qdrant_0-input-qdrantCollectionConfiguration-json" }, { "label": "Top K", @@ -412,23 +423,44 @@ "type": "number", "additionalParams": true, "optional": true, - "id": "pineconeExistingIndex_0-input-topK-number" + "id": "qdrant_0-input-topK-number" + }, + { + "label": "Qdrant Search Filter", + "name": "qdrantFilter", + "description": "Only return points which satisfy the conditions", + "type": "json", + 
"additionalParams": true, + "optional": true, + "id": "qdrant_0-input-qdrantFilter-json" } ], "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "optional": true, + "id": "qdrant_0-input-document-Document" + }, { "label": "Embeddings", "name": "embeddings", "type": "Embeddings", - "id": "pineconeExistingIndex_0-input-embeddings-Embeddings" + "id": "qdrant_0-input-embeddings-Embeddings" } ], "inputs": { + "document": "", "embeddings": "{{openAIEmbeddings_0.data.instance}}", - "pineconeIndex": "", - "pineconeNamespace": "", - "pineconeMetadataFilter": "", - "topK": "" + "qdrantServerUrl": "", + "qdrantCollection": "", + "qdrantVectorDimension": 1536, + "qdrantSimilarity": "Cosine", + "qdrantCollectionConfiguration": "", + "topK": "", + "qdrantFilter": "" }, "outputAnchors": [ { @@ -437,16 +469,16 @@ "type": "options", "options": [ { - "id": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "id": "qdrant_0-output-retriever-Qdrant|VectorStoreRetriever|BaseRetriever", "name": "retriever", - "label": "Pinecone Retriever", - "type": "Pinecone | VectorStoreRetriever | BaseRetriever" + "label": "Qdrant Retriever", + "type": "Qdrant | VectorStoreRetriever | BaseRetriever" }, { - "id": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", + "id": "qdrant_0-output-vectorStore-Qdrant|VectorStore", "name": "vectorStore", - "label": "Pinecone Vector Store", - "type": "Pinecone | VectorStore" + "label": "Qdrant Vector Store", + "type": "Qdrant | VectorStore" } ], "default": "retriever" @@ -459,140 +491,189 @@ }, "selected": false, "positionAbsolute": { - "x": 1167.128201355349, - "y": 71.89355115516406 + "x": 1186.2560075381377, + "y": -86.38901299105441 }, "dragging": false }, { "width": 300, - "height": 623, - "id": "ZepMemory_0", + "height": 574, + "id": "chatOpenAI_0", "position": { - "x": 1552.2067611642792, - "y": 560.8352147865392 + "x": 1561.0993169664887, + "y": -75.4103386563329 }, "type": "customNode", "data": { - "id": "ZepMemory_0", - "label": "Zep Memory", - "name": "ZepMemory", - "version": 1, - "type": "ZepMemory", - "baseClasses": ["ZepMemory", "BaseChatMemory", "BaseMemory"], - "category": "Memory", - "description": "Summarizes the conversation and stores the memory in zep server", + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "version": 2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", "inputParams": [ { "label": "Connect Credential", "name": "credential", "type": "credential", - "optional": true, - "description": "Configure JWT authentication on your Zep instance (Optional)", - "credentialNames": ["zepMemoryApi"], - "id": "ZepMemory_0-input-credential-credential" - }, - { - "label": "Base URL", - "name": "baseURL", - "type": "string", - "default": "http://127.0.0.1:8000", - "id": "ZepMemory_0-input-baseURL-string" + "credentialNames": ["openAIApi"], + "id": "chatOpenAI_0-input-credential-credential" }, { - "label": "Auto Summary", - "name": "autoSummary", - "type": "boolean", - "default": true, - "id": "ZepMemory_0-input-autoSummary-boolean" + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": 
"gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" + }, + { + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" + }, + { + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" + }, + { + "label": "gpt-3.5-turbo-16k-0613", + "name": "gpt-3.5-turbo-16k-0613" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" }, { - "label": "Session Id", - "name": "sessionId", - "type": "string", - "description": "if empty, chatId will be used automatically", - "default": "", - "additionalParams": true, + "label": "Temperature", + "name": "temperature", + "type": "number", + "step": 0.1, + "default": 0.9, "optional": true, - "id": "ZepMemory_0-input-sessionId-string" + "id": "chatOpenAI_0-input-temperature-number" }, { - "label": "Size", - "name": "k", + "label": "Max Tokens", + "name": "maxTokens", "type": "number", - "default": "10", "step": 1, - "description": "Window of size k to surface the last k back-and-forths to use as memory.", - "id": "ZepMemory_0-input-k-number" + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" }, { - "label": "Auto Summary Template", - "name": "autoSummaryTemplate", - "type": "string", - "default": "This is the summary of the following conversation:\n{summary}", + "label": "Top Probability", + "name": "topP", + "type": "number", + "step": 0.1, + "optional": true, "additionalParams": true, - "id": "ZepMemory_0-input-autoSummaryTemplate-string" + "id": "chatOpenAI_0-input-topP-number" }, { - "label": "AI Prefix", - "name": "aiPrefix", - "type": "string", - "default": "ai", + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "step": 0.1, + "optional": true, "additionalParams": true, - "id": "ZepMemory_0-input-aiPrefix-string" + "id": "chatOpenAI_0-input-frequencyPenalty-number" }, { - "label": "Human Prefix", - "name": "humanPrefix", - "type": "string", - "default": "human", + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "step": 0.1, + "optional": true, "additionalParams": true, - "id": "ZepMemory_0-input-humanPrefix-string" + "id": "chatOpenAI_0-input-presencePenalty-number" }, { - "label": "Memory Key", - "name": "memoryKey", - "type": "string", - "default": "chat_history", + "label": "Timeout", + "name": "timeout", + "type": "number", + "step": 1, + "optional": true, "additionalParams": true, - "id": "ZepMemory_0-input-memoryKey-string" + "id": "chatOpenAI_0-input-timeout-number" }, { - "label": "Input Key", - "name": "inputKey", + "label": "BasePath", + "name": "basepath", "type": "string", - "default": "input", + "optional": true, "additionalParams": true, - "id": "ZepMemory_0-input-inputKey-string" + "id": "chatOpenAI_0-input-basepath-string" }, { - "label": "Output Key", - "name": "outputKey", - "type": "string", - "default": "text", + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, "additionalParams": true, - "id": "ZepMemory_0-input-outputKey-string" + "id": "chatOpenAI_0-input-baseOptions-json" + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": 
"chatOpenAI_0-input-cache-BaseCache" } ], - "inputAnchors": [], "inputs": { - "baseURL": "http://127.0.0.1:8000", - "autoSummary": true, - "sessionId": "", - "k": "10", - "autoSummaryTemplate": "This is the summary of the following conversation:\n{summary}", - "aiPrefix": "ai", - "humanPrefix": "human", - "memoryKey": "chat_history", - "inputKey": "input", - "outputKey": "text" + "cache": "", + "modelName": "gpt-3.5-turbo-16k", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { - "id": "ZepMemory_0-output-ZepMemory-ZepMemory|BaseChatMemory|BaseMemory", - "name": "ZepMemory", - "label": "ZepMemory", - "type": "ZepMemory | BaseChatMemory | BaseMemory" + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" } ], "outputs": {}, @@ -600,53 +681,53 @@ }, "selected": false, "positionAbsolute": { - "x": 1552.2067611642792, - "y": 560.8352147865392 + "x": 1561.0993169664887, + "y": -75.4103386563329 }, "dragging": false } ], "edges": [ { - "source": "openAIEmbeddings_0", - "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "pineconeExistingIndex_0", - "targetHandle": "pineconeExistingIndex_0-input-embeddings-Embeddings", + "source": "ZepMemory_0", + "sourceHandle": "ZepMemory_0-output-ZepMemory-ZepMemory|BaseChatMemory|BaseMemory", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": "conversationalRetrievalQAChain_0-input-memory-BaseMemory", "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeExistingIndex_0-pineconeExistingIndex_0-input-embeddings-Embeddings", + "id": "ZepMemory_0-ZepMemory_0-output-ZepMemory-ZepMemory|BaseChatMemory|BaseMemory-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-memory-BaseMemory", "data": { "label": "" } }, { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "target": "conversationalRetrievalQAChain_0", - "targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "source": "openAIEmbeddings_0", + "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "qdrant_0", + "targetHandle": "qdrant_0-input-embeddings-Embeddings", "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-qdrant_0-qdrant_0-input-embeddings-Embeddings", "data": { "label": "" } }, { - "source": "pineconeExistingIndex_0", - "sourceHandle": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "source": "qdrant_0", + "sourceHandle": "qdrant_0-output-retriever-Qdrant|VectorStoreRetriever|BaseRetriever", "target": "conversationalRetrievalQAChain_0", "targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", "type": "buttonedge", - "id": 
"pineconeExistingIndex_0-pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "id": "qdrant_0-qdrant_0-output-retriever-Qdrant|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", "data": { "label": "" } }, { - "source": "ZepMemory_0", - "sourceHandle": "ZepMemory_0-output-ZepMemory-ZepMemory|BaseChatMemory|BaseMemory", + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", "target": "conversationalRetrievalQAChain_0", - "targetHandle": "conversationalRetrievalQAChain_0-input-memory-BaseMemory", + "targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", "type": "buttonedge", - "id": "ZepMemory_0-ZepMemory_0-output-ZepMemory-ZepMemory|BaseChatMemory|BaseMemory-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-memory-BaseMemory", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/Metadata Filter Load.json b/packages/server/marketplaces/chatflows/Metadata Filter Load.json deleted file mode 100644 index 43438d6b428..00000000000 --- a/packages/server/marketplaces/chatflows/Metadata Filter Load.json +++ /dev/null @@ -1,501 +0,0 @@ -{ - "description": "Load existing index with metadata filters and feed into conversational retrieval QA chain", - "nodes": [ - { - "width": 300, - "height": 480, - "id": "conversationalRetrievalQAChain_0", - "position": { - "x": 1643.035168558474, - "y": 360.96295365212774 - }, - "type": "customNode", - "data": { - "id": "conversationalRetrievalQAChain_0", - "label": "Conversational Retrieval QA Chain", - "name": "conversationalRetrievalQAChain", - "version": 1, - "type": "ConversationalRetrievalQAChain", - "baseClasses": ["ConversationalRetrievalQAChain", "BaseChain", "BaseLangChain"], - "category": "Chains", - "description": "Document QA - built on RetrievalQAChain to provide a chat history component", - "inputParams": [ - { - "label": "Return Source Documents", - "name": "returnSourceDocuments", - "type": "boolean", - "optional": true, - "id": "conversationalRetrievalQAChain_0-input-returnSourceDocuments-boolean" - }, - { - "label": "System Message", - "name": "systemMessagePrompt", - "type": "string", - "rows": 4, - "additionalParams": true, - "optional": true, - "placeholder": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given info. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Refuse to answer any question not about the info. 
Never break character.", - "id": "conversationalRetrievalQAChain_0-input-systemMessagePrompt-string" - }, - { - "label": "Chain Option", - "name": "chainOption", - "type": "options", - "options": [ - { - "label": "MapReduceDocumentsChain", - "name": "map_reduce", - "description": "Suitable for QA tasks over larger documents and can run the preprocessing step in parallel, reducing the running time" - }, - { - "label": "RefineDocumentsChain", - "name": "refine", - "description": "Suitable for QA tasks over a large number of documents." - }, - { - "label": "StuffDocumentsChain", - "name": "stuff", - "description": "Suitable for QA tasks over a small number of documents." - } - ], - "additionalParams": true, - "optional": true, - "id": "conversationalRetrievalQAChain_0-input-chainOption-options" - } - ], - "inputAnchors": [ - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel" - }, - { - "label": "Vector Store Retriever", - "name": "vectorStoreRetriever", - "type": "BaseRetriever", - "id": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever" - }, - { - "label": "Memory", - "name": "memory", - "type": "BaseMemory", - "optional": true, - "description": "If left empty, a default BufferMemory will be used", - "id": "conversationalRetrievalQAChain_0-input-memory-BaseMemory" - } - ], - "inputs": { - "model": "{{chatOpenAI_0.data.instance}}", - "vectorStoreRetriever": "{{pineconeExistingIndex_0.data.instance}}" - }, - "outputAnchors": [ - { - "id": "conversationalRetrievalQAChain_0-output-conversationalRetrievalQAChain-ConversationalRetrievalQAChain|BaseChain|BaseLangChain", - "name": "conversationalRetrievalQAChain", - "label": "ConversationalRetrievalQAChain", - "type": "ConversationalRetrievalQAChain | BaseChain | BaseLangChain" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1643.035168558474, - "y": 360.96295365212774 - }, - "dragging": false - }, - { - "width": 300, - "height": 523, - "id": "chatOpenAI_0", - "position": { - "x": 1197.7264239788542, - "y": -76.177600120515933 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_0", - "label": "ChatOpenAI", - "name": "chatOpenAI", - "version": 2, - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "options", - "options": [ - { - "label": "gpt-4", - "name": "gpt-4" - }, - { - "label": "gpt-4-0613", - "name": "gpt-4-0613" - }, - { - "label": "gpt-4-32k", - "name": "gpt-4-32k" - }, - { - "label": "gpt-4-32k-0613", - "name": "gpt-4-32k-0613" - }, - { - "label": "gpt-3.5-turbo", - "name": "gpt-3.5-turbo" - }, - { - "label": "gpt-3.5-turbo-0613", - "name": "gpt-3.5-turbo-0613" - }, - { - "label": "gpt-3.5-turbo-16k", - "name": "gpt-3.5-turbo-16k" - }, - { - "label": "gpt-3.5-turbo-16k-0613", - "name": "gpt-3.5-turbo-16k-0613" - } - ], - "default": "gpt-3.5-turbo", - "optional": true, - "id": "chatOpenAI_0-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "default": 0.9, - "optional": true, - "id": 
"chatOpenAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_0-input-cache-BaseCache" - } - ], - "inputs": { - "modelName": "gpt-3.5-turbo", - "temperature": "0", - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1197.7264239788542, - "y": -76.177600120515933 - }, - "dragging": false - }, - { - "width": 300, - "height": 329, - "id": "openAIEmbeddings_0", - "position": { - "x": 805.2662010688601, - "y": 389.3163571296623 - }, - "type": "customNode", - "data": { - "id": "openAIEmbeddings_0", - "label": "OpenAI Embeddings", - "name": "openAIEmbeddings", - "version": 1, - "type": "OpenAIEmbeddings", - "baseClasses": ["OpenAIEmbeddings", "Embeddings"], - "category": "Embeddings", - "description": "OpenAI API to generate embeddings for a given text", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "openAIEmbeddings_0-input-credential-credential" - }, - { - "label": "Strip New Lines", - "name": "stripNewLines", - "type": "boolean", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-stripNewLines-boolean" - }, - { - "label": "Batch Size", - "name": "batchSize", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-batchSize-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-basepath-string" - } - ], - "inputAnchors": [], - "inputs": { - "stripNewLines": "", - "batchSize": "", - "timeout": "", - "basepath": "" - }, - "outputAnchors": [ - { - "id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "name": "openAIEmbeddings", - "label": "OpenAIEmbeddings", - "type": "OpenAIEmbeddings | 
Embeddings" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 805.2662010688601, - "y": 389.3163571296623 - }, - "dragging": false - }, - { - "width": 300, - "height": 505, - "id": "pineconeExistingIndex_0", - "position": { - "x": 1194.8300385699242, - "y": 542.8247838029442 - }, - "type": "customNode", - "data": { - "id": "pineconeExistingIndex_0", - "label": "Pinecone Load Existing Index", - "name": "pineconeExistingIndex", - "version": 1, - "type": "Pinecone", - "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Load existing index from Pinecone (i.e: Document has been upserted)", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["pineconeApi"], - "id": "pineconeExistingIndex_0-input-credential-credential" - }, - { - "label": "Pinecone Index", - "name": "pineconeIndex", - "type": "string", - "id": "pineconeExistingIndex_0-input-pineconeIndex-string" - }, - { - "label": "Pinecone Namespace", - "name": "pineconeNamespace", - "type": "string", - "placeholder": "my-first-namespace", - "additionalParams": true, - "optional": true, - "id": "pineconeExistingIndex_0-input-pineconeNamespace-string" - }, - { - "label": "Pinecone Metadata Filter", - "name": "pineconeMetadataFilter", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "pineconeExistingIndex_0-input-pineconeMetadataFilter-json" - }, - { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. Default to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "pineconeExistingIndex_0-input-topK-number" - } - ], - "inputAnchors": [ - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "pineconeExistingIndex_0-input-embeddings-Embeddings" - } - ], - "inputs": { - "embeddings": "{{openAIEmbeddings_0.data.instance}}", - "pineconeIndex": "", - "pineconeNamespace": "", - "pineconeMetadataFilter": "", - "topK": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Pinecone Retriever", - "type": "Pinecone | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", - "name": "vectorStore", - "label": "Pinecone Vector Store", - "type": "Pinecone | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { - "output": "retriever" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1194.8300385699242, - "y": 542.8247838029442 - }, - "dragging": false - } - ], - "edges": [ - { - "source": "openAIEmbeddings_0", - "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "pineconeExistingIndex_0", - "targetHandle": "pineconeExistingIndex_0-input-embeddings-Embeddings", - "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeExistingIndex_0-pineconeExistingIndex_0-input-embeddings-Embeddings", - "data": { - "label": "" - } - }, - { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "target": "conversationalRetrievalQAChain_0", - "targetHandle": 
"conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", - "data": { - "label": "" - } - }, - { - "source": "pineconeExistingIndex_0", - "sourceHandle": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", - "target": "conversationalRetrievalQAChain_0", - "targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", - "type": "buttonedge", - "id": "pineconeExistingIndex_0-pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", - "data": { - "label": "" - } - } - ] -} diff --git a/packages/server/marketplaces/chatflows/Metadata Filter Upsert.json b/packages/server/marketplaces/chatflows/Metadata Filter.json similarity index 86% rename from packages/server/marketplaces/chatflows/Metadata Filter Upsert.json rename to packages/server/marketplaces/chatflows/Metadata Filter.json index 38cec6dde92..f594a2b6cf0 100644 --- a/packages/server/marketplaces/chatflows/Metadata Filter Upsert.json +++ b/packages/server/marketplaces/chatflows/Metadata Filter.json @@ -1,9 +1,10 @@ { - "description": "Upsert multiple files with metadata filters and feed into conversational retrieval QA chain", + "description": "Upsert multiple files with metadata and filter by it using conversational retrieval QA chain", + "badge": "POPULAR", "nodes": [ { "width": 300, - "height": 376, + "height": 429, "id": "recursiveCharacterTextSplitter_1", "position": { "x": 347.5233039646277, @@ -13,8 +14,8 @@ "data": { "id": "recursiveCharacterTextSplitter_1", "label": "Recursive Character Text Splitter", - "name": "recursiveCharacterTextSplitter", "version": 2, + "name": "recursiveCharacterTextSplitter", "type": "RecursiveCharacterTextSplitter", "baseClasses": ["RecursiveCharacterTextSplitter", "TextSplitter"], "category": "Text Splitters", @@ -72,7 +73,7 @@ }, { "width": 300, - "height": 392, + "height": 419, "id": "textFile_0", "position": { "x": 756.5586098635717, @@ -82,8 +83,8 @@ "data": { "id": "textFile_0", "label": "Text File", - "name": "textFile", "version": 3, + "name": "textFile", "type": "Document", "baseClasses": ["Document"], "category": "Document Loaders", @@ -164,8 +165,8 @@ "data": { "id": "pdfFile_0", "label": "Pdf File", - "name": "pdfFile", "version": 1, + "name": "pdfFile", "type": "Document", "baseClasses": ["Document"], "category": "Document Loaders", @@ -248,8 +249,8 @@ "data": { "id": "conversationalRetrievalQAChain_0", "label": "Conversational Retrieval QA Chain", - "name": "conversationalRetrievalQAChain", "version": 1, + "name": "conversationalRetrievalQAChain", "type": "ConversationalRetrievalQAChain", "baseClasses": ["ConversationalRetrievalQAChain", "BaseChain", "BaseLangChain"], "category": "Chains", @@ -322,7 +323,7 @@ ], "inputs": { "model": "{{chatOpenAI_0.data.instance}}", - "vectorStoreRetriever": "{{pineconeUpsert_0.data.instance}}" + "vectorStoreRetriever": "{{pinecone_0.data.instance}}" }, "outputAnchors": [ { @@ -344,128 +345,104 @@ }, { "width": 300, - "height": 555, - "id": "pineconeUpsert_0", + "height": 329, + "id": "openAIEmbeddings_0", "position": { - "x": 1161.2426252201622, - "y": 549.7917156049002 + "x": 761.6417182278027, + "y": 852.6452698684387 }, "type": 
"customNode", "data": { - "id": "pineconeUpsert_0", - "label": "Pinecone Upsert Document", - "name": "pineconeUpsert", - "type": "Pinecone", + "id": "openAIEmbeddings_0", + "label": "OpenAI Embeddings", "version": 1, - "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Upsert documents to Pinecone", + "name": "openAIEmbeddings", + "type": "OpenAIEmbeddings", + "baseClasses": ["OpenAIEmbeddings", "Embeddings"], + "category": "Embeddings", + "description": "OpenAI API to generate embeddings for a given text", "inputParams": [ { "label": "Connect Credential", "name": "credential", "type": "credential", - "credentialNames": ["pineconeApi"], - "id": "pineconeUpsert_0-input-credential-credential" + "credentialNames": ["openAIApi"], + "id": "openAIEmbeddings_0-input-credential-credential" }, { - "label": "Pinecone Index", - "name": "pineconeIndex", - "type": "string", - "id": "pineconeUpsert_0-input-pineconeIndex-string" + "label": "Strip New Lines", + "name": "stripNewLines", + "type": "boolean", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-stripNewLines-boolean" }, { - "label": "Pinecone Namespace", - "name": "pineconeNamespace", - "type": "string", - "placeholder": "my-first-namespace", - "additionalParams": true, + "label": "Batch Size", + "name": "batchSize", + "type": "number", "optional": true, - "id": "pineconeUpsert_0-input-pineconeNamespace-string" + "additionalParams": true, + "id": "openAIEmbeddings_0-input-batchSize-number" }, { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. Default to 4", - "placeholder": "4", + "label": "Timeout", + "name": "timeout", "type": "number", - "additionalParams": true, "optional": true, - "id": "pineconeUpsert_0-input-topK-number" - } - ], - "inputAnchors": [ - { - "label": "Document", - "name": "document", - "type": "Document", - "list": true, - "id": "pineconeUpsert_0-input-document-Document" + "additionalParams": true, + "id": "openAIEmbeddings_0-input-timeout-number" }, { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "pineconeUpsert_0-input-embeddings-Embeddings" + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-basepath-string" } ], + "inputAnchors": [], "inputs": { - "document": ["{{textFile_0.data.instance}}", "{{pdfFile_0.data.instance}}"], - "embeddings": "{{openAIEmbeddings_0.data.instance}}", - "pineconeIndex": "", - "pineconeNamespace": "", - "topK": "" + "stripNewLines": "", + "batchSize": "", + "timeout": "", + "basepath": "" }, "outputAnchors": [ { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Pinecone Retriever", - "type": "Pinecone | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "pineconeUpsert_0-output-vectorStore-Pinecone|VectorStore", - "name": "vectorStore", - "label": "Pinecone Vector Store", - "type": "Pinecone | VectorStore" - } - ], - "default": "retriever" + "id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "name": "openAIEmbeddings", + "label": "OpenAIEmbeddings", + "type": "OpenAIEmbeddings | Embeddings" } ], - "outputs": { - "output": "retriever" - }, + "outputs": {}, "selected": false }, "selected": false, "positionAbsolute": { - "x": 1161.2426252201622, - 
"y": 549.7917156049002 + "x": 761.6417182278027, + "y": 852.6452698684387 }, "dragging": false }, { "width": 300, - "height": 523, + "height": 574, "id": "chatOpenAI_0", "position": { - "x": 1164.9667590264419, - "y": -44.2076264967032 + "x": 1162.9449281292038, + "y": -64.39144252849331 }, "type": "customNode", "data": { "id": "chatOpenAI_0", "label": "ChatOpenAI", - "name": "chatOpenAI", "version": 2, + "name": "chatOpenAI", "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], "category": "Chat Models", "description": "Wrapper around OpenAI large language models that use the Chat endpoint", "inputParams": [ @@ -485,6 +462,14 @@ "label": "gpt-4", "name": "gpt-4" }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, { "label": "gpt-4-0613", "name": "gpt-4-0613" @@ -501,6 +486,10 @@ "label": "gpt-3.5-turbo", "name": "gpt-3.5-turbo" }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, { "label": "gpt-3.5-turbo-0613", "name": "gpt-3.5-turbo-0613" @@ -522,6 +511,7 @@ "label": "Temperature", "name": "temperature", "type": "number", + "step": 0.1, "default": 0.9, "optional": true, "id": "chatOpenAI_0-input-temperature-number" @@ -530,6 +520,7 @@ "label": "Max Tokens", "name": "maxTokens", "type": "number", + "step": 1, "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-maxTokens-number" @@ -538,6 +529,7 @@ "label": "Top Probability", "name": "topP", "type": "number", + "step": 0.1, "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-topP-number" @@ -546,6 +538,7 @@ "label": "Frequency Penalty", "name": "frequencyPenalty", "type": "number", + "step": 0.1, "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-frequencyPenalty-number" @@ -554,6 +547,7 @@ "label": "Presence Penalty", "name": "presencePenalty", "type": "number", + "step": 0.1, "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-presencePenalty-number" @@ -562,6 +556,7 @@ "label": "Timeout", "name": "timeout", "type": "number", + "step": 1, "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-timeout-number" @@ -573,6 +568,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -585,21 +588,23 @@ } ], "inputs": { - "modelName": "gpt-3.5-turbo", + "cache": "", + "modelName": "gpt-3.5-turbo-16k", "temperature": 0.9, "maxTokens": "", "topP": "", "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", "name": "chatOpenAI", "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel" + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" } ], "outputs": {}, @@ -607,92 +612,126 @@ }, "selected": false, "positionAbsolute": { - "x": 1164.9667590264419, - "y": -44.2076264967032 + "x": 1162.9449281292038, + "y": -64.39144252849331 }, "dragging": false }, { "width": 300, - "height": 329, - 
"id": "openAIEmbeddings_0", + "height": 555, + "id": "pinecone_0", "position": { - "x": 772.0706424639393, - "y": 862.6189553323906 + "x": 1175.8270637283192, + "y": 569.8692882036854 }, "type": "customNode", "data": { - "id": "openAIEmbeddings_0", - "label": "OpenAI Embeddings", - "name": "openAIEmbeddings", + "id": "pinecone_0", + "label": "Pinecone", "version": 1, - "type": "OpenAIEmbeddings", - "baseClasses": ["OpenAIEmbeddings", "Embeddings"], - "category": "Embeddings", - "description": "OpenAI API to generate embeddings for a given text", + "name": "pinecone", + "type": "Pinecone", + "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Upsert or Load data to Pinecone Vector Database", "inputParams": [ { "label": "Connect Credential", "name": "credential", "type": "credential", - "credentialNames": ["openAIApi"], - "id": "openAIEmbeddings_0-input-credential-credential" + "credentialNames": ["pineconeApi"], + "id": "pinecone_0-input-credential-credential" }, { - "label": "Strip New Lines", - "name": "stripNewLines", - "type": "boolean", - "optional": true, + "label": "Pinecone Index", + "name": "pineconeIndex", + "type": "string", + "id": "pinecone_0-input-pineconeIndex-string" + }, + { + "label": "Pinecone Namespace", + "name": "pineconeNamespace", + "type": "string", + "placeholder": "my-first-namespace", "additionalParams": true, - "id": "openAIEmbeddings_0-input-stripNewLines-boolean" + "optional": true, + "id": "pinecone_0-input-pineconeNamespace-string" }, { - "label": "Batch Size", - "name": "batchSize", - "type": "number", + "label": "Pinecone Metadata Filter", + "name": "pineconeMetadataFilter", + "type": "json", "optional": true, "additionalParams": true, - "id": "openAIEmbeddings_0-input-batchSize-number" + "id": "pinecone_0-input-pineconeMetadataFilter-json" }, { - "label": "Timeout", - "name": "timeout", + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. 
Default to 4", + "placeholder": "4", "type": "number", - "optional": true, "additionalParams": true, - "id": "openAIEmbeddings_0-input-timeout-number" - }, + "optional": true, + "id": "pinecone_0-input-topK-number" + } + ], + "inputAnchors": [ { - "label": "BasePath", - "name": "basepath", - "type": "string", + "label": "Document", + "name": "document", + "type": "Document", + "list": true, "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-basepath-string" + "id": "pinecone_0-input-document-Document" + }, + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "pinecone_0-input-embeddings-Embeddings" } ], - "inputAnchors": [], "inputs": { - "stripNewLines": "", - "batchSize": "", - "timeout": "", - "basepath": "" + "document": ["{{textFile_0.data.instance}}", "{{pdfFile_0.data.instance}}"], + "embeddings": "{{openAIEmbeddings_0.data.instance}}", + "pineconeIndex": "", + "pineconeNamespace": "", + "pineconeMetadataFilter": "{\"id\":{\"$in\":[\"doc1\",\"doc2\"]}}", + "topK": "" }, "outputAnchors": [ { - "id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "name": "openAIEmbeddings", - "label": "OpenAIEmbeddings", - "type": "OpenAIEmbeddings | Embeddings" + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Pinecone Retriever", + "type": "Pinecone | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "pinecone_0-output-vectorStore-Pinecone|VectorStore", + "name": "vectorStore", + "label": "Pinecone Vector Store", + "type": "Pinecone | VectorStore" + } + ], + "default": "retriever" } ], - "outputs": {}, + "outputs": { + "output": "retriever" + }, "selected": false }, "selected": false, "positionAbsolute": { - "x": 772.0706424639393, - "y": 862.6189553323906 + "x": 1175.8270637283192, + "y": 569.8692882036854 }, "dragging": false } @@ -721,23 +760,23 @@ } }, { - "source": "openAIEmbeddings_0", - "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "pineconeUpsert_0", - "targetHandle": "pineconeUpsert_0-input-embeddings-Embeddings", + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeUpsert_0-pineconeUpsert_0-input-embeddings-Embeddings", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", "data": { "label": "" } }, { "source": "textFile_0", - "sourceHandle": "textFile_0-output-textFile-Document", - "target": "pineconeUpsert_0", - "targetHandle": "pineconeUpsert_0-input-document-Document", + "sourceHandle": "textFile_0-output-document-Document", + "target": "pinecone_0", + "targetHandle": "pinecone_0-input-document-Document", "type": "buttonedge", - "id": "textFile_0-textFile_0-output-textFile-Document-pineconeUpsert_0-pineconeUpsert_0-input-document-Document", + "id": "textFile_0-textFile_0-output-document-Document-pinecone_0-pinecone_0-input-document-Document", "data": { "label": "" } @@ -745,32 +784,32 @@ { "source": "pdfFile_0", 
"sourceHandle": "pdfFile_0-output-pdfFile-Document", - "target": "pineconeUpsert_0", - "targetHandle": "pineconeUpsert_0-input-document-Document", + "target": "pinecone_0", + "targetHandle": "pinecone_0-input-document-Document", "type": "buttonedge", - "id": "pdfFile_0-pdfFile_0-output-pdfFile-Document-pineconeUpsert_0-pineconeUpsert_0-input-document-Document", + "id": "pdfFile_0-pdfFile_0-output-pdfFile-Document-pinecone_0-pinecone_0-input-document-Document", "data": { "label": "" } }, { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "target": "conversationalRetrievalQAChain_0", - "targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "source": "openAIEmbeddings_0", + "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "pinecone_0", + "targetHandle": "pinecone_0-input-embeddings-Embeddings", "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pinecone_0-pinecone_0-input-embeddings-Embeddings", "data": { "label": "" } }, { - "source": "pineconeUpsert_0", - "sourceHandle": "pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "source": "pinecone_0", + "sourceHandle": "pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", "target": "conversationalRetrievalQAChain_0", "targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", "type": "buttonedge", - "id": "pineconeUpsert_0-pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "id": "pinecone_0-pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/Multi Prompt Chain.json b/packages/server/marketplaces/chatflows/Multi Prompt Chain.json index e1063dcfec7..0a888a6b7f9 100644 --- a/packages/server/marketplaces/chatflows/Multi Prompt Chain.json +++ b/packages/server/marketplaces/chatflows/Multi Prompt Chain.json @@ -388,6 +388,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -407,7 +415,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { diff --git a/packages/server/marketplaces/chatflows/Multi Retrieval QA Chain.json b/packages/server/marketplaces/chatflows/Multi Retrieval QA Chain.json index 36240e39dde..9032122bfa0 100644 --- a/packages/server/marketplaces/chatflows/Multi Retrieval QA Chain.json +++ b/packages/server/marketplaces/chatflows/Multi Retrieval QA Chain.json @@ -13,8 +13,8 @@ "data": { "id": "vectorStoreRetriever_0", "label": "Vector Store Retriever", - "name": "vectorStoreRetriever", "version": 1, + "name": "vectorStoreRetriever", "type": "VectorStoreRetriever", "baseClasses": 
["VectorStoreRetriever"], "category": "Retrievers", @@ -46,7 +46,7 @@ } ], "inputs": { - "vectorStore": "{{supabaseExistingIndex_0.data.instance}}", + "vectorStore": "{{supabase_0.data.instance}}", "name": "aqua teen", "description": "Good for answering questions about Aqua Teen Hunger Force theme song" }, @@ -80,8 +80,8 @@ "data": { "id": "multiRetrievalQAChain_0", "label": "Multi Retrieval QA Chain", - "name": "multiRetrievalQAChain", "version": 1, + "name": "multiRetrievalQAChain", "type": "MultiRetrievalQAChain", "baseClasses": ["MultiRetrievalQAChain", "MultiRouteChain", "BaseChain", "BaseLangChain"], "category": "Chains", @@ -147,8 +147,8 @@ "data": { "id": "vectorStoreRetriever_1", "label": "Vector Store Retriever", - "name": "vectorStoreRetriever", "version": 1, + "name": "vectorStoreRetriever", "type": "VectorStoreRetriever", "baseClasses": ["VectorStoreRetriever"], "category": "Retrievers", @@ -180,7 +180,7 @@ } ], "inputs": { - "vectorStore": "{{chromaExistingIndex_0.data.instance}}", + "vectorStore": "{{chroma_0.data.instance}}", "name": "mst3k", "description": "Good for answering questions about Mystery Science Theater 3000 theme song" }, @@ -214,8 +214,8 @@ "data": { "id": "vectorStoreRetriever_2", "label": "Vector Store Retriever", - "name": "vectorStoreRetriever", "version": 1, + "name": "vectorStoreRetriever", "type": "VectorStoreRetriever", "baseClasses": ["VectorStoreRetriever"], "category": "Retrievers", @@ -247,7 +247,7 @@ } ], "inputs": { - "vectorStore": "{{pineconeExistingIndex_0.data.instance}}", + "vectorStore": "{{pinecone_0.data.instance}}", "name": "animaniacs", "description": "Good for answering questions about Animaniacs theme song" }, @@ -269,206 +269,6 @@ }, "dragging": false }, - { - "width": 300, - "height": 505, - "id": "pineconeExistingIndex_0", - "position": { - "x": 267.45589163840236, - "y": -300.13817634747346 - }, - "type": "customNode", - "data": { - "id": "pineconeExistingIndex_0", - "label": "Pinecone Load Existing Index", - "name": "pineconeExistingIndex", - "version": 1, - "type": "Pinecone", - "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Load existing index from Pinecone (i.e: Document has been upserted)", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["pineconeApi"], - "id": "pineconeExistingIndex_0-input-credential-credential" - }, - { - "label": "Pinecone Index", - "name": "pineconeIndex", - "type": "string", - "id": "pineconeExistingIndex_0-input-pineconeIndex-string" - }, - { - "label": "Pinecone Namespace", - "name": "pineconeNamespace", - "type": "string", - "placeholder": "my-first-namespace", - "additionalParams": true, - "optional": true, - "id": "pineconeExistingIndex_0-input-pineconeNamespace-string" - }, - { - "label": "Pinecone Metadata Filter", - "name": "pineconeMetadataFilter", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "pineconeExistingIndex_0-input-pineconeMetadataFilter-json" - }, - { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. 
Default to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "pineconeExistingIndex_0-input-topK-number" - } - ], - "inputAnchors": [ - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "pineconeExistingIndex_0-input-embeddings-Embeddings" - } - ], - "inputs": { - "embeddings": "{{openAIEmbeddings_0.data.instance}}", - "pineconeIndex": "", - "pineconeNamespace": "", - "pineconeMetadataFilter": "", - "topK": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Pinecone Retriever", - "type": "Pinecone | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", - "name": "vectorStore", - "label": "Pinecone Vector Store", - "type": "Pinecone | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { - "output": "vectorStore" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 267.45589163840236, - "y": -300.13817634747346 - }, - "dragging": false - }, - { - "width": 300, - "height": 506, - "id": "chromaExistingIndex_0", - "position": { - "x": 264.5271545331116, - "y": 246.32716342844174 - }, - "type": "customNode", - "data": { - "id": "chromaExistingIndex_0", - "label": "Chroma Load Existing Index", - "name": "chromaExistingIndex", - "version": 1, - "type": "Chroma", - "baseClasses": ["Chroma", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Load existing index from Chroma (i.e: Document has been upserted)", - "inputParams": [ - { - "label": "Collection Name", - "name": "collectionName", - "type": "string", - "id": "chromaExistingIndex_0-input-collectionName-string" - }, - { - "label": "Chroma URL", - "name": "chromaURL", - "type": "string", - "optional": true, - "id": "chromaExistingIndex_0-input-chromaURL-string" - }, - { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. 
Default to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "chromaExistingIndex_0-input-topK-number" - } - ], - "inputAnchors": [ - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "chromaExistingIndex_0-input-embeddings-Embeddings" - } - ], - "inputs": { - "embeddings": "{{openAIEmbeddings_0.data.instance}}", - "collectionName": "", - "chromaURL": "", - "topK": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "chromaExistingIndex_0-output-retriever-Chroma|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Chroma Retriever", - "type": "Chroma | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "chromaExistingIndex_0-output-vectorStore-Chroma|VectorStore", - "name": "vectorStore", - "label": "Chroma Vector Store", - "type": "Chroma | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { - "output": "vectorStore" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 264.5271545331116, - "y": 246.32716342844174 - }, - "dragging": false - }, { "width": 300, "height": 329, @@ -481,8 +281,8 @@ "data": { "id": "openAIEmbeddings_0", "label": "OpenAI Embeddings", - "name": "openAIEmbeddings", "version": 1, + "name": "openAIEmbeddings", "type": "OpenAIEmbeddings", "baseClasses": ["OpenAIEmbeddings", "Embeddings"], "category": "Embeddings", @@ -555,151 +355,46 @@ }, { "width": 300, - "height": 702, - "id": "supabaseExistingIndex_0", + "height": 574, + "id": "chatOpenAI_0", "position": { - "x": 270.90499551102573, - "y": 783.5053782099461 + "x": 1166.929741805626, + "y": -297.9691758089252 }, "type": "customNode", "data": { - "id": "supabaseExistingIndex_0", - "label": "Supabase Load Existing Index", - "name": "supabaseExistingIndex", - "version": 1, - "type": "Supabase", - "baseClasses": ["Supabase", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Load existing index from Supabase (i.e: Document has been upserted)", + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "version": 2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", "inputParams": [ { "label": "Connect Credential", "name": "credential", "type": "credential", - "credentialNames": ["supabaseApi"], - "id": "supabaseExistingIndex_0-input-credential-credential" - }, - { - "label": "Supabase Project URL", - "name": "supabaseProjUrl", - "type": "string", - "id": "supabaseExistingIndex_0-input-supabaseProjUrl-string" - }, - { - "label": "Table Name", - "name": "tableName", - "type": "string", - "id": "supabaseExistingIndex_0-input-tableName-string" - }, - { - "label": "Query Name", - "name": "queryName", - "type": "string", - "id": "supabaseExistingIndex_0-input-queryName-string" - }, - { - "label": "Supabase Metadata Filter", - "name": "supabaseMetadataFilter", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "supabaseExistingIndex_0-input-supabaseMetadataFilter-json" + "credentialNames": ["openAIApi"], + "id": "chatOpenAI_0-input-credential-credential" }, { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. 
Default to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "supabaseExistingIndex_0-input-topK-number" - } - ], - "inputAnchors": [ - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "supabaseExistingIndex_0-input-embeddings-Embeddings" - } - ], - "inputs": { - "embeddings": "{{openAIEmbeddings_0.data.instance}}", - "supabaseProjUrl": "", - "tableName": "", - "queryName": "", - "supabaseMetadataFilter": "", - "topK": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", + "label": "Model Name", + "name": "modelName", "type": "options", "options": [ { - "id": "supabaseExistingIndex_0-output-retriever-Supabase|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Supabase Retriever", - "type": "Supabase | VectorStoreRetriever | BaseRetriever" + "label": "gpt-4", + "name": "gpt-4" }, { - "id": "supabaseExistingIndex_0-output-vectorStore-Supabase|VectorStore", - "name": "vectorStore", - "label": "Supabase Vector Store", - "type": "Supabase | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { - "output": "vectorStore" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 270.90499551102573, - "y": 783.5053782099461 - }, - "dragging": false - }, - { - "width": 300, - "height": 523, - "id": "chatOpenAI_0", - "position": { - "x": 1154.0989175770958, - "y": -255.77769163789395 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_0", - "label": "ChatOpenAI", - "name": "chatOpenAI", - "version": 2, - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "options", - "options": [ + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, { - "label": "gpt-4", - "name": "gpt-4" + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" }, { "label": "gpt-4-0613", @@ -717,6 +412,10 @@ "label": "gpt-3.5-turbo", "name": "gpt-3.5-turbo" }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, { "label": "gpt-3.5-turbo-0613", "name": "gpt-3.5-turbo-0613" @@ -738,6 +437,7 @@ "label": "Temperature", "name": "temperature", "type": "number", + "step": 0.1, "default": 0.9, "optional": true, "id": "chatOpenAI_0-input-temperature-number" @@ -746,6 +446,7 @@ "label": "Max Tokens", "name": "maxTokens", "type": "number", + "step": 1, "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-maxTokens-number" @@ -754,6 +455,7 @@ "label": "Top Probability", "name": "topP", "type": "number", + "step": 0.1, "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-topP-number" @@ -762,6 +464,7 @@ "label": "Frequency Penalty", "name": "frequencyPenalty", "type": "number", + "step": 0.1, "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-frequencyPenalty-number" @@ -770,6 +473,7 @@ "label": "Presence Penalty", "name": "presencePenalty", "type": "number", + "step": 0.1, "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-presencePenalty-number" @@ -778,6 +482,7 @@ "label": "Timeout", "name": "timeout", "type": "number", + "step": 1, 
"optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-timeout-number" @@ -789,6 +494,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -801,6 +514,7 @@ } ], "inputs": { + "cache": "", "modelName": "gpt-3.5-turbo", "temperature": 0.9, "maxTokens": "", @@ -808,14 +522,15 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", "name": "chatOpenAI", "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel" + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" } ], "outputs": {}, @@ -823,8 +538,366 @@ }, "selected": false, "positionAbsolute": { - "x": 1154.0989175770958, - "y": -255.77769163789395 + "x": 1166.929741805626, + "y": -297.9691758089252 + }, + "dragging": false + }, + { + "width": 300, + "height": 555, + "id": "pinecone_0", + "position": { + "x": 261.3144465918519, + "y": -333.57075989595313 + }, + "type": "customNode", + "data": { + "id": "pinecone_0", + "label": "Pinecone", + "version": 1, + "name": "pinecone", + "type": "Pinecone", + "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Upsert or Load data to Pinecone Vector Database", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["pineconeApi"], + "id": "pinecone_0-input-credential-credential" + }, + { + "label": "Pinecone Index", + "name": "pineconeIndex", + "type": "string", + "id": "pinecone_0-input-pineconeIndex-string" + }, + { + "label": "Pinecone Namespace", + "name": "pineconeNamespace", + "type": "string", + "placeholder": "my-first-namespace", + "additionalParams": true, + "optional": true, + "id": "pinecone_0-input-pineconeNamespace-string" + }, + { + "label": "Pinecone Metadata Filter", + "name": "pineconeMetadataFilter", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "pinecone_0-input-pineconeMetadataFilter-json" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. 
Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "pinecone_0-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "optional": true, + "id": "pinecone_0-input-document-Document" + }, + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "pinecone_0-input-embeddings-Embeddings" + } + ], + "inputs": { + "document": "", + "embeddings": "{{openAIEmbeddings_0.data.instance}}", + "pineconeIndex": "", + "pineconeNamespace": "", + "pineconeMetadataFilter": "", + "topK": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Pinecone Retriever", + "type": "Pinecone | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "pinecone_0-output-vectorStore-Pinecone|VectorStore", + "name": "vectorStore", + "label": "Pinecone Vector Store", + "type": "Pinecone | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "vectorStore" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 261.3144465918519, + "y": -333.57075989595313 + }, + "dragging": false + }, + { + "width": 300, + "height": 654, + "id": "chroma_0", + "position": { + "x": 263.5395455972911, + "y": 242.72988251281214 + }, + "type": "customNode", + "data": { + "id": "chroma_0", + "label": "Chroma", + "version": 1, + "name": "chroma", + "type": "Chroma", + "baseClasses": ["Chroma", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Upsert or Load data to Chroma Vector Database", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "description": "Only needed if you have chroma on cloud services with X-Api-key", + "optional": true, + "credentialNames": ["chromaApi"], + "id": "chroma_0-input-credential-credential" + }, + { + "label": "Collection Name", + "name": "collectionName", + "type": "string", + "id": "chroma_0-input-collectionName-string" + }, + { + "label": "Chroma URL", + "name": "chromaURL", + "type": "string", + "optional": true, + "id": "chroma_0-input-chromaURL-string" + }, + { + "label": "Chroma Metadata Filter", + "name": "chromaMetadataFilter", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chroma_0-input-chromaMetadataFilter-json" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. 
Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "chroma_0-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "optional": true, + "id": "chroma_0-input-document-Document" + }, + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "chroma_0-input-embeddings-Embeddings" + } + ], + "inputs": { + "document": "", + "embeddings": "{{openAIEmbeddings_0.data.instance}}", + "collectionName": "", + "chromaURL": "", + "chromaMetadataFilter": "", + "topK": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "chroma_0-output-retriever-Chroma|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Chroma Retriever", + "type": "Chroma | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "chroma_0-output-vectorStore-Chroma|VectorStore", + "name": "vectorStore", + "label": "Chroma Vector Store", + "type": "Chroma | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "vectorStore" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 263.5395455972911, + "y": 242.72988251281214 + }, + "dragging": false + }, + { + "width": 300, + "height": 753, + "id": "supabase_0", + "position": { + "x": 263.16882559270005, + "y": 920.6999513218148 + }, + "type": "customNode", + "data": { + "id": "supabase_0", + "label": "Supabase", + "version": 1, + "name": "supabase", + "type": "Supabase", + "baseClasses": ["Supabase", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Upsert or Load data to Supabase using pgvector", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["supabaseApi"], + "id": "supabase_0-input-credential-credential" + }, + { + "label": "Supabase Project URL", + "name": "supabaseProjUrl", + "type": "string", + "id": "supabase_0-input-supabaseProjUrl-string" + }, + { + "label": "Table Name", + "name": "tableName", + "type": "string", + "id": "supabase_0-input-tableName-string" + }, + { + "label": "Query Name", + "name": "queryName", + "type": "string", + "id": "supabase_0-input-queryName-string" + }, + { + "label": "Supabase Metadata Filter", + "name": "supabaseMetadataFilter", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "supabase_0-input-supabaseMetadataFilter-json" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. 
Default to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "supabase_0-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "optional": true, + "id": "supabase_0-input-document-Document" + }, + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "supabase_0-input-embeddings-Embeddings" + } + ], + "inputs": { + "document": "", + "embeddings": "{{openAIEmbeddings_0.data.instance}}", + "supabaseProjUrl": "", + "tableName": "", + "queryName": "", + "supabaseMetadataFilter": "", + "topK": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "supabase_0-output-retriever-Supabase|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Supabase Retriever", + "type": "Supabase | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "supabase_0-output-vectorStore-Supabase|VectorStore", + "name": "vectorStore", + "label": "Supabase Vector Store", + "type": "Supabase | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "vectorStore" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 263.16882559270005, + "y": 920.6999513218148 }, "dragging": false } @@ -864,45 +937,45 @@ } }, { - "source": "pineconeExistingIndex_0", - "sourceHandle": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", - "target": "vectorStoreRetriever_2", - "targetHandle": "vectorStoreRetriever_2-input-vectorStore-VectorStore", + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "multiRetrievalQAChain_0", + "targetHandle": "multiRetrievalQAChain_0-input-model-BaseLanguageModel", "type": "buttonedge", - "id": "pineconeExistingIndex_0-pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore-vectorStoreRetriever_2-vectorStoreRetriever_2-input-vectorStore-VectorStore", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-multiRetrievalQAChain_0-multiRetrievalQAChain_0-input-model-BaseLanguageModel", "data": { "label": "" } }, { - "source": "chromaExistingIndex_0", - "sourceHandle": "chromaExistingIndex_0-output-vectorStore-Chroma|VectorStore", - "target": "vectorStoreRetriever_1", - "targetHandle": "vectorStoreRetriever_1-input-vectorStore-VectorStore", + "source": "pinecone_0", + "sourceHandle": "pinecone_0-output-vectorStore-Pinecone|VectorStore", + "target": "vectorStoreRetriever_2", + "targetHandle": "vectorStoreRetriever_2-input-vectorStore-VectorStore", "type": "buttonedge", - "id": "chromaExistingIndex_0-chromaExistingIndex_0-output-vectorStore-Chroma|VectorStore-vectorStoreRetriever_1-vectorStoreRetriever_1-input-vectorStore-VectorStore", + "id": "pinecone_0-pinecone_0-output-vectorStore-Pinecone|VectorStore-vectorStoreRetriever_2-vectorStoreRetriever_2-input-vectorStore-VectorStore", "data": { "label": "" } }, { - "source": "openAIEmbeddings_0", - "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "pineconeExistingIndex_0", - "targetHandle": "pineconeExistingIndex_0-input-embeddings-Embeddings", + "source": "chroma_0", + "sourceHandle": "chroma_0-output-vectorStore-Chroma|VectorStore", + "target": "vectorStoreRetriever_1", + "targetHandle": "vectorStoreRetriever_1-input-vectorStore-VectorStore", "type": 
"buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeExistingIndex_0-pineconeExistingIndex_0-input-embeddings-Embeddings", + "id": "chroma_0-chroma_0-output-vectorStore-Chroma|VectorStore-vectorStoreRetriever_1-vectorStoreRetriever_1-input-vectorStore-VectorStore", "data": { "label": "" } }, { - "source": "openAIEmbeddings_0", - "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "chromaExistingIndex_0", - "targetHandle": "chromaExistingIndex_0-input-embeddings-Embeddings", + "source": "supabase_0", + "sourceHandle": "supabase_0-output-vectorStore-Supabase|VectorStore", + "target": "vectorStoreRetriever_0", + "targetHandle": "vectorStoreRetriever_0-input-vectorStore-VectorStore", "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-chromaExistingIndex_0-chromaExistingIndex_0-input-embeddings-Embeddings", + "id": "supabase_0-supabase_0-output-vectorStore-Supabase|VectorStore-vectorStoreRetriever_0-vectorStoreRetriever_0-input-vectorStore-VectorStore", "data": { "label": "" } @@ -910,32 +983,32 @@ { "source": "openAIEmbeddings_0", "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "supabaseExistingIndex_0", - "targetHandle": "supabaseExistingIndex_0-input-embeddings-Embeddings", + "target": "supabase_0", + "targetHandle": "supabase_0-input-embeddings-Embeddings", "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-supabaseExistingIndex_0-supabaseExistingIndex_0-input-embeddings-Embeddings", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-supabase_0-supabase_0-input-embeddings-Embeddings", "data": { "label": "" } }, { - "source": "supabaseExistingIndex_0", - "sourceHandle": "supabaseExistingIndex_0-output-vectorStore-Supabase|VectorStore", - "target": "vectorStoreRetriever_0", - "targetHandle": "vectorStoreRetriever_0-input-vectorStore-VectorStore", + "source": "openAIEmbeddings_0", + "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "chroma_0", + "targetHandle": "chroma_0-input-embeddings-Embeddings", "type": "buttonedge", - "id": "supabaseExistingIndex_0-supabaseExistingIndex_0-output-vectorStore-Supabase|VectorStore-vectorStoreRetriever_0-vectorStoreRetriever_0-input-vectorStore-VectorStore", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-chroma_0-chroma_0-input-embeddings-Embeddings", "data": { "label": "" } }, { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "target": "multiRetrievalQAChain_0", - "targetHandle": "multiRetrievalQAChain_0-input-model-BaseLanguageModel", + "source": "openAIEmbeddings_0", + "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "pinecone_0", + "targetHandle": "pinecone_0-input-embeddings-Embeddings", "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-multiRetrievalQAChain_0-multiRetrievalQAChain_0-input-model-BaseLanguageModel", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pinecone_0-pinecone_0-input-embeddings-Embeddings", "data": { "label": "" } diff --git 
a/packages/server/marketplaces/chatflows/Multiple VectorDB.json b/packages/server/marketplaces/chatflows/Multiple VectorDB.json index b4dedfddf5f..723b510e6dc 100644 --- a/packages/server/marketplaces/chatflows/Multiple VectorDB.json +++ b/packages/server/marketplaces/chatflows/Multiple VectorDB.json @@ -13,8 +13,8 @@ "data": { "id": "chainTool_2", "label": "Chain Tool", - "name": "chainTool", "version": 1, + "name": "chainTool", "type": "ChainTool", "baseClasses": ["ChainTool", "DynamicTool", "Tool", "StructuredTool", "BaseLangChain"], "category": "Tools", @@ -80,15 +80,15 @@ "height": 602, "id": "chainTool_3", "position": { - "x": 1267.7142132085273, - "y": -85.7749282485849 + "x": 1255.0365190596667, + "y": -79.4360811741546 }, "type": "customNode", "data": { "id": "chainTool_3", "label": "Chain Tool", - "name": "chainTool", "version": 1, + "name": "chainTool", "type": "ChainTool", "baseClasses": ["ChainTool", "DynamicTool", "Tool", "StructuredTool", "BaseLangChain"], "category": "Tools", @@ -127,7 +127,7 @@ ], "inputs": { "name": "state-of-union-qa", - "description": "State of the Union QA - useful for when you need to ask questions about the most recent state of the union address.", + "description": "State of the Union QA - useful for when you need to ask questions about the president speech and most recent state of the union address.", "returnDirect": "", "baseChain": "{{retrievalQAChain_1.data.instance}}" }, @@ -145,66 +145,10 @@ "selected": false, "dragging": false, "positionAbsolute": { - "x": 1267.7142132085273, - "y": -85.7749282485849 + "x": 1255.0365190596667, + "y": -79.4360811741546 } }, - { - "width": 300, - "height": 280, - "id": "mrklAgentLLM_0", - "position": { - "x": 2061.891333395338, - "y": -140.0694021759809 - }, - "type": "customNode", - "data": { - "id": "mrklAgentLLM_0", - "label": "MRKL Agent for LLMs", - "name": "mrklAgentLLM", - "version": 1, - "type": "AgentExecutor", - "baseClasses": ["AgentExecutor", "BaseChain", "BaseLangChain"], - "category": "Agents", - "description": "Agent that uses the ReAct Framework to decide what action to take, optimized to be used with LLMs", - "inputParams": [], - "inputAnchors": [ - { - "label": "Allowed Tools", - "name": "tools", - "type": "Tool", - "list": true, - "id": "mrklAgentLLM_0-input-tools-Tool" - }, - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "mrklAgentLLM_0-input-model-BaseLanguageModel" - } - ], - "inputs": { - "tools": ["{{chainTool_2.data.instance}}", "{{chainTool_3.data.instance}}"], - "model": "{{openAI_4.data.instance}}" - }, - "outputAnchors": [ - { - "id": "mrklAgentLLM_0-output-mrklAgentLLM-AgentExecutor|BaseChain|BaseLangChain", - "name": "mrklAgentLLM", - "label": "AgentExecutor", - "type": "AgentExecutor | BaseChain | BaseLangChain" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 2061.891333395338, - "y": -140.0694021759809 - }, - "dragging": false - }, { "width": 300, "height": 280, @@ -217,8 +161,8 @@ "data": { "id": "retrievalQAChain_0", "label": "Retrieval QA Chain", - "name": "retrievalQAChain", "version": 1, + "name": "retrievalQAChain", "type": "RetrievalQAChain", "baseClasses": ["RetrievalQAChain", "BaseChain", "BaseLangChain"], "category": "Chains", @@ -239,8 +183,8 @@ } ], "inputs": { - "model": "{{openAI_2.data.instance}}", - "vectorStoreRetriever": "{{chromaExistingIndex_0.data.instance}}" + "model": "{{chatOpenAI_0.data.instance}}", + "vectorStoreRetriever": "{{redis_0.data.instance}}" }, 
"outputAnchors": [ { @@ -265,15 +209,15 @@ "height": 280, "id": "retrievalQAChain_1", "position": { - "x": 895.4349543765911, - "y": 166.60331503487222 + "x": 903.8867504758316, + "y": 380.0111665406929 }, "type": "customNode", "data": { "id": "retrievalQAChain_1", "label": "Retrieval QA Chain", - "name": "retrievalQAChain", "version": 1, + "name": "retrievalQAChain", "type": "RetrievalQAChain", "baseClasses": ["RetrievalQAChain", "BaseChain", "BaseLangChain"], "category": "Chains", @@ -294,8 +238,8 @@ } ], "inputs": { - "model": "{{openAI_3.data.instance}}", - "vectorStoreRetriever": "{{pineconeExistingIndex_0.data.instance}}" + "model": "{{chatOpenAI_1.data.instance}}", + "vectorStoreRetriever": "{{faiss_0.data.instance}}" }, "outputAnchors": [ { @@ -310,168 +254,8 @@ }, "selected": false, "positionAbsolute": { - "x": 895.4349543765911, - "y": 166.60331503487222 - }, - "dragging": false - }, - { - "width": 300, - "height": 523, - "id": "openAI_2", - "position": { - "x": 520.8471510168988, - "y": -1362.1183473852964 - }, - "type": "customNode", - "data": { - "id": "openAI_2", - "label": "OpenAI", - "name": "openAI", - "version": 3, - "type": "OpenAI", - "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"], - "category": "LLMs", - "description": "Wrapper around OpenAI large language models", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "openAI_2-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "options", - "options": [ - { - "label": "gpt-3.5-turbo-instruct", - "name": "gpt-3.5-turbo-instruct" - }, - { - "label": "babbage-002", - "name": "babbage-002" - }, - { - "label": "davinci-002", - "name": "davinci-002" - } - ], - "default": "gpt-3.5-turbo-instruct", - "optional": true, - "id": "openAI_2-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "default": 0.7, - "optional": true, - "id": "openAI_2-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_2-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_2-input-topP-number" - }, - { - "label": "Best Of", - "name": "bestOf", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_2-input-bestOf-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_2-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_2-input-presencePenalty-number" - }, - { - "label": "Batch Size", - "name": "batchSize", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_2-input-batchSize-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_2-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "openAI_2-input-basepath-string" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": 
"openAI_2-input-cache-BaseCache" - } - ], - "inputs": { - "modelName": "gpt-3.5-turbo-instruct", - "temperature": 0.7, - "maxTokens": "", - "topP": "", - "bestOf": "", - "frequencyPenalty": "", - "presencePenalty": "", - "batchSize": "", - "timeout": "", - "basepath": "" - }, - "outputAnchors": [ - { - "id": "openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", - "name": "openAI", - "label": "OpenAI", - "type": "OpenAI | BaseLLM | BaseLanguageModel" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 520.8471510168988, - "y": -1362.1183473852964 + "x": 903.8867504758316, + "y": 380.0111665406929 }, "dragging": false }, @@ -480,15 +264,15 @@ "height": 329, "id": "openAIEmbeddings_1", "position": { - "x": 148.65789308409916, - "y": -915.1982675859331 + "x": 100.06006551346672, + "y": -686.9997729064416 }, "type": "customNode", "data": { "id": "openAIEmbeddings_1", "label": "OpenAI Embeddings", - "name": "openAIEmbeddings", "version": 1, + "name": "openAIEmbeddings", "type": "OpenAIEmbeddings", "baseClasses": ["OpenAIEmbeddings", "Embeddings"], "category": "Embeddings", @@ -554,99 +338,8 @@ }, "selected": false, "positionAbsolute": { - "x": 148.65789308409916, - "y": -915.1982675859331 - }, - "dragging": false - }, - { - "width": 300, - "height": 506, - "id": "chromaExistingIndex_0", - "position": { - "x": 509.55198017578016, - "y": -782.42003311752 - }, - "type": "customNode", - "data": { - "id": "chromaExistingIndex_0", - "label": "Chroma Load Existing Index", - "name": "chromaExistingIndex", - "version": 1, - "type": "Chroma", - "baseClasses": ["Chroma", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Load existing index from Chroma (i.e: Document has been upserted)", - "inputParams": [ - { - "label": "Collection Name", - "name": "collectionName", - "type": "string", - "id": "chromaExistingIndex_0-input-collectionName-string" - }, - { - "label": "Chroma URL", - "name": "chromaURL", - "type": "string", - "optional": true, - "id": "chromaExistingIndex_0-input-chromaURL-string" - }, - { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. 
Default to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "chromaExistingIndex_0-input-topK-number" - } - ], - "inputAnchors": [ - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "chromaExistingIndex_0-input-embeddings-Embeddings" - } - ], - "inputs": { - "embeddings": "{{openAIEmbeddings_1.data.instance}}", - "collectionName": "", - "chromaURL": "", - "topK": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "chromaExistingIndex_0-output-retriever-Chroma|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Chroma Retriever", - "type": "Chroma | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "chromaExistingIndex_0-output-vectorStore-Chroma|VectorStore", - "name": "vectorStore", - "label": "Chroma Vector Store", - "type": "Chroma | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { - "output": "retriever" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 509.55198017578016, - "y": -782.42003311752 + "x": 100.06006551346672, + "y": -686.9997729064416 }, "dragging": false }, @@ -655,15 +348,15 @@ "height": 329, "id": "openAIEmbeddings_2", "position": { - "x": 128.85404348918783, - "y": 155.96043384682295 + "x": 126.74109446437771, + "y": 542.6301053870723 }, "type": "customNode", "data": { "id": "openAIEmbeddings_2", "label": "OpenAI Embeddings", - "name": "openAIEmbeddings", "version": 1, + "name": "openAIEmbeddings", "type": "OpenAIEmbeddings", "baseClasses": ["OpenAIEmbeddings", "Embeddings"], "category": "Embeddings", @@ -729,36 +422,36 @@ }, "selected": false, "positionAbsolute": { - "x": 128.85404348918783, - "y": 155.96043384682295 + "x": 126.74109446437771, + "y": 542.6301053870723 }, "dragging": false }, { "width": 300, - "height": 523, - "id": "openAI_3", + "height": 574, + "id": "chatOpenAI_0", "position": { - "x": 504.808358369027, - "y": -257.78194663790197 + "x": 518.3288471761277, + "y": -1348.530642047776 }, "type": "customNode", "data": { - "id": "openAI_3", - "label": "OpenAI", - "name": "openAI", - "version": 3, - "type": "OpenAI", - "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"], - "category": "LLMs", - "description": "Wrapper around OpenAI large language models", + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "version": 2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", "inputParams": [ { "label": "Connect Credential", "name": "credential", "type": "credential", "credentialNames": ["openAIApi"], - "id": "openAI_3-input-credential-credential" + "id": "chatOpenAI_0-input-credential-credential" }, { "label": "Model Name", @@ -766,85 +459,107 @@ "type": "options", "options": [ { - "label": "gpt-3.5-turbo-instruct", - "name": "gpt-3.5-turbo-instruct" + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" }, { - "label": "babbage-002", - "name": "babbage-002" + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" }, { - "label": "davinci-002", - "name": "davinci-002" + "label": 
"gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" + }, + { + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" + }, + { + "label": "gpt-3.5-turbo-16k-0613", + "name": "gpt-3.5-turbo-16k-0613" } ], - "default": "gpt-3.5-turbo-instruct", + "default": "gpt-3.5-turbo", "optional": true, - "id": "openAI_3-input-modelName-options" + "id": "chatOpenAI_0-input-modelName-options" }, { "label": "Temperature", "name": "temperature", "type": "number", - "default": 0.7, + "step": 0.1, + "default": 0.9, "optional": true, - "id": "openAI_3-input-temperature-number" + "id": "chatOpenAI_0-input-temperature-number" }, { "label": "Max Tokens", "name": "maxTokens", "type": "number", + "step": 1, "optional": true, "additionalParams": true, - "id": "openAI_3-input-maxTokens-number" + "id": "chatOpenAI_0-input-maxTokens-number" }, { "label": "Top Probability", "name": "topP", "type": "number", + "step": 0.1, "optional": true, "additionalParams": true, - "id": "openAI_3-input-topP-number" - }, - { - "label": "Best Of", - "name": "bestOf", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_3-input-bestOf-number" + "id": "chatOpenAI_0-input-topP-number" }, { "label": "Frequency Penalty", "name": "frequencyPenalty", "type": "number", + "step": 0.1, "optional": true, "additionalParams": true, - "id": "openAI_3-input-frequencyPenalty-number" + "id": "chatOpenAI_0-input-frequencyPenalty-number" }, { "label": "Presence Penalty", "name": "presencePenalty", "type": "number", + "step": 0.1, "optional": true, "additionalParams": true, - "id": "openAI_3-input-presencePenalty-number" - }, - { - "label": "Batch Size", - "name": "batchSize", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_3-input-batchSize-number" + "id": "chatOpenAI_0-input-presencePenalty-number" }, { "label": "Timeout", "name": "timeout", "type": "number", + "step": 1, "optional": true, "additionalParams": true, - "id": "openAI_3-input-timeout-number" + "id": "chatOpenAI_0-input-timeout-number" }, { "label": "BasePath", @@ -852,7 +567,15 @@ "type": "string", "optional": true, "additionalParams": true, - "id": "openAI_3-input-basepath-string" + "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -861,27 +584,27 @@ "name": "cache", "type": "BaseCache", "optional": true, - "id": "openAI_3-input-cache-BaseCache" + "id": "chatOpenAI_0-input-cache-BaseCache" } ], "inputs": { - "modelName": "gpt-3.5-turbo-instruct", - "temperature": 0.7, + "cache": "", + "modelName": "gpt-3.5-turbo-16k", + "temperature": 0.9, "maxTokens": "", "topP": "", - "bestOf": "", "frequencyPenalty": "", "presencePenalty": "", - "batchSize": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { - "id": "openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", - "name": "openAI", - "label": "OpenAI", - "type": "OpenAI | BaseLLM | BaseLanguageModel" + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" } ], "outputs": {}, @@ -889,59 +612,179 @@ }, "selected": false, "positionAbsolute": { - "x": 
504.808358369027, - "y": -257.78194663790197 + "x": 518.3288471761277, + "y": -1348.530642047776 }, "dragging": false }, { "width": 300, - "height": 505, - "id": "pineconeExistingIndex_0", + "height": 652, + "id": "redis_0", "position": { - "x": 507.5206146177215, - "y": 343.07818128024616 + "x": 526.7806432753682, + "y": -759.0178641257562 }, "type": "customNode", "data": { - "id": "pineconeExistingIndex_0", - "label": "Pinecone Load Existing Index", - "name": "pineconeExistingIndex", + "id": "redis_0", + "label": "Redis", "version": 1, - "type": "Pinecone", - "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], + "name": "redis", + "type": "Redis", + "baseClasses": ["Redis", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Load existing index from Pinecone (i.e: Document has been upserted)", + "description": "Upsert or Load data to Redis", "inputParams": [ { "label": "Connect Credential", "name": "credential", "type": "credential", - "credentialNames": ["pineconeApi"], - "id": "pineconeExistingIndex_0-input-credential-credential" + "credentialNames": ["redisCacheUrlApi", "redisCacheApi"], + "id": "redis_0-input-credential-credential" }, { - "label": "Pinecone Index", - "name": "pineconeIndex", + "label": "Index Name", + "name": "indexName", + "placeholder": "", "type": "string", - "id": "pineconeExistingIndex_0-input-pineconeIndex-string" + "id": "redis_0-input-indexName-string" + }, + { + "label": "Replace Index on Upsert", + "name": "replaceIndex", + "description": "Selecting this option will delete the existing index and recreate a new one when upserting", + "default": false, + "type": "boolean", + "id": "redis_0-input-replaceIndex-boolean" }, { - "label": "Pinecone Namespace", - "name": "pineconeNamespace", + "label": "Content Field", + "name": "contentKey", + "description": "Name of the field (column) that contains the actual content", "type": "string", - "placeholder": "my-first-namespace", + "default": "content", "additionalParams": true, "optional": true, - "id": "pineconeExistingIndex_0-input-pineconeNamespace-string" + "id": "redis_0-input-contentKey-string" }, { - "label": "Pinecone Metadata Filter", - "name": "pineconeMetadataFilter", - "type": "json", + "label": "Metadata Field", + "name": "metadataKey", + "description": "Name of the field (column) that contains the metadata of the document", + "type": "string", + "default": "metadata", + "additionalParams": true, + "optional": true, + "id": "redis_0-input-metadataKey-string" + }, + { + "label": "Vector Field", + "name": "vectorKey", + "description": "Name of the field (column) that contains the vector", + "type": "string", + "default": "content_vector", + "additionalParams": true, "optional": true, + "id": "redis_0-input-vectorKey-string" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. 
Default to 4", + "placeholder": "4", + "type": "number", "additionalParams": true, - "id": "pineconeExistingIndex_0-input-pineconeMetadataFilter-json" + "optional": true, + "id": "redis_0-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "optional": true, + "id": "redis_0-input-document-Document" + }, + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "redis_0-input-embeddings-Embeddings" + } + ], + "inputs": { + "document": ["{{plainText_0.data.instance}}"], + "embeddings": "{{openAIEmbeddings_1.data.instance}}", + "indexName": "redis-1234", + "replaceIndex": true, + "contentKey": "content", + "metadataKey": "metadata", + "vectorKey": "content_vector", + "topK": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "redis_0-output-retriever-Redis|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Redis Retriever", + "type": "Redis | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "redis_0-output-vectorStore-Redis|VectorStore", + "name": "vectorStore", + "label": "Redis Vector Store", + "type": "Redis | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "retriever" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 526.7806432753682, + "y": -759.0178641257562 + }, + "dragging": false + }, + { + "width": 300, + "height": 458, + "id": "faiss_0", + "position": { + "x": 533.1194903497986, + "y": 508.751550760307 + }, + "type": "customNode", + "data": { + "id": "faiss_0", + "label": "Faiss", + "version": 1, + "name": "faiss", + "type": "Faiss", + "baseClasses": ["Faiss", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Upsert or Load data to Faiss Vector Store", + "inputParams": [ + { + "label": "Base Path to load", + "name": "basePath", + "description": "Path to load faiss.index file", + "placeholder": "C:\\Users\\User\\Desktop", + "type": "string", + "id": "faiss_0-input-basePath-string" }, { "label": "Top K", @@ -951,22 +794,29 @@ "type": "number", "additionalParams": true, "optional": true, - "id": "pineconeExistingIndex_0-input-topK-number" + "id": "faiss_0-input-topK-number" } ], "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "optional": true, + "id": "faiss_0-input-document-Document" + }, { "label": "Embeddings", "name": "embeddings", "type": "Embeddings", - "id": "pineconeExistingIndex_0-input-embeddings-Embeddings" + "id": "faiss_0-input-embeddings-Embeddings" } ], "inputs": { + "document": ["{{plainText_1.data.instance}}"], "embeddings": "{{openAIEmbeddings_2.data.instance}}", - "pineconeIndex": "", - "pineconeNamespace": "", - "pineconeMetadataFilter": "", + "basePath": "C:\\Users\\user\\yourpath", "topK": "" }, "outputAnchors": [ @@ -976,16 +826,16 @@ "type": "options", "options": [ { - "id": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "id": "faiss_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever", "name": "retriever", - "label": "Pinecone Retriever", - "type": "Pinecone | VectorStoreRetriever | BaseRetriever" + "label": "Faiss Retriever", + "type": "Faiss | VectorStoreRetriever | BaseRetriever" }, { - "id": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", + "id": "faiss_0-output-vectorStore-Faiss|SaveableVectorStore|VectorStore", 
"name": "vectorStore", - "label": "Pinecone Vector Store", - "type": "Pinecone | VectorStore" + "label": "Faiss Vector Store", + "type": "Faiss | SaveableVectorStore | VectorStore" } ], "default": "retriever" @@ -998,36 +848,120 @@ }, "selected": false, "positionAbsolute": { - "x": 507.5206146177215, - "y": 343.07818128024616 + "x": 533.1194903497986, + "y": 508.751550760307 + }, + "dragging": false + }, + { + "width": 300, + "height": 485, + "id": "plainText_0", + "position": { + "x": 93.6260931892966, + "y": -1209.0760064103088 + }, + "type": "customNode", + "data": { + "id": "plainText_0", + "label": "Plain Text", + "version": 2, + "name": "plainText", + "type": "Document", + "baseClasses": ["Document"], + "category": "Document Loaders", + "description": "Load data from plain text", + "inputParams": [ + { + "label": "Text", + "name": "text", + "type": "string", + "rows": 4, + "placeholder": "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua...", + "id": "plainText_0-input-text-string" + }, + { + "label": "Metadata", + "name": "metadata", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "plainText_0-input-metadata-json" + } + ], + "inputAnchors": [ + { + "label": "Text Splitter", + "name": "textSplitter", + "type": "TextSplitter", + "optional": true, + "id": "plainText_0-input-textSplitter-TextSplitter" + } + ], + "inputs": { + "text": "AI-generated content refers to text, images, videos, or other media produced by artificial intelligence algorithms. It leverages deep learning and natural language processing to create human-like content autonomously. AI-generated content has diverse applications, from automated customer support chatbots and personalized marketing to creative writing and art generation. While it offers efficiency and scalability, it also raises concerns about ethics, authenticity, and potential misuse. 
Striking a balance between harnessing its potential for productivity and addressing its ethical implications is crucial as AI-generated content continues to evolve and reshape industries.", + "textSplitter": "", + "metadata": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "plainText_0-output-document-Document", + "name": "document", + "label": "Document", + "type": "Document" + }, + { + "id": "plainText_0-output-text-string|json", + "name": "text", + "label": "Text", + "type": "string | json" + } + ], + "default": "document" + } + ], + "outputs": { + "output": "document" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 93.6260931892966, + "y": -1209.0760064103088 }, "dragging": false }, { "width": 300, - "height": 523, - "id": "openAI_4", + "height": 574, + "id": "chatOpenAI_1", "position": { - "x": 1619.5346765785587, - "y": 352.29615581180684 + "x": 531.5715383965282, + "y": -87.77517816462955 }, "type": "customNode", "data": { - "id": "openAI_4", - "label": "OpenAI", - "name": "openAI", - "version": 3, - "type": "OpenAI", - "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"], - "category": "LLMs", - "description": "Wrapper around OpenAI large language models", + "id": "chatOpenAI_1", + "label": "ChatOpenAI", + "version": 2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", "inputParams": [ { "label": "Connect Credential", "name": "credential", "type": "credential", "credentialNames": ["openAIApi"], - "id": "openAI_4-input-credential-credential" + "id": "chatOpenAI_1-input-credential-credential" }, { "label": "Model Name", @@ -1035,85 +969,372 @@ "type": "options", "options": [ { - "label": "gpt-3.5-turbo-instruct", - "name": "gpt-3.5-turbo-instruct" + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" + }, + { + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" }, { - "label": "babbage-002", - "name": "babbage-002" + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" }, { - "label": "davinci-002", - "name": "davinci-002" + "label": "gpt-3.5-turbo-16k-0613", + "name": "gpt-3.5-turbo-16k-0613" } ], - "default": "gpt-3.5-turbo-instruct", + "default": "gpt-3.5-turbo", "optional": true, - "id": "openAI_4-input-modelName-options" + "id": "chatOpenAI_1-input-modelName-options" }, { "label": "Temperature", "name": "temperature", "type": "number", - "default": 0.7, + "step": 0.1, + "default": 0.9, "optional": true, - "id": "openAI_4-input-temperature-number" + "id": "chatOpenAI_1-input-temperature-number" }, { "label": "Max Tokens", "name": "maxTokens", "type": "number", + "step": 1, "optional": true, "additionalParams": true, - "id": "openAI_4-input-maxTokens-number" + "id": "chatOpenAI_1-input-maxTokens-number" }, { "label": "Top Probability", "name": "topP", "type": "number", + "step": 0.1, "optional": true, 
"additionalParams": true, - "id": "openAI_4-input-topP-number" + "id": "chatOpenAI_1-input-topP-number" }, { - "label": "Best Of", - "name": "bestOf", + "label": "Frequency Penalty", + "name": "frequencyPenalty", "type": "number", + "step": 0.1, "optional": true, "additionalParams": true, - "id": "openAI_4-input-bestOf-number" + "id": "chatOpenAI_1-input-frequencyPenalty-number" }, { - "label": "Frequency Penalty", - "name": "frequencyPenalty", + "label": "Presence Penalty", + "name": "presencePenalty", "type": "number", + "step": 0.1, "optional": true, "additionalParams": true, - "id": "openAI_4-input-frequencyPenalty-number" + "id": "chatOpenAI_1-input-presencePenalty-number" }, { - "label": "Presence Penalty", - "name": "presencePenalty", + "label": "Timeout", + "name": "timeout", "type": "number", + "step": 1, "optional": true, "additionalParams": true, - "id": "openAI_4-input-presencePenalty-number" + "id": "chatOpenAI_1-input-timeout-number" }, { - "label": "Batch Size", - "name": "batchSize", + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-baseOptions-json" + } + ], + "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "chatOpenAI_1-input-cache-BaseCache" + } + ], + "inputs": { + "cache": "", + "modelName": "gpt-3.5-turbo-16k", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "basepath": "", + "baseOptions": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 531.5715383965282, + "y": -87.77517816462955 + }, + "dragging": false + }, + { + "width": 300, + "height": 383, + "id": "conversationalAgent_0", + "position": { + "x": 2506.011817109287, + "y": -241.58006840004734 + }, + "type": "customNode", + "data": { + "id": "conversationalAgent_0", + "label": "Conversational Agent", + "version": 1, + "name": "conversationalAgent", + "type": "AgentExecutor", + "baseClasses": ["AgentExecutor", "BaseChain", "Runnable"], + "category": "Agents", + "description": "Conversational agent for a chat model. It will utilize chat specific prompts", + "inputParams": [ + { + "label": "System Message", + "name": "systemMessage", + "type": "string", + "rows": 4, + "default": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. 
Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist.", + "optional": true, + "additionalParams": true, + "id": "conversationalAgent_0-input-systemMessage-string" + } + ], + "inputAnchors": [ + { + "label": "Allowed Tools", + "name": "tools", + "type": "Tool", + "list": true, + "id": "conversationalAgent_0-input-tools-Tool" + }, + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "conversationalAgent_0-input-model-BaseLanguageModel" + }, + { + "label": "Memory", + "name": "memory", + "type": "BaseChatMemory", + "id": "conversationalAgent_0-input-memory-BaseChatMemory" + } + ], + "inputs": { + "tools": ["{{chainTool_2.data.instance}}", "{{chainTool_3.data.instance}}"], + "model": "{{chatOpenAI_2.data.instance}}", + "memory": "{{bufferMemory_0.data.instance}}", + "systemMessage": "Assistant is a large language model trained by OpenAI.\n\nAssistant is designed to be able to assist with a wide range of tasks, from answering simple questions to providing in-depth explanations and discussions on a wide range of topics. As a language model, Assistant is able to generate human-like text based on the input it receives, allowing it to engage in natural-sounding conversations and provide responses that are coherent and relevant to the topic at hand.\n\nAssistant is constantly learning and improving, and its capabilities are constantly evolving. It is able to process and understand large amounts of text, and can use this knowledge to provide accurate and informative responses to a wide range of questions. Additionally, Assistant is able to generate its own text based on the input it receives, allowing it to engage in discussions and provide explanations and descriptions on a wide range of topics.\n\nOverall, Assistant is a powerful system that can help with a wide range of tasks and provide valuable insights and information on a wide range of topics. Whether you need help with a specific question or just want to have a conversation about a particular topic, Assistant is here to assist." 
+ }, + "outputAnchors": [ + { + "id": "conversationalAgent_0-output-conversationalAgent-AgentExecutor|BaseChain|Runnable", + "name": "conversationalAgent", + "label": "AgentExecutor", + "type": "AgentExecutor | BaseChain | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 2506.011817109287, + "y": -241.58006840004734 + }, + "dragging": false + }, + { + "width": 300, + "height": 574, + "id": "chatOpenAI_2", + "position": { + "x": 1628.7151156632485, + "y": 281.9500435520215 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_2", + "label": "ChatOpenAI", + "version": 2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["openAIApi"], + "id": "chatOpenAI_2-input-credential-credential" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" + }, + { + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" + }, + { + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" + }, + { + "label": "gpt-3.5-turbo-16k-0613", + "name": "gpt-3.5-turbo-16k-0613" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_2-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "step": 0.1, + "default": 0.9, + "optional": true, + "id": "chatOpenAI_2-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_2-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", "type": "number", + "step": 0.1, "optional": true, "additionalParams": true, - "id": "openAI_4-input-batchSize-number" + "id": "chatOpenAI_2-input-presencePenalty-number" }, { "label": "Timeout", "name": "timeout", "type": "number", + "step": 1, "optional": true, "additionalParams": true, - "id": "openAI_4-input-timeout-number" + "id": "chatOpenAI_2-input-timeout-number" }, { "label": "BasePath", @@ -1121,7 +1342,15 @@ "type": "string", "optional": true, "additionalParams": true, - "id": "openAI_4-input-basepath-string" + "id": "chatOpenAI_2-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": 
"chatOpenAI_2-input-baseOptions-json" } ], "inputAnchors": [ @@ -1130,27 +1359,27 @@ "name": "cache", "type": "BaseCache", "optional": true, - "id": "openAI_4-input-cache-BaseCache" + "id": "chatOpenAI_2-input-cache-BaseCache" } ], "inputs": { - "modelName": "gpt-3.5-turbo-instruct", - "temperature": 0.7, + "cache": "", + "modelName": "gpt-3.5-turbo-16k", + "temperature": 0.9, "maxTokens": "", "topP": "", - "bestOf": "", "frequencyPenalty": "", "presencePenalty": "", - "batchSize": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { - "id": "openAI_4-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", - "name": "openAI", - "label": "OpenAI", - "type": "OpenAI | BaseLLM | BaseLanguageModel" + "id": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" } ], "outputs": {}, @@ -1158,35 +1387,224 @@ }, "selected": false, "positionAbsolute": { - "x": 1619.5346765785587, - "y": 352.29615581180684 + "x": 1628.7151156632485, + "y": 281.9500435520215 }, "dragging": false - } - ], - "edges": [ + }, { - "source": "chainTool_2", - "sourceHandle": "chainTool_2-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", - "target": "mrklAgentLLM_0", - "targetHandle": "mrklAgentLLM_0-input-tools-Tool", - "type": "buttonedge", - "id": "chainTool_2-chainTool_2-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain-mrklAgentLLM_0-mrklAgentLLM_0-input-tools-Tool", + "width": 300, + "height": 376, + "id": "bufferMemory_0", + "position": { + "x": 1996.4899941465392, + "y": 466.6000826492595 + }, + "type": "customNode", "data": { - "label": "" - } + "id": "bufferMemory_0", + "label": "Buffer Memory", + "version": 1, + "name": "bufferMemory", + "type": "BufferMemory", + "baseClasses": ["BufferMemory", "BaseChatMemory", "BaseMemory"], + "category": "Memory", + "description": "Remembers previous conversational back and forths directly", + "inputParams": [ + { + "label": "Memory Key", + "name": "memoryKey", + "type": "string", + "default": "chat_history", + "id": "bufferMemory_0-input-memoryKey-string" + }, + { + "label": "Input Key", + "name": "inputKey", + "type": "string", + "default": "input", + "id": "bufferMemory_0-input-inputKey-string" + } + ], + "inputAnchors": [], + "inputs": { + "memoryKey": "chat_history", + "inputKey": "input" + }, + "outputAnchors": [ + { + "id": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", + "name": "bufferMemory", + "label": "BufferMemory", + "type": "BufferMemory | BaseChatMemory | BaseMemory" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1996.4899941465392, + "y": 466.6000826492595 + }, + "dragging": false }, { - "source": "chainTool_3", - "sourceHandle": "chainTool_3-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", - "target": "mrklAgentLLM_0", - "targetHandle": "mrklAgentLLM_0-input-tools-Tool", - "type": "buttonedge", - "id": "chainTool_3-chainTool_3-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain-mrklAgentLLM_0-mrklAgentLLM_0-input-tools-Tool", + "width": 300, + "height": 485, + "id": "plainText_1", + "position": { + "x": 117.23894449422778, + "y": 23.24339894687961 + }, + "type": "customNode", "data": { - "label": "" - } + "id": "plainText_1", + "label": "Plain Text", + "version": 2, + "name": "plainText", + "type": 
"Document", + "baseClasses": ["Document"], + "category": "Document Loaders", + "description": "Load data from plain text", + "inputParams": [ + { + "label": "Text", + "name": "text", + "type": "string", + "rows": 4, + "placeholder": "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua...", + "id": "plainText_1-input-text-string" + }, + { + "label": "Metadata", + "name": "metadata", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "plainText_1-input-metadata-json" + } + ], + "inputAnchors": [ + { + "label": "Text Splitter", + "name": "textSplitter", + "type": "TextSplitter", + "optional": true, + "id": "plainText_1-input-textSplitter-TextSplitter" + } + ], + "inputs": { + "text": "Madam Speaker, Madam Vice President, our First Lady and Second Gentleman. Members of Congress and the Cabinet. Justices of the Supreme Court. My fellow Americans. \n\nLast year COVID-19 kept us apart. This year we are finally together again. \n\nTonight, we meet as Democrats Republicans and Independents. But most importantly as Americans. \n\nWith a duty to one another to the American people to the Constitution. \n\nAnd with an unwavering resolve that freedom will always triumph over tyranny. \n\nSix days ago, Russia’s Vladimir Putin sought to shake the foundations of the free world thinking he could make it bend to his menacing ways. But he badly miscalculated. \n\nHe thought he could roll into Ukraine and the world would roll over. Instead he met a wall of strength he never imagined. \n\nHe met the Ukrainian people. \n\nFrom President Zelenskyy to every Ukrainian, their fearlessness, their courage, their determination, inspires the world. \n\nGroups of citizens blocking tanks with their bodies. Everyone from students to retirees teachers turned soldiers defending their homeland. \n\nIn this struggle as President Zelenskyy said in his speech to the European Parliament “Light will win over darkness.” The Ukrainian Ambassador to the United States is here tonight. \n\nLet each of us here tonight in this Chamber send an unmistakable signal to Ukraine and to the world. \n\nPlease rise if you are able and show that, Yes, we the United States of America stand with the Ukrainian people. \n\nThroughout our history we’ve learned this lesson when dictators do not pay a price for their aggression they cause more chaos. \n\nThey keep moving. \n\nAnd the costs and the threats to America and the world keep rising. \n\nThat’s why the NATO Alliance was created to secure peace and stability in Europe after World War 2. \n\nThe United States is a member along with 29 other nations. \n\nIt matters. American diplomacy matters. American resolve matters. \n\nPutin’s latest attack on Ukraine was premeditated and unprovoked. \n\nHe rejected repeated efforts at diplomacy. \n\nHe thought the West and NATO wouldn’t respond. And he thought he could divide us at home. Putin was wrong. We were ready. Here is what we did. \n\nWe prepared extensively and carefully. \n\nWe spent months building a coalition of other freedom-loving nations from Europe and the Americas to Asia and Africa to confront Putin. \n\nI spent countless hours unifying our European allies. We shared with the world in advance what we knew Putin was planning and precisely how he would try to falsely justify his aggression. \n\nWe countered Russia’s lies with truth. \n\nAnd now that he has acted the free world is holding him accountable. 
\n\nAlong with twenty-seven members of the European Union including France, Germany, Italy, as well as countries like the United Kingdom, Canada, Japan, Korea, Australia, New Zealand, and many others, even Switzerland. \n\nWe are inflicting pain on Russia and supporting the people of Ukraine. Putin is now isolated from the world more than ever. \n\nTogether with our allies –we are right now enforcing powerful economic sanctions. \n\nWe are cutting off Russia’s largest banks from the international financial system. \n\nPreventing Russia’s central bank from defending the Russian Ruble making Putin’s $630 Billion “war fund” worthless. \n\nWe are choking off Russia’s access to technology that will sap its economic strength and weaken its military for years to come. \n\nTonight I say to the Russian oligarchs and corrupt leaders who have bilked billions of dollars off this violent regime no more. \n\nThe U.S. Department of Justice is assembling a dedicated task force to go after the crimes of Russian oligarchs. \n\nWe are joining with our European allies to find and seize your yachts your luxury apartments your private jets. We are coming for your ill-begotten gains. \n\nAnd tonight I am announcing that we will join our allies in closing off American air space to all Russian flights – further isolating Russia – and adding an additional squeeze –on their economy. The Ruble has lost 30% of its value. \n\nThe Russian stock market has lost 40% of its value and trading remains suspended. Russia’s economy is reeling and Putin alone is to blame. \n\nTogether with our allies we are providing support to the Ukrainians in their fight for freedom. Military assistance. Economic assistance. Humanitarian assistance. \n\nWe are giving more than $1 Billion in direct assistance to Ukraine. \n\nAnd we will continue to aid the Ukrainian people as they defend their country and to help ease their suffering. \n\nLet me be clear, our forces are not engaged and will not engage in conflict with Russian forces in Ukraine. \n\nOur forces are not going to Europe to fight in Ukraine, but to defend our NATO Allies – in the event that Putin decides to keep moving west. \n\nFor that purpose we’ve mobilized American ground forces, air squadrons, and ship deployments to protect NATO countries including Poland, Romania, Latvia, Lithuania, and Estonia. \n\nAs I have made crystal clear the United States and our Allies will defend every inch of territory of NATO countries with the full force of our collective power. \n\nAnd we remain clear-eyed. The Ukrainians are fighting back with pure courage. But the next few days weeks, months, will be hard on them. \n\nPutin has unleashed violence and chaos. But while he may make gains on the battlefield – he will pay a continuing high price over the long run. \n\nAnd a proud Ukrainian people, who have known 30 years of independence, have repeatedly shown that they will not tolerate anyone who tries to take their country backwards. \n\nTo all Americans, I will be honest with you, as I’ve always promised. A Russian dictator, invading a foreign country, has costs around the world. \n\nAnd I’m taking robust action to make sure the pain of our sanctions is targeted at Russia’s economy. And I will use every tool at our disposal to protect American businesses and consumers. \n\nTonight, I can announce that the United States has worked with 30 other countries to release 60 Million barrels of oil from reserves around the world. 
\n\nAmerica will lead that effort, releasing 30 Million barrels from our own Strategic Petroleum Reserve. And we stand ready to do more if necessary, unified with our allies. \n\nThese steps will help blunt gas prices here at home. And I know the news about what’s happening can seem alarming. \n\nBut I want you to know that we are going to be okay. \n\nWhen the history of this era is written Putin’s war on Ukraine will have left Russia weaker and the rest of the world stronger. \n\nWhile it shouldn’t have taken something so terrible for people around the world to see what’s at stake now everyone sees it clearly. \n\nWe see the unity among leaders of nations and a more unified Europe a more unified West. And we see unity among the people who are gathering in cities in large crowds around the world even in Russia to demonstrate their support for Ukraine. \n\nIn the battle between democracy and autocracy, democracies are rising to the moment, and the world is clearly choosing the side of peace and security. \n\nThis is a real test. It’s going to take time. So let us continue to draw inspiration from the iron will of the Ukrainian people. \n\nTo our fellow Ukrainian Americans who forge a deep bond that connects our two nations we stand with you. \n\nPutin may circle Kyiv with tanks, but he will never gain the hearts and souls of the Ukrainian people. \n\nHe will never extinguish their love of freedom. He will never weaken the resolve of the free world. \n\nWe meet tonight in an America that has lived through two of the hardest years this nation has ever faced. \n\nThe pandemic has been punishing. \n\nAnd so many families are living paycheck to paycheck, struggling to keep up with the rising cost of food, gas, housing, and so much more. \n\nI understand. \n\nI remember when my Dad had to leave our home in Scranton, Pennsylvania to find work. I grew up in a family where if the price of food went up, you felt it. \n\nThat’s why one of the first things I did as President was fight to pass the American Rescue Plan. \n\nBecause people were hurting. We needed to act, and we did. \n\nFew pieces of legislation have done more in a critical moment in our history to lift us out of crisis. \n\nIt fueled our efforts to vaccinate the nation and combat COVID-19. It delivered immediate economic relief for tens of millions of Americans. \n\nHelped put food on their table, keep a roof over their heads, and cut the cost of health insurance. \n\nAnd as my Dad used to say, it gave people a little breathing room. \n\nAnd unlike the $2 Trillion tax cut passed in the previous administration that benefitted the top 1% of Americans, the American Rescue Plan helped working people—and left no one behind. \n\nAnd it worked. It created jobs. Lots of jobs. \n\nIn fact—our economy created over 6.5 Million new jobs just last year, more jobs created in one year \nthan ever before in the history of America. \n\nOur economy grew at a rate of 5.7% last year, the strongest growth in nearly 40 years, the first step in bringing fundamental change to an economy that hasn’t worked for the working people of this nation for too long. \n\nFor the past 40 years we were told that if we gave tax breaks to those at the very top, the benefits would trickle down to everyone else. \n\nBut that trickle-down theory led to weaker economic growth, lower wages, bigger deficits, and the widest gap between those at the top and everyone else in nearly a century. \n\nVice President Harris and I ran for office with a new economic vision for America. 
\n\nInvest in America. Educate Americans. Grow the workforce. Build the economy from the bottom up \nand the middle out, not from the top down. \n\nBecause we know that when the middle class grows, the poor have a ladder up and the wealthy do very well. \n\nAmerica used to have the best roads, bridges, and airports on Earth. \n\nNow our infrastructure is ranked 13th in the world. \n\nWe won’t be able to compete for the jobs of the 21st Century if we don’t fix that. \n\nThat’s why it was so important to pass the Bipartisan Infrastructure Law—the most sweeping investment to rebuild America in history. \n\nThis was a bipartisan effort, and I want to thank the members of both parties who worked to make it happen. \n\nWe’re done talking about infrastructure weeks. \n\nWe’re going to have an infrastructure decade. \n\nIt is going to transform America and put us on a path to win the economic competition of the 21st Century that we face with the rest of the world—particularly with China. \n\nAs I’ve told Xi Jinping, it is never a good bet to bet against the American people. \n\nWe’ll create good jobs for millions of Americans, modernizing roads, airports, ports, and waterways all across America. \n\nAnd we’ll do it all to withstand the devastating effects of the climate crisis and promote environmental justice. \n\nWe’ll build a national network of 500,000 electric vehicle charging stations, begin to replace poisonous lead pipes—so every child—and every American—has clean water to drink at home and at school, provide affordable high-speed internet for every American—urban, suburban, rural, and tribal communities. \n\n4,000 projects have already been announced. \n\nAnd tonight, I’m announcing that this year we will start fixing over 65,000 miles of highway and 1,500 bridges in disrepair. \n\nWhen we use taxpayer dollars to rebuild America – we are going to Buy American: buy American products to support American jobs. \n\nThe federal government spends about $600 Billion a year to keep the country safe and secure. \n\nThere’s been a law on the books for almost a century \nto make sure taxpayers’ dollars support American jobs and businesses. \n\nEvery Administration says they’ll do it, but we are actually doing it. \n\nWe will buy American to make sure everything from the deck of an aircraft carrier to the steel on highway guardrails are made in America. \n\nBut to compete for the best jobs of the future, we also need to level the playing field with China and other competitors. \n\nThat’s why it is so important to pass the Bipartisan Innovation Act sitting in Congress that will make record investments in emerging technologies and American manufacturing. \n\nLet me give you one example of why it’s so important to pass it. \n\nIf you travel 20 miles east of Columbus, Ohio, you’ll find 1,000 empty acres of land. \n\nIt won’t look like much, but if you stop and look closely, you’ll see a “Field of dreams,” the ground on which America’s future will be built. \n\nThis is where Intel, the American company that helped build Silicon Valley, is going to build its $20 billion semiconductor “mega site”. \n\nUp to eight state-of-the-art factories in one place. 10,000 new good-paying jobs. \n\nSome of the most sophisticated manufacturing in the world to make computer chips the size of a fingertip that power the world and our everyday lives. \n\nSmartphones. The Internet. Technology we have yet to invent. \n\nBut that’s just the beginning. 
\n\nIntel’s CEO, Pat Gelsinger, who is here tonight, told me they are ready to increase their investment from \n$20 billion to $100 billion. \n\nThat would be one of the biggest investments in manufacturing in American history. \n\nAnd all they’re waiting for is for you to pass this bill. \n\nSo let’s not wait any longer. Send it to my desk. I’ll sign it. \n\nAnd we will really take off. \n\nAnd Intel is not alone. \n\nThere’s something happening in America. \n\nJust look around and you’ll see an amazing story. \n\nThe rebirth of the pride that comes from stamping products “Made In America.” The revitalization of American manufacturing. \n\nCompanies are choosing to build new factories here, when just a few years ago, they would have built them overseas. \n\nThat’s what is happening. Ford is investing $11 billion to build electric vehicles, creating 11,000 jobs across the country. \n\nGM is making the largest investment in its history—$7 billion to build electric vehicles, creating 4,000 jobs in Michigan. \n\nAll told, we created 369,000 new manufacturing jobs in America just last year. \n\nPowered by people I’ve met like JoJo Burgess, from generations of union steelworkers from Pittsburgh, who’s here with us tonight. \n\nAs Ohio Senator Sherrod Brown says, “It’s time to bury the label “Rust Belt.” \n\nIt’s time. \n\nBut with all the bright spots in our economy, record job growth and higher wages, too many families are struggling to keep up with the bills. \n\nInflation is robbing them of the gains they might otherwise feel. \n\nI get it. That’s why my top priority is getting prices under control. \n\nLook, our economy roared back faster than most predicted, but the pandemic meant that businesses had a hard time hiring enough workers to keep up production in their factories. \n\nThe pandemic also disrupted global supply chains. \n\nWhen factories close, it takes longer to make goods and get them from the warehouse to the store, and prices go up. \n\nLook at cars. \n\nLast year, there weren’t enough semiconductors to make all the cars that people wanted to buy. \n\nAnd guess what, prices of automobiles went up. \n\nSo—we have a choice. \n\nOne way to fight inflation is to drive down wages and make Americans poorer. \n\nI have a better plan to fight inflation. \n\nLower your costs, not your wages. \n\nMake more cars and semiconductors in America. \n\nMore infrastructure and innovation in America. \n\nMore goods moving faster and cheaper in America. \n\nMore jobs where you can earn a good living in America. \n\nAnd instead of relying on foreign supply chains, let’s make it in America. \n\nEconomists call it “increasing the productive capacity of our economy.” \n\nI call it building a better America. \n\nMy plan to fight inflation will lower your costs and lower the deficit. \n\n17 Nobel laureates in economics say my plan will ease long-term inflationary pressures. Top business leaders and most Americans support my plan. And here’s the plan: \n\nFirst – cut the cost of prescription drugs. Just look at insulin. One in ten Americans has diabetes. In Virginia, I met a 13-year-old boy named Joshua Davis. \n\nHe and his Dad both have Type 1 diabetes, which means they need insulin every day. Insulin costs about $10 a vial to make. \n\nBut drug companies charge families like Joshua and his Dad up to 30 times more. I spoke with Joshua’s mom. \n\nImagine what it’s like to look at your child who needs insulin and have no idea how you’re going to pay for it. 
\n\nWhat it does to your dignity, your ability to look your child in the eye, to be the parent you expect to be. \n\nJoshua is here with us tonight. Yesterday was his birthday. Happy birthday, buddy. \n\nFor Joshua, and for the 200,000 other young people with Type 1 diabetes, let’s cap the cost of insulin at $35 a month so everyone can afford it. \n\nDrug companies will still do very well. And while we’re at it let Medicare negotiate lower prices for prescription drugs, like the VA already does. \n\nLook, the American Rescue Plan is helping millions of families on Affordable Care Act plans save $2,400 a year on their health care premiums. Let’s close the coverage gap and make those savings permanent. \n\nSecond – cut energy costs for families an average of $500 a year by combatting climate change. \n\nLet’s provide investments and tax credits to weatherize your homes and businesses to be energy efficient and you get a tax credit; double America’s clean energy production in solar, wind, and so much more; lower the price of electric vehicles, saving you another $80 a month because you’ll never have to pay at the gas pump again. \n\nThird – cut the cost of child care. Many families pay up to $14,000 a year for child care per child. \n\nMiddle-class and working families shouldn’t have to pay more than 7% of their income for care of young children. \n\nMy plan will cut the cost in half for most families and help parents, including millions of women, who left the workforce during the pandemic because they couldn’t afford child care, to be able to get back to work. \n\nMy plan doesn’t stop there. It also includes home and long-term care. More affordable housing. And Pre-K for every 3- and 4-year-old. \n\nAll of these will lower costs. \n\nAnd under my plan, nobody earning less than $400,000 a year will pay an additional penny in new taxes. Nobody. \n\nThe one thing all Americans agree on is that the tax system is not fair. We have to fix it. \n\nI’m not looking to punish anyone. But let’s make sure corporations and the wealthiest Americans start paying their fair share. \n\nJust last year, 55 Fortune 500 corporations earned $40 billion in profits and paid zero dollars in federal income tax. \n\nThat’s simply not fair. That’s why I’ve proposed a 15% minimum tax rate for corporations. \n\nWe got more than 130 countries to agree on a global minimum tax rate so companies can’t get out of paying their taxes at home by shipping jobs and factories overseas. \n\nThat’s why I’ve proposed closing loopholes so the very wealthy don’t pay a lower tax rate than a teacher or a firefighter. \n\nSo that’s my plan. It will grow the economy and lower costs for families. \n\nSo what are we waiting for? Let’s get this done. And while you’re at it, confirm my nominees to the Federal Reserve, which plays a critical role in fighting inflation. \n\nMy plan will not only lower costs to give families a fair shot, it will lower the deficit. \n\nThe previous Administration not only ballooned the deficit with tax cuts for the very wealthy and corporations, it undermined the watchdogs whose job was to keep pandemic relief funds from being wasted. \n\nBut in my administration, the watchdogs have been welcomed back. \n\nWe’re going after the criminals who stole billions in relief money meant for small businesses and millions of Americans. \n\nAnd tonight, I’m announcing that the Justice Department will name a chief prosecutor for pandemic fraud. 
\n\nBy the end of this year, the deficit will be down to less than half what it was before I took office. \n\nThe only president ever to cut the deficit by more than one trillion dollars in a single year. \n\nLowering your costs also means demanding more competition. \n\nI’m a capitalist, but capitalism without competition isn’t capitalism. \n\nIt’s exploitation—and it drives up prices. \n\nWhen corporations don’t have to compete, their profits go up, your prices go up, and small businesses and family farmers and ranchers go under. \n\nWe see it happening with ocean carriers moving goods in and out of America. \n\nDuring the pandemic, these foreign-owned companies raised prices by as much as 1,000% and made record profits. \n\nTonight, I’m announcing a crackdown on these companies overcharging American businesses and consumers. \n\nAnd as Wall Street firms take over more nursing homes, quality in those homes has gone down and costs have gone up. \n\nThat ends on my watch. \n\nMedicare is going to set higher standards for nursing homes and make sure your loved ones get the care they deserve and expect. \n\nWe’ll also cut costs and keep the economy going strong by giving workers a fair shot, provide more training and apprenticeships, hire them based on their skills not degrees. \n\nLet’s pass the Paycheck Fairness Act and paid leave. \n\nRaise the minimum wage to $15 an hour and extend the Child Tax Credit, so no one has to raise a family in poverty. \n\nLet’s increase Pell Grants and increase our historic support of HBCUs, and invest in what Jill—our First Lady who teaches full-time—calls America’s best-kept secret: community colleges. \n\nAnd let’s pass the PRO Act when a majority of workers want to form a union—they shouldn’t be stopped. \n\nWhen we invest in our workers, when we build the economy from the bottom up and the middle out together, we can do something we haven’t done in a long time: build a better America. \n\nFor more than two years, COVID-19 has impacted every decision in our lives and the life of the nation. \n\nAnd I know you’re tired, frustrated, and exhausted. \n\nBut I also know this. \n\nBecause of the progress we’ve made, because of your resilience and the tools we have, tonight I can say \nwe are moving forward safely, back to more normal routines. \n\nWe’ve reached a new moment in the fight against COVID-19, with severe cases down to a level not seen since last July. \n\nJust a few days ago, the Centers for Disease Control and Prevention—the CDC—issued new mask guidelines. \n\nUnder these new guidelines, most Americans in most of the country can now be mask free. \n\nAnd based on the projections, more of the country will reach that point across the next couple of weeks. \n\nThanks to the progress we have made this past year, COVID-19 need no longer control our lives. \n\nI know some are talking about “living with COVID-19”. Tonight – I say that we will never just accept living with COVID-19. \n\nWe will continue to combat the virus as we do other diseases. And because this is a virus that mutates and spreads, we will stay on guard. \n\nHere are four common sense steps as we move forward safely. \n\nFirst, stay protected with vaccines and treatments. We know how incredibly effective vaccines are. If you’re vaccinated and boosted you have the highest degree of protection. \n\nWe will never give up on vaccinating more Americans. Now, I know parents with kids under 5 are eager to see a vaccine authorized for their children. 
\n\nThe scientists are working hard to get that done and we’ll be ready with plenty of vaccines when they do. \n\nWe’re also ready with anti-viral treatments. If you get COVID-19, the Pfizer pill reduces your chances of ending up in the hospital by 90%. \n\nWe’ve ordered more of these pills than anyone in the world. And Pfizer is working overtime to get us 1 Million pills this month and more than double that next month. \n\nAnd we’re launching the “Test to Treat” initiative so people can get tested at a pharmacy, and if they’re positive, receive antiviral pills on the spot at no cost. \n\nIf you’re immunocompromised or have some other vulnerability, we have treatments and free high-quality masks. \n\nWe’re leaving no one behind or ignoring anyone’s needs as we move forward. \n\nAnd on testing, we have made hundreds of millions of tests available for you to order for free. \n\nEven if you already ordered free tests tonight, I am announcing that you can order more from covidtests.gov starting next week. \n\nSecond – we must prepare for new variants. Over the past year, we’ve gotten much better at detecting new variants. \n\nIf necessary, we’ll be able to deploy new vaccines within 100 days instead of many more months or years. \n\nAnd, if Congress provides the funds we need, we’ll have new stockpiles of tests, masks, and pills ready if needed. \n\nI cannot promise a new variant won’t come. But I can promise you we’ll do everything within our power to be ready if it does. \n\nThird – we can end the shutdown of schools and businesses. We have the tools we need. \n\nIt’s time for Americans to get back to work and fill our great downtowns again. People working from home can feel safe to begin to return to the office. \n\nWe’re doing that here in the federal government. The vast majority of federal workers will once again work in person. \n\nOur schools are open. Let’s keep it that way. Our kids need to be in school. \n\nAnd with 75% of adult Americans fully vaccinated and hospitalizations down by 77%, most Americans can remove their masks, return to work, stay in the classroom, and move forward safely. \n\nWe achieved this because we provided free vaccines, treatments, tests, and masks. \n\nOf course, continuing this costs money. \n\nI will soon send Congress a request. \n\nThe vast majority of Americans have used these tools and may want to again, so I expect Congress to pass it quickly. \n\nFourth, we will continue vaccinating the world. \n\nWe’ve sent 475 Million vaccine doses to 112 countries, more than any other nation. \n\nAnd we won’t stop. \n\nWe have lost so much to COVID-19. Time with one another. And worst of all, so much loss of life. \n\nLet’s use this moment to reset. Let’s stop looking at COVID-19 as a partisan dividing line and see it for what it is: A God-awful disease. \n\nLet’s stop seeing each other as enemies, and start seeing each other for who we really are: Fellow Americans. \n\nWe can’t change how divided we’ve been. But we can change how we move forward—on COVID-19 and other issues we must face together. \n\nI recently visited the New York City Police Department days after the funerals of Officer Wilbert Mora and his partner, Officer Jason Rivera. \n\nThey were responding to a 9-1-1 call when a man shot and killed them with a stolen gun. \n\nOfficer Mora was 27 years old. \n\nOfficer Rivera was 22. \n\nBoth Dominican Americans who’d grown up on the same streets they later chose to patrol as police officers. 
\n\nI spoke with their families and told them that we are forever in debt for their sacrifice, and we will carry on their mission to restore the trust and safety every community deserves. \n\nI’ve worked on these issues a long time. \n\nI know what works: Investing in crime preventionand community police officers who’ll walk the beat, who’ll know the neighborhood, and who can restore trust and safety. \n\nSo let’s not abandon our streets. Or choose between safety and equal justice. \n\nLet’s come together to protect our communities, restore trust, and hold law enforcement accountable. \n\nThat’s why the Justice Department required body cameras, banned chokeholds, and restricted no-knock warrants for its officers. \n\nThat’s why the American Rescue Plan provided $350 Billion that cities, states, and counties can use to hire more police and invest in proven strategies like community violence interruption—trusted messengers breaking the cycle of violence and trauma and giving young people hope. \n\nWe should all agree: The answer is not to Defund the police. The answer is to FUND the police with the resources and training they need to protect our communities. \n\nI ask Democrats and Republicans alike: Pass my budget and keep our neighborhoods safe. \n\nAnd I will keep doing everything in my power to crack down on gun trafficking and ghost guns you can buy online and make at home—they have no serial numbers and can’t be traced. \n\nAnd I ask Congress to pass proven measures to reduce gun violence. Pass universal background checks. Why should anyone on a terrorist list be able to purchase a weapon? \n\nBan assault weapons and high-capacity magazines. \n\nRepeal the liability shield that makes gun manufacturers the only industry in America that can’t be sued. \n\nThese laws don’t infringe on the Second Amendment. They save lives. \n\nThe most fundamental right in America is the right to vote – and to have it counted. And it’s under assault. \n\nIn state after state, new laws have been passed, not only to suppress the vote, but to subvert entire elections. \n\nWe cannot let this happen. \n\nTonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the John Lewis Voting Rights Act. And while you’re at it, pass the Disclose Act so Americans can know who is funding our elections. \n\nTonight, I’d like to honor someone who has dedicated his life to serve this country: Justice Stephen Breyer—an Army veteran, Constitutional scholar, and retiring Justice of the United States Supreme Court. Justice Breyer, thank you for your service. \n\nOne of the most serious constitutional responsibilities a President has is nominating someone to serve on the United States Supreme Court. \n\nAnd I did that 4 days ago, when I nominated Circuit Court of Appeals Judge Ketanji Brown Jackson. One of our nation’s top legal minds, who will continue Justice Breyer’s legacy of excellence. \n\nA former top litigator in private practice. A former federal public defender. And from a family of public school educators and police officers. A consensus builder. Since she’s been nominated, she’s received a broad range of support—from the Fraternal Order of Police to former judges appointed by Democrats and Republicans. \n\nAnd if we are to advance liberty and justice, we need to secure the Border and fix the immigration system. \n\nWe can do both. At our border, we’ve installed new technology like cutting-edge scanners to better detect drug smuggling. 
\n\nWe’ve set up joint patrols with Mexico and Guatemala to catch more human traffickers. \n\nWe’re putting in place dedicated immigration judges so families fleeing persecution and violence can have their cases heard faster. \n\nWe’re securing commitments and supporting partners in South and Central America to host more refugees and secure their own borders. \n\nWe can do all this while keeping lit the torch of liberty that has led generations of immigrants to this land—my forefathers and so many of yours. \n\nProvide a pathway to citizenship for Dreamers, those on temporary status, farm workers, and essential workers. \n\nRevise our laws so businesses have the workers they need and families don’t wait decades to reunite. \n\nIt’s not only the right thing to do—it’s the economically smart thing to do. \n\nThat’s why immigration reform is supported by everyone from labor unions to religious leaders to the U.S. Chamber of Commerce. \n\nLet’s get it done once and for all. \n\nAdvancing liberty and justice also requires protecting the rights of women. \n\nThe constitutional right affirmed in Roe v. Wade—standing precedent for half a century—is under attack as never before. \n\nIf we want to go forward—not backward—we must protect access to health care. Preserve a woman’s right to choose. And let’s continue to advance maternal health care in America. \n\nAnd for our LGBTQ+ Americans, let’s finally get the bipartisan Equality Act to my desk. The onslaught of state laws targeting transgender Americans and their families is wrong. \n\nAs I said last year, especially to our younger transgender Americans, I will always have your back as your President, so you can be yourself and reach your God-given potential. \n\nWhile it often appears that we never agree, that isn’t true. I signed 80 bipartisan bills into law last year. From preventing government shutdowns to protecting Asian-Americans from still-too-common hate crimes to reforming military justice. \n\nAnd soon, we’ll strengthen the Violence Against Women Act that I first wrote three decades ago. It is important for us to show the nation that we can come together and do big things. \n\nSo tonight I’m offering a Unity Agenda for the Nation. Four big things we can do together. \n\nFirst, beat the opioid epidemic. \n\nThere is so much we can do. Increase funding for prevention, treatment, harm reduction, and recovery. \n\nGet rid of outdated rules that stop doctors from prescribing treatments. And stop the flow of illicit drugs by working with state and local law enforcement to go after traffickers. \n\nIf you’re suffering from addiction, know you are not alone. I believe in recovery, and I celebrate the 23 million Americans in recovery. \n\nSecond, let’s take on mental health. Especially among our children, whose lives and education have been turned upside down. \n\nThe American Rescue Plan gave schools money to hire teachers and help students make up for lost learning. \n\nI urge every parent to make sure your school does just that. And we can all play a part—sign up to be a tutor or a mentor. \n\nChildren were also struggling before the pandemic. Bullying, violence, trauma, and the harms of social media. \n\nAs Frances Haugen, who is here with us tonight, has shown, we must hold social media platforms accountable for the national experiment they’re conducting on our children for profit. \n\nIt’s time to strengthen privacy protections, ban targeted advertising to children, demand tech companies stop collecting personal data on our children. 
\n\nAnd let’s get all Americans the mental health services they need. More people they can turn to for help, and full parity between physical and mental health care. \n\nThird, support our veterans. \n\nVeterans are the best of us. \n\nI’ve always believed that we have a sacred obligation to equip all those we send to war and care for them and their families when they come home. \n\nMy administration is providing assistance with job training and housing, and now helping lower-income veterans get VA care debt-free. \n\nOur troops in Iraq and Afghanistan faced many dangers. \n\nOne was stationed at bases and breathing in toxic smoke from “burn pits” that incinerated wastes of war—medical and hazard material, jet fuel, and more. \n\nWhen they came home, many of the world’s fittest and best trained warriors were never the same. \n\nHeadaches. Numbness. Dizziness. \n\nA cancer that would put them in a flag-draped coffin. \n\nI know. \n\nOne of those soldiers was my son Major Beau Biden. \n\nWe don’t know for sure if a burn pit was the cause of his brain cancer, or the diseases of so many of our troops. \n\nBut I’m committed to finding out everything we can. \n\nCommitted to military families like Danielle Robinson from Ohio. \n\nThe widow of Sergeant First Class Heath Robinson. \n\nHe was born a soldier. Army National Guard. Combat medic in Kosovo and Iraq. \n\nStationed near Baghdad, just yards from burn pits the size of football fields. \n\nHeath’s widow Danielle is here with us tonight. They loved going to Ohio State football games. He loved building Legos with their daughter. \n\nBut cancer from prolonged exposure to burn pits ravaged Heath’s lungs and body. \n\nDanielle says Heath was a fighter to the very end. \n\nHe didn’t know how to stop fighting, and neither did she. \n\nThrough her pain she found purpose to demand we do better. \n\nTonight, Danielle—we are. \n\nThe VA is pioneering new ways of linking toxic exposures to diseases, already helping more veterans get benefits. \n\nAnd tonight, I’m announcing we’re expanding eligibility to veterans suffering from nine respiratory cancers. \n\nI’m also calling on Congress: pass a law to make sure veterans devastated by toxic exposures in Iraq and Afghanistan finally get the benefits and comprehensive health care they deserve. \n\nAnd fourth, let’s end cancer as we know it. \n\nThis is personal to me and Jill, to Kamala, and to so many of you. \n\nCancer is the #2 cause of death in America–second only to heart disease. \n\nLast month, I announced our plan to supercharge \nthe Cancer Moonshot that President Obama asked me to lead six years ago. \n\nOur goal is to cut the cancer death rate by at least 50% over the next 25 years, turn more cancers from death sentences into treatable diseases. \n\nMore support for patients and families. \n\nTo get there, I call on Congress to fund ARPA-H, the Advanced Research Projects Agency for Health. \n\nIt’s based on DARPA—the Defense Department project that led to the Internet, GPS, and so much more. \n\nARPA-H will have a singular purpose—to drive breakthroughs in cancer, Alzheimer’s, diabetes, and more. \n\nA unity agenda for the nation. \n\nWe can do this. \n\nMy fellow Americans—tonight , we have gathered in a sacred space—the citadel of our democracy. \n\nIn this Capitol, generation after generation, Americans have debated great questions amid great strife, and have done great things. \n\nWe have fought for freedom, expanded liberty, defeated totalitarianism and terror. 
\n\nAnd built the strongest, freest, and most prosperous nation the world has ever known. \n\nNow is the hour. \n\nOur moment of responsibility. \n\nOur test of resolve and conscience, of history itself. \n\nIt is in this moment that our character is formed. Our purpose is found. Our future is forged. \n\nWell I know this nation. \n\nWe will meet the test. \n\nTo protect freedom and liberty, to expand fairness and opportunity. \n\nWe will save democracy. \n\nAs hard as these times have been, I am more optimistic about America today than I have been my whole life. \n\nBecause I see the future that is within our grasp. \n\nBecause I know there is simply nothing beyond our capacity. \n\nWe are the only nation on Earth that has always turned every crisis we have faced into an opportunity. \n\nThe only nation that can be defined by a single word: possibilities. \n\nSo on this night, in our 245th year as a nation, I have come to report on the State of the Union. \n\nAnd my report is this: the State of the Union is strong—because you, the American people, are strong. \n\nWe are stronger today than we were a year ago. \n\nAnd we will be stronger a year from now than we are today. \n\nNow is our moment to meet and overcome the challenges of our time. \n\nAnd we will, as one people. \n\nOne America. \n\nThe United States of America. \n\nMay God bless you all. May God protect our troops.", + "textSplitter": "{{recursiveCharacterTextSplitter_0.data.instance}}", + "metadata": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "plainText_1-output-document-Document", + "name": "document", + "label": "Document", + "type": "Document" + }, + { + "id": "plainText_1-output-text-string|json", + "name": "text", + "label": "Text", + "type": "string | json" + } + ], + "default": "document" + } + ], + "outputs": { + "output": "document" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 117.23894449422778, + "y": 23.24339894687961 + }, + "dragging": false }, + { + "width": 300, + "height": 429, + "id": "recursiveCharacterTextSplitter_0", + "position": { + "x": -259.38954307457425, + "y": 75.96855802341503 + }, + "type": "customNode", + "data": { + "id": "recursiveCharacterTextSplitter_0", + "label": "Recursive Character Text Splitter", + "version": 2, + "name": "recursiveCharacterTextSplitter", + "type": "RecursiveCharacterTextSplitter", + "baseClasses": ["RecursiveCharacterTextSplitter", "TextSplitter", "BaseDocumentTransformer", "Runnable"], + "category": "Text Splitters", + "description": "Split documents recursively by different characters - starting with \"\\n\\n\", then \"\\n\", then \" \"", + "inputParams": [ + { + "label": "Chunk Size", + "name": "chunkSize", + "type": "number", + "default": 1000, + "optional": true, + "id": "recursiveCharacterTextSplitter_0-input-chunkSize-number" + }, + { + "label": "Chunk Overlap", + "name": "chunkOverlap", + "type": "number", + "optional": true, + "id": "recursiveCharacterTextSplitter_0-input-chunkOverlap-number" + }, + { + "label": "Custom Separators", + "name": "separators", + "type": "string", + "rows": 4, + "description": "Array of custom separators to determine when to split the text, will override the default separators", + "placeholder": "[\"|\", \"##\", \">\", \"-\"]", + "additionalParams": true, + "optional": true, + "id": "recursiveCharacterTextSplitter_0-input-separators-string" + } + ], + "inputAnchors": [], + "inputs": { + "chunkSize": 1000, + 
"chunkOverlap": "", + "separators": "" + }, + "outputAnchors": [ + { + "id": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer|Runnable", + "name": "recursiveCharacterTextSplitter", + "label": "RecursiveCharacterTextSplitter", + "type": "RecursiveCharacterTextSplitter | TextSplitter | BaseDocumentTransformer | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": -259.38954307457425, + "y": 75.96855802341503 + }, + "dragging": false + } + ], + "edges": [ { "source": "retrievalQAChain_0", "sourceHandle": "retrievalQAChain_0-output-retrievalQAChain-RetrievalQAChain|BaseChain|BaseLangChain", @@ -1210,78 +1628,144 @@ } }, { - "source": "openAI_2", - "sourceHandle": "openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", - "target": "retrievalQAChain_0", - "targetHandle": "retrievalQAChain_0-input-model-BaseLanguageModel", + "source": "openAIEmbeddings_1", + "sourceHandle": "openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "redis_0", + "targetHandle": "redis_0-input-embeddings-Embeddings", "type": "buttonedge", - "id": "openAI_2-openAI_2-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-retrievalQAChain_0-retrievalQAChain_0-input-model-BaseLanguageModel", + "id": "openAIEmbeddings_1-openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-redis_0-redis_0-input-embeddings-Embeddings", "data": { "label": "" } }, { - "source": "openAIEmbeddings_1", - "sourceHandle": "openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "chromaExistingIndex_0", - "targetHandle": "chromaExistingIndex_0-input-embeddings-Embeddings", + "source": "redis_0", + "sourceHandle": "redis_0-output-retriever-Redis|VectorStoreRetriever|BaseRetriever", + "target": "retrievalQAChain_0", + "targetHandle": "retrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", "type": "buttonedge", - "id": "openAIEmbeddings_1-openAIEmbeddings_1-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-chromaExistingIndex_0-chromaExistingIndex_0-input-embeddings-Embeddings", + "id": "redis_0-redis_0-output-retriever-Redis|VectorStoreRetriever|BaseRetriever-retrievalQAChain_0-retrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", "data": { "label": "" } }, { - "source": "chromaExistingIndex_0", - "sourceHandle": "chromaExistingIndex_0-output-retriever-Chroma|VectorStoreRetriever|BaseRetriever", - "target": "retrievalQAChain_0", - "targetHandle": "retrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "source": "plainText_0", + "sourceHandle": "plainText_0-output-document-Document", + "target": "redis_0", + "targetHandle": "redis_0-input-document-Document", "type": "buttonedge", - "id": "chromaExistingIndex_0-chromaExistingIndex_0-output-retriever-Chroma|VectorStoreRetriever|BaseRetriever-retrievalQAChain_0-retrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "id": "plainText_0-plainText_0-output-document-Document-redis_0-redis_0-input-document-Document", "data": { "label": "" } }, { - "source": "openAIEmbeddings_2", - "sourceHandle": "openAIEmbeddings_2-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "pineconeExistingIndex_0", - "targetHandle": "pineconeExistingIndex_0-input-embeddings-Embeddings", + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "retrievalQAChain_0", + 
"targetHandle": "retrievalQAChain_0-input-model-BaseLanguageModel", "type": "buttonedge", - "id": "openAIEmbeddings_2-openAIEmbeddings_2-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeExistingIndex_0-pineconeExistingIndex_0-input-embeddings-Embeddings", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-retrievalQAChain_0-retrievalQAChain_0-input-model-BaseLanguageModel", "data": { "label": "" } }, { - "source": "openAI_3", - "sourceHandle": "openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", + "source": "chatOpenAI_1", + "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", "target": "retrievalQAChain_1", "targetHandle": "retrievalQAChain_1-input-model-BaseLanguageModel", "type": "buttonedge", - "id": "openAI_3-openAI_3-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-retrievalQAChain_1-retrievalQAChain_1-input-model-BaseLanguageModel", + "id": "chatOpenAI_1-chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-retrievalQAChain_1-retrievalQAChain_1-input-model-BaseLanguageModel", "data": { "label": "" } }, { - "source": "pineconeExistingIndex_0", - "sourceHandle": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "source": "faiss_0", + "sourceHandle": "faiss_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever", "target": "retrievalQAChain_1", "targetHandle": "retrievalQAChain_1-input-vectorStoreRetriever-BaseRetriever", "type": "buttonedge", - "id": "pineconeExistingIndex_0-pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-retrievalQAChain_1-retrievalQAChain_1-input-vectorStoreRetriever-BaseRetriever", + "id": "faiss_0-faiss_0-output-retriever-Faiss|VectorStoreRetriever|BaseRetriever-retrievalQAChain_1-retrievalQAChain_1-input-vectorStoreRetriever-BaseRetriever", + "data": { + "label": "" + } + }, + { + "source": "openAIEmbeddings_2", + "sourceHandle": "openAIEmbeddings_2-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "faiss_0", + "targetHandle": "faiss_0-input-embeddings-Embeddings", + "type": "buttonedge", + "id": "openAIEmbeddings_2-openAIEmbeddings_2-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-faiss_0-faiss_0-input-embeddings-Embeddings", + "data": { + "label": "" + } + }, + { + "source": "chainTool_2", + "sourceHandle": "chainTool_2-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-tools-Tool", + "type": "buttonedge", + "id": "chainTool_2-chainTool_2-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "chainTool_3", + "sourceHandle": "chainTool_3-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-tools-Tool", + "type": "buttonedge", + "id": "chainTool_3-chainTool_3-output-chainTool-ChainTool|DynamicTool|Tool|StructuredTool|BaseLangChain-conversationalAgent_0-conversationalAgent_0-input-tools-Tool", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_2", + "sourceHandle": "chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-model-BaseLanguageModel", + "type": 
"buttonedge", + "id": "chatOpenAI_2-chatOpenAI_2-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalAgent_0-conversationalAgent_0-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, + { + "source": "bufferMemory_0", + "sourceHandle": "bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory", + "target": "conversationalAgent_0", + "targetHandle": "conversationalAgent_0-input-memory-BaseChatMemory", + "type": "buttonedge", + "id": "bufferMemory_0-bufferMemory_0-output-bufferMemory-BufferMemory|BaseChatMemory|BaseMemory-conversationalAgent_0-conversationalAgent_0-input-memory-BaseChatMemory", + "data": { + "label": "" + } + }, + { + "source": "plainText_1", + "sourceHandle": "plainText_1-output-document-Document", + "target": "faiss_0", + "targetHandle": "faiss_0-input-document-Document", + "type": "buttonedge", + "id": "plainText_1-plainText_1-output-document-Document-faiss_0-faiss_0-input-document-Document", "data": { "label": "" } }, { - "source": "openAI_4", - "sourceHandle": "openAI_4-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", - "target": "mrklAgentLLM_0", - "targetHandle": "mrklAgentLLM_0-input-model-BaseLanguageModel", + "source": "recursiveCharacterTextSplitter_0", + "sourceHandle": "recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer|Runnable", + "target": "plainText_1", + "targetHandle": "plainText_1-input-textSplitter-TextSplitter", "type": "buttonedge", - "id": "openAI_4-openAI_4-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-mrklAgentLLM_0-mrklAgentLLM_0-input-model-BaseLanguageModel", + "id": "recursiveCharacterTextSplitter_0-recursiveCharacterTextSplitter_0-output-recursiveCharacterTextSplitter-RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer|Runnable-plainText_1-plainText_1-input-textSplitter-TextSplitter", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/OpenAI Agent.json b/packages/server/marketplaces/chatflows/OpenAI Agent.json index 9a98d29dc90..a4944af09d2 100644 --- a/packages/server/marketplaces/chatflows/OpenAI Agent.json +++ b/packages/server/marketplaces/chatflows/OpenAI Agent.json @@ -391,6 +391,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -410,7 +418,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { diff --git a/packages/server/marketplaces/chatflows/Prompt Chaining with VectorStore.json b/packages/server/marketplaces/chatflows/Prompt Chaining with VectorStore.json index 29f7e7aa26a..fca62a76fef 100644 --- a/packages/server/marketplaces/chatflows/Prompt Chaining with VectorStore.json +++ b/packages/server/marketplaces/chatflows/Prompt Chaining with VectorStore.json @@ -4,76 +4,52 @@ "nodes": [ { "width": 300, - "height": 329, - "id": "openAIEmbeddings_0", + "height": 475, + "id": "promptTemplate_0", "position": { - "x": 1198.6643452533754, - "y": -584.4233173804803 + "x": 344.73370692733414, + "y": -122.34815000085804 }, "type": "customNode", "data": { - "id": "openAIEmbeddings_0", - "label": "OpenAI Embeddings", + "id": "promptTemplate_0", + "label": "Prompt Template", "version": 1, - "name": "openAIEmbeddings", - "type": 
"OpenAIEmbeddings", - "baseClasses": ["OpenAIEmbeddings", "Embeddings"], - "category": "Embeddings", - "description": "OpenAI API to generate embeddings for a given text", + "name": "promptTemplate", + "type": "PromptTemplate", + "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate", "Runnable"], + "category": "Prompts", + "description": "Schema to represent a basic prompt for an LLM", "inputParams": [ { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "openAIEmbeddings_0-input-credential-credential" - }, - { - "label": "Strip New Lines", - "name": "stripNewLines", - "type": "boolean", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-stripNewLines-boolean" - }, - { - "label": "Batch Size", - "name": "batchSize", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-batchSize-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-timeout-number" + "label": "Template", + "name": "template", + "type": "string", + "rows": 4, + "placeholder": "What is a good name for a company that makes {product}?", + "id": "promptTemplate_0-input-template-string" }, { - "label": "BasePath", - "name": "basepath", - "type": "string", + "label": "Format Prompt Values", + "name": "promptValues", + "type": "json", "optional": true, - "additionalParams": true, - "id": "openAIEmbeddings_0-input-basepath-string" + "acceptVariable": true, + "list": true, + "id": "promptTemplate_0-input-promptValues-json" } ], "inputAnchors": [], "inputs": { - "stripNewLines": "", - "batchSize": "", - "timeout": "", - "basepath": "" + "template": "Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.\n\nChat History:\n{chat_history}\nFollow Up Input: {question}\nStandalone question:", + "promptValues": "{\"question\":\"{{question}}\",\"chat_history\":\"{{chat_history}}\"}" }, "outputAnchors": [ { - "id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "name": "openAIEmbeddings", - "label": "OpenAIEmbeddings", - "type": "OpenAIEmbeddings | Embeddings" + "id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", + "name": "promptTemplate", + "label": "PromptTemplate", + "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate | Runnable" } ], "outputs": {}, @@ -81,37 +57,45 @@ }, "selected": false, "positionAbsolute": { - "x": 1198.6643452533754, - "y": -584.4233173804803 + "x": 344.73370692733414, + "y": -122.34815000085804 }, "dragging": false }, { "width": 300, - "height": 475, - "id": "promptTemplate_0", + "height": 652, + "id": "chatPromptTemplate_0", "position": { - "x": 354.2706973608643, - "y": -122.34815000085804 + "x": 2290.8365353040026, + "y": -168.49082887954518 }, "type": "customNode", "data": { - "id": "promptTemplate_0", - "label": "Prompt Template", + "id": "chatPromptTemplate_0", + "label": "Chat Prompt Template", "version": 1, - "name": "promptTemplate", - "type": "PromptTemplate", - "baseClasses": ["PromptTemplate", "BaseStringPromptTemplate", "BasePromptTemplate", "Runnable"], + "name": "chatPromptTemplate", + "type": "ChatPromptTemplate", + "baseClasses": ["ChatPromptTemplate", "BaseChatPromptTemplate", "BasePromptTemplate", "Runnable"], "category": "Prompts", - 
"description": "Schema to represent a basic prompt for an LLM", + "description": "Schema to represent a chat prompt", "inputParams": [ { - "label": "Template", - "name": "template", + "label": "System Message", + "name": "systemMessagePrompt", "type": "string", "rows": 4, - "placeholder": "What is a good name for a company that makes {product}?", - "id": "promptTemplate_0-input-template-string" + "placeholder": "You are a helpful assistant that translates {input_language} to {output_language}.", + "id": "chatPromptTemplate_0-input-systemMessagePrompt-string" + }, + { + "label": "Human Message", + "name": "humanMessagePrompt", + "type": "string", + "rows": 4, + "placeholder": "{text}", + "id": "chatPromptTemplate_0-input-humanMessagePrompt-string" }, { "label": "Format Prompt Values", @@ -120,20 +104,21 @@ "optional": true, "acceptVariable": true, "list": true, - "id": "promptTemplate_0-input-promptValues-json" + "id": "chatPromptTemplate_0-input-promptValues-json" } ], "inputAnchors": [], "inputs": { - "template": "Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question.\n\nChat History:\n{chat_history}\nFollow Up Input: {question}\nStandalone question:", - "promptValues": "{\"question\":\"{{question}}\",\"chat_history\":\"{{chat_history}}\"}" + "systemMessagePrompt": "Using the provided context, answer the user's question to the best of your ability using the resources provided. If there is nothing in the context relevant to the question at hand, just say \"Hmm, I'm not sure.\" Don't try to make up an answer.\n\nAnything between the following \\`context\\` html blocks is retrieved from a knowledge bank, not part of the conversation with the user.\n\n\n {context}\n\n\nREMEMBER: If there is no relevant information within the context, just say \"Hmm, I'm not sure.\" Don't try to make up an answer. 
Anything between the preceding 'context' html blocks is retrieved from a knowledge bank, not part of the conversation with the user.", + "humanMessagePrompt": "{text}", + "promptValues": "{\"context\":\"{{vectorStoreToDocument_0.data.instance}}\",\"text\":\"{{question}}\"}" }, "outputAnchors": [ { - "id": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", - "name": "promptTemplate", - "label": "PromptTemplate", - "type": "PromptTemplate | BaseStringPromptTemplate | BasePromptTemplate | Runnable" + "id": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable", + "name": "chatPromptTemplate", + "label": "ChatPromptTemplate", + "type": "ChatPromptTemplate | BaseChatPromptTemplate | BasePromptTemplate | Runnable" } ], "outputs": {}, @@ -141,8 +126,272 @@ }, "selected": false, "positionAbsolute": { - "x": 354.2706973608643, - "y": -122.34815000085804 + "x": 2290.8365353040026, + "y": -168.49082887954518 + }, + "dragging": false + }, + { + "width": 300, + "height": 454, + "id": "vectorStoreToDocument_0", + "position": { + "x": 1906.6871314089658, + "y": -157.0046189166955 + }, + "type": "customNode", + "data": { + "id": "vectorStoreToDocument_0", + "label": "VectorStore To Document", + "version": 2, + "name": "vectorStoreToDocument", + "type": "Document", + "baseClasses": ["Document"], + "category": "Document Loaders", + "description": "Search documents with scores from vector store", + "inputParams": [ + { + "label": "Query", + "name": "query", + "type": "string", + "description": "Query to retrieve documents from vector database. If not specified, user question will be used", + "optional": true, + "acceptVariable": true, + "id": "vectorStoreToDocument_0-input-query-string" + }, + { + "label": "Minimum Score (%)", + "name": "minScore", + "type": "number", + "optional": true, + "placeholder": "75", + "step": 1, + "description": "Minumum score for embeddings documents to be included", + "id": "vectorStoreToDocument_0-input-minScore-number" + } + ], + "inputAnchors": [ + { + "label": "Vector Store", + "name": "vectorStore", + "type": "VectorStore", + "id": "vectorStoreToDocument_0-input-vectorStore-VectorStore" + } + ], + "inputs": { + "vectorStore": "{{singlestore_0.data.instance}}", + "query": "{{llmChain_2.data.instance}}", + "minScore": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "vectorStoreToDocument_0-output-document-Document", + "name": "document", + "label": "Document", + "type": "Document" + }, + { + "id": "vectorStoreToDocument_0-output-text-string|json", + "name": "text", + "label": "Text", + "type": "string | json" + } + ], + "default": "document" + } + ], + "outputs": { + "output": "text" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1906.6871314089658, + "y": -157.0046189166955 + }, + "dragging": false + }, + { + "width": 300, + "height": 456, + "id": "llmChain_2", + "position": { + "x": 756.2678342825631, + "y": -244.07972550448233 + }, + "type": "customNode", + "data": { + "id": "llmChain_2", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": 
true, + "id": "llmChain_2-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_2-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_2-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_2-input-outputParser-BaseLLMOutputParser" + } + ], + "inputs": { + "model": "{{chatOpenAI_0.data.instance}}", + "prompt": "{{promptTemplate_0.data.instance}}", + "outputParser": "", + "chainName": "RephraseQuestion" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_2-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_2-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "outputPrediction" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 756.2678342825631, + "y": -244.07972550448233 + }, + "dragging": false + }, + { + "width": 300, + "height": 456, + "id": "llmChain_1", + "position": { + "x": 2684.08901232628, + "y": -301.4742415779482 + }, + "type": "customNode", + "data": { + "id": "llmChain_1", + "label": "LLM Chain", + "version": 3, + "name": "llmChain", + "type": "LLMChain", + "baseClasses": ["LLMChain", "BaseChain", "Runnable"], + "category": "Chains", + "description": "Chain to run queries against LLMs", + "inputParams": [ + { + "label": "Chain Name", + "name": "chainName", + "type": "string", + "placeholder": "Name Your Chain", + "optional": true, + "id": "llmChain_1-input-chainName-string" + } + ], + "inputAnchors": [ + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "llmChain_1-input-model-BaseLanguageModel" + }, + { + "label": "Prompt", + "name": "prompt", + "type": "BasePromptTemplate", + "id": "llmChain_1-input-prompt-BasePromptTemplate" + }, + { + "label": "Output Parser", + "name": "outputParser", + "type": "BaseLLMOutputParser", + "optional": true, + "id": "llmChain_1-input-outputParser-BaseLLMOutputParser" + } + ], + "inputs": { + "model": "{{chatOpenAI_1.data.instance}}", + "prompt": "{{chatPromptTemplate_0.data.instance}}", + "outputParser": "", + "chainName": "FinalResponse" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|Runnable", + "name": "llmChain", + "label": "LLM Chain", + "type": "LLMChain | BaseChain | Runnable" + }, + { + "id": "llmChain_1-output-outputPrediction-string|json", + "name": "outputPrediction", + "label": "Output Prediction", + "type": "string | json" + } + ], + "default": "llmChain" + } + ], + "outputs": { + "output": "llmChain" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 2684.08901232628, + "y": -301.4742415779482 }, "dragging": false }, @@ -151,8 +400,8 @@ "height": 574, "id": "chatOpenAI_0", "position": { - "x": 353.5672832154869, - "y": -730.6436764835541 + "x": 339.96857057520754, + "y": -732.8078068632885 }, "type": "customNode", "data": { @@ -181,6 +430,14 @@ "label": "gpt-4", "name": "gpt-4" }, + { + "label": 
"gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, { "label": "gpt-4-0613", "name": "gpt-4-0613" @@ -197,6 +454,10 @@ "label": "gpt-3.5-turbo", "name": "gpt-3.5-turbo" }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, { "label": "gpt-3.5-turbo-0613", "name": "gpt-3.5-turbo-0613" @@ -274,7 +535,15 @@ "type": "string", "optional": true, "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" + "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -287,6 +556,7 @@ } ], "inputs": { + "cache": "", "modelName": "gpt-3.5-turbo-16k", "temperature": "0", "maxTokens": "", @@ -294,7 +564,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { @@ -308,19 +579,19 @@ "selected": false }, "selected": false, - "dragging": false, "positionAbsolute": { - "x": 353.5672832154869, - "y": -730.6436764835541 - } + "x": 339.96857057520754, + "y": -732.8078068632885 + }, + "dragging": false }, { "width": 300, "height": 574, "id": "chatOpenAI_1", "position": { - "x": 2281.9246645710673, - "y": -778.8379360672121 + "x": 2291.510577325338, + "y": -785.9138727666948 }, "type": "customNode", "data": { @@ -349,6 +620,14 @@ "label": "gpt-4", "name": "gpt-4" }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, { "label": "gpt-4-0613", "name": "gpt-4-0613" @@ -365,6 +644,10 @@ "label": "gpt-3.5-turbo", "name": "gpt-3.5-turbo" }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, { "label": "gpt-3.5-turbo-0613", "name": "gpt-3.5-turbo-0613" @@ -443,6 +726,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_1-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-baseOptions-json" } ], "inputAnchors": [ @@ -455,6 +746,7 @@ } ], "inputs": { + "cache": "", "modelName": "gpt-3.5-turbo-16k", "temperature": "0", "maxTokens": "", @@ -462,7 +754,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { @@ -476,330 +769,124 @@ "selected": false }, "selected": false, - "dragging": false, - "positionAbsolute": { - "x": 2281.9246645710673, - "y": -778.8379360672121 - } - }, - { - "width": 300, - "height": 505, - "id": "pineconeExistingIndex_0", - "position": { - "x": 1544.4998097474581, - "y": -628.8477510577202 - }, - "type": "customNode", - "data": { - "id": "pineconeExistingIndex_0", - "label": "Pinecone Load Existing Index", - "version": 1, - "name": "pineconeExistingIndex", - "type": "Pinecone", - "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Load existing index from Pinecone (i.e: Document has been upserted)", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["pineconeApi"], - "id": "pineconeExistingIndex_0-input-credential-credential" - }, - { - "label": "Pinecone Index", - "name": "pineconeIndex", - "type": "string", - "id": 
"pineconeExistingIndex_0-input-pineconeIndex-string" - }, - { - "label": "Pinecone Namespace", - "name": "pineconeNamespace", - "type": "string", - "placeholder": "my-first-namespace", - "additionalParams": true, - "optional": true, - "id": "pineconeExistingIndex_0-input-pineconeNamespace-string" - }, - { - "label": "Pinecone Metadata Filter", - "name": "pineconeMetadataFilter", - "type": "json", - "optional": true, - "additionalParams": true, - "id": "pineconeExistingIndex_0-input-pineconeMetadataFilter-json" - }, - { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. Default to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "pineconeExistingIndex_0-input-topK-number" - } - ], - "inputAnchors": [ - { - "label": "Embeddings", - "name": "embeddings", - "type": "Embeddings", - "id": "pineconeExistingIndex_0-input-embeddings-Embeddings" - } - ], - "inputs": { - "embeddings": "{{openAIEmbeddings_0.data.instance}}", - "pineconeIndex": "", - "pineconeNamespace": "", - "pineconeMetadataFilter": "", - "topK": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "pineconeExistingIndex_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Pinecone Retriever", - "type": "Pinecone | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", - "name": "vectorStore", - "label": "Pinecone Vector Store", - "type": "Pinecone | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { - "output": "vectorStore" - }, - "selected": false - }, - "selected": false, "positionAbsolute": { - "x": 1544.4998097474581, - "y": -628.8477510577202 + "x": 2291.510577325338, + "y": -785.9138727666948 }, "dragging": false }, { "width": 300, - "height": 652, - "id": "chatPromptTemplate_0", + "height": 654, + "id": "singlestore_0", "position": { - "x": 2290.8365353040026, - "y": -168.49082887954518 + "x": 1530.532503048084, + "y": -657.3586990397077 }, "type": "customNode", "data": { - "id": "chatPromptTemplate_0", - "label": "Chat Prompt Template", + "id": "singlestore_0", + "label": "SingleStore", "version": 1, - "name": "chatPromptTemplate", - "type": "ChatPromptTemplate", - "baseClasses": ["ChatPromptTemplate", "BaseChatPromptTemplate", "BasePromptTemplate", "Runnable"], - "category": "Prompts", - "description": "Schema to represent a chat prompt", - "inputParams": [ - { - "label": "System Message", - "name": "systemMessagePrompt", - "type": "string", - "rows": 4, - "placeholder": "You are a helpful assistant that translates {input_language} to {output_language}.", - "id": "chatPromptTemplate_0-input-systemMessagePrompt-string" - }, - { - "label": "Human Message", - "name": "humanMessagePrompt", - "type": "string", - "rows": 4, - "placeholder": "{text}", - "id": "chatPromptTemplate_0-input-humanMessagePrompt-string" - }, - { - "label": "Format Prompt Values", - "name": "promptValues", - "type": "json", - "optional": true, - "acceptVariable": true, - "list": true, - "id": "chatPromptTemplate_0-input-promptValues-json" - } - ], - "inputAnchors": [], - "inputs": { - "systemMessagePrompt": "Using the provided context, answer the user's question to the best of your ability using the resources provided. 
If there is nothing in the context relevant to the question at hand, just say \"Hmm, I'm not sure.\" Don't try to make up an answer.\n\nAnything between the following \\`context\\` html blocks is retrieved from a knowledge bank, not part of the conversation with the user.\n\n\n {context}\n\n\nREMEMBER: If there is no relevant information within the context, just say \"Hmm, I'm not sure.\" Don't try to make up an answer. Anything between the preceding 'context' html blocks is retrieved from a knowledge bank, not part of the conversation with the user.", - "humanMessagePrompt": "{text}", - "promptValues": "{\"context\":\"{{vectorStoreToDocument_0.data.instance}}\",\"text\":\"{{question}}\"}" - }, - "outputAnchors": [ - { - "id": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable", - "name": "chatPromptTemplate", - "label": "ChatPromptTemplate", - "type": "ChatPromptTemplate | BaseChatPromptTemplate | BasePromptTemplate | Runnable" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 2290.8365353040026, - "y": -168.49082887954518 - }, - "dragging": false - }, - { - "width": 300, - "height": 454, - "id": "vectorStoreToDocument_0", - "position": { - "x": 1906.6871314089658, - "y": -157.0046189166955 - }, - "type": "customNode", - "data": { - "id": "vectorStoreToDocument_0", - "label": "VectorStore To Document", - "version": 2, - "name": "vectorStoreToDocument", - "type": "Document", - "baseClasses": ["Document"], - "category": "Document Loaders", - "description": "Search documents with scores from vector store", - "inputParams": [ - { - "label": "Query", - "name": "query", - "type": "string", - "description": "Query to retrieve documents from vector database. 
If not specified, user question will be used", - "optional": true, - "acceptVariable": true, - "id": "vectorStoreToDocument_0-input-query-string" - }, - { - "label": "Minimum Score (%)", - "name": "minScore", - "type": "number", - "optional": true, - "placeholder": "75", - "step": 1, - "description": "Minumum score for embeddings documents to be included", - "id": "vectorStoreToDocument_0-input-minScore-number" - } - ], - "inputAnchors": [ - { - "label": "Vector Store", - "name": "vectorStore", - "type": "VectorStore", - "id": "vectorStoreToDocument_0-input-vectorStore-VectorStore" - } - ], - "inputs": { - "vectorStore": "{{pineconeExistingIndex_0.data.instance}}", - "query": "{{llmChain_2.data.instance}}", - "minScore": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "vectorStoreToDocument_0-output-document-Document", - "name": "document", - "label": "Document", - "type": "Document" - }, - { - "id": "vectorStoreToDocument_0-output-text-string|json", - "name": "text", - "label": "Text", - "type": "string | json" - } - ], - "default": "document" - } - ], - "outputs": { - "output": "text" - }, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1906.6871314089658, - "y": -157.0046189166955 - }, - "dragging": false - }, - { - "width": 300, - "height": 456, - "id": "llmChain_2", - "position": { - "x": 756.2678342825631, - "y": -244.07972550448233 - }, - "type": "customNode", - "data": { - "id": "llmChain_2", - "label": "LLM Chain", - "version": 3, - "name": "llmChain", - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "Chain to run queries against LLMs", + "name": "singlestore", + "type": "SingleStore", + "baseClasses": ["SingleStore", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Upsert or Load data to SingleStore Vector Database", "inputParams": [ { - "label": "Chain Name", - "name": "chainName", + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "description": "Needed when using SingleStore cloud hosted", + "optional": true, + "credentialNames": ["singleStoreApi"], + "id": "singlestore_0-input-credential-credential" + }, + { + "label": "Host", + "name": "host", "type": "string", - "placeholder": "Name Your Chain", + "id": "singlestore_0-input-host-string" + }, + { + "label": "Database", + "name": "database", + "type": "string", + "id": "singlestore_0-input-database-string" + }, + { + "label": "Table Name", + "name": "tableName", + "type": "string", + "placeholder": "embeddings", + "additionalParams": true, "optional": true, - "id": "llmChain_2-input-chainName-string" - } - ], - "inputAnchors": [ + "id": "singlestore_0-input-tableName-string" + }, { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_2-input-model-BaseLanguageModel" + "label": "Content Column Name", + "name": "contentColumnName", + "type": "string", + "placeholder": "content", + "additionalParams": true, + "optional": true, + "id": "singlestore_0-input-contentColumnName-string" }, { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_2-input-prompt-BasePromptTemplate" + "label": "Vector Column Name", + "name": "vectorColumnName", + "type": "string", + "placeholder": "vector", + "additionalParams": true, + "optional": true, + "id": "singlestore_0-input-vectorColumnName-string" }, { - "label": "Output 
Parser", - "name": "outputParser", - "type": "BaseLLMOutputParser", + "label": "Metadata Column Name", + "name": "metadataColumnName", + "type": "string", + "placeholder": "metadata", + "additionalParams": true, "optional": true, - "id": "llmChain_2-input-outputParser-BaseLLMOutputParser" + "id": "singlestore_0-input-metadataColumnName-string" + }, + { + "label": "Top K", + "name": "topK", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "singlestore_0-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "optional": true, + "id": "singlestore_0-input-document-Document" + }, + { + "label": "Embeddings", + "name": "embeddings", + "type": "Embeddings", + "id": "singlestore_0-input-embeddings-Embeddings" } ], "inputs": { - "model": "{{chatOpenAI_0.data.instance}}", - "prompt": "{{promptTemplate_0.data.instance}}", - "outputParser": "", - "chainName": "RephraseQuestion" + "document": "", + "embeddings": "{{openAIEmbeddings_0.data.instance}}", + "host": "", + "database": "", + "tableName": "", + "contentColumnName": "", + "vectorColumnName": "", + "metadataColumnName": "", + "topK": "" }, "outputAnchors": [ { @@ -808,146 +895,119 @@ "type": "options", "options": [ { - "id": "llmChain_2-output-llmChain-LLMChain|BaseChain|Runnable", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | Runnable" + "id": "singlestore_0-output-retriever-SingleStore|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "SingleStore Retriever", + "type": "SingleStore | VectorStoreRetriever | BaseRetriever" }, { - "id": "llmChain_2-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" + "id": "singlestore_0-output-vectorStore-SingleStore|VectorStore", + "name": "vectorStore", + "label": "SingleStore Vector Store", + "type": "SingleStore | VectorStore" } ], - "default": "llmChain" + "default": "retriever" } ], "outputs": { - "output": "outputPrediction" + "output": "vectorStore" }, "selected": false }, "selected": false, "positionAbsolute": { - "x": 756.2678342825631, - "y": -244.07972550448233 + "x": 1530.532503048084, + "y": -657.3586990397077 }, "dragging": false }, { "width": 300, - "height": 456, - "id": "llmChain_1", + "height": 329, + "id": "openAIEmbeddings_0", "position": { - "x": 2684.08901232628, - "y": -301.4742415779482 + "x": 1154.293946350955, + "y": -589.6072684085893 }, "type": "customNode", "data": { - "id": "llmChain_1", - "label": "LLM Chain", - "version": 3, - "name": "llmChain", - "type": "LLMChain", - "baseClasses": ["LLMChain", "BaseChain", "Runnable"], - "category": "Chains", - "description": "Chain to run queries against LLMs", + "id": "openAIEmbeddings_0", + "label": "OpenAI Embeddings", + "version": 1, + "name": "openAIEmbeddings", + "type": "OpenAIEmbeddings", + "baseClasses": ["OpenAIEmbeddings", "Embeddings"], + "category": "Embeddings", + "description": "OpenAI API to generate embeddings for a given text", "inputParams": [ { - "label": "Chain Name", - "name": "chainName", - "type": "string", - "placeholder": "Name Your Chain", + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["openAIApi"], + "id": "openAIEmbeddings_0-input-credential-credential" + }, + { + "label": "Strip New Lines", + "name": "stripNewLines", + "type": "boolean", "optional": true, - "id": "llmChain_1-input-chainName-string" - 
} - ], - "inputAnchors": [ + "additionalParams": true, + "id": "openAIEmbeddings_0-input-stripNewLines-boolean" + }, { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "llmChain_1-input-model-BaseLanguageModel" + "label": "Batch Size", + "name": "batchSize", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-batchSize-number" }, { - "label": "Prompt", - "name": "prompt", - "type": "BasePromptTemplate", - "id": "llmChain_1-input-prompt-BasePromptTemplate" + "label": "Timeout", + "name": "timeout", + "type": "number", + "optional": true, + "additionalParams": true, + "id": "openAIEmbeddings_0-input-timeout-number" }, { - "label": "Output Parser", - "name": "outputParser", - "type": "BaseLLMOutputParser", + "label": "BasePath", + "name": "basepath", + "type": "string", "optional": true, - "id": "llmChain_1-input-outputParser-BaseLLMOutputParser" + "additionalParams": true, + "id": "openAIEmbeddings_0-input-basepath-string" } ], + "inputAnchors": [], "inputs": { - "model": "{{chatOpenAI_1.data.instance}}", - "prompt": "{{chatPromptTemplate_0.data.instance}}", - "outputParser": "", - "chainName": "FinalResponse" + "stripNewLines": "", + "batchSize": "", + "timeout": "", + "basepath": "" }, "outputAnchors": [ { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "llmChain_1-output-llmChain-LLMChain|BaseChain|Runnable", - "name": "llmChain", - "label": "LLM Chain", - "type": "LLMChain | BaseChain | Runnable" - }, - { - "id": "llmChain_1-output-outputPrediction-string|json", - "name": "outputPrediction", - "label": "Output Prediction", - "type": "string | json" - } - ], - "default": "llmChain" + "id": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "name": "openAIEmbeddings", + "label": "OpenAIEmbeddings", + "type": "OpenAIEmbeddings | Embeddings" } ], - "outputs": { - "output": "llmChain" - }, + "outputs": {}, "selected": false }, "selected": false, "positionAbsolute": { - "x": 2684.08901232628, - "y": -301.4742415779482 + "x": 1154.293946350955, + "y": -589.6072684085893 }, "dragging": false } ], "edges": [ - { - "source": "openAIEmbeddings_0", - "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "pineconeExistingIndex_0", - "targetHandle": "pineconeExistingIndex_0-input-embeddings-Embeddings", - "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeExistingIndex_0-pineconeExistingIndex_0-input-embeddings-Embeddings", - "data": { - "label": "" - } - }, - { - "source": "pineconeExistingIndex_0", - "sourceHandle": "pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore", - "target": "vectorStoreToDocument_0", - "targetHandle": "vectorStoreToDocument_0-input-vectorStore-VectorStore", - "type": "buttonedge", - "id": "pineconeExistingIndex_0-pineconeExistingIndex_0-output-vectorStore-Pinecone|VectorStore-vectorStoreToDocument_0-vectorStoreToDocument_0-input-vectorStore-VectorStore", - "data": { - "label": "" - } - }, { "source": "vectorStoreToDocument_0", "sourceHandle": "vectorStoreToDocument_0-output-text-string|json", @@ -959,17 +1019,6 @@ "label": "" } }, - { - "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", - "target": "llmChain_2", - "targetHandle": "llmChain_2-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": 
"chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_2-llmChain_2-input-model-BaseLanguageModel", - "data": { - "label": "" - } - }, { "source": "promptTemplate_0", "sourceHandle": "promptTemplate_0-output-promptTemplate-PromptTemplate|BaseStringPromptTemplate|BasePromptTemplate|Runnable", @@ -992,6 +1041,28 @@ "label": "" } }, + { + "source": "chatPromptTemplate_0", + "sourceHandle": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable", + "target": "llmChain_1", + "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", + "type": "buttonedge", + "id": "chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", + "data": { + "label": "" + } + }, + { + "source": "chatOpenAI_0", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "llmChain_2", + "targetHandle": "llmChain_2-input-model-BaseLanguageModel", + "type": "buttonedge", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-llmChain_2-llmChain_2-input-model-BaseLanguageModel", + "data": { + "label": "" + } + }, { "source": "chatOpenAI_1", "sourceHandle": "chatOpenAI_1-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", @@ -1004,12 +1075,23 @@ } }, { - "source": "chatPromptTemplate_0", - "sourceHandle": "chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable", - "target": "llmChain_1", - "targetHandle": "llmChain_1-input-prompt-BasePromptTemplate", + "source": "singlestore_0", + "sourceHandle": "singlestore_0-output-vectorStore-SingleStore|VectorStore", + "target": "vectorStoreToDocument_0", + "targetHandle": "vectorStoreToDocument_0-input-vectorStore-VectorStore", "type": "buttonedge", - "id": "chatPromptTemplate_0-chatPromptTemplate_0-output-chatPromptTemplate-ChatPromptTemplate|BaseChatPromptTemplate|BasePromptTemplate|Runnable-llmChain_1-llmChain_1-input-prompt-BasePromptTemplate", + "id": "singlestore_0-singlestore_0-output-vectorStore-SingleStore|VectorStore-vectorStoreToDocument_0-vectorStoreToDocument_0-input-vectorStore-VectorStore", + "data": { + "label": "" + } + }, + { + "source": "openAIEmbeddings_0", + "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", + "target": "singlestore_0", + "targetHandle": "singlestore_0-input-embeddings-Embeddings", + "type": "buttonedge", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-singlestore_0-singlestore_0-input-embeddings-Embeddings", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/MRKLAgent.json b/packages/server/marketplaces/chatflows/ReAct Agent.json similarity index 78% rename from packages/server/marketplaces/chatflows/MRKLAgent.json rename to packages/server/marketplaces/chatflows/ReAct Agent.json index 697e4919553..b776dd37b98 100644 --- a/packages/server/marketplaces/chatflows/MRKLAgent.json +++ b/packages/server/marketplaces/chatflows/ReAct Agent.json @@ -1,5 +1,5 @@ { - "description": "An agent that uses the React Framework to decide what action to take", + "description": "An agent that uses ReAct logic to decide what action to take", "nodes": [ { "width": 300, @@ -13,8 +13,8 @@ "data": { "id": "calculator_1", 
"label": "Calculator", - "name": "calculator", "version": 1, + "name": "calculator", "type": "Calculator", "baseClasses": ["Calculator", "Tool", "StructuredTool", "BaseLangChain"], "category": "Tools", @@ -40,62 +40,6 @@ "selected": false, "dragging": false }, - { - "width": 300, - "height": 280, - "id": "mrklAgentLLM_0", - "position": { - "x": 1055.3271135179489, - "y": 245.36098016819074 - }, - "type": "customNode", - "data": { - "id": "mrklAgentLLM_0", - "label": "MRKL Agent for LLMs", - "name": "mrklAgentLLM", - "version": 1, - "type": "AgentExecutor", - "baseClasses": ["AgentExecutor", "BaseChain", "BaseLangChain"], - "category": "Agents", - "description": "Agent that uses the ReAct Framework to decide what action to take, optimized to be used with LLMs", - "inputParams": [], - "inputAnchors": [ - { - "label": "Allowed Tools", - "name": "tools", - "type": "Tool", - "list": true, - "id": "mrklAgentLLM_0-input-tools-Tool" - }, - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "mrklAgentLLM_0-input-model-BaseLanguageModel" - } - ], - "inputs": { - "tools": ["{{calculator_1.data.instance}}", "{{serper_0.data.instance}}"], - "model": "{{chatOpenAI_0.data.instance}}" - }, - "outputAnchors": [ - { - "id": "mrklAgentLLM_0-output-mrklAgentLLM-AgentExecutor|BaseChain|BaseLangChain", - "name": "mrklAgentLLM", - "label": "AgentExecutor", - "type": "AgentExecutor | BaseChain | BaseLangChain" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1055.3271135179489, - "y": 245.36098016819074 - }, - "dragging": false - }, { "width": 300, "height": 277, @@ -108,8 +52,8 @@ "data": { "id": "serper_0", "label": "Serper", - "name": "serper", "version": 1, + "name": "serper", "type": "Serper", "baseClasses": ["Serper", "Tool", "StructuredTool"], "category": "Tools", @@ -145,20 +89,20 @@ }, { "width": 300, - "height": 523, + "height": 574, "id": "chatOpenAI_0", "position": { - "x": 333.58931284721206, - "y": 416.98420974875927 + "x": -27.71074046118335, + "y": 243.62715178281059 }, "type": "customNode", "data": { "id": "chatOpenAI_0", "label": "ChatOpenAI", - "name": "chatOpenAI", "version": 2, + "name": "chatOpenAI", "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], "category": "Chat Models", "description": "Wrapper around OpenAI large language models that use the Chat endpoint", "inputParams": [ @@ -178,6 +122,14 @@ "label": "gpt-4", "name": "gpt-4" }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, { "label": "gpt-4-0613", "name": "gpt-4-0613" @@ -194,6 +146,10 @@ "label": "gpt-3.5-turbo", "name": "gpt-3.5-turbo" }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, { "label": "gpt-3.5-turbo-0613", "name": "gpt-3.5-turbo-0613" @@ -215,6 +171,7 @@ "label": "Temperature", "name": "temperature", "type": "number", + "step": 0.1, "default": 0.9, "optional": true, "id": "chatOpenAI_0-input-temperature-number" @@ -223,6 +180,7 @@ "label": "Max Tokens", "name": "maxTokens", "type": "number", + "step": 1, "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-maxTokens-number" @@ -231,6 +189,7 @@ "label": "Top Probability", "name": "topP", "type": "number", + "step": 0.1, "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-topP-number" @@ -239,6 
+198,7 @@ "label": "Frequency Penalty", "name": "frequencyPenalty", "type": "number", + "step": 0.1, "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-frequencyPenalty-number" @@ -247,6 +207,7 @@ "label": "Presence Penalty", "name": "presencePenalty", "type": "number", + "step": 0.1, "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-presencePenalty-number" @@ -255,6 +216,7 @@ "label": "Timeout", "name": "timeout", "type": "number", + "step": 1, "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-timeout-number" @@ -266,6 +228,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -278,6 +248,7 @@ } ], "inputs": { + "cache": "", "modelName": "gpt-3.5-turbo", "temperature": 0.9, "maxTokens": "", @@ -285,14 +256,15 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", "name": "chatOpenAI", "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel" + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" } ], "outputs": {}, @@ -300,20 +272,75 @@ }, "selected": false, "positionAbsolute": { - "x": 333.58931284721206, - "y": 416.98420974875927 + "x": -27.71074046118335, + "y": 243.62715178281059 }, "dragging": false + }, + { + "width": 300, + "height": 280, + "id": "mrklAgentChat_0", + "position": { + "x": 1090.2058867451212, + "y": 423.2174695788541 + }, + "type": "customNode", + "data": { + "id": "mrklAgentChat_0", + "label": "ReAct Agent for Chat Models", + "version": 1, + "name": "mrklAgentChat", + "type": "AgentExecutor", + "baseClasses": ["AgentExecutor", "BaseChain", "Runnable"], + "category": "Agents", + "description": "Agent that uses the ReAct logic to decide what action to take, optimized to be used with Chat Models", + "inputParams": [], + "inputAnchors": [ + { + "label": "Allowed Tools", + "name": "tools", + "type": "Tool", + "list": true, + "id": "mrklAgentChat_0-input-tools-Tool" + }, + { + "label": "Language Model", + "name": "model", + "type": "BaseLanguageModel", + "id": "mrklAgentChat_0-input-model-BaseLanguageModel" + } + ], + "inputs": { + "tools": ["{{calculator_1.data.instance}}", "{{serper_0.data.instance}}"], + "model": "{{chatOpenAI_0.data.instance}}" + }, + "outputAnchors": [ + { + "id": "mrklAgentChat_0-output-mrklAgentChat-AgentExecutor|BaseChain|Runnable", + "name": "mrklAgentChat", + "label": "AgentExecutor", + "type": "AgentExecutor | BaseChain | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "positionAbsolute": { + "x": 1090.2058867451212, + "y": 423.2174695788541 + }, + "selected": false } ], "edges": [ { "source": "calculator_1", "sourceHandle": "calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain", - "target": "mrklAgentLLM_0", - "targetHandle": "mrklAgentLLM_0-input-tools-Tool", + "target": "mrklAgentChat_0", + "targetHandle": "mrklAgentChat_0-input-tools-Tool", "type": "buttonedge", - "id": "calculator_1-calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain-mrklAgentLLM_0-mrklAgentLLM_0-input-tools-Tool", + 
"id": "calculator_1-calculator_1-output-calculator-Calculator|Tool|StructuredTool|BaseLangChain-mrklAgentChat_0-mrklAgentChat_0-input-tools-Tool", "data": { "label": "" } @@ -321,21 +348,21 @@ { "source": "serper_0", "sourceHandle": "serper_0-output-serper-Serper|Tool|StructuredTool", - "target": "mrklAgentLLM_0", - "targetHandle": "mrklAgentLLM_0-input-tools-Tool", + "target": "mrklAgentChat_0", + "targetHandle": "mrklAgentChat_0-input-tools-Tool", "type": "buttonedge", - "id": "serper_0-serper_0-output-serper-Serper|Tool|StructuredTool-mrklAgentLLM_0-mrklAgentLLM_0-input-tools-Tool", + "id": "serper_0-serper_0-output-serper-Serper|Tool|StructuredTool-mrklAgentChat_0-mrklAgentChat_0-input-tools-Tool", "data": { "label": "" } }, { "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "target": "mrklAgentLLM_0", - "targetHandle": "mrklAgentLLM_0-input-model-BaseLanguageModel", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "target": "mrklAgentChat_0", + "targetHandle": "mrklAgentChat_0-input-model-BaseLanguageModel", "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-mrklAgentLLM_0-mrklAgentLLM_0-input-model-BaseLanguageModel", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-mrklAgentChat_0-mrklAgentChat_0-input-model-BaseLanguageModel", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/SQL DB Chain.json b/packages/server/marketplaces/chatflows/SQL DB Chain.json index 3b32efe097c..026a03d8a02 100644 --- a/packages/server/marketplaces/chatflows/SQL DB Chain.json +++ b/packages/server/marketplaces/chatflows/SQL DB Chain.json @@ -124,6 +124,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -143,7 +151,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { diff --git a/packages/server/marketplaces/chatflows/Simple Conversation Chain.json b/packages/server/marketplaces/chatflows/Simple Conversation Chain.json index f5fac38e9a5..2dac382345e 100644 --- a/packages/server/marketplaces/chatflows/Simple Conversation Chain.json +++ b/packages/server/marketplaces/chatflows/Simple Conversation Chain.json @@ -182,6 +182,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -201,7 +209,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { diff --git a/packages/server/marketplaces/chatflows/Translator.json b/packages/server/marketplaces/chatflows/Translator.json index 7cbe5ac7a0a..f1fa0764ff1 100644 --- a/packages/server/marketplaces/chatflows/Translator.json +++ b/packages/server/marketplaces/chatflows/Translator.json @@ -193,6 +193,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + 
"type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -212,7 +220,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { diff --git a/packages/server/marketplaces/chatflows/Vectara LLM Chain Upload.json b/packages/server/marketplaces/chatflows/Vectara LLM Chain Upload.json index 5fc540b0561..4f35bd4cc32 100644 --- a/packages/server/marketplaces/chatflows/Vectara LLM Chain Upload.json +++ b/packages/server/marketplaces/chatflows/Vectara LLM Chain Upload.json @@ -3,123 +3,12 @@ "nodes": [ { "width": 300, - "height": 524, - "id": "vectaraUpload_0", - "position": { "x": 219.0098475967174, "y": 189.74396248534583 }, - "type": "customNode", - "data": { - "id": "vectaraUpload_0", - "label": "Vectara Upload File", - "version": 1, - "name": "vectaraUpload", - "type": "Vectara", - "baseClasses": ["Vectara", "VectorStoreRetriever", "BaseRetriever"], - "category": "Vector Stores", - "description": "Upload files to Vectara", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["vectaraApi"], - "id": "vectaraUpload_0-input-credential-credential" - }, - { - "label": "File", - "name": "file", - "description": "File to upload to Vectara. Supported file types: https://docs.vectara.com/docs/api-reference/indexing-apis/file-upload/file-upload-filetypes", - "type": "file", - "id": "vectaraUpload_0-input-file-file" - }, - { - "label": "Metadata Filter", - "name": "filter", - "description": "Filter to apply to Vectara metadata. Refer to the documentation on how to use Vectara filters with Flowise.", - "type": "string", - "additionalParams": true, - "optional": true, - "id": "vectaraUpload_0-input-filter-string" - }, - { - "label": "Sentences Before", - "name": "sentencesBefore", - "description": "Number of sentences to fetch before the matched sentence. Defaults to 2.", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "vectaraUpload_0-input-sentencesBefore-number" - }, - { - "label": "Sentences After", - "name": "sentencesAfter", - "description": "Number of sentences to fetch after the matched sentence. Defaults to 2.", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "vectaraUpload_0-input-sentencesAfter-number" - }, - { - "label": "Lambda", - "name": "lambda", - "description": "Improves retrieval accuracy by adjusting the balance (from 0 to 1) between neural search and keyword-based search factors.", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "vectaraUpload_0-input-lambda-number" - }, - { - "label": "Top K", - "name": "topK", - "description": "Number of top results to fetch. 
Defaults to 4", - "placeholder": "4", - "type": "number", - "additionalParams": true, - "optional": true, - "id": "vectaraUpload_0-input-topK-number" - } - ], - "inputAnchors": [], - "inputs": { - "filter": "", - "sentencesBefore": "", - "sentencesAfter": "", - "lambda": "", - "topK": "" - }, - "outputAnchors": [ - { - "name": "output", - "label": "Output", - "type": "options", - "options": [ - { - "id": "vectaraUpload_0-output-retriever-Vectara|VectorStoreRetriever|BaseRetriever", - "name": "retriever", - "label": "Vectara Retriever", - "type": "Vectara | VectorStoreRetriever | BaseRetriever" - }, - { - "id": "vectaraUpload_0-output-vectorStore-Vectara|VectorStore", - "name": "vectorStore", - "label": "Vectara Vector Store", - "type": "Vectara | VectorStore" - } - ], - "default": "retriever" - } - ], - "outputs": { "output": "retriever" }, - "selected": false - }, - "selected": false, - "positionAbsolute": { "x": 219.0098475967174, "y": 189.74396248534583 }, - "dragging": false - }, - { - "width": 300, - "height": 525, + "height": 574, "id": "chatOpenAI_0", - "position": { "x": 669.6533996522251, "y": 177.86181519287192 }, + "position": { + "x": 581.1784360612766, + "y": -229.3906666911439 + }, "type": "customNode", "data": { "id": "chatOpenAI_0", @@ -143,13 +32,34 @@ "name": "modelName", "type": "options", "options": [ - { "label": "gpt-4", "name": "gpt-4" }, - { "label": "gpt-4-0613", "name": "gpt-4-0613" }, - { "label": "gpt-4-32k", "name": "gpt-4-32k" }, - { "label": "gpt-4-32k-0613", "name": "gpt-4-32k-0613" }, - { "label": "gpt-3.5-turbo", "name": "gpt-3.5-turbo" }, - { "label": "gpt-3.5-turbo-0613", "name": "gpt-3.5-turbo-0613" }, - { "label": "gpt-3.5-turbo-16k", "name": "gpt-3.5-turbo-16k" }, + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" + }, + { + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" + }, + { + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" + }, { "label": "gpt-3.5-turbo-16k-0613", "name": "gpt-3.5-turbo-16k-0613" @@ -241,7 +151,7 @@ ], "inputs": { "modelName": "gpt-3.5-turbo", - "temperature": "0.5", + "temperature": "0.6", "maxTokens": "", "topP": "", "frequencyPenalty": "", @@ -262,14 +172,20 @@ "selected": false }, "selected": false, - "positionAbsolute": { "x": 669.6533996522251, "y": 177.86181519287192 }, + "positionAbsolute": { + "x": 581.1784360612766, + "y": -229.3906666911439 + }, "dragging": false }, { "width": 300, - "height": 481, + "height": 480, "id": "conversationalRetrievalQAChain_0", - "position": { "x": 1135.5490908971935, "y": 201.62146241822506 }, + "position": { + "x": 979.9713511176517, + "y": 200.09513217589273 + }, "type": "customNode", "data": { "id": "conversationalRetrievalQAChain_0", @@ -348,7 +264,7 @@ ], "inputs": { "model": "{{chatOpenAI_0.data.instance}}", - "vectorStoreRetriever": "{{vectaraUpload_0.data.instance}}", + "vectorStoreRetriever": "{{vectara_0.data.instance}}", "memory": "", "returnSourceDocuments": true, "systemMessagePrompt": "", @@ -367,19 +283,146 @@ }, "selected": false, "dragging": false, - "positionAbsolute": { "x": 1135.5490908971935, "y": 201.62146241822506 } + "positionAbsolute": { + "x": 979.9713511176517, + "y": 200.09513217589273 + } + }, + { + "width": 300, + "height": 535, + "id": "vectara_0", + "position": { + "x": 199.28476672510158, + "y": 
177.63260741741112 + }, + "type": "customNode", + "data": { + "id": "vectara_0", + "label": "Vectara", + "version": 1, + "name": "vectara", + "type": "Vectara", + "baseClasses": ["Vectara", "VectorStoreRetriever", "BaseRetriever"], + "category": "Vector Stores", + "description": "Upsert or Load data to Vectara Vector Database", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["vectaraApi"], + "id": "vectara_0-input-credential-credential" + }, + { + "label": "File", + "name": "file", + "description": "File to upload to Vectara. Supported file types: https://docs.vectara.com/docs/api-reference/indexing-apis/file-upload/file-upload-filetypes", + "type": "file", + "optional": true, + "id": "vectara_0-input-file-file" + }, + { + "label": "Metadata Filter", + "name": "filter", + "description": "Filter to apply to Vectara metadata. Refer to the documentation on how to use Vectara filters with Flowise.", + "type": "string", + "additionalParams": true, + "optional": true, + "id": "vectara_0-input-filter-string" + }, + { + "label": "Sentences Before", + "name": "sentencesBefore", + "description": "Number of sentences to fetch before the matched sentence. Defaults to 2.", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "vectara_0-input-sentencesBefore-number" + }, + { + "label": "Sentences After", + "name": "sentencesAfter", + "description": "Number of sentences to fetch after the matched sentence. Defaults to 2.", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "vectara_0-input-sentencesAfter-number" + }, + { + "label": "Lambda", + "name": "lambda", + "description": "Improves retrieval accuracy by adjusting the balance (from 0 to 1) between neural search and keyword-based search factors.", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "vectara_0-input-lambda-number" + }, + { + "label": "Top K", + "name": "topK", + "description": "Number of top results to fetch. 
Defaults to 4", + "placeholder": "4", + "type": "number", + "additionalParams": true, + "optional": true, + "id": "vectara_0-input-topK-number" + } + ], + "inputAnchors": [ + { + "label": "Document", + "name": "document", + "type": "Document", + "list": true, + "optional": true, + "id": "vectara_0-input-document-Document" + } + ], + "inputs": { + "document": "", + "filter": "", + "sentencesBefore": "", + "sentencesAfter": "", + "lambda": "", + "topK": "" + }, + "outputAnchors": [ + { + "name": "output", + "label": "Output", + "type": "options", + "options": [ + { + "id": "vectara_0-output-retriever-Vectara|VectorStoreRetriever|BaseRetriever", + "name": "retriever", + "label": "Vectara Retriever", + "type": "Vectara | VectorStoreRetriever | BaseRetriever" + }, + { + "id": "vectara_0-output-vectorStore-Vectara|VectorStore", + "name": "vectorStore", + "label": "Vectara Vector Store", + "type": "Vectara | VectorStore" + } + ], + "default": "retriever" + } + ], + "outputs": { + "output": "retriever" + }, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 199.28476672510158, + "y": 177.63260741741112 + }, + "dragging": false } ], "edges": [ - { - "source": "vectaraUpload_0", - "sourceHandle": "vectaraUpload_0-output-retriever-Vectara|VectorStoreRetriever|BaseRetriever", - "target": "conversationalRetrievalQAChain_0", - "targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", - "type": "buttonedge", - "id": "vectaraUpload_0-vectaraUpload_0-output-retriever-Vectara|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", - "data": { "label": "" } - }, { "source": "chatOpenAI_0", "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", @@ -387,7 +430,20 @@ "targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", "type": "buttonedge", "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", - "data": { "label": "" } + "data": { + "label": "" + } + }, + { + "source": "vectara_0", + "sourceHandle": "vectara_0-output-retriever-Vectara|VectorStoreRetriever|BaseRetriever", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "type": "buttonedge", + "id": "vectara_0-vectara_0-output-retriever-Vectara|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", + "data": { + "label": "" + } } ] } diff --git a/packages/server/marketplaces/chatflows/WebBrowser.json b/packages/server/marketplaces/chatflows/WebBrowser.json index b784f9ab734..2f6fb721878 100644 --- a/packages/server/marketplaces/chatflows/WebBrowser.json +++ b/packages/server/marketplaces/chatflows/WebBrowser.json @@ -311,6 +311,14 @@ "optional": true, "additionalParams": true, "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" } ], "inputAnchors": [ @@ -330,7 +338,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { @@ -557,6 +566,14 @@ "optional": true, 
"additionalParams": true, "id": "chatOpenAI_1-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_1-input-baseOptions-json" } ], "inputAnchors": [ @@ -576,7 +593,8 @@ "frequencyPenalty": "", "presencePenalty": "", "timeout": "", - "basepath": "" + "basepath": "", + "baseOptions": "" }, "outputAnchors": [ { diff --git a/packages/server/marketplaces/chatflows/WebPage QnA.json b/packages/server/marketplaces/chatflows/WebPage QnA.json index c4bbc22d4cc..da05721b9f1 100644 --- a/packages/server/marketplaces/chatflows/WebPage QnA.json +++ b/packages/server/marketplaces/chatflows/WebPage QnA.json @@ -4,173 +4,11 @@ "nodes": [ { "width": 300, - "height": 523, - "id": "chatOpenAI_0", - "position": { - "x": 1542.965468159417, - "y": -200.10756989974368 - }, - "type": "customNode", - "data": { - "id": "chatOpenAI_0", - "label": "ChatOpenAI", - "version": 2, - "name": "chatOpenAI", - "type": "ChatOpenAI", - "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel"], - "category": "Chat Models", - "description": "Wrapper around OpenAI large language models that use the Chat endpoint", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "chatOpenAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "options", - "options": [ - { - "label": "gpt-4", - "name": "gpt-4" - }, - { - "label": "gpt-4-0613", - "name": "gpt-4-0613" - }, - { - "label": "gpt-4-32k", - "name": "gpt-4-32k" - }, - { - "label": "gpt-4-32k-0613", - "name": "gpt-4-32k-0613" - }, - { - "label": "gpt-3.5-turbo", - "name": "gpt-3.5-turbo" - }, - { - "label": "gpt-3.5-turbo-0613", - "name": "gpt-3.5-turbo-0613" - }, - { - "label": "gpt-3.5-turbo-16k", - "name": "gpt-3.5-turbo-16k" - }, - { - "label": "gpt-3.5-turbo-16k-0613", - "name": "gpt-3.5-turbo-16k-0613" - } - ], - "default": "gpt-3.5-turbo", - "optional": true, - "id": "chatOpenAI_0-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "default": 0.9, - "optional": true, - "id": "chatOpenAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": "maxTokens", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-topP-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-presencePenalty-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "chatOpenAI_0-input-basepath-string" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "chatOpenAI_0-input-cache-BaseCache" - } - ], - "inputs": { - "modelName": "gpt-3.5-turbo-16k", - 
"temperature": "0.9", - "maxTokens": "", - "topP": "", - "frequencyPenalty": "", - "presencePenalty": "", - "timeout": "", - "basepath": "" - }, - "outputAnchors": [ - { - "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", - "name": "chatOpenAI", - "label": "ChatOpenAI", - "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel" - } - ], - "outputs": {}, - "selected": false - }, - "positionAbsolute": { - "x": 1542.965468159417, - "y": -200.10756989974368 - }, - "selected": false, - "dragging": false - }, - { - "width": 300, - "height": 328, + "height": 329, "id": "openAIEmbeddings_0", "position": { - "x": 827.6835380475393, - "y": 253.8955254525015 + "x": 825.9524798523752, + "y": 243.50917628151723 }, "type": "customNode", "data": { @@ -243,8 +81,8 @@ }, "selected": false, "positionAbsolute": { - "x": 827.6835380475393, - "y": 253.8955254525015 + "x": 825.9524798523752, + "y": 243.50917628151723 }, "dragging": false }, @@ -314,7 +152,7 @@ }, { "width": 300, - "height": 479, + "height": 480, "id": "conversationalRetrievalQAChain_0", "position": { "x": 1882.5543981868987, @@ -398,8 +236,8 @@ ], "inputs": { "model": "{{chatOpenAI_0.data.instance}}", - "vectorStoreRetriever": "{{pineconeUpsert_0.data.instance}}", - "memory": "{{motorheadMemory_0.data.instance}}", + "vectorStoreRetriever": "{{pinecone_0.data.instance}}", + "memory": "{{RedisBackedChatMemory_0.data.instance}}", "returnSourceDocuments": true, "systemMessagePrompt": "I want you to act as a document that I am having a conversation with. Your name is \"AI Assistant\". You will provide me with answers from the given context. If the answer is not included, say exactly \"Hmm, I am not sure.\" and stop after that. Do not make up any information that is not in the context. Refuse to answer any question not about the info. 
Never break character.", "chainOption": "" @@ -427,8 +265,8 @@ "height": 380, "id": "cheerioWebScraper_0", "position": { - "x": 831.9867292136466, - "y": -181.92350323746112 + "x": 825.0624964329904, + "y": -183.65456143262517 }, "type": "customNode", "data": { @@ -525,42 +363,311 @@ }, "selected": false, "positionAbsolute": { - "x": 831.9867292136466, - "y": -181.92350323746112 + "x": 825.0624964329904, + "y": -183.65456143262517 + }, + "dragging": false + }, + { + "width": 300, + "height": 574, + "id": "chatOpenAI_0", + "position": { + "x": 1530.2074695018944, + "y": -247.5543013399219 + }, + "type": "customNode", + "data": { + "id": "chatOpenAI_0", + "label": "ChatOpenAI", + "version": 2, + "name": "chatOpenAI", + "type": "ChatOpenAI", + "baseClasses": ["ChatOpenAI", "BaseChatModel", "BaseLanguageModel", "Runnable"], + "category": "Chat Models", + "description": "Wrapper around OpenAI large language models that use the Chat endpoint", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "credentialNames": ["openAIApi"], + "id": "chatOpenAI_0-input-credential-credential" + }, + { + "label": "Model Name", + "name": "modelName", + "type": "options", + "options": [ + { + "label": "gpt-4", + "name": "gpt-4" + }, + { + "label": "gpt-4-1106-preview", + "name": "gpt-4-1106-preview" + }, + { + "label": "gpt-4-vision-preview", + "name": "gpt-4-vision-preview" + }, + { + "label": "gpt-4-0613", + "name": "gpt-4-0613" + }, + { + "label": "gpt-4-32k", + "name": "gpt-4-32k" + }, + { + "label": "gpt-4-32k-0613", + "name": "gpt-4-32k-0613" + }, + { + "label": "gpt-3.5-turbo", + "name": "gpt-3.5-turbo" + }, + { + "label": "gpt-3.5-turbo-1106", + "name": "gpt-3.5-turbo-1106" + }, + { + "label": "gpt-3.5-turbo-0613", + "name": "gpt-3.5-turbo-0613" + }, + { + "label": "gpt-3.5-turbo-16k", + "name": "gpt-3.5-turbo-16k" + }, + { + "label": "gpt-3.5-turbo-16k-0613", + "name": "gpt-3.5-turbo-16k-0613" + } + ], + "default": "gpt-3.5-turbo", + "optional": true, + "id": "chatOpenAI_0-input-modelName-options" + }, + { + "label": "Temperature", + "name": "temperature", + "type": "number", + "step": 0.1, + "default": 0.9, + "optional": true, + "id": "chatOpenAI_0-input-temperature-number" + }, + { + "label": "Max Tokens", + "name": "maxTokens", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-maxTokens-number" + }, + { + "label": "Top Probability", + "name": "topP", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-topP-number" + }, + { + "label": "Frequency Penalty", + "name": "frequencyPenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-frequencyPenalty-number" + }, + { + "label": "Presence Penalty", + "name": "presencePenalty", + "type": "number", + "step": 0.1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-presencePenalty-number" + }, + { + "label": "Timeout", + "name": "timeout", + "type": "number", + "step": 1, + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-timeout-number" + }, + { + "label": "BasePath", + "name": "basepath", + "type": "string", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-basepath-string" + }, + { + "label": "BaseOptions", + "name": "baseOptions", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "chatOpenAI_0-input-baseOptions-json" + } + ], 
+ "inputAnchors": [ + { + "label": "Cache", + "name": "cache", + "type": "BaseCache", + "optional": true, + "id": "chatOpenAI_0-input-cache-BaseCache" + } + ], + "inputs": { + "cache": "", + "modelName": "gpt-3.5-turbo", + "temperature": 0.9, + "maxTokens": "", + "topP": "", + "frequencyPenalty": "", + "presencePenalty": "", + "timeout": "", + "basepath": "", + "baseOptions": "" + }, + "outputAnchors": [ + { + "id": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", + "name": "chatOpenAI", + "label": "ChatOpenAI", + "type": "ChatOpenAI | BaseChatModel | BaseLanguageModel | Runnable" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1530.2074695018944, + "y": -247.5543013399219 + }, + "dragging": false + }, + { + "width": 300, + "height": 329, + "id": "RedisBackedChatMemory_0", + "position": { + "x": 1203.0374706158896, + "y": 420.6341619933999 + }, + "type": "customNode", + "data": { + "id": "RedisBackedChatMemory_0", + "label": "Redis-Backed Chat Memory", + "version": 2, + "name": "RedisBackedChatMemory", + "type": "RedisBackedChatMemory", + "baseClasses": ["RedisBackedChatMemory", "BaseChatMemory", "BaseMemory"], + "category": "Memory", + "description": "Summarizes the conversation and stores the memory in Redis server", + "inputParams": [ + { + "label": "Connect Credential", + "name": "credential", + "type": "credential", + "optional": true, + "credentialNames": ["redisCacheApi", "redisCacheUrlApi"], + "id": "RedisBackedChatMemory_0-input-credential-credential" + }, + { + "label": "Session Id", + "name": "sessionId", + "type": "string", + "description": "If not specified, the first CHAT_MESSAGE_ID will be used as sessionId", + "default": "", + "additionalParams": true, + "optional": true, + "id": "RedisBackedChatMemory_0-input-sessionId-string" + }, + { + "label": "Session Timeouts", + "name": "sessionTTL", + "type": "number", + "description": "Omit this parameter to make sessions never expire", + "additionalParams": true, + "optional": true, + "id": "RedisBackedChatMemory_0-input-sessionTTL-number" + }, + { + "label": "Memory Key", + "name": "memoryKey", + "type": "string", + "default": "chat_history", + "additionalParams": true, + "id": "RedisBackedChatMemory_0-input-memoryKey-string" + } + ], + "inputAnchors": [], + "inputs": { + "sessionId": "", + "sessionTTL": "", + "memoryKey": "chat_history" + }, + "outputAnchors": [ + { + "id": "RedisBackedChatMemory_0-output-RedisBackedChatMemory-RedisBackedChatMemory|BaseChatMemory|BaseMemory", + "name": "RedisBackedChatMemory", + "label": "RedisBackedChatMemory", + "type": "RedisBackedChatMemory | BaseChatMemory | BaseMemory" + } + ], + "outputs": {}, + "selected": false + }, + "selected": false, + "positionAbsolute": { + "x": 1203.0374706158896, + "y": 420.6341619933999 }, "dragging": false }, { "width": 300, "height": 555, - "id": "pineconeUpsert_0", + "id": "pinecone_0", "position": { - "x": 1179.6228496246993, - "y": -167.023255532671 + "x": 1194.3821796400694, + "y": -162.7324497768837 }, "type": "customNode", "data": { - "id": "pineconeUpsert_0", - "label": "Pinecone Upsert Document", + "id": "pinecone_0", + "label": "Pinecone", "version": 1, - "name": "pineconeUpsert", + "name": "pinecone", "type": "Pinecone", "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert documents to Pinecone", + "description": "Upsert or Load data to Pinecone Vector Database", "inputParams": [ { "label": 
"Connect Credential", "name": "credential", "type": "credential", "credentialNames": ["pineconeApi"], - "id": "pineconeUpsert_0-input-credential-credential" + "id": "pinecone_0-input-credential-credential" }, { "label": "Pinecone Index", "name": "pineconeIndex", "type": "string", - "id": "pineconeUpsert_0-input-pineconeIndex-string" + "id": "pinecone_0-input-pineconeIndex-string" }, { "label": "Pinecone Namespace", @@ -569,7 +676,15 @@ "placeholder": "my-first-namespace", "additionalParams": true, "optional": true, - "id": "pineconeUpsert_0-input-pineconeNamespace-string" + "id": "pinecone_0-input-pineconeNamespace-string" + }, + { + "label": "Pinecone Metadata Filter", + "name": "pineconeMetadataFilter", + "type": "json", + "optional": true, + "additionalParams": true, + "id": "pinecone_0-input-pineconeMetadataFilter-json" }, { "label": "Top K", @@ -579,7 +694,7 @@ "type": "number", "additionalParams": true, "optional": true, - "id": "pineconeUpsert_0-input-topK-number" + "id": "pinecone_0-input-topK-number" } ], "inputAnchors": [ @@ -588,13 +703,14 @@ "name": "document", "type": "Document", "list": true, - "id": "pineconeUpsert_0-input-document-Document" + "optional": true, + "id": "pinecone_0-input-document-Document" }, { "label": "Embeddings", "name": "embeddings", "type": "Embeddings", - "id": "pineconeUpsert_0-input-embeddings-Embeddings" + "id": "pinecone_0-input-embeddings-Embeddings" } ], "inputs": { @@ -602,6 +718,7 @@ "embeddings": "{{openAIEmbeddings_0.data.instance}}", "pineconeIndex": "", "pineconeNamespace": "", + "pineconeMetadataFilter": "", "topK": "" }, "outputAnchors": [ @@ -611,13 +728,13 @@ "type": "options", "options": [ { - "id": "pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "id": "pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", "name": "retriever", "label": "Pinecone Retriever", "type": "Pinecone | VectorStoreRetriever | BaseRetriever" }, { - "id": "pineconeUpsert_0-output-vectorStore-Pinecone|VectorStore", + "id": "pinecone_0-output-vectorStore-Pinecone|VectorStore", "name": "vectorStore", "label": "Pinecone Vector Store", "type": "Pinecone | VectorStore" @@ -633,110 +750,42 @@ }, "selected": false, "positionAbsolute": { - "x": 1179.6228496246993, - "y": -167.023255532671 - }, - "dragging": false - }, - { - "width": 300, - "height": 427, - "id": "motorheadMemory_0", - "position": { - "x": 1202.1545938923578, - "y": 425.69055061366237 - }, - "type": "customNode", - "data": { - "id": "motorheadMemory_0", - "label": "Motorhead Memory", - "version": 1, - "name": "motorheadMemory", - "type": "MotorheadMemory", - "baseClasses": ["MotorheadMemory", "BaseChatMemory", "BaseMemory"], - "category": "Memory", - "description": "Use Motorhead Memory to store chat conversations", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "optional": true, - "description": "Only needed when using hosted solution - https://getmetal.io", - "credentialNames": ["motorheadMemoryApi"], - "id": "motorheadMemory_0-input-credential-credential" - }, - { - "label": "Base URL", - "name": "baseURL", - "type": "string", - "optional": true, - "description": "To use the online version, leave the URL blank. 
More details at https://getmetal.io.", - "id": "motorheadMemory_0-input-baseURL-string" - }, - { - "label": "Session Id", - "name": "sessionId", - "type": "string", - "description": "If not specified, the first CHAT_MESSAGE_ID will be used as sessionId", - "default": "", - "additionalParams": true, - "optional": true, - "id": "motorheadMemory_0-input-sessionId-string" - }, - { - "label": "Memory Key", - "name": "memoryKey", - "type": "string", - "default": "chat_history", - "additionalParams": true, - "id": "motorheadMemory_0-input-memoryKey-string" - } - ], - "inputAnchors": [], - "inputs": { - "baseURL": "", - "sessionId": "", - "memoryKey": "chat_history" - }, - "outputAnchors": [ - { - "id": "motorheadMemory_0-output-motorheadMemory-MotorheadMemory|BaseChatMemory|BaseMemory", - "name": "motorheadMemory", - "label": "MotorheadMemory", - "type": "MotorheadMemory | BaseChatMemory | BaseMemory" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 1202.1545938923578, - "y": 425.69055061366237 + "x": 1194.3821796400694, + "y": -162.7324497768837 }, "dragging": false } ], "edges": [ + { + "source": "htmlToMarkdownTextSplitter_0", + "sourceHandle": "htmlToMarkdownTextSplitter_0-output-htmlToMarkdownTextSplitter-HtmlToMarkdownTextSplitter|MarkdownTextSplitter|RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer", + "target": "cheerioWebScraper_0", + "targetHandle": "cheerioWebScraper_0-input-textSplitter-TextSplitter", + "type": "buttonedge", + "id": "htmlToMarkdownTextSplitter_0-htmlToMarkdownTextSplitter_0-output-htmlToMarkdownTextSplitter-HtmlToMarkdownTextSplitter|MarkdownTextSplitter|RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer-cheerioWebScraper_0-cheerioWebScraper_0-input-textSplitter-TextSplitter", + "data": { + "label": "" + } + }, { "source": "chatOpenAI_0", - "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel", + "sourceHandle": "chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable", "target": "conversationalRetrievalQAChain_0", "targetHandle": "conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", "type": "buttonedge", - "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", + "id": "chatOpenAI_0-chatOpenAI_0-output-chatOpenAI-ChatOpenAI|BaseChatModel|BaseLanguageModel|Runnable-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-model-BaseLanguageModel", "data": { "label": "" } }, { - "source": "htmlToMarkdownTextSplitter_0", - "sourceHandle": "htmlToMarkdownTextSplitter_0-output-htmlToMarkdownTextSplitter-HtmlToMarkdownTextSplitter|MarkdownTextSplitter|RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer", - "target": "cheerioWebScraper_0", - "targetHandle": "cheerioWebScraper_0-input-textSplitter-TextSplitter", + "source": "RedisBackedChatMemory_0", + "sourceHandle": "RedisBackedChatMemory_0-output-RedisBackedChatMemory-RedisBackedChatMemory|BaseChatMemory|BaseMemory", + "target": "conversationalRetrievalQAChain_0", + "targetHandle": "conversationalRetrievalQAChain_0-input-memory-BaseMemory", "type": "buttonedge", - "id": 
"htmlToMarkdownTextSplitter_0-htmlToMarkdownTextSplitter_0-output-htmlToMarkdownTextSplitter-HtmlToMarkdownTextSplitter|MarkdownTextSplitter|RecursiveCharacterTextSplitter|TextSplitter|BaseDocumentTransformer-cheerioWebScraper_0-cheerioWebScraper_0-input-textSplitter-TextSplitter", + "id": "RedisBackedChatMemory_0-RedisBackedChatMemory_0-output-RedisBackedChatMemory-RedisBackedChatMemory|BaseChatMemory|BaseMemory-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-memory-BaseMemory", "data": { "label": "" } @@ -744,10 +793,10 @@ { "source": "cheerioWebScraper_0", "sourceHandle": "cheerioWebScraper_0-output-cheerioWebScraper-Document", - "target": "pineconeUpsert_0", - "targetHandle": "pineconeUpsert_0-input-document-Document", + "target": "pinecone_0", + "targetHandle": "pinecone_0-input-document-Document", "type": "buttonedge", - "id": "cheerioWebScraper_0-cheerioWebScraper_0-output-cheerioWebScraper-Document-pineconeUpsert_0-pineconeUpsert_0-input-document-Document", + "id": "cheerioWebScraper_0-cheerioWebScraper_0-output-cheerioWebScraper-Document-pinecone_0-pinecone_0-input-document-Document", "data": { "label": "" } @@ -755,32 +804,21 @@ { "source": "openAIEmbeddings_0", "sourceHandle": "openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings", - "target": "pineconeUpsert_0", - "targetHandle": "pineconeUpsert_0-input-embeddings-Embeddings", + "target": "pinecone_0", + "targetHandle": "pinecone_0-input-embeddings-Embeddings", "type": "buttonedge", - "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pineconeUpsert_0-pineconeUpsert_0-input-embeddings-Embeddings", + "id": "openAIEmbeddings_0-openAIEmbeddings_0-output-openAIEmbeddings-OpenAIEmbeddings|Embeddings-pinecone_0-pinecone_0-input-embeddings-Embeddings", "data": { "label": "" } }, { - "source": "pineconeUpsert_0", - "sourceHandle": "pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", + "source": "pinecone_0", + "sourceHandle": "pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever", "target": "conversationalRetrievalQAChain_0", "targetHandle": "conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", "type": "buttonedge", - "id": "pineconeUpsert_0-pineconeUpsert_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", - "data": { - "label": "" - } - }, - { - "source": "motorheadMemory_0", - "sourceHandle": "motorheadMemory_0-output-motorheadMemory-MotorheadMemory|BaseChatMemory|BaseMemory", - "target": "conversationalRetrievalQAChain_0", - "targetHandle": "conversationalRetrievalQAChain_0-input-memory-BaseMemory", - "type": "buttonedge", - "id": "motorheadMemory_0-motorheadMemory_0-output-motorheadMemory-MotorheadMemory|BaseChatMemory|BaseMemory-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-memory-BaseMemory", + "id": "pinecone_0-pinecone_0-output-retriever-Pinecone|VectorStoreRetriever|BaseRetriever-conversationalRetrievalQAChain_0-conversationalRetrievalQAChain_0-input-vectorStoreRetriever-BaseRetriever", "data": { "label": "" } diff --git a/packages/server/marketplaces/chatflows/Zapier NLA.json b/packages/server/marketplaces/chatflows/Zapier NLA.json deleted file mode 100644 index 49527da2414..00000000000 --- a/packages/server/marketplaces/chatflows/Zapier NLA.json +++ /dev/null @@ -1,290 +0,0 @@ -{ - "description": "An agent that uses Zapier 
NLA to accesss apps and actions on Zapier's platform", - "nodes": [ - { - "width": 300, - "height": 278, - "id": "zapierNLA_0", - "position": { - "x": 546.0561178227484, - "y": 83.03303671691799 - }, - "type": "customNode", - "data": { - "id": "zapierNLA_0", - "label": "Zapier NLA", - "name": "zapierNLA", - "version": 1, - "type": "ZapierNLA", - "baseClasses": ["ZapierNLA", "Tool"], - "category": "Tools", - "description": "Access to apps and actions on Zapier's platform through a natural language API interface", - "inputParams": [ - { - "label": "Zapier NLA Api Key", - "name": "apiKey", - "type": "password", - "id": "zapierNLA_0-input-apiKey-password" - } - ], - "inputAnchors": [], - "inputs": {}, - "outputAnchors": [ - { - "id": "zapierNLA_0-output-zapierNLA-ZapierNLA|Tool", - "name": "zapierNLA", - "label": "ZapierNLA", - "type": "ZapierNLA | Tool" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 546.0561178227484, - "y": 83.03303671691799 - }, - "dragging": false - }, - { - "width": 300, - "height": 280, - "id": "mrklAgentLLM_0", - "position": { - "x": 1002.5779315680477, - "y": 329.9701389591812 - }, - "type": "customNode", - "data": { - "id": "mrklAgentLLM_0", - "label": "MRKL Agent for LLMs", - "name": "mrklAgentLLM", - "version": 1, - "type": "AgentExecutor", - "baseClasses": ["AgentExecutor", "BaseChain", "BaseLangChain"], - "category": "Agents", - "description": "Agent that uses the ReAct Framework to decide what action to take, optimized to be used with LLMs", - "inputParams": [], - "inputAnchors": [ - { - "label": "Allowed Tools", - "name": "tools", - "type": "Tool", - "list": true, - "id": "mrklAgentLLM_0-input-tools-Tool" - }, - { - "label": "Language Model", - "name": "model", - "type": "BaseLanguageModel", - "id": "mrklAgentLLM_0-input-model-BaseLanguageModel" - } - ], - "inputs": { - "tools": ["{{zapierNLA_0.data.instance}}"], - "model": "{{openAI_0.data.instance}}" - }, - "outputAnchors": [ - { - "id": "mrklAgentLLM_0-output-mrklAgentLLM-AgentExecutor|BaseChain|BaseLangChain", - "name": "mrklAgentLLM", - "label": "AgentExecutor", - "type": "AgentExecutor | BaseChain | BaseLangChain" - } - ], - "outputs": {}, - "selected": false - }, - "positionAbsolute": { - "x": 1002.5779315680477, - "y": 329.9701389591812 - }, - "selected": false - }, - { - "width": 300, - "height": 523, - "id": "openAI_0", - "position": { - "x": 550.5957793208096, - "y": 378.30370661617934 - }, - "type": "customNode", - "data": { - "id": "openAI_0", - "label": "OpenAI", - "name": "openAI", - "version": 3, - "type": "OpenAI", - "baseClasses": ["OpenAI", "BaseLLM", "BaseLanguageModel"], - "category": "LLMs", - "description": "Wrapper around OpenAI large language models", - "inputParams": [ - { - "label": "Connect Credential", - "name": "credential", - "type": "credential", - "credentialNames": ["openAIApi"], - "id": "openAI_0-input-credential-credential" - }, - { - "label": "Model Name", - "name": "modelName", - "type": "options", - "options": [ - { - "label": "gpt-3.5-turbo-instruct", - "name": "gpt-3.5-turbo-instruct" - }, - { - "label": "babbage-002", - "name": "babbage-002" - }, - { - "label": "davinci-002", - "name": "davinci-002" - } - ], - "default": "gpt-3.5-turbo-instruct", - "optional": true, - "id": "openAI_0-input-modelName-options" - }, - { - "label": "Temperature", - "name": "temperature", - "type": "number", - "default": 0.7, - "optional": true, - "id": "openAI_0-input-temperature-number" - }, - { - "label": "Max Tokens", - "name": 
"maxTokens", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_0-input-maxTokens-number" - }, - { - "label": "Top Probability", - "name": "topP", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_0-input-topP-number" - }, - { - "label": "Best Of", - "name": "bestOf", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_0-input-bestOf-number" - }, - { - "label": "Frequency Penalty", - "name": "frequencyPenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_0-input-frequencyPenalty-number" - }, - { - "label": "Presence Penalty", - "name": "presencePenalty", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_0-input-presencePenalty-number" - }, - { - "label": "Batch Size", - "name": "batchSize", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_0-input-batchSize-number" - }, - { - "label": "Timeout", - "name": "timeout", - "type": "number", - "optional": true, - "additionalParams": true, - "id": "openAI_0-input-timeout-number" - }, - { - "label": "BasePath", - "name": "basepath", - "type": "string", - "optional": true, - "additionalParams": true, - "id": "openAI_0-input-basepath-string" - } - ], - "inputAnchors": [ - { - "label": "Cache", - "name": "cache", - "type": "BaseCache", - "optional": true, - "id": "openAI_0-input-cache-BaseCache" - } - ], - "inputs": { - "modelName": "gpt-3.5-turbo-instruct", - "temperature": 0.7, - "maxTokens": "", - "topP": "", - "bestOf": "", - "frequencyPenalty": "", - "presencePenalty": "", - "batchSize": "", - "timeout": "", - "basepath": "" - }, - "outputAnchors": [ - { - "id": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", - "name": "openAI", - "label": "OpenAI", - "type": "OpenAI | BaseLLM | BaseLanguageModel" - } - ], - "outputs": {}, - "selected": false - }, - "selected": false, - "positionAbsolute": { - "x": 550.5957793208096, - "y": 378.30370661617934 - }, - "dragging": false - } - ], - "edges": [ - { - "source": "zapierNLA_0", - "sourceHandle": "zapierNLA_0-output-zapierNLA-ZapierNLA|Tool", - "target": "mrklAgentLLM_0", - "targetHandle": "mrklAgentLLM_0-input-tools-Tool", - "type": "buttonedge", - "id": "zapierNLA_0-zapierNLA_0-output-zapierNLA-ZapierNLA|Tool-mrklAgentLLM_0-mrklAgentLLM_0-input-tools-Tool", - "data": { - "label": "" - } - }, - { - "source": "openAI_0", - "sourceHandle": "openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel", - "target": "mrklAgentLLM_0", - "targetHandle": "mrklAgentLLM_0-input-model-BaseLanguageModel", - "type": "buttonedge", - "id": "openAI_0-openAI_0-output-openAI-OpenAI|BaseLLM|BaseLanguageModel-mrklAgentLLM_0-mrklAgentLLM_0-input-model-BaseLanguageModel", - "data": { - "label": "" - } - } - ] -} diff --git a/packages/server/src/Interface.ts b/packages/server/src/Interface.ts index 8d0965f48b9..62137811d9b 100644 --- a/packages/server/src/Interface.ts +++ b/packages/server/src/Interface.ts @@ -165,6 +165,7 @@ export interface IncomingInput { overrideConfig?: ICommonObject socketIOClientId?: string chatId?: string + stopNodeId?: string } export interface IActiveChatflows { diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts index ba6c3ce0e8a..86dadd59d62 100644 --- a/packages/server/src/index.ts +++ b/packages/server/src/index.ts @@ -135,6 +135,7 @@ export class App { '/api/v1/chatflows/apikey/', '/api/v1/public-chatflows', '/api/v1/prediction/', + '/api/v1/vector/upsert/', 
'/api/v1/node-icon/', '/api/v1/components-credentials-icon/', '/api/v1/chatflows-streaming', @@ -1062,6 +1063,23 @@ export class App { return res.status(201).send('OK') }) + // ---------------------------------------- + // Upsert + // ---------------------------------------- + + this.app.post( + '/api/v1/vector/upsert/:id', + upload.array('files'), + (req: Request, res: Response, next: NextFunction) => getRateLimiter(req, res, next), + async (req: Request, res: Response) => { + await this.buildChatflow(req, res, undefined, false, true) + } + ) + + this.app.post('/api/v1/vector/internal-upsert/:id', async (req: Request, res: Response) => { + await this.buildChatflow(req, res, undefined, true, true) + }) + // ---------------------------------------- // Prediction // ---------------------------------------- @@ -1072,13 +1090,13 @@ export class App { upload.array('files'), (req: Request, res: Response, next: NextFunction) => getRateLimiter(req, res, next), async (req: Request, res: Response) => { - await this.processPrediction(req, res, socketIO) + await this.buildChatflow(req, res, socketIO) } ) // Send input message and get prediction result (Internal) this.app.post('/api/v1/internal-prediction/:id', async (req: Request, res: Response) => { - await this.processPrediction(req, res, socketIO, true) + await this.buildChatflow(req, res, socketIO, true) }) // ---------------------------------------- @@ -1284,13 +1302,14 @@ export class App { } /** - * Process Prediction + * Build Chatflow * @param {Request} req * @param {Response} res * @param {Server} socketIO * @param {boolean} isInternal + * @param {boolean} isUpsert */ - async processPrediction(req: Request, res: Response, socketIO?: Server, isInternal: boolean = false) { + async buildChatflow(req: Request, res: Response, socketIO?: Server, isInternal: boolean = false, isUpsert: boolean = false) { try { const chatflowid = req.params.id let incomingInput: IncomingInput = req.body @@ -1331,7 +1350,8 @@ export class App { question: req.body.question ?? 
'hello', overrideConfig, history: [], - socketIOClientId: req.body.socketIOClientId + socketIOClientId: req.body.socketIOClientId, + stopNodeId: req.body.stopNodeId } } @@ -1356,7 +1376,8 @@ export class App { this.chatflowPool.activeChatflows[chatflowid].overrideConfig, incomingInput.overrideConfig ) && - !isStartNodeDependOnInput(this.chatflowPool.activeChatflows[chatflowid].startingNodes, nodes) + !isStartNodeDependOnInput(this.chatflowPool.activeChatflows[chatflowid].startingNodes, nodes) && + !isUpsert ) } @@ -1376,14 +1397,15 @@ export class App { const endingNodeData = nodes.find((nd) => nd.id === endingNodeId)?.data if (!endingNodeData) return res.status(500).send(`Ending node ${endingNodeId} data not found`) - if (endingNodeData && endingNodeData.category !== 'Chains' && endingNodeData.category !== 'Agents') { + if (endingNodeData && endingNodeData.category !== 'Chains' && endingNodeData.category !== 'Agents' && !isUpsert) { return res.status(500).send(`Ending node must be either a Chain or Agent`) } if ( endingNodeData.outputs && Object.keys(endingNodeData.outputs).length && - !Object.values(endingNodeData.outputs).includes(endingNodeData.name) + !Object.values(endingNodeData.outputs).includes(endingNodeData.name) && + !isUpsert ) { return res .status(500) @@ -1413,8 +1435,11 @@ export class App { chatflowid, this.AppDataSource, incomingInput?.overrideConfig, - this.cachePool + this.cachePool, + isUpsert, + incomingInput.stopNodeId ) + if (isUpsert) return res.status(201).send('Successfully Upserted') const nodeToExecute = reactFlowNodes.find((node: IReactFlowNode) => node.id === endingNodeId) if (!nodeToExecute) return res.status(404).send(`Node ${endingNodeId} not found`) diff --git a/packages/server/src/utils/index.ts b/packages/server/src/utils/index.ts index 239773a9a9b..aa911b8d21f 100644 --- a/packages/server/src/utils/index.ts +++ b/packages/server/src/utils/index.ts @@ -222,7 +222,9 @@ export const buildLangchain = async ( chatflowid: string, appDataSource: DataSource, overrideConfig?: ICommonObject, - cachePool?: CachePool + cachePool?: CachePool, + isUpsert?: boolean, + stopNodeId?: string ) => { const flowNodes = cloneDeep(reactFlowNodes) @@ -254,16 +256,33 @@ export const buildLangchain = async ( if (overrideConfig) flowNodeData = replaceInputsWithConfig(flowNodeData, overrideConfig) const reactFlowNodeData: INodeData = resolveVariables(flowNodeData, flowNodes, question, chatHistory) - logger.debug(`[server]: Initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) - flowNodes[nodeIndex].data.instance = await newNodeInstance.init(reactFlowNodeData, question, { - chatId, - chatflowid, - appDataSource, - databaseEntities, - logger, - cachePool - }) - logger.debug(`[server]: Finished initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) + if ( + isUpsert && + ((stopNodeId && reactFlowNodeData.id === stopNodeId) || (!stopNodeId && reactFlowNodeData.category === 'Vector Stores')) + ) { + logger.debug(`[server]: Upserting ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) + await newNodeInstance.vectorStoreMethods!['upsert']!.call(newNodeInstance, reactFlowNodeData, { + chatId, + chatflowid, + appDataSource, + databaseEntities, + logger, + cachePool + }) + logger.debug(`[server]: Finished upserting ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) + break + } else { + logger.debug(`[server]: Initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) + flowNodes[nodeIndex].data.instance = await 
newNodeInstance.init(reactFlowNodeData, question, { + chatId, + chatflowid, + appDataSource, + databaseEntities, + logger, + cachePool + }) + logger.debug(`[server]: Finished initializing ${reactFlowNode.data.label} (${reactFlowNode.data.id})`) + } } catch (e: any) { logger.error(e) throw new Error(e) diff --git a/packages/ui/src/api/vectorstore.js b/packages/ui/src/api/vectorstore.js new file mode 100644 index 00000000000..053f5112d42 --- /dev/null +++ b/packages/ui/src/api/vectorstore.js @@ -0,0 +1,7 @@ +import client from './client' + +const upsertVectorStore = (id, input) => client.post(`/vector/internal-upsert/${id}`, input) + +export default { + upsertVectorStore +} diff --git a/packages/ui/src/assets/scss/_themes-vars.module.scss b/packages/ui/src/assets/scss/_themes-vars.module.scss index 374c36c920a..6304b7071f2 100644 --- a/packages/ui/src/assets/scss/_themes-vars.module.scss +++ b/packages/ui/src/assets/scss/_themes-vars.module.scss @@ -31,6 +31,11 @@ $orangeLight: #fbe9e7; $orangeMain: #ffab91; $orangeDark: #d84315; +// teal +$tealLight: #76c893; +$tealMain: #52b69a; +$tealDark: #34a0a4; + // warning $warningLight: #fff8e1; $warningMain: #ffe57f; @@ -46,6 +51,9 @@ $grey600: #757575; $grey700: #616161; $grey900: #212121; +// transparent +$transparent: #ffffff00; + // ==============================|| DARK THEME VARIANTS ||============================== // // paper & background @@ -111,6 +119,11 @@ $darkTextSecondary: #8492c4; orangeMain: $orangeMain; orangeDark: $orangeDark; + // teal + tealLight: $tealLight; + tealMain: $tealMain; + tealDark: $tealDark; + // warning warningLight: $warningLight; warningMain: $warningMain; @@ -154,4 +167,7 @@ $darkTextSecondary: #8492c4; darkSecondaryDark: $darkSecondaryDark; darkSecondary200: $darkSecondary200; darkSecondary800: $darkSecondary800; + + // transparent + transparent: $transparent; } diff --git a/packages/ui/src/store/context/ReactFlowContext.js b/packages/ui/src/store/context/ReactFlowContext.js index 055cb8bc9e9..3619062670e 100644 --- a/packages/ui/src/store/context/ReactFlowContext.js +++ b/packages/ui/src/store/context/ReactFlowContext.js @@ -97,13 +97,41 @@ export const ReactFlowContext = ({ children }) => { selected: false } - const dataKeys = ['inputParams', 'inputAnchors', 'outputAnchors'] + const inputKeys = ['inputParams', 'inputAnchors'] + for (const key of inputKeys) { + for (const item of duplicatedNode.data[key]) { + if (item.id) { + item.id = item.id.replace(id, newNodeId) + } + } + } - for (const key of dataKeys) { + const outputKeys = ['outputAnchors'] + for (const key of outputKeys) { for (const item of duplicatedNode.data[key]) { if (item.id) { item.id = item.id.replace(id, newNodeId) } + if (item.options) { + for (const output of item.options) { + output.id = output.id.replace(id, newNodeId) + } + } + } + } + + // Clear connected inputs + for (const inputName in duplicatedNode.data.inputs) { + if ( + typeof duplicatedNode.data.inputs[inputName] === 'string' && + duplicatedNode.data.inputs[inputName].startsWith('{{') && + duplicatedNode.data.inputs[inputName].endsWith('}}') + ) { + duplicatedNode.data.inputs[inputName] = '' + } else if (Array.isArray(duplicatedNode.data.inputs[inputName])) { + duplicatedNode.data.inputs[inputName] = duplicatedNode.data.inputs[inputName].filter( + (item) => !(typeof item === 'string' && item.startsWith('{{') && item.endsWith('}}')) + ) } } diff --git a/packages/ui/src/themes/palette.js b/packages/ui/src/themes/palette.js index 66ec3d01433..750fe1be321 100644 ---
a/packages/ui/src/themes/palette.js +++ b/packages/ui/src/themes/palette.js @@ -6,6 +6,7 @@ export default function themePalette(theme) { return { mode: theme?.customization?.navType, + transparent: theme.colors?.transparent, common: { black: theme.colors?.darkPaper, dark: theme.colors?.darkPrimaryMain @@ -34,6 +35,11 @@ export default function themePalette(theme) { main: theme.colors?.orangeMain, dark: theme.colors?.orangeDark }, + teal: { + light: theme.colors?.tealLight, + main: theme.colors?.tealMain, + dark: theme.colors?.tealDark + }, warning: { light: theme.colors?.warningLight, main: theme.colors?.warningMain, diff --git a/packages/ui/src/ui-component/dialog/NodeInfoDialog.js b/packages/ui/src/ui-component/dialog/NodeInfoDialog.js index 74c45a1a829..5abdb035723 100644 --- a/packages/ui/src/ui-component/dialog/NodeInfoDialog.js +++ b/packages/ui/src/ui-component/dialog/NodeInfoDialog.js @@ -123,7 +123,14 @@ const NodeInfoDialog = ({ show, dialogProps, onCancel }) => { )} {getNodeConfigApi.data && getNodeConfigApi.data.length > 0 && ( - + { + // eslint-disable-next-line + const { node, nodeId, ...rest } = obj + return rest + })} + columns={Object.keys(getNodeConfigApi.data[0]).slice(-3)} + /> )} diff --git a/packages/ui/src/ui-component/table/Table.js b/packages/ui/src/ui-component/table/Table.js index 2cf39182700..1f0892bbc90 100644 --- a/packages/ui/src/ui-component/table/Table.js +++ b/packages/ui/src/ui-component/table/Table.js @@ -1,11 +1,11 @@ import PropTypes from 'prop-types' import { TableContainer, Table, TableHead, TableCell, TableRow, TableBody, Paper } from '@mui/material' -export const TableViewOnly = ({ columns, rows }) => { +export const TableViewOnly = ({ columns, rows, sx }) => { return ( <> - +
    {columns.map((col, index) => ( @@ -16,11 +16,9 @@ export const TableViewOnly = ({ columns, rows }) => { {rows.map((row, index) => ( - {Object.keys(row) - .slice(-3) - .map((key, index) => ( - {row[key]} - ))} + {Object.keys(row).map((key, index) => ( + {row[key]} + ))} ))} @@ -32,5 +30,6 @@ export const TableViewOnly = ({ columns, rows }) => { TableViewOnly.propTypes = { rows: PropTypes.array, - columns: PropTypes.array + columns: PropTypes.array, + sx: PropTypes.object } diff --git a/packages/ui/src/utils/genericHelper.js b/packages/ui/src/utils/genericHelper.js index 32331b1420d..e93663e1bad 100644 --- a/packages/ui/src/utils/genericHelper.js +++ b/packages/ui/src/utils/genericHelper.js @@ -332,6 +332,57 @@ export const getAvailableNodesForVariable = (nodes, edges, target, targetHandle) return parentNodes } +export const getUpsertDetails = (nodes, edges) => { + const vsNodes = nodes.filter( + (node) => + node.data.category === 'Vector Stores' && !node.data.label.includes('Upsert') && !node.data.label.includes('Load Existing') + ) + const vsNodeIds = vsNodes.map((vs) => vs.data.id) + + const upsertNodes = [] + const seenVsNodeIds = [] + for (const edge of edges) { + if (vsNodeIds.includes(edge.source) || vsNodeIds.includes(edge.target)) { + const vsNode = vsNodes.find((node) => node.data.id === edge.source || node.data.id === edge.target) + if (!vsNode || seenVsNodeIds.includes(vsNode.data.id)) continue + seenVsNodeIds.push(vsNode.data.id) + + // Found Vector Store Node, proceed to find connected Document Loader node + let connectedDocs = [] + + if (vsNode.data.inputs.document) connectedDocs = [...new Set(vsNode.data.inputs.document)] + + if (connectedDocs.length) { + const innerNodes = [vsNode] + + if (vsNode.data.inputs.embeddings) { + const embeddingsId = vsNode.data.inputs.embeddings.replace(/{{|}}/g, '').split('.')[0] + innerNodes.push(nodes.find((node) => node.data.id === embeddingsId)) + } + + for (const doc of connectedDocs) { + const docId = doc.replace(/{{|}}/g, '').split('.')[0] + const docNode = nodes.find((node) => node.data.id === docId) + if (docNode) innerNodes.push(docNode) + + // Found Document Loader Node, proceed to find connected Text Splitter node + if (docNode && docNode.data.inputs.textSplitter) { + const textSplitterId = docNode.data.inputs.textSplitter.replace(/{{|}}/g, '').split('.')[0] + const textSplitterNode = nodes.find((node) => node.data.id === textSplitterId) + if (textSplitterNode) innerNodes.push(textSplitterNode) + } + } + + upsertNodes.push({ + vectorNode: vsNode, + nodes: innerNodes.reverse() + }) + } + } + } + return upsertNodes +} + export const rearrangeToolsOrdering = (newValues, sourceNodeId) => { // RequestsGet and RequestsPost have to be in order before other tools newValues.push(`{{${sourceNodeId}.data.instance}}`) @@ -454,3 +505,106 @@ export const formatDataGridRows = (rows) => { return [] } } + +export const setLocalStorageChatflow = (chatflowid, chatId, chatHistory) => { + const chatDetails = localStorage.getItem(`${chatflowid}_INTERNAL`) + const obj = {} + if (chatId) obj.chatId = chatId + if (chatHistory) obj.chatHistory = chatHistory + + if (!chatDetails) { + localStorage.setItem(`${chatflowid}_INTERNAL`, JSON.stringify(obj)) + } else { + try { + const parsedChatDetails = JSON.parse(chatDetails) + localStorage.setItem(`${chatflowid}_INTERNAL`, JSON.stringify({ ...parsedChatDetails, ...obj })) + } catch (e) { + const chatId = chatDetails + obj.chatId = chatId + localStorage.setItem(`${chatflowid}_INTERNAL`, JSON.stringify(obj)) + } 
+ } +} + +export const unshiftFiles = (configData) => { + const filesConfig = configData.find((config) => config.name === 'files') + if (filesConfig) { + configData = configData.filter((config) => config.name !== 'files') + configData.unshift(filesConfig) + } + return configData +} + +export const getConfigExamplesForJS = (configData, bodyType, isMultiple, stopNodeId) => { + let finalStr = '' + configData = unshiftFiles(configData) + const loop = Math.min(configData.length, 4) + for (let i = 0; i < loop; i += 1) { + const config = configData[i] + let exampleVal = `"example"` + if (config.type === 'string') exampleVal = `"example"` + else if (config.type === 'boolean') exampleVal = `true` + else if (config.type === 'number') exampleVal = `1` + else if (config.type === 'json') exampleVal = `{ "key": "val" }` + else if (config.name === 'files') exampleVal = `input.files[0]` + finalStr += bodyType === 'json' ? `\n "${config.name}": ${exampleVal},` : `formData.append("${config.name}", ${exampleVal})\n` + if (i === loop - 1 && bodyType !== 'json') + finalStr += !isMultiple + ? `` + : stopNodeId + ? `formData.append("stopNodeId", "${stopNodeId}")\n` + : `formData.append("question", "Hey, how are you?")\n` + } + return finalStr +} + +export const getConfigExamplesForPython = (configData, bodyType, isMultiple, stopNodeId) => { + let finalStr = '' + configData = unshiftFiles(configData) + const loop = Math.min(configData.length, 4) + for (let i = 0; i < loop; i += 1) { + const config = configData[i] + let exampleVal = `"example"` + if (config.type === 'string') exampleVal = `"example"` + else if (config.type === 'boolean') exampleVal = `true` + else if (config.type === 'number') exampleVal = `1` + else if (config.type === 'json') exampleVal = `{ "key": "val" }` + else if (config.name === 'files') continue + finalStr += bodyType === 'json' ? `\n "${config.name}": ${exampleVal},` : `\n "${config.name}": ${exampleVal},` + if (i === loop - 1 && bodyType !== 'json') + finalStr += !isMultiple + ? `\n` + : stopNodeId + ? `\n "stopNodeId": "${stopNodeId}"\n` + : `\n "question": "Hey, how are you?"\n` + } + return finalStr +} + +export const getConfigExamplesForCurl = (configData, bodyType, isMultiple, stopNodeId) => { + let finalStr = '' + configData = unshiftFiles(configData) + const loop = Math.min(configData.length, 4) + for (let i = 0; i < loop; i += 1) { + const config = configData[i] + let exampleVal = `example` + if (config.type === 'string') exampleVal = bodyType === 'json' ? `"example"` : `example` + else if (config.type === 'boolean') exampleVal = `true` + else if (config.type === 'number') exampleVal = `1` + else if (config.type === 'json') exampleVal = `{key:val}` + else if (config.name === 'files') + exampleVal = `@/home/user1/Desktop/example${config.type.includes(',') ? config.type.split(',')[0] : config.type}` + finalStr += bodyType === 'json' ? `"${config.name}": ${exampleVal}` : `\n -F "${config.name}=${exampleVal}"` + if (i === loop - 1) + finalStr += + bodyType === 'json' + ? ` }` + : !isMultiple + ? `` + : stopNodeId + ? ` \\\n -F "stopNodeId=${stopNodeId}"` + : ` \\\n -F "question=Hey, how are you?"` + else finalStr += bodyType === 'json' ? 
`, ` : ` \\` + } + return finalStr +} diff --git a/packages/ui/src/views/canvas/AddNodes.js b/packages/ui/src/views/canvas/AddNodes.js index c6134cb9aec..e0e639d14a7 100644 --- a/packages/ui/src/views/canvas/AddNodes.js +++ b/packages/ui/src/views/canvas/AddNodes.js @@ -21,7 +21,8 @@ import { Paper, Popper, Stack, - Typography + Typography, + Chip } from '@mui/material' import ExpandMoreIcon from '@mui/icons-material/ExpandMore' @@ -301,7 +302,37 @@ const AddNodes = ({ nodesData, node }) => { + {node.label} +   + {node.badge && ( + + )} + + } secondary={node.description} /> diff --git a/packages/ui/src/views/canvas/index.js b/packages/ui/src/views/canvas/index.js index c0206a9e073..29602a4f291 100644 --- a/packages/ui/src/views/canvas/index.js +++ b/packages/ui/src/views/canvas/index.js @@ -25,6 +25,7 @@ import CanvasHeader from './CanvasHeader' import AddNodes from './AddNodes' import ConfirmDialog from 'ui-component/dialog/ConfirmDialog' import { ChatPopUp } from 'views/chatmessage/ChatPopUp' +import { VectorStorePopUp } from 'views/vectorstore/VectorStorePopUp' import { flowContext } from 'store/context/ReactFlowContext' // API @@ -39,7 +40,7 @@ import useConfirm from 'hooks/useConfirm' import { IconX } from '@tabler/icons' // utils -import { getUniqueNodeId, initNode, getEdgeLabelName, rearrangeToolsOrdering } from 'utils/genericHelper' +import { getUniqueNodeId, initNode, getEdgeLabelName, rearrangeToolsOrdering, getUpsertDetails } from 'utils/genericHelper' import useNotifier from 'utils/useNotifier' // const @@ -81,6 +82,7 @@ const Canvas = () => { const [edges, setEdges, onEdgesChange] = useEdgesState() const [selectedNode, setSelectedNode] = useState(null) + const [isUpsertButtonEnabled, setIsUpsertButtonEnabled] = useState(false) const reactFlowWrapper = useRef(null) @@ -167,6 +169,7 @@ const Canvas = () => { if (isConfirmed) { try { await chatflowsApi.deleteChatflow(chatflow.id) + localStorage.removeItem(`${chatflow.id}_INTERNAL`) navigate(-1) } catch (error) { const errorData = error.response.data || `${error.response.status}: ${error.response.statusText}` @@ -339,6 +342,12 @@ const Canvas = () => { dispatch({ type: SET_DIRTY }) } + const checkIfUpsertAvailable = (nodes, edges) => { + const upsertNodeDetails = getUpsertDetails(nodes, edges) + if (upsertNodeDetails.length) setIsUpsertButtonEnabled(true) + else setIsUpsertButtonEnabled(false) + } + // ==============================|| useEffect ||============================== // // Get specific chatflow successful @@ -409,7 +418,13 @@ const Canvas = () => { // eslint-disable-next-line react-hooks/exhaustive-deps }, [testChatflowApi.error]) - useEffect(() => setChatflow(canvasDataStore.chatflow), [canvasDataStore.chatflow]) + useEffect(() => { + setChatflow(canvasDataStore.chatflow) + if (canvasDataStore.chatflow) { + const flowData = canvasDataStore.chatflow.flowData ? 
JSON.parse(canvasDataStore.chatflow.flowData) : [] + checkIfUpsertAvailable(flowData.nodes || [], flowData.edges || []) + } + }, [canvasDataStore.chatflow]) // Initialization useEffect(() => { @@ -524,6 +539,7 @@ const Canvas = () => { /> + {isUpsertButtonEnabled && } diff --git a/packages/ui/src/views/chatflows/APICodeDialog.js b/packages/ui/src/views/chatflows/APICodeDialog.js index 49c718cc4ee..34c2281fb5d 100644 --- a/packages/ui/src/views/chatflows/APICodeDialog.js +++ b/packages/ui/src/views/chatflows/APICodeDialog.js @@ -23,6 +23,7 @@ import ExpandMoreIcon from '@mui/icons-material/ExpandMore' import { Dropdown } from 'ui-component/dropdown/Dropdown' import ShareChatbot from './ShareChatbot' import EmbedChat from './EmbedChat' +import Configuration from './Configuration' // Const import { baseURL } from 'store/constant' @@ -35,6 +36,7 @@ import cURLSVG from 'assets/images/cURL.svg' import EmbedSVG from 'assets/images/embed.svg' import ShareChatbotSVG from 'assets/images/sharing.png' import settingsSVG from 'assets/images/settings.svg' +import { IconBulb } from '@tabler/icons' // API import apiKeyApi from 'api/apikey' @@ -46,8 +48,8 @@ import useApi from 'hooks/useApi' import { CheckboxInput } from 'ui-component/checkbox/Checkbox' import { TableViewOnly } from 'ui-component/table/Table' -import { IconBulb } from '@tabler/icons' -import Configuration from './Configuration' +// Helpers +import { unshiftFiles, getConfigExamplesForJS, getConfigExamplesForPython, getConfigExamplesForCurl } from 'utils/genericHelper' function TabPanel(props) { const { children, value, index, ...other } = props @@ -77,67 +79,6 @@ function a11yProps(index) { } } -const unshiftFiles = (configData) => { - const filesConfig = configData.find((config) => config.name === 'files') - if (filesConfig) { - configData = configData.filter((config) => config.name !== 'files') - configData.unshift(filesConfig) - } - return configData -} - -const getConfigExamplesForJS = (configData, bodyType) => { - let finalStr = '' - configData = unshiftFiles(configData) - const loop = Math.min(configData.length, 4) - for (let i = 0; i < loop; i += 1) { - const config = configData[i] - let exampleVal = `"example"` - if (config.type === 'string') exampleVal = `"example"` - else if (config.type === 'boolean') exampleVal = `true` - else if (config.type === 'number') exampleVal = `1` - else if (config.name === 'files') exampleVal = `input.files[0]` - finalStr += bodyType === 'json' ? `\n "${config.name}": ${exampleVal},` : `formData.append("${config.name}", ${exampleVal})\n` - if (i === loop - 1 && bodyType !== 'json') finalStr += `formData.append("question", "Hey, how are you?")\n` - } - return finalStr -} - -const getConfigExamplesForPython = (configData, bodyType) => { - let finalStr = '' - configData = unshiftFiles(configData) - const loop = Math.min(configData.length, 4) - for (let i = 0; i < loop; i += 1) { - const config = configData[i] - let exampleVal = `"example"` - if (config.type === 'string') exampleVal = `"example"` - else if (config.type === 'boolean') exampleVal = `true` - else if (config.type === 'number') exampleVal = `1` - else if (config.name === 'files') continue - finalStr += bodyType === 'json' ? 
`\n "${config.name}": ${exampleVal},` : `\n "${config.name}": ${exampleVal},` - if (i === loop - 1 && bodyType !== 'json') finalStr += `\n "question": "Hey, how are you?"\n` - } - return finalStr -} - -const getConfigExamplesForCurl = (configData, bodyType) => { - let finalStr = '' - configData = unshiftFiles(configData) - const loop = Math.min(configData.length, 4) - for (let i = 0; i < loop; i += 1) { - const config = configData[i] - let exampleVal = `example` - if (config.type === 'string') exampleVal = bodyType === 'json' ? `"example"` : `example` - else if (config.type === 'boolean') exampleVal = `true` - else if (config.type === 'number') exampleVal = `1` - else if (config.name === 'files') exampleVal = `@/home/user1/Desktop/example${config.type}` - finalStr += bodyType === 'json' ? `"${config.name}": ${exampleVal}` : `\n -F "${config.name}=${exampleVal}"` - if (i === loop - 1) finalStr += bodyType === 'json' ? ` }` : ` \\\n -F "question=Hey, how are you?"` - else finalStr += bodyType === 'json' ? `, ` : ` \\` - } - return finalStr -} - const APICodeDialog = ({ show, dialogProps, onCancel }) => { const portalElement = document.getElementById('portal') const navigate = useNavigate() @@ -334,7 +275,8 @@ query({"question": "Hey, how are you?"}).then((response) => { const getConfigCodeWithFormData = (codeLang, configData) => { if (codeLang === 'Python') { configData = unshiftFiles(configData) - const fileType = configData[0].type + let fileType = configData[0].type + if (fileType.includes(',')) fileType = fileType.split(',')[0] return `import requests API_URL = "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}" @@ -384,7 +326,8 @@ query(formData).then((response) => { const getConfigCodeWithFormDataWithAuth = (codeLang, configData) => { if (codeLang === 'Python') { configData = unshiftFiles(configData) - const fileType = configData[0].type + let fileType = configData[0].type + if (fileType.includes(',')) fileType = fileType.split(',')[0] return `import requests API_URL = "${baseURL}/api/v1/prediction/${dialogProps.chatflowid}" @@ -700,7 +643,11 @@ formData.append("openAIApiKey[openAIEmbeddings_0]", "sk-my-openai-2nd-key")` { + // eslint-disable-next-line + const { node, nodeId, ...rest } = obj + return rest + })} columns={Object.keys(nodeConfig[nodeLabel][0]).slice(-3)} /> diff --git a/packages/ui/src/views/chatmessage/ChatMessage.js b/packages/ui/src/views/chatmessage/ChatMessage.js index 0cf5695beff..61e37077e42 100644 --- a/packages/ui/src/views/chatmessage/ChatMessage.js +++ b/packages/ui/src/views/chatmessage/ChatMessage.js @@ -31,7 +31,7 @@ import { baseURL, maxScroll } from 'store/constant' import robotPNG from 'assets/images/robot.png' import userPNG from 'assets/images/account.png' -import { isValidURL, removeDuplicateURL } from 'utils/genericHelper' +import { isValidURL, removeDuplicateURL, setLocalStorageChatflow } from 'utils/genericHelper' export const ChatMessage = ({ open, chatflowid, isDialog }) => { const theme = useTheme() @@ -127,10 +127,9 @@ export const ChatMessage = ({ open, chatflowid, isDialog }) => { if (response.data) { const data = response.data - if (!chatId) { - setChatId(data.chatId) - localStorage.setItem(`${chatflowid}_INTERNAL`, data.chatId) - } + + if (!chatId) setChatId(data.chatId) + if (!isChatFlowAvailableToStream) { let text = '' if (data.text) text = data.text @@ -142,7 +141,7 @@ export const ChatMessage = ({ open, chatflowid, isDialog }) => { { message: text, sourceDocuments: data?.sourceDocuments, usedTools: data?.usedTools, type: 
'apiMessage' } ]) } - + setLocalStorageChatflow(chatflowid, data.chatId, messages) setLoading(false) setUserInput('') setTimeout(() => { @@ -175,7 +174,6 @@ export const ChatMessage = ({ open, chatflowid, isDialog }) => { if (getChatmessageApi.data?.length) { const chatId = getChatmessageApi.data[0]?.chatId setChatId(chatId) - localStorage.setItem(`${chatflowid}_INTERNAL`, chatId) const loadedMessages = getChatmessageApi.data.map((message) => { const obj = { message: message.content, @@ -186,6 +184,7 @@ export const ChatMessage = ({ open, chatflowid, isDialog }) => { return obj }) setMessages((prevMessages) => [...prevMessages, ...loadedMessages]) + setLocalStorageChatflow(chatflowid, chatId, messages) } // eslint-disable-next-line react-hooks/exhaustive-deps diff --git a/packages/ui/src/views/chatmessage/ChatPopUp.js b/packages/ui/src/views/chatmessage/ChatPopUp.js index 1b87ac30d13..670fb00f0f0 100644 --- a/packages/ui/src/views/chatmessage/ChatPopUp.js +++ b/packages/ui/src/views/chatmessage/ChatPopUp.js @@ -85,8 +85,10 @@ export const ChatPopUp = ({ chatflowid }) => { if (isConfirmed) { try { - const chatId = localStorage.getItem(`${chatflowid}_INTERNAL`) - await chatmessageApi.deleteChatmessage(chatflowid, { chatId, chatType: 'INTERNAL' }) + const chatDetails = localStorage.getItem(`${chatflowid}_INTERNAL`) + if (!chatDetails) return + const objChatDetails = JSON.parse(chatDetails) + await chatmessageApi.deleteChatmessage(chatflowid, { chatId: objChatDetails.chatId, chatType: 'INTERNAL' }) localStorage.removeItem(`${chatflowid}_INTERNAL`) resetChatDialog() enqueueSnackbar({ diff --git a/packages/ui/src/views/vectorstore/VectorStoreDialog.js b/packages/ui/src/views/vectorstore/VectorStoreDialog.js new file mode 100644 index 00000000000..29f443f448c --- /dev/null +++ b/packages/ui/src/views/vectorstore/VectorStoreDialog.js @@ -0,0 +1,556 @@ +import { createPortal } from 'react-dom' +import PropTypes from 'prop-types' +import { useDispatch } from 'react-redux' +import { useContext, useState, useEffect } from 'react' +import PerfectScrollbar from 'react-perfect-scrollbar' +import { CopyBlock, atomOneDark } from 'react-code-blocks' + +import { + Dialog, + DialogContent, + DialogTitle, + Button, + Box, + Tabs, + Tab, + Accordion, + AccordionSummary, + AccordionDetails, + Typography +} from '@mui/material' + +import { CheckboxInput } from 'ui-component/checkbox/Checkbox' +import { BackdropLoader } from 'ui-component/loading/BackdropLoader' +import { TableViewOnly } from 'ui-component/table/Table' + +import { IconX } from '@tabler/icons' +import ExpandMoreIcon from '@mui/icons-material/ExpandMore' +import pythonSVG from 'assets/images/python.svg' +import javascriptSVG from 'assets/images/javascript.svg' +import cURLSVG from 'assets/images/cURL.svg' + +import useApi from 'hooks/useApi' +import configApi from 'api/config' +import vectorstoreApi from 'api/vectorstore' + +// Utils +import { + getUpsertDetails, + getFileName, + unshiftFiles, + getConfigExamplesForJS, + getConfigExamplesForPython, + getConfigExamplesForCurl +} from 'utils/genericHelper' +import useNotifier from 'utils/useNotifier' + +// Store +import { flowContext } from 'store/context/ReactFlowContext' +import { HIDE_CANVAS_DIALOG, SHOW_CANVAS_DIALOG } from 'store/actions' +import { baseURL } from 'store/constant' +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from 'store/actions' + +function TabPanel(props) { + const { children, value, index, ...other } = props + return ( + + ) +} + 
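With the changes above, the `${chatflowid}_INTERNAL` localStorage entry holds a JSON object ({ chatId, chatHistory }) instead of a bare chatId string, and ChatPopUp now parses it before clearing messages. A small sketch of reading it back, with a fallback for the older plain-string value (readChatDetails is illustrative, not a helper in the codebase):

    const readChatDetails = (chatflowid) => {
        const raw = localStorage.getItem(`${chatflowid}_INTERNAL`)
        if (!raw) return {}
        try {
            // current format written by setLocalStorageChatflow
            return JSON.parse(raw)
        } catch (e) {
            // legacy format: the chatId string was stored directly
            return { chatId: raw }
        }
    }

    const { chatId, chatHistory } = readChatDetails('my-chatflow-id') // placeholder id
    console.log(chatId, chatHistory)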
+TabPanel.propTypes = { + children: PropTypes.node, + index: PropTypes.number.isRequired, + value: PropTypes.number.isRequired +} + +function a11yProps(index) { + return { + id: `attachment-tab-${index}`, + 'aria-controls': `attachment-tabpanel-${index}` + } +} + +const VectorStoreDialog = ({ show, dialogProps, onCancel }) => { + const portalElement = document.getElementById('portal') + const { reactFlowInstance } = useContext(flowContext) + const dispatch = useDispatch() + + useNotifier() + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + const getConfigApi = useApi(configApi.getConfig) + + const [nodes, setNodes] = useState([]) + const [loading, setLoading] = useState(false) + const [isFormDataRequired, setIsFormDataRequired] = useState({}) + const [nodeConfigExpanded, setNodeConfigExpanded] = useState({}) + const [nodeCheckboxExpanded, setCheckboxExpanded] = useState({}) + const [tabValue, setTabValue] = useState(0) + const [expandedVectorNodeId, setExpandedVectorNodeId] = useState('') + const [configData, setConfigData] = useState({}) + + const reformatConfigData = (configData, nodes) => { + return configData.filter((item1) => nodes.some((item2) => item1.nodeId === item2.id)) + } + + const getCode = (codeLang, vectorNodeId, isMultiple, configData) => { + if (codeLang === 'Python') { + return `import requests + +API_URL = "${baseURL}/api/v1/vector/upsert/${dialogProps.chatflowid}" + +def query(payload): + response = requests.post(API_URL, json=payload) + return response.json() + +output = query({ + ${isMultiple ? `"stopNodeId": "${vectorNodeId}",\n ` : ``}"overrideConfig": {${getConfigExamplesForPython( + configData, + 'json', + isMultiple, + vectorNodeId + )} + } +}) +` + } else if (codeLang === 'JavaScript') { + return `async function query(data) { + const response = await fetch( + "${baseURL}/api/v1/vector/upsert/${dialogProps.chatflowid}", + { + method: "POST", + headers: { + "Content-Type": "application/json" + }, + body: JSON.stringify(data) + } + ); + const result = await response.json(); + return result; +} + +query({ + ${isMultiple ? `"stopNodeId": "${vectorNodeId}",\n ` : ``}"overrideConfig": {${getConfigExamplesForJS( + configData, + 'json', + isMultiple, + vectorNodeId + )} + } +}).then((response) => { + console.log(response); +}); +` + } else if (codeLang === 'cURL') { + return `curl ${baseURL}/api/v1/vector/upsert/${dialogProps.chatflowid} \\ + -X POST \\ + ${ + isMultiple + ? 
`-d '{"stopNodeId": "${vectorNodeId}", "overrideConfig": {${getConfigExamplesForCurl( + configData, + 'json', + isMultiple, + vectorNodeId + )}}' \\` + : `-d '{"overrideConfig": {${getConfigExamplesForCurl(configData, 'json', isMultiple, vectorNodeId)}}' \\` + } + -H "Content-Type: application/json"` + } + return '' + } + + const getCodeWithFormData = (codeLang, vectorNodeId, isMultiple, configData) => { + if (codeLang === 'Python') { + configData = unshiftFiles(configData) + let fileType = configData[0].type + if (fileType.includes(',')) fileType = fileType.split(',')[0] + return `import requests + +API_URL = "${baseURL}/api/v1/vector/upsert/${dialogProps.chatflowid}" + +# use form data to upload files +form_data = { + "files": ${`('example${fileType}', open('example${fileType}', 'rb'))`} +} +body_data = {${getConfigExamplesForPython(configData, 'formData', isMultiple, vectorNodeId)}} + +def query(form_data, body_data): + response = requests.post(API_URL, files=form_data, data=body_data) + return response.json() + +output = query(form_data, body_data) +` + } else if (codeLang === 'JavaScript') { + return `// use FormData to upload files +let formData = new FormData(); +${getConfigExamplesForJS(configData, 'formData', isMultiple, vectorNodeId)} +async function query(formData) { + const response = await fetch( + "${baseURL}/api/v1/vector/upsert/${dialogProps.chatflowid}", + { + method: "POST", + body: formData + } + ); + const result = await response.json(); + return result; +} + +query(formData).then((response) => { + console.log(response); +}); +` + } else if (codeLang === 'cURL') { + return `curl ${baseURL}/api/v1/vector/upsert/${dialogProps.chatflowid} \\ + -X POST \\${getConfigExamplesForCurl(configData, 'formData', isMultiple, vectorNodeId)} \\ + -H "Content-Type: multipart/form-data"` + } + return '' + } + + const getLang = (codeLang) => { + if (codeLang === 'Python') { + return 'python' + } else if (codeLang === 'JavaScript') { + return 'javascript' + } else if (codeLang === 'cURL') { + return 'bash' + } + return 'python' + } + + const getSVG = (codeLang) => { + if (codeLang === 'Python') { + return pythonSVG + } else if (codeLang === 'JavaScript') { + return javascriptSVG + } else if (codeLang === 'Embed') { + return EmbedSVG + } else if (codeLang === 'cURL') { + return cURLSVG + } else if (codeLang === 'Share Chatbot') { + return ShareChatbotSVG + } else if (codeLang === 'Configuration') { + return settingsSVG + } + return pythonSVG + } + + const handleAccordionChange = (nodeLabel) => (event, isExpanded) => { + const accordianNodes = { ...nodeConfigExpanded } + accordianNodes[nodeLabel] = isExpanded + setNodeConfigExpanded(accordianNodes) + } + + const onCheckBoxChanged = (vectorNodeId) => { + const checkboxNodes = { ...nodeCheckboxExpanded } + if (Object.keys(checkboxNodes).includes(vectorNodeId)) checkboxNodes[vectorNodeId] = !checkboxNodes[vectorNodeId] + else checkboxNodes[vectorNodeId] = true + + if (checkboxNodes[vectorNodeId] === true) getConfigApi.request(dialogProps.chatflowid) + setCheckboxExpanded(checkboxNodes) + setExpandedVectorNodeId(vectorNodeId) + + const newIsFormDataRequired = { ...isFormDataRequired } + newIsFormDataRequired[vectorNodeId] = false + setIsFormDataRequired(newIsFormDataRequired) + const newNodes = nodes.find((node) => node.vectorNode.data.id === vectorNodeId)?.nodes ?? 
[] + + for (const node of newNodes) { + if (node.data.inputParams.find((param) => param.type === 'file')) { + newIsFormDataRequired[vectorNodeId] = true + setIsFormDataRequired(newIsFormDataRequired) + break + } + } + } + + const onUpsertClicked = async (vectorStoreNode) => { + setLoading(true) + try { + await vectorstoreApi.upsertVectorStore(dialogProps.chatflowid, { stopNodeId: vectorStoreNode.data.id }) + enqueueSnackbar({ + message: 'Succesfully upserted vector store. You can start chatting now!', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + setLoading(false) + } catch (error) { + const errorData = error.response.data || `${error.response.status}: ${error.response.statusText}` + enqueueSnackbar({ + message: errorData, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + setLoading(false) + } + } + + const getNodeDetail = (node) => { + const nodeDetails = [] + const inputKeys = Object.keys(node.data.inputs) + for (let i = 0; i < node.data.inputParams.length; i += 1) { + if (inputKeys.includes(node.data.inputParams[i].name)) { + nodeDetails.push({ + label: node.data.inputParams[i].label, + name: node.data.inputParams[i].name, + type: node.data.inputParams[i].type, + value: + node.data.inputParams[i].type === 'file' + ? getFileName(node.data.inputs[node.data.inputParams[i].name]) + : node.data.inputs[node.data.inputParams[i].name] ?? '' + }) + } + } + return nodeDetails + } + + useEffect(() => { + if (getConfigApi.data) { + const newConfigData = { ...configData } + newConfigData[expandedVectorNodeId] = reformatConfigData( + getConfigApi.data, + nodes.find((node) => node.vectorNode.data.id === expandedVectorNodeId)?.nodes ?? [] + ) + setConfigData(newConfigData) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [getConfigApi.data]) + + useEffect(() => { + if (dialogProps && reactFlowInstance) { + const nodes = reactFlowInstance.getNodes() + const edges = reactFlowInstance.getEdges() + setNodes(getUpsertDetails(nodes, edges)) + } + + return () => { + setNodes([]) + setLoading(false) + setIsFormDataRequired({}) + setNodeConfigExpanded({}) + setCheckboxExpanded({}) + setTabValue(0) + setConfigData({}) + } + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [dialogProps]) + + useEffect(() => { + if (show) dispatch({ type: SHOW_CANVAS_DIALOG }) + else dispatch({ type: HIDE_CANVAS_DIALOG }) + return () => dispatch({ type: HIDE_CANVAS_DIALOG }) + }, [show, dispatch]) + + const component = show ? ( + + + {dialogProps.title} + + + + {nodes.length > 0 && + nodes.map((data, index) => { + return ( +
    + {data.nodes.length > 0 && + data.nodes.map((node, index) => { + return ( + + } + aria-controls={`nodes-accordian-${node.data.name}`} + id={`nodes-accordian-header-${node.data.name}`} + > +
    +
    + {node.data.name} +
    + {node.data.label} +
    + + {node.data.id} + +
    +
    +
    + + + +
    + ) + })} + + onCheckBoxChanged(data.vectorNode.data.id)} + /> + {nodeCheckboxExpanded[data.vectorNode.data.id] && ( +
    + setTabValue(val)} aria-label='tabs'> + {['Python', 'JavaScript', 'cURL'].map((codeLang, index) => ( + + } + iconPosition='start' + key={index} + label={codeLang} + {...a11yProps(index)} + > + ))} + +
    + )} + {nodeCheckboxExpanded[data.vectorNode.data.id] && + isFormDataRequired[data.vectorNode.data.id] !== undefined && + configData[data.vectorNode.data.id] && + configData[data.vectorNode.data.id].length > 0 && ( + <> +
    + {['Python', 'JavaScript', 'cURL'].map((codeLang, index) => ( + + 1 ? true : false, + configData[data.vectorNode.data.id] + ) + : getCode( + codeLang, + data.vectorNode.data.id, + nodes.length > 1 ? true : false, + configData[data.vectorNode.data.id] + ) + } + language={getLang(codeLang)} + showLineNumbers={false} + wrapLines + /> + + ))} +
    + + )} +
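For a flow with a single Pinecone store, the JavaScript tab rendered above ends up showing roughly the snippet below. The overrideConfig body is filled in by getConfigExamplesForJS, and a stopNodeId field is added when the flow has more than one vector store branch; the base URL, chatflow id and override key here are placeholders:

    const chatflowid = 'my-chatflow-id' // placeholder

    async function query(data) {
        const response = await fetch(`http://localhost:3000/api/v1/vector/upsert/${chatflowid}`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify(data)
        })
        return response.json()
    }

    // With multiple vector store branches the body would also carry
    // "stopNodeId": "<vector-store-node-id>"
    query({ overrideConfig: { pineconeIndex: 'example' } }).then((response) => console.log(response))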
    +
    + {loading && } + {!loading && ( + + )} +
    +
    + ) + })} +
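Clicking Upsert above goes through the new vectorstore api module; stripped of the snackbar handling, the call made by onUpsertClicked is essentially the one below (ids are placeholders):

    import vectorstoreApi from 'api/vectorstore'

    const chatflowid = 'my-chatflow-id'      // placeholder
    const vectorStoreNodeId = 'pinecone_0'   // placeholder: id of the vector store node to upsert

    // stopNodeId limits the upsert to the branch ending at that node; the floating
    // VectorStorePopUp button below sends an empty body and upserts the whole flow.
    vectorstoreApi.upsertVectorStore(chatflowid, { stopNodeId: vectorStoreNodeId }).then(() => {
        console.log('vector store upserted')
    })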
    +
    +
    + ) : null + + return createPortal(component, portalElement) +} + +VectorStoreDialog.propTypes = { + show: PropTypes.bool, + dialogProps: PropTypes.object, + onCancel: PropTypes.func +} + +export default VectorStoreDialog diff --git a/packages/ui/src/views/vectorstore/VectorStorePopUp.js b/packages/ui/src/views/vectorstore/VectorStorePopUp.js new file mode 100644 index 00000000000..2e23e69c5f4 --- /dev/null +++ b/packages/ui/src/views/vectorstore/VectorStorePopUp.js @@ -0,0 +1,114 @@ +import { useState, useRef, useEffect } from 'react' +import { useDispatch } from 'react-redux' +import PropTypes from 'prop-types' + +import { Button } from '@mui/material' +import { IconDatabaseImport, IconX } from '@tabler/icons' + +// project import +import { StyledFab } from 'ui-component/button/StyledFab' +import VectorStoreDialog from './VectorStoreDialog' + +// api +import vectorstoreApi from 'api/vectorstore' + +// Hooks +import useNotifier from 'utils/useNotifier' + +// Const +import { enqueueSnackbar as enqueueSnackbarAction, closeSnackbar as closeSnackbarAction } from 'store/actions' + +export const VectorStorePopUp = ({ chatflowid }) => { + const dispatch = useDispatch() + + useNotifier() + const enqueueSnackbar = (...args) => dispatch(enqueueSnackbarAction(...args)) + const closeSnackbar = (...args) => dispatch(closeSnackbarAction(...args)) + + const [open, setOpen] = useState(false) + const [showExpandDialog, setShowExpandDialog] = useState(false) + const [expandDialogProps, setExpandDialogProps] = useState({}) + + const anchorRef = useRef(null) + const prevOpen = useRef(open) + + const handleToggle = () => { + setOpen((prevopen) => !prevopen) + const props = { + open: true, + title: 'Upsert Vector Store', + chatflowid + } + setExpandDialogProps(props) + setShowExpandDialog(true) + } + + const onUpsert = async () => { + try { + await vectorstoreApi.upsertVectorStore(chatflowid, {}) + enqueueSnackbar({ + message: 'Succesfully upserted vector store', + options: { + key: new Date().getTime() + Math.random(), + variant: 'success', + action: (key) => ( + + ) + } + }) + } catch (error) { + const errorData = error.response.data || `${error.response.status}: ${error.response.statusText}` + enqueueSnackbar({ + message: errorData, + options: { + key: new Date().getTime() + Math.random(), + variant: 'error', + persist: true, + action: (key) => ( + + ) + } + }) + } + } + + useEffect(() => { + if (prevOpen.current === true && open === false) { + anchorRef.current.focus() + } + prevOpen.current = open + + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [open, chatflowid]) + + return ( + <> + + {open ? 
: } + + { + setShowExpandDialog(false) + setOpen((prevopen) => !prevopen) + }} + > + + ) +} + +VectorStorePopUp.propTypes = { chatflowid: PropTypes.string } From 7d13b6323fe426ccf209e6c1f11646c363bc98d1 Mon Sep 17 00:00:00 2001 From: Henry Date: Wed, 22 Nov 2023 19:48:01 +0000 Subject: [PATCH 2/4] fix namings, update description, show badge and node info --- .../retrievers/HydeRetriever/HydeRetriever.ts | 2 +- .../nodes/vectorstores/Chroma/Chroma.ts | 2 +- .../Elasticsearch/Elasticsearch.ts | 3 +- .../nodes/vectorstores/Faiss/Faiss.ts | 2 +- .../nodes/vectorstores/Milvus/Milvus.ts | 4 +- .../vectorstores/OpenSearch/OpenSearch.ts | 2 +- .../nodes/vectorstores/Pinecone/Pinecone.ts | 2 +- .../nodes/vectorstores/Postgres/Postgres.ts | 2 +- .../nodes/vectorstores/Qdrant/Qdrant.ts | 3 +- .../nodes/vectorstores/Redis/Redis.ts | 3 +- .../vectorstores/Singlestore/Singlestore.ts | 7 +- .../nodes/vectorstores/Supabase/Supabase.ts | 6 +- .../nodes/vectorstores/Vectara/Vectara.ts | 6 +- .../nodes/vectorstores/Weaviate/Weaviate.ts | 3 +- .../components/nodes/vectorstores/Zep/Zep.ts | 3 +- .../marketplaces/chatflows/AutoGPT.json | 2 +- .../marketplaces/chatflows/BabyAGI.json | 2 +- .../Conversational Retrieval Agent.json | 2 +- .../Conversational Retrieval QA Chain.json | 2 +- .../marketplaces/chatflows/Local QnA.json | 2 +- .../chatflows/Long Term Memory.json | 2 +- .../chatflows/Metadata Filter.json | 2 +- .../chatflows/Multi Retrieval QA Chain.json | 6 +- .../chatflows/Multiple VectorDB.json | 4 +- .../Prompt Chaining with VectorStore.json | 2 +- .../chatflows/Vectara LLM Chain Upload.json | 2 +- .../marketplaces/chatflows/WebPage QnA.json | 2 +- .../src/ui-component/dialog/NodeInfoDialog.js | 26 ++ packages/ui/src/views/canvas/AddNodes.js | 225 +++++++++++------- packages/ui/src/views/canvas/CanvasNode.js | 6 +- 30 files changed, 214 insertions(+), 123 deletions(-) diff --git a/packages/components/nodes/retrievers/HydeRetriever/HydeRetriever.ts b/packages/components/nodes/retrievers/HydeRetriever/HydeRetriever.ts index 2baf677ebb2..9ec7ada0c96 100644 --- a/packages/components/nodes/retrievers/HydeRetriever/HydeRetriever.ts +++ b/packages/components/nodes/retrievers/HydeRetriever/HydeRetriever.ts @@ -104,7 +104,7 @@ class HydeRetriever_Retrievers implements INode { const promptKey = nodeData.inputs?.promptKey as PromptKey const customPrompt = nodeData.inputs?.customPrompt as string const topK = nodeData.inputs?.topK as string - const k = topK ? parseInt(topK, 10) : 4 + const k = topK ? 
parseFloat(topK) : 4 const obj: HydeRetrieverOptions = { llm, diff --git a/packages/components/nodes/vectorstores/Chroma/Chroma.ts b/packages/components/nodes/vectorstores/Chroma/Chroma.ts index eef2db79df7..6e1cfa67918 100644 --- a/packages/components/nodes/vectorstores/Chroma/Chroma.ts +++ b/packages/components/nodes/vectorstores/Chroma/Chroma.ts @@ -27,7 +27,7 @@ class Chroma_VectorStores implements INode { this.type = 'Chroma' this.icon = 'chroma.svg' this.category = 'Vector Stores' - this.description = 'Upsert or Load data to Chroma Vector Database' + this.description = 'Upsert embedded data and perform similarity search upon query using Chroma, an open-source embedding database' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] this.badge = 'NEW' this.credential = { diff --git a/packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts b/packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts index e7915b3e62e..5f3cf20669a 100644 --- a/packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts +++ b/packages/components/nodes/vectorstores/Elasticsearch/Elasticsearch.ts @@ -24,7 +24,8 @@ class Elasticsearch_VectorStores implements INode { this.label = 'Elasticsearch' this.name = 'elasticsearch' this.version = 1.0 - this.description = 'Upsert or Load data to Elasticsearch Vector Database' + this.description = + 'Upsert embedded data and perform similarity search upon query using Elasticsearch, a distributed search and analytics engine' this.type = 'Elasticsearch' this.icon = 'elasticsearch.png' this.category = 'Vector Stores' diff --git a/packages/components/nodes/vectorstores/Faiss/Faiss.ts b/packages/components/nodes/vectorstores/Faiss/Faiss.ts index e2c1512e951..4120a57e26c 100644 --- a/packages/components/nodes/vectorstores/Faiss/Faiss.ts +++ b/packages/components/nodes/vectorstores/Faiss/Faiss.ts @@ -25,7 +25,7 @@ class Faiss_VectorStores implements INode { this.type = 'Faiss' this.icon = 'faiss.svg' this.category = 'Vector Stores' - this.description = 'Upsert or Load data to Faiss Vector Store' + this.description = 'Upsert embedded data and perform similarity search upon query using Faiss library from Meta' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] this.badge = 'NEW' this.inputs = [ diff --git a/packages/components/nodes/vectorstores/Milvus/Milvus.ts b/packages/components/nodes/vectorstores/Milvus/Milvus.ts index b937be1e7e5..090f35f7497 100644 --- a/packages/components/nodes/vectorstores/Milvus/Milvus.ts +++ b/packages/components/nodes/vectorstores/Milvus/Milvus.ts @@ -31,7 +31,7 @@ class Milvus_VectorStores implements INode { this.type = 'Milvus' this.icon = 'milvus.svg' this.category = 'Vector Stores' - this.description = 'Upsert or Load data to Milvus Vector Database' + this.description = `Upsert embedded data and perform similarity search upon query using Milvus, world's most advanced open-source vector database` this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] this.badge = 'NEW' this.credential = { @@ -159,7 +159,7 @@ class Milvus_VectorStores implements INode { const output = nodeData.outputs?.output as string // format data - const k = topK ? parseInt(topK, 10) : 4 + const k = topK ? parseFloat(topK) : 4 // credential const credentialData = await getCredentialData(nodeData.credential ?? 
'', options) diff --git a/packages/components/nodes/vectorstores/OpenSearch/OpenSearch.ts b/packages/components/nodes/vectorstores/OpenSearch/OpenSearch.ts index 66f04143b19..e3e18ce1428 100644 --- a/packages/components/nodes/vectorstores/OpenSearch/OpenSearch.ts +++ b/packages/components/nodes/vectorstores/OpenSearch/OpenSearch.ts @@ -26,7 +26,7 @@ class OpenSearch_VectorStores implements INode { this.type = 'OpenSearch' this.icon = 'opensearch.png' this.category = 'Vector Stores' - this.description = 'Upsert or Load data to OpenSearch Vector Database' + this.description = `Upsert embedded data and perform similarity search upon query using OpenSearch, an open-source, all-in-one vector database` this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] this.badge = 'NEW' this.inputs = [ diff --git a/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts b/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts index e4ef9fb755f..4ece4720a35 100644 --- a/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts +++ b/packages/components/nodes/vectorstores/Pinecone/Pinecone.ts @@ -27,7 +27,7 @@ class Pinecone_VectorStores implements INode { this.type = 'Pinecone' this.icon = 'pinecone.png' this.category = 'Vector Stores' - this.description = 'Upsert or Load data to Pinecone Vector Database' + this.description = `Upsert embedded data and perform similarity search upon query using Pinecone, a leading fully managed hosted vector database` this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] this.badge = 'NEW' this.credential = { diff --git a/packages/components/nodes/vectorstores/Postgres/Postgres.ts b/packages/components/nodes/vectorstores/Postgres/Postgres.ts index 0609d0b5750..ac4b80c3c1b 100644 --- a/packages/components/nodes/vectorstores/Postgres/Postgres.ts +++ b/packages/components/nodes/vectorstores/Postgres/Postgres.ts @@ -28,7 +28,7 @@ class Postgres_VectorStores implements INode { this.type = 'Postgres' this.icon = 'postgres.svg' this.category = 'Vector Stores' - this.description = 'Upsert or Load data to Postgres using pgvector' + this.description = 'Upsert embedded data and perform similarity search upon query using pgvector on Postgres' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] this.badge = 'NEW' this.credential = { diff --git a/packages/components/nodes/vectorstores/Qdrant/Qdrant.ts b/packages/components/nodes/vectorstores/Qdrant/Qdrant.ts index 6de2c1869cc..6413f8bf87e 100644 --- a/packages/components/nodes/vectorstores/Qdrant/Qdrant.ts +++ b/packages/components/nodes/vectorstores/Qdrant/Qdrant.ts @@ -30,7 +30,8 @@ class Qdrant_VectorStores implements INode { this.type = 'Qdrant' this.icon = 'qdrant.png' this.category = 'Vector Stores' - this.description = 'Upsert or Load data to Qdrant Vector Database' + this.description = + 'Upsert embedded data and perform similarity search upon query using Qdrant, a scalable open source vector database written in Rust' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] this.badge = 'NEW' this.credential = { diff --git a/packages/components/nodes/vectorstores/Redis/Redis.ts b/packages/components/nodes/vectorstores/Redis/Redis.ts index d857e225612..dc993b86699 100644 --- a/packages/components/nodes/vectorstores/Redis/Redis.ts +++ b/packages/components/nodes/vectorstores/Redis/Redis.ts @@ -25,7 +25,8 @@ class Redis_VectorStores implements INode { this.label = 'Redis' this.name = 'redis' this.version = 1.0 - this.description = 'Upsert or Load data to 
Redis' + this.description = + 'Upsert embedded data and perform similarity search upon query using Redis, an open source, in-memory data structure store' this.type = 'Redis' this.icon = 'redis.svg' this.category = 'Vector Stores' diff --git a/packages/components/nodes/vectorstores/Singlestore/Singlestore.ts b/packages/components/nodes/vectorstores/Singlestore/Singlestore.ts index 3597f41e1e7..d16252ac9bd 100644 --- a/packages/components/nodes/vectorstores/Singlestore/Singlestore.ts +++ b/packages/components/nodes/vectorstores/Singlestore/Singlestore.ts @@ -26,7 +26,8 @@ class SingleStore_VectorStores implements INode { this.type = 'SingleStore' this.icon = 'singlestore.svg' this.category = 'Vector Stores' - this.description = 'Upsert or Load data to SingleStore Vector Database' + this.description = + 'Upsert embedded data and perform similarity search upon query using SingleStore, a fast and distributed cloud relational database' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] this.badge = 'NEW' this.credential = { @@ -180,9 +181,7 @@ class SingleStore_VectorStores implements INode { const topK = nodeData.inputs?.topK as string const k = topK ? parseFloat(topK) : 4 - let vectorStore: SingleStoreVectorStore - - vectorStore = new SingleStoreVectorStore(embeddings, singleStoreConnectionConfig) + const vectorStore = new SingleStoreVectorStore(embeddings, singleStoreConnectionConfig) if (output === 'retriever') { const retriever = vectorStore.asRetriever(k) diff --git a/packages/components/nodes/vectorstores/Supabase/Supabase.ts b/packages/components/nodes/vectorstores/Supabase/Supabase.ts index a7de3211842..13840ab7894 100644 --- a/packages/components/nodes/vectorstores/Supabase/Supabase.ts +++ b/packages/components/nodes/vectorstores/Supabase/Supabase.ts @@ -27,7 +27,7 @@ class Supabase_VectorStores implements INode { this.type = 'Supabase' this.icon = 'supabase.svg' this.category = 'Vector Stores' - this.description = 'Upsert or Load data to Supabase using pgvector' + this.description = 'Upsert embedded data and perform similarity search upon query using Supabase via pgvector extension' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] this.badge = 'NEW' this.credential = { @@ -112,7 +112,9 @@ class Supabase_VectorStores implements INode { const flattenDocs = docs && docs.length ? flatten(docs) : [] const finalDocs = [] for (let i = 0; i < flattenDocs.length; i += 1) { - finalDocs.push(new Document(flattenDocs[i])) + if (flattenDocs[i] && flattenDocs[i].pageContent) { + finalDocs.push(new Document(flattenDocs[i])) + } } try { diff --git a/packages/components/nodes/vectorstores/Vectara/Vectara.ts b/packages/components/nodes/vectorstores/Vectara/Vectara.ts index f12dc4a223a..7460c5864e7 100644 --- a/packages/components/nodes/vectorstores/Vectara/Vectara.ts +++ b/packages/components/nodes/vectorstores/Vectara/Vectara.ts @@ -26,7 +26,7 @@ class Vectara_VectorStores implements INode { this.type = 'Vectara' this.icon = 'vectara.png' this.category = 'Vector Stores' - this.description = 'Upsert or Load data to Vectara Vector Database' + this.description = 'Upsert embedded data and perform similarity search upon query using Vectara, a LLM-powered search-as-a-service' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] this.badge = 'NEW' this.credential = { @@ -65,6 +65,7 @@ class Vectara_VectorStores implements INode { name: 'sentencesBefore', description: 'Number of sentences to fetch before the matched sentence. 
Defaults to 2.', type: 'number', + default: 2, additionalParams: true, optional: true }, @@ -73,6 +74,7 @@ class Vectara_VectorStores implements INode { name: 'sentencesAfter', description: 'Number of sentences to fetch after the matched sentence. Defaults to 2.', type: 'number', + default: 2, additionalParams: true, optional: true }, @@ -189,7 +191,7 @@ class Vectara_VectorStores implements INode { const lambda = nodeData.inputs?.lambda as number const output = nodeData.outputs?.output as string const topK = nodeData.inputs?.topK as string - const k = topK ? parseInt(topK, 10) : 4 + const k = topK ? parseFloat(topK) : 4 const vectaraArgs: VectaraLibArgs = { apiKey: apiKey, diff --git a/packages/components/nodes/vectorstores/Weaviate/Weaviate.ts b/packages/components/nodes/vectorstores/Weaviate/Weaviate.ts index e54d122b22f..5c31c7371e7 100644 --- a/packages/components/nodes/vectorstores/Weaviate/Weaviate.ts +++ b/packages/components/nodes/vectorstores/Weaviate/Weaviate.ts @@ -27,7 +27,8 @@ class Weaviate_VectorStores implements INode { this.type = 'Weaviate' this.icon = 'weaviate.png' this.category = 'Vector Stores' - this.description = 'Upsert or Load data to Weaviate Vector Database' + this.description = + 'Upsert embedded data and perform similarity search upon query using Weaviate, a scalable open-source vector database' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] this.badge = 'NEW' this.credential = { diff --git a/packages/components/nodes/vectorstores/Zep/Zep.ts b/packages/components/nodes/vectorstores/Zep/Zep.ts index ce863a9ee5b..21c885b427b 100644 --- a/packages/components/nodes/vectorstores/Zep/Zep.ts +++ b/packages/components/nodes/vectorstores/Zep/Zep.ts @@ -27,7 +27,8 @@ class Zep_VectorStores implements INode { this.type = 'Zep' this.icon = 'zep.png' this.category = 'Vector Stores' - this.description = 'Upsert or Load data to Zep Vector Database' + this.description = + 'Upsert embedded data and perform similarity search upon query using Zep, a fast and scalable building block for LLM apps' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] this.badge = 'NEW' this.credential = { diff --git a/packages/server/marketplaces/chatflows/AutoGPT.json b/packages/server/marketplaces/chatflows/AutoGPT.json index 5f388da6123..150fe17eb24 100644 --- a/packages/server/marketplaces/chatflows/AutoGPT.json +++ b/packages/server/marketplaces/chatflows/AutoGPT.json @@ -511,7 +511,7 @@ "type": "Pinecone", "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert or Load data to Pinecone Vector Database", + "description": "Upsert embedded data and perform similarity search upon query using Pinecone, a leading fully managed hosted vector database", "inputParams": [ { "label": "Connect Credential", diff --git a/packages/server/marketplaces/chatflows/BabyAGI.json b/packages/server/marketplaces/chatflows/BabyAGI.json index 211e9e42c5c..ab387205e4a 100644 --- a/packages/server/marketplaces/chatflows/BabyAGI.json +++ b/packages/server/marketplaces/chatflows/BabyAGI.json @@ -166,7 +166,7 @@ "type": "Pinecone", "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert or Load data to Pinecone Vector Database", + "description": "Upsert embedded data and perform similarity search upon query using Pinecone, a leading fully managed hosted vector database", "inputParams": [ { "label": "Connect Credential", diff --git 
a/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json b/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json index dd4cf3b16ea..aafc8e8e2b9 100644 --- a/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json +++ b/packages/server/marketplaces/chatflows/Conversational Retrieval Agent.json @@ -301,7 +301,7 @@ "type": "Pinecone", "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert or Load data to Pinecone Vector Database", + "description": "Upsert embedded data and perform similarity search upon query using Pinecone, a leading fully managed hosted vector database", "inputParams": [ { "label": "Connect Credential", diff --git a/packages/server/marketplaces/chatflows/Conversational Retrieval QA Chain.json b/packages/server/marketplaces/chatflows/Conversational Retrieval QA Chain.json index e775846cc2d..5c55d8332ee 100644 --- a/packages/server/marketplaces/chatflows/Conversational Retrieval QA Chain.json +++ b/packages/server/marketplaces/chatflows/Conversational Retrieval QA Chain.json @@ -553,7 +553,7 @@ "type": "Pinecone", "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert or Load data to Pinecone Vector Database", + "description": "Upsert embedded data and perform similarity search upon query using Pinecone, a leading fully managed hosted vector database", "inputParams": [ { "label": "Connect Credential", diff --git a/packages/server/marketplaces/chatflows/Local QnA.json b/packages/server/marketplaces/chatflows/Local QnA.json index fcf8593cb45..e24ad7cafec 100644 --- a/packages/server/marketplaces/chatflows/Local QnA.json +++ b/packages/server/marketplaces/chatflows/Local QnA.json @@ -555,7 +555,7 @@ "type": "Faiss", "baseClasses": ["Faiss", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert or Load data to Faiss Vector Store", + "description": "Upsert embedded data and perform similarity search upon query using Faiss library from Meta", "inputParams": [ { "label": "Base Path to load", diff --git a/packages/server/marketplaces/chatflows/Long Term Memory.json b/packages/server/marketplaces/chatflows/Long Term Memory.json index f5ff2dca9a8..c508b4807e0 100644 --- a/packages/server/marketplaces/chatflows/Long Term Memory.json +++ b/packages/server/marketplaces/chatflows/Long Term Memory.json @@ -351,7 +351,7 @@ "type": "Qdrant", "baseClasses": ["Qdrant", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert or Load data to Qdrant Vector Database", + "description": "Upsert embedded data and perform similarity search upon query using Qdrant, a scalable open source vector database written in Rust", "inputParams": [ { "label": "Connect Credential", diff --git a/packages/server/marketplaces/chatflows/Metadata Filter.json b/packages/server/marketplaces/chatflows/Metadata Filter.json index f594a2b6cf0..9865ae70173 100644 --- a/packages/server/marketplaces/chatflows/Metadata Filter.json +++ b/packages/server/marketplaces/chatflows/Metadata Filter.json @@ -634,7 +634,7 @@ "type": "Pinecone", "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert or Load data to Pinecone Vector Database", + "description": "Upsert embedded data and perform similarity search upon query using Pinecone, a leading fully managed hosted vector database", "inputParams": 
[ { "label": "Connect Credential", diff --git a/packages/server/marketplaces/chatflows/Multi Retrieval QA Chain.json b/packages/server/marketplaces/chatflows/Multi Retrieval QA Chain.json index 9032122bfa0..5388d96579e 100644 --- a/packages/server/marketplaces/chatflows/Multi Retrieval QA Chain.json +++ b/packages/server/marketplaces/chatflows/Multi Retrieval QA Chain.json @@ -560,7 +560,7 @@ "type": "Pinecone", "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert or Load data to Pinecone Vector Database", + "description": "Upsert embedded data and perform similarity search upon query using Pinecone, a leading fully managed hosted vector database", "inputParams": [ { "label": "Connect Credential", @@ -678,7 +678,7 @@ "type": "Chroma", "baseClasses": ["Chroma", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert or Load data to Chroma Vector Database", + "description": "Upsert embedded data and perform similarity search upon query using Chroma, an open-source embedding database", "inputParams": [ { "label": "Connect Credential", @@ -796,7 +796,7 @@ "type": "Supabase", "baseClasses": ["Supabase", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert or Load data to Supabase using pgvector", + "description": "Upsert embedded data and perform similarity search upon query using Supabase via pgvector extension", "inputParams": [ { "label": "Connect Credential", diff --git a/packages/server/marketplaces/chatflows/Multiple VectorDB.json b/packages/server/marketplaces/chatflows/Multiple VectorDB.json index 723b510e6dc..e771861609d 100644 --- a/packages/server/marketplaces/chatflows/Multiple VectorDB.json +++ b/packages/server/marketplaces/chatflows/Multiple VectorDB.json @@ -634,7 +634,7 @@ "type": "Redis", "baseClasses": ["Redis", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert or Load data to Redis", + "description": "Upsert embedded data and perform similarity search upon query using Redis, an open source, in-memory data structure store", "inputParams": [ { "label": "Connect Credential", @@ -776,7 +776,7 @@ "type": "Faiss", "baseClasses": ["Faiss", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert or Load data to Faiss Vector Store", + "description": "Upsert embedded data and perform similarity search upon query using Faiss library from Meta", "inputParams": [ { "label": "Base Path to load", diff --git a/packages/server/marketplaces/chatflows/Prompt Chaining with VectorStore.json b/packages/server/marketplaces/chatflows/Prompt Chaining with VectorStore.json index fca62a76fef..0ddec74fd32 100644 --- a/packages/server/marketplaces/chatflows/Prompt Chaining with VectorStore.json +++ b/packages/server/marketplaces/chatflows/Prompt Chaining with VectorStore.json @@ -792,7 +792,7 @@ "type": "SingleStore", "baseClasses": ["SingleStore", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert or Load data to SingleStore Vector Database", + "description": "Upsert embedded data and perform similarity search upon query using SingleStore, a fast and distributed cloud relational database", "inputParams": [ { "label": "Connect Credential", diff --git a/packages/server/marketplaces/chatflows/Vectara LLM Chain Upload.json b/packages/server/marketplaces/chatflows/Vectara LLM Chain Upload.json index 4f35bd4cc32..d9f9fb49c34 
100644 --- a/packages/server/marketplaces/chatflows/Vectara LLM Chain Upload.json +++ b/packages/server/marketplaces/chatflows/Vectara LLM Chain Upload.json @@ -305,7 +305,7 @@ "type": "Vectara", "baseClasses": ["Vectara", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert or Load data to Vectara Vector Database", + "description": "Upsert embedded data and perform similarity search upon query using Vectara, a LLM-powered search-as-a-service", "inputParams": [ { "label": "Connect Credential", diff --git a/packages/server/marketplaces/chatflows/WebPage QnA.json b/packages/server/marketplaces/chatflows/WebPage QnA.json index da05721b9f1..9b1119b9015 100644 --- a/packages/server/marketplaces/chatflows/WebPage QnA.json +++ b/packages/server/marketplaces/chatflows/WebPage QnA.json @@ -654,7 +654,7 @@ "type": "Pinecone", "baseClasses": ["Pinecone", "VectorStoreRetriever", "BaseRetriever"], "category": "Vector Stores", - "description": "Upsert or Load data to Pinecone Vector Database", + "description": "Upsert embedded data and perform similarity search upon query using Pinecone, a leading fully managed hosted vector database", "inputParams": [ { "label": "Connect Credential", diff --git a/packages/ui/src/ui-component/dialog/NodeInfoDialog.js b/packages/ui/src/ui-component/dialog/NodeInfoDialog.js index 5abdb035723..6f3bec5de7a 100644 --- a/packages/ui/src/ui-component/dialog/NodeInfoDialog.js +++ b/packages/ui/src/ui-component/dialog/NodeInfoDialog.js @@ -106,6 +106,32 @@ const NodeInfoDialog = ({ show, dialogProps, onCancel }) => { version {dialogProps.data.version} )} + {dialogProps.data.badge && ( +
    + + {dialogProps.data.badge} + +
    + )} diff --git a/packages/ui/src/views/canvas/AddNodes.js b/packages/ui/src/views/canvas/AddNodes.js index e0e639d14a7..44030d0eabf 100644 --- a/packages/ui/src/views/canvas/AddNodes.js +++ b/packages/ui/src/views/canvas/AddNodes.js @@ -57,6 +57,22 @@ const AddNodes = ({ nodesData, node }) => { const prevOpen = useRef(open) const ps = useRef() + // Temporary method to handle Deprecating Vector Store and New ones + const categorizeVectorStores = (nodes) => { + const obj = { ...nodes } + const vsNodes = obj['Vector Stores'] ?? [] + const deprecatingNodes = [] + const newNodes = [] + for (const vsNode of vsNodes) { + if (vsNode.badge === 'DEPRECATING') deprecatingNodes.push(vsNode) + else newNodes.push(vsNode) + } + delete obj['Vector Stores'] + obj['Vector Stores;DEPRECATING'] = deprecatingNodes + obj['Vector Stores;NEW'] = newNodes + setNodes(obj) + } + const scrollTop = () => { const curr = ps.current if (curr) { @@ -96,6 +112,7 @@ const AddNodes = ({ nodesData, node }) => { return r }, Object.create(null)) setNodes(result) + categorizeVectorStores(result) setCategoryExpanded(accordianCategories) } @@ -138,6 +155,8 @@ const AddNodes = ({ nodesData, node }) => { groupByCategory(nodesData) dispatch({ type: SET_COMPONENT_NODES, componentNodes: nodesData }) } + + // eslint-disable-next-line react-hooks/exhaustive-deps }, [nodesData, dispatch]) return ( @@ -250,99 +269,135 @@ const AddNodes = ({ nodesData, node }) => { > {Object.keys(nodes) .sort() - .map((category) => ( - - } - aria-controls={`nodes-accordian-${category}`} - id={`nodes-accordian-header-${category}`} + .map((category) => + category === 'Vector Stores' ? ( + <> + ) : ( + - {category} - - - {nodes[category].map((node, index) => ( -
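categorizeVectorStores above only reshapes the grouped node map so the two flavours of vector store nodes render under separate accordion headers. Roughly, with illustrative node names and badges:

    // Input: the category map produced by groupByCategory
    const grouped = {
        'Vector Stores': [
            { name: 'pinecone', label: 'Pinecone', badge: 'NEW' },
            { name: 'pineconeUpsert', label: 'Pinecone Upsert Document', badge: 'DEPRECATING' }
        ]
    }

    // State written by categorizeVectorStores(grouped): the original key is removed and
    // replaced by one key per flavour; the header later splits the key on ';' to show the badge.
    const expected = {
        'Vector Stores;DEPRECATING': [{ name: 'pineconeUpsert', label: 'Pinecone Upsert Document', badge: 'DEPRECATING' }],
        'Vector Stores;NEW': [{ name: 'pinecone', label: 'Pinecone', badge: 'NEW' }]
    }
    console.log(grouped, expected)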
    onDragStart(event, node)} - draggable - > - } + aria-controls={`nodes-accordian-${category}`} + id={`nodes-accordian-header-${category}`} + > + {category.split(';').length > 1 ? ( +
    - - -
    - {node.name} -
    -
    - {category.split(';')[0]} +   + +
    + ) : ( + {category} + )} + + + {nodes[category].map((node, index) => ( +
    onDragStart(event, node)} + draggable + > + + +
    - {node.label} -   - {node.badge && ( - - )} + {node.name}
    - } - secondary={node.description} - /> -
    -
    - {index === nodes[category].length - 1 ? null : } -
    - ))} -
    - - ))} + + + {node.label} +   + {node.badge && ( + + )} +
    + } + secondary={node.description} + /> + + + {index === nodes[category].length - 1 ? null : } + + ))} +
    +
    + ) + )} diff --git a/packages/ui/src/views/canvas/CanvasNode.js b/packages/ui/src/views/canvas/CanvasNode.js index cabe2329176..4455afc0763 100644 --- a/packages/ui/src/views/canvas/CanvasNode.js +++ b/packages/ui/src/views/canvas/CanvasNode.js @@ -83,8 +83,10 @@ const CanvasNode = ({ data }) => { if (componentNode) { if (!data.version) { setWarningMessage(nodeVersionEmptyMessage(componentNode.version)) - } else { - if (componentNode.version > data.version) setWarningMessage(nodeOutdatedMessage(data.version, componentNode.version)) + } else if (data.version && componentNode.version > data.version) { + setWarningMessage(nodeOutdatedMessage(data.version, componentNode.version)) + } else if (componentNode.badge === 'DEPRECATING') { + setWarningMessage('This node will be deprecated in the next release. Change to a new node tagged with NEW') } } }, [canvas.componentNodes, data.name, data.version]) From d5af16fcfa38abe423daf9b6a76a4675dc3baba8 Mon Sep 17 00:00:00 2001 From: Henry Date: Wed, 22 Nov 2023 20:06:32 +0000 Subject: [PATCH 3/4] add mongodb atlas --- .../vectorstores/MongoDBAtlas/MongoDBAtlas.ts | 194 ++++++++++++++++++ .../MongoDBSearchBase.ts | 2 + .../MongoDB_Existing.ts | 0 .../MongoDB_Upsert.ts | 2 +- .../{MongoDB => MongoDBAtlas}/mongodb.png | Bin 5 files changed, 197 insertions(+), 1 deletion(-) create mode 100644 packages/components/nodes/vectorstores/MongoDBAtlas/MongoDBAtlas.ts rename packages/components/nodes/vectorstores/{MongoDB => MongoDBAtlas}/MongoDBSearchBase.ts (98%) rename packages/components/nodes/vectorstores/{MongoDB => MongoDBAtlas}/MongoDB_Existing.ts (100%) rename packages/components/nodes/vectorstores/{MongoDB => MongoDBAtlas}/MongoDB_Upsert.ts (97%) rename packages/components/nodes/vectorstores/{MongoDB => MongoDBAtlas}/mongodb.png (100%) diff --git a/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDBAtlas.ts b/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDBAtlas.ts new file mode 100644 index 00000000000..a0699f6bd7d --- /dev/null +++ b/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDBAtlas.ts @@ -0,0 +1,194 @@ +import { flatten } from 'lodash' +import { MongoClient } from 'mongodb' +import { MongoDBAtlasVectorSearch } from 'langchain/vectorstores/mongodb_atlas' +import { Embeddings } from 'langchain/embeddings/base' +import { Document } from 'langchain/document' +import { ICommonObject, INode, INodeData, INodeOutputsValue, INodeParams } from '../../../src/Interface' +import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils' + +class MongoDBAtlas_VectorStores implements INode { + label: string + name: string + version: number + description: string + type: string + icon: string + category: string + badge: string + baseClasses: string[] + inputs: INodeParams[] + credential: INodeParams + outputs: INodeOutputsValue[] + + constructor() { + this.label = 'MongoDB Atlas' + this.name = 'mongoDBAtlas' + this.version = 1.0 + this.description = `Upsert embedded data and perform similarity search upon query using MongoDB Atlas, a managed cloud mongodb database` + this.type = 'MongoDB Atlas' + this.icon = 'mongodb.png' + this.category = 'Vector Stores' + this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'NEW' + this.credential = { + label: 'Connect Credential', + name: 'credential', + type: 'credential', + credentialNames: ['mongoDBUrlApi'] + } + this.inputs = [ + { + label: 'Document', + name: 'document', + type: 'Document', + list: true, + optional: true + }, + { + 
label: 'Embeddings', + name: 'embeddings', + type: 'Embeddings' + }, + { + label: 'Database', + name: 'databaseName', + placeholder: '', + type: 'string' + }, + { + label: 'Collection Name', + name: 'collectionName', + placeholder: '', + type: 'string' + }, + { + label: 'Index Name', + name: 'indexName', + placeholder: '', + type: 'string' + }, + { + label: 'Content Field', + name: 'textKey', + description: 'Name of the field (column) that contains the actual content', + type: 'string', + default: 'text', + additionalParams: true, + optional: true + }, + { + label: 'Embedded Field', + name: 'embeddingKey', + description: 'Name of the field (column) that contains the Embedding', + type: 'string', + default: 'embedding', + additionalParams: true, + optional: true + }, + { + label: 'Top K', + name: 'topK', + description: 'Number of top results to fetch. Default to 4', + placeholder: '4', + type: 'number', + additionalParams: true, + optional: true + } + ] + this.outputs = [ + { + label: 'MongoDB Retriever', + name: 'retriever', + baseClasses: this.baseClasses + }, + { + label: 'MongoDB Vector Store', + name: 'vectorStore', + baseClasses: [this.type, ...getBaseClasses(MongoDBAtlasVectorSearch)] + } + ] + } + + //@ts-ignore + vectorStoreMethods = { + async upsert(nodeData: INodeData, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const databaseName = nodeData.inputs?.databaseName as string + const collectionName = nodeData.inputs?.collectionName as string + const indexName = nodeData.inputs?.indexName as string + let textKey = nodeData.inputs?.textKey as string + let embeddingKey = nodeData.inputs?.embeddingKey as string + const embeddings = nodeData.inputs?.embeddings as Embeddings + + let mongoDBConnectUrl = getCredentialParam('mongoDBConnectUrl', credentialData, nodeData) + + const docs = nodeData.inputs?.document as Document[] + + const flattenDocs = docs && docs.length ? flatten(docs) : [] + const finalDocs = [] + for (let i = 0; i < flattenDocs.length; i += 1) { + if (flattenDocs[i] && flattenDocs[i].pageContent) { + const document = new Document(flattenDocs[i]) + finalDocs.push(document) + } + } + + const mongoClient = new MongoClient(mongoDBConnectUrl) + const collection = mongoClient.db(databaseName).collection(collectionName) + + if (!textKey || textKey === '') textKey = 'text' + if (!embeddingKey || embeddingKey === '') embeddingKey = 'embedding' + + const mongoDBAtlasVectorSearch = new MongoDBAtlasVectorSearch(embeddings, { + collection, + indexName, + textKey, + embeddingKey + }) + + try { + await mongoDBAtlasVectorSearch.addDocuments(finalDocs) + } catch (e) { + throw new Error(e) + } + } + } + + async init(nodeData: INodeData, _: string, options: ICommonObject): Promise { + const credentialData = await getCredentialData(nodeData.credential ?? '', options) + const databaseName = nodeData.inputs?.databaseName as string + const collectionName = nodeData.inputs?.collectionName as string + const indexName = nodeData.inputs?.indexName as string + let textKey = nodeData.inputs?.textKey as string + let embeddingKey = nodeData.inputs?.embeddingKey as string + const embeddings = nodeData.inputs?.embeddings as Embeddings + const topK = nodeData.inputs?.topK as string + const k = topK ? 
parseFloat(topK) : 4 + const output = nodeData.outputs?.output as string + + let mongoDBConnectUrl = getCredentialParam('mongoDBConnectUrl', credentialData, nodeData) + + const mongoClient = new MongoClient(mongoDBConnectUrl) + const collection = mongoClient.db(databaseName).collection(collectionName) + + if (!textKey || textKey === '') textKey = 'text' + if (!embeddingKey || embeddingKey === '') embeddingKey = 'embedding' + + const vectorStore = new MongoDBAtlasVectorSearch(embeddings, { + collection, + indexName, + textKey, + embeddingKey + }) + + if (output === 'retriever') { + return vectorStore.asRetriever(k) + } else if (output === 'vectorStore') { + ;(vectorStore as any).k = k + return vectorStore + } + return vectorStore + } +} + +module.exports = { nodeClass: MongoDBAtlas_VectorStores } diff --git a/packages/components/nodes/vectorstores/MongoDB/MongoDBSearchBase.ts b/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDBSearchBase.ts similarity index 98% rename from packages/components/nodes/vectorstores/MongoDB/MongoDBSearchBase.ts rename to packages/components/nodes/vectorstores/MongoDBAtlas/MongoDBSearchBase.ts index e9ef8e9a164..95930e4a048 100644 --- a/packages/components/nodes/vectorstores/MongoDB/MongoDBSearchBase.ts +++ b/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDBSearchBase.ts @@ -22,6 +22,7 @@ export abstract class MongoDBSearchBase { type: string icon: string category: string + badge: string baseClasses: string[] inputs: INodeParams[] credential: INodeParams @@ -33,6 +34,7 @@ export abstract class MongoDBSearchBase { this.icon = 'mongodb.png' this.category = 'Vector Stores' this.baseClasses = [this.type, 'VectorStoreRetriever', 'BaseRetriever'] + this.badge = 'DEPRECATING' this.credential = { label: 'Connect Credential', name: 'credential', diff --git a/packages/components/nodes/vectorstores/MongoDB/MongoDB_Existing.ts b/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDB_Existing.ts similarity index 100% rename from packages/components/nodes/vectorstores/MongoDB/MongoDB_Existing.ts rename to packages/components/nodes/vectorstores/MongoDBAtlas/MongoDB_Existing.ts diff --git a/packages/components/nodes/vectorstores/MongoDB/MongoDB_Upsert.ts b/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDB_Upsert.ts similarity index 97% rename from packages/components/nodes/vectorstores/MongoDB/MongoDB_Upsert.ts rename to packages/components/nodes/vectorstores/MongoDBAtlas/MongoDB_Upsert.ts index 7d22f03526f..d9287243b64 100644 --- a/packages/components/nodes/vectorstores/MongoDB/MongoDB_Upsert.ts +++ b/packages/components/nodes/vectorstores/MongoDBAtlas/MongoDB_Upsert.ts @@ -10,7 +10,7 @@ import { MongoDBSearchBase } from './MongoDBSearchBase' class MongoDBUpsert_VectorStores extends MongoDBSearchBase implements INode { constructor() { super() - this.label = 'MongoDB Upsert Document' + this.label = 'MongoDB Atlas Upsert Document' this.name = 'MongoDBUpsert' this.version = 1.0 this.description = 'Upsert documents to MongoDB Atlas' diff --git a/packages/components/nodes/vectorstores/MongoDB/mongodb.png b/packages/components/nodes/vectorstores/MongoDBAtlas/mongodb.png similarity index 100% rename from packages/components/nodes/vectorstores/MongoDB/mongodb.png rename to packages/components/nodes/vectorstores/MongoDBAtlas/mongodb.png From db2a1d3be8d4047d70ddc33083da4d5bb668bae5 Mon Sep 17 00:00:00 2001 From: Henry Date: Wed, 22 Nov 2023 23:07:25 +0000 Subject: [PATCH 4/4] expand accordian upon filter search --- 
packages/ui/src/views/canvas/AddNodes.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/ui/src/views/canvas/AddNodes.js b/packages/ui/src/views/canvas/AddNodes.js index 44030d0eabf..0973cdda49c 100644 --- a/packages/ui/src/views/canvas/AddNodes.js +++ b/packages/ui/src/views/canvas/AddNodes.js @@ -58,7 +58,7 @@ const AddNodes = ({ nodesData, node }) => { const ps = useRef() // Temporary method to handle Deprecating Vector Store and New ones - const categorizeVectorStores = (nodes) => { + const categorizeVectorStores = (nodes, accordianCategories, isFilter) => { const obj = { ...nodes } const vsNodes = obj['Vector Stores'] ?? [] const deprecatingNodes = [] @@ -69,7 +69,9 @@ const AddNodes = ({ nodesData, node }) => { } delete obj['Vector Stores'] obj['Vector Stores;DEPRECATING'] = deprecatingNodes + accordianCategories['Vector Stores;DEPRECATING'] = isFilter ? true : false obj['Vector Stores;NEW'] = newNodes + accordianCategories['Vector Stores;NEW'] = isFilter ? true : false setNodes(obj) } @@ -112,7 +114,7 @@ const AddNodes = ({ nodesData, node }) => { return r }, Object.create(null)) setNodes(result) - categorizeVectorStores(result) + categorizeVectorStores(result, accordianCategories, isFilter) setCategoryExpanded(accordianCategories) }
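
Reviewer note: for anyone trying PATCH 3/4 outside the UI, below is a minimal standalone sketch of the same upsert-then-retrieve flow that `MongoDBAtlas_VectorStores.vectorStoreMethods.upsert` and `init` wrap. The connection string env var, database, collection, and index names, and the use of `OpenAIEmbeddings` are placeholders and not part of the patch; the Atlas Search index is assumed to already exist on the collection.

```typescript
// Sketch only: mirrors the node's upsert and retriever paths with hypothetical names.
import { MongoClient } from 'mongodb'
import { MongoDBAtlasVectorSearch } from 'langchain/vectorstores/mongodb_atlas'
import { OpenAIEmbeddings } from 'langchain/embeddings/openai'
import { Document } from 'langchain/document'

async function main() {
    // Assumed env var; the node gets this from the 'mongoDBUrlApi' credential instead.
    const client = new MongoClient(process.env.MONGODB_ATLAS_URI as string)
    const collection = client.db('flowise').collection('docs')

    const vectorStore = new MongoDBAtlasVectorSearch(new OpenAIEmbeddings(), {
        collection,
        indexName: 'vector_index', // must match the Atlas Search index name (node input: Index Name)
        textKey: 'text', // node input: Content Field, defaults to 'text'
        embeddingKey: 'embedding' // node input: Embedded Field, defaults to 'embedding'
    })

    // Upsert path: equivalent to vectorStoreMethods.upsert calling addDocuments
    await vectorStore.addDocuments([
        new Document({ pageContent: 'Flowise lets you build LLM flows visually', metadata: { source: 'readme' } })
    ])

    // Retrieval path: equivalent to init() when the selected output is 'retriever', with Top K = 4
    const retriever = vectorStore.asRetriever(4)
    const results = await retriever.getRelevantDocuments('What is Flowise?')
    console.log(results.map((d) => d.pageContent))

    await client.close() // the node keeps the client around; the sketch closes it explicitly
}

main().catch(console.error)
```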
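
One assumption worth calling out: the node only writes documents, it does not create the Atlas Search index that the Index Name input refers to. For the default field names above and 1536-dimension embeddings, the index definition would look roughly like the literal below (shown as a TypeScript constant for consistency; dimensions and similarity depend on your embedding model and are assumptions, not values from the patch). Create it in the Atlas UI or via the Atlas API before upserting.

```typescript
// Rough shape of the Atlas Search knnVector index the vector store expects (assumed values).
const atlasSearchIndexDefinition = {
    mappings: {
        dynamic: true,
        fields: {
            embedding: {
                type: 'knnVector',
                dimensions: 1536, // match your embedding model's output size
                similarity: 'cosine'
            }
        }
    }
}
```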
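
PATCH 4/4 threads `accordianCategories` and `isFilter` into `categorizeVectorStores` so that the synthetic `Vector Stores;DEPRECATING` and `Vector Stores;NEW` groups start expanded while the user is searching, and stay collapsed otherwise. A condensed sketch of that behaviour follows; the `NodeDef` type and the return value are simplifications (the real method calls `setNodes` and mutates component state), so treat it as an illustration rather than the shipped code.

```typescript
// Condensed sketch of the category split plus auto-expand behaviour from PATCH 4/4.
type NodeDef = { name: string; badge?: string }

function categorizeVectorStores(
    nodes: Record<string, NodeDef[]>,
    accordianCategories: Record<string, boolean>,
    isFilter: boolean
): Record<string, NodeDef[]> {
    const obj = { ...nodes }
    const vsNodes = obj['Vector Stores'] ?? []
    // Split the single category by the badge set on each node definition.
    obj['Vector Stores;DEPRECATING'] = vsNodes.filter((n) => n.badge === 'DEPRECATING')
    obj['Vector Stores;NEW'] = vsNodes.filter((n) => n.badge !== 'DEPRECATING')
    delete obj['Vector Stores']
    // When a filter term is active, open both accordions so matches are visible immediately.
    accordianCategories['Vector Stores;DEPRECATING'] = isFilter
    accordianCategories['Vector Stores;NEW'] = isFilter
    return obj
}
```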