
Commit 9520190

danny-avila and maxesse authored
📦 fix: npm warnings; chore: bump deprecated packages (#4707)
* chore: bump langchain deps to address vulnerability warnings
* chore: bump community package and install textsplitters package
* fix: update expected result in tokenSplit tests for accuracy
* chore: remove CodeSherpa tools
* chore: remove E2B tools and loadToolSuite
* chore: remove CodeBrew tool and update related references
* chore: remove HumanTool and ChatTool, update tool references
* chore: remove Zapier tool from manifest.json and update SerpAPI
* chore: remove basic tools
* chore: update import path for RecursiveCharacterTextSplitter
* chore: update import path for DynamicStructuredTool
* chore: remove extractionChain.js and update tool filtering logic
* chore: npm audit fix
* chore: bump google packages
* chore: update DALL-E tool to DALL-E-3 and adjust authentication logic
* ci: update message classes
* chore: elliptic npm audit fix
* chore: update CallbackManager import and remove deprecated tool handling logic
* chore: imports order
* chore: remove unused code

---------

Co-authored-by: Max Sanna <[email protected]>
1 parent d012da0 commit 9520190

40 files changed, +1551 -3214 lines changed
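Most of the churn in this commit is the same mechanical change repeated across files: deprecated import paths from the monolithic langchain package are replaced by their scoped @langchain/* equivalents. A minimal before/after sketch of the pattern, using only paths that appear in the diffs below (the grouping into one file is illustrative):

// Before: deprecated entry points in the monolithic `langchain` package
// const { ChatOpenAI } = require('langchain/chat_models/openai');
// const { PromptTemplate } = require('langchain/prompts');
// const { HumanMessage, AIMessage, SystemMessage } = require('langchain/schema');
// const { TokenTextSplitter } = require('langchain/text_splitter');

// After: scoped packages, installed and versioned separately
const { ChatOpenAI } = require('@langchain/openai');
const { PromptTemplate } = require('@langchain/core/prompts');
const { HumanMessage, AIMessage, SystemMessage } = require('@langchain/core/messages');
const { TokenTextSplitter } = require('@langchain/textsplitters');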

api/app/clients/GoogleClient.js

+3 -3

@@ -1,11 +1,11 @@
 const { google } = require('googleapis');
 const { Agent, ProxyAgent } = require('undici');
 const { ChatVertexAI } = require('@langchain/google-vertexai');
+const { GoogleVertexAI } = require('@langchain/google-vertexai');
+const { ChatGoogleVertexAI } = require('@langchain/google-vertexai');
 const { ChatGoogleGenerativeAI } = require('@langchain/google-genai');
 const { GoogleGenerativeAI: GenAI } = require('@google/generative-ai');
-const { GoogleVertexAI } = require('@langchain/community/llms/googlevertexai');
-const { ChatGoogleVertexAI } = require('langchain/chat_models/googlevertexai');
-const { AIMessage, HumanMessage, SystemMessage } = require('langchain/schema');
+const { AIMessage, HumanMessage, SystemMessage } = require('@langchain/core/messages');
 const { encoding_for_model: encodingForModel, get_encoding: getEncoding } = require('tiktoken');
 const {
   validateVisionModel,

api/app/clients/PluginsClient.js

+3 -6

@@ -1,14 +1,13 @@
 const OpenAIClient = require('./OpenAIClient');
-const { CallbackManager } = require('langchain/callbacks');
 const { CacheKeys, Time } = require('librechat-data-provider');
+const { CallbackManager } = require('@langchain/core/callbacks/manager');
 const { BufferMemory, ChatMessageHistory } = require('langchain/memory');
-const { initializeCustomAgent, initializeFunctionsAgent } = require('./agents');
 const { addImages, buildErrorInput, buildPromptPrefix } = require('./output_parsers');
+const { initializeCustomAgent, initializeFunctionsAgent } = require('./agents');
 const { processFileURL } = require('~/server/services/Files/process');
 const { EModelEndpoint } = require('librechat-data-provider');
 const { formatLangChainMessages } = require('./prompts');
 const checkBalance = require('~/models/checkBalance');
-const { SelfReflectionTool } = require('./tools');
 const { isEnabled } = require('~/server/utils');
 const { extractBaseURL } = require('~/utils');
 const { loadTools } = require('./tools/util');
@@ -122,9 +121,7 @@ class PluginsClient extends OpenAIClient {
       },
     });

-    if (this.tools.length > 0 && !this.functionsAgent) {
-      this.tools.push(new SelfReflectionTool({ message, isGpt3: false }));
-    } else if (this.tools.length === 0) {
+    if (this.tools.length === 0) {
       return;
     }
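PluginsClient.js now imports CallbackManager from @langchain/core/callbacks/manager rather than langchain/callbacks. A minimal sketch of the typical CallbackManager.fromHandlers usage with the new import; the handler bodies are illustrative assumptions, not the callbacks PluginsClient actually registers:

const { CallbackManager } = require('@langchain/core/callbacks/manager');

// Illustrative handlers only; PluginsClient wires up its own callback logic.
const callbackManager = CallbackManager.fromHandlers({
  handleLLMNewToken(token) {
    process.stdout.write(token); // stream tokens as they are generated
  },
  handleLLMError(err) {
    console.error('LLM error:', err);
  },
});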

api/app/clients/agents/CustomAgent/CustomAgent.js

+1 -1

@@ -1,5 +1,5 @@
 const { ZeroShotAgent } = require('langchain/agents');
-const { PromptTemplate, renderTemplate } = require('langchain/prompts');
+const { PromptTemplate, renderTemplate } = require('@langchain/core/prompts');
 const { gpt3, gpt4 } = require('./instructions');

 class CustomAgent extends ZeroShotAgent {

api/app/clients/agents/CustomAgent/initializeCustomAgent.js

+1 -1

@@ -7,7 +7,7 @@ const {
   ChatPromptTemplate,
   SystemMessagePromptTemplate,
   HumanMessagePromptTemplate,
-} = require('langchain/prompts');
+} = require('@langchain/core/prompts');

 const initializeCustomAgent = async ({
   tools,

api/app/clients/agents/Functions/FunctionsAgent.js

-122
This file was deleted.

api/app/clients/document/tokenSplit.js

+1 -1

@@ -1,4 +1,4 @@
-const { TokenTextSplitter } = require('langchain/text_splitter');
+const { TokenTextSplitter } = require('@langchain/textsplitters');

 /**
  * Splits a given text by token chunks, based on the provided parameters for the TokenTextSplitter.
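For reference, a minimal sketch of driving the relocated TokenTextSplitter; the constructor options and values here are illustrative assumptions, not the parameters tokenSplit.js actually passes through:

const { TokenTextSplitter } = require('@langchain/textsplitters');

// Hypothetical settings for illustration; tokenSplit.js derives its own from its arguments.
const splitter = new TokenTextSplitter({
  encodingName: 'cl100k_base', // tokenizer encoding (assumed)
  chunkSize: 10,               // tokens per chunk (assumed)
  chunkOverlap: 5,             // overlapping tokens between chunks (assumed)
});

// splitText is async and resolves to an array of string chunks.
splitter.splitText('Lorem ipsum dolor sit amet.').then((chunks) => console.log(chunks));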

api/app/clients/document/tokenSplit.spec.js

+1 -1

@@ -12,7 +12,7 @@ describe('tokenSplit', () => {
       returnSize: 5,
     });

-    expect(result).toEqual(['. Null', ' Nullam', 'am id', ' id.', '.']);
+    expect(result).toEqual(['it.', '. Null', ' Nullam', 'am id', ' id.']);
   });

   it('returns correct text chunks with default parameters', async () => {

api/app/clients/llm/createLLM.js

+1 -1

@@ -1,4 +1,4 @@
-const { ChatOpenAI } = require('langchain/chat_models/openai');
+const { ChatOpenAI } = require('@langchain/openai');
 const { sanitizeModelName, constructAzureURL } = require('~/utils');
 const { isEnabled } = require('~/server/utils');
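createLLM.js now pulls ChatOpenAI from the dedicated @langchain/openai package. A minimal sketch of constructing a chat model with the new import; the model name, temperature, and environment variable are placeholder assumptions rather than values taken from createLLM.js:

const { ChatOpenAI } = require('@langchain/openai');

// Placeholder configuration for illustration only.
const llm = new ChatOpenAI({
  modelName: 'gpt-4o-mini',                  // assumed model name
  temperature: 0.2,                          // assumed temperature
  openAIApiKey: process.env.OPENAI_API_KEY,  // assumed credential source
});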

api/app/clients/memory/summaryBuffer.demo.js

+1 -1

@@ -1,5 +1,5 @@
 require('dotenv').config();
-const { ChatOpenAI } = require('langchain/chat_models/openai');
+const { ChatOpenAI } = require('@langchain/openai');
 const { getBufferString, ConversationSummaryBufferMemory } = require('langchain/memory');

 const chatPromptMemory = new ConversationSummaryBufferMemory({

api/app/clients/prompts/formatAgentMessages.spec.js

+1 -1

@@ -1,6 +1,6 @@
 const { ToolMessage } = require('@langchain/core/messages');
 const { ContentTypes } = require('librechat-data-provider');
-const { HumanMessage, AIMessage, SystemMessage } = require('langchain/schema');
+const { HumanMessage, AIMessage, SystemMessage } = require('@langchain/core/messages');
 const { formatAgentMessages } = require('./formatMessages');

 describe('formatAgentMessages', () => {

api/app/clients/prompts/formatMessages.js

+1 -1

@@ -1,6 +1,6 @@
 const { ToolMessage } = require('@langchain/core/messages');
 const { EModelEndpoint, ContentTypes } = require('librechat-data-provider');
-const { HumanMessage, AIMessage, SystemMessage } = require('langchain/schema');
+const { HumanMessage, AIMessage, SystemMessage } = require('@langchain/core/messages');

 /**
  * Formats a message to OpenAI Vision API payload format.

api/app/clients/prompts/formatMessages.spec.js

+1 -1

@@ -1,5 +1,5 @@
 const { Constants } = require('librechat-data-provider');
-const { HumanMessage, AIMessage, SystemMessage } = require('langchain/schema');
+const { HumanMessage, AIMessage, SystemMessage } = require('@langchain/core/messages');
 const { formatMessage, formatLangChainMessages, formatFromLangChain } = require('./formatMessages');

 describe('formatMessage', () => {

api/app/clients/prompts/summaryPrompts.js

+1 -1

@@ -1,4 +1,4 @@
-const { PromptTemplate } = require('langchain/prompts');
+const { PromptTemplate } = require('@langchain/core/prompts');
 /*
  * Without `{summary}` and `{new_lines}`, token count is 98
  * We are counting this towards the max context tokens for summaries, +3 for the assistant label (101)
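The comment in summaryPrompts.js counts tokens around the {summary} and {new_lines} template variables. A minimal sketch of a PromptTemplate built from the new import path using those variable names; the template text itself is an illustrative assumption, not the actual summary prompt:

const { PromptTemplate } = require('@langchain/core/prompts');

// Illustrative template only; the real prompt lives in summaryPrompts.js.
const summaryPrompt = PromptTemplate.fromTemplate(
  'Current summary:\n{summary}\n\nNew lines of conversation:\n{new_lines}\n\nNew summary:',
);

summaryPrompt.format({ summary: '', new_lines: 'User: hello' }).then(console.log);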

api/app/clients/prompts/titlePrompts.js

+1 -1

@@ -2,7 +2,7 @@ const {
   ChatPromptTemplate,
   SystemMessagePromptTemplate,
   HumanMessagePromptTemplate,
-} = require('langchain/prompts');
+} = require('@langchain/core/prompts');

 const langPrompt = new ChatPromptTemplate({
   promptMessages: [

api/app/clients/specs/BaseClient.test.js

+1 -1

@@ -30,7 +30,7 @@ jest.mock('~/models', () => ({
   updateFileUsage: jest.fn(),
 }));

-jest.mock('langchain/chat_models/openai', () => {
+jest.mock('@langchain/openai', () => {
   return {
     ChatOpenAI: jest.fn().mockImplementation(() => {
       return {};

api/app/clients/specs/OpenAIClient.test.js

+1 -1

@@ -34,7 +34,7 @@ jest.mock('~/models', () => ({
   updateFileUsage: jest.fn(),
 }));

-jest.mock('langchain/chat_models/openai', () => {
+jest.mock('@langchain/openai', () => {
   return {
     ChatOpenAI: jest.fn().mockImplementation(() => {
       return {};

api/app/clients/specs/PluginsClient.test.js

+3 -3

@@ -1,6 +1,6 @@
 const crypto = require('crypto');
 const { Constants } = require('librechat-data-provider');
-const { HumanChatMessage, AIChatMessage } = require('langchain/schema');
+const { HumanMessage, AIMessage } = require('@langchain/core/messages');
 const PluginsClient = require('../PluginsClient');

 jest.mock('~/lib/db/connectDb');
@@ -55,8 +55,8 @@ describe('PluginsClient', () => {

     const chatMessages = orderedMessages.map((msg) =>
       msg?.isCreatedByUser || msg?.role?.toLowerCase() === 'user'
-        ? new HumanChatMessage(msg.text)
-        : new AIChatMessage(msg.text),
+        ? new HumanMessage(msg.text)
+        : new AIMessage(msg.text),
     );

     TestAgent.currentMessages = orderedMessages;

api/app/clients/tools/AzureAiSearch.js

-98
This file was deleted.

0 commit comments
