
Commit 5bc152e

run through pipeline (InternLM#4)
Co-authored-by: liujiangning <[email protected]>
1 parent 9f57c63 commit 5bc152e

4 files changed (+59, -33 lines)

4 files changed

+59
-33
lines changed

app.py (+2, -4)
@@ -13,8 +13,6 @@
 
 from src.agent import init_agent
 
-# agent = os.environ.get('agent_cfg', dict())
-
 app = FastAPI(docs_url='/')
 
 app.add_middleware(CORSMiddleware,
@@ -95,10 +93,10 @@ async def async_generator_wrapper():
         # yield f'data: {response_json}\n\n'
 
     inputs = request.inputs
-    agent = init_agent(request.agent_cfg)
+    agent = init_agent(**request.agent_cfg)
     return EventSourceResponse(generate())
 
 
 if __name__ == '__main__':
     import uvicorn
-    uvicorn.run(app, host='0.0.0.0', port=8090, log_level='info')
+    uvicorn.run(app, host='0.0.0.0', port=8002, log_level='info')

src/agent/__init__.py (+27, -27)
@@ -2,48 +2,48 @@
 from datetime import datetime
 
 from lagent.actions import ActionExecutor, BingBrowser
-from lagent.llms import LMDeployPipeline
 
-from .mindsearch_agent import MindSearchAgent, MindSearchProtocol
-from .mindsearch_prompt import (FINAL_RESPONSE_CN, GRAPH_PROMPT_CN,
-                                searcher_context_template_cn,
-                                searcher_input_template_cn,
-                                searcher_system_prompt_cn)
+import src.agent.models as llm_factory
+from src.agent.mindsearch_agent import MindSearchAgent, MindSearchProtocol
+from src.agent.mindsearch_prompt import (
+    FINAL_RESPONSE_CN, FINAL_RESPONSE_EN, GRAPH_PROMPT_CN, GRAPH_PROMPT_EN,
+    searcher_context_template_cn, searcher_context_template_en,
+    searcher_input_template_cn, searcher_input_template_en,
+    searcher_system_prompt_cn, searcher_system_prompt_en)
 
-llm = LMDeployPipeline(path='internlm/internlm2_5-7b',
-                       model_name='internlm2',
-                       meta_template=[
-                           dict(role='system', api_role='system'),
-                           dict(role='user', api_role='user'),
-                           dict(role='assistant', api_role='assistant'),
-                           dict(role='environment', api_role='environment')
-                       ],
-                       top_p=0.8,
-                       top_k=1,
-                       temperature=0,
-                       max_new_tokens=8192,
-                       repetition_penalty=1.02,
-                       stop_words=['<|im_end|>'])
+LLM = {}
 
 
-def init_agent(lang='cn', model_format='pipeline'):
+def init_agent(lang='cn', model_format='internlm_server'):
+    llm = LLM.get(model_format, None)
+    if llm is None:
+        llm_cfg = getattr(llm_factory, model_format)
+        if llm_cfg is None:
+            raise NotImplementedError
+        llm = llm_cfg.pop('type')(**llm_cfg)
+        LLM[model_format] = llm
 
     agent = MindSearchAgent(
         llm=llm,
         protocol=MindSearchProtocol(meta_prompt=datetime.now().strftime(
             'The current date is %Y-%m-%d.'),
-            interpreter_prompt=GRAPH_PROMPT_CN,
-            response_prompt=FINAL_RESPONSE_CN),
+            interpreter_prompt=GRAPH_PROMPT_CN
+            if lang == 'cn' else GRAPH_PROMPT_EN,
+            response_prompt=FINAL_RESPONSE_CN
+            if lang == 'cn' else FINAL_RESPONSE_EN),
         searcher_cfg=dict(
             llm=llm,
-            plugin=ActionExecutor(
+            plugin_executor=ActionExecutor(
                 BingBrowser(
                     api_key=os.environ.get('BING_API_KEY', 'YOUR_BING_API'))),
             protocol=MindSearchProtocol(
                 meta_prompt=datetime.now().strftime(
                     'The current date is %Y-%m-%d.'),
-                plugin_prompt=searcher_system_prompt_cn,
+                plugin_prompt=searcher_system_prompt_cn
+                if lang == 'cn' else searcher_system_prompt_en,
             ),
-            template=dict(input=searcher_input_template_cn,
-                          context=searcher_context_template_cn)))
+            template=dict(input=searcher_input_template_cn
+                          if lang == 'cn' else searcher_input_template_en,
+                          context=searcher_context_template_cn
+                          if lang == 'cn' else searcher_context_template_en)))
     return agent
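The rewritten init_agent replaces the module-level LMDeployPipeline instance with a lazy lookup: the model_format string is resolved via getattr against src/agent/models.py, the 'type' key of the resulting dict is popped to obtain the LLM class, the remaining keys become constructor kwargs, and the instance is cached in the LLM dict. One observation: getattr is called without a default here, so an unrecognized model_format raises AttributeError before the explicit NotImplementedError path is reached. A minimal standalone sketch of the same pattern (LLM_CACHE and get_llm are illustrative names, not part of the commit):

import src.agent.models as llm_factory

LLM_CACHE = {}  # one LLM instance per model_format, created on first use


def get_llm(model_format='internlm_server'):
    llm = LLM_CACHE.get(model_format)
    if llm is None:
        # Each entry in models.py is a plain dict whose 'type' key holds the
        # LLM class; copying first keeps the module-level dict intact.
        llm_cfg = dict(getattr(llm_factory, model_format))
        llm_cls = llm_cfg.pop('type')
        llm = llm_cls(**llm_cfg)
        LLM_CACHE[model_format] = llm
    return llm

In the committed code, pop('type') mutates the shared dict in models.py; that is harmless while the cache guarantees a single instantiation per process, but the copy shown above is the more defensive variant.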

src/agent/mindsearch_agent.py (+1, -2)
@@ -322,8 +322,7 @@ def _generate_reference(self, agent_return, code, as_dict):
     def execute_code(self, command: str):
 
         def extract_code(text: str) -> str:
-            text = re.sub(r'from ([\w.]+) import',
-                          'from lagent.agents.mindsearch_agent import', text)
+            text = re.sub(r'from ([\w.]+) import WebSearchGraph', '', text)
             triple_match = re.search(r'```[^\n]*\n(.+?)```', text, re.DOTALL)
             single_match = re.search(r'`([^`]*)`', text, re.DOTALL)
             if triple_match:
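The old regex rewrote any `from ... import` in the model-generated code to import from lagent.agents.mindsearch_agent; the new one simply strips statements of the form `from <module> import WebSearchGraph`, presumably because WebSearchGraph is already available in the execution namespace. A small illustration of the new substitution (the sample text is made up):

import re

text = ('from lagent.agents.mindsearch_agent import WebSearchGraph\n'
        'graph = WebSearchGraph()')

cleaned = re.sub(r'from ([\w.]+) import WebSearchGraph', '', text)
# The import statement is removed; only an empty line remains before the code.
assert cleaned == '\ngraph = WebSearchGraph()'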

src/agent/models.py (+29)
@@ -0,0 +1,29 @@
+import os
+
+from lagent.llms import GPTAPI, INTERNLM2_META, LMDeployClient, LMDeployServer
+
+internlm_server = dict(type=LMDeployServer,
+                       path='internlm/internlm2_5-7b',
+                       model_name='internlm2',
+                       meta_template=INTERNLM2_META,
+                       top_p=0.8,
+                       top_k=1,
+                       temperature=0,
+                       max_new_tokens=8192,
+                       repetition_penalty=1.02,
+                       stop_words=['<|im_end|>'])
+
+internlm_client = dict(type=LMDeployClient,
+                       model_name='internlm2_5-7b',
+                       url='http://127.0.0.1:23333',
+                       meta_template=INTERNLM2_META,
+                       top_p=0.8,
+                       top_k=1,
+                       temperature=0,
+                       max_new_tokens=8192,
+                       repetition_penalty=1.02,
+                       stop_words=['<|im_end|>'])
+
+gpt4 = dict(type=GPTAPI,
+            model_type='gpt-4-turbo',
+            key=os.environ.get('OPENAI_API_KEY', 'YOUR OPENAI API KEY'))
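The new models.py acts as a small registry: every module-level dict is addressable by name through init_agent(model_format=...), with 'type' naming the LLM class from lagent and the remaining keys passed to its constructor. A hypothetical example of registering one more backend in the same style (the name gpt4o and the model string are assumptions, not part of the commit):

import os

from lagent.llms import GPTAPI

# Hypothetical additional entry; would be selectable via init_agent(model_format='gpt4o').
gpt4o = dict(type=GPTAPI,
             model_type='gpt-4o',
             key=os.environ.get('OPENAI_API_KEY', 'YOUR OPENAI API KEY'))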
