
Commit 22c1ab2

polishing (microsoft#2227)
1 parent 7ba4936 commit 22c1ab2

File tree: 1 file changed (+111, -54 lines)


notebook/agentchat_websockets.ipynb

+111 -54
@@ -62,6 +62,7 @@
 }
 ],
 "source": [
+"from datetime import datetime\n",
 "from tempfile import TemporaryDirectory\n",
 "\n",
 "from websockets.sync.client import connect as ws_connect\n",
@@ -145,20 +146,17 @@
 " @user_proxy.register_for_execution()\n",
 " @agent.register_for_llm(description=\"Weather forecats for a city\")\n",
 " def weather_forecast(city: str) -> str:\n",
-" return f\"The weather forecast for {city} is sunny.\"\n",
+" return f\"The weather forecast for {city} at {datetime.now()} is sunny.\"\n",
 "\n",
 " # we will use a temporary directory as the cache path root to ensure fresh completion each time\n",
-" with TemporaryDirectory() as cache_path_root:\n",
-" with Cache.disk(cache_path_root=cache_path_root) as cache:\n",
-" print(\n",
-" f\" - on_connect(): Initiating chat with agent {agent} using message '{initial_msg}'\",\n",
-" flush=True,\n",
-" )\n",
-" user_proxy.initiate_chat( # noqa: F704\n",
-" agent,\n",
-" message=initial_msg,\n",
-" cache=cache,\n",
-" )"
+" print(\n",
+" f\" - on_connect(): Initiating chat with agent {agent} using message '{initial_msg}'\",\n",
+" flush=True,\n",
+" )\n",
+" user_proxy.initiate_chat( # noqa: F704\n",
+" agent,\n",
+" message=initial_msg,\n",
+" )"
 ]
 },
 {
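For readability, here is the new-side code of the hunk above, flattened from the notebook's JSON strings back into plain Python (indentation is reconstructed and the snippet is dedented; `agent`, `user_proxy`, and `initial_msg` belong to the surrounding `on_connect` handler, which this diff does not show). Compared with the old side, the `TemporaryDirectory`/`Cache.disk` wrapper is gone, so `initiate_chat` no longer receives a `cache` argument:

@user_proxy.register_for_execution()
@agent.register_for_llm(description="Weather forecats for a city")
def weather_forecast(city: str) -> str:
    # the timestamp added by this commit makes every forecast string unique
    return f"The weather forecast for {city} at {datetime.now()} is sunny."

# we will use a temporary directory as the cache path root to ensure fresh completion each time
print(
    f" - on_connect(): Initiating chat with agent {agent} using message '{initial_msg}'",
    flush=True,
)
user_proxy.initiate_chat(  # noqa: F704
    agent,
    message=initial_msg,
)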
@@ -203,7 +201,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 5,
+"execution_count": 3,
 "id": "4fbe004d",
 "metadata": {},
 "outputs": [
@@ -212,24 +210,22 @@
 "output_type": "stream",
 "text": [
 " - test_setup() with websocket server running on ws://127.0.0.1:8765.\n",
-" - on_connect(): Connected to client using IOWebsockets <autogen.io.websockets.IOWebsockets object at 0x75ad84aa0d60>\n",
+" - on_connect(): Connected to client using IOWebsockets <autogen.io.websockets.IOWebsockets object at 0x724188dc16f0>\n",
 " - on_connect(): Receiving message from client.\n",
 " - Connected to server on ws://127.0.0.1:8765\n",
 " - Sending message to server.\n",
-" - on_connect(): Initiating chat with agent <autogen.agentchat.conversable_agent.ConversableAgent object at 0x75ad84a72b30> using message 'Check out the weather in Paris and write a poem about it.'\n",
+" - on_connect(): Initiating chat with agent <autogen.agentchat.conversable_agent.ConversableAgent object at 0x724188dc1a80> using message 'Check out the weather in Paris and write a poem about it.'\n",
 "\u001b[33muser_proxy\u001b[0m (to chatbot):\n",
 "\n",
 "Check out the weather in Paris and write a poem about it.\n",
 "\n",
 "--------------------------------------------------------------------------------\n",
 "\u001b[31m\n",
 ">>>>>>>> USING AUTO REPLY...\u001b[0m\n",
-"\u001b[32m\u001b[32m\u001b[0m\n",
-"\n",
 "\u001b[33mchatbot\u001b[0m (to user_proxy):\n",
 "\n",
 "\n",
-"\u001b[32m***** Suggested tool Call (call_U5VR0hck9KhDFWPdvmo1Eoke): weather_forecast *****\u001b[0m\n",
+"\u001b[32m***** Suggested tool call (call_pKFE5KQZOQTe4gC8mQRv6bZX): weather_forecast *****\u001b[0m\n",
 "Arguments: \n",
 "{\n",
 " \"city\": \"Paris\"\n",
@@ -243,36 +239,36 @@
 "\n",
 "\u001b[33muser_proxy\u001b[0m (to chatbot):\n",
 "\n",
-"\u001b[32m***** Response from calling tool \"call_U5VR0hck9KhDFWPdvmo1Eoke\" *****\u001b[0m\n",
-"The weather forecast for Paris is sunny.\n",
+"\u001b[32m***** Response from calling tool (call_pKFE5KQZOQTe4gC8mQRv6bZX) *****\u001b[0m\n",
+"The weather forecast for Paris at 2024-03-31 20:17:22.413225 is sunny.\n",
 "\u001b[32m**********************************************************************\u001b[0m\n",
 "\n",
 "--------------------------------------------------------------------------------\n",
 "\u001b[31m\n",
 ">>>>>>>> USING AUTO REPLY...\u001b[0m\n",
-"\u001b[32m\u001b[32mIn the city of love, shines the sun above,\n",
-"Paris basks in golden rays, a beautiful day to praise.\n",
-"Strolling down the Champs Elysées, the warm light leads the way,\n",
-"In the glow, silhouettes dance, a perfect setting for romance.\n",
+"\u001b[32m\u001b[32mIn Paris, beneath the golden sun, so bright,\n",
+"Where cobblestones glisten with pure delight,\n",
+"The weather is sunny, a beautiful sight,\n",
+"Oh Paris, you're dazzling in the sunlight.\n",
 "\n",
-"In the sunlight, the Seine sparkles bright, reflecting the City of Light,\n",
-"Not a cloud in the crystal-clear blue sky, as the doves sail high.\n",
-"Sunny Paris so profound, beauty all around,\n",
-"Alive under the radiant crown, she wears her sunlight like a gown.\n",
+"The bistros beckon with fragrant delight,\n",
+"In gardens, flowers bloom with all their might.\n",
+"Paris, your charm shines so bright,\n",
+"Under the blanket of the sunny daylight.\n",
 "\n",
 "TERMINATE\u001b[0m\n",
 "\n",
 "\u001b[33mchatbot\u001b[0m (to user_proxy):\n",
 "\n",
-"In the city of love, shines the sun above,\n",
-"Paris basks in golden rays, a beautiful day to praise.\n",
-"Strolling down the Champs Elysées, the warm light leads the way,\n",
-"In the glow, silhouettes dance, a perfect setting for romance.\n",
+"In Paris, beneath the golden sun, so bright,\n",
+"Where cobblestones glisten with pure delight,\n",
+"The weather is sunny, a beautiful sight,\n",
+"Oh Paris, you're dazzling in the sunlight.\n",
 "\n",
-"In the sunlight, the Seine sparkles bright, reflecting the City of Light,\n",
-"Not a cloud in the crystal-clear blue sky, as the doves sail high.\n",
-"Sunny Paris so profound, beauty all around,\n",
-"Alive under the radiant crown, she wears her sunlight like a gown.\n",
+"The bistros beckon with fragrant delight,\n",
+"In gardens, flowers bloom with all their might.\n",
+"Paris, your charm shines so bright,\n",
+"Under the blanket of the sunny daylight.\n",
 "\n",
 "TERMINATE\n",
 "\n",
@@ -330,19 +326,7 @@
 "execution_count": 4,
 "id": "5e55dc06",
 "metadata": {},
-"outputs": [
-{
-"ename": "ModuleNotFoundError",
-"evalue": "No module named 'fastapi'",
-"output_type": "error",
-"traceback": [
-"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
-"\u001b[0;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)",
-"Cell \u001b[0;32mIn[4], line 4\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mcontextlib\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m asynccontextmanager \u001b[38;5;66;03m# noqa: E402\u001b[39;00m\n\u001b[1;32m 2\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mpathlib\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m Path \u001b[38;5;66;03m# noqa: E402\u001b[39;00m\n\u001b[0;32m----> 4\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mfastapi\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m FastAPI \u001b[38;5;66;03m# noqa: E402\u001b[39;00m\n\u001b[1;32m 5\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mfastapi\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mresponses\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m HTMLResponse \u001b[38;5;66;03m# noqa: E402\u001b[39;00m\n\u001b[1;32m 7\u001b[0m PORT \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m8000\u001b[39m\n",
-"\u001b[0;31mModuleNotFoundError\u001b[0m: No module named 'fastapi'"
-]
-}
-],
+"outputs": [],
 "source": [
 "from contextlib import asynccontextmanager # noqa: E402\n",
 "from pathlib import Path # noqa: E402\n",
@@ -405,10 +389,54 @@
 },
 {
 "cell_type": "code",
-"execution_count": null,
+"execution_count": 5,
 "id": "d92e50b5",
 "metadata": {},
-"outputs": [],
+"outputs": [
+{
+"name": "stderr",
+"output_type": "stream",
+"text": [
+"INFO: Started server process [264152]\n",
+"INFO: Waiting for application startup.\n"
+]
+},
+{
+"name": "stdout",
+"output_type": "stream",
+"text": [
+"Websocket server started at ws://127.0.0.1:8080.\n"
+]
+},
+{
+"name": "stderr",
+"output_type": "stream",
+"text": [
+"INFO: Application startup complete.\n",
+"INFO: Uvicorn running on http://127.0.0.1:8000 (Press CTRL+C to quit)\n"
+]
+},
+{
+"name": "stdout",
+"output_type": "stream",
+"text": [
+"INFO: 127.0.0.1:46378 - \"GET / HTTP/1.1\" 200 OK\n",
+" - on_connect(): Connected to client using IOWebsockets <autogen.io.websockets.IOWebsockets object at 0x72418841c4c0>\n",
+" - on_connect(): Receiving message from client.\n",
+" - on_connect(): Initiating chat with agent <autogen.agentchat.conversable_agent.ConversableAgent object at 0x72418841ea40> using message 'Check out the weather in Paris and write a poem about it.'\n"
+]
+},
+{
+"name": "stderr",
+"output_type": "stream",
+"text": [
+"INFO: Shutting down\n",
+"INFO: Waiting for application shutdown.\n",
+"INFO: Application shutdown complete.\n",
+"INFO: Finished server process [264152]\n"
+]
+}
+],
 "source": [
 "import uvicorn # noqa: E402\n",
 "\n",
@@ -463,10 +491,36 @@
 },
 {
 "cell_type": "code",
-"execution_count": null,
+"execution_count": 7,
 "id": "708a98de",
 "metadata": {},
-"outputs": [],
+"outputs": [
+{
+"name": "stdout",
+"output_type": "stream",
+"text": [
+"Websocket server started at ws://127.0.0.1:8080.\n",
+"HTTP server started at http://localhost:8000\n"
+]
+},
+{
+"name": "stderr",
+"output_type": "stream",
+"text": [
+"127.0.0.1 - - [31/Mar/2024 20:20:07] \"GET / HTTP/1.1\" 200 -\n"
+]
+},
+{
+"name": "stdout",
+"output_type": "stream",
+"text": [
+" - on_connect(): Connected to client using IOWebsockets <autogen.io.websockets.IOWebsockets object at 0x7241882bf2b0>\n",
+" - on_connect(): Receiving message from client.\n",
+" - on_connect(): Initiating chat with agent <autogen.agentchat.conversable_agent.ConversableAgent object at 0x7241882beec0> using message 'Check out the weather in Paris and write a poem about it.'\n",
+" - HTTP server stopped.\n"
+]
+}
+],
 "source": [
 "from http.server import HTTPServer, SimpleHTTPRequestHandler # noqa: E402\n",
 "\n",
@@ -529,7 +583,10 @@
 "\n",
 " with HTTPServer((\"\", PORT), handler) as httpd:\n",
 " print(\"HTTP server started at http://localhost:\" + str(PORT))\n",
-" httpd.serve_forever()"
+" try:\n",
+" httpd.serve_forever()\n",
+" except KeyboardInterrupt:\n",
+" print(\" - HTTP server stopped.\", flush=True)"
 ]
 },
 {
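The try/except added in the last hunk lets the plain http.server variant exit cleanly when the notebook kernel is interrupted, which is what produces the " - HTTP server stopped." line in the new outputs. A self-contained sketch of that pattern (the request handler that rewrites "/" to a chat page, and the chat.html file name, are assumptions standing in for the notebook's handler):

from http.server import HTTPServer, SimpleHTTPRequestHandler

PORT = 8000


class MyRequestHandler(SimpleHTTPRequestHandler):
    def do_GET(self):
        # serve the chat client page for the root path (file name is an assumption)
        if self.path == "/":
            self.path = "/chat.html"
        return SimpleHTTPRequestHandler.do_GET(self)


handler = MyRequestHandler

with HTTPServer(("", PORT), handler) as httpd:
    print("HTTP server started at http://localhost:" + str(PORT))
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        # raised when the notebook kernel is interrupted; exit the cell cleanly
        print(" - HTTP server stopped.", flush=True)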
