// Copyright (c) Microsoft Corporation. All rights reserved.
// Dynamic_GroupChat.cs

using AutoGen.Core;
using AutoGen.OpenAI;
using AutoGen.OpenAI.Extension;
using AutoGen.SemanticKernel;
using AutoGen.SemanticKernel.Extension;
using Azure.AI.OpenAI;
using Microsoft.SemanticKernel;

namespace AutoGen.BasicSample;

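// This sample demonstrates a dynamic group chat between three agents:
// a coder that writes C# code, a commenter that reviews and annotates it,
// and a user proxy that ends the conversation with a default "END" reply.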
public class Dynamic_Group_Chat
{
    public static async Task RunAsync()
    {
        var apiKey = Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? throw new Exception("Please set OPENAI_API_KEY environment variable.");
        var model = "gpt-3.5-turbo";

        #region Create_Coder
        var openaiClient = new OpenAIClient(apiKey);
        var coder = new OpenAIChatAgent(
            openAIClient: openaiClient,
            name: "coder",
            modelName: model,
            systemMessage: "You are a C# coder. When writing C# code, please put the code between ```csharp and ```")
            .RegisterMessageConnector() // convert OpenAI messages to AutoGen messages
            .RegisterPrintMessage(); // print the message content to the console
        #endregion Create_Coder

        #region Create_Commenter
        var kernel = Kernel
            .CreateBuilder()
            .AddOpenAIChatCompletion(modelId: model, apiKey: apiKey)
            .Build();
        var commenter = new SemanticKernelAgent(
            kernel: kernel,
            name: "commenter",
            systemMessage: "You write inline comments for the code snippet and add unit tests if necessary")
            .RegisterMessageConnector() // register the message connector so the agent supports AutoGen built-in message types like TextMessage
            .RegisterPrintMessage(); // pretty print the message to the console
        #endregion Create_Commenter

        #region Create_UserProxy
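        // DefaultReplyAgent stands in for a human user; it always returns the default reply "END",
        // which the chat loop below uses as the termination signal.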
        var userProxy = new DefaultReplyAgent("user", defaultReply: "END")
            .RegisterPrintMessage(); // print the message content

        #endregion Create_UserProxy

        #region Create_Group
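        // The admin agent is used by the group chat to decide which member speaks next in each round.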
        var admin = new OpenAIChatAgent(
            openAIClient: openaiClient,
            name: "admin",
            modelName: model)
            .RegisterMessageConnector(); // convert OpenAI messages to AutoGen messages

        var group = new GroupChat(
            members: [coder, commenter, userProxy],
            admin: admin);
        #endregion Create_Group

        #region Chat_With_Group
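        // Describe the intended speaker order as the first message so the admin can follow it
        // when orchestrating the conversation.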
        var workflowInstruction = new TextMessage(
            Role.User,
            """
            Here is the workflow of this group chat:
            User{Ask a question} -> Coder{Write code}
            Coder{Write code} -> Commenter{Add comments to the code}
            Commenter{Add comments to the code} -> User{END}
            """);

        var question = new TextMessage(Role.User, "How to calculate the 100th Fibonacci number?");
        var chatHistory = new List<IMessage> { workflowInstruction, question };
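        // Run the group chat one round at a time, appending the latest reply to the history,
        // until the user proxy speaks (its default reply "END" marks the end of the workflow).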
        while (true)
        {
            var replies = await group.CallAsync(chatHistory, maxRound: 1);
            var lastReply = replies.Last();
            chatHistory.Add(lastReply);

            if (lastReply.From == userProxy.Name)
            {
                break;
            }
        }
        #endregion Chat_With_Group

        #region Summarize_Chat_History
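        // Ask the coder agent to summarize the full conversation.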
        var summary = await coder.SendAsync("summarize the conversation", chatHistory: chatHistory);
        #endregion Summarize_Chat_History
    }
}