Skip to content
Merged
22 changes: 22 additions & 0 deletions lib/ai/client.go
Original file line number Diff line number Diff line change
Expand Up @@ -77,3 +77,25 @@ func (client *Client) Summary(ctx context.Context, message string) (string, erro

return resp.Choices[0].Message.Content, nil
}

// CommandSummary creates a command summary based on the command output.
// The message history is also passed to the model in order to keep context
// and extract relevant information from the output.
func (client *Client) CommandSummary(ctx context.Context, messages []openai.ChatCompletionMessage, output map[string][]byte) (string, error) {
	// Append the command output as a final user message so the model
	// summarizes it in the context of the prior conversation.
	prompt := openai.ChatCompletionMessage{
		Role:    openai.ChatMessageRoleUser,
		Content: model.ConversationCommandResult(output),
	}

	request := openai.ChatCompletionRequest{
		Model:    openai.GPT4,
		Messages: append(messages, prompt),
	}

	resp, err := client.svc.CreateChatCompletion(ctx, request)
	if err != nil {
		return "", trace.Wrap(err)
	}

	return resp.Choices[0].Message.Content, nil
}
1 change: 1 addition & 0 deletions lib/ai/model/agent.go
Original file line number Diff line number Diff line change
Expand Up @@ -315,6 +315,7 @@ func parsePlanningOutput(text string) (*agentAction, *agentFinish, error) {
log.Tracef("received planning output: \"%v\"", text)
response, err := parseJSONFromModel[planOutput](text)
if err != nil {
log.WithError(err).Trace("failed to parse planning output")
return nil, nil, trace.Wrap(err)
}

Expand Down
19 changes: 18 additions & 1 deletion lib/ai/model/prompt.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,14 +16,20 @@ limitations under the License.

package model

import "fmt"
import (
	"fmt"
	"sort"
	"strings"
)

var observationPrefix = "Observation: "
var thoughtPrefix = "Thought: "

const PromptSummarizeTitle = `You will be given a message. Create a short summary of that message.
Respond only with summary, nothing else.`

const PromptSummarizeCommand = `You will be given a chat history and a command output. Based on the history context, extract relevant information from the command output and write a short summary of the command output.
Respond only with summary, nothing else.`

const InitialAIResponse = `Hey, I'm Teleport - a powerful tool that can assist you in managing your Teleport cluster via OpenAI GPT-4.`

func PromptCharacter(username string) string {
Expand Down Expand Up @@ -100,3 +106,14 @@ USER'S INPUT

Okay, so what is the response to my last comment? If using information obtained from the tools you must mention it explicitly without mentioning the tool names - I have forgotten all TOOL RESPONSES! Remember to respond with a markdown code snippet of a json blob with a single action, and NOTHING else.`, toolResponse)
}

// ConversationCommandResult formats a command execution result into a prompt
// asking the model to summarize the output.
// The result maps node names to the raw output captured on each node. Nodes
// are iterated in sorted order so the generated prompt is deterministic.
func ConversationCommandResult(result map[string][]byte) string {
	// Sort node names: Go map iteration order is random, and a stable
	// prompt keeps model inputs reproducible and testable.
	nodes := make([]string, 0, len(result))
	for node := range result {
		nodes = append(nodes, node)
	}
	sort.Strings(nodes)

	var message strings.Builder
	for _, node := range nodes {
		// BUG FIX: the original used a raw (backtick) string literal, so
		// the "\n" was emitted as a literal backslash-n rather than a
		// newline. An interpreted string literal produces the intended
		// line break.
		fmt.Fprintf(&message, "Command ran on node \"%s\" and produced the following output:\n", node)
		message.Write(result[node])
		message.WriteString("\n")
	}
	message.WriteString("Based on the chat history, extract relevant information out of the command output and write a summary.")
	return message.String()
}
84 changes: 78 additions & 6 deletions lib/assist/assist.go
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,11 @@ const (
MessageKindCommand MessageType = "COMMAND"
// MessageKindCommandResult is the type of Assist message that contains the command execution result.
MessageKindCommandResult MessageType = "COMMAND_RESULT"
// MessageKindCommandResultSummary is the type of message that is optionally
// emitted after a command and contains a summary of the command output.
// This message is both sent after the command execution to the web UI,
// and persisted in the conversation history.
MessageKindCommandResultSummary MessageType = "COMMAND_RESULT_SUMMARY"
// MessageKindUserMessage is the type of Assist message that contains the user message.
MessageKindUserMessage MessageType = "CHAT_MESSAGE_USER"
// MessageKindAssistantMessage is the type of Assist message that contains the assistant message.
Expand Down Expand Up @@ -104,6 +109,10 @@ type Chat struct {
ConversationID string
// Username is the username of the user who started the chat.
Username string
// potentiallyStaleHistory indicates messages might have been inserted into
// the chat history and the messages should be re-fetched before attempting
// the next completion.
potentiallyStaleHistory bool
}

// NewChat creates a new Assist chat.
Expand All @@ -113,11 +122,12 @@ func (a *Assist) NewChat(ctx context.Context, assistService MessageService,
aichat := a.client.NewChat(username)

chat := &Chat{
assist: a,
chat: aichat,
assistService: assistService,
ConversationID: conversationID,
Username: username,
assist: a,
chat: aichat,
assistService: assistService,
ConversationID: conversationID,
Username: username,
potentiallyStaleHistory: false,
}

if err := chat.loadMessages(ctx); err != nil {
Expand All @@ -132,6 +142,29 @@ func (a *Assist) GenerateSummary(ctx context.Context, message string) (string, e
return a.client.Summary(ctx, message)
}

// GenerateCommandSummary summarizes the output of a command executed on one or
// many nodes. The conversation history is also sent into the prompt in order
// to gather context and know what information is relevant in the command output.
func (a *Assist) GenerateCommandSummary(ctx context.Context, messages []*assist.AssistantMessage, output map[string][]byte) (string, error) {
// Create system prompt
modelMessages := []openai.ChatCompletionMessage{
{Role: openai.ChatMessageRoleSystem, Content: model.PromptSummarizeCommand},
}

// Load context back into prompt
for _, message := range messages {
role := kindToRole(MessageType(message.Type))
if role != "" && role != openai.ChatMessageRoleSystem {
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Did you find any message with an empty role? Curious about the first part of this statement.

Copy link
Copy Markdown
Contributor Author

@hugoShaka hugoShaka Jun 22, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

COMMAND_RESULT has an empty role for example. Empty role is used in a few places as "this message should not be sent to the model"

payload, err := formatMessagePayload(message)
if err != nil {
return "", trace.Wrap(err)
}
modelMessages = append(modelMessages, openai.ChatCompletionMessage{Role: role, Content: payload})
}
}
return a.client.CommandSummary(ctx, modelMessages, output)
}

// loadMessages loads the messages from the database.
func (c *Chat) loadMessages(ctx context.Context) error {
// existing conversation, retrieve old messages
Expand All @@ -147,7 +180,11 @@ func (c *Chat) loadMessages(ctx context.Context) error {
for _, msg := range messages.GetMessages() {
role := kindToRole(MessageType(msg.Type))
if role != "" {
c.chat.Insert(role, msg.Payload)
payload, err := formatMessagePayload(msg)
if err != nil {
return trace.Wrap(err)
}
c.chat.Insert(role, payload)
}
}

Expand Down Expand Up @@ -202,6 +239,16 @@ func (c *Chat) ProcessComplete(ctx context.Context, onMessage onMessageFunc, use
) (*model.TokensUsed, error) {
var tokensUsed *model.TokensUsed

// If data might have been inserted into the chat history, we want to
// refresh and get the latest data before querying the model.
if c.potentiallyStaleHistory {
c.chat = c.assist.client.NewChat(c.Username)
err := c.loadMessages(ctx)
if err != nil {
return nil, trace.Wrap(err)
}
}

// query the assistant and fetch an answer
message, err := c.chat.Complete(ctx, userInput)
if err != nil {
Expand Down Expand Up @@ -279,6 +326,11 @@ func (c *Chat) ProcessComplete(ctx context.Context, onMessage onMessageFunc, use
if err := onMessage(MessageKindCommand, payloadJson, c.assist.clock.Now().UTC()); nil != err {
return nil, trace.Wrap(err)
}
// As we emitted a command suggestion, the user might have run it. If
// the command ran, a summary could have been inserted in the backend.
// To take this command summary into account we note the history might
// be stale.
c.potentiallyStaleHistory = true
default:
return nil, trace.Errorf("unknown message type")
}
Expand Down Expand Up @@ -313,7 +365,27 @@ func kindToRole(kind MessageType) string {
return openai.ChatMessageRoleAssistant
case MessageKindSystemMessage:
return openai.ChatMessageRoleSystem
case MessageKindCommandResultSummary:
return openai.ChatMessageRoleUser
default:
return ""
}
}

// formatMessagePayload generates the OpenAI message payload corresponding to
// an Assist message. Most Assist message payloads can be converted directly,
// but some payloads are JSON-formatted and must be processed before being
// passed to the model.
func formatMessagePayload(message *assist.AssistantMessage) (string, error) {
	switch MessageType(message.GetType()) {
	case MessageKindCommandResultSummary:
		// Summary payloads are stored as JSON-encoded CommandExecSummary;
		// decode and render them into plain text the model can consume.
		var summary CommandExecSummary
		err := json.Unmarshal([]byte(message.GetPayload()), &summary)
		if err != nil {
			return "", trace.Wrap(err)
		}
		return summary.String(), nil
	default:
		// Every other message kind is already plain text.
		return message.GetPayload(), nil
	}
}
19 changes: 18 additions & 1 deletion lib/assist/messages.go
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,28 @@

package assist

import "github.com/gravitational/teleport/lib/ai/model"
import (
"fmt"

"github.com/gravitational/teleport/lib/ai/model"
)

// commandPayload is a payload for a command message.
type commandPayload struct {
	// Command is the command to execute.
	Command string `json:"command,omitempty"`
	// Nodes is the list of node names the command targets.
	Nodes []string `json:"nodes,omitempty"`
	// Labels is the set of labels associated with the command target
	// selection (presumably used when Nodes is not set — confirm with callers).
	Labels []model.Label `json:"labels,omitempty"`
}

// CommandExecSummary is a payload for the COMMAND_RESULT_SUMMARY message.
type CommandExecSummary struct {
	ExecutionID string `json:"execution_id"`
	Summary     string `json:"summary"`
	Command     string `json:"command"`
}

// String implements the Stringer interface and formats the message for AI
// model consumption.
func (s CommandExecSummary) String() string {
	// Keep the exact phrasing: downstream prompts rely on this wording.
	const format = "Command: `%s` executed. The command output summary is: %s"
	return fmt.Sprintf(format, s.Command, s.Summary)
}
Loading