Merged · Changes from 9 commits
22 changes: 11 additions & 11 deletions apps/web/client/public/onlook-preload-script.js

Large diffs are not rendered by default.

46 changes: 43 additions & 3 deletions apps/web/client/src/app/api/chat/helpers/stream.ts
@@ -1,8 +1,48 @@
import type { ToolCall } from '@ai-sdk/provider-utils';
import { initModel } from '@onlook/ai';
import { LLMProvider, OPENROUTER_MODELS } from '@onlook/models';
import { getAskModeSystemPrompt, getCreatePageSystemPrompt, getSystemPrompt, initModel } from '@onlook/ai';
import { ChatType, LLMProvider, OPENROUTER_MODELS, type ModelConfig } from '@onlook/models';
import { generateObject, NoSuchToolError, type ToolSet } from 'ai';

export function getModelFromType(chatType: ChatType) {
let model: ModelConfig;
switch (chatType) {
case ChatType.CREATE:
case ChatType.FIX:
model = initModel({
provider: LLMProvider.OPENROUTER,
model: OPENROUTER_MODELS.OPEN_AI_GPT_5,
});
break;
case ChatType.ASK:
case ChatType.EDIT:
default:
model = initModel({
provider: LLMProvider.OPENROUTER,
model: OPENROUTER_MODELS.CLAUDE_4_SONNET,
});
break;
}
return model;
}

export function getSystemPromptFromType(chatType: ChatType) {
let systemPrompt: string;

switch (chatType) {
case ChatType.CREATE:
systemPrompt = getCreatePageSystemPrompt();
break;
case ChatType.ASK:
systemPrompt = getAskModeSystemPrompt();
break;
case ChatType.EDIT:
default:
systemPrompt = getSystemPrompt();
break;
}
return systemPrompt;
}


export const repairToolCall = async ({ toolCall, tools, error }: { toolCall: ToolCall<string, unknown>, tools: ToolSet, error: Error }) => {
if (NoSuchToolError.isInstance(error)) {
@@ -20,7 +60,7 @@ export const repairToolCall = async ({ toolCall, tools, error }: { toolCall: Too
`Invalid parameter for tool ${toolCall.toolName} with args ${JSON.stringify(toolCall.input)}, attempting to fix`,
);

const { model } = initModel({
const { model } = await initModel({
provider: LLMProvider.OPENROUTER,
model: OPENROUTER_MODELS.OPEN_AI_GPT_5_NANO,
});
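Taken together, these helpers resolve both the model and the system prompt from a single ChatType. A minimal caller sketch (illustrative only: the import path and call site are hypothetical, and it assumes initModel resolves synchronously here even though other call sites in this PR await it):

import { streamText } from 'ai';
import { ChatType } from '@onlook/models';
import { getModelFromType, getSystemPromptFromType } from './stream';

// Hypothetical caller: one ChatType drives both model and prompt selection.
const chatType = ChatType.ASK;
const { model, providerOptions } = getModelFromType(chatType);

const result = streamText({
    model,
    system: getSystemPromptFromType(chatType),
    providerOptions,
    messages: [{ role: 'user', content: 'Explain this component.' }],
});

// Consume the stream as it arrives.
for await (const text of result.textStream) process.stdout.write(text);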
102 changes: 37 additions & 65 deletions apps/web/client/src/app/api/chat/route.ts
@@ -1,12 +1,12 @@
import { api } from '@/trpc/server';
import { trackEvent } from '@/utils/analytics/server';
import { AgentStreamer, BaseAgent, RootAgent, UserAgent } from '@onlook/ai';
import { convertToStreamMessages } from '@onlook/ai';
import { createRootAgent } from '@onlook/ai/src/agents/classes/root';
import { toDbMessage } from '@onlook/db';
import { AgentType, ChatType } from '@onlook/models';
import { ChatType, type ChatMessage, type ChatMetadata } from '@onlook/models';
import { type NextRequest } from 'next/server';
import { v4 as uuidv4 } from 'uuid';
import { checkMessageLimit, decrementUsage, errorHandler, getSupabaseUser, incrementUsage, repairToolCall } from './helpers';
import { z } from 'zod';

export async function POST(req: NextRequest) {
try {
@@ -52,24 +52,14 @@ export async function POST(req: NextRequest) {
}
}

const streamResponseSchema = z.object({
agentType: z.enum(AgentType).optional().default(AgentType.ROOT),
messages: z.array(z.any()),
chatType: z.enum(ChatType).optional(),
conversationId: z.string(),
projectId: z.string(),
}).refine((data) => {
// Only allow chatType if agentType is ROOT
if (data.chatType !== undefined && data.agentType !== AgentType.ROOT) {
return false;
}
return true;
}, { message: "chatType is only allowed if agentType is root" });

export const streamResponse = async (req: NextRequest, userId: string) => {
const body = await req.json();
const { agentType, messages, chatType, conversationId, projectId } = streamResponseSchema.parse(body);

const { messages, chatType, conversationId, projectId } = body as {
messages: ChatMessage[],
chatType: ChatType,
conversationId: string,
projectId: string,
};
// Updating the usage record and rate limit is done here to avoid
// abuse in the case where a single user sends many concurrent requests.
// If the call below fails, the user will not be penalized.
@@ -82,53 +72,35 @@ export const streamResponse = async (req: NextRequest, userId: string) => {
const lastUserMessage = messages.findLast((message) => message.role === 'user');
const traceId = lastUserMessage?.id ?? uuidv4();

// Create RootAgent instance
let agent: BaseAgent;
if (agentType === AgentType.ROOT) {
if (chatType === ChatType.EDIT) {
usageRecord = await incrementUsage(req, traceId);
}

agent = new RootAgent(chatType!);
} else if (agentType === AgentType.USER) {
agent = new UserAgent();
} else {
// agent = new WeatherAgent();
throw new Error('Agent type not supported');
if (chatType === ChatType.EDIT) {
usageRecord = await incrementUsage(req, traceId);
}
const streamer = new AgentStreamer(agent, conversationId);

return streamer.streamText(messages, {
streamTextConfig: {
experimental_telemetry: {
isEnabled: true,
metadata: {
const { agent, modelConfig } = createRootAgent({
chatType,
conversationId,
projectId,
userId,
traceId,
repairToolCall,
});
const result = agent.stream({
providerOptions: modelConfig.providerOptions,
messages: convertToStreamMessages(messages)
});
return result.toUIMessageStreamResponse<ChatMessage>(
{
originalMessages: messages,
generateMessageId: () => uuidv4(),
messageMetadata: ({ part }) => {
return {
createdAt: new Date(),
conversationId,
projectId,
userId,
agentType: agentType ?? AgentType.ROOT,
chatType: chatType ?? "null",
tags: ['chat'],
langfuseTraceId: traceId,
sessionId: conversationId,
},
},
experimental_repairToolCall: repairToolCall,
onError: async (error) => {
console.error('Error in chat stream call', error);
// if there was an error with the API, do not penalize the user
await decrementUsage(req, usageRecord);

// Ensure the stream stops on error by re-throwing
if (error instanceof Error) {
throw error;
} else {
const errorMessage = typeof error === 'string' ? error : JSON.stringify(error);
throw new Error(errorMessage);
}
context: [],
checkpoints: [],
finishReason: part.type === 'finish-step' ? part.finishReason : undefined,
usage: part.type === 'finish-step' ? part.usage : undefined,
} satisfies ChatMetadata;
},
},
toUIMessageStreamResponseConfig: {
onFinish: async ({ messages: finalMessages }) => {
const messagesToStore = finalMessages
.filter(msg =>
Expand All @@ -142,8 +114,8 @@ export const streamResponse = async (req: NextRequest, userId: string) => {
});
},
onError: errorHandler,
},
});
}
);
} catch (error) {
console.error('Error in streamResponse setup', error);
// If there was an error setting up the stream and we incremented usage, revert it
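The comment near the top of streamResponse states the billing rule: charge before streaming so concurrent requests cannot evade the limit, and refund when the failure is on the API side. A condensed sketch of that guard (the wrapper function is illustrative and not part of this diff; incrementUsage and decrementUsage are the helpers imported from './helpers' above):

import type { NextRequest } from 'next/server';
import { decrementUsage, incrementUsage } from './helpers';

// Illustrative guard: increment usage up front, refund on stream failure.
async function withUsageGuard(req: NextRequest, traceId: string, start: () => Promise<Response>): Promise<Response> {
    const usageRecord = await incrementUsage(req, traceId);
    try {
        return await start();
    } catch (error) {
        // The failure was ours, not the user's: revert the charge.
        await decrementUsage(req, usageRecord);
        throw error;
    }
}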
16 changes: 6 additions & 10 deletions apps/web/client/src/app/project/[id]/_hooks/use-chat/index.tsx
@@ -3,7 +3,7 @@
import { useEditorEngine } from '@/components/store/editor';
import { handleToolCall } from '@/components/tools';
import { useChat as useAiChat } from '@ai-sdk/react';
import { AgentType, ChatType, type ChatMessage, type MessageContext, type QueuedMessage } from '@onlook/models';
import { ChatType, type ChatMessage, type MessageContext, type QueuedMessage } from '@onlook/models';
import { jsonClone } from '@onlook/utility';
import { DefaultChatTransport, lastAssistantMessageIsCompleteWithToolCalls, type FinishReason } from 'ai';
import { usePostHog } from 'posthog-js/react';
@@ -31,7 +31,6 @@ interface UseChatProps {
projectId: string;
initialMessages: ChatMessage[];
}
const agentType = AgentType.ROOT;

export function useChat({ conversationId, projectId, initialMessages }: UseChatProps) {
const editorEngine = useEditorEngine();
@@ -52,12 +51,11 @@ export function useChat({ conversationId, projectId, initialMessages }: UseChatP
body: {
conversationId,
projectId,
agentType,
},
}),
onToolCall: async (toolCall) => {
setIsExecutingToolCall(true);
void handleToolCall(agentType, toolCall.toolCall, editorEngine, addToolResult).then(() => {
void handleToolCall(toolCall.toolCall, editorEngine, addToolResult).then(() => {
setIsExecutingToolCall(false);
});
},
@@ -90,7 +88,6 @@ export function useChat({ conversationId, projectId, initialMessages }: UseChatP
chatType: type,
conversationId,
context: messageContext,
agentType,
},
});
void editorEngine.chat.conversation.generateTitle(content);
@@ -165,7 +162,6 @@ export function useChat({ conversationId, projectId, initialMessages }: UseChatP
body: {
chatType,
conversationId,
agentType,
},
});

@@ -185,16 +181,16 @@ export function useChat({ conversationId, projectId, initialMessages }: UseChatP

const processNextInQueue = useCallback(async () => {
if (isProcessingQueue.current || isStreaming || queuedMessages.length === 0) return;

const nextMessage = queuedMessages[0];
if (!nextMessage) return;

isProcessingQueue.current = true;

try {
const refreshedContext = await editorEngine.chat.context.getRefreshedContext(nextMessage.context);
await processMessage(nextMessage.content, nextMessage.type, refreshedContext);

// Remove only after successful processing
setQueuedMessages(prev => prev.slice(1));
} catch (error) {
@@ -141,13 +141,6 @@ export class ConversationManager {
return api.chat.conversation.getAll.query({ projectId: id });
}

async upsertConversationInStorage(conversation: Partial<ChatConversation>): Promise<ChatConversation> {
return await api.chat.conversation.upsert.mutate({
...conversation,
projectId: this.editorEngine.projectId,
});
}

async updateConversationInStorage(conversation: Partial<ChatConversation> & { id: string }) {
await api.chat.conversation.update.mutate(conversation);
}
19 changes: 7 additions & 12 deletions apps/web/client/src/components/tools/tools.ts
@@ -1,34 +1,29 @@
import type { EditorEngine } from '@/components/store/editor/engine';
import type { ToolCall } from '@ai-sdk/provider-utils';
import type { AbstractChat } from 'ai';
import { getAvailableTools, type OnToolCallHandler } from '@onlook/ai';
import { getToolClassesFromType } from '@onlook/ai';
import { toast } from '@onlook/ui/sonner';
import type { AgentType } from '@onlook/models';

export async function handleToolCall(agentType: AgentType, toolCall: ToolCall<string, unknown>, editorEngine: EditorEngine, addToolResult: typeof AbstractChat.prototype.addToolResult) {
export async function handleToolCall(toolCall: ToolCall<string, unknown>, editorEngine: EditorEngine, addToolResult: (toolResult: { tool: string, toolCallId: string, output: any }) => Promise<void>) {
const toolName = toolCall.toolName;
const currentChatMode = editorEngine.state.chatMode;
const availableTools = getAvailableTools(agentType, currentChatMode) as any[];
let output: any = null;
const availableTools = getToolClassesFromType(currentChatMode);
let output: unknown = null;

try {
const tool = availableTools.find((tool: any) => tool.toolName === toolName);
const tool = availableTools.find(tool => tool.toolName === toolName);
if (!tool) {
toast.error(`Tool "${toolName}" not available in ask mode`, {
description: `Switch to build mode to use this tool.`,
duration: 2000,
});

throw new Error(`Tool "${toolName}" is not available in ${currentChatMode} mode!!!!`);
throw new Error(`Tool "${toolName}" is not available in ${currentChatMode} mode`);
}
// Parse the input to the tool parameters. Throws if invalid.
const validatedInput = tool.parameters.parse(toolCall.input);
const toolInstance = new tool();
const getOnToolCall: OnToolCallHandler = (subAgentType, addSubAgentToolResult) => (toolCall) =>
void handleToolCall(subAgentType, toolCall.toolCall, editorEngine, addSubAgentToolResult);

// Can force type with as any because we know the input is valid.
output = await toolInstance.handle(validatedInput as any, editorEngine, getOnToolCall);
output = await toolInstance.handle(validatedInput as any, editorEngine);
} catch (error) {
output = 'error handling tool call ' + error;
} finally {
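handleToolCall implies a contract for the classes returned by getToolClassesFromType: a static toolName used for lookup, a static zod parameters schema used for validation, and an instance handle(input, editorEngine) method. A hypothetical minimal tool written against that assumed contract (not part of this PR):

import { z } from 'zod';
import type { EditorEngine } from '@/components/store/editor/engine';

// Hypothetical tool shaped to match handleToolCall's lookup and dispatch:
// resolved by static toolName, input validated by static `parameters`,
// then instantiated and invoked through `handle`.
class EchoTool {
    static readonly toolName = 'echo';
    static readonly parameters = z.object({ text: z.string() });

    async handle(input: z.infer<typeof EchoTool.parameters>, _editorEngine: EditorEngine): Promise<string> {
        return input.text;
    }
}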
19 changes: 6 additions & 13 deletions apps/web/client/src/server/api/routers/chat/conversation.ts
@@ -36,13 +36,7 @@ export const conversationRouter = createTRPCRouter({
upsert: protectedProcedure
.input(conversationInsertSchema)
.mutation(async ({ ctx, input }) => {
const [conversation] = await ctx.db.insert(conversations).values(input).onConflictDoUpdate({
target: [conversations.id],
set: {
...input,
updatedAt: new Date(),
},
}).returning();
const [conversation] = await ctx.db.insert(conversations).values(input).returning();
if (!conversation) {
throw new Error('Conversation not created');
}
Expand All @@ -51,11 +45,10 @@ export const conversationRouter = createTRPCRouter({
update: protectedProcedure
.input(conversationUpdateSchema)
.mutation(async ({ ctx, input }) => {
const [conversation] = await ctx.db.update(conversations)
.set({
...input,
updatedAt: new Date(),
})
const [conversation] = await ctx.db.update({
...conversations,
updatedAt: new Date(),
}).set(input)
Comment on lines +48 to +51
⚠️ Potential issue | 🔴 Critical

Fix Drizzle update call

Spreading the table into a plain object strips the symbols Drizzle needs, so this will throw at runtime (table name becomes undefined) and updatedAt is no longer refreshed. Stick with the table instance and set updatedAt inside .set(...).

-            const [conversation] = await ctx.db.update({
-                ...conversations,
-                updatedAt: new Date(),
-            }).set(input)
+            const [conversation] = await ctx.db.update(conversations)
+                .set({
+                    ...input,
+                    updatedAt: new Date(),
+                })

.where(eq(conversations.id, input.id)).returning();
if (!conversation) {
throw new Error('Conversation not updated');
@@ -75,7 +68,7 @@ export const conversationRouter = createTRPCRouter({
content: z.string(),
}))
.mutation(async ({ ctx, input }) => {
const { model, providerOptions, headers } = initModel({
const { model, providerOptions, headers } = await initModel({
provider: LLMProvider.OPENROUTER,
model: OPENROUTER_MODELS.CLAUDE_3_5_HAIKU,
});
2 changes: 1 addition & 1 deletion apps/web/client/src/server/api/routers/chat/suggestion.ts
@@ -18,7 +18,7 @@ export const suggestionsRouter = createTRPCRouter({
})),
}))
.mutation(async ({ ctx, input }) => {
const { model, headers } = initModel({
const { model, headers } = await initModel({
provider: LLMProvider.OPENROUTER,
model: OPENROUTER_MODELS.OPEN_AI_GPT_5_NANO,
});
2 changes: 1 addition & 1 deletion apps/web/client/src/server/api/routers/project/project.ts
@@ -321,7 +321,7 @@ export const projectRouter = createTRPCRouter({
}))
.mutation(async ({ ctx, input }): Promise<string> => {
try {
const { model, providerOptions, headers } = initModel({
const { model, providerOptions, headers } = await initModel({
provider: LLMProvider.OPENROUTER,
model: OPENROUTER_MODELS.OPEN_AI_GPT_5_NANO,
});
2 changes: 0 additions & 2 deletions packages/ai/src/agents/classes/index.ts

This file was deleted.
