Merged
Changes from 21 commits
Commits
25 commits
f15ab45
Add get ops examples
Sg312 Oct 3, 2025
c180aa7
input format incorrectly created by copilot should not crash workflow
icecrasher321 Oct 3, 2025
c1d7063
fix tool edits triggering overall delta
icecrasher321 Oct 3, 2025
8b6c9fd
fix(db): add more options for SSL connection, add envvar for base64 d…
waleedlatif1 Oct 2, 2025
94c1017
fix trigger additions
icecrasher321 Oct 2, 2025
3095b5d
fix nested outputs for triggers
icecrasher321 Oct 3, 2025
e1aa73c
add condition subblock sanitization
icecrasher321 Oct 3, 2025
5807987
fix custom tools json
icecrasher321 Oct 3, 2025
6afea31
Model selector
Sg312 Oct 3, 2025
fa6f17c
fix response format sanitization
icecrasher321 Oct 3, 2025
c436bde
Merge branch 'feat/copilot-operations' of github.com:simstudioai/sim …
icecrasher321 Oct 3, 2025
0f64d43
remove dead code
icecrasher321 Oct 3, 2025
c41068f
fix export sanitization
icecrasher321 Oct 3, 2025
29e7fbd
Update migration
Sg312 Oct 3, 2025
818a0f3
fix import race cond
icecrasher321 Oct 3, 2025
e9487a9
Merge branch 'feat/copilot-operations' of github.com:simstudioai/sim …
icecrasher321 Oct 3, 2025
84fcba0
Copilot settings
Sg312 Oct 4, 2025
ff1ab81
fix response format
icecrasher321 Oct 4, 2025
3900762
Merge branch 'feat/copilot-operations' of github.com:simstudioai/sim …
icecrasher321 Oct 4, 2025
96379c1
stop loops/parallels copilot generation from breaking diff view
icecrasher321 Oct 4, 2025
6abd7bf
fix lint
icecrasher321 Oct 4, 2025
929a69d
Merge origin/staging into feat/copilot-operations
icecrasher321 Oct 4, 2025
3fbb7be
Apply suggestion from @greptile-apps[bot]
icecrasher321 Oct 4, 2025
cac7050
fix tests
icecrasher321 Oct 4, 2025
6340a85
fix lint
icecrasher321 Oct 4, 2025
4 changes: 3 additions & 1 deletion apps/sim/.env.example
@@ -1,6 +1,8 @@
 # Database (Required)
 DATABASE_URL="postgresql://postgres:password@localhost:5432/postgres"
-# DATABASE_SSL=TRUE # Optional: Enable SSL for database connections (defaults to FALSE)
+# DATABASE_SSL=disable # Optional: SSL mode (disable, prefer, require, verify-ca, verify-full)
+# DATABASE_SSL_CA= # Optional: Base64-encoded CA certificate (required for verify-ca/verify-full)
+# To generate: cat your-ca.crt | base64 | tr -d '\n'

 # PostgreSQL Port (Optional) - defaults to 5432 if not specified
 # POSTGRES_PORT=5432
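These variables map naturally onto the ssl options that node-postgres accepts. A minimal sketch of how a connection pool might consume them, assuming the env var names from .env.example; the exact wiring inside Sim's db package is not shown in this diff:

// Hypothetical sketch: wiring DATABASE_SSL / DATABASE_SSL_CA into a pg Pool.
import { Pool } from 'pg'

const sslMode = process.env.DATABASE_SSL ?? 'disable'

// Decode the base64-encoded CA certificate when a verifying mode is requested.
const ca =
  process.env.DATABASE_SSL_CA && sslMode.startsWith('verify')
    ? Buffer.from(process.env.DATABASE_SSL_CA, 'base64').toString('utf8')
    : undefined

const pool = new Pool({
  connectionString: process.env.DATABASE_URL,
  ssl:
    sslMode === 'disable'
      ? false
      : {
          // 'prefer'/'require' encrypt without chain verification;
          // 'verify-ca'/'verify-full' verify against the provided CA.
          rejectUnauthorized: sslMode.startsWith('verify'),
          ca,
        },
})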
131 changes: 131 additions & 0 deletions apps/sim/app/api/copilot/user-models/route.ts
@@ -0,0 +1,131 @@
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { auth } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/../../packages/db'
import { settings } from '@/../../packages/db/schema'

const logger = createLogger('CopilotUserModelsAPI')

const DEFAULT_ENABLED_MODELS: Record<string, boolean> = {
  'gpt-4o': false,
  'gpt-4.1': false,
  'gpt-5-fast': false,
  'gpt-5': true,
  'gpt-5-medium': true,
  'gpt-5-high': false,
  o3: true,
  'claude-4-sonnet': true,
  'claude-4.5-sonnet': true,
  'claude-4.1-opus': true,
}

// GET - Fetch user's enabled models
export async function GET(request: NextRequest) {
  try {
    const session = await auth.api.getSession({ headers: request.headers })

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const userId = session.user.id

    // Try to fetch existing settings record
    const [userSettings] = await db
      .select()
      .from(settings)
      .where(eq(settings.userId, userId))
      .limit(1)

    if (userSettings) {
      const userModelsMap = (userSettings.copilotEnabledModels as Record<string, boolean>) || {}

      // Merge: start with defaults, then override with user's existing preferences
      const mergedModels = { ...DEFAULT_ENABLED_MODELS }
      for (const [modelId, enabled] of Object.entries(userModelsMap)) {
        mergedModels[modelId] = enabled
      }

      // If we added any new models, update the database
      const hasNewModels = Object.keys(DEFAULT_ENABLED_MODELS).some(
        (key) => !(key in userModelsMap)
      )

      if (hasNewModels) {
        await db
          .update(settings)
          .set({
            copilotEnabledModels: mergedModels,
            updatedAt: new Date(),
          })
          .where(eq(settings.userId, userId))
      }

      return NextResponse.json({
        enabledModels: mergedModels,
      })
    }

    // If no settings record exists, create one with empty object (client will use defaults)
    const [created] = await db
      .insert(settings)
      .values({
        id: userId,
        userId,
        copilotEnabledModels: {},
      })
      .returning()

    return NextResponse.json({
      enabledModels: DEFAULT_ENABLED_MODELS,
    })
  } catch (error) {
    logger.error('Failed to fetch user models', { error })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

// PUT - Update user's enabled models
export async function PUT(request: NextRequest) {
  try {
    const session = await auth.api.getSession({ headers: request.headers })

    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const userId = session.user.id
    const body = await request.json()

    if (!body.enabledModels || typeof body.enabledModels !== 'object') {
      return NextResponse.json({ error: 'enabledModels must be an object' }, { status: 400 })
    }

    // Check if settings record exists
    const [existing] = await db.select().from(settings).where(eq(settings.userId, userId)).limit(1)

    if (existing) {
      // Update existing record
      await db
        .update(settings)
        .set({
          copilotEnabledModels: body.enabledModels,
          updatedAt: new Date(),
        })
        .where(eq(settings.userId, userId))
    } else {
      // Create new settings record
      await db.insert(settings).values({
        id: userId,
        userId,
        copilotEnabledModels: body.enabledModels,
      })
    }

    return NextResponse.json({ success: true })
  } catch (error) {
    logger.error('Failed to update user models', { error })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}
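A client can exercise this route with plain fetch calls. The sketch below follows the request/response shape defined above (GET returns enabledModels, PUT expects an enabledModels object); the helper names themselves are illustrative, not part of the PR:

// Hypothetical client-side helpers for /api/copilot/user-models.
type EnabledModels = Record<string, boolean>

async function fetchEnabledModels(): Promise<EnabledModels> {
  const res = await fetch('/api/copilot/user-models')
  if (!res.ok) throw new Error(`GET failed: ${res.status}`)
  const data = await res.json()
  return data.enabledModels
}

async function updateEnabledModels(enabledModels: EnabledModels): Promise<void> {
  const res = await fetch('/api/copilot/user-models', {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ enabledModels }),
  })
  if (!res.ok) throw new Error(`PUT failed: ${res.status}`)
}

// Example: disable gpt-4o while keeping the user's other preferences intact.
// const models = await fetchEnabledModels()
// await updateEnabledModels({ ...models, 'gpt-4o': false })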
8 changes: 3 additions & 5 deletions apps/sim/app/api/logs/route.ts
@@ -94,16 +94,14 @@ export async function GET(request: NextRequest) {
       workflowUpdatedAt: workflow.updatedAt,
     }

-    // Optimized query: Start by filtering workflows in the workspace with user permissions
-    // This ensures we scan only relevant logs instead of the entire table
     const baseQuery = db
       .select(selectColumns)
       .from(workflowExecutionLogs)
       .innerJoin(
         workflow,
         and(
           eq(workflowExecutionLogs.workflowId, workflow.id),
-          eq(workflow.workspaceId, params.workspaceId) // Filter workspace during join!
+          eq(workflow.workspaceId, params.workspaceId)
         )
       )
       .innerJoin(
@@ -184,15 +182,15 @@ export async function GET(request: NextRequest) {
       .limit(params.limit)
       .offset(params.offset)

-    // Get total count for pagination using the same optimized join structure
+    // Get total count for pagination using the same join structure
     const countQuery = db
       .select({ count: sql<number>`count(*)` })
       .from(workflowExecutionLogs)
       .innerJoin(
         workflow,
         and(
           eq(workflowExecutionLogs.workflowId, workflow.id),
-          eq(workflow.workspaceId, params.workspaceId) // Same optimization
+          eq(workflow.workspaceId, params.workspaceId)
         )
       )
       .innerJoin(
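Both hunks keep the pattern whose comments are being removed: the workspaceId constraint lives inside the join condition rather than in a trailing .where(). A stand-alone sketch of that pattern, with placeholder table definitions standing in for the ones in packages/db/schema, looks roughly like this:

// Sketch of the join-filter pattern; table shapes are assumptions for illustration.
import { and, eq } from 'drizzle-orm'
import { pgTable, text, timestamp } from 'drizzle-orm/pg-core'
import { drizzle } from 'drizzle-orm/node-postgres'
import { Pool } from 'pg'

const workflow = pgTable('workflow', {
  id: text('id').primaryKey(),
  workspaceId: text('workspace_id').notNull(),
})

const workflowExecutionLogs = pgTable('workflow_execution_logs', {
  id: text('id').primaryKey(),
  workflowId: text('workflow_id').notNull(),
  startedAt: timestamp('started_at').notNull(),
})

const db = drizzle(new Pool({ connectionString: process.env.DATABASE_URL }))

// Constraining workspaceId in the join condition itself; the deleted comments
// described this as limiting the scan to the workspace's workflows.
function logsForWorkspace(workspaceId: string) {
  return db
    .select()
    .from(workflowExecutionLogs)
    .innerJoin(
      workflow,
      and(
        eq(workflowExecutionLogs.workflowId, workflow.id),
        eq(workflow.workspaceId, workspaceId)
      )
    )
}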
4 changes: 2 additions & 2 deletions apps/sim/app/api/v1/logs/route.ts
@@ -106,7 +106,7 @@ export async function GET(request: NextRequest) {
     const conditions = buildLogFilters(filters)
     const orderBy = getOrderBy(params.order)

-    // Build and execute query - optimized to filter workspace during join
+    // Build and execute query
     const baseQuery = db
       .select({
         id: workflowExecutionLogs.id,
@@ -128,7 +128,7 @@ export async function GET(request: NextRequest) {
         workflow,
         and(
           eq(workflowExecutionLogs.workflowId, workflow.id),
-          eq(workflow.workspaceId, params.workspaceId) // Filter workspace during join!
+          eq(workflow.workspaceId, params.workspaceId)
         )
       )
       .innerJoin(