import { db } from '@sim/db'
import { permissions, workflowExecutionLogs } from '@sim/db/schema'
import { and, desc, eq, gte, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'

const logger = createLogger('WorkflowExecutionDetailsAPI')

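// Accepted query parameters (the values below are illustrative, not exhaustive):
//   ?timeFilter=24h                one of 1h | 12h | 24h | 1w
//   ?startTime=...&endTime=...     custom range as Date-parsable strings (e.g. ISO 8601)
//   ?triggers=api,webhook          comma-separated list of trigger types to include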
const QueryParamsSchema = z.object({
  timeFilter: z.enum(['1h', '12h', '24h', '1w']).optional(),
  startTime: z.string().optional(),
  endTime: z.string().optional(),
  triggers: z.string().optional(),
})

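/** Maps a time filter token to its duration in milliseconds; unrecognized values fall back to 24h. */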
function getTimeRangeMs(filter: string): number {
  switch (filter) {
    case '1h':
      return 60 * 60 * 1000
    case '12h':
      return 12 * 60 * 60 * 1000
    case '24h':
      return 24 * 60 * 60 * 1000
    case '1w':
      return 7 * 24 * 60 * 60 * 1000
    default:
      return 24 * 60 * 60 * 1000
  }
}

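/**
 * GET handler for workflow execution details.
 *
 * Verifies that the caller has a permission entry for the workspace, then returns
 * time-bucketed error rates, average durations, and execution counts over the
 * requested window, along with the most recent execution logs (trigger info,
 * extracted outputs, error message, duration, and cost).
 */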
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ id: string; workflowId: string }> }
) {
  const requestId = generateRequestId()

  try {
    const session = await getSession()
    if (!session?.user?.id) {
      logger.warn(`[${requestId}] Unauthorized workflow details access attempt`)
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    const userId = session.user.id
    const { id: workspaceId, workflowId } = await params
    const { searchParams } = new URL(request.url)
    const queryParams = QueryParamsSchema.parse(Object.fromEntries(searchParams.entries()))

    // Calculate time range - use custom times if provided, otherwise use timeFilter
    let endTime: Date
    let startTime: Date

    if (queryParams.startTime && queryParams.endTime) {
      startTime = new Date(queryParams.startTime)
      endTime = new Date(queryParams.endTime)
    } else {
      endTime = new Date()
      const timeRangeMs = getTimeRangeMs(queryParams.timeFilter || '24h')
      startTime = new Date(endTime.getTime() - timeRangeMs)
    }

    const timeRangeMs = endTime.getTime() - startTime.getTime()

    // Number of data points for the line charts
    const dataPoints = 30
    const segmentDurationMs = timeRangeMs / dataPoints

    logger.debug(`[${requestId}] Fetching workflow details for ${workflowId}`)

    // Check permissions
    const [permission] = await db
      .select()
      .from(permissions)
      .where(
        and(
          eq(permissions.entityType, 'workspace'),
          eq(permissions.entityId, workspaceId),
          eq(permissions.userId, userId)
        )
      )
      .limit(1)

    if (!permission) {
      logger.warn(`[${requestId}] User ${userId} has no permission for workspace ${workspaceId}`)
      return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
    }

    // Build conditions for log filtering
    const logConditions = [
      eq(workflowExecutionLogs.workflowId, workflowId),
      gte(workflowExecutionLogs.startedAt, startTime),
    ]

    // Add trigger filter if specified
    if (queryParams.triggers) {
      const triggerList = queryParams.triggers.split(',')
      logConditions.push(inArray(workflowExecutionLogs.trigger, triggerList))
    }

    // Fetch the most recent logs for this workflow since startTime (capped at 50)
    const logs = await db
      .select({
        id: workflowExecutionLogs.id,
        executionId: workflowExecutionLogs.executionId,
        level: workflowExecutionLogs.level,
        trigger: workflowExecutionLogs.trigger,
        startedAt: workflowExecutionLogs.startedAt,
        totalDurationMs: workflowExecutionLogs.totalDurationMs,
        executionData: workflowExecutionLogs.executionData,
        cost: workflowExecutionLogs.cost,
      })
      .from(workflowExecutionLogs)
      .where(and(...logConditions))
      .orderBy(desc(workflowExecutionLogs.startedAt))
      .limit(50)
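    // Note: both the per-segment chart metrics and the formatted log list below are
    // derived from this capped result set, so they reflect at most the 50 most recent
    // executions in the selected window.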

    // Calculate metrics for each time segment
    const errorRates: { timestamp: string; value: number }[] = []
    const durations: { timestamp: string; value: number }[] = []
    const executionCounts: { timestamp: string; value: number }[] = []

    for (let i = 0; i < dataPoints; i++) {
      const segmentStart = new Date(startTime.getTime() + i * segmentDurationMs)
      const segmentEnd = new Date(startTime.getTime() + (i + 1) * segmentDurationMs)

      // Filter logs for this segment
      const segmentLogs = logs.filter((log) => {
        const logTime = log.startedAt.getTime()
        return logTime >= segmentStart.getTime() && logTime < segmentEnd.getTime()
      })

      const totalExecutions = segmentLogs.length
      const errorExecutions = segmentLogs.filter((log) => log.level === 'error').length
      const errorRate = totalExecutions > 0 ? (errorExecutions / totalExecutions) * 100 : 0

      // Calculate average duration for this segment
      const durationsInSegment = segmentLogs
        .filter((log) => log.totalDurationMs !== null)
        .map((log) => log.totalDurationMs!)
      const avgDuration =
        durationsInSegment.length > 0
          ? durationsInSegment.reduce((sum, d) => sum + d, 0) / durationsInSegment.length
          : 0

      errorRates.push({
        timestamp: segmentStart.toISOString(),
        value: errorRate,
      })

      durations.push({
        timestamp: segmentStart.toISOString(),
        value: avgDuration,
      })

      executionCounts.push({
        timestamp: segmentStart.toISOString(),
        value: totalExecutions,
      })
    }

    // Helper function to recursively search for an error in trace spans
    const findErrorInSpans = (spans: any[]): string | null => {
      for (const span of spans) {
        if (span.status === 'error' && span.output?.error) {
          return span.output.error
        }
        if (span.children && Array.isArray(span.children)) {
          const childError = findErrorInSpans(span.children)
          if (childError) return childError
        }
      }
      return null
    }

    // Helper function to get all blocks from trace spans (flattened)
    const flattenTraceSpans = (spans: any[]): any[] => {
      const flattened: any[] = []
      for (const span of spans) {
        if (span.type !== 'workflow') {
          flattened.push(span)
        }
        if (span.children && Array.isArray(span.children)) {
          flattened.push(...flattenTraceSpans(span.children))
        }
      }
      return flattened
    }

    // Format logs for response
    const formattedLogs = logs.map((log) => {
      const executionData = log.executionData as any
      const triggerData = executionData?.trigger || {}
      const traceSpans = executionData?.traceSpans || []

      // Extract error message from trace spans
      let errorMessage = null
      if (log.level === 'error') {
        errorMessage = findErrorInSpans(traceSpans)
        // Fallback to executionData.errorDetails
        if (!errorMessage) {
          errorMessage = executionData?.errorDetails?.error || null
        }
      }

      // Extract outputs from the last successful block in the trace spans
      let outputs = null
      let cost = null

      if (traceSpans.length > 0) {
        // Flatten all blocks from trace spans
        const allBlocks = flattenTraceSpans(traceSpans)

        // Find the last successful block execution
        const successBlocks = allBlocks.filter(
          (span: any) =>
            span.status !== 'error' && span.output && Object.keys(span.output).length > 0
        )

        if (successBlocks.length > 0) {
          const lastBlock = successBlocks[successBlocks.length - 1]
          const blockOutput = lastBlock.output || {}

          // Clean up the output to show meaningful data
          // Priority: content > result > data > the whole output object
          if (blockOutput.content) {
            outputs = { content: blockOutput.content }
          } else if (blockOutput.result !== undefined) {
            outputs = { result: blockOutput.result }
          } else if (blockOutput.data !== undefined) {
            outputs = { data: blockOutput.data }
          } else {
            // Filter out internal/metadata fields for cleaner display
            const cleanOutput: any = {}
            for (const [key, value] of Object.entries(blockOutput)) {
              if (
                ![
                  'executionTime',
                  'tokens',
                  'model',
                  'cost',
                  'childTraceSpans',
                  'error',
                  'stackTrace',
                ].includes(key)
              ) {
                cleanOutput[key] = value
              }
            }
            if (Object.keys(cleanOutput).length > 0) {
              outputs = cleanOutput
            }
          }

          // Extract cost from the block output
          if (blockOutput.cost) {
            cost = blockOutput.cost
          }
        }
      }

      // Use the cost stored at the top level of the workflowExecutionLogs table
      // This is the same cost shown in the logs page
      const logCost = log.cost as any

      return {
        id: log.id,
        executionId: log.executionId,
        startedAt: log.startedAt.toISOString(),
        level: log.level,
        trigger: log.trigger,
        triggerUserId: triggerData.userId || null,
        triggerInputs: triggerData.inputs || triggerData.data || null,
        outputs,
        errorMessage,
        duration: log.totalDurationMs,
        cost: logCost
          ? {
              input: logCost.input || 0,
              output: logCost.output || 0,
              total: logCost.total || 0,
            }
          : null,
      }
    })

    logger.debug(`[${requestId}] Successfully calculated workflow details`)

    logger.debug(`[${requestId}] Returning ${formattedLogs.length} execution logs`)

    return NextResponse.json({
      errorRates,
      durations,
      executionCounts,
      logs: formattedLogs,
      startTime: startTime.toISOString(),
      endTime: endTime.toISOString(),
    })
  } catch (error) {
    logger.error(`[${requestId}] Error fetching workflow details:`, error)
    return NextResponse.json({ error: 'Failed to fetch workflow details' }, { status: 500 })
  }
}