({ fields, className
// biome-ignore lint/a11y/useKeyWithClickEvents: no need
handleClick(field)}
>
- {field.label}
+ {field.label}
{field.description(field.content as NonNullable)}
diff --git a/apps/dashboard/components/logs/hooks/use-bookmarked-filters.test.ts b/apps/dashboard/components/logs/hooks/use-bookmarked-filters.test.ts
index a86b43329f..92e76a5f1f 100644
--- a/apps/dashboard/components/logs/hooks/use-bookmarked-filters.test.ts
+++ b/apps/dashboard/components/logs/hooks/use-bookmarked-filters.test.ts
@@ -1,4 +1,4 @@
-import type { QuerySearchParams } from "@/app/(app)/logs/filters.schema";
+import type { QuerySearchParams } from "@/lib/schemas/logs.filter.schema";
import { act, renderHook } from "@testing-library/react";
import { beforeEach, describe, expect, it, vi } from "vitest";
import type { FilterValue } from "../validation/filter.types";
diff --git a/apps/dashboard/components/logs/hooks/use-bookmarked-filters.ts b/apps/dashboard/components/logs/hooks/use-bookmarked-filters.ts
index f16792e30d..50ea1036f5 100644
--- a/apps/dashboard/components/logs/hooks/use-bookmarked-filters.ts
+++ b/apps/dashboard/components/logs/hooks/use-bookmarked-filters.ts
@@ -1,4 +1,4 @@
-import { type QuerySearchParams, logsFilterFieldConfig } from "@/app/(app)/logs/filters.schema";
+import { type QuerySearchParams, logsFilterFieldConfig } from "@/lib/schemas/logs.filter.schema";
import { isBrowser } from "@/lib/utils";
import { useCallback, useEffect, useState } from "react";
import type { FilterValue } from "../validation/filter.types";
diff --git a/apps/dashboard/components/logs/overview-charts/overview-bar-chart.tsx b/apps/dashboard/components/logs/overview-charts/overview-bar-chart.tsx
index 3ff7862a6d..84779e7b44 100644
--- a/apps/dashboard/components/logs/overview-charts/overview-bar-chart.tsx
+++ b/apps/dashboard/components/logs/overview-charts/overview-bar-chart.tsx
@@ -171,6 +171,7 @@ export function OverviewBarChart({
- //@ts-expect-error safe to ignore for now
- createTimeIntervalFormatter(data, "HH:mm")(tooltipPayload)
+ createTimeIntervalFormatter(
+ data,
+ "HH:mm",
+ //@ts-expect-error safe to ignore for now
+ )(tooltipPayload)
}
/>
);
diff --git a/apps/dashboard/components/logs/queries/queries-context.tsx b/apps/dashboard/components/logs/queries/queries-context.tsx
index 601efd746d..d5472a998a 100644
--- a/apps/dashboard/components/logs/queries/queries-context.tsx
+++ b/apps/dashboard/components/logs/queries/queries-context.tsx
@@ -1,5 +1,4 @@
import type { QuerySearchParams as AuditSearchParams } from "@/app/(app)/audit/filters.schema";
-import type { QuerySearchParams } from "@/app/(app)/logs/filters.schema";
import type { RatelimitQuerySearchParams } from "@/app/(app)/ratelimits/[namespaceId]/logs/filters.schema";
import { type ReactNode, createContext, useContext } from "react";
import { type SavedFiltersGroup, useBookmarkedFilters } from "../hooks/use-bookmarked-filters";
@@ -91,6 +90,7 @@ export function useQueries() {
return context;
}
+import type { QuerySearchParams } from "@/lib/schemas/logs.filter.schema";
import { ChartActivity2 } from "@unkey/icons";
import React from "react";
import { iconsPerField } from "./utils";
@@ -175,7 +175,10 @@ export const defaultFormatValues = (
export const defaultGetIcon = (field: string): React.ReactNode => {
const Icon = iconsPerField[field] || ChartActivity2;
- return React.createElement(Icon, { size: "md-regular", className: "justify-center" });
+ return React.createElement(Icon, {
+ size: "md-regular",
+ className: "justify-center",
+ });
};
export const defaultFieldsToTruncate = [
diff --git a/apps/dashboard/components/logs/queries/utils.ts b/apps/dashboard/components/logs/queries/utils.ts
index b5282d9b98..a5d59ad4a0 100644
--- a/apps/dashboard/components/logs/queries/utils.ts
+++ b/apps/dashboard/components/logs/queries/utils.ts
@@ -11,7 +11,6 @@ import {
import React from "react";
import { auditLogsFilterFieldEnum } from "@/app/(app)/audit/filters.schema";
-import { logsFilterFieldEnum } from "@/app/(app)/logs/filters.schema";
import { ratelimitFilterFieldEnum } from "@/app/(app)/ratelimits/[namespaceId]/logs/filters.schema";
import {
Bucket,
@@ -28,9 +27,13 @@ import {
} from "@unkey/icons";
import type { AuditLogsFilterField } from "@/app/(app)/audit/filters.schema";
-import type { LogsFilterField, QuerySearchParams } from "@/app/(app)/logs/filters.schema";
import type { RatelimitFilterField } from "@/app/(app)/ratelimits/[namespaceId]/logs/filters.schema";
import { namespaceListFilterFieldEnum } from "@/app/(app)/ratelimits/_components/namespace-list-filters.schema";
+import {
+ type LogsFilterField,
+ type QuerySearchParams,
+ logsFilterFieldEnum,
+} from "@/lib/schemas/logs.filter.schema";
import type { IconProps } from "@unkey/icons/src/props";
import type { FC } from "react";
diff --git a/apps/dashboard/app/(app)/logs/filters.schema.ts b/apps/dashboard/lib/schemas/logs.filter.schema.ts
similarity index 97%
rename from apps/dashboard/app/(app)/logs/filters.schema.ts
rename to apps/dashboard/lib/schemas/logs.filter.schema.ts
index 2d766fded9..c34c2364d3 100644
--- a/apps/dashboard/app/(app)/logs/filters.schema.ts
+++ b/apps/dashboard/lib/schemas/logs.filter.schema.ts
@@ -1,5 +1,3 @@
-import { METHODS } from "./constants";
-
import type {
FilterValue,
NumberConfig,
@@ -27,7 +25,7 @@ export const logsFilterFieldConfig: FilterFieldConfigs = {
methods: {
type: "string",
operators: ["is"],
- validValues: METHODS,
+ validValues: ["GET", "POST", "PUT", "DELETE", "PATCH"] as const,
},
paths: {
type: "string",
diff --git a/apps/dashboard/lib/schemas/logs.schema.ts b/apps/dashboard/lib/schemas/logs.schema.ts
new file mode 100644
index 0000000000..4f3b432cf2
--- /dev/null
+++ b/apps/dashboard/lib/schemas/logs.schema.ts
@@ -0,0 +1,126 @@
+import { log } from "@unkey/clickhouse/src/logs";
+import { z } from "zod";
+import { logsFilterOperatorEnum } from "./logs.filter.schema";
+
+export type LogsRequestSchema = z.infer<typeof logsRequestSchema>;
+export const logsRequestSchema = z.object({
+ limit: z.number().int(),
+ startTime: z.number().int(),
+ endTime: z.number().int(),
+ since: z.string(),
+ path: z
+ .object({
+ filters: z.array(
+ z.object({
+ operator: logsFilterOperatorEnum,
+ value: z.string(),
+ }),
+ ),
+ })
+ .nullable(),
+ host: z
+ .object({
+ filters: z
+ .array(
+ z.object({
+ operator: z.literal("is"),
+ value: z.string(),
+ }),
+ )
+ .optional(),
+ exclude: z.array(z.string()).optional(),
+ })
+ .nullable(),
+ method: z
+ .object({
+ filters: z.array(
+ z.object({
+ operator: z.literal("is"),
+ value: z.string(),
+ }),
+ ),
+ })
+ .nullable(),
+ requestId: z
+ .object({
+ filters: z.array(
+ z.object({
+ operator: z.literal("is"),
+ value: z.string(),
+ }),
+ ),
+ })
+ .nullable(),
+ status: z
+ .object({
+ filters: z.array(
+ z.object({
+ operator: z.literal("is"),
+ value: z.number(),
+ }),
+ ),
+ })
+ .nullable(),
+ cursor: z.number().nullable().optional(),
+});
+
+export const logsResponseSchema = z.object({
+ logs: z.array(log),
+ hasMore: z.boolean(),
+ total: z.number(),
+ nextCursor: z.number().int().optional(),
+});
+
+export type LogsResponseSchema = z.infer<typeof logsResponseSchema>;
+
+// ### Timeseries
+
+export type TimeseriesRequestSchema = z.infer<typeof timeseriesRequestSchema>;
+export const timeseriesRequestSchema = z.object({
+ startTime: z.number().int(),
+ endTime: z.number().int(),
+ since: z.string(),
+ path: z
+ .object({
+ filters: z.array(
+ z.object({
+ operator: logsFilterOperatorEnum,
+ value: z.string(),
+ }),
+ ),
+ })
+ .nullable(),
+ host: z
+ .object({
+ filters: z
+ .array(
+ z.object({
+ operator: z.literal("is"),
+ value: z.string(),
+ }),
+ )
+ .optional(),
+ exclude: z.array(z.string()).optional(),
+ })
+ .nullable(),
+ method: z
+ .object({
+ filters: z.array(
+ z.object({
+ operator: z.literal("is"),
+ value: z.string(),
+ }),
+ ),
+ })
+ .nullable(),
+ status: z
+ .object({
+ filters: z.array(
+ z.object({
+ operator: z.literal("is"),
+ value: z.number(),
+ }),
+ ),
+ })
+ .nullable(),
+});
diff --git a/apps/dashboard/lib/trpc/routers/logs/llm-search/utils.ts b/apps/dashboard/lib/trpc/routers/logs/llm-search/utils.ts
index b415567135..26dad15e82 100644
--- a/apps/dashboard/lib/trpc/routers/logs/llm-search/utils.ts
+++ b/apps/dashboard/lib/trpc/routers/logs/llm-search/utils.ts
@@ -1,5 +1,5 @@
import { METHODS } from "@/app/(app)/logs/constants";
-import { filterOutputSchema, logsFilterFieldConfig } from "@/app/(app)/logs/filters.schema";
+import { filterOutputSchema, logsFilterFieldConfig } from "@/lib/schemas/logs.filter.schema";
import { TRPCError } from "@trpc/server";
import type OpenAI from "openai";
import { zodResponseFormat } from "openai/helpers/zod.mjs";
diff --git a/apps/dashboard/lib/trpc/routers/logs/query-logs/index.ts b/apps/dashboard/lib/trpc/routers/logs/query-logs/index.ts
index aacac64412..00aaa2ab61 100644
--- a/apps/dashboard/lib/trpc/routers/logs/query-logs/index.ts
+++ b/apps/dashboard/lib/trpc/routers/logs/query-logs/index.ts
@@ -1,27 +1,20 @@
-import { queryLogsPayload } from "@/app/(app)/logs/components/table/query-logs.schema";
import { clickhouse } from "@/lib/clickhouse";
import { db } from "@/lib/db";
+import {
+ type LogsResponseSchema,
+ logsRequestSchema,
+ logsResponseSchema,
+} from "@/lib/schemas/logs.schema";
import { ratelimit, requireUser, requireWorkspace, t, withRatelimit } from "@/lib/trpc/trpc";
import { TRPCError } from "@trpc/server";
-import { log } from "@unkey/clickhouse/src/logs";
-import { z } from "zod";
import { transformFilters } from "./utils";
-const LogsResponse = z.object({
- logs: z.array(log),
- hasMore: z.boolean(),
- total: z.number(),
- nextCursor: z.number().int().optional(),
-});
-
-type LogsResponse = z.infer<typeof LogsResponse>;
-
export const queryLogs = t.procedure
.use(requireUser)
.use(requireWorkspace)
.use(withRatelimit(ratelimit.read))
- .input(queryLogsPayload)
- .output(LogsResponse)
+ .input(logsRequestSchema)
+ .output(logsResponseSchema)
.query(async ({ ctx, input }) => {
// Get workspace
const workspace = await db.query.workspaces
@@ -63,7 +56,7 @@ export const queryLogs = t.procedure
const logs = logsResult.val;
// Prepare the response with pagination info
- const response: LogsResponse = {
+ const response: LogsResponseSchema = {
logs,
hasMore: logs.length === input.limit,
total: countResult.val[0].total_count,
diff --git a/apps/dashboard/lib/trpc/routers/logs/query-logs/utils.test.ts b/apps/dashboard/lib/trpc/routers/logs/query-logs/utils.test.ts
index 6f4223ee3e..881a54e719 100644
--- a/apps/dashboard/lib/trpc/routers/logs/query-logs/utils.test.ts
+++ b/apps/dashboard/lib/trpc/routers/logs/query-logs/utils.test.ts
@@ -25,6 +25,7 @@ describe("transformFilters", () => {
hosts: [],
methods: [],
paths: [],
+ excludeHosts: [],
statusCodes: [],
requestIds: [],
cursorTime: null,
@@ -58,6 +59,7 @@ describe("transformFilters", () => {
startTime: payload.startTime,
endTime: payload.endTime,
limit: 50,
+ excludeHosts: [],
hosts: ["example.com"],
methods: ["GET"],
paths: [{ operator: "startsWith", value: "/api" }],
@@ -89,4 +91,28 @@ describe("transformFilters", () => {
expect(result.cursorTime).toBe(1706024400000);
});
+
+ it("should handle excluded hosts", () => {
+ const payload = {
+ ...basePayload,
+ host: {
+ filters: [{ operator: "is" as const, value: "example.com" }],
+ exclude: ["blocked.com", "spam.com"],
+ },
+ };
+
+ const result = transformFilters(payload);
+ expect(result).toEqual({
+ startTime: payload.startTime,
+ endTime: payload.endTime,
+ limit: 50,
+ hosts: ["example.com"],
+ excludeHosts: ["blocked.com", "spam.com"],
+ methods: [],
+ paths: [],
+ statusCodes: [],
+ requestIds: [],
+ cursorTime: null,
+ });
+ });
});
diff --git a/apps/dashboard/lib/trpc/routers/logs/query-logs/utils.ts b/apps/dashboard/lib/trpc/routers/logs/query-logs/utils.ts
index 1ca631ccb1..ec51748583 100644
--- a/apps/dashboard/lib/trpc/routers/logs/query-logs/utils.ts
+++ b/apps/dashboard/lib/trpc/routers/logs/query-logs/utils.ts
@@ -1,10 +1,9 @@
-import type { queryLogsPayload } from "@/app/(app)/logs/components/table/query-logs.schema";
+import type { LogsRequestSchema } from "@/lib/schemas/logs.schema";
import { getTimestampFromRelative } from "@/lib/utils";
import type { GetLogsClickhousePayload } from "@unkey/clickhouse/src/logs";
-import type { z } from "zod";
export function transformFilters(
- params: z.infer<typeof queryLogsPayload>,
+ params: LogsRequestSchema,
): Omit<GetLogsClickhousePayload, "workspaceId"> {
// Transform path filters to include operators
const paths =
@@ -14,10 +13,13 @@ export function transformFilters(
})) || [];
// Extract other filters as before
- const requestIds = params.requestId?.filters.map((f) => f.value) || [];
- const hosts = params.host?.filters.map((f) => f.value) || [];
- const methods = params.method?.filters.map((f) => f.value) || [];
- const statusCodes = params.status?.filters.map((f) => f.value) || [];
+ const requestIds = params.requestId?.filters?.map((f) => f.value) || [];
+ const methods = params.method?.filters?.map((f) => f.value) || [];
+ const statusCodes = params.status?.filters?.map((f) => f.value) || [];
+
+ // Hosts with include/exclude pattern
+ const hosts = params.host?.filters?.map((f) => f.value) || [];
+ const excludeHosts = params.host?.exclude || [];
let startTime = params.startTime;
let endTime = params.endTime;
@@ -35,6 +37,7 @@ export function transformFilters(
endTime,
requestIds,
hosts,
+ excludeHosts,
methods,
paths,
statusCodes,
diff --git a/apps/dashboard/lib/trpc/routers/logs/query-timeseries/index.ts b/apps/dashboard/lib/trpc/routers/logs/query-timeseries/index.ts
index 1669752481..5e911a93f8 100644
--- a/apps/dashboard/lib/trpc/routers/logs/query-timeseries/index.ts
+++ b/apps/dashboard/lib/trpc/routers/logs/query-timeseries/index.ts
@@ -1,7 +1,7 @@
-import { queryTimeseriesPayload } from "@/app/(app)/logs/components/charts/query-timeseries.schema";
import { clickhouse } from "@/lib/clickhouse";
import { ratelimit, requireUser, requireWorkspace, t, withRatelimit } from "@/lib/trpc/trpc";
+import { timeseriesRequestSchema } from "@/lib/schemas/logs.schema";
import { TRPCError } from "@trpc/server";
import { transformFilters } from "./utils";
@@ -9,7 +9,7 @@ export const queryTimeseries = t.procedure
.use(requireUser)
.use(requireWorkspace)
.use(withRatelimit(ratelimit.read))
- .input(queryTimeseriesPayload)
+ .input(timeseriesRequestSchema)
.query(async ({ ctx, input }) => {
const { params: transformedInputs, granularity } = transformFilters(input);
const result = await clickhouse.api.timeseries[granularity]({
diff --git a/apps/dashboard/lib/trpc/routers/logs/query-timeseries/utils.ts b/apps/dashboard/lib/trpc/routers/logs/query-timeseries/utils.ts
index dab121e671..5ac1ee3d7b 100644
--- a/apps/dashboard/lib/trpc/routers/logs/query-timeseries/utils.ts
+++ b/apps/dashboard/lib/trpc/routers/logs/query-timeseries/utils.ts
@@ -1,14 +1,13 @@
-import type { queryTimeseriesPayload } from "@/app/(app)/logs/components/charts/query-timeseries.schema";
+import type { TimeseriesRequestSchema } from "@/lib/schemas/logs.schema";
import { getTimestampFromRelative } from "@/lib/utils";
import type { LogsTimeseriesParams } from "@unkey/clickhouse/src/logs";
-import type { z } from "zod";
import {
type RegularTimeseriesGranularity,
type TimeseriesConfig,
getTimeseriesGranularity,
} from "../../utils/granularity";
-export function transformFilters(params: z.infer<typeof queryTimeseriesPayload>): {
+export function transformFilters(params: TimeseriesRequestSchema): {
params: Omit<LogsTimeseriesParams, "workspaceId">;
granularity: RegularTimeseriesGranularity;
} {
@@ -25,7 +24,8 @@ export function transformFilters(params: z.infer<typeof queryTimeseriesPayload>): {
params: {
startTime: timeConfig.startTime,
endTime: timeConfig.endTime,
- hosts: params.host?.filters.map((f) => f.value) || [],
+ hosts: params.host?.filters?.map((f) => f.value) || [],
+ excludeHosts: params.host?.exclude || [],
methods: params.method?.filters.map((f) => f.value) || [],
paths:
params.path?.filters.map((f) => ({
diff --git a/apps/dashboard/lib/trpc/routers/utils/granularity.test.ts b/apps/dashboard/lib/trpc/routers/utils/granularity.test.ts
index 012a6dae23..482a5f2cd3 100644
--- a/apps/dashboard/lib/trpc/routers/utils/granularity.test.ts
+++ b/apps/dashboard/lib/trpc/routers/utils/granularity.test.ts
@@ -89,49 +89,49 @@ describe("getTimeseriesGranularity", () => {
expectedGranularity: "perMinute",
},
{
- name: "should use per5Minutes for timeRange >= 2 hours & < 4 hours",
+ name: "should use perMinute for timeRange >= 2 hours & < 4 hours",
startTime: getTime(HOUR_IN_MS * 3),
- expectedGranularity: "per5Minutes",
+ expectedGranularity: "perMinute",
},
{
- name: "should use per15Minutes for timeRange >= 4 hours & < 6 hours",
+ name: "should use per5Minutes for timeRange >= 4 hours & < 6 hours",
startTime: getTime(HOUR_IN_MS * 5),
expectedGranularity: "per5Minutes",
},
{
- name: "should use per30Minutes for timeRange >= 6 hours & < 8 hours",
+ name: "should use per5Minutes for timeRange >= 6 hours & < 8 hours",
startTime: getTime(HOUR_IN_MS * 7),
expectedGranularity: "per5Minutes",
},
{
- name: "should use per30Minutes for timeRange >= 8 hours & < 12 hours",
+ name: "should use per15Minutes for timeRange >= 8 hours & < 12 hours",
startTime: getTime(HOUR_IN_MS * 10),
- expectedGranularity: "per30Minutes",
+ expectedGranularity: "per15Minutes",
},
{
- name: "should use perHour for timeRange >= 12 hours & < 16 hours",
+ name: "should use per15Minutes for timeRange >= 12 hours & < 16 hours",
startTime: getTime(HOUR_IN_MS * 14),
- expectedGranularity: "per30Minutes",
+ expectedGranularity: "per15Minutes",
},
{
- name: "should use per2Hours for timeRange >= 16 hours & < 24 hours",
+ name: "should use per15Minutes for timeRange >= 16 hours & < 24 hours",
startTime: getTime(HOUR_IN_MS * 20),
- expectedGranularity: "per2Hours",
+ expectedGranularity: "per15Minutes",
},
{
- name: "should use per4Hours for timeRange >= 24 hours & < 3 days",
+ name: "should use per15Minutes for timeRange >= 24 hours & < 3 days",
startTime: getTime(DAY_IN_MS * 2),
- expectedGranularity: "per4Hours",
+ expectedGranularity: "per15Minutes",
},
{
- name: "should use per6Hours for timeRange >= 3 days & < 7 days",
+ name: "should use per30Minutes for timeRange >= 3 days & < 7 days",
startTime: getTime(DAY_IN_MS * 5),
- expectedGranularity: "per6Hours",
+ expectedGranularity: "per30Minutes",
},
{
- name: "should use perDay for timeRange >= 7 days",
+ name: "should use per2Hours for timeRange >= 7 days",
startTime: getTime(DAY_IN_MS * 10),
- expectedGranularity: "perDay",
+ expectedGranularity: "per2Hours",
},
];
@@ -144,12 +144,12 @@ describe("getTimeseriesGranularity", () => {
it("should handle edge case at exactly 2 hours boundary", () => {
const result = getTimeseriesGranularity("forRegular", FIXED_NOW - HOUR_IN_MS * 2, FIXED_NOW);
- expect(result.granularity).toBe("per5Minutes");
+ expect(result.granularity).toBe("perMinute");
});
it("should handle edge case at exactly 7 days boundary", () => {
const result = getTimeseriesGranularity("forRegular", FIXED_NOW - DAY_IN_MS * 7, FIXED_NOW);
- expect(result.granularity).toBe("perDay");
+ expect(result.granularity).toBe("per2Hours");
});
});
@@ -271,7 +271,7 @@ describe("getTimeseriesGranularity", () => {
const oneDayAgo = FIXED_NOW - DAY_IN_MS;
const result = getTimeseriesGranularity("forRegular", oneDayAgo, FIXED_NOW);
- expect(result.granularity).toBe("per4Hours");
+ expect(result.granularity).toBe("per15Minutes");
expect(result.startTime).toBe(oneDayAgo);
expect(result.endTime).toBe(FIXED_NOW);
});
@@ -280,7 +280,7 @@ describe("getTimeseriesGranularity", () => {
const oneWeekAgo = FIXED_NOW - DAY_IN_MS * 7;
const result = getTimeseriesGranularity("forRegular", oneWeekAgo, FIXED_NOW);
- expect(result.granularity).toBe("perDay");
+ expect(result.granularity).toBe("per2Hours");
expect(result.startTime).toBe(oneWeekAgo);
expect(result.endTime).toBe(FIXED_NOW);
});
diff --git a/apps/dashboard/lib/trpc/routers/utils/granularity.ts b/apps/dashboard/lib/trpc/routers/utils/granularity.ts
index aa24ec0788..347a5b8dd1 100644
--- a/apps/dashboard/lib/trpc/routers/utils/granularity.ts
+++ b/apps/dashboard/lib/trpc/routers/utils/granularity.ts
@@ -107,23 +107,23 @@ export const getTimeseriesGranularity = (
}
} else {
if (timeRange >= DAY_IN_MS * 7) {
- granularity = "perDay";
+ granularity = "per2Hours";
} else if (timeRange >= DAY_IN_MS * 3) {
- granularity = "per6Hours";
+ granularity = "per30Minutes";
} else if (timeRange >= HOUR_IN_MS * 24) {
- granularity = "per4Hours";
+ granularity = "per15Minutes";
} else if (timeRange >= HOUR_IN_MS * 16) {
- granularity = "per2Hours";
+ granularity = "per15Minutes";
} else if (timeRange >= HOUR_IN_MS * 12) {
- granularity = "per30Minutes";
+ granularity = "per15Minutes";
} else if (timeRange >= HOUR_IN_MS * 8) {
- granularity = "per30Minutes";
+ granularity = "per15Minutes";
} else if (timeRange >= HOUR_IN_MS * 6) {
granularity = "per5Minutes";
} else if (timeRange >= HOUR_IN_MS * 4) {
granularity = "per5Minutes";
} else if (timeRange >= HOUR_IN_MS * 2) {
- granularity = "per5Minutes";
+ granularity = "perMinute";
} else {
granularity = "perMinute";
}
diff --git a/internal/clickhouse/src/logs-timeseries.test.ts b/internal/clickhouse/src/logs-timeseries.test.ts
index 15647aad3b..5056d6e2fb 100644
--- a/internal/clickhouse/src/logs-timeseries.test.ts
+++ b/internal/clickhouse/src/logs-timeseries.test.ts
@@ -70,6 +70,7 @@ describe.each([10, 100, 1_000, 10_000, 100_000])("with %i requests", (n) => {
statusCodes: [],
paths: [],
hosts: [],
+ excludeHosts: [],
methods: [],
startTime: new Date(Date.now() - 24 * 60 * 60 * 1000).getTime(), // 24 hours ago
endTime: Date.now(),
@@ -81,6 +82,7 @@ describe.each([10, 100, 1_000, 10_000, 100_000])("with %i requests", (n) => {
const hourly = await ch.api.timeseries.perHour({
workspaceId,
statusCodes: [],
+ excludeHosts: [],
paths: [],
hosts: [],
methods: [],
@@ -96,6 +98,7 @@ describe.each([10, 100, 1_000, 10_000, 100_000])("with %i requests", (n) => {
statusCodes: [],
paths: [],
hosts: [],
+ excludeHosts: [],
methods: [],
startTime: new Date(Date.now() - 30 * 24 * 60 * 60 * 1000).getTime(), // 30 days ago
endTime: Date.now(),
diff --git a/internal/clickhouse/src/logs.ts b/internal/clickhouse/src/logs.ts
index 0ad25bb81e..9f12ece0ec 100644
--- a/internal/clickhouse/src/logs.ts
+++ b/internal/clickhouse/src/logs.ts
@@ -15,6 +15,7 @@ export const getLogsClickhousePayload = z.object({
)
.nullable(),
hosts: z.array(z.string()).nullable(),
+ excludeHosts: z.array(z.string()).nullable(),
methods: z.array(z.string()).nullable(),
requestIds: z.array(z.string()).nullable(),
statusCodes: z.array(z.number().int()).nullable(),
@@ -93,6 +94,13 @@ export function getLogs(ch: Querier) {
ELSE TRUE
END
)
+ AND (
+ CASE
+ WHEN length({excludeHosts: Array(String)}) > 0 THEN
+ host NOT IN {excludeHosts: Array(String)}
+ ELSE TRUE
+ END
+ )
---------- Apply method filter
AND (
@@ -185,6 +193,7 @@ export const logsTimeseriesParams = z.object({
)
.nullable(),
hosts: z.array(z.string()).nullable(),
+ excludeHosts: z.array(z.string()).nullable(),
methods: z.array(z.string()).nullable(),
statusCodes: z.array(z.number().int()).nullable(),
});
@@ -210,47 +219,47 @@ type TimeInterval = {
const INTERVALS: Record = {
minute: {
- table: "metrics.api_requests_per_minute_v1",
+ table: "default.api_requests_per_minute_v2",
step: "MINUTE",
stepSize: 1,
},
fiveMinutes: {
- table: "metrics.api_requests_per_minute_v1",
+ table: "default.api_requests_per_minute_v2",
step: "MINUTES",
stepSize: 5,
},
fifteenMinutes: {
- table: "metrics.api_requests_per_minute_v1",
+ table: "default.api_requests_per_minute_v2",
step: "MINUTES",
stepSize: 15,
},
thirtyMinutes: {
- table: "metrics.api_requests_per_minute_v1",
+ table: "default.api_requests_per_minute_v2",
step: "MINUTES",
stepSize: 30,
},
hour: {
- table: "metrics.api_requests_per_hour_v1",
+ table: "default.api_requests_per_hour_v2",
step: "HOUR",
stepSize: 1,
},
twoHours: {
- table: "metrics.api_requests_per_hour_v1",
+ table: "default.api_requests_per_hour_v2",
step: "HOURS",
stepSize: 2,
},
fourHours: {
- table: "metrics.api_requests_per_hour_v1",
+ table: "default.api_requests_per_hour_v2",
step: "HOURS",
stepSize: 4,
},
sixHours: {
- table: "metrics.api_requests_per_hour_v1",
+ table: "default.api_requests_per_hour_v2",
step: "HOURS",
stepSize: 6,
},
day: {
- table: "metrics.api_requests_per_day_v1",
+ table: "default.api_requests_per_day_v2",
step: "DAY",
stepSize: 1,
},
@@ -313,6 +322,12 @@ function getLogsTimeseriesWhereClause(
WHEN length({hosts: Array(String)}) > 0 THEN
host IN {hosts: Array(String)}
ELSE TRUE
+ END)
+ AND
+ (CASE
+ WHEN length({excludeHosts: Array(String)}) > 0 THEN
+ host NOT IN {excludeHosts: Array(String)}
+ ELSE TRUE
END)`,
// Method filter
`(CASE