Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 0 additions & 2 deletions apps/api/src/routes/v1_keys_verifyKey.ts
Original file line number Diff line number Diff line change
Expand Up @@ -38,10 +38,8 @@ The key will be verified against the api's configuration. If the key does not be
.openapi({
description: `Tags do not influence the outcome of a verification.
They can be added to filter or aggregate historical verification data for your analytics needs.

To unkey, a tag is simply a string, we don't enforce any schema but leave that up to you.
The only exception is that each tag must be between 1 and 128 characters long.

A typical setup would be to add key-value pairs of resources or locations, that you need later when querying.
`,
example: ["path=/v1/users/123", "region=us-east-1"],
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
-- +goose up
-- NOTE(review): goose directives are conventionally cased "-- +goose Up" /
-- "-- +goose Down"; confirm the migration runner accepts the lowercase form.

-- Hourly pre-aggregation of key verifications, populated by
-- verifications.key_verifications_per_hour_mv_v3 (see the companion MV
-- migration). SummingMergeTree sums `count` for rows sharing the ORDER BY
-- key at merge time, so readers must still aggregate with sum(count) to
-- get exact totals (merges are asynchronous).
CREATE TABLE verifications.key_verifications_per_hour_v3
(
time DateTime,
workspace_id String,
key_space_id String,
identity_id String,
key_id String,
outcome LowCardinality(String),
tags Array(String),
count Int64
)
ENGINE = SummingMergeTree()
-- The ORDER BY tuple is the aggregation key: one logical row per
-- (workspace, key space, identity, key, hour bucket, tags, outcome).
ORDER BY (workspace_id, key_space_id, identity_id, key_id, time, tags, outcome)
;


-- +goose down
DROP TABLE verifications.key_verifications_per_hour_v3;
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
-- +goose up
-- NOTE(review): goose directives are conventionally cased "-- +goose Up" /
-- "-- +goose Down"; confirm the migration runner accepts the lowercase form.

-- Streams raw verification events into the hourly rollup table.
-- With a TO target, ClickHouse maps SELECT columns to table columns by
-- name (not position), so listing `count` before `time` here is safe.
-- The MV only sees rows inserted after its creation; the commented-out
-- INSERT below is the one-off backfill for pre-existing data.
CREATE MATERIALIZED VIEW IF NOT EXISTS verifications.key_verifications_per_hour_mv_v3
TO verifications.key_verifications_per_hour_v3
AS
SELECT
workspace_id,
key_space_id,
identity_id,
key_id,
outcome,
count(*) as count,
-- raw `time` is a unix-epoch value in milliseconds; truncate to the hour.
toStartOfHour(fromUnixTimestamp64Milli(time)) AS time,
tags
FROM verifications.raw_key_verifications_v1
GROUP BY
workspace_id,
key_space_id,
identity_id,
key_id,
outcome,
time,
tags
;


-- populate from existing data
-- INSERT INTO verifications.key_verifications_per_hour_v3
-- SELECT
-- toStartOfHour(fromUnixTimestamp64Milli(time)) AS time,
-- workspace_id,
-- key_space_id,
-- identity_id,
-- key_id,
-- outcome,
-- tags,
-- count(*) as count
-- FROM verifications.raw_key_verifications_v1
-- GROUP BY
-- workspace_id,
-- key_space_id,
-- identity_id,
-- key_id,
-- outcome,
-- time,
-- tags
-- ;

-- +goose down
DROP VIEW verifications.key_verifications_per_hour_mv_v3;
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
-- +goose up
-- NOTE(review): goose directives are conventionally cased "-- +goose Up" /
-- "-- +goose Down"; confirm the migration runner accepts the lowercase form.

-- Daily pre-aggregation of key verifications, populated by
-- verifications.key_verifications_per_day_mv_v3. Schema and engine are
-- identical to the hourly/monthly v3 tables; only the time granularity
-- differs. SummingMergeTree sums `count` for rows sharing the ORDER BY
-- key at merge time, so readers must still aggregate with sum(count).
CREATE TABLE verifications.key_verifications_per_day_v3
(
time DateTime,
workspace_id String,
key_space_id String,
identity_id String,
key_id String,
outcome LowCardinality(String),
tags Array(String),
count Int64
)
ENGINE = SummingMergeTree()
-- Aggregation key: one logical row per
-- (workspace, key space, identity, key, day bucket, tags, outcome).
ORDER BY (workspace_id, key_space_id, identity_id, key_id, time, tags, outcome)
;


-- +goose down
DROP TABLE verifications.key_verifications_per_day_v3;
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
-- +goose up
-- NOTE(review): goose directives are conventionally cased "-- +goose Up" /
-- "-- +goose Down"; confirm the migration runner accepts the lowercase form.

-- Streams raw verification events into the daily rollup table.
-- With a TO target, ClickHouse maps SELECT columns to table columns by
-- name (not position), so listing `count` before `time` here is safe.
-- The MV only sees rows inserted after its creation; the commented-out
-- INSERT below is the one-off backfill for pre-existing data.
CREATE MATERIALIZED VIEW IF NOT EXISTS verifications.key_verifications_per_day_mv_v3
TO verifications.key_verifications_per_day_v3
AS
SELECT
workspace_id,
key_space_id,
identity_id,
key_id,
outcome,
count(*) as count,
-- raw `time` is a unix-epoch value in milliseconds; truncate to the day.
toStartOfDay(fromUnixTimestamp64Milli(time)) AS time,
tags
FROM verifications.raw_key_verifications_v1
GROUP BY
workspace_id,
key_space_id,
identity_id,
key_id,
outcome,
time,
tags
;

-- populate from existing data
-- INSERT INTO verifications.key_verifications_per_day_v3
-- SELECT
-- toStartOfDay(fromUnixTimestamp64Milli(time)) AS time,
-- workspace_id,
-- key_space_id,
-- identity_id,
-- key_id,
-- outcome,
-- tags,
-- count(*) as count
-- FROM verifications.raw_key_verifications_v1
-- GROUP BY
-- workspace_id,
-- key_space_id,
-- identity_id,
-- key_id,
-- outcome,
-- time,
-- tags
-- ;


-- +goose down
DROP VIEW verifications.key_verifications_per_day_mv_v3;
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
-- +goose up
-- NOTE(review): goose directives are conventionally cased "-- +goose Up" /
-- "-- +goose Down"; confirm the migration runner accepts the lowercase form.

-- Monthly pre-aggregation of key verifications, populated by
-- verifications.key_verifications_per_month_mv_v3. Schema and engine are
-- identical to the hourly/daily v3 tables; only the time granularity
-- differs. SummingMergeTree sums `count` for rows sharing the ORDER BY
-- key at merge time, so readers must still aggregate with sum(count).
CREATE TABLE verifications.key_verifications_per_month_v3
(
time DateTime,
workspace_id String,
key_space_id String,
identity_id String,
key_id String,
outcome LowCardinality(String),
tags Array(String),
count Int64
)
ENGINE = SummingMergeTree()
-- Aggregation key: one logical row per
-- (workspace, key space, identity, key, month bucket, tags, outcome).
ORDER BY (workspace_id, key_space_id, identity_id, key_id, time, tags, outcome)
;


-- +goose down
DROP TABLE verifications.key_verifications_per_month_v3;
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
-- +goose up
-- NOTE(review): goose directives are conventionally cased "-- +goose Up" /
-- "-- +goose Down"; confirm the migration runner accepts the lowercase form.

-- Streams raw verification events into the monthly rollup table.
-- With a TO target, ClickHouse maps SELECT columns to table columns by
-- name (not position), so listing `count` before `time` here is safe.
-- The MV only sees rows inserted after its creation; the commented-out
-- INSERT below is the one-off backfill for pre-existing data.
CREATE MATERIALIZED VIEW IF NOT EXISTS verifications.key_verifications_per_month_mv_v3
TO verifications.key_verifications_per_month_v3
AS
SELECT
workspace_id,
key_space_id,
identity_id,
key_id,
outcome,
count(*) as count,
-- raw `time` is a unix-epoch value in milliseconds; truncate to the month.
toStartOfMonth(fromUnixTimestamp64Milli(time)) AS time,
tags
FROM verifications.raw_key_verifications_v1
GROUP BY
workspace_id,
key_space_id,
identity_id,
key_id,
outcome,
time,
tags
;

-- populate from existing data
-- (fixed alias: was `counts`, which would not match the table's `count` column)
-- INSERT INTO verifications.key_verifications_per_month_v3
-- SELECT
-- toStartOfMonth(fromUnixTimestamp64Milli(time)) AS time,
-- workspace_id,
-- key_space_id,
-- identity_id,
-- key_id,
-- outcome,
-- tags,
-- count(*) as count
-- FROM verifications.raw_key_verifications_v1
-- GROUP BY
-- workspace_id,
-- key_space_id,
-- identity_id,
-- key_id,
-- outcome,
-- time,
-- tags
-- ;


-- +goose down
DROP VIEW verifications.key_verifications_per_month_mv_v3;
2 changes: 0 additions & 2 deletions internal/clickhouse/src/client/client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,6 @@ export class Client implements Querier, Inserter {
url: config.url,

clickhouse_settings: {
async_insert: 1,
wait_for_async_insert: 1,
output_format_json_quote_64bit_integers: 0,
output_format_json_quote_64bit_floats: 0,
},
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
import { describe, expect, test } from "vitest";
import { ClickHouse } from "./index";

import { randomUUID } from "node:crypto";
import { z } from "zod";
import { ClickHouseContainer } from "./testutil";

/**
 * Inserts `n` random verification events spanning 90 days, then checks that
 * the hourly, daily, and monthly ClickHouse rollups each account for every
 * event, both in total and per outcome.
 *
 * Fixes over the previous version:
 * - `sumByOutcome` is seeded with an explicit zero per outcome. Previously an
 *   outcome that was never randomly sampled (realistic at n=10) left
 *   `sumByOutcome[k]` undefined while the expected value was 0, making the
 *   test flaky.
 * - Indexing with `bucket.outcome` / `k` is narrowed to `Outcome` instead of
 *   relying on an unchecked `string` index (rejected under strict TS with
 *   `noUncheckedIndexedAccess`).
 */
describe.each([10, 100, 1_000, 10_000, 100_000])("with %i verifications", (n) => {
  test(
    "accurately aggregates outcomes",
    async (t) => {
      const container = await ClickHouseContainer.start(t);

      const ch = new ClickHouse({ url: container.url() });

      const workspaceId = randomUUID();
      const keySpaceId = randomUUID();
      const keyId = randomUUID();

      const end = Date.now();
      const interval = 90 * 24 * 60 * 60 * 1000; // 90 days
      const start = end - interval;

      // Expected per-outcome counts, tallied as we generate events.
      const outcomes = {
        VALID: 0,
        RATE_LIMITED: 0,
        DISABLED: 0,
      };
      type Outcome = keyof typeof outcomes;
      const outcomeNames = Object.keys(outcomes) as Outcome[];

      const verifications = Array.from({ length: n }).map((_) => {
        const outcome = outcomeNames[Math.floor(Math.random() * outcomeNames.length)];
        outcomes[outcome]++;
        return {
          request_id: randomUUID(),
          // Random timestamp (ms) within [start, end].
          time: Math.round(Math.random() * (end - start + 1) + start),
          workspace_id: workspaceId,
          key_space_id: keySpaceId,
          key_id: keyId,
          outcome,
          region: "test",
          tags: ["tag"],
        };
      });

      // Insert in chunks of 1000 to keep payload sizes reasonable.
      for (let i = 0; i < verifications.length; i += 1000) {
        await ch.verifications.insert(verifications.slice(i, i + 1000));
      }

      // Sanity check: every raw event landed.
      const count = await ch.querier.query({
        query: "SELECT count(*) as count FROM verifications.raw_key_verifications_v1",
        schema: z.object({ count: z.number().int() }),
      })({});
      expect(count.err).toBeUndefined();
      expect(count.val!.at(0)!.count).toBe(n);

      // Query each rollup over a window wider than the data range so no
      // bucket is clipped at either edge.
      const hourly = await ch.verifications.perHour({
        workspaceId,
        keySpaceId,
        keyId,
        start: start - interval,
        end,
      });
      expect(hourly.err).toBeUndefined();

      const daily = await ch.verifications.perDay({
        workspaceId,
        keySpaceId,
        keyId,
        start: start - interval,
        end,
      });
      expect(daily.err).toBeUndefined();

      const monthly = await ch.verifications.perMonth({
        workspaceId,
        keySpaceId,
        keyId,
        start: start - interval,
        end,
      });
      expect(monthly.err).toBeUndefined();

      for (const buckets of [hourly.val!, daily.val!, monthly.val!]) {
        let total = 0;
        // Seed every outcome with 0 so outcomes that were never sampled
        // still compare equal to their expected count of 0.
        const sumByOutcome: Record<Outcome, number> = {
          VALID: 0,
          RATE_LIMITED: 0,
          DISABLED: 0,
        };
        for (const bucket of buckets) {
          total += bucket.count;
          sumByOutcome[bucket.outcome as Outcome] += bucket.count;
        }

        expect(total).toBe(n);

        for (const [k, v] of Object.entries(outcomes)) {
          expect(sumByOutcome[k as Outcome]).toEqual(v);
        }
      }
    },
    { timeout: 120_000 },
  );
});
6 changes: 3 additions & 3 deletions internal/clickhouse/src/verifications.ts
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ export function getVerificationsPerHour(ch: Querier) {
outcome,
sum(count) as count,
tags
FROM verifications.key_verifications_per_hour_v2
FROM verifications.key_verifications_per_hour_v3
WHERE
workspace_id = {workspaceId: String}
AND key_space_id = {keySpaceId: String}
Expand Down Expand Up @@ -91,7 +91,7 @@ export function getVerificationsPerDay(ch: Querier) {
outcome,
sum(count) as count,
tags
FROM verifications.key_verifications_per_day_v2
FROM verifications.key_verifications_per_day_v3
WHERE
workspace_id = {workspaceId: String}
AND key_space_id = {keySpaceId: String}
Expand All @@ -118,7 +118,7 @@ export function getVerificationsPerMonth(ch: Querier) {
outcome,
sum(count) as count,
tags
FROM verifications.key_verifications_per_month_v2
FROM verifications.key_verifications_per_month_v3
WHERE
workspace_id = {workspaceId: String}
AND key_space_id = {keySpaceId: String}
Expand Down
1 change: 1 addition & 0 deletions internal/clickhouse/vitest.config.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import { defineConfig } from "vitest/config";
export default defineConfig({
test: {
exclude: [],
bail: 1,
pool: "threads",
poolOptions: {
threads: {
Expand Down
11 changes: 2 additions & 9 deletions packages/api/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -9,19 +9,12 @@
"publishConfig": {
"access": "public"
},
"keywords": [
"unkey",
"client",
"api"
],
"keywords": ["unkey", "client", "api"],
"bugs": {
"url": "https://github.com/unkeyed/unkey/issues"
},
"homepage": "https://github.com/unkeyed/unkey#readme",
"files": [
"./dist/**",
"README.md"
],
"files": ["./dist/**", "README.md"],
"author": "Andreas Thomas <andreas@chronark.com>",
"scripts": {
"generate": "openapi-typescript https://api.unkey.dev/openapi.json -o ./src/openapi.d.ts",
Expand Down
Loading