77 changes: 37 additions & 40 deletions .github/shared/test/spec-model.test.js
@@ -1,3 +1,3 @@
// @ts-check

import { randomUUID } from "crypto";
@@ -7,6 +7,7 @@
import { mapAsync } from "../src/array.js";
import { ConsoleLogger } from "../src/logger.js";
import { SpecModel } from "../src/spec-model.js";
import { Duration } from "../src/time.js";
import { repoRoot } from "./repo.js";

const options = { logger: new ConsoleLogger(/*debug*/ true) };
@@ -305,52 +306,48 @@
// the '.skip' from the describe block. Put '.skip' back in when done or this
// test may fail unexpectedly in the future.
describe.skip("Parse readmes", () => {
it(
"Does not produce exceptions",
{ timeout: 30 * 60 * 1000 /* 30 minutes */ },
async ({ expect }) => {
const excludeFolders = [
"authorization", // specification/authorization/resource-manager/readme.md defines has duplicate tags including 'package-2020-10-01'
"azureactivedirectory", // specification/azureactivedirectory/resource-manager/readme.md has duplicate tags including 'package-preview-2020-07'
"cost-management", // specification/cost-management/resource-manager/readme.md has duplicate tags including 'package-2019-01'
"migrate", // specification/migrate/resource-manager/readme.md has duplicate tags including 'package-migrate-2023-04'
"quota", // specification/quota/resource-manager/readme.md has duplicate tags including 'package-2023-02-01'
"redisenterprise", // specification/redisenterprise/resource-manager/readme.md has duplicate tags including 'package-2024-02'
"security", // specification/security/resource-manager/readme.md has duplicate tags including 'package-2021-07-preview-only'
"confidentialledger", // data-plane/readme.md tag 'package-2022-04-20-preview-ledger' points to a swagger file that doesn't exist
"network", // network takes a long time to evaluate
"servicenetworking", // servicenetworking includes a swagger file which references a file that doesn't exist
];
it("Does not produce exceptions", { timeout: 30 * Duration.Minute }, async ({ expect }) => {
const excludeFolders = [
"authorization", // specification/authorization/resource-manager/readme.md defines has duplicate tags including 'package-2020-10-01'
"azureactivedirectory", // specification/azureactivedirectory/resource-manager/readme.md has duplicate tags including 'package-preview-2020-07'
"cost-management", // specification/cost-management/resource-manager/readme.md has duplicate tags including 'package-2019-01'
"migrate", // specification/migrate/resource-manager/readme.md has duplicate tags including 'package-migrate-2023-04'
"quota", // specification/quota/resource-manager/readme.md has duplicate tags including 'package-2023-02-01'
"redisenterprise", // specification/redisenterprise/resource-manager/readme.md has duplicate tags including 'package-2024-02'
"security", // specification/security/resource-manager/readme.md has duplicate tags including 'package-2021-07-preview-only'
"confidentialledger", // data-plane/readme.md tag 'package-2022-04-20-preview-ledger' points to a swagger file that doesn't exist
"network", // network takes a long time to evaluate
"servicenetworking", // servicenetworking includes a swagger file which references a file that doesn't exist
];

// List all folders under specification/
const folders = await readdir(join(repoRoot, "specification"), {
withFileTypes: true,
});
const services = folders
.filter((f) => f.isDirectory() && !excludeFolders.includes(f.name))
.map((f) => f.name);
for (const folder of services) {
// Folders are listed in alphabetical order, when running this function
        // iteratively over all service folders, a value can be placed in this
// condition to skip folders that appear before a given folder. This means
// you won't have to wait for tests to run over all folders that have
// previously passed.
if (folder < "000") {
console.log(`Skipping service: ${folder}`);
continue;
}
// List all folders under specification/
const folders = await readdir(join(repoRoot, "specification"), {
withFileTypes: true,
});
const services = folders
.filter((f) => f.isDirectory() && !excludeFolders.includes(f.name))
.map((f) => f.name);
for (const folder of services) {
// Folders are listed in alphabetical order, when running this function
      // iteratively over all service folders, a value can be placed in this
// condition to skip folders that appear before a given folder. This means
// you won't have to wait for tests to run over all folders that have
// previously passed.
if (folder < "000") {
console.log(`Skipping service: ${folder}`);
continue;
}

console.log(`Testing service: ${folder}`);
const specModel = new SpecModel(`specification/${folder}`, options);
console.log(`Testing service: ${folder}`);
const specModel = new SpecModel(`specification/${folder}`, options);

expect(specModel).toBeDefined();
}
},
);
expect(specModel).toBeDefined();
}
});

it(
"runs properly against specific services",
{ timeout: 30 * 60 * 1000 /* 30 minutes */ },
{ timeout: 30 * Duration.Minute },
async ({ expect }) => {
const folders = [
// Fill in services to test here
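The timeout change above replaces the hand-computed `30 * 60 * 1000` with `30 * Duration.Minute` from `../src/time.js`. The `Duration` module itself is not part of this diff; a minimal sketch consistent with how it is used here (millisecond-based constants, since `parseInt(resetHeader) * Duration.Second` converts epoch seconds for a `Date` in github.js below) might look like:

```js
// Hypothetical sketch of the Duration constants in ../src/time.js (not shown in this PR).
// Assumes millisecond units so that 30 * Duration.Minute === 30 * 60 * 1000.
export const Duration = {
  Second: 1000,
  Minute: 60 * 1000,
  Hour: 60 * 60 * 1000,
};
```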
8 changes: 5 additions & 3 deletions .github/workflows/src/context.js
@@ -1,8 +1,9 @@
// @ts-check

import { isFullGitSha } from "../../shared/src/git.js";
import { PER_PAGE_MAX } from "../../shared/src/github.js";
import { createLogHook, rateLimitHook } from "./github.js";
import { CoreLogger } from "./core-logger.js";
import { createLogHook, createRateLimitHook } from "./github.js";
import { getIssueNumber } from "./issues.js";

/**
@@ -31,8 +32,9 @@
core.debug(`context: ${JSON.stringify(context)}`);
}

github.hook.before("request", createLogHook(github.request.endpoint));
github.hook.after("request", rateLimitHook);
const coreLogger = new CoreLogger(core);
github.hook.before("request", createLogHook(github.request.endpoint, coreLogger));
github.hook.after("request", createRateLimitHook(coreLogger));

/** @type {{ owner: string, repo: string, head_sha: string, issue_number: number, run_id: number, details_url?: string }} */
let inputs;
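`context.js` now builds a `CoreLogger` from the actions `core` object and passes it to the hook factories instead of letting them write to the console. The `CoreLogger` class is not included in this diff; a plausible sketch, assuming it simply adapts `core` to the shared `ILogger` interface, is:

```js
// Hypothetical sketch of ./core-logger.js (not part of this diff).
// Assumes ILogger exposes at least debug/info/warn/error, each forwarded to @actions/core.
export class CoreLogger {
  /** @param {typeof import("@actions/core")} core */
  constructor(core) {
    this.core = core;
  }
  debug(message) {
    this.core.debug(message);
  }
  info(message) {
    this.core.info(message);
  }
  warn(message) {
    this.core.warning(message);
  }
  error(message) {
    this.core.error(message);
  }
}
```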
74 changes: 46 additions & 28 deletions .github/workflows/src/github.js
@@ -1,3 +1,3 @@
// @ts-check

import { PER_PAGE_MAX } from "../../shared/src/github.js";
@@ -105,48 +105,66 @@

/**
* @param {import("@octokit/endpoint").endpoint} endpoint
* @param {import('../../shared/src/logger.js').ILogger} logger
* @returns {(options: import("@octokit/types").RequestParameters & {url: string, method: string}) => void}
*/
export function createLogHook(endpoint) {
export function createLogHook(endpoint, logger) {
/**
* @param {import("@octokit/types").RequestParameters & {url: string, method: string}} options
*/
function logHook(options) {
const request = endpoint(options);
const { method, url, body } = request;
console.log(`[github] ${method.toUpperCase()} ${url} ${body ? JSON.stringify(body) : ""}`);
logger.info(`[github] ${method.toUpperCase()} ${url} ${body ? JSON.stringify(body) : ""}`);
}

return logHook;
}

/**
* @param {import("@octokit/types").OctokitResponse<any>} response
* @param {import('../../shared/src/logger.js').ILogger} logger
* @returns {(response: import("@octokit/types").OctokitResponse<any>) => void}
*/
export function rateLimitHook(response) {
const headers = response.headers;

const limit = parseInt(headers["x-ratelimit-limit"] || "");
const remaining = parseInt(headers["x-ratelimit-remaining"] || "");
const used = limit - remaining;

const reset = new Date(parseInt(response.headers["x-ratelimit-reset"] || "") * 1000);
const start = subtract(reset, Duration.Hour);
const elapsedMs = new Date().getTime() - start.getTime();
const elapsedFraction = elapsedMs / Duration.Hour;

// Example: If limit is 1000, and 6 minutes have elapsed (10% of 1 hour),
// availableLimit will be 100 (10% of total).
const availableLimit = limit * elapsedFraction;

// If load is > 100%, we are "running hot" and predicted to hit limit before reset
// Keep load < 50% for a safety margin. If regularly > 50%, optimize.
const load = used / availableLimit;

// const resource = headers["x-ratelimit-resource"];
export function createRateLimitHook(logger) {
/**
* @param {import("@octokit/types").OctokitResponse<any>} response
*/
function rateLimitHook(response) {
const {
"x-ratelimit-limit": limitHeader,
"x-ratelimit-remaining": remainingHeader,
"x-ratelimit-reset": resetHeader,
} = response.headers;

if (!limitHeader || !remainingHeader || !resetHeader) {
logger.debug(`[github] missing ratelimit header(s) in response`);
return;
}

const limit = parseInt(limitHeader);
const remaining = parseInt(remainingHeader);
const used = limit - remaining;

const reset = new Date(parseInt(resetHeader) * Duration.Second);
const start = subtract(reset, Duration.Hour);
const elapsedMs = new Date().getTime() - start.getTime();
const elapsedFraction = elapsedMs / Duration.Hour;

// Example: If limit is 1000, and 6 minutes have elapsed (10% of 1 hour),
// availableLimit will be 100 (10% of total).
const availableLimit = limit * elapsedFraction;

// If load is > 100%, we are "running hot" and predicted to hit limit before reset
// Keep load < 50% for a safety margin. If regularly > 50%, optimize.
const load = used / availableLimit;

// const resource = headers["x-ratelimit-resource"];

logger.info(
`[github] load: ${toPercent(load)}, used: ${used}, remaining: ${remaining}` +
`, reset: ${formatDuration(getDuration(new Date(), reset))}`,
);
}

console.log(
`[github] load: ${toPercent(load)}, used: ${used}, remaining: ${remaining}` +
`, reset: ${formatDuration(getDuration(new Date(), reset))}`,
);
return rateLimitHook;
}
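The load metric in `createRateLimitHook` assumes the primary GitHub rate limit resets hourly: the window start is reconstructed as reset minus one hour, and the requests used so far are compared against the fraction of the limit "earned" at that point in the window. A standalone recomputation of the same arithmetic (illustrative names, not part of the PR):

```js
// Illustrative re-derivation of the load metric, assuming a 1-hour rate-limit window.
const HOUR_MS = 60 * 60 * 1000;

function computeLoad(limit, remaining, resetEpochSeconds, now = new Date()) {
  const used = limit - remaining;
  const reset = new Date(resetEpochSeconds * 1000);
  const windowStart = new Date(reset.getTime() - HOUR_MS);
  const elapsedFraction = (now.getTime() - windowStart.getTime()) / HOUR_MS;
  const availableLimit = limit * elapsedFraction; // budget "earned" so far
  return used / availableLimit; // > 1 means on pace to exhaust the limit before reset
}

// 30 minutes into the window with 50 of 100 used: 50 / (100 * 0.5) = 1.0 (100% load),
// matching the first snapshot in the new tests below.
```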
152 changes: 149 additions & 3 deletions .github/workflows/test/github.test.js
@@ -1,6 +1,14 @@
import { describe, expect, it, vi } from "vitest";
import { getCheckRuns, getWorkflowRuns, writeToActionsSummary } from "../src/github.js";
import { createMockContext, createMockCore, createMockGithub } from "./mocks.js";
import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
import { add, Duration } from "../../shared/src/time.js";
import {
createLogHook,
createRateLimitHook,
getCheckRuns,
getWorkflowRuns,
writeToActionsSummary,
} from "../src/github.js";
import { createMockContext, createMockCore, createMockGithub, createMockLogger } from "./mocks.js";

const mockCore = createMockCore();

@@ -209,3 +217,141 @@
);
});
});

describe("createLogHook", () => {
it("logs request info with body", () => {
const mockLogger = createMockLogger();
const logHook = createLogHook((r) => r, mockLogger);

expect(
logHook({
method: "GET",
url: "https://test-url.com",
body: { "test-key": "test-value" },
}),
).toBeUndefined();

expect(mockLogger.info).toBeCalledTimes(1);
expect(mockLogger.info.mock.calls[0]).toMatchInlineSnapshot(`
[
"[github] GET https://test-url.com {"test-key":"test-value"}",
]
`);
});

it("logs request info without body", () => {
const mockLogger = createMockLogger();
const logHook = createLogHook((r) => r, mockLogger);

expect(
logHook({
method: "GET",
url: "https://test-url.com",
}),
).toBeUndefined();

expect(mockLogger.info).toBeCalledTimes(1);
expect(mockLogger.info.mock.calls[0]).toMatchInlineSnapshot(`
[
"[github] GET https://test-url.com ",
]
`);
});
});

describe("createRateLimitHook", () => {
beforeEach(() => {
vi.useFakeTimers();
vi.setSystemTime(new Date("2025-01-01T00:00:00Z"));
});

afterEach(() => {
vi.useRealTimers();
});

it("logs to debug if missing rate limit header", () => {
const mockLogger = createMockLogger();
const ratelimitHook = createRateLimitHook(mockLogger);

expect(ratelimitHook({ headers: {} })).toBeUndefined();

expect(mockLogger.info).toBeCalledTimes(0);
expect(mockLogger.debug).toBeCalledTimes(1);
expect(mockLogger.debug.mock.calls[0]).toMatchInlineSnapshot(`
[
"[github] missing ratelimit header(s) in response",
]
`);
});

it("logs to info if all rate limit headers", () => {
const mockLogger = createMockLogger();
const ratelimitHook = createRateLimitHook(mockLogger);

const reset = (add(new Date(), 30 * Duration.Minute).getTime() / Duration.Second).toFixed(0);

expect(
ratelimitHook({
headers: {
"x-ratelimit-limit": "100",
"x-ratelimit-remaining": "50",
"x-ratelimit-reset": reset,
},
}),
).toBeUndefined();
expect(mockLogger.info).toBeCalledTimes(1);
expect(mockLogger.info.mock.calls[0]).toMatchInlineSnapshot(`
[
"[github] load: 100%, used: 50, remaining: 50, reset: 00:30:00",
]
`);

expect(
ratelimitHook({
headers: {
"x-ratelimit-limit": "100",
"x-ratelimit-remaining": "75",
"x-ratelimit-reset": reset,
},
}),
).toBeUndefined();
expect(mockLogger.info).toBeCalledTimes(2);
expect(mockLogger.info.mock.calls[1]).toMatchInlineSnapshot(`
[
"[github] load: 50%, used: 25, remaining: 75, reset: 00:30:00",
]
`);

expect(
ratelimitHook({
headers: {
"x-ratelimit-limit": "100",
"x-ratelimit-remaining": "25",
"x-ratelimit-reset": reset,
},
}),
).toBeUndefined();
expect(mockLogger.info).toBeCalledTimes(3);
expect(mockLogger.info.mock.calls[2]).toMatchInlineSnapshot(`
[
"[github] load: 150%, used: 75, remaining: 25, reset: 00:30:00",
]
`);

expect(
ratelimitHook({
headers: {
"x-ratelimit-limit": "100",
"x-ratelimit-remaining": "0",
"x-ratelimit-reset": reset,
},
}),
).toBeUndefined();
expect(mockLogger.info).toBeCalledTimes(4);
expect(mockLogger.info.mock.calls[3]).toMatchInlineSnapshot(`
[
"[github] load: 200%, used: 100, remaining: 0, reset: 00:30:00",
]
`);
});
});
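These tests depend on a `createMockLogger` helper from `./mocks.js` that is not shown in the diff. Given that the assertions treat `mockLogger.info` and `mockLogger.debug` as vitest spies, a minimal sketch of what it likely provides (assumed, not confirmed by this PR) is:

```js
// Hypothetical sketch of createMockLogger in ./mocks.js; assumes each ILogger method
// is a vi.fn() so call counts and arguments can be asserted, as the tests above do.
import { vi } from "vitest";

export function createMockLogger() {
  return {
    debug: vi.fn(),
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
  };
}
```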