diff --git a/.gitattributes b/.gitattributes index 594552221cc..4c262a83c4c 100644 --- a/.gitattributes +++ b/.gitattributes @@ -60,3 +60,5 @@ # https://github.com/github/linguist/issues/1626#issuecomment-401442069 # this only affects the repo's language statistics *.h linguist-language=C + +.github/workflows/*.lock.yml linguist-generated=true merge=ours \ No newline at end of file diff --git a/.github/policies/milestoneAssignment.prClosed.yml b/.github/policies/milestoneAssignment.prClosed.yml index 1ec03595d00..ad9aaad2f57 100644 --- a/.github/policies/milestoneAssignment.prClosed.yml +++ b/.github/policies/milestoneAssignment.prClosed.yml @@ -16,16 +16,16 @@ configuration: branch: main then: - addMilestone: - milestone: 13.2 + milestone: 13.3 description: '[Milestone Assignments] Assign Milestone to PRs merged to the `main` branch' - if: - payloadType: Pull_Request - isAction: action: Closed - targetsBranch: - branch: release/13.1 + branch: release/13.2 then: - removeMilestone - addMilestone: - milestone: 13.1.1 - description: '[Milestone Assignments] Assign Milestone to PRs merged to release/13.1 branch' + milestone: 13.2 + description: '[Milestone Assignments] Assign Milestone to PRs merged to release/13.2 branch' diff --git a/.github/skills/startup-perf/SKILL.md b/.github/skills/startup-perf/SKILL.md new file mode 100644 index 00000000000..33ca4d3875f --- /dev/null +++ b/.github/skills/startup-perf/SKILL.md @@ -0,0 +1,193 @@ +--- +name: startup-perf +description: Measures Aspire application startup performance using dotnet-trace and the TraceAnalyzer tool. Use this when asked to measure impact of a code change on Aspire application startup performance. +--- + +# Aspire Startup Performance Measurement + +This skill provides patterns and practices for measuring .NET Aspire application startup performance using the `Measure-StartupPerformance.ps1` script and the companion `TraceAnalyzer` tool. 
+ +## Overview + +The startup performance tooling collects `dotnet-trace` traces from an Aspire AppHost application and computes the startup duration from `AspireEventSource` events. Specifically, it measures the time between the `DcpModelCreationStart` (event ID 17) and `DcpModelCreationStop` (event ID 18) events emitted by the `Microsoft-Aspire-Hosting` EventSource provider. + +**Script Location**: `tools/perf/Measure-StartupPerformance.ps1` +**TraceAnalyzer Location**: `tools/perf/TraceAnalyzer/` +**Documentation**: `docs/getting-perf-traces.md` + +## Prerequisites + +- PowerShell 7+ +- `dotnet-trace` global tool (`dotnet tool install -g dotnet-trace`) +- .NET SDK (restored via `./restore.cmd` or `./restore.sh`) + +## Quick Start + +### Single Measurement + +```powershell +# From repository root — measures the default TestShop.AppHost +.\tools\perf\Measure-StartupPerformance.ps1 +``` + +### Multiple Iterations with Statistics + +```powershell +.\tools\perf\Measure-StartupPerformance.ps1 -Iterations 5 +``` + +### Custom Project + +```powershell +.\tools\perf\Measure-StartupPerformance.ps1 -ProjectPath "path\to\MyApp.AppHost.csproj" -Iterations 3 +``` + +### Preserve Traces for Manual Analysis + +```powershell +.\tools\perf\Measure-StartupPerformance.ps1 -Iterations 3 -PreserveTraces -TraceOutputDirectory "C:\traces" +``` + +### Verbose Output + +```powershell +.\tools\perf\Measure-StartupPerformance.ps1 -Verbose +``` + +## Parameters + +| Parameter | Default | Description | +|-----------|---------|-------------| +| `ProjectPath` | TestShop.AppHost | Path to the AppHost `.csproj` to measure | +| `Iterations` | 1 | Number of measurement runs (1–100) | +| `PreserveTraces` | `$false` | Keep `.nettrace` files after analysis | +| `TraceOutputDirectory` | temp folder | Directory for preserved trace files | +| `SkipBuild` | `$false` | Skip `dotnet build` before running | +| `TraceDurationSeconds` | 60 | Maximum trace collection time (1–86400) | +| 
`PauseBetweenIterationsSeconds` | 45 | Pause between iterations (0–3600) | +| `Verbose` | `$false` | Show detailed output | + +## How It Works + +The script follows this sequence: + +1. **Prerequisites check** — Verifies `dotnet-trace` is installed and the project exists. +2. **Build** — Builds the AppHost project in Release configuration (unless `-SkipBuild`). +3. **Build TraceAnalyzer** — Builds the companion `tools/perf/TraceAnalyzer` project. +4. **For each iteration:** + a. Locates the compiled executable (Arcade-style or traditional output paths). + b. Reads `launchSettings.json` for environment variables. + c. Launches the AppHost as a separate process. + d. Attaches `dotnet-trace` to the running process with the `Microsoft-Aspire-Hosting` provider. + e. Waits for the trace to complete (duration timeout or process exit). + f. Runs the TraceAnalyzer to extract the startup duration from the `.nettrace` file. + g. Cleans up processes. +5. **Reports results** — Prints per-iteration times and statistics (min, max, average, std dev). + +## TraceAnalyzer Tool + +The `tools/perf/TraceAnalyzer` is a small .NET console app that parses `.nettrace` files using the `Microsoft.Diagnostics.Tracing.TraceEvent` library. + +### What It Does + +- Opens the `.nettrace` file with `EventPipeEventSource` +- Listens for events from the `Microsoft-Aspire-Hosting` provider +- Extracts timestamps for `DcpModelCreationStart` (ID 17) and `DcpModelCreationStop` (ID 18) +- Outputs the duration in milliseconds (or `"null"` if events are not found) + +### Standalone Usage + +```bash +dotnet run --project tools/perf/TraceAnalyzer -c Release -- <path-to-trace-file.nettrace> +``` + +## Understanding Output + +### Successful Run + +``` +================================================== + Aspire Startup Performance Measurement +================================================== + +Project: TestShop.AppHost +Iterations: 3 +... + +Iteration 1 +---------------------------------------- +Starting TestShop.AppHost... 
+Attaching trace collection to PID 12345... +Collecting performance trace... +Trace collection completed. +Analyzing trace: ... +Startup time: 1234.56 ms + +... + +================================================== + Results Summary +================================================== + +Iteration StartupTimeMs +--------- ------------- + 1 1234.56 + 2 1189.23 + 3 1201.45 + +Statistics: + Successful iterations: 3 / 3 + Minimum: 1189.23 ms + Maximum: 1234.56 ms + Average: 1208.41 ms + Std Dev: 18.92 ms +``` + +### Common Issues + +| Symptom | Cause | Fix | +|---------|-------|-----| +| `dotnet-trace is not installed` | Missing global tool | Run `dotnet tool install -g dotnet-trace` | +| `Could not find compiled executable` | Project not built | Remove `-SkipBuild` or build manually | +| `Could not find DcpModelCreation events` | Trace too short or events not emitted | Increase `-TraceDurationSeconds` | +| `Application exited immediately` | App crash on startup | Check app logs, ensure dependencies are available | +| `dotnet-trace exited with code != 0` | Trace collection error | Check verbose output; trace file may still be valid | + +## Comparing Before/After Performance + +To measure the impact of a code change: + +```powershell +# 1. Measure baseline (on main branch) +git checkout main +.\tools\perf\Measure-StartupPerformance.ps1 -Iterations 5 -PreserveTraces -TraceOutputDirectory "C:\traces\baseline" + +# 2. Measure with changes +git checkout my-feature-branch +.\tools\perf\Measure-StartupPerformance.ps1 -Iterations 5 -PreserveTraces -TraceOutputDirectory "C:\traces\feature" + +# 3. Compare the reported averages and std devs +``` + +Use enough iterations (5+) and a consistent pause between iterations for reliable comparisons. 
+ +## Collecting Traces for Manual Analysis + +If you need to inspect trace files manually (e.g., in PerfView or Visual Studio): + +```powershell +.\tools\perf\Measure-StartupPerformance.ps1 -PreserveTraces -TraceOutputDirectory "C:\my-traces" +``` + +See `docs/getting-perf-traces.md` for guidance on analyzing traces with PerfView or `dotnet trace report`. + +## EventSource Provider Details + +The `Microsoft-Aspire-Hosting` EventSource emits events for key Aspire lifecycle milestones. The startup performance script focuses on: + +| Event ID | Event Name | Description | +|----------|------------|-------------| +| 17 | `DcpModelCreationStart` | Marks the beginning of DCP model creation | +| 18 | `DcpModelCreationStop` | Marks the completion of DCP model creation | + +The measured startup time is the wall-clock difference between these two events, representing the time to create all application services and supporting dependencies. diff --git a/.github/workflows/README.md b/.github/workflows/README.md index 06975dcd71c..e802e904706 100644 --- a/.github/workflows/README.md +++ b/.github/workflows/README.md @@ -99,3 +99,26 @@ When you comment on a PR (not an issue), the workflow will automatically push ch ### Concurrency The workflow uses concurrency groups based on the issue/PR number to prevent race conditions when multiple commands are issued on the same issue. + +## Backmerge Release Workflow + +The `backmerge-release.yml` workflow automatically creates PRs to merge changes from `release/13.2` back into `main`. + +### Schedule + +Runs daily at 00:00 UTC (4pm PT during standard time, 5pm PT during daylight saving time). Can also be triggered manually via `workflow_dispatch`. + +### Behavior + +1. **Change Detection**: Checks if `release/13.2` has commits not in `main` +2. **PR Creation**: If changes exist, creates a PR to merge `release/13.2` → `main` +3. **Auto-merge**: Enables GitHub's auto-merge feature, so the PR merges automatically once approved +4. 
**Conflict Handling**: If merge conflicts occur, creates an issue instead of a PR + +### Assignees + +PRs and conflict issues are automatically assigned to @joperezr and @radical. + +### Manual Trigger + +To trigger manually, go to Actions → "Backmerge Release to Main" → "Run workflow". diff --git a/.github/workflows/backmerge-release.yml b/.github/workflows/backmerge-release.yml new file mode 100644 index 00000000000..0e530c49dfc --- /dev/null +++ b/.github/workflows/backmerge-release.yml @@ -0,0 +1,166 @@ +name: Backmerge Release to Main + +on: + schedule: + - cron: '0 0 * * *' # Runs daily at 00:00 UTC (16:00 PST / 17:00 PDT) + workflow_dispatch: # Allow manual trigger + +permissions: + contents: write + pull-requests: write + issues: write + +jobs: + backmerge: + runs-on: ubuntu-latest + timeout-minutes: 15 + if: ${{ github.repository_owner == 'dotnet' }} + + steps: + - name: Checkout repository + uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 + with: + fetch-depth: 0 # Full history needed for merge + + - name: Check for changes to backmerge + id: check + run: | + git fetch origin main release/13.2 + BEHIND_COUNT=$(git rev-list --count origin/main..origin/release/13.2) + echo "behind_count=$BEHIND_COUNT" >> $GITHUB_OUTPUT + if [ "$BEHIND_COUNT" -gt 0 ]; then + echo "changes=true" >> $GITHUB_OUTPUT + echo "Found $BEHIND_COUNT commits in release/13.2 not in main" + else + echo "changes=false" >> $GITHUB_OUTPUT + echo "No changes to backmerge - main already contains all commits from release/13.2" + fi + + - name: Attempt merge and create branch + if: steps.check.outputs.changes == 'true' + id: merge + run: | + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + git checkout origin/main + git checkout -b backmerge/release-13.2-to-main + + # Attempt the merge + if git merge origin/release/13.2 --no-edit; then + echo "merge_success=true" >> $GITHUB_OUTPUT + git push origin 
backmerge/release-13.2-to-main --force + echo "Merge successful, branch pushed" + else + echo "merge_success=false" >> $GITHUB_OUTPUT + git merge --abort + echo "Merge conflicts detected" + fi + + - name: Create or update Pull Request + if: steps.check.outputs.changes == 'true' && steps.merge.outputs.merge_success == 'true' + id: create-pr + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + # Check if a PR already exists for this branch + EXISTING_PR=$(gh pr list --head backmerge/release-13.2-to-main --base main --json number --jq '.[0].number // empty') + + if [ -n "$EXISTING_PR" ]; then + echo "PR #$EXISTING_PR already exists, updating it" + echo "pull_request_number=$EXISTING_PR" >> $GITHUB_OUTPUT + else + PR_BODY="## Automated Backmerge + + This PR merges changes from \`release/13.2\` back into \`main\`. + + **Commits to merge:** ${{ steps.check.outputs.behind_count }} + + This PR was created automatically to keep \`main\` up-to-date with release branch changes. + Once approved, please merge using a **merge commit** (not squash or rebase). 
+ + --- + *This PR was generated by the [backmerge-release](${{ github.server_url }}/${{ github.repository }}/actions/workflows/backmerge-release.yml) workflow.*" + + # Remove leading whitespace from heredoc-style body + PR_BODY=$(echo "$PR_BODY" | sed 's/^ //') + + PR_URL=$(gh pr create \ + --head backmerge/release-13.2-to-main \ + --base main \ + --title "[Automated] Backmerge release/13.2 to main" \ + --body "$PR_BODY" \ + --assignee joperezr,radical \ + --label area-engineering-systems) + + PR_NUMBER=$(echo "$PR_URL" | grep -oE '[0-9]+$') + if [ -z "$PR_NUMBER" ]; then + echo "::error::Failed to extract PR number from: $PR_URL" + exit 1 + fi + echo "pull_request_number=$PR_NUMBER" >> $GITHUB_OUTPUT + echo "Created PR #$PR_NUMBER" + fi + + - name: Create issue for merge conflicts + if: steps.check.outputs.changes == 'true' && steps.merge.outputs.merge_success == 'false' + uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 + with: + script: | + const workflowRunUrl = `${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId}`; + + // Check if there's already an open issue for this + const existingIssues = await github.rest.issues.listForRepo({ + owner: context.repo.owner, + repo: context.repo.repo, + state: 'open', + labels: 'backmerge-conflict', + creator: 'github-actions[bot]' + }); + + if (existingIssues.data.length > 0) { + console.log(`Existing backmerge conflict issue found: #${existingIssues.data[0].number}`); + // Add a comment to the existing issue + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: existingIssues.data[0].number, + body: `⚠️ Merge conflicts still exist.\n\n**Workflow run:** ${workflowRunUrl}\n\nPlease resolve the conflicts manually.` + }); + return; + } + + // Create a new issue + const issueBody = [ + '## Backmerge Conflict', + '', + 'The automated backmerge from `release/13.2` to `main` failed due to merge 
conflicts.', + '', + '### What to do', + '', + '1. Checkout main and attempt the merge locally:', + ' ```bash', + ' git checkout main', + ' git pull origin main', + ' git merge origin/release/13.2', + ' ```', + '2. Resolve the conflicts', + '3. Push the merge commit or create a PR manually', + '', + '### Details', + '', + `**Workflow run:** ${workflowRunUrl}`, + '**Commits to merge:** ${{ steps.check.outputs.behind_count }}', + '', + '---', + `*This issue was created automatically by the [backmerge-release](${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/workflows/backmerge-release.yml) workflow.*` + ].join('\n'); + + await github.rest.issues.create({ + owner: context.repo.owner, + repo: context.repo.repo, + title: '[Backmerge] Merge conflicts between release/13.2 and main', + body: issueBody, + assignees: ['joperezr', 'radical'], + labels: ['area-engineering-systems', 'backmerge-conflict'] + }); diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a0007e8030f..0901bf2f888 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -43,6 +43,7 @@ jobs: eng/pipelines/.* eng/test-configuration.json \.github/workflows/apply-test-attributes.yml + \.github/workflows/backmerge-release.yml \.github/workflows/backport.yml \.github/workflows/dogfood-comment.yml \.github/workflows/generate-api-diffs.yml diff --git a/.github/workflows/daily-repo-status.lock.yml b/.github/workflows/daily-repo-status.lock.yml new file mode 100644 index 00000000000..f226e7a052b --- /dev/null +++ b/.github/workflows/daily-repo-status.lock.yml @@ -0,0 +1,1101 @@ +# +# ___ _ _ +# / _ \ | | (_) +# | |_| | __ _ ___ _ __ | |_ _ ___ +# | _ |/ _` |/ _ \ '_ \| __| |/ __| +# | | | | (_| | __/ | | | |_| | (__ +# \_| |_/\__, |\___|_| |_|\__|_|\___| +# __/ | +# _ _ |___/ +# | | | | / _| | +# | | | | ___ _ __ _ __| |_| | _____ ____ +# | |/\| |/ _ \ '__| |/ /| _| |/ _ \ \ /\ / / ___| +# \ /\ / (_) | | | | ( | | | | (_) \ V V /\__ \ +# \/ \/ \___/|_| 
|_|\_\|_| |_|\___/ \_/\_/ |___/ +# +# This file was automatically generated by gh-aw (v0.45.5). DO NOT EDIT. +# +# To update this file, edit the corresponding .md file and run: +# gh aw compile +# Not all edits will cause changes to this file. +# +# For more information: https://github.github.com/gh-aw/introduction/overview/ +# +# Daily burndown report for the Aspire 13.2 milestone. Tracks progress +# on issues closed, new bugs found, notable changes merged into the +# release/13.2 branch, pending PR reviews, and discussions. Generates +# a 7-day burndown chart using cached daily snapshots. +# +# frontmatter-hash: 427ab537ab52b999a8cbb139515b504ba7359549cab995530c129ea037f08ef0 + +name: "13.2 Release Burndown Report" +"on": + schedule: + - cron: "42 9 * * *" + # Friendly format: daily around 9am (scattered) + workflow_dispatch: + +permissions: {} + +concurrency: + group: "gh-aw-${{ github.workflow }}" + +run-name: "13.2 Release Burndown Report" + +jobs: + activation: + runs-on: ubuntu-slim + permissions: + contents: read + outputs: + comment_id: "" + comment_repo: "" + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@852cb06ad52958b402ed982b69957ffc57ca0619 # v0.45.5 + with: + destination: /opt/gh-aw/actions + - name: Checkout .github and .agents folders + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + sparse-checkout: | + .github + .agents + fetch-depth: 1 + persist-credentials: false + - name: Check workflow file timestamps + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_WORKFLOW_FILE: "daily-repo-status.lock.yml" + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/check_workflow_timestamp_api.cjs'); + await main(); + - name: Create prompt with built-in context + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + 
GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: ${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + run: | + bash /opt/gh-aw/actions/create_prompt_first.sh + cat << 'GH_AW_PROMPT_EOF' > "$GH_AW_PROMPT" + + GH_AW_PROMPT_EOF + cat "/opt/gh-aw/prompts/xpia.md" >> "$GH_AW_PROMPT" + cat "/opt/gh-aw/prompts/temp_folder_prompt.md" >> "$GH_AW_PROMPT" + cat "/opt/gh-aw/prompts/markdown.md" >> "$GH_AW_PROMPT" + cat "/opt/gh-aw/prompts/cache_memory_prompt.md" >> "$GH_AW_PROMPT" + cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" + + GitHub API Access Instructions + + The gh CLI is NOT authenticated. Do NOT use gh commands for GitHub operations. + + + To create or modify GitHub resources (issues, discussions, pull requests, etc.), you MUST call the appropriate safe output tool. Simply writing content will NOT work - the workflow requires actual tool calls. + + Temporary IDs: Some safe output tools support a temporary ID field (usually named temporary_id) so you can reference newly-created items elsewhere in the SAME agent output (for example, using #aw_abc1 in a later body). 
+ + **IMPORTANT - temporary_id format rules:** + - If you DON'T need to reference the item later, OMIT the temporary_id field entirely (it will be auto-generated if needed) + - If you DO need cross-references/chaining, you MUST match this EXACT validation regex: /^aw_[A-Za-z0-9]{3,8}$/i + - Format: aw_ prefix followed by 3 to 8 alphanumeric characters (A-Z, a-z, 0-9, case-insensitive) + - Valid alphanumeric characters: ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789 + - INVALID examples: aw_ab (too short), aw_123456789 (too long), aw_test-id (contains hyphen), aw_id_123 (contains underscore) + - VALID examples: aw_abc, aw_abc1, aw_Test123, aw_A1B2C3D4, aw_12345678 + - To generate valid IDs: use 3-8 random alphanumeric characters or omit the field to let the system auto-generate + + Do NOT invent other aw_* formats — downstream steps will reject them with validation errors matching against /^aw_[A-Za-z0-9]{3,8}$/i. + + Discover available tools from the safeoutputs MCP server. + + **Critical**: Tool calls write structured data that downstream jobs process. Without tool calls, follow-up actions will be skipped. + + **Note**: If you made no other safe output tool calls during this workflow execution, call the "noop" tool to provide a status message indicating completion or that no actions were needed. 
+ + + + The following GitHub context information is available for this workflow: + {{#if __GH_AW_GITHUB_ACTOR__ }} + - **actor**: __GH_AW_GITHUB_ACTOR__ + {{/if}} + {{#if __GH_AW_GITHUB_REPOSITORY__ }} + - **repository**: __GH_AW_GITHUB_REPOSITORY__ + {{/if}} + {{#if __GH_AW_GITHUB_WORKSPACE__ }} + - **workspace**: __GH_AW_GITHUB_WORKSPACE__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ }} + - **issue-number**: #__GH_AW_GITHUB_EVENT_ISSUE_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ }} + - **discussion-number**: #__GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ }} + - **pull-request-number**: #__GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER__ + {{/if}} + {{#if __GH_AW_GITHUB_EVENT_COMMENT_ID__ }} + - **comment-id**: __GH_AW_GITHUB_EVENT_COMMENT_ID__ + {{/if}} + {{#if __GH_AW_GITHUB_RUN_ID__ }} + - **workflow-run-id**: __GH_AW_GITHUB_RUN_ID__ + {{/if}} + + + GH_AW_PROMPT_EOF + cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" + + GH_AW_PROMPT_EOF + cat << 'GH_AW_PROMPT_EOF' >> "$GH_AW_PROMPT" + {{#runtime-import .github/workflows/daily-repo-status.md}} + GH_AW_PROMPT_EOF + - name: Interpolate variables and render templates + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/interpolate_prompt.cjs'); + await main(); + - name: Substitute placeholders + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_ALLOWED_EXTENSIONS: '' + GH_AW_CACHE_DESCRIPTION: '' + GH_AW_CACHE_DIR: '/tmp/gh-aw/cache-memory/' + GH_AW_GITHUB_ACTOR: ${{ github.actor }} + GH_AW_GITHUB_EVENT_COMMENT_ID: ${{ github.event.comment.id }} + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: 
${{ github.event.discussion.number }} + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: ${{ github.event.issue.number }} + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: ${{ github.event.pull_request.number }} + GH_AW_GITHUB_REPOSITORY: ${{ github.repository }} + GH_AW_GITHUB_RUN_ID: ${{ github.run_id }} + GH_AW_GITHUB_WORKSPACE: ${{ github.workspace }} + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: ${{ needs.pre_activation.outputs.activated }} + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND: ${{ needs.pre_activation.outputs.matched_command }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + + const substitutePlaceholders = require('/opt/gh-aw/actions/substitute_placeholders.cjs'); + + // Call the substitution function + return await substitutePlaceholders({ + file: process.env.GH_AW_PROMPT, + substitutions: { + GH_AW_ALLOWED_EXTENSIONS: process.env.GH_AW_ALLOWED_EXTENSIONS, + GH_AW_CACHE_DESCRIPTION: process.env.GH_AW_CACHE_DESCRIPTION, + GH_AW_CACHE_DIR: process.env.GH_AW_CACHE_DIR, + GH_AW_GITHUB_ACTOR: process.env.GH_AW_GITHUB_ACTOR, + GH_AW_GITHUB_EVENT_COMMENT_ID: process.env.GH_AW_GITHUB_EVENT_COMMENT_ID, + GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER: process.env.GH_AW_GITHUB_EVENT_DISCUSSION_NUMBER, + GH_AW_GITHUB_EVENT_ISSUE_NUMBER: process.env.GH_AW_GITHUB_EVENT_ISSUE_NUMBER, + GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER: process.env.GH_AW_GITHUB_EVENT_PULL_REQUEST_NUMBER, + GH_AW_GITHUB_REPOSITORY: process.env.GH_AW_GITHUB_REPOSITORY, + GH_AW_GITHUB_RUN_ID: process.env.GH_AW_GITHUB_RUN_ID, + GH_AW_GITHUB_WORKSPACE: process.env.GH_AW_GITHUB_WORKSPACE, + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_ACTIVATED, + GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND: process.env.GH_AW_NEEDS_PRE_ACTIVATION_OUTPUTS_MATCHED_COMMAND + } + }); + - name: Validate prompt placeholders + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + run: bash 
/opt/gh-aw/actions/validate_prompt_placeholders.sh + - name: Print prompt + env: + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + run: bash /opt/gh-aw/actions/print_prompt_summary.sh + - name: Upload prompt artifact + if: success() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: prompt + path: /tmp/gh-aw/aw-prompts/prompt.txt + retention-days: 1 + + agent: + needs: activation + runs-on: ubuntu-latest + permissions: + contents: read + discussions: read + issues: read + pull-requests: read + concurrency: + group: "gh-aw-copilot-${{ github.workflow }}" + env: + DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} + GH_AW_ASSETS_ALLOWED_EXTS: "" + GH_AW_ASSETS_BRANCH: "" + GH_AW_ASSETS_MAX_SIZE_KB: 0 + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + GH_AW_SAFE_OUTPUTS: /opt/gh-aw/safeoutputs/outputs.jsonl + GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json + GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json + GH_AW_WORKFLOW_ID_SANITIZED: dailyrepostatus + outputs: + checkout_pr_success: ${{ steps.checkout-pr.outputs.checkout_pr_success || 'true' }} + has_patch: ${{ steps.collect_output.outputs.has_patch }} + model: ${{ steps.generate_aw_info.outputs.model }} + output: ${{ steps.collect_output.outputs.output }} + output_types: ${{ steps.collect_output.outputs.output_types }} + secret_verification_result: ${{ steps.validate-secret.outputs.verification_result }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@852cb06ad52958b402ed982b69957ffc57ca0619 # v0.45.5 + with: + destination: /opt/gh-aw/actions + - name: Checkout repository + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + persist-credentials: false + - name: Create gh-aw temp directory + run: bash /opt/gh-aw/actions/create_gh_aw_tmp_dir.sh + # Cache memory file share configuration from frontmatter processed below + - name: Create cache-memory directory + run: bash 
/opt/gh-aw/actions/create_cache_memory_dir.sh + - name: Restore cache-memory file share data + uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} + path: /tmp/gh-aw/cache-memory + restore-keys: | + memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}- + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Checkout PR branch + id: checkout-pr + if: | + github.event.pull_request + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/checkout_pr_branch.cjs'); + await main(); + - name: Generate agentic run info + id: generate_aw_info + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const fs = require('fs'); + + const awInfo = { + engine_id: "copilot", + engine_name: "GitHub Copilot CLI", + model: process.env.GH_AW_MODEL_AGENT_COPILOT || "", + version: "", + agent_version: "0.0.410", + cli_version: "v0.45.5", + workflow_name: "13.2 Release Burndown Report", + experimental: false, + supports_tools_allowlist: 
true, + run_id: context.runId, + run_number: context.runNumber, + run_attempt: process.env.GITHUB_RUN_ATTEMPT, + repository: context.repo.owner + '/' + context.repo.repo, + ref: context.ref, + sha: context.sha, + actor: context.actor, + event_name: context.eventName, + staged: false, + allowed_domains: ["defaults"], + firewall_enabled: true, + awf_version: "v0.19.1", + awmg_version: "v0.1.4", + steps: { + firewall: "squid" + }, + created_at: new Date().toISOString() + }; + + // Write to /tmp/gh-aw directory to avoid inclusion in PR + const tmpPath = '/tmp/gh-aw/aw_info.json'; + fs.writeFileSync(tmpPath, JSON.stringify(awInfo, null, 2)); + console.log('Generated aw_info.json at:', tmpPath); + console.log(JSON.stringify(awInfo, null, 2)); + + // Set model as output for reuse in other steps/jobs + core.setOutput('model', awInfo.model); + - name: Validate COPILOT_GITHUB_TOKEN secret + id: validate-secret + run: /opt/gh-aw/actions/validate_multi_secret.sh COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://github.github.com/gh-aw/reference/engines/#github-copilot-default + env: + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + - name: Install GitHub Copilot CLI + run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.410 + - name: Install awf binary + run: bash /opt/gh-aw/actions/install_awf_binary.sh v0.19.1 + - name: Download container images + run: bash /opt/gh-aw/actions/download_docker_images.sh ghcr.io/github/gh-aw-firewall/agent:0.19.1 ghcr.io/github/gh-aw-firewall/squid:0.19.1 ghcr.io/github/gh-aw-mcpg:v0.1.4 ghcr.io/github/github-mcp-server:v0.30.3 node:lts-alpine + - name: Write Safe Outputs Config + run: | + mkdir -p /opt/gh-aw/safeoutputs + mkdir -p /tmp/gh-aw/safeoutputs + mkdir -p /tmp/gh-aw/mcp-logs/safeoutputs + cat > /opt/gh-aw/safeoutputs/config.json << 'GH_AW_SAFE_OUTPUTS_CONFIG_EOF' + {"create_issue":{"max":1},"missing_data":{},"missing_tool":{},"noop":{"max":1}} + GH_AW_SAFE_OUTPUTS_CONFIG_EOF + cat > /opt/gh-aw/safeoutputs/tools.json << 
'GH_AW_SAFE_OUTPUTS_TOOLS_EOF' + [ + { + "description": "Create a new GitHub issue for tracking bugs, feature requests, or tasks. Use this for actionable work items that need assignment, labeling, and status tracking. For reports, announcements, or status updates that don't require task tracking, use create_discussion instead. CONSTRAINTS: Maximum 1 issue(s) can be created. Title will be prefixed with \"[13.2-burndown] \". Labels [report burndown] will be automatically added.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "body": { + "description": "Detailed issue description in Markdown. Do NOT repeat the title as a heading since it already appears as the issue's h1. Include context, reproduction steps, or acceptance criteria as appropriate.", + "type": "string" + }, + "labels": { + "description": "Labels to categorize the issue (e.g., 'bug', 'enhancement'). Labels must exist in the repository.", + "items": { + "type": "string" + }, + "type": "array" + }, + "parent": { + "description": "Parent issue number for creating sub-issues. This is the numeric ID from the GitHub URL (e.g., 42 in github.com/owner/repo/issues/42). Can also be a temporary_id (e.g., 'aw_abc123', 'aw_Test123') from a previously created issue in the same workflow run.", + "type": [ + "number", + "string" + ] + }, + "temporary_id": { + "description": "Unique temporary identifier for referencing this issue before it's created. Format: 'aw_' followed by 3 to 8 alphanumeric characters (e.g., 'aw_abc1', 'aw_Test123'). Use '#aw_ID' in body text to reference other issues by their temporary_id; these are replaced with actual issue numbers after creation.", + "pattern": "^aw_[A-Za-z0-9]{3,8}$", + "type": "string" + }, + "title": { + "description": "Concise issue title summarizing the bug, feature, or task. 
The title appears as the main heading, so keep it brief and descriptive.", + "type": "string" + } + }, + "required": [ + "title", + "body" + ], + "type": "object" + }, + "name": "create_issue" + }, + { + "description": "Report that a tool or capability needed to complete the task is not available, or share any information you deem important about missing functionality or limitations. Use this when you cannot accomplish what was requested because the required functionality is missing or access is restricted.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "alternatives": { + "description": "Any workarounds, manual steps, or alternative approaches the user could take (max 256 characters).", + "type": "string" + }, + "reason": { + "description": "Explanation of why this tool is needed or what information you want to share about the limitation (max 256 characters).", + "type": "string" + }, + "tool": { + "description": "Optional: Name or description of the missing tool or capability (max 128 characters). Be specific about what functionality is needed.", + "type": "string" + } + }, + "required": [ + "reason" + ], + "type": "object" + }, + "name": "missing_tool" + }, + { + "description": "Log a transparency message when no significant actions are needed. Use this to confirm workflow completion and provide visibility when analysis is complete but no changes or outputs are required (e.g., 'No issues found', 'All checks passed'). This ensures the workflow produces human-visible output even when no other actions are taken.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "message": { + "description": "Status or completion message to log. 
Should explain what was analyzed and the outcome (e.g., 'Code review complete - no issues found', 'Analysis complete - all tests passing').", + "type": "string" + } + }, + "required": [ + "message" + ], + "type": "object" + }, + "name": "noop" + }, + { + "description": "Report that data or information needed to complete the task is not available. Use this when you cannot accomplish what was requested because required data, context, or information is missing.", + "inputSchema": { + "additionalProperties": false, + "properties": { + "alternatives": { + "description": "Any workarounds, manual steps, or alternative approaches the user could take (max 256 characters).", + "type": "string" + }, + "context": { + "description": "Additional context about the missing data or where it should come from (max 256 characters).", + "type": "string" + }, + "data_type": { + "description": "Type or description of the missing data or information (max 128 characters). Be specific about what data is needed.", + "type": "string" + }, + "reason": { + "description": "Explanation of why this data is needed to complete the task (max 256 characters).", + "type": "string" + } + }, + "required": [], + "type": "object" + }, + "name": "missing_data" + } + ] + GH_AW_SAFE_OUTPUTS_TOOLS_EOF + cat > /opt/gh-aw/safeoutputs/validation.json << 'GH_AW_SAFE_OUTPUTS_VALIDATION_EOF' + { + "create_issue": { + "defaultMax": 1, + "fields": { + "body": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 65000 + }, + "labels": { + "type": "array", + "itemType": "string", + "itemSanitize": true, + "itemMaxLength": 128 + }, + "parent": { + "issueOrPRNumber": true + }, + "repo": { + "type": "string", + "maxLength": 256 + }, + "temporary_id": { + "type": "string" + }, + "title": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 128 + } + } + }, + "missing_tool": { + "defaultMax": 20, + "fields": { + "alternatives": { + "type": "string", + "sanitize": true, + 
"maxLength": 512 + }, + "reason": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 256 + }, + "tool": { + "type": "string", + "sanitize": true, + "maxLength": 128 + } + } + }, + "noop": { + "defaultMax": 1, + "fields": { + "message": { + "required": true, + "type": "string", + "sanitize": true, + "maxLength": 65000 + } + } + } + } + GH_AW_SAFE_OUTPUTS_VALIDATION_EOF + - name: Generate Safe Outputs MCP Server Config + id: safe-outputs-config + run: | + # Generate a secure random API key (360 bits of entropy, 40+ chars) + # Mask immediately to prevent timing vulnerabilities + API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${API_KEY}" + + PORT=3001 + + # Set outputs for next steps + { + echo "safe_outputs_api_key=${API_KEY}" + echo "safe_outputs_port=${PORT}" + } >> "$GITHUB_OUTPUT" + + echo "Safe Outputs MCP server will run on port ${PORT}" + + - name: Start Safe Outputs MCP HTTP Server + id: safe-outputs-start + env: + DEBUG: '*' + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-config.outputs.safe_outputs_port }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-config.outputs.safe_outputs_api_key }} + GH_AW_SAFE_OUTPUTS_TOOLS_PATH: /opt/gh-aw/safeoutputs/tools.json + GH_AW_SAFE_OUTPUTS_CONFIG_PATH: /opt/gh-aw/safeoutputs/config.json + GH_AW_MCP_LOG_DIR: /tmp/gh-aw/mcp-logs/safeoutputs + run: | + # Environment variables are set above to prevent template injection + export DEBUG + export GH_AW_SAFE_OUTPUTS_PORT + export GH_AW_SAFE_OUTPUTS_API_KEY + export GH_AW_SAFE_OUTPUTS_TOOLS_PATH + export GH_AW_SAFE_OUTPUTS_CONFIG_PATH + export GH_AW_MCP_LOG_DIR + + bash /opt/gh-aw/actions/start_safe_outputs_server.sh + + - name: Start MCP Gateway + id: start-mcp-gateway + env: + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_SAFE_OUTPUTS_API_KEY: ${{ steps.safe-outputs-start.outputs.api_key }} + GH_AW_SAFE_OUTPUTS_PORT: ${{ steps.safe-outputs-start.outputs.port }} + GITHUB_MCP_SERVER_TOKEN: ${{ 
secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN || secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + run: | + set -eo pipefail + mkdir -p /tmp/gh-aw/mcp-config + + # Export gateway environment variables for MCP config and gateway script + export MCP_GATEWAY_PORT="80" + export MCP_GATEWAY_DOMAIN="host.docker.internal" + MCP_GATEWAY_API_KEY=$(openssl rand -base64 45 | tr -d '/+=') + echo "::add-mask::${MCP_GATEWAY_API_KEY}" + export MCP_GATEWAY_API_KEY + export MCP_GATEWAY_PAYLOAD_DIR="/tmp/gh-aw/mcp-payloads" + mkdir -p "${MCP_GATEWAY_PAYLOAD_DIR}" + export DEBUG="*" + + export GH_AW_ENGINE="copilot" + export MCP_GATEWAY_DOCKER_COMMAND='docker run -i --rm --network host -v /var/run/docker.sock:/var/run/docker.sock -e MCP_GATEWAY_PORT -e MCP_GATEWAY_DOMAIN -e MCP_GATEWAY_API_KEY -e MCP_GATEWAY_PAYLOAD_DIR -e DEBUG -e MCP_GATEWAY_LOG_DIR -e GH_AW_MCP_LOG_DIR -e GH_AW_SAFE_OUTPUTS -e GH_AW_SAFE_OUTPUTS_CONFIG_PATH -e GH_AW_SAFE_OUTPUTS_TOOLS_PATH -e GH_AW_ASSETS_BRANCH -e GH_AW_ASSETS_MAX_SIZE_KB -e GH_AW_ASSETS_ALLOWED_EXTS -e DEFAULT_BRANCH -e GITHUB_MCP_SERVER_TOKEN -e GITHUB_MCP_LOCKDOWN -e GITHUB_REPOSITORY -e GITHUB_SERVER_URL -e GITHUB_SHA -e GITHUB_WORKSPACE -e GITHUB_TOKEN -e GITHUB_RUN_ID -e GITHUB_RUN_NUMBER -e GITHUB_RUN_ATTEMPT -e GITHUB_JOB -e GITHUB_ACTION -e GITHUB_EVENT_NAME -e GITHUB_EVENT_PATH -e GITHUB_ACTOR -e GITHUB_ACTOR_ID -e GITHUB_TRIGGERING_ACTOR -e GITHUB_WORKFLOW -e GITHUB_WORKFLOW_REF -e GITHUB_WORKFLOW_SHA -e GITHUB_REF -e GITHUB_REF_NAME -e GITHUB_REF_TYPE -e GITHUB_HEAD_REF -e GITHUB_BASE_REF -e GH_AW_SAFE_OUTPUTS_PORT -e GH_AW_SAFE_OUTPUTS_API_KEY -v /tmp/gh-aw/mcp-payloads:/tmp/gh-aw/mcp-payloads:rw -v /opt:/opt:ro -v /tmp:/tmp:rw -v '"${GITHUB_WORKSPACE}"':'"${GITHUB_WORKSPACE}"':rw ghcr.io/github/gh-aw-mcpg:v0.1.4' + + mkdir -p /home/runner/.copilot + cat << GH_AW_MCP_CONFIG_EOF | bash /opt/gh-aw/actions/start_mcp_gateway.sh + { + "mcpServers": { + "github": { + "type": "stdio", + "container": "ghcr.io/github/github-mcp-server:v0.30.3", 
+ "env": { + "GITHUB_PERSONAL_ACCESS_TOKEN": "\${GITHUB_MCP_SERVER_TOKEN}", + "GITHUB_READ_ONLY": "1", + "GITHUB_TOOLSETS": "repos,issues,pull_requests,discussions,search" + } + }, + "safeoutputs": { + "type": "http", + "url": "http://host.docker.internal:$GH_AW_SAFE_OUTPUTS_PORT", + "headers": { + "Authorization": "\${GH_AW_SAFE_OUTPUTS_API_KEY}" + } + } + }, + "gateway": { + "port": $MCP_GATEWAY_PORT, + "domain": "${MCP_GATEWAY_DOMAIN}", + "apiKey": "${MCP_GATEWAY_API_KEY}", + "payloadDir": "${MCP_GATEWAY_PAYLOAD_DIR}" + } + } + GH_AW_MCP_CONFIG_EOF + - name: Generate workflow overview + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { generateWorkflowOverview } = require('/opt/gh-aw/actions/generate_workflow_overview.cjs'); + await generateWorkflowOverview(core); + - name: Download prompt artifact + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: prompt + path: /tmp/gh-aw/aw-prompts + - name: Clean git credentials + run: bash /opt/gh-aw/actions/clean_git_credentials.sh + - name: Execute GitHub Copilot CLI + id: agentic_execution + # Copilot CLI tool arguments (sorted): + # --allow-tool github + # --allow-tool safeoutputs + # --allow-tool shell(cat) + # --allow-tool shell(date) + # --allow-tool shell(echo) + # --allow-tool shell(grep) + # --allow-tool shell(head) + # --allow-tool shell(ls) + # --allow-tool shell(pwd) + # --allow-tool shell(sort) + # --allow-tool shell(tail) + # --allow-tool shell(uniq) + # --allow-tool shell(wc) + # --allow-tool shell(yq) + # --allow-tool write + timeout-minutes: 20 + run: | + set -o pipefail + sudo -E awf --env-all --container-workdir "${GITHUB_WORKSPACE}" --allow-domains 
api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com --log-level info --proxy-logs-dir /tmp/gh-aw/sandbox/firewall/logs --enable-host-access --image-tag 0.19.1 --skip-pull \ + -- /bin/bash -c '/usr/local/bin/copilot --add-dir /tmp/gh-aw/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --add-dir "${GITHUB_WORKSPACE}" --disable-builtin-mcps --allow-tool github --allow-tool safeoutputs --allow-tool '\''shell(cat)'\'' --allow-tool '\''shell(date)'\'' --allow-tool '\''shell(echo)'\'' --allow-tool '\''shell(grep)'\'' --allow-tool '\''shell(head)'\'' --allow-tool '\''shell(ls)'\'' --allow-tool '\''shell(pwd)'\'' --allow-tool '\''shell(sort)'\'' --allow-tool '\''shell(tail)'\'' --allow-tool '\''shell(uniq)'\'' --allow-tool '\''shell(wc)'\'' --allow-tool '\''shell(yq)'\'' --allow-tool write --add-dir /tmp/gh-aw/cache-memory/ --allow-all-paths --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$(cat /tmp/gh-aw/aw-prompts/prompt.txt)"${GH_AW_MODEL_AGENT_COPILOT:+ --model "$GH_AW_MODEL_AGENT_COPILOT"}' 2>&1 | tee -a /tmp/gh-aw/agent-stdio.log + env: + COPILOT_AGENT_RUNNER_TYPE: STANDALONE + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + 
GH_AW_MCP_CONFIG: /home/runner/.copilot/mcp-config.json + GH_AW_MODEL_AGENT_COPILOT: ${{ vars.GH_AW_MODEL_AGENT_COPILOT || '' }} + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GITHUB_HEAD_REF: ${{ github.head_ref }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_STEP_SUMMARY: ${{ env.GITHUB_STEP_SUMMARY }} + GITHUB_WORKSPACE: ${{ github.workspace }} + XDG_CONFIG_HOME: /home/runner + - name: Configure Git credentials + env: + REPO_NAME: ${{ github.repository }} + SERVER_URL: ${{ github.server_url }} + run: | + git config --global user.email "github-actions[bot]@users.noreply.github.com" + git config --global user.name "github-actions[bot]" + # Re-authenticate git with GitHub token + SERVER_URL_STRIPPED="${SERVER_URL#https://}" + git remote set-url origin "https://x-access-token:${{ github.token }}@${SERVER_URL_STRIPPED}/${REPO_NAME}.git" + echo "Git configured with standard GitHub Actions identity" + - name: Copy Copilot session state files to logs + if: always() + continue-on-error: true + run: | + # Copy Copilot session state files to logs folder for artifact collection + # This ensures they are in /tmp/gh-aw/ where secret redaction can scan them + SESSION_STATE_DIR="$HOME/.copilot/session-state" + LOGS_DIR="/tmp/gh-aw/sandbox/agent/logs" + + if [ -d "$SESSION_STATE_DIR" ]; then + echo "Copying Copilot session state files from $SESSION_STATE_DIR to $LOGS_DIR" + mkdir -p "$LOGS_DIR" + cp -v "$SESSION_STATE_DIR"/*.jsonl "$LOGS_DIR/" 2>/dev/null || true + echo "Session state files copied successfully" + else + echo "No session-state directory found at $SESSION_STATE_DIR" + fi + - name: Stop MCP Gateway + if: always() + continue-on-error: true + env: + MCP_GATEWAY_PORT: ${{ steps.start-mcp-gateway.outputs.gateway-port }} + MCP_GATEWAY_API_KEY: ${{ steps.start-mcp-gateway.outputs.gateway-api-key }} + GATEWAY_PID: ${{ steps.start-mcp-gateway.outputs.gateway-pid }} + run: | + bash 
/opt/gh-aw/actions/stop_mcp_gateway.sh "$GATEWAY_PID" + - name: Redact secrets in logs + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/redact_secrets.cjs'); + await main(); + env: + GH_AW_SECRET_NAMES: 'COPILOT_GITHUB_TOKEN,GH_AW_GITHUB_MCP_SERVER_TOKEN,GH_AW_GITHUB_TOKEN,GITHUB_TOKEN' + SECRET_COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + SECRET_GH_AW_GITHUB_MCP_SERVER_TOKEN: ${{ secrets.GH_AW_GITHUB_MCP_SERVER_TOKEN }} + SECRET_GH_AW_GITHUB_TOKEN: ${{ secrets.GH_AW_GITHUB_TOKEN }} + SECRET_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Upload Safe Outputs + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: safe-output + path: ${{ env.GH_AW_SAFE_OUTPUTS }} + if-no-files-found: warn + - name: Ingest agent output + id: collect_output + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_SAFE_OUTPUTS: ${{ env.GH_AW_SAFE_OUTPUTS }} + GH_AW_ALLOWED_DOMAINS: 
"api.business.githubcopilot.com,api.enterprise.githubcopilot.com,api.github.com,api.githubcopilot.com,api.individual.githubcopilot.com,api.snapcraft.io,archive.ubuntu.com,azure.archive.ubuntu.com,crl.geotrust.com,crl.globalsign.com,crl.identrust.com,crl.sectigo.com,crl.thawte.com,crl.usertrust.com,crl.verisign.com,crl3.digicert.com,crl4.digicert.com,crls.ssl.com,github.com,host.docker.internal,json-schema.org,json.schemastore.org,keyserver.ubuntu.com,ocsp.digicert.com,ocsp.geotrust.com,ocsp.globalsign.com,ocsp.identrust.com,ocsp.sectigo.com,ocsp.ssl.com,ocsp.thawte.com,ocsp.usertrust.com,ocsp.verisign.com,packagecloud.io,packages.cloud.google.com,packages.microsoft.com,ppa.launchpad.net,raw.githubusercontent.com,registry.npmjs.org,s.symcb.com,s.symcd.com,security.ubuntu.com,telemetry.enterprise.githubcopilot.com,ts-crl.ws.symantec.com,ts-ocsp.ws.symantec.com" + GITHUB_SERVER_URL: ${{ github.server_url }} + GITHUB_API_URL: ${{ github.api_url }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/collect_ndjson_output.cjs'); + await main(); + - name: Upload sanitized agent output + if: always() && env.GH_AW_AGENT_OUTPUT + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: agent-output + path: ${{ env.GH_AW_AGENT_OUTPUT }} + if-no-files-found: warn + - name: Upload engine output files + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: agent_outputs + path: | + /tmp/gh-aw/sandbox/agent/logs/ + /tmp/gh-aw/redacted-urls.log + if-no-files-found: ignore + - name: Parse agent logs for step summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: /tmp/gh-aw/sandbox/agent/logs/ + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + 
setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_copilot_log.cjs'); + await main(); + - name: Parse MCP Gateway logs for step summary + if: always() + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_mcp_gateway_log.cjs'); + await main(); + - name: Print firewall logs + if: always() + continue-on-error: true + env: + AWF_LOGS_DIR: /tmp/gh-aw/sandbox/firewall/logs + run: | + # Fix permissions on firewall logs so they can be uploaded as artifacts + # AWF runs with sudo, creating files owned by root + sudo chmod -R a+r /tmp/gh-aw/sandbox/firewall/logs 2>/dev/null || true + # Only run awf logs summary if awf command exists (it may not be installed if workflow failed before install step) + if command -v awf &> /dev/null; then + awf logs summary | tee -a "$GITHUB_STEP_SUMMARY" + else + echo 'AWF binary not installed, skipping firewall log summary' + fi + - name: Upload cache-memory data as artifact + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + if: always() + with: + name: cache-memory + path: /tmp/gh-aw/cache-memory + - name: Upload agent artifacts + if: always() + continue-on-error: true + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: agent-artifacts + path: | + /tmp/gh-aw/aw-prompts/prompt.txt + /tmp/gh-aw/aw_info.json + /tmp/gh-aw/mcp-logs/ + /tmp/gh-aw/sandbox/firewall/logs/ + /tmp/gh-aw/agent-stdio.log + /tmp/gh-aw/agent/ + if-no-files-found: ignore + + conclusion: + needs: + - activation + - agent + - detection + - safe_outputs + - update_cache_memory + if: (always()) && (needs.agent.result != 'skipped') + runs-on: ubuntu-slim + permissions: + contents: read + issues: write + outputs: + noop_message: ${{ 
steps.noop.outputs.noop_message }} + tools_reported: ${{ steps.missing_tool.outputs.tools_reported }} + total_count: ${{ steps.missing_tool.outputs.total_count }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@852cb06ad52958b402ed982b69957ffc57ca0619 # v0.45.5 + with: + destination: /opt/gh-aw/actions + - name: Download agent output artifact + continue-on-error: true + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-output + path: /tmp/gh-aw/safeoutputs/ + - name: Setup agent output environment variable + run: | + mkdir -p /tmp/gh-aw/safeoutputs/ + find "/tmp/gh-aw/safeoutputs/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" + - name: Process No-Op Messages + id: noop + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_NOOP_MAX: 1 + GH_AW_WORKFLOW_NAME: "13.2 Release Burndown Report" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/noop.cjs'); + await main(); + - name: Record Missing Tool + id: missing_tool + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "13.2 Release Burndown Report" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/missing_tool.cjs'); + await main(); + - name: Handle Agent Failure + id: handle_agent_failure + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + 
GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "13.2 Release Burndown Report" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_WORKFLOW_ID: "daily-repo-status" + GH_AW_SECRET_VERIFICATION_RESULT: ${{ needs.agent.outputs.secret_verification_result }} + GH_AW_CHECKOUT_PR_SUCCESS: ${{ needs.agent.outputs.checkout_pr_success }} + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/handle_agent_failure.cjs'); + await main(); + - name: Handle No-Op Message + id: handle_noop_message + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_WORKFLOW_NAME: "13.2 Release Burndown Report" + GH_AW_RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + GH_AW_AGENT_CONCLUSION: ${{ needs.agent.result }} + GH_AW_NOOP_MESSAGE: ${{ steps.noop.outputs.noop_message }} + GH_AW_NOOP_REPORT_AS_ISSUE: "true" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/handle_noop_message.cjs'); + await main(); + + detection: + needs: agent + if: needs.agent.outputs.output_types != '' || needs.agent.outputs.has_patch == 'true' + runs-on: ubuntu-latest + permissions: {} + concurrency: + group: "gh-aw-copilot-${{ github.workflow }}" + timeout-minutes: 10 + outputs: + success: ${{ steps.parse_results.outputs.success }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@852cb06ad52958b402ed982b69957ffc57ca0619 
# v0.45.5 + with: + destination: /opt/gh-aw/actions + - name: Download agent artifacts + continue-on-error: true + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-artifacts + path: /tmp/gh-aw/threat-detection/ + - name: Download agent output artifact + continue-on-error: true + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-output + path: /tmp/gh-aw/threat-detection/ + - name: Echo agent output types + env: + AGENT_OUTPUT_TYPES: ${{ needs.agent.outputs.output_types }} + run: | + echo "Agent output-types: $AGENT_OUTPUT_TYPES" + - name: Setup threat detection + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + WORKFLOW_NAME: "13.2 Release Burndown Report" + WORKFLOW_DESCRIPTION: "Daily burndown report for the Aspire 13.2 milestone. Tracks progress\non issues closed, new bugs found, notable changes merged into the\nrelease/13.2 branch, pending PR reviews, and discussions. Generates\na 7-day burndown chart using cached daily snapshots." 
+ HAS_PATCH: ${{ needs.agent.outputs.has_patch }} + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/setup_threat_detection.cjs'); + await main(); + - name: Ensure threat-detection directory and log + run: | + mkdir -p /tmp/gh-aw/threat-detection + touch /tmp/gh-aw/threat-detection/detection.log + - name: Validate COPILOT_GITHUB_TOKEN secret + id: validate-secret + run: /opt/gh-aw/actions/validate_multi_secret.sh COPILOT_GITHUB_TOKEN 'GitHub Copilot CLI' https://github.github.com/gh-aw/reference/engines/#github-copilot-default + env: + COPILOT_GITHUB_TOKEN: ${{ secrets.COPILOT_GITHUB_TOKEN }} + - name: Install GitHub Copilot CLI + run: /opt/gh-aw/actions/install_copilot_cli.sh 0.0.410 + - name: Execute GitHub Copilot CLI + id: agentic_execution + # Copilot CLI tool arguments (sorted): + # --allow-tool shell(cat) + # --allow-tool shell(grep) + # --allow-tool shell(head) + # --allow-tool shell(jq) + # --allow-tool shell(ls) + # --allow-tool shell(tail) + # --allow-tool shell(wc) + timeout-minutes: 20 + run: | + set -o pipefail + COPILOT_CLI_INSTRUCTION="$(cat /tmp/gh-aw/aw-prompts/prompt.txt)" + mkdir -p /tmp/ + mkdir -p /tmp/gh-aw/ + mkdir -p /tmp/gh-aw/agent/ + mkdir -p /tmp/gh-aw/sandbox/agent/logs/ + copilot --add-dir /tmp/ --add-dir /tmp/gh-aw/ --add-dir /tmp/gh-aw/agent/ --log-level all --log-dir /tmp/gh-aw/sandbox/agent/logs/ --disable-builtin-mcps --allow-tool 'shell(cat)' --allow-tool 'shell(grep)' --allow-tool 'shell(head)' --allow-tool 'shell(jq)' --allow-tool 'shell(ls)' --allow-tool 'shell(tail)' --allow-tool 'shell(wc)' --share /tmp/gh-aw/sandbox/agent/logs/conversation.md --prompt "$COPILOT_CLI_INSTRUCTION"${GH_AW_MODEL_DETECTION_COPILOT:+ --model "$GH_AW_MODEL_DETECTION_COPILOT"} 2>&1 | tee /tmp/gh-aw/threat-detection/detection.log + env: + COPILOT_AGENT_RUNNER_TYPE: STANDALONE + COPILOT_GITHUB_TOKEN: ${{ 
secrets.COPILOT_GITHUB_TOKEN }} + GH_AW_MODEL_DETECTION_COPILOT: ${{ vars.GH_AW_MODEL_DETECTION_COPILOT || '' }} + GH_AW_PROMPT: /tmp/gh-aw/aw-prompts/prompt.txt + GITHUB_HEAD_REF: ${{ github.head_ref }} + GITHUB_REF_NAME: ${{ github.ref_name }} + GITHUB_STEP_SUMMARY: ${{ env.GITHUB_STEP_SUMMARY }} + GITHUB_WORKSPACE: ${{ github.workspace }} + XDG_CONFIG_HOME: /home/runner + - name: Parse threat detection results + id: parse_results + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + with: + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/parse_threat_detection_results.cjs'); + await main(); + - name: Upload threat detection log + if: always() + uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6 + with: + name: threat-detection.log + path: /tmp/gh-aw/threat-detection/detection.log + if-no-files-found: ignore + + safe_outputs: + needs: + - agent + - detection + if: ((!cancelled()) && (needs.agent.result != 'skipped')) && (needs.detection.outputs.success == 'true') + runs-on: ubuntu-slim + permissions: + contents: read + issues: write + timeout-minutes: 15 + env: + GH_AW_ENGINE_ID: "copilot" + GH_AW_WORKFLOW_ID: "daily-repo-status" + GH_AW_WORKFLOW_NAME: "13.2 Release Burndown Report" + outputs: + create_discussion_error_count: ${{ steps.process_safe_outputs.outputs.create_discussion_error_count }} + create_discussion_errors: ${{ steps.process_safe_outputs.outputs.create_discussion_errors }} + process_safe_outputs_processed_count: ${{ steps.process_safe_outputs.outputs.processed_count }} + process_safe_outputs_temporary_id_map: ${{ steps.process_safe_outputs.outputs.temporary_id_map }} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@852cb06ad52958b402ed982b69957ffc57ca0619 # v0.45.5 + with: + destination: /opt/gh-aw/actions + - name: Download agent output artifact + 
continue-on-error: true + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + with: + name: agent-output + path: /tmp/gh-aw/safeoutputs/ + - name: Setup agent output environment variable + run: | + mkdir -p /tmp/gh-aw/safeoutputs/ + find "/tmp/gh-aw/safeoutputs/" -type f -print + echo "GH_AW_AGENT_OUTPUT=/tmp/gh-aw/safeoutputs/agent_output.json" >> "$GITHUB_ENV" + - name: Process Safe Outputs + id: process_safe_outputs + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8 + env: + GH_AW_AGENT_OUTPUT: ${{ env.GH_AW_AGENT_OUTPUT }} + GH_AW_SAFE_OUTPUTS_HANDLER_CONFIG: "{\"create_issue\":{\"close_older_issues\":true,\"labels\":[\"report\",\"burndown\"],\"max\":1,\"title_prefix\":\"[13.2-burndown] \"},\"missing_data\":{},\"missing_tool\":{}}" + with: + github-token: ${{ secrets.GH_AW_GITHUB_TOKEN || secrets.GITHUB_TOKEN }} + script: | + const { setupGlobals } = require('/opt/gh-aw/actions/setup_globals.cjs'); + setupGlobals(core, github, context, exec, io); + const { main } = require('/opt/gh-aw/actions/safe_output_handler_manager.cjs'); + await main(); + + update_cache_memory: + needs: + - agent + - detection + if: always() && needs.detection.outputs.success == 'true' + runs-on: ubuntu-latest + permissions: {} + steps: + - name: Setup Scripts + uses: github/gh-aw/actions/setup@852cb06ad52958b402ed982b69957ffc57ca0619 # v0.45.5 + with: + destination: /opt/gh-aw/actions + - name: Download cache-memory artifact (default) + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6 + continue-on-error: true + with: + name: cache-memory + path: /tmp/gh-aw/cache-memory + - name: Save cache-memory to cache (default) + uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0 + with: + key: memory-${{ env.GH_AW_WORKFLOW_ID_SANITIZED }}-${{ github.run_id }} + path: /tmp/gh-aw/cache-memory + diff --git a/.github/workflows/daily-repo-status.md b/.github/workflows/daily-repo-status.md new file 
mode 100644 index 00000000000..6291aed99d5 --- /dev/null +++ b/.github/workflows/daily-repo-status.md @@ -0,0 +1,131 @@ +--- +description: | + Daily burndown report for the Aspire 13.2 milestone. Tracks progress + on issues closed, new bugs found, notable changes merged into the + release/13.2 branch, pending PR reviews, and discussions. Generates + a 7-day burndown chart using cached daily snapshots. + +on: + schedule: daily around 9am + workflow_dispatch: + +permissions: + contents: read + issues: read + pull-requests: read + discussions: read + +network: defaults + +tools: + github: + toolsets: [repos, issues, pull_requests, discussions, search] + lockdown: false + cache-memory: + bash: ["echo", "date", "cat", "wc"] + +safe-outputs: + create-issue: + title-prefix: "[13.2-burndown] " + labels: [report, burndown] + close-older-issues: true +--- + +# 13.2 Release Burndown Report + +Create a daily burndown report for the **Aspire 13.2 milestone** as a GitHub issue. +The primary goal of this report is to help the team track progress towards the 13.2 release. + +## Data gathering + +Collect the following data using the GitHub tools. All time-based queries should look at the **last 24 hours** unless stated otherwise. + +### 1. Milestone snapshot + +- Find the milestone named **13.2** in this repository. +- Count the **total open issues** and **total closed issues** in the milestone, **excluding pull requests**. Use an issues-only filter (for example, a search query like `is:issue milestone:"13.2" state:open` / `state:closed`) so the counts are consistent across tools. +- Store today's snapshot (date, open count, closed count) using the **cache-memory** tool with the key `burndown-13.2-snapshot`. + - The value for this key **must** be a JSON array of objects with the exact shape: + `[{ "date": "YYYY-MM-DD", "open": <open count>, "closed": <closed count> }, ...]` + - When writing today's data: + 1. Read the existing cache value (if any) and parse it as JSON.
If the cache is empty or invalid, start from an empty array. + 2. If an entry for today's date already exists, **replace** it instead of adding a duplicate. + 3. If no entry exists, append a new object. + 4. Sort by date ascending and trim to the **most recent 7 entries**. + 5. Serialize back to JSON and overwrite the cache value. + +### 2. Issues closed in the last 24 hours (13.2 milestone) + +- Search for issues in this repository that were **closed in the last 24 hours** and belong to the **13.2 milestone**. +- For each issue, note the issue number, title, and who closed it. + +### 3. New issues added to 13.2 milestone in the last 24 hours + +- Search for issues in this repository that were **opened in the last 24 hours** and are assigned to the **13.2 milestone**. +- Highlight any that are labeled as `bug` — these are newly discovered bugs for the release. + +### 4. Notable changes merged into release/13.2 + +- Look at pull requests **merged in the last 24 hours** whose **base branch is `release/13.2`**. +- Summarize the most impactful or interesting changes (group by area if possible). + +### 5. PRs pending review targeting release/13.2 + +- Find **open pull requests** with base branch `release/13.2` that are **awaiting reviews** (have no approving reviews yet, or have review requests pending). +- List them with PR number, title, author, and how long they've been open. + +### 6. Discussions related to 13.2 + +- Search discussions in this repository that mention "13.2" or the milestone, especially any **recent activity in the last 24 hours**. +- Briefly summarize any relevant discussion threads. + +### 7. General triage needs (secondary) + +- Briefly note any **new issues opened in the last 24 hours that have no milestone assigned** and may need triage. +- Keep this section short — the focus is on 13.2. 
+ +## Burndown chart + +Using the historical data stored via **cache-memory** (key: `burndown-13.2-snapshot`), generate a **Mermaid xychart** showing the number of **open issues** in the 13.2 milestone over the last 7 days (or however many data points are available). + +Use this format so it renders natively in the GitHub issue: + +~~~ +```mermaid +xychart-beta + title "13.2 Milestone Burndown (Open Issues)" + x-axis [Feb 13, Feb 14, Feb 15, ...] + y-axis "Open Issues" 0 --> MAX + line [N1, N2, N3, ...] +``` +~~~ + +If fewer than 2 data points are available, note that the chart will become richer over the coming days as more snapshots are collected, and still show whatever data is available. + +## Report structure + +Create a GitHub issue with the following sections in this order: + +1. **📊 Burndown Chart** — The Mermaid chart (or a note that data is still being collected) +2. **📈 Milestone Progress** — Total open vs closed, percentage complete, net change today +3. **✅ Issues Closed Today** — Table or list of issues closed in the 13.2 milestone +4. **🐛 New Bugs Found** — Any new bug issues added to the 13.2 milestone +5. **🚀 Notable Changes Merged** — Summary of impactful PRs merged to release/13.2 +6. **👀 PRs Awaiting Review** — Open PRs targeting release/13.2 that need reviewer attention +7. **💬 Discussions** — Relevant 13.2 discussion activity +8. **📋 Triage Queue** — Brief list of un-milestoned issues that need attention (keep short) + +## Style + +- Be concise and data-driven — this is a status report, not a blog post +- Use tables for lists of issues and PRs where appropriate +- Use emojis for section headers to make scanning easy +- If there was no activity in a section, say so briefly (e.g., "No new bugs found today 🎉") +- End with a one-line motivational note for the team + +## Process + +1. Gather all the data described above +2. Read historical burndown data from cache-memory and store today's snapshot +3. Generate the burndown chart +4. 
Create a new GitHub issue with all sections populated diff --git a/AGENTS.md b/AGENTS.md index cb4d5711eb1..cb6596c3d31 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -355,6 +355,7 @@ The following specialized skills are available in `.github/skills/`: - **test-management**: Quarantines or disables flaky/problematic tests using the QuarantineTools utility - **connection-properties**: Expert for creating and improving Connection Properties in Aspire resources - **dependency-update**: Guides dependency version updates by checking nuget.org, triggering the dotnet-migrate-package Azure DevOps pipeline, and monitoring runs +- **startup-perf**: Measures Aspire application startup performance using dotnet-trace and the TraceAnalyzer tool ## Pattern-Based Instructions diff --git a/docs/getting-perf-traces.md b/docs/getting-perf-traces.md index a669c591ee0..94a5a14a0d5 100644 --- a/docs/getting-perf-traces.md +++ b/docs/getting-perf-traces.md @@ -28,8 +28,16 @@ Once you are ready, hit "Start Collection" button and run your scenario. When done with the scenario, hit "Stop Collection". Wait for PerfView to finish merging and analyzing data (the "working" status bar stops flashing). -### Verify that the trace contains Aspire data +### Verify that PerfView trace contains Aspire data This is an optional step, but if you are wondering if your trace has been captured properly, you can check the following: 1. Open the trace (usually named PerfViewData.etl, if you haven't changed the name) and double click Events view. Verify you have a bunch of events from the Microsoft-Aspire-Hosting provider. + +## Profiling scripts + +The `tools/perf` folder in the repository contains scripts that help quickly assess the impact of code changes on key performance scenarios. Currently available scripts are: + +| Script | Description | +| --- | --------- | +| `Measure-StartupPerformance.ps1` | Measures startup time for a specific Aspire project. 
More specifically, the script measures the time to get all application services and supporting dependencies CREATED; the application is not necessarily responsive after measured time. | diff --git a/eng/Version.Details.xml b/eng/Version.Details.xml index 1cdf20f5f5e..6e938847991 100644 --- a/eng/Version.Details.xml +++ b/eng/Version.Details.xml @@ -179,33 +179,33 @@ - + https://github.com/dotnet/arcade - 27e190e2a8053738859c082e2f70df62e01ff524 + 4bf37ce670528cf2aef4d9b1cd892554b1b02d9d - + https://github.com/dotnet/arcade - 27e190e2a8053738859c082e2f70df62e01ff524 + 4bf37ce670528cf2aef4d9b1cd892554b1b02d9d - + https://github.com/dotnet/arcade - 27e190e2a8053738859c082e2f70df62e01ff524 + 4bf37ce670528cf2aef4d9b1cd892554b1b02d9d - + https://github.com/dotnet/arcade - 27e190e2a8053738859c082e2f70df62e01ff524 + 4bf37ce670528cf2aef4d9b1cd892554b1b02d9d - + https://github.com/dotnet/arcade - 27e190e2a8053738859c082e2f70df62e01ff524 + 4bf37ce670528cf2aef4d9b1cd892554b1b02d9d - + https://github.com/dotnet/arcade - 27e190e2a8053738859c082e2f70df62e01ff524 + 4bf37ce670528cf2aef4d9b1cd892554b1b02d9d - + https://github.com/dotnet/arcade - 27e190e2a8053738859c082e2f70df62e01ff524 + 4bf37ce670528cf2aef4d9b1cd892554b1b02d9d diff --git a/eng/Versions.props b/eng/Versions.props index 194b5e9f25e..2184328ecea 100644 --- a/eng/Versions.props +++ b/eng/Versions.props @@ -2,7 +2,7 @@ 13 - 2 + 3 0 $(MajorVersion).$(MinorVersion).$(PatchVersion) preview.1 @@ -38,9 +38,9 @@ 0.22.6 0.22.6 - 11.0.0-beta.25610.3 - 11.0.0-beta.25610.3 - 11.0.0-beta.25610.3 + 10.0.0-beta.26110.1 + 10.0.0-beta.26110.1 + 10.0.0-beta.26110.1 10.0.2 10.2.0 diff --git a/eng/build.sh b/eng/build.sh index c80b2c68aba..58596335da2 100755 --- a/eng/build.sh +++ b/eng/build.sh @@ -150,7 +150,7 @@ while [[ $# > 0 ]]; do ;; -mauirestore) - extraargs="$extraargs -restoreMaui" + export restore_maui=true shift 1 ;; diff --git a/eng/common/SetupNugetSources.ps1 b/eng/common/SetupNugetSources.ps1 index 
fc8d618014e..65ed3a8adef 100644 --- a/eng/common/SetupNugetSources.ps1 +++ b/eng/common/SetupNugetSources.ps1 @@ -1,6 +1,7 @@ # This script adds internal feeds required to build commits that depend on internal package sources. For instance, -# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. In addition also enables -# disabled internal Maestro (darc-int*) feeds. +# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. Similarly, +# dotnet-eng-internal and dotnet-tools-internal are added if dotnet-eng and dotnet-tools are present. +# In addition, this script also enables disabled internal Maestro (darc-int*) feeds. # # Optionally, this script also adds a credential entry for each of the internal feeds if supplied. # @@ -173,4 +174,16 @@ foreach ($dotnetVersion in $dotnetVersions) { } } +# Check for dotnet-eng and add dotnet-eng-internal if present +$dotnetEngSource = $sources.SelectSingleNode("add[@key='dotnet-eng']") +if ($dotnetEngSource -ne $null) { + AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "dotnet-eng-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-eng-internal/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password +} + +# Check for dotnet-tools and add dotnet-tools-internal if present +$dotnetToolsSource = $sources.SelectSingleNode("add[@key='dotnet-tools']") +if ($dotnetToolsSource -ne $null) { + AddOrEnablePackageSource -Sources $sources -DisabledPackageSources $disabledSources -SourceName "dotnet-tools-internal" -SourceEndPoint "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/$feedSuffix" -Creds $creds -Username $userName -pwd $Password +} + $doc.Save($filename) diff --git a/eng/common/SetupNugetSources.sh b/eng/common/SetupNugetSources.sh index b97cc536379..b2163abbe71 100755 --- a/eng/common/SetupNugetSources.sh +++ 
b/eng/common/SetupNugetSources.sh @@ -1,8 +1,9 @@ #!/usr/bin/env bash # This script adds internal feeds required to build commits that depend on internal package sources. For instance, -# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. In addition also enables -# disabled internal Maestro (darc-int*) feeds. +# dotnet6-internal would be added automatically if dotnet6 was found in the nuget.config file. Similarly, +# dotnet-eng-internal and dotnet-tools-internal are added if dotnet-eng and dotnet-tools are present. +# In addition, this script also enables disabled internal Maestro (darc-int*) feeds. # # Optionally, this script also adds a credential entry for each of the internal feeds if supplied. # @@ -173,6 +174,18 @@ for DotNetVersion in ${DotNetVersions[@]} ; do fi done +# Check for dotnet-eng and add dotnet-eng-internal if present +grep -i " /dev/null +if [ "$?" == "0" ]; then + AddOrEnablePackageSource "dotnet-eng-internal" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-eng-internal/nuget/$FeedSuffix" +fi + +# Check for dotnet-tools and add dotnet-tools-internal if present +grep -i " /dev/null +if [ "$?" 
== "0" ]; then + AddOrEnablePackageSource "dotnet-tools-internal" "https://pkgs.dev.azure.com/dnceng/internal/_packaging/dotnet-tools-internal/nuget/$FeedSuffix" +fi + # I want things split line by line PrevIFS=$IFS IFS=$'\n' diff --git a/eng/common/build.ps1 b/eng/common/build.ps1 index c10aba98ac6..8cfee107e7a 100644 --- a/eng/common/build.ps1 +++ b/eng/common/build.ps1 @@ -30,7 +30,6 @@ Param( [string] $runtimeSourceFeedKey = '', [switch] $excludePrereleaseVS, [switch] $nativeToolsOnMachine, - [switch] $restoreMaui, [switch] $help, [Parameter(ValueFromRemainingArguments=$true)][String[]]$properties ) @@ -77,7 +76,6 @@ function Print-Usage() { Write-Host " -nodeReuse Sets nodereuse msbuild parameter ('true' or 'false')" Write-Host " -buildCheck Sets /check msbuild parameter" Write-Host " -fromVMR Set when building from within the VMR" - Write-Host " -restoreMaui Restore the MAUI workload after restore (only on Windows/macOS)" Write-Host "" Write-Host "Command line arguments not listed above are passed thru to msbuild." diff --git a/eng/common/build.sh b/eng/common/build.sh index 09d1f8e6d9c..9767bb411a4 100755 --- a/eng/common/build.sh +++ b/eng/common/build.sh @@ -44,7 +44,6 @@ usage() echo " --warnAsError Sets warnaserror msbuild parameter ('true' or 'false')" echo " --buildCheck Sets /check msbuild parameter" echo " --fromVMR Set when building from within the VMR" - echo " --restoreMaui Restore the MAUI workload after restore (only on macOS)" echo "" echo "Command line arguments not listed above are passed thru to msbuild." echo "Arguments can also be passed in with a single hyphen." 
@@ -77,7 +76,6 @@ sign=false public=false ci=false clean=false -restore_maui=false warn_as_error=true node_reuse=true @@ -94,7 +92,7 @@ runtime_source_feed='' runtime_source_feed_key='' properties=() -while [[ $# -gt 0 ]]; do +while [[ $# > 0 ]]; do opt="$(echo "${1/#--/-}" | tr "[:upper:]" "[:lower:]")" case "$opt" in -help|-h) @@ -185,9 +183,6 @@ while [[ $# -gt 0 ]]; do -buildcheck) build_check=true ;; - -restoremaui|-restore-maui) - restore_maui=true - ;; -runtimesourcefeed) runtime_source_feed=$2 shift diff --git a/eng/common/core-templates/job/job.yml b/eng/common/core-templates/job/job.yml index 748c4f07a64..5ce51840619 100644 --- a/eng/common/core-templates/job/job.yml +++ b/eng/common/core-templates/job/job.yml @@ -19,8 +19,6 @@ parameters: # publishing defaults artifacts: '' enableMicrobuild: false - enablePreviewMicrobuild: false - microbuildPluginVersion: 'latest' enableMicrobuildForMacAndLinux: false microbuildUseESRP: true enablePublishBuildArtifacts: false @@ -73,8 +71,6 @@ jobs: templateContext: ${{ parameters.templateContext }} variables: - - name: AllowPtrToDetectTestRunRetryFiles - value: true - ${{ if ne(parameters.enableTelemetry, 'false') }}: - name: DOTNET_CLI_TELEMETRY_PROFILE value: '$(Build.Repository.Uri)' @@ -132,8 +128,6 @@ jobs: - template: /eng/common/core-templates/steps/install-microbuild.yml parameters: enableMicrobuild: ${{ parameters.enableMicrobuild }} - enablePreviewMicrobuild: ${{ parameters.enablePreviewMicrobuild }} - microbuildPluginVersion: ${{ parameters.microbuildPluginVersion }} enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }} microbuildUseESRP: ${{ parameters.microbuildUseESRP }} continueOnError: ${{ parameters.continueOnError }} @@ -159,8 +153,6 @@ jobs: - template: /eng/common/core-templates/steps/cleanup-microbuild.yml parameters: enableMicrobuild: ${{ parameters.enableMicrobuild }} - enablePreviewMicrobuild: ${{ parameters.enablePreviewMicrobuild }} - microbuildPluginVersion: ${{ 
parameters.microbuildPluginVersion }} enableMicrobuildForMacAndLinux: ${{ parameters.enableMicrobuildForMacAndLinux }} continueOnError: ${{ parameters.continueOnError }} diff --git a/eng/common/core-templates/job/publish-build-assets.yml b/eng/common/core-templates/job/publish-build-assets.yml index 8b5c635fe80..b955fac6e13 100644 --- a/eng/common/core-templates/job/publish-build-assets.yml +++ b/eng/common/core-templates/job/publish-build-assets.yml @@ -80,7 +80,7 @@ jobs: # If it's not devdiv, it's dnceng ${{ if ne(variables['System.TeamProject'], 'DevDiv') }}: name: NetCore1ESPool-Publishing-Internal - image: windows.vs2019.amd64 + image: windows.vs2022.amd64 os: windows steps: - ${{ if eq(parameters.is1ESPipeline, '') }}: @@ -91,8 +91,8 @@ jobs: fetchDepth: 3 clean: true - - ${{ if eq(parameters.isAssetlessBuild, 'false') }}: - - ${{ if eq(parameters.publishingVersion, 3) }}: + - ${{ if eq(parameters.isAssetlessBuild, 'false') }}: + - ${{ if eq(parameters.publishingVersion, 3) }}: - task: DownloadPipelineArtifact@2 displayName: Download Asset Manifests inputs: @@ -117,7 +117,7 @@ jobs: flattenFolders: true condition: ${{ parameters.condition }} continueOnError: ${{ parameters.continueOnError }} - + - task: NuGetAuthenticate@1 # Populate internal runtime variables. 
@@ -125,7 +125,7 @@ jobs: ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: parameters: legacyCredential: $(dn-bot-dnceng-artifact-feeds-rw) - + - template: /eng/common/templates/steps/enable-internal-runtimes.yml - task: AzureCLI@2 @@ -145,7 +145,7 @@ jobs: condition: ${{ parameters.condition }} continueOnError: ${{ parameters.continueOnError }} - + - task: powershell@2 displayName: Create ReleaseConfigs Artifact inputs: @@ -173,7 +173,7 @@ jobs: artifactName: AssetManifests displayName: 'Publish Merged Manifest' retryCountOnTaskFailure: 10 # for any logs being locked - sbomEnabled: false # we don't need SBOM for logs + sbomEnabled: false # we don't need SBOM for logs - template: /eng/common/core-templates/steps/publish-build-artifacts.yml parameters: @@ -190,7 +190,7 @@ jobs: BARBuildId: ${{ parameters.BARBuildId }} PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} is1ESPipeline: ${{ parameters.is1ESPipeline }} - + # Darc is targeting 8.0, so make sure it's installed - task: UseDotNet@2 inputs: @@ -218,4 +218,4 @@ jobs: - template: /eng/common/core-templates/steps/publish-logs.yml parameters: is1ESPipeline: ${{ parameters.is1ESPipeline }} - JobLabel: 'Publish_Artifacts_Logs' + JobLabel: 'Publish_Artifacts_Logs' diff --git a/eng/common/core-templates/job/source-build.yml b/eng/common/core-templates/job/source-build.yml index 9d820f97421..1997c2ae00d 100644 --- a/eng/common/core-templates/job/source-build.yml +++ b/eng/common/core-templates/job/source-build.yml @@ -60,19 +60,19 @@ jobs: pool: ${{ if eq(variables['System.TeamProject'], 'public') }}: name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] - demands: ImageOverride -equals build.ubuntu.2204.amd64 + demands: ImageOverride -equals build.azurelinux.3.amd64.open ${{ if eq(variables['System.TeamProject'], 'internal') }}: name: 
$[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] - image: 1es-azurelinux-3 + image: build.azurelinux.3.amd64 os: linux ${{ else }}: pool: ${{ if eq(variables['System.TeamProject'], 'public') }}: name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore-Svc-Public' ), False, 'NetCore-Public')] - demands: ImageOverride -equals Build.Ubuntu.2204.Amd64.Open + demands: ImageOverride -equals build.azurelinux.3.amd64.open ${{ if eq(variables['System.TeamProject'], 'internal') }}: name: $[replace(replace(eq(contains(coalesce(variables['System.PullRequest.TargetBranch'], variables['Build.SourceBranch'], 'refs/heads/main'), 'release'), 'true'), True, 'NetCore1ESPool-Svc-Internal'), False, 'NetCore1ESPool-Internal')] - demands: ImageOverride -equals Build.Ubuntu.2204.Amd64 + demands: ImageOverride -equals build.azurelinux.3.amd64 ${{ if ne(parameters.platform.pool, '') }}: pool: ${{ parameters.platform.pool }} diff --git a/eng/common/core-templates/post-build/post-build.yml b/eng/common/core-templates/post-build/post-build.yml index 06864cd1feb..b942a79ef02 100644 --- a/eng/common/core-templates/post-build/post-build.yml +++ b/eng/common/core-templates/post-build/post-build.yml @@ -1,106 +1,106 @@ parameters: -# Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. -# Publishing V1 is no longer supported -# Publishing V2 is no longer supported -# Publishing V3 is the default -- name: publishingInfraVersion - displayName: Which version of publishing should be used to promote the build definition? 
- type: number - default: 3 - values: - - 3 - -- name: BARBuildId - displayName: BAR Build Id - type: number - default: 0 - -- name: PromoteToChannelIds - displayName: Channel to promote BARBuildId to - type: string - default: '' - -- name: enableSourceLinkValidation - displayName: Enable SourceLink validation - type: boolean - default: false - -- name: enableSigningValidation - displayName: Enable signing validation - type: boolean - default: true - -- name: enableSymbolValidation - displayName: Enable symbol validation - type: boolean - default: false - -- name: enableNugetValidation - displayName: Enable NuGet validation - type: boolean - default: true - -- name: publishInstallersAndChecksums - displayName: Publish installers and checksums - type: boolean - default: true - -- name: requireDefaultChannels - displayName: Fail the build if there are no default channel(s) registrations for the current build - type: boolean - default: false - -- name: SDLValidationParameters - type: object - default: - enable: false - publishGdn: false - continueOnError: false - params: '' - artifactNames: '' - downloadArtifacts: true - -- name: isAssetlessBuild - type: boolean - displayName: Is Assetless Build - default: false - -# These parameters let the user customize the call to sdk-task.ps1 for publishing -# symbols & general artifacts as well as for signing validation -- name: symbolPublishingAdditionalParameters - displayName: Symbol publishing additional parameters - type: string - default: '' - -- name: artifactsPublishingAdditionalParameters - displayName: Artifact publishing additional parameters - type: string - default: '' - -- name: signingValidationAdditionalParameters - displayName: Signing validation additional parameters - type: string - default: '' - -# Which stages should finish execution before post-build stages start -- name: validateDependsOn - type: object - default: - - build - -- name: publishDependsOn - type: object - default: - - Validate - -# Optional: 
Call asset publishing rather than running in a separate stage -- name: publishAssetsImmediately - type: boolean - default: false - -- name: is1ESPipeline - type: boolean - default: false + # Which publishing infra should be used. THIS SHOULD MATCH THE VERSION ON THE BUILD MANIFEST. + # Publishing V1 is no longer supported + # Publishing V2 is no longer supported + # Publishing V3 is the default + - name: publishingInfraVersion + displayName: Which version of publishing should be used to promote the build definition? + type: number + default: 3 + values: + - 3 + + - name: BARBuildId + displayName: BAR Build Id + type: number + default: 0 + + - name: PromoteToChannelIds + displayName: Channel to promote BARBuildId to + type: string + default: '' + + - name: enableSourceLinkValidation + displayName: Enable SourceLink validation + type: boolean + default: false + + - name: enableSigningValidation + displayName: Enable signing validation + type: boolean + default: true + + - name: enableSymbolValidation + displayName: Enable symbol validation + type: boolean + default: false + + - name: enableNugetValidation + displayName: Enable NuGet validation + type: boolean + default: true + + - name: publishInstallersAndChecksums + displayName: Publish installers and checksums + type: boolean + default: true + + - name: requireDefaultChannels + displayName: Fail the build if there are no default channel(s) registrations for the current build + type: boolean + default: false + + - name: SDLValidationParameters + type: object + default: + enable: false + publishGdn: false + continueOnError: false + params: '' + artifactNames: '' + downloadArtifacts: true + + - name: isAssetlessBuild + type: boolean + displayName: Is Assetless Build + default: false + + # These parameters let the user customize the call to sdk-task.ps1 for publishing + # symbols & general artifacts as well as for signing validation + - name: symbolPublishingAdditionalParameters + displayName: Symbol publishing 
additional parameters + type: string + default: '' + + - name: artifactsPublishingAdditionalParameters + displayName: Artifact publishing additional parameters + type: string + default: '' + + - name: signingValidationAdditionalParameters + displayName: Signing validation additional parameters + type: string + default: '' + + # Which stages should finish execution before post-build stages start + - name: validateDependsOn + type: object + default: + - build + + - name: publishDependsOn + type: object + default: + - Validate + + # Optional: Call asset publishing rather than running in a separate stage + - name: publishAssetsImmediately + type: boolean + default: false + + - name: is1ESPipeline + type: boolean + default: false stages: - ${{ if or(eq( parameters.enableNugetValidation, 'true'), eq(parameters.enableSigningValidation, 'true'), eq(parameters.enableSourceLinkValidation, 'true'), eq(parameters.SDLValidationParameters.enable, 'true')) }}: @@ -108,10 +108,10 @@ stages: dependsOn: ${{ parameters.validateDependsOn }} displayName: Validate Build Assets variables: - - template: /eng/common/core-templates/post-build/common-variables.yml - - template: /eng/common/core-templates/variables/pool-providers.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} + - template: /eng/common/core-templates/post-build/common-variables.yml + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} jobs: - job: displayName: NuGet Validation @@ -134,28 +134,28 @@ stages: demands: ImageOverride -equals windows.vs2026preview.scout.amd64 steps: - - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: specific - 
buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: PackageArtifacts - checkDownloadedFiles: true - - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/nuget-validation.ps1 - arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/nuget-validation.ps1 + arguments: -PackagesPath $(Build.ArtifactStagingDirectory)/PackageArtifacts/ - job: displayName: Signing Validation @@ -169,7 +169,7 @@ stages: os: windows # If it's not devdiv, it's dnceng ${{ else }}: - ${{ if eq(parameters.is1ESPipeline, true) }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: name: $(DncEngInternalBuildPool) image: 1es-windows-2022 os: windows @@ -177,46 +177,46 @@ stages: name: $(DncEngInternalBuildPool) demands: ImageOverride -equals windows.vs2026preview.scout.amd64 steps: - - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Package Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: 
$(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: PackageArtifacts - checkDownloadedFiles: true - - # This is necessary whenever we want to publish/restore to an AzDO private feed - # Since sdk-task.ps1 tries to restore packages we need to do this authentication here - # otherwise it'll complain about accessing a private feed. - - task: NuGetAuthenticate@1 - displayName: 'Authenticate to AzDO Feeds' - - # Signing validation will optionally work with the buildmanifest file which is downloaded from - # Azure DevOps above. - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: eng\common\sdk-task.ps1 - arguments: -task SigningValidation -restore -msbuildEngine vs - /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' - /p:SignCheckExclusionsFile='$(System.DefaultWorkingDirectory)/eng/SignCheckExclusionsFile.txt' - ${{ parameters.signingValidationAdditionalParameters }} - - - template: /eng/common/core-templates/steps/publish-logs.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} - StageLabel: 'Validation' - JobLabel: 'Signing' - BinlogToolVersion: $(BinlogToolVersion) + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Package Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: PackageArtifacts + checkDownloadedFiles: true + + # This is necessary whenever we want to publish/restore to an AzDO private feed + # Since sdk-task.ps1 tries to restore packages we need to do this authentication here + # otherwise it'll complain about accessing a private feed. 
+ - task: NuGetAuthenticate@1 + displayName: 'Authenticate to AzDO Feeds' + + # Signing validation will optionally work with the buildmanifest file which is downloaded from + # Azure DevOps above. + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: eng\common\sdk-task.ps1 + arguments: -task SigningValidation -restore -msbuildEngine vs + /p:PackageBasePath='$(Build.ArtifactStagingDirectory)/PackageArtifacts' + /p:SignCheckExclusionsFile='$(System.DefaultWorkingDirectory)/eng/SignCheckExclusionsFile.txt' + ${{ parameters.signingValidationAdditionalParameters }} + + - template: /eng/common/core-templates/steps/publish-logs.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} + StageLabel: 'Validation' + JobLabel: 'Signing' + BinlogToolVersion: $(BinlogToolVersion) - job: displayName: SourceLink Validation @@ -230,7 +230,7 @@ stages: os: windows # If it's not devdiv, it's dnceng ${{ else }}: - ${{ if eq(parameters.is1ESPipeline, true) }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: name: $(DncEngInternalBuildPool) image: 1es-windows-2022 os: windows @@ -238,33 +238,33 @@ stages: name: $(DncEngInternalBuildPool) demands: ImageOverride -equals windows.vs2026preview.scout.amd64 steps: - - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - - task: DownloadBuildArtifacts@0 - displayName: Download Blob Artifacts - inputs: - buildType: specific - buildVersionToDownload: specific - project: $(AzDOProjectName) - pipeline: $(AzDOPipelineId) - buildId: $(AzDOBuildId) - artifactName: BlobArtifacts - checkDownloadedFiles: true - - - task: PowerShell@2 - displayName: Validate - inputs: - filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/sourcelink-validation.ps1 - arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ - 
-ExtractPath $(Agent.BuildDirectory)/Extract/ - -GHRepoName $(Build.Repository.Name) - -GHCommit $(Build.SourceVersion) - -SourcelinkCliVersion $(SourceLinkCLIVersion) - continueOnError: true + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: DownloadBuildArtifacts@0 + displayName: Download Blob Artifacts + inputs: + buildType: specific + buildVersionToDownload: specific + project: $(AzDOProjectName) + pipeline: $(AzDOPipelineId) + buildId: $(AzDOBuildId) + artifactName: BlobArtifacts + checkDownloadedFiles: true + + - task: PowerShell@2 + displayName: Validate + inputs: + filePath: $(System.DefaultWorkingDirectory)/eng/common/post-build/sourcelink-validation.ps1 + arguments: -InputPath $(Build.ArtifactStagingDirectory)/BlobArtifacts/ + -ExtractPath $(Agent.BuildDirectory)/Extract/ + -GHRepoName $(Build.Repository.Name) + -GHCommit $(Build.SourceVersion) + -SourcelinkCliVersion $(SourceLinkCLIVersion) + continueOnError: true - ${{ if ne(parameters.publishAssetsImmediately, 'true') }}: - stage: publish_using_darc @@ -274,10 +274,10 @@ stages: dependsOn: ${{ parameters.validateDependsOn }} displayName: Publish using Darc variables: - - template: /eng/common/core-templates/post-build/common-variables.yml - - template: /eng/common/core-templates/variables/pool-providers.yml - parameters: - is1ESPipeline: ${{ parameters.is1ESPipeline }} + - template: /eng/common/core-templates/post-build/common-variables.yml + - template: /eng/common/core-templates/variables/pool-providers.yml + parameters: + is1ESPipeline: ${{ parameters.is1ESPipeline }} jobs: - job: displayName: Publish Using Darc @@ -291,41 +291,42 @@ stages: os: windows # If it's not devdiv, it's dnceng ${{ else }}: - ${{ if eq(parameters.is1ESPipeline, true) }}: + ${{ if eq(parameters.is1ESPipeline, true) }}: name: 
NetCore1ESPool-Publishing-Internal - image: windows.vs2019.amd64 + image: windows.vs2022.amd64 os: windows ${{ else }}: name: NetCore1ESPool-Publishing-Internal - demands: ImageOverride -equals windows.vs2019.amd64 + demands: ImageOverride -equals windows.vs2022.amd64 steps: - - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml - parameters: - BARBuildId: ${{ parameters.BARBuildId }} - PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} - is1ESPipeline: ${{ parameters.is1ESPipeline }} - - - task: NuGetAuthenticate@1 - - # Populate internal runtime variables. - - template: /eng/common/templates/steps/enable-internal-sources.yml - parameters: - legacyCredential: $(dn-bot-dnceng-artifact-feeds-rw) - - - template: /eng/common/templates/steps/enable-internal-runtimes.yml - - - task: UseDotNet@2 - inputs: - version: 8.0.x - - - task: AzureCLI@2 - displayName: Publish Using Darc - inputs: - azureSubscription: "Darc: Maestro Production" - scriptType: ps - scriptLocation: scriptPath - scriptPath: $(System.DefaultWorkingDirectory)/eng/common/post-build/publish-using-darc.ps1 - arguments: > + - template: /eng/common/core-templates/post-build/setup-maestro-vars.yml + parameters: + BARBuildId: ${{ parameters.BARBuildId }} + PromoteToChannelIds: ${{ parameters.PromoteToChannelIds }} + is1ESPipeline: ${{ parameters.is1ESPipeline }} + + - task: NuGetAuthenticate@1 + + # Populate internal runtime variables. 
+ - template: /eng/common/templates/steps/enable-internal-sources.yml + parameters: + legacyCredential: $(dn-bot-dnceng-artifact-feeds-rw) + + - template: /eng/common/templates/steps/enable-internal-runtimes.yml + + # Darc is targeting 8.0, so make sure it's installed + - task: UseDotNet@2 + inputs: + version: 8.0.x + + - task: AzureCLI@2 + displayName: Publish Using Darc + inputs: + azureSubscription: "Darc: Maestro Production" + scriptType: ps + scriptLocation: scriptPath + scriptPath: $(System.DefaultWorkingDirectory)/eng/common/post-build/publish-using-darc.ps1 + arguments: > -BuildId $(BARBuildId) -PublishingInfraVersion ${{ parameters.publishingInfraVersion }} -AzdoToken '$(System.AccessToken)' diff --git a/eng/common/core-templates/steps/generate-sbom.yml b/eng/common/core-templates/steps/generate-sbom.yml index 003f7eae0fa..c05f6502797 100644 --- a/eng/common/core-templates/steps/generate-sbom.yml +++ b/eng/common/core-templates/steps/generate-sbom.yml @@ -5,7 +5,7 @@ # IgnoreDirectories - Directories to ignore for SBOM generation. This will be passed through to the CG component detector. 
parameters: - PackageVersion: 11.0.0 + PackageVersion: 10.0.0 BuildDropPath: '$(System.DefaultWorkingDirectory)/artifacts' PackageName: '.NET' ManifestDirPath: $(Build.ArtifactStagingDirectory)/sbom diff --git a/eng/common/core-templates/steps/install-microbuild-impl.yml b/eng/common/core-templates/steps/install-microbuild-impl.yml deleted file mode 100644 index b9e0143ee92..00000000000 --- a/eng/common/core-templates/steps/install-microbuild-impl.yml +++ /dev/null @@ -1,34 +0,0 @@ -parameters: - - name: microbuildTaskInputs - type: object - default: {} - - - name: microbuildEnv - type: object - default: {} - - - name: enablePreviewMicrobuild - type: boolean - default: false - - - name: condition - type: string - - - name: continueOnError - type: boolean - -steps: -- ${{ if eq(parameters.enablePreviewMicrobuild, 'true') }}: - - task: MicroBuildSigningPluginPreview@4 - displayName: Install Preview MicroBuild plugin - inputs: ${{ parameters.microbuildTaskInputs }} - env: ${{ parameters.microbuildEnv }} - continueOnError: ${{ parameters.continueOnError }} - condition: ${{ parameters.condition }} -- ${{ else }}: - - task: MicroBuildSigningPlugin@4 - displayName: Install MicroBuild plugin - inputs: ${{ parameters.microbuildTaskInputs }} - env: ${{ parameters.microbuildEnv }} - continueOnError: ${{ parameters.continueOnError }} - condition: ${{ parameters.condition }} diff --git a/eng/common/core-templates/steps/install-microbuild.yml b/eng/common/core-templates/steps/install-microbuild.yml index 4f4b56ed2a6..553fce66b94 100644 --- a/eng/common/core-templates/steps/install-microbuild.yml +++ b/eng/common/core-templates/steps/install-microbuild.yml @@ -4,8 +4,6 @@ parameters: # Enable install tasks for MicroBuild on Mac and Linux # Will be ignored if 'enableMicrobuild' is false or 'Agent.Os' is 'Windows_NT' enableMicrobuildForMacAndLinux: false - # Enable preview version of MB signing plugin - enablePreviewMicrobuild: false # Determines whether the ESRP service connection 
information should be passed to the signing plugin. # This overlaps with _SignType to some degree. We only need the service connection for real signing. # It's important that the service connection not be passed to the MicroBuildSigningPlugin task in this place. @@ -15,8 +13,6 @@ parameters: microbuildUseESRP: true # Microbuild installation directory microBuildOutputFolder: $(Agent.TempDirectory)/MicroBuild - # Microbuild version - microbuildPluginVersion: 'latest' continueOnError: false @@ -73,46 +69,42 @@ steps: # YAML expansion, and Windows vs. Linux/Mac uses different service connections. However, # we can avoid including the MB install step if not enabled at all. This avoids a bunch of # extra pipeline authorizations, since most pipelines do not sign on non-Windows. - - template: /eng/common/core-templates/steps/install-microbuild-impl.yml@self - parameters: - enablePreviewMicrobuild: ${{ parameters.enablePreviewMicrobuild }} - microbuildTaskInputs: + - task: MicroBuildSigningPlugin@4 + displayName: Install MicroBuild plugin (Windows) + inputs: + signType: $(_SignType) + zipSources: false + feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json + ${{ if eq(parameters.microbuildUseESRP, true) }}: + ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)' + ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: + ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea + ${{ else }}: + ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca + env: + TeamName: $(_TeamName) + MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} + SYSTEM_ACCESSTOKEN: $(System.AccessToken) + continueOnError: ${{ parameters.continueOnError }} + condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'), in(variables['_SignType'], 'real', 'test')) + + - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, true) }}: + - task: MicroBuildSigningPlugin@4 + displayName: Install MicroBuild plugin 
(non-Windows) + inputs: signType: $(_SignType) zipSources: false feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json - version: ${{ parameters.microbuildPluginVersion }} + workingDirectory: ${{ parameters.microBuildOutputFolder }} ${{ if eq(parameters.microbuildUseESRP, true) }}: ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)' ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - ConnectedPMEServiceName: 6cc74545-d7b9-4050-9dfa-ebefcc8961ea + ConnectedPMEServiceName: beb8cb23-b303-4c95-ab26-9e44bc958d39 ${{ else }}: - ConnectedPMEServiceName: 248d384a-b39b-46e3-8ad5-c2c210d5e7ca - microbuildEnv: + ConnectedPMEServiceName: c24de2a5-cc7a-493d-95e4-8e5ff5cad2bc + env: TeamName: $(_TeamName) MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} SYSTEM_ACCESSTOKEN: $(System.AccessToken) continueOnError: ${{ parameters.continueOnError }} - condition: and(succeeded(), eq(variables['Agent.Os'], 'Windows_NT'), in(variables['_SignType'], 'real', 'test')) - - - ${{ if eq(parameters.enableMicrobuildForMacAndLinux, true) }}: - - template: /eng/common/core-templates/steps/install-microbuild-impl.yml@self - parameters: - enablePreviewMicrobuild: ${{ parameters.enablePreviewMicrobuild }} - microbuildTaskInputs: - signType: $(_SignType) - zipSources: false - feedSource: https://dnceng.pkgs.visualstudio.com/_packaging/MicroBuildToolset/nuget/v3/index.json - version: ${{ parameters.microbuildPluginVersion }} - workingDirectory: ${{ parameters.microBuildOutputFolder }} - ${{ if eq(parameters.microbuildUseESRP, true) }}: - ConnectedServiceName: 'MicroBuild Signing Task (DevDiv)' - ${{ if eq(variables['System.TeamProject'], 'DevDiv') }}: - ConnectedPMEServiceName: beb8cb23-b303-4c95-ab26-9e44bc958d39 - ${{ else }}: - ConnectedPMEServiceName: c24de2a5-cc7a-493d-95e4-8e5ff5cad2bc - microbuildEnv: - TeamName: $(_TeamName) - MicroBuildOutputFolderOverride: ${{ parameters.microBuildOutputFolder }} - 
SYSTEM_ACCESSTOKEN: $(System.AccessToken) - continueOnError: ${{ parameters.continueOnError }} - condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'), eq(variables['_SignType'], 'real')) + condition: and(succeeded(), ne(variables['Agent.Os'], 'Windows_NT'), eq(variables['_SignType'], 'real')) diff --git a/eng/common/core-templates/steps/source-build.yml b/eng/common/core-templates/steps/source-build.yml index acf16ed3496..b9c86c18ae4 100644 --- a/eng/common/core-templates/steps/source-build.yml +++ b/eng/common/core-templates/steps/source-build.yml @@ -24,7 +24,7 @@ steps: # in the default public locations. internalRuntimeDownloadArgs= if [ '$(dotnetbuilds-internal-container-read-token-base64)' != '$''(dotnetbuilds-internal-container-read-token-base64)' ]; then - internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://ci.dot.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://ci.dot.net/internal --runtimesourcefeedkey $(dotnetbuilds-internal-container-read-token-base64)' + internalRuntimeDownloadArgs='/p:DotNetRuntimeSourceFeed=https://ci.dot.net/internal /p:DotNetRuntimeSourceFeedKey=$(dotnetbuilds-internal-container-read-token-base64) --runtimesourcefeed https://ci.dot.net/internal --runtimesourcefeedkey '$(dotnetbuilds-internal-container-read-token-base64)'' fi buildConfig=Release diff --git a/eng/common/core-templates/steps/source-index-stage1-publish.yml b/eng/common/core-templates/steps/source-index-stage1-publish.yml index ac019e2d033..e9a694afa58 100644 --- a/eng/common/core-templates/steps/source-index-stage1-publish.yml +++ b/eng/common/core-templates/steps/source-index-stage1-publish.yml @@ -1,6 +1,6 @@ parameters: - sourceIndexUploadPackageVersion: 2.0.0-20250906.1 - sourceIndexProcessBinlogPackageVersion: 1.0.1-20250906.1 + sourceIndexUploadPackageVersion: 2.0.0-20250818.1 + sourceIndexProcessBinlogPackageVersion: 1.0.1-20250818.1 sourceIndexPackageSource: 
https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-tools/nuget/v3/index.json binlogPath: artifacts/log/Debug/Build.binlog @@ -14,8 +14,8 @@ steps: workingDirectory: $(Agent.TempDirectory) - script: | - $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version ${{parameters.sourceIndexProcessBinlogPackageVersion}} --source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools - $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version ${{parameters.sourceIndexUploadPackageVersion}} --source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools + $(Agent.TempDirectory)/dotnet/dotnet tool install BinLogToSln --version ${{parameters.sourceIndexProcessBinlogPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools + $(Agent.TempDirectory)/dotnet/dotnet tool install UploadIndexStage1 --version ${{parameters.sourceIndexUploadPackageVersion}} --add-source ${{parameters.SourceIndexPackageSource}} --tool-path $(Agent.TempDirectory)/.source-index/tools displayName: "Source Index: Download netsourceindex Tools" # Set working directory to temp directory so 'dotnet' doesn't try to use global.json and use the repo's sdk. 
workingDirectory: $(Agent.TempDirectory) diff --git a/eng/common/darc-init.sh b/eng/common/darc-init.sh index 9f5ad6b763b..e889f439b8d 100755 --- a/eng/common/darc-init.sh +++ b/eng/common/darc-init.sh @@ -5,7 +5,7 @@ darcVersion='' versionEndpoint='https://maestro.dot.net/api/assets/darc-version?api-version=2020-02-20' verbosity='minimal' -while [[ $# -gt 0 ]]; do +while [[ $# > 0 ]]; do opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" case "$opt" in --darcversion) diff --git a/eng/common/dotnet-install.sh b/eng/common/dotnet-install.sh index 61f302bb677..7b9d97e3bd4 100755 --- a/eng/common/dotnet-install.sh +++ b/eng/common/dotnet-install.sh @@ -18,7 +18,7 @@ architecture='' runtime='dotnet' runtimeSourceFeed='' runtimeSourceFeedKey='' -while [[ $# -gt 0 ]]; do +while [[ $# > 0 ]]; do opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" case "$opt" in -version|-v) diff --git a/eng/common/dotnet.sh b/eng/common/dotnet.sh index f6d24871c1d..2ef68235675 100644 --- a/eng/common/dotnet.sh +++ b/eng/common/dotnet.sh @@ -19,7 +19,7 @@ source $scriptroot/tools.sh InitializeDotNetCli true # install # Invoke acquired SDK with args if they are provided -if [[ $# -gt 0 ]]; then +if [[ $# > 0 ]]; then __dotnetDir=${_InitializeDotNetCli} dotnetPath=${__dotnetDir}/dotnet ${dotnetPath} "$@" diff --git a/eng/common/internal-feed-operations.sh b/eng/common/internal-feed-operations.sh index 6299e7effd4..9378223ba09 100755 --- a/eng/common/internal-feed-operations.sh +++ b/eng/common/internal-feed-operations.sh @@ -100,7 +100,7 @@ operation='' authToken='' repoName='' -while [[ $# -gt 0 ]]; do +while [[ $# > 0 ]]; do opt="$(echo "$1" | tr "[:upper:]" "[:lower:]")" case "$opt" in --operation) diff --git a/eng/common/native/install-dependencies.sh b/eng/common/native/install-dependencies.sh index 64b87d0bcc3..477a44f335b 100644 --- a/eng/common/native/install-dependencies.sh +++ b/eng/common/native/install-dependencies.sh @@ -27,11 +27,9 @@ case "$os" in libssl-dev libkrb5-dev pigz cpio 
localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8 - elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ] || [ "$ID" = "centos"]; then + elif [ "$ID" = "fedora" ] || [ "$ID" = "rhel" ] || [ "$ID" = "azurelinux" ]; then pkg_mgr="$(command -v tdnf 2>/dev/null || command -v dnf)" $pkg_mgr install -y cmake llvm lld lldb clang python curl libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio - elif [ "$ID" = "amzn" ]; then - dnf install -y cmake llvm lld lldb clang python libicu-devel openssl-devel krb5-devel lttng-ust-devel pigz cpio elif [ "$ID" = "alpine" ]; then apk add build-base cmake bash curl clang llvm-dev lld lldb krb5-dev lttng-ust-dev icu-dev openssl-dev pigz cpio else diff --git a/eng/common/post-build/redact-logs.ps1 b/eng/common/post-build/redact-logs.ps1 index fc0218a013d..472d5bb562c 100644 --- a/eng/common/post-build/redact-logs.ps1 +++ b/eng/common/post-build/redact-logs.ps1 @@ -9,8 +9,7 @@ param( [Parameter(Mandatory=$false)][string] $TokensFilePath, [Parameter(ValueFromRemainingArguments=$true)][String[]]$TokensToRedact, [Parameter(Mandatory=$false)][string] $runtimeSourceFeed, - [Parameter(Mandatory=$false)][string] $runtimeSourceFeedKey -) + [Parameter(Mandatory=$false)][string] $runtimeSourceFeedKey) try { $ErrorActionPreference = 'Stop' diff --git a/eng/common/templates/variables/pool-providers.yml b/eng/common/templates/variables/pool-providers.yml index e0b19c14a07..18693ea120d 100644 --- a/eng/common/templates/variables/pool-providers.yml +++ b/eng/common/templates/variables/pool-providers.yml @@ -23,7 +23,7 @@ # # pool: # name: $(DncEngInternalBuildPool) -# demands: ImageOverride -equals windows.vs2019.amd64 +# demands: ImageOverride -equals windows.vs2022.amd64 variables: - ${{ if eq(variables['System.TeamProject'], 'internal') }}: - template: /eng/common/templates-official/variables/pool-providers.yml diff --git a/eng/common/tools.ps1 b/eng/common/tools.ps1 index 
e8e9f7615f1..049fe6db994 100644 --- a/eng/common/tools.ps1 +++ b/eng/common/tools.ps1 @@ -157,6 +157,9 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) { return $global:_DotNetInstallDir } + # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism + $env:DOTNET_MULTILEVEL_LOOKUP=0 + # Disable first run since we do not need all ASP.NET packages restored. $env:DOTNET_NOLOGO=1 @@ -222,6 +225,7 @@ function InitializeDotNetCli([bool]$install, [bool]$createSdkLocationFile) { # Make Sure that our bootstrapped dotnet cli is available in future steps of the Azure Pipelines build Write-PipelinePrependPath -Path $dotnetRoot + Write-PipelineSetVariable -Name 'DOTNET_MULTILEVEL_LOOKUP' -Value '0' Write-PipelineSetVariable -Name 'DOTNET_NOLOGO' -Value '1' return $global:_DotNetInstallDir = $dotnetRoot @@ -556,19 +560,26 @@ function LocateVisualStudio([object]$vsRequirements = $null){ }) } - if (!$vsRequirements) { $vsRequirements = $GlobalJson.tools.vs } + if (!$vsRequirements) { + if (Get-Member -InputObject $GlobalJson.tools -Name 'vs' -ErrorAction SilentlyContinue) { + $vsRequirements = $GlobalJson.tools.vs + } else { + $vsRequirements = $null + } + } + $args = @('-latest', '-format', 'json', '-requires', 'Microsoft.Component.MSBuild', '-products', '*') if (!$excludePrereleaseVS) { $args += '-prerelease' } - if (Get-Member -InputObject $vsRequirements -Name 'version') { + if ($vsRequirements -and (Get-Member -InputObject $vsRequirements -Name 'version' -ErrorAction SilentlyContinue)) { $args += '-version' $args += $vsRequirements.version } - if (Get-Member -InputObject $vsRequirements -Name 'components') { + if ($vsRequirements -and (Get-Member -InputObject $vsRequirements -Name 'components' -ErrorAction SilentlyContinue)) { foreach ($component in $vsRequirements.components) { $args += '-requires' $args += $component diff --git a/eng/common/tools.sh b/eng/common/tools.sh index 6c121300ac7..c1841c9dfd0 
100755 --- a/eng/common/tools.sh +++ b/eng/common/tools.sh @@ -115,6 +115,9 @@ function InitializeDotNetCli { local install=$1 + # Don't resolve runtime, shared framework, or SDK from other locations to ensure build determinism + export DOTNET_MULTILEVEL_LOOKUP=0 + # Disable first run since we want to control all package sources export DOTNET_NOLOGO=1 @@ -163,6 +166,7 @@ function InitializeDotNetCli { # build steps from using anything other than what we've downloaded. Write-PipelinePrependPath -path "$dotnet_root" + Write-PipelineSetVariable -name "DOTNET_MULTILEVEL_LOOKUP" -value "0" Write-PipelineSetVariable -name "DOTNET_NOLOGO" -value "1" # return value diff --git a/eng/restore-toolset.sh b/eng/restore-toolset.sh index 8a7bb526c06..cdcf18f1d19 100644 --- a/eng/restore-toolset.sh +++ b/eng/restore-toolset.sh @@ -3,7 +3,7 @@ # Install MAUI workload if -restoreMaui was passed # Only on macOS (MAUI doesn't support Linux, Windows uses .cmd) -if [[ "$restore_maui" == true ]]; then +if [[ "${restore_maui:-false}" == true ]]; then # Check if we're on macOS if [[ "$(uname -s)" == "Darwin" ]]; then echo "" diff --git a/extension/loc/xlf/aspire-vscode.xlf b/extension/loc/xlf/aspire-vscode.xlf index 73088ed32fb..bbe98c2cc15 100644 --- a/extension/loc/xlf/aspire-vscode.xlf +++ b/extension/loc/xlf/aspire-vscode.xlf @@ -10,6 +10,9 @@ Aspire CLI Version: {0}. + + Aspire CLI found at {0}. The extension will use this path. + Aspire CLI is not available on PATH. Please install it and restart VS Code. 
diff --git a/extension/package.nls.json b/extension/package.nls.json index 75e0719f912..03c1794715e 100644 --- a/extension/package.nls.json +++ b/extension/package.nls.json @@ -93,6 +93,7 @@ "aspire-vscode.strings.lookingForDevkitBuildTask": "C# Dev Kit is installed, looking for C# Dev Kit build task...", "aspire-vscode.strings.csharpDevKitNotInstalled": "C# Dev Kit is not installed, building using dotnet CLI...", "aspire-vscode.strings.cliNotAvailable": "Aspire CLI is not available on PATH. Please install it and restart VS Code.", + "aspire-vscode.strings.cliFoundAtDefaultPath": "Aspire CLI found at {0}. The extension will use this path.", "aspire-vscode.strings.openCliInstallInstructions": "See CLI installation instructions", "aspire-vscode.strings.dismissLabel": "Dismiss" } diff --git a/extension/src/commands/add.ts b/extension/src/commands/add.ts index 5d8bd3307a7..e1e158d7b4b 100644 --- a/extension/src/commands/add.ts +++ b/extension/src/commands/add.ts @@ -1,5 +1,5 @@ import { AspireTerminalProvider } from '../utils/AspireTerminalProvider'; export async function addCommand(terminalProvider: AspireTerminalProvider) { - terminalProvider.sendAspireCommandToAspireTerminal('add'); + await terminalProvider.sendAspireCommandToAspireTerminal('add'); } diff --git a/extension/src/commands/deploy.ts b/extension/src/commands/deploy.ts index a40590e1891..057d419f6ca 100644 --- a/extension/src/commands/deploy.ts +++ b/extension/src/commands/deploy.ts @@ -1,5 +1,5 @@ import { AspireTerminalProvider } from '../utils/AspireTerminalProvider'; export async function deployCommand(terminalProvider: AspireTerminalProvider) { - terminalProvider.sendAspireCommandToAspireTerminal('deploy'); + await terminalProvider.sendAspireCommandToAspireTerminal('deploy'); } diff --git a/extension/src/commands/init.ts b/extension/src/commands/init.ts index 642bfa23aa3..3d6c60e25d9 100644 --- a/extension/src/commands/init.ts +++ b/extension/src/commands/init.ts @@ -1,5 +1,5 @@ import { 
AspireTerminalProvider } from "../utils/AspireTerminalProvider"; export async function initCommand(terminalProvider: AspireTerminalProvider) { - terminalProvider.sendAspireCommandToAspireTerminal('init'); + await terminalProvider.sendAspireCommandToAspireTerminal('init'); }; \ No newline at end of file diff --git a/extension/src/commands/new.ts b/extension/src/commands/new.ts index d8a26eab433..ab2936e0af3 100644 --- a/extension/src/commands/new.ts +++ b/extension/src/commands/new.ts @@ -1,5 +1,5 @@ import { AspireTerminalProvider } from "../utils/AspireTerminalProvider"; export async function newCommand(terminalProvider: AspireTerminalProvider) { - terminalProvider.sendAspireCommandToAspireTerminal('new'); + await terminalProvider.sendAspireCommandToAspireTerminal('new'); }; diff --git a/extension/src/commands/publish.ts b/extension/src/commands/publish.ts index 181d590337a..276ea03a7a8 100644 --- a/extension/src/commands/publish.ts +++ b/extension/src/commands/publish.ts @@ -1,5 +1,5 @@ import { AspireTerminalProvider } from '../utils/AspireTerminalProvider'; export async function publishCommand(terminalProvider: AspireTerminalProvider) { - terminalProvider.sendAspireCommandToAspireTerminal('publish'); + await terminalProvider.sendAspireCommandToAspireTerminal('publish'); } diff --git a/extension/src/commands/update.ts b/extension/src/commands/update.ts index 31ab5b9f89e..23e8070920e 100644 --- a/extension/src/commands/update.ts +++ b/extension/src/commands/update.ts @@ -1,5 +1,5 @@ import { AspireTerminalProvider } from '../utils/AspireTerminalProvider'; export async function updateCommand(terminalProvider: AspireTerminalProvider) { - terminalProvider.sendAspireCommandToAspireTerminal('update'); + await terminalProvider.sendAspireCommandToAspireTerminal('update'); } diff --git a/extension/src/debugger/AspireDebugConfigurationProvider.ts b/extension/src/debugger/AspireDebugConfigurationProvider.ts index ba4c8d98c14..643db6ed958 100644 --- 
a/extension/src/debugger/AspireDebugConfigurationProvider.ts +++ b/extension/src/debugger/AspireDebugConfigurationProvider.ts @@ -1,15 +1,8 @@ import * as vscode from 'vscode'; import { defaultConfigurationName } from '../loc/strings'; -import { AspireTerminalProvider } from '../utils/AspireTerminalProvider'; import { checkCliAvailableOrRedirect } from '../utils/workspace'; export class AspireDebugConfigurationProvider implements vscode.DebugConfigurationProvider { - private _terminalProvider: AspireTerminalProvider; - - constructor(terminalProvider: AspireTerminalProvider) { - this._terminalProvider = terminalProvider; - } - async provideDebugConfigurations(folder: vscode.WorkspaceFolder | undefined, token?: vscode.CancellationToken): Promise { if (folder === undefined) { return []; @@ -28,9 +21,8 @@ export class AspireDebugConfigurationProvider implements vscode.DebugConfigurati async resolveDebugConfiguration(folder: vscode.WorkspaceFolder | undefined, config: vscode.DebugConfiguration, token?: vscode.CancellationToken): Promise { // Check if CLI is available before starting debug session - const cliPath = this._terminalProvider.getAspireCliExecutablePath(); - const isCliAvailable = await checkCliAvailableOrRedirect(cliPath); - if (!isCliAvailable) { + const result = await checkCliAvailableOrRedirect(); + if (!result.available) { return undefined; // Cancel the debug session } diff --git a/extension/src/debugger/AspireDebugSession.ts b/extension/src/debugger/AspireDebugSession.ts index bc35aceeb6c..293beade0d7 100644 --- a/extension/src/debugger/AspireDebugSession.ts +++ b/extension/src/debugger/AspireDebugSession.ts @@ -93,14 +93,14 @@ export class AspireDebugSession implements vscode.DebugAdapter { if (isDirectory(appHostPath)) { this.sendMessageWithEmoji("📁", launchingWithDirectory(appHostPath)); - this.spawnRunCommand(args, appHostPath, noDebug); + void this.spawnRunCommand(args, appHostPath, noDebug); } else { this.sendMessageWithEmoji("📂", 
launchingWithAppHost(appHostPath)); const workspaceFolder = path.dirname(appHostPath); args.push('--project', appHostPath); - this.spawnRunCommand(args, workspaceFolder, noDebug); + void this.spawnRunCommand(args, workspaceFolder, noDebug); } } else if (message.command === 'disconnect' || message.command === 'terminate') { @@ -133,7 +133,7 @@ export class AspireDebugSession implements vscode.DebugAdapter { } } - spawnRunCommand(args: string[], workingDirectory: string | undefined, noDebug: boolean) { + async spawnRunCommand(args: string[], workingDirectory: string | undefined, noDebug: boolean) { const disposable = this._rpcServer.onNewConnection((client: ICliRpcClient) => { if (client.debugSessionId === this.debugSessionId) { this._rpcClient = client; @@ -143,7 +143,7 @@ export class AspireDebugSession implements vscode.DebugAdapter { spawnCliProcess( this._terminalProvider, - this._terminalProvider.getAspireCliExecutablePath(), + await this._terminalProvider.getAspireCliExecutablePath(), args, { stdoutCallback: (data) => { diff --git a/extension/src/extension.ts b/extension/src/extension.ts index f2e2c44f8eb..de001575696 100644 --- a/extension/src/extension.ts +++ b/extension/src/extension.ts @@ -67,7 +67,7 @@ export async function activate(context: vscode.ExtensionContext) { context.subscriptions.push(cliAddCommandRegistration, cliNewCommandRegistration, cliInitCommandRegistration, cliDeployCommandRegistration, cliPublishCommandRegistration, openTerminalCommandRegistration, configureLaunchJsonCommandRegistration); context.subscriptions.push(cliUpdateCommandRegistration, settingsCommandRegistration, openLocalSettingsCommandRegistration, openGlobalSettingsCommandRegistration, runAppHostCommandRegistration, debugAppHostCommandRegistration); - const debugConfigProvider = new AspireDebugConfigurationProvider(terminalProvider); + const debugConfigProvider = new AspireDebugConfigurationProvider(); context.subscriptions.push( 
vscode.debug.registerDebugConfigurationProvider('aspire', debugConfigProvider, vscode.DebugConfigurationProviderTriggerKind.Dynamic) ); @@ -114,9 +114,8 @@ async function tryExecuteCommand(commandName: string, terminalProvider: AspireTe const cliCheckExcludedCommands: string[] = ["aspire-vscode.settings", "aspire-vscode.configureLaunchJson"]; if (!cliCheckExcludedCommands.includes(commandName)) { - const cliPath = terminalProvider.getAspireCliExecutablePath(); - const isCliAvailable = await checkCliAvailableOrRedirect(cliPath); - if (!isCliAvailable) { + const result = await checkCliAvailableOrRedirect(); + if (!result.available) { return; } } diff --git a/extension/src/loc/strings.ts b/extension/src/loc/strings.ts index 484ca92ec30..1b02e953ff7 100644 --- a/extension/src/loc/strings.ts +++ b/extension/src/loc/strings.ts @@ -71,3 +71,4 @@ export const csharpDevKitNotInstalled = vscode.l10n.t('C# Dev Kit is not install export const dismissLabel = vscode.l10n.t('Dismiss'); export const openCliInstallInstructions = vscode.l10n.t('See CLI installation instructions'); export const cliNotAvailable = vscode.l10n.t('Aspire CLI is not available on PATH. Please install it and restart VS Code.'); +export const cliFoundAtDefaultPath = (path: string) => vscode.l10n.t('Aspire CLI found at {0}. 
The extension will use this path.', path); diff --git a/extension/src/test/aspireTerminalProvider.test.ts b/extension/src/test/aspireTerminalProvider.test.ts index dc70ca4c3fb..fa139b51715 100644 --- a/extension/src/test/aspireTerminalProvider.test.ts +++ b/extension/src/test/aspireTerminalProvider.test.ts @@ -2,94 +2,58 @@ import * as assert from 'assert'; import * as vscode from 'vscode'; import * as sinon from 'sinon'; import { AspireTerminalProvider } from '../utils/AspireTerminalProvider'; +import * as cliPathModule from '../utils/cliPath'; suite('AspireTerminalProvider tests', () => { let terminalProvider: AspireTerminalProvider; - let configStub: sinon.SinonStub; + let resolveCliPathStub: sinon.SinonStub; let subscriptions: vscode.Disposable[]; setup(() => { subscriptions = []; terminalProvider = new AspireTerminalProvider(subscriptions); - configStub = sinon.stub(vscode.workspace, 'getConfiguration'); + resolveCliPathStub = sinon.stub(cliPathModule, 'resolveCliPath'); }); teardown(() => { - configStub.restore(); + resolveCliPathStub.restore(); subscriptions.forEach(s => s.dispose()); }); suite('getAspireCliExecutablePath', () => { - test('returns "aspire" when no custom path is configured', () => { - configStub.returns({ - get: sinon.stub().returns('') - }); + test('returns "aspire" when CLI is on PATH', async () => { + resolveCliPathStub.resolves({ cliPath: 'aspire', available: true, source: 'path' }); - const result = terminalProvider.getAspireCliExecutablePath(); + const result = await terminalProvider.getAspireCliExecutablePath(); assert.strictEqual(result, 'aspire'); }); - test('returns custom path when configured', () => { - configStub.returns({ - get: sinon.stub().returns('/usr/local/bin/aspire') - }); + test('returns resolved path when CLI found at default install location', async () => { + resolveCliPathStub.resolves({ cliPath: '/home/user/.aspire/bin/aspire', available: true, source: 'default-install' }); - const result = 
terminalProvider.getAspireCliExecutablePath(); - assert.strictEqual(result, '/usr/local/bin/aspire'); + const result = await terminalProvider.getAspireCliExecutablePath(); + assert.strictEqual(result, '/home/user/.aspire/bin/aspire'); }); - test('returns custom path with spaces', () => { - configStub.returns({ - get: sinon.stub().returns('/my path/with spaces/aspire') - }); - - const result = terminalProvider.getAspireCliExecutablePath(); - assert.strictEqual(result, '/my path/with spaces/aspire'); - }); + test('returns configured custom path', async () => { + resolveCliPathStub.resolves({ cliPath: '/usr/local/bin/aspire', available: true, source: 'configured' }); - test('trims whitespace from configured path', () => { - configStub.returns({ - get: sinon.stub().returns(' /usr/local/bin/aspire ') - }); - - const result = terminalProvider.getAspireCliExecutablePath(); + const result = await terminalProvider.getAspireCliExecutablePath(); assert.strictEqual(result, '/usr/local/bin/aspire'); }); - test('returns "aspire" when configured path is only whitespace', () => { - configStub.returns({ - get: sinon.stub().returns(' ') - }); + test('returns "aspire" when CLI is not found', async () => { + resolveCliPathStub.resolves({ cliPath: 'aspire', available: false, source: 'not-found' }); - const result = terminalProvider.getAspireCliExecutablePath(); + const result = await terminalProvider.getAspireCliExecutablePath(); assert.strictEqual(result, 'aspire'); }); - test('handles Windows-style paths', () => { - configStub.returns({ - get: sinon.stub().returns('C:\\Program Files\\Aspire\\aspire.exe') - }); + test('handles Windows-style paths', async () => { + resolveCliPathStub.resolves({ cliPath: 'C:\\Program Files\\Aspire\\aspire.exe', available: true, source: 'configured' }); - const result = terminalProvider.getAspireCliExecutablePath(); + const result = await terminalProvider.getAspireCliExecutablePath(); assert.strictEqual(result, 'C:\\Program Files\\Aspire\\aspire.exe'); 
}); - - test('handles Windows-style paths without spaces', () => { - configStub.returns({ - get: sinon.stub().returns('C:\\aspire\\aspire.exe') - }); - - const result = terminalProvider.getAspireCliExecutablePath(); - assert.strictEqual(result, 'C:\\aspire\\aspire.exe'); - }); - - test('handles paths with special characters', () => { - configStub.returns({ - get: sinon.stub().returns('/path/with$dollar/aspire') - }); - - const result = terminalProvider.getAspireCliExecutablePath(); - assert.strictEqual(result, '/path/with$dollar/aspire'); - }); }); }); diff --git a/extension/src/test/cliPath.test.ts b/extension/src/test/cliPath.test.ts new file mode 100644 index 00000000000..e70519b3ebe --- /dev/null +++ b/extension/src/test/cliPath.test.ts @@ -0,0 +1,211 @@ +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import * as os from 'os'; +import * as path from 'path'; +import { getDefaultCliInstallPaths, resolveCliPath, CliPathDependencies } from '../utils/cliPath'; + +const bundlePath = '/home/user/.aspire/bin/aspire'; +const globalToolPath = '/home/user/.dotnet/tools/aspire'; +const defaultPaths = [bundlePath, globalToolPath]; + +function createMockDeps(overrides: Partial = {}): CliPathDependencies { + return { + getConfiguredPath: () => '', + getDefaultPaths: () => defaultPaths, + isOnPath: async () => false, + findAtDefaultPath: async () => undefined, + tryExecute: async () => false, + setConfiguredPath: async () => {}, + ...overrides, + }; +} + +suite('utils/cliPath tests', () => { + + suite('getDefaultCliInstallPaths', () => { + test('returns bundle path (~/.aspire/bin) as first entry', () => { + const paths = getDefaultCliInstallPaths(); + const homeDir = os.homedir(); + + assert.ok(paths.length >= 2, 'Should return at least 2 default paths'); + assert.ok(paths[0].startsWith(path.join(homeDir, '.aspire', 'bin')), `First path should be bundle install: ${paths[0]}`); + }); + + test('returns global tool path (~/.dotnet/tools) as second entry', () 
=> { + const paths = getDefaultCliInstallPaths(); + const homeDir = os.homedir(); + + assert.ok(paths[1].startsWith(path.join(homeDir, '.dotnet', 'tools')), `Second path should be global tool: ${paths[1]}`); + }); + + test('uses correct executable name for current platform', () => { + const paths = getDefaultCliInstallPaths(); + + for (const p of paths) { + const basename = path.basename(p); + if (process.platform === 'win32') { + assert.strictEqual(basename, 'aspire.exe'); + } else { + assert.strictEqual(basename, 'aspire'); + } + } + }); + }); + + suite('resolveCliPath', () => { + test('falls back to default install path when CLI is not on PATH', async () => { + const setConfiguredPath = sinon.stub().resolves(); + + const deps = createMockDeps({ + isOnPath: async () => false, + findAtDefaultPath: async () => bundlePath, + setConfiguredPath, + }); + + const result = await resolveCliPath(deps); + + assert.strictEqual(result.available, true); + assert.strictEqual(result.source, 'default-install'); + assert.strictEqual(result.cliPath, bundlePath); + assert.ok(setConfiguredPath.calledOnceWith(bundlePath), 'should update the VS Code setting to the found path'); + }); + + test('updates VS Code setting when CLI found at default path but not on PATH', async () => { + const setConfiguredPath = sinon.stub().resolves(); + + const deps = createMockDeps({ + getConfiguredPath: () => '', + isOnPath: async () => false, + findAtDefaultPath: async () => bundlePath, + setConfiguredPath, + }); + + await resolveCliPath(deps); + + assert.ok(setConfiguredPath.calledOnce, 'setConfiguredPath should be called once'); + assert.strictEqual(setConfiguredPath.firstCall.args[0], bundlePath, 'should set the path to the found install location'); + }); + + test('prefers PATH over default install path', async () => { + const setConfiguredPath = sinon.stub().resolves(); + + const deps = createMockDeps({ + isOnPath: async () => true, + findAtDefaultPath: async () => bundlePath, + setConfiguredPath, + 
}); + + const result = await resolveCliPath(deps); + + assert.strictEqual(result.available, true); + assert.strictEqual(result.source, 'path'); + assert.strictEqual(result.cliPath, 'aspire'); + assert.ok(setConfiguredPath.notCalled, 'should not update settings when CLI is on PATH'); + }); + + test('clears setting when CLI is on PATH and setting was previously set to a default path', async () => { + const setConfiguredPath = sinon.stub().resolves(); + + const deps = createMockDeps({ + getConfiguredPath: () => bundlePath, + isOnPath: async () => true, + setConfiguredPath, + }); + + const result = await resolveCliPath(deps); + + assert.strictEqual(result.source, 'path'); + assert.ok(setConfiguredPath.calledOnceWith(''), 'should clear the setting'); + }); + + test('clears setting when CLI is on PATH and setting was previously set to global tool path', async () => { + const setConfiguredPath = sinon.stub().resolves(); + + const deps = createMockDeps({ + getConfiguredPath: () => globalToolPath, + isOnPath: async () => true, + setConfiguredPath, + }); + + const result = await resolveCliPath(deps); + + assert.strictEqual(result.source, 'path'); + assert.ok(setConfiguredPath.calledOnceWith(''), 'should clear the setting'); + }); + + test('returns not-found when CLI is not on PATH and not at any default path', async () => { + const deps = createMockDeps({ + isOnPath: async () => false, + findAtDefaultPath: async () => undefined, + }); + + const result = await resolveCliPath(deps); + + assert.strictEqual(result.available, false); + assert.strictEqual(result.source, 'not-found'); + }); + + test('uses custom configured path when valid and not a default', async () => { + const customPath = '/custom/path/aspire'; + + const deps = createMockDeps({ + getConfiguredPath: () => customPath, + tryExecute: async (p) => p === customPath, + }); + + const result = await resolveCliPath(deps); + + assert.strictEqual(result.available, true); + assert.strictEqual(result.source, 'configured'); + 
assert.strictEqual(result.cliPath, customPath); + }); + + test('falls through to PATH check when custom configured path is invalid', async () => { + const deps = createMockDeps({ + getConfiguredPath: () => '/bad/path/aspire', + tryExecute: async () => false, + isOnPath: async () => true, + }); + + const result = await resolveCliPath(deps); + + assert.strictEqual(result.source, 'path'); + assert.strictEqual(result.available, true); + }); + + test('falls through to default path when custom configured path is invalid and not on PATH', async () => { + const setConfiguredPath = sinon.stub().resolves(); + + const deps = createMockDeps({ + getConfiguredPath: () => '/bad/path/aspire', + tryExecute: async () => false, + isOnPath: async () => false, + findAtDefaultPath: async () => bundlePath, + setConfiguredPath, + }); + + const result = await resolveCliPath(deps); + + assert.strictEqual(result.source, 'default-install'); + assert.strictEqual(result.cliPath, bundlePath); + assert.ok(setConfiguredPath.calledOnceWith(bundlePath)); + }); + + test('does not update setting when already set to the found default path', async () => { + const setConfiguredPath = sinon.stub().resolves(); + + const deps = createMockDeps({ + getConfiguredPath: () => bundlePath, + isOnPath: async () => false, + findAtDefaultPath: async () => bundlePath, + setConfiguredPath, + }); + + const result = await resolveCliPath(deps); + + assert.strictEqual(result.source, 'default-install'); + assert.ok(setConfiguredPath.notCalled, 'should not re-set the path if it already matches'); + }); + }); +}); + diff --git a/extension/src/utils/AspireTerminalProvider.ts b/extension/src/utils/AspireTerminalProvider.ts index 35762287729..95ed6bf5426 100644 --- a/extension/src/utils/AspireTerminalProvider.ts +++ b/extension/src/utils/AspireTerminalProvider.ts @@ -5,6 +5,7 @@ import { RpcServerConnectionInfo } from '../server/AspireRpcServer'; import { DcpServerConnectionInfo } from '../dcp/types'; import { getRunSessionInfo, 
 getSupportedCapabilities } from '../capabilities'; import { EnvironmentVariables } from './environment'; +import { resolveCliPath } from './cliPath'; import path from 'path'; export const enum AnsiColors { @@ -57,8 +58,8 @@ export class AspireTerminalProvider implements vscode.Disposable { this._dcpServerConnectionInfo = value; } - sendAspireCommandToAspireTerminal(subcommand: string, showTerminal: boolean = true) { - const cliPath = this.getAspireCliExecutablePath(); + async sendAspireCommandToAspireTerminal(subcommand: string, showTerminal: boolean = true) { + const cliPath = await this.getAspireCliExecutablePath(); // On Windows, use & to execute paths, especially those with special characters // On Unix, just use the path directly @@ -200,15 +201,9 @@ export class AspireTerminalProvider implements vscode.Disposable { } - getAspireCliExecutablePath(): string { - const aspireCliPath = vscode.workspace.getConfiguration('aspire').get('aspireCliExecutablePath', ''); - if (aspireCliPath && aspireCliPath.trim().length > 0) { - extensionLogOutputChannel.debug(`Using user-configured Aspire CLI path: ${aspireCliPath}`); - return aspireCliPath.trim(); - } - - extensionLogOutputChannel.debug('No user-configured Aspire CLI path found'); - return "aspire"; + async getAspireCliExecutablePath(): Promise<string> { + const result = await resolveCliPath(); + return result.cliPath; } isCliDebugLoggingEnabled(): boolean { diff --git a/extension/src/utils/cliPath.ts b/extension/src/utils/cliPath.ts new file mode 100644 index 00000000000..6290ac6d945 --- /dev/null +++ b/extension/src/utils/cliPath.ts @@ -0,0 +1,194 @@ +import * as vscode from 'vscode'; +import { execFile } from 'child_process'; +import { promisify } from 'util'; +import * as fs from 'fs'; +import * as path from 'path'; +import * as os from 'os'; +import { extensionLogOutputChannel } from './logging'; + +const execFileAsync = promisify(execFile); +const fsAccessAsync = promisify(fs.access); + +/** + * Gets the default 
 installation paths for the Aspire CLI, in priority order. + * + * The CLI can be installed in two ways: + * 1. Bundle install (recommended): ~/.aspire/bin/aspire + * 2. .NET global tool: ~/.dotnet/tools/aspire + * + * @returns An array of default CLI paths to check, ordered by priority + */ +export function getDefaultCliInstallPaths(): string[] { + const homeDir = os.homedir(); + const exeName = process.platform === 'win32' ? 'aspire.exe' : 'aspire'; + + return [ + // Bundle install (recommended): ~/.aspire/bin/aspire + path.join(homeDir, '.aspire', 'bin', exeName), + // .NET global tool: ~/.dotnet/tools/aspire + path.join(homeDir, '.dotnet', 'tools', exeName), + ]; +} + +/** + * Checks if a file exists and is accessible. + */ +async function fileExists(filePath: string): Promise<boolean> { + try { + await fsAccessAsync(filePath, fs.constants.F_OK); + return true; + } + catch { + return false; + } +} + +/** + * Tries to execute the CLI at the given path to verify it works. + */ +async function tryExecuteCli(cliPath: string): Promise<boolean> { + try { + await execFileAsync(cliPath, ['--version'], { timeout: 5000 }); + return true; + } + catch { + return false; + } +} + +/** + * Checks if the Aspire CLI is available on the system PATH. + */ +export async function isCliOnPath(): Promise<boolean> { + return await tryExecuteCli('aspire'); +} + +/** + * Finds the first default installation path where the Aspire CLI exists and is executable. + * + * @returns The path where CLI was found, or undefined if not found at any default location + */ +export async function findCliAtDefaultPath(): Promise<string | undefined> { + for (const defaultPath of getDefaultCliInstallPaths()) { + if (await fileExists(defaultPath) && await tryExecuteCli(defaultPath)) { + return defaultPath; + } + } + + return undefined; +} + +/** + * Gets the VS Code configuration setting for the Aspire CLI path. 
 + */ +export function getConfiguredCliPath(): string { + return vscode.workspace.getConfiguration('aspire').get('aspireCliExecutablePath', '').trim(); +} + +/** + * Updates the VS Code configuration setting for the Aspire CLI path. + * Uses ConfigurationTarget.Global to set it at the user level. + */ +export async function setConfiguredCliPath(cliPath: string): Promise<void> { + extensionLogOutputChannel.info(`Setting aspire.aspireCliExecutablePath to: ${cliPath || '(empty)'}`); + await vscode.workspace.getConfiguration('aspire').update( + 'aspireCliExecutablePath', + cliPath || undefined, // Use undefined to remove the setting + vscode.ConfigurationTarget.Global + ); +} + +/** + * Result of checking CLI availability. + */ +export interface CliPathResolutionResult { + /** The resolved CLI path to use */ + cliPath: string; + /** Whether the CLI is available */ + available: boolean; + /** Where the CLI was found */ + source: 'path' | 'default-install' | 'configured' | 'not-found'; +} + +/** + * Dependencies for resolveCliPath that can be overridden for testing. + */ +export interface CliPathDependencies { + getConfiguredPath: () => string; + getDefaultPaths: () => string[]; + isOnPath: () => Promise<boolean>; + findAtDefaultPath: () => Promise<string | undefined>; + tryExecute: (cliPath: string) => Promise<boolean>; + setConfiguredPath: (cliPath: string) => Promise<void>; +} + +const defaultDependencies: CliPathDependencies = { + getConfiguredPath: getConfiguredCliPath, + getDefaultPaths: getDefaultCliInstallPaths, + isOnPath: isCliOnPath, + findAtDefaultPath: findCliAtDefaultPath, + tryExecute: tryExecuteCli, + setConfiguredPath: setConfiguredCliPath, +}; + +/** + * Resolves the Aspire CLI path, checking multiple locations in order: + * 1. User-configured path in VS Code settings + * 2. System PATH + * 3. Default installation directories (~/.aspire/bin, ~/.dotnet/tools) + * + * If the CLI is found at a default installation path but not on PATH, + * the VS Code setting is updated to use that path. 
 + * + * If the CLI is on PATH and a setting was previously auto-configured to a default path, + * the setting is cleared to prefer PATH. + */ +export async function resolveCliPath(deps: CliPathDependencies = defaultDependencies): Promise<CliPathResolutionResult> { + const configuredPath = deps.getConfiguredPath(); + const defaultPaths = deps.getDefaultPaths(); + + // 1. Check if user has configured a custom path (not one of the defaults) + if (configuredPath && !defaultPaths.includes(configuredPath)) { + const isValid = await deps.tryExecute(configuredPath); + if (isValid) { + extensionLogOutputChannel.info(`Using user-configured Aspire CLI path: ${configuredPath}`); + return { cliPath: configuredPath, available: true, source: 'configured' }; + } + + extensionLogOutputChannel.warn(`Configured CLI path is invalid: ${configuredPath}`); + // Continue to check other locations + } + + // 2. Check if CLI is on PATH + const onPath = await deps.isOnPath(); + if (onPath) { + extensionLogOutputChannel.info('Aspire CLI found on system PATH'); + + // If we previously auto-set the path to a default install location, clear it + // since PATH is now working + if (defaultPaths.includes(configuredPath)) { + extensionLogOutputChannel.info('Clearing aspireCliExecutablePath setting since CLI is on PATH'); + await deps.setConfiguredPath(''); + } + + return { cliPath: 'aspire', available: true, source: 'path' }; + } + + // 3. Check default installation paths (~/.aspire/bin first, then ~/.dotnet/tools) + const foundPath = await deps.findAtDefaultPath(); + if (foundPath) { + extensionLogOutputChannel.info(`Aspire CLI found at default install location: ${foundPath}`); + + // Update the setting so future invocations use this path + if (configuredPath !== foundPath) { + extensionLogOutputChannel.info('Updating aspireCliExecutablePath setting to use default install location'); + await deps.setConfiguredPath(foundPath); + } + + return { cliPath: foundPath, available: true, source: 'default-install' }; + } + + // 4. 
CLI not found anywhere + extensionLogOutputChannel.warn('Aspire CLI not found on PATH or at default install locations'); + return { cliPath: 'aspire', available: false, source: 'not-found' }; +} diff --git a/extension/src/utils/configInfoProvider.ts b/extension/src/utils/configInfoProvider.ts index ca9f4ea3c64..bd342a5feb5 100644 --- a/extension/src/utils/configInfoProvider.ts +++ b/extension/src/utils/configInfoProvider.ts @@ -9,11 +9,13 @@ import * as strings from '../loc/strings'; * Gets configuration information from the Aspire CLI. */ export async function getConfigInfo(terminalProvider: AspireTerminalProvider): Promise { + const cliPath = await terminalProvider.getAspireCliExecutablePath(); + return new Promise((resolve) => { const args = ['config', 'info', '--json']; let output = ''; - spawnCliProcess(terminalProvider, terminalProvider.getAspireCliExecutablePath(), args, { + spawnCliProcess(terminalProvider, cliPath, args, { stdoutCallback: (data) => { output += data; }, diff --git a/extension/src/utils/workspace.ts b/extension/src/utils/workspace.ts index 302b11dc716..f1335aa87d4 100644 --- a/extension/src/utils/workspace.ts +++ b/extension/src/utils/workspace.ts @@ -1,13 +1,13 @@ import * as vscode from 'vscode'; -import { cliNotAvailable, dismissLabel, dontShowAgainLabel, doYouWantToSetDefaultApphost, noLabel, noWorkspaceOpen, openCliInstallInstructions, selectDefaultLaunchApphost, yesLabel } from '../loc/strings'; +import { cliNotAvailable, cliFoundAtDefaultPath, dismissLabel, dontShowAgainLabel, doYouWantToSetDefaultApphost, noLabel, noWorkspaceOpen, openCliInstallInstructions, selectDefaultLaunchApphost, yesLabel } from '../loc/strings'; import path from 'path'; import { spawnCliProcess } from '../debugger/languages/cli'; import { AspireTerminalProvider } from './AspireTerminalProvider'; -import { ChildProcessWithoutNullStreams, execFile } from 'child_process'; +import { ChildProcessWithoutNullStreams } from 'child_process'; import { AspireSettingsFile 
} from './cliTypes'; import { extensionLogOutputChannel } from './logging'; import { EnvironmentVariables } from './environment'; -import { promisify } from 'util'; +import { resolveCliPath } from './cliPath'; /** * Common file patterns to exclude from workspace file searches. @@ -158,13 +158,14 @@ export async function checkForExistingAppHostPathInWorkspace(terminalProvider: A extensionLogOutputChannel.info('Searching for AppHost projects using CLI command: aspire extension get-apphosts'); let proc: ChildProcessWithoutNullStreams; + const cliPath = await terminalProvider.getAspireCliExecutablePath(); new Promise((resolve, reject) => { const args = ['extension', 'get-apphosts']; if (process.env[EnvironmentVariables.ASPIRE_CLI_STOP_ON_ENTRY] === 'true') { args.push('--cli-wait-for-debugger'); } - proc = spawnCliProcess(terminalProvider, terminalProvider.getAspireCliExecutablePath(), args, { + proc = spawnCliProcess(terminalProvider, cliPath, args, { errorCallback: error => { extensionLogOutputChannel.error(`Error executing get-apphosts command: ${error}`); reject(); @@ -268,44 +269,38 @@ async function promptToAddAppHostPathToSettingsFile(result: AppHostProjectSearch extensionLogOutputChannel.info(`Successfully set appHostPath to: ${appHostToUse} in ${settingsFileLocation.fsPath}`); } -const execFileAsync = promisify(execFile); - -let cliAvailableOnPath: boolean | undefined = undefined; - /** - * Checks if the Aspire CLI is available. If not, shows a message prompting to open Aspire CLI installation steps on the repo. - * @param cliPath The path to the Aspire CLI executable - * @returns true if CLI is available, false otherwise + * Checks if the Aspire CLI is available. If not found on PATH, it checks the default + * installation directory and updates the VS Code setting accordingly. + * + * If not available, shows a message prompting to open Aspire CLI installation steps. 
+ * @returns An object containing the CLI path to use and whether CLI is available */ -export async function checkCliAvailableOrRedirect(cliPath: string): Promise { - if (cliAvailableOnPath === true) { - // Assume, for now, that CLI availability does not change during the session if it was previously confirmed - return Promise.resolve(true); +export async function checkCliAvailableOrRedirect(): Promise<{ cliPath: string; available: boolean }> { + // Resolve CLI path fresh each time — settings or PATH may have changed + const result = await resolveCliPath(); + + if (result.available) { + // Show informational message if CLI was found at default path (not on PATH) + if (result.source === 'default-install') { + extensionLogOutputChannel.info(`Using Aspire CLI from default install location: ${result.cliPath}`); + vscode.window.showInformationMessage(cliFoundAtDefaultPath(result.cliPath)); + } + + return { cliPath: result.cliPath, available: true }; } - try { - // Remove surrounding quotes if present (both single and double quotes) - let cleanPath = cliPath.trim(); - if ((cleanPath.startsWith("'") && cleanPath.endsWith("'")) || - (cleanPath.startsWith('"') && cleanPath.endsWith('"'))) { - cleanPath = cleanPath.slice(1, -1); + // CLI not found - show error message with install instructions + vscode.window.showErrorMessage( + cliNotAvailable, + openCliInstallInstructions, + dismissLabel + ).then(selection => { + if (selection === openCliInstallInstructions) { + // Go to Aspire CLI installation instruction page in external browser + vscode.env.openExternal(vscode.Uri.parse('https://aspire.dev/get-started/install-cli/')); } - await execFileAsync(cleanPath, ['--version'], { timeout: 5000 }); - cliAvailableOnPath = true; - return true; - } catch (error) { - cliAvailableOnPath = false; - vscode.window.showErrorMessage( - cliNotAvailable, - openCliInstallInstructions, - dismissLabel - ).then(selection => { - if (selection === openCliInstallInstructions) { - // Go to Aspire CLI 
installation instruction page in external browser - vscode.env.openExternal(vscode.Uri.parse('https://aspire.dev/get-started/install-cli/')); - } - }); + }); - return false; - } + return { cliPath: result.cliPath, available: false }; } diff --git a/global.json b/global.json index 087505bbcae..39ccee4a4d2 100644 --- a/global.json +++ b/global.json @@ -33,8 +33,8 @@ "msbuild-sdks": { "Microsoft.Build.NoTargets": "3.7.0", "Microsoft.Build.Traversal": "3.2.0", - "Microsoft.DotNet.Arcade.Sdk": "11.0.0-beta.25610.3", - "Microsoft.DotNet.Helix.Sdk": "11.0.0-beta.25610.3", - "Microsoft.DotNet.SharedFramework.Sdk": "11.0.0-beta.25610.3" + "Microsoft.DotNet.Arcade.Sdk": "10.0.0-beta.26110.1", + "Microsoft.DotNet.Helix.Sdk": "10.0.0-beta.26110.1", + "Microsoft.DotNet.SharedFramework.Sdk": "10.0.0-beta.26110.1" } } diff --git a/src/Aspire.Hosting.Kubernetes/KubernetesEnvironmentContext.cs b/src/Aspire.Hosting.Kubernetes/KubernetesEnvironmentContext.cs index 738fb0d2ec0..cbd4163d04c 100644 --- a/src/Aspire.Hosting.Kubernetes/KubernetesEnvironmentContext.cs +++ b/src/Aspire.Hosting.Kubernetes/KubernetesEnvironmentContext.cs @@ -8,7 +8,7 @@ namespace Aspire.Hosting.Kubernetes; internal sealed class KubernetesEnvironmentContext(KubernetesEnvironmentResource environment, ILogger logger) { - private readonly Dictionary _kubernetesComponents = []; + private readonly Dictionary _kubernetesComponents = new(new ResourceNameComparer()); public ILogger Logger => logger; diff --git a/tests/Aspire.Cli.Tests/Commands/DeployCommandTests.cs b/tests/Aspire.Cli.Tests/Commands/DeployCommandTests.cs index fee7f2ccda8..e18fa6d9492 100644 --- a/tests/Aspire.Cli.Tests/Commands/DeployCommandTests.cs +++ b/tests/Aspire.Cli.Tests/Commands/DeployCommandTests.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
using System.Runtime.CompilerServices; @@ -9,7 +9,6 @@ using Aspire.Cli.Tests.TestServices; using Microsoft.Extensions.DependencyInjection; using Aspire.Cli.Utils; -using Aspire.TestUtilities; using Microsoft.AspNetCore.InternalTesting; namespace Aspire.Cli.Tests.Commands; @@ -269,7 +268,6 @@ public async Task DeployCommandSucceedsEndToEnd() } [Fact] - [QuarantinedTest("https://github.com/dotnet/aspire/issues/11217")] public async Task DeployCommandIncludesDeployFlagInArguments() { using var tempRepo = TemporaryWorkspace.Create(outputHelper); diff --git a/tests/Aspire.Cli.Tests/Commands/ExtensionInternalCommandTests.cs b/tests/Aspire.Cli.Tests/Commands/ExtensionInternalCommandTests.cs index 85dc01e97f3..e46e24a6026 100644 --- a/tests/Aspire.Cli.Tests/Commands/ExtensionInternalCommandTests.cs +++ b/tests/Aspire.Cli.Tests/Commands/ExtensionInternalCommandTests.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
using System.Text.Json; @@ -7,7 +7,6 @@ using Aspire.Cli.Projects; using Aspire.Cli.Tests.Utils; using Microsoft.Extensions.DependencyInjection; -using Aspire.TestUtilities; using Microsoft.AspNetCore.InternalTesting; namespace Aspire.Cli.Tests.Commands; @@ -55,7 +54,6 @@ public async Task ExtensionInternalCommand_WithNoSubcommand_ReturnsZero() } [Fact] - [QuarantinedTest("https://github.com/dotnet/aspire/issues/12304")] public async Task GetAppHostsCommand_WithSingleProject_ReturnsSuccessWithValidJson() { using var workspace = TemporaryWorkspace.Create(outputHelper); @@ -97,7 +95,6 @@ public async Task GetAppHostsCommand_WithSingleProject_ReturnsSuccessWithValidJs } [Fact] - [QuarantinedTest("https://github.com/dotnet/aspire/issues/12300")] public async Task GetAppHostsCommand_WithMultipleProjects_ReturnsSuccessWithAllCandidates() { using var workspace = TemporaryWorkspace.Create(outputHelper); diff --git a/tests/Aspire.Hosting.Azure.Tests/Snapshots/AzureDeployerTests.DeployAsync_WithMultipleComputeEnvironments_Works_step=diagnostics.verified.txt b/tests/Aspire.Hosting.Azure.Tests/Snapshots/AzureDeployerTests.DeployAsync_WithMultipleComputeEnvironments_Works_step=diagnostics.verified.txt index 0214785534c..04e5588f32e 100644 --- a/tests/Aspire.Hosting.Azure.Tests/Snapshots/AzureDeployerTests.DeployAsync_WithMultipleComputeEnvironments_Works_step=diagnostics.verified.txt +++ b/tests/Aspire.Hosting.Azure.Tests/Snapshots/AzureDeployerTests.DeployAsync_WithMultipleComputeEnvironments_Works_step=diagnostics.verified.txt @@ -29,26 +29,26 @@ Steps with no dependencies run first, followed by steps that depend on them. 13. login-to-acr-aca-env-acr 14. push-prereq 15. push-api-service - 16. update-api-service-provisionable-resource - 17. provision-api-service-website - 18. print-api-service-summary - 19. provision-aca-env - 20. provision-cache-containerapp - 21. print-cache-summary - 22. push-python-app - 23. provision-python-app-containerapp - 24. provision-storage - 25. 
provision-azure-bicep-resources - 26. print-dashboard-url-aas-env - 27. print-dashboard-url-aca-env - 28. print-python-app-summary - 29. deploy - 30. deploy-api-service - 31. deploy-cache - 32. deploy-python-app - 33. diagnostics - 34. publish-prereq - 35. publish-azure634f9 + 16. provision-api-service-website + 17. print-api-service-summary + 18. provision-aca-env + 19. provision-cache-containerapp + 20. print-cache-summary + 21. push-python-app + 22. provision-python-app-containerapp + 23. provision-storage + 24. provision-azure-bicep-resources + 25. print-dashboard-url-aas-env + 26. print-dashboard-url-aca-env + 27. print-python-app-summary + 28. deploy + 29. deploy-api-service + 30. deploy-cache + 31. deploy-python-app + 32. diagnostics + 33. publish-prereq + 34. publish-azure634f9 + 35. validate-appservice-config-aas-env 36. publish 37. publish-manifest 38. push @@ -182,7 +182,7 @@ Step: provision-aca-env-acr Step: provision-api-service-website Description: Provisions the Azure Bicep resource api-service-website using Azure infrastructure. - Dependencies: ✓ create-provisioning-context, ✓ provision-aas-env, ✓ push-api-service, ✓ update-api-service-provisionable-resource + Dependencies: ✓ create-provisioning-context, ✓ provision-aas-env, ✓ push-api-service Resource: api-service-website (AzureAppServiceWebSiteResource) Tags: provision-infra @@ -212,7 +212,7 @@ Step: provision-storage Step: publish Description: Aggregation step for all publish operations. All publish steps should be required by this step. - Dependencies: ✓ publish-azure634f9 + Dependencies: ✓ publish-azure634f9, ✓ validate-appservice-config-aas-env Step: publish-azure634f9 Description: Publishes the Azure environment configuration for azure634f9. 
@@ -245,10 +245,10 @@ Step: push-python-app Resource: python-app (ContainerResource) Tags: push-container-image -Step: update-api-service-provisionable-resource - Dependencies: ✓ create-provisioning-context - Resource: api-service-website (AzureAppServiceWebSiteResource) - Tags: update-website-provisionable-resource +Step: validate-appservice-config-aas-env + Description: Validates Azure App Service configuration for aas-env. + Dependencies: ✓ publish-prereq + Resource: aas-env (AzureAppServiceEnvironmentResource) Step: validate-azure-login Description: Validates Azure CLI authentication before deployment. @@ -309,13 +309,13 @@ If targeting 'create-provisioning-context': If targeting 'deploy': Direct dependencies: build-api-service, build-python-app, create-provisioning-context, print-api-service-summary, print-cache-summary, print-dashboard-url-aas-env, print-dashboard-url-aca-env, print-python-app-summary, provision-azure-bicep-resources, validate-azure-login - Total steps: 28 + Total steps: 27 Execution order: [0] process-parameters [1] build-prereq | deploy-prereq (parallel) [2] build-api-service | build-python-app | validate-azure-login (parallel) [3] create-provisioning-context - [4] provision-aas-env-acr | provision-aca-env-acr | provision-storage | update-api-service-provisionable-resource (parallel) + [4] provision-aas-env-acr | provision-aca-env-acr | provision-storage (parallel) [5] login-to-acr-aas-env-acr | login-to-acr-aca-env-acr | provision-aas-env | provision-aca-env (parallel) [6] provision-cache-containerapp | push-prereq (parallel) [7] print-cache-summary | push-api-service | push-python-app (parallel) @@ -326,13 +326,13 @@ If targeting 'deploy': If targeting 'deploy-api-service': Direct dependencies: print-api-service-summary - Total steps: 17 + Total steps: 16 Execution order: [0] process-parameters [1] build-prereq | deploy-prereq (parallel) [2] build-api-service | validate-azure-login (parallel) [3] create-provisioning-context - [4] 
provision-aas-env-acr | provision-aca-env-acr | update-api-service-provisionable-resource (parallel) + [4] provision-aas-env-acr | provision-aca-env-acr (parallel) [5] login-to-acr-aas-env-acr | login-to-acr-aca-env-acr | provision-aas-env (parallel) [6] push-prereq [7] push-api-service @@ -407,13 +407,13 @@ If targeting 'login-to-acr-aca-env-acr': If targeting 'print-api-service-summary': Direct dependencies: provision-api-service-website - Total steps: 16 + Total steps: 15 Execution order: [0] process-parameters [1] build-prereq | deploy-prereq (parallel) [2] build-api-service | validate-azure-login (parallel) [3] create-provisioning-context - [4] provision-aas-env-acr | provision-aca-env-acr | update-api-service-provisionable-resource (parallel) + [4] provision-aas-env-acr | provision-aca-env-acr (parallel) [5] login-to-acr-aas-env-acr | login-to-acr-aca-env-acr | provision-aas-env (parallel) [6] push-prereq [7] push-api-service @@ -435,13 +435,13 @@ If targeting 'print-cache-summary': If targeting 'print-dashboard-url-aas-env': Direct dependencies: provision-aas-env, provision-azure-bicep-resources - Total steps: 23 + Total steps: 22 Execution order: [0] process-parameters [1] build-prereq | deploy-prereq (parallel) [2] build-api-service | build-python-app | validate-azure-login (parallel) [3] create-provisioning-context - [4] provision-aas-env-acr | provision-aca-env-acr | provision-storage | update-api-service-provisionable-resource (parallel) + [4] provision-aas-env-acr | provision-aca-env-acr | provision-storage (parallel) [5] login-to-acr-aas-env-acr | login-to-acr-aca-env-acr | provision-aas-env | provision-aca-env (parallel) [6] provision-cache-containerapp | push-prereq (parallel) [7] push-api-service | push-python-app (parallel) @@ -451,13 +451,13 @@ If targeting 'print-dashboard-url-aas-env': If targeting 'print-dashboard-url-aca-env': Direct dependencies: provision-aca-env, provision-azure-bicep-resources - Total steps: 23 + Total steps: 22 Execution 
order: [0] process-parameters [1] build-prereq | deploy-prereq (parallel) [2] build-api-service | build-python-app | validate-azure-login (parallel) [3] create-provisioning-context - [4] provision-aas-env-acr | provision-aca-env-acr | provision-storage | update-api-service-provisionable-resource (parallel) + [4] provision-aas-env-acr | provision-aca-env-acr | provision-storage (parallel) [5] login-to-acr-aas-env-acr | login-to-acr-aca-env-acr | provision-aas-env | provision-aca-env (parallel) [6] provision-cache-containerapp | push-prereq (parallel) [7] push-api-service | push-python-app (parallel) @@ -529,14 +529,14 @@ If targeting 'provision-aca-env-acr': [4] provision-aca-env-acr If targeting 'provision-api-service-website': - Direct dependencies: create-provisioning-context, provision-aas-env, push-api-service, update-api-service-provisionable-resource - Total steps: 15 + Direct dependencies: create-provisioning-context, provision-aas-env, push-api-service + Total steps: 14 Execution order: [0] process-parameters [1] build-prereq | deploy-prereq (parallel) [2] build-api-service | validate-azure-login (parallel) [3] create-provisioning-context - [4] provision-aas-env-acr | provision-aca-env-acr | update-api-service-provisionable-resource (parallel) + [4] provision-aas-env-acr | provision-aca-env-acr (parallel) [5] login-to-acr-aas-env-acr | login-to-acr-aca-env-acr | provision-aas-env (parallel) [6] push-prereq [7] push-api-service @@ -544,13 +544,13 @@ If targeting 'provision-api-service-website': If targeting 'provision-azure-bicep-resources': Direct dependencies: create-provisioning-context, deploy-prereq, provision-aas-env, provision-aas-env-acr, provision-aca-env, provision-aca-env-acr, provision-api-service-website, provision-cache-containerapp, provision-python-app-containerapp, provision-storage - Total steps: 22 + Total steps: 21 Execution order: [0] process-parameters [1] build-prereq | deploy-prereq (parallel) [2] build-api-service | build-python-app 
| validate-azure-login (parallel) [3] create-provisioning-context - [4] provision-aas-env-acr | provision-aca-env-acr | provision-storage | update-api-service-provisionable-resource (parallel) + [4] provision-aas-env-acr | provision-aca-env-acr | provision-storage (parallel) [5] login-to-acr-aas-env-acr | login-to-acr-aca-env-acr | provision-aas-env | provision-aca-env (parallel) [6] provision-cache-containerapp | push-prereq (parallel) [7] push-api-service | push-python-app (parallel) @@ -594,12 +594,12 @@ If targeting 'provision-storage': [4] provision-storage If targeting 'publish': - Direct dependencies: publish-azure634f9 - Total steps: 4 + Direct dependencies: publish-azure634f9, validate-appservice-config-aas-env + Total steps: 5 Execution order: [0] process-parameters [1] publish-prereq - [2] publish-azure634f9 + [2] publish-azure634f9 | validate-appservice-config-aas-env (parallel) [3] publish If targeting 'publish-azure634f9': @@ -675,15 +675,13 @@ If targeting 'push-python-app': [6] push-prereq [7] push-python-app -If targeting 'update-api-service-provisionable-resource': - Direct dependencies: create-provisioning-context - Total steps: 5 +If targeting 'validate-appservice-config-aas-env': + Direct dependencies: publish-prereq + Total steps: 3 Execution order: [0] process-parameters - [1] deploy-prereq - [2] validate-azure-login - [3] create-provisioning-context - [4] update-api-service-provisionable-resource + [1] publish-prereq + [2] validate-appservice-config-aas-env If targeting 'validate-azure-login': Direct dependencies: deploy-prereq diff --git a/tests/Aspire.Hosting.Docker.Tests/DockerComposeTests.cs b/tests/Aspire.Hosting.Docker.Tests/DockerComposeTests.cs index 15034bb96c0..ddfe4aae3b4 100644 --- a/tests/Aspire.Hosting.Docker.Tests/DockerComposeTests.cs +++ b/tests/Aspire.Hosting.Docker.Tests/DockerComposeTests.cs @@ -652,7 +652,6 @@ public async Task PushImageToRegistry_WithLocalRegistry_OnlyTagsImage() } [Fact] - 
[QuarantinedTest("https://github.com/dotnet/aspire/issues/13878")] public async Task PushImageToRegistry_WithRemoteRegistry_PushesImage() { using var tempDir = new TestTempDirectory(); diff --git a/tests/Aspire.Hosting.Kubernetes.Tests/KubernetesPublisherTests.cs b/tests/Aspire.Hosting.Kubernetes.Tests/KubernetesPublisherTests.cs index a3da353c498..654b8cf29b4 100644 --- a/tests/Aspire.Hosting.Kubernetes.Tests/KubernetesPublisherTests.cs +++ b/tests/Aspire.Hosting.Kubernetes.Tests/KubernetesPublisherTests.cs @@ -407,6 +407,48 @@ public async Task KubernetesWithProjectResources() await settingsTask; } + [Fact] + public async Task KubernetesMapsPortsForBaitAndSwitchResources() + { + using var tempDir = new TestTempDirectory(); + var builder = TestDistributedApplicationBuilder.Create(DistributedApplicationOperation.Publish, tempDir.Path); + builder.AddKubernetesEnvironment("env"); + var api = builder.AddExecutable("api", "node", ".") + .PublishAsDockerFile() + .WithHttpEndpoint(env: "PORT"); + builder.AddContainer("gateway", "nginx") + .WithHttpEndpoint(targetPort: 8080) + .WithReference(api.GetEndpoint("http")); + var app = builder.Build(); + app.Run(); + // Assert + var expectedFiles = new[] + { + "Chart.yaml", + "values.yaml", + "templates/api/deployment.yaml", + "templates/api/service.yaml", + "templates/api/config.yaml", + "templates/gateway/deployment.yaml", + "templates/gateway/config.yaml" + }; + SettingsTask settingsTask = default!; + foreach (var expectedFile in expectedFiles) + { + var filePath = Path.Combine(tempDir.Path, expectedFile); + var fileExtension = Path.GetExtension(filePath)[1..]; + if (settingsTask is null) + { + settingsTask = Verify(File.ReadAllText(filePath), fileExtension); + } + else + { + settingsTask = settingsTask.AppendContentAsFile(File.ReadAllText(filePath), fileExtension); + } + } + await settingsTask; + } + private sealed class TestProject : IProjectMetadata { public string ProjectPath => "another-path"; diff --git 
a/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#00.verified.yaml b/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#00.verified.yaml new file mode 100644 index 00000000000..e4179697054 --- /dev/null +++ b/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#00.verified.yaml @@ -0,0 +1,11 @@ +apiVersion: "v2" +name: "aspire-hosting-tests" +version: "0.1.0" +kubeVersion: ">= 1.18.0-0" +description: "Aspire Helm Chart" +type: "application" +keywords: + - "aspire" + - "kubernetes" +appVersion: "0.1.0" +deprecated: false diff --git a/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#01.verified.yaml b/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#01.verified.yaml new file mode 100644 index 00000000000..9bb8e2495d4 --- /dev/null +++ b/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#01.verified.yaml @@ -0,0 +1,10 @@ +parameters: + api: + api_image: "api:latest" +secrets: {} +config: + api: + PORT: "8000" + gateway: + API_HTTP: "http://api-service:8000" + services__api__http__0: "http://api-service:8000" diff --git a/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#02.verified.yaml b/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#02.verified.yaml new file mode 100644 index 00000000000..7c0045b550b --- /dev/null +++ b/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#02.verified.yaml @@ -0,0 +1,40 @@ +--- +apiVersion: "apps/v1" +kind: "Deployment" +metadata: + 
name: "api-deployment" + labels: + app.kubernetes.io/name: "aspire-hosting-tests" + app.kubernetes.io/component: "api" + app.kubernetes.io/instance: "{{ .Release.Name }}" +spec: + template: + metadata: + labels: + app.kubernetes.io/name: "aspire-hosting-tests" + app.kubernetes.io/component: "api" + app.kubernetes.io/instance: "{{ .Release.Name }}" + spec: + containers: + - image: "{{ .Values.parameters.api.api_image }}" + name: "api" + envFrom: + - configMapRef: + name: "api-config" + ports: + - name: "http" + protocol: "TCP" + containerPort: 8000 + imagePullPolicy: "IfNotPresent" + selector: + matchLabels: + app.kubernetes.io/name: "aspire-hosting-tests" + app.kubernetes.io/component: "api" + app.kubernetes.io/instance: "{{ .Release.Name }}" + replicas: 1 + revisionHistoryLimit: 3 + strategy: + rollingUpdate: + maxSurge: 1 + maxUnavailable: 1 + type: "RollingUpdate" diff --git a/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#03.verified.yaml b/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#03.verified.yaml new file mode 100644 index 00000000000..a3bfbdbc5d2 --- /dev/null +++ b/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#03.verified.yaml @@ -0,0 +1,20 @@ +--- +apiVersion: "v1" +kind: "Service" +metadata: + name: "api-service" + labels: + app.kubernetes.io/name: "aspire-hosting-tests" + app.kubernetes.io/component: "api" + app.kubernetes.io/instance: "{{ .Release.Name }}" +spec: + type: "ClusterIP" + selector: + app.kubernetes.io/name: "aspire-hosting-tests" + app.kubernetes.io/component: "api" + app.kubernetes.io/instance: "{{ .Release.Name }}" + ports: + - name: "http" + protocol: "TCP" + port: 8000 + targetPort: 8000 diff --git 
a/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#04.verified.yaml b/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#04.verified.yaml new file mode 100644 index 00000000000..2b756089179 --- /dev/null +++ b/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#04.verified.yaml @@ -0,0 +1,11 @@ +--- +apiVersion: "v1" +kind: "ConfigMap" +metadata: + name: "api-config" + labels: + app.kubernetes.io/name: "aspire-hosting-tests" + app.kubernetes.io/component: "api" + app.kubernetes.io/instance: "{{ .Release.Name }}" +data: + PORT: "{{ .Values.config.api.PORT }}" diff --git a/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#05.verified.yaml b/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#05.verified.yaml new file mode 100644 index 00000000000..7abdfd9076b --- /dev/null +++ b/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#05.verified.yaml @@ -0,0 +1,40 @@ +--- +apiVersion: "apps/v1" +kind: "Deployment" +metadata: + name: "gateway-deployment" + labels: + app.kubernetes.io/name: "aspire-hosting-tests" + app.kubernetes.io/component: "gateway" + app.kubernetes.io/instance: "{{ .Release.Name }}" +spec: + template: + metadata: + labels: + app.kubernetes.io/name: "aspire-hosting-tests" + app.kubernetes.io/component: "gateway" + app.kubernetes.io/instance: "{{ .Release.Name }}" + spec: + containers: + - image: "nginx:latest" + name: "gateway" + envFrom: + - configMapRef: + name: "gateway-config" + ports: + - name: "http" + protocol: "TCP" + containerPort: 8080 + imagePullPolicy: "IfNotPresent" + selector: + matchLabels: + app.kubernetes.io/name: "aspire-hosting-tests" + 
app.kubernetes.io/component: "gateway" + app.kubernetes.io/instance: "{{ .Release.Name }}" + replicas: 1 + revisionHistoryLimit: 3 + strategy: + rollingUpdate: + maxSurge: 1 + maxUnavailable: 1 + type: "RollingUpdate" diff --git a/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#06.verified.yaml b/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#06.verified.yaml new file mode 100644 index 00000000000..190928c781d --- /dev/null +++ b/tests/Aspire.Hosting.Kubernetes.Tests/Snapshots/KubernetesPublisherTests.KubernetesMapsPortsForBaitAndSwitchResources#06.verified.yaml @@ -0,0 +1,12 @@ +--- +apiVersion: "v1" +kind: "ConfigMap" +metadata: + name: "gateway-config" + labels: + app.kubernetes.io/name: "aspire-hosting-tests" + app.kubernetes.io/component: "gateway" + app.kubernetes.io/instance: "{{ .Release.Name }}" +data: + API_HTTP: "{{ .Values.config.gateway.API_HTTP }}" + services__api__http__0: "{{ .Values.config.gateway.services__api__http__0 }}" diff --git a/tests/Aspire.Hosting.Tests/Pipelines/DistributedApplicationPipelineTests.cs b/tests/Aspire.Hosting.Tests/Pipelines/DistributedApplicationPipelineTests.cs index edfc9542a61..5af54367a6d 100644 --- a/tests/Aspire.Hosting.Tests/Pipelines/DistributedApplicationPipelineTests.cs +++ b/tests/Aspire.Hosting.Tests/Pipelines/DistributedApplicationPipelineTests.cs @@ -1,4 +1,4 @@ -// Licensed to the .NET Foundation under one or more agreements. +// Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. 
#pragma warning disable CS0618 // Type or member is obsolete @@ -13,7 +13,6 @@ using Aspire.Hosting.Pipelines; using Aspire.Hosting.Tests.Publishing; using Aspire.Hosting.Utils; -using Aspire.TestUtilities; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging.Abstractions; @@ -2027,7 +2026,6 @@ public async Task FilterStepsForExecution_WithRequiredBy_IncludesTransitiveDepen } [Fact] - [QuarantinedTest("https://github.com/dotnet/aspire/issues/13083")] public async Task ProcessParametersStep_ValidatesBehavior() { // Arrange diff --git a/tests/Aspire.Hosting.Tests/WithHttpCommandTests.cs b/tests/Aspire.Hosting.Tests/WithHttpCommandTests.cs index e0659ee6e79..daca5107af6 100644 --- a/tests/Aspire.Hosting.Tests/WithHttpCommandTests.cs +++ b/tests/Aspire.Hosting.Tests/WithHttpCommandTests.cs @@ -4,7 +4,6 @@ using System.Net; using Aspire.Hosting.Testing; using Aspire.Hosting.Utils; -using Aspire.TestUtilities; using Microsoft.AspNetCore.InternalTesting; using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Http.Resilience; @@ -401,7 +400,6 @@ public async Task WithHttpCommand_CallsGetResponseCallback_AfterSendingRequest() } [Fact] - [QuarantinedTest("https://github.com/dotnet/aspire/issues/8101")] public async Task WithHttpCommand_EnablesCommandOnceResourceIsRunning() { // Arrange diff --git a/tools/perf/Measure-StartupPerformance.ps1 b/tools/perf/Measure-StartupPerformance.ps1 new file mode 100644 index 00000000000..626adff10ed --- /dev/null +++ b/tools/perf/Measure-StartupPerformance.ps1 @@ -0,0 +1,678 @@ +<# +.SYNOPSIS + Measures .NET Aspire application startup performance by collecting ETW traces. + +.DESCRIPTION + This script runs an Aspire application, collects a performance trace + using dotnet-trace, and computes the startup time from AspireEventSource events. + The trace collection ends when the DcpModelCreationStop event is fired. 
+ +.PARAMETER ProjectPath + Path to the AppHost project (.csproj) to measure. Can be absolute or relative. + Defaults to the TestShop.AppHost project in the playground folder. + +.PARAMETER Iterations + Number of times to run the scenario and collect traces. Defaults to 1. + +.PARAMETER PreserveTraces + If specified, trace files are preserved after the run. By default, traces are + stored in a temporary folder and deleted after analysis. + +.PARAMETER TraceOutputDirectory + Directory where trace files will be saved when PreserveTraces is set. + Defaults to a 'traces' subdirectory in the script folder. + +.PARAMETER SkipBuild + If specified, skips building the project before running. + +.PARAMETER TraceDurationSeconds + Duration in seconds for the trace collection. Defaults to 60 (1 minute). + The value is automatically converted to the dd:hh:mm:ss format required by dotnet-trace. + +.PARAMETER PauseBetweenIterationsSeconds + Number of seconds to pause between iterations. Defaults to 15. + Set to 0 to disable the pause. + +.PARAMETER Verbose + If specified, shows detailed output during execution. 
+ +.EXAMPLE + .\Measure-StartupPerformance.ps1 + +.EXAMPLE + .\Measure-StartupPerformance.ps1 -Iterations 5 + +.EXAMPLE + .\Measure-StartupPerformance.ps1 -ProjectPath "C:\MyApp\MyApp.AppHost.csproj" -Iterations 3 + +.EXAMPLE + .\Measure-StartupPerformance.ps1 -Iterations 3 -PreserveTraces -TraceOutputDirectory "C:\traces" + +.EXAMPLE + .\Measure-StartupPerformance.ps1 -TraceDurationSeconds 120 + +.EXAMPLE + .\Measure-StartupPerformance.ps1 -Iterations 5 -PauseBetweenIterationsSeconds 30 + +.NOTES + Requires: + - PowerShell 7+ + - dotnet-trace global tool (dotnet tool install -g dotnet-trace) + - .NET SDK +#> + +[CmdletBinding()] +param( + [Parameter(Mandatory = $false)] + [string]$ProjectPath, + + [Parameter(Mandatory = $false)] + [ValidateRange(1, 100)] + [int]$Iterations = 1, + + [Parameter(Mandatory = $false)] + [switch]$PreserveTraces, + + [Parameter(Mandatory = $false)] + [string]$TraceOutputDirectory, + + [Parameter(Mandatory = $false)] + [switch]$SkipBuild, + + [Parameter(Mandatory = $false)] + [ValidateRange(1, 86400)] + [int]$TraceDurationSeconds = 60, + + [Parameter(Mandatory = $false)] + [ValidateRange(0, 3600)] + [int]$PauseBetweenIterationsSeconds = 45 +) + +$ErrorActionPreference = 'Stop' +Set-StrictMode -Version Latest + +# Constants +$EventSourceName = 'Microsoft-Aspire-Hosting' +$DcpModelCreationStartEventId = 17 +$DcpModelCreationStopEventId = 18 + +# Get repository root (script is in tools/perf) +$ScriptDir = $PSScriptRoot +$RepoRoot = (Resolve-Path (Join-Path $ScriptDir '..' 
'..')).Path + +# Resolve project path +if (-not $ProjectPath) { + # Default to TestShop.AppHost + $ProjectPath = Join-Path $RepoRoot 'playground' 'TestShop' 'TestShop.AppHost' 'TestShop.AppHost.csproj' +} +elseif (-not [System.IO.Path]::IsPathRooted($ProjectPath)) { + # Relative path - resolve from current directory + $ProjectPath = (Resolve-Path $ProjectPath -ErrorAction Stop).Path +} + +$AppHostProject = $ProjectPath +$AppHostDir = Split-Path $AppHostProject -Parent +$AppHostName = [System.IO.Path]::GetFileNameWithoutExtension($AppHostProject) + +# Determine output directory for traces - always use temp directory unless explicitly specified +if ($TraceOutputDirectory) { + $OutputDirectory = $TraceOutputDirectory +} +else { + # Always use a temp directory for traces + $OutputDirectory = Join-Path ([System.IO.Path]::GetTempPath()) "aspire-perf-$([System.Guid]::NewGuid().ToString('N').Substring(0, 8))" +} + +# Only delete temp directory if not preserving traces and no custom directory was specified +$ShouldCleanupDirectory = -not $PreserveTraces -and -not $TraceOutputDirectory + +# Ensure output directory exists +if (-not (Test-Path $OutputDirectory)) { + New-Item -ItemType Directory -Path $OutputDirectory -Force | Out-Null +} + +# Verify prerequisites +function Test-Prerequisites { + Write-Host "Checking prerequisites..." -ForegroundColor Cyan + + # Check dotnet-trace is installed + $dotnetTrace = Get-Command 'dotnet-trace' -ErrorAction SilentlyContinue + if (-not $dotnetTrace) { + throw "dotnet-trace is not installed. Install it with: dotnet tool install -g dotnet-trace" + } + Write-Verbose "dotnet-trace found at: $($dotnetTrace.Source)" + + # Check project exists + if (-not (Test-Path $AppHostProject)) { + throw "AppHost project not found at: $AppHostProject" + } + Write-Verbose "AppHost project found at: $AppHostProject" + + Write-Host "Prerequisites check passed." 
-ForegroundColor Green +} + +# Build the project +function Build-AppHost { + Write-Host "Building $AppHostName..." -ForegroundColor Cyan + + Push-Location $AppHostDir + try { + $buildOutput = & dotnet build -c Release --nologo 2>&1 + if ($LASTEXITCODE -ne 0) { + Write-Host ($buildOutput -join "`n") -ForegroundColor Red + throw "Failed to build $AppHostName" + } + Write-Verbose ($buildOutput -join "`n") + Write-Host "Build completed successfully." -ForegroundColor Green + } + finally { + Pop-Location + } +} + +# Run a single iteration of the performance test +function Invoke-PerformanceIteration { + param( + [int]$IterationNumber, + [string]$TraceOutputPath + ) + + Write-Host "`nIteration $IterationNumber" -ForegroundColor Yellow + Write-Host ("-" * 40) -ForegroundColor Yellow + + $nettracePath = "$TraceOutputPath.nettrace" + $appProcess = $null + $traceProcess = $null + + try { + # Find the compiled executable - we need the path to launch it + $exePath = $null + $dllPath = $null + + # Search in multiple possible output locations: + # 1. Arcade-style: artifacts/bin//Release// + # 2. 
Traditional: /bin/Release// + $searchPaths = @( + (Join-Path $RepoRoot 'artifacts' 'bin' $AppHostName 'Release'), + (Join-Path $AppHostDir 'bin' 'Release') + ) + + foreach ($basePath in $searchPaths) { + if (-not (Test-Path $basePath)) { + continue + } + + # Find TFM subdirectories (e.g., net8.0, net9.0, net10.0) + $tfmDirs = Get-ChildItem -Path $basePath -Directory -Filter 'net*' -ErrorAction SilentlyContinue + foreach ($tfmDir in $tfmDirs) { + $candidateExe = Join-Path $tfmDir.FullName "$AppHostName.exe" + $candidateDll = Join-Path $tfmDir.FullName "$AppHostName.dll" + + if (Test-Path $candidateExe) { + $exePath = $candidateExe + Write-Verbose "Found executable at: $exePath" + break + } + elseif (Test-Path $candidateDll) { + $dllPath = $candidateDll + Write-Verbose "Found DLL at: $dllPath" + break + } + } + + if ($exePath -or $dllPath) { + break + } + } + + if (-not $exePath -and -not $dllPath) { + $searchedPaths = $searchPaths -join "`n - " + throw "Could not find compiled executable or DLL. Searched in:`n - $searchedPaths`nPlease build the project first (without -SkipBuild)." 
+ } + + # Read launchSettings.json to get environment variables + $launchSettingsPath = Join-Path $AppHostDir 'Properties' 'launchSettings.json' + $envVars = @{} + if (Test-Path $launchSettingsPath) { + Write-Verbose "Reading launch settings from: $launchSettingsPath" + try { + # Read the file and remove JSON comments (// style) before parsing + # Only remove lines that start with // (after optional whitespace) to avoid breaking URLs like https:// + $jsonLines = Get-Content $launchSettingsPath + $filteredLines = $jsonLines | Where-Object { $_.Trim() -notmatch '^//' } + $jsonContent = $filteredLines -join "`n" + $launchSettings = $jsonContent | ConvertFrom-Json + + # Try to find a suitable profile (prefer 'http' for simplicity, then first available) + $profile = $null + if ($launchSettings.profiles.http) { + $profile = $launchSettings.profiles.http + Write-Verbose "Using 'http' launch profile" + } + elseif ($launchSettings.profiles.https) { + $profile = $launchSettings.profiles.https + Write-Verbose "Using 'https' launch profile" + } + else { + # Use first profile that has environmentVariables + foreach ($prop in $launchSettings.profiles.PSObject.Properties) { + if ($prop.Value.environmentVariables) { + $profile = $prop.Value + Write-Verbose "Using '$($prop.Name)' launch profile" + break + } + } + } + + if ($profile -and $profile.environmentVariables) { + foreach ($prop in $profile.environmentVariables.PSObject.Properties) { + $envVars[$prop.Name] = $prop.Value + Write-Verbose " Environment: $($prop.Name)=$($prop.Value)" + } + } + + # Use applicationUrl to set ASPNETCORE_URLS if not already set + if ($profile -and $profile.applicationUrl -and -not $envVars.ContainsKey('ASPNETCORE_URLS')) { + $envVars['ASPNETCORE_URLS'] = $profile.applicationUrl + Write-Verbose " Environment: ASPNETCORE_URLS=$($profile.applicationUrl) (from applicationUrl)" + } + } + catch { + Write-Warning "Failed to parse launchSettings.json: $_" + } + } + else { + Write-Verbose "No 
launchSettings.json found at: $launchSettingsPath" + } + + # Always ensure Development environment is set + if (-not $envVars.ContainsKey('DOTNET_ENVIRONMENT')) { + $envVars['DOTNET_ENVIRONMENT'] = 'Development' + } + if (-not $envVars.ContainsKey('ASPNETCORE_ENVIRONMENT')) { + $envVars['ASPNETCORE_ENVIRONMENT'] = 'Development' + } + + # Start the AppHost application as a separate process + Write-Host "Starting $AppHostName..." -ForegroundColor Cyan + + $appPsi = [System.Diagnostics.ProcessStartInfo]::new() + if ($exePath) { + $appPsi.FileName = $exePath + $appPsi.Arguments = '' + } + else { + $appPsi.FileName = 'dotnet' + $appPsi.Arguments = "`"$dllPath`"" + } + $appPsi.WorkingDirectory = $AppHostDir + $appPsi.UseShellExecute = $false + $appPsi.RedirectStandardOutput = $true + $appPsi.RedirectStandardError = $true + $appPsi.CreateNoWindow = $true + + # Set environment variables from launchSettings.json + foreach ($key in $envVars.Keys) { + $appPsi.Environment[$key] = $envVars[$key] + } + + $appProcess = [System.Diagnostics.Process]::Start($appPsi) + $appPid = $appProcess.Id + + Write-Verbose "$AppHostName started with PID: $appPid" + + # Give the process a moment to initialize before attaching + Start-Sleep -Milliseconds 200 + + # Verify the process is still running + if ($appProcess.HasExited) { + $stdout = $appProcess.StandardOutput.ReadToEnd() + $stderr = $appProcess.StandardError.ReadToEnd() + throw "Application exited immediately with code $($appProcess.ExitCode).`nStdOut: $stdout`nStdErr: $stderr" + } + + # Start dotnet-trace to attach to the running process + Write-Host "Attaching trace collection to PID $appPid..." 
-ForegroundColor Cyan + + # Use dotnet-trace with the EventSource provider + # Format: ProviderName:Keywords:Level + # Keywords=0xFFFFFFFF (all), Level=5 (Verbose) + $providers = "${EventSourceName}" + + # Convert TraceDurationSeconds to dd:hh:mm:ss format required by dotnet-trace + $days = [math]::Floor($TraceDurationSeconds / 86400) + $hours = [math]::Floor(($TraceDurationSeconds % 86400) / 3600) + $minutes = [math]::Floor(($TraceDurationSeconds % 3600) / 60) + $seconds = $TraceDurationSeconds % 60 + $traceDuration = '{0:00}:{1:00}:{2:00}:{3:00}' -f $days, $hours, $minutes, $seconds + + $traceArgs = @( + 'collect', + '--process-id', $appPid, + '--providers', $providers, + '--output', $nettracePath, + '--format', 'nettrace', + '--duration', $traceDuration, + '--buffersize', '8192' + ) + + Write-Verbose "dotnet-trace arguments: $($traceArgs -join ' ')" + + $tracePsi = [System.Diagnostics.ProcessStartInfo]::new() + $tracePsi.FileName = 'dotnet-trace' + $tracePsi.Arguments = $traceArgs -join ' ' + $tracePsi.WorkingDirectory = $AppHostDir + $tracePsi.UseShellExecute = $false + $tracePsi.RedirectStandardOutput = $true + $tracePsi.RedirectStandardError = $true + $tracePsi.CreateNoWindow = $true + + $traceProcess = [System.Diagnostics.Process]::Start($tracePsi) + + Write-Host "Collecting performance trace..." 
-ForegroundColor Cyan + + # Wait for trace to complete + $traceProcess.WaitForExit() + + # Read app process output (what was captured while trace was running) + # Use async read to avoid blocking - read whatever is available + $appStdout = "" + $appStderr = "" + if ($appProcess -and -not $appProcess.HasExited) { + # Process is still running, we can try to read available output + # Note: ReadToEnd would block, so we read what's available after stopping + } + + $traceOutput = $traceProcess.StandardOutput.ReadToEnd() + $traceError = $traceProcess.StandardError.ReadToEnd() + + if ($traceOutput) { Write-Verbose "dotnet-trace output: $traceOutput" } + if ($traceError) { Write-Verbose "dotnet-trace stderr: $traceError" } + + # Check if trace file was created despite any errors + # dotnet-trace may report errors during cleanup but the trace file is often still valid + if ($traceProcess.ExitCode -ne 0) { + if (Test-Path $nettracePath) { + Write-Warning "dotnet-trace exited with code $($traceProcess.ExitCode), but trace file was created. Attempting to analyze." + } + else { + Write-Warning "dotnet-trace exited with code $($traceProcess.ExitCode) and no trace file was created." + return $null + } + } + + Write-Host "Trace collection completed." 
-ForegroundColor Green + + return $nettracePath + } + finally { + # Clean up the application process and capture its output + if ($appProcess) { + # Read any remaining output before killing the process + $appStdout = "" + $appStderr = "" + try { + # Give a moment for any buffered output + Start-Sleep -Milliseconds 100 + + # We need to read asynchronously since the process may still be running + # Read what's available without blocking indefinitely + $stdoutTask = $appProcess.StandardOutput.ReadToEndAsync() + $stderrTask = $appProcess.StandardError.ReadToEndAsync() + + # Wait briefly for output + [System.Threading.Tasks.Task]::WaitAll(@($stdoutTask, $stderrTask), 1000) | Out-Null + + if ($stdoutTask.IsCompleted) { + $appStdout = $stdoutTask.Result + } + if ($stderrTask.IsCompleted) { + $appStderr = $stderrTask.Result + } + } + catch { + # Ignore errors reading output + } + + if ($appStdout) { + Write-Verbose "Application stdout:`n$appStdout" + } + if ($appStderr) { + Write-Verbose "Application stderr:`n$appStderr" + } + + if (-not $appProcess.HasExited) { + Write-Verbose "Stopping $AppHostName (PID: $($appProcess.Id))..." 
+ try { + # Kill the entire process tree (true); there is no graceful-stop API here + $appProcess.Kill($true) + $appProcess.WaitForExit(5000) | Out-Null + } + catch { + Write-Warning "Failed to stop application: $_" + } + } + $appProcess.Dispose() + } + + # Clean up trace process + if ($traceProcess) { + if (-not $traceProcess.HasExited) { + try { + $traceProcess.Kill() + $traceProcess.WaitForExit(2000) | Out-Null + } + catch { + # Ignore errors killing trace process + } + } + $traceProcess.Dispose() + } + } +} + +# Path to the trace analyzer tool +$TraceAnalyzerDir = Join-Path $ScriptDir 'TraceAnalyzer' +$TraceAnalyzerProject = Join-Path $TraceAnalyzerDir 'TraceAnalyzer.csproj' + +# Build the trace analyzer tool +function Build-TraceAnalyzer { + if (-not (Test-Path $TraceAnalyzerProject)) { + Write-Warning "TraceAnalyzer project not found at: $TraceAnalyzerProject" + return $false + } + + Write-Verbose "Building TraceAnalyzer tool..." + $buildOutput = & dotnet build $TraceAnalyzerProject -c Release --verbosity quiet 2>&1 + if ($LASTEXITCODE -ne 0) { + Write-Warning "Failed to build TraceAnalyzer: $buildOutput" + return $false + } + + Write-Verbose "TraceAnalyzer built successfully" + return $true +} + +# Parse nettrace file using the TraceAnalyzer tool +function Get-StartupTiming { + param( + [string]$TracePath + ) + + Write-Host "Analyzing trace: $TracePath" -ForegroundColor Cyan + + if (-not (Test-Path $TracePath)) { + Write-Warning "Trace file not found: $TracePath" + return $null + } + + try { + $output = & dotnet run --project $TraceAnalyzerProject -c Release --no-build -- $TracePath 2>&1 + if ($LASTEXITCODE -ne 0) { + Write-Warning "TraceAnalyzer failed: $output" + return $null + } + + $result = $output | Select-Object -Last 1 + if ($result -eq 'null') { + Write-Warning "Could not find DcpModelCreation events in the trace" + return $null + } + + $duration = [double]::Parse($result, [System.Globalization.CultureInfo]::InvariantCulture) + Write-Verbose "Calculated duration: $duration ms" + return 
$duration + } + catch { + Write-Warning "Error parsing trace: $_" + return $null + } +} + +# Main execution +function Main { + Write-Host "==================================================" -ForegroundColor Cyan + Write-Host " Aspire Startup Performance Measurement" -ForegroundColor Cyan + Write-Host "==================================================" -ForegroundColor Cyan + Write-Host "" + Write-Host "Project: $AppHostName" + Write-Host "Project Path: $AppHostProject" + Write-Host "Iterations: $Iterations" + Write-Host "Trace Duration: $TraceDurationSeconds seconds" + Write-Host "Pause Between Iterations: $PauseBetweenIterationsSeconds seconds" + Write-Host "Preserve Traces: $PreserveTraces" + if ($PreserveTraces -or $TraceOutputDirectory) { + Write-Host "Trace Directory: $OutputDirectory" + } + Write-Host "" + + Test-Prerequisites + + # Build the TraceAnalyzer tool for parsing traces + $traceAnalyzerAvailable = Build-TraceAnalyzer + + # Ensure output directory exists + if (-not (Test-Path $OutputDirectory)) { + New-Item -ItemType Directory -Path $OutputDirectory -Force | Out-Null + } + + if (-not $SkipBuild) { + Build-AppHost + } + else { + Write-Host "Skipping build (SkipBuild flag set)" -ForegroundColor Yellow + } + + $results = @() + $timestamp = Get-Date -Format 'yyyyMMdd_HHmmss' + + try { + for ($i = 1; $i -le $Iterations; $i++) { + $traceBaseName = "${AppHostName}_startup_${timestamp}_iter${i}" + $traceOutputPath = Join-Path $OutputDirectory $traceBaseName + + $tracePath = Invoke-PerformanceIteration -IterationNumber $i -TraceOutputPath $traceOutputPath + + if ($tracePath -and (Test-Path $tracePath)) { + $duration = $null + if ($traceAnalyzerAvailable) { + $duration = Get-StartupTiming -TracePath $tracePath + } + + if ($null -ne $duration) { + $results += [PSCustomObject]@{ + Iteration = $i + TracePath = $tracePath + StartupTimeMs = [math]::Round($duration, 2) + } + Write-Host "Startup time: $([math]::Round($duration, 2)) ms" -ForegroundColor Green + } + 
else { + $results += [PSCustomObject]@{ + Iteration = $i + TracePath = $tracePath + StartupTimeMs = $null + } + Write-Host "Trace collected: $tracePath" -ForegroundColor Green + } + } + else { + Write-Warning "No trace file generated for iteration $i" + } + + # Pause between iterations + if ($i -lt $Iterations -and $PauseBetweenIterationsSeconds -gt 0) { + Write-Verbose "Pausing for $PauseBetweenIterationsSeconds seconds before next iteration..." + Start-Sleep -Seconds $PauseBetweenIterationsSeconds + } + } + } + finally { + # Clean up temporary trace directory if not preserving traces + if ($ShouldCleanupDirectory -and (Test-Path $OutputDirectory)) { + Write-Verbose "Cleaning up temporary trace directory: $OutputDirectory" + Remove-Item -Path $OutputDirectory -Recurse -Force -ErrorAction SilentlyContinue + } + } + + # Summary + Write-Host "" + Write-Host "==================================================" -ForegroundColor Cyan + Write-Host " Results Summary" -ForegroundColor Cyan + Write-Host "==================================================" -ForegroundColor Cyan + + # Wrap in @() to ensure array even with single/null results + $validResults = @($results | Where-Object { $null -ne $_.StartupTimeMs }) + + if ($validResults.Count -gt 0) { + Write-Host "" + # Only show TracePath in summary if PreserveTraces is set + if ($PreserveTraces) { + $results | Format-Table -AutoSize + } + else { + $results | Select-Object Iteration, StartupTimeMs | Format-Table -AutoSize + } + + $times = @($validResults | ForEach-Object { $_.StartupTimeMs }) + $avg = ($times | Measure-Object -Average).Average + $min = ($times | Measure-Object -Minimum).Minimum + $max = ($times | Measure-Object -Maximum).Maximum + + Write-Host "" + Write-Host "Statistics:" -ForegroundColor Yellow + Write-Host " Successful iterations: $($validResults.Count) / $Iterations" + Write-Host " Minimum: $([math]::Round($min, 2)) ms" + Write-Host " Maximum: $([math]::Round($max, 2)) ms" + Write-Host " Average: 
$([math]::Round($avg, 2)) ms" + + if ($validResults.Count -gt 1) { + $stdDev = [math]::Sqrt(($times | ForEach-Object { [math]::Pow($_ - $avg, 2) } | Measure-Object -Average).Average) + Write-Host " Std Dev: $([math]::Round($stdDev, 2)) ms" + } + + if ($PreserveTraces) { + Write-Host "" + Write-Host "Trace files saved to: $OutputDirectory" -ForegroundColor Cyan + } + } + elseif ($results.Count -gt 0) { + Write-Host "" + Write-Host "Collected $($results.Count) trace(s) but could not extract timing." -ForegroundColor Yellow + if ($PreserveTraces) { + Write-Host "" + Write-Host "Trace files saved to: $OutputDirectory" -ForegroundColor Cyan + $results | Select-Object Iteration, TracePath | Format-Table -AutoSize + Write-Host "" + Write-Host "Open traces in PerfView or Visual Studio to analyze startup timing." -ForegroundColor Yellow + } + } + else { + Write-Warning "No traces were collected." + } + + return $results +} + +# Run the script +Main diff --git a/tools/perf/TraceAnalyzer/Program.cs b/tools/perf/TraceAnalyzer/Program.cs new file mode 100644 index 00000000000..76ffe45d44d --- /dev/null +++ b/tools/perf/TraceAnalyzer/Program.cs @@ -0,0 +1,80 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +// Tool to analyze .nettrace files and extract Aspire startup timing information. +// Usage: dotnet run -- +// Output: Prints the startup duration in milliseconds to stdout, or "null" if events not found. 
+ +using Microsoft.Diagnostics.Tracing; + +if (args.Length == 0) +{ + Console.Error.WriteLine("Usage: TraceAnalyzer <path-to-nettrace-file>"); + return 1; +} + +var tracePath = args[0]; + +if (!File.Exists(tracePath)) +{ + Console.Error.WriteLine($"Error: File not found: {tracePath}"); + return 1; +} + +// Event IDs from AspireEventSource +const int DcpModelCreationStartEventId = 17; +const int DcpModelCreationStopEventId = 18; + +const string AspireHostingProviderName = "Microsoft-Aspire-Hosting"; + +try +{ + double? startTime = null; + double? stopTime = null; + + using (var source = new EventPipeEventSource(tracePath)) + { + source.Dynamic.AddCallbackForProviderEvents((string pName, string eName) => + { + if (pName != AspireHostingProviderName) + { + return EventFilterResponse.RejectProvider; + } + if (eName == null || eName.StartsWith("DcpModelCreation", StringComparison.Ordinal)) + { + return EventFilterResponse.AcceptEvent; + } + return EventFilterResponse.RejectEvent; + }, + (TraceEvent traceEvent) => + { + if ((int)traceEvent.ID == DcpModelCreationStartEventId) + { + startTime = traceEvent.TimeStampRelativeMSec; + } + else if ((int)traceEvent.ID == DcpModelCreationStopEventId) + { + stopTime = traceEvent.TimeStampRelativeMSec; + } + }); + + source.Process(); + } + + if (startTime.HasValue && stopTime.HasValue) + { + var duration = stopTime.Value - startTime.Value; + Console.WriteLine(duration.ToString("F2", System.Globalization.CultureInfo.InvariantCulture)); + return 0; + } + else + { + Console.WriteLine("null"); + return 0; + } +} +catch (Exception ex) +{ + Console.Error.WriteLine($"Error parsing trace: {ex.Message}"); + return 1; +} diff --git a/tools/perf/TraceAnalyzer/TraceAnalyzer.csproj b/tools/perf/TraceAnalyzer/TraceAnalyzer.csproj new file mode 100644 index 00000000000..f984521fbc3 --- /dev/null +++ b/tools/perf/TraceAnalyzer/TraceAnalyzer.csproj @@ -0,0 +1,16 @@ + + + + Exe + net8.0 + enable + enable + + false + + + + + + +