Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
118 changes: 118 additions & 0 deletions .github/workflows/ai-triage.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
# AI-powered triage for newly opened issues and newly created discussions.
#
# Required secrets:
#   - TRIAGE_BOT_REPO_TOKEN: PAT used to clone the private triage-bot repo
#   - GH_ACTIONS_PR_WRITE: PAT the bot uses to write to issues/discussions
#   - OPENAI__APIKEY / AZURE_OPENAI_*: LLM credentials
# Required variables:
#   - TRIAGE_BOT_REPO: private repo containing the triage-bot code
name: AI Issue Triage

on:
  issues:
    types: [opened]
  discussion:
    types: [created]
  workflow_dispatch:
    inputs:
      reference:
        description: 'Reference (e.g., "issue:123" or "discussion:456")'
        required: true
        type: string
      live_mode:
        description: 'Enable live mode'
        required: false
        type: boolean
        default: false

# One run per issue/discussion/reference; queued runs are not cancelled.
concurrency:
  group: triage-${{ github.event.issue.number || github.event.discussion.number || github.event.inputs.reference }}
  cancel-in-progress: false

permissions:
  issues: write
  discussions: write
  contents: read

jobs:
  # Gate: refuse bots/forks and validate the triage reference before the
  # expensive job runs.
  security-check:
    runs-on: ubuntu-latest
    outputs:
      should_run: ${{ steps.check.outputs.should_run }}
      reference: ${{ steps.check.outputs.reference }}
    steps:
      - name: Validate
        id: check
        env:
          # Route the untrusted workflow_dispatch input through the
          # environment instead of interpolating it into the script body —
          # direct ${{ }} interpolation of user input into `run:` is a
          # shell-injection vector.
          INPUT_REFERENCE: ${{ github.event.inputs.reference }}
        run: |
          # Skip bot actors and forked repositories.
          if [[ "${{ github.actor }}" == *"[bot]"* ]] || [[ "${{ github.event.repository.fork }}" == "true" ]]; then
            echo "should_run=false" >> "$GITHUB_OUTPUT"
            exit 0
          fi

          # Build the reference from the triggering event. Issue/discussion
          # numbers are server-supplied integers and safe to interpolate.
          if [[ "${{ github.event_name }}" == "issues" ]]; then
            REF="issue:${{ github.event.issue.number }}"
          elif [[ "${{ github.event_name }}" == "discussion" ]]; then
            REF="discussion:${{ github.event.discussion.number }}"
          elif [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
            REF="$INPUT_REFERENCE"
          else
            echo "should_run=false" >> "$GITHUB_OUTPUT"
            exit 0
          fi

          # Accept only well-formed references; fail the job otherwise.
          if ! [[ "$REF" =~ ^(issue|discussion):[0-9]+$ ]]; then
            echo "should_run=false" >> "$GITHUB_OUTPUT"
            exit 1
          fi

          echo "should_run=true" >> "$GITHUB_OUTPUT"
          echo "reference=$REF" >> "$GITHUB_OUTPUT"

  triage:
    needs: security-check
    if: needs.security-check.outputs.should_run == 'true'
    runs-on: ubuntu-latest
    timeout-minutes: 10
    env:
      UV_CACHE_DIR: /tmp/.uv-cache
      TRIAGE_LIVE: ${{ github.event.inputs.live_mode || 'false' }}

    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            python
            .github/actions
          sparse-checkout-cone-mode: false

      - name: Setup
        run: git clone https://${{ secrets.TRIAGE_BOT_REPO_TOKEN }}@github.com/${{ vars.TRIAGE_BOT_REPO }}.git triage-bot
        env:
          GIT_TERMINAL_PROMPT: "0"

      - name: Set up uv
        uses: astral-sh/setup-uv@v6
        with:
          enable-cache: true
          cache-dependency-glob: "python/**/uv.lock"

      - name: Install
        working-directory: python
        run: uv sync --package agent-framework-core --extra openai

      - name: Pull GitHub MCP server image
        run: docker pull ghcr.io/github/github-mcp-server

      - name: Run
        working-directory: python
        env:
          GITHUB_PERSONAL_ACCESS_TOKEN: ${{ secrets.GH_ACTIONS_PR_WRITE }}
          OPENAI_API_KEY: ${{ secrets.OPENAI__APIKEY }}
          AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }}
          AZURE_OPENAI_API_VERSION: ${{ secrets.AZURE_OPENAI_API_VERSION }}
          TRIAGE_REFERENCE: ${{ needs.security-check.outputs.reference }}
          TRIAGE_BOT_PATH: ${{ github.workspace }}/triage-bot
        run: |
          # Belt-and-braces masking on top of GitHub's built-in secret masking.
          echo "::add-mask::$GITHUB_PERSONAL_ACCESS_TOKEN"
          echo "::add-mask::$OPENAI_API_KEY"
          uv run python "$TRIAGE_BOT_PATH/run.py"

      - name: Summary
        if: always()
        run: |
          # `reference` was regex-validated above, so it is safe to interpolate.
          echo "## Triage Summary" >> "$GITHUB_STEP_SUMMARY"
          echo "- **Reference:** ${{ needs.security-check.outputs.reference }}" >> "$GITHUB_STEP_SUMMARY"
          echo "- **Status:** ${{ job.status }}" >> "$GITHUB_STEP_SUMMARY"
158 changes: 158 additions & 0 deletions .github/workflows/docs-update.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,158 @@
# Example GitHub Action Workflow for Docs Update Bot
#
# Place this file in the PUBLIC repo at:
#   .github/workflows/docs-update.yml
#
# Required Secrets:
#   - DOCS_BOT_REPO_TOKEN: PAT with access to clone the private docs-bot repo
#   - GH_ACTIONS_PR_WRITE: PAT with write access to the docs repo
#   - OPENAI__APIKEY: OpenAI API key (or use Azure OpenAI secrets)
#
# Required Variables:
#   - DOCS_BOT_REPO: The private repo containing the docs-bot code (e.g., "your-org/docs-bot")

name: Docs Update on PR Merge

on:
  pull_request:
    types: [closed]
    branches: [main]

  # Allow manual trigger for testing
  workflow_dispatch:
    inputs:
      pr_number:
        description: 'PR number to process'
        required: true
        type: number

concurrency:
  group: docs-update-${{ github.event.pull_request.number || github.event.inputs.pr_number }}
  cancel-in-progress: false

permissions:
  contents: read
  pull-requests: read

jobs:
  # Security check - skip for bots and forks
  security-check:
    runs-on: ubuntu-latest
    outputs:
      should_run: ${{ steps.check.outputs.should_run }}
      pr_number: ${{ steps.check.outputs.pr_number }}
      pr_title: ${{ steps.check.outputs.pr_title }}
      pr_url: ${{ steps.check.outputs.pr_url }}
      merge_sha: ${{ steps.check.outputs.merge_sha }}
    steps:
      - name: Validate
        id: check
        env:
          # PR titles are attacker-controlled text; never interpolate them
          # into the script body with ${{ }} (shell injection). Expose them
          # as environment variables and expand them in quoted shell vars.
          PR_TITLE: ${{ github.event.pull_request.title }}
          INPUT_PR_NUMBER: ${{ github.event.inputs.pr_number }}
        run: |
          # Skip for bots and forks
          if [[ "${{ github.actor }}" == *"[bot]"* ]] || [[ "${{ github.event.repository.fork }}" == "true" ]]; then
            echo "should_run=false" >> "$GITHUB_OUTPUT"
            exit 0
          fi

          # For PR merge events
          if [[ "${{ github.event_name }}" == "pull_request" ]]; then
            # Only run for merged PRs (closed-but-unmerged PRs are skipped)
            if [[ "${{ github.event.pull_request.merged }}" != "true" ]]; then
              echo "should_run=false" >> "$GITHUB_OUTPUT"
              exit 0
            fi

            echo "pr_number=${{ github.event.pull_request.number }}" >> "$GITHUB_OUTPUT"
            echo "pr_title=$PR_TITLE" >> "$GITHUB_OUTPUT"
            echo "pr_url=${{ github.event.pull_request.html_url }}" >> "$GITHUB_OUTPUT"
            echo "merge_sha=${{ github.event.pull_request.merge_commit_sha }}" >> "$GITHUB_OUTPUT"

          # For manual dispatch
          elif [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
            # `type: number` inputs are numeric, but validate defensively
            # before echoing into outputs and URLs.
            if ! [[ "$INPUT_PR_NUMBER" =~ ^[0-9]+$ ]]; then
              echo "should_run=false" >> "$GITHUB_OUTPUT"
              exit 1
            fi
            echo "pr_number=$INPUT_PR_NUMBER" >> "$GITHUB_OUTPUT"
            # For manual dispatch, we need to fetch PR details
            echo "pr_title=Manual trigger for PR #$INPUT_PR_NUMBER" >> "$GITHUB_OUTPUT"
            echo "pr_url=${{ github.server_url }}/${{ github.repository }}/pull/$INPUT_PR_NUMBER" >> "$GITHUB_OUTPUT"
            echo "merge_sha=" >> "$GITHUB_OUTPUT"

          else
            echo "should_run=false" >> "$GITHUB_OUTPUT"
            exit 0
          fi

          echo "should_run=true" >> "$GITHUB_OUTPUT"

  docs-update:
    needs: security-check
    if: needs.security-check.outputs.should_run == 'true'
    runs-on: ubuntu-latest
    timeout-minutes: 15
    env:
      UV_CACHE_DIR: /tmp/.uv-cache

    steps:
      - name: Checkout source repo
        uses: actions/checkout@v4
        with:
          sparse-checkout: |
            python
          sparse-checkout-cone-mode: false

      - name: Clone docs-bot
        run: git clone https://${{ secrets.DOCS_BOT_REPO_TOKEN }}@github.com/${{ vars.DOCS_BOT_REPO }}.git docs-bot
        env:
          GIT_TERMINAL_PROMPT: "0"

      - name: Set up uv
        uses: astral-sh/setup-uv@v6
        with:
          enable-cache: true
          cache-dependency-glob: "python/**/uv.lock"

      - name: Install dependencies
        working-directory: python
        run: uv sync --package agent-framework-core --extra openai

      - name: Pull GitHub MCP server image
        run: docker pull ghcr.io/github/github-mcp-server

      - name: Run docs-bot
        working-directory: python
        env:
          # GitHub tokens
          GITHUB_PERSONAL_ACCESS_TOKEN: ${{ secrets.GH_ACTIONS_PR_WRITE }}

          # LLM configuration
          OPENAI_API_KEY: ${{ secrets.OPENAI__APIKEY }}
          # Uncomment for Azure OpenAI:
          # AZURE_OPENAI_ENDPOINT: ${{ secrets.AZURE_OPENAI_ENDPOINT }}
          # AZURE_OPENAI_API_VERSION: ${{ secrets.AZURE_OPENAI_API_VERSION }}

          # PR information
          PR_NUMBER: ${{ needs.security-check.outputs.pr_number }}
          PR_TITLE: ${{ needs.security-check.outputs.pr_title }}
          PR_URL: ${{ needs.security-check.outputs.pr_url }}
          MERGE_SHA: ${{ needs.security-check.outputs.merge_sha }}

          # Repository configuration
          SOURCE_REPO: ${{ github.repository }}
          DOCS_REPO: microsoftdocs/semantic-kernel-pr

          # Workflow settings
          CONFIDENCE_THRESHOLD_HIGH: "0.7"
          CONFIDENCE_THRESHOLD_LOW: "0.4"

          # Path to docs-bot
          DOCS_BOT_PATH: ${{ github.workspace }}/docs-bot
        run: |
          echo "::add-mask::$GITHUB_PERSONAL_ACCESS_TOKEN"
          echo "::add-mask::$OPENAI_API_KEY"
          uv run python "$DOCS_BOT_PATH/run.py"

      - name: Summary
        if: always()
        env:
          # PR title is untrusted — expand via env, not ${{ }} in the script.
          PR_TITLE: ${{ needs.security-check.outputs.pr_title }}
        run: |
          echo "## Docs Update Summary" >> "$GITHUB_STEP_SUMMARY"
          echo "- **PR Number:** ${{ needs.security-check.outputs.pr_number }}" >> "$GITHUB_STEP_SUMMARY"
          echo "- **PR Title:** $PR_TITLE" >> "$GITHUB_STEP_SUMMARY"
          echo "- **Status:** ${{ job.status }}" >> "$GITHUB_STEP_SUMMARY"
30 changes: 23 additions & 7 deletions python/packages/core/agent_framework/_workflows/_executor.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import inspect
import logging
import types
import typing
from collections.abc import Awaitable, Callable
from typing import Any, TypeVar, overload

Expand Down Expand Up @@ -717,25 +718,40 @@ def _validate_handler_signature(
if len(params) != expected_counts:
raise ValueError(f"Handler {func.__name__} must have {param_description}. Got {len(params)} parameters.")

type_hints: dict[str, Any] = {}
try:
try:
type_hints = typing.get_type_hints(
Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

typing.get_type_hints() evaluates annotations and may execute code via forward refs/globalns; since handlers may be user-supplied, avoid evaluating unless necessary and/or provide a safe fallback (e.g., use raw annotations when resolution fails, or resolve only specific required parameters).

Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Automated status: round 1. Accepted for implementation in the next remediation pass.

func,
Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Correctness: eager get_type_hints() runs even when ctx/message annotations should be optional due to explicit @handler(input=..., output=...) types; unresolved ctx forward refs (or problematic return annotations) can raise before reaching the skip/bypass logic. Resolve hints conditionally/only for required params.

Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Automated status: round 1. Accepted for implementation in the next remediation pass.

globalns=getattr(func, "__globals__", None),
include_extras=True,
)
except TypeError:
type_hints = typing.get_type_hints(func, globalns=getattr(func, "__globals__", None))
except Exception as exc:
raise ValueError(f"Handler {func.__name__} has unresolved type annotations: {exc}") from exc
Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Behavior change: unresolved annotations now raise ValueError—add a regression test that intentionally hits this path (e.g., unresolved forward ref) so failures are explicit and stable.

Copy link
Copy Markdown
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Automated status: round 1. Accepted for implementation in the next remediation pass.


# Check message parameter has type annotation (unless skipped)
message_param = params[1]
if not skip_message_annotation and message_param.annotation == inspect.Parameter.empty:
message_ann = type_hints.get(message_param.name, message_param.annotation)
if not skip_message_annotation and message_ann == inspect.Parameter.empty:
raise ValueError(f"Handler {func.__name__} must have a type annotation for the message parameter")

# Validate ctx parameter is WorkflowContext and extract type args
ctx_param = params[2]
if skip_message_annotation and ctx_param.annotation == inspect.Parameter.empty:
ctx_ann = type_hints.get(ctx_param.name, ctx_param.annotation)
if skip_message_annotation and ctx_ann == inspect.Parameter.empty:
# When explicit types are provided via @handler(input=..., output=...),
# the ctx parameter doesn't need a type annotation - types come from the decorator.
output_types: list[type[Any] | types.UnionType] = []
workflow_output_types: list[type[Any] | types.UnionType] = []
output_types = []
workflow_output_types = []
else:
output_types, workflow_output_types = validate_workflow_context_annotation(
ctx_param.annotation, f"parameter '{ctx_param.name}'", "Handler"
ctx_ann, f"parameter '{ctx_param.name}'", "Handler"
)

message_type = message_param.annotation if message_param.annotation != inspect.Parameter.empty else None
ctx_annotation = ctx_param.annotation
message_type = message_ann if message_ann != inspect.Parameter.empty else None
ctx_annotation = ctx_ann

return message_type, ctx_annotation, output_types, workflow_output_types

Expand Down
39 changes: 39 additions & 0 deletions python/packages/core/tests/workflow/test_executor_future.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
# Copyright (c) Microsoft. All rights reserved.

from __future__ import annotations

from dataclasses import dataclass

from agent_framework import Executor, WorkflowContext, handler


# Minimal message payload used to exercise handler annotation resolution
# under `from __future__ import annotations` (string annotations).
@dataclass
class FutureMessage:
    value: str


# Payload type expected as the handler's message-output type argument.
@dataclass
class FutureOutput:
    value: int


# Payload type expected as the handler's workflow-output type argument.
@dataclass
class FutureWorkflowOutput:
    value: bool


class TestExecutorFutureAnnotations:
    """Regression tests for handler registration when the defining module
    uses ``from __future__ import annotations`` (PEP 563 string annotations).
    """

    def test_executor_handler_future_annotations(self) -> None:
        """A handler whose annotations are strings must still have its
        message, output, and workflow-output types resolved at registration.
        """

        class FutureExecutor(Executor):
            @handler
            async def handle(
                self, message: FutureMessage, ctx: WorkflowContext[FutureOutput, FutureWorkflowOutput]
            ) -> None:
                pass

        executor = FutureExecutor(id="future")
        # NOTE(review): asserting on the private `_handler_specs` attribute is
        # brittle — prefer asserting via public registration/dispatch behavior
        # if the framework exposes one. Kept here to pin the resolved types.
        spec = executor._handler_specs[0]

        assert spec["message_type"] is FutureMessage
        assert spec["output_types"] == [FutureOutput]
        assert spec["workflow_output_types"] == [FutureWorkflowOutput]