diff --git a/.github/scripts/algolia.py b/.github/scripts/algolia.py
deleted file mode 100644
index 5071ea58006..00000000000
--- a/.github/scripts/algolia.py
+++ /dev/null
@@ -1,118 +0,0 @@
-import os
-from re import S
-import sys
-import json
-from bs4 import BeautifulSoup
-from algoliasearch.search_client import SearchClient
-
-url = "docs.dapr.io"
-if len(sys.argv) > 1:
- starting_directory = os.path.join(os.getcwd(), str(sys.argv[1]))
-else:
- starting_directory = os.getcwd()
-
-ALGOLIA_APP_ID = os.getenv('ALGOLIA_APP_ID')
-ALGOLIA_API_KEY = os.getenv('ALGOLIA_API_WRITE_KEY')
-ALGOLIA_INDEX_NAME = os.getenv('ALGOLIA_INDEX_NAME')
-
-client = SearchClient.create(ALGOLIA_APP_ID, ALGOLIA_API_KEY)
-index = client.init_index(ALGOLIA_INDEX_NAME)
-
-excluded_files = [
- "404.html",
-]
-
-exluded_directories = [
- "zh-hans",
-]
-
-rankings = {
- "Getting started": 0,
- "Concepts": 100,
- "Developing applications": 200,
- "Operations": 300,
- "Reference": 400,
- "Contributing": 500,
- "Home": 600
-}
-
-def scan_directory(directory: str, pages: list):
- if os.path.basename(directory) in exluded_directories:
- print(f'Skipping directory: {directory}')
- return
- for file in os.listdir(directory):
- path = os.path.join(directory, file)
- if os.path.isfile(path):
- if file.endswith(".html") and file not in excluded_files:
- if '' not in open(path, encoding="utf8").read():
- print(f'Indexing: {path}')
- pages.append(path)
- else:
- print(f'Skipping hidden page: {path}')
- else:
- scan_directory(path, pages)
-
-def parse_file(path: str):
- data = {}
- data["hierarchy"] = {}
- data["rank"] = 999
- data["subrank"] = 99
- data["type"] = "lvl2"
- data["lvl0"] = ""
- data["lvl1"] = ""
- data["lvl2"] = ""
- data["lvl3"] = ""
- text = ""
- subrank = 0
- with open(path, "r", errors='ignore') as file:
- content = file.read()
- soup = BeautifulSoup(content, "html.parser")
- for meta in soup.find_all("meta"):
- if meta.get("name") == "description":
- data["lvl2"] = meta.get("content")
- data["hierarchy"]["lvl1"] = meta.get("content")
- elif meta.get("property") == "og:title":
- data["lvl0"] = meta.get("content")
- data["hierarchy"]["lvl0"] = meta.get("content")
- data["hierarchy"]["lvl2"] = meta.get("content")
- elif meta.get("property") == "og:url":
- data["url"] = meta.get("content")
- data["path"] = meta.get("content").split(url)[1]
- data["objectID"] = meta.get("content").split(url)[1]
- breadcrumbs = soup.find_all("li", class_="breadcrumb-item")
- try:
- subrank = len(breadcrumbs)
- data["subrank"] = subrank
- except:
- subrank = 99
- data["subrank"] = 99
- for bc in breadcrumbs:
- section = bc.text.strip()
- data["lvl1"] = section
- data["hierarchy"]["lvl0"] = section
- try:
- data["rank"] = rankings[section] + subrank
- except:
- print(f"Rank not found for section {section}")
- data["rank"] = 998
- break
- for p in soup.find_all("p"):
- if p.text != "":
- text = text + p.text
- data["text"] = text
- return data
-
-def index_payload(payload):
- res = index.replace_all_objects(payload)
- res.wait()
-
-
-if __name__ == "__main__":
- pages = []
- payload = []
- scan_directory(starting_directory, pages)
- for page in pages:
- data = parse_file(page)
- if "objectID" in data:
- payload.append(data)
- index_payload(payload)
diff --git a/.github/workflows/dapr-maintainer-merge.yml b/.github/workflows/dapr-maintainer-merge.yml
new file mode 100644
index 00000000000..e6cfe77d66e
--- /dev/null
+++ b/.github/workflows/dapr-maintainer-merge.yml
@@ -0,0 +1,196 @@
+
+name: Auto-approve & merge SDK docs PRs (per directory/team)
+
+# Run on PRs (including forks) but act with repo-level permissions.
+# We DO NOT check out PR code; we only read PR metadata via the API.
+on:
+ pull_request_target:
+ types: [opened, synchronize, reopened, ready_for_review, edited]
+
+# Token scopes needed: label, review, and merge.
+permissions:
+ contents: write # required to merge
+ pull-requests: write # required to approve/merge
+ issues: write # required to create/add labels
+
+jobs:
+ sdk-docs-automerge:
+ runs-on: ubuntu-latest
+ # Ignore drafts
+ if: ${{ github.event.pull_request.draft == false }}
+
+ steps:
+ - name: Evaluate PR for SDK docs eligibility & add label
+ id: check
+ uses: actions/github-script@v7
+ with:
+ # Optional: override org/merge method/colors via env
+ # env:
+ # ORG: dapr
+ # MERGE_METHOD: squash
+ # LABEL_COLOR_DEFAULT: '6A9286'
+ script: |
+ const { owner, repo } = context.repo;
+ const pr = context.payload.pull_request;
+ const number = pr.number;
+
+ // --- Mapping: directory prefixes -> team slug + label ---
+ // Each entry can have multiple prefixes for the same SDK if needed.
+ const MAPPINGS = [
+ { label: 'automerge: dotnet', teamSlug: 'maintainers-dotnet-sdk', prefixes: ['sdkdocs/dotnet/'] },
+ { label: 'automerge: go', teamSlug: 'maintainers-go-sdk', prefixes: ['sdkdocs/go/'] },
+ { label: 'automerge: java', teamSlug: 'maintainers-java-sdk', prefixes: ['sdkdocs/java/content/en/'] },
+ { label: 'automerge: js', teamSlug: 'maintainers-js-sdk', prefixes: ['sdkdocs/js/'] },
+ { label: 'automerge: php', teamSlug: 'maintainers-php-sdk', prefixes: ['sdkdocs/php/'] },
+ { label: 'automerge: python', teamSlug: 'maintainers-python-sdk', prefixes: ['sdkdocs/python/'] },
+ { label: 'automerge: rust', teamSlug: 'maintainers-rust-sdk', prefixes: ['sdkdocs/rust/content/en/'] },
+ ];
+
+ const org = owner;
+ const defaultLabelColor = '6A9286';
+ const username = pr.user.login;
+
+ // 1) List changed files
+ const files = await github.paginate(
+ github.rest.pulls.listFiles,
+ { owner, repo, pull_number: number, per_page: 100 }
+ );
+
+ if (files.length === 0) {
+ core.info('No files changed in PR; skipping.');
+ core.setOutput('eligible', 'false');
+ return;
+ }
+
+ // 2) Determine which single SDK mapping the PR targets
+ // - All files must match ONE mapping's prefixes
+ // - If files touch multiple mappings or outside any mapping, skip
+
+ let currentMapping = null; // holds the mapping object we've locked onto
+ let ineligible = false;
+
+ for (const f of files) {
+ const path = f.filename;
+
+ // find the first mapping whose prefixes match this file
+ let matched = null;
+ for (const m of MAPPINGS) {
+ if (m.prefixes.some(p => path.startsWith(p))) {
+ matched = m;
+ break;
+ }
+ }
+
+ // if no mapping matched, we can stop: not eligible
+ if (!matched) {
+ ineligible = true;
+ break;
+ }
+
+ // if we haven't locked onto a mapping yet, set it now
+ if (!currentMapping) {
+ currentMapping = matched;
+ } else if (currentMapping !== matched) {
+ // different SDK mapping from the one already selected => not eligible
+ ineligible = true;
+ break;
+ }
+ }
+
+ if (ineligible || !currentMapping) {
+ core.info('PR is not eligible: outside mapped paths or touches multiple SDK directories.');
+ core.setOutput('eligible', 'false');
+ return;
+ }
+
+ const mapping = currentMapping;
+ const labelName = mapping.label;
+ const teamSlug = mapping.teamSlug;
+ const lang = mapping.label.split(': ')[1] || 'sdk';
+
+ // 3) Verify author is active in the corresponding team
+ // teams.getMembershipForUserInOrg: GET /orgs/{org}/teams/{team_slug}/memberships/{username}
+ // Requires team visibility to the token. See https://docs.github.com/rest/teams/members
+ try {
+ const membership = await github.rest.teams.getMembershipForUserInOrg({
+ org,
+ team_slug: teamSlug,
+ username
+ });
+ if (membership.data.state !== 'active') {
+ core.info(`User ${username} is not active in team ${teamSlug}.`);
+ core.setOutput('eligible', 'false');
+ return;
+ }
+ } catch (err) {
+ if (err.status === 404) {
+ core.info(`User ${username} is not a member of team ${teamSlug}.`);
+ core.setOutput('eligible', 'false');
+ return;
+ }
+ throw err;
+ }
+
+ // 4) Ensure label exists; then add it to the PR
+ try {
+ await github.rest.issues.getLabel({ owner, repo, name: labelName });
+ } catch (e) {
+ if (e.status === 404) {
+ await github.rest.issues.createLabel({
+ owner, repo, name: labelName, color: defaultLabelColor,
+ description: 'Auto-merged language-specific SDK docs'
+ });
+ } else {
+ throw e;
+ }
+ }
+ await github.rest.issues.addLabels({
+ owner, repo, issue_number: number, labels: [labelName]
+ });
+
+ // 5) Expose mapping for next step
+ core.setOutput('eligible', 'true');
+ core.setOutput('label', labelName);
+ core.setOutput('teamSlug', teamSlug);
+ core.setOutput('lang', lang);
+
+ - name: Auto-approve & merge (only if eligible)
+ if: steps.check.outputs.eligible == 'true'
+ uses: actions/github-script@v7
+ with:
+ script: |
+ const { owner, repo } = context.repo;
+ const number = context.payload.pull_request.number;
+ const lang = core.getInput('lang') || '${{ steps.check.outputs.lang }}';
+ const mergeMethod = process.env.MERGE_METHOD || 'squash';
+
+ // 6) Auto-approve review
+ try {
+ await github.rest.pulls.createReview({
+ owner, repo, pull_number: number,
+ event: 'APPROVE',
+ body: `Auto-approval: ${lang} SDK docs`
+ });
+ } catch (e) {
+ core.warning(`Failed to create review: ${e.message}`);
+ }
+
+ // 7) Poll until PR is mergeable (clean/unstable)
+ const wait = ms => new Promise(r => setTimeout(r, ms));
+ let attempt = 0;
+ while (attempt < 12) { // up to ~60s
+ attempt++;
+ const pr = await github.rest.pulls.get({ owner, repo, pull_number: number });
+ const state = pr.data.mergeable_state;
+ core.info(`mergeable=${pr.data.mergeable}, mergeable_state=${state}`);
+ if (pr.data.mergeable && (state === 'clean' || state === 'unstable')) break;
+ await wait(5000);
+ }
+
+ // 8) Merge the PR
+ await github.rest.pulls.merge({
+ owner, repo, pull_number: number,
+ merge_method: mergeMethod,
+ commit_title: `${lang}: ${context.payload.pull_request.title}`,
+ commit_message: `Auto-merged by SDK maintainer merge bot (${lang})`
+ });
diff --git a/.github/workflows/website-root.yml b/.github/workflows/website-root.yml
index dc437b1ca92..10c09bc6b11 100644
--- a/.github/workflows/website-root.yml
+++ b/.github/workflows/website-root.yml
@@ -4,11 +4,11 @@ on:
workflow_dispatch:
push:
branches:
- - v1.15
+ - v1.16
pull_request:
types: [opened, synchronize, reopened, closed]
branches:
- - v1.15
+ - v1.16
concurrency:
# Cancel the previously triggered build for only PR build.
@@ -50,23 +50,17 @@ jobs:
if [ $GITHUB_EVENT_NAME == 'pull_request' ]; then
STAGING_URL="https://${SWA_BASE}-${{github.event.number}}.westus2.azurestaticapps.net/"
fi
- hugo ${STAGING_URL+-b "$STAGING_URL"}
+ hugo ${STAGING_URL+-b "$STAGING_URL"} --minify
- name: Deploy docs site
uses: Azure/static-web-apps-deploy@v1
with:
azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_PROUD_BAY_0E9E0E81E }}
repo_token: ${{ secrets.GITHUB_TOKEN }}
action: "upload"
- app_location: "/daprdocs/public/"
+ app_location: "/daprdocs/public"
output_location: "/"
skip_app_build: true
skip_deploy_on_missing_secrets: true
- - name: Upload Hugo artifacts
- uses: actions/upload-artifact@v4
- with:
- name: hugo_build
- path: ./daprdocs/public/
- if-no-files-found: error
close_staging_site:
if: github.event_name == 'pull_request' && github.event.action == 'closed'
@@ -80,29 +74,3 @@ jobs:
azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_PROUD_BAY_0E9E0E81E }}
action: "close"
skip_deploy_on_missing_secrets: true
-
- algolia_index:
- name: Index site for Algolia
- if: github.event_name == 'push'
- needs: ['build_and_deploy_job']
- runs-on: ubuntu-latest
- env:
- ALGOLIA_APP_ID: ${{ secrets.ALGOLIA_APP_ID }}
- ALGOLIA_API_WRITE_KEY: ${{ secrets.ALGOLIA_API_WRITE_KEY }}
- ALGOLIA_INDEX_NAME: daprdocs
- steps:
- - name: Checkout docs repo
- uses: actions/checkout@v4
- with:
- submodules: false
- - name: Download Hugo artifacts
- uses: actions/download-artifact@v3
- with:
- name: hugo_build
- path: site/
- - name: Install Python packages
- run: |
- pip install --upgrade bs4
- pip install --upgrade 'algoliasearch>=2.0,<3.0'
- - name: Index site
- run: python ./.github/scripts/algolia.py ./site
diff --git a/.gitmodules b/.gitmodules
index 30857f4ef22..3b8c2987525 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,31 +1,4 @@
-[submodule "sdkdocs/python"]
- path = sdkdocs/python
- url = https://github.com/dapr/python-sdk.git
-[submodule "sdkdocs/php"]
- path = sdkdocs/php
- url = https://github.com/dapr/php-sdk.git
-[submodule "sdkdocs/dotnet"]
- path = sdkdocs/dotnet
- url = https://github.com/dapr/dotnet-sdk.git
[submodule "translations/docs-zh"]
path = translations/docs-zh
url = https://github.com/dapr/docs-zh.git
- branch = v1.0_content
-[submodule "sdkdocs/go"]
- path = sdkdocs/go
- url = https://github.com/dapr/go-sdk.git
-[submodule "sdkdocs/java"]
- path = sdkdocs/java
- url = https://github.com/dapr/java-sdk.git
-[submodule "sdkdocs/js"]
- path = sdkdocs/js
- url = https://github.com/dapr/js-sdk.git
-[submodule "sdkdocs/pluggable-components/dotnet"]
- path = sdkdocs/pluggable-components/dotnet
- url = https://github.com/dapr-sandbox/components-dotnet-sdk
-[submodule "sdkdocs/pluggable-components/go"]
- path = sdkdocs/pluggable-components/go
- url = https://github.com/dapr-sandbox/components-go-sdk
-[submodule "sdkdocs/rust"]
- path = sdkdocs/rust
- url = https://github.com/dapr/rust-sdk.git
+ branch = v1.0_content
\ No newline at end of file
diff --git a/README.md b/README.md
index 3af7bb0833e..4fa213b7709 100644
--- a/README.md
+++ b/README.md
@@ -16,8 +16,8 @@ The following branches are currently maintained:
| Branch | Website | Description |
| ------------------------------------------------------------ | -------------------------- | ------------------------------------------------------------------------------------------------ |
-| [v1.15](https://github.com/dapr/docs) (primary) | https://docs.dapr.io | Latest Dapr release documentation. Typo fixes, clarifications, and most documentation goes here. |
-| [v1.16](https://github.com/dapr/docs/tree/v1.16) (pre-release) | https://v1-16.docs.dapr.io/ | Pre-release documentation. Doc updates that are only applicable to v1.15+ go here. |
+| [v1.16](https://github.com/dapr/docs) (primary) | https://docs.dapr.io | Latest Dapr release documentation. Typo fixes, clarifications, and most documentation goes here. |
+| [v1.17](https://github.com/dapr/docs/tree/v1.17) (pre-release) | https://v1-17.docs.dapr.io/ | Pre-release documentation. Doc updates that are only applicable to v1.17+ go here. |
For more information visit the [Dapr branch structure](https://docs.dapr.io/contributing/docs-contrib/contributing-docs/#branch-guidance) document.
@@ -68,22 +68,17 @@ Continue with the [Run local server](#run-local-server) steps.
1. Ensure pre-requisites are installed.
1. [Fork](https://github.com/dapr/docs/fork) and clone this repository.
-1. Change to daprdocs directory:
+1. Make sure you are in the root folder for the docs repo. You should find a hugo.yaml file in this directory.
-```sh
-cd ./daprdocs
-```
-
-4. Update submodules:
+1. Update submodules:
```sh
git submodule update --init --recursive
```
-1. Navigate back to the repository root and install npm packages:
+1. Install the npm packages:
```sh
-cd ..
npm install
```
diff --git a/daprdocs/assets/scss/_content.scss b/daprdocs/assets/scss/_content.scss
index 8b40a659f07..2f4d0b7b402 100644
--- a/daprdocs/assets/scss/_content.scss
+++ b/daprdocs/assets/scss/_content.scss
@@ -153,6 +153,10 @@
color: $secondary;
}
+.card-title > img {
+ margin-right: 10px;
+}
+
.td-page-meta {
a, a:visited {
diff --git a/daprdocs/content/en/_index.md b/daprdocs/content/en/_index.md
index f17e63ec71f..54ab8f9d4d1 100644
--- a/daprdocs/content/en/_index.md
+++ b/daprdocs/content/en/_index.md
@@ -4,17 +4,134 @@ no_list: true
---
#
-Welcome to the Dapr documentation site!
+## Dapr, the distributed application runtime
+Dapr provides APIs for communication, state, workflow, and agentic AI. The APIs decouple the application code from the underlying infrastructure ensuring flexibility and portability. Dapr leverages industry best practices for security, resiliency, and observability, so you can focus on your code.
-{{% alert title="What is Dapr?" color="primary" %}}
-Dapr is a portable, event-driven runtime that makes it easy for any developer to build resilient,
-stateless and stateful applications that run on the cloud and edge and embraces the diversity of
-languages and developer frameworks. Leveraging the benefits of a sidecar architecture, Dapr helps
-you tackle the challenges that come with building microservices and keeps your code platform agnostic.
{{< button text="Get started" page="getting-started.md" >}}
-{{% /alert %}}
+### Use Cases
+
+
+
+
+
+
+ Workflow Orchestration
+
Orchestrate your microservices reliably with Dapr workflow.
+
+
+
+
+
+
+
+ Agentic AI
+
Create durable agentic AI applications with Dapr Agents.
+
+
+
+
+
+
+
+
+
+ Microservices
+
Build resilient microservices using the Dapr service invocation API.
+
+
+
+
+
+
+
+ Event Driven Architecture
+
Create event-driven applications with the Dapr pub/sub API.
+
+
+
+
+
+### Languages
+
+
+
+
+
+
+ .NET
+
+
+ Learn more about the .NET SDK.
+
+
+
+
+
+
+
+
+ Python
+
+
+ Learn more about the Python SDK.
+
+
+
+
+
+
+
+
+ JavaScript
+
+
+ Learn more about the JavaScript SDK.
+
+
+
+
+
+
+
+
+
+
+
+ Java
+
+
+ Learn more about the Java SDK.
+
+
+
+
+
+
+
+
+ Go
+
+
+ Learn more about the Go SDK.
+
+
+
+
+
+
+
+
+ PHP
+
+
+ Learn more about the PHP SDK.
+
+
+
+
+
### Start developing with Dapr
@@ -113,9 +230,8 @@ you tackle the challenges that come with building microservices and keeps your c
-
-
+
@@ -128,81 +244,17 @@ you tackle the challenges that come with building microservices and keeps your c
-
-
- .NET
+
+ Dapr University
- Learn more about the .NET SDK.
+ Learn Dapr through a series of free hands-on courses in a cloud-based sandbox environment.
-
-
-
-
-
-
-
- Python
-
-
- Learn more about the Python SDK.
-
-
-
-
-
-
-
-
- JavaScript
-
-
- Learn more about the JavaScript SDK.
-
-
+
-
-
-
-
-
-
- Java
-
-
- Learn more about the Java SDK.
-
-
-
-
-
-
-
-
- Go
-
-
- Learn more about the Go SDK.
-
-
-
-
-
-
-
-
- PHP
-
-
- Learn more about the PHP SDK.
-
-
-
-
-
\ No newline at end of file
diff --git a/daprdocs/content/en/concepts/building-blocks-concept.md b/daprdocs/content/en/concepts/building-blocks-concept.md
index 23c444f0c93..08d94f5ccde 100644
--- a/daprdocs/content/en/concepts/building-blocks-concept.md
+++ b/daprdocs/content/en/concepts/building-blocks-concept.md
@@ -31,4 +31,4 @@ Dapr provides the following building blocks:
| [**Distributed lock**]({{% ref "distributed-lock-api-overview" %}}) | `/v1.0-alpha1/lock` | The distributed lock API enables you to take a lock on a resource so that multiple instances of an application can access the resource without conflicts and provide consistency guarantees.
| [**Cryptography**]({{% ref "cryptography-overview" %}}) | `/v1.0-alpha1/crypto` | The Cryptography API enables you to perform cryptographic operations, such as encrypting and decrypting messages, without exposing keys to your application.
| [**Jobs**]({{% ref "jobs-overview" %}}) | `/v1.0-alpha1/jobs` | The Jobs API enables you to schedule and orchestrate jobs. Example scenarios include:
Schedule batch processing jobs to run every business day
Schedule various maintenance scripts to perform clean-ups
Schedule ETL jobs to run at specific times (hourly, daily) to fetch new data, process it, and update the data warehouse with the latest information.
-| [**Conversation**]({{% ref "conversation-overview" %}}) | `/v1.0-alpha1/conversation` | The Conversation API enables you to supply prompts to converse with different large language models (LLMs) and includes features such as prompt caching and personally identifiable information (PII) obfuscation.
\ No newline at end of file
+| [**Conversation**]({{% ref "conversation-overview" %}}) | `/v1.0-alpha2/conversation` | The Conversation API enables you to supply prompts to converse with different large language models (LLMs) and includes features such as prompt caching and personally identifiable information (PII) obfuscation.
\ No newline at end of file
diff --git a/daprdocs/content/en/concepts/dapr-services/scheduler.md b/daprdocs/content/en/concepts/dapr-services/scheduler.md
index a143ee08b97..3a561862457 100644
--- a/daprdocs/content/en/concepts/dapr-services/scheduler.md
+++ b/daprdocs/content/en/concepts/dapr-services/scheduler.md
@@ -85,7 +85,7 @@ When running in HA mode, you only need to expose the ports for one scheduler ins
version: "3.5"
services:
scheduler-0:
- image: "docker.io/daprio/scheduler:1.16.0"
+ image: "docker.io/daprio/scheduler:{{% dapr-latest-version %}}"
command:
- "./scheduler"
- "--etcd-data-dir=/var/run/dapr/scheduler"
@@ -96,7 +96,7 @@ services:
volumes:
- ./dapr_scheduler/0:/var/run/dapr/scheduler
scheduler-1:
- image: "docker.io/daprio/scheduler:1.16.0"
+ image: "docker.io/daprio/scheduler:{{% dapr-latest-version %}}"
command:
- "./scheduler"
- "--etcd-data-dir=/var/run/dapr/scheduler"
@@ -105,7 +105,7 @@ services:
volumes:
- ./dapr_scheduler/1:/var/run/dapr/scheduler
scheduler-2:
- image: "docker.io/daprio/scheduler:1.16.0"
+ image: "docker.io/daprio/scheduler:{{% dapr-latest-version %}}"
command:
- "./scheduler"
- "--etcd-data-dir=/var/run/dapr/scheduler"
@@ -115,25 +115,93 @@ services:
- ./dapr_scheduler/2:/var/run/dapr/scheduler
```
-## Back Up and Restore Scheduler Data
+## Managing jobs with the Dapr CLI
-In production environments, it's recommended to perform periodic backups of this data at an interval that aligns with your recovery point objectives.
+Dapr provides a CLI for inspecting and managing all scheduled jobs, regardless of type.
+The CLI is the recommended way to view, back up, and delete jobs.
-### Port Forward for Backup Operations
+There are several different types of jobs which Scheduler manages:
-To perform backup and restore operations, you'll need to access the embedded etcd instance. This requires port forwarding to expose the etcd ports (port 2379).
+- `app/{app-id}/{job-name}`: Jobs created via the [Jobs API]({{% ref jobs_api %}})
+- `actor/{actor-type}/{actor-id}/{reminder-name}`: Actor reminder jobs created via the [Actor Reminders API]({{% ref "actors-timers-reminders#actor-reminders" %}})
+- `activity/{app-id}/{instance-id}::{generation-name}::{activity-index}`: Used internally for [Workflow Activity reminders]({{% ref "workflow-features-concepts.md#workflow-activities" %}})
+- `workflow/{app-id}/{instance-id}/{random-name}`: Used internally for [Workflows]({{% ref "workflow-overview.md" %}}).
-#### Kubernetes Example
+See [how to manage reminders with the CLI]({{% ref "actors-timers-reminders#managing-reminders-with-the-cli" %}}).
-Here's how to port forward and connect to the etcd instance:
+### List jobs
-```shell
-kubectl port-forward svc/dapr-scheduler-server 2379:2379 -n dapr-system
+```bash
+dapr scheduler list
+```
+
+Example output:
+
+```bash
+NAME BEGIN COUNT LAST TRIGGER
+actor/myactortype/actorid1/test1 -3.89s 1 2025-10-03T16:58:55Z
+actor/myactortype/actorid2/test2 -3.89s 1 2025-10-03T16:58:55Z
+app/test-scheduler/test1 -3.89s 1 2025-10-03T16:58:55Z
+app/test-scheduler/test2 -3.89s 1 2025-10-03T16:58:55Z
+activity/test-scheduler/xyz1::0::1 -888.8ms 0
+activity/test-scheduler/xyz2::0::1 -888.8ms 0
+workflow/test-scheduler/abc1/timer-0-TVIQGkvu +50.0h 0
+workflow/test-scheduler/abc2/timer-0-OM2xqG9m +50.0h 0
+```
+
+For more detail, use the wide output format:
+
+```bash
+dapr scheduler list -o wide
+```
+
+```yaml
+NAMESPACE NAME BEGIN EXPIRATION SCHEDULE DUE TIME TTL REPEATS COUNT LAST TRIGGER
+default actor/myactortype/actorid1/test1 2025-10-03T16:58:55Z @every 2h46m40s 2025-10-03T17:58:55+01:00 100 1 2025-10-03T16:58:55Z
+default actor/myactortype/actorid2/test2 2025-10-03T16:58:55Z @every 2h46m40s 2025-10-03T17:58:55+01:00 100 1 2025-10-03T16:58:55Z
+default app/test-scheduler/test1 2025-10-03T16:58:55Z @every 100m 2025-10-03T17:58:55+01:00 1234 1 2025-10-03T16:58:55Z
+default app/test-scheduler/test2 2025-10-03T16:58:55Z 2025-10-03T19:45:35Z @every 100m 2025-10-03T17:58:55+01:00 10000s 56788 1 2025-10-03T16:58:55Z
+default activity/test-scheduler/xyz1::0::1 2025-10-03T16:58:58Z 0s 0
+default activity/test-scheduler/xyz2::0::1 2025-10-03T16:58:58Z 0s 0
+default workflow/test-scheduler/abc1/timer-0-TVIQGkvu 2025-10-05T18:58:58Z 2025-10-05T18:58:58Z 0
+default workflow/test-scheduler/abc2/timer-0-OM2xqG9m 2025-10-05T18:58:58Z 2025-10-05T18:58:58Z 0
+```
+
+### Get job details
+
+```bash
+dapr scheduler get app/my-app/job1 -o yaml
```
-### Performing Backup and Restore
+### Delete jobs
-Once you have access to the etcd ports, you can follow the [official etcd backup and restore documentation](https://etcd.io/docs/v3.5/op-guide/recovery/) to perform backup and restore operations. The process involves using standard etcd commands to create snapshots and restore from them.
+Delete one or more specific jobs:
+
+```bash
+dapr scheduler delete app/my-app/job1 actor/MyActor/123/reminder1
+```
+
+Bulk delete jobs with filters:
+
+```bash
+dapr scheduler delete-all all
+dapr scheduler delete-all app/my-app
+dapr scheduler delete-all actor/MyActorType
+```
+
+### Backup and restore jobs
+
+Export all jobs to a file:
+
+```bash
+dapr scheduler export -o backup.bin
+```
+
+Re-import jobs from a backup file:
+
+```bash
+dapr scheduler import -f backup.bin
+```
## Monitoring Scheduler's etcd Metrics
@@ -155,7 +223,7 @@ For more information on running Dapr on Kubernetes, visit the [Kubernetes hostin
A number of Etcd flags are exposed on Scheduler which can be used to tune for your deployment use case.
-### External Etcd database
+### External Etcd database
Scheduler can be configured to use an external Etcd database instead of the embedded one inside the Scheduler service replicas.
It may be interesting to decouple the storage volume from the Scheduler StatefulSet or container, because of how the cluster or environment is administered or what storage backend is being used.
@@ -230,4 +298,5 @@ dapr_scheduler.etcdMaxSnapshots=10
## Related links
-[Learn more about the Jobs API.]({{% ref jobs_api %}})
+- [Learn more about the Jobs API.]({{% ref jobs_api %}})
+- [Learn more about Actor Reminders.]({{% ref "actors-features-concepts#reminders" %}})
diff --git a/daprdocs/content/en/concepts/overview.md b/daprdocs/content/en/concepts/overview.md
index 273ab87e698..125b040c421 100644
--- a/daprdocs/content/en/concepts/overview.md
+++ b/daprdocs/content/en/concepts/overview.md
@@ -141,6 +141,14 @@ Dapr can be used from any developer framework. Here are some that have been inte
| [JavaScript](https://github.com/dapr/js-sdk) | [Express](https://expressjs.com/) | Build Express applications with Dapr APIs
| [PHP]({{% ref php %}}) | | You can serve with Apache, Nginx, or Caddyserver.
+#### Dapr Agents
+
+
+
+
+[Dapr Agents]({{% ref "../developing-ai/dapr-agents" %}}) is a Python framework for building intelligent, durable agents powered by LLMs. It provides agent-centric capabilities such as tool calling, memory management, [MCP support](https://modelcontextprotocol.io/) and agent orchestration, while leveraging Dapr for durability, observability, and security, at scale.
+
+
#### Integrations and extensions
Visit the [integrations]({{% ref integrations %}}) page to learn about some of the first-class support Dapr has for various frameworks and external products, including:
diff --git a/daprdocs/content/en/concepts/security-concept.md b/daprdocs/content/en/concepts/security-concept.md
index fa284cde420..2008c38a039 100644
--- a/daprdocs/content/en/concepts/security-concept.md
+++ b/daprdocs/content/en/concepts/security-concept.md
@@ -244,7 +244,7 @@ The audit was a holistic security audit with the following goals:
- Formalize a threat model of Dapr
- Perform manual code review
-- Evaluate Daprs fuzzing suite against the formalized threat model
+- Evaluate Dapr's fuzzing suite against the formalized threat model
- Carry out a SLSA review of Dapr.
You can find the full report [here](/docs/Dapr-september-2023-security-audit-report.pdf).
diff --git a/daprdocs/content/en/concepts/terminology.md b/daprdocs/content/en/concepts/terminology.md
index 1c74bf22366..46a4dffe2d6 100644
--- a/daprdocs/content/en/concepts/terminology.md
+++ b/daprdocs/content/en/concepts/terminology.md
@@ -10,15 +10,17 @@ This page details all of the common terms you may come across in the Dapr docs.
| Term | Definition | More information |
|:-----|------------|------------------|
-| App/Application | A running service/binary, usually one that you as the user create and run.
+| App/Application | A running service/binary, usually one that you as the user create and run.
| Building block | An API that Dapr provides to users to help in the creation of microservices and applications. | [Dapr building blocks]({{% ref building-blocks-concept %}})
| Component | Modular types of functionality that are used either individually or with a collection of other components, by a Dapr building block. | [Dapr components]({{% ref components-concept %}})
| Configuration | A YAML file declaring all of the settings for Dapr sidecars or the Dapr control plane. This is where you can configure control plane mTLS settings, or the tracing and middleware settings for an application instance. | [Dapr configuration]({{% ref configuration-concept %}})
| Dapr | Distributed Application Runtime. | [Dapr overview]({{% ref overview %}})
+| Dapr Actors | A Dapr building block that implements the virtual actor pattern for building stateful, single-threaded objects with identity, lifecycle, and concurrency management. | [Actors overview]({{% ref actors-overview %}})
+| Dapr Agents | A developer framework built on top of Dapr Python SDK for creating durable agentic applications powered by LLMs. | [Dapr Agents]({{% ref "../developing-ai/dapr-agents" %}})
| Dapr control plane | A collection of services that are part of a Dapr installation on a hosting platform such as a Kubernetes cluster. This allows Dapr-enabled applications to run on the platform and handles Dapr capabilities such as actor placement, Dapr sidecar injection, or certificate issuance/rollover. | [Self-hosted overview]({{% ref self-hosted-overview %}}) [Kubernetes overview]({{% ref kubernetes-overview %}})
+| Dapr Workflows | A Dapr building block for authoring code-first workflows with durable execution that survive crashes, support long-running processes, and enable human-in-the-loop interactions. | [Workflow overview]({{% ref workflow-overview %}})
| HTTPEndpoint | HTTPEndpoint is a Dapr resource use to identify non-Dapr endpoints to invoke via the service invocation API. | [Service invocation API]({{% ref service_invocation_api %}})
| Namespacing | Namespacing in Dapr provides isolation, and thus provides multi-tenancy. | Learn more about namespacing [components]({{% ref component-scopes %}}), [service invocation]({{% ref service-invocation-namespaces %}}), [pub/sub]({{% ref pubsub-namespaces %}}), and [actors]({{% ref namespaced-actors %}})
| Self-hosted | Windows/macOS/Linux machine(s) where you can run your applications with Dapr. Dapr provides the capability to run on machines in "self-hosted" mode. | [Self-hosted mode]({{% ref self-hosted-overview %}})
-| Service | A running application or binary. This can refer to your application or to a Dapr application.
-| Sidecar | A program that runs alongside your application as a separate process or container. | [Sidecar pattern](https://docs.microsoft.com/azure/architecture/patterns/sidecar)
-
+| Service | A running application or binary. This can refer to your application or to a Dapr application.
+| Sidecar | A program that runs alongside your application as a separate process or container. | [Sidecar pattern](https://docs.microsoft.com/azure/architecture/patterns/sidecar)
diff --git a/daprdocs/content/en/contributing/docs-contrib/maintainer-guide.md b/daprdocs/content/en/contributing/docs-contrib/maintainer-guide.md
index 1ebdfb88172..7c72665b2f0 100644
--- a/daprdocs/content/en/contributing/docs-contrib/maintainer-guide.md
+++ b/daprdocs/content/en/contributing/docs-contrib/maintainer-guide.md
@@ -103,24 +103,22 @@ These steps will prepare the latest release branch for archival.
git checkout -b release_v1.0
```
-1. In VS Code, navigate to `/daprdocs/config.toml`.
-1. Add the following TOML to the `# Versioning` section (around line 154):
-
- ```toml
- version_menu = "v1.0"
- version = "v1.0"
- archived_version = true
- url_latest_version = "https://docs.dapr.io"
-
- [[params.versions]]
- version = "v1.2 (preview)"
- url = "v1-2.docs.dapr.io"
- [[params.versions]]
- version = "v1.1 (latest)"
- url = "#"
- [[params.versions]]
- version = "v1.0"
- url = "https://v1-0.docs.dapr.io"
+1. In VS Code, navigate to `hugo.yaml` located in the root.
+1. Add the following configuration to the `# Versioning` section (around line 121 and onwards):
+
+ ```yaml
+ version_menu: "v1.0"
+ version: "v1.0"
+ archived_version: true
+ url_latest_version: "https://docs.dapr.io"
+
+ versions:
+ - version: v1.2 (preview)
+ url: https://v1-2.docs.dapr.io
+ - version: v1.1 (latest)
+ url: "#"
+ - version: v1.0
+ url: https://v1-0.docs.dapr.io
```
1. Delete `.github/workflows/website-root.yml`.
@@ -146,26 +144,25 @@ These steps will prepare the upcoming release branch for promotion to latest rel
git checkout -b release_v1.1
```
-1. In VS Code, navigate to `/daprdocs/config.toml`.
-1. Update line 1 to `baseURL - https://docs.dapr.io/`.
-1. Update the `# Versioning` section (around line 154) to display the correct versions and tags:
+1. In VS Code, navigate to `hugo.yaml` located in the root.
+1. Update line 1 to `baseURL: https://docs.dapr.io/`.
+1. Update the `# Versioning` section (around line 121 and onwards) to display the correct versions and tags:
- ```toml
+ ```yaml
# Versioning
- version_menu = "v1.1 (latest)"
- version = "v1.1"
- archived_version = false
- url_latest_version = "https://docs.dapr.io"
-
- [[params.versions]]
- version = "v1.2 (preview)"
- url = "v1-2.docs.dapr.io"
- [[params.versions]]
- version = "v1.1 (latest)"
- url = "#"
- [[params.versions]]
- version = "v1.0"
- url = "https://v1-0.docs.dapr.io"
+ version_menu: "v1.1 (latest)"
+ version: "v1.1"
+ archived_version: false
+ url_latest_version: https://docs.dapr.io
+ github_branch: v1.1
+
+ versions:
+ - version: v1.2 (preview)
+ url: https://v1-2.docs.dapr.io
+ - version: v1.1 (latest)
+ url: "#"
+ - version: v1.0
+ url: https://v1-0.docs.dapr.io
```
1. Navigate to `.github/workflows/website-root.yml`.
@@ -194,6 +191,7 @@ These steps will prepare the upcoming release branch for promotion to latest rel
| [v1.2](https://github.com/dapr/docs/tree/v1.2) (pre-release) | https://v1-2.docs.dapr.io/ | Pre-release documentation. Doc updates that are only applicable to v1.2+ go here. |
```
+1. Update the _Supported versions_ table in `support-release-policy.md`; add a new line at the top of the table with the new version of the runtime and SDKs. Change the releases which are older than n-2 to be `Unsupported`.
1. Update the `dapr-latest-version.html` shortcode partial to the new minor/patch version (in this example, `1.1.0` and `1.1`).
1. Commit the staged changes and push to your branch (`release_v1.1`).
1. Open a PR from `release/v1.1` to `v1.1`.
diff --git a/daprdocs/content/en/contributing/presentations.md b/daprdocs/content/en/contributing/presentations.md
index 2f2b7147b2d..0f50ee96d3c 100644
--- a/daprdocs/content/en/contributing/presentations.md
+++ b/daprdocs/content/en/contributing/presentations.md
@@ -6,11 +6,11 @@ weight: 20
description: How to give a presentation on Dapr and examples
---
-We encourage community members to give presentations on Dapr. To get you started quickly, we offer two PowerPoint files:
+We encourage community members to give presentations on Dapr. To get you started quickly, we offer three PowerPoint files:
-- *dapr-slidedeck.pptx*, this is a 150+ page slide deck and contains; an overview of Dapr, all of its building block APIs, cross-cutting concerns, hosting options, and assets to create your own architecture diagrams.
-- *dapr-workflow-slidedeck.pptx*, this is a dedicated slide deck about Dapr Workflow and contains; durable execution concept, workflow authoring, workflow patterns, workflow management, and challenges & tips.
-- *dapr-agents-slidedeck.pptx*, this is a dedicated slide deck about Dapr Agents and contains; AI agents explanation, Dapr Agent types, multi-agent systems, and agentic patterns.
+- *dapr-slidedeck.pptx*, this is a 150+ page slide deck and contains: an overview of Dapr, all of its building block APIs, cross-cutting concerns, hosting options, and assets to create your own architecture diagrams.
+- *dapr-workflow-slidedeck.pptx*, this is a dedicated slide deck about Dapr Workflow and contains: durable execution concept, workflow authoring, workflow patterns, workflow management, and challenges & tips.
+- *dapr-agents-slidedeck.pptx*, this is a dedicated slide deck about Dapr Agents and contains: AI agents explanation, Dapr Agent types, multi-agent systems, and agentic patterns.
There is a downloadable zip file that contains all slide decks.
@@ -25,7 +25,7 @@ brew install --cask font-space-grotesk
## Giving a Dapr presentation
-- Begin by downloading the [Dapr Presentation Decks](/presentations/dapr-slidedecks.zip). These contain slides, diagrams, and graphical assets needed to give a Dapr presentation.
+- Begin by downloading the [Dapr Presentation Decks](/presentations/dapr-slidedecks.zip). These contain slides, diagrams, and graphical assets.
- Next, review the docs to make sure you understand the [concepts]({{% ref concepts %}}).
- Use the Dapr [quickstarts](https://github.com/dapr/quickstarts) repo to show demos of how to use Dapr.
- Once you've done a Dapr presentation, claim the *Dapr Presenter* badge by adding your presentation to [this table](https://github.com/dapr/community/tree/master/presentations) in the Dapr Community repository.
diff --git a/daprdocs/content/en/developing-ai/_index.md b/daprdocs/content/en/developing-ai/_index.md
new file mode 100644
index 00000000000..a0486084ba7
--- /dev/null
+++ b/daprdocs/content/en/developing-ai/_index.md
@@ -0,0 +1,7 @@
+---
+type: docs
+title: "Developing AI with Dapr"
+linkTitle: "Developing AI"
+description: "Information on how to build reliable and secure agentic AI systems with Dapr"
+weight: 31
+---
diff --git a/daprdocs/content/en/developing-ai/agent-integrations/_index.md b/daprdocs/content/en/developing-ai/agent-integrations/_index.md
new file mode 100644
index 00000000000..7f578a4d80d
--- /dev/null
+++ b/daprdocs/content/en/developing-ai/agent-integrations/_index.md
@@ -0,0 +1,22 @@
+---
+type: docs
+title: "Agent Integrations"
+linkTitle: "Agent Integrations"
+weight: 25
+description: "Information on how to integrate agentic frameworks with Dapr runtime"
+---
+
+### What are agent integrations in Dapr?
+
+Dapr augments and enhances other agentic frameworks by providing them with key critical features for running in production:
+
+* Durable execution using [Dapr Workflows]({{% ref workflow-overview %}}) for resilient and long-running AI tasks
+* Portable agent context & memory using Dapr's [State Management API]({{% ref "state-management-overview" %}})
+* Reliable and secure agent-to-agent communication using [Dapr Pub/Sub]({{% ref "pubsub-overview" %}}) and [Service Invocation
+]({{% ref service-invocation-overview %}})
+* Secure agent [identity]({{< ref "concepts/security-concept" >}}#application-identity)
+
+{{< button text="Install Dapr" page="getting-started.md" >}}
+
+With Dapr, developers writing AI systems using the framework of their choice enjoy accelerated development via the Dapr APIs and gain confidence taking agentic systems into production.
+
\ No newline at end of file
diff --git a/daprdocs/content/en/developing-ai/agent-integrations/crewai/_index.md b/daprdocs/content/en/developing-ai/agent-integrations/crewai/_index.md
new file mode 100644
index 00000000000..7b28fd6ae56
--- /dev/null
+++ b/daprdocs/content/en/developing-ai/agent-integrations/crewai/_index.md
@@ -0,0 +1,11 @@
+---
+type: docs
+title: "CrewAI"
+linkTitle: "CrewAI"
+weight: 25
+description: "Dapr first-class integrations with CrewAI Agents"
+---
+
+### What is the Dapr CrewAI integration?
+
+Dapr provides CrewAI agents first-class integrations that range from agent session management to connecting agents via pub/sub and orchestrating agentic workflows.
diff --git a/daprdocs/content/en/developing-ai/agent-integrations/crewai/crewai-workflows.md b/daprdocs/content/en/developing-ai/agent-integrations/crewai/crewai-workflows.md
new file mode 100644
index 00000000000..e6ea8a79f9a
--- /dev/null
+++ b/daprdocs/content/en/developing-ai/agent-integrations/crewai/crewai-workflows.md
@@ -0,0 +1,212 @@
+---
+type: docs
+title: "CrewAI Workflows"
+linkTitle: "CrewAI Workflows"
+weight: 25
+description: "How to run CrewAI agents with durable, fault-tolerant execution using Dapr Workflows"
+---
+
+## Overview
+
+Dapr Workflows make it possible to run CrewAI agents **reliably**, **durably**, and **with built-in resiliency**.
+By orchestrating CrewAI tasks with the Dapr Workflow engine, developers can:
+
+- Ensure long-running CrewAI work survives crashes and restarts.
+- Get automatic checkpoints, retries, and state recovery.
+- Run each CrewAI task as a durable activity.
+- Observe execution through tracing, metrics, and structured logs.
+
+This guide walks through orchestrating multiple CrewAI tasks using Dapr Workflows, ensuring each step is run *exactly once* even if the process restarts.
+
+## Getting Started
+
+Initialize Dapr locally to set up a self-hosted environment for development. This process installs the Dapr sidecar binaries, provisions the workflow engine, and prepares a default components directory. For full details, see [guide on initializing Dapr locally]({{% ref install-dapr-selfhost.md %}}).
+
+Initialize Dapr:
+
+```bash
+dapr init
+```
+
+Verify that daprio/dapr, openzipkin/zipkin, and redis are running:
+
+```bash
+docker ps
+```
+
+### Install Python
+
+{{% alert title="Note" color="info" %}}
+Make sure you have Python already installed. `Python >=3.10`. For installation instructions, visit the official [Python installation guide](https://www.python.org/downloads/).
+{{% /alert %}}
+
+### Create a Python Virtual Environment (recommended)
+
+```bash
+python -m venv .venv
+source .venv/bin/activate # Windows: .venv\Scripts\activate
+```
+
+### Install Dependencies
+
+```bash
+pip install dapr dapr-ext-workflow crewai
+```
+
+### Create a Workflow to Run CrewAI Tasks
+
+Create a file named crewai_workflow.py and paste the following:
+
+```python
+from dapr.ext.workflow import (
+ WorkflowRuntime,
+ DaprWorkflowContext,
+ WorkflowActivityContext,
+ DaprWorkflowClient,
+)
+from crewai import Agent, Task, Crew
+import time
+
+wfr = WorkflowRuntime()
+
+# ------------------------------------------------------------
+# 1. Define Agent, Tasks, and Task Dictionary
+# ------------------------------------------------------------
+agent = Agent(
+ role="Research Analyst",
+ goal="Research and summarize impactful technology updates.",
+ backstory="A skilled analyst who specializes in researching and summarizing technology topics.",
+)
+
+tasks = {
+ "latest_ai_news": Task(
+ description="Find the latest news about artificial intelligence.",
+ expected_output="A 3-paragraph summary of the top 3 stories.",
+ agent=agent,
+ ),
+ "ai_startup_launches": Task(
+ description="Summarize the most impactful AI startup launches in the last 6 months.",
+ expected_output="A list summarizing 2 AI startups with links.",
+ agent=agent,
+ ),
+ "ai_policy_updates": Task(
+ description="Summarize the newest AI government policy and regulation updates.",
+ expected_output="A bullet-point list summarizing the latest policy changes.",
+ agent=agent,
+ ),
+}
+
+# ------------------------------------------------------------
+# 2. Activity — runs ONE task by name
+# ------------------------------------------------------------
+@wfr.activity(name="run_task")
+def run_task_activity(ctx: WorkflowActivityContext, task_name: str):
+ print(f"Running CrewAI task: {task_name}", flush=True)
+
+ task = tasks[task_name]
+
+ # Create a Crew for just this one task
+ temp_crew = Crew(agents=[agent], tasks=[task])
+
+ # kickoff() works across CrewAI versions
+ result = temp_crew.kickoff()
+
+ return str(result)
+
+# ------------------------------------------------------------
+# 3. Workflow — orchestrates tasks durably
+# ------------------------------------------------------------
+@wfr.workflow(name="crewai_multi_task_workflow")
+def crewai_workflow(ctx: DaprWorkflowContext):
+ print("Starting multi-task CrewAI workflow", flush=True)
+
+ latest_news = yield ctx.call_activity(run_task_activity, input="latest_ai_news")
+ startup_summary = yield ctx.call_activity(run_task_activity, input="ai_startup_launches")
+ policy_updates = yield ctx.call_activity(run_task_activity, input="ai_policy_updates")
+
+ return {
+ "latest_news": latest_news,
+ "startup_summary": startup_summary,
+ "policy_updates": policy_updates,
+ }
+
+# ------------------------------------------------------------
+# 4. Runtime + Client (entry point)
+# ------------------------------------------------------------
+if __name__ == "__main__":
+ wfr.start()
+
+ client = DaprWorkflowClient()
+ instance_id = "crewai-multi-01"
+
+ client.schedule_new_workflow(
+ workflow=crewai_workflow,
+ input=None,
+ instance_id=instance_id
+ )
+
+ state = client.wait_for_workflow_completion(instance_id, timeout_in_seconds=60)
+ print(state.serialized_output)
+```
+
+This CrewAI agent starts a workflow that performs news gathering and summarization on the subjects of AI and startups.
+
+### Create the Workflow Database Component
+
+Dapr Workflows persist durable state using any [Dapr state store]({{% ref supported-state-stores %}}) that supports workflows.
+Create a directory named `components`, then create the file workflowstore.yaml:
+
+```yaml
+apiVersion: dapr.io/v1alpha1
+kind: Component
+metadata:
+ name: workflowstore
+spec:
+ type: state.redis
+ version: v1
+ metadata:
+ - name: redisHost
+ value: localhost:6379
+ - name: redisPassword
+ value: ""
+ - name: actorStateStore
+ value: "true"
+```
+
+This component stores:
+
+* Code execution checkpoints
+* Execution history
+* Deterministic resumption state
+* Final output data
+
+### Set a CrewAI LLM Provider
+
+CrewAI needs an LLM configuration or token to run. See instructions [here](https://docs.crewai.com/en/concepts/llms#setting-up-your-llm).
+
+For example, to set up OpenAI:
+
+```
+export OPENAI_API_KEY=sk-...
+```
+
+### Run the Workflow
+
+Launch the CrewAI workflow using the Dapr CLI:
+
+```bash
+dapr run \
+ --app-id crewaiwf \
+ --dapr-grpc-port 50001 \
+ --resources-path ./components \
+ -- python3 ./crewai_workflow.py
+```
+
+As the workflow runs, each CrewAI task is executed as a durable activity.
+If the process crashes, the workflow resumes exactly where it left off. You can try this by killing the process after the first activity and then rerunning that command line above with the same app ID.
+
+Open Zipkin to view workflow traces:
+
+```
+http://localhost:9411
+```
diff --git a/daprdocs/content/en/developing-ai/agent-integrations/openai-agents/_index.md b/daprdocs/content/en/developing-ai/agent-integrations/openai-agents/_index.md
new file mode 100644
index 00000000000..83d119d11d3
--- /dev/null
+++ b/daprdocs/content/en/developing-ai/agent-integrations/openai-agents/_index.md
@@ -0,0 +1,12 @@
+---
+type: docs
+title: "OpenAI Agents"
+linkTitle: "OpenAI Agents"
+weight: 25
+description: "Dapr first-class integrations for OpenAI Agents"
+---
+
+### What is the Dapr OpenAI Agents integration?
+
+Dapr provides OpenAI agents first-class integrations that range from agent session management to connecting agents via pub/sub and orchestrating agentic workflows. The Dapr OpenAI integration is an extension in the OpenAI Python SDK that developers can use to augment OpenAI agents with the various Dapr APIs.
+
\ No newline at end of file
diff --git a/daprdocs/content/en/developing-ai/agent-integrations/openai-agents/openai-agents-sessions.md b/daprdocs/content/en/developing-ai/agent-integrations/openai-agents/openai-agents-sessions.md
new file mode 100644
index 00000000000..d0b2c0d9764
--- /dev/null
+++ b/daprdocs/content/en/developing-ai/agent-integrations/openai-agents/openai-agents-sessions.md
@@ -0,0 +1,127 @@
+---
+type: docs
+title: "Agent Sessions"
+linkTitle: "Agent Sessions"
+weight: 20
+description: "How to use Dapr to reliably and securely manage agent state"
+---
+
+## Overview
+
+By using Dapr to manage the state and [session data for OpenAI agents](https://openai.github.io/openai-agents-python/sessions/), users can store agent state in all databases supported by Dapr, including key/value stores, caches and SQL databases. Developers also get built-in tracing, metrics and resiliency policies that make agent session data operate reliably in production.
+
+## Getting Started
+
+Initialize Dapr locally to set up a self-hosted environment for development. This process fetches and installs the Dapr sidecar binaries, runs essential services as Docker containers, and prepares a default components folder for your application. For detailed steps, see the official [guide on initializing Dapr locally]({{% ref install-dapr-cli.md %}}).
+
+To initialize the Dapr control plane containers and create a default configuration file, run:
+
+```bash
+dapr init
+```
+
+Verify you have container instances with `daprio/dapr`, `openzipkin/zipkin`, and `redis` images running:
+
+```bash
+docker ps
+```
+
+### Install Python
+
+{{% alert title="Note" color="info" %}}
+Make sure you have Python already installed. `Python >=3.10`. For installation instructions, visit the official [Python installation guide](https://www.python.org/downloads/).
+{{% /alert %}}
+
+### Install Dependencies
+
+```bash
+pip install openai-agents dapr
+```
+
+### Create an OpenAI Agent
+
+Let's create a simple OpenAI agent. Put the following in a file named `openai_agent.py`:
+
+```python
+import asyncio
+from agents import Agent, Runner
+from agents.extensions.memory.dapr_session import DaprSession
+
+async def main():
+ agent = Agent(
+ name="Assistant",
+ instructions="Reply very concisely.",
+ )
+
+ session = DaprSession.from_address(
+ session_id="123",
+ state_store_name="statestore"
+ )
+
+ result = await Runner.run(agent, "What city is the Golden Gate Bridge in?", session=session)
+ print(result.final_output)
+
+ result = await Runner.run(agent, "What state is it in?", session=session)
+ print(result.final_output)
+
+ result = await Runner.run(agent, "What's the population?", session=session)
+ print(result.final_output)
+
+asyncio.run(main())
+```
+
+### Set an OpenAI API key
+
+```bash
+export OPENAI_API_KEY=sk-...
+```
+
+### Create a Python venv
+
+```bash
+python -m venv .venv
+source .venv/bin/activate # On Windows: .venv\Scripts\activate
+```
+
+### Create the database component
+
+The component file is how Dapr connects to your database. The full list of supported databases can be found [here]({{% ref supported-state-stores %}}). Create a `components` directory and this file in it:
+
+`statestore.yaml`:
+
+```yaml
+apiVersion: dapr.io/v1alpha1
+kind: Component
+metadata:
+ name: statestore
+spec:
+ type: state.redis
+ version: v1
+ metadata:
+ - name: redisHost
+ value: localhost:6379
+ - name: redisPassword
+ value: ""
+```
+
+### Run The Agent
+
+Now run the local Dapr process and your Python script using the Dapr CLI.
+
+```bash
+dapr run --app-id openaisessions --dapr-grpc-port 50001 --resources-path ./components -- python3 ./openai_agent.py
+```
+
+Open `http://localhost:9411` to view the traces and dependency graph.
+
+You can see [the session data stored in Redis]({{% ref "getting-started/get-started-api" %}}#step-4-see-how-the-state-is-stored-in-redis) with the following command:
+
+```bash
+hgetall "123:messages"
+```
+
+## Next Steps
+
+Now that you have an OpenAI agent using Dapr to manage the agent sessions, explore more you can do with the [State API]({{% ref "state-management-overview" %}}) and how to enable [resiliency policies]({{% ref resiliency-overview %}}) for enhanced reliability.
+
+Read more about OpenAI agent sessions and Dapr [here](https://openai.github.io/openai-agents-python/sessions/).
diff --git a/daprdocs/content/en/developing-applications/dapr-agents/_index.md b/daprdocs/content/en/developing-ai/dapr-agents/_index.md
similarity index 91%
rename from daprdocs/content/en/developing-applications/dapr-agents/_index.md
rename to daprdocs/content/en/developing-ai/dapr-agents/_index.md
index cfa94c00bad..e75e9f12c60 100644
--- a/daprdocs/content/en/developing-applications/dapr-agents/_index.md
+++ b/daprdocs/content/en/developing-ai/dapr-agents/_index.md
@@ -4,6 +4,8 @@ title: "Dapr Agents"
linkTitle: "Dapr Agents"
weight: 25
description: "A framework for building durable and resilient AI agent systems at scale"
+aliases:
+ - /developing-applications/dapr-agents
---
### What is Dapr Agents?
diff --git a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-core-concepts.md b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-core-concepts.md
similarity index 59%
rename from daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-core-concepts.md
rename to daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-core-concepts.md
index 7398c762274..e2515d21743 100644
--- a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-core-concepts.md
+++ b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-core-concepts.md
@@ -4,6 +4,8 @@ title: "Core Concepts"
linkTitle: "Core Concepts"
weight: 40
description: "Learn about the core concepts of Dapr Agents"
+aliases:
+ - /developing-applications/dapr-agents/dapr-agents-core-concepts
---
Dapr Agents provides a structured way to build and orchestrate applications that use LLMs without getting bogged down in infrastructure details. The primary goal is to enable AI development by abstracting away the complexities of working with LLMs, tools, memory management, and distributed systems, allowing developers to focus on the business logic of their AI applications. Agents in this framework are the fundamental building blocks.
@@ -29,9 +31,15 @@ async def main():
weather_agent = Agent(
name="WeatherAgent",
role="Weather Assistant",
+ goal="Provide timely weather updates across cities",
instructions=["Help users with weather information"],
tools=[my_weather_func],
- memory=ConversationDaprStateMemory(store_name="historystore", session_id="some-id"),
+ memory = AgentMemoryConfig(
+ store=ConversationDaprStateMemory(
+ store_name="historystore",
+ session_id="some-id",
+ )
+ ),
)
response1 = await weather_agent.run("What's the weather?")
@@ -46,28 +54,36 @@ The `DurableAgent` class is a workflow-based agent that extends the standard Age
```python
-travel_planner = DurableAgent(
+from dapr_agents.workflow.runners import AgentRunner
+
+async def main():
+ travel_planner = DurableAgent(
name="TravelBuddy",
role="Travel Planner",
+ goal="Help users find flights and remember preferences",
instructions=["Help users find flights and remember preferences"],
tools=[search_flights],
- memory=ConversationDaprStateMemory(
- store_name="conversationstore", session_id="my-unique-id"
- ),
-
- # DurableAgent Configurations
- message_bus_name="messagepubsub",
- state_store_name="workflowstatestore",
- state_key="workflow_state",
- agents_registry_store_name="registrystatestore",
- agents_registry_key="agents_registry",
+ memory = AgentMemoryConfig(
+ store=ConversationDaprStateMemory(
+ store_name="conversationstore",
+ session_id="travel-session",
+ )
+ )
)
- travel_planner.as_service(port=8001)
- await travel_planner.start()
+ runner = AgentRunner()
+
+ try:
+ itinerary = await runner.run(
+ travel_planner,
+ payload={"task": "Plan a 3-day trip to Paris"},
+ )
+ print(itinerary)
+ finally:
+ runner.shutdown(travel_planner)
```
-This example demonstrates creating a workflow-backed agent that runs autonomously in the background. The agent can be triggered once and continues execution even across system restarts.
+This example demonstrates creating a workflow-backed agent that runs autonomously in the background. The `AgentRunner` schedules the workflow for you, waits for completion, and ensures the agent can be triggered once yet continue execution across restarts.
**Key Characteristics:**
- Workflow-based execution using Dapr Workflows
@@ -75,6 +91,7 @@ This example demonstrates creating a workflow-backed agent that runs autonomousl
- Automatic retry and recovery mechanisms
- Deterministic execution with checkpointing
- Built-in message routing and agent communication
+- `AgentRunner` modes for DurableAgents: ad-hoc runs (`runner.run(...)`), pub/sub subscriptions (`runner.subscribe(...)`), and FastAPI services (`runner.serve(...)`)
- Supports complex orchestration patterns and multi-agent collaboration
**When to use:**
@@ -177,35 +194,52 @@ tools = client.get_all_tools()
Once connected, the MCP client fetches all available tools from the server and prepares them for immediate use within the agent’s toolset. This allows agents to incorporate capabilities exposed by external processes—such as local Python scripts or remote services without hardcoding or preloading them. Agents can invoke these tools at runtime, expanding their behavior based on what’s offered by the active MCP server.
+
### Memory
Agents retain context across interactions, enhancing their ability to provide coherent and adaptive responses. Memory options range from simple in-memory lists for managing chat history to vector databases for semantic search, and also integrates with [Dapr state stores](https://docs.dapr.io/developing-applications/building-blocks/state-management/howto-get-save-state/), for scalable and persistent memory for advanced use cases from 28 different state store providers.
```python
-# ConversationListMemory (Simple In-Memory) - Default
+from dapr_agents import Agent, DurableAgent
+from dapr_agents.agents.configs import AgentMemoryConfig
+from dapr_agents.memory import (
+ ConversationDaprStateMemory,
+ ConversationListMemory,
+ ConversationVectorMemory,
+)
+
+# 1. ConversationListMemory (Simple In-Memory) - Default
memory_list = ConversationListMemory()
-# ConversationVectorMemory (Vector Store)
+# 2. ConversationVectorMemory (Vector Store)
memory_vector = ConversationVectorMemory(
vector_store=your_vector_store_instance,
- distance_metric="cosine"
+ distance_metric="cosine",
)
-# 3. ConversationDaprStateMemory (Dapr State Store)
-memory_dapr = ConversationDaprStateMemory(
- store_name="historystore", # Maps to Dapr component name
- session_id="some-id"
+# 3. ConversationDaprStateMemory (Dapr State Store) via AgentMemoryConfig
+durable_memory = AgentMemoryConfig(
+ store=ConversationDaprStateMemory(
+ store_name="historystore", # Dapr component name
+ session_id="my-session",
+ )
)
-# Using with an agent
+# Using with a regular Agent (pass the memory instance directly)
agent = Agent(
name="MyAgent",
role="Assistant",
- memory=memory_dapr # Pass any memory implementation
+ memory=memory_list,
)
+# Using with a DurableAgent (pass the AgentMemoryConfig)
+travel_planner = DurableAgent(
+ name="TravelBuddy",
+ memory=durable_memory,
+ # ... other configs ...
+)
```
-`ConversationListMemory` is the default memory implementation when none is specified. It provides fast, temporary storage in Python lists for development and testing. The Dapr's memory implementations are interchangeable, allowing you to switch between them without modifying your agent logic.
+`ConversationListMemory` is the default memory implementation when none is specified. It provides fast, temporary storage in Python lists for development and testing. Dapr's memory implementations (all found in `dapr_agents.memory`) are interchangeable, allowing you to switch between them without modifying your agent logic or deployment model.
| Memory Implementation | Type | Persistence | Search | Use Case |
|---|---|---|---|---|
@@ -216,20 +250,70 @@ agent = Agent(
### Agent Services
-`DurableAgents` are exposed as independent services using [FastAPI and Dapr applications](https://docs.dapr.io/developing-applications/sdks/python/python-sdk-extensions/python-fastapi/). This modular approach separates the agent's logic from its service layer, enabling seamless reuse, deployment, and integration into multi-agent systems.
+`AgentRunner` wires DurableAgents into three complementary hosting modes:
+
+1. **`run`** – trigger a durable workflow directly from Python (CLIs, tests, notebooks) and optionally wait for completion.
+2. **`subscribe`** – automatically register every `@message_router` decorated handler on the agent (including `DurableAgent.agent_workflow`) so CloudEvents on the configured topics are validated against their `message_model` and scheduled as workflow runs.
+3. **`serve`** – host the agent as a web service by combining `subscribe` with FastAPI route registration and an auto-started Uvicorn server. By default it exposes `POST /run` (schedules the `@workflow_entry`) and `GET /run/{instance_id}` (fetches workflow status), but you can supply your own FastAPI app or customize host/port/paths.
```python
-travel_planner.as_service(port=8001)
-await travel_planner.start()
+travel_planner = DurableAgent(
+ name="TravelBuddy",
+ role="Travel Planner",
+ goal="Help humans find flights and remember preferences",
+ instructions=[
+ "Find flights to destinations",
+ "Remember user preferences",
+ "Provide clear flight info.",
+ ],
+ tools=[search_flights],
+)
+runner = AgentRunner()
```
-This exposes the agent as a REST service, allowing other systems to interact with it through standard HTTP requests such as this one:
+The snippets below reuse this `travel_planner` instance to illustrate each mode.
+
+#### 1. Ad-hoc execution with `runner.run(...)`
+
+Use `run` when you want to trigger a durable workflow directly from Python code (tests, CLIs, notebooks, etc.). The runner locates the agent's `@workflow_entry` and schedules it. The `.run()` command is a blocking call that triggers the agent and waits for its completion.
+
+```python
+result = await runner.run(
+ travel_planner,
+ payload={"task": "Plan a 3-day trip to Paris"},
+)
+print(result)
```
-curl -i -X POST http://localhost:8001/start-workflow \
--H "Content-Type: application/json" \
--d '{"task": "I want to find flights to Paris"}'
+
+This mode is ideal for synchronous automation or when you need to capture the final response programmatically. Pass `wait=False` for fire-and-forget instances.
+
+#### 2. Pub/Sub subscriptions with `runner.subscribe(...)`
+
+`subscribe` scans the agent for every method tagged with `@message_router`—including the built-in `agent_workflow`—and automatically registers the necessary Dapr subscriptions using the topics and schemas defined in `AgentPubSubConfig`. Each incoming CloudEvent is validated against the declared `message_model` (for example, `TriggerAction`) before the runner schedules the workflow entry.
+
+```python
+runner.subscribe(travel_planner)
+await wait_for_shutdown()
```
-Unlike conversational agents that provide immediate synchronous responses, durable agents operate as headless services that are triggered asynchronously. You trigger it, receive a workflow instance ID, and can track progress over time. This enables long-running, fault-tolerant operations that can span multiple systems and survive restarts, making them ideal for complex multi-step processes in environments requiring high levels of durability and resiliency.
+
+Add your own `@message_router` methods to support extra topics or broadcast channels—the runner will discover them automatically and route messages to the appropriate handler. Use helpers such as `wait_for_shutdown()` (from `dapr_agents.workflow.utils.core`) to keep the process alive until you stop it.
+
+#### 3. FastAPI services with `runner.serve(...)`
+
+`serve` is the one-line way to run a DurableAgent as a web service. It first calls `subscribe(...)`, then spins up a FastAPI app (unless you pass your own) with two default endpoints:
+
+- `POST /run`: Validates the JSON body against the agent's `@workflow_entry` signature and schedules a new workflow instance.
+- `GET /run/{instance_id}`: Proxies workflow status queries (including payloads, if requested).
+
+```python
+runner.serve(
+ travel_planner,
+ port=8001,
+)
+```
+
+Because workflows are durable, the `/run` endpoint responds immediately with an instance ID even though the agent keeps working in the background. You can mount the generated FastAPI routes into a larger application or let `serve` run its own Uvicorn loop for standalone deployments.
+
## Multi-agent Systems (MAS)
@@ -253,53 +337,121 @@ Workflows are structured processes where LLM agents and tools collaborate in pre
This approach is particularly suitable for business-critical applications where you need both the intelligence of LLMs and the reliability of traditional software systems.
```python
-# Define Workflow logic
-@workflow(name="task_chain_workflow")
+import dapr.ext.workflow as wf
+from dapr.ext.workflow import DaprWorkflowContext
+
+from dapr_agents.llm.dapr import DaprChatClient
+from dapr_agents.workflow.decorators import llm_activity
+
+runtime = wf.WorkflowRuntime()
+llm = DaprChatClient(component_name="openai")
+
+
+@runtime.workflow(name="task_chain_workflow")
def task_chain_workflow(ctx: DaprWorkflowContext):
- result1 = yield ctx.call_activity(get_character)
- result2 = yield ctx.call_activity(get_line, input={"character": result1})
- return result2
+ character = yield ctx.call_activity(get_character)
+ line = yield ctx.call_activity(get_line, input={"character": character})
+ return line
+
-@task(description="Pick a random character from The Lord of the Rings and respond with the character's name only")
-def get_character() -> str:
+@runtime.activity(name="get_character")
+@llm_activity(
+ prompt="Pick a random character from The Lord of the Rings. Respond with the name only.",
+ llm=llm,
+)
+def get_character(ctx) -> str:
pass
-@task(description="What is a famous line by {character}")
-def get_line(character: str) -> str:
+
+@runtime.activity(name="get_line")
+@llm_activity(
+ prompt="What is a famous line by {character}?",
+ llm=llm,
+)
+def get_line(ctx, character: str) -> str:
pass
+
+
+runtime.start()
+client = wf.DaprWorkflowClient()
+instance_id = client.schedule_new_workflow(task_chain_workflow)
+state = client.wait_for_workflow_completion(instance_id)
+print(state.serialized_output)
+runtime.shutdown()
```
-This workflow demonstrates sequential task execution where the output of one task becomes the input for the next, enabling complex multi-step processes with clear dependencies and data flow.
+This workflow demonstrates sequential task execution where the output of one LLM-backed activity becomes the input for the next. The `@llm_activity` decorator wires prompts, formatting, and response handling so activities stay deterministic while still using model reasoning.
Dapr Agents supports coordination of LLM interactions at different levels of granularity:
-### Prompt Tasks
-Tasks created from prompts that leverage LLM reasoning capabilities for specific, well-defined operations.
+### LLM Activities
+`@llm_activity` binds a workflow activity to a prompt, LLM client, and optional structured output. The decorated function body can stay empty because the decorator handles prompting, retries, and response parsing.
```python
-@task(description="Pick a random character from The Lord of the Rings and respond with the character's name only")
-def get_character() -> str:
+llm = DaprChatClient(component_name="openai")
+
+@runtime.activity(name="generate_outline")
+@llm_activity(
+ prompt="Create a short outline about {topic}.",
+ llm=llm,
+)
+def generate_outline(ctx, topic: str) -> str:
pass
```
-While technically not full agents (as they lack tools and memory), prompt tasks serve as lightweight agentic building blocks that perform focused LLM interactions within the broader workflow context.
+LLM activities are perfect for lightweight reasoning steps, extraction tasks, or summarization stages that need deterministic workflow control with LLM flexibility.
-### Agent Tasks
-Tasks based on agents with tools, providing greater flexibility and capability for complex operations requiring external integrations.
+### Agent Activities
+`@agent_activity` lets workflows call fully-configured `Agent` instances (tools, memory, instructions) as activities. The workflow provides the inputs, and the decorator routes execution through the agent’s reasoning loop.
```python
-@task(agent=custom_agent, description="Retrieve stock data for {ticker}")
-def get_stock_data(ticker: str) -> dict:
+planner = Agent(
+ name="PlannerAgent",
+ role="Trip planner",
+ instructions=["Create a concise 3-day plan for any city."],
+ llm=DaprChatClient(component_name="openai"),
+)
+
+@runtime.activity(name="plan_outline")
+@agent_activity(agent=planner)
+def plan_outline(ctx, destination: str) -> dict:
pass
```
-Agent tasks enable workflows to leverage specialized agents with their own tools, memory, and reasoning capabilities while maintaining the structured coordination benefits of workflow orchestration.
-> **Note:** Agent tasks must use regular `Agent` instances, not `DurableAgent` instances, as workflows manage the execution context and durability through the Dapr workflow engine.
+Agent activities enable workflows to leverage specialized agents with their own tools, memory, and reasoning capabilities while maintaining the structured coordination benefits of workflow orchestration.
+
+> **Note:** Agent activities must use regular `Agent` instances, not `DurableAgent` instances, because workflows manage the execution context and durability through the Dapr workflow engine.
### Workflow Patterns
Workflows enable the implementation of various agentic patterns through structured orchestration, including Prompt Chaining, Routing, Parallelization, Orchestrator-Workers, Evaluator-Optimizer, Human-in-the-loop, and others. For detailed implementations and examples of these patterns, see the [Patterns documentation]({{< ref dapr-agents-patterns.md >}}).
+### Message Router Workflows
+
+The `@message_router` decorator binds a workflow directly to a Dapr Pub/Sub topic so every validated message automatically schedules a workflow instance. This pattern—used in the message-router quickstart—lets you push CloudEvent payloads onto a topic and have LLM-backed activities take over immediately.
+
+```python
+from pydantic import BaseModel
+from dapr_agents.workflow.decorators.routers import message_router
+
+class StartBlogMessage(BaseModel):
+ topic: str
+
+@message_router(
+ pubsub="messagepubsub",
+ topic="blog.requests",
+ message_model=StartBlogMessage,
+)
+def blog_workflow(ctx: DaprWorkflowContext, wf_input: dict) -> str:
+ outline = yield ctx.call_activity(
+ create_outline, input={"topic": wf_input["topic"]}
+ )
+ post = yield ctx.call_activity(write_post, input={"outline": outline})
+ return post
+```
+
+During startup, call `register_message_routes(targets=[blog_workflow], dapr_client=client)` to automatically configure subscriptions, schema validation, and workflow scheduling. This keeps the workflow definition as the single source of truth for both orchestration and event ingress.
+
### Workflows vs. Durable Agents
Both DurableAgent and workflow-based agent orchestration use Dapr workflows behind the scenes for durability and reliability, but they differ in how control flow is determined.
@@ -327,15 +479,57 @@ The core participants in this multi-agent coordination systems are the following
Each agent runs as an independent service with its own lifecycle, configured as a standard DurableAgent with pub/sub enabled:
```python
-hobbit_service = DurableAgent(
- name="Frodo",
- instructions=["Speak like Frodo, with humility and determination."],
- message_bus_name="messagepubsub",
- state_store_name="workflowstatestore",
- state_key="workflow_state",
- agents_registry_store_name="agentstatestore",
- agents_registry_key="agents_registry",
+import asyncio
+
+from dapr_agents.agents.configs import (
+ AgentMemoryConfig,
+ AgentProfileConfig,
+ AgentPubSubConfig,
+ AgentRegistryConfig,
+ AgentStateConfig,
+)
+from dapr_agents.memory import ConversationDaprStateMemory
+from dapr_agents.storage.daprstores.stateservice import StateStoreService
+from dapr_agents.workflow.runners import AgentRunner
+from dapr_agents.workflow.utils.core import wait_for_shutdown
+
+registry = AgentRegistryConfig(
+ store=StateStoreService(store_name="agentregistrystore"),
+ team_name="fellowship",
)
+
+frodo = DurableAgent(
+ profile=AgentProfileConfig(
+ name="Frodo",
+ role="Ring Bearer",
+ instructions=["Speak like Frodo, with humility and determination."],
+ ),
+ pubsub=AgentPubSubConfig(
+ pubsub_name="messagepubsub",
+ agent_topic="fellowship.frodo.requests",
+ broadcast_topic="fellowship.broadcast",
+ ),
+ state=AgentStateConfig(
+ store=StateStoreService(store_name="workflowstatestore", key_prefix="frodo:")
+ ),
+ registry=registry,
+ memory=AgentMemoryConfig(
+ store=ConversationDaprStateMemory(
+ store_name="memorystore",
+ session_id="frodo-session",
+ )
+ ),
+)
+
+async def main():
+ runner = AgentRunner()
+ try:
+ runner.subscribe(frodo)
+ await wait_for_shutdown()
+ finally:
+ runner.shutdown(frodo)
+
+asyncio.run(main())
```
#### Orchestrator
@@ -343,18 +537,45 @@ hobbit_service = DurableAgent(
The orchestrator coordinates interactions between agents and manages conversation flow by selecting appropriate agents, managing interaction sequences, and tracking progress. Dapr Agents offers three orchestration strategies: Random, RoundRobin, and LLM-based orchestration.
```python
+from dapr_agents.agents.configs import (
+ AgentExecutionConfig,
+ AgentPubSubConfig,
+ AgentRegistryConfig,
+ AgentStateConfig,
+)
+from dapr_agents.llm.openai import OpenAIChatClient
+from dapr_agents.storage.daprstores.stateservice import StateStoreService
+from dapr_agents.workflow.runners import AgentRunner
+import dapr.ext.workflow as wf
+
llm_orchestrator = LLMOrchestrator(
name="LLMOrchestrator",
- message_bus_name="messagepubsub",
- state_store_name="agenticworkflowstate",
- state_key="workflow_state",
- agents_registry_store_name="agentstatestore",
- agents_registry_key="agents_registry",
- max_iterations=3
+ llm=OpenAIChatClient(),
+ pubsub=AgentPubSubConfig(
+ pubsub_name="messagepubsub",
+ agent_topic="llm.orchestrator.requests",
+ broadcast_topic="fellowship.broadcast",
+ ),
+ state=AgentStateConfig(
+ store=StateStoreService(
+ store_name="workflowstatestore", key_prefix="llm.orchestrator:"
+ )
+ ),
+ registry=AgentRegistryConfig(
+ store=StateStoreService(store_name="agentregistrystore"),
+ team_name="fellowship",
+ ),
+ execution=AgentExecutionConfig(max_iterations=3),
+ runtime=wf.WorkflowRuntime(),
)
+
+runner = AgentRunner()
+runner.serve(llm_orchestrator, port=8004)
```
-The LLM-based orchestrator uses intelligent agent selection for context-aware decision making, while Random and RoundRobin provide alternative coordination strategies for simpler use cases.
+The LLM-based orchestrator uses intelligent agent selection for context-aware decision making, while Random and RoundRobin provide alternative coordination strategies for simpler use cases. The runner keeps the orchestrator online as a Dapr app or HTTP service so clients can publish tasks over topics or REST calls.
+
+Because both `DurableAgent.agent_workflow` and the orchestrators above are decorated with `@message_router(message_model=TriggerAction)`, `runner.subscribe(...)` automatically wires the topics declared in `AgentPubSubConfig` and validates every incoming CloudEvent against the expected schema before scheduling the `@workflow_entry`. You can add additional message routers (each with its own `message_model`) to the same agent; the runner will discover them the next time it starts and extend the subscription list automatically.
### Communication Flow
@@ -391,4 +612,4 @@ Pub/Sub messaging is essential for event-driven agentic workflows because it:
* **Fosters Collaboration**: Multiple agents can subscribe to the same topic, making it easy to share updates or divide responsibilities.
* **Enables Scalability**:The message bus ensures that communication scales effortlessly, whether you are adding new agents, expanding workflows, or adapting to changing requirements. Agents remain loosely coupled, allowing workflows to evolve without disruptions.
-This messaging framework ensures that agents operate efficiently, workflows remain flexible, and systems can scale dynamically.
\ No newline at end of file
+This messaging framework ensures that agents operate efficiently, workflows remain flexible, and systems can scale dynamically.
diff --git a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-getting-started.md b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-getting-started.md
similarity index 82%
rename from daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-getting-started.md
rename to daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-getting-started.md
index 2d160a4cb5a..4cd7fba0201 100644
--- a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-getting-started.md
+++ b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-getting-started.md
@@ -4,6 +4,8 @@ title: "Getting Started"
linkTitle: "Getting Started"
weight: 20
description: "How to install Dapr Agents and run your first agent"
+aliases:
+ - /developing-applications/dapr-agents/dapr-agents-getting-started
---
{{% alert title="Dapr Agents Concepts" color="primary" %}}
@@ -53,19 +55,11 @@ Make sure you have Python already installed. `Python >=3.10`. For installation i
Let's create a weather assistant agent that demonstrates tool calling with Dapr state management used for conversation memory.
-### 1. Create the environment file
+### 1. Create the Dapr components
-Create a `.env` file with your OpenAI API key:
+Create a `components` directory and add two files:
-```env
-OPENAI_API_KEY=your_api_key_here
-```
-
-This API key is essential for agents to communicate with the LLM, as the default LLM client in the agent uses OpenAI's services. If you don't have an API key, you can [create one here](https://platform.openai.com/api-keys).
-
-### 2. Create the Dapr component
-
-Create a `components` directory and add `historystore.yaml`:
+`historystore.yaml`:
```yaml
apiVersion: dapr.io/v1alpha1
@@ -84,6 +78,27 @@ spec:
This component will be used to store the conversation history, as LLMs are stateless and every chat interaction needs to send all the previous conversations to maintain context.
+`openai.yaml`:
+
+```yaml
+apiVersion: dapr.io/v1alpha1
+kind: Component
+metadata:
+ name: openai
+spec:
+ type: conversation.openai
+ version: v1
+ metadata:
+ - name: key
+ value: "{{OPENAI_API_KEY}}"
+ - name: model
+ value: gpt-5-2025-08-07
+ - name: temperature
+ value: 1
+```
+
+This component wires the default `DaprChatClient` to OpenAI via the Conversation API. Replace the `{{OPENAI_API_KEY}}` placeholder with your actual OpenAI key by editing the file directly. This API key is essential for agents to communicate with the LLM, as the default chat client talks to OpenAI-compatible endpoints. If you don't have an API key, you can [create one here](https://platform.openai.com/api-keys). You can also tweak metadata (model, temperature, baseUrl, etc.) to point at compatible OpenAI-style providers.
+
### 3. Create the agent with weather tool
Create `weather_agent.py`:
@@ -91,29 +106,39 @@ Create `weather_agent.py`:
```python
import asyncio
from dapr_agents import tool, Agent
+from dapr_agents.agents.configs import AgentMemoryConfig
from dapr_agents.memory import ConversationDaprStateMemory
from dotenv import load_dotenv
load_dotenv()
+
@tool
def get_weather() -> str:
"""Get current weather."""
return "It's 72°F and sunny"
+
async def main():
+ memory_config = AgentMemoryConfig(
+ store=ConversationDaprStateMemory(
+ store_name="historystore",
+ session_id="hello-world",
+ )
+ )
+
agent = Agent(
name="WeatherAgent",
role="Weather Assistant",
instructions=["Help users with weather information"],
- memory=ConversationDaprStateMemory(store_name="historystore", session_id="hello-world"),
+ memory=memory_config,
tools=[get_weather],
)
# First interaction
response1 = await agent.run("Hi! My name is John. What's the weather?")
print(f"Agent: {response1}")
-
+
# Second interaction - agent should remember the name
response2 = await agent.run("What's my name?")
print(f"Agent: {response2}")
@@ -184,4 +209,4 @@ Here you can browse the state store used in the agent and explore its data.
## Next Steps
Now that you have Dapr Agents installed and running, explore more advanced examples and patterns in the [quickstarts]({{% ref dapr-agents-quickstarts.md %}}) section to learn about multi-agent workflows, durable agents, and integration with Dapr's powerful distributed capabilities.
-
\ No newline at end of file
+
diff --git a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-integrations.md b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-integrations.md
similarity index 91%
rename from daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-integrations.md
rename to daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-integrations.md
index 11406f3479e..61c3e46cd00 100644
--- a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-integrations.md
+++ b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-integrations.md
@@ -4,6 +4,8 @@ title: "Integrations"
linkTitle: "Integrations"
weight: 60
description: "Various integrations and tools available in Dapr Agents"
+aliases:
+ - /developing-applications/dapr-agents/dapr-agents-integrations
---
# Out-of-the-box Tools
@@ -313,4 +315,39 @@ While the Arxiv Fetcher provides robust functionality for retrieving and process
* **Building a Searchable Knowledge Base**: Combine fetched papers with integrations like text splitting and vector embeddings for advanced search capabilities.
* **Retrieval-Augmented Generation (RAG)**: Use processed papers as inputs for RAG pipelines to power question-answering systems.
-* **Automated Literature Surveys**: Generate summaries or insights based on the fetched and processed research.
\ No newline at end of file
+* **Automated Literature Surveys**: Generate summaries or insights based on the fetched and processed research.
+
+## Tools
+
+### MCP Toolbox for databases
+
+Dapr Agents supports integrating with [MCP Toolbox for Databases](https://googleapis.github.io/genai-toolbox/getting-started/introduction/) by implementing a wrapper that loads the available tools into the `Tool` model Dapr Agents utilizes.
+
+To integrate the Toolbox, load the tools as follows:
+
+```python
+from toolbox_core import ToolboxSyncClient
+client = ToolboxSyncClient("http://127.0.0.1:5000")
+agent_tools = AgentTool.from_toolbox_many(client.load_toolset("your-tools-name-here"))
+agent = DurableAgent(
+ ..
+ tools=agent_tools
+)
+
+..
+# Remember to close the tool
+finally:
+ client.close()
+```
+
+Or wrap it in a `with` statement:
+
+```python
+from toolbox_core import ToolboxSyncClient
+with ToolboxSyncClient("http://127.0.0.1:5000") as client:
+ agent_tools = AgentTool.from_toolbox_many(client.load_toolset("your-tools-name-here"))
+ agent = DurableAgent(
+ ..
+ tools=agent_tools
+ )
+```
\ No newline at end of file
diff --git a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-introduction.md b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-introduction.md
similarity index 97%
rename from daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-introduction.md
rename to daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-introduction.md
index 17cc7dcf7f1..40a3bae070f 100644
--- a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-introduction.md
+++ b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-introduction.md
@@ -4,6 +4,8 @@ title: "Introduction"
linkTitle: "Introduction"
weight: 10
description: "Overview of Dapr Agents and its key features"
+aliases:
+ - /developing-applications/dapr-agents/dapr-agents-introduction
---

@@ -66,11 +68,11 @@ Dapr Agents is a Python framework built on top of the [Python Dapr SDK]({{% ref
Get started with Dapr Agents by following the instructions on the [Getting Started page]({{% ref dapr-agents-getting-started.md %}}).
-### Framework Integrations
+### Framework integrations
-Dapr Agents integrates with popular Python frameworks and tools. For detailed integration guides and examples, see the [integrations page]({{% ref "developing-applications/dapr-agents/dapr-agents-integrations.md" %}}).
+Dapr Agents integrates with popular Python frameworks and tools. For detailed integration guides and examples, see the [integrations page]({{% ref "developing-ai/dapr-agents/dapr-agents-integrations.md" %}}).
-## Operational Support
+## Operational support
Dapr Agents inherits Dapr's enterprise-grade operational capabilities, providing comprehensive support for durable and reliable deployments of agentic systems.
diff --git a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-patterns.md b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-patterns.md
similarity index 89%
rename from daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-patterns.md
rename to daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-patterns.md
index 959b097455b..fa6eda426d6 100644
--- a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-patterns.md
+++ b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-patterns.md
@@ -4,6 +4,8 @@ title: "Agentic Patterns"
linkTitle: "Agentic Patterns"
weight: 50
description: "Common design patterns and use cases for building agentic systems"
+aliases:
+ - /developing-applications/dapr-agents/dapr-agents-patterns
---
Dapr Agents simplify the implementation of agentic systems, from simple augmented LLMs to fully autonomous agents in enterprise environments. The following sections describe several application patterns that can benefit from Dapr Agents.
@@ -362,7 +364,19 @@ The Durable Agent also enables the "headless agents" approach where autonomous s
**Implementation with Dapr Agents:**
```python
+import asyncio
+
from dapr_agents import DurableAgent
+from dapr_agents.agents.configs import (
+ AgentExecutionConfig,
+ AgentMemoryConfig,
+ AgentPubSubConfig,
+ AgentRegistryConfig,
+ AgentStateConfig,
+)
+from dapr_agents.memory import ConversationDaprStateMemory
+from dapr_agents.storage.daprstores.stateservice import StateStoreService
+from dapr_agents.workflow.runners import AgentRunner
travel_planner = DurableAgent(
name="TravelBuddy",
@@ -371,23 +385,56 @@ travel_planner = DurableAgent(
instructions=[
"Find flights to destinations",
"Remember user preferences",
- "Provide clear flight info"
+ "Provide clear flight info",
],
tools=[search_flights],
- message_bus_name="messagepubsub",
- state_store_name="workflowstatestore",
- state_key="workflow_state",
- agents_registry_store_name="workflowstatestore",
- agents_registry_key="agents_registry",
+ pubsub=AgentPubSubConfig(
+ pubsub_name="messagepubsub",
+ agent_topic="travel.requests",
+ broadcast_topic="travel.broadcast",
+ ),
+ state=AgentStateConfig(
+ store=StateStoreService(store_name="workflowstatestore"),
+ ),
+ registry=AgentRegistryConfig(
+ store=StateStoreService(store_name="registrystatestore"),
+ team_name="travel-team",
+ ),
+ execution=AgentExecutionConfig(max_iterations=3),
+ memory=AgentMemoryConfig(
+ store=ConversationDaprStateMemory(
+ store_name="conversationstore",
+ session_id="travel-session",
+ )
+ ),
)
+
+async def main():
+ runner = AgentRunner()
+ try:
+ result = await runner.run(
+ travel_planner,
+ payload={"task": "Find weekend flights to Paris"},
+ )
+ print(result)
+ finally:
+ runner.shutdown(travel_planner)
+
+asyncio.run(main())
```
The implementation follows Dapr's sidecar architecture model, where all infrastructure concerns are handled by the Dapr runtime:
- **Persistent Memory** - Agent state is stored in Dapr's state store, surviving process crashes
- **Workflow Orchestration** - All agent interactions managed through Dapr's workflow system
-- **Service Exposure** - REST endpoints for workflow management come out of the box
-- **Pub/Sub Input/Output** - Event-driven messaging through Dapr's pub/sub system for seamless integration
+- **Service Exposure** - `AgentRunner.serve()` exposes REST endpoints (e.g., `POST /run`) that schedule the agent's `@workflow_entry`
+- **Pub/Sub Input/Output** - `AgentRunner.subscribe()` scans the agent for `@message_router` methods and wires the configured topics with schema validation
+
+The Durable Agent enables the concept of "headless agents" - autonomous systems that operate without direct user interaction. Depending on the scenario, you can:
+
+1. **Run** durable workflows programmatically (`runner.run` as shown above)
+2. **Subscribe** the agent to topics so other services can trigger it via pub/sub (`runner.subscribe`)
+3. **Serve** the agent behind a FastAPI app with built-in `/run` and status endpoints (`runner.serve`)
-The Durable Agent enables the concept of "headless agents" - autonomous systems that operate without direct user interaction. Dapr's Durable Agent exposes both REST and Pub/Sub APIs, making it ideal for long-running operations that are triggered by other applications or external events. This allows agents to run in the background, processing requests asynchronously and integrating seamlessly into larger distributed systems.
+These options make it easy to process requests asynchronously and integrate seamlessly into larger distributed systems.
## Choosing the Right Pattern
@@ -397,4 +444,4 @@ The journey from simple agentic workflows to fully autonomous agents represents
- **Start with simpler patterns** like Augmented LLM and Prompt Chaining for well-defined tasks where predictability is crucial
- **Progress to more dynamic patterns** like Parallelization and Orchestrator-Workers as your needs grow more complex
- **Consider fully autonomous agents** only for open-ended tasks where the benefits of flexibility outweigh the need for strict control
-
\ No newline at end of file
+
diff --git a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-quickstarts.md b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-quickstarts.md
similarity index 79%
rename from daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-quickstarts.md
rename to daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-quickstarts.md
index cf35ac09c72..663eac16140 100644
--- a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-quickstarts.md
+++ b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-quickstarts.md
@@ -4,6 +4,8 @@ title: "Quickstarts"
linkTitle: "Quickstarts"
weight: 70
description: "Get started with Dapr Agents through practical step-by-step examples"
+aliases:
+ - /developing-applications/dapr-agents/dapr-agents-quickstarts
---
[Dapr Agents Quickstarts](https://github.com/dapr/dapr-agents/tree/main/quickstarts) demonstrate how to use Dapr Agents to build applications with LLM-powered autonomous agents and event-driven workflows. Each quickstart builds upon the previous one, introducing new concepts incrementally.
@@ -17,12 +19,12 @@ description: "Get started with Dapr Agents through practical step-by-step exampl
| Scenario | What You'll Learn |
|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------|
-| [Hello World](https://github.com/dapr/dapr-agents/tree/main/quickstarts/01-hello-world) A rapid introduction that demonstrates core Dapr Agents concepts through simple, practical examples. | - **Basic LLM Usage**: Simple text generation with OpenAI models - **Creating Agents**: Building agents with custom tools in under 20 lines of code
- **Simple Workflows**: Setting up multi-step LLM processes |
-| [LLM Call with Dapr Chat Client](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02_llm_call_dapr) Explore interaction with Language Models through Dapr Agents' `DaprChatClient`, featuring basic text generation with plain text prompts and templates. | - **Text Completion**: Generating responses to prompts - **Swapping LLM providers**: Switching LLM backends without application code change - **Resilience**: Setting timeout, retry and circuit-breaking - **PII Obfuscation**: Automatically detect and mask sensitive user information |
-| [LLM Call with OpenAI Client](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02_llm_call_open_ai) Leverage native LLM client libraries with Dapr Agents using the OpenAI Client for chat completion, audio processing, and embeddings. | - **Text Completion**: Generating responses to prompts - **Structured Outputs**: Converting LLM responses to Pydantic objects
*Note: Other quickstarts for specific clients are available for [Elevenlabs](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02_llm_call_elevenlabs), [Hugging Face](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02_llm_call_hugging_face), and [Nvidia](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02_llm_call_nvidia).* |
-| [Agent Tool Call](https://github.com/dapr/dapr-agents/tree/main/quickstarts/03-agent-tool-call) Build your first AI agent with custom tools by creating a practical weather assistant that fetches information and performs actions. | - **Tool Definition**: Creating reusable tools with the `@tool` decorator - **Agent Configuration**: Setting up agents with roles, goals, and tools - **Function Calling**: Enabling LLMs to execute Python functions |
+| [Hello World](https://github.com/dapr/dapr-agents/tree/main/quickstarts/01-hello-world) A rapid introduction that demonstrates core Dapr Agents concepts through simple, practical examples. | - **Basic LLM Usage**: Simple text generation with OpenAI models - **Creating Agents**: Building agents with custom tools in under 20 lines of code
- **Simple Workflows**: Setting up multi-step LLM processes - **DurableAgent Hosting**: Learn `AgentRunner.run`, `AgentRunner.subscribe`, and `AgentRunner.serve` using the `03_durable_agent_*.py` samples |
+| [LLM Call with Dapr Chat Client](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02-llm-call-dapr) Explore interaction with Language Models through Dapr Agents' `DaprChatClient`, featuring basic text generation with plain text prompts and templates. | - **Text Completion**: Generating responses to prompts - **Swapping LLM providers**: Switching LLM backends without application code change - **Resilience**: Setting timeout, retry and circuit-breaking - **PII Obfuscation**: Automatically detect and mask sensitive user information |
+| [LLM Call with OpenAI Client](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02-llm-call-open-ai) Leverage native LLM client libraries with Dapr Agents using the OpenAI Client for chat completion, audio processing, and embeddings. | - **Text Completion**: Generating responses to prompts - **Structured Outputs**: Converting LLM responses to Pydantic objects
*Note: Other quickstarts for specific clients are available for [Elevenlabs](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02-llm-call-elevenlabs), [Hugging Face](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02-llm-call-hugging-face), and [Nvidia](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02-llm-call-nvidia).* |
+| Standalone & Durable Agents [Standalone Agent Tool Call](https://github.com/dapr/dapr-agents/tree/main/quickstarts/03-standalone-agent-tool-call) · [Durable Agent Tool Call](https://github.com/dapr/dapr-agents/tree/main/quickstarts/03-durable-agent-tool-call) | - **Standalone Agents**: Build conversational agents with tools in under 20 lines using the `Agent` class - **Durable Agents**: Upgrade to workflow-backed `DurableAgent` instances with `AgentRunner.run/subscribe/serve` - **Tool Definition**: Reuse tools with the `@tool` decorator and structured args models - **Function Calling**: Let LLMs invoke Python functions safely |
| [Agentic Workflow](https://github.com/dapr/dapr-agents/tree/main/quickstarts/04-llm-based-workflows) Dive into stateful workflows with Dapr Agents by orchestrating sequential and parallel tasks through powerful workflow capabilities. | - **LLM-powered Tasks**: Using language models in workflows - **Task Chaining**: Creating resilient multi-step processes executing in sequence - **Fan-out/Fan-in**: Executing activities in parallel; then synchronizing these activities until all preceding activities have completed |
| [Multi-Agent Workflows](https://github.com/dapr/dapr-agents/tree/main/quickstarts/05-multi-agent-workflows) Explore advanced event-driven workflows featuring a Lord of the Rings themed multi-agent system where autonomous agents collaborate to solve problems. | - **Multi-agent Systems**: Creating a network of specialized agents - **Event-driven Architecture**: Implementing pub/sub messaging between agents - **Workflow Orchestration**: Coordinating agents through different selection strategies|
| [Multi-Agent Workflow on Kubernetes](https://github.com/dapr/dapr-agents/tree/main/quickstarts/05-multi-agent-workflow-k8s) Run multi-agent workflows in Kubernetes, demonstrating deployment and orchestration of event-driven agent systems in a containerized environment. | - **Kubernetes Deployment**: Running agents on Kubernetes - **Container Orchestration**: Managing agent lifecycles with K8s - **Service Communication**: Inter-agent communication in K8s |
| [Document Agent with Chainlit](https://github.com/dapr/dapr-agents/tree/main/quickstarts/06-document-agent-chainlit) Create a conversational agent with an operational UI that can upload, and learn unstructured documents while retaining long-term memory. | - **Conversational Document Agent**: Upload and converse over unstructured documents - **Cloud Agnostic Storage**: Upload files to multiple storage providers - **Conversation Memory Storage**: Persists conversation history using external storage. |
-| [Data Agent with MCP and Chainlit](https://github.com/dapr/dapr-agents/tree/main/quickstarts/08-data-agent-mcp-chainlit) Build a conversational agent over a Postgres database using Model Composition Protocol (MCP) with a ChatGPT-like interface. | - **Database Querying**: Natural language queries to relational databases - **MCP Integration**: Connecting to databases without DB-specific code - **Data Analysis**: Complex data analysis through conversation |
+| [Data Agent with MCP and Chainlit](https://github.com/dapr/dapr-agents/tree/main/quickstarts/08-data-agent-mcp-chainlit) Build a conversational agent over a Postgres database using Model Context Protocol (MCP) with a ChatGPT-like interface. | - **Database Querying**: Natural language queries to relational databases - **MCP Integration**: Connecting to databases without DB-specific code - **Data Analysis**: Complex data analysis through conversation |
\ No newline at end of file
diff --git a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-why.md b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-why.md
similarity index 95%
rename from daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-why.md
rename to daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-why.md
index a65ad4d8502..afb32bdbf2d 100644
--- a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-why.md
+++ b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-why.md
@@ -4,6 +4,8 @@ title: "Why Dapr Agents"
linkTitle: "Why Dapr Agents"
weight: 30
description: "Understanding the benefits and use cases for Dapr Agents"
+aliases:
+ - /developing-applications/dapr-agents/dapr-agents-why
---
Dapr Agents is an open-source framework for building and orchestrating LLM-based autonomous agents that leverages Dapr's proven distributed systems foundation. Unlike other agentic frameworks that require developers to build infrastructure from scratch, Dapr Agents enables teams to focus on agent intelligence by providing enterprise-grade scalability, state management, and messaging capabilities out of the box. This approach eliminates the complexity of recreating distributed system fundamentals while delivering agentic workflows powered by Dapr.
@@ -102,11 +104,11 @@ Dapr Agents uses a [durable-execution workflow engine]({{% ref workflow-overview
Dapr Agents builds on Dapr's Workflow API, which represents each agent as an actor, a single unit of compute and state that is thread-safe and natively distributed. This design enables a scale-to-zero architecture that minimizes infrastructure costs, making AI adoption accessible to organizations of all sizes. The underlying virtual actor model allows thousands of agents to run on demand on a single machine with low latency when scaling from zero. When unused, agents are reclaimed by the system but retain their state until needed again. This design eliminates the trade-off between performance and resource efficiency.
-### Data-Centric AI Agents
+### Data-centric AI agents
-With built-in connectivity to over 50 enterprise data sources, Dapr Agents efficiently handles structured and unstructured data. From basic [PDF extraction]({{% ref "/developing-applications/dapr-agents/dapr-agents-integrations.md" %}}) to large-scale database interactions, it enables data-driven AI workflows with minimal code changes. Dapr's [bindings]({{% ref bindings-overview.md %}}) and [state stores]({{% ref supported-state-stores.md %}}), along with MCP support, provide access to numerous data sources for agent data ingestion.
+With built-in connectivity to over 50 enterprise data sources, Dapr Agents efficiently handles structured and unstructured data. From basic [PDF extraction]({{% ref "/developing-ai/dapr-agents/dapr-agents-integrations.md" %}}) to large-scale database interactions, it enables data-driven AI workflows with minimal code changes. Dapr's [bindings]({{% ref bindings-overview.md %}}) and [state stores]({{% ref supported-state-stores.md %}}), along with MCP support, provide access to numerous data sources for agent data ingestion.
-### Accelerated Development
+### Accelerated development
Dapr Agents provides AI features that give developers a complete API surface to tackle common problems, including:
diff --git a/daprdocs/content/en/developing-applications/building-blocks/actors/actors-timers-reminders.md b/daprdocs/content/en/developing-applications/building-blocks/actors/actors-timers-reminders.md
index 736547e91d6..8d174a8ecc3 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/actors/actors-timers-reminders.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/actors/actors-timers-reminders.md
@@ -187,6 +187,66 @@ To use protobuf serialization for actor reminders on self-hosted, use the follow
--max-api-level=20
```
+## Managing reminders with the CLI
+
+Actor reminders are persisted in the Scheduler.
+You can manage them with the dapr scheduler CLI commands.
+
+#### List actor reminders
+
+```bash
+dapr scheduler list --filter actor
+NAME BEGIN COUNT LAST TRIGGER
+actor/MyActorType/actorid1/test1 -3.89s 1 2025-10-03T16:58:55Z
+actor/MyActorType/actorid2/test2 -3.89s 1 2025-10-03T16:58:55Z
+```
+
+#### Get reminder details
+
+```bash
+dapr scheduler get actor/MyActorType/actorid1/test1 -o yaml
+```
+
+#### Delete reminders
+
+Delete a single reminder:
+
+```bash
+dapr scheduler delete actor/MyActorType/actorid1/test1
+```
+
+Delete all reminders for a given actor type:
+
+```bash
+dapr scheduler delete-all actor/MyActorType
+```
+
+Delete all reminders for a specific actor instance:
+
+```bash
+dapr scheduler delete-all actor/MyActorType/actorid1
+```
+
+#### Backup and restore reminders
+
+Export all reminders:
+
+```bash
+dapr scheduler export -o reminders-backup.bin
+```
+
+Restore from a backup file:
+
+```bash
+dapr scheduler import -f reminders-backup.bin
+```
+
+#### Summary
+
+- Reminders are stored in the Dapr Scheduler, not in the app.
+- Create reminders via the Actors API.
+- Manage existing reminders (list, get, delete, backup/restore) using the `dapr scheduler` CLI.
+
## Next steps
{{< button text="Configure actor runtime behavior >>" page="actors-runtime-config.md" >}}
diff --git a/daprdocs/content/en/developing-applications/building-blocks/actors/namespaced-actors.md b/daprdocs/content/en/developing-applications/building-blocks/actors/namespaced-actors.md
index b51ffc79f2b..3584e9a7160 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/actors/namespaced-actors.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/actors/namespaced-actors.md
@@ -25,7 +25,7 @@ In self-hosted mode, you can specify the namespace for a Dapr instance by settin
{{% /tab %}}
{{% tab "Kubernetes" %}}
-On Kubernetes, you can create and configure namepaces when deploying actor applications. For example, start with the following `kubectl` commands:
+On Kubernetes, you can create and configure namespaces when deploying actor applications. For example, start with the following `kubectl` commands:
```bash
kubectl create namespace namespace-actorA
diff --git a/daprdocs/content/en/developing-applications/building-blocks/bindings/howto-bindings.md b/daprdocs/content/en/developing-applications/building-blocks/bindings/howto-bindings.md
index 7f77f75b506..d6d46cf8102 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/bindings/howto-bindings.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/bindings/howto-bindings.md
@@ -242,7 +242,7 @@ func main() {
{{% /tab %}}
-{{% tab "JavaScript%}}" %}}
+{{% tab "JavaScript" %}}
```javascript
//dependencies
diff --git a/daprdocs/content/en/developing-applications/building-blocks/conversation/conversation-overview.md b/daprdocs/content/en/developing-applications/building-blocks/conversation/conversation-overview.md
index 4f98d726a12..7483a41c296 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/conversation/conversation-overview.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/conversation/conversation-overview.md
@@ -14,11 +14,16 @@ Dapr's conversation API reduces the complexity of securely and reliably interact
-In additon to enabling critical performance and security functionality (like [prompt caching]({{% ref "#prompt-caching" %}}) and [PII scrubbing]({{% ref "#personally-identifiable-information-pii-obfuscation" %}})), you can also pair the conversation API with Dapr functionalities, like:
-- Resiliency circuit breakers and retries to circumvent limit and token errors, or
-- Middleware to authenticate requests coming to and from the LLM
+In addition to enabling critical performance and security functionality (like [prompt caching]({{% ref "#prompt-caching" %}}) and [PII scrubbing]({{% ref "#personally-identifiable-information-pii-obfuscation" %}})), the conversation API also provides:
-Dapr provides observability by issuing metrics for your LLM interactions.
+- **Tool calling capabilities** that allow LLMs to interact with external functions and APIs, enabling more sophisticated AI applications
+- **OpenAI-compatible interface** for seamless integration with existing AI workflows and tools
+
+You can also pair the conversation API with Dapr functionalities, like:
+
+- Resiliency policies including circuit breakers to handle repeated errors, timeouts to safeguard against slow responses, and retries for temporary network failures
+- Observability with metrics and distributed tracing using OpenTelemetry and Zipkin
+- Middleware to authenticate requests to and from the LLM
## Features
@@ -26,7 +31,7 @@ The following features are out-of-the-box for [all the supported conversation co
### Prompt caching
-Prompt caching optimizes performance by storing and reusing prompts that are often repeated across multiple API calls. To significantly reduce latency and cost, Dapr stores frequent prompts in a local cache to be reused by your cluster, pod, or other, instead of reprocessing the information for every new request.
+The Conversation API includes a built-in caching mechanism (enabled by the cacheTTL parameter) that optimizes both performance and cost by storing previous model responses for faster delivery to repetitive requests. This is particularly valuable in scenarios where similar prompt patterns occur frequently. When caching is enabled, Dapr creates a deterministic hash of the prompt text and all configuration parameters, checks if a valid cached response exists for this hash within the time period (for example, 10 minutes), and returns the cached response immediately if found. If no match exists, Dapr makes the API call and stores the result. This eliminates external API calls, lowers latency, and avoids provider charges for repeated requests. The cache exists entirely within your runtime environment, with each Dapr sidecar maintaining its own local cache.
### Personally identifiable information (PII) obfuscation
@@ -45,13 +50,24 @@ The PII scrubber obfuscates the following user information:
- SHA-256 hex
- MD5 hex
+### Tool calling support
+
+The conversation API supports advanced tool calling capabilities that allow LLMs to interact with external functions and APIs. This enables you to build sophisticated AI applications that can:
+
+- Execute custom functions based on user requests
+- Integrate with external services and databases
+- Provide dynamic, context-aware responses
+- Create multi-step workflows and automation
+
+Tool calling follows [OpenAI's function calling format](https://platform.openai.com/docs/guides/function-calling), making it easy to integrate with existing AI development workflows and tools.
+
## Demo
Watch the demo presented during [Diagrid's Dapr v1.15 celebration](https://www.diagrid.io/videos/dapr-1-15-deep-dive) to see how the conversation API works using the .NET SDK.
{{< youtube id=NTnwoDhHIcQ start=5444 >}}
-## Try out conversation
+## Try out conversation API
### Quickstarts and tutorials
diff --git a/daprdocs/content/en/developing-applications/building-blocks/conversation/howto-conversation-layer.md b/daprdocs/content/en/developing-applications/building-blocks/conversation/howto-conversation-layer.md
index 99b7803b8d6..e79af9f190e 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/conversation/howto-conversation-layer.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/conversation/howto-conversation-layer.md
@@ -56,7 +56,7 @@ spec:
## Connect the conversation client
-The following examples use an HTTP client to send a POST request to Dapr's sidecar HTTP endpoint. You can also use [the Dapr SDK client instead]({{% ref "#related-links" %}}).
+The following examples use the Dapr SDK client to interact with LLMs.
{{< tabpane text=true >}}
@@ -83,7 +83,7 @@ var response = await conversationClient.ConverseAsync("conversation",
DaprConversationRole.Generic)
});
-Console.WriteLine("Received the following from the LLM:");
+Console.WriteLine("conversation output: ");
foreach (var resp in response.Outputs)
{
Console.WriteLine($"\t{resp.Result}");
@@ -92,6 +92,77 @@ foreach (var resp in response.Outputs)
{{% /tab %}}
+
+{{% tab "Java" %}}
+
+```java
+//dependencies
+import io.dapr.client.DaprClientBuilder;
+import io.dapr.client.DaprPreviewClient;
+import io.dapr.client.domain.ConversationInput;
+import io.dapr.client.domain.ConversationRequest;
+import io.dapr.client.domain.ConversationResponse;
+import reactor.core.publisher.Mono;
+
+import java.util.List;
+
+public class Conversation {
+
+ public static void main(String[] args) {
+ String prompt = "Please write a witty haiku about the Dapr distributed programming framework at dapr.io";
+
+ try (DaprPreviewClient client = new DaprClientBuilder().buildPreviewClient()) {
+ System.out.println("Input: " + prompt);
+
+ ConversationInput daprConversationInput = new ConversationInput(prompt);
+
+ // Component name is the name provided in the metadata block of the conversation.yaml file.
+ Mono<ConversationResponse> responseMono = client.converse(new ConversationRequest("echo",
+ List.of(daprConversationInput))
+ .setContextId("contextId")
+ .setScrubPii(true).setTemperature(1.1d));
+ ConversationResponse response = responseMono.block();
+ System.out.printf("conversation output: %s", response.getConversationOutputs().get(0).getResult());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+}
+```
+
+{{% /tab %}}
+
+
+{{% tab "Python" %}}
+
+```python
+#dependencies
+from dapr.clients import DaprClient
+from dapr.clients.grpc._request import ConversationInput
+
+#code
+with DaprClient() as d:
+ inputs = [
+ ConversationInput(content="Please write a witty haiku about the Dapr distributed programming framework at dapr.io", role='user', scrub_pii=True),
+ ]
+
+ metadata = {
+ 'model': 'modelname',
+ 'key': 'authKey',
+ 'cacheTTL': '10m',
+ }
+
+ response = d.converse_alpha1(
+ name='echo', inputs=inputs, temperature=0.7, context_id='chat-123', metadata=metadata
+ )
+
+ for output in response.outputs:
+ print(f'conversation output: {output.result}')
+```
+
+{{% /tab %}}
+
+
{{% tab "Go" %}}
@@ -189,21 +260,40 @@ dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resour
{{% /tab %}}
-
-{{% tab "Go" %}}
+
+{{% tab "Java" %}}
```bash
-dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resources-path ./config -- go run ./main.go
+
+dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resources-path ./config -- mvn spring-boot:run
```
-**Expected output**
+{{% /tab %}}
+
+
+{{% tab "Python" %}}
+
+```bash
+
+dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resources-path ./config -- python3 app.py
```
- - '== APP == conversation output: Please write a witty haiku about the Dapr distributed programming framework at dapr.io'
+
+{{% /tab %}}
+
+
+
+{{% tab "Go" %}}
+
+```bash
+dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resources-path ./config -- go run ./main.go
```
+
{{% /tab %}}
+
+
{{% tab "Rust" %}}
@@ -211,17 +301,17 @@ dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resour
dapr run --app-id=conversation --resources-path ./config --dapr-grpc-port 3500 -- cargo run --example conversation
```
+{{% /tab %}}
+
+{{< /tabpane >}}
+
+
**Expected output**
```
- - 'conversation input: hello world'
- - 'conversation output: hello world'
+ - '== APP == conversation output: Please write a witty haiku about the Dapr distributed programming framework at dapr.io'
```
-{{% /tab %}}
-
-{{< /tabpane >}}
-
## Advanced features
The conversation API supports the following features:
@@ -230,9 +320,11 @@ The conversation API supports the following features:
1. **PII scrubbing:** Allows for the obfuscation of data going in and out of the LLM.
+1. **Tool calling:** Allows LLMs to interact with external functions and APIs.
+
To learn how to enable these features, see the [conversation API reference guide]({{% ref conversation_api %}}).
-## Related links
+## Conversation API examples in Dapr SDK repositories
Try out the conversation API using the full examples provided in the supported SDK repos.
@@ -246,7 +338,23 @@ Try out the conversation API using the full examples provided in the supported S
{{% /tab %}}
-
+
+
+{{% tab "Java" %}}
+
+[Dapr conversation example with the Java SDK](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/conversation)
+
+{{% /tab %}}
+
+
+
+{{% tab "Python" %}}
+
+[Dapr conversation example with the Python SDK](https://github.com/dapr/python-sdk/tree/main/examples/conversation)
+
+{{% /tab %}}
+
+
{{% tab "Go" %}}
[Dapr conversation example with the Go SDK](https://github.com/dapr/go-sdk/tree/main/examples/conversation)
@@ -264,6 +372,6 @@ Try out the conversation API using the full examples provided in the supported S
## Next steps
-
+- [Conversation quickstart]({{% ref conversation-quickstart %}})
- [Conversation API reference guide]({{% ref conversation_api %}})
- [Available conversation components]({{% ref supported-conversation %}})
diff --git a/daprdocs/content/en/developing-applications/building-blocks/jobs/howto-schedule-and-handle-triggered-jobs.md b/daprdocs/content/en/developing-applications/building-blocks/jobs/howto-schedule-and-handle-triggered-jobs.md
index 12e50dda952..2057d25ff9c 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/jobs/howto-schedule-and-handle-triggered-jobs.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/jobs/howto-schedule-and-handle-triggered-jobs.md
@@ -56,7 +56,7 @@ internal sealed record BackupJobData([property: JsonPropertyName("task")] string
internal sealed record BackupMetadata([property: JsonPropertyName("DBName")]string DatabaseName, [property: JsonPropertyName("BackupLocation")] string BackupLocation);
```
-Next, set up a handler as part of your application setup that will be called anytime a job is triggered on your
+Next, set up a handler as part of your application setup that will be called any time a job is triggered on your
application. It's the responsibility of this handler to identify how jobs should be processed based on the job name provided.
This works by registering a handler with ASP.NET Core at `/job/`, where `` is parameterized and
diff --git a/daprdocs/content/en/developing-applications/building-blocks/jobs/jobs-features-concepts.md b/daprdocs/content/en/developing-applications/building-blocks/jobs/jobs-features-concepts.md
index fbeb0f50a0a..dcd49a80aae 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/jobs/jobs-features-concepts.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/jobs/jobs-features-concepts.md
@@ -119,3 +119,60 @@ or the not-before time from which the schedule should take effect
The `DueTime` and `Ttl` fields will reflect an RFC 3339 timestamp value reflective of the time zone provided when the job was
originally scheduled. If no time zone was provided, these values indicate the time zone used by the server running
Dapr.
+
+### Managing jobs
+
+While jobs are created via API calls, you can manage (list, inspect, delete, back up, and restore) jobs by using the dapr scheduler CLI commands.
+
+#### List jobs
+
+```bash
+dapr scheduler list --filter app
+NAME BEGIN COUNT LAST TRIGGER
+app/my-app/my-job -3.89s 1 2025-10-03T16:58:55Z
+app/my-app/another-job -3.89s 1 2025-10-03T16:58:55Z
+```
+
+```bash
+dapr scheduler list -o wide
+NAMESPACE NAME BEGIN EXPIRATION SCHEDULE DUE TIME TTL REPEATS COUNT LAST TRIGGER
+default app/my-app/my-job 2025-10-03T16:58:55Z @every 5s 2025-10-03T17:58:55+01:00 100 1 2025-10-03T16:58:55Z
+```
+
+```bash
+dapr scheduler get app/my-app/my-job -o yaml
+```
+
+#### Delete jobs
+
+Delete a specific job:
+
+```bash
+dapr scheduler delete app/my-app/my-job
+```
+
+Delete all jobs for an app:
+
+```bash
+dapr scheduler delete-all app/my-app
+```
+
+#### Backup and restore jobs
+
+Export all jobs:
+
+```bash
+dapr scheduler export -o jobs-backup.bin
+```
+
+Import them later:
+
+```bash
+dapr scheduler import -f jobs-backup.bin
+```
+
+#### Summary
+
+- Use the Jobs API to create or update jobs from applications.
+- Use the dapr scheduler CLI to view, inspect, back up, or delete jobs.
+- Jobs are stored in the Dapr Scheduler, ensuring reliability across restarts and deployments.
diff --git a/daprdocs/content/en/developing-applications/building-blocks/pubsub/howto-route-messages.md b/daprdocs/content/en/developing-applications/building-blocks/pubsub/howto-route-messages.md
index 578e3081a5e..d5645ed2566 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/pubsub/howto-route-messages.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/pubsub/howto-route-messages.md
@@ -271,7 +271,7 @@ Match deposits greater than $10,000:
event.type == "deposit" && int(event.data.amount) > 10000
```
{{% alert title="Note" color="primary" %}}
-By default the numeric values are written as double-precision floating-point. There are no automatic arithmetic conversions for numeric values. In this case, if `event.data.amount` is not cast as integer, the match is not performed. For more information, see the [CEL documentation](https://github.com/google/cel-spec/blob/master/doc/langdef).
+By default the numeric values are written as double-precision floating-point. There are no automatic arithmetic conversions for numeric values. In this case, if `event.data.amount` is not cast as integer, the match is not performed. For more information, see the [CEL documentation](https://github.com/google/cel-spec/blob/master/doc/langdef.md).
{{% /alert %}}
Match multiple versions of a message:
diff --git a/daprdocs/content/en/developing-applications/building-blocks/pubsub/pubsub-raw.md b/daprdocs/content/en/developing-applications/building-blocks/pubsub/pubsub-raw.md
index 1ac75613a6f..1cbfb72c259 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/pubsub/pubsub-raw.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/pubsub/pubsub-raw.md
@@ -101,6 +101,36 @@ $app->run(function(\DI\FactoryInterface $factory) {
{{% /tab %}}
+{{% tab "Java" %}}
+
+```java
+@RestController
+@RequestMapping("/publish")
+public class PublishController {
+
+ @Inject
+ DaprClient client;
+
+ @PostMapping
+ public void sendRawMessage() {
+
+ Map<String, String> metadata = new HashMap<>();
+ metadata.put("content-type", "application/json");
+ metadata.put("rawPayload", "true");
+
+ Message message = new Message(UUID.randomUUID().toString(), "Hello from Dapr");
+
+ client.publishEvent(
+ "pubsub", // pubsub name
+ "messages", // topic name
+ message, // message data
+ metadata) // metadata
+ .block(); // wait for completion
+ }
+}
+```
+{{% /tab %}}
+
{{< /tabpane >}}
## Subscribing to raw messages
@@ -216,6 +246,32 @@ $app->start();
```
{{% /tab %}}
+{{% tab "Java" %}}
+```java
+@RequestMapping("/consumer")
+@RestController
+public class MessageConsumerController {
+
+ @PostMapping
+ @ResponseStatus(HttpStatus.OK)
+ @Topic(pubsubName = "pubsub", name = "messages", metadata = "{\"rawPayload\":\"true\", \"content-type\": \"application/json\"}")
+ public void consume(@RequestBody Message message) {
+ System.out.println("Message received: " + message);
+ }
+
+ @PostMapping("/another")
+ @ResponseStatus(HttpStatus.OK)
+ @Topic(pubsubName = "pubsub", name = "another-topic", metadata = """
+ {"rawPayload": "true", "content-type": "application/json"}
+ """) // Using Java 15 text block
+ public void consumeAnother(@RequestBody Message message) {
+ System.out.println("Message received: " + message);
+ }
+}
+
+```
+{{% /tab %}}
+
{{< /tabpane >}}
## Declaratively subscribe to raw events
diff --git a/daprdocs/content/en/developing-applications/building-blocks/state-management/howto-outbox.md b/daprdocs/content/en/developing-applications/building-blocks/state-management/howto-outbox.md
index f99f44c3b88..01762e53c4e 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/state-management/howto-outbox.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/state-management/howto-outbox.md
@@ -16,23 +16,72 @@ For example, you can use the outbox pattern to:
With Dapr's outbox support, you can notify subscribers when an application's state is created or updated when calling Dapr's [transactions API]({{% ref "state_api.md#state-transactions" %}}).
-The diagram below is an overview of how the outbox feature works:
+The diagram below is an overview of how the outbox feature works at a high level:
1) Service A saves/updates state to the state store using a transaction.
2) A message is written to the broker under the same transaction. When the message is successfully delivered to the message broker, the transaction completes, ensuring the state and message are transacted together.
3) The message broker delivers the message topic to any subscribers - in this case, Service B.
-
+
+## How outbox works under the hood
+
+Dapr outbox processes requests in two flows: the user request flow and the background message flow. Together, they guarantee that state and events stay consistent.
+
+
+
+This is the sequence of interactions:
+
+1. An application calls the Dapr State Management API to write state transactionally using the transactional methods.
+ This is the entry point where business data, such as an order or profile update, is submitted for persistence.
+
+2. Dapr publishes an intent message with a unique transaction ID to an internal outbox topic.
+ This durable record ensures the event intent exists before any database commit happens.
+
+3. The state and a transaction marker are written atomically in the same state store.
+ Both the business data and the marker are committed in the same transaction, preventing partial writes.
+
+4. The application receives a success response after the transaction commits.
+ At this point, the application can continue, knowing state is saved and the event intent is guaranteed.
+
+5. A background subscriber reads the intent message.
+ When outbox is enabled, Dapr starts consumers that process the internal outbox topic.
+
+6. The subscriber verifies the transaction marker in the state store.
+ This check confirms that the database commit was successful before publishing externally.
+
+7. The verified business event is published to the external pub/sub topic.
+ The event is sent to the configured broker (Kafka, RabbitMQ, etc.) where other services can consume it.
+
+8. The marker is cleaned up (deleted) from the state store.
+ This prevents unbounded growth in the database once the event has been successfully delivered.
+
+9. The message is acknowledged and removed from the internal topic.
+ If publishing or cleanup fails, Dapr retries, ensuring reliable at-least-once delivery.
+
## Requirements
-The outbox feature can be used with using any [transactional state store]({{% ref supported-state-stores %}}) supported by Dapr. All [pub/sub brokers]({{% ref supported-pubsub %}}) are supported with the outbox feature.
+1. The outbox feature requires a [transactional state store]({{% ref supported-state-stores %}}) supported by Dapr.
+ [Learn more about the transactional methods you can use.]({{% ref "howto-get-save-state.md#perform-state-transactions" %}})
-[Learn more about the transactional methods you can use.]({{% ref "howto-get-save-state.md#perform-state-transactions" %}})
+2. Any [pub/sub broker]({{% ref supported-pubsub %}}) supported by Dapr can be used with the outbox feature.
-{{% alert title="Note" color="primary" %}}
-Message brokers that work with the competing consumer pattern (for example, [Apache Kafka]({{% ref setup-apache-kafka%}})) are encouraged to reduce the chances of duplicate events.
-{{% /alert %}}
+ {{% alert title="Note" color="primary" %}}
+ Message brokers that support the competing consumer pattern (for example, [Apache Kafka]({{% ref setup-apache-kafka%}})) are recommended to reduce the chance of duplicate events.
+ {{% /alert %}}
+
+3. Internal outbox topic
+ When outbox is enabled, Dapr creates an internal topic using the following naming convention: `{namespace}{appID}{topic}outbox`, where:
+
+ - `namespace`: the Dapr application namespace (if configured)
+ - `appID`: the Dapr application identifier
+ - `topic`: the value specified in the `outboxPublishTopic` metadata
+
+ This way each outbox topic is uniquely identified per application and external topic, preventing routing conflicts in multi-tenant environments.
+
+ {{% alert title="Note" color="primary" %}}
+ Ensure that the topic is created in advance, or Dapr has sufficient permissions to create the topic at startup time.
+ {{% /alert %}}
## Enable the outbox pattern
@@ -132,28 +181,20 @@ DAPR_STORE_NAME = "statestore"
async def main():
client = DaprClient()
- # Define the first state operation to save the value "2"
- op1 = StateItem(
- key="key1",
- value=b"2"
- )
-
- # Define the second state operation to publish the value "3" with metadata
- op2 = StateItem(
- key="key1",
- value=b"3",
- options=StateOptions(
- metadata={
- "outbox.projection": "true"
- }
- )
+ client.execute_state_transaction(
+ store_name=DAPR_STORE_NAME,
+ operations=[
+ # Define the first state operation to save the value "2"
+ TransactionalStateOperation(
+ key='key1', data='2', metadata={'outbox.projection': 'false'}
+ ),
+ # Define the second state operation to publish the value "3" with metadata
+ TransactionalStateOperation(
+ key='key1', data='3', metadata={'outbox.projection': 'true'}
+ ),
+ ],
)
- # Create the list of state operations
- ops = [op1, op2]
-
- # Execute the state transaction
- await client.state.transaction(DAPR_STORE_NAME, operations=ops)
print("State transaction executed.")
```
@@ -281,30 +322,45 @@ public class Main {
public static void main(String[] args) {
try (DaprClient client = new DaprClientBuilder().build()) {
// Define the first state operation to save the value "2"
- StateOperation op1 = new StateOperation<>(
- StateOperationType.UPSERT,
+ State state1 = new State<>(
"key1",
- "2"
+ "2",
+ null, // etag
+ null // concurrency and consistency options
);
// Define the second state operation to publish the value "3" with metadata
Map metadata = new HashMap<>();
metadata.put("outbox.projection", "true");
- StateOperation op2 = new StateOperation<>(
- StateOperationType.UPSERT,
+ State state2 = new State<>(
"key1",
"3",
- metadata
+ null, // etag
+ metadata,
+ null // concurrency and consistency options
+ );
+
+ TransactionalStateOperation op1 = new TransactionalStateOperation<>(
+ TransactionalStateOperation.OperationType.UPSERT, state1
+ );
+
+ TransactionalStateOperation op2 = new TransactionalStateOperation<>(
+ TransactionalStateOperation.OperationType.UPSERT, state2
);
- // Create the list of state operations
- List> ops = new ArrayList<>();
+ // Create the list of transaction state operations
+ List<TransactionalStateOperation<String>> ops = new ArrayList<>();
ops.add(op1);
ops.add(op2);
+ // Configure transaction request setting the state store
+ ExecuteStateTransactionRequest transactionRequest = new ExecuteStateTransactionRequest(DAPR_STORE_NAME);
+
+ transactionRequest.setOperations(ops);
+
// Execute the state transaction
- client.executeStateTransaction(DAPR_STORE_NAME, ops).block();
+ client.executeStateTransaction(transactionRequest).block();
System.out.println("State transaction executed.");
} catch (Exception e) {
e.printStackTrace();
@@ -554,39 +610,42 @@ public class StateOperationExample {
executeStateTransaction();
}
- public static void executeStateTransaction() {
- // Build Dapr client
- try (DaprClient daprClient = new DaprClientBuilder().build()) {
-
- // Define the value "2"
- String value = "2";
-
- // Override CloudEvent metadata
- Map metadata = new HashMap<>();
- metadata.put("cloudevent.id", "unique-business-process-id");
- metadata.put("cloudevent.source", "CustomersApp");
- metadata.put("cloudevent.type", "CustomerCreated");
- metadata.put("cloudevent.subject", "123");
- metadata.put("my-custom-ce-field", "abc");
-
- // Define state operations
- List> ops = new ArrayList<>();
- StateOperation op1 = new StateOperation<>(
- StateOperationType.UPSERT,
- "key1",
- value,
- metadata
- );
- ops.add(op1);
-
- // Execute state transaction
- String storeName = "your-state-store-name";
- daprClient.executeStateTransaction(storeName, ops).block();
- System.out.println("State transaction executed.");
- } catch (Exception e) {
- e.printStackTrace();
- }
+ public static void executeStateTransaction() {
+ // Build Dapr client
+ try (DaprClient daprClient = new DaprClientBuilder().build()) {
+
+ // Override CloudEvent metadata
+ Map<String, String> metadata = new HashMap<>();
+ metadata.put("cloudevent.id", "unique-business-process-id");
+ metadata.put("cloudevent.source", "CustomersApp");
+ metadata.put("cloudevent.type", "CustomerCreated");
+ metadata.put("cloudevent.subject", "123");
+ metadata.put("my-custom-ce-field", "abc");
+
+ State<String> state = new State<>(
+ "key1", // Define the key "key1"
+ "value1", // Define the value "value1"
+ null, // etag
+ metadata,
+ null // concurrency and consistency options
+ );
+
+ // Define state operations
+ List<TransactionalStateOperation<String>> ops = new ArrayList<>();
+ TransactionalStateOperation<String> op1 = new TransactionalStateOperation<>(
+ TransactionalStateOperation.OperationType.UPSERT,
+ state
+ );
+ ops.add(op1);
+
+ // Execute state transaction
+ String storeName = "your-state-store-name";
+ daprClient.executeStateTransaction(storeName, ops).block();
+ System.out.println("State transaction executed.");
+ } catch (Exception e) {
+ e.printStackTrace();
}
+ }
}
```
{{% /tab %}}
@@ -682,3 +741,7 @@ The `data` CloudEvent field is reserved for Dapr's use only, and is non-customiz
Watch [this video for an overview of the outbox pattern](https://youtu.be/rTovKpG0rhY?t=1338):
{{< youtube id=rTovKpG0rhY start=1338 >}}
+
+## Next steps
+
+[How Dapr Outbox Eliminates Dual Writes in Distributed Applications](https://www.diagrid.io/blog/how-dapr-outbox-eliminates-dual-writes-in-distributed-applications)
diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md
index ddb92b63ac3..27da1a22be0 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md
@@ -197,10 +197,12 @@ public class DemoWorkflowActivity implements WorkflowActivity {
+### Define workflow activities
+
Define each workflow activity you'd like your workflow to perform. The Activity input can be unmarshalled from the context with `ctx.GetInput`. Activities should be defined as taking a `ctx workflow.ActivityContext` parameter and returning an interface and error.
```go
-func TestActivity(ctx workflow.ActivityContext) (any, error) {
+func BusinessActivity(ctx workflow.ActivityContext) (any, error) {
var input int
if err := ctx.GetInput(&input); err != nil {
return "", err
@@ -211,6 +213,87 @@ func TestActivity(ctx workflow.ActivityContext) (any, error) {
}
```
+### Define the workflow
+
+Define your workflow function with the parameter `ctx *workflow.WorkflowContext` and return any and error. Invoke your defined activities from within your workflow.
+
+```go
+func BusinessWorkflow(ctx *workflow.WorkflowContext) (any, error) {
+ var input int
+ if err := ctx.GetInput(&input); err != nil {
+ return nil, err
+ }
+ var output string
+ if err := ctx.CallActivity(BusinessActivity, workflow.ActivityInput(input)).Await(&output); err != nil {
+ return nil, err
+ }
+ if err := ctx.WaitForExternalEvent("businessEvent", time.Minute*60).Await(&output); err != nil {
+ return nil, err
+ }
+
+ if err := ctx.CreateTimer(time.Second).Await(nil); err != nil {
+ return nil, nil
+ }
+ return output, nil
+}
+```
+
+### Register workflows and activities
+
+Before your application can execute workflows, you must register both the workflow orchestrator and its activities with a workflow registry. This ensures Dapr knows which functions to call when executing your workflow.
+
+```go
+func main() {
+ // Create a workflow registry
+ r := workflow.NewRegistry()
+
+ // Register the workflow orchestrator
+ if err := r.AddWorkflow(BusinessWorkflow); err != nil {
+ log.Fatal(err)
+ }
+ fmt.Println("BusinessWorkflow registered")
+
+ // Register the workflow activities
+ if err := r.AddActivity(BusinessActivity); err != nil {
+ log.Fatal(err)
+ }
+ fmt.Println("BusinessActivity registered")
+
+ // Create workflow client and start worker
+ wclient, err := client.NewWorkflowClient()
+ if err != nil {
+ log.Fatal(err)
+ }
+ fmt.Println("Worker initialized")
+
+ ctx, cancel := context.WithCancel(context.Background())
+ if err = wclient.StartWorker(ctx, r); err != nil {
+ log.Fatal(err)
+ }
+ fmt.Println("runner started")
+
+ // Your application logic continues here...
+ // Example: Start a workflow
+ instanceID, err := wclient.ScheduleWorkflow(ctx, "BusinessWorkflow", workflow.WithInput(1))
+ if err != nil {
+ log.Fatalf("failed to start workflow: %v", err)
+ }
+ fmt.Printf("workflow started with id: %v\n", instanceID)
+
+ // Stop workflow worker when done
+ cancel()
+ fmt.Println("workflow worker successfully shutdown")
+}
+```
+
+**Key points about registration:**
+- Use `workflow.NewRegistry()` to create a workflow registry
+- Use `r.AddWorkflow()` to register workflow functions
+- Use `r.AddActivity()` to register activity functions
+- Use `client.NewWorkflowClient()` to create a workflow client
+- Call `wclient.StartWorker()` to begin processing workflows
+- Use `wclient.ScheduleWorkflow` to schedule a named instance of a workflow
+
[See the Go SDK workflow activity example in context.](https://github.com/dapr/go-sdk/tree/main/examples/workflow/README.md)
{{% /tab %}}
@@ -281,9 +364,9 @@ export default class WorkflowRuntime {
// Register workflow activities
public registerActivity(fn: TWorkflowActivity): WorkflowRuntime {
const name = getFunctionName(fn);
- const activityWrapper = (ctx: ActivityContext, intput: TInput): TOutput => {
+ const activityWrapper = (ctx: ActivityContext, input: TInput): TOutput => {
const wfActivityContext = new WorkflowActivityContext(ctx);
- return fn(wfActivityContext, intput);
+ return fn(wfActivityContext, input);
};
this.worker.addNamedActivity(name, activityWrapper);
return this;
@@ -383,16 +466,16 @@ public class DemoWorkflowWorker {
Define your workflow function with the parameter `ctx *workflow.WorkflowContext` and return any and error. Invoke your defined activities from within your workflow.
```go
-func TestWorkflow(ctx *workflow.WorkflowContext) (any, error) {
+func BusinessWorkflow(ctx *workflow.WorkflowContext) (any, error) {
var input int
if err := ctx.GetInput(&input); err != nil {
return nil, err
}
var output string
- if err := ctx.CallActivity(TestActivity, workflow.ActivityInput(input)).Await(&output); err != nil {
+ if err := ctx.CallActivity(BusinessActivity, workflow.ActivityInput(input)).Await(&output); err != nil {
return nil, err
}
- if err := ctx.WaitForExternalEvent("testEvent", time.Second*60).Await(&output); err != nil {
+ if err := ctx.WaitForExternalEvent("businessEvent", time.Minute*60).Await(&output); err != nil {
return nil, err
}
@@ -583,6 +666,7 @@ def main():
if non_existent_id_error in err._message:
print('Instance Successfully Purged')
+ sleep(10000)
wfr.shutdown()
@@ -602,139 +686,79 @@ if __name__ == '__main__':
- `WorkflowRuntime`: Allows you to register workflows and workflow activities
- `DaprWorkflowContext`: Allows you to [create workflows]({{% ref "#write-the-workflow" %}})
- `WorkflowActivityContext`: Allows you to [create workflow activities]({{% ref "#write-the-workflow-activities" %}})
-- API calls. In the example below, these calls start, terminate, get status, pause, resume, raise event, and purge the workflow.
-
-```javascript
-import { TaskHubGrpcClient } from "@microsoft/durabletask-js";
-import { WorkflowState } from "./WorkflowState";
-import { generateApiTokenClientInterceptors, generateEndpoint, getDaprApiToken } from "../internal/index";
-import { TWorkflow } from "../../types/workflow/Workflow.type";
-import { getFunctionName } from "../internal";
-import { WorkflowClientOptions } from "../../types/workflow/WorkflowClientOption";
-
-/** DaprWorkflowClient class defines client operations for managing workflow instances. */
-
-export default class DaprWorkflowClient {
- private readonly _innerClient: TaskHubGrpcClient;
-
- /** Initialize a new instance of the DaprWorkflowClient.
- */
- constructor(options: Partial = {}) {
- const grpcEndpoint = generateEndpoint(options);
- options.daprApiToken = getDaprApiToken(options);
- this._innerClient = this.buildInnerClient(grpcEndpoint.endpoint, options);
- }
+- API calls. The following example is a simple project consuming the workflow APIs:
- private buildInnerClient(hostAddress: string, options: Partial): TaskHubGrpcClient {
- let innerOptions = options?.grpcOptions;
- if (options.daprApiToken !== undefined && options.daprApiToken !== "") {
- innerOptions = {
- ...innerOptions,
- interceptors: [generateApiTokenClientInterceptors(options), ...(innerOptions?.interceptors ?? [])],
- };
- }
- return new TaskHubGrpcClient(hostAddress, innerOptions);
- }
+```bash
+mkdir my-wf && cd my-wf
+npm init -y
+npm i @dapr/dapr @microsoft/durabletask-js
+npm i -D typescript ts-node @types/node
+```
- /**
- * Schedule a new workflow using the DurableTask client.
- */
- public async scheduleNewWorkflow(
- workflow: TWorkflow | string,
- input?: any,
- instanceId?: string,
- startAt?: Date,
- ): Promise {
- if (typeof workflow === "string") {
- return await this._innerClient.scheduleNewOrchestration(workflow, input, instanceId, startAt);
- }
- return await this._innerClient.scheduleNewOrchestration(getFunctionName(workflow), input, instanceId, startAt);
- }
+Create the following `tsconfig.json` file:
- /**
- * Terminate the workflow associated with the provided instance id.
- *
- * @param {string} workflowInstanceId - Workflow instance id to terminate.
- * @param {any} output - The optional output to set for the terminated workflow instance.
- */
- public async terminateWorkflow(workflowInstanceId: string, output: any) {
- await this._innerClient.terminateOrchestration(workflowInstanceId, output);
- }
+```json
+{
+ "compilerOptions": {
+ "target": "ES2020",
+ "module": "CommonJS",
+ "moduleResolution": "Node",
+ "strict": true,
+ "esModuleInterop": true,
+ "skipLibCheck": true,
+ "outDir": "dist"
+ },
+ "include": ["src"]
+}
+```
- /**
- * Fetch workflow instance metadata from the configured durable store.
- */
- public async getWorkflowState(
- workflowInstanceId: string,
- getInputsAndOutputs: boolean,
- ): Promise {
- const state = await this._innerClient.getOrchestrationState(workflowInstanceId, getInputsAndOutputs);
- if (state !== undefined) {
- return new WorkflowState(state);
- }
- }
+Create the following `src/app.ts` file:
- /**
- * Waits for a workflow to start running
- */
- public async waitForWorkflowStart(
- workflowInstanceId: string,
- fetchPayloads = true,
- timeoutInSeconds = 60,
- ): Promise {
- const state = await this._innerClient.waitForOrchestrationStart(
- workflowInstanceId,
- fetchPayloads,
- timeoutInSeconds,
- );
- if (state !== undefined) {
- return new WorkflowState(state);
- }
- }
+```typescript
+import {
+ WorkflowRuntime,
+ WorkflowActivityContext,
+ WorkflowContext,
+ DaprWorkflowClient,
+ TWorkflow
+} from "@dapr/dapr";
- /**
- * Waits for a workflow to complete running
- */
- public async waitForWorkflowCompletion(
- workflowInstanceId: string,
- fetchPayloads = true,
- timeoutInSeconds = 60,
- ): Promise {
- const state = await this._innerClient.waitForOrchestrationCompletion(
- workflowInstanceId,
- fetchPayloads,
- timeoutInSeconds,
- );
- if (state != undefined) {
- return new WorkflowState(state);
- }
- }
+const workflowClient = new DaprWorkflowClient();
+const workflowRuntime = new WorkflowRuntime();
- /**
- * Sends an event notification message to an awaiting workflow instance
- */
- public async raiseEvent(workflowInstanceId: string, eventName: string, eventPayload?: any) {
- this._innerClient.raiseOrchestrationEvent(workflowInstanceId, eventName, eventPayload);
- }
+// simple activity
+const hello = async (_: WorkflowActivityContext, name: string) => `Hello ${name}!`;
- /**
- * Purges the workflow instance state from the workflow state store.
- */
- public async purgeWorkflow(workflowInstanceId: string): Promise {
- const purgeResult = await this._innerClient.purgeOrchestration(workflowInstanceId);
- if (purgeResult !== undefined) {
- return purgeResult.deletedInstanceCount > 0;
- }
- return false;
- }
+// simple workflow: call the activity 3 times
+const sequence: TWorkflow = async function* (ctx: WorkflowContext): any {
+ const out: string[] = [];
+ out.push(yield ctx.callActivity(hello, "Tokyo"));
+ out.push(yield ctx.callActivity(hello, "Seattle"));
+ out.push(yield ctx.callActivity(hello, "London"));
+ out.push(yield ctx.waitForExternalEvent("continue"));
+ return out;
+};
+
+async function main() {
+ workflowRuntime.registerWorkflow(sequence).registerActivity(hello);
+ await workflowRuntime.start();
+
+ const id = await workflowClient.scheduleNewWorkflow(sequence);
+ console.log("Scheduled:", id);
+
+ workflowClient.raiseEvent(id, "continue", "Go go go!");
+
+ const state = await workflowClient.waitForWorkflowCompletion(id, undefined, 30);
+ console.log("Done:", state?.runtimeStatus, "output:", state?.serializedOutput);
+
+ await new Promise(f => setTimeout(f, 100000));
+
+ await workflowRuntime.stop();
+ await workflowClient.stop();
- /**
- * Closes the inner DurableTask client and shutdown the GRPC channel.
- */
- public async stop() {
- await this._innerClient.stop();
- }
}
+
+main().catch((e) => { console.error(e); });
```
{{% /tab %}}
@@ -864,7 +888,7 @@ public class DemoWorkflow extends Workflow {
[As in the following example](https://github.com/dapr/go-sdk/tree/main/examples/workflow/README.md), a hello-world application using the Go SDK and Dapr Workflow would include:
- A Go package called `client` to receive the Go SDK client capabilities.
-- The `TestWorkflow` method
+- The `BusinessWorkflow` method
- Creating the workflow with input and output.
- API calls. In the example below, these calls start and call the workflow activities.
@@ -874,86 +898,98 @@ package main
import (
"context"
+ "errors"
"fmt"
"log"
+ "strconv"
"time"
- "github.com/dapr/durabletask-go/api"
- "github.com/dapr/durabletask-go/backend"
- "github.com/dapr/durabletask-go/client"
- "github.com/dapr/durabletask-go/task"
- dapr "github.com/dapr/go-sdk/client"
+ "github.com/dapr/durabletask-go/workflow"
+ "github.com/dapr/go-sdk/client"
)
var stage = 0
-
-const (
- workflowComponent = "dapr"
-)
+var failActivityTries = 0
func main() {
- registry := task.NewTaskRegistry()
+ r := workflow.NewRegistry()
- if err := registry.AddOrchestrator(TestWorkflow); err != nil {
+ if err := r.AddWorkflow(BusinessWorkflow); err != nil {
log.Fatal(err)
}
- fmt.Println("TestWorkflow registered")
+ fmt.Println("BusinessWorkflow registered")
- if err := registry.AddActivity(TestActivity); err != nil {
+ if err := r.AddActivity(BusinessActivity); err != nil {
log.Fatal(err)
}
- fmt.Println("TestActivity registered")
+ fmt.Println("BusinessActivity registered")
- daprClient, err := dapr.NewClient()
- if err != nil {
- log.Fatalf("failed to create Dapr client: %v", err)
+ if err := r.AddActivity(FailActivity); err != nil {
+ log.Fatal(err)
}
+ fmt.Println("FailActivity registered")
- client := client.NewTaskHubGrpcClient(daprClient.GrpcClientConn(), backend.DefaultLogger())
- if err := client.StartWorkItemListener(context.TODO(), registry); err != nil {
- log.Fatalf("failed to start work item listener: %v", err)
+ wclient, err := client.NewWorkflowClient()
+ if err != nil {
+ log.Fatal(err)
}
+ fmt.Println("Worker initialized")
+ ctx, cancel := context.WithCancel(context.Background())
+ if err = wclient.StartWorker(ctx, r); err != nil {
+ log.Fatal(err)
+ }
fmt.Println("runner started")
- ctx := context.Background()
-
// Start workflow test
- id, err := client.ScheduleNewOrchestration(ctx, "TestWorkflow", api.WithInput(1))
+ // Set the start time to the current time to not wait for the workflow to
+ // "start". This is useful for increasing the throughput of creating
+ // workflows.
+ // workflow.WithStartTime(time.Now())
+ instanceID, err := wclient.ScheduleWorkflow(ctx, "BusinessWorkflow", workflow.WithInstanceID("a7a4168d-3a1c-41da-8a4f-e7f6d9c718d9"), workflow.WithInput("1"))
if err != nil {
log.Fatalf("failed to start workflow: %v", err)
}
- fmt.Printf("workflow started with id: %v\n", id)
+ fmt.Printf("workflow started with id: %v\n", instanceID)
// Pause workflow test
- err = client.PurgeOrchestrationState(ctx, id)
+ err = wclient.SuspendWorkflow(ctx, instanceID, "")
if err != nil {
log.Fatalf("failed to pause workflow: %v", err)
}
- respGet, err := client.FetchOrchestrationMetadata(ctx, id)
+ respFetch, err := wclient.FetchWorkflowMetadata(ctx, instanceID, workflow.WithFetchPayloads(true))
if err != nil {
- log.Fatalf("failed to get workflow: %v", err)
+ log.Fatalf("failed to fetch workflow: %v", err)
+ }
+
+ if respFetch.RuntimeStatus != workflow.StatusSuspended {
+ log.Fatalf("workflow not paused: %s: %v", respFetch.RuntimeStatus, respFetch)
}
- fmt.Printf("workflow paused: %s\n", respGet.RuntimeStatus)
+
+ fmt.Printf("workflow paused\n")
// Resume workflow test
- err = client.ResumeOrchestration(ctx, id, "")
+ err = wclient.ResumeWorkflow(ctx, instanceID, "")
if err != nil {
log.Fatalf("failed to resume workflow: %v", err)
}
- fmt.Printf("workflow running: %s\n", respGet.RuntimeStatus)
- respGet, err = client.FetchOrchestrationMetadata(ctx, id)
+ respFetch, err = wclient.FetchWorkflowMetadata(ctx, instanceID, workflow.WithFetchPayloads(true))
if err != nil {
log.Fatalf("failed to get workflow: %v", err)
}
- fmt.Printf("workflow resumed: %s\n", respGet.RuntimeStatus)
+
+ if respFetch.RuntimeStatus != workflow.StatusRunning {
+ log.Fatalf("workflow not running")
+ }
+
+ fmt.Println("workflow resumed")
fmt.Printf("stage: %d\n", stage)
- // Raise Event Test
- err = client.RaiseEvent(ctx, id, "testEvent", api.WithEventPayload("testData"))
+ // Raise Event
+ err = wclient.RaiseEvent(ctx, instanceID, "businessEvent", workflow.WithEventPayload("testData"))
if err != nil {
fmt.Printf("failed to raise event: %v", err)
}
@@ -964,53 +1000,117 @@ func main() {
fmt.Printf("stage: %d\n", stage)
- respGet, err = client.FetchOrchestrationMetadata(ctx, id)
+ _, err = wclient.WaitForWorkflowCompletion(ctx, instanceID)
+ if err != nil {
+ log.Fatalf("failed to wait for workflow: %v", err)
+ }
+
+ fmt.Printf("fail activity executions: %d\n", failActivityTries)
+
+ respFetch, err = wclient.FetchWorkflowMetadata(ctx, instanceID, workflow.WithFetchPayloads(true))
if err != nil {
log.Fatalf("failed to get workflow: %v", err)
}
- fmt.Printf("workflow status: %v\n", respGet.RuntimeStatus)
+ fmt.Printf("workflow status: %v\n", respFetch.String())
// Purge workflow test
- err = client.PurgeOrchestrationState(ctx, id)
+ err = wclient.PurgeWorkflowState(ctx, instanceID)
+ if err != nil {
+ log.Fatalf("failed to purge workflow: %v", err)
+ }
+
+ respFetch, err = wclient.FetchWorkflowMetadata(ctx, instanceID, workflow.WithFetchPayloads(true))
+ if err == nil || respFetch != nil {
+ log.Fatalf("failed to purge workflow: %v", err)
+ }
+
+ fmt.Println("workflow purged")
+
+ fmt.Printf("stage: %d\n", stage)
+
+ // Terminate workflow test
+ id, err := wclient.ScheduleWorkflow(ctx, "BusinessWorkflow", workflow.WithInstanceID("a7a4168d-3a1c-41da-8a4f-e7f6d9c718d9"), workflow.WithInput("1"))
+ if err != nil {
+ log.Fatalf("failed to start workflow: %v", err)
+ }
+ fmt.Printf("workflow started with id: %v\n", id)
+
+ metadata, err := wclient.WaitForWorkflowStart(ctx, id)
+ if err != nil {
+ log.Fatalf("failed to get workflow: %v", err)
+ }
+ fmt.Printf("workflow status: %s\n", metadata.String())
+
+ err = wclient.TerminateWorkflow(ctx, id)
+ if err != nil {
+ log.Fatalf("failed to terminate workflow: %v", err)
+ }
+ fmt.Println("workflow terminated")
+
+ err = wclient.PurgeWorkflowState(ctx, id)
if err != nil {
log.Fatalf("failed to purge workflow: %v", err)
}
fmt.Println("workflow purged")
+
+ <-ctx.Done()
+ cancel()
+
+ fmt.Println("workflow worker successfully shutdown")
}
-func TestWorkflow(ctx *task.OrchestrationContext) (any, error) {
- var input int
+func BusinessWorkflow(ctx *workflow.WorkflowContext) (any, error) {
+ var input string
if err := ctx.GetInput(&input); err != nil {
return nil, err
}
var output string
- if err := ctx.CallActivity(TestActivity, task.WithActivityInput(input)).Await(&output); err != nil {
+ if err := ctx.CallActivity(BusinessActivity, workflow.WithActivityInput(input)).Await(&output); err != nil {
return nil, err
}
- err := ctx.WaitForSingleEvent("testEvent", time.Second*60).Await(&output)
+ err := ctx.WaitForExternalEvent("businessEvent", time.Minute*60).Await(&output)
if err != nil {
return nil, err
}
- if err := ctx.CallActivity(TestActivity, task.WithActivityInput(input)).Await(&output); err != nil {
+ if err := ctx.CallActivity(BusinessActivity, workflow.WithActivityInput(input)).Await(&output); err != nil {
return nil, err
}
+ if err := ctx.CallActivity(FailActivity, workflow.WithActivityRetryPolicy(&workflow.RetryPolicy{
+ MaxAttempts: 3,
+ InitialRetryInterval: 100 * time.Millisecond,
+ BackoffCoefficient: 2,
+ MaxRetryInterval: 1 * time.Second,
+ })).Await(nil); err == nil {
+ return nil, fmt.Errorf("unexpected no error executing fail activity")
+ }
+
return output, nil
}
-func TestActivity(ctx task.ActivityContext) (any, error) {
- var input int
+func BusinessActivity(ctx workflow.ActivityContext) (any, error) {
+ var input string
if err := ctx.GetInput(&input); err != nil {
return "", err
}
- stage += input
+ iinput, err := strconv.Atoi(input)
+ if err != nil {
+ return "", err
+ }
+
+ stage += iinput
return fmt.Sprintf("Stage: %d", stage), nil
}
+
+func FailActivity(ctx workflow.ActivityContext) (any, error) {
+ failActivityTries += 1
+ return nil, errors.New("dummy activity error")
+}
```
[See the full Go SDK workflow example in context.](https://github.com/dapr/go-sdk/tree/main/examples/workflow/README.md)
@@ -1025,6 +1125,272 @@ Because of how replay-based workflows execute, you'll write logic that does thin
{{% /alert %}}
+## Run the workflow & inspect the workflow execution with the Diagrid Dashboard
+
+Start the workflow application via your IDE or the Dapr CLI ([Dapr multi-app run]({{% ref multi-app-overview.md %}}) if you want to start multiple applications, or the regular [Dapr run command](#testing-the-workflow-via-the-dapr-cli) for a single application), and schedule a new workflow instance.
+
+Use the local [Diagrid Dashboard](https://diagrid.ws/diagrid-dashboard-docs) to visualize and inspect your workflow state, and drill down to see detailed workflow execution history. The dashboard runs as a container and is connected to the state store that is used by Dapr workflows (by default a local Redis instance).
+
+
+
+Start the Diagrid Dashboard container using Docker:
+
+```bash
+docker run -p 8080:8080 ghcr.io/diagridio/diagrid-dashboard:latest
+```
+
+{{% alert title="Note" color="primary" %}}
+If you're using a state store other than the default Redis instance, you need to provide additional arguments when running the container; see the [Diagrid Dashboard reference docs](https://diagrid.ws/diagrid-dashboard-docs).
+{{% /alert %}}
+
+
+Open the dashboard in a browser at [http://localhost:8080](http://localhost:8080).
+
+
+## Testing the workflow via the Dapr CLI
+
+After authoring the workflow, you can test it using the Dapr CLI:
+
+{{< tabpane text=true >}}
+
+{{% tab "Python" %}}
+
+#### Run the workflow application
+
+```bash
+dapr run --app-id workflow-app python3 app.py
+```
+Make sure the application is running:
+
+```bash
+dapr list
+```
+
+#### Run the workflow
+```bash
+dapr workflow run hello_world_wf --app-id workflow-app --input 'hello world' --instance-id test-run
+```
+
+#### Check the workflow status
+```bash
+dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 -o wide
+```
+
+#### Check completed workflows
+```bash
+dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 --filter-status COMPLETED -o wide
+```
+
+#### View workflow history
+```bash
+dapr workflow history --app-id workflow-app test-run
+```
+
+{{% /tab %}}
+
+{{% tab "Javascript" %}}
+
+#### Run the workflow application
+
+```bash
+dapr run --app-id workflow-app npx ts-node src/app.ts
+```
+Make sure the application is running:
+
+```bash
+dapr list
+```
+
+#### Run the workflow
+```bash
+dapr workflow run sequence --app-id workflow-app --input 'hello world' --instance-id test-run
+```
+
+#### Check the workflow status
+```bash
+dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 -o wide
+```
+
+#### Raise the waiting external event
+```bash
+dapr workflow raise-event --app-id workflow-app test-run/businessEvent
+```
+
+#### Check completed workflows
+```bash
+dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 --filter-status COMPLETED -o wide
+```
+
+#### View workflow history
+```bash
+dapr workflow history --app-id workflow-app test-run
+```
+
+{{% /tab %}}
+
+{{% tab ".NET" %}}
+
+#### Run the workflow application
+
+```bash
+dapr run --app-id workflow-app dotnet run
+```
+Make sure the application is running:
+
+```bash
+dapr list
+```
+
+#### Run the workflow
+```bash
+dapr workflow run OrderProcessingWorkflow --app-id workflow-app --instance-id test-run --input '{"name": "Paperclips", "totalCost": 99.95}'
+```
+
+#### Check the workflow status
+```bash
+dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 -o wide
+```
+
+#### Raise the waiting external event
+```bash
+dapr workflow raise-event --app-id workflow-app test-run/incoming-purchase-order --input '{"name": "Paperclips", "totalCost": 99.95}'
+```
+
+#### Check completed workflows
+```bash
+dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 --filter-status COMPLETED -o wide
+```
+
+#### View workflow history
+```bash
+dapr workflow history --app-id workflow-app test-run
+```
+
+{{% /tab %}}
+
+{{% tab "Java" %}}
+
+#### Run the workflow application
+
+```bash
+dapr run --app-id workflow-app -- java -jar target/WorkflowService-0.0.1-SNAPSHOT.jar
+```
+
+Make sure the application is running:
+
+```bash
+dapr list
+```
+
+#### Run the workflow
+```bash
+dapr workflow run DemoWorkflow --app-id workflow-app --instance-id test-run --input "input data"
+```
+
+#### Check the workflow status
+```bash
+dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 -o wide
+```
+
+#### Raise the waiting external event
+```bash
+dapr workflow raise-event --app-id workflow-app test-run/TestEvent --input 'TestEventPayload'
+dapr workflow raise-event --app-id workflow-app test-run/event1 --input 'TestEvent 1 Payload'
+dapr workflow raise-event --app-id workflow-app test-run/event2 --input 'TestEvent 2 Payload'
+dapr workflow raise-event --app-id workflow-app test-run/event3 --input 'TestEvent 3 Payload'
+```
+
+#### Check completed workflows
+```bash
+dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 --filter-status COMPLETED -o wide
+```
+
+#### View workflow history
+```bash
+dapr workflow history --app-id workflow-app test-run
+```
+
+{{% /tab %}}
+
+{{% tab "Go" %}}
+
+#### Run the workflow application
+```bash
+dapr run --app-id workflow-app go run main.go
+```
+
+Make sure the application is running:
+
+```bash
+dapr list
+```
+
+#### Run the workflow
+```bash
+dapr workflow run BusinessWorkflow --app-id workflow-app --input '1' --instance-id test-run
+```
+
+#### Check the workflow status
+```bash
+dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 -o wide
+```
+
+#### Raise the waiting external event
+```bash
+dapr workflow raise-event --app-id workflow-app test-run/businessEvent
+```
+
+#### Check completed workflows
+```bash
+dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 --filter-status COMPLETED -o wide
+```
+
+#### View workflow history
+```bash
+dapr workflow history test-run --app-id workflow-app
+```
+
+{{% /tab %}}
+
+{{< /tabpane >}}
+
+
+### Monitor Workflow Execution
+
+```bash
+dapr workflow list --app-id workflow-app --filter-status RUNNING -o wide
+```
+
+```bash
+dapr workflow list --app-id workflow-app --filter-status FAILED -o wide
+```
+
+```bash
+dapr workflow list --app-id workflow-app --filter-status COMPLETED -o wide
+```
+
+### Test External Events
+
+```bash
+# Raise an event your workflow is waiting for
+dapr workflow raise-event <instance-id>/ApprovalReceived \
+ --app-id workflow-app \
+ --input '{"approved": true, "approver": "manager@company.com"}'
+```
+
+### Debug Failed Workflows
+
+```bash
+# List failed workflows
+dapr workflow list --app-id workflow-app --filter-status FAILED --output wide
+
+# Get detailed history of a failed workflow
+dapr workflow history --app-id workflow-app <instance-id> --output json
+
+# Re-run the workflow after fixing issues
+dapr workflow rerun <instance-id> --app-id workflow-app --input '<input>'
+```
+
## Next steps
Now that you've authored a workflow, learn how to manage it.
diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md
index de4f6b23249..db41b48205a 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md
@@ -6,10 +6,361 @@ weight: 6000
description: Manage and run workflows
---
-Now that you've [authored the workflow and its activities in your application]({{% ref howto-author-workflow.md %}}), you can start, terminate, and get information about the workflow using HTTP API calls. For more information, read the [workflow API reference]({{% ref workflow_api.md %}}).
+Now that you've [authored the workflow and its activities in your application]({{% ref howto-author-workflow.md %}}), you can start, terminate, and get information about the workflow using the CLI or API calls. For more information, read the [workflow API reference]({{% ref workflow_api.md %}}).
{{< tabpane text=true >}}
+
+{{% tab "CLI" %}}
+Workflow reminders are stored in the Scheduler and can be managed using the `dapr scheduler` CLI.
+
+#### List workflow reminders
+
+```bash
+dapr scheduler list --filter workflow
+NAME BEGIN COUNT LAST TRIGGER
+workflow/my-app/instance1/timer-0-ABC123 +50.0h 0
+workflow/my-app/instance2/timer-0-XYZ789 +50.0h 0
+```
+
+#### Get reminder details
+
+```bash
+dapr scheduler get workflow/my-app/instance1/timer-0-ABC123 -o yaml
+```
+
+#### Delete workflow reminders
+
+Delete a single reminder:
+
+```bash
+dapr scheduler delete workflow/my-app/instance1/timer-0-ABC123
+```
+
+Delete all reminders for a given workflow app:
+
+```bash
+dapr scheduler delete-all workflow/my-app
+```
+
+Delete all reminders for a specific workflow instance:
+
+```bash
+dapr scheduler delete-all workflow/my-app/instance1
+```
+
+#### Backup and restore reminders
+
+Export all reminders:
+
+```bash
+dapr scheduler export -o workflow-reminders-backup.bin
+```
+
+Restore from a backup file:
+
+```bash
+dapr scheduler import -f workflow-reminders-backup.bin
+```
+
+#### Summary
+
+- Workflow reminders are persisted in the Dapr Scheduler.
+- Create workflow reminders via the Workflow API.
+- Manage reminders (list, get, delete, backup/restore) with the dapr scheduler CLI.
+
+## Managing Workflows with the Dapr CLI
+
+The Dapr CLI provides commands for managing workflow instances in both self-hosted and Kubernetes environments.
+
+### Prerequisites
+
+- Dapr CLI version 1.16.2 or later
+- A running Dapr application that has registered a workflow
+- For database operations: network access to your actor state store
+
+### Basic Workflow Operations
+
+#### Start a Workflow
+
+```bash
+# Using the `orderprocessing` application, start a new workflow instance with input data
+dapr workflow run OrderProcessingWorkflow \
+ --app-id orderprocessing \
+ --input '{"orderId": "12345", "amount": 100.50}'
+
+# Start a new workflow with a specific instance ID
+dapr workflow run OrderProcessingWorkflow \
+ --app-id orderprocessing \
+ --instance-id order-12345 \
+ --input '{"orderId": "12345"}'
+
+# Schedule a new workflow to start at 10:00:00 AM on December 25, 2024, Coordinated Universal Time (UTC).
+dapr workflow run OrderProcessingWorkflow \
+ --app-id orderprocessing \
+ --start-time "2024-12-25T10:00:00Z"
+```
+
+#### List Workflow Instances
+
+```bash
+# List all workflows for an app
+dapr workflow list --app-id orderprocessing
+
+# Filter by status
+dapr workflow list --app-id orderprocessing --filter-status RUNNING
+
+# Filter by workflow name
+dapr workflow list --app-id orderprocessing --filter-name OrderProcessingWorkflow
+
+# Filter by age (workflows started in last 24 hours)
+dapr workflow list --app-id orderprocessing --filter-max-age 24h
+
+# Get detailed output
+dapr workflow list --app-id orderprocessing --output wide
+```
+
+#### View Workflow History
+
+```bash
+# Get execution history
+dapr workflow history order-12345 --app-id orderprocessing
+
+# Get history in JSON format
+dapr workflow history order-12345 --app-id orderprocessing --output json
+```
+
+#### Control Workflow Execution
+
+```bash
+# Suspend a running workflow
+dapr workflow suspend order-12345 \
+ --app-id orderprocessing \
+ --reason "Waiting for manual approval"
+
+# Resume a suspended workflow
+dapr workflow resume order-12345 \
+ --app-id orderprocessing \
+ --reason "Approved by manager"
+
+# Terminate a workflow
+dapr workflow terminate order-12345 \
+ --app-id orderprocessing \
+ --output '{"reason": "Cancelled by customer"}'
+```
+
+#### Raise External Events
+
+```bash
+# Raise an event for a waiting workflow
+dapr workflow raise-event order-12345/PaymentReceived \
+ --app-id orderprocessing \
+ --input '{"paymentId": "pay-67890", "amount": 100.50}'
+```
+
+#### Re-run Workflows
+
+```bash
+# Re-run from the beginning
+dapr workflow rerun order-12345 --app-id orderprocessing
+
+# Re-run from a specific event
+dapr workflow rerun order-12345 \
+ --app-id orderprocessing \
+ --event-id 5
+
+# Re-run with a new instance ID
+dapr workflow rerun order-12345 \
+ --app-id orderprocessing \
+ --new-instance-id order-12345-retry
+```
+
+#### Purge Completed Workflows
+
+Note that purging a workflow from the CLI will also delete all associated Scheduler reminders.
+
+{{% alert title="Important" color="warning" %}}
+It is required that a workflow client is running in the application to perform purge operations.
+The workflow client connection is required in order to preserve the workflow state machine integrity and prevent corruption.
+Errors like the following suggest that the workflow client is not running:
+```
+failed to purge orchestration state: rpc error: code = FailedPrecondition desc = failed to purge orchestration state: failed to lookup actor: api error: code = FailedPrecondition desc = did not find address for actor
+```
+{{% /alert %}}
+
+```bash
+# Purge a specific instance
+dapr workflow purge order-12345 --app-id orderprocessing
+
+# Purge all completed workflows older than 30 days
+dapr workflow purge --app-id orderprocessing --all-older-than 720h
+
+# Purge all terminal workflows (use with caution!)
+dapr workflow purge --app-id orderprocessing --all
+```
+
+### Kubernetes Operations
+
+All commands support the `-k` (`--kubernetes`) flag for Kubernetes deployments:
+
+```bash
+# List workflows in Kubernetes
+dapr workflow list \
+ --kubernetes \
+ --namespace production \
+ --app-id orderprocessing
+
+# Suspend a workflow in Kubernetes
+dapr workflow suspend order-12345 \
+ --kubernetes \
+ --namespace production \
+ --app-id orderprocessing \
+ --reason "Maintenance window"
+```
+
+### Advanced: Direct Database Access
+
+For advanced operations like listing and purging workflows, you can connect directly to the actor state store database. This is useful for:
+
+- Querying workflows across multiple app instances
+- Bulk operations on workflow metadata
+- Custom filtering beyond what the API provides
+
+#### Self-Hosted Mode
+
+In self-hosted mode, connect directly to your actor state store by supplying its connection string:
+
+```bash
+# Redis example
+dapr workflow list --app-id orderprocessing --connection-string=redis://127.0.0.1:6379
+```
+
+Connection string examples for other state stores:
+
+```bash
+# PostgreSQL
+dapr workflow list \
+ --app-id orderprocessing \
+ --connection-string "host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable" \
+ --table-name actor-store
+
+# MySQL
+dapr workflow list \
+ --app-id orderprocessing \
+ --connection-string "dapr:dapr@tcp(localhost:3306)/dapr?parseTime=true" \
+ --table-name actor-store
+
+# SQL Server
+dapr workflow list \
+ --app-id orderprocessing \
+ --connection-string "sqlserver://dapr:Pass@word1@localhost:1433?database=dapr" \
+  --table-name actor-store
+
+# Redis
+dapr workflow list \
+ --app-id orderprocessing \
+  --connection-string=redis://user:mypassword@127.0.0.1:6379
+```
+
+#### Kubernetes Mode with Port Forwarding
+
+In Kubernetes, you need to establish connectivity to your database:
+
+**Step 1: Port forward to your database service**
+
+```bash
+# PostgreSQL
+kubectl port-forward service/postgres 5432:5432 -n production
+
+# MySQL
+kubectl port-forward service/mysql 3306:3306 -n production
+
+# SQL Server
+kubectl port-forward service/mssql 1433:1433 -n production
+
+# Redis
+kubectl port-forward service/redis 6379:6379 -n production
+```
+
+**Step 2: Use the CLI with the connection string**
+
+```bash
+# PostgreSQL example
+dapr workflow list \
+ --kubernetes \
+ --namespace production \
+ --app-id orderprocessing \
+ --connection-string "host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable" \
+ --table-name workflows
+
+# Purge old workflows
+dapr workflow purge \
+ --kubernetes \
+ --namespace production \
+ --app-id orderprocessing \
+ --connection-string "host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable" \
+ --table-name workflows \
+ --all-older-than 2160h # 90 days
+```
+
+**Step 3: Stop port forwarding when done**
+
+```bash
+# Press Ctrl+C to stop the port forward
+```
+
+#### Connection String Formats by Database
+
+**PostgreSQL / CockroachDB**
+```
+host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable connect_timeout=10
+```
+
+**MySQL**
+```
+username:password@tcp(host:port)/database?parseTime=true&loc=UTC
+```
+
+**SQL Server**
+```
+sqlserver://username:password@host:port?database=dbname&encrypt=false
+```
+
+**MongoDB**
+```
+mongodb://username:password@localhost:27017/database
+```
+
+**Redis**
+```
+redis://127.0.0.1:6379
+```
+
+### Workflow Management Best Practices
+
+1. **Regular Cleanup**: Schedule periodic purge operations for completed workflows
+ ```bash
+ # Weekly cron job to purge workflows older than 90 days
+ dapr workflow purge --app-id orderprocessing --all-older-than 2160h
+ ```
+
+2. **Monitor Running Workflows**: Use filtered lists to track long-running instances
+ ```bash
+ dapr workflow list --app-id orderprocessing --filter-status RUNNING --filter-max-age 24h
+ ```
+
+3. **Use Instance IDs**: Assign meaningful instance IDs for easier tracking
+ ```bash
+ dapr workflow run OrderWorkflow --app-id orderprocessing --instance-id "order-$(date +%s)"
+ ```
+
+4. **Export for Analysis**: Export workflow data for analysis
+ ```bash
+ dapr workflow list --app-id orderprocessing --output json > workflows.json
+ ```
+
+{{% /tab %}}
+
{{% tab "Python" %}}
@@ -356,7 +707,7 @@ To resume a workflow with an ID `12345678`, run:
curl -X POST "http://localhost:3500/v1.0/workflows/dapr/12345678/resume"
```
-### Purge a workflow
+### Purge a workflow
The purge API can be used to permanently delete workflow metadata from the underlying state store, including any stored inputs, outputs, and workflow history records. This is often useful for implementing data retention policies and for freeing resources.
diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md
index 2fdd37d1cf8..47893aab80a 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md
@@ -175,6 +175,59 @@ Similarly, if a state store imposes restrictions on the size of a batch transact
Workflow state can be purged from a state store, including all its history.
Each Dapr SDK exposes APIs for purging all metadata related to specific workflow instances.
+#### State store record count
+
+The number of records that are saved as history in the state store per workflow run is determined by its complexity or "shape" — that is, the number of activities, timers, sub-workflows, and so on.
+The following table shows a general guide to the number of records that are saved by different workflow tasks.
+This number may be larger or smaller depending on retries or concurrency.
+
+| Task type | Number of records saved |
+| ----------|-------------------------|
+| Start workflow | 5 records |
+| Call activity | 3 records |
+| Timer | 3 records |
+| Raise event | 3 records |
+| Start child workflow | 8 records |
+
+#### Direct Database Access
+
+For advanced operations, you can access workflow data directly:
+
+```bash
+# Port forward to a postgres database in Kubernetes
+kubectl port-forward service/postgres 5432:5432
+
+# Query workflows directly
+dapr workflow list \
+ --app-id myapp \
+ --connection-string "host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable" \
+ --table-name workflows
+```
+
+```bash
+# Port forward to redis database in Kubernetes
+kubectl port-forward service/redis 6379:6379
+
+# Query workflows directly
+dapr workflow list \
+ --app-id myapp \
+ --connection-string redis://127.0.0.1:6379 \
+ --table-name workflows
+```
+
+### Supported State Stores
+
+The workflow engine supports these state stores:
+- PostgreSQL
+- MySQL
+- SQL Server
+- SQLite
+- Oracle Database
+- CockroachDB
+- MongoDB
+- Redis
+
+
## Workflow scalability
Because Dapr Workflows are internally implemented using actors, Dapr Workflows have the same scalability characteristics as actors.
diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md
index 2114b1827d7..58c7df2b8d7 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md
@@ -24,6 +24,62 @@ There are several different kinds of tasks that a workflow can schedule, includi
- [Child workflows]({{% ref "workflow-features-concepts.md#child-workflows" %}}) for breaking larger workflows into smaller pieces
- [External event waiters]({{% ref "workflow-features-concepts.md#external-events" %}}) for blocking workflows until they receive external event signals. These tasks are described in more details in their corresponding sections.
+## Workflow Instance Management
+
+### Querying Workflow State
+
+You can query workflow instances using the CLI:
+
+```bash
+# Find all running workflows
+dapr workflow list --app-id myapp --filter-status RUNNING
+
+# Find workflows by name
+dapr workflow list --app-id myapp --filter-name OrderProcessing
+
+# Find recent workflows (last 2 hours)
+dapr workflow list --app-id myapp --filter-max-age 2h
+
+# Get detailed JSON output
+dapr workflow list --app-id myapp --output json
+```
+
+### Workflow History
+
+View the complete execution history:
+
+```bash
+dapr workflow history wf-12345 --app-id myapp --output json
+```
+
+This shows all events, activities, and state transitions.
+
+## External Events
+
+### Raising Events via CLI
+
+```bash
+dapr workflow raise-event wf-12345/ApprovalReceived \
+ --app-id myapp \
+ --input '{"approved": true, "comments": "Approved by manager"}'
+```
+
+## Workflow Suspension and Resumption
+
+### Using the CLI
+
+```bash
+# Suspend for manual intervention
+dapr workflow suspend wf-12345 \
+ --app-id myapp \
+ --reason "Awaiting customer response"
+
+# Resume when ready
+dapr workflow resume wf-12345 \
+ --app-id myapp \
+ --reason "Customer responded"
+```
+
### Workflow identity
Each workflow you define has a type name, and individual executions of a workflow require a unique _instance ID_. Workflow instance IDs can be generated by your app code, which is useful when workflows correspond to business entities like documents or jobs, or can be auto-generated UUIDs. A workflow's instance ID is useful for debugging and also for managing workflows using the [Workflow APIs]({{% ref workflow_api.md %}}).
@@ -131,11 +187,11 @@ Because workflow retry policies are configured in code, the exact developer expe
| Parameter | Description |
| --- | --- |
-| **Maximum number of attempts** | The maximum number of times to execute the activity or child workflow. |
+| **Maximum number of attempts** | The maximum number of times to execute the activity or child workflow. If set to 0, no attempts will be made. |
| **First retry interval** | The amount of time to wait before the first retry. |
| **Backoff coefficient** | The coefficient used to determine the rate of increase of back-off. For example a coefficient of 2 doubles the wait of each subsequent retry. |
-| **Maximum retry interval** | The maximum amount of time to wait before each subsequent retry. |
-| **Retry timeout** | The overall timeout for retries, regardless of any configured max number of attempts. |
+| **Maximum retry interval** | The maximum amount of time to wait before each subsequent retry. If set to 0, no retries will happen. |
+| **Retry timeout** | The global timeout for retries, regardless of any configured max number of attempts. No further attempts are made executing activities after this timeout expires. |
## External events
diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-multi-app.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-multi-app.md
index 8eb2f439430..b369c2153e5 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-multi-app.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-multi-app.md
@@ -6,8 +6,9 @@ weight: 7000
description: "Executing workflows across multiple applications"
---
-It is often the case that a single workflow spans multiple applications, microservices, or programing languages.
+It is often the case that a single workflow spans multiple applications, microservices, or programming languages.
This is where an activity or a child workflow will be executed on a different application than the one hosting the parent workflow.
+
Some scenarios where this is useful include:
- A Machine Learning (ML) training activity must be executed on GPU-enabled machines, while the rest of the workflow runs on CPU-only orchestration machines.
@@ -15,85 +16,99 @@ Some scenarios where this is useful include:
- Different parts of the workflow need to be executed in different trust zones or networks.
- Different parts of the workflow need to be executed in different geographic regions due to data residency requirements.
- An involved business process spans multiple teams or departments, each owning their own application.
-- Implementation of a workflow spans different programming lanaguages based on team expertise or existing codebases.
+- Implementation of a workflow spans different programming languages based on team expertise or existing codebases.
+- Different team boundaries or microservice ownership.
+
+
+
+The diagram below shows an example scenario of a complex workflow that orchestrates across multiple applications that are written in different languages. Each application's main steps and activities are:
+
+- **App1: Main Workflow Service** - Top-level orchestrator that coordinates the entire ML pipeline
+  - Starts the process
+  - Calls data processing activities on App2
+  - Calls ML training child workflow on App3
+  - Calls model deployment on App4
+  - Ends the complete workflow
+  - **Language: Java**
+
+- **App2: Data Processing Pipeline** - **GPU activities** only
+  - Data Ingesting Activity (GPU-accelerated)
+  - Feature Engineering Activity (GPU-accelerated)
+  - Returns completion signal to Main Workflow
+  - **Language: Go**
+
+- **App3: ML Training Child Workflow** - Contains a child workflow and activities
+  - Child workflow orchestrates:
+    - Data Processing Activity
+    - Model Training Activity (GPU-intensive)
+    - Model Validation Activity
+  - Triggered by App2's activities completing
+  - Returns completion signal to Main Workflow
+  - **Language: Java**
+
+- **App4: Model Serving Service** - **Beefy GPU app** with activities only
+  - Model Loading Activity (GPU memory intensive)
+  - Inference Setup Activity (GPU-accelerated inference)
+  - Triggered by App3's workflow completing
+  - Returns completion signal to Main Workflow
+  - **Language: Go**
## Multi-application workflows
-Like all building blocks in Dapr, workflow execution routing is based on the [App ID of the hosting Dapr application]({{% ref "security-concept.md#application-identity" %}}).
-By default, the full workflow execution is hosted on the app ID that started the workflow.
-This workflow will be executed across all replicas of that app ID, not just the single replica which scheduled the workflow.
+Workflow execution routing is based on the [App ID of the hosting Dapr application]({{% ref "security-concept.md#application-identity" %}}).
+By default, the full workflow execution is hosted on the app ID that started the workflow. This workflow can be executed across any replicas of that app ID, not just the single replica which scheduled the workflow.
+
-It is possible to execute activities or child workflows on different app IDs by specifying the target app ID parameter, inside the workflow execution code.
-Upon execution, the target app ID will execute the activity or child workflow, and return the result to the parent workflow of the originating app ID.
-Workflows being durable, if the target activity or child workflow app ID is not available or has not been defined, the parent workflow retry until the target app ID becomes available, indefinitely.
-It is paramount that their is co-ordination between the teams owning the different app IDs to ensure that the activities and child workflows are defined and available when needed.
+It is possible to execute activities and child workflows on different app IDs by specifying the target app ID parameter, inside the workflow execution code.
+Upon execution, the target app ID executes the activity or child workflow, and returns the result to the parent workflow of the originating app ID.
The entire Workflow execution may be distributed across multiple app IDs with no limit, with each activity or child workflow specifying the target app ID.
The final history of the workflow will be saved by the app ID that hosts the very parent (or can consider it the root) workflow.
{{% alert title="Restrictions" color="primary" %}}
-Like other building blocks and resources in Dapr, workflows are scoped to a single namespace.
+Like other API building blocks and resources in Dapr, workflows are scoped to a single namespace.
This means that all app IDs involved in a multi-application workflow must be in the same namespace.
-Similarly, all app IDs must use the same actor state store.
-Finally, the target app ID must have the activity or child workflow defined, otherwise the parent workflow will retry indefinitely.
+Similarly, all app IDs must use the same workflow (or actor) state store.
+Finally, the target app ID must have the activity or child workflow defined and registered, otherwise the parent workflow retries indefinitely.
{{% /alert %}}
-## Multi-application activity examples
-
-The following examples show how to execute activities on different target app IDs.
-
-{{< tabpane text=true >}}
-
-{{% tab "Go" %}}
-
-```go
-package main
+{{% alert title="Important Limitations" color="warning" %}}
+**SDKs supporting multi-application workflows** - Multi-application workflows are used via the SDKs.
+Currently the following are supported:
+- **Java** (**only** activity calls)
+- **Go** (**both** activities and child workflows calls)
+- **Python** (**both** activities and child workflows calls)
+- **.NET** (**both** activities and child workflows calls)
+- Support is planned for future releases for the JavaScript SDK.
+{{% /alert %}}
-import (
- "context"
- "log"
+## Error handling
- "github.com/dapr/durabletask-go/backend"
- "github.com/dapr/durabletask-go/client"
- "github.com/dapr/durabletask-go/task"
- dapr "github.com/dapr/go-sdk/client"
-)
+When calling multi-application activities or child workflows:
+- If the target application does not exist, the call will be retried using the provided retry policy.
+- If the target application exists but doesn't contain the specified activity or workflow, the call will return an error.
+- Standard workflow retry policies apply to multi-application calls.
-func main() {
- ctx := context.Background()
+It is paramount that there is coordination between the teams owning the different app IDs to ensure that the activities and child workflows are defined and available when needed.
- registry := task.NewTaskRegistry()
- if err := registry.AddOrchestrator(TestWorkflow); err != nil {
- log.Fatal(err)
- }
+## Multi-application activity example
- daprClient, err := dapr.NewClient()
- if err != nil {
- log.Fatal(err)
- }
+
- client := client.NewTaskHubGrpcClient(daprClient.GrpcClientConn(), backend.DefaultLogger())
- if err := client.StartWorkItemListener(ctx, registry); err != nil {
- log.Fatal(err)
- }
+The following example shows how to execute the activity `ActivityA` on the target app `App2`.
- id, err := client.ScheduleNewOrchestration(ctx, "TestWorkflow")
- if err != nil {
- log.Fatal(err)
- }
+{{< tabpane text=true >}}
- if _, err = client.WaitForOrchestrationCompletion(ctx, id); err != nil {
- log.Fatal(err)
- }
-}
+{{% tab "Go" %}}
-func TestWorkflow(ctx *task.OrchestrationContext) (any, error) {
+```go
+func BusinessWorkflow(ctx *workflow.WorkflowContext) (any, error) {
var output string
- err := ctx.CallActivity("my-other-activity",
- task.WithActivityInput("my-input"),
- // Here we set custom target app ID which will execute this activity.
- task.WithActivityAppID("my-other-app-id"),
+ err := ctx.CallActivity("ActivityA",
+ workflow.WithActivityInput("my-input"),
+ workflow.WithActivityAppID("App2"), // Here we set the target app ID which will execute this activity.
).Await(&output)
+
if err != nil {
return nil, err
}
@@ -107,45 +122,18 @@ func TestWorkflow(ctx *task.OrchestrationContext) (any, error) {
{{% tab "Java" %}}
```java
-public class CrossAppWorkflow implements Workflow {
+public class BusinessWorkflow implements Workflow {
@Override
public WorkflowStub create() {
return ctx -> {
- var logger = ctx.getLogger();
- logger.info("=== WORKFLOW STARTING ===");
- logger.info("Starting CrossAppWorkflow: {}", ctx.getName());
- logger.info("Workflow name: {}", ctx.getName());
- logger.info("Workflow instance ID: {}", ctx.getInstanceId());
-
- String input = ctx.getInput(String.class);
- logger.info("CrossAppWorkflow received input: {}", input);
- logger.info("Workflow input: {}", input);
-
- // Call an activity in another app by passing in an active appID to the WorkflowTaskOptions
- logger.info("Calling cross-app activity in 'app2'...");
- logger.info("About to call cross-app activity in app2...");
- String crossAppResult = ctx.callActivity(
- App2TransformActivity.class.getName(),
- input,
- new WorkflowTaskOptions("app2"),
+ String output = ctx.callActivity(
+ ActivityA.class.getName(),
+ "my-input",
+ new WorkflowTaskOptions("App2"), // Here we set the target app ID which will execute this activity.
String.class
).await();
- // Call another activity in a different app
- logger.info("Calling cross-app activity in 'app3'...");
- logger.info("About to call cross-app activity in app3...");
- String finalResult = ctx.callActivity(
- App3FinalizeActivity.class.getName(),
- crossAppResult,
- new WorkflowTaskOptions("app3"),
- String.class
- ).await();
- logger.info("Final cross-app activity result: {}", finalResult);
- logger.info("Final cross-app activity result: {}", finalResult);
-
- logger.info("CrossAppWorkflow finished with: {}", finalResult);
- logger.info("=== WORKFLOW COMPLETING WITH: {} ===" , finalResult);
- ctx.complete(finalResult);
+ ctx.complete(output);
};
}
}
@@ -153,62 +141,60 @@ public class CrossAppWorkflow implements Workflow {
{{% /tab %}}
-{{< /tabpane >}}
-
-The following examples show how to execute child workflows on different target app IDs.
-
-{{< tabpane text=true >}}
+{{% tab ".NET" %}}
+
+```csharp
+// Specify App ID during workflow registration
+builder.Services.AddDaprWorkflowBuilder(opt =>
+ {
+ opt.RegisterWorkflow();
+ opt.RegisterActivity();
+ opt.AppId = "my-application-1";
+ });
+
+// Call activity in another application
+public sealed class WorkflowA : Workflow
+{
+    public override Task RunAsync(WorkflowContext context, int input) =>
+        context.CallActivityAsync(nameof(AnotherActivity), input, new WorkflowTaskOptions(
+            targetAppId: "my-other-app"));
+}
+```
-{{% tab "Go" %}}
+{{% /tab %}}
-```go
-package main
+{{% tab "Python" %}}
-import (
- "context"
- "log"
+```python
+@wfr.workflow
+def app1_workflow(ctx: wf.DaprWorkflowContext):
+ output = yield ctx.call_activity('ActivityA', input='my-input', app_id='App2')
+ return output
+```
- "github.com/dapr/durabletask-go/backend"
- "github.com/dapr/durabletask-go/client"
- "github.com/dapr/durabletask-go/task"
- dapr "github.com/dapr/go-sdk/client"
-)
+{{% /tab %}}
-func main() {
- ctx := context.Background()
+{{< /tabpane >}}
- registry := task.NewTaskRegistry()
- if err := registry.AddOrchestrator(TestWorkflow); err != nil {
- log.Fatal(err)
- }
+## Multi-application child workflow example
- daprClient, err := dapr.NewClient()
- if err != nil {
- log.Fatal(err)
- }
+
- client := client.NewTaskHubGrpcClient(daprClient.GrpcClientConn(), backend.DefaultLogger())
- if err := client.StartWorkItemListener(ctx, registry); err != nil {
- log.Fatal(err)
- }
+The following example shows how to execute the child workflow `Workflow2` on the target app `App2`.
- id, err := client.ScheduleNewOrchestration(ctx, "TestWorkflow")
- if err != nil {
- log.Fatal(err)
- }
+{{< tabpane text=true >}}
- if _, err = client.WaitForOrchestrationCompletion(ctx, id); err != nil {
- log.Fatal(err)
- }
-}
+{{% tab "Go" %}}
-func TestWorkflow(ctx *task.OrchestrationContext) (any, error) {
+```go
+func BusinessWorkflow(ctx *workflow.WorkflowContext) (any, error) {
var output string
- err := ctx.CallSubOrchestrator("my-sub-orchestration",
- task.WithSubOrchestratorInput("my-input"),
- // Here we set custom target app ID which will execute this child workflow.
- task.WithSubOrchestratorAppID("my-sub-app-id"),
+ err := ctx.CallChildWorkflow("Workflow2",
+ workflow.WithChildWorkflowInput("my-input"),
+ workflow.WithChildWorkflowAppID("App2"), // Here we set the target app ID which will execute this child workflow.
).Await(&output)
+
if err != nil {
return nil, err
}
@@ -219,6 +205,40 @@ func TestWorkflow(ctx *task.OrchestrationContext) (any, error) {
{{% /tab %}}
+{{% tab ".NET" %}}
+
+```csharp
+// Specify App ID during workflow registration
+builder.Services.AddDaprWorkflowBuilder(opt =>
+ {
+ opt.RegisterWorkflow();
+ opt.RegisterActivity();
+ opt.AppId = "my-application-1";
+ });
+
+// Call child workflow in another application
+public sealed class WorkflowA : Workflow
+{
+    public override Task RunAsync(WorkflowContext context, int input) =>
+        context.CallChildWorkflow(nameof(AnotherWorkflow), input, new ChildWorkflowTaskOptions(
+            TargetAppId: "my-other-app"));
+}
+```
+
+{{% /tab %}}
+
+{{% tab "Python" %}}
+
+```python
+@wfr.workflow
+def workflow1(ctx: wf.DaprWorkflowContext):
+ output = yield ctx.call_child_workflow(workflow='Workflow2', input='my-input', app_id='App2')
+ return output
+```
+
+{{% /tab %}}
+
{{< /tabpane >}}
## Related links
diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md
index 67e4941f880..cce02d3316b 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md
@@ -46,6 +46,12 @@ Child workflow also supports automatic retry policies.
[Learn more about child workflows.]({{% ref "workflow-features-concepts.md#child-workflows" %}})
+### Multi-application workflows
+
+Multi-application workflows enable you to orchestrate complex business processes that span multiple applications. This allows a workflow to call activities or start child workflows in different applications, distributing the workflow execution while maintaining the security, reliability, and durability guarantees of Dapr's workflow engine.
+
+[Learn more about multi-application workflows.]({{% ref "workflow-multi-app.md" %}})
+
### Timers and reminders
Same as Dapr actors, you can schedule reminder-like durable delays for any time range.
@@ -108,9 +114,47 @@ Want to put workflows to the test? Walk through the following quickstart and tut
Want to skip the quickstarts? Not a problem. You can try out the workflow building block directly in your application. After [Dapr is installed]({{% ref install-dapr-cli.md %}}), you can begin using workflows, starting with [how to author a workflow]({{% ref howto-author-workflow.md %}}).
+## Managing Workflows
+
+Dapr provides comprehensive workflow management capabilities through both the HTTP API and the CLI.
+
+### Workflow Lifecycle Operations
+
+**Start Workflows**
+```bash
+dapr workflow run MyWorkflow --app-id myapp --input '{"key": "value"}'
+```
+
+**Monitor Workflows**
+```bash
+# List active workflows for a given application
+dapr workflow list --app-id myapp --filter-status RUNNING
+
+# View execution history
+dapr workflow history --app-id myapp
+```
+
+**Control Workflows**
+```bash
+# Suspend, resume, or terminate
+dapr workflow suspend --app-id myapp
+dapr workflow resume --app-id myapp
+dapr workflow terminate --app-id myapp
+```
+
+**Maintenance Operations**
+```bash
+# Purge completed workflows
+dapr workflow purge --app-id myapp --all-older-than 720h
+```
+
+See [How-To: Manage workflows]({{% ref howto-manage-workflow.md %}}) for detailed instructions.
+
## Limitations
-- **State stores:** Due to underlying limitations in some database choices, more commonly NoSQL databases, you might run into limitations around storing internal states. For example, CosmosDB has a maximum single operation item limit of only 100 states in a single request.
+- **State stores:** You can only use state stores which support workflows, as [described here]({{% ref supported-state-stores %}}).
+- Azure Cosmos DB has [payload and workflow complexity limitations]({{% ref "setup-azure-cosmosdb.md#workflow-limitations" %}}).
+- AWS DynamoDB has [workflow complexity limitations]({{% ref "setup-dynamodb.md#workflow-limitations" %}}).
## Watch the demo
diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md
index faa92d946ae..8158ddfdbbc 100644
--- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md
+++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md
@@ -307,6 +307,16 @@ In addition to the challenges mentioned in [the previous pattern]({{% ref "workf
Dapr Workflows provides a way to express the fan-out/fan-in pattern as a simple function, as shown in the following example:
+```bash
+# Start the workflow
+dapr workflow run DataProcessingWorkflow \
+ --app-id processor \
+ --input '{"items": ["item1", "item2", "item3"]}'
+
+# Monitor parallel execution
+dapr workflow history --app-id processor --output json
+```
+
{{< tabpane text=true >}}
{{% tab "Python" %}}
@@ -615,8 +625,7 @@ await context.CallActivityAsync("PostResults", sum);
{{< /tabpane >}}
-With the release of 1.16, it's even easier to process workflow activities in parallel while putting an upper cap on
-concurrency by using the following extension methods on the `WorkflowContext`:
+You can process workflow activities in parallel while putting an upper cap on concurrency by using the following extension methods on the `WorkflowContext`:
{{< tabpane text=true >}}
@@ -1428,33 +1437,33 @@ The following diagram illustrates this flow.
```java
public class PaymentProcessingWorkflow implements Workflow {
-
+
@Override
public WorkflowStub create() {
return ctx -> {
ctx.getLogger().info("Starting Workflow: " + ctx.getName());
var orderId = ctx.getInput(String.class);
             List<String> compensations = new ArrayList<>();
-
+
try {
// Step 1: Reserve inventory
String reservationId = ctx.callActivity(ReserveInventoryActivity.class.getName(), orderId, String.class).await();
ctx.getLogger().info("Inventory reserved: {}", reservationId);
compensations.add("ReleaseInventory");
-
+
// Step 2: Process payment
String paymentId = ctx.callActivity(ProcessPaymentActivity.class.getName(), orderId, String.class).await();
ctx.getLogger().info("Payment processed: {}", paymentId);
compensations.add("RefundPayment");
-
+
// Step 3: Ship order
String shipmentId = ctx.callActivity(ShipOrderActivity.class.getName(), orderId, String.class).await();
ctx.getLogger().info("Order shipped: {}", shipmentId);
compensations.add("CancelShipment");
-
+
} catch (TaskFailedException e) {
ctx.getLogger().error("Activity failed: {}", e.getMessage());
-
+
// Execute compensations in reverse order
Collections.reverse(compensations);
for (String compensation : compensations) {
@@ -1462,24 +1471,24 @@ public class PaymentProcessingWorkflow implements Workflow {
switch (compensation) {
case "CancelShipment":
String shipmentCancelResult = ctx.callActivity(
- CancelShipmentActivity.class.getName(),
- orderId,
+ CancelShipmentActivity.class.getName(),
+ orderId,
String.class).await();
ctx.getLogger().info("Shipment cancellation completed: {}", shipmentCancelResult);
break;
-
+
case "RefundPayment":
String refundResult = ctx.callActivity(
- RefundPaymentActivity.class.getName(),
- orderId,
+ RefundPaymentActivity.class.getName(),
+ orderId,
String.class).await();
ctx.getLogger().info("Payment refund completed: {}", refundResult);
break;
-
+
case "ReleaseInventory":
String releaseResult = ctx.callActivity(
- ReleaseInventoryActivity.class.getName(),
- orderId,
+ ReleaseInventoryActivity.class.getName(),
+ orderId,
String.class).await();
ctx.getLogger().info("Inventory release completed: {}", releaseResult);
break;
@@ -1494,7 +1503,7 @@ public class PaymentProcessingWorkflow implements Workflow {
// Step 4: Send confirmation
ctx.callActivity(SendConfirmationActivity.class.getName(), orderId, Void.class).await();
ctx.getLogger().info("Confirmation sent for order: {}", orderId);
-
+
ctx.complete("Order processed successfully: " + orderId);
};
}
@@ -1597,7 +1606,7 @@ The compensation pattern ensures that your distributed workflows can maintain co
- [Try out Dapr Workflows using the quickstart]({{% ref workflow-quickstart.md %}})
- [Workflow overview]({{% ref workflow-overview.md %}})
- [Workflow API reference]({{% ref workflow_api.md %}})
-- Try out the following examples:
+- Try out the following examples:
- [Python](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow)
- [JavaScript](https://github.com/dapr/js-sdk/tree/main/examples/workflow)
- [.NET](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow)
diff --git a/daprdocs/content/en/developing-applications/error-codes/errors-overview.md b/daprdocs/content/en/developing-applications/error-codes/errors-overview.md
index 00e56ae61a9..b66ce4bcb0f 100644
--- a/daprdocs/content/en/developing-applications/error-codes/errors-overview.md
+++ b/daprdocs/content/en/developing-applications/error-codes/errors-overview.md
@@ -6,7 +6,7 @@ weight: 10
description: "Overview of Dapr errors"
---
-An error code is a numeric or alphamueric code that indicates the nature of an error and, when possible, why it occured.
+An error code is a numeric or alphanumeric code that indicates the nature of an error and, when possible, why it occurred.
Dapr error codes are standardized strings for over 80+ common errors across HTTP and gRPC requests when using the Dapr APIs. These codes are both:
- Returned in the JSON response body of the request.
diff --git a/daprdocs/content/en/developing-applications/local-development/multi-app-dapr-run/multi-app-template.md b/daprdocs/content/en/developing-applications/local-development/multi-app-dapr-run/multi-app-template.md
index 7e527d74fe1..7cac1459d1b 100644
--- a/daprdocs/content/en/developing-applications/local-development/multi-app-dapr-run/multi-app-template.md
+++ b/daprdocs/content/en/developing-applications/local-development/multi-app-dapr-run/multi-app-template.md
@@ -100,7 +100,7 @@ dapr list -k
## Stop the multi-app template
-Stop the multi-app run template anytime with either of the following commands:
+Stop the multi-app run template any time with either of the following commands:
{{< tabpane text=true >}}
diff --git a/daprdocs/content/en/developing-applications/sdks/_index.md b/daprdocs/content/en/developing-applications/sdks/_index.md
index 079a06d3297..ace28b18042 100644
--- a/daprdocs/content/en/developing-applications/sdks/_index.md
+++ b/daprdocs/content/en/developing-applications/sdks/_index.md
@@ -31,6 +31,12 @@ Select your [preferred language below]({{% ref "#sdk-languages" %}}) to learn mo
| [C++](https://github.com/dapr/cpp-sdk) | In development | ✔ | | |
| [Rust]({{% ref rust %}}) | In development | ✔ | | ✔ | |
+
+## Frameworks
+
+| Framework | Language | Status | Description |
+|----------------------------------------|:----------------------|:---------------|:-----------------:|
+| [Dapr Agents]({{% ref "../../developing-ai/dapr-agents" %}}) | Python | In development | A framework for building LLM-powered autonomous agents that leverages Dapr's distributed systems capabilities for durable execution, with built-in security, observability, and state management. |
## Further reading
- [Serialization in the Dapr SDKs]({{% ref sdk-serialization.md %}})
diff --git a/daprdocs/content/en/getting-started/install-dapr-selfhost.md b/daprdocs/content/en/getting-started/install-dapr-selfhost.md
index 616c71f4d2e..0848f61caaa 100644
--- a/daprdocs/content/en/getting-started/install-dapr-selfhost.md
+++ b/daprdocs/content/en/getting-started/install-dapr-selfhost.md
@@ -170,8 +170,6 @@ explorer "%USERPROFILE%\.dapr"
{{< /tabpane >}}
-
-
### Slim init
To install the CLI without any default configuration files or Docker containers, use the `--slim` flag. [Learn more about the `init` command and its flags.]({{% ref dapr-init.md %}})
@@ -180,6 +178,16 @@ To install the CLI without any default configuration files or Docker containers,
dapr init --slim
```
+### Other tooling (optional)
-{{< button text="Next step: Use the Dapr API >>" page="getting-started/get-started-api.md" >}}
+#### Diagrid Dashboard for Dapr Workflow
+
+If you're planning to build Dapr Workflow applications, you can install the [Diagrid Dashboard](https://diagrid.ws/diagrid-dashboard-docs) to visualize workflow state during local development:
+
+Start the Diagrid Dashboard container using Docker:
+```bash
+docker run -p 8080:8080 ghcr.io/diagridio/diagrid-dashboard:latest
+```
+
+{{< button text="Next step: Use the Dapr API >>" page="getting-started/get-started-api.md" >}}
diff --git a/daprdocs/content/en/getting-started/quickstarts/_index.md b/daprdocs/content/en/getting-started/quickstarts/_index.md
index 102aae1f5b2..6b5d17f6553 100644
--- a/daprdocs/content/en/getting-started/quickstarts/_index.md
+++ b/daprdocs/content/en/getting-started/quickstarts/_index.md
@@ -25,6 +25,7 @@ Hit the ground running with our Dapr quickstarts, complete with code samples aim
| [Service Invocation]({{% ref serviceinvocation-quickstart %}}) | Synchronous communication between two services using HTTP or gRPC. |
| [Publish and Subscribe]({{% ref pubsub-quickstart %}}) | Asynchronous communication between two services using messaging. |
| [Workflow]({{% ref workflow-quickstart %}}) | Orchestrate business workflow activities in long running, fault-tolerant, stateful applications. |
+| [Agents]({{% ref dapr-agents-quickstarts.md %}}) | Build LLM-powered autonomous agentic applications. |
| [State Management]({{% ref statemanagement-quickstart %}}) | Store a service's data as key/value pairs in supported state stores. |
| [Bindings]({{% ref bindings-quickstart %}}) | Work with external systems using input bindings to respond to events and output bindings to call operations. |
| [Actors]({{% ref actors-quickstart %}}) | Run a microservice and a simple console client to demonstrate stateful object patterns in Dapr Actors. |
@@ -33,4 +34,4 @@ Hit the ground running with our Dapr quickstarts, complete with code samples aim
| [Resiliency]({{% ref resiliency %}}) | Define and apply fault-tolerance policies to your Dapr API requests. |
| [Cryptography]({{% ref cryptography-quickstart %}}) | Encrypt and decrypt data using Dapr's cryptographic APIs. |
| [Jobs]({{% ref jobs-quickstart %}}) | Schedule, retrieve, and delete jobs using Dapr's jobs APIs. |
-| [Conversation]({{% ref conversation-quickstart %}}) | Securely and reliably interact with Large Language Models (LLMs). |
\ No newline at end of file
+| [Conversation]({{% ref conversation-quickstart %}}) | Securely and reliably interact with Large Language Models (LLMs). |
diff --git a/daprdocs/content/en/getting-started/quickstarts/conversation-quickstart.md b/daprdocs/content/en/getting-started/quickstarts/conversation-quickstart.md
index 8caef45b9b3..fe2731ced93 100644
--- a/daprdocs/content/en/getting-started/quickstarts/conversation-quickstart.md
+++ b/daprdocs/content/en/getting-started/quickstarts/conversation-quickstart.md
@@ -23,6 +23,8 @@ Currently, you can only use JavaScript for the quickstart sample using HTTP, not
## Run the app with the template file
+Select your preferred language-specific Dapr SDK before proceeding with the Quickstart.
+
{{< tabpane text=true >}}
@@ -61,12 +63,13 @@ pip3 install -r requirements.txt
### Step 3: Launch the conversation service
-Navigate back to the `sdk` directory and start the conversation service with the following command:
```bash
dapr run -f .
```
+> **Note**: Since the `python3` executable is not available on Windows by default, you may need to use `python app.py` instead of `python3 app.py`.
+
**Expected output**
```
@@ -76,9 +79,7 @@ dapr run -f .
### What happened?
-When you ran `dapr init` during Dapr install, the [`dapr.yaml` Multi-App Run template file]({{% ref "#dapryaml-multi-app-run-template-file" %}}) was generated in the `.dapr/components` directory.
-
-Running `dapr run -f .` in this Quickstart started [conversation.go]({{% ref "#programcs-conversation-app" %}}).
+Running `dapr run -f .` in this Quickstart started [app.py]({{% ref "#programcs-conversation-app" %}}).
#### `dapr.yaml` Multi-App Run template file
@@ -118,27 +119,23 @@ In the application code:
```python
from dapr.clients import DaprClient
-from dapr.clients.grpc._request import ConversationInput
+from dapr.clients.grpc.conversation import ConversationInputAlpha2, ConversationMessage, ConversationMessageContent, ConversationMessageOfUser
with DaprClient() as d:
- inputs = [
- ConversationInput(content="What is dapr?", role='user', scrub_pii=True),
- ]
-
- metadata = {
- 'model': 'modelname',
- 'key': 'authKey',
- 'cacheTTL': '10m',
- }
+ text_input = "What is dapr?"
+ provider_component = "echo"
- print('Input sent: What is dapr?')
+ inputs = [
+ ConversationInputAlpha2(messages=[ConversationMessage(of_user=ConversationMessageOfUser(content=[ConversationMessageContent(text=text_input)]))],
+ scrub_pii=True),
+ ]
- response = d.converse_alpha1(
- name='echo', inputs=inputs, temperature=0.7, context_id='chat-123', metadata=metadata
- )
+ print(f'Input sent: {text_input}')
- for output in response.outputs:
- print(f'Output response: {output.result}')
+ response = d.converse_alpha2(name=provider_component, inputs=inputs, temperature=0.7, context_id='chat-123')
+
+ for output in response.outputs:
+ print(f'Output response: {output.choices[0].message.content}')
```
{{% /tab %}}
@@ -188,14 +185,16 @@ dapr run -f .
**Expected output**
```
-== APP - conversation == Input sent: What is dapr?
+== APP - conversation == Conversation input sent: What is dapr?
== APP - conversation == Output response: What is dapr?
+== APP - conversation == Tool calling input sent: What is the weather like in San Francisco in celsius?
+== APP - conversation == Output message: { outputs: [ { choices: [Array] } ] }
+== APP - conversation == Output message: What is the weather like in San Francisco in celsius?
+== APP - conversation == Tool calls detected: [{"id":"0","function":{"name":"get_weather","arguments":"location,unit"}}]
```
### What happened?
-When you ran `dapr init` during Dapr install, the [`dapr.yaml` Multi-App Run template file]({{% ref "#dapryaml-multi-app-run-template-file" %}}) was generated in the `.dapr/components` directory.
-
Running `dapr run -f .` in this Quickstart started [conversation.go]({{% ref "#programcs-conversation-app" %}}).
#### `dapr.yaml` Multi-App Run template file
@@ -231,50 +230,138 @@ To interface with a real LLM, swap out the mock component with one of [the suppo
#### `index.js` conversation app
-In the application code:
+In the first part of the application code:
- The app sends an input "What is dapr?" to the echo mock LLM component.
- The mock LLM echoes "What is dapr?".
```javascript
-const conversationComponentName = "echo";
+const daprHost = process.env.DAPR_HOST || "http://localhost";
+const daprHttpPort = process.env.DAPR_HTTP_PORT || "3500";
+const conversationComponentName = "echo";
-async function main() {
- const daprHost = process.env.DAPR_HOST || "http://localhost";
- const daprHttpPort = process.env.DAPR_HTTP_PORT || "3500";
+const reqURL = `${daprHost}:${daprHttpPort}/v1.0-alpha2/conversation/${conversationComponentName}/converse`;
- const inputBody = {
- name: "echo",
- inputs: [{ message: "What is dapr?" }],
+// Plain conversation
+try {
+ const converseInputBody = {
+ inputs: [
+ {
+ messages: [
+ {
+ ofUser: {
+ content: [
+ {
+ text: "What is dapr?",
+ },
+ ],
+ },
+ },
+ ],
+ },
+ ],
parameters: {},
metadata: {},
};
+ const response = await fetch(reqURL, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify(converseInputBody),
+ });
+
+ console.log("Conversation input sent: What is dapr?");
+
+ const data = await response.json();
+ const result = data.outputs[0].choices[0].message.content;
+ console.log("Output response:", result);
+} catch (error) {
+ console.error("Error:", error.message);
+ process.exit(1);
+}
+```
- const reqURL = `${daprHost}:${daprHttpPort}/v1.0-alpha1/conversation/${conversationComponentName}/converse`;
+In the second part of the application code:
+- The app sends the input "What is the weather like in San Francisco in celsius?" together with the definition of an available tool, `get_weather`.
+- The mock LLM echoes "What is the weather like in San Francisco in celsius?" and the function definition, which is detected in the response.
- try {
- const response = await fetch(reqURL, {
- method: "POST",
- headers: {
- "Content-Type": "application/json",
+```javascript
+try {
+ const toolCallingInputBody = {
+ inputs: [
+ {
+ messages: [
+ {
+ ofUser: {
+ content: [
+ {
+ text: "What is the weather like in San Francisco in celsius?",
+ },
+ ],
+ },
+ },
+ ],
+ scrubPii: false,
},
- body: JSON.stringify(inputBody),
- });
-
- console.log("Input sent: What is dapr?");
-
- const data = await response.json();
- const result = data.outputs[0].result;
- console.log("Output response:", result);
- } catch (error) {
- console.error("Error:", error.message);
- process.exit(1);
+ ],
+ metadata: {
+ api_key: "test-key",
+ version: "1.0",
+ },
+ scrubPii: false,
+ temperature: 0.7,
+ tools: [
+ {
+ function: {
+ name: "get_weather",
+ description: "Get the current weather for a location",
+ parameters: {
+ type: "object",
+ properties: {
+ location: {
+ type: "string",
+ description: "The city and state, e.g. San Francisco, CA",
+ },
+ unit: {
+ type: "string",
+ enum: ["celsius", "fahrenheit"],
+ description: "The temperature unit to use",
+ },
+ },
+ required: ["location"],
+ },
+ },
+ },
+ ],
+ toolChoice: "auto",
+ };
+ const response = await fetch(reqURL, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify(toolCallingInputBody),
+ });
+
+ console.log(
+ "Tool calling input sent: What is the weather like in San Francisco in celsius?"
+ );
+
+ const data = await response.json();
+
+ const result = data?.outputs?.[0]?.choices?.[0]?.message?.content;
+ console.log("Output message:", result);
+
+ if (data?.outputs?.[0]?.choices?.[0]?.message?.toolCalls) {
+ console.log(
+ "Tool calls detected:",
+ JSON.stringify(data.outputs[0].choices[0].message?.toolCalls)
+ );
+ } else {
+ console.log("No tool calls in response");
}
-}
-
-main().catch((error) => {
- console.error("Unhandled error:", error);
+} catch (error) {
+ console.error("Error:", error.message);
process.exit(1);
-});
```
{{% /tab %}}
@@ -318,14 +405,18 @@ dapr run -f .
**Expected output**
```
-== APP - conversation == Input sent: What is dapr?
+== APP - conversation == Conversation input sent: What is dapr?
== APP - conversation == Output response: What is dapr?
+== APP - conversation == Tool calling input sent: What is the weather like in San Francisco in celsius?
+== APP - conversation == Output message: What is the weather like in San Francisco in celsius?
+== APP - conversation == Tool calls detected:
+== APP - conversation == Tool call: {"id":0,"function":{"name":"get_weather","arguments":"location,unit"}}
+== APP - conversation == Function name: get_weather
+== APP - conversation == Function arguments: location,unit
```
### What happened?
-When you ran `dapr init` during Dapr install, the [`dapr.yaml` Multi-App Run template file]({{% ref "#dapryaml-multi-app-run-template-file" %}}) was generated in the `.dapr/components` directory.
-
Running `dapr run -f .` in this Quickstart started the [conversation Program.cs]({{% ref "#programcs-conversation-app" %}}).
#### `dapr.yaml` Multi-App Run template file
@@ -364,44 +455,244 @@ To interface with a real LLM, swap out the mock component with one of [the suppo
In the application code:
- The app sends an input "What is dapr?" to the echo mock LLM component.
- The mock LLM echoes "What is dapr?".
+- The app sends the input "What is the weather like in San Francisco in celsius?" together with the definition of an available tool, `get_weather`.
+- The mock LLM echoes "What is the weather like in San Francisco in celsius?" and the function definition, which is detected in the response.
+
```csharp
+using System.Text.Json;
using Dapr.AI.Conversation;
+using Dapr.AI.Conversation.ConversationRoles;
using Dapr.AI.Conversation.Extensions;
+using Dapr.AI.Conversation.Tools;
-class Program
-{
- private const string ConversationComponentName = "echo";
-
- static async Task Main(string[] args)
- {
- const string prompt = "What is dapr?";
+const string conversationComponentName = "echo";
+const string conversationText = "What is dapr?";
+const string toolCallInput = "What is the weather like in San Francisco in celsius?";
- var builder = WebApplication.CreateBuilder(args);
- builder.Services.AddDaprConversationClient();
- var app = builder.Build();
+var builder = WebApplication.CreateBuilder(args);
+builder.Services.AddDaprConversationClient();
+var app = builder.Build();
- //Instantiate Dapr Conversation Client
- var conversationClient = app.Services.GetRequiredService();
+//
+// Setup
- try
- {
- // Send a request to the echo mock LLM component
- var response = await conversationClient.ConverseAsync(ConversationComponentName, [new(prompt, DaprConversationRole.Generic)]);
- Console.WriteLine("Input sent: " + prompt);
+var conversationClient = app.Services.GetRequiredService<DaprConversationClient>();
- if (response != null)
- {
- Console.Write("Output response:");
- foreach (var resp in response.Outputs)
+var conversationOptions = new ConversationOptions(conversationComponentName)
+{
+ ScrubPII = false,
+ ToolChoice = ToolChoice.Auto,
+ Temperature = 0.7,
+ Tools = [
+ new ToolFunction("function")
{
- Console.WriteLine($" {resp.Result}");
- }
- }
- }
- catch (Exception ex)
+ Name = "get_weather",
+ Description = "Get the current weather for a location",
+ Parameters = JsonSerializer.Deserialize>("""
+ {
+ "type": "object",
+ "properties": {
+ "location": {
+ "type": "string",
+ "description": "The city and state, e.g. San Francisco, CA"
+ },
+ "unit": {
+ "type": "string",
+ "enum": ["celsius", "fahrenheit"],
+ "description": "The temperature unit to use"
+ }
+ },
+ "required": ["location"]
+ }
+ """) ?? throw new("Unable to parse tool function parameters."),
+ },
+ ],
+};
+
+//
+// Simple Conversation
+
+var conversationResponse = await conversationClient.ConverseAsync(
+ [new ConversationInput(new List
{
- Console.WriteLine("Error: " + ex.Message);
+ new UserMessage {
+ Name = "TestUser",
+ Content = [
+ new MessageContent(conversationText),
+ ],
+ },
+ })],
+ conversationOptions
+);
+
+Console.WriteLine($"Conversation input sent: {conversationText}");
+Console.WriteLine($"Output response: {conversationResponse.Outputs.First().Choices.First().Message.Content}");
+
+//
+// Tool Calling
+
+var toolCallResponse = await conversationClient.ConverseAsync(
+ [new ConversationInput(new List
+ {
+ new UserMessage {
+ Name = "TestUser",
+ Content = [
+ new MessageContent(toolCallInput),
+ ],
+ },
+ })],
+ conversationOptions
+);
+
+Console.WriteLine($"Tool calling input sent: {toolCallInput}");
+Console.WriteLine($"Output message: {toolCallResponse.Outputs.First().Choices.First().Message.Content}");
+Console.WriteLine("Tool calls detected:");
+
+var functionToolCall = toolCallResponse.Outputs.First().Choices.First().Message.ToolCalls.First() as CalledToolFunction
+ ?? throw new("Unexpected tool call type for demo.");
+
+var toolCallJson = JsonSerializer.Serialize(new
+{
+ id = 0,
+ function = new
+ {
+ name = functionToolCall.Name,
+ arguments = functionToolCall.JsonArguments,
+ },
+});
+Console.WriteLine($"Tool call: {toolCallJson}");
+Console.WriteLine($"Function name: {functionToolCall.Name}");
+Console.WriteLine($"Function arguments: {functionToolCall.JsonArguments}");
+```
+
+{{% /tab %}}
+
+
+
+
+{{% tab "Java" %}}
+
+
+### Step 1: Pre-requisites
+
+For this example, you will need:
+
+- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started).
+- Java JDK 17 (or greater):
+ - [Oracle JDK](https://www.oracle.com/java/technologies/downloads), or
+ - OpenJDK
+- [Apache Maven](https://maven.apache.org/install.html), version 3.x.
+
+- [Docker Desktop](https://www.docker.com/products/docker-desktop)
+
+
+### Step 2: Set up the environment
+
+Clone the [sample provided in the Quickstarts repo](https://github.com/dapr/quickstarts/tree/master/conversation).
+
+```bash
+git clone https://github.com/dapr/quickstarts.git
+```
+
+From the root of the Quickstarts directory, navigate into the conversation directory:
+
+```bash
+cd conversation/java/sdk/conversation
+```
+
+Install the dependencies:
+
+```bash
+mvn clean install
+```
+
+### Step 3: Launch the conversation service
+
+Navigate back to the sdk directory and start the conversation service with the following command:
+
+```bash
+dapr run -f .
+```
+
+**Expected output**
+
+```
+== APP - conversation == Input: What is Dapr?
+== APP - conversation == Output response: What is Dapr?
+```
+
+### What happened?
+
+Running `dapr run -f .` in this Quickstart started [Conversation.java]({{% ref "#programcs-conversation-app" %}}).
+
+#### `dapr.yaml` Multi-App Run template file
+
+Running the [Multi-App Run template file]({{% ref multi-app-dapr-run %}}) with `dapr run -f .` starts all applications in your project. This Quickstart has only one application, so the `dapr.yaml` file contains the following:
+
+```yml
+version: 1
+common:
+ resourcesPath: ../../components
+apps:
+ - appID: conversation
+ appDirPath: ./conversation/target
+ command: ["java", "-jar", "ConversationAIService-0.0.1-SNAPSHOT.jar"]
+```
+
+#### Echo mock LLM component
+
+In the [`conversation/components`](https://github.com/dapr/quickstarts/tree/master/conversation/components) directory of the quickstart, the [`conversation.yaml` file](https://github.com/dapr/quickstarts/tree/master/conversation/components/conversation.yaml) configures the echo LLM component.
+
+```yml
+apiVersion: dapr.io/v1alpha1
+kind: Component
+metadata:
+ name: echo
+spec:
+ type: conversation.echo
+ version: v1
+```
+
+To interface with a real LLM, swap out the mock component with one of [the supported conversation components]({{% ref "supported-conversation" %}}). For example, to use an OpenAI component, see the [example in the conversation how-to guide]({{% ref "howto-conversation-layer#use-the-openai-component" %}})
+
+#### `Conversation.java` conversation app
+
+In the application code:
+- The app sends an input "What is dapr?" to the echo mock LLM component.
+- The mock LLM echoes "What is dapr?".
+
+```java
+package com.service;
+
+import io.dapr.client.DaprClientBuilder;
+import io.dapr.client.DaprPreviewClient;
+import io.dapr.client.domain.ConversationInput;
+import io.dapr.client.domain.ConversationRequest;
+import io.dapr.client.domain.ConversationResponse;
+import reactor.core.publisher.Mono;
+
+import java.util.List;
+
+public class Conversation {
+
+ public static void main(String[] args) {
+ String prompt = "What is Dapr?";
+
+ try (DaprPreviewClient client = new DaprClientBuilder().buildPreviewClient()) {
+ System.out.println("Input: " + prompt);
+
+ ConversationInput daprConversationInput = new ConversationInput(prompt);
+
+ // Component name is the name provided in the metadata block of the conversation.yaml file.
+      Mono<ConversationResponse> responseMono = client.converse(new ConversationRequest("echo",
+ List.of(daprConversationInput))
+ .setContextId("contextId")
+ .setScrubPii(true).setTemperature(1.1d));
+ ConversationResponse response = responseMono.block();
+ System.out.printf("Output response: %s", response.getConversationOutputs().get(0).getResult());
+ } catch (Exception e) {
+ throw new RuntimeException(e);
}
}
}
@@ -448,14 +739,15 @@ dapr run -f .
**Expected output**
```
-== APP - conversation == Input sent: What is dapr?
-== APP - conversation == Output response: What is dapr?
+== APP - conversation-sdk == Input sent: What is dapr?
+== APP - conversation-sdk == Output response: What is dapr?
+== APP - conversation-sdk == Tool calling input sent: What is the weather like in San Francisco in celsius?'
+== APP - conversation-sdk == Tool Call: Name: getWeather - Arguments: location,unit
+== APP - conversation-sdk == Tool Call Output: The weather in San Francisco is 25 degrees Celsius
```
### What happened?
-When you ran `dapr init` during Dapr install, the [`dapr.yaml` Multi-App Run template file]({{% ref "#dapryaml-multi-app-run-template-file" %}}) was generated in the `.dapr/components` directory.
-
Running `dapr run -f .` in this Quickstart started [conversation.go]({{% ref "#programcs-conversation-app" %}}).
#### `dapr.yaml` Multi-App Run template file
@@ -493,43 +785,163 @@ To interface with a real LLM, swap out the mock component with one of [the suppo
In the application code:
- The app sends an input "What is dapr?" to the echo mock LLM component.
-- The mock LLM echoes "What is dapr?".
+- The mock LLM echoes "What is dapr?".
+- The app sends an input “What is the weather like in San Francisco in celsius” together with the definition of a tool that is available `get_weather`.
+- The mock LLM echoes “What is the weather like in San Francisco in celsius?” and the function definition, which is detected in the response.
```go
-package main
-
import (
- "context"
- "fmt"
- "log"
+ "context"
+ "encoding/json"
+ "fmt"
+ "log"
+ "strings"
- dapr "github.com/dapr/go-sdk/client"
+ "github.com/invopop/jsonschema"
+ "google.golang.org/protobuf/encoding/protojson"
+ "google.golang.org/protobuf/types/known/structpb"
+
+ dapr "github.com/dapr/go-sdk/client"
)
+// createMapOfArgsForEcho is a helper function to deal with the issue with the echo component not returning args as a map but in csv format
+func createMapOfArgsForEcho(s string) ([]byte, error) {
+ m := map[string]any{}
+ for _, p := range strings.Split(s, ",") {
+ m[p] = p
+ }
+ return json.Marshal(m)
+}
+
+// getWeatherInLocation is an example function to use as a tool call
+func getWeatherInLocation(request GetDegreesWeatherRequest, defaultValues GetDegreesWeatherRequest) string {
+ location := request.Location
+ unit := request.Unit
+ if location == "location" {
+ location = defaultValues.Location
+ }
+ if unit == "unit" {
+ unit = defaultValues.Unit
+ }
+ return fmt.Sprintf("The weather in %s is 25 degrees %s", location, unit)
+}
+
+type GetDegreesWeatherRequest struct {
+ Location string `json:"location" jsonschema:"title=Location,description=The location to look up the weather for"`
+ Unit string `json:"unit" jsonschema:"enum=celsius,enum=fahrenheit,description=Unit"`
+}
+
+// GenerateFunctionTool helper method to create jsonschema input
+func GenerateFunctionTool[T any](name, description string) (*dapr.ConversationToolsAlpha2, error) {
+ reflector := jsonschema.Reflector{
+ AllowAdditionalProperties: false,
+ DoNotReference: true,
+ }
+ var v T
+
+ schema := reflector.Reflect(v)
+
+ schemaBytes, err := schema.MarshalJSON()
+ if err != nil {
+ return nil, err
+ }
+
+ var protoStruct structpb.Struct
+ if err := protojson.Unmarshal(schemaBytes, &protoStruct); err != nil {
+ return nil, fmt.Errorf("converting jsonschema to proto Struct: %w", err)
+ }
+
+ return (*dapr.ConversationToolsAlpha2)(&dapr.ConversationToolsFunctionAlpha2{
+ Name: name,
+ Description: &description,
+ Parameters: &protoStruct,
+ }), nil
+}
+
+// createUserMessageInput is a helper method to create user messages in expected proto format
+func createUserMessageInput(msg string) *dapr.ConversationInputAlpha2 {
+ return &dapr.ConversationInputAlpha2{
+ Messages: []*dapr.ConversationMessageAlpha2{
+ {
+ ConversationMessageOfUser: &dapr.ConversationMessageOfUserAlpha2{
+ Content: []*dapr.ConversationMessageContentAlpha2{
+ {
+ Text: &msg,
+ },
+ },
+ },
+ },
+ },
+ }
+}
+
func main() {
- client, err := dapr.NewClient()
- if err != nil {
- panic(err)
- }
+ client, err := dapr.NewClient()
+ if err != nil {
+ panic(err)
+ }
- input := dapr.ConversationInput{
- Message: "What is dapr?",
- // Role: nil, // Optional
- // ScrubPII: nil, // Optional
- }
+ inputMsg := "What is dapr?"
+ conversationComponent := "echo"
- fmt.Println("Input sent:", input.Message)
+ request := dapr.ConversationRequestAlpha2{
+ Name: conversationComponent,
+ Inputs: []*dapr.ConversationInputAlpha2{createUserMessageInput(inputMsg)},
+ }
+
+ fmt.Println("Input sent:", inputMsg)
- var conversationComponent = "echo"
+ resp, err := client.ConverseAlpha2(context.Background(), request)
+ if err != nil {
+ log.Fatalf("err: %v", err)
+ }
- request := dapr.NewConversationRequest(conversationComponent, []dapr.ConversationInput{input})
+ fmt.Println("Output response:", resp.Outputs[0].Choices[0].Message.Content)
+
+ tool, err := GenerateFunctionTool[GetDegreesWeatherRequest]("getWeather", "get weather from a location in the given unit")
+ if err != nil {
+ log.Fatalf("err: %v", err)
+ }
+
+ weatherMessage := "Tool calling input sent: What is the weather like in San Francisco in celsius?'"
+ requestWithTool := dapr.ConversationRequestAlpha2{
+ Name: conversationComponent,
+ Inputs: []*dapr.ConversationInputAlpha2{createUserMessageInput(weatherMessage)},
+ Tools: []*dapr.ConversationToolsAlpha2{tool},
+ }
+
+ resp, err = client.ConverseAlpha2(context.Background(), requestWithTool)
+ if err != nil {
+ log.Fatalf("err: %v", err)
+ }
- resp, err := client.ConverseAlpha1(context.Background(), request)
- if err != nil {
- log.Fatalf("err: %v", err)
- }
+ fmt.Println(resp.Outputs[0].Choices[0].Message.Content)
+ for _, toolCalls := range resp.Outputs[0].Choices[0].Message.ToolCalls {
+ fmt.Printf("Tool Call: Name: %s - Arguments: %v\n", toolCalls.ToolTypes.Name, toolCalls.ToolTypes.Arguments)
+
+ // parse the arguments and execute tool
+ args := []byte(toolCalls.ToolTypes.Arguments)
+ if conversationComponent == "echo" {
+ // The echo component does not return a compliant tool calling response in json format but rather returns a csv
+ args, err = createMapOfArgsForEcho(toolCalls.ToolTypes.Arguments)
+ if err != nil {
+ log.Fatalf("err: %v", err)
+ }
+ }
- fmt.Println("Output response:", resp.Outputs[0].Result)
+ // find the tool (only one in this case) and execute
+ for _, toolInfo := range requestWithTool.Tools {
+ if toolInfo.Name == toolCalls.ToolTypes.Name && toolInfo.Name == "getWeather" {
+ var reqArgs GetDegreesWeatherRequest
+ if err = json.Unmarshal(args, &reqArgs); err != nil {
+ log.Fatalf("err: %v", err)
+ }
+ // execute tool
+ toolExecutionOutput := getWeatherInLocation(reqArgs, GetDegreesWeatherRequest{Location: "San Francisco", Unit: "Celsius"})
+ fmt.Printf("Tool Call Output: %s\n", toolExecutionOutput)
+ }
+ }
+ }
}
```
@@ -630,17 +1042,19 @@ npm install
### Step 3: Launch the conversation service
-Navigate back to the `http` directory and start the conversation service with the following command:
```bash
-dapr run --app-id conversation --resources-path ../../../components/ -- npm run start
+dapr run --app-id conversation --resources-path ../../../components -- npm run start
```
**Expected output**
```
-== APP - conversation == Input sent: What is dapr?
-== APP - conversation == Output response: What is dapr?
+== APP == Conversation input sent: What is dapr?
+== APP == Output response: What is dapr?
+== APP == Tool calling input sent: What is the weather like in San Francisco in celsius?
+== APP == Output message: What is the weather like in San Francisco in celsius?
+== APP == Tool calls detected: [{"id":"0","function":{"name":"get_weather","arguments":"location,unit"}}]
```
{{% /tab %}}
@@ -690,8 +1104,68 @@ dapr run --app-id conversation --resources-path ../../../components/ -- dotnet r
**Expected output**
```
-== APP - conversation == Input sent: What is dapr?
-== APP - conversation == Output response: What is dapr?
+== APP == Conversation input sent: What is dapr?
+== APP == Output response: What is dapr?
+== APP == Tool calling input sent: What is the weather like in San Francisco in celsius?
+== APP == Output message: What is the weather like in San Francisco in celsius?
+== APP == Tool calls detected:
+== APP == Tool call: {"id":0,"function":{"name":"get_weather","arguments":"location,unit"}}
+== APP == Function name: get_weather
+== APP == Function arguments: location,unit
+```
+
+{{% /tab %}}
+
+
+{{% tab "Java" %}}
+
+
+### Step 1: Pre-requisites
+
+For this example, you will need:
+
+- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started).
+- Java JDK 17 (or greater):
+ - [Oracle JDK](https://www.oracle.com/java/technologies/downloads), or
+ - OpenJDK
+- [Apache Maven](https://maven.apache.org/install.html), version 3.x.
+
+- [Docker Desktop](https://www.docker.com/products/docker-desktop)
+
+
+### Step 2: Set up the environment
+
+Clone the [sample provided in the Quickstarts repo](https://github.com/dapr/quickstarts/tree/master/conversation).
+
+```bash
+git clone https://github.com/dapr/quickstarts.git
+```
+
+From the root of the Quickstarts directory, navigate into the conversation directory:
+
+```bash
+cd conversation/java/sdk/conversation
+```
+
+Install the dependencies:
+
+```bash
+mvn clean install
+```
+
+### Step 3: Launch the conversation service
+
+Start the conversation service with the following command:
+
+```bash
+dapr run --app-id conversation --resources-path ../../../components/ -- java -jar target/ConversationAIService-0.0.1-SNAPSHOT.jar com.service.Conversation
+```
+
+**Expected output**
+
+```
+== APP == Input: What is Dapr?
+== APP == Output response: What is Dapr?
```
{{% /tab %}}
@@ -741,8 +1215,12 @@ dapr run --app-id conversation --resources-path ../../../components/ -- go run .
**Expected output**
```
-== APP - conversation == Input sent: What is dapr?
-== APP - conversation == Output response: What is dapr?
+== APP == dapr client initializing for: 127.0.0.1:53826
+== APP == Input sent: What is dapr?
+== APP == Output response: What is dapr?
+== APP == Tool calling input sent: What is the weather like in San Francisco in celsius?'
+== APP == Tool Call: Name: getWeather - Arguments: location,unit
+== APP == Tool Call Output: The weather in San Francisco is 25 degrees Celsius
```
{{% /tab %}}
diff --git a/daprdocs/content/en/getting-started/quickstarts/jobs-quickstart.md b/daprdocs/content/en/getting-started/quickstarts/jobs-quickstart.md
index 73e9e4b5c24..3ea0a9771c4 100644
--- a/daprdocs/content/en/getting-started/quickstarts/jobs-quickstart.md
+++ b/daprdocs/content/en/getting-started/quickstarts/jobs-quickstart.md
@@ -81,6 +81,15 @@ dapr run -f .
== APP - job-scheduler == Deleted job: BB-8
```
+You should eventually see the jobs being scheduled in the scheduler:
+
+```bash
+$ dapr scheduler list
+NAME TARGET BEGIN COUNT LAST TRIGGER
+C-3PO job +13.40s 0
+R2-D2 job +3.40s 0
+```
+
After 5 seconds, the terminal output should present the `R2-D2` job being processed:
```text
@@ -95,6 +104,13 @@ After 10 seconds, the terminal output should present the `C-3PO` job being proce
== APP - job-service == Executing maintenance job: Memory Wipe
```
+The jobs will no longer be listed in the scheduler:
+
+```bash
+$ dapr scheduler list
+NAME TARGET BEGIN COUNT LAST TRIGGER
+```
+
Once the process has completed, you can stop and clean up application processes with a single command.
```bash
diff --git a/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md b/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md
index 0c6be63d31d..35839f5c511 100644
--- a/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md
+++ b/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md
@@ -1756,11 +1756,8 @@ import (
"log"
"time"
- "github.com/dapr/durabletask-go/api"
- "github.com/dapr/durabletask-go/backend"
- "github.com/dapr/durabletask-go/client"
- "github.com/dapr/durabletask-go/task"
- dapr "github.com/dapr/go-sdk/client"
+ "github.com/dapr/durabletask-go/workflow"
+ "github.com/dapr/go-sdk/client"
)
var (
@@ -1774,43 +1771,46 @@ func main() {
fmt.Println("*** Welcome to the Dapr Workflow console app sample!")
fmt.Println("*** Using this app, you can place orders that start workflows.")
- registry := task.NewTaskRegistry()
+ r := workflow.NewRegistry()
- if err := registry.AddOrchestrator(OrderProcessingWorkflow); err != nil {
+ if err := r.AddWorkflow(OrderProcessingWorkflow); err != nil {
log.Fatal(err)
}
- if err := registry.AddActivity(NotifyActivity); err != nil {
+ if err := r.AddActivity(NotifyActivity); err != nil {
log.Fatal(err)
}
- if err := registry.AddActivity(RequestApprovalActivity); err != nil {
+ if err := r.AddActivity(RequestApprovalActivity); err != nil {
log.Fatal(err)
}
- if err := registry.AddActivity(VerifyInventoryActivity); err != nil {
+ if err := r.AddActivity(VerifyInventoryActivity); err != nil {
log.Fatal(err)
}
- if err := registry.AddActivity(ProcessPaymentActivity); err != nil {
+ if err := r.AddActivity(ProcessPaymentActivity); err != nil {
log.Fatal(err)
}
- if err := registry.AddActivity(UpdateInventoryActivity); err != nil {
+ if err := r.AddActivity(UpdateInventoryActivity); err != nil {
log.Fatal(err)
}
- daprClient, err := dapr.NewClient()
+ wfClient, err := client.NewWorkflowClient()
if err != nil {
- log.Fatalf("failed to create Dapr client: %v", err)
+ log.Fatalf("failed to initialise workflow client: %v", err)
}
- client := client.NewTaskHubGrpcClient(daprClient.GrpcClientConn(), backend.DefaultLogger())
- if err := client.StartWorkItemListener(context.TODO(), registry); err != nil {
- log.Fatalf("failed to start work item listener: %v", err)
+ if err := wfClient.StartWorker(context.Background(), r); err != nil {
+ log.Fatal(err)
}
+ dclient, err := client.NewClient()
+ if err != nil {
+ log.Fatal(err)
+ }
inventory := []InventoryItem{
{ItemName: "paperclip", PerItemCost: 5, Quantity: 100},
{ItemName: "cars", PerItemCost: 5000, Quantity: 10},
{ItemName: "computers", PerItemCost: 500, Quantity: 100},
}
- if err := restockInventory(daprClient, inventory); err != nil {
+ if err := restockInventory(dclient, inventory); err != nil {
log.Fatalf("failed to restock: %v", err)
}
@@ -1827,31 +1827,30 @@ func main() {
TotalCost: totalCost,
}
- id, err := client.ScheduleNewOrchestration(context.TODO(), workflowName,
- api.WithInput(orderPayload),
- )
+ id, err := wfClient.ScheduleWorkflow(context.Background(), workflowName, workflow.WithInput(orderPayload), workflow.WithInstanceID("order-"+time.Now().Format("20060102150405")))
if err != nil {
log.Fatalf("failed to start workflow: %v", err)
}
waitCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
- defer cancel()
- _, err = client.WaitForOrchestrationCompletion(waitCtx, id)
+ _, err = wfClient.WaitForWorkflowCompletion(waitCtx, id)
+ cancel()
if err != nil {
log.Fatalf("failed to wait for workflow: %v", err)
}
- respFetch, err := client.FetchOrchestrationMetadata(context.Background(), id, api.WithFetchPayloads(true))
+ respFetch, err := wfClient.FetchWorkflowMetadata(context.Background(), id, workflow.WithFetchPayloads(true))
if err != nil {
log.Fatalf("failed to get workflow: %v", err)
}
- fmt.Printf("workflow status: %v\n", respFetch.RuntimeStatus)
+ fmt.Printf("workflow status: %v\n", respFetch.String())
fmt.Println("Purchase of item is complete")
+ select {}
}
-func restockInventory(daprClient dapr.Client, inventory []InventoryItem) error {
+func restockInventory(daprClient client.Client, inventory []InventoryItem) error {
for _, item := range inventory {
itemSerialized, err := json.Marshal(item)
if err != nil {
@@ -1879,18 +1878,60 @@ import (
"log"
"time"
- "github.com/dapr/durabletask-go/task"
+ "github.com/dapr/durabletask-go/workflow"
"github.com/dapr/go-sdk/client"
)
+type OrderPayload struct {
+ ItemName string `json:"item_name"`
+ TotalCost int `json:"total_cost"`
+ Quantity int `json:"quantity"`
+}
+
+type OrderResult struct {
+ Processed bool `json:"processed"`
+}
+
+type InventoryItem struct {
+ ItemName string `json:"item_name"`
+ PerItemCost int `json:"per_item_cost"`
+ Quantity int `json:"quantity"`
+}
+
+type InventoryRequest struct {
+ RequestID string `json:"request_id"`
+ ItemName string `json:"item_name"`
+ Quantity int `json:"quantity"`
+}
+
+type InventoryResult struct {
+ Success bool `json:"success"`
+ InventoryItem InventoryItem `json:"inventory_item"`
+}
+
+type PaymentRequest struct {
+ RequestID string `json:"request_id"`
+ ItemBeingPurchased string `json:"item_being_purchased"`
+ Amount int `json:"amount"`
+ Quantity int `json:"quantity"`
+}
+
+type ApprovalRequired struct {
+ Approval bool `json:"approval"`
+}
+
+type Notification struct {
+ Message string `json:"message"`
+}
+
// OrderProcessingWorkflow is the main workflow for orchestrating activities in the order process.
-func OrderProcessingWorkflow(ctx *task.OrchestrationContext) (any, error) {
- orderID := ctx.ID
+func OrderProcessingWorkflow(ctx *workflow.WorkflowContext) (any, error) {
+ orderID := ctx.ID()
var orderPayload OrderPayload
if err := ctx.GetInput(&orderPayload); err != nil {
return nil, err
}
- err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{
+ err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{
Message: fmt.Sprintf("Received order %s for %d %s - $%d", orderID, orderPayload.Quantity, orderPayload.ItemName, orderPayload.TotalCost),
})).Await(nil)
if err != nil {
@@ -1898,8 +1939,8 @@ func OrderProcessingWorkflow(ctx *task.OrchestrationContext) (any, error) {
}
var verifyInventoryResult InventoryResult
- if err := ctx.CallActivity(VerifyInventoryActivity, task.WithActivityInput(InventoryRequest{
- RequestID: string(orderID),
+ if err := ctx.CallActivity(VerifyInventoryActivity, workflow.WithActivityInput(InventoryRequest{
+ RequestID: orderID,
ItemName: orderPayload.ItemName,
Quantity: orderPayload.Quantity,
})).Await(&verifyInventoryResult); err != nil {
@@ -1908,64 +1949,64 @@ func OrderProcessingWorkflow(ctx *task.OrchestrationContext) (any, error) {
if !verifyInventoryResult.Success {
notification := Notification{Message: fmt.Sprintf("Insufficient inventory for %s", orderPayload.ItemName)}
- err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(notification)).Await(nil)
+ err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(notification)).Await(nil)
return OrderResult{Processed: false}, err
}
if orderPayload.TotalCost > 5000 {
var approvalRequired ApprovalRequired
- if err := ctx.CallActivity(RequestApprovalActivity, task.WithActivityInput(orderPayload)).Await(&approvalRequired); err != nil {
+ if err := ctx.CallActivity(RequestApprovalActivity, workflow.WithActivityInput(orderPayload)).Await(&approvalRequired); err != nil {
return OrderResult{Processed: false}, err
}
- if err := ctx.WaitForSingleEvent("manager_approval", time.Second*200).Await(nil); err != nil {
+ if err := ctx.WaitForExternalEvent("manager_approval", time.Second*200).Await(nil); err != nil {
return OrderResult{Processed: false}, err
}
// TODO: Confirm timeout flow - this will be in the form of an error.
if approvalRequired.Approval {
- if err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{Message: fmt.Sprintf("Payment for order %s has been approved!", orderID)})).Await(nil); err != nil {
+ if err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{Message: fmt.Sprintf("Payment for order %s has been approved!", orderID)})).Await(nil); err != nil {
log.Printf("failed to notify of a successful order: %v\n", err)
}
} else {
- if err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{Message: fmt.Sprintf("Payment for order %s has been rejected!", orderID)})).Await(nil); err != nil {
+ if err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{Message: fmt.Sprintf("Payment for order %s has been rejected!", orderID)})).Await(nil); err != nil {
log.Printf("failed to notify of an unsuccessful order :%v\n", err)
}
return OrderResult{Processed: false}, err
}
}
- err = ctx.CallActivity(ProcessPaymentActivity, task.WithActivityInput(PaymentRequest{
- RequestID: string(orderID),
+ err = ctx.CallActivity(ProcessPaymentActivity, workflow.WithActivityInput(PaymentRequest{
+ RequestID: orderID,
ItemBeingPurchased: orderPayload.ItemName,
Amount: orderPayload.TotalCost,
Quantity: orderPayload.Quantity,
})).Await(nil)
if err != nil {
- if err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s failed!", orderID)})).Await(nil); err != nil {
+ if err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s failed!", orderID)})).Await(nil); err != nil {
log.Printf("failed to notify of a failed order: %v", err)
}
return OrderResult{Processed: false}, err
}
- err = ctx.CallActivity(UpdateInventoryActivity, task.WithActivityInput(PaymentRequest{
- RequestID: string(orderID),
+ err = ctx.CallActivity(UpdateInventoryActivity, workflow.WithActivityInput(PaymentRequest{
+ RequestID: orderID,
ItemBeingPurchased: orderPayload.ItemName,
Amount: orderPayload.TotalCost,
Quantity: orderPayload.Quantity,
})).Await(nil)
if err != nil {
- if err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s failed!", orderID)})).Await(nil); err != nil {
+ if err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s failed!", orderID)})).Await(nil); err != nil {
log.Printf("failed to notify of a failed order: %v", err)
}
return OrderResult{Processed: false}, err
}
- if err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s has completed!", orderID)})).Await(nil); err != nil {
+ if err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s has completed!", orderID)})).Await(nil); err != nil {
log.Printf("failed to notify of a successful order: %v", err)
}
return OrderResult{Processed: true}, err
}
// NotifyActivity outputs a notification message
-func NotifyActivity(ctx task.ActivityContext) (any, error) {
+func NotifyActivity(ctx workflow.ActivityContext) (any, error) {
var input Notification
if err := ctx.GetInput(&input); err != nil {
return "", err
@@ -1975,7 +2016,7 @@ func NotifyActivity(ctx task.ActivityContext) (any, error) {
}
// ProcessPaymentActivity is used to process a payment
-func ProcessPaymentActivity(ctx task.ActivityContext) (any, error) {
+func ProcessPaymentActivity(ctx workflow.ActivityContext) (any, error) {
var input PaymentRequest
if err := ctx.GetInput(&input); err != nil {
return "", err
@@ -1985,7 +2026,7 @@ func ProcessPaymentActivity(ctx task.ActivityContext) (any, error) {
}
// VerifyInventoryActivity is used to verify if an item is available in the inventory
-func VerifyInventoryActivity(ctx task.ActivityContext) (any, error) {
+func VerifyInventoryActivity(ctx workflow.ActivityContext) (any, error) {
var input InventoryRequest
if err := ctx.GetInput(&input); err != nil {
return nil, err
@@ -2017,7 +2058,7 @@ func VerifyInventoryActivity(ctx task.ActivityContext) (any, error) {
}
// UpdateInventoryActivity modifies the inventory.
-func UpdateInventoryActivity(ctx task.ActivityContext) (any, error) {
+func UpdateInventoryActivity(ctx workflow.ActivityContext) (any, error) {
var input PaymentRequest
if err := ctx.GetInput(&input); err != nil {
return nil, err
@@ -2051,7 +2092,7 @@ func UpdateInventoryActivity(ctx task.ActivityContext) (any, error) {
}
// RequestApprovalActivity requests approval for the order
-func RequestApprovalActivity(ctx task.ActivityContext) (any, error) {
+func RequestApprovalActivity(ctx workflow.ActivityContext) (any, error) {
var input OrderPayload
if err := ctx.GetInput(&input); err != nil {
return nil, err
@@ -2066,6 +2107,107 @@ func RequestApprovalActivity(ctx task.ActivityContext) (any, error) {
{{< /tabpane >}}
+
+## Step 5: Manage Your Workflow
+
+Now that your workflow is running, let's learn how to manage it using the Dapr CLI.
+
+### View Running Workflows
+
+Open a separate terminal and run the following CLI commands.
+
+```bash
+# List all workflows
+dapr workflow list --app-id order-processor --connection-string=redis://127.0.0.1:6379 -o wide
+```
+
+You should see output like:
+
+```
+NAMESPACE APP ID NAME INSTANCE ID CREATED LAST UPDATE STATUS
+default order-processor OrderProcessingWorkflow e4d3807c 2025-11-07T12:29:37Z 2025-11-07T12:29:52Z COMPLETED
+```
+
+### Check Workflow History
+
+View the detailed execution history of your workflow:
+
+```bash
+dapr workflow history e4d3807c --app-id order-processor
+```
+
+You should see output like:
+
+```
+TYPE NAME EVENTID ELAPSED STATUS DETAILS
+ExecutionStarted OrderProcessingWorkflow - Age:1.1m RUNNING orchestration start
+OrchestratorStarted - - 13.4ms RUNNING replay cycle start
+TaskScheduled NotifyActivity 0 1.3ms RUNNING activity=NotifyActivity
+TaskCompleted - - 2.6ms RUNNING eventId=0
+OrchestratorStarted - - 2.6ms RUNNING replay cycle start
+TaskScheduled VerifyInventoryActivity 1 637.6µs RUNNING activity=VerifyInventoryActivity
+TaskCompleted - - 2.4ms RUNNING eventId=1
+OrchestratorStarted - - 1.7ms RUNNING replay cycle start
+TaskScheduled ProcessPaymentActivity 2 439.3µs RUNNING activity=ProcessPaymentActivity
+TaskCompleted - - 1.6ms RUNNING eventId=2
+OrchestratorStarted - - 1.5ms RUNNING replay cycle start
+TaskScheduled UpdateInventoryActivity 3 311.2µs RUNNING activity=UpdateInventoryActivity
+TaskCompleted - - 2.4ms RUNNING eventId=3
+OrchestratorStarted - - 2.7ms RUNNING replay cycle start
+TaskScheduled NotifyActivity 4 354.1µs RUNNING activity=NotifyActivity
+TaskCompleted - - 2.5ms RUNNING eventId=4
+OrchestratorStarted - - 1.6ms RUNNING replay cycle start
+ExecutionCompleted - 5 517.1µs COMPLETED execDuration=38.7ms
+```
+
+### Interact with Your Workflow
+
+#### Raise an External Event
+
+If your workflow is waiting for an [external event]({{% ref "workflow-patterns.md#external-system-interaction" %}}), you can raise one.
+It takes a single argument in the format of `<instance-id>/<event-name>`.
+
+```bash
+dapr workflow raise-event e4d3807c/ApprovalEvent \
+ --app-id order-processor \
+ --input '{"paymentId": "pay-123", "amount": 100.00}'
+```
+
+#### Suspend and Resume
+
+```bash
+# Suspend a workflow
+dapr workflow suspend e4d3807c \
+ --app-id order-processor \
+ --reason "Waiting for inventory"
+
+# Resume when ready
+dapr workflow resume e4d3807c \
+ --app-id order-processor \
+ --reason "Inventory received"
+```
+
+### Clean Up
+
+After testing, purge completed workflows.
+
+{{% alert title="Important" color="warning" %}}
+A workflow client must be running in the application to perform purge operations.
+The workflow client connection is required to preserve workflow state machine integrity and prevent corruption.
+Errors like the following suggest that the workflow client is not running:
+```
+failed to purge orchestration state: rpc error: code = FailedPrecondition desc = failed to purge orchestration state: failed to lookup actor: api error: code = FailedPrecondition desc = did not find address for actor
+```
+{{% /alert %}}
+
+```bash
+# Purge a specific workflow
+dapr workflow purge e4d3807c --app-id order-processor --connection-string=redis://127.0.0.1:6379
+
+# Or purge all completed workflows
+dapr workflow purge --app-id order-processor --connection-string=redis://127.0.0.1:6379 --all-older-than 1h
+```
+
## Tell us what you think!
We're continuously working to improve our Quickstart examples and value your feedback. Did you find this Quickstart helpful? Do you have suggestions for improvement?
@@ -2077,5 +2219,6 @@ Join the discussion in our [discord channel](https://discord.com/channels/778680
- Set up Dapr Workflow with any programming language using [HTTP instead of an SDK]({{% ref howto-manage-workflow.md %}})
- Walk through a more in-depth [.NET SDK example workflow](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow)
- Learn more about [Workflow as a Dapr building block]({{% ref workflow-overview %}})
{{< button text="Explore Dapr tutorials >>" page="getting-started/tutorials/_index.md" >}}
diff --git a/daprdocs/content/en/operations/configuration/configuration-overview.md b/daprdocs/content/en/operations/configuration/configuration-overview.md
index f501710405c..33cc11d8e31 100644
--- a/daprdocs/content/en/operations/configuration/configuration-overview.md
+++ b/daprdocs/content/en/operations/configuration/configuration-overview.md
@@ -62,13 +62,14 @@ A Dapr sidecar can apply a specific configuration by using a `dapr.io/config` an
### Application configuration settings
-The following menu includes all of the configuration settings you can set on the sidecar.
+The following menu includes all of the configuration settings you can set:
- [Tracing](#tracing)
- [Metrics](#metrics)
- [Logging](#logging)
- [Middleware](#middleware)
- [Name resolution](#name-resolution)
+- [Workflow](#workflow)
- [Scope secret store access](#scope-secret-store-access)
- [Access Control allow lists for building block APIs](#access-control-allow-lists-for-building-block-apis)
- [Access Control allow lists for service invocation API](#access-control-allow-lists-for-service-invocation-api)
@@ -255,6 +256,15 @@ For more information, see:
- [The name resolution component documentation]({{% ref supported-name-resolution %}}) for more examples.
- [The Configuration file documentation]({{% ref configuration-schema.md %}}) to learn more about how to configure name resolution per component.
+#### Workflow
+
+The `workflow` section contains properties for configuring [Workflows]({{% ref "workflow-overview.md" %}}).
+
+| Property | Type | Description |
+|------------------|--------|-----|
+| `maxConcurrentWorkflowInvocations` | int32 | Maximum number of concurrent workflow executions per Dapr sidecar. Default is infinite. |
+| `maxConcurrentActivityInvocations` | int32 | Maximum number of concurrent activity executions per Dapr sidecar. Default is infinite. |
+
#### Scope secret store access
See the [Scoping secrets]({{% ref "secret-scope.md" %}}) guide for information and examples on how to scope secrets to an application.
@@ -334,6 +344,9 @@ spec:
deny:
- bindings.smtp
- secretstores.local.file
+ workflow:
+ maxConcurrentWorkflowInvocations: 100
+ maxConcurrentActivityInvocations: 1000
accessControl:
defaultAction: deny
trustDomain: "public"
diff --git a/daprdocs/content/en/operations/configuration/increase-request-size.md b/daprdocs/content/en/operations/configuration/increase-request-size.md
index 04f6500866c..76c9d613949 100644
--- a/daprdocs/content/en/operations/configuration/increase-request-size.md
+++ b/daprdocs/content/en/operations/configuration/increase-request-size.md
@@ -1,27 +1,29 @@
---
type: docs
-title: "How-To: Handle large http body requests"
-linkTitle: "HTTP request body size"
+title: "How-To: Handle larger body requests"
+linkTitle: "Request body size"
weight: 6000
description: "Configure http requests that are bigger than 4 MB"
---
-By default, Dapr has a limit for the request body size, set to 4MB. You can change this by defining:
-- The `dapr.io/http-max-request-size` annotation, or
-- The `--dapr-http-max-request-size` flag.
+{{% alert title="Note" color="primary" %}}
+The existing flag/annotation `dapr-http-max-request-size` has been deprecated and replaced by `max-body-size`.
+{{% /alert %}}
+
+By default, Dapr has a limit for the request body size, set to 4MB. You can change this for both HTTP and gRPC requests by defining:
+- The `dapr.io/max-body-size` annotation, or
+- The `--max-body-size` flag.
{{< tabpane text=true >}}
{{% tab "Self-hosted" %}}
-When running in self-hosted mode, use the `--dapr-http-max-request-size` flag to configure Dapr to use non-default request body size:
+When running in self-hosted mode, use the `--max-body-size` flag to configure Dapr to use non-default request body size:
```bash
-dapr run --dapr-http-max-request-size 16 node app.js
+dapr run --max-body-size 16Mi node app.js
```
-This tells Dapr to set maximum request body size to `16` MB.
-
{{% /tab %}}
@@ -50,7 +52,7 @@ spec:
dapr.io/enabled: "true"
dapr.io/app-id: "myapp"
dapr.io/app-port: "8000"
- dapr.io/http-max-request-size: "16"
+ dapr.io/max-body-size: "16Mi"
#...
```
@@ -58,6 +60,8 @@ spec:
{{< /tabpane >}}
+This tells Dapr to set the maximum request body size to `16` MB for both HTTP and gRPC requests.
+
## Related links
[Dapr Kubernetes pod annotations spec]({{% ref arguments-annotations-overview.md %}})
diff --git a/daprdocs/content/en/operations/configuration/secret-scope.md b/daprdocs/content/en/operations/configuration/secret-scope.md
index a26575ca14d..aba1e8c8965 100644
--- a/daprdocs/content/en/operations/configuration/secret-scope.md
+++ b/daprdocs/content/en/operations/configuration/secret-scope.md
@@ -4,18 +4,15 @@ title: "How-To: Limit the secrets that can be read from secret stores"
linkTitle: "Limit secret store access"
weight: 3000
description: "Define secret scopes by augmenting the existing configuration resource with restrictive permissions."
-description: "Define secret scopes by augmenting the existing configuration resource with restrictive permissions."
---
In addition to [scoping which applications can access a given component]({{% ref "component-scopes.md"%}}), you can also scope a named secret store component to one or more secrets for an application. By defining `allowedSecrets` and/or `deniedSecrets` lists, you restrict applications to access only specific secrets.
-For more information about configuring a Configuration resource:
-- [Configuration overview]({{% ref configuration-overview.md %}})
-- [Configuration schema]({{% ref configuration-schema.md %}})
For more information about configuring a Configuration resource:
- [Configuration overview]({{% ref configuration-overview.md %}})
- [Configuration schema]({{% ref configuration-schema.md %}})
+
## Configure secrets access
The `secrets` section under the `Configuration` spec contains the following properties:
@@ -59,10 +56,8 @@ The `allowedSecrets` and `deniedSecrets` list values take priority over the `def
### Scenario 1: Deny access to all secrets for a secret store
-In a Kubernetes cluster, the native Kubernetes secret store is added to your Dapr application by default. In some scenarios, it may be necessary to deny access to Dapr secrets for a given application. To add this configuration:
In a Kubernetes cluster, the native Kubernetes secret store is added to your Dapr application by default. In some scenarios, it may be necessary to deny access to Dapr secrets for a given application. To add this configuration:
-1. Define the following `appconfig.yaml`.
1. Define the following `appconfig.yaml`.
```yaml
@@ -76,26 +71,8 @@ In a Kubernetes cluster, the native Kubernetes secret store is added to your Dap
- storeName: kubernetes
defaultAccess: deny
```
- ```yaml
- apiVersion: dapr.io/v1alpha1
- kind: Configuration
- metadata:
- name: appconfig
- spec:
- secrets:
- scopes:
- - storeName: kubernetes
- defaultAccess: deny
- ```
-
-1. Apply it to the Kubernetes cluster using the following command:
-
- ```bash
- kubectl apply -f appconfig.yaml`.
- ```
-For applications that you need to deny access to the Kubernetes secret store, follow [the Kubernetes instructions]({{% ref kubernetes-overview %}}), adding the following annotation to the application pod.
-1. Apply it to the Kubernetes cluster using the following command:
+2. Apply it to the Kubernetes cluster using the following command:
```bash
   kubectl apply -f appconfig.yaml
@@ -109,7 +86,6 @@ dapr.io/config: appconfig
With this defined, the application no longer has access to Kubernetes secret store.
-### Scenario 2: Allow access to only certain secrets in a secret store
### Scenario 2: Allow access to only certain secrets in a secret store
To allow a Dapr application to have access to only certain secrets, define the following `config.yaml`:
@@ -127,7 +103,6 @@ spec:
allowedSecrets: ["secret1", "secret2"]
```
-This example defines configuration for secret store named `vault`. The default access to the secret store is `deny`. Meanwhile, some secrets are accessible by the application based on the `allowedSecrets` list. Follow [the Sidecar configuration instructions]({{% ref "configuration-overview.md#sidecar-configuration" %}}) to apply configuration to the sidecar.
This example defines configuration for secret store named `vault`. The default access to the secret store is `deny`. Meanwhile, some secrets are accessible by the application based on the `allowedSecrets` list. Follow [the Sidecar configuration instructions]({{% ref "configuration-overview.md#sidecar-configuration" %}}) to apply configuration to the sidecar.
### Scenario 3: Deny access to certain sensitive secrets in a secret store
@@ -152,9 +127,3 @@ This configuration explicitly denies access to `secret1` and `secret2` from the
## Next steps
{{< button text="Service invocation access control" page="invoke-allowlist.md" >}}
-
-This configuration explicitly denies access to `secret1` and `secret2` from the secret store named `vault,` while allowing access to all other secrets. Follow [the Sidecar configuration instructions]({{% ref "configuration-overview.md#sidecar-configuration" %}}) to apply configuration to the sidecar.
-
-## Next steps
-
-{{< button text="Service invocation access control" page="invoke-allowlist.md" >}}
diff --git a/daprdocs/content/en/operations/hosting/kubernetes/kubernetes-persisting-scheduler.md b/daprdocs/content/en/operations/hosting/kubernetes/kubernetes-persisting-scheduler.md
index 3083f2ec060..e379b745de3 100644
--- a/daprdocs/content/en/operations/hosting/kubernetes/kubernetes-persisting-scheduler.md
+++ b/daprdocs/content/en/operations/hosting/kubernetes/kubernetes-persisting-scheduler.md
@@ -77,22 +77,6 @@ kubectl delete pvc -n dapr-system dapr-scheduler-data-dir-dapr-scheduler-server-
Persistent Volume Claims are not deleted automatically with an [uninstall]({{% ref dapr-uninstall.md %}}). This is a deliberate safety measure to prevent accidental data loss.
{{% /alert %}}
-{{% alert title="Note" color="primary" %}}
-For storage providers that do NOT support dynamic volume expansion: If Dapr has ever been installed on the cluster before, the Scheduler's Persistent Volume Claims must be manually uninstalled in order for new ones with increased storage size to be created.
-```bash
-kubectl delete pvc -n dapr-system dapr-scheduler-data-dir-dapr-scheduler-server-0 dapr-scheduler-data-dir-dapr-scheduler-server-1 dapr-scheduler-data-dir-dapr-scheduler-server-2
-```
-Persistent Volume Claims are not deleted automatically with an [uninstall]({{< ref dapr-uninstall.md >}}). This is a deliberate safety measure to prevent accidental data loss.
-{{% /alert %}}
-
-{{% alert title="Note" color="primary" %}}
-For storage providers that do NOT support dynamic volume expansion: If Dapr has ever been installed on the cluster before, the Scheduler's Persistent Volume Claims must be manually uninstalled in order for new ones with increased storage size to be created.
-```bash
-kubectl delete pvc -n dapr-system dapr-scheduler-data-dir-dapr-scheduler-server-0 dapr-scheduler-data-dir-dapr-scheduler-server-1 dapr-scheduler-data-dir-dapr-scheduler-server-2
-```
-Persistent Volume Claims are not deleted automatically with an [uninstall]({{< ref dapr-uninstall.md >}}). This is a deliberate safety measure to prevent accidental data loss.
-{{% /alert %}}
-
#### Increase existing Scheduler Storage Size
{{% alert title="Warning" color="warning" %}}
diff --git a/daprdocs/content/en/operations/observability/tracing/otel-collector/open-telemetry-collector-jaeger.md b/daprdocs/content/en/operations/observability/tracing/otel-collector/open-telemetry-collector-jaeger.md
index 479a408b97d..fce8875dc3a 100644
--- a/daprdocs/content/en/operations/observability/tracing/otel-collector/open-telemetry-collector-jaeger.md
+++ b/daprdocs/content/en/operations/observability/tracing/otel-collector/open-telemetry-collector-jaeger.md
@@ -1,30 +1,47 @@
---
type: docs
-title: "Using OpenTelemetry Collector to collect traces to send to Jaeger"
-linkTitle: "Using the OpenTelemetry for Jaeger"
+title: "Using OpenTelemetry to send traces to Jaeger V2"
+linkTitle: "Using OpenTelemetry for Jaeger V2"
weight: 1200
-description: "How to push trace events to Jaeger distributed tracing platform, using the OpenTelemetry Collector."
+description: "How to push trace events to Jaeger V2 distributed tracing platform using OpenTelemetry protocol."
---
-While Dapr supports writing traces using OpenTelemetry (OTLP) and Zipkin protocols, Zipkin support for Jaeger has been deprecated in favor of OTLP. Although Jaeger supports OTLP directly, the recommended approach for production is to use the OpenTelemetry Collector to collect traces from Dapr and send them to Jaeger, allowing your application to quickly offload data and take advantage of features like retries, batching, and encryption. For more information, read the Open Telemetry Collector [documentation](https://opentelemetry.io/docs/collector/#when-to-use-a-collector).
+Dapr supports writing traces using the OpenTelemetry (OTLP) protocol, and Jaeger V2 natively supports OTLP, allowing Dapr to send traces directly to a Jaeger V2 instance. This approach is recommended for production to leverage Jaeger V2's capabilities for distributed tracing.
+
{{< tabpane text=true >}}
{{% tab "Self-hosted" %}}
-
-## Configure Jaeger in self-hosted mode
+## Configure Jaeger V2 in self-hosted mode
### Local setup
The simplest way to start Jaeger is to run the pre-built, all-in-one Jaeger image published to DockerHub and expose the OTLP port:
+> **Note:** Port 9411 is commonly used by Zipkin. If you have Zipkin running (starts by default when you run `dapr init`), stop the `dapr_zipkin` container first to avoid port conflicts: `docker stop dapr_zipkin`
+
```bash
-docker run -d --name jaeger \
- -p 4317:4317 \
+docker run -d --rm --name jaeger \
-p 16686:16686 \
- jaegertracing/all-in-one:1.49
+ -p 4317:4317 \
+ -p 4318:4318 \
+ -p 5778:5778 \
+ -p 9411:9411 \
+ cr.jaegertracing.io/jaegertracing/jaeger:2.11.0
```
-Next, create the following `config.yaml` file locally:
+You can also view the logs from the jaeger container using:
+
+```bash
+docker logs jaeger
+```
+
+### Configure Dapr for tracing
+
+You have two options to configure Dapr to send traces to Jaeger V2:
+
+#### Option 1: Use a custom config file
+
+Create a `config.yaml` file with the following content:
> **Note:** Because you are using the Open Telemetry protocol to talk to Jaeger, you need to fill out the `otel` section of the tracing configuration and set the `endpointAddress` to the address of the Jaeger container.
@@ -51,6 +68,10 @@ the `--config` option. For example:
dapr run --app-id myapp --app-port 3000 node app.js --config config.yaml
```
+#### Option 2: Update the default Dapr config (development environment)
+
+Alternatively, in your development environment, navigate to your [local Dapr components directory](https://docs.dapr.io/getting-started/install-dapr-selfhost/#step-5-verify-components-directory-has-been-initialized) and update the default `config.yaml` file with the OTLP configuration above. This way, all Dapr applications will use the Jaeger V2 tracing configuration by default without needing to specify the `--config` flag each time.
+
### View traces
To view traces in your browser, go to `http://localhost:16686` to see the Jaeger UI.
@@ -58,41 +79,105 @@ To view traces in your browser, go to `http://localhost:16686` to see the Jaeger
{{% tab "Kubernetes" %}}
-## Configure Jaeger on Kubernetes with the OpenTelemetry Collector
+## Configure Jaeger V2 on Kubernetes
-The following steps show you how to configure Dapr to send distributed tracing data to the OpenTelemetry Collector which, in turn, sends the traces to Jaeger.
+The following steps show you how to configure Dapr to send distributed tracing data directly to a Jaeger V2 instance deployed using the OpenTelemetry Operator with in-memory storage.
### Prerequisites
- [Install Dapr on Kubernetes]({{% ref kubernetes %}})
-- [Set up Jaeger](https://www.jaegertracing.io/docs/1.49/operator/) using the Jaeger Kubernetes Operator
-### Set up OpenTelemetry Collector to push to Jaeger
+### Set up Jaeger V2 with the OpenTelemetry Operator
-To push traces to your Jaeger instance, install the OpenTelemetry Collector on your Kubernetes cluster.
+Jaeger V2 can be deployed using the OpenTelemetry Operator for simplified management and native OTLP support. The following example configures Jaeger V2 with in-memory storage.
-1. Download and inspect the [`open-telemetry-collector-jaeger.yaml`](/docs/open-telemetry-collector/open-telemetry-collector-jaeger.yaml) file.
+> **Note on Storage Backends:** This example uses in-memory storage (`memstore`) for simplicity, suitable for development or testing environments as it stores up to 100,000 traces in memory. For production environments, consider configuring a persistent storage backend like Cassandra or Elasticsearch to ensure trace data durability.
-1. In the data section of the `otel-collector-conf` ConfigMap, update the `otlp/jaeger.endpoint` value to reflect the endpoint of your Jaeger collector Kubernetes service object.
+#### Installation
-1. Deploy the OpenTelemetry Collector into the same namespace where your Dapr-enabled applications are running:
+> **Note:** In order for the API server to communicate with the webhook component of the operator, the webhook requires a TLS certificate that the API server is configured to trust. There are a few different ways you can use to generate/configure the required TLS certificate detailed in the [otel operator chart docs](https://github.com/open-telemetry/opentelemetry-helm-charts/tree/main/charts/opentelemetry-operator#tls-certificate-requirement)
- ```sh
- kubectl apply -f open-telemetry-collector-jaeger.yaml
- ```
+For simplicity you can use Helm to create an automatically generated self-signed certificate.
-### Set up Dapr to send traces to OpenTelemetryCollector
+1. **Install the OpenTelemetry Operator**:
-Create a Dapr configuration file to enable tracing and export the sidecar traces to the OpenTelemetry Collector.
-
-1. Use the [`collector-config-otel.yaml`](/docs/open-telemetry-collector/collector-config-otel.yaml) file to create your own Dapr configuration.
+ ```bash
+ helm install opentelemetry-operator open-telemetry/opentelemetry-operator -n opentelemetry-operator-system --create-namespace \
+ --set "manager.collectorImage.repository=ghcr.io/open-telemetry/opentelemetry-collector-releases/opentelemetry-collector-k8s" \
+ --set admissionWebhooks.certManager.enabled=false \
+ --set admissionWebhooks.autoGenerateCert.enabled=true
+ ```
+ Confirm that all resources in the `opentelemetry-operator-system` namespace are ready.
+
+1. **Deploy a Jaeger V2 instance with in-memory storage**:
+ Create a file named `jaeger-inmemory.yaml` with the following configuration:
+ ```yaml
+ apiVersion: opentelemetry.io/v1beta1
+ kind: OpenTelemetryCollector
+ metadata:
+ name: jaeger-inmemory-instance
+ namespace: observability
+ spec:
+ image: jaegertracing/jaeger:latest
+ ports:
+ - name: jaeger
+ port: 16686
+ config:
+ service:
+ extensions: [jaeger_storage, jaeger_query]
+ pipelines:
+ traces:
+ receivers: [otlp]
+ exporters: [jaeger_storage_exporter]
+ extensions:
+ jaeger_query:
+ storage:
+ traces: memstore
+ jaeger_storage:
+ backends:
+ memstore:
+ memory:
+ max_traces: 100000
+ receivers:
+ otlp:
+ protocols:
+ grpc:
+ endpoint: 0.0.0.0:4317
+ http:
+ endpoint: 0.0.0.0:4318
+ exporters:
+ jaeger_storage_exporter:
+ trace_storage: memstore
+ ```
+ Apply it with:
+ ```bash
+ kubectl apply -f jaeger-inmemory.yaml -n observability
+ ```
-1. Update the `namespace` and `otel.endpointAddress` values to align with the namespace where your Dapr-enabled applications and OpenTelemetry Collector are deployed.
-1. Apply the configuration with:
+### Set up Dapr to send traces to Jaeger V2
+
+Create a Dapr configuration file to enable tracing and export the sidecar traces directly to the Jaeger V2 instance.
+
+1. Create a configuration file (for example `tracing.yaml`) with the following content, updating the `namespace` and `otel.endpointAddress` to match your Jaeger V2 instance:
+ ```yaml
+ apiVersion: dapr.io/v1alpha1
+ kind: Configuration
+ metadata:
+ name: tracing
+ namespace: order-system
+ spec:
+ tracing:
+ samplingRate: "1"
+ otel:
+ endpointAddress: "jaeger-inmemory-instance-collector.observability.svc.cluster.local:4317"
+ isSecure: false
+ protocol: grpc
+ ```
- ```sh
- kubectl apply -f collector-config.yaml
+2. Apply the configuration:
+ ```bash
+ kubectl apply -f tracing.yaml -n order-system
```
### Deploy your app with tracing enabled
@@ -122,20 +207,20 @@ That’s it! There’s no need to include the OpenTelemetry SDK or instrument yo
### View traces
-To view Dapr sidecar traces, port-forward the Jaeger Service and open the UI:
+To view Dapr sidecar traces, port-forward the Jaeger V2 service and open the UI:
```bash
-kubectl port-forward svc/jaeger-query 16686 -n observability
+kubectl port-forward svc/jaeger-inmemory-instance-collector 16686:16686 -n observability
```
-In your browser, go to `http://localhost:16686` and you will see the Jaeger UI.
+In your browser, go to `http://localhost:16686` to see the Jaeger V2 UI.

{{% /tab %}}
{{< /tabpane >}}
+
## References
-- [Jaeger Getting Started](https://www.jaegertracing.io/docs/1.49/getting-started/)
-- [Jaeger Kubernetes Operator](https://www.jaegertracing.io/docs/1.49/operator/)
-- [OpenTelemetry Collector Exporters](https://opentelemetry.io/docs/collector/configuration/#exporters)
+- [Jaeger V2 Getting Started](https://www.jaegertracing.io/docs/2.11/getting-started/)
+- [Jaeger V2 Kubernetes Operator](https://www.jaegertracing.io/docs/2.11/deployment/kubernetes/#kubernetes-operator)
\ No newline at end of file
diff --git a/daprdocs/content/en/operations/resiliency/health-checks/sidecar-health.md b/daprdocs/content/en/operations/resiliency/health-checks/sidecar-health.md
index ee58a2ad233..401a3dd2f85 100644
--- a/daprdocs/content/en/operations/resiliency/health-checks/sidecar-health.md
+++ b/daprdocs/content/en/operations/resiliency/health-checks/sidecar-health.md
@@ -42,9 +42,9 @@ On the other hand, as shown by the green boundary lines in the diagram above, th
- The Dapr HTTP port is available; _but,_
- The app channel is not yet established.
-In the Dapr SDKs, the `waitForSidecar`/`wait_until_ready` method (depending on [which SDK you use]({{% ref "#sdks-supporting-outbound-health-endpoint" %}})) is used for this specific check with the `v1.0/healthz/outbound` endpoint. Using this behavior, instead of waiting for the app channel to be available (see: red boundary lines) with the `v1.0/healthz/` endpoint, Dapr waits for a successful response from `v1.0/healthz/outbound`. This approach enables your application to perform calls on the Dapr sidecar APIs before the app channel is initalized - for example, reading secrets with the secrets API.
+In the Dapr SDKs, the `waitForSidecar` method (depending on [which SDK you use]({{% ref "#sdks-supporting-outbound-health-endpoint" %}})) is used for this specific check with the `v1.0/healthz/outbound` endpoint. Using this behavior, instead of waiting for the app channel to be available (see: red boundary lines) with the `v1.0/healthz/` endpoint, Dapr waits for a successful response from `v1.0/healthz/outbound`. This approach enables your application to perform calls on the Dapr sidecar APIs before the app channel is initialized - for example, reading secrets with the secrets API.
-If you are using the `waitForSidecar`/`wait_until_ready` method on the SDKs, then the correct initialization is performed. Otherwise, you can call the `v1.0/healthz/outbound` endpoint during initalization, and if successesful, you can call the Dapr sidecar APIs.
+If you are using the `waitForSidecar` method on the SDKs, then the correct initialization is performed. Otherwise, you can call the `v1.0/healthz/outbound` endpoint during initialization, and if successful, you can call the Dapr sidecar APIs.
### SDKs supporting outbound health endpoint
Currently, the `v1.0/healthz/outbound` endpoint is supported in the:
diff --git a/daprdocs/content/en/operations/security/api-token.md b/daprdocs/content/en/operations/security/api-token.md
index d91a1490fd6..6593f5e729b 100644
--- a/daprdocs/content/en/operations/security/api-token.md
+++ b/daprdocs/content/en/operations/security/api-token.md
@@ -52,45 +52,12 @@ annotations:
When deployed, Dapr sidecar injector will automatically create a secret reference and inject the actual value into `DAPR_API_TOKEN` environment variable.
-## Rotate a token
-
-### Self-hosted
-
-To rotate the configured token in self-hosted, update the `DAPR_API_TOKEN` environment variable to the new value and restart the `daprd` process.
-
-### Kubernetes
-
-To rotate the configured token in Kubernetes, update the previously-created secret with the new token in each namespace. You can do that using `kubectl patch` command, but a simpler way to update these in each namespace is by using a manifest:
-
-```yaml
-apiVersion: v1
-kind: Secret
-metadata:
- name: dapr-api-token
-type: Opaque
-data:
- token:
-```
-
-And then apply it to each namespace:
-
-```shell
-kubectl apply --file token-secret.yaml --namespace
-```
-
-To tell Dapr to start using the new token, trigger a rolling upgrade to each one of your deployments:
-
-```shell
-kubectl rollout restart deployment/ --namespace
-```
-
-> Assuming your service is configured with more than one replica, the key rotation process does not result in any downtime.
## Adding API token to client API invocations
-Once token authentication is configured in Dapr, all clients invoking Dapr API need to append the `dapr-api-token` token to every request.
+Once token authentication is configured in Dapr, all clients invoking the Dapr APIs need to append the `dapr-api-token` token to every request.
-> **Note:** The Dapr SDKs read the [DAPR_API_TOKEN]({{% ref environment %}}) environment variable and set it for you by default.
+> **Note:** The Dapr SDKs read the [DAPR_API_TOKEN]({{% ref environment %}}) environment variable and set it for you by default, however you still must ensure that your app has access to the environment variable.
@@ -122,15 +89,18 @@ dapr-api-token[0].
### Kubernetes
-In Kubernetes, it's recommended to mount the secret to your pod as an environment variable, as shown in the example below, where a Kubernetes secret with the name `dapr-api-token` is used to hold the token.
+In Kubernetes, it's required to mount the API token on your application pod as an environment variable, when your application is making outbound calls to the Dapr APIs (Service Invocation invoke, Pub/sub publish, etc.), otherwise the request will fail with an `Unauthorized` error. Mounting the environment variable is done by providing the name of the Kubernetes secret in your application pod specification, as shown in the example below, where a Kubernetes secret with the name `dapr-api-token` is used to hold the token.
```yaml
containers:
- name: mycontainer
image: myregistry/myapp
- envFrom:
- - secretRef:
- name: dapr-api-token
+ env:
+ - name: DAPR_API_TOKEN
+ valueFrom:
+ secretKeyRef:
+ name: dapr-api-token
+ key: token
```
### Self-hosted
@@ -141,6 +111,40 @@ In self-hosted mode, you can set the token as an environment variable for your a
export DAPR_API_TOKEN=
```
+## Rotate a token
+
+### Self-hosted
+
+To rotate the configured token in self-hosted, update the `DAPR_API_TOKEN` environment variable to the new value and restart the `daprd` process.
+
+### Kubernetes
+
+To rotate the configured token in Kubernetes, update the previously-created secret with the new token in each namespace. You can do that using `kubectl patch` command, but a simpler way to update these in each namespace is by using a manifest:
+
+```yaml
+apiVersion: v1
+kind: Secret
+metadata:
+ name: dapr-api-token
+type: Opaque
+data:
+ token:
+```
+
+And then apply it to each namespace:
+
+```shell
+kubectl apply --file token-secret.yaml --namespace
+```
+
+To tell Dapr to start using the new token, trigger a rolling upgrade to each one of your deployments:
+
+```shell
+kubectl rollout restart deployment/ --namespace
+```
+
+> Assuming your service is configured with more than one replica, the key rotation process does not result in any downtime.
+
## Related Links
- Learn about [Dapr security concepts]({{% ref security-concept.md %}})
diff --git a/daprdocs/content/en/operations/support/alpha-beta-apis.md b/daprdocs/content/en/operations/support/alpha-beta-apis.md
index f8dd6e3ce11..8b1d81e8218 100644
--- a/daprdocs/content/en/operations/support/alpha-beta-apis.md
+++ b/daprdocs/content/en/operations/support/alpha-beta-apis.md
@@ -16,8 +16,8 @@ description: "List of current alpha and beta APIs"
| Bulk Subscribe | [Bulk subscribe proto](https://github.com/dapr/dapr/blob/5aba3c9aa4ea9b3f388df125f9c66495b43c5c9e/dapr/proto/runtime/v1/appcallback.proto#L57) | N/A | The bulk subscribe application callback receives multiple messages from a topic in a single call. | [Bulk Publish and Subscribe API]({{% ref "pubsub-bulk.md" %}}) | v1.10 |
| Cryptography | [Crypto proto](https://github.com/dapr/dapr/blob/5aba3c9aa4ea9b3f388df125f9c66495b43c5c9e/dapr/proto/runtime/v1/dapr.proto#L118) | `v1.0-alpha1/crypto` | The cryptography API enables you to perform **high level** cryptography operations for encrypting and decrypting messages. | [Cryptography API]({{% ref "cryptography-overview.md" %}}) | v1.11 |
| Jobs | [Jobs proto](https://github.com/dapr/dapr/blob/master/dapr/proto/runtime/v1/dapr.proto#L212-219) | `v1.0-alpha1/jobs` | The jobs API enables you to schedule and orchestrate jobs. | [Jobs API]({{% ref "jobs-overview.md" %}}) | v1.14 |
-| Conversation | [Conversation proto](https://github.com/dapr/dapr/blob/master/dapr/proto/runtime/v1/dapr.proto#L221-222) | `v1.0-alpha1/conversation` | Converse between different large language models using the conversation API. | [Conversation API]({{% ref "conversation-overview.md" %}}) | v1.15 |
-
+| Streaming Subscription | [Streaming Subscription proto](https://github.com/dapr/dapr/blob/310c83140b2f0c3cb7d2bef19624df88af3e8e0a/dapr/proto/runtime/v1/dapr.proto#L454) | N/A | Subscription is defined in the application code. Streaming subscriptions are dynamic, meaning they allow for adding or removing subscriptions at runtime. | [Streaming Subscription API]({{% ref "subscription-methods/#streaming-subscriptions" %}}) | v1.14 |
+| Conversation | [Conversation proto](https://github.com/dapr/dapr/blob/master/dapr/proto/runtime/v1/dapr.proto#L226) | `v1.0-alpha2/conversation` | Converse between different large language models using the conversation API. | [Conversation API]({{% ref "conversation-overview.md" %}}) | v1.15 |
## Beta APIs
diff --git a/daprdocs/content/en/operations/support/breaking-changes-and-deprecations.md b/daprdocs/content/en/operations/support/breaking-changes-and-deprecations.md
index a0895537cc3..3c2214cb711 100644
--- a/daprdocs/content/en/operations/support/breaking-changes-and-deprecations.md
+++ b/daprdocs/content/en/operations/support/breaking-changes-and-deprecations.md
@@ -58,17 +58,18 @@ After announcing a future breaking change, the change will happen in 2 releases
## Announced deprecations
-| Feature | Deprecation announcement | Removal |
-|-----------------------|-----------------------|------------------------- |
-| GET /v1.0/shutdown API (Users should use [POST API]({{% ref kubernetes-job.md %}}) instead) | 1.2.0 | 1.4.0 |
-| Java domain builder classes deprecated (Users should use [setters](https://github.com/dapr/java-sdk/issues/587) instead) | Java SDK 1.3.0 | Java SDK 1.5.0 |
-| Service invocation will no longer provide a default content type header of `application/json` when no content-type is specified. You must explicitly [set a content-type header]({{% ref "service_invocation_api.md#request-contents" %}}) for service invocation if your invoked apps rely on this header. | 1.7.0 | 1.9.0 |
-| gRPC service invocation using `invoke` method is deprecated. Use proxy mode service invocation instead. See [How-To: Invoke services using gRPC ]({{% ref howto-invoke-services-grpc.md %}}) to use the proxy mode.| 1.9.0 | 1.10.0 |
-| The CLI flag `--app-ssl` (in both the Dapr CLI and daprd) has been deprecated in favor of using `--app-protocol` with values `https` or `grpcs`. [daprd:6158](https://github.com/dapr/dapr/issues/6158) [cli:1267](https://github.com/dapr/cli/issues/1267)| 1.11.0 | 1.13.0 |
-| Hazelcast PubSub Component | 1.9.0 | 1.11.0 |
-| Twitter Binding Component | 1.10.0 | 1.11.0 |
-| NATS Streaming PubSub Component | 1.11.0 | 1.13.0 |
-| Workflows API Alpha1 `/v1.0-alpha1/workflows` being deprecated in favor of Workflow Client | 1.15.0 | 1.17.0 |
+| Feature | Deprecation announcement | Removal |
+|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------|----------------|
+| GET /v1.0/shutdown API (Users should use [POST API]({{% ref kubernetes-job.md %}}) instead) | 1.2.0 | 1.4.0 |
+| Java domain builder classes deprecated (Users should use [setters](https://github.com/dapr/java-sdk/issues/587) instead) | Java SDK 1.3.0 | Java SDK 1.5.0 |
+| Service invocation will no longer provide a default content type header of `application/json` when no content-type is specified. You must explicitly [set a content-type header]({{% ref "service_invocation_api.md#request-contents" %}}) for service invocation if your invoked apps rely on this header. | 1.7.0 | 1.9.0 |
+| gRPC service invocation using `invoke` method is deprecated. Use proxy mode service invocation instead. See [How-To: Invoke services using gRPC ]({{% ref howto-invoke-services-grpc.md %}}) to use the proxy mode. | 1.9.0 | 1.10.0 |
+| The CLI flag `--app-ssl` (in both the Dapr CLI and daprd) has been deprecated in favor of using `--app-protocol` with values `https` or `grpcs`. [daprd:6158](https://github.com/dapr/dapr/issues/6158) [cli:1267](https://github.com/dapr/cli/issues/1267) | 1.11.0 | 1.13.0 |
+| Hazelcast PubSub Component | 1.9.0 | 1.11.0 |
+| Twitter Binding Component | 1.10.0 | 1.11.0 |
+| NATS Streaming PubSub Component | 1.11.0 | 1.13.0 |
+| Workflows API Alpha1 `/v1.0-alpha1/workflows` being deprecated in favor of Workflow Client | 1.15.0 | 1.17.0 |
+| Migration of `http-max-request-size` flags/annotations to `max-body-size`. See [How-To: Handle larger body requests]({{% ref increase-request-size.md %}}) | 1.14.0 | 1.17.0 |
## Related links
diff --git a/daprdocs/content/en/operations/support/support-release-policy.md b/daprdocs/content/en/operations/support/support-release-policy.md
index 579008b70e0..aae6a2f89ce 100644
--- a/daprdocs/content/en/operations/support/support-release-policy.md
+++ b/daprdocs/content/en/operations/support/support-release-policy.md
@@ -19,7 +19,7 @@ Dapr releases use `MAJOR.MINOR.PATCH` versioning. For example, 1.0.0.
A supported release means:
-- A hoxfix patch is released if the release has a critical issue such as a mainline broken scenario or a security issue. Each of these are reviewed on a case by case basis.
+- A hotfix patch is released if the release has a critical issue such as a mainline broken scenario or a security issue. Each of these are reviewed on a case by case basis.
- Issues are investigated for the supported releases. If a release is no longer supported, you need to upgrade to a newer release and determine if the issue is still relevant.
From the 1.8.0 release onwards three (3) versions of Dapr are supported; the current and previous two (2) versions. Typically these are `MINOR`release updates. This means that there is a rolling window that moves forward for supported releases and it is your operational responsibility to remain up to date with these supported versions. If you have an older version of Dapr you may have to do intermediate upgrades to get to a supported version.
@@ -45,26 +45,36 @@ The table below shows the versions of Dapr releases that have been tested togeth
| Release date | Runtime | CLI | SDKs | Dashboard | Status | Release notes |
|--------------------|:--------:|:--------|---------|---------|---------|------------|
-| July 31st 2025 | 1.15.9 | 1.15.0 | Java 1.14.2, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.9 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.9) |
-| July 18th 2025 | 1.15.8 | 1.15.0 | Java 1.14.2, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.8 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.8) |
-| July 16th 2025 | 1.15.7 | 1.15.0 | Java 1.14.1, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.7 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.7) |
-| June 20th 2025 | 1.15.6 | 1.15.0 | Java 1.14.1, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.6 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.6) |
-| May 5th 2025 | 1.15.5 | 1.15.0 | Java 1.14.1, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.5 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.5) |
-| April 4th 2025 | 1.15.4 | 1.15.0 | Java 1.14.0, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.4) |
-| March 5rd 2025 | 1.15.3 | 1.15.0 | Java 1.14.0, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.3) |
-| March 3rd 2025 | 1.15.2 | 1.15.0 | Java 1.14.0, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.0 JS 3.5.0 Rust 0.16 | 0.15.0 | Supported (current) | [v1.15.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.2) |
-| February 28th 2025 | 1.15.1 | 1.15.0 | Java 1.14.0, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.0 JS 3.5.0 Rust 0.16 | 0.15.0 | Supported (current) | [v1.15.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.1) |
+| Jan 9th 2026 | 1.16.6 | 1.16.5 | Java 1.16.0 Go 1.13.0 PHP 1.2.0 Python 1.16.0 .NET 1.16.0 JS 3.6.0 Rust 0.17.0 | 0.15.0 | Supported (current) | [v1.16.6 release notes](https://github.com/dapr/dapr/releases/tag/v1.16.6) |
+| Dec 19th 2025 | 1.16.5 | 1.16.5 | Java 1.16.0 Go 1.13.0 PHP 1.2.0 Python 1.16.0 .NET 1.16.0 JS 3.6.0 Rust 0.17.0 | 0.15.0 | Supported (current) | [v1.16.5 release notes](https://github.com/dapr/dapr/releases/tag/v1.16.5) |
+| Dec 8th 2025 | 1.16.4 | 1.16.5 | Java 1.16.0 Go 1.13.0 PHP 1.2.0 Python 1.16.0 .NET 1.16.0 JS 3.6.0 Rust 0.17.0 | 0.15.0 | Supported (current) | [v1.16.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.16.4) |
+| Nov 21st 2025 | 1.16.3 | 1.16.4 | Java 1.16.0 Go 1.13.0 PHP 1.2.0 Python 1.16.0 .NET 1.16.0 JS 3.6.0 Rust 0.17.0 | 0.15.0 | Supported (current) | [v1.16.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.16.3) |
+| Oct 30th 2025 | 1.16.2 | 1.16.3 | Java 1.16.0 Go 1.13.0 PHP 1.2.0 Python 1.16.0 .NET 1.16.0 JS 3.6.0 Rust 0.17.0 | 0.15.0 | Supported (current) | [v1.16.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.16.2) |
+| Oct 6th 2025 | 1.16.1 | 1.16.1 | Java 1.16.0 Go 1.13.0 PHP 1.2.0 Python 1.16.0 .NET 1.16.0 JS 3.6.0 Rust 0.17.0 | 0.15.0 | Supported (current) | [v1.16.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.16.1) |
+| Sep 16th 2025 | 1.16.0 | 1.16.0 | Java 1.16.0 Go 1.13.0 PHP 1.2.0 Python 1.16.0 .NET 1.16.0 JS 3.6.0 Rust 0.17.0 | 0.15.0 | Supported (current) | [v1.16.0 release notes](https://github.com/dapr/dapr/releases/tag/v1.16.0) |
+| Sep 17th 2025 | 1.15.12 | 1.15.0 | Java 1.14.2, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported | [v1.15.12 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.12) |
+| August 28th 2025 | 1.15.11 | 1.15.0 | Java 1.14.2, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported | [v1.15.11 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.11) |
+| August 21st 2025 | 1.15.10 | 1.15.0 | Java 1.14.2, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported | [v1.15.10 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.10) |
+| July 31st 2025 | 1.15.9 | 1.15.0 | Java 1.14.2, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported | [v1.15.9 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.9) |
+| July 18th 2025 | 1.15.8 | 1.15.0 | Java 1.14.2, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported | [v1.15.8 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.8) |
+| July 16th 2025 | 1.15.7 | 1.15.0 | Java 1.14.1, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported | [v1.15.7 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.7) |
+| June 20th 2025 | 1.15.6 | 1.15.0 | Java 1.14.1, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported | [v1.15.6 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.6) |
+| May 5th 2025 | 1.15.5 | 1.15.0 | Java 1.14.1, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported | [v1.15.5 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.5) |
+| April 4th 2025 | 1.15.4 | 1.15.0 | Java 1.14.0, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported | [v1.15.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.4) |
+| March 5th 2025 | 1.15.3 | 1.15.0 | Java 1.14.0, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.4 JS 3.5.2 Rust 0.16.1 | 0.15.0 | Supported | [v1.15.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.3) |
+| March 3rd 2025 | 1.15.2 | 1.15.0 | Java 1.14.0, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.0 JS 3.5.0 Rust 0.16 | 0.15.0 | Supported | [v1.15.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.2) |
+| February 28th 2025 | 1.15.1 | 1.15.0 | Java 1.14.0, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.0 JS 3.5.0 Rust 0.16 | 0.15.0 | Supported | [v1.15.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.1) |
| February 27th 2025 | 1.15.0 | 1.15.0 | Java 1.14.0, 1.15.0 Go 1.12.0 PHP 1.2.0 Python 1.15.0 .NET 1.15.0 JS 3.5.0 Rust 0.16 | 0.15.0 | Supported | [v1.15.0 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.0) |
| September 16th 2024 | 1.14.4 | 1.14.1 | Java 1.12.0 Go 1.11.0 PHP 1.2.0 Python 1.14.0 .NET 1.14.0 JS 3.3.1 | 0.15.0 | Supported | [v1.14.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.14.4) |
| September 13th 2024 | 1.14.3 | 1.14.1 | Java 1.12.0 Go 1.11.0 PHP 1.2.0 Python 1.14.0 .NET 1.14.0 JS 3.3.1 | 0.15.0 | ⚠️ Recalled | [v1.14.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.14.3) |
| September 6th 2024 | 1.14.2 | 1.14.1 | Java 1.12.0 Go 1.11.0 PHP 1.2.0 Python 1.14.0 .NET 1.14.0 JS 3.3.1 | 0.15.0 | Supported | [v1.14.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.14.2) |
| August 14th 2024 | 1.14.1 | 1.14.1 | Java 1.12.0 Go 1.11.0 PHP 1.2.0 Python 1.14.0 .NET 1.14.0 JS 3.3.1 | 0.15.0 | Supported | [v1.14.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.14.1) |
| August 14th 2024 | 1.14.0 | 1.14.0 | Java 1.12.0 Go 1.11.0 PHP 1.2.0 Python 1.14.0 .NET 1.14.0 JS 3.3.1 | 0.15.0 | Supported | [v1.14.0 release notes](https://github.com/dapr/dapr/releases/tag/v1.14.0) |
-| May 29th 2024 | 1.13.4 | 1.13.0 | Java 1.11.0 Go 1.10.0 PHP 1.2.0 Python 1.13.0 .NET 1.13.0 JS 3.3.0 | 0.14.0 | Supported | [v1.13.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.4) |
-| May 21st 2024 | 1.13.3 | 1.13.0 | Java 1.11.0 Go 1.10.0 PHP 1.2.0 Python 1.13.0 .NET 1.13.0 JS 3.3.0 | 0.14.0 | Supported | [v1.13.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.3) |
-| April 3rd 2024 | 1.13.2 | 1.13.0 | Java 1.11.0 Go 1.10.0 PHP 1.2.0 Python 1.13.0 .NET 1.13.0 JS 3.3.0 | 0.14.0 | Supported | [v1.13.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.2) |
-| March 26th 2024 | 1.13.1 | 1.13.0 | Java 1.11.0 Go 1.10.0 PHP 1.2.0 Python 1.13.0 .NET 1.13.0 JS 3.3.0 | 0.14.0 | Supported | [v1.13.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.1) |
-| March 6th 2024 | 1.13.0 | 1.13.0 | Java 1.11.0 Go 1.10.0 PHP 1.2.0 Python 1.13.0 .NET 1.13.0 JS 3.3.0 | 0.14.0 | Supported | [v1.13.0 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.0) |
+| May 29th 2024 | 1.13.4 | 1.13.0 | Java 1.11.0 Go 1.10.0 PHP 1.2.0 Python 1.13.0 .NET 1.13.0 JS 3.3.0 | 0.14.0 | Unsupported | [v1.13.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.4) |
+| May 21st 2024 | 1.13.3 | 1.13.0 | Java 1.11.0 Go 1.10.0 PHP 1.2.0 Python 1.13.0 .NET 1.13.0 JS 3.3.0 | 0.14.0 | Unsupported | [v1.13.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.3) |
+| April 3rd 2024 | 1.13.2 | 1.13.0 | Java 1.11.0 Go 1.10.0 PHP 1.2.0 Python 1.13.0 .NET 1.13.0 JS 3.3.0 | 0.14.0 | Unsupported | [v1.13.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.2) |
+| March 26th 2024 | 1.13.1 | 1.13.0 | Java 1.11.0 Go 1.10.0 PHP 1.2.0 Python 1.13.0 .NET 1.13.0 JS 3.3.0 | 0.14.0 | Unsupported | [v1.13.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.1) |
+| March 6th 2024 | 1.13.0 | 1.13.0 | Java 1.11.0 Go 1.10.0 PHP 1.2.0 Python 1.13.0 .NET 1.13.0 JS 3.3.0 | 0.14.0 | Unsupported | [v1.13.0 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.0) |
| January 17th 2024 | 1.12.4 | 1.12.0 | Java 1.10.0 Go 1.9.1 PHP 1.2.0 Python 1.12.0 .NET 1.12.0 JS 3.2.0 | 0.14.0 | Unsupported | [v1.12.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.12.4) |
| January 2nd 2024 | 1.12.3 | 1.12.0 | Java 1.10.0 Go 1.9.1 PHP 1.2.0 Python 1.12.0 .NET 1.12.0 JS 3.2.0 | 0.14.0 | Unsupported | [v1.12.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.12.3) |
| November 18th 2023 | 1.12.2 | 1.12.0 | Java 1.10.0 Go 1.9.1 PHP 1.2.0 Python 1.12.0 .NET 1.12.0 JS 3.2.0 | 0.14.0 | Unsupported | [v1.12.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.12.2) |
diff --git a/daprdocs/content/en/reference/api/actors_api.md b/daprdocs/content/en/reference/api/actors_api.md
index 4fead8ee19d..288c4dcafb4 100644
--- a/daprdocs/content/en/reference/api/actors_api.md
+++ b/daprdocs/content/en/reference/api/actors_api.md
@@ -3,7 +3,7 @@ type: docs
title: "Actors API reference"
linkTitle: "Actors API"
description: "Detailed documentation on the actors API"
-weight: 600
+weight: 200
---
Dapr provides native, cross-platform, and cross-language virtual actor capabilities.
diff --git a/daprdocs/content/en/reference/api/bindings_api.md b/daprdocs/content/en/reference/api/bindings_api.md
index 81e13eecca7..8c63feb0a01 100644
--- a/daprdocs/content/en/reference/api/bindings_api.md
+++ b/daprdocs/content/en/reference/api/bindings_api.md
@@ -3,7 +3,7 @@ type: docs
title: "Bindings API reference"
linkTitle: "Bindings API"
description: "Detailed documentation on the bindings API"
-weight: 500
+weight: 300
---
Dapr provides bi-directional binding capabilities for applications and a consistent approach to interacting with different cloud/on-premise services or systems.
diff --git a/daprdocs/content/en/reference/api/configuration_api.md b/daprdocs/content/en/reference/api/configuration_api.md
index ef28fc42a8f..e09a5d9b7bd 100644
--- a/daprdocs/content/en/reference/api/configuration_api.md
+++ b/daprdocs/content/en/reference/api/configuration_api.md
@@ -3,7 +3,7 @@ type: docs
title: "Configuration API reference"
linkTitle: "Configuration API"
description: "Detailed documentation on the configuration API"
-weight: 800
+weight: 400
---
## Get Configuration
diff --git a/daprdocs/content/en/reference/api/conversation_api.md b/daprdocs/content/en/reference/api/conversation_api.md
index 1a4e006b348..95980f41e10 100644
--- a/daprdocs/content/en/reference/api/conversation_api.md
+++ b/daprdocs/content/en/reference/api/conversation_api.md
@@ -3,21 +3,23 @@ type: docs
title: "Conversation API reference"
linkTitle: "Conversation API"
description: "Detailed documentation on the conversation API"
-weight: 1400
+weight: 500
---
{{% alert title="Alpha" color="primary" %}}
The conversation API is currently in [alpha]({{% ref "certification-lifecycle.md#certification-levels" %}}).
{{% /alert %}}
-Dapr provides an API to interact with Large Language Models (LLMs) and enables critical performance and security functionality with features like prompt caching and PII data obfuscation.
+Dapr provides an API to interact with Large Language Models (LLMs) and enables critical performance and security functionality with features like prompt caching, PII data obfuscation, and tool calling capabilities.
+
+Tool calling follows OpenAI's function calling format, making it easy to integrate with existing AI development workflows and tools.
## Converse
-This endpoint lets you converse with LLMs.
+This endpoint lets you converse with LLMs using the Alpha2 version of the API, which provides enhanced tool calling support and alignment with OpenAI's interface.
```
-POST http://localhost:/v1.0-alpha1/conversation//converse
+POST http://localhost:<daprPort>/v1.0-alpha2/conversation/<llm-name>/converse
```
### URL parameters
@@ -30,35 +32,175 @@ POST http://localhost:/v1.0-alpha1/conversation//converse
| Field | Description |
| --------- | ----------- |
+| `contextId` | The ID of an existing chat (like in ChatGPT). Optional |
| `inputs` | Inputs for the conversation. Multiple inputs at one time are supported. Required |
-| `cacheTTL` | A time-to-live value for a prompt cache to expire. Uses Golang duration format. Optional |
-| `scrubPII` | A boolean value to enable obfuscation of sensitive information returning from the LLM. Set this value if all PII (across contents) in the request needs to be scrubbed. Optional |
-| `temperature` | A float value to control the temperature of the model. Used to optimize for consistency and creativity. Optional |
-| `metadata` | [Metadata](#metadata) passed to conversation components. Optional |
+| `parameters` | Parameters for all custom fields. Optional |
+| `metadata` | Metadata passed to conversation components. Optional |
+| `scrubPii` | A boolean value to enable obfuscation of sensitive information returning from the LLM. Optional |
+| `temperature` | A float value to control the temperature of the model. Used to optimize for consistency (0) or creativity (1). Optional |
+| `tools` | Tools register the tools available to be used by the LLM during the conversation. Optional |
+| `toolChoice` | Controls which (if any) tool is called by the model. Values: `auto`, `required`, or specific tool name. Defaults to `auto` if tools are present. Optional |
#### Input body
| Field | Description |
| --------- | ----------- |
-| `content` | The message content to send to the LLM. Required |
-| `role` | The role for the LLM to assume. Possible values: 'user', 'tool', 'assistant' |
-| `scrubPII` | A boolean value to enable obfuscation of sensitive information present in the content field. Set this value if PII for this specific content needs to be scrubbed exclusively. Optional |
+| `messages` | Array of conversation messages. Required |
+| `scrubPii` | A boolean value to enable obfuscation of sensitive information present in the content field. Optional |
+
+#### Message types
+
+The API supports different message types:
+
+| Type | Description |
+| ---- | ----------- |
+| `ofDeveloper` | Developer role messages with optional name and content |
+| `ofSystem` | System role messages with optional name and content |
+| `ofUser` | User role messages with optional name and content |
+| `ofAssistant` | Assistant role messages with optional name, content, and tool calls |
+| `ofTool` | Tool role messages with tool ID, name, and content |
+
+
+#### Tool calling
+
+Tools can be defined using the `tools` field with function definitions:
+
+| Field | Description |
+| --------- | ----------- |
+| `function.name` | The name of the function to be called. Required |
+| `function.description` | A description of what the function does. Optional |
+| `function.parameters` | JSON Schema object describing the function parameters. Optional |
+
+
+#### Tool choice options
-### Request content example
+The `toolChoice` is an optional parameter that controls how the model can use available tools:
+
+- **`auto`**: The model can pick between generating a message or calling one or more tools (default when tools are present)
+- **`required`**: Requires one or more functions to be called
+- **`{tool_name}`**: Forces the model to call a specific tool by name
+
+
+#### Metadata
+The `metadata` field serves as a dynamic configuration mechanism that allows you to pass additional configuration and authentication information to conversation components on a per-request basis. This metadata overrides any corresponding fields configured in the component's YAML configuration file, enabling dynamic configuration without modifying static component definitions.
+
+**Common metadata fields:**
+
+| Field | Description | Example |
+| ----- | ----------- | ------- |
+| `api_key` | API key for authenticating with the LLM service | `"sk-1234567890abcdef"` |
+| `model` | Specific model identifier | `"gpt-4-turbo"`, `"claude-3-sonnet"` |
+| `version` | API version or service version | `"1.0"`, `"2023-12-01"` |
+| `endpoint` | Custom endpoint URL for the service | `"https://api.custom-llm.com/v1"` |
+
+{{% alert title="Note" color="primary" %}}
+The exact metadata fields supported depend on the specific conversation component implementation. Refer to the component's documentation for the complete list of supported metadata fields.
+{{% /alert %}}
+
+In addition to passing metadata in the request body, you can also pass metadata as URL query parameters without modifying the request payload. Here is the format:
+
+- **Prefix**: All metadata parameters must be prefixed with `metadata.`
+- **Format**: `?metadata.=`
+- **Multiple parameters**: Separate with `&` (e.g., `?metadata.api_key=sk-123&metadata.model=gpt-4`)
+
+Example of model override:
+```bash
+POST http://localhost:3500/v1.0-alpha2/conversation/openai/converse?metadata.model=gpt-4-turbo
+```
+
+URL metadata parameters are merged with request body metadata, URL parameters take precedence if conflicts exist, and both override component configuration in the YAML file.
+
+### Request content examples
+
+#### Basic conversation
```json
-REQUEST = {
- "inputs": [
- {
- "content": "What is Dapr?",
- "role": "user", // Optional
- "scrubPII": "true", // Optional. Will obfuscate any sensitive information found in the content field
- },
- ],
- "cacheTTL": "10m", // Optional
- "scrubPII": "true", // Optional. Will obfuscate any sensitive information returning from the LLM
- "temperature": 0.5 // Optional. Optimizes for consistency (0) or creativity (1)
-}
+curl -X POST http://localhost:3500/v1.0-alpha2/conversation/openai/converse \
+ -H "Content-Type: application/json" \
+ -d '{
+ "inputs": [
+ {
+ "messages": [
+ {
+ "ofUser": {
+ "content": [
+ {
+ "text": "What is Dapr?"
+ }
+ ]
+ }
+ }
+ ]
+ }
+ ],
+ "parameters": {},
+ "metadata": {}
+ }'
+```
+
+#### Conversation with tool calling
+
+```json
+curl -X POST http://localhost:3500/v1.0-alpha2/conversation/openai/converse \
+ -H "Content-Type: application/json" \
+ -d '{
+ "inputs": [
+ {
+ "messages": [
+ {
+ "ofUser": {
+ "content": [
+ {
+ "text": "What is the weather like in San Francisco in celsius?"
+ }
+ ]
+ }
+ }
+ ],
+ "scrubPii": false
+ }
+ ],
+ "parameters": {
+ "max_tokens": {
+ "@type": "type.googleapis.com/google.protobuf.Int64Value",
+ "value": "100"
+ },
+ "model": {
+ "@type": "type.googleapis.com/google.protobuf.StringValue",
+ "value": "claude-3-5-sonnet-20240620"
+ }
+ },
+ "metadata": {
+ "api_key": "test-key",
+ "version": "1.0"
+ },
+ "scrubPii": false,
+ "temperature": 0.7,
+ "tools": [
+ {
+ "function": {
+ "name": "get_weather",
+ "description": "Get the current weather for a location",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "location": {
+ "type": "string",
+ "description": "The city and state, e.g. San Francisco, CA"
+ },
+ "unit": {
+ "type": "string",
+ "enum": ["celsius", "fahrenheit"],
+ "description": "The temperature unit to use"
+ }
+ },
+ "required": ["location"]
+ }
+ }
+ }
+ ],
+ "toolChoice": "auto"
+ }'
```
### HTTP response codes
@@ -71,21 +213,61 @@ Code | Description
### Response content
+#### Basic conversation response
+
```json
-RESPONSE = {
- "outputs": {
+{
+ "outputs": [
{
- "result": "Dapr is distribution application runtime ...",
- "parameters": {},
- },
+ "choices": [
+ {
+ "finishReason": "stop",
+ "message": {
+ "content": "Distributed application runtime, open-source."
+ }
+ }
+ ]
+ }
+ ]
+}
+```
+
+#### Tool calling response
+
+```json
+{
+ "outputs": [
{
- "result": "Dapr can help developers ...",
- "parameters": {},
+ "choices": [
+ {
+ "finishReason": "tool_calls",
+ "message": {
+ "toolCalls": [
+ {
+ "id": "call_Uwa41pG0UqGA2zp0Fec0KwOq",
+ "function": {
+ "name": "get_weather",
+ "arguments": "{\"location\":\"San Francisco, CA\",\"unit\":\"celsius\"}"
+ }
+ }
+ ]
+ }
+ }
+ ]
}
- },
+ ]
}
```
+
+## Legacy Alpha1 API
+
+The previous Alpha1 version of the API is still supported for backward compatibility but is deprecated. For new implementations, use the Alpha2 version described above.
+
+```
+POST http://localhost:<daprPort>/v1.0-alpha1/conversation/<llm-name>/converse
+```
+
## Next steps
- [Conversation API overview]({{% ref conversation-overview.md %}})
diff --git a/daprdocs/content/en/reference/api/cryptography_api.md b/daprdocs/content/en/reference/api/cryptography_api.md
index 163abe1d77a..985a247c9d9 100644
--- a/daprdocs/content/en/reference/api/cryptography_api.md
+++ b/daprdocs/content/en/reference/api/cryptography_api.md
@@ -3,7 +3,7 @@ type: docs
title: "Cryptography API reference"
linkTitle: "Cryptography API"
description: "Detailed documentation on the cryptography API"
-weight: 1300
+weight: 600
---
Dapr provides cross-platform and cross-language support for encryption and decryption support via the
diff --git a/daprdocs/content/en/reference/api/distributed_lock_api.md b/daprdocs/content/en/reference/api/distributed_lock_api.md
index 92914a03768..b4db657cee2 100644
--- a/daprdocs/content/en/reference/api/distributed_lock_api.md
+++ b/daprdocs/content/en/reference/api/distributed_lock_api.md
@@ -1,9 +1,9 @@
---
type: docs
-title: "Distributed Lock API reference"
-linkTitle: "Distributed Lock API"
+title: "Distributed lock API reference"
+linkTitle: "Distributed lock API"
description: "Detailed documentation on the distributed lock API"
-weight: 900
+weight: 700
---
## Lock
diff --git a/daprdocs/content/en/reference/api/health_api.md b/daprdocs/content/en/reference/api/health_api.md
index 164e2dd6d0b..bb0e096074e 100644
--- a/daprdocs/content/en/reference/api/health_api.md
+++ b/daprdocs/content/en/reference/api/health_api.md
@@ -3,7 +3,7 @@ type: docs
title: "Health API reference"
linkTitle: "Health API"
description: "Detailed documentation on the health API"
-weight: 1000
+weight: 800
---
Dapr provides health checking probes that can be used as readiness or liveness of Dapr and for initialization readiness from SDKs.
diff --git a/daprdocs/content/en/reference/api/jobs_api.md b/daprdocs/content/en/reference/api/jobs_api.md
index aa4c29f940a..690fe834135 100644
--- a/daprdocs/content/en/reference/api/jobs_api.md
+++ b/daprdocs/content/en/reference/api/jobs_api.md
@@ -3,7 +3,7 @@ type: docs
title: "Jobs API reference"
linkTitle: "Jobs API"
description: "Detailed documentation on the jobs API"
-weight: 1300
+weight: 900
---
{{% alert title="Note" color="primary" %}}
diff --git a/daprdocs/content/en/reference/api/metadata_api.md b/daprdocs/content/en/reference/api/metadata_api.md
index dc5ed7fa953..ed77aca1b6c 100644
--- a/daprdocs/content/en/reference/api/metadata_api.md
+++ b/daprdocs/content/en/reference/api/metadata_api.md
@@ -3,7 +3,7 @@ type: docs
title: "Metadata API reference"
linkTitle: "Metadata API"
description: "Detailed documentation on the Metadata API"
-weight: 1100
+weight: 1000
---
Dapr has a metadata API that returns information about the sidecar allowing runtime discoverability. The metadata endpoint returns the following information.
diff --git a/daprdocs/content/en/reference/api/placement_api.md b/daprdocs/content/en/reference/api/placement_api.md
index de216c1f156..6b02dd2cbfb 100644
--- a/daprdocs/content/en/reference/api/placement_api.md
+++ b/daprdocs/content/en/reference/api/placement_api.md
@@ -3,7 +3,7 @@ type: docs
title: "Placement API reference"
linkTitle: "Placement API"
description: "Detailed documentation on the Placement API"
-weight: 1200
+weight: 1100
---
Dapr has an HTTP API `/placement/state` for Placement service that exposes placement table information. The API is exposed on the sidecar on the same port as the healthz. This is an unauthenticated endpoint, and is disabled by default.
diff --git a/daprdocs/content/en/reference/api/pubsub_api.md b/daprdocs/content/en/reference/api/pubsub_api.md
index d2cc67ab03e..32af3bb0d09 100644
--- a/daprdocs/content/en/reference/api/pubsub_api.md
+++ b/daprdocs/content/en/reference/api/pubsub_api.md
@@ -3,7 +3,7 @@ type: docs
title: "Pub/sub API reference"
linkTitle: "Pub/Sub API"
description: "Detailed documentation on the pub/sub API"
-weight: 200
+weight: 1200
---
## Publish a message to a given topic
diff --git a/daprdocs/content/en/reference/api/secrets_api.md b/daprdocs/content/en/reference/api/secrets_api.md
index 752736f5fe4..6561823ec90 100644
--- a/daprdocs/content/en/reference/api/secrets_api.md
+++ b/daprdocs/content/en/reference/api/secrets_api.md
@@ -3,7 +3,7 @@ type: docs
title: "Secrets API reference"
linkTitle: "Secrets API"
description: "Detailed documentation on the secrets API"
-weight: 700
+weight: 1300
---
## Get Secret
diff --git a/daprdocs/content/en/reference/api/service_invocation_api.md b/daprdocs/content/en/reference/api/service_invocation_api.md
index cc46d982488..811d0021ac8 100644
--- a/daprdocs/content/en/reference/api/service_invocation_api.md
+++ b/daprdocs/content/en/reference/api/service_invocation_api.md
@@ -3,7 +3,7 @@ type: docs
title: "Service invocation API reference"
linkTitle: "Service invocation API"
description: "Detailed documentation on the service invocation API"
-weight: 100
+weight: 1400
---
Dapr provides users with the ability to call other applications that are using Dapr with a unique named identifier (appId), or HTTP endpoints that are not using Dapr.
diff --git a/daprdocs/content/en/reference/api/state_api.md b/daprdocs/content/en/reference/api/state_api.md
index 328ffec80fd..bf6e2c15272 100644
--- a/daprdocs/content/en/reference/api/state_api.md
+++ b/daprdocs/content/en/reference/api/state_api.md
@@ -3,7 +3,7 @@ type: docs
title: "State management API reference"
linkTitle: "State management API"
description: "Detailed documentation on the state management API"
-weight: 400
+weight: 1500
---
## Component file
diff --git a/daprdocs/content/en/reference/api/workflow_api.md b/daprdocs/content/en/reference/api/workflow_api.md
index 85ee207162d..f84996444f6 100644
--- a/daprdocs/content/en/reference/api/workflow_api.md
+++ b/daprdocs/content/en/reference/api/workflow_api.md
@@ -3,7 +3,7 @@ type: docs
title: "Workflow API reference"
linkTitle: "Workflow API"
description: "Detailed documentation on the workflow API"
-weight: 300
+weight: 1600
---
Dapr provides users with the ability to interact with workflows through its built-in workflow engine, which is implemented using Dapr Actors. This workflow engine is accessed using the name `dapr` in API calls as the `workflowComponentName`.
diff --git a/daprdocs/content/en/reference/cli/dapr-scheduler.md b/daprdocs/content/en/reference/cli/dapr-scheduler.md
new file mode 100644
index 00000000000..61d40f190bb
--- /dev/null
+++ b/daprdocs/content/en/reference/cli/dapr-scheduler.md
@@ -0,0 +1,156 @@
+---
+type: docs
+title: "dapr scheduler"
+linkTitle: "scheduler"
+description: "Manage Dapr Scheduler jobs and reminders using the dapr CLI"
+weight: 3000
+---
+
+# dapr scheduler
+
+Manage scheduled jobs and reminders stored in the Dapr Scheduler.
+
+``` bash
+dapr scheduler [command]
+```
+
+## Aliases
+- `scheduler`
+- `sched`
+
+## Available Commands
+
+- [list](#dapr-scheduler-list): List scheduled jobs
+- [get](#dapr-scheduler-get): Get a scheduled job by key
+- [delete](#dapr-scheduler-delete): Delete a scheduled job by key
+- [delete-all](#dapr-scheduler-delete-all): Delete all scheduled jobs by key prefix
+- [export](#dapr-scheduler-export): Export all scheduled jobs to a file
+- [import](#dapr-scheduler-import): Import scheduled jobs from a file
+
+
+## Global Flags
+
+| Flag | Description |
+| ---- | ----------- |
+| -k, --kubernetes | Perform operation on a Kubernetes Dapr cluster |
+| -n, --namespace string | Namespace of the Dapr app (default "default") |
+| --scheduler-namespace string | Namespace where the scheduler runs (default "dapr-system") |
+
+## dapr scheduler list
+
+List scheduled jobs in Scheduler.
+
+```bash
+dapr scheduler list [flags]
+```
+
+### Flags
+
+- `--filter string` – Filter jobs by type. One of: all, app, actor, workflow, activity (default all)
+- `-o, --output string` – Output format: short, wide, yaml, json (default short)
+
+### Examples
+
+```bash
+$ dapr scheduler list
+NAME BEGIN COUNT LAST TRIGGER
+actor/myactortype/actorid1/test1 -3.89s 1 2025-10-03T16:58:55Z
+actor/myactortype/actorid2/test2 -3.89s 1 2025-10-03T16:58:55Z
+app/test-scheduler/test1 -3.89s 1 2025-10-03T16:58:55Z
+app/test-scheduler/test2 -3.89s 1 2025-10-03T16:58:55Z
+activity/test-scheduler/xyz1::0::1 -888.8ms 0
+activity/test-scheduler/xyz2::0::1 -888.8ms 0
+workflow/test-scheduler/abc1/timer-0-TVIQGkvu +50.0h 0
+workflow/test-scheduler/abc2/timer-0-OM2xqG9m +50.0h 0
+```
+
+```bash
+$ dapr scheduler list -o wide
+NAMESPACE NAME BEGIN EXPIRATION SCHEDULE DUE TIME TTL REPEATS COUNT LAST TRIGGER
+default actor/myactortype/actorid1/test1 2025-10-03T16:58:55Z @every 2h46m40s 2025-10-03T17:58:55+01:00 100 1 2025-10-03T16:58:55Z
+default actor/myactortype/actorid2/test2 2025-10-03T16:58:55Z @every 2h46m40s 2025-10-03T17:58:55+01:00 100 1 2025-10-03T16:58:55Z
+default app/test-scheduler/test1 2025-10-03T16:58:55Z @every 100m 2025-10-03T17:58:55+01:00 1234 1 2025-10-03T16:58:55Z
+default app/test-scheduler/test2 2025-10-03T16:58:55Z 2025-10-03T19:45:35Z @every 100m 2025-10-03T17:58:55+01:00 10000s 56788 1 2025-10-03T16:58:55Z
+default activity/test-scheduler/xyz1::0::1 2025-10-03T16:58:58Z 0s 0
+default activity/test-scheduler/xyz2::0::1 2025-10-03T16:58:58Z 0s 0
+default workflow/test-scheduler/abc1/timer-0-TVIQGkvu 2025-10-05T18:58:58Z 2025-10-05T18:58:58Z 0
+default workflow/test-scheduler/abc2/timer-0-OM2xqG9m 2025-10-05T18:58:58Z 2025-10-05T18:58:58Z 0
+```
+
+## dapr scheduler get
+
+Get one or more scheduled jobs/reminders by key.
+
+```bash
+dapr scheduler get <key> [key ...] [flags]
+```
+
+### Key formats
+
+- App job: `app/<app-id>/<job-name>`
+- Actor reminder: `actor/<actor-type>/<actor-id>/<reminder-name>`
+- Workflow reminder: `workflow/<app-id>/<instance-id>/<reminder-name>`
+- Activity reminder: `activity/<app-id>/<activity-id>`
+
+### Flags
+
+- `-o, --output string` – Output format: `short`, `wide`, `yaml`, `json` (default `short`)
+
+### Examples
+
+```bash
+dapr scheduler get app/my-app/job1 -o yaml
+```
+
+## dapr scheduler delete
+
+Delete one or more jobs.
+
+```bash
+dapr scheduler delete <key> [key ...]
+```
+
+### Aliases
+- `delete`, `d`, `del`
+
+### Examples
+
+```bash
+dapr scheduler delete app/my-app/job1 actor/MyActor/123/reminder1
+```
+
+## dapr scheduler delete-all
+
+Bulk delete jobs by filter key.
+
+```bash
+dapr scheduler delete-all <filter>
+```
+
+### Aliases
+
+- `delete-all`, `da`, `delall`
+
+### Examples
+
+```bash
+dapr scheduler delete-all all
+dapr scheduler delete-all app/my-app
+dapr scheduler delete-all actor/MyActorType
+```
+
+## dapr scheduler export
+
+Export all jobs and reminders to a file.
+
+```bash
+dapr scheduler export -o backup.bin
+```
+
+## dapr scheduler import
+
+Import jobs and reminders from a file.
+
+```bash
+dapr scheduler import -f backup.bin
+```
+
diff --git a/daprdocs/content/en/reference/cli/dapr-workflow.md b/daprdocs/content/en/reference/cli/dapr-workflow.md
new file mode 100644
index 00000000000..f490a8a7ecd
--- /dev/null
+++ b/daprdocs/content/en/reference/cli/dapr-workflow.md
@@ -0,0 +1,217 @@
+---
+type: docs
+title: "workflow CLI command"
+linkTitle: "workflow"
+description: "Detailed information on the workflow CLI command"
+---
+
+Manage Dapr workflow instances.
+
+## Commands
+
+| Command | Description |
+|---------|-------------|
+| dapr workflow run | Start a new workflow instance |
+| dapr workflow list | List workflow instances |
+| dapr workflow history | Get workflow execution history |
+| dapr workflow purge | Purge workflow instances |
+| dapr workflow suspend | Suspend a workflow |
+| dapr workflow resume | Resume a workflow |
+| dapr workflow terminate | Terminate a workflow |
+| dapr workflow raise-event | Raise an external event |
+| dapr workflow rerun | Re-run a workflow |
+
+## Flags
+
+```
+ -a, --app-id string The app ID owner of the workflow instance
+ -h, --help help for workflow
+ -k, --kubernetes Target a Kubernetes dapr installation
+ -n, --namespace string Namespace to perform workflow operation on (default "default")
+```
+
+## Examples
+
+### List workflows
+```bash
+dapr workflow list --app-id myapp
+```
+
+### Start a workflow
+```bash
+dapr workflow run MyWorkflow --app-id myapp --input '{"key": "value"}'
+```
+
+### Kubernetes mode
+```bash
+dapr workflow list -k -n production --app-id myapp
+```
+
+## dapr workflow list
+
+List workflow instances for a given application.
+
+## Usage
+
+```bash
+dapr workflow list [flags]
+```
+
+## Flags
+
+| Name | Type | Description |
+|------|------|-------------|
+| `--app-id`, `-a` | string | (Required) The app ID owner of the workflow instances |
+| `--filter-name`, `-w` | string | Filter workflows by name |
+| `--filter-status`, `-s` | string | Filter by status: RUNNING, COMPLETED, FAILED, CANCELED, TERMINATED, PENDING, SUSPENDED |
+| `--filter-max-age`, `-m` | string | Filter workflows started within duration or timestamp (e.g., "300ms", "1.5h", "2023-01-02T15:04:05") |
+| `--output`, `-o` | string | Output format: short, wide, yaml, json (default "short") |
+| `--connection-string`, `-c` | string | Connection string to the actor state store |
+| `--table-name`, `-t` | string | Table or collection name used as the actor state store |
+| `--kubernetes`, `-k` | bool | Target a Kubernetes Dapr installation |
+| `--namespace`, `-n` | string | Kubernetes namespace (default "default") |
+
+## Examples
+
+### Basic usage
+```bash
+dapr workflow list --app-id myapp
+```
+
+### Filter by status
+```bash
+dapr workflow list --app-id myapp --filter-status RUNNING
+```
+
+### Filter by workflow name
+```bash
+dapr workflow list --app-id myapp --filter-name OrderProcessing
+```
+
+### Filter by age
+```bash
+# Workflows from last 24 hours
+dapr workflow list --app-id myapp --filter-max-age 24h
+
+# Workflows after specific date
+dapr workflow list --app-id myapp --filter-max-age 2024-01-01T00:00:00Z
+```
+
+### JSON output
+```bash
+dapr workflow list --app-id myapp --output json
+```
+
+### Kubernetes with port forwarding
+```bash
+# Terminal 1: Port forward to database
+kubectl port-forward service/postgres 5432:5432 -n production
+
+# Terminal 2: List workflows with direct database access
+dapr workflow list \
+ --kubernetes \
+ --namespace production \
+ --app-id myapp \
+ --connection-string "host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable" \
+ --table-name workflows
+```
+
+## Connection String Formats
+
+### PostgreSQL / CockroachDB
+```
+host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable
+```
+
+### MySQL
+```
+dapr:dapr@tcp(localhost:3306)/dapr?parseTime=true
+```
+
+### SQL Server
+```
+sqlserver://dapr:Pass@word@localhost:1433?database=dapr
+```
+
+### MongoDB
+```
+mongodb://localhost:27017/dapr
+```
+
+### Redis
+```
+redis[s]://[[username][:password]@][host][:port][/db-number]
+```
+
+## dapr workflow purge
+
+Purge workflow instances with terminal states (COMPLETED, FAILED, TERMINATED).
+
+## Usage
+
+```bash
+dapr workflow purge [instance-id] [flags]
+```
+
+## Flags
+
+| Name | Type | Description |
+|------|------|-------------|
+| `--app-id`, `-a` | string | (Required) The app ID owner of the workflow instances |
+| `--all` | bool | Purge all terminal workflow instances (use with caution) |
+| `--all-older-than` | string | Purge instances older than duration or timestamp (e.g., "24h", "2023-01-02T15:04:05Z") |
+| `--connection-string`, `-c` | string | Connection string to the actor state store |
+| `--table-name`, `-t` | string | Table or collection name used as the actor state store |
+| `--kubernetes`, `-k` | bool | Target a Kubernetes Dapr installation |
+| `--namespace`, `-n` | string | Kubernetes namespace (default "default") |
+
+## Examples
+
+### Purge a specific instance
+```bash
+dapr workflow purge wf-12345 --app-id myapp
+```
+
+### Purge instances older than 30 days
+```bash
+dapr workflow purge --app-id myapp --all-older-than 720h
+```
+
+### Purge instances older than specific date
+```bash
+dapr workflow purge --app-id myapp --all-older-than 2023-12-01T00:00:00Z
+```
+
+### Purge all terminal instances (dangerous!)
+```bash
+dapr workflow purge --app-id myapp --all
+```
+
+### Kubernetes with database access
+```bash
+# Port forward to database
+kubectl port-forward service/postgres 5432:5432 -n production
+
+# Purge old workflows
+dapr workflow purge \
+ --kubernetes \
+ --namespace production \
+ --app-id myapp \
+ --connection-string "host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable" \
+ --table-name workflows \
+ --all-older-than 2160h # 90 days
+```
+
+## Best Practices
+
+1. **Regular Cleanup**: Schedule periodic purge operations
+ ```bash
+ # Cron job to purge workflows older than 90 days
+ 0 2 * * 0 dapr workflow purge --app-id myapp --all-older-than 2160h
+ ```
+
+2. **Test First**: Use list command to see what will be purged
+ ```bash
+ dapr workflow list --app-id myapp --filter-status COMPLETED --filter-max-age 2160h
+ ```
+
+3. **Backup Before Bulk Purge**: Export data before using `--all`
+ ```bash
+ dapr workflow list --app-id myapp --output json > backup.json
+ ```
diff --git a/daprdocs/content/en/reference/components-reference/supported-bindings/_index.md b/daprdocs/content/en/reference/components-reference/supported-bindings/_index.md
index bca14fe0607..0f61294f835 100644
--- a/daprdocs/content/en/reference/components-reference/supported-bindings/_index.md
+++ b/daprdocs/content/en/reference/components-reference/supported-bindings/_index.md
@@ -2,7 +2,7 @@
type: docs
title: "Bindings component specs"
linkTitle: "Bindings"
-weight: 4000
+weight: 1000
description: The supported external bindings that interface with Dapr
aliases:
- "/operations/components/setup-bindings/supported-bindings/"
diff --git a/daprdocs/content/en/reference/components-reference/supported-bindings/eventhubs.md b/daprdocs/content/en/reference/components-reference/supported-bindings/eventhubs.md
index 989f93ab366..227b9973785 100644
--- a/daprdocs/content/en/reference/components-reference/supported-bindings/eventhubs.md
+++ b/daprdocs/content/en/reference/components-reference/supported-bindings/eventhubs.md
@@ -70,24 +70,24 @@ The above example uses secrets as plain strings. It is recommended to use a secr
## Spec metadata fields
-| Field | Required | Binding support | Details | Example |
-|--------------------|:--------:|------------|-----|---------|
-| `eventHub` | Y* | Input/Output | The name of the Event Hubs hub ("topic"). Required if using Microsoft Entra ID authentication or if the connection string doesn't contain an `EntityPath` value | `mytopic` |
-| `connectionString` | Y* | Input/Output | Connection string for the Event Hub or the Event Hub namespace. * Mutally exclusive with `eventHubNamespace` field. * Required when not using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key};EntityPath={EventHub}"` or `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key}"`
-| `eventHubNamespace` | Y* | Input/Output | The Event Hub Namespace name. * Mutally exclusive with `connectionString` field. * Required when using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"namespace"`
-| `enableEntityManagement` | N | Input/Output | Boolean value to allow management of the EventHub namespace and storage account. Default: `false` | `"true"`, `"false"`
-| `enableInOrderMessageDelivery` | N | Input/Output | Boolean value to allow messages to be delivered in the order in which they were posted. This assumes `partitionKey` is set when publishing or posting to ensure ordering across partitions. Default: `false` | `"true"`, `"false"`
-| `resourceGroupName` | N | Input/Output | Name of the resource group the Event Hub namespace is part of. Required when entity management is enabled | `"test-rg"`
-| `subscriptionID` | N | Input/Output | Azure subscription ID value. Required when entity management is enabled | `"azure subscription id"`
-| `partitionCount` | N | Input/Output | Number of partitions for the new Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"2"`
-| `messageRetentionInDays` | N | Input/Output | Number of days to retain messages for in the newly created Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"90"`
-| `consumerGroup` | Y | Input | The name of the [Event Hubs Consumer Group](https://docs.microsoft.com/azure/event-hubs/event-hubs-features#consumer-groups) to listen on | `"group1"` |
-| `storageAccountName` | Y | Input | Storage account name to use for the checkpoint store. |`"myeventhubstorage"`
-| `storageAccountKey` | Y* | Input | Storage account key for the checkpoint store account. * When using Microsoft Entra ID, it's possible to omit this if the service principal has access to the storage account too. | `"112233445566778899"`
-| `storageConnectionString` | Y* | Input | Connection string for the checkpoint store, alternative to specifying `storageAccountKey` | `"DefaultEndpointsProtocol=https;AccountName=myeventhubstorage;AccountKey="`
-| `storageContainerName` | Y | Input | Storage container name for the storage account name. | `"myeventhubstoragecontainer"`
-| `getAllMessageProperties` | N | Input | When set to `true`, retrieves all user/app/custom properties from the Event Hub message and forwards them in the returned event metadata. Default setting is `"false"`. | `"true"`, `"false"`
-| `direction` | N | Input/Output | The direction of the binding. | `"input"`, `"output"`, `"input, output"`
+| Field | Required | Binding support | Details | Example |
+|--------------------|:--------:|------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------|
+| `eventHub` | Y* | Input/Output | The name of the Event Hubs hub ("topic"). Required if using Microsoft Entra ID authentication or if the connection string doesn't contain an `EntityPath` value | `mytopic` |
+| `connectionString` | Y* | Input/Output | Connection string for the Event Hub or the Event Hub namespace. * Mutually exclusive with `eventHubNamespace` field. * Required when not using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key};EntityPath={EventHub}"` or `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key}"`
+| `eventHubNamespace` | Y* | Input/Output | The Event Hub Namespace name. * Mutually exclusive with `connectionString` field. * Required when using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"namespace"`
+| `enableEntityManagement` | N | Input/Output | Boolean value to allow management of the EventHub namespace and storage account. Default: `false` | `"true"`, `"false"`
+| `enableInOrderMessageDelivery` | N | Input/Output | Boolean value to allow messages to be delivered in the order in which they were posted. This assumes `partitionKey` is set when publishing or posting to ensure ordering across partitions. Default: `false` | `"true"`, `"false"`
+| `resourceGroupName` | N | Input/Output | Name of the resource group the Event Hub namespace is part of. Required when entity management is enabled | `"test-rg"`
+| `subscriptionID` | N | Input/Output | Azure subscription ID value. Required when entity management is enabled | `"azure subscription id"`
+| `partitionCount` | N | Input/Output | Number of partitions for the new Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"2"`
+| `messageRetentionInDays` | N | Input/Output | Number of days to retain messages for in the newly created Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"90"`
+| `consumerGroup` | Y | Input | The name of the [Event Hubs Consumer Group](https://docs.microsoft.com/azure/event-hubs/event-hubs-features#consumer-groups) to listen on | `"group1"` |
+| `storageAccountName` | Y | Input | Storage account name to use for the checkpoint store. |`"myeventhubstorage"`
+| `storageAccountKey` | Y* | Input | Storage account key for the checkpoint store account. * When using Microsoft Entra ID, it's possible to omit this if the service principal has access to the storage account too. | `"112233445566778899"`
+| `storageConnectionString` | Y* | Input | Connection string for the checkpoint store, alternative to specifying `storageAccountKey` | `"DefaultEndpointsProtocol=https;AccountName=myeventhubstorage;AccountKey="`
+| `storageContainerName` | Y | Input | Storage container name for the storage account name. | `"myeventhubstoragecontainer"`
+| `getAllMessageProperties` | N | Input | When set to `true`, retrieves all user/app/custom properties from the Event Hub message and forwards them in the returned event metadata. Default setting is `"false"`. | `"true"`, `"false"`
+| `direction` | N | Input/Output | The direction of the binding. | `"input"`, `"output"`, `"input, output"`
### Microsoft Entra ID authentication
diff --git a/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md b/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md
index 91b17c196a7..52170d484c0 100644
--- a/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md
+++ b/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md
@@ -38,14 +38,14 @@ The above example uses secrets as plain strings. It is recommended to use a secr
| Field | Required | Binding support | Details | Example |
|--------------------|:--------:|------------|-----|---------|
-| `redisHost` | Y | Output | The Redis host address | `"localhost:6379"` |
+| `redisHost` | Y | Output | Connection-string for the redis host. If `"redisType"` is `"cluster"`, it can be multiple hosts separated by commas or just a single host. When using Redis Sentinel (`"failover"` is `"true"`), multiple sentinel addresses can also be provided as comma-separated values. | `localhost:6379`, `redis-master.default.svc.cluster.local:6379`, `sentinel1:26379,sentinel2:26379,sentinel3:26379` |
| `redisPassword` | N | Output | The Redis password | `"password"` |
| `redisUsername` | N | Output | Username for Redis host. Defaults to empty. Make sure your redis server version is 6 or above, and have created acl rule correctly. | `"username"` |
| `useEntraID` | N | Output | Implements EntraID support for Azure Cache for Redis. Before enabling this:
The `redisHost` name must be specified in the form of `"server:port"`
TLS must be enabled
Learn more about this setting under [Create a Redis instance > Azure Cache for Redis]({{% ref "#create-a-redis-instance" %}}) | `"true"`, `"false"` |
| `enableTLS` | N | Output | If the Redis instance supports TLS with public certificates it can be configured to enable or disable TLS. Defaults to `"false"` | `"true"`, `"false"` |
| `clientCert` | N | Output | The content of the client certificate, used for Redis instances that require client-side certificates. Must be used with `clientKey` and `enableTLS` must be set to true. It is recommended to use a secret store as described [here]({{% ref component-secrets.md %}}) | `"----BEGIN CERTIFICATE-----\nMIIC..."` |
| `clientKey` | N | Output | The content of the client private key, used in conjunction with `clientCert` for authentication. It is recommended to use a secret store as described [here]({{% ref component-secrets.md %}}) | `"----BEGIN PRIVATE KEY-----\nMIIE..."` |
-| `failover` | N | Output | Property to enable failover configuration. Needs sentinelMasterName to be set. Defaults to `"false"` | `"true"`, `"false"`
+| `failover` | N | Output | Property to enable failover configuration. Needs sentinelMasterName to be set. When enabled, redisHost should contain the sentinel addresses. Defaults to `"false"` | `"true"`, `"false"`
| `sentinelMasterName` | N | Output | The sentinel master name. See [Redis Sentinel Documentation](https://redis.io/docs/reference/sentinel-clients/) | `""`, `"mymaster"`
| `sentinelUsername` | N | Output | Username for Redis Sentinel. Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"username"`
| `sentinelPassword` | N | Output | Password for Redis Sentinel. Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"password"`
@@ -249,6 +249,28 @@ You can use [Helm](https://helm.sh/) to quickly create a Redis instance in our K
The Dapr CLI automatically deploys a local redis instance in self hosted mode as part of the `dapr init` command.
{{% /alert %}}
+## Redis Sentinel configuration
+
+When using Redis Sentinel for high availability, set `redisType` to `"node"`, enable failover mode with `failover: "true"`, and provide the sentinel master name. Multiple sentinel addresses can be specified as a comma-separated list in the `redisHost` field for redundancy.
+
+ ```yaml
+ apiVersion: dapr.io/v1alpha1
+ kind: Component
+ metadata:
+ name: redis-binding
+ spec:
+ type: bindings.redis
+ version: v1
+ metadata:
+ - name: redisHost
+ value: "sentinel1:26379,sentinel2:26379,sentinel3:26379"
+ - name: redisType
+ value: "node"
+ - name: failover
+ value: "true"
+ - name: sentinelMasterName
+ value: "mymaster"
+ ```
## Related links
diff --git a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/_index.md b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/_index.md
index b8e80f12216..64e4ebf7366 100644
--- a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/_index.md
+++ b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/_index.md
@@ -2,7 +2,7 @@
type: docs
title: "Configuration store component specs"
linkTitle: "Configuration stores"
-weight: 6000
+weight: 2000
description: The supported configuration stores that interface with Dapr
aliases:
- "/operations/components/setup-configuration-store/supported-configuration-stores/"
diff --git a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/azure-appconfig-configuration-store.md b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/azure-appconfig-configuration-store.md
index 3db6a4bba94..b2e2c3919d3 100644
--- a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/azure-appconfig-configuration-store.md
+++ b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/azure-appconfig-configuration-store.md
@@ -50,14 +50,14 @@ The above example uses secrets as plain strings. It is recommended to use a secr
## Spec metadata fields
-| Field | Required | Details | Example |
-|----------------------------|:--------:|---------|---------|
-| connectionString | Y* | Connection String for the Azure App Configuration instance. No Default. Can be `secretKeyRef` to use a secret reference. *Mutally exclusive with host field. *Not to be used when [Azure Authentication](https://docs.dapr.io/developing-applications/integrations/azure/azure-authentication/authenticating-azure/) is used | `Endpoint=https://foo.azconfig.io;Id=osOX-l9-s0:sig;Secret=00000000000000000000000000000000000000000000`
-| host | N* | Endpoint for the Azure App Configuration instance. No Default. *Mutally exclusive with connectionString field. *To be used when [Azure Authentication](https://docs.dapr.io/developing-applications/integrations/azure/azure-authentication/authenticating-azure/) is used | `https://dapr.azconfig.io`
-| maxRetries | N | Maximum number of retries before giving up. Defaults to `3` | `5`, `10`
-| retryDelay | N | RetryDelay specifies the initial amount of delay to use before retrying an operation. The delay increases exponentially with each retry up to the maximum specified by MaxRetryDelay. Defaults to `4` seconds; `"-1"` disables delay between retries. | `4s`
-| maxRetryDelay | N | MaxRetryDelay specifies the maximum delay allowed before retrying an operation. Typically the value is greater than or equal to the value specified in RetryDelay. Defaults to `120` seconds; `"-1"` disables the limit | `120s`
-| subscribePollInterval | N | subscribePollInterval specifies the poll interval in nanoseconds for polling the subscribed keys for any changes. This will be updated in the future to Go Time format. Default polling interval is set to `24` hours. | `24h`
+| Field | Required | Details | Example |
+|----------------------------|:--------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------|
+| connectionString | Y* | Connection String for the Azure App Configuration instance. No Default. Can be `secretKeyRef` to use a secret reference. *Mutually exclusive with host field. *Not to be used when [Azure Authentication](https://docs.dapr.io/developing-applications/integrations/azure/azure-authentication/authenticating-azure/) is used | `Endpoint=https://foo.azconfig.io;Id=osOX-l9-s0:sig;Secret=00000000000000000000000000000000000000000000`
+| host | N* | Endpoint for the Azure App Configuration instance. No Default. *Mutually exclusive with connectionString field. *To be used when [Azure Authentication](https://docs.dapr.io/developing-applications/integrations/azure/azure-authentication/authenticating-azure/) is used | `https://dapr.azconfig.io`
+| maxRetries | N | Maximum number of retries before giving up. Defaults to `3` | `5`, `10`
+| retryDelay | N | RetryDelay specifies the initial amount of delay to use before retrying an operation. The delay increases exponentially with each retry up to the maximum specified by MaxRetryDelay. Defaults to `4` seconds; `"-1"` disables delay between retries. | `4s`
+| maxRetryDelay | N | MaxRetryDelay specifies the maximum delay allowed before retrying an operation. Typically the value is greater than or equal to the value specified in RetryDelay. Defaults to `120` seconds; `"-1"` disables the limit | `120s`
+| subscribePollInterval | N | subscribePollInterval specifies the poll interval in nanoseconds for polling the subscribed keys for any changes. This will be updated in the future to Go Time format. Default polling interval is set to `24` hours. | `24h`
**Note**: either `host` or `connectionString` must be specified.
diff --git a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/redis-configuration-store.md b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/redis-configuration-store.md
index e65ddac614f..09d935512c0 100644
--- a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/redis-configuration-store.md
+++ b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/redis-configuration-store.md
@@ -39,13 +39,13 @@ The above example uses secrets as plain strings. It is recommended to use a secr
| Field | Required | Details | Example |
|--------------------|:--------:|---------|---------|
-| redisHost | Y | Output | The Redis host address | `"localhost:6379"` |
+| redisHost | Y | Output | Connection-string for the redis host. If `"redisType"` is `"cluster"`, it can be multiple hosts separated by commas or just a single host. When using Redis Sentinel (`"failover"` is `"true"`), multiple sentinel addresses can also be provided as comma-separated values. | `localhost:6379`, `redis-master.default.svc.cluster.local:6379`, `sentinel1:26379,sentinel2:26379,sentinel3:26379` |
| redisPassword | N | Output | The Redis password | `"password"` |
| redisUsername | N | Output | Username for Redis host. Defaults to empty. Make sure your Redis server version is 6 or above, and have created acl rule correctly. | `"username"` |
| enableTLS | N | Output | If the Redis instance supports TLS with public certificates it can be configured to enable or disable TLS. Defaults to `"false"` | `"true"`, `"false"` |
| clientCert | N | Output | The content of the client certificate, used for Redis instances that require client-side certificates. Must be used with `clientKey` and `enableTLS` must be set to true. It is recommended to use a secret store as described [here]({{% ref component-secrets.md %}}) | `"----BEGIN CERTIFICATE-----\nMIIC..."` |
| clientKey | N | Output | The content of the client private key, used in conjunction with `clientCert` for authentication. It is recommended to use a secret store as described [here]({{% ref component-secrets.md %}}) | `"----BEGIN PRIVATE KEY-----\nMIIE..."` |
-| failover | N | Output | Property to enable failover configuration. Needs sentinelMasterName to be set. Defaults to `"false"` | `"true"`, `"false"`
+| failover | N | Output | Property to enable failover configuration. Needs sentinelMasterName to be set. When enabled, redisHost should contain the sentinel addresses. Defaults to `"false"` | `"true"`, `"false"`
| sentinelMasterName | N | Output | The Sentinel master name. See [Redis Sentinel Documentation](https://redis.io/docs/reference/sentinel-clients/) | `""`, `"mymaster"`
| sentinelUsername | N | Output | Username for Redis Sentinel. Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"username"`
| sentinelPassword | N | Output | Password for Redis Sentinel. Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"password"`
@@ -143,6 +143,29 @@ You can use [Helm](https://helm.sh/) to quickly create a Redis instance in our K
{{< /tabpane >}}
+## Redis Sentinel configuration
+
+When using Redis Sentinel for high availability, set `redisType` to `"node"`, enable failover mode with `failover: "true"`, and provide the sentinel master name. Multiple sentinel addresses can be specified as a comma-separated list in the `redisHost` field for redundancy.
+
+ ```yaml
+ apiVersion: dapr.io/v1alpha1
+ kind: Component
+ metadata:
+ name: redis-config
+ spec:
+ type: configuration.redis
+ version: v1
+ metadata:
+ - name: redisHost
+ value: "sentinel1:26379,sentinel2:26379,sentinel3:26379"
+ - name: redisType
+ value: "node"
+ - name: failover
+ value: "true"
+ - name: sentinelMasterName
+ value: "mymaster"
+ ```
+
## Related links
- [Basic schema for a Dapr component]({{% ref component-schema %}})
- Read [How-To: Manage configuration from a store]({{% ref "howto-manage-configuration" %}}) for instructions on how to use Redis as a configuration store.
diff --git a/daprdocs/content/en/reference/components-reference/supported-conversation/_index.md b/daprdocs/content/en/reference/components-reference/supported-conversation/_index.md
index 179162b3bb2..c6c862960b9 100644
--- a/daprdocs/content/en/reference/components-reference/supported-conversation/_index.md
+++ b/daprdocs/content/en/reference/components-reference/supported-conversation/_index.md
@@ -2,7 +2,7 @@
type: docs
title: "Conversation component specs"
linkTitle: "Conversation"
-weight: 9000
+weight: 3000
description: The supported conversation components that interface with Dapr
no_list: true
---
diff --git a/daprdocs/content/en/reference/components-reference/supported-conversation/openai.md b/daprdocs/content/en/reference/components-reference/supported-conversation/openai.md
index 795f9877909..f1c29e2b5f3 100644
--- a/daprdocs/content/en/reference/components-reference/supported-conversation/openai.md
+++ b/daprdocs/content/en/reference/components-reference/supported-conversation/openai.md
@@ -46,6 +46,48 @@ The above example uses secrets as plain strings. It is recommended to use a secr
| `apiType` | N | Specifies the API provider type. Required when using a provider that does not follow the default OpenAI API endpoint conventions. | `azure` |
| `apiVersion`| N | The API version to use. Required when the `apiType` is set to `azure`. | `2025-04-01-preview` |
+## Azure OpenAI Configuration
+
+To configure the OpenAI component to connect to Azure OpenAI, you need to set the following metadata fields which are required for Azure's API format.
+
+### Required fields for Azure OpenAI
+
+When connecting to Azure OpenAI, the following fields are **required**:
+
+- `apiType`: Must be set to `azure` to enable Azure OpenAI compatibility
+- `endpoint`: Your Azure OpenAI resource endpoint URL (e.g., `https://your-resource.openai.azure.com/`)
+- `apiVersion`: The API version for your Azure OpenAI deployment (e.g., `2025-01-01-preview`)
+- `key`: Your Azure OpenAI API key
+
+Get your configuration values from: https://ai.azure.com/
+
+### Azure OpenAI component example
+
+```yaml
+apiVersion: dapr.io/v1alpha1
+kind: Component
+metadata:
+ name: azure-openai
+spec:
+ type: conversation.openai
+ metadata:
+ - name: key
+ value: "your-azure-openai-api-key"
+ - name: model
+ value: "gpt-4.1-nano" # Default: gpt-4.1-nano
+ - name: endpoint
+ value: "https://your-resource.openai.azure.com/"
+ - name: apiType
+ value: "azure"
+ - name: apiVersion
+ value: "2025-01-01-preview"
+```
+
+
+{{% alert title="Note" color="primary" %}}
+When using Azure OpenAI, both `endpoint` and `apiVersion` are mandatory fields. The component returns an error if either field is missing when `apiType` is set to `azure`.
+{{% /alert %}}
+
## Related links
- [Conversation API overview]({{% ref conversation-overview.md %}})
diff --git a/daprdocs/content/en/reference/components-reference/supported-cryptography/_index.md b/daprdocs/content/en/reference/components-reference/supported-cryptography/_index.md
index c7789d5e4a6..160d3e3427c 100644
--- a/daprdocs/content/en/reference/components-reference/supported-cryptography/_index.md
+++ b/daprdocs/content/en/reference/components-reference/supported-cryptography/_index.md
@@ -2,7 +2,7 @@
type: docs
title: "Cryptography component specs"
linkTitle: "Cryptography"
-weight: 8000
+weight: 4000
description: The supported cryptography components that interface with Dapr
no_list: true
---
diff --git a/daprdocs/content/en/reference/components-reference/supported-cryptography/local-storage.md b/daprdocs/content/en/reference/components-reference/supported-cryptography/local-storage.md
index 6d430678278..c8955c0669a 100644
--- a/daprdocs/content/en/reference/components-reference/supported-cryptography/local-storage.md
+++ b/daprdocs/content/en/reference/components-reference/supported-cryptography/local-storage.md
@@ -32,8 +32,8 @@ metadata:
name: mycrypto
spec:
type: crypto.dapr.localstorage
+ version: v1
metadata:
- version: v1
- name: path
value: /path/to/folder/
```
diff --git a/daprdocs/content/en/reference/components-reference/supported-locks/_index.md b/daprdocs/content/en/reference/components-reference/supported-locks/_index.md
index 134e75360dc..588e5acc4ff 100644
--- a/daprdocs/content/en/reference/components-reference/supported-locks/_index.md
+++ b/daprdocs/content/en/reference/components-reference/supported-locks/_index.md
@@ -2,7 +2,7 @@
type: docs
title: "Lock component specs"
linkTitle: "Locks"
-weight: 7000
+weight: 5000
description: The supported locks that interface with Dapr
no_list: true
---
diff --git a/daprdocs/content/en/reference/components-reference/supported-locks/redis-lock.md b/daprdocs/content/en/reference/components-reference/supported-locks/redis-lock.md
index 3beb290ee9a..daacb43e964 100644
--- a/daprdocs/content/en/reference/components-reference/supported-locks/redis-lock.md
+++ b/daprdocs/content/en/reference/components-reference/supported-locks/redis-lock.md
@@ -77,15 +77,15 @@ The above example uses secrets as plain strings. It is recommended to use a secr
| Field | Required | Details | Example |
|-----------------------|:--------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------|
-| redisHost | Y | Connection string for the redis host | `localhost:6379`, `redis-master.default.svc.cluster.local:6379` |
+| redisHost | Y | Connection-string for the redis host. If `"redisType"` is `"cluster"`, it can be multiple hosts separated by commas or just a single host. When using Redis Sentinel (`"failover"` is `"true"`), multiple sentinel addresses can also be provided as comma-separated values. | `localhost:6379`, `redis-master.default.svc.cluster.local:6379`, `sentinel1:26379,sentinel2:26379,sentinel3:26379` |
| redisPassword | N | Password for Redis host. No Default. Can be `secretKeyRef` to use a secret reference | `""`, `"KeFg23!"` |
| redisUsername | N | Username for Redis host. Defaults to empty. Make sure your redis server version is 6 or above, and have created acl rule correctly. | `""`, `"default"` |
| useEntraID | N | Implements EntraID support for Azure Cache for Redis. Before enabling this:
The `redisHost` name must be specified in the form of `"server:port"`
TLS must be enabled
Learn more about this setting under [Create a Redis instance > Azure Cache for Redis]({{% ref "#setup-redis" %}}) | `"true"`, `"false"` |
| enableTLS | N | If the Redis instance supports TLS with public certificates, can be configured to be enabled or disabled. Defaults to `"false"` | `"true"`, `"false"` |
| maxRetries | N | Maximum number of retries before giving up. Defaults to `3` | `5`, `10` |
| maxRetryBackoff | N | Maximum backoff between each retry. Defaults to `2` seconds; `"-1"` disables backoff. | `3000000000` |
-| failover | N | Enable failover configuration. Needs sentinelMasterName to be set. The redisHost should be the sentinel host address. See [Redis Sentinel Documentation](https://redis.io/docs/manual/sentinel/). Defaults to `"false"` | `"true"`, `"false"` |
-| sentinelMasterName | N | The sentinel master name. See [Redis Sentinel Documentation](https://redis.io/docs/manual/sentinel/) | `"mymaster"` |
+| failover | N | Property to enable failover configuration. Needs sentinelMasterName to be set. When enabled, redisHost should contain the sentinel addresses. Defaults to `"false"` | `"true"`, `"false"` |
+| sentinelMasterName | N | The sentinel master name. See [Redis Sentinel Documentation](https://redis.io/docs/latest/operate/oss_and_stack/management/sentinel/) | `"mymaster"` |
| sentinelPassword | N | Password for Redis Sentinel. No Default. Applicable only when “failover” is true, and Redis Sentinel has authentication enabled | `""`, `"KeFg23!"`
| redeliverInterval | N | The interval between checking for pending messages for redelivery. Defaults to `"60s"`. `"0"` disables redelivery. | `"30s"` |
| processingTimeout | N | The amount of time a message must be pending before attempting to redeliver it. Defaults to `"15s"`. `"0"` disables redelivery. | `"30s"` |
@@ -185,7 +185,7 @@ You can use [Helm](https://helm.sh/) to quickly create a Redis instance in our K
## Redis Sentinel behavior
-Use `redisType: "node"` when connecting to Redis Sentinel. Additionally, set `failover` to `"true"` and `sentinelMasterName` to the name of the master node.
+Use `redisType: "node"` when connecting to Redis Sentinel. Additionally, set `failover` to `"true"` and `sentinelMasterName` to the name of the master node. Multiple sentinel addresses can be specified as a comma-separated list in the `redisHost` field for redundancy.
Failover characteristics:
- Lock loss during failover: Locks may be lost during master failover if they weren't replicated to the promoted replica before the original master failed
diff --git a/daprdocs/content/en/reference/components-reference/supported-middleware/_index.md b/daprdocs/content/en/reference/components-reference/supported-middleware/_index.md
index ddb92d740ed..995651d14db 100644
--- a/daprdocs/content/en/reference/components-reference/supported-middleware/_index.md
+++ b/daprdocs/content/en/reference/components-reference/supported-middleware/_index.md
@@ -2,7 +2,7 @@
type: docs
title: "Middleware component specs"
linkTitle: "Middleware"
-weight: 10000
+weight: 6000
description: List of all the supported middleware components that can be injected in Dapr's processing pipeline.
no_list: true
aliases:
diff --git a/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md b/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md
index c27f6f3fea8..ac0beb524dc 100644
--- a/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md
+++ b/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md
@@ -2,7 +2,7 @@
type: docs
title: "Name resolution provider component specs"
linkTitle: "Name resolution"
-weight: 9000
+weight: 7000
description: The supported name resolution providers to enable Dapr service invocation
no_list: true
---
diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/_index.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/_index.md
index 876e7bedc16..34785d7d65c 100644
--- a/daprdocs/content/en/reference/components-reference/supported-pubsub/_index.md
+++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/_index.md
@@ -2,7 +2,7 @@
type: docs
title: "Pub/sub brokers component specs"
linkTitle: "Pub/sub brokers"
-weight: 1000
+weight: 8000
description: The supported pub/sub brokers that interface with Dapr
aliases:
- "/operations/components/setup-pubsub/supported-pubsub/"
diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md
index 8e4e95d74ea..b806462724d 100644
--- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md
+++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md
@@ -102,7 +102,7 @@ spec:
| clientCert | N | Client certificate, required for `authType` `mtls`. Can be `secretKeyRef` to use a secret reference | `"-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----"`
| clientKey | N | Client key, required for `authType` `mtls` Can be `secretKeyRef` to use a secret reference | `"-----BEGIN RSA PRIVATE KEY-----\n\n-----END RSA PRIVATE KEY-----"`
| skipVerify | N | Skip TLS verification, this is not recommended for use in production. Defaults to `"false"` | `"true"`, `"false"` |
-| disableTls | N | Disable TLS for transport security. To disable, you're not required to set value to `"true"`. This is not recommended for use in production. Defaults to `"false"`. | `"true"`, `"false"` |
+| disableTls | N | Disable TLS for transport security. To disable, you're required to set value to `"true"`. This is not recommended for use in production. Defaults to `"false"`. | `"true"`, `"false"` |
| oidcTokenEndpoint | N | Full URL to an OAuth2 identity provider access token endpoint. Required when `authType` is set to `oidc` | "https://identity.example.com/v1/token" |
| oidcClientID | N | The OAuth2 client ID that has been provisioned in the identity provider. Required when `authType` is set to `oidc` | `dapr-kafka` |
| oidcClientSecret | N | The OAuth2 client secret that has been provisioned in the identity provider: Required when `authType` is set to `oidc` | `"KeFg23!"` |
diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-azure-eventhubs.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-azure-eventhubs.md
index 21a50e96b55..7b21a4817d0 100644
--- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-azure-eventhubs.md
+++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-azure-eventhubs.md
@@ -62,21 +62,21 @@ The above example uses secrets as plain strings. It is recommended to use a secr
## Spec metadata fields
-| Field | Required | Details | Example |
-|--------------------|:--------:|---------|---------|
-| `connectionString` | Y* | Connection string for the Event Hub or the Event Hub namespace. * Mutally exclusive with `eventHubNamespace` field. * Required when not using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key};EntityPath={EventHub}"` or `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key}"`
-| `eventHubNamespace` | Y* | The Event Hub Namespace name. * Mutally exclusive with `connectionString` field. * Required when using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"namespace"`
+| Field | Required | Details | Example |
+|--------------------|:--------:|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------|
+| `connectionString` | Y* | Connection string for the Event Hub or the Event Hub namespace. * Mutually exclusive with `eventHubNamespace` field. * Required when not using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key};EntityPath={EventHub}"` or `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key}"`
+| `eventHubNamespace` | Y* | The Event Hub Namespace name. * Mutually exclusive with `connectionString` field. * Required when using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"namespace"`
| `consumerID` | N | Consumer ID (consumer tag) organizes one or more consumers into a group. Consumers with the same consumer ID work as one virtual consumer; for example, a message is processed only once by one of the consumers in the group. If the `consumerID` is not provided, the Dapr runtime set it to the Dapr application ID (`appID`) value. | Can be set to string value (such as `"channel1"` in the example above) or string format value (such as `"{podName}"`, etc.). [See all of template tags you can use in your component metadata.]({{% ref "component-schema.md#templated-metadata-values" %}})
-| `enableEntityManagement` | N | Boolean value to allow management of the EventHub namespace and storage account. Default: `false` | `"true", "false"`
-| `enableInOrderMessageDelivery` | N | Input/Output | Boolean value to allow messages to be delivered in the order in which they were posted. This assumes `partitionKey` is set when publishing or posting to ensure ordering across partitions. Default: `false` | `"true"`, `"false"`
-| `storageAccountName` | Y | Storage account name to use for the checkpoint store. |`"myeventhubstorage"`
-| `storageAccountKey` | Y* | Storage account key for the checkpoint store account. * When using Microsoft Entra ID, it's possible to omit this if the service principal has access to the storage account too. | `"112233445566778899"`
-| `storageConnectionString` | Y* | Connection string for the checkpoint store, alternative to specifying `storageAccountKey` | `"DefaultEndpointsProtocol=https;AccountName=myeventhubstorage;AccountKey="`
-| `storageContainerName` | Y | Storage container name for the storage account name. | `"myeventhubstoragecontainer"`
-| `resourceGroupName` | N | Name of the resource group the Event Hub namespace is part of. Required when entity management is enabled | `"test-rg"`
-| `subscriptionID` | N | Azure subscription ID value. Required when entity management is enabled | `"azure subscription id"`
-| `partitionCount` | N | Number of partitions for the new Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"2"`
-| `messageRetentionInDays` | N | Number of days to retain messages for in the newly created Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"90"`
+| `enableEntityManagement` | N | Boolean value to allow management of the EventHub namespace and storage account. Default: `false` | `"true", "false"`
+| `enableInOrderMessageDelivery` | N | Boolean value to allow messages to be delivered in the order in which they were posted. This assumes `partitionKey` is set when publishing or posting to ensure ordering across partitions. Default: `false` | `"true"`, `"false"`
+| `storageAccountName` | Y | Storage account name to use for the checkpoint store. |`"myeventhubstorage"`
+| `storageAccountKey` | Y* | Storage account key for the checkpoint store account. * When using Microsoft Entra ID, it's possible to omit this if the service principal has access to the storage account too. | `"112233445566778899"`
+| `storageConnectionString` | Y* | Connection string for the checkpoint store, alternative to specifying `storageAccountKey` | `"DefaultEndpointsProtocol=https;AccountName=myeventhubstorage;AccountKey="`
+| `storageContainerName` | Y | Storage container name for the storage account name. | `"myeventhubstoragecontainer"`
+| `resourceGroupName` | N | Name of the resource group the Event Hub namespace is part of. Required when entity management is enabled | `"test-rg"`
+| `subscriptionID` | N | Azure subscription ID value. Required when entity management is enabled | `"azure subscription id"`
+| `partitionCount` | N | Number of partitions for the new Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"2"`
+| `messageRetentionInDays` | N | Number of days to retain messages for in the newly created Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"90"`
### Microsoft Entra ID authentication
diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-rabbitmq.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-rabbitmq.md
index c3b4e69a8f5..cb65415ba7c 100644
--- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-rabbitmq.md
+++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-rabbitmq.md
@@ -76,35 +76,35 @@ The above example uses secrets as plain strings. It is recommended to use a secr
## Spec metadata fields
-| Field | Required | Details | Example |
-|--------------------|:--------:|---------|---------|
-| connectionString | Y* | The RabbitMQ connection string. *Mutally exclusive with protocol, hostname, username, password field | `amqp://user:pass@localhost:5672` |
-| protocol | N* | The RabbitMQ protocol. *Mutally exclusive with connectionString field | `amqp` |
-| hostname | N* | The RabbitMQ hostname. *Mutally exclusive with connectionString field | `localhost` |
-| username | N* | The RabbitMQ username. *Mutally exclusive with connectionString field | `username` |
-| password | N* | The RabbitMQ password. *Mutally exclusive with connectionString field | `password` |
+| Field | Required | Details | Example |
+|--------------------|:--------:|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------|
+| connectionString | Y* | The RabbitMQ connection string. *Mutually exclusive with protocol, hostname, username, password field | `amqp://user:pass@localhost:5672` |
+| protocol | N* | The RabbitMQ protocol. *Mutually exclusive with connectionString field | `amqp` |
+| hostname | N* | The RabbitMQ hostname. *Mutually exclusive with connectionString field | `localhost` |
+| username | N* | The RabbitMQ username. *Mutually exclusive with connectionString field | `username` |
+| password | N* | The RabbitMQ password. *Mutually exclusive with connectionString field | `password` |
| consumerID | N | Consumer ID (consumer tag) organizes one or more consumers into a group. Consumers with the same consumer ID work as one virtual consumer; for example, a message is processed only once by one of the consumers in the group. If the `consumerID` is not provided, the Dapr runtime set it to the Dapr application ID (`appID`) value. | Can be set to string value (such as `"channel1"` in the example above) or string format value (such as `"{podName}"`, etc.). [See all of template tags you can use in your component metadata.]({{% ref "component-schema.md#templated-metadata-values" %}})
-| durable | N | Whether or not to use [durable](https://www.rabbitmq.com/queues.html#durability) queues. Defaults to `"false"` | `"true"`, `"false"`
-| deletedWhenUnused | N | Whether or not the queue should be configured to [auto-delete](https://www.rabbitmq.com/queues.html) Defaults to `"true"` | `"true"`, `"false"`
-| autoAck | N | Whether or not the queue consumer should [auto-ack](https://www.rabbitmq.com/confirms.html) messages. Defaults to `"false"` | `"true"`, `"false"`
-| deliveryMode | N | Persistence mode when publishing messages. Defaults to `"0"`. RabbitMQ treats `"2"` as persistent, all other numbers as non-persistent | `"0"`, `"2"`
-| requeueInFailure | N | Whether or not to requeue when sending a [negative acknowledgement](https://www.rabbitmq.com/nack.html) in case of a failure. Defaults to `"false"` | `"true"`, `"false"`
-| prefetchCount | N | Number of messages to [prefetch](https://www.rabbitmq.com/consumer-prefetch.html). Consider changing this to a non-zero value for production environments. Defaults to `"0"`, which means that all available messages will be pre-fetched. | `"2"`
-| publisherConfirm | N | If enabled, client waits for [publisher confirms](https://www.rabbitmq.com/confirms.html#publisher-confirms) after publishing a message. Defaults to `"false"` | `"true"`, `"false"`
-| reconnectWait | N | How long to wait (in seconds) before reconnecting if a connection failure occurs | `"0"`
-| concurrencyMode | N | `parallel` is the default, and allows processing multiple messages in parallel (limited by the `app-max-concurrency` annotation, if configured). Set to `single` to disable parallel processing. In most situations there's no reason to change this. | `parallel`, `single`
-| enableDeadLetter | N | Enable forwarding Messages that cannot be handled to a dead-letter topic. Defaults to `"false"` | `"true"`, `"false"` |
-| maxLen | N | The maximum number of messages of a queue and its dead letter queue (if dead letter enabled). If both `maxLen` and `maxLenBytes` are set then both will apply; whichever limit is hit first will be enforced. Defaults to no limit. | `"1000"` |
-| maxLenBytes | N | Maximum length in bytes of a queue and its dead letter queue (if dead letter enabled). If both `maxLen` and `maxLenBytes` are set then both will apply; whichever limit is hit first will be enforced. Defaults to no limit. | `"1048576"` |
-| exchangeKind | N | Exchange kind of the rabbitmq exchange. Defaults to `"fanout"`. | `"fanout"`,`"topic"` |
-| saslExternal | N | With TLS, should the username be taken from an additional field (for example, CN). See [RabbitMQ Authentication Mechanisms](https://www.rabbitmq.com/access-control.html#mechanisms). Defaults to `"false"`. | `"true"`, `"false"` |
-| ttlInSeconds | N | Set message TTL at the component level, which can be overwritten by message level TTL per request. | `"60"` |
-| caCert | Required for using TLS | Certificate Authority (CA) certificate in PEM format for verifying server TLS certificates. | `"-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----"`
-| clientCert | Required for using TLS | TLS client certificate in PEM format. Must be used with `clientKey`. | `"-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----"`
-| clientKey | Required for using TLS | TLS client key in PEM format. Must be used with `clientCert`. Can be `secretKeyRef` to use a secret reference. | `"-----BEGIN RSA PRIVATE KEY-----\n\n-----END RSA PRIVATE KEY-----"`
-| clientName | N | This RabbitMQ [client-provided connection name](https://www.rabbitmq.com/connections.html#client-provided-names) is a custom identifier. If set, the identifier is mentioned in RabbitMQ server log entries and management UI. Can be set to {uuid}, {podName}, or {appID}, which is replaced by Dapr runtime to the real value. | `"app1"`, `{uuid}`, `{podName}`, `{appID}`
-| heartBeat | N | Defines the heartbeat interval with the server, detecting the aliveness of the peer TCP connection with the RabbitMQ server. Defaults to `10s` . | `"10s"`
-| `publishMessagePropertiesToMetadata` | N | Whether to publish AMQP message properties (headers, message ID, etc.) to the metadata. | "true", "false"
+| durable | N | Whether or not to use [durable](https://www.rabbitmq.com/queues.html#durability) queues. Defaults to `"false"` | `"true"`, `"false"`
+| deletedWhenUnused | N | Whether or not the queue should be configured to [auto-delete](https://www.rabbitmq.com/queues.html) Defaults to `"true"` | `"true"`, `"false"`
+| autoAck | N | Whether or not the queue consumer should [auto-ack](https://www.rabbitmq.com/confirms.html) messages. Defaults to `"false"` | `"true"`, `"false"`
+| deliveryMode | N | Persistence mode when publishing messages. Defaults to `"0"`. RabbitMQ treats `"2"` as persistent, all other numbers as non-persistent | `"0"`, `"2"`
+| requeueInFailure | N | Whether or not to requeue when sending a [negative acknowledgement](https://www.rabbitmq.com/nack.html) in case of a failure. Defaults to `"false"` | `"true"`, `"false"`
+| prefetchCount | N | Number of messages to [prefetch](https://www.rabbitmq.com/consumer-prefetch.html). Consider changing this to a non-zero value for production environments. Defaults to `"0"`, which means that all available messages will be pre-fetched. | `"2"`
+| publisherConfirm | N | If enabled, client waits for [publisher confirms](https://www.rabbitmq.com/confirms.html#publisher-confirms) after publishing a message. Defaults to `"false"` | `"true"`, `"false"`
+| reconnectWait | N | How long to wait (in seconds) before reconnecting if a connection failure occurs | `"0"`
+| concurrencyMode | N | `parallel` is the default, and allows processing multiple messages in parallel (limited by the `app-max-concurrency` annotation, if configured). Set to `single` to disable parallel processing. In most situations there's no reason to change this. | `parallel`, `single`
+| enableDeadLetter | N | Enable forwarding Messages that cannot be handled to a dead-letter topic. Defaults to `"false"` | `"true"`, `"false"` |
+| maxLen | N | The maximum number of messages of a queue and its dead letter queue (if dead letter enabled). If both `maxLen` and `maxLenBytes` are set then both will apply; whichever limit is hit first will be enforced. Defaults to no limit. | `"1000"` |
+| maxLenBytes | N | Maximum length in bytes of a queue and its dead letter queue (if dead letter enabled). If both `maxLen` and `maxLenBytes` are set then both will apply; whichever limit is hit first will be enforced. Defaults to no limit. | `"1048576"` |
+| exchangeKind | N | Exchange kind of the rabbitmq exchange. Defaults to `"fanout"`. | `"fanout"`,`"topic"` |
+| saslExternal | N | With TLS, should the username be taken from an additional field (for example, CN). See [RabbitMQ Authentication Mechanisms](https://www.rabbitmq.com/access-control.html#mechanisms). Defaults to `"false"`. | `"true"`, `"false"` |
+| ttlInSeconds | N | Set message TTL at the component level, which can be overwritten by message level TTL per request. | `"60"` |
+| caCert | Required for using TLS | Certificate Authority (CA) certificate in PEM format for verifying server TLS certificates. | `"-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----"`
+| clientCert | Required for using TLS | TLS client certificate in PEM format. Must be used with `clientKey`. | `"-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----"`
+| clientKey | Required for using TLS | TLS client key in PEM format. Must be used with `clientCert`. Can be `secretKeyRef` to use a secret reference. | `"-----BEGIN RSA PRIVATE KEY-----\n\n-----END RSA PRIVATE KEY-----"`
+| clientName | N | This RabbitMQ [client-provided connection name](https://www.rabbitmq.com/connections.html#client-provided-names) is a custom identifier. If set, the identifier is mentioned in RabbitMQ server log entries and management UI. Can be set to {uuid}, {podName}, or {appID}, which is replaced by Dapr runtime to the real value. | `"app1"`, `{uuid}`, `{podName}`, `{appID}`
+| heartBeat | N | Defines the heartbeat interval with the server, detecting the aliveness of the peer TCP connection with the RabbitMQ server. Defaults to `10s` . | `"10s"`
+| `publishMessagePropertiesToMetadata` | N | Whether to publish AMQP message properties (headers, message ID, etc.) to the metadata. | `"true"`, `"false"`
## Communication using TLS
diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-redis-pubsub.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-redis-pubsub.md
index 6092d96a424..d4e9b566652 100644
--- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-redis-pubsub.md
+++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-redis-pubsub.md
@@ -40,7 +40,7 @@ The above example uses secrets as plain strings. It is recommended to use a secr
| Field | Required | Details | Example |
|--------------------|:--------:|---------|---------|
-| redisHost | Y | Connection-string for the redis host. If `"redisType"` is `"cluster"` it can be multiple hosts separated by commas or just a single host | `localhost:6379`, `redis-master.default.svc.cluster.local:6379`
+| redisHost | Y | Connection-string for the redis host. If `"redisType"` is `"cluster"`, it can be multiple hosts separated by commas or just a single host. When using Redis Sentinel (`"failover"` is `"true"`), multiple sentinel addresses can also be provided as comma-separated values. | `localhost:6379`, `redis-master.default.svc.cluster.local:6379`, `sentinel1:26379,sentinel2:26379,sentinel3:26379`
| redisPassword | N | Password for Redis host. No Default. Can be `secretKeyRef` to use a secret reference | `""`, `"KeFg23!"`
| redisUsername | N | Username for Redis host. Defaults to empty. Make sure your redis server version is 6 or above, and have created acl rule correctly. | `""`, `"default"`
| consumerID | N | The consumer group ID. | Can be set to string value (such as `"channel1"` in the example above) or string format value (such as `"{podName}"`, etc.). [See all of template tags you can use in your component metadata.]({{% ref "component-schema.md#templated-metadata-values" %}})
@@ -66,8 +66,8 @@ The above example uses secrets as plain strings. It is recommended to use a secr
| minIdleConns | N | Minimum number of idle connections to keep open in order to avoid the performance degradation associated with creating new connections. Defaults to `"0"`. | `"2"`
| idleCheckFrequency | N | Frequency of idle checks made by idle connections reaper. Default is `"1m"`. `"-1"` disables idle connections reaper. | `"-1"`
| idleTimeout | N | Amount of time after which the client closes idle connections. Should be less than server's timeout. Default is `"5m"`. `"-1"` disables idle timeout check. | `"10m"`
-| failover | N | Property to enable failover configuration. Needs sentinelMasterName to be set. Defaults to `"false"` | `"true"`, `"false"`
-| sentinelMasterName | N | The sentinel master name. See [Redis Sentinel Documentation](https://redis.io/docs/manual/sentinel/) | `""`, `"mymaster"`
+| failover | N | Property to enable failover configuration. Needs sentinelMasterName to be set. When enabled, redisHost should contain the sentinel addresses. Defaults to `"false"` | `"true"`, `"false"`
+| sentinelMasterName | N | The sentinel master name. See [Redis Sentinel Documentation](https://redis.io/docs/latest/operate/oss_and_stack/management/sentinel/) | `""`, `"mymaster"`
| sentinelUsername | N | Username for Redis Sentinel. Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"username"`
| sentinelPassword | N | Password for Redis Sentinel. Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"password"`
| maxLenApprox | N | Maximum number of items inside a stream.The old entries are automatically evicted when the specified length is reached, so that the stream is left at a constant size. Defaults to unlimited. | `"10000"`
@@ -113,6 +113,7 @@ You can use [Helm](https://helm.sh/) to quickly create a Redis instance in our K
- name: redisPassword
value: "lhDOkwTlp0"
```
+
{{% /tab %}}
{{% tab "AWS" %}}
@@ -159,6 +160,29 @@ You can use [Helm](https://helm.sh/) to quickly create a Redis instance in our K
The Dapr CLI automatically deploys a local redis instance in self hosted mode as part of the `dapr init` command.
{{% /alert %}}
+## Redis Sentinel configuration
+
+When using Redis Sentinel for high availability, set `redisType` to `"node"`, enable failover mode with `failover: "true"`, and provide the sentinel master name. Multiple sentinel addresses can be specified as a comma-separated list in the `redisHost` field for redundancy.
+
+```yaml
+apiVersion: dapr.io/v1alpha1
+kind: Component
+metadata:
+  name: redis-pubsub
+spec:
+  type: pubsub.redis
+  version: v1
+  metadata:
+  - name: redisHost
+    value: "sentinel1:26379,sentinel2:26379,sentinel3:26379"
+  - name: redisType
+    value: "node"
+  - name: failover
+    value: "true"
+  - name: sentinelMasterName
+    value: "mymaster"
+```
+
## Related links
- [Basic schema for a Dapr component]({{% ref component-schema %}})
- Read [this guide]({{% ref "howto-publish-subscribe.md#step-2-publish-a-topic" %}}) for instructions on configuring pub/sub components
diff --git a/daprdocs/content/en/reference/components-reference/supported-secret-stores/_index.md b/daprdocs/content/en/reference/components-reference/supported-secret-stores/_index.md
index d44055ae9d5..03d17ca02cb 100644
--- a/daprdocs/content/en/reference/components-reference/supported-secret-stores/_index.md
+++ b/daprdocs/content/en/reference/components-reference/supported-secret-stores/_index.md
@@ -2,7 +2,7 @@
type: docs
title: "Secret store component specs"
linkTitle: "Secret stores"
-weight: 5000
+weight: 9000
description: The supported secret stores that interface with Dapr
aliases:
- "/operations/components/setup-secret-store/supported-secret-stores/"
diff --git a/daprdocs/content/en/reference/components-reference/supported-secret-stores/aws-secret-manager.md b/daprdocs/content/en/reference/components-reference/supported-secret-stores/aws-secret-manager.md
index d59af95a5a3..0150633974b 100644
--- a/daprdocs/content/en/reference/components-reference/supported-secret-stores/aws-secret-manager.md
+++ b/daprdocs/content/en/reference/components-reference/supported-secret-stores/aws-secret-manager.md
@@ -2,7 +2,7 @@
type: docs
title: "AWS Secrets Manager"
linkTitle: "AWS Secrets Manager"
-description: Detailed information on the secret store component
+description: Detailed information on the AWS Secrets Manager secret store component
aliases:
- "/operations/components/setup-secret-store/supported-secret-stores/aws-secret-manager/"
---
@@ -30,6 +30,8 @@ spec:
value: "[aws_secret_key]"
- name: sessionToken
value: "[aws_session_token]"
+ - name: multipleKeyValuesPerSecret
+ value: "false"
```
{{% alert title="Warning" color="warning" %}}
The above example uses secrets as plain strings. It is recommended to use a local secret store such as [Kubernetes secret store]({{% ref kubernetes-secret-store.md %}}) or a [local file]({{% ref file-secret-store.md %}}) to bootstrap secure key storage.
@@ -43,6 +45,7 @@ The above example uses secrets as plain strings. It is recommended to use a loca
| accessKey | Y | The AWS Access Key to access this resource | `"key"` |
| secretKey | Y | The AWS Secret Access Key to access this resource | `"secretAccessKey"` |
| sessionToken | N | The AWS session token to use | `"sessionToken"` |
+| multipleKeyValuesPerSecret | N | When set to `"true"`, allows multiple key-value pairs to be stored in a single secret. Defaults to `"false"` | `"true"` |
{{% alert title="Important" color="warning" %}}
When running the Dapr sidecar (daprd) with your application on EKS (AWS Kubernetes), if you're using a node/pod that has already been attached to an IAM policy defining access to AWS resources, you **must not** provide AWS access-key, secret-key, and tokens in the definition of the component spec you're using.
@@ -57,6 +60,46 @@ Query Parameter | Description
`metadata.version_id` | Version for the given secret key.
`metadata.version_stage` | Version stage for the given secret key.
+## Configure multiple key-values per secret
+
+The `multipleKeyValuesPerSecret` flag determines whether the secret store presents a single value or multiple key-value pairs per secret.
+
+### Single value per secret
+
+If `multipleKeyValuesPerSecret` is `false` (default), AWS Secrets Manager returns the secret value as-is. Given a secret named `database-credentials` with the following JSON content:
+
+```json
+{
+ "username": "admin",
+ "password": "secret123",
+ "host": "db.example.com"
+}
+```
+
+Requesting this secret returns the entire JSON as a single value:
+
+```bash
+$ curl http://localhost:3500/v1.0/secrets/awssecretmanager/database-credentials
+{
+ "database-credentials": "{\"username\":\"admin\",\"password\":\"secret123\",\"host\":\"db.example.com\"}"
+}
+```
+
+### Multiple key-value pairs per secret
+
+If `multipleKeyValuesPerSecret` is `true`, the secret store parses JSON content stored in AWS Secrets Manager and returns it as multiple key-value pairs.
+
+Requesting the same `database-credentials` secret from above, the response splits the JSON object into individual entries, returning each field as its own key-value pair.
+
+```bash
+$ curl http://localhost:3500/v1.0/secrets/awssecretmanager/database-credentials
+{
+ "username": "admin",
+ "password": "secret123",
+ "host": "db.example.com"
+}
+```
+
## Create an AWS Secrets Manager instance
Setup AWS Secrets Manager using the AWS documentation: https://docs.aws.amazon.com/secretsmanager/latest/userguide/tutorials_basic.html.
diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/_index.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/_index.md
index 2b2509f53fb..0855682eb22 100644
--- a/daprdocs/content/en/reference/components-reference/supported-state-stores/_index.md
+++ b/daprdocs/content/en/reference/components-reference/supported-state-stores/_index.md
@@ -3,7 +3,7 @@ type: docs
title: "State store component specs"
linkTitle: "State stores"
description: "The supported state stores that interface with Dapr"
-weight: 4000
+weight: 10000
aliases:
- "/operations/components/setup-state-store/supported-state-stores/"
no_list: true
diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-cosmosdb.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-cosmosdb.md
index 2e67d46c137..dc225eee5d5 100644
--- a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-cosmosdb.md
+++ b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-cosmosdb.md
@@ -225,6 +225,22 @@ This particular optimization only makes sense if you are saving large objects to
{{% /alert %}}
+## Workflow Limitations
+
+{{% alert title="Note" color="primary" %}}
+
+As described below, CosmosDB has limitations that likely make it unsuitable for production environments.
+There is currently no path for migrating Workflow data from CosmosDB to another state store, meaning exceeding these limits in production will result in failed workflows with no workaround.
+
+{{% /alert %}}
+
+The more complex a workflow is (number of activities, child workflows, etc.), the more state operations it performs per state store transaction.
+All input & output values are saved to the workflow history, and are part of an operation of these transactions.
+CosmosDB has a [maximum document size of 2MB and a maximum transaction size of 100 operations](https://learn.microsoft.com/azure/cosmos-db/concepts-limits#per-request-limits).
+Attempting to write to CosmosDB beyond these limits results in an error code of `413`.
+This means that the workflow history must not exceed these limits, making CosmosDB unsuitable for workflows with large input/output values or larger, more complex workflows.
+A general guide to the number of records that are saved during a workflow execution can be found [here]({{% ref "workflow-architecture.md#state-store-record-count" %}}).
+
## Related links
- [Basic schema for a Dapr component]({{% ref component-schema %}})
diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-dynamodb.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-dynamodb.md
index 5a45e374d90..e556191829d 100644
--- a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-dynamodb.md
+++ b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-dynamodb.md
@@ -34,6 +34,8 @@ spec:
value: "myTOKEN" # Optional
- name: ttlAttributeName
value: "expiresAt" # Optional
+ - name: ttlInSeconds
+ value: # Optional
- name: partitionKey
value: "ContractID" # Optional
# Uncomment this if you wish to use AWS DynamoDB as a state store for actors (optional)
@@ -60,6 +62,7 @@ In order to use DynamoDB as a Dapr state store, the table must have a primary ke
| endpoint | N |AWS endpoint for the component to use. Only used for local development. The `endpoint` is unncessary when running against production AWS | `"http://localhost:4566"`
| sessionToken | N |AWS session token to use. A session token is only required if you are using temporary security credentials. | `"TOKEN"`
| ttlAttributeName | N |The table attribute name which should be used for TTL. | `"expiresAt"`
+| ttlInSeconds | N | Allows specifying a Time-to-live (TTL) in seconds that will be applied to every state store request unless TTL is explicitly defined via the [request metadata]({{% ref "state-store-ttl.md" %}}). If set to zero or less, no default TTL is applied, and items only expire if a TTL is explicitly provided in the request metadata, provided that `ttlAttributeName` is set. | `600`
| partitionKey | N |The table primary key or partition key attribute name. This field is used to replace the default primary key attribute name `"key"`. See the section [Partition Keys]({{% ref "setup-dynamodb.md#partition-keys" %}}). | `"ContractID"`
| actorStateStore | N | Consider this state store for actors. Defaults to "false" | `"true"`, `"false"`
@@ -158,6 +161,20 @@ $ aws dynamodb get-item \
}
```
+## Workflow Limitations
+
+{{% alert title="Note" color="primary" %}}
+
+As described below, DynamoDB has limitations that likely make it unsuitable for production environments.
+There is currently no path for migrating Workflow data from DynamoDB to another state store, meaning exceeding these limits in production will result in failed workflows with no workaround.
+
+{{% /alert %}}
+
+The more complex a workflow is (number of activities, child workflows, etc.), the more state operations it performs per state store transaction.
+The maximum number of operations that can be performed by DynamoDB in a [single transaction is 100](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/transaction-apis.html).
+This means that DynamoDB can only handle workflows with a limited complexity, meaning it is not suitable for all workflow scenarios.
+A general guide to the number of records that are saved during a workflow execution can be found [here]({{% ref "workflow-architecture.md#state-store-record-count" %}}).
+
## Related links
- [Basic schema for a Dapr component]({{% ref component-schema %}})
diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-redis.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-redis.md
index 68cdc27eb2b..58ac80c95b3 100644
--- a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-redis.md
+++ b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-redis.md
@@ -105,7 +105,7 @@ If you wish to use Redis as an actor store, append the following to the yaml.
| Field | Required | Details | Example |
|--------------------|:--------:|---------|---------|
-| redisHost | Y | Connection-string for the redis host | `localhost:6379`, `redis-master.default.svc.cluster.local:6379`
+| redisHost | Y | Connection-string for the redis host. If `"redisType"` is `"cluster"`, it can be multiple hosts separated by commas or just a single host. When using Redis Sentinel (`"failover"` is `"true"`), multiple sentinel addresses can also be provided as comma-separated values. | `localhost:6379`, `redis-master.default.svc.cluster.local:6379`, `sentinel1:26379,sentinel2:26379,sentinel3:26379`
| redisPassword | N | Password for Redis host. No Default. Can be `secretKeyRef` to use a secret reference | `""`, `"KeFg23!"`
| redisUsername | N | Username for Redis host. Defaults to empty. Make sure your redis server version is 6 or above, and have created acl rule correctly. | `""`, `"default"`
| useEntraID | N | Implements EntraID support for Azure Cache for Redis. Before enabling this:
The `redisHost` name must be specified in the form of `"server:port"`
TLS must be enabled
Learn more about this setting under [Create a Redis instance > Azure Cache for Redis]({{% ref "#setup-redis" %}}) | `"true"`, `"false"` |
@@ -114,8 +114,8 @@ If you wish to use Redis as an actor store, append the following to the yaml.
| clientKey | N | The content of the client private key, used in conjunction with `clientCert` for authentication. It is recommended to use a secret store as described [here]({{% ref component-secrets.md %}}) | `"----BEGIN PRIVATE KEY-----\nMIIE..."` |
| maxRetries | N | Maximum number of retries before giving up. Defaults to `3` | `5`, `10`
| maxRetryBackoff | N | Maximum backoff between each retry. Defaults to `2` seconds; `"-1"` disables backoff. | `3000000000`
-| failover | N | Property to enable failover configuration. Needs sentinelMasterName to be set. The redisHost should be the sentinel host address. See [Redis Sentinel Documentation](https://redis.io/docs/manual/sentinel/). Defaults to `"false"` | `"true"`, `"false"`
-| sentinelMasterName | N | The sentinel master name. See [Redis Sentinel Documentation](https://redis.io/docs/manual/sentinel/) | `""`, `"mymaster"`
+| failover | N | Property to enable failover configuration. Needs sentinelMasterName to be set. When enabled, redisHost should contain the sentinel addresses. Defaults to `"false"` | `"true"`, `"false"`
+| sentinelMasterName | N | The sentinel master name. See [Redis Sentinel Documentation](https://redis.io/docs/latest/operate/oss_and_stack/management/sentinel/) | `""`, `"mymaster"`
| sentinelUsername | N | Username for Redis Sentinel. Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"username"`
| sentinelPassword | N | Password for Redis Sentinel. Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"password"`
| redeliverInterval | N | The interval between checking for pending messages to redelivery. Defaults to `"60s"`. `"0"` disables redelivery. | `"30s"`
@@ -494,6 +494,29 @@ The result will be:
The query syntax and documentation is available [here]({{% ref howto-state-query-api.md %}})
+## Redis Sentinel configuration
+
+When using Redis Sentinel for high availability, set `redisType` to `"node"`, enable failover mode with `failover: "true"`, and provide the sentinel master name. Multiple sentinel addresses can be specified as a comma-separated list in the `redisHost` field for redundancy.
+
+ ```yaml
+ apiVersion: dapr.io/v1alpha1
+ kind: Component
+ metadata:
+ name: redis-statestore
+ spec:
+ type: state.redis
+ version: v1
+ metadata:
+ - name: redisHost
+ value: "sentinel1:26379,sentinel2:26379,sentinel3:26379"
+ - name: redisType
+ value: "node"
+ - name: failover
+ value: "true"
+ - name: sentinelMasterName
+ value: "mymaster"
+ ```
+
## Related links
- [Basic schema for a Dapr component]({{% ref component-schema %}})
- Read [this guide]({{% ref "howto-get-save-state.md#step-2-save-and-retrieve-a-single-state" %}}) for instructions on configuring state store components
diff --git a/daprdocs/content/en/reference/resource-specs/subscription-schema.md b/daprdocs/content/en/reference/resource-specs/subscription-schema.md
index 7f842f7150d..70cc0b2451c 100644
--- a/daprdocs/content/en/reference/resource-specs/subscription-schema.md
+++ b/daprdocs/content/en/reference/resource-specs/subscription-schema.md
@@ -38,6 +38,7 @@ spec:
enabled:
maxMessagesCount:
maxAwaitDurationMs:
+ metadata: # Optional
scopes:
-
```
@@ -51,6 +52,7 @@ scopes:
| pubsubname | N | The name of your pub/sub component. | `pubsub` |
| deadLetterTopic | N | The name of the dead letter topic that forwards undeliverable messages. | `poisonMessages` |
| bulkSubscribe | N | Enable bulk subscribe properties. | `true`, `false` |
+| metadata | N | Set subscribe metadata. | `{"key": "value"}` |
## `v1alpha1` format
@@ -67,6 +69,7 @@ spec:
route: # Required
pubsubname: # Required
deadLetterTopic: # Optional
+ metadata: # Optional
bulkSubscribe: # Optional
- enabled:
- maxMessagesCount:
@@ -83,6 +86,7 @@ scopes:
| route | Y | The endpoint to which all topic messages are sent. | `/checkout` |
| pubsubname | N | The name of your pub/sub component. | `pubsub` |
| deadlettertopic | N | The name of the dead letter topic that forwards undeliverable messages. | `poisonMessages` |
+| metadata | N | Set subscribe metadata. | `{"key": "value"}` |
| bulksubscribe | N | Enable bulk subscribe properties. | `true`, `false` |
## Related links
diff --git a/daprdocs/data/components/secret_stores/aws.yaml b/daprdocs/data/components/secret_stores/aws.yaml
index 522b7f64e64..5d08844b8ff 100644
--- a/daprdocs/data/components/secret_stores/aws.yaml
+++ b/daprdocs/data/components/secret_stores/aws.yaml
@@ -3,6 +3,8 @@
state: Beta
version: v1
since: "1.15"
+ features:
+ multipleKeyValuesPerSecret: true
- component: AWS SSM Parameter Store
link: aws-parameter-store
state: Alpha
diff --git a/daprdocs/data/components/state_stores/azure.yaml b/daprdocs/data/components/state_stores/azure.yaml
index 287477de780..b340a26db71 100644
--- a/daprdocs/data/components/state_stores/azure.yaml
+++ b/daprdocs/data/components/state_stores/azure.yaml
@@ -30,7 +30,7 @@
transactions: true
etag: true
ttl: true
- workflow: false
+ workflow: true
- component: Azure Table Storage
link: setup-azure-tablestorage
state: Stable
diff --git a/daprdocs/data/components/state_stores/generic.yaml b/daprdocs/data/components/state_stores/generic.yaml
index 289d8ce4305..8d8ce44184c 100644
--- a/daprdocs/data/components/state_stores/generic.yaml
+++ b/daprdocs/data/components/state_stores/generic.yaml
@@ -52,7 +52,7 @@
transactions: true
etag: true
ttl: true
- workflow: false
+ workflow: true
- component: Hashicorp Consul
link: setup-consul
state: Alpha
@@ -140,7 +140,7 @@
transactions: true
etag: true
ttl: true
- workflow: false
+ workflow: true
- component: PostgreSQL v1
link: setup-postgresql-v1
state: Stable
@@ -195,7 +195,7 @@
transactions: true
etag: true
ttl: true
- workflow: false
+ workflow: true
- component: Zookeeper
link: setup-zookeeper
state: Alpha
diff --git a/daprdocs/data/components/state_stores/oracle.yaml b/daprdocs/data/components/state_stores/oracle.yaml
index eae48304fde..fc5136684b6 100644
--- a/daprdocs/data/components/state_stores/oracle.yaml
+++ b/daprdocs/data/components/state_stores/oracle.yaml
@@ -9,7 +9,7 @@
etag: true
ttl: true
query: false
- workflow: false
+ workflow: true
- component: Coherence
link: setup-coherence
state: Alpha
diff --git a/daprdocs/layouts/_partials/hooks/body-end.html b/daprdocs/layouts/_partials/hooks/body-end.html
index ee0ebc1f145..9bb0c9688f0 100644
--- a/daprdocs/layouts/_partials/hooks/body-end.html
+++ b/daprdocs/layouts/_partials/hooks/body-end.html
@@ -5,16 +5,10 @@
-
-
-
-
-{{ end }}
\ No newline at end of file
+{{ end }}
diff --git a/daprdocs/layouts/_partials/hooks/head-end.html b/daprdocs/layouts/_partials/hooks/head-end.html
index a86318b4883..26e604a4cfc 100644
--- a/daprdocs/layouts/_partials/hooks/head-end.html
+++ b/daprdocs/layouts/_partials/hooks/head-end.html
@@ -1,25 +1,3 @@
{{ with .Site.Params.search.algolia }}
-
-
{{ end }}
\ No newline at end of file
diff --git a/daprdocs/layouts/_shortcodes/dapr-latest-version.html b/daprdocs/layouts/_shortcodes/dapr-latest-version.html
index a085fd0e6f5..da12c873eb6 100644
--- a/daprdocs/layouts/_shortcodes/dapr-latest-version.html
+++ b/daprdocs/layouts/_shortcodes/dapr-latest-version.html
@@ -1 +1 @@
-{{- if .Get "short" }}1.15{{ else if .Get "long" }}1.15.5{{ else if .Get "cli" }}1.15.1{{ else }}1.15.1{{ end -}}
+{{- if .Get "short" }}1.16{{ else if .Get "long" }}1.16.6{{ else if .Get "cli" }}1.16.5{{ else }}1.16.6{{ end -}}
diff --git a/daprdocs/static/images/homepage/dapr-agents.svg b/daprdocs/static/images/homepage/dapr-agents.svg
new file mode 100644
index 00000000000..1b34a323cf3
--- /dev/null
+++ b/daprdocs/static/images/homepage/dapr-agents.svg
@@ -0,0 +1,12 @@
+
diff --git a/daprdocs/static/images/homepage/dark-blue-dapr.svg b/daprdocs/static/images/homepage/dark-blue-dapr.svg
new file mode 100644
index 00000000000..e43f605ce59
--- /dev/null
+++ b/daprdocs/static/images/homepage/dark-blue-dapr.svg
@@ -0,0 +1,10 @@
+
diff --git a/daprdocs/static/images/homepage/pub-sub.svg b/daprdocs/static/images/homepage/pub-sub.svg
new file mode 100644
index 00000000000..03199fd61f8
--- /dev/null
+++ b/daprdocs/static/images/homepage/pub-sub.svg
@@ -0,0 +1,3 @@
+
diff --git a/daprdocs/static/images/homepage/service-invocation.svg b/daprdocs/static/images/homepage/service-invocation.svg
new file mode 100644
index 00000000000..e6a2a204c19
--- /dev/null
+++ b/daprdocs/static/images/homepage/service-invocation.svg
@@ -0,0 +1,3 @@
+
diff --git a/daprdocs/static/images/homepage/workflow.svg b/daprdocs/static/images/homepage/workflow.svg
new file mode 100644
index 00000000000..7d31f803b2c
--- /dev/null
+++ b/daprdocs/static/images/homepage/workflow.svg
@@ -0,0 +1,3 @@
+
diff --git a/daprdocs/static/images/state-management-outbox-steps.png b/daprdocs/static/images/state-management-outbox-steps.png
new file mode 100644
index 00000000000..a520b443b97
Binary files /dev/null and b/daprdocs/static/images/state-management-outbox-steps.png differ
diff --git a/daprdocs/static/images/workflow-overview/workflow-diagrid-dashboard.png b/daprdocs/static/images/workflow-overview/workflow-diagrid-dashboard.png
new file mode 100644
index 00000000000..2e5aa34a970
Binary files /dev/null and b/daprdocs/static/images/workflow-overview/workflow-diagrid-dashboard.png differ
diff --git a/daprdocs/static/images/workflow-overview/workflow-multi-app-callactivity.png b/daprdocs/static/images/workflow-overview/workflow-multi-app-callactivity.png
new file mode 100644
index 00000000000..7b2a28561d8
Binary files /dev/null and b/daprdocs/static/images/workflow-overview/workflow-multi-app-callactivity.png differ
diff --git a/daprdocs/static/images/workflow-overview/workflow-multi-app-child-workflow.png b/daprdocs/static/images/workflow-overview/workflow-multi-app-child-workflow.png
new file mode 100644
index 00000000000..388fcc8cf27
Binary files /dev/null and b/daprdocs/static/images/workflow-overview/workflow-multi-app-child-workflow.png differ
diff --git a/daprdocs/static/images/workflow-overview/workflow-multi-app-complex.png b/daprdocs/static/images/workflow-overview/workflow-multi-app-complex.png
new file mode 100644
index 00000000000..8a2c401f89a
Binary files /dev/null and b/daprdocs/static/images/workflow-overview/workflow-multi-app-complex.png differ
diff --git a/hugo.yaml b/hugo.yaml
index a6e206f75ee..67071e159e9 100644
--- a/hugo.yaml
+++ b/hugo.yaml
@@ -1,4 +1,4 @@
-baseURL: https://v1-16.docs.dapr.io
+baseURL: https://docs.dapr.io
title: Dapr Docs
# Output directory for generated site
@@ -117,21 +117,22 @@ params:
# First one is picked as the Twitter card image if not set on page.
# images: [images/project-illustration.png]
-
+
+ # Versioning
# Menu title if your navbar has a versions selector to access old versions of your site.
# This menu appears only if you have at least one [params.versions] set.
- version_menu: v1.16 (preview)
-
- # Flag used in the "version-banner" partial to decide whether to display a
- # banner on every page indicating that this is an archived version of the docs.
- # Set this flag to "true" if you want to display the banner.
- archived_version: false
+ version_menu: v1.16 (latest)
# The version number for the version of the docs represented in this doc set.
# Used in the "version-banner" partial to display a version number for the
# current doc set.
version: v1.16
+ # Flag used in the "version-banner" partial to decide whether to display a
+ # banner on every page indicating that this is an archived version of the docs.
+ # Set this flag to "true" if you want to display the banner.
+ archived_version: false
+
# A link to latest version of the docs. Used in the "version-banner" partial to
# point people to the main doc site.
url_latest_version: https://docs.dapr.io
@@ -150,10 +151,12 @@ params:
github_branch: v1.16
versions:
- - version: v1.16 (preview)
+ - version: v1.17 (preview)
+ url: https://v1-17.docs.dapr.io
+ - version: v1.16 (latest)
url: "#"
- - version: v1.15 (latest)
- url: "https://docs.dapr.io"
+ - version: v1.15
+ url: https://v1-15.docs.dapr.io
- version: v1.14
url: https://v1-14.docs.dapr.io
- version: v1.13
@@ -185,9 +188,9 @@ params:
search:
algolia:
- appId: 'O0QLQGNF38'
- apiKey: '54ae43aa28ce8f00c54c8d5f544d29b9'
- indexName: daprdocs
+ appId: O0QLQGNF38
+ apiKey: 54ae43aa28ce8f00c54c8d5f544d29b9
+ indexName: crawler_dapr
offlineSearch: false
# User interface configuration
@@ -272,49 +275,49 @@ module:
target: assets
- source: daprdocs/archetypes
target: archetypes
- - source: sdkdocs/python/daprdocs/content/en/python-sdk-docs
+ - source: sdkdocs/python/content/en/python-sdk-docs
target: content/developing-applications/sdks/python
lang: en
- - source: sdkdocs/python/daprdocs/content/en/python-sdk-contributing
+ - source: sdkdocs/python/content/en/python-sdk-contributing
target: content/contributing/sdk-contrib/
lang: en
- - source: sdkdocs/php/daprdocs/content/en/php-sdk-docs
+ - source: sdkdocs/php/content/en/php-sdk-docs
target: content/developing-applications/sdks/php
lang: en
- - source: sdkdocs/dotnet/daprdocs/content/en/dotnet-sdk-docs
+ - source: sdkdocs/dotnet/content/en/dotnet-sdk-docs
target: content/developing-applications/sdks/dotnet
lang: en
- - source: sdkdocs/dotnet/daprdocs/content/en/dotnet-sdk-contributing
+ - source: sdkdocs/dotnet/content/en/dotnet-sdk-contributing
target: content/contributing/sdk-contrib/
lang: en
- - source: sdkdocs/go/daprdocs/content/en/go-sdk-docs
+ - source: sdkdocs/go/content/en/go-sdk-docs
target: content/developing-applications/sdks/go
lang: en
- - source: sdkdocs/go/daprdocs/content/en/go-sdk-contributing
+ - source: sdkdocs/go/content/en/go-sdk-contributing
target: content/contributing/sdk-contrib/
lang: en
- - source: sdkdocs/java/daprdocs/content/en/java-sdk-docs
+ - source: sdkdocs/java/content/en/java-sdk-docs
target: content/developing-applications/sdks/java
lang: en
- - source: sdkdocs/java/daprdocs/content/en/java-sdk-contributing
+ - source: sdkdocs/java/content/en/java-sdk-contributing
target: content/contributing/sdk-contrib/
lang: en
- - source: sdkdocs/js/daprdocs/content/en/js-sdk-docs
+ - source: sdkdocs/js/content/en/js-sdk-docs
target: content/developing-applications/sdks/js
lang: en
- - source: sdkdocs/js/daprdocs/content/en/js-sdk-contributing
+ - source: sdkdocs/js/content/en/js-sdk-contributing
target: content/contributing/sdk-contrib/
lang: en
- - source: sdkdocs/rust/daprdocs/content/en/rust-sdk-docs
+ - source: sdkdocs/rust/content/en/rust-sdk-docs
target: content/developing-applications/sdks/rust
lang: en
- - source: sdkdocs/rust/daprdocs/content/en/rust-sdk-contributing
+ - source: sdkdocs/rust/content/en/rust-sdk-contributing
target: content/contributing/sdk-contrib/
lang: en
- - source: sdkdocs/pluggable-components/dotnet/daprdocs/content/en/dotnet-sdk-docs
+ - source: sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs
target: content/developing-applications/develop-components/pluggable-components/pluggable-components-sdks/pluggable-components-dotnet
lang: en
- - source: sdkdocs/pluggable-components/go/daprdocs/content/en/go-sdk-docs
+ - source: sdkdocs/pluggable-components/go/content/en/go-sdk-docs
target: content/developing-applications/develop-components/pluggable-components/pluggable-components-sdks/pluggable-components-go
lang: en
- source: translations/docs-zh/translated_content/zh_CN/docs
diff --git a/sdkdocs/dotnet b/sdkdocs/dotnet
deleted file mode 160000
index 241a646a203..00000000000
--- a/sdkdocs/dotnet
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 241a646a2037d4e91d3192dcbaf1f128b15de185
diff --git a/sdkdocs/dotnet/README.md b/sdkdocs/dotnet/README.md
new file mode 100644
index 00000000000..ce83c6b66d9
--- /dev/null
+++ b/sdkdocs/dotnet/README.md
@@ -0,0 +1,25 @@
+# Dapr .NET SDK documentation
+
+This page covers how the documentation is structured for the Dapr .NET SDK.
+
+## Dapr Docs
+
+All Dapr documentation is hosted at [docs.dapr.io](https://docs.dapr.io), including the docs for the [.NET SDK](https://docs.dapr.io/developing-applications/sdks/dotnet/). Head over there if you want to read the docs.
+
+### .NET SDK docs source
+
+Although the docs site code and content is in the [docs repo](https://github.com/dapr/docs), the .NET SDK content and images are within the `content` and `static` directories, respectively.
+
+This allows separation of roles and expertise between maintainers, and makes it easy to find the docs files you are looking for.
+
+## Writing .NET SDK docs
+
+To get up and running to write .NET SDK docs, visit the [docs repo](https://github.com/dapr/docs) to initialize your environment. It will clone both the docs repo and this repo, so you can make changes and see it rendered within the site instantly, as well as commit and PR into this repo.
+
+Make sure to read the [docs contributing guide](https://docs.dapr.io/contributing/contributing-docs/) for information on style/semantics/etc.
+
+## Docs architecture
+
+The docs site is built on [Hugo](https://gohugo.io), which lives in the docs repo. This repo is setup as a git submodule so that when the repo is cloned and initialized, the dotnet-sdk repo, along with the docs, are cloned as well.
+
+Then, in the Hugo configuration file, the `daprdocs/content` and `daprdocs/static` directories are redirected to the `daprdocs/developing-applications/sdks/dotnet` and `static/dotnet` directories, respectively. Thus, all the content within this repo is folded into the main docs site.
\ No newline at end of file
diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-contributing/dotnet-contributing.md b/sdkdocs/dotnet/content/en/dotnet-sdk-contributing/dotnet-contributing.md
new file mode 100644
index 00000000000..8f8837fcd9c
--- /dev/null
+++ b/sdkdocs/dotnet/content/en/dotnet-sdk-contributing/dotnet-contributing.md
@@ -0,0 +1,108 @@
+---
+type: docs
+title: "Contributing to the .NET SDK"
+linkTitle: ".NET SDK"
+weight: 3000
+description: Guidelines for contributing to the Dapr .NET SDK
+---
+
+# Welcome!
+If you're reading this, you're likely interested in contributing to Dapr and/or the Dapr .NET SDK. Welcome to the project
+and thank you for your interest in contributing!
+
+Please review the documentation, familiarize yourself with what Dapr is and what it's seeking to accomplish and reach
+out on [Discord](https://bit.ly/dapr-discord). Let us know how you'd like to contribute and we'd be happy to chime in
+with ideas and suggestions.
+
+There are many ways to contribute to Dapr:
+- Submit bug reports for the [Dapr runtime](https://github.com/dapr/dapr/issues/new/choose) or the [Dapr .NET SDK](https://github.com/dapr/dotnet-sdk/issues/new/choose)
+- Propose new [runtime capabilities](https://github.com/dapr/proposals/issues/new/choose) or [SDK functionality](https://github.com/dapr/dotnet-sdk/issues/new/choose)
+- Improve the documentation in either the [larger Dapr project](https://github.com/dapr/docs) or the [Dapr .NET SDK specifically](https://github.com/dapr/dotnet-sdk/tree/master/daprdocs)
+- Add new or improve existing [components](https://github.com/dapr/components-contrib/) that implement the various building blocks
+- Augment the [.NET pluggable component SDK capabilities](https://github.com/dapr-sandbox/components-dotnet-sdk)
+- Improve the Dapr .NET SDK code base and/or fix a bug (detailed below)
+
+If you're new to the code base, please feel encouraged to ask in the #dotnet-sdk channel in Discord about how
+to implement changes or generally ask questions. You are not required to seek permission to work on anything, but do
+note that if an issue is assigned to someone, it's an indication that someone might have already started work on it.
+Especially if it's been a while since the last activity on that issue, please feel free to reach out and see if it's
+still something they're interested in pursuing or whether you can take over, and open a pull request with your
+implementation.
+
+If you'd like to assign yourself to an issue, respond to the conversation with "/assign" and the bot will assign you
+to it.
+
+We have labeled some issues as `good-first-issue` or `help wanted` indicating that these are likely to be small,
+self-contained changes.
+
+If you're not certain about your implementation, please create it as a draft pull request and solicit feedback
+from the [.NET maintainers](https://github.com/orgs/dapr/teams/maintainers-dotnet-sdk) by tagging
+`@dapr/maintainers-dotnet-sdk` and providing some context about what you need assistance with.
+
+# Contribution Rules and Best Practices
+
+When contributing to the [.NET SDK](https://github.com/dapr/dotnet-sdk) the following rules and best-practices should
+be followed.
+
+## Pull Requests
+Pull requests that contain only formatting changes are generally discouraged. Pull requests should instead seek to
+fix a bug, add new functionality, or improve on existing capabilities.
+
+Do aim to minimize the contents of your pull request to span only a single issue. Broad PRs that touch on a lot of files
+are not likely to be reviewed or accepted in a short timeframe. Accommodating many different issues in a single PR makes
+it hard to determine whether your code fully addresses the underlying issue(s) or not and complicates the code review.
+
+## Tests
+All pull requests should include unit and/or integration tests that reflect the nature of what was added or changed
+so it's clear that the functionality works as intended. Avoid using auto-generated tests that duplicate testing the
+same functionality several times. Rather, seek to improve code coverage by validating each possible path of your
+changes so future contributors can more easily navigate the contours of your logic and more readily identify limitations.
+
+## Examples
+
+The `examples` directory contains code samples for users to run to try out specific functionality of the various
+Dapr .NET SDK packages and extensions. When writing new and updated samples keep in mind:
+
+- All examples should be runnable on Windows, Linux, and macOS. While .NET Core code is consistent among operating
+systems, any pre/post example commands should provide options through
+[tabpane]({{% ref "contributing-docs.md#tabbed-content" %}}).
+- Contain steps to download/install any required pre-requisites. Someone coming in with a fresh OS install should be
+able to start on the example and complete it without an error. Links to external download pages are fine.
+
+## Documentation
+
+The `daprdocs` directory contains the markdown files that are rendered into the [Dapr Docs](https://docs.dapr.io) website. When the
+documentation website is built this repo is cloned and configured so that its contents are rendered with the docs
+content. When writing docs keep in mind:
+
+ - All rules in the [docs guide]({{% ref contributing-docs.md %}}) should be followed in addition to these.
+ - All files and directories should be prefixed with `dotnet-` to ensure all file/directory names are globally
+   unique across all Dapr documentation.
+
+All pull requests should strive to include both XML documentation in the code clearly indicating what functionality
+does and why it's there as well as changes to the published documentation to clarify for other developers how your change
+improves the Dapr framework.
+
+## GitHub Dapr Bot Commands
+
+Check out the [daprbot documentation](https://docs.dapr.io/contributing/daprbot/) for GitHub commands you can run in this repo for common tasks. For example,
+you can comment `/assign` on an issue to assign it to yourself.
+
+## Commit Sign-offs
+All code submitted to the Dapr .NET SDK must be signed off by the developer authoring it. This means that every
+commit must end with the following:
+> Signed-off-by: First Last &lt;firstlast@example.com&gt;
+
+The name and email address must match the registered GitHub name and email address of the user committing the changes.
+We use a bot to detect this in pull requests and we will be unable to merge the PR if this check fails to validate.
+
+If you notice that a PR has failed to validate because of a failed DCO check early on in the PR history, please consider
+squashing the PR locally and resubmitting to ensure that the sign-off statement is included in the commit history.
+
+# Languages, Tools and Processes
+All source code in the Dapr .NET SDK is written in C# and targets the latest language version available to the earliest
+supported .NET SDK. As of v1.16, this means that both .NET 8 and .NET 9 are supported. The latest language version available
+is [C# version 12](https://learn.microsoft.com/dotnet/csharp/whats-new/csharp-version-history#c-version-12).
+
+Contributors are welcome to use whatever IDE they're most comfortable developing in, but please do not submit
+IDE-specific preference files along with your contributions as these will be rejected.
\ No newline at end of file
diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/_index.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/_index.md
new file mode 100644
index 00000000000..e92be0f411f
--- /dev/null
+++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/_index.md
@@ -0,0 +1,79 @@
+---
+type: docs
+title: "Dapr .NET SDK"
+linkTitle: ".NET"
+weight: 1000
+description: .NET SDK packages for developing Dapr applications
+no_list: true
+cascade:
+ github_repo: https://github.com/dapr/dotnet-sdk
+ github_subdir: daprdocs/content/en/dotnet-sdk-docs
+ path_base_for_github_subdir: content/en/developing-applications/sdks/dotnet/
+ github_branch: master
+---
+
+Dapr offers a variety of packages to help with the development of .NET applications. Using them you can create .NET clients, servers, and virtual actors with Dapr.
+
+## Prerequisites
+- [Dapr CLI]({{< ref install-dapr-cli.md >}}) installed
+- Initialized [Dapr environment]({{< ref install-dapr-selfhost.md >}})
+- [.NET 8](https://dotnet.microsoft.com/download) or [.NET 9](https://dotnet.microsoft.com/download) installed
+
+## Installation
+
+To get started with the Client .NET SDK, install the Dapr .NET SDK package:
+
+```sh
+dotnet add package Dapr.Client
+```
+
+## Try it out
+
+Put the Dapr .NET SDK to the test. Walk through the .NET quickstarts and tutorials to see Dapr in action:
+
+| SDK samples | Description |
+| ----------- | ----------- |
+| [Quickstarts]({{% ref quickstarts %}}) | Experience Dapr's API building blocks in just a few minutes using the .NET SDK. |
+| [SDK samples](https://github.com/dapr/dotnet-sdk/tree/master/examples) | Clone the SDK repo to try out some examples and get started. |
+| [Pub/sub tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/pub-sub) | See how Dapr .NET SDK works alongside other Dapr SDKs to enable pub/sub applications. |
+
+## Available packages
+
+| Package Name | Documentation Link | Description |
+|-----------------------------------------------------------------------------------------------------------|---------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------|
+| [Dapr.Client](https://www.nuget.org/packages/Dapr.Client) | [Documentation]({{% ref dotnet-client %}}) | Create .NET clients that interact with a Dapr sidecar and other Dapr applications. |
+| [Dapr.AI](https://www.nuget.org/packages/Dapr.AI) | [Documentation]({{% ref dotnet-ai %}}) | Create and manage AI operations in .NET. |
+| [Dapr.AI.A2a](https://www.nuget.org/packages/Dapr.AI.A2a) | | Dapr SDK for implementing agent-to-agent operations using the [A2A](https://github.com/a2aproject/a2a-dotnet) framework. |
+| [Dapr.AI.Microsoft.Extensions](https://www.nuget.org/packages/Dapr.AI.Microsoft.Extensions) | [Documentation]({{% ref dotnet-ai-extensions-howto %}}) | Easily interact with LLMs conversationally and using tooling via the Dapr Conversation building block. |
+| [Dapr.AspNetCore](https://www.nuget.org/packages/Dapr.AspNetCore) | [Documentation]({{% ref dotnet-client %}}) | Write servers and services in .NET using the Dapr SDK. Includes support and utilities providing richer integration with ASP.NET Core. |
+| [Dapr.Actors](https://www.nuget.org/packages/Dapr.Actors) | [Documentation]({{% ref dotnet-actors %}}) | Create virtual actors with state, reminders/timers, and methods. |
+| [Dapr.Actors.AspNetCore](https://www.nuget.org/packages/Dapr.Actors) | [Documentation]({{% ref dotnet-actors %}}) | Create virtual actors with state, reminders/timers, and methods with rich integration with ASP.NET Core. |
+| [Dapr.Actors.Analyzers](https://www.nuget.org/packages/Dapr.Actors.Analyzers) | [Documentation]({{% ref dotnet-guidance-source-generators %}}) | A collection of Roslyn source generators and analyzers for enabling better practices and preventing common errors when using Dapr Actors in .NET. |
+| [Dapr.Cryptography](https://www.nuget.org/packages/Dapr.Cryptography) | [Documentation]({{% ref dotnet-cryptography %}}) | Encrypt and decrypt streaming state of any size using Dapr's cryptography building block. |
+| [Dapr.Jobs](https://www.nuget.org/packages/Dapr.Jobs) | [Documentation]({{% ref dotnet-jobs %}}) | Create and manage the scheduling and orchestration of jobs. |
+| [Dapr.Jobs.Analyzers](https://www.nuget.org/packages/Dapr.Jobs.Analyzers) | [Documentation]({{% ref dotnet-guidance-source-generators %}}) | A collection of Roslyn source generators and analyzers for enabling better practices and preventing common errors when using Dapr Jobs in .NET. |
+| [Dapr.DistributedLocks](https://www.nuget.org/packages/Dapr.DistributedLocks) | [Documentation]({{% ref dotnet-distributed-lock %}}) | Create and manage distributed locks for managing exclusive resource access. |
+| [Dapr.Extensions.Configuration](https://www.nuget.org/packages/Dapr.Extensions.Configuration) | | Dapr secret store configuration provider implementation for `Microsoft.Extensions.Configuration`. |
+| [Dapr.PluggableComponents](https://www.nuget.org/packages/Dapr.PluggableComponents) | | Used to implement pluggable components with Dapr using .NET. |
+| [Dapr.PluggableComponents.AspNetCore](https://www.nuget.org/packages/Dapr.PluggableComponents.AspNetCore) | | Implement pluggable components with Dapr using .NET with rich ASP.NET Core support. |
+| [Dapr.PluggableComponents.Protos](https://www.nuget.org/packages/Dapr.PluggableComponents.Protos) | | **Note:** Developers needn't install this package directly in their applications. |
+| [Dapr.Messaging](https://www.nuget.org/packages/Dapr.Messaging) | [Documentation]({{% ref dotnet-messaging %}}) | Build distributed applications using the Dapr Messaging SDK that utilize messaging components like streaming pub/sub subscriptions. |
+| [Dapr.Workflow](https://www.nuget.org/packages/Dapr.Workflow) | [Documentation]({{% ref dotnet-workflow %}}) | Create and manage workflows that work with other Dapr APIs. |
+| [Dapr.Workflow.Analyzers](https://www.nuget.org/packages/Dapr.Workflow.Analyzers) | [Documentation]({{% ref dotnet-guidance-source-generators %}}) | A collection of Roslyn source generators and analyzers for enabling better practices and preventing common errors when using Dapr Workflows in .NET |
+
+## More information
+
+Learn more about local development options, best practices, or browse NuGet packages to add to your existing .NET
+applications.
+
+{{% cardpane %}}
+{{% card title="**Development**"%}}
+ [Learn about local development integration options]({{% ref dotnet-integrations %}})
+{{% /card %}}
+{{% card title="**Best Practices**"%}}
+ [Learn about best practices for developing .NET Dapr applications]({{% ref dotnet-guidance %}})
+{{% /card %}}
+{{% card title="**Nuget Packages**"%}}
+ [NuGet packages for adding the Dapr to your .NET applications](https://www.nuget.org/profiles/dapr.io)
+{{% /card %}}
+{{% /cardpane %}}
diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/_index.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/_index.md
new file mode 100644
index 00000000000..9bab18eddf5
--- /dev/null
+++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/_index.md
@@ -0,0 +1,11 @@
+---
+type: docs
+title: "Dapr actors .NET SDK"
+linkTitle: "Actors"
+weight: 40000
+description: Get up and running with the Dapr actors .NET SDK
+---
+
+With the Dapr actor package, you can interact with Dapr virtual actors from a .NET application.
+
+To get started, walk through the [Dapr actors]({{% ref dotnet-actors-howto.md %}}) how-to guide.
\ No newline at end of file
diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-client.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-client.md
new file mode 100644
index 00000000000..6431e4a493e
--- /dev/null
+++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-client.md
@@ -0,0 +1,114 @@
+---
+type: docs
+title: "The IActorProxyFactory interface"
+linkTitle: "Actors client"
+weight: 100000
+description: Learn how to create actor clients with the IActorProxyFactory interface
+---
+
+Inside of an `Actor` class or an ASP.NET Core project, the `IActorProxyFactory` interface is recommended to create actor clients.
+
+The `AddActors(...)` method will register actor services with ASP.NET Core dependency injection.
+
+- **Outside of an actor instance:** The `IActorProxyFactory` instance is available through dependency injection as a singleton service.
+- **Inside an actor instance:** The `IActorProxyFactory` instance is available as a property (`this.ProxyFactory`).
+
+The following is an example of creating a proxy inside an actor:
+
+```csharp
+public async Task<MyData> GetDataAsync()
+{
+    var proxy = this.ProxyFactory.CreateActorProxy<IOtherActor>(ActorId.CreateRandom(), "OtherActor");
+    await proxy.DoSomethingGreat();
+
+    return await this.StateManager.GetStateAsync<MyData>("my_data");
+}
+```
+
+In this guide, you will learn how to use `IActorProxyFactory`.
+
+{{% alert title="Tip" color="primary" %}}
+For a non-dependency-injected application, you can use the static methods on `ActorProxy`. Since the `ActorProxy` methods are error prone, try to avoid using them when configuring custom settings.
+{{% /alert %}}
+
+## Identifying an actor
+
+All of the APIs on `IActorProxyFactory` will require an actor _type_ and actor _id_ to communicate with an actor. For strongly-typed clients, you also need one of its interfaces.
+
+- **Actor type** uniquely identifies the actor implementation across the whole application.
+- **Actor id** uniquely identifies an instance of that type.
+
+If you don't have an actor `id` and want to communicate with a new instance, create a random id with `ActorId.CreateRandom()`. Since the random id is a cryptographically strong identifier, the runtime will create a new actor instance when you interact with it.
+
+You can use the type `ActorReference` to exchange an actor type and actor id with other actors as part of messages.
+
+## Two styles of actor client
+
+The actor client supports two different styles of invocation:
+
+| Actor client style | Description |
+| ------------------ | ----------- |
+| Strongly-typed | Strongly-typed clients are based on .NET interfaces and provide the typical benefits of strong-typing. They don't work with non-.NET actors. |
+| Weakly-typed | Weakly-typed clients use the `ActorProxy` class. It is recommended to use these only when required for interop or other advanced reasons. |
+
+### Using a strongly-typed client
+
+The following example uses the `CreateActorProxy<>` method to create a strongly-typed client. `CreateActorProxy<>` requires an actor interface type, and will return an instance of that interface.
+
+```csharp
+// Create a proxy for IOtherActor to type OtherActor with a random id
+var proxy = this.ProxyFactory.CreateActorProxy<IOtherActor>(ActorId.CreateRandom(), "OtherActor");
+
+// Invoke a method defined by the interface to invoke the actor
+//
+// proxy is an implementation of IOtherActor so we can invoke its methods directly
+await proxy.DoSomethingGreat();
+```
+
+### Using a weakly-typed client
+
+The following example uses the `Create` method to create a weakly-typed client. `Create` returns an instance of `ActorProxy`.
+
+```csharp
+// Create a proxy for type OtherActor with a random id
+var proxy = this.ProxyFactory.Create(ActorId.CreateRandom(), "OtherActor");
+
+// Invoke a method by name to invoke the actor
+//
+// proxy is an instance of ActorProxy.
+await proxy.InvokeMethodAsync("DoSomethingGreat");
+```
+
+Since `ActorProxy` is a weakly-typed proxy, you need to pass in the actor method name as a string.
+
+You can also use `ActorProxy` to invoke methods with both a request and a response message. Request and response messages will be serialized using the `System.Text.Json` serializer.
+
+```csharp
+// Create a proxy for type OtherActor with a random id
+var proxy = this.ProxyFactory.Create(ActorId.CreateRandom(), "OtherActor");
+
+// Invoke a method on the proxy to invoke the actor
+//
+// proxy is an instance of ActorProxy.
+var request = new MyRequest() { Message = "Hi, it's me.", };
+var response = await proxy.InvokeMethodAsync<MyRequest, MyResponse>("DoSomethingGreat", request);
+```
+
+When using a weakly-typed proxy, you _must_ proactively define the correct actor method names and message types. When using a strongly-typed proxy, these names and types are defined for you as part of the interface definition.
+
+### Actor method invocation exception details
+
+The actor method invocation exception details are surfaced to the caller and the callee, providing an entry point to track down the issue. Exception details include:
+ - Method name
+ - Line number
+ - Exception type
+ - UUID
+
+You use the UUID to match the exception on the caller and callee side. Below is an example of exception details:
+```
+Dapr.Actors.ActorMethodInvocationException: Remote Actor Method Exception, DETAILS: Exception: NotImplementedException, Method Name: ExceptionExample, Line Number: 14, Exception uuid: d291a006-84d5-42c4-b39e-d6300e9ac38b
+```
+
+## Next steps
+
+[Learn how to author and run actors with `ActorHost`]({{% ref dotnet-actors-usage.md %}}).
\ No newline at end of file
diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-howto.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-howto.md
new file mode 100644
index 00000000000..bac91f7955f
--- /dev/null
+++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-howto.md
@@ -0,0 +1,467 @@
+---
+type: docs
+title: "How to: Run and use virtual actors in the .NET SDK"
+linkTitle: "How to: Run & use virtual actors"
+weight: 300000
+description: Try out .NET Dapr virtual actors with this example
+---
+
+The Dapr actor package allows you to interact with Dapr virtual actors from a .NET application. In this guide, you learn how to:
+
+- Create an Actor (`MyActor`).
+- Invoke its methods on the client application.
+
+```
+MyActor --- MyActor.Interfaces
+ |
+ +- MyActorService
+ |
+ +- MyActorClient
+```
+
+**The interface project (\MyActor\MyActor.Interfaces)**
+
+This project contains the interface definition for the actor. Actor interfaces can be defined in any project with any name. The interface defines the actor contract shared by:
+
+- The actor implementation
+- The clients calling the actor
+
+Because client projects may depend on it, it's better to define it in an assembly separate from the actor implementation.
+
+**The actor service project (\MyActor\MyActorService)**
+
+This project implements the ASP.NET Core web service that hosts the actor. It contains the implementation of the actor, `MyActor.cs`. An actor implementation is a class that:
+
+- Derives from the base type Actor
+- Implements the interfaces defined in the `MyActor.Interfaces` project.
+
+An actor class must also implement a constructor that accepts an `ActorService` instance and an `ActorId`, and passes them to the base Actor class.
+
+**The actor client project (\MyActor\MyActorClient)**
+
+This project contains the implementation of the actor client which calls MyActor's method defined in Actor Interfaces.
+
+## Prerequisites
+- [Dapr CLI]({{< ref install-dapr-cli.md >}}) installed.
+- Initialized [Dapr environment]({{< ref install-dapr-selfhost.md >}}).
+- [.NET 8](https://dotnet.microsoft.com/download) or [.NET 9](https://dotnet.microsoft.com/download) installed
+
+## Step 0: Prepare
+
+Since we'll be creating 3 projects, choose an empty directory to start from, and open it in your terminal of choice.
+
+## Step 1: Create actor interfaces
+
+Actor interface defines the actor contract that is shared by the actor implementation and the clients calling the actor.
+
+Actor interface is defined with the below requirements:
+
+- Actor interface must inherit `Dapr.Actors.IActor` interface
+- The return type of Actor method must be `Task` or `Task