diff --git a/.github/scripts/algolia.py b/.github/scripts/algolia.py deleted file mode 100644 index 5071ea58006..00000000000 --- a/.github/scripts/algolia.py +++ /dev/null @@ -1,118 +0,0 @@ -import os -from re import S -import sys -import json -from bs4 import BeautifulSoup -from algoliasearch.search_client import SearchClient - -url = "docs.dapr.io" -if len(sys.argv) > 1: - starting_directory = os.path.join(os.getcwd(), str(sys.argv[1])) -else: - starting_directory = os.getcwd() - -ALGOLIA_APP_ID = os.getenv('ALGOLIA_APP_ID') -ALGOLIA_API_KEY = os.getenv('ALGOLIA_API_WRITE_KEY') -ALGOLIA_INDEX_NAME = os.getenv('ALGOLIA_INDEX_NAME') - -client = SearchClient.create(ALGOLIA_APP_ID, ALGOLIA_API_KEY) -index = client.init_index(ALGOLIA_INDEX_NAME) - -excluded_files = [ - "404.html", -] - -exluded_directories = [ - "zh-hans", -] - -rankings = { - "Getting started": 0, - "Concepts": 100, - "Developing applications": 200, - "Operations": 300, - "Reference": 400, - "Contributing": 500, - "Home": 600 -} - -def scan_directory(directory: str, pages: list): - if os.path.basename(directory) in exluded_directories: - print(f'Skipping directory: {directory}') - return - for file in os.listdir(directory): - path = os.path.join(directory, file) - if os.path.isfile(path): - if file.endswith(".html") and file not in excluded_files: - if '' not in open(path, encoding="utf8").read(): - print(f'Indexing: {path}') - pages.append(path) - else: - print(f'Skipping hidden page: {path}') - else: - scan_directory(path, pages) - -def parse_file(path: str): - data = {} - data["hierarchy"] = {} - data["rank"] = 999 - data["subrank"] = 99 - data["type"] = "lvl2" - data["lvl0"] = "" - data["lvl1"] = "" - data["lvl2"] = "" - data["lvl3"] = "" - text = "" - subrank = 0 - with open(path, "r", errors='ignore') as file: - content = file.read() - soup = BeautifulSoup(content, "html.parser") - for meta in soup.find_all("meta"): - if meta.get("name") == "description": - data["lvl2"] = meta.get("content") - 
data["hierarchy"]["lvl1"] = meta.get("content") - elif meta.get("property") == "og:title": - data["lvl0"] = meta.get("content") - data["hierarchy"]["lvl0"] = meta.get("content") - data["hierarchy"]["lvl2"] = meta.get("content") - elif meta.get("property") == "og:url": - data["url"] = meta.get("content") - data["path"] = meta.get("content").split(url)[1] - data["objectID"] = meta.get("content").split(url)[1] - breadcrumbs = soup.find_all("li", class_="breadcrumb-item") - try: - subrank = len(breadcrumbs) - data["subrank"] = subrank - except: - subrank = 99 - data["subrank"] = 99 - for bc in breadcrumbs: - section = bc.text.strip() - data["lvl1"] = section - data["hierarchy"]["lvl0"] = section - try: - data["rank"] = rankings[section] + subrank - except: - print(f"Rank not found for section {section}") - data["rank"] = 998 - break - for p in soup.find_all("p"): - if p.text != "": - text = text + p.text - data["text"] = text - return data - -def index_payload(payload): - res = index.replace_all_objects(payload) - res.wait() - - -if __name__ == "__main__": - pages = [] - payload = [] - scan_directory(starting_directory, pages) - for page in pages: - data = parse_file(page) - if "objectID" in data: - payload.append(data) - index_payload(payload) diff --git a/.github/workflows/dapr-maintainer-merge.yml b/.github/workflows/dapr-maintainer-merge.yml new file mode 100644 index 00000000000..e6cfe77d66e --- /dev/null +++ b/.github/workflows/dapr-maintainer-merge.yml @@ -0,0 +1,196 @@ + +name: Auto-approve & merge SDK docs PRs (per directory/team) + +# Run on PRs (including forks) but act with repo-level permissions. +# We DO NOT check out PR code; we only read PR metadata via the API. +on: + pull_request_target: + types: [opened, synchronize, reopened, ready_for_review, edited] + +# Token scopes needed: label, review, and merge. 
+permissions: + contents: write # required to merge + pull-requests: write # required to approve/merge + issues: write # required to create/add labels + +jobs: + sdk-docs-automerge: + runs-on: ubuntu-latest + # Ignore drafts + if: ${{ github.event.pull_request.draft == false }} + + steps: + - name: Evaluate PR for SDK docs eligibility & add label + id: check + uses: actions/github-script@v7 + with: + # Optional: override org/merge method/colors via env + # env: + # ORG: dapr + # MERGE_METHOD: squash + # LABEL_COLOR_DEFAULT: '6A9286' + script: | + const { owner, repo } = context.repo; + const pr = context.payload.pull_request; + const number = pr.number; + + // --- Mapping: directory prefixes -> team slug + label --- + // Each entry can have multiple prefixes for the same SDK if needed. + const MAPPINGS = [ + { label: 'automerge: dotnet', teamSlug: 'maintainers-dotnet-sdk', prefixes: ['sdkdocs/dotnet/'] }, + { label: 'automerge: go', teamSlug: 'maintainers-go-sdk', prefixes: ['sdkdocs/go/'] }, + { label: 'automerge: java', teamSlug: 'maintainers-java-sdk', prefixes: ['sdkdocs/java/content/en/'] }, + { label: 'automerge: js', teamSlug: 'maintainers-js-sdk', prefixes: ['sdkdocs/js/'] }, + { label: 'automerge: php', teamSlug: 'maintainers-php-sdk', prefixes: ['sdkdocs/php/'] }, + { label: 'automerge: python', teamSlug: 'maintainers-python-sdk', prefixes: ['sdkdocs/python/'] }, + { label: 'automerge: rust', teamSlug: 'maintainers-rust-sdk', prefixes: ['sdkdocs/rust/content/en/'] }, + ]; + + const org = owner; + const defaultLabelColor = '6A9286'; + const username = pr.user.login; + + // 1) List changed files + const files = await github.paginate( + github.rest.pulls.listFiles, + { owner, repo, pull_number: number, per_page: 100 } + ); + + if (files.length === 0) { + core.info('No files changed in PR; skipping.'); + core.setOutput('eligible', 'false'); + return; + } + + // 2) Determine which single SDK mapping the PR targets + // - All files must match ONE mapping's 
prefixes + // - If files touch multiple mappings or outside any mapping, skip + + let currentMapping = null; // holds the mapping object we've locked onto + let ineligible = false; + + for (const f of files) { + const path = f.filename; + + // find the first mapping whose prefixes match this file + let matched = null; + for (const m of MAPPINGS) { + if (m.prefixes.some(p => path.startsWith(p))) { + matched = m; + break; + } + } + + // if no mapping matched, we can stop: not eligible + if (!matched) { + ineligible = true; + break; + } + + // if we haven't locked onto a mapping yet, set it now + if (!currentMapping) { + currentMapping = matched; + } else if (currentMapping !== matched) { + // different SDK mapping from the one already selected => not eligible + ineligible = true; + break; + } + } + + if (ineligible || !currentMapping) { + core.info('PR is not eligible: outside mapped paths or touches multiple SDK directories.'); + core.setOutput('eligible', 'false'); + return; + } + + const mapping = currentMapping; + const labelName = mapping.label; + const teamSlug = mapping.teamSlug; + const lang = mapping.label.split(': ')[1] || 'sdk'; + + // 3) Verify author is active in the corresponding team + // teams.getMembershipForUserInOrg: GET /orgs/{org}/teams/{team_slug}/memberships/{username} + // Requires team visibility to the token. 
[3](https://docs.github.com/rest/teams/members) + try { + const membership = await github.rest.teams.getMembershipForUserInOrg({ + org, + team_slug: teamSlug, + username + }); + if (membership.data.state !== 'active') { + core.info(`User ${username} is not active in team ${teamSlug}.`); + core.setOutput('eligible', 'false'); + return; + } + } catch (err) { + if (err.status === 404) { + core.info(`User ${username} is not a member of team ${teamSlug}.`); + core.setOutput('eligible', 'false'); + return; + } + throw err; + } + + // 4) Ensure label exists; then add it to the PR + try { + await github.rest.issues.getLabel({ owner, repo, name: labelName }); + } catch (e) { + if (e.status === 404) { + await github.rest.issues.createLabel({ + owner, repo, name: labelName, color: defaultLabelColor, + description: 'Auto-merged language-specific SDK docs' + }); + } else { + throw e; + } + } + await github.rest.issues.addLabels({ + owner, repo, issue_number: number, labels: [labelName] + }); + + // 5) Expose mapping for next step + core.setOutput('eligible', 'true'); + core.setOutput('label', labelName); + core.setOutput('teamSlug', teamSlug); + core.setOutput('lang', lang); + + - name: Auto-approve & merge (only if eligible) + if: steps.check.outputs.eligible == 'true' + uses: actions/github-script@v7 + with: + script: | + const { owner, repo } = context.repo; + const number = context.payload.pull_request.number; + const lang = core.getInput('lang') || '${{ steps.check.outputs.lang }}'; + const mergeMethod = process.env.MERGE_METHOD || 'squash'; + + // 6) Auto-approve review + try { + await github.rest.pulls.createReview({ + owner, repo, pull_number: number, + event: 'APPROVE', + body: `Auto-approval: ${lang} SDK docs` + }); + } catch (e) { + core.warning(`Failed to create review: ${e.message}`); + } + + // 7) Poll until PR is mergeable (clean/unstable) + const wait = ms => new Promise(r => setTimeout(r, ms)); + let attempt = 0; + while (attempt < 12) { // up to ~60s + 
attempt++; + const pr = await github.rest.pulls.get({ owner, repo, pull_number: number }); + const state = pr.data.mergeable_state; + core.info(`mergeable=${pr.data.mergeable}, mergeable_state=${state}`); + if (pr.data.mergeable && (state === 'clean' || state === 'unstable')) break; + await wait(5000); + } + + // 8) Merge the PR + await github.rest.pulls.merge({ + owner, repo, pull_number: number, + merge_method: mergeMethod, + commit_title: `${lang}: ${context.payload.pull_request.title}`, + commit_message: `Auto-merged by SDK maintainer merge bot (${lang})` + }); diff --git a/.github/workflows/website-root.yml b/.github/workflows/website-root.yml index dc437b1ca92..10c09bc6b11 100644 --- a/.github/workflows/website-root.yml +++ b/.github/workflows/website-root.yml @@ -4,11 +4,11 @@ on: workflow_dispatch: push: branches: - - v1.15 + - v1.16 pull_request: types: [opened, synchronize, reopened, closed] branches: - - v1.15 + - v1.16 concurrency: # Cancel the previously triggered build for only PR build. 
@@ -50,23 +50,17 @@ jobs: if [ $GITHUB_EVENT_NAME == 'pull_request' ]; then STAGING_URL="https://${SWA_BASE}-${{github.event.number}}.westus2.azurestaticapps.net/" fi - hugo ${STAGING_URL+-b "$STAGING_URL"} + hugo ${STAGING_URL+-b "$STAGING_URL"} --minify - name: Deploy docs site uses: Azure/static-web-apps-deploy@v1 with: azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_PROUD_BAY_0E9E0E81E }} repo_token: ${{ secrets.GITHUB_TOKEN }} action: "upload" - app_location: "/daprdocs/public/" + app_location: "/daprdocs/public" output_location: "/" skip_app_build: true skip_deploy_on_missing_secrets: true - - name: Upload Hugo artifacts - uses: actions/upload-artifact@v4 - with: - name: hugo_build - path: ./daprdocs/public/ - if-no-files-found: error close_staging_site: if: github.event_name == 'pull_request' && github.event.action == 'closed' @@ -80,29 +74,3 @@ jobs: azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_PROUD_BAY_0E9E0E81E }} action: "close" skip_deploy_on_missing_secrets: true - - algolia_index: - name: Index site for Algolia - if: github.event_name == 'push' - needs: ['build_and_deploy_job'] - runs-on: ubuntu-latest - env: - ALGOLIA_APP_ID: ${{ secrets.ALGOLIA_APP_ID }} - ALGOLIA_API_WRITE_KEY: ${{ secrets.ALGOLIA_API_WRITE_KEY }} - ALGOLIA_INDEX_NAME: daprdocs - steps: - - name: Checkout docs repo - uses: actions/checkout@v4 - with: - submodules: false - - name: Download Hugo artifacts - uses: actions/download-artifact@v3 - with: - name: hugo_build - path: site/ - - name: Install Python packages - run: | - pip install --upgrade bs4 - pip install --upgrade 'algoliasearch>=2.0,<3.0' - - name: Index site - run: python ./.github/scripts/algolia.py ./site diff --git a/.gitmodules b/.gitmodules index 30857f4ef22..3b8c2987525 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,31 +1,4 @@ -[submodule "sdkdocs/python"] - path = sdkdocs/python - url = https://github.com/dapr/python-sdk.git -[submodule "sdkdocs/php"] 
- path = sdkdocs/php - url = https://github.com/dapr/php-sdk.git -[submodule "sdkdocs/dotnet"] - path = sdkdocs/dotnet - url = https://github.com/dapr/dotnet-sdk.git [submodule "translations/docs-zh"] path = translations/docs-zh url = https://github.com/dapr/docs-zh.git - branch = v1.0_content -[submodule "sdkdocs/go"] - path = sdkdocs/go - url = https://github.com/dapr/go-sdk.git -[submodule "sdkdocs/java"] - path = sdkdocs/java - url = https://github.com/dapr/java-sdk.git -[submodule "sdkdocs/js"] - path = sdkdocs/js - url = https://github.com/dapr/js-sdk.git -[submodule "sdkdocs/pluggable-components/dotnet"] - path = sdkdocs/pluggable-components/dotnet - url = https://github.com/dapr-sandbox/components-dotnet-sdk -[submodule "sdkdocs/pluggable-components/go"] - path = sdkdocs/pluggable-components/go - url = https://github.com/dapr-sandbox/components-go-sdk -[submodule "sdkdocs/rust"] - path = sdkdocs/rust - url = https://github.com/dapr/rust-sdk.git + branch = v1.0_content \ No newline at end of file diff --git a/README.md b/README.md index 3af7bb0833e..4fa213b7709 100644 --- a/README.md +++ b/README.md @@ -16,8 +16,8 @@ The following branches are currently maintained: | Branch | Website | Description | | ------------------------------------------------------------ | -------------------------- | ------------------------------------------------------------------------------------------------ | -| [v1.15](https://github.com/dapr/docs) (primary) | https://docs.dapr.io | Latest Dapr release documentation. Typo fixes, clarifications, and most documentation goes here. | -| [v1.16](https://github.com/dapr/docs/tree/v1.16) (pre-release) | https://v1-16.docs.dapr.io/ | Pre-release documentation. Doc updates that are only applicable to v1.15+ go here. | +| [v1.16](https://github.com/dapr/docs) (primary) | https://docs.dapr.io | Latest Dapr release documentation. Typo fixes, clarifications, and most documentation goes here. 
| +| [v1.17](https://github.com/dapr/docs/tree/v1.17) (pre-release) | https://v1-17.docs.dapr.io/ | Pre-release documentation. Doc updates that are only applicable to v1.16+ go here. | For more information visit the [Dapr branch structure](https://docs.dapr.io/contributing/docs-contrib/contributing-docs/#branch-guidance) document. @@ -68,22 +68,17 @@ Continue with the [Run local server](#run-local-server) steps. 1. Ensure pre-requisites are installed. 1. [Fork](https://github.com/dapr/docs/fork) and clone this repository. -1. Change to daprdocs directory: +1. Make sure you are in the root folder for the docs repo. You should find a hugo.yaml file in this directory. -```sh -cd ./daprdocs -``` - -4. Update submodules: +1. Update submodules: ```sh git submodule update --init --recursive ``` -1. Navigate back to the repository root and install npm packages: +1. Install the npm packages: ```sh -cd .. npm install ``` diff --git a/daprdocs/assets/scss/_content.scss b/daprdocs/assets/scss/_content.scss index 8b40a659f07..2f4d0b7b402 100644 --- a/daprdocs/assets/scss/_content.scss +++ b/daprdocs/assets/scss/_content.scss @@ -153,6 +153,10 @@ color: $secondary; } +.card-title > img { + margin-right: 10px; +} + .td-page-meta { a, a:visited { diff --git a/daprdocs/content/en/_index.md b/daprdocs/content/en/_index.md index f17e63ec71f..54ab8f9d4d1 100644 --- a/daprdocs/content/en/_index.md +++ b/daprdocs/content/en/_index.md @@ -4,17 +4,134 @@ no_list: true --- # Dapr Docs -Welcome to the Dapr documentation site! +## Dapr, the distributed application runtime +Dapr provides APIs for communication, state, workflow, and agentic AI. The APIs decouple the application code from the underlying infrastructure ensuring flexibility and portability. Dapr leverages industry best practices for security, resiliency, and observability, so you can focus on your code. -{{% alert title="What is Dapr?" 
color="primary" %}} -Dapr is a portable, event-driven runtime that makes it easy for any developer to build resilient, -stateless and stateful applications that run on the cloud and edge and embraces the diversity of -languages and developer frameworks. Leveraging the benefits of a sidecar architecture, Dapr helps -you tackle the challenges that come with building microservices and keeps your code platform agnostic. {{< button text="Get started" page="getting-started.md" >}} -{{% /alert %}} +### Use Cases + +
+
+
+
+ Dapr workflow + Workflow Orchestration
+

Orchestrate your microservices reliably with Dapr workflow.

+ +
+
+
+
+
+ Dapr Agents + Agentic AI
+

Create durable agentic AI applications with Dapr Agents.

+ +
+
+
+
+
+
+
+ Dapr service invocation API + Microservices
+

Build resilient microservices using the Dapr service invocation API.

+ +
+
+
+
+
+ Dapr pub/sub API + Event Driven Architecture
+

Create event-driven applications with the Dapr pub/sub API.

+ +
+
+
+ +### Languages + +
+
+
+
+ .NET logo + .NET +
+

+ Learn more about the .NET SDK. +

+ +
+
+
+
+
+ Python logo + Python +
+

+ Learn more about the Python SDK. +

+ +
+
+
+
+
+ JS logo + JavaScript +
+

+ Learn more about the JavaScript SDK. +

+ +
+
+
+ +
+
+
+
+ Java logo + Java +
+

+ Learn more about the Java SDK. +

+ +
+
+
+
+
+ Go logo + Go +
+

+ Learn more about the Go SDK. +

+ +
+
+
+
+
+ PHP logo + PHP +
+

+ Learn more about the PHP SDK. +

+ +
+
+
### Start developing with Dapr @@ -113,9 +230,8 @@ you tackle the challenges that come with building microservices and keeps your c -
-
+
Code icon @@ -128,81 +244,17 @@ you tackle the challenges that come with building microservices and keeps your c
-
- .NET logo - .NET + Code icon + Dapr University

- Learn more about the .NET SDK. + Learn Dapr through a series of free hands-on courses in a cloud-based sandbox environment.

- -
-
-
-
-
- Python logo - Python -
-

- Learn more about the Python SDK. -

- -
-
-
-
-
- JS logo - JavaScript -
-

- Learn more about the JavaScript SDK. -

- +
-
-
-
-
-
- Java logo - Java -
-

- Learn more about the Java SDK. -

- -
-
-
-
-
- Go logo - Go -
-

- Learn more about the Go SDK. -

- -
-
-
-
-
- PHP logo - PHP -
-

- Learn more about the PHP SDK. -

- -
-
-
\ No newline at end of file diff --git a/daprdocs/content/en/concepts/building-blocks-concept.md b/daprdocs/content/en/concepts/building-blocks-concept.md index 23c444f0c93..08d94f5ccde 100644 --- a/daprdocs/content/en/concepts/building-blocks-concept.md +++ b/daprdocs/content/en/concepts/building-blocks-concept.md @@ -31,4 +31,4 @@ Dapr provides the following building blocks: | [**Distributed lock**]({{% ref "distributed-lock-api-overview" %}}) | `/v1.0-alpha1/lock` | The distributed lock API enables you to take a lock on a resource so that multiple instances of an application can access the resource without conflicts and provide consistency guarantees. | [**Cryptography**]({{% ref "cryptography-overview" %}}) | `/v1.0-alpha1/crypto` | The Cryptography API enables you to perform cryptographic operations, such as encrypting and decrypting messages, without exposing keys to your application. | [**Jobs**]({{% ref "jobs-overview" %}}) | `/v1.0-alpha1/jobs` | The Jobs API enables you to schedule and orchestrate jobs. Example scenarios include: -| [**Conversation**]({{% ref "conversation-overview" %}}) | `/v1.0-alpha1/conversation` | The Conversation API enables you to supply prompts to converse with different large language models (LLMs) and includes features such as prompt caching and personally identifiable information (PII) obfuscation. \ No newline at end of file +| [**Conversation**]({{% ref "conversation-overview" %}}) | `/v1.0-alpha2/conversation` | The Conversation API enables you to supply prompts to converse with different large language models (LLMs) and includes features such as prompt caching and personally identifiable information (PII) obfuscation. 
\ No newline at end of file diff --git a/daprdocs/content/en/concepts/dapr-services/scheduler.md b/daprdocs/content/en/concepts/dapr-services/scheduler.md index a143ee08b97..3a561862457 100644 --- a/daprdocs/content/en/concepts/dapr-services/scheduler.md +++ b/daprdocs/content/en/concepts/dapr-services/scheduler.md @@ -85,7 +85,7 @@ When running in HA mode, you only need to expose the ports for one scheduler ins version: "3.5" services: scheduler-0: - image: "docker.io/daprio/scheduler:1.16.0" + image: "docker.io/daprio/scheduler:{{% dapr-latest-version %}}" command: - "./scheduler" - "--etcd-data-dir=/var/run/dapr/scheduler" @@ -96,7 +96,7 @@ services: volumes: - ./dapr_scheduler/0:/var/run/dapr/scheduler scheduler-1: - image: "docker.io/daprio/scheduler:1.16.0" + image: "docker.io/daprio/scheduler:{{% dapr-latest-version %}}" command: - "./scheduler" - "--etcd-data-dir=/var/run/dapr/scheduler" @@ -105,7 +105,7 @@ services: volumes: - ./dapr_scheduler/1:/var/run/dapr/scheduler scheduler-2: - image: "docker.io/daprio/scheduler:1.16.0" + image: "docker.io/daprio/scheduler:{{% dapr-latest-version %}}" command: - "./scheduler" - "--etcd-data-dir=/var/run/dapr/scheduler" @@ -115,25 +115,93 @@ services: - ./dapr_scheduler/2:/var/run/dapr/scheduler ``` -## Back Up and Restore Scheduler Data +## Managing jobs with the Dapr CLI -In production environments, it's recommended to perform periodic backups of this data at an interval that aligns with your recovery point objectives. +Dapr provides a CLI for inspecting and managing all scheduled jobs, regardless of type. +The CLI is the recommended way to view, back up, and delete jobs. -### Port Forward for Backup Operations +There are several different types of jobs which Scheduler manages: -To perform backup and restore operations, you'll need to access the embedded etcd instance. This requires port forwarding to expose the etcd ports (port 2379). 
+- `app/{app-id}/{job-name}`: Jobs created via the [Jobs API]({{% ref jobs_api %}}) +- `actor/{actor-type}/{actor-id}/{reminder-name}`: Actor reminder jobs created via the [Actor Reminders API]({{% ref "actors-timers-reminders#actor-reminders" %}}) +- `activity/{app-id}/{instance-id}::{generation-name}::{activity-index}`: Used internally for [Workflow Activity reminders]({{% ref "workflow-features-concepts.md#workflow-activities" %}}) +- `workflow/{app-id}/{instance-id}/{random-name}`: Used internally for [Workflows]({{% ref "workflow-overview.md" %}}). -#### Kubernetes Example +Please see [here for how to manage specifically reminders]({{% ref "actors-timers-reminders#managing-reminders-with-the-cli" %}}) with the CLI. -Here's how to port forward and connect to the etcd instance: +### List jobs -```shell -kubectl port-forward svc/dapr-scheduler-server 2379:2379 -n dapr-system +```bash +dapr scheduler list +``` + +Example output: + +```bash +NAME BEGIN COUNT LAST TRIGGER +actor/myactortype/actorid1/test1 -3.89s 1 2025-10-03T16:58:55Z +actor/myactortype/actorid2/test2 -3.89s 1 2025-10-03T16:58:55Z +app/test-scheduler/test1 -3.89s 1 2025-10-03T16:58:55Z +app/test-scheduler/test2 -3.89s 1 2025-10-03T16:58:55Z +activity/test-scheduler/xyz1::0::1 -888.8ms 0 +activity/test-scheduler/xyz2::0::1 -888.8ms 0 +workflow/test-scheduler/abc1/timer-0-TVIQGkvu +50.0h 0 +workflow/test-scheduler/abc2/timer-0-OM2xqG9m +50.0h 0 +``` + +For more detail, use the wide output format: + +```bash +dapr scheduler list -o wide +``` + +```yaml +NAMESPACE NAME BEGIN EXPIRATION SCHEDULE DUE TIME TTL REPEATS COUNT LAST TRIGGER +default actor/myactortype/actorid1/test1 2025-10-03T16:58:55Z @every 2h46m40s 2025-10-03T17:58:55+01:00 100 1 2025-10-03T16:58:55Z +default actor/myactortype/actorid2/test2 2025-10-03T16:58:55Z @every 2h46m40s 2025-10-03T17:58:55+01:00 100 1 2025-10-03T16:58:55Z +default app/test-scheduler/test1 2025-10-03T16:58:55Z @every 100m 2025-10-03T17:58:55+01:00 1234 1 
2025-10-03T16:58:55Z +default app/test-scheduler/test2 2025-10-03T16:58:55Z 2025-10-03T19:45:35Z @every 100m 2025-10-03T17:58:55+01:00 10000s 56788 1 2025-10-03T16:58:55Z +default activity/test-scheduler/xyz1::0::1 2025-10-03T16:58:58Z 0s 0 +default activity/test-scheduler/xyz2::0::1 2025-10-03T16:58:58Z 0s 0 +default workflow/test-scheduler/abc1/timer-0-TVIQGkvu 2025-10-05T18:58:58Z 2025-10-05T18:58:58Z 0 +default workflow/test-scheduler/abc2/timer-0-OM2xqG9m 2025-10-05T18:58:58Z 2025-10-05T18:58:58Z 0 +``` + +### Get job details + +```bash +dapr scheduler get app/my-app/job1 -o yaml ``` -### Performing Backup and Restore +### Delete jobs -Once you have access to the etcd ports, you can follow the [official etcd backup and restore documentation](https://etcd.io/docs/v3.5/op-guide/recovery/) to perform backup and restore operations. The process involves using standard etcd commands to create snapshots and restore from them. +Delete one or more specific jobs: + +```bash +dapr scheduler delete app/my-app/job1 actor/MyActor/123/reminder1 +``` + +Bulk delete jobs with filters: + +```bash +dapr scheduler delete-all all +dapr scheduler delete-all app/my-app +dapr scheduler delete-all actor/MyActorType +``` + +### Backup and restore jobs + +Export all jobs to a file: + +```bash +dapr scheduler export -o backup.bin +``` + +Re-import jobs from a backup file: + +```bash +dapr scheduler import -f backup.bin +``` ## Monitoring Scheduler's etcd Metrics @@ -155,7 +223,7 @@ For more information on running Dapr on Kubernetes, visit the [Kubernetes hostin A number of Etcd flags are exposed on Scheduler which can be used to tune for your deployment use case. -### External Etcd database +### External Etcd database Scheduler can be configured to use an external Etcd database instead of the embedded one inside the Scheduler service replicas. 
It may be interesting to decouple the storage volume from the Scheduler StatefulSet or container, because of how the cluster or environment is administered or what storage backend is being used. @@ -230,4 +298,5 @@ dapr_scheduler.etcdMaxSnapshots=10 ## Related links -[Learn more about the Jobs API.]({{% ref jobs_api %}}) +- [Learn more about the Jobs API.]({{% ref jobs_api %}}) +- [Learn more about Actor Reminders.]({{% ref "actors-features-concepts#reminders" %}}) diff --git a/daprdocs/content/en/concepts/overview.md b/daprdocs/content/en/concepts/overview.md index 273ab87e698..125b040c421 100644 --- a/daprdocs/content/en/concepts/overview.md +++ b/daprdocs/content/en/concepts/overview.md @@ -141,6 +141,14 @@ Dapr can be used from any developer framework. Here are some that have been inte | [JavaScript](https://github.com/dapr/js-sdk) | [Express](https://expressjs.com/) | Build Express applications with Dapr APIs | [PHP]({{% ref php %}}) | | You can serve with Apache, Nginx, or Caddyserver. +#### Dapr Agents + +![Dapr Agents Overview](/images/dapr-agents/concepts-agents-overview.png) + + +[Dapr Agents]({{% ref "../developing-ai/dapr-agents" %}}) is a Python framework for building intelligent, durable agents powered by LLMs. It provides agent-centric capabilities such as tool calling, memory management, [MCP support](https://modelcontextprotocol.io/) and agent orchestration, while leveraging Dapr for durability, observability, and security, at scale. 
+ + #### Integrations and extensions Visit the [integrations]({{% ref integrations %}}) page to learn about some of the first-class support Dapr has for various frameworks and external products, including: diff --git a/daprdocs/content/en/concepts/security-concept.md b/daprdocs/content/en/concepts/security-concept.md index fa284cde420..2008c38a039 100644 --- a/daprdocs/content/en/concepts/security-concept.md +++ b/daprdocs/content/en/concepts/security-concept.md @@ -244,7 +244,7 @@ The audit was a holistic security audit with the following goals: - Formalize a threat model of Dapr - Perform manual code review -- Evaluate Daprs fuzzing suite against the formalized threat model +- Evaluate Dapr's fuzzing suite against the formalized threat model - Carry out a SLSA review of Dapr. You can find the full report [here](/docs/Dapr-september-2023-security-audit-report.pdf). diff --git a/daprdocs/content/en/concepts/terminology.md b/daprdocs/content/en/concepts/terminology.md index 1c74bf22366..46a4dffe2d6 100644 --- a/daprdocs/content/en/concepts/terminology.md +++ b/daprdocs/content/en/concepts/terminology.md @@ -10,15 +10,17 @@ This page details all of the common terms you may come across in the Dapr docs. | Term | Definition | More information | |:-----|------------|------------------| -| App/Application | A running service/binary, usually one that you as the user create and run. +| App/Application | A running service/binary, usually one that you as the user create and run. | Building block | An API that Dapr provides to users to help in the creation of microservices and applications. | [Dapr building blocks]({{% ref building-blocks-concept %}}) | Component | Modular types of functionality that are used either individually or with a collection of other components, by a Dapr building block. | [Dapr components]({{% ref components-concept %}}) | Configuration | A YAML file declaring all of the settings for Dapr sidecars or the Dapr control plane. 
This is where you can configure control plane mTLS settings, or the tracing and middleware settings for an application instance. | [Dapr configuration]({{% ref configuration-concept %}}) | Dapr | Distributed Application Runtime. | [Dapr overview]({{% ref overview %}}) +| Dapr Actors | A Dapr building block that implements the virtual actor pattern for building stateful, single-threaded objects with identity, lifecycle, and concurrency management. | [Actors overview]({{% ref actors-overview %}}) +| Dapr Agents | A developer framework built on top of Dapr Python SDK for creating durable agentic applications powered by LLMs. | [Dapr Agents]({{% ref "../developing-ai/dapr-agents" %}}) | Dapr control plane | A collection of services that are part of a Dapr installation on a hosting platform such as a Kubernetes cluster. This allows Dapr-enabled applications to run on the platform and handles Dapr capabilities such as actor placement, Dapr sidecar injection, or certificate issuance/rollover. | [Self-hosted overview]({{% ref self-hosted-overview %}})
[Kubernetes overview]({{% ref kubernetes-overview %}}) +| Dapr Workflows | A Dapr building block for authoring code-first workflows with durable execution that survive crashes, support long-running processes, and enable human-in-the-loop interactions. | [Workflow overview]({{% ref workflow-overview %}}) | HTTPEndpoint | HTTPEndpoint is a Dapr resource use to identify non-Dapr endpoints to invoke via the service invocation API. | [Service invocation API]({{% ref service_invocation_api %}}) | Namespacing | Namespacing in Dapr provides isolation, and thus provides multi-tenancy. | Learn more about namespacing [components]({{% ref component-scopes %}}), [service invocation]({{% ref service-invocation-namespaces %}}), [pub/sub]({{% ref pubsub-namespaces %}}), and [actors]({{% ref namespaced-actors %}}) | Self-hosted | Windows/macOS/Linux machine(s) where you can run your applications with Dapr. Dapr provides the capability to run on machines in "self-hosted" mode. | [Self-hosted mode]({{% ref self-hosted-overview %}}) -| Service | A running application or binary. This can refer to your application or to a Dapr application. -| Sidecar | A program that runs alongside your application as a separate process or container. | [Sidecar pattern](https://docs.microsoft.com/azure/architecture/patterns/sidecar) - +| Service | A running application or binary. This can refer to your application or to a Dapr application. +| Sidecar | A program that runs alongside your application as a separate process or container. 
| [Sidecar pattern](https://docs.microsoft.com/azure/architecture/patterns/sidecar) diff --git a/daprdocs/content/en/contributing/docs-contrib/maintainer-guide.md b/daprdocs/content/en/contributing/docs-contrib/maintainer-guide.md index 1ebdfb88172..7c72665b2f0 100644 --- a/daprdocs/content/en/contributing/docs-contrib/maintainer-guide.md +++ b/daprdocs/content/en/contributing/docs-contrib/maintainer-guide.md @@ -103,24 +103,22 @@ These steps will prepare the latest release branch for archival. git checkout -b release_v1.0 ``` -1. In VS Code, navigate to `/daprdocs/config.toml`. -1. Add the following TOML to the `# Versioning` section (around line 154): - - ```toml - version_menu = "v1.0" - version = "v1.0" - archived_version = true - url_latest_version = "https://docs.dapr.io" - - [[params.versions]] - version = "v1.2 (preview)" - url = "v1-2.docs.dapr.io" - [[params.versions]] - version = "v1.1 (latest)" - url = "#" - [[params.versions]] - version = "v1.0" - url = "https://v1-0.docs.dapr.io" +1. In VS Code, navigate to `hugo.yaml` located in the root. +1. Add the following configuration to the `# Versioning` section (around line 121 and onwards): + + ```yaml + version_menu: "v1.0" + version: "v1.0" + archived_version: true + url_latest_version: "https://docs.dapr.io" + + versions: + - version: v1.2 (preview) + url: https://v1-2.docs.dapr.io + - version: v1.1 (latest) + url: "#" + - version: v1.0 + url: https://v1-0.docs.dapr.io ``` 1. Delete `.github/workflows/website-root.yml`. @@ -146,26 +144,25 @@ These steps will prepare the upcoming release branch for promotion to latest rel git checkout -b release_v1.1 ``` -1. In VS Code, navigate to `/daprdocs/config.toml`. -1. Update line 1 to `baseURL - https://docs.dapr.io/`. -1. Update the `# Versioning` section (around line 154) to display the correct versions and tags: +1. In VS Code, navigate to `hugo.yaml` located in the root. +1. Update line 1 to `baseURL: https://docs.dapr.io/`. +1. 
Update the `# Versioning` section (around line 121 and onwards) to display the correct versions and tags: - ```toml + ```yaml # Versioning - version_menu = "v1.1 (latest)" - version = "v1.1" - archived_version = false - url_latest_version = "https://docs.dapr.io" - - [[params.versions]] - version = "v1.2 (preview)" - url = "v1-2.docs.dapr.io" - [[params.versions]] - version = "v1.1 (latest)" - url = "#" - [[params.versions]] - version = "v1.0" - url = "https://v1-0.docs.dapr.io" + version_menu: "v1.1 (latest)" + version: "v1.1" + archived_version: false + url_latest_version: https://docs.dapr.io + github_branch: v1.1 + + versions: + - version: v1.2 (preview) + url: https://v1-2.docs.dapr.io + - version: v1.1 (latest) + url: "#" + - version: v1.0 + url: https://v1-0.docs.dapr.io ``` 1. Navigate to `.github/workflows/website-root.yml`. @@ -194,6 +191,7 @@ These steps will prepare the upcoming release branch for promotion to latest rel | [v1.2](https://github.com/dapr/docs/tree/v1.2) (pre-release) | https://v1-2.docs.dapr.io/ | Pre-release documentation. Doc updates that are only applicable to v1.2+ go here. | ``` +1. Update the _Supported versions_ table in `support-release-policy.md`; add a new line at the top of the table with the new version of the runtime and SDKs. Change the releases which are older than n-2 to be `Unsupported`. 1. Update the `dapr-latest-version.html` shortcode partial to the new minor/patch version (in this example, `1.1.0` and `1.1`). 1. Commit the staged changes and push to your branch (`release_v1.1`). 1. Open a PR from `release/v1.1` to `v1.1`. 
diff --git a/daprdocs/content/en/contributing/presentations.md b/daprdocs/content/en/contributing/presentations.md index 2f2b7147b2d..0f50ee96d3c 100644 --- a/daprdocs/content/en/contributing/presentations.md +++ b/daprdocs/content/en/contributing/presentations.md @@ -6,11 +6,11 @@ weight: 20 description: How to give a presentation on Dapr and examples --- -We encourage community members to give presentations on Dapr. To get you started quickly, we offer two PowerPoint files: +We encourage community members to give presentations on Dapr. To get you started quickly, we offer three PowerPoint files: -- *dapr-slidedeck.pptx*, this is a 150+ page slide deck and contains; an overview of Dapr, all of its building block APIs, cross-cutting concerns, hosting options, and assets to create your own architecture diagrams. -- *dapr-workflow-slidedeck.pptx*, this is a dedicated slide deck about Dapr Workflow and contains; durable execution concept, workflow authoring, workflow patterns, workflow management, and challenges & tips. -- *dapr-agents-slidedeck.pptx*, this is a dedicated slide deck about Dapr Agents and contains; AI agents explanation, Dapr Agent types, multi-agent systems, and agentic patterns. +- *dapr-slidedeck.pptx*, this is a 150+ page slide deck and contains: an overview of Dapr, all of its building block APIs, cross-cutting concerns, hosting options, and assets to create your own architecture diagrams. +- *dapr-workflow-slidedeck.pptx*, this is a dedicated slide deck about Dapr Workflow and contains: durable execution concept, workflow authoring, workflow patterns, workflow management, and challenges & tips. +- *dapr-agents-slidedeck.pptx*, this is a dedicated slide deck about Dapr Agents and contains: AI agents explanation, Dapr Agent types, multi-agent systems, and agentic patterns. There is a downloadable zip file that contains all slide decks. 
@@ -25,7 +25,7 @@ brew install --cask font-space-grotesk ## Giving a Dapr presentation -- Begin by downloading the [Dapr Presentation Decks](/presentations/dapr-slidedecks.zip). These contain slides, diagrams, and graphical assets needed to give a Dapr presentation. +- Begin by downloading the [Dapr Presentation Decks](/presentations/dapr-slidedecks.zip). These contain slides, diagrams, and graphical assets. - Next, review the docs to make sure you understand the [concepts]({{% ref concepts %}}). - Use the Dapr [quickstarts](https://github.com/dapr/quickstarts) repo to show demos of how to use Dapr. - Once you've done a Dapr presentation, claim the *Dapr Presenter* badge by adding your presentation to [this table](https://github.com/dapr/community/tree/master/presentations) in the Dapr Community repository. diff --git a/daprdocs/content/en/developing-ai/_index.md b/daprdocs/content/en/developing-ai/_index.md new file mode 100644 index 00000000000..a0486084ba7 --- /dev/null +++ b/daprdocs/content/en/developing-ai/_index.md @@ -0,0 +1,7 @@ +--- +type: docs +title: "Developing AI with Dapr" +linkTitle: "Developing AI" +description: "Information on how to build reliable and secure agentic AI systems with Dapr" +weight: 31 +--- diff --git a/daprdocs/content/en/developing-ai/agent-integrations/_index.md b/daprdocs/content/en/developing-ai/agent-integrations/_index.md new file mode 100644 index 00000000000..7f578a4d80d --- /dev/null +++ b/daprdocs/content/en/developing-ai/agent-integrations/_index.md @@ -0,0 +1,22 @@ +--- +type: docs +title: "Agent Integrations" +linkTitle: "Agent Integrations" +weight: 25 +description: "Information on how to integrate agentic frameworks with Dapr runtime" +--- + +### What are agent integrations in Dapr? 
+ +Dapr augments and enhances other agentic frameworks by providing them with key critical features for running in production: + +* Durable execution using [Dapr Workflows]({{% ref workflow-overview %}}) for resilient and long-running AI tasks +* Portable agent context & memory using Dapr's [State Management API]({{% ref "state-management-overview" %}}) +* Reliable and secure agent-to-agent communication using [Dapr Pub/Sub]({{% ref "pubsub-overview" %}}) and [Service Invocation +]({{% ref service-invocation-overview %}}) +* Secure agent [identity]({{< ref "concepts/security-concept" >}}#application-identity) + +{{< button text="Install Dapr" page="getting-started.md" >}} + +With Dapr, developers writing AI systems using the framework of their choice enjoy accelerated development via the Dapr APIs and gain confidence taking agentic systems into production. + \ No newline at end of file diff --git a/daprdocs/content/en/developing-ai/agent-integrations/crewai/_index.md b/daprdocs/content/en/developing-ai/agent-integrations/crewai/_index.md new file mode 100644 index 00000000000..7b28fd6ae56 --- /dev/null +++ b/daprdocs/content/en/developing-ai/agent-integrations/crewai/_index.md @@ -0,0 +1,11 @@ +--- +type: docs +title: "CrewAI" +linkTitle: "CrewAI" +weight: 25 +description: "Dapr first-class integrations with CrewAI Agents" +--- + +### What is the Dapr CrewAI integration? + +Dapr provides CrewAI agents first class integrations that range from agent session management to connecting agents via pub/sub and orchestrating agentic workflows. 
diff --git a/daprdocs/content/en/developing-ai/agent-integrations/crewai/crewai-workflows.md b/daprdocs/content/en/developing-ai/agent-integrations/crewai/crewai-workflows.md new file mode 100644 index 00000000000..e6ea8a79f9a --- /dev/null +++ b/daprdocs/content/en/developing-ai/agent-integrations/crewai/crewai-workflows.md @@ -0,0 +1,212 @@ +--- +type: docs +title: "CrewAI Workflows" +linkTitle: "CrewAI Workflows" +weight: 25 +description: "How to run CrewAI agents with durable, fault-tolerant execution using Dapr Workflows" +--- + +## Overview + +Dapr Workflows make it possible to run CrewAI agents **reliably**, **durably**, and **with built-in resiliency**. +By orchestrating CrewAI tasks with the Dapr Workflow engine, developers can: + +- Ensure long-running CrewAI work survives crashes and restarts. +- Get automatic checkpoints, retries, and state recovery. +- Run each CrewAI task as a durable activity. +- Observe execution through tracing, metrics, and structured logs. + +This guide walks through orchestrating multiple CrewAI tasks using Dapr Workflows, ensuring each step is run *exactly once* even if the process restarts. + +## Getting Started + +Initialize Dapr locally to set up a self-hosted environment for development. This process installs the Dapr sidecar binaries, provisions the workflow engine, and prepares a default components directory. For full details, see [guide on initializing Dapr locally]({{% ref install-dapr-selfhost.md %}}). + +Initialize Dapr: + +```bash +dapr init +``` + +Verify that daprio/dapr, openzipkin/zipkin, and redis are running: + +```bash +docker ps +``` + +### Install Python + +{{% alert title="Note" color="info" %}} +Make sure you have Python already installed. `Python >=3.10`. For installation instructions, visit the official [Python installation guide](https://www.python.org/downloads/). 
+{{% /alert %}} + +### Create a Python Virtual Environment (recommended) + +```bash +python -m venv .venv +source .venv/bin/activate # Windows: .venv\Scripts\activate +``` + +### Install Dependencies + +```bash +pip install dapr dapr-ext-workflow crewai +``` + +### Create a Workflow to Run CrewAI Tasks + +Create a file named crewai_workflow.py and paste the following: + +```python +from dapr.ext.workflow import ( + WorkflowRuntime, + DaprWorkflowContext, + WorkflowActivityContext, + DaprWorkflowClient, +) +from crewai import Agent, Task, Crew +import time + +wfr = WorkflowRuntime() + +# ------------------------------------------------------------ +# 1. Define Agent, Tasks, and Task Dictionary +# ------------------------------------------------------------ +agent = Agent( + role="Research Analyst", + goal="Research and summarize impactful technology updates.", + backstory="A skilled analyst who specializes in researching and summarizing technology topics.", +) + +tasks = { + "latest_ai_news": Task( + description="Find the latest news about artificial intelligence.", + expected_output="A 3-paragraph summary of the top 3 stories.", + agent=agent, + ), + "ai_startup_launches": Task( + description="Summarize the most impactful AI startup launches in the last 6 months.", + expected_output="A list summarizing 2 AI startups with links.", + agent=agent, + ), + "ai_policy_updates": Task( + description="Summarize the newest AI government policy and regulation updates.", + expected_output="A bullet-point list summarizing the latest policy changes.", + agent=agent, + ), +} + +# ------------------------------------------------------------ +# 2. 
Activity — runs ONE task by name +# ------------------------------------------------------------ +@wfr.activity(name="run_task") +def run_task_activity(ctx: WorkflowActivityContext, task_name: str): + print(f"Running CrewAI task: {task_name}", flush=True) + + task = tasks[task_name] + + # Create a Crew for just this one task + temp_crew = Crew(agents=[agent], tasks=[task]) + + # kickoff() works across CrewAI versions + result = temp_crew.kickoff() + + return str(result) + +# ------------------------------------------------------------ +# 3. Workflow — orchestrates tasks durably +# ------------------------------------------------------------ +@wfr.workflow(name="crewai_multi_task_workflow") +def crewai_workflow(ctx: DaprWorkflowContext): + print("Starting multi-task CrewAI workflow", flush=True) + + latest_news = yield ctx.call_activity(run_task_activity, input="latest_ai_news") + startup_summary = yield ctx.call_activity(run_task_activity, input="ai_startup_launches") + policy_updates = yield ctx.call_activity(run_task_activity, input="ai_policy_updates") + + return { + "latest_news": latest_news, + "startup_summary": startup_summary, + "policy_updates": policy_updates, + } + +# ------------------------------------------------------------ +# 4. Runtime + Client (entry point) +# ------------------------------------------------------------ +if __name__ == "__main__": + wfr.start() + + client = DaprWorkflowClient() + instance_id = "crewai-multi-01" + + client.schedule_new_workflow( + workflow=crewai_workflow, + input=None, + instance_id=instance_id + ) + + state = client.wait_for_workflow_completion(instance_id, timeout_in_seconds=60) + print(state.serialized_output) +``` + +This CrewAI agent starts a workflow that does news gathering and summary for the subjects of AI and startups. + +### Create the Workflow Database Component + +Dapr Workflows persist durable state using any [Dapr state store]({{% ref supported-state-stores %}}) that supports workflows. 
+Create a directory named `components`, then create the file workflowstore.yaml: + +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: workflowstore +spec: + type: state.redis + version: v1 + metadata: + - name: redisHost + value: localhost:6379 + - name: redisPassword + value: "" + - name: actorStateStore + value: "true" +``` + +This component stores: + +* Code execution checkpoints +* Execution history +* Deterministic resumption state +* Final output data + +### Set a CrewAI LLM Provider + +CrewAI needs an LLM configuration or token to run. See instructions [here](https://docs.crewai.com/en/concepts/llms#setting-up-your-llm). + +For example, to set up OpenAI: + +``` +export OPENAI_API_KEY=sk-... +``` + +### Run the Workflow + +Launch the CrewAI workflow using the Dapr CLI: + +```bash +dapr run \ + --app-id crewaiwf \ + --dapr-grpc-port 50001 \ + --resources-path ./components \ + -- python3 ./crewai_workflow.py +``` + +As the workflow runs, each CrewAI task is executed as a durable activity. +If the process crashes, the workflow resumes exactly where it left off. You can try this by killing the process after the first activity and then rerunning that command line above with the same app ID. + +Open Zipkin to view workflow traces: + +``` +http://localhost:9411 +``` diff --git a/daprdocs/content/en/developing-ai/agent-integrations/openai-agents/_index.md b/daprdocs/content/en/developing-ai/agent-integrations/openai-agents/_index.md new file mode 100644 index 00000000000..83d119d11d3 --- /dev/null +++ b/daprdocs/content/en/developing-ai/agent-integrations/openai-agents/_index.md @@ -0,0 +1,12 @@ +--- +type: docs +title: "OpenAI Agents" +linkTitle: "OpenAI Agents" +weight: 25 +description: "Dapr first-class integrations for OpenAI Agents" +--- + +### What is the Dapr OpenAI Agents integration? 
+ +Dapr provides OpenAI agents first class integrations that range from agent session management to connecting agents via pub/sub and orchestrating agentic workflows. The Dapr OpenAI integration is an extension in the OpenAI Python SDK that developers can use to augment OpenAI agents with the various Dapr APIs. + \ No newline at end of file diff --git a/daprdocs/content/en/developing-ai/agent-integrations/openai-agents/openai-agents-sessions.md b/daprdocs/content/en/developing-ai/agent-integrations/openai-agents/openai-agents-sessions.md new file mode 100644 index 00000000000..d0b2c0d9764 --- /dev/null +++ b/daprdocs/content/en/developing-ai/agent-integrations/openai-agents/openai-agents-sessions.md @@ -0,0 +1,127 @@ +--- +type: docs +title: "Agent Sessions" +linkTitle: "Agent Sessions" +weight: 20 +description: "How to use Dapr to reliably and securely manage agent state" +--- + +## Overview + +By using Dapr to manage the state and [session data for OpenAI agents](https://openai.github.io/openai-agents-python/sessions/), users can store agent state in all databases supported by Dapr, including key/value stores, caches and SQL databases. Developers also get built-in tracing, metrics and resiliency policies that make agent session data operate reliably in production. + +## Getting Started + +Initialize Dapr locally to set up a self-hosted environment for development. This process fetches and installs the Dapr sidecar binaries, runs essential services as Docker containers, and prepares a default components folder for your application. For detailed steps, see the official [guide on initializing Dapr locally]({{% ref install-dapr-cli.md %}}). 
+ +To initialize the Dapr control plane containers and create a default configuration file, run: + +```bash +dapr init +``` + +Verify you have container instances with `daprio/dapr`, `openzipkin/zipkin`, and `redis` images running: + +```bash +docker ps +``` + +### Install Python + +{{% alert title="Note" color="info" %}} +Make sure you have Python already installed. `Python >=3.10`. For installation instructions, visit the official [Python installation guide](https://www.python.org/downloads/). +{{% /alert %}} + +### Install Dependencies + +```bash +pip install openai-agents dapr +``` + +### Create an OpenAI Agent + +Let's create a simple OpenAI agent. Put the following in a file named `openai_agent.py`: + +```python +import asyncio +from agents import Agent, Runner +from agents.extensions.memory.dapr_session import DaprSession + +async def main(): + agent = Agent( + name="Assistant", + instructions="Reply very concisely.", + ) + + session = DaprSession.from_address( + session_id="123", + state_store_name="statestore" + ) + + result = await Runner.run(agent, "What city is the Golden Gate Bridge in?", session=session) + print(result.final_output) + + result = await Runner.run(agent, "What state is it in?", session=session) + print(result.final_output) + + result = await Runner.run(agent, "What's the population?", session=session) + print(result.final_output) + +asyncio.run(main()) +``` + +### Set an OpenAI API key + +```bash +export OPENAI_API_KEY=sk-... +``` + +### Create a Python venv + +```bash +python -m venv .venv +source .venv/bin/activate # On Windows: .venv\Scripts\activate +``` + +### Create the database component + +The component file is how Dapr connects to your database. The full list of supported databases can be found [here]({{% ref supported-state-stores %}}). 
Create a `components` directory and this file in it: + +`statestore.yaml`: + +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: statestore +spec: + type: state.redis + version: v1 + metadata: + - name: redisHost + value: localhost:6379 + - name: redisPassword + value: "" +``` + +### Run The Agent + +Now run the local Dapr process and your Python script using the Dapr CLI. + +```bash +dapr run --app-id openaisessions --dapr-grpc-port 50001 --resources-path ./components -- python3 ./openai_agent.py +``` + +Open `http://localhost:9411` to view the traces and dependency graph. + +You can see [the session data stored in Redis]({{% ref "getting-started/get-started-api" %}}#step-4-see-how-the-state-is-stored-in-redis) with the following command: + +```bash +hgetall "123:messages" +``` + +## Next Steps + +Now that you have an OpenAI agent using Dapr to manage the agent sessions, explore more you can do with the [State API]({{% ref "state-management-overview" %}}) and how to enable [resiliency policies]({{% ref resiliency-overview %}}) for enhanced reliability. + +Read more about OpenAI agent sessions and Dapr [here](https://openai.github.io/openai-agents-python/sessions/). diff --git a/daprdocs/content/en/developing-applications/dapr-agents/_index.md b/daprdocs/content/en/developing-ai/dapr-agents/_index.md similarity index 91% rename from daprdocs/content/en/developing-applications/dapr-agents/_index.md rename to daprdocs/content/en/developing-ai/dapr-agents/_index.md index cfa94c00bad..e75e9f12c60 100644 --- a/daprdocs/content/en/developing-applications/dapr-agents/_index.md +++ b/daprdocs/content/en/developing-ai/dapr-agents/_index.md @@ -4,6 +4,8 @@ title: "Dapr Agents" linkTitle: "Dapr Agents" weight: 25 description: "A framework for building durable and resilient AI agent systems at scale" +aliases: + - /developing-applications/dapr-agents --- ### What is Dapr Agents? 
diff --git a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-core-concepts.md b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-core-concepts.md similarity index 59% rename from daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-core-concepts.md rename to daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-core-concepts.md index 7398c762274..e2515d21743 100644 --- a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-core-concepts.md +++ b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-core-concepts.md @@ -4,6 +4,8 @@ title: "Core Concepts" linkTitle: "Core Concepts" weight: 40 description: "Learn about the core concepts of Dapr Agents" +aliases: + - /developing-applications/dapr-agents/dapr-agents-core-concepts --- Dapr Agents provides a structured way to build and orchestrate applications that use LLMs without getting bogged down in infrastructure details. The primary goal is to enable AI development by abstracting away the complexities of working with LLMs, tools, memory management, and distributed systems, allowing developers to focus on the business logic of their AI applications. Agents in this framework are the fundamental building blocks. 
@@ -29,9 +31,15 @@ async def main(): weather_agent = Agent( name="WeatherAgent", role="Weather Assistant", + goal="Provide timely weather updates across cities", instructions=["Help users with weather information"], tools=[my_weather_func], - memory=ConversationDaprStateMemory(store_name="historystore", session_id="some-id"), + memory = AgentMemoryConfig( + store=ConversationDaprStateMemory( + store_name="historystore", + session_id="some-id", + ) + ), ) response1 = await weather_agent.run("What's the weather?") @@ -46,28 +54,36 @@ The `DurableAgent` class is a workflow-based agent that extends the standard Age ```python -travel_planner = DurableAgent( +from dapr_agents.workflow.runners import AgentRunner + +async def main(): + travel_planner = DurableAgent( name="TravelBuddy", role="Travel Planner", + goal="Help users find flights and remember preferences", instructions=["Help users find flights and remember preferences"], tools=[search_flights], - memory=ConversationDaprStateMemory( - store_name="conversationstore", session_id="my-unique-id" - ), - - # DurableAgent Configurations - message_bus_name="messagepubsub", - state_store_name="workflowstatestore", - state_key="workflow_state", - agents_registry_store_name="registrystatestore", - agents_registry_key="agents_registry", + memory = AgentMemoryConfig( + store=ConversationDaprStateMemory( + store_name="conversationstore", + session_id="travel-session", + ) + ) ) - travel_planner.as_service(port=8001) - await travel_planner.start() + runner = AgentRunner() + + try: + itinerary = await runner.run( + travel_planner, + payload={"task": "Plan a 3-day trip to Paris"}, + ) + print(itinerary) + finally: + runner.shutdown(travel_planner) ``` -This example demonstrates creating a workflow-backed agent that runs autonomously in the background. The agent can be triggered once and continues execution even across system restarts. 
+This example demonstrates creating a workflow-backed agent that runs autonomously in the background. The `AgentRunner` schedules the workflow for you, waits for completion, and ensures the agent can be triggered once yet continue execution across restarts. **Key Characteristics:** - Workflow-based execution using Dapr Workflows @@ -75,6 +91,7 @@ This example demonstrates creating a workflow-backed agent that runs autonomousl - Automatic retry and recovery mechanisms - Deterministic execution with checkpointing - Built-in message routing and agent communication +- `AgentRunner` modes for DurableAgents: ad-hoc runs (`runner.run(...)`), pub/sub subscriptions (`runner.subscribe(...)`), and FastAPI services (`runner.serve(...)`) - Supports complex orchestration patterns and multi-agent collaboration **When to use:** @@ -177,35 +194,52 @@ tools = client.get_all_tools() Once connected, the MCP client fetches all available tools from the server and prepares them for immediate use within the agent’s toolset. This allows agents to incorporate capabilities exposed by external processes—such as local Python scripts or remote services without hardcoding or preloading them. Agents can invoke these tools at runtime, expanding their behavior based on what’s offered by the active MCP server. + ### Memory Agents retain context across interactions, enhancing their ability to provide coherent and adaptive responses. Memory options range from simple in-memory lists for managing chat history to vector databases for semantic search, and also integrates with [Dapr state stores](https://docs.dapr.io/developing-applications/building-blocks/state-management/howto-get-save-state/), for scalable and persistent memory for advanced use cases from 28 different state store providers. 
```python -# ConversationListMemory (Simple In-Memory) - Default +from dapr_agents import Agent, DurableAgent +from dapr_agents.agents.configs import AgentMemoryConfig +from dapr_agents.memory import ( + ConversationDaprStateMemory, + ConversationListMemory, + ConversationVectorMemory, +) + +# 1. ConversationListMemory (Simple In-Memory) - Default memory_list = ConversationListMemory() -# ConversationVectorMemory (Vector Store) +# 2. ConversationVectorMemory (Vector Store) memory_vector = ConversationVectorMemory( vector_store=your_vector_store_instance, - distance_metric="cosine" + distance_metric="cosine", ) -# 3. ConversationDaprStateMemory (Dapr State Store) -memory_dapr = ConversationDaprStateMemory( - store_name="historystore", # Maps to Dapr component name - session_id="some-id" +# 3. ConversationDaprStateMemory (Dapr State Store) via AgentMemoryConfig +durable_memory = AgentMemoryConfig( + store=ConversationDaprStateMemory( + store_name="historystore", # Dapr component name + session_id="my-session", + ) ) -# Using with an agent +# Using with a regular Agent (pass the memory instance directly) agent = Agent( name="MyAgent", role="Assistant", - memory=memory_dapr # Pass any memory implementation + memory=memory_list, ) +# Using with a DurableAgent (pass the AgentMemoryConfig) +travel_planner = DurableAgent( + name="TravelBuddy", + memory=durable_memory, + # ... other configs ... +) ``` -`ConversationListMemory` is the default memory implementation when none is specified. It provides fast, temporary storage in Python lists for development and testing. The Dapr's memory implementations are interchangeable, allowing you to switch between them without modifying your agent logic. +`ConversationListMemory` is the default memory implementation when none is specified. It provides fast, temporary storage in Python lists for development and testing. 
Dapr's memory implementations (all found in `dapr_agents.memory`) are interchangeable, allowing you to switch between them without modifying your agent logic or deployment model. | Memory Implementation | Type | Persistence | Search | Use Case | |---|---|---|---|---| @@ -216,20 +250,70 @@ agent = Agent( ### Agent Services -`DurableAgents` are exposed as independent services using [FastAPI and Dapr applications](https://docs.dapr.io/developing-applications/sdks/python/python-sdk-extensions/python-fastapi/). This modular approach separates the agent's logic from its service layer, enabling seamless reuse, deployment, and integration into multi-agent systems. +`AgentRunner` wires DurableAgents into three complementary hosting modes: + +1. **`run`** – trigger a durable workflow directly from Python (CLIs, tests, notebooks) and optionally wait for completion. +2. **`subscribe`** – automatically register every `@message_router` decorated handler on the agent (including `DurableAgent.agent_workflow`) so CloudEvents on the configured topics are validated against their `message_model` and scheduled as workflow runs. +3. **`serve`** – host the agent as a web service by combining `subscribe` with FastAPI route registration and an auto-started Uvicorn server. By default it exposes `POST /run` (schedules the `@workflow_entry`) and `GET /run/{instance_id}` (fetches workflow status), but you can supply your own FastAPI app or customize host/port/paths. 
```python -travel_planner.as_service(port=8001) -await travel_planner.start() +travel_planner = DurableAgent( + name="TravelBuddy", + role="Travel Planner", + goal="Help humans find flights and remember preferences", + instructions=[ + "Find flights to destinations", + "Remember user preferences", + "Provide clear flight info.", + ], + tools=[search_flights], +) +runner = AgentRunner() ``` -This exposes the agent as a REST service, allowing other systems to interact with it through standard HTTP requests such as this one: +The snippets below reuse this `travel_planner` instance to illustrate each mode. + +#### 1. Ad-hoc execution with `runner.run(...)` + +Use `run` when you want to trigger a durable workflow directly from Python code (tests, CLIs, notebooks, etc.). The runner locates the agent's `@workflow_entry` and schedules it. The `.run()` command is a blocking call that triggers the agent and waits for its completion. + +```python +result = await runner.run( + travel_planner, + payload={"task": "Plan a 3-day trip to Paris"}, +) +print(result) ```
+ +```python +runner.subscribe(travel_planner) +await wait_for_shutdown() ``` -Unlike conversational agents that provide immediate synchronous responses, durable agents operate as headless services that are triggered asynchronously. You trigger it, receive a workflow instance ID, and can track progress over time. This enables long-running, fault-tolerant operations that can span multiple systems and survive restarts, making them ideal for complex multi-step processes in environments requiring high levels of durability and resiliency. + +Add your own `@message_router` methods to support extra topics or broadcast channels—the runner will discover them automatically and route messages to the appropriate handler. Use helpers such as `wait_for_shutdown()` (from `dapr_agents.workflow.utils.core`) to keep the process alive until you stop it. + +#### 3. FastAPI services with `runner.serve(...)` + +`serve` is the one-line way to run a DurableAgent as a web service. It first calls `subscribe(...)`, then spins up a FastAPI app (unless you pass your own) with two default endpoints: + +- `POST /run`: Validates the JSON body against the agent's `@workflow_entry` signature and schedules a new workflow instance. +- `GET /run/{instance_id}`: Proxies workflow status queries (including payloads, if requested). + +```python +runner.serve( + travel_planner, + port=8001, +) +``` + +Because workflows are durable, the `/run` endpoint responds immediately with an instance ID even though the agent keeps working in the background. You can mount the generated FastAPI routes into a larger application or let `serve` run its own Uvicorn loop for standalone deployments. + ## Multi-agent Systems (MAS) @@ -253,53 +337,121 @@ Workflows are structured processes where LLM agents and tools collaborate in pre This approach is particularly suitable for business-critical applications where you need both the intelligence of LLMs and the reliability of traditional software systems. 
```python -# Define Workflow logic -@workflow(name="task_chain_workflow") +import dapr.ext.workflow as wf +from dapr.ext.workflow import DaprWorkflowContext + +from dapr_agents.llm.dapr import DaprChatClient +from dapr_agents.workflow.decorators import llm_activity + +runtime = wf.WorkflowRuntime() +llm = DaprChatClient(component_name="openai") + + +@runtime.workflow(name="task_chain_workflow") def task_chain_workflow(ctx: DaprWorkflowContext): - result1 = yield ctx.call_activity(get_character) - result2 = yield ctx.call_activity(get_line, input={"character": result1}) - return result2 + character = yield ctx.call_activity(get_character) + line = yield ctx.call_activity(get_line, input={"character": character}) + return line + -@task(description="Pick a random character from The Lord of the Rings and respond with the character's name only") -def get_character() -> str: +@runtime.activity(name="get_character") +@llm_activity( + prompt="Pick a random character from The Lord of the Rings. Respond with the name only.", + llm=llm, +) +def get_character(ctx) -> str: pass -@task(description="What is a famous line by {character}") -def get_line(character: str) -> str: + +@runtime.activity(name="get_line") +@llm_activity( + prompt="What is a famous line by {character}?", + llm=llm, +) +def get_line(ctx, character: str) -> str: pass + + +runtime.start() +client = wf.DaprWorkflowClient() +instance_id = client.schedule_new_workflow(task_chain_workflow) +state = client.wait_for_workflow_completion(instance_id) +print(state.serialized_output) +runtime.shutdown() ``` -This workflow demonstrates sequential task execution where the output of one task becomes the input for the next, enabling complex multi-step processes with clear dependencies and data flow. +This workflow demonstrates sequential task execution where the output of one LLM-backed activity becomes the input for the next. 
The `@llm_activity` decorator wires prompts, formatting, and response handling so activities stay deterministic while still using model reasoning. Dapr Agents supports coordination of LLM interactions at different levels of granularity: -### Prompt Tasks -Tasks created from prompts that leverage LLM reasoning capabilities for specific, well-defined operations. +### LLM Activities +`@llm_activity` binds a workflow activity to a prompt, LLM client, and optional structured output. The decorated function body can stay empty because the decorator handles prompting, retries, and response parsing. ```python -@task(description="Pick a random character from The Lord of the Rings and respond with the character's name only") -def get_character() -> str: +llm = DaprChatClient(component_name="openai") + +@runtime.activity(name="generate_outline") +@llm_activity( + prompt="Create a short outline about {topic}.", + llm=llm, +) +def generate_outline(ctx, topic: str) -> str: pass ``` -While technically not full agents (as they lack tools and memory), prompt tasks serve as lightweight agentic building blocks that perform focused LLM interactions within the broader workflow context. +LLM activities are perfect for lightweight reasoning steps, extraction tasks, or summarization stages that need deterministic workflow control with LLM flexibility. -### Agent Tasks -Tasks based on agents with tools, providing greater flexibility and capability for complex operations requiring external integrations. +### Agent Activities +`@agent_activity` lets workflows call fully-configured `Agent` instances (tools, memory, instructions) as activities. The workflow provides the inputs, and the decorator routes execution through the agent’s reasoning loop. 
```python -@task(agent=custom_agent, description="Retrieve stock data for {ticker}") -def get_stock_data(ticker: str) -> dict: +planner = Agent( + name="PlannerAgent", + role="Trip planner", + instructions=["Create a concise 3-day plan for any city."], + llm=DaprChatClient(component_name="openai"), +) + +@runtime.activity(name="plan_outline") +@agent_activity(agent=planner) +def plan_outline(ctx, destination: str) -> dict: pass ``` -Agent tasks enable workflows to leverage specialized agents with their own tools, memory, and reasoning capabilities while maintaining the structured coordination benefits of workflow orchestration. -> **Note:** Agent tasks must use regular `Agent` instances, not `DurableAgent` instances, as workflows manage the execution context and durability through the Dapr workflow engine. +Agent activities enable workflows to leverage specialized agents with their own tools, memory, and reasoning capabilities while maintaining the structured coordination benefits of workflow orchestration. + +> **Note:** Agent activities must use regular `Agent` instances, not `DurableAgent` instances, because workflows manage the execution context and durability through the Dapr workflow engine. ### Workflow Patterns Workflows enable the implementation of various agentic patterns through structured orchestration, including Prompt Chaining, Routing, Parallelization, Orchestrator-Workers, Evaluator-Optimizer, Human-in-the-loop, and others. For detailed implementations and examples of these patterns, see the [Patterns documentation]({{< ref dapr-agents-patterns.md >}}). +### Message Router Workflows + +The `@message_router` decorator binds a workflow directly to a Dapr Pub/Sub topic so every validated message automatically schedules a workflow instance. This pattern—used in the message-router quickstart—lets you push CloudEvent payloads onto a topic and have LLM-backed activities take over immediately. 
+ +```python +from pydantic import BaseModel +from dapr_agents.workflow.decorators.routers import message_router + +class StartBlogMessage(BaseModel): + topic: str + +@message_router( + pubsub="messagepubsub", + topic="blog.requests", + message_model=StartBlogMessage, +) +def blog_workflow(ctx: DaprWorkflowContext, wf_input: dict) -> str: + outline = yield ctx.call_activity( + create_outline, input={"topic": wf_input["topic"]} + ) + post = yield ctx.call_activity(write_post, input={"outline": outline}) + return post +``` + +During startup, call `register_message_routes(targets=[blog_workflow], dapr_client=client)` to automatically configure subscriptions, schema validation, and workflow scheduling. This keeps the workflow definition as the single source of truth for both orchestration and event ingress. + ### Workflows vs. Durable Agents Both DurableAgent and workflow-based agent orchestration use Dapr workflows behind the scenes for durability and reliability, but they differ in how control flow is determined. 
@@ -327,15 +479,57 @@ The core participants in this multi-agent coordination systems are the following Each agent runs as an independent service with its own lifecycle, configured as a standard DurableAgent with pub/sub enabled: ```python -hobbit_service = DurableAgent( - name="Frodo", - instructions=["Speak like Frodo, with humility and determination."], - message_bus_name="messagepubsub", - state_store_name="workflowstatestore", - state_key="workflow_state", - agents_registry_store_name="agentstatestore", - agents_registry_key="agents_registry", +import asyncio + +from dapr_agents.agents.configs import ( + AgentMemoryConfig, + AgentProfileConfig, + AgentPubSubConfig, + AgentRegistryConfig, + AgentStateConfig, +) +from dapr_agents.memory import ConversationDaprStateMemory +from dapr_agents.storage.daprstores.stateservice import StateStoreService +from dapr_agents.workflow.runners import AgentRunner +from dapr_agents.workflow.utils.core import wait_for_shutdown + +registry = AgentRegistryConfig( + store=StateStoreService(store_name="agentregistrystore"), + team_name="fellowship", ) + +frodo = DurableAgent( + profile=AgentProfileConfig( + name="Frodo", + role="Ring Bearer", + instructions=["Speak like Frodo, with humility and determination."], + ), + pubsub=AgentPubSubConfig( + pubsub_name="messagepubsub", + agent_topic="fellowship.frodo.requests", + broadcast_topic="fellowship.broadcast", + ), + state=AgentStateConfig( + store=StateStoreService(store_name="workflowstatestore", key_prefix="frodo:") + ), + registry=registry, + memory=AgentMemoryConfig( + store=ConversationDaprStateMemory( + store_name="memorystore", + session_id="frodo-session", + ) + ), +) + +async def main(): + runner = AgentRunner() + try: + runner.subscribe(frodo) + await wait_for_shutdown() + finally: + runner.shutdown(frodo) + +asyncio.run(main()) ``` #### Orchestrator @@ -343,18 +537,45 @@ hobbit_service = DurableAgent( The orchestrator coordinates interactions between agents and manages 
conversation flow by selecting appropriate agents, managing interaction sequences, and tracking progress. Dapr Agents offers three orchestration strategies: Random, RoundRobin, and LLM-based orchestration. ```python +from dapr_agents.agents.configs import ( + AgentExecutionConfig, + AgentPubSubConfig, + AgentRegistryConfig, + AgentStateConfig, +) +from dapr_agents.llm.openai import OpenAIChatClient +from dapr_agents.storage.daprstores.stateservice import StateStoreService +from dapr_agents.workflow.runners import AgentRunner +import dapr.ext.workflow as wf + llm_orchestrator = LLMOrchestrator( name="LLMOrchestrator", - message_bus_name="messagepubsub", - state_store_name="agenticworkflowstate", - state_key="workflow_state", - agents_registry_store_name="agentstatestore", - agents_registry_key="agents_registry", - max_iterations=3 + llm=OpenAIChatClient(), + pubsub=AgentPubSubConfig( + pubsub_name="messagepubsub", + agent_topic="llm.orchestrator.requests", + broadcast_topic="fellowship.broadcast", + ), + state=AgentStateConfig( + store=StateStoreService( + store_name="workflowstatestore", key_prefix="llm.orchestrator:" + ) + ), + registry=AgentRegistryConfig( + store=StateStoreService(store_name="agentregistrystore"), + team_name="fellowship", + ), + execution=AgentExecutionConfig(max_iterations=3), + runtime=wf.WorkflowRuntime(), ) + +runner = AgentRunner() +runner.serve(llm_orchestrator, port=8004) ``` -The LLM-based orchestrator uses intelligent agent selection for context-aware decision making, while Random and RoundRobin provide alternative coordination strategies for simpler use cases. +The LLM-based orchestrator uses intelligent agent selection for context-aware decision making, while Random and RoundRobin provide alternative coordination strategies for simpler use cases. The runner keeps the orchestrator online as a Dapr app or HTTP service so clients can publish tasks over topics or REST calls. 
+ +Because both `DurableAgent.agent_workflow` and the orchestrators above are decorated with `@message_router(message_model=TriggerAction)`, `runner.subscribe(...)` automatically wires the topics declared in `AgentPubSubConfig` and validates every incoming CloudEvent against the expected schema before scheduling the `@workflow_entry`. You can add additional message routers (each with its own `message_model`) to the same agent; the runner will discover them the next time it starts and extend the subscription list automatically. ### Communication Flow @@ -391,4 +612,4 @@ Pub/Sub messaging is essential for event-driven agentic workflows because it: * **Fosters Collaboration**: Multiple agents can subscribe to the same topic, making it easy to share updates or divide responsibilities. * **Enables Scalability**:The message bus ensures that communication scales effortlessly, whether you are adding new agents, expanding workflows, or adapting to changing requirements. Agents remain loosely coupled, allowing workflows to evolve without disruptions. -This messaging framework ensures that agents operate efficiently, workflows remain flexible, and systems can scale dynamically. \ No newline at end of file +This messaging framework ensures that agents operate efficiently, workflows remain flexible, and systems can scale dynamically. 
diff --git a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-getting-started.md b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-getting-started.md similarity index 82% rename from daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-getting-started.md rename to daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-getting-started.md index 2d160a4cb5a..4cd7fba0201 100644 --- a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-getting-started.md +++ b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-getting-started.md @@ -4,6 +4,8 @@ title: "Getting Started" linkTitle: "Getting Started" weight: 20 description: "How to install Dapr Agents and run your first agent" +aliases: + - /developing-applications/dapr-agents/dapr-agents-getting-started --- {{% alert title="Dapr Agents Concepts" color="primary" %}} @@ -53,19 +55,11 @@ Make sure you have Python already installed. `Python >=3.10`. For installation i Let's create a weather assistant agent that demonstrates tool calling with Dapr state management used for conversation memory. -### 1. Create the environment file +### 1. Create the Dapr components -Create a `.env` file with your OpenAI API key: +Create a `components` directory and add two files: -```env -OPENAI_API_KEY=your_api_key_here -``` - -This API key is essential for agents to communicate with the LLM, as the default LLM client in the agent uses OpenAI's services. If you don't have an API key, you can [create one here](https://platform.openai.com/api-keys). - -### 2. Create the Dapr component - -Create a `components` directory and add `historystore.yaml`: +`historystore.yaml`: ```yaml apiVersion: dapr.io/v1alpha1 @@ -84,6 +78,27 @@ spec: This component will be used to store the conversation history, as LLMs are stateless and every chat interaction needs to send all the previous conversations to maintain context. 
+`openai.yaml`: + +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: openai +spec: + type: conversation.openai + version: v1 + metadata: + - name: key + value: "{{OPENAI_API_KEY}}" + - name: model + value: gpt-5-2025-08-07 + - name: temperature + value: 1 +``` + +This component wires the default `DaprChatClient` to OpenAI via the Conversation API. Replace the `{{OPENAI_API_KEY}}` placeholder with your actual OpenAI key by editing the file directly. This API key is essential for agents to communicate with the LLM, as the default chat client talks to OpenAI-compatible endpoints. If you don't have an API key, you can [create one here](https://platform.openai.com/api-keys). You can also tweak metadata (model, temperature, baseUrl, etc.) to point at compatible OpenAI-style providers. + ### 3. Create the agent with weather tool Create `weather_agent.py`: @@ -91,29 +106,39 @@ Create `weather_agent.py`: ```python import asyncio from dapr_agents import tool, Agent +from dapr_agents.agents.configs import AgentMemoryConfig from dapr_agents.memory import ConversationDaprStateMemory from dotenv import load_dotenv load_dotenv() + @tool def get_weather() -> str: """Get current weather.""" return "It's 72°F and sunny" + async def main(): + memory_config = AgentMemoryConfig( + store=ConversationDaprStateMemory( + store_name="historystore", + session_id="hello-world", + ) + ) + agent = Agent( name="WeatherAgent", role="Weather Assistant", instructions=["Help users with weather information"], - memory=ConversationDaprStateMemory(store_name="historystore", session_id="hello-world"), + memory=memory_config, tools=[get_weather], ) # First interaction response1 = await agent.run("Hi! My name is John. 
What's the weather?") print(f"Agent: {response1}") - + # Second interaction - agent should remember the name response2 = await agent.run("What's my name?") print(f"Agent: {response2}") @@ -184,4 +209,4 @@ Here you can browse the state store used in the agent and explore its data. ## Next Steps Now that you have Dapr Agents installed and running, explore more advanced examples and patterns in the [quickstarts]({{% ref dapr-agents-quickstarts.md %}}) section to learn about multi-agent workflows, durable agents, and integration with Dapr's powerful distributed capabilities. - \ No newline at end of file + diff --git a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-integrations.md b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-integrations.md similarity index 91% rename from daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-integrations.md rename to daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-integrations.md index 11406f3479e..61c3e46cd00 100644 --- a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-integrations.md +++ b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-integrations.md @@ -4,6 +4,8 @@ title: "Integrations" linkTitle: "Integrations" weight: 60 description: "Various integrations and tools available in Dapr Agents" +aliases: + - /developing-applications/dapr-agents/dapr-agents-integrations --- # Out-of-the-box Tools @@ -313,4 +315,39 @@ While the Arxiv Fetcher provides robust functionality for retrieving and process * **Building a Searchable Knowledge Base**: Combine fetched papers with integrations like text splitting and vector embeddings for advanced search capabilities. * **Retrieval-Augmented Generation (RAG)**: Use processed papers as inputs for RAG pipelines to power question-answering systems. -* **Automated Literature Surveys**: Generate summaries or insights based on the fetched and processed research. 
\ No newline at end of file +* **Automated Literature Surveys**: Generate summaries or insights based on the fetched and processed research. + +## Tools + +### MCP Toolbox for databases + +Dapr Agents support integrating with [MCP Toolbox for Databases](https://googleapis.github.io/genai-toolbox/getting-started/introduction/) by implementing a wrapper that loads the available tools into the `Tool` model Dapr Agents utilize. + +To integrate the Toolbox, load the tools as follows: + +```python +from toolbox_core import ToolboxSyncClient +client = ToolboxSyncClient("http://127.0.0.1:5000") +agent_tools = AgentTool.from_toolbox_many(client.load_toolset("your-tools-name-here")) +agent = DurableAgent( + .. + tools=agent_tools +) + +.. +# Remember to close the tool +finally: + client.close() +``` + +Or wrap it in a `with` statement: + +```python +from toolbox_core import ToolboxSyncClient +with ToolboxSyncClient("http://127.0.0.1:5000") as client: + agent_tools = AgentTool.from_toolbox_many(client.load_toolset("your-tools-name-here")) + agent = DurableAgent( + .. 
+ tools=agent_tools + ) +``` \ No newline at end of file diff --git a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-introduction.md b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-introduction.md similarity index 97% rename from daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-introduction.md rename to daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-introduction.md index 17cc7dcf7f1..40a3bae070f 100644 --- a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-introduction.md +++ b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-introduction.md @@ -4,6 +4,8 @@ title: "Introduction" linkTitle: "Introduction" weight: 10 description: "Overview of Dapr Agents and its key features" +aliases: + - /developing-applications/dapr-agents/dapr-agents-introduction --- ![Agent Overview](/images/dapr-agents/concepts-agents-overview.png) @@ -66,11 +68,11 @@ Dapr Agents is a Python framework built on top of the [Python Dapr SDK]({{% ref Get started with Dapr Agents by following the instructions on the [Getting Started page]({{% ref dapr-agents-getting-started.md %}}). -### Framework Integrations +### Framework integrations -Dapr Agents integrates with popular Python frameworks and tools. For detailed integration guides and examples, see the [integrations page]({{% ref "developing-applications/dapr-agents/dapr-agents-integrations.md" %}}). +Dapr Agents integrates with popular Python frameworks and tools. For detailed integration guides and examples, see the [integrations page]({{% ref "developing-ai/dapr-agents/dapr-agents-integrations.md" %}}). -## Operational Support +## Operational support Dapr Agents inherits Dapr's enterprise-grade operational capabilities, providing comprehensive support for durable and reliable deployments of agentic systems. 
diff --git a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-patterns.md b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-patterns.md similarity index 89% rename from daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-patterns.md rename to daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-patterns.md index 959b097455b..fa6eda426d6 100644 --- a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-patterns.md +++ b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-patterns.md @@ -4,6 +4,8 @@ title: "Agentic Patterns" linkTitle: "Agentic Patterns" weight: 50 description: "Common design patterns and use cases for building agentic systems" +aliases: + - /developing-applications/dapr-agents/dapr-agents-patterns --- Dapr Agents simplify the implementation of agentic systems, from simple augmented LLMs to fully autonomous agents in enterprise environments. The following sections describe several application patterns that can benefit from Dapr Agents. 
@@ -362,7 +364,19 @@ The Durable Agent also enables the "headless agents" approach where autonomous s **Implementation with Dapr Agents:** ```python +import asyncio + from dapr_agents import DurableAgent +from dapr_agents.agents.configs import ( + AgentExecutionConfig, + AgentMemoryConfig, + AgentPubSubConfig, + AgentRegistryConfig, + AgentStateConfig, +) +from dapr_agents.memory import ConversationDaprStateMemory +from dapr_agents.storage.daprstores.stateservice import StateStoreService +from dapr_agents.workflow.runners import AgentRunner travel_planner = DurableAgent( name="TravelBuddy", @@ -371,23 +385,56 @@ travel_planner = DurableAgent( instructions=[ "Find flights to destinations", "Remember user preferences", - "Provide clear flight info" + "Provide clear flight info", ], tools=[search_flights], - message_bus_name="messagepubsub", - state_store_name="workflowstatestore", - state_key="workflow_state", - agents_registry_store_name="workflowstatestore", - agents_registry_key="agents_registry", + pubsub=AgentPubSubConfig( + pubsub_name="messagepubsub", + agent_topic="travel.requests", + broadcast_topic="travel.broadcast", + ), + state=AgentStateConfig( + store=StateStoreService(store_name="workflowstatestore"), + ), + registry=AgentRegistryConfig( + store=StateStoreService(store_name="registrystatestore"), + team_name="travel-team", + ), + execution=AgentExecutionConfig(max_iterations=3), + memory=AgentMemoryConfig( + store=ConversationDaprStateMemory( + store_name="conversationstore", + session_id="travel-session", + ) + ), ) + +async def main(): + runner = AgentRunner() + try: + result = await runner.run( + travel_planner, + payload={"task": "Find weekend flights to Paris"}, + ) + print(result) + finally: + runner.shutdown(travel_planner) + +asyncio.run(main()) ``` The implementation follows Dapr's sidecar architecture model, where all infrastructure concerns are handled by the Dapr runtime: - **Persistent Memory** - Agent state is stored in Dapr's state 
store, surviving process crashes - **Workflow Orchestration** - All agent interactions managed through Dapr's workflow system -- **Service Exposure** - REST endpoints for workflow management come out of the box -- **Pub/Sub Input/Output** - Event-driven messaging through Dapr's pub/sub system for seamless integration +- **Service Exposure** - `AgentRunner.serve()` exposes REST endpoints (e.g., `POST /run`) that schedule the agent's `@workflow_entry` +- **Pub/Sub Input/Output** - `AgentRunner.subscribe()` scans the agent for `@message_router` methods and wires the configured topics with schema validation + +The Durable Agent enables the concept of "headless agents" - autonomous systems that operate without direct user interaction. Depending on the scenario you can: + +1. **Run** durable workflows programmatically (`runner.run` as shown above) +2. **Subscribe** the agent to topics so other services can trigger it via pub/sub (`runner.subscribe`) +3. **Serve** the agent behind a FastAPI app with built-in `/run` and status endpoints (`runner.serve`) -The Durable Agent enables the concept of "headless agents" - autonomous systems that operate without direct user interaction. Dapr's Durable Agent exposes both REST and Pub/Sub APIs, making it ideal for long-running operations that are triggered by other applications or external events. This allows agents to run in the background, processing requests asynchronously and integrating seamlessly into larger distributed systems. +These options make it easy to process requests asynchronously and integrate seamlessly into larger distributed systems. 
## Choosing the Right Pattern @@ -397,4 +444,4 @@ The journey from simple agentic workflows to fully autonomous agents represents - **Start with simpler patterns** like Augmented LLM and Prompt Chaining for well-defined tasks where predictability is crucial - **Progress to more dynamic patterns** like Parallelization and Orchestrator-Workers as your needs grow more complex - **Consider fully autonomous agents** only for open-ended tasks where the benefits of flexibility outweigh the need for strict control - \ No newline at end of file + diff --git a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-quickstarts.md b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-quickstarts.md similarity index 79% rename from daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-quickstarts.md rename to daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-quickstarts.md index cf35ac09c72..663eac16140 100644 --- a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-quickstarts.md +++ b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-quickstarts.md @@ -4,6 +4,8 @@ title: "Quickstarts" linkTitle: "Quickstarts" weight: 70 description: "Get started with Dapr Agents through practical step-by-step examples" +aliases: + - /developing-applications/dapr-agents/dapr-agents-quickstarts --- [Dapr Agents Quickstarts](https://github.com/dapr/dapr-agents/tree/main/quickstarts) demonstrate how to use Dapr Agents to build applications with LLM-powered autonomous agents and event-driven workflows. Each quickstart builds upon the previous one, introducing new concepts incrementally. 
@@ -17,12 +19,12 @@ description: "Get started with Dapr Agents through practical step-by-step exampl | Scenario | What You'll Learn | |------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-------------------| -| [Hello World](https://github.com/dapr/dapr-agents/tree/main/quickstarts/01-hello-world)
A rapid introduction that demonstrates core Dapr Agents concepts through simple, practical examples. | - **Basic LLM Usage**: Simple text generation with OpenAI models
- **Creating Agents**: Building agents with custom tools in under 20 lines of code

- **Simple Workflows**: Setting up multi-step LLM processes | -| [LLM Call with Dapr Chat Client](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02_llm_call_dapr)
Explore interaction with Language Models through Dapr Agents' `DaprChatClient`, featuring basic text generation with plain text prompts and templates. | - **Text Completion**: Generating responses to prompts
- **Swapping LLM providers**: Switching LLM backends without application code change
- **Resilience**: Setting timeout, retry and circuit-breaking
- **PII Obfuscation**: Automatically detect and mask sensitive user information | -| [LLM Call with OpenAI Client](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02_llm_call_open_ai)
Leverage native LLM client libraries with Dapr Agents using the OpenAI Client for chat completion, audio processing, and embeddings. | - **Text Completion**: Generating responses to prompts
- **Structured Outputs**: Converting LLM responses to Pydantic objects

*Note: Other quickstarts for specific clients are available for [Elevenlabs](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02_llm_call_elevenlabs), [Hugging Face](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02_llm_call_hugging_face), and [Nvidia](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02_llm_call_nvidia).* | -| [Agent Tool Call](https://github.com/dapr/dapr-agents/tree/main/quickstarts/03-agent-tool-call)
Build your first AI agent with custom tools by creating a practical weather assistant that fetches information and performs actions. | - **Tool Definition**: Creating reusable tools with the `@tool` decorator
- **Agent Configuration**: Setting up agents with roles, goals, and tools
- **Function Calling**: Enabling LLMs to execute Python functions | +| [Hello World](https://github.com/dapr/dapr-agents/tree/main/quickstarts/01-hello-world)
A rapid introduction that demonstrates core Dapr Agents concepts through simple, practical examples. | - **Basic LLM Usage**: Simple text generation with OpenAI models
- **Creating Agents**: Building agents with custom tools in under 20 lines of code

- **Simple Workflows**: Setting up multi-step LLM processes
- **DurableAgent Hosting**: Learn `AgentRunner.run`, `AgentRunner.subscribe`, and `AgentRunner.serve` using the `03_durable_agent_*.py` samples | +| [LLM Call with Dapr Chat Client](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02-llm-call-dapr)
Explore interaction with Language Models through Dapr Agents' `DaprChatClient`, featuring basic text generation with plain text prompts and templates. | - **Text Completion**: Generating responses to prompts
- **Swapping LLM providers**: Switching LLM backends without application code change
- **Resilience**: Setting timeout, retry and circuit-breaking
- **PII Obfuscation**: Automatically detect and mask sensitive user information | +| [LLM Call with OpenAI Client](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02-llm-call-open-ai)
Leverage native LLM client libraries with Dapr Agents using the OpenAI Client for chat completion, audio processing, and embeddings. | - **Text Completion**: Generating responses to prompts
- **Structured Outputs**: Converting LLM responses to Pydantic objects

*Note: Other quickstarts for specific clients are available for [Elevenlabs](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02-llm-call-elevenlabs), [Hugging Face](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02-llm-call-hugging-face), and [Nvidia](https://github.com/dapr/dapr-agents/tree/main/quickstarts/02-llm-call-nvidia).* | +| Standalone & Durable Agents
[Standalone Agent Tool Call](https://github.com/dapr/dapr-agents/tree/main/quickstarts/03-standalone-agent-tool-call) · [Durable Agent Tool Call](https://github.com/dapr/dapr-agents/tree/main/quickstarts/03-durable-agent-tool-call) | - **Standalone Agents**: Build conversational agents with tools in under 20 lines using the `Agent` class
- **Durable Agents**: Upgrade to workflow-backed `DurableAgent` instances with `AgentRunner.run/subscribe/serve`
- **Tool Definition**: Reuse tools with the `@tool` decorator and structured args models
- **Function Calling**: Let LLMs invoke Python functions safely | | [Agentic Workflow](https://github.com/dapr/dapr-agents/tree/main/quickstarts/04-llm-based-workflows)
Dive into stateful workflows with Dapr Agents by orchestrating sequential and parallel tasks through powerful workflow capabilities. | - **LLM-powered Tasks**: Using language models in workflows
- **Task Chaining**: Creating resilient multi-step processes executing in sequence
- **Fan-out/Fan-in**: Executing activities in parallel; then synchronizing these activities until all preceding activities have completed | | [Multi-Agent Workflows](https://github.com/dapr/dapr-agents/tree/main/quickstarts/05-multi-agent-workflows)
Explore advanced event-driven workflows featuring a Lord of the Rings themed multi-agent system where autonomous agents collaborate to solve problems. | - **Multi-agent Systems**: Creating a network of specialized agents
- **Event-driven Architecture**: Implementing pub/sub messaging between agents
- **Workflow Orchestration**: Coordinating agents through different selection strategies| | [Multi-Agent Workflow on Kubernetes](https://github.com/dapr/dapr-agents/tree/main/quickstarts/05-multi-agent-workflow-k8s)
Run multi-agent workflows in Kubernetes, demonstrating deployment and orchestration of event-driven agent systems in a containerized environment. | - **Kubernetes Deployment**: Running agents on Kubernetes
- **Container Orchestration**: Managing agent lifecycles with K8s
- **Service Communication**: Inter-agent communication in K8s | | [Document Agent with Chainlit](https://github.com/dapr/dapr-agents/tree/main/quickstarts/06-document-agent-chainlit)
Create a conversational agent with an operational UI that can upload, and learn unstructured documents while retaining long-term memory. | - **Conversational Document Agent**: Upload and converse over unstructured documents
- **Cloud Agnostic Storage**: Upload files to multiple storage providers
- **Conversation Memory Storage**: Persists conversation history using external storage. | -| [Data Agent with MCP and Chainlit](https://github.com/dapr/dapr-agents/tree/main/quickstarts/08-data-agent-mcp-chainlit)
Build a conversational agent over a Postgres database using Model Composition Protocol (MCP) with a ChatGPT-like interface. | - **Database Querying**: Natural language queries to relational databases
- **MCP Integration**: Connecting to databases without DB-specific code
- **Data Analysis**: Complex data analysis through conversation | +| [Data Agent with MCP and Chainlit](https://github.com/dapr/dapr-agents/tree/main/quickstarts/08-data-agent-mcp-chainlit)
Build a conversational agent over a Postgres database using Model Composition Protocol (MCP) with a ChatGPT-like interface. | - **Database Querying**: Natural language queries to relational databases
- **MCP Integration**: Connecting to databases without DB-specific code
- **Data Analysis**: Complex data analysis through conversation | \ No newline at end of file diff --git a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-why.md b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-why.md similarity index 95% rename from daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-why.md rename to daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-why.md index a65ad4d8502..afb32bdbf2d 100644 --- a/daprdocs/content/en/developing-applications/dapr-agents/dapr-agents-why.md +++ b/daprdocs/content/en/developing-ai/dapr-agents/dapr-agents-why.md @@ -4,6 +4,8 @@ title: "Why Dapr Agents" linkTitle: "Why Dapr Agents" weight: 30 description: "Understanding the benefits and use cases for Dapr Agents" +aliases: + - /developing-applications/dapr-agents/dapr-agents-why --- Dapr Agents is an open-source framework for building and orchestrating LLM-based autonomous agents that leverages Dapr's proven distributed systems foundation. Unlike other agentic frameworks that require developers to build infrastructure from scratch, Dapr Agents enables teams to focus on agent intelligence by providing enterprise-grade scalability, state management, and messaging capabilities out of the box. This approach eliminates the complexity of recreating distributed system fundamentals while delivering agentic workflows powered by Dapr. @@ -102,11 +104,11 @@ Dapr Agents uses a [durable-execution workflow engine]({{% ref workflow-overview Dapr Agents builds on Dapr's Workflow API, which represents each agent as an actor, a single unit of compute and state that is thread-safe and natively distributed. This design enables a scale-to-zero architecture that minimizes infrastructure costs, making AI adoption accessible to organizations of all sizes. The underlying virtual actor model allows thousands of agents to run on demand on a single machine with low latency when scaling from zero. 
When unused, agents are reclaimed by the system but retain their state until needed again. This design eliminates the trade-off between performance and resource efficiency. -### Data-Centric AI Agents +### Data-centric AI agents -With built-in connectivity to over 50 enterprise data sources, Dapr Agents efficiently handles structured and unstructured data. From basic [PDF extraction]({{% ref "/developing-applications/dapr-agents/dapr-agents-integrations.md" %}}) to large-scale database interactions, it enables data-driven AI workflows with minimal code changes. Dapr's [bindings]({{% ref bindings-overview.md %}}) and [state stores]({{% ref supported-state-stores.md %}}), along with MCP support, provide access to numerous data sources for agent data ingestion. +With built-in connectivity to over 50 enterprise data sources, Dapr Agents efficiently handles structured and unstructured data. From basic [PDF extraction]({{% ref "/developing-ai/dapr-agents/dapr-agents-integrations.md" %}}) to large-scale database interactions, it enables data-driven AI workflows with minimal code changes. Dapr's [bindings]({{% ref bindings-overview.md %}}) and [state stores]({{% ref supported-state-stores.md %}}), along with MCP support, provide access to numerous data sources for agent data ingestion. 
-### Accelerated Development +### Accelerated development Dapr Agents provides AI features that give developers a complete API surface to tackle common problems, including: diff --git a/daprdocs/content/en/developing-applications/building-blocks/actors/actors-timers-reminders.md b/daprdocs/content/en/developing-applications/building-blocks/actors/actors-timers-reminders.md index 736547e91d6..8d174a8ecc3 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/actors/actors-timers-reminders.md +++ b/daprdocs/content/en/developing-applications/building-blocks/actors/actors-timers-reminders.md @@ -187,6 +187,66 @@ To use protobuf serialization for actor reminders on self-hosted, use the follow --max-api-level=20 ``` +## Managing reminders with the CLI + +Actor reminders are persisted in the Scheduler. +You can manage them with the dapr scheduler CLI commands. + +#### List actor reminders + +```bash +dapr scheduler list --filter actor +NAME BEGIN COUNT LAST TRIGGER +actor/MyActorType/actorid1/test1 -3.89s 1 2025-10-03T16:58:55Z +actor/MyActorType/actorid2/test2 -3.89s 1 2025-10-03T16:58:55Z +``` + +Get reminder details + +```bash +dapr scheduler get actor/MyActorType/actorid1/test1 -o yaml +``` + +#### Delete reminders + +Delete a single reminder: + +```bash +dapr scheduler delete actor/MyActorType/actorid1/test1 +``` + +Delete all reminders for a given actor type: + +```bash +dapr scheduler delete-all actor/MyActorType +``` + +Delete all reminders for a specific actor instance: + +```bash +dapr scheduler delete-all actor/MyActorType/actorid1 +``` + +#### Backup and restore reminders + +Export all reminders: + +```bash +dapr scheduler export -o reminders-backup.bin +``` + +Restore from a backup file: + +```bash +dapr scheduler import -f reminders-backup.bin +``` + +#### Summary + +- Reminders are stored in the Dapr Scheduler, not in the app. 
+- Create reminders via the Actors API +- Manage existing reminders (list, get, delete, backup/restore) using the `dapr scheduler` CLI. + ## Next steps {{< button text="Configure actor runtime behavior >>" page="actors-runtime-config.md" >}} diff --git a/daprdocs/content/en/developing-applications/building-blocks/actors/namespaced-actors.md b/daprdocs/content/en/developing-applications/building-blocks/actors/namespaced-actors.md index b51ffc79f2b..3584e9a7160 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/actors/namespaced-actors.md +++ b/daprdocs/content/en/developing-applications/building-blocks/actors/namespaced-actors.md @@ -25,7 +25,7 @@ In self-hosted mode, you can specify the namespace for a Dapr instance by settin {{% /tab %}} {{% tab "Kubernetes" %}} -On Kubernetes, you can create and configure namepaces when deploying actor applications. For example, start with the following `kubectl` commands: +On Kubernetes, you can create and configure namespaces when deploying actor applications. 
For example, start with the following `kubectl` commands: ```bash kubectl create namespace namespace-actorA diff --git a/daprdocs/content/en/developing-applications/building-blocks/bindings/howto-bindings.md b/daprdocs/content/en/developing-applications/building-blocks/bindings/howto-bindings.md index 7f77f75b506..d6d46cf8102 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/bindings/howto-bindings.md +++ b/daprdocs/content/en/developing-applications/building-blocks/bindings/howto-bindings.md @@ -242,7 +242,7 @@ func main() { {{% /tab %}} -{{% tab "JavaScript%}}" %}} +{{% tab "JavaScript" %}} ```javascript //dependencies diff --git a/daprdocs/content/en/developing-applications/building-blocks/conversation/conversation-overview.md b/daprdocs/content/en/developing-applications/building-blocks/conversation/conversation-overview.md index 4f98d726a12..7483a41c296 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/conversation/conversation-overview.md +++ b/daprdocs/content/en/developing-applications/building-blocks/conversation/conversation-overview.md @@ -14,11 +14,16 @@ Dapr's conversation API reduces the complexity of securely and reliably interact Diagram showing the flow of a user's app communicating with Dapr's LLM components. 
-In additon to enabling critical performance and security functionality (like [prompt caching]({{% ref "#prompt-caching" %}}) and [PII scrubbing]({{% ref "#personally-identifiable-information-pii-obfuscation" %}})), you can also pair the conversation API with Dapr functionalities, like: -- Resiliency circuit breakers and retries to circumvent limit and token errors, or -- Middleware to authenticate requests coming to and from the LLM +In addition to enabling critical performance and security functionality (like [prompt caching]({{% ref "#prompt-caching" %}}) and [PII scrubbing]({{% ref "#personally-identifiable-information-pii-obfuscation" %}})), the conversation API also provides: -Dapr provides observability by issuing metrics for your LLM interactions. +- **Tool calling capabilities** that allow LLMs to interact with external functions and APIs, enabling more sophisticated AI applications +- **OpenAI-compatible interface** for seamless integration with existing AI workflows and tools + +You can also pair the conversation API with Dapr functionalities, like: + +- Resiliency policies including circuit breakers to handle repeated errors, timeouts to safeguard against slow responses, and retries for temporary network failures +- Observability with metrics and distributed tracing using OpenTelemetry and Zipkin +- Middleware to authenticate requests to and from the LLM ## Features @@ -26,7 +31,7 @@ The following features are out-of-the-box for [all the supported conversation co ### Prompt caching -Prompt caching optimizes performance by storing and reusing prompts that are often repeated across multiple API calls. To significantly reduce latency and cost, Dapr stores frequent prompts in a local cache to be reused by your cluster, pod, or other, instead of reprocessing the information for every new request. 
+The Conversation API includes a built-in caching mechanism (enabled by the cacheTTL parameter) that optimizes both performance and cost by storing previous model responses for faster delivery to repetitive requests. This is particularly valuable in scenarios where similar prompt patterns occur frequently. When caching is enabled, Dapr creates a deterministic hash of the prompt text and all configuration parameters, checks if a valid cached response exists for this hash within the time period (for example, 10 minutes), and returns the cached response immediately if found. If no match exists, Dapr makes the API call and stores the result. This eliminates external API calls, lowers latency, and avoids provider charges for repeated requests. The cache exists entirely within your runtime environment, with each Dapr sidecar maintaining its own local cache. ### Personally identifiable information (PII) obfuscation @@ -45,13 +50,24 @@ The PII scrubber obfuscates the following user information: - SHA-256 hex - MD5 hex +### Tool calling support + +The conversation API supports advanced tool calling capabilities that allow LLMs to interact with external functions and APIs. This enables you to build sophisticated AI applications that can: + +- Execute custom functions based on user requests +- Integrate with external services and databases +- Provide dynamic, context-aware responses +- Create multi-step workflows and automation + +Tool calling follows [OpenAI's function calling format](https://platform.openai.com/docs/guides/function-calling), making it easy to integrate with existing AI development workflows and tools. + ## Demo Watch the demo presented during [Diagrid's Dapr v1.15 celebration](https://www.diagrid.io/videos/dapr-1-15-deep-dive) to see how the conversation API works using the .NET SDK. 
{{< youtube id=NTnwoDhHIcQ start=5444 >}} -## Try out conversation +## Try out conversation API ### Quickstarts and tutorials diff --git a/daprdocs/content/en/developing-applications/building-blocks/conversation/howto-conversation-layer.md b/daprdocs/content/en/developing-applications/building-blocks/conversation/howto-conversation-layer.md index 99b7803b8d6..e79af9f190e 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/conversation/howto-conversation-layer.md +++ b/daprdocs/content/en/developing-applications/building-blocks/conversation/howto-conversation-layer.md @@ -56,7 +56,7 @@ spec: ## Connect the conversation client -The following examples use an HTTP client to send a POST request to Dapr's sidecar HTTP endpoint. You can also use [the Dapr SDK client instead]({{% ref "#related-links" %}}). +The following examples use the Dapr SDK client to interact with LLMs. {{< tabpane text=true >}} @@ -83,7 +83,7 @@ var response = await conversationClient.ConverseAsync("conversation", DaprConversationRole.Generic) }); -Console.WriteLine("Received the following from the LLM:"); +Console.WriteLine("conversation output: "); foreach (var resp in response.Outputs) { Console.WriteLine($"\t{resp.Result}"); @@ -92,6 +92,77 @@ foreach (var resp in response.Outputs) {{% /tab %}} + +{{% tab "Java" %}} + +```java +//dependencies +import io.dapr.client.DaprClientBuilder; +import io.dapr.client.DaprPreviewClient; +import io.dapr.client.domain.ConversationInput; +import io.dapr.client.domain.ConversationRequest; +import io.dapr.client.domain.ConversationResponse; +import reactor.core.publisher.Mono; + +import java.util.List; + +public class Conversation { + + public static void main(String[] args) { + String prompt = "Please write a witty haiku about the Dapr distributed programming framework at dapr.io"; + + try (DaprPreviewClient client = new DaprClientBuilder().buildPreviewClient()) { + System.out.println("Input: " + prompt); + + ConversationInput 
daprConversationInput = new ConversationInput(prompt); + + // Component name is the name provided in the metadata block of the conversation.yaml file. + Mono responseMono = client.converse(new ConversationRequest("echo", + List.of(daprConversationInput)) + .setContextId("contextId") + .setScrubPii(true).setTemperature(1.1d)); + ConversationResponse response = responseMono.block(); + System.out.printf("conversation output: %s", response.getConversationOutputs().get(0).getResult()); + } catch (Exception e) { + throw new RuntimeException(e); + } + } +} +``` + +{{% /tab %}} + + +{{% tab "Python" %}} + +```python +#dependencies +from dapr.clients import DaprClient +from dapr.clients.grpc._request import ConversationInput + +#code +with DaprClient() as d: + inputs = [ + ConversationInput(content="Please write a witty haiku about the Dapr distributed programming framework at dapr.io", role='user', scrub_pii=True), + ] + + metadata = { + 'model': 'modelname', + 'key': 'authKey', + 'cacheTTL': '10m', + } + + response = d.converse_alpha1( + name='echo', inputs=inputs, temperature=0.7, context_id='chat-123', metadata=metadata + ) + + for output in response.outputs: + print(f'conversation output: {output.result}') +``` + +{{% /tab %}} + + {{% tab "Go" %}} @@ -189,21 +260,40 @@ dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resour {{% /tab %}} - -{{% tab "Go" %}} + +{{% tab "Java" %}} ```bash -dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resources-path ./config -- go run ./main.go + +dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resources-path ./config -- mvn spring-boot:run ``` -**Expected output** +{{% /tab %}} + + +{{% tab "Python" %}} + +```bash + +dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resources-path ./config -- python3 app.py ``` - - '== APP == conversation output: Please write a witty haiku about the Dapr distributed programming framework at dapr.io' + 
+{{% /tab %}} + + + +{{% tab "Go" %}} + +```bash +dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resources-path ./config -- go run ./main.go ``` + {{% /tab %}} + + {{% tab "Rust" %}} @@ -211,17 +301,17 @@ dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resour dapr run --app-id=conversation --resources-path ./config --dapr-grpc-port 3500 -- cargo run --example conversation ``` +{{% /tab %}} + +{{< /tabpane >}} + + **Expected output** ``` - - 'conversation input: hello world' - - 'conversation output: hello world' + - '== APP == conversation output: Please write a witty haiku about the Dapr distributed programming framework at dapr.io' ``` -{{% /tab %}} - -{{< /tabpane >}} - ## Advanced features The conversation API supports the following features: @@ -230,9 +320,11 @@ The conversation API supports the following features: 1. **PII scrubbing:** Allows for the obfuscation of data going in and out of the LLM. +1. **Tool calling:** Allows LLMs to interact with external functions and APIs. + To learn how to enable these features, see the [conversation API reference guide]({{% ref conversation_api %}}). -## Related links +## Conversation API examples in Dapr SDK repositories Try out the conversation API using the full examples provided in the supported SDK repos. 
@@ -246,7 +338,23 @@ Try out the conversation API using the full examples provided in the supported S {{% /tab %}} - + + +{{% tab "Java" %}} + +[Dapr conversation example with the Java SDK](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/conversation) + +{{% /tab %}} + + + +{{% tab "Python" %}} + +[Dapr conversation example with the Python SDK](https://github.com/dapr/python-sdk/tree/main/examples/conversation) + +{{% /tab %}} + + {{% tab "Go" %}} [Dapr conversation example with the Go SDK](https://github.com/dapr/go-sdk/tree/main/examples/conversation) @@ -264,6 +372,6 @@ Try out the conversation API using the full examples provided in the supported S ## Next steps - +- [Conversation quickstart]({{% ref conversation-quickstart %}}) - [Conversation API reference guide]({{% ref conversation_api %}}) - [Available conversation components]({{% ref supported-conversation %}}) diff --git a/daprdocs/content/en/developing-applications/building-blocks/jobs/howto-schedule-and-handle-triggered-jobs.md b/daprdocs/content/en/developing-applications/building-blocks/jobs/howto-schedule-and-handle-triggered-jobs.md index 12e50dda952..2057d25ff9c 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/jobs/howto-schedule-and-handle-triggered-jobs.md +++ b/daprdocs/content/en/developing-applications/building-blocks/jobs/howto-schedule-and-handle-triggered-jobs.md @@ -56,7 +56,7 @@ internal sealed record BackupJobData([property: JsonPropertyName("task")] string internal sealed record BackupMetadata([property: JsonPropertyName("DBName")]string DatabaseName, [property: JsonPropertyName("BackupLocation")] string BackupLocation); ``` -Next, set up a handler as part of your application setup that will be called anytime a job is triggered on your +Next, set up a handler as part of your application setup that will be called any time a job is triggered on your application. 
It's the responsibility of this handler to identify how jobs should be processed based on the job name provided. This works by registering a handler with ASP.NET Core at `/job/`, where `` is parameterized and diff --git a/daprdocs/content/en/developing-applications/building-blocks/jobs/jobs-features-concepts.md b/daprdocs/content/en/developing-applications/building-blocks/jobs/jobs-features-concepts.md index fbeb0f50a0a..dcd49a80aae 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/jobs/jobs-features-concepts.md +++ b/daprdocs/content/en/developing-applications/building-blocks/jobs/jobs-features-concepts.md @@ -119,3 +119,60 @@ or the not-before time from which the schedule should take effect The `DueTime` and `Ttl` fields will reflect an RC3339 timestamp value reflective of the time zone provided when the job was originally scheduled. If no time zone was provided, these values indicate the time zone used by the server running Dapr. + +### Managing jobs + +While jobs are created via API calls, you can manage (list, inspect, delete, back up, and restore) jobs is by using the dapr scheduler CLI commands. 
+ +#### List jobs + +```bash +dapr scheduler list --filter app +NAME BEGIN COUNT LAST TRIGGER +app/my-app/my-job -3.89s 1 2025-10-03T16:58:55Z +app/my-app/another-job -3.89s 1 2025-10-03T16:58:55Z +``` + +```bash +dapr scheduler list -o wide +NAMESPACE NAME BEGIN EXPIRATION SCHEDULE DUE TIME TTL REPEATS COUNT LAST TRIGGER +default app/my-app/my-job 2025-10-03T16:58:55Z @every 5s 2025-10-03T17:58:55+01:00 100 1 2025-10-03T16:58:55Z +``` + +```bash +dapr scheduler get app/my-app/my-job -o yaml +``` + +#### Delete jobs + +Delete a specific job: + +```bash +dapr scheduler delete app/my-app/my-job +``` + +Delete all jobs for an app: + +```bash +dapr scheduler delete-all app/my-app +``` + +#### Backup and restore jobs + +Export all jobs: + +```bash +dapr scheduler export -o jobs-backup.bin +``` + +Import them later: + +```bash +dapr scheduler import -f jobs-backup.bin +``` + +#### Summary + +- Use the Jobs API to create or update jobs from applications. +- Use the dapr scheduler CLI to view, inspect, back up, or delete jobs. +- Jobs are stored in the Dapr Scheduler, ensuring reliability across restarts and deployments. diff --git a/daprdocs/content/en/developing-applications/building-blocks/pubsub/howto-route-messages.md b/daprdocs/content/en/developing-applications/building-blocks/pubsub/howto-route-messages.md index 578e3081a5e..d5645ed2566 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/pubsub/howto-route-messages.md +++ b/daprdocs/content/en/developing-applications/building-blocks/pubsub/howto-route-messages.md @@ -271,7 +271,7 @@ Match deposits greater than $10,000: event.type == "deposit" && int(event.data.amount) > 10000 ``` {{% alert title="Note" color="primary" %}} -By default the numeric values ​​are written as double-precision floating-point. There are no automatic arithmetic conversions for numeric values. In this case, if `event.data.amount` is not cast as integer, the match is not performed. 
For more information, see the [CEL documentation](https://github.com/google/cel-spec/blob/master/doc/langdef). +By default the numeric values ​​are written as double-precision floating-point. There are no automatic arithmetic conversions for numeric values. In this case, if `event.data.amount` is not cast as integer, the match is not performed. For more information, see the [CEL documentation](https://github.com/google/cel-spec/blob/master/doc/langdef.md). {{% /alert %}} Match multiple versions of a message: diff --git a/daprdocs/content/en/developing-applications/building-blocks/pubsub/pubsub-raw.md b/daprdocs/content/en/developing-applications/building-blocks/pubsub/pubsub-raw.md index 1ac75613a6f..1cbfb72c259 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/pubsub/pubsub-raw.md +++ b/daprdocs/content/en/developing-applications/building-blocks/pubsub/pubsub-raw.md @@ -101,6 +101,36 @@ $app->run(function(\DI\FactoryInterface $factory) { {{% /tab %}} +{{% tab "Java" %}} + +```java +@RestController +@PathMapping("/publish") +public class PublishController { + + @Inject + DaprClient client; + + @PostMapping + public void sendRawMessage() { + + Map metadata = new HashMap<>(); + metatada.put("content-type", "application/json"); + metadata.put("rawPayload", "true"); + + Message message = new Message(UUID.random().toString(), "Hello from Dapr"); + + client.publishEvent( + "pubsub", // pubsub name + "messages", // topic name + message, // message data + metadata) // metadata + .block(); // wait for completion + } +} +``` +{{% /tab %}} + {{< /tabpane >}} ## Subscribing to raw messages @@ -216,6 +246,32 @@ $app->start(); ``` {{% /tab %}} +{{% tab "Java" %}} +```java +@RequestMapping("/consumer") +@RestController +public class MessageConsumerController { + + @PostMapping + @ResponseStatus(HttpStatus.OK) + @Topic(pubsubName = "pubsub", name = "messages", metadata = "{\"rawPayload\":\"true\", \"content-type\": \"application/json\"}") + public void 
consume(@RequestBody Message message) { + System.out.println("Message received: " + message); + } + + @PostMapping + @ResponseStatus(HttpStatus.OK) + @Topic(pubsubName = "pubsub", name = "another-topic", metadata = """ + {"rawPayload": "true", "content-type": "application/json"} + """) // Using Java 15 text block + public void consumeAnother(@RequestBody Message message) { + System.out.println("Message received: " + message); + } +} + +``` +{{% /tab %}} + {{< /tabpane >}} ## Declaratively subscribe to raw events diff --git a/daprdocs/content/en/developing-applications/building-blocks/state-management/howto-outbox.md b/daprdocs/content/en/developing-applications/building-blocks/state-management/howto-outbox.md index f99f44c3b88..01762e53c4e 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/state-management/howto-outbox.md +++ b/daprdocs/content/en/developing-applications/building-blocks/state-management/howto-outbox.md @@ -16,23 +16,72 @@ For example, you can use the outbox pattern to: With Dapr's outbox support, you can notify subscribers when an application's state is created or updated when calling Dapr's [transactions API]({{% ref "state_api.md#state-transactions" %}}). -The diagram below is an overview of how the outbox feature works: +The diagram below is an overview of how the outbox feature works at a high level: 1) Service A saves/updates state to the state store using a transaction. 2) A message is written to the broker under the same transaction. When the message is successfully delivered to the message broker, the transaction completes, ensuring the state and message are transacted together. 3) The message broker delivers the message topic to any subscribers - in this case, Service B. -Diagram showing the steps of the outbox pattern +Diagram showing the overview of outbox pattern +## How outbox works under the hood + +Dapr outbox processes requests in two flows: the user request flow and the background message flow. 
Together, they guarantee that state and events stay consistent. + +Diagram showing the steps of the outbox pattern + +This is the sequence of interactions: + +1. An application calls the Dapr State Management API to write state transactionally using the transactional methods. + This is the entry point where business data, such as an order or profile update, is submitted for persistence. + +2. Dapr publishes an intent message with a unique transaction ID to an internal outbox topic. + This durable record ensures the event intent exists before any database commit happens. + +3. The state and a transaction marker are written atomically in the same state store. + Both the business data and the marker are committed in the same transaction, preventing partial writes. + +4. The application receives a success response after the transaction commits. + At this point, the application can continue, knowing state is saved and the event intent is guaranteed. + +5. A background subscriber reads the intent message. + When outbox is enabled, Dapr starts consumers that process the internal outbox topic. + +6. The subscriber verifies the transaction marker in the state store. + This check confirms that the database commit was successful before publishing externally. + +7. Verified business event is published to the external pub/sub topic. + The event is sent to the configured broker (Kafka, RabbitMQ, etc.) where other services can consume it. + +8. The marker is cleaned up (deleted) from the state store. + This prevents unbounded growth in the database once the event has been successfully delivered. + +9. Message is acknowledged and removed from internal topic + If publishing or cleanup fails, Dapr retries, ensuring reliable at-least-once delivery. + ## Requirements -The outbox feature can be used with using any [transactional state store]({{% ref supported-state-stores %}}) supported by Dapr. All [pub/sub brokers]({{% ref supported-pubsub %}}) are supported with the outbox feature. 
+1. The outbox feature requires a [transactional state store]({{% ref supported-state-stores %}}) supported by Dapr. + [Learn more about the transactional methods you can use.]({{% ref "howto-get-save-state.md#perform-state-transactions" %}}) -[Learn more about the transactional methods you can use.]({{% ref "howto-get-save-state.md#perform-state-transactions" %}}) +2. Any [pub/sub broker]({{% ref supported-pubsub %}}) supported by Dapr can be used with the outbox feature. -{{% alert title="Note" color="primary" %}} -Message brokers that work with the competing consumer pattern (for example, [Apache Kafka]({{% ref setup-apache-kafka%}})) are encouraged to reduce the chances of duplicate events. -{{% /alert %}} + {{% alert title="Note" color="primary" %}} + Message brokers that support the competing consumer pattern (for example, [Apache Kafka]({{% ref setup-apache-kafka%}})) are recommended to reduce the chance of duplicate events. + {{% /alert %}} + +3. Internal outbox topic + When outbox is enabled, Dapr creates an internal topic using the following naming convention: `{namespace}{appID}{topic}outbox`, where: + + - `namespace`: the Dapr application namespace (if configured) + - `appID`: the Dapr application identifier + - `topic`: the value specified in the `outboxPublishTopic` metadata + + This way each outbox topic is uniquely identified per application and external topic, preventing routing conflicts in multi-tenant environments. + + {{% alert title="Note" color="primary" %}} + Ensure that the topic is created in advance, or Dapr has sufficient permissions to create the topic at startup time. 
+ {{% /alert %}} ## Enable the outbox pattern @@ -132,28 +181,20 @@ DAPR_STORE_NAME = "statestore" async def main(): client = DaprClient() - # Define the first state operation to save the value "2" - op1 = StateItem( - key="key1", - value=b"2" - ) - - # Define the second state operation to publish the value "3" with metadata - op2 = StateItem( - key="key1", - value=b"3", - options=StateOptions( - metadata={ - "outbox.projection": "true" - } - ) + client.execute_state_transaction( + store_name=DAPR_STORE_NAME, + operations=[ + # Define the first state operation to save the value "2" + TransactionalStateOperation( + key='key1', data='2', metadata={'outbox.projection': 'false'} + ), + # Define the second state operation to publish the value "3" with metadata + TransactionalStateOperation( + key='key1', data='3', metadata={'outbox.projection': 'true'} + ), + ], ) - # Create the list of state operations - ops = [op1, op2] - - # Execute the state transaction - await client.state.transaction(DAPR_STORE_NAME, operations=ops) print("State transaction executed.") ``` @@ -281,30 +322,45 @@ public class Main { public static void main(String[] args) { try (DaprClient client = new DaprClientBuilder().build()) { // Define the first state operation to save the value "2" - StateOperation op1 = new StateOperation<>( - StateOperationType.UPSERT, + State state1 = new State<>( "key1", - "2" + "2", + null, // etag + null // concurrency and consistency options ); // Define the second state operation to publish the value "3" with metadata Map metadata = new HashMap<>(); metadata.put("outbox.projection", "true"); - StateOperation op2 = new StateOperation<>( - StateOperationType.UPSERT, + State state2 = new State<>( "key1", "3", - metadata + null, // etag + metadata, + null // concurrency and consistency options + ); + + TransactionalStateOperation op1 = new TransactionalStateOperation<>( + TransactionalStateOperation.OperationType.UPSERT, state1 + ); + + TransactionalStateOperation op2 = 
new TransactionalStateOperation<>( + TransactionalStateOperation.OperationType.UPSERT, state2 ); - // Create the list of state operations - List> ops = new ArrayList<>(); + // Create the list of transaction state operations + List> ops = new ArrayList<>(); ops.add(op1); ops.add(op2); + // Configure transaction request setting the state store + ExecuteStateTransactionRequest transactionRequest = new ExecuteStateTransactionRequest(DAPR_STORE_NAME); + + transactionRequest.setOperations(ops); + // Execute the state transaction - client.executeStateTransaction(DAPR_STORE_NAME, ops).block(); + client.executeStateTransaction(transactionRequest).block(); System.out.println("State transaction executed."); } catch (Exception e) { e.printStackTrace(); @@ -554,39 +610,42 @@ public class StateOperationExample { executeStateTransaction(); } - public static void executeStateTransaction() { - // Build Dapr client - try (DaprClient daprClient = new DaprClientBuilder().build()) { - - // Define the value "2" - String value = "2"; - - // Override CloudEvent metadata - Map metadata = new HashMap<>(); - metadata.put("cloudevent.id", "unique-business-process-id"); - metadata.put("cloudevent.source", "CustomersApp"); - metadata.put("cloudevent.type", "CustomerCreated"); - metadata.put("cloudevent.subject", "123"); - metadata.put("my-custom-ce-field", "abc"); - - // Define state operations - List> ops = new ArrayList<>(); - StateOperation op1 = new StateOperation<>( - StateOperationType.UPSERT, - "key1", - value, - metadata - ); - ops.add(op1); - - // Execute state transaction - String storeName = "your-state-store-name"; - daprClient.executeStateTransaction(storeName, ops).block(); - System.out.println("State transaction executed."); - } catch (Exception e) { - e.printStackTrace(); - } + public static void executeStateTransaction() { + // Build Dapr client + try (DaprClient daprClient = new DaprClientBuilder().build()) { + + // Override CloudEvent metadata + Map metadata = new 
HashMap<>(); + metadata.put("cloudevent.id", "unique-business-process-id"); + metadata.put("cloudevent.source", "CustomersApp"); + metadata.put("cloudevent.type", "CustomerCreated"); + metadata.put("cloudevent.subject", "123"); + metadata.put("my-custom-ce-field", "abc"); + + State state = new State<>( + "key1", // Define the key "key1" + "value1", // Define the value "value1" + null, // etag + metadata, + null // concurrency and consistency options + ); + + // Define state operations + List> ops = new ArrayList<>(); + TransactionalStateOperation op1 = new TransactionalStateOperation<>( + TransactionalStateOperation.OperationType.UPSERT, + state + ); + ops.add(op1); + + // Execute state transaction + String storeName = "your-state-store-name"; + daprClient.executeStateTransaction(storeName, ops).block(); + System.out.println("State transaction executed."); + } catch (Exception e) { + e.printStackTrace(); } + } } ``` {{% /tab %}} @@ -682,3 +741,7 @@ The `data` CloudEvent field is reserved for Dapr's use only, and is non-customiz Watch [this video for an overview of the outbox pattern](https://youtu.be/rTovKpG0rhY?t=1338): {{< youtube id=rTovKpG0rhY start=1338 >}} + +## Next Steps + +[How Dapr Outbox Eliminates Dual Writes in Distributed Applications](https://www.diagrid.io/blog/how-dapr-outbox-eliminates-dual-writes-in-distributed-applications) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md index ddb92b63ac3..27da1a22be0 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md @@ -197,10 +197,12 @@ public class DemoWorkflowActivity implements WorkflowActivity { +### Define workflow activities + Define each workflow activity you'd like your workflow to perform. 
The Activity input can be unmarshalled from the context with `ctx.GetInput`. Activities should be defined as taking a `ctx workflow.ActivityContext` parameter and returning an interface and error. ```go -func TestActivity(ctx workflow.ActivityContext) (any, error) { +func BusinessActivity(ctx workflow.ActivityContext) (any, error) { var input int if err := ctx.GetInput(&input); err != nil { return "", err @@ -211,6 +213,87 @@ func TestActivity(ctx workflow.ActivityContext) (any, error) { } ``` +### Define the workflow + +Define your workflow function with the parameter `ctx *workflow.WorkflowContext` and return any and error. Invoke your defined activities from within your workflow. + +```go +func BusinessWorkflow(ctx *workflow.WorkflowContext) (any, error) { + var input int + if err := ctx.GetInput(&input); err != nil { + return nil, err + } + var output string + if err := ctx.CallActivity(BusinessActivity, workflow.ActivityInput(input)).Await(&output); err != nil { + return nil, err + } + if err := ctx.WaitForExternalEvent("businessEvent", time.Minute*60).Await(&output); err != nil { + return nil, err + } + + if err := ctx.CreateTimer(time.Second).Await(nil); err != nil { + return nil, nil + } + return output, nil +} +``` + +### Register workflows and activities + +Before your application can execute workflows, you must register both the workflow orchestrator and its activities with a workflow registry. This ensures Dapr knows which functions to call when executing your workflow. 
+ +```go +func main() { + // Create a workflow registry + r := workflow.NewRegistry() + + // Register the workflow orchestrator + if err := r.AddWorkflow(BusinessWorkflow); err != nil { + log.Fatal(err) + } + fmt.Println("BusinessWorkflow registered") + + // Register the workflow activities + if err := r.AddActivity(BusinessActivity); err != nil { + log.Fatal(err) + } + fmt.Println("BusinessActivity registered") + + // Create workflow client and start worker + wclient, err := client.NewWorkflowClient() + if err != nil { + log.Fatal(err) + } + fmt.Println("Worker initialized") + + ctx, cancel := context.WithCancel(context.Background()) + if err = wclient.StartWorker(ctx, r); err != nil { + log.Fatal(err) + } + fmt.Println("runner started") + + // Your application logic continues here... + // Example: Start a workflow + instanceID, err := wclient.ScheduleWorkflow(ctx, "BusinessWorkflow", workflow.WithInput(1)) + if err != nil { + log.Fatalf("failed to start workflow: %v", err) + } + fmt.Printf("workflow started with id: %v\n", instanceID) + + // Stop workflow worker when done + cancel() + fmt.Println("workflow worker successfully shutdown") +} +``` + +**Key points about registration:** +- Use `workflow.NewRegistry()` to create a workflow registry +- Use `r.AddWorkflow()` to register workflow functions +- Use `r.AddActivity()` to register activity functions +- Use `client.NewWorkflowClient()` to create a workflow client +- Call `wclient.StartWorker()` to begin processing workflows +- Use `wclient.ScheduleWorkflow` to schedule a named instance of a workflow + [See the Go SDK workflow activity example in context.](https://github.com/dapr/go-sdk/tree/main/examples/workflow/README.md) {{% /tab %}} @@ -281,9 +364,9 @@ export default class WorkflowRuntime { // Register workflow activities public registerActivity(fn: TWorkflowActivity): WorkflowRuntime { const name = getFunctionName(fn); - const activityWrapper = (ctx: ActivityContext, intput: TInput): TOutput => { + const 
activityWrapper = (ctx: ActivityContext, input: TInput): TOutput => { const wfActivityContext = new WorkflowActivityContext(ctx); - return fn(wfActivityContext, intput); + return fn(wfActivityContext, input); }; this.worker.addNamedActivity(name, activityWrapper); return this; @@ -383,16 +466,16 @@ public class DemoWorkflowWorker { Define your workflow function with the parameter `ctx *workflow.WorkflowContext` and return any and error. Invoke your defined activities from within your workflow. ```go -func TestWorkflow(ctx *workflow.WorkflowContext) (any, error) { +func BusinessWorkflow(ctx *workflow.WorkflowContext) (any, error) { var input int if err := ctx.GetInput(&input); err != nil { return nil, err } var output string - if err := ctx.CallActivity(TestActivity, workflow.ActivityInput(input)).Await(&output); err != nil { + if err := ctx.CallActivity(BusinessActivity, workflow.ActivityInput(input)).Await(&output); err != nil { return nil, err } - if err := ctx.WaitForExternalEvent("testEvent", time.Second*60).Await(&output); err != nil { + if err := ctx.WaitForExternalEvent("businessEvent", time.Minute*60).Await(&output); err != nil { return nil, err } @@ -583,6 +666,7 @@ def main(): if non_existent_id_error in err._message: print('Instance Successfully Purged') + sleep(10000) wfr.shutdown() @@ -602,139 +686,79 @@ if __name__ == '__main__': - `WorkflowRuntime`: Allows you to register workflows and workflow activities - `DaprWorkflowContext`: Allows you to [create workflows]({{% ref "#write-the-workflow" %}}) - `WorkflowActivityContext`: Allows you to [create workflow activities]({{% ref "#write-the-workflow-activities" %}}) -- API calls. In the example below, these calls start, terminate, get status, pause, resume, raise event, and purge the workflow. 
- -```javascript -import { TaskHubGrpcClient } from "@microsoft/durabletask-js"; -import { WorkflowState } from "./WorkflowState"; -import { generateApiTokenClientInterceptors, generateEndpoint, getDaprApiToken } from "../internal/index"; -import { TWorkflow } from "../../types/workflow/Workflow.type"; -import { getFunctionName } from "../internal"; -import { WorkflowClientOptions } from "../../types/workflow/WorkflowClientOption"; - -/** DaprWorkflowClient class defines client operations for managing workflow instances. */ - -export default class DaprWorkflowClient { - private readonly _innerClient: TaskHubGrpcClient; - - /** Initialize a new instance of the DaprWorkflowClient. - */ - constructor(options: Partial = {}) { - const grpcEndpoint = generateEndpoint(options); - options.daprApiToken = getDaprApiToken(options); - this._innerClient = this.buildInnerClient(grpcEndpoint.endpoint, options); - } +- API calls. The following example is a simple project consuming the workflow APIs: - private buildInnerClient(hostAddress: string, options: Partial): TaskHubGrpcClient { - let innerOptions = options?.grpcOptions; - if (options.daprApiToken !== undefined && options.daprApiToken !== "") { - innerOptions = { - ...innerOptions, - interceptors: [generateApiTokenClientInterceptors(options), ...(innerOptions?.interceptors ?? [])], - }; - } - return new TaskHubGrpcClient(hostAddress, innerOptions); - } +```bash +mkdir my-wf && cd my-wf +npm init -y +npm i @dapr/dapr @microsoft/durabletask-js +npm i -D typescript ts-node @types/node +``` - /** - * Schedule a new workflow using the DurableTask client. 
- */ - public async scheduleNewWorkflow( - workflow: TWorkflow | string, - input?: any, - instanceId?: string, - startAt?: Date, - ): Promise { - if (typeof workflow === "string") { - return await this._innerClient.scheduleNewOrchestration(workflow, input, instanceId, startAt); - } - return await this._innerClient.scheduleNewOrchestration(getFunctionName(workflow), input, instanceId, startAt); - } +Create the following `tsconfig.json` file: - /** - * Terminate the workflow associated with the provided instance id. - * - * @param {string} workflowInstanceId - Workflow instance id to terminate. - * @param {any} output - The optional output to set for the terminated workflow instance. - */ - public async terminateWorkflow(workflowInstanceId: string, output: any) { - await this._innerClient.terminateOrchestration(workflowInstanceId, output); - } +```json +{ + "compilerOptions": { + "target": "ES2020", + "module": "CommonJS", + "moduleResolution": "Node", + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "outDir": "dist" + }, + "include": ["src"] +} +``` - /** - * Fetch workflow instance metadata from the configured durable store. 
- */ - public async getWorkflowState( - workflowInstanceId: string, - getInputsAndOutputs: boolean, - ): Promise { - const state = await this._innerClient.getOrchestrationState(workflowInstanceId, getInputsAndOutputs); - if (state !== undefined) { - return new WorkflowState(state); - } - } +Create the following `src/app.ts` file: - /** - * Waits for a workflow to start running - */ - public async waitForWorkflowStart( - workflowInstanceId: string, - fetchPayloads = true, - timeoutInSeconds = 60, - ): Promise { - const state = await this._innerClient.waitForOrchestrationStart( - workflowInstanceId, - fetchPayloads, - timeoutInSeconds, - ); - if (state !== undefined) { - return new WorkflowState(state); - } - } +```typescript +import { + WorkflowRuntime, + WorkflowActivityContext, + WorkflowContext, + DaprWorkflowClient, + TWorkflow +} from "@dapr/dapr"; - /** - * Waits for a workflow to complete running - */ - public async waitForWorkflowCompletion( - workflowInstanceId: string, - fetchPayloads = true, - timeoutInSeconds = 60, - ): Promise { - const state = await this._innerClient.waitForOrchestrationCompletion( - workflowInstanceId, - fetchPayloads, - timeoutInSeconds, - ); - if (state != undefined) { - return new WorkflowState(state); - } - } +const workflowClient = new DaprWorkflowClient(); +const workflowRuntime = new WorkflowRuntime(); - /** - * Sends an event notification message to an awaiting workflow instance - */ - public async raiseEvent(workflowInstanceId: string, eventName: string, eventPayload?: any) { - this._innerClient.raiseOrchestrationEvent(workflowInstanceId, eventName, eventPayload); - } +// simple activity +const hello = async (_: WorkflowActivityContext, name: string) => `Hello ${name}!`; - /** - * Purges the workflow instance state from the workflow state store. 
- */ - public async purgeWorkflow(workflowInstanceId: string): Promise { - const purgeResult = await this._innerClient.purgeOrchestration(workflowInstanceId); - if (purgeResult !== undefined) { - return purgeResult.deletedInstanceCount > 0; - } - return false; - } +// simple workflow: call the activity 3 times +const sequence: TWorkflow = async function* (ctx: WorkflowContext): any { + const out: string[] = []; + out.push(yield ctx.callActivity(hello, "Tokyo")); + out.push(yield ctx.callActivity(hello, "Seattle")); + out.push(yield ctx.callActivity(hello, "London")); + out.push(yield ctx.waitForExternalEvent("continue")); + return out; +}; + +async function main() { + workflowRuntime.registerWorkflow(sequence).registerActivity(hello); + await workflowRuntime.start(); + + const id = await workflowClient.scheduleNewWorkflow(sequence); + console.log("Scheduled:", id); + + workflowClient.raiseEvent(id, "continue", "Go go go!"); + + const state = await workflowClient.waitForWorkflowCompletion(id, undefined, 30); + console.log("Done:", state?.runtimeStatus, "output:", state?.serializedOutput); + + await new Promise(f => setTimeout(f, 100000)); + + await workflowRuntime.stop(); + await workflowClient.stop(); - /** - * Closes the inner DurableTask client and shutdown the GRPC channel. - */ - public async stop() { - await this._innerClient.stop(); - } } + +main().catch((e) => { console.error(e); }); ``` {{% /tab %}} @@ -864,7 +888,7 @@ public class DemoWorkflow extends Workflow { [As in the following example](https://github.com/dapr/go-sdk/tree/main/examples/workflow/README.md), a hello-world application using the Go SDK and Dapr Workflow would include: - A Go package called `client` to receive the Go SDK client capabilities. -- The `TestWorkflow` method +- The `BusinessWorkflow` method - Creating the workflow with input and output. - API calls. In the example below, these calls start and call the workflow activities. 
@@ -874,86 +898,98 @@ package main import ( "context" + "errors" "fmt" "log" + "strconv" "time" - "github.com/dapr/durabletask-go/api" - "github.com/dapr/durabletask-go/backend" - "github.com/dapr/durabletask-go/client" - "github.com/dapr/durabletask-go/task" - dapr "github.com/dapr/go-sdk/client" + "github.com/dapr/durabletask-go/workflow" + "github.com/dapr/go-sdk/client" ) var stage = 0 - -const ( - workflowComponent = "dapr" -) +var failActivityTries = 0 func main() { - registry := task.NewTaskRegistry() + r := workflow.NewRegistry() - if err := registry.AddOrchestrator(TestWorkflow); err != nil { + if err := r.AddWorkflow(BusinessWorkflow); err != nil { log.Fatal(err) } - fmt.Println("TestWorkflow registered") + fmt.Println("BusinessWorkflow registered") - if err := registry.AddActivity(TestActivity); err != nil { + if err := r.AddActivity(BusinessActivity); err != nil { log.Fatal(err) } - fmt.Println("TestActivity registered") + fmt.Println("BusinessActivity registered") - daprClient, err := dapr.NewClient() - if err != nil { - log.Fatalf("failed to create Dapr client: %v", err) + if err := r.AddActivity(FailActivity); err != nil { + log.Fatal(err) } + fmt.Println("FailActivity registered") - client := client.NewTaskHubGrpcClient(daprClient.GrpcClientConn(), backend.DefaultLogger()) - if err := client.StartWorkItemListener(context.TODO(), registry); err != nil { - log.Fatalf("failed to start work item listener: %v", err) + wclient, err := client.NewWorkflowClient() + if err != nil { + log.Fatal(err) } + fmt.Println("Worker initialized") + ctx, cancel := context.WithCancel(context.Background()) + if err = wclient.StartWorker(ctx, r); err != nil { + log.Fatal(err) + } fmt.Println("runner started") - ctx := context.Background() - // Start workflow test - id, err := client.ScheduleNewOrchestration(ctx, "TestWorkflow", api.WithInput(1)) + // Set the start time to the current time to not wait for the workflow to + // "start". 
This is useful for increasing the throughput of creating + // workflows. + // workflow.WithStartTime(time.Now()) + instanceID, err := wclient.ScheduleWorkflow(ctx, "BusinessWorkflow", workflow.WithInstanceID("a7a4168d-3a1c-41da-8a4f-e7f6d9c718d9"), workflow.WithInput("1")) if err != nil { log.Fatalf("failed to start workflow: %v", err) } - fmt.Printf("workflow started with id: %v\n", id) + fmt.Printf("workflow started with id: %v\n", instanceID) // Pause workflow test - err = client.PurgeOrchestrationState(ctx, id) + err = wclient.SuspendWorkflow(ctx, instanceID, "") if err != nil { log.Fatalf("failed to pause workflow: %v", err) } - respGet, err := client.FetchOrchestrationMetadata(ctx, id) + respFetch, err := wclient.FetchWorkflowMetadata(ctx, instanceID, workflow.WithFetchPayloads(true)) if err != nil { - log.Fatalf("failed to get workflow: %v", err) + log.Fatalf("failed to fetch workflow: %v", err) + } + + if respFetch.RuntimeStatus != workflow.StatusSuspended { + log.Fatalf("workflow not paused: %s: %v", respFetch.RuntimeStatus, respFetch) } - fmt.Printf("workflow paused: %s\n", respGet.RuntimeStatus) + + fmt.Printf("workflow paused\n") // Resume workflow test - err = client.ResumeOrchestration(ctx, id, "") + err = wclient.ResumeWorkflow(ctx, instanceID, "") if err != nil { log.Fatalf("failed to resume workflow: %v", err) } - fmt.Printf("workflow running: %s\n", respGet.RuntimeStatus) - respGet, err = client.FetchOrchestrationMetadata(ctx, id) + respFetch, err = wclient.FetchWorkflowMetadata(ctx, instanceID, workflow.WithFetchPayloads(true)) if err != nil { log.Fatalf("failed to get workflow: %v", err) } - fmt.Printf("workflow resumed: %s\n", respGet.RuntimeStatus) + + if respFetch.RuntimeStatus != workflow.StatusRunning { + log.Fatalf("workflow not running") + } + + fmt.Println("workflow resumed") fmt.Printf("stage: %d\n", stage) - // Raise Event Test - err = client.RaiseEvent(ctx, id, "testEvent", api.WithEventPayload("testData")) + // Raise Event + err = 
wclient.RaiseEvent(ctx, instanceID, "businessEvent", workflow.WithEventPayload("testData")) if err != nil { fmt.Printf("failed to raise event: %v", err) } @@ -964,53 +1000,117 @@ func main() { fmt.Printf("stage: %d\n", stage) - respGet, err = client.FetchOrchestrationMetadata(ctx, id) + _, err = wclient.WaitForWorkflowCompletion(ctx, instanceID) + if err != nil { + log.Fatalf("failed to wait for workflow: %v", err) + } + + fmt.Printf("fail activity executions: %d\n", failActivityTries) + + respFetch, err = wclient.FetchWorkflowMetadata(ctx, instanceID, workflow.WithFetchPayloads(true)) if err != nil { log.Fatalf("failed to get workflow: %v", err) } - fmt.Printf("workflow status: %v\n", respGet.RuntimeStatus) + fmt.Printf("workflow status: %v\n", respFetch.String()) // Purge workflow test - err = client.PurgeOrchestrationState(ctx, id) + err = wclient.PurgeWorkflowState(ctx, instanceID) + if err != nil { + log.Fatalf("failed to purge workflow: %v", err) + } + + respFetch, err = wclient.FetchWorkflowMetadata(ctx, instanceID, workflow.WithFetchPayloads(true)) + if err == nil || respFetch != nil { + log.Fatalf("failed to purge workflow: %v", err) + } + + fmt.Println("workflow purged") + + fmt.Printf("stage: %d\n", stage) + + // Terminate workflow test + id, err := wclient.ScheduleWorkflow(ctx, "BusinessWorkflow", workflow.WithInstanceID("a7a4168d-3a1c-41da-8a4f-e7f6d9c718d9"), workflow.WithInput("1")) + if err != nil { + log.Fatalf("failed to start workflow: %v", err) + } + fmt.Printf("workflow started with id: %v\n", instanceID) + + metadata, err := wclient.WaitForWorkflowStart(ctx, id) + if err != nil { + log.Fatalf("failed to get workflow: %v", err) + } + fmt.Printf("workflow status: %s\n", metadata.String()) + + err = wclient.TerminateWorkflow(ctx, id) + if err != nil { + log.Fatalf("failed to terminate workflow: %v", err) + } + fmt.Println("workflow terminated") + + err = wclient.PurgeWorkflowState(ctx, id) if err != nil { log.Fatalf("failed to purge workflow: 
%v", err) } fmt.Println("workflow purged") + + <-ctx.Done() + cancel() + + fmt.Println("workflow worker successfully shutdown") } -func TestWorkflow(ctx *task.OrchestrationContext) (any, error) { - var input int +func BusinessWorkflow(ctx *workflow.WorkflowContext) (any, error) { + var input string if err := ctx.GetInput(&input); err != nil { return nil, err } var output string - if err := ctx.CallActivity(TestActivity, task.WithActivityInput(input)).Await(&output); err != nil { + if err := ctx.CallActivity(BusinessActivity, workflow.WithActivityInput(input)).Await(&output); err != nil { return nil, err } - err := ctx.WaitForSingleEvent("testEvent", time.Second*60).Await(&output) + err := ctx.WaitForExternalEvent("businessEvent", time.Minute*60).Await(&output) if err != nil { return nil, err } - if err := ctx.CallActivity(TestActivity, task.WithActivityInput(input)).Await(&output); err != nil { + if err := ctx.CallActivity(BusinessActivity, workflow.WithActivityInput(input)).Await(&output); err != nil { return nil, err } + if err := ctx.CallActivity(FailActivity, workflow.WithActivityRetryPolicy(&workflow.RetryPolicy{ + MaxAttempts: 3, + InitialRetryInterval: 100 * time.Millisecond, + BackoffCoefficient: 2, + MaxRetryInterval: 1 * time.Second, + })).Await(nil); err == nil { + return nil, fmt.Errorf("unexpected no error executing fail activity") + } + return output, nil } -func TestActivity(ctx task.ActivityContext) (any, error) { - var input int +func BusinessActivity(ctx workflow.ActivityContext) (any, error) { + var input string if err := ctx.GetInput(&input); err != nil { return "", err } - stage += input + iinput, err := strconv.Atoi(input) + if err != nil { + return "", err + } + + stage += iinput return fmt.Sprintf("Stage: %d", stage), nil } + +func FailActivity(ctx workflow.ActivityContext) (any, error) { + failActivityTries += 1 + return nil, errors.New("dummy activity error") +} ``` [See the full Go SDK workflow example in 
context.](https://github.com/dapr/go-sdk/tree/main/examples/workflow/README.md) @@ -1025,6 +1125,272 @@ Because of how replay-based workflows execute, you'll write logic that does thin {{% /alert %}} +## Run the workflow & inspect the workflow execution with the Diagrid Dashboard + +Start the workflow application via your IDE or the Dapr CLI ([Dapr multi-app run]({{% ref multi-app-overview.md %}}) if you want to start multiple applications, or regular [Dapr run command](#testing-the-workflow-via-the-dapr-cli) for one application), and schedule a new workflow instance. + +Use the local [Diagrid Dashboard](https://diagrid.ws/diagrid-dashboard-docs) to visualize and inspect your workflow state, and drill down to see detailed workflow execution history. The dashboard runs as a container and is connected to the state store that is used by Dapr workflows (by default a local Redis instance). + +Diagrid Dashboard showing local workflow executions
+ +Start the Diagrid Dashboard container using Docker: + +```bash +docker run -p 8080:8080 ghcr.io/diagridio/diagrid-dashboard:latest +``` + +{{% alert title="Note" color="primary" %}} +If you're using another state store than the default Redis instance, you need to provide some additional arguments to run the container, see the [Diagrid Dashboard reference docs](https://diagrid.ws/diagrid-dashboard-docs). +{{% /alert %}} + + +Open the dashboard in a browser at [http://localhost:8080](http://localhost:8080). + + +## Testing the workflow via the Dapr CLI + +After authoring the workflow, you can test it using the Dapr CLI: + +{{< tabpane text=true >}} + +{{% tab "Python" %}} + +#### Run the workflow application + +```bash +dapr run --app-id workflow-app python3 app.py +``` +Make sure the application is running: + +```bash +dapr list +``` + +#### Run the workflow +```bash +dapr workflow run hello_world_wf --app-id workflow-app --input 'hello world' --instance-id test-run +``` + +#### Check the workflow status +```bash +dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 -o wide +``` + +#### Check completed workflows +```bash +dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 --filter-status COMPLETED -o wide +``` + +#### View workflow history +```bash +dapr workflow history --app-id workflow-app test-run +``` + +{{% /tab %}} + +{{% tab "Javascript" %}} + +#### Run the workflow application + +```bash +dapr run --app-id workflow-app npx ts-node src/app.ts +``` +Make sure the application is running: + +```bash +dapr list +``` + +#### Run the workflow +```bash +dapr workflow run sequence --app-id workflow-app --input 'hello world' --instance-id test-run +``` + +#### Check the workflow status +```bash +dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 -o wide +``` + +#### Raise the waiting external event +```bash +dapr workflow raise-event --app-id workflow-app 
test-run/businessEvent +``` + +#### Check completed workflows +```bash +dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 --filter-status COMPLETED -o wide +``` + +#### View workflow history +```bash +dapr workflow history --app-id workflow-app test-run +``` + +{{% /tab %}} + +{{% tab ".NET" %}} + +#### Run the workflow application + +```bash +dapr run --app-id workflow-app dotnet run +``` +Make sure the application is running: + +```bash +dapr list +``` + +#### Run the workflow +```bash +dapr workflow run OrderProcessingWorkflow --app-id workflow-app --instance-id test-run --input '{"name": "Paperclips", "totalCost": 99.95}' +``` + +#### Check the workflow status +```bash +dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 -o wide +``` + +#### Raise the waiting external event +```bash +dapr workflow raise-event --app-id workflow-app test-run/incoming-purchase-order --input '{"name": "Paperclips", "totalCost": 99.95}' +``` + +#### Check completed workflows +```bash +dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 --filter-status COMPLETED -o wide +``` + +#### View workflow history +```bash +dapr workflow history --app-id workflow-app test-run +``` + +{{% /tab %}} + +{{% tab "Java" %}} + +#### Run the workflow application + +```bash +dapr run --app-id workflow-app -- java -jar target/WorkflowService-0.0.1-SNAPSHOT.jar +``` + +Make sure the application is running: + +```bash +dapr list +``` + +#### Run the workflow +```bash +dapr workflow run DemoWorkflow --app-id workflow-app --instance-id test-run --input "input data" +``` + +#### Check the workflow status +```bash +dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 -o wide +``` + +#### Raise the waiting external event +```bash +dapr workflow raise-event --app-id workflow-app test-run/TestEvent --input 'TestEventPayload' +dapr workflow raise-event --app-id workflow-app 
test-run/event1 --input 'TestEvent 1 Payload' +dapr workflow raise-event --app-id workflow-app test-run/event2 --input 'TestEvent 2 Payload' +dapr workflow raise-event --app-id workflow-app test-run/event3 --input 'TestEvent 3 Payload' +``` + +#### Check completed workflows +```bash +dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 --filter-status COMPLETED -o wide +``` + +#### View workflow history +```bash +dapr workflow history --app-id workflow-app test-run +``` + +{{% /tab %}} + +{{% tab "Go" %}} + +#### Run the workflow application +```bash +dapr run --app-id workflow-app go run main.go +``` + +Make sure the application is running: + +```bash +dapr list +``` + +#### Run the workflow +```bash +dapr workflow run BusinessWorkflow --app-id workflow-app --input '1' --instance-id test-run +``` + +#### Check the workflow status +```bash +dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 -o wide +``` + +#### Raise the waiting external event +```bash +dapr workflow raise-event --app-id workflow-app test-run/businessEvent +``` + +#### Check completed workflows +```bash +dapr workflow list --app-id workflow-app --connection-string=redis://127.0.0.1:6379 --filter-status COMPLETED -o wide +``` + +#### View workflow history +```bash +dapr workflow history test-run --app-id workflow-app +``` + +{{% /tab %}} + +{{< /tabpane >}} + + +### Monitor Workflow Execution + +```bash +dapr workflow list --app-id workflow-app --filter-status RUNNING -o wide +``` + +```bash +dapr workflow list --app-id workflow-app --filter-status FAILED -o wide +``` + +```bash +dapr workflow list --app-id workflow-app --filter-status COMPLETED -o wide +``` + +### Test External Events + +```bash +# Raise an event your workflow is waiting for +dapr workflow raise-event /ApprovalReceived \ + --app-id workflow-app \ + --input '{"approved": true, "approver": "manager@company.com"}' +``` + +### Debug Failed Workflows + +```bash +# List 
failed workflows +dapr workflow list --app-id workflow-app --filter-status FAILED --output wide + +# Get detailed history of a failed workflow +dapr workflow history <instance-id> --app-id workflow-app --output json + +# Re-run the workflow after fixing issues +dapr workflow rerun <instance-id> --app-id workflow-app --input '<fixed-input>' +``` + ## Next steps Now that you've authored a workflow, learn how to manage it. diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md index de4f6b23249..db41b48205a 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-manage-workflow.md @@ -6,10 +6,361 @@ weight: 6000 description: Manage and run workflows --- -Now that you've [authored the workflow and its activities in your application]({{% ref howto-author-workflow.md %}}), you can start, terminate, and get information about the workflow using HTTP API calls. For more information, read the [workflow API reference]({{% ref workflow_api.md %}}). +Now that you've [authored the workflow and its activities in your application]({{% ref howto-author-workflow.md %}}), you can start, terminate, and get information about the workflow using the CLI or API calls. For more information, read the [workflow API reference]({{% ref workflow_api.md %}}). {{< tabpane text=true >}} + + +{{% tab "CLI" %}} +Workflow reminders are stored in the Scheduler and can be managed using the dapr scheduler CLI. 
+ +#### List workflow reminders + +```bash +dapr scheduler list --filter workflow +NAME BEGIN COUNT LAST TRIGGER +workflow/my-app/instance1/timer-0-ABC123 +50.0h 0 +workflow/my-app/instance2/timer-0-XYZ789 +50.0h 0 +``` + +#### Get reminder details + +```bash +dapr scheduler get workflow/my-app/instance1/timer-0-ABC123 -o yaml +``` + +#### Delete workflow reminders + +Delete a single reminder: + +```bash +dapr scheduler delete workflow/my-app/instance1/timer-0-ABC123 +``` + +Delete all reminders for a given workflow app: + +```bash +dapr scheduler delete-all workflow/my-app +``` + +Delete all reminders for a specific workflow instance: + +```bash +dapr scheduler delete-all workflow/my-app/instance1 +``` + +#### Backup and restore reminders + +Export all reminders: + +```bash +dapr scheduler export -o workflow-reminders-backup.bin +``` + +Restore from a backup file: + +```bash +dapr scheduler import -f workflow-reminders-backup.bin +``` + +#### Summary + +- Workflow reminders are persisted in the Dapr Scheduler. +- Create workflow reminders via the Workflow API. +- Manage reminders (list, get, delete, backup/restore) with the dapr scheduler CLI. + +## Managing Workflows with the Dapr CLI + +The Dapr CLI provides commands for managing workflow instances in both self-hosted and Kubernetes environments. 
+ +### Prerequisites + +- Dapr CLI version 1.16.2 or later +- A running Dapr application that has registered a workflow +- For database operations: network access to your actor state store + +### Basic Workflow Operations + +#### Start a Workflow + +```bash +# Using the `orderprocessing` application, start a new workflow instance with input data +dapr workflow run OrderProcessingWorkflow \ + --app-id orderprocessing \ + --input '{"orderId": "12345", "amount": 100.50}' + +# Start with a new workflow with a specific instance ID +dapr workflow run OrderProcessingWorkflow \ + --app-id orderprocessing \ + --instance-id order-12345 \ + --input '{"orderId": "12345"}' + +# Schedule a new workflow to start at 10:00:00 AM on December 25, 2024, Coordinated Universal Time (UTC). +dapr workflow run OrderProcessingWorkflow \ + --app-id orderprocessing \ + --start-time "2024-12-25T10:00:00Z" +``` + +#### List Workflow Instances + +```bash +# List all workflows for an app +dapr workflow list --app-id orderprocessing + +# Filter by status +dapr workflow list --app-id orderprocessing --filter-status RUNNING + +# Filter by workflow name +dapr workflow list --app-id orderprocessing --filter-name OrderProcessingWorkflow + +# Filter by age (workflows started in last 24 hours) +dapr workflow list --app-id orderprocessing --filter-max-age 24h + +# Get detailed output +dapr workflow list --app-id orderprocessing --output wide +``` + +#### View Workflow History + +```bash +# Get execution history +dapr workflow history order-12345 --app-id orderprocessing + +# Get history in JSON format +dapr workflow history order-12345 --app-id orderprocessing --output json +``` + +#### Control Workflow Execution + +```bash +# Suspend a running workflow +dapr workflow suspend order-12345 \ + --app-id orderprocessing \ + --reason "Waiting for manual approval" + +# Resume a suspended workflow +dapr workflow resume order-12345 \ + --app-id orderprocessing \ + --reason "Approved by manager" + +# Terminate a 
workflow +dapr workflow terminate order-12345 \ + --app-id orderprocessing \ + --output '{"reason": "Cancelled by customer"}' +``` + +#### Raise External Events + +```bash +# Raise an event for a waiting workflow +dapr workflow raise-event order-12345/PaymentReceived \ + --app-id orderprocessing \ + --input '{"paymentId": "pay-67890", "amount": 100.50}' +``` + +#### Re-run Workflows + +```bash +# Re-run from the beginning +dapr workflow rerun order-12345 --app-id orderprocessing + +# Re-run from a specific event +dapr workflow rerun order-12345 \ + --app-id orderprocessing \ + --event-id 5 + +# Re-run with a new instance ID +dapr workflow rerun order-12345 \ + --app-id orderprocessing \ + --new-instance-id order-12345-retry +``` + +#### Purge Completed Workflows + +Note that purging a workflow from the CLI will also delete all associated Scheduler reminders. + +{{% alert title="Important" color="warning" %}} +It is required that a workflow client is running in the application to perform purge operations. +The workflow client connection is required in order to preserve the workflow state machine integrity and prevent corruption. +Errors like the following suggest that the workflow client is not running: +``` +failed to purge orchestration state: rpc error: code = FailedPrecondition desc = failed to purge orchestration state: failed to lookup actor: api error: code = FailedPrecondition desc = did not find address for actor +``` +{{% /alert %}} + +```bash +# Purge a specific instance +dapr workflow purge order-12345 --app-id orderprocessing + +# Purge all completed workflows older than 30 days +dapr workflow purge --app-id orderprocessing --all-older-than 720h + +# Purge all terminal workflows (use with caution!) 
+dapr workflow purge --app-id orderprocessing --all +``` + +### Kubernetes Operations + +All commands support the `-k` flag for Kubernetes deployments: + +```bash +# List workflows in Kubernetes +dapr workflow list \ + --kubernetes \ + --namespace production \ + --app-id orderprocessing + +# Suspend a workflow in Kubernetes +dapr workflow suspend order-12345 \ + --kubernetes \ + --namespace production \ + --app-id orderprocessing \ + --reason "Maintenance window" +``` + +### Advanced: Direct Database Access + +For advanced operations like listing and purging workflows, you can connect directly to the actor state store database. This is useful for: + +- Querying workflows across multiple app instances +- Bulk operations on workflow metadata +- Custom filtering beyond what the API provides + +#### Self-Hosted Mode + +In self-hosted mode, the CLI can automatically discover your state store configuration: + +```bash +# The CLI reads your component configuration automatically +dapr workflow list --app-id orderprocessing +``` + +To override with a specific connection string: + +```bash +# PostgreSQL +dapr workflow list \ + --app-id orderprocessing \ + --connection-string "host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable" \ + --table-name actor-store + +# MySQL +dapr workflow list \ + --app-id orderprocessing \ + --connection-string "dapr:dapr@tcp(localhost:3306)/dapr?parseTime=true" \ + --table-name actor-store + +# SQL Server +dapr workflow list \ + --app-id orderprocessing \ + --connection-string "sqlserver://dapr:Pass@word1@localhost:1433?database=dapr" \ + --table-name actor-store + +# Redis +dapr workflow list \ + --app-id orderprocessing \ + --connection-string=redis://user:mypassword@127.0.0.1:6379 +``` + +#### Kubernetes Mode with Port Forwarding + +In Kubernetes, you need to establish connectivity to your database: + +**Step 1: Port forward to your database service** + +```bash +# PostgreSQL 
+kubectl port-forward service/postgres 5432:5432 -n production + +# MySQL +kubectl port-forward service/mysql 3306:3306 -n production + +# SQL Server +kubectl port-forward service/mssql 1433:1433 -n production + +# Redis +kubectl port-forward service/redis 6379:6379 -n production +``` + +**Step 2: Use the CLI with the connection string** + +```bash +# PostgreSQL example +dapr workflow list \ + --kubernetes \ + --namespace production \ + --app-id orderprocessing \ + --connection-string "host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable" \ + --table-name workflows + +# Purge old workflows +dapr workflow purge \ + --kubernetes \ + --namespace production \ + --app-id orderprocessing \ + --connection-string "host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable" \ + --table-name workflows \ + --all-older-than 2160h # 90 days +``` + +**Step 3: Stop port forwarding when done** + +```bash +# Press Ctrl+C to stop the port forward +``` + +#### Connection String Formats by Database + +**PostgreSQL / CockroachDB** +``` +host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable connect_timeout=10 +``` + +**MySQL** +``` +username:password@tcp(host:port)/database?parseTime=true&loc=UTC +``` + +**SQL Server** +``` +sqlserver://username:password@host:port?database=dbname&encrypt=false +``` + +**MongoDB** +``` +mongodb://username:password@localhost:27017/database +``` + +**Redis** +``` +redis://127.0.0.1:6379 +``` + +### Workflow Management Best Practices + +1. **Regular Cleanup**: Schedule periodic purge operations for completed workflows + ```bash + # Weekly cron job to purge workflows older than 90 days + dapr workflow purge --app-id orderprocessing --all-older-than 2160h + ``` + +2. **Monitor Running Workflows**: Use filtered lists to track long-running instances + ```bash + dapr workflow list --app-id orderprocessing --filter-status RUNNING --filter-max-age 24h + ``` + +3. 
**Use Instance IDs**: Assign meaningful instance IDs for easier tracking + ```bash + dapr workflow run OrderWorkflow --app-id orderprocessing --instance-id "order-$(date +%s)" + ``` + +4. **Export for Analysis**: Export workflow data for analysis + ```bash + dapr workflow list --app-id orderprocessing --output json > workflows.json + ``` + +{{% /tab %}} + {{% tab "Python" %}} @@ -356,7 +707,7 @@ To resume a workflow with an ID `12345678`, run: curl -X POST "http://localhost:3500/v1.0/workflows/dapr/12345678/resume" ``` -### Purge a workflow +### Purge a workflow The purge API can be used to permanently delete workflow metadata from the underlying state store, including any stored inputs, outputs, and workflow history records. This is often useful for implementing data retention policies and for freeing resources. diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md index 2fdd37d1cf8..47893aab80a 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md @@ -175,6 +175,59 @@ Similarly, if a state store imposes restrictions on the size of a batch transact Workflow state can be purged from a state store, including all its history. Each Dapr SDK exposes APIs for purging all metadata related to specific workflow instances. +#### State store record count + +The number of records which are saved as history in the state store per workflow run is determined by its complexity or "shape". In other words, the number of activities, timers, sub-workflows etc. +The following table shows a general guide to the number of records that are saved by different workflow tasks. +This number may be larger or smaller depending on retries or concurrency. 
+ +| Task type | Number of records saved | +| ----------|-------------------------| +| Start workflow | 5 records | +| Call activity | 3 records | +| Timer | 3 records | +| Raise event | 3 records | +| Start child workflow | 8 records | + +#### Direct Database Access + +For advanced operations, you can access workflow data directly: + +```bash +# Port forward to a postgres database in Kubernetes +kubectl port-forward service/postgres 5432:5432 + +# Query workflows directly +dapr workflow list \ + --app-id myapp \ + --connection-string "host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable" \ + --table-name workflows +``` + +```bash +# Port forward to redis database in Kubernetes +kubectl port-forward service/redis 6379:6379 + +# Query workflows directly +dapr workflow list \ + --app-id myapp \ + --connection-string redis://127.0.0.1:6379 \ + --table-name workflows +``` + +### Supported State Stores + +The workflow engine supports these state stores: +- PostgreSQL +- MySQL +- SQL Server +- SQLite +- Oracle Database +- CockroachDB +- MongoDB +- Redis + + ## Workflow scalability Because Dapr Workflows are internally implemented using actors, Dapr Workflows have the same scalability characteristics as actors. 
diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md index 2114b1827d7..58c7df2b8d7 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-features-concepts.md @@ -24,6 +24,62 @@ There are several different kinds of tasks that a workflow can schedule, includi - [Child workflows]({{% ref "workflow-features-concepts.md#child-workflows" %}}) for breaking larger workflows into smaller pieces - [External event waiters]({{% ref "workflow-features-concepts.md#external-events" %}}) for blocking workflows until they receive external event signals. These tasks are described in more details in their corresponding sections. +## Workflow Instance Management + +### Querying Workflow State + +You can query workflow instances using the CLI: + +```bash +# Find all running workflows +dapr workflow list --app-id myapp --filter-status RUNNING + +# Find workflows by name +dapr workflow list --app-id myapp --filter-name OrderProcessing + +# Find recent workflows (last 2 hours) +dapr workflow list --app-id myapp --filter-max-age 2h + +# Get detailed JSON output +dapr workflow list --app-id myapp --output json +``` + +### Workflow History + +View the complete execution history: + +```bash +dapr workflow history wf-12345 --app-id myapp --output json +``` + +This shows all events, activities, and state transitions. 
+ +## External Events + +### Raising Events via CLI + +```bash +dapr workflow raise-event wf-12345/ApprovalReceived \ + --app-id myapp \ + --input '{"approved": true, "comments": "Approved by manager"}' +``` + +## Workflow Suspension and Resumption + +### Using the CLI + +```bash +# Suspend for manual intervention +dapr workflow suspend wf-12345 \ + --app-id myapp \ + --reason "Awaiting customer response" + +# Resume when ready +dapr workflow resume wf-12345 \ + --app-id myapp \ + --reason "Customer responded" +``` + ### Workflow identity Each workflow you define has a type name, and individual executions of a workflow require a unique _instance ID_. Workflow instance IDs can be generated by your app code, which is useful when workflows correspond to business entities like documents or jobs, or can be auto-generated UUIDs. A workflow's instance ID is useful for debugging and also for managing workflows using the [Workflow APIs]({{% ref workflow_api.md %}}). @@ -131,11 +187,11 @@ Because workflow retry policies are configured in code, the exact developer expe | Parameter | Description | | --- | --- | -| **Maximum number of attempts** | The maximum number of times to execute the activity or child workflow. | +| **Maximum number of attempts** | The maximum number of times to execute the activity or child workflow. If set to 0, no attempts will be made. | | **First retry interval** | The amount of time to wait before the first retry. | | **Backoff coefficient** | The coefficient used to determine the rate of increase of back-off. For example a coefficient of 2 doubles the wait of each subsequent retry. | -| **Maximum retry interval** | The maximum amount of time to wait before each subsequent retry. | -| **Retry timeout** | The overall timeout for retries, regardless of any configured max number of attempts. | +| **Maximum retry interval** | The maximum amount of time to wait before each subsequent retry. If set to 0, no retries will happen. 
| +| **Retry timeout** | The global timeout for retries, regardless of any configured max number of attempts. No further attempts are made executing activities after this timeout expires. ## External events diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-multi-app.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-multi-app.md index 8eb2f439430..b369c2153e5 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-multi-app.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-multi-app.md @@ -6,8 +6,9 @@ weight: 7000 description: "Executing workflows across multiple applications" --- -It is often the case that a single workflow spans multiple applications, microservices, or programing languages. +It is often the case that a single workflow spans multiple applications, microservices, or programming languages. This is where an activity or a child workflow will be executed on a different application than the one hosting the parent workflow. + Some scenarios where this is useful include: - A Machine Learning (ML) training activity must be executed on GPU-enabled machines, while the rest of the workflow runs on CPU-only orchestration machines. @@ -15,85 +16,99 @@ Some scenarios where this is useful include: - Different parts of the workflow need to be executed in different trust zones or networks. - Different parts of the workflow need to be executed in different geographic regions due to data residency requirements. - An involved business process spans multiple teams or departments, each owning their own application. -- Implementation of a workflow spans different programming lanaguages based on team expertise or existing codebases. +- Implementation of a workflow spans different programming languages based on team expertise or existing codebases. +- Different team boundaries or microservice ownership. 
+ +Diagram showing multi-application complex workflow + +The diagram below shows an example scenario of a complex workflow that orchestrates across multiple applications that are written in different languages. Each application's main steps and activities are: + +• **App1: Main Workflow Service** - Top-level orchestrator that coordinates the entire ML pipeline +- Starts the process +- Calls data processing activities on App2 +- Calls ML training child workflow on App3 +- Calls model deployment on App4 +- Ends the complete workflow +- **Language: Java** + +• **App2: Data Processing Pipeline** - **GPU activities** only +- Data Ingesting Activity (GPU-accelerated) +- Feature Engineering Activity (GPU-accelerated) +- Returns completion signal to Main Workflow +- **Language: Go** + +• **App3: ML Training Child Workflow** - Contains a child workflow and activities +- Child workflow orchestrates: + - Data Processing Activity + - Model Training Activity (GPU-intensive) + - Model Validation Activity +- Triggered by App2's activities completing +- Returns completion signal to Main Workflow +- **Language: Java** + +• **App4: Model Serving Service** - **Beefy GPU app** with activities only +- Model Loading Activity (GPU memory intensive) +- Inference Setup Activity (GPU-accelerated inference) +- Triggered by App3's workflow completing +- Returns completion signal to Main Workflow +- **Language: Go** ## Multi-application workflows -Like all building blocks in Dapr, workflow execution routing is based on the [App ID of the hosting Dapr application]({{% ref "security-concept.md#application-identity" %}}). -By default, the full workflow execution is hosted on the app ID that started the workflow. -This workflow will be executed across all replicas of that app ID, not just the single replica which scheduled the workflow. +Workflow execution routing is based on the [App ID of the hosting Dapr application]({{% ref "security-concept.md#application-identity" %}}). 
+By default, the full workflow execution is hosted on the app ID that started the workflow. This workflow can be executed across any replicas of that app ID, not just the single replica which scheduled the workflow. + -It is possible to execute activities or child workflows on different app IDs by specifying the target app ID parameter, inside the workflow execution code. -Upon execution, the target app ID will execute the activity or child workflow, and return the result to the parent workflow of the originating app ID. -Workflows being durable, if the target activity or child workflow app ID is not available or has not been defined, the parent workflow retry until the target app ID becomes available, indefinitely. -It is paramount that their is co-ordination between the teams owning the different app IDs to ensure that the activities and child workflows are defined and available when needed. +It is possible to execute activities and child workflows on different app IDs by specifying the target app ID parameter, inside the workflow execution code. +Upon execution, the target app ID executes the activity or child workflow, and returns the result to the parent workflow of the originating app ID. The entire Workflow execution may be distributed across multiple app IDs with no limit, with each activity or child workflow specifying the target app ID. The final history of the workflow will be saved by the app ID that hosts the very parent (or can consider it the root) workflow. {{% alert title="Restrictions" color="primary" %}} -Like other building blocks and resources in Dapr, workflows are scoped to a single namespace. +Like other API building blocks and resources in Dapr, workflows are scoped to a single namespace. This means that all app IDs involved in a multi-application workflow must be in the same namespace. -Similarly, all app IDs must use the same actor state store. 
-Finally, the target app ID must have the activity or child workflow defined, otherwise the parent workflow will retry indefinitely. +Similarly, all app IDs must use the same workflow (or actor) state store. +Finally, the target app ID must have the activity or child workflow defined and registered, otherwise the parent workflow retries indefinitely. {{% /alert %}} -## Multi-application activity examples - -The following examples show how to execute activities on different target app IDs. - -{{< tabpane text=true >}} - -{{% tab "Go" %}} - -```go -package main +{{% alert title="Important Limitations" color="warning" %}} +**SDKs supporting multi-application workflows** - Multi-application workflows are used via the SDKs. +Currently the following are supported: +- **Java** (**only** activity calls) +- **Go** (**both** activities and child workflows calls) +- **Python** (**both** activities and child workflows calls) +- **.NET** (**both** activities and child workflows calls) +- Support is planned for future releases for the JavaScript SDK. +{{% /alert %}} -import ( - "context" - "log" +## Error handling - "github.com/dapr/durabletask-go/backend" - "github.com/dapr/durabletask-go/client" - "github.com/dapr/durabletask-go/task" - dapr "github.com/dapr/go-sdk/client" -) +When calling multi-application activities or child workflows: +- If the target application does not exist, the call will be retried using the provided retry policy. +- If the target application exists but doesn't contain the specified activity or workflow, the call will return an error. +- Standard workflow retry policies apply to multi-application calls. -func main() { - ctx := context.Background() +It is paramount that there is coordination between the teams owning the different app IDs to ensure that the activities and child workflows are defined and available when needed. 
- registry := task.NewTaskRegistry() - if err := registry.AddOrchestrator(TestWorkflow); err != nil { - log.Fatal(err) - } +## Multi-application activity example - daprClient, err := dapr.NewClient() - if err != nil { - log.Fatal(err) - } +Diagram showing multi-application call activity workflow pattern - client := client.NewTaskHubGrpcClient(daprClient.GrpcClientConn(), backend.DefaultLogger()) - if err := client.StartWorkItemListener(ctx, registry); err != nil { - log.Fatal(err) - } +The following example shows how to execute the activity `ActivityA` on the target app `App2`. - id, err := client.ScheduleNewOrchestration(ctx, "TestWorkflow") - if err != nil { - log.Fatal(err) - } +{{< tabpane text=true >}} - if _, err = client.WaitForOrchestrationCompletion(ctx, id); err != nil { - log.Fatal(err) - } -} +{{% tab "Go" %}} -func TestWorkflow(ctx *task.OrchestrationContext) (any, error) { +```go +func BusinessWorkflow(ctx *workflow.WorkflowContext) (any, error) { var output string - err := ctx.CallActivity("my-other-activity", - task.WithActivityInput("my-input"), - // Here we set custom target app ID which will execute this activity. - task.WithActivityAppID("my-other-app-id"), + err := ctx.CallActivity("ActivityA", + workflow.WithActivityInput("my-input"), + workflow.WithActivityAppID("App2"), // Here we set the target app ID which will execute this activity. 
).Await(&output) + if err != nil { return nil, err } @@ -107,45 +122,18 @@ func TestWorkflow(ctx *task.OrchestrationContext) (any, error) { {{% tab "Java" %}} ```java -public class CrossAppWorkflow implements Workflow { +public class BusinessWorkflow implements Workflow { @Override public WorkflowStub create() { return ctx -> { - var logger = ctx.getLogger(); - logger.info("=== WORKFLOW STARTING ==="); - logger.info("Starting CrossAppWorkflow: {}", ctx.getName()); - logger.info("Workflow name: {}", ctx.getName()); - logger.info("Workflow instance ID: {}", ctx.getInstanceId()); - - String input = ctx.getInput(String.class); - logger.info("CrossAppWorkflow received input: {}", input); - logger.info("Workflow input: {}", input); - - // Call an activity in another app by passing in an active appID to the WorkflowTaskOptions - logger.info("Calling cross-app activity in 'app2'..."); - logger.info("About to call cross-app activity in app2..."); - String crossAppResult = ctx.callActivity( - App2TransformActivity.class.getName(), - input, - new WorkflowTaskOptions("app2"), + String output = ctx.callActivity( + ActivityA.class.getName(), + "my-input", + new WorkflowTaskOptions("App2"), // Here we set the target app ID which will execute this activity. 
String.class ).await(); - // Call another activity in a different app - logger.info("Calling cross-app activity in 'app3'..."); - logger.info("About to call cross-app activity in app3..."); - String finalResult = ctx.callActivity( - App3FinalizeActivity.class.getName(), - crossAppResult, - new WorkflowTaskOptions("app3"), - String.class - ).await(); - logger.info("Final cross-app activity result: {}", finalResult); - logger.info("Final cross-app activity result: {}", finalResult); - - logger.info("CrossAppWorkflow finished with: {}", finalResult); - logger.info("=== WORKFLOW COMPLETING WITH: {} ===" , finalResult); - ctx.complete(finalResult); + ctx.complete(output); }; } } @@ -153,62 +141,60 @@ public class CrossAppWorkflow implements Workflow { {{% /tab %}} -{{< /tabpane >}} - -The following examples show how to execute child workflows on different target app IDs. - -{{< tabpane text=true >}} +{{% tab ".NET" %}} + +```csharp +// Specify App ID during workflow registration +builder.Services.AddDaprWorkflowBuilder(opt => + { + opt.RegisterWorkflow(); + opt.RegisterActivity(); + opt.AppId = "my-application-1"; + }); + +// Call activity in another application +public sealed class WorkflowA : Workflow +{ + public override Task RunAsync(WorkflowContext context, int input) => + context.CallActivityAsync(nameof("AnotherActivity"), input, new WorkflowTaskOptions( + targetAppId: "my-other-app")); + }); +} +``` -{{% tab "Go" %}} +{{% /tab %}} -```go -package main +{{% tab "Python" %}} -import ( - "context" - "log" +```python +@wfr.workflow +def app1_workflow(ctx: wf.DaprWorkflowContext): + output = yield ctx.call_activity('ActivityA', input='my-input', app_id='App2') + return output +``` - "github.com/dapr/durabletask-go/backend" - "github.com/dapr/durabletask-go/client" - "github.com/dapr/durabletask-go/task" - dapr "github.com/dapr/go-sdk/client" -) +{{% /tab %}} -func main() { - ctx := context.Background() +{{< /tabpane >}} - registry := task.NewTaskRegistry() - if err 
:= registry.AddOrchestrator(TestWorkflow); err != nil { - log.Fatal(err) - } +## Multi-application child workflow example - daprClient, err := dapr.NewClient() - if err != nil { - log.Fatal(err) - } +Diagram showing multi-application child workflow pattern - client := client.NewTaskHubGrpcClient(daprClient.GrpcClientConn(), backend.DefaultLogger()) - if err := client.StartWorkItemListener(ctx, registry); err != nil { - log.Fatal(err) - } +The following example shows how to execute the child workflow `Workflow2` on the target app `App2`. - id, err := client.ScheduleNewOrchestration(ctx, "TestWorkflow") - if err != nil { - log.Fatal(err) - } +{{< tabpane text=true >}} - if _, err = client.WaitForOrchestrationCompletion(ctx, id); err != nil { - log.Fatal(err) - } -} +{{% tab "Go" %}} -func TestWorkflow(ctx *task.OrchestrationContext) (any, error) { +```go +func BusinessWorkflow(ctx *workflow.WorkflowContext) (any, error) { var output string - err := ctx.CallSubOrchestrator("my-sub-orchestration", - task.WithSubOrchestratorInput("my-input"), - // Here we set custom target app ID which will execute this child workflow. - task.WithSubOrchestratorAppID("my-sub-app-id"), + err := ctx.CallChildWorkflow("Workflow2", + workflow.WithChildWorkflowInput("my-input"), + workflow.WithChildWorkflowAppID("App2"), // Here we set the target app ID which will execute this child workflow. 
).Await(&output) + if err != nil { return nil, err } @@ -219,6 +205,40 @@ func TestWorkflow(ctx *task.OrchestrationContext) (any, error) { {{% /tab %}} +{{% tab ".NET" %}} + +```csharp +// Specify App ID during workflow registration +builder.Services.AddDaprWorkflowBuilder(opt => + { + opt.RegisterWorkflow(); + opt.RegisterActivity(); + opt.AppId = "my-application-1"; + }); + +// Call child workflow in another application +public sealed class WorkflowA : Workflow +{ + public override Task RunAsync(WorkflowContext context, int input) => + context.CallChildWorkflow(nameof("AnotherWorkflow"), input, new ChildWorkflowTaskOptions( + TargetAppId: "my-other-app" + }); +} +``` + +{{% /tab %}} + +{{% tab "Python" %}} + +```python +@wfr.workflow +def workflow1(ctx: wf.DaprWorkflowContext): + output = yield ctx.call_child_workflow(workflow='Workflow2', input='my-input', app_id='App2') + return output +``` + +{{% /tab %}} + {{< /tabpane >}} ## Related links diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md index 67e4941f880..cce02d3316b 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md @@ -46,6 +46,12 @@ Child workflow also supports automatic retry policies. [Learn more about child workflows.]({{% ref "workflow-features-concepts.md#child-workflows" %}}) +### Multi-application workflows + +Multi-application workflows, enable you to orchestrate complex business processes that span across multiple applications. This allows a workflow to call activities or start child workflows in different applications, distributing the workflow execution while maintaining the security, reliability and durability guarantees of Dapr's workflow engine. 
+ +[Learn more about multi-application workflows.]({{% ref "workflow-multi-app.md" %}}) + ### Timers and reminders Same as Dapr actors, you can schedule reminder-like durable delays for any time range. @@ -108,9 +114,47 @@ Want to put workflows to the test? Walk through the following quickstart and tut Want to skip the quickstarts? Not a problem. You can try out the workflow building block directly in your application. After [Dapr is installed]({{% ref install-dapr-cli.md %}}), you can begin using workflows, starting with [how to author a workflow]({{% ref howto-author-workflow.md %}}). +## Managing Workflows + +Dapr provides comprehensive workflow management capabilities through both the HTTP API and the CLI. + +### Workflow Lifecycle Operations + +**Start Workflows** +```bash +dapr workflow run MyWorkflow --app-id myapp --input '{"key": "value"}' +``` + +**Monitor Workflows** +```bash +# List active workflows for a given application +dapr workflow list --app-id myapp --filter-status RUNNING + +# View execution history +dapr workflow history --app-id myapp +``` + +**Control Workflows** +```bash +# Suspend, resume, or terminate +dapr workflow suspend --app-id myapp +dapr workflow resume --app-id myapp +dapr workflow terminate --app-id myapp +``` + +**Maintenance Operations** +```bash +# Purge completed workflows +dapr workflow purge --app-id myapp --all-older-than 720h +``` + +See [How-To: Manage workflows]({{< ref howto-manage-workflow.md >}}) for detailed instructions. + ## Limitations -- **State stores:** Due to underlying limitations in some database choices, more commonly NoSQL databases, you might run into limitations around storing internal states. For example, CosmosDB has a maximum single operation item limit of only 100 states in a single request. +- **State stores:** You can only use state stores which support workflows, as [described here]({{% ref supported-state-stores %}}). 
+- Azure Cosmos DB has [payload and workflow complexity limitations]({{% ref "setup-azure-cosmosdb.md#workflow-limitations" %}}). +- AWS DynamoDB has [workflow complexity limitations]({{% ref "setup-dynamodb.md#workflow-limitations" %}}). ## Watch the demo diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md index faa92d946ae..8158ddfdbbc 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md @@ -307,6 +307,16 @@ In addition to the challenges mentioned in [the previous pattern]({{% ref "workf Dapr Workflows provides a way to express the fan-out/fan-in pattern as a simple function, as shown in the following example: +```bash +# Start the workflow +dapr workflow run DataProcessingWorkflow \ + --app-id processor \ + --input '{"items": ["item1", "item2", "item3"]}' + +# Monitor parallel execution +dapr workflow history --app-id processor --output json +``` + {{< tabpane text=true >}} {{% tab "Python" %}} @@ -615,8 +625,7 @@ await context.CallActivityAsync("PostResults", sum); {{< /tabpane >}} -With the release of 1.16, it's even easier to process workflow activities in parallel while putting an upper cap on -concurrency by using the following extension methods on the `WorkflowContext`: +You can process workflow activities in parallel while putting an upper cap on concurrency by using the following extension methods on the `WorkflowContext`: {{< tabpane text=true >}} @@ -1428,33 +1437,33 @@ The following diagram illustrates this flow. 
```java public class PaymentProcessingWorkflow implements Workflow { - + @Override public WorkflowStub create() { return ctx -> { ctx.getLogger().info("Starting Workflow: " + ctx.getName()); var orderId = ctx.getInput(String.class); List compensations = new ArrayList<>(); - + try { // Step 1: Reserve inventory String reservationId = ctx.callActivity(ReserveInventoryActivity.class.getName(), orderId, String.class).await(); ctx.getLogger().info("Inventory reserved: {}", reservationId); compensations.add("ReleaseInventory"); - + // Step 2: Process payment String paymentId = ctx.callActivity(ProcessPaymentActivity.class.getName(), orderId, String.class).await(); ctx.getLogger().info("Payment processed: {}", paymentId); compensations.add("RefundPayment"); - + // Step 3: Ship order String shipmentId = ctx.callActivity(ShipOrderActivity.class.getName(), orderId, String.class).await(); ctx.getLogger().info("Order shipped: {}", shipmentId); compensations.add("CancelShipment"); - + } catch (TaskFailedException e) { ctx.getLogger().error("Activity failed: {}", e.getMessage()); - + // Execute compensations in reverse order Collections.reverse(compensations); for (String compensation : compensations) { @@ -1462,24 +1471,24 @@ public class PaymentProcessingWorkflow implements Workflow { switch (compensation) { case "CancelShipment": String shipmentCancelResult = ctx.callActivity( - CancelShipmentActivity.class.getName(), - orderId, + CancelShipmentActivity.class.getName(), + orderId, String.class).await(); ctx.getLogger().info("Shipment cancellation completed: {}", shipmentCancelResult); break; - + case "RefundPayment": String refundResult = ctx.callActivity( - RefundPaymentActivity.class.getName(), - orderId, + RefundPaymentActivity.class.getName(), + orderId, String.class).await(); ctx.getLogger().info("Payment refund completed: {}", refundResult); break; - + case "ReleaseInventory": String releaseResult = ctx.callActivity( - ReleaseInventoryActivity.class.getName(), - 
orderId, + ReleaseInventoryActivity.class.getName(), + orderId, String.class).await(); ctx.getLogger().info("Inventory release completed: {}", releaseResult); break; @@ -1494,7 +1503,7 @@ public class PaymentProcessingWorkflow implements Workflow { // Step 4: Send confirmation ctx.callActivity(SendConfirmationActivity.class.getName(), orderId, Void.class).await(); ctx.getLogger().info("Confirmation sent for order: {}", orderId); - + ctx.complete("Order processed successfully: " + orderId); }; } @@ -1597,7 +1606,7 @@ The compensation pattern ensures that your distributed workflows can maintain co - [Try out Dapr Workflows using the quickstart]({{% ref workflow-quickstart.md %}}) - [Workflow overview]({{% ref workflow-overview.md %}}) - [Workflow API reference]({{% ref workflow_api.md %}}) -- Try out the following examples: +- Try out the following examples: - [Python](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) - [JavaScript](https://github.com/dapr/js-sdk/tree/main/examples/workflow) - [.NET](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) diff --git a/daprdocs/content/en/developing-applications/error-codes/errors-overview.md b/daprdocs/content/en/developing-applications/error-codes/errors-overview.md index 00e56ae61a9..b66ce4bcb0f 100644 --- a/daprdocs/content/en/developing-applications/error-codes/errors-overview.md +++ b/daprdocs/content/en/developing-applications/error-codes/errors-overview.md @@ -6,7 +6,7 @@ weight: 10 description: "Overview of Dapr errors" --- -An error code is a numeric or alphamueric code that indicates the nature of an error and, when possible, why it occured. +An error code is a numeric or alphanumeric code that indicates the nature of an error and, when possible, why it occurred. Dapr error codes are standardized strings for over 80+ common errors across HTTP and gRPC requests when using the Dapr APIs. These codes are both: - Returned in the JSON response body of the request. 
diff --git a/daprdocs/content/en/developing-applications/local-development/multi-app-dapr-run/multi-app-template.md b/daprdocs/content/en/developing-applications/local-development/multi-app-dapr-run/multi-app-template.md index 7e527d74fe1..7cac1459d1b 100644 --- a/daprdocs/content/en/developing-applications/local-development/multi-app-dapr-run/multi-app-template.md +++ b/daprdocs/content/en/developing-applications/local-development/multi-app-dapr-run/multi-app-template.md @@ -100,7 +100,7 @@ dapr list -k ## Stop the multi-app template -Stop the multi-app run template anytime with either of the following commands: +Stop the multi-app run template any time with either of the following commands: {{< tabpane text=true >}} diff --git a/daprdocs/content/en/developing-applications/sdks/_index.md b/daprdocs/content/en/developing-applications/sdks/_index.md index 079a06d3297..ace28b18042 100644 --- a/daprdocs/content/en/developing-applications/sdks/_index.md +++ b/daprdocs/content/en/developing-applications/sdks/_index.md @@ -31,6 +31,12 @@ Select your [preferred language below]({{% ref "#sdk-languages" %}}) to learn mo | [C++](https://github.com/dapr/cpp-sdk) | In development | ✔ | | | | [Rust]({{% ref rust %}}) | In development | ✔ | | ✔ | | + +## Frameworks + +| Framework | Language | Status | Description | +|----------------------------------------|:----------------------|:---------------|:-----------------:| +| [Dapr Agents]({{% ref "../../developing-ai/dapr-agents" %}}) | Python | In development | A framework for building LLM-powered autonomous agents that leverages Dapr's distributed systems capabilities for durable execution, with built-in security, observability, and state management. 
| ## Further reading - [Serialization in the Dapr SDKs]({{% ref sdk-serialization.md %}}) diff --git a/daprdocs/content/en/getting-started/install-dapr-selfhost.md b/daprdocs/content/en/getting-started/install-dapr-selfhost.md index 616c71f4d2e..0848f61caaa 100644 --- a/daprdocs/content/en/getting-started/install-dapr-selfhost.md +++ b/daprdocs/content/en/getting-started/install-dapr-selfhost.md @@ -170,8 +170,6 @@ explorer "%USERPROFILE%\.dapr" {{< /tabpane >}} -
- ### Slim init To install the CLI without any default configuration files or Docker containers, use the `--slim` flag. [Learn more about the `init` command and its flags.]({{% ref dapr-init.md %}}) @@ -180,6 +178,16 @@ To install the CLI without any default configuration files or Docker containers, dapr init --slim ``` +### Other tooling (optional) -{{< button text="Next step: Use the Dapr API >>" page="getting-started/get-started-api.md" >}} +#### Diagrid Dashboard for Dapr Workflow + +If you're planning to build Dapr Workflow applications, you can install the [Diagrid Dashboard](https://diagrid.ws/diagrid-dashboard-docs) to visualize workflow state during local development: + +Start the Diagrid Dashboard container using Docker: +```bash +docker run -p 8080:8080 ghcr.io/diagridio/diagrid-dashboard:latest +``` + +{{< button text="Next step: Use the Dapr API >>" page="getting-started/get-started-api.md" >}} diff --git a/daprdocs/content/en/getting-started/quickstarts/_index.md b/daprdocs/content/en/getting-started/quickstarts/_index.md index 102aae1f5b2..6b5d17f6553 100644 --- a/daprdocs/content/en/getting-started/quickstarts/_index.md +++ b/daprdocs/content/en/getting-started/quickstarts/_index.md @@ -25,6 +25,7 @@ Hit the ground running with our Dapr quickstarts, complete with code samples aim | [Service Invocation]({{% ref serviceinvocation-quickstart %}}) | Synchronous communication between two services using HTTP or gRPC. | | [Publish and Subscribe]({{% ref pubsub-quickstart %}}) | Asynchronous communication between two services using messaging. | | [Workflow]({{% ref workflow-quickstart %}}) | Orchestrate business workflow activities in long running, fault-tolerant, stateful applications. | +| [Agents]({{% ref dapr-agents-quickstarts.md %}}) | Build LLM-powered autonomous agentic applications. | | [State Management]({{% ref statemanagement-quickstart %}}) | Store a service's data as key/value pairs in supported state stores. 
| | [Bindings]({{% ref bindings-quickstart %}}) | Work with external systems using input bindings to respond to events and output bindings to call operations. | | [Actors]({{% ref actors-quickstart %}}) | Run a microservice and a simple console client to demonstrate stateful object patterns in Dapr Actors. | @@ -33,4 +34,4 @@ Hit the ground running with our Dapr quickstarts, complete with code samples aim | [Resiliency]({{% ref resiliency %}}) | Define and apply fault-tolerance policies to your Dapr API requests. | | [Cryptography]({{% ref cryptography-quickstart %}}) | Encrypt and decrypt data using Dapr's cryptographic APIs. | | [Jobs]({{% ref jobs-quickstart %}}) | Schedule, retrieve, and delete jobs using Dapr's jobs APIs. | -| [Conversation]({{% ref conversation-quickstart %}}) | Securely and reliably interact with Large Language Models (LLMs). | \ No newline at end of file +| [Conversation]({{% ref conversation-quickstart %}}) | Securely and reliably interact with Large Language Models (LLMs). | diff --git a/daprdocs/content/en/getting-started/quickstarts/conversation-quickstart.md b/daprdocs/content/en/getting-started/quickstarts/conversation-quickstart.md index 8caef45b9b3..fe2731ced93 100644 --- a/daprdocs/content/en/getting-started/quickstarts/conversation-quickstart.md +++ b/daprdocs/content/en/getting-started/quickstarts/conversation-quickstart.md @@ -23,6 +23,8 @@ Currently, you can only use JavaScript for the quickstart sample using HTTP, not ## Run the app with the template file +Select your preferred language-specific Dapr SDK before proceeding with the Quickstart. + {{< tabpane text=true >}} @@ -61,12 +63,13 @@ pip3 install -r requirements.txt ### Step 3: Launch the conversation service -Navigate back to the `sdk` directory and start the conversation service with the following command: ```bash dapr run -f . ``` +> **Note**: Since Python3.exe is not defined in Windows, you may need to use `python app.py` instead of `python3 app.py`. 
+ **Expected output** ``` @@ -76,9 +79,7 @@ dapr run -f . ### What happened? -When you ran `dapr init` during Dapr install, the [`dapr.yaml` Multi-App Run template file]({{% ref "#dapryaml-multi-app-run-template-file" %}}) was generated in the `.dapr/components` directory. - -Running `dapr run -f .` in this Quickstart started [conversation.go]({{% ref "#programcs-conversation-app" %}}). +Running `dapr run -f .` in this Quickstart started [app.py]({{% ref "#programcs-conversation-app" %}}). #### `dapr.yaml` Multi-App Run template file @@ -118,27 +119,23 @@ In the application code: ```python from dapr.clients import DaprClient -from dapr.clients.grpc._request import ConversationInput +from dapr.clients.grpc.conversation import ConversationInputAlpha2, ConversationMessage, ConversationMessageContent, ConversationMessageOfUser with DaprClient() as d: - inputs = [ - ConversationInput(content="What is dapr?", role='user', scrub_pii=True), - ] - - metadata = { - 'model': 'modelname', - 'key': 'authKey', - 'cacheTTL': '10m', - } + text_input = "What is dapr?" + provider_component = "echo" - print('Input sent: What is dapr?') + inputs = [ + ConversationInputAlpha2(messages=[ConversationMessage(of_user=ConversationMessageOfUser(content=[ConversationMessageContent(text=text_input)]))], + scrub_pii=True), + ] - response = d.converse_alpha1( - name='echo', inputs=inputs, temperature=0.7, context_id='chat-123', metadata=metadata - ) + print(f'Input sent: {text_input}') - for output in response.outputs: - print(f'Output response: {output.result}') + response = d.converse_alpha2(name=provider_component, inputs=inputs, temperature=0.7, context_id='chat-123') + + for output in response.outputs: + print(f'Output response: {output.choices[0].message.content}') ``` {{% /tab %}} @@ -188,14 +185,16 @@ dapr run -f . **Expected output** ``` -== APP - conversation == Input sent: What is dapr? +== APP - conversation == Conversation input sent: What is dapr? 
== APP - conversation == Output response: What is dapr? +== APP - conversation == Tool calling input sent: What is the weather like in San Francisco in celsius? +== APP - conversation == Output message: { outputs: [ { choices: [Array] } ] } +== APP - conversation == Output message: What is the weather like in San Francisco in celsius? +== APP - conversation == Tool calls detected: [{"id":"0","function":{"name":"get_weather","arguments":"location,unit"}}] ``` ### What happened? -When you ran `dapr init` during Dapr install, the [`dapr.yaml` Multi-App Run template file]({{% ref "#dapryaml-multi-app-run-template-file" %}}) was generated in the `.dapr/components` directory. - Running `dapr run -f .` in this Quickstart started [conversation.go]({{% ref "#programcs-conversation-app" %}}). #### `dapr.yaml` Multi-App Run template file @@ -231,50 +230,138 @@ To interface with a real LLM, swap out the mock component with one of [the suppo #### `index.js` conversation app -In the application code: +In the first part of the application code: - The app sends an input "What is dapr?" to the echo mock LLM component. - The mock LLM echoes "What is dapr?". ```javascript -const conversationComponentName = "echo"; +const daprHost = process.env.DAPR_HOST || "http://localhost"; +const daprHttpPort = process.env.DAPR_HTTP_PORT || "3500"; -async function main() { - const daprHost = process.env.DAPR_HOST || "http://localhost"; - const daprHttpPort = process.env.DAPR_HTTP_PORT || "3500"; +const reqURL = `${daprHost}:${daprHttpPort}/v1.0-alpha2/conversation/${conversationComponentName}/converse`; - const inputBody = { - name: "echo", - inputs: [{ message: "What is dapr?" 
}], +// Plain conversation +try { + const converseInputBody = { + inputs: [ + { + messages: [ + { + ofUser: { + content: [ + { + text: "What is dapr?", + }, + ], + }, + }, + ], + }, + ], parameters: {}, metadata: {}, }; + const response = await fetch(reqURL, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(converseInputBody), + }); + + console.log("Conversation input sent: What is dapr?"); + + const data = await response.json(); + const result = data.outputs[0].choices[0].message.content; + console.log("Output response:", result); +} catch (error) { + console.error("Error:", error.message); + process.exit(1); +} +``` - const reqURL = `${daprHost}:${daprHttpPort}/v1.0-alpha1/conversation/${conversationComponentName}/converse`; +In the second part of the application code: +- The app sends an input "What is the weather like in San Francisco in celsius" together with the definition of a tool that is available `get_weather`. +- The mock LLM echoes "What is the weather like in San Francisco in celsius?" and the function definition, which is detected in the response. 
- try { - const response = await fetch(reqURL, { - method: "POST", - headers: { - "Content-Type": "application/json", +```javascript +try { + const toolCallingInputBody = { + inputs: [ + { + messages: [ + { + ofUser: { + content: [ + { + text: "What is the weather like in San Francisco in celsius?", + }, + ], + }, + }, + ], + scrubPii: false, }, - body: JSON.stringify(inputBody), - }); - - console.log("Input sent: What is dapr?"); - - const data = await response.json(); - const result = data.outputs[0].result; - console.log("Output response:", result); - } catch (error) { - console.error("Error:", error.message); - process.exit(1); + ], + metadata: { + api_key: "test-key", + version: "1.0", + }, + scrubPii: false, + temperature: 0.7, + tools: [ + { + function: { + name: "get_weather", + description: "Get the current weather for a location", + parameters: { + type: "object", + properties: { + location: { + type: "string", + description: "The city and state, e.g. San Francisco, CA", + }, + unit: { + type: "string", + enum: ["celsius", "fahrenheit"], + description: "The temperature unit to use", + }, + }, + required: ["location"], + }, + }, + }, + ], + toolChoice: "auto", + }; + const response = await fetch(reqURL, { + method: "POST", + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(toolCallingInputBody), + }); + + console.log( + "Tool calling input sent: What is the weather like in San Francisco in celsius?" 
+ ); + + const data = await response.json(); + + const result = data?.outputs?.[0]?.choices?.[0]?.message?.content; + console.log("Output message:", result); + + if (data?.outputs?.[0]?.choices?.[0]?.message?.toolCalls) { + console.log( + "Tool calls detected:", + JSON.stringify(data.outputs[0].choices[0].message?.toolCalls) + ); + } else { + console.log("No tool calls in response"); } -} - -main().catch((error) => { - console.error("Unhandled error:", error); +} catch (error) { + console.error("Error:", error.message); process.exit(1); -}); ``` {{% /tab %}} @@ -318,14 +405,18 @@ dapr run -f . **Expected output** ``` -== APP - conversation == Input sent: What is dapr? +== APP - conversation == Conversation input sent: What is dapr? == APP - conversation == Output response: What is dapr? +== APP - conversation == Tool calling input sent: What is the weather like in San Francisco in celsius? +== APP - conversation == Output message: What is the weather like in San Francisco in celsius? +== APP - conversation == Tool calls detected: +== APP - conversation == Tool call: {"id":0,"function":{"name":"get_weather","arguments":"location,unit"}} +== APP - conversation == Function name: get_weather +== APP - conversation == Function arguments: location,unit ``` ### What happened? -When you ran `dapr init` during Dapr install, the [`dapr.yaml` Multi-App Run template file]({{% ref "#dapryaml-multi-app-run-template-file" %}}) was generated in the `.dapr/components` directory. - Running `dapr run -f .` in this Quickstart started the [conversation Program.cs]({{% ref "#programcs-conversation-app" %}}). #### `dapr.yaml` Multi-App Run template file @@ -364,44 +455,244 @@ To interface with a real LLM, swap out the mock component with one of [the suppo In the application code: - The app sends an input "What is dapr?" to the echo mock LLM component. - The mock LLM echoes "What is dapr?". 
+- The app sends an input “What is the weather like in San Francisco in celsius” together with the definition of a tool that is available `get_weather`. +- The mock LLM echoes “What is the weather like in San Francisco in celsius?” and the function definition, which is detected in the response. + ```csharp +using System.Text.Json; using Dapr.AI.Conversation; +using Dapr.AI.Conversation.ConversationRoles; using Dapr.AI.Conversation.Extensions; +using Dapr.AI.Conversation.Tools; -class Program -{ - private const string ConversationComponentName = "echo"; - - static async Task Main(string[] args) - { - const string prompt = "What is dapr?"; +const string conversationComponentName = "echo"; +const string conversationText = "What is dapr?"; +const string toolCallInput = "What is the weather like in San Francisco in celsius?"; - var builder = WebApplication.CreateBuilder(args); - builder.Services.AddDaprConversationClient(); - var app = builder.Build(); +var builder = WebApplication.CreateBuilder(args); +builder.Services.AddDaprConversationClient(); +var app = builder.Build(); - //Instantiate Dapr Conversation Client - var conversationClient = app.Services.GetRequiredService(); +// +// Setup - try - { - // Send a request to the echo mock LLM component - var response = await conversationClient.ConverseAsync(ConversationComponentName, [new(prompt, DaprConversationRole.Generic)]); - Console.WriteLine("Input sent: " + prompt); +var conversationClient = app.Services.GetRequiredService(); - if (response != null) - { - Console.Write("Output response:"); - foreach (var resp in response.Outputs) +var conversationOptions = new ConversationOptions(conversationComponentName) +{ + ScrubPII = false, + ToolChoice = ToolChoice.Auto, + Temperature = 0.7, + Tools = [ + new ToolFunction("function") { - Console.WriteLine($" {resp.Result}"); - } - } - } - catch (Exception ex) + Name = "get_weather", + Description = "Get the current weather for a location", + Parameters = 
JsonSerializer.Deserialize>(""" + { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g. San Francisco, CA" + }, + "unit": { + "type": "string", + "enum": ["celsius", "fahrenheit"], + "description": "The temperature unit to use" + } + }, + "required": ["location"] + } + """) ?? throw new("Unable to parse tool function parameters."), + }, + ], +}; + +// +// Simple Conversation + +var conversationResponse = await conversationClient.ConverseAsync( + [new ConversationInput(new List { - Console.WriteLine("Error: " + ex.Message); + new UserMessage { + Name = "TestUser", + Content = [ + new MessageContent(conversationText), + ], + }, + })], + conversationOptions +); + +Console.WriteLine($"Conversation input sent: {conversationText}"); +Console.WriteLine($"Output response: {conversationResponse.Outputs.First().Choices.First().Message.Content}"); + +// +// Tool Calling + +var toolCallResponse = await conversationClient.ConverseAsync( + [new ConversationInput(new List + { + new UserMessage { + Name = "TestUser", + Content = [ + new MessageContent(toolCallInput), + ], + }, + })], + conversationOptions +); + +Console.WriteLine($"Tool calling input sent: {toolCallInput}"); +Console.WriteLine($"Output message: {toolCallResponse.Outputs.First().Choices.First().Message.Content}"); +Console.WriteLine("Tool calls detected:"); + +var functionToolCall = toolCallResponse.Outputs.First().Choices.First().Message.ToolCalls.First() as CalledToolFunction + ?? 
throw new("Unexpected tool call type for demo."); + +var toolCallJson = JsonSerializer.Serialize(new +{ + id = 0, + function = new + { + name = functionToolCall.Name, + arguments = functionToolCall.JsonArguments, + }, +}); +Console.WriteLine($"Tool call: {toolCallJson}"); +Console.WriteLine($"Function name: {functionToolCall.Name}"); +Console.WriteLine($"Function arguments: {functionToolCall.JsonArguments}"); +``` + +{{% /tab %}} + + + + +{{% tab "Java" %}} + + +### Step 1: Pre-requisites + +For this example, you will need: + +- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started). +- Java JDK 17 (or greater): + - [Oracle JDK](https://www.oracle.com/java/technologies/downloads), or + - OpenJDK +- [Apache Maven](https://maven.apache.org/install.html), version 3.x. + +- [Docker Desktop](https://www.docker.com/products/docker-desktop) + + +### Step 2: Set up the environment + +Clone the [sample provided in the Quickstarts repo](https://github.com/dapr/quickstarts/tree/master/conversation). + +```bash +git clone https://github.com/dapr/quickstarts.git +``` + +From the root of the Quickstarts directory, navigate into the conversation directory: + +```bash +cd conversation/java/sdk/conversation +``` + +Install the dependencies: + +```bash +mvn clean install +``` + +### Step 3: Launch the conversation service + +Navigate back to the sdk directory and start the conversation service with the following command: + +```bash +dapr run -f . +``` + +**Expected output** + +``` +== APP - conversation == Input: What is Dapr? +== APP - conversation == Output response: What is Dapr? +``` + +### What happened? + +Running `dapr run -f .` in this Quickstart started [Conversation.java]({{% ref "#programcs-conversation-app" %}}). + +#### `dapr.yaml` Multi-App Run template file + +Running the [Multi-App Run template file]({{% ref multi-app-dapr-run %}}) with `dapr run -f .` starts all applications in your project. 
This Quickstart has only one application, so the `dapr.yaml` file contains the following: +
+```yml
+version: 1
+common:
+  resourcesPath: ../../components
+apps:
+  - appID: conversation
+    appDirPath: ./conversation/target
+    command: ["java", "-jar", "ConversationAIService-0.0.1-SNAPSHOT.jar"]
+```
+
+#### Echo mock LLM component
+
+In the [`conversation/components`](https://github.com/dapr/quickstarts/tree/master/conversation/components) directory of the quickstart, the [`conversation.yaml` file](https://github.com/dapr/quickstarts/tree/master/conversation/components/conversation.yaml) configures the echo LLM component.
+
+```yml
+apiVersion: dapr.io/v1alpha1
+kind: Component
+metadata:
+  name: echo
+spec:
+  type: conversation.echo
+  version: v1
+```
+
+To interface with a real LLM, swap out the mock component with one of [the supported conversation components]({{% ref "supported-conversation" %}}). For example, to use an OpenAI component, see the [example in the conversation how-to guide]({{% ref "howto-conversation-layer#use-the-openai-component" %}})
+
+#### `Conversation.java` conversation app
+
+In the application code:
+- The app sends an input "What is dapr?" to the echo mock LLM component.
+- The mock LLM echoes "What is dapr?".
+
+```java
+package com.service;
+
+import io.dapr.client.DaprClientBuilder;
+import io.dapr.client.DaprPreviewClient;
+import io.dapr.client.domain.ConversationInput;
+import io.dapr.client.domain.ConversationRequest;
+import io.dapr.client.domain.ConversationResponse;
+import reactor.core.publisher.Mono;
+
+import java.util.List;
+
+public class Conversation {
+
+  public static void main(String[] args) {
+    String prompt = "What is Dapr?";
+
+    try (DaprPreviewClient client = new DaprClientBuilder().buildPreviewClient()) {
+      System.out.println("Input: " + prompt);
+
+      ConversationInput daprConversationInput = new ConversationInput(prompt);
+
+      // Component name is the name provided in the metadata block of the conversation.yaml file. 
+ Mono responseMono = client.converse(new ConversationRequest("echo", + List.of(daprConversationInput)) + .setContextId("contextId") + .setScrubPii(true).setTemperature(1.1d)); + ConversationResponse response = responseMono.block(); + System.out.printf("Output response: %s", response.getConversationOutputs().get(0).getResult()); + } catch (Exception e) { + throw new RuntimeException(e); } } } @@ -448,14 +739,15 @@ dapr run -f . **Expected output** ``` -== APP - conversation == Input sent: What is dapr? -== APP - conversation == Output response: What is dapr? +== APP - conversation-sdk == Input sent: What is dapr? +== APP - conversation-sdk == Output response: What is dapr? +== APP - conversation-sdk == Tool calling input sent: What is the weather like in San Francisco in celsius?' +== APP - conversation-sdk == Tool Call: Name: getWeather - Arguments: location,unit +== APP - conversation-sdk == Tool Call Output: The weather in San Francisco is 25 degrees Celsius ``` ### What happened? -When you ran `dapr init` during Dapr install, the [`dapr.yaml` Multi-App Run template file]({{% ref "#dapryaml-multi-app-run-template-file" %}}) was generated in the `.dapr/components` directory. - Running `dapr run -f .` in this Quickstart started [conversation.go]({{% ref "#programcs-conversation-app" %}}). #### `dapr.yaml` Multi-App Run template file @@ -493,43 +785,163 @@ To interface with a real LLM, swap out the mock component with one of [the suppo In the application code: - The app sends an input "What is dapr?" to the echo mock LLM component. -- The mock LLM echoes "What is dapr?". +- The mock LLM echoes "What is dapr?". +- The app sends an input “What is the weather like in San Francisco in celsius” together with the definition of a tool that is available `get_weather`. +- The mock LLM echoes “What is the weather like in San Francisco in celsius?” and the function definition, which is detected in the response. 
```go -package main - import ( - "context" - "fmt" - "log" + "context" + "encoding/json" + "fmt" + "log" + "strings" - dapr "github.com/dapr/go-sdk/client" + "github.com/invopop/jsonschema" + "google.golang.org/protobuf/encoding/protojson" + "google.golang.org/protobuf/types/known/structpb" + + dapr "github.com/dapr/go-sdk/client" ) +// createMapOfArgsForEcho is a helper function to deal with the issue with the echo component not returning args as a map but in csv format +func createMapOfArgsForEcho(s string) ([]byte, error) { + m := map[string]any{} + for _, p := range strings.Split(s, ",") { + m[p] = p + } + return json.Marshal(m) +} + +// getWeatherInLocation is an example function to use as a tool call +func getWeatherInLocation(request GetDegreesWeatherRequest, defaultValues GetDegreesWeatherRequest) string { + location := request.Location + unit := request.Unit + if location == "location" { + location = defaultValues.Location + } + if unit == "unit" { + unit = defaultValues.Unit + } + return fmt.Sprintf("The weather in %s is 25 degrees %s", location, unit) +} + +type GetDegreesWeatherRequest struct { + Location string `json:"location" jsonschema:"title=Location,description=The location to look up the weather for"` + Unit string `json:"unit" jsonschema:"enum=celsius,enum=fahrenheit,description=Unit"` +} + +// GenerateFunctionTool helper method to create jsonschema input +func GenerateFunctionTool[T any](name, description string) (*dapr.ConversationToolsAlpha2, error) { + reflector := jsonschema.Reflector{ + AllowAdditionalProperties: false, + DoNotReference: true, + } + var v T + + schema := reflector.Reflect(v) + + schemaBytes, err := schema.MarshalJSON() + if err != nil { + return nil, err + } + + var protoStruct structpb.Struct + if err := protojson.Unmarshal(schemaBytes, &protoStruct); err != nil { + return nil, fmt.Errorf("converting jsonschema to proto Struct: %w", err) + } + + return (*dapr.ConversationToolsAlpha2)(&dapr.ConversationToolsFunctionAlpha2{ 
+ Name: name, + Description: &description, + Parameters: &protoStruct, + }), nil +} + +// createUserMessageInput is a helper method to create user messages in expected proto format +func createUserMessageInput(msg string) *dapr.ConversationInputAlpha2 { + return &dapr.ConversationInputAlpha2{ + Messages: []*dapr.ConversationMessageAlpha2{ + { + ConversationMessageOfUser: &dapr.ConversationMessageOfUserAlpha2{ + Content: []*dapr.ConversationMessageContentAlpha2{ + { + Text: &msg, + }, + }, + }, + }, + }, + } +} + func main() { - client, err := dapr.NewClient() - if err != nil { - panic(err) - } + client, err := dapr.NewClient() + if err != nil { + panic(err) + } - input := dapr.ConversationInput{ - Message: "What is dapr?", - // Role: nil, // Optional - // ScrubPII: nil, // Optional - } + inputMsg := "What is dapr?" + conversationComponent := "echo" - fmt.Println("Input sent:", input.Message) + request := dapr.ConversationRequestAlpha2{ + Name: conversationComponent, + Inputs: []*dapr.ConversationInputAlpha2{createUserMessageInput(inputMsg)}, + } + + fmt.Println("Input sent:", inputMsg) - var conversationComponent = "echo" + resp, err := client.ConverseAlpha2(context.Background(), request) + if err != nil { + log.Fatalf("err: %v", err) + } - request := dapr.NewConversationRequest(conversationComponent, []dapr.ConversationInput{input}) + fmt.Println("Output response:", resp.Outputs[0].Choices[0].Message.Content) + + tool, err := GenerateFunctionTool[GetDegreesWeatherRequest]("getWeather", "get weather from a location in the given unit") + if err != nil { + log.Fatalf("err: %v", err) + } + + weatherMessage := "Tool calling input sent: What is the weather like in San Francisco in celsius?'" + requestWithTool := dapr.ConversationRequestAlpha2{ + Name: conversationComponent, + Inputs: []*dapr.ConversationInputAlpha2{createUserMessageInput(weatherMessage)}, + Tools: []*dapr.ConversationToolsAlpha2{tool}, + } + + resp, err = client.ConverseAlpha2(context.Background(), 
requestWithTool) + if err != nil { + log.Fatalf("err: %v", err) + } - resp, err := client.ConverseAlpha1(context.Background(), request) - if err != nil { - log.Fatalf("err: %v", err) - } + fmt.Println(resp.Outputs[0].Choices[0].Message.Content) + for _, toolCalls := range resp.Outputs[0].Choices[0].Message.ToolCalls { + fmt.Printf("Tool Call: Name: %s - Arguments: %v\n", toolCalls.ToolTypes.Name, toolCalls.ToolTypes.Arguments) + + // parse the arguments and execute tool + args := []byte(toolCalls.ToolTypes.Arguments) + if conversationComponent == "echo" { + // The echo component does not return a compliant tool calling response in json format but rather returns a csv + args, err = createMapOfArgsForEcho(toolCalls.ToolTypes.Arguments) + if err != nil { + log.Fatalf("err: %v", err) + } + } - fmt.Println("Output response:", resp.Outputs[0].Result) + // find the tool (only one in this case) and execute + for _, toolInfo := range requestWithTool.Tools { + if toolInfo.Name == toolCalls.ToolTypes.Name && toolInfo.Name == "getWeather" { + var reqArgs GetDegreesWeatherRequest + if err = json.Unmarshal(args, &reqArgs); err != nil { + log.Fatalf("err: %v", err) + } + // execute tool + toolExecutionOutput := getWeatherInLocation(reqArgs, GetDegreesWeatherRequest{Location: "San Francisco", Unit: "Celsius"}) + fmt.Printf("Tool Call Output: %s\n", toolExecutionOutput) + } + } + } } ``` @@ -630,17 +1042,19 @@ npm install ### Step 3: Launch the conversation service -Navigate back to the `http` directory and start the conversation service with the following command: ```bash -dapr run --app-id conversation --resources-path ../../../components/ -- npm run start +dapr run --app-id conversation --resources-path ../../../components -- npm run start ``` **Expected output** ``` -== APP - conversation == Input sent: What is dapr? -== APP - conversation == Output response: What is dapr? +== APP == Conversation input sent: What is dapr? +== APP == Output response: What is dapr? 
+== APP == Tool calling input sent: What is the weather like in San Francisco in celsius? +== APP == Output message: What is the weather like in San Francisco in celsius? +== APP == Tool calls detected: [{"id":"0","function":{"name":"get_weather","arguments":"location,unit"}}] ``` {{% /tab %}} @@ -690,8 +1104,68 @@ dapr run --app-id conversation --resources-path ../../../components/ -- dotnet r **Expected output** ``` -== APP - conversation == Input sent: What is dapr? -== APP - conversation == Output response: What is dapr? +== APP == Conversation input sent: What is dapr? +== APP == Output response: What is dapr? +== APP == Tool calling input sent: What is the weather like in San Francisco in celsius? +== APP == Output message: What is the weather like in San Francisco in celsius? +== APP == Tool calls detected: +== APP == Tool call: {"id":0,"function":{"name":"get_weather","arguments":"location,unit"}} +== APP == Function name: get_weather +== APP == Function arguments: location,unit +``` + +{{% /tab %}} + + +{{% tab "Java" %}} + + +### Step 1: Pre-requisites + +For this example, you will need: + +- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started). +- Java JDK 17 (or greater): + - [Oracle JDK](https://www.oracle.com/java/technologies/downloads), or + - OpenJDK +- [Apache Maven](https://maven.apache.org/install.html), version 3.x. + +- [Docker Desktop](https://www.docker.com/products/docker-desktop) + + +### Step 2: Set up the environment + +Clone the [sample provided in the Quickstarts repo](https://github.com/dapr/quickstarts/tree/master/conversation). 
+ +```bash +git clone https://github.com/dapr/quickstarts.git +``` + +From the root of the Quickstarts directory, navigate into the conversation directory: + +```bash +cd conversation/java/sdk/conversation +``` + +Install the dependencies: + +```bash +mvn clean install +``` + +### Step 3: Launch the conversation service + +Start the conversation service with the following command: + +```bash +dapr run --app-id conversation --resources-path ../../../components/ -- java -jar target/ConversationAIService-0.0.1-SNAPSHOT.jar com.service.Conversation +``` + +**Expected output** + +``` +== APP == Input: What is Dapr? +== APP == Output response: What is Dapr? ``` {{% /tab %}} @@ -741,8 +1215,12 @@ dapr run --app-id conversation --resources-path ../../../components/ -- go run . **Expected output** ``` -== APP - conversation == Input sent: What is dapr? -== APP - conversation == Output response: What is dapr? +== APP == dapr client initializing for: 127.0.0.1:53826 +== APP == Input sent: What is dapr? +== APP == Output response: What is dapr? +== APP == Tool calling input sent: What is the weather like in San Francisco in celsius?' +== APP == Tool Call: Name: getWeather - Arguments: location,unit +== APP == Tool Call Output: The weather in San Francisco is 25 degrees Celsius ``` {{% /tab %}} diff --git a/daprdocs/content/en/getting-started/quickstarts/jobs-quickstart.md b/daprdocs/content/en/getting-started/quickstarts/jobs-quickstart.md index 73e9e4b5c24..3ea0a9771c4 100644 --- a/daprdocs/content/en/getting-started/quickstarts/jobs-quickstart.md +++ b/daprdocs/content/en/getting-started/quickstarts/jobs-quickstart.md @@ -81,6 +81,15 @@ dapr run -f . 
== APP - job-scheduler == Deleted job: BB-8 ``` +You should eventually see the jobs being scheduled in scheduler: + +```bash +$ dapr scheduler list +NAME TARGET BEGIN COUNT LAST TRIGGER +C-3PO job +13.40s 0 +R2-D2 job +3.40s 0 +``` + After 5 seconds, the terminal output should present the `R2-D2` job being processed: ```text @@ -95,6 +104,13 @@ After 10 seconds, the terminal output should present the `C3-PO` job being proce == APP - job-service == Executing maintenance job: Memory Wipe ``` +The jobs will no longer be listed in the scheduler: + +```bash +$ dapr scheduler list +NAME TARGET BEGIN COUNT LAST TRIGGER +``` + Once the process has completed, you can stop and clean up application processes with a single command. ```bash diff --git a/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md b/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md index 0c6be63d31d..35839f5c511 100644 --- a/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md +++ b/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md @@ -1756,11 +1756,8 @@ import ( "log" "time" - "github.com/dapr/durabletask-go/api" - "github.com/dapr/durabletask-go/backend" - "github.com/dapr/durabletask-go/client" - "github.com/dapr/durabletask-go/task" - dapr "github.com/dapr/go-sdk/client" + "github.com/dapr/durabletask-go/workflow" + "github.com/dapr/go-sdk/client" ) var ( @@ -1774,43 +1771,46 @@ func main() { fmt.Println("*** Welcome to the Dapr Workflow console app sample!") fmt.Println("*** Using this app, you can place orders that start workflows.") - registry := task.NewTaskRegistry() + r := workflow.NewRegistry() - if err := registry.AddOrchestrator(OrderProcessingWorkflow); err != nil { + if err := r.AddWorkflow(OrderProcessingWorkflow); err != nil { log.Fatal(err) } - if err := registry.AddActivity(NotifyActivity); err != nil { + if err := r.AddActivity(NotifyActivity); err != nil { log.Fatal(err) } - if err := 
registry.AddActivity(RequestApprovalActivity); err != nil { + if err := r.AddActivity(RequestApprovalActivity); err != nil { log.Fatal(err) } - if err := registry.AddActivity(VerifyInventoryActivity); err != nil { + if err := r.AddActivity(VerifyInventoryActivity); err != nil { log.Fatal(err) } - if err := registry.AddActivity(ProcessPaymentActivity); err != nil { + if err := r.AddActivity(ProcessPaymentActivity); err != nil { log.Fatal(err) } - if err := registry.AddActivity(UpdateInventoryActivity); err != nil { + if err := r.AddActivity(UpdateInventoryActivity); err != nil { log.Fatal(err) } - daprClient, err := dapr.NewClient() + wfClient, err := client.NewWorkflowClient() if err != nil { - log.Fatalf("failed to create Dapr client: %v", err) + log.Fatalf("failed to initialise workflow client: %v", err) } - client := client.NewTaskHubGrpcClient(daprClient.GrpcClientConn(), backend.DefaultLogger()) - if err := client.StartWorkItemListener(context.TODO(), registry); err != nil { - log.Fatalf("failed to start work item listener: %v", err) + if err := wfClient.StartWorker(context.Background(), r); err != nil { + log.Fatal(err) } + dclient, err := client.NewClient() + if err != nil { + log.Fatal(err) + } inventory := []InventoryItem{ {ItemName: "paperclip", PerItemCost: 5, Quantity: 100}, {ItemName: "cars", PerItemCost: 5000, Quantity: 10}, {ItemName: "computers", PerItemCost: 500, Quantity: 100}, } - if err := restockInventory(daprClient, inventory); err != nil { + if err := restockInventory(dclient, inventory); err != nil { log.Fatalf("failed to restock: %v", err) } @@ -1827,31 +1827,30 @@ func main() { TotalCost: totalCost, } - id, err := client.ScheduleNewOrchestration(context.TODO(), workflowName, - api.WithInput(orderPayload), - ) + id, err := wfClient.ScheduleWorkflow(context.Background(), workflowName, workflow.WithInput(orderPayload), workflow.WithInstanceID("order-"+time.Now().Format("20060102150405"))) if err != nil { log.Fatalf("failed to start workflow: 
%v", err) } waitCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - defer cancel() - _, err = client.WaitForOrchestrationCompletion(waitCtx, id) + _, err = wfClient.WaitForWorkflowCompletion(waitCtx, id) + cancel() if err != nil { log.Fatalf("failed to wait for workflow: %v", err) } - respFetch, err := client.FetchOrchestrationMetadata(context.Background(), id, api.WithFetchPayloads(true)) + respFetch, err := wfClient.FetchWorkflowMetadata(context.Background(), id, workflow.WithFetchPayloads(true)) if err != nil { log.Fatalf("failed to get workflow: %v", err) } - fmt.Printf("workflow status: %v\n", respFetch.RuntimeStatus) + fmt.Printf("workflow status: %v\n", respFetch.String()) fmt.Println("Purchase of item is complete") + select {} } -func restockInventory(daprClient dapr.Client, inventory []InventoryItem) error { +func restockInventory(daprClient client.Client, inventory []InventoryItem) error { for _, item := range inventory { itemSerialized, err := json.Marshal(item) if err != nil { @@ -1879,18 +1878,60 @@ import ( "log" "time" - "github.com/dapr/durabletask-go/task" + "github.com/dapr/durabletask-go/workflow" "github.com/dapr/go-sdk/client" ) +type OrderPayload struct { + ItemName string `json:"item_name"` + TotalCost int `json:"total_cost"` + Quantity int `json:"quantity"` +} + +type OrderResult struct { + Processed bool `json:"processed"` +} + +type InventoryItem struct { + ItemName string `json:"item_name"` + PerItemCost int `json:"per_item_cost"` + Quantity int `json:"quantity"` +} + +type InventoryRequest struct { + RequestID string `json:"request_id"` + ItemName string `json:"item_name"` + Quantity int `json:"quantity"` +} + +type InventoryResult struct { + Success bool `json:"success"` + InventoryItem InventoryItem `json:"inventory_item"` +} + +type PaymentRequest struct { + RequestID string `json:"request_id"` + ItemBeingPurchased string `json:"item_being_purchased"` + Amount int `json:"amount"` + Quantity int `json:"quantity"` 
+} + +type ApprovalRequired struct { + Approval bool `json:"approval"` +} + +type Notification struct { + Message string `json:"message"` +} + // OrderProcessingWorkflow is the main workflow for orchestrating activities in the order process. -func OrderProcessingWorkflow(ctx *task.OrchestrationContext) (any, error) { - orderID := ctx.ID +func OrderProcessingWorkflow(ctx *workflow.WorkflowContext) (any, error) { + orderID := ctx.ID() var orderPayload OrderPayload if err := ctx.GetInput(&orderPayload); err != nil { return nil, err } - err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{ + err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{ Message: fmt.Sprintf("Received order %s for %d %s - $%d", orderID, orderPayload.Quantity, orderPayload.ItemName, orderPayload.TotalCost), })).Await(nil) if err != nil { @@ -1898,8 +1939,8 @@ func OrderProcessingWorkflow(ctx *task.OrchestrationContext) (any, error) { } var verifyInventoryResult InventoryResult - if err := ctx.CallActivity(VerifyInventoryActivity, task.WithActivityInput(InventoryRequest{ - RequestID: string(orderID), + if err := ctx.CallActivity(VerifyInventoryActivity, workflow.WithActivityInput(InventoryRequest{ + RequestID: orderID, ItemName: orderPayload.ItemName, Quantity: orderPayload.Quantity, })).Await(&verifyInventoryResult); err != nil { @@ -1908,64 +1949,64 @@ func OrderProcessingWorkflow(ctx *task.OrchestrationContext) (any, error) { if !verifyInventoryResult.Success { notification := Notification{Message: fmt.Sprintf("Insufficient inventory for %s", orderPayload.ItemName)} - err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(notification)).Await(nil) + err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(notification)).Await(nil) return OrderResult{Processed: false}, err } if orderPayload.TotalCost > 5000 { var approvalRequired ApprovalRequired - if err := ctx.CallActivity(RequestApprovalActivity, 
task.WithActivityInput(orderPayload)).Await(&approvalRequired); err != nil { + if err := ctx.CallActivity(RequestApprovalActivity, workflow.WithActivityInput(orderPayload)).Await(&approvalRequired); err != nil { return OrderResult{Processed: false}, err } - if err := ctx.WaitForSingleEvent("manager_approval", time.Second*200).Await(nil); err != nil { + if err := ctx.WaitForExternalEvent("manager_approval", time.Second*200).Await(nil); err != nil { return OrderResult{Processed: false}, err } // TODO: Confirm timeout flow - this will be in the form of an error. if approvalRequired.Approval { - if err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{Message: fmt.Sprintf("Payment for order %s has been approved!", orderID)})).Await(nil); err != nil { + if err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{Message: fmt.Sprintf("Payment for order %s has been approved!", orderID)})).Await(nil); err != nil { log.Printf("failed to notify of a successful order: %v\n", err) } } else { - if err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{Message: fmt.Sprintf("Payment for order %s has been rejected!", orderID)})).Await(nil); err != nil { + if err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{Message: fmt.Sprintf("Payment for order %s has been rejected!", orderID)})).Await(nil); err != nil { log.Printf("failed to notify of an unsuccessful order :%v\n", err) } return OrderResult{Processed: false}, err } } - err = ctx.CallActivity(ProcessPaymentActivity, task.WithActivityInput(PaymentRequest{ - RequestID: string(orderID), + err = ctx.CallActivity(ProcessPaymentActivity, workflow.WithActivityInput(PaymentRequest{ + RequestID: orderID, ItemBeingPurchased: orderPayload.ItemName, Amount: orderPayload.TotalCost, Quantity: orderPayload.Quantity, })).Await(nil) if err != nil { - if err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{Message: 
fmt.Sprintf("Order %s failed!", orderID)})).Await(nil); err != nil { + if err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s failed!", orderID)})).Await(nil); err != nil { log.Printf("failed to notify of a failed order: %v", err) } return OrderResult{Processed: false}, err } - err = ctx.CallActivity(UpdateInventoryActivity, task.WithActivityInput(PaymentRequest{ - RequestID: string(orderID), + err = ctx.CallActivity(UpdateInventoryActivity, workflow.WithActivityInput(PaymentRequest{ + RequestID: orderID, ItemBeingPurchased: orderPayload.ItemName, Amount: orderPayload.TotalCost, Quantity: orderPayload.Quantity, })).Await(nil) if err != nil { - if err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s failed!", orderID)})).Await(nil); err != nil { + if err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s failed!", orderID)})).Await(nil); err != nil { log.Printf("failed to notify of a failed order: %v", err) } return OrderResult{Processed: false}, err } - if err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s has completed!", orderID)})).Await(nil); err != nil { + if err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s has completed!", orderID)})).Await(nil); err != nil { log.Printf("failed to notify of a successful order: %v", err) } return OrderResult{Processed: true}, err } // NotifyActivity outputs a notification message -func NotifyActivity(ctx task.ActivityContext) (any, error) { +func NotifyActivity(ctx workflow.ActivityContext) (any, error) { var input Notification if err := ctx.GetInput(&input); err != nil { return "", err @@ -1975,7 +2016,7 @@ func NotifyActivity(ctx task.ActivityContext) (any, error) { } // ProcessPaymentActivity is used to process a payment -func 
ProcessPaymentActivity(ctx task.ActivityContext) (any, error) { +func ProcessPaymentActivity(ctx workflow.ActivityContext) (any, error) { var input PaymentRequest if err := ctx.GetInput(&input); err != nil { return "", err @@ -1985,7 +2026,7 @@ func ProcessPaymentActivity(ctx task.ActivityContext) (any, error) { } // VerifyInventoryActivity is used to verify if an item is available in the inventory -func VerifyInventoryActivity(ctx task.ActivityContext) (any, error) { +func VerifyInventoryActivity(ctx workflow.ActivityContext) (any, error) { var input InventoryRequest if err := ctx.GetInput(&input); err != nil { return nil, err @@ -2017,7 +2058,7 @@ func VerifyInventoryActivity(ctx task.ActivityContext) (any, error) { } // UpdateInventoryActivity modifies the inventory. -func UpdateInventoryActivity(ctx task.ActivityContext) (any, error) { +func UpdateInventoryActivity(ctx workflow.ActivityContext) (any, error) { var input PaymentRequest if err := ctx.GetInput(&input); err != nil { return nil, err @@ -2051,7 +2092,7 @@ func UpdateInventoryActivity(ctx task.ActivityContext) (any, error) { } // RequestApprovalActivity requests approval for the order -func RequestApprovalActivity(ctx task.ActivityContext) (any, error) { +func RequestApprovalActivity(ctx workflow.ActivityContext) (any, error) { var input OrderPayload if err := ctx.GetInput(&input); err != nil { return nil, err @@ -2066,6 +2107,107 @@ func RequestApprovalActivity(ctx task.ActivityContext) (any, error) { {{< /tabpane >}} + +## Step 5: Manage Your Workflow + +Now that your workflow is running, let's learn how to manage it using the Dapr CLI. + +### View Running Workflows + +Open a separate terminal and run the following CLI commands. 
+
+```bash
+# List all workflows
+dapr workflow list --app-id order-processor --connection-string=redis://127.0.0.1:6379 -o wide
+```
+
+You should see output like:
+
+```
+NAMESPACE  APP ID           NAME                     INSTANCE ID  CREATED               LAST UPDATE           STATUS
+default    order-processor  OrderProcessingWorkflow  e4d3807c     2025-11-07T12:29:37Z  2025-11-07T12:29:52Z  COMPLETED
+```
+
+### Check Workflow History
+
+View the detailed execution history of your workflow:
+
+```bash
+dapr workflow history e4d3807c --app-id order-processor
+```
+
+You should see output like:
+
+```
+TYPE                 NAME                     EVENTID  ELAPSED   STATUS     DETAILS
+ExecutionStarted     OrderProcessingWorkflow  -        Age:1.1m  RUNNING    orchestration start
+OrchestratorStarted  -                        -        13.4ms    RUNNING    replay cycle start
+TaskScheduled        NotifyActivity           0        1.3ms     RUNNING    activity=NotifyActivity
+TaskCompleted        -                        -        2.6ms     RUNNING    eventId=0
+OrchestratorStarted  -                        -        2.6ms     RUNNING    replay cycle start
+TaskScheduled        VerifyInventoryActivity  1        637.6µs   RUNNING    activity=VerifyInventoryActivity
+TaskCompleted        -                        -        2.4ms     RUNNING    eventId=1
+OrchestratorStarted  -                        -        1.7ms     RUNNING    replay cycle start
+TaskScheduled        ProcessPaymentActivity   2        439.3µs   RUNNING    activity=ProcessPaymentActivity
+TaskCompleted        -                        -        1.6ms     RUNNING    eventId=2
+OrchestratorStarted  -                        -        1.5ms     RUNNING    replay cycle start
+TaskScheduled        UpdateInventoryActivity  3        311.2µs   RUNNING    activity=UpdateInventoryActivity
+TaskCompleted        -                        -        2.4ms     RUNNING    eventId=3
+OrchestratorStarted  -                        -        2.7ms     RUNNING    replay cycle start
+TaskScheduled        NotifyActivity           4        354.1µs   RUNNING    activity=NotifyActivity
+TaskCompleted        -                        -        2.5ms     RUNNING    eventId=4
+OrchestratorStarted  -                        -        1.6ms     RUNNING    replay cycle start
+ExecutionCompleted   -                        5        517.1µs   COMPLETED  execDuration=38.7ms
+```
+
+### Interact with Your Workflow
+
+#### Raise an External Event
+
+If your workflow is waiting for an [external event]({{% ref "workflow-patterns.md#external-system-interaction" %}}), you can raise one.
+It takes a single argument in the format of `<instance-id>/<event-name>`.
+ +```bash +dapr workflow raise-event e4d3807c/ApprovalEvent \ + --app-id order-processor \ + --input '{"paymentId": "pay-123", "amount": 100.00}' +``` + +#### Suspend and Resume + +```bash +# Suspend a workflow +dapr workflow suspend e4d3807c \ + --app-id order-processor \ + --reason "Waiting for inventory" + +# Resume when ready +dapr workflow resume e4d3807c \ + --app-id order-processor \ + --reason "Inventory received" +``` + +### Clean Up + +After testing, purge completed workflows. + +{{% alert title="Important" color="warning" %}} +It is required that a workflow client is running in the application to perform purge operations. +The workflow client connection is required in order to preserve the workflow state machine integrity and prevent corruption. +Errors like the following suggest that the workflow client is not running: +``` +failed to purge orchestration state: rpc error: code = FailedPrecondition desc = failed to purge orchestration state: failed to lookup actor: api error: code = FailedPrecondition desc = did not find address for actor +``` +{{% /alert %}} + +```bash +# Purge a specific workflow +dapr workflow purge e4d3807c --app-id order-processor --connection-string=redis://127.0.0.1:6379 + +# Or purge all completed workflows +dapr workflow purge --app-id order-processor --connection-string=redis://127.0.0.1:6379 --all-older-than 1h +``` + ## Tell us what you think! We're continuously working to improve our Quickstart examples and value your feedback. Did you find this Quickstart helpful? Do you have suggestions for improvement? 
diff --git a/daprdocs/content/en/operations/configuration/configuration-overview.md b/daprdocs/content/en/operations/configuration/configuration-overview.md index f501710405c..33cc11d8e31 100644 --- a/daprdocs/content/en/operations/configuration/configuration-overview.md +++ b/daprdocs/content/en/operations/configuration/configuration-overview.md @@ -62,13 +62,14 @@ A Dapr sidecar can apply a specific configuration by using a `dapr.io/config` an ### Application configuration settings -The following menu includes all of the configuration settings you can set on the sidecar. +The following menu includes all of the configuration settings you can set: - [Tracing](#tracing) - [Metrics](#metrics) - [Logging](#logging) - [Middleware](#middleware) - [Name resolution](#name-resolution) +- [Workflow](#workflow) - [Scope secret store access](#scope-secret-store-access) - [Access Control allow lists for building block APIs](#access-control-allow-lists-for-building-block-apis) - [Access Control allow lists for service invocation API](#access-control-allow-lists-for-service-invocation-api) @@ -255,6 +256,15 @@ For more information, see: - [The name resolution component documentation]({{% ref supported-name-resolution %}}) for more examples. - [The Configuration file documentation]({{% ref configuration-schema.md %}}) to learn more about how to configure name resolution per component.
+#### Workflow + +The `workflow` section contains properties for configuring [Workflows]({{% ref "workflow-overview.md" %}}). + +| Property | Type | Description | +|------------------|--------|-----| +| `maxConcurrentWorkflowInvocations` | int32 | Maximum number of concurrent workflow executions per Dapr sidecar. Default is infinite. | +| `maxConcurrentActivityInvocations` | int32 | Maximum number of concurrent activity executions per Dapr sidecar. Default is infinite. | + #### Scope secret store access See the [Scoping secrets]({{% ref "secret-scope.md" %}}) guide for information and examples on how to scope secrets to an application. @@ -334,6 +344,9 @@ spec: deny: - bindings.smtp - secretstores.local.file + workflow: + maxConcurrentWorkflowInvocations: 100 + maxConcurrentActivityInvocations: 1000 accessControl: defaultAction: deny trustDomain: "public" diff --git a/daprdocs/content/en/operations/configuration/increase-request-size.md b/daprdocs/content/en/operations/configuration/increase-request-size.md index 04f6500866c..76c9d613949 100644 --- a/daprdocs/content/en/operations/configuration/increase-request-size.md +++ b/daprdocs/content/en/operations/configuration/increase-request-size.md @@ -1,27 +1,29 @@ --- type: docs -title: "How-To: Handle large http body requests" -linkTitle: "HTTP request body size" +title: "How-To: Handle larger body requests" +linkTitle: "Request body size" weight: 6000 description: "Configure http requests that are bigger than 4 MB" --- -By default, Dapr has a limit for the request body size, set to 4MB. You can change this by defining: -- The `dapr.io/http-max-request-size` annotation, or -- The `--dapr-http-max-request-size` flag. +{{% alert title="Note" color="primary" %}} +The existing flag/annotation `dapr-http-max-request-size` has been deprecated and updated to `max-body-size`. +{{% /alert %}} + +By default, Dapr has a limit for the request body size, set to 4MB.
You can change this for both HTTP and gRPC requests by defining: +- The `dapr.io/max-body-size` annotation, or +- The `--max-body-size` flag. {{< tabpane text=true >}} {{% tab "Self-hosted" %}} -When running in self-hosted mode, use the `--dapr-http-max-request-size` flag to configure Dapr to use non-default request body size: +When running in self-hosted mode, use the `--max-body-size` flag to configure Dapr to use non-default request body size: ```bash -dapr run --dapr-http-max-request-size 16 node app.js +dapr run --max-body-size 16Mi node app.js ``` -This tells Dapr to set maximum request body size to `16` MB. - {{% /tab %}} @@ -50,7 +52,7 @@ spec: dapr.io/enabled: "true" dapr.io/app-id: "myapp" dapr.io/app-port: "8000" - dapr.io/http-max-request-size: "16" + dapr.io/max-body-size: "16Mi" #... ``` @@ -58,6 +60,8 @@ spec: {{< /tabpane >}} +This tells Dapr to set the maximum request body size to `16` MB for both HTTP and gRPC requests. + ## Related links [Dapr Kubernetes pod annotations spec]({{% ref arguments-annotations-overview.md %}}) diff --git a/daprdocs/content/en/operations/configuration/secret-scope.md b/daprdocs/content/en/operations/configuration/secret-scope.md index a26575ca14d..aba1e8c8965 100644 --- a/daprdocs/content/en/operations/configuration/secret-scope.md +++ b/daprdocs/content/en/operations/configuration/secret-scope.md @@ -4,18 +4,15 @@ title: "How-To: Limit the secrets that can be read from secret stores" linkTitle: "Limit secret store access" weight: 3000 description: "Define secret scopes by augmenting the existing configuration resource with restrictive permissions." -description: "Define secret scopes by augmenting the existing configuration resource with restrictive permissions." --- In addition to [scoping which applications can access a given component]({{% ref "component-scopes.md"%}}), you can also scope a named secret store component to one or more secrets for an application. 
By defining `allowedSecrets` and/or `deniedSecrets` lists, you restrict applications to access only specific secrets. -For more information about configuring a Configuration resource: -- [Configuration overview]({{% ref configuration-overview.md %}}) -- [Configuration schema]({{% ref configuration-schema.md %}}) For more information about configuring a Configuration resource: - [Configuration overview]({{% ref configuration-overview.md %}}) - [Configuration schema]({{% ref configuration-schema.md %}}) + ## Configure secrets access The `secrets` section under the `Configuration` spec contains the following properties: @@ -59,10 +56,8 @@ The `allowedSecrets` and `deniedSecrets` list values take priority over the `def ### Scenario 1: Deny access to all secrets for a secret store -In a Kubernetes cluster, the native Kubernetes secret store is added to your Dapr application by default. In some scenarios, it may be necessary to deny access to Dapr secrets for a given application. To add this configuration: In a Kubernetes cluster, the native Kubernetes secret store is added to your Dapr application by default. In some scenarios, it may be necessary to deny access to Dapr secrets for a given application. To add this configuration: -1. Define the following `appconfig.yaml`. 1. Define the following `appconfig.yaml`. ```yaml @@ -76,26 +71,8 @@ In a Kubernetes cluster, the native Kubernetes secret store is added to your Dap - storeName: kubernetes defaultAccess: deny ``` - ```yaml - apiVersion: dapr.io/v1alpha1 - kind: Configuration - metadata: - name: appconfig - spec: - secrets: - scopes: - - storeName: kubernetes - defaultAccess: deny - ``` - -1. Apply it to the Kubernetes cluster using the following command: - - ```bash - kubectl apply -f appconfig.yaml`. - ``` -For applications that you need to deny access to the Kubernetes secret store, follow [the Kubernetes instructions]({{% ref kubernetes-overview %}}), adding the following annotation to the application pod. -1. 
Apply it to the Kubernetes cluster using the following command: +2. Apply it to the Kubernetes cluster using the following command: ```bash kubectl apply -f appconfig.yaml`. @@ -109,7 +86,6 @@ dapr.io/config: appconfig With this defined, the application no longer has access to Kubernetes secret store. -### Scenario 2: Allow access to only certain secrets in a secret store ### Scenario 2: Allow access to only certain secrets in a secret store To allow a Dapr application to have access to only certain secrets, define the following `config.yaml`: @@ -127,7 +103,6 @@ spec: allowedSecrets: ["secret1", "secret2"] ``` -This example defines configuration for secret store named `vault`. The default access to the secret store is `deny`. Meanwhile, some secrets are accessible by the application based on the `allowedSecrets` list. Follow [the Sidecar configuration instructions]({{% ref "configuration-overview.md#sidecar-configuration" %}}) to apply configuration to the sidecar. This example defines configuration for secret store named `vault`. The default access to the secret store is `deny`. Meanwhile, some secrets are accessible by the application based on the `allowedSecrets` list. Follow [the Sidecar configuration instructions]({{% ref "configuration-overview.md#sidecar-configuration" %}}) to apply configuration to the sidecar. ### Scenario 3: Deny access to certain sensitive secrets in a secret store @@ -152,9 +127,3 @@ This configuration explicitly denies access to `secret1` and `secret2` from the ## Next steps {{< button text="Service invocation access control" page="invoke-allowlist.md" >}} - -This configuration explicitly denies access to `secret1` and `secret2` from the secret store named `vault,` while allowing access to all other secrets. Follow [the Sidecar configuration instructions]({{% ref "configuration-overview.md#sidecar-configuration" %}}) to apply configuration to the sidecar. 
- -## Next steps - -{{< button text="Service invocation access control" page="invoke-allowlist.md" >}} diff --git a/daprdocs/content/en/operations/hosting/kubernetes/kubernetes-persisting-scheduler.md b/daprdocs/content/en/operations/hosting/kubernetes/kubernetes-persisting-scheduler.md index 3083f2ec060..e379b745de3 100644 --- a/daprdocs/content/en/operations/hosting/kubernetes/kubernetes-persisting-scheduler.md +++ b/daprdocs/content/en/operations/hosting/kubernetes/kubernetes-persisting-scheduler.md @@ -77,22 +77,6 @@ kubectl delete pvc -n dapr-system dapr-scheduler-data-dir-dapr-scheduler-server- Persistent Volume Claims are not deleted automatically with an [uninstall]({{% ref dapr-uninstall.md %}}). This is a deliberate safety measure to prevent accidental data loss. {{% /alert %}} -{{% alert title="Note" color="primary" %}} -For storage providers that do NOT support dynamic volume expansion: If Dapr has ever been installed on the cluster before, the Scheduler's Persistent Volume Claims must be manually uninstalled in order for new ones with increased storage size to be created. -```bash -kubectl delete pvc -n dapr-system dapr-scheduler-data-dir-dapr-scheduler-server-0 dapr-scheduler-data-dir-dapr-scheduler-server-1 dapr-scheduler-data-dir-dapr-scheduler-server-2 -``` -Persistent Volume Claims are not deleted automatically with an [uninstall]({{< ref dapr-uninstall.md >}}). This is a deliberate safety measure to prevent accidental data loss. -{{% /alert %}} - -{{% alert title="Note" color="primary" %}} -For storage providers that do NOT support dynamic volume expansion: If Dapr has ever been installed on the cluster before, the Scheduler's Persistent Volume Claims must be manually uninstalled in order for new ones with increased storage size to be created. 
-```bash -kubectl delete pvc -n dapr-system dapr-scheduler-data-dir-dapr-scheduler-server-0 dapr-scheduler-data-dir-dapr-scheduler-server-1 dapr-scheduler-data-dir-dapr-scheduler-server-2 -``` -Persistent Volume Claims are not deleted automatically with an [uninstall]({{< ref dapr-uninstall.md >}}). This is a deliberate safety measure to prevent accidental data loss. -{{% /alert %}} - #### Increase existing Scheduler Storage Size {{% alert title="Warning" color="warning" %}} diff --git a/daprdocs/content/en/operations/observability/tracing/otel-collector/open-telemetry-collector-jaeger.md b/daprdocs/content/en/operations/observability/tracing/otel-collector/open-telemetry-collector-jaeger.md index 479a408b97d..fce8875dc3a 100644 --- a/daprdocs/content/en/operations/observability/tracing/otel-collector/open-telemetry-collector-jaeger.md +++ b/daprdocs/content/en/operations/observability/tracing/otel-collector/open-telemetry-collector-jaeger.md @@ -1,30 +1,47 @@ --- type: docs -title: "Using OpenTelemetry Collector to collect traces to send to Jaeger" -linkTitle: "Using the OpenTelemetry for Jaeger" +title: "Using OpenTelemetry to send traces to Jaeger V2" +linkTitle: "Using OpenTelemetry for Jaeger V2" weight: 1200 -description: "How to push trace events to Jaeger distributed tracing platform, using the OpenTelemetry Collector." +description: "How to push trace events to Jaeger V2 distributed tracing platform using OpenTelemetry protocol." --- -While Dapr supports writing traces using OpenTelemetry (OTLP) and Zipkin protocols, Zipkin support for Jaeger has been deprecated in favor of OTLP. Although Jaeger supports OTLP directly, the recommended approach for production is to use the OpenTelemetry Collector to collect traces from Dapr and send them to Jaeger, allowing your application to quickly offload data and take advantage of features like retries, batching, and encryption. 
For more information, read the Open Telemetry Collector [documentation](https://opentelemetry.io/docs/collector/#when-to-use-a-collector). +Dapr supports writing traces using the OpenTelemetry (OTLP) protocol, and Jaeger V2 natively supports OTLP, allowing Dapr to send traces directly to a Jaeger V2 instance. This approach is recommended for production to leverage Jaeger V2's capabilities for distributed tracing. + {{< tabpane text=true >}} {{% tab "Self-hosted" %}} - -## Configure Jaeger in self-hosted mode +## Configure Jaeger V2 in self-hosted mode ### Local setup The simplest way to start Jaeger is to run the pre-built, all-in-one Jaeger image published to DockerHub and expose the OTLP port: +> **Note:** Port 9411 is commonly used by Zipkin. If you have Zipkin running (starts by default when you run `dapr init`), stop the `dapr_zipkin` container first to avoid port conflicts: `docker stop dapr_zipkin` + ```bash -docker run -d --name jaeger \ - -p 4317:4317 \ +docker run -d --rm --name jaeger \ -p 16686:16686 \ - jaegertracing/all-in-one:1.49 + -p 4317:4317 \ + -p 4318:4318 \ + -p 5778:5778 \ + -p 9411:9411 \ + cr.jaegertracing.io/jaegertracing/jaeger:2.11.0 ``` -Next, create the following `config.yaml` file locally: +You can also view the logs from the jaeger container using: + +```bash +docker logs jaeger +``` + +### Configure Dapr for tracing + +You have two options to configure Dapr to send traces to Jaeger V2: + +#### Option 1: Use a custom config file + +Create a `config.yaml` file with the following content: > **Note:** Because you are using the Open Telemetry protocol to talk to Jaeger, you need to fill out the `otel` section of the tracing configuration and set the `endpointAddress` to the address of the Jaeger container. @@ -51,6 +68,10 @@ the `--config` option. 
For example: dapr run --app-id myapp --app-port 3000 node app.js --config config.yaml ``` +#### Option 2: Update the default Dapr config (development environment) + +Alternatively, in your development environment, navigate to your [local Dapr components directory](https://docs.dapr.io/getting-started/install-dapr-selfhost/#step-5-verify-components-directory-has-been-initialized) and update the default `config.yaml` file with the OTLP configuration above. This way, all Dapr applications will use the Jaeger V2 tracing configuration by default without needing to specify the `--config` flag each time. + ### View traces To view traces in your browser, go to `http://localhost:16686` to see the Jaeger UI. @@ -58,41 +79,105 @@ To view traces in your browser, go to `http://localhost:16686` to see the Jaeger {{% tab "Kubernetes" %}} -## Configure Jaeger on Kubernetes with the OpenTelemetry Collector +## Configure Jaeger V2 on Kubernetes -The following steps show you how to configure Dapr to send distributed tracing data to the OpenTelemetry Collector which, in turn, sends the traces to Jaeger. +The following steps show you how to configure Dapr to send distributed tracing data directly to a Jaeger V2 instance deployed using the OpenTelemetry Operator with in-memory storage. ### Prerequisites - [Install Dapr on Kubernetes]({{% ref kubernetes %}}) -- [Set up Jaeger](https://www.jaegertracing.io/docs/1.49/operator/) using the Jaeger Kubernetes Operator -### Set up OpenTelemetry Collector to push to Jaeger +### Set up Jaeger V2 with the OpenTelemetry Operator -To push traces to your Jaeger instance, install the OpenTelemetry Collector on your Kubernetes cluster. +Jaeger V2 can be deployed using the OpenTelemetry Operator for simplified management and native OTLP support. The following example configures Jaeger V2 with in-memory storage. -1. Download and inspect the [`open-telemetry-collector-jaeger.yaml`](/docs/open-telemetry-collector/open-telemetry-collector-jaeger.yaml) file. 
+> **Note on Storage Backends:** This example uses in-memory storage (`memstore`) for simplicity, suitable for development or testing environments as it stores up to 100,000 traces in memory. For production environments, consider configuring a persistent storage backend like Cassandra or Elasticsearch to ensure trace data durability. -1. In the data section of the `otel-collector-conf` ConfigMap, update the `otlp/jaeger.endpoint` value to reflect the endpoint of your Jaeger collector Kubernetes service object. +#### Installation -1. Deploy the OpenTelemetry Collector into the same namespace where your Dapr-enabled applications are running: +> **Note:** In order for the API server to communicate with the webhook component of the operator, the webhook requires a TLS certificate that the API server is configured to trust. There are a few different ways you can use to generate/configure the required TLS certificate detailed in the [otel operator chart docs](https://github.com/open-telemetry/opentelemetry-helm-charts/tree/main/charts/opentelemetry-operator#tls-certificate-requirement) - ```sh - kubectl apply -f open-telemetry-collector-jaeger.yaml - ``` +For simplicity you can use Helm to create an automatically generated self-signed certificate. -### Set up Dapr to send traces to OpenTelemetryCollector +1. **Install the OpenTelemetry Operator**: -Create a Dapr configuration file to enable tracing and export the sidecar traces to the OpenTelemetry Collector. - -1. Use the [`collector-config-otel.yaml`](/docs/open-telemetry-collector/collector-config-otel.yaml) file to create your own Dapr configuration. 
+ ```bash + helm install opentelemetry-operator open-telemetry/opentelemetry-operator -n opentelemetry-operator-system --create-namespace \ + --set "manager.collectorImage.repository=ghcr.io/open-telemetry/opentelemetry-collector-releases/opentelemetry-collector-k8s" \ + --set admissionWebhooks.certManager.enabled=false \ + --set admissionWebhooks.autoGenerateCert.enabled=true + ``` + Confirm that all resources in the `opentelemetry-operator-system` namespace are ready. + +1. **Deploy a Jaeger V2 instance with in-memory storage**: + Create a file named `jaeger-inmemory.yaml` with the following configuration: + ```yaml + apiVersion: opentelemetry.io/v1beta1 + kind: OpenTelemetryCollector + metadata: + name: jaeger-inmemory-instance + namespace: observability + spec: + image: jaegertracing/jaeger:latest + ports: + - name: jaeger + port: 16686 + config: + service: + extensions: [jaeger_storage, jaeger_query] + pipelines: + traces: + receivers: [otlp] + exporters: [jaeger_storage_exporter] + extensions: + jaeger_query: + storage: + traces: memstore + jaeger_storage: + backends: + memstore: + memory: + max_traces: 100000 + receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + http: + endpoint: 0.0.0.0:4318 + exporters: + jaeger_storage_exporter: + trace_storage: memstore + ``` + Apply it with: + ```bash + kubectl apply -f jaeger-inmemory.yaml -n observability + ``` -1. Update the `namespace` and `otel.endpointAddress` values to align with the namespace where your Dapr-enabled applications and OpenTelemetry Collector are deployed. -1. Apply the configuration with: +### Set up Dapr to send traces to Jaeger V2 + +Create a Dapr configuration file to enable tracing and export the sidecar traces directly to the Jaeger V2 instance. + +1. 
Create a configuration file (for example `tracing.yaml`) with the following content, updating the `namespace` and `otel.endpointAddress` to match your Jaeger V2 instance: + ```yaml + apiVersion: dapr.io/v1alpha1 + kind: Configuration + metadata: + name: tracing + namespace: order-system + spec: + tracing: + samplingRate: "1" + otel: + endpointAddress: "jaeger-inmemory-instance-collector.observability.svc.cluster.local:4317" + isSecure: false + protocol: grpc + ``` - ```sh - kubectl apply -f collector-config.yaml +2. Apply the configuration: + ```bash + kubectl apply -f tracing.yaml -n order-system ``` ### Deploy your app with tracing enabled @@ -122,20 +207,20 @@ That’s it! There’s no need to include the OpenTelemetry SDK or instrument yo ### View traces -To view Dapr sidecar traces, port-forward the Jaeger Service and open the UI: +To view Dapr sidecar traces, port-forward the Jaeger V2 service and open the UI: ```bash -kubectl port-forward svc/jaeger-query 16686 -n observability +kubectl port-forward svc/jaeger-inmemory-instance-collector 16686:16686 -n observability ``` -In your browser, go to `http://localhost:16686` and you will see the Jaeger UI. +In your browser, go to `http://localhost:16686` to see the Jaeger V2 UI. 
![jaeger](/images/jaeger_ui.png) {{% /tab %}} {{< /tabpane >}} + ## References -- [Jaeger Getting Started](https://www.jaegertracing.io/docs/1.49/getting-started/) -- [Jaeger Kubernetes Operator](https://www.jaegertracing.io/docs/1.49/operator/) -- [OpenTelemetry Collector Exporters](https://opentelemetry.io/docs/collector/configuration/#exporters) +- [Jaeger V2 Getting Started](https://www.jaegertracing.io/docs/2.11/getting-started/) +- [Jaeger V2 Kubernetes Operator](https://www.jaegertracing.io/docs/2.11/deployment/kubernetes/#kubernetes-operator) \ No newline at end of file diff --git a/daprdocs/content/en/operations/resiliency/health-checks/sidecar-health.md b/daprdocs/content/en/operations/resiliency/health-checks/sidecar-health.md index ee58a2ad233..401a3dd2f85 100644 --- a/daprdocs/content/en/operations/resiliency/health-checks/sidecar-health.md +++ b/daprdocs/content/en/operations/resiliency/health-checks/sidecar-health.md @@ -42,9 +42,9 @@ On the other hand, as shown by the green boundary lines in the diagram above, th - The Dapr HTTP port is available; _but,_ - The app channel is not yet established. -In the Dapr SDKs, the `waitForSidecar`/`wait_until_ready` method (depending on [which SDK you use]({{% ref "#sdks-supporting-outbound-health-endpoint" %}})) is used for this specific check with the `v1.0/healthz/outbound` endpoint. Using this behavior, instead of waiting for the app channel to be available (see: red boundary lines) with the `v1.0/healthz/` endpoint, Dapr waits for a successful response from `v1.0/healthz/outbound`. This approach enables your application to perform calls on the Dapr sidecar APIs before the app channel is initalized - for example, reading secrets with the secrets API. +In the Dapr SDKs, the `waitForSidecar` method (depending on [which SDK you use]({{% ref "#sdks-supporting-outbound-health-endpoint" %}})) is used for this specific check with the `v1.0/healthz/outbound` endpoint. 
Using this behavior, instead of waiting for the app channel to be available (see: red boundary lines) with the `v1.0/healthz/` endpoint, Dapr waits for a successful response from `v1.0/healthz/outbound`. This approach enables your application to perform calls on the Dapr sidecar APIs before the app channel is initialized - for example, reading secrets with the secrets API. -If you are using the `waitForSidecar`/`wait_until_ready` method on the SDKs, then the correct initialization is performed. Otherwise, you can call the `v1.0/healthz/outbound` endpoint during initalization, and if successesful, you can call the Dapr sidecar APIs. +If you are using the `waitForSidecar` method on the SDKs, then the correct initialization is performed. Otherwise, you can call the `v1.0/healthz/outbound` endpoint during initialization, and if successful, you can call the Dapr sidecar APIs. ### SDKs supporting outbound health endpoint Currently, the `v1.0/healthz/outbound` endpoint is supported in the: diff --git a/daprdocs/content/en/operations/security/api-token.md b/daprdocs/content/en/operations/security/api-token.md index d91a1490fd6..6593f5e729b 100644 --- a/daprdocs/content/en/operations/security/api-token.md +++ b/daprdocs/content/en/operations/security/api-token.md @@ -52,45 +52,12 @@ annotations: When deployed, Dapr sidecar injector will automatically create a secret reference and inject the actual value into `DAPR_API_TOKEN` environment variable. -## Rotate a token - -### Self-hosted - -To rotate the configured token in self-hosted, update the `DAPR_API_TOKEN` environment variable to the new value and restart the `daprd` process. - -### Kubernetes - -To rotate the configured token in Kubernetes, update the previously-created secret with the new token in each namespace.
You can do that using `kubectl patch` command, but a simpler way to update these in each namespace is by using a manifest: - -```yaml -apiVersion: v1 -kind: Secret -metadata: - name: dapr-api-token -type: Opaque -data: - token: -``` - -And then apply it to each namespace: - -```shell -kubectl apply --file token-secret.yaml --namespace -``` - -To tell Dapr to start using the new token, trigger a rolling upgrade to each one of your deployments: - -```shell -kubectl rollout restart deployment/ --namespace -``` - -> Assuming your service is configured with more than one replica, the key rotation process does not result in any downtime. ## Adding API token to client API invocations -Once token authentication is configured in Dapr, all clients invoking Dapr API need to append the `dapr-api-token` token to every request. +Once token authentication is configured in Dapr, all clients invoking the Dapr APIs need to append the `dapr-api-token` token to every request. -> **Note:** The Dapr SDKs read the [DAPR_API_TOKEN]({{% ref environment %}}) environment variable and set it for you by default. +> **Note:** The Dapr SDKs read the [DAPR_API_TOKEN]({{% ref environment %}}) environment variable and set it for you by default, however you still must ensure that your app has access to the environment variable. @@ -122,15 +89,18 @@ dapr-api-token[0]. ### Kubernetes -In Kubernetes, it's recommended to mount the secret to your pod as an environment variable, as shown in the example below, where a Kubernetes secret with the name `dapr-api-token` is used to hold the token. +In Kubernetes, it's required to mount the API token on your application pod as an environment variable, when your application is making outbound calls to the Dapr APIs (Service Invocation invoke, Pub/sub publish, etc.), otherwise the request will fail with an `Unauthorized` error. 
Mounting the environment variable is done by providing the name of the Kubernetes secret in your application pod specification, as shown in the example below, where a Kubernetes secret with the name `dapr-api-token` is used to hold the token. ```yaml containers: - name: mycontainer image: myregistry/myapp - envFrom: - - secretRef: - name: dapr-api-token + env: + - name: DAPR_API_TOKEN + valueFrom: + secretKeyRef: + name: dapr-api-token + key: token ``` ### Self-hosted @@ -141,6 +111,40 @@ In self-hosted mode, you can set the token as an environment variable for your app: export DAPR_API_TOKEN=<token> +## Rotate a token + +### Self-hosted + +To rotate the configured token in self-hosted, update the `DAPR_API_TOKEN` environment variable to the new value and restart the `daprd` process. + +### Kubernetes + +To rotate the configured token in Kubernetes, update the previously-created secret with the new token in each namespace. You can do that using `kubectl patch` command, but a simpler way to update these in each namespace is by using a manifest: + +```yaml +apiVersion: v1 +kind: Secret +metadata: + name: dapr-api-token +type: Opaque +data: + token: <base64-encoded-token> +``` + +And then apply it to each namespace: + +```shell +kubectl apply --file token-secret.yaml --namespace <namespace> +``` + +To tell Dapr to start using the new token, trigger a rolling upgrade to each one of your deployments: + +```shell +kubectl rollout restart deployment/<deployment-name> --namespace <namespace> +``` + +> Assuming your service is configured with more than one replica, the key rotation process does not result in any downtime.
+ ## Related Links - Learn about [Dapr security concepts]({{% ref security-concept.md %}}) diff --git a/daprdocs/content/en/operations/support/alpha-beta-apis.md b/daprdocs/content/en/operations/support/alpha-beta-apis.md index f8dd6e3ce11..8b1d81e8218 100644 --- a/daprdocs/content/en/operations/support/alpha-beta-apis.md +++ b/daprdocs/content/en/operations/support/alpha-beta-apis.md @@ -16,8 +16,8 @@ description: "List of current alpha and beta APIs" | Bulk Subscribe | [Bulk subscribe proto](https://github.com/dapr/dapr/blob/5aba3c9aa4ea9b3f388df125f9c66495b43c5c9e/dapr/proto/runtime/v1/appcallback.proto#L57) | N/A | The bulk subscribe application callback receives multiple messages from a topic in a single call. | [Bulk Publish and Subscribe API]({{% ref "pubsub-bulk.md" %}}) | v1.10 | | Cryptography | [Crypto proto](https://github.com/dapr/dapr/blob/5aba3c9aa4ea9b3f388df125f9c66495b43c5c9e/dapr/proto/runtime/v1/dapr.proto#L118) | `v1.0-alpha1/crypto` | The cryptography API enables you to perform **high level** cryptography operations for encrypting and decrypting messages. | [Cryptography API]({{% ref "cryptography-overview.md" %}}) | v1.11 | | Jobs | [Jobs proto](https://github.com/dapr/dapr/blob/master/dapr/proto/runtime/v1/dapr.proto#L212-219) | `v1.0-alpha1/jobs` | The jobs API enables you to schedule and orchestrate jobs. | [Jobs API]({{% ref "jobs-overview.md" %}}) | v1.14 | -| Conversation | [Conversation proto](https://github.com/dapr/dapr/blob/master/dapr/proto/runtime/v1/dapr.proto#L221-222) | `v1.0-alpha1/conversation` | Converse between different large language models using the conversation API. | [Conversation API]({{% ref "conversation-overview.md" %}}) | v1.15 | - +| Streaming Subscription | [Streaming Subscription proto](https://github.com/dapr/dapr/blob/310c83140b2f0c3cb7d2bef19624df88af3e8e0a/dapr/proto/runtime/v1/dapr.proto#L454) | N/A | Subscription is defined in the application code. 
Streaming subscriptions are dynamic, meaning they allow for adding or removing subscriptions at runtime. | [Streaming Subscription API]({{% ref "subscription-methods/#streaming-subscriptions" %}}) | v1.14 | +| Conversation | [Conversation proto](https://github.com/dapr/dapr/blob/master/dapr/proto/runtime/v1/dapr.proto#L226) | `v1.0-alpha2/conversation` | Converse between different large language models using the conversation API. | [Conversation API]({{% ref "conversation-overview.md" %}}) | v1.15 | ## Beta APIs diff --git a/daprdocs/content/en/operations/support/breaking-changes-and-deprecations.md b/daprdocs/content/en/operations/support/breaking-changes-and-deprecations.md index a0895537cc3..3c2214cb711 100644 --- a/daprdocs/content/en/operations/support/breaking-changes-and-deprecations.md +++ b/daprdocs/content/en/operations/support/breaking-changes-and-deprecations.md @@ -58,17 +58,18 @@ After announcing a future breaking change, the change will happen in 2 releases ## Announced deprecations -| Feature | Deprecation announcement | Removal | -|-----------------------|-----------------------|------------------------- | -| GET /v1.0/shutdown API (Users should use [POST API]({{% ref kubernetes-job.md %}}) instead) | 1.2.0 | 1.4.0 | -| Java domain builder classes deprecated (Users should use [setters](https://github.com/dapr/java-sdk/issues/587) instead) | Java SDK 1.3.0 | Java SDK 1.5.0 | -| Service invocation will no longer provide a default content type header of `application/json` when no content-type is specified. You must explicitly [set a content-type header]({{% ref "service_invocation_api.md#request-contents" %}}) for service invocation if your invoked apps rely on this header. | 1.7.0 | 1.9.0 | -| gRPC service invocation using `invoke` method is deprecated. Use proxy mode service invocation instead. 
See [How-To: Invoke services using gRPC ]({{% ref howto-invoke-services-grpc.md %}}) to use the proxy mode.| 1.9.0 | 1.10.0 | -| The CLI flag `--app-ssl` (in both the Dapr CLI and daprd) has been deprecated in favor of using `--app-protocol` with values `https` or `grpcs`. [daprd:6158](https://github.com/dapr/dapr/issues/6158) [cli:1267](https://github.com/dapr/cli/issues/1267)| 1.11.0 | 1.13.0 | -| Hazelcast PubSub Component | 1.9.0 | 1.11.0 | -| Twitter Binding Component | 1.10.0 | 1.11.0 | -| NATS Streaming PubSub Component | 1.11.0 | 1.13.0 | -| Workflows API Alpha1 `/v1.0-alpha1/workflows` being deprecated in favor of Workflow Client | 1.15.0 | 1.17.0 | +| Feature | Deprecation announcement | Removal | +|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------|----------------| +| GET /v1.0/shutdown API (Users should use [POST API]({{% ref kubernetes-job.md %}}) instead) | 1.2.0 | 1.4.0 | +| Java domain builder classes deprecated (Users should use [setters](https://github.com/dapr/java-sdk/issues/587) instead) | Java SDK 1.3.0 | Java SDK 1.5.0 | +| Service invocation will no longer provide a default content type header of `application/json` when no content-type is specified. You must explicitly [set a content-type header]({{% ref "service_invocation_api.md#request-contents" %}}) for service invocation if your invoked apps rely on this header. | 1.7.0 | 1.9.0 | +| gRPC service invocation using `invoke` method is deprecated. Use proxy mode service invocation instead. See [How-To: Invoke services using gRPC ]({{% ref howto-invoke-services-grpc.md %}}) to use the proxy mode. 
| 1.9.0 | 1.10.0 | +| The CLI flag `--app-ssl` (in both the Dapr CLI and daprd) has been deprecated in favor of using `--app-protocol` with values `https` or `grpcs`. [daprd:6158](https://github.com/dapr/dapr/issues/6158) [cli:1267](https://github.com/dapr/cli/issues/1267) | 1.11.0 | 1.13.0 | +| Hazelcast PubSub Component | 1.9.0 | 1.11.0 | +| Twitter Binding Component | 1.10.0 | 1.11.0 | +| NATS Streaming PubSub Component | 1.11.0 | 1.13.0 | +| Workflows API Alpha1 `/v1.0-alpha1/workflows` being deprecated in favor of Workflow Client | 1.15.0 | 1.17.0 | +| Migration of `http-max-request-size` flags/annotations to `max-body-size`. See [How-To: Handle larger body requests]({{% ref increase-request-size.md %}}) | 1.14.0 | 1.17.0 | ## Related links diff --git a/daprdocs/content/en/operations/support/support-release-policy.md b/daprdocs/content/en/operations/support/support-release-policy.md index 579008b70e0..aae6a2f89ce 100644 --- a/daprdocs/content/en/operations/support/support-release-policy.md +++ b/daprdocs/content/en/operations/support/support-release-policy.md @@ -19,7 +19,7 @@ Dapr releases use `MAJOR.MINOR.PATCH` versioning. For example, 1.0.0. A supported release means: -- A hoxfix patch is released if the release has a critical issue such as a mainline broken scenario or a security issue. Each of these are reviewed on a case by case basis. +- A hotfix patch is released if the release has a critical issue such as a mainline broken scenario or a security issue. Each of these are reviewed on a case by case basis. - Issues are investigated for the supported releases. If a release is no longer supported, you need to upgrade to a newer release and determine if the issue is still relevant. From the 1.8.0 release onwards three (3) versions of Dapr are supported; the current and previous two (2) versions. Typically these are `MINOR`release updates. 
This means that there is a rolling window that moves forward for supported releases and it is your operational responsibility to remain up to date with these supported versions. If you have an older version of Dapr you may have to do intermediate upgrades to get to a supported version. @@ -45,26 +45,36 @@ The table below shows the versions of Dapr releases that have been tested togeth | Release date | Runtime | CLI | SDKs | Dashboard | Status | Release notes | |--------------------|:--------:|:--------|---------|---------|---------|------------| -| July 31st 2025 | 1.15.9
| 1.15.0 | Java 1.14.2, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.9 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.9) | -| July 18th 2025 | 1.15.8
| 1.15.0 | Java 1.14.2, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.8 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.8) | -| July 16th 2025 | 1.15.7
| 1.15.0 | Java 1.14.1, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.7 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.7) | -| June 20th 2025 | 1.15.6
| 1.15.0 | Java 1.14.1, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.6 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.6) | -| May 5th 2025 | 1.15.5
| 1.15.0 | Java 1.14.1, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.5 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.5) | -| April 4th 2025 | 1.15.4
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.4) | -| March 5rd 2025 | 1.15.3
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.3) | -| March 3rd 2025 | 1.15.2
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.0
JS 3.5.0
Rust 0.16 | 0.15.0 | Supported (current) | [v1.15.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.2) | -| February 28th 2025 | 1.15.1
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.0
JS 3.5.0
Rust 0.16 | 0.15.0 | Supported (current) | [v1.15.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.1) | +| Jan 9th 2026 | 1.16.6
| 1.16.5 | Java 1.16.0
Go 1.13.0
PHP 1.2.0
Python 1.16.0
.NET 1.16.0
JS 3.6.0
Rust 0.17.0 | 0.15.0 | Supported (current) | [v1.16.6 release notes](https://github.com/dapr/dapr/releases/tag/v1.16.6) | +| Dec 19th 2025 | 1.16.5
| 1.16.5 | Java 1.16.0
Go 1.13.0
PHP 1.2.0
Python 1.16.0
.NET 1.16.0
JS 3.6.0
Rust 0.17.0 | 0.15.0 | Supported (current) | [v1.16.5 release notes](https://github.com/dapr/dapr/releases/tag/v1.16.5) | +| Dec 8th 2025 | 1.16.4
| 1.16.5 | Java 1.16.0
Go 1.13.0
PHP 1.2.0
Python 1.16.0
.NET 1.16.0
JS 3.6.0
Rust 0.17.0 | 0.15.0 | Supported (current) | [v1.16.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.16.4) | +| Nov 21st 2025| 1.16.3
| 1.16.4 | Java 1.16.0
Go 1.13.0
PHP 1.2.0
Python 1.16.0
.NET 1.16.0
JS 3.6.0
Rust 0.17.0 | 0.15.0 | Supported (current) | [v1.16.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.16.3) | +| Oct 30th 2025 | 1.16.2
| 1.16.3 | Java 1.16.0
Go 1.13.0
PHP 1.2.0
Python 1.16.0
.NET 1.16.0
JS 3.6.0
Rust 0.17.0 | 0.15.0 | Supported (current) | [v1.16.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.16.2) | +| Oct 6th 2025 | 1.16.1
| 1.16.1 | Java 1.16.0
Go 1.13.0
PHP 1.2.0
Python 1.16.0
.NET 1.16.0
JS 3.6.0
Rust 0.17.0 | 0.15.0 | Supported (current) | [v1.16.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.16.1) | +| Sep 16th 2025 | 1.16.0
| 1.16.0 | Java 1.16.0
Go 1.13.0
PHP 1.2.0
Python 1.16.0
.NET 1.16.0
JS 3.6.0
Rust 0.17.0 | 0.15.0 | Supported (current) | [v1.16.0 release notes](https://github.com/dapr/dapr/releases/tag/v1.16.0) | +| Sep 17th 2025 | 1.15.12
| 1.15.0 | Java 1.14.2, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.12 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.12) | +| August 28th 2025 | 1.15.11
| 1.15.0 | Java 1.14.2, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.11 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.11) | +| August 21st 2025 | 1.15.10
| 1.15.0 | Java 1.14.2, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.10 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.10) | +| July 31st 2025 | 1.15.9
| 1.15.0 | Java 1.14.2, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.9 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.9) | +| July 18th 2025 | 1.15.8
| 1.15.0 | Java 1.14.2, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.8 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.8) | +| July 16th 2025 | 1.15.7
| 1.15.0 | Java 1.14.1, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.7 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.7) | +| June 20th 2025 | 1.15.6
| 1.15.0 | Java 1.14.1, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.6 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.6) | +| May 5th 2025 | 1.15.5
| 1.15.0 | Java 1.14.1, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.5 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.5) | +| April 4th 2025 | 1.15.4
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.4) |
+| March 5th 2025 | 1.15.3
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.3) | +| March 3rd 2025 | 1.15.2
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.0
JS 3.5.0
Rust 0.16 | 0.15.0 | Supported | [v1.15.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.2) | +| February 28th 2025 | 1.15.1
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.0
JS 3.5.0
Rust 0.16 | 0.15.0 | Supported | [v1.15.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.1) | | February 27th 2025 | 1.15.0
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.0
JS 3.5.0
Rust 0.16 | 0.15.0 | Supported | [v1.15.0 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.0) | | September 16th 2024 | 1.14.4
| 1.14.1 | Java 1.12.0
Go 1.11.0
PHP 1.2.0
Python 1.14.0
.NET 1.14.0
JS 3.3.1 | 0.15.0 | Supported | [v1.14.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.14.4) | | September 13th 2024 | 1.14.3
| 1.14.1 | Java 1.12.0
Go 1.11.0
PHP 1.2.0
Python 1.14.0
.NET 1.14.0
JS 3.3.1 | 0.15.0 | ⚠️ Recalled | [v1.14.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.14.3) | | September 6th 2024 | 1.14.2
| 1.14.1 | Java 1.12.0
Go 1.11.0
PHP 1.2.0
Python 1.14.0
.NET 1.14.0
JS 3.3.1 | 0.15.0 | Supported | [v1.14.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.14.2) | | August 14th 2024 | 1.14.1
| 1.14.1 | Java 1.12.0
Go 1.11.0
PHP 1.2.0
Python 1.14.0
.NET 1.14.0
JS 3.3.1 | 0.15.0 | Supported | [v1.14.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.14.1) | | August 14th 2024 | 1.14.0
| 1.14.0 | Java 1.12.0
Go 1.11.0
PHP 1.2.0
Python 1.14.0
.NET 1.14.0
JS 3.3.1 | 0.15.0 | Supported | [v1.14.0 release notes](https://github.com/dapr/dapr/releases/tag/v1.14.0) | -| May 29th 2024 | 1.13.4
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Supported | [v1.13.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.4) | -| May 21st 2024 | 1.13.3
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Supported | [v1.13.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.3) | -| April 3rd 2024 | 1.13.2
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Supported | [v1.13.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.2) | -| March 26th 2024 | 1.13.1
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Supported | [v1.13.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.1) | -| March 6th 2024 | 1.13.0
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Supported | [v1.13.0 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.0) | +| May 29th 2024 | 1.13.4
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Unsupported | [v1.13.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.4) | +| May 21st 2024 | 1.13.3
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Unsupported | [v1.13.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.3) | +| April 3rd 2024 | 1.13.2
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Unsupported | [v1.13.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.2) | +| March 26th 2024 | 1.13.1
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Unsupported | [v1.13.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.1) | +| March 6th 2024 | 1.13.0
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Unsupported | [v1.13.0 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.0) | | January 17th 2024 | 1.12.4
| 1.12.0 | Java 1.10.0
Go 1.9.1
PHP 1.2.0
Python 1.12.0
.NET 1.12.0
JS 3.2.0 | 0.14.0 | Unsupported | [v1.12.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.12.4) | | January 2nd 2024 | 1.12.3
| 1.12.0 | Java 1.10.0
Go 1.9.1
PHP 1.2.0
Python 1.12.0
.NET 1.12.0
JS 3.2.0 | 0.14.0 | Unsupported | [v1.12.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.12.3) | | November 18th 2023 | 1.12.2
| 1.12.0 | Java 1.10.0
Go 1.9.1
PHP 1.2.0
Python 1.12.0
.NET 1.12.0
JS 3.2.0 | 0.14.0 | Unsupported | [v1.12.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.12.2) | diff --git a/daprdocs/content/en/reference/api/actors_api.md b/daprdocs/content/en/reference/api/actors_api.md index 4fead8ee19d..288c4dcafb4 100644 --- a/daprdocs/content/en/reference/api/actors_api.md +++ b/daprdocs/content/en/reference/api/actors_api.md @@ -3,7 +3,7 @@ type: docs title: "Actors API reference" linkTitle: "Actors API" description: "Detailed documentation on the actors API" -weight: 600 +weight: 200 --- Dapr provides native, cross-platform, and cross-language virtual actor capabilities. diff --git a/daprdocs/content/en/reference/api/bindings_api.md b/daprdocs/content/en/reference/api/bindings_api.md index 81e13eecca7..8c63feb0a01 100644 --- a/daprdocs/content/en/reference/api/bindings_api.md +++ b/daprdocs/content/en/reference/api/bindings_api.md @@ -3,7 +3,7 @@ type: docs title: "Bindings API reference" linkTitle: "Bindings API" description: "Detailed documentation on the bindings API" -weight: 500 +weight: 300 --- Dapr provides bi-directional binding capabilities for applications and a consistent approach to interacting with different cloud/on-premise services or systems. 
diff --git a/daprdocs/content/en/reference/api/configuration_api.md b/daprdocs/content/en/reference/api/configuration_api.md index ef28fc42a8f..e09a5d9b7bd 100644 --- a/daprdocs/content/en/reference/api/configuration_api.md +++ b/daprdocs/content/en/reference/api/configuration_api.md @@ -3,7 +3,7 @@ type: docs title: "Configuration API reference" linkTitle: "Configuration API" description: "Detailed documentation on the configuration API" -weight: 800 +weight: 400 --- ## Get Configuration diff --git a/daprdocs/content/en/reference/api/conversation_api.md b/daprdocs/content/en/reference/api/conversation_api.md index 1a4e006b348..95980f41e10 100644 --- a/daprdocs/content/en/reference/api/conversation_api.md +++ b/daprdocs/content/en/reference/api/conversation_api.md @@ -3,21 +3,23 @@ type: docs title: "Conversation API reference" linkTitle: "Conversation API" description: "Detailed documentation on the conversation API" -weight: 1400 +weight: 500 --- {{% alert title="Alpha" color="primary" %}} The conversation API is currently in [alpha]({{% ref "certification-lifecycle.md#certification-levels" %}}). {{% /alert %}} -Dapr provides an API to interact with Large Language Models (LLMs) and enables critical performance and security functionality with features like prompt caching and PII data obfuscation. +Dapr provides an API to interact with Large Language Models (LLMs) and enables critical performance and security functionality with features like prompt caching, PII data obfuscation, and tool calling capabilities. + +Tool calling follows OpenAI's function calling format, making it easy to integrate with existing AI development workflows and tools. ## Converse -This endpoint lets you converse with LLMs. +This endpoint lets you converse with LLMs using the Alpha2 version of the API, which provides enhanced tool calling support and alignment with OpenAI's interface. 
``` -POST http://localhost:/v1.0-alpha1/conversation//converse +POST http://localhost:/v1.0-alpha2/conversation//converse ``` ### URL parameters @@ -30,35 +32,175 @@ POST http://localhost:/v1.0-alpha1/conversation//converse | Field | Description | | --------- | ----------- | +| `contextId` | The ID of an existing chat (like in ChatGPT). Optional | | `inputs` | Inputs for the conversation. Multiple inputs at one time are supported. Required | -| `cacheTTL` | A time-to-live value for a prompt cache to expire. Uses Golang duration format. Optional | -| `scrubPII` | A boolean value to enable obfuscation of sensitive information returning from the LLM. Set this value if all PII (across contents) in the request needs to be scrubbed. Optional | -| `temperature` | A float value to control the temperature of the model. Used to optimize for consistency and creativity. Optional | -| `metadata` | [Metadata](#metadata) passed to conversation components. Optional | +| `parameters` | Parameters for all custom fields. Optional | +| `metadata` | Metadata passed to conversation components. Optional | +| `scrubPii` | A boolean value to enable obfuscation of sensitive information returning from the LLM. Optional | +| `temperature` | A float value to control the temperature of the model. Used to optimize for consistency (0) or creativity (1). Optional | +| `tools` | Tools register the tools available to be used by the LLM during the conversation. Optional | +| `toolChoice` | Controls which (if any) tool is called by the model. Values: `auto`, `required`, or specific tool name. Defaults to `auto` if tools are present. Optional | #### Input body | Field | Description | | --------- | ----------- | -| `content` | The message content to send to the LLM. Required | -| `role` | The role for the LLM to assume. Possible values: 'user', 'tool', 'assistant' | -| `scrubPII` | A boolean value to enable obfuscation of sensitive information present in the content field. 
Set this value if PII for this specific content needs to be scrubbed exclusively. Optional | +| `messages` | Array of conversation messages. Required | +| `scrubPii` | A boolean value to enable obfuscation of sensitive information present in the content field. Optional | + +#### Message types + +The API supports different message types: + +| Type | Description | +| ---- | ----------- | +| `ofDeveloper` | Developer role messages with optional name and content | +| `ofSystem` | System role messages with optional name and content | +| `ofUser` | User role messages with optional name and content | +| `ofAssistant` | Assistant role messages with optional name, content, and tool calls | +| `ofTool` | Tool role messages with tool ID, name, and content | + + +#### Tool calling + +Tools can be defined using the `tools` field with function definitions: + +| Field | Description | +| --------- | ----------- | +| `function.name` | The name of the function to be called. Required | +| `function.description` | A description of what the function does. Optional | +| `function.parameters` | JSON Schema object describing the function parameters. Optional | + + +#### Tool choice options -### Request content example +The `toolChoice` is an optional parameter that controls how the model can use available tools: + +- **`auto`**: The model can pick between generating a message or calling one or more tools (default when tools are present) +- **`required`**: Requires one or more functions to be called +- **`{tool_name}`**: Forces the model to call a specific tool by name + + +#### Metadata +The `metadata` field serves as a dynamic configuration mechanism that allows you to pass additional configuration and authentication information to conversation components on a per-request basis. This metadata overrides any corresponding fields configured in the component's YAML configuration file, enabling dynamic configuration without modifying static component definitions. 
+ +**Common metadata fields:** + +| Field | Description | Example | +| ----- | ----------- | ------- | +| `api_key` | API key for authenticating with the LLM service | `"sk-1234567890abcdef"` | +| `model` | Specific model identifier | `"gpt-4-turbo"`, `"claude-3-sonnet"` | +| `version` | API version or service version | `"1.0"`, `"2023-12-01"` | +| `endpoint` | Custom endpoint URL for the service | `"https://api.custom-llm.com/v1"` | + +{{% alert title="Note" color="primary" %}} +The exact metadata fields supported depend on the specific conversation component implementation. Refer to the component's documentation for the complete list of supported metadata fields. +{{% /alert %}} + +In addition to passing metadata in the request body, you can also pass metadata as URL query parameters without modifying the request payload. Here is the format: + +- **Prefix**: All metadata parameters must be prefixed with `metadata.` +- **Format**: `?metadata.=` +- **Multiple parameters**: Separate with `&` (e.g., `?metadata.api_key=sk-123&metadata.model=gpt-4`) + +Example of model override: +```bash +POST http://localhost:3500/v1.0-alpha2/conversation/openai/converse?metadata.model=sk-gpt-4-turbo +``` + +URL metadata parameters are merged with request body metadata, URL parameters take precedence if conflicts exist, and both override component configuration in the YAML file. + +### Request content examples + +#### Basic conversation ```json -REQUEST = { - "inputs": [ - { - "content": "What is Dapr?", - "role": "user", // Optional - "scrubPII": "true", // Optional. Will obfuscate any sensitive information found in the content field - }, - ], - "cacheTTL": "10m", // Optional - "scrubPII": "true", // Optional. Will obfuscate any sensitive information returning from the LLM - "temperature": 0.5 // Optional. 
Optimizes for consistency (0) or creativity (1) -} +curl -X POST http://localhost:3500/v1.0-alpha2/conversation/openai/converse \ + -H "Content-Type: application/json" \ + -d '{ + "inputs": [ + { + "messages": [ + { + "ofUser": { + "content": [ + { + "text": "What is Dapr?" + } + ] + } + } + ] + } + ], + "parameters": {}, + "metadata": {} + }' +``` + +#### Conversation with tool calling + +```json +curl -X POST http://localhost:3500/v1.0-alpha2/conversation/openai/converse \ + -H "Content-Type: application/json" \ + -d '{ + "inputs": [ + { + "messages": [ + { + "ofUser": { + "content": [ + { + "text": "What is the weather like in San Francisco in celsius?" + } + ] + } + } + ], + "scrubPii": false + } + ], + "parameters": { + "max_tokens": { + "@type": "type.googleapis.com/google.protobuf.Int64Value", + "value": "100" + }, + "model": { + "@type": "type.googleapis.com/google.protobuf.StringValue", + "value": "claude-3-5-sonnet-20240620" + } + }, + "metadata": { + "api_key": "test-key", + "version": "1.0" + }, + "scrubPii": false, + "temperature": 0.7, + "tools": [ + { + "function": { + "name": "get_weather", + "description": "Get the current weather for a location", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g. San Francisco, CA" + }, + "unit": { + "type": "string", + "enum": ["celsius", "fahrenheit"], + "description": "The temperature unit to use" + } + }, + "required": ["location"] + } + } + } + ], + "toolChoice": "auto" + }' ``` ### HTTP response codes @@ -71,21 +213,61 @@ Code | Description ### Response content +#### Basic conversation response + ```json -RESPONSE = { - "outputs": { +{ + "outputs": [ { - "result": "Dapr is distribution application runtime ...", - "parameters": {}, - }, + "choices": [ + { + "finishReason": "stop", + "message": { + "content": "Distributed application runtime, open-source." 
+          }
+        }
+      ]
+    }
+  ]
+}
+```
+
+#### Tool calling response
+
+```json
+{
+  "outputs": [
     {
-      "result": "Dapr can help developers ...",
-      "parameters": {},
+      "choices": [
+        {
+          "finishReason": "tool_calls",
+          "message": {
+            "toolCalls": [
+              {
+                "id": "call_Uwa41pG0UqGA2zp0Fec0KwOq",
+                "function": {
+                  "name": "get_weather",
+                  "arguments": "{\"location\":\"San Francisco, CA\",\"unit\":\"celsius\"}"
+                }
+              }
+            ]
+          }
+        }
+      ]
     }
-  },
+  ]
 }
 ```
+
+## Legacy Alpha1 API
+
+The previous Alpha1 version of the API is still supported for backward compatibility but is deprecated. For new implementations, use the Alpha2 version described above.
+
+```
+POST http://localhost:/v1.0-alpha1/conversation//converse
+```
+
 ## Next steps
 
 - [Conversation API overview]({{% ref conversation-overview.md %}})
diff --git a/daprdocs/content/en/reference/api/cryptography_api.md b/daprdocs/content/en/reference/api/cryptography_api.md
index 163abe1d77a..985a247c9d9 100644
--- a/daprdocs/content/en/reference/api/cryptography_api.md
+++ b/daprdocs/content/en/reference/api/cryptography_api.md
@@ -3,7 +3,7 @@ type: docs
 title: "Cryptography API reference"
 linkTitle: "Cryptography API"
 description: "Detailed documentation on the cryptography API"
-weight: 1300
+weight: 600
 ---
 
 Dapr provides cross-platform and cross-language support for encryption and decryption support via the
diff --git a/daprdocs/content/en/reference/api/distributed_lock_api.md b/daprdocs/content/en/reference/api/distributed_lock_api.md
index 92914a03768..b4db657cee2 100644
--- a/daprdocs/content/en/reference/api/distributed_lock_api.md
+++ b/daprdocs/content/en/reference/api/distributed_lock_api.md
@@ -1,9 +1,9 @@
 ---
 type: docs
-title: "Distributed Lock API reference"
-linkTitle: "Distributed Lock API"
+title: "Distributed lock API reference"
+linkTitle: "Distributed lock API"
 description: "Detailed documentation on the distributed lock API"
-weight: 900
+weight: 700
 ---
 
 ## Lock
diff --git 
a/daprdocs/content/en/reference/api/health_api.md b/daprdocs/content/en/reference/api/health_api.md index 164e2dd6d0b..bb0e096074e 100644 --- a/daprdocs/content/en/reference/api/health_api.md +++ b/daprdocs/content/en/reference/api/health_api.md @@ -3,7 +3,7 @@ type: docs title: "Health API reference" linkTitle: "Health API" description: "Detailed documentation on the health API" -weight: 1000 +weight: 800 --- Dapr provides health checking probes that can be used as readiness or liveness of Dapr and for initialization readiness from SDKs. diff --git a/daprdocs/content/en/reference/api/jobs_api.md b/daprdocs/content/en/reference/api/jobs_api.md index aa4c29f940a..690fe834135 100644 --- a/daprdocs/content/en/reference/api/jobs_api.md +++ b/daprdocs/content/en/reference/api/jobs_api.md @@ -3,7 +3,7 @@ type: docs title: "Jobs API reference" linkTitle: "Jobs API" description: "Detailed documentation on the jobs API" -weight: 1300 +weight: 900 --- {{% alert title="Note" color="primary" %}} diff --git a/daprdocs/content/en/reference/api/metadata_api.md b/daprdocs/content/en/reference/api/metadata_api.md index dc5ed7fa953..ed77aca1b6c 100644 --- a/daprdocs/content/en/reference/api/metadata_api.md +++ b/daprdocs/content/en/reference/api/metadata_api.md @@ -3,7 +3,7 @@ type: docs title: "Metadata API reference" linkTitle: "Metadata API" description: "Detailed documentation on the Metadata API" -weight: 1100 +weight: 1000 --- Dapr has a metadata API that returns information about the sidecar allowing runtime discoverability. The metadata endpoint returns the following information. 
diff --git a/daprdocs/content/en/reference/api/placement_api.md b/daprdocs/content/en/reference/api/placement_api.md index de216c1f156..6b02dd2cbfb 100644 --- a/daprdocs/content/en/reference/api/placement_api.md +++ b/daprdocs/content/en/reference/api/placement_api.md @@ -3,7 +3,7 @@ type: docs title: "Placement API reference" linkTitle: "Placement API" description: "Detailed documentation on the Placement API" -weight: 1200 +weight: 1100 --- Dapr has an HTTP API `/placement/state` for Placement service that exposes placement table information. The API is exposed on the sidecar on the same port as the healthz. This is an unauthenticated endpoint, and is disabled by default. diff --git a/daprdocs/content/en/reference/api/pubsub_api.md b/daprdocs/content/en/reference/api/pubsub_api.md index d2cc67ab03e..32af3bb0d09 100644 --- a/daprdocs/content/en/reference/api/pubsub_api.md +++ b/daprdocs/content/en/reference/api/pubsub_api.md @@ -3,7 +3,7 @@ type: docs title: "Pub/sub API reference" linkTitle: "Pub/Sub API" description: "Detailed documentation on the pub/sub API" -weight: 200 +weight: 1200 --- ## Publish a message to a given topic diff --git a/daprdocs/content/en/reference/api/secrets_api.md b/daprdocs/content/en/reference/api/secrets_api.md index 752736f5fe4..6561823ec90 100644 --- a/daprdocs/content/en/reference/api/secrets_api.md +++ b/daprdocs/content/en/reference/api/secrets_api.md @@ -3,7 +3,7 @@ type: docs title: "Secrets API reference" linkTitle: "Secrets API" description: "Detailed documentation on the secrets API" -weight: 700 +weight: 1300 --- ## Get Secret diff --git a/daprdocs/content/en/reference/api/service_invocation_api.md b/daprdocs/content/en/reference/api/service_invocation_api.md index cc46d982488..811d0021ac8 100644 --- a/daprdocs/content/en/reference/api/service_invocation_api.md +++ b/daprdocs/content/en/reference/api/service_invocation_api.md @@ -3,7 +3,7 @@ type: docs title: "Service invocation API reference" linkTitle: "Service invocation 
API" description: "Detailed documentation on the service invocation API" -weight: 100 +weight: 1400 --- Dapr provides users with the ability to call other applications that are using Dapr with a unique named identifier (appId), or HTTP endpoints that are not using Dapr. diff --git a/daprdocs/content/en/reference/api/state_api.md b/daprdocs/content/en/reference/api/state_api.md index 328ffec80fd..bf6e2c15272 100644 --- a/daprdocs/content/en/reference/api/state_api.md +++ b/daprdocs/content/en/reference/api/state_api.md @@ -3,7 +3,7 @@ type: docs title: "State management API reference" linkTitle: "State management API" description: "Detailed documentation on the state management API" -weight: 400 +weight: 1500 --- ## Component file diff --git a/daprdocs/content/en/reference/api/workflow_api.md b/daprdocs/content/en/reference/api/workflow_api.md index 85ee207162d..f84996444f6 100644 --- a/daprdocs/content/en/reference/api/workflow_api.md +++ b/daprdocs/content/en/reference/api/workflow_api.md @@ -3,7 +3,7 @@ type: docs title: "Workflow API reference" linkTitle: "Workflow API" description: "Detailed documentation on the workflow API" -weight: 300 +weight: 1600 --- Dapr provides users with the ability to interact with workflows through its built-in workflow engine, which is implemented using Dapr Actors. This workflow engine is accessed using the name `dapr` in API calls as the `workflowComponentName`. diff --git a/daprdocs/content/en/reference/cli/dapr-scheduler.md b/daprdocs/content/en/reference/cli/dapr-scheduler.md new file mode 100644 index 00000000000..61d40f190bb --- /dev/null +++ b/daprdocs/content/en/reference/cli/dapr-scheduler.md @@ -0,0 +1,156 @@ +--- +type: docs +title: "dapr scheduler" +linkTitle: "scheduler" +description: "Manage Dapr Scheduler jobs and reminders using the dapr CLI" +weight: 3000 +--- + +# dapr scheduler + +Manage scheduled jobs and reminders stored in the Dapr Scheduler. 
+ +``` bash +dapr scheduler [command] +``` + +## Aliases +- `scheduler` +- `sched` + +## Available Commands + +- [list](#dapr-scheduler-list): List scheduled jobs +- [get](#dapr-scheduler-get): Get a scheduled job by key +- [delete](#dapr-scheduler-delete): Delete a scheduled job by key +- [delete-all](#dapr-scheduler-delete-all): Delete all scheduled jobs by key prefix +- [export](#dapr-scheduler-export): Export all scheduled jobs to a file +- [import](#dapr-scheduler-import): Import scheduled jobs from a file + + +## Global Flags + +| Flag | Description | +|------|-------------| +| -k, --kubernetes | Perform operation on a Kubernetes Dapr cluster | +| -n, --namespace string | Namespace of the Dapr app (default "default") | +| --scheduler-namespace string | Namespace where the scheduler runs (default "dapr-system") | + +## dapr scheduler list + +List scheduled jobs in Scheduler. + +```bash +dapr scheduler list [flags] +``` + +### Flags + +- `--filter string` – Filter jobs by type. One of: all, app, actor, workflow, activity (default all) +- `-o, --output string` – Output format: short, wide, yaml, json (default short) + +### Examples + +```bash +$ dapr scheduler list +NAME BEGIN COUNT LAST TRIGGER +actor/myactortype/actorid1/test1 -3.89s 1 2025-10-03T16:58:55Z +actor/myactortype/actorid2/test2 -3.89s 1 2025-10-03T16:58:55Z +app/test-scheduler/test1 -3.89s 1 2025-10-03T16:58:55Z +app/test-scheduler/test2 -3.89s 1 2025-10-03T16:58:55Z +activity/test-scheduler/xyz1::0::1 -888.8ms 0 +activity/test-scheduler/xyz2::0::1 -888.8ms 0 +workflow/test-scheduler/abc1/timer-0-TVIQGkvu +50.0h 0 +workflow/test-scheduler/abc2/timer-0-OM2xqG9m +50.0h 0 +``` + +```bash +$ dapr scheduler list -o wide +NAMESPACE NAME BEGIN EXPIRATION SCHEDULE DUE TIME TTL REPEATS COUNT LAST TRIGGER +default actor/myactortype/actorid1/test1 2025-10-03T16:58:55Z @every 2h46m40s 2025-10-03T17:58:55+01:00 100 1 2025-10-03T16:58:55Z +default actor/myactortype/actorid2/test2 2025-10-03T16:58:55Z @every 2h46m40s 
2025-10-03T17:58:55+01:00 100 1 2025-10-03T16:58:55Z +default app/test-scheduler/test1 2025-10-03T16:58:55Z @every 100m 2025-10-03T17:58:55+01:00 1234 1 2025-10-03T16:58:55Z +default app/test-scheduler/test2 2025-10-03T16:58:55Z 2025-10-03T19:45:35Z @every 100m 2025-10-03T17:58:55+01:00 10000s 56788 1 2025-10-03T16:58:55Z +default activity/test-scheduler/xyz1::0::1 2025-10-03T16:58:58Z 0s 0 +default activity/test-scheduler/xyz2::0::1 2025-10-03T16:58:58Z 0s 0 +default workflow/test-scheduler/abc1/timer-0-TVIQGkvu 2025-10-05T18:58:58Z 2025-10-05T18:58:58Z 0 +default workflow/test-scheduler/abc2/timer-0-OM2xqG9m 2025-10-05T18:58:58Z 2025-10-05T18:58:58Z 0 +``` + +## dapr scheduler get + +Get one or more scheduled jobs/reminders by key. + +```bash +dapr scheduler get <key> [<key>...] [flags] +``` + +### Key formats + +- App job: `app/<app-id>/<job-name>` +- Actor reminder: `actor/<actor-type>/<actor-id>/<reminder-name>` +- Workflow reminder: `workflow/<app-id>/<instance-id>/<reminder-name>` +- Activity reminder: `activity/<app-id>/<activity-id>` + +### Flags + +- `-o, --output string` – Output format: `short`, `wide`, `yaml`, `json` (default `short`) + +### Examples + +```bash +dapr scheduler get app/my-app/job1 -o yaml +``` + +## dapr scheduler delete + +Delete one or more jobs. + +```bash +dapr scheduler delete <key> [<key>...] +``` + +### Aliases +- `delete`, `d`, `del` + +### Examples + +```bash +dapr scheduler delete app/my-app/job1 actor/MyActor/123/reminder1 +``` + +## dapr scheduler delete-all + +Bulk delete jobs by filter key. + +```bash +dapr scheduler delete-all <filter> +``` + +### Aliases + +- `delete-all`, `da`, `delall` + +### Examples + +```bash +dapr scheduler delete-all all +dapr scheduler delete-all app/my-app +dapr scheduler delete-all actor/MyActorType +``` + +## dapr scheduler export + +Export all jobs and reminders to a file. + +```bash +dapr scheduler export -o backup.bin +``` + +## dapr scheduler import + +Import jobs and reminders from a file. 
+ +```bash +dapr scheduler import -f backup.bin +``` + diff --git a/daprdocs/content/en/reference/cli/dapr-workflow.md b/daprdocs/content/en/reference/cli/dapr-workflow.md new file mode 100644 index 00000000000..f490a8a7ecd --- /dev/null +++ b/daprdocs/content/en/reference/cli/dapr-workflow.md @@ -0,0 +1,217 @@ +--- +type: docs +title: "workflow CLI command" +linkTitle: "workflow" +description: "Detailed information on the workflow CLI command" +--- + +Manage Dapr workflow instances. + +## Commands + +| Command | Description | +|---------|-------------| +| dapr workflow run | Start a new workflow instance | +| dapr workflow list | List workflow instances | +| dapr workflow history | Get workflow execution history | +| dapr workflow purge | Purge workflow instances | +| dapr workflow suspend | Suspend a workflow | +| dapr workflow resume | Resume a workflow | +| dapr workflow terminate | Terminate a workflow | +| dapr workflow raise-event | Raise an external event | +| dapr workflow rerun | Re-run a workflow | + +## Flags + +``` + -a, --app-id string The app ID owner of the workflow instance + -h, --help help for workflow + -k, --kubernetes Target a Kubernetes dapr installation + -n, --namespace string Namespace to perform workflow operation on (default "default") +``` + +## Examples + +### List workflows +```bash +dapr workflow list --app-id myapp +``` + +### Start a workflow +```bash +dapr workflow run MyWorkflow --app-id myapp --input '{"key": "value"}' +``` + +### Kubernetes mode +```bash +dapr workflow list -k -n production --app-id myapp +``` + +## List workflow instances for a given application. 
+ +## Usage + +```bash +dapr workflow list [flags] +``` + +## Flags + +| Name | Type | Description | +|------|------|-------------| +| `--app-id`, `-a` | string | (Required) The app ID owner of the workflow instances | +| `--filter-name`, `-w` | string | Filter workflows by name | +| `--filter-status`, `-s` | string | Filter by status: RUNNING, COMPLETED, FAILED, CANCELED, TERMINATED, PENDING, SUSPENDED | +| `--filter-max-age`, `-m` | string | Filter workflows started within duration or timestamp (e.g., "300ms", "1.5h", "2023-01-02T15:04:05") | +| `--output`, `-o` | string | Output format: short, wide, yaml, json (default "short") | +| `--connection-string`, `-c` | string | Connection string to the actor state store | +| `--table-name`, `-t` | string | Table or collection name used as the actor state store | +| `--kubernetes`, `-k` | bool | Target a Kubernetes Dapr installation | +| `--namespace`, `-n` | string | Kubernetes namespace (default "default") | + +## Examples + +### Basic usage +```bash +dapr workflow list --app-id myapp +``` + +### Filter by status +```bash +dapr workflow list --app-id myapp --filter-status RUNNING +``` + +### Filter by workflow name +```bash +dapr workflow list --app-id myapp --filter-name OrderProcessing +``` + +### Filter by age +```bash +# Workflows from last 24 hours +dapr workflow list --app-id myapp --filter-max-age 24h + +# Workflows after specific date +dapr workflow list --app-id myapp --filter-max-age 2024-01-01T00:00:00Z +``` + +### JSON output +```bash +dapr workflow list --app-id myapp --output json +``` + +### Kubernetes with port forwarding +```bash +# Terminal 1: Port forward to database +kubectl port-forward service/postgres 5432:5432 -n production + +# Terminal 2: List workflows with direct database access +dapr workflow list \ + --kubernetes \ + --namespace production \ + --app-id myapp \ + --connection-string "host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable" \ + --table-name workflows 
+``` + +## Connection String Formats + +### PostgreSQL / CockroachDB +``` +host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable +``` + +### MySQL +``` +dapr:dapr@tcp(localhost:3306)/dapr?parseTime=true +``` + +### SQL Server +``` +sqlserver://dapr:Pass@word@localhost:1433?database=dapr +``` + +### MongoDB +``` +mongodb://localhost:27017/dapr +``` + +### Redis +``` +redis[s]://[[username][:password]@][host][:port][/db-number]: +``` + +## Purge workflow instances with terminal states (COMPLETED, FAILED, TERMINATED). + +## Usage + +```bash +dapr workflow purge [instance-id] [flags] +``` + +## Flags + +| Name | Type | Description | +|------|------|-------------| +| `--app-id`, `-a` | string | (Required) The app ID owner of the workflow instances | +| `--all` | bool | Purge all terminal workflow instances (use with caution) | +| `--all-older-than` | string | Purge instances older than duration or timestamp (e.g., "24h", "2023-01-02T15:04:05Z") | +| `--connection-string`, `-c` | string | Connection string to the actor state store | +| `--table-name`, `-t` | string | Table or collection name used as the actor state store | +| `--kubernetes`, `-k` | bool | Target a Kubernetes Dapr installation | +| `--namespace`, `-n` | string | Kubernetes namespace (default "default") | + +## Examples + +### Purge a specific instance +```bash +dapr workflow purge wf-12345 --app-id myapp +``` + +### Purge instances older than 30 days +```bash +dapr workflow purge --app-id myapp --all-older-than 720h +``` + +### Purge instances older than specific date +```bash +dapr workflow purge --app-id myapp --all-older-than 2023-12-01T00:00:00Z +``` + +### Purge all terminal instances (dangerous!) 
+```bash +dapr workflow purge --app-id myapp --all +``` + +### Kubernetes with database access +```bash +# Port forward to database +kubectl port-forward service/postgres 5432:5432 -n production + +# Purge old workflows +dapr workflow purge \ + --kubernetes \ + --namespace production \ + --app-id myapp \ + --connection-string "host=localhost user=dapr password=dapr dbname=dapr port=5432 sslmode=disable" \ + --table-name workflows \ + --all-older-than 2160h # 90 days +``` + +## Best Practices + +1. **Regular Cleanup**: Schedule periodic purge operations + ```bash + # Cron job to purge workflows older than 90 days + 0 2 * * 0 dapr workflow purge --app-id myapp --all-older-than 2160h + ``` + +2. **Test First**: Use list command to see what will be purged + ```bash + dapr workflow list --app-id myapp --filter-status COMPLETED --filter-max-age 2160h + ``` + +3. **Backup Before Bulk Purge**: Export data before using `--all` + ```bash + dapr workflow list --app-id myapp --output json > backup.json + ``` diff --git a/daprdocs/content/en/reference/components-reference/supported-bindings/_index.md b/daprdocs/content/en/reference/components-reference/supported-bindings/_index.md index bca14fe0607..0f61294f835 100644 --- a/daprdocs/content/en/reference/components-reference/supported-bindings/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-bindings/_index.md @@ -2,7 +2,7 @@ type: docs title: "Bindings component specs" linkTitle: "Bindings" -weight: 4000 +weight: 1000 description: The supported external bindings that interface with Dapr aliases: - "/operations/components/setup-bindings/supported-bindings/" diff --git a/daprdocs/content/en/reference/components-reference/supported-bindings/eventhubs.md b/daprdocs/content/en/reference/components-reference/supported-bindings/eventhubs.md index 989f93ab366..227b9973785 100644 --- a/daprdocs/content/en/reference/components-reference/supported-bindings/eventhubs.md +++ 
b/daprdocs/content/en/reference/components-reference/supported-bindings/eventhubs.md @@ -70,24 +70,24 @@ The above example uses secrets as plain strings. It is recommended to use a secr ## Spec metadata fields -| Field | Required | Binding support | Details | Example | -|--------------------|:--------:|------------|-----|---------| -| `eventHub` | Y* | Input/Output | The name of the Event Hubs hub ("topic"). Required if using Microsoft Entra ID authentication or if the connection string doesn't contain an `EntityPath` value | `mytopic` | -| `connectionString` | Y* | Input/Output | Connection string for the Event Hub or the Event Hub namespace.
* Mutally exclusive with `eventHubNamespace` field.
* Required when not using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key};EntityPath={EventHub}"` or `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key}"` -| `eventHubNamespace` | Y* | Input/Output | The Event Hub Namespace name.
* Mutally exclusive with `connectionString` field.
* Required when using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"namespace"` -| `enableEntityManagement` | N | Input/Output | Boolean value to allow management of the EventHub namespace and storage account. Default: `false` | `"true"`, `"false"` -| `enableInOrderMessageDelivery` | N | Input/Output | Boolean value to allow messages to be delivered in the order in which they were posted. This assumes `partitionKey` is set when publishing or posting to ensure ordering across partitions. Default: `false` | `"true"`, `"false"` -| `resourceGroupName` | N | Input/Output | Name of the resource group the Event Hub namespace is part of. Required when entity management is enabled | `"test-rg"` -| `subscriptionID` | N | Input/Output | Azure subscription ID value. Required when entity management is enabled | `"azure subscription id"` -| `partitionCount` | N | Input/Output | Number of partitions for the new Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"2"` -| `messageRetentionInDays` | N | Input/Output | Number of days to retain messages for in the newly created Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"90"` -| `consumerGroup` | Y | Input | The name of the [Event Hubs Consumer Group](https://docs.microsoft.com/azure/event-hubs/event-hubs-features#consumer-groups) to listen on | `"group1"` | -| `storageAccountName` | Y | Input | Storage account name to use for the checkpoint store. |`"myeventhubstorage"` -| `storageAccountKey` | Y* | Input | Storage account key for the checkpoint store account.
* When using Microsoft Entra ID, it's possible to omit this if the service principal has access to the storage account too. | `"112233445566778899"` -| `storageConnectionString` | Y* | Input | Connection string for the checkpoint store, alternative to specifying `storageAccountKey` | `"DefaultEndpointsProtocol=https;AccountName=myeventhubstorage;AccountKey="` -| `storageContainerName` | Y | Input | Storage container name for the storage account name. | `"myeventhubstoragecontainer"` -| `getAllMessageProperties` | N | Input | When set to `true`, retrieves all user/app/custom properties from the Event Hub message and forwards them in the returned event metadata. Default setting is `"false"`. | `"true"`, `"false"` -| `direction` | N | Input/Output | The direction of the binding. | `"input"`, `"output"`, `"input, output"` +| Field | Required | Binding support | Details | Example | +|--------------------|:--------:|------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------| +| `eventHub` | Y* | Input/Output | The name of the Event Hubs hub ("topic"). Required if using Microsoft Entra ID authentication or if the connection string doesn't contain an `EntityPath` value | `mytopic` | +| `connectionString` | Y* | Input/Output | Connection string for the Event Hub or the Event Hub namespace.
* Mutually exclusive with `eventHubNamespace` field.
* Required when not using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key};EntityPath={EventHub}"` or `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key}"` +| `eventHubNamespace` | Y* | Input/Output | The Event Hub Namespace name.
* Mutually exclusive with `connectionString` field.
* Required when using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"namespace"` +| `enableEntityManagement` | N | Input/Output | Boolean value to allow management of the EventHub namespace and storage account. Default: `false` | `"true"`, `"false"` +| `enableInOrderMessageDelivery` | N | Input/Output | Boolean value to allow messages to be delivered in the order in which they were posted. This assumes `partitionKey` is set when publishing or posting to ensure ordering across partitions. Default: `false` | `"true"`, `"false"` +| `resourceGroupName` | N | Input/Output | Name of the resource group the Event Hub namespace is part of. Required when entity management is enabled | `"test-rg"` +| `subscriptionID` | N | Input/Output | Azure subscription ID value. Required when entity management is enabled | `"azure subscription id"` +| `partitionCount` | N | Input/Output | Number of partitions for the new Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"2"` +| `messageRetentionInDays` | N | Input/Output | Number of days to retain messages for in the newly created Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"90"` +| `consumerGroup` | Y | Input | The name of the [Event Hubs Consumer Group](https://docs.microsoft.com/azure/event-hubs/event-hubs-features#consumer-groups) to listen on | `"group1"` | +| `storageAccountName` | Y | Input | Storage account name to use for the checkpoint store. |`"myeventhubstorage"` +| `storageAccountKey` | Y* | Input | Storage account key for the checkpoint store account.
* When using Microsoft Entra ID, it's possible to omit this if the service principal has access to the storage account too. | `"112233445566778899"` +| `storageConnectionString` | Y* | Input | Connection string for the checkpoint store, alternative to specifying `storageAccountKey` | `"DefaultEndpointsProtocol=https;AccountName=myeventhubstorage;AccountKey="` +| `storageContainerName` | Y | Input | Storage container name for the storage account name. | `"myeventhubstoragecontainer"` +| `getAllMessageProperties` | N | Input | When set to `true`, retrieves all user/app/custom properties from the Event Hub message and forwards them in the returned event metadata. Default setting is `"false"`. | `"true"`, `"false"` +| `direction` | N | Input/Output | The direction of the binding. | `"input"`, `"output"`, `"input, output"` ### Microsoft Entra ID authentication diff --git a/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md b/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md index 91b17c196a7..52170d484c0 100644 --- a/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md +++ b/daprdocs/content/en/reference/components-reference/supported-bindings/redis.md @@ -38,14 +38,14 @@ The above example uses secrets as plain strings. It is recommended to use a secr | Field | Required | Binding support | Details | Example | |--------------------|:--------:|------------|-----|---------| -| `redisHost` | Y | Output | The Redis host address | `"localhost:6379"` | +| `redisHost` | Y | Output | Connection-string for the redis host. If `"redisType"` is `"cluster"`, it can be multiple hosts separated by commas or just a single host. When using Redis Sentinel (`"failover"` is `"true"`), multiple sentinel addresses can also be provided as comma-separated values. 
| `localhost:6379`, `redis-master.default.svc.cluster.local:6379`, `sentinel1:26379,sentinel2:26379,sentinel3:26379` | | `redisPassword` | N | Output | The Redis password | `"password"` | | `redisUsername` | N | Output | Username for Redis host. Defaults to empty. Make sure your redis server version is 6 or above, and have created acl rule correctly. | `"username"` | | `useEntraID` | N | Output | Implements EntraID support for Azure Cache for Redis. Before enabling this:
  • The `redisHost` name must be specified in the form of `"server:port"`
  • TLS must be enabled
Learn more about this setting under [Create a Redis instance > Azure Cache for Redis]({{% ref "#create-a-redis-instance" %}}) | `"true"`, `"false"` | | `enableTLS` | N | Output | If the Redis instance supports TLS with public certificates it can be configured to enable or disable TLS. Defaults to `"false"` | `"true"`, `"false"` | | `clientCert` | N | Output | The content of the client certificate, used for Redis instances that require client-side certificates. Must be used with `clientKey` and `enableTLS` must be set to true. It is recommended to use a secret store as described [here]({{% ref component-secrets.md %}}) | `"----BEGIN CERTIFICATE-----\nMIIC..."` | | `clientKey` | N | Output | The content of the client private key, used in conjunction with `clientCert` for authentication. It is recommended to use a secret store as described [here]({{% ref component-secrets.md %}}) | `"----BEGIN PRIVATE KEY-----\nMIIE..."` | -| `failover` | N | Output | Property to enable failover configuration. Needs sentinelMasterName to be set. Defaults to `"false"` | `"true"`, `"false"` +| `failover` | N | Output | Property to enable failover configuration. Needs sentinelMasterName to be set. When enabled, redisHost should contain the sentinel addresses. Defaults to `"false"` | `"true"`, `"false"` | `sentinelMasterName` | N | Output | The sentinel master name. See [Redis Sentinel Documentation](https://redis.io/docs/reference/sentinel-clients/) | `""`, `"mymaster"` | `sentinelUsername` | N | Output | Username for Redis Sentinel. Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"username"` | `sentinelPassword` | N | Output | Password for Redis Sentinel. 
Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"password"` @@ -249,6 +249,28 @@ You can use [Helm](https://helm.sh/) to quickly create a Redis instance in our K The Dapr CLI automatically deploys a local redis instance in self hosted mode as part of the `dapr init` command. {{% /alert %}} +## Redis Sentinel configuration + +When using Redis Sentinel for high availability, set `redisType` to `"node"`, enable failover mode with `failover: "true"`, and provide the sentinel master name. Multiple sentinel addresses can be specified as a comma-separated list in the `redisHost` field for redundancy. + + ```yaml + apiVersion: dapr.io/v1alpha1 + kind: Component + metadata: + name: redis-binding + spec: + type: bindings.redis + version: v1 + metadata: + - name: redisHost + value: "sentinel1:26379,sentinel2:26379,sentinel3:26379" + - name: redisType + value: "node" + - name: failover + value: "true" + - name: sentinelMasterName + value: "mymaster" + ``` ## Related links diff --git a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/_index.md b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/_index.md index b8e80f12216..64e4ebf7366 100644 --- a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/_index.md @@ -2,7 +2,7 @@ type: docs title: "Configuration store component specs" linkTitle: "Configuration stores" -weight: 6000 +weight: 2000 description: The supported configuration stores that interface with Dapr aliases: - "/operations/components/setup-configuration-store/supported-configuration-stores/" diff --git a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/azure-appconfig-configuration-store.md 
b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/azure-appconfig-configuration-store.md index 3db6a4bba94..b2e2c3919d3 100644 --- a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/azure-appconfig-configuration-store.md +++ b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/azure-appconfig-configuration-store.md @@ -50,14 +50,14 @@ The above example uses secrets as plain strings. It is recommended to use a secr ## Spec metadata fields -| Field | Required | Details | Example | -|----------------------------|:--------:|---------|---------| -| connectionString | Y* | Connection String for the Azure App Configuration instance. No Default. Can be `secretKeyRef` to use a secret reference. *Mutally exclusive with host field. *Not to be used when [Azure Authentication](https://docs.dapr.io/developing-applications/integrations/azure/azure-authentication/authenticating-azure/) is used | `Endpoint=https://foo.azconfig.io;Id=osOX-l9-s0:sig;Secret=00000000000000000000000000000000000000000000` -| host | N* | Endpoint for the Azure App Configuration instance. No Default. *Mutally exclusive with connectionString field. *To be used when [Azure Authentication](https://docs.dapr.io/developing-applications/integrations/azure/azure-authentication/authenticating-azure/) is used | `https://dapr.azconfig.io` -| maxRetries | N | Maximum number of retries before giving up. Defaults to `3` | `5`, `10` -| retryDelay | N | RetryDelay specifies the initial amount of delay to use before retrying an operation. The delay increases exponentially with each retry up to the maximum specified by MaxRetryDelay. Defaults to `4` seconds; `"-1"` disables delay between retries. | `4s` -| maxRetryDelay | N | MaxRetryDelay specifies the maximum delay allowed before retrying an operation. Typically the value is greater than or equal to the value specified in RetryDelay. 
Defaults to `120` seconds; `"-1"` disables the limit | `120s` -| subscribePollInterval | N | subscribePollInterval specifies the poll interval in nanoseconds for polling the subscribed keys for any changes. This will be updated in the future to Go Time format. Default polling interval is set to `24` hours. | `24h` +| Field | Required | Details | Example | +|----------------------------|:--------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------| +| connectionString | Y* | Connection String for the Azure App Configuration instance. No Default. Can be `secretKeyRef` to use a secret reference. *Mutually exclusive with host field. *Not to be used when [Azure Authentication](https://docs.dapr.io/developing-applications/integrations/azure/azure-authentication/authenticating-azure/) is used | `Endpoint=https://foo.azconfig.io;Id=osOX-l9-s0:sig;Secret=00000000000000000000000000000000000000000000` +| host | N* | Endpoint for the Azure App Configuration instance. No Default. *Mutually exclusive with connectionString field. *To be used when [Azure Authentication](https://docs.dapr.io/developing-applications/integrations/azure/azure-authentication/authenticating-azure/) is used | `https://dapr.azconfig.io` +| maxRetries | N | Maximum number of retries before giving up. Defaults to `3` | `5`, `10` +| retryDelay | N | RetryDelay specifies the initial amount of delay to use before retrying an operation. The delay increases exponentially with each retry up to the maximum specified by MaxRetryDelay. Defaults to `4` seconds; `"-1"` disables delay between retries. | `4s` +| maxRetryDelay | N | MaxRetryDelay specifies the maximum delay allowed before retrying an operation. 
Typically the value is greater than or equal to the value specified in RetryDelay. Defaults to `120` seconds; `"-1"` disables the limit | `120s` +| subscribePollInterval | N | subscribePollInterval specifies the poll interval in nanoseconds for polling the subscribed keys for any changes. This will be updated in the future to Go Time format. Default polling interval is set to `24` hours. | `24h` **Note**: either `host` or `connectionString` must be specified. diff --git a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/redis-configuration-store.md b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/redis-configuration-store.md index e65ddac614f..09d935512c0 100644 --- a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/redis-configuration-store.md +++ b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/redis-configuration-store.md @@ -39,13 +39,13 @@ The above example uses secrets as plain strings. It is recommended to use a secr | Field | Required | Details | Example | |--------------------|:--------:|---------|---------| -| redisHost | Y | Output | The Redis host address | `"localhost:6379"` | +| redisHost | Y | Output | Connection-string for the redis host. If `"redisType"` is `"cluster"`, it can be multiple hosts separated by commas or just a single host. When using Redis Sentinel (`"failover"` is `"true"`), multiple sentinel addresses can also be provided as comma-separated values. | `localhost:6379`, `redis-master.default.svc.cluster.local:6379`, `sentinel1:26379,sentinel2:26379,sentinel3:26379` | | redisPassword | N | Output | The Redis password | `"password"` | | redisUsername | N | Output | Username for Redis host. Defaults to empty. Make sure your Redis server version is 6 or above, and have created acl rule correctly. 
| `"username"` | | enableTLS | N | Output | If the Redis instance supports TLS with public certificates it can be configured to enable or disable TLS. Defaults to `"false"` | `"true"`, `"false"` | | clientCert | N | Output | The content of the client certificate, used for Redis instances that require client-side certificates. Must be used with `clientKey` and `enableTLS` must be set to true. It is recommended to use a secret store as described [here]({{% ref component-secrets.md %}}) | `"----BEGIN CERTIFICATE-----\nMIIC..."` | | clientKey | N | Output | The content of the client private key, used in conjunction with `clientCert` for authentication. It is recommended to use a secret store as described [here]({{% ref component-secrets.md %}}) | `"----BEGIN PRIVATE KEY-----\nMIIE..."` | -| failover | N | Output | Property to enable failover configuration. Needs sentinelMasterName to be set. Defaults to `"false"` | `"true"`, `"false"` +| failover | N | Output | Property to enable failover configuration. Needs sentinelMasterName to be set. When enabled, redisHost should contain the sentinel addresses. Defaults to `"false"` | `"true"`, `"false"` | sentinelMasterName | N | Output | The Sentinel master name. See [Redis Sentinel Documentation](https://redis.io/docs/reference/sentinel-clients/) | `""`, `"mymaster"` | sentinelUsername | N | Output | Username for Redis Sentinel. Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"username"` | sentinelPassword | N | Output | Password for Redis Sentinel. Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"password"` @@ -143,6 +143,29 @@ You can use [Helm](https://helm.sh/) to quickly create a Redis instance in our K {{< /tabpane >}} +## Redis Sentinel configuration + +When using Redis Sentinel for high availability, set `redisType` to `"node"`, enable failover mode with `failover: "true"`, and provide the sentinel master name. 
Multiple sentinel addresses can be specified as a comma-separated list in the `redisHost` field for redundancy. + + ```yaml + apiVersion: dapr.io/v1alpha1 + kind: Component + metadata: + name: redis-configstore + spec: + type: configuration.redis + version: v1 + metadata: + - name: redisHost + value: "sentinel1:26379,sentinel2:26379,sentinel3:26379" + - name: redisType + value: "node" + - name: failover + value: "true" + - name: sentinelMasterName + value: "mymaster" + ``` + ## Related links - [Basic schema for a Dapr component]({{% ref component-schema %}}) - Read [How-To: Manage configuration from a store]({{% ref "howto-manage-configuration" %}}) for instructions on how to use Redis as a configuration store. diff --git a/daprdocs/content/en/reference/components-reference/supported-conversation/_index.md b/daprdocs/content/en/reference/components-reference/supported-conversation/_index.md index 179162b3bb2..c6c862960b9 100644 --- a/daprdocs/content/en/reference/components-reference/supported-conversation/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-conversation/_index.md @@ -2,7 +2,7 @@ type: docs title: "Conversation component specs" linkTitle: "Conversation" -weight: 9000 +weight: 3000 description: The supported conversation components that interface with Dapr no_list: true --- diff --git a/daprdocs/content/en/reference/components-reference/supported-conversation/openai.md b/daprdocs/content/en/reference/components-reference/supported-conversation/openai.md index 795f9877909..f1c29e2b5f3 100644 --- a/daprdocs/content/en/reference/components-reference/supported-conversation/openai.md +++ b/daprdocs/content/en/reference/components-reference/supported-conversation/openai.md @@ -46,6 +46,48 @@ The above example uses secrets as plain strings. It is recommended to use a secr | `apiType` | N | Specifies the API provider type. Required when using a provider that does not follow the default OpenAI API endpoint conventions. 
| `azure` | | `apiVersion`| N | The API version to use. Required when the `apiType` is set to `azure`. | `2025-04-01-preview` | +## Azure OpenAI Configuration + +To configure the OpenAI component to connect to Azure OpenAI, you need to set the following metadata fields which are required for Azure's API format. + +### Required fields for Azure OpenAI + +When connecting to Azure OpenAI, the following fields are **required**: + +- `apiType`: Must be set to `azure` to enable Azure OpenAI compatibility +- `endpoint`: Your Azure OpenAI resource endpoint URL (e.g., `https://your-resource.openai.azure.com/`) +- `apiVersion`: The API version for your Azure OpenAI deployment (e.g., `2025-01-01-preview`) +- `key`: Your Azure OpenAI API key + +Get your configuration values from: https://ai.azure.com/ + +### Azure OpenAI component example + +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: azure-openai +spec: + type: conversation.openai + metadata: + - name: key + value: "your-azure-openai-api-key" + - name: model + value: "gpt-4.1-nano" # Default: gpt-4.1-nano + - name: endpoint + value: "https://your-resource.openai.azure.com/" + - name: apiType + value: "azure" + - name: apiVersion + value: "2025-01-01-preview" +``` + + +{{% alert title="Note" color="primary" %}} +When using Azure OpenAI, both `endpoint` and `apiVersion` are mandatory fields. The component returns an error if either field is missing when `apiType` is set to `azure`. 
+{{% /alert %}} + ## Related links - [Conversation API overview]({{% ref conversation-overview.md %}}) diff --git a/daprdocs/content/en/reference/components-reference/supported-cryptography/_index.md b/daprdocs/content/en/reference/components-reference/supported-cryptography/_index.md index c7789d5e4a6..160d3e3427c 100644 --- a/daprdocs/content/en/reference/components-reference/supported-cryptography/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-cryptography/_index.md @@ -2,7 +2,7 @@ type: docs title: "Cryptography component specs" linkTitle: "Cryptography" -weight: 8000 +weight: 4000 description: The supported cryptography components that interface with Dapr no_list: true --- diff --git a/daprdocs/content/en/reference/components-reference/supported-cryptography/local-storage.md b/daprdocs/content/en/reference/components-reference/supported-cryptography/local-storage.md index 6d430678278..c8955c0669a 100644 --- a/daprdocs/content/en/reference/components-reference/supported-cryptography/local-storage.md +++ b/daprdocs/content/en/reference/components-reference/supported-cryptography/local-storage.md @@ -32,8 +32,8 @@ metadata: name: mycrypto spec: type: crypto.dapr.localstorage + version: v1 metadata: - version: v1 - name: path value: /path/to/folder/ ``` diff --git a/daprdocs/content/en/reference/components-reference/supported-locks/_index.md b/daprdocs/content/en/reference/components-reference/supported-locks/_index.md index 134e75360dc..588e5acc4ff 100644 --- a/daprdocs/content/en/reference/components-reference/supported-locks/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-locks/_index.md @@ -2,7 +2,7 @@ type: docs title: "Lock component specs" linkTitle: "Locks" -weight: 7000 +weight: 5000 description: The supported locks that interface with Dapr no_list: true --- diff --git a/daprdocs/content/en/reference/components-reference/supported-locks/redis-lock.md 
b/daprdocs/content/en/reference/components-reference/supported-locks/redis-lock.md index 3beb290ee9a..daacb43e964 100644 --- a/daprdocs/content/en/reference/components-reference/supported-locks/redis-lock.md +++ b/daprdocs/content/en/reference/components-reference/supported-locks/redis-lock.md @@ -77,15 +77,15 @@ The above example uses secrets as plain strings. It is recommended to use a secr | Field | Required | Details | Example | |-----------------------|:--------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------------| -| redisHost | Y | Connection string for the redis host | `localhost:6379`, `redis-master.default.svc.cluster.local:6379` | +| redisHost | Y | Connection-string for the redis host. If `"redisType"` is `"cluster"`, it can be multiple hosts separated by commas or just a single host. When using Redis Sentinel (`"failover"` is `"true"`), multiple sentinel addresses can also be provided as comma-separated values. | `localhost:6379`, `redis-master.default.svc.cluster.local:6379`, `sentinel1:26379,sentinel2:26379,sentinel3:26379` | | redisPassword | N | Password for Redis host. No Default. Can be `secretKeyRef` to use a secret reference | `""`, `"KeFg23!"` | | redisUsername | N | Username for Redis host. Defaults to empty. Make sure your redis server version is 6 or above, and have created acl rule correctly. | `""`, `"default"` | | useEntraID | N | Implements EntraID support for Azure Cache for Redis. Before enabling this:
  • The `redisHost` name must be specified in the form of `"server:port"`
  • TLS must be enabled
Learn more about this setting under [Create a Redis instance > Azure Cache for Redis]({{% ref "#setup-redis" %}}) | `"true"`, `"false"` | | enableTLS | N | If the Redis instance supports TLS with public certificates, can be configured to be enabled or disabled. Defaults to `"false"` | `"true"`, `"false"` | | maxRetries | N | Maximum number of retries before giving up. Defaults to `3` | `5`, `10` | | maxRetryBackoff | N | Maximum backoff between each retry. Defaults to `2` seconds; `"-1"` disables backoff. | `3000000000` | -| failover | N | Enable failover configuration. Needs sentinelMasterName to be set. The redisHost should be the sentinel host address. See [Redis Sentinel Documentation](https://redis.io/docs/manual/sentinel/). Defaults to `"false"` | `"true"`, `"false"` | -| sentinelMasterName | N | The sentinel master name. See [Redis Sentinel Documentation](https://redis.io/docs/manual/sentinel/) | `"mymaster"` | +| failover | N | Property to enable failover configuration. Needs sentinelMasterName to be set. When enabled, redisHost should contain the sentinel addresses. Defaults to `"false"` | `"true"`, `"false"` | +| sentinelMasterName | N | The sentinel master name. See [Redis Sentinel Documentation](https://redis.io/docs/latest/operate/oss_and_stack/management/sentinel/) | `"mymaster"` | | sentinelPassword | N | Password for Redis Sentinel. No Default. Applicable only when “failover” is true, and Redis Sentinel has authentication enabled | `""`, `"KeFg23!"` | redeliverInterval | N | The interval between checking for pending messages for redelivery. Defaults to `"60s"`. `"0"` disables redelivery. | `"30s"` | | processingTimeout | N | The amount of time a message must be pending before attempting to redeliver it. Defaults to `"15s"`. `"0"` disables redelivery. | `"30s"` | @@ -185,7 +185,7 @@ You can use [Helm](https://helm.sh/) to quickly create a Redis instance in our K ## Redis Sentinel behavior -Use `redisType: "node"` when connecting to Redis Sentinel. 
Additionally, set `failover` to `"true"` and `sentinelMasterName` to the name of the master node. +Use `redisType: "node"` when connecting to Redis Sentinel. Additionally, set `failover` to `"true"` and `sentinelMasterName` to the name of the master node. Multiple sentinel addresses can be specified as a comma-separated list in the `redisHost` field for redundancy. Failover characteristics: - Lock loss during failover: Locks may be lost during master failover if they weren't replicated to the promoted replica before the original master failed diff --git a/daprdocs/content/en/reference/components-reference/supported-middleware/_index.md b/daprdocs/content/en/reference/components-reference/supported-middleware/_index.md index ddb92d740ed..995651d14db 100644 --- a/daprdocs/content/en/reference/components-reference/supported-middleware/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-middleware/_index.md @@ -2,7 +2,7 @@ type: docs title: "Middleware component specs" linkTitle: "Middleware" -weight: 10000 +weight: 6000 description: List of all the supported middleware components that can be injected in Dapr's processing pipeline. 
no_list: true aliases: diff --git a/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md b/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md index c27f6f3fea8..ac0beb524dc 100644 --- a/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md @@ -2,7 +2,7 @@ type: docs title: "Name resolution provider component specs" linkTitle: "Name resolution" -weight: 9000 +weight: 7000 description: The supported name resolution providers to enable Dapr service invocation no_list: true --- diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/_index.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/_index.md index 876e7bedc16..34785d7d65c 100644 --- a/daprdocs/content/en/reference/components-reference/supported-pubsub/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/_index.md @@ -2,7 +2,7 @@ type: docs title: "Pub/sub brokers component specs" linkTitle: "Pub/sub brokers" -weight: 1000 +weight: 8000 description: The supported pub/sub brokers that interface with Dapr aliases: - "/operations/components/setup-pubsub/supported-pubsub/" diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md index 8e4e95d74ea..b806462724d 100644 --- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md +++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-apache-kafka.md @@ -102,7 +102,7 @@ spec: | clientCert | N | Client certificate, required for `authType` `mtls`. 
Can be `secretKeyRef` to use a secret reference | `"-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----"` | clientKey | N | Client key, required for `authType` `mtls` Can be `secretKeyRef` to use a secret reference | `"-----BEGIN RSA PRIVATE KEY-----\n\n-----END RSA PRIVATE KEY-----"` | skipVerify | N | Skip TLS verification, this is not recommended for use in production. Defaults to `"false"` | `"true"`, `"false"` | -| disableTls | N | Disable TLS for transport security. To disable, you're not required to set value to `"true"`. This is not recommended for use in production. Defaults to `"false"`. | `"true"`, `"false"` | +| disableTls | N | Disable TLS for transport security. To disable, you're required to set value to `"true"`. This is not recommended for use in production. Defaults to `"false"`. | `"true"`, `"false"` | | oidcTokenEndpoint | N | Full URL to an OAuth2 identity provider access token endpoint. Required when `authType` is set to `oidc` | "https://identity.example.com/v1/token" | | oidcClientID | N | The OAuth2 client ID that has been provisioned in the identity provider. Required when `authType` is set to `oidc` | `dapr-kafka` | | oidcClientSecret | N | The OAuth2 client secret that has been provisioned in the identity provider: Required when `authType` is set to `oidc` | `"KeFg23!"` | diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-azure-eventhubs.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-azure-eventhubs.md index 21a50e96b55..7b21a4817d0 100644 --- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-azure-eventhubs.md +++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-azure-eventhubs.md @@ -62,21 +62,21 @@ The above example uses secrets as plain strings. 
It is recommended to use a secr ## Spec metadata fields -| Field | Required | Details | Example | -|--------------------|:--------:|---------|---------| -| `connectionString` | Y* | Connection string for the Event Hub or the Event Hub namespace.
* Mutally exclusive with `eventHubNamespace` field.
* Required when not using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key};EntityPath={EventHub}"` or `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key}"` -| `eventHubNamespace` | Y* | The Event Hub Namespace name.
* Mutally exclusive with `connectionString` field.
* Required when using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"namespace"` +| Field | Required | Details | Example | +|--------------------|:--------:|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------| +| `connectionString` | Y* | Connection string for the Event Hub or the Event Hub namespace.
* Mutually exclusive with `eventHubNamespace` field.
* Required when not using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key};EntityPath={EventHub}"` or `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key}"` +| `eventHubNamespace` | Y* | The Event Hub Namespace name.
* Mutually exclusive with `connectionString` field.
* Required when using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"namespace"` | `consumerID` | N | Consumer ID (consumer tag) organizes one or more consumers into a group. Consumers with the same consumer ID work as one virtual consumer; for example, a message is processed only once by one of the consumers in the group. If the `consumerID` is not provided, the Dapr runtime set it to the Dapr application ID (`appID`) value. | Can be set to string value (such as `"channel1"` in the example above) or string format value (such as `"{podName}"`, etc.). [See all of template tags you can use in your component metadata.]({{% ref "component-schema.md#templated-metadata-values" %}}) -| `enableEntityManagement` | N | Boolean value to allow management of the EventHub namespace and storage account. Default: `false` | `"true", "false"` -| `enableInOrderMessageDelivery` | N | Input/Output | Boolean value to allow messages to be delivered in the order in which they were posted. This assumes `partitionKey` is set when publishing or posting to ensure ordering across partitions. Default: `false` | `"true"`, `"false"` -| `storageAccountName` | Y | Storage account name to use for the checkpoint store. |`"myeventhubstorage"` -| `storageAccountKey` | Y* | Storage account key for the checkpoint store account.
* When using Microsoft Entra ID, it's possible to omit this if the service principal has access to the storage account too. | `"112233445566778899"` -| `storageConnectionString` | Y* | Connection string for the checkpoint store, alternative to specifying `storageAccountKey` | `"DefaultEndpointsProtocol=https;AccountName=myeventhubstorage;AccountKey="` -| `storageContainerName` | Y | Storage container name for the storage account name. | `"myeventhubstoragecontainer"` -| `resourceGroupName` | N | Name of the resource group the Event Hub namespace is part of. Required when entity management is enabled | `"test-rg"` -| `subscriptionID` | N | Azure subscription ID value. Required when entity management is enabled | `"azure subscription id"` -| `partitionCount` | N | Number of partitions for the new Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"2"` -| `messageRetentionInDays` | N | Number of days to retain messages for in the newly created Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"90"` +| `enableEntityManagement` | N | Boolean value to allow management of the EventHub namespace and storage account. Default: `false` | `"true", "false"` +| `enableInOrderMessageDelivery` | N | Input/Output | Boolean value to allow messages to be delivered in the order in which they were posted. This assumes `partitionKey` is set when publishing or posting to ensure ordering across partitions. Default: `false` | `"true"`, `"false"` +| `storageAccountName` | Y | Storage account name to use for the checkpoint store. |`"myeventhubstorage"` +| `storageAccountKey` | Y* | Storage account key for the checkpoint store account.
* When using Microsoft Entra ID, it's possible to omit this if the service principal has access to the storage account too. | `"112233445566778899"` +| `storageConnectionString` | Y* | Connection string for the checkpoint store, alternative to specifying `storageAccountKey` | `"DefaultEndpointsProtocol=https;AccountName=myeventhubstorage;AccountKey="` +| `storageContainerName` | Y | Storage container name for the storage account name. | `"myeventhubstoragecontainer"` +| `resourceGroupName` | N | Name of the resource group the Event Hub namespace is part of. Required when entity management is enabled | `"test-rg"` +| `subscriptionID` | N | Azure subscription ID value. Required when entity management is enabled | `"azure subscription id"` +| `partitionCount` | N | Number of partitions for the new Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"2"` +| `messageRetentionInDays` | N | Number of days to retain messages for in the newly created Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"90"` ### Microsoft Entra ID authentication diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-rabbitmq.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-rabbitmq.md index c3b4e69a8f5..cb65415ba7c 100644 --- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-rabbitmq.md +++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-rabbitmq.md @@ -76,35 +76,35 @@ The above example uses secrets as plain strings. It is recommended to use a secr ## Spec metadata fields -| Field | Required | Details | Example | -|--------------------|:--------:|---------|---------| -| connectionString | Y* | The RabbitMQ connection string. *Mutally exclusive with protocol, hostname, username, password field | `amqp://user:pass@localhost:5672` | -| protocol | N* | The RabbitMQ protocol. 
*Mutally exclusive with connectionString field | `amqp` | -| hostname | N* | The RabbitMQ hostname. *Mutally exclusive with connectionString field | `localhost` | -| username | N* | The RabbitMQ username. *Mutally exclusive with connectionString field | `username` | -| password | N* | The RabbitMQ password. *Mutally exclusive with connectionString field | `password` | +| Field | Required | Details | Example | +|--------------------|:--------:|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------| +| connectionString | Y* | The RabbitMQ connection string. *Mutually exclusive with protocol, hostname, username, password field | `amqp://user:pass@localhost:5672` | +| protocol | N* | The RabbitMQ protocol. *Mutually exclusive with connectionString field | `amqp` | +| hostname | N* | The RabbitMQ hostname. *Mutually exclusive with connectionString field | `localhost` | +| username | N* | The RabbitMQ username. *Mutually exclusive with connectionString field | `username` | +| password | N* | The RabbitMQ password. *Mutually exclusive with connectionString field | `password` | | consumerID | N | Consumer ID (consumer tag) organizes one or more consumers into a group. Consumers with the same consumer ID work as one virtual consumer; for example, a message is processed only once by one of the consumers in the group. If the `consumerID` is not provided, the Dapr runtime set it to the Dapr application ID (`appID`) value. | Can be set to string value (such as `"channel1"` in the example above) or string format value (such as `"{podName}"`, etc.). 
[See all of template tags you can use in your component metadata.]({{% ref "component-schema.md#templated-metadata-values" %}}) -| durable | N | Whether or not to use [durable](https://www.rabbitmq.com/queues.html#durability) queues. Defaults to `"false"` | `"true"`, `"false"` -| deletedWhenUnused | N | Whether or not the queue should be configured to [auto-delete](https://www.rabbitmq.com/queues.html) Defaults to `"true"` | `"true"`, `"false"` -| autoAck | N | Whether or not the queue consumer should [auto-ack](https://www.rabbitmq.com/confirms.html) messages. Defaults to `"false"` | `"true"`, `"false"` -| deliveryMode | N | Persistence mode when publishing messages. Defaults to `"0"`. RabbitMQ treats `"2"` as persistent, all other numbers as non-persistent | `"0"`, `"2"` -| requeueInFailure | N | Whether or not to requeue when sending a [negative acknowledgement](https://www.rabbitmq.com/nack.html) in case of a failure. Defaults to `"false"` | `"true"`, `"false"` -| prefetchCount | N | Number of messages to [prefetch](https://www.rabbitmq.com/consumer-prefetch.html). Consider changing this to a non-zero value for production environments. Defaults to `"0"`, which means that all available messages will be pre-fetched. | `"2"` -| publisherConfirm | N | If enabled, client waits for [publisher confirms](https://www.rabbitmq.com/confirms.html#publisher-confirms) after publishing a message. Defaults to `"false"` | `"true"`, `"false"` -| reconnectWait | N | How long to wait (in seconds) before reconnecting if a connection failure occurs | `"0"` -| concurrencyMode | N | `parallel` is the default, and allows processing multiple messages in parallel (limited by the `app-max-concurrency` annotation, if configured). Set to `single` to disable parallel processing. In most situations there's no reason to change this. | `parallel`, `single` -| enableDeadLetter | N | Enable forwarding Messages that cannot be handled to a dead-letter topic. 
Defaults to `"false"` | `"true"`, `"false"` | -| maxLen | N | The maximum number of messages of a queue and its dead letter queue (if dead letter enabled). If both `maxLen` and `maxLenBytes` are set then both will apply; whichever limit is hit first will be enforced. Defaults to no limit. | `"1000"` | -| maxLenBytes | N | Maximum length in bytes of a queue and its dead letter queue (if dead letter enabled). If both `maxLen` and `maxLenBytes` are set then both will apply; whichever limit is hit first will be enforced. Defaults to no limit. | `"1048576"` | -| exchangeKind | N | Exchange kind of the rabbitmq exchange. Defaults to `"fanout"`. | `"fanout"`,`"topic"` | -| saslExternal | N | With TLS, should the username be taken from an additional field (for example, CN). See [RabbitMQ Authentication Mechanisms](https://www.rabbitmq.com/access-control.html#mechanisms). Defaults to `"false"`. | `"true"`, `"false"` | -| ttlInSeconds | N | Set message TTL at the component level, which can be overwritten by message level TTL per request. | `"60"` | -| caCert | Required for using TLS | Certificate Authority (CA) certificate in PEM format for verifying server TLS certificates. | `"-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----"` -| clientCert | Required for using TLS | TLS client certificate in PEM format. Must be used with `clientKey`. | `"-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----"` -| clientKey | Required for using TLS | TLS client key in PEM format. Must be used with `clientCert`. Can be `secretKeyRef` to use a secret reference. | `"-----BEGIN RSA PRIVATE KEY-----\n\n-----END RSA PRIVATE KEY-----"` -| clientName | N | This RabbitMQ [client-provided connection name](https://www.rabbitmq.com/connections.html#client-provided-names) is a custom identifier. If set, the identifier is mentioned in RabbitMQ server log entries and management UI. Can be set to {uuid}, {podName}, or {appID}, which is replaced by Dapr runtime to the real value. 
| `"app1"`, `{uuid}`, `{podName}`, `{appID}` -| heartBeat | N | Defines the heartbeat interval with the server, detecting the aliveness of the peer TCP connection with the RabbitMQ server. Defaults to `10s` . | `"10s"` -| `publishMessagePropertiesToMetadata` | N | Whether to publish AMQP message properties (headers, message ID, etc.) to the metadata. | "true", "false" +| durable | N | Whether or not to use [durable](https://www.rabbitmq.com/queues.html#durability) queues. Defaults to `"false"` | `"true"`, `"false"` +| deletedWhenUnused | N | Whether or not the queue should be configured to [auto-delete](https://www.rabbitmq.com/queues.html) Defaults to `"true"` | `"true"`, `"false"` +| autoAck | N | Whether or not the queue consumer should [auto-ack](https://www.rabbitmq.com/confirms.html) messages. Defaults to `"false"` | `"true"`, `"false"` +| deliveryMode | N | Persistence mode when publishing messages. Defaults to `"0"`. RabbitMQ treats `"2"` as persistent, all other numbers as non-persistent | `"0"`, `"2"` +| requeueInFailure | N | Whether or not to requeue when sending a [negative acknowledgement](https://www.rabbitmq.com/nack.html) in case of a failure. Defaults to `"false"` | `"true"`, `"false"` +| prefetchCount | N | Number of messages to [prefetch](https://www.rabbitmq.com/consumer-prefetch.html). Consider changing this to a non-zero value for production environments. Defaults to `"0"`, which means that all available messages will be pre-fetched. | `"2"` +| publisherConfirm | N | If enabled, client waits for [publisher confirms](https://www.rabbitmq.com/confirms.html#publisher-confirms) after publishing a message. Defaults to `"false"` | `"true"`, `"false"` +| reconnectWait | N | How long to wait (in seconds) before reconnecting if a connection failure occurs | `"0"` +| concurrencyMode | N | `parallel` is the default, and allows processing multiple messages in parallel (limited by the `app-max-concurrency` annotation, if configured). 
Set to `single` to disable parallel processing. In most situations there's no reason to change this. | `parallel`, `single` +| enableDeadLetter | N | Enable forwarding Messages that cannot be handled to a dead-letter topic. Defaults to `"false"` | `"true"`, `"false"` | +| maxLen | N | The maximum number of messages of a queue and its dead letter queue (if dead letter enabled). If both `maxLen` and `maxLenBytes` are set then both will apply; whichever limit is hit first will be enforced. Defaults to no limit. | `"1000"` | +| maxLenBytes | N | Maximum length in bytes of a queue and its dead letter queue (if dead letter enabled). If both `maxLen` and `maxLenBytes` are set then both will apply; whichever limit is hit first will be enforced. Defaults to no limit. | `"1048576"` | +| exchangeKind | N | Exchange kind of the rabbitmq exchange. Defaults to `"fanout"`. | `"fanout"`,`"topic"` | +| saslExternal | N | With TLS, should the username be taken from an additional field (for example, CN). See [RabbitMQ Authentication Mechanisms](https://www.rabbitmq.com/access-control.html#mechanisms). Defaults to `"false"`. | `"true"`, `"false"` | +| ttlInSeconds | N | Set message TTL at the component level, which can be overwritten by message level TTL per request. | `"60"` | +| caCert | Required for using TLS | Certificate Authority (CA) certificate in PEM format for verifying server TLS certificates. | `"-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----"` +| clientCert | Required for using TLS | TLS client certificate in PEM format. Must be used with `clientKey`. | `"-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----"` +| clientKey | Required for using TLS | TLS client key in PEM format. Must be used with `clientCert`. Can be `secretKeyRef` to use a secret reference. 
| `"-----BEGIN RSA PRIVATE KEY-----\n\n-----END RSA PRIVATE KEY-----"` +| clientName | N | This RabbitMQ [client-provided connection name](https://www.rabbitmq.com/connections.html#client-provided-names) is a custom identifier. If set, the identifier is mentioned in RabbitMQ server log entries and management UI. Can be set to {uuid}, {podName}, or {appID}, which is replaced by Dapr runtime to the real value. | `"app1"`, `{uuid}`, `{podName}`, `{appID}` +| heartBeat | N | Defines the heartbeat interval with the server, detecting the aliveness of the peer TCP connection with the RabbitMQ server. Defaults to `10s` . | `"10s"` +| `publishMessagePropertiesToMetadata` | N | Whether to publish AMQP message properties (headers, message ID, etc.) to the metadata. | "true", "false" ## Communication using TLS diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-redis-pubsub.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-redis-pubsub.md index 6092d96a424..d4e9b566652 100644 --- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-redis-pubsub.md +++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-redis-pubsub.md @@ -40,7 +40,7 @@ The above example uses secrets as plain strings. It is recommended to use a secr | Field | Required | Details | Example | |--------------------|:--------:|---------|---------| -| redisHost | Y | Connection-string for the redis host. If `"redisType"` is `"cluster"` it can be multiple hosts separated by commas or just a single host | `localhost:6379`, `redis-master.default.svc.cluster.local:6379` +| redisHost | Y | Connection-string for the redis host. If `"redisType"` is `"cluster"`, it can be multiple hosts separated by commas or just a single host. When using Redis Sentinel (`"failover"` is `"true"`), multiple sentinel addresses can also be provided as comma-separated values. 
| `localhost:6379`, `redis-master.default.svc.cluster.local:6379`, `sentinel1:26379,sentinel2:26379,sentinel3:26379` | redisPassword | N | Password for Redis host. No Default. Can be `secretKeyRef` to use a secret reference | `""`, `"KeFg23!"` | redisUsername | N | Username for Redis host. Defaults to empty. Make sure your redis server version is 6 or above, and have created acl rule correctly. | `""`, `"default"` | consumerID | N | The consumer group ID. | Can be set to string value (such as `"channel1"` in the example above) or string format value (such as `"{podName}"`, etc.). [See all of template tags you can use in your component metadata.]({{% ref "component-schema.md#templated-metadata-values" %}}) @@ -66,8 +66,8 @@ The above example uses secrets as plain strings. It is recommended to use a secr | minIdleConns | N | Minimum number of idle connections to keep open in order to avoid the performance degradation associated with creating new connections. Defaults to `"0"`. | `"2"` | idleCheckFrequency | N | Frequency of idle checks made by idle connections reaper. Default is `"1m"`. `"-1"` disables idle connections reaper. | `"-1"` | idleTimeout | N | Amount of time after which the client closes idle connections. Should be less than server's timeout. Default is `"5m"`. `"-1"` disables idle timeout check. | `"10m"` -| failover | N | Property to enable failover configuration. Needs sentinelMasterName to be set. Defaults to `"false"` | `"true"`, `"false"` -| sentinelMasterName | N | The sentinel master name. See [Redis Sentinel Documentation](https://redis.io/docs/manual/sentinel/) | `""`, `"mymaster"` +| failover | N | Property to enable failover configuration. Needs sentinelMasterName to be set. When enabled, redisHost should contain the sentinel addresses. Defaults to `"false"` | `"true"`, `"false"` +| sentinelMasterName | N | The sentinel master name. 
See [Redis Sentinel Documentation](https://redis.io/docs/latest/operate/oss_and_stack/management/sentinel/) | `""`, `"mymaster"` | sentinelUsername | N | Username for Redis Sentinel. Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"username"` | sentinelPassword | N | Password for Redis Sentinel. Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"password"` | maxLenApprox | N | Maximum number of items inside a stream. The old entries are automatically evicted when the specified length is reached, so that the stream is left at a constant size. Defaults to unlimited. | `"10000"` @@ -113,6 +113,7 @@ You can use [Helm](https://helm.sh/) to quickly create a Redis instance in our K - name: redisPassword value: "lhDOkwTlp0" ``` + {{% /tab %}} {{% tab "AWS" %}} @@ -159,6 +160,29 @@ You can use [Helm](https://helm.sh/) to quickly create a Redis instance in our K The Dapr CLI automatically deploys a local redis instance in self hosted mode as part of the `dapr init` command. {{% /alert %}} +## Redis Sentinel configuration + +When using Redis Sentinel for high availability, set `redisType` to `"node"`, enable failover mode with `failover: "true"`, and provide the sentinel master name. Multiple sentinel addresses can be specified as a comma-separated list in the `redisHost` field for redundancy. 
+ + ```yaml + apiVersion: dapr.io/v1alpha1 + kind: Component + metadata: + name: redis-pubsub + spec: + type: pubsub.redis + version: v1 + metadata: + - name: redisHost + value: "sentinel1:26379,sentinel2:26379,sentinel3:26379" + - name: redisType + value: "node" + - name: failover + value: "true" + - name: sentinelMasterName + value: "mymaster" + ``` + ## Related links - [Basic schema for a Dapr component]({{% ref component-schema %}}) - Read [this guide]({{% ref "howto-publish-subscribe.md#step-2-publish-a-topic" %}}) for instructions on configuring pub/sub components diff --git a/daprdocs/content/en/reference/components-reference/supported-secret-stores/_index.md b/daprdocs/content/en/reference/components-reference/supported-secret-stores/_index.md index d44055ae9d5..03d17ca02cb 100644 --- a/daprdocs/content/en/reference/components-reference/supported-secret-stores/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-secret-stores/_index.md @@ -2,7 +2,7 @@ type: docs title: "Secret store component specs" linkTitle: "Secret stores" -weight: 5000 +weight: 9000 description: The supported secret stores that interface with Dapr aliases: - "/operations/components/setup-secret-store/supported-secret-stores/" diff --git a/daprdocs/content/en/reference/components-reference/supported-secret-stores/aws-secret-manager.md b/daprdocs/content/en/reference/components-reference/supported-secret-stores/aws-secret-manager.md index d59af95a5a3..0150633974b 100644 --- a/daprdocs/content/en/reference/components-reference/supported-secret-stores/aws-secret-manager.md +++ b/daprdocs/content/en/reference/components-reference/supported-secret-stores/aws-secret-manager.md @@ -2,7 +2,7 @@ type: docs title: "AWS Secrets Manager" linkTitle: "AWS Secrets Manager" -description: Detailed information on the secret store component +description: Detailed information on the AWS Secrets Manager secret store component aliases: - 
"/operations/components/setup-secret-store/supported-secret-stores/aws-secret-manager/" --- @@ -30,6 +30,8 @@ spec: value: "[aws_secret_key]" - name: sessionToken value: "[aws_session_token]" + - name: multipleKeyValuesPerSecret + value: "false" ``` {{% alert title="Warning" color="warning" %}} The above example uses secrets as plain strings. It is recommended to use a local secret store such as [Kubernetes secret store]({{% ref kubernetes-secret-store.md %}}) or a [local file]({{% ref file-secret-store.md %}}) to bootstrap secure key storage. @@ -43,6 +45,7 @@ The above example uses secrets as plain strings. It is recommended to use a loca | accessKey | Y | The AWS Access Key to access this resource | `"key"` | | secretKey | Y | The AWS Secret Access Key to access this resource | `"secretAccessKey"` | | sessionToken | N | The AWS session token to use | `"sessionToken"` | +| multipleKeyValuesPerSecret | N | When set to `"true"` allows for multiple key value pairs to be stored in a single secret. Defaults to `"false"` | `"true"` | {{% alert title="Important" color="warning" %}} When running the Dapr sidecar (daprd) with your application on EKS (AWS Kubernetes), if you're using a node/pod that has already been attached to an IAM policy defining access to AWS resources, you **must not** provide AWS access-key, secret-key, and tokens in the definition of the component spec you're using. @@ -57,6 +60,46 @@ Query Parameter | Description `metadata.version_id` | Version for the given secret key. `metadata.version_stage` | Version stage for the given secret key. +## Configure multiple key-values per secret + +The `multipleKeyValuesPerSecret` flag determines whether the secret store presents a single value or multiple key-value pairs per secret. + +### Single value per secret + +If `multipleKeyValuesPerSecret` is `false` (default), AWS Secrets Manager returns the secret value as-is. 
Given a secret named `database-credentials` with the following JSON content: + +```json +{ + "username": "admin", + "password": "secret123", + "host": "db.example.com" +} +``` + +Requesting this secret returns the entire JSON as a single value: + +```bash +$ curl http://localhost:3500/v1.0/secrets/awssecretmanager/database-credentials +{ + "database-credentials": "{\"username\":\"admin\",\"password\":\"secret123\",\"host\":\"db.example.com\"}" +} +``` + +### Multiple key-value pairs per secret + +If `multipleKeyValuesPerSecret` is `true`, the secret store parses JSON content stored in AWS Secrets Manager and returns it as multiple key-value pairs. + +Requesting the same `database-credentials` secret from above, the response breaks the JSON object into its own entries, allowing it to be parsed into multiple key-value pairs. + +```bash +$ curl http://localhost:3500/v1.0/secrets/awssecretmanager/database-credentials +{ + "username": "admin", + "password": "secret123", + "host": "db.example.com" +} +``` + ## Create an AWS Secrets Manager instance Setup AWS Secrets Manager using the AWS documentation: https://docs.aws.amazon.com/secretsmanager/latest/userguide/tutorials_basic.html. 
diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/_index.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/_index.md index 2b2509f53fb..0855682eb22 100644 --- a/daprdocs/content/en/reference/components-reference/supported-state-stores/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-state-stores/_index.md @@ -3,7 +3,7 @@ type: docs title: "State store component specs" linkTitle: "State stores" description: "The supported state stores that interface with Dapr" -weight: 4000 +weight: 10000 aliases: - "/operations/components/setup-state-store/supported-state-stores/" no_list: true diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-cosmosdb.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-cosmosdb.md index 2e67d46c137..dc225eee5d5 100644 --- a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-cosmosdb.md +++ b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-cosmosdb.md @@ -225,6 +225,22 @@ This particular optimization only makes sense if you are saving large objects to {{% /alert %}} +## Workflow Limitations + +{{% alert title="Note" color="primary" %}} + +As described below, CosmosDB has limitations that likely make it unsuitable for production environments. +There is currently no path for migrating Workflow data from CosmosDB to another state store, meaning exceeding these limits in production will result in failed workflows with no workaround. + +{{% /alert %}} + +The more complex a workflow is with number of activities, child workflows, etc, the more DB state operations it performs per state store transaction. +All input & output values are saved to the workflow history, and are part of an operation of these transactions. 
+CosmosDB has a [maximum document size of 2MB and maximum transaction size of 100 operations](https://learn.microsoft.com/azure/cosmos-db/concepts-limits#per-request-limits). +Attempting to write to CosmosDB beyond these limits results in an error code of `413`. +This means that the workflow history must not exceed this size, meaning that CosmosDB is not suitable for workflows with large input/output values or larger complex workflows. +A general guide to the number of records that are saved during a workflow execution can be found [here]({{% ref "workflow-architecture.md#state-store-record-count" %}}). + ## Related links - [Basic schema for a Dapr component]({{% ref component-schema %}}) diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-dynamodb.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-dynamodb.md index 5a45e374d90..e556191829d 100644 --- a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-dynamodb.md +++ b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-dynamodb.md @@ -34,6 +34,8 @@ spec: value: "myTOKEN" # Optional - name: ttlAttributeName value: "expiresAt" # Optional + - name: ttlInSeconds + value: # Optional - name: partitionKey value: "ContractID" # Optional # Uncomment this if you wish to use AWS DynamoDB as a state store for actors (optional) @@ -60,6 +62,7 @@ In order to use DynamoDB as a Dapr state store, the table must have a primary ke | endpoint | N |AWS endpoint for the component to use. Only used for local development. The `endpoint` is unnecessary when running against production AWS | `"http://localhost:4566"` | sessionToken | N |AWS session token to use. A session token is only required if you are using temporary security credentials. | `"TOKEN"` | ttlAttributeName | N |The table attribute name which should be used for TTL. 
| `"expiresAt"` +| ttlInSeconds | N | Allows specifying a Time-to-live (TTL) in seconds that will be applied to every state store request unless TTL is explicitly defined via the [request metadata]({{% ref "state-store-ttl.md" %}}). If set to zero or less, no default TTL is applied, and items will only expire if a TTL is explicitly provided in the request metadata and ttlAttributeName is set. | `600` | partitionKey | N |The table primary key or partition key attribute name. This field is used to replace the default primary key attribute name `"key"`. See the section [Partition Keys]({{% ref "setup-dynamodb.md#partition-keys" %}}). | `"ContractID"` | actorStateStore | N | Consider this state store for actors. Defaults to "false" | `"true"`, `"false"` @@ -158,6 +161,20 @@ $ aws dynamodb get-item \ } ``` +## Workflow Limitations + +{{% alert title="Note" color="primary" %}} + +As described below, DynamoDB has limitations that likely make it unsuitable for production environments. +There is currently no path for migrating Workflow data from DynamoDB to another state store, meaning exceeding these limits in production will result in failed workflows with no workaround. + +{{% /alert %}} + +The more complex a workflow is (number of activities, child workflows, etc.), the more state operations it performs per state store transaction. +The maximum number of operations that can be performed by DynamoDB in a [single transaction is 100](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/transaction-apis.html). +This means that DynamoDB can only handle workflows with a limited complexity, meaning it is not suitable for all workflow scenarios. +A general guide to the number of records that are saved during a workflow execution can be found [here]({{% ref "workflow-architecture.md#state-store-record-count" %}}). 
+ ## Related links - [Basic schema for a Dapr component]({{% ref component-schema %}}) diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-redis.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-redis.md index 68cdc27eb2b..58ac80c95b3 100644 --- a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-redis.md +++ b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-redis.md @@ -105,7 +105,7 @@ If you wish to use Redis as an actor store, append the following to the yaml. | Field | Required | Details | Example | |--------------------|:--------:|---------|---------| -| redisHost | Y | Connection-string for the redis host | `localhost:6379`, `redis-master.default.svc.cluster.local:6379` +| redisHost | Y | Connection-string for the redis host. If `"redisType"` is `"cluster"`, it can be multiple hosts separated by commas or just a single host. When using Redis Sentinel (`"failover"` is `"true"`), multiple sentinel addresses can also be provided as comma-separated values. | `localhost:6379`, `redis-master.default.svc.cluster.local:6379`, `sentinel1:26379,sentinel2:26379,sentinel3:26379` | redisPassword | N | Password for Redis host. No Default. Can be `secretKeyRef` to use a secret reference | `""`, `"KeFg23!"` | redisUsername | N | Username for Redis host. Defaults to empty. Make sure your redis server version is 6 or above, and have created acl rule correctly. | `""`, `"default"` | useEntraID | N | Implements EntraID support for Azure Cache for Redis. Before enabling this:
  • The `redisHost` name must be specified in the form of `"server:port"`
  • TLS must be enabled
Learn more about this setting under [Create a Redis instance > Azure Cache for Redis]({{% ref "#setup-redis" %}}) | `"true"`, `"false"` | @@ -114,8 +114,8 @@ If you wish to use Redis as an actor store, append the following to the yaml. | clientKey | N | The content of the client private key, used in conjunction with `clientCert` for authentication. It is recommended to use a secret store as described [here]({{% ref component-secrets.md %}}) | `"----BEGIN PRIVATE KEY-----\nMIIE..."` | | maxRetries | N | Maximum number of retries before giving up. Defaults to `3` | `5`, `10` | maxRetryBackoff | N | Maximum backoff between each retry. Defaults to `2` seconds; `"-1"` disables backoff. | `3000000000` -| failover | N | Property to enable failover configuration. Needs sentinelMasterName to be set. The redisHost should be the sentinel host address. See [Redis Sentinel Documentation](https://redis.io/docs/manual/sentinel/). Defaults to `"false"` | `"true"`, `"false"` -| sentinelMasterName | N | The sentinel master name. See [Redis Sentinel Documentation](https://redis.io/docs/manual/sentinel/) | `""`, `"mymaster"` +| failover | N | Property to enable failover configuration. Needs sentinelMasterName to be set. When enabled, redisHost should contain the sentinel addresses. Defaults to `"false"` | `"true"`, `"false"` +| sentinelMasterName | N | The sentinel master name. See [Redis Sentinel Documentation](https://redis.io/docs/latest/operate/oss_and_stack/management/sentinel/) | `""`, `"mymaster"` | sentinelUsername | N | Username for Redis Sentinel. Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"username"` | sentinelPassword | N | Password for Redis Sentinel. Applicable only when "failover" is true, and Redis Sentinel has authentication enabled | `"password"` | redeliverInterval | N | The interval between checking for pending messages to redelivery. Defaults to `"60s"`. `"0"` disables redelivery. 
| `"30s"` @@ -494,6 +494,29 @@ The result will be: The query syntax and documentation is available [here]({{% ref howto-state-query-api.md %}}) +## Redis Sentinel configuration + +When using Redis Sentinel for high availability, set `redisType` to `"node"`, enable failover mode with `failover: "true"`, and provide the sentinel master name. Multiple sentinel addresses can be specified as a comma-separated list in the `redisHost` field for redundancy. + + ```yaml + apiVersion: dapr.io/v1alpha1 + kind: Component + metadata: + name: redis-state + spec: + type: state.redis + version: v1 + metadata: + - name: redisHost + value: "sentinel1:26379,sentinel2:26379,sentinel3:26379" + - name: redisType + value: "node" + - name: failover + value: "true" + - name: sentinelMasterName + value: "mymaster" + ``` + ## Related links - [Basic schema for a Dapr component]({{% ref component-schema %}}) - Read [this guide]({{% ref "howto-get-save-state.md#step-2-save-and-retrieve-a-single-state" %}}) for instructions on configuring state store components diff --git a/daprdocs/content/en/reference/resource-specs/subscription-schema.md b/daprdocs/content/en/reference/resource-specs/subscription-schema.md index 7f842f7150d..70cc0b2451c 100644 --- a/daprdocs/content/en/reference/resource-specs/subscription-schema.md +++ b/daprdocs/content/en/reference/resource-specs/subscription-schema.md @@ -38,6 +38,7 @@ spec: enabled: maxMessagesCount: maxAwaitDurationMs: + metadata: # Optional scopes: - ``` @@ -51,6 +52,7 @@ scopes: | pubsubname | N | The name of your pub/sub component. | `pubsub` | | deadLetterTopic | N | The name of the dead letter topic that forwards undeliverable messages. | `poisonMessages` | | bulkSubscribe | N | Enable bulk subscribe properties. | `true`, `false` | +| metadata | N | Set subscribe metadata. 
| `{"key": "value"}` | ## `v1alpha1` format @@ -67,6 +69,7 @@ spec: route: # Required pubsubname: # Required deadLetterTopic: # Optional + metadata: # Optional bulkSubscribe: # Optional - enabled: - maxMessagesCount: @@ -83,6 +86,7 @@ scopes: | route | Y | The endpoint to which all topic messages are sent. | `/checkout` | | pubsubname | N | The name of your pub/sub component. | `pubsub` | | deadlettertopic | N | The name of the dead letter topic that forwards undeliverable messages. | `poisonMessages` | +| metadata | N | Set subscribe metadata. | `{"key": "value"}` | | bulksubscribe | N | Enable bulk subscribe properties. | `true`, `false` | ## Related links diff --git a/daprdocs/data/components/secret_stores/aws.yaml b/daprdocs/data/components/secret_stores/aws.yaml index 522b7f64e64..5d08844b8ff 100644 --- a/daprdocs/data/components/secret_stores/aws.yaml +++ b/daprdocs/data/components/secret_stores/aws.yaml @@ -3,6 +3,8 @@ state: Beta version: v1 since: "1.15" + features: + multipleKeyValuesPerSecret: true - component: AWS SSM Parameter Store link: aws-parameter-store state: Alpha diff --git a/daprdocs/data/components/state_stores/azure.yaml b/daprdocs/data/components/state_stores/azure.yaml index 287477de780..b340a26db71 100644 --- a/daprdocs/data/components/state_stores/azure.yaml +++ b/daprdocs/data/components/state_stores/azure.yaml @@ -30,7 +30,7 @@ transactions: true etag: true ttl: true - workflow: false + workflow: true - component: Azure Table Storage link: setup-azure-tablestorage state: Stable diff --git a/daprdocs/data/components/state_stores/generic.yaml b/daprdocs/data/components/state_stores/generic.yaml index 289d8ce4305..8d8ce44184c 100644 --- a/daprdocs/data/components/state_stores/generic.yaml +++ b/daprdocs/data/components/state_stores/generic.yaml @@ -52,7 +52,7 @@ transactions: true etag: true ttl: true - workflow: false + workflow: true - component: Hashicorp Consul link: setup-consul state: Alpha @@ -140,7 +140,7 @@ transactions: true 
etag: true ttl: true - workflow: false + workflow: true - component: PostgreSQL v1 link: setup-postgresql-v1 state: Stable @@ -195,7 +195,7 @@ transactions: true etag: true ttl: true - workflow: false + workflow: true - component: Zookeeper link: setup-zookeeper state: Alpha diff --git a/daprdocs/data/components/state_stores/oracle.yaml b/daprdocs/data/components/state_stores/oracle.yaml index eae48304fde..fc5136684b6 100644 --- a/daprdocs/data/components/state_stores/oracle.yaml +++ b/daprdocs/data/components/state_stores/oracle.yaml @@ -9,7 +9,7 @@ etag: true ttl: true query: false - workflow: false + workflow: true - component: Coherence link: setup-coherence state: Alpha diff --git a/daprdocs/layouts/_partials/hooks/body-end.html b/daprdocs/layouts/_partials/hooks/body-end.html index ee0ebc1f145..9bb0c9688f0 100644 --- a/daprdocs/layouts/_partials/hooks/body-end.html +++ b/daprdocs/layouts/_partials/hooks/body-end.html @@ -5,16 +5,10 @@ - - - - -{{ end }} \ No newline at end of file +{{ end }} diff --git a/daprdocs/layouts/_partials/hooks/head-end.html b/daprdocs/layouts/_partials/hooks/head-end.html index a86318b4883..26e604a4cfc 100644 --- a/daprdocs/layouts/_partials/hooks/head-end.html +++ b/daprdocs/layouts/_partials/hooks/head-end.html @@ -1,25 +1,3 @@ {{ with .Site.Params.search.algolia }} - - {{ end }} \ No newline at end of file diff --git a/daprdocs/layouts/_shortcodes/dapr-latest-version.html b/daprdocs/layouts/_shortcodes/dapr-latest-version.html index a085fd0e6f5..da12c873eb6 100644 --- a/daprdocs/layouts/_shortcodes/dapr-latest-version.html +++ b/daprdocs/layouts/_shortcodes/dapr-latest-version.html @@ -1 +1 @@ -{{- if .Get "short" }}1.15{{ else if .Get "long" }}1.15.5{{ else if .Get "cli" }}1.15.1{{ else }}1.15.1{{ end -}} +{{- if .Get "short" }}1.16{{ else if .Get "long" }}1.16.6{{ else if .Get "cli" }}1.16.5{{ else }}1.16.6{{ end -}} diff --git a/daprdocs/static/images/homepage/dapr-agents.svg b/daprdocs/static/images/homepage/dapr-agents.svg 
new file mode 100644 index 00000000000..1b34a323cf3 --- /dev/null +++ b/daprdocs/static/images/homepage/dapr-agents.svg @@ -0,0 +1,12 @@ + + + + + + + + + + + + diff --git a/daprdocs/static/images/homepage/dark-blue-dapr.svg b/daprdocs/static/images/homepage/dark-blue-dapr.svg new file mode 100644 index 00000000000..e43f605ce59 --- /dev/null +++ b/daprdocs/static/images/homepage/dark-blue-dapr.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/daprdocs/static/images/homepage/pub-sub.svg b/daprdocs/static/images/homepage/pub-sub.svg new file mode 100644 index 00000000000..03199fd61f8 --- /dev/null +++ b/daprdocs/static/images/homepage/pub-sub.svg @@ -0,0 +1,3 @@ + + + diff --git a/daprdocs/static/images/homepage/service-invocation.svg b/daprdocs/static/images/homepage/service-invocation.svg new file mode 100644 index 00000000000..e6a2a204c19 --- /dev/null +++ b/daprdocs/static/images/homepage/service-invocation.svg @@ -0,0 +1,3 @@ + + + diff --git a/daprdocs/static/images/homepage/workflow.svg b/daprdocs/static/images/homepage/workflow.svg new file mode 100644 index 00000000000..7d31f803b2c --- /dev/null +++ b/daprdocs/static/images/homepage/workflow.svg @@ -0,0 +1,3 @@ + + + diff --git a/daprdocs/static/images/state-management-outbox-steps.png b/daprdocs/static/images/state-management-outbox-steps.png new file mode 100644 index 00000000000..a520b443b97 Binary files /dev/null and b/daprdocs/static/images/state-management-outbox-steps.png differ diff --git a/daprdocs/static/images/workflow-overview/workflow-diagrid-dashboard.png b/daprdocs/static/images/workflow-overview/workflow-diagrid-dashboard.png new file mode 100644 index 00000000000..2e5aa34a970 Binary files /dev/null and b/daprdocs/static/images/workflow-overview/workflow-diagrid-dashboard.png differ diff --git a/daprdocs/static/images/workflow-overview/workflow-multi-app-callactivity.png b/daprdocs/static/images/workflow-overview/workflow-multi-app-callactivity.png new file mode 100644 index 
00000000000..7b2a28561d8 Binary files /dev/null and b/daprdocs/static/images/workflow-overview/workflow-multi-app-callactivity.png differ diff --git a/daprdocs/static/images/workflow-overview/workflow-multi-app-child-workflow.png b/daprdocs/static/images/workflow-overview/workflow-multi-app-child-workflow.png new file mode 100644 index 00000000000..388fcc8cf27 Binary files /dev/null and b/daprdocs/static/images/workflow-overview/workflow-multi-app-child-workflow.png differ diff --git a/daprdocs/static/images/workflow-overview/workflow-multi-app-complex.png b/daprdocs/static/images/workflow-overview/workflow-multi-app-complex.png new file mode 100644 index 00000000000..8a2c401f89a Binary files /dev/null and b/daprdocs/static/images/workflow-overview/workflow-multi-app-complex.png differ diff --git a/hugo.yaml b/hugo.yaml index a6e206f75ee..67071e159e9 100644 --- a/hugo.yaml +++ b/hugo.yaml @@ -1,4 +1,4 @@ -baseURL: https://v1-16.docs.dapr.io +baseURL: https://docs.dapr.io title: Dapr Docs # Output directory for generated site @@ -117,21 +117,22 @@ params: # First one is picked as the Twitter card image if not set on page. # images: [images/project-illustration.png] - + + # Versioning # Menu title if your navbar has a versions selector to access old versions of your site. # This menu appears only if you have at least one [params.versions] set. - version_menu: v1.16 (preview) - - # Flag used in the "version-banner" partial to decide whether to display a - # banner on every page indicating that this is an archived version of the docs. - # Set this flag to "true" if you want to display the banner. - archived_version: false + version_menu: v1.16 (latest) # The version number for the version of the docs represented in this doc set. # Used in the "version-banner" partial to display a version number for the # current doc set. 
version: v1.16 + # Flag used in the "version-banner" partial to decide whether to display a + # banner on every page indicating that this is an archived version of the docs. + # Set this flag to "true" if you want to display the banner. + archived_version: false + # A link to latest version of the docs. Used in the "version-banner" partial to # point people to the main doc site. url_latest_version: https://docs.dapr.io @@ -150,10 +151,12 @@ params: github_branch: v1.16 versions: - - version: v1.16 (preview) + - version: v1.17 (preview) + url: https://v1-17.docs.dapr.io + - version: v1.16 (latest) url: "#" - - version: v1.15 (latest) - url: "https://docs.dapr.io" + - version: v1.15 + url: https://v1-15.docs.dapr.io - version: v1.14 url: https://v1-14.docs.dapr.io - version: v1.13 @@ -185,9 +188,9 @@ params: search: algolia: - appId: 'O0QLQGNF38' - apiKey: '54ae43aa28ce8f00c54c8d5f544d29b9' - indexName: daprdocs + appId: O0QLQGNF38 + apiKey: 54ae43aa28ce8f00c54c8d5f544d29b9 + indexName: crawler_dapr offlineSearch: false # User interface configuration @@ -272,49 +275,49 @@ module: target: assets - source: daprdocs/archetypes target: archetypes - - source: sdkdocs/python/daprdocs/content/en/python-sdk-docs + - source: sdkdocs/python/content/en/python-sdk-docs target: content/developing-applications/sdks/python lang: en - - source: sdkdocs/python/daprdocs/content/en/python-sdk-contributing + - source: sdkdocs/python/content/en/python-sdk-contributing target: content/contributing/sdk-contrib/ lang: en - - source: sdkdocs/php/daprdocs/content/en/php-sdk-docs + - source: sdkdocs/php/content/en/php-sdk-docs target: content/developing-applications/sdks/php lang: en - - source: sdkdocs/dotnet/daprdocs/content/en/dotnet-sdk-docs + - source: sdkdocs/dotnet/content/en/dotnet-sdk-docs target: content/developing-applications/sdks/dotnet lang: en - - source: sdkdocs/dotnet/daprdocs/content/en/dotnet-sdk-contributing + - source: sdkdocs/dotnet/content/en/dotnet-sdk-contributing 
target: content/contributing/sdk-contrib/ lang: en - - source: sdkdocs/go/daprdocs/content/en/go-sdk-docs + - source: sdkdocs/go/content/en/go-sdk-docs target: content/developing-applications/sdks/go lang: en - - source: sdkdocs/go/daprdocs/content/en/go-sdk-contributing + - source: sdkdocs/go/content/en/go-sdk-contributing target: content/contributing/sdk-contrib/ lang: en - - source: sdkdocs/java/daprdocs/content/en/java-sdk-docs + - source: sdkdocs/java/content/en/java-sdk-docs target: content/developing-applications/sdks/java lang: en - - source: sdkdocs/java/daprdocs/content/en/java-sdk-contributing + - source: sdkdocs/java/content/en/java-sdk-contributing target: content/contributing/sdk-contrib/ lang: en - - source: sdkdocs/js/daprdocs/content/en/js-sdk-docs + - source: sdkdocs/js/content/en/js-sdk-docs target: content/developing-applications/sdks/js lang: en - - source: sdkdocs/js/daprdocs/content/en/js-sdk-contributing + - source: sdkdocs/js/content/en/js-sdk-contributing target: content/contributing/sdk-contrib/ lang: en - - source: sdkdocs/rust/daprdocs/content/en/rust-sdk-docs + - source: sdkdocs/rust/content/en/rust-sdk-docs target: content/developing-applications/sdks/rust lang: en - - source: sdkdocs/rust/daprdocs/content/en/rust-sdk-contributing + - source: sdkdocs/rust/content/en/rust-sdk-contributing target: content/contributing/sdk-contrib/ lang: en - - source: sdkdocs/pluggable-components/dotnet/daprdocs/content/en/dotnet-sdk-docs + - source: sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs target: content/developing-applications/develop-components/pluggable-components/pluggable-components-sdks/pluggable-components-dotnet lang: en - - source: sdkdocs/pluggable-components/go/daprdocs/content/en/go-sdk-docs + - source: sdkdocs/pluggable-components/go/content/en/go-sdk-docs target: content/developing-applications/develop-components/pluggable-components/pluggable-components-sdks/pluggable-components-go lang: en - source: 
translations/docs-zh/translated_content/zh_CN/docs diff --git a/sdkdocs/dotnet b/sdkdocs/dotnet deleted file mode 160000 index 241a646a203..00000000000 --- a/sdkdocs/dotnet +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 241a646a2037d4e91d3192dcbaf1f128b15de185 diff --git a/sdkdocs/dotnet/README.md b/sdkdocs/dotnet/README.md new file mode 100644 index 00000000000..ce83c6b66d9 --- /dev/null +++ b/sdkdocs/dotnet/README.md @@ -0,0 +1,25 @@ +# Dapr .NET SDK documentation + +This page covers how the documentation is structured for the Dapr .NET SDK. + +## Dapr Docs + +All Dapr documentation is hosted at [docs.dapr.io](https://docs.dapr.io), including the docs for the [.NET SDK](https://docs.dapr.io/developing-applications/sdks/dotnet/). Head over there if you want to read the docs. + +### .NET SDK docs source + +Although the docs site code and content is in the [docs repo](https://github.com/dapr/docs), the .NET SDK content and images are within the `content` and `static` directories, respectively. + +This allows separation of roles and expertise between maintainers, and makes it easy to find the docs files you are looking for. + +## Writing .NET SDK docs + +To get up and running to write .NET SDK docs, visit the [docs repo](https://github.com/dapr/docs) to initialize your environment. It will clone both the docs repo and this repo, so you can make changes and see it rendered within the site instantly, as well as commit and PR into this repo. + +Make sure to read the [docs contributing guide](https://docs.dapr.io/contributing/contributing-docs/) for information on style/semantics/etc. + +## Docs architecture + +The docs site is built on [Hugo](https://gohugo.io), which lives in the docs repo. This repo is setup as a git submodule so that when the repo is cloned and initialized, the dotnet-sdk repo, along with the docs, are cloned as well. 
+ +Then, in the Hugo configuration file, the `daprdocs/content` and `daprdocs/static` directories are redirected to the `daprdocs/developing-applications/sdks/dotnet` and `static/dotnet` directories, respectively. Thus, all the content within this repo is folded into the main docs site. \ No newline at end of file diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-contributing/dotnet-contributing.md b/sdkdocs/dotnet/content/en/dotnet-sdk-contributing/dotnet-contributing.md new file mode 100644 index 00000000000..8f8837fcd9c --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-contributing/dotnet-contributing.md @@ -0,0 +1,108 @@ +--- +type: docs +title: "Contributing to the .NET SDK" +linkTitle: ".NET SDK" +weight: 3000 +description: Guidelines for contributing to the Dapr .NET SDK +--- + +# Welcome! +If you're reading this, you're likely interested in contributing to Dapr and/or the Dapr .NET SDK. Welcome to the project +and thank you for your interest in contributing! + +Please review the documentation, familiarize yourself with what Dapr is and what it's seeking to accomplish and reach +out on [Discord](https://bit.ly/dapr-discord). Let us know how you'd like to contribute and we'd be happy to chime in +with ideas and suggestions. 
+ +There are many ways to contribute to Dapr: +- Submit bug reports for the [Dapr runtime](https://github.com/dapr/dapr/issues/new/choose) or the [Dapr .NET SDK](https://github.com/dapr/dotnet-sdk/issues/new/choose) +- Propose new [runtime capabilities](https://github.com/dapr/proposals/issues/new/choose) or [SDK functionality](https://github.com/dapr/dotnet-sdk/issues/new/choose) +- Improve the documentation in either the [larger Dapr project](https://github.com/dapr/docs) or the [Dapr .NET SDK specifically](https://github.com/dapr/dotnet-sdk/tree/master/daprdocs) +- Add new or improve existing [components](https://github.com/dapr/components-contrib/) that implement the various building blocks +- Augment the [.NET pluggable component SDK capabilities](https://github.com/dapr-sandbox/components-dotnet-sdk) +- Improve the Dapr .NET SDK code base and/or fix a bug (detailed below) + +If you're new to the code base, please feel encouraged to ask in the #dotnet-sdk channel in Discord about how +to implement changes or generally ask questions. You are not required to seek permission to work on anything, but do +note that if an issue is assigned to someone, it's an indication that someone might have already started work on it. +Especially if it's been a while since the last activity on that issue, please feel free to reach out and see if it's +still something they're interested in pursuing or whether you can take over, and open a pull request with your +implementation. + +If you'd like to assign yourself to an issue, respond to the conversation with "/assign" and the bot will assign you +to it. + +We have labeled some issues as `good-first-issue` or `help wanted` indicating that these are likely to be small, +self-contained changes. 
+ +If you're not certain about your implementation, please create it as a draft pull request and solicit feedback +from the [.NET maintainers](https://github.com/orgs/dapr/teams/maintainers-dotnet-sdk) by tagging +`@dapr/maintainers-dotnet-sdk` and providing some context about what you need assistance with. + +# Contribution Rules and Best Practices + +When contributing to the [.NET SDK](https://github.com/dapr/dotnet-sdk) the following rules and best-practices should +be followed. + +## Pull Requests +Pull requests that contain only formatting changes are generally discouraged. Pull requests should instead seek to +fix a bug, add new functionality, or improve on existing capabilities. + +Do aim to minimize the contents of your pull request to span only a single issue. Broad PRs that touch on a lot of files +are not likely to be reviewed or accepted in a short timeframe. Accommodating many different issues in a single PR makes +it hard to determine whether your code fully addresses the underlying issue(s) or not and complicates the code review. + +## Tests +All pull requests should include unit and/or integration tests that reflect the nature of what was added or changed +so it's clear that the functionality works as intended. Avoid using auto-generated tests that duplicate testing the +same functionality several times. Rather, seek to improve code coverage by validating each possible path of your +changes so future contributors can more easily navigate the contours of your logic and more readily identify limitations. + +## Examples + +The `examples` directory contains code samples for users to run to try out specific functionality of the various +Dapr .NET SDK packages and extensions. When writing new and updated samples keep in mind: + +- All examples should be runnable on Windows, Linux, and MacOS. 
- All files and directories should be prefixed with `dotnet-` to ensure all file/directory names are globally + unique across all Dapr documentation.
+ +If you notice that a PR has failed to validate because of a failed DCO check early on in the PR history, please consider +squashing the PR locally and resubmitting to ensure that the sign-off statement is included in the commit history. + +# Languages, Tools and Processes +All source code in the Dapr .NET SDK is written in C# and targets the latest language version available to the earliest +supported .NET SDK. As of v1.16, this means that both .NET 8 and .NET 9 are supported. The latest language version available +is [C# version 12](https://learn.microsoft.com/dotnet/csharp/whats-new/csharp-version-history#c-version-12) + +Contributors are welcome to use whatever IDE they're most comfortable developing in, but please do not submit +IDE-specific preference files along with your contributions as these will be rejected. \ No newline at end of file diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/_index.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/_index.md new file mode 100644 index 00000000000..e92be0f411f --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/_index.md @@ -0,0 +1,79 @@ +--- +type: docs +title: "Dapr .NET SDK" +linkTitle: ".NET" +weight: 1000 +description: .NET SDK packages for developing Dapr applications +no_list: true +cascade: + github_repo: https://github.com/dapr/dotnet-sdk + github_subdir: daprdocs/content/en/dotnet-sdk-docs + path_base_for_github_subdir: content/en/developing-applications/sdks/dotnet/ + github_branch: master +--- + +Dapr offers a variety of packages to help with the development of .NET applications. Using them you can create .NET clients, servers, and virtual actors with Dapr. 
+ +## Prerequisites +- [Dapr CLI]({{< ref install-dapr-cli.md >}}) installed +- Initialized [Dapr environment]({{< ref install-dapr-selfhost.md >}}) +- [.NET 8](https://dotnet.microsoft.com/download) or [.NET 9](https://dotnet.microsoft.com/download) installed + +## Installation + +To get started with the Client .NET SDK, install the Dapr .NET SDK package: + +```sh +dotnet add package Dapr.Client +``` + +## Try it out + +Put the Dapr .NET SDK to the test. Walk through the .NET quickstarts and tutorials to see Dapr in action: + +| SDK samples | Description | +| ----------- | ----------- | +| [Quickstarts]({{% ref quickstarts %}}) | Experience Dapr's API building blocks in just a few minutes using the .NET SDK. | +| [SDK samples](https://github.com/dapr/dotnet-sdk/tree/master/examples) | Clone the SDK repo to try out some examples and get started. | +| [Pub/sub tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/pub-sub) | See how Dapr .NET SDK works alongside other Dapr SDKs to enable pub/sub applications. | + +## Available packages + +| Package Name | Documentation Link | Description | +|-----------------------------------------------------------------------------------------------------------|---------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------| +| [Dapr.Client](https://www.nuget.org/packages/Dapr.Client) | [Documentation]({{% ref dotnet-client %}}) | Create .NET clients that interact with a Dapr sidecar and other Dapr applications. | +| [Dapr.AI](https://www.nuget.org/packages/Dapr.AI) | [Documentation]({{% ref dotnet-ai %}}) | Create and manage AI operations in .NET. | +| [Dapr.AI.A2a](https://www.nuget.org/packages/Dapr.AI.A2a) | | Dapr SDK for implementing agent-to-agent operations using the [A2A](https://github.com/a2aproject/a2a-dotnet) framework. 
| +| [Dapr.AI.Microsoft.Extensions](https://www.nuget.org/packages/Dapr.AI.Microsoft.Extensions) | [Documentation]({{% ref dotnet-ai-extensions-howto %}}) | Easily interact with LLMs conversationally and using tooling via the Dapr Conversation building block. | +| [Dapr.AspNetCore](https://www.nuget.org/packages/Dapr.AspNetCore) | [Documentation]({{% ref dotnet-client %}}) | Write servers and services in .NET using the Dapr SDK. Includes support and utilities providing richer integration with ASP.NET Core. | +| [Dapr.Actors](https://www.nuget.org/packages/Dapr.Actors) | [Documentation]({{% ref dotnet-actors %}}) | Create virtual actors with state, reminders/timers, and methods. | +| [Dapr.Actors.AspNetCore](https://www.nuget.org/packages/Dapr.Actors) | [Documentation]({{% ref dotnet-actors %}}) | Create virtual actors with state, reminders/timers, and methods with rich integration with ASP.NET Core. | +| [Dapr.Actors.Analyzers](https://www.nuget.org/packages/Dapr.Actors.Analyzers) | [Documentation]({{% ref dotnet-guidance-source-generators %}}) | A collection of Roslyn source generators and analyzers for enabling better practices and preventing common errors when using Dapr Actors in .NET. | +| [Dapr.Cryptography](https://www.nuget.org/packages/Dapr.Cryptography) | [Documentation]({{% ref dotnet-cryptography %}}) | Encrypt and decrypt streaming state of any size using Dapr's cryptography building block. | +| [Dapr.Jobs](https://www.nuget.org/packages/Dapr.Jobs) | [Documentation]({{% ref dotnet-jobs %}}) | Create and manage the scheduling and orchestration of jobs. | +| [Dapr.Jobs.Analyzers](https://www.nuget.org/packages/Dapr.Jobs.Analyzers) | [Documentation]({{% ref dotnet-guidance-source-generators %}}) | A collection of Roslyn source generators and analyzers for enabling better practices and preventing common errors when using Dapr Jobs in .NET. 
| +| [Dapr.DistributedLocks](https://www.nuget.org/packages/Dapr.DistributedLocks) | [Documentation]({{% ref dotnet-distributed-lock %}}) | Create and manage distributed locks for managing exclusive resource access. | +| [Dapr.Extensions.Configuration](https://www.nuget.org/packages/Dapr.Extensions.Configuration) | | Dapr secret store configuration provider implementation for `Microsoft.Extensions.Configuration`. | +| [Dapr.PluggableComponents](https://www.nuget.org/packages/Dapr.PluggableComponents) | | Used to implement pluggable components with Dapr using .NET. | +| [Dapr.PluggableComponents.AspNetCore](https://www.nuget.org/packages/Dapr.PluggableComponents.AspNetCore) | | Implement pluggable components with Dapr using .NET with rich ASP.NET Core support. | +| [Dapr.PluggableComponents.Protos](https://www.nuget.org/packages/Dapr.PluggableComponents.Protos) | | **Note:** Developers needn't install this package directly in their applications. | +| [Dapr.Messaging](https://www.nuget.org/packages/Dapr.Messaging) | [Documentation]({{% ref dotnet-messaging %}}) | Build distributed applications using the Dapr Messaging SDK that utilize messaging components like streaming pub/sub subscriptions. | +| [Dapr.Workflow](https://www.nuget.org/packages/Dapr.Workflow) | [Documentation]({{% ref dotnet-workflow %}}) | Create and manage workflows that work with other Dapr APIs. | +| [Dapr.Workflow.Analyzers](https://www.nuget.org/packages/Dapr.Workflow.Analyzers) | [Documentation]({{% ref dotnet-guidance-source-generators %}}) | A collection of Roslyn source generators and analyzers for enabling better practices and preventing common errors when using Dapr Workflows in .NET | + +## More information + +Learn more about local development options, best practices, or browse NuGet packages to add to your existing .NET +applications. 
+ +{{% cardpane %}} +{{% card title="**Development**"%}} + [Learn about local development integration options]({{% ref dotnet-integrations %}}) +{{% /card %}} +{{% card title="**Best Practices**"%}} + [Learn about best practices for developing .NET Dapr applications]({{% ref dotnet-guidance %}}) +{{% /card %}} +{{% card title="**Nuget Packages**"%}} + [NuGet packages for adding the Dapr to your .NET applications](https://www.nuget.org/profiles/dapr.io) +{{% /card %}} +{{% /cardpane %}} diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/_index.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/_index.md new file mode 100644 index 00000000000..9bab18eddf5 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/_index.md @@ -0,0 +1,11 @@ +--- +type: docs +title: "Dapr actors .NET SDK" +linkTitle: "Actors" +weight: 40000 +description: Get up and running with the Dapr actors .NET SDK +--- + +With the Dapr actor package, you can interact with Dapr virtual actors from a .NET application. + +To get started, walk through the [Dapr actors]({{% ref dotnet-actors-howto.md %}}) how-to guide. \ No newline at end of file diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-client.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-client.md new file mode 100644 index 00000000000..6431e4a493e --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-client.md @@ -0,0 +1,114 @@ +--- +type: docs +title: "The IActorProxyFactory interface" +linkTitle: "Actors client" +weight: 100000 +description: Learn how to create actor clients with the IActorProxyFactory interface +--- + +Inside of an `Actor` class or an ASP.NET Core project, the `IActorProxyFactory` interface is recommended to create actor clients. + +The `AddActors(...)` method will register actor services with ASP.NET Core dependency injection. 
+ +- **Outside of an actor instance:** The `IActorProxyFactory` instance is available through dependency injection as a singleton service. +- **Inside an actor instance:** The `IActorProxyFactory` instance is available as a property (`this.ProxyFactory`). + +The following is an example of creating a proxy inside an actor: + +```csharp +public Task GetDataAsync() +{ + var proxy = this.ProxyFactory.CreateActorProxy(ActorId.CreateRandom(), "OtherActor"); + await proxy.DoSomethingGreat(); + + return this.StateManager.GetStateAsync("my_data"); +} +``` + +In this guide, you will learn how to use `IActorProxyFactory`. + +{{% alert title="Tip" color="primary" %}} +For a non-dependency-injected application, you can use the static methods on `ActorProxy`. Since the `ActorProxy` methods are error prone, try to avoid using them when configuring custom settings. +{{% /alert %}} + +## Identifying an actor + +All of the APIs on `IActorProxyFactory` will require an actor _type_ and actor _id_ to communicate with an actor. For strongly-typed clients, you also need one of its interfaces. + +- **Actor type** uniquely identifies the actor implementation across the whole application. +- **Actor id** uniquely identifies an instance of that type. + +If you don't have an actor `id` and want to communicate with a new instance, create a random id with `ActorId.CreateRandom()`. Since the random id is a cryptographically strong identifier, the runtime will create a new actor instance when you interact with it. + +You can use the type `ActorReference` to exchange an actor type and actor id with other actors as part of messages. + +## Two styles of actor client + +The actor client supports two different styles of invocation: + +| Actor client style | Description | +| ------------------ | ----------- | +| Strongly-typed | Strongly-typed clients are based on .NET interfaces and provide the typical benefits of strong-typing. They don't work with non-.NET actors. 
| +| Weakly-typed | Weakly-typed clients use the `ActorProxy` class. It is recommended to use these only when required for interop or other advanced reasons. | + +### Using a strongly-typed client + +The following example uses the `CreateActorProxy<>` method to create a strongly-typed client. `CreateActorProxy<>` requires an actor interface type, and will return an instance of that interface. + +```csharp +// Create a proxy for IOtherActor to type OtherActor with a random id +var proxy = this.ProxyFactory.CreateActorProxy(ActorId.CreateRandom(), "OtherActor"); + +// Invoke a method defined by the interface to invoke the actor +// +// proxy is an implementation of IOtherActor so we can invoke its methods directly +await proxy.DoSomethingGreat(); +``` + +### Using a weakly-typed client + +The following example uses the `Create` method to create a weakly-typed client. `Create` returns an instance of `ActorProxy`. + +```csharp +// Create a proxy for type OtherActor with a random id +var proxy = this.ProxyFactory.Create(ActorId.CreateRandom(), "OtherActor"); + +// Invoke a method by name to invoke the actor +// +// proxy is an instance of ActorProxy. +await proxy.InvokeMethodAsync("DoSomethingGreat"); +``` + +Since `ActorProxy` is a weakly-typed proxy, you need to pass in the actor method name as a string. + +You can also use `ActorProxy` to invoke methods with both a request and a response message. Request and response messages will be serialized using the `System.Text.Json` serializer. + +```csharp +// Create a proxy for type OtherActor with a random id +var proxy = this.ProxyFactory.Create(ActorId.CreateRandom(), "OtherActor"); + +// Invoke a method on the proxy to invoke the actor +// +// proxy is an instance of ActorProxy. 
+var request = new MyRequest() { Message = "Hi, it's me.", }; +var response = await proxy.InvokeMethodAsync("DoSomethingGreat", request); +``` + +When using a weakly-typed proxy, you _must_ proactively define the correct actor method names and message types. When using a strongly-typed proxy, these names and types are defined for you as part of the interface definition. + +### Actor method invocation exception details + +The actor method invocation exception details are surfaced to the caller and the callee, providing an entry point to track down the issue. Exception details include: + - Method name + - Line number + - Exception type + - UUID + +You use the UUID to match the exception on the caller and callee side. Below is an example of exception details: +``` +Dapr.Actors.ActorMethodInvocationException: Remote Actor Method Exception, DETAILS: Exception: NotImplementedException, Method Name: ExceptionExample, Line Number: 14, Exception uuid: d291a006-84d5-42c4-b39e-d6300e9ac38b +``` + +## Next steps + +[Learn how to author and run actors with `ActorHost`]({{% ref dotnet-actors-usage.md %}}). \ No newline at end of file diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-howto.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-howto.md new file mode 100644 index 00000000000..bac91f7955f --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-howto.md @@ -0,0 +1,467 @@ +--- +type: docs +title: "How to: Run and use virtual actors in the .NET SDK" +linkTitle: "How to: Run & use virtual actors" +weight: 300000 +description: Try out .NET Dapr virtual actors with this example +--- + +The Dapr actor package allows you to interact with Dapr virtual actors from a .NET application. In this guide, you learn how to: + +- Create an Actor (`MyActor`). +- Invoke its methods on the client application. 
+ +``` +MyActor --- MyActor.Interfaces + | + +- MyActorService + | + +- MyActorClient +``` + +**The interface project (\MyActor\MyActor.Interfaces)** + +This project contains the interface definition for the actor. Actor interfaces can be defined in any project with any name. The interface defines the actor contract shared by: + +- The actor implementation +- The clients calling the actor + +Because client projects may depend on it, it's better to define it in an assembly separate from the actor implementation. + +**The actor service project (\MyActor\MyActorService)** + +This project implements the ASP.Net Core web service that hosts the actor. It contains the implementation of the actor, `MyActor.cs`. An actor implementation is a class that: + +- Derives from the base type Actor +- Implements the interfaces defined in the `MyActor.Interfaces` project. + +An actor class must also implement a constructor that accepts an `ActorService` instance and an `ActorId`, and passes them to the base Actor class. + +**The actor client project (\MyActor\MyActorClient)** + +This project contains the implementation of the actor client which calls MyActor's method defined in Actor Interfaces. + +## Prerequisites +- [Dapr CLI]({{< ref install-dapr-cli.md >}}) installed. +- Initialized [Dapr environment]({{< ref install-dapr-selfhost.md >}}). +- [.NET 8](https://dotnet.microsoft.com/download) or [.NET 9](https://dotnet.microsoft.com/download) installed + +## Step 0: Prepare + +Since we'll be creating 3 projects, choose an empty directory to start from, and open it in your terminal of choice. + +## Step 1: Create actor interfaces + +Actor interface defines the actor contract that is shared by the actor implementation and the clients calling the actor. 
- The return type of an Actor method must be `Task` or `Task<T>`
// Provides opportunity to perform optional cleanup.
// Implement IRemindable.ReceiveReminderAsync(), which is a callback invoked when an actor reminder is triggered.
+ // + public Task ReceiveReminderAsync(string reminderName, byte[] state, TimeSpan dueTime, TimeSpan period) + { + Console.WriteLine("ReceiveReminderAsync is called!"); + return Task.CompletedTask; + } + + /// + /// Register MyTimer timer with the actor + /// + public Task RegisterTimer() + { + return this.RegisterTimerAsync( + "MyTimer", // The name of the timer + nameof(this.OnTimerCallBack), // Timer callback + null, // User state passed to OnTimerCallback() + TimeSpan.FromSeconds(5), // Time to delay before the async callback is first invoked + TimeSpan.FromSeconds(5)); // Time interval between invocations of the async callback + } + + /// + /// Unregister MyTimer timer with the actor + /// + public Task UnregisterTimer() + { + Console.WriteLine("Unregistering MyTimer..."); + return this.UnregisterTimerAsync("MyTimer"); + } + + /// + /// Timer callback once timer is expired + /// + private Task OnTimerCallBack(byte[] data) + { + Console.WriteLine("OnTimerCallBack is called!"); + return Task.CompletedTask; + } + } +} +``` + +### Register actor runtime with ASP.NET Core startup + +The Actor runtime is configured through ASP.NET Core `Startup.cs`. + +The runtime uses the ASP.NET Core dependency injection system to register actor types and essential services. This integration is provided through the `AddActors(...)` method call in `ConfigureServices(...)`. Use the delegate passed to `AddActors(...)` to register actor types and configure actor runtime settings. You can register additional types for dependency injection inside `ConfigureServices(...)`. These will be available to be injected into the constructors of your Actor types. + +Actors are implemented via HTTP calls with the Dapr runtime. This functionality is part of the application's HTTP processing pipeline and is registered inside `UseEndpoints(...)` inside `Configure(...)`. 
+ +Paste the following code into `Startup.cs` in the `MyActorService` project: + +```csharp +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Hosting; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; + +namespace MyActorService +{ + public class Startup + { + public void ConfigureServices(IServiceCollection services) + { + services.AddActors(options => + { + // Register actor types and configure actor settings + options.Actors.RegisterActor(); + }); + } + + public void Configure(IApplicationBuilder app, IWebHostEnvironment env) + { + if (env.IsDevelopment()) + { + app.UseDeveloperExceptionPage(); + } + + app.UseRouting(); + + // Register actors handlers that interface with the Dapr runtime. + app.MapActorsHandlers(); + } + } +} +``` + +## Step 3: Add a client + +Create a simple console app to call the actor service. Dapr SDK provides Actor Proxy client to invoke actor methods defined in Actor Interface. + +### Create actor client project and add dependencies + +```bash +# Create Actor's Client +dotnet new console -o MyActorClient + +cd MyActorClient + +# Add Dapr.Actors nuget package. Please use the latest package version from nuget.org +dotnet add package Dapr.Actors + +# Add Actor Interface reference +dotnet add reference ../MyActor.Interfaces/MyActor.Interfaces.csproj + +cd .. +``` + +### Invoke actor methods with strongly-typed client + +You can use `ActorProxy.Create(..)` to create a strongly-typed client and invoke methods on the actor. 
+ +Paste the following code into `Program.cs` in the `MyActorClient` project: + +```csharp +using System; +using System.Threading.Tasks; +using Dapr.Actors; +using Dapr.Actors.Client; +using MyActor.Interfaces; + +namespace MyActorClient +{ + class Program + { + static async Task MainAsync(string[] args) + { + Console.WriteLine("Startup up..."); + + // Registered Actor Type in Actor Service + var actorType = "MyActor"; + + // An ActorId uniquely identifies an actor instance + // If the actor matching this id does not exist, it will be created + var actorId = new ActorId("1"); + + // Create the local proxy by using the same interface that the service implements. + // + // You need to provide the type and id so the actor can be located. + var proxy = ActorProxy.Create(actorId, actorType); + + // Now you can use the actor interface to call the actor's methods. + Console.WriteLine($"Calling SetDataAsync on {actorType}:{actorId}..."); + var response = await proxy.SetDataAsync(new MyData() + { + PropertyA = "ValueA", + PropertyB = "ValueB", + }); + Console.WriteLine($"Got response: {response}"); + + Console.WriteLine($"Calling GetDataAsync on {actorType}:{actorId}..."); + var savedData = await proxy.GetDataAsync(); + Console.WriteLine($"Got response: {savedData}"); + } + } +} +``` + +## Running the code + +The projects that you've created can now to test the sample. + +1. Run MyActorService + + Since `MyActorService` is hosting actors, it needs to be run with the Dapr CLI. + + ```bash + cd MyActorService + dapr run --app-id myapp --app-port 5000 --dapr-http-port 3500 -- dotnet run + ``` + + You will see commandline output from both `daprd` and `MyActorService` in this terminal. You should see something like the following, which indicates that the application started successfully. + + ```txt + ... + ℹ️ Updating metadata for app command: dotnet run + ✅ You're up and running! Both Dapr and your app logs will appear here. 
+ + == APP == info: Microsoft.Hosting.Lifetime[0] + + == APP == Now listening on: https://localhost:5001 + + == APP == info: Microsoft.Hosting.Lifetime[0] + + == APP == Now listening on: http://localhost:5000 + + == APP == info: Microsoft.Hosting.Lifetime[0] + + == APP == Application started. Press Ctrl+C to shut down. + + == APP == info: Microsoft.Hosting.Lifetime[0] + + == APP == Hosting environment: Development + + == APP == info: Microsoft.Hosting.Lifetime[0] + + == APP == Content root path: /Users/ryan/actortest/MyActorService + ``` + +2. Run MyActorClient + + `MyActorClient` is acting as the client, and it can be run normally with `dotnet run`. + + Open a new terminal and navigate to the `MyActorClient` directory. Then run the project with: + + ```bash + dotnet run + ``` + + You should see commandline output like: + + ```txt + Startup up... + Calling SetDataAsync on MyActor:1... + Got response: Success + Calling GetDataAsync on MyActor:1... + Got response: PropertyA: ValueA, PropertyB: ValueB + ``` + +> 💡 This sample relies on a few assumptions. The default listening port for an ASP.NET Core web project is 5000, which is being passed to `dapr run` as `--app-port 5000`. The default HTTP port for the Dapr sidecar is 3500. We're telling the sidecar for `MyActorService` to use 3500 so that `MyActorClient` can rely on the default value. + +Now you have successfully created an actor service and client. See the related links section to learn more. 
+ +## Related links + +- [.NET Dapr Actors client guide]({{% ref dotnet-actors-client.md %}}) +- [.NET Dapr Actors usage guide]({{% ref dotnet-actors-usage.md %}}) diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-serialization.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-serialization.md new file mode 100644 index 00000000000..d246e3575cd --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-serialization.md @@ -0,0 +1,595 @@ +--- +type: docs +title: "Actor serialization in the .NET SDK" +linkTitle: "Actor serialization" +weight: 300000 +description: Necessary steps to serialize your types for remoted and non-remoted Actors in .NET +--- +# Actor Serialization + +The Dapr actor package enables you to use Dapr virtual actors within a .NET application with either a weakly- or strongly-typed client. Each utilizes a different serialization approach. This document will review the differences and convey a few key ground rules to understand in either scenario. + +Please be advised that it is not a supported scenario to use the weakly- or strongly typed actor clients interchangeably because of these different serialization approaches. The data persisted using one Actor client will not be accessible using the other Actor client, so it is important to pick one and use it consistently throughout your application. + +## Weakly-typed Dapr Actor client +In this section, you will learn how to configure your C# types so they are properly serialized and deserialized at runtime when using a weakly-typed actor client. These clients use string-based names of methods with request and response payloads that are serialized using the System.Text.Json serializer. Please note that this serialization framework is not specific to Dapr and is separately maintained by the .NET team within the [.NET GitHub repository](https://github.com/dotnet/runtime/tree/main/src/libraries/System.Text.Json). 
+ +When using the weakly-typed Dapr Actor client to invoke methods from your various actors, it's not necessary to independently serialize or deserialize the method payloads as this will happen transparently on your behalf by the SDK. + +The client will use the latest version of System.Text.Json available for the version of .NET you're building against and serialization is subject to all the inherent capabilities provided in the [associated .NET documentation](https://learn.microsoft.com/dotnet/standard/serialization/system-text-json/overview). + +The serializer will be configured to use the `JsonSerializerOptions.Web` [default options](https://learn.microsoft.com/dotnet/standard/serialization/system-text-json/configure-options?pivots=dotnet-8-0#web-defaults-for-jsonserializeroptions) unless overridden with a custom options configuration which means the following are applied: +- Deserialization of the property name is performed in a case-insensitive manner +- Serialization of the property name is performed using [camel casing](https://en.wikipedia.org/wiki/Camel_case) unless the property is overridden with a `[JsonPropertyName]` attribute +- Deserialization will read numeric values from number and/or string values + +### Basic Serialization +In the following example, we present a simple class named Doodad though it could just as well be a record as well. + +```csharp +public class Doodad +{ + public Guid Id { get; set; } + public string Name { get; set; } + public int Count { get; set; } +} +``` + +By default, this will serialize using the names of the members as used in the type and whatever values it was instantiated with: + +```json +{"id": "a06ced64-4f42-48ad-84dd-46ae6a7e333d", "name": "DoodadName", "count": 5} +``` + +### Override Serialized Property Name +The default property names can be overridden by applying the `[JsonPropertyName]` attribute to desired properties. 
+ +Generally, this isn't going to be necessary for types you're persisting to the actor state as you're not intended to read or write them independent of Dapr-associated functionality, but +the following is provided just to clearly illustrate that it's possible. + +#### Override Property Names on Classes +Here's an example demonstrating the use of `JsonPropertyName` to change the name for the first property following serialization. Note that the last usage of `JsonPropertyName` on the `Count` property +matches what it would be expected to serialize to. This is largely just to demonstrate that applying this attribute won't negatively impact anything - in fact, it might be preferable if you later +decide to change the default serialization options but still need to consistently access the properties previously serialized before that change as `JsonPropertyName` will override those options. + +```csharp +public class Doodad +{ + [JsonPropertyName("identifier")] + public Guid Id { get; set; } + public string Name { get; set; } + [JsonPropertyName("count")] + public int Count { get; set; } +} +``` + +This would serialize to the following: + +```json +{"identifier": "a06ced64-4f42-48ad-84dd-46ae6a7e333d", "name": "DoodadName", "count": 5} +``` + +#### Override Property Names on Records +Let's try doing the same thing with a record from C# 12 or later: + +```csharp +public record Thingy(string Name, [JsonPropertyName("count")] int Count); +``` + +Because the argument passed in a primary constructor (introduced in C# 12) can be applied to either a property or field within a record, using the `[JsonPropertyName]` attribute may +require specifying that you intend the attribute to apply to a property and not a field in some ambiguous cases. 
Should this be necessary, you'd indicate as much in the primary constructor with: + +```csharp +public record Thingy(string Name, [property: JsonPropertyName("count")] int Count); +``` + +If `[property: ]` is applied to the `[JsonPropertyName]` attribute where it's not necessary, it will not negatively impact serialization or deserialization as the operation will +proceed normally as though it were a property (as it typically would if not marked as such). + +### Enumeration types +Enumerations, including flat enumerations are serializable to JSON, but the value persisted may surprise you. Again, it's not expected that the developer should ever engage +with the serialized data independently of Dapr, but the following information may at least help in diagnosing why a seemingly mild version migration isn't working as expected. + +Take the following `enum` type providing the various seasons in the year: + +```csharp +public enum Season +{ + Spring, + Summer, + Fall, + Winter +} +``` + +We'll go ahead and use a separate demonstration type that references our `Season` and simultaneously illustrate how this works with records: + +```csharp +public record Engagement(string Name, Season TimeOfYear); +``` + +Given the following initialized instance: + +```csharp +var myEngagement = new Engagement("Ski Trip", Season.Winter); +``` + +This would serialize to the following JSON: +```json +{"name": "Ski Trip", "season": 3} +``` + +That might be unexpected that our `Season.Winter` value was represented as a `3`, but this is because the serializer is going to automatically use numeric representations +of the enum values starting with zero for the first value and incrementing the numeric value for each additional value available. Again, if a migration were taking place and +a developer had flipped the order of the enums, this would affect a breaking change in your solution as the serialized numeric values would point to different values when deserialized. 
+ +Rather, there is a `JsonConverter` available with `System.Text.Json` that will instead opt to use a string-based value instead of the numeric value. The `[JsonConverter]` attribute needs +to be applied to the enum type itself to enable this, but will then be realized in any downstream serialization or deserialization operation that references the enum. + +```csharp +[JsonConverter(typeof(JsonStringEnumConverter))] +public enum Season +{ + Spring, + Summer, + Fall, + Winter +} +``` + +Using the same values from our `myEngagement` instance above, this would produce the following JSON instead: + +```json +{"name": "Ski Trip", "season": "Winter"} +``` + +As a result, the enum members can be shifted around without fear of introducing errors during deserialization. + +#### Custom Enumeration Values + +The System.Text.Json serialization platform doesn't, out of the box, support the use of `[EnumMember]` to allow you to change the value of an enum that's used during serialization or deserialization, but +there are scenarios where this could be useful. Again, assume that you're tasked with refactoring the solution to apply some better names to your various +enums. You're using the `JsonStringEnumConverter` detailed above so you're saving the name of the enum value instead of a numeric value, but if you change +the enum name, that will introduce a breaking change as the name will no longer match what's in state. + +Do note that if you opt into using this approach, you should decorate all your enum members with the `[EnumMember]` attribute so that the values are consistently applied for each enum value instead +of haphazardly. Nothing will validate this at build or runtime, but it is considered a best practice operation. + +How can you specify the precise value persisted while still changing the name of the enum member in this scenario? 
Use a custom `JsonConverter` with an extension method that can pull the value +out of the attached `[EnumMember]` attributes where provided. Add the following to your solution: + +```csharp +public sealed class EnumMemberJsonConverter : JsonConverter where T : struct, Enum +{ + /// Reads and converts the JSON to type . + /// The reader. + /// The type to convert. + /// An object that specifies serialization options to use. + /// The converted value. + public override T Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + // Get the string value from the JSON reader + var value = reader.GetString(); + + // Loop through all the enum values + foreach (var enumValue in Enum.GetValues()) + { + // Get the value from the EnumMember attribute, if any + var enumMemberValue = GetValueFromEnumMember(enumValue); + + // If the values match, return the enum value + if (value == enumMemberValue) + { + return enumValue; + } + } + + // If no match found, throw an exception + throw new JsonException($"Invalid value for {typeToConvert.Name}: {value}"); + } + + /// Writes a specified value as JSON. + /// The writer to write to. + /// The value to convert to JSON. + /// An object that specifies serialization options to use. 
+ public override void Write(Utf8JsonWriter writer, T value, JsonSerializerOptions options) + { + // Get the value from the EnumMember attribute, if any + var enumMemberValue = GetValueFromEnumMember(value); + + // Write the value to the JSON writer + writer.WriteStringValue(enumMemberValue); + } + + private static string GetValueFromEnumMember(T value) + { + MemberInfo[] member = typeof(T).GetMember(value.ToString(), BindingFlags.DeclaredOnly | BindingFlags.Static | BindingFlags.Public); + if (member.Length == 0) + return value.ToString(); + object[] customAttributes = member.GetCustomAttributes(typeof(EnumMemberAttribute), false); + if (customAttributes.Length != 0) + { + EnumMemberAttribute enumMemberAttribute = (EnumMemberAttribute)customAttributes; + if (enumMemberAttribute != null && enumMemberAttribute.Value != null) + return enumMemberAttribute.Value; + } + return value.ToString(); + } +} +``` + +Now let's add a sample enumerator. We'll set a value that uses the lower-case version of each enum member to demonstrate this. Don't forget to decorate the enum with the `JsonConverter` +attribute and reference our custom converter in place of the numeral-to-string converter used in the last section. + +```csharp +[JsonConverter(typeof(EnumMemberJsonConverter))] +public enum Season +{ + [EnumMember(Value="spring")] + Spring, + [EnumMember(Value="summer")] + Summer, + [EnumMember(Value="fall")] + Fall, + [EnumMember(Value="winter")] + Winter +} +``` + +Let's use our sample record from before. 
We'll also add a `[JsonPropertyName]` attribute just to augment the demonstration: +```csharp +public record Engagement([property: JsonPropertyName("event")] string Name, Season TimeOfYear); +``` + +And finally, let's initialize a new instance of this: + +```csharp +var myEngagement = new Engagement("Conference", Season.Fall); +``` + +This time, serialization will take into account the values from the attached `[EnumMember]` attribute providing us a mechanism to refactor our application without necessitating +a complex versioning scheme for our existing enum values in the state. + +```json +{"event": "Conference", "season": "fall"} +``` + +### Polymorphic Serialization +When working with polymorphic types in Dapr Actor clients, it is essential to handle serialization and deserialization correctly to ensure that the appropriate +derived types are instantiated. Polymorphic serialization allows you to serialize objects of a base type while preserving the specific derived type information. + +To enable polymorphic deserialization, you must use the `[JsonPolymorphic]` attribute on your base type. Additionally, +it is crucial to include the `[AllowOutOfOrderMetadataProperties]` attribute to ensure that metadata properties, such as `$type` +can be processed correctly by System.Text.Json even if they are not the first properties in the JSON object. + +#### Example +```cs +[JsonPolymorphic] +[AllowOutOfOrderMetadataProperties] +public abstract class SampleValueBase +{ + public string CommonProperty { get; set; } +} + +public class DerivedSampleValue : SampleValueBase +{ + public string SpecificProperty { get; set; } +} +``` +In this example, the `SampleValueBase` class is marked with both `[JsonPolymorphic]` and `[AllowOutOfOrderMetadataProperties]` +attributes. This setup ensures that the `$type` metadata property can be correctly identified and processed during +deserialization, regardless of its position in the JSON object. 
+ +By following this approach, you can effectively manage polymorphic serialization and deserialization in your Dapr Actor +clients, ensuring that the correct derived types are instantiated and used. + + +## Strongly-typed Dapr Actor client +In this section, you will learn how to configure your classes and records so they are properly serialized and deserialized at runtime when using a strongly-typed actor client. These clients are implemented using .NET interfaces and are not compatible with Dapr Actors written using other languages. + +This actor client serializes data using an engine called the [Data Contract Serializer](https://learn.microsoft.com/dotnet/framework/wcf/feature-details/serializable-types) which converts your C# types to and from XML documents. This serialization framework is not specific to Dapr and is separately maintained by the .NET team within the [.NET GitHub repository](https://github.com/dotnet/runtime/blob/main/src/libraries/System.Private.DataContractSerialization/src/System/Runtime/Serialization/DataContractSerializer.cs). + +When sending or receiving primitives (like strings or ints), this serialization happens transparently and there's no requisite preparation needed on your part. However, when working with complex types such as those you create, there are some important rules to take into consideration so this process works smoothly. 
+ +### Serializable Types +There are several important considerations to keep in mind when using the Data Contract Serializer: + +- By default, all types, read/write properties (after construction) and fields marked as publicly visible are serialized +- All types must either expose a public parameterless constructor or be decorated with the DataContractAttribute attribute +- Init-only setters are only supported with the use of the DataContractAttribute attribute +- Read-only fields, properties without a Get and Set method and internal or properties with private Get and Set methods are ignored during serialization +- Serialization is supported for types that use other complex types that are not themselves marked with the DataContractAttribute attribute through the use of the KnownTypesAttribute attribute +- If a type is marked with the DataContractAttribute attribute, all members you wish to serialize and deserialize must be decorated with the DataMemberAttribute attribute as well or they'll be set to their default values + +### How does deserialization work? +The approach used for deserialization depends on whether or not the type is decorated with the [DataContractAttribute](https://learn.microsoft.com/dotnet/api/system.runtime.serialization.datacontractattribute) attribute. If this attribute isn't present, an instance of the type is created using the parameterless constructor. Each of the properties and fields are then mapped into the type using their respective setters and the instance is returned to the caller. + +If the type _is_ marked with `[DataContract]`, the serializer instead uses reflection to read the metadata of the type and determine which properties or fields should be included based on whether or not they're marked with the DataMemberAttribute attribute as it's performed on an opt-in basis. 
+ It then allocates an uninitialized object in memory (avoiding the use of any constructors, parameterless or not) and then sets the value directly on each mapped property or field, even if private or uses init-only setters. Serialization callbacks are invoked as applicable throughout this process and then the object is returned to the caller. + +Use of the serialization attributes is highly recommended as they grant more flexibility to override names and namespaces and generally use more of the modern C# functionality. While the default serializer can be relied on for primitive types, it's not recommended for any of your own types, whether they be classes, structs or records. It's recommended that if you decorate a type with the DataContractAttribute attribute, you also explicitly decorate each of the members you want to serialize or deserialize with the DataMemberAttribute attribute as well. + +#### .NET Classes +Classes are fully supported in the Data Contract Serializer provided that the other rules detailed on this page and the [Data Contract Serializer](https://learn.microsoft.com/dotnet/framework/wcf/feature-details/serializable-types) documentation are also followed. + +The most important thing to remember here is that you must either have a public parameterless constructor or you must decorate it with the appropriate attributes. Let's review some examples to really clarify what will and won't work. + +In the following example, we present a simple class named Doodad. We don't provide an explicit constructor here, so the compiler will provide a default parameterless constructor. Because we're using [supported primitive types](###supported-primitive-types) (Guid, string and int32) and all our members have a public getter and setter, no attributes are required and we'll be able to use this class without issue when sending and receiving it from a Dapr actor method. 
+ +```csharp +public class Doodad +{ + public Guid Id { get; set; } + public string Name { get; set; } + public int Count { get; set; } +} +``` + +By default, this will serialize using the names of the members as used in the type and whatever values it was instantiated with: + +```xml + + a06ced64-4f42-48ad-84dd-46ae6a7e333d + DoodadName + 5 + +``` + +So let's tweak it - let's add our own constructor and only use init-only setters on the members. This will fail to serialize and deserialize not because of the use of the init-only setters, but because there's no parameterless constructors. + +```csharp +// WILL NOT SERIALIZE PROPERLY! +public class Doodad +{ + public Doodad(string name, int count) + { + Id = Guid.NewGuid(); + Name = name; + Count = count; + } + + public Guid Id { get; set; } + public string Name { get; init; } + public int Count { get; init; } +} +``` + +If we add a public parameterless constructor to the type, we're good to go and this will work without further annotations. + +```csharp +public class Doodad +{ + public Doodad() + { + } + + public Doodad(string name, int count) + { + Id = Guid.NewGuid(); + Name = name; + Count = count; + } + + public Guid Id { get; set; } + public string Name { get; set; } + public int Count { get; set; } +} +``` + +But what if we don't want to add this constructor? Perhaps you don't want your developers to accidentally create an instance of this Doodad using an unintended constructor. That's where the more flexible attributes are useful. If you decorate your type with a [DataContractAttribute](https://learn.microsoft.com/dotnet/api/system.runtime.serialization.datacontractattribute) attribute, you can drop your parameterless constructor and it will work once again. 
+ +```csharp +[DataContract] +public class Doodad +{ + public Doodad(string name, int count) + { + Id = Guid.NewGuid(); + Name = name; + Count = count; + } + + public Guid Id { get; set; } + public string Name { get; set; } + public int Count { get; set; } +} +``` + +In the above example, we don't need to also use the [DataMemberAttribute](https://learn.microsoft.com/dotnet/api/system.runtime.serialization.datamemberattribute) attributes because again, we're using [built-in primitives](###supported-primitive-types) that the serializer supports. But, we do get more flexibility if we use the attributes. From the DataContractAttribute attribute, we can specify our own XML namespace with the Namespace argument and, via the Name argument, change the name of the type as used when serialized into the XML document. + +It's a recommended practice to append the DataContractAttribute attribute to the type and the DataMemberAttribute attributes to all the members you want to serialize anyway - if they're not necessary and you're not changing the default values, they'll just be ignored, but they give you a mechanism to opt into serializing members that wouldn't otherwise have been included such as those marked as private or that are themselves complex types or collections. + +Note that if you do opt into serializing your private members, their values will be serialized into plain text - they can very well be viewed, intercepted and potentially manipulated based on how you're handing the data once serialized, so it's an important consideration whether you want to mark these members or not in your use case. + +In the following example, we'll look at using the attributes to change the serialized names of some of the members as well as introduce the [IgnoreDataMemberAttribute](https://learn.microsoft.com/dotnet/api/system.runtime.serialization.ignoredatamemberattribute) attribute. 
As the name indicates, this tells the serializer to skip this property even though it'd be otherwise eligible to serialize. Further, because I'm decorating the type with the DataContractAttribute attribute, it means that I can use init-only setters on the properties. + +```csharp +[DataContract(Name="Doodad")] +public class Doodad +{ + public Doodad(string name = "MyDoodad", int count = 5) + { + Id = Guid.NewGuid(); + Name = name; + Count = count; + } + + [DataMember(Name = "id")] + public Guid Id { get; init; } + [IgnoreDataMember] + public string Name { get; init; } + [DataMember] + public int Count { get; init; } +} +``` + +When this is serialized, because we're changing the names of the serialized members, we can expect a new instance of Doodad using the default values this to be serialized as: + +```xml + + a06ced64-4f42-48ad-84dd-46ae6a7e333d + 5 + +``` + +##### Classes in C# 12 - Primary Constructors +C# 12 brought us primary constructors on classes. Use of a primary constructor means the compiler will be prevented from creating the default implicit parameterless constructor. While a primary constructor on a class doesn't generate any public properties, it does mean that if you pass this primary constructor any arguments or have non-primitive types in your class, you'll either need to specify your own parameterless constructor or use the serialization attributes. + +Here's an example where we're using the primary constructor to inject an ILogger to a field and add our own parameterless constructor without the need for any attributes. 
+ +```csharp +public class Doodad(ILogger _logger) +{ + public Doodad() {} //Our parameterless constructor + + public Doodad(string name, int count) + { + Id = Guid.NewGuid(); + Name = name; + Count = count; + } + + public Guid Id { get; set; } + public string Name { get; set; } + public int Count { get; set; } +} +``` + +And using our serialization attributes (again, opting for init-only setters since we're using the serialization attributes): + +```csharp +[DataContract] +public class Doodad(ILogger _logger) +{ + public Doodad(string name, int count) + { + Id = Guid.NewGuid(); + Name = name; + Count = count; + } + + [DataMember] + public Guid Id { get; init; } + [DataMember] + public string Name { get; init; } + [DataMember] + public int Count { get; init; } +} +``` + +#### .NET Structs +Structs are supported by the Data Contract serializer provided that they are marked with the DataContractAttribute attribute and the members you wish to serialize are marked with the DataMemberAttribute attribute. Further, to support deserialization, the struct will also need to have a parameterless constructor. This works even if you define your own parameterless constructor as enabled in C# 10. + +```csharp +[DataContract] +public struct Doodad +{ + [DataMember] + public int Count { get; set; } +} +``` + +#### .NET Records +Records were introduced in C# 9 and follow precisely the same rules as classes when it comes to serialization. We recommend that you should decorate all your records with the DataContractAttribute attribute and members you wish to serialize with DataMemberAttribute attributes so you don't experience any deserialization issues using this or other newer C# functionalities. Because record classes use init-only setters for properties by default and encourage the use of the primary constructor, applying these attributes to your types ensures that the serializer can properly otherwise accommodate your types as-is. 
+ +Typically records are presented as a simple one-line statement using the new primary constructor concept: + +```csharp +public record Doodad(Guid Id, string Name, int Count); +``` + +This will throw an error encouraging the use of the serialization attributes as soon as you use it in a Dapr actor method invocation because there's no parameterless constructor available nor is it decorated with the aforementioned attributes. + +Here we add an explicit parameterless constructor and it won't throw an error, but none of the values will be set during deserialization since they're created with init-only setters. Because this doesn't use the DataContractAttribute attribute or the DataMemberAttribute attribute on any members, the serializer will be unable to map the target members correctly during deserialization. +```csharp +public record Doodad(Guid Id, string Name, int Count) +{ + public Doodad() {} +} +``` + +This approach does without the additional constructor and instead relies on the serialization attributes. Because we mark the type with the DataContractAttribute attribute and decorate each member with its own DataMemberAttribute attribute, the serialization engine will be able to map from the XML document to our type without issue. 
+```csharp +[DataContract] +public record Doodad( + [property: DataMember] Guid Id, + [property: DataMember] string Name, + [property: DataMember] int Count) +``` + +#### Supported Primitive Types +There are several types built into .NET that are considered primitive and eligible for serialization without additional effort on the part of the developer: + +- [Byte](https://learn.microsoft.com/dotnet/api/system.byte) +- [SByte](https://learn.microsoft.com/dotnet/api/system.sbyte) +- [Int16](https://learn.microsoft.com/dotnet/api/system.int16) +- [Int32](https://learn.microsoft.com/dotnet/api/system.int32) +- [Int64](https://learn.microsoft.com/dotnet/api/system.int64) +- [UInt16](https://learn.microsoft.com/dotnet/api/system.uint16) +- [UInt32](https://learn.microsoft.com/dotnet/api/system.uint32) +- [UInt64](https://learn.microsoft.com/dotnet/api/system.uint64) +- [Single](https://learn.microsoft.com/dotnet/api/system.single) +- [Double](https://learn.microsoft.com/dotnet/api/system.double) +- [Boolean](https://learn.microsoft.com/dotnet/api/system.boolean) +- [Char](https://learn.microsoft.com/dotnet/api/system.char) +- [Decimal](https://learn.microsoft.com/dotnet/api/system.decimal) +- [Object](https://learn.microsoft.com/dotnet/api/system.object) +- [String](https://learn.microsoft.com/dotnet/api/system.string) + +There are additional types that aren't actually primitives but have similar built-in support: + +- [DateTime](https://learn.microsoft.com/dotnet/api/system.datetime) +- [TimeSpan](https://learn.microsoft.com/dotnet/api/system.timespan) +- [Guid](https://learn.microsoft.com/dotnet/api/system.guid) +- [Uri](https://learn.microsoft.com/dotnet/api/system.uri) +- [XmlQualifiedName](https://learn.microsoft.com/dotnet/api/system.xml.xmlqualifiedname) + +Again, if you want to pass these types around via your actor methods, no additional consideration is necessary as they'll be serialized and deserialized without issue. 
+ Further, types that are themselves marked with the [SerializableAttribute](https://learn.microsoft.com/dotnet/api/system.serializableattribute) attribute will be serialized. + +#### Enumeration Types +Enumerations, including flag enumerations are serializable if appropriately marked. The enum members you wish to be serialized must be marked with the [EnumMemberAttribute](https://learn.microsoft.com/dotnet/api/system.runtime.serialization.enummemberattribute) attribute in order to be serialized. Passing a custom value into the optional Value argument on this attribute will allow you to specify the value used for the member in the serialized document instead of having the serializer derive it from the name of the member. + +The enum type does not require that the type be decorated with the `DataContractAttribute` attribute - only that the members you wish to serialize be decorated with the `EnumMemberAttribute` attributes. + +```csharp +public enum Colors +{ + [EnumMember] + Red, + [EnumMember(Value="g")] + Green, + Blue, //Even if used by a type, this value will not be serialized as it's not decorated with the EnumMember attribute +} +``` + +#### Collection Types +With regards to the data contract serializer, all collection types that implement the [IEnumerable](https://learn.microsoft.com/dotnet/api/system.collections.ienumerable) interface including arrays and generic collections are considered collections. Those types that implement [IDictionary](https://learn.microsoft.com/dotnet/api/system.collections.idictionary) or the generic [IDictionary](https://learn.microsoft.com/dotnet/api/system.collections.generic.idictionary-2) are considered dictionary collections; all others are list collections. + +Not unlike other complex types, collection types must have a parameterless constructor available. Further, they must also have a method called Add so they can be properly serialized and deserialized. 
The types used by these collection types must themselves be marked with the `DataContractAttribute` attribute or otherwise be serializable as described throughout this document. + +#### Data Contract Versioning +As the data contract serializer is only used in Dapr with respect to serializing the values in the .NET SDK to and from the Dapr actor instances via the proxy methods, there's little need to consider versioning of data contracts as the data isn't being persisted between application versions using the same serializer. For those interested in learning more about data contract versioning visit [here](https://learn.microsoft.com/dotnet/framework/wcf/feature-details/data-contract-versioning). + +#### Known Types +Nesting your own complex types is easily accommodated by marking each of the types with the [DataContractAttribute](https://learn.microsoft.com/dotnet/api/system.runtime.serialization.datacontractattribute) attribute. This informs the serializer as to how deserialization should be performed. +But what if you're working with polymorphic types and one of your members is a base class or interface with derived classes or other implementations? Here, you'll use the [KnownTypeAttribute](https://learn.microsoft.com/dotnet/api/system.runtime.serialization.knowntypeattribute) attribute to give a hint to the serializer about how to proceed. + +When you apply the [KnownTypeAttribute](https://learn.microsoft.com/dotnet/api/system.runtime.serialization.knowntypeattribute) attribute to a type, you are informing the data contract serializer about what subtypes it might encounter allowing it to properly handle the serialization and deserialization of these types, even when the actual type at runtime is different from the declared type. 
+
+```csharp
+[DataContract]
+[KnownType(typeof(DerivedClass))]
+public class BaseClass
+{
+    //Members of the base class
+}
+
+[DataContract]
+public class DerivedClass : BaseClass
+{
+    //Additional members of the derived class
+}
+```
+
+In this example, the `BaseClass` is marked with `[KnownType(typeof(DerivedClass))]` which tells the data contract serializer that `DerivedClass` is a possible implementation of `BaseClass` that it may need to serialize or deserialize. Without this attribute, the serializer would not be aware of the `DerivedClass` when it encounters an instance of `BaseClass` that is actually of type `DerivedClass` and this could lead to a serialization exception because the serializer would not know how to handle the derived type. By specifying all possible derived types as known types, you ensure that the serializer can process the type and its members correctly.
+
+For more information and examples about using `[KnownType]`, please refer to the [official documentation](https://learn.microsoft.com/dotnet/framework/wcf/feature-details/data-contract-known-types).
\ No newline at end of file diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-usage.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-usage.md new file mode 100644 index 00000000000..335aee90a2d --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-actors/dotnet-actors-usage.md @@ -0,0 +1,244 @@ +--- +type: docs +title: "Author & run actors" +linkTitle: "Authoring actors" +weight: 200000 +description: Learn all about authoring and running actors with the .NET SDK +--- + +## Author actors + +### ActorHost + +The `ActorHost`: + +- Is a required constructor parameter of all actors +- Is provided by the runtime +- Must be passed to the base class constructor +- Contains all of the state that allows that actor instance to communicate with the runtime + +```csharp +internal class MyActor : Actor, IMyActor, IRemindable +{ + public MyActor(ActorHost host) // Accept ActorHost in the constructor + : base(host) // Pass ActorHost to the base class constructor + { + } +} +``` + +Since the `ActorHost` contains state unique to the actor, you don't need to pass the instance into other parts of your code. It's recommended only create your own instances of `ActorHost` in tests. + +### Dependency injection + +Actors support [dependency injection](https://docs.microsoft.com/aspnet/core/fundamentals/dependency-injection) of additional parameters into the constructor. Any other parameters you define will have their values satisfied from the dependency injection container. + +```csharp +internal class MyActor : Actor, IMyActor, IRemindable +{ + public MyActor(ActorHost host, BankService bank) // Accept BankService in the constructor + : base(host) + { + ... + } +} +``` + +An actor type should have a single `public` constructor. 
The actor infrastructure uses the [`ActivatorUtilities`](https://docs.microsoft.com/dotnet/core/extensions/dependency-injection#constructor-injection-behavior) pattern for constructing actor instances. + +You can register types with dependency injection in `Startup.cs` to make them available. Read more about [the different ways of registering your types](https://docs.microsoft.com/aspnet/core/fundamentals/dependency-injection?#service-registration-methods). + +```csharp +// In Startup.cs +public void ConfigureServices(IServiceCollection services) +{ + ... + + // Register additional types with dependency injection. + services.AddSingleton(); +} +``` + +Each actor instance has its own dependency injection scope and remains in memory for some time after performing an operation. During that time, the dependency injection scope associated with the actor is also considered live. The scope will be released when the actor is deactivated. + +If an actor injects an `IServiceProvider` in the constructor, the actor will receive a reference to the `IServiceProvider` associated with its scope. The `IServiceProvider` can be used to resolve services dynamically in the future. + +```csharp +internal class MyActor : Actor, IMyActor, IRemindable +{ + public MyActor(ActorHost host, IServiceProvider services) // Accept IServiceProvider in the constructor + : base(host) + { + ... + } +} +``` + +When using this pattern, avoid creating many instances of **transient** services which implement `IDisposable`. Since the scope associated with an actor could be considered valid for a long time, you can accumulate many services in memory. See the [dependency injection guidelines](https://docs.microsoft.com/dotnet/core/extensions/dependency-injection-guidelines) for more information. + +### IDisposable and actors + +Actors can implement `IDisposable` or `IAsyncDisposable`. 
It's recommended that you rely on dependency injection for resource management rather than implementing dispose functionality in application code. Dispose support is provided in the rare case where it is truly necessary. + +### Logging + +Inside an actor class, you have access to an `ILogger` instance through a property on the base `Actor` class. This instance is connected to the ASP.NET Core logging system and should be used for all logging inside an actor. Read more about [logging](https://docs.microsoft.com/dotnet/core/extensions/logging?tabs=command-line). You can configure a variety of different logging formats and output sinks. + +Use _structured logging_ with _named placeholders_ like the example below: + +```csharp +public Task GetDataAsync() +{ + this.Logger.LogInformation("Getting state at {CurrentTime}", DateTime.UtcNow); + return this.StateManager.GetStateAsync("my_data"); +} +``` + +When logging, avoid using format strings like: `$"Getting state at {DateTime.UtcNow}"` + +Logging should use the [named placeholder syntax](https://docs.microsoft.com/dotnet/core/extensions/logging?tabs=command-line#log-message-template) which offers better performance and integration with logging systems. + +### Using an explicit actor type name + +By default, the _type_ of the actor, as seen by clients, is derived from the _name_ of the actor implementation class. The default name will be the class name (without namespace). + +If desired, you can specify an explicit type name by attaching an `ActorAttribute` attribute to the actor implementation class. + +```csharp +[Actor(TypeName = "MyCustomActorTypeName")] +internal class MyActor : Actor, IMyActor +{ + // ... +} +``` + +In the example above, the name will be `MyCustomActorTypeName`. + +No change is needed to the code that registers the actor type with the runtime, providing the value via the attribute is all that is required. 
+ +## Host actors on the server + +### Registering actors + +Actor registration is part of `ConfigureServices` in `Startup.cs`. You can register services with dependency injection via the `ConfigureServices` method. Registering the set of actor types is part of the registration of actor services. + +Inside `ConfigureServices` you can: + +- Register the actor runtime (`AddActors`) +- Register actor types (`options.Actors.RegisterActor<>`) +- Configure actor runtime settings `options` +- Register additional service types for dependency injection into actors (`services`) + +```csharp +// In Startup.cs +public void ConfigureServices(IServiceCollection services) +{ + // Register actor runtime with DI + services.AddActors(options => + { + // Register actor types and configure actor settings + options.Actors.RegisterActor(); + + // Configure default settings + options.ActorIdleTimeout = TimeSpan.FromMinutes(10); + options.ActorScanInterval = TimeSpan.FromSeconds(35); + options.DrainOngoingCallTimeout = TimeSpan.FromSeconds(35); + options.DrainRebalancedActors = true; + }); + + // Register additional services for use with actors + services.AddSingleton(); +} +``` + +### Configuring JSON options + +The actor runtime uses [System.Text.Json](https://docs.microsoft.com/dotnet/standard/serialization/system-text-json-overview) for: + +- Serializing data to the state store +- Handling requests from the weakly-typed client + +By default, the actor runtime uses settings based on [JsonSerializerDefaults.Web](https://docs.microsoft.com/dotnet/api/system.text.json.jsonserializerdefaults?view=net-5.0). + +You can configure the `JsonSerializerOptions` as part of `ConfigureServices`: + +```csharp +// In Startup.cs +public void ConfigureServices(IServiceCollection services) +{ + services.AddActors(options => + { + ... + + // Customize JSON options + options.JsonSerializerOptions = ... 
+
+    });
+}
+```
+
+### Actors and routing
+
+The ASP.NET Core hosting support for actors uses the [endpoint routing](https://docs.microsoft.com/aspnet/core/fundamentals/routing) system. The .NET SDK provides no support for hosting actors with the legacy routing system from early ASP.NET Core releases.
+
+Since actors use endpoint routing, the actors HTTP handler is part of the middleware pipeline. The following is a minimal example of a `Configure` method setting up the middleware pipeline with actors.
+
+```csharp
+// in Startup.cs
+public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
+{
+    if (env.IsDevelopment())
+    {
+        app.UseDeveloperExceptionPage();
+    }
+
+    app.UseRouting();
+
+    app.UseEndpoints(endpoints =>
+    {
+        // Register actors handlers that interface with the Dapr runtime.
+        endpoints.MapActorsHandlers();
+    });
+}
+```
+
+The `UseRouting` and `UseEndpoints` calls are necessary to configure routing. Configure actors as part of the pipeline by adding `MapActorsHandlers` inside the endpoint middleware.
+
+This is a minimal example, it's valid for Actors functionality to exist alongside:
+
+- Controllers
+- Razor Pages
+- Blazor
+- gRPC Services
+- Dapr pub/sub handler
+- other endpoints such as health checks
+
+### Problematic middleware
+
+Certain middleware may interfere with the routing of Dapr requests to the actors handlers. In particular, the `UseHttpsRedirection` is problematic for Dapr's default configuration. Dapr sends requests over unencrypted HTTP by default, which the `UseHttpsRedirection` middleware will block. This middleware cannot be used with Dapr at this time.
+ +```csharp +// in Startup.cs +public void Configure(IApplicationBuilder app, IWebHostEnvironment env) +{ + if (env.IsDevelopment()) + { + app.UseDeveloperExceptionPage(); + } + + // INVALID - this will block non-HTTPS requests + app.UseHttpsRedirection(); + // INVALID - this will block non-HTTPS requests + + app.UseRouting(); + + app.UseEndpoints(endpoints => + { + // Register actors handlers that interface with the Dapr runtime. + endpoints.MapActorsHandlers(); + }); +} +``` + +## Next steps + +Try the [Running and using virtual actors example]({{% ref dotnet-actors-howto.md %}}). \ No newline at end of file diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-ai/_index.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-ai/_index.md new file mode 100644 index 00000000000..e3398ec5ae5 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-ai/_index.md @@ -0,0 +1,12 @@ +--- +type: docs +title: "Dapr AI .NET SDK" +linkTitle: "AI" +weight: 50000 +description: Get up and running with the Dapr AI .NET SDK +--- + +With the Dapr AI package, you can interact with the Dapr AI workloads from a .NET application. + +Today, Dapr provides the Conversational API to engage with large language models. To get started with this workload, +walk through the [Dapr Conversational AI]({{% ref dotnet-ai-conversation-howto.md %}}) how-to guide. 
\ No newline at end of file diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-ai/dotnet-ai-conversation-howto.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-ai/dotnet-ai-conversation-howto.md new file mode 100644 index 00000000000..88e7a3c5540 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-ai/dotnet-ai-conversation-howto.md @@ -0,0 +1,79 @@ +--- +type: docs +title: "How to: Create and use Dapr AI Conversations in the .NET SDK" +linkTitle: "How to: Use the AI Conversations client" +weight: 50010 +description: Learn how to create and use the Dapr Conversational AI client using the .NET SDK +--- + +## Prerequisites +- [.NET 8](https://dotnet.microsoft.com/download/dotnet/8.0), or [.NET 9](https://dotnet.microsoft.com/download/dotnet/9.0) installed +- [Dapr CLI](https://docs.dapr.io/getting-started/install-dapr-cli/) +- [Initialized Dapr environment](https://docs.dapr.io/getting-started/install-dapr-selfhost) + +## Installation + +To get started with the Dapr AI .NET SDK client, install the [Dapr.AI package](https://www.nuget.org/packages/Dapr.AI) from NuGet: +```sh +dotnet add package Dapr.AI +``` + +A `DaprConversationClient` maintains access to networking resources in the form of TCP sockets used to communicate with the Dapr sidecar. + +### Dependency Injection + +The `AddDaprAiConversation()` method will register the Dapr client ASP.NET Core dependency injection and is the recommended approach +for using this package. This method accepts an optional options delegate for configuring the `DaprConversationClient` and a +`ServiceLifetime` argument, allowing you to specify a different lifetime for the registered services instead of the default `Singleton` +value. 
+ +The following example assumes all default values are acceptable and is sufficient to register the `DaprConversationClient`: + +```csharp +services.AddDaprAiConversation(); +``` + +The optional configuration delegate is used to configure the `DaprConversationClient` by specifying options on the +`DaprConversationClientBuilder` as in the following example: +```csharp +services.AddSingleton(); +services.AddDaprAiConversation((serviceProvider, clientBuilder) => { + //Inject a service to source a value from + var optionsProvider = serviceProvider.GetRequiredService(); + var standardTimeout = optionsProvider.GetStandardTimeout(); + + //Configure the value on the client builder + clientBuilder.UseTimeout(standardTimeout); +}); +``` + +### Manual Instantiation +Rather than using dependency injection, a `DaprConversationClient` can also be built using the static client builder. + +For best performance, create a single long-lived instance of `DaprConversationClient` and provide access to that shared instance throughout +your application. `DaprConversationClient` instances are thread-safe and intended to be shared. + +Avoid creating a `DaprConversationClient` per-operation. + +A `DaprConversationClient` can be configured by invoking methods on the `DaprConversationClientBuilder` class before calling `.Build()` +to create the client. The settings for each `DaprConversationClient` are separate and cannot be changed after calling `.Build()`. + +```csharp +var daprConversationClient = new DaprConversationClientBuilder() + .UseJsonSerializerSettings( ... ) //Configure JSON serializer + .Build(); +``` + +See the .NET [documentation here]({{% ref dotnet-client %}}) for more information about the options available when configuring the Dapr client via the builder. + +## Try it out +Put the Dapr AI .NET SDK to the test. 
Walk through the samples to see Dapr in action: + +| SDK Samples | Description | +| ----------- | ----------- | +| [SDK samples](https://github.com/dapr/dotnet-sdk/tree/master/examples) | Clone the SDK repo to try out some examples and get started. | + +## Building Blocks + +This part of the .NET SDK allows you to interface with the Conversations API to send and receive messages from +large language models. diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-ai/dotnet-ai-conversation-usage.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-ai/dotnet-ai-conversation-usage.md new file mode 100644 index 00000000000..6fdec0f88ad --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-ai/dotnet-ai-conversation-usage.md @@ -0,0 +1,135 @@ +--- +type: docs +title: "Dapr AI Client" +linkTitle: "AI client" +weight: 50005 +description: Learn how to create Dapr AI clients +--- + +The Dapr AI client package allows you to interact with the AI capabilities provided by the Dapr sidecar. + +## Lifetime management +A `DaprConversationClient` is a version of the Dapr client that is dedicated to interacting with the Dapr Conversation +API. It can be registered alongside a `DaprClient` and other Dapr clients without issue. + +It maintains access to networking resources in the form of TCP sockets used to communicate with the Dapr sidecar. + +For best performance, create a single long-lived instance of `DaprConversationClient` and provide access to that shared +instance throughout your application. `DaprConversationClient` instances are thread-safe and intended to be shared. + +This can be aided by utilizing the dependency injection functionality. 
The registration method supports registration +as a singleton, a scoped instance or as transient (meaning it's recreated every time it's injected), but also enables +registration to utilize values from an `IConfiguration` or other injected services in a way that's impractical when +creating the client from scratch in each of your classes. + +Avoid creating a `DaprConversationClient` for each operation. + +## Configuring DaprConversationClient via DaprConversationClientBuilder + +A `DaprConversationClient` can be configured by invoking methods on the `DaprConversationClientBuilder` class before +calling `.Build()` to create the client itself. The settings for each `DaprConversationClient` are separate +and cannot be changed after calling `.Build()`. + +```cs +var daprConversationClient = new DaprConversationClientBuilder() + .UseDaprApiToken("abc123") // Specify the API token used to authenticate to other Dapr sidecars + .Build(); +``` + +The `DaprConversationClientBuilder` contains settings for: + +- The HTTP endpoint of the Dapr sidecar +- The gRPC endpoint of the Dapr sidecar +- The `JsonSerializerOptions` object used to configure JSON serialization +- The `GrpcChannelOptions` object used to configure gRPC +- The API token used to authenticate requests to the sidecar +- The factory method used to create the `HttpClient` instance used by the SDK +- The timeout used for the `HttpClient` instance when making requests to the sidecar + +The SDK will read the following environment variables to configure the default values: + +- `DAPR_HTTP_ENDPOINT`: used to find the HTTP endpoint of the Dapr sidecar, example: `https://dapr-api.mycompany.com` +- `DAPR_GRPC_ENDPOINT`: used to find the gRPC endpoint of the Dapr sidecar, example: `https://dapr-grpc-api.mycompany.com` +- `DAPR_HTTP_PORT`: if `DAPR_HTTP_ENDPOINT` is not set, this is used to find the HTTP local endpoint of the Dapr sidecar +- `DAPR_GRPC_PORT`: if `DAPR_GRPC_ENDPOINT` is not set, this is used to find the gRPC 
local endpoint of the Dapr sidecar
+- `DAPR_API_TOKEN`: used to set the API token
+
+### Configuring gRPC channel options
+
+Dapr's use of `CancellationToken` for cancellation relies on the configuration of the gRPC channel options. If you need
+to configure these options yourself, make sure to enable the [ThrowOperationCanceledOnCancellation setting](https://grpc.github.io/grpc/csharp-dotnet/api/Grpc.Net.Client.GrpcChannelOptions.html#Grpc_Net_Client_GrpcChannelOptions_ThrowOperationCanceledOnCancellation).
+
+```cs
+var daprConversationClient = new DaprConversationClientBuilder()
+    .UseGrpcChannelOptions(new GrpcChannelOptions { ... ThrowOperationCanceledOnCancellation = true })
+    .Build();
+```
+
+## Using cancellation with `DaprConversationClient`
+
+The APIs on `DaprConversationClient` perform asynchronous operations and accept an optional `CancellationToken` parameter. This
+follows a standard .NET practice for cancellable operations. Note that when cancellation occurs, there is no guarantee that
+the remote endpoint stops processing the request, only that the client has stopped waiting for completion.
+
+When an operation is cancelled, it will throw an `OperationCanceledException`.
+
+## Configuring `DaprConversationClient` via dependency injection
+
+Using the built-in extension methods for registering the `DaprConversationClient` in a dependency injection container can
+provide the benefit of registering the long-lived service a single time, centralize complex configuration and improve
+performance by ensuring similarly long-lived resources are re-purposed when possible (e.g. `HttpClient` instances).
+
+There are three overloads available to give the developer the greatest flexibility in configuring the client for their
+scenario.
Each of these will register the `IHttpClientFactory` on your behalf if not already registered, and configure
+the `DaprConversationClientBuilder` to use it when creating the `HttpClient` instance in order to re-use the same instance as
+much as possible and avoid socket exhaustion and other issues.
+
+In the first approach, there's no configuration done by the developer and the `DaprConversationClient` is configured with the
+default settings.
+
+```cs
+var builder = WebApplication.CreateBuilder(args);
+
+builder.Services.AddDaprConversationClient(); //Registers the `DaprConversationClient` to be injected as needed
+var app = builder.Build();
+```
+
+Sometimes the developer will need to configure the created client using the various configuration options detailed
+above. This is done through an overload that passes in the `DaprConversationClientBuilder` and exposes methods for configuring
+the necessary options.
+
+```cs
+var builder = WebApplication.CreateBuilder(args);
+
+builder.Services.AddDaprConversationClient((_, daprConversationClientBuilder) => {
+    //Set the API token
+    daprConversationClientBuilder.UseDaprApiToken("abc123");
+    //Specify a non-standard HTTP endpoint
+    daprConversationClientBuilder.UseHttpEndpoint("http://dapr.my-company.com");
+});
+
+var app = builder.Build();
+```
+
+Finally, it's possible that the developer may need to retrieve information from another service in order to populate
+these configuration values.
That value may be provided from a `DaprClient` instance, a vendor-specific SDK or some +local service, but as long as it's also registered in DI, it can be injected into this configuration operation via the +last overload: + +```cs +var builder = WebApplication.CreateBuilder(args); + +//Register a fictional service that retrieves secrets from somewhere +builder.Services.AddSingleton(); + +builder.Services.AddDaprConversationClient((serviceProvider, daprConversationClientBuilder) => { + //Retrieve an instance of the `SecretService` from the service provider + var secretService = serviceProvider.GetRequiredService(); + var daprApiToken = secretService.GetSecret("DaprApiToken").Value; + + //Configure the `DaprConversationClientBuilder` + daprConversationClientBuilder.UseDaprApiToken(daprApiToken); +}); + +var app = builder.Build(); +``` \ No newline at end of file diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-ai/dotnet-ai-extensions-howto.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-ai/dotnet-ai-extensions-howto.md new file mode 100644 index 00000000000..5041e5d6b7e --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-ai/dotnet-ai-extensions-howto.md @@ -0,0 +1,142 @@ +--- +type: docs +title: "How to: Using Microsoft's AI extensions with Dapr's .NET Conversation SDK" +linkTitle: "How to: Use Microsoft's AI extensions with Dapr" +weight: 50020 +description: Learn how to create and use Dapr with Microsoft's AI extensions +--- + +## Prerequisites +- [.NET 8](https://dotnet.microsoft.com/download/dotnet/8.0), or [.NET 9](https://dotnet.microsoft.com/download/dotnet/9.0) installed +- [Dapr CLI](https://docs.dapr.io/getting-started/install-dapr-cli/) +- [Initialized Dapr environment](https://docs.dapr.io/getting-started/install-dapr-selfhost) + +## Installation + +To get started with this SDK, install both the [Dapr.AI](https://www.nuget.org/packages/Dapr.AI) and 
+[Dapr.AI.Microsoft.Extensions](https://www.nuget.org/packages/Dapr.AI.Microsoft.Extensions) packages from NuGet:
+```sh
+dotnet add package Dapr.AI
+dotnet add package Dapr.AI.Microsoft.Extensions
+```
+
+The `DaprChatClient` is a Dapr-based implementation of the `IChatClient` interface provided in the
+`Microsoft.Extensions.AI.Abstractions` package using Dapr's [conversation building block]({{% ref conversation-overview.md %}}). It allows
+developers to build against the types provided by Microsoft's abstraction while providing the greatest conformity to the
+Dapr conversation building block available. As both approaches adopt OpenAI's API approach, these are expected to increasingly
+converge over time.
+
+{{% alert title="Dapr Conversation Building Block" color="primary" %}}
+
+Do note that Dapr's conversation building block is still in an alpha state, meaning that the shape of the API
+is likely to change in future releases. It's the intent of this SDK package to provide an API that's aligned with
+Microsoft's AI extensions that also maps to and conforms with the Dapr API, but the names of types and properties
+may change from one release to the next, so please be aware of this possibility when using this SDK.
+
+{{% /alert %}}
+
+## About Microsoft.Extensions.AI
+The `Dapr.AI.Microsoft.Extensions` package implements the `Microsoft.Extensions.AI` abstractions, providing a unified API for
+AI services in .NET applications. `Microsoft.Extensions.AI` is designed to offer a consistent programming model across
+different AI providers and scenarios. For detailed information about `Microsoft.Extensions.AI`, refer to the
+[official documentation](https://learn.microsoft.com/dotnet/ai/microsoft-extensions-ai).
+
+{{% alert title="Limited Support" color="warning" %}}
+
+Note that Microsoft's AI extensions provide many more properties and methods than Dapr's conversation building block currently
+supports.
This package will only map those properties that have Dapr support and will ignore the others, so just because
+it's available in the Microsoft.Extensions.AI package doesn't mean it's supported by Dapr. Rely on this documentation
+and the exposed XML documentation in the package to understand what is and isn't supported.
+
+{{% /alert %}}
+
+## Service Registration
+The `DaprChatClient` can be registered with the dependency injection container using several extension methods. First,
+ensure that you register the `DaprConversationClient` that's part of the `Dapr.AI` package from NuGet:
+
+```csharp
+services.AddDaprConversationClient();
+```
+
+Then register the `DaprChatClient` with your conversation component name:
+
+```csharp
+services.AddDaprChatClient("my-conversation-component");
+```
+
+### Configuration Options
+You can configure the `DaprChatClient` using the `DaprChatClientOptions` though the current implementation only
+provides configuration for the component name itself. This is expected to change in future releases.
+
+```csharp
+services.AddDaprChatClient("my-conversation-component", options =>
+{
+    // Configure additional options here
+});
+```
+
+You can also configure the service lifetime (this defaults to `ServiceLifetime.Scoped`):
+
+```csharp
+services.AddDaprChatClient("my-conversation-component", ServiceLifetime.Singleton);
+```
+
+## Usage
+Once registered, you can inject and use `IChatClient` in your services:
+
+```csharp
+public class ChatService(IChatClient chatClient)
+{
+    public async Task<List<string>> GetResponseAsync(string message)
+    {
+        var response = await chatClient.GetResponseAsync([
+            new ChatMessage(ChatRole.User,
+                "Please write me a poem in iambic pentameter about the joys of using Dapr to develop distributed applications with .NET")
+        ]);
+
+        return response.Messages.Select(msg => msg.Text).ToList();
+    }
+}
+```
+
+### Streaming Conversations
+The `DaprChatClient` does not yet support streaming responses and use of the corresponding `GetStreamingResponseAsync`
+methods will throw a `NotImplementedException`. This is expected to change in a future release once the Dapr runtime
+supports this functionality.
+
+### Tool Integration
+The client supports function calling through the `Microsoft.Extensions.AI` tool integration. Tools registered with the
+conversation will be automatically available to the large language model.
+
+```csharp
+string GetCurrentWeather() => Random.Shared.NextDouble() > 0.5 ? "It's sunny today!" : "It's raining today!";
+var toolChatOptions = new ChatOptions { Tools = [AIFunctionFactory.Create(GetCurrentWeather, "weather")] };
+var toolResponse = await chatClient.GetResponseAsync("What's the weather like today?", toolChatOptions);
+foreach (var toolResp in toolResponse.Messages)
+{
+    Console.WriteLine(toolResp);
+}
+```
+
+## Error Handling
+The `DaprChatClient` integrates with Dapr's error handling and will throw appropriate exceptions when issues occur.
+
+## Configuration and Metadata
+The underlying Dapr conversation component can be configured with metadata and parameters through the Dapr conversation
+building block configuration. The `DaprChatClient` will respect these settings when making calls to the conversation component.
+
+## Best Practices
+
+1. **Service Lifetime**: Use `ServiceLifetime.Scoped` or `ServiceLifetime.Singleton` for the `DaprChatClient` registration to avoid creating multiple instances unnecessarily.
+
+2. **Error Handling**: Always wrap calls in appropriate try-catch blocks to handle both Dapr-specific and general exceptions.
+
+3. **Resource Management**: The `DaprChatClient` properly implements `IDisposable` through its base classes, so resources are automatically managed when using dependency injection.
+
+4. **Configuration**: Configure your Dapr conversation component properly to ensure optimal performance and reliability.
+
+## Related Links
+
+- [Dapr Conversation Building Block]({{% ref conversation-overview.md %}})
+- [Microsoft.Extensions.AI Documentation](https://learn.microsoft.com/dotnet/ai/microsoft-extensions-ai)
+- [Dapr .NET Conversation SDK]({{% ref dotnet-ai-conversation-howto.md %}})
diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-client/_index.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-client/_index.md
new file mode 100644
index 00000000000..05781c62993
--- /dev/null
+++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-client/_index.md
@@ -0,0 +1,412 @@
+---
+type: docs
+title: "Getting started with the Dapr client .NET SDK"
+linkTitle: "Client"
+weight: 20000
+description: How to get up and running with the Dapr .NET SDK
+no_list: true
+---
+
+The Dapr client package allows you to interact with other Dapr applications from a .NET application.
+ +{{% alert title="Note" color="primary" %}} + If you haven't already, [try out one of the quickstarts]({{% ref quickstarts %}}) for a quick walk-through on how to use the Dapr .NET SDK with an API building block. + +{{% /alert %}} + + +## Building blocks + +The .NET SDK allows you to interface with all of the [Dapr building blocks]({{% ref building-blocks %}}). + +{{% alert title="Note" color="primary" %}} + +We will only include the dependency injection registration for the `DaprClient` in the first example +(service invocation). In nearly all other examples, it's assumed you've already registered the `DaprClient` in your +application in the latter examples and have injected an instance of `DaprClient` into your code as an instance named +`client`. + +{{% /alert %}} + +### Invoke a service + +#### HTTP +You can either use the `DaprClient` or `System.Net.Http.HttpClient` to invoke your services. + +{{% alert title="Note" color="primary" %}} + You can also [invoke a non-Dapr endpoint using either a named `HTTPEndpoint` or an FQDN URL to the non-Dapr environment]({{% ref "howto-invoke-non-dapr-endpoints.md#using-an-httpendpoint-resource-or-fqdn-url-for-non-dapr-endpoints" %}}). 
+ +{{% /alert %}} + + +{{< tabpane text=true >}} + +{{% tab header="ASP.NET Core Project" %}} +```csharp +var builder = WebApplication.CreateBuilder(args); +builder.Services.AddDaprClient(); +var app = builder.Build(); + +using var scope = app.Services.CreateScope(); +var client = scope.ServiceProvider.GetRequiredService(); + +// Invokes a POST method named "deposit" that takes input of type "Transaction" +var data = new { id = "17", amount = 99m }; +var account = await client.InvokeMethodAsync("routing", "deposit", data, cancellationToken); +Console.WriteLine("Returned: id:{0} | Balance:{1}", account.Id, account.Balance); +``` +{{% /tab %}} + +{{% tab header="Console Project" %}} +```csharp +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.DependencyInjection; + +var builder = Host.CreateApplicationBuilder(args); +builder.Services.AddDaprClient(); +var app = builder.Build(); + +using var scope = app.Services.CreateScope(); +var client = scope.ServiceProvider.GetRequiredService(); + +// Invokes a POST method named "deposit" that takes input of type "Transaction" +var data = new { id = "17", amount = 99m }; +var account = await client.InvokeMethodAsync("routing", "deposit", data, cancellationToken); +Console.WriteLine("Returned: id:{0} | Balance:{1}", account.Id, account.Balance); +``` +{{% /tab %}} + +{{% tab header="HTTP" %}} +```csharp +var client = DaprClient.CreateInvokeHttpClient(appId: "routing"); + +// To set a timeout on the HTTP client: +client.Timeout = TimeSpan.FromSeconds(2); + +var deposit = new Transaction { Id = "17", Amount = 99m }; +var response = await client.PostAsJsonAsync("/deposit", deposit, cancellationToken); +var account = await response.Content.ReadFromJsonAsync(cancellationToken: cancellationToken); +Console.WriteLine("Returned: id:{0} | Balance:{1}", account.Id, account.Balance); +``` +{{% /tab %}} +{{< /tabpane >}} + +#### gRPC +You can use the `DaprClient` to invoke your services over gRPC. 
+ +```csharp +using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(20)); +var invoker = DaprClient.CreateInvocationInvoker(appId: myAppId, daprEndpoint: serviceEndpoint); +var client = new MyService.MyServiceClient(invoker); + +var options = new CallOptions(cancellationToken: cts.Token, deadline: DateTime.UtcNow.AddSeconds(1)); +await client.MyMethodAsync(new Empty(), options); + +Assert.Equal(StatusCode.DeadlineExceeded, ex.StatusCode); +``` + +- For a full guide on service invocation visit [How-To: Invoke a service]({{% ref howto-invoke-discover-services.md %}}). + +### Save & get application state + +```csharp +var state = new Widget() { Size = "small", Color = "yellow", }; +await client.SaveStateAsync(storeName, stateKeyName, state, cancellationToken: cancellationToken); +Console.WriteLine("Saved State!"); + +state = await client.GetStateAsync(storeName, stateKeyName, cancellationToken: cancellationToken); +Console.WriteLine($"Got State: {state.Size} {state.Color}"); + +await client.DeleteStateAsync(storeName, stateKeyName, cancellationToken: cancellationToken); +Console.WriteLine("Deleted State!"); +``` + +### Query State (Alpha) + +```csharp +var query = "{" + + "\"filter\": {" + + "\"EQ\": { \"value.Id\": \"1\" }" + + "}," + + "\"sort\": [" + + "{" + + "\"key\": \"value.Balance\"," + + "\"order\": \"DESC\"" + + "}" + + "]" + + "}"; +var queryResponse = await client.QueryStateAsync("querystore", query, cancellationToken: cancellationToken); + +Console.WriteLine($"Got {queryResponse.Results.Count}"); +foreach (var account in queryResponse.Results) +{ + Console.WriteLine($"Account: {account.Data.Id} has {account.Data.Balance}"); +} +``` + +- For a full list of state operations visit [How-To: Get & save state]({{% ref howto-get-save-state.md %}}). 
+ +### Publish messages + +```csharp +var eventData = new { Id = "17", Amount = 10m, }; +await client.PublishEventAsync(pubsubName, "deposit", eventData, cancellationToken); +Console.WriteLine("Published deposit event!"); +``` + +- For a full list of state operations visit [How-To: Publish & subscribe]({{% ref howto-publish-subscribe.md %}}). +- Visit [.NET SDK examples](https://github.com/dapr/dotnet-sdk/tree/master/examples/Client/PublishSubscribe) for code samples and instructions to try out pub/sub + +### Interact with output bindings + +When calling `InvokeBindingAsync`, you have the option to handle serialization and encoding yourself, +or to have the SDK serialize it to JSON and then encode it to bytes for you. + +{{% alert title="Important" color="warning" %}} +Bindings differ in the shape of data they expect, take special note and ensure that the data you +are sending is handled accordingly. If you are authoring both an output and an input, make sure +that they both follow the same conventions for serialization. +{{% /alert %}} + +#### Manual serialization + +For most scenarios, you're advised to use this overload of `InvokeBindingAsync` as it gives you clarity and control over +how the data is being handled. + +_In this example, the data is sent as the UTF-8 byte representation of the string._ + +```csharp +using var client = new DaprClientBuilder().Build(); + +var request = new BindingRequest("send-email", "create") +{ + // note: This is an example payload for the Twilio SendGrid binding + Data = Encoding.UTF8.GetBytes("

Testing Dapr Bindings

This is a test.
Bye!"), + Metadata = + { + { "emailTo", "customer@example.com" }, + { "subject", "An email from Dapr SendGrid binding" }, + }, +}; +await client.InvokeBindingAsync(request); +``` + +#### Automatic serialization and encoding + +_In this example, the data is sent as a UTF-8 encoded byte representation of the value serialized to JSON._ + +```csharp +using var client = new DaprClientBuilder().Build(); + +var email = new +{ + // note: This is an example payload for the Twilio SendGrid binding + data = "

Testing Dapr Bindings

This is a test.
Bye!", + metadata = new + { + emailTo = "customer@example.com", + subject = "An email from Dapr SendGrid binding", + }, +}; +await client.InvokeBindingAsync("send-email", "create", email); +``` + +- For a full guide on output bindings visit [How-To: Use bindings]({{% ref howto-bindings.md %}}). + +### Retrieve secrets +Prior to retrieving secrets, it's important that the outbound channel be registered and ready or the SDK will be unable +to communicate bidirectionally with the Dapr sidecar. The SDK provides a helper method intended to be used for this +purpose called `CheckOutboundHealthAsync`. This isn't referring to outbound from the SDK to the runtime, so much as +outbound from the Dapr runtime back into the client application using the SDK. + +This method is simply opening a connection to the {{% ref "health_api#wait-for-specific-health-check-against-outbound-path" %}} +endpoint in the Dapr Health API and evaluating the HTTP status code returned to determine the health of the endpoint +as reported by the runtime. + +It's important to note that this and the `WaitForSidecarAsync` methods perform nearly identical operations; `WaitForSidecarAsync` +polls the `CheckOutboundHealthAsync` endpoint indefinitely until it returns a healthy status value. They are intended +exclusively for situations like secrets or configuration retrieval. Using them in other scenarios will result in +unintended behavior (e.g., the endpoint never being ready because there are no registered components that use an +"outbound" channel). + +This behavior will be changed in a future release and should only be relied on sparingly. 
+ + +{{< tabpane text=true >}} + +{{% tab header="Multi-value-secret" %}} + +```csharp +// Get an instance of the DaprClient from DI +var client = scope.GetRequiredService(); + +// Wait for the outbound channel to be established - only use for this scenario and not generally +await client.WaitForOutboundHealthAsync(); + +// Retrieve a key-value-pair-based secret - returns a Dictionary +var secrets = await client.GetSecretAsync("mysecretstore", "key-value-pair-secret"); +Console.WriteLine($"Got secret keys: {string.Join(", ", secrets.Keys)}"); +``` + +{{% /tab %}} + +{{% tab header="Single-value-secret" %}} + +```csharp +// Get an instance of the DaprClient from DI +var client = scope.GetRequiredService(); + +// Wait for the outbound channel to be established - only use for this scenario and not generally +await client.WaitForOutboundHealthAsync(); + +// Retrieve a key-value-pair-based secret - returns a Dictionary +var secrets = await client.GetSecretAsync("mysecretstore", "key-value-pair-secret"); +Console.WriteLine($"Got secret keys: {string.Join(", ", secrets.Keys)}"); + +// Retrieve a single-valued secret - returns a Dictionary +// containing a single value with the secret name as the key +var data = await client.GetSecretAsync("mysecretstore", "single-value-secret"); +var value = data["single-value-secret"] +Console.WriteLine("Got a secret value, I'm not going to be print it, it's a secret!"); +``` + +{{% /tab %}} + +{{< /tabpane >}} + +- For a full guide on secrets visit [How-To: Retrieve secrets]({{% ref howto-secrets.md %}}). + +### Get Configuration Keys +```csharp +// Retrieve a specific set of keys. +var specificItems = await client.GetConfiguration("configstore", new List() { "key1", "key2" }); +Console.WriteLine($"Here are my values:\n{specificItems[0].Key} -> {specificItems[0].Value}\n{specificItems[1].Key} -> {specificItems[1].Value}"); + +// Retrieve all configuration items by providing an empty list. 
+var configItems = await client.GetConfiguration("configstore", new List()); +Console.WriteLine($"I got {configItems.Count} entries!"); +foreach (var item in configItems) +{ + Console.WriteLine($"{item.Key} -> {item.Value}"); +} +``` + +### Subscribe to Configuration Keys +```csharp +// The Subscribe Configuration API returns a wrapper around an IAsyncEnumerable>. +// Iterate through it by accessing its Source in a foreach loop. The loop will end when the stream is severed +// or if the cancellation token is cancelled. +var subscribeConfigurationResponse = await daprClient.SubscribeConfiguration(store, keys, metadata, cts.Token); +await foreach (var items in subscribeConfigurationResponse.Source.WithCancellation(cts.Token)) +{ + foreach (var item in items) + { + Console.WriteLine($"{item.Key} -> {item.Value}"); + } +} +``` + +### Distributed lock (Alpha) + +#### Acquire a lock + +```csharp +using System; +using Dapr.Client; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.DependencyInjection; + +namespace LockService +{ + class Program + { + [Obsolete("Distributed Lock API is in Alpha, this can be removed once it is stable.")] + static async Task Main(string[] args) + { + const string daprLockName = "lockstore"; + const string fileName = "my_file_name"; + + var builder = Host.CreateDefaultBuilder(); + builder.ConfigureServices(services => + { + services.AddDaprClient(); + }); + var app = builder.Build(); + + using var scope = app.Services.CreateScope(); + var client = scope.ServiceProvider.GetRequiredService(); + + // Locking with this approach will also unlock it automatically, as this is a disposable object + await using (var fileLock = await client.Lock(daprLockName, fileName, "random_id_abc123", 60)) + { + if (fileLock.Success) + { + Console.WriteLine("Success"); + } + else + { + Console.WriteLine($"Failed to lock {fileName}."); + } + } + } + } +} +``` + +#### Unlock an existing lock + +```csharp +using System; +using Dapr.Client; + +namespace 
LockService +{ + class Program + { + static async Task Main(string[] args) + { + var daprLockName = "lockstore"; + + var builder = Host.CreateDefaultBuilder(); + builder.ConfigureServices(services => + { + services.AddDaprClient(); + }); + var app = builder.Build(); + + using var scope = app.Services.CreateScope(); + var client = scope.ServiceProvider.GetRequiredService(); + + var response = await client.Unlock(daprLockName, "my_file_name", "random_id_abc123"); + Console.WriteLine(response.status); + } + } +} +``` + +## Sidecar APIs +### Sidecar Health +While the .NET SDK provides a way to poll for the sidecar health, it is not generally recommended that developers +utilize this functionality unless they are explicitly using Dapr to also retrieve secrets or configuration values. + +There are two methods available: +- `CheckOutboundHealthAsync` which queries an outbound readiness endpoint in the Dapr Health API {{% ref "health_api#wait-for-specific-health-check-against-outbound-path" %}} +for a successful HTTP status code and reports readiness based on this value. +- `WaitForSidecarAsync` continuously polls `CheckOutboundHealthAsync` until it returns a successful status code. + +The "outbound" direction refers to the communication outbound from the Dapr runtime to your application. If your +application doesn't use actors, secret management, configuration retrieval or workflows, the runtime will not attempt +to create an outbound connection. This means that if your application takes a dependency on `WaitForSidecarAsync` +without using any of these Dapr components, it will indefinitely lock up during startup as the endpoint will never be established. + +A future release will remove these methods altogether and perform this as an internal SDK operation, so neither +method should be relied on in general. 
Reach out in the Discord #dotnet-sdk channel for more clarification as +to whether your scenario may necessitate using this, but in most situations, these methods should not be required. + + +### Shutdown the sidecar +```csharp +var client = new DaprClientBuilder().Build(); +await client.ShutdownSidecarAsync(); +``` + +## Related links +- [.NET SDK examples](https://github.com/dapr/dotnet-sdk/tree/master/examples) diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-client/dotnet-daprclient-usage.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-client/dotnet-daprclient-usage.md new file mode 100644 index 00000000000..9e087b5929e --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-client/dotnet-daprclient-usage.md @@ -0,0 +1,225 @@ +--- +type: docs +title: "DaprClient usage" +linkTitle: "DaprClient usage" +weight: 100000 +description: Essential tips and advice for using DaprClient +--- + +## Lifetime management + +A `DaprClient` holds access to networking resources in the form of TCP sockets used to communicate with the Dapr sidecar. `DaprClient` implements `IDisposable` to support eager cleanup of resources. + +### Dependency Injection + +The `AddDaprClient()` method will register the Dapr client with ASP.NET Core dependency injection. This method accepts an optional +options delegate for configuring the `DaprClient` and a `ServiceLifetime` argument, allowing you to specify a different lifetime +for the registered resources instead of the default `Singleton` value. + +The following example assumes all default values are acceptable and is sufficient to register the `DaprClient`. 
+ +```csharp +services.AddDaprClient(); +``` + +The optional configuration delegates are used to configure `DaprClient` by specifying options on the provided `DaprClientBuilder` +as in the following example: + +```csharp +services.AddDaprClient(daprBuilder => { + daprBuilder.UseJsonSerializerOptions(new JsonSerializerOptions { + WriteIndented = true, + MaxDepth = 8 + }); + daprBuilder.UseTimeout(TimeSpan.FromSeconds(30)); +}); +``` + +Another optional configuration delegate overload provides access to both the `DaprClientBuilder` as well as an `IServiceProvider` +allowing for more advanced configurations that may require injecting services from the dependency injection container. + +```csharp +services.AddSingleton(); +services.AddDaprClient((serviceProvider, daprBuilder) => { + var sampleService = serviceProvider.GetRequiredService(); + var timeoutValue = sampleService.TimeoutOptions; + + daprBuilder.UseTimeout(timeoutValue); +}); +``` + +### Manual Instantiation + +Rather than using dependency injection, a `DaprClient` can also be built using the static client builder. + +For best performance, create a single long-lived instance of `DaprClient` and provide access to that shared instance throughout your application. `DaprClient` instances are thread-safe and intended to be shared. + +Avoid creating a `DaprClient` per-operation and disposing it when the operation is complete. + +## Configuring DaprClient + +A `DaprClient` can be configured by invoking methods on `DaprClientBuilder` class before calling `.Build()` to create the client. The settings for each `DaprClient` object are separate and cannot be changed after calling `.Build()`. + +```C# +var daprClient = new DaprClientBuilder() + .UseJsonSerializerSettings( ... 
) // Configure JSON serializer + .Build(); +``` + +By default, the `DaprClientBuilder` will prioritize the following locations, in the following order, to source the configuration +values: + +- The value provided to a method on the `DaprClientBuilder` (e.g. `UseTimeout(TimeSpan.FromSeconds(30))`) +- The value pulled from an optionally injected `IConfiguration` matching the name expected in the associated environment variable +- The value pulled from the associated environment variable +- Default values + +### Configuring on `DaprClientBuilder` + +The `DaprClientBuilder` contains the following methods to set configuration options: + +- `UseHttpEndpoint(string)`: The HTTP endpoint of the Dapr sidecar +- `UseGrpcEndpoint(string)`: Sets the gRPC endpoint of the Dapr sidecar +- `UseGrpcChannelOptions(GrpcChannelOptions)`: Sets the gRPC channel options used to connect to the Dapr sidecar +- `UseHttpClientFactory(IHttpClientFactory)`: Configures the DaprClient to use a registered `IHttpClientFactory` when building `HttpClient` instances +- `UseJsonSerializationOptions(JsonSerializerOptions)`: Used to configure JSON serialization +- `UseDaprApiToken(string)`: Adds the provided token to every request to authenticate to the Dapr sidecar +- `UseTimeout(TimeSpan)`: Specifies a timeout value used by the `HttpClient` when communicating with the Dapr sidecar + +### Configuring From `IConfiguration` +Rather than rely on sourcing configuration values directly from environment variables or because the values are sourced +from dependency injected services, another option is to make these values available on `IConfiguration`. + +For example, you might be registering your application in a multi-tenant environment and need to prefix the environment +variables used. 
The following example shows how these values can be sourced from the environment variables to your +`IConfiguration` when their keys are prefixed with `test_`; + +```csharp +var builder = WebApplication.CreateBuilder(args); +builder.Configuration.AddEnvironmentVariables("test_"); //Retrieves all environment variables that start with "test_" and removes the prefix when sourced from IConfiguration +builder.Services.AddDaprClient(); +``` + +### Configuring From Environment Variables + +The SDK will read the following environment variables to configure the default values: + +- `DAPR_HTTP_ENDPOINT`: used to find the HTTP endpoint of the Dapr sidecar, example: `https://dapr-api.mycompany.com` +- `DAPR_GRPC_ENDPOINT`: used to find the gRPC endpoint of the Dapr sidecar, example: `https://dapr-grpc-api.mycompany.com` +- `DAPR_HTTP_PORT`: if `DAPR_HTTP_ENDPOINT` is not set, this is used to find the HTTP local endpoint of the Dapr sidecar +- `DAPR_GRPC_PORT`: if `DAPR_GRPC_ENDPOINT` is not set, this is used to find the gRPC local endpoint of the Dapr sidecar +- `DAPR_API_TOKEN`: used to set the API Token + +{{% alert title="Note" color="primary" %}} +If both `DAPR_HTTP_ENDPOINT` and `DAPR_HTTP_PORT` are specified, the port value from `DAPR_HTTP_PORT` will be ignored in favor of the port +implicitly or explicitly defined on `DAPR_HTTP_ENDPOINT`. The same is true of both `DAPR_GRPC_ENDPOINT` and `DAPR_GRPC_PORT`. +{{% /alert %}} + +### Configuring gRPC channel options + +Dapr's use of `CancellationToken` for cancellation relies on the configuration of the gRPC channel options and this is enabled by default. If you need to configure these options yourself, make sure to enable the [ThrowOperationCanceledOnCancellation setting](https://grpc.github.io/grpc/csharp-dotnet/api/Grpc.Net.Client.GrpcChannelOptions.html#Grpc_Net_Client_GrpcChannelOptions_ThrowOperationCanceledOnCancellation). 
+ +```C# +var daprClient = new DaprClientBuilder() + .UseGrpcChannelOptions(new GrpcChannelOptions { ... ThrowOperationCanceledOnCancellation = true }) + .Build(); +``` + +## Using cancellation with DaprClient + +The APIs on DaprClient that perform asynchronous operations accept an optional `CancellationToken` parameter. This follows a standard .NET idiom for cancellable operations. Note that when cancellation occurs, there is no guarantee that the remote endpoint stops processing the request, only that the client has stopped waiting for completion. + +When an operation is cancelled, it will throw an `OperationCancelledException`. + +## Understanding DaprClient JSON serialization + +Many methods on `DaprClient` perform JSON serialization using the `System.Text.Json` serializer. Methods that accept an application data type as an argument will JSON serialize it, unless the documentation clearly states otherwise. + +It is worth reading the [System.Text.Json documentation](https://docs.microsoft.com/dotnet/standard/serialization/system-text-json-overview) if you have advanced requirements. The Dapr .NET SDK provides no unique serialization behavior or customizations - it relies on the underlying serializer to convert data to and from the application's .NET types. + +`DaprClient` is configured to use a serializer options object configured from [JsonSerializerDefaults.Web](https://docs.microsoft.com/dotnet/api/system.text.json.jsonserializerdefaults?view=net-5.0). This means that `DaprClient` will use `camelCase` for property names, allow reading quoted numbers (`"10.99"`), and will bind properties case-insensitively. These are the same settings used with ASP.NET Core and the `System.Text.Json.Http` APIs, and are designed to follow interoperable web conventions. + +`System.Text.Json` as of .NET 5.0 does not have good support for all of F# language features built-in. 
If you are using F# you may want to use one of the converter packages that add support for F#'s features such as [FSharp.SystemTextJson](https://github.com/Tarmil/FSharp.SystemTextJson). + +### Simple guidance for JSON serialization + +Your experience using JSON serialization and `DaprClient` will be smooth if you use a feature set that maps to JSON's type system. These are general guidelines that will simplify your code where they can be applied. + +- Avoid inheritance and polymorphism +- Do not attempt to serialize data with cyclic references +- Do not put complex or expensive logic in constructors or property accessors +- Use .NET types that map cleanly to JSON types (numeric types, strings, `DateTime`) +- Create your own classes for top-level messages, events, or state values so you can add properties in the future +- Design types with `get`/`set` properties OR use the [supported pattern](https://docs.microsoft.com/dotnet/standard/serialization/system-text-json-immutability?pivots=dotnet-5-0) for immutable types with JSON + +### Polymorphism and serialization + +The `System.Text.Json` serializer used by `DaprClient` uses the declared type of values when performing serialization. + +This section will use `DaprClient.SaveStateAsync(...)` in examples, but the advice is applicable to any Dapr building block exposed by the SDK. + +```C# +public class Widget +{ + public string Color { get; set; } +} +... + +// Storing a Widget value as JSON in the state store +Widget widget = new Widget() { Color = "Green", }; +await client.SaveStateAsync("mystatestore", "mykey", widget); +``` + +In the example above, the type parameter `TValue` has its type argument inferred from the type of the `widget` variable. This is important because the `System.Text.Json` serializer will perform serialization based on the *declared type* of the value. The result is that the JSON value `{ "color": "Green" }` will be stored. 
+ +Consider what happens when you try to use derived type of `Widget`: + +```C# +public class Widget +{ + public string Color { get; set; } +} + +public class SuperWidget : Widget +{ + public bool HasSelfCleaningFeature { get; set; } +} +... + +// Storing a SuperWidget value as JSON in the state store +Widget widget = new SuperWidget() { Color = "Green", HasSelfCleaningFeature = true, }; +await client.SaveStateAsync("mystatestore", "mykey", widget); +``` + +In this example we're using a `SuperWidget` but the variable's declared type is `Widget`. Since the JSON serializer's behavior is determined by the declared type, it only sees a simple `Widget` and will save the value `{ "color": "Green" }` instead of `{ "color": "Green", "hasSelfCleaningFeature": true }`. + +If you want the properties of `SuperWidget` to be serialized, then the best option is to override the type argument with `object`. This will cause the serializer to include all data as it knows nothing about the type. + +```C# +Widget widget = new SuperWidget() { Color = "Green", HasSelfCleaningFeature = true, }; +await client.SaveStateAsync("mystatestore", "mykey", widget); +``` + +## Error handling + +Methods on `DaprClient` will throw `DaprException` or a subclass when a failure is encountered. + +```C# +try +{ + var widget = new Widget() { Color = "Green", }; + await client.SaveStateAsync("mystatestore", "mykey", widget); +} +catch (DaprException ex) +{ + // handle the exception, log, retry, etc. +} +``` + +The most common cases of failure will be related to: + +- Incorrect configuration of Dapr component +- Transient failures such as a networking problem +- Invalid data, such as a failure to deserialize JSON + +In any of these cases you can examine more exception details through the `.InnerException` property. 
diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-cryptography/_index.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-cryptography/_index.md new file mode 100644 index 00000000000..3fe574f1492 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-cryptography/_index.md @@ -0,0 +1,12 @@ +--- +type: docs +title: "Dapr Cryptography .NET SDK" +linkTitle: "Cryptography" +weight: 51000 +description: Get up and running with the Dapr Cryptography .NET SDK +--- + +With the Dapr Cryptography package, you can perform high-performance encryption and decryption operations with Dapr. + +To get started with this functionality, walk through the [Dapr Cryptography]({{< ref dotnet-cryptography-howto.md >}}) +how-to guide. \ No newline at end of file diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-cryptography/dotnet-cryptography-howto.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-cryptography/dotnet-cryptography-howto.md new file mode 100644 index 00000000000..1bf4f20f29b --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-cryptography/dotnet-cryptography-howto.md @@ -0,0 +1,74 @@ +--- +type: docs +title: "How to: Create and use Dapr Cryptography in the .NET SDK" +linkTitle: "How to: Use the Cryptography client" +weight: 510100 +description: Learn how to create and use the Dapr Cryptography client using the .NET SDK +--- + +## Prerequisites +- [.NET 8](https://dotnet.microsoft.com/download/dotnet/8.0), or [.NET 9](https://dotnet.microsoft.com/download/dotnet/9.0) installed +- [Dapr CLI](https://docs.dapr.io/getting-started/install-dapr-cli/) +- [Initialized Dapr environment](https://docs.dapr.io/getting-started/install-dapr-selfhost) + +## Installation +To get started with the Dapr Cryptography client, install the [Dapr.Cryptography package](https://www.nuget.org/packages/Dapr.Cryptography) from NuGet: +```sh +dotnet add package Dapr.Cryptography +``` + +A `DaprEncryptionClient` maintains access to networking 
resources in the form of TCP sockets used to communicate with +the Dapr sidecar. + +### Dependency Injection + +The `AddDaprEncryptionClient()` method will register the Dapr client with dependency injection and is the recommended approach +for using this package. This method accepts an optional options delegate for configuring the `DaprEncryptionClient` and a +`ServiceLifetime` argument, allowing you to specify a different lifetime for the registered services instead of the default `Singleton` +value. + +The following example assumes all default values are acceptable and is sufficient to register the `DaprEncryptionClient`: + +```csharp +services.AddDaprEncryptionClient(); +``` + +The optional configuration delegate is used to configure the `DaprEncryptionClient` by specifying options on the +`DaprEncryptionClientBuilder` as in the following example: +```csharp +services.AddSingleton(); +services.AddDaprEncryptionClient((serviceProvider, clientBuilder) => { + //Inject a service to source a value from + var optionsProvider = serviceProvider.GetRequiredService(); + var standardTimeout = optionsProvider.GetStandardTimeout(); + + //Configure the value on the client builder + clientBuilder.UseTimeout(standardTimeout); +}); +``` + +### Manual Instantiation +Rather than using dependency injection, a `DaprEncryptionClient` can also be built using the static client builder. + +For best performance, create a single long-lived instance of `DaprEncryptionClient` and provide access to that shared instance throughout +your application. `DaprEncryptionClient` instances are thread-safe and intended to be shared. + +Avoid creating a `DaprEncryptionClient` per-operation. + +A `DaprEncryptionClient` can be configured by invoking methods on the `DaprEncryptionClientBuilder` class before calling `.Build()` +to create the client. The settings for each `DaprEncryptionClient` are separate and cannot be changed after calling `.Build()`. 
+ +```csharp +var daprEncryptionClient = new DaprEncryptionClientBuilder() + .UseJsonSerializerSettings( ... ) //Configure JSON serializer + .Build(); +``` + +See the .NET [documentation here]({{< ref dotnet-client >}}) for more information about the options available when configuring the Dapr client via the builder. + +## Try it out +Put the Dapr Cryptography .NET SDK to the test. Walk through the samples to see Dapr in action: + +| SDK Samples | Description | +|-------------------------------------------------------------------------------------| ----------- | +| [SDK samples](https://github.com/dapr/dotnet-sdk/tree/master/examples/Cryptography) | Clone the SDK repo to try out some examples and get started. | diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-cryptography/dotnet-cryptography-usage.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-cryptography/dotnet-cryptography-usage.md new file mode 100644 index 00000000000..760e396518d --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-cryptography/dotnet-cryptography-usage.md @@ -0,0 +1,131 @@ +--- +type: docs +title: "Dapr Cryptography Client" +linkTitle: "Cryptography client" +weight: 510005 +description: Learn how to create Dapr Cryptography clients +--- + +The Dapr Cryptography package allows you to perform encryption and decryption operations provided by the Dapr sidecar. + +## Lifetime management +A `DaprEncryptionClient` is a version of the Dapr client that is dedicated to interacting with the Dapr Cryptography API. +It can be registered alongside a `DaprClient` and other Dapr clients without issue. + +It maintains access to networking resources in the form of TCP sockets used to communicate with the Dapr sidecar. + +For best performance, create a single long-lived instance of `DaprEncryptionClient` and provide access to that shared +instance throughout your application. `DaprEncryptionClient` instances are thread-safe and intended to be shared. 
+ +This can be aided by utilizing the dependency injection functionality. The registration method supports registration +as a singleton, a scoped instance, or as a transient (meaning it's recreated every time it's injected), but also enables +registration to utilize values from an `IConfiguration` or other injected service in a way that's impractical when creating +the client from scratch in each of your classes. + +Avoid creating a `DaprEncryptionClient` for each operation. + +## Configuring `DaprEncryptionClient` via `DaprEncryptionClientBuilder` +A `DaprEncryptionClient` can be configured by invoking methods on the `DaprEncryptionClientBuilder` class before calling +`.Build()` to create the client itself. The settings for each `DaprEncryptionClientBuilder` are separate and cannot be +changed after calling `.Build()`. + +```cs +var daprEncryptionClient = new DaprEncryptionClientBuilder() + .UseDaprApiToken("abc123") //Specify the API token used to authenticate to the Dapr sidecar + .Build(); +``` + +The `DaprEncryptionClientBuilder` contains settings for: +- The HTTP endpoint of the Dapr sidecar +- The gRPC endpoint of the Dapr sidecar +- The `JsonSerializerOptions` object used to configure JSON serialization +- The `GrpcChannelOptions` object used to configure gRPC +- The API token used to authenticate requests to the sidecar +- The factory method used to create the `HttpClient` instance used by the SDK +- The timeout used for the `HttpClient` instance when making requests to the sidecar + +The SDK will read the following environment variables to configure the default values: + +- `DAPR_HTTP_ENDPOINT`: used to find the HTTP endpoint of the Dapr sidecar, example: `https://dapr-api.mycompany.com` +- `DAPR_GRPC_ENDPOINT`: used to find the gRPC endpoint of the Dapr sidecar, example: `https://dapr-grpc-api.mycompany.com` +- `DAPR_HTTP_PORT`: if `DAPR_HTTP_ENDPOINT` is not set, this is used to find the HTTP local endpoint of the Dapr sidecar +- `DAPR_GRPC_PORT`: if
`DAPR_GRPC_ENDPOINT` is not set, this is used to find the gRPC local endpoint of the Dapr sidecar +- `DAPR_API_TOKEN`: used to set the API token + +### Configuring gRPC channel options + +Dapr's use of `CancellationToken` for cancellation relies on the configuration of the gRPC channel options. If you need +to configure these options yourself, make sure to enable the [ThrowOperationCanceledOnCancellation setting](https://grpc.github.io/grpc/csharp-dotnet/api/Grpc.Net.Client.GrpcChannelOptions.html#Grpc_Net_Client_GrpcChannelOptions_ThrowOperationCanceledOnCancellation). + +```cs +var daprEncryptionClient = new DaprEncryptionClientBuilder() + .UseGrpcChannelOptions(new GrpcChannelOptions { ... ThrowOperationCanceledOnCancellation = true }) + .Build(); +``` + +## Using cancellation with `DaprEncryptionClient` +The APIs on `DaprEncryptionClient` perform asynchronous operations and accept an optional `CancellationToken` parameter. This +follows a standard .NET practice for cancellable operations. Note that when cancellation occurs, there is no guarantee that +the remote endpoint stops processing the request, only that the client has stopped waiting for completion. + +When an operation is cancelled, it will throw an `OperationCanceledException`. + +## Configuring `DaprEncryptionClient` via dependency injection +Using the built-in extension methods for registering the `DaprEncryptionClient` in a dependency injection container can +provide the benefit of registering the long-lived service a single time, centralize complex configuration and improve +performance by ensuring similarly long-lived resources are re-purposed when possible (e.g. `HttpClient` instances). + +There are three overloads available to give the developer the greatest flexibility in configuring the client for their +scenario.
Each of these will register the `IHttpClientFactory` on your behalf if not already registered, and configure +the `DaprEncryptionClientBuilder` to use it when creating the `HttpClient` instance in order to re-use the same instance as +much as possible and avoid socket exhaustion and other issues. + +In the first approach, there's no configuration done by the developer and the `DaprEncryptionClient` is configured with the +default settings. + +```cs +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddDaprEncryptionClient(); //Registers the `DaprEncryptionClient` to be injected as needed +var app = builder.Build(); +``` + +Sometimes the developer will need to configure the created client using the various configuration options detailed +above. This is done through an overload that passes in the `DaprEncryptionClientBuilder` and exposes methods for configuring +the necessary options. + +```cs +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddDaprEncryptionClient((_, daprEncryptionClientBuilder) => { + //Set the API token + daprEncryptionClientBuilder.UseDaprApiToken("abc123"); + //Specify a non-standard HTTP endpoint + daprEncryptionClientBuilder.UseHttpEndpoint("http://dapr.my-company.com"); +}); + +var app = builder.Build(); +``` + +Finally, it's possible that the developer may need to retrieve information from another service in order to populate +these configuration values.
That value may be provided from a `DaprClient` instance, a vendor-specific SDK or some +local service, but as long as it's also registered in DI, it can be injected into this configuration operation via the +last overload: + +```cs +var builder = WebApplication.CreateBuilder(args); + +//Register a fictional service that retrieves secrets from somewhere +builder.Services.AddSingleton(); + +builder.Services.AddDaprEncryptionClient((serviceProvider, daprEncryptionClientBuilder) => { + //Retrieve an instance of the `SecretService` from the service provider + var secretService = serviceProvider.GetRequiredService(); + var daprApiToken = secretService.GetSecret("DaprApiToken").Value; + + //Configure the `DaprEncryptionClientBuilder` + daprEncryptionClientBuilder.UseDaprApiToken(daprApiToken); +}); + +var app = builder.Build(); +``` \ No newline at end of file diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-distributed-lock/_index.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-distributed-lock/_index.md new file mode 100644 index 00000000000..b8aad64e3c8 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-distributed-lock/_index.md @@ -0,0 +1,143 @@ +--- +type: docs +title: "Dapr Distributed Lock .NET SDK" +linkTitle: "Distributed Lock" +weight: 61000 +description: Get up and running with the Dapr Distributed Lock .NET SDK +--- + +With the Dapr Distributed Lock package, you can create and remove locks on resources to manage exclusivity across +your distributed applications. + +While this capability is implemented in both the `Dapr.Client` and `Dapr.DistributedLock` packages, the approach differs +slightly between them and a future release will see the `Dapr.Client` package be deprecated. It's recommended that new +implementations use the `Dapr.DistributedLock` package. This document will reflect the implementation in the +`Dapr.DistributedLock` package.
+ +## Lifetime management +A `DaprDistributedLockClient` is a version of the Dapr client that is dedicated to interacting with Dapr's distributed +lock API. It can be registered alongside a `DaprClient` and other Dapr clients without issue. + +It maintains access to networking resources in the form of TCP sockets used to communicate with the Dapr sidecar runtime. + +For best performance, it is recommended that you utilize the dependency injection container mechanisms provided with the +`Dapr.DistributedLock` package to provide easy access to an injected instance throughout your application. These injected +instances are thread-safe and intended to be used across different types within your application. Registration via +dependency injection can utilize values from an `IConfiguration` or other injected services in a way that's impractical +when creating the client from scratch in each of your classes. + +If you do opt to manually create a `DaprDistributedLockClient` instance, it is recommended that you use the `DaprDistributedLockBuilder` +to create the client. This will ensure that the client is properly configured to communicate with the Dapr sidecar runtime. + +Avoid creating a `DaprDistributedLockClient` for each operation. + +## Configuring a `DaprDistributedLockClient` via `DaprDistributedLockBuilder` + +A `DaprDistributedLockClient` can be configured by invoking methods on the `DaprDistributedLockBuilder` class before calling +`.Build()` to create the client itself. The settings for each `DaprDistributedLockClient` are separate and cannot be changed +after calling `.Build()`.
+ +```csharp +var daprDistributedLockClient = new DaprDistributedLockBuilder() + .UseDaprApiToken("abc123") // Optionally specify the API token used to authenticate to other Dapr sidecars + .Build(); +``` + +The `DaprDistributedLockBuilder` contains settings for: + +- The HTTP endpoint of the Dapr sidecar +- The gRPC endpoint of the Dapr sidecar +- The `JsonSerializerOptions` object used to configure JSON serialization +- The `GrpcChannelOptions` object used to configure gRPC +- The API token used to authenticate requests to the sidecar +- The factory method used to create the `HttpClient` instance used by the SDK +- The timeout used for the `HttpClient` instance when making requests to the sidecar + +The SDK will read the following environment variables to configure the default values: + +- `DAPR_HTTP_ENDPOINT`: used to find the HTTP endpoint of the Dapr sidecar, example: `https://dapr-api.mycompany.com` +- `DAPR_GRPC_ENDPOINT`: used to find the gRPC endpoint of the Dapr sidecar, example: `https://dapr-grpc-api.mycompany.com` +- `DAPR_HTTP_PORT`: if `DAPR_HTTP_ENDPOINT` is not set, this is used to find the HTTP local endpoint of the Dapr sidecar +- `DAPR_GRPC_PORT`: if `DAPR_GRPC_ENDPOINT` is not set, this is used to find the gRPC local endpoint of the Dapr sidecar +- `DAPR_API_TOKEN`: used to set the API token + +### Configuring gRPC channel options + +Dapr's use of `CancellationToken` for cancellation relies on the configuration of the gRPC channel options. If you need +to configure these options yourself, make sure to enable the [ThrowOperationCanceledOnCancellation setting](https://grpc.github.io/grpc/csharp-dotnet/api/Grpc.Net.Client.GrpcChannelOptions.html#Grpc_Net_Client_GrpcChannelOptions_ThrowOperationCanceledOnCancellation). + +```cs +var daprDistributedLockClient = new DaprDistributedLockBuilder() + .UseGrpcChannelOptions(new GrpcChannelOptions { ... 
ThrowOperationCanceledOnCancellation = true }) + .Build(); +``` + +## Using cancellation with `DaprDistributedLockClient` + +The APIs on `DaprDistributedLockClient` perform asynchronous operations and accept an optional `CancellationToken` parameter. This +follows a standard .NET practice for cancellable operations. Note that when cancellation occurs, there is no guarantee that +the remote endpoint stops processing the request, only that the client has stopped waiting for completion. + +When an operation is cancelled, it will throw an `OperationCanceledException`. + +## Configuring `DaprDistributedLockClient` via dependency injection + +Using the built-in extension methods for registering the `DaprDistributedLockClient` in a dependency injection container can +provide the benefit of registering the long-lived service a single time, centralize complex configuration and improve +performance by ensuring similarly long-lived resources are re-purposed when possible (e.g. `HttpClient` instances). + +There are three overloads available to give the developer the greatest flexibility in configuring the client for their +scenario. Each of these will register the `IHttpClientFactory` on your behalf if not already registered, and configure +the `DaprDistributedLockBuilder` to use it when creating the `HttpClient` instance in order to re-use the same instance as +much as possible and avoid socket exhaustion and other issues. + +In the first approach, there's no configuration done by the developer and the `DaprDistributedLockClient` is configured with the +default settings. + +```cs +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddDaprDistributedLock(); //Registers the `DaprDistributedLockClient` to be injected as needed +var app = builder.Build(); +``` + +Sometimes the developer will need to configure the created client using the various configuration options detailed +above.
This is done through an overload that passes in the `DaprDistributedLockBuilder` and exposes methods for configuring +the necessary options. + +```cs +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddDaprDistributedLock((_, daprDistributedLockBuilder) => { + //Set the API token + daprDistributedLockBuilder.UseDaprApiToken("abc123"); + //Specify a non-standard HTTP endpoint + daprDistributedLockBuilder.UseHttpEndpoint("http://dapr.my-company.com"); +}); + +var app = builder.Build(); +``` + +Finally, it's possible that the developer may need to retrieve information from another service in order to populate +these configuration values. That value may be provided from a `DaprClient` instance, a vendor-specific SDK or some +local service, but as long as it's also registered in DI, it can be injected into this configuration operation via the +last overload: + +```cs +var builder = WebApplication.CreateBuilder(args); + +//Register a fictional service that retrieves secrets from somewhere +builder.Services.AddSingleton(); + +builder.Services.AddDaprDistributedLock((serviceProvider, daprDistributedLockBuilder) => { + //Retrieve an instance of the `SecretService` from the service provider + var secretService = serviceProvider.GetRequiredService(); + var daprApiToken = secretService.GetSecret("DaprApiToken").Value; + + //Configure the `DaprDistributedLockBuilder` + daprDistributedLockBuilder.UseDaprApiToken(daprApiToken); +}); + +var app = builder.Build(); +``` + diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-distributed-lock/dotnet-distributedlock-howto.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-distributed-lock/dotnet-distributedlock-howto.md new file mode 100644 index 00000000000..89fbe0afd72 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-distributed-lock/dotnet-distributedlock-howto.md @@ -0,0 +1,80 @@ +--- +type: docs +title: "How to: Create and use Dapr Distributed Lock in the .NET SDK" +linkTitle: 
"How to: Use the Distributed Lock client" +weight: 61050 +description: Learn how to create and use the Dapr Distributed Lock client using the .NET SDK +--- + +## Prerequisites +- [.NET 8](https://dotnet.microsoft.com/download/dotnet/8.0), or [.NET 9](https://dotnet.microsoft.com/download/dotnet/9.0) installed +- [Dapr CLI](https://docs.dapr.io/getting-started/install-dapr-cli/) +- [Initialized Dapr environment](https://docs.dapr.io/getting-started/install-dapr-selfhost) + +## Installation + +To get started with the Dapr Distributed Lock .NET SDK client, install the [Dapr.DistributedLock package](https://www.nuget.org/packages/Dapr.DistributedLock) from NuGet: +```sh +dotnet add package Dapr.DistributedLock +``` + +A `DaprDistributedLockClient` maintains access to networking resources in the form of TCP sockets used to communicate with the Dapr sidecar. + +### Dependency Injection + +The `AddDaprDistributedLock()` method will register the Dapr client with ASP.NET Core dependency injection and is the recommended approach +for using this package. This method accepts an optional options delegate for configuring the `DaprDistributedLockClient` and a +`ServiceLifetime` argument, allowing you to specify a different lifetime for the registered services instead of the default `Singleton` +value.
+ +The following example assumes all default values are acceptable and is sufficient to register the `DaprDistributedLockClient`: + +```csharp +services.AddDaprDistributedLock(); +``` + +The optional configuration delegate is used to configure the `DaprDistributedLockClient` by specifying options on the +`DaprDistributedLockBuilder` as in the following example: +```csharp +services.AddSingleton(); +services.AddDaprDistributedLock((serviceProvider, clientBuilder) => { + //Inject a service to source a value from + var optionsProvider = serviceProvider.GetRequiredService(); + var standardTimeout = optionsProvider.GetStandardTimeout(); + + //Configure the value on the client builder + clientBuilder.UseTimeout(standardTimeout); +}); +``` + +### Manual Instantiation +Rather than using dependency injection, a `DaprDistributedLockClient` can also be built using the static client builder. + +For best performance, create a single long-lived instance of `DaprDistributedLockClient` and provide access to that shared instance throughout +your application. `DaprDistributedLockClient` instances are thread-safe and intended to be shared. + +Avoid creating a `DaprDistributedLockClient` per-operation. + +A `DaprDistributedLockClient` can be configured by invoking methods on the `DaprDistributedLockBuilder` class before calling `.Build()` +to create the client. The settings for each `DaprDistributedLockClient` are separate and cannot be changed after calling `.Build()`. + +```csharp +var daprDistributedLockClient = new DaprDistributedLockBuilder() + .UseJsonSerializerSettings( ... ) //Configure JSON serializer + .Build(); +``` + +See the .NET [documentation here]({{% ref dotnet-distributed-lock %}}) for more information about the options available +when configuring the Dapr Distributed Lock client via the builder. + +## Try it out +Put the Dapr Distributed Lock .NET SDK to the test. 
Walk through the samples to see Dapr in action: + +| SDK Samples | Description | +| ----------- | ----------- | +| [SDK samples](https://github.com/dapr/dotnet-sdk/tree/master/examples) | Clone the SDK repo to try out some examples and get started. | + +## Building Blocks + +This part of the .NET SDK allows you to interface with the Distributed Lock API to place and remove locks for managing +resource exclusivity across your distributed applications. diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-guidance/_index.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-guidance/_index.md new file mode 100644 index 00000000000..2df0581fcf4 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-guidance/_index.md @@ -0,0 +1,53 @@ +--- +type: docs +title: "Best Practices for the Dapr .NET SDK" +linkTitle: "Best Practices" +weight: 85000 +description: Using Dapr .NET SDK effectively +--- + +## Building with confidence + +The Dapr .NET SDK offers a rich set of capabilities for building distributed applications. This section provides +practical guidance for using the SDK effectively in production scenarios—focusing on reliability, maintainability, and +developer experience. + +Topics covered include: + +- Error handling strategies across Dapr building blocks +- Managing experimental features and suppressing related warnings +- Leveraging source analyzers and generators to reduce boilerplate and catch issues early +- General .NET development practices in Dapr-based applications + +## Error model guidance + +Dapr operations can fail for many reasons—network issues, misconfigured components, or transient faults. The SDK +provides structured error types to help you distinguish between retryable and fatal errors. + +Learn how to use `DaprException` and its derived types effectively [here]({{% ref dotnet-guidance-error-model.md %}}). + +## Experimental attributes + +Some SDK features are marked as experimental and may change in future releases. 
These are annotated with +`[Experimental]` and generate build-time warnings by default. You can: + +- Suppress warnings selectively using `#pragma warning disable` +- Use `SuppressMessage` attributes for finer control +- Track experimental usage across your codebase + +Learn more about our use of the `[Experimental]` attribute [here]({{% ref dotnet-guidance-experimental-attributes.md %}}). + +## Source tooling + +The SDK includes Roslyn-based analyzers and source generators to help you write better code with less effort. These tools: + +- Warn about common misuses of the SDK +- Generate boilerplate for actor registration and invocation +- Support IDE integration for faster feedback + +Read more about how to install and use these analyzers [here]({{% ref dotnet-guidance-source-generators.md %}}). + +## Additional guidance + +This section is designed to support a wide range of development scenarios. As your applications grow in complexity, you'll find increasingly relevant practices and patterns for working with Dapr in .NET—from actor lifecycle management to configuration strategies and performance tuning. + diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-guidance/dotnet-guidance-error-model.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-guidance/dotnet-guidance-error-model.md new file mode 100644 index 00000000000..53f84b5b018 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-guidance/dotnet-guidance-error-model.md @@ -0,0 +1,141 @@ +--- +type: docs +title: "Error Model in the Dapr .NET SDK" +linkTitle: "Error Model" +weight: 85100 +description: Learn how to use the richer error model in the .NET SDK. +--- + +The Dapr .NET SDK supports the richer error model, implemented by the Dapr runtime. This model provides a way for applications to enrich their errors with added context, +allowing consumers of the application to better understand the issue and resolve it faster.
You can read more about the richer error model [here](https://google.aip.dev/193), and you +can find the Dapr proto file implementing these errors [here](https://github.com/googleapis/googleapis/blob/master/google/rpc/error_details.proto). + +The Dapr .NET SDK implements all details supported by the Dapr runtime, implemented in the `Dapr.Common.Exceptions` namespace, and is accessible through +the `DaprException` extension method `TryGetExtendedErrorInfo`. Currently, this detail extraction is only supported for +`RpcException`s where the details are present. + +```csharp +// Example usage of ExtendedErrorInfo + +try +{ + // Perform some action with the Dapr client that throws a DaprException. +} +catch (DaprException daprEx) +{ + if (daprEx.TryGetExtendedErrorInfo(out DaprExtendedErrorInfo errorInfo)) + { + Console.WriteLine(errorInfo.Code); + Console.WriteLine(errorInfo.Message); + + foreach (DaprExtendedErrorDetail detail in errorInfo.Details) + { + Console.WriteLine(detail.ErrorType); + switch (detail.ErrorType) + case ExtendedErrorType.ErrorInfo: + Console.WriteLine(detail.Reason); + Console.WriteLine(detail.Domain); + default: + Console.WriteLine(detail.TypeUrl); + } + } +} +``` + +## DaprExtendedErrorInfo + +Contains `Code` (the status code) and `Message` (the error message) associated with the error, parsed from an inner `RpcException`. +Also contains a collection of `DaprExtendedErrorDetails` parsed from the details in the exception. + +## DaprExtendedErrorDetail + +All details implement the abstract `DaprExtendedErrorDetail` and have an associated `DaprExtendedErrorType`. + +1. [RetryInfo](#retryinfo) + +2. [DebugInfo](#debuginfo) + +3. [QuotaFailure](#quotafailure) + +4. [PreconditionFailure](#preconditionfailure) + +5. [RequestInfo](#requestinfo) + +6. [LocalizedMessage](#localizedmessage) + +7. [BadRequest](#badrequest) + +8. [ErrorInfo](#errorinfo) + +9. [Help](#help) + +10. [ResourceInfo](#resourceinfo) + +11.
[Unknown](#unknown) + +## RetryInfo + +Information notifying the client how long to wait before they should retry. Provides a `DaprRetryDelay` with the properties +`Second` (offset in seconds) and `Nano` (offset in nanoseconds). + +## DebugInfo + +Debugging information offered by the server. Contains `StackEntries` (a collection of strings containing the stack trace), and +`Detail` (further debugging information). + +## QuotaFailure + +Information relating to some quota that may have been reached, such as a daily usage limit on an API. It has one property `Violations`, +a collection of `DaprQuotaFailureViolation`, which each contain a `Subject` (the subject of the request) and `Description` (further information regarding the failure). + +## PreconditionFailure + +Information informing the client that some required precondition was not met. Has one property `Violations`, a collection of +`DaprPreconditionFailureViolation`, which each has `Subject` (subject where the precondition failure occurred, e.g. "Azure"), +`Type` (representation of the precondition type, e.g. "TermsOfService"), and `Description` (further description e.g. "ToS must be accepted."). + +## RequestInfo + +Information returned by the server that can be used by the server to identify the client's request. Contains +`RequestId` and `ServingData` properties, `RequestId` being some string (such as a UID) the server can interpret, +and `ServingData` being some arbitrary data that made up part of the request. + +## LocalizedMessage + +Contains a localized message, along with the locale of the message. Contains `Locale` (the locale e.g. "en-US") and `Message` (the localized message). + +## BadRequest + +Describes a bad request field. Contains collection of `DaprBadRequestDetailFieldViolation`, which each has `Field` (the offending field in request, e.g. 'first_name') and +`Description` (further information detailing the reason, e.g. "first_name cannot contain special characters").
+ +## ErrorInfo + +Details the cause of an error. Contains three properties, `Reason` (the reason for the error, which should take the form of UPPER_SNAKE_CASE, e.g. DAPR_INVALID_KEY), +`Domain` (domain the error belongs to, e.g. 'dapr.io'), and `Metadata`, a key/value-based collection with further information. + +## Help + +Provides resources for the client to perform further research into the issue. Contains a collection of `DaprHelpDetailLink`, +which provides `Url` (a url to help or documentation), and `Description` (a description of what the link provides). + +## ResourceInfo + +Provides information relating to an accessed resource. Provides four properties `ResourceType` (type of the resource being accessed e.g. "Azure service bus"), +`ResourceName` (the name of the resource e.g. "my-configured-service-bus"), `Owner` (the owner of the resource e.g. "subscriptionowner@dapr.io"), +and `Description` (further information on the resource relating to the error, e.g. "missing permissions to use this resource"). + +## Unknown + +Returned when the detail type url cannot be mapped to the correct `DaprExtendedErrorDetail` implementation. +Provides one property `TypeUrl` (the type url that could not be parsed, e.g. "type.googleapis.com/Google.rpc.UnrecognizedType").
+ + + + + + + + + + diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-guidance/dotnet-guidance-experimental-attributes.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-guidance/dotnet-guidance-experimental-attributes.md new file mode 100644 index 00000000000..c0a279fd032 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-guidance/dotnet-guidance-experimental-attributes.md @@ -0,0 +1,138 @@ +--- +type: docs +title: "Experimental Attributes" +linkTitle: "Experimental Attributes" +weight: 85200 +description: Learn about why we mark some methods with the `[Experimental]` attribute +--- + +## Experimental Attributes + +### Introduction to Experimental Attributes + +With the release of .NET 8, C# 12 introduced the `[Experimental]` attribute, which provides a standardized way to mark +APIs that are still in development or experimental. This attribute is defined in the `System.Diagnostics.CodeAnalysis` +namespace and requires a diagnostic ID parameter used to generate compiler warnings when the experimental API +is used. + +In the Dapr .NET SDK, we now use the `[Experimental]` attribute instead of `[Obsolete]` to mark building blocks and +components that have not yet passed the stable lifecycle certification. This approach provides a clearer distinction +between: + +1. **Experimental APIs** - Features that are available but still evolving and have not yet been certified as stable +according to the [Dapr Component Certification Lifecycle](https://docs.dapr.io/operations/components/certification-lifecycle/). + +2. **Obsolete APIs** - Features that are truly deprecated and will be removed in a future release. + +### Usage in the Dapr .NET SDK + +In the Dapr .NET SDK, we apply the `[Experimental]` attribute at the class level for building blocks that are still in +the Alpha or Beta stages of the [Component Certification Lifecycle](https://docs.dapr.io/operations/components/certification-lifecycle/). 
+The attribute includes: + +- A diagnostic ID that identifies the experimental building block +- A URL that points to the relevant documentation for that block + +For example: + +```csharp +using System.Diagnostics.CodeAnalysis; +namespace Dapr.Cryptography.Encryption +{ + [Experimental("DAPR_CRYPTOGRAPHY", UrlFormat = "https://docs.dapr.io/developing-applications/building-blocks/cryptography/cryptography-overview/")] + public class DaprEncryptionClient + { + // Implementation + } +} +``` + +The diagnostic IDs follow a naming convention of `DAPR_[BUILDING_BLOCK_NAME]`, such as: + +- `DAPR_CONVERSATION` - For the Conversation building block +- `DAPR_CRYPTOGRAPHY` - For the Cryptography building block +- `DAPR_JOBS` - For the Jobs building block +- `DAPR_DISTRIBUTEDLOCK` - For the Distributed Lock building block + +### Suppressing Experimental Warnings + +When you use APIs marked with the `[Experimental]` attribute, the compiler will generate errors. +To build your solution without marking your own code as experimental, you will need to suppress these errors. Here are +several approaches to do this: + +#### Option 1: Using #pragma directive + +You can use the `#pragma warning` directive to suppress the warning for specific sections of code: + +```csharp +// Disable experimental warning +#pragma warning disable DAPR_CRYPTOGRAPHY +// Your code using the experimental API +var client = new DaprEncryptionClient(); +// Re-enable the warning +#pragma warning restore DAPR_CRYPTOGRAPHY +``` + +This approach is useful when you want to suppress warnings only for specific sections of your code. + +#### Option 2: Project-level suppression + +To suppress warnings for an entire project, add the following to your `.csproj` +file.
+ +```xml +<PropertyGroup> + <NoWarn>$(NoWarn);DAPR_CRYPTOGRAPHY</NoWarn> +</PropertyGroup> +``` + +You can include multiple diagnostic IDs separated by semicolons: + +```xml +<PropertyGroup> + <NoWarn>$(NoWarn);DAPR_CONVERSATION;DAPR_JOBS;DAPR_DISTRIBUTEDLOCK;DAPR_CRYPTOGRAPHY</NoWarn> +</PropertyGroup> +``` + +This approach is particularly useful for test projects that need to use experimental APIs. + +#### Option 3: Directory-level suppression + +For suppressing warnings across multiple projects in a directory, add a `Directory.Build.props` file: + +```xml +<PropertyGroup> + <NoWarn>$(NoWarn);DAPR_CONVERSATION;DAPR_JOBS;DAPR_DISTRIBUTEDLOCK;DAPR_CRYPTOGRAPHY</NoWarn> +</PropertyGroup> +``` + +This file should be placed in the root directory of your test projects. You can learn more about using +`Directory.Build.props` files in the +[MSBuild documentation](https://learn.microsoft.com/visualstudio/msbuild/customize-by-directory). + +### Lifecycle of Experimental APIs + +As building blocks move through the certification lifecycle and reach the "Stable" stage, the `[Experimental]` attribute will be removed. No migration or code changes will be required from users when this happens, except for the removal of any warning suppressions if they were added. + +Conversely, the `[Obsolete]` attribute will now be reserved exclusively for APIs that are truly deprecated and scheduled for removal. When you see a method or class marked with `[Obsolete]`, you should plan to migrate away from it according to the migration guidance provided in the attribute message. + +### Best Practices + +1. **In application code:** + - Be cautious when using experimental APIs, as they may change in future releases + - Consider isolating usage of experimental APIs to make future updates easier + - Document your use of experimental APIs for team awareness + +2. **In test code:** + - Use project-level suppression to avoid cluttering test code with warning suppressions + - Regularly review which experimental APIs you're using and check if they've been stabilized + +3.
**When contributing to the SDK:** + - Use `[Experimental]` for new building blocks that haven't completed certification + - Use `[Obsolete]` only for truly deprecated APIs + - Provide clear documentation links in the `UrlFormat` parameter + +### Additional Resources + +- [Dapr Component Certification Lifecycle](https://docs.dapr.io/operations/components/certification-lifecycle/) +- [C# Experimental Attribute Documentation](https://learn.microsoft.com/dotnet/csharp/language-reference/proposals/csharp-12.0/experimental-attribute) diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-guidance/dotnet-guidance-source-generators.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-guidance/dotnet-guidance-source-generators.md new file mode 100644 index 00000000000..32d9d9be00a --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-guidance/dotnet-guidance-source-generators.md @@ -0,0 +1,82 @@ +--- +type: docs +title: "Dapr source code analyzers and generators" +linkTitle: "Roslyn Analyzers/Generators" +weight: 85300 +description: Code analyzers and fixes for common Dapr issues +no_list: true +--- + +Dapr supports a growing collection of optional Roslyn analyzers and code fix providers that inspect your code for +code quality issues. Starting with the release of v1.16, developers have the opportunity to install additional projects +from NuGet alongside each of the standard capability packages to enable these analyzers in their solutions. + +{{% alert title="Note" color="primary" %}} + +A future release of the Dapr .NET SDK will include these analyzers by default without requiring a separate package +install. + +{{% /alert %}} + +Rule violations will typically be marked as `Info` or `Warning` so that if the analyzer identifies an issue, it won't +necessarily break builds. All code analysis violations appear with the prefix "DAPR" and are uniquely distinguished +by a number following this prefix. 
+ +{{% alert title="Note" color="primary" %}} + +At this time, the first two digits of the diagnostic identifier map one-to-one to distinct Dapr packages, but this +is subject to change in the future as more analyzers are developed. + +{{% /alert %}} + +## Install and configure analyzers +The following packages will be available via NuGet following the v1.16 Dapr release: +- Dapr.Actors.Analyzers +- Dapr.Jobs.Analyzers +- Dapr.Workflow.Analyzers + +Install each NuGet package on every project where you want the analyzers to run. The package will be installed as a +project dependency and analyzers will run as you write your code or as part of a CI/CD build. The analyzers will flag +issues in your existing code and warn you about new issues as you build your project. + +Many of our analyzers have associated code fixes that can be applied to automatically correct the problem. If your IDE +supports this capability, any available code fixes will show up as an inline menu option in your code. + +Further, most of our analyzers should also report a specific line and column number in your code of the syntax that's +been identified as a key aspect of the rule. If your IDE supports it, double clicking any of the analyzer warnings +should jump directly to the part of your code responsible for the violating the analyzer's rule. + +### Suppress specific analyzers +If you wish to keep an analyzer from firing against some particular piece of your project, their outputs can be +individually targeted for suppression through a number of ways. Read more about suppressing analyzers in projects +or files in the associated [.NET documentation](https://learn.microsoft.com/dotnet/fundamentals/code-analysis/suppress-warnings#use-the-suppressmessageattribute). + +### Disable all analyzers +If you wish to disable all analyzers in your project without removing any packages providing them, set +the `EnableNETAnalyzers` property to `false` in your csproj file. 
+ +## Available Analyzers + +| Diagnostic ID | Dapr Package | Category | Severity | Version Added | Description | Code Fix Available | +| -- | -- |------------------|--------------|-----------------------------------------------------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------| -- | +| DAPR1301 | Dapr.Workflow | Usage | Warning | 1.16 | The workflow type is not registered with the dependency injection provider | Yes | +| DAPR1302 | Dapr.Workflow | Usage | Warning | 1.16 | The workflow activity type is not registered with the dependency injection provider | Yes | +| DAPR1401 | Dapr.Actors | Usage | Warning | 1.16 | Actor timer method invocations require the named callback method to exist on type | No | +| DAPR1402 | Dapr.Actors | Usage | Warning | 1.16 | The actor type is not registered with dependency injection | Yes | +| DAPR1403 | Dapr.Actors | Interoperability | Info | 1.16 | Set options.UseJsonSerialization to true to support interoperability with non-.NET actors | Yes | +| DAPR1404 | Dapr.Actors | Usage | Warning | 1.16 | Call app.MapActorsHandlers to map endpoints for Dapr actors | Yes | +| DAPR1501 | Dapr.Jobs | Usage | Warning | 1.16 | Job invocations require the MapDaprScheduledJobHandler to be set and configured for each anticipated job on IEndpointRouteBuilder | No | + +## Analyzer Categories +The following are each of the eligible categories that an analyzer can be assigned to and are modeled after the +standard categories used by the .NET analyzers: +- Design +- Documentation +- Globalization +- Interoperability +- Maintainability +- Naming +- Performance +- Reliability +- Security +- Usage \ No newline at end of file diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-integrations/_index.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-integrations/_index.md new 
file mode 100644 index 00000000000..ff03c411532 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-integrations/_index.md @@ -0,0 +1,42 @@ +--- +type: docs +title: "Developing applications with the Dapr .NET SDK" +linkTitle: "Deployment Integrations" +weight: 90000 +description: Deployment integrations with the Dapr .NET SDK +--- + +## Thinking more than one at a time + +Using your favorite IDE or editor to launch an application typically assumes that you only need to run one thing: +the application you're debugging. However, developing microservices challenges you to think about your local +development process for *more than one at a time*. A microservices application has multiple services that you might +need running simultaneously, and dependencies (like state stores) to manage. + +Adding Dapr to your development process means you need to manage the following concerns: + +- Each service you want to run +- A Dapr sidecar for each service +- Dapr component and configuration manifests +- Additional dependencies such as state stores +- optional: the Dapr placement service for actors + +This document assumes that you're building a production application and want to create a repeatable and robust set of +development practices. The guidance here is generalized, and applies to any .NET server application using +Dapr (including actors). + +## Managing components + +You have two primary methods of storing component definitions for local development with Dapr: + +- Use the default location (`~/.dapr/components`) +- Use your own location + +Creating a folder within your source code repository to store components and configuration will give you a way to +version and share these definitions. The guidance provided here will assume you created a folder next to the +application source code to store these files. + +## Development options + +Choose one of these links to learn about tools you can use in local development scenarios. 
It's suggested that +you familiarize yourself with each of them to get a sense of the options provided by the .NET SDK. diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-integrations/dotnet-development-dapr-aspire.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-integrations/dotnet-development-dapr-aspire.md new file mode 100644 index 00000000000..d33f5441b88 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-integrations/dotnet-development-dapr-aspire.md @@ -0,0 +1,155 @@ +--- +type: docs +title: "Dapr .NET SDK Development with .NET Aspire" +linkTitle: ".NET Aspire" +weight: 90300 +description: Learn about local development with .NET Aspire +--- + +# .NET Aspire + +[.NET Aspire](https://learn.microsoft.com/dotnet/aspire/get-started/aspire-overview) is a development tool +designed to make it easier to include external software into .NET applications by providing a framework that allows +third-party services to be readily integrated, observed and provisioned alongside your own software. + +Aspire simplifies local development by providing rich integration with popular IDEs including +[Microsoft Visual Studio](https://visualstudio.microsoft.com/vs/), +[Visual Studio Code](https://code.visualstudio.com/), +[JetBrains Rider](https://blog.jetbrains.com/dotnet/2024/02/19/jetbrains-rider-and-the-net-aspire-plugin/) and others +to launch your application with the debugger while automatically launching and provisioning access to other +integrations as well, including Dapr. + +While Aspire also assists with deployment of your application to various cloud hosts like Microsoft Azure and +Amazon AWS, deployment is currently outside the scope of this guide. More information can be found in Aspire's +documentation [here](https://learn.microsoft.com/dotnet/aspire/deployment/overview). 
+ +An end-to-end demonstration featuring the following and demonstrating service invocation between multiple Dapr-enabled +services can be found [here](https://github.com/dapr/dotnet-sdk/tree/master/examples/Hosting/Aspire/ServiceInvocationDemo). + +## Prerequisites +- Both the Dapr .NET SDK and .NET Aspire are compatible with [.NET 8](https://dotnet.microsoft.com/download/dotnet/8.0) +or [.NET 9](https://dotnet.microsoft.com/download/dotnet/9.0) +- An OCI compliant container runtime such as [Docker Desktop](https://www.docker.com/products/docker-desktop) or +[Podman](https://podman.io/) +- Install and initialize Dapr v1.16 or later + +## Using .NET Aspire via CLI + +We'll start by creating a brand new .NET application. Open your preferred CLI and navigate to the directory you wish +to create your new .NET solution within. Start by using the following command to install a template that will create +an empty Aspire application: + +```sh +dotnet new install Aspire.ProjectTemplates +``` + +Once that's installed, proceed to create an empty .NET Aspire application in your current directory. The `-n` argument +allows you to specify the name of the output solution. If it's excluded, the .NET CLI will instead use the name +of the output directory, e.g. `C:\source\aspiredemo` will result in the solution being named `aspiredemo`. The rest +of this tutorial will assume a solution named `aspiredemo`. + +```sh +dotnet new aspire -n aspiredemo +``` + +This will create two Aspire-specific directories and one file in your directory: +- `aspiredemo.AppHost/` contains the Aspire orchestration project that is used to configure each of the integrations +used in your application(s). +- `aspiredemo.ServiceDefaults/` contains a collection of extensions meant to be shared across your solution to aid in +resilience, service discovery and telemetry capabilities offered by Aspire (these are distinct from the capabilities +offered in Dapr itself). 
+- `aspiredemo.sln` is the file that maintains the layout of your current solution
+
+We'll next create two projects that'll serve as our Dapr applications and demonstrate Dapr functionality. From the same
+directory, use the following to create an empty ASP.NET Core project called `FrontEndApp` and another called
+`BackEndApp`. Each will be created relative to your current directory in
+`FrontEndApp\FrontEndApp.csproj` and `BackEndApp\BackEndApp.csproj`, respectively.
+
+```sh
+dotnet new web --name FrontEndApp
+dotnet new web --name BackEndApp
+```
+
+Next we'll configure the AppHost project to add the necessary package to support local Dapr development. Navigate
+into the AppHost directory with the following and install the `CommunityToolkit.Aspire.Hosting.Dapr` package from NuGet into the project.
+
+We'll also add references to our `FrontEndApp` and `BackEndApp` projects so we can reference them during the registration process.
+
+{{% alert color="primary" %}}
+
+This package was previously called `Aspire.Hosting.Dapr`, which has been [marked as deprecated](https://www.nuget.org/packages/Aspire.Hosting.Dapr).
+
+{{% /alert %}}
+
+```sh
+cd aspiredemo.AppHost
+dotnet add package CommunityToolkit.Aspire.Hosting.Dapr
+dotnet add reference ../FrontEndApp/
+dotnet add reference ../BackEndApp/
+```
+
+Next, we need to configure Dapr as a resource to be loaded alongside your project. Open the `Program.cs` file in that
+project within your preferred IDE. It should look similar to the following:
+
+```csharp
+var builder = DistributedApplication.CreateBuilder(args);
+
+builder.Build().Run();
+```
+
+If you're familiar with the dependency injection approach used in ASP.NET Core projects or others utilizing the
+`Microsoft.Extensions.DependencyInjection` functionality, you'll find that this will be a familiar experience.
+
+Because we've already added project references to `FrontEndApp` and `BackEndApp`, we need to start by adding references in this configuration
+as well.
Add the following before the `builder.Build().Run()` line: + +```csharp +var backEndApp = builder + .AddProject("be") + .WithDaprSidecar(); + +var frontEndApp = builder + .AddProject("fe") + .WithDaprSidecar(); +``` + +Because the project reference has been added to this solution, your project shows up as a type within the `Projects.` +namespace for our purposes here. The name of the variable you assign the project to doesn't much matter in this tutorial +but would be used if you wanted to create a reference between this project and another using Aspire's service discovery +functionality. + +Adding `.WithDaprSidecar()` configures Dapr as a .NET Aspire resource so that when the project runs, the sidecar will be +deployed alongside your application. This accepts a number of different options and could optionally be configured as in +the following example: + +```csharp +DaprSidecarOptions sidecarOptions = new() +{ + AppId = "how-dapr-identifies-your-app", + AppPort = 8080, //Note that this argument is required if you intend to configure pubsub, actors or workflows as of Aspire v9.0 + DaprGrpcPort = 50001, + DaprHttpPort = 3500, + MetricsPort = 9090 +}; + +builder + .AddProject("be") + .WithReference(myApp) + .WithDaprSidecar(sidecarOptions); +``` + +{{% alert color="primary" %}} + +As indicated in the example above, as of .NET Aspire 9.0, if you intend to use any functionality in which Dapr needs to +call into your application such as pubsub, actors or workflows, you will need to specify your AppPort as +a configured option as Aspire will not automatically pass it to Dapr at runtime. It's expected that this behavior will +change in a future release as a fix has been merged and can be tracked [here](https://github.com/dotnet/aspire/pull/6362). + +{{% /alert %}} + +Finally, let's add an endpoint to the back-end app that we can invoke using Dapr's service invocation to display to a +page to demonstrate that Dapr is working as expected. 
+ +When you open the solution in your IDE, ensure that the `aspiredemo.AppHost` is configured as your startup project, but +when you launch it in a debug configuration, you'll note that your integrated console should reflect your expected Dapr +logs and it will be available to your application. + diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-integrations/dotnet-development-dapr-cli.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-integrations/dotnet-development-dapr-cli.md new file mode 100644 index 00000000000..2c7e95f9617 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-integrations/dotnet-development-dapr-cli.md @@ -0,0 +1,53 @@ +--- +type: docs +title: "Dapr .NET SDK Development with Dapr CLI" +linkTitle: "Dapr CLI" +weight: 90100 +description: Learn about local development with the Dapr CLI +--- + +## Dapr CLI + +*Consider this to be a .NET companion to the [Dapr Self-Hosted with Docker Guide]({{% ref self-hosted-with-docker.md %}})*. + +The Dapr CLI provides you with a good base to work from by initializing a local redis container, zipkin container, the placement service, and component manifests for redis. This will enable you to work with the following building blocks on a fresh install with no additional setup: + +- [Service invocation]({{% ref service-invocation %}}) +- [State Store]({{% ref state-management %}}) +- [Pub/Sub]({{% ref pubsub %}}) +- [Actors]({{% ref actors %}}) + +You can run .NET services with `dapr run` as your strategy for developing locally. Plan on running one of these commands per-service in order to launch your application. 
+
+- **Pro:** this is easy to set up since it's part of the default Dapr installation
+- **Con:** this uses long-running docker containers on your machine, which might not be desirable
+- **Con:** the scalability of this approach is poor since it requires running a separate command per-service
+
+### Using the Dapr CLI
+
+For each service you need to choose:
+
+- A unique app-id for addressing (`app-id`)
+- A unique listening port for HTTP (`port`)
+
+You also should have decided on where you are storing components (`components-path`).
+
+The following command can be run from multiple terminals to launch each service, with the respective values substituted.
+
+```sh
+dapr run --app-id <app-id> --app-port <port> --components-path <components-path> -- dotnet run -p <project> --urls http://localhost:<port>
+```
+
+**Explanation:** this command will use `dapr run` to launch each service and its sidecar. The first half of the command (before `--`) passes required configuration to the Dapr CLI. The second half of the command (after `--`) passes required configuration to the `dotnet run` command.
+
+{{% alert title="💡 Ports" color="primary" %}}
+Since you need to configure a unique port for each service, you can use this command to pass that port value to **both** Dapr and the service. `--urls http://localhost:<port>` will configure ASP.NET Core to listen for traffic on the provided port. Using configuration at the commandline is a more flexible approach than hardcoding a listening port elsewhere.
+{{% /alert %}}
+
+If any of your services do not accept HTTP traffic, then modify the command above by removing the `--app-port` and `--urls` arguments.
+
+### Next steps
+
+If you need to debug, then use the attach feature of your debugger to attach to one of the running processes.
+
+If you want to scale up this approach, then consider building a script which automates this process for your whole application.
diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-integrations/dotnet-development-docker-compose.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-integrations/dotnet-development-docker-compose.md new file mode 100644 index 00000000000..99cdecf52f7 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-integrations/dotnet-development-docker-compose.md @@ -0,0 +1,44 @@ +---
+type: docs
+title: "Dapr .NET SDK Development with Docker-Compose"
+linkTitle: "Docker Compose"
+weight: 90200
+description: Learn about local development with Docker-Compose
+---
+
+## Docker-Compose
+
+*Consider this to be a .NET companion to the [Dapr Self-Hosted with Docker Guide]({{% ref self-hosted-with-docker.md %}})*.
+
+`docker-compose` is a CLI tool included with Docker Desktop that you can use to run multiple containers at a time. It is a way to automate the lifecycle of multiple containers together, and offers a development experience similar to a production environment for applications targeting Kubernetes.
+
+- **Pro:** Since `docker-compose` manages containers for you, you can make dependencies part of the application definition and stop the long-running containers on your machine.
+- **Con:** most investment required, services need to be containerized to get started.
+- **Con:** can be difficult to debug and troubleshoot if you are unfamiliar with Docker.
+
+### Using docker-compose
+
+From the .NET perspective, there is no specialized guidance needed for `docker-compose` with Dapr. `docker-compose` runs containers, and once your service is in a container, configuring it is similar to any other programming technology.
+
+{{% alert title="💡 App Port" color="primary" %}}
+In a container, an ASP.NET Core app will listen on port 80 by default. Remember this for when you need to configure the `--app-port` later.
+{{% /alert %}}
+
+To summarize the approach:
+
+- Create a `Dockerfile` for each service
+- Create a `docker-compose.yaml` and check it in to the source code repository
+
+To understand how to author the `docker-compose.yaml` you should start with the [Hello, docker-compose sample](https://github.com/dapr/samples/tree/master/hello-docker-compose).
+
+Similar to running locally with `dapr run` for each service you need to choose a unique app-id. Choosing the container name as the app-id will make this simple to remember.
+
+The compose file will contain at a minimum:
+
+- A network that the containers use to communicate
+- Each service's container
+- A `-daprd` sidecar container with the service's port and app-id specified
+- Additional dependencies that run in containers (redis for example)
+- Optional: Dapr placement container (for actors)
+
+You can also view a larger example from the [eShopOnContainers](https://github.com/dotnet-architecture/eShopOnDapr/blob/master/docker-compose.yml) sample application. diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-jobs/_index.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-jobs/_index.md new file mode 100644 index 00000000000..d4f4f52be4b --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-jobs/_index.md @@ -0,0 +1,13 @@ +---
+type: docs
+title: "Dapr Jobs .NET SDK"
+linkTitle: "Jobs"
+weight: 50000
+description: Get up and running with Dapr Jobs and the Dapr .NET SDK
+---
+
+With the Dapr Job package, you can interact with the Dapr Job APIs from a .NET application to trigger future operations
+to run according to a predefined schedule with an optional payload.
+
+To get started, walk through the [Dapr Jobs]({{% ref dotnet-jobs-howto.md %}}) how-to guide and refer to
+[best practices documentation]({{% ref dotnet-jobsclient-usage.md %}}) for additional guidance.
diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-jobs/dotnet-jobs-howto.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-jobs/dotnet-jobs-howto.md new file mode 100644 index 00000000000..8858b632631 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-jobs/dotnet-jobs-howto.md @@ -0,0 +1,419 @@ +--- +type: docs +title: "How to: Author and manage Dapr Jobs in the .NET SDK" +linkTitle: "How to: Author & manage jobs" +weight: 51000 +description: Learn how to author and manage Dapr Jobs using the .NET SDK +--- + +Let's create an endpoint that will be invoked by Dapr Jobs when it triggers, then schedule the job in the same app. We'll use the [simple example provided here](https://github.com/dapr/dotnet-sdk/tree/master/examples/Jobs), for the following demonstration and walk through it as an explainer of how you can schedule one-time or recurring jobs using either an interval or Cron expression yourself. In this guide, +you will: + +- Deploy a .NET Web API application ([JobsSample](https://github.com/dapr/dotnet-sdk/tree/master/examples/Jobs/JobsSample)) +- Utilize the Dapr .NET Jobs SDK to schedule a job invocation and set up the endpoint to be triggered + +In the .NET example project: +- The main [`Program.cs`](https://github.com/dapr/dotnet-sdk/tree/master/examples/Jobs/JobsSample/Program.cs) file comprises the entirety of this demonstration. + +## Prerequisites +- [Dapr CLI](https://docs.dapr.io/getting-started/install-dapr-cli/) +- [Initialized Dapr environment](https://docs.dapr.io/getting-started/install-dapr-selfhost) +- [.NET 8](https://dotnet.microsoft.com/download/dotnet/8.0) or [.NET 9](https://dotnet.microsoft.com/download/dotnet/9.0) installed +- [Dapr.Jobs](https://www.nuget.org/packages/Dapr.Jobs) NuGet package installed to your project + +## Set up the environment +Clone the [.NET SDK repo](https://github.com/dapr/dotnet-sdk). 
+ +```sh +git clone https://github.com/dapr/dotnet-sdk.git +``` + +From the .NET SDK root directory, navigate to the Dapr Jobs example. + +```sh +cd examples/Jobs +``` + +## Run the application locally + +To run the Dapr application, you need to start the .NET program and a Dapr sidecar. Navigate to the `JobsSample` directory. + +```sh +cd JobsSample +``` + +We'll run a command that starts both the Dapr sidecar and the .NET program at the same time. + +```sh +dapr run --app-id jobsapp --dapr-grpc-port 4001 --dapr-http-port 3500 -- dotnet run +``` + +> Dapr listens for HTTP requests at `http://localhost:3500` and internal Jobs gRPC requests at `http://localhost:4001`. + +## Register the Dapr Jobs client with dependency injection +The Dapr Jobs SDK provides an extension method to simplify the registration of the Dapr Jobs client. Before completing +the dependency injection registration in `Program.cs`, add the following line: + +```cs +var builder = WebApplication.CreateBuilder(args); + +//Add anywhere between these two lines +builder.Services.AddDaprJobsClient(); + +var app = builder.Build(); +``` + +> Note that in today's implementation of the Jobs API, the app that schedules the job will also be the app that receives the trigger notification. In other words, you cannot schedule a trigger to run in another application. As a result, while you don't explicitly need the Dapr Jobs client to be registered in your application to schedule a trigger invocation endpoint, your endpoint will never be invoked without the same app also scheduling the job somehow (whether via this Dapr Jobs .NET SDK or an HTTP call to the sidecar). + +It's possible that you may want to provide some configuration options to the Dapr Jobs client that +should be present with each call to the sidecar such as a Dapr API token, or you want to use a non-standard +HTTP or gRPC endpoint. 
This is possible through use of an overload of the registration method that allows configuration of a
+`DaprJobsClientBuilder` instance:
+
+```cs
+var builder = WebApplication.CreateBuilder(args);
+
+builder.Services.AddDaprJobsClient((_, daprJobsClientBuilder) =>
+{
+    daprJobsClientBuilder.UseDaprApiToken("abc123");
+    daprJobsClientBuilder.UseHttpEndpoint("http://localhost:8512"); //Non-standard sidecar HTTP endpoint
+});
+
+var app = builder.Build();
+```
+
+Still, it's possible that whatever values you wish to inject need to be retrieved from some other source, itself registered as a dependency. There's one more overload you can use to inject an `IServiceProvider` into the configuration action method. In the following example, we register a fictional singleton that can retrieve secrets from somewhere and pass it into the configuration method for `AddDaprJobsClient` so
+we can retrieve our Dapr API token from somewhere else for registration here:
+
+```cs
+var builder = WebApplication.CreateBuilder(args);
+
+builder.Services.AddSingleton<SecretRetriever>();
+builder.Services.AddDaprJobsClient((serviceProvider, daprJobsClientBuilder) =>
+{
+    var secretRetriever = serviceProvider.GetRequiredService<SecretRetriever>();
+    var daprApiToken = secretRetriever.GetSecret("DaprApiToken").Value;
+    daprJobsClientBuilder.UseDaprApiToken(daprApiToken);
+
+    daprJobsClientBuilder.UseHttpEndpoint("http://localhost:8512");
+});
+
+var app = builder.Build();
+```
+
+## Use the Dapr Jobs client using IConfiguration
+It's possible to configure the Dapr Jobs client using the values in your registered `IConfiguration` as well without
+explicitly specifying each of the value overrides using the `DaprJobsClientBuilder` as demonstrated in the previous
+section. Rather, by populating an `IConfiguration` made available through dependency injection the `AddDaprJobsClient()`
+registration will automatically use these values over their respective defaults.
+
+Start by populating the values in your configuration.
This can be done in several different ways as demonstrated below. + +### Configuration via `ConfigurationBuilder` +Application settings can be configured without using a configuration source and by instead populating the value in-memory +using a `ConfigurationBuilder` instance: + +```csharp +var builder = WebApplication.CreateBuilder(); + +//Create the configuration +var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary { + { "DAPR_HTTP_ENDPOINT", "http://localhost:54321" }, + { "DAPR_API_TOKEN", "abc123" } + }) + .Build(); + +builder.Configuration.AddConfiguration(configuration); +builder.Services.AddDaprJobsClient(); //This will automatically populate the HTTP endpoint and API token values from the IConfiguration +``` + +### Configuration via Environment Variables +Application settings can be accessed from environment variables available to your application. + +The following environment variables will be used to populate both the HTTP endpoint and API token used to register the +Dapr Jobs client. + +| Key | Value | +| --- | --- | +| DAPR_HTTP_ENDPOINT | http://localhost:54321 | +| DAPR_API_TOKEN | abc123 | + +```csharp +var builder = WebApplication.CreateBuilder(); + +builder.Configuration.AddEnvironmentVariables(); +builder.Services.AddDaprJobsClient(); +``` + +The Dapr Jobs client will be configured to use both the HTTP endpoint `http://localhost:54321` and populate all outbound +requests with the API token header `abc123`. + +### Configuration via prefixed Environment Variables + +However, in shared-host scenarios where there are multiple applications all running on the same machine without using +containers or in development environments, it's not uncommon to prefix environment variables. The following example +assumes that both the HTTP endpoint and the API token will be pulled from environment variables prefixed with the +value "myapp_". 
The two environment variables used in this scenario are as follows:
+
+| Key | Value |
+| --- | --- |
+| myapp_DAPR_HTTP_ENDPOINT | http://localhost:54321 |
+| myapp_DAPR_API_TOKEN | abc123 |
+
+These environment variables will be loaded into the registered configuration in the following example and made available
+without the prefix attached.
+
+```csharp
+var builder = WebApplication.CreateBuilder();
+
+builder.Configuration.AddEnvironmentVariables(prefix: "myapp_");
+builder.Services.AddDaprJobsClient();
+```
+
+The Dapr Jobs client will be configured to use both the HTTP endpoint `http://localhost:54321` and populate all outbound
+requests with the API token header `abc123`.
+
+## Use the Dapr Jobs client without relying on dependency injection
+While the use of dependency injection simplifies the use of complex types in .NET and makes it easier to
+deal with complicated configurations, you're not required to register the `DaprJobsClient` in this way. Rather, you can also elect to create an instance of it from a `DaprJobsClientBuilder` instance as demonstrated below:
+
+```cs
+
+public class MySampleClass
+{
+    public void DoSomething()
+    {
+        var daprJobsClientBuilder = new DaprJobsClientBuilder();
+        var daprJobsClient = daprJobsClientBuilder.Build();
+
+        //Do something with the `daprJobsClient`
+    }
+}
+```
+
+## Set up an endpoint to be invoked when the job is triggered
+
+It's easy to set up a jobs endpoint if you're at all familiar with [minimal APIs in ASP.NET Core](https://learn.microsoft.com/aspnet/core/fundamentals/minimal-apis/overview) as the syntax is the same between the two.
+
+Once dependency injection registration has been completed, configure the application the same way you would to handle mapping an HTTP request via the minimal API functionality in ASP.NET Core. Implemented as an extension method,
+pass the name of the job it should be responsive to and a delegate.
Services can be injected into the delegate's arguments as you wish and the job payload can be accessed from the `ReadOnlyMemory<byte>` originally provided to the
| ttl | DateTime | When the job should expire and no longer trigger. | No |
There are several static methods exposed on the `DaprJobSchedule` used to facilitate
await daprJobsClient.ScheduleJobAsync("myJobName", schedule, startingFrom: firstOfNextMonth, cancellationToken: cancellationToken);
await daprJobsClient.ScheduleJobAsync("myJobName", schedule, startingFrom: firstOfNextMonth, cancellationToken: cancellationToken);
The registration method supports registration
+as a singleton, a scoped instance, or as transient
+ +Avoid creating a `DaprJobsClient` for each operation and disposing it when the operation is complete. + +## Configuring DaprJobsClient via the DaprJobsClientBuilder + +A `DaprJobsClient` can be configured by invoking methods on the `DaprJobsClientBuilder` class before calling `.Build()` +to create the client itself. The settings for each `DaprJobsClient` are separate +and cannot be changed after calling `.Build()`. + +```cs +var daprJobsClient = new DaprJobsClientBuilder() + .UseDaprApiToken("abc123") // Specify the API token used to authenticate to other Dapr sidecars + .Build(); +``` + +The `DaprJobsClientBuilder` contains settings for: + +- The HTTP endpoint of the Dapr sidecar +- The gRPC endpoint of the Dapr sidecar +- The `JsonSerializerOptions` object used to configure JSON serialization +- The `GrpcChannelOptions` object used to configure gRPC +- The API token used to authenticate requests to the sidecar +- The factory method used to create the `HttpClient` instance used by the SDK +- The timeout used for the `HttpClient` instance when making requests to the sidecar + +The SDK will read the following environment variables to configure the default values: + +- `DAPR_HTTP_ENDPOINT`: used to find the HTTP endpoint of the Dapr sidecar, example: `https://dapr-api.mycompany.com` +- `DAPR_GRPC_ENDPOINT`: used to find the gRPC endpoint of the Dapr sidecar, example: `https://dapr-grpc-api.mycompany.com` +- `DAPR_HTTP_PORT`: if `DAPR_HTTP_ENDPOINT` is not set, this is used to find the HTTP local endpoint of the Dapr sidecar +- `DAPR_GRPC_PORT`: if `DAPR_GRPC_ENDPOINT` is not set, this is used to find the gRPC local endpoint of the Dapr sidecar +- `DAPR_API_TOKEN`: used to set the API token + +### Configuring gRPC channel options + +Dapr's use of `CancellationToken` for cancellation relies on the configuration of the gRPC channel options. 
When an operation is cancelled, it will throw an `OperationCanceledException`.
above. This is done through an overload that passes in the `DaprJobsClientBuilder` and exposes methods for configuring
payload of `ReadOnlyMemory<byte>` meaning that serialization is an exercise left to the developer and is not
Both methods assume that the developer encoded the originally scheduled job (perhaps using the +helper serialization methods) as these methods will not force the bytes to represent something they're not. + +To deserialize the bytes to a string, the following helper method can be used: +```cs +var payloadAsString = Encoding.UTF8.GetString(jobPayload.Span); //If successful, returns a string with the value +``` + +## Error handling + +Methods on `DaprJobsClient` will throw a `DaprJobsServiceException` if an issue is encountered between the SDK +and the Jobs API service running on the Dapr sidecar. If a failure is encountered because of a poorly formatted +request made to the Jobs API service through this SDK, a `DaprMalformedJobException` will be thrown. In case of +illegal argument values, the appropriate standard exception will be thrown (e.g. `ArgumentOutOfRangeException` +or `ArgumentNullException`) with the name of the offending argument. And for anything else, a `DaprException` +will be thrown. + +The most common cases of failure will be related to: + +- Incorrect argument formatting while engaging with the Jobs API +- Transient failures such as a networking problem +- Invalid data, such as a failure to deserialize a value into a type it wasn't originally serialized from + +In any of these cases, you can examine more exception details through the `.InnerException` property. diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-messaging/_index.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-messaging/_index.md new file mode 100644 index 00000000000..9927145d7fd --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-messaging/_index.md @@ -0,0 +1,17 @@ +--- +type: docs +title: "Dapr Messaging .NET SDK" +linkTitle: "Messaging" +weight: 60000 +description: Get up and running with the Dapr Messaging .NET SDK +--- + +With the Dapr Messaging package, you can interact with the Dapr messaging APIs from a .NET application. 
In the +v1.15 release, this package only contains the functionality corresponding to the +[streaming PubSub capability]({{% ref "dotnet-messaging-pubsub-howto.md#subscribe-to-topics" %}}) + +Future Dapr .NET SDK releases will migrate existing messaging capabilities out from Dapr.Client to this +Dapr.Messaging package. This will be documented in the release notes, documentation and obsolete attributes in advance. + +To get started, walk through the [Dapr Messaging]({{% ref dotnet-messaging-pubsub-howto.md %}}) how-to guide and +refer to [best practices documentation]({{% ref dotnet-messaging-pubsub-usage.md %}}) for additional guidance. diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-messaging/dotnet-messaging-pubsub-howto.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-messaging/dotnet-messaging-pubsub-howto.md new file mode 100644 index 00000000000..5b748b61cae --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-messaging/dotnet-messaging-pubsub-howto.md @@ -0,0 +1,262 @@ +--- +type: docs +title: "How to: Author and manage Dapr streaming subscriptions in the .NET SDK" +linkTitle: "How to: Author & manage streaming subscriptions" +weight: 61000 +description: Learn how to author and manage Dapr streaming subscriptions using the .NET SDK +--- + +Let's create a subscription to a pub/sub topic or queue at using the streaming capability. We'll use the +[simple example provided here](https://github.com/dapr/dotnet-sdk/tree/master/examples/Client/PublishSubscribe/StreamingSubscriptionExample), +for the following demonstration and walk through it as an explainer of how you can configure message handlers at +runtime and which do not require an endpoint to be pre-configured. 
Let's create a subscription to a pub/sub topic or queue using the streaming capability.
HTTP or gRPC endpoint. This is possible through the use of an overload of the registration method that allows configuration
In the following example, we register a fictional singleton that can retrieve secrets from somewhere and pass it into the configuration method for `AddDaprPubSubClient` so
.AddInMemoryCollection(new Dictionary<string, string> {
The two environment variables used in this scenario are as follows: + +| Key | Value | +|--------------------------|------------------------| +| myapp_DAPR_HTTP_ENDPOINT | http://localhost:54321 | +| myapp_DAPR_API_TOKEN | abc123 | + +These environment variables will be loaded into the registered configuration in the following example and made available +without the prefix attached. + +```csharp +var builder = WebApplication.CreateBuilder(); + +builder.Configuration.AddEnvironmentVariables(prefix: "myapp_"); +builder.Services.AddDaprPubSubClient(); +``` + +The Dapr PubSub client will be configured to use both the HTTP endpoint `http://localhost:54321` and populate all outbound +requests with the API token header `abc123`. + +## Use the Dapr PubSub client without relying on dependency injection +While the use of dependency injection simplifies the use of complex types in .NET and makes it easier to +deal with complicated configurations, you're not required to register the `DaprPublishSubscribeClient` in this way. +Rather, you can also elect to create an instance of it from a `DaprPublishSubscribeClientBuilder` instance as +demonstrated below: + +```cs + +public class MySampleClass +{ + public void DoSomething() + { + var daprPubSubClientBuilder = new DaprPublishSubscribeClientBuilder(); + var daprPubSubClient = daprPubSubClientBuilder.Build(); + + //Do something with the `daprPubSubClient` + } +} +``` + +## Set up message handler +The streaming subscription implementation in Dapr gives you greater control over handling backpressure from events by +leaving the messages in the Dapr runtime until your application is ready to accept them. The .NET SDK supports a +high-performance queue for maintaining a local cache of these messages in your application while processing is pending. +These messages will persist in the queue until processing either times out for each one or a response action is taken +for each (typically after processing succeeds or fails). 
The handler must be configured to return a `Task<TopicResponseAction>` indicating one of these operations, even if from
+ +Other options are as follows: + +| Property Name | Description | +|-----------------------------------------------------------------------------------------------|------------------------------------------------------------------------------------------------| +| Metadata | Additional subscription metadata | +| DeadLetterTopic | The optional name of the dead-letter topic to send dropped messages to. | +| MaximumQueuedMessages | By default, there is no maximum boundary enforced for the internal queue, but setting this property would impose an upper limit. | +| MaximumCleanupTimeout | When the subscription is disposed of or the token flags a cancellation request, this specifies the maximum amount of time available to process the remaining messages in the internal queue. | + +Subscription is then configured as in the following example: +```csharp +var messagingClient = app.Services.GetRequiredService(); + +var cancellationTokenSource = new CancellationTokenSource(TimeSpan.FromSeconds(60)); //Override the default of 30 seconds +var options = new DaprSubscriptionOptions(new MessageHandlingPolicy(TimeSpan.FromSeconds(10), TopicResponseAction.Retry)); +var subscription = await messagingClient.SubscribeAsync("pubsub", "mytopic", options, HandleMessageAsync, cancellationTokenSource.Token); +``` + +## Terminate and clean up subscription +When you've finished with your subscription and wish to stop receiving new events, simply await a call to +`DisposeAsync()` on your subscription instance. This will cause the client to unregister from additional events and +proceed to finish processing all the events still leftover in the backpressure queue, if any, before disposing of any +internal resources. This cleanup will be limited to the timeout interval provided in the `DaprSubscriptionOptions` when +the subscription was registered and by default, this is set to 30 seconds.
\ No newline at end of file diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-messaging/dotnet-messaging-pubsub-usage.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-messaging/dotnet-messaging-pubsub-usage.md new file mode 100644 index 00000000000..8b3359d0ca7 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-messaging/dotnet-messaging-pubsub-usage.md @@ -0,0 +1,130 @@ +--- +type: docs +title: "DaprPublishSubscribeClient usage" +linkTitle: "DaprPublishSubscribeClient usage" +weight: 69000 +description: Essential tips and advice for using DaprPublishSubscribeClient +--- + +## Lifetime management + +A `DaprPublishSubscribeClient` is a version of the Dapr client that is dedicated to interacting with the Dapr Messaging API. +It can be registered alongside a `DaprClient` and other Dapr clients without issue. + +It maintains access to networking resources in the form of TCP sockets used to communicate with the Dapr sidecar and implements +`IAsyncDisposable` to support the eager cleanup of resources. + +For best performance, create a single long-lived instance of `DaprPublishSubscribeClient` and provide access to that shared +instance throughout your application. `DaprPublishSubscribeClient` instances are thread-safe and intended to be shared. + +This can be aided by utilizing the dependency injection functionality. The registration method supports registration using +as a singleton, a scoped instance or as transient (meaning it's recreated every time it's injected), but also enables +registration to utilize values from an `IConfiguration` or other injected service in a way that's impractical when +creating the client from scratch in each of your classes. + +Avoid creating a `DaprPublishSubscribeClient` for each operation and disposing it when the operation is complete. 
It's +intended that the `DaprPublishSubscribeClient` should only be disposed when you no longer wish to receive events on the +subscription as disposing it will cancel the ongoing receipt of new events. + +## Configuring DaprPublishSubscribeClient via the DaprPublishSubscribeClientBuilder +A `DaprPublishSubscribeClient` can be configured by invoking methods on the `DaprPublishSubscribeClientBuilder` class +before calling `.Build()` to create the client itself. The settings for each `DaprPublishSubscribeClient` are separate +and cannot be changed after calling `.Build()`. + +```cs +var daprPubsubClient = new DaprPublishSubscribeClientBuilder() + .UseDaprApiToken("abc123") // Specify the API token used to authenticate to other Dapr sidecars + .Build(); +``` + +The `DaprPublishSubscribeClientBuilder` contains settings for: + +- The HTTP endpoint of the Dapr sidecar +- The gRPC endpoint of the Dapr sidecar +- The `JsonSerializerOptions` object used to configure JSON serialization +- The `GrpcChannelOptions` object used to configure gRPC +- The API token used to authenticate requests to the sidecar +- The factory method used to create the `HttpClient` instance used by the SDK +- The timeout used for the `HttpClient` instance when making requests to the sidecar + +The SDK will read the following environment variables to configure the default values: + +- `DAPR_HTTP_ENDPOINT`: used to find the HTTP endpoint of the Dapr sidecar, example: `https://dapr-api.mycompany.com` +- `DAPR_GRPC_ENDPOINT`: used to find the gRPC endpoint of the Dapr sidecar, example: `https://dapr-grpc-api.mycompany.com` +- `DAPR_HTTP_PORT`: if `DAPR_HTTP_ENDPOINT` is not set, this is used to find the HTTP local endpoint of the Dapr sidecar +- `DAPR_GRPC_PORT`: if `DAPR_GRPC_ENDPOINT` is not set, this is used to find the gRPC local endpoint of the Dapr sidecar +- `DAPR_API_TOKEN`: used to set the API token + +### Configuring gRPC channel options +Dapr's use of `CancellationToken` for cancellation 
relies on the configuration of the gRPC channel options. If you +need to configure these options yourself, make sure to enable the [ThrowOperationCanceledOnCancellation setting](https://grpc.github.io/grpc/csharp-dotnet/api/Grpc.Net.Client.GrpcChannelOptions.html#Grpc_Net_Client_GrpcChannelOptions_ThrowOperationCanceledOnCancellation). + +```cs +var daprPubsubClient = new DaprPublishSubscribeClientBuilder() + .UseGrpcChannelOptions(new GrpcChannelOptions { ... ThrowOperationCanceledOnCancellation = true }) + .Build(); +``` + +## Using cancellation with `DaprPublishSubscribeClient` + +The APIs on `DaprPublishSubscribeClient` perform asynchronous operations and accept an optional `CancellationToken` +parameter. This follows a standard .NET practice for cancellable operations. Note that when cancellation occurs, there is +no guarantee that the remote endpoint stops processing the request, only that the client has stopped waiting for completion. + +When an operation is cancelled, it will throw an `OperationCanceledException`. + +## Configuring `DaprPublishSubscribeClient` via dependency injection + +Using the built-in extension methods for registering the `DaprPublishSubscribeClient` in a dependency injection container +can provide the benefit of registering the long-lived service a single time, centralize complex configuration and improve +performance by ensuring similarly long-lived resources are re-purposed when possible (e.g. `HttpClient` instances). + +There are three overloads available to give the developer the greatest flexibility in configuring the client for their +scenario. Each of these will register the `IHttpClientFactory` on your behalf if not already registered, and configure +the `DaprPublishSubscribeClientBuilder` to use it when creating the `HttpClient` instance in order to re-use the same +instance as much as possible and avoid socket exhaustion and other issues.
+ +In the first approach, there's no configuration done by the developer and the `DaprPublishSubscribeClient` is configured with +the default settings. + +```cs +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddDaprPubSubClient(); //Registers the `DaprPublishSubscribeClient` to be injected as needed +var app = builder.Build(); +``` + +Sometimes the developer will need to configure the created client using the various configuration options detailed above. This is done through an overload that passes in the `DaprPublishSubscribeClientBuilder` and exposes methods for configuring the necessary options. + +```cs +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddDaprPubSubClient((_, daprPubSubClientBuilder) => { + //Set the API token + daprPubSubClientBuilder.UseDaprApiToken("abc123"); + //Specify a non-standard HTTP endpoint + daprPubSubClientBuilder.UseHttpEndpoint("http://dapr.my-company.com"); +}); + +var app = builder.Build(); +``` + +Finally, it's possible that the developer may need to retrieve information from another service in order to populate these configuration values.
That value may be provided from a `DaprClient` instance, a vendor-specific SDK or some local service, but as long as it's also registered in DI, it can be injected into this configuration operation via the last overload: + +```cs +var builder = WebApplication.CreateBuilder(args); + +//Register a fictional service that retrieves secrets from somewhere +builder.Services.AddSingleton(); + +builder.Services.AddDaprPublishSubscribeClient((serviceProvider, daprPubSubClientBuilder) => { + //Retrieve an instance of the `SecretService` from the service provider + var secretService = serviceProvider.GetRequiredService(); + var daprApiToken = secretService.GetSecret("DaprApiToken").Value; + + //Configure the `DaprPublishSubscribeClientBuilder` + daprPubSubClientBuilder.UseDaprApiToken(daprApiToken); +}); + +var app = builder.Build(); +``` \ No newline at end of file diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-troubleshooting/_index.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-troubleshooting/_index.md new file mode 100644 index 00000000000..5ffd7c7c699 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-troubleshooting/_index.md @@ -0,0 +1,7 @@ +--- +type: docs +title: "How to troubleshoot and debug with the Dapr .NET SDK" +linkTitle: "Troubleshooting" +weight: 120000 +description: Tips, tricks, and guides for troubleshooting and debugging with the Dapr .NET SDKs +--- \ No newline at end of file diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-troubleshooting/dotnet-troubleshooting-pubsub.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-troubleshooting/dotnet-troubleshooting-pubsub.md new file mode 100644 index 00000000000..e60beb819eb --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-troubleshooting/dotnet-troubleshooting-pubsub.md @@ -0,0 +1,284 @@ +--- +type: docs +title: "Troubleshoot Pub/Sub with the .NET SDK" +linkTitle: "Troubleshoot pub/sub" +weight: 100000 +description: Troubleshoot 
Pub/Sub with the .NET SDK +--- + +# Troubleshooting Pub/Sub + +The most common problem with pub/sub is that the pub/sub endpoint in your application is not being called. + +There are a few layers to this problem with different solutions: + +- The application is not receiving any traffic from Dapr +- The application is not registering pub/sub endpoints with Dapr +- The pub/sub endpoints are registered with Dapr, but the request is not reaching the desired endpoint + +## Step 1: Turn up the logs + +**This is important. Future steps will depend on your ability to see logging output. ASP.NET Core logs almost nothing with the default log settings, so you will need to change it.** + +Adjust the logging verbosity to include `Information` logging for ASP.NET Core as described [here](https://docs.microsoft.com/aspnet/core/mvc/controllers/routing?view=aspnetcore-5.0#debug-diagnostics). Set the `Microsoft` key to `Information`. + +## Step 2: Verify you can receive traffic from Dapr + +1. Start the application as you would normally (`dapr run ...`). Make sure that you're including an `--app-port` argument in the commandline. Dapr needs to know that your application is listening for traffic. By default an ASP.NET Core application will listen for HTTP on port 5000 in local development. + +2. Wait for Dapr to finish starting + +3. Examine the logs + +You should see a log entry like: + +``` +info: Microsoft.AspNetCore.Hosting.Diagnostics[1] + Request starting HTTP/1.1 GET http://localhost:5000/..... +``` + +During initialization Dapr will make some requests to your application for configuration. If you can't find these then it means that something has gone wrong. Please ask for help either via an issue or in Discord (include the logs). If you see requests made to your application, then continue to step 3. + +## Step 3: Verify endpoint registration + +1. Start the application as you would normally (`dapr run ...`). + +2. 
Use `curl` at the command line (or another HTTP testing tool) to access the `/dapr/subscribe` endpoint. + +Here's an example command assuming your application's listening port is 5000: + +```sh +curl http://localhost:5000/dapr/subscribe -v +``` + +For a correctly configured application the output should look like the following: + +```txt +* Trying ::1... +* TCP_NODELAY set +* Connected to localhost (::1) port 5000 (#0) +> GET /dapr/subscribe HTTP/1.1 +> Host: localhost:5000 +> User-Agent: curl/7.64.1 +> Accept: */* +> +< HTTP/1.1 200 OK +< Date: Fri, 15 Jan 2021 22:31:40 GMT +< Content-Type: application/json +< Server: Kestrel +< Transfer-Encoding: chunked +< +* Connection #0 to host localhost left intact +[{"topic":"deposit","route":"deposit","pubsubName":"pubsub"},{"topic":"withdraw","route":"withdraw","pubsubName":"pubsub"}]* Closing connection 0 +``` + +Pay particular attention to the HTTP status code, and the JSON output. + +```txt +< HTTP/1.1 200 OK +``` + +A 200 status code indicates success. + + +The JSON blob that's included near the end is the output of `/dapr/subscribe` that's processed by the Dapr runtime. In this case it's using the `ControllerSample` in this repo - so this is an example of correct output. + +```json +[ + {"topic":"deposit","route":"deposit","pubsubName":"pubsub"}, + {"topic":"withdraw","route":"withdraw","pubsubName":"pubsub"} +] +``` + +--- + +With the output of this command in hand, you are ready to diagnose a problem or move on to the next step. + +### Option 0: The response was a 200 and included some pub/sub entries + +**If you have entries in the JSON output from this test then the problem lies elsewhere, move on to step 4.** + +### Option 1: The response was not a 200, or didn't contain JSON + +If the response was not a 200 or did not contain JSON, then the `MapSubscribeHandler()` endpoint was not reached. + +Make sure you have some code like the following in `Startup.cs` and repeat the test.
+ +```cs +app.UseRouting(); + +app.UseCloudEvents(); + +app.UseEndpoints(endpoints => +{ + endpoints.MapSubscribeHandler(); // This is the Dapr subscribe handler + endpoints.MapControllers(); +}); +``` + +**If adding the subscribe handler did not resolve the problem, please open an issue on this repo and include the contents of your `Startup.cs` file.** + +### Option 2: The response contained JSON but it was empty (like `[]`) + +If the JSON output was an empty array (like `[]`) then the subscribe handler is registered, but no topic endpoints were registered. + +--- + +If you're using a controller for pub/sub you should have a method like: + +```C# +[Topic("pubsub", "deposit")] +[HttpPost("deposit")] +public async Task Deposit(...) + +// Using Pub/Sub routing +[Topic("pubsub", "transactions", "event.type == \"withdraw.v2\"", 1)] +[HttpPost("withdraw")] +public async Task Withdraw(...) +``` + +In this example the `Topic` and `HttpPost` attributes are required, but other details might be different. + +--- + +If you're using routing for pub/sub you should have an endpoint like: + +```C# +endpoints.MapPost("deposit", ...).WithTopic("pubsub", "deposit"); +``` + +In this example the call to `WithTopic(...)` is required but other details might be different. + +--- + +**After correcting this code and re-testing if the JSON output is still the empty array (like `[]`) then please open an issue on this repository and include the contents of `Startup.cs` and your pub/sub endpoint.** + +## Step 4: Verify endpoint reachability + +In this step we'll verify that the entries registered with pub/sub are reachable. 
The last step should have left you with some JSON output like the following: + +```json +[ + { + "pubsubName": "pubsub", + "topic": "deposit", + "route": "deposit" + }, + { + "pubsubName": "pubsub", + "topic": "deposit", + "routes": { + "rules": [ + { + "match": "event.type == \"withdraw.v2\"", + "path": "withdraw" + } + ] + } + } +] +``` + +Keep this output, as we'll use the `route` information to test the application. + +1. Start the application as you would normally (`dapr run ...`). + +2. Use `curl` at the command line (or another HTTP testing tool) to access one of the routes registered with a pub/sub endpoint. + +Here's an example command assuming your application's listening port is 5000, and one of your pub/sub routes is `withdraw`: + +```sh +curl http://localhost:5000/withdraw -H 'Content-Type: application/json' -d '{}' -v +``` + +Here's the output from running the above command against the sample: + +```txt +* Trying ::1... +* TCP_NODELAY set +* Connected to localhost (::1) port 5000 (#0) +> POST /withdraw HTTP/1.1 +> Host: localhost:5000 +> User-Agent: curl/7.64.1 +> Accept: */* +> Content-Type: application/json +> Content-Length: 2 +> +* upload completely sent off: 2 out of 2 bytes +< HTTP/1.1 400 Bad Request +< Date: Fri, 15 Jan 2021 22:53:27 GMT +< Content-Type: application/problem+json; charset=utf-8 +< Server: Kestrel +< Transfer-Encoding: chunked +< +* Connection #0 to host localhost left intact +{"type":"https://tools.ietf.org/html/rfc7231#section-6.5.1","title":"One or more validation errors occurred.","status":400,"traceId":"|5e9d7eee-4ea66b1e144ce9bb.","errors":{"Id":["The Id field is required."]}}* Closing connection 0 +``` + +Based on the HTTP 400 and JSON payload, this response indicates that the endpoint was reached but the request was rejected due to a validation error. + +You should also look at the console output of the running application. This is example output with the Dapr logging headers stripped away for clarity. 
+ +``` +info: Microsoft.AspNetCore.Hosting.Diagnostics[1] + Request starting HTTP/1.1 POST http://localhost:5000/withdraw application/json 2 +info: Microsoft.AspNetCore.Routing.EndpointMiddleware[0] + Executing endpoint 'ControllerSample.Controllers.SampleController.Withdraw (ControllerSample)' +info: Microsoft.AspNetCore.Mvc.Infrastructure.ControllerActionInvoker[3] + Route matched with {action = "Withdraw", controller = "Sample"}. Executing controller action with signature System.Threading.Tasks.Task`1[Microsoft.AspNetCore.Mvc.ActionResult`1[ControllerSample.Account]] Withdraw(ControllerSample.Transaction, Dapr.Client.DaprClient) on controller ControllerSample.Controllers.SampleController (ControllerSample). +info: Microsoft.AspNetCore.Mvc.Infrastructure.ObjectResultExecutor[1] + Executing ObjectResult, writing value of type 'Microsoft.AspNetCore.Mvc.ValidationProblemDetails'. +info: Microsoft.AspNetCore.Mvc.Infrastructure.ControllerActionInvoker[2] + Executed action ControllerSample.Controllers.SampleController.Withdraw (ControllerSample) in 52.1211ms +info: Microsoft.AspNetCore.Routing.EndpointMiddleware[1] + Executed endpoint 'ControllerSample.Controllers.SampleController.Withdraw (ControllerSample)' +info: Microsoft.AspNetCore.Hosting.Diagnostics[2] + Request finished in 157.056ms 400 application/problem+json; charset=utf-8 +``` + +The log entry of primary interest is the one coming from routing: + +```txt +info: Microsoft.AspNetCore.Routing.EndpointMiddleware[0] + Executing endpoint 'ControllerSample.Controllers.SampleController.Withdraw (ControllerSample)' +``` + +This entry shows that: + +- Routing executed +- Routing chose the `ControllerSample.Controllers.SampleController.Withdraw (ControllerSample)'` endpoint + +Now you have the information needed to troubleshoot this step. + +### Option 0: Routing chose the correct endpoint + +If the information in the routing log entry is correct, then it means that in isolation your application is behaving correctly. 
+ +Example: + +```txt +info: Microsoft.AspNetCore.Routing.EndpointMiddleware[0] + Executing endpoint 'ControllerSample.Controllers.SampleController.Withdraw (ControllerSample)' +``` + +You might want to try using the Dapr CLI to send a pub/sub message directly and compare the logging output. + +Example command: + +```sh +dapr publish --pubsub pubsub --topic withdraw --data '{}' +``` + +**If after doing this you still don't understand the problem please open an issue on this repo and include the contents of your `Startup.cs`.** + +### Option 1: Routing did not execute + +If you don't see an entry for `Microsoft.AspNetCore.Routing.EndpointMiddleware` in the logs, then it means that the request was handled by something other than routing. Usually the problem in this case is a misbehaving middleware. Other logs from the request might give you a clue to what's happening. + +**If you need help understanding the problem please open an issue on this repo and include the contents of your `Startup.cs`.** + +### Option 2: Routing chose the wrong endpoint + +If you see an entry for `Microsoft.AspNetCore.Routing.EndpointMiddleware` in the logs, but it contains the wrong endpoint then it means that you've got a routing conflict. The endpoint that was chosen will appear in the logs so that should give you an idea of what's causing the conflict.
+ +**If you need help understanding the problem please open an issue on this repo and include the contents of your `Startup.cs`.** diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-workflow/_index.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-workflow/_index.md new file mode 100644 index 00000000000..c732f646983 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-workflow/_index.md @@ -0,0 +1,8 @@ +--- +type: docs +title: "Dapr Workflow .NET SDK" +linkTitle: "Workflow" +weight: 30000 +description: Get up and running with Dapr Workflow and the Dapr .NET SDK +--- + diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-workflow/dotnet-workflow-examples.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-workflow/dotnet-workflow-examples.md new file mode 100644 index 00000000000..9ae67a028da --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-workflow/dotnet-workflow-examples.md @@ -0,0 +1,19 @@ +--- +type: docs +title: ".NET Workflow Examples" +linkTitle: "Workflow examples on GitHub" +weight: 3000 +description: Explore Dapr Workflow code examples on GitHub +--- + +## Workflow tutorials in the Dapr Quickstarts repository + +The Dapr Quickstarts repository on GitHub includes many workflow tutorials that showcase the various workflow patterns and how to use the workflow management operations. You can find these tutorials in the [quickstarts/tutorials/workflow/csharp](https://github.com/dapr/quickstarts/tree/master/tutorials/workflow/csharp) folder. + +## Workflow examples in the .NET SDK repository + +The Dapr .NET SDK repository on GitHub contains several examples demonstrating how to use Dapr Workflows with .NET. You can find these examples in the [examples/Workflow](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) folder. 
+ +## Next steps + +- [Try the Dapr University Workflow track](https://www.diagrid.io/dapr-university#dapr-workflow) diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-workflow/dotnet-workflow-management-methods.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-workflow/dotnet-workflow-management-methods.md new file mode 100644 index 00000000000..b83fbfc4cea --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-workflow/dotnet-workflow-management-methods.md @@ -0,0 +1,109 @@ +--- +type: docs +title: "Workflow management operations with DaprWorkflowClient" +linkTitle: "Workflow management operations" +weight: 2000 +description: Learn how to use the `DaprWorkflowClient` to manage workflows +--- + +## Workflow management operations with `DaprWorkflowClient` + +The `DaprWorkflowClient` class provides methods to manage workflow instances. Below are the operations you can perform +using the `DaprWorkflowClient`. + +### Schedule a new workflow instance + +To start a new workflow instance, use the `ScheduleNewWorkflowAsync` method. This method requires the workflow type +name and an input required by the workflow. The workflow `instanceId` is an optional argument; if not provided, a +new GUID is generated by the `DaprWorkflowClient`. The final optional argument is a `startTime` of type +`DateTimeOffset` which can be used to define when the workflow instance should start. The method returns the +`instanceId` of the scheduled workflow which is used for other workflow management operations.
+ +```csharp +var instanceId = $"order-workflow-{Guid.NewGuid().ToString()[..8]}"; +var input = new Order("Paperclips", 1000, 9.95); +await daprWorkflowClient.ScheduleNewWorkflowAsync( + nameof(OrderProcessingWorkflow), + instanceId, + input); +``` + +### Calling activities and child workflows in another application (multi-app workflows) + +Dapr supports **multi-application workflows**, where a workflow can call activities or child workflows that are hosted +(defined and registered) in a *different* Dapr application. +See [Multi Application Workflows]({{% ref "workflow-multi-app.md" %}}) for the conceptual overview and constraints. + +Using multi-application workflows in .NET requires: + +- Setting `AppId` on `WorkflowRuntimeOptions` to the **App ID of the application where the workflows are running** (the workflow host), + as described in [`dotnet-workflowclient-usage.md`]({{% ref "dotnet-workflowclient-usage.md" %}}). This ensures workflow execution is routed + to the correct hosting application. +- Setting `TargetAppId` when invoking work on another application: + + - **Calling a child workflow in another app**: set `TargetAppId` on `ChildWorkflowTaskOptions`. + - **Calling an activity in another app**: set `TargetAppId` on `WorkflowTaskOptions`. + +This causes the activity/child-workflow execution to be invoked on the specified target application's App ID, while +the parent workflow continues to orchestrate and receives the result. + +### Retrieve the status of a workflow instance + +To get the current status of a workflow instance, use the `GetWorkflowStateAsync` method. This method requires the +instance ID of the workflow and returns a `WorkflowStatus` object containing details about the workflow's current state. 
+ +```csharp +var workflowStatus = await daprWorkflowClient.GetWorkflowStateAsync(instanceId); +``` + +### Raise an event to a running workflow instance + +To send an event to a running workflow instance that is waiting for an external event, use the `RaiseEventAsync` +method. This method requires the instance ID of the workflow, the name of the event, and optionally the event payload. + +```csharp +await daprWorkflowClient.RaiseEventAsync(instanceId, "Approval", true); +``` + +### Suspend a running workflow instance + +A running workflow instance can be paused using the `SuspendWorkflowAsync` method. This method requires the instance +ID of the workflow. You can optionally provide a reason for suspending the workflow. + +```csharp +await daprWorkflowClient.SuspendWorkflowAsync(instanceId); +``` + +### Resume a suspended workflow instance + +A suspended workflow instance can be resumed using the `ResumeWorkflowAsync` method. This method requires the instance +ID of the workflow. You can optionally provide a reason for resuming the workflow. + +```csharp +await daprWorkflowClient.ResumeWorkflowAsync(instanceId); +``` + +### Terminate a workflow instance + +To terminate a workflow instance, use the `TerminateWorkflowAsync` method. This method requires the instance ID of +the workflow. You can optionally provide an `output` argument of type `string`. Terminating a workflow instance +will also terminate all child workflow instances but it has no impact on in-flight activity executions. + +```csharp +await daprWorkflowClient.TerminateWorkflowAsync(instanceId); +``` + +### Purge a workflow instance + +To remove the workflow instance history from the Dapr Workflow state store, use the `PurgeWorkflowAsync` method. +This method requires the instance ID of the workflow. Only completed, failed, or terminated workflow instances +can be purged.
+ +```csharp +await daprWorkflowClient.PurgeWorkflowAsync(instanceId); +``` + +## Next steps + +- [Learn how to author workflows and activities]({{% ref howto-author-workflow.md %}}) +- [Try the Dapr Workflow quickstart]({{% ref workflow-quickstart.md %}}) diff --git a/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-workflow/dotnet-workflowclient-usage.md b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-workflow/dotnet-workflowclient-usage.md new file mode 100644 index 00000000000..89dc6ccf3c5 --- /dev/null +++ b/sdkdocs/dotnet/content/en/dotnet-sdk-docs/dotnet-workflow/dotnet-workflowclient-usage.md @@ -0,0 +1,189 @@ +--- +type: docs +title: "DaprWorkflowClient lifetime management and registration" +linkTitle: "DaprWorkflowClient registration" +weight: 1000 +description: Learn how to configure the `DaprWorkflowClient` lifetime management and dependency injection +--- + +## Lifetime management + +A `DaprWorkflowClient` holds access to networking resources in the form of TCP sockets used to communicate with the Dapr sidecar as well +as other types used in the management and operation of Workflows. `DaprWorkflowClient` implements `IAsyncDisposable` to support eager +cleanup of resources. + +## Dependency Injection + +The `AddDaprWorkflow()` method will register the Dapr workflow services with ASP.NET Core dependency injection. This method +requires an options delegate that defines each of the workflows and activities you wish to register and use in your application. + +{{% alert title="Multi-application workflows require setting AppId" color="warning" %}} + +If you are using Dapr's **multi-application workflow** capability +(see [Multi Application Workflows]({{% ref "workflow-multi-app.md" %}})), you must set the `AppId` property on +`WorkflowRuntimeOptions` to the **App ID of the application where the workflows are running** (the workflow host). +This ensures workflow execution is routed to the correct hosting application. 
+ +{{% /alert %}} + +{{% alert title="Note" color="primary" %}} + +This method will attempt to register a `DaprClient` instance, but this will only work if it hasn't already been registered with another +lifetime. For example, an earlier call to `AddDaprClient()` with a singleton lifetime will always use a singleton regardless of the +lifetime chose for the workflow client. The `DaprClient` instance will be used to communicate with the Dapr sidecar and if it's not +yet registered, the lifetime provided during the `AddDaprWorkflow()` registration will be used to register the `DaprWorkflowClient` +as well as its own dependencies. + +{{% /alert %}} + +### Singleton Registration + +By default, the `AddDaprWorkflow` method registers the `DaprWorkflowClient` and associated services using a singleton lifetime. This means +that the services are instantiated only a single time. + +The following is an example of how registration of the `DaprWorkflowClient` as it would appear in a typical `Program.cs` file: + +```csharp +builder.Services.AddDaprWorkflow(options => { + options.RegisterWorkflow(); + options.RegisterActivity(); +}); + +var app = builder.Build(); +await app.RunAsync(); +``` + +### Scoped Registration + +While this may generally be acceptable in your use case, you may instead wish to override the lifetime specified. This is done by passing a `ServiceLifetime` +argument in `AddDaprWorkflow`. For example, you may wish to inject another scoped service into your ASP.NET Core processing pipeline +that needs context used by the `DaprClient` that wouldn't be available if the former service were registered as a singleton. 
 + +This is demonstrated in the following example: + +```csharp +builder.Services.AddDaprWorkflow(options => { + options.RegisterWorkflow(); + options.RegisterActivity(); +}, ServiceLifetime.Scoped); + +var app = builder.Build(); +await app.RunAsync(); +``` + +### Transient Registration + +Finally, Dapr services can also be registered using a transient lifetime meaning that they will be initialized every time they're injected. This +is demonstrated in the following example: + +```csharp +builder.Services.AddDaprWorkflow(options => { + options.RegisterWorkflow(); + options.RegisterActivity(); +}, ServiceLifetime.Transient); + +var app = builder.Build(); +await app.RunAsync(); +``` + +### Create a `DaprWorkflowClient` instance + +{{< tabpane text=true >}} + +{{% tab "ASP.Net Core App" %}} + +In an ASP.Net Core application, you can inject the `DaprWorkflowClient` into methods or controllers via method or constructor injection. This example demonstrates method injection in a minimal API scenario: + +```csharp +app.MapPost("/start", async ( + [FromServices] DaprWorkflowClient daprWorkflowClient, + Order order + ) => { + var instanceId = await daprWorkflowClient.ScheduleNewWorkflowAsync( + nameof(OrderProcessingWorkflow), + input: order); + + return Results.Accepted(instanceId); +}); +``` + +{{% /tab %}} + +{{% tab "Console App" %}} + +To create a `DaprWorkflowClient` instance in a console app, retrieve it from the `ServiceProvider`: + +```csharp +using var scope = host.Services.CreateAsyncScope(); +var daprWorkflowClient = scope.ServiceProvider.GetRequiredService(); +``` + +{{% /tab %}} + +{{< /tabpane >}} + +Now, you can use this client to perform workflow management operations such as starting, pausing, resuming, and terminating a workflow instance. See [Workflow management operations with `DaprWorkflowClient`]({{% ref dotnet-workflow-management-methods.md %}}) for more information on these operations. 
 + +## Injecting Services into Workflow Activities + +Workflow activities support the same dependency injection that developers have come to expect of modern C# applications. Assuming a proper +registration at startup, any such type can be injected into the constructor of the workflow activity and available to utilize during +the execution of the workflow. This makes it simple to add logging via an injected `ILogger` or access to other Dapr +building blocks by injecting `DaprClient` or `DaprJobsClient`, for example. + +```csharp +internal sealed class SquareNumberActivity : WorkflowActivity +{ + private readonly ILogger _logger; + + public SquareNumberActivity(ILogger logger) + { + this._logger = logger; + } + + public override Task RunAsync(WorkflowActivityContext context, int input) + { + this._logger.LogInformation("Squaring the value {number}", input); + var result = input * input; + this._logger.LogInformation("Got a result of {squareResult}", result); + + return Task.FromResult(result); + } +} +``` + +### Using ILogger in Workflow + +Because workflows must be deterministic, it is not possible to inject arbitrary services into them. For example, +if you were able to inject a standard `ILogger` into a workflow and it needed to be replayed because of an error, +subsequent replay from the event source log would result in the log recording additional operations that didn't actually +take place a second or third time because their results were sourced from the log. This has the potential to introduce +a significant amount of confusion. Rather, a replay-safe logger is made available for use within workflows. It will only +log events the first time the workflow runs and will not log anything whenever the workflow is being replayed. + +This logger can be retrieved from a method present on the `WorkflowContext` available on your workflow instance and +otherwise used precisely as you might otherwise use an `ILogger` instance. 
+ +An end-to-end sample demonstrating this can be seen in the +[.NET SDK repository](https://github.com/dapr/dotnet-sdk/blob/master/examples/Workflow/WorkflowConsoleApp/Workflows/OrderProcessingWorkflow.cs) +but a brief extraction of this sample is available below. + +```csharp +public class OrderProcessingWorkflow : Workflow +{ + public override async Task RunAsync(WorkflowContext context, OrderPayload order) + { + string orderId = context.InstanceId; + var logger = context.CreateReplaySafeLogger(); //Use this method to access the logger instance + + logger.LogInformation("Received order {orderId} for {quantity} {name} at ${totalCost}", orderId, order.Quantity, order.Name, order.TotalCost); + + //... + } +} +``` + +## Next steps + +- [Learn more about Dapr workflow management operations]({{% ref dotnet-workflow-management-methods.md %}}) +- [Learn how to author workflows and activities]({{% ref howto-author-workflow.md %}}) diff --git a/sdkdocs/go b/sdkdocs/go deleted file mode 160000 index 6dd434913b6..00000000000 --- a/sdkdocs/go +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 6dd434913b6fb41f6ede006c64c01a35a02c458f diff --git a/sdkdocs/go/README.md b/sdkdocs/go/README.md new file mode 100644 index 00000000000..0e0ec048c7e --- /dev/null +++ b/sdkdocs/go/README.md @@ -0,0 +1,25 @@ +# Dapr Go SDK documentation + +This page covers how the documentation is structured for the Dapr Go SDK + +## Dapr Docs + +All Dapr documentation is hosted at [docs.dapr.io](https://docs.dapr.io), including the docs for the [Go SDK](https://docs.dapr.io/developing-applications/sdks/go/). Head over there if you want to read the docs. + +### Go SDK docs source + +Although the docs site code and content is in the [docs repo](https://github.com/dapr/docs), the Go SDK content and images are within the `content` and `static` directories, respectively. + +This allows separation of roles and expertise between maintainers, and makes it easy to find the docs files you are looking for. 
 + +## Writing Go SDK docs + +To get up and running to write Go SDK docs, visit the [docs repo](https://github.com/dapr/docs) to initialize your environment. It will clone both the docs repo and this repo, so you can make changes and see it rendered within the site instantly, as well as commit and PR into this repo. + +Make sure to read the [docs contributing guide](https://docs.dapr.io/contributing/contributing-docs/) for information on style/semantics/etc. + +## Docs architecture + +The docs site is built on [Hugo](https://gohugo.io), which lives in the docs repo. This repo is set up as a git submodule so that when the repo is cloned and initialized, the go-sdk repo, along with the docs, is cloned as well. + +Then, in the Hugo configuration file, the `daprdocs/content` and `daprdocs/static` directories are redirected to the `daprdocs/developing-applications/sdks/go` and `static/go` directories, respectively. Thus, all the content within this repo is folded into the main docs site. \ No newline at end of file diff --git a/sdkdocs/go/content/en/go-sdk-contributing/go-contributing.md b/sdkdocs/go/content/en/go-sdk-contributing/go-contributing.md new file mode 100644 index 00000000000..ae15e89ad78 --- /dev/null +++ b/sdkdocs/go/content/en/go-sdk-contributing/go-contributing.md @@ -0,0 +1,23 @@ +--- +type: docs +title: "Contributing to the Go SDK" +linkTitle: "Go SDK" +weight: 3000 +description: Guidelines for contributing to the Dapr Go SDK +--- + +When contributing to the [Go SDK](https://github.com/dapr/go-sdk) the following rules and best-practices should be followed. + +## Examples + +The `examples` directory contains code samples for users to run to try out specific functionality of the various Go SDK packages and extensions. When writing new and updated samples keep in mind: + +- All examples should be runnable on Windows, Linux, and MacOS. 
While Go code is consistent among operating systems, any pre/post example commands should provide options through [tabpane]({{% ref "contributing-docs.md#tabbed-content" %}}) +- Contain steps to download/install any required pre-requisites. Someone coming in with a fresh OS install should be able to start on the example and complete it without an error. Links to external download pages are fine. + +## Docs + +The `daprdocs` directory contains the markdown files that are rendered into the [Dapr Docs](https://docs.dapr.io) website. When the documentation website is built this repo is cloned and configured so that its contents are rendered with the docs content. When writing docs keep in mind: + + - All rules in the [docs guide]({{% ref contributing-docs.md %}}) should be followed in addition to these. + - All files and directories should be prefixed with `go-` to ensure all file/directory names are globally unique across all Dapr documentation. diff --git a/sdkdocs/go/content/en/go-sdk-docs/_index.md b/sdkdocs/go/content/en/go-sdk-docs/_index.md new file mode 100644 index 00000000000..49327b5a6e6 --- /dev/null +++ b/sdkdocs/go/content/en/go-sdk-docs/_index.md @@ -0,0 +1,28 @@ +--- +type: docs +title: "Dapr Go SDK" +linkTitle: "Go" +weight: 1000 +description: Go SDK packages for developing Dapr applications +no_list: true +cascade: + github_repo: https://github.com/dapr/go-sdk + github_subdir: daprdocs/content/en/go-sdk-docs + path_base_for_github_subdir: content/en/developing-applications/sdks/go/ + github_branch: main +--- + +A client library to help build Dapr applications in Go. This client supports all public Dapr APIs while focusing on idiomatic Go experiences and developer productivity. 
+ +{{% cardpane %}} +{{% card title="**Client**"%}} + Use the Go Client SDK for invoking public Dapr APIs + + [Learn more about the Go Client SDK]({{% ref go-client %}}) +{{% /card %}} +{{% card title="**Service**"%}} + Use the Dapr Service (Callback) SDK for Go to create services that will be invoked by Dapr. + + [Learn more about the Go Service (Callback) SDK]({{% ref go-service %}}) +{{% /card %}} +{{% /cardpane %}} \ No newline at end of file diff --git a/sdkdocs/go/content/en/go-sdk-docs/go-client/_index.md b/sdkdocs/go/content/en/go-sdk-docs/go-client/_index.md new file mode 100644 index 00000000000..3cb76db194e --- /dev/null +++ b/sdkdocs/go/content/en/go-sdk-docs/go-client/_index.md @@ -0,0 +1,704 @@ +--- +type: docs +title: "Getting started with the Dapr client Go SDK" +linkTitle: "Client" +weight: 20000 +description: How to get up and running with the Dapr Go SDK +no_list: true +--- + +The Dapr client package allows you to interact with other Dapr applications from a Go application. + +## Prerequisites + +- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed +- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) +- [Go installed](https://golang.org/doc/install) + + +## Import the client package +```go +import "github.com/dapr/go-sdk/client" +``` +## Error handling +Dapr errors are based on [gRPC's richer error model](https://cloud.google.com/apis/design/errors#error_model). 
+The following code shows an example of how you can parse and handle the error details: + +```go +if err != nil { + st := status.Convert(err) + + fmt.Printf("Code: %s\n", st.Code().String()) + fmt.Printf("Message: %s\n", st.Message()) + + for _, detail := range st.Details() { + switch t := detail.(type) { + case *errdetails.ErrorInfo: + // Handle ErrorInfo details + fmt.Printf("ErrorInfo:\n- Domain: %s\n- Reason: %s\n- Metadata: %v\n", t.GetDomain(), t.GetReason(), t.GetMetadata()) + case *errdetails.BadRequest: + // Handle BadRequest details + fmt.Println("BadRequest:") + for _, violation := range t.GetFieldViolations() { + fmt.Printf("- Key: %s\n", violation.GetField()) + fmt.Printf("- The %q field was wrong: %s\n", violation.GetField(), violation.GetDescription()) + } + case *errdetails.ResourceInfo: + // Handle ResourceInfo details + fmt.Printf("ResourceInfo:\n- Resource type: %s\n- Resource name: %s\n- Owner: %s\n- Description: %s\n", + t.GetResourceType(), t.GetResourceName(), t.GetOwner(), t.GetDescription()) + case *errdetails.Help: + // Handle ResourceInfo details + fmt.Println("HelpInfo:") + for _, link := range t.GetLinks() { + fmt.Printf("- Url: %s\n", link.Url) + fmt.Printf("- Description: %s\n", link.Description) + } + + default: + // Add cases for other types of details you expect + fmt.Printf("Unhandled error detail type: %v\n", t) + } + } +} +``` + +## Building blocks + +The Go SDK allows you to interface with all of the [Dapr building blocks]({{% ref building-blocks %}}). 
 + +### Service Invocation + +To invoke a specific method on another service running with Dapr sidecar, the Dapr client Go SDK provides two options: + +Invoke a service without data: +```go +resp, err := client.InvokeMethod(ctx, "app-id", "method-name", "post") +``` + +Invoke a service with data: +```go +content := &dapr.DataContent{ + ContentType: "application/json", + Data: []byte(`{ "id": "a123", "value": "demo", "valid": true }`), +} + +resp, err = client.InvokeMethodWithContent(ctx, "app-id", "method-name", "post", content) +``` + +For a full guide on service invocation, visit [How-To: Invoke a service]({{% ref howto-invoke-discover-services.md %}}). + +### Workflows + +Workflows and their activities can be authored and managed using the Dapr Go SDK like so: + +```go +import ( +... +"github.com/dapr/go-sdk/workflow" +... +) + +func ExampleWorkflow(ctx *workflow.WorkflowContext) (any, error) { + var output string + input := "world" + + if err := ctx.CallActivity(ExampleActivity, workflow.ActivityInput(input)).Await(&output); err != nil { + return nil, err + } + + // Print output - "hello world" + fmt.Println(output) + + return nil, nil +} + +func ExampleActivity(ctx workflow.ActivityContext) (any, error) { + var input string + if err := ctx.GetInput(&input); err != nil { + return "", err + } + + return fmt.Sprintf("hello %s", input), nil +} + +func main() { + // Create a workflow worker + w, err := workflow.NewWorker() + if err != nil { + log.Fatalf("error creating worker: %v", err) + } + + // Register the workflow + w.RegisterWorkflow(ExampleWorkflow) + + // Register the activity + w.RegisterActivity(ExampleActivity) + + // Start workflow runner + if err := w.Start(); err != nil { + log.Fatal(err) + } + + // Create a workflow client + wfClient, err := workflow.NewClient() + if err != nil { + log.Fatal(err) + } + + // Start a new workflow + id, err := wfClient.ScheduleNewWorkflow(context.Background(), "ExampleWorkflow") + if err != nil { + log.Fatal(err) + } + + // 
Wait for the workflow to complete + metadata, err := wfClient.WaitForWorkflowCompletion(ctx, id) + if err != nil { + log.Fatal(err) + } + + // Print workflow status post-completion + fmt.Println(metadata.RuntimeStatus) + + // Shutdown Worker + w.Shutdown() +} +``` + +- For a more comprehensive guide on workflows visit these How-To guides: + - [How-To: Author a workflow]({{% ref howto-author-workflow.md %}}). + - [How-To: Manage a workflow]({{% ref howto-manage-workflow.md %}}). +- Visit the Go SDK Examples to jump into complete examples: + - [Workflow Example](https://github.com/dapr/go-sdk/tree/main/examples/workflow) + - [Workflow - Parallelised](https://github.com/dapr/go-sdk/tree/main/examples/workflow-parallel) + +### State Management + +For simple use-cases, Dapr client provides easy to use `Save`, `Get`, `Delete` methods: + +```go +ctx := context.Background() +data := []byte("hello") +store := "my-store" // defined in the component YAML + +// save state with the key key1, default options: strong, last-write +if err := client.SaveState(ctx, store, "key1", data, nil); err != nil { + panic(err) +} + +// get state for key key1 +item, err := client.GetState(ctx, store, "key1", nil) +if err != nil { + panic(err) +} +fmt.Printf("data [key:%s etag:%s]: %s", item.Key, item.Etag, string(item.Value)) + +// delete state for key key1 +if err := client.DeleteState(ctx, store, "key1", nil); err != nil { + panic(err) +} +``` + +For more granular control, the Dapr Go client exposes `SetStateItem` type, which can be use to gain more control over the state operations and allow for multiple items to be saved at once: + +```go +item1 := &dapr.SetStateItem{ + Key: "key1", + Etag: &ETag{ + Value: "1", + }, + Metadata: map[string]string{ + "created-on": time.Now().UTC().String(), + }, + Value: []byte("hello"), + Options: &dapr.StateOptions{ + Concurrency: dapr.StateConcurrencyLastWrite, + Consistency: dapr.StateConsistencyStrong, + }, +} + +item2 := &dapr.SetStateItem{ + Key: 
"key2", + Metadata: map[string]string{ + "created-on": time.Now().UTC().String(), + }, + Value: []byte("hello again"), +} + +item3 := &dapr.SetStateItem{ + Key: "key3", + Etag: &dapr.ETag{ + Value: "1", + }, + Value: []byte("hello again"), +} + +if err := client.SaveBulkState(ctx, store, item1, item2, item3); err != nil { + panic(err) +} +``` + +Similarly, `GetBulkState` method provides a way to retrieve multiple state items in a single operation: + +```go +keys := []string{"key1", "key2", "key3"} +items, err := client.GetBulkState(ctx, store, keys, nil,100) +``` + +And the `ExecuteStateTransaction` method to execute multiple upsert or delete operations transactionally. + +```go +ops := make([]*dapr.StateOperation, 0) + +op1 := &dapr.StateOperation{ + Type: dapr.StateOperationTypeUpsert, + Item: &dapr.SetStateItem{ + Key: "key1", + Value: []byte(data), + }, +} +op2 := &dapr.StateOperation{ + Type: dapr.StateOperationTypeDelete, + Item: &dapr.SetStateItem{ + Key: "key2", + }, +} +ops = append(ops, op1, op2) +meta := map[string]string{} +err := testClient.ExecuteStateTransaction(ctx, store, meta, ops) +``` + +Retrieve, filter, and sort key/value data stored in your statestore using `QueryState`. + +```go +// Define the query string +query := `{ + "filter": { + "EQ": { "value.Id": "1" } + }, + "sort": [ + { + "key": "value.Balance", + "order": "DESC" + } + ] +}` + +// Use the client to query the state +queryResponse, err := c.QueryState(ctx, "querystore", query) +if err != nil { + log.Fatal(err) +} + +fmt.Printf("Got %d\n", len(queryResponse)) + +for _, account := range queryResponse { + var data Account + err := account.Unmarshal(&data) + if err != nil { + log.Fatal(err) + } + + fmt.Printf("Account: %s has %f\n", data.ID, data.Balance) +} +``` + +> **Note:** Query state API is currently in alpha + +For a full guide on state management, visit [How-To: Save & get state]({{% ref howto-get-save-state.md %}}). 
+ +### Publish Messages +To publish data onto a topic, the Dapr Go client provides a simple method: + +```go +data := []byte(`{ "id": "a123", "value": "abcdefg", "valid": true }`) +if err := client.PublishEvent(ctx, "component-name", "topic-name", data); err != nil { + panic(err) +} +``` + +To publish multiple messages at once, the `PublishEvents` method can be used: + +```go +events := []string{"event1", "event2", "event3"} +res := client.PublishEvents(ctx, "component-name", "topic-name", events) +if res.Error != nil { + panic(res.Error) +} +``` + +For a full guide on pub/sub, visit [How-To: Publish & subscribe]({{% ref howto-publish-subscribe.md %}}). + +### Workflow + +You can create [workflows]({{% ref workflow-overview.md %}}) using the Go SDK. For example, start with a simple workflow activity: + +```go +func TestActivity(ctx workflow.ActivityContext) (any, error) { + var input int + if err := ctx.GetInput(&input); err != nil { + return "", err + } + + // Do something here + return "result", nil +} +``` + +Write a simple workflow function: + +```go +func TestWorkflow(ctx *workflow.WorkflowContext) (any, error) { + var input int + if err := ctx.GetInput(&input); err != nil { + return nil, err + } + var output string + if err := ctx.CallActivity(TestActivity, workflow.ActivityInput(input)).Await(&output); err != nil { + return nil, err + } + if err := ctx.WaitForExternalEvent("testEvent", time.Second*60).Await(&output); err != nil { + return nil, err + } + + if err := ctx.CreateTimer(time.Second).Await(nil); err != nil { + return nil, nil + } + return output, nil +} +``` + +Then compose your application that will use the workflow you've created. [Refer to the How-To: Author workflows guide]({{% ref howto-author-workflow.md %}}) for a full walk-through. + +Try out the [Go SDK workflow example.](https://github.com/dapr/go-sdk/blob/main/examples/workflow) + +### Jobs + +The Dapr client Go SDK allows you to schedule, get, and delete jobs. 
Jobs enable you to schedule work to be executed at specific times or intervals. + +#### Scheduling a Job + +To schedule a new job, use the `ScheduleJobAlpha1` method: + +```go +import ( + "google.golang.org/protobuf/types/known/anypb" +) + +// Create job data +data, err := anypb.New(&YourDataStruct{Message: "Hello, Job!"}) +if err != nil { + panic(err) +} + +// Create a simple job using the builder pattern +job := client.NewJob("my-scheduled-job", + client.WithJobData(data), + client.WithJobDueTime("10s"), // Execute in 10 seconds +) + +// Schedule the job +err = client.ScheduleJobAlpha1(ctx, job) +if err != nil { + panic(err) +} +``` + +#### Job with Schedule and Repeats + +You can create recurring jobs using the `Schedule` field with cron expressions: + +```go +job := client.NewJob("recurring-job", + client.WithJobData(data), + client.WithJobSchedule("0 9 * * *"), // Run at 9 AM every day + client.WithJobRepeats(10), // Repeat 10 times + client.WithJobTTL("1h"), // Job expires after 1 hour +) + +err = client.ScheduleJobAlpha1(ctx, job) +``` + +#### Job with Failure Policy + +Configure how jobs should handle failures using failure policies: + +```go +// Constant retry policy with max retries and interval +job := client.NewJob("resilient-job", + client.WithJobData(data), + client.WithJobDueTime("2024-01-01T10:00:00Z"), + client.WithJobConstantFailurePolicy(), + client.WithJobConstantFailurePolicyMaxRetries(3), + client.WithJobConstantFailurePolicyInterval(30*time.Second), +) + +err = client.ScheduleJobAlpha1(ctx, job) +``` + +For jobs that should not be retried on failure, use the drop policy: + +```go +job := client.NewJob("one-shot-job", + client.WithJobData(data), + client.WithJobDueTime("2024-01-01T10:00:00Z"), + client.WithJobDropFailurePolicy(), +) + +err = client.ScheduleJobAlpha1(ctx, job) +``` + +#### Getting a Job + +To get information about a scheduled job: + +```go +job, err := client.GetJobAlpha1(ctx, "my-scheduled-job") +if err != nil { + panic(err) 
 +} + +fmt.Printf("Job: %s, Schedule: %s, Repeats: %d\n", + job.Name, job.Schedule, job.Repeats) +``` + +#### Deleting a Job + +To cancel a scheduled job: + +```go +err = client.DeleteJobAlpha1(ctx, "my-scheduled-job") +if err != nil { + panic(err) +} +``` + +For a full guide on jobs, visit [How-To: Schedule and manage jobs]({{% ref howto-schedule-and-handle-triggered-jobs.md %}}). + +### Output Bindings + + +The Dapr Go client SDK provides two methods to invoke an operation on a Dapr-defined binding. Dapr supports input, output, and bidirectional bindings. + +For simple, output-only binding: + +```go +in := &dapr.InvokeBindingRequest{ Name: "binding-name", Operation: "operation-name" } +err = client.InvokeOutputBinding(ctx, in) +``` + +To invoke a method with content and metadata: + +```go +in := &dapr.InvokeBindingRequest{ + Name: "binding-name", + Operation: "operation-name", + Data: []byte("hello"), + Metadata: map[string]string{"k1": "v1", "k2": "v2"}, +} + +out, err := client.InvokeBinding(ctx, in) +``` + +For a full guide on output bindings, visit [How-To: Use bindings]({{% ref howto-bindings.md %}}). + +### Actors + +Use the Dapr Go client SDK to write actors. + +```go +// MyActor represents an example actor type. +type MyActor struct { + actors.Actor +} + +// MyActorMethod is a method that can be invoked on MyActor. 
 +func (a *MyActor) MyActorMethod(ctx context.Context, req *actors.Message) (string, error) { + log.Printf("Received message: %s", req.Data) + return "Hello from MyActor!", nil +} + +func main() { + // Create a Dapr client + daprClient, err := client.NewClient() + if err != nil { + log.Fatal("Error creating Dapr client: ", err) + } + + // Register the actor type with Dapr + actors.RegisterActor(&MyActor{}) + + // Create an actor client + actorClient := actors.NewClient(daprClient) + + // Create an actor ID + actorID := actors.NewActorID("myactor") + + // Get or create the actor + err = actorClient.SaveActorState(context.Background(), "myactorstore", actorID, map[string]interface{}{"data": "initial state"}) + if err != nil { + log.Fatal("Error saving actor state: ", err) + } + + // Invoke a method on the actor + resp, err := actorClient.InvokeActorMethod(context.Background(), "myactorstore", actorID, "MyActorMethod", &actors.Message{Data: []byte("Hello from client!")}) + if err != nil { + log.Fatal("Error invoking actor method: ", err) + } + + log.Printf("Response from actor: %s", resp.Data) + + // Wait for a few seconds before terminating + time.Sleep(5 * time.Second) + + // Delete the actor + err = actorClient.DeleteActor(context.Background(), "myactorstore", actorID) + if err != nil { + log.Fatal("Error deleting actor: ", err) + } + + // Close the Dapr client + daprClient.Close() +} +``` + +For a full guide on actors, visit [the Actors building block documentation]({{% ref actors %}}). + +### Secret Management + +The Dapr client also provides access to the runtime secrets that can be backed by any number of secret stores (e.g. Kubernetes Secrets, HashiCorp Vault, or Azure KeyVault): + +```go +opt := map[string]string{ + "version": "2", +} + +secret, err := client.GetSecret(ctx, "store-name", "secret-name", opt) +``` + +### Authentication + +By default, Dapr relies on the network boundary to limit access to its API. 
If however the target Dapr API is configured with token-based authentication, users can configure the Go Dapr client with that token in two ways: + +**Environment Variable** + +If the DAPR_API_TOKEN environment variable is defined, Dapr will automatically use it to augment its Dapr API invocations to ensure authentication. + +**Explicit Method** + +In addition, users can also set the API token explicitly on any Dapr client instance. This approach is helpful in cases when the user code needs to create multiple clients for different Dapr API endpoints. + +```go +func main() { + client, err := dapr.NewClient() + if err != nil { + panic(err) + } + defer client.Close() + client.WithAuthToken("your-Dapr-API-token-here") +} +``` + + +For a full guide on secrets, visit [How-To: Retrieve secrets]({{% ref howto-secrets.md %}}). + +### Distributed Lock + +The Dapr client provides mutually exclusive access to a resource using a lock. With a lock, you can: + +- Provide access to a database row, table, or an entire database +- Lock reading messages from a queue in a sequential manner + +```go +package main + +import ( + "fmt" + + dapr "github.com/dapr/go-sdk/client" +) + +func main() { + client, err := dapr.NewClient() + if err != nil { + panic(err) + } + defer client.Close() + + resp, err := client.TryLockAlpha1(ctx, "lockstore", &dapr.LockRequest{ + LockOwner: "random_id_abc123", + ResourceID: "my_file_name", + ExpiryInSeconds: 60, + }) + + fmt.Println(resp.Success) +} +``` + +For a full guide on distributed lock, visit [How-To: Use a lock]({{% ref howto-use-distributed-lock.md %}}). + +### Configuration + +With the Dapr client Go SDK, you can consume configuration items that are returned as read-only key/value pairs, and subscribe to configuration item changes. 
+ +#### Config Get + +```go + items, err := client.GetConfigurationItem(ctx, "example-config", "mykey") + if err != nil { + panic(err) + } + fmt.Printf("get config = %s\n", (*items).Value) +``` + +#### Config Subscribe + +```go +go func() { + if err := client.SubscribeConfigurationItems(ctx, "example-config", []string{"mySubscribeKey1", "mySubscribeKey2", "mySubscribeKey3"}, func(id string, items map[string]*dapr.ConfigurationItem) { + for k, v := range items { + fmt.Printf("get updated config key = %s, value = %s \n", k, v.Value) + } + subscribeID = id + }); err != nil { + panic(err) + } +}() +``` + +For a full guide on configuration, visit [How-To: Manage configuration from a store]({{% ref howto-manage-configuration.md %}}). + +### Cryptography + +With the Dapr client Go SDK, you can use the high-level `Encrypt` and `Decrypt` cryptography APIs to encrypt and decrypt files while working on a stream of data. + +To encrypt: + +```go +// Encrypt the data using Dapr +out, err := client.Encrypt(context.Background(), rf, dapr.EncryptOptions{ + // These are the 3 required parameters + ComponentName: "mycryptocomponent", + KeyName: "mykey", + Algorithm: "RSA", +}) +if err != nil { + panic(err) +} +``` + +To decrypt: + +```go +// Decrypt the data using Dapr +out, err := client.Decrypt(context.Background(), rf, dapr.EncryptOptions{ + // Only required option is the component name + ComponentName: "mycryptocomponent", +}) +``` + +For a full guide on cryptography, visit [How-To: Use the cryptography APIs]({{% ref howto-cryptography.md %}}). 
+ +## Related links +[Go SDK Examples](https://github.com/dapr/go-sdk/tree/main/examples) diff --git a/sdkdocs/go/content/en/go-sdk-docs/go-service/_index.md b/sdkdocs/go/content/en/go-sdk-docs/go-service/_index.md new file mode 100644 index 00000000000..93f51a9f884 --- /dev/null +++ b/sdkdocs/go/content/en/go-sdk-docs/go-service/_index.md @@ -0,0 +1,11 @@ +--- +type: docs +title: "Getting started with the Dapr Service (Callback) SDK for Go" +linkTitle: "Service" +weight: 20000 +description: How to get up and running with the Dapr Service (Callback) SDK for Go +no_list: true +--- +In addition to this Dapr API client, Dapr Go SDK also provides service package to bootstrap your Dapr callback services. These services can be developed in either gRPC or HTTP: + - [HTTP Service]({{% ref http-service.md %}}) + - [gRPC Service]({{% ref grpc-service.md %}}) \ No newline at end of file diff --git a/sdkdocs/go/content/en/go-sdk-docs/go-service/grpc-service.md b/sdkdocs/go/content/en/go-sdk-docs/go-service/grpc-service.md new file mode 100644 index 00000000000..33c7909d1c3 --- /dev/null +++ b/sdkdocs/go/content/en/go-sdk-docs/go-service/grpc-service.md @@ -0,0 +1,159 @@ +--- +type: docs +title: "Getting started with the Dapr Service (Callback) SDK for Go" +linkTitle: "gRPC Service" +weight: 20000 +description: How to get up and running with the Dapr Service (Callback) SDK for Go +no_list: true +--- + +## Dapr gRPC Service SDK for Go + +### Prerequisite +Start by importing Dapr Go service/grpc package: + +```go +daprd "github.com/dapr/go-sdk/service/grpc" +``` + +### Creating and Starting Service + +To create a gRPC Dapr service, first, create a Dapr callback instance with a specific address: + +```go +s, err := daprd.NewService(":50001") +if err != nil { + log.Fatalf("failed to start the server: %v", err) +} +``` +Or with address and an existing net.Listener in case you want to combine existing server listener: + +```go +list, err := net.Listen("tcp", "localhost:0") +if err != 
 nil { + log.Fatalf("gRPC listener creation failed: %s", err) +} +s := daprd.NewServiceWithListener(list) +``` + +Once you create a service instance, you can "attach" to that service any number of event, binding, and service invocation logic handlers as shown below. Once the logic is defined, you are ready to start the service: + +```go +if err := s.Start(); err != nil { + log.Fatalf("server error: %v", err) +} +``` + +### Event Handling +To handle events from a specific topic you need to add at least one topic event handler before starting the service: + +```go +sub := &common.Subscription{ + PubsubName: "messages", + Topic: "topic1", + } +if err := s.AddTopicEventHandler(sub, eventHandler); err != nil { + log.Fatalf("error adding topic subscription: %v", err) +} +``` + +The handler method itself can be any method with the expected signature: + +```go +func eventHandler(ctx context.Context, e *common.TopicEvent) (retry bool, err error) { + log.Printf("event - PubsubName:%s, Topic:%s, ID:%s, Data: %v", e.PubsubName, e.Topic, e.ID, e.Data) + // do something with the event + return true, nil +} +``` + +Optionally, you can use [routing rules](https://docs.dapr.io/developing-applications/building-blocks/pubsub/howto-route-messages/) to send messages to different handlers based on the contents of the CloudEvent. + +```go +sub := &common.Subscription{ + PubsubName: "messages", + Topic: "topic1", + Route: "/important", + Match: `event.type == "important"`, + Priority: 1, +} +err := s.AddTopicEventHandler(sub, importantHandler) +if err != nil { + log.Fatalf("error adding topic subscription: %v", err) +} +``` + +You can also create a custom type that implements the `TopicEventSubscriber` interface to handle your events: + +```go +type EventHandler struct { + // any data or references that your event handler needs. 
+} + +func (h *EventHandler) Handle(ctx context.Context, e *common.TopicEvent) (retry bool, err error) { + log.Printf("event - PubsubName:%s, Topic:%s, ID:%s, Data: %v", e.PubsubName, e.Topic, e.ID, e.Data) + // do something with the event + return true, nil +} +``` + +The `EventHandler` can then be added using the `AddTopicEventSubscriber` method: + +```go +sub := &common.Subscription{ + PubsubName: "messages", + Topic: "topic1", +} +eventHandler := &EventHandler{ +// initialize any fields +} +if err := s.AddTopicEventSubscriber(sub, eventHandler); err != nil { + log.Fatalf("error adding topic subscription: %v", err) +} +``` + +### Service Invocation Handler +To handle service invocations you will need to add at least one service invocation handler before starting the service: + +```go +if err := s.AddServiceInvocationHandler("echo", echoHandler); err != nil { + log.Fatalf("error adding invocation handler: %v", err) +} +``` + +The handler method itself can be any method with the expected signature: + +```go +func echoHandler(ctx context.Context, in *common.InvocationEvent) (out *common.Content, err error) { + log.Printf("echo - ContentType:%s, Verb:%s, QueryString:%s, %+v", in.ContentType, in.Verb, in.QueryString, string(in.Data)) + // do something with the invocation here + out = &common.Content{ + Data: in.Data, + ContentType: in.ContentType, + DataTypeURL: in.DataTypeURL, + } + return +} +``` + +### Binding Invocation Handler +To handle binding invocations you will need to add at least one binding invocation handler before starting the service: + +```go +if err := s.AddBindingInvocationHandler("run", runHandler); err != nil { + log.Fatalf("error adding binding handler: %v", err) +} +``` + +The handler method itself can be any method with the expected signature: + +```go +func runHandler(ctx context.Context, in *common.BindingEvent) (out []byte, err error) { + log.Printf("binding - Data:%v, Meta:%v", in.Data, in.Metadata) + // do something with the invocation 
here + return nil, nil +} +``` + +## Related links +- [Go SDK Examples](https://github.com/dapr/go-sdk/tree/main/examples) diff --git a/sdkdocs/go/content/en/go-sdk-docs/go-service/http-service.md b/sdkdocs/go/content/en/go-sdk-docs/go-service/http-service.md new file mode 100644 index 00000000000..c73487e0650 --- /dev/null +++ b/sdkdocs/go/content/en/go-sdk-docs/go-service/http-service.md @@ -0,0 +1,153 @@ +--- +type: docs +title: "Getting started with the Dapr HTTP Service SDK for Go" +linkTitle: "HTTP Service" +weight: 10000 +description: How to get up and running with the Dapr HTTP Service SDK for Go +no_list: true +--- + +### Prerequisite +Start by importing Dapr Go service/http package: + +```go +daprd "github.com/dapr/go-sdk/service/http" +``` + +### Creating and Starting Service +To create an HTTP Dapr service, first, create a Dapr callback instance with a specific address: + +```go +s := daprd.NewService(":8080") +``` + +Or with address and an existing http.ServeMux in case you want to combine existing server implementations: + +```go +mux := http.NewServeMux() +mux.HandleFunc("/", myOtherHandler) +s := daprd.NewServiceWithMux(":8080", mux) +``` + +Once you create a service instance, you can "attach" to that service any number of event, binding, and service invocation logic handlers as shown below. 
Once the logic is defined, you are ready to start the service: + +```go +if err := s.Start(); err != nil && err != http.ErrServerClosed { + log.Fatalf("error: %v", err) +} +``` + +### Event Handling +To handle events from a specific topic you need to add at least one topic event handler before starting the service: + +```go +sub := &common.Subscription{ + PubsubName: "messages", + Topic: "topic1", + Route: "/events", +} +err := s.AddTopicEventHandler(sub, eventHandler) +if err != nil { + log.Fatalf("error adding topic subscription: %v", err) +} +``` + +The handler method itself can be any method with the expected signature: + +```go +func eventHandler(ctx context.Context, e *common.TopicEvent) (retry bool, err error) { + log.Printf("event - PubsubName:%s, Topic:%s, ID:%s, Data: %v", e.PubsubName, e.Topic, e.ID, e.Data) + // do something with the event + return true, nil +} +``` + +Optionally, you can use [routing rules](https://docs.dapr.io/developing-applications/building-blocks/pubsub/howto-route-messages/) to send messages to different handlers based on the contents of the CloudEvent. + +```go +sub := &common.Subscription{ + PubsubName: "messages", + Topic: "topic1", + Route: "/important", + Match: `event.type == "important"`, + Priority: 1, +} +err := s.AddTopicEventHandler(sub, importantHandler) +if err != nil { + log.Fatalf("error adding topic subscription: %v", err) +} +``` + +You can also create a custom type that implements the `TopicEventSubscriber` interface to handle your events: + +```go +type EventHandler struct { + // any data or references that your event handler needs. 
+} + +func (h *EventHandler) Handle(ctx context.Context, e *common.TopicEvent) (retry bool, err error) { + log.Printf("event - PubsubName:%s, Topic:%s, ID:%s, Data: %v", e.PubsubName, e.Topic, e.ID, e.Data) + // do something with the event + return true, nil +} +``` + +The `EventHandler` can then be added using the `AddTopicEventSubscriber` method: + +```go +sub := &common.Subscription{ + PubsubName: "messages", + Topic: "topic1", +} +eventHandler := &EventHandler{ +// initialize any fields +} +if err := s.AddTopicEventSubscriber(sub, eventHandler); err != nil { + log.Fatalf("error adding topic subscription: %v", err) +} +``` + +### Service Invocation Handler +To handle service invocations you will need to add at least one service invocation handler before starting the service: + +```go +if err := s.AddServiceInvocationHandler("/echo", echoHandler); err != nil { + log.Fatalf("error adding invocation handler: %v", err) +} +``` + +The handler method itself can be any method with the expected signature: + + +```go +func echoHandler(ctx context.Context, in *common.InvocationEvent) (out *common.Content, err error) { + log.Printf("echo - ContentType:%s, Verb:%s, QueryString:%s, %+v", in.ContentType, in.Verb, in.QueryString, string(in.Data)) + // do something with the invocation here + out = &common.Content{ + Data: in.Data, + ContentType: in.ContentType, + DataTypeURL: in.DataTypeURL, + } + return +} +``` + +### Binding Invocation Handler + +```go +if err := s.AddBindingInvocationHandler("/run", runHandler); err != nil { + log.Fatalf("error adding binding handler: %v", err) +} +``` + +The handler method itself can be any method with the expected signature: + +```go +func runHandler(ctx context.Context, in *common.BindingEvent) (out []byte, err error) { + log.Printf("binding - Data:%v, Meta:%v", in.Data, in.Metadata) + // do something with the invocation here + return nil, nil +} +``` +## Related links +- [Go SDK 
Examples](https://github.com/dapr/go-sdk/tree/main/examples) diff --git a/sdkdocs/java b/sdkdocs/java deleted file mode 160000 index 3bb91e505e3..00000000000 --- a/sdkdocs/java +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 3bb91e505e34ef3b7bc3325be853de8d0491c431 diff --git a/sdkdocs/java/content/en/java-sdk-contributing/java-contributing.md b/sdkdocs/java/content/en/java-sdk-contributing/java-contributing.md new file mode 100644 index 00000000000..03ba6d4e51f --- /dev/null +++ b/sdkdocs/java/content/en/java-sdk-contributing/java-contributing.md @@ -0,0 +1,27 @@ +--- +type: docs +title: "Contributing to the Java SDK" +linkTitle: "Java SDK" +weight: 3000 +description: Guidelines for contributing to the Dapr Java SDK +--- + +When contributing to the [Java SDK](https://github.com/dapr/java-sdk) the following rules and best-practices should be followed. + +## Examples + +The `examples` directory contains code samples for users to run to try out specific functionality of the various Java SDK packages and extensions. When writing new and updated samples keep in mind: + +- All examples should be runnable on Windows, Linux, and MacOS. While Java code is consistent among operating systems, any pre/post example commands should provide options through [tabpane]({{% ref "contributing-docs.md#tabbed-content" %}}) +- Contain steps to download/install any required pre-requisites. Someone coming in with a fresh OS install should be able to start on the example and complete it without an error. Links to external download pages are fine. + +## Docs + +The `daprdocs` directory contains the markdown files that are rendered into the [Dapr Docs](https://docs.dapr.io) website. When the documentation website is built, this repo is cloned and configured so that its contents are rendered with the docs content. When writing docs, keep in mind: + + - All rules in the [docs guide]({{% ref contributing-docs.md %}}) should be followed in addition to these. 
+ - All files and directories should be prefixed with `java-` to ensure all file/directory names are globally unique across all Dapr documentation. + +## GitHub Dapr Bot Commands + +Check out the [daprbot documentation](https://docs.dapr.io/contributing/daprbot/) for GitHub commands you can run in this repo for common tasks. For example, you can run the `/assign` command (as a comment on an issue) to assign the issue to yourself. diff --git a/sdkdocs/java/content/en/java-sdk-docs/_index.md b/sdkdocs/java/content/en/java-sdk-docs/_index.md new file mode 100644 index 00000000000..d640101bc1a --- /dev/null +++ b/sdkdocs/java/content/en/java-sdk-docs/_index.md @@ -0,0 +1,145 @@ +--- +type: docs +title: "Dapr Java SDK" +linkTitle: "Java" +weight: 1000 +description: Java SDK packages for developing Dapr applications +cascade: + github_repo: https://github.com/dapr/java-sdk + github_subdir: daprdocs/content/en/java-sdk-docs + path_base_for_github_subdir: content/en/developing-applications/sdks/java/ + github_branch: master +--- + +Dapr offers a variety of packages to help with the development of Java applications. Using them you can create Java clients, servers, and virtual actors with Dapr. + +## Prerequisites + +- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed +- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) +- JDK 11 or above - the published jars are compatible with Java 8: + - [AdoptOpenJDK 11 - LTS](https://adoptopenjdk.net/) + - [Oracle's JDK 15](https://www.oracle.com/java/technologies/javase-downloads.html) + - [Oracle's JDK 11 - LTS](https://www.oracle.com/java/technologies/javase-jdk11-downloads.html) + - [OpenJDK](https://openjdk.java.net/) +- Install one of the following build tools for Java: + - [Maven 3.x](https://maven.apache.org/install.html) + - [Gradle 6.x](https://gradle.org/install/) + +## Import Dapr's Java SDK + +Next, import the Java SDK packages to get started. Select your preferred build tool to learn how to import. 
+ +{{< tabpane text=true >}} + +{{% tab header="Maven" %}} + + +For a Maven project, add the following to your `pom.xml` file: + +```xml + + ... + + ... + + + io.dapr + dapr-sdk + 1.16.0 + + + + io.dapr + dapr-sdk-actors + 1.16.0 + + + + io.dapr + dapr-sdk-springboot + 1.16.0 + + ... + + ... + +``` +{{% /tab %}} + +{{% tab header="Gradle" %}} + + +For a Gradle project, add the following to your `build.gradle` file: + +```java +dependencies { +... + // Dapr's core SDK with all features, except Actors. + compile('io.dapr:dapr-sdk:1.16.0') + // Dapr's SDK for Actors (optional). + compile('io.dapr:dapr-sdk-actors:1.16.0') + // Dapr's SDK integration with SpringBoot (optional). + compile('io.dapr:dapr-sdk-springboot:1.16.0') +} +``` + +{{% /tab %}} + +{{< /tabpane >}} + +If you are also using Spring Boot, you may run into a common issue where the `OkHttp` version that the Dapr SDK uses conflicts with the one specified in the Spring Boot _Bill of Materials_. + +You can fix this by specifying a compatible `OkHttp` version in your project to match the version that the Dapr SDK uses: + +```xml + + com.squareup.okhttp3 + okhttp + 1.16.0 + +``` + +## Try it out + +Put the Dapr Java SDK to the test. Walk through the Java quickstarts and tutorials to see Dapr in action: + +| SDK samples | Description | +| ----------- | ----------- | +| [Quickstarts]({{% ref quickstarts %}}) | Experience Dapr's API building blocks in just a few minutes using the Java SDK. | +| [SDK samples](https://github.com/dapr/java-sdk/tree/master/examples) | Clone the SDK repo to try out some examples and get started. 
| + +```java +import io.dapr.client.DaprClient; +import io.dapr.client.DaprClientBuilder; + +try (DaprClient client = (new DaprClientBuilder()).build()) { + // sending a class with message; BINDING_OPERATION="create" + client.invokeBinding(BINDING_NAME, BINDING_OPERATION, myClass).block(); + + // sending a plain string + client.invokeBinding(BINDING_NAME, BINDING_OPERATION, message).block(); +} +``` + +- For a full guide on output bindings visit [How-To: Output bindings]({{% ref howto-bindings.md %}}). +- Visit [Java SDK examples](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/bindings/http) for code samples and instructions to try out output bindings. + +## Available packages + +
+
+
+
Client
+

Create Java clients that interact with a Dapr sidecar and other Dapr applications.

+ +
+
+
+
+
Workflow
+

Create and manage workflows that work with other Dapr APIs in Java.

+ +
+
+
diff --git a/sdkdocs/java/content/en/java-sdk-docs/java-ai/_index.md b/sdkdocs/java/content/en/java-sdk-docs/java-ai/_index.md new file mode 100644 index 00000000000..904edfc1115 --- /dev/null +++ b/sdkdocs/java/content/en/java-sdk-docs/java-ai/_index.md @@ -0,0 +1,7 @@ +--- +type: docs +title: "AI" +linkTitle: "AI" +weight: 3000 +description: With the Dapr Conversation AI package, you can interact with the Dapr AI workloads from a Java application. To get started, walk through the [Dapr AI]({{% ref java-ai-howto.md %}}) how-to guide. +--- \ No newline at end of file diff --git a/sdkdocs/java/content/en/java-sdk-docs/java-ai/java-ai-howto.md b/sdkdocs/java/content/en/java-sdk-docs/java-ai/java-ai-howto.md new file mode 100644 index 00000000000..39970d52188 --- /dev/null +++ b/sdkdocs/java/content/en/java-sdk-docs/java-ai/java-ai-howto.md @@ -0,0 +1,105 @@ +--- +type: docs +title: "How to: Author and manage Dapr Conversation AI in the Java SDK" +linkTitle: "How to: Author and manage Conversation AI" +weight: 20000 +description: How to get up and running with Conversation AI using the Dapr Java SDK +--- + +As part of this demonstration, we will look at how to use the Conversation API to converse with a Large Language Model (LLM). The API +will return the response from the LLM for the given prompt. With the [provided conversation ai example](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/conversation), you will: + +- You will provide a prompt using the [Conversation AI example](https://github.com/dapr/java-sdk/blob/master/examples/src/main/java/io/dapr/examples/conversation/DemoConversationAI.java) +- Filter out Personally identifiable information (PII). + +This example uses the default configuration from `dapr init` in [self-hosted mode](https://github.com/dapr/cli#install-dapr-on-your-local-machine-self-hosted). + +## Prerequisites + +- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started). 
+- Java JDK 11 (or greater): + - [Oracle JDK](https://www.oracle.com/java/technologies/downloads), or + - OpenJDK +- [Apache Maven](https://maven.apache.org/install.html), version 3.x. +- [Docker Desktop](https://www.docker.com/products/docker-desktop) + +## Set up the environment + +Clone the [Java SDK repo](https://github.com/dapr/java-sdk) and navigate into it. + +```bash +git clone https://github.com/dapr/java-sdk.git +cd java-sdk +``` + +Run the following command to install the requirements for running the Conversation AI example with the Dapr Java SDK. + +```bash +mvn clean install -DskipTests +``` + +From the Java SDK root directory, navigate to the examples' directory. + +```bash +cd examples +``` + +Run the Dapr sidecar. + +```sh +dapr run --app-id conversationapp --dapr-grpc-port 51439 --dapr-http-port 3500 --app-port 8080 +``` + +> Now, Dapr is listening for HTTP requests at `http://localhost:3500` and gRPC requests at `http://localhost:51439`. + +## Send a prompt with Personally identifiable information (PII) to the Conversation AI API + +In the `DemoConversationAI` there are steps to send a prompt using the `converse` method under the `DaprPreviewClient`. + +```java +public class DemoConversationAI { + /** + * The main method to start the client. + * + * @param args Input arguments (unused). + */ + public static void main(String[] args) { + try (DaprPreviewClient client = new DaprClientBuilder().buildPreviewClient()) { + System.out.println("Sending the following input to LLM: Hello How are you? This is the my number 672-123-4567"); + + ConversationInput daprConversationInput = new ConversationInput("Hello How are you? " + + "This is the my number 672-123-4567"); + + // Component name is the name provided in the metadata block of the conversation.yaml file. 
+ Mono<ConversationResponse> responseMono = client.converse(new ConversationRequest("echo", + List.of(daprConversationInput)) + .setContextId("contextId") + .setScrubPii(true).setTemperature(1.1d)); + ConversationResponse response = responseMono.block(); + System.out.printf("Conversation output: %s", response.getConversationOutputs().get(0).getResult()); + } catch (Exception e) { + throw new RuntimeException(e); + } + } +} +``` + +Run the `DemoConversationAI` with the following command. + +```sh +java -jar target/dapr-java-sdk-examples-exec.jar io.dapr.examples.conversation.DemoConversationAI +``` + +### Sample output +``` +== APP == Conversation output: Hello How are you? This is the my number +``` + +As shown in the output, the number sent to the API is obfuscated and returned as a masked placeholder. +The example above uses an ["echo"](https://docs.dapr.io/developing-applications/building-blocks/conversation/howto-conversation-layer/#set-up-the-conversation-component) +component for testing, which simply returns the input message. +When integrated with LLMs like OpenAI or Claude, you’ll receive meaningful responses instead of echoed input. + +## Next steps +- [Learn more about Conversation AI]({{% ref conversation-overview.md %}}) +- [Conversation AI API reference]({{% ref conversation_api.md %}}) \ No newline at end of file diff --git a/sdkdocs/java/content/en/java-sdk-docs/java-client/_index.md b/sdkdocs/java/content/en/java-sdk-docs/java-client/_index.md new file mode 100644 index 00000000000..5dc5fe97928 --- /dev/null +++ b/sdkdocs/java/content/en/java-sdk-docs/java-client/_index.md @@ -0,0 +1,756 @@ +--- +type: docs +title: "Getting started with the Dapr client Java SDK" +linkTitle: "Client" +weight: 3000 +description: How to get up and running with the Dapr Java SDK +--- + +The Dapr client package allows you to interact with other Dapr applications from a Java application. 
+ +{{% alert title="Note" color="primary" %}} +If you haven't already, [try out one of the quickstarts]({{% ref quickstarts %}}) for a quick walk-through on how to use the Dapr Java SDK with an API building block. + +{{% /alert %}} + +## Prerequisites + +[Complete initial setup and import the Java SDK into your project]({{% ref java %}}) + +## Initializing the client +You can initialize a Dapr client as so: + +```java +DaprClient client = new DaprClientBuilder().build() +``` + +This will connect to the default Dapr gRPC endpoint `localhost:50001`. For information about configuring the client using environment variables and system properties, see [Properties]({{% ref properties.md %}}). + +#### Error Handling + +Initially, errors in Dapr followed the Standard gRPC error model. However, to provide more detailed and informative error +messages, in version 1.13 an enhanced error model has been introduced which aligns with the gRPC Richer error model. In +response, the Java SDK extended the DaprException to include the error details that were added in Dapr. + +Example of handling the DaprException and consuming the error details when using the Dapr Java SDK: + +```java +... + try { + client.publishEvent("unknown_pubsub", "mytopic", "mydata").block(); + } catch (DaprException exception) { + System.out.println("Dapr exception's error code: " + exception.getErrorCode()); + System.out.println("Dapr exception's message: " + exception.getMessage()); + // DaprException now contains `getStatusDetails()` to include more details about the error from Dapr runtime. + System.out.println("Dapr exception's reason: " + exception.getStatusDetails().get( + DaprErrorDetails.ErrorDetailType.ERROR_INFO, + "reason", + TypeRef.STRING)); + } +... +``` + +## Building blocks + +The Java SDK allows you to interface with all of the [Dapr building blocks]({{% ref building-blocks %}}). 
+ +### Invoke a service + +```java +import io.dapr.client.DaprClient; +import io.dapr.client.DaprClientBuilder; + +try (DaprClient client = (new DaprClientBuilder()).build()) { + // invoke a 'GET' method (HTTP) skipping serialization: \say with a Mono return type + // for gRPC set HttpExtension.NONE parameters below + response = client.invokeMethod(SERVICE_TO_INVOKE, METHOD_TO_INVOKE, "{\"name\":\"World!\"}", HttpExtension.GET, byte[].class).block(); + + // invoke a 'POST' method (HTTP) skipping serialization: to \say with a Mono return type + response = client.invokeMethod(SERVICE_TO_INVOKE, METHOD_TO_INVOKE, "{\"id\":\"100\", \"FirstName\":\"Value\", \"LastName\":\"Value\"}", HttpExtension.POST, byte[].class).block(); + + System.out.println(new String(response)); + + // invoke a 'POST' method (HTTP) with serialization: \employees with a Mono return type + Employee newEmployee = new Employee("Nigel", "Guitarist"); + Employee employeeResponse = client.invokeMethod(SERVICE_TO_INVOKE, "employees", newEmployee, HttpExtension.POST, Employee.class).block(); +} +``` + +- For a full guide on service invocation visit [How-To: Invoke a service]({{% ref howto-invoke-discover-services.md %}}). 
+- Visit [Java SDK examples](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/invoke) for code samples and instructions to try out service invocation + +### Save & get application state + +```java +import io.dapr.client.DaprClient; +import io.dapr.client.DaprClientBuilder; +import io.dapr.client.domain.State; +import reactor.core.publisher.Mono; + +try (DaprClient client = (new DaprClientBuilder()).build()) { + // Save state + client.saveState(STATE_STORE_NAME, FIRST_KEY_NAME, myClass).block(); + + // Get state + State retrievedMessage = client.getState(STATE_STORE_NAME, FIRST_KEY_NAME, MyClass.class).block(); + + // Delete state + client.deleteState(STATE_STORE_NAME, FIRST_KEY_NAME).block(); +} +``` + +- For a full list of state operations visit [How-To: Get & save state]({{% ref howto-get-save-state.md %}}). +- Visit [Java SDK examples](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/state) for code samples and instructions to try out state management + +### Publish & subscribe to messages + +##### Publish messages + +```java +import io.dapr.client.DaprClient; +import io.dapr.client.DaprClientBuilder; +import io.dapr.client.domain.Metadata; +import static java.util.Collections.singletonMap; + +try (DaprClient client = (new DaprClientBuilder()).build()) { + client.publishEvent(PUBSUB_NAME, TOPIC_NAME, message, singletonMap(Metadata.TTL_IN_SECONDS, MESSAGE_TTL_IN_SECONDS)).block(); +} +``` + +##### Subscribe to messages + +```java +import com.fasterxml.jackson.databind.ObjectMapper; +import io.dapr.Topic; +import io.dapr.client.domain.BulkSubscribeAppResponse; +import io.dapr.client.domain.BulkSubscribeAppResponseEntry; +import io.dapr.client.domain.BulkSubscribeAppResponseStatus; +import io.dapr.client.domain.BulkSubscribeMessage; +import io.dapr.client.domain.BulkSubscribeMessageEntry; +import io.dapr.client.domain.CloudEvent; +import io.dapr.springboot.annotations.BulkSubscribe; +import 
org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RestController; +import reactor.core.publisher.Mono; + +@RestController +public class SubscriberController { + + private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); + + @Topic(name = "testingtopic", pubsubName = "${myAppProperty:messagebus}") + @PostMapping(path = "/testingtopic") + public Mono handleMessage(@RequestBody(required = false) CloudEvent cloudEvent) { + return Mono.fromRunnable(() -> { + try { + System.out.println("Subscriber got: " + cloudEvent.getData()); + System.out.println("Subscriber got: " + OBJECT_MAPPER.writeValueAsString(cloudEvent)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + @Topic(name = "testingtopic", pubsubName = "${myAppProperty:messagebus}", + rule = @Rule(match = "event.type == 'myevent.v2'", priority = 1)) + @PostMapping(path = "/testingtopicV2") + public Mono handleMessageV2(@RequestBody(required = false) CloudEvent cloudEvent) { + return Mono.fromRunnable(() -> { + try { + System.out.println("Subscriber got: " + cloudEvent.getData()); + System.out.println("Subscriber got: " + OBJECT_MAPPER.writeValueAsString(cloudEvent)); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + + @BulkSubscribe() + @Topic(name = "testingtopicbulk", pubsubName = "${myAppProperty:messagebus}") + @PostMapping(path = "/testingtopicbulk") + public Mono handleBulkMessage( + @RequestBody(required = false) BulkSubscribeMessage> bulkMessage) { + return Mono.fromCallable(() -> { + if (bulkMessage.getEntries().size() == 0) { + return new BulkSubscribeAppResponse(new ArrayList()); + } + + System.out.println("Bulk Subscriber received " + bulkMessage.getEntries().size() + " messages."); + + List entries = new ArrayList(); + for (BulkSubscribeMessageEntry entry : bulkMessage.getEntries()) { + try { + System.out.printf("Bulk Subscriber 
message has entry ID: %s\n", entry.getEntryId()); + CloudEvent cloudEvent = (CloudEvent) entry.getEvent(); + System.out.printf("Bulk Subscriber got: %s\n", cloudEvent.getData()); + entries.add(new BulkSubscribeAppResponseEntry(entry.getEntryId(), BulkSubscribeAppResponseStatus.SUCCESS)); + } catch (Exception e) { + e.printStackTrace(); + entries.add(new BulkSubscribeAppResponseEntry(entry.getEntryId(), BulkSubscribeAppResponseStatus.RETRY)); + } + } + return new BulkSubscribeAppResponse(entries); + }); + } +} +``` + +##### Bulk Publish Messages +> Note: API is in Alpha stage + + +```java +import io.dapr.client.DaprClientBuilder; +import io.dapr.client.DaprPreviewClient; +import io.dapr.client.domain.BulkPublishResponse; +import io.dapr.client.domain.BulkPublishResponseFailedEntry; +import java.util.ArrayList; +import java.util.List; +class Solution { + public void publishMessages() { + try (DaprPreviewClient client = (new DaprClientBuilder()).buildPreviewClient()) { + // Create a list of messages to publish + List messages = new ArrayList<>(); + for (int i = 0; i < NUM_MESSAGES; i++) { + String message = String.format("This is message #%d", i); + messages.add(message); + System.out.println("Going to publish message : " + message); + } + + // Publish list of messages using the bulk publish API + BulkPublishResponse res = client.publishEvents(PUBSUB_NAME, TOPIC_NAME, "text/plain", messages).block() + } + } +} +``` + +- For a full guide on publishing messages and subscribing to a topic [How-To: Publish & subscribe]({{% ref howto-publish-subscribe.md %}}). 
+- Visit [Java SDK examples](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/pubsub/http) for code samples and instructions to try out pub/sub + +### Interact with output bindings + +```java +import io.dapr.client.DaprClient; +import io.dapr.client.DaprClientBuilder; + +try (DaprClient client = (new DaprClientBuilder()).build()) { + // sending a class with message; BINDING_OPERATION="create" + client.invokeBinding(BINDING_NAME, BINDING_OPERATION, myClass).block(); + + // sending a plain string + client.invokeBinding(BINDING_NAME, BINDING_OPERATION, message).block(); +} +``` + +- For a full guide on output bindings visit [How-To: Output bindings]({{% ref howto-bindings.md %}}). +- Visit [Java SDK examples](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/bindings/http) for code samples and instructions to try out output bindings. + +### Interact with input bindings + +```java +import org.springframework.web.bind.annotation.*; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@RestController +@RequestMapping("/") +public class myClass { + private static final Logger log = LoggerFactory.getLogger(myClass); + @PostMapping(path = "/checkout") + public Mono getCheckout(@RequestBody(required = false) byte[] body) { + return Mono.fromRunnable(() -> + log.info("Received Message: " + new String(body))); + } +} +``` + +- For a full guide on input bindings, visit [How-To: Input bindings]({{% ref howto-triggers %}}). +- Visit [Java SDK examples](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/bindings/http) for code samples and instructions to try out input bindings. 
+ +### Retrieve secrets + +```java +import com.fasterxml.jackson.databind.ObjectMapper; +import io.dapr.client.DaprClient; +import io.dapr.client.DaprClientBuilder; +import java.util.Map; + +try (DaprClient client = (new DaprClientBuilder()).build()) { + Map secret = client.getSecret(SECRET_STORE_NAME, secretKey).block(); + System.out.println(JSON_SERIALIZER.writeValueAsString(secret)); +} +``` + +- For a full guide on secrets visit [How-To: Retrieve secrets]({{% ref howto-secrets.md %}}). +- Visit [Java SDK examples](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/secrets) for code samples and instructions to try out retrieving secrets + +### Actors
An actor is an isolated, independent unit of compute and state with single-threaded execution. Dapr provides an actor implementation based on the [Virtual Actor pattern](https://www.microsoft.com/research/project/orleans-virtual-actors/), which provides a single-threaded programming model and where actors are garbage collected when not in use. With Dapr's implementation, you write your Dapr actors according to the Actor model, and Dapr leverages the scalability and reliability that the underlying platform provides. + +```java +import io.dapr.actors.ActorMethod; +import io.dapr.actors.ActorType; +import reactor.core.publisher.Mono; + +@ActorType(name = "DemoActor") +public interface DemoActor { + + void registerReminder(); + + @ActorMethod(name = "echo_message") + String say(String something); + + void clock(String message); + + @ActorMethod(returns = Integer.class) + Mono incrementAndGet(int delta); +} +``` + +- For a full guide on actors visit [How-To: Use virtual actors in Dapr]({{% ref howto-actors.md %}}). 
+- Visit [Java SDK examples](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/actors) for code samples and instructions to try actors + +### Get & Subscribe to application configurations + +> Note this is a preview API and thus will only be accessible via the DaprPreviewClient interface and not the normal DaprClient interface + +```java +import io.dapr.client.DaprClientBuilder; +import io.dapr.client.DaprPreviewClient; +import io.dapr.client.domain.ConfigurationItem; +import io.dapr.client.domain.GetConfigurationRequest; +import io.dapr.client.domain.SubscribeConfigurationRequest; +import reactor.core.publisher.Flux; +import reactor.core.publisher.Mono; + +try (DaprPreviewClient client = (new DaprClientBuilder()).buildPreviewClient()) { + // Get configuration for a single key + Mono item = client.getConfiguration(CONFIG_STORE_NAME, CONFIG_KEY).block(); + + // Get configurations for multiple keys + Mono> items = + client.getConfiguration(CONFIG_STORE_NAME, CONFIG_KEY_1, CONFIG_KEY_2); + + // Subscribe to configuration changes + Flux outFlux = client.subscribeConfiguration(CONFIG_STORE_NAME, CONFIG_KEY_1, CONFIG_KEY_2); + outFlux.subscribe(configItems -> configItems.forEach(...)); + + // Unsubscribe from configuration changes + Mono unsubscribe = client.unsubscribeConfiguration(SUBSCRIPTION_ID, CONFIG_STORE_NAME) +} +``` + +- For a full list of configuration operations visit [How-To: Manage configuration from a store]({{% ref howto-manage-configuration.md %}}). +- Visit [Java SDK examples](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/configuration) for code samples and instructions to try out different configuration operations. 
+ +### Query saved state + +> Note this is a preview API and thus will only be accessible via the DaprPreviewClient interface and not the normal DaprClient interface + +```java +import io.dapr.client.DaprClient; +import io.dapr.client.DaprClientBuilder; +import io.dapr.client.DaprPreviewClient; +import io.dapr.client.domain.QueryStateItem; +import io.dapr.client.domain.QueryStateRequest; +import io.dapr.client.domain.QueryStateResponse; +import io.dapr.client.domain.query.Query; +import io.dapr.client.domain.query.Sorting; +import io.dapr.client.domain.query.filters.EqFilter; + +try (DaprClient client = builder.build(); DaprPreviewClient previewClient = builder.buildPreviewClient()) { + String searchVal = args.length == 0 ? "searchValue" : args[0]; + + // Create JSON data + Listing first = new Listing(); + first.setPropertyType("apartment"); + first.setId("1000"); + ... + Listing second = new Listing(); + second.setPropertyType("row-house"); + second.setId("1002"); + ... + Listing third = new Listing(); + third.setPropertyType("apartment"); + third.setId("1003"); + ... + Listing fourth = new Listing(); + fourth.setPropertyType("apartment"); + fourth.setId("1001"); + ... 
+ Map meta = new HashMap<>(); + meta.put("contentType", "application/json"); + + // Save state + SaveStateRequest request = new SaveStateRequest(STATE_STORE_NAME).setStates( + new State<>("1", first, null, meta, null), + new State<>("2", second, null, meta, null), + new State<>("3", third, null, meta, null), + new State<>("4", fourth, null, meta, null) + ); + client.saveBulkState(request).block(); + + + // Create query and query state request + + Query query = new Query() + .setFilter(new EqFilter<>("propertyType", "apartment")) + .setSort(Arrays.asList(new Sorting("id", Sorting.Order.DESC))); + QueryStateRequest request = new QueryStateRequest(STATE_STORE_NAME) + .setQuery(query); + + // Use preview client to call query state API + QueryStateResponse result = previewClient.queryState(request, MyData.class).block(); + + // View Query state response + System.out.println("Found " + result.getResults().size() + " items."); + for (QueryStateItem item : result.getResults()) { + System.out.println("Key: " + item.getKey()); + System.out.println("Data: " + item.getValue()); + } +} +``` +- For a full how-to on query state, visit [How-To: Query state]({{% ref howto-state-query-api.md %}}). +- Visit [Java SDK examples](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/querystate) for complete code sample. + +### Distributed lock + +```java +package io.dapr.examples.lock.grpc; + +import io.dapr.client.DaprClientBuilder; +import io.dapr.client.DaprPreviewClient; +import io.dapr.client.domain.LockRequest; +import io.dapr.client.domain.UnlockRequest; +import io.dapr.client.domain.UnlockResponseStatus; +import reactor.core.publisher.Mono; + +public class DistributedLockGrpcClient { + private static final String LOCK_STORE_NAME = "lockstore"; + + /** + * Executes various methods to check the different apis. 
+ * + * @param args arguments + * @throws Exception throws Exception + */ + public static void main(String[] args) throws Exception { + try (DaprPreviewClient client = (new DaprClientBuilder()).buildPreviewClient()) { + System.out.println("Using preview client..."); + tryLock(client); + unlock(client); + } + } + + /** + * Trying to get lock. + * + * @param client DaprPreviewClient object + */ + public static void tryLock(DaprPreviewClient client) { + System.out.println("*******trying to get a free distributed lock********"); + try { + LockRequest lockRequest = new LockRequest(LOCK_STORE_NAME, "resouce1", "owner1", 5); + Mono result = client.tryLock(lockRequest); + System.out.println("Lock result -> " + (Boolean.TRUE.equals(result.block()) ? "SUCCESS" : "FAIL")); + } catch (Exception ex) { + System.out.println(ex.getMessage()); + } + } + + /** + * Unlock a lock. + * + * @param client DaprPreviewClient object + */ + public static void unlock(DaprPreviewClient client) { + System.out.println("*******unlock a distributed lock********"); + try { + UnlockRequest unlockRequest = new UnlockRequest(LOCK_STORE_NAME, "resouce1", "owner1"); + Mono result = client.unlock(unlockRequest); + System.out.println("Unlock result ->" + result.block().name()); + } catch (Exception ex) { + System.out.println(ex.getMessage()); + } + } +} +``` + +- For a full how-to on distributed lock, visit [How-To: Use a Lock]({{% ref howto-use-distributed-lock.md %}}) +- Visit [Java SDK examples](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/lock) for complete code sample. + +### Workflow + +```java +package io.dapr.examples.workflows; + +import io.dapr.workflows.client.DaprWorkflowClient; +import io.dapr.workflows.client.WorkflowState; + +import java.time.Duration; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +/** + * For setup instructions, see the README. 
+ */ +public class DemoWorkflowClient { + + /** + * The main method. + * + * @param args Input arguments (unused). + * @throws InterruptedException If program has been interrupted. + */ + public static void main(String[] args) throws InterruptedException { + DaprWorkflowClient client = new DaprWorkflowClient(); + + try (client) { + String separatorStr = "*******"; + System.out.println(separatorStr); + String instanceId = client.scheduleNewWorkflow(DemoWorkflow.class, "input data"); + System.out.printf("Started new workflow instance with random ID: %s%n", instanceId); + + System.out.println(separatorStr); + System.out.println("**GetWorkflowMetadata:Running Workflow**"); + WorkflowState workflowMetadata = client.getWorkflowState(instanceId, true); + System.out.printf("Result: %s%n", workflowMetadata); + + System.out.println(separatorStr); + System.out.println("**WaitForWorkflowStart**"); + try { + WorkflowState waitForWorkflowStartResult = + client.waitForWorkflowStart(instanceId, Duration.ofSeconds(60), true); + System.out.printf("Result: %s%n", waitForWorkflowStartResult); + } catch (TimeoutException ex) { + System.out.printf("waitForWorkflowStart has an exception:%s%n", ex); + } + + System.out.println(separatorStr); + System.out.println("**SendExternalMessage**"); + client.raiseEvent(instanceId, "TestEvent", "TestEventPayload"); + + System.out.println(separatorStr); + System.out.println("** Registering parallel Events to be captured by allOf(t1,t2,t3) **"); + client.raiseEvent(instanceId, "event1", "TestEvent 1 Payload"); + client.raiseEvent(instanceId, "event2", "TestEvent 2 Payload"); + client.raiseEvent(instanceId, "event3", "TestEvent 3 Payload"); + System.out.printf("Events raised for workflow with instanceId: %s\n", instanceId); + + System.out.println(separatorStr); + System.out.println("** Registering Event to be captured by anyOf(t1,t2,t3) **"); + client.raiseEvent(instanceId, "e2", "event 2 Payload"); + System.out.printf("Event raised for workflow with 
instanceId: %s\n", instanceId); + + + System.out.println(separatorStr); + System.out.println("**waitForWorkflowCompletion**"); + try { + WorkflowState waitForWorkflowCompletionResult = + client.waitForWorkflowCompletion(instanceId, Duration.ofSeconds(60), true); + System.out.printf("Result: %s%n", waitForWorkflowCompletionResult); + } catch (TimeoutException ex) { + System.out.printf("waitForWorkflowCompletion has an exception:%s%n", ex); + } + + System.out.println(separatorStr); + System.out.println("**purgeWorkflow**"); + boolean purgeResult = client.purgeWorkflow(instanceId); + System.out.printf("purgeResult: %s%n", purgeResult); + + System.out.println(separatorStr); + System.out.println("**raiseEvent**"); + + String eventInstanceId = client.scheduleNewWorkflow(DemoWorkflow.class); + System.out.printf("Started new workflow instance with random ID: %s%n", eventInstanceId); + client.raiseEvent(eventInstanceId, "TestException", null); + System.out.printf("Event raised for workflow with instanceId: %s\n", eventInstanceId); + + System.out.println(separatorStr); + String instanceToTerminateId = "terminateMe"; + client.scheduleNewWorkflow(DemoWorkflow.class, null, instanceToTerminateId); + System.out.printf("Started new workflow instance with specified ID: %s%n", instanceToTerminateId); + + TimeUnit.SECONDS.sleep(5); + System.out.println("Terminate this workflow instance manually before the timeout is reached"); + client.terminateWorkflow(instanceToTerminateId, null); + System.out.println(separatorStr); + + String restartingInstanceId = "restarting"; + client.scheduleNewWorkflow(DemoWorkflow.class, null, restartingInstanceId); + System.out.printf("Started new workflow instance with ID: %s%n", restartingInstanceId); + System.out.println("Sleeping 30 seconds to restart the workflow"); + TimeUnit.SECONDS.sleep(30); + + System.out.println("**SendExternalMessage: RestartEvent**"); + client.raiseEvent(restartingInstanceId, "RestartEvent", "RestartEventPayload"); + + 
System.out.println("Sleeping 30 seconds to terminate the eternal workflow"); + TimeUnit.SECONDS.sleep(30); + client.terminateWorkflow(restartingInstanceId, null); + } + + System.out.println("Exiting DemoWorkflowClient."); + System.exit(0); + } +} +``` + +- For a full guide on workflows, visit: + - [How-To: Author workflows]({{% ref howto-author-workflow.md %}}). + - [How-To: Manage workflows]({{% ref howto-manage-workflow.md %}}). +- [Learn more about how to use workflows with the Java SDK]({{% ref java-workflow.md %}}). + +## Sidecar APIs + +#### Wait for sidecar +The `DaprClient` also provides a helper method to wait for the sidecar to become healthy (components only). When using +this method, be sure to specify a timeout in milliseconds and block() to wait for the result of a reactive operation. + +```java +// Wait for the Dapr sidecar to report healthy before attempting to use Dapr components. +try (DaprClient client = new DaprClientBuilder().build()) { + System.out.println("Waiting for Dapr sidecar ..."); + client.waitForSidecar(10000).block(); // Specify the timeout in milliseconds + System.out.println("Dapr sidecar is ready."); + ... +} + +// Perform Dapr component operations here i.e. fetching secrets or saving state. +``` + +### Shutdown the sidecar +```java +try (DaprClient client = new DaprClientBuilder().build()) { + logger.info("Sending shutdown request."); + client.shutdown().block(); + logger.info("Ensuring dapr has stopped."); + ... +} +``` + +Learn more about the [Dapr Java SDK packages available to add to your Java applications](https://dapr.github.io/java-sdk/). + +## Security + +### App API Token Authentication + +The building blocks like pubsub, input bindings, or jobs require Dapr to make incoming calls to your application, you can secure these requests using [Dapr App API Token Authentication]({{% ref app-api-token.md %}}). This ensures that only Dapr can invoke your application's endpoints. 
+ +#### Understanding the two tokens + +Dapr uses two different tokens for securing communication. See [Properties]({{% ref properties.md %}}) for detailed information about both tokens: + +- **`DAPR_API_TOKEN`** (Your app → Dapr sidecar): Automatically handled by the Java SDK when using `DaprClient` +- **`APP_API_TOKEN`** (Dapr → Your app): Requires server-side validation in your application + +The examples below show how to implement server-side validation for `APP_API_TOKEN`. + +#### Implementing server-side token validation + +When using gRPC protocol, implement a server interceptor to capture the metadata. + +```java +import io.grpc.Context; +import io.grpc.Contexts; +import io.grpc.Metadata; +import io.grpc.ServerCall; +import io.grpc.ServerCallHandler; +import io.grpc.ServerInterceptor; + +public class SubscriberGrpcService extends AppCallbackGrpc.AppCallbackImplBase { + public static final Context.Key METADATA_KEY = Context.key("grpc-metadata"); + + // gRPC interceptor to capture metadata + public static class MetadataInterceptor implements ServerInterceptor { + @Override + public ServerCall.Listener interceptCall( + ServerCall call, + Metadata headers, + ServerCallHandler next) { + Context contextWithMetadata = Context.current().withValue(METADATA_KEY, headers); + return Contexts.interceptCall(contextWithMetadata, call, headers, next); + } + } + + // Your service methods go here... 
+} +``` + +Register the interceptor when building your gRPC server: + +```java +Server server = ServerBuilder.forPort(port) + .intercept(new SubscriberGrpcService.MetadataInterceptor()) + .addService(new SubscriberGrpcService()) + .build(); +server.start(); +``` + +Then, in your service methods, extract the token from metadata: + +```java +@Override +public void onTopicEvent(DaprAppCallbackProtos.TopicEventRequest request, + StreamObserver responseObserver) { + try { + // Extract metadata from context + Context context = Context.current(); + Metadata metadata = METADATA_KEY.get(context); + + if (metadata != null) { + String apiToken = metadata.get( + Metadata.Key.of("dapr-api-token", Metadata.ASCII_STRING_MARSHALLER)); + + // Validate token accordingly + } + + // Process the request + // ... + + } catch (Throwable e) { + responseObserver.onError(e); + } +} +``` + +#### Using with HTTP endpoints + +For HTTP-based endpoints, extract the token from the headers: + +```java +@RestController +public class SubscriberController { + + @PostMapping(path = "/endpoint") + public Mono handleRequest( + @RequestBody(required = false) byte[] body, + @RequestHeader Map headers) { + return Mono.fromRunnable(() -> { + try { + // Extract the token from headers + String apiToken = headers.get("dapr-api-token"); + + // Validate token accordingly + + // Process the request + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } +} +``` + +#### Examples + +For working examples with pubsub, bindings, and jobs: +- [PubSub with App API Token Authentication](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/pubsub#app-api-token-authentication-optional) +- [Bindings with App API Token Authentication](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/bindings/http#app-api-token-authentication-optional) +- [Jobs with App API Token 
Authentication](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/jobs#app-api-token-authentication-optional) + +## Related links +- [Java SDK examples](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples) + +For a full list of SDK properties and how to configure them, visit [Properties]({{% ref properties.md %}}). diff --git a/sdkdocs/java/content/en/java-sdk-docs/java-client/properties.md b/sdkdocs/java/content/en/java-sdk-docs/java-client/properties.md new file mode 100644 index 00000000000..a83836bc860 --- /dev/null +++ b/sdkdocs/java/content/en/java-sdk-docs/java-client/properties.md @@ -0,0 +1,211 @@ +--- +type: docs +title: "Properties" +linkTitle: "Properties" +weight: 3001 +description: SDK-wide properties for configuring the Dapr Java SDK using environment variables and system properties +--- + +# Properties + +The Dapr Java SDK provides a set of global properties that control the behavior of the SDK. These properties can be configured using environment variables or system properties. System properties can be set using the `-D` flag when running your Java application. + +These properties affect the entire SDK, including clients and runtime. They control aspects such as: +- Sidecar connectivity (endpoints, ports) +- Security settings (TLS, API tokens) +- Performance tuning (timeouts, connection pools) +- Protocol settings (gRPC, HTTP) +- String encoding + +## Environment Variables + +The following environment variables are available for configuring the Dapr Java SDK: + +### Sidecar Endpoints + +When these variables are set, the client will automatically use them to connect to the Dapr sidecar. 
+ +| Environment Variable | Description | Default | +|---------------------|-------------|---------| +| `DAPR_GRPC_ENDPOINT` | The gRPC endpoint for the Dapr sidecar | `localhost:50001` | +| `DAPR_HTTP_ENDPOINT` | The HTTP endpoint for the Dapr sidecar | `localhost:3500` | +| `DAPR_GRPC_PORT` | The gRPC port for the Dapr sidecar (legacy, `DAPR_GRPC_ENDPOINT` takes precedence) | `50001` | +| `DAPR_HTTP_PORT` | The HTTP port for the Dapr sidecar (legacy, `DAPR_HTTP_ENDPOINT` takes precedence) | `3500` | + +### API Tokens + +Dapr supports two types of API tokens for securing communication: + +| Environment Variable | Description | Default | +|---------------------|-------------|---------| +| `DAPR_API_TOKEN` | API token for authenticating requests **from your app to the Dapr sidecar**. The Java SDK automatically includes this token in requests when using `DaprClient`. | `null` | +| `APP_API_TOKEN` | API token for authenticating requests **from Dapr to your app**. When set, Dapr includes this token in the `dapr-api-token` header/metadata when calling your application (for pubsub subscribers, input bindings, or job triggers). Your application must validate this token. | `null` | + +For implementation examples, see [App API Token Authentication]({{% ref "java-client#app-api-token-authentication" %}}). For more details, see [Dapr API token authentication](https://docs.dapr.io/operations/security/api-token/). + +### gRPC Configuration + +#### TLS Settings +For secure gRPC communication, you can configure TLS settings using the following environment variables: + +| Environment Variable | Description | Default | +|---------------------|-------------|---------| +| `DAPR_GRPC_TLS_INSECURE` | When set to "true", enables insecure TLS mode which still uses TLS but doesn't verify certificates. This uses InsecureTrustManagerFactory to trust all certificates. This should only be used for testing or in secure environments. 
| `false` | +| `DAPR_GRPC_TLS_CA_PATH` | Path to the CA certificate file. This is used for TLS connections to servers with self-signed certificates. | `null` | +| `DAPR_GRPC_TLS_CERT_PATH` | Path to the TLS certificate file for client authentication. | `null` | +| `DAPR_GRPC_TLS_KEY_PATH` | Path to the TLS private key file for client authentication. | `null` | + +#### Keepalive Settings +Configure gRPC keepalive behavior using these environment variables: + +| Environment Variable | Description | Default | +|---------------------|-------------|---------| +| `DAPR_GRPC_ENABLE_KEEP_ALIVE` | Whether to enable gRPC keepalive | `false` | +| `DAPR_GRPC_KEEP_ALIVE_TIME_SECONDS` | gRPC keepalive time in seconds | `10` | +| `DAPR_GRPC_KEEP_ALIVE_TIMEOUT_SECONDS` | gRPC keepalive timeout in seconds | `5` | +| `DAPR_GRPC_KEEP_ALIVE_WITHOUT_CALLS` | Whether to keep gRPC connection alive without calls | `true` | + +#### Inbound Message Settings +Configure gRPC inbound message settings using these environment variables: + +| Environment Variable | Description | Default | +|---------------------|-------------|---------| +| `DAPR_GRPC_MAX_INBOUND_MESSAGE_SIZE_BYTES` | Dapr's maximum inbound message size for gRPC in bytes. This value sets the maximum size of a gRPC message that can be received by the application | `4194304` | +| `DAPR_GRPC_MAX_INBOUND_METADATA_SIZE_BYTES` | Dapr's maximum inbound metadata size for gRPC in bytes | `8192` | + +### HTTP Client Configuration + +These properties control the behavior of the HTTP client used for communication with the Dapr sidecar: + +| Environment Variable | Description | Default | +|---------------------|-------------|---------| +| `DAPR_HTTP_CLIENT_READ_TIMEOUT_SECONDS` | Timeout in seconds for HTTP client read operations. This is the maximum time to wait for a response from the Dapr sidecar. | `60` | +| `DAPR_HTTP_CLIENT_MAX_REQUESTS` | Maximum number of concurrent HTTP requests that can be executed. 
Above this limit, requests will queue in memory waiting for running calls to complete. | `1024` | +| `DAPR_HTTP_CLIENT_MAX_IDLE_CONNECTIONS` | Maximum number of idle connections in the HTTP connection pool. This is the maximum number of connections that can remain idle in the pool. | `128` | + +### API Configuration + +These properties control the behavior of API calls made through the SDK: + +| Environment Variable | Description | Default | +|---------------------|-------------|---------| +| `DAPR_API_MAX_RETRIES` | Maximum number of retries for retriable exceptions when making API calls to the Dapr sidecar | `0` | +| `DAPR_API_TIMEOUT_MILLISECONDS` | Timeout in milliseconds for API calls to the Dapr sidecar. A value of 0 means no timeout. | `0` | + +### String Encoding + +| Environment Variable | Description | Default | +|---------------------|-------------|---------| +| `DAPR_STRING_CHARSET` | Character set used for string encoding/decoding in the SDK. Must be a valid Java charset name. | `UTF-8` | + +### System Properties + +All environment variables can be set as system properties using the `-D` flag. 
Here is the complete list of available system properties: + +| System Property | Description | Default | +|----------------|-------------|---------| +| `dapr.sidecar.ip` | IP address for the Dapr sidecar | `localhost` | +| `dapr.http.port` | HTTP port for the Dapr sidecar | `3500` | +| `dapr.grpc.port` | gRPC port for the Dapr sidecar | `50001` | +| `dapr.grpc.tls.cert.path` | Path to the gRPC TLS certificate | `null` | +| `dapr.grpc.tls.key.path` | Path to the gRPC TLS key | `null` | +| `dapr.grpc.tls.ca.path` | Path to the gRPC TLS CA certificate | `null` | +| `dapr.grpc.tls.insecure` | Whether to use insecure TLS mode | `false` | +| `dapr.grpc.endpoint` | gRPC endpoint for remote sidecar | `null` | +| `dapr.grpc.enable.keep.alive` | Whether to enable gRPC keepalive | `false` | +| `dapr.grpc.keep.alive.time.seconds` | gRPC keepalive time in seconds | `10` | +| `dapr.grpc.keep.alive.timeout.seconds` | gRPC keepalive timeout in seconds | `5` | +| `dapr.grpc.keep.alive.without.calls` | Whether to keep gRPC connection alive without calls | `true` | +| `dapr.http.endpoint` | HTTP endpoint for remote sidecar | `null` | +| `dapr.api.maxRetries` | Maximum number of retries for API calls | `0` | +| `dapr.api.timeoutMilliseconds` | Timeout for API calls in milliseconds | `0` | +| `dapr.api.token` | API token for authentication | `null` | +| `dapr.string.charset` | String encoding used in the SDK | `UTF-8` | +| `dapr.http.client.readTimeoutSeconds` | Timeout in seconds for HTTP client reads | `60` | +| `dapr.http.client.maxRequests` | Maximum number of concurrent HTTP requests | `1024` | +| `dapr.http.client.maxIdleConnections` | Maximum number of idle HTTP connections | `128` | + +## Property Resolution Order + +Properties are resolved in the following order: +1. Override values (if provided when creating a Properties instance) +2. System properties (set via `-D`) +3. Environment variables +4. Default values + +The SDK checks each source in order. 
If a value is invalid for the property type (e.g., non-numeric for a numeric property), the SDK will log a warning and try the next source. For example: + +```bash +# Invalid boolean value - will be ignored +java -Ddapr.grpc.enable.keep.alive=not-a-boolean -jar myapp.jar + +# Valid boolean value - will be used +export DAPR_GRPC_ENABLE_KEEP_ALIVE=false +``` + +In this case, the environment variable is used because the system property value is invalid. However, if both values are valid, the system property takes precedence: + +```bash +# Valid boolean value - will be used +java -Ddapr.grpc.enable.keep.alive=true -jar myapp.jar + +# Valid boolean value - will be ignored +export DAPR_GRPC_ENABLE_KEEP_ALIVE=false +``` + +Override values can be set using the `DaprClientBuilder` in two ways: + +1. Using individual property overrides (recommended for most cases): +```java +import io.dapr.config.Properties; + +// Set a single property override +DaprClient client = new DaprClientBuilder() + .withPropertyOverride(Properties.GRPC_ENABLE_KEEP_ALIVE, "true") + .build(); + +// Or set multiple property overrides +DaprClient client = new DaprClientBuilder() + .withPropertyOverride(Properties.GRPC_ENABLE_KEEP_ALIVE, "true") + .withPropertyOverride(Properties.HTTP_CLIENT_READ_TIMEOUT_SECONDS, "120") + .build(); +``` + +2. Using a Properties instance (useful when you have many properties to set at once): +```java +// Create a map of property overrides +Map overrides = new HashMap<>(); +overrides.put("dapr.grpc.enable.keep.alive", "true"); +overrides.put("dapr.http.client.readTimeoutSeconds", "120"); + +// Create a Properties instance with overrides +Properties properties = new Properties(overrides); + +// Use these properties when creating a client +DaprClient client = new DaprClientBuilder() + .withProperties(properties) + .build(); +``` + +For most use cases, you'll use system properties or environment variables. 
Override values are primarily used when you need different property values for different instances of the SDK in the same application. + +## Proxy Configuration + +You can configure proxy settings for your Java application using system properties. These are standard Java system properties that are part of Java's networking layer (`java.net` package), not specific to Dapr. They are used by Java's networking stack, including the HTTP client that Dapr's SDK uses. + +For detailed information about Java's proxy configuration, including all available properties and their usage, see the [Java Networking Properties documentation](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/net/doc-files/net-properties.html). + + +For example, here's how to configure a proxy: + +```bash +# Configure HTTP proxy - replace with your actual proxy server details +java -Dhttp.proxyHost=your-proxy-server.com -Dhttp.proxyPort=8080 -jar myapp.jar + +# Configure HTTPS proxy - replace with your actual proxy server details +java -Dhttps.proxyHost=your-proxy-server.com -Dhttps.proxyPort=8443 -jar myapp.jar +``` + +Replace `your-proxy-server.com` with your actual proxy server hostname or IP address, and adjust the port numbers to match your proxy server configuration. + +These proxy settings will affect all HTTP/HTTPS connections made by your Java application, including connections to the Dapr sidecar. \ No newline at end of file diff --git a/sdkdocs/java/content/en/java-sdk-docs/java-jobs/_index.md b/sdkdocs/java/content/en/java-sdk-docs/java-jobs/_index.md new file mode 100644 index 00000000000..9d017f77709 --- /dev/null +++ b/sdkdocs/java/content/en/java-sdk-docs/java-jobs/_index.md @@ -0,0 +1,7 @@ +--- +type: docs +title: "Jobs" +linkTitle: "Jobs" +weight: 3000 +description: With the Dapr Jobs package, you can interact with the Dapr Jobs APIs from a Java application to trigger future operations to run according to a predefined schedule with an optional payload. 
To get started, walk through the [Dapr Jobs]({{% ref java-jobs-howto.md %}}) how-to guide. +--- diff --git a/sdkdocs/java/content/en/java-sdk-docs/java-jobs/java-jobs-howto.md b/sdkdocs/java/content/en/java-sdk-docs/java-jobs/java-jobs-howto.md new file mode 100644 index 00000000000..e7c63462899 --- /dev/null +++ b/sdkdocs/java/content/en/java-sdk-docs/java-jobs/java-jobs-howto.md @@ -0,0 +1,164 @@ +--- +type: docs +title: "How to: Author and manage Dapr Jobs in the Java SDK" +linkTitle: "How to: Author and manage Jobs" +weight: 20000 +description: How to get up and running with Jobs using the Dapr Java SDK +--- + +As part of this demonstration we will schedule a Dapr Job. The scheduled job will trigger an endpoint registered in the +same app. With the [provided jobs example](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/jobs), you will: + +- Schedule a Job [Job scheduling example](https://github.com/dapr/java-sdk/blob/master/examples/src/main/java/io/dapr/examples/jobs/DemoJobsClient.java) +- Register an endpoint for the dapr sidecar to invoke at trigger time [Endpoint Registration](https://github.com/dapr/java-sdk/blob/master/examples/src/main/java/io/dapr/examples/jobs/DemoJobsSpringApplication.java) + +This example uses the default configuration from `dapr init` in [self-hosted mode](https://github.com/dapr/cli#install-dapr-on-your-local-machine-self-hosted). + +## Prerequisites + +- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started). +- Java JDK 11 (or greater): + - [Oracle JDK](https://www.oracle.com/java/technologies/downloads), or + - OpenJDK +- [Apache Maven](https://maven.apache.org/install.html), version 3.x. +- [Docker Desktop](https://www.docker.com/products/docker-desktop) + +## Set up the environment + +Clone the [Java SDK repo](https://github.com/dapr/java-sdk) and navigate into it. 
+ +```bash +git clone https://github.com/dapr/java-sdk.git +cd java-sdk +``` + +Run the following command to install the requirements for running the jobs example with the Dapr Java SDK. + +```bash +mvn clean install -DskipTests +``` + +From the Java SDK root directory, navigate to the examples' directory. + +```bash +cd examples +``` + +Run the Dapr sidecar. + +```sh +dapr run --app-id jobsapp --dapr-grpc-port 51439 --dapr-http-port 3500 --app-port 8080 +``` + +> Now, Dapr is listening for HTTP requests at `http://localhost:3500` and internal Jobs gRPC requests at `http://localhost:51439`. + +## Schedule and Get a job + +In the `DemoJobsClient` there are steps to schedule a job. Calling `scheduleJob` using the `DaprPreviewClient` +will schedule a job with the Dapr Runtime. + +```java +public class DemoJobsClient { + + /** + * The main method of this app to schedule and get jobs. + */ + public static void main(String[] args) throws Exception { + try (DaprPreviewClient client = new DaprClientBuilder().withPropertyOverrides(overrides).buildPreviewClient()) { + + // Schedule a job. + System.out.println("**** Scheduling a Job with name dapr-jobs-1 *****"); + ScheduleJobRequest scheduleJobRequest = new ScheduleJobRequest("dapr-job-1", + JobSchedule.fromString("* * * * * *")).setData("Hello World!".getBytes()); + client.scheduleJob(scheduleJobRequest).block(); + + System.out.println("**** Scheduling job dapr-jobs-1 completed *****"); + } + } +} +``` + +Call `getJob` to retrieve the job details that were previously created and scheduled. +``` +client.getJob(new GetJobRequest("dapr-job-1")).block() +``` + +Run the `DemoJobsClient` with the following command. 
+ +```sh +java -jar target/dapr-java-sdk-examples-exec.jar io.dapr.examples.jobs.DemoJobsClient +``` + +### Sample output +``` +**** Scheduling a Job with name dapr-jobs-1 ***** +**** Scheduling job dapr-jobs-1 completed ***** +**** Retrieving a Job with name dapr-jobs-1 ***** +``` + +## Set up an endpoint to be invoked when the job is triggered + +The `DemoJobsSpringApplication` class starts a Spring Boot application that registers the endpoints specified in the `JobsController`. +This endpoint acts like a callback for the scheduled job requests. + +```java +@RestController +public class JobsController { + + /** + * Handles jobs callback from Dapr. + * + * @param jobName name of the job. + * @param payload data from the job if payload exists. + * @return Empty Mono. + */ + @PostMapping("/job/{jobName}") + public Mono handleJob(@PathVariable("jobName") String jobName, + @RequestBody(required = false) byte[] payload) { + System.out.println("Job Name: " + jobName); + System.out.println("Job Payload: " + new String(payload)); + + return Mono.empty(); + } +} +``` + +Parameters: + +* `jobName`: The name of the triggered job. +* `payload`: Optional payload data associated with the job (as a byte array). + +Run the Spring Boot application with the following command. 
+ System.out.println("**** Delete a Job with name dapr-jobs-1 *****"); + client.deleteJob(new DeleteJobRequest("dapr-job-1")).block(); + } + } +} +``` + +## Next steps +- [Learn more about Jobs]({{% ref jobs-overview.md %}}) +- [Jobs API reference]({{% ref jobs_api.md %}}) \ No newline at end of file diff --git a/sdkdocs/java/content/en/java-sdk-docs/java-workflow/_index.md b/sdkdocs/java/content/en/java-sdk-docs/java-workflow/_index.md new file mode 100644 index 00000000000..ecfb7adeb6a --- /dev/null +++ b/sdkdocs/java/content/en/java-sdk-docs/java-workflow/_index.md @@ -0,0 +1,7 @@ +--- +type: docs +title: "Workflow" +linkTitle: "Workflow" +weight: 3000 +description: How to get up and running with the Dapr Workflow extension +--- diff --git a/sdkdocs/java/content/en/java-sdk-docs/java-workflow/java-workflow-howto.md b/sdkdocs/java/content/en/java-sdk-docs/java-workflow/java-workflow-howto.md new file mode 100644 index 00000000000..79c6e06d0c4 --- /dev/null +++ b/sdkdocs/java/content/en/java-sdk-docs/java-workflow/java-workflow-howto.md @@ -0,0 +1,284 @@ +--- +type: docs +title: "How to: Author and manage Dapr Workflow in the Java SDK" +linkTitle: "How to: Author and manage workflows" +weight: 20000 +description: How to get up and running with workflows using the Dapr Java SDK +--- + +Let's create a Dapr workflow and invoke it using the console. 
With the [provided workflow example](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/workflows), you will: + +- Execute the workflow instance using the [Java workflow worker](https://github.com/dapr/java-sdk/blob/master/examples/src/main/java/io/dapr/examples/workflows/DemoWorkflowWorker.java) +- Utilize the Java workflow client and API calls to [start and terminate workflow instances](https://github.com/dapr/java-sdk/blob/master/examples/src/main/java/io/dapr/examples/workflows/DemoWorkflowClient.java) + +This example uses the default configuration from `dapr init` in [self-hosted mode](https://github.com/dapr/cli#install-dapr-on-your-local-machine-self-hosted). + +## Prerequisites + +- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started). +- Java JDK 11 (or greater): + - [Oracle JDK](https://www.oracle.com/java/technologies/downloads), or + - OpenJDK +- [Apache Maven](https://maven.apache.org/install.html), version 3.x. + +- [Docker Desktop](https://www.docker.com/products/docker-desktop) + +- Verify you're using the latest proto bindings + +## Set up the environment + +Clone the Java SDK repo and navigate into it. + +```bash +git clone https://github.com/dapr/java-sdk.git +cd java-sdk +``` + +Run the following command to install the requirements for running this workflow sample with the Dapr Java SDK. + +```bash +mvn clean install +``` + +From the Java SDK root directory, navigate to the Dapr Workflow example. + +```bash +cd examples +``` + +## Run the `DemoWorkflowWorker` + +The `DemoWorkflowWorker` class registers an implementation of `DemoWorkflow` in Dapr's workflow runtime engine. In the `DemoWorkflowWorker.java` file, you can find the `DemoWorkflowWorker` class and the `main` method: + +```java +public class DemoWorkflowWorker { + + public static void main(String[] args) throws Exception { + // Register the Workflow with the runtime. 
+ WorkflowRuntime.getInstance().registerWorkflow(DemoWorkflow.class); + System.out.println("Start workflow runtime"); + WorkflowRuntime.getInstance().startAndBlock(); + System.exit(0); + } +} +``` + +In the code above: +- `WorkflowRuntime.getInstance().registerWorkflow()` registers `DemoWorkflow` as a workflow in the Dapr Workflow runtime. +- `WorkflowRuntime.getInstance().startAndBlock()` builds and starts the engine within the Dapr Workflow runtime. + +In the terminal, execute the following command to kick off the `DemoWorkflowWorker`: + +```sh +dapr run --app-id demoworkflowworker --resources-path ./components/workflows --dapr-grpc-port 50001 -- java -jar target/dapr-java-sdk-examples-exec.jar io.dapr.examples.workflows.DemoWorkflowWorker +``` + +**Expected output** + +``` +You're up and running! Both Dapr and your app logs will appear here. + +... + +== APP == Start workflow runtime +== APP == Sep 13, 2023 9:02:03 AM com.microsoft.durabletask.DurableTaskGrpcWorker startAndBlock +== APP == INFO: Durable Task worker is connecting to sidecar at 127.0.0.1:50001. +``` + +## Run the `DemoWorkflowClient` + +The `DemoWorkflowClient` starts instances of workflows that have been registered with Dapr. + +```java +public class DemoWorkflowClient { + + // ... 
+ public static void main(String[] args) throws InterruptedException { + DaprWorkflowClient client = new DaprWorkflowClient(); + + try (client) { + String separatorStr = "*******"; + System.out.println(separatorStr); + String instanceId = client.scheduleNewWorkflow(DemoWorkflow.class, "input data"); + System.out.printf("Started new workflow instance with random ID: %s%n", instanceId); + + System.out.println(separatorStr); + System.out.println("**GetInstanceMetadata:Running Workflow**"); + WorkflowState workflowMetadata = client.getWorkflowState(instanceId, true); + System.out.printf("Result: %s%n", workflowMetadata); + + System.out.println(separatorStr); + System.out.println("**WaitForWorkflowStart**"); + try { + WorkflowState waitForWorkflowStartResult = + client.waitForWorkflowStart(instanceId, Duration.ofSeconds(60), true); + System.out.printf("Result: %s%n", waitForWorkflowStartResult); + } catch (TimeoutException ex) { + System.out.printf("waitForWorkflowStart has an exception:%s%n", ex); + } + + System.out.println(separatorStr); + System.out.println("**SendExternalMessage**"); + client.raiseEvent(instanceId, "TestEvent", "TestEventPayload"); + + System.out.println(separatorStr); + System.out.println("** Registering parallel Events to be captured by allOf(t1,t2,t3) **"); + client.raiseEvent(instanceId, "event1", "TestEvent 1 Payload"); + client.raiseEvent(instanceId, "event2", "TestEvent 2 Payload"); + client.raiseEvent(instanceId, "event3", "TestEvent 3 Payload"); + System.out.printf("Events raised for workflow with instanceId: %s\n", instanceId); + + System.out.println(separatorStr); + System.out.println("** Registering Event to be captured by anyOf(t1,t2,t3) **"); + client.raiseEvent(instanceId, "e2", "event 2 Payload"); + System.out.printf("Event raised for workflow with instanceId: %s\n", instanceId); + + + System.out.println(separatorStr); + System.out.println("**waitForWorkflowCompletion**"); + try { + WorkflowState waitForWorkflowCompletionResult = + 
client.waitForWorkflowCompletion(instanceId, Duration.ofSeconds(60), true); + System.out.printf("Result: %s%n", waitForWorkflowCompletionResult); + } catch (TimeoutException ex) { + System.out.printf("waitForWorkflowCompletion has an exception:%s%n", ex); + } + + System.out.println(separatorStr); + System.out.println("**purgeWorkflow**"); + boolean purgeResult = client.purgeWorkflow(instanceId); + System.out.printf("purgeResult: %s%n", purgeResult); + + System.out.println(separatorStr); + System.out.println("**raiseEvent**"); + + String eventInstanceId = client.scheduleNewWorkflow(DemoWorkflow.class); + System.out.printf("Started new workflow instance with random ID: %s%n", eventInstanceId); + client.raiseEvent(eventInstanceId, "TestException", null); + System.out.printf("Event raised for workflow with instanceId: %s\n", eventInstanceId); + + System.out.println(separatorStr); + String instanceToTerminateId = "terminateMe"; + client.scheduleNewWorkflow(DemoWorkflow.class, null, instanceToTerminateId); + System.out.printf("Started new workflow instance with specified ID: %s%n", instanceToTerminateId); + + TimeUnit.SECONDS.sleep(5); + System.out.println("Terminate this workflow instance manually before the timeout is reached"); + client.terminateWorkflow(instanceToTerminateId, null); + System.out.println(separatorStr); + + String restartingInstanceId = "restarting"; + client.scheduleNewWorkflow(DemoWorkflow.class, null, restartingInstanceId); + System.out.printf("Started new workflow instance with ID: %s%n", restartingInstanceId); + System.out.println("Sleeping 30 seconds to restart the workflow"); + TimeUnit.SECONDS.sleep(30); + + System.out.println("**SendExternalMessage: RestartEvent**"); + client.raiseEvent(restartingInstanceId, "RestartEvent", "RestartEventPayload"); + + System.out.println("Sleeping 30 seconds to terminate the eternal workflow"); + TimeUnit.SECONDS.sleep(30); + client.terminateWorkflow(restartingInstanceId, null); + } + + 
System.out.println("Exiting DemoWorkflowClient."); + System.exit(0); + } +} +``` + +In a second terminal window, start the workflow by running the following command: + +```sh +java -jar target/dapr-java-sdk-examples-exec.jar io.dapr.examples.workflows.DemoWorkflowClient +``` + +**Expected output** + +``` +******* +Started new workflow instance with random ID: 0b4cc0d5-413a-4c1c-816a-a71fa24740d4 +******* +**GetInstanceMetadata:Running Workflow** +Result: [Name: 'io.dapr.examples.workflows.DemoWorkflow', ID: '0b4cc0d5-413a-4c1c-816a-a71fa24740d4', RuntimeStatus: RUNNING, CreatedAt: 2023-09-13T13:02:30.547Z, LastUpdatedAt: 2023-09-13T13:02:30.699Z, Input: '"input data"', Output: ''] +******* +**WaitForWorkflowStart** +Result: [Name: 'io.dapr.examples.workflows.DemoWorkflow', ID: '0b4cc0d5-413a-4c1c-816a-a71fa24740d4', RuntimeStatus: RUNNING, CreatedAt: 2023-09-13T13:02:30.547Z, LastUpdatedAt: 2023-09-13T13:02:30.699Z, Input: '"input data"', Output: ''] +******* +**SendExternalMessage** +******* +** Registering parallel Events to be captured by allOf(t1,t2,t3) ** +Events raised for workflow with instanceId: 0b4cc0d5-413a-4c1c-816a-a71fa24740d4 +******* +** Registering Event to be captured by anyOf(t1,t2,t3) ** +Event raised for workflow with instanceId: 0b4cc0d5-413a-4c1c-816a-a71fa24740d4 +******* +**WaitForWorkflowCompletion** +Result: [Name: 'io.dapr.examples.workflows.DemoWorkflow', ID: '0b4cc0d5-413a-4c1c-816a-a71fa24740d4', RuntimeStatus: FAILED, CreatedAt: 2023-09-13T13:02:30.547Z, LastUpdatedAt: 2023-09-13T13:02:55.054Z, Input: '"input data"', Output: ''] +******* +**purgeWorkflow** +purgeResult: true +******* +**raiseEvent** +Started new workflow instance with random ID: 7707d141-ebd0-4e54-816e-703cb7a52747 +Event raised for workflow with instanceId: 7707d141-ebd0-4e54-816e-703cb7a52747 +******* +Started new workflow instance with specified ID: terminateMe +Terminate this workflow instance manually before the timeout is reached +******* +Started new workflow 
instance with ID: restarting +Sleeping 30 seconds to restart the workflow +**SendExternalMessage: RestartEvent** +Sleeping 30 seconds to terminate the eternal workflow +Exiting DemoWorkflowClient. +``` + +## What happened? + +1. When you ran `dapr run`, the workflow worker registered the workflow (`DemoWorkflow`) and its activities to the Dapr Workflow engine. +1. When you ran `java`, the workflow client started the workflow instance with the following activities. You can follow along with the output in the terminal where you ran `dapr run`. + 1. The workflow is started, raises three parallel tasks, and waits for them to complete. + 1. The workflow client calls the activity and sends the "Hello Activity" message to the console. + 1. The workflow times out and is purged. + 1. The workflow client starts a new workflow instance with a random ID, uses another workflow instance called `terminateMe` to terminate it, and restarts it with the workflow called `restarting`. + 1. The workflow client is then exited. + +## Next steps +- [Learn more about Dapr workflow]({{% ref workflow-overview.md %}}) +- [Workflow API reference]({{% ref workflow_api.md %}}) + +## Advanced features + +### Task Execution Keys + +Task execution keys are unique identifiers generated by the durabletask-java library. They are stored in the `WorkflowActivityContext` and can be used to track and manage the execution of workflow activities. They are particularly useful for: + +1. **Idempotency**: Ensuring activities are not executed multiple times for the same task +2. **State Management**: Tracking the state of activity execution +3. 
**Error Handling**: Managing retries and failures in a controlled manner + +Here's an example of how to use task execution keys in your workflow activities: + +```java +public class TaskExecutionKeyActivity implements WorkflowActivity { + @Override + public Object run(WorkflowActivityContext ctx) { + // Get the task execution key for this activity + String taskExecutionKey = ctx.getTaskExecutionKey(); + + // Use the key to implement idempotency or state management + // For example, check if this task has already been executed + if (isTaskAlreadyExecuted(taskExecutionKey)) { + return getPreviousResult(taskExecutionKey); + } + + // Execute the activity logic + Object result = executeActivityLogic(); + + // Store the result with the task execution key + storeResult(taskExecutionKey, result); + + return result; + } +} +``` diff --git a/sdkdocs/java/content/en/java-sdk-docs/spring-boot/_index.md b/sdkdocs/java/content/en/java-sdk-docs/spring-boot/_index.md new file mode 100644 index 00000000000..506ddcb7bad --- /dev/null +++ b/sdkdocs/java/content/en/java-sdk-docs/spring-boot/_index.md @@ -0,0 +1,303 @@ +--- +type: docs +title: "Getting started with the Dapr and Spring Boot" +linkTitle: "Spring Boot Integration" +weight: 4000 +description: How to get started with Dapr and Spring Boot +--- + +By combining Dapr and Spring Boot, we can create infrastructure independent Java applications that can be deployed across different environments, supporting a wide range of on-premises and cloud provider services. + +First, we will start with a simple integration covering the `DaprClient` and the [Testcontainers](https://testcontainers.com/) integration, to then use Spring and Spring Boot mechanisms and programming model to leverage the Dapr APIs under the hood. 
This helps teams to remove dependencies such as clients and drivers required to connect to environment-specific infrastructure (databases, key-value stores, message brokers, configuration/secret stores, etc) + +{{% alert title="Note" color="primary" %}} +The Spring Boot integration requires Spring Boot 3.x+ to work. This will not work with Spring Boot 2.x. +The Spring Boot integration remains in alpha. We need your help and feedback to graduate it. +Please join the [#java-sdk discord channel](https://discord.com/channels/778680217417809931/778749797242765342) discussion or open issues in the [dapr/java-sdk](https://github.com/dapr/java-sdk/issues). + +{{% /alert %}} + + +## Adding the Dapr and Spring Boot integration to your project + +If you already have a Spring Boot application, you can directly add the following dependencies to your project: + +``` + + io.dapr.spring + dapr-spring-boot-starter + 1.16.0 + + + io.dapr.spring + dapr-spring-boot-starter-test + 1.16.0 + test + +``` + +You can find the [latest released version here](https://central.sonatype.com/artifact/io.dapr.spring/dapr-spring-boot-starter). + +By adding these dependencies, you can: +- Autowire a `DaprClient` to use inside your applications +- Use the Spring Data and Messaging abstractions and programming model that uses the Dapr APIs under the hood +- Improve your inner-development loop by relying on [Testcontainers](https://testcontainers.com/) to bootstrap Dapr Control plane services and default components + +Once these dependencies are in your application, you can rely on Spring Boot autoconfiguration to autowire a `DaprClient` instance: + +```java +@Autowired +private DaprClient daprClient; + +``` + +This will connect to the default Dapr gRPC endpoint `localhost:50001`, requiring you to start Dapr outside of your application. 
+ +{{% alert title="Note" color="primary" %}} +By default, the following properties are preconfigured for `DaprClient` and `DaprWorkflowClient`: +```properties +dapr.client.httpEndpoint=http://localhost +dapr.client.httpPort=3500 +dapr.client.grpcEndpoint=localhost +dapr.client.grpcPort=50001 +dapr.client.apiToken= +``` +These values are used by default, but you can override them in your `application.properties` file to suit your environment. Please note that both kebab case and camel case are supported. +{{% /alert %}} + +You can use the `DaprClient` to interact with the Dapr APIs anywhere in your application, for example from inside a REST endpoint: + +```java +@RestController +public class DemoRestController { + @Autowired + private DaprClient daprClient; + + @PostMapping("/store") + public void storeOrder(@RequestBody Order order){ + daprClient.saveState("kvstore", order.orderId(), order).block(); + } +} + +record Order(String orderId, Integer amount){} +``` + +If you want to avoid managing Dapr outside of your Spring Boot application, you can rely on [Testcontainers](https://testcontainers.com/) to bootstrap Dapr beside your application for development purposes. +To do this we can create a test configuration that uses `Testcontainers` to bootstrap all we need to develop our applications using the Dapr APIs. + +Using [Testcontainers](https://testcontainers.com/) and Dapr integrations, we let the `@TestConfiguration` bootstrap Dapr for our applications. +Notice that for this example, we are configuring Dapr with a Statestore component called `kvstore` that connects to an instance of `PostgreSQL` also bootstrapped by Testcontainers. 
+ +```java +@TestConfiguration(proxyBeanMethods = false) +public class DaprTestContainersConfig { + @Bean + @ServiceConnection + public DaprContainer daprContainer(Network daprNetwork, PostgreSQLContainer postgreSQLContainer){ + + return new DaprContainer("daprio/daprd:1.16.0-rc.5") + .withAppName("producer-app") + .withNetwork(daprNetwork) + .withComponent(new Component("kvstore", "state.postgresql", "v1", STATE_STORE_PROPERTIES)) + .withComponent(new Component("kvbinding", "bindings.postgresql", "v1", BINDING_PROPERTIES)) + .dependsOn(postgreSQLContainer); + } +} +``` + +Inside the test classpath you can add a new Spring Boot Application that uses this configuration for tests: + +```java +@SpringBootApplication +public class TestProducerApplication { + + public static void main(String[] args) { + + SpringApplication + .from(ProducerApplication::main) + .with(DaprTestContainersConfig.class) + .run(args); + } + +} +``` + +Now you can start your application with: +```bash +mvn spring-boot:test-run +``` + +Running this command will start the application, using the provided test configuration that includes the Testcontainers and Dapr integration. In the logs you should be able to see that the `daprd` and the `placement` service containers were started for your application. + +Besides the previous configuration (`DaprTestContainersConfig`) your tests shouldn't be testing Dapr itself, just the REST endpoints that your application is exposing. + + +## Leveraging Spring & Spring Boot programming model with Dapr + +The Java SDK allows you to interface with all of the [Dapr building blocks]({{% ref building-blocks %}}). +But if you want to leverage the Spring and Spring Boot programming model you can use the `dapr-spring-boot-starter` integration. 
+This includes implementations of Spring Data (`KeyValueTemplate` and `CrudRepository`) as well as a `DaprMessagingTemplate` for producing and consuming messages +(similar to [Spring Kafka](https://spring.io/projects/spring-kafka), [Spring Pulsar](https://spring.io/projects/spring-pulsar) and [Spring AMQP for RabbitMQ](https://spring.io/projects/spring-amqp)) and Dapr workflows. + +## Using Spring Data `CrudRepository` and `KeyValueTemplate` + +You can use well known Spring Data constructs relying on a Dapr-based implementation. +With Dapr, you don't need to add any infrastructure-related driver or client, making your Spring application lighter and decoupled from the environment where it is running. + +Under the hood these implementations use the Dapr Statestore and Binding APIs. + +### Configuration parameters + +With Spring Data abstractions you can configure which statestore and bindings will be used by Dapr to connect to the available infrastructure. +This can be done by setting the following properties: + +```properties +dapr.statestore.name=kvstore +dapr.statestore.binding=kvbinding +``` + +Then you can `@Autowire` a `KeyValueTemplate` or a `CrudRepository` like this: + +```java +@RestController +@EnableDaprRepositories +public class OrdersRestController { + @Autowired + private OrderRepository repository; + + @PostMapping("/orders") + public void storeOrder(@RequestBody Order order){ + repository.save(order); + } + + @GetMapping("/orders") + public Iterable getAll(){ + return repository.findAll(); + } + + +} +``` + +Where `OrderRepository` is defined in an interface that extends the Spring Data `CrudRepository` interface: + +```java +public interface OrderRepository extends CrudRepository {} +``` + +Notice that the `@EnableDaprRepositories` annotation does all the magic of wiring the Dapr APIs under the `CrudRepository` interface. 
+Because Dapr allows users to interact with different StateStores from the same application, as a user you need to provide the following beans as a Spring Boot `@Configuration`: + +```java +@Configuration +@EnableConfigurationProperties({DaprStateStoreProperties.class}) +public class ProducerAppConfiguration { + + @Bean + public KeyValueAdapterResolver keyValueAdapterResolver(DaprClient daprClient, ObjectMapper mapper, DaprStateStoreProperties daprStatestoreProperties) { + String storeName = daprStatestoreProperties.getName(); + String bindingName = daprStatestoreProperties.getBinding(); + + return new DaprKeyValueAdapterResolver(daprClient, mapper, storeName, bindingName); + } + + @Bean + public DaprKeyValueTemplate daprKeyValueTemplate(KeyValueAdapterResolver keyValueAdapterResolver) { + return new DaprKeyValueTemplate(keyValueAdapterResolver); + } + +} +``` + +## Using Spring Messaging for producing and consuming events + +Similar to Spring Kafka, Spring Pulsar and Spring AMQP you can use the `DaprMessagingTemplate` to publish messages to the configured infrastructure. To consume messages you can use the `@Topic` annotation (soon to be renamed to `@DaprListener`). + +To publish events/messages you can `@Autowired` the `DaprMessagingTemplate` in your Spring application. +For this example we will be publishing `Order` events and we are sending messages to the topic named `topic`. + +```java +@Autowired +private DaprMessagingTemplate messagingTemplate; + +@PostMapping("/orders") +public void storeOrder(@RequestBody Order order){ + repository.save(order); + messagingTemplate.send("topic", order); +} + +``` + +Similarly to the `CrudRepository` we need to specify which PubSub broker we want to use to publish and consume our messages. 
+ +```properties +dapr.pubsub.name=pubsub +``` + +Because with Dapr you can connect to multiple PubSub brokers you need to provide the following bean to let Dapr know which PubSub broker your `DaprMessagingTemplate` will use: +```java +@Bean +public DaprMessagingTemplate messagingTemplate(DaprClient daprClient, + DaprPubSubProperties daprPubSubProperties) { + return new DaprMessagingTemplate<>(daprClient, daprPubSubProperties.getName()); +} +``` + +Finally, because Dapr PubSub requires a bidirectional connection between your application and Dapr you need to expand your Testcontainers configuration with a few parameters: + +```java +@Bean +@ServiceConnection +public DaprContainer daprContainer(Network daprNetwork, PostgreSQLContainer postgreSQLContainer, RabbitMQContainer rabbitMQContainer){ + + return new DaprContainer("daprio/daprd:1.16.0-rc.5") + .withAppName("producer-app") + .withNetwork(daprNetwork) + .withComponent(new Component("kvstore", "state.postgresql", "v1", STATE_STORE_PROPERTIES)) + .withComponent(new Component("kvbinding", "bindings.postgresql", "v1", BINDING_PROPERTIES)) + .withComponent(new Component("pubsub", "pubsub.rabbitmq", "v1", rabbitMqProperties)) + .withAppPort(8080) + .withAppChannelAddress("host.testcontainers.internal") + .dependsOn(rabbitMQContainer) + .dependsOn(postgreSQLContainer); +} +``` + +Now, in the Dapr configuration we have included a `pubsub` component that will connect to an instance of RabbitMQ started by Testcontainers. +We have also set two important parameters `.withAppPort(8080)` and `.withAppChannelAddress("host.testcontainers.internal")` which allows Dapr to +contact back to the application when a message is published in the broker. + +To listen to events/messages you need to expose an endpoint in the application that will be responsible to receive the messages. 
+If you expose a REST endpoint you can use the `@Topic` annotation to let Dapr know where it needs to forward the events/messages too: + +```java +@PostMapping("subscribe") +@Topic(pubsubName = "pubsub", name = "topic") +public void subscribe(@RequestBody CloudEvent cloudEvent){ + events.add(cloudEvent); +} +``` + +Upon bootstrapping your application, Dapr will register the subscription to messages to be forwarded to the `subscribe` endpoint exposed by your application. + +If you are writing tests for these subscribers you need to ensure that Testcontainers knows that your application will be running on port 8080, +so containers started with Testcontainers know where your application is: + +```java +@BeforeAll +public static void setup(){ + org.testcontainers.Testcontainers.exposeHostPorts(8080); +} +``` + +You can check and run the [full example source code here](https://github.com/salaboy/dapr-spring-boot-docs-examples). + + +## Next steps + +Learn more about the [Dapr Java SDK packages available to add to your Java applications](https://dapr.github.io/java-sdk/). + +Check the How To guide for [Dapr Workflows using Spring Boot and Testcontainers for a local workflow development experience](sb-workflows-howto.md). 
+ +## Related links +- [Java SDK examples](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples) diff --git a/sdkdocs/java/content/en/java-sdk-docs/spring-boot/sb-workflows-howto.md b/sdkdocs/java/content/en/java-sdk-docs/spring-boot/sb-workflows-howto.md new file mode 100644 index 00000000000..4fcd973db2b --- /dev/null +++ b/sdkdocs/java/content/en/java-sdk-docs/spring-boot/sb-workflows-howto.md @@ -0,0 +1,86 @@ +--- +type: docs +title: "How to: Author and manage Dapr Workflow with Spring Boot" +linkTitle: "How to: Author and manage workflows with Spring Boot" +weight: 40000 +description: How to get up and running with workflows using the Spring Boot integration +--- + + +Following the same approach that we used for Spring Data and Spring Messaging, the [`dapr-spring-boot-starter`](_index.md) brings Dapr Workflow integration for Spring Boot users. + +With Dapr Workflows you define complex orchestrations (workflows) in Java code. The Dapr Spring Boot Starter makes your development easier by managing `Workflow`s and `WorkflowActivity`s as Spring Beans. + +In order to enable the automatic bean discovery you annotate your `@SpringBootApplication` with the `@EnableDaprWorkflows` annotation: + +``` +@SpringBootApplication +@EnableDaprWorkflows +public class MySpringBootApplication { + ... +} +``` + +By adding this annotation, all the `Workflow`s and `WorkflowActivity`s beans are automatically discovered by Spring and registered to the workflow engine. + +## Creating Workflows and Activities + +Inside your Spring Boot application you can define as many workflows as you want. You do that by creating new implementations of the `Workflow` interface. + +``` +@Component +public class MyWorkflow implements Workflow { + + @Override + public WorkflowStub create() { + return ctx -> { + + }; + } + +} +``` + +From inside your workflow definitions, you can perform service to service interactions, schedule timers or receive external events. 
+ +By having all `WorkflowActivity`s as managed beans you can use the Spring `@Autowired` mechanism to inject any bean that the workflow activity might need to implement its functionality. For example, the `RestTemplate`: + +``` +@Component +public class MyWorkflowActivity implements WorkflowActivity { + + @Autowired + private RestTemplate restTemplate; +``` + +## Creating and interacting with Workflows + + +To create and interact with Workflow instances you use the `DaprWorkflowClient` that you can also `@Autowired`. + +``` +@Autowired +private DaprWorkflowClient daprWorkflowClient; +``` + +Applications can now schedule new workflow instances and raise events. + +``` +String instanceId = daprWorkflowClient.scheduleNewWorkflow(MyWorkflow.class, payload); +``` + +and + +``` +daprWorkflowClient.raiseEvent(instanceId, "MyEvent", event); +``` + +[Check a full example here](https://github.com/dapr/java-sdk/blob/master/spring-boot-examples/workflows/patterns/src/main/java/io/dapr/springboot/examples/wfp/chain/ChainWorkflow.java) + + + +## Next Steps & Resources + +Check the blog post from [Baeldung covering Dapr Workflows and Dapr Pubsub](https://www.baeldung.com/dapr-workflows-pubsub) with a full working example. + +Check the [Dapr Workflow documentation](https://docs.dapr.io/developing-applications/building-blocks/workflow/workflow-overview/) for more information about how to work with Dapr Workflows. 
\ No newline at end of file diff --git a/sdkdocs/js b/sdkdocs/js deleted file mode 160000 index 26e8be8931a..00000000000 --- a/sdkdocs/js +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 26e8be8931aed2404e0e382b6c61264d1b64f0de diff --git a/sdkdocs/js/README.md b/sdkdocs/js/README.md new file mode 100644 index 00000000000..74f25a7b3c6 --- /dev/null +++ b/sdkdocs/js/README.md @@ -0,0 +1,25 @@ +# Dapr JavaScript SDK documentation + +This page covers how the documentation is structured for the Dapr JavaScript SDK + +## Dapr Docs + +All Dapr documentation is hosted at [docs.dapr.io](https://docs.dapr.io), including the docs for the [JavaScript SDK](https://docs.dapr.io/developing-applications/sdks/javascript/). Head over there if you want to read the docs. + +### JavaScript SDK docs source + +Although the docs site code and content is in the [docs repo](https://github.com/dapr/docs), the JavaScript SDK content and images are within the `content` and `static` directories, respectively. + +This allows separation of roles and expertise between maintainers, and makes it easy to find the docs files you are looking for. + +## Writing JavaScript SDK docs + +To get up and running to write JavaScript SDK docs, visit the [docs repo](https://github.com/dapr/docs) to initialize your environment. It will clone both the docs repo and this repo, so you can make changes and see it rendered within the site instantly, as well as commit and PR into this repo. + +Make sure to read the [docs contributing guide](https://docs.dapr.io/contributing/contributing-docs/) for information on style/semantics/etc. + +## Docs architecture + +The docs site is built on [Hugo](https://gohugo.io), which lives in the docs repo. This repo is setup as a git submodule so that when the repo is cloned and initialized, the javascript-sdk repo, along with the docs, are cloned as well. 
+ +Then, in the Hugo configuration file, the `daprdocs/content` and `daprdocs/static` directories are redirected to the `daprdocs/developing-applications/sdks/javascript` and `static/javascript` directories, respectively. Thus, all the content within this repo is folded into the main docs site. diff --git a/sdkdocs/js/content/en/js-sdk-contributing/js-contributing.md b/sdkdocs/js/content/en/js-sdk-contributing/js-contributing.md new file mode 100644 index 00000000000..e7ad14e510c --- /dev/null +++ b/sdkdocs/js/content/en/js-sdk-contributing/js-contributing.md @@ -0,0 +1,130 @@ +--- +type: docs +title: "Contributing to the JavaScript SDK" +linkTitle: "JavaScript SDK" +weight: 3000 +description: Guidelines for contributing to the Dapr JavaScript SDK +--- + +When contributing to the [JavaScript SDK](https://github.com/dapr/js-sdk) the following rules and best-practices should be followed. + +💡 You can run `npm pretty-fix` to run prettier on all your files + +## Commit Guidelines + +The Dapr Javascript SDK uses the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) +specification. The automatic changelog tool uses these to automatically generate +a changelog based on the commit messages. Here's a guide to writing a commit message +to allow this: + +### Format + +``` +type(scope)!: subject +``` + +- `type`: the type of the commit is one of the following: + + - `feat`: new features. + - `fix`: bug fixes. + - `docs`: documentation changes. + - `refactor`: refactor of a particular code section without introducing + new features or bug fixes. + - `style`: code style improvements. + - `perf`: performance improvements. + - `test`: changes to the test suite. + - `ci`: changes to the CI system. + - `build`: changes to the build system (we don't yet have one so this shouldn't apply). + - `chore`: for other changes that don't match previous types. This doesn't appear + in the changelog. + +- `scope`: section of the codebase that the commit makes changes to. 
If it makes changes to + many sections, or if no section in particular is modified, leave blank without the parentheses. + Examples: + + - Commit that adds a `test`: + + ``` + test(actors): add an actor test + ``` + + - Commit that changes many things at once: + + ``` + style: adopt eslint + ``` + + For changes to examples, the scope should be the example name with the `examples/` prefix: + + - ❌ `fix(agnoster): commit subject` + - ✅ `fix(examples/http/actor): commit subject` + +- `!`: this goes after the `scope` (or the `type` if scope is empty), to indicate that the commit + introduces breaking changes. + + Optionally, you can specify a message that the changelog tool will display to the user to indicate + what's changed and what they can do to deal with it. You can use multiple lines to type this message; + the changelog parser will keep reading until the end of the commit message or until it finds an empty + line. + + Example (made up): + + ``` + style(agnoster)!: change dirty git repo glyph + + BREAKING CHANGE: the glyph to indicate when a git repository is dirty has + changed from a Powerline character to a standard UTF-8 emoji. + + Fixes #420 + + Co-authored-by: Username + ``` + +- `subject`: a brief description of the changes. This will be displayed in the changelog. If you need + to specify other details you can use the commit body but it won't be visible. + + Formatting tricks: the commit subject may contain: + + - Links to related issues or PRs by writing `#issue`. This will be highlighted by the changelog tool: + + ``` + feat(archlinux): add support for aura AUR helper (#9467) + ``` + + - Formatted inline code by using backticks: the text inbetween backticks will also be highlighted by + the changelog tool: + ``` + feat(shell-proxy): enable unexported `DEFAULT_PROXY` setting (#9774) + ``` + +### Style + +Try to keep the first commit line short. 
This is harder to do using this commit style but try to be +concise and if you need more space, you can use the commit body. Try to make sure that the commit +subject is clear and precise enough that users will know what changed by just looking at the changelog. + +## GitHub Dapr Bot Commands + +Check out the [daprbot documentation](https://docs.dapr.io/contributing/daprbot/) for GitHub commands you can run in this repo for common tasks. For example, you can run the `/assign` (as a comment on an issue) to assign issues to a user or group of users. + +## Coding Rules + +To ensure consistency throughout the source code, keep these rules in mind as you are working: + +- All features or bug fixes **must be tested** by one or more specs (unit-tests). +- All public API methods **must be documented**. +- We follow [ESLint RecommendedRules](https://eslint.org/docs/rules/). + +## Examples + +The `examples` directory contains code samples for users to run to try out specific functionality of the various JavaScript SDK packages and extensions. When writing new and updated samples keep in mind: + +- All examples should be runnable on Windows, Linux, and macOS. While JavaScript code is consistent among operating systems, any pre/post example commands should provide options through [tabpane]({{% ref "contributing-docs.md#tabbed-content" %}}). +- Contain steps to download/install any required pre-requisites. Someone coming in with a fresh OS install should be able to start on the example and complete it without an error. Links to external download pages are fine. + +## Docs + +The `daprdocs` directory contains the markdown files that are rendered into the [Dapr Docs](https://docs.dapr.io) website. When the documentation website is built, this repo is cloned and configured so that its contents are rendered with the docs content. When writing docs, keep in mind: + +- All rules in the [docs guide]({{% ref contributing-docs.md %}}) should be followed in addition to these. 
+- All files and directories should be prefixed with `js-` to ensure all file/directory names are globally unique across all Dapr documentation. diff --git a/sdkdocs/js/content/en/js-sdk-docs/_index.md b/sdkdocs/js/content/en/js-sdk-docs/_index.md new file mode 100644 index 00000000000..565779d48c5 --- /dev/null +++ b/sdkdocs/js/content/en/js-sdk-docs/_index.md @@ -0,0 +1,84 @@ +--- +type: docs +title: "JavaScript SDK" +linkTitle: "JavaScript" +weight: 1000 +description: JavaScript SDK packages for developing Dapr applications +no_list: true +cascade: + github_repo: https://github.com/dapr/js-sdk + github_subdir: daprdocs/content/en/js-sdk-docs + path_base_for_github_subdir: content/en/developing-applications/sdks/js/ + github_branch: main +--- + +A client library for building Dapr apps in JavaScript and TypeScript. This client abstracts the public Dapr APIs like service to service invocation, state management, pub/sub, secrets, and much more, and provides a simple, intuitive API for building applications. + +## Installation + +To get started with the JavaScript SDK, install the Dapr JavaScript SDK package from [NPM](https://www.npmjs.com/package/@dapr/dapr): + +```bash +npm install --save @dapr/dapr +``` + +## Structure + +The Dapr JavaScript SDK contains two major components: + +- **DaprServer**: to manage all Dapr sidecar to application communication. +- **DaprClient**: to manage all application to Dapr sidecar communication. + +The above communication can be configured to use either of the gRPC or HTTP protocols. + + + + + + +
Dapr Server Dapr Client
+ +## Getting Started + +To help you get started, check out the resources below: + +
+
+
+
Client
+

Create a JavaScript client and interact with the Dapr sidecar and other Dapr applications (e.g., publishing events, output binding support, etc.).

+ +
+
+
+
+
Server
+

Create a JavaScript server and let the Dapr sidecar interact with your application (e.g., subscribing to events, input binding support, etc.).

+ +
+
+
+
+
Actors
+

Create virtual actors with state, reminders/timers, and methods.

+ +
+
+
+
+
+
+
+
Logging
+

Configure and customize the SDK logging.

+ +
+
+
+
+
Examples
+

Clone the JavaScript SDK source code and try out some of the examples to get started quickly.

+ +
+
+
diff --git a/sdkdocs/js/content/en/js-sdk-docs/images/dapr-client.jpg b/sdkdocs/js/content/en/js-sdk-docs/images/dapr-client.jpg new file mode 100644 index 00000000000..f86e9960bca Binary files /dev/null and b/sdkdocs/js/content/en/js-sdk-docs/images/dapr-client.jpg differ diff --git a/sdkdocs/js/content/en/js-sdk-docs/images/dapr-server.jpg b/sdkdocs/js/content/en/js-sdk-docs/images/dapr-server.jpg new file mode 100644 index 00000000000..26a239680c4 Binary files /dev/null and b/sdkdocs/js/content/en/js-sdk-docs/images/dapr-server.jpg differ diff --git a/sdkdocs/js/content/en/js-sdk-docs/js-actors/_index.md b/sdkdocs/js/content/en/js-sdk-docs/js-actors/_index.md new file mode 100644 index 00000000000..f9d79b1058b --- /dev/null +++ b/sdkdocs/js/content/en/js-sdk-docs/js-actors/_index.md @@ -0,0 +1,258 @@ +--- +type: docs +title: "JavaScript SDK for Actors" +linkTitle: "Actors" +weight: 3000 +description: How to get up and running with Actors using the Dapr JavaScript SDK +--- + +The Dapr actors package allows you to interact with Dapr virtual actors from a JavaScript application. The examples below demonstrate how to use the JavaScript SDK for interacting with virtual actors. + +For a more in-depth overview of Dapr actors, visit the [actors overview page]({{% ref actors-overview %}}). + +## Pre-requisites + +- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed +- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) +- [Latest LTS version of Node or greater](https://nodejs.org/en/) +- [JavaScript NPM package installed](https://www.npmjs.com/package/@dapr/dapr) + +## Scenario + +The below code examples loosely describe the scenario of a Parking Garage Spot Monitoring System, which can be seen in this [video](https://www.youtube.com/watch?v=eJCu6a-x9uo&t=3785) by Mark Russinovich. + +A parking garage consists of hundreds of parking spaces, where each parking space includes a sensor that provides updates to a centralized monitoring system. 
The parking space sensors (our actors) detect if a parking space is occupied or available. + +To jump in and run this example yourself, clone the source code, which can be found in the [JavaScript SDK examples directory](https://github.com/dapr/js-sdk/tree/main/examples/http/actor-parking-sensor). + +## Actor Interface + +The actor interface defines the contract that is shared between the actor implementation and the clients calling the actor. In the example below, we have created an interface for a parking garage sensor. Each sensor has 2 methods: `carEnter` and `carLeave`, which define the state of the parking space: + +```ts +export default interface ParkingSensorInterface { + carEnter(): Promise; + carLeave(): Promise; +} +``` + +## Actor Implementation + +An actor implementation defines a class by extending the base type `AbstractActor` and implementing the actor interface (`ParkingSensorInterface` in this case). + +The following code describes an actor implementation along with a few helper methods. + +```ts +import { AbstractActor } from "@dapr/dapr"; +import ParkingSensorInterface from "./ParkingSensorInterface"; + +export default class ParkingSensorImpl extends AbstractActor implements ParkingSensorInterface { + async carEnter(): Promise { + // Implementation that updates state that this parking space is occupied. + } + + async carLeave(): Promise { + // Implementation that updates state that this parking space is available. + } + + private async getInfo(): Promise { + // Implementation of requesting an update from the parking space sensor. + } + + /** + * @override + */ + async onActivate(): Promise { + // Initialization logic called by AbstractActor. + } +} +``` + +### Configuring Actor Runtime + +To configure actor runtime, use the `DaprClientOptions`. 
The various parameters and their default values are documented at [How-to: Use virtual actors in Dapr](https://docs.dapr.io/developing-applications/building-blocks/actors/howto-actors/#configuration-parameters). + +Note, the timeouts and intervals should be formatted as [time.ParseDuration](https://pkg.go.dev/time#ParseDuration) strings. + +```typescript +import { CommunicationProtocolEnum, DaprClient, DaprServer } from "@dapr/dapr"; + +// Configure the actor runtime with the DaprClientOptions. +const clientOptions = { + daprHost: daprHost, + daprPort: daprPort, + communicationProtocol: CommunicationProtocolEnum.HTTP, + actor: { + actorIdleTimeout: "1h", + actorScanInterval: "30s", + drainOngoingCallTimeout: "1m", + drainRebalancedActors: true, + reentrancy: { + enabled: true, + maxStackDepth: 32, + }, + remindersStoragePartitions: 0, + }, +}; + +// Use the options when creating DaprServer and DaprClient. + +// Note, DaprServer creates a DaprClient internally, which needs to be configured with clientOptions. 
+const server = new DaprServer({ serverHost, serverPort, clientOptions }); + +const client = new DaprClient(clientOptions); +``` + +## Registering Actors + +Initialize and register your actors by using the `DaprServer` package: + +```typescript +import { DaprServer } from "@dapr/dapr"; +import ParkingSensorImpl from "./ParkingSensorImpl"; + +const daprHost = "127.0.0.1"; +const daprPort = "50000"; +const serverHost = "127.0.0.1"; +const serverPort = "50001"; + +const server = new DaprServer({ + serverHost, + serverPort, + clientOptions: { + daprHost, + daprPort, + }, +}); + +await server.actor.init(); // Let the server know we need actors +server.actor.registerActor(ParkingSensorImpl); // Register the actor +await server.start(); // Start the server + +// To get the registered actors, you can invoke `getRegisteredActors`: +const resRegisteredActors = await server.actor.getRegisteredActors(); +console.log(`Registered Actors: ${JSON.stringify(resRegisteredActors)}`); +``` + +## Invoking Actor Methods + +After Actors are registered, create a Proxy object that implements `ParkingSensorInterface` using the `ActorProxyBuilder`. You can invoke the actor methods by directly calling methods on the Proxy object. Internally, it translates to making a network call to the Actor API and fetches the result back. + +```typescript +import { ActorId, DaprClient } from "@dapr/dapr"; +import ParkingSensorImpl from "./ParkingSensorImpl"; +import ParkingSensorInterface from "./ParkingSensorInterface"; + +const daprHost = "127.0.0.1"; +const daprPort = "50000"; + +const client = new DaprClient({ daprHost, daprPort }); + +// Create a new actor builder. It can be used to create multiple actors of a type. +const builder = new ActorProxyBuilder(ParkingSensorImpl, client); + +// Create a new actor instance. +const actor = builder.build(new ActorId("my-actor")); +// Or alternatively, use a random ID +// const actor = builder.build(ActorId.createRandomId()); + +// Invoke the method. 
+await actor.carEnter(); +``` + +## Using states with Actor + +```ts +import { AbstractActor } from "@dapr/dapr"; +import ActorStateInterface from "./ActorStateInterface"; + +export default class ActorStateExample extends AbstractActor implements ActorStateInterface { + async setState(key: string, value: any): Promise { + await this.getStateManager().setState(key, value); + await this.getStateManager().saveState(); + } + + async removeState(key: string): Promise { + await this.getStateManager().removeState(key); + await this.getStateManager().saveState(); + } + + // getState with a specific type + async getState(key: string): Promise { + return await this.getStateManager().getState(key); + } + + // getState without type as `any` + async getState(key: string): Promise { + return await this.getStateManager().getState(key); + } +} +``` + +## Actor Timers and Reminders + +The JS SDK supports actors that can schedule periodic work on themselves by registering either timers or reminders. The main difference between timers and reminders is that the Dapr actor runtime does not retain any information about timers after deactivation, but persists reminders information using the Dapr actor state provider. + +This distinction allows users to trade off between light-weight but stateless timers versus more resource-demanding but stateful reminders. + +The scheduling interface of timers and reminders is identical. For a more in-depth look at the scheduling configurations see the [actors timers and reminders docs]({{% ref "howto-actors.md#actor-timers-and-reminders" %}}). + +### Actor Timers + +```typescript +// ... + +const actor = builder.build(new ActorId("my-actor")); + +// Register a timer +await actor.registerActorTimer( + "timer-id", // Unique name of the timer. + "cb-method", // Callback method to execute when timer is fired. 
+ Temporal.Duration.from({ seconds: 2 }), // DueTime + Temporal.Duration.from({ seconds: 1 }), // Period + Temporal.Duration.from({ seconds: 1 }), // TTL + 50, // State to be sent to timer callback. +); + +// Delete the timer +await actor.unregisterActorTimer("timer-id"); +``` + +### Actor Reminders + +```typescript +// ... + +const actor = builder.build(new ActorId("my-actor")); + +// Register a reminder, it has a default callback: `receiveReminder` +await actor.registerActorReminder( + "reminder-id", // Unique name of the reminder. + Temporal.Duration.from({ seconds: 2 }), // DueTime + Temporal.Duration.from({ seconds: 1 }), // Period + Temporal.Duration.from({ seconds: 1 }), // TTL + 100, // State to be sent to reminder callback. +); + +// Delete the reminder +await actor.unregisterActorReminder("reminder-id"); +``` + +To handle the callback, you need to override the default `receiveReminder` implementation in your actor. For example, from our original actor implementation: + +```ts +export default class ParkingSensorImpl extends AbstractActor implements ParkingSensorInterface { + // ... + + /** + * @override + */ + async receiveReminder(state: any): Promise { + // handle stuff here + } + + // ... +} +``` + +For a full guide on actors, visit [How-To: Use virtual actors in Dapr]({{% ref howto-actors.md %}}). diff --git a/sdkdocs/js/content/en/js-sdk-docs/js-client/_index.md b/sdkdocs/js/content/en/js-sdk-docs/js-client/_index.md new file mode 100644 index 00000000000..51b17299115 --- /dev/null +++ b/sdkdocs/js/content/en/js-sdk-docs/js-client/_index.md @@ -0,0 +1,728 @@ +--- +type: docs +title: "JavaScript Client SDK" +linkTitle: "Client" +weight: 1000 +description: JavaScript Client SDK for developing Dapr applications +--- + +## Introduction + +The Dapr Client allows you to communicate with the Dapr Sidecar and get access to its client facing features such as Publishing Events, Invoking Output Bindings, State Management, Secret Management, and much more. 
+ +## Pre-requisites + +- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed +- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) +- [Latest LTS version of Node.js or greater](https://nodejs.org/en/) + +## Installing and importing Dapr's JS SDK + +1. Install the SDK with `npm`: + +```bash +npm i @dapr/dapr --save +``` + +2. Import the libraries: + +```typescript +import { DaprClient, DaprServer, HttpMethod, CommunicationProtocolEnum } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; // Dapr Sidecar Host +const daprPort = "3500"; // Dapr Sidecar Port of this Example Server +const serverHost = "127.0.0.1"; // App Host of this Example Server +const serverPort = "50051"; // App Port of this Example Server + +// HTTP Example +const client = new DaprClient({ daprHost, daprPort }); + +// GRPC Example +const client = new DaprClient({ daprHost, daprPort, communicationProtocol: CommunicationProtocolEnum.GRPC }); +``` + +## Running + +To run the examples, you can use two different protocols to interact with the Dapr sidecar: HTTP (default) or gRPC. + +### Using HTTP (default) + +```typescript +import { DaprClient } from "@dapr/dapr"; +const client = new DaprClient({ daprHost, daprPort }); +``` + +```bash +# Using dapr run +dapr run --app-id example-sdk --app-protocol http -- npm run start + +# or, using npm script +npm run start:dapr-http +``` + +### Using gRPC + +Since HTTP is the default, you will have to adapt the communication protocol to use gRPC. You can do this by passing an extra argument to the client or server constructor. 
+ +```typescript +import { DaprClient, CommunicationProtocol } from "@dapr/dapr"; +const client = new DaprClient({ daprHost, daprPort, communicationProtocol: CommunicationProtocol.GRPC }); +``` + +```bash +# Using dapr run +dapr run --app-id example-sdk --app-protocol grpc -- npm run start + +# or, using npm script +npm run start:dapr-grpc +``` + +### Environment Variables + +##### Dapr Sidecar Endpoints + +You can use the `DAPR_HTTP_ENDPOINT` and `DAPR_GRPC_ENDPOINT` environment variables to set the Dapr +Sidecar's HTTP and gRPC endpoints respectively. When these variables are set, the `daprHost` +and `daprPort` don't have to be set in the options argument of the constructor, the client will parse them automatically +out of the provided endpoints. + +```typescript +import { DaprClient, CommunicationProtocol } from "@dapr/dapr"; + +// Using HTTP, when DAPR_HTTP_ENDPOINT is set +const client = new DaprClient(); + +// Using gRPC, when DAPR_GRPC_ENDPOINT is set +const client = new DaprClient({ communicationProtocol: CommunicationProtocol.GRPC }); +``` + +If the environment variables are set, but `daprHost` and `daprPort` values are passed to the +constructor, the latter will take precedence over the environment variables. + +##### Dapr API Token + +You can use the `DAPR_API_TOKEN` environment variable to set the Dapr API token. When this variable +is set, the `daprApiToken` doesn't have to be set in the options argument of the constructor, +the client will get it automatically. + +## General + +### Increasing Body Size + +You can increase the body size that is used by the application to communicate with the sidecar by using a `DaprClient`'s option. 
+ +```typescript +import { DaprClient, CommunicationProtocol } from "@dapr/dapr"; + +// Allow a body size of 10Mb to be used +// The default is 4Mb +const client = new DaprClient({ + daprHost, + daprPort, + communicationProtocol: CommunicationProtocol.HTTP, + maxBodySizeMb: 10, +}); +``` + +### Proxying Requests + +By proxying requests, we can utilize the unique capabilities that Dapr brings with its sidecar architecture such as service discovery, logging, etc., enabling us to instantly "upgrade" our gRPC services. This feature of gRPC proxying was demonstrated in [community call 41](https://www.youtube.com/watch?v=B_vkXqptpXY&t=71s). + +#### Creating a Proxy + +To perform gRPC proxying, simply create a proxy by calling the `client.proxy.create()` method: + +```typescript +// As always, create a client to our dapr sidecar +// this client takes care of making sure the sidecar is started, that we can communicate, ... +const clientSidecar = new DaprClient({ daprHost, daprPort, communicationProtocol: CommunicationProtocol.GRPC }); + +// Create a Proxy that allows us to use our gRPC code +const clientProxy = await clientSidecar.proxy.create(GreeterClient); +``` + +We can now call the methods as defined in our `GreeterClient` interface (which in this case is from the [Hello World example](https://github.com/grpc/grpc-go/blob/master/examples/helloworld/helloworld/helloworld.proto)) + +#### Behind the Scenes (Technical Working) + +![Architecture](assets/architecture.png) + +1. The gRPC service gets started in Dapr. We tell Dapr which port this gRPC server is running on through `--app-port` and give it a unique Dapr app ID with `--app-id ` +2. We can now call the Dapr Sidecar through a client that will connect to the Sidecar +3. Whilst calling the Dapr Sidecar, we provide a metadata key named `dapr-app-id` with the value of our gRPC server booted in Dapr (e.g. `server` in our example) +4. 
Dapr will now forward the call to the gRPC server configured + +## Building blocks + +The JavaScript Client SDK allows you to interface with all of the [Dapr building blocks]({{% ref building-blocks %}}) focusing on Client to Sidecar features. + +### Invocation API + +#### Invoke a Service + +```typescript +import { DaprClient, HttpMethod } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; +const daprPort = "3500"; + +async function start() { + const client = new DaprClient({ daprHost, daprPort }); + + const serviceAppId = "my-app-id"; + const serviceMethod = "say-hello"; + + // POST Request + const response = await client.invoker.invoke(serviceAppId, serviceMethod, HttpMethod.POST, { hello: "world" }); + + // POST Request with headers + const response = await client.invoker.invoke( + serviceAppId, + serviceMethod, + HttpMethod.POST, + { hello: "world" }, + { headers: { "X-User-ID": "123" } }, + ); + + // GET Request + const response = await client.invoker.invoke(serviceAppId, serviceMethod, HttpMethod.GET); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); +``` + +> For a full guide on service invocation visit [How-To: Invoke a service]({{% ref howto-invoke-discover-services.md %}}). 
+ +### State Management API + +#### Save, Get and Delete application state + +```typescript +import { DaprClient } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; +const daprPort = "3500"; + +async function start() { + const client = new DaprClient({ daprHost, daprPort }); + + const serviceStoreName = "my-state-store-name"; + + // Save State + const response = await client.state.save( + serviceStoreName, + [ + { + key: "first-key-name", + value: "hello", + metadata: { + foo: "bar", + }, + }, + { + key: "second-key-name", + value: "world", + }, + ], + { + metadata: { + ttlInSeconds: "3", // this should override the ttl in the state item + }, + }, + ); + + // Get State + const response = await client.state.get(serviceStoreName, "first-key-name"); + + // Get Bulk State + const response = await client.state.getBulk(serviceStoreName, ["first-key-name", "second-key-name"]); + + // State Transactions + await client.state.transaction(serviceStoreName, [ + { + operation: "upsert", + request: { + key: "first-key-name", + value: "new-data", + }, + }, + { + operation: "delete", + request: { + key: "second-key-name", + }, + }, + ]); + + // Delete State + const response = await client.state.delete(serviceStoreName, "first-key-name"); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); +``` + +> For a full list of state operations visit [How-To: Get & save state]({{% ref howto-get-save-state.md %}}). 
+ +#### Query State API + +```typescript +import { DaprClient } from "@dapr/dapr"; + +async function start() { + const client = new DaprClient({ daprHost, daprPort }); + + const res = await client.state.query("state-mongodb", { + filter: { + OR: [ + { + EQ: { "person.org": "Dev Ops" }, + }, + { + AND: [ + { + EQ: { "person.org": "Finance" }, + }, + { + IN: { state: ["CA", "WA"] }, + }, + ], + }, + ], + }, + sort: [ + { + key: "state", + order: "DESC", + }, + ], + page: { + limit: 10, + }, + }); + + console.log(res); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); +``` + +### PubSub API + +#### Publish messages + +```typescript +import { DaprClient } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; +const daprPort = "3500"; + +async function start() { + const client = new DaprClient({ daprHost, daprPort }); + + const pubSubName = "my-pubsub-name"; + const topic = "topic-a"; + + // Publish message to topic as text/plain + // Note, the content type is inferred from the message type unless specified explicitly + const response = await client.pubsub.publish(pubSubName, topic, "hello, world!"); + // If publish fails, response contains the error + console.log(response); + + // Publish message to topic as application/json + await client.pubsub.publish(pubSubName, topic, { hello: "world" }); + + // Publish a JSON message as plain text + const options = { contentType: "text/plain" }; + await client.pubsub.publish(pubSubName, topic, { hello: "world" }, options); + + // Publish message to topic as application/cloudevents+json + // You can also use the cloudevent SDK to create cloud events https://github.com/cloudevents/sdk-javascript + const cloudEvent = { + specversion: "1.0", + source: "/some/source", + type: "example", + id: "1234", + }; + await client.pubsub.publish(pubSubName, topic, cloudEvent); + + // Publish a cloudevent as raw payload + const options = { metadata: { rawPayload: true } }; + await client.pubsub.publish(pubSubName, topic, 
"hello, world!", options); + + // Publish multiple messages to a topic as text/plain + await client.pubsub.publishBulk(pubSubName, topic, ["message 1", "message 2", "message 3"]); + + // Publish multiple messages to a topic as application/json + await client.pubsub.publishBulk(pubSubName, topic, [ + { hello: "message 1" }, + { hello: "message 2" }, + { hello: "message 3" }, + ]); + + // Publish multiple messages with explicit bulk publish messages + const bulkPublishMessages = [ + { + entryID: "entry-1", + contentType: "application/json", + event: { hello: "foo message 1" }, + }, + { + entryID: "entry-2", + contentType: "application/cloudevents+json", + event: { ...cloudEvent, data: "foo message 2", datacontenttype: "text/plain" }, + }, + { + entryID: "entry-3", + contentType: "text/plain", + event: "foo message 3", + }, + ]; + await client.pubsub.publishBulk(pubSubName, topic, bulkPublishMessages); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); +``` + +### Bindings API + +#### Invoke Output Binding + +**Output Bindings** + +```typescript +import { DaprClient } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; +const daprPort = "3500"; + +async function start() { + const client = new DaprClient({ daprHost, daprPort }); + + const bindingName = "my-binding-name"; + const bindingOperation = "create"; + const message = { hello: "world" }; + + const response = await client.binding.send(bindingName, bindingOperation, message); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); +``` + +> For a full guide on output bindings visit [How-To: Use bindings]({{% ref howto-bindings.md %}}). 
+ +### Secret API + +#### Retrieve secrets + +```typescript +import { DaprClient } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; +const daprPort = "3500"; + +async function start() { + const client = new DaprClient({ daprHost, daprPort }); + + const secretStoreName = "my-secret-store"; + const secretKey = "secret-key"; + + // Retrieve a single secret from secret store + const response = await client.secret.get(secretStoreName, secretKey); + + // Retrieve all secrets from secret store + const response = await client.secret.getBulk(secretStoreName); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); +``` + +> For a full guide on secrets visit [How-To: Retrieve secrets]({{% ref howto-secrets.md %}}). + +### Configuration API + +#### Get Configuration Keys + +```typescript +import { DaprClient } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; + +async function start() { + const client = new DaprClient({ + daprHost, + daprPort: process.env.DAPR_GRPC_PORT, + communicationProtocol: CommunicationProtocolEnum.GRPC, + }); + + const config = await client.configuration.get("config-store", ["key1", "key2"]); + console.log(config); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); +``` + +Sample output: + +```log +{ + items: { + key1: { key: 'key1', value: 'foo', version: '', metadata: {} }, + key2: { key: 'key2', value: 'bar2', version: '', metadata: {} } + } +} +``` + +#### Subscribe to Configuration Updates + +```typescript +import { DaprClient } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; + +async function start() { + const client = new DaprClient({ + daprHost, + daprPort: process.env.DAPR_GRPC_PORT, + communicationProtocol: CommunicationProtocolEnum.GRPC, + }); + + // Subscribes to config store changes for keys "key1" and "key2" + const stream = await client.configuration.subscribeWithKeys("config-store", ["key1", "key2"], async (data) => { + console.log("Subscribe received updates from config store: ", data); + 
}); + + // Wait for 60 seconds and unsubscribe. + await new Promise((resolve) => setTimeout(resolve, 60000)); + stream.stop(); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); +``` + +Sample output: + +```log +Subscribe received updates from config store: { + items: { key2: { key: 'key2', value: 'bar', version: '', metadata: {} } } +} +Subscribe received updates from config store: { + items: { key1: { key: 'key1', value: 'foobar', version: '', metadata: {} } } +} +``` + +### Cryptography API + +> Support for the cryptography API is only available on the gRPC client in the JavaScript SDK. + +```typescript +import { createReadStream, createWriteStream } from "node:fs"; +import { readFile, writeFile } from "node:fs/promises"; +import { pipeline } from "node:stream/promises"; + +import { DaprClient, CommunicationProtocolEnum } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; +const daprPort = "50050"; // Dapr Sidecar Port of this example server + +async function start() { + const client = new DaprClient({ + daprHost, + daprPort, + communicationProtocol: CommunicationProtocolEnum.GRPC, + }); + + // Encrypt and decrypt a message using streams + await encryptDecryptStream(client); + + // Encrypt and decrypt a message from a buffer + await encryptDecryptBuffer(client); +} + +async function encryptDecryptStream(client: DaprClient) { + // First, encrypt the message + console.log("== Encrypting message using streams"); + console.log("Encrypting plaintext.txt to ciphertext.out"); + + await pipeline( + createReadStream("plaintext.txt"), + await client.crypto.encrypt({ + componentName: "crypto-local", + keyName: "symmetric256", + keyWrapAlgorithm: "A256KW", + }), + createWriteStream("ciphertext.out"), + ); + + // Decrypt the message + console.log("== Decrypting message using streams"); + console.log("Encrypting ciphertext.out to plaintext.out"); + await pipeline( + createReadStream("ciphertext.out"), + await client.crypto.decrypt({ + componentName: 
"crypto-local", + }), + createWriteStream("plaintext.out"), + ); +} + +async function encryptDecryptBuffer(client: DaprClient) { + // Read "plaintext.txt" so we have some content + const plaintext = await readFile("plaintext.txt"); + + // First, encrypt the message + console.log("== Encrypting message using buffers"); + + const ciphertext = await client.crypto.encrypt(plaintext, { + componentName: "crypto-local", + keyName: "my-rsa-key", + keyWrapAlgorithm: "RSA", + }); + + await writeFile("test.out", ciphertext); + + // Decrypt the message + console.log("== Decrypting message using buffers"); + const decrypted = await client.crypto.decrypt(ciphertext, { + componentName: "crypto-local", + }); + + // The contents should be equal + if (plaintext.compare(decrypted) !== 0) { + throw new Error("Decrypted message does not match original message"); + } +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); +``` + +> For a full guide on cryptography visit [How-To: Cryptography]({{% ref howto-cryptography.md %}}). 
+ +### Distributed Lock API + +#### Try Lock and Unlock APIs + +```typescript +import { CommunicationProtocolEnum, DaprClient } from "@dapr/dapr"; +import { LockStatus } from "@dapr/dapr/types/lock/UnlockResponse"; + +const daprHost = "127.0.0.1"; +const daprPort = "3500"; + +async function start() { + const client = new DaprClient({ daprHost, daprPort }); + + const storeName = "redislock"; + const resourceId = "resourceId"; + const lockOwner = "owner1"; + let expiryInSeconds = 1000; + + console.log(`Acquiring lock on ${storeName}, ${resourceId} as owner: ${lockOwner}`); + const lockResponse = await client.lock.lock(storeName, resourceId, lockOwner, expiryInSeconds); + console.log(lockResponse); + + console.log(`Unlocking on ${storeName}, ${resourceId} as owner: ${lockOwner}`); + const unlockResponse = await client.lock.unlock(storeName, resourceId, lockOwner); + console.log("Unlock API response: " + getResponseStatus(unlockResponse.status)); +} + +function getResponseStatus(status: LockStatus) { + switch (status) { + case LockStatus.Success: + return "Success"; + case LockStatus.LockDoesNotExist: + return "LockDoesNotExist"; + case LockStatus.LockBelongsToOthers: + return "LockBelongsToOthers"; + default: + return "InternalError"; + } +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); +``` + +> For a full guide on distributed locks visit [How-To: Use Distributed Locks]({{% ref howto-use-distributed-lock.md %}}). 
+ +### Workflow API + +#### Workflow management + +```typescript +import { DaprClient } from "@dapr/dapr"; + +async function start() { + const client = new DaprClient(); + + // Start a new workflow instance + const instanceId = await client.workflow.start("OrderProcessingWorkflow", { + Name: "Paperclips", + TotalCost: 99.95, + Quantity: 4, + }); + console.log(`Started workflow instance ${instanceId}`); + + // Get a workflow instance + const workflow = await client.workflow.get(instanceId); + console.log( + `Workflow ${workflow.workflowName}, created at ${workflow.createdAt.toUTCString()}, has status ${ + workflow.runtimeStatus + }`, + ); + console.log(`Additional properties: ${JSON.stringify(workflow.properties)}`); + + // Pause a workflow instance + await client.workflow.pause(instanceId); + console.log(`Paused workflow instance ${instanceId}`); + + // Resume a workflow instance + await client.workflow.resume(instanceId); + console.log(`Resumed workflow instance ${instanceId}`); + + // Terminate a workflow instance + await client.workflow.terminate(instanceId); + console.log(`Terminated workflow instance ${instanceId}`); + + // Purge a workflow instance + await client.workflow.purge(instanceId); + console.log(`Purged workflow instance ${instanceId}`); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); +``` + +## Related links + +- [JavaScript SDK examples](https://github.com/dapr/js-sdk/tree/master/examples) diff --git a/sdkdocs/js/content/en/js-sdk-docs/js-examples/_index.md b/sdkdocs/js/content/en/js-sdk-docs/js-examples/_index.md new file mode 100644 index 00000000000..4ac669379e7 --- /dev/null +++ b/sdkdocs/js/content/en/js-sdk-docs/js-examples/_index.md @@ -0,0 +1,30 @@ +--- +type: docs +title: "JavaScript Examples" +linkTitle: "Examples" +weight: 5000 +description: Get started with the Dapr Javascript SDK through some of our examples! 
+--- + +## Quickstarts + +- [State Management](https://github.com/dapr/quickstarts/tree/master/state_management/javascript/sdk): Learn the concept of state management with Dapr +- [Pub Sub](https://github.com/dapr/quickstarts/tree/master/pub_sub/javascript/sdk): Create your own Publish / Subscribe system +- [Secrets Management](https://github.com/dapr/quickstarts/tree/master/secrets_management/javascript/sdk) +- [Service Invocation](https://github.com/dapr/quickstarts/tree/master/service_invocation/javascript/http) + +## Articles + +> Want your article added? [Let us know!](https://github.com/dapr/js-sdk/discussions/categories/articles) so we can add it below + +- [xaviergeerinck.com - Create an Azure IoT Hub Stream Processor with Dapr](https://xaviergeerinck.com/2022/05/19/create-an-azure-iot-hub-stream-processor-with-dapr/) + +- [xaviergeerinck.com - Integrate Dapr with Nest.JS and the Dapr JS SDK](https://xaviergeerinck.com/2022/03/29/integrate-dapr-with-nest-js-and-the-dapr-js-sdk/) + +- [xaviergeerinck.com - Parking Garage Sensor implementation using Dapr Actors](https://xaviergeerinck.com/2021/10/09/parking-garage-sensor-implementation-using-dapr-actors/) + +- [xaviergeerinck.com - Running Dapr on Azure IoT Edge](https://xaviergeerinck.com/2021/04/23/running-dapr-on-azure-iot-edge/) + +- [xaviergeerinck.com - Tutorial - Creating an Email Microservice with Typescript and Dapr](https://xaviergeerinck.com/2021/03/25/tutorial---creating-an-email-microservice-with-typescript-and-dapr/) + +- [xaviergeerinck.com - Dapr - Creating a User Login/Register Microservice](https://xaviergeerinck.com/2020/04/10/dapr---creating-a-user-login-register-microservice/) diff --git a/sdkdocs/js/content/en/js-sdk-docs/js-logger/_index.md b/sdkdocs/js/content/en/js-sdk-docs/js-logger/_index.md new file mode 100644 index 00000000000..4ad22272371 --- /dev/null +++ b/sdkdocs/js/content/en/js-sdk-docs/js-logger/_index.md @@ -0,0 +1,106 @@ +--- +type: docs +title: "Logging in JavaScript 
SDK" +linkTitle: "Logging" +weight: 4000 +description: Configuring logging in JavaScript SDK +--- + +## Introduction + +The JavaScript SDK comes with a out-of-box `Console` based logger. The SDK emits various internal logs to help users understand the chain of events and troubleshoot problems. A consumer of this SDK can customize the verbosity of the log, as well as provide their own implementation for the logger. + +## Configure log level + +There are five levels of logging in **descending order of importance** - `error`, `warn`, `info`, `verbose`, and `debug`. Setting the log to a level means that the logger will emit all the logs that are at least as important as the mentioned level. For example, setting to `verbose` log means that the SDK will not emit `debug` level logs. The default log level is `info`. + +### Dapr Client + +```js +import { CommunicationProtocolEnum, DaprClient, LogLevel } from "@dapr/dapr"; + +// create a client instance with log level set to verbose. +const client = new DaprClient({ + daprHost, + daprPort, + communicationProtocol: CommunicationProtocolEnum.HTTP, + logger: { level: LogLevel.Verbose }, +}); +``` + +> For more details on how to use the Client, see [JavaScript Client]({{% ref js-client %}}). + +### DaprServer + +```ts +import { CommunicationProtocolEnum, DaprServer, LogLevel } from "@dapr/dapr"; + +// create a server instance with log level set to error. +const server = new DaprServer({ + serverHost, + serverPort, + clientOptions: { + daprHost, + daprPort, + logger: { level: LogLevel.Error }, + }, +}); +``` + +> For more details on how to use the Server, see [JavaScript Server]({{% ref js-server %}}). + +## Custom LoggerService + +The JavaScript SDK uses the in-built `Console` for logging. To use a custom logger like Winston or Pino, you can implement the `LoggerService` interface. + +### Winston based logging: + +Create a new implementation of `LoggerService`. 
+ +```ts +import { LoggerService } from "@dapr/dapr"; +import * as winston from "winston"; + +export class WinstonLoggerService implements LoggerService { + private logger; + + constructor() { + this.logger = winston.createLogger({ + transports: [new winston.transports.Console(), new winston.transports.File({ filename: "combined.log" })], + }); + } + + error(message: any, ...optionalParams: any[]): void { + this.logger.error(message, ...optionalParams); + } + warn(message: any, ...optionalParams: any[]): void { + this.logger.warn(message, ...optionalParams); + } + info(message: any, ...optionalParams: any[]): void { + this.logger.info(message, ...optionalParams); + } + verbose(message: any, ...optionalParams: any[]): void { + this.logger.verbose(message, ...optionalParams); + } + debug(message: any, ...optionalParams: any[]): void { + this.logger.debug(message, ...optionalParams); + } +} +``` + +Pass the new implementation to the SDK. + +```ts +import { CommunicationProtocolEnum, DaprClient, LogLevel } from "@dapr/dapr"; +import { WinstonLoggerService } from "./WinstonLoggerService"; + +const winstonLoggerService = new WinstonLoggerService(); + +// create a client instance with log level set to verbose and logger service as winston. 
+const client = new DaprClient({ + daprHost, + daprPort, + communicationProtocol: CommunicationProtocolEnum.HTTP, + logger: { level: LogLevel.Verbose, service: winstonLoggerService }, +}); +``` diff --git a/sdkdocs/js/content/en/js-sdk-docs/js-server/_index.md b/sdkdocs/js/content/en/js-sdk-docs/js-server/_index.md new file mode 100644 index 00000000000..462ff4b81bc --- /dev/null +++ b/sdkdocs/js/content/en/js-sdk-docs/js-server/_index.md @@ -0,0 +1,665 @@ +--- +type: docs +title: "JavaScript Server SDK" +linkTitle: "Server" +weight: 2000 +description: JavaScript Server SDK for developing Dapr applications +--- + +## Introduction + +The Dapr Server will allow you to receive communication from the Dapr Sidecar and get access to its server facing features such as: Subscribing to Events, Receiving Input Bindings, and much more. + +## Pre-requisites + +- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed +- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) +- [Latest LTS version of Node or greater](https://nodejs.org/en/) + +## Installing and importing Dapr's JS SDK + +1. Install the SDK with `npm`: + +```bash +npm i @dapr/dapr --save +``` + +2. 
Import the libraries: + +```typescript +import { DaprServer, CommunicationProtocolEnum } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; // Dapr Sidecar Host +const daprPort = "3500"; // Dapr Sidecar Port of this Example Server +const serverHost = "127.0.0.1"; // App Host of this Example Server +const serverPort = "50051"; // App Port of this Example Server + +// HTTP Example +const server = new DaprServer({ + serverHost, + serverPort, + communicationProtocol: CommunicationProtocolEnum.HTTP, // DaprClient to use same communication protocol as DaprServer, in case DaprClient protocol not mentioned explicitly + clientOptions: { + daprHost, + daprPort, + }, +}); + +// GRPC Example +const server = new DaprServer({ + serverHost, + serverPort, + communicationProtocol: CommunicationProtocolEnum.GRPC, + clientOptions: { + daprHost, + daprPort, + }, +}); +``` + +## Running + +To run the examples, you can use two different protocols to interact with the Dapr sidecar: HTTP (default) or gRPC. + +### Using HTTP (built-in express webserver) + +```typescript +import { DaprServer } from "@dapr/dapr"; + +const server = new DaprServer({ + serverHost: appHost, + serverPort: appPort, + clientOptions: { + daprHost, + daprPort, + }, +}); +// initialize subscriptions, ... before server start +// the dapr sidecar relies on these +await server.start(); +``` + +```bash +# Using dapr run +dapr run --app-id example-sdk --app-port 50051 --app-protocol http -- npm run start + +# or, using npm script +npm run start:dapr-http +``` + +> ℹ️ **Note:** The `app-port` is required here, as this is where our server will need to bind to. Dapr will check for the application to bind to this port, before finishing start-up. + +### Using HTTP (bring your own express webserver) + +Instead of using the built-in web server for Dapr sidecar to application communication, you can also bring your own instance. 
This is helpful in scenarios like when you are building a REST API back-end and want to integrate Dapr directly in it. + +Note, this is currently available for [`express`](https://www.npmjs.com/package/express) only. + +> 💡 Note: when using a custom web-server, the SDK will configure server properties like max body size, and add new routes to it. The routes are unique on their own to avoid any collisions with your application, but it's not guaranteed to not collide. + +```typescript +import { DaprServer, CommunicationProtocolEnum } from "@dapr/dapr"; +import express from "express"; + +const myApp = express(); + +myApp.get("/my-custom-endpoint", (req, res) => { + res.send({ msg: "My own express app!" }); +}); + +const daprServer = new DaprServer({ + serverHost: "127.0.0.1", // App Host + serverPort: "50002", // App Port + serverHttp: myApp, + clientOptions: { + daprHost, + daprPort, + } + }); + +// Initialize subscriptions before the server starts, the Dapr sidecar uses it. +// This will also initialize the app server itself (removing the need for `app.listen` to be called). +await daprServer.start(); +``` + +After configuring the above, you can call your custom endpoint as you normally would: + +```typescript +const res = await fetch(`http://127.0.0.1:50002/my-custom-endpoint`); +const json = await res.json(); +``` + +### Using gRPC + +Since HTTP is the default, you will have to adapt the communication protocol to use gRPC. You can do this by passing an extra argument to the client or server constructor. + +```typescript +import { DaprServer, CommunicationProtocolEnum } from "@dapr/dapr"; + +const server = new DaprServer({ + serverHost: appHost, + serverPort: appPort, + communicationProtocol: CommunicationProtocolEnum.GRPC, + clientOptions: { + daprHost, + daprPort, + }, +}); +// initialize subscriptions, ... 
before server start +// the dapr sidecar relies on these +await server.start(); +``` + +```bash +# Using dapr run +dapr run --app-id example-sdk --app-port 50051 --app-protocol grpc -- npm run start + +# or, using npm script +npm run start:dapr-grpc +``` + +> ℹ️ **Note:** The `app-port` is required here, as this is where our server will need to bind to. Dapr will check for the application to bind to this port, before finishing start-up. + +## Building blocks + +The JavaScript Server SDK allows you to interface with all of the [Dapr building blocks]({{% ref building-blocks %}}) focusing on Sidecar to App features. + +### Invocation API + +#### Listen to an Invocation + +```typescript +import { DaprServer, DaprInvokerCallbackContent, HttpMethod } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; // Dapr Sidecar Host +const daprPort = "3500"; // Dapr Sidecar Port of this Example Server +const serverHost = "127.0.0.1"; // App Host of this Example Server +const serverPort = "50051"; // App Port of this Example Server + +async function start() { + const server = new DaprServer({ + serverHost, + serverPort, + clientOptions: { + daprHost, + daprPort, + }, + }); + + const callbackFunction = (data: DaprInvokerCallbackContent) => { + console.log("Received body: ", data.body); + console.log("Received metadata: ", data.metadata); + console.log("Received query: ", data.query); + console.log("Received headers: ", data.headers); // only available in HTTP + }; + + await server.invoker.listen("hello-world", callbackFunction, { method: HttpMethod.GET }); + + // You can now invoke the service with your app id and method "hello-world" + + await server.start(); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); +``` + +> For a full guide on service invocation visit [How-To: Invoke a service]({{% ref howto-invoke-discover-services.md %}}). 
+ +### PubSub API + +#### Subscribe to messages + +Subscribing to messages can be done in several ways to offer flexibility of receiving messages on your topics: + +- Direct subscription through the `subscribe` method +- Direct subscription with options through the `subscribeWithOptions` method +- Subscription afterwards through the `subscribeToRoute` method + +Each time an event arrives, we pass its body as `data` and the headers as `headers`, which can contain properties of the event publisher (e.g., a device ID from IoT Hub) + +> Dapr requires subscriptions to be set up on startup, but in the JS SDK we allow event handlers to be added afterwards as well, providing you the flexibility of programming. + +An example is provided below + +```typescript +import { DaprServer } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; // Dapr Sidecar Host +const daprPort = "3500"; // Dapr Sidecar Port of this Example Server +const serverHost = "127.0.0.1"; // App Host of this Example Server +const serverPort = "50051"; // App Port of this Example Server + +async function start() { + const server = new DaprServer({ + serverHost, + serverPort, + clientOptions: { + daprHost, + daprPort, + }, + }); + + const pubSubName = "my-pubsub-name"; + const topic = "topic-a"; + + // Configure Subscriber for a Topic + // Method 1: Direct subscription through the `subscribe` method + await server.pubsub.subscribe(pubSubName, topic, async (data: any, headers: object) => + console.log(`Received Data: ${JSON.stringify(data)} with headers: ${JSON.stringify(headers)}`), + ); + + // Method 2: Direct subscription with options through the `subscribeWithOptions` method + await server.pubsub.subscribeWithOptions(pubSubName, topic, { + callback: async (data: any, headers: object) => + console.log(`Received Data: ${JSON.stringify(data)} with headers: ${JSON.stringify(headers)}`), + }); + + // Method 3: Subscription afterwards through the `subscribeToRoute` method + // Note: we use default, since if no 
route was passed (empty options) we utilize "default" as the route name + await server.pubsub.subscribeWithOptions("pubsub-redis", "topic-options-1", {}); + server.pubsub.subscribeToRoute("pubsub-redis", "topic-options-1", "default", async (data: any, headers: object) => { + console.log(`Received Data: ${JSON.stringify(data)} with headers: ${JSON.stringify(headers)}`); + }); + + // Start the server + await server.start(); +} +``` + +> For a full list of state operations visit [How-To: Publish & subscribe]({{% ref howto-publish-subscribe.md %}}). + +#### Subscribe with SUCCESS/RETRY/DROP status + +Dapr supports [status codes for retry logic](https://docs.dapr.io/reference/api/pubsub_api/#expected-http-response) to specify what should happen after a message gets processed. + +> ⚠️ The JS SDK allows multiple callbacks on the same topic, we handle priority of status on `RETRY` > `DROP` > `SUCCESS` and default to `SUCCESS` + +> ⚠️ Make sure to [configure resiliency](https://docs.dapr.io/operations/resiliency/resiliency-overview/) in your application to handle `RETRY` messages + +In the JS SDK we support these messages through the `DaprPubSubStatusEnum` enum. To ensure Dapr will retry we configure a Resiliency policy as well. 
+ +**components/resiliency.yaml** + +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Resiliency +metadata: + name: myresiliency +spec: + policies: + retries: + # Global Retry Policy for Inbound Component operations + DefaultComponentInboundRetryPolicy: + policy: constant + duration: 500ms + maxRetries: 10 + targets: + components: + messagebus: + inbound: + retry: DefaultComponentInboundRetryPolicy +``` + +**src/index.ts** + +```typescript +import { DaprServer, DaprPubSubStatusEnum } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; // Dapr Sidecar Host +const daprPort = "3500"; // Dapr Sidecar Port of this Example Server +const serverHost = "127.0.0.1"; // App Host of this Example Server +const serverPort = "50051"; // App Port of this Example Server " + +async function start() { + const server = new DaprServer({ + serverHost, + serverPort, + clientOptions: { + daprHost, + daprPort, + }, + }); + + const pubSubName = "my-pubsub-name"; + const topic = "topic-a"; + + // Process a message successfully + await server.pubsub.subscribe(pubSubName, topic, async (data: any, headers: object) => { + return DaprPubSubStatusEnum.SUCCESS; + }); + + // Retry a message + // Note: this example will keep on retrying to deliver the message + // Note 2: each component can have their own retry configuration + // e.g., https://docs.dapr.io/reference/components-reference/supported-pubsub/setup-redis-pubsub/ + await server.pubsub.subscribe(pubSubName, topic, async (data: any, headers: object) => { + return DaprPubSubStatusEnum.RETRY; + }); + + // Drop a message + await server.pubsub.subscribe(pubSubName, topic, async (data: any, headers: object) => { + return DaprPubSubStatusEnum.DROP; + }); + + // Start the server + await server.start(); +} +``` + +#### Subscribe to messages rule based + +Dapr [supports routing messages](https://docs.dapr.io/developing-applications/building-blocks/pubsub/howto-route-messages/) to different handlers (routes) based on rules. 
+ +> E.g., you are writing an application that needs to handle messages depending on their "type" with Dapr, you can send them to different routes `handlerType1` and `handlerType2` with the default route being `handlerDefault` + +```typescript +import { DaprServer } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; // Dapr Sidecar Host +const daprPort = "3500"; // Dapr Sidecar Port of this Example Server +const serverHost = "127.0.0.1"; // App Host of this Example Server +const serverPort = "50051"; // App Port of this Example Server + +async function start() { + const server = new DaprServer({ + serverHost, + serverPort, + clientOptions: { + daprHost, + daprPort, + }, + }); + + const pubSubName = "my-pubsub-name"; + const topic = "topic-a"; + + // Configure Subscriber for a Topic with rule set + // Note: the default route and match patterns are optional + await server.pubsub.subscribe("pubsub-redis", "topic-1", { + default: "/default", + rules: [ + { + match: `event.type == "my-type-1"`, + path: "/type-1", + }, + { + match: `event.type == "my-type-2"`, + path: "/type-2", + }, + ], + }); + + // Add handlers for each route + server.pubsub.subscribeToRoute("pubsub-redis", "topic-1", "default", async (data) => { + console.log(`Handling Default`); + }); + server.pubsub.subscribeToRoute("pubsub-redis", "topic-1", "type-1", async (data) => { + console.log(`Handling Type 1`); + }); + server.pubsub.subscribeToRoute("pubsub-redis", "topic-1", "type-2", async (data) => { + console.log(`Handling Type 2`); + }); + + // Start the server + await server.start(); +} +``` + +#### Subscribe with Wildcards + +The popular wildcards `*` and `+` are supported (make sure to validate if the [pubsub component supports it](https://docs.dapr.io/reference/components-reference/supported-pubsub/)) and can be subscribed to as follows: + +```typescript +import { DaprServer } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; // Dapr Sidecar Host +const daprPort = "3500"; // Dapr Sidecar Port 
of this Example Server +const serverHost = "127.0.0.1"; // App Host of this Example Server +const serverPort = "50051"; // App Port of this Example Server " + +async function start() { + const server = new DaprServer({ + serverHost, + serverPort, + clientOptions: { + daprHost, + daprPort, + }, + }); + + const pubSubName = "my-pubsub-name"; + + // * Wildcard + await server.pubsub.subscribe(pubSubName, "/events/*", async (data: any, headers: object) => + console.log(`Received Data: ${JSON.stringify(data)}`), + ); + + // + Wildcard + await server.pubsub.subscribe(pubSubName, "/events/+/temperature", async (data: any, headers: object) => + console.log(`Received Data: ${JSON.stringify(data)}`), + ); + + // Start the server + await server.start(); +} +``` + +#### Bulk Subscribe to messages + +Bulk Subscription is supported and is available through following API: + +- Bulk subscription through the `subscribeBulk` method: `maxMessagesCount` and `maxAwaitDurationMs` are optional; and if not provided, default values for related components will be used. + +While listening for messages, the application receives messages from Dapr in bulk. However, like regular subscribe, the callback function receives a single message at a time, and the user can choose to return a `DaprPubSubStatusEnum` value to acknowledge successfully, retry, or drop the message. The default behavior is to return a success response. + +Please refer [this document](https://v1-10.docs.dapr.io/developing-applications/building-blocks/pubsub/pubsub-bulk/) for more details. 
+ +```typescript +import { DaprServer, DaprPubSubStatusEnum } from "@dapr/dapr"; + +const pubSubName = "orderPubSub"; +const topic = "topicbulk"; + +const daprHost = process.env.DAPR_HOST || "127.0.0.1"; +const daprHttpPort = process.env.DAPR_HTTP_PORT || "3502"; +const serverHost = process.env.SERVER_HOST || "127.0.0.1"; +const serverPort = process.env.APP_PORT || 5001; + +async function start() { + const server = new DaprServer({ + serverHost, + serverPort, + clientOptions: { + daprHost, + daprPort: daprHttpPort, + }, + }); + + // Subscribe in bulk to a topic with default config. + await server.pubsub.subscribeBulk(pubSubName, topic, (data) => + console.log("Subscriber received: " + JSON.stringify(data)), + ); + + // Subscribe in bulk to a topic with specific maxMessagesCount and maxAwaitDurationMs. + await server.pubsub.subscribeBulk( + pubSubName, + topic, + (data) => { + console.log("Subscriber received: " + JSON.stringify(data)); + return DaprPubSubStatusEnum.SUCCESS; // If App doesn't return anything, the default is SUCCESS. App can also return RETRY or DROP based on the incoming message. + }, + { + maxMessagesCount: 100, + maxAwaitDurationMs: 40, + }, + ); +} +``` + +#### Dead Letter Topics + +Dapr supports [dead letter topic](https://docs.dapr.io/developing-applications/building-blocks/pubsub/pubsub-deadletter/). This means that when a message fails to be processed, it gets sent to a dead letter queue. +E.g., when a message fails to be handled on `/my-queue` it will be sent to `/my-queue-failed`. 
+ +You can use the following options with `subscribeWithOptions` method: + +- `deadletterTopic`: Specify a deadletter topic name (note: if none is provided we create one named `deadletter`) +- `deadLetterCallback`: The method to trigger as handler for our deadletter + +Implementing Deadletter support in the JS SDK can be done by either + +- Passing the `deadLetterCallback` as an option +- By subscribing to route manually with `subscribeToRoute` + +An example is provided below + +```typescript +import { DaprServer } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; // Dapr Sidecar Host +const daprPort = "3500"; // Dapr Sidecar Port of this Example Server +const serverHost = "127.0.0.1"; // App Host of this Example Server +const serverPort = "50051"; // App Port of this Example Server + +async function start() { + const server = new DaprServer({ + serverHost, + serverPort, + clientOptions: { + daprHost, + daprPort, + }, + }); + + const pubSubName = "my-pubsub-name"; + + // Method 1 (direct subscribing through subscribeWithOptions) + await server.pubsub.subscribeWithOptions("pubsub-redis", "topic-options-5", { + callback: async (data: any) => { + throw new Error("Triggering Deadletter"); + }, + deadLetterCallback: async (data: any) => { + console.log("Handling Deadletter message"); + }, + }); + + // Method 2 (subscribe afterwards) + await server.pubsub.subscribeWithOptions("pubsub-redis", "topic-options-1", { + deadletterTopic: "my-deadletter-topic", + }); + server.pubsub.subscribeToRoute("pubsub-redis", "topic-options-1", "default", async () => { + throw new Error("Triggering Deadletter"); + }); + server.pubsub.subscribeToRoute("pubsub-redis", "topic-options-1", "my-deadletter-topic", async () => { + console.log("Handling Deadletter message"); + }); + + // Start server + await server.start(); +} +``` + +### Bindings API + +#### Receive an Input Binding + +```typescript +import { DaprServer } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; +const daprPort = 
"3500"; +const serverHost = "127.0.0.1"; +const serverPort = "5051"; + +async function start() { + const server = new DaprServer({ + serverHost, + serverPort, + clientOptions: { + daprHost, + daprPort, + }, + }); + + const bindingName = "my-binding-name"; + + const response = await server.binding.receive(bindingName, async (data: any) => + console.log(`Got Data: ${JSON.stringify(data)}`), + ); + + await server.start(); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); +``` + +> For a full guide on output bindings visit [How-To: Use bindings]({{% ref howto-bindings.md %}}). + +### Configuration API + +> 💡 The configuration API is currently only available through gRPC + +#### Getting a configuration value + +```typescript +import { DaprServer } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; +const daprPort = "3500"; +const serverHost = "127.0.0.1"; +const serverPort = "5051"; + +async function start() { + const client = new DaprClient({ + daprHost, + daprPort, + communicationProtocol: CommunicationProtocolEnum.GRPC, + }); + const config = await client.configuration.get("config-redis", ["myconfigkey1", "myconfigkey2"]); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); +``` + +#### Subscribing to Key Changes + +```typescript +import { DaprServer } from "@dapr/dapr"; + +const daprHost = "127.0.0.1"; +const daprPort = "3500"; +const serverHost = "127.0.0.1"; +const serverPort = "5051"; + +async function start() { + const client = new DaprClient({ + daprHost, + daprPort, + communicationProtocol: CommunicationProtocolEnum.GRPC, + }); + const stream = await client.configuration.subscribeWithKeys("config-redis", ["myconfigkey1", "myconfigkey2"], () => { + // Received a key update + }); + + // When you are ready to stop listening, call the following + await stream.close(); +} + +start().catch((e) => { + console.error(e); + process.exit(1); +}); +``` + +## Related links + +- [JavaScript SDK 
examples](https://github.com/dapr/js-sdk/tree/main/examples) diff --git a/sdkdocs/js/content/en/js-sdk-docs/js-workflow/_index.md b/sdkdocs/js/content/en/js-sdk-docs/js-workflow/_index.md new file mode 100644 index 00000000000..4cd73009820 --- /dev/null +++ b/sdkdocs/js/content/en/js-sdk-docs/js-workflow/_index.md @@ -0,0 +1,157 @@ +--- +type: docs +title: "How to: Author and manage Dapr Workflow in the JavaScript SDK" +linkTitle: "How to: Author and manage workflows" +weight: 20000 +description: How to get up and running with workflows using the Dapr JavaScript SDK +--- + +Let’s create a Dapr workflow and invoke it using the console. With the [provided workflow example](https://github.com/dapr/js-sdk/tree/main/examples/workflow), you will: + +- Execute the workflow instance using the [JavaScript workflow worker](https://github.com/dapr/js-sdk/tree/main/src/workflow/runtime/WorkflowRuntime.ts) +- Utilize the JavaScript workflow client and API calls to [start and terminate workflow instances](https://github.com/dapr/js-sdk/tree/main/src/workflow/client/DaprWorkflowClient.ts) + +This example uses the default configuration from `dapr init` in [self-hosted mode](https://github.com/dapr/cli#install-dapr-on-your-local-machine-self-hosted). + +## Prerequisites + +- [Dapr CLI and initialized environment](https://docs.dapr.io/getting-started). +- [Node.js and npm](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm), +- [Docker Desktop](https://www.docker.com/products/docker-desktop) +- Verify you're using the latest proto bindings + +## Set up the environment + +Clone the JavaScript SDK repo and navigate into it. + +```bash +git clone https://github.com/dapr/js-sdk +cd js-sdk +``` + +From the JavaScript SDK root directory, navigate to the Dapr Workflow example. + +```bash +cd examples/workflow/authoring +``` + +Run the following command to install the requirements for running this workflow sample with the Dapr JavaScript SDK. 
+ +```bash +npm install +``` + +## Run the `activity-sequence.ts` + +The `activity-sequence` file registers a workflow and an activity with the Dapr Workflow runtime. The workflow is a sequence of activities that are executed in order. We use DaprWorkflowClient to schedule a new workflow instance and wait for it to complete. + +```typescript +const daprHost = "localhost"; +const daprPort = "50001"; +const workflowClient = new DaprWorkflowClient({ + daprHost, + daprPort, +}); +const workflowRuntime = new WorkflowRuntime({ + daprHost, + daprPort, +}); + +const hello = async (_: WorkflowActivityContext, name: string) => { + return `Hello ${name}!`; +}; + +const sequence: TWorkflow = async function* (ctx: WorkflowContext): any { + const cities: string[] = []; + + const result1 = yield ctx.callActivity(hello, "Tokyo"); + cities.push(result1); + const result2 = yield ctx.callActivity(hello, "Seattle"); + cities.push(result2); + const result3 = yield ctx.callActivity(hello, "London"); + cities.push(result3); + + return cities; +}; + +workflowRuntime.registerWorkflow(sequence).registerActivity(hello); + +// Wrap the worker startup in a try-catch block to handle any errors during startup +try { + await workflowRuntime.start(); + console.log("Workflow runtime started successfully"); +} catch (error) { + console.error("Error starting workflow runtime:", error); +} + +// Schedule a new orchestration +try { + const id = await workflowClient.scheduleNewWorkflow(sequence); + console.log(`Orchestration scheduled with ID: ${id}`); + + // Wait for orchestration completion + const state = await workflowClient.waitForWorkflowCompletion(id, undefined, 30); + + console.log(`Orchestration completed! Result: ${state?.serializedOutput}`); +} catch (error) { + console.error("Error scheduling or waiting for orchestration:", error); +} +``` + +In the code above: + +- `workflowRuntime.registerWorkflow(sequence)` registers `sequence` as a workflow in the Dapr Workflow runtime. 
+- `await workflowRuntime.start();` builds and starts the engine within the Dapr Workflow runtime. +- `await workflowClient.scheduleNewWorkflow(sequence)` schedules a new workflow instance with the Dapr Workflow runtime. +- `await workflowClient.waitForWorkflowCompletion(id, undefined, 30)` waits for the workflow instance to complete. + +In the terminal, execute the following command to kick off the `activity-sequence.ts`: + +```sh +npm run start:dapr:activity-sequence +``` + +**Expected output** + +``` +You're up and running! Both Dapr and your app logs will appear here. + +... + +== APP == Orchestration scheduled with ID: dc040bea-6436-4051-9166-c9294f9d2201 +== APP == Waiting 30 seconds for instance dc040bea-6436-4051-9166-c9294f9d2201 to complete... +== APP == Received "Orchestrator Request" work item with instance id 'dc040bea-6436-4051-9166-c9294f9d2201' +== APP == dc040bea-6436-4051-9166-c9294f9d2201: Rebuilding local state with 0 history event... +== APP == dc040bea-6436-4051-9166-c9294f9d2201: Processing 2 new history event(s): [ORCHESTRATORSTARTED=1, EXECUTIONSTARTED=1] +== APP == dc040bea-6436-4051-9166-c9294f9d2201: Waiting for 1 task(s) and 0 event(s) to complete... +== APP == dc040bea-6436-4051-9166-c9294f9d2201: Returning 1 action(s) +== APP == Received "Activity Request" work item +== APP == Activity hello completed with output "Hello Tokyo!" (14 chars) +== APP == Received "Orchestrator Request" work item with instance id 'dc040bea-6436-4051-9166-c9294f9d2201' +== APP == dc040bea-6436-4051-9166-c9294f9d2201: Rebuilding local state with 3 history event... +== APP == dc040bea-6436-4051-9166-c9294f9d2201: Processing 2 new history event(s): [ORCHESTRATORSTARTED=1, TASKCOMPLETED=1] +== APP == dc040bea-6436-4051-9166-c9294f9d2201: Waiting for 1 task(s) and 0 event(s) to complete... 
+== APP == dc040bea-6436-4051-9166-c9294f9d2201: Returning 1 action(s) +== APP == Received "Activity Request" work item +== APP == Activity hello completed with output "Hello Seattle!" (16 chars) +== APP == Received "Orchestrator Request" work item with instance id 'dc040bea-6436-4051-9166-c9294f9d2201' +== APP == dc040bea-6436-4051-9166-c9294f9d2201: Rebuilding local state with 6 history event... +== APP == dc040bea-6436-4051-9166-c9294f9d2201: Processing 2 new history event(s): [ORCHESTRATORSTARTED=1, TASKCOMPLETED=1] +== APP == dc040bea-6436-4051-9166-c9294f9d2201: Waiting for 1 task(s) and 0 event(s) to complete... +== APP == dc040bea-6436-4051-9166-c9294f9d2201: Returning 1 action(s) +== APP == Received "Activity Request" work item +== APP == Activity hello completed with output "Hello London!" (15 chars) +== APP == Received "Orchestrator Request" work item with instance id 'dc040bea-6436-4051-9166-c9294f9d2201' +== APP == dc040bea-6436-4051-9166-c9294f9d2201: Rebuilding local state with 9 history event... +== APP == dc040bea-6436-4051-9166-c9294f9d2201: Processing 2 new history event(s): [ORCHESTRATORSTARTED=1, TASKCOMPLETED=1] +== APP == dc040bea-6436-4051-9166-c9294f9d2201: Orchestration completed with status COMPLETED +== APP == dc040bea-6436-4051-9166-c9294f9d2201: Returning 1 action(s) +INFO[0006] dc040bea-6436-4051-9166-c9294f9d2201: 'sequence' completed with a COMPLETED status. app_id=activity-sequence-workflow instance=kaibocai-devbox scope=wfengine.backend type=log ver=1.12.3 +== APP == Instance dc040bea-6436-4051-9166-c9294f9d2201 completed +== APP == Orchestration completed! 
Result: ["Hello Tokyo!","Hello Seattle!","Hello London!"] +``` + +## Next steps + +- [Learn more about Dapr workflow]({{% ref workflow-overview.md %}}) +- [Workflow API reference]({{% ref workflow_api.md %}}) diff --git a/sdkdocs/php b/sdkdocs/php deleted file mode 160000 index 82c7283a73c..00000000000 --- a/sdkdocs/php +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 82c7283a73cf96e27b93383531094cf82cd2c6c4 diff --git a/sdkdocs/php/content/en/php-sdk-docs/_index.md b/sdkdocs/php/content/en/php-sdk-docs/_index.md new file mode 100644 index 00000000000..0d8044c6b7e --- /dev/null +++ b/sdkdocs/php/content/en/php-sdk-docs/_index.md @@ -0,0 +1,118 @@ +--- +type: docs +title: "Dapr PHP SDK" +linkTitle: "PHP" +weight: 1000 +description: PHP SDK packages for developing Dapr applications +no_list: true +cascade: + github_repo: https://github.com/dapr/php-sdk + github_subdir: daprdocs/content/en/php-sdk-docs + path_base_for_github_subdir: content/en/developing-applications/sdks/php/ + github_branch: main +--- + +Dapr offers an SDK to help with the development of PHP applications. Using it, you can create PHP clients, servers, and virtual actors with Dapr. + +## Setting up + +### Prerequisites + +- [Composer](https://getcomposer.org/) +- [PHP 8](https://www.php.net/) + +### Optional Prerequisites + +- [Docker](https://www.docker.com/) +- [xdebug](http://xdebug.org/) -- for debugging + +## Initialize your project + +In a directory where you want to create your service, run `composer init` and answer the questions. +Install with `composer require dapr/php-sdk` and any other dependencies you may wish to use. 
+ +## Configure your service + +Create a config.php, copying the contents below: + +```php + LogLevel::WARNING, + + // Generate a new proxy on each request - recommended for development + 'dapr.actors.proxy.generation' => ProxyFactory::GENERATED, + + // put any subscriptions here + 'dapr.subscriptions' => [], + + // if this service will be hosting any actors, add them here + 'dapr.actors' => [], + + // if this service will be hosting any actors, configure how long until dapr should consider an actor idle + 'dapr.actors.idle_timeout' => null, + + // if this service will be hosting any actors, configure how often dapr will check for idle actors + 'dapr.actors.scan_interval' => null, + + // if this service will be hosting any actors, configure how long dapr will wait for an actor to finish during drains + 'dapr.actors.drain_timeout' => null, + + // if this service will be hosting any actors, configure if dapr should wait for an actor to finish + 'dapr.actors.drain_enabled' => null, + + // you shouldn't have to change this, but the setting is here if you need to + 'dapr.port' => env('DAPR_HTTP_PORT', '3500'), + + // add any custom serialization routines here + 'dapr.serializers.custom' => [], + + // add any custom deserialization routines here + 'dapr.deserializers.custom' => [], + + // the following has no effect, as it is the default middlewares and processed in order specified + 'dapr.http.middleware.request' => [get(Tracing::class)], + 'dapr.http.middleware.response' => [get(ApplicationJson::class), get(Tracing::class)], +]; +``` + +## Create your service + +Create `index.php` and put the following contents: + +```php + $builder->addDefinitions(__DIR__ . '/config.php')); +$app->get('/hello/{name}', function(string $name) { + return ['hello' => $name]; +}); +$app->start(); +``` + +## Try it out + +Initialize dapr with `dapr init` and then start the project with `dapr run -a dev -p 3000 -- php -S 0.0.0.0:3000`. 
+ + +You can now open a web browser and point it to [http://localhost:3000/hello/world](http://localhost:3000/hello/world) +replacing `world` with your name, a pet's name, or whatever you want. + + +Congratulations, you've created your first Dapr service! I'm excited to see what you'll do with it! + +## More Information + +- [Packagist](https://packagist.org/packages/dapr/php-sdk) +- [Dapr SDK serialization]({{% ref sdk-serialization.md %}}) diff --git a/sdkdocs/php/content/en/php-sdk-docs/php-actors/_index.md b/sdkdocs/php/content/en/php-sdk-docs/php-actors/_index.md new file mode 100644 index 00000000000..bb6d3cc3ce6 --- /dev/null +++ b/sdkdocs/php/content/en/php-sdk-docs/php-actors/_index.md @@ -0,0 +1,164 @@ +--- +type: docs +title: "Virtual Actors" +linkTitle: "Actors" +weight: 1000 +description: How to build actors +no_list: true +--- + +If you're new to the actor pattern, the best place to learn about the actor pattern is in +the [Actor Overview.]({{% ref actors-overview.md %}}) + +In the PHP SDK, there are two sides to an actor, the Client, and the Actor (aka, the Runtime). As a client of an actor, +you'll interact with a remote actor via the `ActorProxy` class. This class generates a proxy class on-the-fly using one +of several configured strategies. + +When writing an actor, state can be managed for you. You can hook into the actor lifecycle, and define reminders and +timers. This gives you considerable power for handling all types of problems that the actor pattern is suited for. + +## The Actor Proxy + +Whenever you want to communicate with an actor, you'll need to get a proxy object to do so. The proxy is responsible for +serializing your request, deserializing the response, and returning it to you, all while obeying the contract defined by +the specified interface. + +In order to create the proxy, you'll first need an interface to define how and what you send and receive from an actor. 
+For example, if you want to communicate with a counting actor that solely keeps track of counts, you might define the +interface as follows: + +```php +run(function(\Dapr\Actors\ActorProxy $actorProxy) { + $actor = $actorProxy->get(ICount::class, 'actor-id'); + $actor->increment(10); +}); +``` + +## Writing Actors + +To create an actor, you need to implement the interface you defined earlier and also add the `DaprType` attribute. All +actors *must* implement `IActor`, however there's an `Actor` base class that implements the boilerplate making your +implementation much simpler. + +Here's the counter actor: + +```php +state->count += $amount; + } + + function get_count(): int { + return $this->state->count; + } +} +``` + +The most important bit is the constructor. It takes at least one argument with the name of `id` which is the id of the +actor. Any additional arguments are injected by the DI container, including any `ActorState` you want to use. + +### Actor Lifecycle + +An actor is instantiated via the constructor on every request targeting that actor type. You can use it to calculate +ephemeral state or handle any kind of request-specific startup you require, such as setting up other clients or +connections. + +After the actor is instantiated, the `on_activation()` method may be called. The `on_activation()` method is called any +time the actor "wakes up" or when it is created for the first time. It is not called on every request. + +Next, the actor method is called. This may be from a timer, reminder, or from a client. You may perform any work that +needs to be done and/or throw an exception. + +Finally, the result of the work is returned to the caller. After some time (depending on how you've configured the +service), the actor will be deactivated and `on_deactivation()` will be called. This may not be called if the host dies, +daprd crashes, or some other error occurs which prevents it from being called successfully. 
+ +## Actor State + +Actor state is a "Plain Old PHP Object" (POPO) that extends `ActorState`. The `ActorState` base class provides a couple +of useful methods. Here's an example implementation: + +```php +}} + +{{% tab header="Production" %}} + +If you want to take advantage of pre-compiled dependency injection, you need to use a factory: + +```php + fn() => [Counter::class], +]; +``` + +All that is required to start the app: + +```php + $builder->addDefinitions('config.php')->enableCompilation(__DIR__) +); +$app->start(); +``` + +{{% /tab %}} +{{% tab header="Development" %}} + +```php + [Counter::class] +]; +``` + +All that is required to start the app: + +```php + $builder->addDefinitions('config.php')); +$app->start(); +``` + +{{% /tab %}} +{{< /tabpane >}} diff --git a/sdkdocs/php/content/en/php-sdk-docs/php-actors/php-actor-reference.md b/sdkdocs/php/content/en/php-sdk-docs/php-actors/php-actor-reference.md new file mode 100644 index 00000000000..0253934e3da --- /dev/null +++ b/sdkdocs/php/content/en/php-sdk-docs/php-actors/php-actor-reference.md @@ -0,0 +1,238 @@ +--- +type: docs +title: "Production Reference: Actors" +linkTitle: "Production Reference" +weight: 1000 +description: Running PHP actors in production +no_list: true +--- + +## Proxy modes + +There are four different modes actor proxies are handled. Each mode presents different trade-offs that you'll need to +weigh during development and in production. + +```php +}} +{{% tab header="GENERATED" %}} + +This is the default mode. In this mode, a class is generated and `eval`'d on every request. It's mostly for development +and shouldn't be used in production. + +{{% /tab %}} +{{% tab header="GENERATED_CACHED" %}} + +This is the same as `ProxyModes::GENERATED` except the class is stored in a tmp file so it doesn't need to be +regenerated on every request. 
It doesn't know when to update the cached class, so using it in development is discouraged +but is offered for when manually generating the files isn't possible. + +{{% /tab %}} +{{% tab header="ONLY_EXISTING" %}} + +In this mode, an exception is thrown if the proxy class doesn't exist. This is useful for when you don't want to +generate code in production. You'll have to make sure the class is generated and pre-/autoloaded. + +### Generating proxies + +You can create a composer script to generate proxies on demand to take advantage of the `ONLY_EXISTING` mode. + +Create a `ProxyCompiler.php` + +```php +run(function(\DI\FactoryInterface $factory) use ($interface) { + return \Dapr\Actors\Generators\FileGenerator::generate($interface, $factory); + }); + $reflection = new ReflectionClass($interface); + $dapr_type = $reflection->getAttributes(\Dapr\Actors\Attributes\DaprType::class)[0]->newInstance()->type; + $filename = 'dapr_proxy_'.$dapr_type.'.php'; + file_put_contents(self::PROXY_LOCATION.$filename, $output); + echo "Compiled: $interface"; + } + } catch (Exception $ex) { + echo "Failed to generate proxy for $interface\n{$ex->getMessage()} on line {$ex->getLine()} in {$ex->getFile()}\n"; + } + } +} +``` + +Then add a psr-4 autoloader for the generated proxies and a script in `composer.json`: + +```json +{ + "autoload": { + "psr-4": { + "Dapr\\Proxies\\": "path/to/proxies" + } + }, + "scripts": { + "compile-proxies": "ProxyCompiler::compile" + } +} +``` + +And finally, configure dapr to only use the generated proxies: + +```php + ProxyFactory::ONLY_EXISTING, +]; +``` + +{{% /tab %}} +{{% tab header="DYNAMIC" %}} + +In this mode, the proxy satisfies the interface contract, however, it does not actually implement the interface itself +(meaning `instanceof` will be `false`). This mode takes advantage of a few quirks in PHP to work and exists for cases +where code cannot be `eval`'d or generated. 
+ +{{% /tab %}} +{{< /tabpane >}} + +### Requests + +Creating an actor proxy is very inexpensive for any mode. There are no requests made when creating an actor proxy object. + +When you call a method on a proxy object, only methods that you implemented are serviced by your actor implementation. +`get_id()` is handled locally, and `get_reminder()`, `delete_reminder()`, etc. are handled by the `daprd`. + +## Actor implementation + +Every actor implementation in PHP must implement `\Dapr\Actors\IActor` and use the `\Dapr\Actors\ActorTrait` trait. This +allows for fast reflection and some shortcuts. Using the `\Dapr\Actors\Actor` abstract base class does this for you, but +if you need to override the default behavior, you can do so by implementing the interface and using the trait. + +## Activation and deactivation + +When an actor activates, a token file is written to a temporary directory (by default this is in +`'/tmp/dapr_' + sha256(concat(Dapr type, id))` in linux and `'%temp%/dapr_' + sha256(concat(Dapr type, id))` on Windows). +This is persisted until the actor deactivates, or the host shuts down. This allows for `on_activation` to be called once +and only once when Dapr activates the actor on the host. + +## Performance + +Actor method invocation is very fast on a production setup with `php-fpm` and `nginx`, or IIS on Windows. Even though +the actor is constructed on every request, actor state keys are only loaded on-demand and not during each request. +However, there is some overhead in loading each key individually. This can be mitigated by storing an array of data in +state, trading some usability for speed. It is not recommended doing this from the start, but as an optimization when +needed. + +## Versioning state + +The names of the variables in the `ActorState` object directly correspond to key names in the store. This means that if +you change the type or name of a variable, you may run into errors. 
To get around this, you may need to version your state +object. In order to do this, you'll need to override how state is loaded and stored. There are many ways to approach this, +one such solution might be something like this: + +```php +state_version < self::VERSION) { + $value = parent::__get($this->get_versioned_key('key', $this->state_version)); + // update the value after updating the data structure + parent::__set($this->get_versioned_key('key', self::VERSION), $value); + $this->state_version = self::VERSION; + $this->save_state(); + } + } + + // if you upgrade all keys as needed in the method above, you don't need to walk the previous + // keys when loading/saving and you can just get the current version of the key. + + private function get_previous_version(int $version): int { + return $this->has_previous_version($version) ? $version - 1 : $version; + } + + private function has_previous_version(int $version): bool { + return $version >= 0; + } + + private function walk_versions(int $version, callable $callback, callable $predicate): mixed { + $value = $callback($version); + if($predicate($value) || !$this->has_previous_version($version)) { + return $value; + } + return $this->walk_versions($this->get_previous_version($version), $callback, $predicate); + } + + private function get_versioned_key(string $key, int $version) { + return $this->has_previous_version($version) ? 
$version.$key : $key; + } + + public function __get(string $key): mixed { + return $this->walk_versions( + self::VERSION, + fn($version) => parent::__get($this->get_versioned_key($key, $version)), + fn($value) => isset($value) + ); + } + + public function __isset(string $key): bool { + return $this->walk_versions( + self::VERSION, + fn($version) => parent::__isset($this->get_versioned_key($key, $version)), + fn($isset) => $isset + ); + } + + public function __set(string $key,mixed $value): void { + // optional: you can unset previous versions of the key + parent::__set($this->get_versioned_key($key, self::VERSION), $value); + } + + public function __unset(string $key) : void { + // unset this version and all previous versions + $this->walk_versions( + self::VERSION, + fn($version) => parent::__unset($this->get_versioned_key($key, $version)), + fn() => false + ); + } +} +``` + +There's a lot to be optimized, and it wouldn't be a good idea to use this verbatim in production, but you can get the +gist of how it would work. A lot of it will depend on your use case which is why there's not something like this in +the SDK. For instance, in this example implementation, the previous value is kept for where there may be a bug during an upgrade; +keeping the previous value allows for running the upgrade again, but you may wish to delete the previous value. diff --git a/sdkdocs/php/content/en/php-sdk-docs/php-app/_index.md b/sdkdocs/php/content/en/php-sdk-docs/php-app/_index.md new file mode 100644 index 00000000000..1eab0af4142 --- /dev/null +++ b/sdkdocs/php/content/en/php-sdk-docs/php-app/_index.md @@ -0,0 +1,90 @@ +--- +type: docs +title: "The App" +linkTitle: "App" +weight: 1000 +description: Using the App Class +no_list: true +--- + +In PHP, there is no default router. Thus, the `\Dapr\App` class is provided. It uses +[Nikic's FastRoute](https://github.com/nikic/FastRoute) under the hood. However, you are free to use any router or +framework that you'd like. 
Just check out the `add_dapr_routes()` method in the `App` class to see how actors and +subscriptions are implemented. + +Every app should start with `App::create()` which takes two parameters, the first is an existing DI container, if you +have one, and the second is a callback to hook into the `ContainerBuilder` and add your own configuration. + +From there, you should define your routes and then call `$app->start()` to execute the route on the current request. + + +```php + $builder->addDefinitions('config.php')); + +// add a controller for GET /test/{id} that returns the id +$app->get('/test/{id}', fn(string $id) => $id); + +$app->start(); +``` + +## Returning from a controller + +You can return anything from a controller, and it will be serialized into a json object. You can also request the +Psr Response object and return that instead, allowing you to customize headers, and have control over the entire response: + +```php + $builder->addDefinitions('config.php')); + +// add a controller for GET /test/{id} that returns the id +$app->get('/test/{id}', + fn( + string $id, + \Psr\Http\Message\ResponseInterface $response, + \Nyholm\Psr7\Factory\Psr17Factory $factory) => $response->withBody($factory->createStream($id))); + +$app->start(); +``` + +## Using the app as a client + +When you just want to use Dapr as a client, such as in existing code, you can call `$app->run()`. In these cases, there's +usually no need for a custom configuration, however, you may want to use a compiled DI container, especially in production: + +```php + $builder->enableCompilation(__DIR__)); +$result = $app->run(fn(\Dapr\DaprClient $client) => $client->get('/invoke/other-app/method/my-method')); +``` + +## Using in other frameworks + +A `DaprClient` object is provided, in fact, all the sugar used by the `App` object is built on the `DaprClient`. 
+ +```php +withSerializationConfig($yourSerializer)->withDeserializationConfig($yourDeserializer); + +// you can also pass it a logger +$clientBuilder = $clientBuilder->withLogger($myLogger); + +// and change the url of the sidecar, for example, using https +$clientBuilder = $clientBuilder->useHttpClient('https://localhost:3800') +``` + +There are several functions you can call before diff --git a/sdkdocs/php/content/en/php-sdk-docs/php-app/php-unit-testing.md b/sdkdocs/php/content/en/php-sdk-docs/php-app/php-unit-testing.md new file mode 100644 index 00000000000..5da0028b41c --- /dev/null +++ b/sdkdocs/php/content/en/php-sdk-docs/php-app/php-unit-testing.md @@ -0,0 +1,285 @@ +--- +type: docs +title: "Unit Testing" +linkTitle: "Unit Testing" +weight: 1000 +description: Unit Testing +no_list: true +--- + +Unit and integration tests are first-class citizens with the PHP SDK. Using the DI container, mocks, stubs, +and the provided `\Dapr\Mocks\TestClient` allows you to have very fine-grained tests. + +## Testing Actors + +With actors, there are two things we're interested in while the actor is under test: + +1. The returned result based on an initial state +2. 
The resulting state based on the initial state + +{{< tabpane text=true >}} + +{{% tab header="integration test with TestClient" %}} + +Here's an example test a very simple actor that updates its state and returns a specific value: + +```php +state->number % 2 === 0) { + return false; + } + $this->state->number += 1; + + return true; + } +} + +// TheTest.php + +class TheTest extends \PHPUnit\Framework\TestCase +{ + private \DI\Container $container; + + public function setUp(): void + { + parent::setUp(); + // create a default app and extract the DI container from it + $app = \Dapr\App::create( + configure: fn(\DI\ContainerBuilder $builder) => $builder->addDefinitions( + ['dapr.actors' => [TestActor::class]], + [\Dapr\DaprClient::class => \DI\autowire(\Dapr\Mocks\TestClient::class)] + )); + $app->run(fn(\DI\Container $container) => $this->container = $container); + } + + public function testIncrementsWhenOdd() + { + $id = uniqid(); + $runtime = $this->container->get(\Dapr\Actors\ActorRuntime::class); + $client = $this->getClient(); + + // return the current state from http://localhost:1313/reference/api/actors_api/ + $client->register_get("/actors/TestActor/$id/state/number", code: 200, data: 3); + + // ensure it increments from http://localhost:1313/reference/api/actors_api/ + $client->register_post( + "/actors/TestActor/$id/state", + code: 204, + response_data: null, + expected_request: [ + [ + 'operation' => 'upsert', + 'request' => [ + 'key' => 'number', + 'value' => 4, + ], + ], + ] + ); + + $result = $runtime->resolve_actor( + 'TestActor', + $id, + fn($actor) => $runtime->do_method($actor, 'oddIncrement', null) + ); + $this->assertTrue($result); + } + + private function getClient(): \Dapr\Mocks\TestClient + { + return $this->container->get(\Dapr\DaprClient::class); + } +} +``` + +{{% /tab %}} +{{% tab header="unit test" %}} + +```php +state->number % 2 === 0) { + return false; + } + $this->state->number += 1; + + return true; + } +} + +// TheTest.php + +class 
TheTest extends \PHPUnit\Framework\TestCase +{ + public function testNotIncrementsWhenEven() { + $container = new \DI\Container(); + $state = new TestState($container, $container); + $state->number = 4; + $id = uniqid(); + $actor = new TestActor($id, $state); + $this->assertFalse($actor->oddIncrement()); + $this->assertSame(4, $state->number); + } +} +``` + +{{% /tab %}} + +{{< /tabpane >}} + +## Testing Transactions + +When building on transactions, you'll likely want to test how a failed transaction is handled. In order to do that, you +need to inject failures and ensure the transaction matches what you expect. + +{{< tabpane text=true >}} + +{{% tab header="integration test with TestClient" %}} + +```php +state->begin(); + $this->state->value = "hello world"; + $this->state->commit(); + } +} + +// TheTest.php +class TheTest extends \PHPUnit\Framework\TestCase { + private \DI\Container $container; + + public function setUp(): void + { + parent::setUp(); + $app = \Dapr\App::create(configure: fn(\DI\ContainerBuilder $builder) + => $builder->addDefinitions([\Dapr\DaprClient::class => \DI\autowire(\Dapr\Mocks\TestClient::class)])); + $this->container = $app->run(fn(\DI\Container $container) => $container); + } + + private function getClient(): \Dapr\Mocks\TestClient { + return $this->container->get(\Dapr\DaprClient::class); + } + + public function testTransactionFailure() { + $client = $this->getClient(); + + // create a response from {{% ref state_api %}} + $client->register_post('/state/statestore/bulk', code: 200, response_data: [ + [ + 'key' => 'value', + // no previous value + ], + ], expected_request: [ + 'keys' => ['value'], + 'parallelism' => 10 + ]); + $client->register_post('/state/statestore/transaction', + code: 200, + response_data: null, + expected_request: [ + 'operations' => [ + [ + 'operation' => 'upsert', + 'request' => [ + 'key' => 'value', + 'value' => 'hello world' + ] + ] + ] + ] + ); + $state = new MyState($this->container, $this->container); + 
$service = new SomeService($state); + $service->doWork(); + $this->assertSame('hello world', $state->value); + } +} +``` + +{{% /tab %}} +{{% tab header="unit test" %}} + +```php +state->begin(); + $this->state->value = "hello world"; + $this->state->commit(); + } +} + +// TheTest.php +class TheTest extends \PHPUnit\Framework\TestCase { + public function testTransactionFailure() { + $state = $this->createStub(MyState::class); + $service = new SomeService($state); + $service->doWork(); + $this->assertSame('hello world', $state->value); + } +} +``` + +{{% /tab %}} + +{{< /tabpane >}} diff --git a/sdkdocs/php/content/en/php-sdk-docs/php-pubsub/_index.md b/sdkdocs/php/content/en/php-sdk-docs/php-pubsub/_index.md new file mode 100644 index 00000000000..e4dc661265b --- /dev/null +++ b/sdkdocs/php/content/en/php-sdk-docs/php-pubsub/_index.md @@ -0,0 +1,63 @@ +--- +type: docs +title: "Publish and Subscribe with PHP" +linkTitle: "Publish and Subscribe" +weight: 1000 +description: How to use +no_list: true +--- + +With Dapr, you can publish anything, including cloud events. The SDK contains a simple cloud event implementation, but +you can also just pass an array that conforms to the cloud event spec or use another library. + +```php +post('/publish', function(\Dapr\Client\DaprClient $daprClient) { + $daprClient->publishEvent(pubsubName: 'pubsub', topicName: 'my-topic', data: ['something' => 'happened']); +}); +``` + +For more information about publish/subscribe, check out [the howto]({{% ref howto-publish-subscribe.md %}}). + +## Data content type + +The PHP SDK allows setting the data content type either when constructing a custom cloud event, or when publishing raw +data. 
+ +{{< tabpane text=true >}} + +{{% tab header="CloudEvent" %}} + +```php +data = $xml; +$event->data_content_type = 'application/xml'; +``` + +{{% /tab %}} +{{% tab header="Raw" %}} + +```php +publishEvent(pubsubName: 'pubsub', topicName: 'my-topic', data: $raw_data, contentType: 'application/octet-stream'); +``` + +{{% alert title="Binary data" color="warning" %}} + +Only `application/octet-steam` is supported for binary data. + +{{% /alert %}} + +{{% /tab %}} + +{{< /tabpane >}} + +## Receiving cloud events + +In your subscription handler, you can have the DI Container inject either a `Dapr\PubSub\CloudEvent` or an `array` into +your controller. The former does some validation to ensure you have a proper event. If you need direct access to the +data, or the events do not conform to the spec, use an `array`. diff --git a/sdkdocs/php/content/en/php-sdk-docs/php-serialization.md b/sdkdocs/php/content/en/php-sdk-docs/php-serialization.md new file mode 100644 index 00000000000..99962d40ed8 --- /dev/null +++ b/sdkdocs/php/content/en/php-sdk-docs/php-serialization.md @@ -0,0 +1,55 @@ +--- +type: docs +title: "Custom Serialization" +linkTitle: "Custom Serializers" +weight: 1000 +description: How to configure serialization +no_list: true +--- + +Dapr uses JSON serialization and thus (complex) type information is lost when sending/receiving data. + +## Serialization + +When returning an object from a controller, passing an object to the `DaprClient`, or storing an object in a state store, +only public properties are scanned and serialized. You can customize this behavior by implementing `\Dapr\Serialization\ISerialize`. +For example, if you wanted to create an ID type that serialized to a string, you may implement it like so: + +```php +id; + } +} +``` + +This works for any type that we have full ownership over, however, it doesn't work for classes from libraries or PHP itself. 
+For that, you need to register a custom serializer with the DI container: + +```php + [SomeClass::class => new SerializeSomeClass()], +]; +``` + +## Deserialization + +Deserialization works exactly the same way, except the interface is `\Dapr\Deserialization\Deserializers\IDeserialize`. diff --git a/sdkdocs/php/content/en/php-sdk-docs/php-state/_index.md b/sdkdocs/php/content/en/php-sdk-docs/php-state/_index.md new file mode 100644 index 00000000000..193bd4dcbb0 --- /dev/null +++ b/sdkdocs/php/content/en/php-sdk-docs/php-state/_index.md @@ -0,0 +1,99 @@ +--- +type: docs +title: "State Management with PHP" +linkTitle: "State management" +weight: 1000 +description: How to use +no_list: true +--- + +Dapr offers a great modular approach to using state in your application. The best way to learn the basics is to visit +[the howto]({{% ref howto-get-save-state.md %}}). + +## Metadata + +Many state components allow you to pass metadata to the component to control specific aspects of the component's +behavior. The PHP SDK allows you to pass that metadata through: + +```php +run( + fn(\Dapr\State\StateManager $stateManager) => + $stateManager->save_state('statestore', new \Dapr\State\StateItem('key', 'value', metadata: ['port' => '112']))); + +// using the DaprClient +$app->run(fn(\Dapr\Client\DaprClient $daprClient) => $daprClient->saveState(storeName: 'statestore', key: 'key', value: 'value', metadata: ['port' => '112'])) +``` + +This is an example of how you might pass the port metadata to [Cassandra]({{% ref setup-cassandra.md %}}). + +Every state operation allows passing metadata. 
+ +## Consistency/concurrency + +In the PHP SDK, there are four classes that represent the four different types of consistency and concurrency in Dapr: + +```php +}} + +{{% tab header="Transaction prefix" %}} + +```php +run(function (TransactionObject $object ) { + $object->begin(prefix: 'my-prefix-'); + $object->key = 'value'; + // commit to key `my-prefix-key` + $object->commit(); +}); +``` + +{{% /tab %}} +{{% tab header="StateManager prefix" %}} + +```php +run(function(\Dapr\State\StateManager $stateManager) { + $stateManager->load_object($obj = new StateObject(), prefix: 'my-prefix-'); + // original value is from `my-prefix-key` + $obj->key = 'value'; + // save to `my-prefix-key` + $stateManager->save_object($obj, prefix: 'my-prefix-'); +}); +``` + +{{% /tab %}} + +{{< /tabpane >}} diff --git a/sdkdocs/php/readme.md b/sdkdocs/php/readme.md new file mode 100644 index 00000000000..e69de29bb2d diff --git a/sdkdocs/pluggable-components/dotnet b/sdkdocs/pluggable-components/dotnet deleted file mode 160000 index 16213ef2f70..00000000000 --- a/sdkdocs/pluggable-components/dotnet +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 16213ef2f70cce4c443a357d7eb8f3f0d3a96aeb diff --git a/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/_index.md b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/_index.md new file mode 100644 index 00000000000..8305dcc61eb --- /dev/null +++ b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/_index.md @@ -0,0 +1,202 @@ +--- +type: docs +title: "Getting started with the Dapr pluggable components .NET SDK" +linkTitle: ".NET" +weight: 1000 +description: How to get up and running with the Dapr pluggable components .NET SDK +no_list: true +is_preview: true +cascade: + github_repo: https://github.com/dapr-sandbox/components-dotnet-sdk + github_subdir: daprdocs/content/en/dotnet-sdk-docs + path_base_for_github_subdir: 
content/en/developing-applications/develop-components/pluggable-components/pluggable-components-sdks/pluggable-components-dotnet/ + github_branch: main +--- + +Dapr offers NuGet packages to help with the development of .NET pluggable components. + +## Prerequisites + +- [.NET 6 SDK](https://dotnet.microsoft.com/download/dotnet) or later +- [Dapr 1.9 CLI]({{% ref install-dapr-cli.md %}}) or later +- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) +- Linux, Mac, or Windows (with WSL) + +{{% alert title="Note" color="primary" %}} +Development of Dapr pluggable components on Windows requires WSL as some development platforms do not fully support Unix Domain Sockets on "native" Windows. +{{% /alert %}} + +## Project creation + +Creating a pluggable component starts with an empty ASP.NET project. + +```bash +dotnet new web --name +``` + +## Add NuGet packages + +Add the Dapr .NET pluggable components NuGet package. + +```bash +dotnet add package Dapr.PluggableComponents.AspNetCore +``` + +## Create application and service + +Creating a Dapr pluggable component application is similar to creating an ASP.NET application. In `Program.cs`, replace the `WebApplication` related code with the Dapr `DaprPluggableComponentsApplication` equivalent. + +```csharp +using Dapr.PluggableComponents; + +var app = DaprPluggableComponentsApplication.Create(); + +app.RegisterService( + "", + serviceBuilder => + { + // Register one or more components with this service. + }); + +app.Run(); +``` + +This creates an application with a single service. Each service: + +- Corresponds to a single Unix Domain Socket +- Can host one or more component types + +{{% alert title="Note" color="primary" %}} +Only a single component of each type can be registered with an individual service. However, [multiple components of the same type can be spread across multiple services]({{% ref dotnet-multiple-services %}}). 
+{{% /alert %}} + +## Implement and register components + + - [Implementing an input/output binding component]({{% ref dotnet-bindings %}}) + - [Implementing a pub-sub component]({{% ref dotnet-pub-sub %}}) + - [Implementing a state store component]({{% ref dotnet-state-store %}}) + +## Test components locally + +Pluggable components can be tested by starting the application on the command line and configuring a Dapr sidecar to use it. + +To start the component, in the application directory: + +```bash +dotnet run +``` + +To configure Dapr to use the component, in the resources path directory: + +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: +spec: + type: state. + version: v1 + metadata: + - name: key1 + value: value1 + - name: key2 + value: value2 +``` + +Any `metadata` properties will be passed to the component via its `IPluggableComponent.InitAsync()` method when the component is instantiated. + +To start Dapr (and, optionally, the service making use of the component): + +```bash +dapr run --app-id --resources-path ... +``` + +At this point, the Dapr sidecar will have started and connected via Unix Domain Socket to the component. You can then interact with the component either: +- Through the service using the component (if started), or +- By using the Dapr HTTP or gRPC API directly + +## Create Container + +There are several ways to create a container for your component for eventual deployment. + +### Use .NET SDK + +The [.NET 7 and later SDKs](https://dotnet.microsoft.com/download/dotnet) enable you to create a .NET-based container for your application *without* a `Dockerfile`, even for those targeting earlier versions of the .NET SDK. This is probably the simplest way of generating a container for your component today. + +{{% alert title="Note" color="primary" %}} +Currently, the .NET 7 SDK requires Docker Desktop on the local machine and a special NuGet package to build containers.
Future versions of .NET SDK plan to eliminate those requirements. + +Multiple versions of the .NET SDK can be installed on the local machine at the same time. +{{% /alert %}} + +Add the `Microsoft.NET.Build.Containers` NuGet package to the component project. + +```bash +dotnet add package Microsoft.NET.Build.Containers +``` + +Publish the application as a container: + +```bash +dotnet publish --os linux --arch x64 /t:PublishContainer -c Release +``` + +{{% alert title="Note" color="primary" %}} +Ensure the architecture argument `--arch x64` matches that of the component's ultimate deployment target. By default, the architecture of the generated container matches that of the local machine. For example, if the local machine is ARM64-based (for example, a M1 or M2 Mac) and the argument is omitted, an ARM64 container will be generated which may not be compatible with deployment targets expecting an AMD64 container. +{{% /alert %}} + +For more configuration options, such as controlling the container name, tag, and base image, see the [.NET publish as container guide](https://learn.microsoft.com/dotnet/core/docker/publish-as-container). + +### Use a Dockerfile + +While there are tools that can generate a `Dockerfile` for a .NET application, the .NET SDK itself does not. A typical `Dockerfile` might look like: + +```dockerfile +FROM mcr.microsoft.com/dotnet/aspnet: AS base +WORKDIR /app + +# Creates a non-root user with an explicit UID and adds permission to access the /app folder +# For more info, please refer to https://aka.ms/vscode-docker-dotnet-configure-containers +RUN adduser -u 5678 --disabled-password --gecos "" appuser && chown -R appuser /app +USER appuser + +FROM mcr.microsoft.com/dotnet/sdk: AS build +WORKDIR /src +COPY [".csproj", "/"] +RUN dotnet restore "/.csproj" +COPY . . 
+WORKDIR "/src/" +RUN dotnet build ".csproj" -c Release -o /app/build + +FROM build AS publish +RUN dotnet publish ".csproj" -c Release -o /app/publish /p:UseAppHost=false + +FROM base AS final +WORKDIR /app +COPY --from=publish /app/publish . +ENTRYPOINT ["dotnet", ".dll"] +``` + +Build the image: + +```bash +docker build -f Dockerfile -t : . +``` + +{{% alert title="Note" color="primary" %}} +Paths for `COPY` operations in the `Dockerfile` are relative to the Docker context passed when building the image, while the Docker context itself will vary depending on the needs of the project being built (for example, if it has referenced projects). In the example above, the assumption is that the Docker context is the component project directory. +{{% /alert %}} + +## Demo + +Watch this video for a [demo on building pluggable components with .NET](https://youtu.be/s1p9MNl4VGo?t=1606): + + + +## Next steps + +- [Learn advanced steps for the Pluggable Component .NET SDK]({{% ref "dotnet-advanced" %}}) +- Learn more about using the Pluggable Component .NET SDK for: + - [Bindings]({{% ref "dotnet-bindings" %}}) + - [Pub/sub]({{% ref "dotnet-pub-sub" %}}) + - [State store]({{% ref "dotnet-state-store" %}}) \ No newline at end of file diff --git a/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-advanced/_index.md b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-advanced/_index.md new file mode 100644 index 00000000000..be9d7da5d79 --- /dev/null +++ b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-advanced/_index.md @@ -0,0 +1,10 @@ +--- +type: docs +title: "Advanced uses of the Dapr pluggable components .NET SDK" +linkTitle: "Advanced" +weight: 2000 +description: How to use advanced techniques with the Dapr pluggable components .NET SDK +is_preview: true +--- + +While not typically needed by most, these guides show advanced ways you can configure your .NET pluggable components.
\ No newline at end of file diff --git a/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-advanced/dotnet-application-environment.md b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-advanced/dotnet-application-environment.md new file mode 100644 index 00000000000..83349c195b4 --- /dev/null +++ b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-advanced/dotnet-application-environment.md @@ -0,0 +1,136 @@ +--- +type: docs +title: "Application Environment of a .NET Dapr pluggable component" +linkTitle: "Application environment" +weight: 1000 +description: How to configure the environment of a .NET pluggable component +no_list: true +is_preview: true +--- + +A .NET Dapr pluggable component application can be configured for dependency injection, logging, and configuration values similarly to ASP.NET applications. The `DaprPluggableComponentsApplication` exposes a similar set of configuration properties to that exposed by `WebApplicationBuilder`. + +## Dependency injection + +Components registered with services can participate in dependency injection. Arguments in the components constructor will be injected during creation, assuming those types have been registered with the application. You can register them through the `IServiceCollection` exposed by `DaprPluggableComponentsApplication`. + +```csharp +var app = DaprPluggableComponentsApplication.Create(); + +// Register MyService as the singleton implementation of IService. +app.Services.AddSingleton(); + +app.RegisterService( + "", + serviceBuilder => + { + serviceBuilder.RegisterStateStore(); + }); + +app.Run(); + +interface IService +{ + // ... +} + +class MyService : IService +{ + // ... +} + +class MyStateStore : IStateStore +{ + // Inject IService on creation of the state store. + public MyStateStore(IService service) + { + // ... + } + + // ... 
+} +``` + +{{% alert title="Warning" color="warning" %}} +Use of `IServiceCollection.AddScoped()` is not recommended. Such instances' lifetimes are bound to a single gRPC method call, which does not match the lifetime of an individual component instance. +{{% /alert %}} + +## Logging + +.NET Dapr pluggable components can use the [standard .NET logging mechanisms](https://learn.microsoft.com/dotnet/core/extensions/logging). The `DaprPluggableComponentsApplication` exposes an `ILoggingBuilder`, through which it can be configured. + +{{% alert title="Note" color="primary" %}} +Like with ASP.NET, logger services (for example, `ILogger`) are pre-registered. +{{% /alert %}} + +```csharp +var app = DaprPluggableComponentsApplication.Create(); + +// Reset the default loggers and setup new ones. +app.Logging.ClearProviders(); +app.Logging.AddConsole(); + +app.RegisterService( + "", + serviceBuilder => + { + serviceBuilder.RegisterStateStore(); + }); + +app.Run(); + +class MyStateStore : IStateStore +{ + // Inject a logger on creation of the state store. + public MyStateStore(ILogger logger) + { + // ... + } + + // ... +} +``` + +## Configuration Values + +Since .NET pluggable components are built on ASP.NET, they can use its [standard configuration mechanisms](https://learn.microsoft.com/dotnet/core/extensions/configuration) and default to the same set of [pre-registered providers](https://learn.microsoft.com/aspnet/core/fundamentals/configuration/?view=aspnetcore-6.0#default-application-configuration-sources). The `DaprPluggableComponentsApplication` exposes an `IConfigurationManager` through which it can be configured. + +```csharp +var app = DaprPluggableComponentsApplication.Create(); + +// Reset the default configuration providers and add new ones. +((IConfigurationBuilder)app.Configuration).Sources.Clear(); +app.Configuration.AddEnvironmentVariables(); + +// Get configuration value on startup. 
+var value = app.Configuration[""]; + +app.RegisterService( + "", + serviceBuilder => + { + serviceBuilder.RegisterStateStore(); + }); + +app.Run(); + +class MyStateStore : IStateStore +{ + // Inject the configuration on creation of the state store. + public MyStateStore(IConfiguration configuration) + { + // ... + } + + // ... +} +``` + +## Next steps + +- [Learn more about the component lifetime]({{% ref "dotnet-component-lifetime" %}}) +- [Learn more about multiple services]({{% ref "dotnet-multiple-services" %}}) +- Learn more about using the Pluggable Component .NET SDK for: + - [Bindings]({{% ref "dotnet-bindings" %}}) + - [Pub/sub]({{% ref "dotnet-pub-sub" %}}) + - [State store]({{% ref "dotnet-state-store" %}}) \ No newline at end of file diff --git a/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-advanced/dotnet-component-lifetime.md b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-advanced/dotnet-component-lifetime.md new file mode 100644 index 00000000000..9eac96e3ced --- /dev/null +++ b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-advanced/dotnet-component-lifetime.md @@ -0,0 +1,80 @@ +--- +type: docs +title: "Lifetimes of .NET Dapr pluggable components" +linkTitle: "Component lifetime" +weight: 1000 +description: How to control the lifetime of a .NET pluggable component +no_list: true +is_preview: true +--- + +There are two ways to register a component: + + - The component operates as a singleton, with lifetime managed by the SDK + - A component's lifetime is determined by the pluggable component and can be multi-instance or a singleton, as needed + +## Singleton components + +Components registered _by type_ are singletons: one instance will serve all configured components of that type associated with that socket. This approach is best when only a single component of that type exists and is shared amongst Dapr applications.
+ +```csharp +var app = DaprPluggableComponentsApplication.Create(); + +app.RegisterService( + "service-a", + serviceBuilder => + { + serviceBuilder.RegisterStateStore(); + }); + +app.Run(); + +class SingletonStateStore : IStateStore +{ + // ... +} +``` + +## Multi-instance components + +Components can be registered by passing a "factory method". This method will be called for each configured component of that type associated with that socket. The method returns the instance to associate with that component (whether shared or not). This approach is best when multiple components of the same type may be configured with different sets of metadata, when component operations need to be isolated from one another, etc. + +The factory method will be passed context, such as the ID of the configured Dapr component, that can be used to differentiate component instances. + +```csharp +var app = DaprPluggableComponentsApplication.Create(); + +app.RegisterService( + "service-a", + serviceBuilder => + { + serviceBuilder.RegisterStateStore( + context => + { + return new MultiStateStore(context.InstanceId); + }); + }); + +app.Run(); + +class MultiStateStore : IStateStore +{ + private readonly string instanceId; + + public MultiStateStore(string instanceId) + { + this.instanceId = instanceId; + } + + // ... 
+} +``` + +## Next steps + +- [Learn more about the application environment]({{% ref "dotnet-application-environment" %}}) +- [Learn more about multiple services]({{% ref "dotnet-multiple-services" %}}) +- Learn more about using the Pluggable Component .NET SDK for: + - [Bindings]({{% ref "dotnet-bindings" %}}) + - [Pub/sub]({{% ref "dotnet-pub-sub" %}}) + - [State store]({{% ref "dotnet-state-store" %}}) \ No newline at end of file diff --git a/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-advanced/dotnet-multiple-services.md b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-advanced/dotnet-multiple-services.md new file mode 100644 index 00000000000..c5ea6a5e44e --- /dev/null +++ b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-advanced/dotnet-multiple-services.md @@ -0,0 +1,98 @@ +--- +type: docs +title: "Multiple services in a .NET Dapr pluggable component" +linkTitle: "Multiple services" +weight: 1000 +description: How to expose multiple services from a .NET pluggable component +no_list: true +is_preview: true +--- + +A pluggable component can host multiple components of varying types. You might do this: +- To minimize the number of sidecars running in a cluster +- To group related components that are likely to share libraries and implementation, such as: + - A database exposed both as a general state store, and + - Output bindings that allow more specific operations. + +Each Unix Domain Socket can manage calls to one component of each type. To host multiple components of the _same_ type, you can spread those types across multiple sockets. The SDK binds each socket to a "service", with each service composed of one or more component types. + +## Registering multiple services + +Each call to `RegisterService()` binds a socket to a set of registered components, where one of each type of component can be registered per service. 
+ +```csharp +var app = DaprPluggableComponentsApplication.Create(); + +app.RegisterService( + "service-a", + serviceBuilder => + { + serviceBuilder.RegisterStateStore(); + serviceBuilder.RegisterBinding(); + }); + +app.RegisterService( + "service-b", + serviceBuilder => + { + serviceBuilder.RegisterStateStore(); + }); + +app.Run(); + +class MyDatabaseStateStore : IStateStore +{ + // ... +} + +class MyDatabaseOutputBinding : IOutputBinding +{ + // ... +} + +class AnotherStateStore : IStateStore +{ + // ... +} +``` + +## Configuring Multiple Components + +Configuring Dapr to use the hosted components is the same as for any single component - the component YAML refers to the associated socket. + +```yaml +# +# This component uses the state store associated with socket `state-store-a` +# +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: state-store-a +spec: + type: state.service-a + version: v1 + metadata: [] +``` + +```yaml +# +# This component uses the state store associated with socket `state-store-b` +# +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: state-store-b +spec: + type: state.service-b + version: v1 + metadata: [] +``` + +## Next steps + +- [Learn more about the component lifetime]({{% ref "dotnet-component-lifetime" %}}) +- [Learn more about the application environment]({{% ref "dotnet-application-environment" %}}) +- Learn more about using the Pluggable Component .NET SDK for: + - [Bindings]({{% ref "dotnet-bindings" %}}) + - [Pub/sub]({{% ref "dotnet-pub-sub" %}}) + - [State store]({{% ref "dotnet-state-store" %}}) \ No newline at end of file diff --git a/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-bindings/_index.md b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-bindings/_index.md new file mode 100644 index 00000000000..b7d1219e958 --- /dev/null +++ b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-bindings/_index.md @@ -0,0 +1,139 @@ +--- +type: 
docs +title: "Implementing a .NET input/output binding component" +linkTitle: "Bindings" +weight: 1000 +description: How to create an input/output binding with the Dapr pluggable components .NET SDK +no_list: true +is_preview: true +--- + +Creating a binding component requires just a few basic steps. + +## Add bindings namespaces + +Add `using` statements for the bindings related namespaces. + +```csharp +using Dapr.PluggableComponents.Components; +using Dapr.PluggableComponents.Components.Bindings; +``` + +## Input bindings: Implement `IInputBinding` + +Create a class that implements the `IInputBinding` interface. + +```csharp +internal sealed class MyBinding : IInputBinding +{ + public Task InitAsync(MetadataRequest request, CancellationToken cancellationToken = default) + { + // Called to initialize the component with its configured metadata... + } + + public async Task ReadAsync(MessageDeliveryHandler deliveryHandler, CancellationToken cancellationToken = default) + { + // Until canceled, check the underlying store for messages and deliver them to the Dapr runtime... + } +} +``` + +Calls to the `ReadAsync()` method are "long-lived", in that the method is not expected to return until canceled (for example, via the `cancellationToken`). As messages are read from the underlying store of the component, they are delivered to the Dapr runtime via the `deliveryHandler` callback. Delivery allows the component to receive notification if/when the application (served by the Dapr runtime) acknowledges processing of the message. + +```csharp + public async Task ReadAsync(MessageDeliveryHandler deliveryHandler, CancellationToken cancellationToken = default) + { + TimeSpan pollInterval = // Polling interval (e.g. from initialization metadata)... + + // Poll the underlying store until canceled... + while (!cancellationToken.IsCancellationRequested) + { + var messages = // Poll underlying store for messages...
+ + foreach (var message in messages) + { + // Deliver the message to the Dapr runtime... + await deliveryHandler( + new InputBindingReadResponse + { + // Set the message content... + }, + // Callback invoked when application acknowledges the message... + async request => + { + // Process response data or error message... + }) + } + + // Wait for the next poll (or cancellation)... + await Task.Delay(pollInterval, cancellationToken); + } + } +``` + +## Output bindings: Implement `IOutputBinding` + +Create a class that implements the `IOutputBinding` interface. + +```csharp +internal sealed class MyBinding : IOutputBinding +{ + public Task InitAsync(MetadataRequest request, CancellationToken cancellationToken = default) + { + // Called to initialize the component with its configured metadata... + } + + public Task InvokeAsync(OutputBindingInvokeRequest request, CancellationToken cancellationToken = default) + { + // Called to invoke a specific operation... + } + + public Task ListOperationsAsync(CancellationToken cancellationToken = default) + { + // Called to list the operations that can be invoked. + } +} +``` + +## Input and output binding components + +A component can be _both_ an input _and_ output binding, simply by implementing both interfaces. + +```csharp +internal sealed class MyBinding : IInputBinding, IOutputBinding +{ + // IInputBinding Implementation... + + // IOutputBinding Implementation... +} +``` + +## Register binding component + +In the main program file (for example, `Program.cs`), register the binding component in an application service. + +```csharp +using Dapr.PluggableComponents; + +var app = DaprPluggableComponentsApplication.Create(); + +app.RegisterService( + "", + serviceBuilder => + { + serviceBuilder.RegisterBinding(); + }); + +app.Run(); +``` + +{{% alert title="Note" color="primary" %}} +A component that implements both `IInputBinding` and `IOutputBinding` will be registered as both an input and output binding. 
+{{% /alert %}} + +## Next steps + +- [Learn advanced steps for the Pluggable Component .NET SDK]({{% ref "dotnet-advanced" %}}) +- Learn more about using the Pluggable Component .NET SDK for: + - [Pub/sub]({{% ref "dotnet-pub-sub" %}}) + - [State store]({{% ref "dotnet-state-store" %}}) \ No newline at end of file diff --git a/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-pub-sub/_index.md b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-pub-sub/_index.md new file mode 100644 index 00000000000..8120a8e6c52 --- /dev/null +++ b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-pub-sub/_index.md @@ -0,0 +1,107 @@ +--- +type: docs +title: "Implementing a .NET pub/sub component" +linkTitle: "Pub/sub" +weight: 1000 +description: How to create a pub/sub with the Dapr pluggable components .NET SDK +no_list: true +is_preview: true +--- + +Creating a pub/sub component requires just a few basic steps. + +## Add pub/sub namespaces + +Add `using` statements for the pub/sub related namespaces. + +```csharp +using Dapr.PluggableComponents.Components; +using Dapr.PluggableComponents.Components.PubSub; +``` + +## Implement `IPubSub` + +Create a class that implements the `IPubSub` interface. + +```csharp +internal sealed class MyPubSub : IPubSub +{ + public Task InitAsync(MetadataRequest request, CancellationToken cancellationToken = default) + { + // Called to initialize the component with its configured metadata... + } + + public Task PublishAsync(PubSubPublishRequest request, CancellationToken cancellationToken = default) + { + // Send the message to the "topic"... + } + + public Task PullMessagesAsync(PubSubPullMessagesTopic topic, MessageDeliveryHandler deliveryHandler, CancellationToken cancellationToken = default) + { + // Until canceled, check the topic for messages and deliver them to the Dapr runtime... 
+ } +} +``` + +Calls to the `PullMessagesAsync()` method are "long-lived", in that the method is not expected to return until canceled (for example, via the `cancellationToken`). The "topic" from which messages should be pulled is passed via the `topic` argument, while the delivery to the Dapr runtime is performed via the `deliveryHandler` callback. Delivery allows the component to receive notification if/when the application (served by the Dapr runtime) acknowledges processing of the message. + +```csharp + public async Task PullMessagesAsync(PubSubPullMessagesTopic topic, MessageDeliveryHandler deliveryHandler, CancellationToken cancellationToken = default) + { + TimeSpan pollInterval = // Polling interval (e.g. from initialization metadata)... + + // Poll the topic until canceled... + while (!cancellationToken.IsCancellationRequested) + { + var messages = // Poll topic for messages... + + foreach (var message in messages) + { + // Deliver the message to the Dapr runtime... + await deliveryHandler( + new PubSubPullMessagesResponse(topicName) + { + // Set the message content... + }, + // Callback invoked when application acknowledges the message... + async errorMessage => + { + // An empty message indicates the application successfully processed the message... + if (String.IsNullOrEmpty(errorMessage)) + { + // Delete the message from the topic... + } + }) + } + + // Wait for the next poll (or cancellation)... + await Task.Delay(pollInterval, cancellationToken); + } + } +``` + +## Register pub/sub component + +In the main program file (for example, `Program.cs`), register the pub/sub component with an application service.
+ +```csharp +using Dapr.PluggableComponents; + +var app = DaprPluggableComponentsApplication.Create(); + +app.RegisterService( + "", + serviceBuilder => + { + serviceBuilder.RegisterPubSub(); + }); + +app.Run(); +``` + +## Next steps + +- [Learn advanced steps for the Pluggable Component .NET SDK]({{% ref "dotnet-advanced" %}}) +- Learn more about using the Pluggable Component .NET SDK for: + - [Bindings]({{% ref "dotnet-bindings" %}}) + - [State store]({{% ref "dotnet-state-store" %}}) \ No newline at end of file diff --git a/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-state-store/_index.md b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-state-store/_index.md new file mode 100644 index 00000000000..acef4c5d38b --- /dev/null +++ b/sdkdocs/pluggable-components/dotnet/content/en/dotnet-sdk-docs/dotnet-state-store/_index.md @@ -0,0 +1,172 @@ +--- +type: docs +title: "Implementing a .NET state store component" +linkTitle: "State Store" +weight: 1000 +description: How to create a state store with the Dapr pluggable components .NET SDK +no_list: true +is_preview: true +--- + +Creating a state store component requires just a few basic steps. + +## Add state store namespaces + +Add `using` statements for the state store related namespaces. + +```csharp +using Dapr.PluggableComponents.Components; +using Dapr.PluggableComponents.Components.StateStore; +``` + +## Implement `IStateStore` + +Create a class that implements the `IStateStore` interface. + +```csharp +internal sealed class MyStateStore : IStateStore +{ + public Task DeleteAsync(StateStoreDeleteRequest request, CancellationToken cancellationToken = default) + { + // Delete the requested key from the state store... + } + + public Task GetAsync(StateStoreGetRequest request, CancellationToken cancellationToken = default) + { + // Get the requested key value from the state store, else return null...
+ } + + public Task InitAsync(MetadataRequest request, CancellationToken cancellationToken = default) + { + // Called to initialize the component with its configured metadata... + } + + public Task SetAsync(StateStoreSetRequest request, CancellationToken cancellationToken = default) + { + // Set the requested key to the specified value in the state store... + } +} +``` + +## Register state store component + +In the main program file (for example, `Program.cs`), register the state store with an application service. + +```csharp +using Dapr.PluggableComponents; + +var app = DaprPluggableComponentsApplication.Create(); + +app.RegisterService( + "", + serviceBuilder => + { + serviceBuilder.RegisterStateStore(); + }); + +app.Run(); +``` + +## Bulk state stores + +State stores that intend to support bulk operations should implement the optional `IBulkStateStore` interface. Its methods mirror those of the base `IStateStore` interface, but include multiple requested values. + +{{% alert title="Note" color="primary" %}} +The Dapr runtime will emulate bulk state store operations for state stores that do _not_ implement `IBulkStateStore` by calling its operations individually. +{{% /alert %}} + +```csharp +internal sealed class MyStateStore : IStateStore, IBulkStateStore +{ + // ... + + public Task BulkDeleteAsync(StateStoreDeleteRequest[] requests, CancellationToken cancellationToken = default) + { + // Delete all of the requested values from the state store... + } + + public Task BulkGetAsync(StateStoreGetRequest[] requests, CancellationToken cancellationToken = default) + { + // Return the values of all of the requested values from the state store... + } + + public Task BulkSetAsync(StateStoreSetRequest[] requests, CancellationToken cancellationToken = default) + { + // Set all of the values of the requested keys in the state store... 
+ } +} +``` + +## Transactional state stores + +State stores that intend to support transactions should implement the optional `ITransactionalStateStore` interface. Its `TransactAsync()` method is passed a request with a sequence of delete and/or set operations to be performed within a transaction. The state store should iterate over the sequence and call each operation's `Visit()` method, passing callbacks that represent the action to take for each type of operation. + +```csharp +internal sealed class MyStateStore : IStateStore, ITransactionalStateStore +{ + // ... + + public async Task TransactAsync(StateStoreTransactRequest request, CancellationToken cancellationToken = default) + { + // Start transaction... + + try + { + foreach (var operation in request.Operations) + { + await operation.Visit( + async deleteRequest => + { + // Process delete request... + + }, + async setRequest => + { + // Process set request... + }); + } + } + catch + { + // Rollback transaction... + + throw; + } + + // Commit transaction... + } +} +``` + +## Queryable state stores + +State stores that intend to support queries should implement the optional `IQueryableStateStore` interface. Its `QueryAsync()` method is passed details about the query, such as the filter(s), result limits and pagination, and sort order(s) of the results. The state store should use those details to generate a set of values to return as part of its response. + +```csharp +internal sealed class MyStateStore : IStateStore, IQueryableStateStore +{ + // ... + + public Task QueryAsync(StateStoreQueryRequest request, CancellationToken cancellationToken = default) + { + // Generate and return results... + } +} +``` + +## ETag and other semantic error handling + +The Dapr runtime has additional handling of certain error conditions resulting from some state store operations. 
State stores can indicate such conditions by throwing specific exceptions from its operation logic: + +| Exception | Applicable Operations | Description +|---|---|---| +| `ETagInvalidException` | Delete, Set, Bulk Delete, Bulk Set | When an ETag is invalid | +| `ETagMismatchException`| Delete, Set, Bulk Delete, Bulk Set | When an ETag does not match an expected value | +| `BulkDeleteRowMismatchException` | Bulk Delete | When the number of affected rows does not match the expected rows | + +## Next steps + +- [Learn advanced steps for the Pluggable Component .NET SDK]({{% ref "dotnet-advanced" %}}) +- Learn more about using the Pluggable Component .NET SDK for: + - [Bindings]({{% ref "dotnet-bindings" %}}) + - [Pub/sub]({{% ref "dotnet-pub-sub" %}}) \ No newline at end of file diff --git a/sdkdocs/pluggable-components/go b/sdkdocs/pluggable-components/go deleted file mode 160000 index 25b080a8e8d..00000000000 --- a/sdkdocs/pluggable-components/go +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 25b080a8e8d1d897f32ee2ce31e617f89f972e32 diff --git a/sdkdocs/pluggable-components/go/content/en/go-sdk-docs/_index.md b/sdkdocs/pluggable-components/go/content/en/go-sdk-docs/_index.md new file mode 100644 index 00000000000..e818629ffe9 --- /dev/null +++ b/sdkdocs/pluggable-components/go/content/en/go-sdk-docs/_index.md @@ -0,0 +1,174 @@ +--- +type: docs +title: "Getting started with the Dapr pluggable components Go SDK" +linkTitle: "Go" +weight: 1000 +description: How to get up and running with the Dapr pluggable components Go SDK +no_list: true +is_preview: true +cascade: + github_repo: https://github.com/dapr-sandbox/components-go-sdk + github_subdir: daprdocs/content/en/go-sdk-docs + path_base_for_github_subdir: content/en/developing-applications/develop-components/pluggable-components/pluggable-components-sdks/pluggable-components-go/ + github_branch: main +--- + +Dapr offers packages to help with the development of Go pluggable components. 
+ +## Prerequisites + +- [Go 1.20](https://go.dev/dl/) or later +- [Dapr 1.9 CLI]({{% ref install-dapr-cli.md %}}) or later +- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) +- Linux, Mac, or Windows (with WSL) + +{{% alert title="Note" color="primary" %}} +Development of Dapr pluggable components on Windows requires WSL. Not all languages and SDKs expose Unix Domain Sockets on "native" Windows. +{{% /alert %}} + +## Application creation + +Creating a pluggable component starts with an empty Go application. + +```bash +mkdir example +cd example +go mod init example +``` + +## Import Dapr packages + +Import the Dapr pluggable components SDK package. + +```bash +go get github.com/dapr-sandbox/components-go-sdk@v0.1.0 +``` + +## Create main package + +In `main.go`, import the Dapr pluggable components package and run the application. + +```go +package main + +import ( + dapr "github.com/dapr-sandbox/components-go-sdk" +) + +func main() { + dapr.MustRun() +} +``` + +This creates an application with no components. You will need to implement and register one or more components. + +## Implement and register components + + - [Implementing an input/output binding component]({{% ref go-bindings %}}) + - [Implementing a pub/sub component]({{% ref go-pub-sub %}}) + - [Implementing a state store component]({{% ref go-state-store %}}) + +{{% alert title="Note" color="primary" %}} +Only a single component of each type can be registered with an individual service. However, [multiple components of the same type can be spread across multiple services]({{% ref go-advanced %}}). +{{% /alert %}} + +## Test components locally + +### Create the Dapr components socket directory + +Dapr communicates with pluggable components via Unix Domain Sockets files in a common directory. By default, both Dapr and pluggable components use the `/tmp/dapr-components-sockets` directory. You should create this directory if it does not already exist. 
+ +```bash +mkdir /tmp/dapr-components-sockets +``` + +### Start the pluggable component + +Pluggable components can be tested by starting the application on the command line. + +To start the component, in the application directory: + +```bash +go run main.go +``` + +### Configure Dapr to use the pluggable component + +To configure Dapr to use the component, create a component YAML file in the resources directory. For example, for a state store component: + +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: +spec: + type: state. + version: v1 + metadata: + - name: key1 + value: value1 + - name: key2 + value: value2 +``` + +Any `metadata` properties will be passed to the component via its `Store.Init(metadata state.Metadata)` method when the component is instantiated. + +### Start Dapr + +To start Dapr (and, optionally, the service making use of the component): + +```bash +dapr run --app-id --resources-path ... +``` + +At this point, the Dapr sidecar will have started and connected via Unix Domain Socket to the component. You can then interact with the component either: +- Through the service using the component (if started), or +- By using the Dapr HTTP or gRPC API directly + +## Create container + +Pluggable components are deployed as containers that run as sidecars to the application (like Dapr itself). A typical `Dockerfile` for creating a Docker image for a Go application might look like: + +```dockerfile +FROM golang:1.20-alpine AS builder + +WORKDIR /usr/src/app + +# Download dependencies +COPY go.mod go.sum ./ +RUN go mod download && go mod verify + +# Build the application +COPY . . +RUN go build -v -o /usr/src/bin/app . 
+ +FROM alpine:latest + +# Setup non-root user and permissions +RUN addgroup -S app && adduser -S app -G app +RUN mkdir /tmp/dapr-components-sockets && chown app /tmp/dapr-components-sockets + +# Copy application to runtime image +COPY --from=builder --chown=app /usr/src/bin/app /app + +USER app + +CMD ["/app"] +``` + +Build the image: + +```bash +docker build -f Dockerfile -t : . +``` + +{{% alert title="Note" color="primary" %}} +Paths for `COPY` operations in the `Dockerfile` are relative to the Docker context passed when building the image, while the Docker context itself will vary depending on the needs of the application being built. In the example above, the assumption is that the Docker context is the component application directory. +{{% /alert %}} + +## Next steps +- [Advanced techniques with the pluggable components Go SDK]({{% ref go-advanced %}}) +- Learn more about implementing: + - [Bindings]({{% ref go-bindings %}}) + - [State]({{% ref go-state-store %}}) + - [Pub/sub]({{% ref go-pub-sub %}}) diff --git a/sdkdocs/pluggable-components/go/content/en/go-sdk-docs/go-advanced/_index.md b/sdkdocs/pluggable-components/go/content/en/go-sdk-docs/go-advanced/_index.md new file mode 100644 index 00000000000..800827841ea --- /dev/null +++ b/sdkdocs/pluggable-components/go/content/en/go-sdk-docs/go-advanced/_index.md @@ -0,0 +1,76 @@ +--- +type: docs +title: "Advanced uses of the Dapr pluggable components Go SDK" +linkTitle: "Advanced" +weight: 2000 +description: How to use advanced techniques with the Dapr pluggable components Go SDK +is_preview: true +--- + +While not typically needed by most, these guides show advanced ways you can configure your Go pluggable components. + +## Component lifetime + +Pluggable components are registered by passing a "factory method" that is called for each configured Dapr component of that type associated with that socket. The method returns the instance associated with that Dapr component (whether shared or not). 
This allows multiple Dapr components of the same type to be configured with different sets of metadata, when component operations need to be isolated from one another, etc. + +## Registering multiple services + +Each call to `Register()` binds a socket to a registered pluggable component. One of each component type (input/output binding, pub/sub, and state store) can be registered per socket. + +```go +func main() { + dapr.Register("service-a", dapr.WithStateStore(func() state.Store { + return &components.MyDatabaseStoreComponent{} + })) + + dapr.Register("service-a", dapr.WithOutputBinding(func() bindings.OutputBinding { + return &components.MyDatabaseOutputBindingComponent{} + })) + + dapr.Register("service-b", dapr.WithStateStore(func() state.Store { + return &components.MyDatabaseStoreComponent{} + })) + + dapr.MustRun() +} +``` + +In the example above, a state store and output binding is registered with the socket `service-a` while another state store is registered with the socket `service-b`. + +## Configuring Multiple Components + +Configuring Dapr to use the hosted components is the same as for any single component - the component YAML refers to the associated socket. For example, to configure Dapr state stores for the two components registered above (to sockets `service-a` and `service-b`), you create two configuration files, each referencing their respective socket. 
+ +```yaml +# +# This component uses the state store associated with socket `service-a` +# +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: state-store-a +spec: + type: state.service-a + version: v1 + metadata: [] +``` + +```yaml +# +# This component uses the state store associated with socket `service-b` +# +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: state-store-b +spec: + type: state.service-b + version: v1 + metadata: [] +``` + +## Next steps +- Learn more about implementing: + - [Bindings]({{% ref go-bindings %}}) + - [State]({{% ref go-state-store %}}) + - [Pub/sub]({{% ref go-pub-sub %}}) diff --git a/sdkdocs/pluggable-components/go/content/en/go-sdk-docs/go-bindings/_index.md b/sdkdocs/pluggable-components/go/content/en/go-sdk-docs/go-bindings/_index.md new file mode 100644 index 00000000000..bd7b4500fdc --- /dev/null +++ b/sdkdocs/pluggable-components/go/content/en/go-sdk-docs/go-bindings/_index.md @@ -0,0 +1,131 @@ +--- +type: docs +title: "Implementing a Go input/output binding component" +linkTitle: "Bindings" +weight: 1000 +description: How to create an input/output binding with the Dapr pluggable components Go SDK +no_list: true +is_preview: true +--- + +Creating a binding component requires just a few basic steps. + +## Import bindings packages + +Create the file `components/inputbinding.go` and add `import` statements for the state store related packages. + +```go +package components + +import ( + "context" + "github.com/dapr/components-contrib/bindings" +) +``` + +## Input bindings: Implement the `InputBinding` interface + +Create a type that implements the `InputBinding` interface. + +```go +type MyInputBindingComponent struct { +} + +func (component *MyInputBindingComponent) Init(meta bindings.Metadata) error { + // Called to initialize the component with its configured metadata... 
+} + +func (component *MyInputBindingComponent) Read(ctx context.Context, handler bindings.Handler) error { + // Until canceled, check the underlying store for messages and deliver them to the Dapr runtime... +} +``` + +Calls to the `Read()` method are expected to set up a long-lived mechanism for retrieving messages but immediately return `nil` (or an error, if that mechanism could not be set up). The mechanism should end when canceled (for example, via the `ctx.Done() or ctx.Err() != nil`). As messages are read from the underlying store of the component, they are delivered to the Dapr runtime via the `handler` callback, which does not return until the application (served by the Dapr runtime) acknowledges processing of the message. + +```go +func (b *MyInputBindingComponent) Read(ctx context.Context, handler bindings.Handler) error { + go func() { + for { + err := ctx.Err() + + if err != nil { + return + } + + messages := // Poll for messages... + + for _, message := range messages { + handler(ctx, &bindings.ReadResponse{ + // Set the message content... + }) + } + + select { + case <-ctx.Done(): + case <-time.After(5 * time.Second): + } + } + }() + + return nil +} +``` + +## Output bindings: Implement the `OutputBinding` interface + +Create a type that implements the `OutputBinding` interface. + +```go +type MyOutputBindingComponent struct { +} + +func (component *MyOutputBindingComponent) Init(meta bindings.Metadata) error { + // Called to initialize the component with its configured metadata... +} + +func (component *MyOutputBindingComponent) Invoke(ctx context.Context, req *bindings.InvokeRequest) (*bindings.InvokeResponse, error) { + // Called to invoke a specific operation... +} + +func (component *MyOutputBindingComponent) Operations() []bindings.OperationKind { + // Called to list the operations that can be invoked. +} +``` + +## Input and output binding components + +A component can be _both_ an input _and_ output binding. 
+ Simply implement both interfaces and register the component as both binding types. + +## Register binding component + +In the main application file (for example, `main.go`), register the binding component with the application. + +```go +package main + +import ( + "example/components" + dapr "github.com/dapr-sandbox/components-go-sdk" + "github.com/dapr-sandbox/components-go-sdk/bindings/v1" +) + +func main() { + // Register an input binding... + dapr.Register("my-inputbinding", dapr.WithInputBinding(func() bindings.InputBinding { + return &components.MyInputBindingComponent{} + })) + + // Register an output binding... + dapr.Register("my-outputbinding", dapr.WithOutputBinding(func() bindings.OutputBinding { + return &components.MyOutputBindingComponent{} + })) + + dapr.MustRun() +} +``` + +## Next steps +- [Advanced techniques with the pluggable components Go SDK]({{% ref go-advanced %}}) +- Learn more about implementing: + - [State]({{% ref go-state-store %}}) + - [Pub/sub]({{% ref go-pub-sub %}}) diff --git a/sdkdocs/pluggable-components/go/content/en/go-sdk-docs/go-pub-sub/_index.md b/sdkdocs/pluggable-components/go/content/en/go-sdk-docs/go-pub-sub/_index.md new file mode 100644 index 00000000000..b805b18aae9 --- /dev/null +++ b/sdkdocs/pluggable-components/go/content/en/go-sdk-docs/go-pub-sub/_index.md @@ -0,0 +1,113 @@ +--- +type: docs +title: "Implementing a Go pub/sub component" +linkTitle: "Pub/sub" +weight: 1000 +description: How to create a pub/sub component with the Dapr pluggable components Go SDK +no_list: true +is_preview: true +--- + +Creating a pub/sub component requires just a few basic steps. + +## Import pub/sub packages + +Create the file `components/pubsub.go` and add `import` statements for the pub/sub related packages. + +```go +package components + +import ( + "context" + "github.com/dapr/components-contrib/pubsub" +) +``` + +## Implement the `PubSub` interface + +Create a type that implements the `PubSub` interface. 
+ +```go +type MyPubSubComponent struct { +} + +func (component *MyPubSubComponent) Init(metadata pubsub.Metadata) error { + // Called to initialize the component with its configured metadata... +} + +func (component *MyPubSubComponent) Close() error { + // Not used with pluggable components... + return nil +} + +func (component *MyPubSubComponent) Features() []pubsub.Feature { + // Return a list of features supported by the component... +} + +func (component *MyPubSubComponent) Publish(req *pubsub.PublishRequest) error { + // Send the message to the "topic"... +} + +func (component *MyPubSubComponent) Subscribe(ctx context.Context, req pubsub.SubscribeRequest, handler pubsub.Handler) error { + // Until canceled, check the topic for messages and deliver them to the Dapr runtime... +} +``` + +Calls to the `Subscribe()` method are expected to set up a long-lived mechanism for retrieving messages but immediately return `nil` (or an error, if that mechanism could not be set up). The mechanism should end when canceled (for example, via the `ctx.Done()` or `ctx.Err() != nil`). The "topic" from which messages should be pulled is passed via the `req` argument, while the delivery to the Dapr runtime is performed via the `handler` callback. The callback doesn't return until the application (served by the Dapr runtime) acknowledges processing of the message. + +```go +func (component *MyPubSubComponent) Subscribe(ctx context.Context, req pubsub.SubscribeRequest, handler pubsub.Handler) error { + go func() { + for { + err := ctx.Err() + + if err != nil { + return + } + + messages := // Poll for messages... + + for _, message := range messages { + handler(ctx, &pubsub.NewMessage{ + // Set the message content... + }) + } + + select { + case <-ctx.Done(): + case <-time.After(5 * time.Second): + } + } + }() + + return nil +} +``` + +## Register pub/sub component + +In the main application file (for example, `main.go`), register the pub/sub component with the application. 
+ +```go +package main + +import ( + "example/components" + dapr "github.com/dapr-sandbox/components-go-sdk" + "github.com/dapr-sandbox/components-go-sdk/pubsub/v1" +) + +func main() { + dapr.Register("", dapr.WithPubSub(func() pubsub.PubSub { + return &components.MyPubSubComponent{} + })) + + dapr.MustRun() +} +``` + +## Next steps +- [Advanced techniques with the pluggable components Go SDK]({{% ref go-advanced %}}) +- Learn more about implementing: + - [Bindings]({{% ref go-bindings %}}) + - [State]({{% ref go-state-store %}}) diff --git a/sdkdocs/pluggable-components/go/content/en/go-sdk-docs/go-state-store/_index.md b/sdkdocs/pluggable-components/go/content/en/go-sdk-docs/go-state-store/_index.md new file mode 100644 index 00000000000..54b66e32450 --- /dev/null +++ b/sdkdocs/pluggable-components/go/content/en/go-sdk-docs/go-state-store/_index.md @@ -0,0 +1,147 @@ +--- +type: docs +title: "Implementing a Go state store component" +linkTitle: "State Store" +weight: 1000 +description: How to create a state store with the Dapr pluggable components Go SDK +no_list: true +is_preview: true +--- + +Creating a state store component requires just a few basic steps. + +## Import state store packages + +Create the file `components/statestore.go` and add `import` statements for the state store related packages. + +```go +package components + +import ( + "context" + "github.com/dapr/components-contrib/state" +) +``` + +## Implement the `Store` interface + +Create a type that implements the `Store` interface. + +```go +type MyStateStore struct { +} + +func (store *MyStateStore) Init(metadata state.Metadata) error { + // Called to initialize the component with its configured metadata... +} + +func (store *MyStateStore) GetComponentMetadata() map[string]string { + // Not used with pluggable components... + return map[string]string{} +} + +func (store *MyStateStore) Features() []state.Feature { + // Return a list of features supported by the state store... 
+} + +func (store *MyStateStore) Delete(ctx context.Context, req *state.DeleteRequest) error { + // Delete the requested key from the state store... +} + +func (store *MyStateStore) Get(ctx context.Context, req *state.GetRequest) (*state.GetResponse, error) { + // Get the requested key value from the state store, else return an empty response... +} + +func (store *MyStateStore) Set(ctx context.Context, req *state.SetRequest) error { + // Set the requested key to the specified value in the state store... +} + +func (store *MyStateStore) BulkGet(ctx context.Context, req []state.GetRequest) (bool, []state.BulkGetResponse, error) { + // Get the requested key values from the state store... +} + +func (store *MyStateStore) BulkDelete(ctx context.Context, req []state.DeleteRequest) error { + // Delete the requested keys from the state store... +} + +func (store *MyStateStore) BulkSet(ctx context.Context, req []state.SetRequest) error { + // Set the requested keys to their specified values in the state store... +} +``` + +## Register state store component + +In the main application file (for example, `main.go`), register the state store with an application service. + +```go +package main + +import ( + "example/components" + dapr "github.com/dapr-sandbox/components-go-sdk" + "github.com/dapr-sandbox/components-go-sdk/state/v1" +) + +func main() { + dapr.Register("", dapr.WithStateStore(func() state.Store { + return &components.MyStateStoreComponent{} + })) + + dapr.MustRun() +} +``` + +## Bulk state stores + +While state stores are required to support the [bulk operations]({{% ref "state-management-overview.md#bulk-read-operations" %}}), their implementations sequentially delegate to the individual operation methods. + +## Transactional state stores + +State stores that intend to support transactions should implement the optional `TransactionalStore` interface. 
Its `Multi()` method receives a request with a sequence of `delete` and/or `set` operations to be performed within a transaction. The state store should iterate over the sequence and apply each operation. + +```go +func (store *MyStateStoreComponent) Multi(ctx context.Context, request *state.TransactionalStateRequest) error { + // Start transaction... + + for _, operation := range request.Operations { + switch operation.Operation { + case state.Delete: + deleteRequest := operation.Request.(state.DeleteRequest) + // Process delete request... + case state.Upsert: + setRequest := operation.Request.(state.SetRequest) + // Process set request... + } + } + + // End (or rollback) transaction... + + return nil +} +``` + +## Queryable state stores + +State stores that intend to support queries should implement the optional `Querier` interface. Its `Query()` method is passed details about the query, such as the filter(s), result limits, pagination, and sort order(s) of the results. The state store uses those details to generate a set of values to return as part of its response. + +```go +func (store *MyStateStoreComponent) Query(ctx context.Context, req *state.QueryRequest) (*state.QueryResponse, error) { + // Generate and return results... +} +``` + +## ETag and other semantic error handling + +The Dapr runtime has additional handling of certain error conditions resulting from some state store operations. 
State stores can indicate such conditions by returning specific errors from its operation logic: + +| Error | Applicable Operations | Description +|---|---|---| +| `NewETagError(state.ETagInvalid, ...)` | Delete, Set, Bulk Delete, Bulk Set | When an ETag is invalid | +| `NewETagError(state.ETagMismatch, ...)`| Delete, Set, Bulk Delete, Bulk Set | When an ETag does not match an expected value | +| `NewBulkDeleteRowMismatchError(...)` | Bulk Delete | When the number of affected rows does not match the expected rows | + +## Next steps +- [Advanced techniques with the pluggable components Go SDK]({{% ref go-advanced %}}) +- Learn more about implementing: + - [Bindings]({{% ref go-bindings %}}) + - [Pub/sub]({{% ref go-pub-sub %}}) diff --git a/sdkdocs/python b/sdkdocs/python deleted file mode 160000 index 5882d52961c..00000000000 --- a/sdkdocs/python +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 5882d52961cee7cb50d07c9a47c902f317dff396 diff --git a/sdkdocs/python/README.md b/sdkdocs/python/README.md new file mode 100644 index 00000000000..5213ae2140f --- /dev/null +++ b/sdkdocs/python/README.md @@ -0,0 +1,25 @@ +# Dapr Python SDK documentation + +This page covers how the documentation is structured for the Dapr Python SDK. + +## Dapr Docs + +All Dapr documentation is hosted at [docs.dapr.io](https://docs.dapr.io), including the docs for the [Python SDK](https://docs.dapr.io/developing-applications/sdks/python/). Head over there if you want to read the docs. + +### Python SDK docs source + +Although the docs site code and content is in the [docs repo](https://github.com/dapr/docs), the Python SDK content and images are within the `content` and `static` directories, respectively. + +This allows separation of roles and expertise between maintainers, and makes it easy to find the docs files you are looking for. 
+ +## Writing Python SDK docs + +To get up and running to write Python SDK docs, visit the [docs repo](https://github.com/dapr/docs) to initialize your environment. It will clone both the docs repo and this repo, so you can make changes and see it rendered within the site instantly, as well as commit and PR into this repo. + +Make sure to read the [docs contributing guide](https://docs.dapr.io/contributing/contributing-docs/) for information on style/semantics/etc. + +## Docs architecture + +The docs site is built on [Hugo](https://gohugo.io), which lives in the docs repo. This repo is setup as a git submodule so that when the repo is cloned and initialized, the python repo, along with the docs, are cloned as well. + +Then, in the Hugo configuration file, the `daprdocs/content` and `daprdocs/static` directories are redirected to the `daprdocs/developing-applications/sdks/python` and `static/python` directories, respectively. Thus, all the content within this repo is folded into the main docs site. \ No newline at end of file diff --git a/sdkdocs/python/content/en/python-sdk-contributing/python-contributing.md b/sdkdocs/python/content/en/python-sdk-contributing/python-contributing.md new file mode 100644 index 00000000000..fe22c10bdc9 --- /dev/null +++ b/sdkdocs/python/content/en/python-sdk-contributing/python-contributing.md @@ -0,0 +1,27 @@ +--- +type: docs +title: "Contributing to the Python SDK" +linkTitle: "Python SDK" +weight: 3000 +description: Guidelines for contributing to the Dapr Python SDK +--- + +When contributing to the [Python SDK](https://github.com/dapr/python-sdk) the following rules and best-practices should be followed. + +## Examples + +The `examples` directory contains code samples for users to run to try out specific functionality of the various Python SDK packages and extensions. When writing new and updated samples keep in mind: + +- All examples should be runnable on Windows, Linux, and MacOS. 
While Python code is consistent among operating systems, any pre/post example commands should provide options through [tabpane]({{% ref "contributing-docs.md#tabbed-content" %}}) +- Contain steps to download/install any required pre-requisites. Someone coming in with a fresh OS install should be able to start on the example and complete it without an error. Links to external download pages are fine. + +## Docs + +The `daprdocs` directory contains the markdown files that are rendered into the [Dapr Docs](https://docs.dapr.io) website. When the documentation website is built this repo is cloned and configured so that its contents are rendered with the docs content. When writing docs keep in mind: + + - All rules in the [docs guide]({{% ref contributing-docs.md %}}) should be followed in addition to these. + - All files and directories should be prefixed with `python-` to ensure all file/directory names are globally unique across all Dapr documentation. + +## Github Dapr Bot Commands + +Checkout the [daprbot documentation](https://docs.dapr.io/contributing/daprbot/) for Github commands you can run in this repo for common tasks. For example, you can run the `/assign` (as a comment on an issue) to assign issues to a user or group of users. 
\ No newline at end of file diff --git a/sdkdocs/python/content/en/python-sdk-docs/_index.md b/sdkdocs/python/content/en/python-sdk-docs/_index.md new file mode 100644 index 00000000000..b8689eb99f9 --- /dev/null +++ b/sdkdocs/python/content/en/python-sdk-docs/_index.md @@ -0,0 +1,157 @@ +--- +type: docs +title: "Dapr Python SDK" +linkTitle: "Python" +weight: 1000 +description: Python SDK packages for developing Dapr applications +no_list: true +cascade: + github_repo: https://github.com/dapr/python-sdk + github_subdir: daprdocs/content/en/python-sdk-docs + path_base_for_github_subdir: content/en/developing-applications/sdks/python/ + github_branch: master +--- + +Dapr offers a variety of subpackages to help with the development of Python applications. Using them you can create Python clients, servers, and virtual actors with Dapr. + +## Prerequisites + +- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed +- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) +- [Python 3.9+](https://www.python.org/downloads/) installed + +## Installation + +To get started with the Python SDK, install the main Dapr Python SDK package. + +{{< tabpane text=true >}} + +{{% tab header="Stable" %}} + +```bash +pip install dapr +``` +{{% /tab %}} + +{{% tab header="Development" %}} + +> **Note:** The development package will contain features and behavior that will be compatible with the pre-release version of the Dapr runtime. Make sure to uninstall any stable versions of the Python SDK before installing the dapr-dev package. + +```bash +pip install dapr-dev +``` + +{{% /tab %}} + +{{< /tabpane >}} + + +## Available subpackages + +### SDK imports + +Python SDK imports are subpackages included with the main SDK install, but need to be imported when used. The most common imports provided by the Dapr Python SDK are: + +
+
+
+
Client
+

Write Python applications to interact with a Dapr sidecar and other Dapr applications, including stateful virtual actors in Python

+ +
+
+
+
+
Actors
+

Create and interact with Dapr's Actor framework.

+ +
+
+
+
+
Conversation
+

Use the Dapr Conversation API (Alpha) for LLM interactions, tools, and multi-turn flows.

+ +
+
+
+
+Learn more about _all_ of the [available Dapr Python SDK imports](https://github.com/dapr/python-sdk/tree/master/dapr). + +### SDK extensions + +SDK extensions mainly work as utilities for receiving pub/sub events, programmatically creating pub/sub subscriptions, and handling input binding events. While you can achieve all of these tasks without an extension, using a Python SDK extension proves convenient. + +
+
+
+
+
gRPC
+

Create Dapr services with the gRPC server extension.

+ +
+
+
+
+
FastAPI
+

Integrate with Dapr Python virtual actors and pub/sub using the Dapr FastAPI extension.

+ +
+
+
+
+
Flask
+

Integrate with Dapr Python virtual actors using the Dapr Flask extension.

+ +
+
+
+
+
Workflow
+

Author workflows that work with other Dapr APIs in Python.

+ +
+
+
+ +Learn more about [the Dapr Python SDK extensions](https://github.com/dapr/python-sdk/tree/master/ext). + +## Try it out + +Clone the Python SDK repo. + +```bash +git clone https://github.com/dapr/python-sdk.git +``` + +Walk through the Python quickstarts, tutorials, and examples to see Dapr in action: + +| SDK samples | Description | +| ----------- | ----------- | +| [Quickstarts]({{% ref quickstarts %}}) | Experience Dapr's API building blocks in just a few minutes using the Python SDK. | +| [SDK samples](https://github.com/dapr/python-sdk/tree/master/examples) | Clone the SDK repo to try out some examples and get started. | +| [Bindings tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/bindings) | See how Dapr Python SDK works alongside other Dapr SDKs to enable bindings. | +| [Distributed Calculator tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/distributed-calculator/python) | Use the Dapr Python SDK to handle method invocation and state persistent capabilities. | +| [Hello World tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/hello-world) | Learn how to get Dapr up and running locally on your machine with the Python SDK. | +| [Hello Kubernetes tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/hello-kubernetes) | Get up and running with the Dapr Python SDK in a Kubernetes cluster. | +| [Observability tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/observability) | Explore Dapr's metric collection, tracing, logging and health check capabilities using the Python SDK. | +| [Pub/sub tutorial](https://github.com/dapr/quickstarts/tree/master/tutorials/pub-sub) | See how Dapr Python SDK works alongside other Dapr SDKs to enable pub/sub applications. | + + +## More information + +
+
+
+
Serialization
+

Learn more about serialization in Dapr SDKs.

+ +
+
+
+
+
PyPI
+

Python Package Index

+ +
+
+
diff --git a/sdkdocs/python/content/en/python-sdk-docs/conversation.md b/sdkdocs/python/content/en/python-sdk-docs/conversation.md new file mode 100644 index 00000000000..db67a6c495e --- /dev/null +++ b/sdkdocs/python/content/en/python-sdk-docs/conversation.md @@ -0,0 +1,296 @@ +--- +title: "Conversation API (Python) – Recommended Usage" +linkTitle: "Conversation" +weight: 11000 +type: docs +description: Recommended patterns for using Dapr Conversation API in Python with and without tools, including multi‑turn flows and safety guidance. +--- + +The Dapr Conversation API is currently in alpha. This page presents the recommended, minimal patterns to use it effectively with the Python SDK: +- Plain requests (no tools) +- Requests with tools (functions as tools) +- Multi‑turn flows with tool execution +- Async variants +- Important safety notes for executing tool calls + +## Prerequisites + +- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed +- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) +- [Python 3.9+](https://www.python.org/downloads/) installed +- [Dapr Python package]({{% ref "python#installation" %}}) installed +- A configured LLM component (for example, OpenAI or Azure OpenAI) in your Dapr environment + +For full, end‑to‑end flows and provider setup, see: +- The SDK examples under Conversation: + - [TOOL-CALL-QUICKSTART.md](https://github.com/dapr/python-sdk/blob/main/examples/conversation/TOOL-CALL-QUICKSTART.md) + - [real_llm_providers_example.py](https://github.com/dapr/python-sdk/blob/main/examples/conversation/real_llm_providers_example.py) + +## Plain conversation (no tools) + +```python +from dapr.clients import DaprClient +from dapr.clients.grpc import conversation + +# Build a single‑turn Alpha2 input +user_msg = conversation.create_user_message("What's Dapr?") +alpha2_input = conversation.ConversationInputAlpha2(messages=[user_msg]) + +with DaprClient() as client: + resp = client.converse_alpha2( + name="echo", # replace with 
your LLM component name + inputs=[alpha2_input], + temperature=1, + ) + + for msg in resp.to_assistant_messages(): + if msg.of_assistant.content: + print(msg.of_assistant.content[0].text) +``` + +Key points: +- Use `conversation.create_user_message` to build messages. +- Wrap into `ConversationInputAlpha2(messages=[...])` and pass to `converse_alpha2`. +- Use `response.to_assistant_messages()` to iterate assistant outputs. + +## Tools: decorator‑based (recommended) + +Decorator-based tools offer a clean, ergonomic approach. Define a function with clear type hints and a detailed docstring — this is important for the LLM to understand how or when to invoke the tool — then +decorate it with `@conversation.tool`. Registered tools can be passed to the LLM and invoked via tool calls. + +```python +from dapr.clients import DaprClient +from dapr.clients.grpc import conversation + +@conversation.tool +def get_weather(location: str, unit: str = 'fahrenheit') -> str: + """Get current weather for a location.""" + # Replace with a real implementation + return f"Weather in {location} (unit={unit})" + +user_msg = conversation.create_user_message("What's the weather in Paris?") +alpha2_input = conversation.ConversationInputAlpha2(messages=[user_msg]) + +with DaprClient() as client: + response = client.converse_alpha2( + name="openai", # your LLM component + inputs=[alpha2_input], + tools=conversation.get_registered_tools(), # tools registered by @conversation.tool + tool_choice='auto', + temperature=1, + ) + + # Inspect assistant messages, including any tool calls + for msg in response.to_assistant_messages(): + if msg.of_assistant.tool_calls: + for tc in msg.of_assistant.tool_calls: + print(f"Tool call: {tc.function.name} args={tc.function.arguments}") + elif msg.of_assistant.content: + print(msg.of_assistant.content[0].text) +``` + +Notes: +- Use `conversation.get_registered_tools()` to collect all `@conversation.tool` decorated functions. 
+- The binder validates/coerces params using your function signature. Keep annotations accurate. + +## Minimal multi‑turn with tools + +This is the go‑to loop for tool‑using conversations: + +{{% alert title="Warning" color="warning" %}} +Do not blindly auto‑execute tool calls returned by the LLM unless you trust all tools registered. Treat tool names and arguments as untrusted input. +- Validate inputs and enforce guardrails (allow‑listed tools, argument schemas, side‑effect constraints). +- For async or I/O‑bound tools, prefer `conversation.execute_registered_tool_async(..., timeout=...)` and set conservative timeouts. +- Consider adding a policy layer or a user confirmation step before execution in sensitive contexts. +- Log and monitor tool usage; fail closed when validation fails. +{{% /alert %}} + +```python +from dapr.clients import DaprClient +from dapr.clients.grpc import conversation + +@conversation.tool +def get_weather(location: str, unit: str = 'fahrenheit') -> str: + return f"Weather in {location} (unit={unit})" + +history: list[conversation.ConversationMessage] = [ + conversation.create_user_message("What's the weather in San Francisco?")] + +with DaprClient() as client: + # Turn 1 + resp1 = client.converse_alpha2( + name="openai", + inputs=[conversation.ConversationInputAlpha2(messages=history)], + tools=conversation.get_registered_tools(), + tool_choice='auto', + temperature=1, + ) + + # Append assistant messages; execute tool calls; append tool results + for msg in resp1.to_assistant_messages(): + history.append(msg) + for tc in msg.of_assistant.tool_calls: + # IMPORTANT: validate inputs and enforce guardrails in production + tool_output = conversation.execute_registered_tool( + tc.function.name, tc.function.arguments + ) + history.append( + conversation.create_tool_message( + tool_id=tc.id, name=tc.function.name, content=str(tool_output) + ) + ) + + # Turn 2 (LLM sees tool result) + history.append(conversation.create_user_message("Should I bring 
an umbrella?")) + resp2 = client.converse_alpha2( + name="openai", + inputs=[conversation.ConversationInputAlpha2(messages=history)], + tools=conversation.get_registered_tools(), + temperature=1, + ) + + for msg in resp2.to_assistant_messages(): + history.append(msg) + if not msg.of_assistant.tool_calls and msg.of_assistant.content: + print(msg.of_assistant.content[0].text) +``` + +Tips: +- Always append assistant messages to history. +- Execute each tool call (with validation) and append a tool message with the tool output. +- The next turn includes these tool results so the LLM can reason with them. + +## Functions as tools: alternatives + +When decorators aren’t practical, two options exist. + +A) Automatic schema from a typed function: + +```python +from enum import Enum +from dapr.clients.grpc import conversation + +class Units(Enum): + CELSIUS = 'celsius' + FAHRENHEIT = 'fahrenheit' + +def get_weather(location: str, unit: Units = Units.FAHRENHEIT) -> str: + return f"Weather in {location}" + +fn = conversation.ConversationToolsFunction.from_function(get_weather) +weather_tool = conversation.ConversationTools(function=fn) +``` + +B) Manual JSON Schema (fallback): + +```python +from dapr.clients.grpc import conversation + +fn = conversation.ConversationToolsFunction( + name='get_weather', + description='Get current weather', + parameters={ + 'type': 'object', + 'properties': { + 'location': {'type': 'string'}, + 'unit': {'type': 'string', 'enum': ['celsius', 'fahrenheit']}, + }, + 'required': ['location'], + }, +) +weather_tool = conversation.ConversationTools(function=fn) +``` + +## Async variant + +Use the asynchronous client and async tool execution helpers as needed. 
+ +```python +import asyncio +from dapr.aio.clients import DaprClient as AsyncDaprClient +from dapr.clients.grpc import conversation + +@conversation.tool +def get_time() -> str: + return '2025-01-01T12:00:00Z' + +async def main(): + async with AsyncDaprClient() as client: + msg = conversation.create_user_message('What time is it?') + inp = conversation.ConversationInputAlpha2(messages=[msg]) + resp = await client.converse_alpha2( + name='openai', inputs=[inp], tools=conversation.get_registered_tools() + ) + for m in resp.to_assistant_messages(): + if m.of_assistant.content: + print(m.of_assistant.content[0].text) + +asyncio.run(main()) +``` + +If you need to execute tools asynchronously (e.g., network I/O), implement async functions and use `conversation.execute_registered_tool_async` with timeouts. + +## Safety and validation (must‑read) + +An LLM may suggest tool calls. Treat all model‑provided parameters as untrusted input. + +Recommendations: +- Register only trusted functions as tools. Prefer the `@conversation.tool` decorator for clarity and automatic schema generation. +- Use precise type annotations and docstrings. The SDK converts function signatures to JSON schema and binds parameters with type coercion and rejection of unexpected/invalid fields. +- Add guardrails for tools that can cause side effects (filesystem, network, subprocess). Consider allow‑lists, sandboxing, and limits. +- Validate arguments before execution. For example, sanitize file paths or restrict URLs/domains. +- Consider timeouts and concurrency controls. For async tools, pass a timeout to `execute_registered_tool_async(..., timeout=...)`. +- Log and monitor tool usage. Fail closed: if validation fails, avoid executing the tool and inform the user safely. + +See also inline notes in `dapr/clients/grpc/conversation.py` (e.g., `tool()`, `ConversationTools`, `execute_registered_tool`) for parameter binding and error handling details. 
+ + +## Key helper methods (quick reference) + +This section summarizes helper utilities available in dapr.clients.grpc.conversation used throughout the examples. + +- create_user_message(text: str) -> ConversationMessage + - Builds a user role message for Alpha2. Use in history lists. + - Example: `history.append(conversation.create_user_message("Hello"))` + +- create_system_message(text: str) -> ConversationMessage + - Builds a system message to steer the assistant’s behavior. + - Example: `history = [conversation.create_system_message("You are a concise assistant.")]` + +- create_assistant_message(text: str) -> ConversationMessage + - Useful for injecting assistant text in tests or controlled flows. + +- create_tool_message(tool_id: str, name: str, content: Any) -> ConversationMessage + - Converts a tool’s output into a tool message the LLM can read next turn. + - content can be any object; it is stringified safely by the SDK. + - Example: `history.append(conversation.create_tool_message(tool_id=tc.id, name=tc.function.name, content=conversation.execute_registered_tool(tc.function.name, tc.function.arguments)))` + +- get_registered_tools() -> list[ConversationTools] + - Returns all tools currently registered in the in-process registry. + - Includes tools created via: + - @conversation.tool decorator (auto-registered by default), and + - ConversationToolsFunction.from_function with register=True (default). + - Pass this list in converse_alpha2(..., tools=...). + +- register_tool(name: str, t: ConversationTools) / unregister_tool(name: str) + - Manually manage the tool registry (e.g., advanced scenarios, tests, cleanup). + - Names must be unique; unregister to avoid collisions in long-lived processes. + +- execute_registered_tool(name: str, params: Mapping|Sequence|str|None) -> Any + - Synchronously executes a registered tool by name. + - params accepts kwargs (mapping), args (sequence), JSON string, or None. 
If a JSON string is provided (as commonly returned by LLMs), it is parsed for you. + - Parameters are validated and coerced against the function signature/schema; unexpected or invalid fields raise errors. + - Security: treat params as untrusted; add guardrails for side effects. + +- execute_registered_tool_async(name: str, params: Mapping|Sequence|str|None, *, timeout: float|None=None) -> Any + - Async counterpart. Supports timeouts, which are recommended for I/O-bound tools. + - Prefer this for async tools or when using the aio client. + +- ConversationToolsFunction.from_function(func: Callable, register: bool = True) -> ConversationToolsFunction + - Derives a JSON schema from a typed Python function (annotations + optional docstring) and optionally registers a tool. + - Typical usage: `spec = conversation.ConversationToolsFunction.from_function(my_func)`; then either rely on auto-registration or wrap with `ConversationTools(function=spec)` and call `register_tool(spec.name, tool)` or pass `[tool]` directly to `tools=`. + +- ConversationResponseAlpha2.to_assistant_messages() -> list[ConversationMessage] + - Convenience to transform the response outputs into assistant ConversationMessage objects you can append to history directly (including tool_calls when present). + +Tip: The @conversation.tool decorator is the easiest way to create a tool. It auto-generates the schema from your function, allows an optional namespace/name override, and auto-registers the tool (you can set register=False to defer registration). 
diff --git a/sdkdocs/python/content/en/python-sdk-docs/python-actor.md b/sdkdocs/python/content/en/python-sdk-docs/python-actor.md new file mode 100644 index 00000000000..22360afd056 --- /dev/null +++ b/sdkdocs/python/content/en/python-sdk-docs/python-actor.md @@ -0,0 +1,130 @@ +--- +type: docs +title: "Getting started with the Dapr actor Python SDK" +linkTitle: "Actor" +weight: 20000 +description: How to get up and running with the Dapr Python SDK +--- + +The Dapr actor package allows you to interact with Dapr virtual actors from a Python application. + +## Pre-requisites + +- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed +- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) +- [Python 3.9+](https://www.python.org/downloads/) installed +- [Dapr Python package]({{% ref "python#installation" %}}) installed + +## Actor interface + +The interface defines the actor contract that is shared between the actor implementation and the clients calling the actor. Because a client may depend on it, it typically makes sense to define it in a package that is separate from the actor implementation. + +```python +from dapr.actor import ActorInterface, actormethod + +class DemoActorInterface(ActorInterface): + @actormethod(name="GetMyData") + async def get_my_data(self) -> object: + ... +``` + +## Actor services + +An actor service hosts the virtual actor. It is implemented as a class that derives from the base type `Actor` and implements the interfaces defined in the actor interface. + +Actors can be created using one of the Dapr actor extensions: + - [FastAPI actor extension]({{% ref python-fastapi.md %}}) + - [Flask actor extension]({{% ref python-flask.md %}}) + +## Actor client + +An actor client contains the implementation of the actor client which calls the actor methods defined in the actor interface. 
+ +```python +import asyncio + +from dapr.actor import ActorProxy, ActorId +from demo_actor_interface import DemoActorInterface + +async def main(): + # Create proxy client + proxy = ActorProxy.create('DemoActor', ActorId('1'), DemoActorInterface) + + # Call method on client + resp = await proxy.GetMyData() +``` + +## Sample + +Visit [this page](https://github.com/dapr/python-sdk/tree/main/examples/demo_actor) for a runnable actor sample. + + +## Mock Actor Testing + +The Dapr Python SDK provides the ability to create mock actors to unit test your actor methods and see how they interact with the actor state. + +### Sample Usage + + +``` +from dapr.actor.runtime.mock_actor import create_mock_actor + +class MyActor(Actor, MyActorInterface): + async def save_state(self, data) -> None: + await self._state_manager.set_state('mystate', data) + await self._state_manager.save_state() + +mock_actor = create_mock_actor(MyActor, "id") + +await mock_actor.save_state(5) +assert mock_actor._state_manager._mock_state['mystate'] == 5 #True +``` +Mock actors are created by passing your actor class and an actor ID (a string) to the create_mock_actor function. This function returns an instance of the actor with many internal methods overridden. Instead of interacting with Dapr for tasks like saving state or managing timers, the mock actor uses in-memory state to simulate these behaviors. + +This state can be accessed through the following variables: + +**IMPORTANT NOTE: Due to type hinting issues as discussed further down, these variables will not be visible to type hinters/linters/etc, who will think they are invalid variables. You will need to use them with #type: ignore in order to satisfy any such systems.** + +- **_state_manager._mock_state()** +A `[str, object]` dict where all the actor state is stored. Any variable saved via `_state_manager.save_state(key, value)`, or any other state manager method is stored in the dict as that key, value pair. 
Any value loaded via `try_get_state` or any other state manager method is taken from this dict. + +- **_state_manager._mock_timers()** +A `[str, ActorTimerData]` dict which holds the active actor timers. Any actor method which would add or remove a timer adds or pops the appropriate `ActorTimerData` object from this dict. + +- **_state_manager._mock_reminders()** +A `[str, ActorReminderData]` dict which holds the active actor reminders. Any actor method which would add or remove a reminder adds or pops the appropriate `ActorReminderData` object from this dict. + +**Note: The timers and reminders will never actually trigger. The dictionaries exist only so methods that should add or remove timers/reminders can be tested. If you need to test the callbacks they should activate, you should call them directly with the appropriate values:** +``` +result = await mock_actor.receive_reminder(name, state, due_time, period, _ttl) +# Test the result directly or test for side effects (like changing state) by querying `_state_manager._mock_state` +``` + +### Usage and Limitations + +**To allow for more fine-grained control, the `_on_activate` method will not be called automatically the way it is when Dapr initializes a new Actor instance. You should call it manually as needed as part of your tests.** + +**A current limitation of the mock actor system is that it does not call the `_on_pre_actor_method` and `_on_post_actor_method` methods. You can always call these methods manually as part of a test.** + +The `__init__`, `register_timer`, `unregister_timer`, `register_reminder`, `unregister_reminder` methods are all overwritten by the MockActor class that gets applied as a mixin via `create_mock_actor`. If your actor itself overwrites these methods, those modifications will themselves be overwritten and the actor will likely not behave as you expect. 
+ +*note: `__init__` is a special case where you are expected to define it as* +``` + def __init__(self, ctx, actor_id): + super().__init__(ctx, actor_id) +``` +*Mock actors work fine with this, but if you have added any extra logic into `__init__`, it will be overwritten. It is worth noting that the correct way to apply logic on initialization is via `_on_activate` (which can also be safely used with mock actors) instead of `__init__`.* + +*If you have an actor which does override default Dapr actor methods, you can create a custom subclass of the MockActor class (from MockActor.py) which implements whatever custom logic you have along with interacting with `_mock_state`, `_mock_timers`, and `_mock_reminders` as normal, and then applying that custom class as a mixin via a `create_mock_actor` function you define yourself.* + +The actor `_runtime_ctx` variable is set to None. All the normal actor methods have been overwritten such as to not call it, but if your code itself interacts directly with `_runtime_ctx`, tests may fail. + +The actor _state_manager is overwritten with an instance of `MockStateManager`. This has all the same methods and functionality of the base `ActorStateManager`, except for using the various `_mock` variables for storing data instead of the `_runtime_ctx`. If your code implements its own custom state manager it will be overwritten and tests will likely fail. + +### Type Hinting + +Because of Python's lack of a unified method for type hinting type intersections (see: [python/typing #213](https://github.com/python/typing/issues/213)), type hinting unfortunately doesn't work with Mock Actors. The return type is type hinted as "instance of Actor subclass T" when it should really be type hinted as "instance of MockActor subclass T" or "instance of type intersection `[Actor subclass T, MockActor]`" (where, it is worth noting, `MockActor` is itself a subclass of `Actor`). 
+ +This means that, for example, if you hover over `mockactor._state_manager` in a code editor, it will come up as an instance of ActorStateManager (instead of MockStateManager), and various IDE helper functions (like VSCode's `Go to Definition`, which will bring you to the definition of ActorStateManager instead of MockStateManager) won't work properly. + +For now, this issue is unfixable, so it's merely something to be noted because of the confusion it might cause. If in the future it becomes possible to accurately type hint cases like this feel free to open an issue about implementing it. \ No newline at end of file diff --git a/sdkdocs/python/content/en/python-sdk-docs/python-client.md b/sdkdocs/python/content/en/python-sdk-docs/python-client.md new file mode 100644 index 00000000000..f03a6a74cd1 --- /dev/null +++ b/sdkdocs/python/content/en/python-sdk-docs/python-client.md @@ -0,0 +1,601 @@ +--- +type: docs +title: "Getting started with the Dapr client Python SDK" +linkTitle: "Client" +weight: 10000 +description: How to get up and running with the Dapr Python SDK +--- + +The Dapr client package allows you to interact with other Dapr applications from a Python application. + +{{% alert title="Note" color="primary" %}} + If you haven't already, [try out one of the quickstarts]({{% ref quickstarts %}}) for a quick walk-through on how to use the Dapr Python SDK with an API building block. + +{{% /alert %}} + +## Prerequisites + +[Install the Dapr Python package]({{% ref "python#installation" %}}) before getting started. + +## Import the client package + +The `dapr` package contains the `DaprClient`, which is used to create and use a client. + +```python +from dapr.clients import DaprClient +``` + +## Initialising the client +You can initialise a Dapr client in multiple ways: + +#### Default values: +When you initialise the client without any parameters it will use the default values for a Dapr +sidecar instance (`127.0.0.1:50001`). 
+```python +from dapr.clients import DaprClient + +with DaprClient() as d: + # use the client +``` + +#### Specifying an endpoint on initialisation: +When passed as an argument in the constructor, the gRPC endpoint takes precedence over any +configuration or environment variable. + +```python +from dapr.clients import DaprClient + +with DaprClient("mydomain:50051?tls=true") as d: + # use the client +``` + +#### Configuration options: + +##### Dapr Sidecar Endpoints +You can use the standardised `DAPR_GRPC_ENDPOINT` environment variable to +specify the gRPC endpoint. When this variable is set, the client can be initialised +without any arguments: + +```bash +export DAPR_GRPC_ENDPOINT="mydomain:50051?tls=true" +``` +```python +from dapr.clients import DaprClient + +with DaprClient() as d: + # the client will use the endpoint specified in the environment variables +``` + +The legacy environment variables `DAPR_RUNTIME_HOST`, `DAPR_HTTP_PORT` and `DAPR_GRPC_PORT` are +also supported, but `DAPR_GRPC_ENDPOINT` takes precedence. + +##### Dapr API Token +If your Dapr instance is configured to require the `DAPR_API_TOKEN` environment variable, you can +set it in the environment and the client will use it automatically. +You can read more about Dapr API token authentication [here](https://docs.dapr.io/operations/security/api-token/). + +##### Health timeout +On client initialisation, a health check is performed against the Dapr sidecar (`/healthz/outbound`). +The client will wait for the sidecar to be up and running before proceeding. + +The default healthcheck timeout is 60 seconds, but it can be overridden by setting the `DAPR_HEALTH_TIMEOUT` +environment variable. + +##### Retries and timeout + +The Dapr client can retry a request if a specific error code is received from the sidecar. This is +configurable through the `DAPR_API_MAX_RETRIES` environment variable and is picked up automatically, +not requiring any code changes. 
+The default value for `DAPR_API_MAX_RETRIES` is `0`, which means no retries will be made. + +You can fine-tune more retry parameters by creating a `dapr.clients.retry.RetryPolicy` object and +passing it to the DaprClient constructor: + +```python +from dapr.clients.retry import RetryPolicy + +retry = RetryPolicy( + max_attempts=5, + initial_backoff=1, + max_backoff=20, + backoff_multiplier=1.5, + retryable_http_status_codes=[408, 429, 500, 502, 503, 504], + retryable_grpc_status_codes=[StatusCode.UNAVAILABLE, StatusCode.DEADLINE_EXCEEDED, ] +) + +with DaprClient(retry_policy=retry) as d: + ... +``` + +or for actors: +```python +factory = ActorProxyFactory(retry_policy=RetryPolicy(max_attempts=3)) +proxy = ActorProxy.create('DemoActor', ActorId('1'), DemoActorInterface, factory) +``` + +**Timeout** can be set for all calls through the environment variable `DAPR_API_TIMEOUT_SECONDS`. The default value is 60 seconds. + +> Note: You can control timeouts on service invocation separately, by passing a `timeout` parameter to the `invoke_method` method. + +## Error handling +Initially, errors in Dapr followed the [Standard gRPC error model](https://grpc.io/docs/guides/error/#standard-error-model). However, to provide more detailed and informative error messages, in version 1.13 an enhanced error model has been introduced which aligns with the gRPC [Richer error model](https://grpc.io/docs/guides/error/#richer-error-model). In response, the Python SDK implemented `DaprGrpcError`, a custom exception class designed to improve the developer experience. +It's important to note that the transition to using `DaprGrpcError` for all gRPC status exceptions is a work in progress. As of now, not every API call in the SDK has been updated to leverage this custom exception. We are actively working on this enhancement and welcome contributions from the community. 
+ +Example of handling `DaprGrpcError` exceptions when using the Dapr python-SDK: + +```python +try: + d.save_state(store_name=storeName, key=key, value=value) +except DaprGrpcError as err: + print(f'Status code: {err.code()}') + print(f"Message: {err.message()}") + print(f"Error code: {err.error_code()}") + print(f"Error info(reason): {err.error_info.reason}") + print(f"Resource info (resource type): {err.resource_info.resource_type}") + print(f"Resource info (resource name): {err.resource_info.resource_name}") + print(f"Bad request (field): {err.bad_request.field_violations[0].field}") + print(f"Bad request (description): {err.bad_request.field_violations[0].description}") +``` + + +## Building blocks + +The Python SDK allows you to interface with all of the [Dapr building blocks]({{% ref building-blocks %}}). + +### Invoke a service + +The Dapr Python SDK provides a simple API for invoking services via either HTTP or gRPC (deprecated). The protocol can be selected by setting the `DAPR_API_METHOD_INVOCATION_PROTOCOL` environment variable, defaulting to HTTP when unset. GRPC service invocation in Dapr is deprecated and GRPC proxying is recommended as an alternative. + +```python +from dapr.clients import DaprClient + +with DaprClient() as d: + # invoke a method (gRPC or HTTP GET) + resp = d.invoke_method('service-to-invoke', 'method-to-invoke', data='{"message":"Hello World"}') + + # for other HTTP verbs the verb must be specified + # invoke a 'POST' method (HTTP only) + resp = d.invoke_method('service-to-invoke', 'method-to-invoke', data='{"id":"100", "FirstName":"Value", "LastName":"Value"}', http_verb='post') +``` + +The base endpoint for HTTP api calls is specified in the `DAPR_HTTP_ENDPOINT` environment variable. +If this variable is not set, the endpoint value is derived from the `DAPR_RUNTIME_HOST` and `DAPR_HTTP_PORT` variables, whose default values are `127.0.0.1` and `3500` accordingly. 
+ +The base endpoint for gRPC calls is the one used for the client initialisation ([explained above](#initialising-the-client)). + + +- For a full guide on service invocation visit [How-To: Invoke a service]({{% ref howto-invoke-discover-services.md %}}). +- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/invoke-simple) for code samples and instructions to try out service invocation. + +### Save & get application state + +```python +from dapr.clients import DaprClient + +with DaprClient() as d: + # Save state + d.save_state(store_name="statestore", key="key1", value="value1") + + # Get state + data = d.get_state(store_name="statestore", key="key1").data + + # Delete state + d.delete_state(store_name="statestore", key="key1") +``` + +- For a full list of state operations visit [How-To: Get & save state]({{% ref howto-get-save-state.md %}}). +- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/state_store) for code samples and instructions to try out state management. + +### Query application state (Alpha) + +```python + from dapr import DaprClient + + query = ''' + { + "filter": { + "EQ": { "state": "CA" } + }, + "sort": [ + { + "key": "person.id", + "order": "DESC" + } + ] + } + ''' + + with DaprClient() as d: + resp = d.query_state( + store_name='state_store', + query=query, + states_metadata={"metakey": "metavalue"}, # optional + ) +``` + +- For a full list of state store query options visit [How-To: Query state]({{% ref howto-state-query-api.md %}}). +- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/state_store_query) for code samples and instructions to try out state store querying. 
+ +### Publish & subscribe + +#### Publish messages + +```python +from dapr.clients import DaprClient + +with DaprClient() as d: + resp = d.publish_event(pubsub_name='pubsub', topic_name='TOPIC_A', data='{"message":"Hello World"}') +``` + + +Send [CloudEvents](https://cloudevents.io/) messages with a json payload: +```python +from dapr.clients import DaprClient +import json + +with DaprClient() as d: + cloud_event = { + 'specversion': '1.0', + 'type': 'com.example.event', + 'source': 'my-service', + 'id': 'myid', + 'data': {'id': 1, 'message': 'hello world'}, + 'datacontenttype': 'application/json', + } + + # Set the data content type to 'application/cloudevents+json' + resp = d.publish_event( + pubsub_name='pubsub', + topic_name='TOPIC_CE', + data=json.dumps(cloud_event), + data_content_type='application/cloudevents+json', + ) +``` + +Publish [CloudEvents](https://cloudevents.io/) messages with plain text payload: +```python +from dapr.clients import DaprClient +import json + +with DaprClient() as d: + cloud_event = { + 'specversion': '1.0', + 'type': 'com.example.event', + 'source': 'my-service', + 'id': "myid", + 'data': 'hello world', + 'datacontenttype': 'text/plain', + } + + # Set the data content type to 'application/cloudevents+json' + resp = d.publish_event( + pubsub_name='pubsub', + topic_name='TOPIC_CE', + data=json.dumps(cloud_event), + data_content_type='application/cloudevents+json', + ) +``` + + +#### Subscribe to messages + +```python +from cloudevents.sdk.event import v1 +from dapr.ext.grpc import App +import json + +app = App() + +# Default subscription for a topic +@app.subscribe(pubsub_name='pubsub', topic='TOPIC_A') +def mytopic(event: v1.Event) -> None: + data = json.loads(event.Data()) + print(f'Received: id={data["id"]}, message="{data ["message"]}"' + ' content_type="{event.content_type}"',flush=True) + +# Specific handler using Pub/Sub routing +@app.subscribe(pubsub_name='pubsub', topic='TOPIC_A', + rule=Rule("event.type == \"important\"", 
1)) +def mytopic_important(event: v1.Event) -> None: + data = json.loads(event.Data()) + print(f'Received: id={data["id"]}, message="{data ["message"]}"' + ' content_type="{event.content_type}"',flush=True) +``` + +- For more information about pub/sub, visit [How-To: Publish & subscribe]({{% ref howto-publish-subscribe.md %}}). +- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/pubsub-simple) for code samples and instructions to try out pub/sub. + +#### Streaming message subscription + +You can create a streaming subscription to a PubSub topic using either the `subscribe` +or `subscribe_handler` methods. + +The `subscribe` method returns an iterable `Subscription` object, which allows you to pull messages from the +stream by using a `for` loop (ex. `for message in subscription`) or by +calling the `next_message` method. This will block on the main thread while waiting for messages. +When done, you should call the close method to terminate the +subscription and stop receiving messages. + +The `subscribe_with_handler` method accepts a callback function that is executed for each message +received from the stream. +It runs in a separate thread, so it doesn't block the main thread. The callback should return a +`TopicEventResponse` (ex. `TopicEventResponse('success')`), indicating whether the message was +processed successfully, should be retried, or should be discarded. The method will automatically +manage message acknowledgements based on the returned status. The call to `subscribe_with_handler` +method returns a close function, which should be called to terminate the subscription when you're +done. 
+ +Here's an example of using the `subscribe` method: + +```python +import time + +from dapr.clients import DaprClient +from dapr.clients.grpc.subscription import StreamInactiveError, StreamCancelledError + +counter = 0 + + +def process_message(message): + global counter + counter += 1 + # Process the message here + print(f'Processing message: {message.data()} from {message.topic()}...') + return 'success' + + +def main(): + with DaprClient() as client: + global counter + + subscription = client.subscribe( + pubsub_name='pubsub', topic='TOPIC_A', dead_letter_topic='TOPIC_A_DEAD' + ) + + try: + for message in subscription: + if message is None: + print('No message received. The stream might have been cancelled.') + continue + + try: + response_status = process_message(message) + + if response_status == 'success': + subscription.respond_success(message) + elif response_status == 'retry': + subscription.respond_retry(message) + elif response_status == 'drop': + subscription.respond_drop(message) + + if counter >= 5: + break + except StreamInactiveError: + print('Stream is inactive. 
Retrying...') + time.sleep(1) + continue + except StreamCancelledError: + print('Stream was cancelled') + break + except Exception as e: + print(f'Error occurred during message processing: {e}') + + finally: + print('Closing subscription...') + subscription.close() + + +if __name__ == '__main__': + main() +``` + +And here's an example of using the `subscribe_with_handler` method: + +```python +import time + +from dapr.clients import DaprClient +from dapr.clients.grpc._response import TopicEventResponse + +counter = 0 + + +def process_message(message): + # Process the message here + global counter + counter += 1 + print(f'Processing message: {message.data()} from {message.topic()}...') + return TopicEventResponse('success') + + +def main(): + with (DaprClient() as client): + # This will start a new thread that will listen for messages + # and process them in the `process_message` function + close_fn = client.subscribe_with_handler( + pubsub_name='pubsub', topic='TOPIC_A', handler_fn=process_message, + dead_letter_topic='TOPIC_A_DEAD' + ) + + while counter < 5: + time.sleep(1) + + print("Closing subscription...") + close_fn() + + +if __name__ == '__main__': + main() +``` + +- For more information about pub/sub, visit [How-To: Publish & subscribe]({{% ref howto-publish-subscribe.md %}}). +- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/main/examples/pubsub-simple) for code samples and instructions to try out streaming pub/sub. + +### Conversation (Alpha) + +{{% alert title="Note" color="primary" %}} +The Dapr Conversation API is currently in alpha. +{{% /alert %}} + +Since version 1.15 Dapr offers developers the capability to securely and reliably interact with Large Language Models (LLM) through the [Conversation API]({{% ref conversation-overview.md %}}). 
+ +```python +from dapr.clients import DaprClient +from dapr.clients.grpc.conversation import ConversationInput + +with DaprClient() as d: + inputs = [ + ConversationInput(content="What's Dapr?", role='user', scrub_pii=True), + ConversationInput(content='Give a brief overview.', role='user', scrub_pii=True), + ] + + metadata = { + 'model': 'foo', + 'key': 'authKey', + 'cacheTTL': '10m', + } + + response = d.converse_alpha1( + name='echo', inputs=inputs, temperature=0.7, context_id='chat-123', metadata=metadata + ) + + for output in response.outputs: + print(f'Result: {output.result}') +``` + +### Interact with output bindings + +```python +from dapr.clients import DaprClient + +with DaprClient() as d: + resp = d.invoke_binding(binding_name='kafkaBinding', operation='create', data='{"message":"Hello World"}') +``` + +- For a full guide on output bindings visit [How-To: Use bindings]({{% ref howto-bindings.md %}}). +- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/main/examples/invoke-binding) for code samples and instructions to try out output bindings. + +### Retrieve secrets + +```python +from dapr.clients import DaprClient + +with DaprClient() as d: + resp = d.get_secret(store_name='localsecretstore', key='secretKey') +``` + +- For a full guide on secrets visit [How-To: Retrieve secrets]({{% ref howto-secrets.md %}}). 
+- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/secret_store) for code samples and instructions to try out retrieving secrets + +### Configuration + +#### Get configuration + +```python +from dapr.clients import DaprClient + +with DaprClient() as d: + # Get Configuration + configuration = d.get_configuration(store_name='configurationstore', keys=['orderId'], config_metadata={}) +``` + +#### Subscribe to configuration + +```python +import asyncio +from time import sleep +from dapr.clients import DaprClient + +async def executeConfiguration(): + with DaprClient() as d: + storeName = 'configurationstore' + + key = 'orderId' + + # Wait for sidecar to be up within 20 seconds. + d.wait(20) + + # Subscribe to configuration by key. + configuration = await d.subscribe_configuration(store_name=storeName, keys=[key], config_metadata={}) + while True: + if configuration != None: + items = configuration.get_items() + for key, item in items: + print(f"Subscribe key={key} value={item.value} version={item.version}", flush=True) + else: + print("Nothing yet") + sleep(5) + +asyncio.run(executeConfiguration()) +``` + +- Learn more about managing configurations via the [How-To: Manage configuration]({{% ref howto-manage-configuration.md %}}) guide. +- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/configuration) for code samples and instructions to try out configuration. 
+ +### Distributed Lock + +```python +from dapr.clients import DaprClient + +def main(): + # Lock parameters + store_name = 'lockstore' # as defined in components/lockstore.yaml + resource_id = 'example-lock-resource' + client_id = 'example-client-id' + expiry_in_seconds = 60 + + with DaprClient() as dapr: + print('Will try to acquire a lock from lock store named [%s]' % store_name) + print('The lock is for a resource named [%s]' % resource_id) + print('The client identifier is [%s]' % client_id) + print('The lock will will expire in %s seconds.' % expiry_in_seconds) + + with dapr.try_lock(store_name, resource_id, client_id, expiry_in_seconds) as lock_result: + assert lock_result.success, 'Failed to acquire the lock. Aborting.' + print('Lock acquired successfully!!!') + + # At this point the lock was released - by magic of the `with` clause ;) + unlock_result = dapr.unlock(store_name, resource_id, client_id) + print('We already released the lock so unlocking will not work.') + print('We tried to unlock it anyway and got back [%s]' % unlock_result.status) +``` + +- Learn more about using a distributed lock: [How-To: Use a lock]({{% ref howto-use-distributed-lock.md %}}). +- Visit [Python SDK examples](https://github.com/dapr/python-sdk/blob/master/examples/distributed_lock) for code samples and instructions to try out distributed lock. 
+ +### Cryptography + +```python +from dapr.clients import DaprClient + +message = 'The secret is "passw0rd"' + +def main(): + with DaprClient() as d: + resp = d.encrypt( + data=message.encode(), + options=EncryptOptions( + component_name='crypto-localstorage', + key_name='rsa-private-key.pem', + key_wrap_algorithm='RSA', + ), + ) + encrypt_bytes = resp.read() + + resp = d.decrypt( + data=encrypt_bytes, + options=DecryptOptions( + component_name='crypto-localstorage', + key_name='rsa-private-key.pem', + ), + ) + decrypt_bytes = resp.read() + + print(decrypt_bytes.decode()) # The secret is "passw0rd" +``` + +- For a full list of state operations visit [How-To: Use the cryptography APIs]({{% ref howto-cryptography.md %}}). +- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples/crypto) for code samples and instructions to try out cryptography + +## Related links +[Python SDK examples](https://github.com/dapr/python-sdk/tree/master/examples) diff --git a/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/_index.md b/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/_index.md new file mode 100644 index 00000000000..8b7bc9c506f --- /dev/null +++ b/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/_index.md @@ -0,0 +1,7 @@ +--- +type: docs +title: "Dapr Python SDK extensions" +linkTitle: "Extensions" +weight: 30000 +description: Python SDK for developing Dapr applications +--- diff --git a/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/python-fastapi.md b/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/python-fastapi.md new file mode 100644 index 00000000000..13b6499b943 --- /dev/null +++ b/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/python-fastapi.md @@ -0,0 +1,115 @@ +--- +type: docs +title: "Dapr Python SDK integration with FastAPI" +linkTitle: "FastAPI" +weight: 200000 +description: How to create Dapr Python virtual actors and pubsub with the 
FastAPI extension +--- + +The Dapr Python SDK provides integration with FastAPI using the `dapr-ext-fastapi` extension. + +## Installation + +You can download and install the Dapr FastAPI extension with: + +{{< tabpane text=true >}} + +{{% tab header="Stable" %}} +```bash +pip install dapr-ext-fastapi +``` +{{% /tab %}} + +{{% tab header="Development" %}} +{{% alert title="Note" color="warning" %}} +The development package will contain features and behavior that will be compatible with the pre-release version of the Dapr runtime. Make sure to uninstall any stable versions of the Python SDK extension before installing the `dapr-dev` package. +{{% /alert %}} + +```bash +pip install dapr-ext-fastapi-dev +``` +{{% /tab %}} + +{{< /tabpane >}} + +## Example + +### Subscribing to events of different types + +```python +import uvicorn +from fastapi import Body, FastAPI +from dapr.ext.fastapi import DaprApp +from pydantic import BaseModel + +class RawEventModel(BaseModel): + body: str + +class User(BaseModel): + id: int + name: str + +class CloudEventModel(BaseModel): + data: User + datacontenttype: str + id: str + pubsubname: str + source: str + specversion: str + topic: str + traceid: str + traceparent: str + tracestate: str + type: str + + +app = FastAPI() +dapr_app = DaprApp(app) + +# Allow handling event with any structure (Easiest, but least robust) +# dapr publish --publish-app-id sample --topic any_topic --pubsub pubsub --data '{"id":"7", "desc": "good", "size":"small"}' +@dapr_app.subscribe(pubsub='pubsub', topic='any_topic') +def any_event_handler(event_data = Body()): + print(event_data) + +# For robustness choose one of the below based on if publisher is using CloudEvents + +# Handle events sent with CloudEvents +# dapr publish --publish-app-id sample --topic cloud_topic --pubsub pubsub --data '{"id":"7", "name":"Bob Jones"}' +@dapr_app.subscribe(pubsub='pubsub', topic='cloud_topic') +def cloud_event_handler(event_data: CloudEventModel): + print(event_data) + 
+# Handle raw events sent without CloudEvents +# curl -X "POST" http://localhost:3500/v1.0/publish/pubsub/raw_topic?metadata.rawPayload=true -H "Content-Type: application/json" -d '{"body": "345"}' +@dapr_app.subscribe(pubsub='pubsub', topic='raw_topic') +def raw_event_handler(event_data: RawEventModel): + print(event_data) + + + +if __name__ == "__main__": + uvicorn.run(app, host="0.0.0.0", port=30212) +``` + +### Creating an actor + +```python +from fastapi import FastAPI +from dapr.ext.fastapi import DaprActor +from demo_actor import DemoActor + +app = FastAPI(title=f'{DemoActor.__name__}Service') + +# Add Dapr Actor Extension +actor = DaprActor(app) + +@app.on_event("startup") +async def startup_event(): + # Register DemoActor + await actor.register_actor(DemoActor) + +@app.get("/GetMyData") +def get_my_data(): + return "{'message': 'myData'}" +``` diff --git a/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/python-flask.md b/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/python-flask.md new file mode 100644 index 00000000000..b4ec58f9c71 --- /dev/null +++ b/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/python-flask.md @@ -0,0 +1,60 @@ +--- +type: docs +title: "Dapr Python SDK integration with Flask" +linkTitle: "Flask" +weight: 300000 +description: How to create Dapr Python virtual actors with the Flask extension +--- + +The Dapr Python SDK provides integration with Flask using the `flask-dapr` extension. + +## Installation + +You can download and install the Dapr Flask extension with: + +{{< tabpane text=true >}} + +{{% tab header="Stable" %}} +```bash +pip install flask-dapr +``` +{{% /tab %}} + +{{% tab header="Development" %}} +{{% alert title="Note" color="warning" %}} +The development package will contain features and behavior that will be compatible with the pre-release version of the Dapr runtime. 
Make sure to uninstall any stable versions of the Python SDK extension before installing the `dapr-dev` package. +{{% /alert %}} + +```bash +pip install flask-dapr-dev +``` +{{% /tab %}} + +{{< /tabpane >}} + +## Example + +```python +from flask import Flask +from flask_dapr.actor import DaprActor + +from dapr.conf import settings +from demo_actor import DemoActor + +app = Flask(f'{DemoActor.__name__}Service') + +# Enable DaprActor Flask extension +actor = DaprActor(app) + +# Register DemoActor +actor.register_actor(DemoActor) + +# Setup method route +@app.route('/GetMyData', methods=['GET']) +def get_my_data(): + return {'message': 'myData'}, 200 + +# Run application +if __name__ == '__main__': + app.run(port=settings.HTTP_APP_PORT) +``` diff --git a/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/python-grpc.md b/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/python-grpc.md new file mode 100644 index 00000000000..e34c213b571 --- /dev/null +++ b/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/python-grpc.md @@ -0,0 +1,118 @@ +--- +type: docs +title: "Getting started with the Dapr Python gRPC service extension" +linkTitle: "gRPC" +weight: 100000 +description: How to get up and running with the Dapr Python gRPC extension +--- + +The Dapr Python SDK provides a built in gRPC server extension, `dapr.ext.grpc`, for creating Dapr services. + +## Installation + +You can download and install the Dapr gRPC server extension with: + +{{< tabpane text=true >}} + +{{% tab header="Stable" %}} +```bash +pip install dapr-ext-grpc +``` +{{% /tab %}} + +{{% tab header="Development" %}} +{{% alert title="Note" color="warning" %}} +The development package will contain features and behavior that will be compatible with the pre-release version of the Dapr runtime. Make sure to uninstall any stable versions of the Python SDK extension before installing the `dapr-dev` package. 
+{{% /alert %}} + +```bash +pip3 install dapr-ext-grpc-dev +``` +{{% /tab %}} + +{{< /tabpane >}} + +## Examples + +The `App` object can be used to create a server. + +### Listen for service invocation requests + +The `InvokeMethodReqest` and `InvokeMethodResponse` objects can be used to handle incoming requests. + +A simple service that will listen and respond to requests will look like: + +```python +from dapr.ext.grpc import App, InvokeMethodRequest, InvokeMethodResponse + +app = App() + +@app.method(name='my-method') +def mymethod(request: InvokeMethodRequest) -> InvokeMethodResponse: + print(request.metadata, flush=True) + print(request.text(), flush=True) + + return InvokeMethodResponse(b'INVOKE_RECEIVED', "text/plain; charset=UTF-8") + +app.run(50051) +``` + +A full sample can be found [here](https://github.com/dapr/python-sdk/tree/v1.0.0rc2/examples/invoke-simple). + +### Subscribe to a topic + +When subscribing to a topic, you can instruct dapr whether the event delivered has been accepted, or whether it should be dropped, or retried later. + +```python +from typing import Optional +from cloudevents.sdk.event import v1 +from dapr.ext.grpc import App +from dapr.clients.grpc._response import TopicEventResponse + +app = App() + +# Default subscription for a topic +@app.subscribe(pubsub_name='pubsub', topic='TOPIC_A') +def mytopic(event: v1.Event) -> Optional[TopicEventResponse]: + print(event.Data(),flush=True) + # Returning None (or not doing a return explicitly) is equivalent + # to returning a TopicEventResponse("success"). 
+ # You can also return TopicEventResponse("retry") for dapr to log + # the message and retry delivery later, or TopicEventResponse("drop") + # for it to drop the message + return TopicEventResponse("success") + +# Specific handler using Pub/Sub routing +@app.subscribe(pubsub_name='pubsub', topic='TOPIC_A', + rule=Rule("event.type == \"important\"", 1)) +def mytopic_important(event: v1.Event) -> None: + print(event.Data(),flush=True) + +# Handler with disabled topic validation +@app.subscribe(pubsub_name='pubsub-mqtt', topic='topic/#', disable_topic_validation=True,) +def mytopic_wildcard(event: v1.Event) -> None: + print(event.Data(),flush=True) + +app.run(50051) +``` + +A full sample can be found [here](https://github.com/dapr/python-sdk/blob/v1.0.0rc2/examples/pubsub-simple/subscriber.py). + +### Setup input binding trigger + +```python +from dapr.ext.grpc import App, BindingRequest + +app = App() + +@app.binding('kafkaBinding') +def binding(request: BindingRequest): + print(request.text(), flush=True) + +app.run(50051) +``` + +A full sample can be found [here](https://github.com/dapr/python-sdk/tree/v1.0.0rc2/examples/invoke-binding). + +## Related links +- [PyPi](https://pypi.org/project/dapr-ext-grpc/) diff --git a/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/_index.md b/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/_index.md new file mode 100644 index 00000000000..12a63f8f6fd --- /dev/null +++ b/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/_index.md @@ -0,0 +1,105 @@ +--- +type: docs +title: "Dapr Python SDK integration with Dapr Workflow extension" +linkTitle: "Dapr Workflow" +weight: 400000 +description: How to get up and running with the Dapr Workflow extension +no_list: true +--- + +The Dapr Python SDK provides a built-in Dapr Workflow extension, `dapr.ext.workflow`, for creating Dapr services. 
+ +## Installation + +You can download and install the Dapr Workflow extension with: + +{{< tabpane text=true >}} + +{{% tab header="Stable" %}} +```bash +pip install dapr-ext-workflow +``` +{{% /tab %}} + +{{% tab header="Development" %}} +{{% alert title="Note" color="warning" %}} +The development package will contain features and behavior that will be compatible with the pre-release version of the Dapr runtime. Make sure to uninstall any stable versions of the Python SDK extension before installing the `dapr-dev` package. +{{% /alert %}} + +```bash +pip install dapr-ext-workflow-dev +``` +{{% /tab %}} + +{{< /tabpane >}} + +## Example + +```python +from time import sleep + +import dapr.ext.workflow as wf + + +wfr = wf.WorkflowRuntime() + + +@wfr.workflow(name='random_workflow') +def task_chain_workflow(ctx: wf.DaprWorkflowContext, wf_input: int): + try: + result1 = yield ctx.call_activity(step1, input=wf_input) + result2 = yield ctx.call_activity(step2, input=result1) + except Exception as e: + yield ctx.call_activity(error_handler, input=str(e)) + raise + return [result1, result2] + + +@wfr.activity(name='step1') +def step1(ctx, activity_input): + print(f'Step 1: Received input: {activity_input}.') + # Do some work + return activity_input + 1 + + +@wfr.activity +def step2(ctx, activity_input): + print(f'Step 2: Received input: {activity_input}.') + # Do some work + return activity_input * 2 + +@wfr.activity +def error_handler(ctx, error): + print(f'Executing error handler: {error}.') + # Do some compensating work + + +if __name__ == '__main__': + wfr.start() + sleep(10) # wait for workflow runtime to start + + wf_client = wf.DaprWorkflowClient() + instance_id = wf_client.schedule_new_workflow(workflow=task_chain_workflow, input=42) + print(f'Workflow started. Instance ID: {instance_id}') + state = wf_client.wait_for_workflow_completion(instance_id) + print(f'Workflow completed! 
Status: {state.runtime_status}') + + wfr.shutdown() +``` + +- Learn more about authoring and managing workflows: + - [How-To: Author a workflow]({{% ref howto-author-workflow.md %}}). + - [How-To: Manage a workflow]({{% ref howto-manage-workflow.md %}}). + - +- Visit [Python SDK examples](https://github.com/dapr/python-sdk/tree/main/examples/workflow) for code samples and instructions to try out Dapr Workflow: + - [Simple workflow example]({{% ref python-workflow.md %}}) + - [Task chaining example](https://github.com/dapr/python-sdk/blob/main/examples/workflow/task_chaining.py) + - [Fan-out/Fan-in example](https://github.com/dapr/python-sdk/blob/main/examples/workflow/fan_out_fan_in.py) + - [Child workflow example](https://github.com/dapr/python-sdk/blob/main/examples/workflow/child_workflow.py) + - [Human approval example](https://github.com/dapr/python-sdk/blob/main/examples/workflow/human_approval.py) + - [Monitor example](https://github.com/dapr/python-sdk/blob/main/examples/workflow/monitor.py) + + +## Next steps + +{{< button text="Getting started with the Dapr Workflow Python SDK" page="python-workflow.md" >}} diff --git a/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/python-workflow.md b/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/python-workflow.md new file mode 100644 index 00000000000..8c882ac72cd --- /dev/null +++ b/sdkdocs/python/content/en/python-sdk-docs/python-sdk-extensions/python-workflow-ext/python-workflow.md @@ -0,0 +1,166 @@ +--- +type: docs +title: "Getting started with the Dapr Workflow Python SDK" +linkTitle: "Workflow" +weight: 30000 +description: How to get up and running with workflows using the Dapr Python SDK +--- + +Let’s create a Dapr workflow and invoke it using the console. 
With the [provided workflow example](https://github.com/dapr/python-sdk/tree/main/examples/workflow/simple.py), you will: + +- Run a [Python console application](https://github.com/dapr/python-sdk/blob/main/examples/workflow/simple.py) that demonstrates workflow orchestration with activities, child workflows, and external events +- Learn how to handle retries, timeouts, and workflow state management +- Use the Python workflow SDK to start, pause, resume, and purge workflow instances + +This example uses the default configuration from `dapr init` in [self-hosted mode](https://github.com/dapr/cli#install-dapr-on-your-local-machine-self-hosted). + +In the Python example project, the `simple.py` file contains the setup of the app, including: +- The workflow definition +- The workflow activity definitions +- The registration of the workflow and workflow activities + +## Prerequisites +- [Dapr CLI]({{% ref install-dapr-cli.md %}}) installed +- Initialized [Dapr environment]({{% ref install-dapr-selfhost.md %}}) +- [Python 3.9+](https://www.python.org/downloads/) installed +- [Dapr Python package]({{% ref "python#installation" %}}) and the [workflow extension]({{% ref "python-workflow/_index.md" %}}) installed +- Verify you're using the latest proto bindings + +## Set up the environment + +Start by cloning the [Python SDK repo]. + +```bash +git clone https://github.com/dapr/python-sdk.git +``` + +From the Python SDK root directory, navigate to the Dapr Workflow example. + +```bash +cd examples/workflow +``` + +Run the following command to install the requirements for running this workflow sample with the Dapr Python SDK. + +```bash +pip3 install -r workflow/requirements.txt +``` + +## Run the application locally + +To run the Dapr application, you need to start the Python program and a Dapr sidecar. 
In the terminal, run: + +```bash +dapr run --app-id wf-simple-example --dapr-grpc-port 50001 --resources-path components -- python3 simple.py +``` + +> **Note:** Since Python3.exe is not defined in Windows, you may need to use `python simple.py` instead of `python3 simple.py`. + + +**Expected output** + +``` +- "== APP == Hi Counter!" +- "== APP == New counter value is: 1!" +- "== APP == New counter value is: 11!" +- "== APP == Retry count value is: 0!" +- "== APP == Retry count value is: 1! This print statement verifies retry" +- "== APP == Appending 1 to child_orchestrator_string!" +- "== APP == Appending a to child_orchestrator_string!" +- "== APP == Appending a to child_orchestrator_string!" +- "== APP == Appending 2 to child_orchestrator_string!" +- "== APP == Appending b to child_orchestrator_string!" +- "== APP == Appending b to child_orchestrator_string!" +- "== APP == Appending 3 to child_orchestrator_string!" +- "== APP == Appending c to child_orchestrator_string!" +- "== APP == Appending c to child_orchestrator_string!" +- "== APP == Get response from hello_world_wf after pause call: Suspended" +- "== APP == Get response from hello_world_wf after resume call: Running" +- "== APP == New counter value is: 111!" +- "== APP == New counter value is: 1111!" +- "== APP == Workflow completed! Result: "Completed" +``` + +## What happened? + +When you run the application, several key workflow features are shown: + +1. **Workflow and Activity Registration**: The application uses Python decorators to automatically register workflows and activities with the runtime. This decorator-based approach provides a clean, declarative way to define your workflow components: + ```python + @wfr.workflow(name='hello_world_wf') + def hello_world_wf(ctx: DaprWorkflowContext, wf_input): + # Workflow definition... + + @wfr.activity(name='hello_act') + def hello_act(ctx: WorkflowActivityContext, wf_input): + # Activity definition... + ``` + +2. 
**Runtime Setup**: The application initializes the workflow runtime and client: + ```python + wfr = WorkflowRuntime() + wfr.start() + wf_client = DaprWorkflowClient() + ``` + +2. **Activity Execution**: The workflow executes a series of activities that increment a counter: + ```python + @wfr.workflow(name='hello_world_wf') + def hello_world_wf(ctx: DaprWorkflowContext, wf_input): + yield ctx.call_activity(hello_act, input=1) + yield ctx.call_activity(hello_act, input=10) + ``` + +3. **Retry Logic**: The workflow demonstrates error handling with a retry policy: + ```python + retry_policy = RetryPolicy( + first_retry_interval=timedelta(seconds=1), + max_number_of_attempts=3, + backoff_coefficient=2, + max_retry_interval=timedelta(seconds=10), + retry_timeout=timedelta(seconds=100), + ) + yield ctx.call_activity(hello_retryable_act, retry_policy=retry_policy) + ``` + +4. **Child Workflow**: A child workflow is executed with its own retry policy: + ```python + yield ctx.call_child_workflow(child_retryable_wf, retry_policy=retry_policy) + ``` + +5. **External Event Handling**: The workflow waits for an external event with a timeout: + ```python + event = ctx.wait_for_external_event(event_name) + timeout = ctx.create_timer(timedelta(seconds=30)) + winner = yield when_any([event, timeout]) + ``` + +6. **Workflow Lifecycle Management**: The example demonstrates how to pause and resume the workflow: + ```python + wf_client.pause_workflow(instance_id=instance_id) + metadata = wf_client.get_workflow_state(instance_id=instance_id) + # ... check status ... + wf_client.resume_workflow(instance_id=instance_id) + ``` + +7. **Event Raising**: After resuming, the workflow raises an event: + ```python + wf_client.raise_workflow_event( + instance_id=instance_id, + event_name=event_name, + data=event_data + ) + ``` + +8. 
**Completion and Cleanup**: Finally, the workflow waits for completion and cleans up: + ```python + state = wf_client.wait_for_workflow_completion( + instance_id, + timeout_in_seconds=30 + ) + wf_client.purge_workflow(instance_id=instance_id) + ``` +## Next steps +- [Learn more about Dapr workflow]({{% ref workflow-overview.md %}}) +- [Workflow API reference]({{% ref workflow_api.md %}}) +- [Try implementing more complex workflow patterns](https://github.com/dapr/python-sdk/tree/main/examples/workflow) diff --git a/sdkdocs/rust b/sdkdocs/rust deleted file mode 160000 index 4475ed57cfd..00000000000 --- a/sdkdocs/rust +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 4475ed57cfdcb912a828f43cace8c0bea3eb99e1 diff --git a/sdkdocs/rust/content/en/rust-sdk-contributing/rust-contributing.md b/sdkdocs/rust/content/en/rust-sdk-contributing/rust-contributing.md new file mode 100644 index 00000000000..e5db90166a2 --- /dev/null +++ b/sdkdocs/rust/content/en/rust-sdk-contributing/rust-contributing.md @@ -0,0 +1,38 @@ +--- +type: docs +title: "Contributing to the Rust SDK" +linkTitle: "Rust SDK" +weight: 3000 +description: Guidelines for contributing to the Dapr Rust SDK +--- + +When contributing to the [Rust SDK](https://github.com/dapr/rust-sdk) the following rules and best-practices should be followed. + +## Examples + +The `examples` directory contains code samples for users to run to try out specific functionality of the various Rust SDK packages and extensions. It also hosts component examples used for validation. When writing new and updated samples keep in mind: + +- All examples should be runnable on Windows, Linux, and MacOS. While Rust code is consistent among operating systems aside from minor OS-feature gating, any pre/post example commands should provide options through [tabpane]({{% ref "contributing-docs.md#tabbed-content" %}}) +- Contain steps to download/install any required pre-requisites. 
Someone coming in with a fresh OS install should be able to start on the example and complete it without an error. Links to external download pages are fine. +- Examples should be pass validation and include mechanical markdown steps and be added to the validation workflow [TBA](#) + +## Docs + +The `daprdocs` directory contains the markdown files that are rendered into the [Dapr Docs](https://docs.dapr.io) website. When the documentation website is built this repo is cloned and configured so that its contents are rendered with the docs content. When writing docs keep in mind: + + - All rules in the [docs guide]({{% ref contributing-docs.md %}}) should be followed in addition to these. + - All files and directories should be prefixed with `rust-` to ensure all file/directory names are globally unique across all Dapr documentation. + +## Update Protobufs + +To pull the protobufs from the `dapr/dapr` repo you can run the script in the repo root like so: + +```bash +./update-protos.sh +``` + +By default, the script fetches the latest proto updates from the master branch of the Dapr repository. If you need to choose a specific release or version, use the -v flag: + +```bash +./update-protos.sh -v v1.13.0 +``` diff --git a/sdkdocs/rust/content/en/rust-sdk-docs/_index.md b/sdkdocs/rust/content/en/rust-sdk-docs/_index.md new file mode 100644 index 00000000000..da2500261de --- /dev/null +++ b/sdkdocs/rust/content/en/rust-sdk-docs/_index.md @@ -0,0 +1,27 @@ +--- +type: docs +title: "Dapr Rust SDK" +linkTitle: "Rust" +weight: 1000 +description: Rust SDK packages for developing Dapr applications +no_list: true +cascade: + github_repo: https://github.com/dapr/rust-sdk + github_subdir: daprdocs/content/en/rust-sdk-docs + path_base_for_github_subdir: content/en/developing-applications/sdks/rust/ + github_branch: main +--- + +{{% alert title="Note" color="primary" %}} +The Dapr Rust-SDK is currently in Alpha. 
+The Dapr Rust SDK is currently in Alpha. Work is underway to bring it to a +stable release, which will likely involve breaking changes.
+```rust +let addr = "https://127.0.0.1".to_string(); + +let mut client = dapr::Client::<dapr::client::TonicClient>::connect(addr, +port).await?; +``` + +Alternatively, if you would like to specify a custom port, this can be done by using this connect method: + +```rust +let mut client = dapr::Client::<dapr::client::TonicClient>::connect_with_port(addr, "3500".to_string()).await?; +```
`save_state`, +`get_state`, and `delete_state`, which can be used like so: