diff --git a/docs/UpdateRTD.md b/docs/UpdateRTD.md
new file mode 100644
index 00000000..22ad323e
--- /dev/null
+++ b/docs/UpdateRTD.md
@@ -0,0 +1,66 @@
+# Managing Aries Cloud Agent Python `Read The Docs` Documentation
+
+This document describes how to maintain the `Read The Docs` documentation that
+is generated from the ACA-Py code base. As the structure of the ACA-Py code
+evolves, the RTD files need to be regenerated and possibly updated, as described here.
+
+## Generating ACA-Py Read The Docs (RTD) documentation
+
+### Before you start
+
+To generate and view the RTD documentation locally for testing, you must install [Sphinx](https://www.sphinx-doc.org/en/master/) and the
+[Sphinx RTD theme](https://pypi.org/project/sphinx-rtd-theme/). Follow the instructions on the respective pages to install
+and verify the installation on your system.
+
+### Generate Module Files
+
+To rebuild the project and settings from scratch (you'll need to move the generated index file up a level):
+
+`rm -rf generated; sphinx-apidoc -f -M -o ./generated ../aries_cloudagent/ $(find ../aries_cloudagent/ -name '*tests*')`
+
+Note that the `find` command is used to exclude any of the `test` Python files from the RTD documentation.
+
+Run `git status` in your repo to see whether the generator updates, adds or removes any existing RTD modules.
+
+### Reviewing the files locally
+
+To generate the HTML version of the module documentation locally, run:
+
+``` bash
+sphinx-build -b html -a -E -c ./ ./ ./_build
+```
+
+Once generated, go into the `_build` folder and open `index.html` in a browser. Note that the `_build` folder is
+`.gitignore`'d and so will not be part of a git push.
+
+### Look for Errors
+
+This is the hard part: looking for errors in docstrings added by devs. Some tips:
+
+- Missing imports (`No module named 'async_timeout'`) can be solved by adding the module to the
+list of `autodoc_mock_imports` in the `conf.py` file in the ACA-Py `docs` folder.
+- Ignore any errors in `.md` files.
+- Ignore the warnings about including `docs/README.md`.
+- Ignore any dist-package errors.
+
+Other than that, please investigate and fix anything you find. Where fixes are needed, they usually amount
+to adhering to the rules around processing docstrings, especially around JSON samples.
+
+### Checking for missing modules
+
+The file `index.rst` in the ACA-Py `docs` folder drives the RTD generation. It picks up all the modules
+in the source code, starting from the root `../aries_cloudagent` folder. However, some modules
+are not picked up automatically from the root and have to be manually added to `index.rst`. To do that:
+
+- Get a list of all generated modules by running: `ls generated | grep "aries_cloudagent.[a-z]*.rst"`
+- Compare that list with the modules listed in the "Subpackages" section of the left side menu in your browser, including any listed below the "Submodules".
+
+If any are missing, you likely need to add them to the `toctree` section of the `index.rst` file.
+You will see there are already several instances of that, notably "connections" and "protocols".
+
+### Updating the [readthedocs.org](https://readthedocs.org) site
+
+The RTD documentation is **not** currently auto-generated, so a manual regeneration of the documentation
+is still required.
+
+> TODO: Automate this when new tags are applied to the repository.
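The comparison described in the "Checking for missing modules" section above can also be scripted. The sketch below is illustrative only and is assumed to be run from the ACA-Py `docs` folder; the `generated` folder, `index.rst`, and the base grep pattern come from the commands in that section, while the loop, the anchored pattern, and the message wording are assumptions of this sketch:

```bash
# List the top-level generated module pages and flag any that index.rst
# does not reference; such modules may need to be added to its toctree.
for f in $(ls generated | grep "^aries_cloudagent\.[a-z_]*\.rst$"); do
  mod="${f%.rst}"                      # e.g. aries_cloudagent.protocols
  grep -q "$mod" index.rst || echo "possibly missing from index.rst: $mod"
done
```

Anything the loop prints is a candidate to add to the `toctree` section of `index.rst`, alongside the existing entries such as "connections" and "protocols".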
diff --git a/docs/assets/README.md b/docs/assets/README.md new file mode 100644 index 00000000..bfc63156 --- /dev/null +++ b/docs/assets/README.md @@ -0,0 +1,16 @@ +# Assets Folder for Documentation + +Put any assets (images, source for images, videos, etc.) in this folder to be referenced in the various documents for this repo. + +## Plantuml Source and Images + +Plantuml diagrams are stored in this folder in source form in files ending in `.puml` and are generated manually using the `./genPlantuml` script. The script uses a docker image from docker-hub and can be run without downloading any dependencies. + +If you don't want to use the script, download plantuml and a command line utility and use that for the plantuml generation. I preferred not having any dependencies used (other than docker) and couldn't find +a nice way to run plantuml headless from a command line. + +## To Do + +It would be better to use a local `Dockerfile` vs. one found on Docker Hub. The one I did find was simple and straight forward. + +I couldn't tell if the svg generation was working so just went with png. Not sure which would be better. \ No newline at end of file diff --git a/docs/assets/deploymentModel-agent.puml b/docs/assets/deploymentModel-agent.puml new file mode 100644 index 00000000..8603f8e3 --- /dev/null +++ b/docs/assets/deploymentModel-agent.puml @@ -0,0 +1,78 @@ +skinparam defaultTextAlignment center + +skinparam titleBorderRoundCorner 15 +skinparam titleBorderThickness 2 +skinparam titleBorderColor red +skinparam titleBackgroundColor White + +skinparam monochrome true + +skinparam componentStyle uml2 + +cloud "The Internet\n" { + + component "Distributed\nLedger" as DL + component "Other\nAgents" as others +} + +package "Aries Cloud Agent - Python" as vx { + package Core as core { + component "Transport Plugins" as tp + package "Conductor" as cond { + component "Msg Receiver\nGet Thread State" as mr + component "Msg Sender\nPut Thread State" as ms + } + component "Dispatcher" as disp + component "Handler API" as hapi + } + package "Protocols" as prot { + component "Protocol 1\nProtocol 2\n.\n.\nProtocol n" as protos + } + component "Controller\nREST API" as rest + package "Handler Plugins" as inthand { + component "VDR Manager\nStorage Manager\n.\n.\n." 
as intmgrs + } + package "Storage Implementation" as is { + database "Secure Storage" as storage + } + intmgrs --> storage + +} + +/' +package "Enterprise Services" { + package "Agent Controller" as per { + component "HTTP Handler" as http + component "Agent\nController\nConfiguration" as config { + database "Conf" as confdb + } + component "Initiator" as init + component "Responder" as resp + '' database "Configuration\nFiles" as configdb + component "Service\nIntegration" as si + '' config --> configdb + si --> init + resp --> si + config --> si + } + component "Service\nBusiness\nLogic" as back + back <--> si +} +'/ + +others -down-> tp +tp --> mr +ms --> tp +mr --> disp +hapi -> intmgrs +ms --> hapi +mr --> hapi +protos --> hapi +rest -right-> protos +'' protos --> http +disp <--> protos +disp --> ms +intmgrs -up-> DL +'' http --> resp +'' resp --> rest +'' init --> rest diff --git a/docs/assets/deploymentModel-controller.puml b/docs/assets/deploymentModel-controller.puml new file mode 100644 index 00000000..d9679f2c --- /dev/null +++ b/docs/assets/deploymentModel-controller.puml @@ -0,0 +1,76 @@ +skinparam defaultTextAlignment center + +skinparam titleBorderRoundCorner 15 +skinparam titleBorderThickness 2 +skinparam titleBorderColor red +skinparam titleBackgroundColor White + +skinparam monochrome true + +skinparam componentStyle uml2 + +/' +cloud "The Internet" { + package "DID Method Network" as SN { + component "Distributed\nLedger" as DL + } + component "Other Agents" as others +} +'/ +package "Aries Cloud Agent" as ica { +/' + package Core as core { + component "Transport Plugins" as tp + package "Conductor" as cond { + component "Msg Receiver\nGet Thread State" as mr + component "Msg Sender\nPut Thread State" as ms + } + component "Dispatcher" as disp + component "Handler API" as hapi + } +'/ + package "Protocols" as prot { + component "Protocol 1\nProtocol 2\n.\n.\nProtocol n" as protos + } + component "Controller\nREST API" as rest +/' + package "Handler Plugins" as inthand { + component "Storage Manager\nWallet Manager\nNode Pool Manager\n.\n.\n." 
as intmgrs + } + package "DID Method SDK" as is { + database "Secure Storage" as wallet + } + intmgrs --> wallet +'/ +} + + +package "Controller" as per { + component "HTTP Handler" as http + database "Agent\nController\nConfiguration" as config + component "Initiator" as init + component "Responder" as resp + '' database "Configuration\nFiles" as configdb + component "Business\nLogic" as si + '' config --> configdb + si --> init + resp --> si + config --> si +} + +'' others -down-> tp +'' tp --> mr +'' ms --> tp +'' mr --> disp +'' hapi -> intmgrs +'' ms --> hapi +'' mr --> hapi +'' mf --> hapi +rest -right-> protos +protos --> http: Web Hooks +'' disp <--> protos +'' disp --> ms +'' intmgrs -up-> DL +http --> resp +resp --> rest +init --> rest diff --git a/docs/assets/deploymentModel-full.puml b/docs/assets/deploymentModel-full.puml new file mode 100644 index 00000000..b7611a53 --- /dev/null +++ b/docs/assets/deploymentModel-full.puml @@ -0,0 +1,37 @@ +skinparam defaultTextAlignment center + +skinparam titleBorderRoundCorner 15 +skinparam titleBorderThickness 2 +skinparam titleBorderColor red +skinparam titleBackgroundColor White + +skinparam monochrome true + +skinparam componentStyle uml2 + +cloud "The Internet" { + package "DID Method Network" as SN { + component "Distributed\nLedger" as DL + } + component "Other Agents" as others +} + +package "Aries Cloud Agent" as vx { + component "Core Capabilities" as core + package "DIDComm Protocols" as prot { + component "Protocol 1\nProtocol 2\n.\n.\nProtocol n" as protos + } + component "Controller\nREST API" as rest +} + +package "Controller" as per { + component "Application \nBusiness\nLogic" as bl +} + +others -down-> core +protos -up-> core +rest -right-> protos +protos --> bl +bl --> rest +core <--> protos +core -up-> SN diff --git a/docs/assets/endorse-cred-def.puml b/docs/assets/endorse-cred-def.puml new file mode 100644 index 00000000..a1a78c77 --- /dev/null +++ b/docs/assets/endorse-cred-def.puml @@ -0,0 +1,75 @@ +@startuml +' List of actors for our use case +actor Admin +participant CredDefRoutes +participant RevocationRoutes +participant IndyRevocation +participant Ledger +participant TransactionManager +participant EventBus +participant OutboundHandler +participant EndorsedTxnHandler +boundary OtherAgent + +' Sequence for writing a new credential definition +Admin --> CredDefRoutes: POST /credential-definitions +group Endorse transaction process +CredDefRoutes --> Ledger: create_and_send_credential_definition() +CredDefRoutes --> TransactionManager: create_record() +CredDefRoutes --> TransactionManager: create_request() +CredDefRoutes --> OutboundHandler: send_outbound_msg() +OutboundHandler --> OtherAgent: send_msg() +OtherAgent --> OtherAgent: endorse_msg() +EndorsedTxnHandler <-- OtherAgent: send_msg() +TransactionManager <-- EndorsedTxnHandler: receive_endorse_response() +TransactionManager <-- EndorsedTxnHandler: complete_transaction() +Ledger <-- TransactionManager: txn_submit() +TransactionManager --> TransactionManager: endorsed_txn_post_processing() +TransactionManager --> EventBus: notify_cred_def_event() +end + +' Create the revocation registry once the credential definition is written +CredDefRoutes <-- EventBus: on_cred_def_event() +CredDefRoutes --> IndyRevocation: init_issuer_registry() +IndyRevocation --> EventBus: notify_revocation_reg_init_event() +RevocationRoutes <-- EventBus: on_revocation_registry_init_event() +RevocationRoutes --> RevocationRoutes: generate_tails() +group Endorse transaction process 
+RevocationRoutes --> Ledger:send_revoc_reg_def() +RevocationRoutes --> TransactionManager: create_record() +RevocationRoutes --> TransactionManager: create_request() +RevocationRoutes --> OutboundHandler: send_outbound_msg() +OutboundHandler --> OtherAgent: send_msg() +OtherAgent --> OtherAgent: endorse_msg() +EndorsedTxnHandler <-- OtherAgent: send_msg() +TransactionManager <-- EndorsedTxnHandler: receive_endorse_response() +TransactionManager <-- EndorsedTxnHandler: complete_transaction() +Ledger <-- TransactionManager: txn_submit() +TransactionManager --> TransactionManager: endorsed_txn_post_processing() +TransactionManager --> EventBus: notify_revocation_reg_endorsed_event() +end + +' Now create the revocation entry (accumulator) +RevocationRoutes <-- EventBus: on_revocation_registry_endorsed_event() +RevocationRoutes --> RevocationRoutes: upload_tails() +RevocationRoutes --> EventBus: notify_revocation_entry_event() +RevocationRoutes <-- EventBus: on_revocation_entry_event() +group Endorse transaction process +RevocationRoutes --> IndyRevocation: send_entry() +IndyRevocation --> Ledger: send_entry() +RevocationRoutes --> TransactionManager: create_record() +RevocationRoutes --> TransactionManager: create_request() +RevocationRoutes --> OutboundHandler: send_outbound_msg() +OutboundHandler --> OtherAgent: send_msg() +OtherAgent --> OtherAgent: endorse_msg() +EndorsedTxnHandler <-- OtherAgent: send_msg() +TransactionManager <-- EndorsedTxnHandler: receive_endorse_response() +TransactionManager <-- EndorsedTxnHandler: complete_transaction() +Ledger <-- TransactionManager: txn_submit() +TransactionManager --> TransactionManager: endorsed_txn_post_processing() + +' Notify that the revocation entry is completed (no one listens to this notification yet) +TransactionManager --> EventBus: notify_revocation_entry_endorsed_event() +end + +@enduml diff --git a/docs/assets/endorse-public-did.puml b/docs/assets/endorse-public-did.puml new file mode 100644 index 00000000..63de78bb --- /dev/null +++ b/docs/assets/endorse-public-did.puml @@ -0,0 +1,53 @@ +@startuml +' List of actors for our use case +actor Admin +participant WalletRoutes +participant IndyWallet +participant LedgerRoutes +participant Ledger +participant TransactionManager +participant EventBus +participant OutboundHandler +participant EndorsedTxnHandler +boundary OtherAgent + +' Sequence for writing a new DID on the ledger (assumes the author already has a DID) +Admin --> WalletRoutes: POST /wallet/did/create +Admin --> LedgerRoutes: POST /ledger/register-nym +group Endorse transaction process +LedgerRoutes --> Ledger: register_nym() +LedgerRoutes --> TransactionManager: create_record() +LedgerRoutes --> TransactionManager: create_request() +LedgerRoutes --> OutboundHandler: send_outbound_msg() +OutboundHandler --> OtherAgent: send_msg() +OtherAgent --> OtherAgent: endorse_msg() +EndorsedTxnHandler <-- OtherAgent: send_msg() +TransactionManager <-- EndorsedTxnHandler: receive_endorse_response() +TransactionManager <-- EndorsedTxnHandler: complete_transaction() +Ledger <-- TransactionManager: txn_submit() +TransactionManager --> TransactionManager: endorsed_txn_post_processing() +TransactionManager --> EventBus: notify_endorse_did_event() +end + +WalletRoutes <-- EventBus: on_register_nym_event() +WalletRoutes --> WalletRoutes:promote_wallet_public_did() +WalletRoutes --> IndyWallet:set_public_did() +group Endorse transaction process +WalletRoutes --> IndyWallet:set_did_endpoint() +IndyWallet --> Ledger:update_endpoint_for_did() 
+WalletRoutes --> TransactionManager: create_record() +WalletRoutes --> TransactionManager: create_request() +WalletRoutes --> OutboundHandler: send_outbound_msg() +OutboundHandler --> OtherAgent: send_msg() +OtherAgent --> OtherAgent: endorse_msg() +EndorsedTxnHandler <-- OtherAgent: send_msg() +TransactionManager <-- EndorsedTxnHandler: receive_endorse_response() +TransactionManager <-- EndorsedTxnHandler: complete_transaction() +Ledger <-- TransactionManager: txn_submit() +TransactionManager --> TransactionManager: endorsed_txn_post_processing() + +' notification that no one is listening to yet +TransactionManager --> EventBus: notify_endorse_did_attrib_event() +end + +@enduml diff --git a/docs/assets/endorser-design.puml b/docs/assets/endorser-design.puml new file mode 100644 index 00000000..39883ea6 --- /dev/null +++ b/docs/assets/endorser-design.puml @@ -0,0 +1,31 @@ +@startuml +interface AdminUser + +interface OtherAgent + +object TransactionRoutes + +object TransactionHandlers + +AdminUser --> TransactionRoutes: invoke_endpoint() + +OtherAgent --> TransactionHandlers: send_message() + +object TransactionManager + +object Wallet + +TransactionManager --> Wallet: manage_records() + +TransactionRoutes --> TransactionManager: invoke_api() +TransactionHandlers --> TransactionManager: handle_msg() + +object EventBus + +TransactionManager --> EventBus: notify() + +interface OtherProtocolRoutes + +OtherProtocolRoutes --> EventBus: subscribe() +EventBus --> OtherProtocolRoutes: notify() +@enduml diff --git a/docs/assets/genPlantuml b/docs/assets/genPlantuml new file mode 100755 index 00000000..82c59e34 --- /dev/null +++ b/docs/assets/genPlantuml @@ -0,0 +1,8 @@ +#!/bin/bash + +echo Generating images from plantuml source files + +for i in *.puml ; do + echo Generating image from: $i + cat $i | docker run --rm -i think/plantuml -tpng > `echo $i | sed "s/puml/png/"` +done diff --git a/docs/assets/inbound-messaging.puml b/docs/assets/inbound-messaging.puml new file mode 100644 index 00000000..4607463b --- /dev/null +++ b/docs/assets/inbound-messaging.puml @@ -0,0 +1,37 @@ +@startuml + +participant "Inbound\nMessage\nHandler" as oag +participant "http\nTransport" as ht +participant "Internal\nTransport\nManager" as itm +participant "Inbound\nSession" as is +participant "Conductor" as con +participant "Dispatcher" as disp +participant "Responder" as resp +participant "Message\nProtocol\nHandler" as mh + + +oag -> ht: "inbound_message_handler()" +ht->itm: "create_session()" +itm -> is: "create" +is --> itm +itm --> ht +ht --> is: "receive()" +is --> is: "parse_inbound()" +is --> is: "receive_inbound()" +is --> is: "process_inbound()" +is --> is: "inbound_handler()" +is --> con: "inbound_message_router()" +con --> disp: "queue_message()" +disp --> disp: "handle_message()" +disp --> disp: "make_message()" +disp --> resp: "create()" +disp --> mh: "handle()" +mh-->resp: "send_reply()" +mh --> disp: "" +disp --> con: "" +con --> con: "dispatch_complete()" +con --> is +is --> ht + + +@enduml diff --git a/docs/assets/mediation-message-flow.puml b/docs/assets/mediation-message-flow.puml new file mode 100644 index 00000000..32a17358 --- /dev/null +++ b/docs/assets/mediation-message-flow.puml @@ -0,0 +1,147 @@ +@startuml + +' Make the notes not look so awful + +actor Alice as Alice +entity Mediator as Med +actor Bob as Bob +autonumber + +== Arrange for Mediation with the Mediator == + +Alice <--> Med : Establish connection (details omitted) + +loop until terms are acceptable + Alice -> Med : Mediation 
Request + note over Alice, Med: Establish terms of Mediation... + Med -> Alice : Mediation deny + note over Alice, Med: Mediation counter terms from Mediator +end + +Alice <- Med : Mediation grant +note over Alice, Med +Mediator reports routing keys and endpoint to Alice. + +{ + "@type": ".../coordinate-mediation/1.0/grant", + "routing_keys": [""], + "endpoint": "" +} +end note + +... Some time later ... + +== Create a Mediated Connection == +group Invitation + Alice -> Alice : Create invitation + + Alice -> Med : Keylist update + note over Alice, Bob + Alice sends invitation key to mediator with keylist update message. + + { + "@type": ".../coordinate-mediation/1.0/keylist-update" + "updates": [ + { + "recipient_key": "", + "action": "add" + } + ] + } + end note + + Alice --> Bob : Transmit Invitation (Out of Band) + note over Alice, Bob + Mediator routing keys and endpoint used for invitation. + + { + "@type": ".../connections/1.0/invite", + "routingKeys": [""], + "recipientKeys": [""], + "serviceEndpoint": "" + } + end note +end + +group Connection Request + Bob -> Bob : Create connection request + Bob -> Bob : Prepare message for sending + note right of Bob + 1. Encrypt request for Alice + 2. Wrap message in Forward Message + 3. Pop key from "routingKeys", Encrypt message for key + 4. Repeat for each remaining key in "routingKeys" + end note + + Bob -> Med : Forward {Connection Request} + note right + Bob's response will be sent + to the mediator the mediator + will forward response to Alice + end note + Med -> Med : Process Forward + note right of Med + 1. Unpack message + 2. Inspect forward "to" field + 3. Look up key in routing tables + end note + Alice <- Med : Connection Request +end + +group Connection Response + Alice -> Alice : Create Response + Alice -> Med : Keylist Update + note over Alice, Bob + Alice sends updates to mediator, including adding + the new connection keys and removing invitation key. + + { + "@type": ".../coordinate-mediation/1.0/keylist-update" + "updates": [ + { + "recipient_key": "", + "action": "add" + }, + { + "recipient_key": " Bob : Connection Response + note left + Connection response sent to + Bob as normal. Sent DID Doc + includes routing keys from + the mediator and the mediator + endpoint for the service + endpoint. + end note +end + +== Mediation == + +Bob -> Med : Forward {Message} +note right +Messages are encrypted +for Alice and then wrapped +in a forward message for +the Mediator. +end note + +Alice <- Med : Message +note left +Mediator decrypts the forward +message, inspects the "to", +and forwards to Alice. +Alice decrypts final message. +end note + +Alice -> Bob : Message +note right +Outbound messages to Bob are sent +directly, not through Mediator. +end note + +@enduml diff --git a/docs/contributing/CODE_OF_CONDUCT.md b/docs/contributing/CODE_OF_CONDUCT.md index 82defcd4..c74f9bde 100644 --- a/docs/contributing/CODE_OF_CONDUCT.md +++ b/docs/contributing/CODE_OF_CONDUCT.md @@ -151,16 +151,18 @@ and any kind of face-to-face meetings or discussions. ## Incident Procedure To report incidents or to appeal reports of incidents, send email to Mike Dolan -(mdolan@linuxfoundation.org) or Angela Brown (angela@linuxfoundation.org). Please include any -available relevant information, including links to any publicly accessible material relating to the -matter. Every effort will be taken to ensure a safe and collegial environment in which to -collaborate on matters relating to the Project. 
In order to protect the community, the Project -reserves the right to take appropriate action, potentially including the removal of an individual -from any and all participation in the project. The Project will work towards an equitable resolution -in the event of a misunderstanding. +([mdolan@linuxfoundation.org](mailto:mdolan@linuxfoundation.org)) or Angela +Brown ([angela@linuxfoundation.org](mailto:angela@linuxfoundation.org)). Please +include any available relevant information, including links to any publicly +accessible material relating to the matter. Every effort will be taken to ensure +a safe and collegial environment in which to collaborate on matters relating to +the Project. In order to protect the community, the Project reserves the right +to take appropriate action, potentially including the removal of an individual +from any and all participation in the project. The Project will work towards an +equitable resolution in the event of a misunderstanding. ## Credits This code is based on the [W3C’s Code of Ethics and Professional Conduct](https://www.w3.org/Consortium/cepc) with some -additions from the [Cloud Foundry](https://www.cloudfoundry.org/)‘s Code of Conduct. \ No newline at end of file +additions from the [Cloud Foundry](https://www.cloudfoundry.org/)‘s Code of Conduct. diff --git a/docs/contributing/CONTRIBUTING.md b/docs/contributing/CONTRIBUTING.md index 1fc2001a..369ab200 100644 --- a/docs/contributing/CONTRIBUTING.md +++ b/docs/contributing/CONTRIBUTING.md @@ -1,4 +1,4 @@ -## How to contribute +# How to contribute You are encouraged to contribute to the repository by **forking and submitting a pull request**. @@ -21,12 +21,12 @@ A configuration for [pre-commit](https://pre-commit.com/) is included in this re On each commit, pre-commit hooks will run that verify the committed code complies with ruff and is formatted with black. To install the ruff and black checks: -``` -$ pre-commit install +```bash +pre-commit install ``` To install the commit message linter: -``` -$ pre-commit install --hook-type commit-msg +```bash +pre-commit install --hook-type commit-msg ``` diff --git a/docs/contributing/PUBLISHING.md b/docs/contributing/PUBLISHING.md index ec98ebdd..24b41edc 100644 --- a/docs/contributing/PUBLISHING.md +++ b/docs/contributing/PUBLISHING.md @@ -72,7 +72,7 @@ s/^/- / merged:>2022-04-07`) and for each page, highlight, and copy the text of only the list of PRs on the page to use in the following step. - For each page, run the command - `sed -e :a -e '$!N;s/\n#/ #/;ta' -e 'P;D' < Click here to view screenshot of the revocation registry on the ledger - Ledger + Ledger ## Accept the Invitation @@ -181,18 +190,18 @@ When the Faber agent starts up it automatically creates an invitation and genera
Click here to view screenshot - Accept Invitation + Accept Invitation
The mobile agent will give you feedback on the connection process, something like "A connection was added to your wallet".
Click here to view screenshot - Add Connection to Wallet + Add Connection to Wallet
Click here to view screenshot - Add Connection to Wallet + Add Connection to Wallet
Switch your browser back to Play with Docker. You should see that the connection has been established, and there is a prompt for what actions you want to take, e.g. "Issue Credential", "Send Proof Request" and so on. @@ -219,7 +228,7 @@ In the Faber console, select option `1` to send a credential to the mobile agent
Click here to view screenshot - Issue Credential + Issue Credential
The Faber agent outputs details to the console; e.g., @@ -239,15 +248,15 @@ The credential offer should automatically show up in the mobile agent. Accept th
Click here to view screenshot - Credential Offer + Credential Offer
Click here to view screenshot - Credential Details + Credential Details
Click here to view screenshot - Credential Acceptance + Credential Acceptance
## Issue a Presentation Request @@ -258,7 +267,7 @@ In the Faber console, select option `2` to send a proof request to the mobile ag
Click here to view screenshot - Request Proof + Request Proof
## Present the Proof @@ -267,15 +276,15 @@ The presentation (proof) request should automatically show up in the mobile agen
Click here to view screenshot - Proof Request Notice + Proof Request Notice
Click here to view screenshot - Proof Request Details + Proof Request Details
Click here to view screenshot - Proof Presentation + Proof Presentation
If the mobile agent is able to successfully prepare and send the proof, you can go back to the Play with Docker terminal to see the status of the proof. @@ -288,7 +297,7 @@ In the Faber console window, the proof should be received as validated.
Click here to view screenshot - Proof Validation + Proof Validation
## Revoke the Credential and Send Another Proof Request @@ -299,7 +308,7 @@ Once that is done, try sending another proof request and see what happens! Exper
Click here to view screenshot - Revocation + Revocation
## Send a Connectionless Proof Request diff --git a/docs/demo/AliceWantsAJsonCredential.md b/docs/demo/AliceWantsAJsonCredential.md index 6681fff2..e69de29b 100644 --- a/docs/demo/AliceWantsAJsonCredential.md +++ b/docs/demo/AliceWantsAJsonCredential.md @@ -1,592 +0,0 @@ - -# How to Issue JSON-LD Credentials using Aca-py - -Aca-py has the capability to issue and verify both Indy and JSON-LD (W3C compliant) credentials. - -The JSON-LD support is documented [here](../../features/JsonLdCredentials) - this document will provide some additional detail in how to use the demo and admin api to issue and prove JSON-LD credentials. - - -## Setup Agents to Issue JDON-LD Credentials - -Clone this repository to a directory on your local: - -```bash -git clone https://github.com/hyperledger/aries-cloudagent-python.git -cd aries-cloudagent-python/demo -``` - -Open up a second shell (so you have 2 shells open in the `demo` directory) and in one shell: - -```bash -LEDGER_URL=http://dev.greenlight.bcovrin.vonx.io ./run_demo faber --did-exchange --aip 20 --cred-type json-ld -``` - -... and in the other: - -```bash -LEDGER_URL=http://dev.greenlight.bcovrin.vonx.io ./run_demo alice -``` - -Note that you start the `faber` agent with AIP2.0 options. (When you specify `--cred-type json-ld` faber will set aip to `20` automatically, -so the `--aip` option is not strictly required). Note as well the use of the `LEDGER_URL`. Technically, that should not be needed if we aren't -doing anything with an Indy ledger-based credentials. However, there must be something in the way that the Faber and Alice controllers are starting up that requires access to a ledger. - -Also note that the above will only work with the `/issue-credential-2.0/create-offer` endpoint. If you want to use the `/issue-credential-2.0/send` endpoint - which automates each step of the credential exchange - you will need to include the `--no-auto` option when starting each of the alice and faber agents (since the alice and faber controllers *also* automatically respond to each step in the credential exchange). - -(Alternately you can run run Alice and Faber agents locally, see the `./faber-local.sh` and `./alice-local.sh` scripts in the `demo` directory.) - -Copy the "invitation" json text from the Faber shell and paste into the Alice shell to establish a connection between the two agents. - -(If you are running with `--no-auto` you will also need to call the `/connections/{conn_id}/accept-invitation` endpoint in alice's admin api swagger page.) - -Now open up two browser windows to the [Faber](http://localhost:8021/api/doc) and [Alice](http://localhost:8031/api/doc) admin api swagger pages. - -Using the Faber admin api, you have to create a DID with the appropriate: - -- DID method ("key" or "sov") -- key type "ed25519" or "bls12381g2" (corresponding to signature types "Ed25519Signature2018" or "BbsBlsSignature2020") -- if you use DID method "sov" you must use key type "ed25519" - -Note that "did:sov" must be a public DID (i.e. registered on the ledger) but "did:key" is not. 
- -For example, in Faber's swagger page call the `/wallet/did/create` endpoint with the following payload: - -``` -{ - "method": "key", - "options": { - "key_type": "bls12381g2" // or ed25519 - } -} -``` - -This will return something like: - -``` -{ - "result": { - "did": "did:key:zUC71KdwBhq1FioWh53VXmyFiGpewNcg8Ld42WrSChpMzzskRWwHZfG9TJ7hPj8wzmKNrek3rW4ZkXNiHAjVchSmTr9aNUQaArK3KSkTySzjEM73FuDV62bjdAHF7EMnZ27poCE", - "verkey": "mV6482Amu6wJH8NeMqH3QyTjh6JU6N58A8GcirMZG7Wx1uyerzrzerA2EjnhUTmjiSLAp6CkNdpkLJ1NTS73dtcra8WUDDBZ3o455EMrkPyAtzst16RdTMsGe3ctyTxxJav", - "posture": "wallet_only", - "key_type": "bls12381g2", - "method": "key" - } -} -``` - -You do *not* create a schema or cred def for a JSON-LD credential (these are only required for "indy" credentials). - -You will need to create a DID as above for Alice as well (`/wallet/did/create` etc ...). - -Congratulations, you are now ready to start issuing JSON-LD credentials! - -- You have two agents with a connection established between the agents - you will need to copy Faber's `connection_id` into the examples below. -- You have created a (non-public) DID for Faber to use to sign/issue the credentials - you will need to copy the DID that you created above into the examples below (as `issuer`). -- You have created a (non-public) DID for Alice to use as her `credentialSubject.id` - this is required for Alice to sign the proof (the `credentialSubject.id` is not required, but then the provided presentation can't be verified). - -To issue a credential, use the `/issue-credential-2.0/send-offer` endpoint. (You can also use the `/issue-credential-2.0/send`) endpoint, if, as mentioned above, you have included the `--no-auto` when starting both of the agents.) - -You can test with this example payload (just replace the "connection_id", "issuer" key, "credentialSubject.id" and "proofType" with appropriate values: - -``` -{ - "connection_id": "4fba2ce5-b411-4ecf-aa1b-ec66f3f6c903", - "filter": { - "ld_proof": { - "credential": { - "@context": [ - "https://www.w3.org/2018/credentials/v1", - "https://www.w3.org/2018/credentials/examples/v1" - ], - "type": ["VerifiableCredential", "UniversityDegreeCredential"], - "issuer": "did:key:zUC71KdwBhq1FioWh53VXmyFiGpewNcg8Ld42WrSChpMzzskRWwHZfG9TJ7hPj8wzmKNrek3rW4ZkXNiHAjVchSmTr9aNUQaArK3KSkTySzjEM73FuDV62bjdAHF7EMnZ27poCE", - "issuanceDate": "2020-01-01T12:00:00Z", - "credentialSubject": { - "id": "did:key:aksdkajshdkajhsdkjahsdkjahsdj", - "givenName": "Sally", - "familyName": "Student", - "degree": { - "type": "BachelorDegree", - "degreeType": "Undergraduate", - "name": "Bachelor of Science and Arts" - }, - "college": "Faber College" - } - }, - "options": { - "proofType": "BbsBlsSignature2020" - } - } - } -} -``` - -Note that if you have the "auto" settings on, this is all you need to do. Otherwise you need to call the `/send-request`, `/store`, etc endpoints to complete the protocol. 
- -To see the issued credential, call the `/credentials/w3c` endpoint on Alice's admin api - this will return something like: - -``` -{ - "results": [ - { - "contexts": [ - "https://w3id.org/security/bbs/v1", - "https://www.w3.org/2018/credentials/examples/v1", - "https://www.w3.org/2018/credentials/v1" - ], - "types": [ - "UniversityDegreeCredential", - "VerifiableCredential" - ], - "schema_ids": [], - "issuer_id": "did:key:zUC71KdwBhq1FioWh53VXmyFiGpewNcg8Ld42WrSChpMzzskRWwHZfG9TJ7hPj8wzmKNrek3rW4ZkXNiHAjVchSmTr9aNUQaArK3KSkTySzjEM73FuDV62bjdAHF7EMnZ27poCE", - "subject_ids": [], - "proof_types": [ - "BbsBlsSignature2020" - ], - "cred_value": { - "@context": [ - "https://www.w3.org/2018/credentials/v1", - "https://www.w3.org/2018/credentials/examples/v1", - "https://w3id.org/security/bbs/v1" - ], - "type": [ - "VerifiableCredential", - "UniversityDegreeCredential" - ], - "issuer": "did:key:zUC71Kd...poCE", - "issuanceDate": "2020-01-01T12:00:00Z", - "credentialSubject": { - "id": "did:key:aksdkajshdkajhsdkjahsdkjahsdj", - "givenName": "Sally", - "familyName": "Student", - "degree": { - "type": "BachelorDegree", - "degreeType": "Undergraduate", - "name": "Bachelor of Science and Arts" - }, - "college": "Faber College" - }, - "proof": { - "type": "BbsBlsSignature2020", - "proofPurpose": "assertionMethod", - "verificationMethod": "did:key:zUC71Kd...poCE#zUC71Kd...poCE", - "created": "2021-05-19T16:19:44.458170", - "proofValue": "g0weLyw2Q+niQ4pGfiXB...tL9C9ORhy9Q==" - } - }, - "cred_tags": {}, - "record_id": "365ab87b12f74b2db784fdd4db8419f5" - } - ] -} -``` - -If you *don't* see the credential in your wallet, look up the credential exchange record (in alice's admin api - `/issue-credential-2.0/records`) and check the state. If the state is `credential-received`, then the credential has been received but not stored, in this case just call the `/store` endpoint for this credential exchange. - - -## Building More Realistic JSON-LD Credentials - -The above example uses the "https://www.w3.org/2018/credentials/examples/v1" context, which should never be used in a real application. - -To build credentials in real life, you first determine which attributes you need and then include the appropriate contexts. - - -### Context schema.org - -You can use attributes defined on [schema.org](https://schema.org). Although this is *NOT RECOMMENDED* (included here for illustrative purposes only) - individual attributes can't be validated (see the comment later on). - -You first include `https://schema.org` in the `@context` block of the credential as follows: - -``` -"@context": [ - "https://www.w3.org/2018/credentials/v1", - "https://schema.org" -], -``` - -Then you review the [attributes and objects defined by `https://schema.org`](https://schema.org/docs/schemas.html) and decide what you need to include in your credential. 
- -For example to issue a credetial with [givenName](https://schema.org/givenName), [familyName](https://schema.org/familyName) and [alumniOf](https://schema.org/alumniOf) attributes, submit the following: - -``` -{ - "connection_id": "ad35a4d8-c84b-4a4f-a83f-1afbf134b8b9", - "filter": { - "ld_proof": { - "credential": { - "@context": [ - "https://www.w3.org/2018/credentials/v1", - "https://schema.org" - ], - "type": ["VerifiableCredential", "Person"], - "issuer": "did:key:zUC71pj2gpDLfcZ9DE1bMtjZGWCSLhkQsUCaKjqXtCftGkz27894pEX9VvGNiFsaV67gqv2TEPQ2aDaDDdTDNp42LfDdK1LaWSBCfzsQEyaiR1zjZm1RtoRu1ZM6v6vz4TiqDgU", - "issuanceDate": "2020-01-01T12:00:00Z", - "credentialSubject": { - "id": "did:key:aksdkajshdkajhsdkjahsdkjahsdj", - "givenName": "Sally", - "familyName": "Student", - "alumniOf": "Example University" - } - }, - "options": { - "proofType": "BbsBlsSignature2020" - } - } - } -} -``` - -Note that with `https://schema.org`, if you include attributes that aren't defined by *any* context, you will *not* get an error. For example you can try replacing the `credentialSubject` in the above with: - -``` -"credentialSubject": { - "id": "did:key:aksdkajshdkajhsdkjahsdkjahsdj", - "givenName": "Sally", - "familyName": "Student", - "alumniOf": "Example University", - "someUndefinedAttribute": "the value of the attribute" -} -``` - -... and the credential issuance *should* fail, however `https://schema.org` defines a `@vocab` that by default all terms derive from ([see here](https://stackoverflow.com/questions/30945898/what-is-the-use-of-vocab-in-json-ld-and-what-is-the-difference-to-context/30948037#30948037)). - -You can include more complex schemas, for example to use the schema.org [Person](https://schema.org/Person) schema (which includes `givenName` and `familyName`): - -``` -{ - "connection_id": "ad35a4d8-c84b-4a4f-a83f-1afbf134b8b9", - "filter": { - "ld_proof": { - "credential": { - "@context": [ - "https://www.w3.org/2018/credentials/v1", - "https://schema.org" - ], - "type": ["VerifiableCredential", "Person"], - "issuer": "did:key:zUC71pj2gpDLfcZ9DE1bMtjZGWCSLhkQsUCaKjqXtCftGkz27894pEX9VvGNiFsaV67gqv2TEPQ2aDaDDdTDNp42LfDdK1LaWSBCfzsQEyaiR1zjZm1RtoRu1ZM6v6vz4TiqDgU", - "issuanceDate": "2020-01-01T12:00:00Z", - "credentialSubject": { - "id": "did:key:aksdkajshdkajhsdkjahsdkjahsdj", - "student": { - "type": "Person", - "givenName": "Sally", - "familyName": "Student", - "alumniOf": "Example University" - } - } - }, - "options": { - "proofType": "BbsBlsSignature2020" - } - } - } -} -``` - - -## Credential-Specific Contexts - -The recommended approach to defining credentials is to define a credential-specific vocaublary (or make use of existing ones). (Note that these can include references to `https://schema.org`, you just shouldn't uste this directly in your credential.) 
- - -### Credential Issue Example - -The following example uses the W3C citizenship context to issue a PermanentResident credential (replace the `connection_id`, `issuer` and `credentialSubject.id` with your local values): - -``` -{ - "connection_id": "41acd909-9f45-4c69-8641-8146e0444a57", - "filter": { - "ld_proof": { - "credential": { - "@context": [ - "https://www.w3.org/2018/credentials/v1", - "https://w3id.org/citizenship/v1" - ], - "type": [ - "VerifiableCredential", - "PermanentResident" - ], - "id": "https://credential.example.com/residents/1234567890", - "issuer": "did:key:zUC7Dus47jW5Avcne8LLsUvJSdwspmErgehxMWqZZy8eSSNoHZ4x8wgs77sAmQtCADED5RQP1WWhvt7KFNm6GGMxdSGpKu3PX6R9a61G9VoVsiFoRf1yoK6pzhq9jtFP3e2SmU9", - "issuanceDate": "2020-01-01T12:00:00Z", - "credentialSubject": { - "type": [ - "PermanentResident" - ], - "id": "did:key:zUC7CXi82AXbkv4SvhxDxoufrLwQSAo79qbKiw7omCQ3c4TyciDdb9s3GTCbMvsDruSLZX6HNsjGxAr2SMLCNCCBRN5scukiZ4JV9FDPg5gccdqE9nfCU2zUcdyqRiUVnn9ZH83", - "givenName": "ALICE", - "familyName": "SMITH", - "gender": "Female", - "birthCountry": "Bahamas", - "birthDate": "1958-07-17" - } - }, - "options": { - "proofType": "BbsBlsSignature2020" - } - } - } -} -``` - -Copy and paste this content into Faber's `/issue-credential-2.0/send-offer` endpoint, and it will kick off the exchange process to issue a W3C credential to Alice. - -In Alice's swagger page, submit the `/credentials/records/w3c` endpoint to see the issued credential. - - -### Request Presentation Example - -To request a proof, submit the following (with appropriate `connection_id`) to Faber's `/present-proof-2.0/send-request` endpoint: - -``` -{ - "comment": "string", - "connection_id": "41acd909-9f45-4c69-8641-8146e0444a57", - "presentation_request": { - "dif": { - "options": { - "challenge": "3fa85f64-5717-4562-b3fc-2c963f66afa7", - "domain": "4jt78h47fh47" - }, - "presentation_definition": { - "id": "32f54163-7166-48f1-93d8-ff217bdb0654", - "format": { - "ldp_vp": { - "proof_type": [ - "BbsBlsSignature2020" - ] - } - }, - "input_descriptors": [ - { - "id": "citizenship_input_1", - "name": "EU Driver's License", - "schema": [ - { - "uri": "https://www.w3.org/2018/credentials#VerifiableCredential" - }, - { - "uri": "https://w3id.org/citizenship#PermanentResident" - } - ], - "constraints": { - "limit_disclosure": "required", - "is_holder": [ - { - "directive": "required", - "field_id": [ - "1f44d55f-f161-4938-a659-f8026467f126" - ] - } - ], - "fields": [ - { - "id": "1f44d55f-f161-4938-a659-f8026467f126", - "path": [ - "$.credentialSubject.familyName" - ], - "purpose": "The claim must be from one of the specified issuers", - "filter": { - "const": "SMITH" - } - }, - { - "path": [ - "$.credentialSubject.givenName" - ], - "purpose": "The claim must be from one of the specified issuers" - } - ] - } - } - ] - } - } - } -} -``` - -Note that the `is_holder` property can be used by Faber to verify that the holder of credential is the same as the subject of the attribute (`familyName`). Later on, the received presentation will be signed and verifiable only if `is_holder` with ` "directive": "required"` is included in the presentation request. - -There are several ways that Alice can respond with a presentation. 
The simplest will just tell aca-py to put the presentation together and send it to Faber - submit the following to Alice's `/present-proof-2.0/records/{pres_ex_id}/send-presentation`: - -``` -{ - "dif": { - } -} -``` - -There are two ways that Alice can provide some constraints to tell aca-py which credential(s) to include in the presentation. - -Firstly, Alice can include the received presentation request in the body to the `/send-presentation` endpoint, and can include additional constraints on the fields: - -``` -{ - "dif": { - "issuer_id": "did:key:zUC7Dus47jW5Avcne8LLsUvJSdwspmErgehxMWqZZy8eSSNoHZ4x8wgs77sAmQtCADED5RQP1WWhvt7KFNm6GGMxdSGpKu3PX6R9a61G9VoVsiFoRf1yoK6pzhq9jtFP3e2SmU9", - "presentation_definition": { - "format": { - "ldp_vp": { - "proof_type": [ - "BbsBlsSignature2020" - ] - } - }, - "id": "32f54163-7166-48f1-93d8-ff217bdb0654", - "input_descriptors": [ - { - "id": "citizenship_input_1", - "name": "Some kind of citizenship check", - "schema": [ - { - "uri": "https://www.w3.org/2018/credentials#VerifiableCredential" - }, - { - "uri": "https://w3id.org/citizenship#PermanentResident" - } - ], - "constraints": { - "limit_disclosure": "required", - "is_holder": [ - { - "directive": "required", - "field_id": [ - "1f44d55f-f161-4938-a659-f8026467f126", - "332be361-823a-4863-b18b-c3b930c5623e" - ], - } - ], - "fields": [ - { - "id": "1f44d55f-f161-4938-a659-f8026467f126", - "path": [ - "$.credentialSubject.familyName" - ], - "purpose": "The claim must be from one of the specified issuers", - "filter": { - "const": "SMITH" - } - }, - { - "id": "332be361-823a-4863-b18b-c3b930c5623e", - "path": [ - "$.id" - ], - "purpose": "Specify the id of the credential to present", - "filter": { - "const": "https://credential.example.com/residents/1234567890" - } - } - ] - } - } - ] - } - } -} -``` - -Note the additional constraint on `"path": [ "$.id" ]` - this restricts the presented credential to the one with the matching `credential.id`. Any credential attributes can be used, however this presumes that the issued credentials contain a uniquely identifying attribute. 
- -Another option is for Alice to specify the credential `record_id` - this is an internal value within aca-py: - -``` -{ - "dif": { - "issuer_id": "did:key:zUC7Dus47jW5Avcne8LLsUvJSdwspmErgehxMWqZZy8eSSNoHZ4x8wgs77sAmQtCADED5RQP1WWhvt7KFNm6GGMxdSGpKu3PX6R9a61G9VoVsiFoRf1yoK6pzhq9jtFP3e2SmU9", - "presentation_definition": { - "format": { - "ldp_vp": { - "proof_type": [ - "BbsBlsSignature2020" - ] - } - }, - "id": "32f54163-7166-48f1-93d8-ff217bdb0654", - "input_descriptors": [ - { - "id": "citizenship_input_1", - "name": "Some kind of citizenship check", - "schema": [ - { - "uri": "https://www.w3.org/2018/credentials#VerifiableCredential" - }, - { - "uri": "https://w3id.org/citizenship#PermanentResident" - } - ], - "constraints": { - "limit_disclosure": "required", - "fields": [ - { - "path": [ - "$.credentialSubject.familyName" - ], - "purpose": "The claim must be from one of the specified issuers", - "filter": { - "const": "SMITH" - } - } - ] - } - } - ] - }, - "record_ids": { - "citizenship_input_1": [ "1496316f972e40cf9b46b35971182337" ] - } - } -} -``` - -### Another Credential Issue Example - -TBD the following credential is based on the W3C Vaccination schema: - -``` -{ - "connection_id": "ad35a4d8-c84b-4a4f-a83f-1afbf134b8b9", - "filter": { - "ld_proof": { - "credential": { - "@context": [ - "https://www.w3.org/2018/credentials/v1", - "https://w3id.org/vaccination/v1" - ], - "type": ["VerifiableCredential", "VaccinationCertificate"], - "issuer": "did:key:zUC71pj2gpDLfcZ9DE1bMtjZGWCSLhkQsUCaKjqXtCftGkz27894pEX9VvGNiFsaV67gqv2TEPQ2aDaDDdTDNp42LfDdK1LaWSBCfzsQEyaiR1zjZm1RtoRu1ZM6v6vz4TiqDgU", - "issuanceDate": "2020-01-01T12:00:00Z", - "credentialSubject": { - "id": "did:key:aksdkajshdkajhsdkjahsdkjahsdj", - "type": "VaccinationEvent", - "batchNumber": "1183738569", - "administeringCentre": "MoH", - "healthProfessional": "MoH", - "countryOfVaccination": "NZ", - "recipient": { - "type": "VaccineRecipient", - "givenName": "JOHN", - "familyName": "SMITH", - "gender": "Male", - "birthDate": "1958-07-17" - }, - "vaccine": { - "type": "Vaccine", - "disease": "COVID-19", - "atcCode": "J07BX03", - "medicinalProductName": "COVID-19 Vaccine Moderna", - "marketingAuthorizationHolder": "Moderna Biotech" - } - } - }, - "options": { - "proofType": "BbsBlsSignature2020" - } - } - } -} -``` - diff --git a/docs/demo/AriesOpenAPIDemo.md b/docs/demo/AriesOpenAPIDemo.md index 9c2e5a2f..deed6abb 100644 --- a/docs/demo/AriesOpenAPIDemo.md +++ b/docs/demo/AriesOpenAPIDemo.md @@ -49,11 +49,11 @@ What better way to learn about controllers than by actually being one yourself! We will get started by opening three browser tabs that will be used throughout the lab. Two will be Swagger UIs for the Faber and Alice agent and one for the public ledger (showing the Hyperledger Indy ledger). As well, we'll keep the terminal sessions where we started the demos handy, as we'll be grabbing information from them as well. -Let's start with the ledger browser. For this demo, we're going to use an open public ledger operated by the BC Government's VON Team. In your first browser tab, go to: [http://dev.greenlight.bcovrin.vonx.io](http://dev.greenlight.bcovrin.vonx.io). This will be called the "ledger tab" in the instructions below. +Let's start with the ledger browser. For this demo, we're going to use an open public ledger operated by the BC Government's VON Team. In your first browser tab, go to: [http://test.bcovrin.vonx.io](http://test.bcovrin.vonx.io). This will be called the "ledger tab" in the instructions below. 
For the rest of the set up, you can choose to run the terminal sessions in your browser (no local resources needed), or you can run it in Docker on your local system. Your choice, each is covered in the next two sections. -> Note: In the following, when we start the agents we use several special demo settings. The command we use is this: `LEDGER_URL=http://dev.greenlight.bcovrin.vonx.io ./run_demo faber --events --no-auto --bg`. In that: +> Note: In the following, when we start the agents we use several special demo settings. The command we use is this: `LEDGER_URL=http://test.bcovrin.vonx.io ./run_demo faber --events --no-auto --bg`. In that: > > - The `LEDGER_URL` environment variable informs the agent what ledger to use. > - The `--events` option indicates that we want the controller to display the webhook events from ACA-Py in the log displayed on the terminal. @@ -71,7 +71,7 @@ In a browser, go to the [Play with Docker](https://labs.play-with-docker.com/) h ```bash git clone https://github.com/hyperledger/aries-cloudagent-python cd aries-cloudagent-python/demo -LEDGER_URL=http://dev.greenlight.bcovrin.vonx.io ./run_demo faber --events --no-auto --bg +LEDGER_URL=http://test.bcovrin.vonx.io ./run_demo faber --events --no-auto --bg ``` @@ -89,7 +89,7 @@ Once the Faber agent has started up (with the invite displayed), click the link
Show me a screenshot! - Swagger Page for Faber Agent + Swagger Page for Faber Agent
### Start the Alice Agent @@ -99,7 +99,7 @@ Now to start Alice's agent. Click the "+Add a new instance" button again to open ```bash git clone https://github.com/hyperledger/aries-cloudagent-python cd aries-cloudagent-python/demo -LEDGER_URL=http://dev.greenlight.bcovrin.vonx.io ./run_demo alice --events --no-auto --bg +LEDGER_URL=http://test.bcovrin.vonx.io ./run_demo alice --events --no-auto --bg ``` @@ -119,7 +119,7 @@ Once the Alice agent has started up (with the `invite:` prompt displayed), click
Show me a screenshot! - Swagger Page for Alice Agent + Swagger Page for Alice Agent
You are ready to go. Skip down to the [Using the OpenAPI/Swagger User Interface](#using-the-openapiswagger-user-interface) section. @@ -137,7 +137,7 @@ In the first terminal window, clone the ACA-Py repo, change into the demo folder ```bash git clone https://github.com/hyperledger/aries-cloudagent-python cd aries-cloudagent-python/demo -LEDGER_URL=http://dev.greenlight.bcovrin.vonx.io ./run_demo faber --events --no-auto --bg +LEDGER_URL=http://test.bcovrin.vonx.io ./run_demo faber --events --no-auto --bg ``` @@ -155,7 +155,7 @@ If all goes well, the agent will show a message indicating it is running. Use th
Show me a screenshot! - Swagger Page for Faber Agent + Swagger Page for Faber Agent
### Start the Alice Agent @@ -163,7 +163,7 @@ If all goes well, the agent will show a message indicating it is running. Use th To start Alice's agent, open up a second terminal window and in it, change to the same `demo` directory as where Faber's agent was started above. Once there, start Alice's agent: ``` bash -LEDGER_URL=http://dev.greenlight.bcovrin.vonx.io ./run_demo alice --events --no-auto --bg +LEDGER_URL=http://test.bcovrin.vonx.io ./run_demo alice --events --no-auto --bg ``` @@ -184,7 +184,7 @@ If all goes well, the agent will show a message indicating it is running. Open a
Show me a screenshot! - Swagger Page for Alice Agent + Swagger Page for Alice Agent
### Restarting the Docker Containers @@ -229,12 +229,12 @@ In the Faber browser tab, navigate to the **`POST /connections/create-invitation
Show me a screenshot - Create Invitation Request - Create Invitation Request + Create Invitation Request
Show me a screenshot - Create Invitation Response - Create Invitation Response + Create Invitation Response
### Copy the Invitation created by the Faber Agent @@ -243,14 +243,14 @@ Copy the entire block of the `invitation` object, from the curly brackets `{}`,
Show me a screenshot - Create Invitation Response - Create Invitation Response + Create Invitation Response
Before switching over to the Alice browser tab, scroll to and execute the **`GET /connections`** endpoint to see the list of Faber's connections. You should see a connection with a `connection_id` that is identical to the invitation you just created, and that its state is `invitation`.
Show me a screenshot - Faber Connection Status - Faber Connection Status + Faber Connection Status
### Use the Alice Agent to Receive Faber's Invitation @@ -261,12 +261,12 @@ Switch to the Alice browser tab and get ready to execute the **`POST /connection
Show me a screenshot - Receive Invitation Request - Receive Invitation Request + Receive Invitation Request
Show me a screenshot - Receive Invitation Response - Receive Invitation Request + Receive Invitation Request
> A key observation to make here. The "copy and paste" we are doing here from Faber's agent to Alice's agent is what is called an "out of band" message. Because we don't yet have a DIDComm connection between the two agents, we have to convey the invitation in plaintext (we can't encrypt it - no channel) using some other mechanism than DIDComm. With mobile agents, that's where QR codes often come in. Once we have the invitation in the receivers agent, we can get back to using DIDComm. @@ -277,19 +277,19 @@ At this point Alice has simply stored the invitation in her wallet. You can see
Show me a screenshot - Invitation Status + Invitation Status
To complete a connection with Faber, she must accept the invitation and send a corresponding connection request to Faber. Find the `connection_id` in the connection response from the previous **`POST /connections/receive-invitation`** endpoint call. You may note that the same data was sent to the controller as an event from ACA-Py and is visible in the terminal. Scroll to the **`POST /connections/{conn_id}/accept-invitation`** endpoint and paste the `connection_id` in the `id` parameter field (you will have to click the `Try it out` button to see the available URL parameters). The response from clicking `Execute` should show that the connection has a state of `request`.
Show me a screenshot - Accept Invitation Request - Receive Invitation Request + Receive Invitation Request
Show me a screenshot - Accept Invitation Response - Receive Invitation Response + Receive Invitation Response
### The Faber Agent Gets the Request @@ -298,7 +298,7 @@ In the Faber terminal session, an event (a web service callback from ACA-Py to t
Show me the event - Connection Request Event + Connection Request Event
Note that the connection ID held by Alice is different from the one held by Faber. That makes sense, as both independently created connection objects, each with a unique, self-generated GUID. @@ -309,12 +309,12 @@ To complete the connection process, Faber will respond to the connection request
Show me a screenshot - Accept Connection Request - Accept Connection Request + Accept Connection Request
Show me a screenshot - Accept Connection Request - Accept Connection Request + Accept Connection Request
### Review the Connection Status in Alice's Agent @@ -325,14 +325,14 @@ Scroll to and execute **`GET /connections`** to see a list of Alice's connection
Show me a screenshot - Alice Connection Status - Alice Connection Event + Alice Connection Event
As with Faber's side of the connection, Alice received a notification that Faber had accepted her connection request.
Show me the event - Alice Connection Status + Alice Connection Status
### Review the Connection Status in Faber's Agent @@ -341,7 +341,7 @@ You are connected! Switch to the Faber browser tab and run the same **`GET /conn
Show me a screenshot - Faber Connection Status - Faber Connection Status + Faber Connection Status
## Basic Messaging Between Agents @@ -354,7 +354,7 @@ On Alice's swagger page, scroll to the **`POST /connections/{conn_id}/send-messa
Show me a screenshot - Alice Send Message + Alice Send Message
### Receiving a Basic Message (Faber) @@ -363,7 +363,7 @@ How does Faber know that a message was sent? If you take a look at Faber's conso
Show me a screenshot - Faber Receive Message + Faber Receive Message
Faber's controller application can take whatever action is necessary to process this message. It could trigger some application code, or it might just be something the Faber application needs to display to its user (for example a reminder about some action the user needs to take). @@ -374,7 +374,7 @@ How does Alice get feedback that Faber has received the message? The same way -
Show me a screenshot - Alice Receive Message Confirmation + Alice Receive Message Confirmation
Again, Alice's agent can take whatever action is necessary, possibly just flagging the message as having been `received`. @@ -401,14 +401,14 @@ You can confirm the schema and credential definition were published by going bac
Show me a screenshot - Faber Public DID + Faber Public DID
-On the ledger browser of the [BCovrin ledger](http://dev.greenlight.bcovrin.vonx.io), click the `Domain` page, refresh, and paste the Faber public DID into the `Filter:` field: +On the ledger browser of the [BCovrin ledger](http://test.bcovrin.vonx.io), click the `Domain` page, refresh, and paste the Faber public DID into the `Filter:` field:
Show me a screenshot - Search Ledger by DID + Search Ledger by DID
The ledger browser should refresh and display the four (4) transactions on the ledger related to this DID: @@ -420,26 +420,26 @@ The ledger browser should refresh and display the four (4) transactions on the l
Show me the ledger transactions - DID Transaction - DID Endpoint Transaction - Schema Transaction - Credential Definition Transaction
You can also look up the Schema and Credential Definition information using Faber's swagger page. Use the **`GET /schemas/created`** endpoint to get a list of schemas, including the one `schema_id` that the Faber agent has defined. Keep this section of the Swagger page expanded as we'll need to copy the Id as part of starting the issue credential protocol coming next.
Show me a screenshot - Search Schemas
Likewise, use the **`GET /credential-definitions/created`** endpoint to get the list of credential definition ids created by Faber (in this case, just one). Keep this section of the Swagger page expanded, as we'll also need to copy the Id when starting the issue credential protocol next.
Show me a screenshot - Search Credential Definitions
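Both lookups can also be scripted against Faber's admin API. A sketch, again assuming the demo default of `http://localhost:8021`:

```bash
# List the schema ids and credential definition ids created by Faber's agent
curl -s "http://localhost:8021/schemas/created"
curl -s "http://localhost:8021/credential-definitions/created"
```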
**Hint**: Remember how the schema and credential definitions were created for you as Faber started up? To do it yourself, use the **`POST`** versions of these endpoints. Now you know! @@ -458,7 +458,7 @@ First, get the connection Id for Faber's connection with Alice. You can copy tha
Click here to see a screenshot - Connection Id
For the following fields, scroll on Faber's Swagger page to the listed endpoint, execute (if necessary), copy the response value and paste as the values of the following JSON items: @@ -513,8 +513,8 @@ OK, finally, you are ready to click `Execute`. The request should work, but if i
Show me a screenshot - credential offer - Faber Submit Credential Offer
To confirm the issuance worked, scroll up on the Faber Swagger page to the `issue-credential v2.0` section and execute the **`GET /issue-credential-2.0/records`** endpoint. You should see a lot of information about the exchange just initiated. @@ -527,7 +527,7 @@ Alice's agent first received a notification of a Credential Offer, to which it r
Show me a screenshot - issue credential
### Alice Stores Credential in her Wallet @@ -536,16 +536,16 @@ We can check (via Alice's Swagger interface) the issue credential status by hitt
Show me a screenshot - check credential exchange status
First, we need the `cred_ex_id` from the API call response above, or from the event in the terminal; use the endpoint **`POST /issue-credential-2.0/records/{cred_ex_id}/store`** to tell Alice's ACA-Py instance to store the credential in agent storage (aka the Indy Wallet). Note that in the JSON for that endpoint we can provide a credential Id to store in the wallet by setting a value in the `credential_id` string. A real controller might use the `cred_ex_id` for that, or use something else that makes sense in the agent's business scenario (but the agent generates a random credential identifier by default).
Show me a screenshot - store credential
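As a command-line sketch of the same store call (assuming Alice's admin API at `http://localhost:8031` and `$CRED_EX_ID` copied from the record or event above; the `credential_id` value is just an example label):

```bash
# Store the issued credential in Alice's wallet under a chosen credential_id
curl -s -X POST "http://localhost:8031/issue-credential-2.0/records/$CRED_EX_ID/store" \
  -H "Content-Type: application/json" \
  -d '{"credential_id": "alice-faber-credential"}'
```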
Now, in Alice’s swagger browser tab, find the `credentials` section and within that, execute the **`GET /credentials`** endpoint. There should be a list of credentials held by Alice, with just a single entry, the credential issued from the Faber agent. Note that the element `referent` is the value of the `credential_id` element used in other calls. `referent` is the name returned in the `indy-sdk` call to get the set of credentials for the wallet and ACA-Py code does not change it in the response. @@ -556,16 +556,16 @@ On the Faber side, we can see by scanning back in the terminal that it receive e
Show me Faber's event activity
Note that once the credential processing completed, Faber's agent deleted the credential exchange record from its wallet. This can be confirmed by executing the endpoint **`GET /issue-credential-2.0/records`**
Show me a screenshot
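You can confirm this from the command line as well. With Faber's admin API at `http://localhost:8021` (the demo default), the list of credential exchange records should come back empty once the exchange has completed and the record has been removed:

```bash
# List Faber's credential exchange records; expect an empty "results" list
curl -s "http://localhost:8021/issue-credential-2.0/records"
```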
You’ve done it, issued a credential! w00t! @@ -581,7 +581,7 @@ Those that know something about the Indy process for issuing a credential and th If you would like to perform all of the issuance steps manually on the Faber agent side, use a sequence of the other `/issue-credential-2.0/` messages. Use the **`GET /issue-credential-2.0/records`** to both check the credential exchange state as you progress through the protocol and to find some of the data you’ll need in executing the sequence of requests. -The following table lists endpoints that you need to call ("REST service") and callbacks that your agent will receive ("callback") that your need to respond to. See the [detailed API docs](../../features/AdminAPI). +The following table lists endpoints that you need to call ("REST service") and callbacks that your agent will receive ("callback") that you need to respond to. See the [detailed API docs](../features/AdminAPI). | Protocol Step | Faber (Issuer) | Alice (Holder) | Notes | | -------------------- | ---------------------- | ------------------ | ----- | @@ -663,8 +663,8 @@ Notice that the proof request is using a predicate to check if Alice is older th
Show me a screenshot - send proof request
### Alice - Responding to the Proof Request @@ -673,8 +673,8 @@ As before, Alice receives a webhook event from her agent telling her she has rec
Show me Alice's event activity - Proof Request
In a real scenario, for example if Alice had a mobile agent on her smartphone, the agent would prompt Alice whether she wanted to respond or not. @@ -687,9 +687,9 @@ You can see some of Faber's activity below:
Show me Faber's event activity - Receive and Verify Proof
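To confirm the outcome programmatically, Faber's controller could list the presentation exchange records and inspect the `verified` flag. A sketch, assuming Faber's admin API at `http://localhost:8021`; note that the exchange record may already have been removed once the protocol completes:

```bash
# List present-proof v2.0 exchange records; a successful verification shows "verified": "true"
curl -s "http://localhost:8021/present-proof-2.0/records"
```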
### Present Proof Notes @@ -700,7 +700,7 @@ As with the issue credential process, the agents handled some of the presentatio If you would like to perform all of the proof request/response steps manually, you can call all of the individual `/present-proof-2.0` messages. -The following table lists endpoints that you need to call ("REST service") and callbacks that your agent will receive ("callback") that you need to respond to. See the [detailed API docs](../../features/AdminAPI). +The following table lists endpoints that you need to call ("REST service") and callbacks that your agent will receive ("callback") that you need to respond to. See the [detailed API docs](../features/AdminAPI). | Protocol Step | Faber (Verifier) | Alice (Holder/Prover) | Notes | | -------------------- | ---------------------- | ------------------------- | ----- | diff --git a/docs/demo/AriesPostmanDemo.md b/docs/demo/AriesPostmanDemo.md new file mode 100644 index 00000000..03fccec7 --- /dev/null +++ b/docs/demo/AriesPostmanDemo.md @@ -0,0 +1,111 @@ +# Aries Postman Demo + +In these demos we will use Postman as our controller client. + +## Contents + +- [Getting Started](#getting-started) + - [Installing Postman](#installing-postman) + - [Creating a workspace](#creating-a-workspace) + - [Importing the environment](#importing-the-environment) + - [Importing the collections](#importing-the-collections) + - [Postman basics](#postman-basics) +- [Experimenting with the vc-api endpoints](#experimenting-with-the-vc-api-endpoints) + - [Register new dids](#register-new-dids) + - [Issue credentials](#issue-credentials) + - [Store and retrieve credentials](#store-and-retrieve-credentials) + - [Verify credentials](#verify-credentials) + - [Prove a presentation](#prove-a-presentation) + - [Verify a presentation](#verify-a-presentation) + +## Getting Started + +Welcome to the Postman demo. This is an addition to the available OpenAPI demo, providing a set of collections to test and demonstrate various aca-py functionalities. + +### Installing Postman + +Download, install and launch [postman](https://www.postman.com/downloads/). + +### Creating a workspace + +Create a new postman workspace labeled "acapy-demo". + +### Importing the environment + +In the environment tab from the left, click the import button. You can paste this [link](https://raw.githubusercontent.com/hyperledger/aries-cloudagent-python/main/demo/postman/environment.json) which is the [environment file](https://github.com/hyperledger/aries-cloudagent-python/blob/main/demo/postman/environment.json) in the ACA-Py repository. + +Make sure you have the environment set as your active environment. + +### Importing the collections + +In the collections tab from the left, click the import button. + +The following collections are available: + +- [vc-api](https://raw.githubusercontent.com/hyperledger/aries-cloudagent-python/main/demo/postman/collections/vc-api.json) + +### Postman basics + +Once you are setup, you will be ready to run postman requests. The order of the request is important, since some values are saved dynamically as environment variables for subsequent calls. + +You have your environment where you define variables to be accessed by your collections. + +Each collection consists of a series of requests which can be configured independently. + +## Experimenting with the vc-api endpoints + +Make sure you have a demo agent available. 
You can use the following command to deploy one: + +```bash +LEDGER_URL=http://test.bcovrin.vonx.io ./run_demo faber --bg +``` + +When running for the first time, please allow some time for the images to build. + +### Register new dids + +The first 2 requests for this collection will create 2 did:keys. We will use those in subsequent calls to issue `Ed25519Signature2020` and `BbsBlsSignature2020` credentials. +Run the 2 did creation requests. These requests will use the `/wallet/did/create` endpoint. + +### Issue credentials + +For issuing, you must input a [w3c compliant json-ld credential](https://www.w3.org/TR/vc-data-model/) and [issuance options](https://w3c-ccg.github.io/vc-api/#issue-credential) in your request body. The issuer field must be a registered did from the agent's wallet. The suite will be derived from the did method. + +```json +{ + "credential": { + "@context": [ + "https://www.w3.org/2018/credentials/v1" + ], + "type": [ + "VerifiableCredential" + ], + "issuer": "did:example:123", + "issuanceDate": "2022-05-01T00:00:00Z", + "credentialSubject": { + "id": "did:example:123" + } + }, + "options": {} +} +``` + +Some examples have been pre-configured in the collection. Run the requests and inspect the results. Experiment with different credentials. + +### Store and retrieve credentials + +Your last issued credential will be stored as an environment variable for subsequent calls, such as storing, verifying and including in a presentation. + +Try running the store credential request, then retrieve the credential with the list and fetch requests. Try going back and forth between the issuance endpoints and the storage endpoints to store multiple different credentials. + +### Verify credentials + +You can verify your last issued credential with this endpoint or any issued credential you provide to it. + +### Prove a presentation + +Proving a presentation is an action where a holder will prove ownership of a credential by signing or demonstrating authority over the document. + +### Verify a presentation + +The final request is to verify a presentation. diff --git a/docs/demo/README.md b/docs/demo/README.md index 15ae3e9c..61a0fe5b 100644 --- a/docs/demo/README.md +++ b/docs/demo/README.md @@ -26,7 +26,7 @@ There are several demos available for ACA-Py mostly (but not only) aimed at deve - [Multi-ledger](#multi-ledger) - [Multi-tenancy](#multi-tenancy) - [Multi-tenancy *with Mediation*!!!](#multi-tenancy-with-mediation) - - [Other Environment Settings](#other-environment-settings) +- [Other Environment Settings](#other-environment-settings) - [Learning about the Alice/Faber code](#learning-about-the-alicefaber-code) - [OpenAPI (Swagger) Demo](#openapi-swagger-demo) - [Performance Demo](#performance-demo) @@ -43,7 +43,7 @@ In your browser, go to the docker playground service [Play with Docker](https:// ```bash git clone https://github.com/hyperledger/aries-cloudagent-python cd aries-cloudagent-python/demo -LEDGER_URL=http://dev.greenlight.bcovrin.vonx.io ./run_demo faber +LEDGER_URL=http://test.bcovrin.vonx.io ./run_demo faber ``` Now to start Alice's agent. Click the "+Add a new instance" button again to open another terminal session. Run the following commands to start Alice's agent: @@ -51,7 +51,7 @@ Now to start Alice's agent. 
Click the "+Add a new instance" button again to open ```bash git clone https://github.com/hyperledger/aries-cloudagent-python cd aries-cloudagent-python/demo -LEDGER_URL=http://dev.greenlight.bcovrin.vonx.io ./run_demo alice +LEDGER_URL=http://test.bcovrin.vonx.io ./run_demo alice ``` Alice's agent is now running. diff --git a/docs/deploying/AnonCredsWalletType.md b/docs/deploying/AnonCredsWalletType.md index 2023faed..f61d4515 100644 --- a/docs/deploying/AnonCredsWalletType.md +++ b/docs/deploying/AnonCredsWalletType.md @@ -1,8 +1,8 @@ -# AnonCreds-Rs Support +# AnonCreds-RS Support A new wallet type has been added to Aca-Py to support the new anoncreds-rs library: -``` +```bash --wallet-type askar-anoncreds ``` @@ -16,7 +16,7 @@ Within the protocols, there are new `handler` libraries to support the new `anon The existing `indy` code are in: -``` +```bash aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/handler.py aries_cloudagent/protocols/indy/anoncreds/pres_exch_handler.py aries_cloudagent/protocols/present_proof/v2_0/formats/indy/handler.py @@ -24,7 +24,7 @@ aries_cloudagent/protocols/present_proof/v2_0/formats/indy/handler.py The new `anoncreds` code is in: -``` +```bash aries_cloudagent/protocols/issue_credential/v2_0/formats/anoncreds/handler.py aries_cloudagent/protocols/present_proof/anoncreds/pres_exch_handler.py aries_cloudagent/protocols/present_proof/v2_0/formats/anoncreds/handler.py @@ -32,7 +32,7 @@ aries_cloudagent/protocols/present_proof/v2_0/formats/anoncreds/handler.py The Indy handler checks to see if the wallet type is `askar-anoncreds` and if so delegates the calls to the anoncreds handler, for example: -``` +```python # Temporary shim while the new anoncreds library integration is in progress wallet_type = profile.settings.get_value("wallet.type") if wallet_type == "askar-anoncreds": @@ -41,7 +41,7 @@ The Indy handler checks to see if the wallet type is `askar-anoncreds` and if so ... and then: -``` +```python # Temporary shim while the new anoncreds library integration is in progress if self.anoncreds_handler: return self.anoncreds_handler.get_format_identifier(message_type) @@ -49,20 +49,19 @@ The Indy handler checks to see if the wallet type is `askar-anoncreds` and if so To run the alice/faber demo using the new anoncreds library, start the demo with: -``` +```bash --wallet-type askar-anoncreds ``` There are no anoncreds-specific integration tests, for the new anoncreds functionality the agents within the integration tests are started with: -``` +```bash --wallet-type askar-anoncreds ``` Everything should just work!!! -Theoretically ATH should work with anoncreds as well, by setting the wallet type (see https://github.com/hyperledger/aries-agent-test-harness#extra-backchannel-specific-parameters). - +Theoretically ATH should work with anoncreds as well, by setting the wallet type (see [https://github.com/hyperledger/aries-agent-test-harness#extra-backchannel-specific-parameters](https://github.com/hyperledger/aries-agent-test-harness#extra-backchannel-specific-parameters)). ## Revocation (new in anoncreds) @@ -72,31 +71,29 @@ The changes are significant. Notably: - In the new way, the AnonCreds library expects the identifier for the revregentry used (aka the timestamp), the accumulator, and the full state (0s and 1s) of the revocation status of all credentials in the registry. 
- The conversion from delta to full state must be handled in the Indy resolver -- not in the "generic" ACA-Py code, since the other ledgers automagically provide the full state. In fact, we're likely to update Indy VDR to always provide the full state. The "common" (post resolver) code should get back from the resolver the full state. -The Tails file changes are minimal -- nothing about the file itself changed. What changed: - -- the tails-file-server can be published to WITHOUT knowing the ID of the RevRegEntry, since that is not known when the tails file is generated/published. See: https://github.com/bcgov/indy-tails-server/pull/53 -- basically, by publishing based on the hash. -- The tails-file is not needed by the issuer after generation. It used to be needed for (I think) issuing and revoking credentials. Those are now done without the tails file. See: https://github.com/hyperledger/aries-cloudagent-python/pull/2302/files. That code is already in Main, so you should have it. +The Tails File changes are minimal -- nothing about the file itself changed. What changed: +- the tails-file-server can be published to WITHOUT knowing the ID of the RevRegEntry, since that is not known when the tails file is generated/published. See: [https://github.com/bcgov/indy-tails-server/pull/53](https://github.com/bcgov/indy-tails-server/pull/53) -- basically, by publishing based on the hash. +- The tails-file is not needed by the issuer after generation. It used to be needed for issuing and revoking credentials. Those are now done without the tails file. See: [https://github.com/hyperledger/aries-cloudagent-python/pull/2302/files](https://github.com/hyperledger/aries-cloudagent-python/pull/2302/files). That code is already in Main, so you should have it. ## Outstanding work -* revocation notifications (not sure if they're included in `anoncreds-rs` updates, haven't tested them ...) -* revocation support - complete the revocation implementation (support for unhappy path scenarios) -* testing - various scenarios like mediation, multitenancy etc. +- revocation notifications (not sure if they're included in `anoncreds-rs` updates, haven't tested them ...) +- revocation support - complete the revocation implementation (support for unhappy path scenarios) +- testing - various scenarios like mediation, multitenancy etc. 
-- unit tests (in the new anoncreds package) (see https://github.com/hyperledger/aries-cloudagent-python/pull/2596/commits/229ffbba209aff0ea7def5bad6556d93057f3c2a) +- unit tests (in the new anoncreds package) (see [https://github.com/hyperledger/aries-cloudagent-python/pull/2596/commits/229ffbba209aff0ea7def5bad6556d93057f3c2a](https://github.com/hyperledger/aries-cloudagent-python/pull/2596/commits/229ffbba209aff0ea7def5bad6556d93057f3c2a)) - unit tests (review and possibly update unit tests for the credential and presentation integration) - endorsement (not implemented with new anoncreds code) - wallet upgrade (askar to askar-anoncreds) - update V1.0 versions of the Credential and Presentation endpoints to use anoncreds -- any other anoncreds issues - https://github.com/hyperledger/aries-cloudagent-python/issues?q=is%3Aopen+is%3Aissue+label%3AAnonCreds - +- any other anoncreds issues - [https://github.com/hyperledger/aries-cloudagent-python/issues?q=is%3Aopen+is%3Aissue+label%3AAnonCreds](https://github.com/hyperledger/aries-cloudagent-python/issues?q=is%3Aopen+is%3Aissue+label%3AAnonCreds) ## Retiring old Indy and Askar (credx) Code The main changes for the Credential and Presentation support are in the following two files: -``` +```bash aries_cloudagent/protocols/issue_credential/v2_0/messages/cred_format.py aries_cloudagent/protocols/present_proof/v2_0/messages/pres_format.py ``` @@ -105,7 +102,7 @@ The `INDY` handler just need to be re-pointed to the new anoncreds handler, and The new code is already in place (in comments). For example for the Credential handler: -``` +```python To make the switch from indy to anoncreds replace the above with the following INDY = FormatSpec( "hlindy/", diff --git a/docs/deploying/ContainerImagesAndGithubActions.md b/docs/deploying/ContainerImagesAndGithubActions.md index 359c22a9..bdc28e19 100644 --- a/docs/deploying/ContainerImagesAndGithubActions.md +++ b/docs/deploying/ContainerImagesAndGithubActions.md @@ -13,7 +13,6 @@ are now built and published directly from the Aries Cloud Agent - Python project repository and made available through the [Github Packages Container Registry](https://ghcr.io). - ## Image This project builds and publishes the `ghcr.io/hyperledger/aries-cloudagent-python` image. @@ -26,13 +25,13 @@ end, there are multiple variants of ACA-Py built to suit the needs of a variety of environments and workflows. There are currently two main variants: - "Standard" - The default configuration of ACA-Py, including: - - Aries Askar for secure storage - - Indy VDR for Indy ledger communication - - Indy Shared Libraries for AnonCreds + - Aries Askar for secure storage + - Indy VDR for Indy ledger communication + - Indy Shared Libraries for AnonCreds - "Indy" - The legacy configuration of ACA-Py, including: - - Indy SDK Wallet for secure storage - - Indy SDK Ledger for Indy ledger communication - - Indy SDK for AnonCreds + - Indy SDK Wallet for secure storage + - Indy SDK Ledger for Indy ledger communication + - Indy SDK for AnonCreds These two image variants are largely distinguished by providers for Indy Network and AnonCreds support. The Standard variant is recommended for new projects. @@ -58,30 +57,30 @@ There are several key differences that should be noted between the two image variants and between the BC Gov ACA-Py images. 
- Standard Image - - Based on slim variant of Debian - - Does **NOT** include `libindy` - - Default user is `aries` - - Uses container's system python environment rather than `pyenv` - - Askar and Indy Shared libraries are installed as dependencies of ACA-Py through pip from pre-compiled binaries included in the python wrappers - - Built from repo contents + - Based on slim variant of Debian + - Does **NOT** include `libindy` + - Default user is `aries` + - Uses container's system python environment rather than `pyenv` + - Askar and Indy Shared libraries are installed as dependencies of ACA-Py through pip from pre-compiled binaries included in the python wrappers + - Built from repo contents - Indy Image - - Based on slim variant of Debian - - Built from multi-stage build step (`indy-base` in the Dockerfile) which includes Indy dependencies; this could be replaced with an explicit `indy-python` image from the Indy SDK repo - - Includes `libindy` but does **NOT** include the Indy CLI - - Default user is `indy` - - Uses container's system python environment rather than `pyenv` - - Askar and Indy Shared libraries are installed as dependencies of ACA-Py through pip from pre-compiled binaries included in the python wrappers - - Built from repo contents - - Includes Indy postgres storage plugin + - Based on slim variant of Debian + - Built from multi-stage build step (`indy-base` in the Dockerfile) which includes Indy dependencies; this could be replaced with an explicit `indy-python` image from the Indy SDK repo + - Includes `libindy` but does **NOT** include the Indy CLI + - Default user is `indy` + - Uses container's system python environment rather than `pyenv` + - Askar and Indy Shared libraries are installed as dependencies of ACA-Py through pip from pre-compiled binaries included in the python wrappers + - Built from repo contents + - Includes Indy postgres storage plugin - `bcgovimages/aries-cloudagent` - - (Usually) based on Ubuntu - - Based on `von-image` - - Default user is `indy` - - Includes `libindy` and Indy CLI - - Uses `pyenv` - - Askar and Indy Shared libraries built from source - - Built from ACA-Py python package uploaded to PyPI - - Includes Indy postgres storage plugin + - (Usually) based on Ubuntu + - Based on `von-image` + - Default user is `indy` + - Includes `libindy` and Indy CLI + - Uses `pyenv` + - Askar and Indy Shared libraries built from source + - Built from ACA-Py python package uploaded to PyPI + - Includes Indy postgres storage plugin ## Github Actions diff --git a/docs/deploying/IndySDKtoAskarMigration.md b/docs/deploying/IndySDKtoAskarMigration.md index 964734c2..a95d8729 100644 --- a/docs/deploying/IndySDKtoAskarMigration.md +++ b/docs/deploying/IndySDKtoAskarMigration.md @@ -159,4 +159,4 @@ please use the Aries Cloud Agent Python channel on [Hyperledger Discord], or submit a [GitHub issue to the ACA-Py repository]. 
[Hyperledger Discord]: https://discord.gg/hyperledger -[GitHub issue to the ACA-Py repository]: https://github.com/hyperledger/aries-cloudagent-python/issues \ No newline at end of file +[GitHub issue to the ACA-Py repository]: https://github.com/hyperledger/aries-cloudagent-python/issues diff --git a/docs/deploying/Poetry.md b/docs/deploying/Poetry.md index 9bfde340..fc7dc44a 100644 --- a/docs/deploying/Poetry.md +++ b/docs/deploying/Poetry.md @@ -25,24 +25,28 @@ poetry shell ``` Alternatively you can source the environment settings in the current shell -``` + +```bash source $(poetry env info --path)/bin/activate ``` for powershell users this would be -``` + +```powershell (& ((poetry env info --path) + "\Scripts\activate.ps1") ``` ### Deactivating the Virtual Environment When using `poetry shell` + ```bash exit ``` When using the `activate` script -``` + +```bash deactivate ``` @@ -120,6 +124,7 @@ poetry install -E extras-name ``` for example + ```bash poetry install -E "askar bbs indy" ``` diff --git a/docs/deploying/RedisPlugins.md b/docs/deploying/RedisPlugins.md index da4ee090..e3428a12 100644 --- a/docs/deploying/RedisPlugins.md +++ b/docs/deploying/RedisPlugins.md @@ -1,13 +1,15 @@ # ACA-Py Redis Plugins -# [aries-acapy-plugin-redis-events](https://github.com/bcgov/aries-acapy-plugin-redis-events/blob/master/README.md) [`redis_queue`] + +## [aries-acapy-plugin-redis-events](https://github.com/hyperledger/aries-acapy-plugins/blob/main/redis_events/README.md) `redis_queue` -It provides a mechansim to persists both inbound and outbound messages using redis, deliver messages and webhooks, and dispatch events. +It provides a mechanism to persists both inbound and outbound messages using redis, deliver messages and webhooks, and dispatch events. -More details can be found [here](https://github.com/bcgov/aries-acapy-plugin-redis-events/blob/master/README.md). +More details can be found [here](https://github.com/hyperledger/aries-acapy-plugins/blob/main/redis_events/README.md). -### Plugin configuration [`yaml`] -``` +### Redis Queue configuration `yaml` + +```yaml redis_queue: connection: connection_url: "redis://default:test1234@172.28.0.103:6379" @@ -50,6 +52,7 @@ redis_queue: acapy::keylist::updated: keylist deliver_webhook: true ``` + - `redis_queue.connection.connection_url`: This is required and is expected in `redis://{username}:{password}@{host}:{port}` format. - `redis_queue.inbound.acapy_inbound_topic`: This is the topic prefix for the inbound message queues. Recipient key of the message are also included in the complete topic name. The final topic will be in the following format `acapy_inbound_{recip_key}` - `redis_queue.inbound.acapy_direct_resp_topic`: Queue topic name for direct responses to inbound message. @@ -59,24 +62,30 @@ redis_queue: - `event.event_webhook_topic_maps`: Event to webhook topic map - `event.deliver_webhook`: When set to true, this will deliver webhooks to endpoints specified in `admin.webhook_urls`. By default, set to true. -### Usage +### Redis Plugin Usage + +#### Redis Plugin With Docker -#### With Docker Running the plugin with docker is simple. An -example [docker-compose.yml](https://github.com/bcgov/aries-acapy-plugin-redis-events/blob/master/docker/docker-compose.yml) file is available which launches both ACA-Py with redis and an accompanying Redis cluster. 
+example [docker-compose.yml](https://github.com/hyperledger/aries-acapy-plugins/blob/main/redis_events/docker/docker-compose.yml) file is available which launches both ACA-Py with redis and an accompanying Redis cluster. ```sh -$ docker-compose up --build -d +docker-compose up --build -d ``` -More details can be found [here](https://github.com/bcgov/aries-acapy-plugin-redis-events/blob/master/docker/README.md). -#### Without Docker +More details can be found [here](https://github.com/hyperledger/aries-acapy-plugins/blob/main/redis_events/README.md). + +#### Without Docker + Installation -``` + +```bash pip install git+https://github.com/bcgov/aries-acapy-plugin-redis-events.git ``` + Startup ACA-Py with `redis_queue` plugin loaded -``` + +```bash docker network create --subnet=172.28.0.0/24 `network_name` export REDIS_PASSWORD=" ... As specified in redis_cluster.conf ... " export NETWORK_NAME="`network_name`" @@ -88,8 +97,10 @@ aca-py start \ ``` Regardless of the options above, you will need to startup `deliverer` and `relay`/`mediator` service as a bridge to receive inbound messages. Consider the following to build your `docker-compose` file which should also start up your redis cluster: + - Relay + Deliverer - ``` + + ```yaml relay: image: redis-relay build: @@ -139,8 +150,10 @@ Regardless of the options above, you will need to startup `deliverer` and `relay networks: - acapy_default ``` + - Mediator + Deliverer - ``` + + ```yaml mediator: image: acapy-redis-queue build: @@ -191,7 +204,7 @@ Regardless of the options above, you will need to startup `deliverer` and `relay Both relay and mediator [demos](https://github.com/bcgov/aries-acapy-plugin-redis-events/tree/master/demo) are also available. -# [aries-acapy-cache-redis](https://github.com/Indicio-tech/aries-acapy-cache-redis/blob/main/README.md) [`redis_cache`] +## [aries-acapy-cache-redis](https://github.com/Indicio-tech/aries-acapy-cache-redis/blob/main/README.md) `redis_cache` ACA-Py uses a modular cache layer to story key-value pairs of data. The purpose @@ -200,8 +213,9 @@ caching needs. More details can be found [here](https://github.com/Indicio-tech/aries-acapy-cache-redis/blob/main/README.md). -### Plugin configuration [`yaml`] -``` +### Redis Cache Plugin configuration `yaml` + +```yaml redis_cache: connection: "redis://default:test1234@172.28.0.103:6379" max_connection: 50 @@ -211,48 +225,57 @@ redis_cache: ssl: cacerts: ./ca.crt ``` + - `redis_cache.connection`: This is required and is expected in `redis://{username}:{password}@{host}:{port}` format. - `redis_cache.max_connection`: Maximum number of redis pool connections. Default: 50 - `redis_cache.credentials.username`: Redis instance username - `redis_cache.credentials.password`: Redis instance password - `redis_cache.ssl.cacerts` -### Usage +### Redis Cache Usage + +#### Redis Cache Using Docker -#### With Docker - Running the plugin with docker is simple and straight-forward. There is an example [docker-compose.yml](https://github.com/Indicio-tech/aries-acapy-cache-redis/blob/main/docker-compose.yml) file in the root of the project that launches both ACA-Py and an accompanying Redis instance. 
Running it is as simple as: ```sh - $ docker-compose up --build -d + docker-compose up --build -d ``` -- To launch ACA-Py with an accompanying redis cluster of 6 nodes [3 primaries and 3 replicas], please refer to example [docker-compose.cluster.yml](https://github.com/Indicio-tech/aries-acapy-cache-redis/blob/main/docker-compose.cluster.yml) and run the following: +- To launch ACA-Py with an accompanying redis cluster of 6 nodes (3 primaries and 3 replicas), please refer to example [docker-compose.cluster.yml](https://github.com/Indicio-tech/aries-acapy-cache-redis/blob/main/docker-compose.cluster.yml) and run the following: Note: Cluster requires external docker network with specified subnet ```sh - $ docker network create --subnet=172.28.0.0/24 `network_name` - $ export REDIS_PASSWORD=" ... As specified in redis_cluster.conf ... " - $ export NETWORK_NAME="`network_name`" - $ docker-compose -f docker-compose.cluster.yml up --build -d + docker network create --subnet=172.28.0.0/24 `network_name` + export REDIS_PASSWORD=" ... As specified in redis_cluster.conf ... " + export NETWORK_NAME="`network_name`" + docker-compose -f docker-compose.cluster.yml up --build -d ``` -#### Without Docker + +#### Redis Cache Without Docker + Installation -``` + +```bash pip install git+https://github.com/Indicio-tech/aries-acapy-cache-redis.git ``` + Startup ACA-Py with `redis_cache` plugin loaded -``` + +```bash aca-py start \ --plugin acapy_cache_redis.v0_1 \ --plugin-config plugins-config.yaml \ # ... the remainder of your startup arguments ``` + or -``` + +```bash aca-py start \ --plugin acapy_cache_redis.v0_1 \ --plugin-config-value "redis_cache.connection=redis://redis-host:6379/0" \ @@ -261,6 +284,7 @@ aca-py start \ --plugin-config-value "redis_cache.credentials.password=password" \ # ... the remainder of your startup arguments ``` -## RedisCluster -If you startup a redis cluster and an ACA-Py agent loaded with either `redis_queue` or `redis_cache` plugin or both, then during the initialization of the plugin, it will bind an instance of `redis.asyncio.RedisCluster` [onto the `root_profile`]. Other plugin will have access to this redis client for it's functioning. This is done for efficiency and to avoid duplication of resources. +## Redis Cluster + +If you startup a redis cluster and an ACA-Py agent loaded with either `redis_queue` or `redis_cache` plugin or both, then during the initialization of the plugin, it will bind an instance of `redis.asyncio.RedisCluster` (onto the `root_profile`). Other plugin will have access to this redis client for it's functioning. This is done for efficiency and to avoid duplication of resources. diff --git a/docs/deploying/UpgradingACA-Py.md b/docs/deploying/UpgradingACA-Py.md index c1ef8225..1b41c5e5 100644 --- a/docs/deploying/UpgradingACA-Py.md +++ b/docs/deploying/UpgradingACA-Py.md @@ -26,7 +26,7 @@ Once an upgrade is identified as needed, the process is: - Collect (if any) the actions to be taken to get from the version recorded in secure storage to the current [version.py] - Execute the actions from oldest to newest. - - If the same action is collected more than once (e.g., "Resave the + - If the same action is collected more than once (e.g., "Resave the Connection Records" is defined for two different versions), perform the action only once. 
- Store the current ACA-Py version (from [version.py]) in the secure storage @@ -60,35 +60,40 @@ connections), you may want to do a test upgrade offline first, to see if there is likely to be a service disruption during the upgrade. Plan accordingly! ## Tagged upgrades -Upgrades are defined in the [Upgrade Definition YML file], in addition to specifying upgrade actions by version they can also be specified by named tags. Unlike version based upgrades where all applicable version based actions will be performed based upon sorted order of versions, with named tags only actions corresponding to provided tags will be performed. Note: `--force-upgrade` is required when running name tags based upgrade [i.e. provding `--named-tag`] -Tags are specfied in YML file as below: -``` +Upgrades are defined in the [Upgrade Definition YML file], in addition to specifying upgrade actions by version they can also be specified by named tags. Unlike version based upgrades where all applicable version based actions will be performed based upon sorted order of versions, with named tags only actions corresponding to provided tags will be performed. Note: `--force-upgrade` is required when running name tags based upgrade (i.e. providing `--named-tag`). + +Tags are specified in YML file as below: + +```yaml fix_issue_rev_reg: fix_issue_rev_reg_records: true ``` -Example -``` +Example: + +```bash ./scripts/run_docker upgrade --force-upgrade --named-tag fix_issue_rev_reg -In case, running multiple tags [say test1 & test2]: +# In case, running multiple tags [say test1 & test2]: ./scripts/run_docker upgrade --force-upgrade --named-tag test1 --named-tag test2 ``` ## Subwallet upgrades + With multitenant enabled, there is a subwallet associated with each tenant profile, so there is a need to upgrade those sub wallets in addition to the base wallet associated with root profile. There are 2 options to perform such upgrades: - - `--upgrade-all-subwallets` - - This will apply the upgrade steps to all sub wallets [tenant profiles] and the base wallet [root profiles]. - - - `--upgrade-subwallet` - This will apply the upgrade steps to specified sub wallets [identified by wallet id] and the base wallet. +- `--upgrade-all-subwallets` + +This will apply the upgrade steps to all sub wallets (tenant profiles) and the base wallet (root profiles). + +- `--upgrade-subwallet` + +This will apply the upgrade steps to specified sub wallets (identified by wallet id) and the base wallet. - Note: multiple specification allowed +Note: multiple specifications allowed ## Exceptions @@ -126,4 +131,4 @@ options in future (post-0.8.1) ACA-Py versions. [CHANGELOG.md]: https://github.com/hyperledger/aries-cloudagent-python/blob/main/CHANGELOG.md [version.py]: https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/version.py -[Upgrade Definition YML file]: https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/commands/default_version_upgrade_config.yml \ No newline at end of file +[Upgrade Definition YML file]: https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/commands/default_version_upgrade_config.yml diff --git a/docs/deploying/deploymentModel.md b/docs/deploying/deploymentModel.md index da9e0285..8d49bf80 100644 --- a/docs/deploying/deploymentModel.md +++ b/docs/deploying/deploymentModel.md @@ -7,7 +7,7 @@ This document is a "concept of operations" for an instance of an Aries cloud agent deployed from the primary artifact (a PyPi package) produced by this repo. 
In such a deployment there are **always** two components - a configured agent itself, and a controller that injects into that agent the business rules for the particular agent instance (see diagram). -![ACA-Py Deployment Overview](../../assets/deploymentModel-full.png "ACA-Py Deployment Overview") +![ACA-Py Deployment Overview](../assets/deploymentModel-full.png "ACA-Py Deployment Overview") The deployed agent messages with other agents via DIDComm protocols, and as events associated with those messages occur, sends webhook HTTP notifications to the controller. The agent also exposes for the controller's exclusive use an HTTP API covering all of the administrative handlers for those events. The controller receives the notifications from the agent, decides (with business rules - possible by asking a person using a UI) how to respond to the event and calls back to the agent via the HTTP API. Of course, the controller may also initiate events (e.g. messaging another agent) by calling that same API. @@ -23,7 +23,7 @@ The sections below detail the internals of the ACA-Py and it's configurable elem **Aries cloud agent** implement services to manage the execution of DIDComm messaging protocols for interacting with other DIDComm agents, and exposes an administrative HTTP API that supports a controller to direct how the agent should respond to messaging events. The agent relies on the controller to provide the business rules for handling the messaging events, and to initiate the execution of new DIDComm protocol instances. The internals of an ACA-Py instance is diagramed below. -![ACA-Py Agent Internals](../../assets/deploymentModel-agent.png "ACA-Py Agent Internals") +![ACA-Py Agent Internals](../assets/deploymentModel-agent.png "ACA-Py Agent Internals") Instances of the Aries cloud agents are configured with the following sub-components: @@ -41,7 +41,7 @@ Instances of the Aries cloud agents are configured with the following sub-compon A controller provides the personality of Aries cloud agent instance - the business logic (human, machine or rules driven) that drive the behaviour of the agent. The controller’s “Business Logic” in a cloud agent could be built into the controller app, could be an integration back to an enterprise system, or even a user interface for an individual. In all cases, the business logic provide responses to agent events or initiates agent actions. A deployed controller talks to a single Aries cloud agent deployment and manages the configuration of that agent. Both can be configured and deployed to support horizontal scaling. -![Controller Internals](../../assets/deploymentModel-controller.png "Controller Internals") +![Controller Internals](../assets/deploymentModel-controller.png "Controller Internals") Generically, a controller is a web app invoked by HTTP webhook calls from its corresponding Aries cloud agent and invoking the DIDComm administration capabilities of the Aries cloud agent by calling the REST API exposed by that cloud agent. As well as responding to Aries cloud agent events, the controller initiates DIDComm protocol instances using the same REST API. @@ -49,11 +49,11 @@ The controller and Aries cloud agent deployment **MUST** secure the HTTP interfa A controller implements the following capabilities. -* **Initiator** - provides a mechanism to initiate new DIDComm protocol instances. The initiator invokes the REST API exposed by the Aries cloud agent to initiate the creation of a DIDComm protocol instance. 
For example, a permit-issuing service uses this mechanism to issue a Verifiable Credential associated with the issuance of a new permit. -* **Responder** - subscribes to and responds to events from the Aries cloud agent protocol message handlers, providing business-driven responses. The responder might respond immediately, or the event might cause a delay while the decision is determined, perhaps by sending the request to a person to decide. The controller may persist the event response state if the event is asynchronous - for example, when the event is passed to a person who may respond when they next use the web app. -* **Configuration** - manages the controller configuration data and the configuration of the Aries cloud agent. Configuration in this context includes things like: - * Credentials and Proof Requests to be Issued/Verified (respectively) by the Aries cloud agent. - * The configuration of the webhook handler to which the responder subscribes. +- **Initiator** - provides a mechanism to initiate new DIDComm protocol instances. The initiator invokes the REST API exposed by the Aries cloud agent to initiate the creation of a DIDComm protocol instance. For example, a permit-issuing service uses this mechanism to issue a Verifiable Credential associated with the issuance of a new permit. +- **Responder** - subscribes to and responds to events from the Aries cloud agent protocol message handlers, providing business-driven responses. The responder might respond immediately, or the event might cause a delay while the decision is determined, perhaps by sending the request to a person to decide. The controller may persist the event response state if the event is asynchronous - for example, when the event is passed to a person who may respond when they next use the web app. +- **Configuration** - manages the controller configuration data and the configuration of the Aries cloud agent. Configuration in this context includes things like: + - Credentials and Proof Requests to be Issued/Verified (respectively) by the Aries cloud agent. + - The configuration of the webhook handler to which the responder subscribes. While there are several examples of controllers, there is no “cookie cutter” repository to fork and customize. A controller is just a web service that receives HTTP requests (webhooks) and sends HTTP messages to the Aries cloud agent it controls via the REST API exposed by that agent. @@ -63,5 +63,5 @@ The Aries cloud agent CI pipeline configured into the repository generates a PyP Current examples of deployed instances of Aries cloud agent and controllers include: -* [indy-email-verification](https://github.com/bcgov/indy-email-verification) - a web app Controller that sends an email to a given email address with an embedded DIDComm invitation and on establishment of a connection, offers and provides the connected agent with an email control verifiable credential. -* [iiwbook](https://github.com/bcgov/iiwbook) - a web app Controller that on creation of a DIDComm connection, requests a proof of email control, and then sends to the connection a verifiable credential proving attendance at IIW. In between the proof and issuance is a human approval step using a simple web-based UI that implements a request queue. 
\ No newline at end of file +- [indy-email-verification](https://github.com/bcgov/indy-email-verification) - a web app Controller that sends an email to a given email address with an embedded DIDComm invitation and on establishment of a connection, offers and provides the connected agent with an email control verifiable credential. +- [iiwbook](https://github.com/bcgov/iiwbook) - a web app Controller that on creation of a DIDComm connection, requests a proof of email control, and then sends to the connection a verifiable credential proving attendance at IIW. In between the proof and issuance is a human approval step using a simple web-based UI that implements a request queue. diff --git a/docs/design/AnoncredsW3CCompatibility.md b/docs/design/AnoncredsW3CCompatibility.md new file mode 100644 index 00000000..31a48c66 --- /dev/null +++ b/docs/design/AnoncredsW3CCompatibility.md @@ -0,0 +1,664 @@ +# Supporting AnonCreds in W3C VC/VP Formats in Aries Cloud Agent Python + +This design proposes to extend the Aries Cloud Agent Python (ACA-PY) to support Hyperledger AnonCreds credentials and presentations in the W3C Verifiable Credentials (VC) and Verifiable Presentations (VP) Format. The aim is to transition from the legacy AnonCreds format specified in Aries-Legacy-Method to the W3C VC format. + +## Overview + +The pre-requisites for the work are: + +- The availability of the AnonCreds RS library supporting the generation and processing of AnonCreds VCs in W3C VC format. +- The availability of the AnonCreds RS library supporting the generation and verification of AnonCreds VPs in W3C VP format. +- The availability of support in the AnonCreds RS Python Wrapper for the W3C VC/VP capabilities in AnonCreds RS. +- Agreement on the Aries Issue Credential v2.0 and Present Proof v2.0 protocol attachment formats to use when issuing AnonCreds W3C VC format credentials, and when presenting AnonCreds W3C VP format presentations. + - For issuing, use the (proposed) [RFC 0809 VC-DI] Attachments + - For presenting, use the [RFC 0510 DIF Presentation Exchange] Attachments + +[RFC 0809 VC-DI]: https://github.com/hyperledger/aries-rfcs/pull/809 +[RFC 0510 DIF Presentation Exchange]: https://github.com/hyperledger/aries-rfcs/blob/main/features/0510-dif-pres-exch-attach/README.md + +As of 2024-01-15, these pre-requisites have been met. + +## Impacts on ACA-Py + +### Issuer + +Issuer support needs to be added for using the [RFC 0809 VC-DI] attachment format when sending Issue Credential v2.0 protocol`offer` and `issue` messages and when receiving `request` messages. + +Related notes: + +- The Issue Credential v1.0 protocol will not be updated to support AnonCreds W3C VC format credentials. +- Once an instance of the Issue Credential v2.0 protocol is started using [RFC 0809 VC-DI] format attachments, subsequent messages in the protocol **MUST** use [RFC 0809 VC-DI] attachments. +- The ACA-Py maintainers are discussing the possibility of making pluggable the Issue Credential v2.0 and Present Proof v2.0 attachment formats, to simplify supporting additional formats, including [RFC 0809 VC-DI]. + +A mechanism must be defined such that an Issuer controller can use the ACA-Py Admin API to initiate the sending of an AnonCreds credential Offer using the [RFC 0809 VC-DI] attachment format. + +A credential's encoded attributes are not included in the issued AnonCreds W3C VC format credential. To be determined how that impacts the issuing process. 
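For context on the "encoded attributes" mentioned above: in legacy AnonCreds issuance, each raw attribute value is accompanied by an integer encoding (the convention described in Aries RFC 0592), and it is the encoded form that is signed. The sketch below illustrates that convention only; it is not ACA-Py's actual helper.

```python
import hashlib

I32_BOUND = 2**31  # raw values that fit in a signed 32-bit integer pass through unchanged


def encode_attribute(raw: str) -> str:
    """Illustrative legacy AnonCreds attribute encoding: bounded integers are
    kept as-is; any other value is hashed with SHA-256 and the digest is
    rendered as a base-10 integer string."""
    try:
        value = int(raw)
        if -I32_BOUND <= value < I32_BOUND:
            return str(value)
    except (TypeError, ValueError):
        pass
    digest = hashlib.sha256(raw.encode("utf-8")).digest()
    return str(int.from_bytes(digest, "big"))


assert encode_attribute("25") == "25"
print(encode_attribute("alice@faber.edu"))  # a very large integer string
```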
+ +### Verifier + +A verifier wanting a W3C VP Format presentation will send the Present Proof v2.0 `request` message with an [RFC 0510 DIF Presentation Exchange] format attachment. + +If needed, the [RFC 0510 DIF Presentation Exchange] document will be clarified and possibly updated to enable its use for handling AnonCreds W3C VP format presentations. + +An AnonCreds W3C VP format presentation does not include the encoded revealed attributes, and the encoded values must be calculated as needed. To be determined where those would be needed. + +### Holder + +A holder must support [RFC 0809 VC-DI] attachments when receiving Issue Credential v2.0 `offer` and `issue` messages, and when sending `request` messages. + +On receiving an Issue Credential v2.0 `offer` message with a [RFC 0809 VC-DI], the holder **MUST** respond using the [RFC 0809 VC-DI] on the subsequent `request` message. + +On receiving a credential from an issuer in an [RFC 0809 VC-DI] attachment, the holder must process and store the credential for subsequent use in presentations. + +- The AnonCreds verifiable credential **MUST** support being used in both legacy AnonCreds and W3C VP format (DIF Presentation Exchange) presentations. + +On receiving an [RFC 0510 DIF Presentation Exchange] `request` message, a holder must include AnonCreds verifiable credentials in the search for credentials satisfying the request, and if found and selected for use, must construct the presentation using the [RFC 0510 DIF Presentation Exchange] presentation format, with an embedded AnonCreds W3C VP format presentation. + +## Issues to consider + +- If and how the W3C VC Format attachments for the Issue Credential V2.0 and Present Proof V2 Aries DIDComm Protocols should be used when using AnonCreds W3C VC Format credentials. Anticipated triggers: + - An Issuer Controller invokes the Admin API to trigger an Issue Credential v2.0 protocol instance such that the [RFC 0809 VC-DI] will be used. + - A Holder receives an Issue Credential v2.0 `offer` message with an [RFC 0809 VC-DI] attachment. + - A Verifier initiates a Present Proof v2.0 protocol instance with an [RFC 0510 DIF Presentation Exchange] that can be satisfied by AnonCreds VCs held by the holder. + - A Holder receives a present proof `request` message with an [RFC 0510 DIF Presentation Exchange] format attachment that can be satisfied with AnonCreds credentials held by the holder. + - How are the `restrictions` and `revocation` data elements conveyed? +- How AnonCreds W3C VC Format verifiable credentials are stored by the holder such that they will be discoverable when needed for creating verifiable presentations. +- How and when multiple signatures can/should be added to a W3C VC Format credential, enabling both AnonCreds and non-AnonCreds signatures on a single credential and their use in presentations. Completing a multi-signature controller is out of scope, however we want to consider and ensure the design is fundamentally compatible with multi-sig credentials. + +## Flow Chart + +![image](https://github.com/Whats-Cookin/aries-cloudagent-python/blob/design/w3c-compatibility/docs/design/anoncreds-w3c-verification-flow.png?raw=true) + +## Key Questions + +### What is the roadmap for delivery? What will we build first, then second? + +It appears that the issue and presentation sides can be approached independently, assuming that any stored AnonCreds VC can be used in an AnonCreds W3C VP format presentation. + +#### Issue Credential + +1. 
Update Admin API endpoints to initiate an Issue Credential v2.0 protocol to issue an AnonCreds credential in W3C VC format using [RFC 0809 VC-DI] format attachments. +2. Add support for the [RFC 0809 VC-DI] message attachment formats. + 1. Should the attachment format be made pluggable as part of this? From the maintainers: _If we did make it pluggable, [this](https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/protocols/issue_credential/v2_0/messages/cred_format.py#L23) would be the point where that would take place. Since these values are hard coded, it is not pluggable currently, as noted. I've been dissatisfied with how this particular piece works for a while. I think making it pluggable, if done right, could help clean it up nicely. A plugin would then define their own implementation of [V20CredFormatHandler](https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/protocols/issue_credential/v2_0/formats/handler.py#L28). (@dbluhm)_ +3. Update the v2.0 Issue Credential protocol handler to support a "[RFC 0809 VC-DI] mode" such that when a protocol instance starts with that format, it continues with it until completion, supporting issuing AnonCreds credentials in the process. This includes both the sending and receiving of all protocol message types. + +#### Present Proof + +1. Adjust as needed the sending of a Present Proof request using the [RFC 0510 DIF Presentation Exchange] with support (to be defined) for requesting AnonCreds VCs. +2. Adjust as needed the processing of a Present Proof `request` message with an [RFC 0510 DIF Presentation Exchange] attachment so that AnonCreds VCs can found and used in the subsequent response. + 1. AnonCreds VCs issued as legacy or W3C VC format credentials should be usable in AnonCreds W3C VP format presentations. +3. Update the creation of an [RFC 0510 DIF Presentation Exchange] presentation submission to support the use of AnonCreds VCs as the source of the VPs. +4. Update the verifier receipt of a Present Proof v2.0 `presentation` message with an [RFC 0510 DIF Presentation Exchange] containing AnonCreds W3C VP(s) derived from AnonCreds source VCs. + +### What are the functions we are going to wrap? 
+ +After thoroughly reviewing upcoming changes from [anoncreds-rs PR273](https://github.com/hyperledger/anoncreds-rs/pull/273), the classes or `AnoncredsObject` impacted by changes are as follows: + +[W3CCredential](https://github.com/hyperledger/anoncreds-rs/pull/273/files#diff-6f8cbd34bbd373240b6af81f159177023c05b074b63c7757fc6b3796a66ee240R424) + +- class methods (`create`, `load`) +- instance methods (`process`, `to_legacy`, `add_non_anoncreds_integrity_proof`, `set_id`, `set_subject_id`, `add_context`, `add_type`) +- class properties (`schema_id`, `cred_def_id`, `rev_reg_id`, `rev_reg_index`) +- bindings functions (`create_w3c_credential`, `process_w3c_credential`, `_object_from_json`, `_object_get_attribute`, `w3c_credential_add_non_anoncreds_integrity_proof`, `w3c_credential_set_id`, `w3c_credential_set_subject_id`, `w3c_credential_add_context`, `w3c_credential_add_type`) + +[W3CPresentation](https://github.com/hyperledger/anoncreds-rs/pull/273/files#diff-6f8cbd34bbd373240b6af81f159177023c05b074b63c7757fc6b3796a66ee240R791) + +- class methods (`create`, `load`) +- instance methods (`verify`) +- bindings functions (`create_w3c_presentation`, `_object_from_json`, `verify_w3c_presentation`) + +They will be added to [\_\_init\_\_.py](https://github.com/hyperledger/anoncreds-rs/blob/main/wrappers/python/anoncreds/__init__.py) as additional exports of AnoncredsObject. + +We also have to consider which classes or anoncreds objects have been modified + +The classes modified according to the same [PR](https://github.com/hyperledger/anoncreds-rs/pull/273) mentioned above are: + +[Credential](https://github.com/hyperledger/anoncreds-rs/pull/273/files#diff-6f8cbd34bbd373240b6af81f159177023c05b074b63c7757fc6b3796a66ee240R402) + +- added class methods (`from_w3c`) +- added instance methods (`to_w3c`) +- added bindings functions (`credential_from_w3c`, `credential_to_w3c`) + +[PresentCredential](https://github.com/hyperledger/anoncreds-rs/pull/273/files#diff-6f8cbd34bbd373240b6af81f159177023c05b074b63c7757fc6b3796a66ee240R603) + +- modified instance methods (`_get_entry`, `add_attributes`, `add_predicates`) + +#### Creating a W3C VC credential from credential definition, and issuing and presenting it as is + +The issuance, presentation and verification of legacy anoncreds are implemented in this [./aries_cloudagent/anoncreds](https://github.com/hyperledger/aries-cloudagent-python/tree/main/aries_cloudagent/anoncreds) directory. Therefore, we will also start from there. + +Let us navigate these implementation examples through the respective processes of the concerning agents - **Issuer** and **Holder** as described in [https://github.com/hyperledger/anoncreds-rs/blob/main/README.md](https://github.com/hyperledger/anoncreds-rs/blob/main/README.md). +We will proceed through the following processes in comparison with the legacy anoncreds implementations while watching out for signature differences between the two. +Looking at the [/anoncreds/issuer.py](https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/anoncreds/issuer.py) file, from `AnonCredsIssuer` class: + +Create VC_DI Credential Offer + +According to this DI credential offer attachment format - [didcomm/w3c-di-vc-offer@v0.1](https://github.com/hyperledger/aries-rfcs/pull/809/files#diff-40b1f86053dd6f0b34250d5be1319d3a0662b96a5a121957fe4dc8cceaa9cbc8R30-R63), + +- binding_required +- binding_method +- credential_definition + +could be the parameters for `create_offer` method. 
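Purely as an illustration of that idea, the sketch below shapes an offer payload from those three fields. The function name and structure are hypothetical and are not existing ACA-Py or anoncreds-rs code:

```python
from typing import Optional


def build_vc_di_offer_body(
    credential_definition: dict,
    binding_required: bool = True,
    binding_method: Optional[dict] = None,
) -> dict:
    """Hypothetical helper: assemble a didcomm/w3c-di-vc-offer@v0.1-style
    payload from the three fields listed above (not existing ACA-Py code)."""
    return {
        "binding_required": binding_required,
        "binding_method": binding_method or {},
        "credential_definition": credential_definition,
    }
```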
+
+Create VC_DI Credential
+
+**NOTE: There have been some changes to the _encoding of attribute values_ when creating a credential, so we have to adjust to them.**
+
+```python
+async def create_credential(
+    self,
+    credential_offer: dict,
+    credential_request: dict,
+    credential_values: dict,
+) -> str:
+    ...
+    ...
+    try:
+        credential = await asyncio.get_event_loop().run_in_executor(
+            None,
+            lambda: W3CCredential.create(
+                cred_def.raw_value,
+                cred_def_private.raw_value,
+                credential_offer,
+                credential_request,
+                raw_values,
+                None,
+                None,
+                None,
+                None,
+            ),
+        )
+    ...
+```
+
+Create VC_DI Credential Request
+
+```python
+async def create_vc_di_credential_request(
+    self, credential_offer: dict, credential_definition: CredDef, holder_did: str
+) -> Tuple[str, str]:
+    ...
+    ...
+    try:
+        secret = await self.get_master_secret()
+        (
+            cred_req,
+            cred_req_metadata,
+        ) = await asyncio.get_event_loop().run_in_executor(
+            None,
+            W3CCredentialRequest.create,
+            None,
+            holder_did,
+            credential_definition.to_native(),
+            secret,
+            AnonCredsHolder.MASTER_SECRET_ID,
+            credential_offer,
+        )
+    ...
+```
+
+Create VC_DI Credential Presentation
+
+```python
+async def create_vc_di_presentation(
+    self,
+    presentation_request: dict,
+    requested_credentials: dict,
+    schemas: Dict[str, AnonCredsSchema],
+    credential_definitions: Dict[str, CredDef],
+    rev_states: dict = None,
+) -> str:
+    ...
+    ...
+    try:
+        secret = await self.get_master_secret()
+        presentation = await asyncio.get_event_loop().run_in_executor(
+            None,
+            Presentation.create,
+            presentation_request,
+            present_creds,
+            self_attest,
+            secret,
+            {
+                schema_id: schema.to_native()
+                for schema_id, schema in schemas.items()
+            },
+            {
+                cred_def_id: cred_def.to_native()
+                for cred_def_id, cred_def in credential_definitions.items()
+            },
+        )
+    ...
+```
+
+#### Converting an already issued legacy AnonCreds credential to VC_DI format (and vice versa)
+
+In this case, we can use the `to_w3c` method of the `Credential` class to convert from legacy to W3C format, and the `to_legacy` method of the `W3CCredential` class to convert from W3C to legacy format.
+
+We could call the `to_w3c` method like this:
+
+```python
+vc_di_cred = Credential.to_w3c(cred_def)
+```
+
+and for `to_legacy`:
+
+```python
+legacy_cred = W3CCredential.to_legacy()
+```
+
+We don't need to pass any parameters to it, as it calls the `Credential.from_w3c()` method under the hood.
+
+### Format Handler for Issue_credential V2_0 Protocol
+
+Keeping in mind that we are trying to create AnonCreds (not another type of VC) in W3C format, what if we add protocol-level **vc_di** format support by adding a new format `VC_DI` in `./protocols/issue_credential/v2_0/messages/cred_format.py`:
+
+```python
+# /protocols/issue_credential/v2_0/messages/cred_format.py
+
+class Format(Enum):
+    """Attachment Format"""
+    INDY = FormatSpec(...)
+    LD_PROOF = FormatSpec(...)
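+    # Proposed addition for this design: register the RFC 0809 VC-DI attachment
+    # format alongside INDY and LD_PROOF. The detail record and handler path
+    # referenced below do not exist yet; they are part of this proposal.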
+    VC_DI = FormatSpec(
+        "vc_di/",
+        CredExRecordVCDI,
+        DeferLoad(
+            "aries_cloudagent.protocols.issue_credential.v2_0"
+            ".formats.vc_di.handler.AnonCredsW3CFormatHandler"
+        ),
+    )
+```
+
+And create a new detail record, `CredExRecordVCDI` (sketched below as `CredExRecordW3C`), modeled on `V20CredExRecordLDProof`:
+
+```python
+# /protocols/issue_credential/v2_0/models/detail/w3c.py
+
+class CredExRecordW3C(BaseRecord):
+    """Credential exchange W3C detail record."""
+
+    class Meta:
+        """CredExRecordW3C metadata."""
+
+        schema_class = "CredExRecordW3CSchema"
+
+    RECORD_ID_NAME = "cred_ex_w3c_id"
+    RECORD_TYPE = "w3c_cred_ex_v20"
+    TAG_NAMES = {"~cred_ex_id"} if UNENCRYPTED_TAGS else {"cred_ex_id"}
+    RECORD_TOPIC = "issue_credential_v2_0_w3c"
+```
+
+Based on the proposed credential attachment format with the new Data Integrity proof in [aries-rfcs 809](https://github.com/hyperledger/aries-rfcs/pull/809/files#diff-40b1f86053dd6f0b34250d5be1319d3a0662b96a5a121957fe4dc8cceaa9cbc8R132-R151):
+
+```json
+{
+  "@id": "284d3996-ba85-45d9-964b-9fd5805517b6",
+  "@type": "https://didcomm.org/issue-credential/2.0/issue-credential",
+  "comment": "",
+  "formats": [
+    {
+      "attach_id": "5b38af88-d36f-4f77-bb7a-2f04ab806eb8",
+      "format": "didcomm/w3c-di-vc@v0.1"
+    }
+  ],
+  "credentials~attach": [
+    {
+      "@id": "5b38af88-d36f-4f77-bb7a-2f04ab806eb8",
+      "mime-type": "application/ld+json",
+      "data": {
+        "base64": "ewogICAgICAgICAgIkBjb250ZXogWwogICAgICAg...(clipped)...RNVmR0SXFXZhWXgySkJBIgAgfQogICAgICAgIH0="
+      }
+    }
+  ]
+}
+```
+
+Assuming `VCDIDetail` and `VCDIOptions` are already in place, `VCDIDetailSchema` can be created like so:
+
+```python
+# /protocols/issue_credential/v2_0/formats/vc_di/models/cred_detail.py
+
+class VCDIDetailSchema(BaseModelSchema):
+    """VC_DI verifiable credential detail schema."""
+
+    class Meta:
+        """Accept parameter overload."""
+
+        unknown = INCLUDE
+        model_class = VCDIDetail
+
+    credential = fields.Nested(
+        CredentialSchema(),
+        required=True,
+        metadata={
+            "description": "Detail of the VC_DI Credential to be issued",
+            "example": {
+                "@id": "284d3996-ba85-45d9-964b-9fd5805517b6",
+                "@type": "https://didcomm.org/issue-credential/2.0/issue-credential",
+                "comment": "",
+                "formats": [
+                    {
+                        "attach_id": "5b38af88-d36f-4f77-bb7a-2f04ab806eb8",
+                        "format": "didcomm/w3c-di-vc@v0.1"
+                    }
+                ],
+                "credentials~attach": [
+                    {
+                        "@id": "5b38af88-d36f-4f77-bb7a-2f04ab806eb8",
+                        "mime-type": "application/ld+json",
+                        "data": {
+                            "base64": "ewogICAgICAgICAgIkBjb250ZXogWwogICAgICAg...(clipped)...RNVmR0SXFXZhWXgySkJBIgAgfQogICAgICAgIH0="
+                        }
+                    }
+                ]
+            }
+        },
+    )
+```
+
+Then create the W3C format handler with a mapping like so:
+
+```python
+# /protocols/issue_credential/v2_0/formats/vc_di/handler.py
+
+mapping = {
+    CRED_20_PROPOSAL: VCDIDetailSchema,
+    CRED_20_OFFER: VCDIDetailSchema,
+    CRED_20_REQUEST: VCDIDetailSchema,
+    CRED_20_ISSUE: VerifiableCredentialSchema,
+}
+```
+
+Doing so would give us more freedom to define a schema suited to AnonCreds in W3C format. Once the proposal step of the protocol can handle the W3C format, the rest of the flow can probably be implemented by adding a `vc_di` flag to the corresponding routes.
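+
+To make the shape of that handler a bit more concrete, below is a minimal sketch of `AnonCredsW3CFormatHandler` (the class name taken from the proposed `DeferLoad` path above). It shows only the `format` attribute and the `validate_fields` pattern used by the existing format handlers; the offer/request/issue callbacks are omitted, and `VCDIDetailSchema` and `VerifiableCredentialSchema` are the proposed schemas referenced above, not existing imports.
+
+```python
+# /protocols/issue_credential/v2_0/formats/vc_di/handler.py (sketch only)
+from aries_cloudagent.protocols.issue_credential.v2_0.formats.handler import (
+    V20CredFormatHandler,
+)
+from aries_cloudagent.protocols.issue_credential.v2_0.message_types import (
+    CRED_20_ISSUE,
+    CRED_20_OFFER,
+    CRED_20_PROPOSAL,
+    CRED_20_REQUEST,
+)
+from aries_cloudagent.protocols.issue_credential.v2_0.messages.cred_format import (
+    V20CredFormat,
+)
+
+# VCDIDetailSchema / VerifiableCredentialSchema would come from the proposed
+# formats/vc_di/models modules sketched earlier in this document.
+
+
+class AnonCredsW3CFormatHandler(V20CredFormatHandler):
+    """Proposed handler for the VC_DI (AnonCreds W3C) attachment format."""
+
+    format = V20CredFormat.Format.VC_DI  # the new enum member proposed above
+
+    @classmethod
+    def validate_fields(cls, message_type: str, attachment_data: dict) -> None:
+        """Validate attachment data against the schema mapped to the message type."""
+        mapping = {
+            CRED_20_PROPOSAL: VCDIDetailSchema,  # proposed schemas (see above)
+            CRED_20_OFFER: VCDIDetailSchema,
+            CRED_20_REQUEST: VCDIDetailSchema,
+            CRED_20_ISSUE: VerifiableCredentialSchema,
+        }
+        schema = mapping[message_type]()
+        schema.load(attachment_data)
+```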
+
+### Admin API Attachments
+
+To make sure that once an endpoint has been called to trigger the `Issue Credential` flow with the [RFC 0809 VC-DI] attachment formats, the subsequent endpoints also follow this format, we can extend this [ATTACHMENT_FORMAT](https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py#L41-L59) dictionary with the proposed `VC_DI` format.
+
+```python
+# Format specifications
+ATTACHMENT_FORMAT = {
+    CRED_20_PROPOSAL: {
+        V20CredFormat.Format.INDY.api: "hlindy/cred-filter@v2.0",
+        V20CredFormat.Format.LD_PROOF.api: "aries/ld-proof-vc-detail@v1.0",
+        V20CredFormat.Format.VC_DI.api: "aries/vc-di-detail@v2.0",
+    },
+    CRED_20_OFFER: {
+        V20CredFormat.Format.INDY.api: "hlindy/cred-abstract@v2.0",
+        V20CredFormat.Format.LD_PROOF.api: "aries/ld-proof-vc-detail@v1.0",
+        V20CredFormat.Format.VC_DI.api: "aries/vc-di-detail@v2.0",
+    },
+    CRED_20_REQUEST: {
+        V20CredFormat.Format.INDY.api: "hlindy/cred-req@v2.0",
+        V20CredFormat.Format.LD_PROOF.api: "aries/ld-proof-vc-detail@v1.0",
+        V20CredFormat.Format.VC_DI.api: "aries/vc-di-detail@v2.0",
+    },
+    CRED_20_ISSUE: {
+        V20CredFormat.Format.INDY.api: "hlindy/cred@v2.0",
+        V20CredFormat.Format.LD_PROOF.api: "aries/ld-proof-vc@v1.0",
+        V20CredFormat.Format.VC_DI.api: "aries/vc-di@v2.0",
+    },
+}
+```
+
+This [\_formats_filter](https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/protocols/issue_credential/v2_0/routes.py#L442-L461) function takes care of keeping the attachment formats uniform across each step of the flow. It gets called in:
+
+- the [\_create_free_offer](https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/protocols/issue_credential/v2_0/routes.py#L877) function, which is called from the handler function of the `/issue-credential-2.0/send-offer` route (in addition to other offer routes)
+- the [credential_exchange_send_free_request](https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/protocols/issue_credential/v2_0/routes.py#L1229) handler function of the `/issue-credential-2.0/send-request` route
+- the [credential_exchange_create](https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/protocols/issue_credential/v2_0/routes.py#L630) handler function of the `/issue-credential-2.0/create` route
+- the [credential_exchange_send](https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/protocols/issue_credential/v2_0/routes.py#L721) handler function of the `/issue-credential-2.0/send` route
+
+The same goes for the [ATTACHMENT_FORMAT](https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/protocols/present_proof/v2_0/message_types.py#L33-L47) dictionary of the `Present Proof` flow. In this case, the DIF Presentation Exchange formats in these [test vectors](https://github.com/TimoGlastra/anoncreds-w3c-test-vectors/tree/main/test-vectors), which are influenced by [RFC 0510 DIF Presentation Exchange](https://github.com/hyperledger/aries-rfcs/blob/main/features/0510-dif-pres-exch-attach/README.md), will be implemented. Here, the [\_formats_attach](https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/protocols/present_proof/v2_0/routes.py#L403-L422) function serves the same purpose.
It gets called in: + +- [present_proof_send_proposal](https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/protocols/present_proof/v2_0/routes.py#L833) handler function of `/present-proof-2.0/send-proposal` route +- [present_proof_create_request](https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/protocols/present_proof/v2_0/routes.py#L916) handler function of `/present-proof-2.0/create-request` route +- [present_proof_send_free_request](https://github.com/hyperledger/aries-cloudagent-python/blob/main/aries_cloudagent/protocols/present_proof/v2_0/routes.py#L998) handler function of `/present-proof-2.0/send-request` route + +#### Credential Exchange Admin Routes + +- /issue-credential-2.0/create-offer + +This route indirectly calls `_formats_filters` function to create credential proposal, which is in turn used to create a credential offer in the filter format. The request body for this route might look like this: + +```python +{ + "filter": ["vc_di"], + "comment: , + "auto-issue": true, + "auto-remove": true, + "replacement_id": , + "credential_preview": { + "@type": "issue-credential/2.0/credential-preview", + "attributes": { + ... + ... + } + } +} +``` + +- /issue-credential-2.0/create + +This route indirectly calls `_format_result_with_details` function to generate a cred_ex_record in the specified format, which is then returned. The request body for this route might look like this: + +```python +{ + "filter": ["vc_di"], + "comment: , + "auto-remove": true, + "credential_preview": { + "@type": "issue-credential/2.0/credential-preview", + "attributes": { + ... + ... + } + } +} +``` + +- /issue-credential-2.0/send + +The request body for this route might look like this: + +```python +{ + "connection_id": , + "filter": ["vc_di"], + "comment: , + "auto-remove": true, + "replacement_id": , + "credential_preview": { + "@type": "issue-credential/2.0/credential-preview", + "attributes": { + ... + ... + } + } +} +``` + +- /issue-credential-2.0/send-offer + +The request body for this route might look like this: + +```python +{ + "connection_id": , + "filter": ["vc_di"], + "comment: , + "auto-issue": true, + "auto-remove": true, + "replacement_id": , + "holder_did": , + "credential_preview": { + "@type": "issue-credential/2.0/credential-preview", + "attributes": { + ... + ... + } + } +} +``` + +- /issue-credential-2.0/send-request + +The request body for this route might look like this: + +```python +{ + "connection_id": , + "filter": ["vc_di"], + "comment: , + "auto-remove": true, + "replacement_id": , + "holder_did": , + "credential_preview": { + "@type": "issue-credential/2.0/credential-preview", + "attributes": { + ... + ... + } + } +} +``` + +#### Presentation Admin Routes + +- /present-proof-2.0/send-proposal + +The request body for this route might look like this: + +```python +{ + ... + ... + "connection_id": , + "presentation_proposal": ["vc_di"], + "comment: , + "auto-present": true, + "auto-remove": true, + "trace": false +} +``` + +- /present-proof-2.0/create-request + +The request body for this route might look like this: + +```python +{ + ... + ... + "connection_id": , + "presentation_proposal": ["vc_di"], + "comment: , + "auto-verify": true, + "auto-remove": true, + "trace": false +} +``` + +- /present-proof-2.0/send-request + +The request body for this route might look like this: + +```python +{ + ... + ... 
+ "connection_id": , + "presentation_proposal": ["vc_di"], + "comment: , + "auto-verify": true, + "auto-remove": true, + "trace": false +} + +``` + +- /present-proof-2.0/records/{pres_ex_id}/send-presentation + +The request body for this route might look like this: + +```python +{ + "presentation_definition": , + "auto_remove": true, + "dif": { + issuer_id: "", + record_ids: { + "": ["", ""], + "": [""], + } + }, + "reveal_doc": { + // vc_di dict + } + +} + +``` + +### How a W3C credential is stored in the wallet + +Storing a credential in the wallet is somewhat dependent on the kinds of metadata that are relevant. The metadata mapping between the W3C credential and an AnonCreds credential is not fully clear yet. + +One of the questions we need to answer is whether the preferred approach is to modify the existing store credential function so that any credential type is a valid input, or whether there should be a special function just for storing W3C credentials. + +We will duplicate this [store_credential](https://github.com/hyperledger/aries-cloudagent-python/blob/8cfe8283ddb2a85e090ea1b8a916df2d78298ec0/aries_cloudagent/anoncreds/holder.py#L167) function and modify it: + +```python +async def store_w3c_credential(...) { + ... + ... + try: + cred = W3CCredential.load(credential_data) + ... + ... +} +``` + +**Question: Would it also be possible to generate the credentials on the fly to eliminate the need for storage?** + +**Answer: I don't think it is possible to eliminate the need for storage, and notably the secure storage (encrypted at rest) supported in Askar.** + +### How can we handle multiple signatures on a W3C VC Format credential? + +Only one of the signature types (CL) is allowed in the AnonCreds format, so if a W3C VC is created by `to_legacy()`, all signature types that can't be turned into a CL signature will be dropped. This would make the conversion lossy. Similarly, an AnonCreds credential carries only the CL signature, limiting output from `to_w3c()` signature types that can be derived from the source CL signature. A possible future enhancement would be to add an extra field to the AnonCreds data structure, in which additional signatures could be stored, even if they are not used. This could eliminate the lossiness, but it adds extra complexity and may not be worth doing. + +- Unlike a "typical" non-AnonCreds W3C VC, an AnonCreds VC is _never_ directly presented to a verifier. Rather, a derivation of the credential is generated, and it is the derivation that is shared with the verifier as a presentation. The derivation: + - Generates presentation-specific signatures to be verified. + - Selectively reveals attributes. + - Generates proofs of the requested predicates. + - Generates a proof of knowledge of the link secret blinded in the verifiable credential. + +### Compatibility with AFJ: how can we make sure that we are compatible? + +We will write a test for the Aries Agent Test Framework that issues a W3C VC instead of an AnonCreds credential, and then run that test where one of the agents is ACA-PY and the other is based on AFJ -- and vice versa. Also write a test where a W3C VC is presented after an AnonCreds issuance, and run it with the two roles played by the two different agents. This is a simple approach, but if the tests pass, this should eliminate almost all risk of incompatibility. + +### Will we introduce new dependencies, and what is risky or easy? 
+
+Any significant bugs in the Rust implementation may prevent our wrappers from working, which would also prevent progress (or at least confirmed test results) on the higher-level code.
+
+If AFJ lags behind in delivering equivalent functionality, we may not be able to demonstrate compatibility with the test harness.
+
+### Where should the new issuance code go?
+
+The [vc](https://github.com/hyperledger/aries-cloudagent-python/tree/main/aries_cloudagent/vc) directory contains the code to verify VCs; is this a logical place to add the code for issuance?
+
+### What do we call the new things? Flexcreds? Or just W3C_xxx?
+
+Are we defining a concept called Flexcreds, that is, a credential with a proof array from which more specific or limited credentials can be generated? If so, should this be included in the naming?
+
+- I don't think naming comes into play. We are creating and deriving presentations from VC Data Integrity Proofs using an AnonCreds cryptosuite. As such, these are "stock" W3C verifiable credentials.
+
+### How can a wallet retain the capability to present ONLY an AnonCreds credential?
+
+If the wallet receives a "Flexcred" credential object with an array of proofs, the wallet may wish to present ONLY the zero-knowledge AnonCreds proof.
+
+How will wallets support that in a way that is developer-friendly to wallet devs?
+
+- The trigger for wallets to generate a W3C VP Format presentation is that they have received an [RFC 0510 DIF Presentation Exchange] request that can be satisfied with an AnonCreds verifiable credential in their storage. Once we decide to use one or more AnonCreds VCs to satisfy a presentation, we'll derive such a presentation and send it using the [RFC 0510 DIF Presentation Exchange] attachment for the `presentation` message of the Present Proof v2.0 protocol.
diff --git a/docs/design/anoncreds-w3c-verification-flow.png b/docs/design/anoncreds-w3c-verification-flow.png
new file mode 100644
index 00000000..ee098f15
Binary files /dev/null and b/docs/design/anoncreds-w3c-verification-flow.png differ
diff --git a/docs/features/AdminAPI.md b/docs/features/AdminAPI.md
index d19f132b..464054a0 100644
--- a/docs/features/AdminAPI.md
+++ b/docs/features/AdminAPI.md
@@ -6,7 +6,7 @@ ACA-Py provides an OpenAPI-documented REST interface for administering the agent
 To see the specifics of the supported endpoints, as well as the expected request and response formats, it is recommended to run the `aca-py` agent with the `--admin {HOST} {PORT}` and `--admin-insecure-mode` command line parameters. This exposes the OpenAPI UI on the provided port for interaction via a web browser. For production deployments, run the agent with `--admin-api-key {KEY}` and add the `X-API-Key: {KEY}` header to all requests instead of using the `--admin-insecure-mode` parameter.
 
-![Admin API Screenshot](../../assets/adminApi.png)
+![Admin API Screenshot](../assets/adminApi.png)
 
 To invoke a specific method:
 
diff --git a/docs/features/DevReadMe.md b/docs/features/DevReadMe.md
index 6205c656..77feaa63 100644
--- a/docs/features/DevReadMe.md
+++ b/docs/features/DevReadMe.md
@@ -1,6 +1,6 @@
 # Developer's Read Me for Hyperledger Aries Cloud Agent - Python
 
-See the [README](../../release/acapy-README) for details about this repository and information about how the Aries Cloud Agent - Python fits into the Aries project and relates to Indy.
+See the [README](../release/acapy-README.md) for details about this repository and information about how the Aries Cloud Agent - Python fits into the Aries project and relates to Indy.
## Table of Contents @@ -11,12 +11,13 @@ See the [README](../../release/acapy-README) for details about this repository a - [Docker](#docker) - [Locally Installed](#locally-installed) - [About ACA-Py Command Line Parameters](#about-aca-py-command-line-parameters) - - [Provisioning a Wallet](#provisioning-a-wallet) + - [Provisioning Secure Storage](#provisioning-secure-storage) - [Mediation](#mediation) - [Multi-tenancy](#multi-tenancy) - [JSON-LD Credentials](#json-ld-credentials) - [Developing](#developing) - [Prerequisites](#prerequisites) + - [Running In A Dev Container](#running-in-a-dev-container) - [Running Locally](#running-locally) - [Logging](#logging) - [Running Tests](#running-tests) @@ -32,12 +33,12 @@ Aries Cloud Agent Python (ACA-Py) is a configurable, extensible, non-mobile Arie The information on this page assumes you are developer with a background in decentralized identity, Aries, DID Methods, and verifiable credentials, especially AnonCreds. If you aren't familiar with those concepts and projects, -please use our [Getting Started Guide](../../gettingStarted/) +please use our [Getting Started Guide](../gettingStarted/README.md) to learn more. ## Developer Demos -To put ACA-Py through its paces at the command line, checkout our [demos](../../AriesDeveloperDemos) page. +To put ACA-Py through its paces at the command line, checkout our [demos](../demo/README.md) page. ## Running @@ -120,19 +121,19 @@ aca-py provision --wallet-type askar --seed $SEED For additional `provision` options, execute `aca-py provision --help`. -Additional information about secure storage options and configuration settings can be found [here](../../deploying/Databases). +Additional information about secure storage options and configuration settings can be found [here](../deploying/Databases.md). ### Mediation -ACA-Py can also run in mediator mode - ACA-Py can be run *as* a mediator (it can mediate connections for other agents), or it can connect to an external mediator to mediate its own connections. See the [docs on mediation](Mediation.md) for more info. +ACA-Py can also run in mediator mode - ACA-Py can be run _as_ a mediator (it can mediate connections for other agents), or it can connect to an external mediator to mediate its own connections. See the [docs on mediation](./Mediation.md) for more info. ### Multi-tenancy -ACA-Py can also be started in multi-tenant mode. This allows the agent to serve multiple tenants, that each have their own wallet. See the [docs on multi-tenancy](Multitenancy.md) for more info. +ACA-Py can also be started in multi-tenant mode. This allows the agent to serve multiple tenants, that each have their own wallet. See the [docs on multi-tenancy](./Multitenancy.md) for more info. ### JSON-LD Credentials -ACA-Py can issue W3C Verifiable Credentials using Linked Data Proofs. See the [docs on JSON-LD Credentials](JsonLdCredentials.md) for more info. +ACA-Py can issue W3C Verifiable Credentials using Linked Data Proofs. See the [docs on JSON-LD Credentials](./JsonLdCredentials.md) for more info. ## Developing @@ -142,7 +143,7 @@ ACA-Py can issue W3C Verifiable Credentials using Linked Data Proofs. See the [d ### Running In A Dev Container -The dev container environment is a great way to deploy agents quickly with code changes and an interactive debug session. Detailed information can be found in the [Docs On Devcontainers](devcontainer.md). 
It is specific for vscode, so if you prefer another code editor or IDE you will need to figure it out on your own, but it is highly recommended to give this a try. +The dev container environment is a great way to deploy agents quickly with code changes and an interactive debug session. Detailed information can be found in the [Docs On Devcontainers](./devcontainer.md). It is specific for vscode, so if you prefer another code editor or IDE you will need to figure it out on your own, but it is highly recommended to give this a try. One thing to be aware of is, unlike the demo, none of the steps are automated. You will need to create public dids, connections and all the other steps yourself. Using the demo and studying the flow and then copying them with your dev container debug session is a great way to learn how everything works. @@ -172,7 +173,7 @@ Refer to [the previous section](#running) for instructions on how to run ACA-Py. ### Logging -You can find more details about logging and log levels [here](../../testing/Logging/). +You can find more details about logging and log levels [here](../testing/Logging.md). ### Running Tests @@ -229,7 +230,7 @@ There are some good examples of various test scenarios for you to work from incl The test suite also displays the current code coverage after each run so you can see how much of your work is covered by tests. Use your best judgement for how much coverage is sufficient. -Please also refer to the [contributing guidelines](../../contributing/CONTRIBUTING/) and [code of conduct](../../contributing/CODE_OF_CONDUCT/). +Please also refer to the [contributing guidelines](../contributing/CONTRIBUTING.md) and [code of conduct](../contributing/CODE_OF_CONDUCT.md). ## Publishing Releases diff --git a/docs/features/Endorser.md b/docs/features/Endorser.md index 342292d6..a645309a 100644 --- a/docs/features/Endorser.md +++ b/docs/features/Endorser.md @@ -32,7 +32,7 @@ Web hooks will be triggered to notify each ACA-Py agent of any transaction reque The following start-up parameters are supported by ACA-Py: -``` +```bash Endorsement: --endorser-protocol-role Specify the role ('author' or 'endorser') which this agent will participate. Authors will request transaction endorsement from an Endorser. Endorsers will endorse transactions from @@ -71,13 +71,13 @@ The Endorser makes use of the [Event Bus](https://github.com/hyperledger/aries-c The overall architecture can be illustrated as: -![Class Diagram](../features/endorser-design.png) +![Class Diagram](../assets/endorser-design.png) ### Create Credential Definition and Revocation Registry An example of an Endorser flow is as follows, showing how a credential definition endorsement is received and processed, and optionally kicks off the revocation registry process: -![Sequence Diagram](../features/endorse-cred-def.png) +![Sequence Diagram](../assets/endorse-cred-def.png) You can see that there is a standard endorser flow happening each time there is a ledger write (illustrated in the "Endorser" process). @@ -95,7 +95,7 @@ Using the EventBus decouples the event sequence. Any functions triggered by an ... and an example of creating a DID and promoting it to public (and creating an ATTRIB for the endpoint: -![Sequence Diagram](../features/endorse-public-did.png) +![Sequence Diagram](../assets/endorse-public-did.png) You can see the same endorsement processes in this sequence. 
diff --git a/docs/features/JsonLdCredentials.md b/docs/features/JsonLdCredentials.md index 314dabdc..2f083129 100644 --- a/docs/features/JsonLdCredentials.md +++ b/docs/features/JsonLdCredentials.md @@ -16,6 +16,7 @@ By design Hyperledger Aries is credential format agnostic. This means you can us - [Issuing Credentials](#issuing-credentials) - [Retrieving Issued Credentials](#retrieving-issued-credentials) - [Present Proof](#present-proof) +- [VC-API](#vc-api) ## General Concept @@ -42,7 +43,7 @@ Contrary to Indy credentials, JSON-LD credentials do not need a schema or creden It is required that every property key in the document can be mapped to an IRI. This means the property key must either be an IRI by default, or have the shorthand property mapped in the `@context` of the document. If you have properties that are not mapped to IRIs, the Issue Credential API will throw the following error: -> "\ attributes dropped. Provide definitions in context to correct. [\]" +> ` attributes dropped. Provide definitions in context to correct. []` For credentials the `https://www.w3.org/2018/credentials/v1` context MUST always be the first context. In addition, when issuing BBS+ credentials the `https://w3id.org/security/bbs/v1` URL MUST be present in the context. For convenience this URL will be automatically added to the `@context` of the credential if not present. @@ -63,7 +64,7 @@ Writing JSON-LD contexts can be a daunting task and is out of scope of this guid - [Citizenship Vocabulary](https://w3c-ccg.github.io/citizenship-vocab/) - [Traceability Vocabulary](https://w3c-ccg.github.io/traceability-vocab/) -Verifiable credentials are not around that long, so there aren't that many vocabularies ready to use. If you can't use one of the existing vocabularies it is still beneficial to lean on already defined lower level contexts. http://schema.org has a large registry of definitions that can be used to build new contexts. The example vocabularies linked above all make use of types from http://schema.org +Verifiable credentials are not around that long, so there aren't that many vocabularies ready to use. If you can't use one of the existing vocabularies it is still beneficial to lean on already defined lower level contexts. [http://schema.org](http://schema.org) has a large registry of definitions that can be used to build new contexts. The example vocabularies linked above all make use of types from [http://schema.org](http://schema.org). For the remainder of this guide, we will be using the example `UniversityDegreeCredential` type and `https://www.w3.org/2018/credentials/examples/v1` context from the Verifiable Credential Data Model. You should not use this for production use cases. @@ -209,4 +210,20 @@ Call the `/credentials/w3c` endpoint to retrieve all JSON-LD credentials in your ## Present Proof -> ⚠️ TODO: https://github.com/hyperledger/aries-cloudagent-python/pull/1125 +> ⚠️ TODO: [https://github.com/hyperledger/aries-cloudagent-python/pull/1125](https://github.com/hyperledger/aries-cloudagent-python/pull/1125) + +## VC-API + +In order to support these functions outside of the respective DIDComm protocols, a set of endpoints conforming to the [vc-api](https://w3c-ccg.github.io/vc-api) specification are available. These endpoints should be used by a controller when building an identity platform. 
+ +These endpoints include: + +- `GET /vc/credentials` -> returns a list of all stored json-ld credentials +- `GET /vc/credentials/{id}` -> returns a json-ld credential based on it's ID +- `POST /vc/credentials/issue` -> signs a credential +- `POST /vc/credentials/verify` -> verifies a credential +- `POST /vc/credentials/store` -> stores an issued credential +- `POST /vc/presentations/prove` -> proves a presentation +- `POST /vc/presentations/verify` -> verifies a presentation + +To learn more about using these endpoints, please refer to the available [postman collection](../demo/AriesPostmanDemo.md#experimenting-with-the-vc-api-endpoints). diff --git a/docs/features/Mediation.md b/docs/features/Mediation.md index ab9c7e33..8814f7bd 100644 --- a/docs/features/Mediation.md +++ b/docs/features/Mediation.md @@ -1,34 +1,35 @@ # Mediation docs ## Concepts -* **DIDComm Message Forwarding** - Sending an encrypted message to its recipient by first sending it to a third party responsible for forwarding the message on. Message contents are encrypted once for the recipient then wrapped in a [forward message](https://github.com/hyperledger/aries-rfcs/blob/master/concepts/0094-cross-domain-messaging/README.md#corerouting10forward) encrypted to the third party. -* **Mediator** - An agent that forwards messages to a client over a DIDComm connection. -* **Mediated Agent** or **Mediation client** - The agent(s) to which a mediator is willing to forward messages. -* **Mediation Request** - A message from a client to a mediator requesting mediation or forwarding. -* **Keylist** - The list of public keys used by the mediator to lookup to which connection a forward message should be sent. Each mediated agent is responsible for maintaining the keylist with the mediator. -* **Keylist Update** - A message from a client to a mediator informing the mediator of changes to the keylist. -* **Default Mediator** - A mediator to be used with every newly created DIDComm connection. -* **Mediation Connection** - Connection between the mediator and the mediated agent or client. Agents can use as many mediators as the identity owner sees fit. Requests for mediation are handled on a per connection basis. -* See [Aries RFC 0211: Coordinate Mediation Protocol](https://github.com/hyperledger/aries-rfcs/blob/master/features/0211-route-coordination/README.md) for additional details on message attributes and more. + +- **DIDComm Message Forwarding** - Sending an encrypted message to its recipient by first sending it to a third party responsible for forwarding the message on. Message contents are encrypted once for the recipient then wrapped in a [forward message](https://github.com/hyperledger/aries-rfcs/blob/master/concepts/0094-cross-domain-messaging/README.md#corerouting10forward) encrypted to the third party. +- **Mediator** - An agent that forwards messages to a client over a DIDComm connection. +- **Mediated Agent** or **Mediation client** - The agent(s) to which a mediator is willing to forward messages. +- **Mediation Request** - A message from a client to a mediator requesting mediation or forwarding. +- **Keylist** - The list of public keys used by the mediator to lookup to which connection a forward message should be sent. Each mediated agent is responsible for maintaining the keylist with the mediator. +- **Keylist Update** - A message from a client to a mediator informing the mediator of changes to the keylist. +- **Default Mediator** - A mediator to be used with every newly created DIDComm connection. 
+- **Mediation Connection** - Connection between the mediator and the mediated agent or client. Agents can use as many mediators as the identity owner sees fit. Requests for mediation are handled on a per connection basis. +- See [Aries RFC 0211: Coordinate Mediation Protocol](https://github.com/hyperledger/aries-rfcs/blob/master/features/0211-route-coordination/README.md) for additional details on message attributes and more. ## Command Line Arguments -* `--open-mediation` - Instructs mediators to automatically grant all incoming mediation requests. -* `--mediator-invitation` - Receive invitation, send mediation request and set as default mediator. -* `--mediator-connections-invite` - Connect to mediator through a connection invitation. If not specified, connect using an OOB invitation. -* `--default-mediator-id` - Set pre-existing mediator as default mediator. -* `--clear-default-mediator` - Clear the stored default mediator. +- `--open-mediation` - Instructs mediators to automatically grant all incoming mediation requests. +- `--mediator-invitation` - Receive invitation, send mediation request and set as default mediator. +- `--mediator-connections-invite` - Connect to mediator through a connection invitation. If not specified, connect using an OOB invitation. +- `--default-mediator-id` - Set pre-existing mediator as default mediator. +- `--clear-default-mediator` - Clear the stored default mediator. The minimum set of arguments *required* to enable mediation are: -```bash= +```bash aca-py start ... \ --open-mediation ``` To automate the mediation process on startup, *additionally* specify the following argument on the *mediated* agent (not the mediator): -```bash= +```bash aca-py start ... \ --mediator-invitation "" ``` @@ -38,37 +39,37 @@ If a default mediator has already been established, then the `--default-mediator ## DIDComm Messages See [Aries RFC 0211: Coordinate Mediation Protocol](https://github.com/hyperledger/aries-rfcs/blob/master/features/0211-route-coordination/README.md). - + ## Admin API -* `GET mediation/requests` - * Return a list of all mediation records. Filter by `conn_id`, `state`, `mediator_terms` and `recipient_terms`. -* `GET mediation/requests/{mediation_id}` - * Retrieve a mediation record by id. -* `DELETE mediation/requests/{mediation_id}` - * Delete mediation record by id. -* `POST mediation/requests/{mediation_id}/grant` - * As a mediator, grant a stored mediation request and send `granted` message to client. -* `POST mediation/requests/{mediation_id}/deny` - * As a mediator, deny a stored mediation request and send `denied` message to client. -* `POST mediation/request/{conn_id}` - * Send a mediation request to connection identified by the given connection ID. -* `GET mediation/keylists` - * Returns key list associated with a connection. Filter on `client` for keys mediated by other agents and `server` for keys mediated by this agent. -* `POST mediation/keylists/{mediation_id}/send-keylist-update` - * Send keylist update message to mediator identified by the given mediation ID. Updates contained in body of request. -* `POST mediation/keylists/{mediation_id}/send-keylist-query` - * Send keylist query message to mediator identified by the given mediation ID. -* `GET mediation/default-mediator` **(PR pending)** - * Retrieve the currently set default mediator. -* `PUT mediation/{mediation_id}/default-mediator` **(PR pending)** - * Set the mediator identified by the given mediation ID as the default mediator. 
-* `DELETE mediation/default-mediator` **(PR pending)** - * Clear the currently set default mediator (mediation status is maintained and remains functional, just not used as the default). +- `GET mediation/requests` + - Return a list of all mediation records. Filter by `conn_id`, `state`, `mediator_terms` and `recipient_terms`. +- `GET mediation/requests/{mediation_id}` + - Retrieve a mediation record by id. +- `DELETE mediation/requests/{mediation_id}` + - Delete mediation record by id. +- `POST mediation/requests/{mediation_id}/grant` + - As a mediator, grant a stored mediation request and send `granted` message to client. +- `POST mediation/requests/{mediation_id}/deny` + - As a mediator, deny a stored mediation request and send `denied` message to client. +- `POST mediation/request/{conn_id}` + - Send a mediation request to connection identified by the given connection ID. +- `GET mediation/keylists` + - Returns key list associated with a connection. Filter on `client` for keys mediated by other agents and `server` for keys mediated by this agent. +- `POST mediation/keylists/{mediation_id}/send-keylist-update` + - Send keylist update message to mediator identified by the given mediation ID. Updates contained in body of request. +- `POST mediation/keylists/{mediation_id}/send-keylist-query` + - Send keylist query message to mediator identified by the given mediation ID. +- `GET mediation/default-mediator` **(PR pending)** + - Retrieve the currently set default mediator. +- `PUT mediation/{mediation_id}/default-mediator` **(PR pending)** + - Set the mediator identified by the given mediation ID as the default mediator. +- `DELETE mediation/default-mediator` **(PR pending)** + - Clear the currently set default mediator (mediation status is maintained and remains functional, just not used as the default). ## Mediator Message Flow Overview -![Mediator Message Flow](/docs/assets/mediation-message-flow.png) +![Mediator Message Flow](../assets/mediation-message-flow.png) ## Using a Mediator diff --git a/docs/features/Multicredentials.md b/docs/features/Multicredentials.md index 1c9e1f72..9822438b 100644 --- a/docs/features/Multicredentials.md +++ b/docs/features/Multicredentials.md @@ -6,4 +6,4 @@ With the Present Proof Protocol v2, verifiers can ask for a combination of crede Moreover, it is possible to make similar presentation proof requests using the or logical operator. For instance, a verifier can ask for either an eID in AnonCreds format or an eID in W3C Verifiable Credential format. This has the potential to solve the interoperability problem of different credential formats and ecosystems from a user point of view by shifting the requirement of holding/accepting different credential formats from identity holders to verifiers. Here again, using Aries Cloud Agent Python as the underlying verifier agent can tackle such complex presentation proof requests since the agent is capable of verifying both type of credential formats and proof types. -In the future, it would be even possible to put mDoc as an attachment with an and or or logical operation, along with AnonCreds and/or W3C Verifiable Credentials. For this to happen, Aca-Py either needs the capabilities to validate mDocs internally or to connect third-party endpoints to validate and get a response. \ No newline at end of file +In the future, it would be even possible to put mDoc as an attachment with an and or or logical operation, along with AnonCreds and/or W3C Verifiable Credentials. 
For this to happen, Aca-Py either needs the capabilities to validate mDocs internally or to connect third-party endpoints to validate and get a response. diff --git a/docs/features/Multiledger.md b/docs/features/Multiledger.md index c83b368b..db70a3e4 100644 --- a/docs/features/Multiledger.md +++ b/docs/features/Multiledger.md @@ -7,7 +7,7 @@ More background information including problem statement, design (algorithm) and ## Table of Contents - [Usage](#usage) - - [Example config file:](#example-config-file) + - [Example config file](#example-config-file) - [Config properties](#config-properties) - [Multi-ledger Admin API](#multi-ledger-admin-api) - [Ledger Selection](#ledger-selection) @@ -25,8 +25,9 @@ Multi-ledger is disabled by default. You can enable support for multiple ledgers If `--genesis-transactions-list` is specified, then `--genesis-url, --genesis-file, --genesis-transactions` should not be specified. -### Example config file: -``` +### Example config file + +```yaml - id: localVON is_production: false genesis_url: 'http://host.docker.internal:9000/genesis' @@ -36,7 +37,7 @@ If `--genesis-transactions-list` is specified, then `--genesis-url, --genesis-fi genesis_url: 'http://test.bcovrin.vonx.io/genesis' ``` -``` +```yaml - id: localVON is_production: false genesis_url: 'http://host.docker.internal:9000/genesis' @@ -49,12 +50,15 @@ If `--genesis-transactions-list` is specified, then `--genesis-url, --genesis-fi - id: greenlightDev is_production: true is_write: true - genesis_url: 'http://dev.greenlight.bcovrin.vonx.io/genesis' + genesis_url: 'http://test.bcovrin.vonx.io/genesis' ``` -Note: `is_write` property means that the ledger is write configurable. With reference to the above config example, both `bcovrinTest` and `greenlightDev` ledgers are write configurable. By default, on startup `bcovrinTest` will be the write ledger as it is the topmost write configurable production ledger, [more details](#write-requests) regarding the selection rule. Using `PUT /ledger/{ledger_id}/set-write-ledger` endpoint, either `greenlightDev` and `bcovrinTest` can be set as the write ledger. +Note: `is_write` property means that the ledger is write configurable. With reference to the above config example, both `bcovrinTest` and (the no longer available -- in the above its pointing to BCovrin Test as well) `greenlightDev` ledgers are write configurable. By default, on startup `bcovrinTest` will be the write ledger as it is the topmost write configurable production ledger, [more details](#write-requests) regarding the selection rule. Using `PUT /ledger/{ledger_id}/set-write-ledger` endpoint, either `greenlightDev` and `bcovrinTest` can be set as the write ledger. -``` +> Note 2: The `greenlightDev` ledger is no longer available, so both ledger entries in the example above and below +intentionally point to the same ledger URL. + +```yaml - id: localVON is_production: false is_write: true @@ -64,12 +68,13 @@ Note: `is_write` property means that the ledger is write configurable. With refe genesis_url: 'http://test.bcovrin.vonx.io/genesis' - id: greenlightDev is_production: true - genesis_url: 'http://dev.greenlight.bcovrin.vonx.io/genesis' + genesis_url: 'http://test.bcovrin.vonx.io/genesis' ``` Note: For instance with regards to example config above, `localVON` will be the write ledger, as there are no production ledgers which are configurable it will choose the topmost write configurable non production ledger. 
### Config properties + For each ledger, the required properties are as following: - `id`\*: The id (or name) of the ledger, can also be used as the pool name if none provided @@ -82,6 +87,7 @@ For connecting to ledger, one of the following needs to be specified: - `genesis_url`: The url from which to download the genesis transactions to use for connecting to an Indy ledger. Optional properties: + - `pool_name`: name of the indy pool to be opened - `keepalive`: how many seconds to keep the ledger open - `socks_proxy` @@ -91,7 +97,6 @@ Optional properties: Note: Both `endorser_did` and `endorser_alias` are part of the endorser info. Whenever a write ledger is selected using `PUT /ledger/{ledger_id}/set-write-ledger`, the endorser info associated with that ledger in the config updates the `endorser.endorser_public_did` and `endorser.endorser_alias` profile setting respectively. - ## Multi-ledger Admin API Multi-ledger related actions are grouped under the `ledger` topic in the SwaggerUI. @@ -110,6 +115,7 @@ Set active `write_ledger's` `ledger_id` ### Read Requests The following process is executed for these functions in ACA-Py: + 1. `get_schema` 2. `get_credential_definition` 3. `get_revoc_reg_def` @@ -125,9 +131,9 @@ If multiple ledgers are configured then `IndyLedgerRequestsExecutor` service ext #### For checking ledger in parallel - `lookup_did_in_configured_ledgers` function - - If the calling function (above) is in [1-4], then check the `DID` in `cache` for a corresponding applicable `ledger_id`. If found, return the ledger info, else continue. + - If the calling function (above) is in items 1-4, then check the `DID` in `cache` for a corresponding applicable `ledger_id`. If found, return the ledger info, else continue. - Otherwise, launch parallel `_get_ledger_by_did` tasks for each of the configured ledgers. - - As these tasks get finished, construct `applicable_prod_ledgers` and `applicable_non_prod_ledgers` dictionaries, each with `self_certified` and `non_self_certified` inner dict which are sorted by the original order or index. + - As these tasks get finished, construct `applicable_prod_ledgers` and `applicable_non_prod_ledgers` dictionaries, each with `self_certified` and `non_self_certified` inner dict which are sorted by the original order or index. - Order/preference for selection: `self_certified` > `production` > `non_production` - Checks `production` ledger where the `DID` is `self_certified` - Checks `non_production` ledger where the `DID` is `self_certified` @@ -144,7 +150,7 @@ If multiple ledgers are configured then `IndyLedgerRequestsExecutor` service ext ### Write Requests -On startup, the first configured applicable ledger is assigned as the `write_ledger` [`BaseLedger`], the selection is dependent on the order (top-down) and whether it is `production` or `non_production`. For instance, considering this [example configuration](#example-config-file), ledger `bcovrinTest` will be set as `write_ledger` as it is the topmost `production` ledger. If no `production` ledgers are included in configuration then the topmost `non_production` ledger is selected. +On startup, the first configured applicable ledger is assigned as the `write_ledger` (`BaseLedger`), the selection is dependent on the order (top-down) and whether it is `production` or `non_production`. For instance, considering this [example configuration](#example-config-file), ledger `bcovrinTest` will be set as `write_ledger` as it is the topmost `production` ledger. 
If no `production` ledgers are included in configuration then the topmost `non_production` ledger is selected. ## A Special Warning for TAA Acceptance @@ -173,9 +179,10 @@ There should be no impact/change in functionality to any ACA-Py protocols. Added `build_and_return_get_nym_request` and `submit_get_nym_request` helper functions to `IndySdkLedger` and `IndyVdrLedger`. -Best practice/feedback emerging from `Askar session deadlock` issue and `endorser refactoring` PR was also addressed here by not leaving sessions open unnecessarily and changing `context.session` to `context.profile.session`, etc. +Best practice/feedback emerging from `Askar session deadlock` issue and `endorser refactoring` PR was also addressed here by not leaving sessions open unnecessarily and changing `context.session` to `context.profile.session`, etc. These changes are made here: + - `./aries_cloudagent/ledger/routes.py` - `./aries_cloudagent/messaging/credential_definitions/routes.py` - `./aries_cloudagent/messaging/schemas/routes.py` diff --git a/docs/features/Multitenancy.md b/docs/features/Multitenancy.md index acefc3ea..07b63c32 100644 --- a/docs/features/Multitenancy.md +++ b/docs/features/Multitenancy.md @@ -11,9 +11,9 @@ This allows ACA-Py to be used for a wider range of use cases. One use case could - [Usage](#usage) - [Multi-tenant Admin API](#multi-tenant-admin-api) - [Managed vs Unmanaged Mode](#managed-vs-unmanaged-mode) - - [Managed](#managed) - - [Unmanaged](#unmanaged) - - [Usage](#usage-1) + - [Managed Mode](#managed-mode) + - [Unmanaged Mode](#unmanaged-mode) + - [Mode Usage](#mode-usage) - [Message Routing](#message-routing) - [Relaying](#relaying) - [Mediation](#mediation) @@ -76,11 +76,11 @@ See the SwaggerUI for the exact API definition for multi-tenancy. Multi-tenancy in ACA-Py is designed with two key management modes in mind. -### Managed +### Managed Mode In **`managed`** mode, ACA-Py will manage the key for the wallet. This is the easiest configuration as it allows ACA-Py to fully control the wallet. When a message is received from another agent it can immediately unlock the wallet and process the message. The wallet key is stored encrypted in the base wallet. -### Unmanaged +### Unmanaged Mode In **`unmanaged`** mode, ACA-Py won't manage the key for the wallet. The key is not stored in the base wallet, which means the key to unlock the wallet needs to be provided whenever the wallet is used. When a message from another agent is received, ACA-Py cannot immediately unlock the wallet and process the message. See [Authentication](#authentication) for more info. @@ -88,7 +88,7 @@ It is important to note unmanaged mode doesn't provide a lot of security over ma > :warning: Although support for unmanaged mode is mostly in place, the receiving of messages from other agents in unmanaged mode is not supported yet. This means unmanaged mode can not be used yet. -### Usage +### Mode Usage The mode used can be specified when creating a wallet using the `key_management_mode` parameter. @@ -309,7 +309,7 @@ After registering a tenant which effectively creates a subwallet, you may need t ### Update a tenant -The following properties can be updated: `image_url`, `label`, `wallet_dispatch_type`, and `wallet_webhook_urls` for tenants of a multitenancy wallet. To update these properties you will `PUT` a request json containing the properties you wish to update along with the updated values to the `/multitenancy/wallet/${TENANT_WALLET_ID}` admin endpoint. 
If the Admin API endoint is protected, you will also include the Admin API Key in the request header. +The following properties can be updated: `image_url`, `label`, `wallet_dispatch_type`, and `wallet_webhook_urls` for tenants of a multitenancy wallet. To update these properties you will `PUT` a request json containing the properties you wish to update along with the updated values to the `/multitenancy/wallet/${TENANT_WALLET_ID}` admin endpoint. If the Admin API endpoint is protected, you will also include the Admin API Key in the request header. Example @@ -352,7 +352,7 @@ echo $update_tenant | curl -X PUT "${ACAPY_ADMIN_URL}/multitenancy/wallet/${TEN } ``` -> An Admin API Key is all that is ALLOWED to be included in a request header during an update. Inluding the Bearer token header will result in a 404: Unauthorized error +> An Admin API Key is all that is ALLOWED to be included in a request header during an update. Including the Bearer token header will result in a 404: Unauthorized error ### Remove a tenant @@ -379,7 +379,7 @@ curl -X POST "${ACAPY_ADMIN_URL}/multitenancy/wallet/{wallet_id}/remove" \ ### Per tenant settings -To allow configurability of ACA-Py startup parameters/environment variables at a tenant/subwallet level. [PR#2233](https://github.com/hyperledger/aries-cloudagent-python/pull/2233) will provide the ability to update the following subset of settings when creating or updating the subwallet: +To allow the configuring of ACA-Py startup parameters/environment variables at a tenant/subwallet level. [PR#2233](https://github.com/hyperledger/aries-cloudagent-python/pull/2233) will provide the ability to update the following subset of settings when creating or updating the subwallet: | Labels | | Setting | |---|---|---| @@ -391,7 +391,7 @@ To allow configurability of ACA-Py startup parameters/environment variables at a | ACAPY_AUTO_PING_CONNECTION | auto-ping-connection | auto_ping_connection | | ACAPY_MONITOR_PING | monitor-ping | debug.monitor_ping | | ACAPY_AUTO_RESPOND_MESSAGES | auto-respond-messages | debug.auto_respond_messages | -| ACAPY_AUTO_RESPOND_CREDENTIAL_OFFER | auto-respond-credential-offer | debug.auto_resopnd_credential_offer | +| ACAPY_AUTO_RESPOND_CREDENTIAL_OFFER | auto-respond-credential-offer | debug.auto_respond_credential_offer | | ACAPY_AUTO_RESPOND_CREDENTIAL_REQUEST | auto-respond-credential-request | debug.auto_respond_credential_request | | ACAPY_AUTO_VERIFY_PRESENTATION | auto-verify-presentation | debug.auto_verify_presentation | | ACAPY_NOTIFY_REVOCATION | notify-revocation | revocation.notify | @@ -405,7 +405,8 @@ To allow configurability of ACA-Py startup parameters/environment variables at a Added `extra_settings` dict field to request schema. `extra_settings` can be configured in the request body as below: **`Example Request`** - ``` + + ```json { "wallet_name": " ... ", "default_label": " ... ", @@ -434,7 +435,8 @@ To allow configurability of ACA-Py startup parameters/environment variables at a Added `extra_settings` dict field to request schema. **`Example Request`** - ``` + +```json { "wallet_webhook_urls": [ ... 
], "wallet_dispatch_type": "default", @@ -446,11 +448,11 @@ To allow configurability of ACA-Py startup parameters/environment variables at a "ACAPY_PUBLIC_INVITES": false }, } - ``` +``` - ```sh +```sh echo $update_tenant | curl -X PUT "${ACAPY_ADMIN_URL}/multitenancy/wallet/${WALLET_ID}" \ -H "Content-Type: application/json" \ -H "x-api-key: $ACAPY_ADMIN_URL_API_KEY" \ -d @- - ``` +``` diff --git a/docs/features/PlugIns.md b/docs/features/PlugIns.md index 0ea508d2..7008dc02 100644 --- a/docs/features/PlugIns.md +++ b/docs/features/PlugIns.md @@ -4,15 +4,14 @@ Plug-ins are loaded on Aca-Py startup based on the following parameters: -* `--plugin` - identifies the plug-in library to load -* `--block-plugin` - identifies plug-ins (including built-ins) that are *not* to be loaded -* `--plugin-config` - identify a configuration parameter for a plug-in -* `--plugin-config-value` - identify a *value* for a plug-in configuration - +- `--plugin` - identifies the plug-in library to load +- `--block-plugin` - identifies plug-ins (including built-ins) that are *not* to be loaded +- `--plugin-config` - identify a configuration parameter for a plug-in +- `--plugin-config-value` - identify a *value* for a plug-in configuration The `--plug-in` parameter specifies a package that is loaded by Aca-Py at runtime, and extends Aca-Py by adding support for additional protocols and message types, and/or extending the Admin API with additional endpoints. -The original plug-in design (which we will call the "old" model) explicitly indluded `message_types.py` `routes.py` (to add Admin API's). But functionality was added later (we'll call this the "new" model) to allow the plug-in to include a generic `setup` package that could perform arbitrary initialization. The "new" model also includes support for a `definition.py` file that can specify plug-in version information (major/minor plug-in version, as well as the minimum supported version (if another agent is running an older version of the plug-in)). +The original plug-in design (which we will call the "old" model) explicitly included `message_types.py` `routes.py` (to add Admin API's). But functionality was added later (we'll call this the "new" model) to allow the plug-in to include a generic `setup` package that could perform arbitrary initialization. The "new" model also includes support for a `definition.py` file that can specify plug-in version information (major/minor plug-in version, as well as the minimum supported version (if another agent is running an older version of the plug-in)). You can discover which plug-ins are installed in an aca-py instance by calling (in the "server" section) the `GET /plugins` endpoint. (Note that this will return all loaded protocols, including the built-ins. You can call the `GET /status/config` to inspect the Aca-Py configuration, which will include the configuration for the *external* plug-ins.) @@ -22,7 +21,7 @@ If a setup method is provided, it will be called. If not, the `message_types.py This would be in the `package/module __init__.py`: -``` +```python async def setup(context: InjectionContext): pass ``` @@ -47,7 +46,7 @@ If `routes.py` is available, then Aca-Py will call the following functions to in If `definition.py` is available, Aca-Py will read this package to determine protocol version information. 
An example follows (this is an example that specifies two protocol versions): -``` +```json versions = [ { "major_version": 1, @@ -71,7 +70,6 @@ The attributes are: - `minimum_minor_version` - specifies the minimum supported version (if a lower version is installed in another agent) - `path` - specifies the sub-path within the package for this version - ## Loading Aca-Py Plug-Ins at Runtime The load sequence for a plug-in (the "Startup" class depends on how Aca-Py is running - `upgrade`, `provision` or `start`): @@ -150,7 +148,7 @@ Most Aca-Py plug-ins provide support for installing the plug-in using [poetry](h TBD -# Aca-Py Plug-ins +## Aca-Py Plug-ins This list was originally published in [this hackmd document](https://hackmd.io/m2AZebwJRkm6sWgO64-5xQ). @@ -163,28 +161,29 @@ This list was originally published in [this hackmd document](https://hackmd.io/m | Indicio | Question & Answer | Non-Aries Protocol | Aug 2022 | https://github.com/Indicio-tech/acapy-plugin-qa | | Indicio | Acapy-plugin-pickup | Fetching Messages from Mediator | Aug 2022 | https://github.com/Indicio-tech/acapy-plugin-pickup | | Indicio | Machine Readable GF | Governance Framework | Mar 2022 | https://github.com/Indicio-tech/mrgf | -| Indicio | Cache Redis | Cache for Scaleability | Jul 2022 | https://github.com/Indicio-tech/aries-acapy-cache-redis | +| Indicio | Cache Redis | Cache for Scalability | Jul 2022 | https://github.com/Indicio-tech/aries-acapy-cache-redis | | SICPA Dlab | Kafka Events | Event Bus Integration | Aug 2022 | https://github.com/sicpa-dlab/aries-acapy-plugin-kafka-events | -| SICPA Dlab | DidComm Resolver | Unversal Resolver for DIDComm | Aug 2022 | https://github.com/sicpa-dlab/acapy-resolver-didcomm | +| SICPA Dlab | DidComm Resolver | Universal Resolver for DIDComm | Aug 2022 | https://github.com/sicpa-dlab/acapy-resolver-didcomm | | SICPA Dlab | Universal Resolver | Multi-ledger Reading | Jul 2021 | https://github.com/sicpa-dlab/acapy-resolver-universal | | DDX | mydata-did-protocol | | Oct 2022 | https://github.com/decentralised-dataexchange/acapy-mydata-did-protocol | | BCGov | Basic Message Storage | Basic message storage (traction) | Dec 2022 | https://github.com/bcgov/traction/tree/develop/plugins/basicmessage_storage | | BCGov | Multi-tenant Provider | Multi-tenant Provider (traction) | Dec 2022 | https://github.com/bcgov/traction/tree/develop/plugins/multitenant_provider | | BCGov | Traction Innkeeper | Innkeeper (traction) | Feb 2023 | https://github.com/bcgov/traction/tree/develop/plugins/traction_innkeeper | - -# Reference +## References The following links may be helpful or provide additional context for the current plug-in support. (These are links to issues or pull requests that were raised during plug-in development.) 
Configuration params: - https://github.com/hyperledger/aries-cloudagent-python/issues/1121 - https://hackmd.io/ROUzENdpQ12cz3UB9qk1nA - https://github.com/hyperledger/aries-cloudagent-python/pull/1226 + +- https://github.com/hyperledger/aries-cloudagent-python/issues/1121 +- https://hackmd.io/ROUzENdpQ12cz3UB9qk1nA +- https://github.com/hyperledger/aries-cloudagent-python/pull/1226 Loading plug-ins: - https://github.com/hyperledger/aries-cloudagent-python/pull/1086 + +- https://github.com/hyperledger/aries-cloudagent-python/pull/1086 Versioning for plug-ins: - https://github.com/hyperledger/aries-cloudagent-python/pull/443 +- https://github.com/hyperledger/aries-cloudagent-python/pull/443 diff --git a/docs/features/SelectiveDisclosureJWTs.md b/docs/features/SelectiveDisclosureJWTs.md index ed21f047..23489fae 100644 --- a/docs/features/SelectiveDisclosureJWTs.md +++ b/docs/features/SelectiveDisclosureJWTs.md @@ -11,7 +11,8 @@ In addition, this implementation includes an optional mechanism for key binding, The issuer determines which claims in an SD-JWT can be selectively disclosable. In this implementation, all claims at all levels of the JSON structure are by default selectively disclosable. If the issuer wishes for certain claims to always be visible, they can indicate which claims should not be selectively disclosable, as described below. Essential verification data such as `iss`, `iat`, `exp`, and `cnf` are always visible. The issuer creates a list of JSON paths for the claims that will not be selectively disclosable. Here is an example payload: -``` + +```json { "birthdate": "1940-01-01", "address": { @@ -25,20 +26,22 @@ The issuer creates a list of JSON paths for the claims that will not be selectiv ``` -| Attribute to access | JSON path | -|--------------|-----------| -| "birthdate" | "birthdate" | -| The country attribute within the address dictionary | "address.country" | -| The second item in the nationalities list | "nationalities[1] | -| All items in the nationalities list | "nationalities[0:2]" | +| Attribute to access | JSON path | +| --------------------------------------------------- | -------------------- | +| "birthdate" | "birthdate" | +| The country attribute within the address dictionary | "address.country" | +| The second item in the nationalities list | "nationalities[1] | +| All items in the nationalities list | "nationalities[0:2]" | The [specification](https://datatracker.ietf.org/doc/html/draft-ietf-oauth-selective-disclosure-jwt-05#name-nested-data-in-sd-jwts) defines options for how the issuer can handle nested structures with respect to selective disclosability. As mentioned, all claims at all levels of the JSON structure are by default selectively disclosable. ### [Option 1: Flat SD-JWT](https://datatracker.ietf.org/doc/html/draft-ietf-oauth-selective-disclosure-jwt-05#section-5.7.1) + The issuer can decide to treat the `address` claim in the above example payload as a block that can either be disclosed completely or not at all. The issuer lists out all the claims inside "address" in the `non_sd_list`, but not `address` itself: -``` + +```json non_sd_list = [ "address.street_address", "address.locality", @@ -48,26 +51,28 @@ non_sd_list = [ ``` ### [Option 2: Structured SD-JWT](https://datatracker.ietf.org/doc/html/draft-ietf-oauth-selective-disclosure-jwt-05#section-5.7.2) + The issuer may instead decide to make the `address` claim contents selectively disclosable individually. The issuer lists only "address" in the `non_sd_list`. 
-``` + +```json non_sd_list = ["address"] ``` ### [Option 3: SD-JWT with Recursive Disclosures](https://datatracker.ietf.org/doc/html/draft-ietf-oauth-selective-disclosure-jwt-05#section-5.7.3) + The issuer may also decide to make the `address` claim contents selectively disclosable recursively, i.e., the `address` claim is made selectively disclosable as well as its sub-claims. The issuer lists neither `address` nor the subclaims of `address` in the `non_sd_list`, leaving all with their default selective disclosability. If all claims can be selectively disclosable, the `non_sd_list` need not be defined explicitly. - ## Walk-Through of SD-JWT Implementation ### Signing SD-JWTs -#### Example input to `/wallet/sd-jwt/sign` endpoint: +#### Example input to `/wallet/sd-jwt/sign` endpoint -``` +```json { "did": "WpVJtxKVwGQdRpQP8iwJZy", "headers": {}, @@ -99,42 +104,46 @@ The issuer lists neither `address` nor the subclaims of `address` in the `non_sd } ``` -#### Output: -``` + +#### Output + +```bash "eyJ0eXAiOiAiSldUIiwgImFsZyI6ICJFZERTQSIsICJraWQiOiAiZGlkOnNvdjpXcFZKdHhLVndHUWRScFFQOGl3Slp5I2tleS0xIn0.eyJfc2QiOiBbIkR0a21ha3NkZGtHRjFKeDBDY0kxdmxRTmZMcGFnQWZ1N3p4VnBGRWJXeXciLCAiSlJLb1E0QXVHaU1INWJIanNmNVV4YmJFeDh2YzFHcUtvX0l3TXE3Nl9xbyIsICJNTTh0TlVLNUstR1lWd0swX01kN0k4MzExTTgwVi13Z0hRYWZvRkoxS09JIiwgIlBaM1VDQmdadVRMMDJkV0pxSVY4elUtSWhnalJNX1NTS3dQdTk3MURmLTQiLCAiX294WGNuSW5Yai1SV3BMVHNISU5YaHFrRVAwODkwUFJjNDBISWE1NElJMCIsICJhdnRLVW5Sdnc1clV0TnZfUnAwUll1dUdkR0RzcnJPYWJfVjR1Y05RRWRvIiwgInByRXZJbzBseTVtNTVsRUpTQUdTVzMxWGdVTElOalo5ZkxiRG81U1pCX0UiXSwgImdpdmVuX25hbWUiOiAiSm9obiIsICJmYW1pbHlfbmFtZSI6ICJEb2UiLCAibmF0aW9uYWxpdGllcyI6IFt7Ii4uLiI6ICJPdU1wcEhpYzEySjYzWTBIY2Ffd1BVeDJCTGdUQVdZQjJpdXpMY3lvcU5JIn0sIHsiLi4uIjogIlIxczlaU3NYeVV0T2QyODdEYy1DTVYyMEdvREF3WUVHV3c4ZkVKd1BNMjAifSwgeyIuLi4iOiAid0lJbjdhQlNDVkFZcUF1Rks3Nmpra3FjVGFvb3YzcUhKbzU5WjdKWHpnUSJ9XSwgImlzcyI6ICJodHRwczovL2V4YW1wbGUuY29tL2lzc3VlciIsICJpYXQiOiAxNjgzMDAwMDAwLCAiZXhwIjogMTg4MzAwMDAwMCwgIl9zZF9hbGciOiAic2hhLTI1NiJ9.cIsuGTIPfpRs_Z49nZcn7L6NUgxQumMGQpu8K6rBtv-YRiFyySUgthQI8KZe1xKyn5Wc8zJnRcWbFki2Vzw6Cw~WyJmWURNM1FQcnZicnZ6YlN4elJsUHFnIiwgIlNBIl0~WyI0UGc2SmZ0UnRXdGFPcDNZX2tscmZRIiwgIkRFIl0~WyJBcDh1VHgxbVhlYUgxeTJRRlVjbWV3IiwgIlVTIl0~WyJ4dkRYMDBmalpmZXJpTmlQb2Q1MXFRIiwgInVwZGF0ZWRfYXQiLCAxNTcwMDAwMDAwXQ~WyJYOTlzM19MaXhCY29yX2hudFJFWmNnIiwgInN1YiIsICJ1c2VyXzQyIl0~WyIxODVTak1hM1k3QlFiWUpabVE3U0NRIiwgInBob25lX251bWJlcl92ZXJpZmllZCIsIHRydWVd~WyJRN1FGaUpvZkhLSWZGV0kxZ0Vaal93IiwgInBob25lX251bWJlciIsICIrMS0yMDItNTU1LTAxMDEiXQ~WyJOeWtVcmJYN1BjVE1ubVRkUWVxZXl3IiwgImVtYWlsIiwgImpvaG5kb2VAZXhhbXBsZS5jb20iXQ~WyJlemJwQ2lnVlhrY205RlluVjNQMGJ3IiwgImJpcnRoZGF0ZSIsICIxOTQwLTAxLTAxIl0~WyJvd3ROX3I5Z040MzZKVnJFRWhQU05BIiwgInN0cmVldF9hZGRyZXNzIiwgIjEyMyBNYWluIFN0Il0~WyJLQXktZ0VaWmRiUnNHV1dNVXg5amZnIiwgInJlZ2lvbiIsICJBbnlzdGF0ZSJd~WyJPNnl0anM2SU9HMHpDQktwa0tzU1pBIiwgImxvY2FsaXR5IiwgIkFueXRvd24iXQ~WyI0Nzg5aG5GSjhFNTRsLW91RjRaN1V3IiwgImNvdW50cnkiLCAiVVMiXQ~WyIyaDR3N0FuaDFOOC15ZlpGc2FGVHRBIiwgImFkZHJlc3MiLCB7Il9zZCI6IFsiTXhKRDV5Vm9QQzFIQnhPRmVRa21TQ1E0dVJrYmNrellza1Z5RzVwMXZ5SSIsICJVYkxmVWlpdDJTOFhlX2pYbS15RHBHZXN0ZDNZOGJZczVGaVJpbVBtMHdvIiwgImhsQzJEYVBwT2t0eHZyeUFlN3U2YnBuM09IZ193Qk5heExiS3lPRDVMdkEiLCAia2NkLVJNaC1PaGFZS1FPZ2JaajhmNUppOXNLb2hyYnlhYzNSdXRqcHNNYyJdfV0~" ``` The `sd_jwt_sign()` method: + - Creates the list of claims that are selectively disclosable - - Uses the `non_sd_list` compared against the list of JSON paths for all claims to create the list of JSON paths for selectively disclosable claims - - Separates list splices if necessary - - Sorts 
the `sd_list` so that the claims deepest in the structure are handled first - - Since we will wrap the selectively disclosable claim keys, the JSON paths for nested structures do not work properly when the claim key is wrapped in an object + - Uses the `non_sd_list` compared against the list of JSON paths for all claims to create the list of JSON paths for selectively disclosable claims + - Separates list splices if necessary + - Sorts the `sd_list` so that the claims deepest in the structure are handled first + - Since we will wrap the selectively disclosable claim keys, the JSON paths for nested structures do not work properly when the claim key is wrapped in an object - Uses the JSON paths in the `sd_list` to find each selectively disclosable claim and wrap it in the `SDObj` defined by the [sd-jwt Python library](https://github.com/openwallet-foundation-labs/sd-jwt-python) and removes/replaces the original entry - - For list items, the element itself is wrapped - - For other objects, the dictionary key is wrapped + - For list items, the element itself is wrapped + - For other objects, the dictionary key is wrapped - With this modified payload, the `SDJWTIssuerACAPy.issue()` method: - - Checks if there are selectively disclosable claims at any level in the payload - - Assembles the SD-JWT payload and creates the disclosures - - Calls `SDJWTIssuerACAPy._create_signed_jws()`, which is redefined in order to use the ACA-Py `jwt_sign` method and which creates the JWT - - Combines and returns the signed JWT with its disclosures and option key binding JWT, as indicated in the [specification](https://datatracker.ietf.org/doc/html/draft-ietf-oauth-selective-disclosure-jwt-05#name-sd-jwt-structure) - - + - Checks if there are selectively disclosable claims at any level in the payload + - Assembles the SD-JWT payload and creates the disclosures + - Calls `SDJWTIssuerACAPy._create_signed_jws()`, which is redefined in order to use the ACA-Py `jwt_sign` method and which creates the JWT + - Combines and returns the signed JWT with its disclosures and option key binding JWT, as indicated in the [specification](https://datatracker.ietf.org/doc/html/draft-ietf-oauth-selective-disclosure-jwt-05#name-sd-jwt-structure) ### Verifying SD-JWTs -#### Example input to `/wallet/sd-jwt/verify` endpoint: +#### Example input to `/wallet/sd-jwt/verify` endpoint Using the output from the `/wallet/sd-jwt/sign` example above, we have decided to only reveal two of the selectively disclosable claims (`user` and `updated_at`) and achieved this by only including the disclosures for those claims. We have also included a key binding JWT following the disclosures. 
-``` + +```json { "sd_jwt": "eyJ0eXAiOiAiSldUIiwgImFsZyI6ICJFZERTQSIsICJraWQiOiAiZGlkOnNvdjpXcFZKdHhLVndHUWRScFFQOGl3Slp5I2tleS0xIn0.eyJfc2QiOiBbIkR0a21ha3NkZGtHRjFKeDBDY0kxdmxRTmZMcGFnQWZ1N3p4VnBGRWJXeXciLCAiSlJLb1E0QXVHaU1INWJIanNmNVV4YmJFeDh2YzFHcUtvX0l3TXE3Nl9xbyIsICJNTTh0TlVLNUstR1lWd0swX01kN0k4MzExTTgwVi13Z0hRYWZvRkoxS09JIiwgIlBaM1VDQmdadVRMMDJkV0pxSVY4elUtSWhnalJNX1NTS3dQdTk3MURmLTQiLCAiX294WGNuSW5Yai1SV3BMVHNISU5YaHFrRVAwODkwUFJjNDBISWE1NElJMCIsICJhdnRLVW5Sdnc1clV0TnZfUnAwUll1dUdkR0RzcnJPYWJfVjR1Y05RRWRvIiwgInByRXZJbzBseTVtNTVsRUpTQUdTVzMxWGdVTElOalo5ZkxiRG81U1pCX0UiXSwgImdpdmVuX25hbWUiOiAiSm9obiIsICJmYW1pbHlfbmFtZSI6ICJEb2UiLCAibmF0aW9uYWxpdGllcyI6IFt7Ii4uLiI6ICJPdU1wcEhpYzEySjYzWTBIY2Ffd1BVeDJCTGdUQVdZQjJpdXpMY3lvcU5JIn0sIHsiLi4uIjogIlIxczlaU3NYeVV0T2QyODdEYy1DTVYyMEdvREF3WUVHV3c4ZkVKd1BNMjAifSwgeyIuLi4iOiAid0lJbjdhQlNDVkFZcUF1Rks3Nmpra3FjVGFvb3YzcUhKbzU5WjdKWHpnUSJ9XSwgImlzcyI6ICJodHRwczovL2V4YW1wbGUuY29tL2lzc3VlciIsICJpYXQiOiAxNjgzMDAwMDAwLCAiZXhwIjogMTg4MzAwMDAwMCwgIl9zZF9hbGciOiAic2hhLTI1NiJ9.cIsuGTIPfpRs_Z49nZcn7L6NUgxQumMGQpu8K6rBtv-YRiFyySUgthQI8KZe1xKyn5Wc8zJnRcWbFki2Vzw6Cw~WyJ4dkRYMDBmalpmZXJpTmlQb2Q1MXFRIiwgInVwZGF0ZWRfYXQiLCAxNTcwMDAwMDAwXQ~WyJYOTlzM19MaXhCY29yX2hudFJFWmNnIiwgInN1YiIsICJ1c2VyXzQyIl0~eyJhbGciOiAiRWREU0EiLCAidHlwIjogImtiK2p3dCIsICJraWQiOiAiZGlkOnNvdjpXcFZKdHhLVndHUWRScFFQOGl3Slp5I2tleS0xIn0.eyJub25jZSI6ICIxMjM0NTY3ODkwIiwgImF1ZCI6ICJodHRwczovL2V4YW1wbGUuY29tL3ZlcmlmaWVyIiwgImlhdCI6IDE2ODgxNjA0ODN9.i55VeR7bNt7T8HWJcfj6jSLH3Q7vFk8N0t7Tb5FZHKmiHyLrg0IPAuK5uKr3_4SkjuGt1_iNl8Wr3atWBtXMDA" } ``` -#### Output: +#### Verify Output + Note that attributes in the `non_sd_list` (`given_name`, `family_name`, and `nationalities`), as well as essential verification data (`iss`, `iat`, `exp`) are visible directly within the payload. The disclosures include only the values for the `user` and `updated_at` claims, since those are the only selectively disclosable claims that the holder presented. The corresponding hashes for those disclosures appear in the `payload["_sd"]` list. -``` + +```json { "headers": { "typ": "JWT", @@ -187,8 +196,9 @@ Note that attributes in the `non_sd_list` (`given_name`, `family_name`, and `nat ``` The `sd_jwt_verify()` method: + - Parses the SD-JWT presentation into its component parts: JWT, disclosures, and optional key binding - - The JWT payload is parsed from its headers and signature + - The JWT payload is parsed from its headers and signature - Creates a list of plaintext disclosures - Calls `SDJWTVerifierACAPy._verify_sd_jwt`, which is redefined in order to use the ACA-Py `jwt_verify` method, and which returns the verified JWT - If key binding is used, the key binding JWT is verified and checked against the expected audience and nonce values diff --git a/docs/features/SupportedRFCs.md b/docs/features/SupportedRFCs.md index f3c87e67..245f881c 100644 --- a/docs/features/SupportedRFCs.md +++ b/docs/features/SupportedRFCs.md @@ -28,15 +28,15 @@ A summary of the Aries Interop Profiles and Aries RFCs supported in ACA-Py can b ## Platform Support -| Platform | Supported | Notes | -| -------- | :-------: | ------- | -| Server | :white_check_mark: | | -| Kubernetes | :white_check_mark: | BC Gov has extensive experience running ACA-Py on Red Hat's OpenShift Kubernetes Distribution. | -| Docker | :white_check_mark: | Official docker images are published to the GitHub [hyperledger/aries-cloudagent-python] container repository. 
| -| Desktop | :warning: | Could be run as a local service on the computer | -| iOS | :x: | | -| Android | :x: | | -| Browser | :x: | | +| Platform | Supported | Notes | +| ---------- | :----------------: | -------------------------------------------------------------------------------------------------------------------------- | +| Server | :white_check_mark: | | +| Kubernetes | :white_check_mark: | BC Gov has extensive experience running ACA-Py on Red Hat's OpenShift Kubernetes Distribution. | +| Docker | :white_check_mark: | Official docker images are published to the GitHub container repository at `ghcr.io/hyperledger/aries-cloudagent-python`. | +| Desktop | :warning: | Could be run as a local service on the computer | +| iOS | :x: | | +| Android | :x: | | +| Browser | :x: | | ## Agent Types @@ -46,7 +46,7 @@ A summary of the Aries Interop Profiles and Aries RFCs supported in ACA-Py can b | Holder | :white_check_mark: | | | Verifier | :white_check_mark: | | | Mediator Service | :white_check_mark: | See the [aries-mediator-service](https://github.com/hyperledger/aries-mediator-service), a pre-configured, production ready Aries Mediator Service based on a released version of ACA-Py. | -| Mediator Client | :white_check_mark: | +| Mediator Client | :white_check_mark: | | | Indy Transaction Author | :white_check_mark: | | | Indy Transaction Endorser | :white_check_mark: | | | Indy Endorser Service | :white_check_mark: | See the [aries-endorser-service](https://github.com/hyperledger/aries-endorser-service), a pre-configured, production ready Aries Endorser Service based on a released version of ACA-Py. | @@ -74,7 +74,7 @@ A summary of the Aries Interop Profiles and Aries RFCs supported in ACA-Py can b ## Secure Storage Types | Secure Storage Types | Supported | Notes | - --- | :--: | -- | +| --- | :--: | -- | | [Aries Askar] | :white_check_mark: | Recommended - Aries Askar provides equivalent/evolved secure storage and cryptography support to the "indy-wallet" part of the Indy SDK. When using Askar (via the `--wallet-type askar` startup parameter), other functionality is handled by [CredX](https://github.com/hyperledger/indy-shared-rs) (AnonCreds) and [Indy VDR](https://github.com/hyperledger/indy-vdr) (Indy ledger interactions). | | [Aries Askar]-AnonCreds | :white_check_mark: | Recommended - When using Askar/AnonCreds (via the `--wallet-type askar-anoncreds` startup parameter), other functionality is handled by [AnonCreds RS](https://github.com/hyperledger/anoncreds-rs) (AnonCreds) and [Indy VDR](https://github.com/hyperledger/indy-vdr) (Indy ledger interactions).

This `wallet-type` will eventually be the same as `askar` when we have fully integrated the AnonCreds RS library into ACA-Py. | | [Indy SDK](https://github.com/hyperledger/indy-sdk/tree/master/docs/design/003-wallet-storage) | :warning: Deprecated | Full support for the features of the "indy-wallet" secure storage capabilities found in the Indy SDK. | @@ -87,7 +87,7 @@ A summary of the Aries Interop Profiles and Aries RFCs supported in ACA-Py can b ## Miscellaneous Features | Feature | Supported | Notes | - --- | :--: | -- | +| --- | :--: | -- | | ACA-Py Plugins | :white_check_mark: | The [ACA-Py Plugins] repository contains a growing set of plugins that are maintained and (mostly) tested against new releases of ACA-Py. | | Multi use invitations | :white_check_mark: | | | Invitations using public did | :white_check_mark: | | @@ -104,7 +104,7 @@ A summary of the Aries Interop Profiles and Aries RFCs supported in ACA-Py can b | SD-JWTs | :white_check_mark: | Signing and verifying SD-JWTs is supported | [ACA-Py Plugins]: https://github.com/hyperledger/aries-acapy-plugins -[Indy SDK to Askar Migration Guide]: ../../deploying/IndySDKtoAskarMigration/ +[Indy SDK to Askar Migration Guide]: ../deploying/IndySDKtoAskarMigration.md [Traction]: https://github.com/bcgov/traction ## Supported RFCs @@ -115,7 +115,7 @@ All RFCs listed in [AIP 1.0](https://github.com/hyperledger/aries-rfcs/tree/main provides notes about the implementation of specific RFCs. | RFC | Supported | Notes | - --- | :--: | -- | +| --- | :--: | -- | | [0025-didcomm-transports](https://github.com/hyperledger/aries-rfcs/tree/b490ebe492985e1be9804fc0763119238b2e51ab/features/0025-didcomm-transports) | :white_check_mark: | ACA-Py currently supports HTTP and WebSockets for both inbound and outbound messaging. Transports are pluggable and an agent instance can use multiple inbound and outbound transports.| | [0160-connection-protocol](https://github.com/hyperledger/aries-rfcs/tree/9b0aaa39df7e8bd434126c4b33c097aae78d65bf/features/0160-connection-protocol) | :white_check_mark: | The agent supports Connection/DID exchange initiated from both plaintext invitations and public DIDs that enable bypassing the invitation message. | @@ -125,8 +125,8 @@ All RFCs listed in [AIP 2.0](https://github.com/hyperledger/aries-rfcs/tree/main are fully supported in ACA-Py **EXCEPT** as noted in the table below. | RFC | Supported | Notes | - --- | :--: | -- | -| [0587-encryption-envelope-v2](https://github.com/hyperledger/aries-rfcs/tree/b3a3942ef052039e73cd23d847f42947f8287da2/features/0587-encryption-envelope-v2) | :construction: | Support for the DIDComm V2 envelope format is a work in progress | +| --- | :--: | -- | +| [0587-encryption-envelope-v2](https://github.com/hyperledger/aries-rfcs/tree/b3a3942ef052039e73cd23d847f42947f8287da2/features/0587-encryption-envelope-v2) | :construction: | Supporting the DIDComm v2 encryption envelope does not make sense until DIDComm v2 is to be supported. | | [0317-please-ack](https://github.com/hyperledger/aries-rfcs/tree/main/features/0317-please-ack) | :x: | An investigation was done into supporting `please-ack` and a number of complications were found. As a result, we expect that `please-ack` will be dropped from AIP 2.0. It has not been implemented by any Aries frameworks or deployments. 
| ### Other Supported RFCs diff --git a/docs/features/UsingOpenAPI.md b/docs/features/UsingOpenAPI.md index 45e75a40..c141e44e 100644 --- a/docs/features/UsingOpenAPI.md +++ b/docs/features/UsingOpenAPI.md @@ -2,13 +2,13 @@ ACA-Py provides an OpenAPI-documented REST interface for administering the agent's internal state and initiating communication with connected agents. -The running agent provides a `Swagger User Interface` that can be browsed and used to test various scenarios manually (see the [Admin API Readme](AdminAPI.md) for details). However, it is often desirable to produce native language interfaces rather than coding `Controllers` using HTTP primitives. This is possible using several public code generation (codegen) tools. This page provides some suggestions based on experience with these tools when trying to generate `Typescript` wrappers. The information should be useful to those trying to generate other languages. Updates to this page based on experience are encouraged. +The running agent provides a `Swagger User Interface` that can be browsed and used to test various scenarios manually (see the [Admin API Readme](./AdminAPI.md) for details). However, it is often desirable to produce native language interfaces rather than coding `Controllers` using HTTP primitives. This is possible using several public code generation (codegen) tools. This page provides some suggestions based on experience with these tools when trying to generate `Typescript` wrappers. The information should be useful to those trying to generate other languages. Updates to this page based on experience are encouraged. ## ACA-Py, OpenAPI Raw Output Characteristics -ACA-Py uses [aiohttp_apispec](https://github.com/maximdanilchenko/aiohttp-apispec) tags in code to produce the OpenAPI spec file at runtime dependent on what features have been loaded. How these tags are created is documented in the [API Standard Behaviour](https://github.com/hyperledger/aries-cloudagent-python/blob/main/AdminAPI.md#api-standard-behaviour) section of the [Admin API Readme](AdminAPI.md). The OpenAPI spec is available in raw, unformatted form from a running ACA-Py instance using a route of `http:///api/docs/swagger.json` or from the browser `Swagger User Interface` directly. +ACA-Py uses [aiohttp_apispec](https://github.com/maximdanilchenko/aiohttp-apispec) tags in code to produce the OpenAPI spec file at runtime dependent on what features have been loaded. How these tags are created is documented in the [API Standard Behavior](https://github.com/hyperledger/aries-cloudagent-python/blob/main/AdminAPI.md#api-standard-behaviour) section of the [Admin API Readme](./AdminAPI.md). The OpenAPI spec is available in raw, unformatted form from a running ACA-Py instance using a route of `http:///api/docs/swagger.json` or from the browser `Swagger User Interface` directly. -The ACA-Py Admin API evolves across releases. To track these changes and ensure conformance with the OpenAPI specification, we provide a tool located at [`scripts/generate-open-api-spec`](scripts/generate-open-api-spec). This tool starts ACA-Py, retrieves the `swagger.json` file, and runs codegen tools to generate specifications in both Swagger and OpenAPI formats with `json` language output. The output of this tool enables comparison with the checked-in `open-api/swagger.json` and `open-api/openapi.json`, and also serves as a useful resource for identifying any non-conformance to the OpenAPI specification. 
At the moment, `validation` is turned off via the `open-api/openAPIJSON.config` file, so warning messages are printed for non-conformance, but the `json` is still output. Most of the warnings reported by `generate-open-api-spec` relate to missing `operationId` fields which results in manufactured method names being created by codegen tools. At the moment, [aiohttp_apispec](https://github.com/maximdanilchenko/aiohttp-apispec) does not support adding `operationId` annotations via tags. +The ACA-Py Admin API evolves across releases. To track these changes and ensure conformance with the OpenAPI specification, we provide a tool located at [`scripts/generate-open-api-spec`](https://github.com/hyperledger/aries-cloudagent-python/blob/main/scripts/generate-open-api-spec). This tool starts ACA-Py, retrieves the `swagger.json` file, and runs codegen tools to generate specifications in both Swagger and OpenAPI formats with `json` language output. The output of this tool enables comparison with the checked-in `open-api/swagger.json` and `open-api/openapi.json`, and also serves as a useful resource for identifying any non-conformance to the OpenAPI specification. At the moment, `validation` is turned off via the `open-api/openAPIJSON.config` file, so warning messages are printed for non-conformance, but the `json` is still output. Most of the warnings reported by `generate-open-api-spec` relate to missing `operationId` fields which results in manufactured method names being created by codegen tools. At the moment, [aiohttp_apispec](https://github.com/maximdanilchenko/aiohttp-apispec) does not support adding `operationId` annotations via tags. The `generate-open-api-spec` tool was initially created to help identify issues with method parameters not being sorted, resulting in somewhat random ordering each time a codegen operation was performed. This is relevant for languages which do not have support for [named parameters](https://en.wikipedia.org/wiki/Named_parameter) such as `Javascript`. It is recommended that the `generate-open-api-spec` is run prior to each release, and the resulting `open-api/openapi.json` file checked in to allow tracking of API changes over time. At the moment, this process is not automated as part of the release pipeline. 
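
If you want to look at the raw material these codegen tools consume, it is easy to pull the spec from a running agent and keep a normalized copy for comparison. The following is a minimal sketch only: it assumes a local agent with its Admin API at `http://localhost:8031` (adjust the host/port for your deployment, and add an `x-api-key` header if your Admin API is protected).

```python
# Minimal sketch: fetch the raw OpenAPI spec from a running agent so it can be
# fed to a codegen tool. The admin URL is an assumed local value.
import json
import urllib.request

ADMIN_URL = "http://localhost:8031"  # assumption: local agent admin host/port

request = urllib.request.Request(f"{ADMIN_URL}/api/docs/swagger.json")
# request.add_header("x-api-key", "<admin api key>")  # only if the Admin API is protected

with urllib.request.urlopen(request) as response:
    spec = json.load(response)

# Write a stable, sorted copy that is easier to diff between releases.
with open("swagger.json", "w") as out:
    json.dump(spec, out, indent=2, sort_keys=True)

print(f"Saved {len(spec.get('paths', {}))} paths from {ADMIN_URL}")
```

The saved file can then be handed to whatever codegen tool you prefer to produce, for example, `Typescript` wrappers, mirroring what `generate-open-api-spec` does for the checked-in `open-api/swagger.json` and `open-api/openapi.json`.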
diff --git a/docs/features/adminApi.png b/docs/features/adminApi.png deleted file mode 100644 index dd2380b5..00000000 Binary files a/docs/features/adminApi.png and /dev/null differ diff --git a/docs/features/adminApiAuthentication.png b/docs/features/adminApiAuthentication.png deleted file mode 100644 index 17b6e1d4..00000000 Binary files a/docs/features/adminApiAuthentication.png and /dev/null differ diff --git a/docs/features/deploymentModel-agent.png b/docs/features/deploymentModel-agent.png deleted file mode 100644 index 9c3d2482..00000000 Binary files a/docs/features/deploymentModel-agent.png and /dev/null differ diff --git a/docs/features/deploymentModel-controller.png b/docs/features/deploymentModel-controller.png deleted file mode 100644 index 3c38e6fc..00000000 Binary files a/docs/features/deploymentModel-controller.png and /dev/null differ diff --git a/docs/features/deploymentModel-full.png b/docs/features/deploymentModel-full.png deleted file mode 100644 index 65e69d15..00000000 Binary files a/docs/features/deploymentModel-full.png and /dev/null differ diff --git a/docs/features/devcontainer.md b/docs/features/devcontainer.md index e58ba071..719d0cea 100644 --- a/docs/features/devcontainer.md +++ b/docs/features/devcontainer.md @@ -1,11 +1,12 @@ -For information on running demos and tests using provided shell scripts, see [DevReadMe](../DevReadMe) readme. - # ACA-Py Development with Dev Container -The following guide will get you up and running and developing/debugging ACA-Py as quickly as possible. + +The following guide will get you up and running and developing/debugging ACA-Py as quickly as possible. We provide a [`devcontainer`](https://containers.dev) and will use [`VS Code`](https://code.visualstudio.com) to illustrate. By no means is ACA-Py limited to these tools; they are merely examples. +**For information on running demos and tests using provided shell scripts, see [DevReadMe](./DevReadMe) readme.** + ## Caveats The primary use case for this `devcontainer` is for developing, debugging and unit testing (pytest) the [aries_cloudagent](https://github.com/hyperledger/aries-cloudagent-python/tree/main/aries_cloudagent) source code. @@ -13,17 +14,18 @@ The primary use case for this `devcontainer` is for developing, debugging and un There are limitations running this devcontainer, such as all networking is within this container. This container has [docker-in-docker](https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker-in-docker.md) which allows running demos, building docker images, running `docker compose` all within this container. ### Files + The `.devcontainer` folder contains the `devcontainer.json` file which defines this container. We are using a `Dockerfile` and `post-install.sh` to build and configure the container run image. The `Dockerfile` is simple but in place for simplifying image enhancements (ex. adding `poetry` to the image). The `post-install.sh` will install some additional development libraries (including for BDD support). ## Devcontainer > What are Development Containers? -> +> > A Development Container (or Dev Container for short) allows you to use a container as a full-featured development environment. It can be used to run an application, to separate tools, libraries, or runtimes needed for working with a codebase, and to aid in continuous integration and testing. Dev containers can be run locally or remotely, in a private or public cloud. see [https://containers.dev](https://containers.dev). 
-In this guide, we will use [Docker](https://www.docker.com) and [Visual Studio Code](https://code.visualstudio.com) with the [Dev Containers Extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) installed, please set your machine up with those. As of writing, we used the following: +In this guide, we will use [Docker](https://www.docker.com) and [Visual Studio Code](https://code.visualstudio.com) with the [Dev Containers Extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) installed, please set your machine up with those. As of writing, we used the following: - Docker Version: 20.10.24 - VS Code Version: 1.79.0 @@ -45,11 +47,12 @@ To open ACA-Py in a devcontainer, we open the *root* of this repository. We can When the [.devcontainer/devcontainer.json](https://github.com/hyperledger/aries-cloudagent-python/blob/main/.devcontainer/devcontainer.json) is opened, you will see it building... it is building a Python 3.9 image (bash shell) and loading it with all the ACA-Py requirements (and black). We also load a few Visual Studio settings (for running Pytests and formatting with Flake and Black). ### Poetry + The Python libraries / dependencies are installed using [`poetry`](https://python-poetry.org). For the devcontainer, we *DO NOT* use virtual environments. This means you will not see or need venv prompts in the terminals and you will not need to run tasks through poetry (ie. `poetry run black .`). If you need to add new dependencies, you will need to add the dependency via poetry *AND* you should rebuild your devcontainer. In VS Code, open a Terminal, you should be able to run the following commands: -``` +```bash python -m aries_cloudagent -v cd aries_cloudagent ruff check . @@ -61,7 +64,7 @@ The first command should show you that `aries_cloudagent` module is loaded (ACA- When running `ruff check .` in the terminal, you may see `error: Failed to initialize cache at /.ruff_cache: Permission denied (os error 13)` - that's ok. If there are actual ruff errors, you should see something like: -``` +```bash error: Failed to initialize cache at /.ruff_cache: Permission denied (os error 13) admin/base_server.py:7:7: D101 Missing docstring in public class Found 1 error. @@ -78,7 +81,7 @@ More importantly, these extensions are now added to document save, so files will ### Running docker-in-docker demos -Start by running a von-network inside your dev container. Or connect to a hosted ledger. You will need to adjust the ledger configurations if you do this. +Start by running a von-network inside your dev container. Or connect to a hosted ledger. You will need to adjust the ledger configurations if you do this. ```sh git clone https://github.com/bcgov/von-network @@ -108,60 +111,61 @@ cd demo # follow the script... 
``` - ## Further Reading and Links -* Development Containers (devcontainers): [https://containers.dev](https://containers.dev) -* Visual Studio Code: [https://code.visualstudio.com](https://code.visualstudio.com) -* Dev Containers Extension: [marketplace.visualstudio.com](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) -* Docker: [https://www.docker.com](https://www.docker.com) -* Docker Compose: [https://docs.docker.com/compose/](https://docs.docker.com/compose/) - +- Development Containers (devcontainers): [https://containers.dev](https://containers.dev) +- Visual Studio Code: [https://code.visualstudio.com](https://code.visualstudio.com) +- Dev Containers Extension: [marketplace.visualstudio.com](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) +- Docker: [https://www.docker.com](https://www.docker.com) +- Docker Compose: [https://docs.docker.com/compose/](https://docs.docker.com/compose/) ## ACA-Py Debugging To better illustrate debugging pytests and ACA-Py runtime code, let's add some run/debug configurations to VS Code. If you have your own `launch.json` and `settings.json`, please cut and paste what you want/need. -``` +```sh cp -R .vscode-sample .vscode ``` This will add a `launch.json`, `settings.json` and multiple ACA-Py configuration files for developing with different scenarios. - Faber: Simple agent to simulate an issuer - Alice: Simple agent to simulate a holder - Endorser: Simulates the endorser agent in an endorsement required environment - Author: Simulates an author agent in a endorsement required environment - Multitenant Admin: Includes settings for a multitenant/wallet scenario +- Faber: Simple agent to simulate an issuer +- Alice: Simple agent to simulate a holder +- Endorser: Simulates the endorser agent in an endorsement required environment +- Author: Simulates an author agent in a endorsement required environment +- Multitenant Admin: Includes settings for a multitenant/wallet scenario Having multiple agents is to demonstrate launching multiple agents in a debug session. Any of the config files and the launch file can be changed and customized to meet your needs. They are all setup to run on different ports so they don't interfere with each other. Running the debug session from inside the dev container allows you to contact other services such as a local ledger or tails server using localhost, while still being able to access the swagger admin api through your browser. For all the agents if you want to use another ledger (von-network) other than localhost you will need to change the `genesis-url` config. For all the agents if you don't want to support revocation you need to remove or comment out the `tails-server-base-url` config. If you want to use a non localhost server then you will need to change the url. +### Faber + +- admin api url = http://localhost:9041 +- study the demo to understand the steps to have the agent in the correct state. Make your public dids and schemas, cred-defs, etc. + +### Alice + +- admin api url = http://localhost:9011 +- study the demo to get a connection with faber +### Endorser -##### Faber: - - admin api url = http://localhost:9041 - - study the demo to understand the steps to have the agent in the correct state. Make your public dids and schemas, cred-defs, etc. +- admin api url = http://localhost:9031 +- This config is useful if you want to develop in an environment that requires endorsement. 
You can run the demo with `./run_demo faber --endorser-role author` to see all the steps to become and endorser. -##### Alice: - - admin api url = http://localhost:9011 - - study the demo to get a connection with faber +### Author -##### Endorser - - admin api url = http://localhost:9031 - - This config is useful if you want to develop in an environment that requires endorsement. You can run the demo with `./run_demo faber --endorser-role author` to see all the steps to become and endorser. +- admin api url = http://localhost:9021 +- This config is useful if you want to develop in an environment that requires endorsement. You can run the demo with `./run_demo faber --endorser-role author` to see all the steps to become and author. You need to uncomment the configurations for automating the connection to endorser. -##### Author - - admin api url = http://localhost:9021 - - This config is useful if you want to develop in an environment that requires endorsement. You can run the demo with `./run_demo faber --endorser-role author` to see all the steps to become and author. You need to uncomment the configurations for automating the connection to endorser. +### Multitenant-Admin - ##### Multitenant-Admin - - admin api url = http://localhost:9051 - - This is for a multitenant environment where you can create multiple tenants with subwallets with one agent. See [Multitenancy](./Multitenancy.md) +- admin api url = http://localhost:9051 +- This is for a multitenant environment where you can create multiple tenants with subwallets with one agent. See [Multitenancy](./Multitenancy.md) - ### Try running Faber and Alice at the same time. Add break points and recreate the demo! +### Try running Faber and Alice at the same time and add break points and recreate the demo To run your ACA-Py code in debug mode, go to the `Run and Debug` view, select the agent(s) you want to start and click `Start Debugging (F5)`. @@ -169,7 +173,7 @@ This will start your source code as a running ACA-Py instance, all configuration For example, open `aries_cloudagent/admin/server.py` and set a breakpoint in `async def status_handler(self, request: web.BaseRequest):`, then call [`GET /status`](http://localhost:9061/api/doc#/server/get_status) in the Admin Console and hit your breakpoint. -### Pytest +## Pytest Pytest is installed and almost ready; however, we must build the test list. In the Command Palette, `Test: Refresh Tests` will scan and find the tests. @@ -177,7 +181,7 @@ See [Python Testing](https://code.visualstudio.com/docs/python/testing) for more *WARNING*: our pytests include coverage, which will prevent the [debugger from working](https://code.visualstudio.com/docs/python/testing#_debug-tests). One way around this would be to have a `.vscode/settings.json` that says not to use coverage (see above). This will allow you to set breakpoints in the pytest and code under test and use commands such as `Test: Debug Tests in Current File` to start debugging. -*WARNING*: the project configuration found in `pyproject.toml` include performing `ruff` checks when we run `pytest`. Including `ruff` does not play nice with the Testing view. In order to have our pytests discoverable AND available in the Testing view, we create a `.pytest.ini` when we build the devcontainer. This file will not be commited to the repo, nor does it impact `./scripts/run_tests` but it will impact if you manually run the pytest commands locally outside of the devcontainer. 
Just be aware that the file will stay on your file system after you shutdown the devcontainer.
+*WARNING*: the project configuration found in `pyproject.toml` includes performing `ruff` checks when we run `pytest`. Including `ruff` does not play nice with the Testing view. In order to have our pytests discoverable AND available in the Testing view, we create a `.pytest.ini` when we build the devcontainer. This file will not be committed to the repo, nor does it impact `./scripts/run_tests`, but it will have an impact if you manually run the pytest commands locally outside of the devcontainer. Just be aware that the file will stay on your file system after you shut down the devcontainer.
 ## Next Steps
diff --git a/docs/features/endorse-cred-def.png b/docs/features/endorse-cred-def.png
deleted file mode 100644
index ceb3d2fb..00000000
Binary files a/docs/features/endorse-cred-def.png and /dev/null differ
diff --git a/docs/features/endorse-public-did.png b/docs/features/endorse-public-did.png
deleted file mode 100644
index 275b4ab6..00000000
Binary files a/docs/features/endorse-public-did.png and /dev/null differ
diff --git a/docs/features/endorser-design.png b/docs/features/endorser-design.png
deleted file mode 100644
index 1c4b9fc5..00000000
Binary files a/docs/features/endorser-design.png and /dev/null differ
diff --git a/docs/features/inbound-messaging.png b/docs/features/inbound-messaging.png
deleted file mode 100644
index 804c291e..00000000
Binary files a/docs/features/inbound-messaging.png and /dev/null differ
diff --git a/docs/features/mediation-message-flow.png b/docs/features/mediation-message-flow.png
deleted file mode 100644
index 1eebe68f..00000000
Binary files a/docs/features/mediation-message-flow.png and /dev/null differ
diff --git a/docs/features/multitenancyDiagram.png b/docs/features/multitenancyDiagram.png
deleted file mode 100644
index 73c605ae..00000000
Binary files a/docs/features/multitenancyDiagram.png and /dev/null differ
diff --git a/docs/gettingStarted/AgentConnections.md b/docs/gettingStarted/AgentConnections.md
index 01caf020..97605f32 100644
--- a/docs/gettingStarted/AgentConnections.md
+++ b/docs/gettingStarted/AgentConnections.md
@@ -1,3 +1,9 @@ # Establishing a connection between Aries Agents
-To be completed. \ No newline at end of file
+Use an ACA-Py issuer/verifier to establish a connection with an Aries mobile
+wallet. Run the [Traction AnonCreds Workshop]. Get your own (temporary -- it
+will be gone in a few weeks!) Aries Cloud Agent Python-based issuer/verifier
+agent. Connect to the wallet on your mobile phone, issue a credential and then
+present it back. Lots to learn, without ever leaving your browser!
+
+[Traction AnonCreds Workshop]: https://github.com/bcgov/traction/blob/main/docs/traction-anoncreds-workshop.md
diff --git a/docs/gettingStarted/AriesAgentArchitecture.md b/docs/gettingStarted/AriesAgentArchitecture.md
index 124716c7..51399118 100644
--- a/docs/gettingStarted/AriesAgentArchitecture.md
+++ b/docs/gettingStarted/AriesAgentArchitecture.md
@@ -1,8 +1,8 @@ # Aries Cloud Agent Internals: Agent and Controller
-This section talks in particular about the architecture of this Aries cloud agent implementation. An instance of an Aries agent is actually made up of to two parts - the agent itself and a controller.
+This section talks in particular about the architecture of this Aries cloud agent implementation. An instance of an Aries agent is actually made up of two parts - the agent itself and a controller.
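
To make the agent/controller split concrete before the diagram and details below, here is a minimal, hypothetical controller sketch (it is not part of ACA-Py, and the URL/port are assumptions). It simply receives the webhook events an agent emits when started with something like `--webhook-url http://localhost:8022/webhooks`; ACA-Py typically POSTs each event to `<webhook-url>/topic/<topic>/`.

```python
# Hypothetical controller sketch: receive ACA-Py webhook events; this is where
# the controller's business logic would live. Port and paths are assumptions.
from aiohttp import web

async def handle_event(request: web.Request) -> web.Response:
    topic = request.match_info["topic"]  # e.g. "connections", "issue_credential"
    payload = await request.json()
    print(f"event on topic '{topic}': state={payload.get('state')}")
    # A real controller would react here, e.g. by calling the agent's Admin API.
    return web.Response(status=200)

app = web.Application()
app.add_routes([web.post("/webhooks/topic/{topic}/", handle_event)])

if __name__ == "__main__":
    web.run_app(app, port=8022)
```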
-![ACA-Py Deployment Overview](../../assets/deploymentModel-full.png "ACA-Py Deployment Overview") +![ACA-Py Deployment Overview](../assets/deploymentModel-full.png "ACA-Py Deployment Overview") The agent handles all of the core Aries functionality such as interacting with other agents, managing secure storage, sending event notifications to, and receiving directions from, the controller. The controller provides the business logic that defines how that particular agent instance behaves--how to respond to events in the agent, and when to trigger the agent to initiate events. The controller might be a web or native user interface for a person or it might be coded business rules driven by an enterprise system. @@ -10,6 +10,7 @@ Between the two is a simple interface. The agent sends event notifications to th As such, the agent is just a configured dependency in an Aries cloud agent deployment. Thus, the vast majority of Aries developers will focus on building controllers (business logic) and perhaps some custom plugins (protocols, as we'll discuss soon) for the agent. Only a relatively small group of Aries cloud agent maintainers will focus on adding and maintaining the agent dependency. -Want more details about the agent and controller internals? Take a look at the [Aries cloud agent deployment model](../../deploying/deploymentModel) document. +Want more details about the agent and controller internals? Take a look at the [Aries cloud agent deployment model](../deploying/deploymentModel.md) document. -> Back to the [Aries Developer - Getting Started Guide](README.md). \ No newline at end of file +> Back to the [Aries Developer - Getting Started Guide](./README.md). +> \ No newline at end of file diff --git a/docs/gettingStarted/AriesBasics.md b/docs/gettingStarted/AriesBasics.md index 23bebb3b..f8693df7 100644 --- a/docs/gettingStarted/AriesBasics.md +++ b/docs/gettingStarted/AriesBasics.md @@ -13,4 +13,5 @@ The concepts and features that make up the Aries project are documented in the [ The Aries Cloud Agent Python currently only supports Hyperledger Indy-based verifiable credentials and public ledger. Longer term (as we'll see later in this guide) protocols will be extended or added to support other verifiable credential implementations and public ledgers. -> Back to the [Aries Developer - Getting Started Guide](README.md). \ No newline at end of file +> Back to the [Aries Developer - Getting Started Guide](./README.md). +> \ No newline at end of file diff --git a/docs/gettingStarted/AriesBigPicture.md b/docs/gettingStarted/AriesBigPicture.md index f7fd77a2..982ff3f6 100644 --- a/docs/gettingStarted/AriesBigPicture.md +++ b/docs/gettingStarted/AriesBigPicture.md @@ -8,7 +8,7 @@ The agents in the picture shares many attributes: - They have some sort of storage for keys and other data related to their role as an agent - They interact with other agents using secure. peer-to-peer messaging protocols -- They have some associated mechanism to provide "business rules" to control the behaviour of the agent +- They have some associated mechanism to provide "business rules" to control the behavior of the agent - That is often a person for phone, tablet, laptop, etc. based agents - That is often backend enterprise systems for enterprise agents - Business rules for cloud agents are often about the routing of messages to and from edge agents @@ -25,4 +25,4 @@ Note the many caveats in this section - "most common", "commonly", etc. 
There ar We also recommend **not** digging into all the layers described here. Just as you don't have to know how TCP/IP works to write a web app, you don't need to know how indy-node or indy-sdk work to be able to build your first Aries-based application. Later in this guide we'll covering the starting point you do need to know. -> Back to the [Aries Developer - Getting Started Guide](README.md). +> Back to the [Aries Developer - Getting Started Guide](./README.md). diff --git a/docs/gettingStarted/AriesDeveloperDemos.md b/docs/gettingStarted/AriesDeveloperDemos.md index 0f38143f..69fde702 100644 --- a/docs/gettingStarted/AriesDeveloperDemos.md +++ b/docs/gettingStarted/AriesDeveloperDemos.md @@ -6,16 +6,18 @@ Here are some demos that developers can use to get up to speed on Aries. You don This demo uses agents (and an Indy ledger), but doesn't implement a controller at all. Instead it uses the OpenAPI (aka Swagger) user interface to let you be the controller to connect agents, issue a credential and then proof that credential. -[Collaborating Agents OpenAPI Demo](../../demo/AriesOpenAPIDemo) +[Collaborating Agents OpenAPI Demo](../demo/AriesOpenAPIDemo.md) ## Python Controller demo Run this demo to see a couple of simple Python controller implementations for Alice and Faber. Like the previous demo, this shows the agents connecting, Faber issuing a credential to Alice and then requesting a proof based on the credential. Running the demo is simple, but there's a lot for a developer to learn from the code. -[Python-based Alice/Faber Demo](../../demo/) +[Python-based Alice/Faber Demo](../demo/README.md) -## Web App Sample - Email Verification Service +## Mobile App and Web Sample - BC Gov Showcase -This live service implements a real credential issuer - verifying a user's email address when connecting to an agent and then issuing a "verified email address" credential. This service is used the [IIWBook Demo](https://vonx.io/how_to/iiwbook). +Try out the [BC Gov Showcase] to download a production Wallet for holding Verifiable Credentials, +and then use your new wallet to get and present credentials in some sample scenarios. The end-to-end +verifiable credential experience in 30 minutes or less. -[Email Verification Service](https://github.com/bcgov/indy-email-verification) +[BC Gov Showcase]: https://digital.gov.bc.ca/digital-trust/showcase/ diff --git a/docs/gettingStarted/AriesMessaging.md b/docs/gettingStarted/AriesMessaging.md index 150f39fa..c1ec2ab2 100644 --- a/docs/gettingStarted/AriesMessaging.md +++ b/docs/gettingStarted/AriesMessaging.md @@ -14,4 +14,5 @@ Code for protocols are implemented as externalized modules from the core agent c Developers building Aries agents for a particular use case will generally focus on building controllers. They must understand the protocols that they are going to need, including the events the controller will receive, and the protocol's administrative messages exposed via the REST API. From time to time, such Aries agent developers might need to implement their own protocols. -> Back to the [Aries Developer - Getting Started Guide](README.md). +> Back to the [Aries Developer - Getting Started Guide](./README.md). 
+> diff --git a/docs/gettingStarted/AriesRoutingExample.md b/docs/gettingStarted/AriesRoutingExample.md index e0eaeed6..c3914c6c 100644 --- a/docs/gettingStarted/AriesRoutingExample.md +++ b/docs/gettingStarted/AriesRoutingExample.md @@ -4,13 +4,13 @@ In this example, we'll walk through an example of complex routing in Aries, outl We'll start with the Alice and Bob example from the [Cross Domain Messaging](https://github.com/hyperledger/aries-rfcs/blob/master/concepts/0094-cross-domain-messaging) Aries RFC. -![Cross Domain Messaging Example](https://github.com/hyperledger/aries-rfcs/blob/master/concepts/0094-cross-domain-messaging/domains.jpg "Cross Domain Messaging Example") +![Cross Domain Messaging Example](https://raw.githubusercontent.com/hyperledger/aries-rfcs/main/concepts/0094-cross-domain-messaging/domains.jpg "Cross Domain Messaging Example") What are the DIDs involved, what's in their DIDDocs, and what communications are happening between the agents as the connections are made? ## The Scenario -Bob and Alice want to establish a connection so that they can communicate. Bob uses an Agency endpoint ("https://agents-r-us.com), labelled as 9 and will have an agent used for routing, labelled as 3. We'll also focus on Bob's messages from his main iPhone, labelled as 4. We'll ignore Bob's other agents (5 and 6) and we won't worry about Alice's configuration (agents 1, 2 and 8). While the process below is all about Bob, Alice and her agents are doing the same interactions within her domain. +Bob and Alice want to establish a connection so that they can communicate. Bob uses an Agency endpoint (`https://agents-r-us.ca`), labelled as 9 and will have an agent used for routing, labelled as 3. We'll also focus on Bob's messages from his main iPhone, labelled as 4. We'll ignore Bob's other agents (5 and 6) and we won't worry about Alice's configuration (agents 1, 2 and 8). While the process below is all about Bob, Alice and her agents are doing the same interactions within her domain. ## All the DIDs @@ -73,7 +73,7 @@ We'll start the process with Alice sending an out of band connection invitation - The Routing Agent sends the data to Bob's iPhone agent. - Bob's iPhone agent fills in the rest of the DIDDoc: - the public key for the Routing Agent for the Alice relationship - - the `did-communication` service endpoint is set to the Agency public DID and + - the `did-communication` service endpoint is set to the Agency public DID and - the routing keys array with the values of the Agency public DID key reference and the Routing Agent key reference **Note**: Instead of using the DID Bob created, the Agency and Routing Agent might use the public key used to encrypt the messages for their internal routing table look up for where to send a message. In that case, the Bob and the Routing Agent share the public key instead of the DID to their respective upstream routers. @@ -84,6 +84,6 @@ At this time, there are **not** specific DIDComm protocols for the "set up the r Based on the DIDDoc that Bob has sent Alice, for her to send a DIDComm message to Bob, Alice must: -* Prepare the message for Bob's Agent. -* Encrypt and place that message into a "Forward" message for Bob's Routing Agent. -* Encrypt and send the "Forward" message to Bob's Agency endpoint. +- Prepare the message for Bob's Agent. +- Encrypt and place that message into a "Forward" message for Bob's Routing Agent. +- Encrypt and send the "Forward" message to Bob's Agency endpoint. 
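
For reference, the "Forward" wrapper Alice builds in the steps above is a small envelope around an already-encrypted payload. A rough sketch of its shape follows (assumptions: the DIDComm v1 `routing/1.0` forward message as described in the Aries RFCs; the encrypted payload and key references are stubbed out).

```python
# Illustrative only: the approximate shape of a DIDComm v1 "forward" message.
# The encrypted payload and the recipient key reference are placeholders.
forward_message = {
    "@type": "https://didcomm.org/routing/1.0/forward",
    "to": "<key reference for the next hop, taken from Bob's DIDDoc routing keys>",
    "msg": "<the already-encrypted (packed) message being forwarded>",
}
```

A mediator that receives this can look up where to send the message from `to` without ever seeing the inner plaintext.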
diff --git a/docs/gettingStarted/ConnectIndyNetwork.md b/docs/gettingStarted/ConnectIndyNetwork.md index a4be1650..005a4648 100644 --- a/docs/gettingStarted/ConnectIndyNetwork.md +++ b/docs/gettingStarted/ConnectIndyNetwork.md @@ -1,3 +1,3 @@ # Connecting to an Indy Network -To be completed. \ No newline at end of file +To be completed. diff --git a/docs/gettingStarted/CredentialRevocation.md b/docs/gettingStarted/CredentialRevocation.md index 1967a688..b8a68668 100644 --- a/docs/gettingStarted/CredentialRevocation.md +++ b/docs/gettingStarted/CredentialRevocation.md @@ -15,8 +15,8 @@ revocations be published? Here is a summary of all of the AnonCreds revocation activities performed by issuers. After this, we'll provide a (much shorter) list of what an ACA-Py -issuer controller has to do. For those interested, there is a more [complete -overview of AnonCreds revocation], including all of the roles, and some details +issuer controller has to do. For those interested, there is a more +[complete overview of AnonCreds revocation], including all of the roles, and some details of the cryptography behind the approach: - Issuers indicate that a credential will support revocation when creating the @@ -98,12 +98,12 @@ enabled. You will need to have the URL of an running instance of Include the command line parameter `--tails-server-base-url ` -0. Publish credential definition +0. Publish credential definition Credential definition is created. All required revocation collateral is also created and managed including revocation registry definition, entry, and tails file. - ``` + ```json POST /credential-definitions { "schema_id": schema_id, @@ -119,12 +119,12 @@ Include the command line parameter `--tails-server-base-url @@ -152,8 +152,9 @@ Include the command line parameter `--tails-server-base-url **Note:** The optional `~please_ack` is not currently supported. - ### Issuer Role To notify connections to which credentials have been issued, during step 2 @@ -220,7 +220,7 @@ above, include the following attributes in the request body: Your request might look something like: -``` +```json POST /revocation/revoke { "rev_reg_id": @@ -262,7 +262,7 @@ There are several endpoints that must be called, and they must be called in this - here you need to provide the full URI that will be written to the ledger, for example: -``` +```json { "tails_public_uri": "http://host.docker.internal:6543/VDKEEMMSRTEqK4m7iiq5ZL:4:VDKEEMMSRTEqK4m7iiq5ZL:3:CL:8:faber.agent.degree_schema:CL_ACCUM:3cb5c439-928c-483c-a9a8-629c307e6b2d" } diff --git a/docs/gettingStarted/DecentralizedIdentityDemos.md b/docs/gettingStarted/DecentralizedIdentityDemos.md index 62bed219..c46fb398 100644 --- a/docs/gettingStarted/DecentralizedIdentityDemos.md +++ b/docs/gettingStarted/DecentralizedIdentityDemos.md @@ -2,31 +2,24 @@ The following are some demos that you can go through to see verifiable credentials in action. For each of the demos, we've included some guidance on what you should get out of the demo - and where you should **stop** exploring the demos. Later on in this guide we have some command line demos built on current generation code for developers wanting to look at what's going on under the hood. -### Alice and Faber - edX Version +## BC Gov Showcase -The Hyperledger Indy community is littered with "Alice and Faber" demos. 
Alice is a former student of [Faber College](https://en.wikipedia.org/wiki/Animal_House) (motto: Knowledge is Good), and is offered from Faber a verifiable credential that she can use to prove her educational accomplishments. Alice proves the claims in the credential to get a job at ACME Corp, and then uses a credential about her job at ACME Corp. to get a loan from Thrift Bank. +Try out the [BC Gov Showcase] to download a production Wallet for holding Verifiable Credentials, +and then use your new wallet to get and present credentials in some sample scenarios. The end-to-end +verifiable credential experience in 30 minutes or less. -The edX course version of the Alice/Faber story is good if you are new to Indy because in going through the story you get a web interface to see the interactions/technical steps in establishing connections between agents and the process for issuing and verifying credentials. **DO NOT** look into the underlying code because it is not maintained, and it is out-of-date. +[BC Gov Showcase]: https://digital.gov.bc.ca/digital-trust/showcase/ -We recommend using the "In Browser" steps to run the demo vs. getting things running on your local machine. +## Traction AnonCreds Workshop -Link: [Alice and Faber - edX Version](https://github.com/hyperledger/education/blob/master/LFS171x/indy-material/nodejs/README.md) +Now that you have a wallet, how about being an issuer, and experience what is needed on that side of an exchange? +To do that, try the [Traction AnonCreds Workshop]. Get your own (temporary -- it will be gone in a few weeks!) +Aries Cloud Agent Python-based issuer/verifier agent. Connect to the wallet on your mobile phone, issue a credential +and then present it back. Lots to learn, without ever leaving your browser! -### BC Gov's OrgBook and Greenlight +[Traction AnonCreds Workshop]: https://github.com/bcgov/traction/blob/main/docs/traction-anoncreds-workshop.md -BC Gov's Verifiable Organizations Network (VON) project implemented the first production Indy app (TheOrgBook, and now just "OrgBook") that exists to bootstrap verifiable credentials ecosystems. The Greenlight use case is a demo showing how verifiable credentials can be used for reducing the red tape businesses face in trying to get a government permit (for example, to open a restaurant). The business challenge addressed by Greenlight is figuring out what other permits and licenses need to be in place before a business can get the permit it actually wants. The demo simulates a business identifying their goal permit, seeing a roadmap of the prerequisite credentials already collected and still needed, and using links to get the needed credentials. Since in these early days of decentralized identity, business don't have their own digital wallet, in applying for each credential, each permitting service is using OrgBook to get proof of the prerequisite credentials, and issuing the new credential back to the OrgBook. +## More demos, please -If you are interested in using/contributing to VON and OrgBook, contact the folks from BC Gov using links on https://vonx.io. - -Link: [Greenlight](https://greenlight.orgbook.gov.bc.ca/) - choose the "City of Surrey - Business License" -Link: [Information about Verifiable Organizations Network (VON)](https://vonx.io) -Link: [OrgBook BC - Production Instance](https://orgbook.gov.bc.ca/) - -### The ConfBook Mobile Agent Demo - -The ConfBook demo was presented during the [Internet Identity Workshop](https://internetidentityworkshop.com/) (IIW) 28. 
The demo uses instances of the Aries Cloud Agent - Python-based services interacting with a mobile agent to issue and verify credentials. Follow along with the demo to get an Aries Mobile Agent and use it to get a verifiable credential that you control your email address, and proof the claims from that credential to get a verifiable credential that you attended a -conference. - -Link: [ConfBook Demo](https://vonx.io/how_to/confbook) - -> Back to the [Aries Developer - Getting Started Guide](README.md). +Interested in seeing your demos/use cases added to this list? Submit an issue or a PR and we'll see about +including it in this list. diff --git a/docs/gettingStarted/IndyAriesDevOptions.md b/docs/gettingStarted/IndyAriesDevOptions.md index 474dd328..9416b6dd 100644 --- a/docs/gettingStarted/IndyAriesDevOptions.md +++ b/docs/gettingStarted/IndyAriesDevOptions.md @@ -8,7 +8,7 @@ In the following, we go through the layers from the top of the stack to the bott ## Building Decentralized Identity Applications -If you just want to build enterprise applications on top of the decentralized identity-related Hyperledger projects, you can start with building cloud-based controller apps using any language you want, and deploying your code with an instance of the code in this repository ([aries-cloudagent-python](https://github.com/hyperledger/aries-cloudagent-python)). +If you just want to build enterprise applications on top of the decentralized identity-related Hyperledger projects, you can start with building cloud-based controller apps using any language you want, and deploying your code with an instance of the code in this repository ([aries-cloudagent-python](https://github.com/hyperledger/aries-cloudagent-python)). If you want to build a mobile agent, there are open source options available, including [Aries-MobileAgent-Xamarin](https://github.com/hyperledger/aries-mobileagent-xamarin) (aka "Aries MAX"), which is built on [Aries Framework .NET](https://github.com/hyperledger/aries-framework-dotnet), and [Aries Mobile Agent React Native](https://github.com/hyperledger/aries-mobile-agent-react-native), which is built on [Aries Framework JavaScript](https://github.com/hyperledger/aries-framework-javascript). @@ -43,7 +43,3 @@ If you are interested in getting into the public ledger part of Indy, particular ## Working in Cryptography Finally, at the deepest level, and core to all of the projects is the cryptography in [Hyperledger Ursa](https://github.com/hyperledger/ursa). If you are a cryptographer, that's where you want to be - and we want you there. - - - - diff --git a/docs/gettingStarted/IndyBasics.md b/docs/gettingStarted/IndyBasics.md index 500f14b9..b20b9737 100644 --- a/docs/gettingStarted/IndyBasics.md +++ b/docs/gettingStarted/IndyBasics.md @@ -2,7 +2,7 @@ > **NOTE:** If you are developer building apps on top of Aries and Indy, you **DO NOT** need to know the nuts and bolts of Indy to build applications. You need to know about verifiable credentials and the concepts of self-sovereign identity. But as an app developer, you don't need to do the Indy getting started pieces. Aries takes care of those details for you. The introduction linked here should be sufficient. 
-If you are new to Indy and verifiable credentials and want to learn the core concepts, this [link](https://github.com/hyperledger/education/blob/master/LFS171x/docs/introduction-to-hyperledger-indy.md) provides a solid foundation into the goals and purpose of Indy including verifiable credentials, DIDs, decentralized/self-sovereign identity, the Sovrin Foundation and more. The document is the content of the Indy chapter of the Hyperledger edX [Blockchain for Business](https://www.edx.org/course/blockchain-for-business-an-introduction-to-hyperledger-technologies) course (which you could also go through). +If you are new to Indy and verifiable credentials and want to learn the core concepts, this [link](https://github.com/hyperledger/education/blob/master/LFS171x/docs/introduction-to-hyperledger-indy.md) provides a solid foundation into the goals and purpose of Indy including verifiable credentials, DIDs, decentralized/self-sovereign identity, the Sovrin Foundation and more. The document is the content of the Indy chapter of the Hyperledger edX [Blockchain for Business](https://www.edx.org/course/blockchain-for-business-an-introduction-to-hyperledger-technologies) course (which you could also go through). Feel free to do the demo that is referenced in the material, but we recommend that you **not** dig into that codebase. It's pretty old now - almost a year! We've got much more relevant examples later in this guide. @@ -12,4 +12,4 @@ As well, **don't** use the guidance in the course to dive into the content about Indy provides an implementation of the basic functions required to implement a network for self-sovereign identity (SSI) - a ledger, client SDKs for interacting with the ledger, DIDs, and capabilities for issuing, holding and proving verifiable credentials. -> Back to the [Aries Developer - Getting Started Guide](README.md). +> Back to the [Aries Developer - Getting Started Guide](./README.md). diff --git a/docs/gettingStarted/IssuingAnonCredsCredentials.md b/docs/gettingStarted/IssuingAnonCredsCredentials.md new file mode 100644 index 00000000..8fe7c50e --- /dev/null +++ b/docs/gettingStarted/IssuingAnonCredsCredentials.md @@ -0,0 +1,8 @@ +# Issuing AnonCreds Credentials + +Become an issuer, and define, publish and issue verifiable credentials to a mobile wallet. +Run the [Traction AnonCreds Workshop]. Get your own (temporary -- it will be gone in a few weeks!) +Aries Cloud Agent Python-based issuer/verifier agent. Connect to the wallet on your mobile phone, issue a credential +and then present it back. Lots to learn, without ever leaving your browser! + +[Traction AnonCreds Workshop]: https://github.com/bcgov/traction/blob/main/docs/traction-anoncreds-workshop.md diff --git a/docs/gettingStarted/IssuingIndyCredentials.md b/docs/gettingStarted/IssuingIndyCredentials.md deleted file mode 100644 index 320985d7..00000000 --- a/docs/gettingStarted/IssuingIndyCredentials.md +++ /dev/null @@ -1,3 +0,0 @@ -# Issuing Indy Credentials - -To be completed. \ No newline at end of file diff --git a/docs/gettingStarted/PresentingAnonCredsProofs.md b/docs/gettingStarted/PresentingAnonCredsProofs.md new file mode 100644 index 00000000..76beba12 --- /dev/null +++ b/docs/gettingStarted/PresentingAnonCredsProofs.md @@ -0,0 +1,11 @@ +# Presenting AnonCreds Proofs + +Become a verifier, and construct a presentation request, send the request to a +mobile wallet, get a presentation derived from AnonCreds verifiable credentials +and verify the presentation. Run the [Traction AnonCreds Workshop]. 
Get your own +(temporary -- it will be gone in a few weeks!) Aries Cloud Agent Python-based +issuer/verifier agent. Connect to the wallet on your mobile phone, issue a +credential and then present it back. Lots to learn, without ever leaving your +browser! + +[Traction AnonCreds Workshop]: https://github.com/bcgov/traction/blob/main/docs/traction-anoncreds-workshop.md diff --git a/docs/gettingStarted/PresentingIndyProofs.md b/docs/gettingStarted/PresentingIndyProofs.md deleted file mode 100644 index 5eee3d5d..00000000 --- a/docs/gettingStarted/PresentingIndyProofs.md +++ /dev/null @@ -1,3 +0,0 @@ -# Presenting Indy Proofs - -To be completed. \ No newline at end of file diff --git a/docs/gettingStarted/README.md b/docs/gettingStarted/README.md index a9962781..ef585997 100644 --- a/docs/gettingStarted/README.md +++ b/docs/gettingStarted/README.md @@ -2,7 +2,8 @@ This guide is to get you from (pretty much) zero to developing code for issuing (and verifying) credentials with your own Aries agent. On the way, you'll look at Hyperledger Indy and how it works, find out about the architecture and components of an Aries agent and its underlying messaging protocols. Scan the list of topics below and jump in as soon as you hit a topic you don't know. -Note that in the guidance we have here, we include not only the links to look at, but we recommend that you **not** look at certain material to which you might naturally gravitate. That's because the material is out of date and will take you down some unnecessary rabbit holes. Keep your eyes on the goal - developing with Indy and Aries. +Note that in the guidance we have here, we include not only the links to look at, but we recommend that you **not** look at certain material to which you might naturally gravitate. That's because the material is out of date and will take you down some unnecessary rabbit holes. Keep your eyes on the goal - developing with Aries to interact with other agents +to (amongst other things) connect, issue, hold, present and verify verifiable credentials. * [I've heard of Indy, but I don't know the basics](IndyBasics.md) * [I know about Indy, but what is Aries?](AriesBasics.md) @@ -11,16 +12,16 @@ Note that in the guidance we have here, we include not only the links to look at * [Aries Internals - Deployment Components](AriesAgentArchitecture.md) * [An overview of Aries messaging](AriesMessaging.md) * [Demos - Aries Developer](AriesDeveloperDemos.md) -* To Do: [Establishing a connection between Aries Agents](AgentConnections.md) -* To Do: [Issuing an Indy credential: From Issuer to Holder/Prover](IssuingIndyCredentials.md) -* To Do: [Presenting an Indy credential: From Holder/Prover to Verifier](PresentingIndyProofs.md) -* To Do: [Next steps: Creating your own Aries Agent](YourOwnAriesAgent.md) +* [Establishing a connection between Aries Agents](AgentConnections.md) +* [Issuing an AnonCreds credential: From Issuer to Holder/Prover](IssuingAnonCredsCredentials.md) +* [Presenting an AnonCreds credential: From Holder/Prover to Verifier](PresentingAnonCredsProofs.md) +* [Next steps: Creating your own Aries Agent](YourOwnAriesAgent.md) +* [What should I work on?
Options for Aries/Indy Developers](IndyAriesDevOptions.md) * [Deeper Dive: DIDComm Messages](DIDcommMsgs.md) * [Deeper Dive: DIDComm Message Routing and Encryption](RoutingEncryption.md) * [Deeper Dive: Routing Example](AriesRoutingExample.md) * To Do: [Deeper Dive: Running and Connecting to an Indy Network](ConnectIndyNetwork.md) * [Steps and APIs to support credential revocation with Aries agent](CredentialRevocation.md) -* [Deeper Dive: Aca-Py Plug-Ins](../features/PlugIns/) +* [Deeper Dive: Aca-Py Plug-Ins](../features/PlugIns.md) Want to help with this guide? Please add issues or submit a pull request to improve the document. Point out things that are missing, things to improve and especially things that are wrong. diff --git a/docs/gettingStarted/RoutingEncryption.md b/docs/gettingStarted/RoutingEncryption.md index 20409f5d..93b82076 100644 --- a/docs/gettingStarted/RoutingEncryption.md +++ b/docs/gettingStarted/RoutingEncryption.md @@ -1,9 +1,9 @@ # Deeper Dive: DIDComm Message Routing and Encryption -Many Aries edge agents do not directly receive messages from a peer edge agent - they have agents in between that route messages to them. This is done for many reasons, such as: +Many Aries edge agents do not directly receive messages from a peer edge agent - they have agents in between that route messages to them. This is done for many reasons, such as: - The agent is on a mobile device that does not have a persistent connection and so uses a cloud agent. -- The person does not want to allow correlation of their agent across relationships and so they use a shared, common endpoint (e.g. https://agents-R-Us.com) that they are "hidden in a crowd". +- The person does not want to allow correlation of their agent across relationships and so they use a shared, common endpoint (e.g. `https://agents-R-Us.ca`) that they are "hidden in a crowd". - An enterprise wants a single gateway to the many enterprise agents they have in their organization. Thus, when a DIDComm message is sent from one edge agent to another, it is routed per the instructions of the receiver and for the needs of the sender. For example, in the following picture, Alice might be told by Bob to send messages to his phone (agent 4) via agents 9 and 3, and Alice might always send out messages via agent 2. diff --git a/docs/gettingStarted/YourOwnAriesAgent.md b/docs/gettingStarted/YourOwnAriesAgent.md index 9efb9d52..cbd2ae86 100644 --- a/docs/gettingStarted/YourOwnAriesAgent.md +++ b/docs/gettingStarted/YourOwnAriesAgent.md @@ -1,3 +1,8 @@ -# Starting Your Own Aries Agent +# Creating Your Own Aries Agent -To be completed. \ No newline at end of file +Use the "next steps" in the [Traction AnonCreds Workshop] and create your own +controller. The [Aries ACA-Py Controllers] repository has some samples to get +you started. + +[Traction AnonCreds Workshop]: https://github.com/bcgov/traction/blob/main/docs/traction-anoncreds-workshop.md +[Aries ACA-Py Controllers]: https://github.com/hyperledger/aries-acapy-controllers diff --git a/docs/release/CHANGELOG.md b/docs/release/CHANGELOG.md index 5465d9b9..9806fa40 100644 --- a/docs/release/CHANGELOG.md +++ b/docs/release/CHANGELOG.md @@ -693,7 +693,7 @@ We have also noted that in some container orchestration environments such as install correctly in other environments (such as in `docker compose` setups). 
[\#2116]: https://github.com/hyperledger/aries-cloudagent-python/issues/2116 -[Upgrading ACA-Py]: ../../deploying/UpgradingACA-Py +[Upgrading ACA-Py]: ../deploying/UpgradingACA-Py [Issue #2201]: https://github.com/hyperledger/aries-cloudagent-python/issues/2201 [Aries Askar]: https://github.com/hyperledger/aries-askar [Red Hat's OpenShift]: https://www.openshift.com/products/container-platform/ @@ -1250,7 +1250,7 @@ release and later, and "as-is" connections made using earlier releases of ACA-Py candidates. A new "Upgrade deployment" capability ([#1557](https://github.com/hyperledger/aries-cloudagent-python/pull/1557), described below) must be executed to update your deployment to add tags for all existing connections. -The [Supported RFCs document](../../features/SupportedRFCs) has been updated to reflect the addition of the +The [Supported RFCs document](../features/SupportedRFCs) has been updated to reflect the addition of the AIP 2.0 RFCs for which support was added. The following is an annotated list of PRs in the release, including a link to each PR. @@ -1328,7 +1328,7 @@ With usage in the field increasing, we're cleaning up edge cases and issues rela The most significant new feature for users of Indy ledgers is a simplified approach for transaction authors getting their transactions signed by an endorser. Transaction author controllers now do almost nothing other than configuring their instance to use an Endorser, -and ACA-Py takes care of the rest. Documentation of that feature is [here](../../features/Endorser). +and ACA-Py takes care of the rest. Documentation of that feature is [here](../features/Endorser). - Improve cloud native deployments/scaling - unprotect liveness and readiness endpoints [#1416](https://github.com/hyperledger/aries-cloudagent-python/pull/1416) @@ -1441,11 +1441,11 @@ This is a significant release of ACA-Py with several new features, as well as ch #### Mediator support -While ACA-Py had previous support for a basic routing protocol, this was never fully developed or used in practice. Starting with this release, inbound and outbound connections can be established through a mediator agent using the Aries (Mediator Coordination Protocol)[https://github.com/hyperledger/aries-rfcs/tree/master/features/0211-route-coordination]. This work was initially contributed by Adam Burdett and Daniel Bluhm of [Indicio](https://indicio.tech/) on behalf of [SICPA](https://sicpa.com/). [Read more about mediation support](../../features/Mediation). +While ACA-Py had previous support for a basic routing protocol, this was never fully developed or used in practice. Starting with this release, inbound and outbound connections can be established through a mediator agent using the Aries (Mediator Coordination Protocol)[https://github.com/hyperledger/aries-rfcs/tree/master/features/0211-route-coordination]. This work was initially contributed by Adam Burdett and Daniel Bluhm of [Indicio](https://indicio.tech/) on behalf of [SICPA](https://sicpa.com/). [Read more about mediation support](../features/Mediation). #### Multi-Tenancy support -Started by [BMW](https://bmw.com/) and completed by [Animo Solutions](https://animo.id/) and [Anon Solutions](https://anon-solutions.ca/) on behalf of [SICPA](https://sicpa.com/), this feature allows for a single ACA-Py instance to host multiple wallet instances. This can greatly reduce the resources required when many identities are being handled. [Read more about multi-tenancy support](../../features/Multitenancy). 
+Started by [BMW](https://bmw.com/) and completed by [Animo Solutions](https://animo.id/) and [Anon Solutions](https://anon-solutions.ca/) on behalf of [SICPA](https://sicpa.com/), this feature allows for a single ACA-Py instance to host multiple wallet instances. This can greatly reduce the resources required when many identities are being handled. [Read more about multi-tenancy support](../features/Multitenancy). #### New connection protocol(s) diff --git a/docs/release/acapy-README.md b/docs/release/acapy-README.md index 37624496..fb246e67 100644 --- a/docs/release/acapy-README.md +++ b/docs/release/acapy-README.md @@ -8,35 +8,42 @@ > An easy to use Aries agent for building SSI services using any language that supports sending/receiving HTTP requests. Full access to an organized set of all of the ACA-Py documents is available at [https://aca-py.org](https://aca-py.org). -Check it out! It's much easier to navigate than finding all the documentation here. +Check it out! It's much easier to navigate than this GitHub repo for reading the documentation. ## Overview Hyperledger Aries Cloud Agent Python (ACA-Py) is a foundation for building Verifiable Credential (VC) ecosystems. It operates in the second and third layers of the [Trust Over IP framework (PDF)](https://trustoverip.org/wp-content/uploads/2020/05/toip_050520_primer.pdf) using [DIDComm messaging](https://github.com/hyperledger/aries-rfcs/tree/main/concepts/0005-didcomm) and [Hyperledger Aries](https://www.hyperledger.org/use/aries) protocols. The "cloud" in the name means that ACA-Py runs on servers (cloud, enterprise, IoT devices, and so forth), and is not designed to run on mobile devices. -ACA-Py is built on the Aries concepts and features that make up [Aries Interop Profile (AIP) 1.0](https://github.com/hyperledger/aries-rfcs/tree/main/concepts/0302-aries-interop-profile#aries-interop-profile-version-10), and most of the features in [AIP 2.0](https://github.com/hyperledger/aries-rfcs/tree/main/concepts/0302-aries-interop-profile#aries-interop-profile-version-20). [ACA-Py’s supported Aries protocols](https://github.com/hyperledger/aries-cloudagent-python/blob/main/SupportedRFCs.md) include, most importantly, protocols for issuing, verifying, and holding verifiable credentials using both [Hyperledger AnonCreds](https://www.hyperledger.org/use/anoncreds) verifiable credential format, and the [W3C Standard Verifiable Credential](https://www.w3.org/TR/vc-data-model/) format using JSON-LD with LD-Signatures and BBS+ Signatures. +ACA-Py is built on the Aries concepts and features that make up [Aries Interop Profile (AIP) 2.0](https://github.com/hyperledger/aries-rfcs/tree/main/concepts/0302-aries-interop-profile#aries-interop-profile-version-20). [ACA-Py’s supported Aries protocols](../features/SupportedRFCs.md) include, most importantly, protocols for issuing, verifying, and holding verifiable credentials using both [Hyperledger AnonCreds] verifiable credential format, and the [W3C Standard Verifiable Credential Data Model] format using JSON-LD with LD-Signatures and BBS+ Signatures. Coming soon -- issuing and presenting [Hyperledger AnonCreds] verifiable credentials using the [W3C Standard Verifiable Credential Data Model] format. -To use ACA-Py you create a business logic controller that "talks to" ACA-Py (sending HTTP requests and receiving webhook notifications), and ACA-Py handles the Aries and DIDComm functionality. 
That controller can be built in any language that supports making and receiving HTTP requests; knowledge of Python is not needed. Together, this means you can focus on building VC solutions using familiar web development technologies, instead of having to learn the nuts and bolts of low-level cryptography and Trust over IP-type Aries protocols. +[Hyperledger AnonCreds]: https://www.hyperledger.org/use/anoncreds +[W3C Standard Verifiable Credential Data Model]: https://www.w3.org/TR/vc-data-model/ -This [checklist-style overview document](../../features/SupportedRFCs) provides a full list of the features in ACA-Py. +To use ACA-Py you create a business logic controller that "talks to" an ACA-Py instance (sending HTTP requests and receiving webhook notifications), and ACA-Py handles the Aries and DIDComm protocols and related functionality. Your controller can be built in any language that supports making and receiving HTTP requests; knowledge of Python is not needed. Together, this means you can focus on building VC solutions using familiar web development technologies, instead of having to learn the nuts and bolts of low-level cryptography and Trust over IP-type Aries protocols. + +This [checklist-style overview document](../features/SupportedRFCs.md) provides a full list of the features in ACA-Py. The following is a list of some of the core features needed for a production deployment, with a link to detailed information about the capability. ### Multi-Tenant -ACA-Py supports "multi-tenant" scenarios. In these scenarios, one (scalable) instance of ACA-Py uses one database instance, and are together capable of managing separate secure storage (for private keys, DIDs, credentials, etc.) for many different actors. This enables (for example) an "issuer-as-a-service", where an enterprise may have many VC issuers, each with different identifiers, using the same instance of ACA-Py to interact with VC holders as required. Likewise, an ACA-Py instance could be a "cloud wallet" for many holders (e.g. people or organizations) that, for whatever reason, cannot use a mobile device for a wallet. Learn more about multi-tenant deployments [here](../../features/Multitenancy). +ACA-Py supports "multi-tenant" scenarios. In these scenarios, one (scalable) instance of ACA-Py uses one database instance, and are together capable of managing separate secure storage (for private keys, DIDs, credentials, etc.) for many different actors. This enables (for example) an "issuer-as-a-service", where an enterprise may have many VC issuers, each with different identifiers, using the same instance of ACA-Py to interact with VC holders as required. Likewise, an ACA-Py instance could be a "cloud wallet" for many holders (e.g. people or organizations) that, for whatever reason, cannot use a mobile device for a wallet. Learn more about multi-tenant deployments [here](../features/Multitenancy.md). ### Mediator Service -Startup options allow the use of an ACA-Py as an Aries [mediator](https://github.com/hyperledger/aries-rfcs/tree/main/concepts/0046-mediators-and-relays#summary) using core Aries protocols to coordinate its mediation role. Such an ACA-Py instance receives, stores and forwards messages to Aries agents that (for example) lack an addressable endpoint on the Internet such as a mobile wallet. A live instance of a public mediator based on ACA-Py is available [here](https://indicio-tech.github.io/mediator/) from Indicio Technologies. Learn more about deploying a mediator [here](../../features/Mediation). 
See the [Aries Mediator Service](https://github.com/hyperledger/aries-mediator-service) for a "best practices" configuration of an Aries mediator. +Startup options allow the use of an ACA-Py as an Aries [mediator](https://github.com/hyperledger/aries-rfcs/tree/main/concepts/0046-mediators-and-relays#summary) using core Aries protocols to coordinate its mediation role. Such an ACA-Py instance receives, stores and forwards messages to Aries agents that (for example) lack an addressable endpoint on the Internet such as a mobile wallet. A live instance of a public mediator based on ACA-Py is available [here](https://indicio-tech.github.io/mediator/) from Indicio Technologies. Learn more about deploying a mediator [here](../features/Mediation.md). See the [Aries Mediator Service](https://github.com/hyperledger/aries-mediator-service) for a "best practices" configuration of an Aries mediator. ### Indy Transaction Endorsing -ACA-Py supports a Transaction Endorsement protocol, for agents that don't have write access to an Indy ledger. Endorser support is documented [here](../../features/Endorser). +ACA-Py supports a Transaction Endorsement protocol, for agents that don't have write access to an Indy ledger. Endorser support is documented [here](../features/Endorser.md). ### Scaled Deployments ACA-Py supports deployments in scaled environments such as in Kubernetes environments where ACA-Py and its storage components can be horizontally scaled as needed to handle the load. +### VC-API Endpoints + +A set of endpoints conforming to the vc-api specification are included to manage w3c credentials and presentations. They are documented [here](../features/JsonLdCredentials.md#vc-api) and a postman demo is available [here](../features/JsonLdCredentials.md#vc-api). + ## Example Uses The business logic you use with ACA-Py is limited only by your imagination. Possible applications include: @@ -58,7 +65,7 @@ For those new to SSI, Aries and ACA-Py, there are a couple of Linux Foundation e The latter is the most useful for developers wanting to get a solid basis in using ACA-Py and other Aries Frameworks. -Also included here is a much more concise (but less maintained) [Getting Started Guide](../../gettingStarted/README) that will take you from knowing next to nothing about decentralized identity to developing Aries-based business apps and services. You’ll run some Indy apps, ACA-Py apps and developer-oriented demos. The guide has a table of contents so you can skip the parts you already know. +Also included here is a much more concise (but less maintained) [Getting Started Guide](../gettingStarted/README.md) that will take you from knowing next to nothing about decentralized identity to developing Aries-based business apps and services. You’ll run an Indy ledger (with no ramp-up time), ACA-Py apps and developer-oriented demos. The guide has a table of contents so you can skip the parts you already know. ### Understanding the Architecture @@ -66,32 +73,35 @@ There is an [architectural deep dive webinar](https://www.youtube.com/watch?v=FX ![drawing](./aca-py_architecture.png) -You can extend Aca-Py using plug-ins, which can be loaded at runtime. Plug-ins are mentioned in the [webinar](https://docs.google.com/presentation/d/1K7qiQkVi4n-lpJ3nUZY27OniUEM0c8HAIk4imCWCx5Q/edit#slide=id.g5d43fe05cc_0_145) and are [described in more detail here](../../features/PlugIns/). +You can extend ACA-Py using plug-ins, which can be loaded at runtime. 
Plug-ins are mentioned in the [webinar](https://docs.google.com/presentation/d/1K7qiQkVi4n-lpJ3nUZY27OniUEM0c8HAIk4imCWCx5Q/edit#slide=id.g5d43fe05cc_0_145) and are [described in more detail here](../features/PlugIns.md). An ever-expanding set of ACA-Py plugins can be found +in the [Aries ACA-Py Plugins repository]. Check them out -- it might have the very plugin you need! + +[Aries ACA-Py Plugins repository]: https://github.com/hyperledger/aries-acapy-plugins ### Installation and Usage -An ["install and go" page for developers](https://github.com/hyperledger/aries-cloudagent-python/blob/main/DevReadMe.md) is available if you are comfortable with Trust over IP and Aries concepts. ACA-Py can be run with Docker without installation (highly recommended), or can be installed [from PyPi](https://pypi.org/project/aries-cloudagent/). In the [/demo directory](/demo) there is a full set of demos for developers to use in getting started, and the [demo read me](../../demo/) is a great starting point for developers to use an "in-browser" approach to run a zero-install example. The [Read the Docs](https://aries-cloud-agent-python.readthedocs.io/en/latest/) overview is also a way to reference the modules and APIs that make up an ACA-Py instance. +Use the ["install and go" page for developers](../features/DevReadMe.md) if you are comfortable with Trust over IP and Aries concepts. ACA-Py can be run with Docker without installation (highly recommended), or can be installed [from PyPi](https://pypi.org/project/aries-cloudagent/). In the [/demo directory](../contributing/demo) there is a full set of demos for developers to use in getting started, and the [demo read me](../demo/README.md) is a great starting point for developers to use an "in-browser" approach to run a zero-install example. The [Read the Docs](https://aries-cloud-agent-python.readthedocs.io/en/latest/) overview is also a way to understand the internal modules and APIs that make up an ACA-Py instance. -If you would like to develop on ACA-Py locally note that we use Poetry for dependency management and packaging, if you are unfamiliar with poetry please see our [cheat sheet](/docs/Poetry.md) +If you would like to develop on ACA-Py locally note that we use Poetry for dependency management and packaging, if you are unfamiliar with poetry please see our [cheat sheet](../deploying/Poetry.md) ## About the ACA-Py Admin API -The [overview of ACA-Py’s API](https://github.com/hyperledger/aries-cloudagent-python/blob/main/AdminAPI.md) is a great starting place for learning about the ACA-Py API when you are starting to build your own controller. +The [overview of ACA-Py’s API](../features/AdminAPI.md) is a great starting place for learning about the ACA-Py API when you are starting to build your own controller. -An ACA-Py instance puts together an OpenAPI-documented REST interface based on the protocols that are loaded. This is used by a controller application (written in any language) to manage the behaviour of the agent. The controller can initiate actions (e.g. issuing a credential) and can respond to agent events (e.g. sending a presentation request after a connection is accepted). Agent events are delivered to the controller as webhooks to a configured URL. +An ACA-Py instance puts together an OpenAPI-documented REST interface based on the protocols that are loaded. This is used by a controller application (written in any language) to manage the behavior of the agent. The controller can initiate actions (e.g. 
issuing a credential) and can respond to agent events (e.g. sending a presentation request after a connection is accepted). Agent events are delivered to the controller as webhooks to a configured URL. Technical note: the administrative API exposed by the agent for the controller to use must be protected with an API key (using the --admin-api-key command line arg) or deliberately left unsecured using the --admin-insecure-mode command line arg. The latter should not be used other than in development if the API is not otherwise secured. ## Troubleshooting There are a number of resources for getting help with ACA-Py and troubleshooting -any problems you might run into. The [Troubleshooting](../../testing/Troubleshooting) -document contains some guidance about issues that have been experienced in the -past. Feel free to submit PRs to supplement the troubleshooting document! -Searching the [ACA-Py GitHub -issues](https://github.com/hyperledger/aries-cloudagent-python/issues) will -often uncover challenges that others have experienced, often with answers to -solving those challenges. As well, there is the "aries-cloudagent-python" +any problems you might run into. The +[Troubleshooting](../testing/Troubleshooting.md) document contains some +guidance about issues that have been experienced in the past. Feel free to +submit PRs to supplement the troubleshooting document! Searching the [ACA-Py +GitHub issues](https://github.com/hyperledger/aries-cloudagent-python/issues) +may uncover challenges you are having that others have experienced, often +with solutions. As well, there is the "aries-cloudagent-python" channel on the Hyperledger Discord chat server ([invitation here](https://discord.gg/hyperledger)). @@ -101,15 +111,15 @@ The initial implementation of ACA-Py was developed by the Government of British [BC Digital Trust]: https://digital.gov.bc.ca/digital-trust/ -See the [MAINTAINERS.md](/Maintainers.md) file for a list of the current ACA-Py +See the [MAINTAINERS.md](../contributing/MAINTAINERS.md) file for a list of the current ACA-Py maintainers, and the guidelines for becoming a Maintainer. We'd love to have you join the team if you are willing and able to carry out the [duties of a -Maintainer](/MAINTAINERS.md#the-duties-of-a-maintainer). +Maintainer](../contributing/MAINTAINERS.md#the-duties-of-a-maintainer). ## Contributing -Pull requests are welcome! Please read our [contributions guide](https://github.com/hyperledger/aries-cloudagent-python/blob/main/CONTRIBUTING.md) and submit your PRs. We enforce [developer certificate of origin](https://developercertificate.org/) (DCO) commit signing — [guidance](https://github.com/apps/dco) on this is available. We also welcome issues submitted about problems you encounter in using ACA-Py. +Pull requests are welcome! Please read our [contributions guide](../contributing/CONTRIBUTING.md) and submit your PRs. We enforce [developer certificate of origin](https://developercertificate.org/) (DCO) commit signing — [guidance](https://github.com/apps/dco) on this is available. We also welcome issues submitted about problems you encounter in using ACA-Py. ## License -[Apache License Version 2.0](https://github.com/hyperledger/aries-cloudagent-python/blob/main/LICENSE) +[Apache License Version 2.0](LICENSE) diff --git a/docs/testing/AgentTracing.md b/docs/testing/AgentTracing.md index 819d9af6..2c2dabd2 100644 --- a/docs/testing/AgentTracing.md +++ b/docs/testing/AgentTracing.md @@ -10,7 +10,7 @@ Tracing is configured globally for the agent.
The following options can be specified when starting the aca-py agent: -``` +```bash --trace Generate tracing events. --trace-target Target for trace events ("log", "message", or http @@ -31,7 +31,7 @@ The `run_demo` script supports the following parameters and environment variable Environment variables: -``` +```bash TRACE_ENABLED Flag to enable tracing TRACE_TARGET_URL Host:port of endpoint to log trace events (e.g. logstash:9700) @@ -43,7 +43,7 @@ TRACE_TAG Tag to be included in all logged trace events Parameters: -``` +```bash --trace-log Enables tracing to the standard log output (sets TRACE_ENABLED, TRACE_TARGET, TRACE_TAG) @@ -53,7 +53,7 @@ Parameters: When running the Faber controller, tracing can be enabled using the `T` menu option: -``` +```bash Faber | Connected (1) Issue Credential (2) Send Proof Request @@ -85,7 +85,7 @@ When `Exchange Tracing` is `ON`, all exchanges will include tracing. ## Logging Trace Events to an ELK Stack -You can use the `ELK` stack in the [ELK Stack sub-directory](./elk-stack) as a target for trace events, just start the ELK stack using the docker-compose file and then in two separate bash shells, startup the demo as follows: +You can use the `ELK` stack in the [ELK Stack sub-directory](https://github.com/hyperledger/aries-cloudagent-python/blob/main/demo/elk-stack) as a target for trace events, just start the ELK stack using the docker-compose file and then in two separate bash shells, startup the demo as follows: ```bash DOCKER_NET=elknet TRACE_TARGET_URL=logstash:9700 ./run_demo faber --trace-http diff --git a/docs/testing/INTEGRATION-TESTS.md b/docs/testing/INTEGRATION-TESTS.md index dadf204a..b304050d 100644 --- a/docs/testing/INTEGRATION-TESTS.md +++ b/docs/testing/INTEGRATION-TESTS.md @@ -85,7 +85,6 @@ AGENT_PORT_OVERRIDE=8030 ./run_bdd -t (Note that since the test run multiple agents you require up to 60 available ports.) - ## Aca-py Integration Tests vs Aries Agent Test Harness (AATH) Aca-py Behave tests are based on the interoperability tests that are implemented in the [Aries Agent Test Harness (AATH)](https://github.com/hyperledger/aries-agent-test-harness). Both use [Behave (Gherkin)](https://behave.readthedocs.io/en/stable/) to execute tests against a running aca-py agent (or in the case of AATH, against any compatible Aries agent), however the aca-py integration tests focus on aca-py specific features. @@ -111,7 +110,7 @@ Aca-py integration tests use the same configuration approach as AATH, documented In addition to support for external schemas, credential data etc, the aca-py integration tests support configuration of the aca-py agents that are used to run the test. For example: -``` +```behave Scenario Outline: Present Proof where the prover does not propose a presentation of the proof and is acknowledged Given "3" agents | name | role | capabilities | @@ -176,4 +175,3 @@ To run a specific set of Aca-py integration tests (or exclude specific tests): ## Aries Agent Test Harness ACA-Py Tests This [video](https://youtu.be/1dwyEBxQqWI) is a presentation by Aries Cloud Agent Python (ACA-Py) developer @ianco about using the Aries Agent Test Harness for local pre-release testing of ACA-Py. Have a big change that you want to test with other Aries Frameworks? Following this guidance to run AATH tests with your under-development branch of ACA-Py. 
- diff --git a/docs/testing/Logging.md b/docs/testing/Logging.md index 006b95dc..587657c5 100644 --- a/docs/testing/Logging.md +++ b/docs/testing/Logging.md @@ -1,16 +1,16 @@ # Logging docs -Acapy supports multiple configurations of logging. +ACA-Py supports multiple configurations of logging. ## Log level -Acapy's logging is based on python's [logging lib](https://docs.python.org/3/howto/logging.html). +ACA-Py's logging is based on python's [logging lib](https://docs.python.org/3/howto/logging.html). Log levels `DEBUG`, `INFO` and `WARNING` are available. Other log levels fall back to `WARNING`. ## Per Tenant Logging -Supports writing of log messages to a file with `wallet_id` as the tenant identifier for each. To enable this, both multitenant mode [`--multitenant`] and writing to log file option [`--log-file`] are required. If both `--multitenant` and `--log-file` are not passed when starting up ACA-Py, then it will use `default_logging_config.ini` config [backward compatible] and not log at a per tenant level. +Supports writing of log messages to a file with `wallet_id` as the tenant identifier for each. To enable this, both multitenant mode (`--multitenant`) and writing to log file option (`--log-file`) are required. If both `--multitenant` and `--log-file` are not passed when starting up ACA-Py, then it will use `default_logging_config.ini` config (backward compatible) and not log at a per tenant level. ## Command Line Arguments @@ -60,9 +60,9 @@ Find an example in [default_logging_config.ini](https://github.com/hyperledger/a You can find more detail description in the [logging documentation](https://docs.python.org/3/howto/logging.html#configuring-logging). -For per tenant logging, find an example in [default_per_tenant_logging_config.ini](https://github.com/hyperledger/aries-cloudagent-python/tree/main/aries_cloudagent/config/default_per_tenant_logging_config.ini), which sets up `TimedRotatingFileMultiProcessHandler` and `StreamHandler` handlers. Custom `TimedRotatingFileMultiProcessHandler` handler supports the ability to cleanup logs by time and maintain backup logs and a custom JSON formatter for logs. The arguments for it such as `file name`, `when`, `interval` and `backupCount` can be passed as `args=('acapy.log', 'd', 7, 1,)` [also shown below]. Note: `backupCount` of 0 will mean all backup log files will be retained and not deleted at all. More details about these attributes can be found [here](https://docs.python.org/3/library/logging.handlers.html#timedrotatingfilehandler) +For per tenant logging, find an example in [default_per_tenant_logging_config.ini](https://github.com/hyperledger/aries-cloudagent-python/tree/main/aries_cloudagent/config/default_per_tenant_logging_config.ini), which sets up `TimedRotatingFileMultiProcessHandler` and `StreamHandler` handlers. Custom `TimedRotatingFileMultiProcessHandler` handler supports the ability to cleanup logs by time and maintain backup logs and a custom JSON formatter for logs. The arguments for it such as `file name`, `when`, `interval` and `backupCount` can be passed as `args=('acapy.log', 'd', 7, 1,)` (also shown below). Note: `backupCount` of 0 will mean all backup log files will be retained and not deleted at all.
More details about these attributes can be found [here](https://docs.python.org/3/library/logging.handlers.html#timedrotatingfilehandler) -``` +```ini [loggers] keys=root @@ -92,9 +92,9 @@ args=('acapy.log', 'd', 7, 1,) format=%(asctime)s %(wallet_id)s %(levelname)s %(pathname)s:%(lineno)d %(message)s ``` -For `DictConfig` [`dict` logging config file], find an example in [default_per_tenant_logging_config.yml](https://github.com/hyperledger/aries-cloudagent-python/tree/main/aries_cloudagent/config/default_per_tenant_logging_config.yml) with same attributes as `default_per_tenant_logging_config.ini` file. +For `DictConfig` (`dict` logging config file), find an example in [default_per_tenant_logging_config.yml](https://github.com/hyperledger/aries-cloudagent-python/tree/main/aries_cloudagent/config/default_per_tenant_logging_config.yml) with same attributes as `default_per_tenant_logging_config.ini` file. -``` +```yaml version: 1 formatters: default: diff --git a/docs/testing/Troubleshooting.md b/docs/testing/Troubleshooting.md index 206730a5..cdd3321e 100644 --- a/docs/testing/Troubleshooting.md +++ b/docs/testing/Troubleshooting.md @@ -28,11 +28,13 @@ If that is the cause -- have you started your local ledger, and did it startup p - Any errors in the startup of von-network? - Is the von-network webserver (usually at `https:/localhost:9000`) accessible? If so, can you click on and see the Genesis File? - Do you even need a local ledger? If not, you can use a public sandbox ledger, - such as the [Dev Greenlight ledger](), likely by just prefacing your ACA-Py - command with `LEDGER_URL=http://dev.greenlight.bcovrin.vonx.io`. For example, - when running the Alice-Faber demo in the [demo](../../demo) folder, you can run (for + such as the [BCovrin Test ledger], likely by just prefacing your ACA-Py + command with `LEDGER_URL=http://test.bcovrin.vonx.io`. For example, + when running the Alice-Faber demo in the [demo](../demo/README.md) folder, you can run (for example), the Faber agent using the command: - `LEDGER_URL=http://dev.greenlight.bcovrin.vonx.io ./run_demo faber` + `LEDGER_URL=http://test.bcovrin.vonx.io ./run_demo faber` + +[BCovrin Test ledger]: http://test.bcovrin.vonx.io ### Any Firewalls diff --git a/featuresDevReadMe.md b/featuresDevReadMe.md new file mode 100644 index 00000000..df6429b5 --- /dev/null +++ b/featuresDevReadMe.md @@ -0,0 +1,245 @@ +# Developer's Read Me for Hyperledger Aries Cloud Agent - Python + +See the [README](../release/acapy-README.md) for details about this repository and information about how the Aries Cloud Agent - Python fits into the Aries project and relates to Indy. 
+ +## Table of Contents + +- [Introduction](#introduction) +- [Developer Demos](#developer-demos) +- [Running](#running) + - [Configuring ACA-PY: Command Line Parameters](#configuring-aca-py-command-line-parameters) + - [Docker](#docker) + - [Locally Installed](#locally-installed) + - [About ACA-Py Command Line Parameters](#about-aca-py-command-line-parameters) + - [Provisioning Secure Storage](#provisioning-secure-storage) + - [Mediation](#mediation) + - [Multi-tenancy](#multi-tenancy) + - [JSON-LD Credentials](#json-ld-credentials) +- [Developing](#developing) + - [Prerequisites](#prerequisites) + - [Running In A Dev Container](#running-in-a-dev-container) + - [Running Locally](#running-locally) + - [Logging](#logging) + - [Running Tests](#running-tests) + - [Running Aries Agent Test Harness Tests](#running-aries-agent-test-harness-tests) +- [Development Workflow](#development-workflow) +- [Publishing Releases](#publishing-releases) +- [Dynamic Injection of Services](#dynamic-injection-of-services) + +## Introduction + +Aries Cloud Agent Python (ACA-Py) is a configurable, extensible, non-mobile Aries agent that implements an easy way for developers to build decentralized identity services that use verifiable credentials. + +The information on this page assumes you are developer with a background in +decentralized identity, Aries, DID Methods, and verifiable credentials, +especially AnonCreds. If you aren't familiar with those concepts and projects, +please use our [Getting Started Guide](../gettingStarted/README.md) +to learn more. + +## Developer Demos + +To put ACA-Py through its paces at the command line, checkout our [demos](../demo/README.md) page. + +## Running + +### Configuring ACA-PY: Command Line Parameters + +ACA-Py agent instances are configured through the use of command line +parameters, environment variables and/or YAML files. All of the configurations +settings can be managed using any combination of the three methods (command line +parameters override environment variables override YAML). Use the `--help` +option to discover the available command line parameters. There are a lot of +them--for good and bad. + +### Docker + +To run a docker container based on the code in the current repo, use the following commands from the root folder of the repository to check the version, list the available modes of operation, and see all of the command line parameters: + +```bash +scripts/run_docker --version +scripts/run_docker --help +scripts/run_docker provision --help +scripts/run_docker start --help +``` + +### Locally Installed + +If you installed the PyPi package, the executable `aca-py` should be available on your PATH. + +Use the following commands from the root folder of the repository to check the version, list the available modes of operation, and see all of the command line parameters: + +```bash +aca-py --version +aca-py --help +aca-py provision --help +aca-py start --help +``` + +If you get an error about a missing module `indy` (e.g. `ModuleNotFoundError: No module named 'indy'`) when running `aca-py`, you will need to install the Indy libraries from the command line: + +```bash +pip install python3_indy +``` + +Once that completes successfully, you should be able to run `aca-py --version` and the other examples above. + +### About ACA-Py Command Line Parameters + +ACA-Py invocations are separated into two types - initially provisioning an agent (`provision`) and starting a new agent process (`start`). 
This separation enables not having to pass in some encryption-related parameters required for provisioning when starting an agent instance. This improves security in production deployments. + +When starting an agent instance, at least one _inbound_ and one _outbound_ transport MUST be specified. + +For example: + +```bash +aca-py start --inbound-transport http 0.0.0.0 8000 \ + --outbound-transport http +``` + +or + +```bash +aca-py start --inbound-transport http 0.0.0.0 8000 \ + --inbound-transport ws 0.0.0.0 8001 \ + --outbound-transport ws \ + --outbound-transport http +``` + +ACA-Py ships with both inbound and outbound transport drivers for `http` and `ws` (websockets). Additional transport drivers can be added as pluggable implementations. See the existing implementations in the [transports module](https://github.com/hyperledger/aries-cloudagent-python/tree/main/aries_cloudagent/transport) for getting started on adding a new transport. + +Most configuration parameters are provided to the agent at startup. Refer to the `Running` sections above for details on listing the available command line parameters. + +### Provisioning Secure Storage + +It is possible to provision a secure storage (sometimes called a wallet--but not +the same as a mobile wallet app) before running an agent to avoid passing in the +secure storage seed on every invocation of an agent (e.g. on every `aca-py start ...`). + +```bash +aca-py provision --wallet-type askar --seed $SEED +``` + +For additional `provision` options, execute `aca-py provision --help`. + +Additional information about secure storage options and configuration settings can be found [here](../deploying/Databases.md). + +### Mediation + +ACA-Py can also run in mediator mode - ACA-Py can be run _as_ a mediator (it can mediate connections for other agents), or it can connect to an external mediator to mediate its own connections. See the [docs on mediation](./Mediation.md) for more info. + +### Multi-tenancy + +ACA-Py can also be started in multi-tenant mode. This allows the agent to serve multiple tenants, that each have their own wallet. See the [docs on multi-tenancy](./Multitenancy.md) for more info. + +### JSON-LD Credentials + +ACA-Py can issue W3C Verifiable Credentials using Linked Data Proofs. See the [docs on JSON-LD Credentials](./JsonLdCredentials.md) for more info. + +## Developing + +### Prerequisites + +[Docker](https://www.docker.com) must be installed to run software locally and to run the test suite. + +### Running In A Dev Container + +The dev container environment is a great way to deploy agents quickly with code changes and an interactive debug session. Detailed information can be found in the [Docs On Devcontainers](./devcontainer.md). It is specific for vscode, so if you prefer another code editor or IDE you will need to figure it out on your own, but it is highly recommended to give this a try. + +One thing to be aware of is, unlike the demo, none of the steps are automated. You will need to create public dids, connections and all the other steps yourself. Using the demo and studying the flow and then copying them with your dev container debug session is a great way to learn how everything works. + +### Running Locally + +Another way to develop locally is by using the provided Docker scripts to run the ACA-Py software. 
+ +```bash +./scripts/run_docker start +``` + +For example: + +```bash +./scripts/run_docker start --inbound-transport http 0.0.0.0 10000 --outbound-transport http --debug --log-level DEBUG +``` + +To enable the [ptvsd](https://github.com/Microsoft/ptvsd) Python debugger for Visual Studio/VSCode use the `--debug` command line parameter. + +Any ports you will be using from the docker container should be published using the `PORTS` environment variable. For example: + +```bash +PORTS="5000:5000 8000:8000 10000:10000" ./scripts/run_docker start --inbound-transport http 0.0.0.0 10000 --outbound-transport http --debug --log-level DEBUG +``` + +Refer to [the previous section](#running) for instructions on how to run ACA-Py. + +### Logging + +You can find more details about logging and log levels [here](../testing/Logging.md). + +### Running Tests + +To run the ACA-Py test suite, use the following script: + +```bash +./scripts/run_tests +``` + +To run the ACA-Py test suite with ptvsd debugger enabled: + +```bash +./scripts/run_tests --debug +``` + +To run specific tests pass parameters as defined by [pytest](https://docs.pytest.o../contributing/stable/usage.html#specifying-tests-selecting-tests): + +```bash +./scripts/run_tests aries_cloudagent/protocols/connections +``` + +To run the tests including [Indy SDK](https://github.com/hyperledger/indy-sdk) and related dependencies, run the script: + +```bash +./scripts/run_tests_indy +``` + +### Running Aries Agent Test Harness Tests + +You can run a full suite of integration tests using the [Aries Agent Test Harness (AATH)](https://github.com/hyperledger/aries-agent-test-harness). + +Check out and run AATH tests as follows (this tests the aca-py `main` branch): + +```bash +git clone https://github.com/hyperledger/aries-agent-test-harness.git +cd aries-agent-test-harness +./manage build -a acapy-main +./manage run -d acapy-main -t @AcceptanceTest -t ~@wip +``` + +The `manage` script is described in detail [here](https://github.com/hyperledger/aries-agent-test-harness#the-manage-bash-script), including how to modify the AATH code to run the tests against your aca-py repo/branch. + +## Development Workflow + +We use [Ruff](https://github.com/astral-sh/ruff) to enforce a coding style guide. + +We use [Black](https://black.readthedocs.../contributing/stable/) to automatically format code. + +Please write tests for the work that you submit. + +Tests should reside in a directory named `tests` alongside the code under test. Generally, there is one test file for each file module under test. Test files _must_ have a name starting with `test_` to be automatically picked up the test runner. + +There are some good examples of various test scenarios for you to work from including mocking external imports and working with async code so take a look around! + +The test suite also displays the current code coverage after each run so you can see how much of your work is covered by tests. Use your best judgement for how much coverage is sufficient. + +Please also refer to the [contributing guidelines](../contributing/CONTRIBUTING.md) and [code of conduct](../contributing/CODE_OF_CONDUCT.md). + +## Publishing Releases + +The [publishing](https://github.com/hyperledger/aries-cloudagent-python/blob/main/PUBLISHING.md) document provides information on tagging a release and publishing the release artifacts to PyPi. 
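The next section, Dynamic Injection of Services, describes how ACA-Py resolves implementations of base classes at runtime. As a rough feel for that pattern, here is a deliberately simplified, self-contained sketch that mirrors the calls named in that section (`bind_instance`, `bind_provider`, `inject`, `required=False`); it is a stand-in for illustration only, not the actual ACA-Py `RequestContext`/`BaseProvider` code, and `InMemoryWallet` is an invented class name.

```python
from typing import Any, Callable, Dict, Optional, Type


class BaseWallet:
    """Abstract base class that handlers depend on."""


class InMemoryWallet(BaseWallet):
    """Hypothetical concrete implementation, invented for this sketch."""


class Injector:
    """Minimal registry mirroring the injection calls described below."""

    def __init__(self) -> None:
        self._instances: Dict[Type, Any] = {}
        self._providers: Dict[Type, Callable[[], Any]] = {}

    def bind_instance(self, base_cls: Type, instance: Any) -> None:
        # Register a previously constructed (singleton) object.
        self._instances[base_cls] = instance

    def bind_provider(self, base_cls: Type, provider: Callable[[], Any]) -> None:
        # Register a callable that constructs the implementation on demand.
        self._providers[base_cls] = provider

    def inject(self, base_cls: Type, *, required: bool = True) -> Optional[Any]:
        if base_cls in self._instances:
            return self._instances[base_cls]
        if base_cls in self._providers:
            return self._providers[base_cls]()
        if required:
            raise KeyError(f"No implementation bound for {base_cls.__name__}")
        return None


injector = Injector()
injector.bind_instance(BaseWallet, InMemoryWallet())
wallet = injector.inject(BaseWallet)             # returns the bound InMemoryWallet
missing = injector.inject(dict, required=False)  # nothing bound -> None
print(type(wallet).__name__, missing)
```

In ACA-Py itself, the bindings are set up within `conductor.py`, as described in the section that follows.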
+ +## Dynamic Injection of Services + +The Agent employs a dynamic injection system whereby providers of base classes are registered with the `RequestContext` instance, currently within `conductor.py`. Message handlers and services request an instance of the selected implementation using `context.inject(BaseClass)`; for instance the wallet instance may be injected using `wallet = context.inject(BaseWallet)`. The `inject` method normally throws an exception if no implementation of the base class is provided, but can be called with `required=False` for optional dependencies (in which case a value of `None` may be returned). + +Providers are registered with either `context.injector.bind_instance(BaseClass, instance)` for previously-constructed (singleton) object instances, or `context.injector.bind_provider(BaseClass, provider)` for dynamic providers. In some cases it may be desirable to write a custom provider which switches implementations based on configuration settings, such as the wallet provider. + +The `BaseProvider` classes in the `config.provider` module include `ClassProvider`, which can perform dynamic module inclusion when given the combined module and class name as a string (for instance `aries_cloudagent.wallet.indy.IndyWallet`). `ClassProvider` accepts additional positional and keyword arguments to be passed into the class constructor. Any of these arguments may be an instance of `ClassProvider.Inject(BaseClass)`, allowing dynamic injection of dependencies when the class instance is instantiated. diff --git a/mkdocs.yml b/mkdocs.yml index 1c6ff200..0a34eb80 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -106,6 +106,7 @@ nav: - Hyperledger Indy Endorser In Action: demo/Endorser.md - Using W3C JSON-LD Credentials: demo/AliceWantsAJsonCredential.md - DIY -- ACME Controller Workshop: demo/AcmeDemoWorkshop.md + - Aries Using Postman Demo: demo/AriesPostmanDemo.md - Getting Started: - Becoming an Indy/Aries Developer: gettingStarted/README.md - Hyperledger Indy Basics: gettingStarted/IndyBasics.md @@ -115,10 +116,10 @@ nav: - Aries Architecture: gettingStarted/AriesAgentArchitecture.md - Aries Messaging: gettingStarted/AriesMessaging.md - Aries Developer Demos: gettingStarted/AriesDeveloperDemos.md - - TODO Agent Connections: gettingStarted/AgentConnections.md - - TODO Issuing AnonCreds Credentials: gettingStarted/IssuingIndyCredentials.md - - TODO Presenting AnonCreds Proofs: gettingStarted/PresentingIndyProofs.md - - TODO Making Your Own ACA-Py Agent: gettingStarted/YourOwnAriesAgent.md + - Agent Connections: gettingStarted/AgentConnections.md + - Issuing AnonCreds Credentials: gettingStarted/IssuingAnonCredsCredentials.md + - Presenting AnonCreds Proofs: gettingStarted/PresentingAnonCredsProofs.md + - Making Your Own ACA-Py Agent: gettingStarted/YourOwnAriesAgent.md - Aries Developer Options: gettingStarted/IndyAriesDevOptions.md - DIDComm Messaging: gettingStarted/DIDcommMsgs.md - DIDComm Message Routing: gettingStarted/RoutingEncryption.md @@ -146,3 +147,4 @@ nav: - Hyperledger Code of Conduct: contributing/CODE_OF_CONDUCT.md - Security Vulnerability Reporting: contributing/SECURITY.md - Publishing an ACA-Py Release: contributing/PUBLISHING.md + - Updating the ACA-Py ReadTheDocs Site: UpdateRTD.md diff --git a/scripts/copyFixMDs.sh b/scripts/copyFixMDs.sh index 904a880f..899e1f63 100755 --- a/scripts/copyFixMDs.sh +++ b/scripts/copyFixMDs.sh @@ -8,20 +8,20 @@ # - Passed in is the Release Versions -- "main" or "0.8.0", etc. 
# - Delete the existing content of the /docs folder in this repo # - Define the per release Mkdocs navigation for the site and put it in place of the current mkdocs YML +# - For main starting from 2024.02.11, this step is eliminated and edits to the navigation are done directly # - For each folder that will be in the /docs folder of this rep0: # - For each file the is to be in the folder within the /docs folder # - Either directly copy, or copy with edits applied the source file to the /docs folder # - Edits are needed to "fix" the links to work when the file is in the new place in the repo -# See the edits below for the types of changes needed. Usually, they are to change absolute -# links in the ACA-Py folder to relative links in this folder, as well as to handle changes -# in where the docs are placed. +# See the edits below for the types of changes needed. Most are to change relative links +# in the ACA-Py document to because one of the files has moved during the processing. # # To find broken links: -# - Run mkdocs locally and click on links and images that result in 404s +# - Most are found in the running `mkdocs build` +# - After that, run mkdocs locally and click on links and images that result in 404s # - Once you publish the docs, run a "broken link finder" tool to find others # To find missing documentation files -# - Scan the /tmp folder for all .md files and see if you have them in the /docs folder -# - a script to compare the list of .md files in /tmp and /docs is planned +# - Run the `./scripts/diffMDs.md` to find docs in the ACA-Py release that AREN'T in these docs. VERSION=$1 @@ -31,86 +31,13 @@ echo Building pages for ACA-Py Version ${VERSION} # Clean out the docs folder rm -rf docs/* -# Replace the nav with the one for main -sed '/^nav:/,$d' mkdocs.yml >mkdocs.yml.tmp -cat << EOF >>mkdocs.yml.tmp -nav: -- Welcome!: - - Welcome: README.md - - ACA-Py README: release/acapy-README.md - - Release Notes: release/CHANGELOG.md -- Features: - - Developer Introduction: features/DevReadMe.md - - DevContainer Support: features/devcontainer.md - - Supported Aries Interop Profiles and RFCs: features/SupportedRFCs.md - - The Admin API: features/AdminAPI.md - - ACA-Py Plugins: features/PlugIns.md - - Multitenant ACA-Py: features/Multitenancy.md - - DID Methods: features/DIDMethods.md - - DID Resolution: features/DIDResolution.md - - Configuring Multiple Indy Ledgers: features/Multiledger.md - - Automatically Endorsing Indy Transations: features/Endorser.md - - Using W3C JSON-LD Signed Credentials: features/JsonLdCredentials.md - - Using SD-JWTs: features/SelectiveDisclosureJWTs.md - - AnonCreds Presentation Validation: features/AnoncredsProofValidation.md - - Multiple Credential Types: features/Multicredentials.md - - Code Generation with the Open API: features/UsingOpenAPI.md - - ACA-Py as a DIDComm Mediator: features/Mediation.md -- Demos: - - The Alice-Faber Demo: demo/README.md - - Open API Tutorial: demo/AriesOpenAPIDemo.md - - Alice Gets a Phone: demo/AliceGetsAPhone.md - - Hyperledger Indy Endorser In Action: demo/Endorser.md - - Using W3C JSON-LD Credentials: demo/AliceWantsAJsonCredential.md - - DIY -- ACME Controller Workshop: demo/AcmeDemoWorkshop.md -- Getting Started: - - Becoming an Indy/Aries Developer: gettingStarted/README.md - - Hyperledger Indy Basics: gettingStarted/IndyBasics.md - - Hyperledger Aries Basics: gettingStarted/AriesBasics.md - - Decentralized Identity Demos: gettingStarted/DecentralizedIdentityDemos.md - - Aries - The Big Picture: 
gettingStarted/AriesBigPicture.md - - Aries Architecture: gettingStarted/AriesAgentArchitecture.md - - Aries Messaging: gettingStarted/AriesMessaging.md - - Aries Developer Demos: gettingStarted/AriesDeveloperDemos.md - - TODO Agent Connections: gettingStarted/AgentConnections.md - - TODO Issuing AnonCreds Credentials: gettingStarted/IssuingIndyCredentials.md - - TODO Presenting AnonCreds Proofs: gettingStarted/PresentingIndyProofs.md - - TODO Making Your Own ACA-Py Agent: gettingStarted/YourOwnAriesAgent.md - - Aries Developer Options: gettingStarted/IndyAriesDevOptions.md - - DIDComm Messaging: gettingStarted/DIDcommMsgs.md - - DIDComm Message Routing: gettingStarted/RoutingEncryption.md - - DIDComm Message Routing Example: gettingStarted/AriesRoutingExample.md - - TODO Connecting to an Indy Network: gettingStarted/ConnectIndyNetwork.md - - AnonCreds Credential Revocation: gettingStarted/CredentialRevocation.md -- Deploying: - - Deployment Model: deploying/deploymentModel.md - - Upgrading ACA-Py: deploying/UpgradingACA-Py.md - - Indy SDK to Askar Migration: deploying/IndySDKtoAskarMigration.md - - The Use of Poetry in ACA-Py: deploying/Poetry.md - - ACA-Py Container Images: deploying/ContainerImagesAndGithubActions.md - - Databases: deploying/Databases.md - - Persistent Queues and Caching: deploying/RedisPlugins.md - - The askar-anoncreds Wallet Type: deploying/AnonCredsWalletType.md -- Testing/Troubleshooting: - - Running and Creating Unit Tests: testing/UnitTests.md - - Managing Logging: testing/Logging.md - - ACA-Py Integration Tests: testing/INTEGRATION-TESTS.md - - Protocol Tracing: testing/AgentTracing.md - - Troubleshooting: testing/Troubleshooting.md -- Contributing: - - How to Contribute: contributing/CONTRIBUTING.md - - Maintainers: contributing/MAINTAINERS.md - - Hyperledger Code of Conduct: contributing/CODE_OF_CONDUCT.md - - Security Vulnerability Reporting: contributing/SECURITY.md - - Publishing an ACA-Py Release: contributing/PUBLISHING.md -EOF -mv mkdocs.yml.tmp mkdocs.yml +# The mkdocs nav used to be built here. Now managed in the mkdocs.yml in the root # Root folder -- README.md # For debugging the "sed" command, you can uncomment the "diff" at the end of the -# sed command to see the differences from running the sed. +# sed command to see the differences from running the sed. Remember to put the back after! FOLDER=docs -# Introduction file is in this repo, not ACA-Py +# The Introduction file is in this repo, not ACA-Py so pull it in. 
if [ "${VERSION}" == "main" ]; then cp Introduction.md ${FOLDER}/README.md else @@ -119,160 +46,46 @@ else ${FILE} > ${FOLDER}/README.md; # diff tmp/${FILE} ${FOLDER}/${FILE} fi -# Release documents +# Release documents -- documents about this specific release +# Starts with the ACA-Py readme, and includes some other files from the root ACA-Py folder +# Fix up the links FOLDER=docs/release mkdir ${FOLDER} -FILE=README.md; sed -e 's#\./\(SupportedRFCs\).md#../../features/\1#' \ - -e 's#\./\(Multitenancy\).md#../../features/\1#' \ - -e 's#\./\(Mediation\).md#../../features/\1#' \ - -e 's#\(Endorser\).md#../../features/\1#' \ - -e 's#\(Troubleshooting\).md#../../testing/\1#' \ - -e 's#/demo/\(README\).md#../../demo/#' \ - -e 's#/docs/GettingStartedAriesDev/\(README\).md#../../gettingStarted/\1#' \ - -e 's#/docs/GettingStartedAriesDev/\(PlugIns\).md#../../features/\1/#' \ +FILE=README.md; sed \ + -e 's#docs/\(.*/.*md\)#../\1#g' \ + -e 's#docs/\(.*/.*md\)#../\1#g' \ + -e 's#(./\(.*\)\.md#(../contributing/\1.md#g' \ tmp/${FILE} > ${FOLDER}/acapy-${FILE}; # diff tmp/${FILE} ${FOLDER}/acapy-${FILE} cp tmp/aca-py_architecture.png ${FOLDER} -# Special handling for ChangeLog -- add a title at the top of the file -# echo "# Release Notes" >${FOLDER}/CHANGELOG.md +cp tmp/CHANGELOG.md ${FOLDER} FILE=CHANGELOG.md; sed -e '1s/^/# Release Notes\n\n/' \ - -e 's#\(Endorser\).md#../../features/\1#g' \ - -e 's#./\(Mediation\).md#../../features/\1#g' \ - -e 's#./\(Multitenancy\).md#../../features/\1#g' \ - -e 's#\/\(SupportedRFCs\).md#../../features/\1#' \ - -e 's#.\/\(UpgradingACA-Py\).md#../../deploying/\1#' \ + -e 's#\(Endorser\).md#../features/\1#g' \ + -e 's#./\(Mediation\).md#../features/\1#g' \ + -e 's#./\(Multitenancy\).md#../features/\1#g' \ + -e 's#\/\(SupportedRFCs\).md#../features/\1#' \ + -e 's#.\/\(UpgradingACA-Py\).md#../deploying/\1#' \ -e 's#(victorlee0505)#(https://github.com/victorlee0505)#' \ -e 's#^ - # - #' \ tmp/${FILE} >${FOLDER}/${FILE}; # diff tmp/${FILE} ${FOLDER}/${FILE} -# Assets -FOLDER=docs/assets -mkdir ${FOLDER} -cp tmp/docs/assets/*.png ${FOLDER} - -# ACA-Py Features -FOLDER=docs/features -mkdir ${FOLDER} -FILE=DevReadMe.md; sed -e 's#(README.md)#(/README.md)#' \ - -e "s#\(Databases\).md#../../deploying/\1#g" \ - -e "s#\(Logging\).md#../../testing/\1/#g" \ - -e "s#/docs/GettingStartedAriesDev/README.md#../../gettingStarted/#" \ - -e "s#/\(CONTRIBUTING\).md#../../contributing/\1/#" \ - -e "s#/\(CODE_OF_CONDUCT\).md#../../contributing/\1/#" \ - -e "s/#Running/#running/g" \ - -e "s#(/README.md)#(../../release/acapy-README)#" \ - -e "s#/docs/GettingStartedAriesDev/\(AriesDeveloperDemos\).md#../../\1#" \ - -e "s#\(aries_cloudagent/transport\)#https://github.com/hyperledger/aries-cloudagent-python/tree/${VERSION}/\1#" \ - tmp/${FILE} > ${FOLDER}/${FILE}; # diff tmp/${FILE} ${FOLDER}/${FILE} -FILE=devcontainer.md; sed -e "s#(\(.devcontainer/devcontainer.json\))#(https://github.com/hyperledger/aries-cloudagent-python/blob/main/\1)#" \ - -e "s#(./\(aries_cloudagent\))#(https://github.com/hyperledger/aries-cloudagent-python/tree/main/\1)#" \ - -e "s#(/\(DevReadMe\).md)#(../\1)#" \ - tmp/${FILE} > ${FOLDER}/${FILE}; # diff tmp/${FILE} ${FOLDER}/${FILE} -FILE=SupportedRFCs.md; sed -e "s#./\(IndySDKtoAskarMigration\).md#../../deploying/\1/#g" \ - tmp/${FILE} > ${FOLDER}/${FILE}; # diff tmp/${FILE} ${FOLDER}/${FILE} -FILE=AdminAPI.md; sed -e "s#/docs/assets/#../../assets/#" \ - tmp/${FILE} > ${FOLDER}/${FILE}; # diff tmp/${FILE} ${FOLDER}/${FILE} -cp tmp/Multitenancy.md ${FOLDER} -cp 
tmp/docs/GettingStartedAriesDev/SelectiveDisclosureJWTs.md ${FOLDER} -cp tmp/DIDMethods.md ${FOLDER} -cp tmp/DIDResolution.md ${FOLDER} -cp tmp/Multiledger.md ${FOLDER} -cp tmp/docs/GettingStartedAriesDev/PlugIns.md ${FOLDER} -cp tmp/Mediation.md ${FOLDER} -FILE=Endorser.md; sed -e 's#\./docs/assets/endorse#../features/endorse#' \ - tmp/${FILE} > ${FOLDER}/${FILE}; # diff tmp/${FILE} ${FOLDER}/${FILE} -cp tmp/docs/assets/*.png ${FOLDER} -cp tmp/JsonLdCredentials.md ${FOLDER} -cp tmp/AnoncredsProofValidation.md ${FOLDER} -FILE=UsingOpenAPI.md; sed -e 's#AdminApi.md#AdminAPI.md#' \ - tmp/${FILE} > ${FOLDER}/${FILE}; # diff tmp/${FILE} ${FOLDER}/${FILE} -cp tmp/Multicredentials.md ${FOLDER} +# Direct copy of the all of the ACA-Py docs +cp -r tmp/docs/UpdateRTD.md tmp/docs/assets tmp/docs/demo tmp/docs/deploying tmp/docs/design tmp/docs/gettingStarted tmp/docs/features tmp/docs/testing docs/ -# Deploying -FOLDER=docs/deploying -mkdir ${FOLDER} -cp tmp/ContainerImagesAndGithubActions.md ${FOLDER} -cp tmp/IndySDKtoAskarMigration.md ${FOLDER} -cp tmp/UpgradingACA-Py.md ${FOLDER} -cp tmp/docs/Poetry.md ${FOLDER} -FILE=deploymentModel.md; sed -e "s#/docs/assets/#../../assets/#" \ - tmp/${FILE} > ${FOLDER}/${FILE}; # diff tmp/${FILE} ${FOLDER}/${FILE} -FILE=Databases.md ; sed -e "s#demo/demo-args.yaml#https://github.com/hyperledger/aries-cloudagent-python/tree/${VERSION}/demo/demo-args.yaml#" \ - tmp/${FILE} > ${FOLDER}/${FILE}; # diff tmp/${FILE} ${FOLDER}/${FILE} -cp tmp/RedisPlugins.md ${FOLDER} -cp tmp/AnonCredsWalletType.md ${FOLDER} - -# Demos -FOLDER=docs/demo -mkdir ${FOLDER} -cp tmp/demo/AcmeDemoWorkshop.md ${FOLDER} -cp tmp/demo/AliceWantsAJsonCredential.md ${FOLDER} -FILE=AliceWantsAJsonCredential.md; sed -e "s#../\(JsonLdCredentials\).md#../../features/\1#" tmp/demo/${FILE} > ${FOLDER}/${FILE} -FILE=README.md; sed -e "s#runners/#https://github.com/hyperledger/aries-cloudagent-python/tree/${VERSION}/demo/runners/#g" \ - -e "s#:uhttps#https#" \ - tmp/demo/${FILE} > ${FOLDER}/${FILE}; # diff tmp/demo/${FILE} ${FOLDER}/${FILE} -cp tmp/demo/Endorser.md ${FOLDER} -FILE=AliceGetsAPhone.md; sed -e 's#\./collateral#../../demo/collateral#g' \ - -e "s#(/demo)#(../../demo)#" \ - tmp/demo/${FILE} > ${FOLDER}/${FILE}; # diff tmp/demo/${FILE} ${FOLDER}/${FILE} -FILE=AriesOpenAPIDemo.md; sed -e 's#\./collateral#../../demo/collateral#g' \ - -e 's#\.\./\(AdminAPI\).md#../../features/\1#' \ - tmp/demo/${FILE} > ${FOLDER}/${FILE}; # diff tmp/demo/${FILE} ${FOLDER}/${FILE} -cp -r tmp/demo/collateral ${FOLDER} - -# Getting Started -FOLDER=docs/gettingStarted -mkdir ${FOLDER} -FILE=README.md; sed -e 's#DIDCommMsgs.md#DIDcommMsgs.md#g' \ - -e "s#PlugIns.md#../features/PlugIns/#g" \ - tmp/docs/GettingStartedAriesDev/${FILE} > ${FOLDER}/${FILE}; # diff tmp/docs/GettingStartedAriesDev/${FILE} ${FOLDER}/${FILE} -cp tmp/docs/GettingStartedAriesDev/IndyBasics.md ${FOLDER} -cp tmp/docs/GettingStartedAriesDev/AriesBasics.md ${FOLDER} -cp tmp/docs/GettingStartedAriesDev/DecentralizedIdentityDemos.md ${FOLDER} -cp tmp/docs/GettingStartedAriesDev/AriesBigPicture.md ${FOLDER} -FILE=AriesAgentArchitecture.md; sed -e 's#../\(deploymentModel\).md#../../deploying/\1#g' \ - -e "s#/docs/assets/#../../assets/#" \ - tmp/docs/GettingStartedAriesDev/${FILE} > ${FOLDER}/${FILE}; # diff tmp/docs/GettingStartedAriesDev/${FILE} ${FOLDER}/${FILE} -cp tmp/docs/GettingStartedAriesDev/AriesMessaging.md ${FOLDER} -FILE=/AriesDeveloperDemos.md; sed -e 's#../../demo#../../demo#g' \ - -e "s#\(AriesOpenAPIDemo\).md#\1#" \ - -e 
"s#demo/README.md#demo/#" \ - tmp/docs/GettingStartedAriesDev/${FILE} > ${FOLDER}/${FILE}; # diff tmp/docs/GettingStartedAriesDev/${FILE} ${FOLDER}/${FILE} -cp tmp/docs/GettingStartedAriesDev/AgentConnections.md ${FOLDER} -cp tmp/docs/GettingStartedAriesDev/IssuingIndyCredentials.md ${FOLDER} -cp tmp/docs/GettingStartedAriesDev/PresentingIndyProofs.md ${FOLDER} -cp tmp/docs/GettingStartedAriesDev/YourOwnAriesAgent.md ${FOLDER} -cp tmp/docs/GettingStartedAriesDev/IndyAriesDevOptions.md ${FOLDER} -cp tmp/docs/GettingStartedAriesDev/DIDcommMsgs.md ${FOLDER} -cp tmp/docs/GettingStartedAriesDev/RoutingEncryption.md ${FOLDER} -cp tmp/docs/GettingStartedAriesDev/AriesRoutingExample.md ${FOLDER} -cp tmp/docs/GettingStartedAriesDev/ConnectIndyNetwork.md ${FOLDER} -cp tmp/docs/GettingStartedAriesDev/CredentialRevocation.md ${FOLDER} - -# Testing and Troubleshooting -FOLDER=docs/testing -mkdir ${FOLDER} -cp tmp/UnitTests.md ${FOLDER} -FILE=Logging.md ; sed -e "s#demo/demo-args.yaml#https://github.com/hyperledger/aries-cloudagent-python/tree/${VERSION}/demo/demo-args.yaml#" \ - -e "s#(\(aries_cloudagent/config/default_logging_config.ini\))#(https://github.com/hyperledger/aries-cloudagent-python/tree/${VERSION}/\1)#g" \ - -e "s#(\(aries_cloudagent/config/default_per_tenant_logging_config.ini\))#(https://github.com/hyperledger/aries-cloudagent-python/tree/${VERSION}/\1)#g" \ - -e "s#(\(aries_cloudagent/config/default_per_tenant_logging_config.yml\))#(https://github.com/hyperledger/aries-cloudagent-python/tree/${VERSION}/\1)#g" \ - tmp/${FILE} > ${FOLDER}/${FILE}; # diff tmp/${FILE} ${FOLDER}/${FILE} -FILE=AgentTracing.md ; sed -e "s#./\(EFK-stack\)#https://github.com/hyperledger/aries-cloudagent-python/tree/${VERSION}/demo/\1#" \ - tmp/demo/${FILE} > ${FOLDER}/${FILE}; # diff tmp/demo/${FILE} ${FOLDER}/${FILE} -cp tmp/demo/INTEGRATION-TESTS.md ${FOLDER} -FILE=Troubleshooting.md; sed -e "s#(demo)#(../../demo)#" \ - tmp/${FILE} > ${FOLDER}/${FILE}; # diff tmp/${FILE} ${FOLDER}/${FILE} +# Fix the links to the files not in the root +FILE=DevReadMe.md; FOLDER=docs/features; sed \ + -e 's#\.\./\.\./README.md#../release/acapy-README.md#g' \ + -e 's#\.\./\.\./#../contributing/#g' \ + tmp/${FOLDER}/${FILE} > ${FOLDER}/${FILE}; # diff tmp/${FOLDER}/${FILE} ${FOLDER}/${FILE} -# Contributing +# Contributing docs are all in the root, so they are easily found in the ACA-Py repo, so they need to be copied. FOLDER=docs/contributing mkdir ${FOLDER} cp tmp/CONTRIBUTING.md ${FOLDER} cp tmp/MAINTAINERS.md ${FOLDER} cp tmp/CODE_OF_CONDUCT.md ${FOLDER} cp tmp/SECURITY.md ${FOLDER} -FILE=PUBLISHING.md; sed -e "s#(aries_cloudagent/#(https://github.com/hyperledger/aries-cloudagent-python/tree/${VERSION}/#" \ - -e "s#(open-api/#(https://github.com/hyperledger/open-api/tree/${VERSION}/#" \ - -e "s#(pyproject.toml)#(https://github.com/hyperledger/aries-cloudagent-python/tree/${VERSION}/pyproject.toml)#" \ +FILE=PUBLISHING.md; sed \ + -e 's#docs/\(.*/.*md\)#../\1#g' \ tmp/${FILE} > ${FOLDER}/${FILE}; # diff tmp/${FILE} ${FOLDER}/${FILE} # Update all references to "main" to "${VERSION}" in Github pathes @@ -281,4 +94,4 @@ for i in $(find docs -name "*.md"); do sed "s#/tree/main/#/tree/${VERSION}/#" $i >$i.tmp sed "s#/blob/main/#/blob/${VERSION}/#" $i.tmp >$i rm $i.tmp -done \ No newline at end of file +done