diff --git a/CLAUDE.md b/CLAUDE.md index 3ed055e..706f60c 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,476 +1,61 @@ -# CLAUDE.md - ppds-demo +# PPDS Demo -**Reference implementation for Dynamics 365 / Dataverse projects.** +Reference implementation for Dynamics 365 / Dataverse projects. -**Part of the PPDS Ecosystem** - See `../CLAUDE.md` for cross-project context. +## Solution Context ---- - -## πŸ“Š Solution Context - -| Property | Value | -| ---------------- | ------------------------------------------- | -| Solution Name | `PPDSDemo` | -| Publisher Prefix | `ppds` | -| Schema Prefix | `ppds_` | -| Entity Binding | Early-bound (see `src/Entities/`) | +| Property | Value | +|----------|-------| +| Solution Name | `PPDSDemo` | +| Publisher Prefix | `ppds` | +| Schema Prefix | `ppds_` | +| Entity Binding | Early-bound (see `src/Entities/`) | | Plugin Framework | PPDS.Plugins (attribute-based registration) | ---- - -## πŸ” Dataverse Connection (User Secrets) - -The demo app uses .NET User Secrets for Dataverse credentials with **typed configuration**. - -| Property | Value | -| ------------------- | ------------------------------- | -| UserSecretsId | `ppds-dataverse-demo` | -| Config Section | `Dataverse:Environments:{Name}` | -| Default Environment | `Dataverse:DefaultEnvironment` | - -### Configuration Structure - -All environments live under `Dataverse:Environments:*`. Each environment has its own URL, connections, and uses shared pool settings: - -```json -{ - "Dataverse": { - "DefaultEnvironment": "Dev", - "Environments": { - "Dev": { - "Url": "https://dev.crm.dynamics.com", - "Connections": [ - { "Name": "Primary", "ClientId": "...", "ClientSecret": "..." } - ] - }, - "QA": { - "Url": "https://qa.crm.dynamics.com", - "Connections": [ - { "Name": "Primary", "ClientId": "...", "ClientSecret": "..." 
} - ] - } - }, - "Pool": { "MinPoolSize": 1, "DisableAffinityCookie": true } - } -} -``` - -### Configuration Properties - -| Property | Purpose | -| --------------------------------------------------------- | -------------------------------- | -| `Dataverse:Environments:{env}:Url` | Environment URL | -| `Dataverse:Environments:{env}:Connections:N:ClientId` | Azure AD App Registration ID | -| `Dataverse:Environments:{env}:Connections:N:ClientSecret` | Client secret (dev) or env var name containing secret (prod) | -| `Dataverse:DefaultEnvironment` | Default environment for commands | - -### Single Environment Setup - -For most work, configure just the Dev environment: - -```powershell -cd src/Console/PPDS.Dataverse.Demo -dotnet user-secrets set "Dataverse:Environments:Dev:Url" "https://dev.crm.dynamics.com" -dotnet user-secrets set "Dataverse:Environments:Dev:Connections:0:ClientId" "your-client-id" -dotnet user-secrets set "Dataverse:Environments:Dev:Connections:0:ClientSecret" "your-client-secret" -``` - -### Multi-Environment Setup (Cross-Env Migration) - -For cross-environment operations, add both environments: - -```powershell -cd src/Console/PPDS.Dataverse.Demo - -# Dev environment -dotnet user-secrets set "Dataverse:Environments:Dev:Url" "https://dev.crm.dynamics.com" -dotnet user-secrets set "Dataverse:Environments:Dev:Connections:0:ClientId" "dev-client-id" -dotnet user-secrets set "Dataverse:Environments:Dev:Connections:0:ClientSecret" "dev-secret" - -# QA environment -dotnet user-secrets set "Dataverse:Environments:QA:Url" "https://qa.crm.dynamics.com" -dotnet user-secrets set "Dataverse:Environments:QA:Connections:0:ClientId" "qa-client-id" -dotnet user-secrets set "Dataverse:Environments:QA:Connections:0:ClientSecret" "qa-secret" -``` - -### Production Configuration - -For production, use environment variables instead of direct secrets: - -```json -{ - "Dataverse": { - "Environments": { - "Prod": { - "Url": "https://prod.crm.dynamics.com", - 
"Connections": [ - { - "Name": "Primary", - "ClientId": "...", - "ClientSecret": "DATAVERSE_SECRET" - } - ] - } - } - } -} -``` - -The platform (Azure App Service, GitHub Actions, etc.) sets the `DATAVERSE_SECRET` environment variable. - -### Check Current Connections - -```powershell -dotnet user-secrets list --project src/Console/PPDS.Dataverse.Demo -``` - -### Command Connection Usage - -| Command | Environment | Notes | -| ----------------------- | ------------------------ | -------------------- | -| `whoami` | DefaultEnvironment (Dev) | Uses connection pool | -| `demo-features` | DefaultEnvironment (Dev) | Uses connection pool | -| `seed` | Dev | Explicit environment | -| `clean` | Dev (or `--env QA`) | Explicit environment | -| `load-geo-data` | DefaultEnvironment | Uses connection pool | -| `clean-geo-data` | DefaultEnvironment | Uses connection pool | -| `migrate-to-qa` | Dev β†’ QA | Cross-environment | -| `generate-user-mapping` | Dev β†’ QA | Cross-environment | - -### Why This Structure? - -1. **Single location** - All environments under `Dataverse:Environments:*` -2. **No duplication** - Shared pool settings apply to all environments -3. 
**Explicit targeting** - SDK creates pool per environment: `AddDataverseConnectionPool(config, environment: "Dev")` - ---- - -## πŸ”Œ Connection Pool Patterns (PPDS.Dataverse) - -### Always dispose pooled clients - -```csharp -// βœ… Correct - automatic return to pool -await using var client = await _pool.GetClientAsync(); -await client.CreateAsync(entity); - -// ❌ Wrong - connection leak (blocks pool) -var client = await _pool.GetClientAsync(); -await client.CreateAsync(entity); -// forgot to dispose - connection never returned -``` - -### Never store pooled clients in fields - -```csharp -// ❌ Wrong - storing client leads to stale connections -public class MyService -{ - private IPooledClient _client; // DON'T DO THIS -} - -// βœ… Correct - get per operation, dispose immediately -public async Task DoWorkAsync() -{ - await using var client = await _pool.GetClientAsync(); - await client.CreateAsync(entity); -} -``` - -### Use server-recommended parallelism - -```csharp -// βœ… Correct - query dynamically -await using var client = await _pool.GetClientAsync(); -int dop = client.RecommendedDegreesOfParallelism; - -// ❌ Wrong - hardcoded value -var options = new ParallelOptions { MaxDegreeOfParallelism = 10 }; // DON'T HARDCODE -``` - ---- - -## πŸ“¦ Bulk Operations Patterns (PPDS.Dataverse) - -### When to use bulk APIs - -| Records | API | Throughput | -| ------- | -------------------------------------------------- | ---------- | -| <10 | Single requests | ~50K/hr | -| 10+ | `CreateMultiple`/`UpdateMultiple`/`UpsertMultiple` | ~10M/hr | - -### UpsertMultiple alternate key pitfall - -```csharp -// βœ… Correct - key in KeyAttributes ONLY -entity.KeyAttributes["ppds_code"] = "12345"; -entity["ppds_name"] = "Value"; - -// ❌ Wrong - causes "item with same key already added" error -entity.KeyAttributes["ppds_code"] = "12345"; -entity["ppds_code"] = "12345"; // DO NOT set key in Attributes too -``` - -### Handle BulkOperationResult errors - -```csharp -var result = await 
_bulk.UpsertMultipleAsync("ppds_entity", entities); - -// Errors are returned, not thrown -if (!result.IsSuccess) -{ - foreach (var error in result.Errors) - { - _logger.LogError("Record {Index}: [{Code}] {Message}", - error.Index, error.ErrorCode, error.Message); - } -} - -// Available properties: SuccessCount, FailureCount, Duration, CreatedCount, UpdatedCount -``` - ---- - -## πŸ”§ Plugin Registration (PPDS.Plugins) - -### PluginStepAttribute - -```csharp -[PluginStep( - Message = "Create", // Required: Create, Update, Delete, etc. - EntityLogicalName = "account", // Required: logical name - Stage = PluginStage.PreOperation, // 10=PreValidation, 20=PreOperation, 40=PostOperation - Mode = PluginMode.Synchronous, // Synchronous (default) or Asynchronous - FilteringAttributes = "name,phone")] // Optional: comma-separated (Update only) -public class AccountCreatePlugin : PluginBase { } -``` - -### PluginImageAttribute - -```csharp -[PluginImage( - ImageType = PluginImageType.PreImage, // PreImage, PostImage, or Both - Name = "PreImage", // Access via context.PreEntityImages["PreImage"] - Attributes = "name,telephone1")] // Comma-separated columns to capture -public class AccountUpdatePlugin : PluginBase { } -``` - ---- - -## 🚫 NEVER - -| Rule | Why | -| ---------------------------------------------------------- | ----------------------------------------------------------- | -| **Modify files outside `demo/`** | This repo only; sdk/, tools/, extension/ are separate repos | -| `Console.WriteLine` in plugins | Sandbox blocks it; use `ITracingService` | -| Hardcoded GUIDs | Breaks across environments; use config or queries | -| `Xrm.Page` in JavaScript | Deprecated since v9; use `formContext` | -| `alert()` in web resources | Blocked in UCI; use `Xrm.Navigation.openAlertDialog` | -| Static state in plugins | Sandbox recycles instances; state is lost | -| External assemblies in plugins | Sandbox whitelist only; ILMerge if needed | -| Separate Managed/Unmanaged folders | 
Use `--packagetype Both` for unified source | -| PR directly to main | Always target `develop` first | -| Squash merge developβ†’main | Use regular merge to preserve feature commits | -| Sync plugins in Pre-Create | Entity doesn't exist yet; use Post-Create | -| Create `ServiceClient` per request | 42,000x slower than pool; use `IDataverseConnectionPool` | -| Store `IPooledClient` in fields | Get per operation; dispose immediately with `await using` | -| Hardcode parallelism values | Query `RecommendedDegreesOfParallelism` dynamically | -| Set alternate key in both `KeyAttributes` AND `Attributes` | Causes "duplicate key" error on upsert | - -> **Cross-Repo Changes:** If a fix requires changes to `sdk/`, `tools/`, `extension/`, or `alm/`, -> describe the proposed change and get approval first. Do NOT edit files in other repositories. - ---- - -## βœ… ALWAYS - -| Rule | Why | -| ------------------------------------------------ | -------------------------------------------------- | -| `ITracingService` for debugging | Only way to get runtime output in sandbox | -| try/catch with `InvalidPluginExecutionException` | Platform requires this type for user-facing errors | -| Check `InputParameters.Contains("Target")` | Not all messages have Target; prevents null ref | -| `formContext` from execution context | Required pattern since Xrm.Page deprecation | -| Namespace pattern in JS (`PPDSDemo.Account`) | Avoids global pollution and naming conflicts | -| Early-bound entities for type safety | Compile-time checking prevents runtime errors | -| Deployment settings per environment | Environment-specific connection refs and variables | -| Conventional commits | `feat:`, `fix:`, `chore:` for clear history | -| Dispose pooled clients with `await using` | Returns connection to pool; prevents leaks | -| Use bulk APIs for 10+ records | 200x throughput vs single requests | -| Check `BulkOperationResult.IsSuccess` | Errors are returned in result, not thrown | -| Inherit plugins from 
`PluginBase` | Standardized error handling and tracing | -| Use `PluginStepAttribute` for registration | Declarative; tooling reads metadata from assembly | - ---- - -## πŸ’» Error Handling Pattern - -```csharp -// βœ… Correct - Full error handling with tracing -public void Execute(IServiceProvider serviceProvider) -{ - var tracingService = (ITracingService)serviceProvider.GetService(typeof(ITracingService)); - var context = (IPluginExecutionContext)serviceProvider.GetService(typeof(IPluginExecutionContext)); - - try - { - tracingService.Trace("Plugin started: {0}", context.MessageName); - // Plugin logic here - tracingService.Trace("Plugin completed successfully"); - } - catch (InvalidPluginExecutionException) - { - throw; // Re-throw business exceptions as-is - } - catch (Exception ex) - { - tracingService.Trace("Error: {0}", ex.ToString()); - throw new InvalidPluginExecutionException( - $"An error occurred. Contact support with timestamp: {DateTime.UtcNow:O}", ex); - } -} - -// ❌ Wrong - No tracing, swallows exceptions -public void Execute(IServiceProvider serviceProvider) -{ - try - { - // Plugin logic - } - catch (Exception ex) - { - Console.WriteLine(ex.Message); // Blocked in sandbox! 
- } -} -``` - ---- - -## 🎯 When to Use What - -| Scenario | Use | Why | -| ----------------------------- | --------------------------------- | -------------------------------------- | -| Sync validation/modification | **Plugin (Pre-Operation)** | Runs in transaction, can modify/cancel | -| Post-save automation | **Plugin (Post-Operation Async)** | Non-blocking, retries on failure | -| User-triggered automation | **Power Automate** | Easier to modify, visible to makers | -| Long-running process (>2 min) | **Azure Function** | No platform timeout limits | -| External system integration | **Custom API + Azure** | Clean contract, scalable | -| Simple field calculations | **Calculated/Rollup columns** | Zero code, platform-managed | - ---- - -## πŸ“› Naming Conventions - -Dataverse uses two name formats for schema objects: - -- **Logical Name**: Always lowercase (`ppds_customerrecord`) -- **Schema Name**: PascalCase with lowercase prefix (`ppds_CustomerRecord`) - -| Component | Logical Name | Schema Name | -| ----------- | ----------------- | ----------------- | -| Tables | `ppds_demorecord` | `ppds_DemoRecord` | -| Columns | `ppds_firstname` | `ppds_FirstName` | -| Option Sets | `ppds_status` | `ppds_Status` | - -| Component | Convention | Example | -| -------------- | ------------------------------------- | ----------------------------------- | -| Web Resources | `prefix_/path/name.ext` | `ppds_/scripts/account.form.js` | -| Plugin Classes | `{Entity}{Message}Plugin` | `AccountCreatePlugin` | -| Plugin Steps | `{Entity}: {Message} - {Description}` | `account: Create - Validate tax ID` | - -**In code:** Use logical names for API calls (`account`, `ppds_firstname`). Early-bound classes use Schema Names for properties. 
- ---- - -## πŸ“ Solution Structure - -``` -solutions/PPDSDemo/ -β”œβ”€β”€ PPDSDemo.cdsproj # Debug=Unmanaged, Release=Managed -β”œβ”€β”€ config/ -β”‚ β”œβ”€β”€ qa.deploymentsettings.json -β”‚ └── prod.deploymentsettings.json -└── src/ # Flat structure (packagetype Both) - β”œβ”€β”€ Other/Solution.xml - β”œβ”€β”€ Entities/ - β”œβ”€β”€ OptionSets/ - └── WebResources/ - -src/ -β”œβ”€β”€ Plugins/PPDSDemo.Plugins/ # Plugin assemblies -β”œβ”€β”€ Entities/PPDSDemo.Entities/ # Early-bound classes -└── WebResources/ # TypeScript source -``` - ---- - -## πŸ› οΈ Common Commands - -```bash -# Build plugins -dotnet build src/Plugins/PPDSDemo.Plugins/PPDSDemo.Plugins.csproj -c Release - -# Build solution (managed for deployment) -dotnet build solutions/PPDSDemo/PPDSDemo.cdsproj -c Release - -# Export and unpack from environment -pac solution export --name PPDSDemo --path exports --managed false --overwrite -pac solution export --name PPDSDemo --path exports --managed true --overwrite -pac solution unpack --zipfile exports/PPDSDemo.zip --folder solutions/PPDSDemo/src --packagetype Both --allowDelete --allowWrite - -# Regenerate early-bound entities -pac modelbuilder build --outdirectory src/Entities/PPDSDemo.Entities - -# Deploy plugins -.\tools\Deploy-Plugins.ps1 -Environment Dev -``` - ---- - -## πŸ”€ Git Workflow - -| Flow | Merge Strategy | Why | -| ----------------------- | -------------- | --------------------------------------- | -| `feature/*` β†’ `develop` | Squash | Clean history, one commit per feature | -| `develop` β†’ `main` | Regular merge | Preserve features, clear release points | -| `hotfix/*` β†’ `main` | Regular merge | Then cherry-pick to develop | - -**Automated CI/CD:** Exports from Dev commit to `develop` automatically. Merges to `develop` deploy to QA. Merges to `main` deploy to Prod. 
+## NEVER ---- +- Store `IPooledClient` in fields - get per operation, dispose with `await using` +- Hardcode parallelism values - query `RecommendedDegreesOfParallelism` dynamically +- Use `Console.WriteLine` in plugins - sandbox blocks it; use `ITracingService` +- Run sync plugins in Pre-Create - entity doesn't exist yet; use Post-Create +- Set alternate key in both `KeyAttributes` AND `Attributes` - causes duplicate key error -## πŸ“š Reference Documentation +## ALWAYS -### Strategy (Why) +- Use `ITracingService` for debugging - only way to get runtime output in sandbox +- Wrap plugin exceptions in `InvalidPluginExecutionException` - platform requirement +- Dispose pooled clients with `await using` - returns connection to pool +- Use bulk APIs for 10+ records - 200x throughput vs single requests +- Use early-bound entities for type safety - compile-time checking prevents runtime errors -- [ALM_OVERVIEW.md](docs/strategy/ALM_OVERVIEW.md) - ALM philosophy and approach -- [BRANCHING_STRATEGY.md](docs/strategy/BRANCHING_STRATEGY.md) - Git workflow details -- [ENVIRONMENT_STRATEGY.md](docs/strategy/ENVIRONMENT_STRATEGY.md) - Dev/QA/Prod setup -- [PIPELINE_STRATEGY.md](docs/strategy/PIPELINE_STRATEGY.md) - CI/CD design +## Configuration -### Reference (How) +See `README.md#configuration` for User Secrets setup (single and multi-environment). 
-- [PLUGIN_COMPONENTS_REFERENCE.md](docs/reference/PLUGIN_COMPONENTS_REFERENCE.md) - Plugin patterns -- [WEBRESOURCE_PATTERNS.md](docs/reference/WEBRESOURCE_PATTERNS.md) - JavaScript/TypeScript -- [SOLUTION_STRUCTURE_REFERENCE.md](docs/reference/SOLUTION_STRUCTURE_REFERENCE.md) - Solution packaging -- [PAC_CLI_REFERENCE.md](docs/reference/PAC_CLI_REFERENCE.md) - PAC CLI commands -- [TESTING_PATTERNS.md](docs/reference/TESTING_PATTERNS.md) - Unit testing with FakeXrmEasy +## Key Files -### Guides (Step-by-Step) +- `src/Plugins/` - Plugin implementations with `PluginStepAttribute` +- `src/Services/` - Connection pool and bulk operation patterns +- `docs/reference/PLUGIN_COMPONENTS_REFERENCE.md` - Plugin patterns and error handling +- `solutions/PPDSDemo/` - Solution with deployment settings -- [GETTING_STARTED_GUIDE.md](docs/guides/GETTING_STARTED_GUIDE.md) - Initial setup -- [ENVIRONMENT_SETUP_GUIDE.md](docs/guides/ENVIRONMENT_SETUP_GUIDE.md) - Environment configuration -- [BRANCH_PROTECTION_GUIDE.md](docs/guides/BRANCH_PROTECTION_GUIDE.md) - GitHub rulesets -- [PLUGIN_REMOVAL_GUIDE.md](docs/guides/PLUGIN_REMOVAL_GUIDE.md) - Removing plugin steps +## Commands ---- +| Command | Purpose | +|---------|---------| +| `dotnet build src/Plugins -c Release` | Build plugins | +| `pac solution export --name PPDSDemo` | Export solution | +| `pac modelbuilder build` | Regenerate early-bound entities | -## βš–οΈ Decision Presentation +## Git Workflow -When presenting choices or asking questions: +| Flow | Strategy | +|------|----------| +| `feature/*` β†’ `develop` | Squash | +| `develop` β†’ `main` | Regular merge | +| `hotfix/*` β†’ `main` | Regular merge, cherry-pick to develop | -1. **Lead with your recommendation** and rationale -2. **List alternatives considered** and why they're not preferred -3. **Ask for confirmation**, not open-ended input +## See Also -❌ "What testing approach should we use?" -βœ… "I recommend X because Y. 
Alternatives considered: A (rejected because B), C (rejected because D). Do you agree?" +- `docs/guides/` - Setup and configuration guides +- `docs/reference/` - Plugin, web resource, and solution patterns diff --git a/docs/strategy/ALM_OVERVIEW.md b/docs/strategy/ALM_OVERVIEW.md deleted file mode 100644 index c8d2f97..0000000 --- a/docs/strategy/ALM_OVERVIEW.md +++ /dev/null @@ -1,147 +0,0 @@ -# ALM Overview - -This document describes the Application Lifecycle Management (ALM) philosophy and principles used in this reference architecture. - ---- - -## What This Architecture Provides - -A practical, enterprise-ready ALM implementation for Power Platform that: - -- Uses **source control as the source of truth** (Git/GitHub) -- Implements **CI/CD with PAC CLI** (portable to Azure DevOps) -- Supports **develop/main branching** for controlled releases -- Deploys **managed solutions** to non-development environments -- Provides **modular, extensible pipelines** for multi-solution scenarios - ---- - -## Core Principles - -### 1. Source Control is the Source of Truth - -All solution changes are exported to Git and committed. If an environment is lost or corrupted, it can be rebuilt from source control. - -**Implications:** -- Solutions are exported from Dev and unpacked to source control -- A single unified source is stored (using `--packagetype Both`) -- Same source builds both managed (Release) and unmanaged (Debug) -- Deployments to QA/Prod are built from source control, not exported from Dev - -### 2. Environments Have Specific Purposes - -Each environment serves a distinct role in the development lifecycle. - -| Environment | Purpose | Solution Type | -|-------------|---------|---------------| -| **Dev** | Development and maker work | Unmanaged | -| **QA** | Testing and validation | Managed | -| **Prod** | Production use | Managed | - -### 3. Managed Solutions for Deployment - -Non-development environments receive managed solutions only. 
This ensures: - -- Clean uninstall capability -- Proper solution layering -- Prevention of ad-hoc changes in QA/Prod - -### 4. Branches Map to Environments - -Git branches correspond to environment states: - -| Branch | Deploys To | Purpose | -|--------|------------|---------| -| `develop` | QA | Integration and testing | -| `main` | Prod | Production releases | - -### 5. Automation Over Manual Steps - -Deployments are automated through CI/CD pipelines. Manual solution imports are avoided except in development environments. - ---- - -## Architecture Diagram - -```mermaid -graph TB - subgraph "Development" - DEV[Dev Environment] - MAKER[Makers/Developers] - end - - subgraph "Source Control" - DEVELOP[develop branch] - MAIN[main branch] - end - - subgraph "Testing" - QA[QA Environment] - end - - subgraph "Production" - PROD[Prod Environment] - end - - MAKER -->|work in| DEV - DEV -->|export & commit| DEVELOP - DEVELOP -->|auto-deploy| QA - DEVELOP -->|PR & merge| MAIN - MAIN -->|auto-deploy| PROD -``` - ---- - -## What We Chose NOT to Use - -### Power Platform Pipelines (Native) - -We use PAC CLI instead of Power Platform's built-in Pipelines because: - -- **Source control integration** - Pipelines don't commit to Git -- **Portability** - PAC CLI works in GitHub Actions and Azure DevOps -- **Extensibility** - Custom validation, multi-solution orchestration -- **Transparency** - Full visibility into deployment process - -Power Platform Pipelines are excellent for citizen developer scenarios but lack the source control integration needed for enterprise ALM. - -### ALM Accelerator - -We implement similar patterns without the ALM Accelerator dependency because: - -- **Simplicity** - Fewer moving parts to understand and maintain -- **Transparency** - Direct PAC CLI commands are easier to debug -- **Flexibility** - Easier to customize for specific needs - -Organizations with existing ALM Accelerator deployments can continue using them. 
This architecture demonstrates the underlying patterns. - ---- - -## Scaling Guidance - -### When to Add Environments - -| Scenario | Recommendation | -|----------|----------------| -| Single team, simple solution | Dev β†’ QA β†’ Prod (3 environments) | -| Multiple teams, shared components | Add integration/validation environment | -| Regulated industry | Add UAT environment for formal sign-off | -| Large enterprise | Consider per-team Dev environments | - -### When to Add Branches - -| Scenario | Recommendation | -|----------|----------------| -| Simple continuous delivery | develop + main only | -| Formal release cycles | Add release branches | -| Multiple parallel features | Add feature branches | -| Emergency fixes | Add hotfix branch pattern | - ---- - -## πŸ”— See Also - -- [ENVIRONMENT_STRATEGY.md](ENVIRONMENT_STRATEGY.md) - Detailed environment configuration -- [BRANCHING_STRATEGY.md](BRANCHING_STRATEGY.md) - Git workflow details -- [PIPELINE_STRATEGY.md](PIPELINE_STRATEGY.md) - CI/CD implementation details -- [Microsoft ALM Documentation](https://learn.microsoft.com/en-us/power-platform/alm/) - Official guidance diff --git a/docs/strategy/BRANCHING_STRATEGY.md b/docs/strategy/BRANCHING_STRATEGY.md deleted file mode 100644 index b0da75b..0000000 --- a/docs/strategy/BRANCHING_STRATEGY.md +++ /dev/null @@ -1,491 +0,0 @@ -# Branching Strategy - -This document defines our Git branching model and workflow for Power Platform solution development. - ---- - -## Branch Overview - -We use a simplified GitFlow model with two primary branches. 
- -| Branch | Purpose | Protected | Deploys To | -|--------|---------|-----------|------------| -| `main` | Production-ready code | Yes | Prod | -| `develop` | Integration branch | Yes | QA | -| `feature/*` | Feature development | No | - | -| `fix/*` | Bug fixes (normal priority) | No | - | -| `hotfix/*` | Emergency fixes | No | - | - ---- - -## Branch Policy Enforcement - -PRs to `main` are restricted to ensure release integrity. This is enforced automatically by the PR validation workflow. - -| Source Branch | Allowed Target | Use Case | -|---------------|----------------|----------| -| `feature/*` | `develop` only | New functionality | -| `fix/*` | `develop` only | Bug fixes (normal priority) | -| `hotfix/*` | `main` | Emergency production fixes (then cherry-pick to develop) | -| `develop` | `main` | Release merges | - -**Automated Enforcement:** - -The `pr-validate.yml` workflow checks branch policy on every PR: -- PRs to `main` from unauthorized branches (e.g., `feature/*`, `fix/*`) will **fail** with a clear error message -- PRs to `develop` are allowed from any branch -- Ensures only tested code reaches production via the proper flow - -If you attempt to create a PR from `feature/my-change` to `main`, you'll see: - -``` -::error::PRs to main must come from 'develop' or 'hotfix/*' branches -::error::Source branch 'feature/my-change' is not allowed to target main. -::error::Please target 'develop' instead, or rename to 'hotfix/*' for emergency fixes. -``` - ---- - -## Branch Flow - -```mermaid -graph LR - subgraph "Feature Development" - F1[feature/add-validation] - F2[feature/new-entity] - end - - subgraph "Integration" - DEV[develop] - end - - subgraph "Production" - MAIN[main] - end - - F1 -->|PR| DEV - F2 -->|PR| DEV - DEV -->|PR| MAIN - - DEV -.->|auto-deploy| QA[QA Env] - MAIN -.->|auto-deploy| PROD[Prod Env] -``` - ---- - -## Branch Details - -### `main` Branch - -**Purpose:** Represents production-ready code. 
Every commit to `main` should be deployable to production. - -**Rules:** -- Protected branch (no direct commits) -- Requires pull request from `develop` -- Requires at least one approval -- All CI checks must pass - -**Deployment:** Pushes to `main` trigger deployment to Production environment. - ---- - -### `develop` Branch - -**Purpose:** Integration branch where features are combined and tested before release. - -**Rules:** -- Protected branch -- Receives automated exports from Dev environment (nightly) -- Receives pull requests from feature branches -- Can receive direct commits from automated export pipeline - -**Deployment:** Pushes to `develop` trigger deployment to QA environment. - ---- - -### `feature/*` and `fix/*` Branches - -**Purpose:** Isolated development of specific features or bug fixes. - -**Naming:** -- `feature/{short-description}` - New functionality -- `fix/{short-description}` - Bug fixes (normal priority) - -**Examples:** -``` -feature/add-account-validation -feature/new-contact-form -fix/workflow-error-handling -fix/form-validation-bug -``` - -**Workflow:** -1. Create from `develop` -2. Make changes in Dev environment -3. Export and commit to feature/fix branch -4. Create PR to `develop` (PRs to `main` will be rejected) -5. Delete after merge - ---- - -### `hotfix/*` Branches - -**Purpose:** Emergency fixes that need to go directly to production. - -**Naming:** `hotfix/{issue-description}` - -**Examples:** -``` -hotfix/fix-critical-workflow -hotfix/security-patch -``` - -**Workflow:** -1. Create from `main` -2. Make minimal fix -3. PR to `main` (for immediate production deployment) -4. Cherry-pick or merge back to `develop` -5. 
Delete after merge - ---- - -## Daily Workflow - -### Automated Export (Nightly) - -```mermaid -sequenceDiagram - participant Dev as Dev Env - participant GH as GitHub Actions - participant Develop as develop branch - participant QA as QA Env - - Note over Dev,QA: Nightly at 2 AM UTC - GH->>Dev: Export solution - GH->>Develop: Commit changes - GH->>QA: Deploy managed solution -``` - -### Feature Development - -```mermaid -sequenceDiagram - participant Maker as Maker - participant Dev as Dev Env - participant Feature as feature/* branch - participant Develop as develop branch - - Maker->>Dev: Make changes - Maker->>Feature: Export & commit - Maker->>Develop: Create PR - Note over Develop: Review & merge - Develop->>Feature: Delete branch -``` - -### Production Release - -```mermaid -sequenceDiagram - participant QA as QA Team - participant Develop as develop branch - participant Main as main branch - participant Prod as Prod Env - - QA->>QA: Validate in QA environment - QA->>Main: Create PR from develop - Note over Main: Review & approve - Main->>Prod: Auto-deploy to production -``` - ---- - -## Pull Request Requirements - -### PR to `develop` - -| Requirement | Required? | -|-------------|-----------| -| CI pipeline passes | Yes | -| At least 1 approval | Recommended | -| No merge conflicts | Yes | -| Linked work item | Optional | - -### PR to `main` - -| Requirement | Required? | -|-------------|-----------| -| CI pipeline passes | Yes | -| At least 1 approval | Yes | -| QA sign-off | Yes | -| No merge conflicts | Yes | -| All conversations resolved | Yes | - ---- - -## Merge Strategy - -We use different merge strategies for different branch flows to optimize history clarity. - -### Squash Merge: Feature β†’ Develop - -**Use squash merge** when merging feature branches into `develop`. 
- -``` -feature/add-validation (12 commits) β†’ develop (1 squashed commit) -``` - -**Why squash:** -| Reason | Explanation | -|--------|-------------| -| Clean history | Feature branches have noisy commits ("WIP", "fix typo", "try again") | -| Atomic features | Each feature = one commit, easy to identify and revert | -| Power Platform | Solution exports create many small commits; squashing cleans this up | -| PR preserves detail | Granular commits still visible in closed PR if needed | - -**GitHub setting:** Repository Settings β†’ Pull Requests β†’ Allow squash merging βœ“ - ---- - -### Regular Merge: Develop β†’ Main - -**Use regular merge** (merge commit) when merging `develop` into `main`. - -``` -develop β†’ main (merge commit preserves all feature commits) -``` - -**Why regular merge:** -| Reason | Explanation | -|--------|-------------| -| Preserves features | Each squashed feature commit flows through to main | -| Release boundaries | Merge commit marks exactly when a release happened | -| Traceability | "Prod broke" β†’ Which release? β†’ Which feature? β†’ Easy to trace | -| Selective revert | Can revert one feature without reverting entire release | - -**GitHub setting:** Repository Settings β†’ Pull Requests β†’ Allow merge commits βœ“ - ---- - -### Why NOT Squash Both Ways? - -If you squash `develop` β†’ `main`: - -``` -❌ BAD: Squash develop to main -main: -β”œβ”€β”€ Release 5 (one giant commit with 10 features mixed together) -β”œβ”€β”€ Release 4 (one giant commit with 8 features) -└── Release 3 (one giant commit) - -Problems: -- "Which feature broke prod?" 
- Can't tell, all mixed together -- "Revert just account validation" - Can't, it's mixed with other features -- Loss of audit trail -``` - -``` -βœ… GOOD: Regular merge develop to main -main: -β”œβ”€β”€ Merge develop β†’ main (Release 5) -β”‚ β”œβ”€β”€ feat: add account validation -β”‚ β”œβ”€β”€ feat: new contact form -β”‚ └── fix: workflow error -β”œβ”€β”€ Merge develop β†’ main (Release 4) -β”‚ β”œβ”€β”€ feat: dashboard updates -β”‚ └── feat: reporting changes - -Benefits: -- Clear release boundaries (merge commits) -- Feature-level granularity preserved -- Can revert specific features OR entire releases -``` - ---- - -## Branch Rulesets - -We use **GitHub Rulesets** (not legacy branch protection) to enforce different merge strategies per branch. This is critical for our workflow: - -- **Squash merge only** for PRs to `develop` (clean feature commits) -- **Merge commit only** for PRs to `main` (preserve feature history) - -Ruleset definitions are stored in `.github/rulesets/` for reference. - -### `main` Branch Ruleset - -| Rule | Setting | Reason | -|------|---------|--------| -| Require PR | Yes | No direct commits to production | -| Required approvals | 1 | Human review before production | -| Dismiss stale reviews | Yes | Re-review after new commits | -| Require last push approval | Yes | Final review after any changes | -| Require conversation resolution | Yes | All feedback must be addressed | -| Status checks (strict) | Yes | Branch must be up-to-date | -| Required checks | `Validation Status` | PR validation workflow | -| Allowed merge methods | **Merge commit only** | Preserve feature commits on main | -| Branch deletion | Blocked | Prevent accidental deletion | -| Force pushes | Blocked | Protect history | - -**Key:** `allowed_merge_methods: ["merge"]` - Squash is NOT allowed on main. 
- -### `develop` Branch Ruleset - -| Rule | Setting | Reason | -|------|---------|--------| -| Require PR | Yes | Feature branches merge via PR | -| Required approvals | 1 | Code review | -| Dismiss stale reviews | Yes | Re-review after changes | -| Require conversation resolution | Yes | All feedback must be addressed | -| Status checks (strict) | No | Nightly exports would conflict | -| Required checks | `Validation Status` | PR validation workflow | -| Allowed merge methods | **Squash only** | Clean feature history | -| Branch deletion | Blocked | Prevent accidental deletion | -| Force pushes | Blocked | Protect history | - -**Key:** `allowed_merge_methods: ["squash"]` - Merge commits NOT allowed on develop. - -> **Note:** Required approvals is set to 1 (not 0) to ensure code review even for the integration branch. - -### Repository Merge Settings - -Repository-level settings (Settings β†’ Pull Requests) enable both methods: -- βœ… Allow merge commits (for main) -- βœ… Allow squash merging (for develop) -- ❌ Allow rebase merging (disabled) - -**Squash commit formatting:** When squash merging to `develop`, commits use: -- **Title:** PR title (clean, descriptive feature name) -- **Message:** PR body (contains context, linked issues, etc.) - -This ensures squashed commits are meaningful and traceable back to their PR. - -The **rulesets** control which method is available for each target branch. 
- -### Applying Rulesets - -**Recommended:** Use the PowerShell script for idempotent setup (handles both create and update): - -```powershell -# Configure all rulesets and merge settings -.\tools\Setup-BranchProtection.ps1 - -# Preview changes without applying -.\tools\Setup-BranchProtection.ps1 -WhatIf -``` - -**Manual API (initial creation only):** - -```bash -# These POST commands only work for NEW rulesets (fail if already exists) -gh api repos/OWNER/REPO/rulesets -X POST --input .github/rulesets/develop.json -gh api repos/OWNER/REPO/rulesets -X POST --input .github/rulesets/main.json -``` - -See `.github/rulesets/` for the complete ruleset definitions. - -### Automation Bypass for CI/CD - -The nightly export workflow commits directly to `develop`, bypassing branch protection. This is **intentional and correct** for the ALM pattern. - -#### Why Automation Bypasses Branch Protection - -| Concern | Explanation | -|---------|-------------| -| "Shouldn't all changes require PR?" | No. PRs are for **human-authored changes**. Automated exports are operational, not developmental. | -| "What about review?" | QA environment IS the review. Blocking before QA adds delay without adding validation. | -| "What if bad changes export?" | If someone shouldn't change Dev, fix permissions. Don't slow the feedback loop for everyone. | - -#### Where Gates Should Be - -``` -Dev β†’ develop β†’ QA (automated, fast feedback) -QA β†’ main β†’ Prod (gated, human approval required) -``` - -The human gate belongs at **QA β†’ Prod**, not at **Dev β†’ QA**. QA is where you validate changes through testing, not through XML diff review. - -#### Configuration by Repository Type - -**Organization Repositories (Enterprise):** - -Add GitHub Actions to the ruleset bypass list: - -1. Repository Settings β†’ Rules β†’ Rulesets β†’ "Develop Branch Rules" -2. Under "Bypass list", add "GitHub Actions" -3. Save - -**Personal Repositories:** - -Use a Personal Access Token (PAT): - -1. 
Create fine-grained PAT with `contents: write` for the repo -2. Store as `AUTOMATION_TOKEN` repository secret -3. Workflow uses PAT for checkout: `token: ${{ secrets.AUTOMATION_TOKEN }}` - -This is the standard pattern when automation needs to bypass branch protection. - -#### What This Enables - -- **Nightly exports** commit directly to `develop` -- **QA deployment** triggers automatically on push -- **Fast feedback** - issues discovered within 24 hours -- **Human PRs** (feature branches) still require approval via `GITHUB_TOKEN` - ---- - -## Commit Message Convention - -Follow conventional commits for clear history: - -``` -: - -[optional body] - -[optional footer] -``` - -**Types:** -| Type | Description | -|------|-------------| -| `feat` | New feature or component | -| `fix` | Bug fix | -| `docs` | Documentation changes | -| `chore` | Maintenance, dependencies | -| `refactor` | Code restructuring | - -**Examples:** -``` -feat: add account validation plugin -fix: correct status transition in workflow -docs: update deployment guide -chore: sync solution from Dev environment -``` - ---- - -## When to Deviate - -### Add Release Branches When: -- You need to maintain multiple production versions -- Formal release cycles require stabilization periods -- Hotfixes need isolation from ongoing development - -### Add Environment-Specific Branches When: -- Multiple long-lived environments need different configurations -- UAT requires extended testing periods -- Regulatory requirements mandate branch-per-environment - -### Skip Feature Branches When: -- Solo developer working on simple changes -- Automated exports are the only commits -- Changes are trivial (typos, config adjustments) - ---- - -## πŸ”— See Also - -- [ALM_OVERVIEW.md](ALM_OVERVIEW.md) - High-level ALM philosophy -- [ENVIRONMENT_STRATEGY.md](ENVIRONMENT_STRATEGY.md) - Environment configuration -- [PIPELINE_STRATEGY.md](PIPELINE_STRATEGY.md) - CI/CD implementation -- [Atlassian GitFlow 
Guide](https://www.atlassian.com/git/tutorials/comparing-workflows/gitflow-workflow) - GitFlow reference diff --git a/docs/strategy/CROSS_TENANT_AZURE_INTEGRATION.md b/docs/strategy/CROSS_TENANT_AZURE_INTEGRATION.md deleted file mode 100644 index 1143dc9..0000000 --- a/docs/strategy/CROSS_TENANT_AZURE_INTEGRATION.md +++ /dev/null @@ -1,302 +0,0 @@ -# Cross-Tenant Azure Integration Strategy - -**Guidance for integrating Power Platform (Developer Subscription) with Azure resources in a separate tenant (e.g., MSDN Subscription).** - ---- - -## Overview - -This document addresses a common scenario: using Azure resources from a different Azure AD tenant than where your Power Platform environment exists. This is typical when: - -- Power Platform is on a Developer/Trial subscription (no Azure) -- Azure resources are in an MSDN/Visual Studio subscription (different tenant) -- Production has separate tenants for organizational reasons - ---- - -## Quick Reference - -| Question | Answer | -|----------|--------| -| Can you use Azure from a different tenant? | **Yes**, but authentication setup matters | -| Do managed identities work cross-tenant? | **No**, they're tenant-bound | -| Can app registrations work cross-tenant? | **Yes**, via multi-tenant apps or B2B | - ---- - -## Authentication Methods Comparison - -| Auth Method | Cross-Tenant? | Complexity | Security | Best For | -|-------------|---------------|------------|----------|----------| -| Webhook Key (shared secret) | βœ… Yes | Low | Medium | Demo/Dev, internal systems | -| API Key in header | βœ… Yes | Low | Medium | Simple integrations | -| SAS Token (Service Bus) | βœ… Yes | Low | Medium | Queue-based patterns | -| Multi-tenant AAD App | βœ… Yes | Medium | High | Production, enterprise | -| Managed Identity | ❌ No | N/A | N/A | Same-tenant only | - ---- - -## Architecture Patterns - -### Pattern 1: Webhook Key Authentication (Simplest) - -Best for demos, development, and internal trusted systems. 
- -```mermaid -flowchart LR - subgraph PP["Power Platform Tenant"] - SE["Dataverse
<br/>Service Endpoint<br/>
(WebhookKey auth)"] -    end - -    subgraph AZ["Azure Tenant (MSDN)"] -        AF["Azure Function<br/>
+ Function Key auth"] - end - - SE -->|"HTTP + x-functions-key"| AF -``` - -**Setup Steps:** - -1. Create Azure Function in MSDN subscription -2. Get the function's key (host key or function key) -3. Create Service Endpoint in Dataverse with `WebhookKey` authentication type -4. Function validates `x-functions-key` header automatically - -**Pros:** -- Simple to implement -- No AAD configuration required -- Works immediately cross-tenant - -**Cons:** -- Shared secret management -- No identity context passed -- Less auditable - ---- - -### Pattern 2: Multi-Tenant App Registration (Production-Ready) - -Best for production scenarios requiring proper identity and audit trails. - -```mermaid -flowchart LR - subgraph PP["Power Platform Tenant (Dev Sub)"] - SE2["Dataverse
<br/>Service Endpoint<br/>
(OAuth auth)"] -        SP["Service Principal<br/>
(Created via admin consent)"] -    end - -    subgraph AZ2["Azure Tenant (MSDN Sub)"] -        AR["App Registration<br/>
(Multi-tenant enabled)"] -        AF2["Azure Function<br/>
(validates AAD token)"] - AR --> AF2 - end - - SE2 -->|"OAuth token"| AR - SP -.->|"represents"| AR -``` - -**Setup Steps:** - -1. **In Azure Tenant (MSDN):** - - Create App Registration - - Set "Supported account types" to **"Accounts in any organizational directory (Multi-tenant)"** - - Create a client secret or certificate - - Note the Application (client) ID and Directory (tenant) ID - -2. **In Power Platform Tenant:** - - Admin consent to the multi-tenant app (creates a service principal) - - Or use the app directly if configuring Service Endpoint with OAuth - -3. **Configure Dataverse Service Endpoint:** - - Use OAuth authentication type - - Provide the Application ID and secret - -4. **Configure Azure Function:** - - Enable AAD authentication - - Validate tokens from the expected application - -**Pros:** -- Proper identity-based authentication -- Full audit trail -- Token-based (short-lived credentials) -- Enterprise-grade security - -**Cons:** -- More complex setup -- Requires admin consent in Power Platform tenant -- Secret/certificate rotation needed - ---- - -### Pattern 3: Service Bus with SAS Token - -Best for asynchronous, queue-based messaging patterns. - -```mermaid -flowchart LR - subgraph PP3["Power Platform Tenant"] - SE3["Dataverse
<br/>Service Endpoint<br/>
(SAS Key auth)"] - end - - subgraph AZ3["Azure Tenant (MSDN)"] - SB["Azure Service Bus
<br/>Queue/Topic"] -        AF3["Azure Function<br/>
(Service Bus trigger)"] - SB --> AF3 - end - - SE3 -->|"SAS Token"| SB -``` - -> ⚠️ **Session-Enabled Queues Not Supported:** Dataverse Service Endpoints do NOT set a `SessionId` on messages. If you enable sessions on a Service Bus queue (for FIFO ordering), messages from Dataverse will be rejected. Use standard (non-session) queues, or relay through an Azure Function that adds the SessionId before forwarding to a session-enabled queue. - -**Setup Steps:** - -1. Create Azure Service Bus namespace in MSDN subscription -2. Create a queue or topic -3. Generate a SAS policy with Send permissions -4. Create Service Endpoint in Dataverse pointing to Service Bus -5. Create Azure Function with Service Bus trigger - -**Pros:** -- Decoupled architecture -- Built-in retry and dead-letter handling -- Scales well -- SAS tokens work cross-tenant - -**Cons:** -- Additional Azure resource (Service Bus) -- Slightly higher complexity -- Asynchronous only (no sync response) - ---- - -## Managed Identities: Why They Don't Work Cross-Tenant - -Managed identities are **strictly tenant-bound**: - -- The identity exists only in the Azure AD tenant where the resource lives -- Cannot authenticate to resources in another tenant -- Cannot be "shared" or "invited" to another tenant - -**Implication:** If your Azure Function uses a managed identity, it can only access resources in that same Azure tenant (your MSDN subscription's tenant), not your Power Platform tenant's Dataverse directly. - -**Workaround:** For Azure β†’ Dataverse communication, use a multi-tenant app registration instead of managed identity. - ---- - -## Bidirectional Communication - -### Power Platform β†’ Azure (Outbound) - -This is the **easy direction**. 
All patterns above work: -- Service Endpoints push data to Azure -- Webhooks trigger Azure Functions -- Plugin β†’ Service Endpoint β†’ Azure - -### Azure β†’ Dataverse (Callback/Inbound) - -This is **more complex** and requires additional setup: - -```mermaid -flowchart RL - subgraph AZ4["Azure Tenant (MSDN)"] - AF4["Azure Function
<br/>(uses client credentials)"] -    end - -    subgraph PP4["Power Platform Tenant"] -        AR4["App Registration<br/>
(Dataverse permissions)"] -        DV["Dataverse<br/>
Web API"] - AR4 --> DV - end - - AF4 -->|"Client credentials flow"| AR4 -``` - -**Options:** - -1. **Multi-tenant app in Power Platform tenant** - - Create app registration in PP tenant with Dataverse permissions - - Azure Function uses client credentials flow - - Function stores client ID/secret securely (Key Vault) - -2. **Application User in Dataverse** - - Create an Application User linked to the app registration - - Assign appropriate security roles - - Azure Function authenticates as this application user - ---- - -## Important Considerations - -### Security - -| Consideration | Guidance | -|---------------|----------| -| IP Restrictions | Don't rely on IP allowlisting - Dataverse IPs are not static | -| Secret Storage | Use Azure Key Vault for production secrets | -| Token Lifetime | AAD tokens are short-lived (1 hour default) - handled automatically | -| Audit | Multi-tenant apps provide better audit trails than shared secrets | - -### Performance - -| Consideration | Impact | -|---------------|--------| -| Cross-tenant latency | Negligible - same Microsoft backbone | -| Cold start (Functions) | 1-3 seconds on consumption plan; use Premium for critical workloads | -| Service Bus | Adds ~10-50ms for queue operations | - -### Cost (Minimal for Demo/Dev) - -| Resource | Estimated Cost | -|----------|----------------| -| Azure Functions (Consumption) | Free tier: 1M executions/month | -| Storage Account (for Functions) | ~$1/month | -| Service Bus (Basic) | ~$0.05/million operations | -| Key Vault | ~$0.03/10,000 operations | - ---- - -## Recommended Approach for PPDS Demo - -### Phase 1: Webhook Pattern (Current Priority) - -Implement the simplest cross-tenant pattern to demonstrate the concept: - -1. **Azure Function** with HTTP trigger and function key auth -2. **Dataverse Service Endpoint** with WebhookKey authentication -3. 
**Plugin Step** registered to fire the service endpoint - -**Why this approach:** -- Demonstrates the pattern clearly -- Minimal Azure cost -- Works immediately without AAD complexity -- Appropriate for demo/reference purposes - -### Phase 2: Production Pattern (Future Enhancement) - -Document and optionally implement multi-tenant OAuth: - -1. Multi-tenant app registration -2. Proper token validation in Azure Function -3. Service Endpoint with OAuth authentication - -**Document as:** "Production Enhancement" with clear migration path. - ---- - -## Decision Log - -| Decision | Rationale | Alternatives Considered | -|----------|-----------|------------------------| -| Use separate Azure tenant (MSDN) | Power Platform Developer subscription has no Azure; avoid additional cost | Upgrade PP subscription (expensive), use only Power Automate (limited scenarios) | -| Start with Webhook Key auth | Simplest cross-tenant pattern; appropriate for demo | Multi-tenant OAuth (more complex for demo), SAS tokens (requires Service Bus) | -| Document multi-tenant OAuth | Production-ready pattern should be documented | Ignore production pattern (incomplete guidance) | - ---- - -## Related Documentation - -- [Register a WebHook (Dataverse)](https://learn.microsoft.com/en-us/power-apps/developer/data-platform/register-web-hook) -- [Azure Integration (Dataverse)](https://learn.microsoft.com/en-us/power-apps/developer/data-platform/azure-integration) -- [Multi-tenant App Registration](https://learn.microsoft.com/en-us/azure/active-directory/develop/howto-convert-app-to-be-multi-tenant) -- [Azure Functions HTTP Trigger](https://learn.microsoft.com/en-us/azure/azure-functions/functions-bindings-http-webhook-trigger#authorization-keys) diff --git a/docs/strategy/ENVIRONMENT_STRATEGY.md b/docs/strategy/ENVIRONMENT_STRATEGY.md deleted file mode 100644 index ac2268a..0000000 --- a/docs/strategy/ENVIRONMENT_STRATEGY.md +++ /dev/null @@ -1,203 +0,0 @@ -# Environment Strategy - -This document 
defines our Power Platform environment structure, purposes, and configuration. - ---- - -## Environment Overview - -We use a three-environment model that balances simplicity with proper release management. - -| Environment | Type | Purpose | Solution State | Branch | -|-------------|------|---------|----------------|--------| -| **Dev** | Development/Sandbox | Maker development and testing | Unmanaged | Source for `develop` | -| **QA** | Sandbox | Integration testing, QA validation | Managed | Deployed from `develop` | -| **Prod** | Production | Live production use | Managed | Deployed from `main` | - ---- - -## Environment Details - -### Dev Environment - -**Purpose:** Where makers and developers build and test solutions. - -**Characteristics:** -- Solutions exist in **unmanaged** state -- Makers have Environment Maker or System Customizer roles -- Changes are exported nightly (or on-demand) to `develop` branch -- May contain test data and experimental components - -**Configuration:** -``` -GitHub Environment: Dev -Variables: - POWERPLATFORM_ENVIRONMENT_URL: https://org-dev.crm.dynamics.com/ - POWERPLATFORM_TENANT_ID: - POWERPLATFORM_CLIENT_ID: -Secrets: - POWERPLATFORM_CLIENT_SECRET: -``` - -**Who has access:** -- Makers (Environment Maker role) -- Developers (System Customizer role) -- Admins (System Administrator role) - ---- - -### QA Environment - -**Purpose:** Testing environment where QA validates changes before production. 
- -**Characteristics:** -- Solutions deployed as **managed** -- Automatically receives deployments when `develop` branch is updated -- Should mirror production configuration as closely as possible -- Test data should be representative but not production data - -**Configuration:** -``` -GitHub Environment: QA -Variables: - POWERPLATFORM_ENVIRONMENT_URL: https://org-qa.crm.dynamics.com/ - POWERPLATFORM_TENANT_ID: - POWERPLATFORM_CLIENT_ID: -Secrets: - POWERPLATFORM_CLIENT_SECRET: -``` - -**Who has access:** -- QA testers (Basic User + app-specific roles) -- Developers (read-only for debugging) -- Admins (System Administrator role) - ---- - -### Prod Environment - -**Purpose:** Live production environment for end users. - -**Characteristics:** -- Solutions deployed as **managed** -- Receives deployments only when `main` branch is updated -- Should have approval gates before deployment -- No direct customization allowed - -**Configuration:** -``` -GitHub Environment: Prod (when added) -Variables: - POWERPLATFORM_ENVIRONMENT_URL: https://org-prod.crm.dynamics.com/ - POWERPLATFORM_TENANT_ID: - POWERPLATFORM_CLIENT_ID: -Secrets: - POWERPLATFORM_CLIENT_SECRET: -Protection Rules: - - Required reviewers: 1+ - - Wait timer: optional -``` - -**Who has access:** -- End users (app-specific security roles) -- Support staff (limited access for troubleshooting) -- Admins (System Administrator role, limited to emergencies) - ---- - -## Environment Flow - -```mermaid -graph LR - subgraph "Dev Environment" - D1[Makers work here] - D2[Unmanaged solutions] - end - - subgraph "QA Environment" - Q1[QA tests here] - Q2[Managed solutions] - end - - subgraph "Prod Environment" - P1[Users work here] - P2[Managed solutions] - end - - D1 --> |"export to develop"| Q1 - Q1 --> |"PR to main"| P1 -``` - ---- - -## Service Principal Setup - -Each environment requires a service principal (app registration) for automated deployments. 
- -### Required Permissions - -The service principal needs: -- **Dataverse:** System Administrator role (or custom role with solution import permissions) -- **Azure AD:** No special permissions required - -### Creating the Service Principal - -```bash -# Create app registration -az ad app create --display-name "Power Platform CI/CD" - -# Create service principal -az ad sp create --id - -# Create client secret -az ad app credential reset --id -``` - -### Registering in Power Platform - -```bash -# Add application user to environment -pac admin assign-user \ - --environment \ - --user \ - --role "System Administrator" -``` - ---- - -## When to Add More Environments - -### Add UAT Environment When: -- Formal user acceptance testing is required -- Business stakeholders need sign-off before production -- Regulatory compliance requires documented testing phases - -### Add Integration Environment When: -- Multiple teams work on different solutions -- Solutions have complex dependencies -- You need to validate solution interactions before QA - -### Add Personal Developer Environments When: -- Multiple developers frequently conflict in shared Dev -- Developers need isolated experimentation space -- You have automation for environment provisioning - ---- - -## Environment Naming Convention - -| Environment | Suggested Name | URL Pattern | -|-------------|---------------|-------------| -| Dev | `{project}-dev` | `org-{project}-dev.crm.dynamics.com` | -| QA | `{project}-qa` | `org-{project}-qa.crm.dynamics.com` | -| UAT | `{project}-uat` | `org-{project}-uat.crm.dynamics.com` | -| Prod | `{project}-prod` | `org-{project}.crm.dynamics.com` | - ---- - -## πŸ”— See Also - -- [ALM_OVERVIEW.md](ALM_OVERVIEW.md) - High-level ALM philosophy -- [BRANCHING_STRATEGY.md](BRANCHING_STRATEGY.md) - How branches map to environments -- [PIPELINE_STRATEGY.md](PIPELINE_STRATEGY.md) - Automated deployment configuration -- [Microsoft Environment 
Strategy](https://learn.microsoft.com/en-us/power-platform/alm/environment-strategy-alm) - Official guidance diff --git a/docs/strategy/PIPELINE_STRATEGY.md b/docs/strategy/PIPELINE_STRATEGY.md deleted file mode 100644 index a7185a3..0000000 --- a/docs/strategy/PIPELINE_STRATEGY.md +++ /dev/null @@ -1,405 +0,0 @@ -# Pipeline Strategy - -This document explains our CI/CD approach for Power Platform solutions using GitHub Actions and PAC CLI. - ---- - -## Why PAC CLI Over Alternatives - -We use PAC CLI directly instead of `microsoft/powerplatform-actions` or Power Platform Pipelines. - -| Approach | Pros | Cons | Our Choice | -|----------|------|------|------------| -| **PAC CLI** | Portable, reliable, full control | More setup | Selected | -| **powerplatform-actions** | Less code | Download issues, less portable | Not used | -| **Power Platform Pipelines** | Built-in, simple | No source control | Not used | - -**Rationale:** - -1. **Reliability** - PAC CLI installs from NuGet, avoiding GitHub action download issues -2. **Portability** - Same commands work in Azure DevOps with minimal changes -3. **Transparency** - Direct commands are easier to debug than wrapped actions -4. 
**Extensibility** - Full control over validation, error handling, and custom logic - ---- - -## Pipeline Architecture - -```mermaid -graph TB - subgraph "Composite Actions" - A1[setup-pac-cli] - A2[pac-auth] - A3[pack-solution] - A4[import-solution] - A5[export-solution] - end - - subgraph "Reusable Workflows" - R1[_deploy-solution.yml] - end - - subgraph "CI Workflows" - CI1[ci-export.yml] - CI2[pr-validate.yml] - end - - subgraph "CD Workflows" - CD1[cd-qa.yml] - CD2[cd-prod.yml] - end - - A1 --> CI1 - A2 --> CI1 - A5 --> CI1 - - A1 --> CI2 - A3 --> CI2 - - A1 --> R1 - A2 --> R1 - A3 --> R1 - A4 --> R1 - - R1 --> CD1 - R1 --> CD2 -``` - -### Data Flow - -```mermaid -graph LR - DEV[Dev Env] -->|nightly export| CI[ci-export.yml] - CI -->|commit| DEVELOP[develop branch] - DEVELOP -->|push trigger| QA_DEPLOY[cd-qa.yml] - QA_DEPLOY -->|deploy| QA[QA Env] - DEVELOP -->|PR| MAIN[main branch] - MAIN -->|push trigger| PROD_DEPLOY[cd-prod.yml] - PROD_DEPLOY -->|deploy| PROD[Prod Env] -``` - ---- - -## Workflow File Naming Convention - -### File Names - -| Type | Pattern | Example | -|------|---------|---------| -| Reusable workflow | `_name.yml` (underscore prefix) | `_deploy-solution.yml` | -| CI workflow | `ci-{purpose}.yml` | `ci-export.yml` | -| CD workflow | `cd-{target}.yml` | `cd-qa.yml`, `cd-prod.yml` | -| PR workflow | `pr-{purpose}.yml` | `pr-validate.yml` | - -### UI Display Names (`name:` field) - -| Pattern | Example | -|---------|---------| -| `CI: {Description}` | `CI: Export from Dev` | -| `CD: Deploy to {Target}` | `CD: Deploy to QA` | -| `PR: {Description}` | `PR: Validate Solution` | - ---- - -## Workflow Files - -| Workflow | Trigger | Purpose | -|----------|---------|---------| -| `ci-export.yml` | Nightly schedule, manual | Export from Dev to develop branch | -| `ci-build.yml` | Push to develop (solution/plugin paths), manual | Build plugins and pack solution artifact | -| `ci-plugin-deploy.yml` | Push to develop (plugin paths), manual | Deploy 
plugins directly to Dev environment | -| `cd-qa.yml` | After CI Build succeeds, manual | Deploy to QA environment | -| `cd-prod.yml` | Push to main, manual | Deploy to Production environment | -| `pr-validate.yml` | PR to develop/main | Validate solution, build code, run Solution Checker | -| `codeql.yml` | Push, PR, weekly schedule | Security scanning for C# code | -| `_deploy-solution.yml` | Called by cd-* workflows | Reusable deploy pattern | - ---- - -## CI: Plugin Deployment Workflow - -The `ci-plugin-deploy.yml` workflow provides an "inner loop" for plugin development, automatically deploying plugin code to the Dev environment when changes are pushed. - -### Why a Separate Plugin Workflow? - -Plugin development benefits from a faster feedback loop than solution deployment: - -1. **Immediate deployment** - Plugin code changes deploy directly to Dev without waiting for nightly export -2. **Registration sync** - Plugin steps are registered/updated automatically -3. **Solution capture** - Changes are captured in the next nightly export to develop branch - -### Workflow Flow - -```mermaid -graph LR - A[Push to develop] -->|plugin paths only| B[Build Plugins] - B --> C[Extract Registrations] - C --> D[Auth to Dev] - D --> E[Deploy to Dev] - E --> F[Nightly Export] - F -->|captures changes| G[develop branch] -``` - -### Path Filters - -The workflow only triggers when plugin code changes: - -```yaml -paths: - - 'src/Plugins/**' - - 'src/PluginPackages/**' -``` - -### Manual Dispatch Options - -| Input | Description | Default | -|-------|-------------|---------| -| `project` | Deploy specific project only | (all) | -| `force` | Remove orphaned plugin steps | false | -| `dry_run` | WhatIf mode - show what would happen | false | - -### Relationship to Other Workflows - -``` -Developer changes plugin code - ↓ -ci-plugin-deploy.yml (immediate) - β†’ Deploys to Dev environment - β†’ Registers/updates plugin steps - ↓ -ci-export.yml (nightly) - β†’ Captures plugin 
registrations in solution - β†’ Commits to develop branch - ↓ -ci-build.yml (on push) - β†’ Builds plugins - β†’ Packs solution with binaries -``` - -**Key insight:** Plugin deployment and solution export are complementary: -- `ci-plugin-deploy.yml` handles the **runtime registration** (steps, images) -- `ci-export.yml` captures these registrations into **source control** -- `ci-build.yml` packages **binaries** into the solution artifact - ---- - -## Composite Actions - -Reusable step sequences are extracted into composite actions in `.github/actions/`: - -| Action | Purpose | Key Inputs | -|--------|---------|------------| -| `setup-pac-cli` | Install .NET and PAC CLI | `dotnet-version` | -| `pac-auth` | Authenticate to environment | `environment-url`, `tenant-id`, `client-id`, `client-secret` | -| `pack-solution` | Pack solution from source | `solution-folder`, `solution-name`, `package-type` | -| `import-solution` | Import solution to environment | `solution-path`, `force-overwrite` | -| `export-solution` | Export and unpack solution | `solution-name`, `output-folder` | -| `build-solution` | Build .NET solution | `solution-path`, `configuration`, `run-tests` | -| `copy-plugin-assemblies` | Copy classic DLLs to solution | `source-assembly`, `solution-folder` | -| `copy-plugin-packages` | Copy plugin packages to solution | `source-package`, `solution-folder` | -| `check-solution` | Run Solution Checker | `solution-path`, `fail-on-level` | -| `analyze-changes` | Detect meaningful changes | `solution-folder` | - -### Usage Example - -```yaml -steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Setup PAC CLI - uses: ./.github/actions/setup-pac-cli - - - name: Authenticate - uses: ./.github/actions/pac-auth - with: - environment-url: ${{ vars.POWERPLATFORM_ENVIRONMENT_URL }} - tenant-id: ${{ vars.POWERPLATFORM_TENANT_ID }} - client-id: ${{ vars.POWERPLATFORM_CLIENT_ID }} - client-secret: ${{ secrets.POWERPLATFORM_CLIENT_SECRET }} - - - name: Build 
managed solution - run: dotnet build solutions/PPDSDemo/PPDSDemo.cdsproj -c Release - - - name: Import solution - uses: ./.github/actions/import-solution - with: - solution-path: solutions/PPDSDemo/bin/Release/PPDSDemo.zip -``` - ---- - -## Reusable Workflow - -The `_deploy-solution.yml` workflow encapsulates the full deploy pattern: - -```yaml -jobs: - deploy-to-qa: - uses: ./.github/workflows/_deploy-solution.yml - with: - environment-name: QA - solution-name: PPDSDemo - solution-folder: solutions/PPDSDemo/src - ref: develop - secrets: inherit -``` - -**Inputs:** -| Input | Required | Description | -|-------|----------|-------------| -| `environment-name` | Yes | GitHub environment (Dev, QA, Prod) | -| `solution-name` | Yes | Solution unique name | -| `solution-folder` | Yes | Path to solution source folder | -| `ref` | No | Git ref to checkout | - -> **Note:** The solution source uses `--packagetype Both` format, enabling Release builds to produce managed solutions. - ---- - -## Environment Configuration - -### GitHub Environments - -Each Power Platform environment maps to a GitHub environment: - -``` -GitHub Environment: Dev / QA / Prod -β”œβ”€β”€ Variables: -β”‚ β”œβ”€β”€ POWERPLATFORM_ENVIRONMENT_URL -β”‚ β”œβ”€β”€ POWERPLATFORM_TENANT_ID -β”‚ └── POWERPLATFORM_CLIENT_ID -└── Secrets: - └── POWERPLATFORM_CLIENT_SECRET -``` - -### Variable Naming - -| Variable | Description | Example | -|----------|-------------|---------| -| `POWERPLATFORM_ENVIRONMENT_URL` | Dataverse URL | `https://org.crm.dynamics.com/` | -| `POWERPLATFORM_TENANT_ID` | Azure AD tenant | `xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx` | -| `POWERPLATFORM_CLIENT_ID` | Service principal app ID | `xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx` | -| `POWERPLATFORM_CLIENT_SECRET` | Service principal secret | (stored as secret) | - ---- - -## Adding New Workflows - -The modular architecture makes adding new workflows simple: - -### Example: Add UAT Deployment - -```yaml -# .github/workflows/cd-uat.yml -name: 'CD: 
Deploy to UAT' - -on: - workflow_dispatch: - inputs: - solution_name: - default: 'PPDSDemo' - -jobs: - deploy-to-uat: - uses: ./.github/workflows/_deploy-solution.yml - with: - environment-name: UAT # Create this GitHub environment - solution-name: ${{ github.event.inputs.solution_name || 'PPDSDemo' }} - solution-folder: solutions/PPDSDemo/src - secrets: inherit -``` - -### Example: Add Solution Checker - -```yaml -# In any workflow, add solution checker step: -- name: Run Solution Checker - run: | - pac solution check \ - --path "${{ steps.pack.outputs.solution-path }}" \ - --outputDirectory "./reports" - -- name: Upload checker results - uses: actions/upload-artifact@v4 - with: - name: solution-checker-results - path: ./reports/ -``` - ---- - -## Error Handling - -### Retry Logic - -```yaml -- name: Import Solution with Retry - uses: nick-fields/retry@v2 - with: - timeout_minutes: 30 - max_attempts: 3 - command: | - pac solution import \ - --path "./exports/${{ env.SOLUTION_NAME }}_managed.zip" \ - --force-overwrite -``` - -### Failure Notifications - -```yaml -- name: Notify on Failure - if: failure() - run: | - # Send notification (Teams, Slack, email, etc.) - echo "Deployment failed for ${{ env.SOLUTION_NAME }}" -``` - ---- - -## Migration to Azure DevOps - -The PAC CLI commands are identical in Azure DevOps. Only the pipeline syntax changes: - -**GitHub Actions:** -```yaml -- name: Export Solution - run: pac solution export --name "MySolution" --path "./solution.zip" -``` - -**Azure DevOps:** -```yaml -- script: pac solution export --name "MySolution" --path "./solution.zip" - displayName: 'Export Solution' -``` - -For authentication, Azure DevOps uses the Power Platform Build Tools extension or service connections instead of direct PAC CLI auth. 
- ---- - -## Current Features - -| Feature | Description | Status | -|---------|-------------|--------| -| Solution Checker | Automated quality gates in PR validation | Done | -| CodeQL scanning | Security analysis for C# code | Done | -| Dependency Review | Block PRs with vulnerable dependencies | Done | -| Plugin build integration | Build and copy assemblies/packages | Done | -| Version stamping | X.X.YYYYMMDD.BuildNumber format | Done | - -## Future Enhancements - -| Enhancement | Description | Priority | -|-------------|-------------|----------| -| Approval gates | Manual approval for production | Medium | -| Multi-solution templates | Dependency-aware deployments | Medium | -| Rollback automation | Quick rollback on failure | Low | - ---- - -## See Also - -- [ALM_OVERVIEW.md](ALM_OVERVIEW.md) - High-level ALM philosophy -- [ENVIRONMENT_STRATEGY.md](ENVIRONMENT_STRATEGY.md) - Environment configuration -- [BRANCHING_STRATEGY.md](BRANCHING_STRATEGY.md) - Git workflow -- [PAC CLI Reference](https://learn.microsoft.com/en-us/power-platform/developer/cli/reference/) - Official documentation diff --git a/src/Api/PPDSDemo.Api/PPDSDemo.Api.csproj b/src/Api/PPDSDemo.Api/PPDSDemo.Api.csproj index 12744a0..ba74ed7 100644 --- a/src/Api/PPDSDemo.Api/PPDSDemo.Api.csproj +++ b/src/Api/PPDSDemo.Api/PPDSDemo.Api.csproj @@ -13,7 +13,7 @@ --> - + diff --git a/src/Console/PPDS.Dataverse.Demo/Commands/CleanCommand.cs b/src/Console/PPDS.Dataverse.Demo/Commands/CleanCommand.cs index 93d835e..f84ba30 100644 --- a/src/Console/PPDS.Dataverse.Demo/Commands/CleanCommand.cs +++ b/src/Console/PPDS.Dataverse.Demo/Commands/CleanCommand.cs @@ -15,32 +15,34 @@ public static class CleanCommand { public static Command Create() { - var command = new Command("clean", "Remove sample accounts and contacts from Dataverse"); - - var forceOption = new Option( - aliases: ["--force", "-f"], - description: "Skip confirmation prompt"); + var forceOption = new Option("--force", "-f") + { + Description = 
"Skip confirmation prompt" + }; // Use standardized options from GlobalOptionsExtensions var envOption = GlobalOptionsExtensions.CreateEnvironmentOption(); var verboseOption = GlobalOptionsExtensions.CreateVerboseOption(); var debugOption = GlobalOptionsExtensions.CreateDebugOption(); - command.AddOption(forceOption); - command.AddOption(envOption); - command.AddOption(verboseOption); - command.AddOption(debugOption); + var command = new Command("clean", "Remove sample accounts and contacts from Dataverse") + { + forceOption, + envOption, + verboseOption, + debugOption + }; - command.SetHandler(async (bool force, string? environment, bool verbose, bool debug) => + command.SetAction(async (parseResult, cancellationToken) => { var options = new GlobalOptions { - Environment = environment, - Verbose = verbose, - Debug = debug + Environment = parseResult.GetValue(envOption), + Verbose = parseResult.GetValue(verboseOption), + Debug = parseResult.GetValue(debugOption) }; - Environment.ExitCode = await ExecuteAsync(force, options); - }, forceOption, envOption, verboseOption, debugOption); + return await ExecuteAsync(parseResult.GetValue(forceOption), options); + }); return command; } diff --git a/src/Console/PPDS.Dataverse.Demo/Commands/CleanGeoDataCommand.cs b/src/Console/PPDS.Dataverse.Demo/Commands/CleanGeoDataCommand.cs index d5ee5e3..5ae42ef 100644 --- a/src/Console/PPDS.Dataverse.Demo/Commands/CleanGeoDataCommand.cs +++ b/src/Console/PPDS.Dataverse.Demo/Commands/CleanGeoDataCommand.cs @@ -19,15 +19,15 @@ public static class CleanGeoDataCommand { public static Command Create() { - var command = new Command("clean-geo-data", "Bulk delete geographic reference data"); - - var zipOnlyOption = new Option( - "--zip-only", - "Only delete ZIP codes (preserve states)"); + var zipOnlyOption = new Option("--zip-only") + { + Description = "Only delete ZIP codes (preserve states)" + }; - var confirmOption = new Option( - "--confirm", - "Skip confirmation prompt"); + var 
confirmOption = new Option("--confirm") + { + Description = "Skip confirmation prompt" + }; // Use standardized options from GlobalOptionsExtensions var envOption = GlobalOptionsExtensions.CreateEnvironmentOption(); @@ -35,24 +35,30 @@ public static Command Create() var debugOption = GlobalOptionsExtensions.CreateDebugOption(); var parallelismOption = GlobalOptionsExtensions.CreateParallelismOption(); - command.AddOption(zipOnlyOption); - command.AddOption(confirmOption); - command.AddOption(envOption); - command.AddOption(verboseOption); - command.AddOption(debugOption); - command.AddOption(parallelismOption); + var command = new Command("clean-geo-data", "Bulk delete geographic reference data") + { + zipOnlyOption, + confirmOption, + envOption, + verboseOption, + debugOption, + parallelismOption + }; - command.SetHandler(async (bool zipOnly, bool confirm, string? environment, bool verbose, bool debug, int? parallelism) => + command.SetAction(async (parseResult, cancellationToken) => { var options = new GlobalOptions { - Environment = environment, - Verbose = verbose, - Debug = debug, - Parallelism = parallelism + Environment = parseResult.GetValue(envOption), + Verbose = parseResult.GetValue(verboseOption), + Debug = parseResult.GetValue(debugOption), + Parallelism = parseResult.GetValue(parallelismOption) }; - Environment.ExitCode = await ExecuteAsync(zipOnly, confirm, options); - }, zipOnlyOption, confirmOption, envOption, verboseOption, debugOption, parallelismOption); + return await ExecuteAsync( + parseResult.GetValue(zipOnlyOption), + parseResult.GetValue(confirmOption), + options); + }); return command; } diff --git a/src/Console/PPDS.Dataverse.Demo/Commands/CountGeoDataCommand.cs b/src/Console/PPDS.Dataverse.Demo/Commands/CountGeoDataCommand.cs index b706df0..5a05cf9 100644 --- a/src/Console/PPDS.Dataverse.Demo/Commands/CountGeoDataCommand.cs +++ b/src/Console/PPDS.Dataverse.Demo/Commands/CountGeoDataCommand.cs @@ -12,27 +12,28 @@ public static class 
CountGeoDataCommand { public static Command Create() { - var command = new Command("count-geo-data", "Display record counts for geographic reference data"); - // Use standardized options from GlobalOptionsExtensions var envOption = GlobalOptionsExtensions.CreateEnvironmentOption(); var verboseOption = GlobalOptionsExtensions.CreateVerboseOption(); var debugOption = GlobalOptionsExtensions.CreateDebugOption(); - command.AddOption(envOption); - command.AddOption(verboseOption); - command.AddOption(debugOption); + var command = new Command("count-geo-data", "Display record counts for geographic reference data") + { + envOption, + verboseOption, + debugOption + }; - command.SetHandler(async (string? environment, bool verbose, bool debug) => + command.SetAction(async (parseResult, cancellationToken) => { var options = new GlobalOptions { - Environment = environment, - Verbose = verbose, - Debug = debug + Environment = parseResult.GetValue(envOption), + Verbose = parseResult.GetValue(verboseOption), + Debug = parseResult.GetValue(debugOption) }; - Environment.ExitCode = await ExecuteAsync(options); - }, envOption, verboseOption, debugOption); + return await ExecuteAsync(options); + }); return command; } diff --git a/src/Console/PPDS.Dataverse.Demo/Commands/CreateGeoSchemaCommand.cs b/src/Console/PPDS.Dataverse.Demo/Commands/CreateGeoSchemaCommand.cs index 7df4de8..4333a8f 100644 --- a/src/Console/PPDS.Dataverse.Demo/Commands/CreateGeoSchemaCommand.cs +++ b/src/Console/PPDS.Dataverse.Demo/Commands/CreateGeoSchemaCommand.cs @@ -16,32 +16,34 @@ public static class CreateGeoSchemaCommand public static Command Create() { - var command = new Command("create-geo-schema", "Create geographic reference data tables for volume testing"); - - var deleteFirstOption = new Option( - "--delete-first", - "Delete existing tables before creating (WARNING: destroys data)"); + var deleteFirstOption = new Option("--delete-first") + { + Description = "Delete existing tables before creating 
(WARNING: destroys data)" + }; // Use standardized options from GlobalOptionsExtensions var envOption = GlobalOptionsExtensions.CreateEnvironmentOption(); var verboseOption = GlobalOptionsExtensions.CreateVerboseOption(); var debugOption = GlobalOptionsExtensions.CreateDebugOption(); - command.AddOption(deleteFirstOption); - command.AddOption(envOption); - command.AddOption(verboseOption); - command.AddOption(debugOption); + var command = new Command("create-geo-schema", "Create geographic reference data tables for volume testing") + { + deleteFirstOption, + envOption, + verboseOption, + debugOption + }; - command.SetHandler(async (bool deleteFirst, string? environment, bool verbose, bool debug) => + command.SetAction(async (parseResult, cancellationToken) => { var options = new GlobalOptions { - Environment = environment, - Verbose = verbose, - Debug = debug + Environment = parseResult.GetValue(envOption), + Verbose = parseResult.GetValue(verboseOption), + Debug = parseResult.GetValue(debugOption) }; - Environment.ExitCode = await ExecuteAsync(deleteFirst, options); - }, deleteFirstOption, envOption, verboseOption, debugOption); + return await ExecuteAsync(parseResult.GetValue(deleteFirstOption), options); + }); return command; } diff --git a/src/Console/PPDS.Dataverse.Demo/Commands/CrossEnvMigrationCommand.cs b/src/Console/PPDS.Dataverse.Demo/Commands/CrossEnvMigrationCommand.cs index be8cc6f..372f35e 100644 --- a/src/Console/PPDS.Dataverse.Demo/Commands/CrossEnvMigrationCommand.cs +++ b/src/Console/PPDS.Dataverse.Demo/Commands/CrossEnvMigrationCommand.cs @@ -43,34 +43,40 @@ public static class CrossEnvMigrationCommand public static Command Create() { - var command = new Command("migrate-to-qa", "Export from Dev and import to QA environment"); - - var skipSeedOption = new Option( - "--skip-seed", - "Skip seeding data in Dev (use existing data)"); + var skipSeedOption = new Option("--skip-seed") + { + Description = "Skip seeding data in Dev (use existing data)" + 
}; - var dryRunOption = new Option( - "--dry-run", - "Export only, don't import to QA"); + var dryRunOption = new Option("--dry-run") + { + Description = "Export only, don't import to QA" + }; // Use standardized options from GlobalOptionsExtensions var verboseOption = GlobalOptionsExtensions.CreateVerboseOption(); var debugOption = GlobalOptionsExtensions.CreateDebugOption(); - command.AddOption(skipSeedOption); - command.AddOption(dryRunOption); - command.AddOption(verboseOption); - command.AddOption(debugOption); + var command = new Command("migrate-to-qa", "Export from Dev and import to QA environment") + { + skipSeedOption, + dryRunOption, + verboseOption, + debugOption + }; - command.SetHandler(async (bool skipSeed, bool dryRun, bool verbose, bool debug) => + command.SetAction(async (parseResult, cancellationToken) => { var options = new GlobalOptions { - Verbose = verbose, - Debug = debug + Verbose = parseResult.GetValue(verboseOption), + Debug = parseResult.GetValue(debugOption) }; - Environment.ExitCode = await ExecuteAsync(skipSeed, dryRun, options); - }, skipSeedOption, dryRunOption, verboseOption, debugOption); + return await ExecuteAsync( + parseResult.GetValue(skipSeedOption), + parseResult.GetValue(dryRunOption), + options); + }); return command; } diff --git a/src/Console/PPDS.Dataverse.Demo/Commands/ExportGeoDataCommand.cs b/src/Console/PPDS.Dataverse.Demo/Commands/ExportGeoDataCommand.cs index 66c20ed..eb64636 100644 --- a/src/Console/PPDS.Dataverse.Demo/Commands/ExportGeoDataCommand.cs +++ b/src/Console/PPDS.Dataverse.Demo/Commands/ExportGeoDataCommand.cs @@ -44,32 +44,34 @@ public static class ExportGeoDataCommand public static Command Create() { - var command = new Command("export-geo-data", "Export geographic data to a portable ZIP package"); - - var outputOption = new Option( - aliases: ["--output", "-o"], - description: $"Output ZIP file path (default: geo-export.zip)"); + var outputOption = new Option("--output", "-o") + { + Description = 
"Output ZIP file path (default: geo-export.zip)" + }; // Use standardized options from GlobalOptionsExtensions var envOption = GlobalOptionsExtensions.CreateEnvironmentOption(); var verboseOption = GlobalOptionsExtensions.CreateVerboseOption(); var debugOption = GlobalOptionsExtensions.CreateDebugOption(); - command.AddOption(outputOption); - command.AddOption(envOption); - command.AddOption(verboseOption); - command.AddOption(debugOption); + var command = new Command("export-geo-data", "Export geographic data to a portable ZIP package") + { + outputOption, + envOption, + verboseOption, + debugOption + }; - command.SetHandler(async (string? output, string? environment, bool verbose, bool debug) => + command.SetAction(async (parseResult, cancellationToken) => { var options = new GlobalOptions { - Environment = environment ?? "Dev", - Verbose = verbose, - Debug = debug + Environment = parseResult.GetValue(envOption) ?? "Dev", + Verbose = parseResult.GetValue(verboseOption), + Debug = parseResult.GetValue(debugOption) }; - Environment.ExitCode = await ExecuteAsync(output, options); - }, outputOption, envOption, verboseOption, debugOption); + return await ExecuteAsync(parseResult.GetValue(outputOption), options); + }); return command; } diff --git a/src/Console/PPDS.Dataverse.Demo/Commands/GenerateUserMappingCommand.cs b/src/Console/PPDS.Dataverse.Demo/Commands/GenerateUserMappingCommand.cs index 91d7976..e9bcd5c 100644 --- a/src/Console/PPDS.Dataverse.Demo/Commands/GenerateUserMappingCommand.cs +++ b/src/Console/PPDS.Dataverse.Demo/Commands/GenerateUserMappingCommand.cs @@ -24,35 +24,41 @@ public static class GenerateUserMappingCommand public static Command Create() { - var command = new Command("generate-user-mapping", "Generate user mapping file for cross-environment migration"); - - var outputOption = new Option( - "--output", - () => OutputPath, - "Output path for the user mapping XML file"); + var outputOption = new Option("--output") + { + Description = "Output 
path for the user mapping XML file", + DefaultValueFactory = _ => OutputPath + }; - var analyzeOnlyOption = new Option( - "--analyze", - "Analyze user differences without generating mapping file"); + var analyzeOnlyOption = new Option("--analyze") + { + Description = "Analyze user differences without generating mapping file" + }; // Use standardized options from GlobalOptionsExtensions var verboseOption = GlobalOptionsExtensions.CreateVerboseOption(); var debugOption = GlobalOptionsExtensions.CreateDebugOption(); - command.AddOption(outputOption); - command.AddOption(analyzeOnlyOption); - command.AddOption(verboseOption); - command.AddOption(debugOption); + var command = new Command("generate-user-mapping", "Generate user mapping file for cross-environment migration") + { + outputOption, + analyzeOnlyOption, + verboseOption, + debugOption + }; - command.SetHandler(async (string output, bool analyzeOnly, bool verbose, bool debug) => + command.SetAction(async (parseResult, cancellationToken) => { var options = new GlobalOptions { - Verbose = verbose, - Debug = debug + Verbose = parseResult.GetValue(verboseOption), + Debug = parseResult.GetValue(debugOption) }; - Environment.ExitCode = await ExecuteAsync(output, analyzeOnly, options); - }, outputOption, analyzeOnlyOption, verboseOption, debugOption); + return await ExecuteAsync( + parseResult.GetValue(outputOption)!, + parseResult.GetValue(analyzeOnlyOption), + options); + }); return command; } diff --git a/src/Console/PPDS.Dataverse.Demo/Commands/ImportGeoDataCommand.cs b/src/Console/PPDS.Dataverse.Demo/Commands/ImportGeoDataCommand.cs index 3e7b325..9e787f2 100644 --- a/src/Console/PPDS.Dataverse.Demo/Commands/ImportGeoDataCommand.cs +++ b/src/Console/PPDS.Dataverse.Demo/Commands/ImportGeoDataCommand.cs @@ -35,46 +35,52 @@ public static class ImportGeoDataCommand { public static Command Create() { - var command = new Command("import-geo-data", "Import geographic data from a ZIP package"); - - var dataOption = new 
Option( - aliases: ["--data", "-d"], - description: "Input ZIP file path (required)") + var dataOption = new Option("--data", "-d") { - IsRequired = true + Description = "Input ZIP file path (required)", + Required = true }; - var cleanFirstOption = new Option( - "--clean-first", - "Run clean-geo-data before import"); + var cleanFirstOption = new Option("--clean-first") + { + Description = "Run clean-geo-data before import" + }; - var stripOwnerFieldsOption = new Option( - "--strip-owner-fields", - getDefaultValue: () => true, - description: "Strip owner fields to avoid user reference errors (default: true)"); + var stripOwnerFieldsOption = new Option("--strip-owner-fields") + { + Description = "Strip owner fields to avoid user reference errors (default: true)", + DefaultValueFactory = _ => true + }; // Use standardized options from GlobalOptionsExtensions var envOption = GlobalOptionsExtensions.CreateEnvironmentOption(isRequired: true); var verboseOption = GlobalOptionsExtensions.CreateVerboseOption(); var debugOption = GlobalOptionsExtensions.CreateDebugOption(); - command.AddOption(dataOption); - command.AddOption(envOption); - command.AddOption(cleanFirstOption); - command.AddOption(stripOwnerFieldsOption); - command.AddOption(verboseOption); - command.AddOption(debugOption); + var command = new Command("import-geo-data", "Import geographic data from a ZIP package") + { + dataOption, + envOption, + cleanFirstOption, + stripOwnerFieldsOption, + verboseOption, + debugOption + }; - command.SetHandler(async (string data, string? 
environment, bool cleanFirst, bool stripOwnerFields, bool verbose, bool debug) => + command.SetAction(async (parseResult, cancellationToken) => { var options = new GlobalOptions { - Environment = environment!, - Verbose = verbose, - Debug = debug + Environment = parseResult.GetValue(envOption)!, + Verbose = parseResult.GetValue(verboseOption), + Debug = parseResult.GetValue(debugOption) }; - Environment.ExitCode = await ExecuteAsync(data, options, cleanFirst, stripOwnerFields); - }, dataOption, envOption, cleanFirstOption, stripOwnerFieldsOption, verboseOption, debugOption); + return await ExecuteAsync( + parseResult.GetValue(dataOption)!, + options, + parseResult.GetValue(cleanFirstOption), + parseResult.GetValue(stripOwnerFieldsOption)); + }); return command; } diff --git a/src/Console/PPDS.Dataverse.Demo/Commands/LoadGeoDataCommand.cs b/src/Console/PPDS.Dataverse.Demo/Commands/LoadGeoDataCommand.cs index 41652af..cbbee01 100644 --- a/src/Console/PPDS.Dataverse.Demo/Commands/LoadGeoDataCommand.cs +++ b/src/Console/PPDS.Dataverse.Demo/Commands/LoadGeoDataCommand.cs @@ -31,19 +31,20 @@ public static class LoadGeoDataCommand public static Command Create() { - var command = new Command("load-geo-data", "Download and load US ZIP code data for volume testing"); - - var limitOption = new Option( - "--limit", - "Limit number of ZIP codes to load (for testing)"); + var limitOption = new Option("--limit") + { + Description = "Limit number of ZIP codes to load (for testing)" + }; - var skipDownloadOption = new Option( - "--skip-download", - "Use cached data file (skip download)"); + var skipDownloadOption = new Option("--skip-download") + { + Description = "Use cached data file (skip download)" + }; - var statesOnlyOption = new Option( - "--states-only", - "Only load states (skip ZIP codes)"); + var statesOnlyOption = new Option("--states-only") + { + Description = "Only load states (skip ZIP codes)" + }; // Use standardized options from GlobalOptionsExtensions var 
envOption = GlobalOptionsExtensions.CreateEnvironmentOption(); @@ -51,25 +52,32 @@ public static Command Create() var debugOption = GlobalOptionsExtensions.CreateDebugOption(); var parallelismOption = GlobalOptionsExtensions.CreateParallelismOption(); - command.AddOption(limitOption); - command.AddOption(skipDownloadOption); - command.AddOption(statesOnlyOption); - command.AddOption(envOption); - command.AddOption(verboseOption); - command.AddOption(debugOption); - command.AddOption(parallelismOption); + var command = new Command("load-geo-data", "Download and load US ZIP code data for volume testing") + { + limitOption, + skipDownloadOption, + statesOnlyOption, + envOption, + verboseOption, + debugOption, + parallelismOption + }; - command.SetHandler(async (int? limit, bool skipDownload, bool statesOnly, string? environment, bool verbose, bool debug, int? parallelism) => + command.SetAction(async (parseResult, cancellationToken) => { var options = new GlobalOptions { - Environment = environment, - Verbose = verbose, - Debug = debug, - Parallelism = parallelism + Environment = parseResult.GetValue(envOption), + Verbose = parseResult.GetValue(verboseOption), + Debug = parseResult.GetValue(debugOption), + Parallelism = parseResult.GetValue(parallelismOption) }; - Environment.ExitCode = await ExecuteAsync(limit, skipDownload, statesOnly, options); - }, limitOption, skipDownloadOption, statesOnlyOption, envOption, verboseOption, debugOption, parallelismOption); + return await ExecuteAsync( + parseResult.GetValue(limitOption), + parseResult.GetValue(skipDownloadOption), + parseResult.GetValue(statesOnlyOption), + options); + }); return command; } diff --git a/src/Console/PPDS.Dataverse.Demo/Commands/MigrateGeoDataCommand.cs b/src/Console/PPDS.Dataverse.Demo/Commands/MigrateGeoDataCommand.cs index 8cd1caa..9a63fda 100644 --- a/src/Console/PPDS.Dataverse.Demo/Commands/MigrateGeoDataCommand.cs +++ b/src/Console/PPDS.Dataverse.Demo/Commands/MigrateGeoDataCommand.cs @@ 
-36,47 +36,61 @@ public static class MigrateGeoDataCommand public static Command Create() { - var command = new Command("migrate-geo-data", "Migrate geographic data between environments"); - - var sourceOption = new Option( - aliases: ["--source", "-s"], - getDefaultValue: () => "Dev", - description: "Source environment name"); + var sourceOption = new Option("--source", "-s") + { + Description = "Source environment name", + DefaultValueFactory = _ => "Dev" + }; - var targetOption = new Option( - aliases: ["--target", "-t"], - description: "Target environment name (required for CLI mode)"); + var targetOption = new Option("--target", "-t") + { + Description = "Target environment name (required for CLI mode)" + }; - var dryRunOption = new Option( - "--dry-run", - "Export only, don't import to target"); + var dryRunOption = new Option("--dry-run") + { + Description = "Export only, don't import to target" + }; - var cleanTargetOption = new Option( - "--clean-target", - "Clean target environment before import"); + var cleanTargetOption = new Option("--clean-target") + { + Description = "Clean target environment before import" + }; - var useSdkOption = new Option( - "--use-sdk", - "Use direct SDK instead of CLI (for SDK developers)"); + var useSdkOption = new Option("--use-sdk") + { + Description = "Use direct SDK instead of CLI (for SDK developers)" + }; // Use standardized options from GlobalOptionsExtensions var parallelismOption = GlobalOptionsExtensions.CreateParallelismOption(); var verboseOption = GlobalOptionsExtensions.CreateVerboseOption(); var debugOption = GlobalOptionsExtensions.CreateDebugOption(); - command.AddOption(sourceOption); - command.AddOption(targetOption); - command.AddOption(dryRunOption); - command.AddOption(cleanTargetOption); - command.AddOption(useSdkOption); - command.AddOption(parallelismOption); - command.AddOption(verboseOption); - command.AddOption(debugOption); + var command = new Command("migrate-geo-data", "Migrate geographic data 
between environments") + { + sourceOption, + targetOption, + dryRunOption, + cleanTargetOption, + useSdkOption, + parallelismOption, + verboseOption, + debugOption + }; - command.SetHandler(async (string source, string? target, bool dryRun, bool cleanTarget, bool useSdk, int? parallelism, bool verbose, bool debug) => + command.SetAction(async (parseResult, cancellationToken) => { - Environment.ExitCode = await ExecuteAsync(source, target, dryRun, cleanTarget, useSdk, parallelism, verbose, debug); - }, sourceOption, targetOption, dryRunOption, cleanTargetOption, useSdkOption, parallelismOption, verboseOption, debugOption); + return await ExecuteAsync( + parseResult.GetValue(sourceOption)!, + parseResult.GetValue(targetOption), + parseResult.GetValue(dryRunOption), + parseResult.GetValue(cleanTargetOption), + parseResult.GetValue(useSdkOption), + parseResult.GetValue(parallelismOption), + parseResult.GetValue(verboseOption), + parseResult.GetValue(debugOption)); + }); return command; } diff --git a/src/Console/PPDS.Dataverse.Demo/Commands/MigrationFeaturesCommand.cs b/src/Console/PPDS.Dataverse.Demo/Commands/MigrationFeaturesCommand.cs index 5b9a773..5259438 100644 --- a/src/Console/PPDS.Dataverse.Demo/Commands/MigrationFeaturesCommand.cs +++ b/src/Console/PPDS.Dataverse.Demo/Commands/MigrationFeaturesCommand.cs @@ -19,20 +19,33 @@ public static class MigrationFeaturesCommand public static Command Create() { - var command = new Command("demo-features", "Demonstrate migration features"); - var featureOption = new Option("--feature", getDefaultValue: () => "all"); + var featureOption = new Option("--feature") + { + DefaultValueFactory = _ => "all" + }; var envOption = GlobalOptionsExtensions.CreateEnvironmentOption(); var verboseOption = GlobalOptionsExtensions.CreateVerboseOption(); var debugOption = GlobalOptionsExtensions.CreateDebugOption(); - command.AddOption(featureOption); - command.AddOption(envOption); - command.AddOption(verboseOption); - 
command.AddOption(debugOption); - command.SetHandler(async (string feature, string? environment, bool verbose, bool debug) => + + var command = new Command("demo-features", "Demonstrate migration features") + { + featureOption, + envOption, + verboseOption, + debugOption + }; + + command.SetAction(async (parseResult, cancellationToken) => { - var options = new GlobalOptions { Environment = environment, Verbose = verbose, Debug = debug }; - Environment.ExitCode = await ExecuteAsync(feature, options); - }, featureOption, envOption, verboseOption, debugOption); + var options = new GlobalOptions + { + Environment = parseResult.GetValue(envOption), + Verbose = parseResult.GetValue(verboseOption), + Debug = parseResult.GetValue(debugOption) + }; + return await ExecuteAsync(parseResult.GetValue(featureOption)!, options); + }); + return command; } diff --git a/src/Console/PPDS.Dataverse.Demo/Commands/SeedCommand.cs b/src/Console/PPDS.Dataverse.Demo/Commands/SeedCommand.cs index 80cafa5..be085eb 100644 --- a/src/Console/PPDS.Dataverse.Demo/Commands/SeedCommand.cs +++ b/src/Console/PPDS.Dataverse.Demo/Commands/SeedCommand.cs @@ -22,27 +22,28 @@ public static class SeedCommand { public static Command Create() { - var command = new Command("seed", "Create sample accounts and contacts in Dataverse"); - // Use standardized options from GlobalOptionsExtensions var envOption = GlobalOptionsExtensions.CreateEnvironmentOption(); var verboseOption = GlobalOptionsExtensions.CreateVerboseOption(); var debugOption = GlobalOptionsExtensions.CreateDebugOption(); - command.AddOption(envOption); - command.AddOption(verboseOption); - command.AddOption(debugOption); + var command = new Command("seed", "Create sample accounts and contacts in Dataverse") + { + envOption, + verboseOption, + debugOption + }; - command.SetHandler(async (string? 
environment, bool verbose, bool debug) => + command.SetAction(async (parseResult, cancellationToken) => { var options = new GlobalOptions { - Environment = environment, - Verbose = verbose, - Debug = debug + Environment = parseResult.GetValue(envOption), + Verbose = parseResult.GetValue(verboseOption), + Debug = parseResult.GetValue(debugOption) }; - Environment.ExitCode = await ExecuteAsync(options); - }, envOption, verboseOption, debugOption); + return await ExecuteAsync(options); + }); return command; } diff --git a/src/Console/PPDS.Dataverse.Demo/Commands/TestMigrationCommand.cs b/src/Console/PPDS.Dataverse.Demo/Commands/TestMigrationCommand.cs index b55e95b..0cf6d5b 100644 --- a/src/Console/PPDS.Dataverse.Demo/Commands/TestMigrationCommand.cs +++ b/src/Console/PPDS.Dataverse.Demo/Commands/TestMigrationCommand.cs @@ -39,32 +39,43 @@ public static class TestMigrationCommand public static Command Create() { - var command = new Command("test-migration", "End-to-end test of ppds-migrate export/import"); + var skipSeedOption = new Option("--skip-seed") + { + Description = "Skip seeding (use existing data)" + }; - var skipSeedOption = new Option("--skip-seed", "Skip seeding (use existing data)"); - var skipCleanOption = new Option("--skip-clean", "Skip cleaning after export"); + var skipCleanOption = new Option("--skip-clean") + { + Description = "Skip cleaning after export" + }; // Use standardized options from GlobalOptionsExtensions var envOption = GlobalOptionsExtensions.CreateEnvironmentOption(); var verboseOption = GlobalOptionsExtensions.CreateVerboseOption(); var debugOption = GlobalOptionsExtensions.CreateDebugOption(); - command.AddOption(skipSeedOption); - command.AddOption(skipCleanOption); - command.AddOption(envOption); - command.AddOption(verboseOption); - command.AddOption(debugOption); + var command = new Command("test-migration", "End-to-end test of ppds-migrate export/import") + { + skipSeedOption, + skipCleanOption, + envOption, + verboseOption, + 
debugOption + }; - command.SetHandler(async (bool skipSeed, bool skipClean, string? environment, bool verbose, bool debug) => + command.SetAction(async (parseResult, cancellationToken) => { var options = new GlobalOptions { - Environment = environment, - Verbose = verbose, - Debug = debug + Environment = parseResult.GetValue(envOption), + Verbose = parseResult.GetValue(verboseOption), + Debug = parseResult.GetValue(debugOption) }; - Environment.ExitCode = await ExecuteAsync(skipSeed, skipClean, options); - }, skipSeedOption, skipCleanOption, envOption, verboseOption, debugOption); + return await ExecuteAsync( + parseResult.GetValue(skipSeedOption), + parseResult.GetValue(skipCleanOption), + options); + }); return command; } diff --git a/src/Console/PPDS.Dataverse.Demo/Commands/WhoAmICommand.cs b/src/Console/PPDS.Dataverse.Demo/Commands/WhoAmICommand.cs index 2aeaef0..1233403 100644 --- a/src/Console/PPDS.Dataverse.Demo/Commands/WhoAmICommand.cs +++ b/src/Console/PPDS.Dataverse.Demo/Commands/WhoAmICommand.cs @@ -11,27 +11,28 @@ public static class WhoAmICommand { public static Command Create() { - var command = new Command("whoami", "Test connectivity with WhoAmI request"); - // Use standardized options from GlobalOptionsExtensions var envOption = GlobalOptionsExtensions.CreateEnvironmentOption(); var verboseOption = GlobalOptionsExtensions.CreateVerboseOption(); var debugOption = GlobalOptionsExtensions.CreateDebugOption(); - command.AddOption(envOption); - command.AddOption(verboseOption); - command.AddOption(debugOption); + var command = new Command("whoami", "Test connectivity with WhoAmI request") + { + envOption, + verboseOption, + debugOption + }; - command.SetHandler(async (string? 
environment, bool verbose, bool debug) => + command.SetAction(async (parseResult, cancellationToken) => { var options = new GlobalOptions { - Environment = environment, - Verbose = verbose, - Debug = debug + Environment = parseResult.GetValue(envOption), + Verbose = parseResult.GetValue(verboseOption), + Debug = parseResult.GetValue(debugOption) }; - Environment.ExitCode = await ExecuteAsync(options); - }, envOption, verboseOption, debugOption); + return await ExecuteAsync(options); + }); return command; } diff --git a/src/Console/PPDS.Dataverse.Demo/Infrastructure/GlobalOptions.cs b/src/Console/PPDS.Dataverse.Demo/Infrastructure/GlobalOptions.cs index 7e94a20..6ca04cf 100644 --- a/src/Console/PPDS.Dataverse.Demo/Infrastructure/GlobalOptions.cs +++ b/src/Console/PPDS.Dataverse.Demo/Infrastructure/GlobalOptions.cs @@ -55,16 +55,11 @@ public static class GlobalOptionsExtensions string? description = null, bool isRequired = false) { - var option = new System.CommandLine.Option( - aliases: ["--environment", "--env", "-e"], - description: description ?? "Target environment name (e.g., 'Dev', 'QA'). Uses DefaultEnvironment from config if not specified."); - - if (isRequired) + return new System.CommandLine.Option("--environment", "--env", "-e") { - option.IsRequired = true; - } - - return option; + Description = description ?? "Target environment name (e.g., 'Dev', 'QA'). Uses DefaultEnvironment from config if not specified.", + Required = isRequired + }; } /// @@ -73,9 +68,10 @@ public static class GlobalOptionsExtensions public static System.CommandLine.Option CreateVerboseOption( string? description = null) { - return new System.CommandLine.Option( - aliases: ["--verbose", "-v"], - description: description ?? "Enable verbose logging (operational: Connecting..., Processing...)"); + return new System.CommandLine.Option("--verbose", "-v") + { + Description = description ?? 
"Enable verbose logging (operational: Connecting..., Processing...)" + }; } /// @@ -84,9 +80,10 @@ public static System.CommandLine.Option CreateVerboseOption( public static System.CommandLine.Option CreateDebugOption( string? description = null) { - return new System.CommandLine.Option( - name: "--debug", - description: description ?? "Enable debug logging (diagnostic: parallelism, ceiling, internal state)"); + return new System.CommandLine.Option("--debug") + { + Description = description ?? "Enable debug logging (diagnostic: parallelism, ceiling, internal state)" + }; } /// @@ -95,9 +92,9 @@ public static System.CommandLine.Option CreateDebugOption( public static System.CommandLine.Option CreateParallelismOption( string? description = null) { - return new System.CommandLine.Option( - name: "--parallelism", - description: description ?? "Max parallel batches (uses SDK default if not specified)"); + return new System.CommandLine.Option("--parallelism") + { + Description = description ?? 
"Max parallel batches (uses SDK default if not specified)" + }; } - } diff --git a/src/Console/PPDS.Dataverse.Demo/PPDS.Dataverse.Demo.csproj b/src/Console/PPDS.Dataverse.Demo/PPDS.Dataverse.Demo.csproj index b648dca..5de771e 100644 --- a/src/Console/PPDS.Dataverse.Demo/PPDS.Dataverse.Demo.csproj +++ b/src/Console/PPDS.Dataverse.Demo/PPDS.Dataverse.Demo.csproj @@ -11,10 +11,10 @@ - + - - + + diff --git a/src/Console/PPDS.Dataverse.Demo/Program.cs b/src/Console/PPDS.Dataverse.Demo/Program.cs index 5fb9479..4e864bb 100644 --- a/src/Console/PPDS.Dataverse.Demo/Program.cs +++ b/src/Console/PPDS.Dataverse.Demo/Program.cs @@ -6,27 +6,24 @@ var rootCommand = new RootCommand("PPDS.Dataverse Demo - Connection pool and data migration demos") { - Name = "ppds-dataverse-demo" + WhoAmICommand.Create(), + SeedCommand.Create(), + CleanCommand.Create(), + TestMigrationCommand.Create(), + MigrationFeaturesCommand.Create(), + CrossEnvMigrationCommand.Create(), + GenerateUserMappingCommand.Create(), + CreateGeoSchemaCommand.Create(), + LoadGeoDataCommand.Create(), + CountGeoDataCommand.Create(), + CleanGeoDataCommand.Create(), + ExportGeoDataCommand.Create(), + ImportGeoDataCommand.Create(), + MigrateGeoDataCommand.Create() }; -// Add subcommands -rootCommand.AddCommand(WhoAmICommand.Create()); -rootCommand.AddCommand(SeedCommand.Create()); -rootCommand.AddCommand(CleanCommand.Create()); -rootCommand.AddCommand(TestMigrationCommand.Create()); -rootCommand.AddCommand(MigrationFeaturesCommand.Create()); -rootCommand.AddCommand(CrossEnvMigrationCommand.Create()); -rootCommand.AddCommand(GenerateUserMappingCommand.Create()); -rootCommand.AddCommand(CreateGeoSchemaCommand.Create()); -rootCommand.AddCommand(LoadGeoDataCommand.Create()); -rootCommand.AddCommand(CountGeoDataCommand.Create()); -rootCommand.AddCommand(CleanGeoDataCommand.Create()); -rootCommand.AddCommand(ExportGeoDataCommand.Create()); -rootCommand.AddCommand(ImportGeoDataCommand.Create()); 
-rootCommand.AddCommand(MigrateGeoDataCommand.Create()); - // Default behavior: show help if no command specified -rootCommand.SetHandler(() => +rootCommand.SetAction((parseResult, cancellationToken) => { Console.WriteLine("PPDS.Dataverse Demo"); Console.WriteLine("==================="); @@ -66,6 +63,8 @@ Console.WriteLine(" Dataverse:DefaultEnvironment - Default for pool-based commands"); Console.WriteLine(" See: docs/guides/LOCAL_DEVELOPMENT_GUIDE.md"); Console.WriteLine(); + + return Task.FromResult(0); }); -return await rootCommand.InvokeAsync(args); +return await rootCommand.Parse(args).InvokeAsync(); diff --git a/src/Plugins/PPDSDemo.Plugins/PPDSDemo.Plugins.csproj b/src/Plugins/PPDSDemo.Plugins/PPDSDemo.Plugins.csproj index 3ce1c4e..e4a9279 100644 --- a/src/Plugins/PPDSDemo.Plugins/PPDSDemo.Plugins.csproj +++ b/src/Plugins/PPDSDemo.Plugins/PPDSDemo.Plugins.csproj @@ -27,7 +27,7 @@ - + diff --git a/src/Shared/PPDSDemo.Entities/PPDSDemo.Entities.csproj b/src/Shared/PPDSDemo.Entities/PPDSDemo.Entities.csproj index 0bc15fc..5aaf607 100644 --- a/src/Shared/PPDSDemo.Entities/PPDSDemo.Entities.csproj +++ b/src/Shared/PPDSDemo.Entities/PPDSDemo.Entities.csproj @@ -18,6 +18,7 @@ +