diff --git a/README.md b/README.md index fb7e44fd17..5886e189d4 100644 --- a/README.md +++ b/README.md @@ -238,6 +238,8 @@ Built packages are served up by the Elastic Package Registry running locally (se Built packages can also be published to the global package registry service. +When the package declares required input packages ("requires.input" in manifest.yml), the build downloads those input packages from the configured package registry (see "package_registry.base_url" in ~/.elastic-package/config.yml). The build then incorporates their policy and data stream templates, merges variable definitions into the integration manifest, bundles data stream field definitions, and resolves package: references on inputs and streams to the effective input types expected by Fleet. For details on using a local or custom registry during development, see the [HOWTO guide](./docs/howto/local_package_registry.md). + For details on how to enable dependency management, see the [HOWTO guide](https://github.com/elastic/elastic-package/blob/main/docs/howto/dependency_management.md). 
### `elastic-package changelog` diff --git a/cmd/benchmark.go b/cmd/benchmark.go index 1b4073da23..22ff9d82cb 100644 --- a/cmd/benchmark.go +++ b/cmd/benchmark.go @@ -29,6 +29,8 @@ import ( "github.com/elastic/elastic-package/internal/install" "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/registry" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/signal" "github.com/elastic/elastic-package/internal/stack" "github.com/elastic/elastic-package/internal/testrunner" @@ -331,6 +333,18 @@ func rallyCommandAction(cmd *cobra.Command, args []string) error { return fmt.Errorf("can't create Kibana client: %w", err) } + appConfig, err := install.Configuration() + if err != nil { + return fmt.Errorf("can't load configuration: %w", err) + } + + baseURL := stack.PackageRegistryBaseURL(profile, appConfig) + eprClient, err := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) 
+ if err != nil { + return fmt.Errorf("failed to create package registry client: %w", err) + } + requiredInputsResolver := requiredinputs.NewRequiredInputsResolver(eprClient) + withOpts := []rally.OptionFunc{ rally.WithVariant(variant), rally.WithBenchmarkName(benchName), @@ -344,6 +358,7 @@ func rallyCommandAction(cmd *cobra.Command, args []string) error { rally.WithRallyPackageFromRegistry(packageName, packageVersion), rally.WithRallyCorpusAtPath(corpusAtPath), rally.WithRepositoryRoot(repositoryRoot), + rally.WithRequiredInputsResolver(requiredInputsResolver), } esMetricsClient, err := initializeESMetricsClient(ctx) @@ -506,6 +521,18 @@ func streamCommandAction(cmd *cobra.Command, args []string) error { return fmt.Errorf("can't create Kibana client: %w", err) } + appConfig, err := install.Configuration() + if err != nil { + return fmt.Errorf("can't load configuration: %w", err) + } + + baseURL := stack.PackageRegistryBaseURL(profile, appConfig) + eprClient, err := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) 
+ if err != nil { + return fmt.Errorf("failed to create package registry client: %w", err) + } + requiredInputsResolver := requiredinputs.NewRequiredInputsResolver(eprClient) + withOpts := []stream.OptionFunc{ stream.WithVariant(variant), stream.WithBenchmarkName(benchName), @@ -519,6 +546,7 @@ func streamCommandAction(cmd *cobra.Command, args []string) error { stream.WithKibanaClient(kc), stream.WithProfile(profile), stream.WithRepositoryRoot(repositoryRoot), + stream.WithRequiredInputsResolver(requiredInputsResolver), } runner := stream.NewStreamBenchmark(stream.NewOptions(withOpts...)) diff --git a/cmd/build.go b/cmd/build.go index b101a70133..29d577c479 100644 --- a/cmd/build.go +++ b/cmd/build.go @@ -16,6 +16,10 @@ import ( "github.com/elastic/elastic-package/internal/install" "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/profile" + "github.com/elastic/elastic-package/internal/registry" + "github.com/elastic/elastic-package/internal/requiredinputs" + "github.com/elastic/elastic-package/internal/stack" ) const buildLongDescription = `Use this command to build a package. @@ -26,6 +30,8 @@ Built packages are served up by the Elastic Package Registry running locally (se Built packages can also be published to the global package registry service. +When the package declares required input packages ("requires.input" in manifest.yml), the build downloads those input packages from the configured package registry (see "package_registry.base_url" in ~/.elastic-package/config.yml). The build then incorporates their policy and data stream templates, merges variable definitions into the integration manifest, bundles data stream field definitions, and resolves package: references on inputs and streams to the effective input types expected by Fleet. 
For details on using a local or custom registry during development, see the [HOWTO guide](./docs/howto/local_package_registry.md). + For details on how to enable dependency management, see the [HOWTO guide](https://github.com/elastic/elastic-package/blob/main/docs/howto/dependency_management.md).` func setupBuildCommand() *cobraext.Command { @@ -84,15 +90,28 @@ func buildCommandAction(cmd *cobra.Command, args []string) error { return fmt.Errorf("can't load configuration: %w", err) } + prof, err := profile.LoadProfile(appConfig.CurrentProfile()) + if err != nil { + return fmt.Errorf("could not load profile: %w", err) + } + baseURL := stack.PackageRegistryBaseURL(prof, appConfig) + eprClient, err := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, prof)...) + if err != nil { + return fmt.Errorf("failed to create package registry client: %w", err) + } + + requiredInputsResolver := requiredinputs.NewRequiredInputsResolver(eprClient) + target, err := builder.BuildPackage(builder.BuildOptions{ - PackageRoot: packageRoot, - BuildDir: buildDir, - CreateZip: createZip, - SignPackage: signPackage, - SkipValidation: skipValidation, - RepositoryRoot: repositoryRoot, - UpdateReadmes: true, - SchemaURLs: appConfig.SchemaURLs(), + PackageRoot: packageRoot, + BuildDir: buildDir, + CreateZip: createZip, + SignPackage: signPackage, + SkipValidation: skipValidation, + RepositoryRoot: repositoryRoot, + UpdateReadmes: true, + SchemaURLs: appConfig.SchemaURLs(), + RequiredInputsResolver: requiredInputsResolver, }) if err != nil { return fmt.Errorf("building package failed: %w", err) diff --git a/cmd/install.go b/cmd/install.go index b3cae78823..d7e250b6c6 100644 --- a/cmd/install.go +++ b/cmd/install.go @@ -16,6 +16,8 @@ import ( "github.com/elastic/elastic-package/internal/kibana" "github.com/elastic/elastic-package/internal/packages" "github.com/elastic/elastic-package/internal/packages/installer" + "github.com/elastic/elastic-package/internal/registry" + 
"github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/stack" ) @@ -96,13 +98,22 @@ func installCommandAction(cmd *cobra.Command, _ []string) error { return fmt.Errorf("can't load configuration: %w", err) } + baseURL := stack.PackageRegistryBaseURL(profile, appConfig) + eprClient, err := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) + if err != nil { + return fmt.Errorf("failed to create package registry client: %w", err) + } + + requiredInputsResolver := requiredinputs.NewRequiredInputsResolver(eprClient) + installer, err := installer.NewForPackage(installer.Options{ - Kibana: kibanaClient, - PackageRoot: packageRoot, - SkipValidation: skipValidation, - ZipPath: zipPathFile, - RepositoryRoot: repositoryRoot, - SchemaURLs: appConfig.SchemaURLs(), + Kibana: kibanaClient, + PackageRoot: packageRoot, + SkipValidation: skipValidation, + ZipPath: zipPathFile, + RepositoryRoot: repositoryRoot, + SchemaURLs: appConfig.SchemaURLs(), + RequiredInputsResolver: requiredInputsResolver, }) if err != nil { return fmt.Errorf("package installation failed: %w", err) diff --git a/cmd/status.go b/cmd/status.go index 8ddaa8104d..3d7d868ba2 100644 --- a/cmd/status.go +++ b/cmd/status.go @@ -124,7 +124,10 @@ func statusCommandAction(cmd *cobra.Command, args []string) error { // Create registry client with configured URL // Currently, this command does not use profile, so we take the URL from the application configuration - registryClient := registry.NewClient(appConfig.PackageRegistryBaseURL()) + registryClient, err := registry.NewClient(appConfig.PackageRegistryBaseURL()) + if err != nil { + return fmt.Errorf("failed to create package registry client: %w", err) + } options := registry.SearchOptions{ All: showAll, diff --git a/cmd/testrunner.go b/cmd/testrunner.go index 655670f263..47f1da5118 100644 --- a/cmd/testrunner.go +++ b/cmd/testrunner.go @@ -21,6 +21,8 @@ import ( 
"github.com/elastic/elastic-package/internal/install" "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/registry" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/signal" "github.com/elastic/elastic-package/internal/stack" "github.com/elastic/elastic-package/internal/testrunner" @@ -760,6 +762,12 @@ func testRunnerScriptCommandAction(cmd *cobra.Command, args []string) error { opts.Package = manifest.Name + profile, err := cobraext.GetProfileFlag(cmd) + if err != nil { + return err + } + opts.Profile = profile + var results []testrunner.TestResult err = script.Run(&results, cmd.OutOrStderr(), opts) if err != nil { @@ -870,19 +878,27 @@ func testRunnerPolicyCommandAction(cmd *cobra.Command, args []string) error { return fmt.Errorf("can't load configuration: %w", err) } + baseURL := stack.PackageRegistryBaseURL(profile, appConfig) + eprClient, err := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) 
+ if err != nil { + return fmt.Errorf("failed to create package registry client: %w", err) + } + requiredInputsResolver := requiredinputs.NewRequiredInputsResolver(eprClient) + logger.Info(version.Version()) logger.Infof("elastic-stack: %s", stackVersion.Version()) runner := policy.NewPolicyTestRunner(policy.PolicyTestRunnerOptions{ - PackageRoot: packageRoot, - KibanaClient: kibanaClient, - DataStreams: dataStreams, - FailOnMissingTests: failOnMissing, - GenerateTestResult: generateTestResult, - GlobalTestConfig: globalTestConfig.Policy, - WithCoverage: testCoverage, - CoverageType: testCoverageFormat, - RepositoryRoot: repositoryRoot, - SchemaURLs: appConfig.SchemaURLs(), + PackageRoot: packageRoot, + KibanaClient: kibanaClient, + DataStreams: dataStreams, + FailOnMissingTests: failOnMissing, + GenerateTestResult: generateTestResult, + GlobalTestConfig: globalTestConfig.Policy, + WithCoverage: testCoverage, + CoverageType: testCoverageFormat, + RepositoryRoot: repositoryRoot, + SchemaURLs: appConfig.SchemaURLs(), + RequiredInputsResolver: requiredInputsResolver, }) results, err := testrunner.RunSuite(ctx, runner) diff --git a/docs/howto/dependency_management.md b/docs/howto/dependency_management.md index 16d96f97b7..43b6779962 100644 --- a/docs/howto/dependency_management.md +++ b/docs/howto/dependency_management.md @@ -11,10 +11,22 @@ which field definition was correct, maintenance and typo correction process was The described situation brought us to a point in time when a simple dependency management was a requirement to maintain all used fields, especially ones imported from external sources. +Elastic Packages support two kinds of build-time dependency: + +- **Field dependencies** — import field definitions from external schemas (e.g. ECS) using + `_dev/build/build.yml`. Resolved from Git references and cached locally. 
+- **Package dependencies** — composable (integration) packages can depend on input and content packages + declared under `requires` in `manifest.yml`. **Input package** dependencies are resolved + at build time by downloading from the package registry. **Content package** dependencies are + resolved at runtime by Fleet. + +Both are described in the sections below. + ## Principles of operation -Currently Elastic Packages support build-time dependencies that can be used as external field sources. They use a flat -dependency model represented with an additional build manifest, stored in an optional YAML file - `_dev/build/build.yml`: +Currently Elastic Packages support build-time field dependencies that can be used as external +field sources. They use a flat dependency model represented with an additional build manifest, +stored in an optional YAML file - `_dev/build/build.yml`: ```yaml dependencies: @@ -83,4 +95,51 @@ and use a following field definition: ```yaml - name: event.category external: ecs -``` \ No newline at end of file +``` + +## Composable packages and the package registry + +Composable (integration) packages can also depend on input or content packages by declaring them under +`requires` in `manifest.yml`. Depending on the package type, dependencies are resolved +differently: **input package** dependencies are fetched at build time; **content package** +dependencies are resolved at runtime by Fleet. + +```yaml +requires: + input: + - package: sql_input + version: "0.2.0" +``` + +This type of dependency is resolved at **build time** by downloading the required input package +from the **package registry**. 
During `elastic-package build`, elastic-package fetches those +packages and updates the built integration: it bundles agent templates (policy and data stream), +merges variable definitions from the input packages into the composable manifest, adds data +stream field definitions where configured, and rewrites `package:` references on inputs and +streams to the concrete input types Fleet needs. Fleet still merges policy-specific values at +policy creation time. + +Unlike field-level dependencies (which are resolved from Git references and cached locally), +package dependencies are fetched from the configured package registry URL +(`stack.epr.base_url` in the active profile, or `package_registry.base_url` in +`~/.elastic-package/config.yml`, defaulting to `https://epr.elastic.co`). + +For details on using a local or custom registry when the required input packages are still +under development, see [HOWTO: Use a local or custom package registry](./local_package_registry.md). + +### Linked files (`*.link`) and `template_path` + +Some repositories share agent templates using **link files** (files ending in `.link` that +point at shared content). During `elastic-package build`, linked content is copied into the +build output under the **target** path (the link filename without the `.link` suffix). + +Composable bundling (`requires.input`) runs **after** linked files are materialized in the +build directory. In `manifest.yml`, always set `template_path` / `template_paths` to those +**materialized** names (for example `owned.hbs`), **not** the stub name (`owned.hbs.link`). +Fleet and the builder resolve templates by the names declared in the manifest; the `.link` +file exists only in the source tree. + +A small manual fixture that combines `requires.input` with a linked policy input template +lives under `test/manual_packages/required_inputs/with_linked_template_path/`. 
Automated +coverage is in `TestBundleInputPackageTemplates_PreservesLinkedTemplateTargetPath` in +`internal/requiredinputs/requiredinputs_test.go`. \ No newline at end of file diff --git a/docs/howto/local_package_registry.md b/docs/howto/local_package_registry.md new file mode 100644 index 0000000000..ca5c2acc19 --- /dev/null +++ b/docs/howto/local_package_registry.md @@ -0,0 +1,167 @@ +# HOWTO: Use a local or custom package registry for composable integrations + +## Overview + +Composable (integration) packages can declare required input packages in their `manifest.yml` +under `requires.input`. When you run `elastic-package build` or `elastic-package install`, +elastic-package resolves those dependencies by downloading them from the **package registry**. +By default it uses the production registry at `https://epr.elastic.co`. + +This guide explains how to point elastic-package at a local or custom registry, which is +useful when the required input packages are still under development and not yet published to +the production registry. + +For field-level build-time dependencies (ECS, `_dev/build/build.yml`), see +[HOWTO: Enable dependency management](./dependency_management.md). + +## Prerequisites + +- An integration package that declares `requires.input` in its `manifest.yml`, for example: + +```yaml +requires: + input: + - package: sql_input + version: "0.2.0" +``` + +- Optionally, a running local package registry that serves the required input packages. + +## Option 1: Use the built-in stack registry (recommended) + +`elastic-package stack up` (with the default compose provider) automatically starts a local +package registry container. The container runs in **proxy mode**: it serves packages found in +the repository's `build/packages/` directory and proxies all other package requests to the +production registry at `https://epr.elastic.co` (or to a custom upstream if configured). 
+ +`elastic-package` discovers `build/packages/` by walking up from the current working +directory to the repository root, so you can run `elastic-package stack up` from anywhere +inside the repository. + +```shell +# 1. Build the required input package — this places the built package under build/packages/ +# at the repository root. +cd /path/to/sql_input +elastic-package build + +# 2. Start the Elastic Stack from anywhere inside the repository. +# The bundled registry picks up build/packages/ from the repository root. +elastic-package stack up -v -d +``` + +Then configure `~/.elastic-package/config.yml` to use the stack's local registry for +`elastic-package build`, `elastic-package test`, `elastic-package benchmark`, and +`elastic-package status`: + +```yaml +package_registry: + base_url: http://localhost:8080 +``` + +This setting defaults to `https://epr.elastic.co` when not set. + +> **Note:** This setting does not change the package registry container that the Elastic Stack +> itself uses (served by `elastic-package stack`). To also redirect the stack's proxy target, +> see [Option 2](#option-2-configure-the-registry-url-per-profile) below. + +### Alternative: standalone package registry container + +If you are not running `elastic-package stack`, you can start a standalone registry container. +Use a port other than `8080` to avoid conflicting with the stack's built-in registry: + +```shell +# Build your input package first +cd /path/to/sql_input +elastic-package build + +# Start a standalone registry on port 8081, mounting the build/packages/ directory +# at the repository root (run from anywhere inside the repo, or adjust the path). +docker run --rm -p 8081:8080 \ + -v "$(git -C /path/to/repo rev-parse --show-toplevel)/build/packages":/packages/package-registry \ + docker.elastic.co/package-registry/package-registry:v1.37.0 +``` + +> **Note:** The mounted directory must contain at least one valid package (a `.zip` file or an +> extracted package directory). 
If the directory is empty, the registry exits immediately with +> `No local packages found.` +> +> **Note:** The registry image tag above matches `PackageRegistryBaseImage` in +> [`internal/stack/versions.go`](../../internal/stack/versions.go); that constant is what +> `elastic-package stack` uses and is updated by automation, while this document is not — +> check there when upgrading. + +Then point `package_registry.base_url` at `http://localhost:8081` and run +`elastic-package build` from your integration package directory. + +## Option 2: Configure the registry URL per profile + +Use this option when you want both the **build tools** and the **stack's Fleet** to use the +same custom or standalone registry — for example, a registry serving packages not yet +published to production. + +Assume your custom registry is running on the host at port `8082`. Configure the active +profile (e.g. `~/.elastic-package/profiles/default/config.yml`): + +```yaml +# The stack's package registry container will proxy non-local requests to this URL. +# Use host.docker.internal so the container can reach the host. +stack.epr.proxy_to: http://host.docker.internal:8082 + +# elastic-package install (and stack commands) will use this URL to contact the registry. 
+stack.epr.base_url: http://localhost:8082 +``` + +To also cover `elastic-package status` (which does not read profile settings), add the global +setting — note that `elastic-package test` and `elastic-package benchmark` resolve the registry +URL profile-first, the same way as `build`: + +```yaml +# ~/.elastic-package/config.yml +package_registry: + base_url: http://localhost:8082 +``` + +### URL resolution reference + +**For `elastic-package build`, `test`, and `benchmark`** (profile, then global config): + +| Priority | Setting | +| -------- | ------- | +| 1 | `stack.epr.base_url` in the active profile `config.yml` | +| 2 | `package_registry.base_url` in `~/.elastic-package/config.yml` | +| 3 | `https://epr.elastic.co` (production fallback) | + +**For `elastic-package status`** (global config only): + +| Priority | Setting | +| -------- | ------- | +| 1 | `package_registry.base_url` in `~/.elastic-package/config.yml` | +| 2 | `https://epr.elastic.co` (production fallback) | + +**For `elastic-package install` and stack commands** (profile takes precedence): + +| Priority | Setting | +| -------- | ------- | +| 1 | `stack.epr.base_url` in the active profile `config.yml` | +| 2 | `package_registry.base_url` in `~/.elastic-package/config.yml` | +| 3 | `https://epr.elastic.co` (production fallback) | + +**For the stack registry's proxy target** (`EPR_PROXY_TO` inside the container): + +| Priority | Setting | +| -------- | ------- | +| 1 | `stack.epr.proxy_to` in the active profile `config.yml` | +| 2 | `stack.epr.base_url` in the active profile `config.yml` | +| 3 | `package_registry.base_url` in `~/.elastic-package/config.yml` | +| 4 | `https://epr.elastic.co` (production fallback) | + +For more details on profiles, see the +[Elastic Package profiles section of the README](../../README.md#elastic-package-profiles). 
+ +## Summary + +| Goal | Configuration | +| ---- | ------------- | +| Override registry for `build` | `stack.epr.base_url` in the active profile `config.yml` (or `package_registry.base_url` in `~/.elastic-package/config.yml`) | +| Override registry for `test` / `benchmark` / `status` | `package_registry.base_url` in `~/.elastic-package/config.yml` | +| Override registry for `install` and stack commands | `stack.epr.base_url` in the active profile `config.yml` | +| Override proxy target for the stack's registry container | `stack.epr.proxy_to` in the active profile `config.yml` | diff --git a/internal/benchrunner/runners/rally/options.go b/internal/benchrunner/runners/rally/options.go index 1696ec1bf2..be5a1c0145 100644 --- a/internal/benchrunner/runners/rally/options.go +++ b/internal/benchrunner/runners/rally/options.go @@ -11,26 +11,28 @@ import ( "github.com/elastic/elastic-package/internal/elasticsearch" "github.com/elastic/elastic-package/internal/kibana" "github.com/elastic/elastic-package/internal/profile" + "github.com/elastic/elastic-package/internal/requiredinputs" ) // Options contains benchmark runner options. 
type Options struct { - ESAPI *elasticsearch.API - KibanaClient *kibana.Client - DeferCleanup time.Duration - MetricsInterval time.Duration - ReindexData bool - ESMetricsAPI *elasticsearch.API - BenchName string - PackageRoot string - Variant string - Profile *profile.Profile - RallyTrackOutputDir string - DryRun bool - PackageName string - PackageVersion string - CorpusAtPath string - RepositoryRoot *os.Root + ESAPI *elasticsearch.API + KibanaClient *kibana.Client + DeferCleanup time.Duration + MetricsInterval time.Duration + ReindexData bool + ESMetricsAPI *elasticsearch.API + BenchName string + PackageRoot string + Variant string + Profile *profile.Profile + RallyTrackOutputDir string + DryRun bool + PackageName string + PackageVersion string + CorpusAtPath string + RepositoryRoot *os.Root + RequiredInputsResolver requiredinputs.Resolver } type ClientOptions struct { @@ -126,3 +128,9 @@ func WithRepositoryRoot(r *os.Root) OptionFunc { opts.RepositoryRoot = r } } + +func WithRequiredInputsResolver(r requiredinputs.Resolver) OptionFunc { + return func(opts *Options) { + opts.RequiredInputsResolver = r + } +} diff --git a/internal/benchrunner/runners/rally/runner.go b/internal/benchrunner/runners/rally/runner.go index 960aaaf1ed..fd6b660e3c 100644 --- a/internal/benchrunner/runners/rally/runner.go +++ b/internal/benchrunner/runners/rally/runner.go @@ -483,10 +483,11 @@ func (r *runner) installPackageFromRegistry(ctx context.Context, packageName, pa func (r *runner) installPackageFromPackageRoot(ctx context.Context) error { logger.Debug("Installing package...") installer, err := installer.NewForPackage(installer.Options{ - Kibana: r.options.KibanaClient, - PackageRoot: r.options.PackageRoot, - SkipValidation: true, - RepositoryRoot: r.options.RepositoryRoot, + Kibana: r.options.KibanaClient, + PackageRoot: r.options.PackageRoot, + SkipValidation: true, + RepositoryRoot: r.options.RepositoryRoot, + RequiredInputsResolver: r.options.RequiredInputsResolver, }) if err 
!= nil { return fmt.Errorf("failed to initialize package installer: %w", err) diff --git a/internal/benchrunner/runners/stream/options.go b/internal/benchrunner/runners/stream/options.go index 7770f54b0a..32a7e7bcb1 100644 --- a/internal/benchrunner/runners/stream/options.go +++ b/internal/benchrunner/runners/stream/options.go @@ -11,22 +11,24 @@ import ( "github.com/elastic/elastic-package/internal/elasticsearch" "github.com/elastic/elastic-package/internal/kibana" "github.com/elastic/elastic-package/internal/profile" + "github.com/elastic/elastic-package/internal/requiredinputs" ) // Options contains benchmark runner options. type Options struct { - ESAPI *elasticsearch.API - KibanaClient *kibana.Client - BenchName string - BackFill time.Duration - EventsPerPeriod uint64 - PeriodDuration time.Duration - PerformCleanup bool - TimestampField string - PackageRoot string - Variant string - Profile *profile.Profile - RepositoryRoot *os.Root + ESAPI *elasticsearch.API + KibanaClient *kibana.Client + BenchName string + BackFill time.Duration + EventsPerPeriod uint64 + PeriodDuration time.Duration + PerformCleanup bool + TimestampField string + PackageRoot string + Variant string + Profile *profile.Profile + RepositoryRoot *os.Root + RequiredInputsResolver requiredinputs.Resolver } type ClientOptions struct { @@ -115,3 +117,9 @@ func WithRepositoryRoot(r *os.Root) OptionFunc { opts.RepositoryRoot = r } } + +func WithRequiredInputsResolver(r requiredinputs.Resolver) OptionFunc { + return func(opts *Options) { + opts.RequiredInputsResolver = r + } +} diff --git a/internal/benchrunner/runners/stream/runner.go b/internal/benchrunner/runners/stream/runner.go index 55c333d399..9d6aa137e2 100644 --- a/internal/benchrunner/runners/stream/runner.go +++ b/internal/benchrunner/runners/stream/runner.go @@ -253,10 +253,11 @@ func (r *runner) installPackage(ctx context.Context) error { func (r *runner) installPackageFromPackageRoot(ctx context.Context) error { logger.Debug("Installing 
package...") installer, err := installer.NewForPackage(installer.Options{ - Kibana: r.options.KibanaClient, - PackageRoot: r.options.PackageRoot, - SkipValidation: true, - RepositoryRoot: r.options.RepositoryRoot, + Kibana: r.options.KibanaClient, + PackageRoot: r.options.PackageRoot, + SkipValidation: true, + RepositoryRoot: r.options.RepositoryRoot, + RequiredInputsResolver: r.options.RequiredInputsResolver, }) if err != nil { diff --git a/internal/builder/packages.go b/internal/builder/packages.go index 239fbf085e..714a551fbd 100644 --- a/internal/builder/packages.go +++ b/internal/builder/packages.go @@ -18,6 +18,7 @@ import ( "github.com/elastic/elastic-package/internal/files" "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/validation" ) @@ -31,11 +32,12 @@ type BuildOptions struct { BuildDir string // directory where all the built packages are placed and zipped packages are stored RepositoryRoot *os.Root - CreateZip bool - SignPackage bool - SkipValidation bool - UpdateReadmes bool - SchemaURLs fields.SchemaURLs + CreateZip bool + SignPackage bool + SkipValidation bool + UpdateReadmes bool + SchemaURLs fields.SchemaURLs + RequiredInputsResolver requiredinputs.Resolver } // BuildDirectory function locates the target build directory. If the directory doesn't exist, it will create it. 
@@ -232,6 +234,15 @@ func BuildPackage(options BuildOptions) (string, error) { return "", fmt.Errorf("resolving transform manifests failed: %w", err) } + resolver := options.RequiredInputsResolver + if resolver == nil { + resolver = &requiredinputs.NoopRequiredInputsResolver{} + } + err = resolver.Bundle(buildPackageRoot) + if err != nil { + return "", fmt.Errorf("bundling input package templates failed: %w", err) + } + if options.UpdateReadmes { err = docs.UpdateReadmes(options.RepositoryRoot, options.PackageRoot, buildPackageRoot, options.SchemaURLs) if err != nil { diff --git a/internal/packages/installer/factory.go b/internal/packages/installer/factory.go index bd1ba78897..fb2498f97d 100644 --- a/internal/packages/installer/factory.go +++ b/internal/packages/installer/factory.go @@ -17,6 +17,7 @@ import ( "github.com/elastic/elastic-package/internal/kibana" "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/validation" ) @@ -35,12 +36,13 @@ type Installer interface { // Options are the parameters used to build an installer. type Options struct { - Kibana *kibana.Client - PackageRoot string // Root path of the package to be installed. - ZipPath string - SkipValidation bool - RepositoryRoot *os.Root // Root of the repository where package source code is located. - SchemaURLs fields.SchemaURLs + Kibana *kibana.Client + PackageRoot string // Root path of the package to be installed. + ZipPath string + SkipValidation bool + RepositoryRoot *os.Root // Root of the repository where package source code is located. + SchemaURLs fields.SchemaURLs + RequiredInputsResolver requiredinputs.Resolver // Input dependency resolver for downloading input packages. } // NewForPackage creates a new installer for a package, given its root path, or its prebuilt zip. 
@@ -90,13 +92,14 @@ func NewForPackage(options Options) (Installer, error) { } target, err := builder.BuildPackage(builder.BuildOptions{ - PackageRoot: options.PackageRoot, - CreateZip: supportsUploadZip, - SignPackage: false, - SkipValidation: options.SkipValidation, - RepositoryRoot: options.RepositoryRoot, - UpdateReadmes: false, - SchemaURLs: options.SchemaURLs, + PackageRoot: options.PackageRoot, + CreateZip: supportsUploadZip, + SignPackage: false, + SkipValidation: options.SkipValidation, + RepositoryRoot: options.RepositoryRoot, + UpdateReadmes: false, + SchemaURLs: options.SchemaURLs, + RequiredInputsResolver: options.RequiredInputsResolver, }) if err != nil { return nil, fmt.Errorf("failed to build package: %v", err) diff --git a/internal/packages/packages.go b/internal/packages/packages.go index 1b9962c23e..6daf05ceba 100644 --- a/internal/packages/packages.go +++ b/internal/packages/packages.go @@ -193,8 +193,23 @@ type Variable struct { // Input is a single input configuration. type Input struct { - Type string `config:"type" json:"type" yaml:"type"` - Vars []Variable `config:"vars" json:"vars" yaml:"vars"` + Type string `config:"type" json:"type" yaml:"type"` + Package string `config:"package,omitempty" json:"package,omitempty" yaml:"package,omitempty"` + Vars []Variable `config:"vars" json:"vars" yaml:"vars"` + TemplatePath string `config:"template_path,omitempty" json:"template_path,omitempty" yaml:"template_path,omitempty"` + TemplatePaths []string `config:"template_paths,omitempty" json:"template_paths,omitempty" yaml:"template_paths,omitempty"` +} + +// PackageDependency describes a dependency on another package. +type PackageDependency struct { + Package string `config:"package" json:"package" yaml:"package"` + Version string `config:"version" json:"version" yaml:"version"` +} + +// Requires lists the packages that an integration package depends on. 
+type Requires struct { + Input []PackageDependency `config:"input,omitempty" json:"input,omitempty" yaml:"input,omitempty"` + Content []PackageDependency `config:"content,omitempty" json:"content,omitempty" yaml:"content,omitempty"` } // Source contains metadata about the source code of the package. @@ -237,9 +252,10 @@ type PolicyTemplate struct { // For purposes of "input packages" Input string `config:"input,omitempty" json:"input,omitempty" yaml:"input,omitempty"` Type string `config:"type,omitempty" json:"type,omitempty" yaml:"type,omitempty"` - DynamicSignalTypes bool `config:"dynamic_signal_types,omitempty" json:"dynamic_signal_types,omitempty" yaml:"dynamic_signal_types,omitempty"` TemplatePath string `config:"template_path,omitempty" json:"template_path,omitempty" yaml:"template_path,omitempty"` + TemplatePaths []string `config:"template_paths,omitempty" json:"template_paths,omitempty" yaml:"template_paths,omitempty"` Vars []Variable `config:"vars,omitempty" json:"vars,omitempty" yaml:"vars,omitempty"` + DynamicSignalTypes bool `config:"dynamic_signal_types,omitempty" json:"dynamic_signal_types,omitempty" yaml:"dynamic_signal_types,omitempty"` } // Owner defines package owners, either a single person or a team. @@ -272,6 +288,7 @@ type PackageManifest struct { Categories []string `config:"categories" json:"categories" yaml:"categories"` Agent Agent `config:"agent" json:"agent" yaml:"agent"` Elasticsearch *Elasticsearch `config:"elasticsearch" json:"elasticsearch" yaml:"elasticsearch"` + Requires *Requires `config:"requires,omitempty" json:"requires,omitempty" yaml:"requires,omitempty"` } type PackageDirNameAndManifest struct { @@ -334,11 +351,13 @@ type TransformDefinition struct { // Stream contains information about an input stream. 
type Stream struct { - Input string `config:"input" json:"input" yaml:"input"` - Title string `config:"title" json:"title" yaml:"title"` - Description string `config:"description" json:"description" yaml:"description"` - TemplatePath string `config:"template_path" json:"template_path" yaml:"template_path"` - Vars []Variable `config:"vars" json:"vars" yaml:"vars"` + Input string `config:"input" json:"input" yaml:"input"` + Package string `config:"package,omitempty" json:"package,omitempty" yaml:"package,omitempty"` + Title string `config:"title" json:"title" yaml:"title"` + Description string `config:"description" json:"description" yaml:"description"` + TemplatePath string `config:"template_path,omitempty" json:"template_path,omitempty" yaml:"template_path,omitempty"` + TemplatePaths []string `config:"template_paths,omitempty" json:"template_paths,omitempty" yaml:"template_paths,omitempty"` + Vars []Variable `config:"vars" json:"vars" yaml:"vars"` } // HasSource checks if a given index or data stream name maches the transform sources @@ -717,6 +736,20 @@ func ReadPackageManifestBytes(contents []byte) (*PackageManifest, error) { return &m, nil } +func ReadDataStreamManifestBytes(contents []byte) (*DataStreamManifest, error) { + cfg, err := yaml.NewConfig(contents, ucfg.PathSep(".")) + if err != nil { + return nil, fmt.Errorf("reading manifest file failed: %w", err) + } + + var m DataStreamManifest + err = cfg.Unpack(&m) + if err != nil { + return nil, fmt.Errorf("unpacking data stream manifest failed: %w", err) + } + return &m, nil +} + // ReadDataStreamManifest reads and parses the given data stream manifest file. 
func ReadDataStreamManifest(path string) (*DataStreamManifest, error) { cfg, err := yaml.NewConfigWithFile(path, ucfg.PathSep(".")) diff --git a/internal/registry/client.go b/internal/registry/client.go index 6181c9dfa2..1e04fb9212 100644 --- a/internal/registry/client.go +++ b/internal/registry/client.go @@ -5,6 +5,7 @@ package registry import ( + "crypto/tls" "fmt" "io" "net/http" @@ -12,6 +13,7 @@ import ( "os" "path/filepath" + "github.com/elastic/elastic-package/internal/certs" "github.com/elastic/elastic-package/internal/logger" ) @@ -19,18 +21,63 @@ const ( ProductionURL = "https://epr.elastic.co" ) -// Client is responsible for exporting dashboards from Kibana. +// ClientOption is a functional option for the registry client. +type ClientOption func(*Client) + +// Client is responsible for communicating with the Package Registry API. type Client struct { - baseURL string + baseURL string + certificateAuthority string + tlsSkipVerify bool + httpClient *http.Client } // NewClient creates a new instance of the client. -func NewClient(baseURL string) *Client { - return &Client{ - baseURL: baseURL, +func NewClient(baseURL string, opts ...ClientOption) (*Client, error) { + c := &Client{baseURL: baseURL} + for _, opt := range opts { + opt(c) + } + httpClient, err := c.newHTTPClient() + if err != nil { + return nil, fmt.Errorf("creating registry HTTP client: %w", err) + } + c.httpClient = httpClient + return c, nil +} + +// CertificateAuthority sets the certificate authority to use for TLS verification. +func CertificateAuthority(path string) ClientOption { + return func(c *Client) { + c.certificateAuthority = path + } +} + +// TLSSkipVerify disables TLS certificate verification (e.g. for local HTTPS registries). 
+func TLSSkipVerify() ClientOption { + return func(c *Client) { + c.tlsSkipVerify = true } } +func (c *Client) newHTTPClient() (*http.Client, error) { + client := &http.Client{} + if c.tlsSkipVerify { + client.Transport = &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + } + } else if c.certificateAuthority != "" { + rootCAs, err := certs.SystemPoolWithCACertificate(c.certificateAuthority) + if err != nil { + return nil, fmt.Errorf("reading CA certificate: %w", err) + } + client.Transport = &http.Transport{ + TLSClientConfig: &tls.Config{RootCAs: rootCAs}, + } + } + return client, nil +} + func (c *Client) get(resourcePath string) (int, []byte, error) { base, err := url.Parse(c.baseURL) if err != nil { @@ -52,7 +99,10 @@ func (c *Client) get(resourcePath string) (int, []byte, error) { return 0, nil, fmt.Errorf("could not create request to Package Registry API resource: %s: %w", resourcePath, err) } - client := http.Client{} + client := c.httpClient + if client == nil { + client = &http.Client{} + } resp, err := client.Do(req) if err != nil { return 0, nil, fmt.Errorf("could not send request to Package Registry API: %w", err) @@ -80,8 +130,18 @@ func (c *Client) DownloadPackage(name, version, destDir string) (string, error) } zipPath := filepath.Join(destDir, fmt.Sprintf("%s-%s.zip", name, version)) + shouldRemove := false + defer func() { + if shouldRemove { + _ = os.Remove(zipPath) + } + }() + + shouldRemove = true if err := os.WriteFile(zipPath, body, 0o644); err != nil { return "", fmt.Errorf("writing package zip to %s: %w", zipPath, err) } + + shouldRemove = false return zipPath, nil } diff --git a/internal/registry/client_test.go b/internal/registry/client_test.go new file mode 100644 index 0000000000..8588655c47 --- /dev/null +++ b/internal/registry/client_test.go @@ -0,0 +1,129 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package registry + +import ( + "archive/zip" + "bytes" + "errors" + "fmt" + "io/fs" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestNewClient_invalidCertificateAuthorityPath(t *testing.T) { + missing := filepath.Join(t.TempDir(), "missing-ca.pem") + client, err := NewClient("https://example.test", CertificateAuthority(missing)) + require.Error(t, err) + require.Nil(t, client) + require.ErrorContains(t, err, "creating registry HTTP client") + require.ErrorContains(t, err, "reading CA certificate") +} + +func TestNewClient_invalidCertificateAuthorityPEM(t *testing.T) { + badPath := filepath.Join(t.TempDir(), "not-a-cert.pem") + require.NoError(t, os.WriteFile(badPath, []byte("this is not a PEM certificate block"), 0o600)) + + client, err := NewClient("https://example.test", CertificateAuthority(badPath)) + require.Error(t, err) + require.Nil(t, client) + require.ErrorContains(t, err, "creating registry HTTP client") + require.ErrorContains(t, err, "no certificate found") +} + +func TestNewClient_tlsskipVerifyOption(t *testing.T) { + srv := httptest.NewServer(http.NotFoundHandler()) + t.Cleanup(srv.Close) + + client, err := NewClient(srv.URL, TLSSkipVerify()) + require.NoError(t, err) + require.NotNil(t, client) +} + +func TestDownloadPackage_unexpectedStatusDoesNotWriteZip(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + http.Error(w, "gone", http.StatusGone) + })) + t.Cleanup(srv.Close) + + dest := t.TempDir() + client, err := NewClient(srv.URL) + require.NoError(t, err) + _, err = client.DownloadPackage("acme", "1.0.0", dest) + require.Error(t, err) + require.ErrorContains(t, err, "unexpected status code") + + _, statErr := os.Stat(filepath.Join(dest, "acme-1.0.0.zip")) + require.True(t, errors.Is(statErr, 
fs.ErrNotExist), "no zip should be written when the registry returns a non-OK status") +} + +func TestDownloadPackage_writeFailureCleansUp(t *testing.T) { + zipBytes := testAcmePackageZip(t) + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/epr/acme/acme-1.0.0.zip" { + http.NotFound(w, r) + return + } + _, err := w.Write(zipBytes) + require.NoError(t, err) + })) + t.Cleanup(srv.Close) + + dest := t.TempDir() + zipPath := filepath.Join(dest, "acme-1.0.0.zip") + require.NoError(t, os.Mkdir(zipPath, 0o700)) + + client, err := NewClient(srv.URL) + require.NoError(t, err) + _, err = client.DownloadPackage("acme", "1.0.0", dest) + require.Error(t, err) + require.ErrorContains(t, err, "writing package zip") + + _, statErr := os.Stat(zipPath) + require.True(t, errors.Is(statErr, fs.ErrNotExist), "partial zip should not remain after a write error") +} + +func TestDownloadPackage_success(t *testing.T) { + zipBytes := testAcmePackageZip(t) + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/epr/acme/acme-1.0.0.zip" { + http.NotFound(w, r) + return + } + _, err := w.Write(zipBytes) + require.NoError(t, err) + })) + t.Cleanup(srv.Close) + + dest := t.TempDir() + client, err := NewClient(srv.URL) + require.NoError(t, err) + zipPath, err := client.DownloadPackage("acme", "1.0.0", dest) + require.NoError(t, err) + require.FileExists(t, zipPath) +} + +func testAcmePackageZip(t *testing.T) []byte { + t.Helper() + const ( + name = "acme" + version = "1.0.0" + ) + var buf bytes.Buffer + zw := zip.NewWriter(&buf) + manifestPath := fmt.Sprintf("%s/manifest.yml", name) + w, err := zw.Create(manifestPath) + require.NoError(t, err) + _, err = fmt.Fprintf(w, "name: %s\nversion: %s\ntype: integration\n", name, version) + require.NoError(t, err) + require.NoError(t, zw.Close()) + return buf.Bytes() +} diff --git a/internal/requiredinputs/copy.go 
b/internal/requiredinputs/copy.go new file mode 100644 index 0000000000..e06b421d2d --- /dev/null +++ b/internal/requiredinputs/copy.go @@ -0,0 +1,69 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "fmt" + "io/fs" + "os" + "path" + + "github.com/elastic/elastic-package/internal/logger" + "github.com/elastic/elastic-package/internal/packages" +) + +// collectAndCopyPolicyTemplateFiles opens the input package at inputPkgPath, +// reads template names from its policy_templates manifest entries, copies each +// file from agent/input/ into destDir inside buildRoot with the prefix +// "-", and returns the list of destination file names. +func collectAndCopyPolicyTemplateFiles(inputPkgPath, pkgName, destDir string, buildRoot *os.Root) ([]string, error) { + inputPkgFS, closeFn, err := openPackageFS(inputPkgPath) + if err != nil { + return nil, fmt.Errorf("failed to open input package %q: %w", inputPkgPath, err) + } + defer func() { _ = closeFn() }() + + manifestBytes, err := fs.ReadFile(inputPkgFS, packages.PackageManifestFile) + if err != nil { + return nil, fmt.Errorf("failed to read input package manifest: %w", err) + } + manifest, err := packages.ReadPackageManifestBytes(manifestBytes) + if err != nil { + return nil, fmt.Errorf("failed to parse input package manifest: %w", err) + } + + seen := make(map[string]bool) + copiedNames := make([]string, 0) + for _, pt := range manifest.PolicyTemplates { + var names []string + switch { + case len(pt.TemplatePaths) > 0: + names = pt.TemplatePaths + case pt.TemplatePath != "": + names = []string{pt.TemplatePath} + } + for _, name := range names { + if seen[name] { + continue + } + seen[name] = true + content, err := fs.ReadFile(inputPkgFS, path.Join("agent", "input", name)) + if err != nil { + return 
nil, fmt.Errorf("failed to read template %q from agent/input (declared in manifest): %w", name, err) + } + destName := pkgName + "-" + name + if err := buildRoot.MkdirAll(destDir, 0755); err != nil { + return nil, fmt.Errorf("failed to create directory %q: %w", destDir, err) + } + destPath := path.Join(destDir, destName) + if err := buildRoot.WriteFile(destPath, content, 0644); err != nil { + return nil, fmt.Errorf("failed to write template %q: %w", destName, err) + } + logger.Debugf("Copied input package template: %s -> %s", name, destName) + copiedNames = append(copiedNames, destName) + } + } + return copiedNames, nil +} diff --git a/internal/requiredinputs/copy_test.go b/internal/requiredinputs/copy_test.go new file mode 100644 index 0000000000..9c572d1df7 --- /dev/null +++ b/internal/requiredinputs/copy_test.go @@ -0,0 +1,197 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// buildRootFor creates a temporary os.Root for use in tests. +func buildRootFor(t *testing.T) *os.Root { + t.Helper() + root, err := os.OpenRoot(t.TempDir()) + require.NoError(t, err) + t.Cleanup(func() { root.Close() }) + return root +} + +// TestCollectAndCopyPolicyTemplateFiles_SingleTemplatePath verifies that a package whose +// policy_template declares a single template_path is copied into destDir with the +// "-" prefix, and that the returned slice contains exactly that name. 
+func TestCollectAndCopyPolicyTemplateFiles_SingleTemplatePath(t *testing.T) { + inputPkgDir := createFakeInputHelper(t) + buildRoot := buildRootFor(t) + + destDir := filepath.Join("agent", "input") + got, err := collectAndCopyPolicyTemplateFiles(inputPkgDir, "sql", destDir, buildRoot) + require.NoError(t, err) + + assert.Equal(t, []string{"sql-input.yml.hbs"}, got) + + content, err := buildRoot.ReadFile(filepath.Join(destDir, "sql-input.yml.hbs")) + require.NoError(t, err) + assert.Equal(t, "template content", string(content)) +} + +// TestCollectAndCopyPolicyTemplateFiles_MultipleTemplatePaths verifies that all names listed +// in template_paths across multiple policy_templates are copied. +func TestCollectAndCopyPolicyTemplateFiles_MultipleTemplatePaths(t *testing.T) { + inputPkgDir := createFakeInputWithMultiplePolicyTemplates(t) + buildRoot := buildRootFor(t) + + destDir := filepath.Join("agent", "input") + got, err := collectAndCopyPolicyTemplateFiles(inputPkgDir, "sql", destDir, buildRoot) + require.NoError(t, err) + + assert.Equal(t, []string{"sql-input.yml.hbs", "sql-metrics.yml.hbs", "sql-extra.yml.hbs"}, got) + + for _, name := range []string{"sql-input.yml.hbs", "sql-metrics.yml.hbs", "sql-extra.yml.hbs"} { + _, err := buildRoot.ReadFile(filepath.Join(destDir, name)) + require.NoError(t, err, "expected %s to exist in destDir", name) + } +} + +// TestCollectAndCopyPolicyTemplateFiles_Deduplication verifies that when the same template name +// appears in more than one policy_template it is only copied once. 
+func TestCollectAndCopyPolicyTemplateFiles_Deduplication(t *testing.T) { + inputPkgDir := t.TempDir() + manifest := []byte(`name: sql +version: 0.1.0 +type: input +policy_templates: + - input: sql + template_path: shared.yml.hbs + - input: sql/metrics + template_path: shared.yml.hbs +`) + err := os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + err = os.MkdirAll(filepath.Join(inputPkgDir, "agent", "input"), 0755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(inputPkgDir, "agent", "input", "shared.yml.hbs"), []byte("shared"), 0644) + require.NoError(t, err) + + buildRoot := buildRootFor(t) + destDir := filepath.Join("agent", "input") + + got, err := collectAndCopyPolicyTemplateFiles(inputPkgDir, "sql", destDir, buildRoot) + require.NoError(t, err) + + // Returned slice must contain the prefixed name exactly once. + assert.Equal(t, []string{"sql-shared.yml.hbs"}, got) +} + +// TestCollectAndCopyPolicyTemplateFiles_NoTemplates verifies that a package whose +// policy_templates have neither template_path nor template_paths returns an empty slice +// without error. +func TestCollectAndCopyPolicyTemplateFiles_NoTemplates(t *testing.T) { + inputPkgDir := t.TempDir() + manifest := []byte(`name: sql +version: 0.1.0 +type: input +policy_templates: + - input: sql +`) + err := os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + err = os.MkdirAll(filepath.Join(inputPkgDir, "agent", "input"), 0755) + require.NoError(t, err) + + buildRoot := buildRootFor(t) + + got, err := collectAndCopyPolicyTemplateFiles(inputPkgDir, "sql", "agent/input", buildRoot) + require.NoError(t, err) + assert.Empty(t, got) +} + +// TestCollectAndCopyPolicyTemplateFiles_MissingTemplateFile verifies that when a template +// name is declared in the manifest but the corresponding file is absent from agent/input/, +// the function returns an error. 
+func TestCollectAndCopyPolicyTemplateFiles_MissingTemplateFile(t *testing.T) { + inputPkgDir := t.TempDir() + manifest := []byte(`name: sql +version: 0.1.0 +type: input +policy_templates: + - input: sql + template_path: missing.yml.hbs +`) + err := os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + err = os.MkdirAll(filepath.Join(inputPkgDir, "agent", "input"), 0755) + require.NoError(t, err) + // Intentionally do NOT create missing.yml.hbs. + + buildRoot := buildRootFor(t) + + _, err = collectAndCopyPolicyTemplateFiles(inputPkgDir, "sql", "agent/input", buildRoot) + require.Error(t, err) + assert.Contains(t, err.Error(), "missing.yml.hbs") +} + +// TestCollectAndCopyPolicyTemplateFiles_InvalidPackagePath verifies that a non-existent +// package path returns an error from openPackageFS. +func TestCollectAndCopyPolicyTemplateFiles_InvalidPackagePath(t *testing.T) { + buildRoot := buildRootFor(t) + + _, err := collectAndCopyPolicyTemplateFiles("/nonexistent/path", "sql", "agent/input", buildRoot) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to open input package") +} + +// TestCollectAndCopyPolicyTemplateFiles_CustomDestDir verifies that files are written to the +// caller-supplied destDir, not hardcoded to agent/input. This covers the data-stream use-case +// where destDir is data_stream//agent/stream. +func TestCollectAndCopyPolicyTemplateFiles_CustomDestDir(t *testing.T) { + inputPkgDir := createFakeInputHelper(t) + buildRoot := buildRootFor(t) + + destDir := filepath.Join("data_stream", "logs", "agent", "stream") + got, err := collectAndCopyPolicyTemplateFiles(inputPkgDir, "sql", destDir, buildRoot) + require.NoError(t, err) + + assert.Equal(t, []string{"sql-input.yml.hbs"}, got) + + _, err = buildRoot.ReadFile(filepath.Join(destDir, "sql-input.yml.hbs")) + require.NoError(t, err, "file must be written to the custom destDir") + + // Must NOT appear in agent/input. 
+ _, err = buildRoot.ReadFile(filepath.Join("agent", "input", "sql-input.yml.hbs")) + assert.Error(t, err, "file must not be written to agent/input when a custom destDir is given") +} + +// TestCollectAndCopyPolicyTemplateFiles_FileContentPreserved verifies that the byte content +// of the template is copied verbatim without modification. +func TestCollectAndCopyPolicyTemplateFiles_FileContentPreserved(t *testing.T) { + inputPkgDir := t.TempDir() + originalContent := []byte("{{#each processors}}\n- {{this}}\n{{/each}}") + manifest := []byte(`name: sql +version: 0.1.0 +type: input +policy_templates: + - input: sql + template_path: input.yml.hbs +`) + err := os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + err = os.MkdirAll(filepath.Join(inputPkgDir, "agent", "input"), 0755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(inputPkgDir, "agent", "input", "input.yml.hbs"), originalContent, 0644) + require.NoError(t, err) + + buildRoot := buildRootFor(t) + + _, err = collectAndCopyPolicyTemplateFiles(inputPkgDir, "sql", "agent/input", buildRoot) + require.NoError(t, err) + + copied, err := buildRoot.ReadFile(filepath.Join("agent", "input", "sql-input.yml.hbs")) + require.NoError(t, err) + assert.Equal(t, originalContent, copied) +} diff --git a/internal/requiredinputs/fields.go b/internal/requiredinputs/fields.go new file mode 100644 index 0000000000..6e3c44f6bb --- /dev/null +++ b/internal/requiredinputs/fields.go @@ -0,0 +1,196 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package requiredinputs + +import ( + "errors" + "fmt" + "io/fs" + "os" + "path" + + "github.com/goccy/go-yaml/ast" + "github.com/goccy/go-yaml/parser" + + "github.com/elastic/elastic-package/internal/logger" + "github.com/elastic/elastic-package/internal/packages" +) + +// bundleDataStreamFields bundles field definitions from required input packages +// into the composable integration package's data stream fields directories. +// For each data stream that references an input package, fields defined in the +// input package but not already present in the integration's data stream are +// copied into a new file named -fields.yml. +func (r *RequiredInputsResolver) bundleDataStreamFields(inputPkgPaths map[string]string, buildRoot *os.Root) error { + dsManifestsPaths, err := fs.Glob(buildRoot.FS(), "data_stream/*/manifest.yml") + if err != nil { + return fmt.Errorf("globbing data stream manifests: %w", err) + } + + errorList := make([]error, 0) + for _, manifestPath := range dsManifestsPaths { + manifestBytes, err := buildRoot.ReadFile(manifestPath) + if err != nil { + return fmt.Errorf("reading data stream manifest %q: %w", manifestPath, err) + } + manifest, err := packages.ReadDataStreamManifestBytes(manifestBytes) + if err != nil { + return fmt.Errorf("parsing data stream manifest %q: %w", manifestPath, err) + } + for _, stream := range manifest.Streams { + if stream.Package == "" { + continue + } + pkgPath, ok := inputPkgPaths[stream.Package] + if !ok { + errorList = append(errorList, fmt.Errorf("stream in manifest %q references input package %q which is not listed in requires.input", manifestPath, stream.Package)) + continue + } + dsRootDir := path.Dir(manifestPath) + if err := r.mergeInputPkgFields(dsRootDir, pkgPath, stream.Package, buildRoot); err != nil { + return fmt.Errorf("merging input package fields for manifest %q: %w", manifestPath, err) + } + } + } + return errors.Join(errorList...) 
+} + +// mergeInputPkgFields copies field definitions from the input package into the +// integration's data stream fields directory. Fields already defined in the +// integration take precedence; only fields absent from the integration are +// written to /fields/-fields.yml. +func (r *RequiredInputsResolver) mergeInputPkgFields(dsRootDir, inputPkgPath, inputPkgName string, buildRoot *os.Root) error { + existingNames, err := collectExistingFieldNames(dsRootDir, buildRoot) + if err != nil { + return fmt.Errorf("collecting existing field names: %w", err) + } + + inputPkgFS, closeFn, err := openPackageFS(inputPkgPath) + if err != nil { + return fmt.Errorf("opening package %q: %w", inputPkgPath, err) + } + defer func() { _ = closeFn() }() + + inputFieldFiles, err := fs.Glob(inputPkgFS, "fields/*.yml") + if err != nil { + return fmt.Errorf("globbing input package fields: %w", err) + } + if len(inputFieldFiles) == 0 { + logger.Debugf("Input package %q has no fields files, skipping field bundling", inputPkgName) + return nil + } + + // Collect field nodes from input package that are not already defined in the integration. + seenNames := make(map[string]bool) + newNodes := make([]ast.Node, 0) + for _, filePath := range inputFieldFiles { + nodes, err := loadFieldNodesFromFile(inputPkgFS, filePath) + if err != nil { + return fmt.Errorf("loading field nodes from %q: %w", filePath, err) + } + for _, node := range nodes { + name := fieldNodeName(node) + if name == "" || existingNames[name] || seenNames[name] { + continue + } + seenNames[name] = true + newNodes = append(newNodes, cloneNode(node)) + } + } + + if len(newNodes) == 0 { + logger.Debugf("No new fields from input package %q to bundle into %q", inputPkgName, dsRootDir) + return nil + } + + // Build a YAML sequence containing the new field nodes. + seqNode := newSeqNode(newNodes...) 
+ + output, err := formatYAMLNode(seqNode) + if err != nil { + return fmt.Errorf("formatting bundled fields YAML: %w", err) + } + + fieldsDir := path.Join(dsRootDir, "fields") + if err := buildRoot.MkdirAll(fieldsDir, 0755); err != nil { + return fmt.Errorf("creating fields directory %q: %w", fieldsDir, err) + } + + destPath := path.Join(fieldsDir, inputPkgName+"-fields.yml") + if err := buildRoot.WriteFile(destPath, output, 0644); err != nil { + return fmt.Errorf("writing bundled fields to %q: %w", destPath, err) + } + logger.Debugf("Bundled %d field(s) from input package %q into %s", len(newNodes), inputPkgName, destPath) + return nil +} + +// collectExistingFieldNames returns the set of top-level field names already +// defined in the integration's data stream fields directory. +func collectExistingFieldNames(dsRootDir string, buildRoot *os.Root) (map[string]bool, error) { + pattern := path.Join(dsRootDir, "fields", "*.yml") + paths, err := fs.Glob(buildRoot.FS(), pattern) + if err != nil { + return nil, fmt.Errorf("globbing fields in %q: %w", dsRootDir, err) + } + + names := make(map[string]bool) + for _, p := range paths { + data, err := buildRoot.ReadFile(p) + if err != nil { + return nil, fmt.Errorf("reading fields file %q: %w", p, err) + } + nodes, err := loadFieldNodesFromBytes(data) + if err != nil { + return nil, fmt.Errorf("parsing fields file %q: %w", p, err) + } + for _, node := range nodes { + if name := fieldNodeName(node); name != "" { + names[name] = true + } + } + } + return names, nil +} + +// loadFieldNodesFromFile reads a fields YAML file from an fs.FS and returns +// its top-level sequence items as individual ast.Node values. 
+func loadFieldNodesFromFile(fsys fs.FS, filePath string) ([]ast.Node, error) { + data, err := fs.ReadFile(fsys, filePath) + if err != nil { + return nil, fmt.Errorf("reading file %q: %w", filePath, err) + } + return loadFieldNodesFromBytes(data) +} + +// loadFieldNodesFromBytes parses a fields YAML document (expected to be a +// sequence at the document root) and returns the individual item nodes. +func loadFieldNodesFromBytes(data []byte) ([]ast.Node, error) { + f, err := parser.ParseBytes(data, 0) + if err != nil { + return nil, fmt.Errorf("parsing fields YAML: %w", err) + } + if len(f.Docs) == 0 || f.Docs[0] == nil { + return nil, nil + } + body := f.Docs[0].Body + if body == nil { + return nil, nil + } + seqNode, ok := body.(*ast.SequenceNode) + if !ok { + return nil, fmt.Errorf("expected sequence at fields document root, got %T", body) + } + return seqNode.Values, nil +} + +// fieldNodeName returns the value of the "name" key in a field mapping node, +// or an empty string if the key is absent or the node is not a mapping. +func fieldNodeName(n ast.Node) string { + mn, ok := n.(*ast.MappingNode) + if !ok || mn == nil { + return "" + } + return nodeStringValue(mappingValue(mn, "name")) +} diff --git a/internal/requiredinputs/fields_test.go b/internal/requiredinputs/fields_test.go new file mode 100644 index 0000000000..ac58999cab --- /dev/null +++ b/internal/requiredinputs/fields_test.go @@ -0,0 +1,315 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package requiredinputs + +import ( + "errors" + "os" + "path/filepath" + "testing" + + "github.com/goccy/go-yaml/ast" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// ---- unit tests -------------------------------------------------------------- + +// TestLoadFieldNodesFromBytes verifies that field YAML sequences are parsed +// correctly into individual yaml.Node pointers. +func TestLoadFieldNodesFromBytes(t *testing.T) { + t.Run("valid sequence", func(t *testing.T) { + data := []byte(` +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: message + type: text + description: Log message. +`) + nodes, err := loadFieldNodesFromBytes(data) + require.NoError(t, err) + require.Len(t, nodes, 2) + assert.Equal(t, "data_stream.type", fieldNodeName(nodes[0])) + assert.Equal(t, "message", fieldNodeName(nodes[1])) + }) + + t.Run("empty document", func(t *testing.T) { + nodes, err := loadFieldNodesFromBytes([]byte("")) + require.NoError(t, err) + assert.Empty(t, nodes) + }) + + t.Run("invalid YAML", func(t *testing.T) { + _, err := loadFieldNodesFromBytes([]byte(":\t:invalid")) + assert.Error(t, err) + }) + + t.Run("non-sequence root", func(t *testing.T) { + data := []byte(`name: foo\ntype: keyword`) + _, err := loadFieldNodesFromBytes(data) + assert.Error(t, err) + }) +} + +// TestFieldNodeName verifies extraction of the "name" field from a YAML +// mapping node representing a field definition. 
+func TestFieldNodeName(t *testing.T) { + t.Run("node with name", func(t *testing.T) { + n := &ast.MappingNode{BaseNode: &ast.BaseNode{}} + upsertKey(n, "name", strVal("message")) + assert.Equal(t, "message", fieldNodeName(n)) + }) + + t.Run("node without name", func(t *testing.T) { + n := &ast.MappingNode{BaseNode: &ast.BaseNode{}} + assert.Equal(t, "", fieldNodeName(n)) + }) + + t.Run("nil node", func(t *testing.T) { + assert.Equal(t, "", fieldNodeName(nil)) + }) +} + +// TestCollectExistingFieldNames verifies that field names are collected from +// all YAML files in a data stream's fields/ directory. +func TestCollectExistingFieldNames(t *testing.T) { + t.Run("collects names from multiple files", func(t *testing.T) { + tmpDir := t.TempDir() + buildRoot, err := os.OpenRoot(tmpDir) + require.NoError(t, err) + defer buildRoot.Close() + + require.NoError(t, buildRoot.MkdirAll("data_stream/logs/fields", 0755)) + require.NoError(t, buildRoot.WriteFile("data_stream/logs/fields/base-fields.yml", []byte(` +- name: "@timestamp" + type: date +- name: data_stream.type + type: constant_keyword +`), 0644)) + require.NoError(t, buildRoot.WriteFile("data_stream/logs/fields/extra-fields.yml", []byte(` +- name: message + type: text +`), 0644)) + + names, err := collectExistingFieldNames("data_stream/logs", buildRoot) + require.NoError(t, err) + assert.True(t, names["@timestamp"]) + assert.True(t, names["data_stream.type"]) + assert.True(t, names["message"]) + assert.Len(t, names, 3) + }) + + t.Run("returns empty set when fields directory does not exist", func(t *testing.T) { + tmpDir := t.TempDir() + buildRoot, err := os.OpenRoot(tmpDir) + require.NoError(t, err) + defer buildRoot.Close() + + require.NoError(t, buildRoot.MkdirAll("data_stream/logs", 0755)) + + names, err := collectExistingFieldNames("data_stream/logs", buildRoot) + require.NoError(t, err) + assert.Empty(t, names) + }) +} + +// ---- integration tests ------------------------------------------------------- + +// 
makeFakeEprForFieldBundling supplies the ci_input_pkg fixture path as if it +// were downloaded from the registry, so integration tests do not need a stack. +func makeFakeEprForFieldBundling(t *testing.T) *fakeEprClient { + t.Helper() + inputPkgPath := ciInputFixturePath() + return &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgPath, nil + }, + } +} + +// TestBundleDataStreamFields_PartialOverlap verifies the primary field bundling +// scenario: fields already present in the integration data stream are skipped +// (integration wins), and only fields unique to the input package are written +// to /fields/-fields.yml. +func TestBundleDataStreamFields_PartialOverlap(t *testing.T) { + // 02_ci_composable_integration has data_stream/ci_composable_logs/fields/base-fields.yml with + // 4 common fields. ci_input_pkg has those same 4 plus "message" and + // "log.level". After bundling, only "message" and "log.level" should appear + // in the generated file. + buildPackageRoot := copyComposableIntegrationFixture(t) + resolver := NewRequiredInputsResolver(makeFakeEprForFieldBundling(t)) + + require.NoError(t, resolver.Bundle(buildPackageRoot)) + + bundledPath := filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "fields", "ci_input_pkg-fields.yml") + data, err := os.ReadFile(bundledPath) + require.NoError(t, err, "bundled fields file should exist") + + nodes, err := loadFieldNodesFromBytes(data) + require.NoError(t, err) + require.Len(t, nodes, 2) + + names := make([]string, 0, len(nodes)) + for _, n := range nodes { + names = append(names, fieldNodeName(n)) + } + assert.ElementsMatch(t, []string{"message", "log.level"}, names) + + // Original base-fields.yml must be untouched. 
+ originalData, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "fields", "base-fields.yml")) + require.NoError(t, err) + originalNodes, err := loadFieldNodesFromBytes(originalData) + require.NoError(t, err) + assert.Len(t, originalNodes, 4) +} + +// TestBundleDataStreamFields_AllFieldsOverlap verifies that when all fields in +// the input package are already present in the integration data stream, no +// bundled file is created (nothing to add). +func TestBundleDataStreamFields_AllFieldsOverlap(t *testing.T) { + // Copy the composable integration and replace the data stream base fields with + // the full set from ci_input_pkg so every input field is already declared — no bundled file. + buildPackageRoot := copyComposableIntegrationFixture(t) + inputFields, err := os.ReadFile(filepath.Join(ciInputFixturePath(), "fields", "base-fields.yml")) + require.NoError(t, err) + dsFieldsPath := filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "fields", "base-fields.yml") + require.NoError(t, os.WriteFile(dsFieldsPath, inputFields, 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return ciInputFixturePath(), nil + }, + } + resolver := NewRequiredInputsResolver(epr) + + err = resolver.Bundle(buildPackageRoot) + require.NoError(t, err) + + bundledPath := filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "fields", "ci_input_pkg-fields.yml") + _, statErr := os.Stat(bundledPath) + assert.True(t, errors.Is(statErr, os.ErrNotExist), "bundled fields file should not be created when all fields already exist") +} + +// TestBundleDataStreamFields_NoFieldsInInputPkg verifies that when the input +// package has no fields/ directory, no error occurs and no file is written. +func TestBundleDataStreamFields_NoFieldsInInputPkg(t *testing.T) { + // Create a minimal input package without a fields/ directory. 
+ inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: no_fields_pkg +version: 0.1.0 +type: input +policy_templates: + - name: t + input: logfile + template_path: input.yml.hbs +`), 0644)) + require.NoError(t, os.MkdirAll(filepath.Join(inputPkgDir, "agent", "input"), 0755)) + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "agent", "input", "input.yml.hbs"), []byte(""), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + + buildPackageRoot := copyComposableIntegrationFixture(t) + // Patch manifest to reference no_fields_pkg instead. + manifestPath := filepath.Join(buildPackageRoot, "manifest.yml") + patched := []byte(`format_version: 3.6.0 +name: ci_composable_integration +title: CI Composable Integration +version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: no_fields_pkg + version: "0.1.0" +policy_templates: + - name: ci_composable_logs + title: CI composable logs + description: Collect logs + data_streams: + - ci_composable_logs + inputs: + - package: no_fields_pkg + title: Collect logs + description: Use the no fields input package + - type: logs + title: Native logs input + description: Plain logs input +owner: + github: elastic/integrations + type: elastic +`) + require.NoError(t, os.WriteFile(manifestPath, patched, 0644)) + + dsManifestPath := filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "manifest.yml") + require.NoError(t, os.WriteFile(dsManifestPath, []byte(`title: CI composable logs +type: logs +streams: + - package: no_fields_pkg + title: Logs via no-fields input package + description: Collect field logs. + - input: logfile + title: Plain logs stream + description: Native logs stream without package reference. 
+ template_path: stream.yml.hbs + vars: + - name: paths + type: text + title: Paths + multi: true + required: true + show_user: true + default: + - /var/log/ci/*.log +`), 0644)) + + resolver := NewRequiredInputsResolver(epr) + err := resolver.Bundle(buildPackageRoot) + require.NoError(t, err) + + bundledPath := filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "fields", "no_fields_pkg-fields.yml") + _, statErr := os.Stat(bundledPath) + assert.True(t, errors.Is(statErr, os.ErrNotExist), "no fields file should be created when input package has no fields") +} + +// TestBundleDataStreamFields_StreamWithoutPackage verifies that data stream +// streams with no package reference are skipped without error. +func TestBundleDataStreamFields_StreamWithoutPackage(t *testing.T) { + // Second stream uses input: logfile (no package); Bundle should succeed and only + // bundle fields for the package-backed stream. + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return ciInputFixturePath(), nil + }, + } + + buildPackageRoot := copyComposableIntegrationFixture(t) + resolver := NewRequiredInputsResolver(epr) + + err := resolver.Bundle(buildPackageRoot) + require.NoError(t, err) + + fieldsDir := filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "fields") + entries, err := os.ReadDir(fieldsDir) + require.NoError(t, err) + var names []string + for _, e := range entries { + names = append(names, e.Name()) + } + assert.Contains(t, names, "base-fields.yml") + assert.Contains(t, names, "ci_input_pkg-fields.yml") + assert.Len(t, names, 2) +} diff --git a/internal/requiredinputs/policytemplates.go b/internal/requiredinputs/policytemplates.go new file mode 100644 index 0000000000..6d74eb4ad3 --- /dev/null +++ b/internal/requiredinputs/policytemplates.go @@ -0,0 +1,124 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "fmt" + "os" + "path" + + "github.com/goccy/go-yaml/ast" + + "github.com/elastic/elastic-package/internal/packages" +) + +func (r *RequiredInputsResolver) bundlePolicyTemplatesInputPackageTemplates(manifestBytes []byte, manifest *packages.PackageManifest, inputPkgPaths map[string]string, buildRoot *os.Root) error { + + // parse the manifest YAML document preserving formatting for targeted modifications + // using manifestBytes allows us to preserve comments and formatting in the manifest when we update it with template paths from input packages + root, err := parseDocumentRootMapping(manifestBytes) + if err != nil { + return fmt.Errorf("failed to parse manifest YAML: %w", err) + } + + // for each policy template, with an input package reference: + // collect the templates from the input package and copy them to the agent/input directory of the build package + // then update the policy template manifest to include the copied templates as template_paths + for ptIdx, pt := range manifest.PolicyTemplates { + for inputIdx, input := range pt.Inputs { + if input.Package == "" { + continue + } + sourcePath, ok := inputPkgPaths[input.Package] + if !ok || sourcePath == "" { + return fmt.Errorf("failed to find input package %q referenced by policy template %q", input.Package, pt.Name) + } + inputPaths, err := collectAndCopyInputPkgPolicyTemplates(sourcePath, input.Package, buildRoot) + if err != nil { + return fmt.Errorf("failed to collect and copy input package policy templates: %w", err) + } + if len(inputPaths) == 0 { + continue + } + + // current manifest template paths + paths := make([]string, 0) + // if composable package has included custom template path or paths, include them + // if no template paths are included at the manifest, only the imported templates are 
included + if input.TemplatePath != "" { + paths = append(paths, input.TemplatePath) + } else if len(input.TemplatePaths) > 0 { + paths = append(paths, input.TemplatePaths...) + } + paths = append(inputPaths, paths...) + + if err := setInputPolicyTemplateTemplatePaths(root, ptIdx, inputIdx, paths); err != nil { + return fmt.Errorf("failed to update policy template manifest with input package templates: %w", err) + } + } + } + + // Serialise the updated YAML document back to disk. + updated, err := formatYAMLNode(root) + if err != nil { + return fmt.Errorf("failed to format updated manifest: %w", err) + } + if err := buildRoot.WriteFile("manifest.yml", updated, 0664); err != nil { + return fmt.Errorf("failed to write updated manifest: %w", err) + } + + return nil +} + +// collectAndCopyInputPkgPolicyTemplates collects the templates from the input package and copies them to the agent/input directory of the build package +// it returns the list of copied template names +func collectAndCopyInputPkgPolicyTemplates(inputPkgPath, inputPkgName string, buildRoot *os.Root) ([]string, error) { + return collectAndCopyPolicyTemplateFiles(inputPkgPath, inputPkgName, path.Join("agent", "input"), buildRoot) +} + +// setInputPolicyTemplateTemplatePaths updates the manifest YAML root mapping to +// set template_paths for the specified policy template input. +func setInputPolicyTemplateTemplatePaths(root *ast.MappingNode, policyTemplatesIdx int, inputIdx int, paths []string) error { + // Navigate: root mapping -> "policy_templates" -> sequence -> item [policyTemplatesIdx] -> mapping -> "inputs" -> sequence -> item [inputIdx] -> input mapping. 
+ policyTemplatesNode, ok := mappingValue(root, "policy_templates").(*ast.SequenceNode) + if !ok { + return fmt.Errorf("failed to set policy template input paths: 'policy_templates' key not found in manifest") + } + if policyTemplatesIdx < 0 || policyTemplatesIdx >= len(policyTemplatesNode.Values) { + return fmt.Errorf("failed to set policy template input paths: policy template index %d out of range (len=%d)", policyTemplatesIdx, len(policyTemplatesNode.Values)) + } + + policyTemplateNode, ok := policyTemplatesNode.Values[policyTemplatesIdx].(*ast.MappingNode) + if !ok { + return fmt.Errorf("failed to set policy template input paths: policy template entry %d is not a mapping", policyTemplatesIdx) + } + + inputsNode, ok := mappingValue(policyTemplateNode, "inputs").(*ast.SequenceNode) + if !ok { + return fmt.Errorf("failed to set policy template input paths: 'inputs' key not found in policy template %d", policyTemplatesIdx) + } + if inputIdx < 0 || inputIdx >= len(inputsNode.Values) { + return fmt.Errorf("failed to set policy template input paths: input index %d out of range (len=%d)", inputIdx, len(inputsNode.Values)) + } + + inputNode, ok := inputsNode.Values[inputIdx].(*ast.MappingNode) + if !ok { + return fmt.Errorf("failed to set policy template input paths: input entry %d is not a mapping", inputIdx) + } + + // Remove singular template_path if present. + removeKey(inputNode, "template_path") + + // Build the template_paths sequence node. + seqNode := newSeqNode() + for _, p := range paths { + seqNode.Values = append(seqNode.Values, strVal(p)) + } + + // Upsert template_paths on the input node. + upsertKey(inputNode, "template_paths", seqNode) + + return nil +} diff --git a/internal/requiredinputs/policytemplates_test.go b/internal/requiredinputs/policytemplates_test.go new file mode 100644 index 0000000000..e1f8de8d69 --- /dev/null +++ b/internal/requiredinputs/policytemplates_test.go @@ -0,0 +1,140 @@ +// Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/packages" +) + +func TestBundlePolicyTemplatesInputPackageTemplates_InvalidYAML(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + manifestBytes := []byte("foo: [") + manifest, _ := packages.ReadPackageManifestBytes(manifestBytes) // may be nil/partial + + err = r.bundlePolicyTemplatesInputPackageTemplates(manifestBytes, manifest, nil, buildRoot) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to parse manifest YAML") +} + +// TestBundlePolicyTemplatesInputPackageTemplates_MultiplePolicyTemplates verifies that templates +// from ALL policy templates in an input package are bundled into agent/input/, not just the first +// one (Issue 5 in the alignment review). 
+func TestBundlePolicyTemplatesInputPackageTemplates_MultiplePolicyTemplates(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + manifestBytes := []byte(` +type: integration +requires: + input: + - package: sql + version: 0.1.0 +policy_templates: + - inputs: + - package: sql +`) + err = buildRoot.WriteFile("manifest.yml", manifestBytes, 0644) + require.NoError(t, err) + + manifest, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + + fakeInputDir := createFakeInputWithMultiplePolicyTemplates(t) + inputPkgPaths := map[string]string{"sql": fakeInputDir} + + err = r.bundlePolicyTemplatesInputPackageTemplates(manifestBytes, manifest, inputPkgPaths, buildRoot) + require.NoError(t, err) + + // All templates from both policy templates in the input package must be present. + _, err = buildRoot.ReadFile(filepath.Join("agent", "input", "sql-input.yml.hbs")) + require.NoError(t, err, "template from first policy_template must be bundled") + _, err = buildRoot.ReadFile(filepath.Join("agent", "input", "sql-metrics.yml.hbs")) + require.NoError(t, err, "template from second policy_template must be bundled") + _, err = buildRoot.ReadFile(filepath.Join("agent", "input", "sql-extra.yml.hbs")) + require.NoError(t, err, "extra template from second policy_template must be bundled") + + updated, err := buildRoot.ReadFile("manifest.yml") + require.NoError(t, err) + updatedManifest, err := packages.ReadPackageManifestBytes(updated) + require.NoError(t, err) + require.Len(t, updatedManifest.PolicyTemplates, 1) + require.Len(t, updatedManifest.PolicyTemplates[0].Inputs, 1) + input := updatedManifest.PolicyTemplates[0].Inputs[0] + assert.Empty(t, input.TemplatePath) + assert.Equal(t, []string{"sql-input.yml.hbs", "sql-metrics.yml.hbs", "sql-extra.yml.hbs"}, input.TemplatePaths) +} + +func 
TestBundlePolicyTemplatesInputPackageTemplates_SuccessTemplatesCopied(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + // create current package manifest with one policy template input referencing an input package template + // it has an existing template, so both the existing and input package template should be copied and the manifest updated to reference both + manifestBytes := []byte(` +type: integration +requires: + input: + - package: sql + version: 0.1.0 +policy_templates: + - inputs: + - package: sql + template_path: existing.yml.hbs +`) + err = buildRoot.WriteFile("manifest.yml", manifestBytes, 0644) + require.NoError(t, err) + err = buildRoot.MkdirAll(filepath.Join("agent", "input"), 0755) + require.NoError(t, err) + err = buildRoot.WriteFile(filepath.Join("agent", "input", "existing.yml.hbs"), []byte("existing content"), 0644) + require.NoError(t, err) + + // parse manifest to pass to function + manifest, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + + fakeInputDir := createFakeInputHelper(t) + inputPkgPaths := map[string]string{"sql": fakeInputDir} + + err = r.bundlePolicyTemplatesInputPackageTemplates(manifestBytes, manifest, inputPkgPaths, buildRoot) + require.NoError(t, err) + + // Files exist. + _, err = buildRoot.ReadFile(filepath.Join("agent", "input", "sql-input.yml.hbs")) + require.NoError(t, err) + _, err = buildRoot.ReadFile(filepath.Join("agent", "input", "existing.yml.hbs")) + require.NoError(t, err) + + // Written manifest has template_paths set and template_path removed for that input. 
+ updated, err := buildRoot.ReadFile("manifest.yml") + require.NoError(t, err) + updatedManifest, err := packages.ReadPackageManifestBytes(updated) + require.NoError(t, err) + require.Len(t, updatedManifest.PolicyTemplates, 1) + require.Len(t, updatedManifest.PolicyTemplates[0].Inputs, 1) + input := updatedManifest.PolicyTemplates[0].Inputs[0] + assert.Empty(t, input.TemplatePath) + assert.Equal(t, []string{"sql-input.yml.hbs", "existing.yml.hbs"}, input.TemplatePaths) +} diff --git a/internal/requiredinputs/requiredinputs.go b/internal/requiredinputs/requiredinputs.go new file mode 100644 index 0000000000..9595ccecd2 --- /dev/null +++ b/internal/requiredinputs/requiredinputs.go @@ -0,0 +1,176 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "archive/zip" + "errors" + "fmt" + "io/fs" + "os" + "path" + + "github.com/elastic/elastic-package/internal/logger" + "github.com/elastic/elastic-package/internal/packages" +) + +type eprClient interface { + DownloadPackage(packageName string, packageVersion string, tmpDir string) (string, error) +} + +// Resolver enriches a built integration package using required input packages from the registry: +// policy and data stream templates, merged manifest variables, data stream field definitions, +// and resolution of package: references on inputs and streams to the effective input type +// from the required input package, where applicable. +type Resolver interface { + Bundle(buildPackageRoot string) error +} + +// NoopRequiredInputsResolver is a no-op implementation of Resolver. +// TODO: Replace with a resolver that supports test overrides (e.g. local package paths) +// when implementing local input package resolution for development and testing workflows. 
+type NoopRequiredInputsResolver struct{} + +func (r *NoopRequiredInputsResolver) Bundle(_ string) error { + return nil +} + +// RequiredInputsResolver implements Resolver by downloading required input packages via an EPR client +// and applying Bundle to the built package tree. +type RequiredInputsResolver struct { + eprClient eprClient +} + +// NewRequiredInputsResolver returns a Resolver that downloads required input packages from the registry. +func NewRequiredInputsResolver(eprClient eprClient) *RequiredInputsResolver { + return &RequiredInputsResolver{ + eprClient: eprClient, + } +} + +// Bundle updates buildPackageRoot (a built package directory) for integrations that declare +// requires.input: it downloads those input packages, copies policy and data stream templates, +// merges variables into the integration manifest, bundles data stream field definitions, and +// replaces package: references on policy template inputs and data stream streams with the +// concrete input type from the referenced input package (last, after variable merge). +// Non-integration packages or packages without requires.input are left unchanged. 
+func (r *RequiredInputsResolver) Bundle(buildPackageRoot string) error { + buildRoot, err := os.OpenRoot(buildPackageRoot) + if err != nil { + return fmt.Errorf("failed to open build package root: %w", err) + } + defer buildRoot.Close() + + manifestBytes, err := buildRoot.ReadFile("manifest.yml") + if err != nil { + return fmt.Errorf("failed to read package manifest: %w", err) + } + manifest, err := packages.ReadPackageManifestBytes(manifestBytes) + if err != nil { + return fmt.Errorf("failed to parse package manifest: %w", err) + } + + // validate that the package is an integration and has required input packages + if manifest.Type != "integration" { + return nil + } + if manifest.Requires == nil || len(manifest.Requires.Input) == 0 { + logger.Debug("Package has no required input packages, skipping required input processing") + return nil + } + + tmpDir, err := os.MkdirTemp("", "elastic-package-input-pkgs-*") + if err != nil { + return fmt.Errorf("failed to create temp directory for input packages: %w", err) + } + defer func() { _ = os.RemoveAll(tmpDir) }() + + inputPkgPaths, err := r.mapRequiredInputPackagesPaths(manifest.Requires.Input, tmpDir) + if err != nil { + return err + } + + if err := r.bundlePolicyTemplatesInputPackageTemplates(manifestBytes, manifest, inputPkgPaths, buildRoot); err != nil { + return fmt.Errorf("failed to bundle policy template input package templates: %w", err) + } + + if err := r.bundleDataStreamTemplates(inputPkgPaths, buildRoot); err != nil { + return fmt.Errorf("failed to bundle data stream input package templates: %w", err) + } + + if err := r.mergeVariables(manifest, inputPkgPaths, buildRoot); err != nil { + return fmt.Errorf("merging variables from input packages: %w", err) + } + + if err := r.bundleDataStreamFields(inputPkgPaths, buildRoot); err != nil { + return fmt.Errorf("bundling data stream fields from input packages: %w", err) + } + + if err := r.resolveStreamInputTypes(manifest, inputPkgPaths, buildRoot); err != nil { + 
return fmt.Errorf("resolving stream input types from input packages: %w", err) + } + + return nil +} + +// downloadInputsToTmp downloads required input packages to the temporary directory. +// It returns a map of package name to zip path. +func (r *RequiredInputsResolver) mapRequiredInputPackagesPaths(manifestInputRequires []packages.PackageDependency, tmpDir string) (map[string]string, error) { + inputPkgPaths := make(map[string]string, len(manifestInputRequires)) + errs := make([]error, 0, len(manifestInputRequires)) + for _, inputDependency := range manifestInputRequires { + if _, ok := inputPkgPaths[inputDependency.Package]; ok { + // skip if already downloaded + continue + } + path, err := r.eprClient.DownloadPackage(inputDependency.Package, inputDependency.Version, tmpDir) + if err != nil { + // all required input packages must be downloaded successfully + errs = append(errs, fmt.Errorf("failed to download input package %q: %w", inputDependency.Package, err)) + continue + } + + // key is package name, for now we only support one version per package + inputPkgPaths[inputDependency.Package] = path + logger.Debugf("Resolved input package %q at %s", inputDependency.Package, path) + } + + return inputPkgPaths, errors.Join(errs...) +} + +// openPackageFS returns an fs.FS rooted at the package root (manifest.yml at +// the top level) and a close function that must be called when done. For +// directory packages it closes the os.Root; for zip packages it closes the +// underlying zip.ReadCloser. 
+func openPackageFS(pkgPath string) (fs.FS, func() error, error) { + info, err := os.Stat(pkgPath) + if err != nil { + return nil, nil, err + } + if info.IsDir() { + // open the package directory as a root + root, err := os.OpenRoot(pkgPath) + if err != nil { + return nil, nil, err + } + return root.FS(), root.Close, nil + } + // open the package zip as a zip reader + zipRC, err := zip.OpenReader(pkgPath) + if err != nil { + return nil, nil, err + } + matched, err := fs.Glob(zipRC, "*/"+packages.PackageManifestFile) + if err != nil || len(matched) == 0 { + zipRC.Close() + return nil, nil, fmt.Errorf("failed to find package manifest in zip %s", pkgPath) + } + subFS, err := fs.Sub(zipRC, path.Dir(matched[0])) + if err != nil { + zipRC.Close() + return nil, nil, err + } + return subFS, zipRC.Close, nil +} diff --git a/internal/requiredinputs/requiredinputs_test.go b/internal/requiredinputs/requiredinputs_test.go new file mode 100644 index 0000000000..2d89a72129 --- /dev/null +++ b/internal/requiredinputs/requiredinputs_test.go @@ -0,0 +1,196 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package requiredinputs + +import ( + "fmt" + "os" + "path" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/packages" +) + +type fakeEprClient struct { + downloadPackageFunc func(packageName string, packageVersion string, tmpDir string) (string, error) +} + +func (f *fakeEprClient) DownloadPackage(packageName string, packageVersion string, tmpDir string) (string, error) { + if f.downloadPackageFunc != nil { + return f.downloadPackageFunc(packageName, packageVersion, tmpDir) + } + return "", fmt.Errorf("download package not implemented") +} + +func TestBundle_Success(t *testing.T) { + fakeInputPath := createFakeInputHelper(t) + fakeEprClient := &fakeEprClient{ + downloadPackageFunc: func(packageName string, packageVersion string, tmpDir string) (string, error) { + return fakeInputPath, nil + }, + } + buildPackageRoot := t.TempDir() + + manifest := []byte(`name: test-package +version: 0.1.0 +type: integration +requires: + input: + - package: sql + version: 0.1.0 +policy_templates: + - inputs: + - package: sql + - type: logs +`) + err := os.WriteFile(path.Join(buildPackageRoot, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + + resolver := NewRequiredInputsResolver(fakeEprClient) + + err = resolver.Bundle(buildPackageRoot) + require.NoError(t, err) + + _, err = os.ReadFile(path.Join(buildPackageRoot, "agent", "input", "sql-input.yml.hbs")) + require.NoError(t, err) + + updatedManifestBytes, err := os.ReadFile(path.Join(buildPackageRoot, "manifest.yml")) + require.NoError(t, err) + updatedManifest, err := packages.ReadPackageManifestBytes(updatedManifestBytes) + require.NoError(t, err) + require.Len(t, updatedManifest.Requires.Input, 1) + require.Equal(t, "sql", updatedManifest.Requires.Input[0].Package) + require.Equal(t, "0.1.0", updatedManifest.Requires.Input[0].Version) + + require.Equal(t, "sql", updatedManifest.PolicyTemplates[0].Inputs[0].Type) + 
require.Empty(t, updatedManifest.PolicyTemplates[0].Inputs[0].Package) + require.Len(t, updatedManifest.PolicyTemplates[0].Inputs[0].TemplatePaths, 1) + require.Equal(t, "sql-input.yml.hbs", updatedManifest.PolicyTemplates[0].Inputs[0].TemplatePaths[0]) + +} + +func TestBundle_NoManifest(t *testing.T) { + fakeInputPath := createFakeInputHelper(t) + fakeEprClient := &fakeEprClient{ + downloadPackageFunc: func(packageName string, packageVersion string, tmpDir string) (string, error) { + return fakeInputPath, nil + }, + } + buildPackageRoot := t.TempDir() + + resolver := NewRequiredInputsResolver(fakeEprClient) + + err := resolver.Bundle(buildPackageRoot) + require.Error(t, err) + assert.ErrorContains(t, err, "failed to read package manifest") +} + +func TestBundle_SkipNoIntegration(t *testing.T) { + fakeInputPath := createFakeInputHelper(t) + fakeEprClient := &fakeEprClient{ + downloadPackageFunc: func(packageName string, packageVersion string, tmpDir string) (string, error) { + return fakeInputPath, nil + }, + } + buildPackageRoot := t.TempDir() + + manifest := []byte(`name: test-package +version: 0.1.0 +type: input +`) + err := os.WriteFile(path.Join(buildPackageRoot, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + + resolver := NewRequiredInputsResolver(fakeEprClient) + + err = resolver.Bundle(buildPackageRoot) + require.NoError(t, err) +} + +func TestBundle_NoRequires(t *testing.T) { + fakeEprClient := &fakeEprClient{ + downloadPackageFunc: func(packageName string, packageVersion string, tmpDir string) (string, error) { + return "", fmt.Errorf("no download without requires") + }, + } + buildPackageRoot := t.TempDir() + + manifest := []byte(`name: test-package +version: 0.1.0 +type: integration +policy_templates: + - inputs: + - type: logs +`) + err := os.WriteFile(path.Join(buildPackageRoot, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + + resolver := NewRequiredInputsResolver(fakeEprClient) + + err = resolver.Bundle(buildPackageRoot) + 
require.NoError(t, err) + + updatedManifestBytes, err := os.ReadFile(path.Join(buildPackageRoot, "manifest.yml")) + require.NoError(t, err) + updatedManifest, err := packages.ReadPackageManifestBytes(updatedManifestBytes) + require.NoError(t, err) + require.Nil(t, updatedManifest.Requires) +} + +// TestBundleInputPackageTemplates_PreservesLinkedTemplateTargetPath checks that after +// IncludeLinkedFiles has materialized a policy-template input template (regular file +// at the path named in manifest, not a *.link stub), bundling still prepends input-package +// templates and keeps the integration-owned template_path entry last in template_paths. +func TestBundleInputPackageTemplates_PreservesLinkedTemplateTargetPath(t *testing.T) { + fakeInputPath := createFakeInputHelper(t) + fakeEprClient := &fakeEprClient{ + downloadPackageFunc: func(packageName string, packageVersion string, tmpDir string) (string, error) { + return fakeInputPath, nil + }, + } + buildPackageRoot := t.TempDir() + + const ownedName = "integration_owned.hbs" + ownedContent := []byte("# from linked target\n") + err := os.MkdirAll(path.Join(buildPackageRoot, "agent", "input"), 0755) + require.NoError(t, err) + err = os.WriteFile(path.Join(buildPackageRoot, "agent", "input", ownedName), ownedContent, 0644) + require.NoError(t, err) + + manifest := []byte(`name: test-package +version: 0.1.0 +type: integration +requires: + input: + - package: sql + version: 0.1.0 +policy_templates: + - inputs: + - package: sql + template_path: ` + ownedName + ` + - type: logs +`) + err = os.WriteFile(path.Join(buildPackageRoot, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + + resolver := NewRequiredInputsResolver(fakeEprClient) + err = resolver.Bundle(buildPackageRoot) + require.NoError(t, err) + + got, err := os.ReadFile(path.Join(buildPackageRoot, "agent", "input", ownedName)) + require.NoError(t, err) + require.Equal(t, ownedContent, got) + + updatedManifestBytes, err := 
os.ReadFile(path.Join(buildPackageRoot, "manifest.yml")) + require.NoError(t, err) + updatedManifest, err := packages.ReadPackageManifestBytes(updatedManifestBytes) + require.NoError(t, err) + + paths := updatedManifest.PolicyTemplates[0].Inputs[0].TemplatePaths + require.Equal(t, []string{"sql-input.yml.hbs", ownedName}, paths) +} diff --git a/internal/requiredinputs/streamdefs.go b/internal/requiredinputs/streamdefs.go new file mode 100644 index 0000000000..5fd9c5d9de --- /dev/null +++ b/internal/requiredinputs/streamdefs.go @@ -0,0 +1,218 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "fmt" + "io/fs" + "os" + "path" + + "github.com/elastic/elastic-package/internal/logger" + "github.com/elastic/elastic-package/internal/packages" +) + +// inputPkgInfo holds the resolved metadata from an input package needed to +// replace package: references in composable package manifests. +type inputPkgInfo struct { + identifier string // policy_templates[0].input; if several templates exist, only the first is used + pkgTitle string // manifest.title (fallback title) + pkgDescription string // manifest.description (fallback description) +} + +// resolveStreamInputTypes replaces all package: references in the +// composable package's manifest.yml (policy_templates[].inputs) and in each +// data_stream/*/manifest.yml (streams[]) with the actual input type identifier +// from the referenced input package, then removes the package: key. +// +// This step must run last, after mergeVariables, because that step uses +// stream.Package and input.Package to identify which entries to process. +// It resolves metadata per required input via buildInputPkgInfoByName, then +// rewrites the root manifest and each data stream manifest. 
+func (r *RequiredInputsResolver) resolveStreamInputTypes( + manifest *packages.PackageManifest, + inputPkgPaths map[string]string, + buildRoot *os.Root, +) error { + infoByPkg, err := buildInputPkgInfoByName(inputPkgPaths) + if err != nil { + return err + } + + if err := applyInputTypesToComposableManifest(manifest, buildRoot, infoByPkg); err != nil { + return err + } + + return applyInputTypesToDataStreamManifests(buildRoot, infoByPkg) +} + +// buildInputPkgInfoByName loads inputPkgInfo for each downloaded required input package path. +func buildInputPkgInfoByName(inputPkgPaths map[string]string) (map[string]inputPkgInfo, error) { + infoByPkg := make(map[string]inputPkgInfo, len(inputPkgPaths)) + for pkgName, pkgPath := range inputPkgPaths { + info, err := loadInputPkgInfo(pkgPath) + if err != nil { + return nil, fmt.Errorf("loading input package info for %q: %w", pkgName, err) + } + infoByPkg[pkgName] = info + } + return infoByPkg, nil +} + +// applyInputTypesToComposableManifest sets type (and optional title/description) on +// package-backed policy template inputs in manifest.yml and drops package:. 
+func applyInputTypesToComposableManifest( + manifest *packages.PackageManifest, + buildRoot *os.Root, + infoByPkg map[string]inputPkgInfo, +) error { + manifestBytes, err := buildRoot.ReadFile("manifest.yml") + if err != nil { + return fmt.Errorf("reading manifest: %w", err) + } + root, err := parseDocumentRootMapping(manifestBytes) + if err != nil { + return fmt.Errorf("parsing manifest YAML: %w", err) + } + + for ptIdx, pt := range manifest.PolicyTemplates { + for inputIdx, input := range pt.Inputs { + if input.Package == "" { + continue + } + info, ok := infoByPkg[input.Package] + if !ok { + return fmt.Errorf("input package %q referenced in policy_templates[%d].inputs[%d] not found in required inputs", input.Package, ptIdx, inputIdx) + } + + inputNode, err := getInputMappingNode(root, ptIdx, inputIdx) + if err != nil { + return fmt.Errorf("getting input node at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) + } + + upsertKey(inputNode, "type", strVal(info.identifier)) + + if mappingValue(inputNode, "title") == nil && info.pkgTitle != "" { + upsertKey(inputNode, "title", strVal(info.pkgTitle)) + } + if mappingValue(inputNode, "description") == nil && info.pkgDescription != "" { + upsertKey(inputNode, "description", strVal(info.pkgDescription)) + } + + removeKey(inputNode, "package") + } + } + + updated, err := formatYAMLNode(root) + if err != nil { + return fmt.Errorf("formatting updated manifest: %w", err) + } + if err := buildRoot.WriteFile("manifest.yml", updated, 0664); err != nil { + return fmt.Errorf("writing updated manifest: %w", err) + } + return nil +} + +// applyInputTypesToDataStreamManifests sets input on package-backed streams in each +// data_stream/*/manifest.yml and drops package:. 
+func applyInputTypesToDataStreamManifests(buildRoot *os.Root, infoByPkg map[string]inputPkgInfo) error { + dsManifestPaths, err := fs.Glob(buildRoot.FS(), "data_stream/*/manifest.yml") + if err != nil { + return fmt.Errorf("globbing data stream manifests: %w", err) + } + + for _, manifestPath := range dsManifestPaths { + dsManifestBytes, err := buildRoot.ReadFile(manifestPath) + if err != nil { + return fmt.Errorf("reading data stream manifest %q: %w", manifestPath, err) + } + + dsRoot, err := parseDocumentRootMapping(dsManifestBytes) + if err != nil { + return fmt.Errorf("parsing data stream manifest YAML %q: %w", manifestPath, err) + } + + dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) + if err != nil { + return fmt.Errorf("parsing data stream manifest %q: %w", manifestPath, err) + } + + for streamIdx, stream := range dsManifest.Streams { + if stream.Package == "" { + continue + } + info, ok := infoByPkg[stream.Package] + if !ok { + return fmt.Errorf("input package %q referenced in %q streams[%d] not found in required inputs", stream.Package, path.Dir(manifestPath), streamIdx) + } + + streamNode, err := getStreamMappingNode(dsRoot, streamIdx) + if err != nil { + return fmt.Errorf("getting stream node at index %d in %q: %w", streamIdx, manifestPath, err) + } + + upsertKey(streamNode, "input", strVal(info.identifier)) + + if stream.Title == "" && info.pkgTitle != "" { + upsertKey(streamNode, "title", strVal(info.pkgTitle)) + } + if stream.Description == "" && info.pkgDescription != "" { + upsertKey(streamNode, "description", strVal(info.pkgDescription)) + } + + removeKey(streamNode, "package") + } + + dsUpdated, err := formatYAMLNode(dsRoot) + if err != nil { + return fmt.Errorf("formatting updated data stream manifest %q: %w", manifestPath, err) + } + if err := buildRoot.WriteFile(manifestPath, dsUpdated, 0664); err != nil { + return fmt.Errorf("writing updated data stream manifest %q: %w", manifestPath, err) + } + } + + return nil +} + 
+// loadInputPkgInfo reads an input package's manifest and extracts the metadata +// needed to replace package: references in composable packages. When the input +// package has several policy templates, only the first template's input id is +// used and a warning is logged. +func loadInputPkgInfo(pkgPath string) (inputPkgInfo, error) { + pkgFS, closeFn, err := openPackageFS(pkgPath) + if err != nil { + return inputPkgInfo{}, fmt.Errorf("opening package: %w", err) + } + defer func() { _ = closeFn() }() + + manifestBytes, err := fs.ReadFile(pkgFS, packages.PackageManifestFile) + if err != nil { + return inputPkgInfo{}, fmt.Errorf("reading manifest: %w", err) + } + + m, err := packages.ReadPackageManifestBytes(manifestBytes) + if err != nil { + return inputPkgInfo{}, fmt.Errorf("parsing manifest: %w", err) + } + + if len(m.PolicyTemplates) == 0 { + return inputPkgInfo{}, fmt.Errorf("input package %q has no policy templates", m.Name) + } + if len(m.PolicyTemplates) > 1 { + logger.Warnf("Input package %q has multiple policy templates; using input identifier %q from first policy template only", m.Name, m.PolicyTemplates[0].Input) + } + + pt := m.PolicyTemplates[0] + if pt.Input == "" { + return inputPkgInfo{}, fmt.Errorf("input package %q policy template %q has no input identifier", m.Name, pt.Name) + } + + return inputPkgInfo{ + identifier: pt.Input, + pkgTitle: m.Title, + pkgDescription: m.Description, + }, nil +} diff --git a/internal/requiredinputs/streamdefs_test.go b/internal/requiredinputs/streamdefs_test.go new file mode 100644 index 0000000000..f3a0b869bb --- /dev/null +++ b/internal/requiredinputs/streamdefs_test.go @@ -0,0 +1,530 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package requiredinputs + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/packages" +) + +// ---- unit tests -------------------------------------------------------------- + +// TestLoadInputPkgInfo verifies that metadata is correctly extracted from an +// input package manifest directory. +func TestLoadInputPkgInfo(t *testing.T) { + dir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(dir, "manifest.yml"), []byte(` +name: my_input_pkg +title: My Input Package +description: A test input package. +version: 0.1.0 +type: input +policy_templates: + - name: logs + input: logfile + type: logs +`), 0644)) + + info, err := loadInputPkgInfo(dir) + require.NoError(t, err) + assert.Equal(t, "logfile", info.identifier) + assert.Equal(t, "My Input Package", info.pkgTitle) + assert.Equal(t, "A test input package.", info.pkgDescription) +} + +func TestLoadInputPkgInfo_NoPolicyTemplates(t *testing.T) { + dir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(dir, "manifest.yml"), []byte(` +name: empty_pkg +version: 0.1.0 +type: input +`), 0644)) + + _, err := loadInputPkgInfo(dir) + assert.ErrorContains(t, err, "no policy templates") +} + +func TestLoadInputPkgInfo_EmptyInputIdentifier(t *testing.T) { + dir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(dir, "manifest.yml"), []byte(` +name: bad_pkg +version: 0.1.0 +type: input +policy_templates: + - name: logs + type: logs +`), 0644)) + + _, err := loadInputPkgInfo(dir) + assert.ErrorContains(t, err, "no input identifier") +} + +// TestLoadInputPkgInfo_MultiplePolicyTemplatesUsesFirstInput verifies that when +// an input package declares more than one policy template, loadInputPkgInfo +// keeps the input identifier from the first template (see streamdefs.go). 
This +// matches resolveStreamInputTypes behavior and the warning logged for the +// ambiguous multi-template case. +func TestLoadInputPkgInfo_MultiplePolicyTemplatesUsesFirstInput(t *testing.T) { + dir := createFakeInputWithMultiplePolicyTemplates(t) + info, err := loadInputPkgInfo(dir) + require.NoError(t, err) + assert.Equal(t, "sql", info.identifier) + assert.NotEqual(t, "sql/metrics", info.identifier) +} + +// ---- integration tests ------------------------------------------------------- + +// TestResolveStreamInputTypes_ReplacesPackageWithType verifies that a +// policy_templates[].inputs entry with package: is replaced by type: and that +// the package: key is removed. +func TestResolveStreamInputTypes_ReplacesPackageWithType(t *testing.T) { + inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: test_input +title: Test Input +description: A test input package. +version: 0.1.0 +type: input +policy_templates: + - name: logs + input: logfile + type: logs +`), 0644)) + + buildRoot := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(buildRoot, "manifest.yml"), []byte(` +format_version: 3.0.0 +name: my_integration +version: 0.1.0 +type: integration +requires: + input: + - package: test_input + version: 0.1.0 +policy_templates: + - name: logs + title: Logs + description: Collect logs + inputs: + - package: test_input + title: Collect logs via test input + description: Use the test input to collect logs +`), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + resolver := NewRequiredInputsResolver(epr) + require.NoError(t, resolver.Bundle(buildRoot)) + + manifestBytes, err := os.ReadFile(filepath.Join(buildRoot, "manifest.yml")) + require.NoError(t, err) + m, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + + require.Len(t, 
m.PolicyTemplates[0].Inputs, 1) + assert.Equal(t, "logfile", m.PolicyTemplates[0].Inputs[0].Type) + assert.Empty(t, m.PolicyTemplates[0].Inputs[0].Package) +} + +// TestResolveStreamInputTypes_InputPkgWithMultiplePolicyTemplatesUsesFirst +// exercises Bundle when the required input package has several policy +// templates with different input identifiers: resolution must use the first +// template only so composable manifests stay consistent with loadInputPkgInfo. +func TestResolveStreamInputTypes_InputPkgWithMultiplePolicyTemplatesUsesFirst(t *testing.T) { + inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: dual_template_input +title: Dual Template Input +description: Input with two policy templates. +version: 0.1.0 +type: input +policy_templates: + - name: first + input: logfile + type: logs + - name: second + input: winlog + type: logs +`), 0644)) + + buildRoot := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(buildRoot, "manifest.yml"), []byte(` +format_version: 3.0.0 +name: my_integration +version: 0.1.0 +type: integration +requires: + input: + - package: dual_template_input + version: 0.1.0 +policy_templates: + - name: logs + title: Logs + description: Collect logs + inputs: + - package: dual_template_input + title: Collect logs via dual-template input + description: Use the input package +`), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + resolver := NewRequiredInputsResolver(epr) + require.NoError(t, resolver.Bundle(buildRoot)) + + manifestBytes, err := os.ReadFile(filepath.Join(buildRoot, "manifest.yml")) + require.NoError(t, err) + m, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + + require.Len(t, m.PolicyTemplates[0].Inputs, 1) + assert.Equal(t, "logfile", m.PolicyTemplates[0].Inputs[0].Type) + assert.Empty(t, 
m.PolicyTemplates[0].Inputs[0].Package) +} + +// TestResolveStreamInputTypes_PreservesExistingTitleAndDescription verifies +// that title and description already set in the composable package input entry +// are preserved and not overwritten by the input package's values. +func TestResolveStreamInputTypes_PreservesExistingTitleAndDescription(t *testing.T) { + inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: test_input +title: Input Pkg Title +description: Input pkg description. +version: 0.1.0 +type: input +policy_templates: + - name: logs + input: logfile + type: logs +`), 0644)) + + buildRoot := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(buildRoot, "manifest.yml"), []byte(` +format_version: 3.0.0 +name: my_integration +version: 0.1.0 +type: integration +requires: + input: + - package: test_input + version: 0.1.0 +policy_templates: + - name: logs + title: Logs + description: Collect logs + inputs: + - package: test_input + title: My Custom Title + description: My custom description. +`), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + resolver := NewRequiredInputsResolver(epr) + require.NoError(t, resolver.Bundle(buildRoot)) + + manifestBytes, err := os.ReadFile(filepath.Join(buildRoot, "manifest.yml")) + require.NoError(t, err) + + // Check raw YAML to verify title/description are preserved verbatim. + assert.Contains(t, string(manifestBytes), "My Custom Title") + assert.Contains(t, string(manifestBytes), "My custom description.") + assert.NotContains(t, string(manifestBytes), "Input Pkg Title") +} + +// TestResolveStreamInputTypes_PopulatesTitleFromInputPkg verifies that when +// the composable package input entry has no title/description, they are +// populated from the input package manifest. 
+func TestResolveStreamInputTypes_PopulatesTitleFromInputPkg(t *testing.T) { + inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: test_input +title: Input Pkg Title +description: Input pkg description. +version: 0.1.0 +type: input +policy_templates: + - name: logs + input: logfile + type: logs +`), 0644)) + + buildRoot := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(buildRoot, "manifest.yml"), []byte(` +format_version: 3.0.0 +name: my_integration +version: 0.1.0 +type: integration +requires: + input: + - package: test_input + version: 0.1.0 +policy_templates: + - name: logs + title: Logs + description: Collect logs + inputs: + - package: test_input +`), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + resolver := NewRequiredInputsResolver(epr) + require.NoError(t, resolver.Bundle(buildRoot)) + + manifestBytes, err := os.ReadFile(filepath.Join(buildRoot, "manifest.yml")) + require.NoError(t, err) + + assert.Contains(t, string(manifestBytes), "Input Pkg Title") + assert.Contains(t, string(manifestBytes), "Input pkg description.") +} + +// TestResolveStreamInputTypes_SkipsNonPackageInputs verifies that inputs +// declared with type: (no package:) are not modified. 
+func TestResolveStreamInputTypes_SkipsNonPackageInputs(t *testing.T) { + inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: test_input +title: Test Input +version: 0.1.0 +type: input +policy_templates: + - name: logs + input: logfile + type: logs +`), 0644)) + + buildRoot := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(buildRoot, "manifest.yml"), []byte(` +format_version: 3.0.0 +name: my_integration +version: 0.1.0 +type: integration +requires: + input: + - package: test_input + version: 0.1.0 +policy_templates: + - name: logs + title: Logs + description: Collect logs + inputs: + - package: test_input + title: From pkg + description: From pkg. + - type: metrics + title: Direct metrics + description: Direct metrics input. +`), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + resolver := NewRequiredInputsResolver(epr) + require.NoError(t, resolver.Bundle(buildRoot)) + + manifestBytes, err := os.ReadFile(filepath.Join(buildRoot, "manifest.yml")) + require.NoError(t, err) + m, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + + require.Len(t, m.PolicyTemplates[0].Inputs, 2) + assert.Equal(t, "logfile", m.PolicyTemplates[0].Inputs[0].Type) + assert.Empty(t, m.PolicyTemplates[0].Inputs[0].Package) + assert.Equal(t, "metrics", m.PolicyTemplates[0].Inputs[1].Type) + assert.Empty(t, m.PolicyTemplates[0].Inputs[1].Package) +} + +// TestResolveStreamInputTypes_DataStreamStreamReplacement verifies that +// streams[].package in data stream manifests is replaced with streams[].input. +func TestResolveStreamInputTypes_DataStreamStreamReplacement(t *testing.T) { + inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: test_input +title: Test Input +description: Test input pkg. 
+version: 0.1.0 +type: input +policy_templates: + - name: logs + input: logfile + type: logs +`), 0644)) + + buildRoot := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(buildRoot, "manifest.yml"), []byte(` +format_version: 3.0.0 +name: my_integration +version: 0.1.0 +type: integration +requires: + input: + - package: test_input + version: 0.1.0 +policy_templates: + - name: logs + title: Logs + description: Collect logs + data_streams: + - test_logs + inputs: + - package: test_input + title: Collect logs + description: Collect logs. +`), 0644)) + + dsDir := filepath.Join(buildRoot, "data_stream", "test_logs") + require.NoError(t, os.MkdirAll(dsDir, 0755)) + require.NoError(t, os.WriteFile(filepath.Join(dsDir, "manifest.yml"), []byte(` +title: Test Logs +type: logs +streams: + - package: test_input + title: Test log stream + description: Collect test logs. +`), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + resolver := NewRequiredInputsResolver(epr) + require.NoError(t, resolver.Bundle(buildRoot)) + dsManifestBytes, err := os.ReadFile(filepath.Join(dsDir, "manifest.yml")) + require.NoError(t, err) + dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) + require.NoError(t, err) + + require.Len(t, dsManifest.Streams, 1) + assert.Equal(t, "logfile", dsManifest.Streams[0].Input) + assert.Empty(t, dsManifest.Streams[0].Package) + assert.Equal(t, "Test log stream", dsManifest.Streams[0].Title) +} + +// TestResolveStreamInputTypes_SkipsNonPackageStreams verifies that streams +// declared with input: (no package:) are not modified. 
+func TestResolveStreamInputTypes_SkipsNonPackageStreams(t *testing.T) { + inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: test_input +title: Test Input +version: 0.1.0 +type: input +policy_templates: + - name: logs + input: logfile + type: logs +`), 0644)) + + buildRoot := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(buildRoot, "manifest.yml"), []byte(` +format_version: 3.0.0 +name: my_integration +version: 0.1.0 +type: integration +requires: + input: + - package: test_input + version: 0.1.0 +policy_templates: + - name: logs + title: Logs + description: Collect logs + data_streams: + - test_logs + inputs: + - package: test_input + title: Collect logs + description: Collect logs. +`), 0644)) + + dsDir := filepath.Join(buildRoot, "data_stream", "test_logs") + require.NoError(t, os.MkdirAll(dsDir, 0755)) + require.NoError(t, os.WriteFile(filepath.Join(dsDir, "manifest.yml"), []byte(` +title: Test Logs +type: logs +streams: + - package: test_input + title: From pkg + description: From pkg. + - input: metrics + title: Direct metrics + description: Direct metrics stream. 
+`), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + resolver := NewRequiredInputsResolver(epr) + require.NoError(t, resolver.Bundle(buildRoot)) + + dsManifestBytes, err := os.ReadFile(filepath.Join(dsDir, "manifest.yml")) + require.NoError(t, err) + dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) + require.NoError(t, err) + + require.Len(t, dsManifest.Streams, 2) + assert.Equal(t, "logfile", dsManifest.Streams[0].Input) + assert.Empty(t, dsManifest.Streams[0].Package) + assert.Equal(t, "metrics", dsManifest.Streams[1].Input) + assert.Empty(t, dsManifest.Streams[1].Package) +} + +// TestResolveStreamInputTypes_FieldBundlingFixture runs the full +// Bundle pipeline on the composable CI integration fixture and +// verifies that package: references are replaced in both the main manifest and +// the data stream manifest. +func TestResolveStreamInputTypes_FieldBundlingFixture(t *testing.T) { + buildPackageRoot := copyComposableIntegrationFixture(t) + resolver := NewRequiredInputsResolver(makeFakeEprForFieldBundling(t)) + require.NoError(t, resolver.Bundle(buildPackageRoot)) + + // Check main manifest: package-backed input → type: logfile; native logs input unchanged. + manifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "manifest.yml")) + require.NoError(t, err) + m, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + require.Len(t, m.PolicyTemplates[0].Inputs, 2) + assert.Equal(t, "logfile", m.PolicyTemplates[0].Inputs[0].Type) + assert.Empty(t, m.PolicyTemplates[0].Inputs[0].Package) + assert.Equal(t, "logs", m.PolicyTemplates[0].Inputs[1].Type) + assert.Empty(t, m.PolicyTemplates[0].Inputs[1].Package) + + // Check data stream manifest: package stream → input: logfile; native stream stays logfile. 
+ dsManifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "manifest.yml")) + require.NoError(t, err) + dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) + require.NoError(t, err) + require.Len(t, dsManifest.Streams, 2) + assert.Equal(t, "logfile", dsManifest.Streams[0].Input) + assert.Empty(t, dsManifest.Streams[0].Package) + assert.NotEmpty(t, dsManifest.Streams[0].Title) + assert.Equal(t, "logfile", dsManifest.Streams[1].Input) + assert.Empty(t, dsManifest.Streams[1].Package) +} diff --git a/internal/requiredinputs/streams.go b/internal/requiredinputs/streams.go new file mode 100644 index 0000000000..77b6939a4c --- /dev/null +++ b/internal/requiredinputs/streams.go @@ -0,0 +1,149 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "errors" + "fmt" + "io/fs" + "os" + "path" + + "github.com/goccy/go-yaml/ast" + + "github.com/elastic/elastic-package/internal/packages" +) + +func (r *RequiredInputsResolver) bundleDataStreamTemplates(inputPkgPaths map[string]string, buildRoot *os.Root) error { + // get all data stream manifest paths in the build package + dsManifestsPaths, err := fs.Glob(buildRoot.FS(), "data_stream/*/manifest.yml") + if err != nil { + return fmt.Errorf("failed to glob data stream manifests: %w", err) + } + + errorList := make([]error, 0) + for _, manifestPath := range dsManifestsPaths { + if err := r.processDataStreamManifest(manifestPath, inputPkgPaths, buildRoot); err != nil { + errorList = append(errorList, err) + } + } + return errors.Join(errorList...) 
+} + +func (r *RequiredInputsResolver) processDataStreamManifest(manifestPath string, inputPkgPaths map[string]string, buildRoot *os.Root) error { + manifestBytes, err := buildRoot.ReadFile(manifestPath) + if err != nil { + return fmt.Errorf("failed to read data stream manifest %q: %w", manifestPath, err) + } + // parse the manifest YAML document preserving formatting for targeted modifications + // using manifestBytes allows us to preserve comments and formatting in the manifest when we update it with template paths from input packages + root, err := parseDocumentRootMapping(manifestBytes) + if err != nil { + return fmt.Errorf("failed to parse data stream manifest YAML: %w", err) + } + + manifest, err := packages.ReadDataStreamManifestBytes(manifestBytes) + if err != nil { + return fmt.Errorf("failed to parse data stream manifest %q: %w", manifestPath, err) + } + + errorList := make([]error, 0) + for idx, stream := range manifest.Streams { + if stream.Package == "" { + continue + } + pkgPath, ok := inputPkgPaths[stream.Package] + if !ok { + errorList = append(errorList, fmt.Errorf("failed to resolve input package %q for stream in manifest %q: not listed in requires.input", stream.Package, manifestPath)) + continue + } + dsRootDir := path.Dir(manifestPath) + inputPaths, err := collectAndCopyInputPkgDataStreams(dsRootDir, pkgPath, stream.Package, buildRoot) + if err != nil { + return fmt.Errorf("failed to collect and copy input package data stream templates for manifest %q: %w", manifestPath, err) + } + if len(inputPaths) == 0 { + continue + } + + // current manifest template paths + paths := make([]string, 0) + // if composable package has included custom template path or paths, include them + // if no template paths are included at the manifest, only the imported templates are included + if stream.TemplatePath != "" { + paths = append(paths, stream.TemplatePath) + } else if len(stream.TemplatePaths) > 0 { + paths = append(paths, stream.TemplatePaths...) 
+ }
+ paths = append(inputPaths, paths...)
+
+ if err := setStreamTemplatePaths(root, idx, paths); err != nil {
+ return fmt.Errorf("failed to set stream template paths in manifest %q: %w", manifestPath, err)
+ }
+ }
+ if err := errors.Join(errorList...); err != nil {
+ return err
+ }
+
+ // Serialise the updated YAML document back to disk.
+ updated, err := formatYAMLNode(root)
+ if err != nil {
+ return fmt.Errorf("failed to format updated manifest: %w", err)
+ }
+ if err := buildRoot.WriteFile(manifestPath, updated, 0664); err != nil {
+ return fmt.Errorf("failed to write updated manifest: %w", err)
+ }
+
+ return nil
+}
+
+// collectAndCopyInputPkgDataStreams collects the data streams from the input package and copies them to the agent/stream directory of the build package
+// it returns the list of copied data stream names
+//
+// Design note: input package templates are authored for input-level compilation, where available
+// variables are: package vars + input.vars. When these templates are copied to the integration's
+// data_stream/<data stream>/agent/stream/ directory and compiled as stream templates, Fleet compiles them
+// with package vars + input.vars + stream.vars. For templates that only reference package-level
+// or input-level variables this works correctly. However, stream-level vars defined on the
+// integration's data stream will NOT be accessible from input package templates — the template
+// content must explicitly reference them. If stream-level vars need to be rendered, add an
+// integration-owned stream template and include it after the input package templates in
+// template_paths (integration templates are appended last and take precedence).
+// See https://github.com/elastic/elastic-package/issues/3279 for the follow-up work on
+// merging variable definitions from input packages and composable packages at build time. 
+func collectAndCopyInputPkgDataStreams(dsRootDir, inputPkgPath, inputPkgName string, buildRoot *os.Root) ([]string, error) {
+	agentStreamDir := path.Join(dsRootDir, "agent", "stream")
+	return collectAndCopyPolicyTemplateFiles(inputPkgPath, inputPkgName, agentStreamDir, buildRoot)
+}
+
+// setStreamTemplatePaths replaces the template path configuration of streams[streamIdx]
+// in the manifest AST: the singular template_path key (if any) is removed and
+// template_paths is set to the given list, preserving surrounding formatting.
+func setStreamTemplatePaths(root *ast.MappingNode, streamIdx int, paths []string) error {
+	// Navigate: root mapping -> "streams" key -> sequence -> item [streamIdx]
+	streamsNode, ok := mappingValue(root, "streams").(*ast.SequenceNode)
+	if !ok {
+		return fmt.Errorf("failed to set stream template paths: 'streams' key not found in manifest")
+	}
+	// Guard both ends of the range: a negative index would otherwise panic on the
+	// slice access below (keeps this consistent with getStreamMappingNode).
+	if streamIdx < 0 || streamIdx >= len(streamsNode.Values) {
+		return fmt.Errorf("failed to set stream template paths: stream index %d out of range (len=%d)", streamIdx, len(streamsNode.Values))
+	}
+
+	streamNode, ok := streamsNode.Values[streamIdx].(*ast.MappingNode)
+	if !ok {
+		return fmt.Errorf("failed to set stream template paths: stream entry %d is not a mapping", streamIdx)
+	}
+
+	// Remove singular template_path if present.
+	removeKey(streamNode, "template_path")
+
+	// Build the template_paths sequence node.
+	seqNode := newSeqNode()
+	for _, p := range paths {
+		seqNode.Values = append(seqNode.Values, strVal(p))
+	}
+
+	// Upsert template_paths.
+	upsertKey(streamNode, "template_paths", seqNode)
+
+	return nil
+}
diff --git a/internal/requiredinputs/streams_test.go b/internal/requiredinputs/streams_test.go
new file mode 100644
index 0000000000..3868ee7e3a
--- /dev/null
+++ b/internal/requiredinputs/streams_test.go
@@ -0,0 +1,269 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License;
+// you may not use this file except in compliance with the Elastic License.
+ +package requiredinputs + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/packages" +) + +// TestBundleDataStreamTemplates_MultiplePolicyTemplates verifies that templates from ALL +// policy templates in the input package are bundled, not just the first one (Issue 5). +func TestBundleDataStreamTemplates_MultiplePolicyTemplates(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + datastreamDir := filepath.Join("data_stream", "test_ds") + err = buildRoot.MkdirAll(datastreamDir, 0755) + require.NoError(t, err) + + manifestBytes := []byte(` +streams: + - package: sql +`) + err = buildRoot.WriteFile(filepath.Join(datastreamDir, "manifest.yml"), manifestBytes, 0644) + require.NoError(t, err) + + fakeInputDir := createFakeInputWithMultiplePolicyTemplates(t) + inputPkgPaths := map[string]string{"sql": fakeInputDir} + + err = r.bundleDataStreamTemplates(inputPkgPaths, buildRoot) + require.NoError(t, err) + + // All templates from both policy templates must be present. 
+ _, err = buildRoot.ReadFile(filepath.Join(datastreamDir, "agent", "stream", "sql-input.yml.hbs")) + require.NoError(t, err, "template from first policy_template must be bundled") + _, err = buildRoot.ReadFile(filepath.Join(datastreamDir, "agent", "stream", "sql-metrics.yml.hbs")) + require.NoError(t, err, "template from second policy_template must be bundled") + _, err = buildRoot.ReadFile(filepath.Join(datastreamDir, "agent", "stream", "sql-extra.yml.hbs")) + require.NoError(t, err, "extra template from second policy_template must be bundled") + + updated, err := buildRoot.ReadFile(filepath.Join(datastreamDir, "manifest.yml")) + require.NoError(t, err) + updatedManifest, err := packages.ReadDataStreamManifestBytes(updated) + require.NoError(t, err) + require.Len(t, updatedManifest.Streams, 1) + assert.Equal(t, []string{"sql-input.yml.hbs", "sql-metrics.yml.hbs", "sql-extra.yml.hbs"}, updatedManifest.Streams[0].TemplatePaths) +} + +func TestBundleDataStreamTemplates_SuccessTemplatesCopied(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + datastreamDir := filepath.Join("data_stream", "test_ds") + err = buildRoot.MkdirAll(datastreamDir, 0755) + require.NoError(t, err) + // create current package manifest with one data stream input referencing an input package template + // it has an existing template, so both the existing and input package template should be copied and the manifest updated to reference both + manifestBytes := []byte(` +streams: + - package: sql + template_path: existing.yml.hbs +`) + err = buildRoot.WriteFile(filepath.Join(datastreamDir, "manifest.yml"), manifestBytes, 0644) + require.NoError(t, err) + err = buildRoot.MkdirAll(filepath.Join(datastreamDir, "agent", "stream"), 0755) + require.NoError(t, err) + err = buildRoot.WriteFile(filepath.Join(datastreamDir, "agent", "stream", "existing.yml.hbs"), 
[]byte("existing content"), 0644) + require.NoError(t, err) + + fakeInputDir := createFakeInputHelper(t) + inputPkgPaths := map[string]string{"sql": fakeInputDir} + + err = r.bundleDataStreamTemplates(inputPkgPaths, buildRoot) + require.NoError(t, err) + + // Files exist. + _, err = buildRoot.ReadFile(filepath.Join(datastreamDir, "agent", "stream", "sql-input.yml.hbs")) + require.NoError(t, err) + _, err = buildRoot.ReadFile(filepath.Join(datastreamDir, "agent", "stream", "existing.yml.hbs")) + require.NoError(t, err) + + // Written manifest has template_paths set and template_path removed for that input. + updated, err := buildRoot.ReadFile(filepath.Join(datastreamDir, "manifest.yml")) + require.NoError(t, err) + updatedManifest, err := packages.ReadDataStreamManifestBytes(updated) + require.NoError(t, err) + require.Len(t, updatedManifest.Streams, 1) + input := updatedManifest.Streams[0] + assert.Empty(t, input.TemplatePath) + assert.Equal(t, []string{"sql-input.yml.hbs", "existing.yml.hbs"}, input.TemplatePaths) +} + +// TestProcessDataStreamManifest_ReadFailure verifies that a missing manifest file returns an error. +func TestProcessDataStreamManifest_ReadFailure(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + err = r.processDataStreamManifest("data_stream/nonexistent/manifest.yml", nil, buildRoot) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to read data stream manifest") +} + +// TestProcessDataStreamManifest_InvalidYAML verifies that a manifest with invalid YAML returns an error. 
+func TestProcessDataStreamManifest_InvalidYAML(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + datastreamDir := filepath.Join("data_stream", "test_ds") + err = buildRoot.MkdirAll(datastreamDir, 0755) + require.NoError(t, err) + err = buildRoot.WriteFile(filepath.Join(datastreamDir, "manifest.yml"), []byte(":\tinvalid: yaml: {"), 0644) + require.NoError(t, err) + + err = r.processDataStreamManifest(filepath.Join(datastreamDir, "manifest.yml"), nil, buildRoot) + require.Error(t, err) +} + +// TestProcessDataStreamManifest_UnknownPackage verifies that a stream referencing a package not in +// inputPkgPaths returns an error and does NOT write back the manifest. +func TestProcessDataStreamManifest_UnknownPackage(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + datastreamDir := filepath.Join("data_stream", "test_ds") + err = buildRoot.MkdirAll(datastreamDir, 0755) + require.NoError(t, err) + + original := []byte("streams:\n - package: sql\n") + manifestPath := filepath.Join(datastreamDir, "manifest.yml") + err = buildRoot.WriteFile(manifestPath, original, 0644) + require.NoError(t, err) + + err = r.processDataStreamManifest(manifestPath, map[string]string{}, buildRoot) + require.Error(t, err) + assert.Contains(t, err.Error(), "not listed in requires.input") + + // Manifest must not have been overwritten. + written, readErr := buildRoot.ReadFile(manifestPath) + require.NoError(t, readErr) + assert.Equal(t, original, written) +} + +// TestProcessDataStreamManifest_PartialStreamError verifies that when one stream succeeds and another +// references an unknown package, the function returns an error and the manifest is not written back. 
+func TestProcessDataStreamManifest_PartialStreamError(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + datastreamDir := filepath.Join("data_stream", "test_ds") + err = buildRoot.MkdirAll(datastreamDir, 0755) + require.NoError(t, err) + + original := []byte("streams:\n - package: sql\n - package: unknown\n") + manifestPath := filepath.Join(datastreamDir, "manifest.yml") + err = buildRoot.WriteFile(manifestPath, original, 0644) + require.NoError(t, err) + + fakeInputDir := createFakeInputHelper(t) + err = r.processDataStreamManifest(manifestPath, map[string]string{"sql": fakeInputDir}, buildRoot) + require.Error(t, err) + assert.Contains(t, err.Error(), "unknown") + + // Manifest must not have been written back despite the first stream succeeding. + written, readErr := buildRoot.ReadFile(manifestPath) + require.NoError(t, readErr) + assert.Equal(t, original, written) +} + +// TestProcessDataStreamManifest_NoPackageSkipped verifies that streams without a package field are +// skipped and the manifest is written back unmodified (no template_paths added). 
+func TestProcessDataStreamManifest_NoPackageSkipped(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + datastreamDir := filepath.Join("data_stream", "test_ds") + err = buildRoot.MkdirAll(datastreamDir, 0755) + require.NoError(t, err) + + manifestPath := filepath.Join(datastreamDir, "manifest.yml") + err = buildRoot.WriteFile(manifestPath, []byte("streams:\n - title: plain stream\n"), 0644) + require.NoError(t, err) + + err = r.processDataStreamManifest(manifestPath, map[string]string{}, buildRoot) + require.NoError(t, err) + + updated, readErr := buildRoot.ReadFile(manifestPath) + require.NoError(t, readErr) + manifest, parseErr := packages.ReadDataStreamManifestBytes(updated) + require.NoError(t, parseErr) + require.Len(t, manifest.Streams, 1) + assert.Empty(t, manifest.Streams[0].TemplatePaths) + assert.Empty(t, manifest.Streams[0].TemplatePath) +} + +// TestBundleDataStreamTemplates_BundlesWithoutDataStreamsAssociation verifies that a data stream +// stream entry with package: X IS bundled even when the root policy template has no data_streams +// field. Bundling is driven solely by the data stream manifest's streams[].package reference. 
+func TestBundleDataStreamTemplates_BundlesWithoutDataStreamsAssociation(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + datastreamDir := filepath.Join("data_stream", "test_ds") + err = buildRoot.MkdirAll(datastreamDir, 0755) + require.NoError(t, err) + + manifestBytes := []byte(` +streams: + - package: sql +`) + err = buildRoot.WriteFile(filepath.Join(datastreamDir, "manifest.yml"), manifestBytes, 0644) + require.NoError(t, err) + + fakeInputDir := createFakeInputHelper(t) + inputPkgPaths := map[string]string{"sql": fakeInputDir} + + err = r.bundleDataStreamTemplates(inputPkgPaths, buildRoot) + require.NoError(t, err) + + // Template must be bundled even without a data_streams association in the root manifest. + _, err = buildRoot.ReadFile(filepath.Join(datastreamDir, "agent", "stream", "sql-input.yml.hbs")) + require.NoError(t, err, "template must be bundled when stream references an input package, regardless of data_streams field") + + // The data stream manifest must have template_paths set. + updated, err := buildRoot.ReadFile(filepath.Join(datastreamDir, "manifest.yml")) + require.NoError(t, err) + updatedManifest, err := packages.ReadDataStreamManifestBytes(updated) + require.NoError(t, err) + require.Len(t, updatedManifest.Streams, 1) + assert.Equal(t, []string{"sql-input.yml.hbs"}, updatedManifest.Streams[0].TemplatePaths) +} diff --git a/internal/requiredinputs/testhelpers_test.go b/internal/requiredinputs/testhelpers_test.go new file mode 100644 index 0000000000..fb5a4d8246 --- /dev/null +++ b/internal/requiredinputs/testhelpers_test.go @@ -0,0 +1,67 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package requiredinputs + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" +) + +func createFakeInputHelper(t *testing.T) string { + t.Helper() + // create fake input package with manifest and template file + fakeDownloadedPkgDir := t.TempDir() + inputPkgDir := filepath.Join(fakeDownloadedPkgDir, "sql") + err := os.Mkdir(inputPkgDir, 0755) + require.NoError(t, err) + inputManifestBytes := []byte(`name: sql +version: 0.1.0 +type: input +policy_templates: + - input: sql + template_path: input.yml.hbs +`) + err = os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), inputManifestBytes, 0644) + require.NoError(t, err) + err = os.MkdirAll(filepath.Join(inputPkgDir, "agent", "input"), 0755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(inputPkgDir, "agent", "input", "input.yml.hbs"), []byte("template content"), 0644) + require.NoError(t, err) + return inputPkgDir +} + +func createFakeInputWithMultiplePolicyTemplates(t *testing.T) string { + t.Helper() + fakeDownloadedPkgDir := t.TempDir() + inputPkgDir := filepath.Join(fakeDownloadedPkgDir, "sql") + err := os.Mkdir(inputPkgDir, 0755) + require.NoError(t, err) + // Input package with two policy templates, each declaring a distinct template. 
+ inputManifestBytes := []byte(`name: sql +version: 0.1.0 +type: input +policy_templates: + - input: sql + template_path: input.yml.hbs + - input: sql/metrics + template_paths: + - metrics.yml.hbs + - extra.yml.hbs +`) + err = os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), inputManifestBytes, 0644) + require.NoError(t, err) + err = os.MkdirAll(filepath.Join(inputPkgDir, "agent", "input"), 0755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(inputPkgDir, "agent", "input", "input.yml.hbs"), []byte("input template"), 0644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(inputPkgDir, "agent", "input", "metrics.yml.hbs"), []byte("metrics template"), 0644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(inputPkgDir, "agent", "input", "extra.yml.hbs"), []byte("extra template"), 0644) + require.NoError(t, err) + return inputPkgDir +} diff --git a/internal/requiredinputs/variables.go b/internal/requiredinputs/variables.go new file mode 100644 index 0000000000..1a35a42183 --- /dev/null +++ b/internal/requiredinputs/variables.go @@ -0,0 +1,536 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "fmt" + "io/fs" + "maps" + "os" + "path" + + "github.com/goccy/go-yaml/ast" + "github.com/goccy/go-yaml/parser" + + "github.com/elastic/elastic-package/internal/packages" +) + +// promotedVarScopeKey is the lookup key for composable-side var overrides: required +// input package name plus composable data stream name ("" if the template has no data_streams). 
+type promotedVarScopeKey struct { + refInputPackage string + composableDataStream string +} + +// mergeVariables merges variable definitions from input packages into the +// composable package's manifests (package-level and data-stream-level) under +// buildRoot (manifest.yml and data_stream/*/manifest.yml). +// +// Merging rule: input package vars are the base; composable package override +// fields win when explicitly specified. +// +// Input-level vars: vars declared in policy_templates[].inputs[].vars are +// "promoted" — they become input-level variables in the merged manifest. +// +// Data-stream-level vars: all remaining (non-promoted) base vars are placed at +// the data-stream level, merged with any stream-level overrides the composable +// package declares. +func (r *RequiredInputsResolver) mergeVariables( + manifest *packages.PackageManifest, + inputPkgPaths map[string]string, + buildRoot *os.Root, +) error { + root, err := readYAMLDocFromBuildRoot(buildRoot, "manifest.yml") + if err != nil { + return err + } + + promotedVarOverridesByScope, err := buildPromotedVarOverrideMap(manifest, root) + if err != nil { + return err + } + + if err := mergePolicyTemplateInputLevelVars(manifest, root, inputPkgPaths, promotedVarOverridesByScope); err != nil { + return err + } + + if err := writeFormattedYAMLDoc(buildRoot, "manifest.yml", root); err != nil { + return err + } + + return mergeDataStreamStreamLevelVars(buildRoot, inputPkgPaths, promotedVarOverridesByScope) +} + +// readYAMLDocFromBuildRoot reads relPath from buildRoot, parses it via yamledit, +// and returns the document root as a *ast.MappingNode. 
+func readYAMLDocFromBuildRoot(buildRoot *os.Root, relPath string) (*ast.MappingNode, error) { + b, err := buildRoot.ReadFile(relPath) + if err != nil { + return nil, fmt.Errorf("reading %q: %w", relPath, err) + } + root, err := parseDocumentRootMapping(b) + if err != nil { + return nil, fmt.Errorf("parsing YAML %q: %w", relPath, err) + } + return root, nil +} + +// buildPromotedVarOverrideMap indexes composable policy_templates[].inputs[].vars +// by input package name and data stream scope for use when merging promotions. +func buildPromotedVarOverrideMap(manifest *packages.PackageManifest, root *ast.MappingNode) (map[promotedVarScopeKey]map[string]*ast.MappingNode, error) { + out := make(map[promotedVarScopeKey]map[string]*ast.MappingNode) + for ptIdx, pt := range manifest.PolicyTemplates { + for inputIdx, input := range pt.Inputs { + if input.Package == "" || len(input.Vars) == 0 { + continue + } + + inputNode, err := getInputMappingNode(root, ptIdx, inputIdx) + if err != nil { + return nil, fmt.Errorf("getting input node at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) + } + + overrideNodes, err := readVarNodes(inputNode) + if err != nil { + return nil, fmt.Errorf("reading override var nodes at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) + } + + overrideByName := make(map[string]*ast.MappingNode, len(overrideNodes)) + for _, n := range overrideNodes { + overrideByName[varNodeName(n)] = n + } + + dsNames := pt.DataStreams + if len(dsNames) == 0 { + dsNames = []string{""} + } + for _, dsName := range dsNames { + out[promotedVarScopeKey{refInputPackage: input.Package, composableDataStream: dsName}] = overrideByName + } + } + } + return out, nil +} + +// mergePolicyTemplateInputLevelVars writes merged promoted vars onto each +// package-backed input in the composable manifest YAML (in-memory root mapping). 
+func mergePolicyTemplateInputLevelVars( + manifest *packages.PackageManifest, + root *ast.MappingNode, + inputPkgPaths map[string]string, + promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*ast.MappingNode, +) error { + for ptIdx, pt := range manifest.PolicyTemplates { + for inputIdx, input := range pt.Inputs { + if input.Package == "" { + continue + } + pkgPath, ok := inputPkgPaths[input.Package] + if !ok { + continue + } + + baseVarOrder, baseVarByName, err := loadInputPkgVarNodes(pkgPath) + if err != nil { + return fmt.Errorf("loading input pkg var nodes for %q: %w", input.Package, err) + } + if len(baseVarOrder) == 0 { + continue + } + + promotedOverrides := unionPromotedOverridesForInput(pt, input.Package, promotedVarOverridesByScope) + + inputNode, err := getInputMappingNode(root, ptIdx, inputIdx) + if err != nil { + return fmt.Errorf("getting input node at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) + } + + mergedSeq := mergeInputLevelVarNodes(baseVarOrder, baseVarByName, promotedOverrides) + + if len(mergedSeq.Values) > 0 { + upsertKey(inputNode, "vars", mergedSeq) + } else { + removeKey(inputNode, "vars") + } + } + } + return nil +} + +// unionPromotedOverridesForInput merges override nodes for refInputPackage across +// every data stream listed on the policy template (or "" if none listed). 
+func unionPromotedOverridesForInput(
+	pt packages.PolicyTemplate,
+	refInputPackage string,
+	promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*ast.MappingNode,
+) map[string]*ast.MappingNode {
+	promotedOverrides := make(map[string]*ast.MappingNode)
+	dsNames := pt.DataStreams
+	if len(dsNames) == 0 {
+		// No data_streams listed: fall back to the template-wide scope key ("").
+		dsNames = []string{""}
+	}
+	for _, dsName := range dsNames {
+		// A missing scope key yields a nil map, which maps.Copy treats as a no-op.
+		// On duplicate var names, entries from later data streams overwrite earlier
+		// ones (maps.Copy overwrites existing keys).
+		maps.Copy(promotedOverrides, promotedVarOverridesByScope[promotedVarScopeKey{
+			refInputPackage:      refInputPackage,
+			composableDataStream: dsName,
+		}])
+	}
+	return promotedOverrides
+}
+
+// writeFormattedYAMLDoc serializes root with package YAML formatting and writes it to relPath.
+// The file is written with 0664 permissions (subject to the process umask).
+func writeFormattedYAMLDoc(buildRoot *os.Root, relPath string, root *ast.MappingNode) error {
+	updated, err := formatYAMLNode(root)
+	if err != nil {
+		return fmt.Errorf("formatting updated %q: %w", relPath, err)
+	}
+	if err := buildRoot.WriteFile(relPath, updated, 0664); err != nil {
+		return fmt.Errorf("writing updated %q: %w", relPath, err)
+	}
+	return nil
+}
+
+// mergeDataStreamStreamLevelVars updates stream vars in every data_stream/*/manifest.yml under buildRoot.
+func mergeDataStreamStreamLevelVars( + buildRoot *os.Root, + inputPkgPaths map[string]string, + promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*ast.MappingNode, +) error { + dsManifestPaths, err := fs.Glob(buildRoot.FS(), "data_stream/*/manifest.yml") + if err != nil { + return fmt.Errorf("globbing data stream manifests: %w", err) + } + + for _, manifestPath := range dsManifestPaths { + dsName := path.Base(path.Dir(manifestPath)) + + dsManifestBytes, err := buildRoot.ReadFile(manifestPath) + if err != nil { + return fmt.Errorf("reading data stream manifest %q: %w", manifestPath, err) + } + + dsRoot, err := parseDocumentRootMapping(dsManifestBytes) + if err != nil { + return fmt.Errorf("parsing data stream manifest YAML %q: %w", manifestPath, err) + } + + dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) + if err != nil { + return fmt.Errorf("parsing data stream manifest %q: %w", manifestPath, err) + } + + if err := mergeStreamsInDSManifest(dsRoot, dsManifest, dsName, inputPkgPaths, promotedVarOverridesByScope, manifestPath); err != nil { + return err + } + + if err := writeFormattedYAMLDoc(buildRoot, manifestPath, dsRoot); err != nil { + return fmt.Errorf("data stream manifest %q: %w", manifestPath, err) + } + } + + return nil +} + +// mergeStreamsInDSManifest merges non-promoted input vars into package-backed streams in one DS manifest. 
+func mergeStreamsInDSManifest( + dsRoot *ast.MappingNode, + dsManifest *packages.DataStreamManifest, + dsName string, + inputPkgPaths map[string]string, + promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*ast.MappingNode, + manifestPath string, +) error { + for streamIdx, stream := range dsManifest.Streams { + if stream.Package == "" { + continue + } + pkgPath, ok := inputPkgPaths[stream.Package] + if !ok { + continue + } + + baseVarOrder, baseVarByName, err := loadInputPkgVarNodes(pkgPath) + if err != nil { + return fmt.Errorf("loading input pkg var nodes for %q: %w", stream.Package, err) + } + if len(baseVarOrder) == 0 { + continue + } + + promotedNames := promotedVarNamesForStream(stream.Package, dsName, promotedVarOverridesByScope) + + streamNode, err := getStreamMappingNode(dsRoot, streamIdx) + if err != nil { + return fmt.Errorf("getting stream node at index %d in %q: %w", streamIdx, manifestPath, err) + } + + dsOverrideNodes, err := readVarNodes(streamNode) + if err != nil { + return fmt.Errorf("reading DS override var nodes in %q: %w", manifestPath, err) + } + + if err := checkDuplicateVarNodes(dsOverrideNodes); err != nil { + return fmt.Errorf("duplicate vars in data stream manifest %q: %w", manifestPath, err) + } + + mergedSeq := mergeStreamLevelVarNodes(baseVarOrder, baseVarByName, promotedNames, dsOverrideNodes) + + if len(mergedSeq.Values) > 0 { + upsertKey(streamNode, "vars", mergedSeq) + } else { + removeKey(streamNode, "vars") + } + } + return nil +} + +// promotedVarNamesForStream returns the set of var names promoted for this stream: +// overrides for (refInputPackage, composableDataStream) plus template-wide (refInputPackage, ""). 
+func promotedVarNamesForStream( + refInputPackage, composableDataStream string, + promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*ast.MappingNode, +) map[string]bool { + promotedNames := make(map[string]bool) + for _, key := range []promotedVarScopeKey{ + {refInputPackage: refInputPackage, composableDataStream: composableDataStream}, + {refInputPackage: refInputPackage, composableDataStream: ""}, + } { + for varName := range promotedVarOverridesByScope[key] { + promotedNames[varName] = true + } + } + return promotedNames +} + +// loadInputPkgVarNodes opens the input package at pkgPath, reads all vars from +// all policy templates (dedup by name, first wins) and returns them as an +// ordered slice and a name→node lookup map. +func loadInputPkgVarNodes(pkgPath string) ([]string, map[string]*ast.MappingNode, error) { + pkgFS, closeFn, err := openPackageFS(pkgPath) + if err != nil { + return nil, nil, fmt.Errorf("opening package: %w", err) + } + defer func() { _ = closeFn() }() + + manifestBytes, err := fs.ReadFile(pkgFS, packages.PackageManifestFile) + if err != nil { + return nil, nil, fmt.Errorf("reading manifest: %w", err) + } + + f, err := parser.ParseBytes(manifestBytes, 0) + if err != nil { + return nil, nil, fmt.Errorf("parsing manifest YAML: %w", err) + } + if len(f.Docs) == 0 || f.Docs[0] == nil { + return nil, nil, nil + } + root, ok := f.Docs[0].Body.(*ast.MappingNode) + if !ok { + return nil, nil, fmt.Errorf("expected mapping node at document root") + } + + policyTemplatesNode, ok := mappingValue(root, "policy_templates").(*ast.SequenceNode) + if !ok { + return nil, nil, nil + } + + order := make([]string, 0) + byName := make(map[string]*ast.MappingNode) + + for _, ptNode := range policyTemplatesNode.Values { + ptMapping, ok := ptNode.(*ast.MappingNode) + if !ok { + continue + } + varsNode, ok := mappingValue(ptMapping, "vars").(*ast.SequenceNode) + if !ok { + continue + } + for _, varNode := range varsNode.Values { + varMapping, ok := 
varNode.(*ast.MappingNode) + if !ok { + continue + } + name := varNodeName(varMapping) + if name == "" || byName[name] != nil { + continue // skip empty names and duplicates (first wins) + } + order = append(order, name) + byName[name] = varMapping + } + } + + return order, byName, nil +} + +// mergeInputLevelVarNodes returns a sequence node containing only the promoted +// vars (those in promotedOverrides), each merged with the override fields. +// Order follows baseVarOrder (input package declaration order). +func mergeInputLevelVarNodes( + baseVarOrder []string, + baseVarByName map[string]*ast.MappingNode, + promotedOverrides map[string]*ast.MappingNode, +) *ast.SequenceNode { + seqNode := newSeqNode() + for _, varName := range baseVarOrder { + overrideNode, promoted := promotedOverrides[varName] + if !promoted { + continue + } + merged := mergeVarNode(baseVarByName[varName], overrideNode) + seqNode.Values = append(seqNode.Values, merged) + } + return seqNode +} + +// mergeStreamLevelVarNodes returns a sequence node containing: +// 1. Non-promoted base vars (in input package order), merged with any DS +// override where names match. +// 2. Novel DS vars (names not in baseVarByName) appended in their declaration +// order. +func mergeStreamLevelVarNodes( + baseVarOrder []string, + baseVarByName map[string]*ast.MappingNode, + promotedNames map[string]bool, + dsOverrides []*ast.MappingNode, +) *ast.SequenceNode { + dsOverrideByName := make(map[string]*ast.MappingNode, len(dsOverrides)) + for _, v := range dsOverrides { + dsOverrideByName[varNodeName(v)] = v + } + + seqNode := newSeqNode() + + // Non-promoted base vars first (in input pkg order). 
+	for _, varName := range baseVarOrder {
+		if promotedNames[varName] {
+			continue
+		}
+		baseNode := baseVarByName[varName]
+		overrideNode, hasOverride := dsOverrideByName[varName]
+		var merged *ast.MappingNode
+		if hasOverride {
+			merged = mergeVarNode(baseNode, overrideNode)
+		} else {
+			merged = cloneNode(baseNode).(*ast.MappingNode)
+		}
+		seqNode.Values = append(seqNode.Values, merged)
+	}
+
+	// Novel DS vars (not present in base) appended in declaration order.
+	for _, v := range dsOverrides {
+		if _, inBase := baseVarByName[varNodeName(v)]; !inBase {
+			seqNode.Values = append(seqNode.Values, cloneNode(v).(*ast.MappingNode))
+		}
+	}
+
+	return seqNode
+}
+
+// mergeVarNode merges fields from overrideNode into a clone of baseNode.
+// All keys in override win; absent keys in override are inherited from base.
+// The "name" key is always preserved from base.
+func mergeVarNode(base, override *ast.MappingNode) *ast.MappingNode {
+	result := cloneNode(base).(*ast.MappingNode)
+	for _, kv := range override.Values {
+		// Render the key once per entry instead of calling String() twice.
+		key := kv.Key.String()
+		if key == "name" {
+			continue // always preserve name from base
+		}
+		upsertKey(result, key, cloneNode(kv.Value))
+	}
+	return result
+}
+
+// checkDuplicateVarNodes returns an error if any var name appears more than
+// once in the provided nodes.
+func checkDuplicateVarNodes(varNodes []*ast.MappingNode) error {
+	seen := make(map[string]bool, len(varNodes))
+	for _, v := range varNodes {
+		name := varNodeName(v)
+		if seen[name] {
+			return fmt.Errorf("duplicate variable %q", name)
+		}
+		seen[name] = true
+	}
+	return nil
+}
+
+// varNodeName extracts the value of the "name" key from a var mapping node.
+func varNodeName(v *ast.MappingNode) string {
+	return nodeStringValue(mappingValue(v, "name"))
+}
+
+// readVarNodes extracts the individual var mapping nodes from the "vars"
+// sequence of the given mapping node. Returns nil if no "vars" key is present.
+func readVarNodes(mappingNode *ast.MappingNode) ([]*ast.MappingNode, error) {
+	// Single lookup: distinguish "key absent" (nil, nil) from "key present but
+	// not a sequence" (error) without querying the mapping twice.
+	varsVal := mappingValue(mappingNode, "vars")
+	if varsVal == nil {
+		return nil, nil
+	}
+	varsSeq, ok := varsVal.(*ast.SequenceNode)
+	if !ok {
+		return nil, fmt.Errorf("'vars' is not a sequence node")
+	}
+	result := make([]*ast.MappingNode, 0, len(varsSeq.Values))
+	for _, item := range varsSeq.Values {
+		mn, ok := item.(*ast.MappingNode)
+		if !ok {
+			return nil, fmt.Errorf("var entry is not a mapping node")
+		}
+		result = append(result, mn)
+	}
+	return result, nil
+}
+
+// getInputMappingNode navigates to policy_templates[ptIdx].inputs[inputIdx] in
+// the given YAML root mapping and returns the input mapping node.
+func getInputMappingNode(root *ast.MappingNode, ptIdx, inputIdx int) (*ast.MappingNode, error) {
+	ptsNode, ok := mappingValue(root, "policy_templates").(*ast.SequenceNode)
+	if !ok {
+		return nil, fmt.Errorf("'policy_templates' not found or not a sequence")
+	}
+	if ptIdx < 0 || ptIdx >= len(ptsNode.Values) {
+		return nil, fmt.Errorf("policy template index %d out of range (len=%d)", ptIdx, len(ptsNode.Values))
+	}
+
+	ptNode, ok := ptsNode.Values[ptIdx].(*ast.MappingNode)
+	if !ok {
+		return nil, fmt.Errorf("policy template %d is not a mapping", ptIdx)
+	}
+
+	inputsNode, ok := mappingValue(ptNode, "inputs").(*ast.SequenceNode)
+	if !ok {
+		return nil, fmt.Errorf("'inputs' not found or not a sequence in policy template %d", ptIdx)
+	}
+	if inputIdx < 0 || inputIdx >= len(inputsNode.Values) {
+		return nil, fmt.Errorf("input index %d out of range (len=%d)", inputIdx, len(inputsNode.Values))
+	}
+
+	inputNode, ok := inputsNode.Values[inputIdx].(*ast.MappingNode)
+	if !ok {
+		return nil, fmt.Errorf("input %d is not a mapping", inputIdx)
+	}
+
+	return inputNode, nil
+}
+
+// getStreamMappingNode navigates to streams[streamIdx] in the given YAML
+// root mapping and returns the stream mapping node.
+func getStreamMappingNode(root *ast.MappingNode, streamIdx int) (*ast.MappingNode, error) {
+	// Guard clauses: resolve the sequence, bounds-check, then assert the entry.
+	seq, ok := mappingValue(root, "streams").(*ast.SequenceNode)
+	if !ok {
+		return nil, fmt.Errorf("'streams' not found or not a sequence")
+	}
+	if streamIdx < 0 || streamIdx >= len(seq.Values) {
+		return nil, fmt.Errorf("stream index %d out of range (len=%d)", streamIdx, len(seq.Values))
+	}
+	entry, ok := seq.Values[streamIdx].(*ast.MappingNode)
+	if !ok {
+		return nil, fmt.Errorf("stream %d is not a mapping", streamIdx)
+	}
+	return entry, nil
+}
diff --git a/internal/requiredinputs/variables_test.go b/internal/requiredinputs/variables_test.go
new file mode 100644
index 0000000000..1105c4486c
--- /dev/null
+++ b/internal/requiredinputs/variables_test.go
@@ -0,0 +1,657 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License;
+// you may not use this file except in compliance with the Elastic License.
+
+package requiredinputs
+
+import (
+	"os"
+	"path/filepath"
+	"testing"
+
+	"github.com/goccy/go-yaml/ast"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+
+	"github.com/elastic/elastic-package/internal/packages"
+	"github.com/elastic/elastic-package/internal/yamledit"
+)
+
+// ---- helpers -----------------------------------------------------------------
+
+// varNode builds a minimal *ast.MappingNode representing a variable with the
+// given name and extra key=value pairs (passed as alternating key, value
+// strings for simple scalar values).
+func varNode(name string, extras ...string) *ast.MappingNode {
+	n := &ast.MappingNode{BaseNode: &ast.BaseNode{}}
+	upsertKey(n, "name", strVal(name))
+	// extras are consumed pairwise; a trailing odd element is ignored.
+	for i := 0; i+1 < len(extras); i += 2 {
+		upsertKey(n, extras[i], strVal(extras[i+1]))
+	}
+	return n
+}
+
+// copyFixturePackage copies the named package from test/manual_packages/required_inputs
+// to a fresh temp dir and returns that dir path.
+func copyFixturePackage(t *testing.T, fixtureName string) string {
+	t.Helper()
+	srcPath := filepath.Join("..", "..", "test", "manual_packages", "required_inputs", fixtureName)
+	destPath := t.TempDir()
+	err := os.CopyFS(destPath, os.DirFS(srcPath))
+	require.NoError(t, err, "copying fixture package %q", fixtureName)
+	return destPath
+}
+
+// ciInputFixturePath returns the path to test/manual_packages/composable/01_ci_input_pkg (repository-relative from this package).
+func ciInputFixturePath() string {
+	return filepath.Join("..", "..", "test", "manual_packages", "composable", "01_ci_input_pkg")
+}
+
+// copyComposableIntegrationFixture copies test/manual_packages/composable/02_ci_composable_integration for integration tests.
+// The copy is made into a fresh temp dir so Bundle can mutate it freely.
+func copyComposableIntegrationFixture(t *testing.T) string {
+	t.Helper()
+	srcPath := filepath.Join("..", "..", "test", "manual_packages", "composable", "02_ci_composable_integration")
+	destPath := t.TempDir()
+	err := os.CopyFS(destPath, os.DirFS(srcPath))
+	require.NoError(t, err, "copying composable CI integration fixture")
+	return destPath
+}
+
+// Variable merge tests exercise mergeVariables (see variables.go): when an
+// integration declares requires.input and references that input package under
+// policy_templates[].inputs with optional vars, definitions from the input
+// package must be merged into the built integration—composable and data-stream
+// overrides on top of the input package as base, with selected vars promoted
+// to input-level. Unit tests below cover the helpers; integration tests
+// exercise Bundle on manual fixture packages.
+
+// ---- unit tests --------------------------------------------------------------
+
+// TestCloneNode checks that YAML variable nodes are deep-cloned before merge.
+// mergeVariables mutates cloned trees when applying overrides; without
+// isolation, the resolver could corrupt cached or shared input-package nodes.
+func TestCloneNode(t *testing.T) {
+	original := varNode("paths", "type", "text", "multi", "true")
+	cloned := cloneNode(original).(*ast.MappingNode)
+
+	// Mutating the clone must not affect the original.
+	upsertKey(cloned, "type", strVal("keyword"))
+	assert.Equal(t, "text", nodeStringValue(mappingValue(original, "type")))
+}
+
+// TestMergeVarNode verifies mergeVarNode: per-variable field merge where the
+// input package definition is the base and override keys from the composable
+// package or data stream replace or add fields; the variable name always stays
+// from the base. This is the primitive used for both promoted input vars and
+// stream-level merges.
+func TestMergeVarNode(t *testing.T) {
+	// Base definition as it would come from the input package manifest.
+	base := varNode("paths", "type", "text", "title", "Paths", "multi", "true")
+
+	t.Run("full override", func(t *testing.T) {
+		override := varNode("paths", "type", "keyword", "title", "Custom Paths", "multi", "false")
+		merged := mergeVarNode(base, override)
+		assert.Equal(t, "paths", varNodeName(merged))
+		assert.Equal(t, "keyword", nodeStringValue(mappingValue(merged, "type")))
+		assert.Equal(t, "Custom Paths", nodeStringValue(mappingValue(merged, "title")))
+		assert.Equal(t, "false", nodeStringValue(mappingValue(merged, "multi")))
+	})
+
+	t.Run("partial override", func(t *testing.T) {
+		override := varNode("paths", "title", "My Paths")
+		merged := mergeVarNode(base, override)
+		assert.Equal(t, "paths", varNodeName(merged))
+		assert.Equal(t, "text", nodeStringValue(mappingValue(merged, "type"))) // from base
+		assert.Equal(t, "My Paths", nodeStringValue(mappingValue(merged, "title")))
+		assert.Equal(t, "true", nodeStringValue(mappingValue(merged, "multi"))) // from base
+	})
+
+	t.Run("empty override", func(t *testing.T) {
+		override := varNode("paths")
+		merged := mergeVarNode(base, override)
+		assert.Equal(t, "paths", varNodeName(merged))
+		assert.Equal(t, "text", nodeStringValue(mappingValue(merged, "type")))   // from base
+		assert.Equal(t, "Paths", nodeStringValue(mappingValue(merged, "title"))) // from base
+	})
+
+	t.Run("name not renamed", func(t *testing.T) {
+		// Even if the override specifies a different name value, base name wins.
+		// Built via the varNode helper for consistency with the other subtests
+		// (the helper also sets BaseNode, unlike a bare &ast.MappingNode{}).
+		override := varNode("should-be-ignored", "type", "keyword")
+		merged := mergeVarNode(base, override)
+		assert.Equal(t, "paths", varNodeName(merged))
+	})
+
+	t.Run("adds new field from override", func(t *testing.T) {
+		override := varNode("paths", "description", "My description")
+		merged := mergeVarNode(base, override)
+		assert.Equal(t, "My description", nodeStringValue(mappingValue(merged, "description")))
+		assert.Equal(t, "text", nodeStringValue(mappingValue(merged, "type"))) // base preserved
+	})
+}
+
+// TestCheckDuplicateVarNodes ensures duplicate var names in a single vars list
+// are rejected before merge. That catches invalid integration manifests early
+// instead of producing ambiguous merged output for Fleet.
+func TestCheckDuplicateVarNodes(t *testing.T) {
+	t.Run("no duplicates", func(t *testing.T) {
+		nodes := []*ast.MappingNode{varNode("paths"), varNode("encoding"), varNode("timeout")}
+		assert.NoError(t, checkDuplicateVarNodes(nodes))
+	})
+
+	t.Run("one duplicate", func(t *testing.T) {
+		nodes := []*ast.MappingNode{varNode("paths"), varNode("encoding"), varNode("paths")}
+		err := checkDuplicateVarNodes(nodes)
+		require.Error(t, err)
+		assert.Contains(t, err.Error(), "paths")
+	})
+
+	t.Run("empty slice", func(t *testing.T) {
+		assert.NoError(t, checkDuplicateVarNodes(nil))
+	})
+}
+
+// TestMergeInputLevelVarNodes covers mergeInputLevelVarNodes: vars that appear
+// under policy_templates[].inputs[] next to package: are promoted
+// to merged input-level var definitions, in input-package declaration order,
+// with only explicitly listed names included.
+func TestMergeInputLevelVarNodes(t *testing.T) {
+	// Shared base definitions mimicking an input package's manifest vars.
+	pathsBase := varNode("paths", "type", "text", "multi", "true")
+	encodingBase := varNode("encoding", "type", "text", "show_user", "false")
+	timeoutBase := varNode("timeout", "type", "text", "default", "30s")
+
+	baseOrder := []string{"paths", "encoding", "timeout"}
+	baseByName := map[string]*ast.MappingNode{
+		"paths":    pathsBase,
+		"encoding": encodingBase,
+		"timeout":  timeoutBase,
+	}
+
+	t.Run("empty promoted → empty sequence", func(t *testing.T) {
+		seq := mergeInputLevelVarNodes(baseOrder, baseByName, map[string]*ast.MappingNode{})
+		assert.Empty(t, seq.Values)
+	})
+
+	t.Run("one promoted partial override", func(t *testing.T) {
+		promotedOverrides := map[string]*ast.MappingNode{
+			"paths": varNode("paths", "default", "/var/log/custom/*.log"),
+		}
+		seq := mergeInputLevelVarNodes(baseOrder, baseByName, promotedOverrides)
+		require.Len(t, seq.Values, 1)
+		item := seq.Values[0].(*ast.MappingNode)
+		assert.Equal(t, "paths", varNodeName(item))
+		assert.Equal(t, "/var/log/custom/*.log", nodeStringValue(mappingValue(item, "default")))
+		assert.Equal(t, "text", nodeStringValue(mappingValue(item, "type"))) // from base
+	})
+
+	t.Run("multiple promoted in base order", func(t *testing.T) {
+		promotedOverrides := map[string]*ast.MappingNode{
+			"timeout":  varNode("timeout", "default", "60s"),
+			"encoding": varNode("encoding", "show_user", "true"),
+		}
+		seq := mergeInputLevelVarNodes(baseOrder, baseByName, promotedOverrides)
+		require.Len(t, seq.Values, 2)
+		// Order must follow baseOrder: encoding before timeout.
+		item0 := seq.Values[0].(*ast.MappingNode)
+		item1 := seq.Values[1].(*ast.MappingNode)
+		assert.Equal(t, "encoding", varNodeName(item0))
+		assert.Equal(t, "timeout", varNodeName(item1))
+		assert.Equal(t, "true", nodeStringValue(mappingValue(item0, "show_user")))
+		assert.Equal(t, "60s", nodeStringValue(mappingValue(item1, "default")))
+	})
+}
+
+// TestMergeStreamLevelVarNodes covers mergeStreamLevelVarNodes: base vars from
+// the input package that are not promoted stay on the data stream stream entry;
+// they can be field-merged with DS overrides, and DS-only vars are appended.
+// Promoted names must not appear on the stream to avoid duplicating Fleet vars.
+func TestMergeStreamLevelVarNodes(t *testing.T) {
+	// Same base fixtures as the input-level test above.
+	pathsBase := varNode("paths", "type", "text", "multi", "true")
+	encodingBase := varNode("encoding", "type", "text", "show_user", "false")
+	timeoutBase := varNode("timeout", "type", "text", "default", "30s")
+
+	baseOrder := []string{"paths", "encoding", "timeout"}
+	baseByName := map[string]*ast.MappingNode{
+		"paths":    pathsBase,
+		"encoding": encodingBase,
+		"timeout":  timeoutBase,
+	}
+
+	t.Run("no promoted, no overrides → all base vars", func(t *testing.T) {
+		seq := mergeStreamLevelVarNodes(baseOrder, baseByName, nil, nil)
+		require.Len(t, seq.Values, 3)
+		assert.Equal(t, "paths", varNodeName(seq.Values[0].(*ast.MappingNode)))
+		assert.Equal(t, "encoding", varNodeName(seq.Values[1].(*ast.MappingNode)))
+		assert.Equal(t, "timeout", varNodeName(seq.Values[2].(*ast.MappingNode)))
+	})
+
+	t.Run("some promoted → promoted excluded", func(t *testing.T) {
+		promoted := map[string]bool{"paths": true, "encoding": true}
+		seq := mergeStreamLevelVarNodes(baseOrder, baseByName, promoted, nil)
+		require.Len(t, seq.Values, 1)
+		assert.Equal(t, "timeout", varNodeName(seq.Values[0].(*ast.MappingNode)))
+	})
+
+	t.Run("DS override on existing base var", func(t *testing.T) {
+		dsOverrides := []*ast.MappingNode{varNode("encoding", "show_user", "true")}
+		seq := mergeStreamLevelVarNodes(baseOrder, baseByName, nil, dsOverrides)
+		require.Len(t, seq.Values, 3)
+		// encoding is merged
+		encodingMerged := seq.Values[1].(*ast.MappingNode)
+		assert.Equal(t, "encoding", varNodeName(encodingMerged))
+		assert.Equal(t, "true", nodeStringValue(mappingValue(encodingMerged, "show_user")))
+		assert.Equal(t, "text", nodeStringValue(mappingValue(encodingMerged, "type"))) // from base
+	})
+
+	t.Run("novel DS var appended", func(t *testing.T) {
+		dsOverrides := []*ast.MappingNode{varNode("custom_tag", "type", "text")}
+		seq := mergeStreamLevelVarNodes(baseOrder, baseByName, nil, dsOverrides)
+		require.Len(t, seq.Values, 4) // 3 base + 1 novel
+		assert.Equal(t, "custom_tag", varNodeName(seq.Values[3].(*ast.MappingNode)))
+	})
+
+	t.Run("mixed: promoted + DS merge + novel", func(t *testing.T) {
+		promoted := map[string]bool{"paths": true}
+		dsOverrides := []*ast.MappingNode{
+			varNode("encoding", "show_user", "true"),
+			varNode("custom_tag", "type", "text"),
+		}
+		seq := mergeStreamLevelVarNodes(baseOrder, baseByName, promoted, dsOverrides)
+		// paths excluded (promoted); encoding merged; timeout base; custom_tag novel
+		require.Len(t, seq.Values, 3)
+		item0 := seq.Values[0].(*ast.MappingNode)
+		item1 := seq.Values[1].(*ast.MappingNode)
+		item2 := seq.Values[2].(*ast.MappingNode)
+		assert.Equal(t, "encoding", varNodeName(item0))
+		assert.Equal(t, "true", nodeStringValue(mappingValue(item0, "show_user")))
+		assert.Equal(t, "timeout", varNodeName(item1))
+		assert.Equal(t, "custom_tag", varNodeName(item2))
+	})
+}
+
+// TestLoadInputPkgVarNodes checks loadInputPkgVarNodes: variable definitions
+// are loaded from the resolved input package manifest so mergeVariables uses
+// the input package as the authoritative base (order and fields) for merging.
+func TestLoadInputPkgVarNodes(t *testing.T) {
+	t.Run("fixture with three vars", func(t *testing.T) {
+		pkgPath := ciInputFixturePath()
+		order, byName, err := loadInputPkgVarNodes(pkgPath)
+		require.NoError(t, err)
+		// Declaration order from the fixture manifest must be preserved.
+		assert.Equal(t, []string{"paths", "encoding", "timeout"}, order)
+		assert.Equal(t, "text", nodeStringValue(mappingValue(byName["paths"], "type")))
+		assert.Equal(t, "text", nodeStringValue(mappingValue(byName["encoding"], "type")))
+		assert.Equal(t, "text", nodeStringValue(mappingValue(byName["timeout"], "type")))
+	})
+
+	t.Run("package with no vars", func(t *testing.T) {
+		// Use the fake input helper which has no vars in its manifest.
+		pkgPath := createFakeInputHelper(t)
+		order, byName, err := loadInputPkgVarNodes(pkgPath)
+		require.NoError(t, err)
+		assert.Empty(t, order)
+		assert.Empty(t, byName)
+	})
+}
+
+// TestPromotedVarNamesForStream_UnionsScopedAndTemplateWide verifies that when
+// resolving which base vars are promoted off a data stream, overrides keyed by
+// (input package, composable data stream) are unioned with overrides keyed by
+// (input package, "") so template-wide promotions still apply to named streams.
+func TestPromotedVarNamesForStream_UnionsScopedAndTemplateWide(t *testing.T) {
+	const refPkg = "ci_input_pkg"
+	dsScoped := varNode("paths", "type", "text")
+	templateWide := varNode("encoding", "type", "text")
+
+	byScope := map[promotedVarScopeKey]map[string]*ast.MappingNode{
+		{refInputPackage: refPkg, composableDataStream: "my_logs"}: {
+			"paths": dsScoped,
+		},
+		{refInputPackage: refPkg, composableDataStream: ""}: {
+			"encoding": templateWide,
+		},
+	}
+
+	names := promotedVarNamesForStream(refPkg, "my_logs", byScope)
+	assert.True(t, names["paths"])
+	assert.True(t, names["encoding"])
+	assert.False(t, names["timeout"])
+}
+
+// TestUnionPromotedOverridesForInput_MergesOverridesAcrossDataStreams checks
+// unionPromotedOverridesForInput: a policy template listing several data streams
+// must merge composable-side override nodes from every listed stream so
+// input-level promotion sees the full set of vars declared anywhere on that
+// template for the referenced input package.
+func TestUnionPromotedOverridesForInput_MergesOverridesAcrossDataStreams(t *testing.T) {
+	const refPkg = "ci_input_pkg"
+	paths := varNode("paths", "title", "P")
+	encoding := varNode("encoding", "title", "E")
+
+	byScope := map[promotedVarScopeKey]map[string]*ast.MappingNode{
+		{refInputPackage: refPkg, composableDataStream: "ds_a"}: {"paths": paths},
+		{refInputPackage: refPkg, composableDataStream: "ds_b"}: {"encoding": encoding},
+	}
+
+	pt := packages.PolicyTemplate{
+		Name:        "pt",
+		DataStreams: []string{"ds_a", "ds_b"},
+	}
+
+	got := unionPromotedOverridesForInput(pt, refPkg, byScope)
+	require.Len(t, got, 2)
+	// assert.Same: the union must carry the original nodes, not copies.
+	assert.Same(t, paths, got["paths"])
+	assert.Same(t, encoding, got["encoding"])
+}
+
+// TestBuildPromotedVarOverrideMap_PerDataStreamScopes builds the promoted
+// override index from aligned manifest + YAML: each composable data stream
+// listed under a policy template gets its own scope entry so downstream merge
+// can distinguish stream-specific composable vars.
+func TestBuildPromotedVarOverrideMap_PerDataStreamScopes(t *testing.T) {
+	manifestYAML := []byte(`format_version: 3.0.0
+name: scope_test
+title: Scope test
+version: 0.1.0
+type: integration
+policy_templates:
+  - name: logs
+    title: Logs
+    data_streams:
+      - ds_alpha
+      - ds_beta
+    inputs:
+      - package: ref_pkg
+        vars:
+          - name: paths
+            type: text
+            title: Promoted paths
+`)
+
+	// Parse the same bytes twice: once as an editable AST, once as the typed
+	// manifest, since buildPromotedVarOverrideMap aligns the two views.
+	doc, err := yamledit.NewDocumentBytes(manifestYAML)
+	require.NoError(t, err)
+	root := doc.AST().Docs[0].Body.(*ast.MappingNode)
+	m, err := packages.ReadPackageManifestBytes(manifestYAML)
+	require.NoError(t, err)
+
+	idx, err := buildPromotedVarOverrideMap(m, root)
+	require.NoError(t, err)
+
+	keyAlpha := promotedVarScopeKey{refInputPackage: "ref_pkg", composableDataStream: "ds_alpha"}
+	keyBeta := promotedVarScopeKey{refInputPackage: "ref_pkg", composableDataStream: "ds_beta"}
+	require.Contains(t, idx, keyAlpha)
+	require.Contains(t, idx, keyBeta)
+	assert.Contains(t, idx[keyAlpha], "paths")
+	assert.Contains(t, idx[keyBeta], "paths")
+	assert.Equal(t, "Promoted paths", nodeStringValue(mappingValue(idx[keyAlpha]["paths"], "title")))
+}
+
+// TestBuildPromotedVarOverrideMap_NoDataStreamsUsesEmptyScope verifies that a
+// policy template without data_streams still records promoted overrides under
+// composableDataStream "", matching how streams are matched when the template is
+// not scoped to named data streams.
+func TestBuildPromotedVarOverrideMap_NoDataStreamsUsesEmptyScope(t *testing.T) {
+	manifestYAML := []byte(`format_version: 3.0.0
+name: scope_test2
+title: Scope test 2
+version: 0.1.0
+type: integration
+policy_templates:
+  - name: logs
+    title: Logs
+    inputs:
+      - package: ref_pkg
+        vars:
+          - name: paths
+            type: text
+`)
+
+	doc, err := yamledit.NewDocumentBytes(manifestYAML)
+	require.NoError(t, err)
+	root := doc.AST().Docs[0].Body.(*ast.MappingNode)
+	m, err := packages.ReadPackageManifestBytes(manifestYAML)
+	require.NoError(t, err)
+
+	idx, err := buildPromotedVarOverrideMap(m, root)
+	require.NoError(t, err)
+
+	key := promotedVarScopeKey{refInputPackage: "ref_pkg", composableDataStream: ""}
+	require.Contains(t, idx, key)
+	assert.Contains(t, idx[key], "paths")
+}
+
+// ---- integration tests -------------------------------------------------------
+
+// makeFakeEprForVarMerging supplies the ci_input_pkg fixture path as if it were
+// downloaded from the registry, so integration tests do not need a running stack.
+func makeFakeEprForVarMerging(t *testing.T) *fakeEprClient {
+	t.Helper()
+	inputPkgPath := ciInputFixturePath()
+	return &fakeEprClient{
+		// Ignore name/version/tmpDir and always hand back the local fixture.
+		downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) {
+			return inputPkgPath, nil
+		},
+	}
+}
+
+// TestMergeVariables_Full runs the full merge pipeline: composable vars under
+// the package input promote paths and encoding to manifest input-level defs
+// (merged with input package defaults), while timeout stays on the data stream
+// merged with a DS override and a novel DS-only var is appended—matching the
+// end state Fleet expects for a mixed promotion + DS customization scenario.
+func TestMergeVariables_Full(t *testing.T) {
+	buildPackageRoot := copyComposableIntegrationFixture(t)
+	resolver := NewRequiredInputsResolver(makeFakeEprForVarMerging(t))
+
+	err := resolver.Bundle(buildPackageRoot)
+	require.NoError(t, err)
+
+	// Check package manifest: first input (package ref) should have 2 vars (paths, encoding).
+	manifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "manifest.yml"))
+	require.NoError(t, err)
+	manifest, err := packages.ReadPackageManifestBytes(manifestBytes)
+	require.NoError(t, err)
+
+	inputVars := manifest.PolicyTemplates[0].Inputs[0].Vars
+	require.Len(t, inputVars, 2)
+	assert.Equal(t, "paths", inputVars[0].Name)
+	assert.Equal(t, "encoding", inputVars[1].Name)
+
+	// paths: base fields preserved, default overridden.
+	assert.Equal(t, "text", inputVars[0].Type)
+	require.NotNil(t, inputVars[0].Default)
+
+	// encoding: show_user overridden to true.
+	assert.True(t, inputVars[1].ShowUser)
+	assert.Equal(t, "text", inputVars[1].Type)
+
+	// Check DS manifest: streams[0] should have 2 vars (timeout, custom_tag).
+	dsManifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "manifest.yml"))
+	require.NoError(t, err)
+	dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes)
+	require.NoError(t, err)
+
+	streamVars := dsManifest.Streams[0].Vars
+	require.Len(t, streamVars, 2)
+	assert.Equal(t, "timeout", streamVars[0].Name)
+	assert.Equal(t, "custom_tag", streamVars[1].Name)
+
+	// timeout: merged from base + DS override (description).
+	assert.Equal(t, "text", streamVars[0].Type)
+	assert.Equal(t, "Timeout for log collection.", streamVars[0].Description)
+}
+
+// TestMergeVariables_PromotesToInput verifies partial promotion: only vars
+// listed under the composable input move to input level; remaining input
+// package vars stay on the stream unchanged when the data stream supplies no
+// overrides.
+func TestMergeVariables_PromotesToInput(t *testing.T) {
+	buildPackageRoot := copyFixturePackage(t, "with_merging_promotes_to_input")
+	resolver := NewRequiredInputsResolver(makeFakeEprForVarMerging(t))
+
+	err := resolver.Bundle(buildPackageRoot)
+	require.NoError(t, err)
+
+	manifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "manifest.yml"))
+	require.NoError(t, err)
+	manifest, err := packages.ReadPackageManifestBytes(manifestBytes)
+	require.NoError(t, err)
+
+	// Input should have 1 var: paths (promoted, merged with composable override).
+	inputVars := manifest.PolicyTemplates[0].Inputs[0].Vars
+	require.Len(t, inputVars, 1)
+	assert.Equal(t, "paths", inputVars[0].Name)
+	assert.Equal(t, "text", inputVars[0].Type) // from base
+
+	// DS should have 2 vars: encoding and timeout (both from base, no DS overrides).
+	dsManifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "var_merging_logs", "manifest.yml"))
+	require.NoError(t, err)
+	dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes)
+	require.NoError(t, err)
+
+	streamVars := dsManifest.Streams[0].Vars
+	require.Len(t, streamVars, 2)
+	assert.Equal(t, "encoding", streamVars[0].Name)
+	assert.Equal(t, "timeout", streamVars[1].Name)
+}
+
+// TestMergeVariables_DsMerges covers the case where the composable input
+// declares no vars (nothing promoted): all base vars remain on the stream, the
+// data stream manifest can merge fields into an existing base var (e.g. title),
+// and extra stream-only vars are kept in declaration order after base vars.
+func TestMergeVariables_DsMerges(t *testing.T) {
+	buildPackageRoot := copyFixturePackage(t, "with_merging_ds_merges")
+	resolver := NewRequiredInputsResolver(makeFakeEprForVarMerging(t))
+
+	err := resolver.Bundle(buildPackageRoot)
+	require.NoError(t, err)
+
+	manifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "manifest.yml"))
+	require.NoError(t, err)
+	manifest, err := packages.ReadPackageManifestBytes(manifestBytes)
+	require.NoError(t, err)
+
+	// No input-level vars (nothing promoted).
+	assert.Empty(t, manifest.PolicyTemplates[0].Inputs[0].Vars)
+
+	// DS should have 4 vars: paths, encoding (merged), timeout, custom_tag.
+	dsManifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "var_merging_logs", "manifest.yml"))
+	require.NoError(t, err)
+	dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes)
+	require.NoError(t, err)
+
+	streamVars := dsManifest.Streams[0].Vars
+	require.Len(t, streamVars, 4)
+	assert.Equal(t, "paths", streamVars[0].Name)
+	assert.Equal(t, "encoding", streamVars[1].Name)
+	assert.Equal(t, "timeout", streamVars[2].Name)
+	assert.Equal(t, "custom_tag", streamVars[3].Name)
+
+	// encoding: title overridden.
+	assert.Equal(t, "Log Encoding Override", streamVars[1].Title)
+	assert.Equal(t, "text", streamVars[1].Type) // from base
+}
+
+// TestMergeVariables_NoOverride ensures that when the integration does not
+// specify composable or data-stream var overrides, merge still materializes
+// input package var definitions onto the stream (cloned base) so behavior stays
+// correct for packages that only declare requires.input without local var edits.
+func TestMergeVariables_NoOverride(t *testing.T) {
+	buildPackageRoot := copyFixturePackage(t, "with_merging_no_override")
+	resolver := NewRequiredInputsResolver(makeFakeEprForVarMerging(t))
+
+	err := resolver.Bundle(buildPackageRoot)
+	require.NoError(t, err)
+
+	manifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "manifest.yml"))
+	require.NoError(t, err)
+	manifest, err := packages.ReadPackageManifestBytes(manifestBytes)
+	require.NoError(t, err)
+
+	// No input-level vars.
+	assert.Empty(t, manifest.PolicyTemplates[0].Inputs[0].Vars)
+
+	// DS should have 3 vars: all from base, unmodified.
+	dsManifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "var_merging_logs", "manifest.yml"))
+	require.NoError(t, err)
+	dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes)
+	require.NoError(t, err)
+
+	streamVars := dsManifest.Streams[0].Vars
+	require.Len(t, streamVars, 3)
+	assert.Equal(t, "paths", streamVars[0].Name)
+	assert.Equal(t, "encoding", streamVars[1].Name)
+	assert.Equal(t, "timeout", streamVars[2].Name)
+
+	// Base fields preserved.
+	assert.Equal(t, "text", streamVars[0].Type)
+	assert.True(t, streamVars[0].Multi)
+	assert.True(t, streamVars[0].Required)
+}
+
+// TestMergeVariables_DuplicateError checks that an invalid data stream manifest
+// listing the same var name twice fails during mergeVariables, surfacing a
+// clear duplicate-variable error instead of silent corruption.
+func TestMergeVariables_DuplicateError(t *testing.T) {
+	buildPackageRoot := copyFixturePackage(t, "with_merging_duplicate_error")
+	resolver := NewRequiredInputsResolver(makeFakeEprForVarMerging(t))
+
+	err := resolver.Bundle(buildPackageRoot)
+	require.Error(t, err)
+	// The duplicated var name must appear in the error for easy diagnosis.
+	assert.Contains(t, err.Error(), "paths")
+}
+
+// TestMergeVariables_TwoPolicyTemplatesScopedPromotion verifies that promotion
+// is scoped per policy template data stream: composable vars under one template
+// promote only for that template's streams; another template referencing the
+// same input package without composable vars keeps all base vars on its streams.
+// This guards against incorrectly applying one template's promotions to every
+// stream that uses the same input package.
+func TestMergeVariables_TwoPolicyTemplatesScopedPromotion(t *testing.T) {
+	buildPackageRoot := copyFixturePackage(t, "with_merging_two_policy_templates")
+	resolver := NewRequiredInputsResolver(makeFakeEprForVarMerging(t))
+
+	err := resolver.Bundle(buildPackageRoot)
+	require.NoError(t, err)
+
+	manifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "manifest.yml"))
+	require.NoError(t, err)
+	manifest, err := packages.ReadPackageManifestBytes(manifestBytes)
+	require.NoError(t, err)
+	require.Len(t, manifest.PolicyTemplates, 2)
+
+	// pt_alpha: composable input has promoted paths (merged title).
+	alphaPT := manifest.PolicyTemplates[0]
+	require.Equal(t, "pt_alpha", alphaPT.Name)
+	require.GreaterOrEqual(t, len(alphaPT.Inputs), 1)
+	alphaInputVars := alphaPT.Inputs[0].Vars
+	require.Len(t, alphaInputVars, 1)
+	assert.Equal(t, "paths", alphaInputVars[0].Name)
+	assert.Equal(t, "Alpha-only promoted paths title", alphaInputVars[0].Title)
+	assert.Equal(t, "text", alphaInputVars[0].Type)
+
+	// pt_beta: no promotion — no vars on the composable input entry.
+	betaPT := manifest.PolicyTemplates[1]
+	require.Equal(t, "pt_beta", betaPT.Name)
+	assert.Empty(t, betaPT.Inputs[0].Vars)
+
+	// alpha_logs: paths promoted — stream keeps encoding + timeout only.
+	alphaDSBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "alpha_logs", "manifest.yml"))
+	require.NoError(t, err)
+	alphaDS, err := packages.ReadDataStreamManifestBytes(alphaDSBytes)
+	require.NoError(t, err)
+	alphaStreamVars := alphaDS.Streams[0].Vars
+	require.Len(t, alphaStreamVars, 2)
+	assert.Equal(t, "encoding", alphaStreamVars[0].Name)
+	assert.Equal(t, "timeout", alphaStreamVars[1].Name)
+
+	// beta_logs: no promotion — all three base vars on the stream.
+	betaDSBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "beta_logs", "manifest.yml"))
+	require.NoError(t, err)
+	betaDS, err := packages.ReadDataStreamManifestBytes(betaDSBytes)
+	require.NoError(t, err)
+	betaStreamVars := betaDS.Streams[0].Vars
+	require.Len(t, betaStreamVars, 3)
+	assert.Equal(t, "paths", betaStreamVars[0].Name)
+	assert.Equal(t, "encoding", betaStreamVars[1].Name)
+	assert.Equal(t, "timeout", betaStreamVars[2].Name)
+}
diff --git a/internal/requiredinputs/yamlutil.go b/internal/requiredinputs/yamlutil.go
new file mode 100644
index 0000000000..7b37d8fa8b
--- /dev/null
+++ b/internal/requiredinputs/yamlutil.go
@@ -0,0 +1,158 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License;
+// you may not use this file except in compliance with the Elastic License.
+
+package requiredinputs
+
+import (
+	"fmt"
+
+	"github.com/goccy/go-yaml"
+	"github.com/goccy/go-yaml/ast"
+	"github.com/goccy/go-yaml/parser"
+	"github.com/goccy/go-yaml/printer"
+	"github.com/goccy/go-yaml/token"
+
+	"github.com/elastic/elastic-package/internal/formatter"
+	"github.com/elastic/elastic-package/internal/yamledit"
+)
+
+// mappingValue returns the value ast.Node for the given key in a YAML mapping
+// node, or nil if the key is not present.
+func mappingValue(node *ast.MappingNode, key string) ast.Node {
+	for _, kv := range node.Values {
+		if kv.Key.String() == key {
+			return kv.Value
+		}
+	}
+	return nil
+}
+
+// removeKey removes a key-value pair from a YAML mapping node.
+// Only the first matching key is removed; no-op when the key is absent.
+func removeKey(node *ast.MappingNode, key string) {
+	for i, kv := range node.Values {
+		if kv.Key.String() == key {
+			node.Values = append(node.Values[:i], node.Values[i+1:]...)
+			return
+		}
+	}
+}
+
+// upsertKey sets key to value in a YAML mapping node, adding it if absent.
+// When inserting a new key, the column position is derived from the existing
+// entries so the new node serialises with the same indentation as its siblings.
+// For block-style SequenceNode values, the sequence Start column is set to
+// match the key column so blockStyleString generates correct indentation.
+func upsertKey(node *ast.MappingNode, key string, value ast.Node) {
+	// Derive column from existing entries so new nodes indent like their
+	// siblings. Fall back to 1 when the mapping has no entries yet (e.g.
+	// freshly constructed nodes in tests).
+	col := 1
+	if len(node.Values) > 0 {
+		col = node.Values[0].Key.GetToken().Position.Column
+	}
+	// For block-style sequences, match the sequence Start column to the key
+	// column so SequenceNode.blockStyleString produces the correct indentation
+	// regardless of whether the key is new or already exists.
+	if sn, ok := value.(*ast.SequenceNode); ok && !sn.IsFlowStyle {
+		sn.Start.Position.Column = col
+	}
+
+	// Existing key: replace the value in place (entries are pointers, so this
+	// mutates the mapping).
+	for _, kv := range node.Values {
+		if kv.Key.String() == key {
+			kv.Value = value
+			return
+		}
+	}
+	// Key not present — build a new MappingValueNode directly to avoid
+	// yaml.ValueToNode's MarshalYAML path which requires non-nil Start tokens
+	// on sequence/mapping nodes.
+	pos := &token.Position{Column: col, Line: 1}
+	keyTk := token.New(key, key, pos)
+	colonTk := token.New(":", ":", pos)
+	mv := ast.MappingValue(colonTk, ast.String(keyTk), value)
+	node.Values = append(node.Values, mv)
+}
+
+// newSeqNode creates a *ast.SequenceNode with a valid Start token so that the
+// goccy printer can serialise it without a nil-pointer panic.
+// Values can be any ast.Node; for string scalars prefer strVal().
+func newSeqNode(values ...ast.Node) *ast.SequenceNode {
+	pos := &token.Position{Column: 1, Line: 1}
+	sn := ast.Sequence(token.New("-", "-", pos), false)
+	sn.Values = values
+	return sn
+}
+
+// cloneNode returns a deep copy of the YAML node tree via round-trip
+// serialization so base nodes from the input package can be reused for multiple
+// independent merges without aliasing.
+// Panics if serialization or parsing of an already-valid node fails (impossible).
+func cloneNode(n ast.Node) ast.Node {
+	if n == nil {
+		return nil
+	}
+	p := printer.Printer{}
+	b := p.PrintNode(n)
+	f, err := parser.ParseBytes(b, 0)
+	if err != nil {
+		panic(fmt.Sprintf("cloneNode: failed to re-parse: %v", err))
+	}
+	if len(f.Docs) == 0 || f.Docs[0] == nil {
+		return nil
+	}
+	return f.Docs[0].Body
+}
+
+// formatYAMLNode marshals an ast.Node to bytes and applies the package's YAML
+// formatter with KeysWithDotActionNone.
+func formatYAMLNode(node ast.Node) ([]byte, error) { + p := printer.Printer{} + raw := p.PrintNode(node) + yamlFormatter := formatter.NewYAMLFormatter(formatter.KeysWithDotActionNone) + formatted, _, err := yamlFormatter.Format(raw) + if err != nil { + return nil, fmt.Errorf("failed to format YAML: %w", err) + } + return formatted, nil +} + +// nodeStringValue extracts the string value from a scalar ast.Node. For +// StringNode, the raw Value field is returned. For other scalars, String() is +// used. Returns "" for nil nodes. +func nodeStringValue(n ast.Node) string { + if n == nil { + return "" + } + if sn, ok := n.(*ast.StringNode); ok { + return sn.Value + } + return n.String() +} + +// strVal converts a plain string to a YAML scalar ast.Node. +// Panics if construction fails (impossible for string inputs). +func strVal(s string) ast.Node { + n, err := yaml.ValueToNode(s) + if err != nil { + panic(fmt.Sprintf("strVal: unexpected error for %q: %v", s, err)) + } + return n +} + +// parseDocumentRootMapping parses YAML bytes via yamledit and returns the +// document root as a *ast.MappingNode. Reuses internal/yamledit for parsing. 
+func parseDocumentRootMapping(data []byte) (*ast.MappingNode, error) { + doc, err := yamledit.NewDocumentBytes(data) + if err != nil { + return nil, err + } + if len(doc.AST().Docs) == 0 || doc.AST().Docs[0] == nil { + return nil, fmt.Errorf("empty YAML document") + } + root, ok := doc.AST().Docs[0].Body.(*ast.MappingNode) + if !ok { + return nil, fmt.Errorf("expected mapping node at document root, got %T", doc.AST().Docs[0].Body) + } + return root, nil +} diff --git a/internal/resources/fleetpackage.go b/internal/resources/fleetpackage.go index e5c17e96ff..408fb74f88 100644 --- a/internal/resources/fleetpackage.go +++ b/internal/resources/fleetpackage.go @@ -17,6 +17,7 @@ import ( "github.com/elastic/elastic-package/internal/kibana" "github.com/elastic/elastic-package/internal/packages" "github.com/elastic/elastic-package/internal/packages/installer" + "github.com/elastic/elastic-package/internal/requiredinputs" ) type FleetPackage struct { @@ -38,6 +39,9 @@ type FleetPackage struct { // Force forces operations, as reinstalling a package that seems to // be already installed. Force bool + + // RequiredInputsResolver is the resolver for required input packages. 
+ RequiredInputsResolver requiredinputs.Resolver } func (f *FleetPackage) String() string { @@ -64,11 +68,12 @@ func (f *FleetPackage) installer(ctx resource.Context) (installer.Installer, err } return installer.NewForPackage(installer.Options{ - Kibana: provider.Client, - PackageRoot: f.PackageRoot, - SkipValidation: true, - RepositoryRoot: f.RepositoryRoot, - SchemaURLs: f.SchemaURLs, + Kibana: provider.Client, + PackageRoot: f.PackageRoot, + SkipValidation: true, + RepositoryRoot: f.RepositoryRoot, + SchemaURLs: f.SchemaURLs, + RequiredInputsResolver: f.RequiredInputsResolver, }) } diff --git a/internal/resources/fleetpackage_test.go b/internal/resources/fleetpackage_test.go index f0bb905475..d4036dd400 100644 --- a/internal/resources/fleetpackage_test.go +++ b/internal/resources/fleetpackage_test.go @@ -29,8 +29,9 @@ func TestRequiredProvider(t *testing.T) { _, err = manager.Apply(resource.Resources{ &FleetPackage{ - PackageRoot: "../../test/packages/parallel/nginx", - RepositoryRoot: repositoryRoot, + PackageRoot: "../../test/packages/parallel/nginx", + RepositoryRoot: repositoryRoot, + RequiredInputsResolver: &requiredInputsResolverMock{}, }, }) if assert.Error(t, err) { @@ -38,6 +39,17 @@ func TestRequiredProvider(t *testing.T) { } } +type requiredInputsResolverMock struct { + BundleFunc func(buildPackageRoot string) error +} + +func (r *requiredInputsResolverMock) Bundle(buildPackageRoot string) error { + if r.BundleFunc != nil { + return r.BundleFunc(buildPackageRoot) + } + return nil +} + func TestPackageLifecycle(t *testing.T) { cases := []struct { title string @@ -62,9 +74,10 @@ func TestPackageLifecycle(t *testing.T) { packageRoot := filepath.Join(repositoryRoot.Name(), "test", "packages", "parallel", c.name) fleetPackage := FleetPackage{ - PackageRoot: packageRoot, - RepositoryRoot: repositoryRoot, - SchemaURLs: fields.NewSchemaURLs(), + PackageRoot: packageRoot, + RepositoryRoot: repositoryRoot, + SchemaURLs: fields.NewSchemaURLs(), + 
RequiredInputsResolver: &requiredInputsResolverMock{}, } manager := resource.NewManager() manager.RegisterProvider(DefaultKibanaProviderName, &KibanaProvider{Client: kibanaClient}) diff --git a/internal/resources/fleetpolicy_test.go b/internal/resources/fleetpolicy_test.go index 1bf9617699..d7f96bf5f5 100644 --- a/internal/resources/fleetpolicy_test.go +++ b/internal/resources/fleetpolicy_test.go @@ -122,10 +122,11 @@ func withPackageResources(agentPolicy *FleetAgentPolicy, repostoryRoot *os.Root) var resources resource.Resources for _, policy := range agentPolicy.PackagePolicies { resources = append(resources, &FleetPackage{ - PackageRoot: policy.PackageRoot, - Absent: agentPolicy.Absent, - RepositoryRoot: repostoryRoot, - SchemaURLs: fields.NewSchemaURLs(), + PackageRoot: policy.PackageRoot, + Absent: agentPolicy.Absent, + RepositoryRoot: repostoryRoot, + SchemaURLs: fields.NewSchemaURLs(), + RequiredInputsResolver: &requiredInputsResolverMock{}, }) } return append(resources, agentPolicy) diff --git a/internal/stack/environment.go b/internal/stack/environment.go index 13d8faed26..5f099d9f80 100644 --- a/internal/stack/environment.go +++ b/internal/stack/environment.go @@ -154,7 +154,11 @@ func (p *environmentProvider) initClients(appConfig *install.ApplicationConfigur } p.elasticsearch = elasticsearch - p.registry = registry.NewClient(packageRegistryBaseURL(p.profile, appConfig)) + regClient, err := registry.NewClient(PackageRegistryBaseURL(p.profile, appConfig)) + if err != nil { + return fmt.Errorf("cannot create package registry client: %w", err) + } + p.registry = regClient return nil } diff --git a/internal/stack/registry.go b/internal/stack/registry.go index 4314389357..b1e8cea490 100644 --- a/internal/stack/registry.go +++ b/internal/stack/registry.go @@ -5,6 +5,10 @@ package stack import ( + "net/url" + "os" + "strings" + "github.com/elastic/elastic-package/internal/install" "github.com/elastic/elastic-package/internal/profile" 
"github.com/elastic/elastic-package/internal/registry" @@ -28,10 +32,10 @@ func packageRegistryProxyToURL(profile *profile.Profile, appConfig *install.Appl return registry.ProductionURL } -// packageRegistryBaseURL returns the package registry base URL to be used, considering +// PackageRegistryBaseURL returns the package registry base URL to be used, considering // profile settings and application configuration. The priority is given to // profile settings over application configuration. -func packageRegistryBaseURL(profile *profile.Profile, appConfig *install.ApplicationConfiguration) string { +func PackageRegistryBaseURL(profile *profile.Profile, appConfig *install.ApplicationConfiguration) string { if registryURL := profile.Config(configElasticEPRURL, ""); registryURL != "" { return registryURL } @@ -42,3 +46,28 @@ func packageRegistryBaseURL(profile *profile.Profile, appConfig *install.Applica } return registry.ProductionURL } + +// RegistryClientOptions returns TLS options for the registry client so it works +// with the elastic-package stack (same CA as Kibana/ES) or local HTTPS registries. +// Profile may be nil (e.g. in build); then only CACertificateEnv is used for CA. 
+func RegistryClientOptions(registryBaseURL string, profile *profile.Profile) []registry.ClientOption { + var opts []registry.ClientOption + caPath := os.Getenv(CACertificateEnv) + if caPath == "" && profile != nil { + caPath, _ = FindCACertificate(profile) + } + if caPath != "" { + if _, err := os.Stat(caPath); err == nil { + opts = append(opts, registry.CertificateAuthority(caPath)) + return opts + } + } + u, err := url.Parse(registryBaseURL) + if err != nil { + return opts + } + if u.Scheme == "https" && (strings.ToLower(u.Hostname()) == "localhost" || u.Hostname() == "127.0.0.1") { + opts = append(opts, registry.TLSSkipVerify()) + } + return opts +} diff --git a/internal/stack/serverless.go b/internal/stack/serverless.go index b190c682ad..df2895f220 100644 --- a/internal/stack/serverless.go +++ b/internal/stack/serverless.go @@ -232,7 +232,11 @@ func (sp *serverlessProvider) createClients(project *serverless.Project, appConf return fmt.Errorf("failed to create kibana client: %w", err) } - sp.registryClient = registry.NewClient(packageRegistryBaseURL(sp.profile, appConfig)) + regClient, err := registry.NewClient(PackageRegistryBaseURL(sp.profile, appConfig)) + if err != nil { + return fmt.Errorf("failed to create package registry client: %w", err) + } + sp.registryClient = regClient return nil } diff --git a/internal/testrunner/runners/asset/tester.go b/internal/testrunner/runners/asset/tester.go index fa144ceda7..1998aca8cf 100644 --- a/internal/testrunner/runners/asset/tester.go +++ b/internal/testrunner/runners/asset/tester.go @@ -15,6 +15,7 @@ import ( "github.com/elastic/elastic-package/internal/kibana" "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/resources" "github.com/elastic/elastic-package/internal/testrunner" ) @@ -88,11 +89,12 @@ func (r *tester) Run(ctx context.Context) 
([]testrunner.TestResult, error) { func (r *tester) resources(installedPackage bool) resources.Resources { return resources.Resources{ &resources.FleetPackage{ - PackageRoot: r.packageRoot, - Absent: !installedPackage, - Force: installedPackage, // Force re-installation, in case there are code changes in the same package version. - RepositoryRoot: r.repositoryRoot, - SchemaURLs: r.schemaURLs, + PackageRoot: r.packageRoot, + Absent: !installedPackage, + Force: installedPackage, // Force re-installation, in case there are code changes in the same package version. + RepositoryRoot: r.repositoryRoot, + SchemaURLs: r.schemaURLs, + RequiredInputsResolver: &requiredinputs.NoopRequiredInputsResolver{}, }, } } diff --git a/internal/testrunner/runners/policy/runner.go b/internal/testrunner/runners/policy/runner.go index 340577ab2b..0902decb4a 100644 --- a/internal/testrunner/runners/policy/runner.go +++ b/internal/testrunner/runners/policy/runner.go @@ -15,6 +15,7 @@ import ( "github.com/elastic/elastic-package/internal/kibana" "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/resources" "github.com/elastic/elastic-package/internal/testrunner" ) @@ -39,37 +40,40 @@ type runner struct { repositoryRoot *os.Root - schemaURLs fields.SchemaURLs + schemaURLs fields.SchemaURLs + requiredInputsResolver requiredinputs.Resolver } // Ensures that runner implements testrunner.TestRunner interface var _ testrunner.TestRunner = new(runner) type PolicyTestRunnerOptions struct { - KibanaClient *kibana.Client - PackageRoot string - DataStreams []string - FailOnMissingTests bool - GenerateTestResult bool - GlobalTestConfig testrunner.GlobalRunnerTestConfig - WithCoverage bool - CoverageType string - RepositoryRoot *os.Root - SchemaURLs fields.SchemaURLs + KibanaClient *kibana.Client + PackageRoot string + DataStreams []string + 
FailOnMissingTests bool + GenerateTestResult bool + GlobalTestConfig testrunner.GlobalRunnerTestConfig + WithCoverage bool + CoverageType string + RepositoryRoot *os.Root + SchemaURLs fields.SchemaURLs + RequiredInputsResolver requiredinputs.Resolver } func NewPolicyTestRunner(options PolicyTestRunnerOptions) *runner { runner := runner{ - packageRoot: options.PackageRoot, - kibanaClient: options.KibanaClient, - dataStreams: options.DataStreams, - failOnMissingTests: options.FailOnMissingTests, - generateTestResult: options.GenerateTestResult, - globalTestConfig: options.GlobalTestConfig, - withCoverage: options.WithCoverage, - coverageType: options.CoverageType, - repositoryRoot: options.RepositoryRoot, - schemaURLs: options.SchemaURLs, + packageRoot: options.PackageRoot, + kibanaClient: options.KibanaClient, + dataStreams: options.DataStreams, + failOnMissingTests: options.FailOnMissingTests, + generateTestResult: options.GenerateTestResult, + globalTestConfig: options.GlobalTestConfig, + withCoverage: options.WithCoverage, + coverageType: options.CoverageType, + repositoryRoot: options.RepositoryRoot, + schemaURLs: options.SchemaURLs, + requiredInputsResolver: options.RequiredInputsResolver, } runner.resourcesManager = resources.NewManager() runner.resourcesManager.RegisterProvider(resources.DefaultKibanaProviderName, &resources.KibanaProvider{Client: runner.kibanaClient}) @@ -169,9 +173,10 @@ func (r *runner) Type() testrunner.TestType { func (r *runner) setupSuite(ctx context.Context, manager *resources.Manager) (cleanup func(ctx context.Context) error, err error) { packageResource := resources.FleetPackage{ - PackageRoot: r.packageRoot, - RepositoryRoot: r.repositoryRoot, - SchemaURLs: r.schemaURLs, + PackageRoot: r.packageRoot, + RepositoryRoot: r.repositoryRoot, + SchemaURLs: r.schemaURLs, + RequiredInputsResolver: r.requiredInputsResolver, } setupResources := resources.Resources{ &packageResource, diff --git a/internal/testrunner/runners/system/runner.go 
b/internal/testrunner/runners/system/runner.go index a851c734aa..0f5360b0c3 100644 --- a/internal/testrunner/runners/system/runner.go +++ b/internal/testrunner/runners/system/runner.go @@ -19,6 +19,7 @@ import ( "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" "github.com/elastic/elastic-package/internal/profile" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/resources" "github.com/elastic/elastic-package/internal/servicedeployer" "github.com/elastic/elastic-package/internal/testrunner" @@ -295,11 +296,12 @@ func (r *runner) Type() testrunner.TestType { func (r *runner) resources(opts resourcesOptions) resources.Resources { return resources.Resources{ &resources.FleetPackage{ - PackageRoot: r.packageRoot, - Absent: !opts.installedPackage, - Force: opts.installedPackage, // Force re-installation, in case there are code changes in the same package version. - RepositoryRoot: r.repositoryRoot, - SchemaURLs: r.schemaURLs, + PackageRoot: r.packageRoot, + Absent: !opts.installedPackage, + Force: opts.installedPackage, // Force re-installation, in case there are code changes in the same package version. 
+ RepositoryRoot: r.repositoryRoot, + SchemaURLs: r.schemaURLs, + RequiredInputsResolver: &requiredinputs.NoopRequiredInputsResolver{}, }, } } diff --git a/internal/testrunner/script/package.go b/internal/testrunner/script/package.go index ed3ae283d6..d7fee036d3 100644 --- a/internal/testrunner/script/package.go +++ b/internal/testrunner/script/package.go @@ -22,7 +22,9 @@ import ( "github.com/elastic/elastic-package/internal/files" "github.com/elastic/elastic-package/internal/packages" "github.com/elastic/elastic-package/internal/registry" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/resources" + "github.com/elastic/elastic-package/internal/stack" ) func addPackage(ts *testscript.TestScript, neg bool, args []string) { @@ -71,11 +73,12 @@ func addPackage(ts *testscript.TestScript, neg bool, args []string) { m := resources.NewManager() m.RegisterProvider(resources.DefaultKibanaProviderName, &resources.KibanaProvider{Client: stk.kibana}) _, err = m.ApplyCtx(ctx, resources.Resources{&resources.FleetPackage{ - PackageRoot: pkgRoot, - Absent: false, - Force: true, - RepositoryRoot: root, - SchemaURLs: fields.NewSchemaURLs(fields.WithECSBaseURL(ecsBaseSchemaURL)), + PackageRoot: pkgRoot, + Absent: false, + Force: true, + RepositoryRoot: root, + SchemaURLs: fields.NewSchemaURLs(fields.WithECSBaseURL(ecsBaseSchemaURL)), + RequiredInputsResolver: &requiredinputs.NoopRequiredInputsResolver{}, }}) ts.Check(decoratedWith("installing package resources", err)) @@ -124,10 +127,11 @@ func removePackage(ts *testscript.TestScript, neg bool, args []string) { m := resources.NewManager() m.RegisterProvider(resources.DefaultKibanaProviderName, &resources.KibanaProvider{Client: stk.kibana}) _, err = m.ApplyCtx(ctx, resources.Resources{&resources.FleetPackage{ - PackageRoot: pkgRoot, - Absent: true, - Force: true, - RepositoryRoot: root, // Apparently not required, but adding for safety. 
+ PackageRoot: pkgRoot, + Absent: true, + Force: true, + RepositoryRoot: root, // Apparently not required, but adding for safety. + RequiredInputsResolver: &requiredinputs.NoopRequiredInputsResolver{}, }}) ts.Check(decoratedWith("removing package resources", err)) @@ -184,7 +188,8 @@ func installPackageFromRegistry(ts *testscript.TestScript, neg bool, args []stri regPkgs[*profName] = append(regPkgs[*profName], registryPackage{name: name, version: version}) workDir := ts.MkAbs(".") - client := registry.NewClient(registryBaseURL) + client, err := registry.NewClient(registryBaseURL, stack.RegistryClientOptions(registryBaseURL, stk.profile)...) + ts.Check(decoratedWith("creating package registry client", err)) zipPath, err := client.DownloadPackage(name, version, workDir) ts.Check(decoratedWith("downloading package from registry", err)) diff --git a/internal/testrunner/script/script.go b/internal/testrunner/script/script.go index 14e5972a5c..472e374a02 100644 --- a/internal/testrunner/script/script.go +++ b/internal/testrunner/script/script.go @@ -33,7 +33,9 @@ import ( "github.com/elastic/elastic-package/internal/install" "github.com/elastic/elastic-package/internal/packages" "github.com/elastic/elastic-package/internal/packages/changelog" + "github.com/elastic/elastic-package/internal/profile" "github.com/elastic/elastic-package/internal/registry" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/resources" "github.com/elastic/elastic-package/internal/servicedeployer" "github.com/elastic/elastic-package/internal/stack" @@ -53,6 +55,41 @@ type Options struct { UpdateScripts bool // testscript.Params.UpdateScripts ContinueOnError bool // testscript.Params.ContinueOnError TestWork bool // testscript.Params.TestWork + + // Profile selects the package registry URL from profile config (with app + // config as fallback). When nil, the current profile name from application + // configuration is loaded. 
+ Profile *profile.Profile +} + +func profileAndPackageRegistryBaseURL(opt Options, appConfig *install.ApplicationConfiguration) (*profile.Profile, string, error) { + prof := opt.Profile + if prof == nil { + var err error + prof, err = profile.LoadProfile(appConfig.CurrentProfile()) + if err != nil { + return nil, "", fmt.Errorf("loading profile %q: %w", appConfig.CurrentProfile(), err) + } + } + return prof, stack.PackageRegistryBaseURL(prof, appConfig), nil +} + +func revisionsFromRegistry(eprBaseURL string, prof *profile.Profile, pkgName string) ([]packages.PackageManifest, error) { + c, err := registry.NewClient(eprBaseURL, stack.RegistryClientOptions(eprBaseURL, prof)...) + if err != nil { + return nil, fmt.Errorf("creating package registry client: %w", err) + } + return c.Revisions(pkgName, registry.SearchOptions{}) +} + +func scriptTestWorkdirRoot(workRoot string, opt Options) (workdirRoot string, err error) { + if opt.TestWork { + return os.MkdirTemp(workRoot, "*") + } + if err := os.Setenv("GOTMPDIR", workRoot); err != nil { + return "", fmt.Errorf("could not set temp dir var: %w", err) + } + return "", nil } // TODO: refactor Run to reduce cognitive complexity (currently 89). @@ -72,6 +109,10 @@ func Run(dst *[]testrunner.TestResult, w io.Writer, opt Options) error { if err != nil { return fmt.Errorf("could read configuration: %w", err) } + prof, eprBaseURL, err := profileAndPackageRegistryBaseURL(opt, appConfig) + if err != nil { + return err + } loc, err := locations.NewLocationManager() if err != nil { return err @@ -81,27 +122,14 @@ func Run(dst *[]testrunner.TestResult, w io.Writer, opt Options) error { if err != nil { return fmt.Errorf("could not make work space root: %w", err) } - var workdirRoot string - if opt.TestWork { - // Only create a work root and pass it in if --work has been requested. 
- // The behaviour of testscript is to set TestWork to true if the work - // root is non-zero, so just let testscript put it where it wants in the - // case that we have not requested work to be retained. This will be in - // os.MkdirTemp(os.Getenv("GOTMPDIR"), "go-test-script") which on most - // systems will be /tmp/go-test-script. However, due to… decisions, we - // cannot operate in that directory… - workdirRoot, err = os.MkdirTemp(workRoot, "*") - if err != nil { + // Only pass a non-zero work root when --work is set; otherwise set $GOTMPDIR + // so testscript uses a directory we can operate in (see scriptTestWorkdirRoot). + workdirRoot, err := scriptTestWorkdirRoot(workRoot, opt) + if err != nil { + if opt.TestWork { return fmt.Errorf("could not make work space: %w", err) } - } else { - // … so set $GOTMPDIR to a location that we can work in. - // - // This is all obviously awful. - err = os.Setenv("GOTMPDIR", workRoot) - if err != nil { - return fmt.Errorf("could not set temp dir var: %w", err) - } + return err } dirs, err := scripts(opt.Dir) @@ -201,8 +229,7 @@ func Run(dst *[]testrunner.TestResult, w io.Writer, opt Options) error { if err != nil { return err } - eprClient := registry.NewClient(appConfig.PackageRegistryBaseURL()) - revisions, err := eprClient.Revisions(manifest.Name, registry.SearchOptions{}) + revisions, err := revisionsFromRegistry(eprBaseURL, prof, manifest.Name) if err != nil { return err } @@ -237,7 +264,7 @@ func Run(dst *[]testrunner.TestResult, w io.Writer, opt Options) error { "CONFIG_PROFILES": loc.ProfileDir(), "HOME": home, "ECS_BASE_SCHEMA_URL": appConfig.SchemaURLs().ECSBase(), - "PACKAGE_REGISTRY_BASE_URL": appConfig.PackageRegistryBaseURL(), + "PACKAGE_REGISTRY_BASE_URL": eprBaseURL, } if pkgRoot != "" { scriptEnv["PACKAGE_NAME"] = manifest.Name @@ -408,9 +435,10 @@ func cleanUp(ctx context.Context, pkgRoot string, srvs map[string]servicedeploye m := resources.NewManager() 
m.RegisterProvider(resources.DefaultKibanaProviderName, &resources.KibanaProvider{Client: stk.kibana}) _, err := m.ApplyCtx(ctx, resources.Resources{&resources.FleetPackage{ - PackageRoot: pkgRoot, - Absent: true, - Force: true, + PackageRoot: pkgRoot, + Absent: true, + Force: true, + RequiredInputsResolver: &requiredinputs.NoopRequiredInputsResolver{}, }}) if err != nil && !strings.Contains(err.Error(), "is not installed") { errs = append(errs, err) diff --git a/internal/testrunner/script/script_test.go b/internal/testrunner/script/script_test.go new file mode 100644 index 0000000000..e17c35dd1e --- /dev/null +++ b/internal/testrunner/script/script_test.go @@ -0,0 +1,43 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package script + +import ( + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/stack" +) + +func TestRevisionsFromRegistry_searchOK(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/search" { + http.NotFound(w, r) + return + } + _, err := w.Write([]byte("[]")) + require.NoError(t, err) + })) + t.Cleanup(srv.Close) + + revs, err := revisionsFromRegistry(srv.URL, nil, "acme") + require.NoError(t, err) + require.Empty(t, revs) +} + +func TestRevisionsFromRegistry_propagatesRegistryClientError(t *testing.T) { + badCA := filepath.Join(t.TempDir(), "invalid-ca.pem") + require.NoError(t, os.WriteFile(badCA, []byte("not a PEM certificate"), 0o600)) + t.Setenv(stack.CACertificateEnv, badCA) + + _, err := revisionsFromRegistry("https://epr.example", nil, "acme") + require.Error(t, err) + require.ErrorContains(t, err, "creating package registry client") +} diff --git 
a/test/manual_packages/README.md b/test/manual_packages/README.md new file mode 100644 index 0000000000..57c6686f27 --- /dev/null +++ b/test/manual_packages/README.md @@ -0,0 +1,37 @@ +# Manual test packages + +Packages under `test/manual_packages/` are **not** picked up by CI’s main package glob beyond what each script includes. They are for **manual** workflows and **targeted** `go test` cases. + +## Composable coverage + +End-to-end composable integration coverage (`requires.input`, local registry, build + install) lives under: + +- [`composable/01_ci_input_pkg/`](composable/01_ci_input_pkg/) — `type: input` dependency +- [`composable/02_ci_composable_integration/`](composable/02_ci_composable_integration/) — `type: integration` that requires the input package above; must be built after `stack up` with `package_registry.base_url` set to `https://127.0.0.1:8080` + +`internal/requiredinputs` integration tests copy those same directories (see `ciInputFixturePath`, `copyComposableIntegrationFixture` in [`variables_test.go`](../../internal/requiredinputs/variables_test.go)). + +## `required_inputs` (manual / edge) + +Remaining trees under [`required_inputs/`](required_inputs/) exercise **narrow** variable-merge and template cases and are **not** required for the composable CI zip job: + +| Package | Role | +| --- | --- | +| `required_inputs/with_merging_promotes_to_input` | Only `paths` promoted; DS keeps `encoding`, `timeout`. | +| `required_inputs/with_merging_ds_merges` | No PT var overrides; DS merges `encoding` title + `custom_tag`. | +| `required_inputs/with_merging_no_override` | No composable overrides; all base vars on DS. | +| `required_inputs/with_merging_two_policy_templates` | Two PTs, scoped promotion on one. | +| `required_inputs/with_merging_duplicate_error` | Invalid duplicate `paths` on DS — **build must fail** (not in CI zip loop). 
| +| `required_inputs/with_linked_template_path` | Composable + policy `template_path` via `.link` (see [`dependency_management.md`](../../docs/howto/dependency_management.md)). | + +All of these depend on **`ci_input_pkg`** from [`composable/01_ci_input_pkg/`](composable/01_ci_input_pkg/) (see each package’s `_dev/test/config.yml` `requires` stub). + +### Manual workflow + +1. `elastic-package stack up -d` +2. Set `package_registry.base_url` in `~/.elastic-package/config.yml` to `https://127.0.0.1:8080` (see [local package registry how-to](../../docs/howto/local_package_registry.md)). +3. Build and install `01_ci_input_pkg` before any integration that lists `requires.input` for it, then build the integration. + +### Expected errors + +For `with_merging_duplicate_error`, `elastic-package build` should fail with an error mentioning `paths`. diff --git a/test/manual_packages/composable/01_ci_input_pkg/_dev/test/config.yml b/test/manual_packages/composable/01_ci_input_pkg/_dev/test/config.yml new file mode 100644 index 0000000000..a57750f85e --- /dev/null +++ b/test/manual_packages/composable/01_ci_input_pkg/_dev/test/config.yml @@ -0,0 +1,2 @@ +system: + parallel: true diff --git a/test/manual_packages/composable/01_ci_input_pkg/agent/input/extra.yml.hbs b/test/manual_packages/composable/01_ci_input_pkg/agent/input/extra.yml.hbs new file mode 100644 index 0000000000..1e1745752b --- /dev/null +++ b/test/manual_packages/composable/01_ci_input_pkg/agent/input/extra.yml.hbs @@ -0,0 +1,2 @@ +exclude_files: + - ".gz$" diff --git a/test/manual_packages/composable/01_ci_input_pkg/agent/input/input.yml.hbs b/test/manual_packages/composable/01_ci_input_pkg/agent/input/input.yml.hbs new file mode 100644 index 0000000000..9390bc05cb --- /dev/null +++ b/test/manual_packages/composable/01_ci_input_pkg/agent/input/input.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} diff --git a/test/manual_packages/composable/01_ci_input_pkg/changelog.yml 
b/test/manual_packages/composable/01_ci_input_pkg/changelog.yml new file mode 100644 index 0000000000..551e05b619 --- /dev/null +++ b/test/manual_packages/composable/01_ci_input_pkg/changelog.yml @@ -0,0 +1,5 @@ +- version: "0.1.0" + changes: + - description: Initial release for composable CI fixtures. + type: enhancement + link: https://github.com/elastic/elastic-package/pull/1 diff --git a/test/manual_packages/composable/01_ci_input_pkg/docs/README.md b/test/manual_packages/composable/01_ci_input_pkg/docs/README.md new file mode 100644 index 0000000000..ff612a5def --- /dev/null +++ b/test/manual_packages/composable/01_ci_input_pkg/docs/README.md @@ -0,0 +1,3 @@ +# CI composable input package + +`type: input` package consumed by [`02_ci_composable_integration`](../02_ci_composable_integration/). Used in CI (`test-build-install-zip.sh`) and `internal/requiredinputs` tests. Build the input package before the integration when using a local registry. diff --git a/test/manual_packages/composable/01_ci_input_pkg/fields/base-fields.yml b/test/manual_packages/composable/01_ci_input_pkg/fields/base-fields.yml new file mode 100644 index 0000000000..2b59a9a276 --- /dev/null +++ b/test/manual_packages/composable/01_ci_input_pkg/fields/base-fields.yml @@ -0,0 +1,18 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. +- name: message + type: text + description: Log message. +- name: log.level + type: keyword + description: Log level. 
diff --git a/test/manual_packages/composable/01_ci_input_pkg/manifest.yml b/test/manual_packages/composable/01_ci_input_pkg/manifest.yml new file mode 100644 index 0000000000..3b623b2153 --- /dev/null +++ b/test/manual_packages/composable/01_ci_input_pkg/manifest.yml @@ -0,0 +1,49 @@ +format_version: 3.6.0 +name: ci_input_pkg +title: CI Composable Input Package +description: >- + Input package for CI and tests: variable definitions, package-level fields, + and multiple agent templates for composable integration builds. +version: 0.1.0 +type: input +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +policy_templates: + - name: ci_input + type: logs + title: CI Input + description: Collect logs for composable CI and unit tests. + input: logfile + template_paths: + - input.yml.hbs + - extra.yml.hbs + vars: + - name: paths + type: text + title: Paths + multi: true + required: true + show_user: true + default: + - /var/log/*.log + - name: encoding + type: text + title: Encoding + multi: false + required: false + show_user: false + - name: timeout + type: text + title: Timeout + multi: false + required: false + show_user: false + default: 30s +owner: + github: elastic/integrations + type: elastic diff --git a/test/manual_packages/composable/02_ci_composable_integration/_dev/test/config.yml b/test/manual_packages/composable/02_ci_composable_integration/_dev/test/config.yml new file mode 100644 index 0000000000..a57750f85e --- /dev/null +++ b/test/manual_packages/composable/02_ci_composable_integration/_dev/test/config.yml @@ -0,0 +1,2 @@ +system: + parallel: true diff --git a/test/manual_packages/composable/02_ci_composable_integration/changelog.yml b/test/manual_packages/composable/02_ci_composable_integration/changelog.yml new file mode 100644 index 0000000000..551e05b619 --- /dev/null +++ b/test/manual_packages/composable/02_ci_composable_integration/changelog.yml @@ -0,0 +1,5 @@ +- version: "0.1.0" + changes: + - 
description: Initial release for composable CI fixtures. + type: enhancement + link: https://github.com/elastic/elastic-package/pull/1 diff --git a/test/manual_packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/agent/stream/stream.yml.hbs b/test/manual_packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9390bc05cb --- /dev/null +++ b/test/manual_packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} diff --git a/test/manual_packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/fields/base-fields.yml b/test/manual_packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/fields/base-fields.yml new file mode 100644 index 0000000000..0d1791ffed --- /dev/null +++ b/test/manual_packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
diff --git a/test/manual_packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml b/test/manual_packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml new file mode 100644 index 0000000000..e8a49efd6e --- /dev/null +++ b/test/manual_packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml @@ -0,0 +1,36 @@ +title: CI composable logs +type: logs +streams: + - package: ci_input_pkg + title: Logs via CI input package + description: Stream referencing the required input package. + vars: + - name: timeout + type: text + title: Timeout + multi: false + required: false + show_user: false + secret: false + description: Timeout for log collection. + - name: custom_tag + type: text + title: Custom Tag + multi: false + required: false + show_user: true + secret: false + - input: logfile + title: Plain logs stream + description: Native logs stream without package reference. + template_path: stream.yml.hbs + vars: + - name: paths + type: text + title: Paths + multi: true + required: true + show_user: true + secret: false + default: + - /var/log/ci/*.log diff --git a/test/manual_packages/composable/02_ci_composable_integration/docs/README.md b/test/manual_packages/composable/02_ci_composable_integration/docs/README.md new file mode 100644 index 0000000000..599697ab43 --- /dev/null +++ b/test/manual_packages/composable/02_ci_composable_integration/docs/README.md @@ -0,0 +1,7 @@ +# CI composable integration + +Declares `requires.input` on [`ci_input_pkg`](../01_ci_input_pkg/). After `elastic-package build` with the input package available from the registry, the built package includes merged variables, bundled input templates and fields, and resolved input types. + +**CI:** Built in a second phase by `scripts/test-build-install-zip.sh` after the stack is up and `package_registry.base_url` points at the local registry. 
It is also exercised via a dedicated CI job using `scripts/test-build-install-zip-file.sh -c` (composable-only). + +**Manual:** Build `01_ci_input_pkg` first, start the stack, set `package_registry.base_url` to `https://127.0.0.1:8080`, then build this package. diff --git a/test/manual_packages/composable/02_ci_composable_integration/manifest.yml b/test/manual_packages/composable/02_ci_composable_integration/manifest.yml new file mode 100644 index 0000000000..796d1a5d15 --- /dev/null +++ b/test/manual_packages/composable/02_ci_composable_integration/manifest.yml @@ -0,0 +1,53 @@ +format_version: 3.6.0 +name: ci_composable_integration +title: CI Composable Integration +description: >- + Integration package for CI and tests: requires ci_input_pkg, exercises + variable promotion and data stream merge, field bundling, template bundling, + and package-to-type resolution on inputs and streams. +version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: ci_input_pkg + version: "0.1.0" +policy_templates: + - name: ci_composable_logs + title: CI composable logs + description: Collect logs via required input package and native logs stream + data_streams: + - ci_composable_logs + inputs: + - package: ci_input_pkg + title: Collect via CI input package + description: Required input package for composable build + vars: + - name: paths + type: text + title: Paths + multi: true + required: true + show_user: true + secret: false + default: + - /var/log/custom/*.log + - name: encoding + type: text + title: Encoding + multi: false + required: false + show_user: true + secret: false + - type: logs + title: Native logs input + description: Plain logs input alongside the package stream +owner: + github: elastic/integrations + type: elastic diff --git a/test/manual_packages/required_inputs/with_linked_template_path/_dev/test/config.yml 
b/test/manual_packages/required_inputs/with_linked_template_path/_dev/test/config.yml new file mode 100644 index 0000000000..e60432ec4c --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/_dev/test/config.yml @@ -0,0 +1,3 @@ +requires: + - package: ci_input_pkg + source: "../../../../composable/01_ci_input_pkg" diff --git a/test/manual_packages/required_inputs/with_linked_template_path/agent/input/_included/owned.hbs b/test/manual_packages/required_inputs/with_linked_template_path/agent/input/_included/owned.hbs new file mode 100644 index 0000000000..e291ebccc4 --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/agent/input/_included/owned.hbs @@ -0,0 +1 @@ +# integration-owned template for composable link test diff --git a/test/manual_packages/required_inputs/with_linked_template_path/agent/input/owned.hbs.link b/test/manual_packages/required_inputs/with_linked_template_path/agent/input/owned.hbs.link new file mode 100644 index 0000000000..6338b13242 --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/agent/input/owned.hbs.link @@ -0,0 +1 @@ +./_included/owned.hbs dbf30556543232c62e86aa8cecf1128fba9ae97cbee1fa5064d52f078ab51393 diff --git a/test/manual_packages/required_inputs/with_linked_template_path/changelog.yml b/test/manual_packages/required_inputs/with_linked_template_path/changelog.yml new file mode 100644 index 0000000000..c9785533f6 --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Manual fixture for composable build with linked policy template path. 
+ type: enhancement + link: https://github.com/elastic/elastic-package/issues/3278 diff --git a/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9e9c27a8c0 --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/fields/base-fields.yml new file mode 100644 index 0000000000..d3b0f5a163 --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/manifest.yml b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/manifest.yml new file mode 100644 index 0000000000..87e44a58e1 --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/manifest.yml @@ -0,0 +1,19 @@ +title: Test Logs +type: logs +streams: + - package: ci_input_pkg + title: Test Logs from Input Package + description: Collect test logs using the referenced input package. 
+ - input: logfile + title: Test Logs + description: Collect test logs using the logs input. + template_path: stream.yml.hbs + vars: + - name: paths + type: text + title: Paths + multi: true + required: true + show_user: true + default: + - /var/log/test/*.log \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_linked_template_path/docs/README.md b/test/manual_packages/required_inputs/with_linked_template_path/docs/README.md new file mode 100644 index 0000000000..1cc6fc4b61 --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/docs/README.md @@ -0,0 +1,5 @@ +# Integration With Linked Policy Template Path + +Manual fixture for composable integrations where `agent/input/owned.hbs` is produced +from `owned.hbs.link` at build time. The manifest uses `template_path: owned.hbs` +(the materialized filename), matching what Fleet expects after `elastic-package build`. diff --git a/test/manual_packages/required_inputs/with_linked_template_path/manifest.yml b/test/manual_packages/required_inputs/with_linked_template_path/manifest.yml new file mode 100644 index 0000000000..8dc5c5551e --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/manifest.yml @@ -0,0 +1,37 @@ +format_version: 3.6.0 +name: with_linked_template_path +title: Integration With Linked Policy Template Path +description: >- + Like with_input_package_requires, but the integration-owned policy input template + is provided via a .link file (agent/input/owned.hbs). The manifest must list + template_path: owned.hbs (materialized name), not owned.hbs.link. 
+version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: ci_input_pkg + version: "0.1.0" +policy_templates: + - name: test_logs + title: Test logs + description: Collect test logs + data_streams: + - test_logs + inputs: + - package: ci_input_pkg + title: Collect test logs via input package + description: Use the test input package to collect logs + template_path: owned.hbs + - type: logs + title: Collect test logs via logs input + description: Use the logs input to collect logs +owner: + github: elastic/integrations + type: elastic diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_ds_merges/_dev/test/config.yml new file mode 100644 index 0000000000..1ca27d196b --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/_dev/test/config.yml @@ -0,0 +1,3 @@ +requires: + - package: ci_input_pkg + source: "../../../../composable/01_ci_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/changelog.yml b/test/manual_packages/required_inputs/with_merging_ds_merges/changelog.yml new file mode 100644 index 0000000000..fb3f5f7235 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Initial release. 
+ type: enhancement + link: https://github.com/elastic/elastic-package/issues/1 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9e9c27a8c0 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/fields/base-fields.yml new file mode 100644 index 0000000000..d3b0f5a163 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/manifest.yml new file mode 100644 index 0000000000..0029f07bb4 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/manifest.yml @@ -0,0 +1,13 @@ +title: Var Merging Logs +type: logs +streams: + - package: ci_input_pkg + title: Var Merging Logs + description: Collect logs using the var merging input package. + vars: + - name: encoding + title: Log Encoding Override + - name: custom_tag + type: text + title: Custom Tag + show_user: true \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/docs/README.md b/test/manual_packages/required_inputs/with_merging_ds_merges/docs/README.md new file mode 100644 index 0000000000..db7779e04e --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/docs/README.md @@ -0,0 +1,13 @@ +# Variable Merging - Data Stream Merges + +Test fixture: composable package with no policy template variable overrides. +The data stream manifest overrides the "encoding" variable from the input +package (providing a different title) and adds a new "custom_tag" variable. 
+ +Expected result after merging: +- Input variables: (none) +- Data stream variables: + - paths (unchanged from input package) + - encoding (merged: base from input pkg, title overridden) + - timeout (unchanged from input package) + - custom_tag (new, from data stream manifest) \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/manifest.yml b/test/manual_packages/required_inputs/with_merging_ds_merges/manifest.yml new file mode 100644 index 0000000000..ac227bf117 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/manifest.yml @@ -0,0 +1,33 @@ +format_version: 3.6.0 +name: with_merging_ds_merges +title: Variable Merging - Data Stream Merges +description: >- + Composable package with no policy template variable overrides. The data stream + manifest overrides the "encoding" variable (changing its title) and introduces + a new "custom_tag" variable. All variables remain in the data stream list. +version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: ci_input_pkg + version: "0.1.0" +policy_templates: + - name: var_merging_logs + title: Var Merging Logs + description: Collect logs via var merging input package + data_streams: + - var_merging_logs + inputs: + - package: ci_input_pkg + title: Collect logs via var merging input package + description: Use the var merging input package to collect logs +owner: + github: elastic/integrations + type: elastic \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_duplicate_error/_dev/test/config.yml new file mode 100644 index 0000000000..1ca27d196b --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/_dev/test/config.yml @@ -0,0 +1,3 @@ +requires: + - package: ci_input_pkg + source: 
"../../../../composable/01_ci_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/changelog.yml b/test/manual_packages/required_inputs/with_merging_duplicate_error/changelog.yml new file mode 100644 index 0000000000..fb3f5f7235 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Initial release. + type: enhancement + link: https://github.com/elastic/elastic-package/issues/1 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9e9c27a8c0 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/fields/base-fields.yml new file mode 100644 index 0000000000..d3b0f5a163 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/manifest.yml new file mode 100644 index 0000000000..795111aabe --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/manifest.yml @@ -0,0 +1,11 @@ +title: Var Merging Logs +type: logs +streams: + - package: ci_input_pkg + title: Var Merging Logs + description: Collect logs using the var merging input package. + vars: + - name: paths + title: First paths definition + - name: paths + title: Duplicate paths definition \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/docs/README.md b/test/manual_packages/required_inputs/with_merging_duplicate_error/docs/README.md new file mode 100644 index 0000000000..83f27f9389 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/docs/README.md @@ -0,0 +1,8 @@ +# Variable Merging - Duplicate Error + +Test fixture: composable package whose data stream manifest defines the "paths" +variable twice. The merging algorithm must detect this duplicate and return an +error (Step 5: fail if there are multiple variables with the same name). + +Expected result: error indicating a duplicate variable name "paths" in the data +stream variable list. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/manifest.yml b/test/manual_packages/required_inputs/with_merging_duplicate_error/manifest.yml new file mode 100644 index 0000000000..21706052e5 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/manifest.yml @@ -0,0 +1,33 @@ +format_version: 3.6.0 +name: with_merging_duplicate_error +title: Variable Merging - Duplicate Error +description: >- + Composable package whose data stream manifest defines the "paths" variable + twice. This should cause the variable merging step to fail with a duplicate + variable name error. +version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: ci_input_pkg + version: "0.1.0" +policy_templates: + - name: var_merging_logs + title: Var Merging Logs + description: Collect logs via var merging input package + data_streams: + - var_merging_logs + inputs: + - package: ci_input_pkg + title: Collect logs via var merging input package + description: Use the var merging input package to collect logs +owner: + github: elastic/integrations + type: elastic \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_no_override/_dev/test/config.yml new file mode 100644 index 0000000000..1ca27d196b --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_no_override/_dev/test/config.yml @@ -0,0 +1,3 @@ +requires: + - package: ci_input_pkg + source: "../../../../composable/01_ci_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/changelog.yml b/test/manual_packages/required_inputs/with_merging_no_override/changelog.yml new file mode 100644 index 0000000000..fb3f5f7235 --- /dev/null +++ 
b/test/manual_packages/required_inputs/with_merging_no_override/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Initial release. + type: enhancement + link: https://github.com/elastic/elastic-package/issues/1 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9e9c27a8c0 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/fields/base-fields.yml new file mode 100644 index 0000000000..d3b0f5a163 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/manifest.yml new file mode 100644 index 0000000000..45bb39425b --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/manifest.yml @@ -0,0 +1,6 @@ +title: Var Merging Logs +type: logs +streams: + - package: ci_input_pkg + title: Var Merging Logs + description: Collect logs using the var merging input package. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/docs/README.md b/test/manual_packages/required_inputs/with_merging_no_override/docs/README.md new file mode 100644 index 0000000000..cd0cb48e30 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_no_override/docs/README.md @@ -0,0 +1,5 @@ +# Variable Merging - No Override + +Test fixture: composable package with no variable overrides. All variables +defined in the input package policy template become data stream variables +unchanged. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/manifest.yml b/test/manual_packages/required_inputs/with_merging_no_override/manifest.yml new file mode 100644 index 0000000000..9d87b0f12d --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_no_override/manifest.yml @@ -0,0 +1,32 @@ +format_version: 3.6.0 +name: with_merging_no_override +title: Variable Merging - No Override +description: >- + Composable package with no variable overrides at the policy template or data + stream level. All input package vars remain as data stream variables. 
+version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: ci_input_pkg + version: "0.1.0" +policy_templates: + - name: var_merging_logs + title: Var Merging Logs + description: Collect logs via var merging input package + data_streams: + - var_merging_logs + inputs: + - package: ci_input_pkg + title: Collect logs via var merging input package + description: Use the var merging input package to collect logs +owner: + github: elastic/integrations + type: elastic \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_promotes_to_input/_dev/test/config.yml new file mode 100644 index 0000000000..1ca27d196b --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/_dev/test/config.yml @@ -0,0 +1,3 @@ +requires: + - package: ci_input_pkg + source: "../../../../composable/01_ci_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/changelog.yml b/test/manual_packages/required_inputs/with_merging_promotes_to_input/changelog.yml new file mode 100644 index 0000000000..fb3f5f7235 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Initial release. 
+ type: enhancement + link: https://github.com/elastic/elastic-package/issues/1 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9e9c27a8c0 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/fields/base-fields.yml new file mode 100644 index 0000000000..d3b0f5a163 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/manifest.yml new file mode 100644 index 0000000000..45bb39425b --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/manifest.yml @@ -0,0 +1,6 @@ +title: Var Merging Logs +type: logs +streams: + - package: ci_input_pkg + title: Var Merging Logs + description: Collect logs using the var merging input package. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/docs/README.md b/test/manual_packages/required_inputs/with_merging_promotes_to_input/docs/README.md new file mode 100644 index 0000000000..cead82918f --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/docs/README.md @@ -0,0 +1,11 @@ +# Variable Merging - Promotes to Input Var + +Test fixture: composable package whose policy template declares a "paths" +variable override. Because "paths" is also defined in the input package policy +template, it is promoted from a data stream variable to an input variable and +merged (input package definition is the base; the override here changes the +default path). 
+ +Expected result after merging: +- Input variables: paths (merged, default overridden to /var/log/custom/*.log) +- Data stream variables: encoding, timeout \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/manifest.yml b/test/manual_packages/required_inputs/with_merging_promotes_to_input/manifest.yml new file mode 100644 index 0000000000..31eae327c0 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/manifest.yml @@ -0,0 +1,38 @@ +format_version: 3.6.0 +name: with_merging_promotes_to_input +title: Variable Merging - Promotes to Input Var +description: >- + Composable package whose policy template overrides the "paths" variable from + the input package. This causes "paths" to be promoted from a data stream + variable to an input variable and merged. "encoding" and "timeout" remain as + data stream variables. +version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: ci_input_pkg + version: "0.1.0" +policy_templates: + - name: var_merging_logs + title: Var Merging Logs + description: Collect logs via var merging input package + data_streams: + - var_merging_logs + inputs: + - package: ci_input_pkg + title: Collect logs via var merging input package + description: Use the var merging input package to collect logs + vars: + - name: paths + default: + - /var/log/custom/*.log +owner: + github: elastic/integrations + type: elastic \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/_dev/test/config.yml new file mode 100644 index 0000000000..e60432ec4c --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/_dev/test/config.yml @@ -0,0 +1,3 @@ +requires: + - package: ci_input_pkg + 
source: "../../../../composable/01_ci_input_pkg" diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/changelog.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/changelog.yml new file mode 100644 index 0000000000..af392ba551 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Initial release. + type: enhancement + link: https://github.com/elastic/elastic-package/issues/1 diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9390bc05cb --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/fields/base-fields.yml new file mode 100644 index 0000000000..0d1791ffed --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/manifest.yml new file mode 100644 index 0000000000..468cbe683f --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/manifest.yml @@ -0,0 +1,6 @@ +title: Alpha logs +type: logs +streams: + - package: ci_input_pkg + title: Alpha logs via input package + description: Collect alpha logs using the var merging input package. diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9390bc05cb --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/fields/base-fields.yml new file mode 100644 index 0000000000..0d1791ffed --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/manifest.yml new file mode 100644 index 0000000000..b76005594d --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/manifest.yml @@ -0,0 +1,6 @@ +title: Beta logs +type: logs +streams: + - package: ci_input_pkg + title: Beta logs via input package + description: Collect beta logs using the var merging input package. diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/docs/README.md b/test/manual_packages/required_inputs/with_merging_two_policy_templates/docs/README.md new file mode 100644 index 0000000000..a969fc6a65 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/docs/README.md @@ -0,0 +1,5 @@ +# Variable Merging - Two Policy Templates + +Test fixture: promotion of input-package vars is scoped to the policy +template's `data_streams` list. One template promotes `paths`; the other +leaves all vars on the data stream. diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/manifest.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/manifest.yml new file mode 100644 index 0000000000..91031fdca6 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/manifest.yml @@ -0,0 +1,52 @@ +format_version: 3.6.0 +name: with_merging_two_policy_templates +title: Variable Merging - Two Policy Templates Scoped Promotion +description: >- + Two policy templates share the same required input package: one promotes + "paths" to input-level vars for its data stream only; the other does not + promote any vars, so "paths" stays on the data stream. Exercises per-DS + promotion scoping when multiple templates reference the same input package. 
+version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: ci_input_pkg + version: "0.1.0" +policy_templates: + - name: pt_alpha + title: Alpha logs (paths promoted) + description: Policy template that promotes paths to the composable input + data_streams: + - alpha_logs + inputs: + - package: ci_input_pkg + title: Collect via var merging input (alpha) + description: Alpha stream promotes paths to input-level vars + vars: + - name: paths + title: Alpha-only promoted paths title + - type: logs + title: Native logs input (alpha) + description: Fallback logs input for alpha + - name: pt_beta + title: Beta logs (no promotion) + description: Policy template with no composable var overrides + data_streams: + - beta_logs + inputs: + - package: ci_input_pkg + title: Collect via var merging input (beta) + description: Beta stream keeps all vars at data-stream level + - type: logs + title: Native logs input (beta) + description: Fallback logs input for beta +owner: + github: elastic/integrations + type: elastic