// requiredInputsResolverMock is a test double for the resolver consumed by the
// package builder. With a nil BundleFunc, Bundle is a no-op that reports success.
type requiredInputsResolverMock struct {
	BundleFunc func(buildPackageRoot string) error
}

// Bundle delegates to BundleFunc when one is configured; otherwise it returns nil.
func (m *requiredInputsResolverMock) Bundle(buildPackageRoot string) error {
	if m.BundleFunc == nil {
		return nil
	}
	return m.BundleFunc(buildPackageRoot)
}
type Input struct { - Type string `config:"type" json:"type" yaml:"type"` - Vars []Variable `config:"vars" json:"vars" yaml:"vars"` + Type string `config:"type" json:"type" yaml:"type"` + Package string `config:"package,omitempty" json:"package,omitempty" yaml:"package,omitempty"` + Vars []Variable `config:"vars" json:"vars" yaml:"vars"` + TemplatePath string `config:"template_path,omitempty" json:"template_path,omitempty" yaml:"template_path,omitempty"` + TemplatePaths []string `config:"template_paths,omitempty" json:"template_paths,omitempty" yaml:"template_paths,omitempty"` +} + +// PackageDependency describes a dependency on another package. +type PackageDependency struct { + Package string `config:"package" json:"package" yaml:"package"` + Version string `config:"version" json:"version" yaml:"version"` +} + +// Requires lists the packages that an integration package depends on. +type Requires struct { + Input []PackageDependency `config:"input,omitempty" json:"input,omitempty" yaml:"input,omitempty"` + Content []PackageDependency `config:"content,omitempty" json:"content,omitempty" yaml:"content,omitempty"` } // Source contains metadata about the source code of the package. 
@@ -237,9 +252,10 @@ type PolicyTemplate struct { // For purposes of "input packages" Input string `config:"input,omitempty" json:"input,omitempty" yaml:"input,omitempty"` Type string `config:"type,omitempty" json:"type,omitempty" yaml:"type,omitempty"` - DynamicSignalTypes bool `config:"dynamic_signal_types,omitempty" json:"dynamic_signal_types,omitempty" yaml:"dynamic_signal_types,omitempty"` TemplatePath string `config:"template_path,omitempty" json:"template_path,omitempty" yaml:"template_path,omitempty"` + TemplatePaths []string `config:"template_paths,omitempty" json:"template_paths,omitempty" yaml:"template_paths,omitempty"` Vars []Variable `config:"vars,omitempty" json:"vars,omitempty" yaml:"vars,omitempty"` + DynamicSignalTypes bool `config:"dynamic_signal_types,omitempty" json:"dynamic_signal_types,omitempty" yaml:"dynamic_signal_types,omitempty"` } // Owner defines package owners, either a single person or a team. @@ -272,6 +288,7 @@ type PackageManifest struct { Categories []string `config:"categories" json:"categories" yaml:"categories"` Agent Agent `config:"agent" json:"agent" yaml:"agent"` Elasticsearch *Elasticsearch `config:"elasticsearch" json:"elasticsearch" yaml:"elasticsearch"` + Requires *Requires `config:"requires,omitempty" json:"requires,omitempty" yaml:"requires,omitempty"` } type PackageDirNameAndManifest struct { @@ -334,11 +351,13 @@ type TransformDefinition struct { // Stream contains information about an input stream. 
type Stream struct { - Input string `config:"input" json:"input" yaml:"input"` - Title string `config:"title" json:"title" yaml:"title"` - Description string `config:"description" json:"description" yaml:"description"` - TemplatePath string `config:"template_path" json:"template_path" yaml:"template_path"` - Vars []Variable `config:"vars" json:"vars" yaml:"vars"` + Input string `config:"input" json:"input" yaml:"input"` + Package string `config:"package,omitempty" json:"package,omitempty" yaml:"package,omitempty"` + Title string `config:"title" json:"title" yaml:"title"` + Description string `config:"description" json:"description" yaml:"description"` + TemplatePath string `config:"template_path,omitempty" json:"template_path,omitempty" yaml:"template_path,omitempty"` + TemplatePaths []string `config:"template_paths,omitempty" json:"template_paths,omitempty" yaml:"template_paths,omitempty"` + Vars []Variable `config:"vars" json:"vars" yaml:"vars"` } // HasSource checks if a given index or data stream name maches the transform sources @@ -717,6 +736,20 @@ func ReadPackageManifestBytes(contents []byte) (*PackageManifest, error) { return &m, nil } +func ReadDataStreamManifestBytes(contents []byte) (*DataStreamManifest, error) { + cfg, err := yaml.NewConfig(contents, ucfg.PathSep(".")) + if err != nil { + return nil, fmt.Errorf("reading manifest file failed: %w", err) + } + + var m DataStreamManifest + err = cfg.Unpack(&m) + if err != nil { + return nil, fmt.Errorf("unpacking data stream manifest failed: %w", err) + } + return &m, nil +} + // ReadDataStreamManifest reads and parses the given data stream manifest file. 
func ReadDataStreamManifest(path string) (*DataStreamManifest, error) { cfg, err := yaml.NewConfigWithFile(path, ucfg.PathSep(".")) From d9f3026d7d15bf5ae9ff5287ab2dc5f426d33d3c Mon Sep 17 00:00:00 2001 From: Tere Date: Mon, 13 Apr 2026 16:55:17 +0200 Subject: [PATCH 02/28] feat: add registry client package download with TLS Download integration zip artifacts from EPR for required input resolution during build. Made-with: Cursor --- internal/registry/client.go | 106 ++++++++++++++++- internal/registry/client_test.go | 192 +++++++++++++++++++++++++++++++ 2 files changed, 292 insertions(+), 6 deletions(-) create mode 100644 internal/registry/client_test.go diff --git a/internal/registry/client.go b/internal/registry/client.go index 6181c9dfa2..bac16e15c0 100644 --- a/internal/registry/client.go +++ b/internal/registry/client.go @@ -5,6 +5,7 @@ package registry import ( + "crypto/tls" "fmt" "io" "net/http" @@ -12,6 +13,8 @@ import ( "os" "path/filepath" + "github.com/elastic/elastic-package/internal/certs" + "github.com/elastic/elastic-package/internal/files" "github.com/elastic/elastic-package/internal/logger" ) @@ -19,18 +22,59 @@ const ( ProductionURL = "https://epr.elastic.co" ) -// Client is responsible for exporting dashboards from Kibana. +// ClientOption is a functional option for the registry client. +type ClientOption func(*Client) + +// Client is responsible for communicating with the Package Registry API. type Client struct { - baseURL string + baseURL string + certificateAuthority string + tlsSkipVerify bool + httpClient *http.Client } // NewClient creates a new instance of the client. -func NewClient(baseURL string) *Client { - return &Client{ - baseURL: baseURL, +func NewClient(baseURL string, opts ...ClientOption) *Client { + c := &Client{baseURL: baseURL} + for _, opt := range opts { + opt(c) + } + c.httpClient, _ = c.newHTTPClient() + return c +} + +// CertificateAuthority sets the certificate authority to use for TLS verification. 
+func CertificateAuthority(path string) ClientOption { + return func(c *Client) { + c.certificateAuthority = path } } +// TLSSkipVerify disables TLS certificate verification (e.g. for local HTTPS registries). +func TLSSkipVerify() ClientOption { + return func(c *Client) { + c.tlsSkipVerify = true + } +} + +func (c *Client) newHTTPClient() (*http.Client, error) { + client := &http.Client{} + if c.tlsSkipVerify { + client.Transport = &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + } + } else if c.certificateAuthority != "" { + rootCAs, err := certs.SystemPoolWithCACertificate(c.certificateAuthority) + if err != nil { + return nil, fmt.Errorf("reading CA certificate: %w", err) + } + client.Transport = &http.Transport{ + TLSClientConfig: &tls.Config{RootCAs: rootCAs}, + } + } + return client, nil +} + func (c *Client) get(resourcePath string) (int, []byte, error) { base, err := url.Parse(c.baseURL) if err != nil { @@ -52,7 +96,10 @@ func (c *Client) get(resourcePath string) (int, []byte, error) { return 0, nil, fmt.Errorf("could not create request to Package Registry API resource: %s: %w", resourcePath, err) } - client := http.Client{} + client := c.httpClient + if client == nil { + client = &http.Client{} + } resp, err := client.Do(req) if err != nil { return 0, nil, fmt.Errorf("could not send request to Package Registry API: %w", err) @@ -69,7 +116,16 @@ func (c *Client) get(resourcePath string) (int, []byte, error) { // DownloadPackage downloads a package zip from the registry and writes it to destDir. // It returns the path to the downloaded zip file. +// +// When ELASTIC_PACKAGE_VERIFY_PACKAGE_SIGNATURE is true and ELASTIC_PACKAGE_VERIFIER_PUBLIC_KEYFILE +// is set, the registry must also serve a detached signature at {zip}.sig and the zip is verified +// before returning; on failure the zip file is removed. 
func (c *Client) DownloadPackage(name, version, destDir string) (string, error) { + verify, pubKeyPath, err := files.PackageSignatureVerificationFromEnv() + if err != nil { + return "", err + } + resourcePath := fmt.Sprintf("/epr/%s/%s-%s.zip", name, name, version) statusCode, body, err := c.get(resourcePath) if err != nil { @@ -83,5 +139,43 @@ func (c *Client) DownloadPackage(name, version, destDir string) (string, error) if err := os.WriteFile(zipPath, body, 0o644); err != nil { return "", fmt.Errorf("writing package zip to %s: %w", zipPath, err) } + + if !verify { + return zipPath, nil + } + + logger.Debugf("Verifying detached signature for package %s-%s", name, version) + pubKey, err := os.ReadFile(pubKeyPath) + if err != nil { + _ = os.Remove(zipPath) + return "", fmt.Errorf("reading verifier public keyfile (path: %s): %w", pubKeyPath, err) + } + + sigPath := fmt.Sprintf("/epr/%s/%s-%s.zip.sig", name, name, version) + sigCode, sigBody, err := c.get(sigPath) + if err != nil { + _ = os.Remove(zipPath) + return "", fmt.Errorf("downloading package signature %s-%s: %w", name, version, err) + } + if sigCode != http.StatusOK { + _ = os.Remove(zipPath) + return "", fmt.Errorf("downloading package signature %s-%s: unexpected status code %d", name, version, sigCode) + } + + zipFile, err := os.Open(zipPath) + if err != nil { + _ = os.Remove(zipPath) + return "", fmt.Errorf("opening downloaded package zip %s: %w", zipPath, err) + } + verifyErr := files.VerifyDetachedPGP(zipFile, sigBody, pubKey) + closeErr := zipFile.Close() + if verifyErr != nil { + _ = os.Remove(zipPath) + return "", fmt.Errorf("verifying package %s-%s: %w", name, version, verifyErr) + } + if closeErr != nil { + return "", fmt.Errorf("closing downloaded package zip %s: %w", zipPath, closeErr) + } + return zipPath, nil } diff --git a/internal/registry/client_test.go b/internal/registry/client_test.go new file mode 100644 index 0000000000..877ef94e18 --- /dev/null +++ b/internal/registry/client_test.go @@ 
-0,0 +1,192 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package registry + +import ( + "archive/zip" + "bytes" + "fmt" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "testing" + + "github.com/ProtonMail/gopenpgp/v2/crypto" + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/environment" +) + +func TestDownloadPackage_withoutVerification(t *testing.T) { + zipBytes := testAcmePackageZip(t) + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/epr/acme/acme-1.0.0.zip" { + http.NotFound(w, r) + return + } + _, err := w.Write(zipBytes) + require.NoError(t, err) + })) + t.Cleanup(srv.Close) + + t.Setenv(environment.WithElasticPackagePrefix("VERIFY_PACKAGE_SIGNATURE"), "") + t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), "") + + dest := t.TempDir() + client := NewClient(srv.URL) + zipPath, err := client.DownloadPackage("acme", "1.0.0", dest) + require.NoError(t, err) + require.FileExists(t, zipPath) +} + +func TestDownloadPackage_withVerification_success(t *testing.T) { + zipBytes := testAcmePackageZip(t) + passphrase := []byte("registry-test-pass") + + priv, err := crypto.GenerateKey("Registry Test", "", "rsa", 2048) + require.NoError(t, err) + priv, err = priv.Lock(passphrase) + require.NoError(t, err) + unlocked, err := priv.Unlock(passphrase) + require.NoError(t, err) + t.Cleanup(func() { unlocked.ClearPrivateParams() }) + + signRing, err := crypto.NewKeyRing(unlocked) + require.NoError(t, err) + sig, err := signRing.SignDetachedStream(bytes.NewReader(zipBytes)) + require.NoError(t, err) + armoredSig, err := sig.GetArmored() + require.NoError(t, err) + pubArmored, err := unlocked.GetArmoredPublicKey() + require.NoError(t, err) + + 
pubPath := filepath.Join(t.TempDir(), "verify.pub.asc") + require.NoError(t, os.WriteFile(pubPath, []byte(pubArmored), 0o600)) + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.URL.Path { + case "/epr/acme/acme-1.0.0.zip": + _, err := w.Write(zipBytes) + require.NoError(t, err) + case "/epr/acme/acme-1.0.0.zip.sig": + _, err := w.Write([]byte(armoredSig)) + require.NoError(t, err) + default: + http.NotFound(w, r) + } + })) + t.Cleanup(srv.Close) + + t.Setenv(environment.WithElasticPackagePrefix("VERIFY_PACKAGE_SIGNATURE"), "true") + t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), pubPath) + + dest := t.TempDir() + client := NewClient(srv.URL) + zipPath, err := client.DownloadPackage("acme", "1.0.0", dest) + require.NoError(t, err) + require.FileExists(t, zipPath) +} + +func TestDownloadPackage_withVerification_missingSignature(t *testing.T) { + zipBytes := testAcmePackageZip(t) + pubPath := filepath.Join(t.TempDir(), "verify.pub.asc") + require.NoError(t, os.WriteFile(pubPath, []byte("x"), 0o600)) + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/epr/acme/acme-1.0.0.zip" { + _, err := w.Write(zipBytes) + require.NoError(t, err) + return + } + http.NotFound(w, r) + })) + t.Cleanup(srv.Close) + + t.Setenv(environment.WithElasticPackagePrefix("VERIFY_PACKAGE_SIGNATURE"), "true") + t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), pubPath) + + dest := t.TempDir() + client := NewClient(srv.URL) + _, err := client.DownloadPackage("acme", "1.0.0", dest) + require.Error(t, err) + + _, statErr := os.Stat(filepath.Join(dest, "acme-1.0.0.zip")) + require.True(t, os.IsNotExist(statErr), "zip should be removed after failed verification") +} + +func TestDownloadPackage_withVerification_badSignature(t *testing.T) { + zipBytes := testAcmePackageZip(t) + passphrase := []byte("a") + + priv, err := 
crypto.GenerateKey("Signer A", "", "rsa", 2048) + require.NoError(t, err) + priv, err = priv.Lock(passphrase) + require.NoError(t, err) + unlocked, err := priv.Unlock(passphrase) + require.NoError(t, err) + t.Cleanup(func() { unlocked.ClearPrivateParams() }) + signRing, err := crypto.NewKeyRing(unlocked) + require.NoError(t, err) + sig, err := signRing.SignDetachedStream(bytes.NewReader(zipBytes)) + require.NoError(t, err) + armoredSig, err := sig.GetArmored() + require.NoError(t, err) + + priv2, err := crypto.GenerateKey("Signer B", "", "rsa", 2048) + require.NoError(t, err) + priv2, err = priv2.Lock(passphrase) + require.NoError(t, err) + unlocked2, err := priv2.Unlock(passphrase) + require.NoError(t, err) + t.Cleanup(func() { unlocked2.ClearPrivateParams() }) + pubArmored, err := unlocked2.GetArmoredPublicKey() + require.NoError(t, err) + + pubPath := filepath.Join(t.TempDir(), "b.pub.asc") + require.NoError(t, os.WriteFile(pubPath, []byte(pubArmored), 0o600)) + + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.URL.Path { + case "/epr/acme/acme-1.0.0.zip": + _, err := w.Write(zipBytes) + require.NoError(t, err) + case "/epr/acme/acme-1.0.0.zip.sig": + _, err := w.Write([]byte(armoredSig)) + require.NoError(t, err) + default: + http.NotFound(w, r) + } + })) + t.Cleanup(srv.Close) + + t.Setenv(environment.WithElasticPackagePrefix("VERIFY_PACKAGE_SIGNATURE"), "true") + t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), pubPath) + + dest := t.TempDir() + client := NewClient(srv.URL) + _, err = client.DownloadPackage("acme", "1.0.0", dest) + require.Error(t, err) + + _, statErr := os.Stat(filepath.Join(dest, "acme-1.0.0.zip")) + require.True(t, os.IsNotExist(statErr), "zip should be removed after failed verification") +} + +func testAcmePackageZip(t *testing.T) []byte { + t.Helper() + const ( + name = "acme" + version = "1.0.0" + ) + var buf bytes.Buffer + zw := zip.NewWriter(&buf) + 
manifestPath := fmt.Sprintf("%s/manifest.yml", name) + w, err := zw.Create(manifestPath) + require.NoError(t, err) + _, err = fmt.Fprintf(w, "name: %s\nversion: %s\ntype: integration\n", name, version) + require.NoError(t, err) + require.NoError(t, zw.Close()) + return buf.Bytes() +} From aa4c8f909dce747ef21c5a6a6c470113eb47e22c Mon Sep 17 00:00:00 2001 From: Tere Date: Mon, 13 Apr 2026 16:55:25 +0200 Subject: [PATCH 03/28] feat: optional PGP verification for downloaded package zips Verify detached signatures when enabled via environment configuration. Made-with: Cursor --- internal/files/verify.go | 68 +++++++++++++++++++ internal/files/verify_test.go | 122 ++++++++++++++++++++++++++++++++++ 2 files changed, 190 insertions(+) create mode 100644 internal/files/verify.go create mode 100644 internal/files/verify_test.go diff --git a/internal/files/verify.go b/internal/files/verify.go new file mode 100644 index 0000000000..e8b9b05dcc --- /dev/null +++ b/internal/files/verify.go @@ -0,0 +1,68 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package files + +import ( + "fmt" + "io" + "os" + "strconv" + + "github.com/ProtonMail/gopenpgp/v2/crypto" + + "github.com/elastic/elastic-package/internal/environment" +) + +var ( + verifyPackageSignatureEnv = environment.WithElasticPackagePrefix("VERIFY_PACKAGE_SIGNATURE") + verifierPublicKeyfileEnv = environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE") +) + +// PackageSignatureVerificationFromEnv reports whether detached PGP verification should run +// for registry package downloads. When verify is true, publicKeyPath is the path to an +// armored public key and has been checked for existence. A non-nil err means the environment +// is inconsistent (e.g. verify enabled but no key path or inaccessible file). 
+func PackageSignatureVerificationFromEnv() (verify bool, publicKeyPath string, err error) { + raw := os.Getenv(verifyPackageSignatureEnv) + if raw == "" { + return false, "", nil + } + verify, err = strconv.ParseBool(raw) + if err != nil { + return false, "", fmt.Errorf("parse %s=%q: %w", verifyPackageSignatureEnv, raw, err) + } + if !verify { + return false, "", nil + } + publicKeyPath = os.Getenv(verifierPublicKeyfileEnv) + if publicKeyPath == "" { + return true, "", fmt.Errorf("%s is true but %s is not set", verifyPackageSignatureEnv, verifierPublicKeyfileEnv) + } + if _, err := os.Stat(publicKeyPath); err != nil { + return true, "", fmt.Errorf("can't access verifier public keyfile (path: %s): %w", publicKeyPath, err) + } + return true, publicKeyPath, nil +} + +// VerifyDetachedPGP checks that signatureArmored is a valid detached OpenPGP signature over +// the bytes read from data, using the armored publicKeyArmored. +func VerifyDetachedPGP(data io.Reader, signatureArmored []byte, publicKeyArmored []byte) error { + pubKey, err := crypto.NewKeyFromArmored(string(publicKeyArmored)) + if err != nil { + return fmt.Errorf("reading public key: %w", err) + } + keyRing, err := crypto.NewKeyRing(pubKey) + if err != nil { + return fmt.Errorf("building key ring: %w", err) + } + sig, err := crypto.NewPGPSignatureFromArmored(string(signatureArmored)) + if err != nil { + return fmt.Errorf("reading signature: %w", err) + } + if err := keyRing.VerifyDetachedStream(data, sig, crypto.GetUnixTime()); err != nil { + return fmt.Errorf("signature verification failed: %w", err) + } + return nil +} diff --git a/internal/files/verify_test.go b/internal/files/verify_test.go new file mode 100644 index 0000000000..90b8fd9ac4 --- /dev/null +++ b/internal/files/verify_test.go @@ -0,0 +1,122 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package files + +import ( + "bytes" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/ProtonMail/gopenpgp/v2/crypto" + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/environment" +) + +func TestVerifyDetachedPGP_roundTrip(t *testing.T) { + content := []byte("package-bytes-for-signature") + passphrase := []byte("test-passphrase") + + priv, err := crypto.GenerateKey("Test Verify", "", "rsa", 2048) + require.NoError(t, err) + priv, err = priv.Lock(passphrase) + require.NoError(t, err) + unlocked, err := priv.Unlock(passphrase) + require.NoError(t, err) + t.Cleanup(func() { unlocked.ClearPrivateParams() }) + + signRing, err := crypto.NewKeyRing(unlocked) + require.NoError(t, err) + + sig, err := signRing.SignDetachedStream(bytes.NewReader(content)) + require.NoError(t, err) + armoredSig, err := sig.GetArmored() + require.NoError(t, err) + + pubArmored, err := unlocked.GetArmoredPublicKey() + require.NoError(t, err) + + err = VerifyDetachedPGP(bytes.NewReader(content), []byte(armoredSig), []byte(pubArmored)) + require.NoError(t, err) +} + +func TestVerifyDetachedPGP_wrongContent(t *testing.T) { + content := []byte("original") + other := []byte("tampered") + passphrase := []byte("p") + + priv, err := crypto.GenerateKey("Test Wrong", "", "rsa", 2048) + require.NoError(t, err) + priv, err = priv.Lock(passphrase) + require.NoError(t, err) + unlocked, err := priv.Unlock(passphrase) + require.NoError(t, err) + t.Cleanup(func() { unlocked.ClearPrivateParams() }) + + signRing, err := crypto.NewKeyRing(unlocked) + require.NoError(t, err) + sig, err := signRing.SignDetachedStream(bytes.NewReader(content)) + require.NoError(t, err) + armoredSig, err := sig.GetArmored() + require.NoError(t, err) + pubArmored, err := unlocked.GetArmoredPublicKey() + require.NoError(t, err) + + err = 
VerifyDetachedPGP(bytes.NewReader(other), []byte(armoredSig), []byte(pubArmored)) + require.Error(t, err) + require.Contains(t, err.Error(), "signature verification failed") +} + +func TestPackageSignatureVerificationFromEnv(t *testing.T) { + keyFile := filepath.Join(t.TempDir(), "pub.asc") + require.NoError(t, os.WriteFile(keyFile, []byte("not-a-real-key-but-present"), 0o600)) + + prefix := environment.WithElasticPackagePrefix + t.Run("unset", func(t *testing.T) { + t.Setenv(prefix("VERIFY_PACKAGE_SIGNATURE"), "") + t.Setenv(prefix("VERIFIER_PUBLIC_KEYFILE"), "") + v, p, err := PackageSignatureVerificationFromEnv() + require.NoError(t, err) + require.False(t, v) + require.Empty(t, p) + }) + t.Run("false", func(t *testing.T) { + t.Setenv(prefix("VERIFY_PACKAGE_SIGNATURE"), "false") + t.Setenv(prefix("VERIFIER_PUBLIC_KEYFILE"), keyFile) + v, p, err := PackageSignatureVerificationFromEnv() + require.NoError(t, err) + require.False(t, v) + require.Empty(t, p) + }) + t.Run("true_ok", func(t *testing.T) { + t.Setenv(prefix("VERIFY_PACKAGE_SIGNATURE"), "true") + t.Setenv(prefix("VERIFIER_PUBLIC_KEYFILE"), keyFile) + v, p, err := PackageSignatureVerificationFromEnv() + require.NoError(t, err) + require.True(t, v) + require.Equal(t, keyFile, p) + }) + t.Run("true_missing_key_path", func(t *testing.T) { + t.Setenv(prefix("VERIFY_PACKAGE_SIGNATURE"), "1") + t.Setenv(prefix("VERIFIER_PUBLIC_KEYFILE"), "") + _, _, err := PackageSignatureVerificationFromEnv() + require.Error(t, err) + require.True(t, strings.Contains(err.Error(), "not set")) + }) + t.Run("invalid_bool", func(t *testing.T) { + t.Setenv(prefix("VERIFY_PACKAGE_SIGNATURE"), "maybe") + t.Setenv(prefix("VERIFIER_PUBLIC_KEYFILE"), keyFile) + _, _, err := PackageSignatureVerificationFromEnv() + require.Error(t, err) + }) + t.Run("true_missing_file", func(t *testing.T) { + t.Setenv(prefix("VERIFY_PACKAGE_SIGNATURE"), "true") + t.Setenv(prefix("VERIFIER_PUBLIC_KEYFILE"), filepath.Join(t.TempDir(), "nope.asc")) + _, _, 
err := PackageSignatureVerificationFromEnv() + require.Error(t, err) + }) +} From 84efe17646b72188a918d98276b834c2fadfadaf Mon Sep 17 00:00:00 2001 From: Tere Date: Mon, 13 Apr 2026 16:55:29 +0200 Subject: [PATCH 04/28] feat: bundle required input packages at build time Download required input packages, copy policy and data stream agent templates (Fleet merge order), merge manifest variables, bundle data stream field definitions, and resolve package: stream references to concrete input types. Made-with: Cursor --- internal/requiredinputs/fields.go | 203 +++++++ internal/requiredinputs/fields_test.go | 295 ++++++++++ internal/requiredinputs/policytemplates.go | 194 +++++++ .../requiredinputs/policytemplates_test.go | 140 +++++ internal/requiredinputs/requiredinputs.go | 176 ++++++ .../requiredinputs/requiredinputs_test.go | 196 +++++++ internal/requiredinputs/streamdefs.go | 191 +++++++ internal/requiredinputs/streamdefs_test.go | 455 +++++++++++++++ internal/requiredinputs/streams.go | 201 +++++++ internal/requiredinputs/streams_test.go | 148 +++++ internal/requiredinputs/testhelpers_test.go | 67 +++ internal/requiredinputs/variables.go | 500 +++++++++++++++++ internal/requiredinputs/variables_test.go | 523 ++++++++++++++++++ internal/requiredinputs/yamlutil.go | 78 +++ 14 files changed, 3367 insertions(+) create mode 100644 internal/requiredinputs/fields.go create mode 100644 internal/requiredinputs/fields_test.go create mode 100644 internal/requiredinputs/policytemplates.go create mode 100644 internal/requiredinputs/policytemplates_test.go create mode 100644 internal/requiredinputs/requiredinputs.go create mode 100644 internal/requiredinputs/requiredinputs_test.go create mode 100644 internal/requiredinputs/streamdefs.go create mode 100644 internal/requiredinputs/streamdefs_test.go create mode 100644 internal/requiredinputs/streams.go create mode 100644 internal/requiredinputs/streams_test.go create mode 100644 internal/requiredinputs/testhelpers_test.go create 
mode 100644 internal/requiredinputs/variables.go create mode 100644 internal/requiredinputs/variables_test.go create mode 100644 internal/requiredinputs/yamlutil.go diff --git a/internal/requiredinputs/fields.go b/internal/requiredinputs/fields.go new file mode 100644 index 0000000000..9532181303 --- /dev/null +++ b/internal/requiredinputs/fields.go @@ -0,0 +1,203 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "errors" + "fmt" + "io/fs" + "os" + "path" + + "gopkg.in/yaml.v3" + + "github.com/elastic/elastic-package/internal/logger" + "github.com/elastic/elastic-package/internal/packages" +) + +// bundleDataStreamFields bundles field definitions from required input packages +// into the composable integration package's data stream fields directories. +// For each data stream that references an input package, fields defined in the +// input package but not already present in the integration's data stream are +// copied into a new file named -fields.yml. 
+func (r *RequiredInputsResolver) bundleDataStreamFields(inputPkgPaths map[string]string, buildRoot *os.Root) error { + dsManifestsPaths, err := fs.Glob(buildRoot.FS(), "data_stream/*/manifest.yml") + if err != nil { + return fmt.Errorf("globbing data stream manifests: %w", err) + } + + errorList := make([]error, 0) + for _, manifestPath := range dsManifestsPaths { + manifestBytes, err := buildRoot.ReadFile(manifestPath) + if err != nil { + return fmt.Errorf("reading data stream manifest %q: %w", manifestPath, err) + } + manifest, err := packages.ReadDataStreamManifestBytes(manifestBytes) + if err != nil { + return fmt.Errorf("parsing data stream manifest %q: %w", manifestPath, err) + } + for _, stream := range manifest.Streams { + if stream.Package == "" { + continue + } + pkgPath, ok := inputPkgPaths[stream.Package] + if !ok { + errorList = append(errorList, fmt.Errorf("stream in manifest %q references input package %q which is not listed in requires.input", manifestPath, stream.Package)) + continue + } + dsRootDir := path.Dir(manifestPath) + if err := r.mergeInputPkgFields(dsRootDir, pkgPath, stream.Package, buildRoot); err != nil { + return fmt.Errorf("merging input package fields for manifest %q: %w", manifestPath, err) + } + } + } + return errors.Join(errorList...) +} + +// mergeInputPkgFields copies field definitions from the input package into the +// integration's data stream fields directory. Fields already defined in the +// integration take precedence; only fields absent from the integration are +// written to /fields/-fields.yml. 
+func (r *RequiredInputsResolver) mergeInputPkgFields(dsRootDir, inputPkgPath, inputPkgName string, buildRoot *os.Root) error { + existingNames, err := collectExistingFieldNames(dsRootDir, buildRoot) + if err != nil { + return fmt.Errorf("collecting existing field names: %w", err) + } + + inputPkgFS, closeFn, err := openPackageFS(inputPkgPath) + if err != nil { + return fmt.Errorf("opening package %q: %w", inputPkgPath, err) + } + defer closeFn() + + inputFieldFiles, err := fs.Glob(inputPkgFS, "fields/*.yml") + if err != nil { + return fmt.Errorf("globbing input package fields: %w", err) + } + if len(inputFieldFiles) == 0 { + logger.Debugf("Input package %q has no fields files, skipping field bundling", inputPkgName) + return nil + } + + // Collect field nodes from input package that are not already defined in the integration. + seenNames := make(map[string]bool) + newNodes := make([]*yaml.Node, 0) + for _, filePath := range inputFieldFiles { + nodes, err := loadFieldNodesFromFile(inputPkgFS, filePath) + if err != nil { + return fmt.Errorf("loading field nodes from %q: %w", filePath, err) + } + for _, node := range nodes { + name := fieldNodeName(node) + if name == "" || existingNames[name] || seenNames[name] { + continue + } + seenNames[name] = true + newNodes = append(newNodes, cloneNode(node)) + } + } + + if len(newNodes) == 0 { + logger.Debugf("No new fields from input package %q to bundle into %q", inputPkgName, dsRootDir) + return nil + } + + // Build a YAML document containing the new field nodes as a sequence. 
+ seqNode := &yaml.Node{Kind: yaml.SequenceNode} + seqNode.Content = newNodes + docNode := &yaml.Node{Kind: yaml.DocumentNode, Content: []*yaml.Node{seqNode}} + + output, err := formatYAMLNode(docNode) + if err != nil { + return fmt.Errorf("formatting bundled fields YAML: %w", err) + } + + fieldsDir := path.Join(dsRootDir, "fields") + if err := buildRoot.MkdirAll(fieldsDir, 0755); err != nil { + return fmt.Errorf("creating fields directory %q: %w", fieldsDir, err) + } + + destPath := path.Join(fieldsDir, inputPkgName+"-fields.yml") + if err := buildRoot.WriteFile(destPath, output, 0644); err != nil { + return fmt.Errorf("writing bundled fields to %q: %w", destPath, err) + } + logger.Debugf("Bundled %d field(s) from input package %q into %s", len(newNodes), inputPkgName, destPath) + return nil +} + +// collectExistingFieldNames returns the set of top-level field names already +// defined in the integration's data stream fields directory. +func collectExistingFieldNames(dsRootDir string, buildRoot *os.Root) (map[string]bool, error) { + pattern := path.Join(dsRootDir, "fields", "*.yml") + paths, err := fs.Glob(buildRoot.FS(), pattern) + if err != nil { + return nil, fmt.Errorf("globbing fields in %q: %w", dsRootDir, err) + } + + names := make(map[string]bool) + for _, p := range paths { + data, err := buildRoot.ReadFile(p) + if err != nil { + return nil, fmt.Errorf("reading fields file %q: %w", p, err) + } + nodes, err := loadFieldNodesFromBytes(data) + if err != nil { + return nil, fmt.Errorf("parsing fields file %q: %w", p, err) + } + for _, node := range nodes { + if name := fieldNodeName(node); name != "" { + names[name] = true + } + } + } + return names, nil +} + +// loadFieldNodesFromFile reads a fields YAML file from an fs.FS and returns +// its top-level sequence items as individual yaml.Node pointers. 
+func loadFieldNodesFromFile(fsys fs.FS, filePath string) ([]*yaml.Node, error) { + data, err := fs.ReadFile(fsys, filePath) + if err != nil { + return nil, fmt.Errorf("reading file %q: %w", filePath, err) + } + return loadFieldNodesFromBytes(data) +} + +// loadFieldNodesFromBytes parses a fields YAML document (expected to be a +// sequence at the document root) and returns the individual item nodes. +func loadFieldNodesFromBytes(data []byte) ([]*yaml.Node, error) { + var doc yaml.Node + if err := yaml.Unmarshal(data, &doc); err != nil { + return nil, fmt.Errorf("unmarshalling fields YAML: %w", err) + } + if doc.Kind == 0 { + // Empty document. + return nil, nil + } + root := &doc + if root.Kind == yaml.DocumentNode { + if len(root.Content) == 0 { + return nil, nil + } + root = root.Content[0] + } + if root.Kind != yaml.SequenceNode { + return nil, fmt.Errorf("expected sequence at fields document root, got kind %v", root.Kind) + } + return root.Content, nil +} + +// fieldNodeName returns the value of the "name" key in a field mapping node, +// or an empty string if the key is absent or the node is nil. +func fieldNodeName(n *yaml.Node) string { + if n == nil { + return "" + } + v := mappingValue(n, "name") + if v == nil { + return "" + } + return v.Value +} diff --git a/internal/requiredinputs/fields_test.go b/internal/requiredinputs/fields_test.go new file mode 100644 index 0000000000..f9e3bbbd63 --- /dev/null +++ b/internal/requiredinputs/fields_test.go @@ -0,0 +1,295 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package requiredinputs + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" +) + +// ---- unit tests -------------------------------------------------------------- + +// TestLoadFieldNodesFromBytes verifies that field YAML sequences are parsed +// correctly into individual yaml.Node pointers. +func TestLoadFieldNodesFromBytes(t *testing.T) { + t.Run("valid sequence", func(t *testing.T) { + data := []byte(` +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: message + type: text + description: Log message. +`) + nodes, err := loadFieldNodesFromBytes(data) + require.NoError(t, err) + require.Len(t, nodes, 2) + assert.Equal(t, "data_stream.type", fieldNodeName(nodes[0])) + assert.Equal(t, "message", fieldNodeName(nodes[1])) + }) + + t.Run("empty document", func(t *testing.T) { + nodes, err := loadFieldNodesFromBytes([]byte("")) + require.NoError(t, err) + assert.Empty(t, nodes) + }) + + t.Run("invalid YAML", func(t *testing.T) { + _, err := loadFieldNodesFromBytes([]byte(":\t:invalid")) + assert.Error(t, err) + }) + + t.Run("non-sequence root", func(t *testing.T) { + data := []byte(`name: foo\ntype: keyword`) + _, err := loadFieldNodesFromBytes(data) + assert.Error(t, err) + }) +} + +// TestFieldNodeName verifies extraction of the "name" field from a YAML +// mapping node representing a field definition. 
+func TestFieldNodeName(t *testing.T) { + t.Run("node with name", func(t *testing.T) { + n := &yaml.Node{Kind: yaml.MappingNode} + upsertKey(n, "name", &yaml.Node{Kind: yaml.ScalarNode, Value: "message"}) + assert.Equal(t, "message", fieldNodeName(n)) + }) + + t.Run("node without name", func(t *testing.T) { + n := &yaml.Node{Kind: yaml.MappingNode} + assert.Equal(t, "", fieldNodeName(n)) + }) + + t.Run("nil node", func(t *testing.T) { + assert.Equal(t, "", fieldNodeName(nil)) + }) +} + +// TestCollectExistingFieldNames verifies that field names are collected from +// all YAML files in a data stream's fields/ directory. +func TestCollectExistingFieldNames(t *testing.T) { + t.Run("collects names from multiple files", func(t *testing.T) { + tmpDir := t.TempDir() + buildRoot, err := os.OpenRoot(tmpDir) + require.NoError(t, err) + defer buildRoot.Close() + + require.NoError(t, buildRoot.MkdirAll("data_stream/logs/fields", 0755)) + require.NoError(t, buildRoot.WriteFile("data_stream/logs/fields/base-fields.yml", []byte(` +- name: "@timestamp" + type: date +- name: data_stream.type + type: constant_keyword +`), 0644)) + require.NoError(t, buildRoot.WriteFile("data_stream/logs/fields/extra-fields.yml", []byte(` +- name: message + type: text +`), 0644)) + + names, err := collectExistingFieldNames("data_stream/logs", buildRoot) + require.NoError(t, err) + assert.True(t, names["@timestamp"]) + assert.True(t, names["data_stream.type"]) + assert.True(t, names["message"]) + assert.Len(t, names, 3) + }) + + t.Run("returns empty set when fields directory does not exist", func(t *testing.T) { + tmpDir := t.TempDir() + buildRoot, err := os.OpenRoot(tmpDir) + require.NoError(t, err) + defer buildRoot.Close() + + require.NoError(t, buildRoot.MkdirAll("data_stream/logs", 0755)) + + names, err := collectExistingFieldNames("data_stream/logs", buildRoot) + require.NoError(t, err) + assert.Empty(t, names) + }) +} + +// ---- integration tests 
-------------------------------------------------------

+// makeFakeEprForFieldBundling supplies the fields_input_pkg fixture path as if
+// it were downloaded from the registry, so integration tests do not need a
+// running stack.
+func makeFakeEprForFieldBundling(t *testing.T) *fakeEprClient {
+ t.Helper()
+ inputPkgPath := filepath.Join("..", "..", "test", "manual_packages", "required_inputs", "fields_input_pkg")
+ return &fakeEprClient{
+ downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) {
+ return inputPkgPath, nil
+ },
+ }
+}
+
+// TestBundleDataStreamFields_PartialOverlap verifies the primary field bundling
+// scenario: fields already present in the integration data stream are skipped
+// (integration wins), and only fields unique to the input package are written
+// to <data_stream_dir>/fields/<input_pkg_name>-fields.yml.
+func TestBundleDataStreamFields_PartialOverlap(t *testing.T) {
+ // with_field_bundling has data_stream/field_logs/fields/base-fields.yml with
+ // 4 common fields. fields_input_pkg has those same 4 plus "message" and
+ // "log.level". After bundling, only "message" and "log.level" should appear
+ // in the generated file.
+ buildPackageRoot := copyFixturePackage(t, "with_field_bundling")
+ resolver := NewRequiredInputsResolver(makeFakeEprForFieldBundling(t))
+
+ err := resolver.Bundle(buildPackageRoot)
+
+ bundledPath := filepath.Join(buildPackageRoot, "data_stream", "field_logs", "fields", "fields_input_pkg-fields.yml")
+ data, err := os.ReadFile(bundledPath)
+ require.NoError(t, err, "bundled fields file should exist")
+
+ nodes, err := loadFieldNodesFromBytes(data)
+ require.NoError(t, err)
+ require.Len(t, nodes, 2)
+
+ names := make([]string, 0, len(nodes))
+ for _, n := range nodes {
+ names = append(names, fieldNodeName(n))
+ }
+ assert.ElementsMatch(t, []string{"message", "log.level"}, names)
+
+ // Original base-fields.yml must be untouched. 
+ originalData, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "field_logs", "fields", "base-fields.yml")) + require.NoError(t, err) + originalNodes, err := loadFieldNodesFromBytes(originalData) + require.NoError(t, err) + assert.Len(t, originalNodes, 4) +} + +// TestBundleDataStreamFields_AllFieldsOverlap verifies that when all fields in +// the input package are already present in the integration data stream, no +// bundled file is created (nothing to add). +func TestBundleDataStreamFields_AllFieldsOverlap(t *testing.T) { + // with_input_package_requires has data_stream/test_logs/fields/base-fields.yml + // with the same 4 fields as test_input_pkg. No new fields → no output file. + inputPkgPath := filepath.Join("..", "..", "test", "manual_packages", "required_inputs", "test_input_pkg") + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgPath, nil + }, + } + + buildPackageRoot := copyFixturePackage(t, "with_input_package_requires") + resolver := NewRequiredInputsResolver(epr) + + err := resolver.Bundle(buildPackageRoot) + require.NoError(t, err) + + bundledPath := filepath.Join(buildPackageRoot, "data_stream", "test_logs", "fields", "test_input_pkg-fields.yml") + _, statErr := os.Stat(bundledPath) + assert.True(t, os.IsNotExist(statErr), "bundled fields file should not be created when all fields already exist") +} + +// TestBundleDataStreamFields_NoFieldsInInputPkg verifies that when the input +// package has no fields/ directory, no error occurs and no file is written. +func TestBundleDataStreamFields_NoFieldsInInputPkg(t *testing.T) { + // Create a minimal input package without a fields/ directory. 
+ inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: no_fields_pkg +version: 0.1.0 +type: input +policy_templates: + - name: t + input: logfile + template_path: input.yml.hbs +`), 0644)) + require.NoError(t, os.MkdirAll(filepath.Join(inputPkgDir, "agent", "input"), 0755)) + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "agent", "input", "input.yml.hbs"), []byte(""), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + + buildPackageRoot := copyFixturePackage(t, "with_field_bundling") + // Patch manifest to reference no_fields_pkg instead. + manifestPath := filepath.Join(buildPackageRoot, "manifest.yml") + manifestData, err := os.ReadFile(manifestPath) + require.NoError(t, err) + patched := []byte(`format_version: 3.6.0 +name: with_field_bundling +title: Integration With Field Bundling +version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: no_fields_pkg + version: "0.1.0" +policy_templates: + - name: field_logs + title: Field Logs + description: Collect logs + data_streams: + - field_logs + inputs: + - package: no_fields_pkg + title: Collect logs + description: Use the no fields input package +owner: + github: elastic/integrations + type: elastic +`) + _ = manifestData // not used further + require.NoError(t, os.WriteFile(manifestPath, patched, 0644)) + + // Also patch the data stream manifest to reference no_fields_pkg. + dsManifestPath := filepath.Join(buildPackageRoot, "data_stream", "field_logs", "manifest.yml") + require.NoError(t, os.WriteFile(dsManifestPath, []byte(`title: Field Logs +type: logs +streams: + - package: no_fields_pkg + title: Field Logs + description: Collect field logs. 
+`), 0644)) + + resolver := NewRequiredInputsResolver(epr) + err = resolver.Bundle(buildPackageRoot) + require.NoError(t, err) + + // No bundled fields file should be created. + bundledPath := filepath.Join(buildPackageRoot, "data_stream", "field_logs", "fields", "no_fields_pkg-fields.yml") + _, statErr := os.Stat(bundledPath) + assert.True(t, os.IsNotExist(statErr), "no fields file should be created when input package has no fields") +} + +// TestBundleDataStreamFields_StreamWithoutPackage verifies that data stream +// streams with no package reference are skipped without error. +func TestBundleDataStreamFields_StreamWithoutPackage(t *testing.T) { + // with_input_package_requires has a second stream with input: logs (no package). + // The test confirms this is processed without error and no unexpected files appear. + inputPkgPath := filepath.Join("..", "..", "test", "manual_packages", "required_inputs", "test_input_pkg") + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgPath, nil + }, + } + + buildPackageRoot := copyFixturePackage(t, "with_input_package_requires") + resolver := NewRequiredInputsResolver(epr) + + err := resolver.Bundle(buildPackageRoot) + require.NoError(t, err) + + // The non-package stream (logs input) should not produce a bundled fields file. + _, statErr := os.Stat(filepath.Join(buildPackageRoot, "data_stream", "test_logs", "fields", "-fields.yml")) + assert.True(t, os.IsNotExist(statErr)) +} diff --git a/internal/requiredinputs/policytemplates.go b/internal/requiredinputs/policytemplates.go new file mode 100644 index 0000000000..b0a4fb5d17 --- /dev/null +++ b/internal/requiredinputs/policytemplates.go @@ -0,0 +1,194 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package requiredinputs + +import ( + "fmt" + "io/fs" + "os" + "path" + + "gopkg.in/yaml.v3" + + "github.com/elastic/elastic-package/internal/logger" + "github.com/elastic/elastic-package/internal/packages" +) + +func (r *RequiredInputsResolver) bundlePolicyTemplatesInputPackageTemplates(manifestBytes []byte, manifest *packages.PackageManifest, inputPkgPaths map[string]string, buildRoot *os.Root) error { + + // parse the manifest YAML document preserving formatting for targeted modifications + // using manifestBytes allows us to preserve comments and formatting in the manifest when we update it with template paths from input packages + var doc yaml.Node + if err := yaml.Unmarshal(manifestBytes, &doc); err != nil { + return fmt.Errorf("failed to parse manifest YAML: %w", err) + } + + // for each policy template, with an input package reference: + // collect the templates from the input package and copy them to the agent/input directory of the build package + // then update the policy template manifest to include the copied templates as template_paths + for ptIdx, pt := range manifest.PolicyTemplates { + for inputIdx, input := range pt.Inputs { + if input.Package == "" { + continue + } + sourcePath, ok := inputPkgPaths[input.Package] + if !ok || sourcePath == "" { + return fmt.Errorf("failed to find input package %q referenced by policy template %q", input.Package, pt.Name) + } + inputPaths, err := r.collectAndCopyInputPkgPolicyTemplates(sourcePath, input.Package, buildRoot) + if err != nil { + return fmt.Errorf("failed to collect and copy input package policy templates: %w", err) + } + if len(inputPaths) == 0 { + continue + } + + // current manifest template paths + paths := make([]string, 0) + // if composable package has included custom template path or paths, include them + // if no template paths are included at the manifest, only the imported templates are included + if input.TemplatePath != "" { + paths = append(paths, input.TemplatePath) + } else if 
len(input.TemplatePaths) > 0 { + paths = append(paths, input.TemplatePaths...) + } + paths = append(inputPaths, paths...) + + if err := setInputPolicyTemplateTemplatePaths(&doc, ptIdx, inputIdx, paths); err != nil { + return fmt.Errorf("failed to update policy template manifest with input package templates: %w", err) + } + } + } + + // Serialise the updated YAML document back to disk. + updated, err := formatYAMLNode(&doc) + if err != nil { + return fmt.Errorf("failed to format updated manifest: %w", err) + } + if err := buildRoot.WriteFile("manifest.yml", updated, 0664); err != nil { + return fmt.Errorf("failed to write updated manifest: %w", err) + } + + return nil +} + +// collectAndCopyInputPkgPolicyTemplates collects the templates from the input package and copies them to the agent/input directory of the build package +// it returns the list of copied template names +func (r *RequiredInputsResolver) collectAndCopyInputPkgPolicyTemplates(inputPkgPath, inputPkgName string, buildRoot *os.Root) ([]string, error) { + inputPkgFS, closeFn, err := openPackageFS(inputPkgPath) + if err != nil { + return nil, fmt.Errorf("failed to open input package %q: %w", inputPkgPath, err) + } + defer func() { _ = closeFn() }() + + manifestBytes, err := fs.ReadFile(inputPkgFS, packages.PackageManifestFile) + if err != nil { + return nil, fmt.Errorf("failed to read input package manifest: %w", err) + } + manifest, err := packages.ReadPackageManifestBytes(manifestBytes) + if err != nil { + return nil, fmt.Errorf("failed to parse input package manifest: %w", err) + } + + seen := make(map[string]bool) + copiedNames := make([]string, 0) + for _, pt := range manifest.PolicyTemplates { + var names []string + switch { + case len(pt.TemplatePaths) > 0: + names = pt.TemplatePaths + case pt.TemplatePath != "": + names = []string{pt.TemplatePath} + } + for _, name := range names { + if seen[name] { + continue + } + seen[name] = true + // copy the template from "agent/input" directory of the 
input package to the "agent/input" directory of the build package + content, err := fs.ReadFile(inputPkgFS, path.Join("agent", "input", name)) + if err != nil { + return nil, fmt.Errorf("failed to read template %q from agent/input (declared in manifest): %w", name, err) + } + destName := inputPkgName + "-" + name + // create the agent/input directory if it doesn't exist + agentInputDir := path.Join("agent", "input") + if err := buildRoot.MkdirAll(agentInputDir, 0755); err != nil { + return nil, fmt.Errorf("failed to create agent/input directory: %w", err) + } + destPath := path.Join(agentInputDir, destName) + if err := buildRoot.WriteFile(destPath, content, 0644); err != nil { + return nil, fmt.Errorf("failed to write template %q: %w", destName, err) + } + logger.Debugf("Copied input package template: %s -> %s", name, destName) + copiedNames = append(copiedNames, destName) + } + } + return copiedNames, nil +} + +// setInputPolicyTemplateTemplatePaths updates the manifest YAML document to set the template_paths for the specified policy template input to the provided paths +func setInputPolicyTemplateTemplatePaths(doc *yaml.Node, policyTemplatesIdx int, inputIdx int, paths []string) error { + // Navigate: document -> root mapping -> "policy_templates" -> sequence -> item [policyTemplatesIdx] -> mapping -> "inputs" -> sequence -> item [inputIdx] -> input mapping. 
+ root := doc + if root.Kind == yaml.DocumentNode { + if len(root.Content) == 0 { + return fmt.Errorf("failed to set policy template input paths: empty YAML document") + } + root = root.Content[0] + } + if root.Kind != yaml.MappingNode { + return fmt.Errorf("failed to set policy template input paths: expected mapping node at document root") + } + + // policy_templates: + // - inputs: + // - template_path: foo + policyTemplatesNode := mappingValue(root, "policy_templates") + if policyTemplatesNode == nil { + return fmt.Errorf("failed to set policy template input paths: 'policy_templates' key not found in manifest") + } + if policyTemplatesNode.Kind != yaml.SequenceNode { + return fmt.Errorf("failed to set policy template input paths: 'policy_templates' is not a sequence") + } + if policyTemplatesIdx < 0 || policyTemplatesIdx >= len(policyTemplatesNode.Content) { + return fmt.Errorf("failed to set policy template input paths: policy template index %d out of range (len=%d)", policyTemplatesIdx, len(policyTemplatesNode.Content)) + } + + policyTemplateNode := policyTemplatesNode.Content[policyTemplatesIdx] + if policyTemplateNode.Kind != yaml.MappingNode { + return fmt.Errorf("failed to set policy template input paths: policy template entry %d is not a mapping", policyTemplatesIdx) + } + + inputsNode := mappingValue(policyTemplateNode, "inputs") + if inputsNode == nil { + return fmt.Errorf("failed to set policy template input paths: 'inputs' key not found in policy template %d", policyTemplatesIdx) + } + if inputsNode.Kind != yaml.SequenceNode { + return fmt.Errorf("failed to set policy template input paths: 'inputs' is not a sequence") + } + if inputIdx < 0 || inputIdx >= len(inputsNode.Content) { + return fmt.Errorf("failed to set policy template input paths: input index %d out of range (len=%d)", inputIdx, len(inputsNode.Content)) + } + + inputNode := inputsNode.Content[inputIdx] + if inputNode.Kind != yaml.MappingNode { + return fmt.Errorf("failed to set policy 
template input paths: input entry %d is not a mapping", inputIdx) + } + + // Remove singular template_path if present. + removeKey(inputNode, "template_path") + + // Build the template_paths sequence node. + seqNode := &yaml.Node{Kind: yaml.SequenceNode} + for _, p := range paths { + seqNode.Content = append(seqNode.Content, &yaml.Node{Kind: yaml.ScalarNode, Value: p}) + } + + // Upsert template_paths on the input node. + upsertKey(inputNode, "template_paths", seqNode) + + return nil +} diff --git a/internal/requiredinputs/policytemplates_test.go b/internal/requiredinputs/policytemplates_test.go new file mode 100644 index 0000000000..e1f8de8d69 --- /dev/null +++ b/internal/requiredinputs/policytemplates_test.go @@ -0,0 +1,140 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/packages" +) + +func TestBundlePolicyTemplatesInputPackageTemplates_InvalidYAML(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + manifestBytes := []byte("foo: [") + manifest, _ := packages.ReadPackageManifestBytes(manifestBytes) // may be nil/partial + + err = r.bundlePolicyTemplatesInputPackageTemplates(manifestBytes, manifest, nil, buildRoot) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to parse manifest YAML") +} + +// TestBundlePolicyTemplatesInputPackageTemplates_MultiplePolicyTemplates verifies that templates +// from ALL policy templates in an input package are bundled into agent/input/, not just the first +// one (Issue 5 in the alignment 
review). +func TestBundlePolicyTemplatesInputPackageTemplates_MultiplePolicyTemplates(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + manifestBytes := []byte(` +type: integration +requires: + input: + - package: sql + version: 0.1.0 +policy_templates: + - inputs: + - package: sql +`) + err = buildRoot.WriteFile("manifest.yml", manifestBytes, 0644) + require.NoError(t, err) + + manifest, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + + fakeInputDir := createFakeInputWithMultiplePolicyTemplates(t) + inputPkgPaths := map[string]string{"sql": fakeInputDir} + + err = r.bundlePolicyTemplatesInputPackageTemplates(manifestBytes, manifest, inputPkgPaths, buildRoot) + require.NoError(t, err) + + // All templates from both policy templates in the input package must be present. + _, err = buildRoot.ReadFile(filepath.Join("agent", "input", "sql-input.yml.hbs")) + require.NoError(t, err, "template from first policy_template must be bundled") + _, err = buildRoot.ReadFile(filepath.Join("agent", "input", "sql-metrics.yml.hbs")) + require.NoError(t, err, "template from second policy_template must be bundled") + _, err = buildRoot.ReadFile(filepath.Join("agent", "input", "sql-extra.yml.hbs")) + require.NoError(t, err, "extra template from second policy_template must be bundled") + + updated, err := buildRoot.ReadFile("manifest.yml") + require.NoError(t, err) + updatedManifest, err := packages.ReadPackageManifestBytes(updated) + require.NoError(t, err) + require.Len(t, updatedManifest.PolicyTemplates, 1) + require.Len(t, updatedManifest.PolicyTemplates[0].Inputs, 1) + input := updatedManifest.PolicyTemplates[0].Inputs[0] + assert.Empty(t, input.TemplatePath) + assert.Equal(t, []string{"sql-input.yml.hbs", "sql-metrics.yml.hbs", "sql-extra.yml.hbs"}, input.TemplatePaths) +} + +func 
TestBundlePolicyTemplatesInputPackageTemplates_SuccessTemplatesCopied(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + // create current package manifest with one policy template input referencing an input package template + // it has an existing template, so both the existing and input package template should be copied and the manifest updated to reference both + manifestBytes := []byte(` +type: integration +requires: + input: + - package: sql + version: 0.1.0 +policy_templates: + - inputs: + - package: sql + template_path: existing.yml.hbs +`) + err = buildRoot.WriteFile("manifest.yml", manifestBytes, 0644) + require.NoError(t, err) + err = buildRoot.MkdirAll(filepath.Join("agent", "input"), 0755) + require.NoError(t, err) + err = buildRoot.WriteFile(filepath.Join("agent", "input", "existing.yml.hbs"), []byte("existing content"), 0644) + require.NoError(t, err) + + // parse manifest to pass to function + manifest, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + + fakeInputDir := createFakeInputHelper(t) + inputPkgPaths := map[string]string{"sql": fakeInputDir} + + err = r.bundlePolicyTemplatesInputPackageTemplates(manifestBytes, manifest, inputPkgPaths, buildRoot) + require.NoError(t, err) + + // Files exist. + _, err = buildRoot.ReadFile(filepath.Join("agent", "input", "sql-input.yml.hbs")) + require.NoError(t, err) + _, err = buildRoot.ReadFile(filepath.Join("agent", "input", "existing.yml.hbs")) + require.NoError(t, err) + + // Written manifest has template_paths set and template_path removed for that input. 
+ updated, err := buildRoot.ReadFile("manifest.yml") + require.NoError(t, err) + updatedManifest, err := packages.ReadPackageManifestBytes(updated) + require.NoError(t, err) + require.Len(t, updatedManifest.PolicyTemplates, 1) + require.Len(t, updatedManifest.PolicyTemplates[0].Inputs, 1) + input := updatedManifest.PolicyTemplates[0].Inputs[0] + assert.Empty(t, input.TemplatePath) + assert.Equal(t, []string{"sql-input.yml.hbs", "existing.yml.hbs"}, input.TemplatePaths) +} diff --git a/internal/requiredinputs/requiredinputs.go b/internal/requiredinputs/requiredinputs.go new file mode 100644 index 0000000000..9595ccecd2 --- /dev/null +++ b/internal/requiredinputs/requiredinputs.go @@ -0,0 +1,176 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "archive/zip" + "errors" + "fmt" + "io/fs" + "os" + "path" + + "github.com/elastic/elastic-package/internal/logger" + "github.com/elastic/elastic-package/internal/packages" +) + +type eprClient interface { + DownloadPackage(packageName string, packageVersion string, tmpDir string) (string, error) +} + +// Resolver enriches a built integration package using required input packages from the registry: +// policy and data stream templates, merged manifest variables, data stream field definitions, +// and resolution of package: references on inputs and streams to the effective input type +// from the required input package, where applicable. +type Resolver interface { + Bundle(buildPackageRoot string) error +} + +// NoopRequiredInputsResolver is a no-op implementation of Resolver. +// TODO: Replace with a resolver that supports test overrides (e.g. local package paths) +// when implementing local input package resolution for development and testing workflows. 
+type NoopRequiredInputsResolver struct{} + +func (r *NoopRequiredInputsResolver) Bundle(_ string) error { + return nil +} + +// RequiredInputsResolver implements Resolver by downloading required input packages via an EPR client +// and applying Bundle to the built package tree. +type RequiredInputsResolver struct { + eprClient eprClient +} + +// NewRequiredInputsResolver returns a Resolver that downloads required input packages from the registry. +func NewRequiredInputsResolver(eprClient eprClient) *RequiredInputsResolver { + return &RequiredInputsResolver{ + eprClient: eprClient, + } +} + +// Bundle updates buildPackageRoot (a built package directory) for integrations that declare +// requires.input: it downloads those input packages, copies policy and data stream templates, +// merges variables into the integration manifest, bundles data stream field definitions, and +// replaces package: references on policy template inputs and data stream streams with the +// concrete input type from the referenced input package (last, after variable merge). +// Non-integration packages or packages without requires.input are left unchanged. 
+func (r *RequiredInputsResolver) Bundle(buildPackageRoot string) error { + buildRoot, err := os.OpenRoot(buildPackageRoot) + if err != nil { + return fmt.Errorf("failed to open build package root: %w", err) + } + defer buildRoot.Close() + + manifestBytes, err := buildRoot.ReadFile("manifest.yml") + if err != nil { + return fmt.Errorf("failed to read package manifest: %w", err) + } + manifest, err := packages.ReadPackageManifestBytes(manifestBytes) + if err != nil { + return fmt.Errorf("failed to parse package manifest: %w", err) + } + + // validate that the package is an integration and has required input packages + if manifest.Type != "integration" { + return nil + } + if manifest.Requires == nil || len(manifest.Requires.Input) == 0 { + logger.Debug("Package has no required input packages, skipping required input processing") + return nil + } + + tmpDir, err := os.MkdirTemp("", "elastic-package-input-pkgs-*") + if err != nil { + return fmt.Errorf("failed to create temp directory for input packages: %w", err) + } + defer func() { _ = os.RemoveAll(tmpDir) }() + + inputPkgPaths, err := r.mapRequiredInputPackagesPaths(manifest.Requires.Input, tmpDir) + if err != nil { + return err + } + + if err := r.bundlePolicyTemplatesInputPackageTemplates(manifestBytes, manifest, inputPkgPaths, buildRoot); err != nil { + return fmt.Errorf("failed to bundle policy template input package templates: %w", err) + } + + if err := r.bundleDataStreamTemplates(inputPkgPaths, buildRoot); err != nil { + return fmt.Errorf("failed to bundle data stream input package templates: %w", err) + } + + if err := r.mergeVariables(manifest, inputPkgPaths, buildRoot); err != nil { + return fmt.Errorf("merging variables from input packages: %w", err) + } + + if err := r.bundleDataStreamFields(inputPkgPaths, buildRoot); err != nil { + return fmt.Errorf("bundling data stream fields from input packages: %w", err) + } + + if err := r.resolveStreamInputTypes(manifest, inputPkgPaths, buildRoot); err != nil { + 
return fmt.Errorf("resolving stream input types from input packages: %w", err)
+	}
+
+	return nil
+}
+
+// mapRequiredInputPackagesPaths downloads required input packages to the temporary directory.
+// It returns a map of package name to zip path.
+func (r *RequiredInputsResolver) mapRequiredInputPackagesPaths(manifestInputRequires []packages.PackageDependency, tmpDir string) (map[string]string, error) {
+	inputPkgPaths := make(map[string]string, len(manifestInputRequires))
+	errs := make([]error, 0, len(manifestInputRequires))
+	for _, inputDependency := range manifestInputRequires {
+		if _, ok := inputPkgPaths[inputDependency.Package]; ok {
+			// skip if already downloaded
+			continue
+		}
+		path, err := r.eprClient.DownloadPackage(inputDependency.Package, inputDependency.Version, tmpDir)
+		if err != nil {
+			// all required input packages must be downloaded successfully
+			errs = append(errs, fmt.Errorf("failed to download input package %q: %w", inputDependency.Package, err))
+			continue
+		}
+
+		// key is package name, for now we only support one version per package
+		inputPkgPaths[inputDependency.Package] = path
+		logger.Debugf("Resolved input package %q at %s", inputDependency.Package, path)
+	}
+
+	return inputPkgPaths, errors.Join(errs...)
+}
+
+// openPackageFS returns an fs.FS rooted at the package root (manifest.yml at
+// the top level) and a close function that must be called when done. For
+// directory packages it closes the os.Root; for zip packages it closes the
+// underlying zip.ReadCloser.
+func openPackageFS(pkgPath string) (fs.FS, func() error, error) { + info, err := os.Stat(pkgPath) + if err != nil { + return nil, nil, err + } + if info.IsDir() { + // open the package directory as a root + root, err := os.OpenRoot(pkgPath) + if err != nil { + return nil, nil, err + } + return root.FS(), root.Close, nil + } + // open the package zip as a zip reader + zipRC, err := zip.OpenReader(pkgPath) + if err != nil { + return nil, nil, err + } + matched, err := fs.Glob(zipRC, "*/"+packages.PackageManifestFile) + if err != nil || len(matched) == 0 { + zipRC.Close() + return nil, nil, fmt.Errorf("failed to find package manifest in zip %s", pkgPath) + } + subFS, err := fs.Sub(zipRC, path.Dir(matched[0])) + if err != nil { + zipRC.Close() + return nil, nil, err + } + return subFS, zipRC.Close, nil +} diff --git a/internal/requiredinputs/requiredinputs_test.go b/internal/requiredinputs/requiredinputs_test.go new file mode 100644 index 0000000000..2d89a72129 --- /dev/null +++ b/internal/requiredinputs/requiredinputs_test.go @@ -0,0 +1,196 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package requiredinputs + +import ( + "fmt" + "os" + "path" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/packages" +) + +type fakeEprClient struct { + downloadPackageFunc func(packageName string, packageVersion string, tmpDir string) (string, error) +} + +func (f *fakeEprClient) DownloadPackage(packageName string, packageVersion string, tmpDir string) (string, error) { + if f.downloadPackageFunc != nil { + return f.downloadPackageFunc(packageName, packageVersion, tmpDir) + } + return "", fmt.Errorf("download package not implemented") +} + +func TestBundle_Success(t *testing.T) { + fakeInputPath := createFakeInputHelper(t) + fakeEprClient := &fakeEprClient{ + downloadPackageFunc: func(packageName string, packageVersion string, tmpDir string) (string, error) { + return fakeInputPath, nil + }, + } + buildPackageRoot := t.TempDir() + + manifest := []byte(`name: test-package +version: 0.1.0 +type: integration +requires: + input: + - package: sql + version: 0.1.0 +policy_templates: + - inputs: + - package: sql + - type: logs +`) + err := os.WriteFile(path.Join(buildPackageRoot, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + + resolver := NewRequiredInputsResolver(fakeEprClient) + + err = resolver.Bundle(buildPackageRoot) + require.NoError(t, err) + + _, err = os.ReadFile(path.Join(buildPackageRoot, "agent", "input", "sql-input.yml.hbs")) + require.NoError(t, err) + + updatedManifestBytes, err := os.ReadFile(path.Join(buildPackageRoot, "manifest.yml")) + require.NoError(t, err) + updatedManifest, err := packages.ReadPackageManifestBytes(updatedManifestBytes) + require.NoError(t, err) + require.Len(t, updatedManifest.Requires.Input, 1) + require.Equal(t, "sql", updatedManifest.Requires.Input[0].Package) + require.Equal(t, "0.1.0", updatedManifest.Requires.Input[0].Version) + + require.Equal(t, "sql", updatedManifest.PolicyTemplates[0].Inputs[0].Type) + 
require.Empty(t, updatedManifest.PolicyTemplates[0].Inputs[0].Package) + require.Len(t, updatedManifest.PolicyTemplates[0].Inputs[0].TemplatePaths, 1) + require.Equal(t, "sql-input.yml.hbs", updatedManifest.PolicyTemplates[0].Inputs[0].TemplatePaths[0]) + +} + +func TestBundle_NoManifest(t *testing.T) { + fakeInputPath := createFakeInputHelper(t) + fakeEprClient := &fakeEprClient{ + downloadPackageFunc: func(packageName string, packageVersion string, tmpDir string) (string, error) { + return fakeInputPath, nil + }, + } + buildPackageRoot := t.TempDir() + + resolver := NewRequiredInputsResolver(fakeEprClient) + + err := resolver.Bundle(buildPackageRoot) + require.Error(t, err) + assert.ErrorContains(t, err, "failed to read package manifest") +} + +func TestBundle_SkipNoIntegration(t *testing.T) { + fakeInputPath := createFakeInputHelper(t) + fakeEprClient := &fakeEprClient{ + downloadPackageFunc: func(packageName string, packageVersion string, tmpDir string) (string, error) { + return fakeInputPath, nil + }, + } + buildPackageRoot := t.TempDir() + + manifest := []byte(`name: test-package +version: 0.1.0 +type: input +`) + err := os.WriteFile(path.Join(buildPackageRoot, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + + resolver := NewRequiredInputsResolver(fakeEprClient) + + err = resolver.Bundle(buildPackageRoot) + require.NoError(t, err) +} + +func TestBundle_NoRequires(t *testing.T) { + fakeEprClient := &fakeEprClient{ + downloadPackageFunc: func(packageName string, packageVersion string, tmpDir string) (string, error) { + return "", fmt.Errorf("no download without requires") + }, + } + buildPackageRoot := t.TempDir() + + manifest := []byte(`name: test-package +version: 0.1.0 +type: integration +policy_templates: + - inputs: + - type: logs +`) + err := os.WriteFile(path.Join(buildPackageRoot, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + + resolver := NewRequiredInputsResolver(fakeEprClient) + + err = resolver.Bundle(buildPackageRoot) + 
require.NoError(t, err) + + updatedManifestBytes, err := os.ReadFile(path.Join(buildPackageRoot, "manifest.yml")) + require.NoError(t, err) + updatedManifest, err := packages.ReadPackageManifestBytes(updatedManifestBytes) + require.NoError(t, err) + require.Nil(t, updatedManifest.Requires) +} + +// TestBundleInputPackageTemplates_PreservesLinkedTemplateTargetPath checks that after +// IncludeLinkedFiles has materialized a policy-template input template (regular file +// at the path named in manifest, not a *.link stub), bundling still prepends input-package +// templates and keeps the integration-owned template_path entry last in template_paths. +func TestBundleInputPackageTemplates_PreservesLinkedTemplateTargetPath(t *testing.T) { + fakeInputPath := createFakeInputHelper(t) + fakeEprClient := &fakeEprClient{ + downloadPackageFunc: func(packageName string, packageVersion string, tmpDir string) (string, error) { + return fakeInputPath, nil + }, + } + buildPackageRoot := t.TempDir() + + const ownedName = "integration_owned.hbs" + ownedContent := []byte("# from linked target\n") + err := os.MkdirAll(path.Join(buildPackageRoot, "agent", "input"), 0755) + require.NoError(t, err) + err = os.WriteFile(path.Join(buildPackageRoot, "agent", "input", ownedName), ownedContent, 0644) + require.NoError(t, err) + + manifest := []byte(`name: test-package +version: 0.1.0 +type: integration +requires: + input: + - package: sql + version: 0.1.0 +policy_templates: + - inputs: + - package: sql + template_path: ` + ownedName + ` + - type: logs +`) + err = os.WriteFile(path.Join(buildPackageRoot, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + + resolver := NewRequiredInputsResolver(fakeEprClient) + err = resolver.Bundle(buildPackageRoot) + require.NoError(t, err) + + got, err := os.ReadFile(path.Join(buildPackageRoot, "agent", "input", ownedName)) + require.NoError(t, err) + require.Equal(t, ownedContent, got) + + updatedManifestBytes, err := 
os.ReadFile(path.Join(buildPackageRoot, "manifest.yml")) + require.NoError(t, err) + updatedManifest, err := packages.ReadPackageManifestBytes(updatedManifestBytes) + require.NoError(t, err) + + paths := updatedManifest.PolicyTemplates[0].Inputs[0].TemplatePaths + require.Equal(t, []string{"sql-input.yml.hbs", ownedName}, paths) +} diff --git a/internal/requiredinputs/streamdefs.go b/internal/requiredinputs/streamdefs.go new file mode 100644 index 0000000000..1183d4d09c --- /dev/null +++ b/internal/requiredinputs/streamdefs.go @@ -0,0 +1,191 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "fmt" + "io/fs" + "os" + "path" + + "gopkg.in/yaml.v3" + + "github.com/elastic/elastic-package/internal/logger" + "github.com/elastic/elastic-package/internal/packages" +) + +// inputPkgInfo holds the resolved metadata from an input package needed to +// replace package: references in composable package manifests. +type inputPkgInfo struct { + identifier string // policy_templates[0].input (e.g. "logfile") + pkgTitle string // manifest.title (fallback title) + pkgDescription string // manifest.description (fallback description) +} + +// resolveStreamInputTypes replaces all package: references in the +// composable package's manifest.yml (policy_templates[].inputs) and in each +// data_stream/*/manifest.yml (streams[]) with the actual input type identifier +// from the referenced input package, then removes the package: key. +// +// This step must run last, after mergeVariables, because that step uses +// stream.Package and input.Package to identify which entries to process. 
+func (r *RequiredInputsResolver) resolveStreamInputTypes( + manifest *packages.PackageManifest, + inputPkgPaths map[string]string, + buildRoot *os.Root, +) error { + // Step 1 — Build a cache of inputPkgInfo per package name. + infoByPkg := make(map[string]inputPkgInfo, len(inputPkgPaths)) + for pkgName, pkgPath := range inputPkgPaths { + info, err := loadInputPkgInfo(pkgPath) + if err != nil { + return fmt.Errorf("loading input package info for %q: %w", pkgName, err) + } + infoByPkg[pkgName] = info + } + + // Step 2 — Update policy_templates[].inputs[] in manifest.yml. + manifestBytes, err := buildRoot.ReadFile("manifest.yml") + if err != nil { + return fmt.Errorf("reading manifest: %w", err) + } + var doc yaml.Node + if err := yaml.Unmarshal(manifestBytes, &doc); err != nil { + return fmt.Errorf("parsing manifest YAML: %w", err) + } + + for ptIdx, pt := range manifest.PolicyTemplates { + for inputIdx, input := range pt.Inputs { + if input.Package == "" { + continue + } + info, ok := infoByPkg[input.Package] + if !ok { + return fmt.Errorf("input package %q referenced in policy_templates[%d].inputs[%d] not found in required inputs", input.Package, ptIdx, inputIdx) + } + + inputNode, err := getInputMappingNode(&doc, ptIdx, inputIdx) + if err != nil { + return fmt.Errorf("getting input node at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) + } + + upsertKey(inputNode, "type", &yaml.Node{Kind: yaml.ScalarNode, Value: info.identifier}) + + if mappingValue(inputNode, "title") == nil && info.pkgTitle != "" { + upsertKey(inputNode, "title", &yaml.Node{Kind: yaml.ScalarNode, Value: info.pkgTitle}) + } + if mappingValue(inputNode, "description") == nil && info.pkgDescription != "" { + upsertKey(inputNode, "description", &yaml.Node{Kind: yaml.ScalarNode, Value: info.pkgDescription}) + } + + removeKey(inputNode, "package") + } + } + + updated, err := formatYAMLNode(&doc) + if err != nil { + return fmt.Errorf("formatting updated manifest: %w", err) + } + if err := 
buildRoot.WriteFile("manifest.yml", updated, 0664); err != nil { + return fmt.Errorf("writing updated manifest: %w", err) + } + + // Step 3 — Update streams[] in each data_stream/*/manifest.yml. + dsManifestPaths, err := fs.Glob(buildRoot.FS(), "data_stream/*/manifest.yml") + if err != nil { + return fmt.Errorf("globbing data stream manifests: %w", err) + } + + for _, manifestPath := range dsManifestPaths { + dsManifestBytes, err := buildRoot.ReadFile(manifestPath) + if err != nil { + return fmt.Errorf("reading data stream manifest %q: %w", manifestPath, err) + } + + var dsDoc yaml.Node + if err := yaml.Unmarshal(dsManifestBytes, &dsDoc); err != nil { + return fmt.Errorf("parsing data stream manifest YAML %q: %w", manifestPath, err) + } + + dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) + if err != nil { + return fmt.Errorf("parsing data stream manifest %q: %w", manifestPath, err) + } + + for streamIdx, stream := range dsManifest.Streams { + if stream.Package == "" { + continue + } + info, ok := infoByPkg[stream.Package] + if !ok { + return fmt.Errorf("input package %q referenced in %q streams[%d] not found in required inputs", stream.Package, path.Dir(manifestPath), streamIdx) + } + + streamNode, err := getStreamMappingNode(&dsDoc, streamIdx) + if err != nil { + return fmt.Errorf("getting stream node at index %d in %q: %w", streamIdx, manifestPath, err) + } + + upsertKey(streamNode, "input", &yaml.Node{Kind: yaml.ScalarNode, Value: info.identifier}) + + if stream.Title == "" && info.pkgTitle != "" { + upsertKey(streamNode, "title", &yaml.Node{Kind: yaml.ScalarNode, Value: info.pkgTitle}) + } + if stream.Description == "" && info.pkgDescription != "" { + upsertKey(streamNode, "description", &yaml.Node{Kind: yaml.ScalarNode, Value: info.pkgDescription}) + } + + removeKey(streamNode, "package") + } + + dsUpdated, err := formatYAMLNode(&dsDoc) + if err != nil { + return fmt.Errorf("formatting updated data stream manifest %q: %w", 
manifestPath, err) + } + if err := buildRoot.WriteFile(manifestPath, dsUpdated, 0664); err != nil { + return fmt.Errorf("writing updated data stream manifest %q: %w", manifestPath, err) + } + } + + return nil +} + +// loadInputPkgInfo reads an input package's manifest and extracts the metadata +// needed to replace package: references in composable packages. +func loadInputPkgInfo(pkgPath string) (inputPkgInfo, error) { + pkgFS, closeFn, err := openPackageFS(pkgPath) + if err != nil { + return inputPkgInfo{}, fmt.Errorf("opening package: %w", err) + } + defer closeFn() + + manifestBytes, err := fs.ReadFile(pkgFS, packages.PackageManifestFile) + if err != nil { + return inputPkgInfo{}, fmt.Errorf("reading manifest: %w", err) + } + + m, err := packages.ReadPackageManifestBytes(manifestBytes) + if err != nil { + return inputPkgInfo{}, fmt.Errorf("parsing manifest: %w", err) + } + + if len(m.PolicyTemplates) == 0 { + return inputPkgInfo{}, fmt.Errorf("input package %q has no policy templates", m.Name) + } + if len(m.PolicyTemplates) > 1 { + logger.Debugf("Input package %q has multiple policy templates; using input identifier %q from first", m.Name, m.PolicyTemplates[0].Input) + } + + pt := m.PolicyTemplates[0] + if pt.Input == "" { + return inputPkgInfo{}, fmt.Errorf("input package %q policy template %q has no input identifier", m.Name, pt.Name) + } + + return inputPkgInfo{ + identifier: pt.Input, + pkgTitle: m.Title, + pkgDescription: m.Description, + }, nil +} diff --git a/internal/requiredinputs/streamdefs_test.go b/internal/requiredinputs/streamdefs_test.go new file mode 100644 index 0000000000..03e7a314f2 --- /dev/null +++ b/internal/requiredinputs/streamdefs_test.go @@ -0,0 +1,455 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package requiredinputs + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/packages" +) + +// ---- unit tests -------------------------------------------------------------- + +// TestLoadInputPkgInfo verifies that metadata is correctly extracted from an +// input package manifest directory. +func TestLoadInputPkgInfo(t *testing.T) { + dir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(dir, "manifest.yml"), []byte(` +name: my_input_pkg +title: My Input Package +description: A test input package. +version: 0.1.0 +type: input +policy_templates: + - name: logs + input: logfile + type: logs +`), 0644)) + + info, err := loadInputPkgInfo(dir) + require.NoError(t, err) + assert.Equal(t, "logfile", info.identifier) + assert.Equal(t, "My Input Package", info.pkgTitle) + assert.Equal(t, "A test input package.", info.pkgDescription) +} + +func TestLoadInputPkgInfo_NoPolicyTemplates(t *testing.T) { + dir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(dir, "manifest.yml"), []byte(` +name: empty_pkg +version: 0.1.0 +type: input +`), 0644)) + + _, err := loadInputPkgInfo(dir) + assert.ErrorContains(t, err, "no policy templates") +} + +func TestLoadInputPkgInfo_EmptyInputIdentifier(t *testing.T) { + dir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(dir, "manifest.yml"), []byte(` +name: bad_pkg +version: 0.1.0 +type: input +policy_templates: + - name: logs + type: logs +`), 0644)) + + _, err := loadInputPkgInfo(dir) + assert.ErrorContains(t, err, "no input identifier") +} + +// ---- integration tests ------------------------------------------------------- + +// TestResolveStreamInputTypes_ReplacesPackageWithType verifies that a +// policy_templates[].inputs entry with package: is replaced by type: and that +// the package: key is removed. 
+func TestResolveStreamInputTypes_ReplacesPackageWithType(t *testing.T) { + inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: test_input +title: Test Input +description: A test input package. +version: 0.1.0 +type: input +policy_templates: + - name: logs + input: logfile + type: logs +`), 0644)) + + buildRoot := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(buildRoot, "manifest.yml"), []byte(` +format_version: 3.0.0 +name: my_integration +version: 0.1.0 +type: integration +requires: + input: + - package: test_input + version: 0.1.0 +policy_templates: + - name: logs + title: Logs + description: Collect logs + inputs: + - package: test_input + title: Collect logs via test input + description: Use the test input to collect logs +`), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + resolver := NewRequiredInputsResolver(epr) + err := resolver.Bundle(buildRoot) + + manifestBytes, err := os.ReadFile(filepath.Join(buildRoot, "manifest.yml")) + require.NoError(t, err) + m, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + + require.Len(t, m.PolicyTemplates[0].Inputs, 1) + assert.Equal(t, "logfile", m.PolicyTemplates[0].Inputs[0].Type) + assert.Empty(t, m.PolicyTemplates[0].Inputs[0].Package) +} + +// TestResolveStreamInputTypes_PreservesExistingTitleAndDescription verifies +// that title and description already set in the composable package input entry +// are preserved and not overwritten by the input package's values. +func TestResolveStreamInputTypes_PreservesExistingTitleAndDescription(t *testing.T) { + inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: test_input +title: Input Pkg Title +description: Input pkg description. 
+version: 0.1.0 +type: input +policy_templates: + - name: logs + input: logfile + type: logs +`), 0644)) + + buildRoot := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(buildRoot, "manifest.yml"), []byte(` +format_version: 3.0.0 +name: my_integration +version: 0.1.0 +type: integration +requires: + input: + - package: test_input + version: 0.1.0 +policy_templates: + - name: logs + title: Logs + description: Collect logs + inputs: + - package: test_input + title: My Custom Title + description: My custom description. +`), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + resolver := NewRequiredInputsResolver(epr) + err := resolver.Bundle(buildRoot) + + manifestBytes, err := os.ReadFile(filepath.Join(buildRoot, "manifest.yml")) + require.NoError(t, err) + + // Check raw YAML to verify title/description are preserved verbatim. + assert.Contains(t, string(manifestBytes), "My Custom Title") + assert.Contains(t, string(manifestBytes), "My custom description.") + assert.NotContains(t, string(manifestBytes), "Input Pkg Title") +} + +// TestResolveStreamInputTypes_PopulatesTitleFromInputPkg verifies that when +// the composable package input entry has no title/description, they are +// populated from the input package manifest. +func TestResolveStreamInputTypes_PopulatesTitleFromInputPkg(t *testing.T) { + inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: test_input +title: Input Pkg Title +description: Input pkg description. 
+version: 0.1.0 +type: input +policy_templates: + - name: logs + input: logfile + type: logs +`), 0644)) + + buildRoot := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(buildRoot, "manifest.yml"), []byte(` +format_version: 3.0.0 +name: my_integration +version: 0.1.0 +type: integration +requires: + input: + - package: test_input + version: 0.1.0 +policy_templates: + - name: logs + title: Logs + description: Collect logs + inputs: + - package: test_input +`), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + resolver := NewRequiredInputsResolver(epr) + err := resolver.Bundle(buildRoot) + + manifestBytes, err := os.ReadFile(filepath.Join(buildRoot, "manifest.yml")) + require.NoError(t, err) + + assert.Contains(t, string(manifestBytes), "Input Pkg Title") + assert.Contains(t, string(manifestBytes), "Input pkg description.") +} + +// TestResolveStreamInputTypes_SkipsNonPackageInputs verifies that inputs +// declared with type: (no package:) are not modified. +func TestResolveStreamInputTypes_SkipsNonPackageInputs(t *testing.T) { + inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: test_input +title: Test Input +version: 0.1.0 +type: input +policy_templates: + - name: logs + input: logfile + type: logs +`), 0644)) + + buildRoot := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(buildRoot, "manifest.yml"), []byte(` +format_version: 3.0.0 +name: my_integration +version: 0.1.0 +type: integration +requires: + input: + - package: test_input + version: 0.1.0 +policy_templates: + - name: logs + title: Logs + description: Collect logs + inputs: + - package: test_input + title: From pkg + description: From pkg. + - type: metrics + title: Direct metrics + description: Direct metrics input. 
+`), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + resolver := NewRequiredInputsResolver(epr) + err := resolver.Bundle(buildRoot) + + manifestBytes, err := os.ReadFile(filepath.Join(buildRoot, "manifest.yml")) + require.NoError(t, err) + m, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + + require.Len(t, m.PolicyTemplates[0].Inputs, 2) + assert.Equal(t, "logfile", m.PolicyTemplates[0].Inputs[0].Type) + assert.Empty(t, m.PolicyTemplates[0].Inputs[0].Package) + assert.Equal(t, "metrics", m.PolicyTemplates[0].Inputs[1].Type) + assert.Empty(t, m.PolicyTemplates[0].Inputs[1].Package) +} + +// TestResolveStreamInputTypes_DataStreamStreamReplacement verifies that +// streams[].package in data stream manifests is replaced with streams[].input. +func TestResolveStreamInputTypes_DataStreamStreamReplacement(t *testing.T) { + inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: test_input +title: Test Input +description: Test input pkg. +version: 0.1.0 +type: input +policy_templates: + - name: logs + input: logfile + type: logs +`), 0644)) + + buildRoot := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(buildRoot, "manifest.yml"), []byte(` +format_version: 3.0.0 +name: my_integration +version: 0.1.0 +type: integration +requires: + input: + - package: test_input + version: 0.1.0 +policy_templates: + - name: logs + title: Logs + description: Collect logs + data_streams: + - test_logs + inputs: + - package: test_input + title: Collect logs + description: Collect logs. 
+`), 0644)) + + dsDir := filepath.Join(buildRoot, "data_stream", "test_logs") + require.NoError(t, os.MkdirAll(dsDir, 0755)) + require.NoError(t, os.WriteFile(filepath.Join(dsDir, "manifest.yml"), []byte(` +title: Test Logs +type: logs +streams: + - package: test_input + title: Test log stream + description: Collect test logs. +`), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + resolver := NewRequiredInputsResolver(epr) + err := resolver.Bundle(buildRoot) + dsManifestBytes, err := os.ReadFile(filepath.Join(dsDir, "manifest.yml")) + require.NoError(t, err) + dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) + require.NoError(t, err) + + require.Len(t, dsManifest.Streams, 1) + assert.Equal(t, "logfile", dsManifest.Streams[0].Input) + assert.Empty(t, dsManifest.Streams[0].Package) + assert.Equal(t, "Test log stream", dsManifest.Streams[0].Title) +} + +// TestResolveStreamInputTypes_SkipsNonPackageStreams verifies that streams +// declared with input: (no package:) are not modified. +func TestResolveStreamInputTypes_SkipsNonPackageStreams(t *testing.T) { + inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: test_input +title: Test Input +version: 0.1.0 +type: input +policy_templates: + - name: logs + input: logfile + type: logs +`), 0644)) + + buildRoot := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(buildRoot, "manifest.yml"), []byte(` +format_version: 3.0.0 +name: my_integration +version: 0.1.0 +type: integration +requires: + input: + - package: test_input + version: 0.1.0 +policy_templates: + - name: logs + title: Logs + description: Collect logs + data_streams: + - test_logs + inputs: + - package: test_input + title: Collect logs + description: Collect logs. 
+`), 0644)) + + dsDir := filepath.Join(buildRoot, "data_stream", "test_logs") + require.NoError(t, os.MkdirAll(dsDir, 0755)) + require.NoError(t, os.WriteFile(filepath.Join(dsDir, "manifest.yml"), []byte(` +title: Test Logs +type: logs +streams: + - package: test_input + title: From pkg + description: From pkg. + - input: metrics + title: Direct metrics + description: Direct metrics stream. +`), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + resolver := NewRequiredInputsResolver(epr) + err := resolver.Bundle(buildRoot) + require.NoError(t, err) + + dsManifestBytes, err := os.ReadFile(filepath.Join(dsDir, "manifest.yml")) + require.NoError(t, err) + dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) + require.NoError(t, err) + + require.Len(t, dsManifest.Streams, 2) + assert.Equal(t, "logfile", dsManifest.Streams[0].Input) + assert.Empty(t, dsManifest.Streams[0].Package) + assert.Equal(t, "metrics", dsManifest.Streams[1].Input) + assert.Empty(t, dsManifest.Streams[1].Package) +} + +// TestResolveStreamInputTypes_FieldBundlingFixture runs the full +// Bundle pipeline on the with_field_bundling fixture and +// verifies that package: references are replaced in both the main manifest and +// the data stream manifest. 
+func TestResolveStreamInputTypes_FieldBundlingFixture(t *testing.T) {
+	buildPackageRoot := copyFixturePackage(t, "with_field_bundling")
+	resolver := NewRequiredInputsResolver(makeFakeEprForFieldBundling(t))
+	require.NoError(t, resolver.Bundle(buildPackageRoot))
+
+	// Check main manifest: package: fields_input_pkg → type: logfile
+	manifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "manifest.yml"))
+	require.NoError(t, err)
+	m, err := packages.ReadPackageManifestBytes(manifestBytes)
+	require.NoError(t, err)
+	require.Len(t, m.PolicyTemplates[0].Inputs, 1)
+	assert.Equal(t, "logfile", m.PolicyTemplates[0].Inputs[0].Type)
+	assert.Empty(t, m.PolicyTemplates[0].Inputs[0].Package)
+
+	// Check data stream manifest: package: fields_input_pkg → input: logfile
+	dsManifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "field_logs", "manifest.yml"))
+	require.NoError(t, err)
+	dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes)
+	require.NoError(t, err)
+	require.Len(t, dsManifest.Streams, 1)
+	assert.Equal(t, "logfile", dsManifest.Streams[0].Input)
+	assert.Empty(t, dsManifest.Streams[0].Package)
+	assert.NotEmpty(t, dsManifest.Streams[0].Title)
+}
diff --git a/internal/requiredinputs/streams.go b/internal/requiredinputs/streams.go
new file mode 100644
index 0000000000..a460d08d82
--- /dev/null
+++ b/internal/requiredinputs/streams.go
@@ -0,0 +1,201 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License;
+// you may not use this file except in compliance with the Elastic License.
+ +package requiredinputs + +import ( + "errors" + "fmt" + "io/fs" + "os" + "path" + + "gopkg.in/yaml.v3" + + "github.com/elastic/elastic-package/internal/logger" + "github.com/elastic/elastic-package/internal/packages" +) + +func (r *RequiredInputsResolver) bundleDataStreamTemplates(inputPkgPaths map[string]string, buildRoot *os.Root) error { + // get all data stream manifest paths in the build package + dsManifestsPaths, err := fs.Glob(buildRoot.FS(), "data_stream/*/manifest.yml") + if err != nil { + return fmt.Errorf("failed to glob data stream manifests: %w", err) + } + + errorList := make([]error, 0) + for _, manifestPath := range dsManifestsPaths { + manifestBytes, err := buildRoot.ReadFile(manifestPath) + if err != nil { + return fmt.Errorf("failed to read data stream manifest %q: %w", manifestPath, err) + } + // parse the manifest YAML document preserving formatting for targeted modifications + // using manifestBytes allows us to preserve comments and formatting in the manifest when we update it with template paths from input packages + var doc yaml.Node + if err := yaml.Unmarshal(manifestBytes, &doc); err != nil { + return fmt.Errorf("failed to parse data stream manifest YAML: %w", err) + } + + manifest, err := packages.ReadDataStreamManifestBytes(manifestBytes) + if err != nil { + return fmt.Errorf("failed to parse data stream manifest %q: %w", manifestPath, err) + } + for idx, stream := range manifest.Streams { + if stream.Package == "" { + continue + } + pkgPath, ok := inputPkgPaths[stream.Package] + if !ok { + errorList = append(errorList, fmt.Errorf("failed to resolve input package %q for stream in manifest %q: not listed in requires.input", stream.Package, manifestPath)) + continue + } + dsRootDir := path.Dir(manifestPath) + inputPaths, err := r.collectAndCopyInputPkgDataStreams(dsRootDir, pkgPath, stream.Package, buildRoot) + if err != nil { + return fmt.Errorf("failed to collect and copy input package data stream templates for manifest %q: %w", 
manifestPath, err) + } + if len(inputPaths) == 0 { + continue + } + + // current manifest template paths + paths := make([]string, 0) + // if composable package has included custom template path or paths, include them + // if no template paths are included at the manifest, only the imported templates are included + if stream.TemplatePath != "" { + paths = append(paths, stream.TemplatePath) + } else if len(stream.TemplatePaths) > 0 { + paths = append(paths, stream.TemplatePaths...) + } + paths = append(inputPaths, paths...) + + if err := setStreamTemplatePaths(&doc, idx, paths); err != nil { + return fmt.Errorf("failed to set stream template paths in manifest %q: %w", manifestPath, err) + } + + } + + // Serialise the updated YAML document back to disk. + updated, err := formatYAMLNode(&doc) + if err != nil { + return fmt.Errorf("failed to format updated manifest: %w", err) + } + if err := buildRoot.WriteFile(manifestPath, updated, 0664); err != nil { + return fmt.Errorf("failed to write updated manifest: %w", err) + } + + } + return errors.Join(errorList...) +} + +// collectAndCopyInputPkgDataStreams collects the data streams from the input package and copies them to the agent/input directory of the build package +// it returns the list of copied data stream names +// +// Design note: input package templates are authored for input-level compilation, where available +// variables are: package vars + input.vars. When these templates are copied to the integration's +// data_stream//agent/stream/ directory and compiled as stream templates, Fleet compiles them +// with package vars + input.vars + stream.vars. For templates that only reference package-level +// or input-level variables this works correctly. However, stream-level vars defined on the +// integration's data stream will NOT be accessible from input package templates — the template +// content must explicitly reference them. 
If stream-level vars need to be rendered, add an +// integration-owned stream template and include it after the input package templates in +// template_paths (integration templates are appended last and take precedence). +// See https://github.com/elastic/elastic-package/issues/3279 for the follow-up work on +// merging variable definitions from input packages and composable packages at build time. +func (r *RequiredInputsResolver) collectAndCopyInputPkgDataStreams(dsRootDir, inputPkgPath, inputPkgName string, buildRoot *os.Root) ([]string, error) { + inputPkgFS, closeFn, err := openPackageFS(inputPkgPath) + if err != nil { + return nil, fmt.Errorf("failed to open input package %q: %w", inputPkgPath, err) + } + defer func() { _ = closeFn() }() + + manifestBytes, err := fs.ReadFile(inputPkgFS, "manifest.yml") + if err != nil { + return nil, fmt.Errorf("failed to read input package manifest: %w", err) + } + manifest, err := packages.ReadPackageManifestBytes(manifestBytes) + if err != nil { + return nil, fmt.Errorf("failed to parse input package manifest: %w", err) + } + + seen := make(map[string]bool) + copiedNames := make([]string, 0) + for _, pt := range manifest.PolicyTemplates { + var names []string + switch { + case len(pt.TemplatePaths) > 0: + names = pt.TemplatePaths + case pt.TemplatePath != "": + names = []string{pt.TemplatePath} + } + for _, name := range names { + if seen[name] { + continue + } + seen[name] = true + // copy the template from "agent/input" directory of the input package to the "agent/stream" directory of the build package + content, err := fs.ReadFile(inputPkgFS, path.Join("agent", "input", name)) + if err != nil { + return nil, fmt.Errorf("failed to read template %q from agent/input (declared in manifest): %w", name, err) + } + destName := inputPkgName + "-" + name + // create the agent/stream directory if it doesn't exist + agentStreamDir := path.Join(dsRootDir, "agent", "stream") + if err := buildRoot.MkdirAll(agentStreamDir, 0755); err 
!= nil { + return nil, fmt.Errorf("failed to create agent/stream directory: %w", err) + } + destPath := path.Join(agentStreamDir, destName) + if err := buildRoot.WriteFile(destPath, content, 0644); err != nil { + return nil, fmt.Errorf("failed to write template %q: %w", destName, err) + } + logger.Debugf("Copied input package template: %s -> %s", name, destName) + copiedNames = append(copiedNames, destName) + } + } + return copiedNames, nil +} + +func setStreamTemplatePaths(doc *yaml.Node, streamIdx int, paths []string) error { + // Navigate: document -> mapping -> "streams" key -> sequence -> item [streamIdx] + root := doc + if root.Kind == yaml.DocumentNode { + if len(root.Content) == 0 { + return fmt.Errorf("failed to set stream template paths: empty YAML document") + } + root = root.Content[0] + } + if root.Kind != yaml.MappingNode { + return fmt.Errorf("failed to set stream template paths: expected mapping node at document root") + } + + streamsNode := mappingValue(root, "streams") + if streamsNode == nil { + return fmt.Errorf("failed to set stream template paths: 'streams' key not found in manifest") + } + if streamsNode.Kind != yaml.SequenceNode { + return fmt.Errorf("failed to set stream template paths: 'streams' is not a sequence") + } + if streamIdx >= len(streamsNode.Content) { + return fmt.Errorf("failed to set stream template paths: stream index %d out of range (len=%d)", streamIdx, len(streamsNode.Content)) + } + + streamNode := streamsNode.Content[streamIdx] + if streamNode.Kind != yaml.MappingNode { + return fmt.Errorf("failed to set stream template paths: stream entry %d is not a mapping", streamIdx) + } + + // Remove singular template_path if present. + removeKey(streamNode, "template_path") + + // Build the template_paths sequence node. + seqNode := &yaml.Node{Kind: yaml.SequenceNode} + for _, p := range paths { + seqNode.Content = append(seqNode.Content, &yaml.Node{Kind: yaml.ScalarNode, Value: p}) + } + + // Upsert template_paths. 
+ upsertKey(streamNode, "template_paths", seqNode) + + return nil +} diff --git a/internal/requiredinputs/streams_test.go b/internal/requiredinputs/streams_test.go new file mode 100644 index 0000000000..f75b99e32d --- /dev/null +++ b/internal/requiredinputs/streams_test.go @@ -0,0 +1,148 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/packages" +) + +// TestBundleDataStreamTemplates_MultiplePolicyTemplates verifies that templates from ALL +// policy templates in the input package are bundled, not just the first one (Issue 5). +func TestBundleDataStreamTemplates_MultiplePolicyTemplates(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + datastreamDir := filepath.Join("data_stream", "test_ds") + err = buildRoot.MkdirAll(datastreamDir, 0755) + require.NoError(t, err) + + manifestBytes := []byte(` +streams: + - package: sql +`) + err = buildRoot.WriteFile(filepath.Join(datastreamDir, "manifest.yml"), manifestBytes, 0644) + require.NoError(t, err) + + fakeInputDir := createFakeInputWithMultiplePolicyTemplates(t) + inputPkgPaths := map[string]string{"sql": fakeInputDir} + + err = r.bundleDataStreamTemplates(inputPkgPaths, buildRoot) + require.NoError(t, err) + + // All templates from both policy templates must be present. 
+ _, err = buildRoot.ReadFile(filepath.Join(datastreamDir, "agent", "stream", "sql-input.yml.hbs")) + require.NoError(t, err, "template from first policy_template must be bundled") + _, err = buildRoot.ReadFile(filepath.Join(datastreamDir, "agent", "stream", "sql-metrics.yml.hbs")) + require.NoError(t, err, "template from second policy_template must be bundled") + _, err = buildRoot.ReadFile(filepath.Join(datastreamDir, "agent", "stream", "sql-extra.yml.hbs")) + require.NoError(t, err, "extra template from second policy_template must be bundled") + + updated, err := buildRoot.ReadFile(filepath.Join(datastreamDir, "manifest.yml")) + require.NoError(t, err) + updatedManifest, err := packages.ReadDataStreamManifestBytes(updated) + require.NoError(t, err) + require.Len(t, updatedManifest.Streams, 1) + assert.Equal(t, []string{"sql-input.yml.hbs", "sql-metrics.yml.hbs", "sql-extra.yml.hbs"}, updatedManifest.Streams[0].TemplatePaths) +} + +func TestBundleDataStreamTemplates_SuccessTemplatesCopied(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + datastreamDir := filepath.Join("data_stream", "test_ds") + err = buildRoot.MkdirAll(datastreamDir, 0755) + require.NoError(t, err) + // create current package manifest with one data stream input referencing an input package template + // it has an existing template, so both the existing and input package template should be copied and the manifest updated to reference both + manifestBytes := []byte(` +streams: + - package: sql + template_path: existing.yml.hbs +`) + err = buildRoot.WriteFile(filepath.Join(datastreamDir, "manifest.yml"), manifestBytes, 0644) + require.NoError(t, err) + err = buildRoot.MkdirAll(filepath.Join(datastreamDir, "agent", "stream"), 0755) + require.NoError(t, err) + err = buildRoot.WriteFile(filepath.Join(datastreamDir, "agent", "stream", "existing.yml.hbs"), 
[]byte("existing content"), 0644) + require.NoError(t, err) + + fakeInputDir := createFakeInputHelper(t) + inputPkgPaths := map[string]string{"sql": fakeInputDir} + + err = r.bundleDataStreamTemplates(inputPkgPaths, buildRoot) + require.NoError(t, err) + + // Files exist. + _, err = buildRoot.ReadFile(filepath.Join(datastreamDir, "agent", "stream", "sql-input.yml.hbs")) + require.NoError(t, err) + _, err = buildRoot.ReadFile(filepath.Join(datastreamDir, "agent", "stream", "existing.yml.hbs")) + require.NoError(t, err) + + // Written manifest has template_paths set and template_path removed for that input. + updated, err := buildRoot.ReadFile(filepath.Join(datastreamDir, "manifest.yml")) + require.NoError(t, err) + updatedManifest, err := packages.ReadDataStreamManifestBytes(updated) + require.NoError(t, err) + require.Len(t, updatedManifest.Streams, 1) + input := updatedManifest.Streams[0] + assert.Empty(t, input.TemplatePath) + assert.Equal(t, []string{"sql-input.yml.hbs", "existing.yml.hbs"}, input.TemplatePaths) +} + +// TestBundleDataStreamTemplates_BundlesWithoutDataStreamsAssociation verifies that a data stream +// stream entry with package: X IS bundled even when the root policy template has no data_streams +// field. Bundling is driven solely by the data stream manifest's streams[].package reference. 
+func TestBundleDataStreamTemplates_BundlesWithoutDataStreamsAssociation(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + datastreamDir := filepath.Join("data_stream", "test_ds") + err = buildRoot.MkdirAll(datastreamDir, 0755) + require.NoError(t, err) + + manifestBytes := []byte(` +streams: + - package: sql +`) + err = buildRoot.WriteFile(filepath.Join(datastreamDir, "manifest.yml"), manifestBytes, 0644) + require.NoError(t, err) + + fakeInputDir := createFakeInputHelper(t) + inputPkgPaths := map[string]string{"sql": fakeInputDir} + + err = r.bundleDataStreamTemplates(inputPkgPaths, buildRoot) + require.NoError(t, err) + + // Template must be bundled even without a data_streams association in the root manifest. + _, err = buildRoot.ReadFile(filepath.Join(datastreamDir, "agent", "stream", "sql-input.yml.hbs")) + require.NoError(t, err, "template must be bundled when stream references an input package, regardless of data_streams field") + + // The data stream manifest must have template_paths set. + updated, err := buildRoot.ReadFile(filepath.Join(datastreamDir, "manifest.yml")) + require.NoError(t, err) + updatedManifest, err := packages.ReadDataStreamManifestBytes(updated) + require.NoError(t, err) + require.Len(t, updatedManifest.Streams, 1) + assert.Equal(t, []string{"sql-input.yml.hbs"}, updatedManifest.Streams[0].TemplatePaths) +} diff --git a/internal/requiredinputs/testhelpers_test.go b/internal/requiredinputs/testhelpers_test.go new file mode 100644 index 0000000000..fb5a4d8246 --- /dev/null +++ b/internal/requiredinputs/testhelpers_test.go @@ -0,0 +1,67 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package requiredinputs + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" +) + +func createFakeInputHelper(t *testing.T) string { + t.Helper() + // create fake input package with manifest and template file + fakeDownloadedPkgDir := t.TempDir() + inputPkgDir := filepath.Join(fakeDownloadedPkgDir, "sql") + err := os.Mkdir(inputPkgDir, 0755) + require.NoError(t, err) + inputManifestBytes := []byte(`name: sql +version: 0.1.0 +type: input +policy_templates: + - input: sql + template_path: input.yml.hbs +`) + err = os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), inputManifestBytes, 0644) + require.NoError(t, err) + err = os.MkdirAll(filepath.Join(inputPkgDir, "agent", "input"), 0755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(inputPkgDir, "agent", "input", "input.yml.hbs"), []byte("template content"), 0644) + require.NoError(t, err) + return inputPkgDir +} + +func createFakeInputWithMultiplePolicyTemplates(t *testing.T) string { + t.Helper() + fakeDownloadedPkgDir := t.TempDir() + inputPkgDir := filepath.Join(fakeDownloadedPkgDir, "sql") + err := os.Mkdir(inputPkgDir, 0755) + require.NoError(t, err) + // Input package with two policy templates, each declaring a distinct template. 
+ inputManifestBytes := []byte(`name: sql +version: 0.1.0 +type: input +policy_templates: + - input: sql + template_path: input.yml.hbs + - input: sql/metrics + template_paths: + - metrics.yml.hbs + - extra.yml.hbs +`) + err = os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), inputManifestBytes, 0644) + require.NoError(t, err) + err = os.MkdirAll(filepath.Join(inputPkgDir, "agent", "input"), 0755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(inputPkgDir, "agent", "input", "input.yml.hbs"), []byte("input template"), 0644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(inputPkgDir, "agent", "input", "metrics.yml.hbs"), []byte("metrics template"), 0644) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(inputPkgDir, "agent", "input", "extra.yml.hbs"), []byte("extra template"), 0644) + require.NoError(t, err) + return inputPkgDir +} diff --git a/internal/requiredinputs/variables.go b/internal/requiredinputs/variables.go new file mode 100644 index 0000000000..c7561c068c --- /dev/null +++ b/internal/requiredinputs/variables.go @@ -0,0 +1,500 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "fmt" + "io/fs" + "maps" + "os" + "path" + + "gopkg.in/yaml.v3" + + "github.com/elastic/elastic-package/internal/packages" +) + +// pkgDsKey uniquely identifies the (input-package, data-stream) pair used to +// index promoted variable overrides. +type pkgDsKey struct { + pkg string + dsName string +} + +// mergeVariables merges variable definitions from input packages into the +// composable package's manifests (package-level and data-stream-level). +// +// Merging rule: input package vars are the base; composable package override +// fields win when explicitly specified. 
+// +// Input-level vars: vars declared in policy_templates[].inputs[].vars are +// "promoted" — they become input-level variables in the merged manifest. +// +// Data-stream-level vars: all remaining (non-promoted) base vars are placed at +// the data-stream level, merged with any stream-level overrides the composable +// package declares. +func (r *RequiredInputsResolver) mergeVariables( + manifest *packages.PackageManifest, + inputPkgPaths map[string]string, + buildRoot *os.Root, +) error { + // Step A — Re-read manifest.yml from disk as a YAML node so edits from the + // earlier template-bundling step are included. + manifestBytes, err := buildRoot.ReadFile("manifest.yml") + if err != nil { + return fmt.Errorf("reading manifest: %w", err) + } + var doc yaml.Node + if err := yaml.Unmarshal(manifestBytes, &doc); err != nil { + return fmt.Errorf("parsing manifest YAML: %w", err) + } + + // Step B — Build a promotedIndex: (pkg, dsName) → map[varName]overrideNode. + // The override nodes come from policy_templates[ptIdx].inputs[inputIdx].vars + // in the composable package manifest. 
+ promotedIndex := make(map[pkgDsKey]map[string]*yaml.Node) + for ptIdx, pt := range manifest.PolicyTemplates { + for inputIdx, input := range pt.Inputs { + if input.Package == "" || len(input.Vars) == 0 { + continue + } + + inputNode, err := getInputMappingNode(&doc, ptIdx, inputIdx) + if err != nil { + return fmt.Errorf("getting input node at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) + } + + overrideNodes, err := readVarNodes(inputNode) + if err != nil { + return fmt.Errorf("reading override var nodes at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) + } + + overrideByName := make(map[string]*yaml.Node, len(overrideNodes)) + for _, n := range overrideNodes { + overrideByName[varNodeName(n)] = n + } + + dsNames := pt.DataStreams + if len(dsNames) == 0 { + dsNames = []string{""} + } + for _, dsName := range dsNames { + promotedIndex[pkgDsKey{pkg: input.Package, dsName: dsName}] = overrideByName + } + } + } + + // Step C — Merge and write input-level vars in manifest.yml. + for ptIdx, pt := range manifest.PolicyTemplates { + for inputIdx, input := range pt.Inputs { + if input.Package == "" { + continue + } + pkgPath, ok := inputPkgPaths[input.Package] + if !ok { + continue + } + + baseVarOrder, baseVarByName, err := loadInputPkgVarNodes(pkgPath) + if err != nil { + return fmt.Errorf("loading input pkg var nodes for %q: %w", input.Package, err) + } + if len(baseVarOrder) == 0 { + continue + } + + // Union of promoted overrides across all data streams for this input. 
+ promotedOverrides := make(map[string]*yaml.Node) + dsNames := pt.DataStreams + if len(dsNames) == 0 { + dsNames = []string{""} + } + for _, dsName := range dsNames { + maps.Copy(promotedOverrides, promotedIndex[pkgDsKey{pkg: input.Package, dsName: dsName}]) + } + + inputNode, err := getInputMappingNode(&doc, ptIdx, inputIdx) + if err != nil { + return fmt.Errorf("getting input node at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) + } + + mergedSeq, err := mergeInputLevelVarNodes(baseVarOrder, baseVarByName, promotedOverrides) + if err != nil { + return fmt.Errorf("merging input-level vars for pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) + } + + if len(mergedSeq.Content) > 0 { + upsertKey(inputNode, "vars", mergedSeq) + } else { + removeKey(inputNode, "vars") + } + } + } + + // Step D — Write the updated manifest.yml back to disk. + updated, err := formatYAMLNode(&doc) + if err != nil { + return fmt.Errorf("formatting updated manifest: %w", err) + } + if err := buildRoot.WriteFile("manifest.yml", updated, 0664); err != nil { + return fmt.Errorf("writing updated manifest: %w", err) + } + + // Step E — Process each data_stream/*/manifest.yml. 
+ dsManifestPaths, err := fs.Glob(buildRoot.FS(), "data_stream/*/manifest.yml") + if err != nil { + return fmt.Errorf("globbing data stream manifests: %w", err) + } + + for _, manifestPath := range dsManifestPaths { + // data_stream/var_merging_logs/manifest.yml → var_merging_logs + dsName := path.Base(path.Dir(manifestPath)) + + dsManifestBytes, err := buildRoot.ReadFile(manifestPath) + if err != nil { + return fmt.Errorf("reading data stream manifest %q: %w", manifestPath, err) + } + + var dsDoc yaml.Node + if err := yaml.Unmarshal(dsManifestBytes, &dsDoc); err != nil { + return fmt.Errorf("parsing data stream manifest YAML %q: %w", manifestPath, err) + } + + dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) + if err != nil { + return fmt.Errorf("parsing data stream manifest %q: %w", manifestPath, err) + } + + for streamIdx, stream := range dsManifest.Streams { + if stream.Package == "" { + continue + } + pkgPath, ok := inputPkgPaths[stream.Package] + if !ok { + continue + } + + baseVarOrder, baseVarByName, err := loadInputPkgVarNodes(pkgPath) + if err != nil { + return fmt.Errorf("loading input pkg var nodes for %q: %w", stream.Package, err) + } + if len(baseVarOrder) == 0 { + continue + } + + // Promoted names for this (pkg, dsName) combination. 
+ promotedNames := make(map[string]bool) + for _, key := range []pkgDsKey{{stream.Package, dsName}, {stream.Package, ""}} { + for varName := range promotedIndex[key] { + promotedNames[varName] = true + } + } + + streamNode, err := getStreamMappingNode(&dsDoc, streamIdx) + if err != nil { + return fmt.Errorf("getting stream node at index %d in %q: %w", streamIdx, manifestPath, err) + } + + dsOverrideNodes, err := readVarNodes(streamNode) + if err != nil { + return fmt.Errorf("reading DS override var nodes in %q: %w", manifestPath, err) + } + + if err := checkDuplicateVarNodes(dsOverrideNodes); err != nil { + return fmt.Errorf("duplicate vars in data stream manifest %q: %w", manifestPath, err) + } + + mergedSeq, err := mergeStreamLevelVarNodes(baseVarOrder, baseVarByName, promotedNames, dsOverrideNodes) + if err != nil { + return fmt.Errorf("merging stream-level vars in %q: %w", manifestPath, err) + } + + if len(mergedSeq.Content) > 0 { + upsertKey(streamNode, "vars", mergedSeq) + } else { + removeKey(streamNode, "vars") + } + } + + // Step F — Write each updated DS manifest. + dsUpdated, err := formatYAMLNode(&dsDoc) + if err != nil { + return fmt.Errorf("formatting updated data stream manifest %q: %w", manifestPath, err) + } + if err := buildRoot.WriteFile(manifestPath, dsUpdated, 0664); err != nil { + return fmt.Errorf("writing updated data stream manifest %q: %w", manifestPath, err) + } + } + + return nil +} + +// loadInputPkgVarNodes opens the input package at pkgPath, reads all vars from +// all policy templates (dedup by name, first wins) and returns them as an +// ordered slice and a name→node lookup map. 
+func loadInputPkgVarNodes(pkgPath string) ([]string, map[string]*yaml.Node, error) { + pkgFS, closeFn, err := openPackageFS(pkgPath) + if err != nil { + return nil, nil, fmt.Errorf("opening package: %w", err) + } + defer closeFn() + + manifestBytes, err := fs.ReadFile(pkgFS, packages.PackageManifestFile) + if err != nil { + return nil, nil, fmt.Errorf("reading manifest: %w", err) + } + + var doc yaml.Node + if err := yaml.Unmarshal(manifestBytes, &doc); err != nil { + return nil, nil, fmt.Errorf("parsing manifest YAML: %w", err) + } + + root := &doc + if root.Kind == yaml.DocumentNode { + if len(root.Content) == 0 { + return nil, nil, nil + } + root = root.Content[0] + } + if root.Kind != yaml.MappingNode { + return nil, nil, fmt.Errorf("expected mapping node at document root") + } + + policyTemplatesNode := mappingValue(root, "policy_templates") + if policyTemplatesNode == nil || policyTemplatesNode.Kind != yaml.SequenceNode { + return nil, nil, nil + } + + order := make([]string, 0) + byName := make(map[string]*yaml.Node) + + for _, ptNode := range policyTemplatesNode.Content { + if ptNode.Kind != yaml.MappingNode { + continue + } + varsNode := mappingValue(ptNode, "vars") + if varsNode == nil || varsNode.Kind != yaml.SequenceNode { + continue + } + for _, varNode := range varsNode.Content { + if varNode.Kind != yaml.MappingNode { + continue + } + name := varNodeName(varNode) + if name == "" || byName[name] != nil { + continue // skip empty names and duplicates (first wins) + } + order = append(order, name) + byName[name] = varNode + } + } + + return order, byName, nil +} + +// mergeInputLevelVarNodes returns a sequence node containing only the promoted +// vars (those in promotedOverrides), each merged with the override fields. +// Order follows baseVarOrder (input package declaration order). 
+func mergeInputLevelVarNodes( + baseVarOrder []string, + baseVarByName map[string]*yaml.Node, + promotedOverrides map[string]*yaml.Node, +) (*yaml.Node, error) { + seqNode := &yaml.Node{Kind: yaml.SequenceNode} + for _, varName := range baseVarOrder { + overrideNode, promoted := promotedOverrides[varName] + if !promoted { + continue + } + merged, err := mergeVarNode(baseVarByName[varName], overrideNode) + if err != nil { + return nil, fmt.Errorf("merging var %q: %w", varName, err) + } + seqNode.Content = append(seqNode.Content, merged) + } + return seqNode, nil +} + +// mergeStreamLevelVarNodes returns a sequence node containing: +// 1. Non-promoted base vars (in input package order), merged with any DS +// override where names match. +// 2. Novel DS vars (names not in baseVarByName) appended in their declaration +// order. +func mergeStreamLevelVarNodes( + baseVarOrder []string, + baseVarByName map[string]*yaml.Node, + promotedNames map[string]bool, + dsOverrides []*yaml.Node, +) (*yaml.Node, error) { + dsOverrideByName := make(map[string]*yaml.Node, len(dsOverrides)) + for _, v := range dsOverrides { + dsOverrideByName[varNodeName(v)] = v + } + + seqNode := &yaml.Node{Kind: yaml.SequenceNode} + + // Non-promoted base vars first (in input pkg order). + for _, varName := range baseVarOrder { + if promotedNames[varName] { + continue + } + baseNode := baseVarByName[varName] + overrideNode, hasOverride := dsOverrideByName[varName] + var ( + merged *yaml.Node + merr error + ) + if hasOverride { + merged, merr = mergeVarNode(baseNode, overrideNode) + } else { + merged = cloneNode(baseNode) + } + if merr != nil { + return nil, fmt.Errorf("merging var %q: %w", varName, merr) + } + seqNode.Content = append(seqNode.Content, merged) + } + + // Novel DS vars (not present in base) appended in declaration order. 
+ for _, v := range dsOverrides { + if _, inBase := baseVarByName[varNodeName(v)]; !inBase { + seqNode.Content = append(seqNode.Content, cloneNode(v)) + } + } + + return seqNode, nil +} + +// mergeVarNode merges fields from overrideNode into a clone of baseNode. +// All keys in override win; absent keys in override are inherited from base. +// The "name" key is always preserved from base. +func mergeVarNode(base, override *yaml.Node) (*yaml.Node, error) { + result := cloneNode(base) + for i := 0; i+1 < len(override.Content); i += 2 { + keyNode := override.Content[i] + valNode := override.Content[i+1] + if keyNode.Value == "name" { + continue // always preserve name from base + } + upsertKey(result, keyNode.Value, cloneNode(valNode)) + } + return result, nil +} + +// checkDuplicateVarNodes returns an error if any var name appears more than +// once in the provided nodes. +func checkDuplicateVarNodes(varNodes []*yaml.Node) error { + seen := make(map[string]bool, len(varNodes)) + for _, v := range varNodes { + name := varNodeName(v) + if seen[name] { + return fmt.Errorf("duplicate variable %q", name) + } + seen[name] = true + } + return nil +} + +// varNodeName extracts the value of the "name" key from a var mapping node. +func varNodeName(v *yaml.Node) string { + nameVal := mappingValue(v, "name") + if nameVal == nil { + return "" + } + return nameVal.Value +} + +// readVarNodes extracts the individual var mapping nodes from the "vars" +// sequence of the given mapping node. Returns nil if no "vars" key is present. 
+func readVarNodes(mappingNode *yaml.Node) ([]*yaml.Node, error) { + varsNode := mappingValue(mappingNode, "vars") + if varsNode == nil { + return nil, nil + } + if varsNode.Kind != yaml.SequenceNode { + return nil, fmt.Errorf("'vars' is not a sequence node") + } + result := make([]*yaml.Node, 0, len(varsNode.Content)) + for _, item := range varsNode.Content { + if item.Kind != yaml.MappingNode { + return nil, fmt.Errorf("var entry is not a mapping node") + } + result = append(result, item) + } + return result, nil +} + +// getInputMappingNode navigates to policy_templates[ptIdx].inputs[inputIdx] in +// the given YAML document and returns the input mapping node. +func getInputMappingNode(doc *yaml.Node, ptIdx, inputIdx int) (*yaml.Node, error) { + root := doc + if root.Kind == yaml.DocumentNode { + if len(root.Content) == 0 { + return nil, fmt.Errorf("empty YAML document") + } + root = root.Content[0] + } + if root.Kind != yaml.MappingNode { + return nil, fmt.Errorf("expected mapping node at document root") + } + + ptsNode := mappingValue(root, "policy_templates") + if ptsNode == nil || ptsNode.Kind != yaml.SequenceNode { + return nil, fmt.Errorf("'policy_templates' not found or not a sequence") + } + if ptIdx < 0 || ptIdx >= len(ptsNode.Content) { + return nil, fmt.Errorf("policy template index %d out of range (len=%d)", ptIdx, len(ptsNode.Content)) + } + + ptNode := ptsNode.Content[ptIdx] + if ptNode.Kind != yaml.MappingNode { + return nil, fmt.Errorf("policy template %d is not a mapping", ptIdx) + } + + inputsNode := mappingValue(ptNode, "inputs") + if inputsNode == nil || inputsNode.Kind != yaml.SequenceNode { + return nil, fmt.Errorf("'inputs' not found or not a sequence in policy template %d", ptIdx) + } + if inputIdx < 0 || inputIdx >= len(inputsNode.Content) { + return nil, fmt.Errorf("input index %d out of range (len=%d)", inputIdx, len(inputsNode.Content)) + } + + inputNode := inputsNode.Content[inputIdx] + if inputNode.Kind != yaml.MappingNode { + return 
nil, fmt.Errorf("input %d is not a mapping", inputIdx) + } + + return inputNode, nil +} + +// getStreamMappingNode navigates to streams[streamIdx] in the given YAML +// document and returns the stream mapping node. +func getStreamMappingNode(doc *yaml.Node, streamIdx int) (*yaml.Node, error) { + root := doc + if root.Kind == yaml.DocumentNode { + if len(root.Content) == 0 { + return nil, fmt.Errorf("empty YAML document") + } + root = root.Content[0] + } + if root.Kind != yaml.MappingNode { + return nil, fmt.Errorf("expected mapping node at document root") + } + + streamsNode := mappingValue(root, "streams") + if streamsNode == nil || streamsNode.Kind != yaml.SequenceNode { + return nil, fmt.Errorf("'streams' not found or not a sequence") + } + if streamIdx < 0 || streamIdx >= len(streamsNode.Content) { + return nil, fmt.Errorf("stream index %d out of range (len=%d)", streamIdx, len(streamsNode.Content)) + } + + streamNode := streamsNode.Content[streamIdx] + if streamNode.Kind != yaml.MappingNode { + return nil, fmt.Errorf("stream %d is not a mapping", streamIdx) + } + + return streamNode, nil +} diff --git a/internal/requiredinputs/variables_test.go b/internal/requiredinputs/variables_test.go new file mode 100644 index 0000000000..59de66f842 --- /dev/null +++ b/internal/requiredinputs/variables_test.go @@ -0,0 +1,523 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package requiredinputs + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "gopkg.in/yaml.v3" + + "github.com/elastic/elastic-package/internal/packages" +) + +// ---- helpers ----------------------------------------------------------------- + +// varNode builds a minimal YAML mapping node representing a variable with the +// given name and extra key=value pairs (passed as alternating key, value +// strings for simple scalar values). +func varNode(name string, extras ...string) *yaml.Node { + n := &yaml.Node{Kind: yaml.MappingNode} + upsertKey(n, "name", &yaml.Node{Kind: yaml.ScalarNode, Value: name}) + for i := 0; i+1 < len(extras); i += 2 { + upsertKey(n, extras[i], &yaml.Node{Kind: yaml.ScalarNode, Value: extras[i+1]}) + } + return n +} + +// copyFixturePackage copies the named package from test/manual_packages/required_inputs +// to a fresh temp dir and returns that dir path. +func copyFixturePackage(t *testing.T, fixtureName string) string { + t.Helper() + srcPath := filepath.Join("..", "..", "test", "manual_packages", "required_inputs", fixtureName) + destPath := t.TempDir() + err := os.CopyFS(destPath, os.DirFS(srcPath)) + require.NoError(t, err, "copying fixture package %q", fixtureName) + return destPath +} + +// Variable merge tests exercise mergeVariables (see variables.go): when an +// integration declares requires.input and references that input package under +// policy_templates[].inputs with optional vars, definitions from the input +// package must be merged into the built integration—composable and data-stream +// overrides on top of the input package as base, with selected vars promoted +// to input-level. Unit tests cover helpers; integration tests run +// Integration tests exercise Bundle on manual fixture packages. 
+ +// ---- unit tests -------------------------------------------------------------- + +// TestCloneNode checks that YAML variable nodes are deep-cloned before merge. +// mergeVariables mutates cloned trees when applying overrides; without +// isolation, the resolver could corrupt cached or shared input-package nodes. +func TestCloneNode(t *testing.T) { + original := varNode("paths", "type", "text", "multi", "true") + cloned := cloneNode(original) + + // Mutating the clone must not affect the original. + upsertKey(cloned, "type", &yaml.Node{Kind: yaml.ScalarNode, Value: "keyword"}) + assert.Equal(t, "text", mappingValue(original, "type").Value) +} + +// TestMergeVarNode verifies mergeVarNode: per-variable field merge where the +// input package definition is the base and override keys from the composable +// package or data stream replace or add fields; the variable name always stays +// from the base. This is the primitive used for both promoted input vars and +// stream-level merges. +func TestMergeVarNode(t *testing.T) { + base := varNode("paths", "type", "text", "title", "Paths", "multi", "true") + + t.Run("full override", func(t *testing.T) { + override := varNode("paths", "type", "keyword", "title", "Custom Paths", "multi", "false") + merged, err := mergeVarNode(base, override) + require.NoError(t, err) + assert.Equal(t, "paths", varNodeName(merged)) + assert.Equal(t, "keyword", mappingValue(merged, "type").Value) + assert.Equal(t, "Custom Paths", mappingValue(merged, "title").Value) + assert.Equal(t, "false", mappingValue(merged, "multi").Value) + }) + + t.Run("partial override", func(t *testing.T) { + override := varNode("paths", "title", "My Paths") + merged, err := mergeVarNode(base, override) + require.NoError(t, err) + assert.Equal(t, "paths", varNodeName(merged)) + assert.Equal(t, "text", mappingValue(merged, "type").Value) // from base + assert.Equal(t, "My Paths", mappingValue(merged, "title").Value) + assert.Equal(t, "true", mappingValue(merged, 
"multi").Value) // from base + }) + + t.Run("empty override", func(t *testing.T) { + override := varNode("paths") + merged, err := mergeVarNode(base, override) + require.NoError(t, err) + assert.Equal(t, "paths", varNodeName(merged)) + assert.Equal(t, "text", mappingValue(merged, "type").Value) // from base + assert.Equal(t, "Paths", mappingValue(merged, "title").Value) // from base + }) + + t.Run("name not renamed", func(t *testing.T) { + // Even if the override specifies a different name value, base name wins. + override := &yaml.Node{Kind: yaml.MappingNode} + upsertKey(override, "name", &yaml.Node{Kind: yaml.ScalarNode, Value: "should-be-ignored"}) + upsertKey(override, "type", &yaml.Node{Kind: yaml.ScalarNode, Value: "keyword"}) + merged, err := mergeVarNode(base, override) + require.NoError(t, err) + assert.Equal(t, "paths", varNodeName(merged)) + }) + + t.Run("adds new field from override", func(t *testing.T) { + override := varNode("paths", "description", "My description") + merged, err := mergeVarNode(base, override) + require.NoError(t, err) + assert.Equal(t, "My description", mappingValue(merged, "description").Value) + assert.Equal(t, "text", mappingValue(merged, "type").Value) // base preserved + }) +} + +// TestCheckDuplicateVarNodes ensures duplicate var names in a single vars list +// are rejected before merge. That catches invalid integration manifests early +// instead of producing ambiguous merged output for Fleet. 
+func TestCheckDuplicateVarNodes(t *testing.T) { + t.Run("no duplicates", func(t *testing.T) { + nodes := []*yaml.Node{varNode("paths"), varNode("encoding"), varNode("timeout")} + assert.NoError(t, checkDuplicateVarNodes(nodes)) + }) + + t.Run("one duplicate", func(t *testing.T) { + nodes := []*yaml.Node{varNode("paths"), varNode("encoding"), varNode("paths")} + err := checkDuplicateVarNodes(nodes) + require.Error(t, err) + assert.Contains(t, err.Error(), "paths") + }) + + t.Run("empty slice", func(t *testing.T) { + assert.NoError(t, checkDuplicateVarNodes(nil)) + }) +} + +// TestMergeInputLevelVarNodes covers mergeInputLevelVarNodes: vars that appear +// under policy_templates[].inputs[] next to package: are promoted +// to merged input-level var definitions, in input-package declaration order, +// with only explicitly listed names included. +func TestMergeInputLevelVarNodes(t *testing.T) { + pathsBase := varNode("paths", "type", "text", "multi", "true") + encodingBase := varNode("encoding", "type", "text", "show_user", "false") + timeoutBase := varNode("timeout", "type", "text", "default", "30s") + + baseOrder := []string{"paths", "encoding", "timeout"} + baseByName := map[string]*yaml.Node{ + "paths": pathsBase, + "encoding": encodingBase, + "timeout": timeoutBase, + } + + t.Run("empty promoted → empty sequence", func(t *testing.T) { + seq, err := mergeInputLevelVarNodes(baseOrder, baseByName, map[string]*yaml.Node{}) + require.NoError(t, err) + assert.Empty(t, seq.Content) + }) + + t.Run("one promoted partial override", func(t *testing.T) { + promotedOverrides := map[string]*yaml.Node{ + "paths": varNode("paths", "default", "/var/log/custom/*.log"), + } + seq, err := mergeInputLevelVarNodes(baseOrder, baseByName, promotedOverrides) + require.NoError(t, err) + require.Len(t, seq.Content, 1) + assert.Equal(t, "paths", varNodeName(seq.Content[0])) + assert.Equal(t, "/var/log/custom/*.log", mappingValue(seq.Content[0], "default").Value) + assert.Equal(t, "text", 
mappingValue(seq.Content[0], "type").Value) // from base + }) + + t.Run("multiple promoted in base order", func(t *testing.T) { + promotedOverrides := map[string]*yaml.Node{ + "timeout": varNode("timeout", "default", "60s"), + "encoding": varNode("encoding", "show_user", "true"), + } + seq, err := mergeInputLevelVarNodes(baseOrder, baseByName, promotedOverrides) + require.NoError(t, err) + require.Len(t, seq.Content, 2) + // Order must follow baseOrder: encoding before timeout. + assert.Equal(t, "encoding", varNodeName(seq.Content[0])) + assert.Equal(t, "timeout", varNodeName(seq.Content[1])) + assert.Equal(t, "true", mappingValue(seq.Content[0], "show_user").Value) + assert.Equal(t, "60s", mappingValue(seq.Content[1], "default").Value) + }) +} + +// TestMergeStreamLevelVarNodes covers mergeStreamLevelVarNodes: base vars from +// the input package that are not promoted stay on the data stream stream entry; +// they can be field-merged with DS overrides, and DS-only vars are appended. +// Promoted names must not appear on the stream to avoid duplicating Fleet vars. 
+func TestMergeStreamLevelVarNodes(t *testing.T) { + pathsBase := varNode("paths", "type", "text", "multi", "true") + encodingBase := varNode("encoding", "type", "text", "show_user", "false") + timeoutBase := varNode("timeout", "type", "text", "default", "30s") + + baseOrder := []string{"paths", "encoding", "timeout"} + baseByName := map[string]*yaml.Node{ + "paths": pathsBase, + "encoding": encodingBase, + "timeout": timeoutBase, + } + + t.Run("no promoted, no overrides → all base vars", func(t *testing.T) { + seq, err := mergeStreamLevelVarNodes(baseOrder, baseByName, nil, nil) + require.NoError(t, err) + require.Len(t, seq.Content, 3) + assert.Equal(t, "paths", varNodeName(seq.Content[0])) + assert.Equal(t, "encoding", varNodeName(seq.Content[1])) + assert.Equal(t, "timeout", varNodeName(seq.Content[2])) + }) + + t.Run("some promoted → promoted excluded", func(t *testing.T) { + promoted := map[string]bool{"paths": true, "encoding": true} + seq, err := mergeStreamLevelVarNodes(baseOrder, baseByName, promoted, nil) + require.NoError(t, err) + require.Len(t, seq.Content, 1) + assert.Equal(t, "timeout", varNodeName(seq.Content[0])) + }) + + t.Run("DS override on existing base var", func(t *testing.T) { + dsOverrides := []*yaml.Node{varNode("encoding", "show_user", "true")} + seq, err := mergeStreamLevelVarNodes(baseOrder, baseByName, nil, dsOverrides) + require.NoError(t, err) + require.Len(t, seq.Content, 3) + // encoding is merged + encodingMerged := seq.Content[1] + assert.Equal(t, "encoding", varNodeName(encodingMerged)) + assert.Equal(t, "true", mappingValue(encodingMerged, "show_user").Value) + assert.Equal(t, "text", mappingValue(encodingMerged, "type").Value) // from base + }) + + t.Run("novel DS var appended", func(t *testing.T) { + dsOverrides := []*yaml.Node{varNode("custom_tag", "type", "text")} + seq, err := mergeStreamLevelVarNodes(baseOrder, baseByName, nil, dsOverrides) + require.NoError(t, err) + require.Len(t, seq.Content, 4) // 3 base + 1 novel + 
assert.Equal(t, "custom_tag", varNodeName(seq.Content[3])) + }) + + t.Run("mixed: promoted + DS merge + novel", func(t *testing.T) { + promoted := map[string]bool{"paths": true} + dsOverrides := []*yaml.Node{ + varNode("encoding", "show_user", "true"), + varNode("custom_tag", "type", "text"), + } + seq, err := mergeStreamLevelVarNodes(baseOrder, baseByName, promoted, dsOverrides) + require.NoError(t, err) + // paths excluded (promoted); encoding merged; timeout base; custom_tag novel + require.Len(t, seq.Content, 3) + assert.Equal(t, "encoding", varNodeName(seq.Content[0])) + assert.Equal(t, "true", mappingValue(seq.Content[0], "show_user").Value) + assert.Equal(t, "timeout", varNodeName(seq.Content[1])) + assert.Equal(t, "custom_tag", varNodeName(seq.Content[2])) + }) +} + +// TestLoadInputPkgVarNodes checks loadInputPkgVarNodes: variable definitions +// are loaded from the resolved input package manifest so mergeVariables uses +// the input package as the authoritative base (order and fields) for merging. +func TestLoadInputPkgVarNodes(t *testing.T) { + t.Run("fixture with three vars", func(t *testing.T) { + pkgPath := filepath.Join("..", "..", "test", "manual_packages", "required_inputs", "var_merging_input_pkg") + order, byName, err := loadInputPkgVarNodes(pkgPath) + require.NoError(t, err) + assert.Equal(t, []string{"paths", "encoding", "timeout"}, order) + assert.Equal(t, "text", mappingValue(byName["paths"], "type").Value) + assert.Equal(t, "text", mappingValue(byName["encoding"], "type").Value) + assert.Equal(t, "text", mappingValue(byName["timeout"], "type").Value) + }) + + t.Run("package with no vars", func(t *testing.T) { + // Use the fake input helper which has no vars in its manifest. 
+ pkgPath := createFakeInputHelper(t) + order, byName, err := loadInputPkgVarNodes(pkgPath) + require.NoError(t, err) + assert.Empty(t, order) + assert.Empty(t, byName) + }) +} + +// ---- integration tests ------------------------------------------------------- + +// makeFakeEprForVarMerging supplies the var_merging_input_pkg fixture path as +// if it were downloaded from the registry, so integration tests do not need a +// running stack. +func makeFakeEprForVarMerging(t *testing.T) *fakeEprClient { + t.Helper() + inputPkgPath := filepath.Join("..", "..", "test", "manual_packages", "required_inputs", "var_merging_input_pkg") + return &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgPath, nil + }, + } +} + +// TestMergeVariables_Full runs the full merge pipeline: composable vars under +// the package input promote paths and encoding to manifest input-level defs +// (merged with input package defaults), while timeout stays on the data stream +// merged with a DS override and a novel DS-only var is appended—matching the +// end state Fleet expects for a mixed promotion + DS customization scenario. +func TestMergeVariables_Full(t *testing.T) { + buildPackageRoot := copyFixturePackage(t, "with_merging_full") + resolver := NewRequiredInputsResolver(makeFakeEprForVarMerging(t)) + + err := resolver.Bundle(buildPackageRoot) + require.NoError(t, err) + + // Check package manifest: input should have 2 vars (paths, encoding). + manifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "manifest.yml")) + require.NoError(t, err) + manifest, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + + inputVars := manifest.PolicyTemplates[0].Inputs[0].Vars + require.Len(t, inputVars, 2) + assert.Equal(t, "paths", inputVars[0].Name) + assert.Equal(t, "encoding", inputVars[1].Name) + + // paths: base fields preserved, default overridden. 
+ assert.Equal(t, "text", inputVars[0].Type) + require.NotNil(t, inputVars[0].Default) + + // encoding: show_user overridden to true. + assert.True(t, inputVars[1].ShowUser) + assert.Equal(t, "text", inputVars[1].Type) + + // Check DS manifest: streams[0] should have 2 vars (timeout, custom_tag). + dsManifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "var_merging_logs", "manifest.yml")) + require.NoError(t, err) + dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) + require.NoError(t, err) + + streamVars := dsManifest.Streams[0].Vars + require.Len(t, streamVars, 2) + assert.Equal(t, "timeout", streamVars[0].Name) + assert.Equal(t, "custom_tag", streamVars[1].Name) + + // timeout: merged from base + DS override (description). + assert.Equal(t, "text", streamVars[0].Type) + assert.Equal(t, "Timeout for log collection.", streamVars[0].Description) +} + +// TestMergeVariables_PromotesToInput verifies partial promotion: only vars +// listed under the composable input move to input level; remaining input +// package vars stay on the stream unchanged when the data stream supplies no +// overrides. +func TestMergeVariables_PromotesToInput(t *testing.T) { + buildPackageRoot := copyFixturePackage(t, "with_merging_promotes_to_input") + resolver := NewRequiredInputsResolver(makeFakeEprForVarMerging(t)) + + err := resolver.Bundle(buildPackageRoot) + require.NoError(t, err) + + manifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "manifest.yml")) + require.NoError(t, err) + manifest, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + + // Input should have 1 var: paths (promoted, merged with composable override). 
+ inputVars := manifest.PolicyTemplates[0].Inputs[0].Vars + require.Len(t, inputVars, 1) + assert.Equal(t, "paths", inputVars[0].Name) + assert.Equal(t, "text", inputVars[0].Type) // from base + + // DS should have 2 vars: encoding and timeout (both from base, no DS overrides). + dsManifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "var_merging_logs", "manifest.yml")) + require.NoError(t, err) + dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) + require.NoError(t, err) + + streamVars := dsManifest.Streams[0].Vars + require.Len(t, streamVars, 2) + assert.Equal(t, "encoding", streamVars[0].Name) + assert.Equal(t, "timeout", streamVars[1].Name) +} + +// TestMergeVariables_DsMerges covers the case where the composable input +// declares no vars (nothing promoted): all base vars remain on the stream, the +// data stream manifest can merge fields into an existing base var (e.g. title), +// and extra stream-only vars are kept in declaration order after base vars. +func TestMergeVariables_DsMerges(t *testing.T) { + buildPackageRoot := copyFixturePackage(t, "with_merging_ds_merges") + resolver := NewRequiredInputsResolver(makeFakeEprForVarMerging(t)) + + err := resolver.Bundle(buildPackageRoot) + require.NoError(t, err) + + manifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "manifest.yml")) + require.NoError(t, err) + manifest, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + + // No input-level vars (nothing promoted). + assert.Empty(t, manifest.PolicyTemplates[0].Inputs[0].Vars) + + // DS should have 4 vars: paths, encoding (merged), timeout, custom_tag. 
+ dsManifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "var_merging_logs", "manifest.yml")) + require.NoError(t, err) + dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) + require.NoError(t, err) + + streamVars := dsManifest.Streams[0].Vars + require.Len(t, streamVars, 4) + assert.Equal(t, "paths", streamVars[0].Name) + assert.Equal(t, "encoding", streamVars[1].Name) + assert.Equal(t, "timeout", streamVars[2].Name) + assert.Equal(t, "custom_tag", streamVars[3].Name) + + // encoding: title overridden. + assert.Equal(t, "Log Encoding Override", streamVars[1].Title) + assert.Equal(t, "text", streamVars[1].Type) // from base +} + +// TestMergeVariables_NoOverride ensures that when the integration does not +// specify composable or data-stream var overrides, merge still materializes +// input package var definitions onto the stream (cloned base) so behavior stays +// correct for packages that only declare requires.input without local var edits. +func TestMergeVariables_NoOverride(t *testing.T) { + buildPackageRoot := copyFixturePackage(t, "with_merging_no_override") + resolver := NewRequiredInputsResolver(makeFakeEprForVarMerging(t)) + + err := resolver.Bundle(buildPackageRoot) + require.NoError(t, err) + + manifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "manifest.yml")) + require.NoError(t, err) + manifest, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + + // No input-level vars. + assert.Empty(t, manifest.PolicyTemplates[0].Inputs[0].Vars) + + // DS should have 3 vars: all from base, unmodified. 
+ dsManifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "var_merging_logs", "manifest.yml")) + require.NoError(t, err) + dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) + require.NoError(t, err) + + streamVars := dsManifest.Streams[0].Vars + require.Len(t, streamVars, 3) + assert.Equal(t, "paths", streamVars[0].Name) + assert.Equal(t, "encoding", streamVars[1].Name) + assert.Equal(t, "timeout", streamVars[2].Name) + + // Base fields preserved. + assert.Equal(t, "text", streamVars[0].Type) + assert.True(t, streamVars[0].Multi) + assert.True(t, streamVars[0].Required) +} + +// TestMergeVariables_DuplicateError checks that an invalid data stream manifest +// listing the same var name twice fails during mergeVariables, surfacing a +// clear duplicate-variable error instead of silent corruption. +func TestMergeVariables_DuplicateError(t *testing.T) { + buildPackageRoot := copyFixturePackage(t, "with_merging_duplicate_error") + resolver := NewRequiredInputsResolver(makeFakeEprForVarMerging(t)) + + err := resolver.Bundle(buildPackageRoot) + require.Error(t, err) + assert.Contains(t, err.Error(), "paths") +} + +// TestMergeVariables_TwoPolicyTemplatesScopedPromotion verifies that promotion +// is scoped per policy template data stream: composable vars under one template +// promote only for that template’s streams; another template referencing the +// same input package without composable vars keeps all base vars on its streams. +// This guards against incorrectly applying one template’s promotions to every +// stream that uses the same input package. 
+func TestMergeVariables_TwoPolicyTemplatesScopedPromotion(t *testing.T) { + buildPackageRoot := copyFixturePackage(t, "with_merging_two_policy_templates") + resolver := NewRequiredInputsResolver(makeFakeEprForVarMerging(t)) + + err := resolver.Bundle(buildPackageRoot) + require.NoError(t, err) + + manifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "manifest.yml")) + require.NoError(t, err) + manifest, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + require.Len(t, manifest.PolicyTemplates, 2) + + // pt_alpha: composable input has promoted paths (merged title). + alphaPT := manifest.PolicyTemplates[0] + require.Equal(t, "pt_alpha", alphaPT.Name) + require.GreaterOrEqual(t, len(alphaPT.Inputs), 1) + alphaInputVars := alphaPT.Inputs[0].Vars + require.Len(t, alphaInputVars, 1) + assert.Equal(t, "paths", alphaInputVars[0].Name) + assert.Equal(t, "Alpha-only promoted paths title", alphaInputVars[0].Title) + assert.Equal(t, "text", alphaInputVars[0].Type) + + // pt_beta: no promotion — no vars on the composable input entry. + betaPT := manifest.PolicyTemplates[1] + require.Equal(t, "pt_beta", betaPT.Name) + assert.Empty(t, betaPT.Inputs[0].Vars) + + // alpha_logs: paths promoted — stream keeps encoding + timeout only. + alphaDSBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "alpha_logs", "manifest.yml")) + require.NoError(t, err) + alphaDS, err := packages.ReadDataStreamManifestBytes(alphaDSBytes) + require.NoError(t, err) + alphaStreamVars := alphaDS.Streams[0].Vars + require.Len(t, alphaStreamVars, 2) + assert.Equal(t, "encoding", alphaStreamVars[0].Name) + assert.Equal(t, "timeout", alphaStreamVars[1].Name) + + // beta_logs: no promotion — all three base vars on the stream. 
+ betaDSBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "beta_logs", "manifest.yml")) + require.NoError(t, err) + betaDS, err := packages.ReadDataStreamManifestBytes(betaDSBytes) + require.NoError(t, err) + betaStreamVars := betaDS.Streams[0].Vars + require.Len(t, betaStreamVars, 3) + assert.Equal(t, "paths", betaStreamVars[0].Name) + assert.Equal(t, "encoding", betaStreamVars[1].Name) + assert.Equal(t, "timeout", betaStreamVars[2].Name) +} diff --git a/internal/requiredinputs/yamlutil.go b/internal/requiredinputs/yamlutil.go new file mode 100644 index 0000000000..09aea0aad6 --- /dev/null +++ b/internal/requiredinputs/yamlutil.go @@ -0,0 +1,78 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "fmt" + "slices" + + "gopkg.in/yaml.v3" + + "github.com/elastic/elastic-package/internal/formatter" +) + +// mappingValue returns the value node for the given key in a YAML mapping node, +// or nil if the key is not present. +func mappingValue(node *yaml.Node, key string) *yaml.Node { + idx := slices.IndexFunc(node.Content, func(n *yaml.Node) bool { + return n.Value == key + }) + if idx < 0 || idx+1 >= len(node.Content) { + return nil + } + return node.Content[idx+1] +} + +// removeKey removes a key-value pair from a YAML mapping node. +func removeKey(node *yaml.Node, key string) { + idx := slices.IndexFunc(node.Content, func(n *yaml.Node) bool { + return n.Value == key + }) + if idx >= 0 && idx+1 < len(node.Content) { + node.Content = slices.Delete(node.Content, idx, idx+2) + } +} + +// upsertKey sets key to value in a YAML mapping node, adding it if absent. 
+func upsertKey(node *yaml.Node, key string, value *yaml.Node) { + idx := slices.IndexFunc(node.Content, func(n *yaml.Node) bool { + return n.Value == key + }) + if idx >= 0 && idx+1 < len(node.Content) { + node.Content[idx+1] = value + return + } + keyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: key} + node.Content = append(node.Content, keyNode, value) +} + +// cloneNode returns a deep copy of the YAML node tree so base nodes from the +// input package can be reused for multiple independent merges without aliasing. +func cloneNode(n *yaml.Node) *yaml.Node { + if n == nil { + return nil + } + clone := *n + if len(n.Content) > 0 { + clone.Content = make([]*yaml.Node, len(n.Content)) + for i, c := range n.Content { + clone.Content[i] = cloneNode(c) + } + } + return &clone +} + +func formatYAMLNode(doc *yaml.Node) ([]byte, error) { + raw, err := yaml.Marshal(doc) + if err != nil { + return nil, fmt.Errorf("failed to marshal YAML: %w", err) + } + yamlFormatter := formatter.NewYAMLFormatter(formatter.KeysWithDotActionNone) + formatted, _, err := yamlFormatter.Format(raw) + if err != nil { + return nil, fmt.Errorf("failed to format YAML: %w", err) + } + return formatted, nil +} From 9d71a6966dae7b01db092084d5a26a830a85b028 Mon Sep 17 00:00:00 2001 From: Tere Date: Mon, 13 Apr 2026 16:55:41 +0200 Subject: [PATCH 05/28] feat: wire required input resolver into build and EPR-aware commands Use profile-aware registry URLs for install, test, benchmark, and script runners; inject RequiredInputsResolver into the build pipeline. 
Made-with: Cursor --- cmd/benchmark.go | 22 ++++++ cmd/build.go | 32 ++++++--- cmd/install.go | 20 ++++-- cmd/testrunner.go | 33 ++++++--- internal/benchrunner/runners/rally/options.go | 40 ++++++----- internal/benchrunner/runners/rally/runner.go | 9 +-- .../benchrunner/runners/stream/options.go | 32 +++++---- internal/benchrunner/runners/stream/runner.go | 9 +-- internal/builder/packages.go | 21 ++++-- internal/packages/installer/factory.go | 29 ++++---- internal/resources/fleetpackage.go | 15 ++-- internal/resources/fleetpackage_test.go | 23 +++++-- internal/resources/fleetpolicy_test.go | 9 +-- internal/stack/environment.go | 2 +- internal/stack/registry.go | 33 ++++++++- internal/stack/serverless.go | 2 +- internal/testrunner/runners/asset/tester.go | 12 ++-- internal/testrunner/runners/policy/runner.go | 53 +++++++------- internal/testrunner/runners/system/runner.go | 12 ++-- internal/testrunner/script/package.go | 24 ++++--- internal/testrunner/script/script.go | 69 ++++++++++++------- 21 files changed, 337 insertions(+), 164 deletions(-) diff --git a/cmd/benchmark.go b/cmd/benchmark.go index 1b4073da23..28755f1344 100644 --- a/cmd/benchmark.go +++ b/cmd/benchmark.go @@ -29,6 +29,8 @@ import ( "github.com/elastic/elastic-package/internal/install" "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/registry" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/signal" "github.com/elastic/elastic-package/internal/stack" "github.com/elastic/elastic-package/internal/testrunner" @@ -331,6 +333,15 @@ func rallyCommandAction(cmd *cobra.Command, args []string) error { return fmt.Errorf("can't create Kibana client: %w", err) } + appConfig, err := install.Configuration() + if err != nil { + return fmt.Errorf("can't load configuration: %w", err) + } + + baseURL := stack.PackageRegistryBaseURL(profile, appConfig) + 
eprClient := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) + requiredInputsResolver := requiredinputs.NewRequiredInputsResolver(eprClient) + withOpts := []rally.OptionFunc{ rally.WithVariant(variant), rally.WithBenchmarkName(benchName), @@ -344,6 +355,7 @@ func rallyCommandAction(cmd *cobra.Command, args []string) error { rally.WithRallyPackageFromRegistry(packageName, packageVersion), rally.WithRallyCorpusAtPath(corpusAtPath), rally.WithRepositoryRoot(repositoryRoot), + rally.WithRequiredInputsResolver(requiredInputsResolver), } esMetricsClient, err := initializeESMetricsClient(ctx) @@ -506,6 +518,15 @@ func streamCommandAction(cmd *cobra.Command, args []string) error { return fmt.Errorf("can't create Kibana client: %w", err) } + appConfig, err := install.Configuration() + if err != nil { + return fmt.Errorf("can't load configuration: %w", err) + } + + baseURL := stack.PackageRegistryBaseURL(profile, appConfig) + eprClient := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) 
+ requiredInputsResolver := requiredinputs.NewRequiredInputsResolver(eprClient) + withOpts := []stream.OptionFunc{ stream.WithVariant(variant), stream.WithBenchmarkName(benchName), @@ -519,6 +540,7 @@ func streamCommandAction(cmd *cobra.Command, args []string) error { stream.WithKibanaClient(kc), stream.WithProfile(profile), stream.WithRepositoryRoot(repositoryRoot), + stream.WithRequiredInputsResolver(requiredInputsResolver), } runner := stream.NewStreamBenchmark(stream.NewOptions(withOpts...)) diff --git a/cmd/build.go b/cmd/build.go index b101a70133..d7036d0b39 100644 --- a/cmd/build.go +++ b/cmd/build.go @@ -16,6 +16,10 @@ import ( "github.com/elastic/elastic-package/internal/install" "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/profile" + "github.com/elastic/elastic-package/internal/registry" + "github.com/elastic/elastic-package/internal/requiredinputs" + "github.com/elastic/elastic-package/internal/stack" ) const buildLongDescription = `Use this command to build a package. @@ -26,6 +30,8 @@ Built packages are served up by the Elastic Package Registry running locally (se Built packages can also be published to the global package registry service. +When the package declares required input packages ("requires.input" in manifest.yml), the build downloads those input packages from the configured package registry (see "package_registry.base_url" in ~/.elastic-package/config.yml). The build then incorporates their policy and data stream templates, merges variable definitions into the integration manifest, bundles data stream field definitions, and resolves package: references on inputs and streams to the effective input types expected by Fleet. For details on using a local or custom registry during development, see the [HOWTO guide](./docs/howto/local_package_registry.md). 
+ For details on how to enable dependency management, see the [HOWTO guide](https://github.com/elastic/elastic-package/blob/main/docs/howto/dependency_management.md).` func setupBuildCommand() *cobraext.Command { @@ -84,15 +90,25 @@ func buildCommandAction(cmd *cobra.Command, args []string) error { return fmt.Errorf("can't load configuration: %w", err) } + baseURL := appConfig.PackageRegistryBaseURL() + prof, err := profile.LoadProfile(appConfig.CurrentProfile()) + if err != nil { + return fmt.Errorf("could not load profile: %w", err) + } + eprClient := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, prof)...) + + requiredInputsResolver := requiredinputs.NewRequiredInputsResolver(eprClient) + target, err := builder.BuildPackage(builder.BuildOptions{ - PackageRoot: packageRoot, - BuildDir: buildDir, - CreateZip: createZip, - SignPackage: signPackage, - SkipValidation: skipValidation, - RepositoryRoot: repositoryRoot, - UpdateReadmes: true, - SchemaURLs: appConfig.SchemaURLs(), + PackageRoot: packageRoot, + BuildDir: buildDir, + CreateZip: createZip, + SignPackage: signPackage, + SkipValidation: skipValidation, + RepositoryRoot: repositoryRoot, + UpdateReadmes: true, + SchemaURLs: appConfig.SchemaURLs(), + RequiredInputsResolver: requiredInputsResolver, }) if err != nil { return fmt.Errorf("building package failed: %w", err) diff --git a/cmd/install.go b/cmd/install.go index b3cae78823..84bba80c03 100644 --- a/cmd/install.go +++ b/cmd/install.go @@ -16,6 +16,8 @@ import ( "github.com/elastic/elastic-package/internal/kibana" "github.com/elastic/elastic-package/internal/packages" "github.com/elastic/elastic-package/internal/packages/installer" + "github.com/elastic/elastic-package/internal/registry" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/stack" ) @@ -96,13 +98,19 @@ func installCommandAction(cmd *cobra.Command, _ []string) error { return fmt.Errorf("can't load configuration: %w", err) 
} + baseURL := stack.PackageRegistryBaseURL(profile, appConfig) + eprClient := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) + + requiredInputsResolver := requiredinputs.NewRequiredInputsResolver(eprClient) + installer, err := installer.NewForPackage(installer.Options{ - Kibana: kibanaClient, - PackageRoot: packageRoot, - SkipValidation: skipValidation, - ZipPath: zipPathFile, - RepositoryRoot: repositoryRoot, - SchemaURLs: appConfig.SchemaURLs(), + Kibana: kibanaClient, + PackageRoot: packageRoot, + SkipValidation: skipValidation, + ZipPath: zipPathFile, + RepositoryRoot: repositoryRoot, + SchemaURLs: appConfig.SchemaURLs(), + RequiredInputsResolver: requiredInputsResolver, }) if err != nil { return fmt.Errorf("package installation failed: %w", err) diff --git a/cmd/testrunner.go b/cmd/testrunner.go index 655670f263..5a52bc2906 100644 --- a/cmd/testrunner.go +++ b/cmd/testrunner.go @@ -21,6 +21,8 @@ import ( "github.com/elastic/elastic-package/internal/install" "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/registry" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/signal" "github.com/elastic/elastic-package/internal/stack" "github.com/elastic/elastic-package/internal/testrunner" @@ -760,6 +762,12 @@ func testRunnerScriptCommandAction(cmd *cobra.Command, args []string) error { opts.Package = manifest.Name + profile, err := cobraext.GetProfileFlag(cmd) + if err != nil { + return err + } + opts.Profile = profile + var results []testrunner.TestResult err = script.Run(&results, cmd.OutOrStderr(), opts) if err != nil { @@ -870,19 +878,24 @@ func testRunnerPolicyCommandAction(cmd *cobra.Command, args []string) error { return fmt.Errorf("can't load configuration: %w", err) } + baseURL := stack.PackageRegistryBaseURL(profile, appConfig) + eprClient := 
registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) + requiredInputsResolver := requiredinputs.NewRequiredInputsResolver(eprClient) + logger.Info(version.Version()) logger.Infof("elastic-stack: %s", stackVersion.Version()) runner := policy.NewPolicyTestRunner(policy.PolicyTestRunnerOptions{ - PackageRoot: packageRoot, - KibanaClient: kibanaClient, - DataStreams: dataStreams, - FailOnMissingTests: failOnMissing, - GenerateTestResult: generateTestResult, - GlobalTestConfig: globalTestConfig.Policy, - WithCoverage: testCoverage, - CoverageType: testCoverageFormat, - RepositoryRoot: repositoryRoot, - SchemaURLs: appConfig.SchemaURLs(), + PackageRoot: packageRoot, + KibanaClient: kibanaClient, + DataStreams: dataStreams, + FailOnMissingTests: failOnMissing, + GenerateTestResult: generateTestResult, + GlobalTestConfig: globalTestConfig.Policy, + WithCoverage: testCoverage, + CoverageType: testCoverageFormat, + RepositoryRoot: repositoryRoot, + SchemaURLs: appConfig.SchemaURLs(), + RequiredInputsResolver: requiredInputsResolver, }) results, err := testrunner.RunSuite(ctx, runner) diff --git a/internal/benchrunner/runners/rally/options.go b/internal/benchrunner/runners/rally/options.go index 1696ec1bf2..be5a1c0145 100644 --- a/internal/benchrunner/runners/rally/options.go +++ b/internal/benchrunner/runners/rally/options.go @@ -11,26 +11,28 @@ import ( "github.com/elastic/elastic-package/internal/elasticsearch" "github.com/elastic/elastic-package/internal/kibana" "github.com/elastic/elastic-package/internal/profile" + "github.com/elastic/elastic-package/internal/requiredinputs" ) // Options contains benchmark runner options. 
type Options struct { - ESAPI *elasticsearch.API - KibanaClient *kibana.Client - DeferCleanup time.Duration - MetricsInterval time.Duration - ReindexData bool - ESMetricsAPI *elasticsearch.API - BenchName string - PackageRoot string - Variant string - Profile *profile.Profile - RallyTrackOutputDir string - DryRun bool - PackageName string - PackageVersion string - CorpusAtPath string - RepositoryRoot *os.Root + ESAPI *elasticsearch.API + KibanaClient *kibana.Client + DeferCleanup time.Duration + MetricsInterval time.Duration + ReindexData bool + ESMetricsAPI *elasticsearch.API + BenchName string + PackageRoot string + Variant string + Profile *profile.Profile + RallyTrackOutputDir string + DryRun bool + PackageName string + PackageVersion string + CorpusAtPath string + RepositoryRoot *os.Root + RequiredInputsResolver requiredinputs.Resolver } type ClientOptions struct { @@ -126,3 +128,9 @@ func WithRepositoryRoot(r *os.Root) OptionFunc { opts.RepositoryRoot = r } } + +func WithRequiredInputsResolver(r requiredinputs.Resolver) OptionFunc { + return func(opts *Options) { + opts.RequiredInputsResolver = r + } +} diff --git a/internal/benchrunner/runners/rally/runner.go b/internal/benchrunner/runners/rally/runner.go index 960aaaf1ed..fd6b660e3c 100644 --- a/internal/benchrunner/runners/rally/runner.go +++ b/internal/benchrunner/runners/rally/runner.go @@ -483,10 +483,11 @@ func (r *runner) installPackageFromRegistry(ctx context.Context, packageName, pa func (r *runner) installPackageFromPackageRoot(ctx context.Context) error { logger.Debug("Installing package...") installer, err := installer.NewForPackage(installer.Options{ - Kibana: r.options.KibanaClient, - PackageRoot: r.options.PackageRoot, - SkipValidation: true, - RepositoryRoot: r.options.RepositoryRoot, + Kibana: r.options.KibanaClient, + PackageRoot: r.options.PackageRoot, + SkipValidation: true, + RepositoryRoot: r.options.RepositoryRoot, + RequiredInputsResolver: r.options.RequiredInputsResolver, }) if err 
!= nil { return fmt.Errorf("failed to initialize package installer: %w", err) diff --git a/internal/benchrunner/runners/stream/options.go b/internal/benchrunner/runners/stream/options.go index 7770f54b0a..32a7e7bcb1 100644 --- a/internal/benchrunner/runners/stream/options.go +++ b/internal/benchrunner/runners/stream/options.go @@ -11,22 +11,24 @@ import ( "github.com/elastic/elastic-package/internal/elasticsearch" "github.com/elastic/elastic-package/internal/kibana" "github.com/elastic/elastic-package/internal/profile" + "github.com/elastic/elastic-package/internal/requiredinputs" ) // Options contains benchmark runner options. type Options struct { - ESAPI *elasticsearch.API - KibanaClient *kibana.Client - BenchName string - BackFill time.Duration - EventsPerPeriod uint64 - PeriodDuration time.Duration - PerformCleanup bool - TimestampField string - PackageRoot string - Variant string - Profile *profile.Profile - RepositoryRoot *os.Root + ESAPI *elasticsearch.API + KibanaClient *kibana.Client + BenchName string + BackFill time.Duration + EventsPerPeriod uint64 + PeriodDuration time.Duration + PerformCleanup bool + TimestampField string + PackageRoot string + Variant string + Profile *profile.Profile + RepositoryRoot *os.Root + RequiredInputsResolver requiredinputs.Resolver } type ClientOptions struct { @@ -115,3 +117,9 @@ func WithRepositoryRoot(r *os.Root) OptionFunc { opts.RepositoryRoot = r } } + +func WithRequiredInputsResolver(r requiredinputs.Resolver) OptionFunc { + return func(opts *Options) { + opts.RequiredInputsResolver = r + } +} diff --git a/internal/benchrunner/runners/stream/runner.go b/internal/benchrunner/runners/stream/runner.go index 55c333d399..9d6aa137e2 100644 --- a/internal/benchrunner/runners/stream/runner.go +++ b/internal/benchrunner/runners/stream/runner.go @@ -253,10 +253,11 @@ func (r *runner) installPackage(ctx context.Context) error { func (r *runner) installPackageFromPackageRoot(ctx context.Context) error { logger.Debug("Installing 
package...") installer, err := installer.NewForPackage(installer.Options{ - Kibana: r.options.KibanaClient, - PackageRoot: r.options.PackageRoot, - SkipValidation: true, - RepositoryRoot: r.options.RepositoryRoot, + Kibana: r.options.KibanaClient, + PackageRoot: r.options.PackageRoot, + SkipValidation: true, + RepositoryRoot: r.options.RepositoryRoot, + RequiredInputsResolver: r.options.RequiredInputsResolver, }) if err != nil { diff --git a/internal/builder/packages.go b/internal/builder/packages.go index 239fbf085e..714a551fbd 100644 --- a/internal/builder/packages.go +++ b/internal/builder/packages.go @@ -18,6 +18,7 @@ import ( "github.com/elastic/elastic-package/internal/files" "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/validation" ) @@ -31,11 +32,12 @@ type BuildOptions struct { BuildDir string // directory where all the built packages are placed and zipped packages are stored RepositoryRoot *os.Root - CreateZip bool - SignPackage bool - SkipValidation bool - UpdateReadmes bool - SchemaURLs fields.SchemaURLs + CreateZip bool + SignPackage bool + SkipValidation bool + UpdateReadmes bool + SchemaURLs fields.SchemaURLs + RequiredInputsResolver requiredinputs.Resolver } // BuildDirectory function locates the target build directory. If the directory doesn't exist, it will create it. 
@@ -232,6 +234,15 @@ func BuildPackage(options BuildOptions) (string, error) { return "", fmt.Errorf("resolving transform manifests failed: %w", err) } + resolver := options.RequiredInputsResolver + if resolver == nil { + resolver = &requiredinputs.NoopRequiredInputsResolver{} + } + err = resolver.Bundle(buildPackageRoot) + if err != nil { + return "", fmt.Errorf("bundling input package templates failed: %w", err) + } + if options.UpdateReadmes { err = docs.UpdateReadmes(options.RepositoryRoot, options.PackageRoot, buildPackageRoot, options.SchemaURLs) if err != nil { diff --git a/internal/packages/installer/factory.go b/internal/packages/installer/factory.go index 306a942b5c..7a66d5fc04 100644 --- a/internal/packages/installer/factory.go +++ b/internal/packages/installer/factory.go @@ -17,6 +17,7 @@ import ( "github.com/elastic/elastic-package/internal/kibana" "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/validation" ) @@ -35,12 +36,13 @@ type Installer interface { // Options are the parameters used to build an installer. type Options struct { - Kibana *kibana.Client - PackageRoot string // Root path of the package to be installed. - ZipPath string - SkipValidation bool - RepositoryRoot *os.Root // Root of the repository where package source code is located. - SchemaURLs fields.SchemaURLs + Kibana *kibana.Client + PackageRoot string // Root path of the package to be installed. + ZipPath string + SkipValidation bool + RepositoryRoot *os.Root // Root of the repository where package source code is located. + SchemaURLs fields.SchemaURLs + RequiredInputsResolver requiredinputs.Resolver // Input dependency resolver for downloading input packages. } // NewForPackage creates a new installer for a package, given its root path, or its prebuilt zip. 
@@ -89,13 +91,14 @@ func NewForPackage(options Options) (Installer, error) { } target, err := builder.BuildPackage(builder.BuildOptions{ - PackageRoot: options.PackageRoot, - CreateZip: supportsUploadZip, - SignPackage: false, - SkipValidation: options.SkipValidation, - RepositoryRoot: options.RepositoryRoot, - UpdateReadmes: false, - SchemaURLs: options.SchemaURLs, + PackageRoot: options.PackageRoot, + CreateZip: supportsUploadZip, + SignPackage: false, + SkipValidation: options.SkipValidation, + RepositoryRoot: options.RepositoryRoot, + UpdateReadmes: false, + SchemaURLs: options.SchemaURLs, + RequiredInputsResolver: options.RequiredInputsResolver, }) if err != nil { return nil, fmt.Errorf("failed to build package: %v", err) diff --git a/internal/resources/fleetpackage.go b/internal/resources/fleetpackage.go index e5c17e96ff..408fb74f88 100644 --- a/internal/resources/fleetpackage.go +++ b/internal/resources/fleetpackage.go @@ -17,6 +17,7 @@ import ( "github.com/elastic/elastic-package/internal/kibana" "github.com/elastic/elastic-package/internal/packages" "github.com/elastic/elastic-package/internal/packages/installer" + "github.com/elastic/elastic-package/internal/requiredinputs" ) type FleetPackage struct { @@ -38,6 +39,9 @@ type FleetPackage struct { // Force forces operations, as reinstalling a package that seems to // be already installed. Force bool + + // RequiredInputsResolver is the resolver for required input packages. 
+ RequiredInputsResolver requiredinputs.Resolver } func (f *FleetPackage) String() string { @@ -64,11 +68,12 @@ func (f *FleetPackage) installer(ctx resource.Context) (installer.Installer, err } return installer.NewForPackage(installer.Options{ - Kibana: provider.Client, - PackageRoot: f.PackageRoot, - SkipValidation: true, - RepositoryRoot: f.RepositoryRoot, - SchemaURLs: f.SchemaURLs, + Kibana: provider.Client, + PackageRoot: f.PackageRoot, + SkipValidation: true, + RepositoryRoot: f.RepositoryRoot, + SchemaURLs: f.SchemaURLs, + RequiredInputsResolver: f.RequiredInputsResolver, }) } diff --git a/internal/resources/fleetpackage_test.go b/internal/resources/fleetpackage_test.go index f0bb905475..d4036dd400 100644 --- a/internal/resources/fleetpackage_test.go +++ b/internal/resources/fleetpackage_test.go @@ -29,8 +29,9 @@ func TestRequiredProvider(t *testing.T) { _, err = manager.Apply(resource.Resources{ &FleetPackage{ - PackageRoot: "../../test/packages/parallel/nginx", - RepositoryRoot: repositoryRoot, + PackageRoot: "../../test/packages/parallel/nginx", + RepositoryRoot: repositoryRoot, + RequiredInputsResolver: &requiredInputsResolverMock{}, }, }) if assert.Error(t, err) { @@ -38,6 +39,17 @@ func TestRequiredProvider(t *testing.T) { } } +type requiredInputsResolverMock struct { + BundleFunc func(buildPackageRoot string) error +} + +func (r *requiredInputsResolverMock) Bundle(buildPackageRoot string) error { + if r.BundleFunc != nil { + return r.BundleFunc(buildPackageRoot) + } + return nil +} + func TestPackageLifecycle(t *testing.T) { cases := []struct { title string @@ -62,9 +74,10 @@ func TestPackageLifecycle(t *testing.T) { packageRoot := filepath.Join(repositoryRoot.Name(), "test", "packages", "parallel", c.name) fleetPackage := FleetPackage{ - PackageRoot: packageRoot, - RepositoryRoot: repositoryRoot, - SchemaURLs: fields.NewSchemaURLs(), + PackageRoot: packageRoot, + RepositoryRoot: repositoryRoot, + SchemaURLs: fields.NewSchemaURLs(), + 
RequiredInputsResolver: &requiredInputsResolverMock{}, } manager := resource.NewManager() manager.RegisterProvider(DefaultKibanaProviderName, &KibanaProvider{Client: kibanaClient}) diff --git a/internal/resources/fleetpolicy_test.go b/internal/resources/fleetpolicy_test.go index 1bf9617699..d7f96bf5f5 100644 --- a/internal/resources/fleetpolicy_test.go +++ b/internal/resources/fleetpolicy_test.go @@ -122,10 +122,11 @@ func withPackageResources(agentPolicy *FleetAgentPolicy, repostoryRoot *os.Root) var resources resource.Resources for _, policy := range agentPolicy.PackagePolicies { resources = append(resources, &FleetPackage{ - PackageRoot: policy.PackageRoot, - Absent: agentPolicy.Absent, - RepositoryRoot: repostoryRoot, - SchemaURLs: fields.NewSchemaURLs(), + PackageRoot: policy.PackageRoot, + Absent: agentPolicy.Absent, + RepositoryRoot: repostoryRoot, + SchemaURLs: fields.NewSchemaURLs(), + RequiredInputsResolver: &requiredInputsResolverMock{}, }) } return append(resources, agentPolicy) diff --git a/internal/stack/environment.go b/internal/stack/environment.go index 13d8faed26..5acafc1e84 100644 --- a/internal/stack/environment.go +++ b/internal/stack/environment.go @@ -154,7 +154,7 @@ func (p *environmentProvider) initClients(appConfig *install.ApplicationConfigur } p.elasticsearch = elasticsearch - p.registry = registry.NewClient(packageRegistryBaseURL(p.profile, appConfig)) + p.registry = registry.NewClient(PackageRegistryBaseURL(p.profile, appConfig)) return nil } diff --git a/internal/stack/registry.go b/internal/stack/registry.go index 4314389357..b1e8cea490 100644 --- a/internal/stack/registry.go +++ b/internal/stack/registry.go @@ -5,6 +5,10 @@ package stack import ( + "net/url" + "os" + "strings" + "github.com/elastic/elastic-package/internal/install" "github.com/elastic/elastic-package/internal/profile" "github.com/elastic/elastic-package/internal/registry" @@ -28,10 +32,10 @@ func packageRegistryProxyToURL(profile *profile.Profile, appConfig 
*install.Appl return registry.ProductionURL } -// packageRegistryBaseURL returns the package registry base URL to be used, considering +// PackageRegistryBaseURL returns the package registry base URL to be used, considering // profile settings and application configuration. The priority is given to // profile settings over application configuration. -func packageRegistryBaseURL(profile *profile.Profile, appConfig *install.ApplicationConfiguration) string { +func PackageRegistryBaseURL(profile *profile.Profile, appConfig *install.ApplicationConfiguration) string { if registryURL := profile.Config(configElasticEPRURL, ""); registryURL != "" { return registryURL } @@ -42,3 +46,28 @@ func packageRegistryBaseURL(profile *profile.Profile, appConfig *install.Applica } return registry.ProductionURL } + +// RegistryClientOptions returns TLS options for the registry client so it works +// with the elastic-package stack (same CA as Kibana/ES) or local HTTPS registries. +// Profile may be nil (e.g. in build); then only CACertificateEnv is used for CA. 
+func RegistryClientOptions(registryBaseURL string, profile *profile.Profile) []registry.ClientOption { + var opts []registry.ClientOption + caPath := os.Getenv(CACertificateEnv) + if caPath == "" && profile != nil { + caPath, _ = FindCACertificate(profile) + } + if caPath != "" { + if _, err := os.Stat(caPath); err == nil { + opts = append(opts, registry.CertificateAuthority(caPath)) + return opts + } + } + u, err := url.Parse(registryBaseURL) + if err != nil { + return opts + } + if u.Scheme == "https" && (strings.ToLower(u.Hostname()) == "localhost" || u.Hostname() == "127.0.0.1") { + opts = append(opts, registry.TLSSkipVerify()) + } + return opts +} diff --git a/internal/stack/serverless.go b/internal/stack/serverless.go index b190c682ad..bd8bcccb24 100644 --- a/internal/stack/serverless.go +++ b/internal/stack/serverless.go @@ -232,7 +232,7 @@ func (sp *serverlessProvider) createClients(project *serverless.Project, appConf return fmt.Errorf("failed to create kibana client: %w", err) } - sp.registryClient = registry.NewClient(packageRegistryBaseURL(sp.profile, appConfig)) + sp.registryClient = registry.NewClient(PackageRegistryBaseURL(sp.profile, appConfig)) return nil } diff --git a/internal/testrunner/runners/asset/tester.go b/internal/testrunner/runners/asset/tester.go index fa144ceda7..1998aca8cf 100644 --- a/internal/testrunner/runners/asset/tester.go +++ b/internal/testrunner/runners/asset/tester.go @@ -15,6 +15,7 @@ import ( "github.com/elastic/elastic-package/internal/kibana" "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/resources" "github.com/elastic/elastic-package/internal/testrunner" ) @@ -88,11 +89,12 @@ func (r *tester) Run(ctx context.Context) ([]testrunner.TestResult, error) { func (r *tester) resources(installedPackage bool) resources.Resources { return resources.Resources{ 
&resources.FleetPackage{ - PackageRoot: r.packageRoot, - Absent: !installedPackage, - Force: installedPackage, // Force re-installation, in case there are code changes in the same package version. - RepositoryRoot: r.repositoryRoot, - SchemaURLs: r.schemaURLs, + PackageRoot: r.packageRoot, + Absent: !installedPackage, + Force: installedPackage, // Force re-installation, in case there are code changes in the same package version. + RepositoryRoot: r.repositoryRoot, + SchemaURLs: r.schemaURLs, + RequiredInputsResolver: &requiredinputs.NoopRequiredInputsResolver{}, }, } } diff --git a/internal/testrunner/runners/policy/runner.go b/internal/testrunner/runners/policy/runner.go index 340577ab2b..0902decb4a 100644 --- a/internal/testrunner/runners/policy/runner.go +++ b/internal/testrunner/runners/policy/runner.go @@ -15,6 +15,7 @@ import ( "github.com/elastic/elastic-package/internal/kibana" "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/resources" "github.com/elastic/elastic-package/internal/testrunner" ) @@ -39,37 +40,40 @@ type runner struct { repositoryRoot *os.Root - schemaURLs fields.SchemaURLs + schemaURLs fields.SchemaURLs + requiredInputsResolver requiredinputs.Resolver } // Ensures that runner implements testrunner.TestRunner interface var _ testrunner.TestRunner = new(runner) type PolicyTestRunnerOptions struct { - KibanaClient *kibana.Client - PackageRoot string - DataStreams []string - FailOnMissingTests bool - GenerateTestResult bool - GlobalTestConfig testrunner.GlobalRunnerTestConfig - WithCoverage bool - CoverageType string - RepositoryRoot *os.Root - SchemaURLs fields.SchemaURLs + KibanaClient *kibana.Client + PackageRoot string + DataStreams []string + FailOnMissingTests bool + GenerateTestResult bool + GlobalTestConfig testrunner.GlobalRunnerTestConfig + WithCoverage bool + 
CoverageType string + RepositoryRoot *os.Root + SchemaURLs fields.SchemaURLs + RequiredInputsResolver requiredinputs.Resolver } func NewPolicyTestRunner(options PolicyTestRunnerOptions) *runner { runner := runner{ - packageRoot: options.PackageRoot, - kibanaClient: options.KibanaClient, - dataStreams: options.DataStreams, - failOnMissingTests: options.FailOnMissingTests, - generateTestResult: options.GenerateTestResult, - globalTestConfig: options.GlobalTestConfig, - withCoverage: options.WithCoverage, - coverageType: options.CoverageType, - repositoryRoot: options.RepositoryRoot, - schemaURLs: options.SchemaURLs, + packageRoot: options.PackageRoot, + kibanaClient: options.KibanaClient, + dataStreams: options.DataStreams, + failOnMissingTests: options.FailOnMissingTests, + generateTestResult: options.GenerateTestResult, + globalTestConfig: options.GlobalTestConfig, + withCoverage: options.WithCoverage, + coverageType: options.CoverageType, + repositoryRoot: options.RepositoryRoot, + schemaURLs: options.SchemaURLs, + requiredInputsResolver: options.RequiredInputsResolver, } runner.resourcesManager = resources.NewManager() runner.resourcesManager.RegisterProvider(resources.DefaultKibanaProviderName, &resources.KibanaProvider{Client: runner.kibanaClient}) @@ -169,9 +173,10 @@ func (r *runner) Type() testrunner.TestType { func (r *runner) setupSuite(ctx context.Context, manager *resources.Manager) (cleanup func(ctx context.Context) error, err error) { packageResource := resources.FleetPackage{ - PackageRoot: r.packageRoot, - RepositoryRoot: r.repositoryRoot, - SchemaURLs: r.schemaURLs, + PackageRoot: r.packageRoot, + RepositoryRoot: r.repositoryRoot, + SchemaURLs: r.schemaURLs, + RequiredInputsResolver: r.requiredInputsResolver, } setupResources := resources.Resources{ &packageResource, diff --git a/internal/testrunner/runners/system/runner.go b/internal/testrunner/runners/system/runner.go index a851c734aa..0f5360b0c3 100644 --- 
a/internal/testrunner/runners/system/runner.go +++ b/internal/testrunner/runners/system/runner.go @@ -19,6 +19,7 @@ import ( "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" "github.com/elastic/elastic-package/internal/profile" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/resources" "github.com/elastic/elastic-package/internal/servicedeployer" "github.com/elastic/elastic-package/internal/testrunner" @@ -295,11 +296,12 @@ func (r *runner) Type() testrunner.TestType { func (r *runner) resources(opts resourcesOptions) resources.Resources { return resources.Resources{ &resources.FleetPackage{ - PackageRoot: r.packageRoot, - Absent: !opts.installedPackage, - Force: opts.installedPackage, // Force re-installation, in case there are code changes in the same package version. - RepositoryRoot: r.repositoryRoot, - SchemaURLs: r.schemaURLs, + PackageRoot: r.packageRoot, + Absent: !opts.installedPackage, + Force: opts.installedPackage, // Force re-installation, in case there are code changes in the same package version. 
+ RepositoryRoot: r.repositoryRoot, + SchemaURLs: r.schemaURLs, + RequiredInputsResolver: &requiredinputs.NoopRequiredInputsResolver{}, }, } } diff --git a/internal/testrunner/script/package.go b/internal/testrunner/script/package.go index ed3ae283d6..c3e2b4dd4c 100644 --- a/internal/testrunner/script/package.go +++ b/internal/testrunner/script/package.go @@ -22,7 +22,9 @@ import ( "github.com/elastic/elastic-package/internal/files" "github.com/elastic/elastic-package/internal/packages" "github.com/elastic/elastic-package/internal/registry" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/resources" + "github.com/elastic/elastic-package/internal/stack" ) func addPackage(ts *testscript.TestScript, neg bool, args []string) { @@ -71,11 +73,12 @@ func addPackage(ts *testscript.TestScript, neg bool, args []string) { m := resources.NewManager() m.RegisterProvider(resources.DefaultKibanaProviderName, &resources.KibanaProvider{Client: stk.kibana}) _, err = m.ApplyCtx(ctx, resources.Resources{&resources.FleetPackage{ - PackageRoot: pkgRoot, - Absent: false, - Force: true, - RepositoryRoot: root, - SchemaURLs: fields.NewSchemaURLs(fields.WithECSBaseURL(ecsBaseSchemaURL)), + PackageRoot: pkgRoot, + Absent: false, + Force: true, + RepositoryRoot: root, + SchemaURLs: fields.NewSchemaURLs(fields.WithECSBaseURL(ecsBaseSchemaURL)), + RequiredInputsResolver: &requiredinputs.NoopRequiredInputsResolver{}, }}) ts.Check(decoratedWith("installing package resources", err)) @@ -124,10 +127,11 @@ func removePackage(ts *testscript.TestScript, neg bool, args []string) { m := resources.NewManager() m.RegisterProvider(resources.DefaultKibanaProviderName, &resources.KibanaProvider{Client: stk.kibana}) _, err = m.ApplyCtx(ctx, resources.Resources{&resources.FleetPackage{ - PackageRoot: pkgRoot, - Absent: true, - Force: true, - RepositoryRoot: root, // Apparently not required, but adding for safety. 
+ PackageRoot: pkgRoot, + Absent: true, + Force: true, + RepositoryRoot: root, // Apparently not required, but adding for safety. + RequiredInputsResolver: &requiredinputs.NoopRequiredInputsResolver{}, }}) ts.Check(decoratedWith("removing package resources", err)) @@ -184,7 +188,7 @@ func installPackageFromRegistry(ts *testscript.TestScript, neg bool, args []stri regPkgs[*profName] = append(regPkgs[*profName], registryPackage{name: name, version: version}) workDir := ts.MkAbs(".") - client := registry.NewClient(registryBaseURL) + client := registry.NewClient(registryBaseURL, stack.RegistryClientOptions(registryBaseURL, stk.profile)...) zipPath, err := client.DownloadPackage(name, version, workDir) ts.Check(decoratedWith("downloading package from registry", err)) diff --git a/internal/testrunner/script/script.go b/internal/testrunner/script/script.go index 14e5972a5c..910c6bc0ad 100644 --- a/internal/testrunner/script/script.go +++ b/internal/testrunner/script/script.go @@ -33,7 +33,9 @@ import ( "github.com/elastic/elastic-package/internal/install" "github.com/elastic/elastic-package/internal/packages" "github.com/elastic/elastic-package/internal/packages/changelog" + "github.com/elastic/elastic-package/internal/profile" "github.com/elastic/elastic-package/internal/registry" + "github.com/elastic/elastic-package/internal/requiredinputs" "github.com/elastic/elastic-package/internal/resources" "github.com/elastic/elastic-package/internal/servicedeployer" "github.com/elastic/elastic-package/internal/stack" @@ -53,6 +55,33 @@ type Options struct { UpdateScripts bool // testscript.Params.UpdateScripts ContinueOnError bool // testscript.Params.ContinueOnError TestWork bool // testscript.Params.TestWork + + // Profile selects the package registry URL from profile config (with app + // config as fallback). When nil, the current profile name from application + // configuration is loaded. 
+ Profile *profile.Profile +} + +func profileAndPackageRegistryBaseURL(opt Options, appConfig *install.ApplicationConfiguration) (*profile.Profile, string, error) { + prof := opt.Profile + if prof == nil { + var err error + prof, err = profile.LoadProfile(appConfig.CurrentProfile()) + if err != nil { + return nil, "", fmt.Errorf("loading profile %q: %w", appConfig.CurrentProfile(), err) + } + } + return prof, stack.PackageRegistryBaseURL(prof, appConfig), nil +} + +func scriptTestWorkdirRoot(workRoot string, opt Options) (workdirRoot string, err error) { + if opt.TestWork { + return os.MkdirTemp(workRoot, "*") + } + if err := os.Setenv("GOTMPDIR", workRoot); err != nil { + return "", fmt.Errorf("could not set temp dir var: %w", err) + } + return "", nil } // TODO: refactor Run to reduce cognitive complexity (currently 89). @@ -72,6 +101,10 @@ func Run(dst *[]testrunner.TestResult, w io.Writer, opt Options) error { if err != nil { return fmt.Errorf("could read configuration: %w", err) } + prof, eprBaseURL, err := profileAndPackageRegistryBaseURL(opt, appConfig) + if err != nil { + return err + } loc, err := locations.NewLocationManager() if err != nil { return err @@ -81,27 +114,14 @@ func Run(dst *[]testrunner.TestResult, w io.Writer, opt Options) error { if err != nil { return fmt.Errorf("could not make work space root: %w", err) } - var workdirRoot string - if opt.TestWork { - // Only create a work root and pass it in if --work has been requested. - // The behaviour of testscript is to set TestWork to true if the work - // root is non-zero, so just let testscript put it where it wants in the - // case that we have not requested work to be retained. This will be in - // os.MkdirTemp(os.Getenv("GOTMPDIR"), "go-test-script") which on most - // systems will be /tmp/go-test-script. 
However, due to… decisions, we - // cannot operate in that directory… - workdirRoot, err = os.MkdirTemp(workRoot, "*") - if err != nil { + // Only pass a non-zero work root when --work is set; otherwise set $GOTMPDIR + // so testscript uses a directory we can operate in (see scriptTestWorkdirRoot). + workdirRoot, err := scriptTestWorkdirRoot(workRoot, opt) + if err != nil { + if opt.TestWork { return fmt.Errorf("could not make work space: %w", err) } - } else { - // … so set $GOTMPDIR to a location that we can work in. - // - // This is all obviously awful. - err = os.Setenv("GOTMPDIR", workRoot) - if err != nil { - return fmt.Errorf("could not set temp dir var: %w", err) - } + return err } dirs, err := scripts(opt.Dir) @@ -201,7 +221,7 @@ func Run(dst *[]testrunner.TestResult, w io.Writer, opt Options) error { if err != nil { return err } - eprClient := registry.NewClient(appConfig.PackageRegistryBaseURL()) + eprClient := registry.NewClient(eprBaseURL, stack.RegistryClientOptions(eprBaseURL, prof)...) 
revisions, err := eprClient.Revisions(manifest.Name, registry.SearchOptions{}) if err != nil { return err @@ -237,7 +257,7 @@ func Run(dst *[]testrunner.TestResult, w io.Writer, opt Options) error { "CONFIG_PROFILES": loc.ProfileDir(), "HOME": home, "ECS_BASE_SCHEMA_URL": appConfig.SchemaURLs().ECSBase(), - "PACKAGE_REGISTRY_BASE_URL": appConfig.PackageRegistryBaseURL(), + "PACKAGE_REGISTRY_BASE_URL": eprBaseURL, } if pkgRoot != "" { scriptEnv["PACKAGE_NAME"] = manifest.Name @@ -408,9 +428,10 @@ func cleanUp(ctx context.Context, pkgRoot string, srvs map[string]servicedeploye m := resources.NewManager() m.RegisterProvider(resources.DefaultKibanaProviderName, &resources.KibanaProvider{Client: stk.kibana}) _, err := m.ApplyCtx(ctx, resources.Resources{&resources.FleetPackage{ - PackageRoot: pkgRoot, - Absent: true, - Force: true, + PackageRoot: pkgRoot, + Absent: true, + Force: true, + RequiredInputsResolver: &requiredinputs.NoopRequiredInputsResolver{}, }}) if err != nil && !strings.Contains(err.Error(), "is not installed") { errs = append(errs, err) From 18a010cd2668d66c1ce7f5b0b5897500437d5307 Mon Sep 17 00:00:00 2001 From: Tere Date: Mon, 13 Apr 2026 16:55:45 +0200 Subject: [PATCH 06/28] test: add manual package fixtures for composable required inputs Fixtures cover template bundling, variable merge scenarios, field bundling, linked template_path, and stream resolution. 
Made-with: Cursor --- test/manual_packages/README.md | 76 +++++++++++++++++++ .../agent/input/input.yml.hbs | 4 + .../fields_input_pkg/changelog.yml | 5 ++ .../fields_input_pkg/docs/README.md | 3 + .../fields_input_pkg/fields/base-fields.yml | 18 +++++ .../fields_input_pkg/manifest.yml | 32 ++++++++ .../test_input_pkg/agent/input/extra.yml.hbs | 2 + .../test_input_pkg/agent/input/input.yml.hbs | 4 + .../test_input_pkg/changelog.yml | 5 ++ .../test_input_pkg/docs/README.md | 3 + .../test_input_pkg/fields/base-fields.yml | 12 +++ .../test_input_pkg/manifest.yml | 34 +++++++++ .../agent/input/input.yml.hbs | 4 + .../var_merging_input_pkg/changelog.yml | 5 ++ .../var_merging_input_pkg/docs/README.md | 3 + .../fields/base-fields.yml | 12 +++ .../var_merging_input_pkg/manifest.yml | 45 +++++++++++ .../with_field_bundling/changelog.yml | 5 ++ .../field_logs/fields/base-fields.yml | 12 +++ .../data_stream/field_logs/manifest.yml | 6 ++ .../with_field_bundling/docs/README.md | 3 + .../with_field_bundling/manifest.yml | 33 ++++++++ .../_dev/test/config.yml | 3 + .../with_input_package_requires/changelog.yml | 5 ++ .../test_logs/agent/stream/stream.yml.hbs | 4 + .../test_logs/fields/base-fields.yml | 12 +++ .../data_stream/test_logs/manifest.yml | 19 +++++ .../docs/README.md | 4 + .../with_input_package_requires/manifest.yml | 34 +++++++++ .../_dev/test/config.yml | 3 + .../agent/input/_included/owned.hbs | 1 + .../agent/input/owned.hbs.link | 1 + .../with_linked_template_path/changelog.yml | 5 ++ .../test_logs/agent/stream/stream.yml.hbs | 4 + .../test_logs/fields/base-fields.yml | 12 +++ .../data_stream/test_logs/manifest.yml | 19 +++++ .../with_linked_template_path/docs/README.md | 5 ++ .../with_linked_template_path/manifest.yml | 37 +++++++++ .../_dev/test/config.yml | 3 + .../with_merging_ds_merges/changelog.yml | 5 ++ .../agent/stream/stream.yml.hbs | 4 + .../var_merging_logs/fields/base-fields.yml | 12 +++ .../data_stream/var_merging_logs/manifest.yml | 13 ++++ 
.../with_merging_ds_merges/docs/README.md | 13 ++++ .../with_merging_ds_merges/manifest.yml | 33 ++++++++ .../_dev/test/config.yml | 3 + .../changelog.yml | 5 ++ .../agent/stream/stream.yml.hbs | 4 + .../var_merging_logs/fields/base-fields.yml | 12 +++ .../data_stream/var_merging_logs/manifest.yml | 11 +++ .../docs/README.md | 8 ++ .../with_merging_duplicate_error/manifest.yml | 33 ++++++++ .../with_merging_full/_dev/test/config.yml | 3 + .../with_merging_full/changelog.yml | 5 ++ .../agent/stream/stream.yml.hbs | 4 + .../var_merging_logs/fields/base-fields.yml | 12 +++ .../data_stream/var_merging_logs/manifest.yml | 13 ++++ .../with_merging_full/docs/README.md | 20 +++++ .../with_merging_full/manifest.yml | 40 ++++++++++ .../_dev/test/config.yml | 3 + .../with_merging_no_override/changelog.yml | 5 ++ .../agent/stream/stream.yml.hbs | 4 + .../var_merging_logs/fields/base-fields.yml | 12 +++ .../data_stream/var_merging_logs/manifest.yml | 6 ++ .../with_merging_no_override/docs/README.md | 5 ++ .../with_merging_no_override/manifest.yml | 32 ++++++++ .../_dev/test/config.yml | 3 + .../changelog.yml | 5 ++ .../agent/stream/stream.yml.hbs | 4 + .../var_merging_logs/fields/base-fields.yml | 12 +++ .../data_stream/var_merging_logs/manifest.yml | 6 ++ .../docs/README.md | 11 +++ .../manifest.yml | 38 ++++++++++ .../_dev/test/config.yml | 3 + .../changelog.yml | 5 ++ .../alpha_logs/agent/stream/stream.yml.hbs | 4 + .../alpha_logs/fields/base-fields.yml | 12 +++ .../data_stream/alpha_logs/manifest.yml | 6 ++ .../beta_logs/agent/stream/stream.yml.hbs | 4 + .../beta_logs/fields/base-fields.yml | 12 +++ .../data_stream/beta_logs/manifest.yml | 6 ++ .../docs/README.md | 5 ++ .../manifest.yml | 52 +++++++++++++ 83 files changed, 1005 insertions(+) create mode 100644 test/manual_packages/README.md create mode 100644 test/manual_packages/required_inputs/fields_input_pkg/agent/input/input.yml.hbs create mode 100644 test/manual_packages/required_inputs/fields_input_pkg/changelog.yml 
create mode 100644 test/manual_packages/required_inputs/fields_input_pkg/docs/README.md create mode 100644 test/manual_packages/required_inputs/fields_input_pkg/fields/base-fields.yml create mode 100644 test/manual_packages/required_inputs/fields_input_pkg/manifest.yml create mode 100644 test/manual_packages/required_inputs/test_input_pkg/agent/input/extra.yml.hbs create mode 100644 test/manual_packages/required_inputs/test_input_pkg/agent/input/input.yml.hbs create mode 100644 test/manual_packages/required_inputs/test_input_pkg/changelog.yml create mode 100644 test/manual_packages/required_inputs/test_input_pkg/docs/README.md create mode 100644 test/manual_packages/required_inputs/test_input_pkg/fields/base-fields.yml create mode 100644 test/manual_packages/required_inputs/test_input_pkg/manifest.yml create mode 100644 test/manual_packages/required_inputs/var_merging_input_pkg/agent/input/input.yml.hbs create mode 100644 test/manual_packages/required_inputs/var_merging_input_pkg/changelog.yml create mode 100644 test/manual_packages/required_inputs/var_merging_input_pkg/docs/README.md create mode 100644 test/manual_packages/required_inputs/var_merging_input_pkg/fields/base-fields.yml create mode 100644 test/manual_packages/required_inputs/var_merging_input_pkg/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_field_bundling/changelog.yml create mode 100644 test/manual_packages/required_inputs/with_field_bundling/data_stream/field_logs/fields/base-fields.yml create mode 100644 test/manual_packages/required_inputs/with_field_bundling/data_stream/field_logs/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_field_bundling/docs/README.md create mode 100644 test/manual_packages/required_inputs/with_field_bundling/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_input_package_requires/_dev/test/config.yml create mode 100644 
test/manual_packages/required_inputs/with_input_package_requires/changelog.yml create mode 100644 test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/agent/stream/stream.yml.hbs create mode 100644 test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/fields/base-fields.yml create mode 100644 test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_input_package_requires/docs/README.md create mode 100644 test/manual_packages/required_inputs/with_input_package_requires/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_linked_template_path/_dev/test/config.yml create mode 100644 test/manual_packages/required_inputs/with_linked_template_path/agent/input/_included/owned.hbs create mode 100644 test/manual_packages/required_inputs/with_linked_template_path/agent/input/owned.hbs.link create mode 100644 test/manual_packages/required_inputs/with_linked_template_path/changelog.yml create mode 100644 test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/agent/stream/stream.yml.hbs create mode 100644 test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/fields/base-fields.yml create mode 100644 test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_linked_template_path/docs/README.md create mode 100644 test/manual_packages/required_inputs/with_linked_template_path/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_merging_ds_merges/_dev/test/config.yml create mode 100644 test/manual_packages/required_inputs/with_merging_ds_merges/changelog.yml create mode 100644 test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/agent/stream/stream.yml.hbs create mode 100644 
test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/fields/base-fields.yml create mode 100644 test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_merging_ds_merges/docs/README.md create mode 100644 test/manual_packages/required_inputs/with_merging_ds_merges/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_merging_duplicate_error/_dev/test/config.yml create mode 100644 test/manual_packages/required_inputs/with_merging_duplicate_error/changelog.yml create mode 100644 test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/agent/stream/stream.yml.hbs create mode 100644 test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/fields/base-fields.yml create mode 100644 test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_merging_duplicate_error/docs/README.md create mode 100644 test/manual_packages/required_inputs/with_merging_duplicate_error/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_merging_full/_dev/test/config.yml create mode 100644 test/manual_packages/required_inputs/with_merging_full/changelog.yml create mode 100644 test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/agent/stream/stream.yml.hbs create mode 100644 test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/fields/base-fields.yml create mode 100644 test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_merging_full/docs/README.md create mode 100644 test/manual_packages/required_inputs/with_merging_full/manifest.yml create mode 100644 
test/manual_packages/required_inputs/with_merging_no_override/_dev/test/config.yml create mode 100644 test/manual_packages/required_inputs/with_merging_no_override/changelog.yml create mode 100644 test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/agent/stream/stream.yml.hbs create mode 100644 test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/fields/base-fields.yml create mode 100644 test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_merging_no_override/docs/README.md create mode 100644 test/manual_packages/required_inputs/with_merging_no_override/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_merging_promotes_to_input/_dev/test/config.yml create mode 100644 test/manual_packages/required_inputs/with_merging_promotes_to_input/changelog.yml create mode 100644 test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/agent/stream/stream.yml.hbs create mode 100644 test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/fields/base-fields.yml create mode 100644 test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_merging_promotes_to_input/docs/README.md create mode 100644 test/manual_packages/required_inputs/with_merging_promotes_to_input/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_merging_two_policy_templates/_dev/test/config.yml create mode 100644 test/manual_packages/required_inputs/with_merging_two_policy_templates/changelog.yml create mode 100644 test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/agent/stream/stream.yml.hbs create mode 100644 
test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/fields/base-fields.yml create mode 100644 test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/agent/stream/stream.yml.hbs create mode 100644 test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/fields/base-fields.yml create mode 100644 test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/manifest.yml create mode 100644 test/manual_packages/required_inputs/with_merging_two_policy_templates/docs/README.md create mode 100644 test/manual_packages/required_inputs/with_merging_two_policy_templates/manifest.yml diff --git a/test/manual_packages/README.md b/test/manual_packages/README.md new file mode 100644 index 0000000000..94c7c3ed35 --- /dev/null +++ b/test/manual_packages/README.md @@ -0,0 +1,76 @@ +# Manual Test Packages + +Packages under `test/manual_packages/` are **not** picked up by CI build/install scripts (which glob `test/packages/*/*/`). They require manual setup to exercise. + +All **`requires.input`** fixtures live under [`test/manual_packages/required_inputs/`](required_inputs/). The same trees are used as fixtures by `go test` in [`internal/requiredinputs/variables_test.go`](../../internal/requiredinputs/variables_test.go) (variable merge) and [`internal/requiredinputs/fields_test.go`](../../internal/requiredinputs/fields_test.go) (field bundling). + +## required_inputs + +### Template bundling (smoke) + +- `required_inputs/test_input_pkg` — the input package that must be installed first. +- `required_inputs/with_input_package_requires` — an integration package that declares a dependency on `test_input_pkg`. 
+- `required_inputs/with_linked_template_path` — same as above, but the integration-owned policy input template is supplied via `agent/input/owned.hbs.link` (see `docs/howto/dependency_management.md` for `template_path` vs `.link` naming). + +### Variable merge (composable input vars) + +When an integration lists `requires.input` and its policy template references that input package with optional `vars`, elastic-package **merges** variable definitions from the input package into the built manifests (see [`internal/requiredinputs/variables.go`](../../internal/requiredinputs/variables.go) — `mergeVariables`). + +| Package | Role | +| --- | --- | +| `required_inputs/var_merging_input_pkg` | Required input package (`paths`, `encoding`, `timeout`). | +| `required_inputs/with_merging_full` | Promoted `paths` + `encoding`; DS merge for `timeout` + novel `custom_tag`. | +| `required_inputs/with_merging_promotes_to_input` | Only `paths` promoted; DS keeps `encoding`, `timeout`. | +| `required_inputs/with_merging_ds_merges` | No promotion; DS merges `encoding` title + adds `custom_tag`. | +| `required_inputs/with_merging_no_override` | No composable overrides; all base vars on DS, unchanged. | +| `required_inputs/with_merging_two_policy_templates` | Two PTs on the same input pkg: one promotes `paths` for its DS only; the other leaves all vars on the DS (`TestMergeVariables_TwoPolicyTemplatesScopedPromotion`). | +| `required_inputs/with_merging_duplicate_error` | Invalid: duplicate `paths` at DS level; **build should fail** with an error mentioning `paths`. | + +### Field bundling + +| Package | Role | +| --- | --- | +| `required_inputs/fields_input_pkg` | Required input package supplying field definitions. | +| `required_inputs/with_field_bundling` | Integration that requires `fields_input_pkg`; exercises merging field defs into the built data stream. | + +Build `fields_input_pkg` before `with_field_bundling`. 
See `TestBundleDataStreamFields_*` in [`fields_test.go`](../../internal/requiredinputs/fields_test.go). + +### Stream and input `package:` resolution + +After templates, variables, and fields are applied, the build replaces `package: ` on policy template inputs and data stream streams with the real input **type** from that required input package (implementation in [`internal/requiredinputs/streamdefs.go`](../../internal/requiredinputs/streamdefs.go)). + +### Manual testing workflow + +1. Start the stack and local package registry: + ```bash + elastic-package stack up -d + ``` +2. Configure `package_registry.base_url` in `~/.elastic-package/config.yml` so builds can resolve required input packages (see [local package registry how-to](../../docs/howto/local_package_registry.md) and the root [README](../../README.md) `package_registry` section). +3. Build and install in **dependency order** (input packages before integrations that require them). Examples: + + Template bundling smoke: + ```bash + elastic-package build -C test/manual_packages/required_inputs/test_input_pkg --zip + elastic-package build -C test/manual_packages/required_inputs/with_input_package_requires --zip + elastic-package build -C test/manual_packages/required_inputs/with_linked_template_path --zip + ``` + + Variable merge (build `var_merging_input_pkg` first, install it, then build the integration you need): + ```bash + elastic-package build -C test/manual_packages/required_inputs/var_merging_input_pkg --zip + elastic-package build -C test/manual_packages/required_inputs/with_merging_full --zip + ``` + + Field bundling (build `fields_input_pkg` first, then the integration): + ```bash + elastic-package build -C test/manual_packages/required_inputs/fields_input_pkg --zip + elastic-package build -C test/manual_packages/required_inputs/with_field_bundling --zip + ``` + +4. Install via the local registry in the same order (e.g. 
`test_input_pkg` before `with_input_package_requires`; `var_merging_input_pkg` before any `with_merging_*` integration; `fields_input_pkg` before `with_field_bundling`). + +For **expected merged manifests** after a successful variable-merge build, see `TestMergeVariables_*` in [`variables_test.go`](../../internal/requiredinputs/variables_test.go). For `with_merging_duplicate_error`, expect `elastic-package build` to fail and the error to contain `paths`. + +### When composable inputs are fully supported in CI + +Move `required_inputs/` under `test/packages/required_inputs/` so [`scripts/test-build-install-zip.sh`](../../scripts/test-build-install-zip.sh) can build and install them automatically (install order is lexicographic, so `var_merging_input_pkg` is installed before `with_merging_*`). Update [`internal/requiredinputs/variables_test.go`](../../internal/requiredinputs/variables_test.go) fixture paths to match. diff --git a/test/manual_packages/required_inputs/fields_input_pkg/agent/input/input.yml.hbs b/test/manual_packages/required_inputs/fields_input_pkg/agent/input/input.yml.hbs new file mode 100644 index 0000000000..9e9c27a8c0 --- /dev/null +++ b/test/manual_packages/required_inputs/fields_input_pkg/agent/input/input.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/fields_input_pkg/changelog.yml b/test/manual_packages/required_inputs/fields_input_pkg/changelog.yml new file mode 100644 index 0000000000..813cf1cf77 --- /dev/null +++ b/test/manual_packages/required_inputs/fields_input_pkg/changelog.yml @@ -0,0 +1,5 @@ +- version: "0.1.0" + changes: + - description: Initial release. 
+ type: enhancement + link: https://github.com/elastic/elastic-package/pull/1 diff --git a/test/manual_packages/required_inputs/fields_input_pkg/docs/README.md b/test/manual_packages/required_inputs/fields_input_pkg/docs/README.md new file mode 100644 index 0000000000..1c1576a01b --- /dev/null +++ b/test/manual_packages/required_inputs/fields_input_pkg/docs/README.md @@ -0,0 +1,3 @@ +# Fields Input Package + +Input package used as a test fixture for field bundling tests. diff --git a/test/manual_packages/required_inputs/fields_input_pkg/fields/base-fields.yml b/test/manual_packages/required_inputs/fields_input_pkg/fields/base-fields.yml new file mode 100644 index 0000000000..2b59a9a276 --- /dev/null +++ b/test/manual_packages/required_inputs/fields_input_pkg/fields/base-fields.yml @@ -0,0 +1,18 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. +- name: message + type: text + description: Log message. +- name: log.level + type: keyword + description: Log level. diff --git a/test/manual_packages/required_inputs/fields_input_pkg/manifest.yml b/test/manual_packages/required_inputs/fields_input_pkg/manifest.yml new file mode 100644 index 0000000000..bf4502f107 --- /dev/null +++ b/test/manual_packages/required_inputs/fields_input_pkg/manifest.yml @@ -0,0 +1,32 @@ +format_version: 3.6.0 +name: fields_input_pkg +title: Fields Input Package +description: Input package used as a test fixture for field bundling. +version: 0.1.0 +type: input +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +policy_templates: + - name: field_bundling + type: logs + title: Field Bundling + description: Collect logs with field bundling. 
+ input: logfile + template_path: input.yml.hbs + vars: + - name: paths + type: text + title: Paths + multi: true + required: true + show_user: true + default: + - /var/log/*.log +owner: + github: elastic/integrations + type: elastic diff --git a/test/manual_packages/required_inputs/test_input_pkg/agent/input/extra.yml.hbs b/test/manual_packages/required_inputs/test_input_pkg/agent/input/extra.yml.hbs new file mode 100644 index 0000000000..c51c9f1721 --- /dev/null +++ b/test/manual_packages/required_inputs/test_input_pkg/agent/input/extra.yml.hbs @@ -0,0 +1,2 @@ +exclude_files: + - ".gz$" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/test_input_pkg/agent/input/input.yml.hbs b/test/manual_packages/required_inputs/test_input_pkg/agent/input/input.yml.hbs new file mode 100644 index 0000000000..9e9c27a8c0 --- /dev/null +++ b/test/manual_packages/required_inputs/test_input_pkg/agent/input/input.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/test_input_pkg/changelog.yml b/test/manual_packages/required_inputs/test_input_pkg/changelog.yml new file mode 100644 index 0000000000..0f9966a2de --- /dev/null +++ b/test/manual_packages/required_inputs/test_input_pkg/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Initial release. + type: enhancement + link: https://github.com/elastic/elastic-package/issues/3278 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/test_input_pkg/docs/README.md b/test/manual_packages/required_inputs/test_input_pkg/docs/README.md new file mode 100644 index 0000000000..5fa7854175 --- /dev/null +++ b/test/manual_packages/required_inputs/test_input_pkg/docs/README.md @@ -0,0 +1,3 @@ +# Test Input Package + +This is a test fixture package used to verify template bundling during build. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/test_input_pkg/fields/base-fields.yml b/test/manual_packages/required_inputs/test_input_pkg/fields/base-fields.yml new file mode 100644 index 0000000000..d3b0f5a163 --- /dev/null +++ b/test/manual_packages/required_inputs/test_input_pkg/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/test_input_pkg/manifest.yml b/test/manual_packages/required_inputs/test_input_pkg/manifest.yml new file mode 100644 index 0000000000..6a83dcc4bb --- /dev/null +++ b/test/manual_packages/required_inputs/test_input_pkg/manifest.yml @@ -0,0 +1,34 @@ +format_version: 3.6.0 +name: test_input_pkg +title: Test Input Package +description: Input package used as a test fixture for template bundling. +version: 0.1.0 +type: input +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +policy_templates: + - name: test_input + type: logs + title: Test Input + description: Collect test logs with a custom input template. 
+ input: logfile + template_paths: + - input.yml.hbs + - extra.yml.hbs + vars: + - name: paths + type: text + title: Paths + multi: true + required: true + show_user: true + default: + - /var/log/*.log +owner: + github: elastic/integrations + type: elastic \ No newline at end of file diff --git a/test/manual_packages/required_inputs/var_merging_input_pkg/agent/input/input.yml.hbs b/test/manual_packages/required_inputs/var_merging_input_pkg/agent/input/input.yml.hbs new file mode 100644 index 0000000000..9e9c27a8c0 --- /dev/null +++ b/test/manual_packages/required_inputs/var_merging_input_pkg/agent/input/input.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/var_merging_input_pkg/changelog.yml b/test/manual_packages/required_inputs/var_merging_input_pkg/changelog.yml new file mode 100644 index 0000000000..fb3f5f7235 --- /dev/null +++ b/test/manual_packages/required_inputs/var_merging_input_pkg/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Initial release. + type: enhancement + link: https://github.com/elastic/elastic-package/issues/1 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/var_merging_input_pkg/docs/README.md b/test/manual_packages/required_inputs/var_merging_input_pkg/docs/README.md new file mode 100644 index 0000000000..894e4fe149 --- /dev/null +++ b/test/manual_packages/required_inputs/var_merging_input_pkg/docs/README.md @@ -0,0 +1,3 @@ +# Var Merging Input Package + +Input package used as a test fixture for variable merging tests. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/var_merging_input_pkg/fields/base-fields.yml b/test/manual_packages/required_inputs/var_merging_input_pkg/fields/base-fields.yml new file mode 100644 index 0000000000..d3b0f5a163 --- /dev/null +++ b/test/manual_packages/required_inputs/var_merging_input_pkg/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/var_merging_input_pkg/manifest.yml b/test/manual_packages/required_inputs/var_merging_input_pkg/manifest.yml new file mode 100644 index 0000000000..0e315aa0de --- /dev/null +++ b/test/manual_packages/required_inputs/var_merging_input_pkg/manifest.yml @@ -0,0 +1,45 @@ +format_version: 3.6.0 +name: var_merging_input_pkg +title: Var Merging Input Package +description: Input package used as a test fixture for variable merging. +version: 0.1.0 +type: input +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +policy_templates: + - name: var_merging + type: logs + title: Var Merging + description: Collect logs with multiple variables. 
+ input: logfile + template_path: input.yml.hbs + vars: + - name: paths + type: text + title: Paths + multi: true + required: true + show_user: true + default: + - /var/log/*.log + - name: encoding + type: text + title: Encoding + multi: false + required: false + show_user: false + - name: timeout + type: text + title: Timeout + multi: false + required: false + show_user: false + default: 30s +owner: + github: elastic/integrations + type: elastic \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_field_bundling/changelog.yml b/test/manual_packages/required_inputs/with_field_bundling/changelog.yml new file mode 100644 index 0000000000..813cf1cf77 --- /dev/null +++ b/test/manual_packages/required_inputs/with_field_bundling/changelog.yml @@ -0,0 +1,5 @@ +- version: "0.1.0" + changes: + - description: Initial release. + type: enhancement + link: https://github.com/elastic/elastic-package/pull/1 diff --git a/test/manual_packages/required_inputs/with_field_bundling/data_stream/field_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_field_bundling/data_stream/field_logs/fields/base-fields.yml new file mode 100644 index 0000000000..0d1791ffed --- /dev/null +++ b/test/manual_packages/required_inputs/with_field_bundling/data_stream/field_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
diff --git a/test/manual_packages/required_inputs/with_field_bundling/data_stream/field_logs/manifest.yml b/test/manual_packages/required_inputs/with_field_bundling/data_stream/field_logs/manifest.yml new file mode 100644 index 0000000000..826c7c676f --- /dev/null +++ b/test/manual_packages/required_inputs/with_field_bundling/data_stream/field_logs/manifest.yml @@ -0,0 +1,6 @@ +title: Field Logs +type: logs +streams: + - package: fields_input_pkg + title: Field Logs from Input Package + description: Collect field logs using the referenced input package. diff --git a/test/manual_packages/required_inputs/with_field_bundling/docs/README.md b/test/manual_packages/required_inputs/with_field_bundling/docs/README.md new file mode 100644 index 0000000000..87332b2fa6 --- /dev/null +++ b/test/manual_packages/required_inputs/with_field_bundling/docs/README.md @@ -0,0 +1,3 @@ +# Integration With Field Bundling + +Integration package that requires an input package, used to test field bundling. diff --git a/test/manual_packages/required_inputs/with_field_bundling/manifest.yml b/test/manual_packages/required_inputs/with_field_bundling/manifest.yml new file mode 100644 index 0000000000..487ea2e57a --- /dev/null +++ b/test/manual_packages/required_inputs/with_field_bundling/manifest.yml @@ -0,0 +1,33 @@ +format_version: 3.6.0 +name: with_field_bundling +title: Integration With Field Bundling +description: >- + Integration package that requires an input package, used to test field bundling. + The input package defines additional fields (message, log.level) that are not + present in the integration's data stream and should be bundled in. 
+version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: fields_input_pkg + version: "0.1.0" +policy_templates: + - name: field_logs + title: Field Logs + description: Collect logs via field bundling input package + data_streams: + - field_logs + inputs: + - package: fields_input_pkg + title: Collect logs via field bundling input package + description: Use the field bundling input package to collect logs +owner: + github: elastic/integrations + type: elastic diff --git a/test/manual_packages/required_inputs/with_input_package_requires/_dev/test/config.yml b/test/manual_packages/required_inputs/with_input_package_requires/_dev/test/config.yml new file mode 100644 index 0000000000..c4e73f3a8d --- /dev/null +++ b/test/manual_packages/required_inputs/with_input_package_requires/_dev/test/config.yml @@ -0,0 +1,3 @@ +requires: + - package: test_input_pkg + source: "../../test_input_pkg" diff --git a/test/manual_packages/required_inputs/with_input_package_requires/changelog.yml b/test/manual_packages/required_inputs/with_input_package_requires/changelog.yml new file mode 100644 index 0000000000..0f9966a2de --- /dev/null +++ b/test/manual_packages/required_inputs/with_input_package_requires/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Initial release. 
+ type: enhancement + link: https://github.com/elastic/elastic-package/issues/3278 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9e9c27a8c0 --- /dev/null +++ b/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/fields/base-fields.yml new file mode 100644 index 0000000000..d3b0f5a163 --- /dev/null +++ b/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/manifest.yml b/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/manifest.yml new file mode 100644 index 0000000000..4e198906eb --- /dev/null +++ b/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/manifest.yml @@ -0,0 +1,19 @@ +title: Test Logs +type: logs +streams: + - package: test_input_pkg + title: Test Logs from Input Package + description: Collect test logs using the referenced input package. + - input: logs + title: Test Logs + description: Collect test logs using the logs input. + template_path: stream.yml.hbs + vars: + - name: paths + type: text + title: Paths + multi: true + required: true + show_user: true + default: + - /var/log/test/*.log \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_input_package_requires/docs/README.md b/test/manual_packages/required_inputs/with_input_package_requires/docs/README.md new file mode 100644 index 0000000000..22de7c28e4 --- /dev/null +++ b/test/manual_packages/required_inputs/with_input_package_requires/docs/README.md @@ -0,0 +1,4 @@ +# Integration With Required Input Package + +This is a test fixture integration package that demonstrates template bundling +from a required input package. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_input_package_requires/manifest.yml b/test/manual_packages/required_inputs/with_input_package_requires/manifest.yml new file mode 100644 index 0000000000..7a6ad33229 --- /dev/null +++ b/test/manual_packages/required_inputs/with_input_package_requires/manifest.yml @@ -0,0 +1,34 @@ +format_version: 3.6.0 +name: with_input_package_requires +title: Integration With Required Input Package +description: >- + Integration package that requires an input package, used to test template bundling. 
+version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: test_input_pkg + version: "0.1.0" +policy_templates: + - name: test_logs + title: Test logs + description: Collect test logs + data_streams: + - test_logs + inputs: + - package: test_input_pkg + title: Collect test logs via input package + description: Use the test input package to collect logs + - type: logs + title: Collect test logs via logs input + description: Use the logs input to collect logs +owner: + github: elastic/integrations + type: elastic \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_linked_template_path/_dev/test/config.yml b/test/manual_packages/required_inputs/with_linked_template_path/_dev/test/config.yml new file mode 100644 index 0000000000..c4e73f3a8d --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/_dev/test/config.yml @@ -0,0 +1,3 @@ +requires: + - package: test_input_pkg + source: "../../test_input_pkg" diff --git a/test/manual_packages/required_inputs/with_linked_template_path/agent/input/_included/owned.hbs b/test/manual_packages/required_inputs/with_linked_template_path/agent/input/_included/owned.hbs new file mode 100644 index 0000000000..e291ebccc4 --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/agent/input/_included/owned.hbs @@ -0,0 +1 @@ +# integration-owned template for composable link test diff --git a/test/manual_packages/required_inputs/with_linked_template_path/agent/input/owned.hbs.link b/test/manual_packages/required_inputs/with_linked_template_path/agent/input/owned.hbs.link new file mode 100644 index 0000000000..6338b13242 --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/agent/input/owned.hbs.link @@ -0,0 +1 @@ +./_included/owned.hbs dbf30556543232c62e86aa8cecf1128fba9ae97cbee1fa5064d52f078ab51393 diff --git 
a/test/manual_packages/required_inputs/with_linked_template_path/changelog.yml b/test/manual_packages/required_inputs/with_linked_template_path/changelog.yml new file mode 100644 index 0000000000..c9785533f6 --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Manual fixture for composable build with linked policy template path. + type: enhancement + link: https://github.com/elastic/elastic-package/issues/3278 diff --git a/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9e9c27a8c0 --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/fields/base-fields.yml new file mode 100644 index 0000000000..d3b0f5a163 --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/manifest.yml b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/manifest.yml new file mode 100644 index 0000000000..4e198906eb --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/manifest.yml @@ -0,0 +1,19 @@ +title: Test Logs +type: logs +streams: + - package: test_input_pkg + title: Test Logs from Input Package + description: Collect test logs using the referenced input package. + - input: logs + title: Test Logs + description: Collect test logs using the logs input. + template_path: stream.yml.hbs + vars: + - name: paths + type: text + title: Paths + multi: true + required: true + show_user: true + default: + - /var/log/test/*.log \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_linked_template_path/docs/README.md b/test/manual_packages/required_inputs/with_linked_template_path/docs/README.md new file mode 100644 index 0000000000..1cc6fc4b61 --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/docs/README.md @@ -0,0 +1,5 @@ +# Integration With Linked Policy Template Path + +Manual fixture for composable integrations where `agent/input/owned.hbs` is produced +from `owned.hbs.link` at build time. The manifest uses `template_path: owned.hbs` +(the materialized filename), matching what Fleet expects after `elastic-package build`. 
diff --git a/test/manual_packages/required_inputs/with_linked_template_path/manifest.yml b/test/manual_packages/required_inputs/with_linked_template_path/manifest.yml new file mode 100644 index 0000000000..c6203d6c7b --- /dev/null +++ b/test/manual_packages/required_inputs/with_linked_template_path/manifest.yml @@ -0,0 +1,37 @@ +format_version: 3.6.0 +name: with_linked_template_path +title: Integration With Linked Policy Template Path +description: >- + Like with_input_package_requires, but the integration-owned policy input template + is provided via a .link file (agent/input/owned.hbs). The manifest must list + template_path: owned.hbs (materialized name), not owned.hbs.link. +version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: test_input_pkg + version: "0.1.0" +policy_templates: + - name: test_logs + title: Test logs + description: Collect test logs + data_streams: + - test_logs + inputs: + - package: test_input_pkg + title: Collect test logs via input package + description: Use the test input package to collect logs + template_path: owned.hbs + - type: logs + title: Collect test logs via logs input + description: Use the logs input to collect logs +owner: + github: elastic/integrations + type: elastic diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_ds_merges/_dev/test/config.yml new file mode 100644 index 0000000000..bbb3460521 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/_dev/test/config.yml @@ -0,0 +1,3 @@ +requires: + - package: var_merging_input_pkg + source: "../../var_merging_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/changelog.yml b/test/manual_packages/required_inputs/with_merging_ds_merges/changelog.yml new file mode 100644 index 
0000000000..fb3f5f7235 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Initial release. + type: enhancement + link: https://github.com/elastic/elastic-package/issues/1 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9e9c27a8c0 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/fields/base-fields.yml new file mode 100644 index 0000000000..d3b0f5a163 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/manifest.yml new file mode 100644 index 0000000000..589b1e1604 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/manifest.yml @@ -0,0 +1,13 @@ +title: Var Merging Logs +type: logs +streams: + - package: var_merging_input_pkg + title: Var Merging Logs + description: Collect logs using the var merging input package. + vars: + - name: encoding + title: Log Encoding Override + - name: custom_tag + type: text + title: Custom Tag + show_user: true \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/docs/README.md b/test/manual_packages/required_inputs/with_merging_ds_merges/docs/README.md new file mode 100644 index 0000000000..db7779e04e --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/docs/README.md @@ -0,0 +1,13 @@ +# Variable Merging - Data Stream Merges + +Test fixture: composable package with no policy template variable overrides. +The data stream manifest overrides the "encoding" variable from the input +package (providing a different title) and adds a new "custom_tag" variable. 
+ +Expected result after merging: +- Input variables: (none) +- Data stream variables: + - paths (unchanged from input package) + - encoding (merged: base from input pkg, title overridden) + - timeout (unchanged from input package) + - custom_tag (new, from data stream manifest) \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/manifest.yml b/test/manual_packages/required_inputs/with_merging_ds_merges/manifest.yml new file mode 100644 index 0000000000..9ad6c0e1a8 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/manifest.yml @@ -0,0 +1,33 @@ +format_version: 3.6.0 +name: with_merging_ds_merges +title: Variable Merging - Data Stream Merges +description: >- + Composable package with no policy template variable overrides. The data stream + manifest overrides the "encoding" variable (changing its title) and introduces + a new "custom_tag" variable. All variables remain in the data stream list. +version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: var_merging_input_pkg + version: "0.1.0" +policy_templates: + - name: var_merging_logs + title: Var Merging Logs + description: Collect logs via var merging input package + data_streams: + - var_merging_logs + inputs: + - package: var_merging_input_pkg + title: Collect logs via var merging input package + description: Use the var merging input package to collect logs +owner: + github: elastic/integrations + type: elastic \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_duplicate_error/_dev/test/config.yml new file mode 100644 index 0000000000..bbb3460521 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/_dev/test/config.yml @@ -0,0 +1,3 @@ +requires: + - package: 
var_merging_input_pkg + source: "../../var_merging_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/changelog.yml b/test/manual_packages/required_inputs/with_merging_duplicate_error/changelog.yml new file mode 100644 index 0000000000..fb3f5f7235 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Initial release. + type: enhancement + link: https://github.com/elastic/elastic-package/issues/1 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9e9c27a8c0 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/fields/base-fields.yml new file mode 100644 index 0000000000..d3b0f5a163 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/manifest.yml new file mode 100644 index 0000000000..f7b06783dd --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/manifest.yml @@ -0,0 +1,11 @@ +title: Var Merging Logs +type: logs +streams: + - package: var_merging_input_pkg + title: Var Merging Logs + description: Collect logs using the var merging input package. + vars: + - name: paths + title: First paths definition + - name: paths + title: Duplicate paths definition \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/docs/README.md b/test/manual_packages/required_inputs/with_merging_duplicate_error/docs/README.md new file mode 100644 index 0000000000..83f27f9389 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/docs/README.md @@ -0,0 +1,8 @@ +# Variable Merging - Duplicate Error + +Test fixture: composable package whose data stream manifest defines the "paths" +variable twice. The merging algorithm must detect this duplicate and return an +error (Step 5: fail if there are multiple variables with the same name). + +Expected result: error indicating a duplicate variable name "paths" in the data +stream variable list. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/manifest.yml b/test/manual_packages/required_inputs/with_merging_duplicate_error/manifest.yml new file mode 100644 index 0000000000..64221efbaa --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/manifest.yml @@ -0,0 +1,33 @@ +format_version: 3.6.0 +name: with_merging_duplicate_error +title: Variable Merging - Duplicate Error +description: >- + Composable package whose data stream manifest defines the "paths" variable + twice. This should cause the variable merging step to fail with a duplicate + variable name error. +version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: var_merging_input_pkg + version: "0.1.0" +policy_templates: + - name: var_merging_logs + title: Var Merging Logs + description: Collect logs via var merging input package + data_streams: + - var_merging_logs + inputs: + - package: var_merging_input_pkg + title: Collect logs via var merging input package + description: Use the var merging input package to collect logs +owner: + github: elastic/integrations + type: elastic \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_full/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_full/_dev/test/config.yml new file mode 100644 index 0000000000..bbb3460521 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_full/_dev/test/config.yml @@ -0,0 +1,3 @@ +requires: + - package: var_merging_input_pkg + source: "../../var_merging_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_full/changelog.yml b/test/manual_packages/required_inputs/with_merging_full/changelog.yml new file mode 100644 index 0000000000..fb3f5f7235 --- /dev/null +++ 
b/test/manual_packages/required_inputs/with_merging_full/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Initial release. + type: enhancement + link: https://github.com/elastic/elastic-package/issues/1 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9e9c27a8c0 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/fields/base-fields.yml new file mode 100644 index 0000000000..d3b0f5a163 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/manifest.yml new file mode 100644 index 0000000000..d3a6f017b0 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/manifest.yml @@ -0,0 +1,13 @@ +title: Var Merging Logs +type: logs +streams: + - package: var_merging_input_pkg + title: Var Merging Logs + description: Collect logs using the var merging input package. + vars: + - name: timeout + description: Timeout for log collection. + - name: custom_tag + type: text + title: Custom Tag + show_user: true \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_full/docs/README.md b/test/manual_packages/required_inputs/with_merging_full/docs/README.md new file mode 100644 index 0000000000..2900be833f --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_full/docs/README.md @@ -0,0 +1,20 @@ +# Variable Merging - Full Mix + +Test fixture: composable package that exercises all five variable merging steps +from SPEC.md simultaneously. 
+ +Policy template input vars (Step 2 → Step 3 promotion): +- "paths" override with new default → promoted to input variable +- "encoding" override with show_user:true → promoted to input variable + +Data stream manifest vars (Step 4 merge): +- "timeout" override with new description → merged with remaining DS variable +- "custom_tag" new variable → added to DS variables + +Expected result after merging: +- Input variables: + - paths (merged: base from input pkg, default overridden to /var/log/custom/*.log) + - encoding (merged: base from input pkg, show_user overridden to true) +- Data stream variables: + - timeout (merged: base from input pkg, description overridden) + - custom_tag (new, from data stream manifest) \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_full/manifest.yml b/test/manual_packages/required_inputs/with_merging_full/manifest.yml new file mode 100644 index 0000000000..6b617324f5 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_full/manifest.yml @@ -0,0 +1,40 @@ +format_version: 3.6.0 +name: with_merging_full +title: Variable Merging - Full Mix +description: >- + Composable package exercising all variable merging steps. The policy template + overrides "paths" and "encoding" (both promoted to input variables). The data + stream manifest overrides "timeout" (merged with the remaining data stream + variable) and adds "custom_tag" (new data stream variable). 
+version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: var_merging_input_pkg + version: "0.1.0" +policy_templates: + - name: var_merging_logs + title: Var Merging Logs + description: Collect logs via var merging input package + data_streams: + - var_merging_logs + inputs: + - package: var_merging_input_pkg + title: Collect logs via var merging input package + description: Use the var merging input package to collect logs + vars: + - name: paths + default: + - /var/log/custom/*.log + - name: encoding + show_user: true +owner: + github: elastic/integrations + type: elastic \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_no_override/_dev/test/config.yml new file mode 100644 index 0000000000..bbb3460521 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_no_override/_dev/test/config.yml @@ -0,0 +1,3 @@ +requires: + - package: var_merging_input_pkg + source: "../../var_merging_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/changelog.yml b/test/manual_packages/required_inputs/with_merging_no_override/changelog.yml new file mode 100644 index 0000000000..fb3f5f7235 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_no_override/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Initial release. 
+ type: enhancement + link: https://github.com/elastic/elastic-package/issues/1 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9e9c27a8c0 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/fields/base-fields.yml new file mode 100644 index 0000000000..d3b0f5a163 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/manifest.yml new file mode 100644 index 0000000000..2026cd129c --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/manifest.yml @@ -0,0 +1,6 @@ +title: Var Merging Logs +type: logs +streams: + - package: var_merging_input_pkg + title: Var Merging Logs + description: Collect logs using the var merging input package. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/docs/README.md b/test/manual_packages/required_inputs/with_merging_no_override/docs/README.md new file mode 100644 index 0000000000..cd0cb48e30 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_no_override/docs/README.md @@ -0,0 +1,5 @@ +# Variable Merging - No Override + +Test fixture: composable package with no variable overrides. All variables +defined in the input package policy template become data stream variables +unchanged. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/manifest.yml b/test/manual_packages/required_inputs/with_merging_no_override/manifest.yml new file mode 100644 index 0000000000..ebc0de82ac --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_no_override/manifest.yml @@ -0,0 +1,32 @@ +format_version: 3.6.0 +name: with_merging_no_override +title: Variable Merging - No Override +description: >- + Composable package with no variable overrides at the policy template or data + stream level. All input package vars remain as data stream variables. 
+version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: var_merging_input_pkg + version: "0.1.0" +policy_templates: + - name: var_merging_logs + title: Var Merging Logs + description: Collect logs via var merging input package + data_streams: + - var_merging_logs + inputs: + - package: var_merging_input_pkg + title: Collect logs via var merging input package + description: Use the var merging input package to collect logs +owner: + github: elastic/integrations + type: elastic \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_promotes_to_input/_dev/test/config.yml new file mode 100644 index 0000000000..bbb3460521 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/_dev/test/config.yml @@ -0,0 +1,3 @@ +requires: + - package: var_merging_input_pkg + source: "../../var_merging_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/changelog.yml b/test/manual_packages/required_inputs/with_merging_promotes_to_input/changelog.yml new file mode 100644 index 0000000000..fb3f5f7235 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Initial release. 
+ type: enhancement + link: https://github.com/elastic/elastic-package/issues/1 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9e9c27a8c0 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/fields/base-fields.yml new file mode 100644 index 0000000000..d3b0f5a163 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/manifest.yml new file mode 100644 index 0000000000..2026cd129c --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/manifest.yml @@ -0,0 +1,6 @@ +title: Var Merging Logs +type: logs +streams: + - package: var_merging_input_pkg + title: Var Merging Logs + description: Collect logs using the var merging input package. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/docs/README.md b/test/manual_packages/required_inputs/with_merging_promotes_to_input/docs/README.md new file mode 100644 index 0000000000..cead82918f --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/docs/README.md @@ -0,0 +1,11 @@ +# Variable Merging - Promotes to Input Var + +Test fixture: composable package whose policy template declares a "paths" +variable override. Because "paths" is also defined in the input package policy +template, it is promoted from a data stream variable to an input variable and +merged (input package definition is the base; the override here changes the +default path). 
+ +Expected result after merging: +- Input variables: paths (merged, default overridden to /var/log/custom/*.log) +- Data stream variables: encoding, timeout \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/manifest.yml b/test/manual_packages/required_inputs/with_merging_promotes_to_input/manifest.yml new file mode 100644 index 0000000000..c02a62f75d --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/manifest.yml @@ -0,0 +1,38 @@ +format_version: 3.6.0 +name: with_merging_promotes_to_input +title: Variable Merging - Promotes to Input Var +description: >- + Composable package whose policy template overrides the "paths" variable from + the input package. This causes "paths" to be promoted from a data stream + variable to an input variable and merged. "encoding" and "timeout" remain as + data stream variables. +version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: var_merging_input_pkg + version: "0.1.0" +policy_templates: + - name: var_merging_logs + title: Var Merging Logs + description: Collect logs via var merging input package + data_streams: + - var_merging_logs + inputs: + - package: var_merging_input_pkg + title: Collect logs via var merging input package + description: Use the var merging input package to collect logs + vars: + - name: paths + default: + - /var/log/custom/*.log +owner: + github: elastic/integrations + type: elastic \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/_dev/test/config.yml new file mode 100644 index 0000000000..e958a08627 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/_dev/test/config.yml @@ -0,0 +1,3 @@ +requires: + - package: 
var_merging_input_pkg + source: "../../var_merging_input_pkg" diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/changelog.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/changelog.yml new file mode 100644 index 0000000000..af392ba551 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/changelog.yml @@ -0,0 +1,5 @@ +- version: 0.1.0 + changes: + - description: Initial release. + type: enhancement + link: https://github.com/elastic/elastic-package/issues/1 diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9390bc05cb --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/fields/base-fields.yml new file mode 100644 index 0000000000..0d1791ffed --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/manifest.yml new file mode 100644 index 0000000000..d1f0fb147d --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/manifest.yml @@ -0,0 +1,6 @@ +title: Alpha logs +type: logs +streams: + - package: var_merging_input_pkg + title: Alpha logs via input package + description: Collect alpha logs using the var merging input package. diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/agent/stream/stream.yml.hbs new file mode 100644 index 0000000000..9390bc05cb --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/agent/stream/stream.yml.hbs @@ -0,0 +1,4 @@ +paths: +{{#each paths}} + - {{this}} +{{/each}} diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/fields/base-fields.yml new file mode 100644 index 0000000000..0d1791ffed --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/fields/base-fields.yml @@ -0,0 +1,12 @@ +- name: data_stream.type + type: constant_keyword + description: Data stream type. +- name: data_stream.dataset + type: constant_keyword + description: Data stream dataset. +- name: data_stream.namespace + type: constant_keyword + description: Data stream namespace. +- name: "@timestamp" + type: date + description: Event timestamp. 
diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/manifest.yml new file mode 100644 index 0000000000..85d68a6e89 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/manifest.yml @@ -0,0 +1,6 @@ +title: Beta logs +type: logs +streams: + - package: var_merging_input_pkg + title: Beta logs via input package + description: Collect beta logs using the var merging input package. diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/docs/README.md b/test/manual_packages/required_inputs/with_merging_two_policy_templates/docs/README.md new file mode 100644 index 0000000000..a969fc6a65 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/docs/README.md @@ -0,0 +1,5 @@ +# Variable Merging - Two Policy Templates + +Test fixture: promotion of input-package vars is scoped to the policy +template's `data_streams` list. One template promotes `paths`; the other +leaves all vars on the data stream. diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/manifest.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/manifest.yml new file mode 100644 index 0000000000..4b8b7a78e2 --- /dev/null +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/manifest.yml @@ -0,0 +1,52 @@ +format_version: 3.6.0 +name: with_merging_two_policy_templates +title: Variable Merging - Two Policy Templates Scoped Promotion +description: >- + Two policy templates share the same required input package: one promotes + "paths" to input-level vars for its data stream only; the other does not + promote any vars, so "paths" stays on the data stream. Exercises per-DS + promotion scoping when multiple templates reference the same input package. 
+version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: var_merging_input_pkg + version: "0.1.0" +policy_templates: + - name: pt_alpha + title: Alpha logs (paths promoted) + description: Policy template that promotes paths to the composable input + data_streams: + - alpha_logs + inputs: + - package: var_merging_input_pkg + title: Collect via var merging input (alpha) + description: Alpha stream promotes paths to input-level vars + vars: + - name: paths + title: Alpha-only promoted paths title + - type: logs + title: Native logs input (alpha) + description: Fallback logs input for alpha + - name: pt_beta + title: Beta logs (no promotion) + description: Policy template with no composable var overrides + data_streams: + - beta_logs + inputs: + - package: var_merging_input_pkg + title: Collect via var merging input (beta) + description: Beta stream keeps all vars at data-stream level + - type: logs + title: Native logs input (beta) + description: Fallback logs input for beta +owner: + github: elastic/integrations + type: elastic From 1f81dbeda4a2a680bd45f0f64e6ec9a4d1902411 Mon Sep 17 00:00:00 2001 From: Tere Date: Mon, 13 Apr 2026 16:55:50 +0200 Subject: [PATCH 07/28] docs: composable integrations, dependencies, and local registry Document requires.input build behavior and how to use a local or custom EPR during development. 
Made-with: Cursor --- README.md | 6 + docs/howto/dependency_management.md | 65 ++++++++++- docs/howto/local_package_registry.md | 158 +++++++++++++++++++++++++++ tools/readme/readme.md.tmpl | 4 + 4 files changed, 230 insertions(+), 3 deletions(-) create mode 100644 docs/howto/local_package_registry.md diff --git a/README.md b/README.md index fb7e44fd17..5ab8d3988a 100644 --- a/README.md +++ b/README.md @@ -238,6 +238,8 @@ Built packages are served up by the Elastic Package Registry running locally (se Built packages can also be published to the global package registry service. +When the package declares required input packages ("requires.input" in manifest.yml), the build downloads those input packages from the configured package registry (see "package_registry.base_url" in ~/.elastic-package/config.yml). The build then incorporates their policy and data stream templates, merges variable definitions into the integration manifest, bundles data stream field definitions, and resolves package: references on inputs and streams to the effective input types expected by Fleet. For details on using a local or custom registry during development, see the [HOWTO guide](./docs/howto/local_package_registry.md). + For details on how to enable dependency management, see the [HOWTO guide](https://github.com/elastic/elastic-package/blob/main/docs/howto/dependency_management.md). ### `elastic-package changelog` @@ -821,6 +823,10 @@ There are available some environment variables that could be used to change some - `ELASTIC_PACKAGE_SIGNER_PRIVATE_KEYFILE`: Path to the private key file to sign packages. - `ELASTIC_PACKAGE_SIGNER_PASSPHRASE`: Passphrase to use the private key file. +- Related to verifying packages downloaded from the Package Registry (EPR): + - `ELASTIC_PACKAGE_VERIFY_PACKAGE_SIGNATURE`: If set to `true` (or `1`), `elastic-package` verifies detached OpenPGP signatures for package zips fetched from the registry (for example when resolving required input packages). 
The registry must serve a signature at the same path as the zip with a `.sig` suffix (for example `/epr/apache/apache-1.0.0.zip.sig`). Leave this unset or `false` for local or unsigned registries. + - `ELASTIC_PACKAGE_VERIFIER_PUBLIC_KEYFILE`: Path to an armored **public** key matching the key that signed packages on that registry. Required when `ELASTIC_PACKAGE_VERIFY_PACKAGE_SIGNATURE` is enabled; the file must exist before the download runs. + - Related to tests: - `ELASTIC_PACKAGE_SERVERLESS_PIPELINE_TEST_DISABLE_COMPARE_RESULTS`: If set to `true`, the results from pipeline tests are not compared to avoid errors from GeoIP. - `ELASTIC_PACKAGE_DISABLE_ELASTIC_AGENT_WOLFI`: If set to `true`, the Elastic Agent image used for running agents will be using the Ubuntu docker images diff --git a/docs/howto/dependency_management.md b/docs/howto/dependency_management.md index 16d96f97b7..83bc76b4d0 100644 --- a/docs/howto/dependency_management.md +++ b/docs/howto/dependency_management.md @@ -11,10 +11,22 @@ which field definition was correct, maintenance and typo correction process was The described situation brought us to a point in time when a simple dependency management was a requirement to maintain all used fields, especially ones imported from external sources. +Elastic Packages support two kinds of build-time dependency: + +- **Field dependencies** — import field definitions from external schemas (e.g. ECS) using + `_dev/build/build.yml`. Resolved from Git references and cached locally. +- **Package dependencies** — composable (integration) packages can depend on input and content packages + declared under `requires` in `manifest.yml`. **Input package** dependencies are resolved + at build time by downloading from the package registry. **Content package** dependencies are + resolved at runtime by Fleet. + +Both are described in the sections below. 
+ ## Principles of operation -Currently Elastic Packages support build-time dependencies that can be used as external field sources. They use a flat -dependency model represented with an additional build manifest, stored in an optional YAML file - `_dev/build/build.yml`: +Currently Elastic Packages support build-time field dependencies that can be used as external +field sources. They use a flat dependency model represented with an additional build manifest, +stored in an optional YAML file - `_dev/build/build.yml`: ```yaml dependencies: @@ -83,4 +95,51 @@ and use a following field definition: ```yaml - name: event.category external: ecs -``` \ No newline at end of file +``` + +## Composable packages and the package registry + +Composable (integration) packages can also depend on input or content packages by declaring them under +`requires` in `manifest.yml`. Depending on the package type, dependencies are resolved +differently: **input package** dependencies are fetched at build time; **content package** +dependencies are resolved at runtime by Fleet. + +```yaml +requires: + input: + - package: sql_input + version: "0.2.0" +``` + +This type of dependency is resolved at **build time** by downloading the required input package +from the **package registry**. During `elastic-package build`, elastic-package fetches those +packages and updates the built integration: it bundles agent templates (policy and data stream), +merges variable definitions from the input packages into the composable manifest, adds data +stream field definitions where configured, and rewrites `package:` references on inputs and +streams to the concrete input types Fleet needs. Fleet still merges policy-specific values at +policy creation time. 
+ +Unlike field-level dependencies (which are resolved from Git references and cached locally), +package dependencies are fetched from the configured package registry URL +(`package_registry.base_url` in `~/.elastic-package/config.yml`, defaulting to +`https://epr.elastic.co`). + +For details on using a local or custom registry when the required input packages are still +under development, see [HOWTO: Use a local or custom package registry](./local_package_registry.md). + +### Linked files (`*.link`) and `template_path` + +Some repositories share agent templates using **link files** (files ending in `.link` that +point at shared content). During `elastic-package build`, linked content is copied into the +build output under the **target** path (the link filename without the `.link` suffix). + +Composable bundling (`requires.input`) runs **after** linked files are materialized in the +build directory. In `manifest.yml`, always set `template_path` / `template_paths` to those +**materialized** names (for example `owned.hbs`), **not** the stub name (`owned.hbs.link`). +Fleet and the builder resolve templates by the names declared in the manifest; the `.link` +file exists only in the source tree. + +A small manual fixture that combines `requires.input` with a linked policy input template +lives under `test/manual_packages/required_inputs/with_linked_template_path/`. Automated +coverage is in `TestBundleInputPackageTemplates_PreservesLinkedTemplateTargetPath` in +`internal/requiredinputs/requiredinputs_test.go`. \ No newline at end of file diff --git a/docs/howto/local_package_registry.md b/docs/howto/local_package_registry.md new file mode 100644 index 0000000000..3eb9f76302 --- /dev/null +++ b/docs/howto/local_package_registry.md @@ -0,0 +1,158 @@ +# HOWTO: Use a local or custom package registry for composable integrations + +## Overview + +Composable (integration) packages can declare required input packages in their `manifest.yml` +under `requires.input`. 
When you run `elastic-package build` or `elastic-package install`, +elastic-package resolves those dependencies by downloading them from the **package registry**. +By default it uses the production registry at `https://epr.elastic.co`. + +This guide explains how to point elastic-package at a local or custom registry, which is +useful when the required input packages are still under development and not yet published to +the production registry. + +For field-level build-time dependencies (ECS, `_dev/build/build.yml`), see +[HOWTO: Enable dependency management](./dependency_management.md). + +## Prerequisites + +- An integration package that declares `requires.input` in its `manifest.yml`, for example: + +```yaml +requires: + input: + - package: sql_input + version: "0.2.0" +``` + +- Optionally, a running local package registry that serves the required input packages. + +## Option 1: Use the built-in stack registry (recommended) + +`elastic-package stack up` (with the default compose provider) automatically starts a local +package registry container. The container runs in **proxy mode**: it serves packages found in +the repository's `build/packages/` directory and proxies all other package requests to the +production registry at `https://epr.elastic.co` (or to a custom upstream if configured). + +`elastic-package` discovers `build/packages/` by walking up from the current working +directory to the repository root, so you can run `elastic-package stack up` from anywhere +inside the repository. + +```shell +# 1. Build the required input package — this places the built package under build/packages/ +# at the repository root. +cd /path/to/sql_input +elastic-package build + +# 2. Start the Elastic Stack from anywhere inside the repository. +# The bundled registry picks up build/packages/ from the repository root. 
+elastic-package stack up -v -d +``` + +Then configure `~/.elastic-package/config.yml` to use the stack's local registry for +`elastic-package build`, `elastic-package test`, `elastic-package benchmark`, and +`elastic-package status`: + +```yaml +package_registry: + base_url: http://localhost:8080 +``` + +This setting defaults to `https://epr.elastic.co` when not set. + +> **Note:** This setting does not change the package registry container that the Elastic Stack +> itself uses (served by `elastic-package stack`). To also redirect the stack's proxy target, +> see [Option 2](#option-2-configure-the-registry-url-per-profile) below. + +### Alternative: standalone package registry container + +If you are not running `elastic-package stack`, you can start a standalone registry container. +Use a port other than `8080` to avoid conflicting with the stack's built-in registry: + +```shell +# Build your input package first +cd /path/to/sql_input +elastic-package build + +# Start a standalone registry on port 8081, mounting the build/packages/ directory +# at the repository root (run from anywhere inside the repo, or adjust the path). +docker run --rm -p 8081:8080 \ + -v "$(git -C /path/to/repo rev-parse --show-toplevel)/build/packages":/packages/package-registry \ + docker.elastic.co/package-registry/package-registry:v1.37.0 +``` + +> **Note:** The mounted directory must contain at least one valid package (a `.zip` file or an +> extracted package directory). If the directory is empty, the registry exits immediately with +> `No local packages found.` +> +> **Note:** The registry image tag above matches `PackageRegistryBaseImage` in +> [`internal/stack/versions.go`](../../internal/stack/versions.go); that constant is what +> `elastic-package stack` uses and is updated by automation, while this document is not — +> check there when upgrading. 
+ +Then point `package_registry.base_url` at `http://localhost:8081` and run +`elastic-package build` from your integration package directory. + +## Option 2: Configure the registry URL per profile + +Use this option when you want both the **build tools** and the **stack's Fleet** to use the +same custom or standalone registry — for example, a registry serving packages not yet +published to production. + +Assume your custom registry is running on the host at port `8082`. Configure the active +profile (e.g. `~/.elastic-package/profiles/default/config.yml`): + +```yaml +# The stack's package registry container will proxy non-local requests to this URL. +# Use host.docker.internal so the container can reach the host. +stack.epr.proxy_to: http://host.docker.internal:8082 + +# elastic-package install (and stack commands) will use this URL to contact the registry. +stack.epr.base_url: http://localhost:8082 +``` + +To also cover `elastic-package build`, `elastic-package test`, `elastic-package benchmark`, +and `elastic-package status` (which do not read profile settings), add the global setting: + +```yaml +# ~/.elastic-package/config.yml +package_registry: + base_url: http://localhost:8082 +``` + +### URL resolution reference + +**For `elastic-package build`, `test`, `benchmark`, `status`** (global config only): + +| Priority | Setting | +| -------- | ------- | +| 1 | `package_registry.base_url` in `~/.elastic-package/config.yml` | +| 2 | `https://epr.elastic.co` (production fallback) | + +**For `elastic-package install` and stack commands** (profile takes precedence): + +| Priority | Setting | +| -------- | ------- | +| 1 | `stack.epr.base_url` in the active profile `config.yml` | +| 2 | `package_registry.base_url` in `~/.elastic-package/config.yml` | +| 3 | `https://epr.elastic.co` (production fallback) | + +**For the stack registry's proxy target** (`EPR_PROXY_TO` inside the container): + +| Priority | Setting | +| -------- | ------- | +| 1 | `stack.epr.proxy_to` in 
the active profile `config.yml` | +| 2 | `stack.epr.base_url` in the active profile `config.yml` | +| 3 | `package_registry.base_url` in `~/.elastic-package/config.yml` | +| 4 | `https://epr.elastic.co` (production fallback) | + +For more details on profiles, see the +[Elastic Package profiles section of the README](../../README.md#elastic-package-profiles). + +## Summary + +| Goal | Configuration | +| ---- | ------------- | +| Override registry for `build` / `test` / `benchmark` / `status` | `package_registry.base_url` in `~/.elastic-package/config.yml` | +| Override registry for `install` and stack commands | `stack.epr.base_url` in the active profile `config.yml` | +| Override proxy target for the stack's registry container | `stack.epr.proxy_to` in the active profile `config.yml` | diff --git a/tools/readme/readme.md.tmpl b/tools/readme/readme.md.tmpl index 984f9112e2..94b2bc1384 100644 --- a/tools/readme/readme.md.tmpl +++ b/tools/readme/readme.md.tmpl @@ -279,6 +279,10 @@ There are available some environment variables that could be used to change some - `ELASTIC_PACKAGE_SIGNER_PRIVATE_KEYFILE`: Path to the private key file to sign packages. - `ELASTIC_PACKAGE_SIGNER_PASSPHRASE`: Passphrase to use the private key file. +- Related to verifying packages downloaded from the Package Registry (EPR): + - `ELASTIC_PACKAGE_VERIFY_PACKAGE_SIGNATURE`: If set to `true` (or `1`), `elastic-package` verifies detached OpenPGP signatures for package zips fetched from the registry (for example when resolving required input packages). The registry must serve a signature at the same path as the zip with a `.sig` suffix (for example `/epr/apache/apache-1.0.0.zip.sig`). Leave this unset or `false` for local or unsigned registries. + - `ELASTIC_PACKAGE_VERIFIER_PUBLIC_KEYFILE`: Path to an armored **public** key matching the key that signed packages on that registry. Required when `ELASTIC_PACKAGE_VERIFY_PACKAGE_SIGNATURE` is enabled; the file must exist before the download runs. 
+ - Related to tests: - `ELASTIC_PACKAGE_SERVERLESS_PIPELINE_TEST_DISABLE_COMPARE_RESULTS`: If set to `true`, the results from pipeline tests are not compared to avoid errors from GeoIP. - `ELASTIC_PACKAGE_DISABLE_ELASTIC_AGENT_WOLFI`: If set to `true`, the Elastic Agent image used for running agents will be using the Ubuntu docker images From 0dd8203db589237aaddb1222ba7eb11d4519cbfb Mon Sep 17 00:00:00 2001 From: Tere Date: Mon, 13 Apr 2026 17:29:57 +0200 Subject: [PATCH 08/28] fix(requiredinputs): satisfy golangci-lint Handle closeFn errors in defer, trim always-nil error returns from merge helpers, add gocognit nolint for mergeVariables, and fix test assignments. Made-with: Cursor --- internal/requiredinputs/fields.go | 2 +- internal/requiredinputs/fields_test.go | 4 +-- internal/requiredinputs/streamdefs.go | 2 +- internal/requiredinputs/streamdefs_test.go | 15 ++++---- internal/requiredinputs/variables.go | 41 ++++++++-------------- internal/requiredinputs/variables_test.go | 39 +++++++------------- 6 files changed, 38 insertions(+), 65 deletions(-) diff --git a/internal/requiredinputs/fields.go b/internal/requiredinputs/fields.go index 9532181303..d8eb9c920a 100644 --- a/internal/requiredinputs/fields.go +++ b/internal/requiredinputs/fields.go @@ -70,7 +70,7 @@ func (r *RequiredInputsResolver) mergeInputPkgFields(dsRootDir, inputPkgPath, in if err != nil { return fmt.Errorf("opening package %q: %w", inputPkgPath, err) } - defer closeFn() + defer func() { _ = closeFn() }() inputFieldFiles, err := fs.Glob(inputPkgFS, "fields/*.yml") if err != nil { diff --git a/internal/requiredinputs/fields_test.go b/internal/requiredinputs/fields_test.go index f9e3bbbd63..971ac72240 100644 --- a/internal/requiredinputs/fields_test.go +++ b/internal/requiredinputs/fields_test.go @@ -140,9 +140,9 @@ func TestBundleDataStreamFields_PartialOverlap(t *testing.T) { // "log.level". After bundling, only "message" and "log.level" should appear // in the generated file. 
buildPackageRoot := copyFixturePackage(t, "with_field_bundling") - resolver := NewRequiredInputsResolver(makeFakeEprForFieldBundling(t)) + resolver := NewRequiredInputsResolver(makeFakeEprForFieldBundling(t)) - err := resolver.Bundle(buildPackageRoot) + require.NoError(t, resolver.Bundle(buildPackageRoot)) bundledPath := filepath.Join(buildPackageRoot, "data_stream", "field_logs", "fields", "fields_input_pkg-fields.yml") data, err := os.ReadFile(bundledPath) diff --git a/internal/requiredinputs/streamdefs.go b/internal/requiredinputs/streamdefs.go index 1183d4d09c..a6eaabd12b 100644 --- a/internal/requiredinputs/streamdefs.go +++ b/internal/requiredinputs/streamdefs.go @@ -159,7 +159,7 @@ func loadInputPkgInfo(pkgPath string) (inputPkgInfo, error) { if err != nil { return inputPkgInfo{}, fmt.Errorf("opening package: %w", err) } - defer closeFn() + defer func() { _ = closeFn() }() manifestBytes, err := fs.ReadFile(pkgFS, packages.PackageManifestFile) if err != nil { diff --git a/internal/requiredinputs/streamdefs_test.go b/internal/requiredinputs/streamdefs_test.go index 03e7a314f2..f949a1a4ec 100644 --- a/internal/requiredinputs/streamdefs_test.go +++ b/internal/requiredinputs/streamdefs_test.go @@ -112,7 +112,7 @@ policy_templates: }, } resolver := NewRequiredInputsResolver(epr) - err := resolver.Bundle(buildRoot) + require.NoError(t, resolver.Bundle(buildRoot)) manifestBytes, err := os.ReadFile(filepath.Join(buildRoot, "manifest.yml")) require.NoError(t, err) @@ -167,7 +167,7 @@ policy_templates: }, } resolver := NewRequiredInputsResolver(epr) - err := resolver.Bundle(buildRoot) + require.NoError(t, resolver.Bundle(buildRoot)) manifestBytes, err := os.ReadFile(filepath.Join(buildRoot, "manifest.yml")) require.NoError(t, err) @@ -219,7 +219,7 @@ policy_templates: }, } resolver := NewRequiredInputsResolver(epr) - err := resolver.Bundle(buildRoot) + require.NoError(t, resolver.Bundle(buildRoot)) manifestBytes, err := os.ReadFile(filepath.Join(buildRoot, 
"manifest.yml")) require.NoError(t, err) @@ -272,7 +272,7 @@ policy_templates: }, } resolver := NewRequiredInputsResolver(epr) - err := resolver.Bundle(buildRoot) + require.NoError(t, resolver.Bundle(buildRoot)) manifestBytes, err := os.ReadFile(filepath.Join(buildRoot, "manifest.yml")) require.NoError(t, err) @@ -341,7 +341,7 @@ streams: }, } resolver := NewRequiredInputsResolver(epr) - err := resolver.Bundle(buildRoot) + require.NoError(t, resolver.Bundle(buildRoot)) dsManifestBytes, err := os.ReadFile(filepath.Join(dsDir, "manifest.yml")) require.NoError(t, err) dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) @@ -410,8 +410,7 @@ streams: }, } resolver := NewRequiredInputsResolver(epr) - err := resolver.Bundle(buildRoot) - require.NoError(t, err) + require.NoError(t, resolver.Bundle(buildRoot)) dsManifestBytes, err := os.ReadFile(filepath.Join(dsDir, "manifest.yml")) require.NoError(t, err) @@ -432,7 +431,7 @@ streams: func TestResolveStreamInputTypes_FieldBundlingFixture(t *testing.T) { buildPackageRoot := copyFixturePackage(t, "with_field_bundling") resolver := NewRequiredInputsResolver(makeFakeEprForFieldBundling(t)) - err := resolver.Bundle(buildPackageRoot) + require.NoError(t, resolver.Bundle(buildPackageRoot)) // Check main manifest: package: fields_input_pkg → type: logfile manifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "manifest.yml")) diff --git a/internal/requiredinputs/variables.go b/internal/requiredinputs/variables.go index c7561c068c..129e378c50 100644 --- a/internal/requiredinputs/variables.go +++ b/internal/requiredinputs/variables.go @@ -35,6 +35,8 @@ type pkgDsKey struct { // Data-stream-level vars: all remaining (non-promoted) base vars are placed at // the data-stream level, merged with any stream-level overrides the composable // package declares. 
+// +//nolint:gocognit // multi-step merge pipeline (promotion, DS manifests, policy templates) func (r *RequiredInputsResolver) mergeVariables( manifest *packages.PackageManifest, inputPkgPaths map[string]string, @@ -120,10 +122,7 @@ func (r *RequiredInputsResolver) mergeVariables( return fmt.Errorf("getting input node at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) } - mergedSeq, err := mergeInputLevelVarNodes(baseVarOrder, baseVarByName, promotedOverrides) - if err != nil { - return fmt.Errorf("merging input-level vars for pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) - } + mergedSeq := mergeInputLevelVarNodes(baseVarOrder, baseVarByName, promotedOverrides) if len(mergedSeq.Content) > 0 { upsertKey(inputNode, "vars", mergedSeq) @@ -206,10 +205,7 @@ func (r *RequiredInputsResolver) mergeVariables( return fmt.Errorf("duplicate vars in data stream manifest %q: %w", manifestPath, err) } - mergedSeq, err := mergeStreamLevelVarNodes(baseVarOrder, baseVarByName, promotedNames, dsOverrideNodes) - if err != nil { - return fmt.Errorf("merging stream-level vars in %q: %w", manifestPath, err) - } + mergedSeq := mergeStreamLevelVarNodes(baseVarOrder, baseVarByName, promotedNames, dsOverrideNodes) if len(mergedSeq.Content) > 0 { upsertKey(streamNode, "vars", mergedSeq) @@ -239,7 +235,7 @@ func loadInputPkgVarNodes(pkgPath string) ([]string, map[string]*yaml.Node, erro if err != nil { return nil, nil, fmt.Errorf("opening package: %w", err) } - defer closeFn() + defer func() { _ = closeFn() }() manifestBytes, err := fs.ReadFile(pkgFS, packages.PackageManifestFile) if err != nil { @@ -301,20 +297,17 @@ func mergeInputLevelVarNodes( baseVarOrder []string, baseVarByName map[string]*yaml.Node, promotedOverrides map[string]*yaml.Node, -) (*yaml.Node, error) { +) *yaml.Node { seqNode := &yaml.Node{Kind: yaml.SequenceNode} for _, varName := range baseVarOrder { overrideNode, promoted := promotedOverrides[varName] if !promoted { continue } - merged, err := 
mergeVarNode(baseVarByName[varName], overrideNode) - if err != nil { - return nil, fmt.Errorf("merging var %q: %w", varName, err) - } + merged := mergeVarNode(baseVarByName[varName], overrideNode) seqNode.Content = append(seqNode.Content, merged) } - return seqNode, nil + return seqNode } // mergeStreamLevelVarNodes returns a sequence node containing: @@ -327,7 +320,7 @@ func mergeStreamLevelVarNodes( baseVarByName map[string]*yaml.Node, promotedNames map[string]bool, dsOverrides []*yaml.Node, -) (*yaml.Node, error) { +) *yaml.Node { dsOverrideByName := make(map[string]*yaml.Node, len(dsOverrides)) for _, v := range dsOverrides { dsOverrideByName[varNodeName(v)] = v @@ -342,18 +335,12 @@ func mergeStreamLevelVarNodes( } baseNode := baseVarByName[varName] overrideNode, hasOverride := dsOverrideByName[varName] - var ( - merged *yaml.Node - merr error - ) + var merged *yaml.Node if hasOverride { - merged, merr = mergeVarNode(baseNode, overrideNode) + merged = mergeVarNode(baseNode, overrideNode) } else { merged = cloneNode(baseNode) } - if merr != nil { - return nil, fmt.Errorf("merging var %q: %w", varName, merr) - } seqNode.Content = append(seqNode.Content, merged) } @@ -364,13 +351,13 @@ func mergeStreamLevelVarNodes( } } - return seqNode, nil + return seqNode } // mergeVarNode merges fields from overrideNode into a clone of baseNode. // All keys in override win; absent keys in override are inherited from base. // The "name" key is always preserved from base. 
-func mergeVarNode(base, override *yaml.Node) (*yaml.Node, error) { +func mergeVarNode(base, override *yaml.Node) *yaml.Node { result := cloneNode(base) for i := 0; i+1 < len(override.Content); i += 2 { keyNode := override.Content[i] @@ -380,7 +367,7 @@ func mergeVarNode(base, override *yaml.Node) (*yaml.Node, error) { } upsertKey(result, keyNode.Value, cloneNode(valNode)) } - return result, nil + return result } // checkDuplicateVarNodes returns an error if any var name appears more than diff --git a/internal/requiredinputs/variables_test.go b/internal/requiredinputs/variables_test.go index 59de66f842..c30af1aa3b 100644 --- a/internal/requiredinputs/variables_test.go +++ b/internal/requiredinputs/variables_test.go @@ -73,8 +73,7 @@ func TestMergeVarNode(t *testing.T) { t.Run("full override", func(t *testing.T) { override := varNode("paths", "type", "keyword", "title", "Custom Paths", "multi", "false") - merged, err := mergeVarNode(base, override) - require.NoError(t, err) + merged := mergeVarNode(base, override) assert.Equal(t, "paths", varNodeName(merged)) assert.Equal(t, "keyword", mappingValue(merged, "type").Value) assert.Equal(t, "Custom Paths", mappingValue(merged, "title").Value) @@ -83,8 +82,7 @@ func TestMergeVarNode(t *testing.T) { t.Run("partial override", func(t *testing.T) { override := varNode("paths", "title", "My Paths") - merged, err := mergeVarNode(base, override) - require.NoError(t, err) + merged := mergeVarNode(base, override) assert.Equal(t, "paths", varNodeName(merged)) assert.Equal(t, "text", mappingValue(merged, "type").Value) // from base assert.Equal(t, "My Paths", mappingValue(merged, "title").Value) @@ -93,8 +91,7 @@ func TestMergeVarNode(t *testing.T) { t.Run("empty override", func(t *testing.T) { override := varNode("paths") - merged, err := mergeVarNode(base, override) - require.NoError(t, err) + merged := mergeVarNode(base, override) assert.Equal(t, "paths", varNodeName(merged)) assert.Equal(t, "text", mappingValue(merged, 
"type").Value) // from base assert.Equal(t, "Paths", mappingValue(merged, "title").Value) // from base @@ -105,15 +102,13 @@ func TestMergeVarNode(t *testing.T) { override := &yaml.Node{Kind: yaml.MappingNode} upsertKey(override, "name", &yaml.Node{Kind: yaml.ScalarNode, Value: "should-be-ignored"}) upsertKey(override, "type", &yaml.Node{Kind: yaml.ScalarNode, Value: "keyword"}) - merged, err := mergeVarNode(base, override) - require.NoError(t, err) + merged := mergeVarNode(base, override) assert.Equal(t, "paths", varNodeName(merged)) }) t.Run("adds new field from override", func(t *testing.T) { override := varNode("paths", "description", "My description") - merged, err := mergeVarNode(base, override) - require.NoError(t, err) + merged := mergeVarNode(base, override) assert.Equal(t, "My description", mappingValue(merged, "description").Value) assert.Equal(t, "text", mappingValue(merged, "type").Value) // base preserved }) @@ -157,8 +152,7 @@ func TestMergeInputLevelVarNodes(t *testing.T) { } t.Run("empty promoted → empty sequence", func(t *testing.T) { - seq, err := mergeInputLevelVarNodes(baseOrder, baseByName, map[string]*yaml.Node{}) - require.NoError(t, err) + seq := mergeInputLevelVarNodes(baseOrder, baseByName, map[string]*yaml.Node{}) assert.Empty(t, seq.Content) }) @@ -166,8 +160,7 @@ func TestMergeInputLevelVarNodes(t *testing.T) { promotedOverrides := map[string]*yaml.Node{ "paths": varNode("paths", "default", "/var/log/custom/*.log"), } - seq, err := mergeInputLevelVarNodes(baseOrder, baseByName, promotedOverrides) - require.NoError(t, err) + seq := mergeInputLevelVarNodes(baseOrder, baseByName, promotedOverrides) require.Len(t, seq.Content, 1) assert.Equal(t, "paths", varNodeName(seq.Content[0])) assert.Equal(t, "/var/log/custom/*.log", mappingValue(seq.Content[0], "default").Value) @@ -179,8 +172,7 @@ func TestMergeInputLevelVarNodes(t *testing.T) { "timeout": varNode("timeout", "default", "60s"), "encoding": varNode("encoding", "show_user", "true"), } 
- seq, err := mergeInputLevelVarNodes(baseOrder, baseByName, promotedOverrides) - require.NoError(t, err) + seq := mergeInputLevelVarNodes(baseOrder, baseByName, promotedOverrides) require.Len(t, seq.Content, 2) // Order must follow baseOrder: encoding before timeout. assert.Equal(t, "encoding", varNodeName(seq.Content[0])) @@ -207,8 +199,7 @@ func TestMergeStreamLevelVarNodes(t *testing.T) { } t.Run("no promoted, no overrides → all base vars", func(t *testing.T) { - seq, err := mergeStreamLevelVarNodes(baseOrder, baseByName, nil, nil) - require.NoError(t, err) + seq := mergeStreamLevelVarNodes(baseOrder, baseByName, nil, nil) require.Len(t, seq.Content, 3) assert.Equal(t, "paths", varNodeName(seq.Content[0])) assert.Equal(t, "encoding", varNodeName(seq.Content[1])) @@ -217,16 +208,14 @@ func TestMergeStreamLevelVarNodes(t *testing.T) { t.Run("some promoted → promoted excluded", func(t *testing.T) { promoted := map[string]bool{"paths": true, "encoding": true} - seq, err := mergeStreamLevelVarNodes(baseOrder, baseByName, promoted, nil) - require.NoError(t, err) + seq := mergeStreamLevelVarNodes(baseOrder, baseByName, promoted, nil) require.Len(t, seq.Content, 1) assert.Equal(t, "timeout", varNodeName(seq.Content[0])) }) t.Run("DS override on existing base var", func(t *testing.T) { dsOverrides := []*yaml.Node{varNode("encoding", "show_user", "true")} - seq, err := mergeStreamLevelVarNodes(baseOrder, baseByName, nil, dsOverrides) - require.NoError(t, err) + seq := mergeStreamLevelVarNodes(baseOrder, baseByName, nil, dsOverrides) require.Len(t, seq.Content, 3) // encoding is merged encodingMerged := seq.Content[1] @@ -237,8 +226,7 @@ func TestMergeStreamLevelVarNodes(t *testing.T) { t.Run("novel DS var appended", func(t *testing.T) { dsOverrides := []*yaml.Node{varNode("custom_tag", "type", "text")} - seq, err := mergeStreamLevelVarNodes(baseOrder, baseByName, nil, dsOverrides) - require.NoError(t, err) + seq := mergeStreamLevelVarNodes(baseOrder, baseByName, nil, 
dsOverrides) require.Len(t, seq.Content, 4) // 3 base + 1 novel assert.Equal(t, "custom_tag", varNodeName(seq.Content[3])) }) @@ -249,8 +237,7 @@ func TestMergeStreamLevelVarNodes(t *testing.T) { varNode("encoding", "show_user", "true"), varNode("custom_tag", "type", "text"), } - seq, err := mergeStreamLevelVarNodes(baseOrder, baseByName, promoted, dsOverrides) - require.NoError(t, err) + seq := mergeStreamLevelVarNodes(baseOrder, baseByName, promoted, dsOverrides) // paths excluded (promoted); encoding merged; timeout base; custom_tag novel require.Len(t, seq.Content, 3) assert.Equal(t, "encoding", varNodeName(seq.Content[0])) From f788b321357c2c0f4abe6460e36f5fc462b0e7f2 Mon Sep 17 00:00:00 2001 From: Tere Date: Tue, 14 Apr 2026 10:19:27 +0200 Subject: [PATCH 09/28] test(archetype): remove unnecessary resolver mock Package archetypes have no required input dependencies, so BuildOptions.RequiredInputsResolver can be left nil. The builder already falls back to NoopRequiredInputsResolver when the field is unset, making the mock redundant. 
Co-Authored-By: Claude Sonnet 4.6 --- internal/packages/archetype/package_test.go | 22 +++++---------------- 1 file changed, 5 insertions(+), 17 deletions(-) diff --git a/internal/packages/archetype/package_test.go b/internal/packages/archetype/package_test.go index 216723d492..965d9c7544 100644 --- a/internal/packages/archetype/package_test.go +++ b/internal/packages/archetype/package_test.go @@ -96,29 +96,17 @@ func createPackageDescriptorForTest(packageType, kibanaVersion string) PackageDe } } -type requiredInputsResolverMock struct { - BundleFunc func(buildPackageRoot string) error -} - -func (r *requiredInputsResolverMock) Bundle(buildPackageRoot string) error { - if r.BundleFunc != nil { - return r.BundleFunc(buildPackageRoot) - } - return nil -} - func buildPackage(t *testing.T, repositoryRoot *os.Root, packageRoot string) error { buildDir := filepath.Join(repositoryRoot.Name(), "build") err := os.MkdirAll(buildDir, 0o755) require.NoError(t, err) _, err = builder.BuildPackage(builder.BuildOptions{ - PackageRoot: packageRoot, - BuildDir: buildDir, - RepositoryRoot: repositoryRoot, - UpdateReadmes: true, - SchemaURLs: fields.SchemaURLs{}, - RequiredInputsResolver: &requiredInputsResolverMock{}, + PackageRoot: packageRoot, + BuildDir: buildDir, + RepositoryRoot: repositoryRoot, + UpdateReadmes: true, + SchemaURLs: fields.SchemaURLs{}, }) return err } From f3745febdcf3a329427150fa222f57d7436dd1e8 Mon Sep 17 00:00:00 2001 From: Tere Date: Tue, 14 Apr 2026 10:42:34 +0200 Subject: [PATCH 10/28] refactor(requiredinputs): extract processDataStreamManifest sub-function Extract per-manifest logic from bundleDataStreamTemplates into a new unexported processDataStreamManifest method, and add unit tests covering read failure, invalid YAML, unknown package (no write-back guard), partial stream errors, and no-package stream skipping. 
Co-Authored-By: Claude Sonnet 4.6 --- internal/requiredinputs/streams.go | 113 ++++++++++++---------- internal/requiredinputs/streams_test.go | 121 ++++++++++++++++++++++++ 2 files changed, 183 insertions(+), 51 deletions(-) diff --git a/internal/requiredinputs/streams.go b/internal/requiredinputs/streams.go index a460d08d82..4c5c3b3f30 100644 --- a/internal/requiredinputs/streams.go +++ b/internal/requiredinputs/streams.go @@ -26,67 +26,78 @@ func (r *RequiredInputsResolver) bundleDataStreamTemplates(inputPkgPaths map[str errorList := make([]error, 0) for _, manifestPath := range dsManifestsPaths { - manifestBytes, err := buildRoot.ReadFile(manifestPath) - if err != nil { - return fmt.Errorf("failed to read data stream manifest %q: %w", manifestPath, err) - } - // parse the manifest YAML document preserving formatting for targeted modifications - // using manifestBytes allows us to preserve comments and formatting in the manifest when we update it with template paths from input packages - var doc yaml.Node - if err := yaml.Unmarshal(manifestBytes, &doc); err != nil { - return fmt.Errorf("failed to parse data stream manifest YAML: %w", err) - } - - manifest, err := packages.ReadDataStreamManifestBytes(manifestBytes) - if err != nil { - return fmt.Errorf("failed to parse data stream manifest %q: %w", manifestPath, err) + if err := r.processDataStreamManifest(manifestPath, inputPkgPaths, buildRoot); err != nil { + errorList = append(errorList, err) } - for idx, stream := range manifest.Streams { - if stream.Package == "" { - continue - } - pkgPath, ok := inputPkgPaths[stream.Package] - if !ok { - errorList = append(errorList, fmt.Errorf("failed to resolve input package %q for stream in manifest %q: not listed in requires.input", stream.Package, manifestPath)) - continue - } - dsRootDir := path.Dir(manifestPath) - inputPaths, err := r.collectAndCopyInputPkgDataStreams(dsRootDir, pkgPath, stream.Package, buildRoot) - if err != nil { - return fmt.Errorf("failed to 
collect and copy input package data stream templates for manifest %q: %w", manifestPath, err) - } - if len(inputPaths) == 0 { - continue - } + } + return errors.Join(errorList...) +} - // current manifest template paths - paths := make([]string, 0) - // if composable package has included custom template path or paths, include them - // if no template paths are included at the manifest, only the imported templates are included - if stream.TemplatePath != "" { - paths = append(paths, stream.TemplatePath) - } else if len(stream.TemplatePaths) > 0 { - paths = append(paths, stream.TemplatePaths...) - } - paths = append(inputPaths, paths...) +func (r *RequiredInputsResolver) processDataStreamManifest(manifestPath string, inputPkgPaths map[string]string, buildRoot *os.Root) error { + manifestBytes, err := buildRoot.ReadFile(manifestPath) + if err != nil { + return fmt.Errorf("failed to read data stream manifest %q: %w", manifestPath, err) + } + // parse the manifest YAML document preserving formatting for targeted modifications + // using manifestBytes allows us to preserve comments and formatting in the manifest when we update it with template paths from input packages + var doc yaml.Node + if err := yaml.Unmarshal(manifestBytes, &doc); err != nil { + return fmt.Errorf("failed to parse data stream manifest YAML: %w", err) + } - if err := setStreamTemplatePaths(&doc, idx, paths); err != nil { - return fmt.Errorf("failed to set stream template paths in manifest %q: %w", manifestPath, err) - } + manifest, err := packages.ReadDataStreamManifestBytes(manifestBytes) + if err != nil { + return fmt.Errorf("failed to parse data stream manifest %q: %w", manifestPath, err) + } + errorList := make([]error, 0) + for idx, stream := range manifest.Streams { + if stream.Package == "" { + continue } - - // Serialise the updated YAML document back to disk. 
- updated, err := formatYAMLNode(&doc) + pkgPath, ok := inputPkgPaths[stream.Package] + if !ok { + errorList = append(errorList, fmt.Errorf("failed to resolve input package %q for stream in manifest %q: not listed in requires.input", stream.Package, manifestPath)) + continue + } + dsRootDir := path.Dir(manifestPath) + inputPaths, err := r.collectAndCopyInputPkgDataStreams(dsRootDir, pkgPath, stream.Package, buildRoot) if err != nil { - return fmt.Errorf("failed to format updated manifest: %w", err) + return fmt.Errorf("failed to collect and copy input package data stream templates for manifest %q: %w", manifestPath, err) + } + if len(inputPaths) == 0 { + continue + } + + // current manifest template paths + paths := make([]string, 0) + // if composable package has included custom template path or paths, include them + // if no template paths are included at the manifest, only the imported templates are included + if stream.TemplatePath != "" { + paths = append(paths, stream.TemplatePath) + } else if len(stream.TemplatePaths) > 0 { + paths = append(paths, stream.TemplatePaths...) } - if err := buildRoot.WriteFile(manifestPath, updated, 0664); err != nil { - return fmt.Errorf("failed to write updated manifest: %w", err) + paths = append(inputPaths, paths...) + + if err := setStreamTemplatePaths(&doc, idx, paths); err != nil { + return fmt.Errorf("failed to set stream template paths in manifest %q: %w", manifestPath, err) } + } + if err := errors.Join(errorList...); err != nil { + return err + } + // Serialise the updated YAML document back to disk. + updated, err := formatYAMLNode(&doc) + if err != nil { + return fmt.Errorf("failed to format updated manifest: %w", err) } - return errors.Join(errorList...) 
+ if err := buildRoot.WriteFile(manifestPath, updated, 0664); err != nil { + return fmt.Errorf("failed to write updated manifest: %w", err) + } + + return nil } // collectAndCopyInputPkgDataStreams collects the data streams from the input package and copies them to the agent/input directory of the build package diff --git a/internal/requiredinputs/streams_test.go b/internal/requiredinputs/streams_test.go index f75b99e32d..3868ee7e3a 100644 --- a/internal/requiredinputs/streams_test.go +++ b/internal/requiredinputs/streams_test.go @@ -106,6 +106,127 @@ streams: assert.Equal(t, []string{"sql-input.yml.hbs", "existing.yml.hbs"}, input.TemplatePaths) } +// TestProcessDataStreamManifest_ReadFailure verifies that a missing manifest file returns an error. +func TestProcessDataStreamManifest_ReadFailure(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + err = r.processDataStreamManifest("data_stream/nonexistent/manifest.yml", nil, buildRoot) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to read data stream manifest") +} + +// TestProcessDataStreamManifest_InvalidYAML verifies that a manifest with invalid YAML returns an error. 
+func TestProcessDataStreamManifest_InvalidYAML(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + datastreamDir := filepath.Join("data_stream", "test_ds") + err = buildRoot.MkdirAll(datastreamDir, 0755) + require.NoError(t, err) + err = buildRoot.WriteFile(filepath.Join(datastreamDir, "manifest.yml"), []byte(":\tinvalid: yaml: {"), 0644) + require.NoError(t, err) + + err = r.processDataStreamManifest(filepath.Join(datastreamDir, "manifest.yml"), nil, buildRoot) + require.Error(t, err) +} + +// TestProcessDataStreamManifest_UnknownPackage verifies that a stream referencing a package not in +// inputPkgPaths returns an error and does NOT write back the manifest. +func TestProcessDataStreamManifest_UnknownPackage(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + datastreamDir := filepath.Join("data_stream", "test_ds") + err = buildRoot.MkdirAll(datastreamDir, 0755) + require.NoError(t, err) + + original := []byte("streams:\n - package: sql\n") + manifestPath := filepath.Join(datastreamDir, "manifest.yml") + err = buildRoot.WriteFile(manifestPath, original, 0644) + require.NoError(t, err) + + err = r.processDataStreamManifest(manifestPath, map[string]string{}, buildRoot) + require.Error(t, err) + assert.Contains(t, err.Error(), "not listed in requires.input") + + // Manifest must not have been overwritten. + written, readErr := buildRoot.ReadFile(manifestPath) + require.NoError(t, readErr) + assert.Equal(t, original, written) +} + +// TestProcessDataStreamManifest_PartialStreamError verifies that when one stream succeeds and another +// references an unknown package, the function returns an error and the manifest is not written back. 
+func TestProcessDataStreamManifest_PartialStreamError(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + datastreamDir := filepath.Join("data_stream", "test_ds") + err = buildRoot.MkdirAll(datastreamDir, 0755) + require.NoError(t, err) + + original := []byte("streams:\n - package: sql\n - package: unknown\n") + manifestPath := filepath.Join(datastreamDir, "manifest.yml") + err = buildRoot.WriteFile(manifestPath, original, 0644) + require.NoError(t, err) + + fakeInputDir := createFakeInputHelper(t) + err = r.processDataStreamManifest(manifestPath, map[string]string{"sql": fakeInputDir}, buildRoot) + require.Error(t, err) + assert.Contains(t, err.Error(), "unknown") + + // Manifest must not have been written back despite the first stream succeeding. + written, readErr := buildRoot.ReadFile(manifestPath) + require.NoError(t, readErr) + assert.Equal(t, original, written) +} + +// TestProcessDataStreamManifest_NoPackageSkipped verifies that streams without a package field are +// skipped and the manifest is written back unmodified (no template_paths added). 
+func TestProcessDataStreamManifest_NoPackageSkipped(t *testing.T) { + buildRootPath := t.TempDir() + buildRoot, err := os.OpenRoot(buildRootPath) + require.NoError(t, err) + defer buildRoot.Close() + + r := &RequiredInputsResolver{} + + datastreamDir := filepath.Join("data_stream", "test_ds") + err = buildRoot.MkdirAll(datastreamDir, 0755) + require.NoError(t, err) + + manifestPath := filepath.Join(datastreamDir, "manifest.yml") + err = buildRoot.WriteFile(manifestPath, []byte("streams:\n - title: plain stream\n"), 0644) + require.NoError(t, err) + + err = r.processDataStreamManifest(manifestPath, map[string]string{}, buildRoot) + require.NoError(t, err) + + updated, readErr := buildRoot.ReadFile(manifestPath) + require.NoError(t, readErr) + manifest, parseErr := packages.ReadDataStreamManifestBytes(updated) + require.NoError(t, parseErr) + require.Len(t, manifest.Streams, 1) + assert.Empty(t, manifest.Streams[0].TemplatePaths) + assert.Empty(t, manifest.Streams[0].TemplatePath) +} + // TestBundleDataStreamTemplates_BundlesWithoutDataStreamsAssociation verifies that a data stream // stream entry with package: X IS bundled even when the root policy template has no data_streams // field. Bundling is driven solely by the data stream manifest's streams[].package reference. From ce1685e30f3a57769f579aa7e94c95595c1c95ee Mon Sep 17 00:00:00 2001 From: Tere Date: Tue, 14 Apr 2026 10:49:02 +0200 Subject: [PATCH 11/28] refactor(requiredinputs): extract shared collectAndCopyPolicyTemplateFiles helper Extract the duplicated copy loop from collectAndCopyInputPkgPolicyTemplates and collectAndCopyInputPkgDataStreams into a single collectAndCopyPolicyTemplateFiles function parameterised by destDir. Remove the RequiredInputsResolver receiver from both wrappers since they don't use resolver state. Add unit tests for the new helper covering single/multiple template paths, deduplication, missing files, invalid paths, custom destDir, and content preservation. 
Co-Authored-By: Claude Sonnet 4.6 --- internal/requiredinputs/copy.go | 69 ++++++++ internal/requiredinputs/copy_test.go | 197 +++++++++++++++++++++ internal/requiredinputs/policytemplates.go | 57 +----- internal/requiredinputs/streams.go | 59 +----- 4 files changed, 274 insertions(+), 108 deletions(-) create mode 100644 internal/requiredinputs/copy.go create mode 100644 internal/requiredinputs/copy_test.go diff --git a/internal/requiredinputs/copy.go b/internal/requiredinputs/copy.go new file mode 100644 index 0000000000..e06b421d2d --- /dev/null +++ b/internal/requiredinputs/copy.go @@ -0,0 +1,69 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "fmt" + "io/fs" + "os" + "path" + + "github.com/elastic/elastic-package/internal/logger" + "github.com/elastic/elastic-package/internal/packages" +) + +// collectAndCopyPolicyTemplateFiles opens the input package at inputPkgPath, +// reads template names from its policy_templates manifest entries, copies each +// file from agent/input/ into destDir inside buildRoot with the prefix +// "-", and returns the list of destination file names. 
+func collectAndCopyPolicyTemplateFiles(inputPkgPath, pkgName, destDir string, buildRoot *os.Root) ([]string, error) { + inputPkgFS, closeFn, err := openPackageFS(inputPkgPath) + if err != nil { + return nil, fmt.Errorf("failed to open input package %q: %w", inputPkgPath, err) + } + defer func() { _ = closeFn() }() + + manifestBytes, err := fs.ReadFile(inputPkgFS, packages.PackageManifestFile) + if err != nil { + return nil, fmt.Errorf("failed to read input package manifest: %w", err) + } + manifest, err := packages.ReadPackageManifestBytes(manifestBytes) + if err != nil { + return nil, fmt.Errorf("failed to parse input package manifest: %w", err) + } + + seen := make(map[string]bool) + copiedNames := make([]string, 0) + for _, pt := range manifest.PolicyTemplates { + var names []string + switch { + case len(pt.TemplatePaths) > 0: + names = pt.TemplatePaths + case pt.TemplatePath != "": + names = []string{pt.TemplatePath} + } + for _, name := range names { + if seen[name] { + continue + } + seen[name] = true + content, err := fs.ReadFile(inputPkgFS, path.Join("agent", "input", name)) + if err != nil { + return nil, fmt.Errorf("failed to read template %q from agent/input (declared in manifest): %w", name, err) + } + destName := pkgName + "-" + name + if err := buildRoot.MkdirAll(destDir, 0755); err != nil { + return nil, fmt.Errorf("failed to create directory %q: %w", destDir, err) + } + destPath := path.Join(destDir, destName) + if err := buildRoot.WriteFile(destPath, content, 0644); err != nil { + return nil, fmt.Errorf("failed to write template %q: %w", destName, err) + } + logger.Debugf("Copied input package template: %s -> %s", name, destName) + copiedNames = append(copiedNames, destName) + } + } + return copiedNames, nil +} diff --git a/internal/requiredinputs/copy_test.go b/internal/requiredinputs/copy_test.go new file mode 100644 index 0000000000..9c572d1df7 --- /dev/null +++ b/internal/requiredinputs/copy_test.go @@ -0,0 +1,197 @@ +// Copyright 
Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package requiredinputs + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// buildRootFor creates a temporary os.Root for use in tests. +func buildRootFor(t *testing.T) *os.Root { + t.Helper() + root, err := os.OpenRoot(t.TempDir()) + require.NoError(t, err) + t.Cleanup(func() { root.Close() }) + return root +} + +// TestCollectAndCopyPolicyTemplateFiles_SingleTemplatePath verifies that a package whose +// policy_template declares a single template_path is copied into destDir with the +// "-" prefix, and that the returned slice contains exactly that name. +func TestCollectAndCopyPolicyTemplateFiles_SingleTemplatePath(t *testing.T) { + inputPkgDir := createFakeInputHelper(t) + buildRoot := buildRootFor(t) + + destDir := filepath.Join("agent", "input") + got, err := collectAndCopyPolicyTemplateFiles(inputPkgDir, "sql", destDir, buildRoot) + require.NoError(t, err) + + assert.Equal(t, []string{"sql-input.yml.hbs"}, got) + + content, err := buildRoot.ReadFile(filepath.Join(destDir, "sql-input.yml.hbs")) + require.NoError(t, err) + assert.Equal(t, "template content", string(content)) +} + +// TestCollectAndCopyPolicyTemplateFiles_MultipleTemplatePaths verifies that all names listed +// in template_paths across multiple policy_templates are copied. 
+func TestCollectAndCopyPolicyTemplateFiles_MultipleTemplatePaths(t *testing.T) { + inputPkgDir := createFakeInputWithMultiplePolicyTemplates(t) + buildRoot := buildRootFor(t) + + destDir := filepath.Join("agent", "input") + got, err := collectAndCopyPolicyTemplateFiles(inputPkgDir, "sql", destDir, buildRoot) + require.NoError(t, err) + + assert.Equal(t, []string{"sql-input.yml.hbs", "sql-metrics.yml.hbs", "sql-extra.yml.hbs"}, got) + + for _, name := range []string{"sql-input.yml.hbs", "sql-metrics.yml.hbs", "sql-extra.yml.hbs"} { + _, err := buildRoot.ReadFile(filepath.Join(destDir, name)) + require.NoError(t, err, "expected %s to exist in destDir", name) + } +} + +// TestCollectAndCopyPolicyTemplateFiles_Deduplication verifies that when the same template name +// appears in more than one policy_template it is only copied once. +func TestCollectAndCopyPolicyTemplateFiles_Deduplication(t *testing.T) { + inputPkgDir := t.TempDir() + manifest := []byte(`name: sql +version: 0.1.0 +type: input +policy_templates: + - input: sql + template_path: shared.yml.hbs + - input: sql/metrics + template_path: shared.yml.hbs +`) + err := os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + err = os.MkdirAll(filepath.Join(inputPkgDir, "agent", "input"), 0755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(inputPkgDir, "agent", "input", "shared.yml.hbs"), []byte("shared"), 0644) + require.NoError(t, err) + + buildRoot := buildRootFor(t) + destDir := filepath.Join("agent", "input") + + got, err := collectAndCopyPolicyTemplateFiles(inputPkgDir, "sql", destDir, buildRoot) + require.NoError(t, err) + + // Returned slice must contain the prefixed name exactly once. + assert.Equal(t, []string{"sql-shared.yml.hbs"}, got) +} + +// TestCollectAndCopyPolicyTemplateFiles_NoTemplates verifies that a package whose +// policy_templates have neither template_path nor template_paths returns an empty slice +// without error. 
+func TestCollectAndCopyPolicyTemplateFiles_NoTemplates(t *testing.T) { + inputPkgDir := t.TempDir() + manifest := []byte(`name: sql +version: 0.1.0 +type: input +policy_templates: + - input: sql +`) + err := os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + err = os.MkdirAll(filepath.Join(inputPkgDir, "agent", "input"), 0755) + require.NoError(t, err) + + buildRoot := buildRootFor(t) + + got, err := collectAndCopyPolicyTemplateFiles(inputPkgDir, "sql", "agent/input", buildRoot) + require.NoError(t, err) + assert.Empty(t, got) +} + +// TestCollectAndCopyPolicyTemplateFiles_MissingTemplateFile verifies that when a template +// name is declared in the manifest but the corresponding file is absent from agent/input/, +// the function returns an error. +func TestCollectAndCopyPolicyTemplateFiles_MissingTemplateFile(t *testing.T) { + inputPkgDir := t.TempDir() + manifest := []byte(`name: sql +version: 0.1.0 +type: input +policy_templates: + - input: sql + template_path: missing.yml.hbs +`) + err := os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + err = os.MkdirAll(filepath.Join(inputPkgDir, "agent", "input"), 0755) + require.NoError(t, err) + // Intentionally do NOT create missing.yml.hbs. + + buildRoot := buildRootFor(t) + + _, err = collectAndCopyPolicyTemplateFiles(inputPkgDir, "sql", "agent/input", buildRoot) + require.Error(t, err) + assert.Contains(t, err.Error(), "missing.yml.hbs") +} + +// TestCollectAndCopyPolicyTemplateFiles_InvalidPackagePath verifies that a non-existent +// package path returns an error from openPackageFS. 
+func TestCollectAndCopyPolicyTemplateFiles_InvalidPackagePath(t *testing.T) {
+	buildRoot := buildRootFor(t)
+
+	_, err := collectAndCopyPolicyTemplateFiles("/nonexistent/path", "sql", "agent/input", buildRoot)
+	require.Error(t, err)
+	assert.Contains(t, err.Error(), "failed to open input package")
+}
+
+// TestCollectAndCopyPolicyTemplateFiles_CustomDestDir verifies that files are written to the
+// caller-supplied destDir, not hardcoded to agent/input. This covers the data-stream use-case
+// where destDir is data_stream/<name>/agent/stream.
+func TestCollectAndCopyPolicyTemplateFiles_CustomDestDir(t *testing.T) {
+	inputPkgDir := createFakeInputHelper(t)
+	buildRoot := buildRootFor(t)
+
+	destDir := filepath.Join("data_stream", "logs", "agent", "stream")
+	got, err := collectAndCopyPolicyTemplateFiles(inputPkgDir, "sql", destDir, buildRoot)
+	require.NoError(t, err)
+
+	assert.Equal(t, []string{"sql-input.yml.hbs"}, got)
+
+	_, err = buildRoot.ReadFile(filepath.Join(destDir, "sql-input.yml.hbs"))
+	require.NoError(t, err, "file must be written to the custom destDir")
+
+	// Must NOT appear in agent/input.
+	_, err = buildRoot.ReadFile(filepath.Join("agent", "input", "sql-input.yml.hbs"))
+	assert.Error(t, err, "file must not be written to agent/input when a custom destDir is given")
+}
+
+// TestCollectAndCopyPolicyTemplateFiles_FileContentPreserved verifies that the byte content
+// of the template is copied verbatim without modification. 
+func TestCollectAndCopyPolicyTemplateFiles_FileContentPreserved(t *testing.T) { + inputPkgDir := t.TempDir() + originalContent := []byte("{{#each processors}}\n- {{this}}\n{{/each}}") + manifest := []byte(`name: sql +version: 0.1.0 +type: input +policy_templates: + - input: sql + template_path: input.yml.hbs +`) + err := os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), manifest, 0644) + require.NoError(t, err) + err = os.MkdirAll(filepath.Join(inputPkgDir, "agent", "input"), 0755) + require.NoError(t, err) + err = os.WriteFile(filepath.Join(inputPkgDir, "agent", "input", "input.yml.hbs"), originalContent, 0644) + require.NoError(t, err) + + buildRoot := buildRootFor(t) + + _, err = collectAndCopyPolicyTemplateFiles(inputPkgDir, "sql", "agent/input", buildRoot) + require.NoError(t, err) + + copied, err := buildRoot.ReadFile(filepath.Join("agent", "input", "sql-input.yml.hbs")) + require.NoError(t, err) + assert.Equal(t, originalContent, copied) +} diff --git a/internal/requiredinputs/policytemplates.go b/internal/requiredinputs/policytemplates.go index b0a4fb5d17..fd3f1e55c8 100644 --- a/internal/requiredinputs/policytemplates.go +++ b/internal/requiredinputs/policytemplates.go @@ -6,13 +6,11 @@ package requiredinputs import ( "fmt" - "io/fs" "os" "path" "gopkg.in/yaml.v3" - "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" ) @@ -37,7 +35,7 @@ func (r *RequiredInputsResolver) bundlePolicyTemplatesInputPackageTemplates(mani if !ok || sourcePath == "" { return fmt.Errorf("failed to find input package %q referenced by policy template %q", input.Package, pt.Name) } - inputPaths, err := r.collectAndCopyInputPkgPolicyTemplates(sourcePath, input.Package, buildRoot) + inputPaths, err := collectAndCopyInputPkgPolicyTemplates(sourcePath, input.Package, buildRoot) if err != nil { return fmt.Errorf("failed to collect and copy input package policy templates: %w", err) } @@ -76,57 +74,8 @@ func (r 
*RequiredInputsResolver) bundlePolicyTemplatesInputPackageTemplates(mani // collectAndCopyInputPkgPolicyTemplates collects the templates from the input package and copies them to the agent/input directory of the build package // it returns the list of copied template names -func (r *RequiredInputsResolver) collectAndCopyInputPkgPolicyTemplates(inputPkgPath, inputPkgName string, buildRoot *os.Root) ([]string, error) { - inputPkgFS, closeFn, err := openPackageFS(inputPkgPath) - if err != nil { - return nil, fmt.Errorf("failed to open input package %q: %w", inputPkgPath, err) - } - defer func() { _ = closeFn() }() - - manifestBytes, err := fs.ReadFile(inputPkgFS, packages.PackageManifestFile) - if err != nil { - return nil, fmt.Errorf("failed to read input package manifest: %w", err) - } - manifest, err := packages.ReadPackageManifestBytes(manifestBytes) - if err != nil { - return nil, fmt.Errorf("failed to parse input package manifest: %w", err) - } - - seen := make(map[string]bool) - copiedNames := make([]string, 0) - for _, pt := range manifest.PolicyTemplates { - var names []string - switch { - case len(pt.TemplatePaths) > 0: - names = pt.TemplatePaths - case pt.TemplatePath != "": - names = []string{pt.TemplatePath} - } - for _, name := range names { - if seen[name] { - continue - } - seen[name] = true - // copy the template from "agent/input" directory of the input package to the "agent/input" directory of the build package - content, err := fs.ReadFile(inputPkgFS, path.Join("agent", "input", name)) - if err != nil { - return nil, fmt.Errorf("failed to read template %q from agent/input (declared in manifest): %w", name, err) - } - destName := inputPkgName + "-" + name - // create the agent/input directory if it doesn't exist - agentInputDir := path.Join("agent", "input") - if err := buildRoot.MkdirAll(agentInputDir, 0755); err != nil { - return nil, fmt.Errorf("failed to create agent/input directory: %w", err) - } - destPath := path.Join(agentInputDir, destName) 
- if err := buildRoot.WriteFile(destPath, content, 0644); err != nil { - return nil, fmt.Errorf("failed to write template %q: %w", destName, err) - } - logger.Debugf("Copied input package template: %s -> %s", name, destName) - copiedNames = append(copiedNames, destName) - } - } - return copiedNames, nil +func collectAndCopyInputPkgPolicyTemplates(inputPkgPath, inputPkgName string, buildRoot *os.Root) ([]string, error) { + return collectAndCopyPolicyTemplateFiles(inputPkgPath, inputPkgName, path.Join("agent", "input"), buildRoot) } // setInputPolicyTemplateTemplatePaths updates the manifest YAML document to set the template_paths for the specified policy template input to the provided paths diff --git a/internal/requiredinputs/streams.go b/internal/requiredinputs/streams.go index 4c5c3b3f30..09533bfbca 100644 --- a/internal/requiredinputs/streams.go +++ b/internal/requiredinputs/streams.go @@ -13,7 +13,6 @@ import ( "gopkg.in/yaml.v3" - "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" ) @@ -61,7 +60,7 @@ func (r *RequiredInputsResolver) processDataStreamManifest(manifestPath string, continue } dsRootDir := path.Dir(manifestPath) - inputPaths, err := r.collectAndCopyInputPkgDataStreams(dsRootDir, pkgPath, stream.Package, buildRoot) + inputPaths, err := collectAndCopyInputPkgDataStreams(dsRootDir, pkgPath, stream.Package, buildRoot) if err != nil { return fmt.Errorf("failed to collect and copy input package data stream templates for manifest %q: %w", manifestPath, err) } @@ -100,7 +99,7 @@ func (r *RequiredInputsResolver) processDataStreamManifest(manifestPath string, return nil } -// collectAndCopyInputPkgDataStreams collects the data streams from the input package and copies them to the agent/input directory of the build package +// collectAndCopyInputPkgDataStreams collects the data streams from the input package and copies them to the agent/stream directory of the build package // it returns the list of 
copied data stream names // // Design note: input package templates are authored for input-level compilation, where available @@ -114,57 +113,9 @@ func (r *RequiredInputsResolver) processDataStreamManifest(manifestPath string, // template_paths (integration templates are appended last and take precedence). // See https://github.com/elastic/elastic-package/issues/3279 for the follow-up work on // merging variable definitions from input packages and composable packages at build time. -func (r *RequiredInputsResolver) collectAndCopyInputPkgDataStreams(dsRootDir, inputPkgPath, inputPkgName string, buildRoot *os.Root) ([]string, error) { - inputPkgFS, closeFn, err := openPackageFS(inputPkgPath) - if err != nil { - return nil, fmt.Errorf("failed to open input package %q: %w", inputPkgPath, err) - } - defer func() { _ = closeFn() }() - - manifestBytes, err := fs.ReadFile(inputPkgFS, "manifest.yml") - if err != nil { - return nil, fmt.Errorf("failed to read input package manifest: %w", err) - } - manifest, err := packages.ReadPackageManifestBytes(manifestBytes) - if err != nil { - return nil, fmt.Errorf("failed to parse input package manifest: %w", err) - } - - seen := make(map[string]bool) - copiedNames := make([]string, 0) - for _, pt := range manifest.PolicyTemplates { - var names []string - switch { - case len(pt.TemplatePaths) > 0: - names = pt.TemplatePaths - case pt.TemplatePath != "": - names = []string{pt.TemplatePath} - } - for _, name := range names { - if seen[name] { - continue - } - seen[name] = true - // copy the template from "agent/input" directory of the input package to the "agent/stream" directory of the build package - content, err := fs.ReadFile(inputPkgFS, path.Join("agent", "input", name)) - if err != nil { - return nil, fmt.Errorf("failed to read template %q from agent/input (declared in manifest): %w", name, err) - } - destName := inputPkgName + "-" + name - // create the agent/stream directory if it doesn't exist - agentStreamDir := 
path.Join(dsRootDir, "agent", "stream") - if err := buildRoot.MkdirAll(agentStreamDir, 0755); err != nil { - return nil, fmt.Errorf("failed to create agent/stream directory: %w", err) - } - destPath := path.Join(agentStreamDir, destName) - if err := buildRoot.WriteFile(destPath, content, 0644); err != nil { - return nil, fmt.Errorf("failed to write template %q: %w", destName, err) - } - logger.Debugf("Copied input package template: %s -> %s", name, destName) - copiedNames = append(copiedNames, destName) - } - } - return copiedNames, nil +func collectAndCopyInputPkgDataStreams(dsRootDir, inputPkgPath, inputPkgName string, buildRoot *os.Root) ([]string, error) { + agentStreamDir := path.Join(dsRootDir, "agent", "stream") + return collectAndCopyPolicyTemplateFiles(inputPkgPath, inputPkgName, agentStreamDir, buildRoot) } func setStreamTemplatePaths(doc *yaml.Node, streamIdx int, paths []string) error { From 4a6e78452aa2b38cab607080cf194d286e5944c0 Mon Sep 17 00:00:00 2001 From: Tere Date: Tue, 14 Apr 2026 14:27:42 +0200 Subject: [PATCH 12/28] Add composable CI test packages and two-phase zip build - Add test/packages/composable/01_ci_input_pkg and 02_ci_composable_integration for requires.input coverage (vars, fields, templates, mixed streams) - Phase-2 build in test-build-install-zip.sh after stack up; set package_registry.base_url to local EPR with restore in cleanup - Mirror composable skip + phase-2 in test-build-install-zip-file.sh - Point internal/requiredinputs integration tests at composable fixtures - Remove duplicate manual_packages; edge fixtures use ci_input_pkg; refresh manual_packages README Made-with: Cursor --- internal/requiredinputs/fields_test.go | 101 +++++++++++------- internal/requiredinputs/streamdefs_test.go | 18 ++-- internal/requiredinputs/variables_test.go | 30 ++++-- scripts/test-build-install-zip-file.sh | 45 +++++++- scripts/test-build-install-zip.sh | 42 ++++++++ test/manual_packages/README.md | 81 ++++---------- 
.../fields_input_pkg/docs/README.md | 3 - .../fields_input_pkg/manifest.yml | 32 ------ .../test_input_pkg/agent/input/input.yml.hbs | 4 - .../test_input_pkg/changelog.yml | 5 - .../test_input_pkg/docs/README.md | 3 - .../test_input_pkg/fields/base-fields.yml | 12 --- .../test_input_pkg/manifest.yml | 34 ------ .../agent/input/input.yml.hbs | 4 - .../var_merging_input_pkg/changelog.yml | 5 - .../var_merging_input_pkg/docs/README.md | 3 - .../fields/base-fields.yml | 12 --- .../data_stream/field_logs/manifest.yml | 6 -- .../with_field_bundling/docs/README.md | 3 - .../with_field_bundling/manifest.yml | 33 ------ .../_dev/test/config.yml | 3 - .../with_input_package_requires/changelog.yml | 5 - .../test_logs/agent/stream/stream.yml.hbs | 4 - .../test_logs/fields/base-fields.yml | 12 --- .../data_stream/test_logs/manifest.yml | 19 ---- .../docs/README.md | 4 - .../with_input_package_requires/manifest.yml | 34 ------ .../_dev/test/config.yml | 4 +- .../data_stream/test_logs/manifest.yml | 2 +- .../with_linked_template_path/manifest.yml | 4 +- .../_dev/test/config.yml | 4 +- .../data_stream/var_merging_logs/manifest.yml | 2 +- .../with_merging_ds_merges/manifest.yml | 4 +- .../_dev/test/config.yml | 4 +- .../data_stream/var_merging_logs/manifest.yml | 2 +- .../with_merging_duplicate_error/manifest.yml | 4 +- .../with_merging_full/_dev/test/config.yml | 3 - .../with_merging_full/changelog.yml | 5 - .../var_merging_logs/fields/base-fields.yml | 12 --- .../data_stream/var_merging_logs/manifest.yml | 13 --- .../with_merging_full/docs/README.md | 20 ---- .../with_merging_full/manifest.yml | 40 ------- .../_dev/test/config.yml | 4 +- .../data_stream/var_merging_logs/manifest.yml | 2 +- .../with_merging_no_override/manifest.yml | 4 +- .../_dev/test/config.yml | 4 +- .../data_stream/var_merging_logs/manifest.yml | 2 +- .../manifest.yml | 4 +- .../_dev/test/config.yml | 4 +- .../data_stream/alpha_logs/manifest.yml | 2 +- .../data_stream/beta_logs/manifest.yml | 2 +- 
.../manifest.yml | 6 +- .../01_ci_input_pkg/_dev/test/config.yml | 2 + .../agent/input/extra.yml.hbs | 2 +- .../agent/input/input.yml.hbs | 2 +- .../composable/01_ci_input_pkg}/changelog.yml | 2 +- .../composable/01_ci_input_pkg/docs/README.md | 3 + .../01_ci_input_pkg}/fields/base-fields.yml | 0 .../composable/01_ci_input_pkg}/manifest.yml | 20 ++-- .../_dev/test/config.yml | 2 + .../changelog.yml | 2 +- .../agent/stream/stream.yml.hbs | 2 +- .../fields/base-fields.yml | 0 .../ci_composable_logs/manifest.yml | 26 +++++ .../docs/README.md | 7 ++ .../02_ci_composable_integration/manifest.yml | 42 ++++++++ 66 files changed, 328 insertions(+), 498 deletions(-) delete mode 100644 test/manual_packages/required_inputs/fields_input_pkg/docs/README.md delete mode 100644 test/manual_packages/required_inputs/fields_input_pkg/manifest.yml delete mode 100644 test/manual_packages/required_inputs/test_input_pkg/agent/input/input.yml.hbs delete mode 100644 test/manual_packages/required_inputs/test_input_pkg/changelog.yml delete mode 100644 test/manual_packages/required_inputs/test_input_pkg/docs/README.md delete mode 100644 test/manual_packages/required_inputs/test_input_pkg/fields/base-fields.yml delete mode 100644 test/manual_packages/required_inputs/test_input_pkg/manifest.yml delete mode 100644 test/manual_packages/required_inputs/var_merging_input_pkg/agent/input/input.yml.hbs delete mode 100644 test/manual_packages/required_inputs/var_merging_input_pkg/changelog.yml delete mode 100644 test/manual_packages/required_inputs/var_merging_input_pkg/docs/README.md delete mode 100644 test/manual_packages/required_inputs/var_merging_input_pkg/fields/base-fields.yml delete mode 100644 test/manual_packages/required_inputs/with_field_bundling/data_stream/field_logs/manifest.yml delete mode 100644 test/manual_packages/required_inputs/with_field_bundling/docs/README.md delete mode 100644 test/manual_packages/required_inputs/with_field_bundling/manifest.yml delete mode 100644 
test/manual_packages/required_inputs/with_input_package_requires/_dev/test/config.yml delete mode 100644 test/manual_packages/required_inputs/with_input_package_requires/changelog.yml delete mode 100644 test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/agent/stream/stream.yml.hbs delete mode 100644 test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/fields/base-fields.yml delete mode 100644 test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/manifest.yml delete mode 100644 test/manual_packages/required_inputs/with_input_package_requires/docs/README.md delete mode 100644 test/manual_packages/required_inputs/with_input_package_requires/manifest.yml delete mode 100644 test/manual_packages/required_inputs/with_merging_full/_dev/test/config.yml delete mode 100644 test/manual_packages/required_inputs/with_merging_full/changelog.yml delete mode 100644 test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/fields/base-fields.yml delete mode 100644 test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/manifest.yml delete mode 100644 test/manual_packages/required_inputs/with_merging_full/docs/README.md delete mode 100644 test/manual_packages/required_inputs/with_merging_full/manifest.yml create mode 100644 test/packages/composable/01_ci_input_pkg/_dev/test/config.yml rename test/{manual_packages/required_inputs/test_input_pkg => packages/composable/01_ci_input_pkg}/agent/input/extra.yml.hbs (57%) rename test/{manual_packages/required_inputs/fields_input_pkg => packages/composable/01_ci_input_pkg}/agent/input/input.yml.hbs (78%) rename test/{manual_packages/required_inputs/fields_input_pkg => packages/composable/01_ci_input_pkg}/changelog.yml (64%) create mode 100644 test/packages/composable/01_ci_input_pkg/docs/README.md rename test/{manual_packages/required_inputs/fields_input_pkg => 
packages/composable/01_ci_input_pkg}/fields/base-fields.yml (100%) rename test/{manual_packages/required_inputs/var_merging_input_pkg => packages/composable/01_ci_input_pkg}/manifest.yml (64%) create mode 100644 test/packages/composable/02_ci_composable_integration/_dev/test/config.yml rename test/{manual_packages/required_inputs/with_field_bundling => packages/composable/02_ci_composable_integration}/changelog.yml (64%) rename test/{manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs => packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs}/agent/stream/stream.yml.hbs (78%) rename test/{manual_packages/required_inputs/with_field_bundling/data_stream/field_logs => packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs}/fields/base-fields.yml (100%) create mode 100644 test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml create mode 100644 test/packages/composable/02_ci_composable_integration/docs/README.md create mode 100644 test/packages/composable/02_ci_composable_integration/manifest.yml diff --git a/internal/requiredinputs/fields_test.go b/internal/requiredinputs/fields_test.go index 971ac72240..fe404b86d8 100644 --- a/internal/requiredinputs/fields_test.go +++ b/internal/requiredinputs/fields_test.go @@ -117,12 +117,11 @@ func TestCollectExistingFieldNames(t *testing.T) { // ---- integration tests ------------------------------------------------------- -// makeFakeEprForFieldBundling supplies the fields_input_pkg fixture path as if -// it were downloaded from the registry, so integration tests do not need a -// running stack. +// makeFakeEprForFieldBundling supplies the ci_input_pkg fixture path as if it +// were downloaded from the registry, so integration tests do not need a stack. 
func makeFakeEprForFieldBundling(t *testing.T) *fakeEprClient { t.Helper() - inputPkgPath := filepath.Join("..", "..", "test", "manual_packages", "required_inputs", "fields_input_pkg") + inputPkgPath := ciInputFixturePath() return &fakeEprClient{ downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { return inputPkgPath, nil @@ -135,16 +134,16 @@ func makeFakeEprForFieldBundling(t *testing.T) *fakeEprClient { // (integration wins), and only fields unique to the input package are written // to /fields/-fields.yml. func TestBundleDataStreamFields_PartialOverlap(t *testing.T) { - // with_field_bundling has data_stream/field_logs/fields/base-fields.yml with - // 4 common fields. fields_input_pkg has those same 4 plus "message" and + // 02_ci_composable_integration has data_stream/ci_composable_logs/fields/base-fields.yml with + // 4 common fields. ci_input_pkg has those same 4 plus "message" and // "log.level". After bundling, only "message" and "log.level" should appear // in the generated file. - buildPackageRoot := copyFixturePackage(t, "with_field_bundling") + buildPackageRoot := copyComposableIntegrationFixture(t) resolver := NewRequiredInputsResolver(makeFakeEprForFieldBundling(t)) require.NoError(t, resolver.Bundle(buildPackageRoot)) - bundledPath := filepath.Join(buildPackageRoot, "data_stream", "field_logs", "fields", "fields_input_pkg-fields.yml") + bundledPath := filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "fields", "ci_input_pkg-fields.yml") data, err := os.ReadFile(bundledPath) require.NoError(t, err, "bundled fields file should exist") @@ -159,7 +158,7 @@ func TestBundleDataStreamFields_PartialOverlap(t *testing.T) { assert.ElementsMatch(t, []string{"message", "log.level"}, names) // Original base-fields.yml must be untouched. 
- originalData, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "field_logs", "fields", "base-fields.yml")) + originalData, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "fields", "base-fields.yml")) require.NoError(t, err) originalNodes, err := loadFieldNodesFromBytes(originalData) require.NoError(t, err) @@ -170,22 +169,25 @@ func TestBundleDataStreamFields_PartialOverlap(t *testing.T) { // the input package are already present in the integration data stream, no // bundled file is created (nothing to add). func TestBundleDataStreamFields_AllFieldsOverlap(t *testing.T) { - // with_input_package_requires has data_stream/test_logs/fields/base-fields.yml - // with the same 4 fields as test_input_pkg. No new fields → no output file. - inputPkgPath := filepath.Join("..", "..", "test", "manual_packages", "required_inputs", "test_input_pkg") + // Copy the composable integration and replace the data stream base fields with + // the full set from ci_input_pkg so every input field is already declared — no bundled file. 
+ buildPackageRoot := copyComposableIntegrationFixture(t) + inputFields, err := os.ReadFile(filepath.Join(ciInputFixturePath(), "fields", "base-fields.yml")) + require.NoError(t, err) + dsFieldsPath := filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "fields", "base-fields.yml") + require.NoError(t, os.WriteFile(dsFieldsPath, inputFields, 0644)) + epr := &fakeEprClient{ downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { - return inputPkgPath, nil + return ciInputFixturePath(), nil }, } - - buildPackageRoot := copyFixturePackage(t, "with_input_package_requires") resolver := NewRequiredInputsResolver(epr) - err := resolver.Bundle(buildPackageRoot) + err = resolver.Bundle(buildPackageRoot) require.NoError(t, err) - bundledPath := filepath.Join(buildPackageRoot, "data_stream", "test_logs", "fields", "test_input_pkg-fields.yml") + bundledPath := filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "fields", "ci_input_pkg-fields.yml") _, statErr := os.Stat(bundledPath) assert.True(t, os.IsNotExist(statErr), "bundled fields file should not be created when all fields already exist") } @@ -213,14 +215,12 @@ policy_templates: }, } - buildPackageRoot := copyFixturePackage(t, "with_field_bundling") + buildPackageRoot := copyComposableIntegrationFixture(t) // Patch manifest to reference no_fields_pkg instead. 
manifestPath := filepath.Join(buildPackageRoot, "manifest.yml") - manifestData, err := os.ReadFile(manifestPath) - require.NoError(t, err) patched := []byte(`format_version: 3.6.0 -name: with_field_bundling -title: Integration With Field Bundling +name: ci_composable_integration +title: CI Composable Integration version: 0.1.0 type: integration categories: @@ -235,38 +235,51 @@ requires: - package: no_fields_pkg version: "0.1.0" policy_templates: - - name: field_logs - title: Field Logs + - name: ci_composable_logs + title: CI composable logs description: Collect logs data_streams: - - field_logs + - ci_composable_logs inputs: - package: no_fields_pkg title: Collect logs description: Use the no fields input package + - type: logs + title: Native logs input + description: Plain logs input owner: github: elastic/integrations type: elastic `) - _ = manifestData // not used further require.NoError(t, os.WriteFile(manifestPath, patched, 0644)) - // Also patch the data stream manifest to reference no_fields_pkg. - dsManifestPath := filepath.Join(buildPackageRoot, "data_stream", "field_logs", "manifest.yml") - require.NoError(t, os.WriteFile(dsManifestPath, []byte(`title: Field Logs + dsManifestPath := filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "manifest.yml") + require.NoError(t, os.WriteFile(dsManifestPath, []byte(`title: CI composable logs type: logs streams: - package: no_fields_pkg - title: Field Logs + title: Logs via no-fields input package description: Collect field logs. + - input: logs + title: Plain logs stream + description: Native logs stream without package reference. 
+ template_path: stream.yml.hbs + vars: + - name: paths + type: text + title: Paths + multi: true + required: true + show_user: true + default: + - /var/log/ci/*.log `), 0644)) resolver := NewRequiredInputsResolver(epr) - err = resolver.Bundle(buildPackageRoot) + err := resolver.Bundle(buildPackageRoot) require.NoError(t, err) - // No bundled fields file should be created. - bundledPath := filepath.Join(buildPackageRoot, "data_stream", "field_logs", "fields", "no_fields_pkg-fields.yml") + bundledPath := filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "fields", "no_fields_pkg-fields.yml") _, statErr := os.Stat(bundledPath) assert.True(t, os.IsNotExist(statErr), "no fields file should be created when input package has no fields") } @@ -274,22 +287,28 @@ streams: // TestBundleDataStreamFields_StreamWithoutPackage verifies that data stream // streams with no package reference are skipped without error. func TestBundleDataStreamFields_StreamWithoutPackage(t *testing.T) { - // with_input_package_requires has a second stream with input: logs (no package). - // The test confirms this is processed without error and no unexpected files appear. - inputPkgPath := filepath.Join("..", "..", "test", "manual_packages", "required_inputs", "test_input_pkg") + // Second stream uses input: logs (no package); Bundle should succeed and only + // bundle fields for the package-backed stream. epr := &fakeEprClient{ downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { - return inputPkgPath, nil + return ciInputFixturePath(), nil }, } - buildPackageRoot := copyFixturePackage(t, "with_input_package_requires") + buildPackageRoot := copyComposableIntegrationFixture(t) resolver := NewRequiredInputsResolver(epr) err := resolver.Bundle(buildPackageRoot) require.NoError(t, err) - // The non-package stream (logs input) should not produce a bundled fields file. 
- _, statErr := os.Stat(filepath.Join(buildPackageRoot, "data_stream", "test_logs", "fields", "-fields.yml")) - assert.True(t, os.IsNotExist(statErr)) + fieldsDir := filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "fields") + entries, err := os.ReadDir(fieldsDir) + require.NoError(t, err) + var names []string + for _, e := range entries { + names = append(names, e.Name()) + } + assert.Contains(t, names, "base-fields.yml") + assert.Contains(t, names, "ci_input_pkg-fields.yml") + assert.Len(t, names, 2) } diff --git a/internal/requiredinputs/streamdefs_test.go b/internal/requiredinputs/streamdefs_test.go index f949a1a4ec..2a7c9b5f94 100644 --- a/internal/requiredinputs/streamdefs_test.go +++ b/internal/requiredinputs/streamdefs_test.go @@ -425,30 +425,34 @@ streams: } // TestResolveStreamInputTypes_FieldBundlingFixture runs the full -// Bundle pipeline on the with_field_bundling fixture and +// Bundle pipeline on the composable CI integration fixture and // verifies that package: references are replaced in both the main manifest and // the data stream manifest. func TestResolveStreamInputTypes_FieldBundlingFixture(t *testing.T) { - buildPackageRoot := copyFixturePackage(t, "with_field_bundling") + buildPackageRoot := copyComposableIntegrationFixture(t) resolver := NewRequiredInputsResolver(makeFakeEprForFieldBundling(t)) require.NoError(t, resolver.Bundle(buildPackageRoot)) - // Check main manifest: package: fields_input_pkg → type: logfile + // Check main manifest: package-backed input → type: logfile; native logs input unchanged. 
manifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "manifest.yml")) require.NoError(t, err) m, err := packages.ReadPackageManifestBytes(manifestBytes) require.NoError(t, err) - require.Len(t, m.PolicyTemplates[0].Inputs, 1) + require.Len(t, m.PolicyTemplates[0].Inputs, 2) assert.Equal(t, "logfile", m.PolicyTemplates[0].Inputs[0].Type) assert.Empty(t, m.PolicyTemplates[0].Inputs[0].Package) + assert.Equal(t, "logs", m.PolicyTemplates[0].Inputs[1].Type) + assert.Empty(t, m.PolicyTemplates[0].Inputs[1].Package) - // Check data stream manifest: package: fields_input_pkg → input: logfile - dsManifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "field_logs", "manifest.yml")) + // Check data stream manifest: package stream → input: logfile; plain logs stream stays logs. + dsManifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "manifest.yml")) require.NoError(t, err) dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) require.NoError(t, err) - require.Len(t, dsManifest.Streams, 1) + require.Len(t, dsManifest.Streams, 2) assert.Equal(t, "logfile", dsManifest.Streams[0].Input) assert.Empty(t, dsManifest.Streams[0].Package) assert.NotEmpty(t, dsManifest.Streams[0].Title) + assert.Equal(t, "logs", dsManifest.Streams[1].Input) + assert.Empty(t, dsManifest.Streams[1].Package) } diff --git a/internal/requiredinputs/variables_test.go b/internal/requiredinputs/variables_test.go index c30af1aa3b..3b441a69ef 100644 --- a/internal/requiredinputs/variables_test.go +++ b/internal/requiredinputs/variables_test.go @@ -41,6 +41,21 @@ func copyFixturePackage(t *testing.T, fixtureName string) string { return destPath } +// ciInputFixturePath returns the path to test/packages/composable/01_ci_input_pkg (repository-relative from this package). 
+func ciInputFixturePath() string { + return filepath.Join("..", "..", "test", "packages", "composable", "01_ci_input_pkg") +} + +// copyComposableIntegrationFixture copies test/packages/composable/02_ci_composable_integration for integration tests. +func copyComposableIntegrationFixture(t *testing.T) string { + t.Helper() + srcPath := filepath.Join("..", "..", "test", "packages", "composable", "02_ci_composable_integration") + destPath := t.TempDir() + err := os.CopyFS(destPath, os.DirFS(srcPath)) + require.NoError(t, err, "copying composable CI integration fixture") + return destPath +} + // Variable merge tests exercise mergeVariables (see variables.go): when an // integration declares requires.input and references that input package under // policy_templates[].inputs with optional vars, definitions from the input @@ -252,7 +267,7 @@ func TestMergeStreamLevelVarNodes(t *testing.T) { // the input package as the authoritative base (order and fields) for merging. func TestLoadInputPkgVarNodes(t *testing.T) { t.Run("fixture with three vars", func(t *testing.T) { - pkgPath := filepath.Join("..", "..", "test", "manual_packages", "required_inputs", "var_merging_input_pkg") + pkgPath := ciInputFixturePath() order, byName, err := loadInputPkgVarNodes(pkgPath) require.NoError(t, err) assert.Equal(t, []string{"paths", "encoding", "timeout"}, order) @@ -273,12 +288,11 @@ func TestLoadInputPkgVarNodes(t *testing.T) { // ---- integration tests ------------------------------------------------------- -// makeFakeEprForVarMerging supplies the var_merging_input_pkg fixture path as -// if it were downloaded from the registry, so integration tests do not need a -// running stack. +// makeFakeEprForVarMerging supplies the ci_input_pkg fixture path as if it were +// downloaded from the registry, so integration tests do not need a running stack. 
func makeFakeEprForVarMerging(t *testing.T) *fakeEprClient { t.Helper() - inputPkgPath := filepath.Join("..", "..", "test", "manual_packages", "required_inputs", "var_merging_input_pkg") + inputPkgPath := ciInputFixturePath() return &fakeEprClient{ downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { return inputPkgPath, nil @@ -292,13 +306,13 @@ func makeFakeEprForVarMerging(t *testing.T) *fakeEprClient { // merged with a DS override and a novel DS-only var is appended—matching the // end state Fleet expects for a mixed promotion + DS customization scenario. func TestMergeVariables_Full(t *testing.T) { - buildPackageRoot := copyFixturePackage(t, "with_merging_full") + buildPackageRoot := copyComposableIntegrationFixture(t) resolver := NewRequiredInputsResolver(makeFakeEprForVarMerging(t)) err := resolver.Bundle(buildPackageRoot) require.NoError(t, err) - // Check package manifest: input should have 2 vars (paths, encoding). + // Check package manifest: first input (package ref) should have 2 vars (paths, encoding). manifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "manifest.yml")) require.NoError(t, err) manifest, err := packages.ReadPackageManifestBytes(manifestBytes) @@ -318,7 +332,7 @@ func TestMergeVariables_Full(t *testing.T) { assert.Equal(t, "text", inputVars[1].Type) // Check DS manifest: streams[0] should have 2 vars (timeout, custom_tag). 
- dsManifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "var_merging_logs", "manifest.yml")) + dsManifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "manifest.yml")) require.NoError(t, err) dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) require.NoError(t, err) diff --git a/scripts/test-build-install-zip-file.sh b/scripts/test-build-install-zip-file.sh index 88ab084ea8..f5dda6004c 100755 --- a/scripts/test-build-install-zip-file.sh +++ b/scripts/test-build-install-zip-file.sh @@ -2,6 +2,25 @@ set -euxo pipefail +ELASTIC_PACKAGE_CONFIG_FILE="${HOME}/.elastic-package/config.yml" +PREV_REGISTRY_URL="" +PACKAGE_REGISTRY_CI_OVERRIDE=0 +COMPOSABLE_INTEGRATION_DIR="test/packages/composable/02_ci_composable_integration/" + +restore_package_registry_config() { + if [[ "${PACKAGE_REGISTRY_CI_OVERRIDE}" -ne 1 ]]; then + return 0 + fi + if [[ ! -f "${ELASTIC_PACKAGE_CONFIG_FILE}" ]]; then + return 0 + fi + if [[ -n "${PREV_REGISTRY_URL}" ]]; then + yq eval --inplace ".package_registry.base_url = \"${PREV_REGISTRY_URL}\"" "${ELASTIC_PACKAGE_CONFIG_FILE}" || true + else + yq eval --inplace 'del(.package_registry.base_url)' "${ELASTIC_PACKAGE_CONFIG_FILE}" || true + fi +} + cleanup() { local r=$? if [ "${r}" -ne 0 ]; then @@ -10,6 +29,8 @@ cleanup() { fi echo "~~~ elastic-package cleanup" + restore_package_registry_config + local output_path="build/elastic-stack-dump/install-zip" if [ ${USE_SHELLINIT} -eq 1 ]; then output_path="${output_path}-shellinit" @@ -107,7 +128,7 @@ elastic-package stack up -d -v ${ARG_VERSION} ELASTIC_PACKAGE_LINKS_FILE_PATH="$(pwd)/scripts/links_table.yml" export ELASTIC_PACKAGE_LINKS_FILE_PATH -# Build packages +# Build packages (see test-build-install-zip.sh for composable phase-2 notes). 
for d in test/packages/*/*/; do # Added set +x in a sub-shell to avoid printing the testype command in the output # This helps to keep the CI output cleaner @@ -116,16 +137,32 @@ for d in test/packages/*/*/; do if [ "${packageTestType}" == "false_positives" ]; then continue fi + if [[ "${d}" == "${COMPOSABLE_INTEGRATION_DIR}" ]]; then + echo "--- Skipping composable integration (phase-2 build): ${d}" + continue + fi echo "--- Building zip package: ${d}" elastic-package build -C "$d" done +eval "$(elastic-package stack shellinit)" + +if [[ -f "${ELASTIC_PACKAGE_CONFIG_FILE}" ]]; then + PREV_REGISTRY_URL=$(yq '.package_registry.base_url // ""' "${ELASTIC_PACKAGE_CONFIG_FILE}") + yq eval --inplace '.package_registry.base_url = "https://127.0.0.1:8080"' "${ELASTIC_PACKAGE_CONFIG_FILE}" +else + mkdir -p "$(dirname "${ELASTIC_PACKAGE_CONFIG_FILE}")" + yq -n '.package_registry.base_url = "https://127.0.0.1:8080"' > "${ELASTIC_PACKAGE_CONFIG_FILE}" +fi +PACKAGE_REGISTRY_CI_OVERRIDE=1 + +echo "--- Phase-2 build: composable integration" +elastic-package build -C "${COMPOSABLE_INTEGRATION_DIR}" + # Remove unzipped built packages, leave .zip files rm -r build/packages/*/ -if [ ${USE_SHELLINIT} -eq 1 ]; then - eval "$(elastic-package stack shellinit)" -else +if [ ${USE_SHELLINIT} -eq 0 ]; then export ELASTIC_PACKAGE_ELASTICSEARCH_USERNAME=elastic export ELASTIC_PACKAGE_ELASTICSEARCH_PASSWORD=changeme export ELASTIC_PACKAGE_KIBANA_HOST=https://127.0.0.1:5601 diff --git a/scripts/test-build-install-zip.sh b/scripts/test-build-install-zip.sh index d3bbd3d47d..0334f95639 100755 --- a/scripts/test-build-install-zip.sh +++ b/scripts/test-build-install-zip.sh @@ -2,6 +2,25 @@ set -euxo pipefail +ELASTIC_PACKAGE_CONFIG_FILE="${HOME}/.elastic-package/config.yml" +PREV_REGISTRY_URL="" +PACKAGE_REGISTRY_CI_OVERRIDE=0 +COMPOSABLE_INTEGRATION_DIR="test/packages/composable/02_ci_composable_integration/" + +restore_package_registry_config() { + if [[ "${PACKAGE_REGISTRY_CI_OVERRIDE}" -ne 1 
]]; then + return 0 + fi + if [[ ! -f "${ELASTIC_PACKAGE_CONFIG_FILE}" ]]; then + return 0 + fi + if [[ -n "${PREV_REGISTRY_URL}" ]]; then + yq eval --inplace ".package_registry.base_url = \"${PREV_REGISTRY_URL}\"" "${ELASTIC_PACKAGE_CONFIG_FILE}" || true + else + yq eval --inplace 'del(.package_registry.base_url)' "${ELASTIC_PACKAGE_CONFIG_FILE}" || true + fi +} + cleanup() { local r=$? if [ "${r}" -ne 0 ]; then @@ -10,6 +29,8 @@ cleanup() { fi echo "~~~ elastic-package cleanup" + restore_package_registry_config + # Dump stack logs # Required containers could not be running, so ignore the error elastic-package stack dump -v --output build/elastic-stack-dump/build-zip || true @@ -32,6 +53,7 @@ testype() { } OLDPWD=$PWD + # Build packages export ELASTIC_PACKAGE_SIGNER_PRIVATE_KEYFILE="$OLDPWD/scripts/gpg-private.asc" ELASTIC_PACKAGE_SIGNER_PASSPHRASE=$(cat "$OLDPWD/scripts/gpg-pass.txt") @@ -41,6 +63,8 @@ export ELASTIC_PACKAGE_LINKS_FILE_PATH go run ./scripts/gpgkey +# Composable integration: requires ci_input_pkg from the registry. It is built in a +# second phase after the stack is up and package_registry.base_url points at the local EPR. for d in test/packages/*/*/; do # Added set +x in a sub-shell to avoid printing the testype command in the output # This helps to keep the CI output cleaner @@ -49,6 +73,10 @@ for d in test/packages/*/*/; do if [ "${packageTestType}" == "false_positives" ]; then continue fi + if [[ "${d}" == "${COMPOSABLE_INTEGRATION_DIR}" ]]; then + echo "--- Skipping composable integration (phase-2 build after stack is up): ${d}" + continue + fi echo "--- Building package: ${d}" elastic-package build -C "$d" --zip --sign -v done @@ -62,6 +90,20 @@ elastic-package stack up -d -v eval "$(elastic-package stack shellinit)" +# Point elastic-package build at the stack's local package registry so phase-2 can +# download required input packages (see docs/howto/local_package_registry.md). 
+if [[ -f "${ELASTIC_PACKAGE_CONFIG_FILE}" ]]; then + PREV_REGISTRY_URL=$(yq '.package_registry.base_url // ""' "${ELASTIC_PACKAGE_CONFIG_FILE}") + yq eval --inplace '.package_registry.base_url = "https://127.0.0.1:8080"' "${ELASTIC_PACKAGE_CONFIG_FILE}" +else + mkdir -p "$(dirname "${ELASTIC_PACKAGE_CONFIG_FILE}")" + yq -n '.package_registry.base_url = "https://127.0.0.1:8080"' > "${ELASTIC_PACKAGE_CONFIG_FILE}" +fi +PACKAGE_REGISTRY_CI_OVERRIDE=1 + +echo "--- Phase-2 build: composable integration (requires local registry)" +elastic-package build -C "${COMPOSABLE_INTEGRATION_DIR}" --zip --sign -v + # Install packages from working copy for d in test/packages/*/*/; do # Added set +x in a sub-shell to avoid printing the testype command in the output diff --git a/test/manual_packages/README.md b/test/manual_packages/README.md index 94c7c3ed35..5176561a46 100644 --- a/test/manual_packages/README.md +++ b/test/manual_packages/README.md @@ -1,76 +1,37 @@ -# Manual Test Packages +# Manual test packages -Packages under `test/manual_packages/` are **not** picked up by CI build/install scripts (which glob `test/packages/*/*/`). They require manual setup to exercise. +Packages under `test/manual_packages/` are **not** picked up by CI’s main package glob beyond what each script includes. They are for **manual** workflows and **targeted** `go test` cases. -All **`requires.input`** fixtures live under [`test/manual_packages/required_inputs/`](required_inputs/). The same trees are used as fixtures by `go test` in [`internal/requiredinputs/variables_test.go`](../../internal/requiredinputs/variables_test.go) (variable merge) and [`internal/requiredinputs/fields_test.go`](../../internal/requiredinputs/fields_test.go) (field bundling). 
+## CI composable coverage -## required_inputs +End-to-end composable integration coverage (`requires.input`, local registry, build + install) lives under: -### Template bundling (smoke) +- [`test/packages/composable/01_ci_input_pkg/`](../packages/composable/01_ci_input_pkg/) — `type: input` dependency +- [`test/packages/composable/02_ci_composable_integration/`](../packages/composable/02_ci_composable_integration/) — `type: integration` built in **phase 2** by [`scripts/test-build-install-zip.sh`](../../scripts/test-build-install-zip.sh) after `stack up`, with `package_registry.base_url` set to `https://127.0.0.1:8080` -- `required_inputs/test_input_pkg` — the input package that must be installed first. -- `required_inputs/with_input_package_requires` — an integration package that declares a dependency on `test_input_pkg`. -- `required_inputs/with_linked_template_path` — same as above, but the integration-owned policy input template is supplied via `agent/input/owned.hbs.link` (see `docs/howto/dependency_management.md` for `template_path` vs `.link` naming). +`internal/requiredinputs` integration tests copy those same directories (see `ciInputFixturePath`, `copyComposableIntegrationFixture` in [`variables_test.go`](../../internal/requiredinputs/variables_test.go)). -### Variable merge (composable input vars) +## `required_inputs` (manual / edge) -When an integration lists `requires.input` and its policy template references that input package with optional `vars`, elastic-package **merges** variable definitions from the input package into the built manifests (see [`internal/requiredinputs/variables.go`](../../internal/requiredinputs/variables.go) — `mergeVariables`). 
+Remaining trees under [`required_inputs/`](required_inputs/) exercise **narrow** variable-merge and template cases and are **not** required for the composable CI zip job: | Package | Role | | --- | --- | -| `required_inputs/var_merging_input_pkg` | Required input package (`paths`, `encoding`, `timeout`). | -| `required_inputs/with_merging_full` | Promoted `paths` + `encoding`; DS merge for `timeout` + novel `custom_tag`. | | `required_inputs/with_merging_promotes_to_input` | Only `paths` promoted; DS keeps `encoding`, `timeout`. | -| `required_inputs/with_merging_ds_merges` | No promotion; DS merges `encoding` title + adds `custom_tag`. | -| `required_inputs/with_merging_no_override` | No composable overrides; all base vars on DS, unchanged. | -| `required_inputs/with_merging_two_policy_templates` | Two PTs on the same input pkg: one promotes `paths` for its DS only; the other leaves all vars on the DS (`TestMergeVariables_TwoPolicyTemplatesScopedPromotion`). | -| `required_inputs/with_merging_duplicate_error` | Invalid: duplicate `paths` at DS level; **build should fail** with an error mentioning `paths`. | +| `required_inputs/with_merging_ds_merges` | No PT var overrides; DS merges `encoding` title + `custom_tag`. | +| `required_inputs/with_merging_no_override` | No composable overrides; all base vars on DS. | +| `required_inputs/with_merging_two_policy_templates` | Two PTs, scoped promotion on one. | +| `required_inputs/with_merging_duplicate_error` | Invalid duplicate `paths` on DS — **build must fail** (not in CI zip loop). | +| `required_inputs/with_linked_template_path` | Composable + policy `template_path` via `.link` (see [`dependency_management.md`](../../docs/howto/dependency_management.md)). | -### Field bundling +All of these depend on **`ci_input_pkg`** from [`test/packages/composable/01_ci_input_pkg/`](../packages/composable/01_ci_input_pkg/) (see each package’s `_dev/test/config.yml` `requires` stub). 
-| Package | Role | -| --- | --- | -| `required_inputs/fields_input_pkg` | Required input package supplying field definitions. | -| `required_inputs/with_field_bundling` | Integration that requires `fields_input_pkg`; exercises merging field defs into the built data stream. | - -Build `fields_input_pkg` before `with_field_bundling`. See `TestBundleDataStreamFields_*` in [`fields_test.go`](../../internal/requiredinputs/fields_test.go). - -### Stream and input `package:` resolution - -After templates, variables, and fields are applied, the build replaces `package: ` on policy template inputs and data stream streams with the real input **type** from that required input package (implementation in [`internal/requiredinputs/streamdefs.go`](../../internal/requiredinputs/streamdefs.go)). - -### Manual testing workflow - -1. Start the stack and local package registry: - ```bash - elastic-package stack up -d - ``` -2. Configure `package_registry.base_url` in `~/.elastic-package/config.yml` so builds can resolve required input packages (see [local package registry how-to](../../docs/howto/local_package_registry.md) and the root [README](../../README.md) `package_registry` section). -3. Build and install in **dependency order** (input packages before integrations that require them). 
Examples: - - Template bundling smoke: - ```bash - elastic-package build -C test/manual_packages/required_inputs/test_input_pkg --zip - elastic-package build -C test/manual_packages/required_inputs/with_input_package_requires --zip - elastic-package build -C test/manual_packages/required_inputs/with_linked_template_path --zip - ``` - - Variable merge (build `var_merging_input_pkg` first, install it, then build the integration you need): - ```bash - elastic-package build -C test/manual_packages/required_inputs/var_merging_input_pkg --zip - elastic-package build -C test/manual_packages/required_inputs/with_merging_full --zip - ``` - - Field bundling (build `fields_input_pkg` first, then the integration): - ```bash - elastic-package build -C test/manual_packages/required_inputs/fields_input_pkg --zip - elastic-package build -C test/manual_packages/required_inputs/with_field_bundling --zip - ``` - -4. Install via the local registry in the same order (e.g. `test_input_pkg` before `with_input_package_requires`; `var_merging_input_pkg` before any `with_merging_*` integration; `fields_input_pkg` before `with_field_bundling`). +### Manual workflow -For **expected merged manifests** after a successful variable-merge build, see `TestMergeVariables_*` in [`variables_test.go`](../../internal/requiredinputs/variables_test.go). For `with_merging_duplicate_error`, expect `elastic-package build` to fail and the error to contain `paths`. +1. `elastic-package stack up -d` +2. Set `package_registry.base_url` in `~/.elastic-package/config.yml` to `https://127.0.0.1:8080` (see [local package registry how-to](../../docs/howto/local_package_registry.md)). +3. Build and install `01_ci_input_pkg` before any integration that lists `requires.input` for it, then build the integration. 
-### When composable inputs are fully supported in CI +### Expected errors -Move `required_inputs/` under `test/packages/required_inputs/` so [`scripts/test-build-install-zip.sh`](../../scripts/test-build-install-zip.sh) can build and install them automatically (install order is lexicographic, so `var_merging_input_pkg` is installed before `with_merging_*`). Update [`internal/requiredinputs/variables_test.go`](../../internal/requiredinputs/variables_test.go) fixture paths to match. +For `with_merging_duplicate_error`, `elastic-package build` should fail with an error mentioning `paths`. diff --git a/test/manual_packages/required_inputs/fields_input_pkg/docs/README.md b/test/manual_packages/required_inputs/fields_input_pkg/docs/README.md deleted file mode 100644 index 1c1576a01b..0000000000 --- a/test/manual_packages/required_inputs/fields_input_pkg/docs/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Fields Input Package - -Input package used as a test fixture for field bundling tests. diff --git a/test/manual_packages/required_inputs/fields_input_pkg/manifest.yml b/test/manual_packages/required_inputs/fields_input_pkg/manifest.yml deleted file mode 100644 index bf4502f107..0000000000 --- a/test/manual_packages/required_inputs/fields_input_pkg/manifest.yml +++ /dev/null @@ -1,32 +0,0 @@ -format_version: 3.6.0 -name: fields_input_pkg -title: Fields Input Package -description: Input package used as a test fixture for field bundling. -version: 0.1.0 -type: input -categories: - - custom -conditions: - kibana: - version: "^8.0.0" - elastic: - subscription: basic -policy_templates: - - name: field_bundling - type: logs - title: Field Bundling - description: Collect logs with field bundling. 
- input: logfile - template_path: input.yml.hbs - vars: - - name: paths - type: text - title: Paths - multi: true - required: true - show_user: true - default: - - /var/log/*.log -owner: - github: elastic/integrations - type: elastic diff --git a/test/manual_packages/required_inputs/test_input_pkg/agent/input/input.yml.hbs b/test/manual_packages/required_inputs/test_input_pkg/agent/input/input.yml.hbs deleted file mode 100644 index 9e9c27a8c0..0000000000 --- a/test/manual_packages/required_inputs/test_input_pkg/agent/input/input.yml.hbs +++ /dev/null @@ -1,4 +0,0 @@ -paths: -{{#each paths}} - - {{this}} -{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/test_input_pkg/changelog.yml b/test/manual_packages/required_inputs/test_input_pkg/changelog.yml deleted file mode 100644 index 0f9966a2de..0000000000 --- a/test/manual_packages/required_inputs/test_input_pkg/changelog.yml +++ /dev/null @@ -1,5 +0,0 @@ -- version: 0.1.0 - changes: - - description: Initial release. - type: enhancement - link: https://github.com/elastic/elastic-package/issues/3278 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/test_input_pkg/docs/README.md b/test/manual_packages/required_inputs/test_input_pkg/docs/README.md deleted file mode 100644 index 5fa7854175..0000000000 --- a/test/manual_packages/required_inputs/test_input_pkg/docs/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Test Input Package - -This is a test fixture package used to verify template bundling during build. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/test_input_pkg/fields/base-fields.yml b/test/manual_packages/required_inputs/test_input_pkg/fields/base-fields.yml deleted file mode 100644 index d3b0f5a163..0000000000 --- a/test/manual_packages/required_inputs/test_input_pkg/fields/base-fields.yml +++ /dev/null @@ -1,12 +0,0 @@ -- name: data_stream.type - type: constant_keyword - description: Data stream type. 
-- name: data_stream.dataset - type: constant_keyword - description: Data stream dataset. -- name: data_stream.namespace - type: constant_keyword - description: Data stream namespace. -- name: "@timestamp" - type: date - description: Event timestamp. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/test_input_pkg/manifest.yml b/test/manual_packages/required_inputs/test_input_pkg/manifest.yml deleted file mode 100644 index 6a83dcc4bb..0000000000 --- a/test/manual_packages/required_inputs/test_input_pkg/manifest.yml +++ /dev/null @@ -1,34 +0,0 @@ -format_version: 3.6.0 -name: test_input_pkg -title: Test Input Package -description: Input package used as a test fixture for template bundling. -version: 0.1.0 -type: input -categories: - - custom -conditions: - kibana: - version: "^8.0.0" - elastic: - subscription: basic -policy_templates: - - name: test_input - type: logs - title: Test Input - description: Collect test logs with a custom input template. - input: logfile - template_paths: - - input.yml.hbs - - extra.yml.hbs - vars: - - name: paths - type: text - title: Paths - multi: true - required: true - show_user: true - default: - - /var/log/*.log -owner: - github: elastic/integrations - type: elastic \ No newline at end of file diff --git a/test/manual_packages/required_inputs/var_merging_input_pkg/agent/input/input.yml.hbs b/test/manual_packages/required_inputs/var_merging_input_pkg/agent/input/input.yml.hbs deleted file mode 100644 index 9e9c27a8c0..0000000000 --- a/test/manual_packages/required_inputs/var_merging_input_pkg/agent/input/input.yml.hbs +++ /dev/null @@ -1,4 +0,0 @@ -paths: -{{#each paths}} - - {{this}} -{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/var_merging_input_pkg/changelog.yml b/test/manual_packages/required_inputs/var_merging_input_pkg/changelog.yml deleted file mode 100644 index fb3f5f7235..0000000000 --- 
a/test/manual_packages/required_inputs/var_merging_input_pkg/changelog.yml +++ /dev/null @@ -1,5 +0,0 @@ -- version: 0.1.0 - changes: - - description: Initial release. - type: enhancement - link: https://github.com/elastic/elastic-package/issues/1 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/var_merging_input_pkg/docs/README.md b/test/manual_packages/required_inputs/var_merging_input_pkg/docs/README.md deleted file mode 100644 index 894e4fe149..0000000000 --- a/test/manual_packages/required_inputs/var_merging_input_pkg/docs/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Var Merging Input Package - -Input package used as a test fixture for variable merging tests. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/var_merging_input_pkg/fields/base-fields.yml b/test/manual_packages/required_inputs/var_merging_input_pkg/fields/base-fields.yml deleted file mode 100644 index d3b0f5a163..0000000000 --- a/test/manual_packages/required_inputs/var_merging_input_pkg/fields/base-fields.yml +++ /dev/null @@ -1,12 +0,0 @@ -- name: data_stream.type - type: constant_keyword - description: Data stream type. -- name: data_stream.dataset - type: constant_keyword - description: Data stream dataset. -- name: data_stream.namespace - type: constant_keyword - description: Data stream namespace. -- name: "@timestamp" - type: date - description: Event timestamp. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_field_bundling/data_stream/field_logs/manifest.yml b/test/manual_packages/required_inputs/with_field_bundling/data_stream/field_logs/manifest.yml deleted file mode 100644 index 826c7c676f..0000000000 --- a/test/manual_packages/required_inputs/with_field_bundling/data_stream/field_logs/manifest.yml +++ /dev/null @@ -1,6 +0,0 @@ -title: Field Logs -type: logs -streams: - - package: fields_input_pkg - title: Field Logs from Input Package - description: Collect field logs using the referenced input package. diff --git a/test/manual_packages/required_inputs/with_field_bundling/docs/README.md b/test/manual_packages/required_inputs/with_field_bundling/docs/README.md deleted file mode 100644 index 87332b2fa6..0000000000 --- a/test/manual_packages/required_inputs/with_field_bundling/docs/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Integration With Field Bundling - -Integration package that requires an input package, used to test field bundling. diff --git a/test/manual_packages/required_inputs/with_field_bundling/manifest.yml b/test/manual_packages/required_inputs/with_field_bundling/manifest.yml deleted file mode 100644 index 487ea2e57a..0000000000 --- a/test/manual_packages/required_inputs/with_field_bundling/manifest.yml +++ /dev/null @@ -1,33 +0,0 @@ -format_version: 3.6.0 -name: with_field_bundling -title: Integration With Field Bundling -description: >- - Integration package that requires an input package, used to test field bundling. - The input package defines additional fields (message, log.level) that are not - present in the integration's data stream and should be bundled in. 
-version: 0.1.0 -type: integration -categories: - - custom -conditions: - kibana: - version: "^8.0.0" - elastic: - subscription: basic -requires: - input: - - package: fields_input_pkg - version: "0.1.0" -policy_templates: - - name: field_logs - title: Field Logs - description: Collect logs via field bundling input package - data_streams: - - field_logs - inputs: - - package: fields_input_pkg - title: Collect logs via field bundling input package - description: Use the field bundling input package to collect logs -owner: - github: elastic/integrations - type: elastic diff --git a/test/manual_packages/required_inputs/with_input_package_requires/_dev/test/config.yml b/test/manual_packages/required_inputs/with_input_package_requires/_dev/test/config.yml deleted file mode 100644 index c4e73f3a8d..0000000000 --- a/test/manual_packages/required_inputs/with_input_package_requires/_dev/test/config.yml +++ /dev/null @@ -1,3 +0,0 @@ -requires: - - package: test_input_pkg - source: "../../test_input_pkg" diff --git a/test/manual_packages/required_inputs/with_input_package_requires/changelog.yml b/test/manual_packages/required_inputs/with_input_package_requires/changelog.yml deleted file mode 100644 index 0f9966a2de..0000000000 --- a/test/manual_packages/required_inputs/with_input_package_requires/changelog.yml +++ /dev/null @@ -1,5 +0,0 @@ -- version: 0.1.0 - changes: - - description: Initial release. 
- type: enhancement - link: https://github.com/elastic/elastic-package/issues/3278 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/agent/stream/stream.yml.hbs b/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/agent/stream/stream.yml.hbs deleted file mode 100644 index 9e9c27a8c0..0000000000 --- a/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/agent/stream/stream.yml.hbs +++ /dev/null @@ -1,4 +0,0 @@ -paths: -{{#each paths}} - - {{this}} -{{/each}} \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/fields/base-fields.yml deleted file mode 100644 index d3b0f5a163..0000000000 --- a/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/fields/base-fields.yml +++ /dev/null @@ -1,12 +0,0 @@ -- name: data_stream.type - type: constant_keyword - description: Data stream type. -- name: data_stream.dataset - type: constant_keyword - description: Data stream dataset. -- name: data_stream.namespace - type: constant_keyword - description: Data stream namespace. -- name: "@timestamp" - type: date - description: Event timestamp. 
\ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/manifest.yml b/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/manifest.yml deleted file mode 100644 index 4e198906eb..0000000000 --- a/test/manual_packages/required_inputs/with_input_package_requires/data_stream/test_logs/manifest.yml +++ /dev/null @@ -1,19 +0,0 @@ -title: Test Logs -type: logs -streams: - - package: test_input_pkg - title: Test Logs from Input Package - description: Collect test logs using the referenced input package. - - input: logs - title: Test Logs - description: Collect test logs using the logs input. - template_path: stream.yml.hbs - vars: - - name: paths - type: text - title: Paths - multi: true - required: true - show_user: true - default: - - /var/log/test/*.log \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_input_package_requires/docs/README.md b/test/manual_packages/required_inputs/with_input_package_requires/docs/README.md deleted file mode 100644 index 22de7c28e4..0000000000 --- a/test/manual_packages/required_inputs/with_input_package_requires/docs/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# Integration With Required Input Package - -This is a test fixture integration package that demonstrates template bundling -from a required input package. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_input_package_requires/manifest.yml b/test/manual_packages/required_inputs/with_input_package_requires/manifest.yml deleted file mode 100644 index 7a6ad33229..0000000000 --- a/test/manual_packages/required_inputs/with_input_package_requires/manifest.yml +++ /dev/null @@ -1,34 +0,0 @@ -format_version: 3.6.0 -name: with_input_package_requires -title: Integration With Required Input Package -description: >- - Integration package that requires an input package, used to test template bundling. 
-version: 0.1.0 -type: integration -categories: - - custom -conditions: - kibana: - version: "^8.0.0" - elastic: - subscription: basic -requires: - input: - - package: test_input_pkg - version: "0.1.0" -policy_templates: - - name: test_logs - title: Test logs - description: Collect test logs - data_streams: - - test_logs - inputs: - - package: test_input_pkg - title: Collect test logs via input package - description: Use the test input package to collect logs - - type: logs - title: Collect test logs via logs input - description: Use the logs input to collect logs -owner: - github: elastic/integrations - type: elastic \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_linked_template_path/_dev/test/config.yml b/test/manual_packages/required_inputs/with_linked_template_path/_dev/test/config.yml index c4e73f3a8d..109cb53c2e 100644 --- a/test/manual_packages/required_inputs/with_linked_template_path/_dev/test/config.yml +++ b/test/manual_packages/required_inputs/with_linked_template_path/_dev/test/config.yml @@ -1,3 +1,3 @@ requires: - - package: test_input_pkg - source: "../../test_input_pkg" + - package: ci_input_pkg + source: "../../../../packages/composable/01_ci_input_pkg" diff --git a/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/manifest.yml b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/manifest.yml index 4e198906eb..4a7f03cf46 100644 --- a/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/manifest.yml +++ b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/manifest.yml @@ -1,7 +1,7 @@ title: Test Logs type: logs streams: - - package: test_input_pkg + - package: ci_input_pkg title: Test Logs from Input Package description: Collect test logs using the referenced input package. 
- input: logs diff --git a/test/manual_packages/required_inputs/with_linked_template_path/manifest.yml b/test/manual_packages/required_inputs/with_linked_template_path/manifest.yml index c6203d6c7b..8dc5c5551e 100644 --- a/test/manual_packages/required_inputs/with_linked_template_path/manifest.yml +++ b/test/manual_packages/required_inputs/with_linked_template_path/manifest.yml @@ -16,7 +16,7 @@ conditions: subscription: basic requires: input: - - package: test_input_pkg + - package: ci_input_pkg version: "0.1.0" policy_templates: - name: test_logs @@ -25,7 +25,7 @@ policy_templates: data_streams: - test_logs inputs: - - package: test_input_pkg + - package: ci_input_pkg title: Collect test logs via input package description: Use the test input package to collect logs template_path: owned.hbs diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_ds_merges/_dev/test/config.yml index bbb3460521..7a2d655a23 100644 --- a/test/manual_packages/required_inputs/with_merging_ds_merges/_dev/test/config.yml +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/_dev/test/config.yml @@ -1,3 +1,3 @@ requires: - - package: var_merging_input_pkg - source: "../../var_merging_input_pkg" \ No newline at end of file + - package: ci_input_pkg + source: "../../../../packages/composable/01_ci_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/manifest.yml index 589b1e1604..0029f07bb4 100644 --- a/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/manifest.yml +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/data_stream/var_merging_logs/manifest.yml @@ -1,7 +1,7 @@ title: Var Merging Logs type: logs streams: - - package: var_merging_input_pkg 
+ - package: ci_input_pkg title: Var Merging Logs description: Collect logs using the var merging input package. vars: diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/manifest.yml b/test/manual_packages/required_inputs/with_merging_ds_merges/manifest.yml index 9ad6c0e1a8..ac227bf117 100644 --- a/test/manual_packages/required_inputs/with_merging_ds_merges/manifest.yml +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/manifest.yml @@ -16,7 +16,7 @@ conditions: subscription: basic requires: input: - - package: var_merging_input_pkg + - package: ci_input_pkg version: "0.1.0" policy_templates: - name: var_merging_logs @@ -25,7 +25,7 @@ policy_templates: data_streams: - var_merging_logs inputs: - - package: var_merging_input_pkg + - package: ci_input_pkg title: Collect logs via var merging input package description: Use the var merging input package to collect logs owner: diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_duplicate_error/_dev/test/config.yml index bbb3460521..7a2d655a23 100644 --- a/test/manual_packages/required_inputs/with_merging_duplicate_error/_dev/test/config.yml +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/_dev/test/config.yml @@ -1,3 +1,3 @@ requires: - - package: var_merging_input_pkg - source: "../../var_merging_input_pkg" \ No newline at end of file + - package: ci_input_pkg + source: "../../../../packages/composable/01_ci_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/manifest.yml index f7b06783dd..795111aabe 100644 --- a/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/manifest.yml +++ 
b/test/manual_packages/required_inputs/with_merging_duplicate_error/data_stream/var_merging_logs/manifest.yml @@ -1,7 +1,7 @@ title: Var Merging Logs type: logs streams: - - package: var_merging_input_pkg + - package: ci_input_pkg title: Var Merging Logs description: Collect logs using the var merging input package. vars: diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/manifest.yml b/test/manual_packages/required_inputs/with_merging_duplicate_error/manifest.yml index 64221efbaa..21706052e5 100644 --- a/test/manual_packages/required_inputs/with_merging_duplicate_error/manifest.yml +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/manifest.yml @@ -16,7 +16,7 @@ conditions: subscription: basic requires: input: - - package: var_merging_input_pkg + - package: ci_input_pkg version: "0.1.0" policy_templates: - name: var_merging_logs @@ -25,7 +25,7 @@ policy_templates: data_streams: - var_merging_logs inputs: - - package: var_merging_input_pkg + - package: ci_input_pkg title: Collect logs via var merging input package description: Use the var merging input package to collect logs owner: diff --git a/test/manual_packages/required_inputs/with_merging_full/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_full/_dev/test/config.yml deleted file mode 100644 index bbb3460521..0000000000 --- a/test/manual_packages/required_inputs/with_merging_full/_dev/test/config.yml +++ /dev/null @@ -1,3 +0,0 @@ -requires: - - package: var_merging_input_pkg - source: "../../var_merging_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_full/changelog.yml b/test/manual_packages/required_inputs/with_merging_full/changelog.yml deleted file mode 100644 index fb3f5f7235..0000000000 --- a/test/manual_packages/required_inputs/with_merging_full/changelog.yml +++ /dev/null @@ -1,5 +0,0 @@ -- version: 0.1.0 - changes: - - description: Initial release. 
- type: enhancement - link: https://github.com/elastic/elastic-package/issues/1 \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/fields/base-fields.yml b/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/fields/base-fields.yml deleted file mode 100644 index d3b0f5a163..0000000000 --- a/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/fields/base-fields.yml +++ /dev/null @@ -1,12 +0,0 @@ -- name: data_stream.type - type: constant_keyword - description: Data stream type. -- name: data_stream.dataset - type: constant_keyword - description: Data stream dataset. -- name: data_stream.namespace - type: constant_keyword - description: Data stream namespace. -- name: "@timestamp" - type: date - description: Event timestamp. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/manifest.yml deleted file mode 100644 index d3a6f017b0..0000000000 --- a/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/manifest.yml +++ /dev/null @@ -1,13 +0,0 @@ -title: Var Merging Logs -type: logs -streams: - - package: var_merging_input_pkg - title: Var Merging Logs - description: Collect logs using the var merging input package. - vars: - - name: timeout - description: Timeout for log collection. 
- - name: custom_tag - type: text - title: Custom Tag - show_user: true \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_full/docs/README.md b/test/manual_packages/required_inputs/with_merging_full/docs/README.md deleted file mode 100644 index 2900be833f..0000000000 --- a/test/manual_packages/required_inputs/with_merging_full/docs/README.md +++ /dev/null @@ -1,20 +0,0 @@ -# Variable Merging - Full Mix - -Test fixture: composable package that exercises all five variable merging steps -from SPEC.md simultaneously. - -Policy template input vars (Step 2 → Step 3 promotion): -- "paths" override with new default → promoted to input variable -- "encoding" override with show_user:true → promoted to input variable - -Data stream manifest vars (Step 4 merge): -- "timeout" override with new description → merged with remaining DS variable -- "custom_tag" new variable → added to DS variables - -Expected result after merging: -- Input variables: - - paths (merged: base from input pkg, default overridden to /var/log/custom/*.log) - - encoding (merged: base from input pkg, show_user overridden to true) -- Data stream variables: - - timeout (merged: base from input pkg, description overridden) - - custom_tag (new, from data stream manifest) \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_full/manifest.yml b/test/manual_packages/required_inputs/with_merging_full/manifest.yml deleted file mode 100644 index 6b617324f5..0000000000 --- a/test/manual_packages/required_inputs/with_merging_full/manifest.yml +++ /dev/null @@ -1,40 +0,0 @@ -format_version: 3.6.0 -name: with_merging_full -title: Variable Merging - Full Mix -description: >- - Composable package exercising all variable merging steps. The policy template - overrides "paths" and "encoding" (both promoted to input variables). 
The data - stream manifest overrides "timeout" (merged with the remaining data stream - variable) and adds "custom_tag" (new data stream variable). -version: 0.1.0 -type: integration -categories: - - custom -conditions: - kibana: - version: "^8.0.0" - elastic: - subscription: basic -requires: - input: - - package: var_merging_input_pkg - version: "0.1.0" -policy_templates: - - name: var_merging_logs - title: Var Merging Logs - description: Collect logs via var merging input package - data_streams: - - var_merging_logs - inputs: - - package: var_merging_input_pkg - title: Collect logs via var merging input package - description: Use the var merging input package to collect logs - vars: - - name: paths - default: - - /var/log/custom/*.log - - name: encoding - show_user: true -owner: - github: elastic/integrations - type: elastic \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_no_override/_dev/test/config.yml index bbb3460521..7a2d655a23 100644 --- a/test/manual_packages/required_inputs/with_merging_no_override/_dev/test/config.yml +++ b/test/manual_packages/required_inputs/with_merging_no_override/_dev/test/config.yml @@ -1,3 +1,3 @@ requires: - - package: var_merging_input_pkg - source: "../../var_merging_input_pkg" \ No newline at end of file + - package: ci_input_pkg + source: "../../../../packages/composable/01_ci_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/manifest.yml index 2026cd129c..45bb39425b 100644 --- a/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/manifest.yml +++ b/test/manual_packages/required_inputs/with_merging_no_override/data_stream/var_merging_logs/manifest.yml @@ -1,6 +1,6 @@ 
title: Var Merging Logs type: logs streams: - - package: var_merging_input_pkg + - package: ci_input_pkg title: Var Merging Logs description: Collect logs using the var merging input package. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/manifest.yml b/test/manual_packages/required_inputs/with_merging_no_override/manifest.yml index ebc0de82ac..9d87b0f12d 100644 --- a/test/manual_packages/required_inputs/with_merging_no_override/manifest.yml +++ b/test/manual_packages/required_inputs/with_merging_no_override/manifest.yml @@ -15,7 +15,7 @@ conditions: subscription: basic requires: input: - - package: var_merging_input_pkg + - package: ci_input_pkg version: "0.1.0" policy_templates: - name: var_merging_logs @@ -24,7 +24,7 @@ policy_templates: data_streams: - var_merging_logs inputs: - - package: var_merging_input_pkg + - package: ci_input_pkg title: Collect logs via var merging input package description: Use the var merging input package to collect logs owner: diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_promotes_to_input/_dev/test/config.yml index bbb3460521..7a2d655a23 100644 --- a/test/manual_packages/required_inputs/with_merging_promotes_to_input/_dev/test/config.yml +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/_dev/test/config.yml @@ -1,3 +1,3 @@ requires: - - package: var_merging_input_pkg - source: "../../var_merging_input_pkg" \ No newline at end of file + - package: ci_input_pkg + source: "../../../../packages/composable/01_ci_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/manifest.yml index 2026cd129c..45bb39425b 100644 --- 
a/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/manifest.yml +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/data_stream/var_merging_logs/manifest.yml @@ -1,6 +1,6 @@ title: Var Merging Logs type: logs streams: - - package: var_merging_input_pkg + - package: ci_input_pkg title: Var Merging Logs description: Collect logs using the var merging input package. \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/manifest.yml b/test/manual_packages/required_inputs/with_merging_promotes_to_input/manifest.yml index c02a62f75d..31eae327c0 100644 --- a/test/manual_packages/required_inputs/with_merging_promotes_to_input/manifest.yml +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/manifest.yml @@ -17,7 +17,7 @@ conditions: subscription: basic requires: input: - - package: var_merging_input_pkg + - package: ci_input_pkg version: "0.1.0" policy_templates: - name: var_merging_logs @@ -26,7 +26,7 @@ policy_templates: data_streams: - var_merging_logs inputs: - - package: var_merging_input_pkg + - package: ci_input_pkg title: Collect logs via var merging input package description: Use the var merging input package to collect logs vars: diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/_dev/test/config.yml index e958a08627..109cb53c2e 100644 --- a/test/manual_packages/required_inputs/with_merging_two_policy_templates/_dev/test/config.yml +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/_dev/test/config.yml @@ -1,3 +1,3 @@ requires: - - package: var_merging_input_pkg - source: "../../var_merging_input_pkg" + - package: ci_input_pkg + source: "../../../../packages/composable/01_ci_input_pkg" diff --git 
a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/manifest.yml index d1f0fb147d..468cbe683f 100644 --- a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/manifest.yml +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/alpha_logs/manifest.yml @@ -1,6 +1,6 @@ title: Alpha logs type: logs streams: - - package: var_merging_input_pkg + - package: ci_input_pkg title: Alpha logs via input package description: Collect alpha logs using the var merging input package. diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/manifest.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/manifest.yml index 85d68a6e89..b76005594d 100644 --- a/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/manifest.yml +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/data_stream/beta_logs/manifest.yml @@ -1,6 +1,6 @@ title: Beta logs type: logs streams: - - package: var_merging_input_pkg + - package: ci_input_pkg title: Beta logs via input package description: Collect beta logs using the var merging input package. 
diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/manifest.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/manifest.yml index 4b8b7a78e2..91031fdca6 100644 --- a/test/manual_packages/required_inputs/with_merging_two_policy_templates/manifest.yml +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/manifest.yml @@ -17,7 +17,7 @@ conditions: subscription: basic requires: input: - - package: var_merging_input_pkg + - package: ci_input_pkg version: "0.1.0" policy_templates: - name: pt_alpha @@ -26,7 +26,7 @@ policy_templates: data_streams: - alpha_logs inputs: - - package: var_merging_input_pkg + - package: ci_input_pkg title: Collect via var merging input (alpha) description: Alpha stream promotes paths to input-level vars vars: @@ -41,7 +41,7 @@ policy_templates: data_streams: - beta_logs inputs: - - package: var_merging_input_pkg + - package: ci_input_pkg title: Collect via var merging input (beta) description: Beta stream keeps all vars at data-stream level - type: logs diff --git a/test/packages/composable/01_ci_input_pkg/_dev/test/config.yml b/test/packages/composable/01_ci_input_pkg/_dev/test/config.yml new file mode 100644 index 0000000000..a57750f85e --- /dev/null +++ b/test/packages/composable/01_ci_input_pkg/_dev/test/config.yml @@ -0,0 +1,2 @@ +system: + parallel: true diff --git a/test/manual_packages/required_inputs/test_input_pkg/agent/input/extra.yml.hbs b/test/packages/composable/01_ci_input_pkg/agent/input/extra.yml.hbs similarity index 57% rename from test/manual_packages/required_inputs/test_input_pkg/agent/input/extra.yml.hbs rename to test/packages/composable/01_ci_input_pkg/agent/input/extra.yml.hbs index c51c9f1721..1e1745752b 100644 --- a/test/manual_packages/required_inputs/test_input_pkg/agent/input/extra.yml.hbs +++ b/test/packages/composable/01_ci_input_pkg/agent/input/extra.yml.hbs @@ -1,2 +1,2 @@ exclude_files: - - ".gz$" \ No newline at end of file + - 
".gz$" diff --git a/test/manual_packages/required_inputs/fields_input_pkg/agent/input/input.yml.hbs b/test/packages/composable/01_ci_input_pkg/agent/input/input.yml.hbs similarity index 78% rename from test/manual_packages/required_inputs/fields_input_pkg/agent/input/input.yml.hbs rename to test/packages/composable/01_ci_input_pkg/agent/input/input.yml.hbs index 9e9c27a8c0..9390bc05cb 100644 --- a/test/manual_packages/required_inputs/fields_input_pkg/agent/input/input.yml.hbs +++ b/test/packages/composable/01_ci_input_pkg/agent/input/input.yml.hbs @@ -1,4 +1,4 @@ paths: {{#each paths}} - {{this}} -{{/each}} \ No newline at end of file +{{/each}} diff --git a/test/manual_packages/required_inputs/fields_input_pkg/changelog.yml b/test/packages/composable/01_ci_input_pkg/changelog.yml similarity index 64% rename from test/manual_packages/required_inputs/fields_input_pkg/changelog.yml rename to test/packages/composable/01_ci_input_pkg/changelog.yml index 813cf1cf77..551e05b619 100644 --- a/test/manual_packages/required_inputs/fields_input_pkg/changelog.yml +++ b/test/packages/composable/01_ci_input_pkg/changelog.yml @@ -1,5 +1,5 @@ - version: "0.1.0" changes: - - description: Initial release. + - description: Initial release for composable CI fixtures. type: enhancement link: https://github.com/elastic/elastic-package/pull/1 diff --git a/test/packages/composable/01_ci_input_pkg/docs/README.md b/test/packages/composable/01_ci_input_pkg/docs/README.md new file mode 100644 index 0000000000..ff612a5def --- /dev/null +++ b/test/packages/composable/01_ci_input_pkg/docs/README.md @@ -0,0 +1,3 @@ +# CI composable input package + +`type: input` package consumed by [`02_ci_composable_integration`](../02_ci_composable_integration/). Used in CI (`test-build-install-zip.sh`) and `internal/requiredinputs` tests. Build the input package before the integration when using a local registry. 
diff --git a/test/manual_packages/required_inputs/fields_input_pkg/fields/base-fields.yml b/test/packages/composable/01_ci_input_pkg/fields/base-fields.yml similarity index 100% rename from test/manual_packages/required_inputs/fields_input_pkg/fields/base-fields.yml rename to test/packages/composable/01_ci_input_pkg/fields/base-fields.yml diff --git a/test/manual_packages/required_inputs/var_merging_input_pkg/manifest.yml b/test/packages/composable/01_ci_input_pkg/manifest.yml similarity index 64% rename from test/manual_packages/required_inputs/var_merging_input_pkg/manifest.yml rename to test/packages/composable/01_ci_input_pkg/manifest.yml index 0e315aa0de..3b623b2153 100644 --- a/test/manual_packages/required_inputs/var_merging_input_pkg/manifest.yml +++ b/test/packages/composable/01_ci_input_pkg/manifest.yml @@ -1,7 +1,9 @@ format_version: 3.6.0 -name: var_merging_input_pkg -title: Var Merging Input Package -description: Input package used as a test fixture for variable merging. +name: ci_input_pkg +title: CI Composable Input Package +description: >- + Input package for CI and tests: variable definitions, package-level fields, + and multiple agent templates for composable integration builds. version: 0.1.0 type: input categories: @@ -12,12 +14,14 @@ conditions: elastic: subscription: basic policy_templates: - - name: var_merging + - name: ci_input type: logs - title: Var Merging - description: Collect logs with multiple variables. + title: CI Input + description: Collect logs for composable CI and unit tests. 
input: logfile - template_path: input.yml.hbs + template_paths: + - input.yml.hbs + - extra.yml.hbs vars: - name: paths type: text @@ -42,4 +46,4 @@ policy_templates: default: 30s owner: github: elastic/integrations - type: elastic \ No newline at end of file + type: elastic diff --git a/test/packages/composable/02_ci_composable_integration/_dev/test/config.yml b/test/packages/composable/02_ci_composable_integration/_dev/test/config.yml new file mode 100644 index 0000000000..a57750f85e --- /dev/null +++ b/test/packages/composable/02_ci_composable_integration/_dev/test/config.yml @@ -0,0 +1,2 @@ +system: + parallel: true diff --git a/test/manual_packages/required_inputs/with_field_bundling/changelog.yml b/test/packages/composable/02_ci_composable_integration/changelog.yml similarity index 64% rename from test/manual_packages/required_inputs/with_field_bundling/changelog.yml rename to test/packages/composable/02_ci_composable_integration/changelog.yml index 813cf1cf77..551e05b619 100644 --- a/test/manual_packages/required_inputs/with_field_bundling/changelog.yml +++ b/test/packages/composable/02_ci_composable_integration/changelog.yml @@ -1,5 +1,5 @@ - version: "0.1.0" changes: - - description: Initial release. + - description: Initial release for composable CI fixtures. 
type: enhancement link: https://github.com/elastic/elastic-package/pull/1 diff --git a/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/agent/stream/stream.yml.hbs b/test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/agent/stream/stream.yml.hbs similarity index 78% rename from test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/agent/stream/stream.yml.hbs rename to test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/agent/stream/stream.yml.hbs index 9e9c27a8c0..9390bc05cb 100644 --- a/test/manual_packages/required_inputs/with_merging_full/data_stream/var_merging_logs/agent/stream/stream.yml.hbs +++ b/test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/agent/stream/stream.yml.hbs @@ -1,4 +1,4 @@ paths: {{#each paths}} - {{this}} -{{/each}} \ No newline at end of file +{{/each}} diff --git a/test/manual_packages/required_inputs/with_field_bundling/data_stream/field_logs/fields/base-fields.yml b/test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/fields/base-fields.yml similarity index 100% rename from test/manual_packages/required_inputs/with_field_bundling/data_stream/field_logs/fields/base-fields.yml rename to test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/fields/base-fields.yml diff --git a/test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml b/test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml new file mode 100644 index 0000000000..81423bd2bb --- /dev/null +++ b/test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml @@ -0,0 +1,26 @@ +title: CI composable logs +type: logs +streams: + - package: ci_input_pkg + title: Logs via CI input package + description: Stream referencing the required input 
package. + vars: + - name: timeout + description: Timeout for log collection. + - name: custom_tag + type: text + title: Custom Tag + show_user: true + - input: logs + title: Plain logs stream + description: Native logs stream without package reference. + template_path: stream.yml.hbs + vars: + - name: paths + type: text + title: Paths + multi: true + required: true + show_user: true + default: + - /var/log/ci/*.log diff --git a/test/packages/composable/02_ci_composable_integration/docs/README.md b/test/packages/composable/02_ci_composable_integration/docs/README.md new file mode 100644 index 0000000000..9fa3af404c --- /dev/null +++ b/test/packages/composable/02_ci_composable_integration/docs/README.md @@ -0,0 +1,7 @@ +# CI composable integration + +Declares `requires.input` on [`ci_input_pkg`](../01_ci_input_pkg/). After `elastic-package build` with the input package available from the registry, the built package includes merged variables, bundled input templates and fields, and resolved input types. + +**CI:** Built in a second phase by `scripts/test-build-install-zip.sh` after the stack is up and `package_registry.base_url` points at the local registry. + +**Manual:** Build `01_ci_input_pkg` first, start the stack, set `package_registry.base_url` to `https://127.0.0.1:8080`, then build this package. diff --git a/test/packages/composable/02_ci_composable_integration/manifest.yml b/test/packages/composable/02_ci_composable_integration/manifest.yml new file mode 100644 index 0000000000..df6a4e6595 --- /dev/null +++ b/test/packages/composable/02_ci_composable_integration/manifest.yml @@ -0,0 +1,42 @@ +format_version: 3.6.0 +name: ci_composable_integration +title: CI Composable Integration +description: >- + Integration package for CI and tests: requires ci_input_pkg, exercises + variable promotion and data stream merge, field bundling, template bundling, + and package-to-type resolution on inputs and streams. 
+version: 0.1.0 +type: integration +categories: + - custom +conditions: + kibana: + version: "^8.0.0" + elastic: + subscription: basic +requires: + input: + - package: ci_input_pkg + version: "0.1.0" +policy_templates: + - name: ci_composable_logs + title: CI composable logs + description: Collect logs via required input package and native logs stream + data_streams: + - ci_composable_logs + inputs: + - package: ci_input_pkg + title: Collect via CI input package + description: Required input package for composable build + vars: + - name: paths + default: + - /var/log/custom/*.log + - name: encoding + show_user: true + - type: logs + title: Native logs input + description: Plain logs input alongside the package stream +owner: + github: elastic/integrations + type: elastic From 34f2f176bd5dc754d5978b666a39ebd8052ee41a Mon Sep 17 00:00:00 2001 From: Tere Date: Tue, 14 Apr 2026 15:45:15 +0200 Subject: [PATCH 13/28] Fix composable CI package manifests for spec and Fleet Complete policy and data stream variable metadata (type, titles, flags) for package-spec validation. Use logfile for the native logs stream so Kibana accepts the data stream manifest. Update requiredinputs tests and the manual required_inputs fixture to match. Made-with: Cursor --- internal/requiredinputs/fields_test.go | 4 ++-- internal/requiredinputs/streamdefs_test.go | 4 ++-- .../data_stream/test_logs/manifest.yml | 2 +- .../data_stream/ci_composable_logs/manifest.yml | 12 +++++++++++- .../02_ci_composable_integration/manifest.yml | 11 +++++++++++ 5 files changed, 27 insertions(+), 6 deletions(-) diff --git a/internal/requiredinputs/fields_test.go b/internal/requiredinputs/fields_test.go index fe404b86d8..3cc85985f8 100644 --- a/internal/requiredinputs/fields_test.go +++ b/internal/requiredinputs/fields_test.go @@ -260,7 +260,7 @@ streams: - package: no_fields_pkg title: Logs via no-fields input package description: Collect field logs. 
- - input: logs + - input: logfile title: Plain logs stream description: Native logs stream without package reference. template_path: stream.yml.hbs @@ -287,7 +287,7 @@ streams: // TestBundleDataStreamFields_StreamWithoutPackage verifies that data stream // streams with no package reference are skipped without error. func TestBundleDataStreamFields_StreamWithoutPackage(t *testing.T) { - // Second stream uses input: logs (no package); Bundle should succeed and only + // Second stream uses input: logfile (no package); Bundle should succeed and only // bundle fields for the package-backed stream. epr := &fakeEprClient{ downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { diff --git a/internal/requiredinputs/streamdefs_test.go b/internal/requiredinputs/streamdefs_test.go index 2a7c9b5f94..003e2371ae 100644 --- a/internal/requiredinputs/streamdefs_test.go +++ b/internal/requiredinputs/streamdefs_test.go @@ -444,7 +444,7 @@ func TestResolveStreamInputTypes_FieldBundlingFixture(t *testing.T) { assert.Equal(t, "logs", m.PolicyTemplates[0].Inputs[1].Type) assert.Empty(t, m.PolicyTemplates[0].Inputs[1].Package) - // Check data stream manifest: package stream → input: logfile; plain logs stream stays logs. + // Check data stream manifest: package stream → input: logfile; native stream stays logfile. 
dsManifestBytes, err := os.ReadFile(filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "manifest.yml")) require.NoError(t, err) dsManifest, err := packages.ReadDataStreamManifestBytes(dsManifestBytes) @@ -453,6 +453,6 @@ func TestResolveStreamInputTypes_FieldBundlingFixture(t *testing.T) { assert.Equal(t, "logfile", dsManifest.Streams[0].Input) assert.Empty(t, dsManifest.Streams[0].Package) assert.NotEmpty(t, dsManifest.Streams[0].Title) - assert.Equal(t, "logs", dsManifest.Streams[1].Input) + assert.Equal(t, "logfile", dsManifest.Streams[1].Input) assert.Empty(t, dsManifest.Streams[1].Package) } diff --git a/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/manifest.yml b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/manifest.yml index 4a7f03cf46..87e44a58e1 100644 --- a/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/manifest.yml +++ b/test/manual_packages/required_inputs/with_linked_template_path/data_stream/test_logs/manifest.yml @@ -4,7 +4,7 @@ streams: - package: ci_input_pkg title: Test Logs from Input Package description: Collect test logs using the referenced input package. - - input: logs + - input: logfile title: Test Logs description: Collect test logs using the logs input. template_path: stream.yml.hbs diff --git a/test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml b/test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml index 81423bd2bb..e8a49efd6e 100644 --- a/test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml +++ b/test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml @@ -6,12 +6,21 @@ streams: description: Stream referencing the required input package. 
vars: - name: timeout + type: text + title: Timeout + multi: false + required: false + show_user: false + secret: false description: Timeout for log collection. - name: custom_tag type: text title: Custom Tag + multi: false + required: false show_user: true - - input: logs + secret: false + - input: logfile title: Plain logs stream description: Native logs stream without package reference. template_path: stream.yml.hbs @@ -22,5 +31,6 @@ streams: multi: true required: true show_user: true + secret: false default: - /var/log/ci/*.log diff --git a/test/packages/composable/02_ci_composable_integration/manifest.yml b/test/packages/composable/02_ci_composable_integration/manifest.yml index df6a4e6595..796d1a5d15 100644 --- a/test/packages/composable/02_ci_composable_integration/manifest.yml +++ b/test/packages/composable/02_ci_composable_integration/manifest.yml @@ -30,10 +30,21 @@ policy_templates: description: Required input package for composable build vars: - name: paths + type: text + title: Paths + multi: true + required: true + show_user: true + secret: false default: - /var/log/custom/*.log - name: encoding + type: text + title: Encoding + multi: false + required: false show_user: true + secret: false - type: logs title: Native logs input description: Plain logs input alongside the package stream From a36572e3ce6f13ebd0058cdd6cf6d866de3d17bf Mon Sep 17 00:00:00 2001 From: Tere Date: Tue, 14 Apr 2026 16:04:46 +0200 Subject: [PATCH 14/28] Skip composable integration in test-build-zip test-build-zip runs after test-build-install-zip without a local registry; building 02_ci_composable_integration would download ci_input_pkg from production EPR and fail with 404. Mirror the install-zip phase-1 skip. 
Made-with: Cursor --- scripts/test-build-zip.sh | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/scripts/test-build-zip.sh b/scripts/test-build-zip.sh index b0514c0fbb..cb1d2fd028 100755 --- a/scripts/test-build-zip.sh +++ b/scripts/test-build-zip.sh @@ -23,6 +23,10 @@ testype() { trap cleanup EXIT +# Same as test-build-install-zip.sh: this integration needs the local stack registry +# and phase-2 build; building it here would hit production EPR for requires.input. +COMPOSABLE_INTEGRATION_DIR="test/packages/composable/02_ci_composable_integration/" + OLDPWD=$PWD # Build packages export ELASTIC_PACKAGE_SIGNER_PRIVATE_KEYFILE="$OLDPWD/scripts/gpg-private.asc" @@ -41,6 +45,10 @@ for d in test/packages/*/*/; do if [ "${packageTestType}" == "false_positives" ]; then continue fi + if [[ "${d}" == "${COMPOSABLE_INTEGRATION_DIR}" ]]; then + echo "--- Skipping composable integration (built in test-build-install-zip phase 2 only): ${d}" + continue + fi echo "--- Building zip package: ${d}" elastic-package build -C "$d" --zip --sign -v done From dc053534bc276ab079d0ac3658f0166cae1b3692 Mon Sep 17 00:00:00 2001 From: Tere Date: Wed, 15 Apr 2026 09:37:59 +0200 Subject: [PATCH 15/28] fix(registry): propagate HTTP client errors and harden package download NewClient now returns (*Client, error) so TLS/CA setup failures from newHTTPClient are visible to callers. DownloadPackage defers zip removal on failure after write, extracts verifyPackage for detached PGP checks, and defers closing the zip file during verification. Update all registry.NewClient call sites. Add revisionsFromRegistry in script tests to keep Run under gocyclo limits. Add tests for invalid CA paths/PEM, non-OK download responses, write failures, TLSSkipVerify construction, and revisionsFromRegistry behavior. 
Made-with: Cursor --- cmd/benchmark.go | 10 ++- cmd/build.go | 5 +- cmd/install.go | 5 +- cmd/status.go | 5 +- cmd/testrunner.go | 5 +- internal/registry/client.go | 75 ++++++++++++----- internal/registry/client_test.go | 99 +++++++++++++++++++++-- internal/stack/environment.go | 6 +- internal/stack/serverless.go | 6 +- internal/testrunner/script/package.go | 3 +- internal/testrunner/script/script.go | 11 ++- internal/testrunner/script/script_test.go | 43 ++++++++++ 12 files changed, 236 insertions(+), 37 deletions(-) create mode 100644 internal/testrunner/script/script_test.go diff --git a/cmd/benchmark.go b/cmd/benchmark.go index 28755f1344..22ff9d82cb 100644 --- a/cmd/benchmark.go +++ b/cmd/benchmark.go @@ -339,7 +339,10 @@ func rallyCommandAction(cmd *cobra.Command, args []string) error { } baseURL := stack.PackageRegistryBaseURL(profile, appConfig) - eprClient := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) + eprClient, err := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) + if err != nil { + return fmt.Errorf("failed to create package registry client: %w", err) + } requiredInputsResolver := requiredinputs.NewRequiredInputsResolver(eprClient) withOpts := []rally.OptionFunc{ @@ -524,7 +527,10 @@ func streamCommandAction(cmd *cobra.Command, args []string) error { } baseURL := stack.PackageRegistryBaseURL(profile, appConfig) - eprClient := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) + eprClient, err := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) 
+ if err != nil { + return fmt.Errorf("failed to create package registry client: %w", err) + } requiredInputsResolver := requiredinputs.NewRequiredInputsResolver(eprClient) withOpts := []stream.OptionFunc{ diff --git a/cmd/build.go b/cmd/build.go index d7036d0b39..5177cae4ff 100644 --- a/cmd/build.go +++ b/cmd/build.go @@ -95,7 +95,10 @@ func buildCommandAction(cmd *cobra.Command, args []string) error { if err != nil { return fmt.Errorf("could not load profile: %w", err) } - eprClient := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, prof)...) + eprClient, err := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, prof)...) + if err != nil { + return fmt.Errorf("failed to create package registry client: %w", err) + } requiredInputsResolver := requiredinputs.NewRequiredInputsResolver(eprClient) diff --git a/cmd/install.go b/cmd/install.go index 84bba80c03..d7e250b6c6 100644 --- a/cmd/install.go +++ b/cmd/install.go @@ -99,7 +99,10 @@ func installCommandAction(cmd *cobra.Command, _ []string) error { } baseURL := stack.PackageRegistryBaseURL(profile, appConfig) - eprClient := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) + eprClient, err := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) 
+ if err != nil { + return fmt.Errorf("failed to create package registry client: %w", err) + } requiredInputsResolver := requiredinputs.NewRequiredInputsResolver(eprClient) diff --git a/cmd/status.go b/cmd/status.go index 8ddaa8104d..3d7d868ba2 100644 --- a/cmd/status.go +++ b/cmd/status.go @@ -124,7 +124,10 @@ func statusCommandAction(cmd *cobra.Command, args []string) error { // Create registry client with configured URL // Currently, this command does not use profile, so we take the URL from the application configuration - registryClient := registry.NewClient(appConfig.PackageRegistryBaseURL()) + registryClient, err := registry.NewClient(appConfig.PackageRegistryBaseURL()) + if err != nil { + return fmt.Errorf("failed to create package registry client: %w", err) + } options := registry.SearchOptions{ All: showAll, diff --git a/cmd/testrunner.go b/cmd/testrunner.go index 5a52bc2906..47f1da5118 100644 --- a/cmd/testrunner.go +++ b/cmd/testrunner.go @@ -879,7 +879,10 @@ func testRunnerPolicyCommandAction(cmd *cobra.Command, args []string) error { } baseURL := stack.PackageRegistryBaseURL(profile, appConfig) - eprClient := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) + eprClient, err := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, profile)...) + if err != nil { + return fmt.Errorf("failed to create package registry client: %w", err) + } requiredInputsResolver := requiredinputs.NewRequiredInputsResolver(eprClient) logger.Info(version.Version()) diff --git a/internal/registry/client.go b/internal/registry/client.go index bac16e15c0..5b891c2760 100644 --- a/internal/registry/client.go +++ b/internal/registry/client.go @@ -34,13 +34,17 @@ type Client struct { } // NewClient creates a new instance of the client. 
-func NewClient(baseURL string, opts ...ClientOption) *Client { +func NewClient(baseURL string, opts ...ClientOption) (*Client, error) { c := &Client{baseURL: baseURL} for _, opt := range opts { opt(c) } - c.httpClient, _ = c.newHTTPClient() - return c + httpClient, err := c.newHTTPClient() + if err != nil { + return nil, fmt.Errorf("creating registry HTTP client: %w", err) + } + c.httpClient = httpClient + return c, nil } // CertificateAuthority sets the certificate authority to use for TLS verification. @@ -136,46 +140,75 @@ func (c *Client) DownloadPackage(name, version, destDir string) (string, error) } zipPath := filepath.Join(destDir, fmt.Sprintf("%s-%s.zip", name, version)) + shouldRemove := false + defer func() { + if shouldRemove { + _ = os.Remove(zipPath) + } + }() + + shouldRemove = true if err := os.WriteFile(zipPath, body, 0o644); err != nil { return "", fmt.Errorf("writing package zip to %s: %w", zipPath, err) } if !verify { + shouldRemove = false return zipPath, nil } + discard, err := c.verifyPackage(name, version, zipPath, pubKeyPath) + if err != nil { + if !discard { + shouldRemove = false + } + return "", err + } + + shouldRemove = false + return zipPath, nil +} + +// verifyPackage verifies the detached PGP signature for a package zip already on disk. +// If it returns a non-nil error, discard is true when the zip file should be removed +// (verification or I/O failure before a successful read of the artifact). When discard +// is false, the zip should be kept (e.g. failure closing the file after verification). 
+func (c *Client) verifyPackage(name, version, zipPath, pubKeyPath string) (discard bool, err error) { + discard = true logger.Debugf("Verifying detached signature for package %s-%s", name, version) pubKey, err := os.ReadFile(pubKeyPath) if err != nil { - _ = os.Remove(zipPath) - return "", fmt.Errorf("reading verifier public keyfile (path: %s): %w", pubKeyPath, err) + return true, fmt.Errorf("reading verifier public keyfile (path: %s): %w", pubKeyPath, err) } sigPath := fmt.Sprintf("/epr/%s/%s-%s.zip.sig", name, name, version) sigCode, sigBody, err := c.get(sigPath) if err != nil { - _ = os.Remove(zipPath) - return "", fmt.Errorf("downloading package signature %s-%s: %w", name, version, err) + return true, fmt.Errorf("downloading package signature %s-%s: %w", name, version, err) } if sigCode != http.StatusOK { - _ = os.Remove(zipPath) - return "", fmt.Errorf("downloading package signature %s-%s: unexpected status code %d", name, version, sigCode) + return true, fmt.Errorf("downloading package signature %s-%s: unexpected status code %d", name, version, sigCode) } zipFile, err := os.Open(zipPath) if err != nil { - _ = os.Remove(zipPath) - return "", fmt.Errorf("opening downloaded package zip %s: %w", zipPath, err) - } - verifyErr := files.VerifyDetachedPGP(zipFile, sigBody, pubKey) - closeErr := zipFile.Close() - if verifyErr != nil { - _ = os.Remove(zipPath) - return "", fmt.Errorf("verifying package %s-%s: %w", name, version, verifyErr) - } - if closeErr != nil { - return "", fmt.Errorf("closing downloaded package zip %s: %w", zipPath, closeErr) + return true, fmt.Errorf("opening downloaded package zip %s: %w", zipPath, err) } + defer func() { + closeErr := zipFile.Close() + if closeErr == nil { + return + } + if err != nil { + return + } + discard = false + err = fmt.Errorf("closing downloaded package zip %s: %w", zipPath, closeErr) + }() - return zipPath, nil + if verifyErr := files.VerifyDetachedPGP(zipFile, sigBody, pubKey); verifyErr != nil { + err = 
fmt.Errorf("verifying package %s-%s: %w", name, version, verifyErr) + return + } + return false, nil } diff --git a/internal/registry/client_test.go b/internal/registry/client_test.go index 877ef94e18..c0dd546bd9 100644 --- a/internal/registry/client_test.go +++ b/internal/registry/client_test.go @@ -7,11 +7,14 @@ package registry import ( "archive/zip" "bytes" + "errors" "fmt" + "io/fs" "net/http" "net/http/httptest" "os" "path/filepath" + "runtime" "testing" "github.com/ProtonMail/gopenpgp/v2/crypto" @@ -20,6 +23,88 @@ import ( "github.com/elastic/elastic-package/internal/environment" ) +func TestNewClient_invalidCertificateAuthorityPath(t *testing.T) { + missing := filepath.Join(t.TempDir(), "missing-ca.pem") + client, err := NewClient("https://example.test", CertificateAuthority(missing)) + require.Error(t, err) + require.Nil(t, client) + require.ErrorContains(t, err, "creating registry HTTP client") + require.ErrorContains(t, err, "reading CA certificate") +} + +func TestNewClient_invalidCertificateAuthorityPEM(t *testing.T) { + badPath := filepath.Join(t.TempDir(), "not-a-cert.pem") + require.NoError(t, os.WriteFile(badPath, []byte("this is not a PEM certificate block"), 0o600)) + + client, err := NewClient("https://example.test", CertificateAuthority(badPath)) + require.Error(t, err) + require.Nil(t, client) + require.ErrorContains(t, err, "creating registry HTTP client") + require.ErrorContains(t, err, "no certificate found") +} + +func TestNewClient_tlsskipVerifyOption(t *testing.T) { + srv := httptest.NewServer(http.NotFoundHandler()) + t.Cleanup(srv.Close) + + client, err := NewClient(srv.URL, TLSSkipVerify()) + require.NoError(t, err) + require.NotNil(t, client) +} + +func TestDownloadPackage_unexpectedStatusDoesNotWriteZip(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + http.Error(w, "gone", http.StatusGone) + })) + t.Cleanup(srv.Close) + + 
t.Setenv(environment.WithElasticPackagePrefix("VERIFY_PACKAGE_SIGNATURE"), "") + t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), "") + + dest := t.TempDir() + client, err := NewClient(srv.URL) + require.NoError(t, err) + _, err = client.DownloadPackage("acme", "1.0.0", dest) + require.Error(t, err) + require.ErrorContains(t, err, "unexpected status code") + + _, statErr := os.Stat(filepath.Join(dest, "acme-1.0.0.zip")) + require.True(t, errors.Is(statErr, fs.ErrNotExist), "no zip should be written when the registry returns a non-OK status") +} + +func TestDownloadPackage_writeFailureCleansUp(t *testing.T) { + if runtime.GOOS == "windows" { + t.Skip("read-only directory cleanup test relies on Unix directory permissions") + } + zipBytes := testAcmePackageZip(t) + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/epr/acme/acme-1.0.0.zip" { + http.NotFound(w, r) + return + } + _, err := w.Write(zipBytes) + require.NoError(t, err) + })) + t.Cleanup(srv.Close) + + t.Setenv(environment.WithElasticPackagePrefix("VERIFY_PACKAGE_SIGNATURE"), "") + t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), "") + + root := t.TempDir() + readOnlyDir := filepath.Join(root, "readonly") + require.NoError(t, os.Mkdir(readOnlyDir, 0o555)) + t.Cleanup(func() { _ = os.Chmod(readOnlyDir, 0o700) }) + + client, err := NewClient(srv.URL) + require.NoError(t, err) + _, err = client.DownloadPackage("acme", "1.0.0", readOnlyDir) + require.Error(t, err) + require.ErrorContains(t, err, "writing package zip") + + _, statErr := os.Stat(filepath.Join(readOnlyDir, "acme-1.0.0.zip")) + require.True(t, errors.Is(statErr, fs.ErrNotExist), "partial zip should not remain after a write error") +} + func TestDownloadPackage_withoutVerification(t *testing.T) { zipBytes := testAcmePackageZip(t) srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { @@ -36,7 +121,8 @@ func 
TestDownloadPackage_withoutVerification(t *testing.T) { t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), "") dest := t.TempDir() - client := NewClient(srv.URL) + client, err := NewClient(srv.URL) + require.NoError(t, err) zipPath, err := client.DownloadPackage("acme", "1.0.0", dest) require.NoError(t, err) require.FileExists(t, zipPath) @@ -84,7 +170,8 @@ func TestDownloadPackage_withVerification_success(t *testing.T) { t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), pubPath) dest := t.TempDir() - client := NewClient(srv.URL) + client, err := NewClient(srv.URL) + require.NoError(t, err) zipPath, err := client.DownloadPackage("acme", "1.0.0", dest) require.NoError(t, err) require.FileExists(t, zipPath) @@ -109,8 +196,9 @@ func TestDownloadPackage_withVerification_missingSignature(t *testing.T) { t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), pubPath) dest := t.TempDir() - client := NewClient(srv.URL) - _, err := client.DownloadPackage("acme", "1.0.0", dest) + client, err := NewClient(srv.URL) + require.NoError(t, err) + _, err = client.DownloadPackage("acme", "1.0.0", dest) require.Error(t, err) _, statErr := os.Stat(filepath.Join(dest, "acme-1.0.0.zip")) @@ -166,7 +254,8 @@ func TestDownloadPackage_withVerification_badSignature(t *testing.T) { t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), pubPath) dest := t.TempDir() - client := NewClient(srv.URL) + client, err := NewClient(srv.URL) + require.NoError(t, err) _, err = client.DownloadPackage("acme", "1.0.0", dest) require.Error(t, err) diff --git a/internal/stack/environment.go b/internal/stack/environment.go index 5acafc1e84..5f099d9f80 100644 --- a/internal/stack/environment.go +++ b/internal/stack/environment.go @@ -154,7 +154,11 @@ func (p *environmentProvider) initClients(appConfig *install.ApplicationConfigur } p.elasticsearch = elasticsearch - p.registry = 
registry.NewClient(PackageRegistryBaseURL(p.profile, appConfig)) + regClient, err := registry.NewClient(PackageRegistryBaseURL(p.profile, appConfig)) + if err != nil { + return fmt.Errorf("cannot create package registry client: %w", err) + } + p.registry = regClient return nil } diff --git a/internal/stack/serverless.go b/internal/stack/serverless.go index bd8bcccb24..df2895f220 100644 --- a/internal/stack/serverless.go +++ b/internal/stack/serverless.go @@ -232,7 +232,11 @@ func (sp *serverlessProvider) createClients(project *serverless.Project, appConf return fmt.Errorf("failed to create kibana client: %w", err) } - sp.registryClient = registry.NewClient(PackageRegistryBaseURL(sp.profile, appConfig)) + regClient, err := registry.NewClient(PackageRegistryBaseURL(sp.profile, appConfig)) + if err != nil { + return fmt.Errorf("failed to create package registry client: %w", err) + } + sp.registryClient = regClient return nil } diff --git a/internal/testrunner/script/package.go b/internal/testrunner/script/package.go index c3e2b4dd4c..d7fee036d3 100644 --- a/internal/testrunner/script/package.go +++ b/internal/testrunner/script/package.go @@ -188,7 +188,8 @@ func installPackageFromRegistry(ts *testscript.TestScript, neg bool, args []stri regPkgs[*profName] = append(regPkgs[*profName], registryPackage{name: name, version: version}) workDir := ts.MkAbs(".") - client := registry.NewClient(registryBaseURL, stack.RegistryClientOptions(registryBaseURL, stk.profile)...) + client, err := registry.NewClient(registryBaseURL, stack.RegistryClientOptions(registryBaseURL, stk.profile)...) 
+ ts.Check(decoratedWith("creating package registry client", err)) zipPath, err := client.DownloadPackage(name, version, workDir) ts.Check(decoratedWith("downloading package from registry", err)) diff --git a/internal/testrunner/script/script.go b/internal/testrunner/script/script.go index 910c6bc0ad..472e374a02 100644 --- a/internal/testrunner/script/script.go +++ b/internal/testrunner/script/script.go @@ -74,6 +74,14 @@ func profileAndPackageRegistryBaseURL(opt Options, appConfig *install.Applicatio return prof, stack.PackageRegistryBaseURL(prof, appConfig), nil } +func revisionsFromRegistry(eprBaseURL string, prof *profile.Profile, pkgName string) ([]packages.PackageManifest, error) { + c, err := registry.NewClient(eprBaseURL, stack.RegistryClientOptions(eprBaseURL, prof)...) + if err != nil { + return nil, fmt.Errorf("creating package registry client: %w", err) + } + return c.Revisions(pkgName, registry.SearchOptions{}) +} + func scriptTestWorkdirRoot(workRoot string, opt Options) (workdirRoot string, err error) { if opt.TestWork { return os.MkdirTemp(workRoot, "*") @@ -221,8 +229,7 @@ func Run(dst *[]testrunner.TestResult, w io.Writer, opt Options) error { if err != nil { return err } - eprClient := registry.NewClient(eprBaseURL, stack.RegistryClientOptions(eprBaseURL, prof)...) - revisions, err := eprClient.Revisions(manifest.Name, registry.SearchOptions{}) + revisions, err := revisionsFromRegistry(eprBaseURL, prof, manifest.Name) if err != nil { return err } diff --git a/internal/testrunner/script/script_test.go b/internal/testrunner/script/script_test.go new file mode 100644 index 0000000000..e17c35dd1e --- /dev/null +++ b/internal/testrunner/script/script_test.go @@ -0,0 +1,43 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
+ +package script + +import ( + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/stack" +) + +func TestRevisionsFromRegistry_searchOK(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path != "/search" { + http.NotFound(w, r) + return + } + _, err := w.Write([]byte("[]")) + require.NoError(t, err) + })) + t.Cleanup(srv.Close) + + revs, err := revisionsFromRegistry(srv.URL, nil, "acme") + require.NoError(t, err) + require.Empty(t, revs) +} + +func TestRevisionsFromRegistry_propagatesRegistryClientError(t *testing.T) { + badCA := filepath.Join(t.TempDir(), "invalid-ca.pem") + require.NoError(t, os.WriteFile(badCA, []byte("not a PEM certificate"), 0o600)) + t.Setenv(stack.CACertificateEnv, badCA) + + _, err := revisionsFromRegistry("https://epr.example", nil, "acme") + require.Error(t, err) + require.ErrorContains(t, err, "creating package registry client") +} From b6ff9cbb0837ad48f684f3187bc3918f1fc1b02e Mon Sep 17 00:00:00 2001 From: Tere Date: Wed, 15 Apr 2026 09:39:57 +0200 Subject: [PATCH 16/28] build: resolve EPR base URL from profile like other commands Use stack.PackageRegistryBaseURL after loading the profile so elastic.epr.url matches install, test, and benchmark. 
Made-with: Cursor --- cmd/build.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/build.go b/cmd/build.go index 5177cae4ff..29d577c479 100644 --- a/cmd/build.go +++ b/cmd/build.go @@ -90,11 +90,11 @@ func buildCommandAction(cmd *cobra.Command, args []string) error { return fmt.Errorf("can't load configuration: %w", err) } - baseURL := appConfig.PackageRegistryBaseURL() prof, err := profile.LoadProfile(appConfig.CurrentProfile()) if err != nil { return fmt.Errorf("could not load profile: %w", err) } + baseURL := stack.PackageRegistryBaseURL(prof, appConfig) eprClient, err := registry.NewClient(baseURL, stack.RegistryClientOptions(baseURL, prof)...) if err != nil { return fmt.Errorf("failed to create package registry client: %w", err) From 90d857bd537af27bb907459241266cbf600646bc Mon Sep 17 00:00:00 2001 From: Tere Date: Wed, 15 Apr 2026 09:46:57 +0200 Subject: [PATCH 17/28] refactor(requiredinputs): split merge/resolve helpers and extend tests Extract mergeVariables and resolveStreamInputTypes into focused helpers, rename promoted var scope types, and warn when input packages define multiple policy templates. Use errors.Is(os.ErrNotExist) in field bundling tests. Add unit tests for promoted override scoping and stream input resolution with multi-template input packages. Document new helpers and updated entry points. 
Made-with: Cursor --- internal/requiredinputs/fields_test.go | 5 +- internal/requiredinputs/streamdefs.go | 43 +++- internal/requiredinputs/streamdefs_test.go | 72 ++++++ internal/requiredinputs/variables.go | 247 ++++++++++++++------- internal/requiredinputs/variables_test.go | 124 +++++++++++ 5 files changed, 396 insertions(+), 95 deletions(-) diff --git a/internal/requiredinputs/fields_test.go b/internal/requiredinputs/fields_test.go index 3cc85985f8..f88f34bad5 100644 --- a/internal/requiredinputs/fields_test.go +++ b/internal/requiredinputs/fields_test.go @@ -5,6 +5,7 @@ package requiredinputs import ( + "errors" "os" "path/filepath" "testing" @@ -189,7 +190,7 @@ func TestBundleDataStreamFields_AllFieldsOverlap(t *testing.T) { bundledPath := filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "fields", "ci_input_pkg-fields.yml") _, statErr := os.Stat(bundledPath) - assert.True(t, os.IsNotExist(statErr), "bundled fields file should not be created when all fields already exist") + assert.True(t, errors.Is(statErr, os.ErrNotExist), "bundled fields file should not be created when all fields already exist") } // TestBundleDataStreamFields_NoFieldsInInputPkg verifies that when the input @@ -281,7 +282,7 @@ streams: bundledPath := filepath.Join(buildPackageRoot, "data_stream", "ci_composable_logs", "fields", "no_fields_pkg-fields.yml") _, statErr := os.Stat(bundledPath) - assert.True(t, os.IsNotExist(statErr), "no fields file should be created when input package has no fields") + assert.True(t, errors.Is(statErr, os.ErrNotExist), "no fields file should be created when input package has no fields") } // TestBundleDataStreamFields_StreamWithoutPackage verifies that data stream diff --git a/internal/requiredinputs/streamdefs.go b/internal/requiredinputs/streamdefs.go index a6eaabd12b..7da023ac99 100644 --- a/internal/requiredinputs/streamdefs.go +++ b/internal/requiredinputs/streamdefs.go @@ -19,7 +19,7 @@ import ( // inputPkgInfo holds the resolved 
metadata from an input package needed to // replace package: references in composable package manifests. type inputPkgInfo struct { - identifier string // policy_templates[0].input (e.g. "logfile") + identifier string // policy_templates[0].input; if several templates exist, only the first is used pkgTitle string // manifest.title (fallback title) pkgDescription string // manifest.description (fallback description) } @@ -31,22 +31,45 @@ type inputPkgInfo struct { // // This step must run last, after mergeVariables, because that step uses // stream.Package and input.Package to identify which entries to process. +// It resolves metadata per required input via buildInputPkgInfoByName, then +// rewrites the root manifest and each data stream manifest. func (r *RequiredInputsResolver) resolveStreamInputTypes( manifest *packages.PackageManifest, inputPkgPaths map[string]string, buildRoot *os.Root, ) error { - // Step 1 — Build a cache of inputPkgInfo per package name. + infoByPkg, err := buildInputPkgInfoByName(inputPkgPaths) + if err != nil { + return err + } + + if err := applyInputTypesToComposableManifest(manifest, buildRoot, infoByPkg); err != nil { + return err + } + + return applyInputTypesToDataStreamManifests(buildRoot, infoByPkg) +} + +// buildInputPkgInfoByName loads inputPkgInfo for each downloaded required input package path. +func buildInputPkgInfoByName(inputPkgPaths map[string]string) (map[string]inputPkgInfo, error) { infoByPkg := make(map[string]inputPkgInfo, len(inputPkgPaths)) for pkgName, pkgPath := range inputPkgPaths { info, err := loadInputPkgInfo(pkgPath) if err != nil { - return fmt.Errorf("loading input package info for %q: %w", pkgName, err) + return nil, fmt.Errorf("loading input package info for %q: %w", pkgName, err) } infoByPkg[pkgName] = info } + return infoByPkg, nil +} - // Step 2 — Update policy_templates[].inputs[] in manifest.yml. 
+// applyInputTypesToComposableManifest sets type (and optional title/description) on +// package-backed policy template inputs in manifest.yml and drops package:. +func applyInputTypesToComposableManifest( + manifest *packages.PackageManifest, + buildRoot *os.Root, + infoByPkg map[string]inputPkgInfo, +) error { manifestBytes, err := buildRoot.ReadFile("manifest.yml") if err != nil { return fmt.Errorf("reading manifest: %w", err) @@ -91,8 +114,12 @@ func (r *RequiredInputsResolver) resolveStreamInputTypes( if err := buildRoot.WriteFile("manifest.yml", updated, 0664); err != nil { return fmt.Errorf("writing updated manifest: %w", err) } + return nil +} - // Step 3 — Update streams[] in each data_stream/*/manifest.yml. +// applyInputTypesToDataStreamManifests sets input on package-backed streams in each +// data_stream/*/manifest.yml and drops package:. +func applyInputTypesToDataStreamManifests(buildRoot *os.Root, infoByPkg map[string]inputPkgInfo) error { dsManifestPaths, err := fs.Glob(buildRoot.FS(), "data_stream/*/manifest.yml") if err != nil { return fmt.Errorf("globbing data stream manifests: %w", err) @@ -153,7 +180,9 @@ func (r *RequiredInputsResolver) resolveStreamInputTypes( } // loadInputPkgInfo reads an input package's manifest and extracts the metadata -// needed to replace package: references in composable packages. +// needed to replace package: references in composable packages. When the input +// package has several policy templates, only the first template's input id is +// used and a warning is logged. 
func loadInputPkgInfo(pkgPath string) (inputPkgInfo, error) { pkgFS, closeFn, err := openPackageFS(pkgPath) if err != nil { @@ -175,7 +204,7 @@ func loadInputPkgInfo(pkgPath string) (inputPkgInfo, error) { return inputPkgInfo{}, fmt.Errorf("input package %q has no policy templates", m.Name) } if len(m.PolicyTemplates) > 1 { - logger.Debugf("Input package %q has multiple policy templates; using input identifier %q from first", m.Name, m.PolicyTemplates[0].Input) + logger.Warnf("Input package %q has multiple policy templates; using input identifier %q from first policy template only", m.Name, m.PolicyTemplates[0].Input) } pt := m.PolicyTemplates[0] diff --git a/internal/requiredinputs/streamdefs_test.go b/internal/requiredinputs/streamdefs_test.go index 003e2371ae..f3a0b869bb 100644 --- a/internal/requiredinputs/streamdefs_test.go +++ b/internal/requiredinputs/streamdefs_test.go @@ -67,6 +67,19 @@ policy_templates: assert.ErrorContains(t, err, "no input identifier") } +// TestLoadInputPkgInfo_MultiplePolicyTemplatesUsesFirstInput verifies that when +// an input package declares more than one policy template, loadInputPkgInfo +// keeps the input identifier from the first template (see streamdefs.go). This +// matches resolveStreamInputTypes behavior and the warning logged for the +// ambiguous multi-template case. 
+func TestLoadInputPkgInfo_MultiplePolicyTemplatesUsesFirstInput(t *testing.T) { + dir := createFakeInputWithMultiplePolicyTemplates(t) + info, err := loadInputPkgInfo(dir) + require.NoError(t, err) + assert.Equal(t, "sql", info.identifier) + assert.NotEqual(t, "sql/metrics", info.identifier) +} + // ---- integration tests ------------------------------------------------------- // TestResolveStreamInputTypes_ReplacesPackageWithType verifies that a @@ -124,6 +137,65 @@ policy_templates: assert.Empty(t, m.PolicyTemplates[0].Inputs[0].Package) } +// TestResolveStreamInputTypes_InputPkgWithMultiplePolicyTemplatesUsesFirst +// exercises Bundle when the required input package has several policy +// templates with different input identifiers: resolution must use the first +// template only so composable manifests stay consistent with loadInputPkgInfo. +func TestResolveStreamInputTypes_InputPkgWithMultiplePolicyTemplatesUsesFirst(t *testing.T) { + inputPkgDir := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(inputPkgDir, "manifest.yml"), []byte(` +name: dual_template_input +title: Dual Template Input +description: Input with two policy templates. 
+version: 0.1.0 +type: input +policy_templates: + - name: first + input: logfile + type: logs + - name: second + input: winlog + type: logs +`), 0644)) + + buildRoot := t.TempDir() + require.NoError(t, os.WriteFile(filepath.Join(buildRoot, "manifest.yml"), []byte(` +format_version: 3.0.0 +name: my_integration +version: 0.1.0 +type: integration +requires: + input: + - package: dual_template_input + version: 0.1.0 +policy_templates: + - name: logs + title: Logs + description: Collect logs + inputs: + - package: dual_template_input + title: Collect logs via dual-template input + description: Use the input package +`), 0644)) + + epr := &fakeEprClient{ + downloadPackageFunc: func(packageName, packageVersion, tmpDir string) (string, error) { + return inputPkgDir, nil + }, + } + resolver := NewRequiredInputsResolver(epr) + require.NoError(t, resolver.Bundle(buildRoot)) + + manifestBytes, err := os.ReadFile(filepath.Join(buildRoot, "manifest.yml")) + require.NoError(t, err) + m, err := packages.ReadPackageManifestBytes(manifestBytes) + require.NoError(t, err) + + require.Len(t, m.PolicyTemplates[0].Inputs, 1) + assert.Equal(t, "logfile", m.PolicyTemplates[0].Inputs[0].Type) + assert.Empty(t, m.PolicyTemplates[0].Inputs[0].Package) +} + // TestResolveStreamInputTypes_PreservesExistingTitleAndDescription verifies // that title and description already set in the composable package input entry // are preserved and not overwritten by the input package's values. diff --git a/internal/requiredinputs/variables.go b/internal/requiredinputs/variables.go index 129e378c50..b907ee4081 100644 --- a/internal/requiredinputs/variables.go +++ b/internal/requiredinputs/variables.go @@ -16,15 +16,16 @@ import ( "github.com/elastic/elastic-package/internal/packages" ) -// pkgDsKey uniquely identifies the (input-package, data-stream) pair used to -// index promoted variable overrides. 
-type pkgDsKey struct { - pkg string - dsName string +// promotedVarScopeKey is the lookup key for composable-side var overrides: required +// input package name plus composable data stream name ("" if the template has no data_streams). +type promotedVarScopeKey struct { + refInputPackage string + composableDataStream string } // mergeVariables merges variable definitions from input packages into the -// composable package's manifests (package-level and data-stream-level). +// composable package's manifests (package-level and data-stream-level) under +// buildRoot (manifest.yml and data_stream/*/manifest.yml). // // Merging rule: input package vars are the base; composable package override // fields win when explicitly specified. @@ -35,42 +36,63 @@ type pkgDsKey struct { // Data-stream-level vars: all remaining (non-promoted) base vars are placed at // the data-stream level, merged with any stream-level overrides the composable // package declares. -// -//nolint:gocognit // multi-step merge pipeline (promotion, DS manifests, policy templates) func (r *RequiredInputsResolver) mergeVariables( manifest *packages.PackageManifest, inputPkgPaths map[string]string, buildRoot *os.Root, ) error { - // Step A — Re-read manifest.yml from disk as a YAML node so edits from the - // earlier template-bundling step are included. 
- manifestBytes, err := buildRoot.ReadFile("manifest.yml") + doc, err := readYAMLDocFromBuildRoot(buildRoot, "manifest.yml") + if err != nil { + return err + } + + promotedVarOverridesByScope, err := buildPromotedVarOverrideMap(manifest, &doc) if err != nil { - return fmt.Errorf("reading manifest: %w", err) + return err + } + + if err := mergePolicyTemplateInputLevelVars(manifest, &doc, inputPkgPaths, promotedVarOverridesByScope); err != nil { + return err + } + + if err := writeFormattedYAMLDoc(buildRoot, "manifest.yml", &doc); err != nil { + return err + } + + return mergeDataStreamStreamLevelVars(buildRoot, inputPkgPaths, promotedVarOverridesByScope) +} + +// readYAMLDocFromBuildRoot reads relPath from buildRoot and parses it as a YAML document node. +func readYAMLDocFromBuildRoot(buildRoot *os.Root, relPath string) (yaml.Node, error) { + b, err := buildRoot.ReadFile(relPath) + if err != nil { + return yaml.Node{}, fmt.Errorf("reading %q: %w", relPath, err) } var doc yaml.Node - if err := yaml.Unmarshal(manifestBytes, &doc); err != nil { - return fmt.Errorf("parsing manifest YAML: %w", err) + if err := yaml.Unmarshal(b, &doc); err != nil { + return yaml.Node{}, fmt.Errorf("parsing YAML %q: %w", relPath, err) } + return doc, nil +} - // Step B — Build a promotedIndex: (pkg, dsName) → map[varName]overrideNode. - // The override nodes come from policy_templates[ptIdx].inputs[inputIdx].vars - // in the composable package manifest. - promotedIndex := make(map[pkgDsKey]map[string]*yaml.Node) +// buildPromotedVarOverrideMap indexes composable policy_templates[].inputs[].vars +// by input package name and data stream scope for use when merging promotions. 
+func buildPromotedVarOverrideMap(manifest *packages.PackageManifest, doc *yaml.Node) (map[promotedVarScopeKey]map[string]*yaml.Node, error) { + out := make(map[promotedVarScopeKey]map[string]*yaml.Node) for ptIdx, pt := range manifest.PolicyTemplates { for inputIdx, input := range pt.Inputs { if input.Package == "" || len(input.Vars) == 0 { continue } - inputNode, err := getInputMappingNode(&doc, ptIdx, inputIdx) + inputNode, err := getInputMappingNode(doc, ptIdx, inputIdx) if err != nil { - return fmt.Errorf("getting input node at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) + return nil, fmt.Errorf("getting input node at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) } overrideNodes, err := readVarNodes(inputNode) if err != nil { - return fmt.Errorf("reading override var nodes at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) + return nil, fmt.Errorf("reading override var nodes at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) } overrideByName := make(map[string]*yaml.Node, len(overrideNodes)) @@ -83,12 +105,21 @@ func (r *RequiredInputsResolver) mergeVariables( dsNames = []string{""} } for _, dsName := range dsNames { - promotedIndex[pkgDsKey{pkg: input.Package, dsName: dsName}] = overrideByName + out[promotedVarScopeKey{refInputPackage: input.Package, composableDataStream: dsName}] = overrideByName } } } + return out, nil +} - // Step C — Merge and write input-level vars in manifest.yml. +// mergePolicyTemplateInputLevelVars writes merged promoted vars onto each +// package-backed input in the composable manifest YAML (in-memory doc). 
+func mergePolicyTemplateInputLevelVars( + manifest *packages.PackageManifest, + doc *yaml.Node, + inputPkgPaths map[string]string, + promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*yaml.Node, +) error { for ptIdx, pt := range manifest.PolicyTemplates { for inputIdx, input := range pt.Inputs { if input.Package == "" { @@ -107,17 +138,9 @@ func (r *RequiredInputsResolver) mergeVariables( continue } - // Union of promoted overrides across all data streams for this input. - promotedOverrides := make(map[string]*yaml.Node) - dsNames := pt.DataStreams - if len(dsNames) == 0 { - dsNames = []string{""} - } - for _, dsName := range dsNames { - maps.Copy(promotedOverrides, promotedIndex[pkgDsKey{pkg: input.Package, dsName: dsName}]) - } + promotedOverrides := unionPromotedOverridesForInput(pt, input.Package, promotedVarOverridesByScope) - inputNode, err := getInputMappingNode(&doc, ptIdx, inputIdx) + inputNode, err := getInputMappingNode(doc, ptIdx, inputIdx) if err != nil { return fmt.Errorf("getting input node at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) } @@ -131,24 +154,54 @@ func (r *RequiredInputsResolver) mergeVariables( } } } + return nil +} - // Step D — Write the updated manifest.yml back to disk. - updated, err := formatYAMLNode(&doc) +// unionPromotedOverridesForInput merges override nodes for refInputPackage across +// every data stream listed on the policy template (or "" if none listed). 
+func unionPromotedOverridesForInput( + pt packages.PolicyTemplate, + refInputPackage string, + promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*yaml.Node, +) map[string]*yaml.Node { + promotedOverrides := make(map[string]*yaml.Node) + dsNames := pt.DataStreams + if len(dsNames) == 0 { + dsNames = []string{""} + } + for _, dsName := range dsNames { + maps.Copy(promotedOverrides, promotedVarOverridesByScope[promotedVarScopeKey{ + refInputPackage: refInputPackage, + composableDataStream: dsName, + }]) + } + return promotedOverrides +} + +// writeFormattedYAMLDoc serializes doc with package YAML formatting and writes it to relPath. +func writeFormattedYAMLDoc(buildRoot *os.Root, relPath string, doc *yaml.Node) error { + updated, err := formatYAMLNode(doc) if err != nil { - return fmt.Errorf("formatting updated manifest: %w", err) + return fmt.Errorf("formatting updated %q: %w", relPath, err) } - if err := buildRoot.WriteFile("manifest.yml", updated, 0664); err != nil { - return fmt.Errorf("writing updated manifest: %w", err) + if err := buildRoot.WriteFile(relPath, updated, 0664); err != nil { + return fmt.Errorf("writing updated %q: %w", relPath, err) } + return nil +} - // Step E — Process each data_stream/*/manifest.yml. +// mergeDataStreamStreamLevelVars updates stream vars in every data_stream/*/manifest.yml under buildRoot. 
+func mergeDataStreamStreamLevelVars( + buildRoot *os.Root, + inputPkgPaths map[string]string, + promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*yaml.Node, +) error { dsManifestPaths, err := fs.Glob(buildRoot.FS(), "data_stream/*/manifest.yml") if err != nil { return fmt.Errorf("globbing data stream manifests: %w", err) } for _, manifestPath := range dsManifestPaths { - // data_stream/var_merging_logs/manifest.yml → var_merging_logs dsName := path.Base(path.Dir(manifestPath)) dsManifestBytes, err := buildRoot.ReadFile(manifestPath) @@ -166,67 +219,89 @@ func (r *RequiredInputsResolver) mergeVariables( return fmt.Errorf("parsing data stream manifest %q: %w", manifestPath, err) } - for streamIdx, stream := range dsManifest.Streams { - if stream.Package == "" { - continue - } - pkgPath, ok := inputPkgPaths[stream.Package] - if !ok { - continue - } - - baseVarOrder, baseVarByName, err := loadInputPkgVarNodes(pkgPath) - if err != nil { - return fmt.Errorf("loading input pkg var nodes for %q: %w", stream.Package, err) - } - if len(baseVarOrder) == 0 { - continue - } + if err := mergeStreamsInDSManifest(&dsDoc, dsManifest, dsName, inputPkgPaths, promotedVarOverridesByScope, manifestPath); err != nil { + return err + } - // Promoted names for this (pkg, dsName) combination. 
- promotedNames := make(map[string]bool) - for _, key := range []pkgDsKey{{stream.Package, dsName}, {stream.Package, ""}} { - for varName := range promotedIndex[key] { - promotedNames[varName] = true - } - } + if err := writeFormattedYAMLDoc(buildRoot, manifestPath, &dsDoc); err != nil { + return fmt.Errorf("data stream manifest %q: %w", manifestPath, err) + } + } - streamNode, err := getStreamMappingNode(&dsDoc, streamIdx) - if err != nil { - return fmt.Errorf("getting stream node at index %d in %q: %w", streamIdx, manifestPath, err) - } + return nil +} - dsOverrideNodes, err := readVarNodes(streamNode) - if err != nil { - return fmt.Errorf("reading DS override var nodes in %q: %w", manifestPath, err) - } +// mergeStreamsInDSManifest merges non-promoted input vars into package-backed streams in one DS manifest. +func mergeStreamsInDSManifest( + dsDoc *yaml.Node, + dsManifest *packages.DataStreamManifest, + dsName string, + inputPkgPaths map[string]string, + promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*yaml.Node, + manifestPath string, +) error { + for streamIdx, stream := range dsManifest.Streams { + if stream.Package == "" { + continue + } + pkgPath, ok := inputPkgPaths[stream.Package] + if !ok { + continue + } - if err := checkDuplicateVarNodes(dsOverrideNodes); err != nil { - return fmt.Errorf("duplicate vars in data stream manifest %q: %w", manifestPath, err) - } + baseVarOrder, baseVarByName, err := loadInputPkgVarNodes(pkgPath) + if err != nil { + return fmt.Errorf("loading input pkg var nodes for %q: %w", stream.Package, err) + } + if len(baseVarOrder) == 0 { + continue + } - mergedSeq := mergeStreamLevelVarNodes(baseVarOrder, baseVarByName, promotedNames, dsOverrideNodes) + promotedNames := promotedVarNamesForStream(stream.Package, dsName, promotedVarOverridesByScope) - if len(mergedSeq.Content) > 0 { - upsertKey(streamNode, "vars", mergedSeq) - } else { - removeKey(streamNode, "vars") - } + streamNode, err := getStreamMappingNode(dsDoc, 
streamIdx) + if err != nil { + return fmt.Errorf("getting stream node at index %d in %q: %w", streamIdx, manifestPath, err) } - // Step F — Write each updated DS manifest. - dsUpdated, err := formatYAMLNode(&dsDoc) + dsOverrideNodes, err := readVarNodes(streamNode) if err != nil { - return fmt.Errorf("formatting updated data stream manifest %q: %w", manifestPath, err) + return fmt.Errorf("reading DS override var nodes in %q: %w", manifestPath, err) } - if err := buildRoot.WriteFile(manifestPath, dsUpdated, 0664); err != nil { - return fmt.Errorf("writing updated data stream manifest %q: %w", manifestPath, err) + + if err := checkDuplicateVarNodes(dsOverrideNodes); err != nil { + return fmt.Errorf("duplicate vars in data stream manifest %q: %w", manifestPath, err) } - } + mergedSeq := mergeStreamLevelVarNodes(baseVarOrder, baseVarByName, promotedNames, dsOverrideNodes) + + if len(mergedSeq.Content) > 0 { + upsertKey(streamNode, "vars", mergedSeq) + } else { + removeKey(streamNode, "vars") + } + } return nil } +// promotedVarNamesForStream returns the set of var names promoted for this stream: +// overrides for (refInputPackage, composableDataStream) plus template-wide (refInputPackage, ""). +func promotedVarNamesForStream( + refInputPackage, composableDataStream string, + promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*yaml.Node, +) map[string]bool { + promotedNames := make(map[string]bool) + for _, key := range []promotedVarScopeKey{ + {refInputPackage: refInputPackage, composableDataStream: composableDataStream}, + {refInputPackage: refInputPackage, composableDataStream: ""}, + } { + for varName := range promotedVarOverridesByScope[key] { + promotedNames[varName] = true + } + } + return promotedNames +} + // loadInputPkgVarNodes opens the input package at pkgPath, reads all vars from // all policy templates (dedup by name, first wins) and returns them as an // ordered slice and a name→node lookup map. 
diff --git a/internal/requiredinputs/variables_test.go b/internal/requiredinputs/variables_test.go index 3b441a69ef..38b4d9a457 100644 --- a/internal/requiredinputs/variables_test.go +++ b/internal/requiredinputs/variables_test.go @@ -286,6 +286,130 @@ func TestLoadInputPkgVarNodes(t *testing.T) { }) } +// TestPromotedVarNamesForStream_UnionsScopedAndTemplateWide verifies that when +// resolving which base vars are promoted off a data stream, overrides keyed by +// (input package, composable data stream) are unioned with overrides keyed by +// (input package, "") so template-wide promotions still apply to named streams. +func TestPromotedVarNamesForStream_UnionsScopedAndTemplateWide(t *testing.T) { + const refPkg = "ci_input_pkg" + dsScoped := varNode("paths", "type", "text") + templateWide := varNode("encoding", "type", "text") + + byScope := map[promotedVarScopeKey]map[string]*yaml.Node{ + {refInputPackage: refPkg, composableDataStream: "my_logs"}: { + "paths": dsScoped, + }, + {refInputPackage: refPkg, composableDataStream: ""}: { + "encoding": templateWide, + }, + } + + names := promotedVarNamesForStream(refPkg, "my_logs", byScope) + assert.True(t, names["paths"]) + assert.True(t, names["encoding"]) + assert.False(t, names["timeout"]) +} + +// TestUnionPromotedOverridesForInput_MergesOverridesAcrossDataStreams checks +// unionPromotedOverridesForInput: a policy template listing several data streams +// must merge composable-side override nodes from every listed stream so +// input-level promotion sees the full set of vars declared anywhere on that +// template for the referenced input package. 
+func TestUnionPromotedOverridesForInput_MergesOverridesAcrossDataStreams(t *testing.T) { + const refPkg = "ci_input_pkg" + paths := varNode("paths", "title", "P") + encoding := varNode("encoding", "title", "E") + + byScope := map[promotedVarScopeKey]map[string]*yaml.Node{ + {refInputPackage: refPkg, composableDataStream: "ds_a"}: {"paths": paths}, + {refInputPackage: refPkg, composableDataStream: "ds_b"}: {"encoding": encoding}, + } + + pt := packages.PolicyTemplate{ + Name: "pt", + DataStreams: []string{"ds_a", "ds_b"}, + } + + got := unionPromotedOverridesForInput(pt, refPkg, byScope) + require.Len(t, got, 2) + assert.Same(t, paths, got["paths"]) + assert.Same(t, encoding, got["encoding"]) +} + +// TestBuildPromotedVarOverrideMap_PerDataStreamScopes builds the promoted +// override index from aligned manifest + YAML: each composable data stream +// listed under a policy template gets its own scope entry so downstream merge +// can distinguish stream-specific composable vars. +func TestBuildPromotedVarOverrideMap_PerDataStreamScopes(t *testing.T) { + manifestYAML := []byte(`format_version: 3.0.0 +name: scope_test +title: Scope test +version: 0.1.0 +type: integration +policy_templates: + - name: logs + title: Logs + data_streams: + - ds_alpha + - ds_beta + inputs: + - package: ref_pkg + vars: + - name: paths + type: text + title: Promoted paths +`) + + var doc yaml.Node + require.NoError(t, yaml.Unmarshal(manifestYAML, &doc)) + m, err := packages.ReadPackageManifestBytes(manifestYAML) + require.NoError(t, err) + + idx, err := buildPromotedVarOverrideMap(m, &doc) + require.NoError(t, err) + + keyAlpha := promotedVarScopeKey{refInputPackage: "ref_pkg", composableDataStream: "ds_alpha"} + keyBeta := promotedVarScopeKey{refInputPackage: "ref_pkg", composableDataStream: "ds_beta"} + require.Contains(t, idx, keyAlpha) + require.Contains(t, idx, keyBeta) + assert.Contains(t, idx[keyAlpha], "paths") + assert.Contains(t, idx[keyBeta], "paths") + assert.Equal(t, "Promoted 
paths", mappingValue(idx[keyAlpha]["paths"], "title").Value) +} + +// TestBuildPromotedVarOverrideMap_NoDataStreamsUsesEmptyScope verifies that a +// policy template without data_streams still records promoted overrides under +// composableDataStream "", matching how streams are matched when the template is +// not scoped to named data streams. +func TestBuildPromotedVarOverrideMap_NoDataStreamsUsesEmptyScope(t *testing.T) { + manifestYAML := []byte(`format_version: 3.0.0 +name: scope_test2 +title: Scope test 2 +version: 0.1.0 +type: integration +policy_templates: + - name: logs + title: Logs + inputs: + - package: ref_pkg + vars: + - name: paths + type: text +`) + + var doc yaml.Node + require.NoError(t, yaml.Unmarshal(manifestYAML, &doc)) + m, err := packages.ReadPackageManifestBytes(manifestYAML) + require.NoError(t, err) + + idx, err := buildPromotedVarOverrideMap(m, &doc) + require.NoError(t, err) + + key := promotedVarScopeKey{refInputPackage: "ref_pkg", composableDataStream: ""} + require.Contains(t, idx, key) + assert.Contains(t, idx[key], "paths") +} + // ---- integration tests ------------------------------------------------------- // makeFakeEprForVarMerging supplies the ci_input_pkg fixture path as if it were From 206316bcb6c5f1f0b9c28e0aee64e246631bcb46 Mon Sep 17 00:00:00 2001 From: Tere Date: Wed, 15 Apr 2026 09:51:50 +0200 Subject: [PATCH 18/28] ci: install yq for build-install-zip-file integration targets Buildkite runs test-build-install-zip-file.sh which uses yq to override package_registry.base_url for the composable phase-2 build. Include test-build-install-zip-file and test-build-install-zip-file-shellinit in the same with_yq branch as test-build-install-zip. 
Made-with: Cursor --- .buildkite/scripts/integration_tests.sh | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.buildkite/scripts/integration_tests.sh b/.buildkite/scripts/integration_tests.sh index 54deaf7dee..f16c83b701 100755 --- a/.buildkite/scripts/integration_tests.sh +++ b/.buildkite/scripts/integration_tests.sh @@ -21,6 +21,8 @@ KIND_TARGET="test-check-packages-with-kind" SYSTEM_TEST_FLAGS_TARGET="test-system-test-flags" TEST_BUILD_ZIP_TARGET="test-build-zip" TEST_BUILD_INSTALL_ZIP_TARGET="test-build-install-zip" +TEST_BUILD_INSTALL_ZIP_FILE_TARGET="test-build-install-zip-file" +TEST_BUILD_INSTALL_ZIP_FILE_SHELLINIT_TARGET="test-build-install-zip-file-shellinit" REPO_NAME=$(repo_name "${BUILDKITE_REPO}") REPO_BUILD_TAG="${REPO_NAME}/$(buildkite_pr_branch_build_id)" @@ -140,7 +142,7 @@ install_required_tools() { echo "--- Install kind" with_kubernetes ;; - "${FALSE_POSITIVES_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_TARGET}") + "${FALSE_POSITIVES_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_FILE_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_FILE_SHELLINIT_TARGET}") echo "--- Install yq" with_yq ;; From e7b6f0ace73c67eb569d7527d9082c90c7992d12 Mon Sep 17 00:00:00 2001 From: Tere Date: Wed, 15 Apr 2026 09:53:28 +0200 Subject: [PATCH 19/28] scripts: isolate stack env to USE_SHELLINIT branch Run elastic-package stack shellinit only when -s is used; apply manual exports only otherwise so the non-shellinit path is not polluted by shellinit exports. 
Made-with: Cursor --- scripts/test-build-install-zip-file.sh | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/test-build-install-zip-file.sh b/scripts/test-build-install-zip-file.sh index f5dda6004c..07d02fa952 100755 --- a/scripts/test-build-install-zip-file.sh +++ b/scripts/test-build-install-zip-file.sh @@ -145,8 +145,6 @@ for d in test/packages/*/*/; do elastic-package build -C "$d" done -eval "$(elastic-package stack shellinit)" - if [[ -f "${ELASTIC_PACKAGE_CONFIG_FILE}" ]]; then PREV_REGISTRY_URL=$(yq '.package_registry.base_url // ""' "${ELASTIC_PACKAGE_CONFIG_FILE}") yq eval --inplace '.package_registry.base_url = "https://127.0.0.1:8080"' "${ELASTIC_PACKAGE_CONFIG_FILE}" @@ -162,7 +160,10 @@ elastic-package build -C "${COMPOSABLE_INTEGRATION_DIR}" # Remove unzipped built packages, leave .zip files rm -r build/packages/*/ -if [ ${USE_SHELLINIT} -eq 0 ]; then +# Apply stack env only for the mode under test (shellinit vs manual exports). +if [ ${USE_SHELLINIT} -eq 1 ]; then + eval "$(elastic-package stack shellinit)" +else export ELASTIC_PACKAGE_ELASTICSEARCH_USERNAME=elastic export ELASTIC_PACKAGE_ELASTICSEARCH_PASSWORD=changeme export ELASTIC_PACKAGE_KIBANA_HOST=https://127.0.0.1:5601 From d88455eba7e263224e0f71a2e3678cb0e980b3a0 Mon Sep 17 00:00:00 2001 From: Tere Date: Wed, 15 Apr 2026 10:17:09 +0200 Subject: [PATCH 20/28] fix(tests): update error handling for package verification Made-with: Cursor --- internal/registry/client.go | 5 ++--- internal/registry/client_test.go | 4 ++-- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/internal/registry/client.go b/internal/registry/client.go index 5b891c2760..499dc75bb0 100644 --- a/internal/registry/client.go +++ b/internal/registry/client.go @@ -206,9 +206,8 @@ func (c *Client) verifyPackage(name, version, zipPath, pubKeyPath string) (disca err = fmt.Errorf("closing downloaded package zip %s: %w", zipPath, closeErr) }() - if verifyErr := 
files.VerifyDetachedPGP(zipFile, sigBody, pubKey); verifyErr != nil { - err = fmt.Errorf("verifying package %s-%s: %w", name, version, verifyErr) - return + if err := files.VerifyDetachedPGP(zipFile, sigBody, pubKey); err != nil { + return true, fmt.Errorf("verifying package %s-%s: %w", name, version, err) } return false, nil } diff --git a/internal/registry/client_test.go b/internal/registry/client_test.go index c0dd546bd9..be652f5701 100644 --- a/internal/registry/client_test.go +++ b/internal/registry/client_test.go @@ -202,7 +202,7 @@ func TestDownloadPackage_withVerification_missingSignature(t *testing.T) { require.Error(t, err) _, statErr := os.Stat(filepath.Join(dest, "acme-1.0.0.zip")) - require.True(t, os.IsNotExist(statErr), "zip should be removed after failed verification") + require.True(t, errors.Is(statErr, fs.ErrNotExist), "zip should be removed after failed verification") } func TestDownloadPackage_withVerification_badSignature(t *testing.T) { @@ -260,7 +260,7 @@ func TestDownloadPackage_withVerification_badSignature(t *testing.T) { require.Error(t, err) _, statErr := os.Stat(filepath.Join(dest, "acme-1.0.0.zip")) - require.True(t, os.IsNotExist(statErr), "zip should be removed after failed verification") + require.True(t, errors.Is(statErr, fs.ErrNotExist), "zip should be removed after failed verification") } func testAcmePackageZip(t *testing.T) []byte { From f05fcbcf5efa5e72c325705d4cee1ec4a46e18e1 Mon Sep 17 00:00:00 2001 From: Tere Date: Wed, 15 Apr 2026 11:02:29 +0200 Subject: [PATCH 21/28] feat(ci): add composable integration tests and update build scripts - Introduced new targets for composable integration tests in the Makefile. - Updated the Buildkite pipeline to include jobs for the new composable test targets. - Enhanced the integration test script to handle composable-only builds. - Modified the test-build-install-zip-file.sh script to support composable input packages. 
These changes facilitate dedicated testing for composable packages, improving CI coverage and ensuring proper integration. --- .../pipeline.trigger.integration.tests.sh | 16 ++++++ .buildkite/scripts/integration_tests.sh | 4 +- Makefile | 6 ++ scripts/test-build-install-zip-file.sh | 55 +++++++++++++------ .../docs/README.md | 2 +- 5 files changed, 63 insertions(+), 20 deletions(-) diff --git a/.buildkite/pipeline.trigger.integration.tests.sh b/.buildkite/pipeline.trigger.integration.tests.sh index 10baa1c00d..b3cecb0c67 100755 --- a/.buildkite/pipeline.trigger.integration.tests.sh +++ b/.buildkite/pipeline.trigger.integration.tests.sh @@ -199,6 +199,22 @@ echo " image: \"${UBUNTU_X86_64_AGENT_IMAGE}\"" echo " artifact_paths:" echo " - build/elastic-stack-dump/install-zip-shellinit/logs/*.log" +echo " - label: \":go: Integration test: build-install-zip-file-composable\"" +echo " command: ./.buildkite/scripts/integration_tests.sh -t test-build-install-zip-file-composable" +echo " agents:" +echo " provider: \"gcp\"" +echo " image: \"${UBUNTU_X86_64_AGENT_IMAGE}\"" +echo " artifact_paths:" +echo " - build/elastic-stack-dump/install-zip-composable/logs/*.log" + +echo " - label: \":go: Integration test: build-install-zip-file-composable-shellinit\"" +echo " command: ./.buildkite/scripts/integration_tests.sh -t test-build-install-zip-file-composable-shellinit" +echo " agents:" +echo " provider: \"gcp\"" +echo " image: \"${UBUNTU_X86_64_AGENT_IMAGE}\"" +echo " artifact_paths:" +echo " - build/elastic-stack-dump/install-zip-composable-shellinit/logs/*.log" + echo " - label: \":go: Integration test: system-flags\"" echo " command: ./.buildkite/scripts/integration_tests.sh -t test-system-test-flags" echo " agents:" diff --git a/.buildkite/scripts/integration_tests.sh b/.buildkite/scripts/integration_tests.sh index f16c83b701..465e520ebe 100755 --- a/.buildkite/scripts/integration_tests.sh +++ b/.buildkite/scripts/integration_tests.sh @@ -23,6 +23,8 @@ 
TEST_BUILD_ZIP_TARGET="test-build-zip" TEST_BUILD_INSTALL_ZIP_TARGET="test-build-install-zip" TEST_BUILD_INSTALL_ZIP_FILE_TARGET="test-build-install-zip-file" TEST_BUILD_INSTALL_ZIP_FILE_SHELLINIT_TARGET="test-build-install-zip-file-shellinit" +TEST_BUILD_INSTALL_ZIP_FILE_COMPOSABLE_TARGET="test-build-install-zip-file-composable" +TEST_BUILD_INSTALL_ZIP_FILE_COMPOSABLE_SHELLINIT_TARGET="test-build-install-zip-file-composable-shellinit" REPO_NAME=$(repo_name "${BUILDKITE_REPO}") REPO_BUILD_TAG="${REPO_NAME}/$(buildkite_pr_branch_build_id)" @@ -142,7 +144,7 @@ install_required_tools() { echo "--- Install kind" with_kubernetes ;; - "${FALSE_POSITIVES_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_FILE_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_FILE_SHELLINIT_TARGET}") + "${FALSE_POSITIVES_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_FILE_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_FILE_SHELLINIT_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_FILE_COMPOSABLE_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_FILE_COMPOSABLE_SHELLINIT_TARGET}") echo "--- Install yq" with_yq ;; diff --git a/Makefile b/Makefile index d4e5fa2969..f20f3dd8ef 100644 --- a/Makefile +++ b/Makefile @@ -139,6 +139,12 @@ test-build-install-zip-file: test-build-install-zip-file-shellinit: ./scripts/test-build-install-zip-file.sh -s +test-build-install-zip-file-composable: + ./scripts/test-build-install-zip-file.sh -c + +test-build-install-zip-file-composable-shellinit: + ./scripts/test-build-install-zip-file.sh -c -s + test-system-test-flags: ./scripts/test-system-test-flags.sh diff --git a/scripts/test-build-install-zip-file.sh b/scripts/test-build-install-zip-file.sh index 07d02fa952..e72be85206 100755 --- a/scripts/test-build-install-zip-file.sh +++ b/scripts/test-build-install-zip-file.sh @@ -6,6 +6,8 @@ ELASTIC_PACKAGE_CONFIG_FILE="${HOME}/.elastic-package/config.yml" PREV_REGISTRY_URL="" PACKAGE_REGISTRY_CI_OVERRIDE=0 
COMPOSABLE_INTEGRATION_DIR="test/packages/composable/02_ci_composable_integration/" +COMPOSABLE_INPUT_DIR="test/packages/composable/01_ci_input_pkg/" +COMPOSABLE_ONLY=0 restore_package_registry_config() { if [[ "${PACKAGE_REGISTRY_CI_OVERRIDE}" -ne 1 ]]; then @@ -32,6 +34,9 @@ cleanup() { restore_package_registry_config local output_path="build/elastic-stack-dump/install-zip" + if [ ${COMPOSABLE_ONLY} -eq 1 ]; then + output_path="${output_path}-composable" + fi if [ ${USE_SHELLINIT} -eq 1 ]; then output_path="${output_path}-shellinit" fi @@ -79,8 +84,9 @@ installAndVerifyPackage() { } usage() { - echo "${0} [-s] [-v ] [-h]" + echo "${0} [-c] [-s] [-v ] [-h]" echo "Run test-install-zip suite" + echo -e "\t-c: Run composable-only flow (build input dependency + composable integration; install composable zip only)." echo -e "\t-s: Use elastic-package stack shellinit to export environment variablles. By default, they should be exported manually." echo -e "\t-v : Speciy which Elastic Stack version to use. If not specified it will use the default version in elastic-package." echo -e "\t-h: Show this message" @@ -88,8 +94,11 @@ usage() { USE_SHELLINIT=0 STACK_VERSION="default" -while getopts ":sv:h" o; do +while getopts ":csv:h" o; do case "${o}" in + c) + COMPOSABLE_ONLY=1 + ;; s) USE_SHELLINIT=1 ;; @@ -128,22 +137,27 @@ elastic-package stack up -d -v ${ARG_VERSION} ELASTIC_PACKAGE_LINKS_FILE_PATH="$(pwd)/scripts/links_table.yml" export ELASTIC_PACKAGE_LINKS_FILE_PATH -# Build packages (see test-build-install-zip.sh for composable phase-2 notes). -for d in test/packages/*/*/; do - # Added set +x in a sub-shell to avoid printing the testype command in the output - # This helps to keep the CI output cleaner - packageTestType=$(set +x ; testype "$d") - # Packages in false_positives can have issues. 
- if [ "${packageTestType}" == "false_positives" ]; then - continue - fi - if [[ "${d}" == "${COMPOSABLE_INTEGRATION_DIR}" ]]; then - echo "--- Skipping composable integration (phase-2 build): ${d}" - continue - fi - echo "--- Building zip package: ${d}" - elastic-package build -C "$d" -done +if [ ${COMPOSABLE_ONLY} -eq 1 ]; then + echo "--- Building zip package (composable dependency): ${COMPOSABLE_INPUT_DIR}" + elastic-package build -C "${COMPOSABLE_INPUT_DIR}" +else + # Build packages (see test-build-install-zip.sh for composable phase-2 notes). + for d in test/packages/*/*/; do + # Added set +x in a sub-shell to avoid printing the testype command in the output + # This helps to keep the CI output cleaner + packageTestType=$(set +x ; testype "$d") + # Packages in false_positives can have issues. + if [ "${packageTestType}" == "false_positives" ]; then + continue + fi + if [[ "${d}" == "${COMPOSABLE_INTEGRATION_DIR}" ]]; then + echo "--- Skipping composable integration (phase-2 build): ${d}" + continue + fi + echo "--- Building zip package: ${d}" + elastic-package build -C "$d" + done +fi if [[ -f "${ELASTIC_PACKAGE_CONFIG_FILE}" ]]; then PREV_REGISTRY_URL=$(yq '.package_registry.base_url // ""' "${ELASTIC_PACKAGE_CONFIG_FILE}") @@ -171,5 +185,10 @@ else fi for zipFile in build/packages/*.zip; do + if [ ${COMPOSABLE_ONLY} -eq 1 ]; then + if [[ "$(basename "${zipFile}")" != ci_composable_integration-*.zip ]]; then + continue + fi + fi installAndVerifyPackage "${zipFile}" done diff --git a/test/packages/composable/02_ci_composable_integration/docs/README.md b/test/packages/composable/02_ci_composable_integration/docs/README.md index 9fa3af404c..599697ab43 100644 --- a/test/packages/composable/02_ci_composable_integration/docs/README.md +++ b/test/packages/composable/02_ci_composable_integration/docs/README.md @@ -2,6 +2,6 @@ Declares `requires.input` on [`ci_input_pkg`](../01_ci_input_pkg/). 
After `elastic-package build` with the input package available from the registry, the built package includes merged variables, bundled input templates and fields, and resolved input types. -**CI:** Built in a second phase by `scripts/test-build-install-zip.sh` after the stack is up and `package_registry.base_url` points at the local registry. +**CI:** Built in a second phase by `scripts/test-build-install-zip.sh` after the stack is up and `package_registry.base_url` points at the local registry. It is also exercised via a dedicated CI job using `scripts/test-build-install-zip-file.sh -c` (composable-only). **Manual:** Build `01_ci_input_pkg` first, start the stack, set `package_registry.base_url` to `https://127.0.0.1:8080`, then build this package. From 16dc21d2f7cd19dc3f4496f2b51dc0401b2f110c Mon Sep 17 00:00:00 2001 From: Tere Date: Wed, 15 Apr 2026 11:19:19 +0200 Subject: [PATCH 22/28] refactor(tests): simplify cleanup logic in package download tests Removed Windows-specific skip condition and adjusted directory handling in TestDownloadPackage_writeFailureCleansUp. The test now uses a temporary directory for zip file creation, ensuring proper cleanup after write failures. This enhances test reliability across platforms. 
--- internal/registry/client_test.go | 15 +++++---------- 1 file changed, 5 insertions(+), 10 deletions(-) diff --git a/internal/registry/client_test.go b/internal/registry/client_test.go index be652f5701..34e68eaf87 100644 --- a/internal/registry/client_test.go +++ b/internal/registry/client_test.go @@ -14,7 +14,6 @@ import ( "net/http/httptest" "os" "path/filepath" - "runtime" "testing" "github.com/ProtonMail/gopenpgp/v2/crypto" @@ -73,9 +72,6 @@ func TestDownloadPackage_unexpectedStatusDoesNotWriteZip(t *testing.T) { } func TestDownloadPackage_writeFailureCleansUp(t *testing.T) { - if runtime.GOOS == "windows" { - t.Skip("read-only directory cleanup test relies on Unix directory permissions") - } zipBytes := testAcmePackageZip(t) srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path != "/epr/acme/acme-1.0.0.zip" { @@ -90,18 +86,17 @@ func TestDownloadPackage_writeFailureCleansUp(t *testing.T) { t.Setenv(environment.WithElasticPackagePrefix("VERIFY_PACKAGE_SIGNATURE"), "") t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), "") - root := t.TempDir() - readOnlyDir := filepath.Join(root, "readonly") - require.NoError(t, os.Mkdir(readOnlyDir, 0o555)) - t.Cleanup(func() { _ = os.Chmod(readOnlyDir, 0o700) }) + dest := t.TempDir() + zipPath := filepath.Join(dest, "acme-1.0.0.zip") + require.NoError(t, os.Mkdir(zipPath, 0o700)) client, err := NewClient(srv.URL) require.NoError(t, err) - _, err = client.DownloadPackage("acme", "1.0.0", readOnlyDir) + _, err = client.DownloadPackage("acme", "1.0.0", dest) require.Error(t, err) require.ErrorContains(t, err, "writing package zip") - _, statErr := os.Stat(filepath.Join(readOnlyDir, "acme-1.0.0.zip")) + _, statErr := os.Stat(zipPath) require.True(t, errors.Is(statErr, fs.ErrNotExist), "partial zip should not remain after a write error") } From 56443bea8056713d7a4c122b118a1ca4090fb4e3 Mon Sep 17 00:00:00 2001 From: Tere Date: Wed, 15 Apr 2026 13:41:41 
+0200 Subject: [PATCH 23/28] revert(tests): remove composable integration targets from build scripts --- .../pipeline.trigger.integration.tests.sh | 16 ---- .buildkite/scripts/integration_tests.sh | 6 +- Makefile | 6 -- scripts/test-build-install-zip-file.sh | 85 +++---------------- scripts/test-build-install-zip.sh | 42 --------- scripts/test-build-zip.sh | 8 -- 6 files changed, 15 insertions(+), 148 deletions(-) diff --git a/.buildkite/pipeline.trigger.integration.tests.sh b/.buildkite/pipeline.trigger.integration.tests.sh index b3cecb0c67..10baa1c00d 100755 --- a/.buildkite/pipeline.trigger.integration.tests.sh +++ b/.buildkite/pipeline.trigger.integration.tests.sh @@ -199,22 +199,6 @@ echo " image: \"${UBUNTU_X86_64_AGENT_IMAGE}\"" echo " artifact_paths:" echo " - build/elastic-stack-dump/install-zip-shellinit/logs/*.log" -echo " - label: \":go: Integration test: build-install-zip-file-composable\"" -echo " command: ./.buildkite/scripts/integration_tests.sh -t test-build-install-zip-file-composable" -echo " agents:" -echo " provider: \"gcp\"" -echo " image: \"${UBUNTU_X86_64_AGENT_IMAGE}\"" -echo " artifact_paths:" -echo " - build/elastic-stack-dump/install-zip-composable/logs/*.log" - -echo " - label: \":go: Integration test: build-install-zip-file-composable-shellinit\"" -echo " command: ./.buildkite/scripts/integration_tests.sh -t test-build-install-zip-file-composable-shellinit" -echo " agents:" -echo " provider: \"gcp\"" -echo " image: \"${UBUNTU_X86_64_AGENT_IMAGE}\"" -echo " artifact_paths:" -echo " - build/elastic-stack-dump/install-zip-composable-shellinit/logs/*.log" - echo " - label: \":go: Integration test: system-flags\"" echo " command: ./.buildkite/scripts/integration_tests.sh -t test-system-test-flags" echo " agents:" diff --git a/.buildkite/scripts/integration_tests.sh b/.buildkite/scripts/integration_tests.sh index 465e520ebe..54deaf7dee 100755 --- a/.buildkite/scripts/integration_tests.sh +++ b/.buildkite/scripts/integration_tests.sh @@ 
-21,10 +21,6 @@ KIND_TARGET="test-check-packages-with-kind" SYSTEM_TEST_FLAGS_TARGET="test-system-test-flags" TEST_BUILD_ZIP_TARGET="test-build-zip" TEST_BUILD_INSTALL_ZIP_TARGET="test-build-install-zip" -TEST_BUILD_INSTALL_ZIP_FILE_TARGET="test-build-install-zip-file" -TEST_BUILD_INSTALL_ZIP_FILE_SHELLINIT_TARGET="test-build-install-zip-file-shellinit" -TEST_BUILD_INSTALL_ZIP_FILE_COMPOSABLE_TARGET="test-build-install-zip-file-composable" -TEST_BUILD_INSTALL_ZIP_FILE_COMPOSABLE_SHELLINIT_TARGET="test-build-install-zip-file-composable-shellinit" REPO_NAME=$(repo_name "${BUILDKITE_REPO}") REPO_BUILD_TAG="${REPO_NAME}/$(buildkite_pr_branch_build_id)" @@ -144,7 +140,7 @@ install_required_tools() { echo "--- Install kind" with_kubernetes ;; - "${FALSE_POSITIVES_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_FILE_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_FILE_SHELLINIT_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_FILE_COMPOSABLE_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_FILE_COMPOSABLE_SHELLINIT_TARGET}") + "${FALSE_POSITIVES_TARGET}" | "${TEST_BUILD_INSTALL_ZIP_TARGET}") echo "--- Install yq" with_yq ;; diff --git a/Makefile b/Makefile index f20f3dd8ef..d4e5fa2969 100644 --- a/Makefile +++ b/Makefile @@ -139,12 +139,6 @@ test-build-install-zip-file: test-build-install-zip-file-shellinit: ./scripts/test-build-install-zip-file.sh -s -test-build-install-zip-file-composable: - ./scripts/test-build-install-zip-file.sh -c - -test-build-install-zip-file-composable-shellinit: - ./scripts/test-build-install-zip-file.sh -c -s - test-system-test-flags: ./scripts/test-system-test-flags.sh diff --git a/scripts/test-build-install-zip-file.sh b/scripts/test-build-install-zip-file.sh index e72be85206..88ab084ea8 100755 --- a/scripts/test-build-install-zip-file.sh +++ b/scripts/test-build-install-zip-file.sh @@ -2,27 +2,6 @@ set -euxo pipefail -ELASTIC_PACKAGE_CONFIG_FILE="${HOME}/.elastic-package/config.yml" -PREV_REGISTRY_URL="" -PACKAGE_REGISTRY_CI_OVERRIDE=0 
-COMPOSABLE_INTEGRATION_DIR="test/packages/composable/02_ci_composable_integration/" -COMPOSABLE_INPUT_DIR="test/packages/composable/01_ci_input_pkg/" -COMPOSABLE_ONLY=0 - -restore_package_registry_config() { - if [[ "${PACKAGE_REGISTRY_CI_OVERRIDE}" -ne 1 ]]; then - return 0 - fi - if [[ ! -f "${ELASTIC_PACKAGE_CONFIG_FILE}" ]]; then - return 0 - fi - if [[ -n "${PREV_REGISTRY_URL}" ]]; then - yq eval --inplace ".package_registry.base_url = \"${PREV_REGISTRY_URL}\"" "${ELASTIC_PACKAGE_CONFIG_FILE}" || true - else - yq eval --inplace 'del(.package_registry.base_url)' "${ELASTIC_PACKAGE_CONFIG_FILE}" || true - fi -} - cleanup() { local r=$? if [ "${r}" -ne 0 ]; then @@ -31,12 +10,7 @@ cleanup() { fi echo "~~~ elastic-package cleanup" - restore_package_registry_config - local output_path="build/elastic-stack-dump/install-zip" - if [ ${COMPOSABLE_ONLY} -eq 1 ]; then - output_path="${output_path}-composable" - fi if [ ${USE_SHELLINIT} -eq 1 ]; then output_path="${output_path}-shellinit" fi @@ -84,9 +58,8 @@ installAndVerifyPackage() { } usage() { - echo "${0} [-c] [-s] [-v ] [-h]" + echo "${0} [-s] [-v ] [-h]" echo "Run test-install-zip suite" - echo -e "\t-c: Run composable-only flow (build input dependency + composable integration; install composable zip only)." echo -e "\t-s: Use elastic-package stack shellinit to export environment variablles. By default, they should be exported manually." echo -e "\t-v : Speciy which Elastic Stack version to use. If not specified it will use the default version in elastic-package." 
echo -e "\t-h: Show this message" @@ -94,11 +67,8 @@ usage() { USE_SHELLINIT=0 STACK_VERSION="default" -while getopts ":csv:h" o; do +while getopts ":sv:h" o; do case "${o}" in - c) - COMPOSABLE_ONLY=1 - ;; s) USE_SHELLINIT=1 ;; @@ -137,44 +107,22 @@ elastic-package stack up -d -v ${ARG_VERSION} ELASTIC_PACKAGE_LINKS_FILE_PATH="$(pwd)/scripts/links_table.yml" export ELASTIC_PACKAGE_LINKS_FILE_PATH -if [ ${COMPOSABLE_ONLY} -eq 1 ]; then - echo "--- Building zip package (composable dependency): ${COMPOSABLE_INPUT_DIR}" - elastic-package build -C "${COMPOSABLE_INPUT_DIR}" -else - # Build packages (see test-build-install-zip.sh for composable phase-2 notes). - for d in test/packages/*/*/; do - # Added set +x in a sub-shell to avoid printing the testype command in the output - # This helps to keep the CI output cleaner - packageTestType=$(set +x ; testype "$d") - # Packages in false_positives can have issues. - if [ "${packageTestType}" == "false_positives" ]; then - continue - fi - if [[ "${d}" == "${COMPOSABLE_INTEGRATION_DIR}" ]]; then - echo "--- Skipping composable integration (phase-2 build): ${d}" - continue - fi - echo "--- Building zip package: ${d}" - elastic-package build -C "$d" - done -fi - -if [[ -f "${ELASTIC_PACKAGE_CONFIG_FILE}" ]]; then - PREV_REGISTRY_URL=$(yq '.package_registry.base_url // ""' "${ELASTIC_PACKAGE_CONFIG_FILE}") - yq eval --inplace '.package_registry.base_url = "https://127.0.0.1:8080"' "${ELASTIC_PACKAGE_CONFIG_FILE}" -else - mkdir -p "$(dirname "${ELASTIC_PACKAGE_CONFIG_FILE}")" - yq -n '.package_registry.base_url = "https://127.0.0.1:8080"' > "${ELASTIC_PACKAGE_CONFIG_FILE}" -fi -PACKAGE_REGISTRY_CI_OVERRIDE=1 - -echo "--- Phase-2 build: composable integration" -elastic-package build -C "${COMPOSABLE_INTEGRATION_DIR}" +# Build packages +for d in test/packages/*/*/; do + # Added set +x in a sub-shell to avoid printing the testype command in the output + # This helps to keep the CI output cleaner + packageTestType=$(set +x ; testype 
"$d") + # Packages in false_positives can have issues. + if [ "${packageTestType}" == "false_positives" ]; then + continue + fi + echo "--- Building zip package: ${d}" + elastic-package build -C "$d" +done # Remove unzipped built packages, leave .zip files rm -r build/packages/*/ -# Apply stack env only for the mode under test (shellinit vs manual exports). if [ ${USE_SHELLINIT} -eq 1 ]; then eval "$(elastic-package stack shellinit)" else @@ -185,10 +133,5 @@ else fi for zipFile in build/packages/*.zip; do - if [ ${COMPOSABLE_ONLY} -eq 1 ]; then - if [[ "$(basename "${zipFile}")" != ci_composable_integration-*.zip ]]; then - continue - fi - fi installAndVerifyPackage "${zipFile}" done diff --git a/scripts/test-build-install-zip.sh b/scripts/test-build-install-zip.sh index 0334f95639..d3bbd3d47d 100755 --- a/scripts/test-build-install-zip.sh +++ b/scripts/test-build-install-zip.sh @@ -2,25 +2,6 @@ set -euxo pipefail -ELASTIC_PACKAGE_CONFIG_FILE="${HOME}/.elastic-package/config.yml" -PREV_REGISTRY_URL="" -PACKAGE_REGISTRY_CI_OVERRIDE=0 -COMPOSABLE_INTEGRATION_DIR="test/packages/composable/02_ci_composable_integration/" - -restore_package_registry_config() { - if [[ "${PACKAGE_REGISTRY_CI_OVERRIDE}" -ne 1 ]]; then - return 0 - fi - if [[ ! -f "${ELASTIC_PACKAGE_CONFIG_FILE}" ]]; then - return 0 - fi - if [[ -n "${PREV_REGISTRY_URL}" ]]; then - yq eval --inplace ".package_registry.base_url = \"${PREV_REGISTRY_URL}\"" "${ELASTIC_PACKAGE_CONFIG_FILE}" || true - else - yq eval --inplace 'del(.package_registry.base_url)' "${ELASTIC_PACKAGE_CONFIG_FILE}" || true - fi -} - cleanup() { local r=$? 
if [ "${r}" -ne 0 ]; then @@ -29,8 +10,6 @@ cleanup() { fi echo "~~~ elastic-package cleanup" - restore_package_registry_config - # Dump stack logs # Required containers could not be running, so ignore the error elastic-package stack dump -v --output build/elastic-stack-dump/build-zip || true @@ -53,7 +32,6 @@ testype() { } OLDPWD=$PWD - # Build packages export ELASTIC_PACKAGE_SIGNER_PRIVATE_KEYFILE="$OLDPWD/scripts/gpg-private.asc" ELASTIC_PACKAGE_SIGNER_PASSPHRASE=$(cat "$OLDPWD/scripts/gpg-pass.txt") @@ -63,8 +41,6 @@ export ELASTIC_PACKAGE_LINKS_FILE_PATH go run ./scripts/gpgkey -# Composable integration: requires ci_input_pkg from the registry. It is built in a -# second phase after the stack is up and package_registry.base_url points at the local EPR. for d in test/packages/*/*/; do # Added set +x in a sub-shell to avoid printing the testype command in the output # This helps to keep the CI output cleaner @@ -73,10 +49,6 @@ for d in test/packages/*/*/; do if [ "${packageTestType}" == "false_positives" ]; then continue fi - if [[ "${d}" == "${COMPOSABLE_INTEGRATION_DIR}" ]]; then - echo "--- Skipping composable integration (phase-2 build after stack is up): ${d}" - continue - fi echo "--- Building package: ${d}" elastic-package build -C "$d" --zip --sign -v done @@ -90,20 +62,6 @@ elastic-package stack up -d -v eval "$(elastic-package stack shellinit)" -# Point elastic-package build at the stack's local package registry so phase-2 can -# download required input packages (see docs/howto/local_package_registry.md). 
-if [[ -f "${ELASTIC_PACKAGE_CONFIG_FILE}" ]]; then - PREV_REGISTRY_URL=$(yq '.package_registry.base_url // ""' "${ELASTIC_PACKAGE_CONFIG_FILE}") - yq eval --inplace '.package_registry.base_url = "https://127.0.0.1:8080"' "${ELASTIC_PACKAGE_CONFIG_FILE}" -else - mkdir -p "$(dirname "${ELASTIC_PACKAGE_CONFIG_FILE}")" - yq -n '.package_registry.base_url = "https://127.0.0.1:8080"' > "${ELASTIC_PACKAGE_CONFIG_FILE}" -fi -PACKAGE_REGISTRY_CI_OVERRIDE=1 - -echo "--- Phase-2 build: composable integration (requires local registry)" -elastic-package build -C "${COMPOSABLE_INTEGRATION_DIR}" --zip --sign -v - # Install packages from working copy for d in test/packages/*/*/; do # Added set +x in a sub-shell to avoid printing the testype command in the output diff --git a/scripts/test-build-zip.sh b/scripts/test-build-zip.sh index cb1d2fd028..b0514c0fbb 100755 --- a/scripts/test-build-zip.sh +++ b/scripts/test-build-zip.sh @@ -23,10 +23,6 @@ testype() { trap cleanup EXIT -# Same as test-build-install-zip.sh: this integration needs the local stack registry -# and phase-2 build; building it here would hit production EPR for requires.input. 
-COMPOSABLE_INTEGRATION_DIR="test/packages/composable/02_ci_composable_integration/" - OLDPWD=$PWD # Build packages export ELASTIC_PACKAGE_SIGNER_PRIVATE_KEYFILE="$OLDPWD/scripts/gpg-private.asc" @@ -45,10 +41,6 @@ for d in test/packages/*/*/; do if [ "${packageTestType}" == "false_positives" ]; then continue fi - if [[ "${d}" == "${COMPOSABLE_INTEGRATION_DIR}" ]]; then - echo "--- Skipping composable integration (built in test-build-install-zip phase 2 only): ${d}" - continue - fi echo "--- Building zip package: ${d}" elastic-package build -C "$d" --zip --sign -v done From bdcc8214d71e92883d7c5435808f9fcc5f966b51 Mon Sep 17 00:00:00 2001 From: Tere Date: Wed, 15 Apr 2026 15:13:44 +0200 Subject: [PATCH 24/28] refactor(requiredinputs): migrate from gopkg.in/yaml.v3 to goccy/go-yaml Replace all gopkg.in/yaml.v3 usage in internal/requiredinputs with github.com/goccy/go-yaml, aligning the package with internal/yamledit. Rewrite yamlutil.go to operate on goccy/go-yaml AST types and reuse yamledit.NewDocumentBytes for parsing. 
Co-Authored-By: Claude Sonnet 4.6 --- internal/requiredinputs/fields.go | 57 ++--- internal/requiredinputs/fields_test.go | 8 +- internal/requiredinputs/policytemplates.go | 65 ++---- internal/requiredinputs/streamdefs.go | 31 ++- internal/requiredinputs/streams.go | 44 ++-- internal/requiredinputs/variables.go | 256 +++++++++------------ internal/requiredinputs/variables_test.go | 155 +++++++------ internal/requiredinputs/yamlutil.go | 162 +++++++++---- 8 files changed, 400 insertions(+), 378 deletions(-) diff --git a/internal/requiredinputs/fields.go b/internal/requiredinputs/fields.go index d8eb9c920a..6e3c44f6bb 100644 --- a/internal/requiredinputs/fields.go +++ b/internal/requiredinputs/fields.go @@ -11,7 +11,8 @@ import ( "os" "path" - "gopkg.in/yaml.v3" + "github.com/goccy/go-yaml/ast" + "github.com/goccy/go-yaml/parser" "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" @@ -83,7 +84,7 @@ func (r *RequiredInputsResolver) mergeInputPkgFields(dsRootDir, inputPkgPath, in // Collect field nodes from input package that are not already defined in the integration. seenNames := make(map[string]bool) - newNodes := make([]*yaml.Node, 0) + newNodes := make([]ast.Node, 0) for _, filePath := range inputFieldFiles { nodes, err := loadFieldNodesFromFile(inputPkgFS, filePath) if err != nil { @@ -104,12 +105,10 @@ func (r *RequiredInputsResolver) mergeInputPkgFields(dsRootDir, inputPkgPath, in return nil } - // Build a YAML document containing the new field nodes as a sequence. - seqNode := &yaml.Node{Kind: yaml.SequenceNode} - seqNode.Content = newNodes - docNode := &yaml.Node{Kind: yaml.DocumentNode, Content: []*yaml.Node{seqNode}} + // Build a YAML sequence containing the new field nodes. + seqNode := newSeqNode(newNodes...) 
- output, err := formatYAMLNode(docNode) + output, err := formatYAMLNode(seqNode) if err != nil { return fmt.Errorf("formatting bundled fields YAML: %w", err) } @@ -156,8 +155,8 @@ func collectExistingFieldNames(dsRootDir string, buildRoot *os.Root) (map[string } // loadFieldNodesFromFile reads a fields YAML file from an fs.FS and returns -// its top-level sequence items as individual yaml.Node pointers. -func loadFieldNodesFromFile(fsys fs.FS, filePath string) ([]*yaml.Node, error) { +// its top-level sequence items as individual ast.Node values. +func loadFieldNodesFromFile(fsys fs.FS, filePath string) ([]ast.Node, error) { data, err := fs.ReadFile(fsys, filePath) if err != nil { return nil, fmt.Errorf("reading file %q: %w", filePath, err) @@ -167,37 +166,31 @@ func loadFieldNodesFromFile(fsys fs.FS, filePath string) ([]*yaml.Node, error) { // loadFieldNodesFromBytes parses a fields YAML document (expected to be a // sequence at the document root) and returns the individual item nodes. -func loadFieldNodesFromBytes(data []byte) ([]*yaml.Node, error) { - var doc yaml.Node - if err := yaml.Unmarshal(data, &doc); err != nil { - return nil, fmt.Errorf("unmarshalling fields YAML: %w", err) +func loadFieldNodesFromBytes(data []byte) ([]ast.Node, error) { + f, err := parser.ParseBytes(data, 0) + if err != nil { + return nil, fmt.Errorf("parsing fields YAML: %w", err) } - if doc.Kind == 0 { - // Empty document. 
+ if len(f.Docs) == 0 || f.Docs[0] == nil { return nil, nil } - root := &doc - if root.Kind == yaml.DocumentNode { - if len(root.Content) == 0 { - return nil, nil - } - root = root.Content[0] + body := f.Docs[0].Body + if body == nil { + return nil, nil } - if root.Kind != yaml.SequenceNode { - return nil, fmt.Errorf("expected sequence at fields document root, got kind %v", root.Kind) + seqNode, ok := body.(*ast.SequenceNode) + if !ok { + return nil, fmt.Errorf("expected sequence at fields document root, got %T", body) } - return root.Content, nil + return seqNode.Values, nil } // fieldNodeName returns the value of the "name" key in a field mapping node, -// or an empty string if the key is absent or the node is nil. -func fieldNodeName(n *yaml.Node) string { - if n == nil { - return "" - } - v := mappingValue(n, "name") - if v == nil { +// or an empty string if the key is absent or the node is not a mapping. +func fieldNodeName(n ast.Node) string { + mn, ok := n.(*ast.MappingNode) + if !ok || mn == nil { return "" } - return v.Value + return nodeStringValue(mappingValue(mn, "name")) } diff --git a/internal/requiredinputs/fields_test.go b/internal/requiredinputs/fields_test.go index f88f34bad5..ac58999cab 100644 --- a/internal/requiredinputs/fields_test.go +++ b/internal/requiredinputs/fields_test.go @@ -10,9 +10,9 @@ import ( "path/filepath" "testing" + "github.com/goccy/go-yaml/ast" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "gopkg.in/yaml.v3" ) // ---- unit tests -------------------------------------------------------------- @@ -58,13 +58,13 @@ func TestLoadFieldNodesFromBytes(t *testing.T) { // mapping node representing a field definition. 
func TestFieldNodeName(t *testing.T) { t.Run("node with name", func(t *testing.T) { - n := &yaml.Node{Kind: yaml.MappingNode} - upsertKey(n, "name", &yaml.Node{Kind: yaml.ScalarNode, Value: "message"}) + n := &ast.MappingNode{BaseNode: &ast.BaseNode{}} + upsertKey(n, "name", strVal("message")) assert.Equal(t, "message", fieldNodeName(n)) }) t.Run("node without name", func(t *testing.T) { - n := &yaml.Node{Kind: yaml.MappingNode} + n := &ast.MappingNode{BaseNode: &ast.BaseNode{}} assert.Equal(t, "", fieldNodeName(n)) }) diff --git a/internal/requiredinputs/policytemplates.go b/internal/requiredinputs/policytemplates.go index fd3f1e55c8..6d74eb4ad3 100644 --- a/internal/requiredinputs/policytemplates.go +++ b/internal/requiredinputs/policytemplates.go @@ -9,7 +9,7 @@ import ( "os" "path" - "gopkg.in/yaml.v3" + "github.com/goccy/go-yaml/ast" "github.com/elastic/elastic-package/internal/packages" ) @@ -18,8 +18,8 @@ func (r *RequiredInputsResolver) bundlePolicyTemplatesInputPackageTemplates(mani // parse the manifest YAML document preserving formatting for targeted modifications // using manifestBytes allows us to preserve comments and formatting in the manifest when we update it with template paths from input packages - var doc yaml.Node - if err := yaml.Unmarshal(manifestBytes, &doc); err != nil { + root, err := parseDocumentRootMapping(manifestBytes) + if err != nil { return fmt.Errorf("failed to parse manifest YAML: %w", err) } @@ -54,14 +54,14 @@ func (r *RequiredInputsResolver) bundlePolicyTemplatesInputPackageTemplates(mani } paths = append(inputPaths, paths...) - if err := setInputPolicyTemplateTemplatePaths(&doc, ptIdx, inputIdx, paths); err != nil { + if err := setInputPolicyTemplateTemplatePaths(root, ptIdx, inputIdx, paths); err != nil { return fmt.Errorf("failed to update policy template manifest with input package templates: %w", err) } } } // Serialise the updated YAML document back to disk. 
- updated, err := formatYAMLNode(&doc) + updated, err := formatYAMLNode(root) if err != nil { return fmt.Errorf("failed to format updated manifest: %w", err) } @@ -78,52 +78,33 @@ func collectAndCopyInputPkgPolicyTemplates(inputPkgPath, inputPkgName string, bu return collectAndCopyPolicyTemplateFiles(inputPkgPath, inputPkgName, path.Join("agent", "input"), buildRoot) } -// setInputPolicyTemplateTemplatePaths updates the manifest YAML document to set the template_paths for the specified policy template input to the provided paths -func setInputPolicyTemplateTemplatePaths(doc *yaml.Node, policyTemplatesIdx int, inputIdx int, paths []string) error { - // Navigate: document -> root mapping -> "policy_templates" -> sequence -> item [policyTemplatesIdx] -> mapping -> "inputs" -> sequence -> item [inputIdx] -> input mapping. - root := doc - if root.Kind == yaml.DocumentNode { - if len(root.Content) == 0 { - return fmt.Errorf("failed to set policy template input paths: empty YAML document") - } - root = root.Content[0] - } - if root.Kind != yaml.MappingNode { - return fmt.Errorf("failed to set policy template input paths: expected mapping node at document root") - } - - // policy_templates: - // - inputs: - // - template_path: foo - policyTemplatesNode := mappingValue(root, "policy_templates") - if policyTemplatesNode == nil { +// setInputPolicyTemplateTemplatePaths updates the manifest YAML root mapping to +// set template_paths for the specified policy template input. +func setInputPolicyTemplateTemplatePaths(root *ast.MappingNode, policyTemplatesIdx int, inputIdx int, paths []string) error { + // Navigate: root mapping -> "policy_templates" -> sequence -> item [policyTemplatesIdx] -> mapping -> "inputs" -> sequence -> item [inputIdx] -> input mapping. 
+ policyTemplatesNode, ok := mappingValue(root, "policy_templates").(*ast.SequenceNode) + if !ok { return fmt.Errorf("failed to set policy template input paths: 'policy_templates' key not found in manifest") } - if policyTemplatesNode.Kind != yaml.SequenceNode { - return fmt.Errorf("failed to set policy template input paths: 'policy_templates' is not a sequence") - } - if policyTemplatesIdx < 0 || policyTemplatesIdx >= len(policyTemplatesNode.Content) { - return fmt.Errorf("failed to set policy template input paths: policy template index %d out of range (len=%d)", policyTemplatesIdx, len(policyTemplatesNode.Content)) + if policyTemplatesIdx < 0 || policyTemplatesIdx >= len(policyTemplatesNode.Values) { + return fmt.Errorf("failed to set policy template input paths: policy template index %d out of range (len=%d)", policyTemplatesIdx, len(policyTemplatesNode.Values)) } - policyTemplateNode := policyTemplatesNode.Content[policyTemplatesIdx] - if policyTemplateNode.Kind != yaml.MappingNode { + policyTemplateNode, ok := policyTemplatesNode.Values[policyTemplatesIdx].(*ast.MappingNode) + if !ok { return fmt.Errorf("failed to set policy template input paths: policy template entry %d is not a mapping", policyTemplatesIdx) } - inputsNode := mappingValue(policyTemplateNode, "inputs") - if inputsNode == nil { + inputsNode, ok := mappingValue(policyTemplateNode, "inputs").(*ast.SequenceNode) + if !ok { return fmt.Errorf("failed to set policy template input paths: 'inputs' key not found in policy template %d", policyTemplatesIdx) } - if inputsNode.Kind != yaml.SequenceNode { - return fmt.Errorf("failed to set policy template input paths: 'inputs' is not a sequence") - } - if inputIdx < 0 || inputIdx >= len(inputsNode.Content) { - return fmt.Errorf("failed to set policy template input paths: input index %d out of range (len=%d)", inputIdx, len(inputsNode.Content)) + if inputIdx < 0 || inputIdx >= len(inputsNode.Values) { + return fmt.Errorf("failed to set policy template input 
paths: input index %d out of range (len=%d)", inputIdx, len(inputsNode.Values)) } - inputNode := inputsNode.Content[inputIdx] - if inputNode.Kind != yaml.MappingNode { + inputNode, ok := inputsNode.Values[inputIdx].(*ast.MappingNode) + if !ok { return fmt.Errorf("failed to set policy template input paths: input entry %d is not a mapping", inputIdx) } @@ -131,9 +112,9 @@ func setInputPolicyTemplateTemplatePaths(doc *yaml.Node, policyTemplatesIdx int, removeKey(inputNode, "template_path") // Build the template_paths sequence node. - seqNode := &yaml.Node{Kind: yaml.SequenceNode} + seqNode := newSeqNode() for _, p := range paths { - seqNode.Content = append(seqNode.Content, &yaml.Node{Kind: yaml.ScalarNode, Value: p}) + seqNode.Values = append(seqNode.Values, strVal(p)) } // Upsert template_paths on the input node. diff --git a/internal/requiredinputs/streamdefs.go b/internal/requiredinputs/streamdefs.go index 7da023ac99..12b0b6a749 100644 --- a/internal/requiredinputs/streamdefs.go +++ b/internal/requiredinputs/streamdefs.go @@ -10,8 +10,6 @@ import ( "os" "path" - "gopkg.in/yaml.v3" - "github.com/elastic/elastic-package/internal/logger" "github.com/elastic/elastic-package/internal/packages" ) @@ -74,8 +72,8 @@ func applyInputTypesToComposableManifest( if err != nil { return fmt.Errorf("reading manifest: %w", err) } - var doc yaml.Node - if err := yaml.Unmarshal(manifestBytes, &doc); err != nil { + root, err := parseDocumentRootMapping(manifestBytes) + if err != nil { return fmt.Errorf("parsing manifest YAML: %w", err) } @@ -89,25 +87,25 @@ func applyInputTypesToComposableManifest( return fmt.Errorf("input package %q referenced in policy_templates[%d].inputs[%d] not found in required inputs", input.Package, ptIdx, inputIdx) } - inputNode, err := getInputMappingNode(&doc, ptIdx, inputIdx) + inputNode, err := getInputMappingNode(root, ptIdx, inputIdx) if err != nil { return fmt.Errorf("getting input node at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) } - 
upsertKey(inputNode, "type", &yaml.Node{Kind: yaml.ScalarNode, Value: info.identifier}) + upsertKey(inputNode, "type", strVal(info.identifier)) if mappingValue(inputNode, "title") == nil && info.pkgTitle != "" { - upsertKey(inputNode, "title", &yaml.Node{Kind: yaml.ScalarNode, Value: info.pkgTitle}) + upsertKey(inputNode, "title", strVal(info.pkgTitle)) } if mappingValue(inputNode, "description") == nil && info.pkgDescription != "" { - upsertKey(inputNode, "description", &yaml.Node{Kind: yaml.ScalarNode, Value: info.pkgDescription}) + upsertKey(inputNode, "description", strVal(info.pkgDescription)) } removeKey(inputNode, "package") } } - updated, err := formatYAMLNode(&doc) + updated, err := formatYAMLNode(root) if err != nil { return fmt.Errorf("formatting updated manifest: %w", err) } @@ -131,8 +129,8 @@ func applyInputTypesToDataStreamManifests(buildRoot *os.Root, infoByPkg map[stri return fmt.Errorf("reading data stream manifest %q: %w", manifestPath, err) } - var dsDoc yaml.Node - if err := yaml.Unmarshal(dsManifestBytes, &dsDoc); err != nil { + dsRoot, err := parseDocumentRootMapping(dsManifestBytes) + if err != nil { return fmt.Errorf("parsing data stream manifest YAML %q: %w", manifestPath, err) } @@ -150,24 +148,24 @@ func applyInputTypesToDataStreamManifests(buildRoot *os.Root, infoByPkg map[stri return fmt.Errorf("input package %q referenced in %q streams[%d] not found in required inputs", stream.Package, path.Dir(manifestPath), streamIdx) } - streamNode, err := getStreamMappingNode(&dsDoc, streamIdx) + streamNode, err := getStreamMappingNode(dsRoot, streamIdx) if err != nil { return fmt.Errorf("getting stream node at index %d in %q: %w", streamIdx, manifestPath, err) } - upsertKey(streamNode, "input", &yaml.Node{Kind: yaml.ScalarNode, Value: info.identifier}) + upsertKey(streamNode, "input", strVal(info.identifier)) if stream.Title == "" && info.pkgTitle != "" { - upsertKey(streamNode, "title", &yaml.Node{Kind: yaml.ScalarNode, Value: info.pkgTitle}) + 
upsertKey(streamNode, "title", strVal(info.pkgTitle)) } if stream.Description == "" && info.pkgDescription != "" { - upsertKey(streamNode, "description", &yaml.Node{Kind: yaml.ScalarNode, Value: info.pkgDescription}) + upsertKey(streamNode, "description", strVal(info.pkgDescription)) } removeKey(streamNode, "package") } - dsUpdated, err := formatYAMLNode(&dsDoc) + dsUpdated, err := formatYAMLNode(dsRoot) if err != nil { return fmt.Errorf("formatting updated data stream manifest %q: %w", manifestPath, err) } @@ -218,3 +216,4 @@ func loadInputPkgInfo(pkgPath string) (inputPkgInfo, error) { pkgDescription: m.Description, }, nil } + diff --git a/internal/requiredinputs/streams.go b/internal/requiredinputs/streams.go index 09533bfbca..77b6939a4c 100644 --- a/internal/requiredinputs/streams.go +++ b/internal/requiredinputs/streams.go @@ -11,7 +11,7 @@ import ( "os" "path" - "gopkg.in/yaml.v3" + "github.com/goccy/go-yaml/ast" "github.com/elastic/elastic-package/internal/packages" ) @@ -39,8 +39,8 @@ func (r *RequiredInputsResolver) processDataStreamManifest(manifestPath string, } // parse the manifest YAML document preserving formatting for targeted modifications // using manifestBytes allows us to preserve comments and formatting in the manifest when we update it with template paths from input packages - var doc yaml.Node - if err := yaml.Unmarshal(manifestBytes, &doc); err != nil { + root, err := parseDocumentRootMapping(manifestBytes) + if err != nil { return fmt.Errorf("failed to parse data stream manifest YAML: %w", err) } @@ -79,7 +79,7 @@ func (r *RequiredInputsResolver) processDataStreamManifest(manifestPath string, } paths = append(inputPaths, paths...) 
- if err := setStreamTemplatePaths(&doc, idx, paths); err != nil { + if err := setStreamTemplatePaths(root, idx, paths); err != nil { return fmt.Errorf("failed to set stream template paths in manifest %q: %w", manifestPath, err) } } @@ -88,7 +88,7 @@ func (r *RequiredInputsResolver) processDataStreamManifest(manifestPath string, } // Serialise the updated YAML document back to disk. - updated, err := formatYAMLNode(&doc) + updated, err := formatYAMLNode(root) if err != nil { return fmt.Errorf("failed to format updated manifest: %w", err) } @@ -118,32 +118,18 @@ func collectAndCopyInputPkgDataStreams(dsRootDir, inputPkgPath, inputPkgName str return collectAndCopyPolicyTemplateFiles(inputPkgPath, inputPkgName, agentStreamDir, buildRoot) } -func setStreamTemplatePaths(doc *yaml.Node, streamIdx int, paths []string) error { - // Navigate: document -> mapping -> "streams" key -> sequence -> item [streamIdx] - root := doc - if root.Kind == yaml.DocumentNode { - if len(root.Content) == 0 { - return fmt.Errorf("failed to set stream template paths: empty YAML document") - } - root = root.Content[0] - } - if root.Kind != yaml.MappingNode { - return fmt.Errorf("failed to set stream template paths: expected mapping node at document root") - } - - streamsNode := mappingValue(root, "streams") - if streamsNode == nil { +func setStreamTemplatePaths(root *ast.MappingNode, streamIdx int, paths []string) error { + // Navigate: root mapping -> "streams" key -> sequence -> item [streamIdx] + streamsNode, ok := mappingValue(root, "streams").(*ast.SequenceNode) + if !ok { return fmt.Errorf("failed to set stream template paths: 'streams' key not found in manifest") } - if streamsNode.Kind != yaml.SequenceNode { - return fmt.Errorf("failed to set stream template paths: 'streams' is not a sequence") - } - if streamIdx >= len(streamsNode.Content) { - return fmt.Errorf("failed to set stream template paths: stream index %d out of range (len=%d)", streamIdx, len(streamsNode.Content)) + if 
streamIdx >= len(streamsNode.Values) { + return fmt.Errorf("failed to set stream template paths: stream index %d out of range (len=%d)", streamIdx, len(streamsNode.Values)) } - streamNode := streamsNode.Content[streamIdx] - if streamNode.Kind != yaml.MappingNode { + streamNode, ok := streamsNode.Values[streamIdx].(*ast.MappingNode) + if !ok { return fmt.Errorf("failed to set stream template paths: stream entry %d is not a mapping", streamIdx) } @@ -151,9 +137,9 @@ func setStreamTemplatePaths(doc *yaml.Node, streamIdx int, paths []string) error removeKey(streamNode, "template_path") // Build the template_paths sequence node. - seqNode := &yaml.Node{Kind: yaml.SequenceNode} + seqNode := newSeqNode() for _, p := range paths { - seqNode.Content = append(seqNode.Content, &yaml.Node{Kind: yaml.ScalarNode, Value: p}) + seqNode.Values = append(seqNode.Values, strVal(p)) } // Upsert template_paths. diff --git a/internal/requiredinputs/variables.go b/internal/requiredinputs/variables.go index b907ee4081..1a35a42183 100644 --- a/internal/requiredinputs/variables.go +++ b/internal/requiredinputs/variables.go @@ -11,7 +11,8 @@ import ( "os" "path" - "gopkg.in/yaml.v3" + "github.com/goccy/go-yaml/ast" + "github.com/goccy/go-yaml/parser" "github.com/elastic/elastic-package/internal/packages" ) @@ -41,51 +42,52 @@ func (r *RequiredInputsResolver) mergeVariables( inputPkgPaths map[string]string, buildRoot *os.Root, ) error { - doc, err := readYAMLDocFromBuildRoot(buildRoot, "manifest.yml") + root, err := readYAMLDocFromBuildRoot(buildRoot, "manifest.yml") if err != nil { return err } - promotedVarOverridesByScope, err := buildPromotedVarOverrideMap(manifest, &doc) + promotedVarOverridesByScope, err := buildPromotedVarOverrideMap(manifest, root) if err != nil { return err } - if err := mergePolicyTemplateInputLevelVars(manifest, &doc, inputPkgPaths, promotedVarOverridesByScope); err != nil { + if err := mergePolicyTemplateInputLevelVars(manifest, root, inputPkgPaths, 
promotedVarOverridesByScope); err != nil { return err } - if err := writeFormattedYAMLDoc(buildRoot, "manifest.yml", &doc); err != nil { + if err := writeFormattedYAMLDoc(buildRoot, "manifest.yml", root); err != nil { return err } return mergeDataStreamStreamLevelVars(buildRoot, inputPkgPaths, promotedVarOverridesByScope) } -// readYAMLDocFromBuildRoot reads relPath from buildRoot and parses it as a YAML document node. -func readYAMLDocFromBuildRoot(buildRoot *os.Root, relPath string) (yaml.Node, error) { +// readYAMLDocFromBuildRoot reads relPath from buildRoot, parses it via yamledit, +// and returns the document root as a *ast.MappingNode. +func readYAMLDocFromBuildRoot(buildRoot *os.Root, relPath string) (*ast.MappingNode, error) { b, err := buildRoot.ReadFile(relPath) if err != nil { - return yaml.Node{}, fmt.Errorf("reading %q: %w", relPath, err) + return nil, fmt.Errorf("reading %q: %w", relPath, err) } - var doc yaml.Node - if err := yaml.Unmarshal(b, &doc); err != nil { - return yaml.Node{}, fmt.Errorf("parsing YAML %q: %w", relPath, err) + root, err := parseDocumentRootMapping(b) + if err != nil { + return nil, fmt.Errorf("parsing YAML %q: %w", relPath, err) } - return doc, nil + return root, nil } // buildPromotedVarOverrideMap indexes composable policy_templates[].inputs[].vars // by input package name and data stream scope for use when merging promotions. 
-func buildPromotedVarOverrideMap(manifest *packages.PackageManifest, doc *yaml.Node) (map[promotedVarScopeKey]map[string]*yaml.Node, error) { - out := make(map[promotedVarScopeKey]map[string]*yaml.Node) +func buildPromotedVarOverrideMap(manifest *packages.PackageManifest, root *ast.MappingNode) (map[promotedVarScopeKey]map[string]*ast.MappingNode, error) { + out := make(map[promotedVarScopeKey]map[string]*ast.MappingNode) for ptIdx, pt := range manifest.PolicyTemplates { for inputIdx, input := range pt.Inputs { if input.Package == "" || len(input.Vars) == 0 { continue } - inputNode, err := getInputMappingNode(doc, ptIdx, inputIdx) + inputNode, err := getInputMappingNode(root, ptIdx, inputIdx) if err != nil { return nil, fmt.Errorf("getting input node at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) } @@ -95,7 +97,7 @@ func buildPromotedVarOverrideMap(manifest *packages.PackageManifest, doc *yaml.N return nil, fmt.Errorf("reading override var nodes at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) } - overrideByName := make(map[string]*yaml.Node, len(overrideNodes)) + overrideByName := make(map[string]*ast.MappingNode, len(overrideNodes)) for _, n := range overrideNodes { overrideByName[varNodeName(n)] = n } @@ -113,12 +115,12 @@ func buildPromotedVarOverrideMap(manifest *packages.PackageManifest, doc *yaml.N } // mergePolicyTemplateInputLevelVars writes merged promoted vars onto each -// package-backed input in the composable manifest YAML (in-memory doc). +// package-backed input in the composable manifest YAML (in-memory root mapping). 
func mergePolicyTemplateInputLevelVars( manifest *packages.PackageManifest, - doc *yaml.Node, + root *ast.MappingNode, inputPkgPaths map[string]string, - promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*yaml.Node, + promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*ast.MappingNode, ) error { for ptIdx, pt := range manifest.PolicyTemplates { for inputIdx, input := range pt.Inputs { @@ -140,14 +142,14 @@ func mergePolicyTemplateInputLevelVars( promotedOverrides := unionPromotedOverridesForInput(pt, input.Package, promotedVarOverridesByScope) - inputNode, err := getInputMappingNode(doc, ptIdx, inputIdx) + inputNode, err := getInputMappingNode(root, ptIdx, inputIdx) if err != nil { return fmt.Errorf("getting input node at pt[%d].inputs[%d]: %w", ptIdx, inputIdx, err) } mergedSeq := mergeInputLevelVarNodes(baseVarOrder, baseVarByName, promotedOverrides) - if len(mergedSeq.Content) > 0 { + if len(mergedSeq.Values) > 0 { upsertKey(inputNode, "vars", mergedSeq) } else { removeKey(inputNode, "vars") @@ -162,9 +164,9 @@ func mergePolicyTemplateInputLevelVars( func unionPromotedOverridesForInput( pt packages.PolicyTemplate, refInputPackage string, - promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*yaml.Node, -) map[string]*yaml.Node { - promotedOverrides := make(map[string]*yaml.Node) + promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*ast.MappingNode, +) map[string]*ast.MappingNode { + promotedOverrides := make(map[string]*ast.MappingNode) dsNames := pt.DataStreams if len(dsNames) == 0 { dsNames = []string{""} @@ -178,9 +180,9 @@ func unionPromotedOverridesForInput( return promotedOverrides } -// writeFormattedYAMLDoc serializes doc with package YAML formatting and writes it to relPath. -func writeFormattedYAMLDoc(buildRoot *os.Root, relPath string, doc *yaml.Node) error { - updated, err := formatYAMLNode(doc) +// writeFormattedYAMLDoc serializes root with package YAML formatting and writes it to relPath. 
+func writeFormattedYAMLDoc(buildRoot *os.Root, relPath string, root *ast.MappingNode) error { + updated, err := formatYAMLNode(root) if err != nil { return fmt.Errorf("formatting updated %q: %w", relPath, err) } @@ -194,7 +196,7 @@ func writeFormattedYAMLDoc(buildRoot *os.Root, relPath string, doc *yaml.Node) e func mergeDataStreamStreamLevelVars( buildRoot *os.Root, inputPkgPaths map[string]string, - promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*yaml.Node, + promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*ast.MappingNode, ) error { dsManifestPaths, err := fs.Glob(buildRoot.FS(), "data_stream/*/manifest.yml") if err != nil { @@ -209,8 +211,8 @@ func mergeDataStreamStreamLevelVars( return fmt.Errorf("reading data stream manifest %q: %w", manifestPath, err) } - var dsDoc yaml.Node - if err := yaml.Unmarshal(dsManifestBytes, &dsDoc); err != nil { + dsRoot, err := parseDocumentRootMapping(dsManifestBytes) + if err != nil { return fmt.Errorf("parsing data stream manifest YAML %q: %w", manifestPath, err) } @@ -219,11 +221,11 @@ func mergeDataStreamStreamLevelVars( return fmt.Errorf("parsing data stream manifest %q: %w", manifestPath, err) } - if err := mergeStreamsInDSManifest(&dsDoc, dsManifest, dsName, inputPkgPaths, promotedVarOverridesByScope, manifestPath); err != nil { + if err := mergeStreamsInDSManifest(dsRoot, dsManifest, dsName, inputPkgPaths, promotedVarOverridesByScope, manifestPath); err != nil { return err } - if err := writeFormattedYAMLDoc(buildRoot, manifestPath, &dsDoc); err != nil { + if err := writeFormattedYAMLDoc(buildRoot, manifestPath, dsRoot); err != nil { return fmt.Errorf("data stream manifest %q: %w", manifestPath, err) } } @@ -233,11 +235,11 @@ func mergeDataStreamStreamLevelVars( // mergeStreamsInDSManifest merges non-promoted input vars into package-backed streams in one DS manifest. 
func mergeStreamsInDSManifest( - dsDoc *yaml.Node, + dsRoot *ast.MappingNode, dsManifest *packages.DataStreamManifest, dsName string, inputPkgPaths map[string]string, - promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*yaml.Node, + promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*ast.MappingNode, manifestPath string, ) error { for streamIdx, stream := range dsManifest.Streams { @@ -259,7 +261,7 @@ func mergeStreamsInDSManifest( promotedNames := promotedVarNamesForStream(stream.Package, dsName, promotedVarOverridesByScope) - streamNode, err := getStreamMappingNode(dsDoc, streamIdx) + streamNode, err := getStreamMappingNode(dsRoot, streamIdx) if err != nil { return fmt.Errorf("getting stream node at index %d in %q: %w", streamIdx, manifestPath, err) } @@ -275,7 +277,7 @@ func mergeStreamsInDSManifest( mergedSeq := mergeStreamLevelVarNodes(baseVarOrder, baseVarByName, promotedNames, dsOverrideNodes) - if len(mergedSeq.Content) > 0 { + if len(mergedSeq.Values) > 0 { upsertKey(streamNode, "vars", mergedSeq) } else { removeKey(streamNode, "vars") @@ -288,7 +290,7 @@ func mergeStreamsInDSManifest( // overrides for (refInputPackage, composableDataStream) plus template-wide (refInputPackage, ""). func promotedVarNamesForStream( refInputPackage, composableDataStream string, - promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*yaml.Node, + promotedVarOverridesByScope map[promotedVarScopeKey]map[string]*ast.MappingNode, ) map[string]bool { promotedNames := make(map[string]bool) for _, key := range []promotedVarScopeKey{ @@ -305,7 +307,7 @@ func promotedVarNamesForStream( // loadInputPkgVarNodes opens the input package at pkgPath, reads all vars from // all policy templates (dedup by name, first wins) and returns them as an // ordered slice and a name→node lookup map. 
-func loadInputPkgVarNodes(pkgPath string) ([]string, map[string]*yaml.Node, error) { +func loadInputPkgVarNodes(pkgPath string) ([]string, map[string]*ast.MappingNode, error) { pkgFS, closeFn, err := openPackageFS(pkgPath) if err != nil { return nil, nil, fmt.Errorf("opening package: %w", err) @@ -317,48 +319,46 @@ func loadInputPkgVarNodes(pkgPath string) ([]string, map[string]*yaml.Node, erro return nil, nil, fmt.Errorf("reading manifest: %w", err) } - var doc yaml.Node - if err := yaml.Unmarshal(manifestBytes, &doc); err != nil { + f, err := parser.ParseBytes(manifestBytes, 0) + if err != nil { return nil, nil, fmt.Errorf("parsing manifest YAML: %w", err) } - - root := &doc - if root.Kind == yaml.DocumentNode { - if len(root.Content) == 0 { - return nil, nil, nil - } - root = root.Content[0] + if len(f.Docs) == 0 || f.Docs[0] == nil { + return nil, nil, nil } - if root.Kind != yaml.MappingNode { + root, ok := f.Docs[0].Body.(*ast.MappingNode) + if !ok { return nil, nil, fmt.Errorf("expected mapping node at document root") } - policyTemplatesNode := mappingValue(root, "policy_templates") - if policyTemplatesNode == nil || policyTemplatesNode.Kind != yaml.SequenceNode { + policyTemplatesNode, ok := mappingValue(root, "policy_templates").(*ast.SequenceNode) + if !ok { return nil, nil, nil } order := make([]string, 0) - byName := make(map[string]*yaml.Node) + byName := make(map[string]*ast.MappingNode) - for _, ptNode := range policyTemplatesNode.Content { - if ptNode.Kind != yaml.MappingNode { + for _, ptNode := range policyTemplatesNode.Values { + ptMapping, ok := ptNode.(*ast.MappingNode) + if !ok { continue } - varsNode := mappingValue(ptNode, "vars") - if varsNode == nil || varsNode.Kind != yaml.SequenceNode { + varsNode, ok := mappingValue(ptMapping, "vars").(*ast.SequenceNode) + if !ok { continue } - for _, varNode := range varsNode.Content { - if varNode.Kind != yaml.MappingNode { + for _, varNode := range varsNode.Values { + varMapping, ok := 
varNode.(*ast.MappingNode) + if !ok { continue } - name := varNodeName(varNode) + name := varNodeName(varMapping) if name == "" || byName[name] != nil { continue // skip empty names and duplicates (first wins) } order = append(order, name) - byName[name] = varNode + byName[name] = varMapping } } @@ -370,17 +370,17 @@ func loadInputPkgVarNodes(pkgPath string) ([]string, map[string]*yaml.Node, erro // Order follows baseVarOrder (input package declaration order). func mergeInputLevelVarNodes( baseVarOrder []string, - baseVarByName map[string]*yaml.Node, - promotedOverrides map[string]*yaml.Node, -) *yaml.Node { - seqNode := &yaml.Node{Kind: yaml.SequenceNode} + baseVarByName map[string]*ast.MappingNode, + promotedOverrides map[string]*ast.MappingNode, +) *ast.SequenceNode { + seqNode := newSeqNode() for _, varName := range baseVarOrder { overrideNode, promoted := promotedOverrides[varName] if !promoted { continue } merged := mergeVarNode(baseVarByName[varName], overrideNode) - seqNode.Content = append(seqNode.Content, merged) + seqNode.Values = append(seqNode.Values, merged) } return seqNode } @@ -392,16 +392,16 @@ func mergeInputLevelVarNodes( // order. func mergeStreamLevelVarNodes( baseVarOrder []string, - baseVarByName map[string]*yaml.Node, + baseVarByName map[string]*ast.MappingNode, promotedNames map[string]bool, - dsOverrides []*yaml.Node, -) *yaml.Node { - dsOverrideByName := make(map[string]*yaml.Node, len(dsOverrides)) + dsOverrides []*ast.MappingNode, +) *ast.SequenceNode { + dsOverrideByName := make(map[string]*ast.MappingNode, len(dsOverrides)) for _, v := range dsOverrides { dsOverrideByName[varNodeName(v)] = v } - seqNode := &yaml.Node{Kind: yaml.SequenceNode} + seqNode := newSeqNode() // Non-promoted base vars first (in input pkg order). 
for _, varName := range baseVarOrder { @@ -410,19 +410,19 @@ func mergeStreamLevelVarNodes( } baseNode := baseVarByName[varName] overrideNode, hasOverride := dsOverrideByName[varName] - var merged *yaml.Node + var merged *ast.MappingNode if hasOverride { merged = mergeVarNode(baseNode, overrideNode) } else { - merged = cloneNode(baseNode) + merged = cloneNode(baseNode).(*ast.MappingNode) } - seqNode.Content = append(seqNode.Content, merged) + seqNode.Values = append(seqNode.Values, merged) } // Novel DS vars (not present in base) appended in declaration order. for _, v := range dsOverrides { if _, inBase := baseVarByName[varNodeName(v)]; !inBase { - seqNode.Content = append(seqNode.Content, cloneNode(v)) + seqNode.Values = append(seqNode.Values, cloneNode(v).(*ast.MappingNode)) } } @@ -432,22 +432,20 @@ func mergeStreamLevelVarNodes( // mergeVarNode merges fields from overrideNode into a clone of baseNode. // All keys in override win; absent keys in override are inherited from base. // The "name" key is always preserved from base. -func mergeVarNode(base, override *yaml.Node) *yaml.Node { - result := cloneNode(base) - for i := 0; i+1 < len(override.Content); i += 2 { - keyNode := override.Content[i] - valNode := override.Content[i+1] - if keyNode.Value == "name" { +func mergeVarNode(base, override *ast.MappingNode) *ast.MappingNode { + result := cloneNode(base).(*ast.MappingNode) + for _, kv := range override.Values { + if kv.Key.String() == "name" { continue // always preserve name from base } - upsertKey(result, keyNode.Value, cloneNode(valNode)) + upsertKey(result, kv.Key.String(), cloneNode(kv.Value)) } return result } // checkDuplicateVarNodes returns an error if any var name appears more than // once in the provided nodes. 
-func checkDuplicateVarNodes(varNodes []*yaml.Node) error { +func checkDuplicateVarNodes(varNodes []*ast.MappingNode) error { seen := make(map[string]bool, len(varNodes)) for _, v := range varNodes { name := varNodeName(v) @@ -460,71 +458,58 @@ func checkDuplicateVarNodes(varNodes []*yaml.Node) error { } // varNodeName extracts the value of the "name" key from a var mapping node. -func varNodeName(v *yaml.Node) string { - nameVal := mappingValue(v, "name") - if nameVal == nil { - return "" - } - return nameVal.Value +func varNodeName(v *ast.MappingNode) string { + return nodeStringValue(mappingValue(v, "name")) } // readVarNodes extracts the individual var mapping nodes from the "vars" // sequence of the given mapping node. Returns nil if no "vars" key is present. -func readVarNodes(mappingNode *yaml.Node) ([]*yaml.Node, error) { - varsNode := mappingValue(mappingNode, "vars") - if varsNode == nil { - return nil, nil - } - if varsNode.Kind != yaml.SequenceNode { +func readVarNodes(mappingNode *ast.MappingNode) ([]*ast.MappingNode, error) { + varsSeq, ok := mappingValue(mappingNode, "vars").(*ast.SequenceNode) + if !ok { + v := mappingValue(mappingNode, "vars") + if v == nil { + return nil, nil + } return nil, fmt.Errorf("'vars' is not a sequence node") } - result := make([]*yaml.Node, 0, len(varsNode.Content)) - for _, item := range varsNode.Content { - if item.Kind != yaml.MappingNode { + result := make([]*ast.MappingNode, 0, len(varsSeq.Values)) + for _, item := range varsSeq.Values { + mn, ok := item.(*ast.MappingNode) + if !ok { return nil, fmt.Errorf("var entry is not a mapping node") } - result = append(result, item) + result = append(result, mn) } return result, nil } // getInputMappingNode navigates to policy_templates[ptIdx].inputs[inputIdx] in -// the given YAML document and returns the input mapping node. 
-func getInputMappingNode(doc *yaml.Node, ptIdx, inputIdx int) (*yaml.Node, error) { - root := doc - if root.Kind == yaml.DocumentNode { - if len(root.Content) == 0 { - return nil, fmt.Errorf("empty YAML document") - } - root = root.Content[0] - } - if root.Kind != yaml.MappingNode { - return nil, fmt.Errorf("expected mapping node at document root") - } - - ptsNode := mappingValue(root, "policy_templates") - if ptsNode == nil || ptsNode.Kind != yaml.SequenceNode { +// the given YAML root mapping and returns the input mapping node. +func getInputMappingNode(root *ast.MappingNode, ptIdx, inputIdx int) (*ast.MappingNode, error) { + ptsNode, ok := mappingValue(root, "policy_templates").(*ast.SequenceNode) + if !ok { return nil, fmt.Errorf("'policy_templates' not found or not a sequence") } - if ptIdx < 0 || ptIdx >= len(ptsNode.Content) { - return nil, fmt.Errorf("policy template index %d out of range (len=%d)", ptIdx, len(ptsNode.Content)) + if ptIdx < 0 || ptIdx >= len(ptsNode.Values) { + return nil, fmt.Errorf("policy template index %d out of range (len=%d)", ptIdx, len(ptsNode.Values)) } - ptNode := ptsNode.Content[ptIdx] - if ptNode.Kind != yaml.MappingNode { + ptNode, ok := ptsNode.Values[ptIdx].(*ast.MappingNode) + if !ok { return nil, fmt.Errorf("policy template %d is not a mapping", ptIdx) } - inputsNode := mappingValue(ptNode, "inputs") - if inputsNode == nil || inputsNode.Kind != yaml.SequenceNode { + inputsNode, ok := mappingValue(ptNode, "inputs").(*ast.SequenceNode) + if !ok { return nil, fmt.Errorf("'inputs' not found or not a sequence in policy template %d", ptIdx) } - if inputIdx < 0 || inputIdx >= len(inputsNode.Content) { - return nil, fmt.Errorf("input index %d out of range (len=%d)", inputIdx, len(inputsNode.Content)) + if inputIdx < 0 || inputIdx >= len(inputsNode.Values) { + return nil, fmt.Errorf("input index %d out of range (len=%d)", inputIdx, len(inputsNode.Values)) } - inputNode := inputsNode.Content[inputIdx] - if inputNode.Kind != 
yaml.MappingNode { + inputNode, ok := inputsNode.Values[inputIdx].(*ast.MappingNode) + if !ok { return nil, fmt.Errorf("input %d is not a mapping", inputIdx) } @@ -532,29 +517,18 @@ func getInputMappingNode(doc *yaml.Node, ptIdx, inputIdx int) (*yaml.Node, error } // getStreamMappingNode navigates to streams[streamIdx] in the given YAML -// document and returns the stream mapping node. -func getStreamMappingNode(doc *yaml.Node, streamIdx int) (*yaml.Node, error) { - root := doc - if root.Kind == yaml.DocumentNode { - if len(root.Content) == 0 { - return nil, fmt.Errorf("empty YAML document") - } - root = root.Content[0] - } - if root.Kind != yaml.MappingNode { - return nil, fmt.Errorf("expected mapping node at document root") - } - - streamsNode := mappingValue(root, "streams") - if streamsNode == nil || streamsNode.Kind != yaml.SequenceNode { +// root mapping and returns the stream mapping node. +func getStreamMappingNode(root *ast.MappingNode, streamIdx int) (*ast.MappingNode, error) { + streamsNode, ok := mappingValue(root, "streams").(*ast.SequenceNode) + if !ok { return nil, fmt.Errorf("'streams' not found or not a sequence") } - if streamIdx < 0 || streamIdx >= len(streamsNode.Content) { - return nil, fmt.Errorf("stream index %d out of range (len=%d)", streamIdx, len(streamsNode.Content)) + if streamIdx < 0 || streamIdx >= len(streamsNode.Values) { + return nil, fmt.Errorf("stream index %d out of range (len=%d)", streamIdx, len(streamsNode.Values)) } - streamNode := streamsNode.Content[streamIdx] - if streamNode.Kind != yaml.MappingNode { + streamNode, ok := streamsNode.Values[streamIdx].(*ast.MappingNode) + if !ok { return nil, fmt.Errorf("stream %d is not a mapping", streamIdx) } diff --git a/internal/requiredinputs/variables_test.go b/internal/requiredinputs/variables_test.go index 38b4d9a457..8fbdde190a 100644 --- a/internal/requiredinputs/variables_test.go +++ b/internal/requiredinputs/variables_test.go @@ -9,23 +9,24 @@ import ( "path/filepath" 
"testing" + "github.com/goccy/go-yaml/ast" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "gopkg.in/yaml.v3" "github.com/elastic/elastic-package/internal/packages" + "github.com/elastic/elastic-package/internal/yamledit" ) // ---- helpers ----------------------------------------------------------------- -// varNode builds a minimal YAML mapping node representing a variable with the +// varNode builds a minimal *ast.MappingNode representing a variable with the // given name and extra key=value pairs (passed as alternating key, value // strings for simple scalar values). -func varNode(name string, extras ...string) *yaml.Node { - n := &yaml.Node{Kind: yaml.MappingNode} - upsertKey(n, "name", &yaml.Node{Kind: yaml.ScalarNode, Value: name}) +func varNode(name string, extras ...string) *ast.MappingNode { + n := &ast.MappingNode{BaseNode: &ast.BaseNode{}} + upsertKey(n, "name", strVal(name)) for i := 0; i+1 < len(extras); i += 2 { - upsertKey(n, extras[i], &yaml.Node{Kind: yaml.ScalarNode, Value: extras[i+1]}) + upsertKey(n, extras[i], strVal(extras[i+1])) } return n } @@ -71,11 +72,11 @@ func copyComposableIntegrationFixture(t *testing.T) string { // isolation, the resolver could corrupt cached or shared input-package nodes. func TestCloneNode(t *testing.T) { original := varNode("paths", "type", "text", "multi", "true") - cloned := cloneNode(original) + cloned := cloneNode(original).(*ast.MappingNode) // Mutating the clone must not affect the original. 
- upsertKey(cloned, "type", &yaml.Node{Kind: yaml.ScalarNode, Value: "keyword"}) - assert.Equal(t, "text", mappingValue(original, "type").Value) + upsertKey(cloned, "type", strVal("keyword")) + assert.Equal(t, "text", nodeStringValue(mappingValue(original, "type"))) } // TestMergeVarNode verifies mergeVarNode: per-variable field merge where the @@ -90,33 +91,33 @@ func TestMergeVarNode(t *testing.T) { override := varNode("paths", "type", "keyword", "title", "Custom Paths", "multi", "false") merged := mergeVarNode(base, override) assert.Equal(t, "paths", varNodeName(merged)) - assert.Equal(t, "keyword", mappingValue(merged, "type").Value) - assert.Equal(t, "Custom Paths", mappingValue(merged, "title").Value) - assert.Equal(t, "false", mappingValue(merged, "multi").Value) + assert.Equal(t, "keyword", nodeStringValue(mappingValue(merged, "type"))) + assert.Equal(t, "Custom Paths", nodeStringValue(mappingValue(merged, "title"))) + assert.Equal(t, "false", nodeStringValue(mappingValue(merged, "multi"))) }) t.Run("partial override", func(t *testing.T) { override := varNode("paths", "title", "My Paths") merged := mergeVarNode(base, override) assert.Equal(t, "paths", varNodeName(merged)) - assert.Equal(t, "text", mappingValue(merged, "type").Value) // from base - assert.Equal(t, "My Paths", mappingValue(merged, "title").Value) - assert.Equal(t, "true", mappingValue(merged, "multi").Value) // from base + assert.Equal(t, "text", nodeStringValue(mappingValue(merged, "type"))) // from base + assert.Equal(t, "My Paths", nodeStringValue(mappingValue(merged, "title"))) + assert.Equal(t, "true", nodeStringValue(mappingValue(merged, "multi"))) // from base }) t.Run("empty override", func(t *testing.T) { override := varNode("paths") merged := mergeVarNode(base, override) assert.Equal(t, "paths", varNodeName(merged)) - assert.Equal(t, "text", mappingValue(merged, "type").Value) // from base - assert.Equal(t, "Paths", mappingValue(merged, "title").Value) // from base + assert.Equal(t, 
"text", nodeStringValue(mappingValue(merged, "type"))) // from base + assert.Equal(t, "Paths", nodeStringValue(mappingValue(merged, "title"))) // from base }) t.Run("name not renamed", func(t *testing.T) { // Even if the override specifies a different name value, base name wins. - override := &yaml.Node{Kind: yaml.MappingNode} - upsertKey(override, "name", &yaml.Node{Kind: yaml.ScalarNode, Value: "should-be-ignored"}) - upsertKey(override, "type", &yaml.Node{Kind: yaml.ScalarNode, Value: "keyword"}) + override := &ast.MappingNode{} + upsertKey(override, "name", strVal("should-be-ignored")) + upsertKey(override, "type", strVal("keyword")) merged := mergeVarNode(base, override) assert.Equal(t, "paths", varNodeName(merged)) }) @@ -124,8 +125,8 @@ func TestMergeVarNode(t *testing.T) { t.Run("adds new field from override", func(t *testing.T) { override := varNode("paths", "description", "My description") merged := mergeVarNode(base, override) - assert.Equal(t, "My description", mappingValue(merged, "description").Value) - assert.Equal(t, "text", mappingValue(merged, "type").Value) // base preserved + assert.Equal(t, "My description", nodeStringValue(mappingValue(merged, "description"))) + assert.Equal(t, "text", nodeStringValue(mappingValue(merged, "type"))) // base preserved }) } @@ -134,12 +135,12 @@ func TestMergeVarNode(t *testing.T) { // instead of producing ambiguous merged output for Fleet. 
func TestCheckDuplicateVarNodes(t *testing.T) { t.Run("no duplicates", func(t *testing.T) { - nodes := []*yaml.Node{varNode("paths"), varNode("encoding"), varNode("timeout")} + nodes := []*ast.MappingNode{varNode("paths"), varNode("encoding"), varNode("timeout")} assert.NoError(t, checkDuplicateVarNodes(nodes)) }) t.Run("one duplicate", func(t *testing.T) { - nodes := []*yaml.Node{varNode("paths"), varNode("encoding"), varNode("paths")} + nodes := []*ast.MappingNode{varNode("paths"), varNode("encoding"), varNode("paths")} err := checkDuplicateVarNodes(nodes) require.Error(t, err) assert.Contains(t, err.Error(), "paths") @@ -160,40 +161,43 @@ func TestMergeInputLevelVarNodes(t *testing.T) { timeoutBase := varNode("timeout", "type", "text", "default", "30s") baseOrder := []string{"paths", "encoding", "timeout"} - baseByName := map[string]*yaml.Node{ + baseByName := map[string]*ast.MappingNode{ "paths": pathsBase, "encoding": encodingBase, "timeout": timeoutBase, } t.Run("empty promoted → empty sequence", func(t *testing.T) { - seq := mergeInputLevelVarNodes(baseOrder, baseByName, map[string]*yaml.Node{}) - assert.Empty(t, seq.Content) + seq := mergeInputLevelVarNodes(baseOrder, baseByName, map[string]*ast.MappingNode{}) + assert.Empty(t, seq.Values) }) t.Run("one promoted partial override", func(t *testing.T) { - promotedOverrides := map[string]*yaml.Node{ + promotedOverrides := map[string]*ast.MappingNode{ "paths": varNode("paths", "default", "/var/log/custom/*.log"), } seq := mergeInputLevelVarNodes(baseOrder, baseByName, promotedOverrides) - require.Len(t, seq.Content, 1) - assert.Equal(t, "paths", varNodeName(seq.Content[0])) - assert.Equal(t, "/var/log/custom/*.log", mappingValue(seq.Content[0], "default").Value) - assert.Equal(t, "text", mappingValue(seq.Content[0], "type").Value) // from base + require.Len(t, seq.Values, 1) + item := seq.Values[0].(*ast.MappingNode) + assert.Equal(t, "paths", varNodeName(item)) + assert.Equal(t, "/var/log/custom/*.log", 
nodeStringValue(mappingValue(item, "default"))) + assert.Equal(t, "text", nodeStringValue(mappingValue(item, "type"))) // from base }) t.Run("multiple promoted in base order", func(t *testing.T) { - promotedOverrides := map[string]*yaml.Node{ + promotedOverrides := map[string]*ast.MappingNode{ "timeout": varNode("timeout", "default", "60s"), "encoding": varNode("encoding", "show_user", "true"), } seq := mergeInputLevelVarNodes(baseOrder, baseByName, promotedOverrides) - require.Len(t, seq.Content, 2) + require.Len(t, seq.Values, 2) // Order must follow baseOrder: encoding before timeout. - assert.Equal(t, "encoding", varNodeName(seq.Content[0])) - assert.Equal(t, "timeout", varNodeName(seq.Content[1])) - assert.Equal(t, "true", mappingValue(seq.Content[0], "show_user").Value) - assert.Equal(t, "60s", mappingValue(seq.Content[1], "default").Value) + item0 := seq.Values[0].(*ast.MappingNode) + item1 := seq.Values[1].(*ast.MappingNode) + assert.Equal(t, "encoding", varNodeName(item0)) + assert.Equal(t, "timeout", varNodeName(item1)) + assert.Equal(t, "true", nodeStringValue(mappingValue(item0, "show_user"))) + assert.Equal(t, "60s", nodeStringValue(mappingValue(item1, "default"))) }) } @@ -207,7 +211,7 @@ func TestMergeStreamLevelVarNodes(t *testing.T) { timeoutBase := varNode("timeout", "type", "text", "default", "30s") baseOrder := []string{"paths", "encoding", "timeout"} - baseByName := map[string]*yaml.Node{ + baseByName := map[string]*ast.MappingNode{ "paths": pathsBase, "encoding": encodingBase, "timeout": timeoutBase, @@ -215,50 +219,53 @@ func TestMergeStreamLevelVarNodes(t *testing.T) { t.Run("no promoted, no overrides → all base vars", func(t *testing.T) { seq := mergeStreamLevelVarNodes(baseOrder, baseByName, nil, nil) - require.Len(t, seq.Content, 3) - assert.Equal(t, "paths", varNodeName(seq.Content[0])) - assert.Equal(t, "encoding", varNodeName(seq.Content[1])) - assert.Equal(t, "timeout", varNodeName(seq.Content[2])) + require.Len(t, seq.Values, 3) + 
assert.Equal(t, "paths", varNodeName(seq.Values[0].(*ast.MappingNode))) + assert.Equal(t, "encoding", varNodeName(seq.Values[1].(*ast.MappingNode))) + assert.Equal(t, "timeout", varNodeName(seq.Values[2].(*ast.MappingNode))) }) t.Run("some promoted → promoted excluded", func(t *testing.T) { promoted := map[string]bool{"paths": true, "encoding": true} seq := mergeStreamLevelVarNodes(baseOrder, baseByName, promoted, nil) - require.Len(t, seq.Content, 1) - assert.Equal(t, "timeout", varNodeName(seq.Content[0])) + require.Len(t, seq.Values, 1) + assert.Equal(t, "timeout", varNodeName(seq.Values[0].(*ast.MappingNode))) }) t.Run("DS override on existing base var", func(t *testing.T) { - dsOverrides := []*yaml.Node{varNode("encoding", "show_user", "true")} + dsOverrides := []*ast.MappingNode{varNode("encoding", "show_user", "true")} seq := mergeStreamLevelVarNodes(baseOrder, baseByName, nil, dsOverrides) - require.Len(t, seq.Content, 3) + require.Len(t, seq.Values, 3) // encoding is merged - encodingMerged := seq.Content[1] + encodingMerged := seq.Values[1].(*ast.MappingNode) assert.Equal(t, "encoding", varNodeName(encodingMerged)) - assert.Equal(t, "true", mappingValue(encodingMerged, "show_user").Value) - assert.Equal(t, "text", mappingValue(encodingMerged, "type").Value) // from base + assert.Equal(t, "true", nodeStringValue(mappingValue(encodingMerged, "show_user"))) + assert.Equal(t, "text", nodeStringValue(mappingValue(encodingMerged, "type"))) // from base }) t.Run("novel DS var appended", func(t *testing.T) { - dsOverrides := []*yaml.Node{varNode("custom_tag", "type", "text")} + dsOverrides := []*ast.MappingNode{varNode("custom_tag", "type", "text")} seq := mergeStreamLevelVarNodes(baseOrder, baseByName, nil, dsOverrides) - require.Len(t, seq.Content, 4) // 3 base + 1 novel - assert.Equal(t, "custom_tag", varNodeName(seq.Content[3])) + require.Len(t, seq.Values, 4) // 3 base + 1 novel + assert.Equal(t, "custom_tag", varNodeName(seq.Values[3].(*ast.MappingNode))) 
}) t.Run("mixed: promoted + DS merge + novel", func(t *testing.T) { promoted := map[string]bool{"paths": true} - dsOverrides := []*yaml.Node{ + dsOverrides := []*ast.MappingNode{ varNode("encoding", "show_user", "true"), varNode("custom_tag", "type", "text"), } seq := mergeStreamLevelVarNodes(baseOrder, baseByName, promoted, dsOverrides) // paths excluded (promoted); encoding merged; timeout base; custom_tag novel - require.Len(t, seq.Content, 3) - assert.Equal(t, "encoding", varNodeName(seq.Content[0])) - assert.Equal(t, "true", mappingValue(seq.Content[0], "show_user").Value) - assert.Equal(t, "timeout", varNodeName(seq.Content[1])) - assert.Equal(t, "custom_tag", varNodeName(seq.Content[2])) + require.Len(t, seq.Values, 3) + item0 := seq.Values[0].(*ast.MappingNode) + item1 := seq.Values[1].(*ast.MappingNode) + item2 := seq.Values[2].(*ast.MappingNode) + assert.Equal(t, "encoding", varNodeName(item0)) + assert.Equal(t, "true", nodeStringValue(mappingValue(item0, "show_user"))) + assert.Equal(t, "timeout", varNodeName(item1)) + assert.Equal(t, "custom_tag", varNodeName(item2)) }) } @@ -271,9 +278,9 @@ func TestLoadInputPkgVarNodes(t *testing.T) { order, byName, err := loadInputPkgVarNodes(pkgPath) require.NoError(t, err) assert.Equal(t, []string{"paths", "encoding", "timeout"}, order) - assert.Equal(t, "text", mappingValue(byName["paths"], "type").Value) - assert.Equal(t, "text", mappingValue(byName["encoding"], "type").Value) - assert.Equal(t, "text", mappingValue(byName["timeout"], "type").Value) + assert.Equal(t, "text", nodeStringValue(mappingValue(byName["paths"], "type"))) + assert.Equal(t, "text", nodeStringValue(mappingValue(byName["encoding"], "type"))) + assert.Equal(t, "text", nodeStringValue(mappingValue(byName["timeout"], "type"))) }) t.Run("package with no vars", func(t *testing.T) { @@ -295,7 +302,7 @@ func TestPromotedVarNamesForStream_UnionsScopedAndTemplateWide(t *testing.T) { dsScoped := varNode("paths", "type", "text") templateWide := 
varNode("encoding", "type", "text") - byScope := map[promotedVarScopeKey]map[string]*yaml.Node{ + byScope := map[promotedVarScopeKey]map[string]*ast.MappingNode{ {refInputPackage: refPkg, composableDataStream: "my_logs"}: { "paths": dsScoped, }, @@ -320,7 +327,7 @@ func TestUnionPromotedOverridesForInput_MergesOverridesAcrossDataStreams(t *test paths := varNode("paths", "title", "P") encoding := varNode("encoding", "title", "E") - byScope := map[promotedVarScopeKey]map[string]*yaml.Node{ + byScope := map[promotedVarScopeKey]map[string]*ast.MappingNode{ {refInputPackage: refPkg, composableDataStream: "ds_a"}: {"paths": paths}, {refInputPackage: refPkg, composableDataStream: "ds_b"}: {"encoding": encoding}, } @@ -360,12 +367,13 @@ policy_templates: title: Promoted paths `) - var doc yaml.Node - require.NoError(t, yaml.Unmarshal(manifestYAML, &doc)) + doc, err := yamledit.NewDocumentBytes(manifestYAML) + require.NoError(t, err) + root := doc.AST().Docs[0].Body.(*ast.MappingNode) m, err := packages.ReadPackageManifestBytes(manifestYAML) require.NoError(t, err) - idx, err := buildPromotedVarOverrideMap(m, &doc) + idx, err := buildPromotedVarOverrideMap(m, root) require.NoError(t, err) keyAlpha := promotedVarScopeKey{refInputPackage: "ref_pkg", composableDataStream: "ds_alpha"} @@ -374,7 +382,7 @@ policy_templates: require.Contains(t, idx, keyBeta) assert.Contains(t, idx[keyAlpha], "paths") assert.Contains(t, idx[keyBeta], "paths") - assert.Equal(t, "Promoted paths", mappingValue(idx[keyAlpha]["paths"], "title").Value) + assert.Equal(t, "Promoted paths", nodeStringValue(mappingValue(idx[keyAlpha]["paths"], "title"))) } // TestBuildPromotedVarOverrideMap_NoDataStreamsUsesEmptyScope verifies that a @@ -397,12 +405,13 @@ policy_templates: type: text `) - var doc yaml.Node - require.NoError(t, yaml.Unmarshal(manifestYAML, &doc)) + doc, err := yamledit.NewDocumentBytes(manifestYAML) + require.NoError(t, err) + root := doc.AST().Docs[0].Body.(*ast.MappingNode) m, err := 
packages.ReadPackageManifestBytes(manifestYAML) require.NoError(t, err) - idx, err := buildPromotedVarOverrideMap(m, &doc) + idx, err := buildPromotedVarOverrideMap(m, root) require.NoError(t, err) key := promotedVarScopeKey{refInputPackage: "ref_pkg", composableDataStream: ""} @@ -593,9 +602,9 @@ func TestMergeVariables_DuplicateError(t *testing.T) { // TestMergeVariables_TwoPolicyTemplatesScopedPromotion verifies that promotion // is scoped per policy template data stream: composable vars under one template -// promote only for that template’s streams; another template referencing the +// promote only for that template's streams; another template referencing the // same input package without composable vars keeps all base vars on its streams. -// This guards against incorrectly applying one template’s promotions to every +// This guards against incorrectly applying one template's promotions to every // stream that uses the same input package. func TestMergeVariables_TwoPolicyTemplatesScopedPromotion(t *testing.T) { buildPackageRoot := copyFixturePackage(t, "with_merging_two_policy_templates") diff --git a/internal/requiredinputs/yamlutil.go b/internal/requiredinputs/yamlutil.go index 09aea0aad6..7b37d8fa8b 100644 --- a/internal/requiredinputs/yamlutil.go +++ b/internal/requiredinputs/yamlutil.go @@ -6,69 +6,109 @@ package requiredinputs import ( "fmt" - "slices" - "gopkg.in/yaml.v3" + "github.com/goccy/go-yaml" + "github.com/goccy/go-yaml/ast" + "github.com/goccy/go-yaml/parser" + "github.com/goccy/go-yaml/printer" + "github.com/goccy/go-yaml/token" "github.com/elastic/elastic-package/internal/formatter" + "github.com/elastic/elastic-package/internal/yamledit" ) -// mappingValue returns the value node for the given key in a YAML mapping node, -// or nil if the key is not present. 
-func mappingValue(node *yaml.Node, key string) *yaml.Node { - idx := slices.IndexFunc(node.Content, func(n *yaml.Node) bool { - return n.Value == key - }) - if idx < 0 || idx+1 >= len(node.Content) { - return nil +// mappingValue returns the value ast.Node for the given key in a YAML mapping +// node, or nil if the key is not present. +func mappingValue(node *ast.MappingNode, key string) ast.Node { + for _, kv := range node.Values { + if kv.Key.String() == key { + return kv.Value + } } - return node.Content[idx+1] + return nil } // removeKey removes a key-value pair from a YAML mapping node. -func removeKey(node *yaml.Node, key string) { - idx := slices.IndexFunc(node.Content, func(n *yaml.Node) bool { - return n.Value == key - }) - if idx >= 0 && idx+1 < len(node.Content) { - node.Content = slices.Delete(node.Content, idx, idx+2) +func removeKey(node *ast.MappingNode, key string) { + for i, kv := range node.Values { + if kv.Key.String() == key { + node.Values = append(node.Values[:i], node.Values[i+1:]...) + return + } } } // upsertKey sets key to value in a YAML mapping node, adding it if absent. -func upsertKey(node *yaml.Node, key string, value *yaml.Node) { - idx := slices.IndexFunc(node.Content, func(n *yaml.Node) bool { - return n.Value == key - }) - if idx >= 0 && idx+1 < len(node.Content) { - node.Content[idx+1] = value - return +// When inserting a new key, the column position is derived from the existing +// entries so the new node serialises with the same indentation as its siblings. +// For block-style SequenceNode values, the sequence Start column is set to +// match the key column so blockStyleString generates correct indentation. +func upsertKey(node *ast.MappingNode, key string, value ast.Node) { + // Derive column from existing entries so new nodes indent like their + // siblings. Fall back to 1 when the mapping has no entries yet (e.g. + // freshly constructed nodes in tests). 
+ col := 1 + if len(node.Values) > 0 { + col = node.Values[0].Key.GetToken().Position.Column + } + // For block-style sequences, match the sequence Start column to the key + // column so SequenceNode.blockStyleString produces the correct indentation + // regardless of whether the key is new or already exists. + if sn, ok := value.(*ast.SequenceNode); ok && !sn.IsFlowStyle { + sn.Start.Position.Column = col } - keyNode := &yaml.Node{Kind: yaml.ScalarNode, Value: key} - node.Content = append(node.Content, keyNode, value) + + for _, kv := range node.Values { + if kv.Key.String() == key { + kv.Value = value + return + } + } + // Key not present — build a new MappingValueNode directly to avoid + // yaml.ValueToNode's MarshalYAML path which requires non-nil Start tokens + // on sequence/mapping nodes. + pos := &token.Position{Column: col, Line: 1} + keyTk := token.New(key, key, pos) + colonTk := token.New(":", ":", pos) + mv := ast.MappingValue(colonTk, ast.String(keyTk), value) + node.Values = append(node.Values, mv) } -// cloneNode returns a deep copy of the YAML node tree so base nodes from the -// input package can be reused for multiple independent merges without aliasing. -func cloneNode(n *yaml.Node) *yaml.Node { +// newSeqNode creates a *ast.SequenceNode with a valid Start token so that the +// goccy printer can serialise it without a nil-pointer panic. +// Values can be any ast.Node; for string scalars prefer strVal(). +func newSeqNode(values ...ast.Node) *ast.SequenceNode { + pos := &token.Position{Column: 1, Line: 1} + sn := ast.Sequence(token.New("-", "-", pos), false) + sn.Values = values + return sn +} + +// cloneNode returns a deep copy of the YAML node tree via round-trip +// serialization so base nodes from the input package can be reused for multiple +// independent merges without aliasing. +// Panics if serialization or parsing of an already-valid node fails (impossible). 
+func cloneNode(n ast.Node) ast.Node { if n == nil { return nil } - clone := *n - if len(n.Content) > 0 { - clone.Content = make([]*yaml.Node, len(n.Content)) - for i, c := range n.Content { - clone.Content[i] = cloneNode(c) - } + p := printer.Printer{} + b := p.PrintNode(n) + f, err := parser.ParseBytes(b, 0) + if err != nil { + panic(fmt.Sprintf("cloneNode: failed to re-parse: %v", err)) } - return &clone + if len(f.Docs) == 0 || f.Docs[0] == nil { + return nil + } + return f.Docs[0].Body } -func formatYAMLNode(doc *yaml.Node) ([]byte, error) { - raw, err := yaml.Marshal(doc) - if err != nil { - return nil, fmt.Errorf("failed to marshal YAML: %w", err) - } +// formatYAMLNode marshals an ast.Node to bytes and applies the package's YAML +// formatter with KeysWithDotActionNone. +func formatYAMLNode(node ast.Node) ([]byte, error) { + p := printer.Printer{} + raw := p.PrintNode(node) yamlFormatter := formatter.NewYAMLFormatter(formatter.KeysWithDotActionNone) formatted, _, err := yamlFormatter.Format(raw) if err != nil { @@ -76,3 +116,43 @@ func formatYAMLNode(doc *yaml.Node) ([]byte, error) { } return formatted, nil } + +// nodeStringValue extracts the string value from a scalar ast.Node. For +// StringNode, the raw Value field is returned. For other scalars, String() is +// used. Returns "" for nil nodes. +func nodeStringValue(n ast.Node) string { + if n == nil { + return "" + } + if sn, ok := n.(*ast.StringNode); ok { + return sn.Value + } + return n.String() +} + +// strVal converts a plain string to a YAML scalar ast.Node. +// Panics if construction fails (impossible for string inputs). +func strVal(s string) ast.Node { + n, err := yaml.ValueToNode(s) + if err != nil { + panic(fmt.Sprintf("strVal: unexpected error for %q: %v", s, err)) + } + return n +} + +// parseDocumentRootMapping parses YAML bytes via yamledit and returns the +// document root as a *ast.MappingNode. Reuses internal/yamledit for parsing. 
+func parseDocumentRootMapping(data []byte) (*ast.MappingNode, error) { + doc, err := yamledit.NewDocumentBytes(data) + if err != nil { + return nil, err + } + if len(doc.AST().Docs) == 0 || doc.AST().Docs[0] == nil { + return nil, fmt.Errorf("empty YAML document") + } + root, ok := doc.AST().Docs[0].Body.(*ast.MappingNode) + if !ok { + return nil, fmt.Errorf("expected mapping node at document root, got %T", doc.AST().Docs[0].Body) + } + return root, nil +} From f6af43c1fa30cfe771be50de6abf5f9a7f40e2fa Mon Sep 17 00:00:00 2001 From: Tere Date: Wed, 15 Apr 2026 16:10:18 +0200 Subject: [PATCH 25/28] chore(requiredinputs): remove unnecessary blank line in streamdefs.go --- internal/requiredinputs/streamdefs.go | 1 - 1 file changed, 1 deletion(-) diff --git a/internal/requiredinputs/streamdefs.go b/internal/requiredinputs/streamdefs.go index 12b0b6a749..5fd9c5d9de 100644 --- a/internal/requiredinputs/streamdefs.go +++ b/internal/requiredinputs/streamdefs.go @@ -216,4 +216,3 @@ func loadInputPkgInfo(pkgPath string) (inputPkgInfo, error) { pkgDescription: m.Description, }, nil } - From 4a7129e396751fbbfc5581f9428789927942d9ca Mon Sep 17 00:00:00 2001 From: Tere Date: Wed, 15 Apr 2026 16:28:13 +0200 Subject: [PATCH 26/28] refactor(verification): remove package signature verification logic Deleted the package signature verification code from the project, including related tests and environment variable documentation. This simplifies the codebase and removes unused functionality related to verifying downloaded packages from the Package Registry. 
--- README.md | 4 - internal/files/verify.go | 68 -------------- internal/files/verify_test.go | 122 ------------------------- internal/registry/client.go | 66 -------------- internal/registry/client_test.go | 149 +------------------------------ tools/readme/readme.md.tmpl | 4 - 6 files changed, 1 insertion(+), 412 deletions(-) delete mode 100644 internal/files/verify.go delete mode 100644 internal/files/verify_test.go diff --git a/README.md b/README.md index 5ab8d3988a..5886e189d4 100644 --- a/README.md +++ b/README.md @@ -823,10 +823,6 @@ There are available some environment variables that could be used to change some - `ELASTIC_PACKAGE_SIGNER_PRIVATE_KEYFILE`: Path to the private key file to sign packages. - `ELASTIC_PACKAGE_SIGNER_PASSPHRASE`: Passphrase to use the private key file. -- Related to verifying packages downloaded from the Package Registry (EPR): - - `ELASTIC_PACKAGE_VERIFY_PACKAGE_SIGNATURE`: If set to `true` (or `1`), `elastic-package` verifies detached OpenPGP signatures for package zips fetched from the registry (for example when resolving required input packages). The registry must serve a signature at the same path as the zip with a `.sig` suffix (for example `/epr/apache/apache-1.0.0.zip.sig`). Leave this unset or `false` for local or unsigned registries. - - `ELASTIC_PACKAGE_VERIFIER_PUBLIC_KEYFILE`: Path to an armored **public** key matching the key that signed packages on that registry. Required when `ELASTIC_PACKAGE_VERIFY_PACKAGE_SIGNATURE` is enabled; the file must exist before the download runs. - - Related to tests: - `ELASTIC_PACKAGE_SERVERLESS_PIPELINE_TEST_DISABLE_COMPARE_RESULTS`: If set to `true`, the results from pipeline tests are not compared to avoid errors from GeoIP. 
- `ELASTIC_PACKAGE_DISABLE_ELASTIC_AGENT_WOLFI`: If set to `true`, the Elastic Agent image used for running agents will be using the Ubuntu docker images diff --git a/internal/files/verify.go b/internal/files/verify.go deleted file mode 100644 index e8b9b05dcc..0000000000 --- a/internal/files/verify.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. Licensed under the Elastic License; -// you may not use this file except in compliance with the Elastic License. - -package files - -import ( - "fmt" - "io" - "os" - "strconv" - - "github.com/ProtonMail/gopenpgp/v2/crypto" - - "github.com/elastic/elastic-package/internal/environment" -) - -var ( - verifyPackageSignatureEnv = environment.WithElasticPackagePrefix("VERIFY_PACKAGE_SIGNATURE") - verifierPublicKeyfileEnv = environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE") -) - -// PackageSignatureVerificationFromEnv reports whether detached PGP verification should run -// for registry package downloads. When verify is true, publicKeyPath is the path to an -// armored public key and has been checked for existence. A non-nil err means the environment -// is inconsistent (e.g. verify enabled but no key path or inaccessible file). 
-func PackageSignatureVerificationFromEnv() (verify bool, publicKeyPath string, err error) { - raw := os.Getenv(verifyPackageSignatureEnv) - if raw == "" { - return false, "", nil - } - verify, err = strconv.ParseBool(raw) - if err != nil { - return false, "", fmt.Errorf("parse %s=%q: %w", verifyPackageSignatureEnv, raw, err) - } - if !verify { - return false, "", nil - } - publicKeyPath = os.Getenv(verifierPublicKeyfileEnv) - if publicKeyPath == "" { - return true, "", fmt.Errorf("%s is true but %s is not set", verifyPackageSignatureEnv, verifierPublicKeyfileEnv) - } - if _, err := os.Stat(publicKeyPath); err != nil { - return true, "", fmt.Errorf("can't access verifier public keyfile (path: %s): %w", publicKeyPath, err) - } - return true, publicKeyPath, nil -} - -// VerifyDetachedPGP checks that signatureArmored is a valid detached OpenPGP signature over -// the bytes read from data, using the armored publicKeyArmored. -func VerifyDetachedPGP(data io.Reader, signatureArmored []byte, publicKeyArmored []byte) error { - pubKey, err := crypto.NewKeyFromArmored(string(publicKeyArmored)) - if err != nil { - return fmt.Errorf("reading public key: %w", err) - } - keyRing, err := crypto.NewKeyRing(pubKey) - if err != nil { - return fmt.Errorf("building key ring: %w", err) - } - sig, err := crypto.NewPGPSignatureFromArmored(string(signatureArmored)) - if err != nil { - return fmt.Errorf("reading signature: %w", err) - } - if err := keyRing.VerifyDetachedStream(data, sig, crypto.GetUnixTime()); err != nil { - return fmt.Errorf("signature verification failed: %w", err) - } - return nil -} diff --git a/internal/files/verify_test.go b/internal/files/verify_test.go deleted file mode 100644 index 90b8fd9ac4..0000000000 --- a/internal/files/verify_test.go +++ /dev/null @@ -1,122 +0,0 @@ -// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one -// or more contributor license agreements. 
Licensed under the Elastic License; -// you may not use this file except in compliance with the Elastic License. - -package files - -import ( - "bytes" - "os" - "path/filepath" - "strings" - "testing" - - "github.com/ProtonMail/gopenpgp/v2/crypto" - "github.com/stretchr/testify/require" - - "github.com/elastic/elastic-package/internal/environment" -) - -func TestVerifyDetachedPGP_roundTrip(t *testing.T) { - content := []byte("package-bytes-for-signature") - passphrase := []byte("test-passphrase") - - priv, err := crypto.GenerateKey("Test Verify", "", "rsa", 2048) - require.NoError(t, err) - priv, err = priv.Lock(passphrase) - require.NoError(t, err) - unlocked, err := priv.Unlock(passphrase) - require.NoError(t, err) - t.Cleanup(func() { unlocked.ClearPrivateParams() }) - - signRing, err := crypto.NewKeyRing(unlocked) - require.NoError(t, err) - - sig, err := signRing.SignDetachedStream(bytes.NewReader(content)) - require.NoError(t, err) - armoredSig, err := sig.GetArmored() - require.NoError(t, err) - - pubArmored, err := unlocked.GetArmoredPublicKey() - require.NoError(t, err) - - err = VerifyDetachedPGP(bytes.NewReader(content), []byte(armoredSig), []byte(pubArmored)) - require.NoError(t, err) -} - -func TestVerifyDetachedPGP_wrongContent(t *testing.T) { - content := []byte("original") - other := []byte("tampered") - passphrase := []byte("p") - - priv, err := crypto.GenerateKey("Test Wrong", "", "rsa", 2048) - require.NoError(t, err) - priv, err = priv.Lock(passphrase) - require.NoError(t, err) - unlocked, err := priv.Unlock(passphrase) - require.NoError(t, err) - t.Cleanup(func() { unlocked.ClearPrivateParams() }) - - signRing, err := crypto.NewKeyRing(unlocked) - require.NoError(t, err) - sig, err := signRing.SignDetachedStream(bytes.NewReader(content)) - require.NoError(t, err) - armoredSig, err := sig.GetArmored() - require.NoError(t, err) - pubArmored, err := unlocked.GetArmoredPublicKey() - require.NoError(t, err) - - err = 
VerifyDetachedPGP(bytes.NewReader(other), []byte(armoredSig), []byte(pubArmored)) - require.Error(t, err) - require.Contains(t, err.Error(), "signature verification failed") -} - -func TestPackageSignatureVerificationFromEnv(t *testing.T) { - keyFile := filepath.Join(t.TempDir(), "pub.asc") - require.NoError(t, os.WriteFile(keyFile, []byte("not-a-real-key-but-present"), 0o600)) - - prefix := environment.WithElasticPackagePrefix - t.Run("unset", func(t *testing.T) { - t.Setenv(prefix("VERIFY_PACKAGE_SIGNATURE"), "") - t.Setenv(prefix("VERIFIER_PUBLIC_KEYFILE"), "") - v, p, err := PackageSignatureVerificationFromEnv() - require.NoError(t, err) - require.False(t, v) - require.Empty(t, p) - }) - t.Run("false", func(t *testing.T) { - t.Setenv(prefix("VERIFY_PACKAGE_SIGNATURE"), "false") - t.Setenv(prefix("VERIFIER_PUBLIC_KEYFILE"), keyFile) - v, p, err := PackageSignatureVerificationFromEnv() - require.NoError(t, err) - require.False(t, v) - require.Empty(t, p) - }) - t.Run("true_ok", func(t *testing.T) { - t.Setenv(prefix("VERIFY_PACKAGE_SIGNATURE"), "true") - t.Setenv(prefix("VERIFIER_PUBLIC_KEYFILE"), keyFile) - v, p, err := PackageSignatureVerificationFromEnv() - require.NoError(t, err) - require.True(t, v) - require.Equal(t, keyFile, p) - }) - t.Run("true_missing_key_path", func(t *testing.T) { - t.Setenv(prefix("VERIFY_PACKAGE_SIGNATURE"), "1") - t.Setenv(prefix("VERIFIER_PUBLIC_KEYFILE"), "") - _, _, err := PackageSignatureVerificationFromEnv() - require.Error(t, err) - require.True(t, strings.Contains(err.Error(), "not set")) - }) - t.Run("invalid_bool", func(t *testing.T) { - t.Setenv(prefix("VERIFY_PACKAGE_SIGNATURE"), "maybe") - t.Setenv(prefix("VERIFIER_PUBLIC_KEYFILE"), keyFile) - _, _, err := PackageSignatureVerificationFromEnv() - require.Error(t, err) - }) - t.Run("true_missing_file", func(t *testing.T) { - t.Setenv(prefix("VERIFY_PACKAGE_SIGNATURE"), "true") - t.Setenv(prefix("VERIFIER_PUBLIC_KEYFILE"), filepath.Join(t.TempDir(), "nope.asc")) - _, _, 
err := PackageSignatureVerificationFromEnv() - require.Error(t, err) - }) -} diff --git a/internal/registry/client.go b/internal/registry/client.go index 499dc75bb0..1e04fb9212 100644 --- a/internal/registry/client.go +++ b/internal/registry/client.go @@ -14,7 +14,6 @@ import ( "path/filepath" "github.com/elastic/elastic-package/internal/certs" - "github.com/elastic/elastic-package/internal/files" "github.com/elastic/elastic-package/internal/logger" ) @@ -120,16 +119,7 @@ func (c *Client) get(resourcePath string) (int, []byte, error) { // DownloadPackage downloads a package zip from the registry and writes it to destDir. // It returns the path to the downloaded zip file. -// -// When ELASTIC_PACKAGE_VERIFY_PACKAGE_SIGNATURE is true and ELASTIC_PACKAGE_VERIFIER_PUBLIC_KEYFILE -// is set, the registry must also serve a detached signature at {zip}.sig and the zip is verified -// before returning; on failure the zip file is removed. func (c *Client) DownloadPackage(name, version, destDir string) (string, error) { - verify, pubKeyPath, err := files.PackageSignatureVerificationFromEnv() - if err != nil { - return "", err - } - resourcePath := fmt.Sprintf("/epr/%s/%s-%s.zip", name, name, version) statusCode, body, err := c.get(resourcePath) if err != nil { @@ -152,62 +142,6 @@ func (c *Client) DownloadPackage(name, version, destDir string) (string, error) return "", fmt.Errorf("writing package zip to %s: %w", zipPath, err) } - if !verify { - shouldRemove = false - return zipPath, nil - } - - discard, err := c.verifyPackage(name, version, zipPath, pubKeyPath) - if err != nil { - if !discard { - shouldRemove = false - } - return "", err - } - shouldRemove = false return zipPath, nil } - -// verifyPackage verifies the detached PGP signature for a package zip already on disk. -// If it returns a non-nil error, discard is true when the zip file should be removed -// (verification or I/O failure before a successful read of the artifact). 
When discard -// is false, the zip should be kept (e.g. failure closing the file after verification). -func (c *Client) verifyPackage(name, version, zipPath, pubKeyPath string) (discard bool, err error) { - discard = true - logger.Debugf("Verifying detached signature for package %s-%s", name, version) - pubKey, err := os.ReadFile(pubKeyPath) - if err != nil { - return true, fmt.Errorf("reading verifier public keyfile (path: %s): %w", pubKeyPath, err) - } - - sigPath := fmt.Sprintf("/epr/%s/%s-%s.zip.sig", name, name, version) - sigCode, sigBody, err := c.get(sigPath) - if err != nil { - return true, fmt.Errorf("downloading package signature %s-%s: %w", name, version, err) - } - if sigCode != http.StatusOK { - return true, fmt.Errorf("downloading package signature %s-%s: unexpected status code %d", name, version, sigCode) - } - - zipFile, err := os.Open(zipPath) - if err != nil { - return true, fmt.Errorf("opening downloaded package zip %s: %w", zipPath, err) - } - defer func() { - closeErr := zipFile.Close() - if closeErr == nil { - return - } - if err != nil { - return - } - discard = false - err = fmt.Errorf("closing downloaded package zip %s: %w", zipPath, closeErr) - }() - - if err := files.VerifyDetachedPGP(zipFile, sigBody, pubKey); err != nil { - return true, fmt.Errorf("verifying package %s-%s: %w", name, version, err) - } - return false, nil -} diff --git a/internal/registry/client_test.go b/internal/registry/client_test.go index 34e68eaf87..8588655c47 100644 --- a/internal/registry/client_test.go +++ b/internal/registry/client_test.go @@ -16,10 +16,7 @@ import ( "path/filepath" "testing" - "github.com/ProtonMail/gopenpgp/v2/crypto" "github.com/stretchr/testify/require" - - "github.com/elastic/elastic-package/internal/environment" ) func TestNewClient_invalidCertificateAuthorityPath(t *testing.T) { @@ -57,9 +54,6 @@ func TestDownloadPackage_unexpectedStatusDoesNotWriteZip(t *testing.T) { })) t.Cleanup(srv.Close) - 
t.Setenv(environment.WithElasticPackagePrefix("VERIFY_PACKAGE_SIGNATURE"), "") - t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), "") - dest := t.TempDir() client, err := NewClient(srv.URL) require.NoError(t, err) @@ -83,9 +77,6 @@ func TestDownloadPackage_writeFailureCleansUp(t *testing.T) { })) t.Cleanup(srv.Close) - t.Setenv(environment.WithElasticPackagePrefix("VERIFY_PACKAGE_SIGNATURE"), "") - t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), "") - dest := t.TempDir() zipPath := filepath.Join(dest, "acme-1.0.0.zip") require.NoError(t, os.Mkdir(zipPath, 0o700)) @@ -100,7 +91,7 @@ func TestDownloadPackage_writeFailureCleansUp(t *testing.T) { require.True(t, errors.Is(statErr, fs.ErrNotExist), "partial zip should not remain after a write error") } -func TestDownloadPackage_withoutVerification(t *testing.T) { +func TestDownloadPackage_success(t *testing.T) { zipBytes := testAcmePackageZip(t) srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path != "/epr/acme/acme-1.0.0.zip" { @@ -112,58 +103,6 @@ func TestDownloadPackage_withoutVerification(t *testing.T) { })) t.Cleanup(srv.Close) - t.Setenv(environment.WithElasticPackagePrefix("VERIFY_PACKAGE_SIGNATURE"), "") - t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), "") - - dest := t.TempDir() - client, err := NewClient(srv.URL) - require.NoError(t, err) - zipPath, err := client.DownloadPackage("acme", "1.0.0", dest) - require.NoError(t, err) - require.FileExists(t, zipPath) -} - -func TestDownloadPackage_withVerification_success(t *testing.T) { - zipBytes := testAcmePackageZip(t) - passphrase := []byte("registry-test-pass") - - priv, err := crypto.GenerateKey("Registry Test", "", "rsa", 2048) - require.NoError(t, err) - priv, err = priv.Lock(passphrase) - require.NoError(t, err) - unlocked, err := priv.Unlock(passphrase) - require.NoError(t, err) - t.Cleanup(func() { unlocked.ClearPrivateParams() 
}) - - signRing, err := crypto.NewKeyRing(unlocked) - require.NoError(t, err) - sig, err := signRing.SignDetachedStream(bytes.NewReader(zipBytes)) - require.NoError(t, err) - armoredSig, err := sig.GetArmored() - require.NoError(t, err) - pubArmored, err := unlocked.GetArmoredPublicKey() - require.NoError(t, err) - - pubPath := filepath.Join(t.TempDir(), "verify.pub.asc") - require.NoError(t, os.WriteFile(pubPath, []byte(pubArmored), 0o600)) - - srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - switch r.URL.Path { - case "/epr/acme/acme-1.0.0.zip": - _, err := w.Write(zipBytes) - require.NoError(t, err) - case "/epr/acme/acme-1.0.0.zip.sig": - _, err := w.Write([]byte(armoredSig)) - require.NoError(t, err) - default: - http.NotFound(w, r) - } - })) - t.Cleanup(srv.Close) - - t.Setenv(environment.WithElasticPackagePrefix("VERIFY_PACKAGE_SIGNATURE"), "true") - t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), pubPath) - dest := t.TempDir() client, err := NewClient(srv.URL) require.NoError(t, err) @@ -172,92 +111,6 @@ func TestDownloadPackage_withVerification_success(t *testing.T) { require.FileExists(t, zipPath) } -func TestDownloadPackage_withVerification_missingSignature(t *testing.T) { - zipBytes := testAcmePackageZip(t) - pubPath := filepath.Join(t.TempDir(), "verify.pub.asc") - require.NoError(t, os.WriteFile(pubPath, []byte("x"), 0o600)) - - srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - if r.URL.Path == "/epr/acme/acme-1.0.0.zip" { - _, err := w.Write(zipBytes) - require.NoError(t, err) - return - } - http.NotFound(w, r) - })) - t.Cleanup(srv.Close) - - t.Setenv(environment.WithElasticPackagePrefix("VERIFY_PACKAGE_SIGNATURE"), "true") - t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), pubPath) - - dest := t.TempDir() - client, err := NewClient(srv.URL) - require.NoError(t, err) - _, err = client.DownloadPackage("acme", 
"1.0.0", dest) - require.Error(t, err) - - _, statErr := os.Stat(filepath.Join(dest, "acme-1.0.0.zip")) - require.True(t, errors.Is(statErr, fs.ErrNotExist), "zip should be removed after failed verification") -} - -func TestDownloadPackage_withVerification_badSignature(t *testing.T) { - zipBytes := testAcmePackageZip(t) - passphrase := []byte("a") - - priv, err := crypto.GenerateKey("Signer A", "", "rsa", 2048) - require.NoError(t, err) - priv, err = priv.Lock(passphrase) - require.NoError(t, err) - unlocked, err := priv.Unlock(passphrase) - require.NoError(t, err) - t.Cleanup(func() { unlocked.ClearPrivateParams() }) - signRing, err := crypto.NewKeyRing(unlocked) - require.NoError(t, err) - sig, err := signRing.SignDetachedStream(bytes.NewReader(zipBytes)) - require.NoError(t, err) - armoredSig, err := sig.GetArmored() - require.NoError(t, err) - - priv2, err := crypto.GenerateKey("Signer B", "", "rsa", 2048) - require.NoError(t, err) - priv2, err = priv2.Lock(passphrase) - require.NoError(t, err) - unlocked2, err := priv2.Unlock(passphrase) - require.NoError(t, err) - t.Cleanup(func() { unlocked2.ClearPrivateParams() }) - pubArmored, err := unlocked2.GetArmoredPublicKey() - require.NoError(t, err) - - pubPath := filepath.Join(t.TempDir(), "b.pub.asc") - require.NoError(t, os.WriteFile(pubPath, []byte(pubArmored), 0o600)) - - srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - switch r.URL.Path { - case "/epr/acme/acme-1.0.0.zip": - _, err := w.Write(zipBytes) - require.NoError(t, err) - case "/epr/acme/acme-1.0.0.zip.sig": - _, err := w.Write([]byte(armoredSig)) - require.NoError(t, err) - default: - http.NotFound(w, r) - } - })) - t.Cleanup(srv.Close) - - t.Setenv(environment.WithElasticPackagePrefix("VERIFY_PACKAGE_SIGNATURE"), "true") - t.Setenv(environment.WithElasticPackagePrefix("VERIFIER_PUBLIC_KEYFILE"), pubPath) - - dest := t.TempDir() - client, err := NewClient(srv.URL) - require.NoError(t, err) - _, err = 
client.DownloadPackage("acme", "1.0.0", dest) - require.Error(t, err) - - _, statErr := os.Stat(filepath.Join(dest, "acme-1.0.0.zip")) - require.True(t, errors.Is(statErr, fs.ErrNotExist), "zip should be removed after failed verification") -} - func testAcmePackageZip(t *testing.T) []byte { t.Helper() const ( diff --git a/tools/readme/readme.md.tmpl b/tools/readme/readme.md.tmpl index 94b2bc1384..984f9112e2 100644 --- a/tools/readme/readme.md.tmpl +++ b/tools/readme/readme.md.tmpl @@ -279,10 +279,6 @@ There are available some environment variables that could be used to change some - `ELASTIC_PACKAGE_SIGNER_PRIVATE_KEYFILE`: Path to the private key file to sign packages. - `ELASTIC_PACKAGE_SIGNER_PASSPHRASE`: Passphrase to use the private key file. -- Related to verifying packages downloaded from the Package Registry (EPR): - - `ELASTIC_PACKAGE_VERIFY_PACKAGE_SIGNATURE`: If set to `true` (or `1`), `elastic-package` verifies detached OpenPGP signatures for package zips fetched from the registry (for example when resolving required input packages). The registry must serve a signature at the same path as the zip with a `.sig` suffix (for example `/epr/apache/apache-1.0.0.zip.sig`). Leave this unset or `false` for local or unsigned registries. - - `ELASTIC_PACKAGE_VERIFIER_PUBLIC_KEYFILE`: Path to an armored **public** key matching the key that signed packages on that registry. Required when `ELASTIC_PACKAGE_VERIFY_PACKAGE_SIGNATURE` is enabled; the file must exist before the download runs. - - Related to tests: - `ELASTIC_PACKAGE_SERVERLESS_PIPELINE_TEST_DISABLE_COMPARE_RESULTS`: If set to `true`, the results from pipeline tests are not compared to avoid errors from GeoIP. 
- `ELASTIC_PACKAGE_DISABLE_ELASTIC_AGENT_WOLFI`: If set to `true`, the Elastic Agent image used for running agents will be using the Ubuntu docker images From 490cb0d56b5a2e0b9c5389971f6258269e3ddd05 Mon Sep 17 00:00:00 2001 From: Tere Date: Thu, 16 Apr 2026 08:53:46 +0200 Subject: [PATCH 27/28] move test fixture to manual_packages --- internal/requiredinputs/variables_test.go | 8 ++++---- test/manual_packages/README.md | 8 ++++---- .../composable/01_ci_input_pkg/_dev/test/config.yml | 0 .../composable/01_ci_input_pkg/agent/input/extra.yml.hbs | 0 .../composable/01_ci_input_pkg/agent/input/input.yml.hbs | 0 .../composable/01_ci_input_pkg/changelog.yml | 0 .../composable/01_ci_input_pkg/docs/README.md | 0 .../composable/01_ci_input_pkg/fields/base-fields.yml | 0 .../composable/01_ci_input_pkg/manifest.yml | 0 .../02_ci_composable_integration/_dev/test/config.yml | 0 .../composable/02_ci_composable_integration/changelog.yml | 0 .../ci_composable_logs/agent/stream/stream.yml.hbs | 0 .../data_stream/ci_composable_logs/fields/base-fields.yml | 0 .../data_stream/ci_composable_logs/manifest.yml | 0 .../02_ci_composable_integration/docs/README.md | 0 .../composable/02_ci_composable_integration/manifest.yml | 0 .../with_linked_template_path/_dev/test/config.yml | 2 +- .../with_merging_ds_merges/_dev/test/config.yml | 2 +- .../with_merging_duplicate_error/_dev/test/config.yml | 2 +- .../with_merging_no_override/_dev/test/config.yml | 2 +- .../with_merging_promotes_to_input/_dev/test/config.yml | 2 +- .../_dev/test/config.yml | 2 +- 22 files changed, 14 insertions(+), 14 deletions(-) rename test/{packages => manual_packages}/composable/01_ci_input_pkg/_dev/test/config.yml (100%) rename test/{packages => manual_packages}/composable/01_ci_input_pkg/agent/input/extra.yml.hbs (100%) rename test/{packages => manual_packages}/composable/01_ci_input_pkg/agent/input/input.yml.hbs (100%) rename test/{packages => manual_packages}/composable/01_ci_input_pkg/changelog.yml (100%) rename 
test/{packages => manual_packages}/composable/01_ci_input_pkg/docs/README.md (100%) rename test/{packages => manual_packages}/composable/01_ci_input_pkg/fields/base-fields.yml (100%) rename test/{packages => manual_packages}/composable/01_ci_input_pkg/manifest.yml (100%) rename test/{packages => manual_packages}/composable/02_ci_composable_integration/_dev/test/config.yml (100%) rename test/{packages => manual_packages}/composable/02_ci_composable_integration/changelog.yml (100%) rename test/{packages => manual_packages}/composable/02_ci_composable_integration/data_stream/ci_composable_logs/agent/stream/stream.yml.hbs (100%) rename test/{packages => manual_packages}/composable/02_ci_composable_integration/data_stream/ci_composable_logs/fields/base-fields.yml (100%) rename test/{packages => manual_packages}/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml (100%) rename test/{packages => manual_packages}/composable/02_ci_composable_integration/docs/README.md (100%) rename test/{packages => manual_packages}/composable/02_ci_composable_integration/manifest.yml (100%) diff --git a/internal/requiredinputs/variables_test.go b/internal/requiredinputs/variables_test.go index 8fbdde190a..1105c4486c 100644 --- a/internal/requiredinputs/variables_test.go +++ b/internal/requiredinputs/variables_test.go @@ -42,15 +42,15 @@ func copyFixturePackage(t *testing.T, fixtureName string) string { return destPath } -// ciInputFixturePath returns the path to test/packages/composable/01_ci_input_pkg (repository-relative from this package). +// ciInputFixturePath returns the path to test/manual_packages/composable/01_ci_input_pkg (repository-relative from this package). 
func ciInputFixturePath() string { - return filepath.Join("..", "..", "test", "packages", "composable", "01_ci_input_pkg") + return filepath.Join("..", "..", "test", "manual_packages", "composable", "01_ci_input_pkg") } -// copyComposableIntegrationFixture copies test/packages/composable/02_ci_composable_integration for integration tests. +// copyComposableIntegrationFixture copies test/manual_packages/composable/02_ci_composable_integration for integration tests. func copyComposableIntegrationFixture(t *testing.T) string { t.Helper() - srcPath := filepath.Join("..", "..", "test", "packages", "composable", "02_ci_composable_integration") + srcPath := filepath.Join("..", "..", "test", "manual_packages", "composable", "02_ci_composable_integration") destPath := t.TempDir() err := os.CopyFS(destPath, os.DirFS(srcPath)) require.NoError(t, err, "copying composable CI integration fixture") diff --git a/test/manual_packages/README.md b/test/manual_packages/README.md index 5176561a46..57c6686f27 100644 --- a/test/manual_packages/README.md +++ b/test/manual_packages/README.md @@ -2,12 +2,12 @@ Packages under `test/manual_packages/` are **not** picked up by CI’s main package glob beyond what each script includes. They are for **manual** workflows and **targeted** `go test` cases. 
-## CI composable coverage +## Composable coverage End-to-end composable integration coverage (`requires.input`, local registry, build + install) lives under: -- [`test/packages/composable/01_ci_input_pkg/`](../packages/composable/01_ci_input_pkg/) — `type: input` dependency -- [`test/packages/composable/02_ci_composable_integration/`](../packages/composable/02_ci_composable_integration/) — `type: integration` built in **phase 2** by [`scripts/test-build-install-zip.sh`](../../scripts/test-build-install-zip.sh) after `stack up`, with `package_registry.base_url` set to `https://127.0.0.1:8080` +- [`composable/01_ci_input_pkg/`](composable/01_ci_input_pkg/) — `type: input` dependency +- [`composable/02_ci_composable_integration/`](composable/02_ci_composable_integration/) — `type: integration` that requires the input package above; must be built after `stack up` with `package_registry.base_url` set to `https://127.0.0.1:8080` `internal/requiredinputs` integration tests copy those same directories (see `ciInputFixturePath`, `copyComposableIntegrationFixture` in [`variables_test.go`](../../internal/requiredinputs/variables_test.go)). @@ -24,7 +24,7 @@ Remaining trees under [`required_inputs/`](required_inputs/) exercise **narrow** | `required_inputs/with_merging_duplicate_error` | Invalid duplicate `paths` on DS — **build must fail** (not in CI zip loop). | | `required_inputs/with_linked_template_path` | Composable + policy `template_path` via `.link` (see [`dependency_management.md`](../../docs/howto/dependency_management.md)). | -All of these depend on **`ci_input_pkg`** from [`test/packages/composable/01_ci_input_pkg/`](../packages/composable/01_ci_input_pkg/) (see each package’s `_dev/test/config.yml` `requires` stub). +All of these depend on **`ci_input_pkg`** from [`composable/01_ci_input_pkg/`](composable/01_ci_input_pkg/) (see each package’s `_dev/test/config.yml` `requires` stub). 
### Manual workflow diff --git a/test/packages/composable/01_ci_input_pkg/_dev/test/config.yml b/test/manual_packages/composable/01_ci_input_pkg/_dev/test/config.yml similarity index 100% rename from test/packages/composable/01_ci_input_pkg/_dev/test/config.yml rename to test/manual_packages/composable/01_ci_input_pkg/_dev/test/config.yml diff --git a/test/packages/composable/01_ci_input_pkg/agent/input/extra.yml.hbs b/test/manual_packages/composable/01_ci_input_pkg/agent/input/extra.yml.hbs similarity index 100% rename from test/packages/composable/01_ci_input_pkg/agent/input/extra.yml.hbs rename to test/manual_packages/composable/01_ci_input_pkg/agent/input/extra.yml.hbs diff --git a/test/packages/composable/01_ci_input_pkg/agent/input/input.yml.hbs b/test/manual_packages/composable/01_ci_input_pkg/agent/input/input.yml.hbs similarity index 100% rename from test/packages/composable/01_ci_input_pkg/agent/input/input.yml.hbs rename to test/manual_packages/composable/01_ci_input_pkg/agent/input/input.yml.hbs diff --git a/test/packages/composable/01_ci_input_pkg/changelog.yml b/test/manual_packages/composable/01_ci_input_pkg/changelog.yml similarity index 100% rename from test/packages/composable/01_ci_input_pkg/changelog.yml rename to test/manual_packages/composable/01_ci_input_pkg/changelog.yml diff --git a/test/packages/composable/01_ci_input_pkg/docs/README.md b/test/manual_packages/composable/01_ci_input_pkg/docs/README.md similarity index 100% rename from test/packages/composable/01_ci_input_pkg/docs/README.md rename to test/manual_packages/composable/01_ci_input_pkg/docs/README.md diff --git a/test/packages/composable/01_ci_input_pkg/fields/base-fields.yml b/test/manual_packages/composable/01_ci_input_pkg/fields/base-fields.yml similarity index 100% rename from test/packages/composable/01_ci_input_pkg/fields/base-fields.yml rename to test/manual_packages/composable/01_ci_input_pkg/fields/base-fields.yml diff --git 
a/test/packages/composable/01_ci_input_pkg/manifest.yml b/test/manual_packages/composable/01_ci_input_pkg/manifest.yml similarity index 100% rename from test/packages/composable/01_ci_input_pkg/manifest.yml rename to test/manual_packages/composable/01_ci_input_pkg/manifest.yml diff --git a/test/packages/composable/02_ci_composable_integration/_dev/test/config.yml b/test/manual_packages/composable/02_ci_composable_integration/_dev/test/config.yml similarity index 100% rename from test/packages/composable/02_ci_composable_integration/_dev/test/config.yml rename to test/manual_packages/composable/02_ci_composable_integration/_dev/test/config.yml diff --git a/test/packages/composable/02_ci_composable_integration/changelog.yml b/test/manual_packages/composable/02_ci_composable_integration/changelog.yml similarity index 100% rename from test/packages/composable/02_ci_composable_integration/changelog.yml rename to test/manual_packages/composable/02_ci_composable_integration/changelog.yml diff --git a/test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/agent/stream/stream.yml.hbs b/test/manual_packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/agent/stream/stream.yml.hbs similarity index 100% rename from test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/agent/stream/stream.yml.hbs rename to test/manual_packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/agent/stream/stream.yml.hbs diff --git a/test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/fields/base-fields.yml b/test/manual_packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/fields/base-fields.yml similarity index 100% rename from test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/fields/base-fields.yml rename to 
test/manual_packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/fields/base-fields.yml diff --git a/test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml b/test/manual_packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml similarity index 100% rename from test/packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml rename to test/manual_packages/composable/02_ci_composable_integration/data_stream/ci_composable_logs/manifest.yml diff --git a/test/packages/composable/02_ci_composable_integration/docs/README.md b/test/manual_packages/composable/02_ci_composable_integration/docs/README.md similarity index 100% rename from test/packages/composable/02_ci_composable_integration/docs/README.md rename to test/manual_packages/composable/02_ci_composable_integration/docs/README.md diff --git a/test/packages/composable/02_ci_composable_integration/manifest.yml b/test/manual_packages/composable/02_ci_composable_integration/manifest.yml similarity index 100% rename from test/packages/composable/02_ci_composable_integration/manifest.yml rename to test/manual_packages/composable/02_ci_composable_integration/manifest.yml diff --git a/test/manual_packages/required_inputs/with_linked_template_path/_dev/test/config.yml b/test/manual_packages/required_inputs/with_linked_template_path/_dev/test/config.yml index 109cb53c2e..e60432ec4c 100644 --- a/test/manual_packages/required_inputs/with_linked_template_path/_dev/test/config.yml +++ b/test/manual_packages/required_inputs/with_linked_template_path/_dev/test/config.yml @@ -1,3 +1,3 @@ requires: - package: ci_input_pkg - source: "../../../../packages/composable/01_ci_input_pkg" + source: "../../../../composable/01_ci_input_pkg" diff --git a/test/manual_packages/required_inputs/with_merging_ds_merges/_dev/test/config.yml 
b/test/manual_packages/required_inputs/with_merging_ds_merges/_dev/test/config.yml index 7a2d655a23..1ca27d196b 100644 --- a/test/manual_packages/required_inputs/with_merging_ds_merges/_dev/test/config.yml +++ b/test/manual_packages/required_inputs/with_merging_ds_merges/_dev/test/config.yml @@ -1,3 +1,3 @@ requires: - package: ci_input_pkg - source: "../../../../packages/composable/01_ci_input_pkg" \ No newline at end of file + source: "../../../../composable/01_ci_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_duplicate_error/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_duplicate_error/_dev/test/config.yml index 7a2d655a23..1ca27d196b 100644 --- a/test/manual_packages/required_inputs/with_merging_duplicate_error/_dev/test/config.yml +++ b/test/manual_packages/required_inputs/with_merging_duplicate_error/_dev/test/config.yml @@ -1,3 +1,3 @@ requires: - package: ci_input_pkg - source: "../../../../packages/composable/01_ci_input_pkg" \ No newline at end of file + source: "../../../../composable/01_ci_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_no_override/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_no_override/_dev/test/config.yml index 7a2d655a23..1ca27d196b 100644 --- a/test/manual_packages/required_inputs/with_merging_no_override/_dev/test/config.yml +++ b/test/manual_packages/required_inputs/with_merging_no_override/_dev/test/config.yml @@ -1,3 +1,3 @@ requires: - package: ci_input_pkg - source: "../../../../packages/composable/01_ci_input_pkg" \ No newline at end of file + source: "../../../../composable/01_ci_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_promotes_to_input/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_promotes_to_input/_dev/test/config.yml index 7a2d655a23..1ca27d196b 100644 --- 
a/test/manual_packages/required_inputs/with_merging_promotes_to_input/_dev/test/config.yml +++ b/test/manual_packages/required_inputs/with_merging_promotes_to_input/_dev/test/config.yml @@ -1,3 +1,3 @@ requires: - package: ci_input_pkg - source: "../../../../packages/composable/01_ci_input_pkg" \ No newline at end of file + source: "../../../../composable/01_ci_input_pkg" \ No newline at end of file diff --git a/test/manual_packages/required_inputs/with_merging_two_policy_templates/_dev/test/config.yml b/test/manual_packages/required_inputs/with_merging_two_policy_templates/_dev/test/config.yml index 109cb53c2e..e60432ec4c 100644 --- a/test/manual_packages/required_inputs/with_merging_two_policy_templates/_dev/test/config.yml +++ b/test/manual_packages/required_inputs/with_merging_two_policy_templates/_dev/test/config.yml @@ -1,3 +1,3 @@ requires: - package: ci_input_pkg - source: "../../../../packages/composable/01_ci_input_pkg" + source: "../../../../composable/01_ci_input_pkg" From 58e8841d85ebce936adc14c928ced52944ccc67a Mon Sep 17 00:00:00 2001 From: Tere Date: Thu, 16 Apr 2026 09:46:01 +0200 Subject: [PATCH 28/28] docs(dependency_management): update package registry configuration details Clarified the configuration settings for package dependencies in the `dependency_management.md` and `local_package_registry.md` files. Updated references to `stack.epr.base_url` for profile-specific settings and improved the explanation of registry URL resolution priorities for various `elastic-package` commands. --- docs/howto/dependency_management.md | 4 ++-- docs/howto/local_package_registry.md | 17 +++++++++++++---- 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/docs/howto/dependency_management.md b/docs/howto/dependency_management.md index 83bc76b4d0..43b6779962 100644 --- a/docs/howto/dependency_management.md +++ b/docs/howto/dependency_management.md @@ -121,8 +121,8 @@ policy creation time. 
Unlike field-level dependencies (which are resolved from Git references and cached locally), package dependencies are fetched from the configured package registry URL -(`package_registry.base_url` in `~/.elastic-package/config.yml`, defaulting to -`https://epr.elastic.co`). +(`stack.epr.base_url` in the active profile, or `package_registry.base_url` in +`~/.elastic-package/config.yml`, defaulting to `https://epr.elastic.co`). For details on using a local or custom registry when the required input packages are still under development, see [HOWTO: Use a local or custom package registry](./local_package_registry.md). diff --git a/docs/howto/local_package_registry.md b/docs/howto/local_package_registry.md index 3eb9f76302..ca5c2acc19 100644 --- a/docs/howto/local_package_registry.md +++ b/docs/howto/local_package_registry.md @@ -111,8 +111,8 @@ stack.epr.proxy_to: http://host.docker.internal:8082 stack.epr.base_url: http://localhost:8082 ``` -To also cover `elastic-package build`, `elastic-package test`, `elastic-package benchmark`, -and `elastic-package status` (which do not read profile settings), add the global setting: +To also cover `elastic-package test`, `elastic-package benchmark`, and `elastic-package status` +(which do not read profile settings), add the global setting: ```yaml # ~/.elastic-package/config.yml @@ -122,7 +122,15 @@ package_registry: ### URL resolution reference -**For `elastic-package build`, `test`, `benchmark`, `status`** (global config only): +**For `elastic-package build`** (profile, then global config): + +| Priority | Setting | +| -------- | ------- | +| 1 | `stack.epr.base_url` in the active profile `config.yml` | +| 2 | `package_registry.base_url` in `~/.elastic-package/config.yml` | +| 3 | `https://epr.elastic.co` (production fallback) | + +**For `elastic-package test`, `benchmark`, `status`** (global config only): | Priority | Setting | | -------- | ------- | @@ -153,6 +161,7 @@ For more details on profiles, see the | Goal | 
Configuration | | ---- | ------------- | -| Override registry for `build` / `test` / `benchmark` / `status` | `package_registry.base_url` in `~/.elastic-package/config.yml` | +| Override registry for `build` | `stack.epr.base_url` in the active profile `config.yml` (or `package_registry.base_url` in `~/.elastic-package/config.yml`) | +| Override registry for `test` / `benchmark` / `status` | `package_registry.base_url` in `~/.elastic-package/config.yml` | | Override registry for `install` and stack commands | `stack.epr.base_url` in the active profile `config.yml` | | Override proxy target for the stack's registry container | `stack.epr.proxy_to` in the active profile `config.yml` |