diff --git a/internal/kibana/policies.go b/internal/kibana/policies.go index 16d2061bf9..94ab86a60b 100644 --- a/internal/kibana/policies.go +++ b/internal/kibana/policies.go @@ -10,8 +10,6 @@ import ( "fmt" "net/http" "path" - - "github.com/elastic/elastic-package/internal/packages" ) // Policy represents an Agent Policy in Fleet. @@ -186,18 +184,6 @@ func (c *Client) DeletePolicy(ctx context.Context, policyID string) error { return nil } -// Var represents a single variable at the package or -// data stream level, encapsulating the data type of the -// variable and it's value. -type Var struct { - Value packages.VarValue `json:"value"` - Type string `json:"type"` -} - -// Vars is a collection of variables either at the package or -// data stream level. -type Vars map[string]Var - // DataStream represents a data stream within a package. type DataStream struct { Type string `json:"type"` diff --git a/internal/kibana/policyvariables.go b/internal/kibana/policyvariables.go new file mode 100644 index 0000000000..1dddaef951 --- /dev/null +++ b/internal/kibana/policyvariables.go @@ -0,0 +1,107 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package kibana + +import ( + "errors" + "strconv" + + "github.com/elastic/elastic-package/internal/common" + "github.com/elastic/elastic-package/internal/packages" +) + +// Var represents a single variable at the package or +// data stream level, encapsulating the data type of the +// variable and it's value. +type Var struct { + Value packages.VarValue `json:"value"` + Type string `json:"type"` +} + +// Vars is a collection of variables either at the package or +// data stream level. 
+type Vars map[string]Var + +func SetKibanaVariables(definitions []packages.Variable, values common.MapStr) Vars { + vars := Vars{} + for _, definition := range definitions { + // Elastic Package uses the deprecated 'inputs' array in its /api/fleet/package_policies request. + // When using this API parameter, default values are not automatically incorporated into + // the policy, whereas with the 'inputs' object, defaults are incorporated by the API service. + // This means that our client must include the default values in its request to ensure correct behavior. + val := definition.Default + + value, err := values.GetValue(definition.Name) + if err == nil { + val = &packages.VarValue{} + val.Unpack(value) + } else if errors.Is(err, common.ErrKeyNotFound) && definition.Default == nil { + // Do not include nulls for unset variables. + continue + } + + vars[definition.Name] = Var{ + Type: definition.Type, + Value: *val, + } + } + return vars +} + +func SetUseAPMVariable(vars Vars, variablesToAssign common.MapStr) Vars { + if _, found := vars["use_apm"]; found { + return vars + } + + useAPMData, err := variablesToAssign.GetValue("use_apm") + if errors.Is(err, common.ErrKeyNotFound) { + // No variable is set in the config, so it is not added + return vars + } + + if err != nil { + // Error getting the variable, so it is not added + return vars + } + + var value packages.VarValue + if useAPMString, ok := useAPMData.(string); ok && useAPMString != "" { + boolValue, err := strconv.ParseBool(useAPMString) + if err != nil { + return vars + } + value.Unpack(boolValue) + } + + if useAPM, ok := useAPMData.(bool); ok { + value.Unpack(useAPM) + } + if value.Value() != nil { + vars["use_apm"] = Var{ + Value: value, + Type: "boolean", + } + } + return vars +} + +func SetDataStreamDatasetVariable(vars Vars, variablesToAssign common.MapStr, defaultValue string) Vars { + if _, found := vars["data_stream.dataset"]; found { + return vars + } + + dataStreamDatasetValue := defaultValue + 
v, _ := variablesToAssign.GetValue("data_stream.dataset") + if dataset, ok := v.(string); ok && dataset != "" { + dataStreamDatasetValue = dataset + } + var value packages.VarValue + value.Unpack(dataStreamDatasetValue) + vars["data_stream.dataset"] = Var{ + Value: value, + Type: "text", + } + return vars +} diff --git a/internal/kibana/policyvariables_test.go b/internal/kibana/policyvariables_test.go new file mode 100644 index 0000000000..865bb360b8 --- /dev/null +++ b/internal/kibana/policyvariables_test.go @@ -0,0 +1,348 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package kibana + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/elastic/elastic-package/internal/common" + "github.com/elastic/elastic-package/internal/packages" +) + +func defaultVarValue(v any) packages.VarValue { + vv := packages.VarValue{} + vv.Unpack(v) + return vv +} + +func TestSetUseAPMVariable(t *testing.T) { + cases := []struct { + name string + vars Vars + variablesToAssign common.MapStr + wantUseAPM bool // true = key present and true, false = key present and false, = key absent + wantUseAPMPresent bool + wantUnchanged bool // when true, vars must be the same map (no new keys, existing keys unchanged) + }{ + { + name: "use_apm already in vars is left unchanged", + vars: Vars{"use_apm": {Value: defaultVarValue(false), Type: "boolean"}}, + variablesToAssign: common.MapStr{"use_apm": true}, + wantUseAPMPresent: true, + wantUseAPM: false, + wantUnchanged: true, + }, + { + name: "no use_apm in variablesToAssign leaves vars unchanged", + vars: Vars{}, + variablesToAssign: common.MapStr{}, + wantUseAPMPresent: false, + wantUnchanged: true, + }, + { + name: "use_apm true is added", + vars: Vars{}, + variablesToAssign: 
common.MapStr{"use_apm": true}, + wantUseAPMPresent: true, + wantUseAPM: true, + }, + { + name: "use_apm false is added", + vars: Vars{}, + variablesToAssign: common.MapStr{"use_apm": false}, + wantUseAPMPresent: true, + wantUseAPM: false, + }, + { + name: "use_apm as string true is added", + vars: Vars{}, + variablesToAssign: common.MapStr{"use_apm": "true"}, + wantUseAPMPresent: true, + wantUseAPM: true, + wantUnchanged: false, + }, + { + name: "use_apm as string false is added", + vars: Vars{}, + variablesToAssign: common.MapStr{"use_apm": "false"}, + wantUseAPMPresent: true, + wantUseAPM: false, + wantUnchanged: false, + }, + { + name: "use_apm as unexpected string is not added", + vars: Vars{}, + variablesToAssign: common.MapStr{"use_apm": "foo"}, + wantUseAPMPresent: false, + wantUseAPM: false, + wantUnchanged: true, + }, + { + name: "use_apm as int is not added", + vars: Vars{}, + variablesToAssign: common.MapStr{"use_apm": 1}, + wantUseAPMPresent: false, + wantUnchanged: true, + }, + { + name: "other vars are preserved when adding use_apm", + vars: Vars{"other": {Value: defaultVarValue("x"), Type: "text"}}, + variablesToAssign: common.MapStr{"use_apm": true}, + wantUseAPMPresent: true, + wantUseAPM: true, + }, + { + name: "nil variablesToAssign does not add use_apm", + vars: Vars{}, + variablesToAssign: nil, + wantUseAPMPresent: false, + wantUnchanged: true, + }, + } + + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + // Copy vars so we can compare for unchanged when needed + inputVars := make(Vars, len(c.vars)) + for k, v := range c.vars { + inputVars[k] = v + } + + got := SetUseAPMVariable(inputVars, c.variablesToAssign) + + if c.wantUnchanged && len(c.vars) == len(got) { + for k, v := range c.vars { + g, ok := got[k] + require.True(t, ok, "key %q should remain", k) + assert.Equal(t, v.Value.Value(), g.Value.Value(), "value for %q", k) + } + } + + if c.wantUseAPMPresent { + require.Contains(t, got, "use_apm", "vars should contain 
use_apm") + assert.Equal(t, "boolean", got["use_apm"].Type) + assert.Equal(t, c.wantUseAPM, got["use_apm"].Value.Value()) + } else { + assert.NotContains(t, got, "use_apm", "vars should not contain use_apm") + } + + // Original vars must always be preserved + for k, v := range c.vars { + require.Contains(t, got, k, "original var %q must be preserved", k) + assert.Equal(t, v.Value.Value(), got[k].Value.Value(), "value for %q", k) + } + }) + } +} + +func TestSetDataStreamDatasetVariable(t *testing.T) { + cases := []struct { + name string + vars Vars + variablesToAssign common.MapStr + defaultValue string + wantDataset string + wantDatasetPresent bool + wantUnchanged bool + }{ + { + name: "data_stream.dataset already in vars is left unchanged", + vars: Vars{"data_stream.dataset": {Value: defaultVarValue("existing"), Type: "text"}}, + variablesToAssign: common.MapStr{"data_stream.dataset": "overwrite"}, + defaultValue: "default", + wantDatasetPresent: true, + wantDataset: "existing", + wantUnchanged: true, + }, + { + name: "no data_stream.dataset in variablesToAssign uses default", + vars: Vars{}, + variablesToAssign: common.MapStr{}, + defaultValue: "default.dataset", + wantDatasetPresent: true, + wantDataset: "default.dataset", + }, + { + name: "data_stream.dataset string is set", + vars: Vars{}, + variablesToAssign: common.MapStr{"data_stream.dataset": "custom.dataset"}, + defaultValue: "default", + wantDatasetPresent: true, + wantDataset: "custom.dataset", + }, + { + name: "data_stream.dataset empty string uses default", + vars: Vars{}, + variablesToAssign: common.MapStr{"data_stream.dataset": ""}, + defaultValue: "default.dataset", + wantDatasetPresent: true, + wantDataset: "default.dataset", + }, + { + name: "nil variablesToAssign uses default", + vars: Vars{}, + variablesToAssign: nil, + defaultValue: "default.from.nil", + wantDatasetPresent: true, + wantDataset: "default.from.nil", + }, + { + name: "other vars are preserved when adding data_stream.dataset", + 
vars: Vars{"other": {Value: defaultVarValue("x"), Type: "text"}}, + variablesToAssign: common.MapStr{"data_stream.dataset": "my.dataset"}, + defaultValue: "default", + wantDatasetPresent: true, + wantDataset: "my.dataset", + }, + { + name: "non-string value in variablesToAssign uses default", + vars: Vars{}, + variablesToAssign: common.MapStr{"data_stream.dataset": 123}, + defaultValue: "default.dataset", + wantDatasetPresent: true, + wantDataset: "default.dataset", + }, + { + name: "defaultValue empty when no override yields empty", + vars: Vars{}, + variablesToAssign: common.MapStr{}, + defaultValue: "", + wantDatasetPresent: true, + wantDataset: "", + }, + } + + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + inputVars := make(Vars, len(c.vars)) + for k, v := range c.vars { + inputVars[k] = v + } + + got := SetDataStreamDatasetVariable(inputVars, c.variablesToAssign, c.defaultValue) + + if c.wantUnchanged && len(c.vars) == len(got) { + for k, v := range c.vars { + g, ok := got[k] + require.True(t, ok, "key %q should remain", k) + assert.Equal(t, v.Value.Value(), g.Value.Value(), "value for %q", k) + } + } + + if c.wantDatasetPresent { + require.Contains(t, got, "data_stream.dataset", "vars should contain data_stream.dataset") + assert.Equal(t, "text", got["data_stream.dataset"].Type) + assert.Equal(t, c.wantDataset, got["data_stream.dataset"].Value.Value()) + } else { + assert.NotContains(t, got, "data_stream.dataset", "vars should not contain data_stream.dataset") + } + + for k, v := range c.vars { + require.Contains(t, got, k, "original var %q must be preserved", k) + assert.Equal(t, v.Value.Value(), got[k].Value.Value(), "value for %q", k) + } + }) + } +} + +func TestSetKibanaVariables(t *testing.T) { + varDef := func(name, typ string, defaultVal any) packages.Variable { + def := packages.Variable{Name: name, Type: typ} + if defaultVal != nil { + vv := defaultVarValue(defaultVal) + def.Default = &vv + } + return def + } + + cases := []struct 
{ + name string + definitions []packages.Variable + values common.MapStr + wantVars map[string]any // name -> expected Value(). nil means key must be absent + }{ + { + name: "empty definitions returns empty vars", + definitions: nil, + values: common.MapStr{"any": "value"}, + wantVars: map[string]any{}, + }, + { + name: "definition with default and no values uses default", + definitions: []packages.Variable{varDef("host", "text", "localhost")}, + values: common.MapStr{}, + wantVars: map[string]any{"host": "localhost"}, + }, + { + name: "definition with default overridden by values", + definitions: []packages.Variable{varDef("host", "text", "localhost")}, + values: common.MapStr{"host": "elastic.co"}, + wantVars: map[string]any{"host": "elastic.co"}, + }, + { + name: "definition with no default and no value is omitted", + definitions: []packages.Variable{varDef("optional", "text", nil)}, + values: common.MapStr{}, + wantVars: map[string]any{}, + }, + { + name: "definition with no default but value in values is included", + definitions: []packages.Variable{varDef("optional", "text", nil)}, + values: common.MapStr{"optional": "set"}, + wantVars: map[string]any{"optional": "set"}, + }, + { + name: "nil values uses defaults only", + definitions: []packages.Variable{ + varDef("a", "text", "default_a"), + varDef("b", "text", nil), + }, + values: nil, + wantVars: map[string]any{"a": "default_a"}, + }, + { + name: "multiple definitions mix default and override", + definitions: []packages.Variable{ + varDef("host", "text", "localhost"), + varDef("port", "integer", 9200), + varDef("optional", "text", nil), + }, + values: common.MapStr{"port": 9300}, + wantVars: map[string]any{ + "host": "localhost", + "port": 9300, + }, + }, + { + name: "boolean and list types preserved", + definitions: []packages.Variable{varDef("enabled", "bool", true), varDef("hosts", "text", []any{"a", "b"})}, + values: common.MapStr{}, + wantVars: map[string]any{ + "enabled": true, + "hosts": []any{"a", 
"b"}, + }, + }, + } + + for _, c := range cases { + t.Run(c.name, func(t *testing.T) { + got := SetKibanaVariables(c.definitions, c.values) + + assert.Len(t, got, len(c.wantVars), "number of vars") + for name, wantVal := range c.wantVars { + require.Contains(t, got, name, "var %q should be present", name) + assert.Equal(t, wantVal, got[name].Value.Value(), "var %q value", name) + } + for name, v := range got { + wantVal, ok := c.wantVars[name] + require.True(t, ok, "var %q should not be present", name) + assert.Equal(t, wantVal, v.Value.Value(), "var %q value", name) + } + }) + } +} diff --git a/internal/resources/fleetpolicy.go b/internal/resources/fleetpolicy.go index f2c5d88094..e628322199 100644 --- a/internal/resources/fleetpolicy.go +++ b/internal/resources/fleetpolicy.go @@ -227,17 +227,17 @@ func createIntegrationPackagePolicy(policy FleetAgentPolicy, manifest packages.P } // Add dataStream-level vars - streams[0].Vars = setKibanaVariables(stream.Vars, common.MapStr(packagePolicy.DataStreamVars)) + streams[0].Vars = kibana.SetKibanaVariables(stream.Vars, common.MapStr(packagePolicy.DataStreamVars)) ds.Inputs[0].Streams = streams // Add input-level vars input := policyTemplate.FindInputByType(streamInput) if input != nil { - ds.Inputs[0].Vars = setKibanaVariables(input.Vars, common.MapStr(packagePolicy.Vars)) + ds.Inputs[0].Vars = kibana.SetKibanaVariables(input.Vars, common.MapStr(packagePolicy.Vars)) } // Add package-level vars - ds.Vars = setKibanaVariables(manifest.Vars, common.MapStr(packagePolicy.Vars)) + ds.Vars = kibana.SetKibanaVariables(manifest.Vars, common.MapStr(packagePolicy.Vars)) return &ds, nil } @@ -291,47 +291,27 @@ func createInputPackagePolicy(policy FleetAgentPolicy, manifest packages.Package }, } + variablesToAssign := common.MapStr(packagePolicy.Vars) + // Add policyTemplate-level vars. 
- vars := setKibanaVariables(policyTemplate.Vars, common.MapStr(packagePolicy.Vars)) - if _, found := vars["data_stream.dataset"]; !found { - var value packages.VarValue - value.Unpack(policyTemplate.Name) - vars["data_stream.dataset"] = kibana.Var{ - Value: value, - Type: "text", - } + vars := kibana.SetKibanaVariables(policyTemplate.Vars, variablesToAssign) + + // data_stream.dataset is required by Fleet for input packages, so mimic the value the + // UI would use if this is not defined in the config or doesn't have a default. + // Fleet uses the policy template name as default dataset for input packages, do the same. + vars = kibana.SetDataStreamDatasetVariable(vars, variablesToAssign, policyTemplate.Name) + + if policyTemplate.Input == "otelcol" { + vars = kibana.SetUseAPMVariable(vars, variablesToAssign) } streams[0].Vars = vars ds.Inputs[0].Streams = streams - return &ds, nil -} - -func setKibanaVariables(definitions []packages.Variable, values common.MapStr) kibana.Vars { - vars := kibana.Vars{} - for _, definition := range definitions { - // Elastic Package uses the deprecated 'inputs' array in its /api/fleet/package_policies request. - // When using this API parameter, default values are not automatically incorporated into - // the policy, whereas with the 'inputs' object, defaults are incorporated by the API service. - // This means that our client must include the default values in its request to ensure correct behavior. - val := definition.Default - - value, err := values.GetValue(definition.Name) - if err == nil { - val = &packages.VarValue{} - val.Unpack(value) - } else if errors.Is(err, common.ErrKeyNotFound) && definition.Default == nil { - // Do not include nulls for unset variables. 
- continue - } + // Add package-level vars + ds.Vars = kibana.SetKibanaVariables(manifest.Vars, variablesToAssign) - vars[definition.Name] = kibana.Var{ - Type: definition.Type, - Value: *val, - } - } - return vars + return &ds, nil } func (f *FleetAgentPolicy) Update(ctx resource.Context) error { diff --git a/internal/testrunner/runners/system/tester.go b/internal/testrunner/runners/system/tester.go index e226f1ba07..5a24b254f1 100644 --- a/internal/testrunner/runners/system/tester.go +++ b/internal/testrunner/runners/system/tester.go @@ -2007,17 +2007,17 @@ func createIntegrationPackageDatastream( } // Add dataStream-level vars - streams[0].Vars = setKibanaVariables(stream.Vars, cfgDSVars) + streams[0].Vars = kibana.SetKibanaVariables(stream.Vars, cfgDSVars) r.Inputs[0].Streams = streams // Add input-level vars input := policyTemplate.FindInputByType(streamInput) if input != nil { - r.Inputs[0].Vars = setKibanaVariables(input.Vars, cfgVars) + r.Inputs[0].Vars = kibana.SetKibanaVariables(input.Vars, cfgVars) } // Add package-level vars - r.Vars = setKibanaVariables(pkg.Vars, cfgVars) + r.Vars = kibana.SetKibanaVariables(pkg.Vars, cfgVars) return r } @@ -2063,28 +2063,23 @@ func createInputPackageDatastream( } // Add policyTemplate-level vars. - vars := setKibanaVariables(policyTemplate.Vars, cfgVars) + vars := kibana.SetKibanaVariables(policyTemplate.Vars, cfgVars) // data_stream.dataset is required by Fleet for input packages, so mimic the value the // UI would use if this is not defined in the config or doesn't have a default. - if _, found := vars["data_stream.dataset"]; !found { - // Fleet uses the policy template name as default dataset for input packages, do the same. - dataStreamDataset := policyTemplate.Name - v, _ := cfgVars.GetValue("data_stream.dataset") - if dataset, ok := v.(string); ok && dataset != "" { - dataStreamDataset = dataset - } + // Fleet uses the policy template name as default dataset for input packages, do the same. 
+ vars = kibana.SetDataStreamDatasetVariable(vars, cfgVars, policyTemplate.Name) - var value packages.VarValue - value.Unpack(dataStreamDataset) - vars["data_stream.dataset"] = kibana.Var{ - Value: value, - Type: "text", - } + if policyTemplate.Input == "otelcol" { + vars = kibana.SetUseAPMVariable(vars, cfgVars) } streams[0].Vars = vars r.Inputs[0].Streams = streams + + // Add package-level vars + r.Vars = kibana.SetKibanaVariables(pkg.Vars, cfgVars) + return r } @@ -2103,32 +2098,6 @@ func findDefaultValue(vars []packages.Variable, name string) string { return "" } -func setKibanaVariables(definitions []packages.Variable, values common.MapStr) kibana.Vars { - vars := kibana.Vars{} - for _, definition := range definitions { - // Elastic Package uses the deprecated 'inputs' array in its /api/fleet/package_policies request. - // When using this API parameter, default values are not automatically incorporated into - // the policy, whereas with the 'inputs' object, defaults are incorporated by the API service. - // This means that our client must include the default values in its request to ensure correct behavior. - val := definition.Default - - value, err := values.GetValue(definition.Name) - if err == nil { - val = &packages.VarValue{} - val.Unpack(value) - } else if errors.Is(err, common.ErrKeyNotFound) && definition.Default == nil { - // Do not include nulls for unset variables. - continue - } - - vars[definition.Name] = kibana.Var{ - Type: definition.Type, - Value: *val, - } - } - return vars -} - // getDataStreamIndex returns the index of the data stream whose input name // matches. Otherwise it returns the 0. 
func getDataStreamIndex(inputName string, ds *packages.DataStreamManifest) int { diff --git a/test/packages/parallel/zipkin_input_otel.stack_version b/test/packages/parallel/zipkin_input_otel.stack_version new file mode 100644 index 0000000000..b7ca1e13b0 --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel.stack_version @@ -0,0 +1 @@ +9.4.0-SNAPSHOT diff --git a/test/packages/parallel/zipkin_input_otel/LICENSE.txt b/test/packages/parallel/zipkin_input_otel/LICENSE.txt new file mode 100644 index 0000000000..d317b57b29 --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/LICENSE.txt @@ -0,0 +1,93 @@ +Elastic License 2.0 + +URL: https://www.elastic.co/licensing/elastic-license + +## Acceptance + +By using the software, you agree to all of the terms and conditions below. + +## Copyright License + +The licensor grants you a non-exclusive, royalty-free, worldwide, +non-sublicensable, non-transferable license to use, copy, distribute, make +available, and prepare derivative works of the software, in each case subject to +the limitations and conditions below. + +## Limitations + +You may not provide the software to third parties as a hosted or managed +service, where the service provides users with access to any substantial set of +the features or functionality of the software. + +You may not move, change, disable, or circumvent the license key functionality +in the software, and you may not remove or obscure any functionality in the +software that is protected by the license key. + +You may not alter, remove, or obscure any licensing, copyright, or other notices +of the licensor in the software. Any use of the licensor's trademarks is subject +to applicable law. + +## Patents + +The licensor grants you a license, under any patent claims the licensor can +license, or becomes able to license, to make, have made, use, sell, offer for +sale, import and have imported the software, in each case subject to the +limitations and conditions in this license. 
This license does not cover any +patent claims that you cause to be infringed by modifications or additions to +the software. If you or your company make any written claim that the software +infringes or contributes to infringement of any patent, your patent license for +the software granted under these terms ends immediately. If your company makes +such a claim, your patent license ends immediately for work on behalf of your +company. + +## Notices + +You must ensure that anyone who gets a copy of any part of the software from you +also gets a copy of these terms. + +If you modify the software, you must include in any modified copies of the +software prominent notices stating that you have modified the software. + +## No Other Rights + +These terms do not imply any licenses other than those expressly granted in +these terms. + +## Termination + +If you use the software in violation of these terms, such use is not licensed, +and your licenses will automatically terminate. If the licensor provides you +with a notice of your violation, and you cease all violation of this license no +later than 30 days after you receive that notice, your licenses will be +reinstated retroactively. However, if you violate these terms after such +reinstatement, any additional violation of these terms will cause your licenses +to terminate automatically and permanently. + +## No Liability + +*As far as the law allows, the software comes as is, without any warranty or +condition, and the licensor will not be liable to you for any damages arising +out of these terms or the use or nature of the software, under any kind of +legal claim.* + +## Definitions + +The **licensor** is the entity offering these terms, and the **software** is the +software the licensor makes available under these terms, including any portion +of it. + +**you** refers to the individual or entity agreeing to these terms. 
+ +**your company** is any legal entity, sole proprietorship, or other kind of +organization that you work for, plus all organizations that have control over, +are under the control of, or are under common control with that +organization. **control** means ownership of substantially all the assets of an +entity, or the power to direct its management and policies by vote, contract, or +otherwise. Control can be direct or indirect. + +**your licenses** are all the licenses granted to you for the software under +these terms. + +**use** means anything you do with the software requiring one of your licenses. + +**trademark** means trademarks, service marks, and similar rights. diff --git a/test/packages/parallel/zipkin_input_otel/_dev/build/docs/README.md b/test/packages/parallel/zipkin_input_otel/_dev/build/docs/README.md new file mode 100644 index 0000000000..80968afe73 --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/_dev/build/docs/README.md @@ -0,0 +1,22 @@ +# Zipkin OpenTelemetry Input Package + +## Overview +The Zipkin OpenTelemetry Input Package for Elastic enables collection of trace data from applications instrumented with [Zipkin](https://zipkin.io/) through OpenTelemetry protocols using the [zipkinreceiver](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/receiver/zipkinreceiver#zipkin-receiver). + +### How it works +This package receives Zipkin trace data (V1 and V2 JSON/Protobuf formats) by configuring the Zipkin receiver in the Input Package, which then gets applied to the zipkinreceiver present in the EDOT collector, which then forwards the data to Elastic Agent. The Elastic Agent processes and enriches the data before sending it to Elasticsearch for indexing and analysis. + +## Configuration + +For the full list of settings exposed for the receiver and examples, refer to the [Zipkin Receiver documentation](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/receiver/zipkinreceiver). 
+ +## Troubleshooting + +If you encounter issues: + +1. Verify the endpoint is accessible and not blocked by a firewall. +2. Ensure applications are sending Zipkin-formatted traces to the configured endpoint (default: `http://:9411`). +3. Check the Elastic Agent logs for any receiver errors. + +## Traces reference +For more details about the Zipkin receiver and its configuration options, refer to the [Zipkin Receiver documentation](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/receiver/zipkinreceiver) in the upstream OpenTelemetry Collector repository. diff --git a/test/packages/parallel/zipkin_input_otel/_dev/deploy/docker/docker-compose.yml b/test/packages/parallel/zipkin_input_otel/_dev/deploy/docker/docker-compose.yml new file mode 100644 index 0000000000..0cd0903cae --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/_dev/deploy/docker/docker-compose.yml @@ -0,0 +1,21 @@ +version: '2.3' +services: + run_queries: + image: curlimages/curl + environment: + - TARGET_URL=http://backend:9000 + volumes: + - ./scripts/run_queries.sh:/run_queries.sh + command: ["sh", "/run_queries.sh"] + depends_on: + backend: + condition: service_healthy + # Scenario based on the example defined in + # https://github.com/openzipkin/brave-example/tree/a71b18511b4fc8cbbe67ea2a6fd5ca43048c2465/docker + # https://github.com/openzipkin/brave-example/blob/a71b18511b4fc8cbbe67ea2a6fd5ca43048c2465/README.md#running-the-example + # The backend container is used to generate traces that are sent to the elastic-agent container. 
+ backend: + image: ghcr.io/openzipkin/brave-example:armeria + entrypoint: start-backend + environment: + - ZIPKIN_BASEURL=http://elastic-agent:9411 diff --git a/test/packages/parallel/zipkin_input_otel/_dev/deploy/docker/scripts/run_queries.sh b/test/packages/parallel/zipkin_input_otel/_dev/deploy/docker/scripts/run_queries.sh new file mode 100755 index 0000000000..242c3d8deb --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/_dev/deploy/docker/scripts/run_queries.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env sh + +set -ex + +TARGET_URL=${TARGET_URL:-http://backend:9000} +SLEEP_TIME=${SLEEP_TIME:-5} + +# NOTE: it cannot be done using SIGHUP signal, since the backend container is also +# killed and the traces would not be sent to the elastic-agent container. + +# Wait for the elastic-agent container to be ready with the corresponding +# agent policy assigned +sleep ${SLEEP_TIME} + +echo "Sending traces to ${TARGET_URL}/api" +i=0 +while true; do + echo "Trigger query ${i}" + # Force creating traces containing requests with errors + # The following curl command will fail with a 404 error per the backend code. 
+ curl -s -o /dev/null "${TARGET_URL}"; sleep 1; + + ## Create traces without any errors + # curl ${TARGET_URL}/api will return a 200 status code + curl -s -o /dev/null "${TARGET_URL}/api"; sleep 1; + i=$((i+1)) +done diff --git a/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-custom-dataset.expected b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-custom-dataset.expected new file mode 100644 index 0000000000..0f2df9a670 --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-custom-dataset.expected @@ -0,0 +1,57 @@ +connectors: + forward: {} +exporters: + elasticsearch/componentid-0: + endpoints: + - https://elasticsearch:9200 +inputs: [] +output_permissions: + default: + _elastic_agent_checks: + cluster: + - monitor + _elastic_agent_monitoring: + indices: [] + uuid-for-permissions-on-related-indices: + indices: + - names: + - traces-*-* + privileges: + - auto_configure + - create_doc + - names: + - logs-generic.otel-* + privileges: + - auto_configure + - create_doc +processors: + transform/componentid-0: + trace_statements: + - context: span + statements: + - set(attributes["data_stream.type"], "traces") + - set(attributes["data_stream.dataset"], "zipkin.custom") + - set(attributes["data_stream.namespace"], "ep") + - context: spanevent + statements: + - set(attributes["data_stream.type"], "logs") + - set(attributes["data_stream.namespace"], "ep") +receivers: + zipkin/componentid-0: + endpoint: 0.0.0.0:9411 + parse_string_tags: true +secret_references: [] +service: + pipelines: + traces: + exporters: + - elasticsearch/componentid-0 + receivers: + - forward + traces/componentid-0: + exporters: + - forward + processors: + - transform/componentid-0 + receivers: + - zipkin/componentid-0 diff --git a/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-custom-dataset.yml b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-custom-dataset.yml new file 
mode 100644 index 0000000000..90f01adbef --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-custom-dataset.yml @@ -0,0 +1,4 @@ +vars: + endpoint: "0.0.0.0:9411" + parse_string_tags: true + data_stream.dataset: "zipkin.custom" diff --git a/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-use-apm.expected b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-use-apm.expected new file mode 100644 index 0000000000..0db5270d10 --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-use-apm.expected @@ -0,0 +1,76 @@ +connectors: + elasticapm: {} + forward: {} +exporters: + elasticsearch/componentid-0: + endpoints: + - https://elasticsearch:9200 +inputs: [] +output_permissions: + default: + _elastic_agent_checks: + cluster: + - monitor + _elastic_agent_monitoring: + indices: [] + uuid-for-permissions-on-related-indices: + indices: + - names: + - traces-*-* + privileges: + - auto_configure + - create_doc + - names: + - logs-generic.otel-* + privileges: + - auto_configure + - create_doc + - names: + - metrics-*-* + privileges: + - auto_configure + - create_doc +processors: + elasticapm: {} + transform/componentid-0: + trace_statements: + - context: span + statements: + - set(attributes["data_stream.type"], "traces") + - set(attributes["data_stream.dataset"], "zipkinreceiver") + - set(attributes["data_stream.namespace"], "ep") + - context: spanevent + statements: + - set(attributes["data_stream.type"], "logs") + - set(attributes["data_stream.namespace"], "ep") +receivers: + zipkin/componentid-0: + endpoint: 0.0.0.0:9411 + parse_string_tags: true +secret_references: [] +service: + pipelines: + metrics: + exporters: + - elasticsearch/componentid-0 + receivers: + - forward + metrics/componentid-1: + exporters: + - forward + receivers: + - elasticapm + traces: + exporters: + - elasticsearch/componentid-0 + receivers: + - forward + traces/componentid-0: + exporters: 
+ - elasticapm + - forward + processors: + - elasticapm + - transform/componentid-0 + receivers: + - zipkin/componentid-0 diff --git a/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-use-apm.yml b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-use-apm.yml new file mode 100644 index 0000000000..7b383dfd44 --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-use-apm.yml @@ -0,0 +1,4 @@ +vars: + endpoint: "0.0.0.0:9411" + parse_string_tags: true + use_apm: true diff --git a/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-vars.expected b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-vars.expected new file mode 100644 index 0000000000..11e87e9f50 --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-vars.expected @@ -0,0 +1,57 @@ +connectors: + forward: {} +exporters: + elasticsearch/componentid-0: + endpoints: + - https://elasticsearch:9200 +inputs: [] +output_permissions: + default: + _elastic_agent_checks: + cluster: + - monitor + _elastic_agent_monitoring: + indices: [] + uuid-for-permissions-on-related-indices: + indices: + - names: + - traces-*-* + privileges: + - auto_configure + - create_doc + - names: + - logs-generic.otel-* + privileges: + - auto_configure + - create_doc +processors: + transform/componentid-0: + trace_statements: + - context: span + statements: + - set(attributes["data_stream.type"], "traces") + - set(attributes["data_stream.dataset"], "zipkinreceiver") + - set(attributes["data_stream.namespace"], "ep") + - context: spanevent + statements: + - set(attributes["data_stream.type"], "logs") + - set(attributes["data_stream.namespace"], "ep") +receivers: + zipkin/componentid-0: + endpoint: localhost:9411 + parse_string_tags: false +secret_references: [] +service: + pipelines: + traces: + exporters: + - elasticsearch/componentid-0 + receivers: + - forward + traces/componentid-0: + 
exporters: + - forward + processors: + - transform/componentid-0 + receivers: + - zipkin/componentid-0 diff --git a/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-vars.yml b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-vars.yml new file mode 100644 index 0000000000..72b4ca320d --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default-vars.yml @@ -0,0 +1 @@ +vars: ~ diff --git a/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default.expected b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default.expected new file mode 100644 index 0000000000..9bfab820c3 --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default.expected @@ -0,0 +1,57 @@ +connectors: + forward: {} +exporters: + elasticsearch/componentid-0: + endpoints: + - https://elasticsearch:9200 +inputs: [] +output_permissions: + default: + _elastic_agent_checks: + cluster: + - monitor + _elastic_agent_monitoring: + indices: [] + uuid-for-permissions-on-related-indices: + indices: + - names: + - traces-*-* + privileges: + - auto_configure + - create_doc + - names: + - logs-generic.otel-* + privileges: + - auto_configure + - create_doc +processors: + transform/componentid-0: + trace_statements: + - context: span + statements: + - set(attributes["data_stream.type"], "traces") + - set(attributes["data_stream.dataset"], "zipkinreceiver") + - set(attributes["data_stream.namespace"], "ep") + - context: spanevent + statements: + - set(attributes["data_stream.type"], "logs") + - set(attributes["data_stream.namespace"], "ep") +receivers: + zipkin/componentid-0: + endpoint: 0.0.0.0:9411 + parse_string_tags: true +secret_references: [] +service: + pipelines: + traces: + exporters: + - elasticsearch/componentid-0 + receivers: + - forward + traces/componentid-0: + exporters: + - forward + processors: + - transform/componentid-0 + receivers: + - zipkin/componentid-0 diff --git 
a/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default.yml b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default.yml new file mode 100644 index 0000000000..5893fe8d4f --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/_dev/test/policy/test-default.yml @@ -0,0 +1,3 @@ +vars: + endpoint: "0.0.0.0:9411" + parse_string_tags: true diff --git a/test/packages/parallel/zipkin_input_otel/_dev/test/system/test-default-config.yml b/test/packages/parallel/zipkin_input_otel/_dev/test/system/test-default-config.yml new file mode 100644 index 0000000000..57cf096fef --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/_dev/test/system/test-default-config.yml @@ -0,0 +1,7 @@ +service: backend +vars: + endpoint: "0.0.0.0:9411" + parse_string_tags: false + use_apm: true +assert: + min_count: 20 diff --git a/test/packages/parallel/zipkin_input_otel/agent/input/input.yml.hbs b/test/packages/parallel/zipkin_input_otel/agent/input/input.yml.hbs new file mode 100644 index 0000000000..721ef84c9e --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/agent/input/input.yml.hbs @@ -0,0 +1,36 @@ +receivers: + zipkin: + endpoint: {{endpoint}} + parse_string_tags: {{parse_string_tags}} +{{#if tls_enabled}} + tls: + insecure: {{tls_insecure}} +{{#if tls_insecure_skip_verify}} + insecure_skip_verify: {{tls_insecure_skip_verify}} +{{/if}} +{{#if tls_ca_file}} + ca_file: {{tls_ca_file}} +{{/if}} +{{#if tls_cert_file}} + cert_file: {{tls_cert_file}} +{{/if}} +{{#if tls_key_file}} + key_file: {{tls_key_file}} +{{/if}} +{{#if tls_server_name_override}} + server_name_override: {{tls_server_name_override}} +{{/if}} +{{#if tls_min_version}} + min_version: "{{tls_min_version}}" +{{/if}} +{{#if tls_max_version}} + max_version: "{{tls_max_version}}" +{{/if}} +{{#if tls_include_system_ca_certs_pool}} + include_system_ca_certs_pool: {{tls_include_system_ca_certs_pool}} +{{/if}} +{{/if}} +service: + pipelines: + traces: + receivers: [zipkin] diff 
--git a/test/packages/parallel/zipkin_input_otel/changelog.yml b/test/packages/parallel/zipkin_input_otel/changelog.yml new file mode 100644 index 0000000000..ed6aabd03e --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/changelog.yml @@ -0,0 +1,6 @@ +# newer versions go on top +- version: "0.1.0" + changes: + - description: Initial draft of the package + type: enhancement + link: https://github.com/elastic/integrations/pull/17226 diff --git a/test/packages/parallel/zipkin_input_otel/docs/README.md b/test/packages/parallel/zipkin_input_otel/docs/README.md new file mode 100644 index 0000000000..80968afe73 --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/docs/README.md @@ -0,0 +1,22 @@ +# Zipkin OpenTelemetry Input Package + +## Overview +The Zipkin OpenTelemetry Input Package for Elastic enables collection of trace data from applications instrumented with [Zipkin](https://zipkin.io/) through OpenTelemetry protocols using the [zipkinreceiver](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/receiver/zipkinreceiver#zipkin-receiver). + +### How it works +This package receives Zipkin trace data (V1 and V2 JSON/Protobuf formats) by configuring the Zipkin receiver in the Input Package; that configuration is applied to the zipkinreceiver in the EDOT collector, which forwards the data to Elastic Agent. The Elastic Agent processes and enriches the data before sending it to Elasticsearch for indexing and analysis. + +## Configuration + +For the full list of settings exposed for the receiver and examples, refer to the [Zipkin Receiver documentation](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/receiver/zipkinreceiver). + +## Troubleshooting + +If you encounter issues: + +1. Verify the endpoint is accessible and not blocked by a firewall. +2. Ensure applications are sending Zipkin-formatted traces to the configured endpoint (default: `http://localhost:9411`). +3. 
Check the Elastic Agent logs for any receiver errors. + +## Traces reference +For more details about the Zipkin receiver and its configuration options, refer to the [Zipkin Receiver documentation](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/receiver/zipkinreceiver) in the upstream OpenTelemetry Collector repository. diff --git a/test/packages/parallel/zipkin_input_otel/img/zipkin_symbol_logo_otel.svg b/test/packages/parallel/zipkin_input_otel/img/zipkin_symbol_logo_otel.svg new file mode 100644 index 0000000000..06e4a16fde --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/img/zipkin_symbol_logo_otel.svg @@ -0,0 +1,63 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/test/packages/parallel/zipkin_input_otel/manifest.yml b/test/packages/parallel/zipkin_input_otel/manifest.yml new file mode 100644 index 0000000000..024fb0f5c8 --- /dev/null +++ b/test/packages/parallel/zipkin_input_otel/manifest.yml @@ -0,0 +1,131 @@ +format_version: 3.5.0 +name: zipkin_input_otel +title: "Zipkin OpenTelemetry Input Package" +version: "0.1.0" +source: + license: "Elastic-2.0" +description: "Collect Zipkin traces using OpenTelemetry Collector" +type: input +categories: + - observability + - opentelemetry +conditions: + kibana: + version: "^9.4.0" + elastic: + subscription: "basic" +icons: + - src: /img/zipkin_symbol_logo_otel.svg + title: Zipkin OTel logo + size: 32x32 + type: image/svg+xml +policy_templates: + - name: zipkinreceiver + type: traces + title: Zipkin OpenTelemetry Input + description: Collect Zipkin traces using OpenTelemetry Collector + input: otelcol + template_path: input.yml.hbs + vars: + - name: parse_string_tags + type: bool + title: Parse String Tags + required: false + description: If enabled, string tags/binary annotations will be parsed into int/bool/float types. 
+ default: false + show_user: true + # TLS settings + - name: tls_enabled + type: bool + required: false + title: Enable TLS Configuration + description: Enable TLS configuration for connecting to the server. + default: false + show_user: true + - name: tls_insecure + type: bool + required: false + title: Disable TLS + description: Whether to disable client transport security for the connection. Set to false to enable TLS. + default: false + show_user: false + - name: tls_insecure_skip_verify + type: bool + required: false + title: Skip TLS Verification + description: Whether to skip verifying the server certificate when TLS is enabled. + default: false + show_user: false + - name: tls_ca_file + type: text + required: false + title: TLS CA File + description: Path to the CA certificate file for server certificate verification. Only used when TLS is enabled. + show_user: false + - name: tls_cert_file + type: text + required: false + title: TLS Certificate File + description: Path to the TLS certificate file for client authentication. + show_user: false + - name: tls_key_file + type: text + required: false + title: TLS Key File + description: Path to the TLS key file for client authentication. + show_user: false + - name: tls_server_name_override + type: text + required: false + title: TLS Server Name Override + description: Override the virtual host name of authority in TLS requests. + show_user: false + - name: tls_min_version + type: select + required: false + title: TLS Min Version + description: Minimum acceptable TLS version. + show_user: false + options: + - text: "1.0" + value: "1.0" + - text: "1.1" + value: "1.1" + - text: "1.2" + value: "1.2" + - text: "1.3" + value: "1.3" + - name: tls_max_version + type: select + required: false + title: TLS Max Version + description: Maximum acceptable TLS version. 
+ show_user: false + options: + - text: "1.0" + value: "1.0" + - text: "1.1" + value: "1.1" + - text: "1.2" + value: "1.2" + - text: "1.3" + value: "1.3" + - name: tls_include_system_ca_certs_pool + type: bool + required: false + title: Include System CA Certs Pool + description: Whether to load the system certificate authorities pool alongside the certificate authority. + default: false + show_user: false +# Force to set a variable at package level +vars: + - name: endpoint + type: text + title: Endpoint + required: true + description: The host:port address to listen on for Zipkin spans. + default: localhost:9411 + show_user: true +owner: + github: elastic/ecosystem + type: elastic