Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 39 additions & 4 deletions internal/packages/assets.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,11 @@
package packages

import (
"encoding/json"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strings"

"github.com/pkg/errors"

Expand Down Expand Up @@ -38,6 +38,11 @@ type Asset struct {
DataStream string
}

// String returns the asset's textual form, "<id> (type: <type>)",
// suitable for log output and test failure messages.
func (a Asset) String() string {
	return fmt.Sprintf("%s (type: %s)", a.ID, a.Type)
}

// LoadPackageAssets parses the package contents and returns a list of assets defined by the package.
func LoadPackageAssets(pkgRootPath string) ([]Asset, error) {
assets, err := loadKibanaAssets(pkgRootPath)
Expand Down Expand Up @@ -116,6 +121,12 @@ func loadElasticsearchAssets(pkgRootPath string) ([]Asset, error) {
assets = append(assets, asset)

if dsManifest.Type == dataStreamTypeLogs {
elasticsearchDirPath := filepath.Join(filepath.Dir(dsManifestPath), "elasticsearch", "ingest_pipeline")
pipelineFiles, _ := ioutil.ReadDir(elasticsearchDirPath)
if pipelineFiles == nil || len(pipelineFiles) == 0 {
continue // ingest pipeline is not defined
}

ingestPipelineName := dsManifest.GetPipelineNameOrDefault()
if ingestPipelineName == defaultPipelineName {
ingestPipelineName = fmt.Sprintf("%s-%s.%s-%s", dsManifest.Type, pkgManifest.Name, dsManifest.Name, pkgManifest.Version)
Expand Down Expand Up @@ -154,15 +165,39 @@ func loadFileBasedAssets(kibanaAssetsFolderPath string, assetType AssetType) ([]
continue
}

name := f.Name()
id := strings.TrimSuffix(name, ".json")
assetPath := filepath.Join(assetsFolderPath, f.Name())
assetID, err := readAssetID(assetPath)
if err != nil {
return nil, errors.Wrapf(err, "can't read asset ID (path: %s)", assetPath)
}

asset := Asset{
ID: id,
ID: assetID,
Type: assetType,
}
assets = append(assets, asset)
}

return assets, nil
}

// readAssetID extracts the asset identifier from the JSON document stored
// at assetPath. It returns an error if the file can't be read, the content
// isn't valid JSON, or the document doesn't define a non-empty "id" field.
func readAssetID(assetPath string) (string, error) {
	body, err := ioutil.ReadFile(assetPath)
	if err != nil {
		return "", errors.Wrap(err, "can't read file body")
	}

	// Only the top-level "id" property is of interest; everything else
	// in the asset document is ignored.
	var payload struct {
		ID string `json:"id"`
	}
	if err := json.Unmarshal(body, &payload); err != nil {
		return "", errors.Wrap(err, "can't unmarshal asset")
	}

	if payload.ID == "" {
		return "", errors.New("empty asset ID")
	}
	return payload.ID, nil
}
11 changes: 10 additions & 1 deletion internal/testrunner/runners/asset/runner.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ package asset
import (
"fmt"
"path/filepath"
"strings"
"time"

es "github.com/elastic/go-elasticsearch/v7"
Expand Down Expand Up @@ -125,7 +126,7 @@ func (r *runner) run() ([]testrunner.TestResult, error) {

if !findActualAsset(actualAssets, e) {
result.FailureMsg = "could not find expected asset"
result.FailureDetails = fmt.Sprintf("could not find expected asset with ID = %s and type = %s. Assets loaded = %v", e.ID, e.Type, actualAssets)
result.FailureDetails = fmt.Sprintf("could not find %s asset \"%s\". Assets loaded:\n%s", e.Type, e.ID, formatAssetsAsString(actualAssets))
}

results = append(results, result)
Expand Down Expand Up @@ -154,3 +155,11 @@ func findActualAsset(actualAssets []packages.Asset, expectedAsset packages.Asset

return false
}

// formatAssetsAsString renders the given assets as a bulleted list,
// one asset per line, using each asset's String representation.
func formatAssetsAsString(assets []packages.Asset) string {
	var b strings.Builder
	for _, a := range assets {
		b.WriteString("- ")
		b.WriteString(a.String())
		b.WriteString("\n")
	}
	return b.String()
}
9 changes: 9 additions & 0 deletions test/packages/log/data_stream/log/agent/stream/stream.yml.hbs
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
paths:
{{#each paths}}
- {{this}}
{{/each}}

data_stream:
dataset: {{data_stream.dataset}}

{{custom}}
198 changes: 198 additions & 0 deletions test/packages/log/data_stream/log/fields/agent.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,198 @@
- name: cloud
title: Cloud
group: 2
description: Fields related to the cloud or infrastructure the events are coming from.
footnote: 'Examples: If Metricbeat is running on an EC2 host and fetches data from its host, the cloud info contains the data about this machine. If Metricbeat runs on a remote machine outside the cloud and fetches data from a service running in the cloud, the field contains cloud data from the machine the service is running on.'
type: group
fields:
- name: account.id
level: extended
type: keyword
ignore_above: 1024
description: 'The cloud account or organization id used to identify different entities in a multi-tenant environment.

Examples: AWS account id, Google Cloud ORG Id, or other unique identifier.'
example: 666777888999
- name: availability_zone
level: extended
type: keyword
ignore_above: 1024
description: Availability zone in which this host is running.
example: us-east-1c
- name: instance.id
level: extended
type: keyword
ignore_above: 1024
description: Instance ID of the host machine.
example: i-1234567890abcdef0
- name: instance.name
level: extended
type: keyword
ignore_above: 1024
description: Instance name of the host machine.
- name: machine.type
level: extended
type: keyword
ignore_above: 1024
description: Machine type of the host machine.
example: t2.medium
- name: provider
level: extended
type: keyword
ignore_above: 1024
description: Name of the cloud provider. Example values are aws, azure, gcp, or digitalocean.
example: aws
- name: region
level: extended
type: keyword
ignore_above: 1024
description: Region in which this host is running.
example: us-east-1
- name: project.id
type: keyword
description: Name of the project in Google Cloud.
- name: image.id
type: keyword
description: Image ID for the cloud instance.
- name: container
title: Container
group: 2
description: 'Container fields are used for meta information about the specific container that is the source of information.

These fields help correlate data from containers on any runtime.'
type: group
fields:
- name: id
level: core
type: keyword
ignore_above: 1024
description: Unique container id.
- name: image.name
level: extended
type: keyword
ignore_above: 1024
description: Name of the image the container was built on.
- name: labels
level: extended
type: object
object_type: keyword
description: Image labels.
- name: name
level: extended
type: keyword
ignore_above: 1024
description: Container name.
- name: host
title: Host
group: 2
description: 'A host is defined as a general computing instance.

ECS host.* fields should be populated with details about the host on which the event happened, or from which the measurement was taken. Host types include hardware, virtual machines, Docker containers, and Kubernetes nodes.'
type: group
fields:
- name: architecture
level: core
type: keyword
ignore_above: 1024
description: Operating system architecture.
example: x86_64
- name: domain
level: extended
type: keyword
ignore_above: 1024
description: 'Name of the domain of which the host is a member.

For example, on Windows this could be the host''s Active Directory domain or NetBIOS domain name. For Linux this could be the domain of the host''s LDAP provider.'
example: CONTOSO
default_field: false
- name: hostname
level: core
type: keyword
ignore_above: 1024
description: 'Hostname of the host.

It normally contains what the `hostname` command returns on the host machine.'
- name: id
level: core
type: keyword
ignore_above: 1024
description: 'Unique host id.

As hostname is not always unique, use values that are meaningful in your environment.

Example: The current usage of `beat.name`.'
- name: ip
level: core
type: ip
description: Host ip addresses.
- name: mac
level: core
type: keyword
ignore_above: 1024
description: Host mac addresses.
- name: name
level: core
type: keyword
ignore_above: 1024
description: 'Name of the host.

It can contain what `hostname` returns on Unix systems, the fully qualified domain name, or a name specified by the user. The sender decides which value to use.'
- name: os.family
level: extended
type: keyword
ignore_above: 1024
description: OS family (such as redhat, debian, freebsd, windows).
example: debian
- name: os.kernel
level: extended
type: keyword
ignore_above: 1024
description: Operating system kernel version as a raw string.
example: 4.4.0-112-generic
- name: os.name
level: extended
type: keyword
ignore_above: 1024
multi_fields:
- name: text
type: text
norms: false
default_field: false
description: Operating system name, without the version.
example: Mac OS X
- name: os.platform
level: extended
type: keyword
ignore_above: 1024
description: Operating system platform (such as centos, ubuntu, windows).
example: darwin
- name: os.version
level: extended
type: keyword
ignore_above: 1024
description: Operating system version as a raw string.
example: 10.14.1
- name: type
level: core
type: keyword
ignore_above: 1024
description: 'Type of host.

For Cloud providers this can be the machine type like `t2.medium`. If vm, this could be the container, for example, or other information meaningful in your environment.'
- name: containerized
type: boolean
description: >
If the host is a container.

- name: os.build
type: keyword
example: "18D109"
description: >
OS build information.

- name: os.codename
type: keyword
example: "stretch"
description: >
OS codename, if any.

12 changes: 12 additions & 0 deletions test/packages/log/data_stream/log/fields/base-fields.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
- name: data_stream.type
type: constant_keyword
description: Data stream type.
- name: data_stream.dataset
type: constant_keyword
description: Data stream dataset.
- name: data_stream.namespace
type: constant_keyword
description: Data stream namespace.
- name: '@timestamp'
type: date
description: Event timestamp.
28 changes: 28 additions & 0 deletions test/packages/log/data_stream/log/manifest.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
title: Log Dataset
type: logs
streams:
- input: logfile
description: Collect your custom log files.
title: Collect log files
vars:
- name: paths
required: true
title: Log file path
description: Path to log files to be collected
type: text
multi: true
- name: data_stream.dataset
required: true
default: generic
title: Dataset name
description: >
Set the name for your dataset. Changing the dataset will send the data to a different index. You can't use `-` in the name of a dataset, and you can only use valid characters for [Elasticsearch index names](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html).

type: text
- name: custom
title: Custom configurations
description: >
Here YAML configuration options can be used to be added to your configuration. Be careful using this as it might break your configuration file.

type: yaml
default: ""
3 changes: 3 additions & 0 deletions test/packages/log/docs/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
# Log Package

The log package is used as a generic package based on which any log file can be tailed by adjusting the ingest pipeline.
4 changes: 4 additions & 0 deletions test/packages/log/img/icon.svg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
25 changes: 25 additions & 0 deletions test/packages/log/manifest.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
format_version: 1.0.0
name: log
title: Custom logs
description: >
Collect your custom logs.

type: integration
version: 0.4.6
release: experimental
license: basic
categories:
- custom
policy_templates:
- name: logs
title: Custom logs
description: Collect your custom log files.
inputs:
- type: logfile
title: Custom log file
description: Collect your custom log files.
icons:
- src: "/img/icon.svg"
type: "image/svg+xml"
owner:
github: elastic/integrations-services