Skip to content

Commit

Permalink
[Observability Onboarding] Split Agent config into multiple files (el…
Browse files Browse the repository at this point in the history
…astic#191241)

Resolves elastic#191917

## Summary

Splits up the Agent config generated during auto-detect-based onboarding
into multiple files.

This makes it easier for users to make changes following the initial
onboarding.

The backup feature has also been updated to include files inside
`inputs.d` directory.

## Example

Before this change the auto-detect script would write all settings to
`elastic-agent.yml` file.

After this change the script writes one separate config file for each
integration that was detected, for example:

1. `elastic-agent.yml` - Contains global settings
2. `inputs.d/system.yml` - Contains inputs config for System integration
3. `inputs.d/docker.yml` - Contains inputs config for Docker integration

## Screenshot

<img width="1039" alt="Screenshot 2024-08-23 at 16 49 34"
src="https://github.com/user-attachments/assets/17bb7b01-d40e-4491-8bb5-20daf115938a">
  • Loading branch information
thomheymann authored Sep 3, 2024
1 parent a0cc891 commit d14432e
Show file tree
Hide file tree
Showing 10 changed files with 434 additions and 75 deletions.
3 changes: 1 addition & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -1229,7 +1229,7 @@
"suricata-sid-db": "^1.0.2",
"swr": "^2.2.5",
"symbol-observable": "^1.2.0",
"tar": "^6.2.1",
"tar": "^7.4.3",
"textarea-caret": "^3.1.0",
"tinycolor2": "1.4.1",
"tinygradient": "0.4.3",
Expand Down Expand Up @@ -1609,7 +1609,6 @@
"@types/styled-components": "^5.1.0",
"@types/supertest": "^6.0.2",
"@types/tapable": "^1.0.6",
"@types/tar": "^6.1.11",
"@types/testing-library__jest-dom": "^5.14.7",
"@types/textarea-caret": "^3.0.1",
"@types/tinycolor2": "^1.4.1",
Expand Down
4 changes: 2 additions & 2 deletions packages/kbn-dev-utils/src/extract.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ import Path from 'path';
import { pipeline } from 'stream';
import { promisify } from 'util';

import Tar from 'tar';
import * as tar from 'tar';
import Yauzl, { ZipFile, Entry } from 'yauzl';
import * as Rx from 'rxjs';
import { map, mergeMap, takeUntil } from 'rxjs';
Expand Down Expand Up @@ -61,7 +61,7 @@ export async function extract({
await Fs.mkdir(targetDir, { recursive: true });

if (archivePath.endsWith('.tar') || archivePath.endsWith('.tar.gz')) {
return await Tar.x({
return await tar.extract({
file: archivePath,
cwd: targetDir,
stripComponents,
Expand Down
4 changes: 2 additions & 2 deletions src/dev/build/lib/fs.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ import archiver from 'archiver';
import globby from 'globby';
import cpy from 'cpy';
import del from 'del';
import tar, { ExtractOptions } from 'tar';
import * as tar from 'tar';
import { ToolingLog } from '@kbn/tooling-log';

export function assertAbsolute(path: string) {
Expand Down Expand Up @@ -199,7 +199,7 @@ export async function getFileHash(path: string, algo: string) {
export async function untar(
source: string,
destination: string,
extractOptions: ExtractOptions = {}
extractOptions: tar.TarOptionsWithAliasesAsyncNoFile = {}
) {
assertAbsolute(source);
assertAbsolute(destination);
Expand Down
3 changes: 1 addition & 2 deletions src/dev/build/lib/integration_tests/fs.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -305,11 +305,10 @@ describe('untar()', () => {
expect(await read(resolve(destination, 'foo_dir/foo/foo.txt'))).toBe('foo\n');
});

it('passed thrid argument to Extract class, overriding path with destination', async () => {
it('passed thrid argument to Extract class', async () => {
const destination = resolve(TMP, 'a/b/c');

await untar(FOO_TAR_PATH, destination, {
path: '/dev/null',
strip: 1,
});

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

import { finished } from 'stream/promises';

import tar from 'tar';
import * as tar from 'tar';
import yauzl from 'yauzl';

import { bufferToStream, streamToBuffer } from '../streams';
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ selected_unknown_log_file_pattern_tsv_string=""
custom_log_file_path_list_tsv_string=""
elastic_agent_artifact_name=""
elastic_agent_config_path="/opt/Elastic/Agent/elastic-agent.yml"
elastic_agent_tmp_config_path="/tmp/elastic-agent-config-template.yml"
elastic_agent_tmp_config_path="/tmp/elastic-agent-config.tar"

OS="$(uname)"
ARCH="$(uname -m)"
Expand Down Expand Up @@ -149,7 +149,7 @@ update_step_progress() {
download_elastic_agent() {
local download_url="https://artifacts.elastic.co/downloads/beats/elastic-agent/${elastic_agent_artifact_name}.tar.gz"
rm -rf "./${elastic_agent_artifact_name}" "./${elastic_agent_artifact_name}.tar.gz"
curl -L -O $download_url --silent --fail
curl -L -O "$download_url" --silent --fail

if [ "$?" -eq 0 ]; then
printf "\e[1;32m✓\e[0m %s\n" "Elastic Agent downloaded to $(pwd)/$elastic_agent_artifact_name.tar.gz"
Expand All @@ -176,7 +176,7 @@ install_elastic_agent() {
"./${elastic_agent_artifact_name}/elastic-agent" install -f -n > /dev/null

if [ "$?" -eq 0 ]; then
printf "\e[1;32m✓\e[0m %s\n" "Elastic Agent installed to $(dirname $elastic_agent_config_path)"
printf "\e[1;32m✓\e[0m %s\n" "Elastic Agent installed to $(dirname "$elastic_agent_config_path")"
update_step_progress "ea-install" "complete"
else
update_step_progress "ea-install" "danger" "Failed to install Elastic Agent, see script output for error."
Expand Down Expand Up @@ -221,21 +221,27 @@ ensure_elastic_agent_healthy() {

backup_elastic_agent_config() {
if [ -f "$elastic_agent_config_path" ]; then
echo -e "\nExisting config file found at $elastic_agent_config_path";
echo -e "\nExisting config found at $elastic_agent_config_path";

printf "\n\e[1;36m?\e[0m \e[1m%s\e[0m \e[2m%s\e[0m" "Create backup and continue installation?" "[Y/n] (default: Yes): "
read confirmation_reply
confirmation_reply="${confirmation_reply:-Y}"

if [[ "$confirmation_reply" =~ ^[Yy](es)?$ ]]; then
local backup_path="$(pwd)/$(basename "${elastic_agent_config_path%.yml}.$(date +%s).yml")" # e.g. /opt/Elastic/Agent/elastic-agent.1712267614.yml
cp $elastic_agent_config_path $backup_path
# Backup to tar archive if `inputs.d` directory exists
if [ -d "$(dirname "$elastic_agent_config_path")/inputs.d" ]; then
backup_path="${backup_path%.yml}.tar" # Change file extension to `.tar`
tar --create --file "$backup_path" --directory "$(dirname "$elastic_agent_config_path")" "$(basename "$elastic_agent_config_path")" 'inputs.d'
else
cp "$elastic_agent_config_path" "$backup_path"
fi

if [ "$?" -eq 0 ]; then
printf "\n\e[1;32m✓\e[0m %s\n" "Backup saved to $backup_path"
else
update_step_progress "ea-config" "warning" "Failed to backup existing configuration"
fail "Failed to backup existing config file - Try manually creating a backup or delete your existing config file before re-running this script"
fail "Failed to backup existing config - Try manually creating a backup or delete your existing config before re-running this script"
fi
else
fail "Installation aborted"
Expand All @@ -257,15 +263,16 @@ install_integrations() {
done

curl --request POST \
-o $elastic_agent_tmp_config_path \
--url "$kibana_api_endpoint/internal/observability_onboarding/flow/$onboarding_flow_id/integrations/install" \
--header "Authorization: ApiKey $install_api_key_encoded" \
--header "Content-Type: text/tab-separated-values" \
--header "Accept: application/x-tar" \
--header "kbn-xsrf: true" \
--header "x-elastic-internal-origin: Kibana" \
--data "$(echo -e "$install_integrations_api_body_string")" \
--no-progress-meter \
--fail
--fail \
--output "$elastic_agent_tmp_config_path"

if [ "$?" -eq 0 ]; then
printf "\n\e[1;32m✓\e[0m %s\n" "Integrations installed"
Expand All @@ -278,9 +285,20 @@ install_integrations() {
apply_elastic_agent_config() {
local decoded_ingest_api_key=$(echo "$ingest_api_key_encoded" | base64 -d)

sed "s/'\${API_KEY}'/$decoded_ingest_api_key/g" $elastic_agent_tmp_config_path > $elastic_agent_config_path
# Verify that the downloaded archive contains the expected `elastic-agent.yml` file
tar --list --file "$elastic_agent_tmp_config_path" --include 'elastic-agent.yml' > /dev/null && \
# Remove existing config file including `inputs.d` directory
rm -rf "$elastic_agent_config_path" "$(dirname "$elastic_agent_config_path")/inputs.d" && \
# Extract new config files from downloaded archive
tar --extract --file "$elastic_agent_tmp_config_path" --include 'elastic-agent.yml' --include 'inputs.d/*.yml' --directory "$(dirname "$elastic_agent_config_path")" && \
# Replace placeholder with the Ingest API key
sed -i '' "s/\${API_KEY}/$decoded_ingest_api_key/" "$elastic_agent_config_path"
if [ "$?" -eq 0 ]; then
printf "\e[1;32m✓\e[0m %s\n" "Config written to $elastic_agent_config_path"
printf "\e[1;32m✓\e[0m %s\n" "Config written to:"
tar --list --file "$elastic_agent_tmp_config_path" --include 'elastic-agent.yml' --include 'inputs.d/*.yml' | while read -r file; do
echo " - $(dirname "$elastic_agent_config_path")/$file"
done

update_step_progress "ea-config" "complete"
else
update_step_progress "ea-config" "warning" "Failed to configure Elastic Agent"
Expand Down Expand Up @@ -389,7 +407,7 @@ build_unknown_log_file_patterns() {
fi

unknown_log_file_pattern_list_string+="$(dirname "$log_file_path")/*.log\n"
done <<< "$(echo -e $unknown_log_file_path_list_string)"
done <<< "$(echo -e "$unknown_log_file_path_list_string")"

unknown_log_file_pattern_list_string=$(echo -e "$unknown_log_file_pattern_list_string" | sort -u)
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { makeTar, type Entry } from './make_tar';
import * as tar from 'tar';
import expect from 'expect';

describe('makeTar', () => {
  it('creates a valid tar archive that can be extracted', () => {
    // Build an in-memory archive containing one directory and two files.
    const archive = makeTar([
      {
        type: 'Directory',
        path: 'inputs.d/',
        mode: 0o755,
      },
      {
        type: 'File',
        path: 'inputs.d/system.yml',
        mode: 0o644,
        data: 's'.repeat(512),
      },
      {
        type: 'File',
        path: 'inputs.d/redis.yml',
        mode: 0o644,
        data: 'r'.repeat(1024),
      },
    ]);

    // Round-trip the archive through the `tar` parser, collecting each entry
    // together with its concatenated contents as it is read back.
    const collected: Entry[] = [];
    const parser = tar.extract({
      sync: true,
      onReadEntry: (readEntry) => {
        const record: Entry = readEntry;
        readEntry.on('data', (chunk) => {
          record.data = (record.data ?? '') + chunk.toString();
        });
        collected.push(record);
      },
    });
    parser.write(archive);

    // Every entry must survive the round trip with its type, path, mode and data intact.
    expect(collected).toEqual([
      expect.objectContaining({
        type: 'Directory',
        path: 'inputs.d/',
        mode: 0o755,
      }),
      expect.objectContaining({
        type: 'File',
        path: 'inputs.d/system.yml',
        mode: 0o644,
        data: 's'.repeat(512),
      }),
      expect.objectContaining({
        type: 'File',
        path: 'inputs.d/redis.yml',
        mode: 0o644,
        data: 'r'.repeat(1024),
      }),
    ]);
  });
});
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/

import { Header, type HeaderData } from 'tar';

const BLOCK_SIZE = 512; // https://www.gnu.org/software/tar/manual/html_node/Standard.html

// A single archive entry: all tar header fields except `size` (which makeTar derives
// from `data`), plus the optional file contents as a string.
export interface Entry extends Omit<HeaderData, 'size'> {
  data?: string;
}

/**
 * Creates a tar archive from a list of entries in memory.
 *
 * Ensure you set the appropriate permissions (`0o755` for directories and `0o644` for files) or the
 * extracted files won't be readable.
 *
 * Example:
 *
 * ```ts
 * const now = new Date();
 * const archive = makeTar([
 *   {
 *     type: 'Directory',
 *     path: 'inputs.d/',
 *     mode: 0o755,
 *     mtime: now,
 *   },
 *   {
 *     type: 'File',
 *     path: 'inputs.d/redis.yml',
 *     mode: 0o644,
 *     mtime: now,
 *     data: 'inputs:\n- type: logs',
 *   },
 * ]);
 * ```
 *
 * @param entries Header fields (and optional contents) for each file/directory to archive.
 * @returns Buffer holding the complete archive, including the end-of-file marker.
 */
export function makeTar(entries: Entry[]) {
  // A tar archive contains a series of blocks. Each block contains 512 bytes. Each file archived is
  // represented by a header block which describes the file, followed by zero or more blocks which
  // give the contents of the file.
  const blocks = entries.map((entry) => {
    // Use the UTF-8 byte length, not the UTF-16 string length: `buffer.write` below emits UTF-8
    // bytes, so multi-byte characters would otherwise yield a wrong `size` header and a truncated
    // content block.
    const size = typeof entry.data === 'string' ? Buffer.byteLength(entry.data) : 0;
    const buffer = Buffer.alloc(BLOCK_SIZE * (Math.ceil(size / BLOCK_SIZE) + 1));

    // Write header into first block
    const header = new Header({ ...entry, size });
    header.encode(buffer, 0);

    // Write data into subsequent blocks
    if (typeof entry.data === 'string') {
      buffer.write(entry.data, BLOCK_SIZE);
    }

    return buffer;
  });

  // At the end of the archive file there are two 512-byte blocks filled with binary zeros as an
  // end-of-file marker.
  const eof = Buffer.alloc(2 * BLOCK_SIZE);
  blocks.push(eof);

  return Buffer.concat(blocks);
}
Loading

0 comments on commit d14432e

Please sign in to comment.