diff --git a/CHANGELOG.asciidoc b/CHANGELOG.asciidoc index 23b10b8810c..159d0600c6b 100644 --- a/CHANGELOG.asciidoc +++ b/CHANGELOG.asciidoc @@ -13,6 +13,7 @@ https://github.com/elastic/beats/compare/v5.0.0-alpha1...master[Check the HEAD d ==== Breaking changes *Affecting all Beats* +- Add scripts for managing the dashboards of a single Beat {pull}1359[1359] *Packetbeat* diff --git a/dev-tools/README.md b/dev-tools/README.md index 5707d4e01c3..e0f1739ba12 100644 --- a/dev-tools/README.md +++ b/dev-tools/README.md @@ -16,3 +16,14 @@ Other scripts: |----------------------|-------------| | aggregate_coverage.py | Used to create coverage reports that contain both unit and system tests data | | merge_pr | Used to make it easier to open a PR that merges one branch into another. | + + +Import / export the dashboards of a single Beat: + +| File | Description | +|-----------------------|-------------| +| import_dashboards.sh | Bash script to import the Beat dashboards from a local directory in Elasticsearch | +| import_dashboards.ps1 | Powershell script to import the Beat dashboards from a local directory in Elasticsearch | +| export_dashboards.py | Python script to export the Beat dashboards from Elasticsearch to a local directory| + + diff --git a/libbeat/scripts/kibana_export.py b/dev-tools/export_dashboards.py similarity index 95% rename from libbeat/scripts/kibana_export.py rename to dev-tools/export_dashboards.py index 528b7407537..ae6294ed74c 100644 --- a/libbeat/scripts/kibana_export.py +++ b/dev-tools/export_dashboards.py @@ -58,8 +58,6 @@ def ExportVisualization(es, visualization, kibana_index, output_directory): search, kibana_index, output_directory) - else: - print("Missing savedSearchId from {}".format(doc["_source"])) def ExportSearch(es, search, kibana_index, output_directory): @@ -115,8 +113,8 @@ def main(): help="Elasticsearch index for the Kibana dashboards. " "E.g. 
.kibana", default=".kibana") - parser.add_argument("--dir", help="Output directory", - default="saved") + parser.add_argument("--dir", help="Output directory. E.g. output", + default="output") args = parser.parse_args() diff --git a/libbeat/scripts/kibana_import.ps1 b/dev-tools/import_dashboards.ps1 similarity index 97% rename from libbeat/scripts/kibana_import.ps1 rename to dev-tools/import_dashboards.ps1 index e7abdb96abb..a2a08e5800e 100644 --- a/libbeat/scripts/kibana_import.ps1 +++ b/dev-tools/import_dashboards.ps1 @@ -5,13 +5,14 @@ param( [String] $d, [String] $dir, [switch] $h = $false, [switch] $help = $false ) +function prompt { "$pwd\" } # The default value of the variable. Initialize your own variables here $ELASTICSEARCH="http://localhost:9200" $CURL="Invoke-RestMethod" $KIBANA_INDEX=".kibana" $SCRIPT=$MyInvocation.MyCommand.Name -$KIBANA_DIR= +$KIBANA_DIR=prompt # Verify that Invoke-RestMethod is present. It was added in PS 3. if (!(Get-Command $CURL -errorAction SilentlyContinue)) @@ -33,6 +34,7 @@ Options: Print the help menu. -d | -dir Local directory where the dashboards, visualizations, searches and index pattern are saved. + By default is $KIBANA_DIR. -l | -url Elasticseacrh URL. By default is $ELASTICSEARCH. -u | -user diff --git a/libbeat/scripts/kibana_import.sh b/dev-tools/import_dashboards.sh similarity index 91% rename from libbeat/scripts/kibana_import.sh rename to dev-tools/import_dashboards.sh index 3897216be01..d575b2d5790 100755 --- a/libbeat/scripts/kibana_import.sh +++ b/dev-tools/import_dashboards.sh @@ -10,13 +10,16 @@ ELASTICSEARCH=http://localhost:9200 CURL=curl KIBANA_INDEX=".kibana" -DIR= +DIR=. print_usage() { echo " -Load the dashboards, visualizations and index patterns into the given -Elasticsearch instance. +Import the dashboards, visualizations and index patterns into Kibana. 
+ +The Kibana dashboards together with its dependencies are saved into a +special index pattern in Elasticsearch (by default .kibana), so you need to +specify the Elasticsearch URL and optionally an username and password. Usage: $(basename "$0") -url ${ELASTICSEARCH} -user admin:secret -index ${KIBANA_INDEX} @@ -26,6 +29,7 @@ Options: Print the help menu. -d | -dir Local directory where the dashboards, visualizations, searches and index pattern are saved. + By default is current directory. -l | -url Elasticseacrh URL. By default is ${ELASTICSEARCH}. -u | -user diff --git a/libbeat/docs/index.asciidoc b/libbeat/docs/index.asciidoc index 7b7dec2a1bb..e93f2f43dd3 100644 --- a/libbeat/docs/index.asciidoc +++ b/libbeat/docs/index.asciidoc @@ -28,4 +28,6 @@ include::./dashboards.asciidoc[] include::./newbeat.asciidoc[] +include::./newdashboards.asciidoc[] + include::./release.asciidoc[] diff --git a/libbeat/docs/newdashboards.asciidoc b/libbeat/docs/newdashboards.asciidoc new file mode 100644 index 00000000000..fbafae6618c --- /dev/null +++ b/libbeat/docs/newdashboards.asciidoc @@ -0,0 +1,154 @@ +[[new-dashboards]] +== Developer Guide: Creating new Kibana dashboards +This guide walks you through the steps for creating a new Kibana dashboard +or changing the existing Kibana dashboards for a single Beat. + +If the Beat you are targeting already has a few dashboards, the first step would be to import +those dashboards to Kibana and then start changing or adding a dashboard from the existing ones. + +Kibana saves the dashboards together with all the dependencies (visualizations, searches and +index patterns) in a special index in Elasticsearch. By default the index is `.kibana`, but it can be changed to anything. + +After you have created or changed a dashboard in Kibana for a certain Beat, you can export it together with all dependencies to +your local directory.
+ +We recommend you use the virtual environment under +https://github.com/elastic/beats/tree/master/testing/environments[beats/testing/environments] with the latest version of +Kibana and Elasticsearch to import, create and export the Kibana dashboards, so the latest dashboards are exported from +the same Kibana version. + +=== Import existing Beat dashboards + +For Unix systems, you can use the bash script `import_dashboards.sh` +and for Windows you can use the PowerShell script `import_dashboards.ps1` from +https://github.com/elastic/beats/tree/master/dev-tools[dev-tools]. + +The command has the following options: + +[source,shell] +---------------------------------------------------------------------- +$ ./import_dashboards.sh -h + + +Import the dashboards, visualizations and index patterns into Kibana. + +The Kibana dashboards together with its dependencies are saved into a +special index pattern in Elasticsearch (by default .kibana), so you need to +specify the Elasticsearch URL and optionally an username and password. + +Usage: + import_dashboards.sh -url http://localhost:9200 -user admin:secret -index .kibana + +Options: + -h | -help + Print the help menu. + -d | -dir + Local directory where the dashboards, visualizations, searches and index + pattern are saved. + -l | -url + Elasticseacrh URL. By default is http://localhost:9200. + -u | -user + Username and password for authenticating to Elasticsearch using Basic + Authentication. The username and password should be separated by a + colon (i.e. admin:secret). By default no username and password are + used. + -i | -index + Kibana index pattern where to save the dashboards, visualizations, + index patterns. By default is .kibana. + +---------------------------------------------------------------------- + +==== dir +The input directory with the dashboards together with their dependencies. The default value is the current directory. + +==== url +The Elasticsearch URL. The default value is http://localhost:9200.
+ +==== user +The username and password for authenticating the connection to Elasticsearch using Basic Authentication. The username and password should be separated by a colon. By default no username and password are used. + +==== index +The Elasticsearch index pattern where Kibana saves its configuration. The default value is `.kibana`. + + +To import all the dashboards together with all the dependencies (visualizations, searches and index patterns), you just +need to run the following command in the Beats repository: + +On Unix systems: + +[source,shell] +---------------------------------------------------------------------- +../dev-tools/import_dashboards.sh -dir etc/kibana +---------------------------------------------------------------------- + +On Windows systems: + +[source,shell] +---------------------------------------------------------------------- +..\dev-tools\import_dashboards.ps1 -dir .\etc\kibana +---------------------------------------------------------------------- + + +=== Export the Beat dashboards + +To export all the dashboards for a Beat together with all dependencies (visualizations, searches and index patterns), +you can use the Python script `export_dashboards.py` from +https://github.com/elastic/beats/tree/master/dev-tools[dev-tools]. + +The command has the following options: + +[source,shell] +---------------------------------------------------------------------- +$ python export_dashboards.py -h +usage: export_dashboards.py [-h] [--url URL] --beat BEAT [--index INDEX] + [--kibana KIBANA] [--dir DIR] + +Export the Kibana dashboards together with all used visualizations, searches +and index pattern + +optional arguments: + -h, --help Show this help message and exit + --url URL Elasticsearch URL. E.g. http://localhost:9200 + --beat BEAT Beat name e.g. topbeat + --index INDEX Elasticsearch index for the Beat data. E.g. topbeat-* + --kibana KIBANA Elasticsearch index for the Kibana dashboards. E.g. .kibana + --dir DIR Output directory. E.g.
output + +---------------------------------------------------------------------- + +==== url +The Elasticsearch URL. The default value is http://localhost:9200. + +==== beat +The name of the Beat. This argument is required. + +==== index +The Elasticsearch index pattern where the Beat is storing the data. The default value is constructed from the Beat name + `-*` string. + +==== kibana +The Elasticsearch index pattern where Kibana saves its configuration. The default value is `.kibana`. + +==== dir +The output directory where to save the dashboards together with their dependencies. The default value is `output`. + +For example, to export all Packetbeat dashboards, you can run the following command in the `packetbeat` repository: + +[source,shell] +---------------------------------------------------------------------- +../dev-tools/export_dashboards.py --beat packetbeat +---------------------------------------------------------------------- + +NOTE: We have made it even easier for you to import and export the dashboards +by running the following commands in the Beats repository: + +[source,shell] +---------------------------------------------------------------------- +make import-dashboards +---------------------------------------------------------------------- + +[source,shell] +---------------------------------------------------------------------- +make export-dashboards +---------------------------------------------------------------------- + diff --git a/libbeat/etc/kibana/search/Default-Search.json b/libbeat/etc/kibana/search/Default-Search.json deleted file mode 100644 index d830b1b54e2..00000000000 --- a/libbeat/etc/kibana/search/Default-Search.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "sort": [ - "@timestamp", - "desc" - ], - "hits": 0, - "description": "", - "title": "Default Search", - "version": 1, - "kibanaSavedObjectMeta": { - "searchSourceJSON": 
"{\"index\":\"logstash-*\",\"highlight\":{\"pre_tags\":[\"@kibana-highlighted-field@\"],\"post_tags\":[\"@/kibana-highlighted-field@\"],\"fields\":{\"*\":{}},\"fragment_size\":2147483647},\"filter\":[],\"query\":{\"query_string\":{\"query\":\"*\",\"analyze_wildcard\":true}}}" - }, - "columns": [ - "method", - "type", - "path", - "responsetime", - "status", - "query" - ] -} \ No newline at end of file diff --git a/libbeat/etc/kibana/visualization/Navigation.json b/libbeat/etc/kibana/visualization/Navigation.json deleted file mode 100644 index 231ca1a52ce..00000000000 --- a/libbeat/etc/kibana/visualization/Navigation.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "visState": "{\"type\":\"markdown\",\"params\":{\"markdown\":\"###Packetbeat:\\n\\n[Dashboard](/#/dashboard/Packetbeat-Dashboard)\\n\\n[Web transactions](/#/dashboard/HTTP)\\n\\n[MySQL performance](/#/dashboard/MySQL-performance)\\n\\n[PostgreSQL performance](/#/dashboard/PgSQL-performance)\\n\\n[MongoDB performance](/#/dashboard/MongoDB-performance)\\n\\n[Thrift-RPC performance](/#/dashboard/Thrift-performance)\\n\\n###Topbeat:\\n\\n[Dashboard](/#/dashboard/Topbeat-Dashboard)\\n\\n###Winlogbeat:\\n\\n[Dashboard](/#/dashboard/Winlogbeat-Dashboard)\"},\"aggs\":[],\"listeners\":{}}", - "description": "", - "title": "Navigation", - "uiStateJSON": "{}", - "version": 1, - "kibanaSavedObjectMeta": { - "searchSourceJSON": "{\"query\":{\"query_string\":{\"analyze_wildcard\":true,\"query\":\"*\"}},\"filter\":[]}" - } -} \ No newline at end of file diff --git a/libbeat/scripts/Makefile b/libbeat/scripts/Makefile index 652221950e3..f81319e7956 100755 --- a/libbeat/scripts/Makefile +++ b/libbeat/scripts/Makefile @@ -207,16 +207,15 @@ update: bash ${ES_BEATS}/libbeat/scripts/update.sh ${BEATNAME} ${BEAT_DIR}/${BEATNAME} ${ES_BEATS}/libbeat ### KIBANA FILES HANDLING ### +ES_URL?=http://localhost:9200/ -KIBANA_ES?=http://localhost:9200/ .PHONY: update -kibana-export: - python ${ES_BEATS}/libbeat/scripts/kibana_export.py --url 
${KIBANA_ES} --dir $(shell pwd)/etc/kibana --beat ${BEATNAME} +export-dashboards: + python ${ES_BEATS}/dev-tools//export_dashboards.py --url ${ES_URL} --dir $(shell pwd)/etc/kibana --beat ${BEATNAME} .PHONY: update -kibana-import: - python ${ES_BEATS}/libbeat/scripts/kibana_import.py --url ${KIBANA_ES} --dir $(shell pwd)/etc/kibana - +import-dashboards: + bash ${ES_BEATS}/dev-tools/import_dashboards.sh -url ${ES_URL} -dir $(shell pwd)/etc/kibana ### CONTAINER ENVIRONMENT #### diff --git a/libbeat/scripts/kibana_import.py b/libbeat/scripts/kibana_import.py deleted file mode 100644 index 677bdf3e302..00000000000 --- a/libbeat/scripts/kibana_import.py +++ /dev/null @@ -1,47 +0,0 @@ -from elasticsearch import Elasticsearch -import argparse -import os - -def store_object(es, type, name, doc): - print es.index(index=".kibana", doc_type=type, id=name, body=doc) - - -def main(): - parser = argparse.ArgumentParser( - description="Loads Kibana dashboards, vizualization and " + - "searches into Kibana") - parser.add_argument("--url", help="Elasticsearch URL. E.g. " + - "http://localhost:9200.", required=True) - parser.add_argument("--dir", help="Input directory (kibana folder)", default="saved", required=True) - - args = parser.parse_args() - - es = Elasticsearch(args.url) - - base = args.dir - folders = os.listdir(base) - - for folder in folders: - - base_dir = base + "/" + folder + "/" - - if os.path.isdir(base_dir): - files = os.listdir(base_dir) - - for file in files: - if os.path.isfile(base_dir + file) and os.path.splitext(file)[1] == '.json': - f = open(base_dir + file, 'r') - doc = f.read() - - type = os.path.splitext(file)[0] - - # Fixes windows problem with files with * inside - # Adds it to index pattern - if folder == "index-pattern": - type = type + "-*" - store_object(es, folder, type, doc) - - -if __name__ == "__main__": - main() -