ELK: update kibana and the elastic beats to 5.4 (#26252)

* Add kibana5 and logstash5
* Upgrade the elastic beats to 5.4
* Make sure all elastic products use the same version (see elk5Version)
* Add a test for the ELK stack
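
For context, a NixOS configuration can opt into the new 5.x stack by pointing the existing service modules at the new packages. This is a minimal sketch based on the options exercised by the test below; the elasticsearch URL is an assumption for a single-host setup:

  { pkgs, ... }:
  {
    services.elasticsearch = {
      enable  = true;
      package = pkgs.elasticsearch5;
    };

    services.logstash = {
      enable  = true;
      package = pkgs.logstash5;
      # inputConfig / filterConfig / outputConfig as before
    };

    services.kibana = {
      enable  = true;
      package = pkgs.kibana5;
      elasticsearch.url = "http://localhost:9200";
    };
  }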

Authored by Bas van Dijk, committed by Franz Pletz
Commits: 2444eab4, 424dc013

13 changed files: +362 -192
nixos/modules/services/logging/logstash.nix (+80 -26)
···
 
 let
   cfg = config.services.logstash;
+  atLeast54 = versionAtLeast (builtins.parseDrvName cfg.package.name).version "5.4";
   pluginPath = lib.concatStringsSep ":" cfg.plugins;
   havePluginPath = lib.length cfg.plugins > 0;
   ops = lib.optionalString;
-  verbosityFlag = {
-    debug = "--debug";
-    info = "--verbose";
-    warn = ""; # intentionally empty
-    error = "--quiet";
-    fatal = "--silent";
-  }."${cfg.logLevel}";
+  verbosityFlag =
+    if atLeast54
+    then "--log.level " + cfg.logLevel
+    else {
+      debug = "--debug";
+      info = "--verbose";
+      warn = ""; # intentionally empty
+      error = "--quiet";
+      fatal = "--silent";
+    }."${cfg.logLevel}";
+
+  pluginsPath =
+    if atLeast54
+    then "--path.plugins ${pluginPath}"
+    else "--pluginpath ${pluginPath}";
+
+  logstashConf = pkgs.writeText "logstash.conf" ''
+    input {
+      ${cfg.inputConfig}
+    }
+
+    filter {
+      ${cfg.filterConfig}
+    }
+
+    output {
+      ${cfg.outputConfig}
+    }
+  '';
 
+  logstashSettingsYml = pkgs.writeText "logstash.yml" cfg.extraSettings;
+
+  logstashSettingsDir = pkgs.runCommand "logstash-settings" {inherit logstashSettingsYml;} ''
+    mkdir -p $out
+    ln -s $logstashSettingsYml $out/logstash.yml
+  '';
 in
 
 {
···
       default = [ ];
       example = literalExample "[ pkgs.logstash-contrib ]";
       description = "The paths to find other logstash plugins in.";
+    };
+
+    dataDir = mkOption {
+      type = types.str;
+      default = "/var/lib/logstash";
+      description = ''
+        A path to a directory writable by logstash that it uses to store data.
+        Plugins will also have access to this path.
+      '';
     };
 
     logLevel = mkOption {
···
       '';
     };
 
+    extraSettings = mkOption {
+      type = types.lines;
+      default = "";
+      description = "Extra Logstash settings in YAML format.";
+      example = ''
+        pipeline:
+          batch:
+            size: 125
+            delay: 5
+      '';
+    };
+
+
   };
 
···
   ###### implementation
 
   config = mkIf cfg.enable {
+    assertions = [
+      { assertion = atLeast54 -> !cfg.enableWeb;
+        message = ''
+          The logstash web interface is only available for versions older than 5.4.
+          So either set services.logstash.enableWeb = false,
+          or set services.logstash.package to an older logstash.
+        '';
+      }
+    ];
+
     systemd.services.logstash = with pkgs; {
       description = "Logstash Daemon";
       wantedBy = [ "multi-user.target" ];
       environment = { JAVA_HOME = jre; };
       path = [ pkgs.bash ];
       serviceConfig = {
-        ExecStart =
-          "${cfg.package}/bin/logstash agent " +
-          "-w ${toString cfg.filterWorkers} " +
-          ops havePluginPath "--pluginpath ${pluginPath} " +
-          "${verbosityFlag} " +
-          "-f ${writeText "logstash.conf" ''
-            input {
-              ${cfg.inputConfig}
-            }
-
-            filter {
-              ${cfg.filterConfig}
-            }
-
-            output {
-              ${cfg.outputConfig}
-            }
-          ''} " +
-          ops cfg.enableWeb "-- web -a ${cfg.listenAddress} -p ${cfg.port}";
+        ExecStartPre = ''${pkgs.coreutils}/bin/mkdir -p "${cfg.dataDir}" ; ${pkgs.coreutils}/bin/chmod 700 "${cfg.dataDir}"'';
+        ExecStart = concatStringsSep " " (filter (s: stringLength s != 0) [
+          "${cfg.package}/bin/logstash"
+          (ops (!atLeast54) "agent")
+          "-w ${toString cfg.filterWorkers}"
+          (ops havePluginPath pluginsPath)
+          "${verbosityFlag}"
+          "-f ${logstashConf}"
+          (ops atLeast54 "--path.settings ${logstashSettingsDir}")
+          (ops atLeast54 "--path.data ${cfg.dataDir}")
+          (ops cfg.enableWeb "-- web -a ${cfg.listenAddress} -p ${cfg.port}")
+        ]);
       };
     };
   };
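
Note that the module picks the 5.4 code paths from the package's version string rather than from a separate option. A quick sketch of how the atLeast54 helper evaluates (the package names here are only illustrative):

  # nix repl, with nixpkgs' lib in scope
  lib.versionAtLeast (builtins.parseDrvName "logstash-5.4.0").version "5.4"
  # => true: the new --log.level and --path.* flags are used
  lib.versionAtLeast (builtins.parseDrvName "logstash-1.5.2").version "5.4"
  # => false: the old "agent" subcommand, --pluginpath and --verbose style flags are kept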
nixos/modules/services/search/kibana.nix (+46 -2)
···
 let
   cfg = config.services.kibana;
 
-  cfgFile = pkgs.writeText "kibana.json" (builtins.toJSON (
+  atLeast54 = versionAtLeast (builtins.parseDrvName cfg.package.name).version "5.4";
+
+  cfgFile = if atLeast54 then cfgFile5 else cfgFile4;
+
+  cfgFile4 = pkgs.writeText "kibana.json" (builtins.toJSON (
     (filterAttrsRecursive (n: v: v != null) ({
       host = cfg.listenAddress;
       port = cfg.port;
···
       ];
     } // cfg.extraConf)
   )));
+
+  cfgFile5 = pkgs.writeText "kibana.json" (builtins.toJSON (
+    (filterAttrsRecursive (n: v: v != null) ({
+      server.host = cfg.listenAddress;
+      server.port = cfg.port;
+      server.ssl.certificate = cfg.cert;
+      server.ssl.key = cfg.key;
+
+      kibana.index = cfg.index;
+      kibana.defaultAppId = cfg.defaultAppId;
+
+      elasticsearch.url = cfg.elasticsearch.url;
+      elasticsearch.username = cfg.elasticsearch.username;
+      elasticsearch.password = cfg.elasticsearch.password;
+
+      elasticsearch.ssl.certificate = cfg.elasticsearch.cert;
+      elasticsearch.ssl.key = cfg.elasticsearch.key;
+      elasticsearch.ssl.certificateAuthorities = cfg.elasticsearch.certificateAuthorities;
+    } // cfg.extraConf)
+  )));
+
 in {
   options.services.kibana = {
     enable = mkEnableOption "enable kibana service";
···
     };
 
     ca = mkOption {
-      description = "CA file to auth against elasticsearch.";
+      description = ''
+        CA file to auth against elasticsearch.
+
+        It's recommended to use the <option>certificateAuthorities</option> option
+        when using kibana-5.4 or newer.
+      '';
       default = null;
       type = types.nullOr types.path;
     };
 
+    certificateAuthorities = mkOption {
+      description = ''
+        CA files to auth against elasticsearch.
+
+        Please use the <option>ca</option> option when using kibana &lt; 5.4
+        because those old versions don't support setting multiple CAs.
+
+        This defaults to the singleton list [ca] when the <option>ca</option> option is defined.
+      '';
+      default = if isNull cfg.elasticsearch.ca then [] else [ca];
+      type = types.listOf types.path;
+    };
+
     cert = mkOption {
       description = "Certificate file to auth against elasticsearch.";
       default = null;
···
       description = "Kibana package to use";
       default = pkgs.kibana;
       defaultText = "pkgs.kibana";
+      example = "pkgs.kibana5";
       type = types.package;
     };
 
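
With the new certificateAuthorities option the elasticsearch connection can be pinned to one or more CA certificates instead of the single ca file. A minimal sketch for kibana 5.4, where ./ca.pem is a hypothetical path in the configuration:

  services.kibana = {
    enable  = true;
    package = pkgs.kibana5;
    elasticsearch = {
      url = "https://localhost:9200";
      certificateAuthorities = [ ./ca.pem ];
    };
  };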
nixos/tests/elk.nix (new file, +95)
···
+# Test the ELK stack: Elasticsearch, Logstash and Kibana.
+
+import ./make-test.nix ({ pkgs, ...} :
+let
+  esUrl = "http://localhost:9200";
+in {
+  name = "ELK";
+  meta = with pkgs.stdenv.lib.maintainers; {
+    maintainers = [ eelco chaoflow offline basvandijk ];
+  };
+
+  nodes = {
+    one =
+      { config, pkgs, ... }: {
+        # Not giving the machine at least 2060MB results in elasticsearch failing with the following error:
+        #
+        #   OpenJDK 64-Bit Server VM warning:
+        #   INFO: os::commit_memory(0x0000000085330000, 2060255232, 0)
+        #   failed; error='Cannot allocate memory' (errno=12)
+        #
+        #   There is insufficient memory for the Java Runtime Environment to continue.
+        #   Native memory allocation (mmap) failed to map 2060255232 bytes for committing reserved memory.
+        #
+        # When setting this to 2500 I got "Kernel panic - not syncing: Out of
+        # memory: compulsory panic_on_oom is enabled" so let's give it even a
+        # bit more room:
+        virtualisation.memorySize = 3000;
+
+        # For querying JSON objects returned from elasticsearch and kibana.
+        environment.systemPackages = [ pkgs.jq ];
+
+        services = {
+          logstash = {
+            enable = true;
+            package = pkgs.logstash5;
+            inputConfig = ''
+              exec { command => "echo -n flowers" interval => 1 type => "test" }
+              exec { command => "echo -n dragons" interval => 1 type => "test" }
+            '';
+            filterConfig = ''
+              if [message] =~ /dragons/ {
+                drop {}
+              }
+            '';
+            outputConfig = ''
+              file {
+                path => "/tmp/logstash.out"
+                codec => line { format => "%{message}" }
+              }
+              elasticsearch {
+                hosts => [ "${esUrl}" ]
+              }
+            '';
+          };
+
+          elasticsearch = {
+            enable = true;
+            package = pkgs.elasticsearch5;
+          };
+
+          kibana = {
+            enable = true;
+            package = pkgs.kibana5;
+            elasticsearch.url = esUrl;
+          };
+        };
+      };
+  };
+
+  testScript = ''
+    startAll;
+
+    $one->waitForUnit("elasticsearch.service");
+
+    # Continue as long as the status is not "red". The status is probably
+    # "yellow" instead of "green" because we are using a single elasticsearch
+    # node which elasticsearch considers risky.
+    #
+    # TODO: extend this test with multiple elasticsearch nodes and see if the status turns "green".
+    $one->waitUntilSucceeds("curl --silent --show-error '${esUrl}/_cluster/health' | jq .status | grep -v red");
+
+    # Perform some simple logstash tests.
+    $one->waitForUnit("logstash.service");
+    $one->waitUntilSucceeds("cat /tmp/logstash.out | grep flowers");
+    $one->waitUntilSucceeds("cat /tmp/logstash.out | grep -v dragons");
+
+    # See if kibana is healthy.
+    $one->waitForUnit("kibana.service");
+    $one->waitUntilSucceeds("curl --silent --show-error 'http://localhost:5601/api/status' | jq .status.overall.state | grep green");
+
+    # See if logstash messages arrive in elasticsearch.
+    $one->waitUntilSucceeds("curl --silent --show-error '${esUrl}/_search' -H 'Content-Type: application/json' -d '{\"query\" : { \"match\" : { \"message\" : \"flowers\"}}}' | jq .hits.total | grep -v 0");
+    $one->waitUntilSucceeds("curl --silent --show-error '${esUrl}/_search' -H 'Content-Type: application/json' -d '{\"query\" : { \"match\" : { \"message\" : \"dragons\"}}}' | jq .hits.total | grep 0");
+  '';
+})
nixos/tests/logstash.nix (deleted, -41)
···
-# This test runs logstash and checks if messages flows and
-# elasticsearch is started.
-
-import ./make-test.nix ({ pkgs, ...} : {
-  name = "logstash";
-  meta = with pkgs.stdenv.lib.maintainers; {
-    maintainers = [ eelco chaoflow offline ];
-  };
-
-  nodes = {
-    one =
-      { config, pkgs, ... }:
-        {
-          services = {
-            logstash = {
-              enable = true;
-              inputConfig = ''
-                exec { command => "echo flowers" interval => 1 type => "test" }
-                exec { command => "echo dragons" interval => 1 type => "test" }
-              '';
-              filterConfig = ''
-                if [message] =~ /dragons/ {
-                  drop {}
-                }
-              '';
-              outputConfig = ''
-                stdout { codec => rubydebug }
-              '';
-            };
-          };
-        };
-  };
-
-  testScript = ''
-    startAll;
-
-    $one->waitForUnit("logstash.service");
-    $one->waitUntilSucceeds("journalctl -n 20 _SYSTEMD_UNIT=logstash.service | grep flowers");
-    $one->fail("journalctl -n 20 _SYSTEMD_UNIT=logstash.service | grep dragons");
-  '';
-})
pkgs/development/tools/misc/kibana/5.x.nix (new file, +45)
···
+{ stdenv, makeWrapper, fetchurl, elk5Version, nodejs, coreutils, which }:
+
+with stdenv.lib;
+let
+  inherit (builtins) elemAt;
+  archOverrides = {
+    "i686" = "x86";
+  };
+  info = splitString "-" stdenv.system;
+  arch = (elemAt info 0);
+  elasticArch = archOverrides."${arch}" or arch;
+  plat = elemAt info 1;
+  shas = {
+    "x86_64-linux" = "1g5i81wq77fk6pyaq3rpfqs2m23xsbz2cndh3rg4b59ibg5qv0sq";
+    "i686-linux" = "0pxnpg3g8l6hy8qz404kbkk5rd9y65jrzd0y9j8wr5fd4pqs7vgv";
+    "x86_64-darwin" = "0pffl2hbck3s271jlzdibp5698djm5fdvj15w2knm815rs2kfbl5";
+  };
+in stdenv.mkDerivation rec {
+  name = "kibana-${version}";
+  version = elk5Version;
+
+  src = fetchurl {
+    url = "https://artifacts.elastic.co/downloads/kibana/${name}-${plat}-${elasticArch}.tar.gz";
+    sha256 = shas."${stdenv.system}" or (throw "Unknown architecture");
+  };
+
+  buildInputs = [ makeWrapper ];
+
+  installPhase = ''
+    mkdir -p $out/libexec/kibana $out/bin
+    mv * $out/libexec/kibana/
+    rm -r $out/libexec/kibana/node
+    makeWrapper $out/libexec/kibana/bin/kibana $out/bin/kibana \
+      --prefix PATH : "${stdenv.lib.makeBinPath [ nodejs coreutils which ]}"
+    sed -i 's@NODE=.*@NODE=${nodejs}/bin/node@' $out/libexec/kibana/bin/kibana
+  '';
+
+  meta = {
+    description = "Visualize logs and time-stamped data";
+    homepage = http://www.elasticsearch.org/overview/kibana;
+    license = licenses.asl20;
+    maintainers = with maintainers; [ offline rickynils ];
+    platforms = with platforms; unix;
+  };
+}
pkgs/misc/logging/beats/default.nix (new file, +42)
···
+{ stdenv, fetchFromGitHub, elk5Version, buildGoPackage, libpcap }:
+
+let beat = package : extraArgs : buildGoPackage (rec {
+  name = "${package}-${version}";
+  version = elk5Version;
+
+  src = fetchFromGitHub {
+    owner = "elastic";
+    repo = "beats";
+    rev = "v${version}";
+    sha256 = "1if16sqbhgxc7ahn9pak8av9rq9l8ldk44hr4w4g7lhxnqhmhsji";
+  };
+
+  goPackagePath = "github.com/elastic/beats";
+
+  subPackages = [ package ];
+
+  meta = with stdenv.lib; {
+    homepage = https://www.elastic.co/products/beats;
+    license = licenses.asl20;
+    maintainers = with maintainers; [ fadenb basvandijk ];
+    platforms = platforms.linux;
+  };
+} // extraArgs);
+in {
+  filebeat = beat "filebeat" {meta.description = "Lightweight shipper for logfiles";};
+  heartbeat = beat "heartbeat" {meta.description = "Lightweight shipper for uptime monitoring";};
+  metricbeat = beat "metricbeat" {meta.description = "Lightweight shipper for metrics";};
+  packetbeat = beat "packetbeat" {
+    buildInputs = [ libpcap ];
+    meta.description = "Network packet analyzer that ships data to Elasticsearch";
+    meta.longDescription = ''
+      Packetbeat is an open source network packet analyzer that ships the
+      data to Elasticsearch.
+
+      Think of it like a distributed real-time Wireshark with a lot more
+      analytics features. The Packetbeat shippers sniff the traffic between
+      your application processes, parse on the fly protocols like HTTP, MySQL,
+      PostgreSQL, Redis or Thrift and correlate the messages into transactions.
+    '';
+  };
+}
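
All four beats are now built from this single expression and pinned to elk5Version, so they stay in lockstep with the rest of the stack. They remain available as individual top-level attributes (see the all-packages.nix hunk below), so a sketch of installing some of them system-wide looks the same as before:

  # any subset of the beats can be listed here
  environment.systemPackages = with pkgs; [ filebeat heartbeat metricbeat packetbeat ];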
pkgs/misc/logging/filebeat/default.nix (deleted, -25)
···
-{ stdenv, fetchFromGitHub, buildGoPackage }:
-
-buildGoPackage rec {
-  name = "filebeat-${version}";
-  version = "5.2.2";
-
-  src = fetchFromGitHub {
-    owner = "elastic";
-    repo = "beats";
-    rev = "v${version}";
-    sha256 = "19hkq19xpi3c9y5g1yq77sm2d5vzybn6mxxf0s5l6sw4l98aak5q";
-  };
-
-  goPackagePath = "github.com/elastic/beats";
-
-  subPackages = [ "filebeat" ];
-
-  meta = with stdenv.lib; {
-    description = "Lightweight shipper for logfiles";
-    homepage = https://www.elastic.co/products/beats;
-    license = licenses.asl20;
-    maintainers = [ maintainers.fadenb ];
-    platforms = platforms.linux;
-  };
-}
pkgs/misc/logging/heartbeat/default.nix (deleted, -25)
···
-{ stdenv, fetchFromGitHub, buildGoPackage }:
-
-buildGoPackage rec {
-  name = "heartbeat-${version}";
-  version = "5.2.2";
-
-  src = fetchFromGitHub {
-    owner = "elastic";
-    repo = "beats";
-    rev = "v${version}";
-    sha256 = "19hkq19xpi3c9y5g1yq77sm2d5vzybn6mxxf0s5l6sw4l98aak5q";
-  };
-
-  goPackagePath = "github.com/elastic/beats";
-
-  subPackages = [ "heartbeat" ];
-
-  meta = with stdenv.lib; {
-    description = "Lightweight shipper for uptime monitoring";
-    homepage = https://www.elastic.co/products/beats;
-    license = licenses.asl20;
-    maintainers = [ maintainers.fadenb ];
-    platforms = platforms.linux;
-  };
-}
pkgs/misc/logging/metricbeat/default.nix (deleted, -25)
···
-{ stdenv, fetchFromGitHub, buildGoPackage }:
-
-buildGoPackage rec {
-  name = "metricbeat-${version}";
-  version = "5.2.2";
-
-  src = fetchFromGitHub {
-    owner = "elastic";
-    repo = "beats";
-    rev = "v${version}";
-    sha256 = "19hkq19xpi3c9y5g1yq77sm2d5vzybn6mxxf0s5l6sw4l98aak5q";
-  };
-
-  goPackagePath = "github.com/elastic/beats";
-
-  subPackages = [ "metricbeat" ];
-
-  meta = with stdenv.lib; {
-    description = "Lightweight shipper for metrics";
-    homepage = https://www.elastic.co/products/beats;
-    license = licenses.asl20;
-    maintainers = [ maintainers.fadenb ];
-    platforms = platforms.linux;
-  };
-}
pkgs/misc/logging/packetbeat/default.nix (deleted, -36)
···
-{ stdenv, fetchFromGitHub, buildGoPackage, libpcap }:
-
-buildGoPackage rec {
-  name = "packetbeat-${version}";
-  version = "5.2.2";
-
-  src = fetchFromGitHub {
-    owner = "elastic";
-    repo = "beats";
-    rev = "v${version}";
-    sha256 = "19hkq19xpi3c9y5g1yq77sm2d5vzybn6mxxf0s5l6sw4l98aak5q";
-  };
-
-  goPackagePath = "github.com/elastic/beats";
-
-  subPackages = [ "packetbeat" ];
-
-  buildInputs = [ libpcap ];
-
-  meta = with stdenv.lib; {
-    description = "Network packet analyzer that ships data to Elasticsearch";
-    longDescription = ''
-      Packetbeat is an open source network packet analyzer that ships the
-      data to Elasticsearch.
-
-      Think of it like a distributed real-time Wireshark with a lot more
-      analytics features. The Packetbeat shippers sniff the traffic between
-      your application processes, parse on the fly protocols like HTTP, MySQL,
-      PostgreSQL, Redis or Thrift and correlate the messages into transactions.
-    '';
-    homepage = https://www.elastic.co/products/beats;
-    license = licenses.asl20;
-    maintainers = [ maintainers.fadenb ];
-    platforms = platforms.linux;
-  };
-}
pkgs/servers/search/elasticsearch/5.x.nix (+2 -2)
···
-{ stdenv, fetchurl, makeWrapper, jre, utillinux, getopt }:
+{ stdenv, fetchurl, elk5Version, makeWrapper, jre, utillinux, getopt }:
 
 with stdenv.lib;
 
 stdenv.mkDerivation rec {
-  version = "5.4.0";
+  version = elk5Version;
   name = "elasticsearch-${version}";
 
   src = fetchurl {
pkgs/tools/misc/logstash/5.x.nix (new file, +39)
···
+{ stdenv, fetchurl, elk5Version, makeWrapper, jre }:
+
+stdenv.mkDerivation rec {
+  version = elk5Version;
+  name = "logstash-${version}";
+
+  src = fetchurl {
+    url = "https://artifacts.elastic.co/downloads/logstash/${name}.tar.gz";
+    sha256 = "019bhsnbbbg1a4g9jf02j3jb1xhhmrr3i7882s5l4pmkyn1d3gd1";
+  };
+
+  dontBuild = true;
+  dontPatchELF = true;
+  dontStrip = true;
+  dontPatchShebangs = true;
+
+  buildInputs = [
+    makeWrapper jre
+  ];
+
+  installPhase = ''
+    mkdir -p $out
+    cp -r {Gemfile*,vendor,lib,bin,config,data,logstash-core,logstash-core-plugin-api} $out
+
+    wrapProgram $out/bin/logstash \
+      --set JAVA_HOME "${jre}"
+
+    wrapProgram $out/bin/logstash-plugin \
+      --set JAVA_HOME "${jre}"
+  '';
+
+  meta = with stdenv.lib; {
+    description = "Logstash is a data pipeline that helps you process logs and other event data from a variety of systems";
+    homepage = https://www.elastic.co/products/logstash;
+    license = licenses.asl20;
+    platforms = platforms.unix;
+    maintainers = [ maintainers.wjlroe maintainers.offline ];
+  };
+}
pkgs/top-level/all-packages.nix (+13 -10)
···
 
   bchunk = callPackage ../tools/cd-dvd/bchunk { };
 
+  inherit (callPackages ../misc/logging/beats { })
+    filebeat
+    heartbeat
+    metricbeat
+    packetbeat;
+
   bfr = callPackage ../tools/misc/bfr { };
 
   bibtool = callPackage ../tools/misc/bibtool { };
···
 
   fastJson = callPackage ../development/libraries/fastjson { };
 
-  filebeat = callPackage ../misc/logging/filebeat { };
-
   filebench = callPackage ../tools/misc/filebench { };
 
   fsmon = callPackage ../tools/misc/fsmon { };
···
 
   gti = callPackage ../tools/misc/gti { };
 
-  heartbeat = callPackage ../misc/logging/heartbeat { };
-
   heatseeker = callPackage ../tools/misc/heatseeker { };
 
   hexio = callPackage ../development/tools/hexio { };
···
   masscan = callPackage ../tools/security/masscan { };
 
   meson = callPackage ../development/tools/build-managers/meson { };
-
-  metricbeat = callPackage ../misc/logging/metricbeat { };
 
   mp3fs = callPackage ../tools/filesystems/mp3fs { };
···
 
   evemu = callPackage ../tools/system/evemu { };
 
+  # The latest version used by elasticsearch, logstash, kibana and the beats from elastic.
+  elk5Version = "5.4.0";
+
   elasticsearch = callPackage ../servers/search/elasticsearch { };
   elasticsearch2 = callPackage ../servers/search/elasticsearch/2.x.nix { };
   elasticsearch5 = callPackage ../servers/search/elasticsearch/5.x.nix { };
···
 
   keyfuzz = callPackage ../tools/inputmethods/keyfuzz { };
 
-  kibana = callPackage ../development/tools/misc/kibana { };
+  kibana  = callPackage ../development/tools/misc/kibana { };
+  kibana5 = callPackage ../development/tools/misc/kibana/5.x.nix { };
 
   kismet = callPackage ../applications/networking/sniffers/kismet { };
···
 
   lockfileProgs = callPackage ../tools/misc/lockfile-progs { };
 
-  logstash = callPackage ../tools/misc/logstash { };
+  logstash  = callPackage ../tools/misc/logstash { };
+  logstash5 = callPackage ../tools/misc/logstash/5.x.nix { };
 
   logstash-contrib = callPackage ../tools/misc/logstash/contrib.nix { };
···
   packagekit = callPackage ../tools/package-management/packagekit {
     nix = nixUnstable;
   };
-
-  packetbeat = callPackage ../misc/logging/packetbeat { };
 
   packetdrill = callPackage ../tools/networking/packetdrill { };
 
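
Since elk5Version is passed to the 5.x expressions through callPackage like any other argument, an individual package could in principle be re-pinned with .override. This is only a sketch with a hypothetical version string; the sha256 baked into the expression would no longer match, so the source hash would have to be updated as well:

  # hypothetical: rebuild elasticsearch5 against a different 5.x release
  elasticsearchOther = pkgs.elasticsearch5.override { elk5Version = "5.4.1"; };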