# NixOS VM test for the ELK stack (Elasticsearch, Logstash, Kibana) plus the
# beats shippers (journalbeat / filebeat / metricbeat) and elasticsearch-curator.
#
# To run the test on the unfree ELK use the following command:
#   cd path/to/nixpkgs
#   NIXPKGS_ALLOW_UNFREE=1 nix-build -A nixosTests.elk.unfree.ELK-6

{ system ? builtins.currentSystem,
  config ? {},
  pkgs ? import ../.. { inherit system config; },
}:

let
  inherit (pkgs) lib;

  esUrl = "http://localhost:9200";

  # Build one VM test for a given set of ELK packages.
  #
  # name : string used as the test name.
  # elk  : attrset of packages; `elasticsearch`, `logstash`, `kibana` and
  #        `metricbeat` are required, `journalbeat` and `filebeat` are
  #        optional (their presence is probed with `elk ? journalbeat` /
  #        `elk ? filebeat` below).
  mkElkTest = name: elk:
    import ./make-test-python.nix ({
      inherit name;
      meta = with pkgs.lib.maintainers; {
        maintainers = [ eelco offline basvandijk ];
      };
      nodes = {
        one =
          { pkgs, lib, ... }: {
              # Not giving the machine at least 2060MB results in elasticsearch failing with the following error:
              #
              # OpenJDK 64-Bit Server VM warning:
              # INFO: os::commit_memory(0x0000000085330000, 2060255232, 0)
              # failed; error='Cannot allocate memory' (errno=12)
              #
              # There is insufficient memory for the Java Runtime Environment to continue.
              # Native memory allocation (mmap) failed to map 2060255232 bytes for committing reserved memory.
              #
              # When setting this to 2500 I got "Kernel panic - not syncing: Out of
              # memory: compulsory panic_on_oom is enabled" so lets give it even a
              # bit more room:
              virtualisation.memorySize = 3000;

              # For querying JSON objects returned from elasticsearch and kibana.
              environment.systemPackages = [ pkgs.jq ];

              services = {

                # journalbeat ships journald entries to elasticsearch; only
                # enabled when the package set under test provides it.
                journalbeat = {
                  enable = elk ? journalbeat;
                  package = elk.journalbeat;
                  extraConfig = pkgs.lib.mkOptionDefault (''
                    logging:
                      to_syslog: true
                      level: warning
                      metrics.enabled: false
                    output.elasticsearch:
                      hosts: [ "127.0.0.1:9200" ]
                    journalbeat.inputs:
                      - paths: []
                        seek: cursor
                  '');
                };

                # filebeat reads both journald and a plain log file; only
                # enabled when the package set under test provides it.
                filebeat = {
                  enable = elk ? filebeat;
                  package = elk.filebeat;
                  inputs.journald.id = "everything";

                  inputs.log = {
                    enabled = true;
                    paths = [
                      "/var/lib/filebeat/test"
                    ];
                  };

                  settings = {
                    logging.level = "info";
                  };
                };

                metricbeat = {
                  enable = true;
                  package = elk.metricbeat;
                  modules.system = {
                    metricsets = ["cpu" "load" "memory" "network" "process" "process_summary" "uptime" "socket_summary"];
                    enabled = true;
                    period = "5s";
                    processes = [".*"];
                    cpu.metrics = ["percentages" "normalized_percentages"];
                    core.metrics = ["percentages"];
                  };
                  settings = {
                    output.elasticsearch = {
                      hosts = ["127.0.0.1:9200"];
                    };
                  };
                };

                # Logstash emits "flowers" and drops "dragons"; the test
                # script below asserts exactly that via the elasticsearch
                # output and the file output.
                logstash = {
                  enable = true;
                  package = elk.logstash;
                  inputConfig = ''
                    exec { command => "echo -n flowers" interval => 1 type => "test" }
                    exec { command => "echo -n dragons" interval => 1 type => "test" }
                  '';
                  filterConfig = ''
                    if [message] =~ /dragons/ {
                      drop {}
                    }
                  '';
                  outputConfig = ''
                    file {
                      path => "/tmp/logstash.out"
                      codec => line { format => "%{message}" }
                    }
                    elasticsearch {
                      hosts => [ "${esUrl}" ]
                    }
                  '';
                };

                elasticsearch = {
                  enable = true;
                  package = elk.elasticsearch;
                };

                kibana = {
                  enable = true;
                  package = elk.kibana;
                };

                # Deletes logstash-* indices older than 1 second so the test
                # can verify curator actually removes them.
                elasticsearch-curator = {
                  enable = true;
                  actionYAML = ''
                  ---
                  actions:
                    1:
                      action: delete_indices
                      description: >-
                        Delete indices older than 1 second (based on index name), for logstash-
                        prefixed indices. Ignore the error if the filter does not result in an
                        actionable list of indices (ignore_empty_list) and exit cleanly.
                      options:
                        allow_ilm_indices: true
                        ignore_empty_list: True
                        disable_action: False
                      filters:
                      - filtertype: pattern
                        kind: prefix
                        value: logstash-
                      - filtertype: age
                        source: name
                        direction: older
                        timestring: '%Y.%m.%d'
                        unit: seconds
                        unit_count: 1
                  '';
                };
              };
            };
      };

      passthru.elkPackages = elk;
      testScript =
        let
          # Elasticsearch >= 7 reports hits.total as an object with a
          # .value field; older versions report a plain number.
          valueObject = lib.optionalString (lib.versionAtLeast elk.elasticsearch.version "7") ".value";
        in ''
          import json


          def expect_hits(message):
              dictionary = {"query": {"match": {"message": message}}}
              return (
                  "curl --silent --show-error --fail-with-body '${esUrl}/_search' "
                  + "-H 'Content-Type: application/json' "
                  + "-d '{}' ".format(json.dumps(dictionary))
                  + " | tee /dev/console"
                  + " | jq -es 'if . == [] then null else .[] | .hits.total${valueObject} > 0 end'"
              )


          def expect_no_hits(message):
              dictionary = {"query": {"match": {"message": message}}}
              return (
                  "curl --silent --show-error --fail-with-body '${esUrl}/_search' "
                  + "-H 'Content-Type: application/json' "
                  + "-d '{}' ".format(json.dumps(dictionary))
                  + " | tee /dev/console"
                  + " | jq -es 'if . == [] then null else .[] | .hits.total${valueObject} == 0 end'"
              )


          def has_metricbeat():
              dictionary = {"query": {"match": {"event.dataset": {"query": "system.cpu"}}}}
              return (
                  "curl --silent --show-error --fail-with-body '${esUrl}/_search' "
                  + "-H 'Content-Type: application/json' "
                  + "-d '{}' ".format(json.dumps(dictionary))
                  + " | tee /dev/console"
                  + " | jq -es 'if . == [] then null else .[] | .hits.total${valueObject} > 0 end'"
              )


          start_all()

          one.wait_for_unit("elasticsearch.service")
          one.wait_for_open_port(9200)

          # Continue as long as the status is not "red". The status is probably
          # "yellow" instead of "green" because we are using a single elasticsearch
          # node which elasticsearch considers risky.
          #
          # TODO: extend this test with multiple elasticsearch nodes
          # and see if the status turns "green".
          one.wait_until_succeeds(
              "curl --silent --show-error --fail-with-body '${esUrl}/_cluster/health'"
              + " | jq -es 'if . == [] then null else .[] | .status != \"red\" end'"
          )

          with subtest("Perform some simple logstash tests"):
              one.wait_for_unit("logstash.service")
              one.wait_until_succeeds("cat /tmp/logstash.out | grep flowers")
              one.wait_until_succeeds("cat /tmp/logstash.out | grep -v dragons")

          with subtest("Kibana is healthy"):
              one.wait_for_unit("kibana.service")
              one.wait_until_succeeds(
                  "curl --silent --show-error --fail-with-body 'http://localhost:5601/api/status'"
                  + " | jq -es 'if . == [] then null else .[] | .status.overall.state == \"green\" end'"
              )

          with subtest("Metricbeat is running"):
              one.wait_for_unit("metricbeat.service")

          with subtest("Metricbeat metrics arrive in elasticsearch"):
              one.wait_until_succeeds(has_metricbeat())

          with subtest("Logstash messages arrive in elasticsearch"):
              one.wait_until_succeeds(expect_hits("flowers"))
              one.wait_until_succeeds(expect_no_hits("dragons"))

        '' + lib.optionalString (elk ? journalbeat) ''
          with subtest(
              "A message logged to the journal is ingested by elasticsearch via journalbeat"
          ):
              one.wait_for_unit("journalbeat.service")
              one.execute("echo 'Supercalifragilisticexpialidocious' | systemd-cat")
              one.wait_until_succeeds(
                  expect_hits("Supercalifragilisticexpialidocious")
              )
        '' + lib.optionalString (elk ? filebeat) ''
          with subtest(
              "A message logged to the journal is ingested by elasticsearch via filebeat"
          ):
              one.wait_for_unit("filebeat.service")
              one.execute("echo 'Superdupercalifragilisticexpialidocious' | systemd-cat")
              one.wait_until_succeeds(
                  expect_hits("Superdupercalifragilisticexpialidocious")
              )
              one.execute(
                  "echo 'SuperdupercalifragilisticexpialidociousIndeed' >> /var/lib/filebeat/test"
              )
              one.wait_until_succeeds(
                  expect_hits("SuperdupercalifragilisticexpialidociousIndeed")
              )
        '' + ''
          with subtest("Elasticsearch-curator works"):
              one.systemctl("stop logstash")
              one.systemctl("start elasticsearch-curator")
              one.wait_until_succeeds(
                  '! curl --silent --show-error --fail-with-body "${esUrl}/_cat/indices" | grep logstash | grep ^'
              )
        '';
    }) { inherit pkgs system; };
in {
  ELK-6 = mkElkTest "elk-6-oss" {
    name = "elk-6-oss";
    elasticsearch = pkgs.elasticsearch6-oss;
    logstash = pkgs.logstash6-oss;
    kibana = pkgs.kibana6-oss;
    journalbeat = pkgs.journalbeat6;
    metricbeat = pkgs.metricbeat6;
  };
  # We currently only package upstream binaries.
  # Feel free to package an SSPL licensed source-based package!
  # ELK-7 = mkElkTest "elk-7-oss" {
  #   name = "elk-7";
  #   elasticsearch = pkgs.elasticsearch7-oss;
  #   logstash = pkgs.logstash7-oss;
  #   kibana = pkgs.kibana7-oss;
  #   filebeat = pkgs.filebeat7;
  #   metricbeat = pkgs.metricbeat7;
  # };
  unfree = lib.dontRecurseIntoAttrs {
    ELK-6 = mkElkTest "elk-6" {
      elasticsearch = pkgs.elasticsearch6;
      logstash = pkgs.logstash6;
      kibana = pkgs.kibana6;
      journalbeat = pkgs.journalbeat6;
      metricbeat = pkgs.metricbeat6;
    };
    ELK-7 = mkElkTest "elk-7" {
      elasticsearch = pkgs.elasticsearch7;
      logstash = pkgs.logstash7;
      kibana = pkgs.kibana7;
      filebeat = pkgs.filebeat7;
      metricbeat = pkgs.metricbeat7;
    };
  };
}