Skip to content

Commit

Permalink
Fix test
Browse files Browse the repository at this point in the history
  • Loading branch information
exekias committed Nov 6, 2017
1 parent e535809 commit cd2439b
Show file tree
Hide file tree
Showing 4 changed files with 67 additions and 87 deletions.
9 changes: 9 additions & 0 deletions filebeat/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@ services:
env_file:
- ${PWD}/build/test.env
- ${PWD}/prospector/redis/_meta/env
environment:
- KIBANA_HOST=kibana
- KIBANA_PORT=5601
working_dir: /go/src/github.com/elastic/beats/filebeat
volumes:
- ${PWD}/..:/go/src/github.com/elastic/beats/
Expand All @@ -18,12 +21,18 @@ services:
image: busybox
depends_on:
elasticsearch: { condition: service_healthy }
kibana: { condition: service_healthy }
redis: { condition: service_healthy }

elasticsearch:
extends:
file: ../testing/environments/${TESTING_ENVIRONMENT}.yml
service: elasticsearch

kibana:
extends:
file: ../testing/environments/${TESTING_ENVIRONMENT}.yml
service: kibana

redis:
build: ${PWD}/prospector/redis/_meta
6 changes: 6 additions & 0 deletions filebeat/tests/system/config/filebeat_modules.yml.j2
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,9 @@ output.elasticsearch.index: {{ index_name }}

setup.template.name: {{ index_name }}
setup.template.pattern: {{ index_name }}*

setup.kibana.host: {{ kibana_url }}

{% if kibana_path %}
setup.dashboards.directory: {{ kibana_path }}
{% endif %}
130 changes: 43 additions & 87 deletions filebeat/tests/system/test_modules.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import os
import unittest
import glob
import shutil
import subprocess
from elasticsearch import Elasticsearch
import json
Expand All @@ -13,6 +14,7 @@ class Test(BaseTest):

def init(self):
self.elasticsearch_url = self.get_elasticsearch_url()
self.kibana_url = self.get_kibana_url()
print("Using elasticsearch: {}".format(self.elasticsearch_url))
self.es = Elasticsearch([self.elasticsearch_url])
logging.getLogger("urllib3").setLevel(logging.WARNING)
Expand All @@ -21,6 +23,9 @@ def init(self):
self.modules_path = os.path.abspath(self.working_dir +
"/../../../../module")

self.kibana_path = os.path.abspath(self.working_dir +
"/../../../../_meta/kibana")

self.filebeat = os.path.abspath(self.working_dir +
"/../../../../filebeat.test")

Expand Down Expand Up @@ -206,106 +211,55 @@ def search_objects():
@unittest.skipIf(not INTEGRATION_TESTS or
os.getenv("TESTING_ENVIRONMENT") == "2x",
"integration test not available on 2.x")
def test_setup_machine_learning_nginx(self):
"""
Tests that setup works and loads machine learning jobs using --modules flag.
"""
def test_ml_setup(self):
""" Test ML are installed in all possible ways """
for setup_flag in (True, False):
for modules_flag in (True, False):
self._run_ml_test(setup_flag, modules_flag)

def _run_ml_test(self, setup_flag, modules_flag):
self.init()
# generate a minimal configuration
cfgfile = os.path.join(self.working_dir, "filebeat.yml")
self.render_config_template(
template_name="filebeat_modules",
output=cfgfile,
index_name=self.index_name,
elasticsearch_url=self.elasticsearch_url)

cmd = [
self.filebeat, "-systemTest",
"-e", "-d", "*",
"-c", cfgfile,
"setup", "--modules=nginx", "--machine-learning"]
# Clean any previous state
for df in self.es.transport.perform_request("GET", "/_xpack/ml/datafeeds/")["datafeeds"]:
if df["datafeed_id"] == 'filebeat-nginx-access-response_code':
self.es.transport.perform_request("DELETE", "/_xpack/ml/datafeeds/" + df["datafeed_id"])

output = open(os.path.join(self.working_dir, "output.log"), "ab")
output.write(" ".join(cmd) + "\n")
subprocess.Popen(cmd,
stdin=None,
stdout=output,
stderr=subprocess.STDOUT,
bufsize=0).wait()

jobs = self.es.transport.perform_request("GET", "/_xpack/ml/anomaly_detectors/")
assert "filebeat-nginx-access-response_code" in (job["job_id"] for job in jobs["jobs"])
for df in self.es.transport.perform_request("GET", "/_xpack/ml/anomaly_detectors/")["jobs"]:
if df["job_id"] == 'datafeed-filebeat-nginx-access-response_code':
self.es.transport.perform_request("DELETE", "/_xpack/ml/anomaly_detectors/" + df["job_id"])

datafeeds = self.es.transport.perform_request("GET", "/_xpack/ml/datafeeds/")
assert "filebeat-nginx-access-response_code" in (df["job_id"] for df in datafeeds["datafeeds"])
shutil.rmtree(os.path.join(self.working_dir, "modules.d"), ignore_errors=True)

@unittest.skipIf(not INTEGRATION_TESTS or
os.getenv("TESTING_ENVIRONMENT") == "2x",
"integration test not available on 2.x")
def test_setup_machine_learning_nginx_enable(self):
"""
Tests that setup works and loads machine learning jobs for enabled modules.
"""
self.init()
# generate a minimal configuration
cfgfile = os.path.join(self.working_dir, "filebeat.yml")
self.render_config_template(
template_name="filebeat_modules",
output=cfgfile,
index_name=self.index_name,
elasticsearch_url=self.elasticsearch_url)
elasticsearch_url=self.elasticsearch_url,
kibana_url=self.kibana_url,
kibana_path=self.kibana_path)

# Enable nginx
os.mkdir(os.path.join(self.working_dir, "modules.d"))
with open(os.path.join(self.working_dir, "modules.d/nginx.yml"), "wb") as nginx:
nginx.write("- module: nginx")
if not modules_flag:
# Enable nginx
os.mkdir(os.path.join(self.working_dir, "modules.d"))
with open(os.path.join(self.working_dir, "modules.d/nginx.yml"), "wb") as nginx:
nginx.write("- module: nginx")

cmd = [
self.filebeat, "-systemTest",
"-e", "-d", "*",
"-c", cfgfile,
"setup", "--machine-learning"]

output = open(os.path.join(self.working_dir, "output.log"), "ab")
output.write(" ".join(cmd) + "\n")
subprocess.Popen(cmd,
stdin=None,
stdout=output,
stderr=output,
bufsize=0).wait()

jobs = self.es.transport.perform_request("GET", "/_xpack/ml/anomaly_detectors/")
assert "filebeat-nginx-access-response_code" in (job["job_id"] for job in jobs["jobs"])

datafeeds = self.es.transport.perform_request("GET", "/_xpack/ml/datafeeds/")
assert "filebeat-nginx-access-response_code" in (df["job_id"] for df in datafeeds["datafeeds"])

@unittest.skipIf(not INTEGRATION_TESTS or
os.getenv("TESTING_ENVIRONMENT") == "2x",
"integration test not available on 2.x")
def test_setup_flag_machine_learning_nginx_enable(self):
"""
Tests that setup works and loads machine learning jobs for enabled modules using --setup flag.
"""
self.init()
# generate a minimal configuration
cfgfile = os.path.join(self.working_dir, "filebeat.yml")
self.render_config_template(
template_name="filebeat_modules",
output=cfgfile,
index_name=self.index_name,
elasticsearch_url=self.elasticsearch_url)
"-c", cfgfile
]

# Enable nginx
os.mkdir(os.path.join(self.working_dir, "modules.d"))
with open(os.path.join(self.working_dir, "modules.d/nginx.yml"), "wb") as nginx:
nginx.write("- module: nginx")
if setup_flag:
cmd += ["--setup"]
else:
cmd += ["setup", "--machine-learning"]

cmd = [
self.filebeat, "-systemTest",
"-e", "-d", "*",
"-c", cfgfile,
"--setup"]
if modules_flag:
cmd += ["--modules=nginx"]

output = open(os.path.join(self.working_dir, "output.log"), "ab")
output.write(" ".join(cmd) + "\n")
Expand All @@ -315,10 +269,12 @@ def test_setup_flag_machine_learning_nginx_enable(self):
stderr=output,
bufsize=0)

jobs = self.es.transport.perform_request("GET", "/_xpack/ml/anomaly_detectors/")
assert "filebeat-nginx-access-response_code" in (job["job_id"] for job in jobs["jobs"])

datafeeds = self.es.transport.perform_request("GET", "/_xpack/ml/datafeeds/")
assert "filebeat-nginx-access-response_code" in (df["job_id"] for df in datafeeds["datafeeds"])
# Check result
self.wait_until(lambda: "filebeat-nginx-access-response_code" in
(df["job_id"] for df in self.es.transport.perform_request(
"GET", "/_xpack/ml/anomaly_detectors/")["jobs"]),
max_timeout=30)
self.wait_until(lambda: "datafeed-filebeat-nginx-access-response_code" in
(df["datafeed_id"] for df in self.es.transport.perform_request("GET", "/_xpack/ml/datafeeds/")["datafeeds"]))

beat.kill()
9 changes: 9 additions & 0 deletions libbeat/tests/system/beat/beat.py
Original file line number Diff line number Diff line change
Expand Up @@ -542,3 +542,12 @@ def get_elasticsearch_url(self):
host=os.getenv("ES_HOST", "localhost"),
port=os.getenv("ES_PORT", "9200"),
)

def get_kibana_url(self):
    """Build the Kibana base URL from the KIBANA_HOST / KIBANA_PORT
    environment variables, defaulting to localhost:5601."""
    host = os.getenv("KIBANA_HOST", "localhost")
    port = os.getenv("KIBANA_PORT", "5601")
    return "http://{0}:{1}".format(host, port)

0 comments on commit cd2439b

Please sign in to comment.