diff --git a/.gitignore b/.gitignore
index ced4dece32f..1dc5063f91f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -26,3 +26,4 @@ spec/reports
 rspec.xml
 .install-done
 .vendor
+integration_run
diff --git a/Gemfile b/Gemfile
index ae9d703285a..70d17c8350e 100644
--- a/Gemfile
+++ b/Gemfile
@@ -2,11 +2,17 @@
 # If you modify this file manually all comments and formatting will be lost.
 source "https://rubygems.org"
-gem "logstash-core", "3.0.0.dev", :path => "."
+gem "logstash-core", "3.0.0.dev", :path => "./logstash-core"
+gem "logstash-core-event", "3.0.0.dev", :path => "./logstash-core-event"
+# gem "logstash-core-event-java", "3.0.0.dev", :path => "./logstash-core-event-java"
 gem "file-dependencies", "0.1.6"
 gem "ci_reporter_rspec", "1.0.0", :group => :development
 gem "simplecov", :group => :development
 gem "coveralls", :group => :development
+# Tins 1.7 requires the ruby 2.0 platform to install;
+# this gem is a dependency of term-ansicolor which is a dependency of coveralls.
+# 1.6 is the last supported version on jruby.
+gem "tins", "1.6", :group => :development
 gem "rspec", "~> 3.1.0", :group => :development
 gem "logstash-devutils", "~> 0.0.15", :group => :development
 gem "benchmark-ips", :group => :development
diff --git a/Gemfile.jruby-1.9.lock b/Gemfile.jruby-1.9.lock
index a2accef2d25..42682d7cc39 100644
--- a/Gemfile.jruby-1.9.lock
+++ b/Gemfile.jruby-1.9.lock
@@ -1,32 +1,38 @@
 PATH
-  remote: .
+  remote: ./logstash-core
   specs:
     logstash-core (3.0.0.dev-java)
       cabin (~> 0.7.0)
       clamp (~> 0.6.5)
-      concurrent-ruby (~> 0.9.1)
+      concurrent-ruby (= 0.9.2)
       filesize (= 0.0.4)
       gems (~> 0.8.3)
       i18n (= 0.6.9)
-      jrjackson (~> 0.3.5)
+      jrjackson (~> 0.3.7)
       jruby-openssl (>= 0.9.11)
+      logstash-core-event (~> 3.0.0.dev)
       minitar (~> 0.5.4)
       pry (~> 0.10.1)
       stud (~> 0.0.19)
       thread_safe (~> 0.3.5)
       treetop (< 1.5.0)

+PATH
+  remote: ./logstash-core-event
+  specs:
+    logstash-core-event (3.0.0.dev-java)
+
 GEM
   remote: https://rubygems.org/
   specs:
     addressable (2.3.8)
     arr-pm (0.0.10)
       cabin (> 0)
-    backports (3.6.6)
+    backports (3.6.7)
     benchmark-ips (2.3.0)
     builder (3.2.2)
-    cabin (0.7.1)
-    childprocess (0.5.6)
+    cabin (0.7.2)
+    childprocess (0.5.8)
       ffi (~> 1.0, >= 1.0.11)
     ci_reporter (2.0.0)
       builder (>= 2.1.2)
@@ -35,13 +41,14 @@ GEM
       rspec (>= 2.14, < 4)
     clamp (0.6.5)
     coderay (1.1.0)
-    concurrent-ruby (0.9.1-java)
-    coveralls (0.8.3)
+    concurrent-ruby (0.9.2-java)
+    coveralls (0.8.9)
       json (~> 1.8)
       rest-client (>= 1.6.8, < 2)
       simplecov (~> 0.10.0)
       term-ansicolor (~> 1.3)
       thor (~> 0.19.1)
+      tins (~> 1.6.0)
     diff-lcs (1.2.5)
     docile (1.1.5)
     domain_name (0.5.25)
@@ -67,7 +74,7 @@ GEM
       domain_name (~> 0.5)
     i18n (0.6.9)
     insist (1.0.0)
-    jrjackson (0.3.5)
+    jrjackson (0.3.7)
     jruby-openssl (0.9.12-java)
     json (1.8.3-java)
     kramdown (1.9.0)
@@ -84,11 +91,11 @@ GEM
     mime-types (2.6.2)
     minitar (0.5.4)
     multipart-post (2.0.0)
-    netrc (0.10.3)
+    netrc (0.11.0)
     octokit (3.8.0)
       sawyer (~> 0.6.0, >= 0.5.3)
     polyglot (0.3.5)
-    pry (0.10.2-java)
+    pry (0.10.3-java)
       coderay (~> 1.1.0)
       method_source (~> 0.8.1)
       slop (~> 3.4)
@@ -110,8 +117,8 @@ GEM
     rspec-mocks (3.1.3)
       rspec-support (~> 3.1.0)
     rspec-support (3.1.2)
-    rspec-wait (0.0.7)
-      rspec (>= 2.11, < 3.4)
+    rspec-wait (0.0.8)
+      rspec (>= 2.11, < 3.5)
     rubyzip (1.1.7)
     sawyer (0.6.0)
       addressable (~> 2.3.5)
@@ -147,9 +154,11 @@ DEPENDENCIES
   fpm (~> 1.3.3)
   gems (~> 0.8.3)
   logstash-core (= 3.0.0.dev)!
+  logstash-core-event (= 3.0.0.dev)!
  logstash-devutils (~> 0.0.15)
  octokit (= 3.8.0)
  rspec (~> 3.1.0)
  rubyzip (~> 1.1.7)
  simplecov
  stud (~> 0.0.21)
+  tins (= 1.6)
diff --git a/README.md b/README.md
index 276a24418aa..7c908bfa0bf 100644
--- a/README.md
+++ b/README.md
@@ -33,9 +33,6 @@ For more info on developing and testing these plugins, please see the [README](h
 
 ### Plugin Issues and Pull Requests
 
-We are migrating all of the existing pull requests to their respective repositories. Rest assured, we will maintain
-all of the git history for these requests.
-
 **Please open new issues and pull requests for plugins under its own repository**
 
 For example, if you have to report an issue/enhancement for the Elasticsearch output, please do so [here](https://github.com/logstash-plugins/logstash-output-elasticsearch/issues).
diff --git a/Rakefile b/Rakefile
index cf702f08752..12b4a262110 100644
--- a/Rakefile
+++ b/Rakefile
@@ -1,6 +1,7 @@
 # encoding: utf-8
 $: << File.join(File.dirname(__FILE__), "lib")
+$: << File.join(File.dirname(__FILE__), "logstash-core/lib")
 
 task "default" => "help"
diff --git a/bin/logstash.lib.sh b/bin/logstash.lib.sh
index a5d6ccee3ec..f006387ed2c 100755
--- a/bin/logstash.lib.sh
+++ b/bin/logstash.lib.sh
@@ -35,6 +35,10 @@ setup_java() {
     JAVA_OPTS="$JAVA_OPTS -XX:CMSInitiatingOccupancyFraction=75"
     JAVA_OPTS="$JAVA_OPTS -XX:+UseCMSInitiatingOccupancyOnly"
+    # Causes the JVM to dump its heap on OutOfMemory.
+    JAVA_OPTS="$JAVA_OPTS -XX:+HeapDumpOnOutOfMemoryError"
+    # The path to the heap dump location
+    JAVA_OPTS="$JAVA_OPTS -XX:HeapDumpPath=${LOGSTASH_HOME}/heapdump.hprof"
   fi
 
   if [ "$LS_JAVA_OPTS" ] ; then
diff --git a/bin/setup.bat b/bin/setup.bat
index 557df31e1a9..4ad640ac7fa 100644
--- a/bin/setup.bat
+++ b/bin/setup.bat
@@ -48,7 +48,7 @@ REM Causes the JVM to dump its heap on OutOfMemory.
 set JAVA_OPTS=%JAVA_OPTS% -XX:+HeapDumpOnOutOfMemoryError
 REM The path to the heap dump location, note directory must exists and have enough
 REM space for a full heap dump.
-REM JAVA_OPTS=%JAVA_OPTS% -XX:HeapDumpPath="$LS_HOME/logs/heapdump.hprof"
+set JAVA_OPTS=%JAVA_OPTS% -XX:HeapDumpPath="%LS_HOME%/heapdump.hprof"
diff --git a/ci/ci_integration.sh b/ci/ci_integration.sh
new file mode 100755
index 00000000000..139408fefc9
--- /dev/null
+++ b/ci/ci_integration.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+rake test:install-default
+rake test:integration
diff --git a/docs/asciidoc/static/advanced-pipeline.asciidoc b/docs/asciidoc/static/advanced-pipeline.asciidoc
index 867f0a80263..813fc068e25 100644
--- a/docs/asciidoc/static/advanced-pipeline.asciidoc
+++ b/docs/asciidoc/static/advanced-pipeline.asciidoc
@@ -16,6 +16,7 @@ image::static/images/basic_logstash_pipeline.png[]
 The following text represents the skeleton of a configuration pipeline:
 
 [source,shell]
+--------------------------------------------------------------------------------
 # The # character at the beginning of a line indicates a comment. Use
 # comments to describe your configuration.
 input {
 }
 # The filter part of this file is commented out to indicate that it is
 # optional.
 # filter {
 #
 # }
 output {
 }
+--------------------------------------------------------------------------------
 
 This skeleton is non-functional, because the input and output sections don’t have any valid options defined. The
 examples in this tutorial build configuration files to address specific use cases.
@@ -44,7 +46,7 @@ https://download.elastic.co/demos/logstash/gettingstarted/logstash-tutorial.log.
 [float]
 [[configuring-file-input]]
-===== Configuring Logstash for File Input
+==== Configuring Logstash for File Input
 
 To start your Logstash pipeline, configure the Logstash instance to read from a file using the
 {logstash}plugins-inputs-file.html[file] input plugin.
@@ -52,12 +54,14 @@ To start your Logstash pipeline, configure the Logstash instance to read from a
 Edit the `first-pipeline.conf` file to add the following text:
 
 [source,json]
+--------------------------------------------------------------------------------
 input {
     file {
         path => "/path/to/logstash-tutorial.log"
         start_position => beginning <1>
     }
 }
+--------------------------------------------------------------------------------
 
 <1> The default behavior of the file input plugin is to monitor a file for new information, in a manner similar to the
 UNIX `tail -f` command. To change this default behavior and process the entire file, we need to specify the position
@@ -78,9 +82,11 @@ decisions about how to identify the patterns that are of interest to your use ca
 se server log sample looks like this:
 
 [source,shell]
+--------------------------------------------------------------------------------
 83.149.9.216 - - [04/Jan/2015:05:13:42 +0000] "GET /presentations/logstash-monitorama-2013/images/kibana-search.png
 HTTP/1.1" 200 203023 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel
 Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36"
+--------------------------------------------------------------------------------
 
 The IP address at the beginning of the line is easy to identify, as is the timestamp in brackets. In this tutorial, use
 the `%{COMBINEDAPACHELOG}` grok pattern, which structures lines from the Apache log using the following schema:
@@ -102,15 +108,18 @@ User agent:: `agent`
 Edit the `first-pipeline.conf` file to add the following text:
 
 [source,json]
+--------------------------------------------------------------------------------
 filter {
     grok {
         match => { "message" => "%{COMBINEDAPACHELOG}"}
     }
 }
+--------------------------------------------------------------------------------
 
 After processing, the sample line has the following JSON representation:
 
 [source,json]
+--------------------------------------------------------------------------------
 {
 "clientip" : "83.149.9.216",
 "ident" : ,
@@ -124,6 +133,7 @@ After processing, the sample line has the following JSON representation:
 "referrer" : "http://semicomplete.com/presentations/logstash-monitorama-2013/",
 "agent" : "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36"
 }
+--------------------------------------------------------------------------------
 
 [float]
 [[indexing-parsed-data-into-elasticsearch]]
 
 Now that the web logs are broken down into specific fields, the Logstash pipeline can index the data into an
 Elasticsearch cluster. Edit the `first-pipeline.conf` file to add the following text after the `input` section:
 
 [source,json]
+--------------------------------------------------------------------------------
 output {
     elasticsearch {
     }
 }
+--------------------------------------------------------------------------------
 
 With this configuration, Logstash uses http protocol to connect to Elasticsearch. The above example assumes Logstash
 and Elasticsearch to be running on the same instance.
 You can specify a remote Elasticsearch instance using `hosts`
@@ -154,9 +166,11 @@ Configure your Logstash instance to use the `geoip` filter plugin by adding the
 of the `first-pipeline.conf` file:
 
 [source,json]
+--------------------------------------------------------------------------------
 geoip {
     source => "clientip"
 }
+--------------------------------------------------------------------------------
 
 The `geoip` plugin configuration requires data that is already defined as separate fields. Make sure that the `geoip`
 section is after the `grok` section of the configuration file.
@@ -171,6 +185,7 @@ At this point, your `first-pipeline.conf` file has input, filter, and output sec
 like this:
 
 [source,json]
+--------------------------------------------------------------------------------
 input {
     file {
         path => "/Users/palecur/logstash-1.5.2/logstash-tutorial-dataset"
         start_position => beginning
     }
 }
 filter {
     geoip {
         source => "clientip"
     }
     grok {
         match => { "message" => "%{COMBINEDAPACHELOG}"}
     }
 }
@@ -189,28 +204,36 @@ output {
     elasticsearch {}
     stdout {}
 }
+--------------------------------------------------------------------------------
 
 To verify your configuration, run the following command:
 
 [source,shell]
+--------------------------------------------------------------------------------
 bin/logstash -f first-pipeline.conf --configtest
+--------------------------------------------------------------------------------
 
 The `--configtest` option parses your configuration file and reports any errors. When the configuration file passes
 the configuration test, start Logstash with the following command:
 
 [source,shell]
+--------------------------------------------------------------------------------
 bin/logstash -f first-pipeline.conf
+--------------------------------------------------------------------------------
 
 Try a test query to Elasticsearch based on the fields created by the `grok` filter plugin:
 
 [source,shell]
+--------------------------------------------------------------------------------
 curl -XGET 'localhost:9200/logstash-$DATE/_search?q=response=200'
+--------------------------------------------------------------------------------
 
 Replace $DATE with the current date, in YYYY.MM.DD format.
 
 Since our sample has just one 200 HTTP response, we get one hit back:
 
 [source,json]
+--------------------------------------------------------------------------------
 {"took":2,
 "timed_out":false,
 "_shards":{"total":5,
@@ -242,17 +265,21 @@ Since our sample has just one 200 HTTP response, we get one hit back:
 }]
 }
 }
+--------------------------------------------------------------------------------
 
 Try another search for the geographic information derived from the IP address:
 
 [source,shell]
+--------------------------------------------------------------------------------
 curl -XGET 'localhost:9200/logstash-$DATE/_search?q=geoip.city_name=Buffalo'
+--------------------------------------------------------------------------------
 
 Replace $DATE with the current date, in YYYY.MM.DD format.
 
 Only one of the log entries comes from Buffalo, so the query produces a single response:
 
 [source,json]
+--------------------------------------------------------------------------------
 {"took":3,
 "timed_out":false,
 "_shards":{
@@ -303,6 +330,7 @@ Only one of the log entries comes from Buffalo, so the query produces a single r
 }]
 }
 }
+--------------------------------------------------------------------------------
 
 [[multiple-input-output-plugins]]
 ==== Multiple Input and Output Plugins
 
@@ -311,12 +339,12 @@ The information you need to manage often comes from several disparate sources, a
 destinations for your data.
 Your Logstash pipeline can use multiple input and output plugins to handle these requirements.
 
-This example creates a Logstash pipeline that takes input from a Twitter feed and the Logstash Forwarder client, then
+This example creates a Logstash pipeline that takes input from a Twitter feed and the Filebeat client, then
 sends the information to an Elasticsearch cluster as well as writing the information directly to a file.
 
 [float]
 [[twitter-configuration]]
-===== Reading from a Twitter feed
+==== Reading from a Twitter feed
 
 To add a Twitter feed, you need several pieces of information:
 
@@ -332,6 +360,7 @@ your OAuth token and secret.
 Use this information to add the following lines to the `input` section of the `first-pipeline.conf` file:
 
 [source,json]
+--------------------------------------------------------------------------------
 twitter {
     consumer_key =>
     consumer_secret =>
@@ -339,68 +368,73 @@ twitter {
     oauth_token =>
     oauth_token_secret =>
 }
+--------------------------------------------------------------------------------
 
 [float]
 [[configuring-lsf]]
-===== The Logstash Forwarder
+==== The Filebeat Client
 
-The https://github.com/elastic/logstash-forwarder[Logstash Forwarder] is a lightweight, resource-friendly tool that
+The https://github.com/elastic/filebeat[Filebeat] client is a lightweight, resource-friendly tool that
 collects logs from files on the server and forwards these logs to your Logstash instance for processing. The
-Logstash Forwarder uses a secure protocol called _lumberjack_ to communicate with your Logstash instance. The
-lumberjack protocol is designed for reliability and low latency. The Logstash Forwarder uses the computing resources of
-the machine hosting the source data, and the Lumberjack input plugin minimizes the resource demands on the Logstash
-instance.
+Filebeat client uses the secure Beats protocol to communicate with your Logstash instance. The
+Beats protocol is designed for reliability and low latency. Filebeat uses the computing resources of
+the machine hosting the source data, and the {logstash}plugins-inputs-beats.html[Beats input] plugin minimizes the
+resource demands on the Logstash instance.
 
-NOTE: In a typical use case, the Logstash Forwarder client runs on a separate machine from the machine running your
-Logstash instance. For the purposes of this tutorial, both Logstash and the Logstash Forwarder will be running on the
+NOTE: In a typical use case, Filebeat runs on a separate machine from the machine running your
+Logstash instance. For the purposes of this tutorial, Logstash and Filebeat are running on the
 same machine.
 
-Default Logstash configuration includes the {logstash}plugins-inputs-lumberjack.html[Lumberjack input plugin], which is
-designed to be resource-friendly. To install the Logstash Forwarder on your data source machine, install the
-appropriate package from the main Logstash https://www.elastic.co/downloads/logstash[product page].
+Default Logstash configuration includes the {logstash}plugins-inputs-beats.html[Beats input plugin], which is
+designed to be resource-friendly. To install Filebeat on your data source machine, download the
+appropriate package from the Filebeat https://www.elastic.co/downloads/beats/filebeat[product page].
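+
+For example, on a 64-bit Linux machine the download-and-unpack steps might look like the
+following sketch; the exact URL and archive name are placeholders that depend on the
+Filebeat version and platform you choose on the product page:
+
+[source,shell]
+--------------------------------------------------------------------------------
+curl -L -O https://download.elastic.co/beats/filebeat/filebeat-<version>-x86_64.tar.gz
+tar xzvf filebeat-<version>-x86_64.tar.gz
+cd filebeat-<version>-x86_64
+--------------------------------------------------------------------------------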
-Create a configuration file for the Logstash Forwarder similar to the following example:
+Create a configuration file for Filebeat similar to the following example:
 
-[source,json]
+[source,shell]
 --------------------------------------------------------------------------------
-{
-    "network": {
-        "servers": [ "localhost:5043" ],
-        "ssl ca": "/path/to/localhost.crt", <1>
-        "timeout": 15
-    },
-    "files": [
-        {
-            "paths": [
-                "/path/to/sample-log" <2>
-            ],
-            "fields": { "type": "apache" }
-        }
-    ]
-}
+filebeat:
+  prospectors:
+    -
+      paths:
+        - "/path/to/sample-log" <1>
+      fields:
+        type: apache
+output:
+  logstash:
+    enabled: true
+    hosts: ["localhost:5043"]
+    tls:
+      certificate: /path/to/ssl-certificate.crt <2>
+      certificate_key: /path/to/ssl-certificate.key
+      certificate_authorities: /path/to/ssl-certificate.crt
+    timeout: 15
 --------------------------------------------------------------------------------
-<1> Path to the SSL certificate for the Logstash instance.
-<2> Path to the file or files that the Logstash Forwarder processes.
+<1> Path to the file or files that Filebeat processes.
+<2> Path to the SSL certificate for the Logstash instance.
 
+Save this configuration file as `filebeat.yml`.
-Save this configuration file as `logstash-forwarder.conf`.
-
-Configure your Logstash instance to use the Lumberjack input plugin by adding the following lines to the `input` section
+Configure your Logstash instance to use the Beats input plugin by adding the following lines to the `input` section
 of the `first-pipeline.conf` file:
 
 [source,json]
-lumberjack {
+--------------------------------------------------------------------------------
+beats {
     port => "5043"
+    ssl => true
     ssl_certificate => "/path/to/ssl-cert" <1>
     ssl_key => "/path/to/ssl-key" <2>
 }
+--------------------------------------------------------------------------------
 
-<1> Path to the SSL certificate that the Logstash instance uses to authenticate itself to Logstash Forwarder.
+<1> Path to the SSL certificate that the Logstash instance uses to authenticate itself to Filebeat.
 <2> Path to the key for the SSL certificate.
 
 [float]
 [[logstash-file-output]]
-===== Writing Logstash Data to a File
+==== Writing Logstash Data to a File
 
 You can configure your Logstash pipeline to write data directly to a file with the
 {logstash}plugins-outputs-file.html[`file`] output plugin.
 
@@ -409,13 +443,15 @@ Configure your Logstash instance to use the `file` output plugin by adding the f
 of the `first-pipeline.conf` file:
 
 [source,json]
+--------------------------------------------------------------------------------
 file {
     path => /path/to/target/file
 }
+--------------------------------------------------------------------------------
 
 [float]
 [[multiple-es-nodes]]
-===== Writing to multiple Elasticsearch nodes
+==== Writing to multiple Elasticsearch nodes
 
 Writing to multiple Elasticsearch nodes lightens the resource demands on a given Elasticsearch node, as well as
 providing redundant points of entry into the cluster when a particular node is unavailable.
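+
+For example, a minimal sketch of an `elasticsearch` output section that spreads indexing
+requests across three nodes might look like this; the addresses below are placeholders
+for your own node addresses:
+
+[source,json]
+--------------------------------------------------------------------------------
+output {
+    elasticsearch {
+        hosts => ["node1.example.com:9200", "node2.example.com:9200", "node3.example.com:9200"]
+    }
+}
+--------------------------------------------------------------------------------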
@@ -451,8 +487,9 @@ input {
     oauth_token =>
     oauth_token_secret =>
 }
-    lumberjack {
+    beats {
         port => "5043"
+        ssl => true
         ssl_certificate => "/path/to/ssl-cert"
         ssl_key => "/path/to/ssl-key"
     }
@@ -467,34 +504,44 @@ output {
 }
 --------------------------------------------------------------------------------
 
-Logstash is consuming data from the Twitter feed you configured, receiving data from the Logstash Forwarder, and
+Logstash is consuming data from the Twitter feed you configured, receiving data from Filebeat, and
 indexing this information to three nodes in an Elasticsearch cluster as well as writing to a file.
 
-At the data source machine, run the Logstash Forwarder with the following command:
+At the data source machine, run Filebeat with the following command:
 
 [source,shell]
-logstash-forwarder -config logstash-forwarder.conf
+--------------------------------------------------------------------------------
+sudo ./filebeat -e -c filebeat.yml -d "publish"
+--------------------------------------------------------------------------------
 
-Logstash Forwarder will attempt to connect on port 5403. Until Logstash starts with an active Lumberjack plugin, there
+Filebeat will attempt to connect on port 5043. Until Logstash starts with an active Beats plugin, there
 won’t be any answer on that port, so any messages you see regarding failure to connect on that port are normal for now.
 
 To verify your configuration, run the following command:
 
 [source,shell]
+--------------------------------------------------------------------------------
 bin/logstash -f first-pipeline.conf --configtest
+--------------------------------------------------------------------------------
 
 The `--configtest` option parses your configuration file and reports any errors. When the configuration file passes
 the configuration test, start Logstash with the following command:
 
 [source,shell]
+--------------------------------------------------------------------------------
 bin/logstash -f first-pipeline.conf
+--------------------------------------------------------------------------------
 
 Use the `grep` utility to search in the target file to verify that information is present:
 
 [source,shell]
+--------------------------------------------------------------------------------
 grep Mozilla /path/to/target/file
+--------------------------------------------------------------------------------
 
 Run an Elasticsearch query to find the same information in the Elasticsearch cluster:
 
 [source,shell]
+--------------------------------------------------------------------------------
 curl -XGET 'localhost:9200/logstash-2015.07.30/_search?q=agent=Mozilla'
+--------------------------------------------------------------------------------
diff --git a/docs/asciidoc/static/command-line-flags.asciidoc b/docs/asciidoc/static/command-line-flags.asciidoc
index 839b162901b..c91f2db94c2 100644
--- a/docs/asciidoc/static/command-line-flags.asciidoc
+++ b/docs/asciidoc/static/command-line-flags.asciidoc
@@ -38,7 +38,10 @@ Logstash has the following flags. You can use the `--help` flag to display this
 -t, --configtest
   Checks configuration and then exit. Note that grok patterns are not checked for
-  correctness with this flag
+  correctness with this flag.
+  Logstash can read multiple config files from a directory. If you combine this
+  flag with `--debug`, Logstash will log the combined config file, annotating the
+  individual config blocks with the source file they came from.
 -h, --help
   Print help
diff --git a/docs/asciidoc/static/deploying.asciidoc b/docs/asciidoc/static/deploying.asciidoc
index a81239ddcfe..d33ea69a65e 100644
--- a/docs/asciidoc/static/deploying.asciidoc
+++ b/docs/asciidoc/static/deploying.asciidoc
@@ -39,18 +39,17 @@ filtering tasks. For example the `bin/logstash -w 8` command uses eight differen
 image::static/images/deploy_2.png[]
 
 [float]
-[[deploying-logstash-forwarder]]
-==== Using Logstash Forwarder
-
-The https://github.com/elastic/logstash-forwarder[Logstash Forwarder] is a lightweight, resource-friendly tool written
-in Go that collects logs from files on the server and forwards these logs to other machines for processing. The
-Logstash Forwarder uses a secure protocol called Lumberjack to communicate with a centralized Logstash instance.
-Configure the Logstash instances that receive Lumberjack data to use the
-{logstash}plugins-inputs-lumberjack.html[Lumberjack input plugin].
-
-The Logstash Forwarder uses the computing resources of the machine hosting the source data, and the Lumberjack input
-plugin minimizes the resource demands on the Logstash instance, making this architecture attractive for use cases with
-resource constraints.
+[[deploying-filebeat]]
+==== Using Filebeat
+
+https://www.elastic.co/guide/en/beats/filebeat/current/index.html[Filebeat] is a lightweight, resource-friendly tool
+written in Go that collects logs from files on the server and forwards these logs to other machines for processing.
+Filebeat uses the https://www.elastic.co/guide/en/beats/libbeat/current/index.html[Beats] protocol to communicate with a
+centralized Logstash instance. Configure the Logstash instances that receive Beats data to use the
+{logstash}plugins-inputs-beats.html[Beats input plugin].
+
+Filebeat uses the computing resources of the machine hosting the source data, and the Beats input plugin minimizes the
+resource demands on the Logstash instance, making this architecture attractive for use cases with resource constraints.
 
 image::static/images/deploy_3.png[]
diff --git a/docs/asciidoc/static/introduction.asciidoc b/docs/asciidoc/static/introduction.asciidoc
index ec9af52cd15..c00119237b8 100644
--- a/docs/asciidoc/static/introduction.asciidoc
+++ b/docs/asciidoc/static/introduction.asciidoc
@@ -1,15 +1,3 @@
-[[introduction]]
-== Logstash Introduction
-
-Logstash is an open source data collection engine with real-time pipelining capabilities. Logstash can dynamically
-unify data from disparate sources and normalize the data into destinations of your choice. Cleanse and democratize all
-your data for diverse advanced downstream analytics and visualization use cases.
-
-While Logstash originally drove innovation in log collection, its capabilities extend well beyond that use case. Any
-type of event can be enriched and transformed with a broad array of input, filter, and output plugins, with many
-native codecs further simplifying the ingestion process. Logstash accelerates your insights by harnessing a greater
-volume and variety of data.
-
 [float]
 [[power-of-logstash]]
 == The Power of Logstash
@@ -43,8 +31,7 @@ Where it all started.
 logs like <> for Java
 ** Capture many other log formats like <>, <>, networking and firewall logs, and more
-* Enjoy complementary secure log forwarding capabilities with https://github.com/elastic/logstash-forwarder[Logstash
-Forwarder]
+* Enjoy complementary secure log forwarding capabilities with https://github.com/elastic/filebeat[Filebeat]
 * Collect metrics from <>, <>, <>, <>, and many other infrastructure and
 application platforms over <> and <>
diff --git a/docs/asciidoc/static/life-of-an-event.asciidoc b/docs/asciidoc/static/life-of-an-event.asciidoc
index 569bd545f7c..41cadb8ac6e 100644
--- a/docs/asciidoc/static/life-of-an-event.asciidoc
+++ b/docs/asciidoc/static/life-of-an-event.asciidoc
@@ -19,8 +19,7 @@ according to the RFC3164 format
 * *redis*: reads from a redis server, using both redis channels and redis lists. Redis is often used as a "broker" in a
 centralized Logstash installation, which queues Logstash events from remote Logstash "shippers".
-* *lumberjack*: processes events sent in the lumberjack protocol. Now called
-https://github.com/elastic/logstash-forwarder[logstash-forwarder].
+* *beats*: processes events sent by https://www.elastic.co/downloads/beats/filebeat[Filebeat].
 
 For more information about the available inputs, see <>.
diff --git a/docs/asciidoc/static/managing-multiline-events.asciidoc b/docs/asciidoc/static/managing-multiline-events.asciidoc
index 1185348bc7d..2e1cd694e70 100644
--- a/docs/asciidoc/static/managing-multiline-events.asciidoc
+++ b/docs/asciidoc/static/managing-multiline-events.asciidoc
@@ -9,8 +9,6 @@ processing is to implement the processing as early in the pipeline as possible.
 pipeline is the {logstash}plugins-codecs-multiline.html[multiline codec], which merges lines from a single input using
 a simple set of rules.
 
-For more complex needs, the {logstash}plugins-filters-multiline.html[multiline filter] performs a similar task at the
-filter stage of processing, where the Logstash instance aggregates multiple inputs.
 
 The most important aspects of configuring either multiline plugin are the following:
 
@@ -25,19 +23,10 @@ _do not_ match the regular expression specified in the `pattern` option.
 See the full documentation for the {logstash}plugins-codecs-multiline.html[multiline codec] or the
 {logstash}plugins-filters-multiline.html[multiline filter] plugin for more information on configuration options.
 
-==== Multiline Special Cases
-
-* The current release of the multiline codec plugin treats all input from the
-{logstash}plugins-inputs-lumberjack[lumberjack] input plugin as a single stream. When your use case involves the
-Logstash Forwarder processing multiple files concurrently, proper event ordering can be challenging to maintain, and
-any resulting errors can be difficult to diagnose. Carefully monitor the output of Logstash configurations that involve
-multiline processing of multiple files handled by the Logstash Forwarder.
-
-* The multiline codec plugin does not support file input from files that contain events from multiple sources.
-
-* The multiline filter plugin is not thread-safe. Avoid using multiple filter workers with the multiline filter.
-
-NOTE: You can track the progress of upgrades to the functionality of the multiline codec at
+NOTE: For more complex needs, the {logstash}plugins-filters-multiline.html[multiline filter] performs a similar task at
+the filter stage of processing, where the Logstash instance aggregates multiple inputs.
+The multiline filter plugin is not thread-safe.
+Avoid using multiple filter workers with the multiline filter. You can
+track the progress of upgrades to the functionality of the multiline codec at
 https://github.com/logstash-plugins/logstash-codec-multiline/issues/10[this Github issue].
 
 ==== Examples of Multiline Plugin Configuration
diff --git a/docs/asciidoc/static/offline-plugins.asciidoc b/docs/asciidoc/static/offline-plugins.asciidoc
new file mode 100644
index 00000000000..be3d1bef601
--- /dev/null
+++ b/docs/asciidoc/static/offline-plugins.asciidoc
@@ -0,0 +1,58 @@
+[[offline-plugins]]
+=== Offline Plugin Management
+
+The Logstash <> was introduced in the 1.5 release. This section discusses setting up
+local repositories of plugins for use on systems without access to the Internet.
+
+The procedures in this section require a staging machine running Logstash that has access to a public or private Rubygems
+server. This staging machine downloads and packages the files used for offline installation.
+
+See the <> section for information on setting up your own private
+Rubygems server.
+
+[float]
+=== Building the Offline Package
+
+Working with offline plugins requires you to create an _offline package_, which is a compressed file that contains all of
+the plugins your offline Logstash installation requires, along with the dependencies for those plugins.
+
+. Create the offline package with the `bin/plugin pack` subcommand.
++
+When you run the `bin/plugin pack` subcommand, Logstash creates a compressed bundle that contains all of the currently
+installed plugins and the dependencies for those plugins. By default, the compressed bundle is a GZipped TAR file when you
+run the `bin/plugin pack` subcommand on a UNIX machine. By default, when you run the `bin/plugin pack` subcommand on a
+Windows machine, the compressed bundle is a ZIP file. See <> for details on changing
+these default behaviors.
++
+NOTE: Downloading all dependencies for the specified plugins may take some time, depending on the plugins listed.
+
+. Move the compressed bundle to the offline machines that are the target of the offline plugin installation, then use the
+`bin/plugin unpack` subcommand to make the packaged plugins available.
+
+[float]
+=== Install or Update a local plugin
+
+To install or update a local plugin, use the `--local` option with the install and update commands, as in the following
+examples:
+
+.Installing a local plugin
+============
+`bin/plugin install --local logstash-input-jdbc`
+============
+
+.Updating a local plugin
+============
+`bin/plugin update --local logstash-input-jdbc`
+============
+
+[float]
+[[managing-packs]]
+=== Managing Plugin Packs
+
+The `pack` and `unpack` subcommands for `bin/plugin` take the following options:
+
+[horizontal]
+`--tgz`:: Generate the offline package as a GZipped TAR file. The default behavior on UNIX systems.
+`--zip`:: Generate the offline package as a ZIP file. The default behavior on Windows systems.
+`[packname] --override`:: Generates a new offline package that overwrites an existing offline package with the specified name.
+`[packname] --[no-]clean`:: Deletes offline packages matching the specified name.
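+
+Putting the subcommands together, a typical round trip might look like the following
+sketch; the bundle path is a placeholder, and the copy step depends on your environment:
+
+[source,shell]
+----------------------------------
+# On the staging machine with Rubygems access:
+bin/plugin pack
+# Copy the generated compressed bundle to the offline machine, then make the plugins available:
+bin/plugin unpack <path-to-bundle>
+# Install or update plugins from the unpacked local repository:
+bin/plugin install --local logstash-input-jdbc
+----------------------------------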
\ No newline at end of file
diff --git a/docs/asciidoc/static/plugin-manager.asciidoc b/docs/asciidoc/static/plugin-manager.asciidoc
index f15ea99dbd0..2f531083008 100644
--- a/docs/asciidoc/static/plugin-manager.asciidoc
+++ b/docs/asciidoc/static/plugin-manager.asciidoc
@@ -1,15 +1,17 @@
 [[working-with-plugins]]
 == Working with plugins
 
-Logstash has a rich collection of input, filter, codec and output plugins. Plugins are available as self-contained packages called gems and hosted on RubyGems.org. The plugin manager accesed via `bin/plugin` script is used to manage the lifecycle of plugins in your Logstash deployment. You can install, uninstall and upgrade plugins using these Command Line Interface (CLI) described below.
-
-NOTE: Some sections here are for advanced users
+Logstash has a rich collection of input, filter, codec and output plugins. Plugins are available as self-contained
+packages called gems and hosted on RubyGems.org. The plugin manager, accessed via the `bin/plugin` script, is used to
+manage the lifecycle of plugins in your Logstash deployment. You can install, uninstall and upgrade plugins using the
+Command Line Interface (CLI) commands described below.
 
 [float]
 [[listing-plugins]]
 === Listing plugins
 
-Logstash release packages bundle common plugins so you can use them out of the box. To list the plugins currently available in your deployment:
+Logstash release packages bundle common plugins so you can use them out of the box. To list the plugins currently
+available in your deployment:
 
 [source,shell]
 ----------------------------------
@@ -30,7 +32,9 @@ bin/plugin list --group output <4>
 
 [[installing-plugins]]
 === Adding plugins to your deployment
 
-The most common situation when dealing with plugin installation is when you have access to internet. Using this method, you will be able to retrieve plugins hosted on the public repository (RubyGems.org) and install on top of your Logstash installation.
+The most common situation when dealing with plugin installation is when you have access to the internet. Using this
+method, you will be able to retrieve plugins hosted on the public repository (RubyGems.org) and install them on top of
+your Logstash installation.
 
 [source,shell]
 ----------------------------------
@@ -43,7 +47,8 @@ Once the plugin is successfully installed, you can start using it in your config
 
 [float]
 ==== Advanced: Adding a locally built plugin
 
-In some cases, you want to install plugins which have not yet been released and not hosted on RubyGems.org. Logstash provides you the option to install a locally built plugin which is packaged as a ruby gem. Using a file location:
+In some cases, you want to install plugins which have not yet been released and are not hosted on RubyGems.org. Logstash
+provides you the option to install a locally built plugin which is packaged as a ruby gem. Using a file location:
 
 [source,shell]
 ----------------------------------
 bin/plugin install /path/to/logstash-output-kafka-1.0.0.gem
@@ -54,7 +59,8 @@ bin/plugin install /path/to/logstash-output-kafka-1.0.0.gem
 
 [float]
 ==== Advanced: Using `--pluginpath`
 
-Using the `--pluginpath` flag, you can load a plugin source code located on your file system. Typically this is used by developers who are iterating on a custom plugin and want to test it before creating a ruby gem.
+Using the `--pluginpath` flag, you can load plugin source code located on your file system. Typically this is used by
+developers who are iterating on a custom plugin and want to test it before creating a ruby gem.
 [source,shell]
 ----------------------------------
@@ -65,7 +71,8 @@ bin/logstash --pluginpath /opt/shared/lib/logstash/input/my-custom-plugin-code.r
 
 [float]
 === Updating plugins
 
-Plugins have their own release cycle and are often released independent of Logstash’s core release cycle. Using the update sub-command you can get the latest or update to a particular version of the plugin.
+Plugins have their own release cycle and are often released independently of Logstash’s core release cycle. Using the
+update subcommand you can get the latest or update to a particular version of the plugin.
 
 [source,shell]
 ----------------------------------
@@ -91,7 +98,9 @@ bin/plugin uninstall logstash-output-kafka
 
 [float]
 === Proxy Support
 
-The previous sections relied on Logstash being able to communicate with RubyGems.org. In certain environments, Forwarding Proxy is used to handle HTTP requests. Logstash Plugins can be installed and updated through a Proxy by setting the `HTTP_PROXY` environment variable:
+The previous sections relied on Logstash being able to communicate with RubyGems.org. In certain environments, a
+forwarding proxy is used to handle HTTP requests. Logstash plugins can be installed and updated through a proxy by
+setting the `HTTP_PROXY` environment variable:
 
 [source,shell]
 ----------------------------------
@@ -101,3 +110,7 @@ bin/plugin install logstash-output-kafka
 ----------------------------------
 
 Once set, plugin commands install, update can be used through this proxy.
+
+include::offline-plugins.asciidoc[]
+
+include::private-gem-repo.asciidoc[]
\ No newline at end of file
diff --git a/docs/asciidoc/static/private-gem-repo.asciidoc b/docs/asciidoc/static/private-gem-repo.asciidoc
new file mode 100644
index 00000000000..dd96f63a60d
--- /dev/null
+++ b/docs/asciidoc/static/private-gem-repo.asciidoc
@@ -0,0 +1,53 @@
+[[private-rubygem]]
+=== Private Gem Repositories
+
+The Logstash plugin manager connects to a Ruby gems repository to install and update Logstash plugins. By default, this
+repository is http://rubygems.org.
+
+Some use cases are unable to use the default repository, as in the following examples:
+
+* A firewall blocks access to the default repository.
+* You are developing your own plugins locally.
+* The local system has airgap requirements.
+
+When you use a custom gem repository, be sure to make plugin dependencies available.
+
+Several open source projects enable you to run your own plugin server, among them:
+
+* https://github.com/geminabox/geminabox[Geminabox]
+* https://github.com/PierreRambaud/gemirro[Gemirro]
+* https://gemfury.com/[Gemfury]
+* http://www.jfrog.com/open-source/[Artifactory]
+
+==== Editing the Gemfile
+
+The gemfile is a configuration file that specifies information required for plugin management. Each gemfile has a
+`source` line that specifies a location for plugin content.
+
+By default, the gemfile's `source` line reads:
+
+[source,shell]
+----------
+# This is a Logstash generated Gemfile.
+# If you modify this file manually all comments and formatting will be lost.
+
+source "https://rubygems.org"
+----------
+
+To change the source, edit the `source` line to contain your preferred source, as in the following example:
+
+[source,shell]
+----------
+# This is a Logstash generated Gemfile.
+# If you modify this file manually all comments and formatting will be lost.
+
+source "https://my.private.repository"
+----------
+
+After saving the new version of the gemfile, use <> normally.
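+
+As an illustration, a minimal private server based on Geminabox (one of the projects
+listed above) might be started as in the following sketch, where the data directory is
+a local choice and the port is the rackup default:
+
+[source,shell]
+----------
+gem install geminabox
+cat > config.ru <<'RU'
+require "geminabox"
+Geminabox.data = "/var/geminabox"
+run Geminabox::Server
+RU
+rackup   # serves the repository on http://localhost:9292 by default
+----------
+
+The gemfile's `source` line would then point at `http://localhost:9292`.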
+
+The following links contain further material on setting up some commonly used repositories:
+
+* https://github.com/geminabox/geminabox/blob/master/README.markdown[Geminabox]
+* https://www.jfrog.com/confluence/display/RTF/RubyGems+Repositories[Artifactory]
+* Running a http://guides.rubygems.org/run-your-own-gem-server/[rubygems mirror]
diff --git a/docs/asciidoc/static/roadmap/index.asciidoc b/docs/asciidoc/static/roadmap/index.asciidoc
index b6c33abcc92..b52e271675a 100644
--- a/docs/asciidoc/static/roadmap/index.asciidoc
+++ b/docs/asciidoc/static/roadmap/index.asciidoc
@@ -72,8 +72,7 @@ https://github.com/elastic/logstash/labels/resiliency[resiliency] tag.
 
 *Known unknowns.* If we don’t know it’s happening, it’s hard for us to fix it! Please
 report your issues in GitHub, under the
-https://github.com/elastic/logstash/issues[Logstash],
-https://github.com/elastic/logstash-forwarder/issues[Logstash Forwarder], or
+https://github.com/elastic/logstash/issues[Logstash] or
 individual https://github.com/logstash-plugins/[Logstash plugin] repositories.
 
 == Manageability
@@ -125,12 +124,6 @@ distributing the load between instances based on the latest cluster state.
 This is a complex use case that will require input from the community on current
 approaches to implementing HA and load balancing of Logstash instances.
 
-== Logstash Forwarder
-[float]
-=== status: ongoing
-
-https://github.com/elastic/logstash-forwarder/[Logstash Forwarder] is a lightweight shipper for tailing files and forwarding this data to Logstash for further processing. It is often used in lieu of running Logstash on the servers, because it is lightweight, written in Go, and consumes less resources. It was created before we had the Beats framework for shipping data from servers and is currently maintained separately. We plan to move the Logstash Forwarder functionality to https://github.com/elastic/filebeat/[Filebeat] in the Beats framework, which is also written in Go. The first version of Filebeat will leverage the libbeat infrastructure and preserve existing features. Over time, we plan to enhance Filebeat with capabilities such as multiline and filtering support. Since Filebeat will serve as the direct replacement for Logstash Forwarder, we are not planning additional releases beyond http://www.elasticsearch.org/blog/logstash-forwarder-0-4-0-released/[Logstash Forwarder 0.4.0].
-
 == Performance
 [float]
 === status: ongoing; v1.5, v2.x
@@ -187,4 +180,4 @@ In Logstash 1.5, we made it easier than ever to add and maintain plugins by
 putting each plugin into its own repository (see "Plugin Framework" section). We
 also greatly improved the S3, Twitter, RabbitMQ plugins. To follow requests for
 new Logstash plugins or contribute to the discussion, look for issues that
-have the {LABELS}new-plugin[new-plugin] tag in Github.
\ No newline at end of file
+have the {LABELS}new-plugin[new-plugin] tag in Github.
diff --git a/docs/asciidoc/static/upgrading.asciidoc b/docs/asciidoc/static/upgrading.asciidoc
index 8e34d01933a..3043c67d5b0 100644
--- a/docs/asciidoc/static/upgrading.asciidoc
+++ b/docs/asciidoc/static/upgrading.asciidoc
@@ -14,10 +14,10 @@ Before upgrading Logstash:
 
 This procedure uses <> to upgrade Logstash.
 
 1. Shut down your Logstash pipeline, including any inputs that send events to Logstash.
-2. Using the directions in the _Package Repositories_ section, update your repository links to point to the 2.0 repositories
+2. Using the directions in the _Package Repositories_ section, update your repository links to point to the 2.0 repositories
 instead of the previous version.
 3. Run the `apt-get update logstash` or `yum update logstash` command as appropriate for your operating system.
-4. Test your configuration file with the `logstash --configtest -f ` command. Configuration options for
+4. Test your configuration file with the `logstash --configtest -f ` command. Configuration options for
 some Logstash plugins have changed in the 2.0 release.
 5. Restart your Logstash pipeline after updating your configuration file.
 
@@ -26,9 +26,9 @@
 This procedure downloads the relevant Logstash binaries directly from Elastic.
 
 1. Shut down your Logstash pipeline, including any inputs that send events to Logstash.
-2. Download the https://www.elastic.co/downloads/logstash[Logstash installation file] that matches your host environment.
+2. Download the https://www.elastic.co/downloads/logstash[Logstash installation file] that matches your host environment.
 3. Unpack the installation file into your Logstash directory.
-4. Test your configuration file with the `logstash --configtest -f ` command. Configuration options for
+4. Test your configuration file with the `logstash --configtest -f ` command. Configuration options for
 some Logstash plugins have changed in the 2.0 release.
 5. Restart your Logstash pipeline after updating your configuration file.
 
@@ -40,11 +40,11 @@ before you upgrade. In addition, the following steps needs to be performed after
 
 **Mapping changes:** Users may have custom template changes, so by default a Logstash upgrade will
 leave the template as is. Even if you don't have a custom template, Logstash will not overwrite an existing
-template by default.
+template by default. There is one known issue (removal of
+https://www.elastic.co/guide/en/elasticsearch/reference/1.4/mapping-object-type.html#_path_3[path]) when using the GeoIP
+filter that needs a manual update to the template.
 
-Note: If you have custom template changes, please make sure to save it and merge any changes. You can
+Note: If you have custom template changes, please make sure to save it and merge any changes. You can
 get the existing template by running:
 
 [source,shell]
@@ -63,8 +63,8 @@ output {
 
 Restart Logstash.
 
 **Dots in fields:** Elasticsearch 2.0 does not allow field names to contain the `.` character.
-Further details about this change https://www.elastic.co/guide/en/elasticsearch/reference/2.0/_mapping_changes.html#_field_names_may_not_contain_dots[here]. Some plugins already have been updated to compensate
-for this breaking change, including logstash-filter-metrics and logstash-filter-elapsed.
+Further details about this change are available https://www.elastic.co/guide/en/elasticsearch/reference/2.0/breaking_20_mapping_changes.html#_field_names_may_not_contain_dots[here]. Some plugins have already been updated to compensate
+for this breaking change, including logstash-filter-metrics and logstash-filter-elapsed. These plugin updates are
+available for Logstash 2.0.
 To upgrade to the latest version of these plugins, the command is:
diff --git a/integration/logstash_config/file_input_to_file_output_spec.rb b/integration/logstash_config/file_input_to_file_output_spec.rb
new file mode 100644
index 00000000000..f857465b8ee
--- /dev/null
+++ b/integration/logstash_config/file_input_to_file_output_spec.rb
@@ -0,0 +1,41 @@
+# Encoding: utf-8
+require_relative "../spec_helper"
+require "stud/temporary"
+
+describe "File input to File output" do
+  let(:number_of_events) { IO.readlines(sample_log).size }
+  let(:sample_log) { File.expand_path(File.join(File.dirname(__FILE__), "..", "support", "sample.log")) }
+  let(:output_file) { Stud::Temporary.file.path }
+  let(:config) { <<EOS
+    input {
+      file {
+        path => \"#{sample_log}\"
+        stat_interval => 0
+        start_position => \"beginning\"
+        sincedb_path => \"/dev/null\"
+      }
+    }
+    output {
+      file {
+        path => \"#{output_file}\"
+      }
+    }
+EOS
+  }
+
+  before :all do
+    command("bin/plugin install logstash-input-file logstash-output-file")
+  end
+
+  it "writes events to file" do
+    cmd = "bin/logstash -e '#{config}'"
+    launch_logstash(cmd)
+
+    expect(File.exist?(output_file)).to eq(true)
+
+    # on shutdown the events aren't flushed to disk correctly
+    # Known issue https://github.com/logstash-plugins/logstash-output-file/issues/12
+    expect(IO.readlines(output_file).size).to be_between(number_of_events - 10, number_of_events).inclusive
+  end
+end
diff --git a/integration/plugin_manager/logstash_spec.rb b/integration/plugin_manager/logstash_spec.rb
new file mode 100644
index 00000000000..8c2f4c97d73
--- /dev/null
+++ b/integration/plugin_manager/logstash_spec.rb
@@ -0,0 +1,11 @@
+# Encoding: utf-8
+require_relative "../spec_helper"
+require_relative "../../lib/logstash/version"
+
+describe "bin/logstash" do
+  it "returns the logstash version" do
+    result = command("bin/logstash version")
+    expect(result.exit_status).to eq(0)
+    expect(result.stdout).to match(/^logstash\s#{LOGSTASH_VERSION}/)
+  end
+end
diff --git a/integration/plugin_manager/plugin_install_spec.rb b/integration/plugin_manager/plugin_install_spec.rb
new file mode 100644
index 00000000000..313fd1b1f73
--- /dev/null
+++ b/integration/plugin_manager/plugin_install_spec.rb
@@ -0,0 +1,41 @@
+# Encoding: utf-8
+require_relative "../spec_helper"
+require "fileutils"
+
+context "bin/plugin install" do
+  context "with a local gem" do
+    let(:gem_name) { "logstash-input-wmi" }
+    let(:local_gem) { gem_fetch(gem_name) }
+
+    it "installs the gem successfully" do
+      result = command("bin/plugin install --no-verify #{local_gem}")
+      expect(result.exit_status).to eq(0)
+      expect(result.stdout).to match(/^Installing\s#{gem_name}\nInstallation\ssuccessful$/)
+    end
+  end
+
+  context "when the plugin exists" do
+    let(:plugin_name) { "logstash-input-drupal_dblog" }
+
+    it "successfully installs" do
+      result = command("bin/plugin install #{plugin_name}")
+      expect(result.exit_status).to eq(0)
+      expect(result.stdout).to match(/^Validating\s#{plugin_name}\nInstalling\s#{plugin_name}\nInstallation\ssuccessful$/)
+    end
+
+    it "allows installing a specific version" do
+      version = "2.0.2"
+      result = command("bin/plugin install --version #{version} #{plugin_name}")
+      expect(result.exit_status).to eq(0)
+      expect(result.stdout).to match(/^Validating\s#{plugin_name}-#{version}\nInstalling\s#{plugin_name}\nInstallation\ssuccessful$/)
+    end
+  end
+
+  context "when the plugin doesn't exist" do
+    it "fails to install" do
+      result = command("bin/plugin install --no-verify logstash-output-impossible-plugin")
+      expect(result.exit_status).to eq(1)
+      expect(result.stderr).to match(/Installation Aborted, message: Could not find gem/)
+    end
+  end
+end
diff --git a/integration/plugin_manager/plugin_list_spec.rb b/integration/plugin_manager/plugin_list_spec.rb
new file mode 100644
index 00000000000..a8a2b19e453
--- /dev/null
+++ b/integration/plugin_manager/plugin_list_spec.rb
@@ -0,0 +1,41 @@
+# Encoding: utf-8
+require_relative "../spec_helper"
+
+describe "bin/plugin list" do
+  context "without a specific plugin" do
+    it "displays a list of plugins" do
+      result = command("bin/plugin list")
+      expect(result.exit_status).to eq(0)
+      expect(result.stdout.split("\n").size).to be > 1
+    end
+
+    it "displays a list of installed plugins" do
+      result = command("bin/plugin list --installed")
+      expect(result.exit_status).to eq(0)
+      expect(result.stdout.split("\n").size).to be > 1
+    end
+
+    it "lists the plugins with their versions" do
+      result = command("bin/plugin list --verbose")
+      result.stdout.split("\n").each do |plugin|
+        expect(plugin).to match(/^logstash-\w+-\w+\s\(\d+\.\d+\.\d+\)/)
+      end
+      expect(result.exit_status).to eq(0)
+    end
+  end
+
+  context "with a specific plugin" do
+    let(:plugin_name) { "logstash-input-stdin" }
+    it "lists the plugin and displays the plugin name" do
+      result = command("bin/plugin list #{plugin_name}")
+      expect(result.stdout).to match(/^#{plugin_name}$/)
+      expect(result.exit_status).to eq(0)
+    end
+
+    it "lists the plugin with its version" do
+      result = command("bin/plugin list --verbose #{plugin_name}")
+      expect(result.stdout).to match(/^#{plugin_name} \(\d+\.\d+\.\d+\)/)
+      expect(result.exit_status).to eq(0)
+    end
+  end
+end
diff --git a/integration/plugin_manager/plugin_uninstall_spec.rb b/integration/plugin_manager/plugin_uninstall_spec.rb
new file mode 100644
index 00000000000..87f2fd747e2
--- /dev/null
+++ b/integration/plugin_manager/plugin_uninstall_spec.rb
@@ -0,0 +1,24 @@
+# Encoding: utf-8
+require_relative "../spec_helper"
+
+describe "bin/plugin uninstall" do
+  context "when the plugin isn't installed" do
+    it "fails to uninstall it" do
+      result = command("bin/plugin uninstall logstash-filter-cidr")
+      expect(result.stderr).to match(/ERROR: Uninstall Aborted, message: This plugin has not been previously installed, aborting/)
+      expect(result.exit_status).to eq(1)
+    end
+  end
+
+  context "when the plugin is installed" do
+    it "successfully uninstalls it" do
+      # make sure we have the plugin installed.
+ command("bin/plugin install logstash-filter-ruby") + + result = command("bin/plugin uninstall logstash-filter-ruby") + + expect(result.stdout).to match(/^Uninstalling logstash-filter-ruby/) + expect(result.exit_status).to eq(0) + end + end +end diff --git a/integration/plugin_manager/plugin_update_spec.rb b/integration/plugin_manager/plugin_update_spec.rb new file mode 100644 index 00000000000..d8b291739cc --- /dev/null +++ b/integration/plugin_manager/plugin_update_spec.rb @@ -0,0 +1,32 @@ +# Encoding: utf-8 +require_relative "../spec_helper" + +describe "update" do + let(:plugin_name) { "logstash-input-stdin" } + let(:previous_version) { "2.0.1" } + + before do + command("bin/plugin install --version #{previous_version} #{plugin_name}") + cmd = command("bin/plugin list --verbose #{plugin_name}") + expect(cmd.stdout).to match(/#{plugin_name} \(#{previous_version}\)/) + end + + context "update a specific plugin" do + subject { command("bin/plugin update #{plugin_name}") } + + it "has executed succesfully" do + expect(subject.exit_status).to eq(0) + expect(subject.stdout).to match(/Updating #{plugin_name}/) + end + end + + context "update all the plugins" do + subject { command("bin/plugin update") } + + it "has executed succesfully" do + expect(subject.exit_status).to eq(0) + cmd = command("bin/plugin list --verbose #{plugin_name}").stdout + expect(cmd).to match(/logstash-input-stdin \(#{LogStashTestHelpers.latest_version(plugin_name)}\)/) + end + end +end diff --git a/integration/spec_helper.rb b/integration/spec_helper.rb new file mode 100644 index 00000000000..0076b8b2cdf --- /dev/null +++ b/integration/spec_helper.rb @@ -0,0 +1,37 @@ +# encoding: utf-8 +require_relative "support/integration_test_helpers" +require_relative "../lib/logstash/environment" +require "fileutils" + +if LogStash::Environment.windows? 
+ puts "[integration] Theses integration test are specifically made to be run on under linux/unix" + puts "[integration] Please see our windows version of the tests https://github.com/elastic/logstash/tree/master/test/windows" +end + +# Configure the test environment +source = File.expand_path(File.join(File.dirname(__FILE__), "..")) +integration_path = File.join(source, "integration_run") + +puts "[integration_spec] configure environment" + +if Dir.exists?(integration_path) + # We copy the current logstash into a temporary directory + # since the tests are a bit destructive + FileUtils.mkdir_p(integration_path) + rsync_cmd = "rsync -a --delete --exclude 'rspec' --exclude '#{File.basename(integration_path)}' --exclude 'integration_spec' --exclude '.git' #{source} #{integration_path}" + + puts "[integration_spec] Rsync source code into: #{integration_path}" + system(rsync_cmd) + puts "[integration_spec] Finish rsync" + + LOGSTASH_TEST_PATH = File.join(integration_path, "logstash") +else + LOGSTASH_TEST_PATH = File.expand_path(File.join(File.dirname(__FILE__), "..")) +end + +puts "[integration_spec] Running the test in #{LOGSTASH_TEST_PATH}" +puts "[integration_spec] Running specs" + +RSpec.configure do |config| + config.order = "random" +end diff --git a/integration/support/integration_test_helpers.rb b/integration/support/integration_test_helpers.rb new file mode 100644 index 00000000000..aad90f8f07a --- /dev/null +++ b/integration/support/integration_test_helpers.rb @@ -0,0 +1,89 @@ +# encoding: utf-8 +require "json" +require "open3" +require "open-uri" +require "stud/temporary" +require "fileutils" +require "bundler" +require "gems" + +class CommandResponse + attr_reader :stdin, :stdout, :stderr, :exit_status + + def initialize(cmd, stdin, stdout, stderr, exit_status) + @stdin = stdin + @stdout = stdout + @stderr = stderr + @exit_status = exit_status + @cmd = cmd + end + + def to_debug + "DEBUG: stdout: #{stdout}, stderr: #{stderr}, exit_status: #{exit_status}" + end + + def to_s + @cmd + end +end + +def command(cmd, path = nil) + # http://bundler.io/v1.3/man/bundle-exec.1.html + # see shelling out. + # + # Since most of the integration test are environment destructive + # its better to run them in a cloned directory. + path = LOGSTASH_TEST_PATH if path == nil + + Bundler.with_clean_env do + Dir.chdir(path) do + Open3.popen3(cmd) do |stdin, stdout, stderr, wait_thr| + CommandResponse.new(cmd, + stdin, + stdout.read.chomp, + stderr.read.chomp, + wait_thr.value.exitstatus) + end + end + end +end + +def gem_fetch(name) + tmp = Stud::Temporary.directory + FileUtils.mkdir_p(tmp) + + c = command("gem fetch #{name}", tmp) + + if c.exit_status == 1 + raise RuntimeError, "Can't fetch gem #{name}" + end + + return Dir.glob(File.join(tmp, "#{name}*.gem")).first +end + +# This is a bit hacky since JRuby doesn't support fork, +# we use popen4 which return the pid of the process and make sure we kill it +# after letting it run for a few seconds. 
+def launch_logstash(cmd, path = nil) + path = LOGSTASH_TEST_PATH if path == nil + pid = 0 + + Thread.new do + Bundler.with_clean_env do + Dir.chdir(path) do + pid, input, output, error = IO.popen4(cmd) #jruby only + end + end + end + sleep(30) + begin + Process.kill("INT", pid) + rescue + end +end + +module LogStashTestHelpers + def self.latest_version(name) + Gems.versions(name).first["number"] + end +end diff --git a/integration/support/sample.log b/integration/support/sample.log new file mode 100644 index 00000000000..8f304b59c45 --- /dev/null +++ b/integration/support/sample.log @@ -0,0 +1,50 @@ +83.149.9.216 - - [26/Aug/2014:21:13:42 +0000] "GET /presentations/logstash-monitorama-2013/images/kibana-search.png HTTP/1.1" 200 203023 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:42 +0000] "GET /presentations/logstash-monitorama-2013/images/kibana-dashboard3.png HTTP/1.1" 200 171717 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:44 +0000] "GET /presentations/logstash-monitorama-2013/plugin/highlight/highlight.js HTTP/1.1" 200 26185 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:44 +0000] "GET /presentations/logstash-monitorama-2013/plugin/zoom-js/zoom.js HTTP/1.1" 200 7697 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:45 +0000] "GET /presentations/logstash-monitorama-2013/plugin/notes/notes.js HTTP/1.1" 200 2892 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:42 +0000] "GET /presentations/logstash-monitorama-2013/images/sad-medic.png HTTP/1.1" 200 430406 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:45 +0000] "GET /presentations/logstash-monitorama-2013/css/fonts/Roboto-Bold.ttf HTTP/1.1" 200 38720 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:45 +0000] "GET /presentations/logstash-monitorama-2013/css/fonts/Roboto-Regular.ttf HTTP/1.1" 200 41820 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:45 +0000] "GET /presentations/logstash-monitorama-2013/images/frontend-response-codes.png HTTP/1.1" 200 52878 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" 
+83.149.9.216 - - [26/Aug/2014:21:13:43 +0000] "GET /presentations/logstash-monitorama-2013/images/kibana-dashboard.png HTTP/1.1" 200 321631 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:46 +0000] "GET /presentations/logstash-monitorama-2013/images/Dreamhost_logo.svg HTTP/1.1" 200 2126 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:43 +0000] "GET /presentations/logstash-monitorama-2013/images/kibana-dashboard2.png HTTP/1.1" 200 394967 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:46 +0000] "GET /presentations/logstash-monitorama-2013/images/apache-icon.gif HTTP/1.1" 200 8095 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:46 +0000] "GET /presentations/logstash-monitorama-2013/images/nagios-sms5.png HTTP/1.1" 200 78075 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:46 +0000] "GET /presentations/logstash-monitorama-2013/images/redis.png HTTP/1.1" 200 25230 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:47 +0000] "GET /presentations/logstash-monitorama-2013/images/elasticsearch.png HTTP/1.1" 200 8026 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:47 +0000] "GET /presentations/logstash-monitorama-2013/images/logstashbook.png HTTP/1.1" 200 54662 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:47 +0000] "GET /presentations/logstash-monitorama-2013/images/github-contributions.png HTTP/1.1" 200 34245 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:47 +0000] "GET /presentations/logstash-monitorama-2013/css/print/paper.css HTTP/1.1" 200 4254 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:47 +0000] "GET /presentations/logstash-monitorama-2013/images/1983_delorean_dmc-12-pic-38289.jpeg HTTP/1.1" 200 220562 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:46 +0000] "GET /presentations/logstash-monitorama-2013/images/simple-inputs-filters-outputs.jpg HTTP/1.1" 200 1168622 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:46 +0000] "GET /presentations/logstash-monitorama-2013/images/tiered-outputs-to-inputs.jpg HTTP/1.1" 200 1079983 "http://semicomplete.com/presentations/logstash-monitorama-2013/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +83.149.9.216 - - [26/Aug/2014:21:13:53 +0000] "GET /favicon.ico HTTP/1.1" 200 3638 "-" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1700.77 Safari/537.36" +24.236.252.67 - - [26/Aug/2014:21:14:10 +0000] "GET /favicon.ico HTTP/1.1" 200 3638 "-" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:26.0) Gecko/20100101 Firefox/26.0" +93.114.45.13 - - [26/Aug/2014:21:14:32 +0000] "GET /articles/dynamic-dns-with-dhcp/ HTTP/1.1" 200 18848 "http://www.google.ro/url?sa=t&rct=j&q=&esrc=s&source=web&cd=2&ved=0CCwQFjAB&url=http%3A%2F%2Fwww.semicomplete.com%2Farticles%2Fdynamic-dns-with-dhcp%2F&ei=W88AU4n9HOq60QXbv4GwBg&usg=AFQjCNEF1X4Rs52UYQyLiySTQxa97ozM4g&bvm=bv.61535280,d.d2k" "Mozilla/5.0 (X11; Linux x86_64; rv:25.0) Gecko/20100101 Firefox/25.0" +93.114.45.13 - - [26/Aug/2014:21:14:32 +0000] "GET /reset.css HTTP/1.1" 200 1015 "http://www.semicomplete.com/articles/dynamic-dns-with-dhcp/" "Mozilla/5.0 (X11; Linux x86_64; rv:25.0) Gecko/20100101 Firefox/25.0" +93.114.45.13 - - [26/Aug/2014:21:14:33 +0000] "GET /style2.css HTTP/1.1" 200 4877 "http://www.semicomplete.com/articles/dynamic-dns-with-dhcp/" "Mozilla/5.0 (X11; Linux x86_64; rv:25.0) Gecko/20100101 Firefox/25.0" +93.114.45.13 - - [26/Aug/2014:21:14:33 +0000] "GET /favicon.ico HTTP/1.1" 200 3638 "-" "Mozilla/5.0 (X11; Linux x86_64; rv:25.0) Gecko/20100101 Firefox/25.0" +93.114.45.13 - - [26/Aug/2014:21:14:33 +0000] "GET /images/jordan-80.png HTTP/1.1" 200 6146 "http://www.semicomplete.com/articles/dynamic-dns-with-dhcp/" "Mozilla/5.0 (X11; Linux x86_64; rv:25.0) Gecko/20100101 Firefox/25.0" +93.114.45.13 - - [26/Aug/2014:21:14:33 +0000] "GET /images/web/2009/banner.png HTTP/1.1" 200 52315 "http://www.semicomplete.com/style2.css" "Mozilla/5.0 (X11; Linux x86_64; rv:25.0) Gecko/20100101 Firefox/25.0" +66.249.73.135 - - [26/Aug/2014:21:15:03 +0000] "GET /blog/tags/ipv6 HTTP/1.1" 200 12251 "-" "Mozilla/5.0 (iPhone; CPU iPhone OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5376e Safari/8536.25 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)" +50.16.19.13 - - [26/Aug/2014:21:15:15 +0000] "GET /blog/tags/puppet?flav=rss20 HTTP/1.1" 200 14872 "http://www.semicomplete.com/blog/tags/puppet?flav=rss20" "Tiny Tiny RSS/1.11 (http://tt-rss.org/)" +66.249.73.185 - - [26/Aug/2014:21:15:23 +0000] "GET / HTTP/1.1" 200 37932 "-" "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)" +110.136.166.128 - - [26/Aug/2014:21:16:11 +0000] "GET /projects/xdotool/ HTTP/1.1" 200 12292 "http://www.google.com/url?sa=t&rct=j&q=&esrc=s&source=web&cd=5&cad=rja&sqi=2&ved=0CFYQFjAE&url=http%3A%2F%2Fwww.semicomplete.com%2Fprojects%2Fxdotool%2F&ei=6cwAU_bRHo6urAeI0YD4Ag&usg=AFQjCNE3V_aCf3-gfNcbS924S6jZ6FqffA&bvm=bv.61535280,d.bmk" "Mozilla/5.0 (Windows NT 6.2; 
WOW64; rv:28.0) Gecko/20100101 Firefox/28.0"
+46.105.14.53 - - [26/Aug/2014:21:16:17 +0000] "GET /blog/tags/puppet?flav=rss20 HTTP/1.1" 200 14872 "-" "UniversalFeedParser/4.2-pre-314-svn +http://feedparser.org/"
+110.136.166.128 - - [26/Aug/2014:21:16:22 +0000] "GET /reset.css HTTP/1.1" 200 1015 "http://www.semicomplete.com/projects/xdotool/" "Mozilla/5.0 (Windows NT 6.2; WOW64; rv:28.0) Gecko/20100101 Firefox/28.0"
+110.136.166.128 - - [26/Aug/2014:21:16:22 +0000] "GET /style2.css HTTP/1.1" 200 4877 "http://www.semicomplete.com/projects/xdotool/" "Mozilla/5.0 (Windows NT 6.2; WOW64; rv:28.0) Gecko/20100101 Firefox/28.0"
+110.136.166.128 - - [26/Aug/2014:21:16:22 +0000] "GET /favicon.ico HTTP/1.1" 200 3638 "-" "Mozilla/5.0 (Windows NT 6.2; WOW64; rv:28.0) Gecko/20100101 Firefox/28.0"
+110.136.166.128 - - [26/Aug/2014:21:16:22 +0000] "GET /images/jordan-80.png HTTP/1.1" 200 6146 "http://www.semicomplete.com/projects/xdotool/" "Mozilla/5.0 (Windows NT 6.2; WOW64; rv:28.0) Gecko/20100101 Firefox/28.0"
+123.125.71.35 - - [26/Aug/2014:21:16:31 +0000] "GET /blog/tags/release HTTP/1.1" 200 40693 "-" "Mozilla/5.0 (compatible; Baiduspider/2.0; +http://www.baidu.com/search/spider.html)"
+110.136.166.128 - - [26/Aug/2014:21:16:22 +0000] "GET /images/web/2009/banner.png HTTP/1.1" 200 52315 "http://www.semicomplete.com/style2.css" "Mozilla/5.0 (Windows NT 6.2; WOW64; rv:28.0) Gecko/20100101 Firefox/28.0"
+50.150.204.184 - - [26/Aug/2014:21:17:06 +0000] "GET /images/googledotcom.png HTTP/1.1" 200 65748 "http://www.google.com/search?q=https//:google.com&source=lnms&tbm=isch&sa=X&ei=4-r8UvDrKZOgkQe7x4CICw&ved=0CAkQ_AUoAA&biw=320&bih=441" "Mozilla/5.0 (Linux; U; Android 4.0.4; en-us; LG-MS770 Build/IMM76I) AppleWebKit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30"
+207.241.237.225 - - [26/Aug/2014:21:17:35 +0000] "GET /blog/tags/examples HTTP/1.0" 200 9208 "http://www.semicomplete.com/blog/tags/C" "Mozilla/5.0 (compatible; archive.org_bot +http://www.archive.org/details/archive.org_bot)"
+200.49.190.101 - - [26/Aug/2014:21:17:39 +0000] "GET /reset.css HTTP/1.1" 200 1015 "-" "-"
+200.49.190.100 - - [26/Aug/2014:21:17:37 +0000] "GET /blog/tags/web HTTP/1.1" 200 44019 "-" "QS304 Profile/MIDP-2.0 Configuration/CLDC-1.1"
+200.49.190.101 - - [26/Aug/2014:21:17:41 +0000] "GET /style2.css HTTP/1.1" 200 4877 "-" "-"
+200.49.190.101 - - [26/Aug/2014:21:17:48 +0000] "GET /images/jordan-80.png HTTP/1.1" 200 6146 "-" "QS304 Profile/MIDP-2.0 Configuration/CLDC-1.1"
+66.249.73.185 - - [26/Aug/2014:21:18:48 +0000] "GET /reset.css HTTP/1.1" 200 1015 "-" "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
+66.249.73.135 - - [26/Aug/2014:21:18:55 +0000] "GET /blog/tags/munin HTTP/1.1" 200 9746 "-" "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
+66.249.73.135 - - [26/Aug/2014:21:19:16 +0000] "GET /blog/tags/firefox?flav=rss20 HTTP/1.1" 200 16021 "-" "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
diff --git a/lib/bootstrap/bundler.rb b/lib/bootstrap/bundler.rb
index 23944d347fe..2948fe8aa29 100644
--- a/lib/bootstrap/bundler.rb
+++ b/lib/bootstrap/bundler.rb
@@ -27,6 +27,16 @@ def set_key(key, value, hash, file)
       value
     end
   end
+
+  # This patch makes rubygems fetch directly from the remote servers
+  # the dependencies it needs and might not have downloaded into a local
+  # repository. This basically enables the offline feature to work, as
+  # we remove the gems from the vendor directory before packaging.
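+  # A sketch of the offline flow this enables (the plugin name below is
+  # illustrative only):
+  #   bin/plugin pack                               # cache gems under vendor/cache
+  #   bin/plugin install --local logstash-input-foo # resolve from that cache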
+  ::Bundler::Source::Rubygems.module_exec do
+    def cached_gem(spec)
+      cached_built_in_gem(spec)
+    end
+  end
 end

 def setup!(options = {})
@@ -56,11 +66,19 @@ def setup!(options = {})

   # execute bundle install and capture any $stdout output. any raised exception in the process will be trapped
   # and returned. logs errors to $stdout.
-  # @param options [Hash] invoke options with default values, :max_tries => 10, :clean => false, :install => false, :update => false
-  # @param options[:update] must be either false or a String or an Array of String
+  # @param [Hash] options invoke options with default values, :max_tries => 10, :clean => false, :install => false, :update => false
+  # @option options [Integer] :max_tries The number of times bundler is going to try the installation before failing (default: 10)
+  # @option options [Boolean] :clean Whether to clean up unused gems (default: false)
+  # @option options [Boolean] :install Run the installation of a set of gems defined in a Gemfile (default: false)
+  # @option options [Boolean, String, Array] :update Update the current environment, must be either false or a String or an Array of String (default: false)
+  # @option options [Boolean] :local Do not attempt to fetch gems remotely and use the gem cache instead (default: false)
+  # @option options [Boolean] :package Locks and then caches all dependencies to be reused later on (default: false)
+  # @option options [Boolean] :all Also package dependencies defined with :git or :path (default: false)
+  # @option options [Array] :without Exclude gems that are part of the specified named group (default: [:development])
   # @return [String, Exception] the installation captured output and any raised exception or nil if none
   def invoke!(options = {})
-    options = {:max_tries => 10, :clean => false, :install => false, :update => false, :without => [:development]}.merge(options)
+    options = {:max_tries => 10, :clean => false, :install => false, :update => false, :local => false,
+               :all => false, :package => false, :without => [:development]}.merge(options)
     options[:without] = Array(options[:without])
     options[:update] = Array(options[:update]) if options[:update]
@@ -80,14 +98,13 @@ def invoke!(options = {})
     LogStash::Bundler.patch!

     # force Rubygems sources to our Gemfile sources
-    ::Gem.sources = options[:rubygems_source] if options[:rubygems_source]
+    ::Gem.sources = ::Gem::SourceList.from(options[:rubygems_source]) if options[:rubygems_source]

     ::Bundler.settings[:path] = LogStash::Environment::BUNDLE_DIR
     ::Bundler.settings[:gemfile] = LogStash::Environment::GEMFILE_PATH
     ::Bundler.settings[:without] = options[:without].join(":")

     try = 0
-    # capture_stdout also traps any raised exception and pass them back as the function return [output, exception]

     output, exception = capture_stdout do
       loop do
@@ -130,11 +147,19 @@ def bundler_arguments(options = {})
     if options[:install]
       arguments << "install"
       arguments << "--clean" if options[:clean]
+      if options[:local]
+        arguments << "--local"
+        arguments << "--no-prune" # From bundler docs: Don't remove stale gems from the cache.
+      end
     elsif options[:update]
       arguments << "update"
       arguments << options[:update]
+      arguments << "--local" if options[:local]
     elsif options[:clean]
       arguments << "clean"
+    elsif options[:package]
+      arguments << "package"
+      arguments << "--all" if options[:all]
     end

     arguments.flatten
diff --git a/lib/bootstrap/environment.rb b/lib/bootstrap/environment.rb
index 9f3e59f5b08..50f2211160a 100644
--- a/lib/bootstrap/environment.rb
+++ b/lib/bootstrap/environment.rb
@@ -16,6 +16,7 @@ module Environment
     BUNDLE_DIR = ::File.join(LOGSTASH_HOME, "vendor", "bundle")
     GEMFILE_PATH = ::File.join(LOGSTASH_HOME, "Gemfile")
     LOCAL_GEM_PATH = ::File.join(LOGSTASH_HOME, 'vendor', 'local_gems')
+    CACHE_PATH = File.join(LOGSTASH_HOME, "vendor", "cache")

     # @return [String] the ruby version string bundler uses to craft its gem path
     def gem_ruby_version
@@ -32,9 +33,26 @@ def ruby_engine
       RUBY_ENGINE
     end

+    def windows?
+      ::Gem.win_platform?
+    end
+
+    def jruby?
+      @jruby ||= !!(RUBY_PLATFORM == "java")
+    end
+
     def logstash_gem_home
       ::File.join(BUNDLE_DIR, ruby_engine, gem_ruby_version)
     end
+
+    def vendor_path(path)
+      return ::File.join(LOGSTASH_HOME, "vendor", path)
+    end
+
+    def pattern_path(path)
+      return ::File.join(LOGSTASH_HOME, "patterns", path)
+    end
   end
 end
diff --git a/lib/bootstrap/util/compress.rb b/lib/bootstrap/util/compress.rb
new file mode 100644
index 00000000000..79bd38461b4
--- /dev/null
+++ b/lib/bootstrap/util/compress.rb
@@ -0,0 +1,122 @@
+# encoding: utf-8
+require "zip"
+require "rubygems/package"
+require "fileutils"
+require "zlib"
+require "stud/temporary"
+
+module LogStash
+
+  class CompressError < StandardError; end
+
+  module Util
+    module Zip
+
+      extend self
+
+      # Extract a zip file into a destination directory.
+      # @param source [String] The location of the file to extract
+      # @param target [String] Where you want the file to be extracted
+      # @raise [CompressError] If the target directory already exists
+      def extract(source, target)
+        raise CompressError.new("Directory #{target} exists") if ::File.exist?(target)
+        ::Zip::File.open(source) do |zip_file|
+          zip_file.each do |file|
+            path = ::File.join(target, file.name)
+            FileUtils.mkdir_p(::File.dirname(path))
+            zip_file.extract(file, path)
+          end
+        end
+      end
+
+      # Compress a directory into a zip file
+      # @param dir [String] The directory to be compressed
+      # @param target [String] Destination to save the generated file
+      # @raise [CompressError] If the target file already exists
+      def compress(dir, target)
+        raise CompressError.new("File #{target} exists") if ::File.exist?(target)
+        ::Zip::File.open(target, ::Zip::File::CREATE) do |zipfile|
+          Dir.glob("#{dir}/**/*").each do |file|
+            path_in_zip = file.gsub("#{dir}/","")
+            zipfile.add(path_in_zip, file)
+          end
+        end
+      end
+    end
+
+    module Tar
+
+      extend self
+
+      # Extract a tar.gz file into a destination directory.
+      # @param file [String] The location of the file to extract
+      # @param target [String] Where you want the file to be extracted
+      # @raise [CompressError] If the target directory already exists
+      def extract(file, target)
+        raise CompressError.new("Directory #{target} exists") if ::File.exist?(target)
+
+        FileUtils.mkdir(target)
+        Zlib::GzipReader.open(file) do |gzip_file|
+          ::Gem::Package::TarReader.new(gzip_file) do |tar_file|
+            tar_file.each do |entry|
+              target_path = ::File.join(target, entry.full_name)
+
+              if entry.directory?
+                FileUtils.mkdir_p(target_path)
+              else # is a file to be extracted
+                ::File.open(target_path, "wb") { |f| f.write(entry.read) }
+              end
+            end
+          end
+        end
+      end
+
+      # Compress a directory into a tar.gz file
+      # @param dir [String] The directory to be compressed
+      # @param target [String] Destination to save the generated file
+      # @raise [CompressError] If the target file already exists
+      def compress(dir, target)
+        raise CompressError.new("File #{target} exists") if ::File.exist?(target)
+
+        Stud::Temporary.file do |tar_file|
+          ::Gem::Package::TarWriter.new(tar_file) do |tar|
+            Dir.glob("#{dir}/**/*").each do |file|
+              name = file.gsub("#{dir}/","")
+              stats = ::File.stat(file)
+              mode = stats.mode
+
+              if ::File.directory?(file)
+                tar.mkdir(name, mode)
+              else # is a file to be added
+                tar.add_file(name,mode) do |out|
+                  File.open(file, "rb") do |fd|
+                    chunk = nil
+                    size = 0
+                    size += out.write(chunk) while chunk = fd.read(16384)
+                    if stats.size != size
+                      raise "Failure to write the entire file (#{file}) to the tarball. Expected to write #{stats.size} bytes; actually wrote #{size}"
+                    end
+                  end
+                end
+              end
+            end
+          end
+
+          tar_file.rewind
+          gzip(target, tar_file)
+        end
+      end
+
+      # Compress a file using gzip
+      # @param path [String] Destination of the generated gzip file
+      # @param target_file [String] The file to be compressed
+      def gzip(path, target_file)
+        ::File.open(path, "wb") do |file|
+          gzip_file = ::Zlib::GzipWriter.new(file)
+          gzip_file.write(target_file.read)
+          gzip_file.close
+        end
+      end
+    end
+  end
+end
diff --git a/lib/logstash-event.rb b/lib/logstash-event.rb
deleted file mode 100644
index 0f44322944b..00000000000
--- a/lib/logstash-event.rb
+++ /dev/null
@@ -1,2 +0,0 @@
-# encoding: utf-8
-require "logstash/event"
diff --git a/lib/logstash/patches/bundler.rb b/lib/logstash/patches/bundler.rb
deleted file mode 100644
index 25d93a09148..00000000000
--- a/lib/logstash/patches/bundler.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# encoding: utf-8
-# Bundler monkey patches
-module ::Bundler
-  # Patch bundler to write a .lock file specific to the version of ruby.
-  # This keeps MRI/JRuby/RBX from conflicting over the Gemfile.lock updates
-  module SharedHelpers
-    def default_lockfile
-      ruby = "#{LogStash::Environment.ruby_engine}-#{LogStash::Environment.ruby_abi_version}"
-      Pathname.new("#{default_gemfile}.#{ruby}.lock")
-    end
-  end
-
-  # Patch to prevent Bundler to save a .bundle/config file in the root
-  # of the application
-  class Settings
-    def set_key(key, value, hash, file)
-      key = key_for(key)
-
-      unless hash[key] == value
-        hash[key] = value
-        hash.delete(key) if value.nil?
-      end
-
-      value
-    end
-  end
-
-  # Add the Bundler.reset! method which has been added in master but is not in 1.7.9.
-  class << self
-    unless self.method_defined?("reset!")
-      def reset!
- @definition = nil - end - end - end -end diff --git a/lib/logstash/pipeline.rb b/lib/logstash/pipeline.rb deleted file mode 100644 index b3081073704..00000000000 --- a/lib/logstash/pipeline.rb +++ /dev/null @@ -1,312 +0,0 @@ -# encoding: utf-8 -require "thread" -require "stud/interval" -require "concurrent" -require "logstash/namespace" -require "logstash/errors" -require "logstash/event" -require "logstash/config/file" -require "logstash/filters/base" -require "logstash/inputs/base" -require "logstash/outputs/base" -require "logstash/util/reporter" -require "logstash/config/cpu_core_strategy" -require "logstash/util/defaults_printer" - -class LogStash::Pipeline - attr_reader :inputs, :filters, :outputs, :input_to_filter, :filter_to_output - - def initialize(configstr) - @logger = Cabin::Channel.get(LogStash) - - @inputs = nil - @filters = nil - @outputs = nil - - grammar = LogStashConfigParser.new - @config = grammar.parse(configstr) - if @config.nil? - raise LogStash::ConfigurationError, grammar.failure_reason - end - # This will compile the config to ruby and evaluate the resulting code. - # The code will initialize all the plugins and define the - # filter and output methods. - code = @config.compile - # The config code is hard to represent as a log message... - # So just print it. - @logger.debug? && @logger.debug("Compiled pipeline code:\n#{code}") - begin - eval(code) - rescue => e - raise - end - - @input_to_filter = SizedQueue.new(20) - # if no filters, pipe inputs directly to outputs - @filter_to_output = filters? ? SizedQueue.new(20) : @input_to_filter - - @settings = { - "filter-workers" => LogStash::Config::CpuCoreStrategy.fifty_percent - } - - # @ready requires thread safety since it is typically polled from outside the pipeline thread - @ready = Concurrent::AtomicBoolean.new(false) - @input_threads = [] - end # def initialize - - def ready? - @ready.value - end - - def configure(setting, value) - if setting == "filter-workers" && value > 1 - # Abort if we have any filters that aren't threadsafe - plugins = @filters.select { |f| !f.threadsafe? }.collect { |f| f.class.config_name } - if !plugins.size.zero? - raise LogStash::ConfigurationError, "Cannot use more than 1 filter worker because the following plugins don't work with more than one worker: #{plugins.join(", ")}" - end - end - @settings[setting] = value - end - - def filters? - return @filters.any? - end - - def run - @logger.terminal(LogStash::Util::DefaultsPrinter.print(@settings)) - - begin - start_inputs - start_filters if filters? - start_outputs - ensure - # it is important to garantee @ready to be true after the startup sequence has been completed - # to potentially unblock the shutdown method which may be waiting on @ready to proceed - @ready.make_true - end - - @logger.info("Pipeline started") - @logger.terminal("Logstash startup completed") - - wait_inputs - - if filters? 
- shutdown_filters - wait_filters - flush_filters_to_output!(:final => true) - end - - shutdown_outputs - wait_outputs - - @logger.info("Pipeline shutdown complete.") - @logger.terminal("Logstash shutdown completed") - - # exit code - return 0 - end # def run - - def wait_inputs - @input_threads.each(&:join) - end - - def shutdown_filters - @flusher_thread.kill - @input_to_filter.push(LogStash::SHUTDOWN) - end - - def wait_filters - @filter_threads.each(&:join) if @filter_threads - end - - def shutdown_outputs - # nothing, filters will do this - @filter_to_output.push(LogStash::SHUTDOWN) - end - - def wait_outputs - # Wait for the outputs to stop - @output_threads.each(&:join) - end - - def start_inputs - moreinputs = [] - @inputs.each do |input| - if input.threadable && input.threads > 1 - (input.threads - 1).times do |i| - moreinputs << input.clone - end - end - end - @inputs += moreinputs - - @inputs.each do |input| - input.register - start_input(input) - end - end - - def start_filters - @filters.each(&:register) - to_start = @settings["filter-workers"] - @filter_threads = to_start.times.collect do - Thread.new { filterworker } - end - actually_started = @filter_threads.select(&:alive?).size - msg = "Worker threads expected: #{to_start}, worker threads started: #{actually_started}" - if actually_started < to_start - @logger.warn(msg) - else - @logger.info(msg) - end - @flusher_thread = Thread.new { Stud.interval(5) { @input_to_filter.push(LogStash::FLUSH) } } - end - - def start_outputs - @outputs.each(&:register) - @output_threads = [ - Thread.new { outputworker } - ] - end - - def start_input(plugin) - @input_threads << Thread.new { inputworker(plugin) } - end - - def inputworker(plugin) - LogStash::Util::set_thread_name("<#{plugin.class.config_name}") - begin - plugin.run(@input_to_filter) - rescue => e - # if plugin is stopping, ignore uncatched exceptions and exit worker - if plugin.stop? - @logger.debug("Input plugin raised exception during shutdown, ignoring it.", - :plugin => plugin.class.config_name, :exception => e, - :backtrace => e.backtrace) - return - end - - # otherwise, report error and restart - if @logger.debug? - @logger.error(I18n.t("logstash.pipeline.worker-error-debug", - :plugin => plugin.inspect, :error => e.to_s, - :exception => e.class, - :stacktrace => e.backtrace.join("\n"))) - else - @logger.error(I18n.t("logstash.pipeline.worker-error", - :plugin => plugin.inspect, :error => e)) - end - - # Assuming the failure that caused this exception is transient, - # let's sleep for a bit and execute #run again - sleep(1) - retry - ensure - plugin.do_close - end - end # def inputworker - - def filterworker - LogStash::Util.set_thread_name("|worker") - begin - while true - event = @input_to_filter.pop - - case event - when LogStash::Event - # filter_func returns all filtered events, including cancelled ones - filter_func(event).each { |e| @filter_to_output.push(e) unless e.cancelled? } - when LogStash::FlushEvent - # handle filter flushing here so that non threadsafe filters (thus only running one filterworker) - # don't have to deal with thread safety implementing the flush method - flush_filters_to_output! 
- when LogStash::ShutdownEvent - # pass it down to any other filterworker and stop this worker - @input_to_filter.push(event) - break - end - end - rescue Exception => e - # Plugins authors should manage their own exceptions in the plugin code - # but if an exception is raised up to the worker thread they are considered - # fatal and logstash will not recover from this situation. - # - # Users need to check their configuration or see if there is a bug in the - # plugin. - @logger.error("Exception in filterworker, the pipeline stopped processing new events, please check your filter configuration and restart Logstash.", - "exception" => e, "backtrace" => e.backtrace) - raise - ensure - @filters.each(&:do_close) - end - end # def filterworker - - def outputworker - LogStash::Util.set_thread_name(">output") - @outputs.each(&:worker_setup) - - while true - event = @filter_to_output.pop - break if event == LogStash::SHUTDOWN - output_func(event) - end - ensure - @outputs.each do |output| - output.worker_plugins.each(&:do_close) - end - end # def outputworker - - # initiate the pipeline shutdown sequence - # this method is intended to be called from outside the pipeline thread - # @param before_stop [Proc] code block called before performing stop operation on input plugins - def shutdown(&before_stop) - # shutdown can only start once the pipeline has completed its startup. - # avoid potential race conditoon between the startup sequence and this - # shutdown method which can be called from another thread at any time - sleep(0.1) while !ready? - - # TODO: should we also check against calling shutdown multiple times concurently? - - before_stop.call if block_given? - - @inputs.each(&:do_stop) - end # def shutdown - - def plugin(plugin_type, name, *args) - args << {} if args.empty? - klass = LogStash::Plugin.lookup(plugin_type, name) - return klass.new(*args) - end - - # for backward compatibility in devutils for the rspec helpers, this method is not used - # in the pipeline anymore. - def filter(event, &block) - # filter_func returns all filtered events, including cancelled ones - filter_func(event).each { |e| block.call(e) } - end - - # perform filters flush and yeild flushed event to the passed block - # @param options [Hash] - # @option options [Boolean] :final => true to signal a final shutdown flush - def flush_filters(options = {}, &block) - flushers = options[:final] ? @shutdown_flushers : @periodic_flushers - - flushers.each do |flusher| - flusher.call(options, &block) - end - end - - # perform filters flush into the output queue - # @param options [Hash] - # @option options [Boolean] :final => true to signal a final shutdown flush - def flush_filters_to_output!(options = {}) - flush_filters(options) do |event| - unless event.cancelled? - @logger.debug? and @logger.debug("Pushing flushed events", :event => event) - @filter_to_output.push(event) - end - end - end # flush_filters_to_output! 
- -end # class Pipeline diff --git a/lib/logstash/util/reporter.rb b/lib/logstash/util/reporter.rb deleted file mode 100644 index 4d983a25e3e..00000000000 --- a/lib/logstash/util/reporter.rb +++ /dev/null @@ -1,28 +0,0 @@ -# encoding: utf-8 -class InflightEventsReporter - def self.logger=(logger) - @logger = logger - end - - def self.start(input_to_filter, filter_to_output, outputs) - Thread.new do - loop do - sleep 5 - report(input_to_filter, filter_to_output, outputs) - end - end - end - - def self.report(input_to_filter, filter_to_output, outputs) - report = { - "input_to_filter" => input_to_filter.size, - "filter_to_output" => filter_to_output.size, - "outputs" => [] - } - outputs.each do |output| - next unless output.worker_queue && output.worker_queue.size > 0 - report["outputs"] << [output.inspect, output.worker_queue.size] - end - @logger.warn ["INFLIGHT_EVENTS_REPORT", Time.now.iso8601, report] - end -end diff --git a/lib/logstash/util/worker_threads_default_printer.rb b/lib/logstash/util/worker_threads_default_printer.rb deleted file mode 100644 index c8b086635cb..00000000000 --- a/lib/logstash/util/worker_threads_default_printer.rb +++ /dev/null @@ -1,17 +0,0 @@ -# encoding: utf-8 -require "logstash/namespace" -require "logstash/util" - -# This class exists to format the settings for default worker threads -module LogStash module Util class WorkerThreadsDefaultPrinter - - def initialize(settings) - @setting = settings.fetch('filter-workers', 1) - end - - def visit(collector) - collector.push "Filter workers: #{@setting}" - end - -end end end - diff --git a/lib/pluginmanager/install.rb b/lib/pluginmanager/install.rb index bbc486ab236..27c865ca68e 100644 --- a/lib/pluginmanager/install.rb +++ b/lib/pluginmanager/install.rb @@ -10,6 +10,7 @@ class LogStash::PluginManager::Install < LogStash::PluginManager::Command option "--version", "VERSION", "version of the plugin to install" option "--[no-]verify", :flag, "verify plugin validity before installation", :default => true option "--development", :flag, "install all development dependencies of currently installed plugins", :default => false + option "--local", :flag, "force local-only plugin installation. see bin/plugin package|unpack", :default => false # the install logic below support installing multiple plugins with each a version specification # but the argument parsing does not support it for now so currently if specifying --version only @@ -23,7 +24,7 @@ def execute gems = plugins_development_gems else gems = plugins_gems - verify_remote!(gems) if verify? + verify_remote!(gems) if !local? && verify? end install_gems_list!(gems) @@ -45,12 +46,20 @@ def validate_cli_options! 
 # Check if the specified gems contains
 # the logstash `metadata`
 def verify_remote!(gems)
+  options = { :rubygems_source => gemfile.gemset.sources }
   gems.each do |plugin, version|
     puts("Validating #{[plugin, version].compact.join("-")}")
-    signal_error("Installation aborted, verification failed for #{plugin} #{version}") unless LogStash::PluginManager.logstash_plugin?(plugin, version)
+    next if validate_plugin(plugin, version, options)
+    signal_error("Installation aborted, verification failed for #{plugin} #{version}")
   end
 end

+def validate_plugin(plugin, version, options)
+  LogStash::PluginManager.logstash_plugin?(plugin, version, options)
+rescue SocketError
+  false
+end
+
 def plugins_development_gems
   # Get currently defined gems and their dev dependencies
   specs = []
@@ -89,6 +98,7 @@ def install_gems_list!(install_list)
   bundler_options = {:install => true}
   bundler_options[:without] = [] if development?
   bundler_options[:rubygems_source] = gemfile.gemset.sources
+  bundler_options[:local] = true if local?

   output = LogStash::Bundler.invoke!(bundler_options)
diff --git a/lib/pluginmanager/main.rb b/lib/pluginmanager/main.rb
index a2004e4d6be..e14a131c84e 100644
--- a/lib/pluginmanager/main.rb
+++ b/lib/pluginmanager/main.rb
@@ -18,6 +18,8 @@ module PluginManager
 require "pluginmanager/uninstall"
 require "pluginmanager/list"
 require "pluginmanager/update"
+require "pluginmanager/pack"
+require "pluginmanager/unpack"

 module LogStash
   module PluginManager
@@ -27,6 +29,8 @@ class Main < Clamp::Command
     subcommand "install", "Install a plugin", LogStash::PluginManager::Install
     subcommand "uninstall", "Uninstall a plugin", LogStash::PluginManager::Uninstall
     subcommand "update", "Update a plugin", LogStash::PluginManager::Update
+    subcommand "pack", "Package currently installed plugins", LogStash::PluginManager::Pack
+    subcommand "unpack", "Unpack packaged plugins", LogStash::PluginManager::Unpack
     subcommand "list", "List all installed plugins", LogStash::PluginManager::List
   end
 end
diff --git a/lib/pluginmanager/pack.rb b/lib/pluginmanager/pack.rb
new file mode 100644
index 00000000000..18b46e18511
--- /dev/null
+++ b/lib/pluginmanager/pack.rb
@@ -0,0 +1,43 @@
+# encoding: utf-8
+require_relative "pack_command"
+
+class LogStash::PluginManager::Pack < LogStash::PluginManager::PackCommand
+  option "--tgz", :flag, "compress package as a tar.gz file", :default => !LogStash::Environment.windows?
+  option "--zip", :flag, "compress package as a zip file", :default => LogStash::Environment.windows?
+  option "--[no-]clean", :flag, "clean up the generated dump of plugins", :default => true
+  option "--overwrite", :flag, "overwrite a previously generated package file", :default => false
+
+  def execute
+    puts("Packaging plugins for offline usage")
+
+    validate_target_file
+    LogStash::Bundler.invoke!({:package => true, :all => true})
+    archive_manager.compress(LogStash::Environment::CACHE_PATH, target_file)
+    FileUtils.rm_rf(LogStash::Environment::CACHE_PATH) if clean?
+
+    puts("Generated at #{target_file}")
+  end
+
+  private
+
+  def delete_target_file?
+    return true if overwrite?
+    puts("File #{target_file} exists, do you want to overwrite it? (Y/N)")
+    STDIN.gets.strip.downcase == "y"
+  end
+
+  def validate_target_file
+    if File.exist?(target_file)
+      if delete_target_file?
+        File.delete(target_file)
+      else
+        signal_error("Package creation cancelled, a previously generated package exists at location: #{target_file}, move this file to a safe place and run the command again")
+      end
+    end
+  end
+
+  def target_file
+    target_file = File.join(LogStash::Environment::LOGSTASH_HOME, "plugins_package")
+    "#{target_file}#{file_extension}"
+  end
+end
diff --git a/lib/pluginmanager/pack_command.rb b/lib/pluginmanager/pack_command.rb
new file mode 100644
index 00000000000..2409b212f97
--- /dev/null
+++ b/lib/pluginmanager/pack_command.rb
@@ -0,0 +1,13 @@
+# encoding: utf-8
+require "bootstrap/util/compress"
+require "fileutils"
+
+class LogStash::PluginManager::PackCommand < LogStash::PluginManager::Command
+  def archive_manager
+    zip? ? LogStash::Util::Zip : LogStash::Util::Tar
+  end
+
+  def file_extension
+    zip? ? ".zip" : ".tar.gz"
+  end
+end
diff --git a/lib/pluginmanager/unpack.rb b/lib/pluginmanager/unpack.rb
new file mode 100644
index 00000000000..4e7da6fb94e
--- /dev/null
+++ b/lib/pluginmanager/unpack.rb
@@ -0,0 +1,35 @@
+# encoding: utf-8
+require_relative "pack_command"
+
+class LogStash::PluginManager::Unpack < LogStash::PluginManager::PackCommand
+  option "--tgz", :flag, "unpack a packaged tar.gz file", :default => !LogStash::Environment.windows?
+  option "--zip", :flag, "unpack a packaged zip file", :default => LogStash::Environment.windows?
+
+  parameter "file", "the package file name", :attribute_name => :package_file, :required => true
+
+  def execute
+    puts("Unpacking #{package_file}")
+
+    FileUtils.rm_rf(LogStash::Environment::CACHE_PATH)
+    validate_cache_location
+    archive_manager.extract(package_file, LogStash::Environment::CACHE_PATH)
+    puts("Unpacked at #{LogStash::Environment::CACHE_PATH}")
+    puts("The unpacked plugins can now be installed in local-only mode using bin/plugin install --local [plugin name]")
+  end
+
+  private
+
+  def validate_cache_location
+    cache_location = LogStash::Environment::CACHE_PATH
+    if File.exist?(cache_location)
+      puts("Directory #{cache_location} is going to be overwritten, do you want to continue? (Y/N)")
+      override = STDIN.gets.strip.downcase == "y"
+      if override
+        FileUtils.rm_rf(cache_location)
+      else
+        puts("Unpack cancelled: file #{cache_location} already exists, please delete or move it")
+        exit
+      end
+    end
+  end
+end
diff --git a/lib/pluginmanager/update.rb b/lib/pluginmanager/update.rb
index 3fc8b6b12a7..64c9767ea57 100644
--- a/lib/pluginmanager/update.rb
+++ b/lib/pluginmanager/update.rb
@@ -8,6 +8,8 @@ class LogStash::PluginManager::Update < LogStash::PluginManager::Command
   REJECTED_OPTIONS = [:path, :git, :github]

   parameter "[PLUGIN] ...", "Plugin name(s) to upgrade to latest version", :attribute_name => :plugins_arg
+  option "--[no-]verify", :flag, "verify plugin validity before installation", :default => true
+  option "--local", :flag, "force local-only plugin update. see bin/plugin package|unpack", :default => false

   def execute
     local_gems = gemfile.locally_installed_gems
@@ -21,7 +23,6 @@ def execute
       warn_local_gems(plugins_with_path)
     end
-
     update_gems!
   end

@@ -41,10 +42,12 @@ def update_gems!
   # remove any version constrain from the Gemfile so the plugin(s) can be updated to latest version
   # calling update without requiremend will remove any previous requirements
   plugins = plugins_to_update(previous_gem_specs_map)
+  # Skip the major version validation when using a local cache, as we may be
+  # running without an internet connection.
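+  # i.e. a plugin is kept for update when --local is set, when --no-verify
+  # is set, or when validates_version(plugin.name) below passes.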
   filtered_plugins = plugins.map { |plugin| gemfile.find(plugin) }
     .compact
     .reject { |plugin| REJECTED_OPTIONS.any? { |key| plugin.options.has_key?(key) } }
-    .select { |plugin| validates_version(plugin.name) }
+    .select { |plugin| local? || (verify? ? validates_version(plugin.name) : true) }
     .each  { |plugin| gemfile.update(plugin.name) }

   # force a disk sync before running bundler
@@ -54,9 +57,10 @@ def update_gems!

   # any errors will be logged to $stderr by invoke!
   # Bundler cannot update and clean gems in one operation so we have to call the CLI twice.
-  output = LogStash::Bundler.invoke!(:update => plugins)
+  options = {:update => plugins, :rubygems_source => gemfile.gemset.sources}
+  options[:local] = true if local?
+  output = LogStash::Bundler.invoke!(options)
   output = LogStash::Bundler.invoke!(:clean => true)
-
   display_updated_plugins(previous_gem_specs_map)
 rescue => exception
   gemfile.restore!
diff --git a/lib/pluginmanager/util.rb b/lib/pluginmanager/util.rb
index 78bb7d38926..149ff6256d4 100644
--- a/lib/pluginmanager/util.rb
+++ b/lib/pluginmanager/util.rb
@@ -2,12 +2,18 @@
 require "rubygems/package"

 module LogStash::PluginManager
+
+  class ValidationError < StandardError; end
+
   # check for valid logstash plugin gem name & version or .gem file, logs errors to $stdout
   # uses Rubygems API and will remotely validated agains the current Gem.sources
   # @param plugin [String] plugin name or .gem file path
   # @param version [String] gem version requirement string
+  # @param [Hash] options the options used to setup external components
+  # @option options [Array] :rubygems_source Gem sources to look up during verification
   # @return [Boolean] true if valid logstash plugin gem name & version or a .gem file
-  def self.logstash_plugin?(plugin, version = nil)
+  def self.logstash_plugin?(plugin, version = nil, options={})
+
     if plugin_file?(plugin)
       begin
         return logstash_plugin_gem_spec?(plugin_file_spec(plugin))
@@ -18,6 +24,7 @@ def self.logstash_plugin?(plugin, version = nil)
       end
     else
       dep = Gem::Dependency.new(plugin, version || Gem::Requirement.default)
+      Gem.sources = Gem::SourceList.from(options[:rubygems_source]) if options[:rubygems_source]
       specs, errors = Gem::SpecFetcher.fetcher.spec_for_dependency(dep)

       # dump errors
@@ -46,6 +53,7 @@ def self.fetch_latest_version_info(plugin, options={})
   require "gems"
   exclude_prereleases = options.fetch(:pre, false)
   versions = Gems.versions(plugin)
+  raise ValidationError.new("Something went wrong with the validation. You can skip the validation with the --no-verify option") if !versions.is_a?(Array) || versions.empty?
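+  # Note: when the rubygems.org API call fails (e.g. no network), Gems.versions
+  # may return something other than a populated Array; the guard above turns
+  # that case into a ValidationError instead of a crash further down.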
   versions = versions.select { |version| !version["prerelease"] } if !exclude_prereleases
   versions.first
 end
diff --git a/logstash-core-event-java/.gitignore b/logstash-core-event-java/.gitignore
new file mode 100644
index 00000000000..a453cb95034
--- /dev/null
+++ b/logstash-core-event-java/.gitignore
@@ -0,0 +1,9 @@
+*.class
+
+# build dirs
+build
+.gradle
+
+# Intellij
+.idea
+*.iml
diff --git a/logstash-core-event-java/README.md b/logstash-core-event-java/README.md
new file mode 100644
index 00000000000..7b12d19f135
--- /dev/null
+++ b/logstash-core-event-java/README.md
@@ -0,0 +1,63 @@
+# logstash-core-event-java
+
+## dev install
+
+1- build the code with
+
+```
+$ cd logstash-core-event-java
+$ gradle build
+```
+
+A bunch of warnings are expected; the build should end with:
+
+```
+BUILD SUCCESSFUL
+```
+
+2- update the root logstash `Gemfile` to use this gem with:
+
+```
+# gem "logstash-core-event", "x.y.z", :path => "./logstash-core-event"
+gem "logstash-core-event-java", "x.y.z", :path => "./logstash-core-event-java"
+```
+
+3- update `logstash-core/logstash-core.gemspec` with:
+
+```
+# gem.add_runtime_dependency "logstash-core-event", "x.y.z"
+gem.add_runtime_dependency "logstash-core-event-java", "x.y.z"
+```
+
+4- install:
+
+```
+$ bin/bundle
+```
+
+5- install core plugins for tests:
+
+```
+$ rake test:install-core
+```
+
+## specs
+
+```
+$ bin/rspec spec
+$ bin/rspec logstash-core/spec
+$ bin/rspec logstash-core-event/spec
+$ bin/rspec logstash-core-event-java/spec
+```
+
+or
+
+```
+$ rake test:core
+```
+
+also
+
+```
+$ rake test:plugins
+```
\ No newline at end of file
diff --git a/logstash-core-event-java/build.gradle b/logstash-core-event-java/build.gradle
new file mode 100644
index 00000000000..6e4c299a79d
--- /dev/null
+++ b/logstash-core-event-java/build.gradle
@@ -0,0 +1,104 @@
+buildscript {
+  repositories {
+    mavenLocal()
+    mavenCentral()
+    jcenter()
+  }
+  dependencies {
+    classpath 'net.saliman:gradle-cobertura-plugin:2.2.8'
+    classpath 'com.github.jengelman.gradle.plugins:shadow:1.2.2'
+  }
+}
+
+//allprojects {
+
+  repositories {
+    mavenLocal()
+    mavenCentral()
+  }
+
+  gradle.projectsEvaluated {
+    tasks.withType(JavaCompile) {
+      options.compilerArgs << "-Xlint:unchecked" << "-Xlint:deprecation"
+    }
+  }
+
+//}
+
+//subprojects { project ->
+
+  apply plugin: 'java'
+  apply plugin: 'idea'
+  apply plugin: 'com.github.johnrengelman.shadow'
+
+  group = 'org.logstash'
+
+  project.sourceCompatibility = 1.7
+
+  task sourcesJar(type: Jar, dependsOn:classes) {
+    from sourceSets.main.allSource
+    classifier 'sources'
+    extension 'jar'
+  }
+
+  task javadocJar(type: Jar, dependsOn:javadoc) {
+    from javadoc.destinationDir
+    classifier 'javadoc'
+    extension 'jar'
+  }
+
+  configurations.create('sources')
+  configurations.create('javadoc')
+  configurations.archives {
+    extendsFrom configurations.sources
+    extendsFrom configurations.javadoc
+  }
+
+  artifacts {
+    sources(sourcesJar) {
+      // Weird Gradle quirk where type will be used for the extension, but only for sources
+      type 'jar'
+    }
+
+    javadoc(javadocJar) {
+      type 'javadoc'
+    }
+  }
+
+  configurations {
+    provided
+  }
+
+  project.sourceSets {
+    main.compileClasspath += project.configurations.provided
+    main.runtimeClasspath += project.configurations.provided
+    test.compileClasspath += project.configurations.provided
+    test.runtimeClasspath += project.configurations.provided
+  }
+  project.javadoc.classpath += project.configurations.provided
+
+  idea {
+    module {
+      scopes.PROVIDED.plus += [ project.configurations.provided ]
+    }
+  }
+
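+  // Note: the `provided` configuration above mimics Maven's provided scope:
+  // jruby-core is needed at compile time but is expected to be supplied by the
+  // JRuby runtime that loads this jar, so the intent is to keep it out of the
+  // packaged artifact.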
dependencies { + compile 'org.codehaus.jackson:jackson-mapper-asl:1.9.13' + compile 'org.codehaus.jackson:jackson-core-asl:1.9.13' + compile 'joda-time:joda-time:2.8.2' + compile 'com.google.guava:guava:18.0' + compile 'org.slf4j:slf4j-api:1.7.12' + provided 'org.jruby:jruby-core:1.7.22' + testCompile 'org.testng:testng:6.9.6' + testCompile 'org.mockito:mockito-core:1.10.19' + } + +//} + +// See http://www.gradle.org/docs/current/userguide/gradle_wrapper.html +task wrapper(type: Wrapper) { + description = 'Install Gradle wrapper' + gradleVersion = '2.7' +} + diff --git a/logstash-core-event-java/gradle.properties b/logstash-core-event-java/gradle.properties new file mode 100644 index 00000000000..b5cdaba6a69 --- /dev/null +++ b/logstash-core-event-java/gradle.properties @@ -0,0 +1 @@ +VERSION=0.0.1-SNAPSHOT diff --git a/logstash-core-event-java/gradle/wrapper/gradle-wrapper.jar b/logstash-core-event-java/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 00000000000..085a1cdc27d Binary files /dev/null and b/logstash-core-event-java/gradle/wrapper/gradle-wrapper.jar differ diff --git a/logstash-core-event-java/gradle/wrapper/gradle-wrapper.properties b/logstash-core-event-java/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000000..bd7deb6d61c --- /dev/null +++ b/logstash-core-event-java/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Tue Mar 17 11:58:32 PDT 2015 +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-2.3-bin.zip diff --git a/logstash-core-event-java/gradlew b/logstash-core-event-java/gradlew new file mode 100755 index 00000000000..91a7e269e19 --- /dev/null +++ b/logstash-core-event-java/gradlew @@ -0,0 +1,164 @@ +#!/usr/bin/env bash + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS="" + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn ( ) { + echo "$*" +} + +die ( ) { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; +esac + +# For Cygwin, ensure paths are in UNIX format before anything is touched. +if $cygwin ; then + [ -n "$JAVA_HOME" ] && JAVA_HOME=`cygpath --unix "$JAVA_HOME"` +fi + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >&- +APP_HOME="`pwd -P`" +cd "$SAVED" >&- + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! 
-x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? -ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin, switch paths to Windows format before running java +if $cygwin ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=$((i+1)) + done + case $i in + (0) set -- ;; + (1) set -- "$args0" ;; + (2) set -- "$args0" "$args1" ;; + (3) set -- "$args0" "$args1" "$args2" ;; + (4) set -- "$args0" "$args1" "$args2" "$args3" ;; + (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules +function splitJvmOpts() { + JVM_OPTS=("$@") +} +eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS +JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME" + +exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@" diff --git a/logstash-core-event-java/gradlew.bat b/logstash-core-event-java/gradlew.bat new file mode 100644 index 00000000000..aec99730b4e --- /dev/null +++ b/logstash-core-event-java/gradlew.bat @@ -0,0 +1,90 @@ +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem 
########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS= + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windowz variants + +if not "%OS%" == "Windows_NT" goto win9xME_args +if "%@eval[2+2]" == "4" goto 4NT_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* +goto execute + +:4NT_args +@rem Get arguments from the 4NT Shell from JP Software +set CMD_LINE_ARGS=%$ + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! 
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/logstash-core-event-java/lib/logstash-core-event-java.rb b/logstash-core-event-java/lib/logstash-core-event-java.rb
new file mode 100644
index 00000000000..29b487aa192
--- /dev/null
+++ b/logstash-core-event-java/lib/logstash-core-event-java.rb
@@ -0,0 +1 @@
+require "logstash-core-event-java/logstash-core-event-java"
\ No newline at end of file
diff --git a/logstash-core-event-java/lib/logstash-core-event-java/logstash-core-event-java.rb b/logstash-core-event-java/lib/logstash-core-event-java/logstash-core-event-java.rb
new file mode 100644
index 00000000000..7bf90a9bc0b
--- /dev/null
+++ b/logstash-core-event-java/lib/logstash-core-event-java/logstash-core-event-java.rb
@@ -0,0 +1,31 @@
+# encoding: utf-8
+
+require "java"
+
+module LogStash
+end
+
+# TODO: (colin) integrate jar loading with gradle and verify dev vs prod environment setups
+
+# insert all jars in this directory into CLASSPATH
+Dir.glob(File.join(File.expand_path("..", __FILE__), "*.jar")).each do |jar|
+  $CLASSPATH << jar unless $CLASSPATH.include?(jar)
+end
+
+# TODO: (colin) correctly handle dev env build/ dir and local jar
+
+# local dev setup
+classes_dir = File.expand_path("../../../build/classes/main", __FILE__)
+
+if File.directory?(classes_dir)
+  # if in local dev setup, add target to classpath
+  $CLASSPATH << classes_dir unless $CLASSPATH.include?(classes_dir)
+else
+  # otherwise use included jar
+  raise("TODO build dir not found and no jar file")
+end
+
+require "jruby_event_ext"
+require "jruby_timestamp_ext"
+require "logstash/event"
+require "logstash/timestamp"
\ No newline at end of file
diff --git a/logstash-core-event-java/lib/logstash-core-event-java/version.rb b/logstash-core-event-java/lib/logstash-core-event-java/version.rb
new file mode 100644
index 00000000000..6c297b7c2fd
--- /dev/null
+++ b/logstash-core-event-java/lib/logstash-core-event-java/version.rb
@@ -0,0 +1,8 @@
+# encoding: utf-8
+
+# The version of the logstash core event java gem.
+#
+# Note to authors: this should not include dashes because 'gem' barfs if
+# you include a dash in the version string.
+
+LOGSTASH_CORE_EVENT_JAVA_VERSION = "3.0.0.dev"
diff --git a/logstash-core-event-java/lib/logstash-core-event.rb b/logstash-core-event-java/lib/logstash-core-event.rb
new file mode 100644
index 00000000000..29b487aa192
--- /dev/null
+++ b/logstash-core-event-java/lib/logstash-core-event.rb
@@ -0,0 +1 @@
+require "logstash-core-event-java/logstash-core-event-java"
\ No newline at end of file
diff --git a/logstash-core-event-java/lib/logstash/event.rb b/logstash-core-event-java/lib/logstash/event.rb
new file mode 100644
index 00000000000..a2e6950885d
--- /dev/null
+++ b/logstash-core-event-java/lib/logstash/event.rb
@@ -0,0 +1,24 @@
+# encoding: utf-8
+
+require "logstash/namespace"
+require "logstash/json"
+
+# transient pipeline events for normal in-flow signaling as opposed to
+# flow altering exceptions. for now having base classes is adequate and
+# in the future it might be necessary to refactor using something like a
+# BaseEvent class to have a common interface for all pipeline events to
+# support eventual queueing persistence, for example. TBD.
+class LogStash::ShutdownEvent; end
+class LogStash::FlushEvent; end
+
+module LogStash
+  FLUSH = LogStash::FlushEvent.new
+
+  # LogStash::SHUTDOWN is used by plugins
+  SHUTDOWN = LogStash::ShutdownEvent.new
+end
+
+# for backward compatibility, require "logstash/event" is used in a lot of places so let's bootstrap the
+# Java code loading from here.
+# TODO: (colin) I think we should mass replace require "logstash/event" with require "logstash-core-event"
+require "logstash-core-event"
\ No newline at end of file
diff --git a/logstash-core-event-java/lib/logstash/timestamp.rb b/logstash-core-event-java/lib/logstash/timestamp.rb
new file mode 100644
index 00000000000..0a4661a2d19
--- /dev/null
+++ b/logstash-core-event-java/lib/logstash/timestamp.rb
@@ -0,0 +1,28 @@
+# encoding: utf-8
+
+require "logstash/namespace"
+require "logstash-core-event"
+
+module LogStash
+  class TimestampParserError < StandardError; end
+
+  class Timestamp
+    include Comparable
+
+    # TODO (colin) implement in Java
+    def <=>(other)
+      self.time <=> other.time
+    end
+
+    # TODO (colin) implement in Java
+    def +(other)
+      self.time + other
+    end
+
+    # TODO (colin) implement in Java
+    def -(value)
+      self.time - (value.is_a?(Timestamp) ? value.time : value)
+    end
+
+  end
+end
diff --git a/logstash-core-event-java/logstash-core-event-java.gemspec b/logstash-core-event-java/logstash-core-event-java.gemspec
new file mode 100644
index 00000000000..77667f66e40
--- /dev/null
+++ b/logstash-core-event-java/logstash-core-event-java.gemspec
@@ -0,0 +1,23 @@
+# -*- encoding: utf-8 -*-
+lib = File.expand_path('../lib', __FILE__)
+$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+require 'logstash-core-event-java/version'
+
+Gem::Specification.new do |gem|
+  gem.authors       = ["Jordan Sissel", "Pete Fritchman", "Elasticsearch"]
+  gem.email         = ["jls@semicomplete.com", "petef@databits.net", "info@elasticsearch.com"]
+  gem.description   = %q{The core event component of logstash, the scalable log and event management tool}
+  gem.summary       = %q{logstash-core-event-java - The core event component of logstash}
+  gem.homepage      = "http://www.elastic.co/guide/en/logstash/current/index.html"
+  gem.license       = "Apache License (2.0)"
+
+  gem.files         = Dir.glob(["logstash-core-event-java.gemspec", "lib/**/*.rb", "spec/**/*.rb"])
+  gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
+  gem.name          = "logstash-core-event-java"
+  gem.require_paths = ["lib"]
+  gem.version       = LOGSTASH_CORE_EVENT_JAVA_VERSION
+
+  if RUBY_PLATFORM == 'java'
+    gem.platform = RUBY_PLATFORM
+  end
+end
diff --git a/logstash-core-event-java/settings.gradle b/logstash-core-event-java/settings.gradle
new file mode 100644
index 00000000000..3885bfa1686
--- /dev/null
+++ b/logstash-core-event-java/settings.gradle
@@ -0,0 +1,2 @@
+rootProject.name = 'logstash-core-event-java'
+
diff --git a/logstash-core-event-java/spec/event_spec.rb b/logstash-core-event-java/spec/event_spec.rb
new file mode 100644
index 00000000000..a4866ad21db
--- /dev/null
+++ b/logstash-core-event-java/spec/event_spec.rb
@@ -0,0 +1,138 @@
+# encoding: utf-8
+
+require "spec_helper"
+require "logstash/util"
+require "logstash/event"
+require "json"
+
+TIMESTAMP = "@timestamp"
+
+describe LogStash::Event do
+  context "to_json" do
+    it "should serialize simple values" do
+      e = LogStash::Event.new({"foo" => "bar", "bar" => 1, "baz" => 1.0, TIMESTAMP => "2015-05-28T23:02:05.350Z"})
+      expect(JSON.parse(e.to_json)).to
eq(JSON.parse("{\"foo\":\"bar\",\"bar\":1,\"baz\":1.0,\"@timestamp\":\"2015-05-28T23:02:05.350Z\",\"@version\":\"1\"}")) + end + + it "should serialize deep hash values" do + e = LogStash::Event.new({"foo" => {"bar" => 1, "baz" => 1.0, "biz" => "boz"}, TIMESTAMP => "2015-05-28T23:02:05.350Z"}) + expect(JSON.parse(e.to_json)).to eq(JSON.parse("{\"foo\":{\"bar\":1,\"baz\":1.0,\"biz\":\"boz\"},\"@timestamp\":\"2015-05-28T23:02:05.350Z\",\"@version\":\"1\"}")) + end + + it "should serialize deep array values" do + e = LogStash::Event.new({"foo" => ["bar", 1, 1.0], TIMESTAMP => "2015-05-28T23:02:05.350Z"}) + expect(JSON.parse(e.to_json)).to eq(JSON.parse("{\"foo\":[\"bar\",1,1.0],\"@timestamp\":\"2015-05-28T23:02:05.350Z\",\"@version\":\"1\"}")) + end + + it "should serialize deep hash from field reference assignments" do + e = LogStash::Event.new({TIMESTAMP => "2015-05-28T23:02:05.350Z"}) + e["foo"] = "bar" + e["bar"] = 1 + e["baz"] = 1.0 + e["[fancy][pants][socks]"] = "shoes" + expect(JSON.parse(e.to_json)).to eq(JSON.parse("{\"@timestamp\":\"2015-05-28T23:02:05.350Z\",\"@version\":\"1\",\"foo\":\"bar\",\"bar\":1,\"baz\":1.0,\"fancy\":{\"pants\":{\"socks\":\"shoes\"}}}")) + end + end + + context "[]" do + it "should get simple values" do + e = LogStash::Event.new({"foo" => "bar", "bar" => 1, "baz" => 1.0, TIMESTAMP => "2015-05-28T23:02:05.350Z"}) + expect(e["foo"]).to eq("bar") + expect(e["[foo]"]).to eq("bar") + expect(e["bar"]).to eq(1) + expect(e["[bar]"]).to eq(1) + expect(e["baz"]).to eq(1.0) + expect(e["[baz]"]).to eq(1.0) + expect(e[TIMESTAMP].to_s).to eq("2015-05-28T23:02:05.350Z") + expect(e["[#{TIMESTAMP}]"].to_s).to eq("2015-05-28T23:02:05.350Z") + end + + it "should get deep hash values" do + e = LogStash::Event.new({"foo" => {"bar" => 1, "baz" => 1.0}}) + expect(e["[foo][bar]"]).to eq(1) + expect(e["[foo][baz]"]).to eq(1.0) + end + + it "should get deep array values" do + e = LogStash::Event.new({"foo" => ["bar", 1, 1.0]}) + expect(e["[foo][0]"]).to eq("bar") + expect(e["[foo][1]"]).to eq(1) + expect(e["[foo][2]"]).to eq(1.0) + expect(e["[foo][3]"]).to be_nil + end + end + + context "[]=" do + it "should set simple values" do + e = LogStash::Event.new() + expect(e["foo"] = "bar").to eq("bar") + expect(e["foo"]).to eq("bar") + + e = LogStash::Event.new({"foo" => "test"}) + expect(e["foo"] = "bar").to eq("bar") + expect(e["foo"]).to eq("bar") + end + + it "should set deep hash values" do + e = LogStash::Event.new() + expect(e["[foo][bar]"] = "baz").to eq("baz") + expect(e["[foo][bar]"]).to eq("baz") + expect(e["[foo][baz]"]).to be_nil + end + + it "should set deep array values" do + e = LogStash::Event.new() + expect(e["[foo][0]"] = "bar").to eq("bar") + expect(e["[foo][0]"]).to eq("bar") + expect(e["[foo][1]"] = 1).to eq(1) + expect(e["[foo][1]"]).to eq(1) + expect(e["[foo][2]"] = 1.0 ).to eq(1.0) + expect(e["[foo][2]"]).to eq(1.0) + expect(e["[foo][3]"]).to be_nil + end + end + + context "timestamp" do + it "getters should present a Ruby LogStash::Timestamp" do + e = LogStash::Event.new() + expect(e.timestamp.class).to eq(LogStash::Timestamp) + expect(e[TIMESTAMP].class).to eq(LogStash::Timestamp) + end + + it "to_hash should inject a Ruby LogStash::Timestamp" do + e = LogStash::Event.new() + + expect(e.to_java).to be_kind_of(Java::ComLogstash::Event) + expect(e.to_java.get_field(TIMESTAMP)).to be_kind_of(Java::ComLogstash::Timestamp) + + expect(e.to_hash[TIMESTAMP]).to be_kind_of(LogStash::Timestamp) + # now make sure the original map was not touched + 
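# (to_hash injects a Ruby Timestamp copy; the underlying Java map must still hold the Java Timestamp)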
expect(e.to_java.get_field(TIMESTAMP)).to be_kind_of(Java::ComLogstash::Timestamp) + end + + it "should set timestamp" do + e = LogStash::Event.new + now = Time.now + e["@timestamp"] = LogStash::Timestamp.at(now.to_i) + expect(e.timestamp.to_i).to eq(now.to_i) + expect(e["@timestamp"].to_i).to eq(now.to_i) + end + end + + context "append" do + it "should append" do + event = LogStash::Event.new("message" => "hello world") + event.append(LogStash::Event.new("message" => "another thing")) + expect(event["message"]).to eq(["hello world", "another thing"]) + end + end + + context "tags" do + it "should tag" do + event = LogStash::Event.new("message" => "hello world") + expect(event["tags"]).to be_nil + event["tags"] = ["foo"] + expect(event["tags"]).to eq(["foo"]) + end + end +end diff --git a/logstash-core-event-java/spec/timestamp_spec.rb b/logstash-core-event-java/spec/timestamp_spec.rb new file mode 100644 index 00000000000..1c092696389 --- /dev/null +++ b/logstash-core-event-java/spec/timestamp_spec.rb @@ -0,0 +1,29 @@ +# encoding: utf-8 + +require "spec_helper" +require "logstash/timestamp" + +describe LogStash::Timestamp do + context "constructors" do + it "should work" do + t = LogStash::Timestamp.new + expect(t.time.to_i).to be_within(1).of Time.now.to_i + + t = LogStash::Timestamp.now + expect(t.time.to_i).to be_within(1).of Time.now.to_i + + now = Time.now.utc + t = LogStash::Timestamp.new(now) + expect(t.time).to eq(now) + + t = LogStash::Timestamp.at(now.to_i) + expect(t.time.to_i).to eq(now.to_i) + end + + it "should raise exception on invalid format" do + expect{LogStash::Timestamp.new("foobar")}.to raise_error + end + + end + +end diff --git a/logstash-core-event-java/src/main/java/JrubyEventExtService.java b/logstash-core-event-java/src/main/java/JrubyEventExtService.java new file mode 100644 index 00000000000..306a45f3971 --- /dev/null +++ b/logstash-core-event-java/src/main/java/JrubyEventExtService.java @@ -0,0 +1,14 @@ +import com.logstash.ext.JrubyEventExtLibrary; +import org.jruby.Ruby; +import org.jruby.runtime.load.BasicLibraryService; + +import java.io.IOException; + +public class JrubyEventExtService implements BasicLibraryService { + public boolean basicLoad(final Ruby runtime) + throws IOException + { + new JrubyEventExtLibrary().load(runtime, false); + return true; + } +} diff --git a/logstash-core-event-java/src/main/java/JrubyTimestampExtService.java b/logstash-core-event-java/src/main/java/JrubyTimestampExtService.java new file mode 100644 index 00000000000..32d8eb2bf98 --- /dev/null +++ b/logstash-core-event-java/src/main/java/JrubyTimestampExtService.java @@ -0,0 +1,15 @@ +import com.logstash.ext.JrubyEventExtLibrary; +import com.logstash.ext.JrubyTimestampExtLibrary; +import org.jruby.Ruby; +import org.jruby.runtime.load.BasicLibraryService; + +import java.io.IOException; + +public class JrubyTimestampExtService implements BasicLibraryService { + public boolean basicLoad(final Ruby runtime) + throws IOException + { + new JrubyTimestampExtLibrary().load(runtime, false); + return true; + } +} diff --git a/logstash-core-event-java/src/main/java/com/logstash/Accessors.java b/logstash-core-event-java/src/main/java/com/logstash/Accessors.java new file mode 100644 index 00000000000..e9c207b217e --- /dev/null +++ b/logstash-core-event-java/src/main/java/com/logstash/Accessors.java @@ -0,0 +1,153 @@ +package com.logstash; + +import java.util.HashMap; +import java.util.Map; +import java.util.List; + +public class Accessors { + + private Map data; + protected Map lut; 
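+    // The lut (look-up table) memoizes the innermost container (Map or List) that each
+    // field reference resolves to, so repeated get/set/del calls on the same reference
+    // skip the path walk. For example, after get("[foo][bar]") the lut maps "[foo][bar]"
+    // to the inner Map holding the "bar" key.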
+
+    public Accessors(Map data) {
+        this.data = data;
+        this.lut = new HashMap<>(); // reference -> target LUT
+    }
+
+    public Object get(String reference) {
+        FieldReference field = PathCache.getInstance().cache(reference);
+        Object target = findTarget(field);
+        return (target == null) ? null : fetch(target, field.getKey());
+    }
+
+    public Object set(String reference, Object value) {
+        FieldReference field = PathCache.getInstance().cache(reference);
+        Object target = findCreateTarget(field);
+        return store(target, field.getKey(), value);
+    }
+
+    public Object del(String reference) {
+        FieldReference field = PathCache.getInstance().cache(reference);
+        Object target = findTarget(field);
+        if (target != null) {
+            if (target instanceof Map) {
+                return ((Map) target).remove(field.getKey());
+            } else if (target instanceof List) {
+                int i = Integer.parseInt(field.getKey());
+                if (i < 0 || i >= ((List) target).size()) {
+                    return null;
+                }
+                return ((List) target).remove(i);
+            } else {
+                throw new ClassCastException("expecting List or Map");
+            }
+        }
+        return null;
+    }
+
+    public boolean includes(String reference) {
+        FieldReference field = PathCache.getInstance().cache(reference);
+        Object target = findTarget(field);
+        if (target instanceof Map && foundInMap((Map) target, field.getKey())) {
+            return true;
+        } else if (target instanceof List && foundInList((List) target, Integer.parseInt(field.getKey()))) {
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    private Object findTarget(FieldReference field) {
+        Object target;
+
+        if ((target = this.lut.get(field.getReference())) != null) {
+            return target;
+        }
+
+        target = this.data;
+        for (String key : field.getPath()) {
+            target = fetch(target, key);
+            if (target == null) {
+                return null;
+            }
+        }
+
+        this.lut.put(field.getReference(), target);
+
+        return target;
+    }
+
+    private Object findCreateTarget(FieldReference field) {
+        Object target;
+
+        if ((target = this.lut.get(field.getReference())) != null) {
+            return target;
+        }
+
+        target = this.data;
+        for (String key : field.getPath()) {
+            Object result = fetch(target, key);
+            if (result == null) {
+                result = new HashMap();
+                if (target instanceof Map) {
+                    ((Map)target).put(key, result);
+                } else if (target instanceof List) {
+                    int i = Integer.parseInt(key);
+                    // TODO: what about index out of bound?
+                    ((List)target).set(i, result);
+                } else if (target == null) {
+                    // do nothing
+                } else {
+                    throw new ClassCastException("expecting List or Map");
+                }
+            }
+            target = result;
+        }
+
+        this.lut.put(field.getReference(), target);
+
+        return target;
+    }
+
+    private boolean foundInList(List target, int index) {
+        if (index < 0 || index >= target.size()) {
+            return false;
+        }
+        return target.get(index) != null;
+    }
+
+    private boolean foundInMap(Map target, String key) {
+        return target.containsKey(key);
+    }
+
+    private Object fetch(Object target, String key) {
+        if (target instanceof Map) {
+            Object result = ((Map) target).get(key);
+            return result;
+        } else if (target instanceof List) {
+            int i = Integer.parseInt(key);
+            if (i < 0 || i >= ((List) target).size()) {
+                return null;
+            }
+            Object result = ((List) target).get(i);
+            return result;
+        } else if (target == null) {
+            return null;
+        } else {
+            throw new ClassCastException("expecting List or Map");
+        }
+    }
+
+    private Object store(Object target, String key, Object value) {
+        if (target instanceof Map) {
+            ((Map) target).put(key, value);
+        } else if (target instanceof List) {
+            int i = Integer.parseInt(key);
+            // TODO: what about index out of bound?
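+            // (java.util.List.set throws IndexOutOfBoundsException when i >= size(), so an out-of-range write fails fast)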
+            ((List) target).set(i, value);
+        } else {
+            throw new ClassCastException("expecting List or Map");
+        }
+        return value;
+    }
+}
diff --git a/logstash-core-event-java/src/main/java/com/logstash/Cloner.java b/logstash-core-event-java/src/main/java/com/logstash/Cloner.java
new file mode 100644
index 00000000000..4823f10726a
--- /dev/null
+++ b/logstash-core-event-java/src/main/java/com/logstash/Cloner.java
@@ -0,0 +1,56 @@
+package com.logstash;
+
+import java.util.*;
+
+public final class Cloner {
+
+    private Cloner(){}
+
+    public static <T> T deep(final T input) {
+        if (input instanceof Map) {
+            return (T) deepMap((Map) input);
+        } else if (input instanceof List) {
+            return (T) deepList((List) input);
+        } else if (input instanceof Collection) {
+            throw new ClassCastException("unexpected Collection type " + input.getClass());
+        }
+
+        return input;
+    }
+
+    private static <E> List<E> deepList(final List<E> list) {
+        List<E> clone;
+        if (list instanceof LinkedList) {
+            clone = new LinkedList<E>();
+        } else if (list instanceof ArrayList) {
+            clone = new ArrayList<E>();
+        } else {
+            throw new ClassCastException("unexpected List type " + list.getClass());
+        }
+
+        for (E item : list) {
+            clone.add(deep(item));
+        }
+
+        return clone;
+    }
+
+    private static <K, V> Map<K, V> deepMap(final Map<K, V> map) {
+        Map<K, V> clone;
+        if (map instanceof LinkedHashMap) {
+            clone = new LinkedHashMap<K, V>();
+        } else if (map instanceof TreeMap) {
+            clone = new TreeMap<K, V>();
+        } else if (map instanceof HashMap) {
+            clone = new HashMap<K, V>();
+        } else {
+            throw new ClassCastException("unexpected Map type " + map.getClass());
+        }
+
+        for (Map.Entry<K, V> entry : map.entrySet()) {
+            clone.put(entry.getKey(), deep(entry.getValue()));
+        }
+
+        return clone;
+    }
+}
diff --git a/logstash-core-event-java/src/main/java/com/logstash/DateNode.java b/logstash-core-event-java/src/main/java/com/logstash/DateNode.java
new file mode 100644
index 00000000000..560d9f53d3c
--- /dev/null
+++ b/logstash-core-event-java/src/main/java/com/logstash/DateNode.java
@@ -0,0 +1,24 @@
+package com.logstash;
+
+import org.joda.time.DateTimeZone;
+import org.joda.time.format.DateTimeFormat;
+import org.joda.time.format.DateTimeFormatter;
+
+import java.io.IOError;
+import java.io.IOException;
+
+/**
+ * Created by ph on 15-05-22.
+ */
+public class DateNode implements TemplateNode {
+    private DateTimeFormatter formatter;
+
+    public DateNode(String format) {
+        this.formatter = DateTimeFormat.forPattern(format).withZone(DateTimeZone.UTC);
+    }
+
+    @Override
+    public String evaluate(Event event) throws IOException {
+        return event.getTimestamp().getTime().toString(this.formatter);
+    }
+}
diff --git a/logstash-core-event-java/src/main/java/com/logstash/EpochNode.java b/logstash-core-event-java/src/main/java/com/logstash/EpochNode.java
new file mode 100644
index 00000000000..4451ffa73c4
--- /dev/null
+++ b/logstash-core-event-java/src/main/java/com/logstash/EpochNode.java
@@ -0,0 +1,15 @@
+package com.logstash;
+
+import java.io.IOException;
+
+/**
+ * Created by ph on 15-05-22.
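+ * Template node that renders the event timestamp as whole epoch seconds, backing the %{+%s} sprintf tag.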
+ */ +public class EpochNode implements TemplateNode { + public EpochNode(){ } + + @Override + public String evaluate(Event event) throws IOException { + return String.valueOf(event.getTimestamp().getTime().getMillis() / 1000); + } +} \ No newline at end of file diff --git a/logstash-core-event-java/src/main/java/com/logstash/Event.java b/logstash-core-event-java/src/main/java/com/logstash/Event.java new file mode 100644 index 00000000000..ccdb6d4eb2d --- /dev/null +++ b/logstash-core-event-java/src/main/java/com/logstash/Event.java @@ -0,0 +1,253 @@ +package com.logstash; + +import com.logstash.ext.JrubyTimestampExtLibrary; +import org.codehaus.jackson.map.ObjectMapper; +import org.joda.time.DateTime; +import org.jruby.RubyHash; +import org.jruby.RubySymbol; + +import java.io.IOException; +import java.io.Serializable; +import java.util.*; + + +public class Event implements Cloneable, Serializable { + + private boolean cancelled; + private Map data; + private Map metadata; + private Timestamp timestamp; + private Accessors accessors; + private Accessors metadata_accessors; + + public static final String METADATA = "@metadata"; + public static final String METADATA_BRACKETS = "[" + METADATA + "]"; + public static final String TIMESTAMP = "@timestamp"; + public static final String TIMESTAMP_FAILURE_TAG = "_timestampparsefailure"; + public static final String TIMESTAMP_FAILURE_FIELD = "_@timestamp"; + public static final String VERSION = "@version"; + public static final String VERSION_ONE = "1"; + + private static final ObjectMapper mapper = new ObjectMapper(); + + // TODO: add metadata support + + public Event() + { + this.metadata = new HashMap(); + this.data = new HashMap(); + this.data.put(VERSION, VERSION_ONE); + this.cancelled = false; + this.timestamp = new Timestamp(); + this.data.put(TIMESTAMP, this.timestamp); + this.accessors = new Accessors(this.data); + this.metadata_accessors = new Accessors(this.metadata); + } + + public Event(Map data) + { + this.data = data; + this.data.putIfAbsent(VERSION, VERSION_ONE); + + if (this.data.containsKey(METADATA)) { + this.metadata = (HashMap) this.data.remove(METADATA); + } else { + this.metadata = new HashMap(); + } + this.metadata_accessors = new Accessors(this.metadata); + + this.cancelled = false; + this.timestamp = initTimestamp(data.get(TIMESTAMP)); + this.data.put(TIMESTAMP, this.timestamp); + this.accessors = new Accessors(this.data); + } + + public Map getData() { + return this.data; + } + + public Map getMetadata() { + return this.metadata; + } + + public void setData(Map data) { + this.data = data; + } + + public Accessors getAccessors() { + return this.accessors; + } + + public Accessors getMetadataAccessors() { + return this.metadata_accessors; + } + + public void setAccessors(Accessors accessors) { + this.accessors = accessors; + } + + public void setMetadataAccessors(Accessors accessors) { + this.metadata_accessors = accessors; + } + + public void cancel() { + this.cancelled = true; + } + + public void uncancel() { + this.cancelled = false; + } + + public boolean isCancelled() { + return this.cancelled; + } + + public Timestamp getTimestamp() throws IOException { + if (this.data.containsKey(TIMESTAMP)) { + return this.timestamp; + } else { + throw new IOException("fails"); + } + } + + public void setTimestamp(Timestamp t) { + this.timestamp = t; + this.data.put(TIMESTAMP, this.timestamp); + } + + public Object getField(String reference) { + if (reference.equals(METADATA)) { + return this.metadata; + } else if 
(reference.startsWith(METADATA_BRACKETS)) { + return this.metadata_accessors.get(reference.substring(METADATA_BRACKETS.length())); + } else { + return this.accessors.get(reference); + } + } + + public void setField(String reference, Object value) { + if (reference.equals(TIMESTAMP)) { + // TODO(talevy): check type of timestamp + this.accessors.set(reference, value); + } else if (reference.equals(METADATA_BRACKETS) || reference.equals(METADATA)) { + this.metadata = (HashMap) value; + this.metadata_accessors = new Accessors(this.metadata); + } else if (reference.startsWith(METADATA_BRACKETS)) { + this.metadata_accessors.set(reference.substring(METADATA_BRACKETS.length()), value); + } else { + this.accessors.set(reference, value); + } + } + + public boolean includes(String reference) { + if (reference.equals(METADATA_BRACKETS) || reference.equals(METADATA)) { + return true; + } else if (reference.startsWith(METADATA_BRACKETS)) { + return this.metadata_accessors.includes(reference.substring(METADATA_BRACKETS.length())); + } else { + return this.accessors.includes(reference); + } + } + + public String toJson() throws IOException { + return mapper.writeValueAsString((Map)this.data); + } + + public Map toMap() { + return this.data; + } + + public Event overwrite(Event e) { + this.data = e.getData(); + this.accessors = e.getAccessors(); + this.cancelled = e.isCancelled(); + try { + this.timestamp = e.getTimestamp(); + } catch (IOException exception) { + this.timestamp = new Timestamp(); + } + + return this; + } + + + public Event append(Event e) { + Util.mapMerge(this.data, e.data); + + return this; + } + + public Object remove(String path) { + return this.accessors.del(path); + } + + public String sprintf(String s) throws IOException { + return StringInterpolation.getInstance().evaluate(this, s); + } + + public Event clone() + throws CloneNotSupportedException + { +// Event clone = (Event)super.clone(); +// clone.setAccessors(new Accessors(clone.getData())); + + Event clone = new Event(Cloner.deep(getData())); + return clone; + } + + public String toString() { + // TODO: until we have sprintf + String host = (String)this.data.getOrDefault("host", "%{host}"); + String message = (String)this.data.getOrDefault("message", "%{message}"); + try { + return getTimestamp().toIso8601() + " " + host + " " + message; + } catch (IOException e) { + return host + " " + message; + } + } + + private Timestamp initTimestamp(Object o) { + try { + if (o == null) { + // most frequent + return new Timestamp(); + } else if (o instanceof String) { + // second most frequent + return new Timestamp((String) o); + } else if (o instanceof JrubyTimestampExtLibrary.RubyTimestamp) { + return new Timestamp(((JrubyTimestampExtLibrary.RubyTimestamp) o).getTimestamp()); + } else if (o instanceof Timestamp) { + return new Timestamp((Timestamp) o); + } else if (o instanceof DateTime) { + return new Timestamp((DateTime) o); + } else if (o instanceof Date) { + return new Timestamp((Date) o); + } else if (o instanceof RubySymbol) { + return new Timestamp(((RubySymbol) o).asJavaString()); + } else { + // TODO: add logging + //return Timestamp.now(); + throw new IllegalArgumentException(); + } + } catch (IllegalArgumentException e) { + // TODO: add error logging + tag(TIMESTAMP_FAILURE_TAG); + + this.data.put(TIMESTAMP_FAILURE_FIELD, o); + + return Timestamp.now(); + } + } + + public void tag(String tag) { + List tags = (List) this.data.get("tags"); + if (tags == null) { + tags = new ArrayList<>(); + this.data.put("tags", tags); + } + + 
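// only append the tag if it is not already present, keeping "tags" duplicate-free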
+        if (!tags.contains(tag)) {
+            tags.add(tag);
+        }
+    }
+}
diff --git a/logstash-core-event-java/src/main/java/com/logstash/FieldReference.java b/logstash-core-event-java/src/main/java/com/logstash/FieldReference.java
new file mode 100644
index 00000000000..508291693f1
--- /dev/null
+++ b/logstash-core-event-java/src/main/java/com/logstash/FieldReference.java
@@ -0,0 +1,40 @@
+package com.logstash;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+// note: PathCache implements a thread-safe path cache singleton to avoid re-parsing references
+
+public class FieldReference {
+
+    private List path;
+    private String key;
+    private String reference;
+    private static List EMPTY_STRINGS = new ArrayList(Arrays.asList(new String[]{""}));
+
+    public FieldReference(List path, String key, String reference) {
+        this.path = path;
+        this.key = key;
+        this.reference = reference;
+    }
+
+    public List getPath() {
+        return path;
+    }
+
+    public String getKey() {
+        return key;
+    }
+
+    public String getReference() {
+        return reference;
+    }
+
+    public static FieldReference parse(String reference) {
+        List path = new ArrayList(Arrays.asList(reference.split("[\\[\\]]")));
+        path.removeAll(EMPTY_STRINGS);
+        String key = path.remove(path.size() - 1);
+        return new FieldReference(path, key, reference);
+    }
+}
diff --git a/logstash-core-event-java/src/main/java/com/logstash/KeyNode.java b/logstash-core-event-java/src/main/java/com/logstash/KeyNode.java
new file mode 100644
index 00000000000..c74902361fd
--- /dev/null
+++ b/logstash-core-event-java/src/main/java/com/logstash/KeyNode.java
@@ -0,0 +1,42 @@
+package com.logstash;
+
+import org.codehaus.jackson.JsonGenerationException;
+import org.codehaus.jackson.map.ObjectMapper;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Created by ph on 15-05-22.
+ */
+public class KeyNode implements TemplateNode {
+    private String key;
+
+    public KeyNode(String key) {
+        this.key = key;
+    }
+
+    /**
+     This will be more complicated with hash and array.
+     leverage the jackson lib to do the actual serialization.
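+     Currently: List values are joined with commas, Map values are serialized to JSON,
+     and a missing key evaluates to the literal "%{key}" tag.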
+ */ + @Override + public String evaluate(Event event) throws IOException { + Object value = event.getField(this.key); + + if (value != null) { + if (value instanceof List) { + return String.join(",", (List) value); + } else if (value instanceof Map) { + ObjectMapper mapper = new ObjectMapper(); + return mapper.writeValueAsString((Map)value); + } else { + return event.getField(this.key).toString(); + } + + } else { + return "%{" + this.key + "}"; + } + } +} \ No newline at end of file diff --git a/logstash-core-event-java/src/main/java/com/logstash/PathCache.java b/logstash-core-event-java/src/main/java/com/logstash/PathCache.java new file mode 100644 index 00000000000..b7beff95b89 --- /dev/null +++ b/logstash-core-event-java/src/main/java/com/logstash/PathCache.java @@ -0,0 +1,47 @@ +package com.logstash; + +import java.util.concurrent.ConcurrentHashMap; + +public class PathCache { + + private static PathCache instance = null; + private static ConcurrentHashMap cache = new ConcurrentHashMap<>(); + + private FieldReference timestamp; + + // TODO: dry with Event + public static final String TIMESTAMP = "@timestamp"; + public static final String BRACKETS_TIMESTAMP = "[" + TIMESTAMP + "]"; + + protected PathCache() { + // inject @timestamp + this.timestamp = cache(TIMESTAMP); + cache(BRACKETS_TIMESTAMP, this.timestamp); + } + + public static PathCache getInstance() { + if (instance == null) { + instance = new PathCache(); + } + return instance; + } + + public boolean isTimestamp(String reference) { + return (cache(reference) == this.timestamp); + } + + public FieldReference cache(String reference) { + // atomicity between the get and put is not important + FieldReference result = cache.get(reference); + if (result == null) { + result = FieldReference.parse(reference); + cache.put(reference, result); + } + return result; + } + + public FieldReference cache(String reference, FieldReference field) { + cache.put(reference, field); + return field; + } +} diff --git a/logstash-core-event-java/src/main/java/com/logstash/RubyToJavaConverter.java b/logstash-core-event-java/src/main/java/com/logstash/RubyToJavaConverter.java new file mode 100644 index 00000000000..2170ad4b5fb --- /dev/null +++ b/logstash-core-event-java/src/main/java/com/logstash/RubyToJavaConverter.java @@ -0,0 +1,45 @@ +package com.logstash; + +import org.jruby.RubyArray; +import org.jruby.RubyHash; +import org.jruby.RubyString; +import org.jruby.runtime.builtin.IRubyObject; + +import java.util.*; + +public class RubyToJavaConverter { + + public static Object convert(IRubyObject obj) { + if (obj instanceof RubyArray) { + return convertToList((RubyArray) obj); + } else if (obj instanceof RubyHash) { + return convertToMap((RubyHash) obj); + } else if (obj instanceof RubyString) { + return convertToString((RubyString) obj); + } + + return obj.toJava(obj.getJavaClass()); + } + + public static HashMap convertToMap(RubyHash hash) { + HashMap hashMap = new HashMap(); + Set entries = hash.directEntrySet(); + for (RubyHash.RubyHashEntry e : entries) { + hashMap.put(e.getJavaifiedKey().toString(), convert((IRubyObject) e.getValue())); + } + return hashMap; + } + + public static List convertToList(RubyArray array) { + ArrayList list = new ArrayList(); + for (IRubyObject obj : array.toJavaArray()) { + list.add(convert(obj)); + } + + return list; + } + + public static String convertToString(RubyString string) { + return string.decodeString(); + } +} diff --git a/logstash-core-event-java/src/main/java/com/logstash/StaticNode.java 
b/logstash-core-event-java/src/main/java/com/logstash/StaticNode.java
new file mode 100644
index 00000000000..73b5c160440
--- /dev/null
+++ b/logstash-core-event-java/src/main/java/com/logstash/StaticNode.java
@@ -0,0 +1,19 @@
+package com.logstash;
+
+import java.io.IOException;
+
+/**
+ * Created by ph on 15-05-22.
+ */
+public class StaticNode implements TemplateNode {
+    private String content;
+
+    public StaticNode(String content) {
+        this.content = content;
+    }
+
+    @Override
+    public String evaluate(Event event) throws IOException {
+        return this.content;
+    }
+}
\ No newline at end of file
diff --git a/logstash-core-event-java/src/main/java/com/logstash/StringInterpolation.java b/logstash-core-event-java/src/main/java/com/logstash/StringInterpolation.java
new file mode 100644
index 00000000000..77aea3e41d6
--- /dev/null
+++ b/logstash-core-event-java/src/main/java/com/logstash/StringInterpolation.java
@@ -0,0 +1,94 @@
+package com.logstash;
+
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+public class StringInterpolation {
+    static Pattern TEMPLATE_TAG = Pattern.compile("%\\{([^}]+)\\}");
+    static Map cache;
+
+    protected static class HoldCurrent {
+        private static final StringInterpolation INSTANCE = new StringInterpolation();
+    }
+
+    private StringInterpolation() {
+        // TODO:
+        // This may need some tweaking for the concurrency level to get better memory usage.
+        // The current implementation doesn't allow the keys to expire; under normal usage
+        // the set of keys should converge to a fixed number.
+        //
+        // If this code makes logstash go OOM, we have the following options:
+        //  - If the key doesn't contain a `%`, do not cache it; this reduces the cache size at a performance cost.
+        //  - Use some kind of LRU cache
+        //  - Create a new data structure that uses weak references, or use Google Guava for the cache https://code.google.com/p/guava-libraries/
+        this.cache = new ConcurrentHashMap<>();
+    }
+
+    public String evaluate(Event event, String template) throws IOException {
+        TemplateNode compiledTemplate = (TemplateNode) this.cache.get(template);
+
+        if(compiledTemplate == null) {
+            compiledTemplate = this.compile(template);
+            TemplateNode set = (TemplateNode) this.cache.putIfAbsent(template, compiledTemplate);
+            compiledTemplate = (set != null) ? set : compiledTemplate;
+        }
+
+        return compiledTemplate.evaluate(event);
+    }
+
+    public TemplateNode compile(String template) {
+        Template compiledTemplate = new Template();
+
+        if (template.indexOf('%') == -1) {
+            // Move the nodes to a custom instance
+            // so we can remove the iterator and do one `.evaluate`
+            compiledTemplate.add(new StaticNode(template));
+        } else {
+            Matcher matcher = TEMPLATE_TAG.matcher(template);
+            String tag;
+            int pos = 0;
+
+            while (matcher.find()) {
+                if (matcher.start() > 0) {
+                    compiledTemplate.add(new StaticNode(template.substring(pos, matcher.start())));
+                }
+
+                tag = matcher.group(1);
+                compiledTemplate.add(identifyTag(tag));
+                pos = matcher.end();
+            }
+
+            if(pos <= template.length() - 1) {
+                compiledTemplate.add(new StaticNode(template.substring(pos)));
+            }
+        }
+
+        // if we only have one node return the node directly
+        // and remove the need to loop.
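+        // (this is the common case: a template with no %{} tags, or a single tag with no surrounding static text)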
+ if(compiledTemplate.size() == 1) { + return compiledTemplate.get(0); + } else { + return compiledTemplate; + } + } + + public TemplateNode identifyTag(String tag) { + if(tag.equals("+%s")) { + return new EpochNode(); + } else if(tag.charAt(0) == '+') { + return new DateNode(tag.substring(1)); + + } else { + return new KeyNode(tag); + } + } + + static StringInterpolation getInstance() { + return HoldCurrent.INSTANCE; + } +} \ No newline at end of file diff --git a/logstash-core-event-java/src/main/java/com/logstash/Template.java b/logstash-core-event-java/src/main/java/com/logstash/Template.java new file mode 100644 index 00000000000..a17e69b3946 --- /dev/null +++ b/logstash-core-event-java/src/main/java/com/logstash/Template.java @@ -0,0 +1,32 @@ +package com.logstash; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +public class Template implements TemplateNode { + public List nodes = new ArrayList<>(); + public Template() {} + + public void add(TemplateNode node) { + nodes.add(node); + } + + public int size() { + return nodes.size(); + } + + public TemplateNode get(int index) { + return (TemplateNode) nodes.get(index); + } + + @Override + public String evaluate(Event event) throws IOException { + StringBuffer results = new StringBuffer(); + + for (int i = 0; i < nodes.size(); i++) { + results.append(((TemplateNode) nodes.get(i)).evaluate(event)); + } + return results.toString(); + } +} \ No newline at end of file diff --git a/logstash-core-event-java/src/main/java/com/logstash/TemplateNode.java b/logstash-core-event-java/src/main/java/com/logstash/TemplateNode.java new file mode 100644 index 00000000000..c71dbd33e71 --- /dev/null +++ b/logstash-core-event-java/src/main/java/com/logstash/TemplateNode.java @@ -0,0 +1,12 @@ +package com.logstash; + +import org.codehaus.jackson.JsonGenerationException; + +import java.io.IOException; + +/** + * Created by ph on 15-05-22. + */ +public interface TemplateNode { + String evaluate(Event event) throws IOException; +} diff --git a/logstash-core-event-java/src/main/java/com/logstash/Timestamp.java b/logstash-core-event-java/src/main/java/com/logstash/Timestamp.java new file mode 100644 index 00000000000..3fc7ff855bd --- /dev/null +++ b/logstash-core-event-java/src/main/java/com/logstash/Timestamp.java @@ -0,0 +1,74 @@ +package com.logstash; + +import org.codehaus.jackson.map.annotate.JsonSerialize; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.format.DateTimeFormatter; +import org.joda.time.format.ISODateTimeFormat; +import org.jruby.Ruby; +import org.jruby.RubyString; + +import java.util.Date; + +@JsonSerialize(using = TimestampSerializer.class) +public class Timestamp implements Cloneable { + + private DateTime time; + // TODO: is this DateTimeFormatter thread safe? 
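+    // (it is: Joda-Time formatters are immutable and thread-safe, so sharing one static instance is safe)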
+ private static DateTimeFormatter iso8601Formatter = ISODateTimeFormat.dateTime(); + + public Timestamp() { + this.time = new DateTime(DateTimeZone.UTC); + } + + public Timestamp(String iso8601) { + this.time = ISODateTimeFormat.dateTimeParser().parseDateTime(iso8601).toDateTime(DateTimeZone.UTC); + } + + public Timestamp(Timestamp t) { + this.time = t.getTime(); + } + + public Timestamp(long epoch_milliseconds) { + this.time = new DateTime(epoch_milliseconds, DateTimeZone.UTC); + } + + public Timestamp(Long epoch_milliseconds) { + this.time = new DateTime(epoch_milliseconds, DateTimeZone.UTC); + } + + public Timestamp(Date date) { + this.time = new DateTime(date, DateTimeZone.UTC); + } + + public Timestamp(DateTime date) { + this.time = date.toDateTime(DateTimeZone.UTC); + } + + public DateTime getTime() { + return time; + } + + public void setTime(DateTime time) { + this.time = time; + } + + public static Timestamp now() { + return new Timestamp(); + } + + public String toIso8601() { + return this.iso8601Formatter.print(this.time); + } + + public String toString() { + return toIso8601(); + } + + @Override + public Timestamp clone() throws CloneNotSupportedException { + Timestamp clone = (Timestamp)super.clone(); + clone.setTime(this.getTime()); + return clone; + } +} diff --git a/logstash-core-event-java/src/main/java/com/logstash/TimestampSerializer.java b/logstash-core-event-java/src/main/java/com/logstash/TimestampSerializer.java new file mode 100644 index 00000000000..51385986cad --- /dev/null +++ b/logstash-core-event-java/src/main/java/com/logstash/TimestampSerializer.java @@ -0,0 +1,17 @@ +package com.logstash; + +import org.codehaus.jackson.JsonGenerator; +import org.codehaus.jackson.map.JsonSerializer; +import org.codehaus.jackson.map.SerializerProvider; + +import java.io.IOException; + +public class TimestampSerializer extends JsonSerializer { + + @Override + public void serialize(Timestamp value, JsonGenerator jgen, SerializerProvider provider) + throws IOException + { + jgen.writeString(value.toIso8601()); + } +} diff --git a/logstash-core-event-java/src/main/java/com/logstash/Util.java b/logstash-core-event-java/src/main/java/com/logstash/Util.java new file mode 100644 index 00000000000..9d2cc12f89e --- /dev/null +++ b/logstash-core-event-java/src/main/java/com/logstash/Util.java @@ -0,0 +1,51 @@ +package com.logstash; + +import com.google.common.collect.Lists; +import org.jruby.RubyHash; + +import java.util.ArrayList; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; + +public class Util { + private Util() {} + + public static void mapMerge(Map target, Map add) { + for (Map.Entry e : add.entrySet()) { + if (target.containsKey(e.getKey())) { + if (target.get(e.getKey()) instanceof Map && e.getValue() instanceof Map) { + mapMerge((Map) target.get(e.getKey()), (Map) e.getValue()); + } else if (e.getValue() instanceof List) { + if (target.get(e.getKey()) instanceof List) { + // needs optimizing + List targetList = (List) target.get(e.getKey()); + targetList.addAll((List) e.getValue()); + target.put(e.getKey(), new ArrayList(new LinkedHashSet(targetList))); + } else { + Object targetValue = target.get(e.getKey()); + List targetValueList = Lists.newArrayList(targetValue); + for (Object o : (List) e.getValue()) { + if (!targetValue.equals(o)) { + targetValueList.add(o); + } + } + target.put(e.getKey(), targetValueList); + } + } else if (target.get(e.getKey()) instanceof List) { + List t = ((List) target.get(e.getKey())); + if 
(!t.contains(e.getValue())) { + t.add(e.getValue()); + } + } else if (!target.get(e.getKey()).equals(e.getValue())) { + Object targetValue = target.get(e.getKey()); + targetValue = Lists.newArrayList(targetValue); + ((List) targetValue).add(e.getValue()); + target.put(e.getKey(), targetValue); + } + } else { + target.put(e.getKey(), e.getValue()); + } + } + } +} diff --git a/logstash-core-event-java/src/main/java/com/logstash/ext/JrubyEventExtLibrary.java b/logstash-core-event-java/src/main/java/com/logstash/ext/JrubyEventExtLibrary.java new file mode 100644 index 00000000000..8ecb434f301 --- /dev/null +++ b/logstash-core-event-java/src/main/java/com/logstash/ext/JrubyEventExtLibrary.java @@ -0,0 +1,278 @@ +package com.logstash.ext; + +import com.logstash.Event; +import com.logstash.PathCache; +import com.logstash.RubyToJavaConverter; +import com.logstash.Timestamp; +import org.jruby.*; +import org.jruby.anno.JRubyClass; +import org.jruby.anno.JRubyConstant; +import org.jruby.anno.JRubyMethod; +import org.jruby.exceptions.RaiseException; +import org.jruby.java.proxies.MapJavaProxy; +import org.jruby.javasupport.JavaUtil; +import org.jruby.runtime.Arity; +import org.jruby.runtime.ObjectAllocator; +import org.jruby.runtime.ThreadContext; +import org.jruby.runtime.builtin.IRubyObject; +import org.jruby.runtime.load.Library; + +import java.io.IOException; +import java.util.*; + + +public class JrubyEventExtLibrary implements Library { + + public void load(Ruby runtime, boolean wrap) throws IOException { + RubyModule module = runtime.defineModule("LogStash"); + RubyClass clazz = runtime.defineClassUnder("Event", runtime.getObject(), new ObjectAllocator() { + public IRubyObject allocate(Ruby runtime, RubyClass rubyClass) { + return new RubyEvent(runtime, rubyClass); + } + }, module); + clazz.setConstant("LOGGER", runtime.getModule("Cabin").getClass("Channel") + .callMethod("get", runtime.getModule("LogStash"))); + clazz.setConstant("TIMESTAMP", runtime.newString(Event.TIMESTAMP)); + clazz.setConstant("TIMESTAMP_FAILURE_TAG", runtime.newString(Event.TIMESTAMP_FAILURE_TAG)); + clazz.setConstant("TIMESTAMP_FAILURE_FIELD", runtime.newString(Event.TIMESTAMP_FAILURE_FIELD)); + clazz.defineAnnotatedMethods(RubyEvent.class); + clazz.defineAnnotatedConstants(RubyEvent.class); + } + + @JRubyClass(name = "Event", parent = "Object") + public static class RubyEvent extends RubyObject { + private Event event; + + public RubyEvent(Ruby runtime, RubyClass klass) { + super(runtime, klass); + } + + public RubyEvent(Ruby runtime) { + this(runtime, runtime.getModule("LogStash").getClass("Event")); + } + + public RubyEvent(Ruby runtime, Event event) { + this(runtime); + this.event = event; + } + + public static RubyEvent newRubyEvent(Ruby runtime, Event event) { + return new RubyEvent(runtime, event); + } + + public Event getEvent() { + return event; + } + + public void setEvent(Event event) { + this.event = event; + } + + // def initialize(data = {}) + @JRubyMethod(name = "initialize", optional = 1) + public IRubyObject ruby_initialize(ThreadContext context, IRubyObject[] args) + { + args = Arity.scanArgs(context.runtime, args, 0, 1); + IRubyObject data = args[0]; + + if (data.isNil()) { + this.event = new Event(); + } else if (data instanceof RubyHash) { + HashMap newObj = RubyToJavaConverter.convertToMap((RubyHash) data); + this.event = new Event(newObj); + } else if (data instanceof Map) { + this.event = new Event((Map) data); + } else if (Map.class.isAssignableFrom(data.getJavaClass())) { + this.event = new 
Event((Map)data.toJava(Map.class));
+            } else {
+                throw context.runtime.newTypeError("wrong argument type " + data.getMetaClass() + " (expected Hash)");
+            }
+
+            return context.nil;
+        }
+
+        @JRubyMethod(name = "[]", required = 1)
+        public IRubyObject ruby_get_field(ThreadContext context, RubyString reference)
+        {
+            String r = reference.asJavaString();
+            Object value = this.event.getField(r);
+            if (value instanceof Timestamp) {
+                return JrubyTimestampExtLibrary.RubyTimestamp.newRubyTimestamp(context.runtime, (Timestamp)value);
+            } else if (value instanceof List) {
+                IRubyObject obj = JavaUtil.convertJavaToRuby(context.runtime, value);
+                return obj.callMethod(context, "to_a");
+            } else {
+                return JavaUtil.convertJavaToRuby(context.runtime, value);
+            }
+        }
+
+        @JRubyMethod(name = "[]=", required = 2)
+        public IRubyObject ruby_set_field(ThreadContext context, RubyString reference, IRubyObject value)
+        {
+            String r = reference.asJavaString();
+            if (PathCache.getInstance().isTimestamp(r)) {
+                if (!(value instanceof JrubyTimestampExtLibrary.RubyTimestamp)) {
+                    throw context.runtime.newTypeError("wrong argument type " + value.getMetaClass() + " (expected LogStash::Timestamp)");
+                }
+                this.event.setTimestamp(((JrubyTimestampExtLibrary.RubyTimestamp)value).getTimestamp());
+            } else {
+                if (value instanceof RubyString) {
+                    String val = ((RubyString) value).asJavaString();
+                    this.event.setField(r, val);
+                } else if (value instanceof RubyInteger) {
+                    this.event.setField(r, ((RubyInteger) value).getLongValue());
+                } else if (value instanceof RubyFloat) {
+                    this.event.setField(r, ((RubyFloat) value).getDoubleValue());
+                } else if (value instanceof JrubyTimestampExtLibrary.RubyTimestamp) {
+                    // a RubyTimestamp could also be assigned to a field other than @timestamp
+                    this.event.setField(r, ((JrubyTimestampExtLibrary.RubyTimestamp) value).getTimestamp());
+                } else if (value instanceof RubyArray) {
+                    this.event.setField(r, RubyToJavaConverter.convertToList((RubyArray) value));
+                } else if (value instanceof RubyHash) {
+                    this.event.setField(r, RubyToJavaConverter.convertToMap((RubyHash) value));
+                } else {
+                    throw context.runtime.newTypeError("wrong argument type " + value.getMetaClass());
+                }
+            }
+            return value;
+        }
+
+        @JRubyMethod(name = "cancel")
+        public IRubyObject ruby_cancel(ThreadContext context)
+        {
+            this.event.cancel();
+            return context.runtime.getTrue();
+        }
+
+        @JRubyMethod(name = "uncancel")
+        public IRubyObject ruby_uncancel(ThreadContext context)
+        {
+            this.event.uncancel();
+            return context.runtime.getFalse();
+        }
+
+        @JRubyMethod(name = "cancelled?")
+        public IRubyObject ruby_cancelled(ThreadContext context)
+        {
+            return RubyBoolean.newBoolean(context.runtime, this.event.isCancelled());
+        }
+
+        @JRubyMethod(name = "include?", required = 1)
+        public IRubyObject ruby_includes(ThreadContext context, RubyString reference)
+        {
+            return RubyBoolean.newBoolean(context.runtime, this.event.includes(reference.asJavaString()));
+        }
+
+        @JRubyMethod(name = "remove", required = 1)
+        public IRubyObject ruby_remove(ThreadContext context, RubyString reference)
+        {
+            return JavaUtil.convertJavaToRuby(context.runtime, this.event.remove(reference.asJavaString()));
+        }
+
+        @JRubyMethod(name = "clone")
+        public IRubyObject ruby_clone(ThreadContext context)
+        {
+            try {
+                return RubyEvent.newRubyEvent(context.runtime, this.event.clone());
+            } catch (CloneNotSupportedException e) {
+                throw context.runtime.newRuntimeError(e.getMessage());
+            }
+        }
+
+        @JRubyMethod(name = "overwrite",
required = 1) + public IRubyObject ruby_overwrite(ThreadContext context, IRubyObject value) + { + if (!(value instanceof RubyEvent)) { + throw context.runtime.newTypeError("wrong argument type " + value.getMetaClass() + " (expected LogStash::Event)"); + } + + return RubyEvent.newRubyEvent(context.runtime, this.event.overwrite(((RubyEvent) value).event)); + } + + @JRubyMethod(name = "append", required = 1) + public IRubyObject ruby_append(ThreadContext context, IRubyObject value) + { + if (!(value instanceof RubyEvent)) { + throw context.runtime.newTypeError("wrong argument type " + value.getMetaClass() + " (expected LogStash::Event)"); + } + + this.event.append(((RubyEvent) value).getEvent()); + + return this; + } + + @JRubyMethod(name = "sprintf", required = 1) + public IRubyObject ruby_sprintf(ThreadContext context, IRubyObject format) throws IOException { + try { + return RubyString.newString(context.runtime, event.sprintf(format.toString())); + } catch (IOException e) { + throw new RaiseException(getRuntime(), + (RubyClass) getRuntime().getModule("LogStash").getClass("Error"), + "timestamp field is missing", true); + } + } + + @JRubyMethod(name = "to_s") + public IRubyObject ruby_to_s(ThreadContext context) + { + return RubyString.newString(context.runtime, event.toString()); + } + + @JRubyMethod(name = "to_hash") + public IRubyObject ruby_to_hash(ThreadContext context) throws IOException + { + // TODO: is this the most efficient? + RubyHash hash = JavaUtil.convertJavaToUsableRubyObject(context.runtime, this.event.toMap()).convertToHash(); + // inject RubyTimestamp in new hash + hash.put(PathCache.TIMESTAMP, JrubyTimestampExtLibrary.RubyTimestamp.newRubyTimestamp(context.runtime, this.event.getTimestamp())); + return hash; + } + + @JRubyMethod(name = "to_hash_with_metadata") + public IRubyObject ruby_to_hash_with_metadata(ThreadContext context) throws IOException + { + HashMap dataAndMetadata = new HashMap(this.event.getData()); + if (!this.event.getMetadata().isEmpty()) { + dataAndMetadata.put(Event.METADATA, this.event.getMetadata()); + } + + RubyHash hash = JavaUtil.convertJavaToUsableRubyObject(context.runtime, dataAndMetadata).convertToHash(); + + // inject RubyTimestamp in new hash + hash.put(PathCache.TIMESTAMP, JrubyTimestampExtLibrary.RubyTimestamp.newRubyTimestamp(context.runtime, this.event.getTimestamp())); + return hash; + } + + @JRubyMethod(name = "to_java") + public IRubyObject ruby_to_java(ThreadContext context) + { + return JavaUtil.convertJavaToUsableRubyObject(context.runtime, this.event); + } + + @JRubyMethod(name = "to_json", rest = true) + public IRubyObject ruby_to_json(ThreadContext context, IRubyObject[] args) + throws IOException + { + return RubyString.newString(context.runtime, event.toJson()); + } + + @JRubyMethod(name = "validate_value", required = 1, meta = true) + public static IRubyObject ruby_validate_value(ThreadContext context, IRubyObject recv, IRubyObject value) + { + // TODO: add UTF-8 validation + return value; + } + + @JRubyMethod(name = "tag", required = 1) + public IRubyObject ruby_tag(ThreadContext context, RubyString value) + { + this.event.tag(((RubyString) value).asJavaString()); + return context.runtime.getNil(); + } + + @JRubyMethod(name = "timestamp") + public IRubyObject ruby_timestamp(ThreadContext context) throws IOException { + return new JrubyTimestampExtLibrary.RubyTimestamp(context.getRuntime(), this.event.getTimestamp()); + } + } +} diff --git 
a/logstash-core-event-java/src/main/java/com/logstash/ext/JrubyTimestampExtLibrary.java b/logstash-core-event-java/src/main/java/com/logstash/ext/JrubyTimestampExtLibrary.java new file mode 100644 index 00000000000..b84aadc9507 --- /dev/null +++ b/logstash-core-event-java/src/main/java/com/logstash/ext/JrubyTimestampExtLibrary.java @@ -0,0 +1,208 @@ +package com.logstash.ext; + +import com.logstash.*; +import org.codehaus.jackson.map.annotate.JsonSerialize; +import org.jruby.*; +import org.jruby.anno.JRubyClass; +import org.jruby.anno.JRubyMethod; +import org.jruby.exceptions.RaiseException; +import org.jruby.javasupport.JavaUtil; +import org.jruby.runtime.Arity; +import org.jruby.runtime.ObjectAllocator; +import org.jruby.runtime.ThreadContext; +import org.jruby.runtime.builtin.IRubyObject; +import org.jruby.runtime.load.Library; + +import java.io.IOException; + +public class JrubyTimestampExtLibrary implements Library { + public void load(Ruby runtime, boolean wrap) throws IOException { + RubyModule module = runtime.defineModule("LogStash"); + RubyClass clazz = runtime.defineClassUnder("Timestamp", runtime.getObject(), new ObjectAllocator() { + public IRubyObject allocate(Ruby runtime, RubyClass rubyClass) { + return new RubyTimestamp(runtime, rubyClass); + } + }, module); + clazz.defineAnnotatedMethods(RubyTimestamp.class); + } + + @JRubyClass(name = "Timestamp", parent = "Object") + public static class RubyTimestamp extends RubyObject { + + private Timestamp timestamp; + + public RubyTimestamp(Ruby runtime, RubyClass klass) { + super(runtime, klass); + } + + public RubyTimestamp(Ruby runtime, RubyClass klass, Timestamp timestamp) { + this(runtime, klass); + this.timestamp = timestamp; + } + + public RubyTimestamp(Ruby runtime, Timestamp timestamp) { + this(runtime, runtime.getModule("LogStash").getClass("Timestamp"), timestamp); + } + + public RubyTimestamp(Ruby runtime) { + this(runtime, new Timestamp()); + } + + public static RubyTimestamp newRubyTimestamp(Ruby runtime) { + return new RubyTimestamp(runtime); + } + + public static RubyTimestamp newRubyTimestamp(Ruby runtime, long epoch) { + // Ruby epoch is in seconds, Java in milliseconds + return new RubyTimestamp(runtime, new Timestamp(epoch * 1000)); + } + + public static RubyTimestamp newRubyTimestamp(Ruby runtime, Timestamp timestamp) { + return new RubyTimestamp(runtime, timestamp); + } + + public Timestamp getTimestamp() { + return timestamp; + } + + public void setTimestamp(Timestamp timestamp) { + this.timestamp = timestamp; + } + + // def initialize(time = Time.new) + @JRubyMethod(name = "initialize", optional = 1) + public IRubyObject ruby_initialize(ThreadContext context, IRubyObject[] args) + { + args = Arity.scanArgs(context.runtime, args, 0, 1); + IRubyObject time = args[0]; + + if (time.isNil()) { + this.timestamp = new Timestamp(); + } else if (time instanceof RubyTime) { + this.timestamp = new Timestamp(((RubyTime)time).getDateTime()); + } else if (time instanceof RubyString) { + try { + this.timestamp = new Timestamp(((RubyString) time).toString()); + } catch (IllegalArgumentException e) { + throw new RaiseException( + getRuntime(), + getRuntime().getModule("LogStash").getClass("TimestampParserError"), + "invalid timestamp string format " + time, + true + ); + + } + } else { + throw context.runtime.newTypeError("wrong argument type " + time.getMetaClass() + " (expected Time)"); + } + return context.nil; + } + + @JRubyMethod(name = "time") + public IRubyObject ruby_time(ThreadContext context) + { + return 
RubyTime.newTime(context.runtime, this.timestamp.getTime());
+ }
+
+ @JRubyMethod(name = "to_i")
+ public IRubyObject ruby_to_i(ThreadContext context)
+ {
+ return RubyFixnum.newFixnum(context.runtime, this.timestamp.getTime().getMillis() / 1000);
+ }
+
+ @JRubyMethod(name = "to_s")
+ public IRubyObject ruby_to_s(ThreadContext context)
+ {
+ return ruby_to_iso8601(context);
+ }
+
+ @JRubyMethod(name = "to_iso8601")
+ public IRubyObject ruby_to_iso8601(ThreadContext context)
+ {
+ return RubyString.newString(context.runtime, this.timestamp.toIso8601());
+ }
+
+ @JRubyMethod(name = "to_java")
+ public IRubyObject ruby_to_java(ThreadContext context)
+ {
+ return JavaUtil.convertJavaToUsableRubyObject(context.runtime, this.timestamp);
+ }
+
+ @JRubyMethod(name = "to_json", rest = true)
+ public IRubyObject ruby_to_json(ThreadContext context, IRubyObject[] args)
+ {
+ return RubyString.newString(context.runtime, "\"" + this.timestamp.toIso8601() + "\"");
+ }
+
+ public static Timestamp newTimestamp(IRubyObject time)
+ {
+ if (time.isNil()) {
+ return new Timestamp();
+ } else if (time instanceof RubyTime) {
+ return new Timestamp(((RubyTime)time).getDateTime());
+ } else if (time instanceof RubyString) {
+ return new Timestamp(((RubyString) time).toString());
+ } else if (time instanceof RubyTimestamp) {
+ return new Timestamp(((RubyTimestamp) time).timestamp);
+ } else {
+ return null;
+ }
+ }
+
+
+ @JRubyMethod(name = "coerce", required = 1, meta = true)
+ public static IRubyObject ruby_coerce(ThreadContext context, IRubyObject recv, IRubyObject time)
+ {
+ try {
+ Timestamp ts = newTimestamp(time);
+ return (ts == null) ? context.runtime.getNil() : RubyTimestamp.newRubyTimestamp(context.runtime, ts);
+ } catch (IllegalArgumentException e) {
+ throw new RaiseException(
+ context.runtime,
+ context.runtime.getModule("LogStash").getClass("TimestampParserError"),
+ "invalid timestamp format " + e.getMessage(),
+ true
+ );
+
+ }
+ }
+
+ @JRubyMethod(name = "parse_iso8601", required = 1, meta = true)
+ public static IRubyObject ruby_parse_iso8601(ThreadContext context, IRubyObject recv, IRubyObject time)
+ {
+ if (time instanceof RubyString) {
+ try {
+ return RubyTimestamp.newRubyTimestamp(context.runtime, newTimestamp(time));
+ } catch (IllegalArgumentException e) {
+ throw new RaiseException(
+ context.runtime,
+ context.runtime.getModule("LogStash").getClass("TimestampParserError"),
+ "invalid timestamp format " + e.getMessage(),
+ true
+ );
+
+ }
+ } else {
+ throw context.runtime.newTypeError("wrong argument type " + time.getMetaClass() + " (expected String)");
+ }
+ }
+
+ @JRubyMethod(name = "at", required = 1, optional = 1, meta = true)
+ public static IRubyObject ruby_at(ThreadContext context, IRubyObject recv, IRubyObject[] args)
+ {
+ RubyTime t;
+ if (args.length == 1) {
+ t = (RubyTime)RubyTime.at(context, context.runtime.getTime(), args[0]);
+ } else {
+ t = (RubyTime)RubyTime.at(context, context.runtime.getTime(), args[0], args[1]);
+ }
+ return RubyTimestamp.newRubyTimestamp(context.runtime, new Timestamp(t.getDateTime()));
+ }
+
+ @JRubyMethod(name = "now", meta = true)
+ public static IRubyObject ruby_now(ThreadContext context, IRubyObject recv)
+ {
+ return RubyTimestamp.newRubyTimestamp(context.runtime);
+ }
+ }
+} diff --git a/logstash-core-event-java/src/test/java/com/logstash/AccessorsTest.java b/logstash-core-event-java/src/test/java/com/logstash/AccessorsTest.java new file mode 100644 index 00000000000..61855abc34b --- /dev/null +++ 
b/logstash-core-event-java/src/test/java/com/logstash/AccessorsTest.java @@ -0,0 +1,185 @@ +package com.logstash; + +import org.junit.Test; +import static org.junit.Assert.*; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class AccessorsTest { + + public class TestableAccessors extends Accessors { + + public TestableAccessors(Map data) { + super(data); + } + + public Map getLut() { + return lut; + } + + public Object lutGet(String reference) { + return this.lut.get(reference); + } + } + + @Test + public void testBareGet() throws Exception { + Map data = new HashMap(); + data.put("foo", "bar"); + String reference = "foo"; + + TestableAccessors accessors = new TestableAccessors(data); + assertEquals(accessors.lutGet(reference), null); + assertEquals(accessors.get(reference), "bar"); + assertEquals(accessors.lutGet(reference), data); + } + + @Test + public void testAbsentBareGet() throws Exception { + Map data = new HashMap(); + data.put("foo", "bar"); + String reference = "baz"; + + TestableAccessors accessors = new TestableAccessors(data); + assertEquals(accessors.lutGet(reference), null); + assertEquals(accessors.get(reference), null); + assertEquals(accessors.lutGet(reference), data); + } + + @Test + public void testBareBracketsGet() throws Exception { + Map data = new HashMap(); + data.put("foo", "bar"); + String reference = "[foo]"; + + TestableAccessors accessors = new TestableAccessors(data); + assertEquals(accessors.lutGet(reference), null); + assertEquals(accessors.get(reference), "bar"); + assertEquals(accessors.lutGet(reference), data); + } + + @Test + public void testDeepMapGet() throws Exception { + Map data = new HashMap(); + Map inner = new HashMap(); + data.put("foo", inner); + inner.put("bar", "baz"); + + String reference = "[foo][bar]"; + + TestableAccessors accessors = new TestableAccessors(data); + assertEquals(accessors.lutGet(reference), null); + assertEquals(accessors.get(reference), "baz"); + assertEquals(accessors.lutGet(reference), inner); + } + + @Test + public void testAbsentDeepMapGet() throws Exception { + Map data = new HashMap(); + Map inner = new HashMap(); + data.put("foo", inner); + inner.put("bar", "baz"); + + String reference = "[foo][foo]"; + + TestableAccessors accessors = new TestableAccessors(data); + assertEquals(accessors.lutGet(reference), null); + assertEquals(accessors.get(reference), null); + assertEquals(accessors.lutGet(reference), inner); + } + + @Test + public void testDeepListGet() throws Exception { + Map data = new HashMap(); + List inner = new ArrayList(); + data.put("foo", inner); + inner.add("bar"); + + String reference = "[foo][0]"; + + TestableAccessors accessors = new TestableAccessors(data); + assertEquals(accessors.lutGet(reference), null); + assertEquals(accessors.get(reference), "bar"); + assertEquals(accessors.lutGet(reference), inner); + } + + @Test + public void testAbsentDeepListGet() throws Exception { + Map data = new HashMap(); + List inner = new ArrayList(); + data.put("foo", inner); + inner.add("bar"); + + String reference = "[foo][1]"; + + TestableAccessors accessors = new TestableAccessors(data); + assertEquals(accessors.lutGet(reference), null); + assertEquals(accessors.get(reference), null); + assertEquals(accessors.lutGet(reference), inner); + } + + @Test + public void testBarePut() throws Exception { + Map data = new HashMap(); + String reference = "foo"; + + TestableAccessors accessors = new TestableAccessors(data); + 
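+ // the lookup table starts empty: an entry for the reference is only
+ // cached once get/set first resolves it, as the assertions below verify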
assertEquals(accessors.lutGet(reference), null); + assertEquals(accessors.set(reference, "bar"), "bar"); + assertEquals(accessors.lutGet(reference), data); + assertEquals(accessors.get(reference), "bar"); + } + + @Test + public void testBareBracketsPut() throws Exception { + Map data = new HashMap(); + String reference = "[foo]"; + + TestableAccessors accessors = new TestableAccessors(data); + assertEquals(accessors.lutGet(reference), null); + assertEquals(accessors.set(reference, "bar"), "bar"); + assertEquals(accessors.lutGet(reference), data); + assertEquals(accessors.get(reference), "bar"); + } + + @Test + public void testDeepMapSet() throws Exception { + Map data = new HashMap(); + + String reference = "[foo][bar]"; + + TestableAccessors accessors = new TestableAccessors(data); + assertEquals(accessors.lutGet(reference), null); + assertEquals(accessors.set(reference, "baz"), "baz"); + assertEquals(accessors.lutGet(reference), data.get("foo")); + assertEquals(accessors.get(reference), "baz"); + } + + @Test + public void testDel() throws Exception { + Map data = new HashMap(); + List inner = new ArrayList(); + data.put("foo", inner); + inner.add("bar"); + data.put("bar", "baz"); + TestableAccessors accessors = new TestableAccessors(data); + + assertEquals(accessors.del("[foo][0]"), "bar"); + assertEquals(accessors.del("[foo][0]"), null); + assertEquals(accessors.get("[foo]"), new ArrayList<>()); + assertEquals(accessors.del("[bar]"), "baz"); + assertEquals(accessors.get("[bar]"), null); + } + + @Test + public void testNilInclude() throws Exception { + Map data = new HashMap(); + data.put("nilfield", null); + TestableAccessors accessors = new TestableAccessors(data); + + assertEquals(accessors.includes("nilfield"), true); + } +} diff --git a/logstash-core-event-java/src/test/java/com/logstash/EventTest.java b/logstash-core-event-java/src/test/java/com/logstash/EventTest.java new file mode 100644 index 00000000000..46ad8292817 --- /dev/null +++ b/logstash-core-event-java/src/test/java/com/logstash/EventTest.java @@ -0,0 +1,124 @@ +package com.logstash; + +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import org.jruby.RubyHash; +import org.jruby.ir.operands.Hash; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.junit.Assert.*; + +public class EventTest { + + @Test + public void testBareToJson() throws Exception { + Event e = new Event(); + assertEquals("{\"@timestamp\":\"" + e.getTimestamp().toIso8601() + "\",\"@version\":\"1\"}", e.toJson()); + } + + @Test + public void testSimpleStringFieldToJson() throws Exception { + Map data = new HashMap(); + data.put("foo", "bar"); + Event e = new Event(data); + assertEquals("{\"@timestamp\":\"" + e.getTimestamp().toIso8601() + "\",\"foo\":\"bar\",\"@version\":\"1\"}", e.toJson()); + } + + @Test + public void testSimpleIntegerFieldToJson() throws Exception { + Map data = new HashMap(); + data.put("foo", 1); + Event e = new Event(data); + assertEquals("{\"@timestamp\":\"" + e.getTimestamp().toIso8601() + "\",\"foo\":1,\"@version\":\"1\"}", e.toJson()); + } + + @Test + public void testSimpleDecimalFieldToJson() throws Exception { + Map data = new HashMap(); + data.put("foo", 1.0); + Event e = new Event(data); + assertEquals("{\"@timestamp\":\"" + e.getTimestamp().toIso8601() + "\",\"foo\":1.0,\"@version\":\"1\"}", e.toJson()); + } + + @Test + public void 
testSimpleMultipleFieldToJson() throws Exception { + Map data = new HashMap(); + data.put("foo", 1.0); + data.put("bar", "bar"); + data.put("baz", 1); + Event e = new Event(data); + assertEquals("{\"bar\":\"bar\",\"@timestamp\":\"" + e.getTimestamp().toIso8601() + "\",\"foo\":1.0,\"@version\":\"1\",\"baz\":1}", e.toJson()); + } + + @Test + public void testDeepMapFieldToJson() throws Exception { + Event e = new Event(); + e.setField("[foo][bar][baz]", 1); + assertEquals("{\"@timestamp\":\"" + e.getTimestamp().toIso8601() + "\",\"foo\":{\"bar\":{\"baz\":1}},\"@version\":\"1\"}", e.toJson()); + + e = new Event(); + e.setField("[foo][0][baz]", 1); + assertEquals("{\"@timestamp\":\"" + e.getTimestamp().toIso8601() + "\",\"foo\":{\"0\":{\"baz\":1}},\"@version\":\"1\"}", e.toJson()); + } + + @Test + public void testGetFieldList() throws Exception { + Map data = new HashMap(); + List l = new ArrayList(); + data.put("foo", l); + l.add(1); + Event e = new Event(data); + assertEquals(1, e.getField("[foo][0]")); + } + + @Test + public void testDeepGetField() throws Exception { + Map data = new HashMap(); + List l = new ArrayList(); + data.put("foo", l); + Map m = new HashMap(); + m.put("bar", "baz"); + l.add(m); + Event e = new Event(data); + assertEquals("baz", e.getField("[foo][0][bar]")); + } + + + @Test + public void testClone() throws Exception { + Map data = new HashMap(); + List l = new ArrayList(); + data.put("array", l); + + Map m = new HashMap(); + m.put("foo", "bar"); + l.add(m); + + data.put("foo", 1.0); + data.put("bar", "bar"); + data.put("baz", 1); + + Event e = new Event(data); + + Event f = e.clone(); + assertEquals("{\"bar\":\"bar\",\"@timestamp\":\"" + e.getTimestamp().toIso8601() + "\",\"array\":[{\"foo\":\"bar\"}],\"foo\":1.0,\"@version\":\"1\",\"baz\":1}", f.toJson()); + assertEquals(f.toJson(), e.toJson()); + } + + @Test + public void testAppend() throws Exception { + Map data1 = Maps.newHashMap(ImmutableMap.of("field1", Lists.newArrayList("original1", "original2"))); + Map data2 = Maps.newHashMap(ImmutableMap.of("field1", "original1")); + Event e = new Event(data1); + Event e2 = new Event(data2); + e.append(e2); + + assertEquals(Lists.newArrayList("original1", "original2"), e.getField("field1")); + } +} \ No newline at end of file diff --git a/logstash-core-event-java/src/test/java/com/logstash/FieldReferenceTest.java b/logstash-core-event-java/src/test/java/com/logstash/FieldReferenceTest.java new file mode 100644 index 00000000000..ad17810a72c --- /dev/null +++ b/logstash-core-event-java/src/test/java/com/logstash/FieldReferenceTest.java @@ -0,0 +1,40 @@ +package com.logstash; + +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static org.junit.Assert.*; + +public class FieldReferenceTest { + + @Test + public void testParseSingleBareField() throws Exception { + FieldReference f = FieldReference.parse("foo"); + assertTrue(f.getPath().isEmpty()); + assertEquals(f.getKey(), "foo"); + } + + @Test + public void testParseSingleFieldPath() throws Exception { + FieldReference f = FieldReference.parse("[foo]"); + assertTrue(f.getPath().isEmpty()); + assertEquals(f.getKey(), "foo"); + } + + @Test + public void testParse2FieldsPath() throws Exception { + FieldReference f = FieldReference.parse("[foo][bar]"); + assertEquals(f.getPath().toArray(), new String[]{"foo"}); + assertEquals(f.getKey(), "bar"); + } + + @Test + public void testParse3FieldsPath() throws Exception { + FieldReference f = 
FieldReference.parse("[foo][bar]]baz]"); + assertEquals(f.getPath().toArray(), new String[]{"foo", "bar"}); + assertEquals(f.getKey(), "baz"); + } +} \ No newline at end of file diff --git a/logstash-core-event-java/src/test/java/com/logstash/StringInterpolationTest.java b/logstash-core-event-java/src/test/java/com/logstash/StringInterpolationTest.java new file mode 100644 index 00000000000..52d4563db4b --- /dev/null +++ b/logstash-core-event-java/src/test/java/com/logstash/StringInterpolationTest.java @@ -0,0 +1,143 @@ +package com.logstash; + + +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.junit.Test; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; + +import static org.junit.Assert.*; + + +public class StringInterpolationTest { + @Test + public void testCompletelyStaticTemplate() throws IOException { + Event event = getTestEvent(); + String path = "/full/path/awesome"; + StringInterpolation si = StringInterpolation.getInstance(); + + assertEquals(path, si.evaluate(event, path)); + } + + @Test + public void testOneLevelField() throws IOException { + Event event = getTestEvent(); + String path = "/full/%{bar}/awesome"; + StringInterpolation si = StringInterpolation.getInstance(); + + assertEquals("/full/foo/awesome", si.evaluate(event, path)); + } + + @Test + public void testMultipleLevelField() throws IOException { + Event event = getTestEvent(); + String path = "/full/%{bar}/%{awesome}"; + StringInterpolation si = StringInterpolation.getInstance(); + + assertEquals("/full/foo/logstash", si.evaluate(event, path)); + } + + @Test + public void testMissingKey() throws IOException { + Event event = getTestEvent(); + String path = "/full/%{do-not-exist}"; + StringInterpolation si = StringInterpolation.getInstance(); + + assertEquals("/full/%{do-not-exist}", si.evaluate(event, path)); + } + + @Test + public void testDateFormater() throws IOException { + Event event = getTestEvent(); + String path = "/full/%{+YYYY}"; + StringInterpolation si = StringInterpolation.getInstance(); + + assertEquals("/full/2015", si.evaluate(event, path)); + } + + @Test + public void TestMixDateAndFields() throws IOException { + Event event = getTestEvent(); + String path = "/full/%{+YYYY}/weeee/%{bar}"; + StringInterpolation si = StringInterpolation.getInstance(); + + assertEquals("/full/2015/weeee/foo", si.evaluate(event, path)); + } + + @Test + public void testUnclosedTag() throws IOException { + Event event = getTestEvent(); + String path = "/full/%{+YYY/web"; + StringInterpolation si = StringInterpolation.getInstance(); + + assertEquals("/full/%{+YYY/web", si.evaluate(event, path)); + } + + @Test + public void TestStringIsOneDateTag() throws IOException { + Event event = getTestEvent(); + String path = "%{+YYYY}"; + StringInterpolation si = StringInterpolation.getInstance(); + assertEquals("2015", si.evaluate(event, path)); + } + + @Test + public void TestFieldRef() throws IOException { + Event event = getTestEvent(); + String path = "%{[j][k1]}"; + StringInterpolation si = StringInterpolation.getInstance(); + assertEquals("v", si.evaluate(event, path)); + } + + @Test + public void TestEpoch() throws IOException { + Event event = getTestEvent(); + String path = "%{+%s}"; + StringInterpolation si = StringInterpolation.getInstance(); + assertEquals("1443657600", si.evaluate(event, path)); + } + + @Test + public void TestValueIsArray() throws IOException { + ArrayList l = new ArrayList(); + l.add("Hello"); + 
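+ // build a two-element array value; evaluate() below is expected to
+ // join the array elements with a comma, yielding "Hello,world"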
l.add("world"); + + Event event = getTestEvent(); + event.setField("message", l); + + String path = "%{message}"; + StringInterpolation si = StringInterpolation.getInstance(); + assertEquals("Hello,world", si.evaluate(event, path)); + } + + @Test + public void TestValueIsHash() throws IOException { + Event event = getTestEvent(); + + String path = "%{j}"; + StringInterpolation si = StringInterpolation.getInstance(); + assertEquals("{\"k1\":\"v\"}", si.evaluate(event, path)); + } + + public Event getTestEvent() { + Map data = new HashMap(); + Map inner = new HashMap(); + + inner.put("k1", "v"); + + data.put("bar", "foo"); + data.put("awesome", "logstash"); + data.put("j", inner); + data.put("@timestamp", new DateTime(2015, 10, 1, 0, 0, 0, DateTimeZone.UTC)); + + + Event event = new Event(data); + + return event; + } +} diff --git a/logstash-core-event-java/src/test/java/com/logstash/TimestampTest.java b/logstash-core-event-java/src/test/java/com/logstash/TimestampTest.java new file mode 100644 index 00000000000..539fbe227cb --- /dev/null +++ b/logstash-core-event-java/src/test/java/com/logstash/TimestampTest.java @@ -0,0 +1,46 @@ +package com.logstash; + +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.junit.Test; +import static org.junit.Assert.*; + +public class TimestampTest { + + + @Test + public void testCircularIso8601() throws Exception { + Timestamp t1 = new Timestamp(); + Timestamp t2 = new Timestamp(t1.toIso8601()); + assertEquals(t1.getTime(), t2.getTime()); + } + + @Test + public void testToIso8601() throws Exception { + Timestamp t = new Timestamp("2014-09-23T00:00:00-0800"); + assertEquals("2014-09-23T08:00:00.000Z", t.toIso8601()); + } + + // Timestamp should always be in a UTC representation + @Test + public void testUTC() throws Exception { + Timestamp t; + + t = new Timestamp(); + assertEquals(DateTimeZone.UTC, t.getTime().getZone()); + + t = new Timestamp("2014-09-23T00:00:00-0800"); + assertEquals(DateTimeZone.UTC, t.getTime().getZone()); + + t = new Timestamp("2014-09-23T08:00:00.000Z"); + assertEquals(DateTimeZone.UTC, t.getTime().getZone()); + + t = new Timestamp(new Timestamp()); + assertEquals(DateTimeZone.UTC, t.getTime().getZone()); + + long ms = DateTime.now(DateTimeZone.forID("EST")).getMillis(); + t = new Timestamp(ms); + assertEquals(DateTimeZone.UTC, t.getTime().getZone()); + } + +} \ No newline at end of file diff --git a/logstash-core-event/lib/logstash-core-event.rb b/logstash-core-event/lib/logstash-core-event.rb new file mode 100644 index 00000000000..b2979326dac --- /dev/null +++ b/logstash-core-event/lib/logstash-core-event.rb @@ -0,0 +1 @@ +require "logstash-core-event/logstash-core-event" \ No newline at end of file diff --git a/logstash-core-event/lib/logstash-core-event/logstash-core-event.rb b/logstash-core-event/lib/logstash-core-event/logstash-core-event.rb new file mode 100644 index 00000000000..b0f773e203c --- /dev/null +++ b/logstash-core-event/lib/logstash-core-event/logstash-core-event.rb @@ -0,0 +1,5 @@ +# encoding: utf-8 +module LogStash +end + +require "logstash/event" \ No newline at end of file diff --git a/logstash-core-event/lib/logstash-core-event/version.rb b/logstash-core-event/lib/logstash-core-event/version.rb new file mode 100644 index 00000000000..18e991d6b0c --- /dev/null +++ b/logstash-core-event/lib/logstash-core-event/version.rb @@ -0,0 +1,8 @@ +# encoding: utf-8 + +# The version of logstash core event gem. 
+# +# Note to authors: this should not include dashes because 'gem' barfs if +# you include a dash in the version string. + +LOGSTASH_CORE_EVENT_VERSION = "3.0.0.dev" diff --git a/lib/logstash/event.rb b/logstash-core-event/lib/logstash/event.rb similarity index 99% rename from lib/logstash/event.rb rename to logstash-core-event/lib/logstash/event.rb index c00d5531305..70eed147392 100644 --- a/lib/logstash/event.rb +++ b/logstash-core-event/lib/logstash/event.rb @@ -106,7 +106,7 @@ def clone public def to_s - self.sprintf("#{timestamp.to_iso8601} %{host} %{message}") + "#{timestamp.to_iso8601} #{self.sprintf("%{host} %{message}")}" end # def to_s public diff --git a/lib/logstash/string_interpolation.rb b/logstash-core-event/lib/logstash/string_interpolation.rb similarity index 100% rename from lib/logstash/string_interpolation.rb rename to logstash-core-event/lib/logstash/string_interpolation.rb diff --git a/lib/logstash/timestamp.rb b/logstash-core-event/lib/logstash/timestamp.rb similarity index 100% rename from lib/logstash/timestamp.rb rename to logstash-core-event/lib/logstash/timestamp.rb diff --git a/lib/logstash/util/accessors.rb b/logstash-core-event/lib/logstash/util/accessors.rb similarity index 100% rename from lib/logstash/util/accessors.rb rename to logstash-core-event/lib/logstash/util/accessors.rb diff --git a/logstash-core-event/logstash-core-event.gemspec b/logstash-core-event/logstash-core-event.gemspec new file mode 100644 index 00000000000..5fcddccfdf0 --- /dev/null +++ b/logstash-core-event/logstash-core-event.gemspec @@ -0,0 +1,23 @@ +# -*- encoding: utf-8 -*- +lib = File.expand_path('../lib', __FILE__) +$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) +require 'logstash-core-event/version' + +Gem::Specification.new do |gem| + gem.authors = ["Jordan Sissel", "Pete Fritchman", "Elasticsearch"] + gem.email = ["jls@semicomplete.com", "petef@databits.net", "info@elasticsearch.com"] + gem.description = %q{The core event component of logstash, the scalable log and event management tool} + gem.summary = %q{logstash-core-event - The core event component of logstash} + gem.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html" + gem.license = "Apache License (2.0)" + + gem.files = Dir.glob(["logstash-core-event.gemspec", "lib/**/*.rb", "spec/**/*.rb"]) + gem.test_files = gem.files.grep(%r{^(test|spec|features)/}) + gem.name = "logstash-core-event" + gem.require_paths = ["lib"] + gem.version = LOGSTASH_CORE_EVENT_VERSION + + if RUBY_PLATFORM == 'java' + gem.platform = RUBY_PLATFORM + end +end diff --git a/spec/core/event_spec.rb b/logstash-core-event/spec/logstash/event_spec.rb similarity index 91% rename from spec/core/event_spec.rb rename to logstash-core-event/spec/logstash/event_spec.rb index 52b22c3115c..7e3376591ad 100644 --- a/spec/core/event_spec.rb +++ b/logstash-core-event/spec/logstash/event_spec.rb @@ -1,5 +1,6 @@ # encoding: utf-8 require "spec_helper" +require "json" describe LogStash::Event do @@ -317,44 +318,45 @@ it "should coerce timestamp" do t = Time.iso8601("2014-06-12T00:12:17.114Z") - expect(LogStash::Timestamp).to receive(:coerce).exactly(3).times.and_call_original expect(LogStash::Event.new("@timestamp" => t).timestamp.to_i).to eq(t.to_i) expect(LogStash::Event.new("@timestamp" => LogStash::Timestamp.new(t)).timestamp.to_i).to eq(t.to_i) expect(LogStash::Event.new("@timestamp" => "2014-06-12T00:12:17.114Z").timestamp.to_i).to eq(t.to_i) end it "should assign current time when no timestamp" do - ts = LogStash::Timestamp.now - 
expect(LogStash::Timestamp).to receive(:now).and_return(ts) - expect(LogStash::Event.new({}).timestamp.to_i).to eq(ts.to_i) + expect(LogStash::Event.new({}).timestamp.to_i).to be_within(1).of (Time.now.to_i) end - it "should tag and warn for invalid value" do - ts = LogStash::Timestamp.now - expect(LogStash::Timestamp).to receive(:now).twice.and_return(ts) - expect(LogStash::Event::LOGGER).to receive(:warn).twice - + it "should tag for invalid value" do event = LogStash::Event.new("@timestamp" => :foo) - expect(event.timestamp.to_i).to eq(ts.to_i) + expect(event.timestamp.to_i).to be_within(1).of Time.now.to_i expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG]) expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq(:foo) event = LogStash::Event.new("@timestamp" => 666) - expect(event.timestamp.to_i).to eq(ts.to_i) + expect(event.timestamp.to_i).to be_within(1).of Time.now.to_i expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG]) expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq(666) end - it "should tag and warn for invalid string format" do - ts = LogStash::Timestamp.now - expect(LogStash::Timestamp).to receive(:now).and_return(ts) - expect(LogStash::Event::LOGGER).to receive(:warn) + it "should warn for invalid value" do + expect(LogStash::Event::LOGGER).to receive(:warn).twice + LogStash::Event.new("@timestamp" => :foo) + LogStash::Event.new("@timestamp" => 666) + end + + it "should tag for invalid string format" do event = LogStash::Event.new("@timestamp" => "foo") - expect(event.timestamp.to_i).to eq(ts.to_i) + expect(event.timestamp.to_i).to be_within(1).of Time.now.to_i expect(event["tags"]).to eq([LogStash::Event::TIMESTAMP_FAILURE_TAG]) expect(event[LogStash::Event::TIMESTAMP_FAILURE_FIELD]).to eq("foo") end + + it "should warn for invalid string format" do + expect(LogStash::Event::LOGGER).to receive(:warn) + LogStash::Event.new("@timestamp" => "foo") + end end context "to_json" do @@ -365,7 +367,7 @@ ) json = new_event.to_json - expect(json).to eq( "{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}") + expect(JSON.parse(json)).to eq( JSON.parse("{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}")) end it "should support to_json and ignore arguments" do @@ -375,7 +377,7 @@ ) json = new_event.to_json(:foo => 1, :bar => "baz") - expect(json).to eq( "{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}") + expect(JSON.parse(json)).to eq( JSON.parse("{\"@timestamp\":\"2014-09-23T19:26:15.832Z\",\"message\":\"foo bar\",\"@version\":\"1\"}")) end end @@ -496,4 +498,23 @@ subject{LogStash::Event.new(LogStash::Json.load(LogStash::Json.dump(event_hash)))} end end + + + describe "#to_s" do + let(:timestamp) { LogStash::Timestamp.new } + let(:event1) { LogStash::Event.new({ "@timestamp" => timestamp, "host" => "foo", "message" => "bar"}) } + let(:event2) { LogStash::Event.new({ "host" => "bar", "message" => "foo"}) } + + it "should cache only one template" do + LogStash::StringInterpolation::CACHE.clear + expect { + event1.to_s + event2.to_s + }.to change { LogStash::StringInterpolation::CACHE.size }.by(1) + end + + it "return the string containing the timestamp, the host and the message" do + expect(event1.to_s).to eq("#{timestamp.to_iso8601} #{event1["host"]} #{event1["message"]}") + end + end end diff --git a/spec/core/timestamp_spec.rb b/logstash-core-event/spec/logstash/timestamp_spec.rb similarity index 100% rename from 
spec/core/timestamp_spec.rb rename to logstash-core-event/spec/logstash/timestamp_spec.rb diff --git a/spec/util/accessors_spec.rb b/logstash-core-event/spec/logstash/util/accessors_spec.rb similarity index 93% rename from spec/util/accessors_spec.rb rename to logstash-core-event/spec/logstash/util/accessors_spec.rb index af719a32999..e3c1a73e60e 100644 --- a/spec/util/accessors_spec.rb +++ b/logstash-core-event/spec/logstash/util/accessors_spec.rb @@ -1,8 +1,17 @@
 # encoding: utf-8
 require "spec_helper"
-require "logstash/util/accessors"
-describe LogStash::Util::Accessors, :if => true do
+# this is to skip specs when running against an alternate logstash-core-event implementation
+# that does not define the Accessors class. For example, in logstash-core-event-java
+# the Accessors class does not exist in the Ruby namespace.
+class_exists = begin
+ require "logstash/util/accessors"
+ true
+rescue LoadError
+ false
+end
+
+describe "LogStash::Util::Accessors", :if => class_exists do
 context "using simple field" do
diff --git a/logstash-core/lib/logstash-core.rb b/logstash-core/lib/logstash-core.rb new file mode 100644 index 00000000000..c2e4557afa8 --- /dev/null +++ b/logstash-core/lib/logstash-core.rb @@ -0,0 +1 @@ +require "logstash-core/logstash-core" diff --git a/lib/logstash-core.rb b/logstash-core/lib/logstash-core/logstash-core.rb similarity index 100% rename from lib/logstash-core.rb rename to logstash-core/lib/logstash-core/logstash-core.rb diff --git a/lib/logstash/version.rb b/logstash-core/lib/logstash-core/version.rb similarity index 64% rename from lib/logstash/version.rb rename to logstash-core/lib/logstash-core/version.rb index 17a5cd8c15b..fdc9d13f1a4 100644 --- a/lib/logstash/version.rb +++ b/logstash-core/lib/logstash-core/version.rb @@ -1,6 +1,8 @@
 # encoding: utf-8
-# The version of logstash.
-LOGSTASH_VERSION = "3.0.0.dev"
+# The version of logstash core gem.
+#
 # Note to authors: this should not include dashes because 'gem' barfs if
 # you include a dash in the version string.
+
+LOGSTASH_CORE_VERSION = "3.0.0.dev"
diff --git a/lib/logstash/agent.rb b/logstash-core/lib/logstash/agent.rb similarity index 90% rename from lib/logstash/agent.rb rename to logstash-core/lib/logstash/agent.rb index bb6734f8ad1..c30a5452a52 100644 --- a/lib/logstash/agent.rb +++ b/logstash-core/lib/logstash/agent.rb @@ -20,10 +20,10 @@ class LogStash::Agent < Clamp::Command
 :default_input => DEFAULT_INPUT, :default_output => DEFAULT_OUTPUT),
 :default => "", :attribute_name => :config_string
- option ["-w", "--filterworkers"], "COUNT",
- I18n.t("logstash.agent.flag.filterworkers"),
- :attribute_name => :filter_workers,
- :default => LogStash::Config::CpuCoreStrategy.fifty_percent, &:to_i
+ option ["-w", "--pipelineworkers"], "COUNT",
+ I18n.t("logstash.agent.flag.pipelineworkers"),
+ :attribute_name => :pipeline_workers,
+ :default => 0, &:to_i
 option ["-l", "--log"], "FILE",
 I18n.t("logstash.agent.flag.log"),
@@ -50,6 +50,11 @@ class LogStash::Agent < Clamp::Command
 I18n.t("logstash.agent.flag.configtest"),
 :attribute_name => :config_test
+ option "--[no-]allow-unsafe-shutdown", :flag,
+ I18n.t("logstash.agent.flag.unsafe_shutdown"),
+ :attribute_name => :unsafe_shutdown,
+ :default => false
+
 # Emit a warning message.
 def warn(message)
 # For now, all warnings are fatal.
@@ -75,6 +80,9 @@ def execute
 require "logstash/plugin"
 @logger = Cabin::Channel.get(LogStash)
+ LogStash::ShutdownController.unsafe_shutdown = unsafe_shutdown?
+ LogStash::ShutdownController.logger = @logger
+
 if version? 
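 # print the version and return immediately, before any pipeline is built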
show_version
 return 0
@@ -143,7 +151,7 @@ def execute
 configure_logging(log_file)
 end
- pipeline.configure("filter-workers", filter_workers)
+ pipeline.configure("pipeline-workers", pipeline_workers) if pipeline_workers > 0
 # Stop now if we are only asking for a config test.
 if config_test?
@@ -176,8 +184,7 @@ def execute
 def shutdown(pipeline)
 pipeline.shutdown do
- InflightEventsReporter.logger = @logger
- InflightEventsReporter.start(pipeline.input_to_filter, pipeline.filter_to_output, pipeline.outputs)
+ ::LogStash::ShutdownController.start(pipeline)
 end
 end
@@ -312,19 +319,27 @@ def local_config(path)
 Dir.glob(path).sort.each do |file|
 next unless File.file?(file)
 if file.match(/~$/)
- @logger.debug("NOT reading config file because it is a temp file", :file => file)
+ @logger.debug("NOT reading config file because it is a temp file", :config_file => file)
 next
 end
- @logger.debug("Reading config file", :file => file)
+ @logger.debug("Reading config file", :config_file => file)
 cfg = File.read(file)
 if !cfg.ascii_only? && !cfg.valid_encoding?
 encoding_issue_files << file
 end
 config << cfg + "\n"
+ if config_test?
+ @logger.debug? && @logger.debug("\nThe following is the content of a file", :config_file => file.to_s)
+ @logger.debug? && @logger.debug("\n" + cfg + "\n\n")
+ end
 end
 if (encoding_issue_files.any?)
 fail("The following config files contain non-ASCII characters but are not UTF-8 encoded #{encoding_issue_files}")
 end
+ if config_test?
+ @logger.debug? && @logger.debug("\nThe following is the merged configuration")
+ @logger.debug? && @logger.debug("\n" + config + "\n\n")
+ end
 return config
 end # def load_config
diff --git a/lib/logstash/certs/cacert.pem b/logstash-core/lib/logstash/certs/cacert.pem similarity index 100% rename from lib/logstash/certs/cacert.pem rename to logstash-core/lib/logstash/certs/cacert.pem diff --git a/lib/logstash/codecs/base.rb b/logstash-core/lib/logstash/codecs/base.rb similarity index 100% rename from lib/logstash/codecs/base.rb rename to logstash-core/lib/logstash/codecs/base.rb diff --git a/lib/logstash/config/config_ast.rb b/logstash-core/lib/logstash/config/config_ast.rb similarity index 93% rename from lib/logstash/config/config_ast.rb rename to logstash-core/lib/logstash/config/config_ast.rb index ace7322fedb..503e8d4760a 100644 --- a/lib/logstash/config/config_ast.rb +++ b/logstash-core/lib/logstash/config/config_ast.rb @@ -108,6 +108,7 @@ def compile
 # defines @filter_func and @output_func
 definitions << "def #{type}_func(event)"
+ definitions << " targeted_outputs = []" if type == "output"
 definitions << " events = [event]" if type == "filter"
 definitions << " @logger.debug? && @logger.debug(\"#{type} received\", :event => event.to_hash)"
@@ -116,6 +117,7 @@ def compile
 end
 definitions << " events" if type == "filter"
+ definitions << " targeted_outputs" if type == "output"
 definitions << "end"
 end
@@ -142,7 +144,7 @@ def compile_initializer
 code << <<-CODE
- #{name} = #{plugin.compile_initializer}
+#{name} = #{plugin.compile_initializer}
 @#{plugin.plugin_type}s << #{name}
 CODE
@@ -150,7 +152,7 @@ def compile_initializer
 if plugin.plugin_type == "filter"
 code << <<-CODE
- #{name}_flush = lambda do |options, &block|
+#{name}_flush = lambda do |options, &block|
 @logger.debug? 
&& @logger.debug(\"Flushing\", :plugin => #{name}) events = #{name}.flush(options) @@ -230,18 +232,18 @@ def compile_initializer def compile case plugin_type - when "input" - return "start_input(#{variable_name})" - when "filter" - return <<-CODE + when "input" + return "start_input(#{variable_name})" + when "filter" + return <<-CODE events = #{variable_name}.multi_filter(events) - CODE - when "output" - return "#{variable_name}.handle(event)\n" - when "codec" - settings = attributes.recursive_select(Attribute).collect(&:compile).reject(&:empty?) - attributes_code = "LogStash::Util.hash_merge_many(#{settings.map { |c| "{ #{c} }" }.join(", ")})" - return "plugin(#{plugin_type.inspect}, #{plugin_name.inspect}, #{attributes_code})" + CODE + when "output" + return "targeted_outputs << #{variable_name}\n" + when "codec" + settings = attributes.recursive_select(Attribute).collect(&:compile).reject(&:empty?) + attributes_code = "LogStash::Util.hash_merge_many(#{settings.map { |c| "{ #{c} }" }.join(", ")})" + return "plugin(#{plugin_type.inspect}, #{plugin_name.inspect}, #{attributes_code})" end end @@ -345,7 +347,7 @@ def validate! :column => input.column_of(interval.first), :byte => interval.first + 1, :after => input[0..interval.first] - ) + ) ) end end @@ -402,9 +404,9 @@ def cond_func_#{i}(input_events) <<-CODE events = cond_func_#{i}(events) CODE - else + else # Output <<-CODE - #{super} +#{super} end CODE end @@ -525,7 +527,7 @@ def _inspect(indent="") tv = "...#{tv[-20..-1]}" if tv.size > 20 indent + - self.class.to_s.sub(/.*:/,'') + + self.class.to_s.sub(/.*:/,'') + em.map{|m| "+"+m.to_s.sub(/.*:/,'')}*"" + " offset=#{interval.first}" + ", #{tv.inspect}" + @@ -533,13 +535,13 @@ def _inspect(indent="") (elements && elements.size > 0 ? ":" + (elements.select { |e| !e.is_a?(LogStash::Config::AST::Whitespace) && e.elements && e.elements.size > 0 }||[]).map{|e| - begin - "\n"+e.inspect(indent+" ") - rescue # Defend against inspect not taking a parameter - "\n"+indent+" "+e.inspect - end + begin + "\n"+e.inspect(indent+" ") + rescue # Defend against inspect not taking a parameter + "\n"+indent+" "+e.inspect + end }.join("") : "" ) end -end +end \ No newline at end of file diff --git a/lib/logstash/config/cpu_core_strategy.rb b/logstash-core/lib/logstash/config/cpu_core_strategy.rb similarity index 100% rename from lib/logstash/config/cpu_core_strategy.rb rename to logstash-core/lib/logstash/config/cpu_core_strategy.rb diff --git a/lib/logstash/config/defaults.rb b/logstash-core/lib/logstash/config/defaults.rb similarity index 100% rename from lib/logstash/config/defaults.rb rename to logstash-core/lib/logstash/config/defaults.rb diff --git a/lib/logstash/config/file.rb b/logstash-core/lib/logstash/config/file.rb similarity index 100% rename from lib/logstash/config/file.rb rename to logstash-core/lib/logstash/config/file.rb diff --git a/lib/logstash/config/grammar.rb b/logstash-core/lib/logstash/config/grammar.rb similarity index 100% rename from lib/logstash/config/grammar.rb rename to logstash-core/lib/logstash/config/grammar.rb diff --git a/lib/logstash/config/grammar.treetop b/logstash-core/lib/logstash/config/grammar.treetop similarity index 100% rename from lib/logstash/config/grammar.treetop rename to logstash-core/lib/logstash/config/grammar.treetop diff --git a/lib/logstash/config/mixin.rb b/logstash-core/lib/logstash/config/mixin.rb similarity index 100% rename from lib/logstash/config/mixin.rb rename to logstash-core/lib/logstash/config/mixin.rb diff --git 
a/lib/logstash/config/registry.rb b/logstash-core/lib/logstash/config/registry.rb similarity index 100% rename from lib/logstash/config/registry.rb rename to logstash-core/lib/logstash/config/registry.rb diff --git a/lib/logstash/environment.rb b/logstash-core/lib/logstash/environment.rb similarity index 81% rename from lib/logstash/environment.rb rename to logstash-core/lib/logstash/environment.rb index 8f710eed088..79e7f24d86c 100644 --- a/lib/logstash/environment.rb +++ b/logstash-core/lib/logstash/environment.rb @@ -1,18 +1,10 @@ # encoding: utf-8 require "logstash/errors" -require "logstash/version" module LogStash module Environment extend self - # rehydrate the bootstrap environment if the startup was not done by executing bootstrap.rb - # and we are in the context of the logstash package - if !LogStash::Environment.const_defined?("LOGSTASH_HOME") && !ENV["LOGSTASH_HOME"].to_s.empty? - $LOAD_PATH << ::File.join(ENV["LOGSTASH_HOME"], "lib") - require "bootstrap/environment" - end - LOGSTASH_CORE = ::File.expand_path(::File.join(::File.dirname(__FILE__), "..", "..")) LOGSTASH_ENV = (ENV["LS_ENV"] || 'production').to_s.freeze @@ -81,14 +73,6 @@ def windows? ::Gem.win_platform? end - def vendor_path(path) - return ::File.join(LOGSTASH_HOME, "vendor", path) - end - - def pattern_path(path) - return ::File.join(LOGSTASH_HOME, "patterns", path) - end - def locales_path(path) return ::File.join(LOGSTASH_CORE, "locales", path) end diff --git a/lib/logstash/errors.rb b/logstash-core/lib/logstash/errors.rb similarity index 100% rename from lib/logstash/errors.rb rename to logstash-core/lib/logstash/errors.rb diff --git a/lib/logstash/filters/base.rb b/logstash-core/lib/logstash/filters/base.rb similarity index 93% rename from lib/logstash/filters/base.rb rename to logstash-core/lib/logstash/filters/base.rb index 4ce752a0e33..d2813e5f9c0 100644 --- a/lib/logstash/filters/base.rb +++ b/logstash-core/lib/logstash/filters/base.rb @@ -143,6 +143,7 @@ def filter(event) # @return [Array tag) - event["tags"].delete(tag) + @logger.debug? 
and @logger.debug("filters/#{self.class.name}: removing tag", :tag => tag) + tags.delete(tag) + event["tags"] = tags end end # def filter_matched diff --git a/lib/logstash/inputs/base.rb b/logstash-core/lib/logstash/inputs/base.rb similarity index 100% rename from lib/logstash/inputs/base.rb rename to logstash-core/lib/logstash/inputs/base.rb diff --git a/lib/logstash/inputs/threadable.rb b/logstash-core/lib/logstash/inputs/threadable.rb similarity index 100% rename from lib/logstash/inputs/threadable.rb rename to logstash-core/lib/logstash/inputs/threadable.rb diff --git a/lib/logstash/java_integration.rb b/logstash-core/lib/logstash/java_integration.rb similarity index 100% rename from lib/logstash/java_integration.rb rename to logstash-core/lib/logstash/java_integration.rb diff --git a/lib/logstash/json.rb b/logstash-core/lib/logstash/json.rb similarity index 100% rename from lib/logstash/json.rb rename to logstash-core/lib/logstash/json.rb diff --git a/lib/logstash/logging.rb b/logstash-core/lib/logstash/logging.rb similarity index 100% rename from lib/logstash/logging.rb rename to logstash-core/lib/logstash/logging.rb diff --git a/lib/logstash/namespace.rb b/logstash-core/lib/logstash/namespace.rb similarity index 100% rename from lib/logstash/namespace.rb rename to logstash-core/lib/logstash/namespace.rb diff --git a/lib/logstash/outputs/base.rb b/logstash-core/lib/logstash/outputs/base.rb similarity index 61% rename from lib/logstash/outputs/base.rb rename to logstash-core/lib/logstash/outputs/base.rb index d3c49899860..0a9af01084e 100644 --- a/lib/logstash/outputs/base.rb +++ b/logstash-core/lib/logstash/outputs/base.rb @@ -4,6 +4,8 @@ require "logstash/plugin" require "logstash/namespace" require "logstash/config/mixin" +require "logstash/util/wrapped_synchronous_queue" +require "concurrent/atomic/atomic_fixnum" class LogStash::Outputs::Base < LogStash::Plugin include LogStash::Config::Mixin @@ -23,7 +25,7 @@ class LogStash::Outputs::Base < LogStash::Plugin # Note that this setting may not be useful for all outputs. 
config :workers, :validate => :number, :default => 1
- attr_reader :worker_plugins, :worker_queue
+ attr_reader :worker_plugins, :available_workers, :workers, :single_worker_mutex, :is_multi_worker
 public
 def workers_not_supported(message=nil)
@@ -40,6 +42,11 @@ def workers_not_supported(message=nil)
 def initialize(params={})
 super
 config_init(params)
+
+ # If we're running with a single thread we must enforce single-threaded concurrency by default
+ # Maybe in a future version we'll assume output plugins are threadsafe
+ @single_worker_mutex = Mutex.new
+
 worker_setup
 end
 public
@@ -54,32 +61,59 @@ def receive(event)
 public
 def worker_setup
+ # TODO: Remove this branch, delete this function
 if @workers == 1
+ @is_multi_worker = false
 @worker_plugins = [self]
 else
- define_singleton_method(:handle, method(:handle_worker))
- @worker_queue = SizedQueue.new(20)
+ @is_multi_worker = true
+ define_singleton_method(:multi_handle, method(:handle_worker))
+
 @worker_plugins = @workers.times.map { self.class.new(@original_params.merge("workers" => 1)) }
- @worker_plugins.map.with_index do |plugin, i|
- Thread.new(original_params, @worker_queue) do |params, queue|
- LogStash::Util::set_thread_name(">#{self.class.config_name}.#{i}")
- plugin.register
- while true
- event = queue.pop
- plugin.handle(event)
- end
- end
+
+ @available_workers = SizedQueue.new(@worker_plugins.length)
+
+ @worker_plugins.each do |wp|
+ wp.register
+ @available_workers << wp
 end
 end
 end
 public
+ # Not to be overridden by plugin authors!
 def handle(event)
- receive(event)
+ @single_worker_mutex.synchronize { receive(event) }
 end # def handle
- def handle_worker(event)
- @worker_queue.push(event)
+ # To be overridden in implementations
+ def multi_receive(events)
+ events.each {|event|
+ receive(event)
+ }
+ end
+
+ # Not to be overridden by plugin authors!
+ def multi_handle(events)
+ @single_worker_mutex.synchronize { multi_receive(events) }
+ end
+
+ def handle_worker(events)
+ worker = @available_workers.pop
+ begin
+ worker.multi_receive(events)
+ ensure
+ @available_workers.push(worker)
+ end
+ end
+
+ def do_close
+ if @worker_plugins
+ @worker_plugins.each do |wp|
+ wp.do_close unless wp === self
+ end
+ end
+ super
 end
 private
@@ -87,4 +121,4 @@ def output?(event)
 # TODO: noop for now, remove this once we delete this call from all plugins
 true
 end # def output? 
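+  # Dispatch sketch: with workers == 1, multi_handle serializes multi_receive
+  # through @single_worker_mutex; with workers > 1 it is rebound to
+  # handle_worker, which checks a clone out of @available_workers, e.g.
+  #
+  #   out = LogStash::Outputs::Stdout.new("workers" => 2)  # hypothetical plugin/params
+  #   out.multi_handle(events)                             # => handle_worker(events)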
-end # class LogStash::Outputs::Base +end # class LogStash::Outputs::Base \ No newline at end of file diff --git a/lib/logstash/patches.rb b/logstash-core/lib/logstash/patches.rb similarity index 100% rename from lib/logstash/patches.rb rename to logstash-core/lib/logstash/patches.rb diff --git a/lib/logstash/patches/bugfix_jruby_2558.rb b/logstash-core/lib/logstash/patches/bugfix_jruby_2558.rb similarity index 100% rename from lib/logstash/patches/bugfix_jruby_2558.rb rename to logstash-core/lib/logstash/patches/bugfix_jruby_2558.rb diff --git a/lib/logstash/patches/cabin.rb b/logstash-core/lib/logstash/patches/cabin.rb similarity index 100% rename from lib/logstash/patches/cabin.rb rename to logstash-core/lib/logstash/patches/cabin.rb diff --git a/lib/logstash/patches/profile_require_calls.rb b/logstash-core/lib/logstash/patches/profile_require_calls.rb similarity index 100% rename from lib/logstash/patches/profile_require_calls.rb rename to logstash-core/lib/logstash/patches/profile_require_calls.rb diff --git a/lib/logstash/patches/rubygems.rb b/logstash-core/lib/logstash/patches/rubygems.rb similarity index 100% rename from lib/logstash/patches/rubygems.rb rename to logstash-core/lib/logstash/patches/rubygems.rb diff --git a/lib/logstash/patches/stronger_openssl_defaults.rb b/logstash-core/lib/logstash/patches/stronger_openssl_defaults.rb similarity index 100% rename from lib/logstash/patches/stronger_openssl_defaults.rb rename to logstash-core/lib/logstash/patches/stronger_openssl_defaults.rb diff --git a/logstash-core/lib/logstash/pipeline.rb b/logstash-core/lib/logstash/pipeline.rb new file mode 100644 index 00000000000..e89ca66f5b1 --- /dev/null +++ b/logstash-core/lib/logstash/pipeline.rb @@ -0,0 +1,369 @@ +# encoding: utf-8 +require "thread" +require "stud/interval" +require "concurrent" +require "logstash/namespace" +require "logstash/errors" +require "logstash/event" +require "logstash/config/file" +require "logstash/filters/base" +require "logstash/inputs/base" +require "logstash/outputs/base" +require "logstash/config/cpu_core_strategy" +require "logstash/util/defaults_printer" +require "logstash/shutdown_controller" +require "logstash/util/wrapped_synchronous_queue" + +module LogStash; class Pipeline + attr_reader :inputs, :filters, :outputs, :worker_threads, :events_consumed, :events_emitted + + def initialize(configstr) + @logger = Cabin::Channel.get(LogStash) + + @inputs = nil + @filters = nil + @outputs = nil + + grammar = LogStashConfigParser.new + @config = grammar.parse(configstr) + if @config.nil? + raise LogStash::ConfigurationError, grammar.failure_reason + end + # This will compile the config to ruby and evaluate the resulting code. + # The code will initialize all the plugins and define the + # filter and output methods. + code = @config.compile + # The config code is hard to represent as a log message... + # So just print it. + @logger.debug? && @logger.debug("Compiled pipeline code:\n#{code}") + begin + eval(code) + rescue => e + raise + end + + @input_queue = LogStash::Util::WrappedSynchronousQueue.new + @events_emitted = Concurrent::AtomicFixnum.new(0) + @events_consumed = Concurrent::AtomicFixnum.new(0) + + # We generally only want one thread at a time able to access pop/take/poll operations + # from this queue. 
We also depend on this to be able to block consumers while we snapshot
+ # in-flight buffers
+ @input_queue_pop_mutex = Mutex.new
+
+ @input_threads = []
+
+ @settings = {
+ "default-pipeline-workers" => LogStash::Config::CpuCoreStrategy.fifty_percent,
+ "batch-size" => 125,
+ "batch-poll-wait" => 50 # in milliseconds
+ }
+
+ # @ready requires thread safety since it is typically polled from outside the pipeline thread
+ @ready = Concurrent::AtomicBoolean.new(false)
+ end # def initialize
+
+ def ready?
+ @ready.value
+ end
+
+ def configure(setting, value)
+ @settings[setting] = value
+ end
+
+ def safe_pipeline_worker_count
+ default = @settings["default-pipeline-workers"]
+ thread_count = @settings["pipeline-workers"] # override from args "-w 8" or config
+ safe_filters, unsafe_filters = @filters.partition(&:threadsafe?)
+
+ if unsafe_filters.any?
+ plugins = unsafe_filters.collect { |f| f.class.config_name }
+ case thread_count
+ when nil
+ # user did not specify a worker thread count
+ # warn if the default is multiple
+ @logger.warn("Defaulting pipeline worker threads to 1 because there are some filters that might not work with multiple worker threads",
+ :count_was => default, :filters => plugins) if default > 1
+ 1 # can't allow the default value to propagate if there are unsafe filters
+ when 0, 1
+ 1
+ else
+ @logger.warn("Warning: Manual override - there are filters that might not work with multiple worker threads",
+ :worker_threads => thread_count, :filters => plugins)
+ thread_count # allow user to force this even if there are unsafe filters
+ end
+ else
+ thread_count || default
+ end
+ end
+
+ def filters?
+ return @filters.any?
+ end
+
+ def run
+ LogStash::Util.set_thread_name(">pipeline")
+ @logger.terminal(LogStash::Util::DefaultsPrinter.print(@settings))
+
+ start_workers
+
+ @logger.info("Pipeline started")
+ @logger.terminal("Logstash startup completed")
+
+ @logger.info("Will run until input threads have stopped")
+
+ # Block until all inputs have stopped
+ # Generally this happens if SIGINT is sent and `shutdown` is called from an external thread
+ wait_inputs
+ @logger.info("Inputs stopped")
+
+ shutdown_workers
+
+ @logger.info("Pipeline shutdown complete.")
+ @logger.terminal("Logstash shutdown completed")
+
+ # exit code
+ return 0
+ end # def run
+
+ def start_workers
+ @inflight_batches = {}
+
+ @worker_threads = []
+ begin
+ start_inputs
+ @outputs.each {|o| o.register }
+ @filters.each {|f| f.register}
+
+ pipeline_workers = safe_pipeline_worker_count
+ batch_size = @settings['batch-size']
+ batch_poll_wait = @settings['batch-poll-wait']
+ @logger.info("Starting pipeline",
+ :id => self.object_id,
+ :settings => @settings)
+
+ pipeline_workers.times do |t|
+ @worker_threads << Thread.new do
+ LogStash::Util.set_thread_name(">worker#{t}")
+ worker_loop(batch_size, batch_poll_wait)
+ end
+ end
+ ensure
+ # it is important to guarantee that @ready is true after the startup sequence has been completed
+ # to potentially unblock the shutdown method which may be waiting on @ready to proceed
+ @ready.make_true
+ end
+ end
+
+ # Main body of what a worker thread does
+ # Repeatedly takes batches off the queue, filters, then outputs them
+ def worker_loop(batch_size, batch_poll_wait)
+ running = true
+
+ while running
+ # To understand the purpose behind this synchronize please read the body of take_batch
+ input_batch = @input_queue_pop_mutex.synchronize { take_batch(batch_size, batch_poll_wait) }
+ @events_consumed.increment(input_batch.size)
+ running = 
!input_batch.include?(LogStash::SHUTDOWN)
+
+ filtered = filter_batch(input_batch)
+ output_batch(filtered)
+
+ inflight_batches_synchronize { set_current_thread_inflight_batch(nil) }
+ end
+ end
+
+ def take_batch(batch_size, batch_poll_wait)
+ batch = []
+ # Since this is externally synchronized in `worker_loop` we can guarantee that any inflight batch
+ # made visible here is a full batch, never a partial batch
+ set_current_thread_inflight_batch(batch)
+
+ batch_size.times do |t|
+ event = t==0 ? @input_queue.take : @input_queue.poll(batch_poll_wait)
+ # Exit early so each thread only gets one copy of this
+ # This is necessary to ensure proper shutdown!
+ next if event.nil?
+ batch << event
+ break if event == LogStash::SHUTDOWN
+ end
+ batch
+ end
+
+ def filter_batch(batch)
+ batch.reduce([]) do |acc,e|
+ if e.is_a?(LogStash::Event)
+ filtered = filter_func(e)
+ filtered.each {|fe| acc << fe unless fe.cancelled?}
+ end
+ acc
+ end
+ rescue Exception => e
+ # Plugin authors should manage their own exceptions in the plugin code
+ # but if an exception is raised up to the worker thread it is considered
+ # fatal and logstash will not recover from this situation.
+ #
+ # Users need to check their configuration or see if there is a bug in the
+ # plugin.
+ @logger.error("Exception in filterworker, the pipeline stopped processing new events, please check your filter configuration and restart Logstash.",
+ "exception" => e, "backtrace" => e.backtrace)
+ raise
+ end
+
+ # Take an array of events and send them to the correct output
+ def output_batch(batch)
+ batch.reduce(Hash.new { |h, k| h[k] = [] }) do |outputs_events, event|
+ # We ask the AST to tell us which outputs to send each event to
+ output_func(event).each do |output|
+ outputs_events[output] << event
+ end
+ outputs_events
+ end.each do |output, events|
+ # Once we have a mapping of outputs => [events] we can execute them
+ output.multi_handle(events)
+ end
+ end
+
+ def set_current_thread_inflight_batch(batch)
+ @inflight_batches[Thread.current] = batch
+ end
+
+ def inflight_batches_synchronize
+ @input_queue_pop_mutex.synchronize do
+ yield(@inflight_batches)
+ end
+ end
+
+ def dump_inflight(file_path)
+ inflight_batches_synchronize do |batches|
+ File.open(file_path, "w") do |f|
+ batches.values.each do |batch|
+ next unless batch
+ batch.each do |e|
+ f.write(LogStash::Json.dump(e))
+ end
+ end
+ end
+ end
+ end
+
+ def wait_inputs
+ @input_threads.each(&:join)
+ end
+
+ def start_inputs
+ moreinputs = []
+ @inputs.each do |input|
+ if input.threadable && input.threads > 1
+ (input.threads - 1).times do |i|
+ moreinputs << input.clone
+ end
+ end
+ end
+ @inputs += moreinputs
+
+ @inputs.each do |input|
+ input.register
+ start_input(input)
+ end
+ end
+
+ def start_input(plugin)
+ @input_threads << Thread.new { inputworker(plugin) }
+ end
+
+ def inputworker(plugin)
+ LogStash::Util::set_thread_name("<#{plugin.class.config_name}")
+ begin
+ plugin.run(@input_queue)
+ rescue => e
+ # if the plugin is stopping, ignore the exception
+ if plugin.stop?
+ @logger.debug("Input plugin raised exception during shutdown, ignoring it.",
+ :plugin => plugin.class.config_name, :exception => e,
+ :backtrace => e.backtrace)
+ return
+ end
+
+ # otherwise, report error and restart
+ if @logger.debug? 
+ @logger.error(I18n.t("logstash.pipeline.worker-error-debug",
+ :plugin => plugin.inspect, :error => e.to_s,
+ :exception => e.class,
+ :stacktrace => e.backtrace.join("\n")))
+ else
+ @logger.error(I18n.t("logstash.pipeline.worker-error",
+ :plugin => plugin.inspect, :error => e))
+ end
+
+ # Assuming the failure that caused this exception is transient,
+ # let's sleep for a bit and execute #run again
+ sleep(1)
+ retry
+ ensure
+ plugin.do_close
+ end
+ end # def inputworker
+
+ # initiate the pipeline shutdown sequence
+ # this method is intended to be called from outside the pipeline thread
+ # @param before_stop [Proc] code block called before performing stop operation on input plugins
+ def shutdown(&before_stop)
+ # shutdown can only start once the pipeline has completed its startup.
+ # avoid potential race condition between the startup sequence and this
+ # shutdown method which can be called from another thread at any time
+ sleep(0.1) while !ready?
+
+ # TODO: should we also check against calling shutdown multiple times concurrently?
+
+ before_stop.call if block_given?
+
+ @logger.info "Closing inputs"
+ @inputs.each(&:do_stop)
+ @logger.info "Closed inputs"
+ end # def shutdown
+
+ # After `shutdown` is called from an external thread this is called from the main thread to
+ # tell the worker threads to stop and then block until they've fully stopped
+ # This also stops all filter and output plugins
+ def shutdown_workers
+ # Each worker thread will receive this exactly once!
+ @worker_threads.each do |t|
+ @logger.debug("Pushing shutdown", :thread => t)
+ @input_queue.push(LogStash::SHUTDOWN)
+ end
+
+ @worker_threads.each do |t|
+ @logger.debug("Shutdown waiting for worker thread #{t}")
+ t.join
+ end
+
+ @filters.each(&:do_close)
+ @outputs.each(&:do_close)
+ end
+
+ def plugin(plugin_type, name, *args)
+ args << {} if args.empty?
+ klass = LogStash::Plugin.lookup(plugin_type, name)
+ return klass.new(*args)
+ end
+
+ # for backward compatibility in devutils for the rspec helpers, this method is not used
+ # in the pipeline anymore.
+ def filter(event, &block)
+ # filter_func returns all filtered events, including cancelled ones
+ filter_func(event).each { |e| block.call(e) }
+ end
+
+
+ # perform filters flush and yield flushed events to the passed block
+ # @param options [Hash]
+ # @option options [Boolean] :final => true to signal a final shutdown flush
+ def flush_filters(options = {}, &block)
+ flushers = options[:final] ? @shutdown_flushers : @periodic_flushers
+
+ flushers.each do |flusher|
+ flusher.call(options, &block)
+ end
+ end
+end end \ No newline at end of file diff --git a/lib/logstash/plugin.rb b/logstash-core/lib/logstash/plugin.rb similarity index 98% rename from lib/logstash/plugin.rb rename to logstash-core/lib/logstash/plugin.rb index e4ed6171ecc..bfab9a58d28 100644 --- a/lib/logstash/plugin.rb +++ b/logstash-core/lib/logstash/plugin.rb @@ -59,6 +59,11 @@ def inspect
 end
 end
+ public
+ def debug_info
+ [self.class.to_s, original_params]
+ end
+
 # Look up a plugin by type and name. 
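 # e.g. klass = LogStash::Plugin.lookup("filter", "grok")
 #      filter = klass.new("match" => { "message" => "%{NUMBER:num}" })  # hypothetical params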
public def self.lookup(type, name) diff --git a/lib/logstash/program.rb b/logstash-core/lib/logstash/program.rb similarity index 100% rename from lib/logstash/program.rb rename to logstash-core/lib/logstash/program.rb diff --git a/lib/logstash/runner.rb b/logstash-core/lib/logstash/runner.rb similarity index 100% rename from lib/logstash/runner.rb rename to logstash-core/lib/logstash/runner.rb diff --git a/logstash-core/lib/logstash/shutdown_controller.rb b/logstash-core/lib/logstash/shutdown_controller.rb new file mode 100644 index 00000000000..6941753bbc8 --- /dev/null +++ b/logstash-core/lib/logstash/shutdown_controller.rb @@ -0,0 +1,127 @@ +# encoding: utf-8 + +module LogStash + class ShutdownController + + CHECK_EVERY = 1 # second + REPORT_EVERY = 5 # checks + ABORT_AFTER = 3 # stalled reports + + attr_reader :cycle_period, :report_every, :abort_threshold + + def initialize(pipeline, cycle_period=CHECK_EVERY, report_every=REPORT_EVERY, abort_threshold=ABORT_AFTER) + @pipeline = pipeline + @cycle_period = cycle_period + @report_every = report_every + @abort_threshold = abort_threshold + @reports = [] + end + + def self.unsafe_shutdown=(boolean) + @unsafe_shutdown = boolean + end + + def self.unsafe_shutdown? + @unsafe_shutdown + end + + def self.logger=(logger) + @logger = logger + end + + def self.logger + @logger ||= Cabin::Channel.get(LogStash) + end + + def self.start(pipeline, cycle_period=CHECK_EVERY, report_every=REPORT_EVERY, abort_threshold=ABORT_AFTER) + controller = self.new(pipeline, cycle_period, report_every, abort_threshold) + Thread.new(controller) { |controller| controller.start } + end + + def logger + self.class.logger + end + + def start + sleep(@cycle_period) + cycle_number = 0 + stalled_count = 0 + Stud.interval(@cycle_period) do + @reports << Report.from_pipeline(@pipeline) + @reports.delete_at(0) if @reports.size > @report_every # expire old report + if cycle_number == (@report_every - 1) # it's report time! + logger.warn(@reports.last.to_hash) + + if shutdown_stalled? + logger.error("The shutdown process appears to be stalled due to busy or blocked plugins. Check the logs for more information.") if stalled_count == 0 + stalled_count += 1 + + if self.class.unsafe_shutdown? && @abort_threshold == stalled_count + logger.fatal("Forcefully quitting logstash..") + force_exit() + break + end + else + stalled_count = 0 + end + end + cycle_number = (cycle_number + 1) % @report_every + end + end + + # A pipeline shutdown is stalled if + # * at least REPORT_EVERY reports have been created + # * the inflight event count is in monotonically increasing + # * there are worker threads running which aren't blocked on SizedQueue pop/push + # * the stalled thread list is constant in the previous REPORT_EVERY reports + def shutdown_stalled? + return false unless @reports.size == @report_every # + # is stalled if inflight count is either constant or increasing + stalled_event_count = @reports.each_cons(2).all? do |prev_report, next_report| + prev_report.inflight_count["total"] <= next_report.inflight_count["total"] + end + if stalled_event_count + @reports.each_cons(2).all? 
do |prev_report, next_report| + prev_report.stalling_threads == next_report.stalling_threads + end + else + false + end + end + + def force_exit + exit(-1) + end + end + + class Report + + attr_reader :inflight_count, :stalling_threads + + def self.from_pipeline(pipeline) + new(pipeline.inflight_count, pipeline.stalling_threads) + end + + def initialize(inflight_count, stalling_threads) + @inflight_count = inflight_count + @stalling_threads = format_threads_by_plugin(stalling_threads) + end + + def to_hash + { + "INFLIGHT_EVENT_COUNT" => @inflight_count, + "STALLING_THREADS" => @stalling_threads + } + end + + def format_threads_by_plugin(stalling_threads) + stalled_plugins = {} + stalling_threads.each do |thr| + key = (thr.delete("plugin") || "other") + stalled_plugins[key] ||= [] + stalled_plugins[key] << thr + end + stalled_plugins + end + end +end diff --git a/lib/logstash/sized_queue.rb b/logstash-core/lib/logstash/sized_queue.rb similarity index 100% rename from lib/logstash/sized_queue.rb rename to logstash-core/lib/logstash/sized_queue.rb diff --git a/lib/logstash/util.rb b/logstash-core/lib/logstash/util.rb similarity index 81% rename from lib/logstash/util.rb rename to logstash-core/lib/logstash/util.rb index 2034803f43c..d3c5fe6ff41 100644 --- a/lib/logstash/util.rb +++ b/logstash-core/lib/logstash/util.rb @@ -24,6 +24,41 @@ def self.set_thread_name(name) end end # def set_thread_name + def self.set_thread_plugin(plugin) + Thread.current[:plugin] = plugin + end + + def self.get_thread_id(thread) + if RUBY_ENGINE == "jruby" + JRuby.reference(thread).native_thread.id + else + raise Exception.new("Native thread IDs aren't supported outside of JRuby") + end + end + + def self.thread_info(thread) + backtrace = thread.backtrace.map do |line| + line.gsub(LogStash::Environment::LOGSTASH_HOME, "[...]") + end + + blocked_on = case backtrace.first + when /in `push'/ then "blocked_on_push" + when /(?:pipeline|base).*pop/ then "waiting_for_events" + else nil + end + + { + "thread_id" => get_thread_id(thread), + "name" => thread[:name], + "plugin" => (thread[:plugin] ? thread[:plugin].debug_info : nil), + "backtrace" => backtrace, + "blocked_on" => blocked_on, + "status" => thread.status, + "current_call" => backtrace.first + } + end + + # Merge hash 'src' into 'dst' nondestructively # # Duplicate keys will become array values diff --git a/lib/logstash/util/buftok.rb b/logstash-core/lib/logstash/util/buftok.rb similarity index 100% rename from lib/logstash/util/buftok.rb rename to logstash-core/lib/logstash/util/buftok.rb diff --git a/lib/logstash/util/charset.rb b/logstash-core/lib/logstash/util/charset.rb similarity index 100% rename from lib/logstash/util/charset.rb rename to logstash-core/lib/logstash/util/charset.rb diff --git a/lib/logstash/util/decorators.rb b/logstash-core/lib/logstash/util/decorators.rb similarity index 56% rename from lib/logstash/util/decorators.rb rename to logstash-core/lib/logstash/util/decorators.rb index 0ea2c021aca..265656e5ce9 100644 --- a/lib/logstash/util/decorators.rb +++ b/logstash-core/lib/logstash/util/decorators.rb @@ -19,13 +19,16 @@ def add_fields(fields,event, pluginname) value.each do |v| v = event.sprintf(v) if event.include?(field) - event[field] = Array(event[field]) - event[field] << v + # note below that the array field needs to be updated then reassigned to the event. + # this is important because a construct like event[field] << v will not work + # in the current Java event implementation. 
see https://github.com/elastic/logstash/issues/4140 + a = Array(event[field]) + a << v + event[field] = a else event[field] = v end - @logger.debug? and @logger.debug("#{pluginname}: adding value to field", - :field => field, :value => value) + @logger.debug? and @logger.debug("#{pluginname}: adding value to field", :field => field, :value => value) end end end @@ -34,9 +37,13 @@ def add_fields(fields,event, pluginname) def add_tags(tags, event, pluginname) tags.each do |tag| tag = event.sprintf(tag) - @logger.debug? and @logger.debug("#{pluginname}: adding tag", - :tag => tag) - (event["tags"] ||= []) << tag + @logger.debug? and @logger.debug("#{pluginname}: adding tag", :tag => tag) + # note below that the tags array field needs to be updated then reassigned to the event. + # this is important because a construct like event["tags"] << tag will not work + # in the current Java event implementation. see https://github.com/elastic/logstash/issues/4140 + tags = event["tags"] || [] + tags << tag + event["tags"] = tags end end diff --git a/lib/logstash/util/defaults_printer.rb b/logstash-core/lib/logstash/util/defaults_printer.rb similarity index 91% rename from lib/logstash/util/defaults_printer.rb rename to logstash-core/lib/logstash/util/defaults_printer.rb index 13764e2414a..6dd850e1d50 100644 --- a/lib/logstash/util/defaults_printer.rb +++ b/logstash-core/lib/logstash/util/defaults_printer.rb @@ -20,7 +20,7 @@ def print @printers.each do |printer| printer.visit(collector) end - "Default settings used: " + collector.join(', ') + "Settings: " + collector.join(', ') end private diff --git a/lib/logstash/util/filetools.rb b/logstash-core/lib/logstash/util/filetools.rb similarity index 100% rename from lib/logstash/util/filetools.rb rename to logstash-core/lib/logstash/util/filetools.rb diff --git a/lib/logstash/util/java_version.rb b/logstash-core/lib/logstash/util/java_version.rb similarity index 100% rename from lib/logstash/util/java_version.rb rename to logstash-core/lib/logstash/util/java_version.rb diff --git a/lib/logstash/util/password.rb b/logstash-core/lib/logstash/util/password.rb similarity index 100% rename from lib/logstash/util/password.rb rename to logstash-core/lib/logstash/util/password.rb diff --git a/lib/logstash/util/plugin_version.rb b/logstash-core/lib/logstash/util/plugin_version.rb similarity index 100% rename from lib/logstash/util/plugin_version.rb rename to logstash-core/lib/logstash/util/plugin_version.rb diff --git a/lib/logstash/util/prctl.rb b/logstash-core/lib/logstash/util/prctl.rb similarity index 100% rename from lib/logstash/util/prctl.rb rename to logstash-core/lib/logstash/util/prctl.rb diff --git a/lib/logstash/util/retryable.rb b/logstash-core/lib/logstash/util/retryable.rb similarity index 100% rename from lib/logstash/util/retryable.rb rename to logstash-core/lib/logstash/util/retryable.rb diff --git a/lib/logstash/util/socket_peer.rb b/logstash-core/lib/logstash/util/socket_peer.rb similarity index 100% rename from lib/logstash/util/socket_peer.rb rename to logstash-core/lib/logstash/util/socket_peer.rb diff --git a/lib/logstash/util/unicode_trimmer.rb b/logstash-core/lib/logstash/util/unicode_trimmer.rb similarity index 100% rename from lib/logstash/util/unicode_trimmer.rb rename to logstash-core/lib/logstash/util/unicode_trimmer.rb diff --git a/logstash-core/lib/logstash/util/worker_threads_default_printer.rb b/logstash-core/lib/logstash/util/worker_threads_default_printer.rb new file mode 100644 index 00000000000..a4628552f7b --- /dev/null +++ 
b/logstash-core/lib/logstash/util/worker_threads_default_printer.rb
@@ -0,0 +1,29 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/util"
+
+# This class exists to format the settings for default worker threads
+module LogStash module Util class WorkerThreadsDefaultPrinter
+
+  def initialize(settings)
+    @setting = settings.fetch('pipeline-workers', 0)
+    @default = settings.fetch('default-pipeline-workers', 0)
+  end
+
+  def visit(collector)
+    visit_setting(collector)
+    visit_default(collector)
+  end
+
+  def visit_setting(collector)
+    return if @setting == 0
+    collector.push("User set pipeline workers: #{@setting}")
+  end
+
+  def visit_default(collector)
+    return if @default == 0
+    collector.push "Default pipeline workers: #{@default}"
+  end
+
+end end end
+
diff --git a/logstash-core/lib/logstash/util/wrapped_synchronous_queue.rb b/logstash-core/lib/logstash/util/wrapped_synchronous_queue.rb
new file mode 100644
index 00000000000..05c5fc30342
--- /dev/null
+++ b/logstash-core/lib/logstash/util/wrapped_synchronous_queue.rb
@@ -0,0 +1,25 @@
+module LogStash; module Util
+  class WrappedSynchronousQueue
+    java_import java.util.concurrent.SynchronousQueue
+    java_import java.util.concurrent.TimeUnit
+
+    def initialize()
+      @queue = java.util.concurrent.SynchronousQueue.new()
+    end
+
+    def push(obj)
+      @queue.put(obj)
+    end
+    alias_method(:<<, :push)
+
+    # Blocking
+    def take
+      @queue.take()
+    end
+
+    # Block for X millis
+    def poll(millis)
+      @queue.poll(millis, TimeUnit::MILLISECONDS)
+    end
+  end
+end end
\ No newline at end of file
diff --git a/logstash-core/lib/logstash/version.rb b/logstash-core/lib/logstash/version.rb
new file mode 100644
index 00000000000..70715b097cb
--- /dev/null
+++ b/logstash-core/lib/logstash/version.rb
@@ -0,0 +1,14 @@
+# encoding: utf-8
+
+# The version of the logstash package (not the logstash-core gem version).
+#
+# Note to authors: this should not include dashes because 'gem' barfs if
+# you include a dash in the version string.
+
+# TODO: (colin) the logstash-core gem uses its own version number in logstash-core/lib/logstash-core/version.rb
+# there are some dependencies in logstash-core on the LOGSTASH_VERSION constant, which is why
+# the logstash version is currently defined here in logstash-core/lib/logstash/version.rb but
+# eventually this file should be in the root logstash lib dir and the dependencies in logstash-core should be
+# fixed.
+
+LOGSTASH_VERSION = "3.0.0.dev"
diff --git a/locales/en.yml b/logstash-core/locales/en.yml
similarity index 95%
rename from locales/en.yml
rename to logstash-core/locales/en.yml
index f89fb254fed..18813d26c28 100644
--- a/locales/en.yml
+++ b/logstash-core/locales/en.yml
@@ -155,8 +155,8 @@ en:
         the empty string for the '-e' flag.
       configtest: |+
         Check configuration for valid syntax and then exit.
-      filterworkers: |+
-        Sets the number of filter workers to run.
+      pipelineworkers: |+
+        Sets the number of pipeline workers to run.
       log: |+
         Write logstash internal logs to the given file. Without
         this flag, logstash will emit
@@ -187,3 +187,8 @@ en:
       debug: |+
         Most verbose logging. This causes 'debug'
        level logs to be emitted.
+      unsafe_shutdown: |+
+        Force logstash to exit during shutdown even
+        if there are still inflight events in memory.
+        By default, logstash will refuse to quit until all
+        received events have been pushed to the outputs.
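The `take_batch` method earlier in this diff pairs one blocking `take` with timed `poll` calls, which is what turns the one-at-a-time handoff of the `WrappedSynchronousQueue` above into micro-batches: a worker blocks until at least one event arrives, then drains up to `batch_size` events without waiting longer than `batch_poll_wait` for each. The following is a minimal sketch of that pattern in plain Ruby, runnable without JRuby; the stdlib `Queue` and `Timeout` stand in for the Java `SynchronousQueue`, and `demo_take_batch` and `SHUTDOWN` are illustrative names, not part of this changeset.

    require "timeout"

    SHUTDOWN = :shutdown # stand-in for the LogStash::SHUTDOWN marker event

    # Mirrors take_batch: block for the first event, then use a bounded wait
    # for the rest of the batch so a slow input cannot stall the worker.
    def demo_take_batch(queue, batch_size, batch_poll_wait)
      batch = []
      batch_size.times do |t|
        event =
          if t == 0
            queue.pop # blocking, like SynchronousQueue#take
          else
            begin
              Timeout.timeout(batch_poll_wait) { queue.pop } # bounded, like #poll(millis)
            rescue Timeout::Error
              nil
            end
          end
        next if event.nil?
        batch << event
        break if event == SHUTDOWN # each worker consumes exactly one shutdown marker
      end
      batch
    end

    queue = Queue.new
    %w[e1 e2 e3].each { |e| queue << e }
    queue << SHUTDOWN
    p demo_take_batch(queue, 10, 0.05) # => ["e1", "e2", "e3", :shutdown]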
diff --git a/logstash-core.gemspec b/logstash-core/logstash-core.gemspec similarity index 84% rename from logstash-core.gemspec rename to logstash-core/logstash-core.gemspec index 635dd3d04d5..ba32f0d3d8c 100644 --- a/logstash-core.gemspec +++ b/logstash-core/logstash-core.gemspec @@ -1,7 +1,7 @@ # -*- encoding: utf-8 -*- lib = File.expand_path('../lib', __FILE__) $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib) -require 'logstash/version' +require 'logstash-core/version' Gem::Specification.new do |gem| gem.authors = ["Jordan Sissel", "Pete Fritchman", "Elasticsearch"] @@ -11,11 +11,13 @@ Gem::Specification.new do |gem| gem.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html" gem.license = "Apache License (2.0)" - gem.files = Dir.glob(["logstash-core.gemspec", "lib/logstash-core.rb", "lib/logstash/**/*.rb", "spec/**/*.rb", "locales/*"]) + gem.files = Dir.glob(["logstash-core.gemspec", "lib/**/*.rb", "spec/**/*.rb", "locales/*"]) gem.test_files = gem.files.grep(%r{^(test|spec|features)/}) gem.name = "logstash-core" gem.require_paths = ["lib"] - gem.version = LOGSTASH_VERSION + gem.version = LOGSTASH_CORE_VERSION + + gem.add_runtime_dependency "logstash-core-event", "~> 3.0.0.dev" gem.add_runtime_dependency "cabin", "~> 0.7.0" #(Apache 2.0 license) gem.add_runtime_dependency "pry", "~> 0.10.1" #(Ruby license) @@ -23,7 +25,7 @@ Gem::Specification.new do |gem| gem.add_runtime_dependency "clamp", "~> 0.6.5" #(MIT license) for command line args/flags gem.add_runtime_dependency "filesize", "0.0.4" #(MIT license) for :bytes config validator gem.add_runtime_dependency "gems", "~> 0.8.3" #(MIT license) - gem.add_runtime_dependency "concurrent-ruby", "~> 0.9.1" + gem.add_runtime_dependency "concurrent-ruby", "0.9.2" gem.add_runtime_dependency "jruby-openssl", ">= 0.9.11" # Required to support TLSv1.2 # TODO(sissel): Treetop 1.5.x doesn't seem to work well, but I haven't @@ -35,11 +37,12 @@ Gem::Specification.new do |gem| # filetools and rakelib gem.add_runtime_dependency "minitar", "~> 0.5.4" + gem.add_runtime_dependency "rubyzip", "~> 1.1.7" gem.add_runtime_dependency "thread_safe", "~> 0.3.5" #(Apache 2.0 license) if RUBY_PLATFORM == 'java' gem.platform = RUBY_PLATFORM - gem.add_runtime_dependency "jrjackson", "~> 0.3.6" #(Apache 2.0 license) + gem.add_runtime_dependency "jrjackson", "~> 0.3.7" #(Apache 2.0 license) else gem.add_runtime_dependency "oj" #(MIT-style license) end diff --git a/spec/core/conditionals_spec.rb b/logstash-core/spec/conditionals_spec.rb similarity index 100% rename from spec/core/conditionals_spec.rb rename to logstash-core/spec/conditionals_spec.rb diff --git a/spec/logstash/agent_spec.rb b/logstash-core/spec/logstash/agent_spec.rb similarity index 100% rename from spec/logstash/agent_spec.rb rename to logstash-core/spec/logstash/agent_spec.rb diff --git a/spec/core/config_spec.rb b/logstash-core/spec/logstash/config/config_ast_spec.rb similarity index 100% rename from spec/core/config_spec.rb rename to logstash-core/spec/logstash/config/config_ast_spec.rb diff --git a/spec/core/config_cpu_core_strategy_spec.rb b/logstash-core/spec/logstash/config/cpu_core_strategy_spec.rb similarity index 100% rename from spec/core/config_cpu_core_strategy_spec.rb rename to logstash-core/spec/logstash/config/cpu_core_strategy_spec.rb diff --git a/spec/core/config_defaults_spec.rb b/logstash-core/spec/logstash/config/defaults_spec.rb similarity index 100% rename from spec/core/config_defaults_spec.rb rename to logstash-core/spec/logstash/config/defaults_spec.rb 
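For readers tracing the new `ShutdownController`, its `shutdown_stalled?` heuristic reduces to two pairwise passes over the retained reports: the shutdown is considered stalled only when the inflight event count never decreases between consecutive reports and the stalling-thread list stays identical. Below is a standalone sketch of that logic, using simplified report hashes in place of the real `Report` objects (the hash keys here are illustrative):

    # Simplified stand-ins for ShutdownController's retained reports.
    reports = [
      { "inflight" => 10, "stalling_threads" => ["thread-1"] },
      { "inflight" => 12, "stalling_threads" => ["thread-1"] },
      { "inflight" => 12, "stalling_threads" => ["thread-1"] },
    ]

    # Stalled only if the inflight count is constant or increasing pairwise...
    not_draining = reports.each_cons(2).all? do |prev, nxt|
      prev["inflight"] <= nxt["inflight"]
    end

    # ...and the same threads are reported as stalling in every report.
    same_threads = reports.each_cons(2).all? do |prev, nxt|
      prev["stalling_threads"] == nxt["stalling_threads"]
    end

    puts "stalled: #{not_draining && same_threads}" # => stalled: true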
diff --git a/spec/core/config_mixin_spec.rb b/logstash-core/spec/logstash/config/mixin_spec.rb similarity index 100% rename from spec/core/config_mixin_spec.rb rename to logstash-core/spec/logstash/config/mixin_spec.rb diff --git a/spec/core/environment_spec.rb b/logstash-core/spec/logstash/environment_spec.rb similarity index 100% rename from spec/core/environment_spec.rb rename to logstash-core/spec/logstash/environment_spec.rb diff --git a/spec/filters/base_spec.rb b/logstash-core/spec/logstash/filters/base_spec.rb similarity index 100% rename from spec/filters/base_spec.rb rename to logstash-core/spec/logstash/filters/base_spec.rb diff --git a/spec/inputs/base_spec.rb b/logstash-core/spec/logstash/inputs/base_spec.rb similarity index 100% rename from spec/inputs/base_spec.rb rename to logstash-core/spec/logstash/inputs/base_spec.rb diff --git a/spec/lib/logstash/java_integration_spec.rb b/logstash-core/spec/logstash/java_integration_spec.rb similarity index 100% rename from spec/lib/logstash/java_integration_spec.rb rename to logstash-core/spec/logstash/java_integration_spec.rb diff --git a/spec/util/json_spec.rb b/logstash-core/spec/logstash/json_spec.rb similarity index 100% rename from spec/util/json_spec.rb rename to logstash-core/spec/logstash/json_spec.rb diff --git a/spec/outputs/base_spec.rb b/logstash-core/spec/logstash/outputs/base_spec.rb similarity index 100% rename from spec/outputs/base_spec.rb rename to logstash-core/spec/logstash/outputs/base_spec.rb diff --git a/spec/logstash/patches_spec.rb b/logstash-core/spec/logstash/patches_spec.rb similarity index 100% rename from spec/logstash/patches_spec.rb rename to logstash-core/spec/logstash/patches_spec.rb diff --git a/logstash-core/spec/logstash/pipeline_spec.rb b/logstash-core/spec/logstash/pipeline_spec.rb new file mode 100644 index 00000000000..f1fd48f0872 --- /dev/null +++ b/logstash-core/spec/logstash/pipeline_spec.rb @@ -0,0 +1,303 @@ +# encoding: utf-8 +require "spec_helper" + +class DummyInput < LogStash::Inputs::Base + config_name "dummyinput" + milestone 2 + + def register + end + + def run(queue) + end + + def close + end +end + +class DummyCodec < LogStash::Codecs::Base + config_name "dummycodec" + milestone 2 + + def decode(data) + data + end + + def encode(event) + event + end + + def close + end +end + +class DummyOutput < LogStash::Outputs::Base + config_name "dummyoutput" + milestone 2 + + attr_reader :num_closes + + def initialize(params={}) + super + @num_closes = 0 + end + + def register + end + + def receive(event) + end + + def close + @num_closes += 1 + end +end + +class DummyFilter < LogStash::Filters::Base + config_name "dummyfilter" + milestone 2 + + def register() end + + def filter(event) end + + def threadsafe?() false; end + + def close() end +end + +class DummySafeFilter < LogStash::Filters::Base + config_name "dummysafefilter" + milestone 2 + + def register() end + + def filter(event) end + + def threadsafe?() true; end + + def close() end +end + +class TestPipeline < LogStash::Pipeline + attr_reader :outputs, :settings, :logger +end + +describe LogStash::Pipeline do + let(:worker_thread_count) { 8 } + let(:safe_thread_count) { 1 } + let(:override_thread_count) { 42 } + + describe "defaulting the pipeline workers based on thread safety" do + before(:each) do + allow(LogStash::Plugin).to receive(:lookup).with("input", "dummyinput").and_return(DummyInput) + allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(DummyCodec) + allow(LogStash::Plugin).to 
receive(:lookup).with("output", "dummyoutput").and_return(DummyOutput) + allow(LogStash::Plugin).to receive(:lookup).with("filter", "dummyfilter").and_return(DummyFilter) + allow(LogStash::Plugin).to receive(:lookup).with("filter", "dummysafefilter").and_return(DummySafeFilter) + allow(LogStash::Config::CpuCoreStrategy).to receive(:fifty_percent).and_return(worker_thread_count) + end + + context "when there are some not threadsafe filters" do + let(:test_config_with_filters) { + <<-eos + input { + dummyinput {} + } + + filter { + dummyfilter {} + } + + output { + dummyoutput {} + } + eos + } + + context "when there is no command line -w N set" do + it "starts one filter thread" do + msg = "Defaulting pipeline worker threads to 1 because there are some" + + " filters that might not work with multiple worker threads" + pipeline = TestPipeline.new(test_config_with_filters) + expect(pipeline.logger).to receive(:warn).with(msg, + {:count_was=>worker_thread_count, :filters=>["dummyfilter"]}) + pipeline.run + expect(pipeline.worker_threads.size).to eq(safe_thread_count) + end + end + + context "when there is command line -w N set" do + it "starts multiple filter thread" do + msg = "Warning: Manual override - there are filters that might" + + " not work with multiple worker threads" + pipeline = TestPipeline.new(test_config_with_filters) + expect(pipeline.logger).to receive(:warn).with(msg, + {:worker_threads=> override_thread_count, :filters=>["dummyfilter"]}) + pipeline.configure("pipeline-workers", override_thread_count) + pipeline.run + expect(pipeline.worker_threads.size).to eq(override_thread_count) + end + end + end + + context "when there are threadsafe filters only" do + let(:test_config_with_filters) { + <<-eos + input { + dummyinput {} + } + + filter { + dummysafefilter {} + } + + output { + dummyoutput {} + } + eos + } + + it "starts multiple filter threads" do + pipeline = TestPipeline.new(test_config_with_filters) + pipeline.run + expect(pipeline.worker_threads.size).to eq(worker_thread_count) + end + end + end + + context "close" do + before(:each) do + allow(LogStash::Plugin).to receive(:lookup).with("input", "dummyinput").and_return(DummyInput) + allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(DummyCodec) + allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(DummyOutput) + end + + + let(:test_config_without_output_workers) { + <<-eos + input { + dummyinput {} + } + + output { + dummyoutput {} + } + eos + } + + let(:test_config_with_output_workers) { + <<-eos + input { + dummyinput {} + } + + output { + dummyoutput { + workers => 2 + } + } + eos + } + + context "output close" do + it "should call close of output without output-workers" do + pipeline = TestPipeline.new(test_config_without_output_workers) + pipeline.run + + expect(pipeline.outputs.size ).to eq(1) + expect(pipeline.outputs.first.worker_plugins.size ).to eq(1) + expect(pipeline.outputs.first.worker_plugins.first.num_closes ).to eq(1) + end + + it "should call output close correctly with output workers" do + pipeline = TestPipeline.new(test_config_with_output_workers) + pipeline.run + + expect(pipeline.outputs.size ).to eq(1) + # We even close the parent output worker, even though it doesn't receive messages + expect(pipeline.outputs.first.num_closes).to eq(1) + pipeline.outputs.first.worker_plugins.each do |plugin| + expect(plugin.num_closes ).to eq(1) + end + end + end + end + + context "compiled flush function" do + + context "cancelled events should not 
propagate down the filters" do + config <<-CONFIG + filter { + multiline { + pattern => "hello" + what => next + } + multiline { + pattern => "hello" + what => next + } + } + CONFIG + + sample("hello") do + expect(subject["message"]).to eq("hello") + end + end + + context "new events should propagate down the filters" do + config <<-CONFIG + filter { + clone { + clones => ["clone1"] + } + multiline { + pattern => "bar" + what => previous + } + } + CONFIG + + sample(["foo", "bar"]) do + expect(subject.size).to eq(2) + + expect(subject[0]["message"]).to eq("foo\nbar") + expect(subject[0]["type"]).to be_nil + expect(subject[1]["message"]).to eq("foo\nbar") + expect(subject[1]["type"]).to eq("clone1") + end + end + end + + context "compiled filter funtions" do + + context "new events should propagate down the filters" do + config <<-CONFIG + filter { + clone { + clones => ["clone1", "clone2"] + } + mutate { + add_field => {"foo" => "bar"} + } + } + CONFIG + + sample("hello") do + expect(subject.size).to eq(3) + + expect(subject[0]["message"]).to eq("hello") + expect(subject[0]["type"]).to be_nil + expect(subject[0]["foo"]).to eq("bar") + + expect(subject[1]["message"]).to eq("hello") + expect(subject[1]["type"]).to eq("clone1") + expect(subject[1]["foo"]).to eq("bar") + + expect(subject[2]["message"]).to eq("hello") + expect(subject[2]["type"]).to eq("clone2") + expect(subject[2]["foo"]).to eq("bar") + end + end + end +end diff --git a/spec/core/plugin_spec.rb b/logstash-core/spec/logstash/plugin_spec.rb similarity index 100% rename from spec/core/plugin_spec.rb rename to logstash-core/spec/logstash/plugin_spec.rb diff --git a/spec/core/runner_spec.rb b/logstash-core/spec/logstash/runner_spec.rb similarity index 100% rename from spec/core/runner_spec.rb rename to logstash-core/spec/logstash/runner_spec.rb diff --git a/logstash-core/spec/logstash/shutdown_controller_spec.rb b/logstash-core/spec/logstash/shutdown_controller_spec.rb new file mode 100644 index 00000000000..5f755f290a8 --- /dev/null +++ b/logstash-core/spec/logstash/shutdown_controller_spec.rb @@ -0,0 +1,107 @@ +# encoding: utf-8 +require "spec_helper" +require "logstash/shutdown_controller" + +describe LogStash::ShutdownController do + + let(:check_every) { 0.01 } + let(:check_threshold) { 100 } + subject { LogStash::ShutdownController.new(pipeline, check_every) } + let(:pipeline) { double("pipeline") } + report_count = 0 + + before :each do + allow(LogStash::Report).to receive(:from_pipeline).and_wrap_original do |m, *args| + report_count += 1 + m.call(*args) + end + end + + after :each do + report_count = 0 + end + + context "when pipeline is stalled" do + let(:increasing_count) { (1..5000).to_a.map {|i| { "total" => i } } } + before :each do + allow(pipeline).to receive(:inflight_count).and_return(*increasing_count) + allow(pipeline).to receive(:stalling_threads) { { } } + end + + describe ".unsafe_shutdown = true" do + let(:abort_threshold) { subject.abort_threshold } + let(:report_every) { subject.report_every } + + before :each do + subject.class.unsafe_shutdown = true + end + + it "should force the shutdown" do + expect(subject).to receive(:force_exit).once + subject.start + end + + it "should do exactly \"abort_threshold\" stall checks" do + allow(subject).to receive(:force_exit) + expect(subject).to receive(:shutdown_stalled?).exactly(abort_threshold).times.and_call_original + subject.start + end + + it "should do exactly \"abort_threshold\"*\"report_every\" stall checks" do + allow(subject).to receive(:force_exit) + 
expect(LogStash::Report).to receive(:from_pipeline).exactly(abort_threshold*report_every).times.and_call_original + subject.start + end + end + + describe ".unsafe_shutdown = false" do + + before :each do + subject.class.unsafe_shutdown = false + end + + it "shouldn't force the shutdown" do + expect(subject).to_not receive(:force_exit) + thread = Thread.new(subject) {|subject| subject.start } + sleep 0.1 until report_count > check_threshold + thread.kill + end + end + end + + context "when pipeline is not stalled" do + let(:decreasing_count) { (1..5000).to_a.reverse.map {|i| { "total" => i } } } + before :each do + allow(pipeline).to receive(:inflight_count).and_return(*decreasing_count) + allow(pipeline).to receive(:stalling_threads) { { } } + end + + describe ".unsafe_shutdown = true" do + + before :each do + subject.class.unsafe_shutdown = true + end + + it "should force the shutdown" do + expect(subject).to_not receive(:force_exit) + thread = Thread.new(subject) {|subject| subject.start } + sleep 0.1 until report_count > check_threshold + thread.kill + end + end + + describe ".unsafe_shutdown = false" do + + before :each do + subject.class.unsafe_shutdown = false + end + + it "shouldn't force the shutdown" do + expect(subject).to_not receive(:force_exit) + thread = Thread.new(subject) {|subject| subject.start } + sleep 0.1 until report_count > check_threshold + thread.kill + end + end + end +end diff --git a/spec/util/buftok_spec.rb b/logstash-core/spec/logstash/util/buftok_spec.rb similarity index 100% rename from spec/util/buftok_spec.rb rename to logstash-core/spec/logstash/util/buftok_spec.rb diff --git a/spec/util/charset_spec.rb b/logstash-core/spec/logstash/util/charset_spec.rb similarity index 100% rename from spec/util/charset_spec.rb rename to logstash-core/spec/logstash/util/charset_spec.rb diff --git a/spec/util/defaults_printer_spec.rb b/logstash-core/spec/logstash/util/defaults_printer_spec.rb similarity index 80% rename from spec/util/defaults_printer_spec.rb rename to logstash-core/spec/logstash/util/defaults_printer_spec.rb index 3e50a7032cb..a2eda701425 100644 --- a/spec/util/defaults_printer_spec.rb +++ b/logstash-core/spec/logstash/util/defaults_printer_spec.rb @@ -10,7 +10,7 @@ end let(:workers) { 1 } - let(:expected) { "Default settings used: Filter workers: #{workers}" } + let(:expected) { "Settings: User set pipeline workers: #{workers}" } let(:settings) { {} } describe 'class methods API' do @@ -19,13 +19,13 @@ end context 'when the settings hash is empty' do + let(:expected) { "Settings: " } it_behaves_like "a defaults printer" end context 'when the settings hash has content' do let(:workers) { 42 } - let(:settings) { {'filter-workers' => workers} } - + let(:settings) { {'pipeline-workers' => workers} } it_behaves_like "a defaults printer" end end @@ -36,12 +36,13 @@ end context 'when the settings hash is empty' do + let(:expected) { "Settings: " } it_behaves_like "a defaults printer" end context 'when the settings hash has content' do let(:workers) { 13 } - let(:settings) { {'filter-workers' => workers} } + let(:settings) { {'pipeline-workers' => workers} } it_behaves_like "a defaults printer" end diff --git a/spec/util/java_version_spec.rb b/logstash-core/spec/logstash/util/java_version_spec.rb similarity index 100% rename from spec/util/java_version_spec.rb rename to logstash-core/spec/logstash/util/java_version_spec.rb diff --git a/spec/util/plugin_version_spec.rb b/logstash-core/spec/logstash/util/plugin_version_spec.rb similarity index 100% rename from 
spec/util/plugin_version_spec.rb rename to logstash-core/spec/logstash/util/plugin_version_spec.rb diff --git a/spec/util/unicode_trimmer_spec.rb b/logstash-core/spec/logstash/util/unicode_trimmer_spec.rb similarity index 100% rename from spec/util/unicode_trimmer_spec.rb rename to logstash-core/spec/logstash/util/unicode_trimmer_spec.rb diff --git a/logstash-core/spec/logstash/util/worker_threads_default_printer_spec.rb b/logstash-core/spec/logstash/util/worker_threads_default_printer_spec.rb new file mode 100644 index 00000000000..410d8c9fbf7 --- /dev/null +++ b/logstash-core/spec/logstash/util/worker_threads_default_printer_spec.rb @@ -0,0 +1,45 @@ +# encoding: utf-8 +require "spec_helper" +require "logstash/util/worker_threads_default_printer" + +describe LogStash::Util::WorkerThreadsDefaultPrinter do + let(:settings) { {} } + let(:collector) { [] } + + subject { described_class.new(settings) } + + before { subject.visit(collector) } + + describe "the #visit method" do + context 'when the settings hash is empty' do + it 'adds nothing to the collector' do + subject.visit(collector) + expect(collector).to eq([]) + end + end + + context 'when the settings hash has both user and default content' do + let(:settings) { {'pipeline-workers' => 42, 'default-pipeline-workers' => 5} } + + it 'adds two strings' do + expect(collector).to eq(["User set pipeline workers: 42", "Default pipeline workers: 5"]) + end + end + + context 'when the settings hash has only user content' do + let(:settings) { {'pipeline-workers' => 42} } + + it 'adds a string with user set pipeline workers' do + expect(collector.first).to eq("User set pipeline workers: 42") + end + end + + context 'when the settings hash has only default content' do + let(:settings) { {'default-pipeline-workers' => 5} } + + it 'adds a string with default pipeline workers' do + expect(collector.first).to eq("Default pipeline workers: 5") + end + end + end +end diff --git a/spec/util_spec.rb b/logstash-core/spec/logstash/util_spec.rb similarity index 100% rename from spec/util_spec.rb rename to logstash-core/spec/logstash/util_spec.rb diff --git a/logstash-event.gemspec b/logstash-event.gemspec deleted file mode 100644 index ea6cce87e1a..00000000000 --- a/logstash-event.gemspec +++ /dev/null @@ -1,41 +0,0 @@ -# -*- encoding: utf-8 -*- -Gem::Specification.new do |gem| - gem.authors = ["Jordan Sissel"] - gem.email = ["jls@semicomplete.com"] - gem.description = %q{Library that contains the classes required to create LogStash events} - gem.summary = %q{Library that contains the classes required to create LogStash events} - gem.homepage = "https://github.com/logstash/logstash" - gem.license = "Apache License (2.0)" - - gem.files = %w{ - lib/logstash-event.rb - lib/logstash/environment.rb - lib/logstash/errors.rb - lib/logstash/event.rb - lib/logstash/java_integration.rb - lib/logstash/json.rb - lib/logstash/namespace.rb - lib/logstash/timestamp.rb - lib/logstash/version.rb - lib/logstash/util.rb - lib/logstash/util/accessors.rb - LICENSE - } - - gem.test_files = ["spec/core/event_spec.rb"] - gem.name = "logstash-event" - gem.require_paths = ["lib"] - gem.version = "1.3.0" - - gem.add_runtime_dependency "cabin" - gem.add_development_dependency "rspec" - gem.add_development_dependency "guard" - gem.add_development_dependency "guard-rspec" - - if RUBY_PLATFORM == 'java' - gem.platform = RUBY_PLATFORM - gem.add_runtime_dependency "jrjackson" - else - gem.add_runtime_dependency "oj" - end -end diff --git a/rakelib/artifacts.rake b/rakelib/artifacts.rake 
index e2637f09995..0c6f2642b28 100644
--- a/rakelib/artifacts.rake
+++ b/rakelib/artifacts.rake
@@ -54,20 +54,46 @@ namespace "artifact" do
     File.open(".bundle/config", "w") { }
   end
 
-  # locate the "gem "logstash-core" ..." line in Gemfile, and if the :path => "." option if specified
+  # locate the "gem "logstash-core" ..." line in Gemfile, and if the :path => "..." option is specified
   # build and install the local logstash-core gem otherwise just do nothing, bundler will deal with it.
   task "install-logstash-core" do
+    # regex which matches a Gemfile gem definition for the logstash-core gem and captures the :path option
+    gem_line_regex = /^\s*gem\s+["']logstash-core["'](?:\s*,\s*["'][^"^']+["'])?(?:\s*,\s*:path\s*=>\s*["']([^"^']+)["'])?/i
+
     lines = File.readlines("Gemfile")
-    matches = lines.select{|line| line[/^gem\s+["']logstash-core["']/i]}
+    matches = lines.select{|line| line[gem_line_regex]}
     abort("ERROR: Gemfile format error, need a single logstash-core gem specification") if matches.size != 1
-    if matches.first =~ /:path\s*=>\s*["']\.["']/
-      Rake::Task["plugin:install-local-logstash-core-gem"].invoke
+
+    path = matches.first[gem_line_regex, 1]
+
+    if path
+      Rake::Task["plugin:install-local-core-gem"].invoke("logstash-core", path)
     else
       puts("[artifact:install-logstash-core] using logstash-core from Rubygems")
     end
   end
 
-  task "prepare" => ["bootstrap", "plugin:install-default", "install-logstash-core", "clean-bundle-config"]
+  # locate the "gem "logstash-core-event*" ..." line in Gemfile, and if the :path => "..." option is specified
+  # build and install the local logstash-core-event* gem otherwise just do nothing, bundler will deal with it.
+  task "install-logstash-core-event" do
+    # regex which matches a Gemfile gem definition for the logstash-core-event* gem and captures the gem name and :path option
+    gem_line_regex = /^\s*gem\s+["'](logstash-core-event[^"^']*)["'](?:\s*,\s*["'][^"^']+["'])?(?:\s*,\s*:path\s*=>\s*["']([^"^']+)["'])?/i
+
+    lines = File.readlines("Gemfile")
+    matches = lines.select{|line| line[gem_line_regex]}
+    abort("ERROR: Gemfile format error, need a single logstash-core-event gem specification") if matches.size != 1
+
+    name = matches.first[gem_line_regex, 1]
+    path = matches.first[gem_line_regex, 2]
+
+    if path
+      Rake::Task["plugin:install-local-core-gem"].invoke(name, path)
+    else
+      puts("[artifact:install-logstash-core-event] using #{name} from Rubygems")
+    end
+  end
+
+  task "prepare" => ["bootstrap", "plugin:install-default", "install-logstash-core", "install-logstash-core-event", "clean-bundle-config"]
 
   desc "Build a tar.gz of logstash with all dependencies"
   task "tar" => ["prepare"] do
diff --git a/rakelib/compile.rake b/rakelib/compile.rake
index df572de21bc..be5693bead7 100644
--- a/rakelib/compile.rake
+++ b/rakelib/compile.rake
@@ -8,8 +8,15 @@ end
 namespace "compile" do
   desc "Compile the config grammar"
-  task "grammar" => "lib/logstash/config/grammar.rb"
+
+  task "grammar" => "logstash-core/lib/logstash/config/grammar.rb"
 
   desc "Build everything"
-  task "all" => "grammar"
+  # task "all" => ["grammar", "logstash-core-event-java"]
+  task "all" => ["grammar"]
+
+  task "logstash-core-event-java" do
+    puts("Building logstash-core-event-java using gradle")
+    system("logstash-core-event-java/gradlew", "jar", "-p", "./logstash-core-event-java")
+  end
 end
diff --git a/rakelib/default_plugins.rb b/rakelib/default_plugins.rb
index 4b0e3e2d35e..f67b4a08609 100644
--- a/rakelib/default_plugins.rb
+++ b/rakelib/default_plugins.rb
@@ -58,6 +58,7 @@ module RakeLib
   logstash-input-http
   logstash-input-imap
   logstash-input-irc
+  logstash-input-jdbc
   logstash-input-log4j
   logstash-input-lumberjack
   logstash-input-pipe
diff --git a/rakelib/package.rake b/rakelib/package.rake
new file mode 100644
index 00000000000..96d06559844
--- /dev/null
+++ b/rakelib/package.rake
@@ -0,0 +1,13 @@
+namespace "package" do
+
+  task "bundle" do
+    system("bin/plugin", "package")
+    raise(RuntimeError, $!.to_s) unless $?.success?
+  end
+
+  desc "Build a package with the default plugins, including dependencies, to be installed offline"
+  task "plugins-default" => ["test:install-default", "bundle"]
+
+  desc "Build a package with all the plugins, including dependencies, to be installed offline"
+  task "plugins-all" => ["test:install-all", "bundle"]
+end
diff --git a/rakelib/plugin.rake b/rakelib/plugin.rake
index 9c2065c1f56..d08fbdf6f2a 100644
--- a/rakelib/plugin.rake
+++ b/rakelib/plugin.rake
@@ -57,26 +57,40 @@ namespace "plugin" do
     task.reenable # Allow this task to be run again
   end
 
-  task "clean-logstash-core-gem" do
-    Dir["logstash-core*.gem"].each do |gem|
+  task "clean-local-core-gem", [:name, :path] do |task, args|
+    name = args[:name]
+    path = args[:path]
+
+    Dir[File.join(path, "#{name}*.gem")].each do |gem|
+      puts("[plugin:clean-local-core-gem] Cleaning #{gem}")
       rm(gem)
     end
     task.reenable # Allow this task to be run again
   end
 
-  task "build-logstash-core-gem" => [ "clean-logstash-core-gem" ] do
-    puts("[plugin:build-logstash-core-gem] Building logstash-core.gemspec")
+  task "build-local-core-gem", [:name, :path] do |task, args|
+    name = args[:name]
+    path = args[:path]
+
+    Rake::Task["plugin:clean-local-core-gem"].invoke(name, path)
 
-    system("gem build logstash-core.gemspec")
+    puts("[plugin:build-local-core-gem] Building #{File.join(path, name)}.gemspec")
+
+    system("cd #{path}; gem build #{name}.gemspec")
     task.reenable # Allow this task to be run again
   end
 
-  task "install-local-logstash-core-gem" => [ "build-logstash-core-gem" ] do
-    gems = Dir["logstash-core*.gem"]
-    abort("ERROR: logstash-core gem not found") if gems.size != 1
-    puts("[plugin:install-local-logstash-core-gem] Installing #{gems.first}")
+  task "install-local-core-gem", [:name, :path] do |task, args|
+    name = args[:name]
+    path = args[:path]
+
+    Rake::Task["plugin:build-local-core-gem"].invoke(name, path)
+
+    gems = Dir[File.join(path, "#{name}*.gem")]
+    abort("ERROR: #{name} gem not found in #{path}") if gems.size != 1
+    puts("[plugin:install-local-core-gem] Installing #{gems.first}")
     install_plugins("--no-verify", gems.first)
     task.reenable # Allow this task to be run again
diff --git a/rakelib/test.rake b/rakelib/test.rake
index 9c25d819589..8c0d16ff4ef 100644
--- a/rakelib/test.rake
+++ b/rakelib/test.rake
@@ -19,18 +19,36 @@ namespace "test" do
     require 'ci/reporter/rake/rspec_loader'
   end
 
+  def core_specs
+    # note that regardless of which logstash-core-event-* gem is live, we will always run the
+    # logstash-core-event specs since currently these are the most complete Event and Timestamp specs,
+    # which actually define the Event contract and should pass regardless of the actual underlying
+    # implementation.
+    specs = ["spec/**/*_spec.rb", "logstash-core/spec/**/*_spec.rb", "logstash-core-event/spec/**/*_spec.rb"]
+
+    # figure out if the logstash-core-event-java gem is loaded and, if so, add its specific specs to the core specs to run
+    begin
+      require "logstash-core-event-java/version"
+      specs << "logstash-core-event-java/spec/**/*_spec.rb"
+    rescue LoadError
+      # logstash-core-event-java gem is not live, ignore and skip specs
+    end
+
+    Rake::FileList[*specs]
+  end
+
   desc "run core specs"
   task "core" => ["setup"] do
-    exit(RSpec::Core::Runner.run([Rake::FileList["spec/**/*_spec.rb"]]))
+    exit(RSpec::Core::Runner.run([core_specs]))
   end
 
   desc "run core specs in fail-fast mode"
   task "core-fail-fast" => ["setup"] do
-    exit(Spec::Core::Runner.run(["--fail-fast", Rake::FileList["spec/**/*_spec.rb"]]))
+    exit(RSpec::Core::Runner.run(["--fail-fast", core_specs]))
   end
 
   desc "run core specs on a single file"
-  task "core-single-file", [:specfile] => ["setup"] do |t,args|
+  task "core-single-file", [:specfile] => ["setup"] do |t, args|
     exit(RSpec::Core::Runner.run([Rake::FileList[args.specfile]]))
   end
 
@@ -87,6 +105,28 @@ namespace "test" do
     task.reenable
   end
 
+  task "integration" => ["setup"] do
+    require "fileutils"
+
+    source = File.expand_path(File.join(File.dirname(__FILE__), ".."))
+    integration_path = File.join(source, "integration_run")
+    FileUtils.rm_rf(integration_path)
+
+    exit(RSpec::Core::Runner.run([Rake::FileList["integration/**/*_spec.rb"]]))
+  end
+
+  namespace "integration" do
+    task "local" => ["setup"] do
+      require "fileutils"
+
+      source = File.expand_path(File.join(File.dirname(__FILE__), ".."))
+      integration_path = File.join(source, "integration_run")
+      FileUtils.mkdir_p(integration_path)
+
+      puts "[integration_spec] configuring local environment for running tests in #{integration_path}, if you want to change this behavior delete the directory."
+ exit(RSpec::Core::Runner.run([Rake::FileList["integration/**/*_spec.rb"]])) + end + end end task "test" => [ "test:core" ] diff --git a/require-analyze.rb b/require-analyze.rb deleted file mode 100644 index f69d858aa45..00000000000 --- a/require-analyze.rb +++ /dev/null @@ -1,22 +0,0 @@ -require "csv" - -#0.003,psych/nodes/mapping,/Users/jls/.rvm/rubies/jruby-1.7.8/lib/ruby/shared/psych/nodes.rb:6:in `(root)' - -durations = {} -durations.default = 0 - -CSV.foreach(ARGV[0]) do |duration, path, source| - source, line, where = source.split(":") - #{"0.002"=>"/Users/jls/projects/logstash/vendor/bundle/jruby/1.9/gems/clamp-0.6.3/lib/clamp.rb"} - if source.include?("jruby/1.9/gems") - # Get the gem name - source = source.gsub(/.*\/jruby\/1.9\/gems/, "")[/[^\/]+/] - elsif source.include?("/lib/logstash/") - source = source.gsub(/^.*(\/lib\/logstash\/)/, "/lib/logstash/") - end - durations[source] += duration.to_f -end - -durations.sort_by { |k,v| v }.each do |k,v| - puts "#{v} #{k}" -end diff --git a/spec/lib/logstash/bundler_spec.rb b/spec/bootstrap/bundler_spec.rb similarity index 100% rename from spec/lib/logstash/bundler_spec.rb rename to spec/bootstrap/bundler_spec.rb diff --git a/spec/core/pipeline_spec.rb b/spec/core/pipeline_spec.rb deleted file mode 100644 index d0021d4a396..00000000000 --- a/spec/core/pipeline_spec.rb +++ /dev/null @@ -1,196 +0,0 @@ -# encoding: utf-8 -require "spec_helper" - -class DummyInput < LogStash::Inputs::Base - config_name "dummyinput" - milestone 2 - - def register - end - - def run(queue) - end - - def close - end -end - -class DummyCodec < LogStash::Codecs::Base - config_name "dummycodec" - milestone 2 - - def decode(data) - data - end - - def encode(event) - event - end - - def close - end -end - -class DummyOutput < LogStash::Outputs::Base - config_name "dummyoutput" - milestone 2 - - attr_reader :num_closes - - def initialize(params={}) - super - @num_closes = 0 - end - - def register - end - - def receive(event) - end - - def close - @num_closes += 1 - end -end - -class TestPipeline < LogStash::Pipeline - attr_reader :outputs -end - -describe LogStash::Pipeline do - -context "close" do - - before(:each) do - allow(LogStash::Plugin).to receive(:lookup).with("input", "dummyinput").and_return(DummyInput) - allow(LogStash::Plugin).to receive(:lookup).with("codec", "plain").and_return(DummyCodec) - allow(LogStash::Plugin).to receive(:lookup).with("output", "dummyoutput").and_return(DummyOutput) - end - - let(:test_config_without_output_workers) { - <<-eos - input { - dummyinput {} - } - - output { - dummyoutput {} - } - eos - } - - let(:test_config_with_output_workers) { - <<-eos - input { - dummyinput {} - } - - output { - dummyoutput { - workers => 2 - } - } - eos - } - - context "output close" do - it "should call close of output without output-workers" do - pipeline = TestPipeline.new(test_config_without_output_workers) - pipeline.run - - expect(pipeline.outputs.size ).to eq(1) - expect(pipeline.outputs.first.worker_plugins.size ).to eq(1) - expect(pipeline.outputs.first.worker_plugins.first.num_closes ).to eq(1) - end - - it "should call output close correctly with output workers" do - pipeline = TestPipeline.new(test_config_with_output_workers) - pipeline.run - - expect(pipeline.outputs.size ).to eq(1) - expect(pipeline.outputs.first.num_closes).to eq(0) - pipeline.outputs.first.worker_plugins.each do |plugin| - expect(plugin.num_closes ).to eq(1) - end - end - end - end - - context "compiled flush function" do - - context "cancelled events should not 
propagate down the filters" do - config <<-CONFIG - filter { - multiline { - pattern => "hello" - what => next - } - multiline { - pattern => "hello" - what => next - } - } - CONFIG - - sample("hello") do - expect(subject["message"]).to eq("hello") - end - end - - context "new events should propagate down the filters" do - config <<-CONFIG - filter { - clone { - clones => ["clone1"] - } - multiline { - pattern => "bar" - what => previous - } - } - CONFIG - - sample(["foo", "bar"]) do - expect(subject.size).to eq(2) - - expect(subject[0]["message"]).to eq("foo\nbar") - expect(subject[0]["type"]).to be_nil - expect(subject[1]["message"]).to eq("foo\nbar") - expect(subject[1]["type"]).to eq("clone1") - end - end - end - - context "compiled filter funtions" do - - context "new events should propagate down the filters" do - config <<-CONFIG - filter { - clone { - clones => ["clone1", "clone2"] - } - mutate { - add_field => {"foo" => "bar"} - } - } - CONFIG - - sample("hello") do - expect(subject.size).to eq(3) - - expect(subject[0]["message"]).to eq("hello") - expect(subject[0]["type"]).to be_nil - expect(subject[0]["foo"]).to eq("bar") - - expect(subject[1]["message"]).to eq("hello") - expect(subject[1]["type"]).to eq("clone1") - expect(subject[1]["foo"]).to eq("bar") - - expect(subject[2]["message"]).to eq("hello") - expect(subject[2]["type"]).to eq("clone2") - expect(subject[2]["foo"]).to eq("bar") - end - end - - end -end diff --git a/spec/coverage_helper.rb b/spec/coverage_helper.rb index 6ecb2f570eb..009f7fb5419 100644 --- a/spec/coverage_helper.rb +++ b/spec/coverage_helper.rb @@ -5,20 +5,19 @@ module CoverageHelper ## # Skip list used to avoid loading certain patterns within - # the logstash directories, this patterns are excluded becuause + # the logstash directories, this patterns are excluded because # of potential problems or because they are going to be loaded # in another way. 
## SKIP_LIST = Regexp.union([ /^lib\/bootstrap\/rspec.rb$/, - /^lib\/logstash\/util\/prctl.rb$/ + /^logstash-core\/lib\/logstash\/util\/prctl.rb$/ ]) def self.eager_load - Dir.glob("lib/**/*.rb") do |file| + Dir.glob("{logstash-core{/,-event/},}lib/**/*.rb") do |file| next if file =~ SKIP_LIST require file end end - end diff --git a/spec/license_spec.rb b/spec/license_spec.rb index 6a0ec7ba4b0..f37f29d0431 100644 --- a/spec/license_spec.rb +++ b/spec/license_spec.rb @@ -12,6 +12,7 @@ Regexp.union([ /mit/, /apache*/, /bsd/, + /artistic 2.*/, /ruby/, /lgpl/]) } diff --git a/spec/plugin_manager/install_spec.rb b/spec/plugin_manager/install_spec.rb new file mode 100644 index 00000000000..40eb3dfe408 --- /dev/null +++ b/spec/plugin_manager/install_spec.rb @@ -0,0 +1,28 @@ +# encoding: utf-8 +require 'spec_helper' +require 'pluginmanager/main' + +describe LogStash::PluginManager::Install do + let(:cmd) { LogStash::PluginManager::Install.new("install") } + + before(:each) do + expect(cmd).to receive(:validate_cli_options!).and_return(nil) + end + + context "when validating plugins" do + let(:sources) { ["https://rubygems.org", "http://localhost:9292"] } + + before(:each) do + expect(cmd).to receive(:plugins_gems).and_return([["dummy", nil]]) + expect(cmd).to receive(:install_gems_list!).and_return(nil) + expect(cmd).to receive(:remove_unused_locally_installed_gems!).and_return(nil) + cmd.verify = true + end + + it "should load all the sources defined in the Gemfile" do + expect(cmd.gemfile.gemset).to receive(:sources).and_return(sources) + expect(LogStash::PluginManager).to receive(:logstash_plugin?).with("dummy", nil, {:rubygems_source => sources}).and_return(true) + cmd.execute + end + end +end diff --git a/spec/plugin_manager/update_spec.rb b/spec/plugin_manager/update_spec.rb new file mode 100644 index 00000000000..5498f9dea0c --- /dev/null +++ b/spec/plugin_manager/update_spec.rb @@ -0,0 +1,39 @@ +# encoding: utf-8 +require 'spec_helper' +require 'pluginmanager/main' + +describe LogStash::PluginManager::Update do + let(:cmd) { LogStash::PluginManager::Update.new("update") } + let(:sources) { cmd.gemfile.gemset.sources } + + before(:each) do + expect(cmd).to receive(:find_latest_gem_specs).and_return({}) + allow(cmd).to receive(:warn_local_gems).and_return(nil) + expect(cmd).to receive(:display_updated_plugins).and_return(nil) + expect_any_instance_of(LogStash::Bundler).to receive(:invoke!).with(:clean => true) + end + + it "pass all gem sources to the bundle update command" do + sources = cmd.gemfile.gemset.sources + expect_any_instance_of(LogStash::Bundler).to receive(:invoke!).with(:update => [], :rubygems_source => sources) + cmd.execute + end + + context "when skipping validation" do + let(:cmd) { LogStash::PluginManager::Update.new("update") } + let(:plugin) { OpenStruct.new(:name => "dummy", :options => {} ) } + + before(:each) do + expect(cmd.gemfile).to receive(:find).with(plugin).and_return(plugin) + expect(cmd.gemfile).to receive(:save).and_return(nil) + expect(cmd).to receive(:plugins_to_update).and_return([plugin]) + expect_any_instance_of(LogStash::Bundler).to receive(:invoke!).with(:update => [plugin], :rubygems_source => sources).and_return(nil) + end + + it "skips version verification when ask for it" do + cmd.verify = false + expect(cmd).to_not receive(:validates_version) + cmd.execute + end + end +end diff --git a/spec/plugin_manager/util_spec.rb b/spec/plugin_manager/util_spec.rb new file mode 100644 index 00000000000..10824e56adc --- /dev/null +++ 
b/spec/plugin_manager/util_spec.rb @@ -0,0 +1,71 @@ +#encoding: utf-8 +require 'spec_helper' +require 'pluginmanager/util' +require 'gems' + +describe LogStash::PluginManager do + + describe "fetching plugin information" do + let(:plugin_name) { "logstash-output-elasticsearch" } + + let(:version_data) do + [ { "authors"=>"Elastic", "built_at"=>"2015-08-11T00:00:00.000Z", "description"=>"Output events to elasticsearch", + "downloads_count"=>1638, "metadata"=>{"logstash_group"=>"output", "logstash_plugin"=>"true"}, "number"=>"2.0.0.pre", + "summary"=>"Logstash Output to Elasticsearch", "platform"=>"java", "ruby_version"=>">= 0", "prerelease"=>true, + "licenses"=>["apache-2.0"], "requirements"=>[], "sha"=>"194b27099c13605a882a3669e2363fdecccaab1de48dd44b0cda648dd5516799"}, + { "authors"=>"Elastic", "built_at"=>"2015-08-10T00:00:00.000Z", "description"=>"Output events to elasticsearch", + "downloads_count"=>1638, "metadata"=>{"logstash_group"=>"output", "logstash_plugin"=>"true"}, "number"=>"1.0.7", + "summary"=>"Logstash Output to Elasticsearch", "platform"=>"java", "ruby_version"=>">= 0", "prerelease"=>false, + "licenses"=>["apache-2.0"], "requirements"=>[], "sha"=>"194b27099c13605a882a3669e2363fdecccaab1de48dd44b0cda648dd5516799"}, + { "authors"=>"Elastic", "built_at"=>"2015-08-09T00:00:00.000Z", "description"=>"Output events to elasticsearch", + "downloads_count"=>1638, "metadata"=>{"logstash_group"=>"output", "logstash_plugin"=>"true"}, "number"=>"1.0.4", + "summary"=>"Logstash Output to Elasticsearch", "platform"=>"java", "ruby_version"=>">= 0", "prerelease"=>false, + "licenses"=>["apache-2.0"], "requirements"=>[], "sha"=>"194b27099c13605a882a3669e2363fdecccaab1de48dd44b0cda648dd5516799"} ] + end + + before(:each) do + allow(Gems).to receive(:versions).with(plugin_name).and_return(version_data) + end + + context "fetch plugin info" do + it "should search for the last version infomation non prerelease" do + version_info = LogStash::PluginManager.fetch_latest_version_info(plugin_name) + expect(version_info["number"]).to eq("1.0.7") + end + + + it "should search for the last version infomation with prerelease" do + version_info = LogStash::PluginManager.fetch_latest_version_info(plugin_name, :pre => true) + expect(version_info["number"]).to eq("2.0.0.pre") + end + end + end + + describe "a logstash_plugin validation" do + let(:plugin) { "foo" } + let(:version) { "9.0.0.0" } + + let(:sources) { ["http://source.01", "http://source.02"] } + let(:options) { {:rubygems_source => sources} } + + let(:gemset) { double("gemset") } + let(:gemfile) { double("gemfile") } + let(:dep) { double("dep") } + let(:fetcher) { double("fetcher") } + + before(:each) do + allow(gemfile).to receive(:gemset).and_return(gemset) + allow(gemset).to receive(:sources).and_return(sources) + expect(fetcher).to receive(:spec_for_dependency).and_return([[],[]]) + end + + it "should load all available sources" do + expect(subject).to receive(:plugin_file?).and_return(false) + expect(Gem::Dependency).to receive(:new).and_return(dep) + expect(Gem::SpecFetcher).to receive(:fetcher).and_return(fetcher) + + subject.logstash_plugin?(plugin, version, options) + expect(Gem.sources.map { |source| source }).to eq(sources) + end + end +end diff --git a/spec/util/gemfile_spec.rb b/spec/pluginmanager/gemfile_spec.rb similarity index 100% rename from spec/util/gemfile_spec.rb rename to spec/pluginmanager/gemfile_spec.rb diff --git a/spec/pluginmanager/util_spec.rb b/spec/pluginmanager/util_spec.rb deleted file mode 100644 index 
6a14beeb950..00000000000 --- a/spec/pluginmanager/util_spec.rb +++ /dev/null @@ -1,42 +0,0 @@ -# encoding: utf-8 -require "spec_helper" -require "pluginmanager/util" -require "gems" - -describe LogStash::PluginManager do - - let(:plugin_name) { "logstash-output-elasticsearch" } - - let(:version_data) do - [ { "authors"=>"Elastic", "built_at"=>"2015-08-11T00:00:00.000Z", "description"=>"Output events to elasticsearch", - "downloads_count"=>1638, "metadata"=>{"logstash_group"=>"output", "logstash_plugin"=>"true"}, "number"=>"2.0.0.pre", - "summary"=>"Logstash Output to Elasticsearch", "platform"=>"java", "ruby_version"=>">= 0", "prerelease"=>true, - "licenses"=>["apache-2.0"], "requirements"=>[], "sha"=>"194b27099c13605a882a3669e2363fdecccaab1de48dd44b0cda648dd5516799"}, - { "authors"=>"Elastic", "built_at"=>"2015-08-10T00:00:00.000Z", "description"=>"Output events to elasticsearch", - "downloads_count"=>1638, "metadata"=>{"logstash_group"=>"output", "logstash_plugin"=>"true"}, "number"=>"1.0.7", - "summary"=>"Logstash Output to Elasticsearch", "platform"=>"java", "ruby_version"=>">= 0", "prerelease"=>false, - "licenses"=>["apache-2.0"], "requirements"=>[], "sha"=>"194b27099c13605a882a3669e2363fdecccaab1de48dd44b0cda648dd5516799"}, - { "authors"=>"Elastic", "built_at"=>"2015-08-09T00:00:00.000Z", "description"=>"Output events to elasticsearch", - "downloads_count"=>1638, "metadata"=>{"logstash_group"=>"output", "logstash_plugin"=>"true"}, "number"=>"1.0.4", - "summary"=>"Logstash Output to Elasticsearch", "platform"=>"java", "ruby_version"=>">= 0", "prerelease"=>false, - "licenses"=>["apache-2.0"], "requirements"=>[], "sha"=>"194b27099c13605a882a3669e2363fdecccaab1de48dd44b0cda648dd5516799"} ] - end - - before(:each) do - allow(Gems).to receive(:versions).with(plugin_name).and_return(version_data) - end - - context "fetch plugin info" do - - it "should search for the last version infomation non prerelease" do - version_info = LogStash::PluginManager.fetch_latest_version_info(plugin_name) - expect(version_info["number"]).to eq("1.0.7") - end - - - it "should search for the last version infomation with prerelease" do - version_info = LogStash::PluginManager.fetch_latest_version_info(plugin_name, :pre => true) - expect(version_info["number"]).to eq("2.0.0.pre") - end - end -end diff --git a/spec/util/compress_spec.rb b/spec/util/compress_spec.rb new file mode 100644 index 00000000000..47bab9e995a --- /dev/null +++ b/spec/util/compress_spec.rb @@ -0,0 +1,121 @@ +# encoding: utf-8 +require "spec_helper" +require 'ostruct' +require "bootstrap/util/compress" + +describe LogStash::Util::Zip do + + subject { Class.new { extend LogStash::Util::Zip } } + + context "#extraction" do + + let(:source) { File.join(File.expand_path("."), "source_file.zip") } + let(:target) { File.expand_path("target_dir") } + + it "raise an exception if the target dir exist" do + allow(File).to receive(:exist?).with(target).and_return(true) + expect { subject.extract(source, target) }.to raise_error + end + + let(:zip_file) do + [ "foo", "bar", "zoo" ].inject([]) do |acc, name| + acc << OpenStruct.new(:name => name) + acc + end + end + + it "extract the list of entries from a zip file" do + allow(Zip::File).to receive(:open).with(source).and_yield(zip_file) + expect(FileUtils).to receive(:mkdir_p).exactly(3).times + expect(zip_file).to receive(:extract).exactly(3).times + subject.extract(source, target) + end + end + + context "#compression" do + + let(:target) { File.join(File.expand_path("."), "target_file.zip") } + 
let(:source) { File.expand_path("source_dir") } + + it "raise an exception if the target file exist" do + allow(File).to receive(:exist?).with(target).and_return(true) + expect { subject.compress(source, target) }.to raise_error + end + + let(:dir_files) do + [ "foo", "bar", "zoo" ] + end + + let(:zip_file) { Class.new } + + it "add a dir to a zip file" do + allow(Zip::File).to receive(:open).with(target, ::Zip::File::CREATE).and_yield(zip_file) + allow(Dir).to receive(:glob).and_return(dir_files) + expect(zip_file).to receive(:add).exactly(3).times + subject.compress(source, target) + end + end +end + +describe LogStash::Util::Tar do + + subject { Class.new { extend LogStash::Util::Tar } } + + context "#extraction" do + + let(:source) { File.join(File.expand_path("."), "source_file.tar.gz") } + let(:target) { File.expand_path("target_dir") } + + it "raise an exception if the target dir exist" do + allow(File).to receive(:exist?).with(target).and_return(true) + expect { subject.extract(source, target) }.to raise_error + end + + let(:gzip_file) { Class.new } + + let(:tar_file) do + [ "foo", "bar", "zoo" ].inject([]) do |acc, name| + acc << OpenStruct.new(:full_name => name) + acc + end + end + + it "extract the list of entries from a tar.gz file" do + allow(Zlib::GzipReader).to receive(:open).with(source).and_yield(gzip_file) + allow(Gem::Package::TarReader).to receive(:new).with(gzip_file).and_yield(tar_file) + + expect(FileUtils).to receive(:mkdir).with(target) + expect(File).to receive(:open).exactly(3).times + subject.extract(source, target) + end + end + + context "#compression" do + + let(:target) { File.join(File.expand_path("."), "target_file.tar.gz") } + let(:source) { File.expand_path("source_dir") } + + it "raise an exception if the target file exist" do + allow(File).to receive(:exist?).with(target).and_return(true) + expect { subject.compress(source, target) }.to raise_error + end + + let(:dir_files) do + [ "foo", "bar", "zoo" ] + end + + let(:tar_file) { Class.new } + let(:tar) { Class.new } + + it "add a dir to a tgz file" do + allow(Stud::Temporary).to receive(:file).and_yield(tar_file) + allow(Gem::Package::TarWriter).to receive(:new).with(tar_file).and_yield(tar) + allow(Dir).to receive(:glob).and_return(dir_files) + expect(File).to receive(:stat).exactly(3).times.and_return(OpenStruct.new(:mode => "rw")) + expect(tar).to receive(:add_file).exactly(3).times + expect(tar_file).to receive(:rewind) + expect(subject).to receive(:gzip).with(target, tar_file) + subject.compress(source, target) + end + end +end diff --git a/spec/util/worker_threads_default_printer_spec.rb b/spec/util/worker_threads_default_printer_spec.rb deleted file mode 100644 index 348b9d263e1..00000000000 --- a/spec/util/worker_threads_default_printer_spec.rb +++ /dev/null @@ -1,26 +0,0 @@ -# encoding: utf-8 -require "spec_helper" -require "logstash/util/worker_threads_default_printer" - -describe LogStash::Util::WorkerThreadsDefaultPrinter do - let(:settings) { {} } - let(:collector) { [] } - - subject { described_class.new(settings) } - - context 'when the settings hash is empty' do - it 'the #visit method returns a string with 1 filter worker' do - subject.visit(collector) - expect(collector.first).to eq("Filter workers: 1") - end - end - - context 'when the settings hash has content' do - let(:settings) { {'filter-workers' => 42} } - - it 'the #visit method returns a string with 42 filter workers' do - subject.visit(collector) - expect(collector.first).to eq("Filter workers: 42") - end - end -end
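One closing note on the `logstash-core/lib/logstash/util/decorators.rb` changes earlier in this diff: the read-update-reassign pattern exists because a Java-backed event can hand out a copy from `event[field]`, so an in-place `event[field] << v` mutates only the copy (see issue #4140). A small sketch with a hypothetical copy-on-read event shows why the reassignment is required; `DemoJavaEvent` is illustrative only, not the actual Java event implementation.

    # Hypothetical event whose reader hands out a defensive copy, approximating
    # the behavior of the Java event implementation referenced above.
    class DemoJavaEvent
      def initialize
        @data = { "tags" => ["existing"] }
      end

      def [](key)
        value = @data[key]
        value.is_a?(Array) ? value.dup : value # reader returns a copy
      end

      def []=(key, value)
        @data[key] = value
      end
    end

    event = DemoJavaEvent.new

    # Broken pattern: only the copy is mutated, the event never sees the tag.
    event["tags"] << "lost"
    p event["tags"] # => ["existing"]

    # Pattern used by the patched decorators: read, update, then reassign.
    tags = event["tags"] || []
    tags << "kept"
    event["tags"] = tags
    p event["tags"] # => ["existing", "kept"]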