From f5cf3d237dfaa3c21739b9a4522cb65f3bbd3f4d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20P=C3=A9rez-Aradros=20Herce?=
Date: Tue, 22 Jan 2019 11:50:48 +0100
Subject: [PATCH] Fix bad bytes count in `docker` input when filtering by stream (#10211)

This error happened when a reader restarted after having previously
filtered one stream (stderr or stdout):

```
2018-12-04T02:32:18.636Z ERROR log/harvester.go:281 Read line error: parsing CRI timestamp: parsing time "3.chunk.js" as "2006-01-02T15:04:05Z07:00": cannot parse "unk.js" as "2006"; File: /var/lib/docker/containers/2a7d435274655d581cbdba63c8477042a30583c7cb84c9b2ef5a4c5af9bd2da7/2a7d435274655d581cbdba63c8477042a30583c7cb84c9b2ef5a4c5af9bd2da7-json.log
```

The reason is that the bytes count is wrong, as filtered lines are not
accounted for. Upon reader restart, it begins reading from the wrong
offset, resulting in a parsing error.

This PR fixes the bytes count for this case and adds tests to make sure
the count is now right.
---
 CHANGELOG.next.asciidoc                     |  1 +
 libbeat/reader/readjson/docker_json.go      | 12 ++++++-
 libbeat/reader/readjson/docker_json_test.go | 38 ++++++++++++++++++---
 3 files changed, 46 insertions(+), 5 deletions(-)

diff --git a/CHANGELOG.next.asciidoc b/CHANGELOG.next.asciidoc
index a9da231a352c..b985d330146a 100644
--- a/CHANGELOG.next.asciidoc
+++ b/CHANGELOG.next.asciidoc
@@ -107,6 +107,7 @@ https://github.com/elastic/beats/compare/v7.0.0-alpha2...master[Check the HEAD d
   {issue}9836[9836] error log: {pull}9869[9869], access log: {pull}9955[9955].
 - Support haproxy log lines without captured headers. {issue}9463[9463] {pull}9958[9958]
 - Make elasticsearch/audit fileset be more lenient in parsing node name. {issue}10035[10035] {pull}10135[10135]
+- Fix bad bytes count in `docker` input when filtering by stream. {pull}10211[10211]
 
 *Heartbeat*
 
diff --git a/libbeat/reader/readjson/docker_json.go b/libbeat/reader/readjson/docker_json.go
index 6e2279bc62ec..bdea5177fd93 100644
--- a/libbeat/reader/readjson/docker_json.go
+++ b/libbeat/reader/readjson/docker_json.go
@@ -170,8 +170,14 @@ func (p *DockerJSONReader) parseLine(message *reader.Message, msg *logLine) erro
 
 // Next returns the next line.
 func (p *DockerJSONReader) Next() (reader.Message, error) {
+	var bytes int
 	for {
 		message, err := p.reader.Next()
+
+		// keep the right bytes count even if we return an error
+		bytes += message.Bytes
+		message.Bytes = bytes
+
 		if err != nil {
 			return message, err
 		}
@@ -185,6 +191,11 @@ func (p *DockerJSONReader) Next() (reader.Message, error) {
 		// Handle multiline messages, join partial lines
 		for p.partial && logLine.Partial {
 			next, err := p.reader.Next()
+
+			// keep the right bytes count even if we return an error
+			bytes += next.Bytes
+			message.Bytes = bytes
+
 			if err != nil {
 				return message, err
 			}
@@ -193,7 +204,6 @@ func (p *DockerJSONReader) Next() (reader.Message, error) {
 				return message, err
 			}
 			message.Content = append(message.Content, next.Content...)
-			message.Bytes += next.Bytes
 		}
 
 		if p.stream != "all" && p.stream != logLine.Stream {
diff --git a/libbeat/reader/readjson/docker_json_test.go b/libbeat/reader/readjson/docker_json_test.go
index e2cd08f959ab..a89c83419917 100644
--- a/libbeat/reader/readjson/docker_json_test.go
+++ b/libbeat/reader/readjson/docker_json_test.go
@@ -54,24 +54,36 @@ func TestDockerJSON(t *testing.T) {
 			input:         [][]byte{[]byte(`this is not JSON`)},
 			stream:        "all",
 			expectedError: true,
+			expectedMessage: reader.Message{
+				Bytes: 16,
+			},
 		},
 		{
 			name:          "Wrong CRI",
 			input:         [][]byte{[]byte(`2017-09-12T22:32:21.212861448Z stdout`)},
 			stream:        "all",
 			expectedError: true,
+			expectedMessage: reader.Message{
+				Bytes: 37,
+			},
 		},
 		{
 			name:          "Wrong CRI",
 			input:         [][]byte{[]byte(`{this is not JSON nor CRI`)},
 			stream:        "all",
 			expectedError: true,
+			expectedMessage: reader.Message{
+				Bytes: 25,
+			},
 		},
 		{
 			name:          "Missing time",
 			input:         [][]byte{[]byte(`{"log":"1:M 09 Nov 13:27:36.276 # User requested shutdown...\n","stream":"stdout"}`)},
 			stream:        "all",
 			expectedError: true,
+			expectedMessage: reader.Message{
+				Bytes: 82,
+			},
 		},
 		{
 			name:  "CRI log no tags",
@@ -98,7 +110,7 @@ func TestDockerJSON(t *testing.T) {
 			criflags: true,
 		},
 		{
-			name: "Filtering stream",
+			name: "Filtering stream, bytes count accounts for all (filtered and not)",
 			input: [][]byte{
 				[]byte(`{"log":"filtered\n","stream":"stdout","time":"2017-11-09T13:27:36.277747246Z"}`),
 				[]byte(`{"log":"unfiltered\n","stream":"stderr","time":"2017-11-09T13:27:36.277747246Z"}`),
@@ -109,11 +121,11 @@ func TestDockerJSON(t *testing.T) {
 				Content: []byte("unfiltered\n"),
 				Fields:  common.MapStr{"stream": "stderr"},
 				Ts:      time.Date(2017, 11, 9, 13, 27, 36, 277747246, time.UTC),
-				Bytes:   80,
+				Bytes:   158,
 			},
 		},
 		{
-			name: "Filtering stream",
+			name: "Filtering CRI stream, bytes count accounts for all (filtered and not)",
 			input: [][]byte{
 				[]byte(`2017-10-12T13:32:21.232861448Z stdout F 2017-10-12 13:32:21.212 [INFO][88] table.go 710: Invalidating dataplane cache`),
 				[]byte(`2017-11-12T23:32:21.212771448Z stderr F 2017-11-12 23:32:21.212 [ERROR][77] table.go 111: error`),
@@ -124,7 +136,7 @@ func TestDockerJSON(t *testing.T) {
 				Content: []byte("2017-11-12 23:32:21.212 [ERROR][77] table.go 111: error"),
 				Fields:  common.MapStr{"stream": "stderr"},
 				Ts:      time.Date(2017, 11, 12, 23, 32, 21, 212771448, time.UTC),
-				Bytes:   95,
+				Bytes:   212,
 			},
 			criflags: true,
 		},
@@ -196,6 +208,9 @@ func TestDockerJSON(t *testing.T) {
 			stream:        "all",
 			forceCRI:      true,
 			expectedError: true,
+			expectedMessage: reader.Message{
+				Bytes: 82,
+			},
 		},
 		{
 			name:     "Force CRI log no tags",
@@ -257,6 +272,19 @@ func TestDockerJSON(t *testing.T) {
 			forceCRI: true,
 			criflags: true,
 		},
+		{
+			name: "Error parsing still keeps good bytes count",
+			input: [][]byte{
+				[]byte(`{"log":"1:M 09 Nov 13:27:36.276 # User requested ","stream":"stdout","time":"2017-11-09T13:27:36.277747246Z"}`),
+				[]byte(`{"log":"shutdown...\n","stream`),
+			},
+			stream:        "stdout",
+			expectedError: true,
+			expectedMessage: reader.Message{
+				Bytes: 139,
+			},
+			partial: true,
+		},
 	}
 
 	for _, test := range tests {
@@ -273,6 +301,8 @@ func TestDockerJSON(t *testing.T) {
 
 			if err == nil {
 				assert.EqualValues(t, test.expectedMessage, message)
+			} else {
+				assert.Equal(t, test.expectedMessage.Bytes, message.Bytes)
 			}
 		})
 	}
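
For illustration only, here is a minimal, self-contained Go sketch of the byte-accounting pattern the patch applies. The `line` and `filteringReader` types are hypothetical stand-ins invented for this example (they are not the Beats `reader`/`readjson` types); the point is simply that every consumed line advances the byte count, even when the line is filtered out or an error is returned, so a reader that restarts from the reported offset lands on a real line boundary.

```
package main

import (
	"fmt"
	"io"
)

// line models one decoded log line: its content, the stream it came from,
// and how many bytes it occupied in the underlying log file.
type line struct {
	content string
	stream  string // "stdout" or "stderr"
	bytes   int
}

// filteringReader returns only lines matching its stream filter, but the
// bytes field of every returned line also covers the lines skipped since the
// previous return, so the caller's file offset stays correct.
type filteringReader struct {
	lines  []line
	pos    int
	stream string // "all", "stdout" or "stderr"
}

func (r *filteringReader) next() (line, error) {
	var bytes int
	for r.pos < len(r.lines) {
		l := r.lines[r.pos]
		r.pos++

		// Always count consumed bytes, even for lines we are about to drop;
		// otherwise the reported offset falls behind the real file position.
		bytes += l.bytes

		if r.stream != "all" && r.stream != l.stream {
			continue // filtered out, but its bytes are already accounted for
		}

		l.bytes = bytes
		return l, nil
	}
	// Keep the accumulated count even when returning an error.
	return line{bytes: bytes}, io.EOF
}

func main() {
	r := &filteringReader{
		stream: "stderr",
		lines: []line{
			{content: "filtered\n", stream: "stdout", bytes: 80},
			{content: "unfiltered\n", stream: "stderr", bytes: 78},
		},
	}
	l, err := r.next()
	// Prints bytes=158: the 80 bytes of the filtered stdout line are included.
	fmt.Printf("content=%q bytes=%d err=%v\n", l.content, l.bytes, err)
}
```

The actual fix in `DockerJSONReader.Next` follows the same idea: a running `bytes` counter is written into the message before any early return, replacing the old per-line `message.Bytes += next.Bytes` update that missed filtered lines.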