Add conditions to copy_fields processor #6730

Open · wants to merge 8 commits into main
New file (changelog fragment):
@@ -0,0 +1,33 @@
# Kind can be one of:
# - breaking-change: a change to previously-documented behavior
# - deprecation: functionality that is being removed in a later release
# - bug-fix: fixes a problem in a previous version
# - enhancement: extends functionality but does not break or fix existing behavior
# - feature: new functionality
# - known-issue: problems that we are aware of in a given version
# - security: impacts on the security of a product or a user’s deployment.
# - upgrade: important information for someone upgrading from a prior version
# - other: does not fit into any of the other categories
kind: bug-fix

# Change summary; an 80-ish-character description of the change.
summary: >
Add conditions to copy_fields processors to prevent spamming the debug logs

# Long description; in case the summary is not enough to describe the change,
# this field accommodates a description without length limits.
# NOTE: This field will be rendered only for breaking-change and known-issue kinds at the moment.
description:

# Affected component; usually one of "elastic-agent", "fleet-server", "filebeat", "metricbeat", "auditbeat", "all", etc.
component: elastic-agent

# PR URL; optional; the PR number that added the changeset.
# If not present, it is automatically filled by the tooling by finding the PR where this changelog fragment was added.
# NOTE: the tooling supports backports, so it's able to fill the original PR number instead of the backport PR number.
# Please provide it if you are adding a fragment for a different PR.
pr: https://github.com/elastic/elastic-agent/pull/6730

# Issue URL; optional; the GitHub issue related to this changeset (either closes or is part of).
# If not present, it is automatically filled by the tooling with the issue linked to the PR number.
issue: https://github.com/elastic/elastic-agent/issues/5299
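
For context on the fix itself: without a condition, the `copy_fields` processor reports a failure every time the destination field already exists, and with `fail_on_error: false` that failure lands in the debug-level event log on every event — the spam this PR eliminates. The change guards each processor with a `when.not.has_fields` condition so it only runs when the destination is absent. Below is a minimal runnable sketch of one guarded processor taken from the diff that follows, rendered to YAML for readability; `gopkg.in/yaml.v3` is used only for printing and is an assumption, any YAML encoder works:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	// The guarded processor: copy data_stream.dataset_original into
	// data_stream.dataset only when the destination does not exist yet.
	processor := map[string]any{
		"copy_fields": map[string]any{
			"when": map[string]any{
				"not": map[string]any{
					"has_fields": []any{"data_stream.dataset"},
				},
			},
			"fields": []any{
				map[string]any{
					"from": "data_stream.dataset_original",
					"to":   "data_stream.dataset",
				},
			},
			"fail_on_error":  false,
			"ignore_missing": true,
		},
	}

	out, err := yaml.Marshal(processor)
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
}
```

The printed YAML is roughly the processor configuration handed to the monitoring Filebeat. `has_fields` is true only when the listed field exists, so `not.has_fields` makes the copy a no-op for events that already carry `data_stream.dataset`.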
28 changes: 28 additions & 0 deletions internal/pkg/agent/application/monitoring/v1_monitor.go
@@ -424,6 +424,13 @@ func (b *BeatsMonitor) injectLogsInput(cfg map[string]interface{}, components []
"to": "data_stream.dataset_original",
},
},
"when": map[string]any{
"not": map[string]any{
"has_fields": []any{
"data_stream.dataset_original",
},
},
},
},
},
// drop the dataset field so the following copy_fields can copy to it
@@ -443,13 +450,27 @@ func (b *BeatsMonitor) injectLogsInput(cfg map[string]interface{}, components []
"to": "data_stream.dataset",
},
},
"when": map[string]any{
"not": map[string]any{
"has_fields": []any{
"data_stream.dataset",
},
},
},
"fail_on_error": false,
"ignore_missing": true,
},
},
// possible it's a log message from agent itself (doesn't have component.dataset)
map[string]interface{}{
"copy_fields": map[string]interface{}{
"when": map[string]any{
"not": map[string]any{
"has_fields": []any{
"data_stream.dataset",
},
},
},
"fields": []interface{}{
map[string]interface{}{
"from": "data_stream.dataset_original",
@@ -471,6 +492,13 @@ func (b *BeatsMonitor) injectLogsInput(cfg map[string]interface{}, components []
// update event.dataset with the now used data_stream.dataset
map[string]interface{}{
"copy_fields": map[string]interface{}{
"when": map[string]any{
"not": map[string]any{
"has_fields": []any{
"event.dataset",
},
},
},
"fields": []interface{}{
map[string]interface{}{
"from": "data_stream.dataset",
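
The same `when.not.has_fields` guard is added verbatim to four `copy_fields` processors in this file. Purely as a sketch — not part of the PR — the repetition could be factored into a small helper; `skipIfPresent` below is a hypothetical name and signature:

```go
package main

import "fmt"

// skipIfPresent is a hypothetical helper (not in the PR): it wraps a
// copy_fields processor body with a condition that skips the processor
// when the destination field already exists on the event.
func skipIfPresent(copyFields map[string]any, dest string) map[string]any {
	copyFields["when"] = map[string]any{
		"not": map[string]any{
			"has_fields": []any{dest},
		},
	}
	return map[string]any{"copy_fields": copyFields}
}

func main() {
	p := skipIfPresent(map[string]any{
		"fields": []any{
			map[string]any{"from": "data_stream.dataset_original", "to": "data_stream.dataset"},
		},
		"fail_on_error":  false,
		"ignore_missing": true,
	}, "data_stream.dataset")
	fmt.Printf("%#v\n", p)
}
```

Each call site would then reduce to one `skipIfPresent(...)` call, keeping the guard identical everywhere it is needed.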
45 changes: 39 additions & 6 deletions testing/integration/event_logging_test.go
@@ -10,6 +10,7 @@ import (
"bufio"
"bytes"
"context"
"encoding/json"
"fmt"
"net/http"
"net/http/httputil"
@@ -37,11 +38,11 @@ outputs:
hosts:
- %s
protocol: http
preset: balanced

preset: latency
inputs:
- type: filestream
id: your-input-id
log_level: debug
streams:
- id: your-filestream-stream-id
data_stream:
@@ -87,7 +88,7 @@ func TestEventLogFile(t *testing.T) {
esURL := startMockES(t)

logFilepath := path.Join(t.TempDir(), t.Name())
generateLogFile(t, logFilepath, time.Millisecond*100, 1)
generateLogFile(t, logFilepath, time.Millisecond*100, 20)

cfg := fmt.Sprintf(eventLogConfig, esURL, logFilepath)

@@ -126,6 +127,7 @@ func TestEventLogFile(t *testing.T) {

// Now the Elastic-Agent is running, so validate the Event log file.
requireEventLogFileExistsWithData(t, agentFixture)
requireNoCopyProcessorError(t, agentFixture)

// The diagnostics command is already tested by another test,
// here we just want to validate the events log behaviour
@@ -307,7 +309,7 @@ func addOverwriteToPolicy(t *testing.T, info *define.Info, policyName, policyID
}
}

func requireEventLogFileExistsWithData(t *testing.T, agentFixture *atesting.Fixture) {
func readEventLogFile(t *testing.T, agentFixture *atesting.Fixture) string {
// Now the Elastic-Agent is running, so validate the Event log file.
// Because the path changes based on the Elastic-Agent version, we
// use glob to find the file
@@ -338,8 +340,39 @@ func requireEventLogFileExistsWithData(t *testing.T, agentFixture *atesting.Fixt
t.Fatalf("cannot read file '%s': %s", logFileName, err)
}

logEntry := string(logEntryBytes)
expectedStr := "Cannot index event"
return string(logEntryBytes)
}

func requireNoCopyProcessorError(t *testing.T, agentFixture *atesting.Fixture) {
data := readEventLogFile(t, agentFixture)
for _, line := range strings.Split(data, "\n") {
logEntry := struct {
LogLogger string `json:"log.logger"`
Message string `json:"message"`
}{}

if len(line) == 0 {
continue
}
if err := json.Unmarshal([]byte(line), &logEntry); err != nil {
t.Fatalf("could not parse log entry: %q", line)
}

if logEntry.LogLogger == "copy_fields" {
if strings.Contains(logEntry.Message, "Failed to copy fields") {
if strings.Contains(logEntry.Message, "already exists, drop or rename this field first") {
t.Fatal("copy_fields processor must not fail")
}
}
}
}
}

func requireEventLogFileExistsWithData(t *testing.T, agentFixture *atesting.Fixture) {
logEntry := readEventLogFile(t, agentFixture)
// That's part of the generated event that is logged by the 'processor'
// logger at level debug
expectedStr := "TestEventLogFile"
if !strings.Contains(logEntry, expectedStr) {
t.Errorf(
"did not find the expected log entry ('%s') in the events log file",
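
One subtlety in `requireNoCopyProcessorError`: the struct tag `json:"log.logger"` matches a literal top-level key named `log.logger` — `encoding/json` gives dots in tag names no special meaning — so the helper relies on the event log emitting flat dotted keys rather than a nested `log: {logger: ...}` object. A self-contained check of that behavior:

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// encoding/json treats the dot in the tag as part of the key name,
	// so only the flat form matches.
	flat := []byte(`{"log.logger":"copy_fields","message":"Failed to copy fields"}`)
	nested := []byte(`{"log":{"logger":"copy_fields"},"message":"Failed to copy fields"}`)

	entry := struct {
		LogLogger string `json:"log.logger"`
		Message   string `json:"message"`
	}{}

	_ = json.Unmarshal(flat, &entry)
	fmt.Printf("flat:   %q\n", entry.LogLogger) // "copy_fields"

	entry.LogLogger = ""
	_ = json.Unmarshal(nested, &entry)
	fmt.Printf("nested: %q\n", entry.LogLogger) // "" -- nested keys are not flattened
}
```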
2 changes: 1 addition & 1 deletion testing/integration/logs_ingestion_test.go
@@ -128,7 +128,7 @@ func startMockES(t *testing.T) string {
uid,
clusterUUID,
nil,
time.Now().Add(time.Hour), 0, 0, 0, 100, 0))
time.Now().Add(time.Hour), 0, 0, 0, 0, 0))
Author comment:
This causes the mock to accept all the events, instead of returning an error. The test that relied on the error has been updated.


s := httptest.NewServer(mux)
t.Cleanup(s.Close)