-
Notifications
You must be signed in to change notification settings - Fork 153
/
Copy pathupgrade_fleet_test.go
176 lines (149 loc) · 6.71 KB
/
upgrade_fleet_test.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
// or more contributor license agreements. Licensed under the Elastic License;
// you may not use this file except in compliance with the Elastic License.
//go:build integration
package integration
import (
"context"
"strings"
"testing"
"time"
"github.com/google/uuid"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/elastic/elastic-agent-libs/kibana"
"github.com/elastic/elastic-agent/pkg/testing/tools/check"
"github.com/elastic/elastic-agent/pkg/testing/tools/fleettools"
atesting "github.com/elastic/elastic-agent/pkg/testing"
"github.com/elastic/elastic-agent/pkg/testing/define"
"github.com/elastic/elastic-agent/pkg/version"
"github.com/elastic/elastic-agent/testing/upgradetest"
)
// TestFleetManagedUpgrade tests that the build under test can retrieve an action from
// Fleet and perform the upgrade. It does not need to test all the combinations of
// versions as the standalone tests already perform those tests and would be redundant.
//
// The start fixture is the build under test; the end fixture is the same
// version fetched from the artifacts repository (forced to -SNAPSHOT). If the
// two resolve to the identical binary+commit there is nothing to upgrade to,
// so the test is skipped.
func TestFleetManagedUpgrade(t *testing.T) {
	info := define.Require(t, define.Requirements{
		Stack: &define.Stack{},
		Local: false, // requires Agent installation
		Sudo:  true,  // requires Agent installation
	})

	ctx, cancel := context.WithCancel(context.TODO())
	defer cancel()

	// Start at the build version as we want to test the retry
	// logic that is in the build.
	startFixture, err := define.NewFixture(t, define.Version())
	require.NoError(t, err)
	require.NoError(t, startFixture.Prepare(ctx))

	startVersionInfo, err := startFixture.ExecVersion(ctx)
	require.NoError(t, err)

	// Upgrade to a different build but of the same version (always a snapshot).
	// In the case there is not a different build then the test is skipped.
	// Fleet doesn't allow a downgrade to occur, so we cannot go to a lower version.
	targetVersion := define.Version()
	if !strings.HasSuffix(targetVersion, "-SNAPSHOT") {
		targetVersion += "-SNAPSHOT"
	}

	endFixture, err := atesting.NewFixture(
		t,
		targetVersion,
		atesting.WithFetcher(atesting.ArtifactFetcher()),
	)
	require.NoError(t, err)
	require.NoError(t, endFixture.Prepare(ctx))

	endVersionInfo, err := endFixture.ExecVersion(ctx)
	require.NoError(t, err)

	// Nothing to test when the artifacts-repository build is byte-identical
	// to the build under test.
	sameVersion := startVersionInfo.Binary.String() == endVersionInfo.Binary.String()
	sameCommit := startVersionInfo.Binary.Commit == endVersionInfo.Binary.Commit
	if sameVersion && sameCommit {
		t.Skipf("Build under test is the same as the build from the artifacts repository (version: %s) [commit: %s]", startVersionInfo.Binary.String(), startVersionInfo.Binary.Commit)
	}

	t.Logf("Testing Elastic Agent upgrade from %s to %s with Fleet...", define.Version(), endVersionInfo.Binary.String())

	testUpgradeFleetManagedElasticAgent(ctx, t, info, startFixture, endFixture)
}
// testUpgradeFleetManagedElasticAgent drives a Fleet-managed upgrade end to end:
// it creates a throwaway Agent policy and enrollment key, installs and enrolls
// the start fixture, issues the upgrade action through Fleet to the end
// fixture's version, waits for the upgrade watcher to start and finish, and
// finally verifies the agent is healthy, online in Fleet, and reporting the
// expected version (i.e. it was not rolled back).
func testUpgradeFleetManagedElasticAgent(ctx context.Context, t *testing.T, info *define.Info, startFixture *atesting.Fixture, endFixture *atesting.Fixture) {
	startVersionInfo, err := startFixture.ExecVersion(ctx)
	require.NoError(t, err)
	startParsedVersion, err := version.ParseVersion(startVersionInfo.Binary.String())
	require.NoError(t, err)
	endVersionInfo, err := endFixture.ExecVersion(ctx)
	require.NoError(t, err)

	kibClient := info.KibanaClient
	policyUUID := uuid.New().String()

	t.Log("Creating Agent policy...")
	policy, err := kibClient.CreatePolicy(ctx, kibana.AgentPolicy{
		Name:        "test-policy-" + policyUUID,
		Namespace:   "default",
		Description: "Test policy " + policyUUID,
		MonitoringEnabled: []kibana.MonitoringEnabledOption{
			kibana.MonitoringEnabledLogs,
			kibana.MonitoringEnabledMetrics,
		},
	})
	require.NoError(t, err)

	t.Log("Creating Agent enrollment API key...")
	enrollmentToken, err := kibClient.CreateEnrollmentAPIKey(ctx, kibana.CreateEnrollmentAPIKeyRequest{
		PolicyID: policy.ID,
	})
	require.NoError(t, err)

	t.Log("Getting default Fleet Server URL...")
	fleetServerURL, err := fleettools.DefaultURL(kibClient)
	require.NoError(t, err)

	t.Log("Enrolling Elastic Agent...")
	// Older agents (<= 8.2.0) do not support the non-interactive install flag.
	nonInteractive := upgradetest.Version_8_2_0.Less(*startParsedVersion)
	output, err := startFixture.Install(ctx, &atesting.InstallOpts{
		NonInteractive: nonInteractive,
		Force:          true,
		EnrollOpts: atesting.EnrollOpts{
			URL:             fleetServerURL,
			EnrollmentToken: enrollmentToken.APIKey,
		},
	})
	require.NoError(t, err, "failed to install start agent [output: %s]", string(output))
	t.Cleanup(func() {
		t.Log("Un-enrolling Elastic Agent...")
		assert.NoError(t, fleettools.UnEnrollAgent(info.KibanaClient, policy.ID))
	})

	t.Log("Waiting for Agent to be correct version and healthy...")
	require.NoError(t, upgradetest.WaitHealthyAndVersion(ctx, startFixture, startVersionInfo.Binary, 2*time.Minute, 10*time.Second, t))

	t.Log("Waiting for enrolled Agent status to be online...")
	require.Eventually(t, check.FleetAgentStatus(t, kibClient, policy.ID, "online"), 2*time.Minute, 10*time.Second, "Agent status is not online")

	t.Logf("Upgrading from version %q to version %q...", startParsedVersion, endVersionInfo.Binary.String())
	require.NoError(t, fleettools.UpgradeAgent(kibClient, policy.ID, endVersionInfo.Binary.String(), true))

	// wait for the watcher to show up
	t.Logf("Waiting for upgrade watcher to start...")
	require.NoError(t, upgradetest.WaitForWatcher(ctx, 5*time.Minute, 10*time.Second))
	t.Logf("Upgrade watcher started")

	// wait for the agent to be healthy and correct version
	require.NoError(t, upgradetest.WaitHealthyAndVersion(ctx, startFixture, endVersionInfo.Binary, 2*time.Minute, 10*time.Second, t))

	t.Log("Waiting for enrolled Agent status to be online...")
	require.Eventually(t, check.FleetAgentStatus(t, kibClient, policy.ID, "online"), 10*time.Minute, 15*time.Second, "Agent status is not online")

	// wait for version
	require.Eventually(t, func() bool {
		t.Log("Getting Agent version...")
		reported, err := fleettools.GetAgentVersion(kibClient, policy.ID)
		if err != nil {
			t.Logf("error getting agent version: %v", err)
			return false
		}
		return reported == endVersionInfo.Binary.Version
	}, 5*time.Minute, time.Second)

	t.Logf("Waiting for upgrade watcher to finish...")
	require.NoError(t, upgradetest.WaitForNoWatcher(ctx, 2*time.Minute, 10*time.Second, 1*time.Minute+15*time.Second))
	t.Logf("Upgrade watcher finished")

	// now that the watcher has stopped lets ensure that it's still the expected
	// version, otherwise it's possible that it was rolled back to the original version
	assert.NoError(t, upgradetest.CheckHealthyAndVersion(ctx, startFixture, endVersionInfo.Binary))
}