Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Performance Tracking Integration Test and Data Collection #497

Merged
merged 8 commits into from
Jun 23, 2022
53 changes: 53 additions & 0 deletions integration/test/performance_tracker/get_performance_metrics.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
package data_collector

import (
"context"
"encoding/json"

"github.com/aws/aws-sdk-go-v2/config"
"github.com/aws/aws-sdk-go-v2/service/cloudwatch"
)

// Integration tests run on us-west-2, so all CloudWatch queries use that region.
const region = "us-west-2"
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved

func GetPerformanceMetrics(instanceId string, runtimeSeconds int) (error) {
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved
//load default configuration
cfg, err := config.LoadDefaultConfig(context.TODO(), config.WithRegion(region))
if err != nil {
return err
}

client := cloudwatch.NewFromConfig(cfg)

//declare metrics you want to gather from cloudwatch agent
ids := []string{"m1", "m2"}
metricNames := []string{"procstat_cpu_usage", "procstat_memory_rss"}
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved

//give a 30 second buffer before metrics collection to allow for agent startup
runtimeSeconds -= 30
input, err := GenerateGetMetricInputStruct(ids, metricNames, instanceId, runtimeSeconds)
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved
if err != nil {
return err
}

//call to cloudwatch agent API
metrics, err := GetMetrics(context.TODO(), client, input)
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved
if err != nil {
return err
}

//format data to json before passing output
outputData, err := json.MarshalIndent(metrics.MetricDataResults, "", " ")
if err != nil {
return err
}

//------ PASS TO DATABASE TRANSMITTER HERE------//
//useless code so that outputData is used and compiles
if outputData != nil {
return nil
}

return nil
}
44 changes: 44 additions & 0 deletions integration/test/performance_tracker/performance_metrics_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
//go:build linux && integration
// +build linux,integration

package data_collector
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved

import(
"testing"
"time"
"log"

"github.com/aws/amazon-cloudwatch-agent/integration/test"
)

const (
	// agent JSON config shipped with this test, relative to the test directory
	configPath = "resources/config.json"
	// location on the instance that the agent reads its config from
	configOutputPath = "/opt/aws/amazon-cloudwatch-agent/bin/config.json"
	// how long the agent runs before performance metrics are collected
	agentRuntime = 20 * time.Minute
)

func PerformanceTest(t *testing.T) {

instanceId := test.GetInstanceId()
log.Println("Instance ID used for performance metrics : %s", instanceId)
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved

test.CopyFile(configPath, configOutputPath)

test.StartAgent(configOutputPath, true)

//let agent run before collecting performance metrics on it
time.Sleep(agentRuntime)
SaxyPandaBear marked this conversation as resolved.
Show resolved Hide resolved
log.Printf("Agent has been running for : %s", agentRuntime.String())

//convert to int seconds for use in data collection
runtimeSeconds := int(agentRuntime / time.Second)
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved

//collect data
err := GetPerformanceMetrics(instanceId, runtimeSeconds)
if (err != nil) {
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved
log.Println("Error: " + err)
t.Fatalf("Error: %v", err)
}

test.StopAgent()
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved
}
78 changes: 78 additions & 0 deletions integration/test/performance_tracker/performance_query_utils.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
package data_collector

import (
"time"
"context"
"errors"

"github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/service/cloudwatch"
"github.com/aws/aws-sdk-go-v2/service/cloudwatch/types"
)

const (
	// Namespace is the CloudWatch namespace the agent publishes metrics under.
	Namespace = "CWAgent"
	// DimensionName scopes metric queries to a single EC2 instance.
	DimensionName = "InstanceId"
	// Stat is the statistic requested for each metric.
	Stat = "Average"
	// Period is the metric aggregation period in seconds.
	Period = 30
)

// CWGetMetricDataAPI defines the interface for the GetMetricData function.
// It is satisfied by *cloudwatch.Client (see GetPerformanceMetrics) and lets
// tests substitute a mock for the CloudWatch API.
type CWGetMetricDataAPI interface {
	GetMetricData(ctx context.Context, params *cloudwatch.GetMetricDataInput, optFns ...func(*cloudwatch.Options)) (*cloudwatch.GetMetricDataOutput, error)
}
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved

// GetMetrics fetches the CloudWatch metrics for the provided input in the
// given time-frame. api may be a real *cloudwatch.Client or a test double
// implementing CWGetMetricDataAPI.
func GetMetrics(ctx context.Context, api CWGetMetricDataAPI, input *cloudwatch.GetMetricDataInput) (*cloudwatch.GetMetricDataOutput, error) {
	// ctx (not `c`) per Go convention for context parameters
	return api.GetMetricData(ctx, input)
}
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved

// GenerateGetMetricInputStruct generates the struct required to make a query request to cloudwatch's GetMetrics
func GenerateGetMetricInputStruct(ids []string, metricNames []string, instanceId string, timeDiff int) (*cloudwatch.GetMetricDataInput, error) {
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved
if len(ids) != len(metricNames) {
return nil, errors.New("Mismatching lengths of metric ids and metricNames")
}

if len(ids) == 0 || len(metricNames) == 0 || instanceId == "" || timeDiff == 0 {
return nil, errors.New("Must supply metric ids, metric names, instance id, and time to collect metrics")
}

dimensionValue := instanceId
metricDataQueries := []types.MetricDataQuery{}

//generate list of individual metric requests
for i := 0; i < len(ids); i++ {
metricDataQueries = append(metricDataQueries, ConstructMetricDataQuery(ids[i], Namespace, DimensionName, dimensionValue, metricNames[i], timeDiff))
}
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved

input := &cloudwatch.GetMetricDataInput{
EndTime: aws.Time(time.Unix(time.Now().Unix(), 0)),
StartTime: aws.Time(time.Unix(time.Now().Add(time.Duration(-timeDiff)*time.Minute).Unix(), 0)),
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved
MetricDataQueries: metricDataQueries,
}

return input, nil
}

// ConstructMetricDataQuery is a helper function for GenerateGetMetricInputStruct and constructs individual metric requests
func ConstructMetricDataQuery(id string, namespace string, dimensionName string, dimensionValue string, metricName string, timeDiff int) (types.MetricDataQuery) {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Just pointing out that I think this function is fine, because it serves a different purpose than the GetMetrics function that I already called out as being unnecessary. This one is fine because it's taking a bunch of raw inputs and transforming it into a payload that we would then use for our request to the API. I am fine with it. Though, nitpick - we can consolidate the types

Suggested change
func ConstructMetricDataQuery(id string, namespace string, dimensionName string, dimensionValue string, metricName string, timeDiff int) (types.MetricDataQuery) {
func ConstructMetricDataQuery(id, namespace, dimensionName, dimensionValue, metricName string, timeDiff int) (types.MetricDataQuery) {

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That being said, I think that this might change a little if you move to GetMetricStatistics instead of GetMetricData.

query := types.MetricDataQuery{
Id: aws.String(id),
MetricStat: &types.MetricStat{
Metric: &types.Metric{
Namespace: aws.String(namespace),
MetricName: aws.String(metricName),
Dimensions: []types.Dimension{
types.Dimension{
Name: aws.String(dimensionName),
Value: aws.String(dimensionValue),
},
},
},
Period: aws.Int32(int32(Period)),
Stat: aws.String(Stat),
},
}

return query
}
50 changes: 50 additions & 0 deletions integration/test/performance_tracker/resources/config.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
{
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

One thing to consider going forward is if there is going to be collision when trying to get metrics from different performance tests that run concurrently.

I ran into an issue when writing the CloudWatch Logs integration tests because I reused the log group and log stream name across all of the tests, so I had issues with getting consistent results from assertions because I was picking up data from multiple tests at the same time by accident.

Not something that needs to be addressed right now, but should be something looked into in the next few weeks. The main focus is getting some performance test running and persisting data.

"agent": {
"metrics_collection_interval": 60,
"run_as_user": "root"
},
"metrics": {
"aggregation_dimensions": [
[
"InstanceId"
]
],
"append_dimensions": {
"AutoScalingGroupName": "${aws:AutoScalingGroupName}",
"ImageId": "${aws:ImageId}",
"InstanceId": "${aws:InstanceId}",
"InstanceType": "${aws:InstanceType}"
},
"metrics_collected": {
"disk": {
"measurement": [
"used_percent"
],
"metrics_collection_interval": 60,
"resources": [
"*"
]
},
"mem": {
"measurement": [
"mem_used_percent"
],
"metrics_collection_interval": 60
},
"statsd": {
"metrics_aggregation_interval": 60,
"metrics_collection_interval": 10,
"service_address": ":8125"
},
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved
"procstat": [
{
"exe": "cloudwatch-agent",
"measurement": [
"cpu_usage",
"memory_rss"
]
}
]
}
gmealy1 marked this conversation as resolved.
Show resolved Hide resolved
}
}