Avoid allocating zero size slices (#13764)

Signed-off-by: Bogdan Drutu <bogdandrutu@gmail.com>

bogdandrutu authored Sep 1, 2022
1 parent 63d7311 commit 54fcd93
Showing 96 changed files with 206 additions and 278 deletions.
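
The pattern applied throughout this commit replaces eager zero-length allocations such as make([]T, 0) or []T{} with plain var declarations. As a rough illustration (not part of the commit; the example names are hypothetical), a nil slice behaves the same as an empty one for len, range, and append, but defers any allocation until an element is actually added:

package main

import "fmt"

func main() {
    var nilSlice []int           // nil slice: no backing array allocated yet
    emptySlice := make([]int, 0) // zero-length slice: allocates immediately

    // len, cap, range, and append treat both the same way.
    fmt.Println(len(nilSlice), len(emptySlice)) // 0 0
    for range nilSlice {
        // never runs; ranging over a nil slice is fine
    }

    // append grows a nil slice exactly as it grows an empty one.
    nilSlice = append(nilSlice, 1, 2, 3)
    emptySlice = append(emptySlice, 1, 2, 3)
    fmt.Println(nilSlice, emptySlice) // [1 2 3] [1 2 3]
}
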
@@ -42,7 +42,7 @@ const (
 )
 
 func logDataToLogService(ld plog.Logs) []*sls.Log {
-    slsLogs := make([]*sls.Log, 0)
+    var slsLogs []*sls.Log
     rls := ld.ResourceLogs()
     for i := 0; i < rls.Len(); i++ {
         rl := rls.At(i)

exporter/awscloudwatchlogsexporter/exporter.go (2 changes: 1 addition & 1 deletion)
@@ -144,7 +144,7 @@ func logsToCWLogs(logger *zap.Logger, ld plog.Logs) ([]*cloudwatchlogs.InputLogE
     }
 
     var dropped int
-    out := make([]*cloudwatchlogs.InputLogEvent, 0) // TODO(jbd): set a better capacity
+    var out []*cloudwatchlogs.InputLogEvent
 
     rls := ld.ResourceLogs()
     for i := 0; i < rls.Len(); i++ {

exporter/awsemfexporter/config.go (4 changes: 2 additions & 2 deletions)
@@ -90,7 +90,7 @@ type MetricDescriptor struct {
 
 // Validate filters out invalid metricDeclarations and metricDescriptors
 func (config *Config) Validate() error {
-    validDeclarations := []*MetricDeclaration{}
+    var validDeclarations []*MetricDeclaration
     for _, declaration := range config.MetricDeclarations {
         err := declaration.init(config.logger)
         if err != nil {
@@ -101,7 +101,7 @@ func (config *Config) Validate() error {
     }
     config.MetricDeclarations = validDeclarations
 
-    validDescriptors := []MetricDescriptor{}
+    var validDescriptors []MetricDescriptor
     for _, descriptor := range config.MetricDescriptors {
         if descriptor.metricName == "" {
             continue

exporter/awsemfexporter/config_test.go (24 changes: 9 additions & 15 deletions)
@@ -60,13 +60,10 @@ func TestLoadConfig(t *testing.T) {
            Region: "us-west-2",
            RoleARN: "arn:aws:iam::123456789:role/monitoring-EKS-NodeInstanceRole",
        },
-       LogGroupName: "",
-       LogStreamName: "",
-       DimensionRollupOption: "ZeroAndSingleDimensionRollup",
-       OutputDestination: "cloudwatch",
-       ParseJSONEncodedAttributeValues: make([]string, 0),
-       MetricDeclarations: []*MetricDeclaration{},
-       MetricDescriptors: []MetricDescriptor{},
+       LogGroupName: "",
+       LogStreamName: "",
+       DimensionRollupOption: "ZeroAndSingleDimensionRollup",
+       OutputDestination: "cloudwatch",
    }, r1)
 
    r2 := cfg.Exporters[config.NewComponentIDWithName(typeStr, "resource_attr_to_label")].(*Config)
@@ -84,14 +81,11 @@ func TestLoadConfig(t *testing.T) {
            Region: "",
            RoleARN: "",
        },
-       LogGroupName: "",
-       LogStreamName: "",
-       DimensionRollupOption: "ZeroAndSingleDimensionRollup",
-       OutputDestination: "cloudwatch",
-       ResourceToTelemetrySettings: resourcetotelemetry.Settings{Enabled: true},
-       ParseJSONEncodedAttributeValues: make([]string, 0),
-       MetricDeclarations: []*MetricDeclaration{},
-       MetricDescriptors: []MetricDescriptor{},
+       LogGroupName: "",
+       LogStreamName: "",
+       DimensionRollupOption: "ZeroAndSingleDimensionRollup",
+       OutputDestination: "cloudwatch",
+       ResourceToTelemetrySettings: resourcetotelemetry.Settings{Enabled: true},
    })
 }
 

exporter/awsemfexporter/emf_exporter.go (2 changes: 1 addition & 1 deletion)
@@ -210,7 +210,7 @@ func (emf *emfExporter) listPushers() []cwlogs.Pusher {
    emf.pusherMapLock.Lock()
    defer emf.pusherMapLock.Unlock()
 
-   pushers := []cwlogs.Pusher{}
+   var pushers []cwlogs.Pusher
    for _, pusherMap := range emf.groupStreamToPusherMap {
        for _, pusher := range pusherMap {
            pushers = append(pushers, pusher)

exporter/awsemfexporter/factory.go (19 changes: 8 additions & 11 deletions)
@@ -41,17 +41,14 @@ func NewFactory() component.ExporterFactory {
 // CreateDefaultConfig creates the default configuration for exporter.
 func createDefaultConfig() config.Exporter {
    return &Config{
-       ExporterSettings: config.NewExporterSettings(config.NewComponentID(typeStr)),
-       AWSSessionSettings: awsutil.CreateDefaultSessionConfig(),
-       LogGroupName: "",
-       LogStreamName: "",
-       Namespace: "",
-       DimensionRollupOption: "ZeroAndSingleDimensionRollup",
-       ParseJSONEncodedAttributeValues: make([]string, 0),
-       MetricDeclarations: make([]*MetricDeclaration, 0),
-       MetricDescriptors: make([]MetricDescriptor, 0),
-       OutputDestination: "cloudwatch",
-       logger: nil,
+       ExporterSettings: config.NewExporterSettings(config.NewComponentID(typeStr)),
+       AWSSessionSettings: awsutil.CreateDefaultSessionConfig(),
+       LogGroupName: "",
+       LogStreamName: "",
+       Namespace: "",
+       DimensionRollupOption: "ZeroAndSingleDimensionRollup",
+       OutputDestination: "cloudwatch",
+       logger: nil,
    }
 }
 

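Dropping the three slice fields from the default-config literal above leaves them at their Go zero value, nil, which is what the updated config test earlier in this diff now expects. A tiny sketch of that zero-value behavior, using a hypothetical struct rather than the real Config:

package main

import "fmt"

// cfg is a hypothetical stand-in for an exporter config struct.
type cfg struct {
    Namespace    string
    Declarations []string
}

func main() {
    // Fields omitted from a struct literal take their zero values:
    // "" for strings and nil for slices.
    c := cfg{Namespace: "custom"}
    fmt.Printf("%q %v %t\n", c.Namespace, c.Declarations, c.Declarations == nil) // "custom" [] true
}
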
exporter/awsemfexporter/metric_translator.go (2 changes: 1 addition & 1 deletion)
@@ -250,7 +250,7 @@ func groupedMetricToCWMeasurementsWithFilters(groupedMetric *groupedMetric, conf
    // If the whole batch of metrics don't match any metric declarations, drop them
    if len(metricDeclarations) == 0 {
        labelsStr, _ := json.Marshal(labels)
-       metricNames := make([]string, 0)
+       var metricNames []string
        for metricName := range groupedMetric.metrics {
            metricNames = append(metricNames, metricName)
        }

exporter/awsemfexporter/util.go (2 changes: 1 addition & 1 deletion)
@@ -131,7 +131,7 @@ func dedupDimensions(dimensions [][]string) (deduped [][]string) {
 // The returned dimensions are sorted in alphabetical order within each dimension set
 func dimensionRollup(dimensionRollupOption string, labels map[string]string) [][]string {
    var rollupDimensionArray [][]string
-   dimensionZero := make([]string, 0)
+   var dimensionZero []string
 
    instrLibName, hasOTelKey := labels[oTellibDimensionKey]
    if hasOTelKey {

exporter/awsxrayexporter/internal/translator/cause.go (15 changes: 7 additions & 8 deletions)
@@ -60,7 +60,7 @@ func makeCause(span ptrace.Span, attributes map[string]pcommon.Value, resource p
        language = val.StringVal()
    }
 
-   exceptions := make([]awsxray.Exception, 0)
+   var exceptions []awsxray.Exception
    for i := 0; i < span.Events().Len(); i++ {
        event := span.Events().At(i)
        if event.Name() == ExceptionEventName {
@@ -202,7 +202,7 @@ func fillJavaStacktrace(stacktrace string, exceptions []awsxray.Exception) []aws
        return exceptions
    }
 
-   exception.Stack = make([]awsxray.StackFrame, 0)
+   exception.Stack = nil
    for {
        if strings.HasPrefix(line, "\tat ") {
            parenIdx := strings.IndexByte(line, '(')
@@ -260,7 +260,7 @@ func fillJavaStacktrace(stacktrace string, exceptions []awsxray.Exception) []aws
                ID: aws.String(newSegmentID().HexString()),
                Type: aws.String(causeType),
                Message: aws.String(causeMessage),
-               Stack: make([]awsxray.StackFrame, 0),
+               Stack: nil,
            })
            // when append causes `exceptions` to outgrow its existing
            // capacity, re-allocation will happen so the place
@@ -299,7 +299,7 @@ func fillPythonStacktrace(stacktrace string, exceptions []awsxray.Exception) []a
    line := lines[lineIdx]
    exception := &exceptions[0]
 
-   exception.Stack = make([]awsxray.StackFrame, 0)
+   exception.Stack = nil
    for {
        if strings.HasPrefix(line, " File ") {
            parts := strings.Split(line, ",")
@@ -355,7 +355,6 @@ func fillPythonStacktrace(stacktrace string, exceptions []awsxray.Exception) []a
                ID: aws.String(newSegmentID().HexString()),
                Type: aws.String(causeType),
                Message: aws.String(causeMessage),
-               Stack: make([]awsxray.StackFrame, 0),
            })
            // when append causes `exceptions` to outgrow its existing
            // capacity, re-allocation will happen so the place
@@ -396,7 +395,7 @@ func fillJavaScriptStacktrace(stacktrace string, exceptions []awsxray.Exception)
        return exceptions
    }
 
-   exception.Stack = make([]awsxray.StackFrame, 0)
+   exception.Stack = nil
    for {
        if strings.HasPrefix(line, " at ") {
            parenIdx := strings.IndexByte(line, '(')
@@ -455,7 +454,7 @@ func fillDotnetStacktrace(stacktrace string, exceptions []awsxray.Exception) []a
        return exceptions
    }
 
-   exception.Stack = make([]awsxray.StackFrame, 0)
+   exception.Stack = nil
    for {
        if strings.HasPrefix(line, "\tat ") {
            index := strings.Index(line, " in ")
@@ -532,7 +531,7 @@ func fillGoStacktrace(stacktrace string, exceptions []awsxray.Exception) []awsxr
        return exceptions
    }
 
-   exception.Stack = make([]awsxray.StackFrame, 0)
+   exception.Stack = nil
    for {
        match := re.Match([]byte(line))
        if match {

exporter/datadogexporter/internal/metrics/consumer_test.go (6 changes: 3 additions & 3 deletions)
@@ -72,7 +72,7 @@ func TestRunningMetrics(t *testing.T) {
    consumer := NewConsumer()
    assert.NoError(t, tr.MapMetrics(ctx, ms, consumer))
 
-   runningHostnames := []string{}
+   var runningHostnames []string
    for _, metric := range consumer.runningMetrics(0, component.BuildInfo{}) {
        if metric.Host != nil {
            runningHostnames = append(runningHostnames, *metric.Host)
@@ -117,8 +117,8 @@ func TestTagsMetrics(t *testing.T) {
    assert.NoError(t, tr.MapMetrics(ctx, ms, consumer))
 
    runningMetrics := consumer.runningMetrics(0, component.BuildInfo{})
-   runningTags := []string{}
-   runningHostnames := []string{}
+   var runningTags []string
+   var runningHostnames []string
    for _, metric := range runningMetrics {
        runningTags = append(runningTags, metric.Tags...)
        if metric.Host != nil {

exporter/dynatraceexporter/metrics_exporter.go (6 changes: 3 additions & 3 deletions)
@@ -43,7 +43,7 @@ const (
 
 // NewExporter exports to a Dynatrace Metrics v2 API
 func newMetricsExporter(params component.ExporterCreateSettings, cfg *config.Config) *exporter {
-   confDefaultDims := []dimensions.Dimension{}
+   var confDefaultDims []dimensions.Dimension
    for key, value := range cfg.DefaultDimensions {
        confDefaultDims = append(confDefaultDims, dimensions.NewDimension(key, value))
    }
@@ -82,7 +82,7 @@ type exporter struct {
 
 // for backwards-compatibility with deprecated `Tags` config option
 func dimensionsFromTags(tags []string) dimensions.NormalizedDimensionList {
-   dims := []dimensions.Dimension{}
+   var dims []dimensions.Dimension
    for _, tag := range tags {
        parts := strings.SplitN(tag, "=", 2)
        if len(parts) == 2 {
@@ -120,7 +120,7 @@ func (e *exporter) PushMetricsData(ctx context.Context, md pmetric.Metrics) erro
 }
 
 func (e *exporter) serializeMetrics(md pmetric.Metrics) []string {
-   lines := make([]string, 0)
+   var lines []string
 
    resourceMetrics := md.ResourceMetrics()
 

exporter/instanaexporter/exporter.go (2 changes: 1 addition & 1 deletion)
@@ -51,7 +51,7 @@ func (e *instanaExporter) start(_ context.Context, host component.Host) error {
 
 func (e *instanaExporter) pushConvertedTraces(ctx context.Context, td ptrace.Traces) error {
    converter := converter.NewConvertAllConverter(e.settings.Logger)
-   spans := make([]model.Span, 0)
+   var spans []model.Span
 
    hostID := ""
    resourceSpans := td.ResourceSpans()

@@ -24,7 +24,7 @@ type Bundle struct {
 
 func NewBundle() Bundle {
    return Bundle{
-       Spans: make([]Span, 0),
+       Spans: []Span{},
    }
 }
 

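The hunk above is the one place in this commit where an explicit empty literal is kept instead of a nil slice; a plausible reason (not stated in the commit) is that an empty slice marshals to JSON as [] while a nil slice marshals to null. A minimal sketch, with a hypothetical struct standing in for the exporter's Bundle type:

package main

import (
    "encoding/json"
    "fmt"
)

// bundle is a stand-in for the exporter's Bundle type, not the real one.
type bundle struct {
    Spans []string `json:"spans"`
}

func main() {
    withEmpty, _ := json.Marshal(bundle{Spans: []string{}})
    withNil, _ := json.Marshal(bundle{Spans: nil})
    fmt.Println(string(withEmpty)) // {"spans":[]}
    fmt.Println(string(withNil))   // {"spans":null}
}
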
exporter/instanaexporter/internal/converter/model/util.go (28 changes: 2 additions & 26 deletions)
@@ -22,37 +22,13 @@ import (
 )
 
 func convertTraceID(traceID pcommon.TraceID) string {
-   const byteLength = 16
-
    bytes := traceID.Bytes()
-   traceBytes := make([]byte, 0)
-
-   for (len(traceBytes) + len(bytes)) < byteLength {
-       traceBytes = append(traceBytes, 0)
-   }
-
-   for _, byte := range bytes {
-       traceBytes = append(traceBytes, byte)
-   }
-
-   return hex.EncodeToString(traceBytes)
+   return hex.EncodeToString(bytes[:])
 }
 
 func convertSpanID(spanID pcommon.SpanID) string {
-   const byteLength = 8
-
    bytes := spanID.Bytes()
-   spanBytes := make([]byte, 0)
-
-   for (len(spanBytes) + len(bytes)) < byteLength {
-       spanBytes = append(spanBytes, 0)
-   }
-
-   for _, byte := range bytes {
-       spanBytes = append(spanBytes, byte)
-   }
-
-   return hex.EncodeToString(spanBytes)
+   return hex.EncodeToString(bytes[:])
 }
 
 func otelKindToInstanaKind(otelKind ptrace.SpanKind) (string, bool) {

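The simplification above works because the pcommon trace and span IDs are fixed-size byte arrays, so hex-encoding the whole array already produces the fully padded string that the deleted loops assembled by hand. A small sketch using plain arrays in place of the pcommon types:

package main

import (
    "encoding/hex"
    "fmt"
)

func main() {
    // Stand-ins for the fixed-size ID types ([16]byte trace ID, [8]byte span ID).
    var traceID [16]byte
    traceID[15] = 0x2a

    // Encoding the full array always yields 32 hex characters for a trace ID
    // (16 for a span ID), so no manual zero-padding is required.
    encoded := hex.EncodeToString(traceID[:])
    fmt.Println(encoded, len(encoded)) // 0000000000000000000000000000002a 32
}
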
@@ -43,8 +43,6 @@ func (c *SpanConverter) AcceptsSpans(attributes pcommon.Map, spanSlice ptrace.Sp
 
 func (c *SpanConverter) ConvertSpans(attributes pcommon.Map, spanSlice ptrace.SpanSlice) model.Bundle {
    bundle := model.NewBundle()
-   spans := make([]model.Span, 0)
-
    fromS := model.FromS{}
 
    hostIDValue, ex := attributes.Get(backend.AttributeInstanaHostID)
@@ -76,11 +74,9 @@ func (c *SpanConverter) ConvertSpans(attributes pcommon.Map, spanSlice ptrace.Sp
            continue
        }
 
-       spans = append(spans, instanaSpan)
+       bundle.Spans = append(bundle.Spans, instanaSpan)
    }
 
-   bundle.Spans = spans
-
    return bundle
 }
 

exporter/loadbalancingexporter/resolver_static_test.go (2 changes: 1 addition & 1 deletion)
@@ -69,7 +69,7 @@ func TestResolvedOnlyOnce(t *testing.T) {
 
 func TestFailOnMissingEndpoints(t *testing.T) {
    // prepare
-   expected := []string{}
+   var expected []string
 
    // test
    res, err := newStaticResolver(expected)

exporter/logzioexporter/logger.go (2 changes: 1 addition & 1 deletion)
@@ -111,7 +111,7 @@ func (l *hclog2ZapLogger) StandardLogger(opts *hclog.StandardLoggerOptions) *log
 }
 
 func argsToFields(args ...interface{}) []zapcore.Field {
-   fields := []zapcore.Field{}
+   var fields []zapcore.Field
    for i := 0; i < len(args); i += 2 {
        fields = append(fields, zap.String(args[i].(string), fmt.Sprintf("%v", args[i+1])))
    }

exporter/lokiexporter/legacy_exporter_test.go (4 changes: 2 additions & 2 deletions)
@@ -382,7 +382,7 @@ func TestExporter_logDataToLoki(t *testing.T) {
        lr.SetTimestamp(ts)
 
        pr, numDroppedLogs := exp.logDataToLoki(logs)
-       expectedPr := &logproto.PushRequest{Streams: make([]logproto.Stream, 0)}
+       expectedPr := &logproto.PushRequest{Streams: []logproto.Stream{}}
        require.Equal(t, 1, numDroppedLogs)
        require.Equal(t, expectedPr, pr)
    })
@@ -468,7 +468,7 @@ func TestExporter_logDataToLoki(t *testing.T) {
        lri.SetTimestamp(ts)
 
        pr, numDroppedLogs := exp.logDataToLoki(logs)
-       expectedPr := &logproto.PushRequest{Streams: make([]logproto.Stream, 0)}
+       expectedPr := &logproto.PushRequest{Streams: []logproto.Stream{}}
        require.Equal(t, 1, numDroppedLogs)
        require.Equal(t, expectedPr, pr)
    })

exporter/prometheusremotewriteexporter/exporter_test.go (2 changes: 1 addition & 1 deletion)
@@ -263,7 +263,7 @@ func Test_export(t *testing.T) {
        assert.Equal(t, "snappy", r.Header.Get("Content-Encoding"))
        assert.Equal(t, "opentelemetry-collector/1.0", r.Header.Get("User-Agent"))
        writeReq := &prompb.WriteRequest{}
-       unzipped := []byte{}
+       var unzipped []byte
 
        dest, err := snappy.Decode(unzipped, body)
        require.NoError(t, err)

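The test change above is safe because snappy.Decode only uses its first argument as an optional destination buffer; when it is nil or too small, the library allocates the decoded output itself. A brief usage sketch (standalone, not taken from the test):

package main

import (
    "fmt"

    "github.com/golang/snappy"
)

func main() {
    compressed := snappy.Encode(nil, []byte("hello, world"))

    // A nil destination is fine: Decode allocates a correctly sized buffer
    // when dst cannot hold the decoded bytes.
    var unzipped []byte
    decoded, err := snappy.Decode(unzipped, compressed)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(decoded)) // hello, world
}
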
exporter/prometheusremotewriteexporter/helper.go (2 changes: 1 addition & 1 deletion)
@@ -38,7 +38,7 @@ func batchTimeSeries(tsMap map[string]*prompb.TimeSeries, maxBatchByteSize int)
        wrapped := convertTimeseriesToRequest(tsArray)
        requests = append(requests, wrapped)
 
-       tsArray = make([]prompb.TimeSeries, 0)
+       tsArray = nil
        sizeOfCurrentBatch = 0
    }
 

exporter/skywalkingexporter/logrecord_to_logdata.go (2 changes: 1 addition & 1 deletion)
@@ -36,7 +36,7 @@ const (
 )
 
 func logRecordToLogData(ld plog.Logs) []*logpb.LogData {
-   lds := make([]*logpb.LogData, 0)
+   var lds []*logpb.LogData
    rls := ld.ResourceLogs()
    for i := 0; i < rls.Len(); i++ {
        rl := rls.At(i)

exporter/skywalkingexporter/metricrecord_to_metricdata.go (1 change: 0 additions & 1 deletion)
@@ -221,7 +221,6 @@ func metricsRecordToMetricData(
        service, serviceInstance := resourceToServiceInfo(resMetricSlice.Resource())
        insMetricSlice := resMetricSlice.ScopeMetrics()
        metrics = &metricpb.MeterDataCollection{}
-       metrics.MeterData = make([]*metricpb.MeterData, 0)
        for j := 0; j < insMetricSlice.Len(); j++ {
            insMetrics := insMetricSlice.At(j)
            // ignore insMetrics.Scope()

Some of the 96 changed files are not shown in this diff.
