Merge branch 'main' into splunkenterprisereceiver
michael-burt authored Oct 25, 2024
2 parents 7b1b37c + ed09990 commit da98199
Showing 16 changed files with 284 additions and 22 deletions.
27 changes: 27 additions & 0 deletions .chloggen/add_azure_default_auth.yaml
@@ -0,0 +1,27 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: enhancement

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: azuredataexplorerexporter

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Add new configuration option `use_default_auth` to enable default authentication for Azure Data Explorer. This option allows users to leverage workload identity for authentication.

# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
issues: [33667]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext:

# If your change doesn't affect end users or the exported elements of any package,
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: [user]
27 changes: 27 additions & 0 deletions .chloggen/receiver-otlpjsonfile-profiles.yaml
@@ -0,0 +1,27 @@
# Use this changelog template to create an entry for release notes.

# One of 'breaking', 'deprecation', 'new_component', 'enhancement', 'bug_fix'
change_type: enhancement

# The name of the component, or a single word describing the area of concern, (e.g. filelogreceiver)
component: otlpjsonfilereceiver

# A brief description of the change. Surround your text with quotes ("") if it needs to start with a backtick (`).
note: Add support for profiles signal

# Mandatory: One or more tracking issues related to the change. You can use the PR number here if no issue exists.
issues: [35977]

# (Optional) One or more lines of additional information to render under the primary note.
# These lines will be padded with 2 spaces and then inserted directly into the document.
# Use pipe (|) for multiline entries.
subtext:

# If your change doesn't affect end users or the exported elements of any package,
# you should instead start your pull request title with [chore] or use the "Skip Changelog" label.
# Optional: The change log or logs in which this entry should be included.
# e.g. '[user]' or '[user, api]'
# Include 'user' if the change is relevant to end users.
# Include 'api' if there is a change to a library API.
# Default: '[user]'
change_logs: []
13 changes: 10 additions & 3 deletions exporter/azuredataexplorerexporter/README.md
@@ -22,9 +22,16 @@ This exporter sends metrics, logs and trace data to
The following settings are required:

- `cluster_uri` (no default): The URI of the provisioned ADX cluster to ingest the data into.
- `application_id` (no default): The client id to connect to the cluster and ingest data.
- `application_key` (no default): The cluster secret corresponding to the client id.
- `tenant_id` (no default): The tenant id where the application_id is referenced from.

Exactly one authentication method is required (a configuration sketch follows this list):
- Service principal:
- `application_id` (no default): The client id to connect to the cluster and ingest data.
- `application_key` (no default): The cluster secret corresponding to the client id.
- `tenant_id` (no default): The tenant id where the application_id is referenced from.
- Managed identity:
  - `managed_identity_id` (no default): The managed identity to authenticate with. Set to "system" for a system-assigned managed identity, or to the managed identity's client ID (GUID) for a user-assigned managed identity.
- Default authentication:
- `use_azure_auth` (default: false): Set to true to use the Azure [default authentication](https://learn.microsoft.com/en-us/azure/developer/go/azure-sdk-authentication?tabs=bash#2-authenticate-with-azure).
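
For reference, a minimal configuration using the default-authentication path might look like the sketch below. It only illustrates the new option; the cluster URI is a placeholder, and database and table names are left to the exporter's defaults.

```yaml
exporters:
  azuredataexplorer:
    # Kusto cluster URI (placeholder)
    cluster_uri: "https://CLUSTER.kusto.windows.net"
    # Authenticate via the Azure default credential chain (e.g. workload identity)
    use_azure_auth: true
```

With `use_azure_auth: true`, do not also set `application_id`/`application_key`/`tenant_id` or `managed_identity_id`; the exporter's validation expects exactly one authentication method.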

The following settings can be optionally configured and have default values:
> Note that the database tables are expected to be created before the exporter is in operation; their definitions are in the section [Database and Table definition scripts](#database-and-table-definition-scripts)
2 changes: 2 additions & 0 deletions exporter/azuredataexplorerexporter/adx_exporter.go
@@ -218,6 +218,8 @@ func createKcsb(config *Config, version string) *kusto.ConnectionStringBuilder {
isSystemManagedIdentity := strings.EqualFold(strings.TrimSpace(config.ManagedIdentityID), "SYSTEM")
// If a managed identity is configured, use it; for a system-assigned managed identity, "system" is used as the identity
switch {
case config.UseAzureAuth:
kcsb = kusto.NewConnectionStringBuilder(config.ClusterURI).WithDefaultAzureCredential()
case !isManagedIdentity:
kcsb = kusto.NewConnectionStringBuilder(config.ClusterURI).WithAadAppKey(config.ApplicationID, string(config.ApplicationKey), config.TenantID)
case isManagedIdentity && isSystemManagedIdentity:
12 changes: 12 additions & 0 deletions exporter/azuredataexplorerexporter/adx_exporter_test.go
@@ -178,6 +178,7 @@ func TestCreateKcsb(t *testing.T) {
name string // name of the test
config Config // config for the test
isMsi bool // is MSI enabled
isAzureAuth bool // is azure authentication enabled
applicationID string // application id
managedIdentityID string // managed identity id
}{
@@ -216,6 +217,15 @@
managedIdentityID: "636d798f-b005-41c9-9809-81a5e5a12b2e",
applicationID: "",
},
{
name: "azure auth",
config: Config{
ClusterURI: "https://CLUSTER.kusto.windows.net",
Database: "tests",
UseAzureAuth: true,
},
isAzureAuth: true,
},
}
for i := range tests {
tt := tests[i]
@@ -229,6 +239,8 @@ func TestCreateKcsb(t *testing.T) {
wantManagedID := tt.managedIdentityID
assert.Equal(t, wantManagedID, gotKcsb.ManagedServiceIdentity)
assert.Equal(t, "https://CLUSTER.kusto.windows.net", gotKcsb.DataSource)
wantIsAzure := tt.isAzureAuth
assert.Equal(t, wantIsAzure, gotKcsb.DefaultAuth)
})
}
}
21 changes: 18 additions & 3 deletions exporter/azuredataexplorerexporter/config.go
@@ -24,6 +24,7 @@ type Config struct {
ApplicationKey configopaque.String `mapstructure:"application_key"`
TenantID string `mapstructure:"tenant_id"`
ManagedIdentityID string `mapstructure:"managed_identity_id"`
UseAzureAuth bool `mapstructure:"use_azure_auth"`
Database string `mapstructure:"db_name"`
MetricTable string `mapstructure:"metrics_table_name"`
LogTable string `mapstructure:"logs_table_name"`
@@ -46,9 +47,23 @@ func (adxCfg *Config) Validate() error {
if isClusterURIEmpty {
return errors.New(`clusterURI config is mandatory`)
}
// Parameters for AD App Auth or Managed Identity Auth are mandatory
if isAppAuthEmpty && isManagedAuthEmpty {
return errors.New(`either ["application_id" , "application_key" , "tenant_id"] or ["managed_identity_id"] are needed for auth`)
// Parameters for AD App Auth or Managed Identity Auth or Default Auth are mandatory
authMethods := 0

if !isAppAuthEmpty {
authMethods++
}

if !isManagedAuthEmpty {
authMethods++
}

if adxCfg.UseAzureAuth {
authMethods++
}

if authMethods != 1 {
return errors.New(`either ["application_id" , "application_key" , "tenant_id"] or ["managed_identity_id"] or ["use_azure_auth"] must be provided for auth`)
}

if !(adxCfg.IngestionType == managedIngestType || adxCfg.IngestionType == queuedIngestTest || isEmpty(adxCfg.IngestionType)) {
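As an illustration of the new rule (a sketch with placeholder values, not part of this diff): a configuration that enables more than one authentication method now fails validation with the error quoted above.

```yaml
exporters:
  azuredataexplorer:
    cluster_uri: "https://CLUSTER.kusto.windows.net"
    # Two auth methods at once: Validate() counts both and returns
    # `either ["application_id" , "application_key" , "tenant_id"] or ["managed_identity_id"] or ["use_azure_auth"] must be provided for auth`
    managed_identity_id: "system"
    use_azure_auth: true
```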
14 changes: 13 additions & 1 deletion exporter/azuredataexplorerexporter/config_test.go
@@ -45,7 +45,7 @@ func TestLoadConfig(t *testing.T) {
},
{
id: component.NewIDWithName(metadata.Type, "2"),
errorMessage: `either ["application_id" , "application_key" , "tenant_id"] or ["managed_identity_id"] are needed for auth`,
errorMessage: `either ["application_id" , "application_key" , "tenant_id"] or ["managed_identity_id"] or ["use_azure_auth"] must be provided for auth`,
},
{
id: component.NewIDWithName(metadata.Type, "3"),
@@ -111,6 +111,18 @@
},
},
},
{
id: component.NewIDWithName(metadata.Type, "9"),
expected: &Config{
ClusterURI: "https://CLUSTER.kusto.windows.net",
Database: "oteldb",
MetricTable: "OTELMetrics",
LogTable: "OTELLogs",
TraceTable: "OTELTraces",
UseAzureAuth: true,
IngestionType: queuedIngestTest,
},
},
}

for _, tt := range tests {
7 changes: 6 additions & 1 deletion exporter/azuredataexplorerexporter/testdata/config.yaml
@@ -145,4 +145,9 @@ azuredataexplorer/8:
enabled: true
initial_interval: 10s
max_interval: 60s
max_elapsed_time: 10m
max_elapsed_time: 10m
azuredataexplorer/9:
# Kusto cluster uri
cluster_uri: "https://CLUSTER.kusto.windows.net"
# whether to use the default Azure auth
use_azure_auth: true
4 changes: 3 additions & 1 deletion receiver/otlpjsonfilereceiver/README.md
@@ -3,11 +3,13 @@
<!-- status autogenerated section -->
| Status | |
| ------------- |-----------|
| Stability | [alpha]: traces, metrics, logs |
| Stability | [development]: profiles |
| | [alpha]: traces, metrics, logs |
| Distributions | [contrib] |
| Issues | [![Open issues](https://img.shields.io/github/issues-search/open-telemetry/opentelemetry-collector-contrib?query=is%3Aissue%20is%3Aopen%20label%3Areceiver%2Fotlpjsonfile%20&label=open&color=orange&logo=opentelemetry)](https://github.com/open-telemetry/opentelemetry-collector-contrib/issues?q=is%3Aopen+is%3Aissue+label%3Areceiver%2Fotlpjsonfile) [![Closed issues](https://img.shields.io/github/issues-search/open-telemetry/opentelemetry-collector-contrib?query=is%3Aissue%20is%3Aclosed%20label%3Areceiver%2Fotlpjsonfile%20&label=closed&color=blue&logo=opentelemetry)](https://github.com/open-telemetry/opentelemetry-collector-contrib/issues?q=is%3Aclosed+is%3Aissue+label%3Areceiver%2Fotlpjsonfile) |
| [Code Owners](https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/CONTRIBUTING.md#becoming-a-code-owner) | [@djaglowski](https://www.github.com/djaglowski), [@atoulme](https://www.github.com/atoulme) |

[development]: https://github.com/open-telemetry/opentelemetry-collector#development
[alpha]: https://github.com/open-telemetry/opentelemetry-collector#alpha
[contrib]: https://github.com/open-telemetry/opentelemetry-collector-releases/tree/main/distributions/otelcol-contrib
<!-- end autogenerated section -->
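
As a usage sketch (not part of this diff), the receiver can now feed a profiles pipeline alongside the other signals. The file path and the `debug` exporter below are placeholders, and wiring up a `profiles` pipeline assumes the collector build exposes profiles support, which is still in development and may sit behind a feature gate.

```yaml
receivers:
  otlpjsonfile:
    include:
      - /var/log/otlp/*.json   # placeholder path to line-delimited OTLP/JSON files
    start_at: beginning

exporters:
  debug:

service:
  pipelines:
    profiles:
      receivers: [otlpjsonfile]
      exporters: [debug]
```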
33 changes: 29 additions & 4 deletions receiver/otlpjsonfilereceiver/file.go
@@ -8,11 +8,14 @@ import (

"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/consumer"
"go.opentelemetry.io/collector/consumer/consumerprofiles"
"go.opentelemetry.io/collector/pdata/plog"
"go.opentelemetry.io/collector/pdata/pmetric"
"go.opentelemetry.io/collector/pdata/pprofile"
"go.opentelemetry.io/collector/pdata/ptrace"
"go.opentelemetry.io/collector/receiver"
"go.opentelemetry.io/collector/receiver/receiverhelper"
"go.opentelemetry.io/collector/receiver/receiverprofiles"

"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/adapter"
"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/fileconsumer"
@@ -25,12 +28,13 @@

// NewFactory creates a factory for file receiver
func NewFactory() receiver.Factory {
return receiver.NewFactory(
return receiverprofiles.NewFactory(
metadata.Type,
createDefaultConfig,
receiver.WithMetrics(createMetricsReceiver, metadata.MetricsStability),
receiver.WithLogs(createLogsReceiver, metadata.LogsStability),
receiver.WithTraces(createTracesReceiver, metadata.TracesStability))
receiverprofiles.WithMetrics(createMetricsReceiver, metadata.MetricsStability),
receiverprofiles.WithLogs(createLogsReceiver, metadata.LogsStability),
receiverprofiles.WithTraces(createTracesReceiver, metadata.TracesStability),
receiverprofiles.WithProfiles(createProfilesReceiver, metadata.ProfilesStability))
}

type Config struct {
@@ -171,3 +175,24 @@ func createTracesReceiver(_ context.Context, settings receiver.Settings, configu

return &otlpjsonfilereceiver{input: input, id: settings.ID, storageID: cfg.StorageID}, nil
}

func createProfilesReceiver(_ context.Context, settings receiver.Settings, configuration component.Config, profiles consumerprofiles.Profiles) (receiverprofiles.Profiles, error) {
profilesUnmarshaler := &pprofile.JSONUnmarshaler{}
cfg := configuration.(*Config)
opts := make([]fileconsumer.Option, 0)
if cfg.ReplayFile {
opts = append(opts, fileconsumer.WithNoTracking())
}
input, err := cfg.Config.Build(settings.TelemetrySettings, func(ctx context.Context, token []byte, _ map[string]any) error {
p, _ := profilesUnmarshaler.UnmarshalProfiles(token)
if p.ResourceProfiles().Len() != 0 {
_ = profiles.ConsumeProfiles(ctx, p)
}
return nil
}, opts...)
if err != nil {
return nil, err
}

return &otlpjsonfilereceiver{input: input, id: settings.ID, storageID: cfg.StorageID}, nil
}
44 changes: 44 additions & 0 deletions receiver/otlpjsonfilereceiver/file_test.go
@@ -18,8 +18,10 @@ import (
"go.opentelemetry.io/collector/consumer/consumertest"
"go.opentelemetry.io/collector/pdata/plog"
"go.opentelemetry.io/collector/pdata/pmetric"
"go.opentelemetry.io/collector/pdata/pprofile"
"go.opentelemetry.io/collector/pdata/ptrace"
"go.opentelemetry.io/collector/pdata/testdata"
"go.opentelemetry.io/collector/receiver/receiverprofiles"
"go.opentelemetry.io/collector/receiver/receivertest"

"github.com/open-telemetry/opentelemetry-collector-contrib/pkg/stanza/fileconsumer"
@@ -35,6 +37,33 @@ func TestDefaultConfig(t *testing.T) {
require.NoError(t, componenttest.CheckConfigStruct(cfg))
}

func TestFileProfilesReceiver(t *testing.T) {
tempFolder := t.TempDir()
factory := NewFactory()
cfg := createDefaultConfig().(*Config)
cfg.Config.Include = []string{filepath.Join(tempFolder, "*")}
cfg.Config.StartAt = "beginning"
sink := new(consumertest.ProfilesSink)
receiver, err := factory.(receiverprofiles.Factory).CreateProfiles(context.Background(), receivertest.NewNopSettings(), cfg, sink)
assert.NoError(t, err)
err = receiver.Start(context.Background(), nil)
require.NoError(t, err)

pd := testdata.GenerateProfiles(5)
marshaler := &pprofile.JSONMarshaler{}
b, err := marshaler.MarshalProfiles(pd)
assert.NoError(t, err)
b = append(b, '\n')
err = os.WriteFile(filepath.Join(tempFolder, "profiles.json"), b, 0600)
assert.NoError(t, err)
time.Sleep(1 * time.Second)

require.Len(t, sink.AllProfiles(), 1)
assert.EqualValues(t, pd, sink.AllProfiles()[0])
err = receiver.Shutdown(context.Background())
assert.NoError(t, err)
}

func TestFileTracesReceiver(t *testing.T) {
tempFolder := t.TempDir()
factory := NewFactory()
@@ -213,6 +242,11 @@ func TestFileMixedSignals(t *testing.T) {
assert.NoError(t, err)
err = lr.Start(context.Background(), nil)
assert.NoError(t, err)
ps := new(consumertest.ProfilesSink)
pr, err := factory.(receiverprofiles.Factory).CreateProfiles(context.Background(), cs, cfg, ps)
assert.NoError(t, err)
err = pr.Start(context.Background(), nil)
assert.NoError(t, err)

md := testdata.GenerateMetrics(5)
marshaler := &pmetric.JSONMarshaler{}
@@ -226,11 +260,17 @@
lmarshaler := &plog.JSONMarshaler{}
b3, err := lmarshaler.MarshalLogs(ld)
assert.NoError(t, err)
pd := testdata.GenerateProfiles(5)
pmarshaler := &pprofile.JSONMarshaler{}
b4, err := pmarshaler.MarshalProfiles(pd)
assert.NoError(t, err)
b = append(b, '\n')
b = append(b, b2...)
b = append(b, '\n')
b = append(b, b3...)
b = append(b, '\n')
b = append(b, b4...)
b = append(b, '\n')
err = os.WriteFile(filepath.Join(tempFolder, "metrics.json"), b, 0600)
assert.NoError(t, err)
time.Sleep(1 * time.Second)
@@ -241,10 +281,14 @@
assert.EqualValues(t, td, ts.AllTraces()[0])
require.Len(t, ls.AllLogs(), 1)
assert.EqualValues(t, ld, ls.AllLogs()[0])
require.Len(t, ps.AllProfiles(), 1)
assert.EqualValues(t, pd, ps.AllProfiles()[0])
err = mr.Shutdown(context.Background())
assert.NoError(t, err)
err = tr.Shutdown(context.Background())
assert.NoError(t, err)
err = lr.Shutdown(context.Background())
assert.NoError(t, err)
err = pr.Shutdown(context.Background())
assert.NoError(t, err)
}
14 changes: 8 additions & 6 deletions receiver/otlpjsonfilereceiver/go.mod
@@ -12,12 +12,17 @@ require (
go.opentelemetry.io/collector/pdata/testdata v0.112.0
go.opentelemetry.io/collector/receiver v0.112.0
go.opentelemetry.io/collector/semconv v0.112.0 // indirect
go.opentelemetry.io/otel/metric v1.31.0 // indirect
go.opentelemetry.io/otel/trace v1.31.0 // indirect
go.opentelemetry.io/otel/metric v1.31.0
go.opentelemetry.io/otel/trace v1.31.0
go.uber.org/goleak v1.3.0
)

require go.opentelemetry.io/collector/consumer/consumertest v0.112.0
require (
go.opentelemetry.io/collector/consumer/consumerprofiles v0.112.0
go.opentelemetry.io/collector/consumer/consumertest v0.112.0
go.opentelemetry.io/collector/pdata/pprofile v0.112.0
go.opentelemetry.io/collector/receiver/receiverprofiles v0.112.0
)

require (
github.com/bmatcuk/doublestar/v4 v4.7.1 // indirect
@@ -50,13 +55,10 @@ require (
github.com/valyala/fastjson v1.6.4 // indirect
go.opentelemetry.io/collector/config/configtelemetry v0.112.0 // indirect
go.opentelemetry.io/collector/consumer/consumererror v0.112.0 // indirect
go.opentelemetry.io/collector/consumer/consumerprofiles v0.112.0 // indirect
go.opentelemetry.io/collector/extension v0.112.0 // indirect
go.opentelemetry.io/collector/extension/experimental/storage v0.112.0 // indirect
go.opentelemetry.io/collector/featuregate v1.18.0 // indirect
go.opentelemetry.io/collector/pdata/pprofile v0.112.0 // indirect
go.opentelemetry.io/collector/pipeline v0.112.0 // indirect
go.opentelemetry.io/collector/receiver/receiverprofiles v0.112.0 // indirect
go.opentelemetry.io/otel v1.31.0 // indirect
go.opentelemetry.io/otel/sdk v1.31.0 // indirect
go.opentelemetry.io/otel/sdk/metric v1.31.0 // indirect