
Handle delayed delete / creation notifications
Update the file-state tracking to support the case where we receive a
DELETE event but we observe the file as already re-created (event.Info
is not nil and hashes are likely populated).

Before this change, we would report a deletion but at the same time store
the hashes and file info. Then, a following CREATION event would be ignored
because the diffing logic doesn't take the previous action into
account (in this case prev.action==Deleted).

The best approach is to ignore the deletion and report on the observed file
changes (if any). Otherwise we would have to deal with complex logic for the
cases where the OS event includes multiple actions (...|DELETED|...).
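
For illustration only, here is a minimal, self-contained Go sketch of the action
rewrite described above. The Action, Event and Metadata types below are simplified
stand-ins assumed for this example; the real file_integrity types carry more flags
and fields, and the actual logic lives in hasFileChangedSinceLastEvent (see the
diff below).

package main

import "fmt"

// Simplified stand-ins for the real file_integrity types (assumptions for
// this sketch only).
type Action uint8

const (
	Created Action = 1 << iota
	Updated
	Deleted
)

type Metadata struct{ Size int64 }

type Event struct {
	Action Action
	Info   *Metadata // nil when the file is absent from disk
}

// normalizeActions mirrors the idea of the change: a Deleted action observed
// for a file that is already back on disk becomes Updated, and a follow-up
// Created action for a path we still hold state for becomes Updated too.
func normalizeActions(ev *Event, last *Event) {
	if ev.Action&Deleted != 0 && ev.Info != nil {
		ev.Action &= ^Deleted
		ev.Action |= Updated
	}
	if ev.Action&Created != 0 && last != nil && last.Info != nil {
		ev.Action &= ^Created
		ev.Action |= Updated
	}
}

func main() {
	// Delayed delete: the event says Deleted, but the file was already re-created.
	ev := Event{Action: Deleted, Info: &Metadata{Size: 42}}
	normalizeActions(&ev, nil)
	fmt.Println(ev.Action == Updated) // true

	// Follow-up create for a path whose previous state we kept.
	last := Event{Action: Updated, Info: &Metadata{Size: 42}}
	ev2 := Event{Action: Created, Info: &Metadata{Size: 43}}
	normalizeActions(&ev2, &last)
	fmt.Println(ev2.Action == Updated) // true
}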
adriansr committed Jan 13, 2021
1 parent 73de601 commit b30f19a
Showing 2 changed files with 243 additions and 14 deletions.
10 changes: 10 additions & 0 deletions auditbeat/module/file_integrity/metricset.go
@@ -268,6 +268,16 @@ func (ms *MetricSet) hasFileChangedSinceLastEvent(event *Event) (changed bool, l
return true, lastEvent
}

// Received a delete event, but the file now exists on disk (it was already re-created).
if event.Action&Deleted != 0 && event.Info != nil {
event.Action &= ^Action(Deleted)
event.Action |= Updated
}
// Received a creation event for a file whose deletion we suppressed above, so treat it as an update.
if event.Action&Created != 0 && lastEvent != nil && lastEvent.Info != nil {
event.Action &= ^Action(Created)
event.Action |= Updated
}
action, changed := diffEvents(lastEvent, event)
if uint8(event.Action)&^uint8(Updated) == 0 {
if event.hashFailed && !changed {
247 changes: 233 additions & 14 deletions auditbeat/module/file_integrity/metricset_test.go
@@ -481,13 +481,7 @@ func (e expectedEvent) validate(t *testing.T, ms *MetricSet) {

type expectedEvents []expectedEvent

func (e expectedEvents) validate(t *testing.T, ms *MetricSet) {
for _, ev := range e {
ev.validate(t, ms)
}
}

func TestEventFailedHash(t *testing.T) {
func (e expectedEvents) validate(t *testing.T) {
store, err := ioutil.TempFile("", "bucket")
if err != nil {
t.Fatal(err)
@@ -507,7 +501,12 @@ func TestEventFailedHash(t *testing.T) {
t.Fatal("can't create metricset")
}
ms.bucket = bucket.(datastore.BoltBucket)
for _, ev := range e {
ev.validate(t, ms)
}
}

func TestEventFailedHash(t *testing.T) {
baseTime := time.Now()
t.Run("failed hash on update", func(t *testing.T) {
expectedEvents{
@@ -619,7 +618,7 @@ func TestEventFailedHash(t *testing.T) {
"file.hash.sha1": Digest("33333333333333333333"),
},
},
}.validate(t, ms)
}.validate(t)
})
t.Run("failed hash on creation", func(t *testing.T) {
expectedEvents{
@@ -665,7 +664,7 @@ func TestEventFailedHash(t *testing.T) {
"file.hash.sha1": Digest("22222222222222222222"),
},
},
}.validate(t, ms)
}.validate(t)
})
t.Run("delete", func(t *testing.T) {
expectedEvents{
@@ -698,17 +697,16 @@ func TestEventFailedHash(t *testing.T) {
Path: "/some/other/path",
Info: nil,
Source: SourceFSNotify,
// FSEvents likes to add extra flags to delete events.
Action: Deleted,
Hashes: nil,
Action: Deleted,
Hashes: nil,
},
expected: map[string]interface{}{
"event.action": []string{"deleted"},
"event.type": []string{"deletion"},
"file.hash.sha1": nil,
},
},
}.validate(t, ms)
}.validate(t)
})
t.Run("move", func(t *testing.T) {
expectedEvents{
@@ -751,7 +749,228 @@ func TestEventFailedHash(t *testing.T) {
"file.hash.sha1": nil,
},
},
}.validate(t, ms)
}.validate(t)
})
}

func TestEventDelete(t *testing.T) {
store, err := ioutil.TempFile("", "bucket")
if err != nil {
t.Fatal(err)
}
defer store.Close()
defer os.Remove(store.Name())
ds := datastore.New(store.Name(), 0644)
bucket, err := ds.OpenBucket(bucketName)
if err != nil {
t.Fatal(err)
}
defer bucket.Close()
config := getConfig("somepath")
config["hash_types"] = []string{"sha1"}
ms, ok := mbtest.NewPushMetricSetV2(t, config).(*MetricSet)
if !assert.True(t, ok) {
t.Fatal("can't create metricset")
}
ms.bucket = bucket.(datastore.BoltBucket)

baseTime := time.Now()
sha := Digest("22222222222222222222")
t.Run("delete event for file missing on disk", func(t *testing.T) {
expectedEvents{
expectedEvent{
title: "creation event",
input: Event{
Timestamp: baseTime,
Path: "/file",
Info: &Metadata{
MTime: baseTime,
CTime: baseTime,
Type: FileType,
},
Action: Created,
Source: SourceFSNotify,
Hashes: map[HashType]Digest{
SHA1: sha,
},
},
expected: map[string]interface{}{
"event.action": []string{"created"},
"event.type": []string{"creation"},
"file.hash.sha1": sha,
},
},
expectedEvent{
title: "delete",
input: Event{
Timestamp: time.Now(),
Path: "/file",
Source: SourceFSNotify,
Action: Deleted,
},
expected: map[string]interface{}{
"event.action": []string{"deleted"},
"event.type": []string{"deletion"},
},
},
expectedEvent{
title: "creation event",
input: Event{
Timestamp: baseTime,
Path: "/file",
Info: &Metadata{
MTime: baseTime,
CTime: baseTime,
Type: FileType,
},
Action: Created,
Source: SourceFSNotify,
Hashes: map[HashType]Digest{
SHA1: sha,
},
},
expected: map[string]interface{}{
"event.action": []string{"created"},
"event.type": []string{"creation"},
"file.hash.sha1": sha,
},
},
}.validate(t)
})

// This tests getting a DELETE followed by a CREATE, but by the time we observe the former the file already
// exists on disk.
shaNext := Digest("22222222222222222223")
t.Run("delete event for file present on disk (different contents)", func(t *testing.T) {
expectedEvents{
expectedEvent{
title: "create",
input: Event{
Timestamp: baseTime,
Path: "/file",
Info: &Metadata{
MTime: baseTime,
CTime: baseTime,
Type: FileType,
},
Action: Created,
Source: SourceFSNotify,
Hashes: map[HashType]Digest{
SHA1: sha,
},
},
expected: map[string]interface{}{
"event.action": []string{"created"},
"event.type": []string{"creation"},
"file.hash.sha1": sha,
},
},
expectedEvent{
title: "delete",
input: Event{
Timestamp: time.Now(),
Path: "/file",
Info: &Metadata{
MTime: baseTime,
CTime: baseTime,
Type: FileType,
},
Source: SourceFSNotify,
Action: Deleted,
Hashes: map[HashType]Digest{
SHA1: shaNext,
},
},
expected: map[string]interface{}{
"event.action": []string{"updated"},
"event.type": []string{"change"},
"file.hash.sha1": shaNext,
},
},
expectedEvent{
title: "re-create",
input: Event{
Timestamp: baseTime,
Path: "/file",
Info: &Metadata{
MTime: baseTime,
CTime: baseTime,
Type: FileType,
},
Action: Created,
Source: SourceFSNotify,
Hashes: map[HashType]Digest{
SHA1: shaNext,
},
},
expected: nil, // Already observed during handling of previous event.
},
}.validate(t)
})

t.Run("delete event for file present on disk (same contents)", func(t *testing.T) {
expectedEvents{
expectedEvent{
title: "create",
input: Event{
Timestamp: baseTime,
Path: "/file",
Info: &Metadata{
MTime: baseTime,
CTime: baseTime,
Type: FileType,
},
Action: Created,
Source: SourceFSNotify,
Hashes: map[HashType]Digest{
SHA1: sha,
},
},
expected: map[string]interface{}{
"event.action": []string{"created"},
"event.type": []string{"creation"},
"file.hash.sha1": sha,
},
},
expectedEvent{
title: "delete",
input: Event{
Timestamp: time.Now(),
Path: "/file",
Info: &Metadata{
MTime: baseTime,
CTime: baseTime,
Type: FileType,
},
Source: SourceFSNotify,
Action: Deleted,
Hashes: map[HashType]Digest{
SHA1: sha,
},
},
// No event because it has the same contents as before.
expected: nil,
},
expectedEvent{
title: "re-create",
input: Event{
Timestamp: baseTime,
Path: "/file",
Info: &Metadata{
MTime: baseTime,
CTime: baseTime,
Type: FileType,
},
Action: Created,
Source: SourceFSNotify,
Hashes: map[HashType]Digest{
SHA1: sha,
},
},
// No event because it has the same contents as before.
expected: nil,
},
}.validate(t)
})
}
