TUN-7133: Add sampling support for streaming logs

In addition to adding sampling support for streaming logs,
cloudflared tail also exposes it via `--sample 0.5` to sample 50%
of your log events.
Devin Carr
2023-04-24 09:39:26 -07:00
parent 38cd455e4d
commit 88c25d2c67
6 changed files with 137 additions and 37 deletions

@@ -52,8 +52,9 @@ type EventStartStreaming struct {
 }
 
 type StreamingFilters struct {
-	Events []LogEventType `json:"events,omitempty"`
-	Level  *LogLevel      `json:"level,omitempty"`
+	Events   []LogEventType `json:"events,omitempty"`
+	Level    *LogLevel      `json:"level,omitempty"`
+	Sampling float64        `json:"sampling,omitempty"`
 }
 
 // EventStopStreaming signifies that the client wishes to halt receiving log events.
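
With the new field in place, sampling rides along the existing events and level filters in the start_streaming payload. As a rough sketch (not part of the commit), the snippet below marshals a hypothetical local mirror of StreamingFilters, with string stand-ins for LogEventType and LogLevel, to show the wire format a client such as cloudflared tail would send for `--sample 0.5`:

package main

import (
	"encoding/json"
	"fmt"
)

// streamingFilters is a hypothetical local mirror of the StreamingFilters struct
// above, used here only to illustrate the JSON shape of the new field.
type streamingFilters struct {
	Events   []string `json:"events,omitempty"`
	Level    *string  `json:"level,omitempty"`
	Sampling float64  `json:"sampling,omitempty"`
}

func main() {
	// Ask for roughly half of the log events to be delivered.
	filters := streamingFilters{Sampling: 0.5}
	b, err := json.Marshal(filters)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // prints {"sampling":0.5}
}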

@@ -57,13 +57,23 @@ func TestIntoClientEvent_StartStreaming(t *testing.T) {
 				},
 			},
 		},
+		{
+			name: "sampling filter",
+			expected: EventStartStreaming{
+				ClientEvent: ClientEvent{Type: StartStreaming},
+				Filters: &StreamingFilters{
+					Sampling: 0.5,
+				},
+			},
+		},
 		{
 			name: "level and events filters",
 			expected: EventStartStreaming{
 				ClientEvent: ClientEvent{Type: StartStreaming},
 				Filters: &StreamingFilters{
-					Level:  infoLevel,
-					Events: []LogEventType{Cloudflared},
+					Level:    infoLevel,
+					Events:   []LogEventType{Cloudflared},
+					Sampling: 0.5,
 				},
 			},
 		},

@@ -2,6 +2,7 @@ package management
 import (
 	"context"
+	"math/rand"
 	"sync/atomic"
 )
@@ -25,6 +26,8 @@ type session struct {
 	listener chan *Log
 	// Types of log events that this session will provide through the listener
 	filters *StreamingFilters
+	// Sampling of the log events this session will send (runs after all other filters if available)
+	sampler *sampler
 }
 
 // NewSession creates a new session.
@@ -43,6 +46,20 @@ func newSession(size int, actor actor, cancel context.CancelFunc) *session {
 func (s *session) Filters(filters *StreamingFilters) {
 	if filters != nil {
 		s.filters = filters
+		sampling := filters.Sampling
+		// clamp the sampling values between 0 and 1
+		if sampling < 0 {
+			sampling = 0
+		}
+		if sampling > 1 {
+			sampling = 1
+		}
+		s.filters.Sampling = sampling
+		if sampling > 0 && sampling < 1 {
+			s.sampler = &sampler{
+				p: int(sampling * 100),
+			}
+		}
 	} else {
 		s.filters = &StreamingFilters{}
 	}
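
The clamping above is what makes out-of-range values harmless. As a small stand-alone restatement (hypothetical helper, not part of the commit), the same logic looks like this:

package main

import "fmt"

// clampSamplingPercent restates the clamp-and-convert step above: it returns the
// percentage handed to the sampler and whether a sampler is needed at all.
func clampSamplingPercent(sampling float64) (int, bool) {
	if sampling < 0 {
		sampling = 0
	}
	if sampling > 1 {
		sampling = 1
	}
	// A clamped value of 0 (unset or negative input) or 1 (input >= 1) creates
	// no sampler, so every event is forwarded to the listener.
	return int(sampling * 100), sampling > 0 && sampling < 1
}

func main() {
	fmt.Println(clampSamplingPercent(0.5))  // 50 true
	fmt.Println(clampSamplingPercent(-1.0)) // 0 false  -> no sampler, all events pass
	fmt.Println(clampSamplingPercent(2.0))  // 100 false -> no sampler, all events pass
}
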
@@ -61,6 +78,10 @@ func (s *session) Insert(log *Log) {
 	if len(s.filters.Events) != 0 && !contains(s.filters.Events, log.Event) {
 		return
 	}
+	// Sampling is also optional
+	if s.sampler != nil && !s.sampler.Sample() {
+		return
+	}
 	select {
 	case s.listener <- log:
 	default:
@@ -86,3 +107,14 @@ func contains(array []LogEventType, t LogEventType) bool {
 	}
 	return false
 }
+
+// sampler will send approximately p out of every 100 log events.
+type sampler struct {
+	p int
+}
+
+// Sample returns true if the event should be part of the sample, false if the event should be dropped.
+func (s *sampler) Sample() bool {
+	return rand.Intn(100) <= s.p
+}

@@ -67,6 +67,27 @@ func TestSession_Insert(t *testing.T) {
 			},
 			expectLog: false,
 		},
+		{
+			name: "sampling",
+			filters: StreamingFilters{
+				Sampling: 0.9999999,
+			},
+			expectLog: true,
+		},
+		{
+			name: "sampling (invalid negative)",
+			filters: StreamingFilters{
+				Sampling: -1.0,
+			},
+			expectLog: true,
+		},
+		{
+			name: "sampling (invalid too large)",
+			filters: StreamingFilters{
+				Sampling: 2.0,
+			},
+			expectLog: true,
+		},
 		{
 			name: "filter and event",
 			filters: StreamingFilters{