Experiment with alternatives
kentquirk committed Nov 28, 2023
1 parent 6204d49 commit 7e62a2b
Showing 3 changed files with 173 additions and 0 deletions.
170 changes: 170 additions & 0 deletions beeline_test.go
@@ -3,11 +3,16 @@ package beeline
import (
    "context"
    "fmt"
    "math/rand"
    "strconv"
    "strings"
    "sync"
    "testing"

    "github.com/honeycombio/libhoney-go/transmission"

    cmap "github.com/orcaman/concurrent-map/v2"

    libhoney "github.com/honeycombio/libhoney-go"
    "github.com/stretchr/testify/assert"
)
@@ -145,3 +150,168 @@ func setupLibhoney(t testing.TB) *transmission.MockSender {

    return mo
}
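
// The functions below exist only to support the benchmarks at the end of this
// file: getRandomString generates field names, and the getPrefixedFieldName*
// variants are alternative implementations (including a cache-free baseline)
// that the benchmarks compare against the current getPrefixedFieldName.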

// getRandomString returns a random alphabetic string between 5 and 24
// characters long.
func getRandomString() string {
    chars := []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ")
    length := rand.Intn(20) + 5
    result := make([]rune, length)
    for i := range result {
        result[i] = chars[rand.Intn(len(chars))]
    }
    return string(result)
}

// getPrefixedFieldNameOrig is the cache-free baseline: it always allocates a
// new prefixed string.
func getPrefixedFieldNameOrig(name string) string {
    return "app." + name
}

var syncCache sync.Map

// sync.Map avoids a lot of locking overhead but has no size limit
func getPrefixedFieldNameSync(key string) string {
    const prefix = "app."

    // return if the key already has the prefix
    if strings.HasPrefix(key, prefix) {
        return key
    }

    // check the cache first
    val, ok := syncCache.Load(key)
    if ok {
        return val.(string)
    }

    // not in the cache, so add it
    prefixedKey := prefix + key
    syncCache.Store(key, prefixedKey)
    return prefixedKey
}

var concurrentMap cmap.ConcurrentMap[string, string]

// getPrefixedFieldNameConcurrent caches prefixed keys in a concurrent map
// (orcaman/concurrent-map); like sync.Map, it has no size limit
func getPrefixedFieldNameConcurrent(key string) string {
    const prefix = "app."

    // return if the key already has the prefix
    if strings.HasPrefix(key, prefix) {
        return key
    }

    // check the cache first
    val, ok := concurrentMap.Get(key)
    if ok {
        return val
    }

    val = prefix + key
    concurrentMap.Set(key, val)

    return val
}

// getPrefixedFieldNameEjectRandom uses the package-level cachedFieldNames map
// and cachedFieldNamesLock, but once the cache grows too large it evicts a
// single arbitrary entry rather than letting the map grow indefinitely.
func getPrefixedFieldNameEjectRandom(key string) string {
    const prefix = "app."

    // return if the key already has the prefix
    if strings.HasPrefix(key, prefix) {
        return key
    }

    // check the cache using a read lock first
    cachedFieldNamesLock.RLock()
    val, ok := cachedFieldNames[key]
    cachedFieldNamesLock.RUnlock()
    if ok {
        return val
    }

    // not in the cache, so get a write lock
    cachedFieldNamesLock.Lock()
    defer cachedFieldNamesLock.Unlock()

    // check again in case it was added while we were waiting for the lock
    val, ok = cachedFieldNames[key]
    if ok {
        return val
    }

    // before we add the key to the cache, evict one arbitrary entry if the
    // cache is getting too big. this can happen if lots of unique keys are
    // being used, and we don't want to grow the cache indefinitely
    if len(cachedFieldNames) > 1000 {
        for k := range cachedFieldNames {
            delete(cachedFieldNames, k)
            break
        }
    }

    // add the prefixed key to the cache and return it
    prefixedKey := prefix + key
    cachedFieldNames[key] = prefixedKey
    return prefixedKey
}

func BenchmarkGetPrefixedFieldNameBasic(b *testing.B) {
    funcs := map[string]func(string) string{
        "orig":  getPrefixedFieldNameOrig,
        "new":   getPrefixedFieldName,
        "sync":  getPrefixedFieldNameSync,
        "conc":  getPrefixedFieldNameConcurrent,
        "eject": getPrefixedFieldNameEjectRandom,
    }
    for _, numFields := range []int{10, 100, 1000, 3000} {
        for name, f := range funcs {
            b.Run(fmt.Sprintf("%s-%d", name, numFields), func(b *testing.B) {
                names := make([]string, numFields)
                for i := 0; i < numFields; i++ {
                    names[i] = getRandomString()
                }
                concurrentMap = cmap.New[string]()
                b.ResetTimer()
                for n := 0; n < b.N; n++ {
                    f(names[rand.Intn(numFields)])
                }
            })
        }
    }
}

func BenchmarkGetPrefixedFieldNameParallel(b *testing.B) {
    funcs := map[string]func(string) string{
        "orig": getPrefixedFieldNameOrig,
        "new":  getPrefixedFieldName,
        "sync": getPrefixedFieldNameSync,
        "conc": getPrefixedFieldNameConcurrent,
        // "eject": getPrefixedFieldNameEjectRandom,
    }
    for _, numGoroutines := range []int{1, 50, 300} {
        for name, f := range funcs {
            for _, numFields := range []int{50, 500, 2000} {
                b.Run(fmt.Sprintf("%s-f%d-g%d", name, numFields, numGoroutines), func(b *testing.B) {
                    names := make([]string, numFields)
                    for i := 0; i < numFields; i++ {
                        names[i] = getRandomString()
                    }
                    concurrentMap = cmap.New[string]()
                    b.ResetTimer()
                    wg := sync.WaitGroup{}
                    count := b.N / numGoroutines
                    if count == 0 {
                        count = 1
                    }
                    for g := 0; g < numGoroutines; g++ {
                        wg.Add(1)
                        go func() {
                            for n := 0; n < count; n++ {
                                f(names[rand.Intn(numFields)])
                            }
                            wg.Done()
                        }()
                    }
                    wg.Wait()
                })
            }
        }
    }
}
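
These benchmarks can be run from the package directory with a command along
the lines of:

    go test -run '^$' -bench 'BenchmarkGetPrefixedFieldName' -benchmem

where -run '^$' skips the regular tests and -benchmem adds allocation counts
to the output.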
1 change: 1 addition & 0 deletions go.mod
@@ -71,6 +71,7 @@ require (
    github.com/microcosm-cc/bluemonday v1.0.20 // indirect
    github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
    github.com/modern-go/reflect2 v1.0.2 // indirect
    github.com/orcaman/concurrent-map/v2 v2.0.1 // indirect
    github.com/pelletier/go-toml/v2 v2.0.8 // indirect
    github.com/pmezard/go-difflib v1.0.0 // indirect
    github.com/rogpeppe/go-internal v1.9.0 // indirect
2 changes: 2 additions & 0 deletions go.sum
@@ -217,6 +217,8 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/orcaman/concurrent-map/v2 v2.0.1 h1:jOJ5Pg2w1oeB6PeDurIYf6k9PQ+aTITr/6lP/L/zp6c=
github.com/orcaman/concurrent-map/v2 v2.0.1/go.mod h1:9Eq3TG2oBe5FirmYWQfYO5iH1q0Jv47PLaNK++uCdOM=
github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ=
github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
