Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

go/cache: use generics and remove unused API #14850

Merged
merged 2 commits into from
Jan 8, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
111 changes: 36 additions & 75 deletions go/cache/lru_cache.go
Original file line number Diff line number Diff line change
Expand Up @@ -31,15 +31,13 @@ import (

// LRUCache is a typical LRU cache implementation. If the cache
// reaches the capacity, the least recently used item is deleted from
// the cache. Note the capacity is not the number of items, but the
// total sum of the CachedSize() of each item.
type LRUCache struct {
// the cache.
type LRUCache[T any] struct {
mu sync.Mutex

// list & table contain *entry objects.
list *list.List
table map[string]*list.Element
cost func(any) int64

size int64
capacity int64
Expand All @@ -49,46 +47,44 @@ type LRUCache struct {
}

// Item is what is stored in the cache.
type Item[T any] struct {
	Key   string
	Value T
}

// entry is the payload stored in each list.Element. timeAccessed is
// refreshed on every Get/Set via moveToFront.
type entry[T any] struct {
	key          string
	value        T
	timeAccessed time.Time
}

// NewLRUCache creates a new empty cache with the given capacity.
func NewLRUCache(capacity int64, cost func(any) int64) *LRUCache {
return &LRUCache{
func NewLRUCache[T any](capacity int64) *LRUCache[T] {
return &LRUCache[T]{
list: list.New(),
table: make(map[string]*list.Element),
capacity: capacity,
cost: cost,
}
}

// Get returns a value from the cache, and marks the entry as most
// recently used.
func (lru *LRUCache) Get(key string) (v any, ok bool) {
func (lru *LRUCache[T]) Get(key string) (v T, ok bool) {
lru.mu.Lock()
defer lru.mu.Unlock()

element := lru.table[key]
if element == nil {
lru.misses++
return nil, false
return *new(T), false
}
lru.moveToFront(element)
lru.hits++
return element.Value.(*entry).value, true
return element.Value.(*entry[T]).value, true
}

// Set sets a value in the cache.
func (lru *LRUCache) Set(key string, value any) bool {
func (lru *LRUCache[T]) Set(key string, value T) bool {
lru.mu.Lock()
defer lru.mu.Unlock()

Expand All @@ -102,7 +98,7 @@ func (lru *LRUCache) Set(key string, value any) bool {
}

// Delete removes an entry from the cache, and returns if the entry existed.
func (lru *LRUCache) delete(key string) bool {
func (lru *LRUCache[T]) delete(key string) bool {
lru.mu.Lock()
defer lru.mu.Unlock()

Expand All @@ -113,27 +109,17 @@ func (lru *LRUCache) delete(key string) bool {

lru.list.Remove(element)
delete(lru.table, key)
lru.size -= element.Value.(*entry).size
lru.size--
return true
}

// Delete removes an entry from the cache
func (lru *LRUCache) Delete(key string) {
func (lru *LRUCache[T]) Delete(key string) {
lru.delete(key)
}

// Clear will clear the entire cache.
func (lru *LRUCache) Clear() {
lru.mu.Lock()
defer lru.mu.Unlock()

lru.list.Init()
lru.table = make(map[string]*list.Element)
lru.size = 0
}

// Len returns the size of the cache (in entries)
func (lru *LRUCache) Len() int {
func (lru *LRUCache[T]) Len() int {
lru.mu.Lock()
defer lru.mu.Unlock()
return lru.list.Len()
Expand All @@ -142,113 +128,88 @@ func (lru *LRUCache) Len() int {
// SetCapacity will set the capacity of the cache. If the capacity is
// smaller, and the current cache size exceed that capacity, the cache
// will be shrank.
func (lru *LRUCache) SetCapacity(capacity int64) {
func (lru *LRUCache[T]) SetCapacity(capacity int64) {
lru.mu.Lock()
defer lru.mu.Unlock()

lru.capacity = capacity
lru.checkCapacity()
}

// Wait is a no-op in the LRU cache
func (lru *LRUCache) Wait() {}

// UsedCapacity returns the size of the cache (in bytes)
func (lru *LRUCache) UsedCapacity() int64 {
func (lru *LRUCache[T]) UsedCapacity() int64 {
return lru.size
}

// MaxCapacity returns the cache maximum capacity.
func (lru *LRUCache) MaxCapacity() int64 {
func (lru *LRUCache[T]) MaxCapacity() int64 {
lru.mu.Lock()
defer lru.mu.Unlock()
return lru.capacity
}

// Evictions returns the number of evictions
func (lru *LRUCache) Evictions() int64 {
func (lru *LRUCache[T]) Evictions() int64 {
lru.mu.Lock()
defer lru.mu.Unlock()
return lru.evictions
}

// Hits returns number of cache hits since creation
func (lru *LRUCache) Hits() int64 {
func (lru *LRUCache[T]) Hits() int64 {
lru.mu.Lock()
defer lru.mu.Unlock()
return lru.hits
}

// Misses returns number of cache misses since creation
func (lru *LRUCache) Misses() int64 {
func (lru *LRUCache[T]) Misses() int64 {
lru.mu.Lock()
defer lru.mu.Unlock()
return lru.misses
}

// ForEach yields all the values for the cache, ordered from most recently
// used to least recently used.
func (lru *LRUCache) ForEach(callback func(value any) bool) {
lru.mu.Lock()
defer lru.mu.Unlock()

for e := lru.list.Front(); e != nil; e = e.Next() {
v := e.Value.(*entry)
if !callback(v.value) {
break
}
}
}

// Items returns all the values for the cache, ordered from most recently
// used to least recently used.
func (lru *LRUCache) Items() []Item {
func (lru *LRUCache[T]) Items() []Item[T] {
lru.mu.Lock()
defer lru.mu.Unlock()

items := make([]Item, 0, lru.list.Len())
items := make([]Item[T], 0, lru.list.Len())
for e := lru.list.Front(); e != nil; e = e.Next() {
v := e.Value.(*entry)
items = append(items, Item{Key: v.key, Value: v.value})
v := e.Value.(*entry[T])
items = append(items, Item[T]{Key: v.key, Value: v.value})
}
return items
}

func (lru *LRUCache) updateInplace(element *list.Element, value any) {
valueSize := lru.cost(value)
sizeDiff := valueSize - element.Value.(*entry).size
element.Value.(*entry).value = value
element.Value.(*entry).size = valueSize
lru.size += sizeDiff
func (lru *LRUCache[T]) updateInplace(element *list.Element, value T) {
element.Value.(*entry[T]).value = value
lru.moveToFront(element)
lru.checkCapacity()
}

func (lru *LRUCache) moveToFront(element *list.Element) {
func (lru *LRUCache[T]) moveToFront(element *list.Element) {
lru.list.MoveToFront(element)
element.Value.(*entry).timeAccessed = time.Now()
element.Value.(*entry[T]).timeAccessed = time.Now()
}

func (lru *LRUCache) addNew(key string, value any) {
newEntry := &entry{key, value, lru.cost(value), time.Now()}
func (lru *LRUCache[T]) addNew(key string, value T) {
newEntry := &entry[T]{key, value, time.Now()}
element := lru.list.PushFront(newEntry)
lru.table[key] = element
lru.size += newEntry.size
lru.size++
lru.checkCapacity()
}

func (lru *LRUCache) checkCapacity() {
func (lru *LRUCache[T]) checkCapacity() {
// Partially duplicated from Delete
for lru.size > lru.capacity {
delElem := lru.list.Back()
delValue := delElem.Value.(*entry)
delValue := delElem.Value.(*entry[T])
lru.list.Remove(delElem)
delete(lru.table, delValue.key)
lru.size -= delValue.size
lru.size--
lru.evictions++
}
}

func (lru *LRUCache) Close() {
lru.Clear()
}
75 changes: 12 additions & 63 deletions go/cache/lru_cache_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,8 @@ type CacheValue struct {
size int64
}

func cacheValueSize(val any) int64 {
return val.(*CacheValue).size
}

func TestInitialState(t *testing.T) {
cache := NewLRUCache(5, cacheValueSize)
cache := NewLRUCache[*CacheValue](5)
l, sz, c, e, h, m := cache.Len(), cache.UsedCapacity(), cache.MaxCapacity(), cache.Evictions(), cache.Hits(), cache.Misses()
if l != 0 {
t.Errorf("length = %v, want 0", l)
Expand All @@ -52,13 +48,13 @@ func TestInitialState(t *testing.T) {
}

func TestSetInsertsValue(t *testing.T) {
cache := NewLRUCache(100, cacheValueSize)
cache := NewLRUCache[*CacheValue](100)
data := &CacheValue{0}
key := "key"
cache.Set(key, data)

v, ok := cache.Get(key)
if !ok || v.(*CacheValue) != data {
if !ok || v != data {
t.Errorf("Cache has incorrect value: %v != %v", data, v)
}

Expand All @@ -69,80 +65,46 @@ func TestSetInsertsValue(t *testing.T) {
}

func TestGetValueWithMultipleTypes(t *testing.T) {
cache := NewLRUCache(100, cacheValueSize)
cache := NewLRUCache[*CacheValue](100)
data := &CacheValue{0}
key := "key"
cache.Set(key, data)

v, ok := cache.Get("key")
if !ok || v.(*CacheValue) != data {
if !ok || v != data {
t.Errorf("Cache has incorrect value for \"key\": %v != %v", data, v)
}

v, ok = cache.Get(string([]byte{'k', 'e', 'y'}))
if !ok || v.(*CacheValue) != data {
if !ok || v != data {
t.Errorf("Cache has incorrect value for []byte {'k','e','y'}: %v != %v", data, v)
}
}

func TestSetUpdatesSize(t *testing.T) {
cache := NewLRUCache(100, cacheValueSize)
emptyValue := &CacheValue{0}
key := "key1"
cache.Set(key, emptyValue)
if sz := cache.UsedCapacity(); sz != 0 {
t.Errorf("cache.UsedCapacity() = %v, expected 0", sz)
}
someValue := &CacheValue{20}
key = "key2"
cache.Set(key, someValue)
if sz := cache.UsedCapacity(); sz != 20 {
t.Errorf("cache.UsedCapacity() = %v, expected 20", sz)
}
}

func TestSetWithOldKeyUpdatesValue(t *testing.T) {
cache := NewLRUCache(100, cacheValueSize)
cache := NewLRUCache[*CacheValue](100)
emptyValue := &CacheValue{0}
key := "key1"
cache.Set(key, emptyValue)
someValue := &CacheValue{20}
cache.Set(key, someValue)

v, ok := cache.Get(key)
if !ok || v.(*CacheValue) != someValue {
if !ok || v != someValue {
t.Errorf("Cache has incorrect value: %v != %v", someValue, v)
}
}

func TestSetWithOldKeyUpdatesSize(t *testing.T) {
cache := NewLRUCache(100, cacheValueSize)
emptyValue := &CacheValue{0}
key := "key1"
cache.Set(key, emptyValue)

if sz := cache.UsedCapacity(); sz != 0 {
t.Errorf("cache.UsedCapacity() = %v, expected %v", sz, 0)
}

someValue := &CacheValue{20}
cache.Set(key, someValue)
expected := int64(someValue.size)
if sz := cache.UsedCapacity(); sz != expected {
t.Errorf("cache.UsedCapacity() = %v, expected %v", sz, expected)
}
}

// TestGetNonExistent verifies that Get reports a miss on an empty cache.
func TestGetNonExistent(t *testing.T) {
	cache := NewLRUCache[*CacheValue](100)

	if _, ok := cache.Get("notthere"); ok {
		t.Error("Cache returned a notthere value after no inserts.")
	}
}

func TestDelete(t *testing.T) {
cache := NewLRUCache(100, cacheValueSize)
cache := NewLRUCache[*CacheValue](100)
value := &CacheValue{1}
key := "key"

Expand All @@ -159,22 +121,9 @@ func TestDelete(t *testing.T) {
}
}

func TestClear(t *testing.T) {
cache := NewLRUCache(100, cacheValueSize)
value := &CacheValue{1}
key := "key"

cache.Set(key, value)
cache.Clear()

if sz := cache.UsedCapacity(); sz != 0 {
t.Errorf("cache.UsedCapacity() = %v, expected 0 after Clear()", sz)
}
}

func TestCapacityIsObeyed(t *testing.T) {
size := int64(3)
cache := NewLRUCache(100, cacheValueSize)
cache := NewLRUCache[*CacheValue](100)
cache.SetCapacity(size)
value := &CacheValue{1}

Expand Down Expand Up @@ -215,7 +164,7 @@ func TestCapacityIsObeyed(t *testing.T) {

func TestLRUIsEvicted(t *testing.T) {
size := int64(3)
cache := NewLRUCache(size, cacheValueSize)
cache := NewLRUCache[*CacheValue](size)

cache.Set("key1", &CacheValue{1})
cache.Set("key2", &CacheValue{1})
Expand Down
Loading
Loading