
Commit f9b0171

Rename struct LazyCache => Cache
1 parent 561ba93 commit f9b0171

3 files changed: 18 additions & 18 deletions

README.md

Lines changed: 2 additions & 2 deletions
@@ -5,14 +5,14 @@

**Note:** This is still a work in progress and the API will most likely change.

-**Lazycache** is a simple thread-safe in-memory LRU cache. Under the hood it leverages the great [simplelru package in golang-lru](https://github.com/hashicorp/golang-lru), with its excellent performance. One big difference between `golang-lru` and this library is the [GetOrCreate](https://pkg.go.dev/github.com/bep/lazycache#LazyCache.GetOrCreate) method, which provides:
+**Lazycache** is a simple thread-safe in-memory LRU cache. Under the hood it leverages the great [simplelru package in golang-lru](https://github.com/hashicorp/golang-lru), with its excellent performance. One big difference between `golang-lru` and this library is the [GetOrCreate](https://pkg.go.dev/github.com/bep/lazycache#Cache.GetOrCreate) method, which provides:

* Non-blocking cache priming on cache misses.
* A guarantee that the prime function is only called once for a given key.
* The cache's [RWMutex](https://pkg.go.dev/sync#RWMutex) is not locked during the execution of the prime function, which should make it easier to reason about potential deadlocks.

Other notable features:

-* The cache can be [resized](https://pkg.go.dev/github.com/bep/lazycache#LazyCache.Resize) while running.
+* The cache can be [resized](https://pkg.go.dev/github.com/bep/lazycache#Cache.Resize) while running.
* When the number of entries overflows the defined cache size, the least recently used item gets discarded (LRU).
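
For readers following the rename, here is a minimal usage sketch of the API as it looks after this commit. It leans only on names visible in this diff (`lazycache.New`, `CacheOptions.MaxEntries`, `GetOrCreate`, `Entry.Err`, `Contains`, `Len`); how the created value is read back out of an `Entry` is not shown here, so the sketch only checks the error.

```go
package main

import (
	"fmt"

	"github.com/bep/lazycache"
)

func main() {
	// MaxEntries caps the cache; the least recently used entry is evicted on overflow.
	c := lazycache.New(lazycache.CacheOptions{MaxEntries: 100})

	// GetOrCreate primes the cache on a miss. Per the README above, the create
	// function runs at most once per key and never with the cache's RWMutex held.
	e := c.GetOrCreate("greeting", func(key any) (any, error) {
		return fmt.Sprintf("hello, %v", key), nil
	})
	if err := e.Err(); err != nil {
		fmt.Println("prime failed:", err)
	}

	fmt.Println(c.Contains("greeting")) // true
	fmt.Println(c.Len())                // 1
}
```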

lazycache.go

Lines changed: 13 additions & 13 deletions
@@ -8,13 +8,13 @@ import (

var _ = Entry(&delayedEntry{})

-// New creates a new LazyCache.
-func New(options CacheOptions) *LazyCache {
+// New creates a new Cache.
+func New(options CacheOptions) *Cache {
	lru, err := simplelru.NewLRU(int(options.MaxEntries), nil)
	if err != nil {
		panic(err)
	}
-	c := &LazyCache{
+	c := &Cache{
		lru: lru,
	}
	return c
@@ -33,13 +33,13 @@ type Entry interface {
	Err() error
}

-type LazyCache struct {
+type Cache struct {
	lru *simplelru.LRU
	mu  sync.RWMutex
}

// Contains returns true if the given key is in the cache.
-func (c *LazyCache) Contains(key any) bool {
+func (c *Cache) Contains(key any) bool {
	c.mu.RLock()
	b := c.lru.Contains(key)
	c.mu.RUnlock()
@@ -48,14 +48,14 @@ func (c *LazyCache) Contains(key any) bool {

// Delete deletes the item with given key from the cache, returning if the
// key was contained.
-func (c *LazyCache) Delete(key any) bool {
+func (c *Cache) Delete(key any) bool {
	c.mu.Lock()
	defer c.mu.Unlock()
	return c.lru.Remove(key)
}

// DeleteFunc deletes all entries for which the given function returns true.
-func (c *LazyCache) DeleteFunc(matches func(key any, item Entry) bool) int {
+func (c *Cache) DeleteFunc(matches func(key any, item Entry) bool) int {
	c.mu.RLock()
	keys := c.lru.Keys()

@@ -81,21 +81,21 @@ func (c *LazyCache) DeleteFunc(matches func(key any, item Entry) bool) int {
}

// Keys returns a slice of the keys in the cache, oldest first.
-func (c *LazyCache) Keys() []any {
+func (c *Cache) Keys() []any {
	c.mu.RLock()
	defer c.mu.RUnlock()
	return c.lru.Keys()
}

// Len returns the number of items in the cache.
-func (c *LazyCache) Len() int {
+func (c *Cache) Len() int {
	c.mu.RLock()
	defer c.mu.RUnlock()
	return c.lru.Len()
}

// Get returns the value associated with key.
-func (c *LazyCache) Get(key any) Entry {
+func (c *Cache) Get(key any) Entry {
	c.mu.Lock()
	v, ok := c.lru.Get(key)
	c.mu.Unlock()
@@ -109,7 +109,7 @@ func (c *LazyCache) Get(key any) Entry {
// Note that create, the cache prime function, is called once and then not called again for a given key
// unless the cache entry is evicted; it does not block other goroutines from calling GetOrCreate,
// and it is not called with the cache lock held.
-func (c *LazyCache) GetOrCreate(key any, create func(key any) (any, error)) Entry {
+func (c *Cache) GetOrCreate(key any, create func(key any) (any, error)) Entry {
	c.mu.Lock()
	v, ok := c.lru.Get(key)
	if ok {
@@ -138,15 +138,15 @@ func (c *LazyCache) GetOrCreate(key any, create func(key any) (any, error)) Entr
}

// Resize changes the cache size and returns the number of entries evicted.
-func (c *LazyCache) Resize(size int) (evicted int) {
+func (c *Cache) Resize(size int) (evicted int) {
	c.mu.Lock()
	evicted = c.lru.Resize(size)
	c.mu.Unlock()
	return evicted
}

// Set associates value with key.
-func (c *LazyCache) Set(key, value any) {
+func (c *Cache) Set(key, value any) {
	c.mu.Lock()
	if _, ok := value.(Entry); !ok {
		value = entry{
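
The GetOrCreate contract documented above (the prime function runs at most once per key and is never invoked while the cache's RWMutex is held) is the interesting part of this API. Only the interface assertion for `delayedEntry` is visible in this diff, so the following is merely a rough sketch of how such a once-per-key, lock-free-prime guarantee is commonly implemented: a placeholder entry is stored under the lock, and the prime function runs behind a per-entry `sync.Once`. It is not the library's actual code; LRU eviction and the `Entry` interface are omitted.

```go
package main

import (
	"fmt"
	"sync"
)

// onceEntry stands in for a "delayed" cache entry: its value is computed
// lazily, at most once, outside any cache-wide lock.
type onceEntry struct {
	once  sync.Once
	value any
	err   error
}

func (e *onceEntry) wait(key any, create func(key any) (any, error)) {
	e.once.Do(func() {
		e.value, e.err = create(key)
	})
}

type miniCache struct {
	mu      sync.RWMutex
	entries map[any]*onceEntry
}

// getOrCreate inserts a placeholder under the lock, then releases the lock
// before running create, so a slow prime never blocks unrelated keys.
func (c *miniCache) getOrCreate(key any, create func(key any) (any, error)) (any, error) {
	c.mu.Lock()
	e, ok := c.entries[key]
	if !ok {
		e = &onceEntry{}
		c.entries[key] = e
	}
	c.mu.Unlock()

	// All callers for the same key converge on the same entry; sync.Once
	// guarantees create runs exactly once, and no cache lock is held here.
	e.wait(key, create)
	return e.value, e.err
}

func main() {
	c := &miniCache{entries: map[any]*onceEntry{}}
	v, err := c.getOrCreate("k", func(key any) (any, error) {
		return fmt.Sprintf("computed for %v", key), nil
	})
	fmt.Println(v, err)
}
```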

lazycache_test.go

Lines changed: 3 additions & 3 deletions
@@ -120,7 +120,7 @@ func TestGetOrCreateConcurrent(t *testing.T) {
func BenchmarkGetOrCreate(b *testing.B) {
	const maxSize = 1000

-	runBenchmark := func(b *testing.B, cache *LazyCache, getOrCreate func(key any, create func(key any) (any, error)) Entry) {
+	runBenchmark := func(b *testing.B, cache *Cache, getOrCreate func(key any, create func(key any) (any, error)) Entry) {
		r := rand.New(rand.NewSource(99))
		var mu sync.Mutex

@@ -241,7 +241,7 @@ func BenchmarkCacheParallel(b *testing.B) {

// These are only used in benchmarks.
// This should be functionally equivalent to GetOrCreate.
-func (c *LazyCache) getOrCreateBaselineLock(key any, create func(key any) (any, error)) Entry {
+func (c *Cache) getOrCreateBaselineLock(key any, create func(key any) (any, error)) Entry {
	c.mu.Lock()
	defer c.mu.Unlock()
	v, ok := c.lru.Get(key)
@@ -263,7 +263,7 @@ func (c *LazyCache) getOrCreateBaselineLock(key any, create func(key any) (any,
}

// This variant does not hold any lock while calling create, which means it may be called multiple times for the same key.
-func (c *LazyCache) getOrCreateBaselDoubleCheckedLock(key any, create func(key any) (any, error)) Entry {
+func (c *Cache) getOrCreateBaselDoubleCheckedLock(key any, create func(key any) (any, error)) Entry {
	c.mu.Lock()
	v, ok := c.lru.Get(key)
	if ok {
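
The two baselines above show what GetOrCreate buys: `getOrCreateBaselineLock` holds the cache lock across `create`, while the double-checked variant releases it and therefore, as its comment says, may call `create` more than once for the same key. The standalone sketch below makes that race observable; it is not the test's actual code, and every name in it is made up for illustration.

```go
package main

import (
	"fmt"
	"sync"
	"sync/atomic"
	"time"
)

// dclCache is a plain double-checked lock around a map: check under the lock,
// release it, compute, then re-take the lock to store. Nothing coordinates
// concurrent misses on the same key.
type dclCache struct {
	mu sync.Mutex
	m  map[any]any
}

func (c *dclCache) getOrCreate(key any, create func(key any) (any, error)) (any, error) {
	c.mu.Lock()
	if v, ok := c.m[key]; ok {
		c.mu.Unlock()
		return v, nil
	}
	c.mu.Unlock()

	v, err := create(key) // no lock held, but also no per-key guard
	if err != nil {
		return nil, err
	}

	c.mu.Lock()
	defer c.mu.Unlock()
	if existing, ok := c.m[key]; ok {
		return existing, nil // another goroutine won the race; keep its value
	}
	c.m[key] = v
	return v, nil
}

func main() {
	c := &dclCache{m: map[any]any{}}
	var calls atomic.Int64

	var wg sync.WaitGroup
	for i := 0; i < 8; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			c.getOrCreate("k", func(key any) (any, error) {
				calls.Add(1)
				time.Sleep(10 * time.Millisecond) // widen the race window
				return "v", nil
			})
		}()
	}
	wg.Wait()

	// Usually prints a number greater than 1: the prime function raced.
	fmt.Println("create calls:", calls.Load())
}
```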
