shithub: furgit

Download patch

ref: 19dc6aedddde8b5306f1fb0dc4d46ba57f318cce
parent: 1334ea3e45aaf9d2620223215763443647aae5a7
author: Runxi Yu <me@runxiyu.org>
date: Sat Feb 21 05:30:53 EST 2026

cache/lru: Add basic LRU

--- /dev/null
+++ b/internal/cache/lru/lru.go
@@ -1,0 +1,172 @@
+// Package lru provides a weighted least-recently-used cache.
+package lru
+
+import "container/list"
+
+// WeightFunc reports one entry's weight used for eviction budgeting.
+//
+// It is consulted once per Add; the result is stored with the entry, so the
+// weight of a value must not change while it is cached.
+//
+// Returned weights MUST be non-negative.
+type WeightFunc[K comparable, V any] func(key K, value V) int64
+
+// OnEvictFunc runs when an entry leaves the cache.
+//
+// It is called for evictions, explicit removals, Clear, and replacement by Add.
+type OnEvictFunc[K comparable, V any] func(key K, value V)
+
+// Cache is a non-concurrent weighted LRU cache.
+//
+// Methods on Cache are not safe for concurrent use.
+type Cache[K comparable, V any] struct {
+	maxWeight int64             // total weight budget enforced by eviction
+	weightFn  WeightFunc[K, V]  // computes each entry's weight at Add time
+	onEvict   OnEvictFunc[K, V] // optional; nil means no eviction callback
+
+	weight int64               // current sum of cached entry weights
+	items  map[K]*list.Element // key -> list element holding a *entry[K, V]
+	lru    list.List           // front = least recently used, back = most
+}
+
+// entry is the payload stored in each list element of Cache.lru.
+type entry[K comparable, V any] struct {
+	key    K
+	value  V
+	weight int64 // computed once at Add time; fixed for the entry's lifetime
+}
+
+// New creates a cache with a maximum total weight.
+//
+// onEvict may be nil. New panics if maxWeight is negative or weightFn is nil.
+func New[K comparable, V any](maxWeight int64, weightFn WeightFunc[K, V], onEvict OnEvictFunc[K, V]) *Cache[K, V] {
+	switch {
+	case maxWeight < 0:
+		panic("lru: negative max weight")
+	case weightFn == nil:
+		panic("lru: nil weight function")
+	}
+	cache := &Cache[K, V]{
+		maxWeight: maxWeight,
+		weightFn:  weightFn,
+		onEvict:   onEvict,
+		items:     make(map[K]*list.Element),
+	}
+	return cache
+}
+
+// Add inserts or replaces key and marks it most-recently-used.
+//
+// Add returns false when the entry's weight exceeds MaxWeight even for an empty
+// cache. In that case the cache is unchanged.
+//
+// Add panics if weightFn returns a negative weight.
+func (cache *Cache[K, V]) Add(key K, value V) bool {
+	weight := cache.weightFn(key, value)
+	switch {
+	case weight < 0:
+		panic("lru: negative entry weight")
+	case weight > cache.maxWeight:
+		// The entry can never fit; reject before touching any state.
+		return false
+	}
+
+	// Replacing an existing entry counts as an eviction of the old value.
+	if old, ok := cache.items[key]; ok {
+		cache.removeElem(old)
+	}
+
+	cache.items[key] = cache.lru.PushBack(&entry[K, V]{
+		key:    key,
+		value:  value,
+		weight: weight,
+	})
+	cache.weight += weight
+
+	cache.evictOverBudget()
+	return true
+}
+
+// Get returns value for key and marks it most-recently-used.
+func (cache *Cache[K, V]) Get(key K) (V, bool) {
+	if elem, ok := cache.items[key]; ok {
+		// Touch: the back of the list is the most-recently-used end.
+		cache.lru.MoveToBack(elem)
+		return elem.Value.(*entry[K, V]).value, true
+	}
+	var zero V
+	return zero, false
+}
+
+// Peek returns value for key without changing recency.
+func (cache *Cache[K, V]) Peek(key K) (V, bool) {
+	if elem, ok := cache.items[key]; ok {
+		return elem.Value.(*entry[K, V]).value, true
+	}
+	var zero V
+	return zero, false
+}
+
+// Remove deletes key from the cache.
+//
+// It returns the removed value and true, or the zero value and false when the
+// key is absent. The eviction callback runs for a removed entry.
+func (cache *Cache[K, V]) Remove(key K) (V, bool) {
+	if elem, ok := cache.items[key]; ok {
+		return cache.removeElem(elem).value, true
+	}
+	var zero V
+	return zero, false
+}
+
+// Clear removes all entries from the cache.
+//
+// The eviction callback runs for every entry, oldest first.
+func (cache *Cache[K, V]) Clear() {
+	for {
+		front := cache.lru.Front()
+		if front == nil {
+			return
+		}
+		cache.removeElem(front)
+	}
+}
+
+// Len returns the number of cached entries.
+func (cache *Cache[K, V]) Len() int {
+	return len(cache.items)
+}
+
+// Weight returns the current total weight.
+//
+// After any public method returns, this never exceeds MaxWeight.
+func (cache *Cache[K, V]) Weight() int64 {
+	return cache.weight
+}
+
+// MaxWeight returns the configured total weight budget.
+func (cache *Cache[K, V]) MaxWeight() int64 {
+	return cache.maxWeight
+}
+
+// SetMaxWeight updates the total weight budget and evicts LRU entries as
+// needed.
+//
+// SetMaxWeight panics if maxWeight is negative; the panic happens before any
+// state is mutated.
+func (cache *Cache[K, V]) SetMaxWeight(maxWeight int64) {
+	if maxWeight < 0 {
+		panic("lru: negative max weight")
+	}
+	cache.maxWeight = maxWeight
+	// A reduced budget may leave the cache over weight; trim immediately.
+	cache.evictOverBudget()
+}
+
+// evictOverBudget drops least-recently-used entries until the total weight
+// fits within maxWeight. An empty list terminates the loop defensively.
+func (cache *Cache[K, V]) evictOverBudget() {
+	for front := cache.lru.Front(); front != nil && cache.weight > cache.maxWeight; front = cache.lru.Front() {
+		cache.removeElem(front)
+	}
+}
+
+// removeElem unlinks elem from the list and the key index, updates the total
+// weight, fires the eviction callback, and returns the removed entry.
+//
+// The callback runs only after the cache's own bookkeeping is consistent, so
+// an onEvict that inspects the cache observes the entry as already gone.
+func (cache *Cache[K, V]) removeElem(elem *list.Element) *entry[K, V] {
+	ent := elem.Value.(*entry[K, V])
+	cache.lru.Remove(elem)
+	delete(cache.items, ent.key)
+	cache.weight -= ent.weight
+	if cache.onEvict != nil {
+		cache.onEvict(ent.key, ent.value)
+	}
+	return ent
+}
--- /dev/null
+++ b/internal/cache/lru/lru_test.go
@@ -1,0 +1,210 @@
+package lru_test
+
+import (
+	"slices"
+	"testing"
+
+	"codeberg.org/lindenii/furgit/internal/cache/lru"
+)
+
+// testValue pairs an explicit weight with a label so tests can tell entry
+// versions apart.
+type testValue struct {
+	weight int64
+	label  string
+}
+
+// weightFn reports the weight carried inside the value; the key is unused.
+func weightFn(_ string, value testValue) int64 {
+	return value.weight
+}
+
+func TestCacheEvictsLRUAndGetUpdatesRecency(t *testing.T) {
+	t.Parallel()
+
+	// Budget 8 fits two weight-4 entries, so the third Add evicts the oldest.
+	cache := lru.New[string, testValue](8, weightFn, nil)
+	cache.Add("a", testValue{weight: 4, label: "a"})
+	cache.Add("b", testValue{weight: 4, label: "b"})
+	cache.Add("c", testValue{weight: 4, label: "c"})
+
+	if _, ok := cache.Peek("a"); ok {
+		t.Fatalf("expected a to be evicted")
+	}
+	if _, ok := cache.Peek("b"); !ok {
+		t.Fatalf("expected b to be present")
+	}
+	if _, ok := cache.Peek("c"); !ok {
+		t.Fatalf("expected c to be present")
+	}
+
+	// Get refreshes b, leaving c as the least-recently-used entry.
+	if _, ok := cache.Get("b"); !ok {
+		t.Fatalf("Get(b) should hit")
+	}
+	cache.Add("d", testValue{weight: 4, label: "d"})
+
+	if _, ok := cache.Peek("c"); ok {
+		t.Fatalf("expected c to be evicted after b was touched")
+	}
+	if _, ok := cache.Peek("b"); !ok {
+		t.Fatalf("expected b to remain present")
+	}
+	if _, ok := cache.Peek("d"); !ok {
+		t.Fatalf("expected d to be present")
+	}
+}
+
+func TestCachePeekDoesNotUpdateRecency(t *testing.T) {
+	t.Parallel()
+
+	// Two weight-2 entries fill the budget of 4 exactly.
+	cache := lru.New[string, testValue](4, weightFn, nil)
+	cache.Add("a", testValue{weight: 2, label: "a"})
+	cache.Add("b", testValue{weight: 2, label: "b"})
+
+	_, hit := cache.Peek("a")
+	if !hit {
+		t.Fatalf("Peek(a) should hit")
+	}
+
+	// Peek must not have refreshed a, so adding c evicts a rather than b.
+	cache.Add("c", testValue{weight: 2, label: "c"})
+	if _, stillPresent := cache.Peek("a"); stillPresent {
+		t.Fatalf("expected a to be evicted; Peek must not update recency")
+	}
+	if _, present := cache.Peek("b"); !present {
+		t.Fatalf("expected b to remain present")
+	}
+}
+
+func TestCacheReplaceAndResize(t *testing.T) {
+	t.Parallel()
+
+	var evicted []string
+	cache := lru.New[string, testValue](10, weightFn, func(key string, value testValue) {
+		evicted = append(evicted, key+":"+value.label)
+	})
+
+	cache.Add("a", testValue{weight: 4, label: "old"})
+	cache.Add("b", testValue{weight: 4, label: "b"})
+	// Replacing a evicts the old value and re-weighs: new a (6) + b (4) = 10.
+	cache.Add("a", testValue{weight: 6, label: "new"})
+
+	if cache.Weight() != 10 {
+		t.Fatalf("Weight() = %d, want 10", cache.Weight())
+	}
+	if got, ok := cache.Peek("a"); !ok || got.label != "new" {
+		t.Fatalf("Peek(a) = (%+v,%v), want new,true", got, ok)
+	}
+	if !slices.Equal(evicted, []string{"a:old"}) {
+		t.Fatalf("evicted = %v, want [a:old]", evicted)
+	}
+
+	// Shrinking the budget below the current total evicts the LRU entry, b
+	// (the replacement moved a to the most-recently-used position).
+	cache.SetMaxWeight(8)
+	if _, ok := cache.Peek("b"); ok {
+		t.Fatalf("expected b to be evicted after shrinking max weight")
+	}
+	if !slices.Equal(evicted, []string{"a:old", "b:b"}) {
+		t.Fatalf("evicted = %v, want [a:old b:b]", evicted)
+	}
+}
+
+func TestCacheRejectsOversizedWithoutMutation(t *testing.T) {
+	t.Parallel()
+
+	var evicted []string
+	cache := lru.New[string, testValue](5, weightFn, func(key string, value testValue) {
+		evicted = append(evicted, key)
+	})
+	cache.Add("a", testValue{weight: 3, label: "a"})
+
+	// An entry heavier than the whole budget must be rejected outright.
+	if ok := cache.Add("b", testValue{weight: 6, label: "b"}); ok {
+		t.Fatalf("Add oversized should return false")
+	}
+	if got, ok := cache.Peek("a"); !ok || got.label != "a" {
+		t.Fatalf("cache should remain unchanged after oversized add")
+	}
+	if cache.Weight() != 3 {
+		t.Fatalf("Weight() = %d, want 3", cache.Weight())
+	}
+	if len(evicted) != 0 {
+		t.Fatalf("evicted = %v, want none", evicted)
+	}
+
+	// An oversized replacement must leave the existing entry untouched too.
+	if ok := cache.Add("a", testValue{weight: 6, label: "new"}); ok {
+		t.Fatalf("oversized replace should return false")
+	}
+	if got, ok := cache.Peek("a"); !ok || got.label != "a" {
+		t.Fatalf("existing key should remain unchanged after oversized replace")
+	}
+	if len(evicted) != 0 {
+		t.Fatalf("evicted = %v, want none", evicted)
+	}
+}
+
+func TestCacheRemoveAndClear(t *testing.T) {
+	t.Parallel()
+
+	var evicted []string
+	cache := lru.New[string, testValue](10, weightFn, func(key string, value testValue) {
+		evicted = append(evicted, key)
+	})
+
+	cache.Add("a", testValue{weight: 2, label: "a"})
+	cache.Add("b", testValue{weight: 3, label: "b"})
+	cache.Add("c", testValue{weight: 4, label: "c"})
+
+	// Remove returns the stored value and fires the eviction callback.
+	removed, ok := cache.Remove("b")
+	if !ok || removed.label != "b" {
+		t.Fatalf("Remove(b) = (%+v,%v), want b,true", removed, ok)
+	}
+	if cache.Len() != 2 || cache.Weight() != 6 {
+		t.Fatalf("post-remove Len/Weight = %d/%d, want 2/6", cache.Len(), cache.Weight())
+	}
+
+	cache.Clear()
+	if cache.Len() != 0 || cache.Weight() != 0 {
+		t.Fatalf("post-clear Len/Weight = %d/%d, want 0/0", cache.Len(), cache.Weight())
+	}
+
+	// Remove emits b, then Clear emits oldest-to-newest among remaining: a, c.
+	if !slices.Equal(evicted, []string{"b", "a", "c"}) {
+		t.Fatalf("evicted = %v, want [b a c]", evicted)
+	}
+}
+
+func TestCachePanicsForInvalidConfiguration(t *testing.T) {
+	t.Parallel()
+
+	// mustPanic fails the subtest unless fn panics. Setup that is expected to
+	// succeed stays outside fn so its panics are not masked.
+	mustPanic := func(t *testing.T, fn func()) {
+		t.Helper()
+		defer func() {
+			if recover() == nil {
+				t.Fatalf("expected panic")
+			}
+		}()
+		fn()
+	}
+
+	t.Run("negative max", func(t *testing.T) {
+		mustPanic(t, func() {
+			_ = lru.New[string, testValue](-1, weightFn, nil)
+		})
+	})
+
+	t.Run("nil weight function", func(t *testing.T) {
+		mustPanic(t, func() {
+			_ = lru.New[string, testValue](1, nil, nil)
+		})
+	})
+
+	t.Run("negative entry weight", func(t *testing.T) {
+		cache := lru.New[string, testValue](10, func(_ string, _ testValue) int64 {
+			return -1
+		}, nil)
+		mustPanic(t, func() {
+			cache.Add("x", testValue{weight: 1, label: "x"})
+		})
+	})
+
+	t.Run("set negative max", func(t *testing.T) {
+		cache := lru.New[string, testValue](10, weightFn, nil)
+		mustPanic(t, func() {
+			cache.SetMaxWeight(-1)
+		})
+	})
+}
--