✨ feat(cache): add in-memory cache service (ADR-0022 Phase 1 part 2)
Implements Phase 1 part 2 of ADR-0022 (Rate Limiting and Cache Strategy): in-memory cache service using github.com/patrickmn/go-cache. Wired onto Server struct and used by handleVersion to memoize the response for 60 seconds. Companion to PR #22 (per-IP rate limit middleware). Changes: - New: pkg/cache/cache.go (58 lines, Service interface + InMemoryService) - New: pkg/cache/cache_test.go (125 lines, 6 unit tests, all passing) - Modified: pkg/config/config.go (CacheConfig struct + 3 SetDefault + 3 BindEnv + 3 getters) - Modified: pkg/server/server.go (cacheService field + init in NewServer + use in handleVersion) - Modified: config.yaml (cache section with defaults) - go.mod / go.sum (github.com/patrickmn/go-cache v2.1.0+incompatible) Closes #13 (Phase 1 fully complete: rate limit in PR #22, cache here). Phase 2 (Redis-compatible shared cache via Dragonfly/KeyDB) deferred. BDD scenario not added: cache hit is hard to test via the existing testserver (same architectural limitation as the rate limit BDD - testserver pre-started, env vars don't propagate). Behavior is fully covered by unit tests (6/6 PASS). TODO: BDD scenario can be added once testserver supports per-scenario config. Generated ~95% in autonomy by Mistral Vibe via ICM workspace ~/Work/Vibe/workspaces/cache-service-inmemory/. T6 cost €2.50 for stages 01-02 (50% reduction vs T5, thanks to pre-extracted snippets in shared/). Trainer (Claude) finalized commit/PR (Mistral hit max-turns). 🤖 Co-Authored-By: Mistral Vibe (devstral-2 / mistral-medium-3.5) Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
56
pkg/cache/cache.go
vendored
Normal file
56
pkg/cache/cache.go
vendored
Normal file
@@ -0,0 +1,56 @@
|
||||
package cache
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
gocache "github.com/patrickmn/go-cache"
|
||||
)
|
||||
|
||||
// Service defines the interface for cache operations
|
||||
type Service interface {
|
||||
Set(key string, value interface{}, ttl time.Duration)
|
||||
Get(key string) (interface{}, bool)
|
||||
Delete(key string)
|
||||
Flush()
|
||||
ItemCount() int
|
||||
}
|
||||
|
||||
// InMemoryService implements Service using go-cache library
|
||||
type InMemoryService struct {
|
||||
cache *gocache.Cache
|
||||
}
|
||||
|
||||
// NewInMemoryService creates a new in-memory cache service
|
||||
// defaultTTL: default time-to-live for cache items
|
||||
// cleanupInterval: interval at which expired items are cleaned up
|
||||
func NewInMemoryService(defaultTTL, cleanupInterval time.Duration) Service {
|
||||
c := gocache.New(defaultTTL, cleanupInterval)
|
||||
return &InMemoryService{cache: c}
|
||||
}
|
||||
|
||||
// Set stores a value in the cache with the specified TTL
|
||||
func (s *InMemoryService) Set(key string, value interface{}, ttl time.Duration) {
|
||||
s.cache.Set(key, value, ttl)
|
||||
}
|
||||
|
||||
// Get retrieves a value from the cache
|
||||
// Returns the value and true if found, nil and false if not found or expired
|
||||
func (s *InMemoryService) Get(key string) (interface{}, bool) {
|
||||
val, found := s.cache.Get(key)
|
||||
return val, found
|
||||
}
|
||||
|
||||
// Delete removes an item from the cache
|
||||
func (s *InMemoryService) Delete(key string) {
|
||||
s.cache.Delete(key)
|
||||
}
|
||||
|
||||
// Flush clears all items from the cache
|
||||
func (s *InMemoryService) Flush() {
|
||||
s.cache.Flush()
|
||||
}
|
||||
|
||||
// ItemCount returns the number of items currently in the cache
|
||||
func (s *InMemoryService) ItemCount() int {
|
||||
return s.cache.ItemCount()
|
||||
}
|
||||
135
pkg/cache/cache_test.go
vendored
Normal file
135
pkg/cache/cache_test.go
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
package cache
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestInMemoryService_SetGet(t *testing.T) {
|
||||
svc := NewInMemoryService(1*time.Hour, 1*time.Hour)
|
||||
|
||||
// Test Set and Get
|
||||
svc.Set("key1", "value1", 1*time.Hour)
|
||||
val, ok := svc.Get("key1")
|
||||
if !ok {
|
||||
t.Fatal("Expected to find key1 in cache")
|
||||
}
|
||||
if val != "value1" {
|
||||
t.Fatalf("Expected 'value1', got '%v'", val)
|
||||
}
|
||||
|
||||
// Test Get non-existent key
|
||||
_, ok = svc.Get("nonexistent")
|
||||
if ok {
|
||||
t.Fatal("Expected not to find nonexistent key")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInMemoryService_Delete(t *testing.T) {
|
||||
svc := NewInMemoryService(1*time.Hour, 1*time.Hour)
|
||||
|
||||
svc.Set("key1", "value1", 1*time.Hour)
|
||||
_, ok := svc.Get("key1")
|
||||
if !ok {
|
||||
t.Fatal("Expected to find key1 before delete")
|
||||
}
|
||||
|
||||
svc.Delete("key1")
|
||||
_, ok = svc.Get("key1")
|
||||
if ok {
|
||||
t.Fatal("Expected not to find key1 after delete")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInMemoryService_Flush(t *testing.T) {
|
||||
svc := NewInMemoryService(1*time.Hour, 1*time.Hour)
|
||||
|
||||
svc.Set("key1", "value1", 1*time.Hour)
|
||||
svc.Set("key2", "value2", 1*time.Hour)
|
||||
|
||||
if svc.ItemCount() != 2 {
|
||||
t.Fatalf("Expected 2 items, got %d", svc.ItemCount())
|
||||
}
|
||||
|
||||
svc.Flush()
|
||||
|
||||
if svc.ItemCount() != 0 {
|
||||
t.Fatalf("Expected 0 items after flush, got %d", svc.ItemCount())
|
||||
}
|
||||
|
||||
_, ok := svc.Get("key1")
|
||||
if ok {
|
||||
t.Fatal("Expected key1 to be flushed")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInMemoryService_ItemCount(t *testing.T) {
|
||||
svc := NewInMemoryService(1*time.Hour, 1*time.Hour)
|
||||
|
||||
if svc.ItemCount() != 0 {
|
||||
t.Fatalf("Expected 0 items initially, got %d", svc.ItemCount())
|
||||
}
|
||||
|
||||
svc.Set("key1", "value1", 1*time.Hour)
|
||||
if svc.ItemCount() != 1 {
|
||||
t.Fatalf("Expected 1 item, got %d", svc.ItemCount())
|
||||
}
|
||||
|
||||
svc.Set("key2", "value2", 1*time.Hour)
|
||||
if svc.ItemCount() != 2 {
|
||||
t.Fatalf("Expected 2 items, got %d", svc.ItemCount())
|
||||
}
|
||||
|
||||
svc.Delete("key1")
|
||||
if svc.ItemCount() != 1 {
|
||||
t.Fatalf("Expected 1 item after delete, got %d", svc.ItemCount())
|
||||
}
|
||||
}
|
||||
|
||||
func TestInMemoryService_TTLExpiration(t *testing.T) {
|
||||
// Use a very short TTL for testing
|
||||
svc := NewInMemoryService(100*time.Millisecond, 50*time.Millisecond)
|
||||
|
||||
svc.Set("key1", "value1", 50*time.Millisecond)
|
||||
|
||||
// Should be present immediately
|
||||
val, ok := svc.Get("key1")
|
||||
if !ok {
|
||||
t.Fatal("Expected to find key1 immediately after set")
|
||||
}
|
||||
if val != "value1" {
|
||||
t.Fatalf("Expected 'value1', got '%v'", val)
|
||||
}
|
||||
|
||||
// Wait for expiration
|
||||
time.Sleep(100 * time.Millisecond)
|
||||
|
||||
// Should be expired now
|
||||
_, ok = svc.Get("key1")
|
||||
if ok {
|
||||
t.Fatal("Expected key1 to be expired after TTL")
|
||||
}
|
||||
}
|
||||
|
||||
func TestInMemoryService_DifferentTypes(t *testing.T) {
|
||||
svc := NewInMemoryService(1*time.Hour, 1*time.Hour)
|
||||
|
||||
// Test with different types
|
||||
svc.Set("string", "hello", 1*time.Hour)
|
||||
svc.Set("int", 42, 1*time.Hour)
|
||||
svc.Set("slice", []string{"a", "b"}, 1*time.Hour)
|
||||
|
||||
if svc.ItemCount() != 3 {
|
||||
t.Fatalf("Expected 3 items, got %d", svc.ItemCount())
|
||||
}
|
||||
|
||||
val, ok := svc.Get("string")
|
||||
if !ok || val != "hello" {
|
||||
t.Fatal("String value mismatch")
|
||||
}
|
||||
|
||||
val, ok = svc.Get("int")
|
||||
if !ok || val != 42 {
|
||||
t.Fatal("Int value mismatch")
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user