tercul-backend/internal/jobs/linguistics/analysis_cache.go
Damir Mukimov d50722dad5
Refactor ID handling to use UUIDs across the application
- Updated database models and repositories to replace uint IDs with UUIDs.
- Modified test fixtures to generate and use UUIDs for authors, translations, users, and works.
- Adjusted mock implementations to align with the new UUID structure.
- Ensured all relevant functions and methods are updated to handle UUIDs correctly.
- Added necessary imports for UUID handling in various files.
2025-12-27 00:33:34 +01:00


package linguistics

import (
	"context"
	"fmt"
	"sync"
	"time"

	lru "github.com/hashicorp/golang-lru/v2"

	"tercul/internal/platform/cache"
	"tercul/internal/platform/config"
	"tercul/internal/platform/log"
)

// AnalysisCache defines the interface for caching analysis results
type AnalysisCache interface {
	// Get retrieves cached analysis result
	Get(ctx context.Context, key string) (*AnalysisResult, error)
	// Set stores analysis result in cache
	Set(ctx context.Context, key string, result *AnalysisResult) error
	// IsEnabled returns whether caching is enabled
	IsEnabled() bool
}
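
// getOrAnalyzeSketch is an illustrative sketch, not part of the original file:
// it shows the cache-aside pattern the AnalysisCache interface supports. The
// analyze callback is a hypothetical stand-in for whatever produces an
// AnalysisResult in this package.
func getOrAnalyzeSketch(ctx context.Context, c AnalysisCache, key string, analyze func(context.Context) (*AnalysisResult, error)) (*AnalysisResult, error) {
	if c != nil && c.IsEnabled() {
		// All implementations in this file return a non-nil error on a miss,
		// so a nil error means a usable hit.
		if cached, err := c.Get(ctx, key); err == nil {
			return cached, nil
		}
	}

	result, err := analyze(ctx)
	if err != nil {
		return nil, err
	}

	if c != nil && c.IsEnabled() {
		// Cache writes are best effort: a failure to cache should not fail
		// the analysis itself.
		_ = c.Set(ctx, key, result)
	}
	return result, nil
}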

// MemoryAnalysisCache implements in-memory caching for analysis results
type MemoryAnalysisCache struct {
	cache   *lru.Cache[string, *AnalysisResult]
	mutex   sync.RWMutex
	enabled bool
}

// NewMemoryAnalysisCache creates a new MemoryAnalysisCache
func NewMemoryAnalysisCache(cfg *config.Config, enabled bool) *MemoryAnalysisCache {
	capacity := cfg.NLPMemoryCacheCap
	if capacity <= 0 {
		capacity = 1024
	}
	// lru.New only fails for a non-positive size, which is guarded above,
	// so the error can be safely ignored.
	l, _ := lru.New[string, *AnalysisResult](capacity)
	return &MemoryAnalysisCache{
		cache:   l,
		enabled: enabled,
	}
}

// Get retrieves cached analysis result from memory
func (c *MemoryAnalysisCache) Get(ctx context.Context, key string) (*AnalysisResult, error) {
	if !c.enabled {
		return nil, fmt.Errorf("cache disabled")
	}

	c.mutex.RLock()
	defer c.mutex.RUnlock()

	if result, exists := c.cache.Get(key); exists {
		return result, nil
	}
	return nil, fmt.Errorf("cache miss")
}

// Set stores analysis result in memory cache
func (c *MemoryAnalysisCache) Set(ctx context.Context, key string, result *AnalysisResult) error {
	if !c.enabled {
		return nil
	}

	c.mutex.Lock()
	defer c.mutex.Unlock()

	c.cache.Add(key, result)
	return nil
}

// IsEnabled returns whether caching is enabled
func (c *MemoryAnalysisCache) IsEnabled() bool {
	return c.enabled
}

// RedisAnalysisCache implements Redis-based caching for analysis results
type RedisAnalysisCache struct {
	cache   cache.Cache
	enabled bool
	ttl     time.Duration
}

// NewRedisAnalysisCache creates a new RedisAnalysisCache
func NewRedisAnalysisCache(cfg *config.Config, cache cache.Cache, enabled bool) *RedisAnalysisCache {
	ttlSeconds := cfg.NLPRedisCacheTTLSeconds
	if ttlSeconds <= 0 {
		ttlSeconds = 3600 // default 1 hour
	}
	return &RedisAnalysisCache{
		cache:   cache,
		enabled: enabled,
		ttl:     time.Duration(ttlSeconds) * time.Second,
	}
}

// Get retrieves cached analysis result from Redis
func (c *RedisAnalysisCache) Get(ctx context.Context, key string) (*AnalysisResult, error) {
	if !c.enabled || c.cache == nil {
		return nil, fmt.Errorf("cache disabled or unavailable")
	}

	var result AnalysisResult
	err := c.cache.Get(ctx, key, &result)
	if err != nil {
		return nil, fmt.Errorf("cache miss: %w", err)
	}
	return &result, nil
}

// Set stores analysis result in Redis cache
func (c *RedisAnalysisCache) Set(ctx context.Context, key string, result *AnalysisResult) error {
	if !c.enabled || c.cache == nil {
		return nil
	}

	err := c.cache.Set(ctx, key, result, c.ttl)
	if err != nil {
		log.FromContext(ctx).With("key", key).Error(err, "Failed to cache analysis result")
		return err
	}
	return nil
}

// IsEnabled returns whether caching is enabled
func (c *RedisAnalysisCache) IsEnabled() bool {
	return c.enabled && c.cache != nil
}

// CompositeAnalysisCache combines multiple cache layers
type CompositeAnalysisCache struct {
	memoryCache AnalysisCache
	redisCache  AnalysisCache
	enabled     bool
}

// NewCompositeAnalysisCache creates a new CompositeAnalysisCache
func NewCompositeAnalysisCache(memoryCache AnalysisCache, redisCache AnalysisCache, enabled bool) *CompositeAnalysisCache {
	return &CompositeAnalysisCache{
		memoryCache: memoryCache,
		redisCache:  redisCache,
		enabled:     enabled,
	}
}

// Get retrieves cached analysis result from memory first, then Redis
func (c *CompositeAnalysisCache) Get(ctx context.Context, key string) (*AnalysisResult, error) {
	if !c.enabled {
		return nil, fmt.Errorf("cache disabled")
	}

	// Try memory cache first
	if result, err := c.memoryCache.Get(ctx, key); err == nil {
		return result, nil
	}

	// Try Redis cache
	if result, err := c.redisCache.Get(ctx, key); err == nil {
		// Populate memory cache with Redis result
		if err := c.memoryCache.Set(ctx, key, result); err != nil {
			log.FromContext(ctx).Warn(fmt.Sprintf("Failed to populate memory cache from Redis for key %s: %v", key, err))
		}
		return result, nil
	}

	return nil, fmt.Errorf("cache miss")
}

// Set stores analysis result in both memory and Redis caches
func (c *CompositeAnalysisCache) Set(ctx context.Context, key string, result *AnalysisResult) error {
	if !c.enabled {
		return nil
	}

	// Set in memory cache
	if err := c.memoryCache.Set(ctx, key, result); err != nil {
		log.FromContext(ctx).With("key", key).Error(err, "Failed to set memory cache")
	}

	// Set in Redis cache
	if err := c.redisCache.Set(ctx, key, result); err != nil {
		log.FromContext(ctx).With("key", key).Error(err, "Failed to set Redis cache")
		return err
	}
	return nil
}

// IsEnabled returns whether caching is enabled
func (c *CompositeAnalysisCache) IsEnabled() bool {
	return c.enabled
}
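
// newAnalysisCacheSketch is an illustrative sketch, not part of the original
// file: it shows one way the three layers above could be wired together, with
// the in-memory LRU acting as a read-through front for Redis (see
// CompositeAnalysisCache.Get). The enabled flags and the redis argument are
// assumptions about how the surrounding application configures caching.
func newAnalysisCacheSketch(cfg *config.Config, redis cache.Cache) AnalysisCache {
	memory := NewMemoryAnalysisCache(cfg, true)
	redisLayer := NewRedisAnalysisCache(cfg, redis, redis != nil)
	return NewCompositeAnalysisCache(memory, redisLayer, true)
}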