tercul-backend/internal/jobs/linguistics/analysis_cache.go
google-labs-jules[bot] 781b313bf1 feat: Complete all pending tasks from TASKS.md
This commit addresses all the high-priority tasks outlined in the TASKS.md file, significantly improving the application's observability, completing key features, and refactoring critical parts of the codebase.

### Observability

- **Centralized Logging:** Implemented a new structured, context-aware logging system using `zerolog`. A new logging middleware injects request-specific information (request ID, user ID, trace ID) into the logger, and all application logging has been refactored to use this new system.
- **Prometheus Metrics:** Added Prometheus metrics for database query performance by creating a GORM plugin that automatically records query latency and totals.
- **OpenTelemetry Tracing:** Fully instrumented all application services in `internal/app` and data repositories in `internal/data/sql` with OpenTelemetry tracing, providing deep visibility into application performance.
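
The tracing work described above follows the usual OpenTelemetry span-per-method pattern. The sketch below is illustrative only: the tracer name, `AuthorService`, and `loadName` are placeholder names, not code from `internal/app`; only the start-span / record-error / end-span shape is what the bullet describes.

```go
// Illustrative instrumentation sketch; names are placeholders.
package app

import (
	"context"
	"errors"

	"go.opentelemetry.io/otel"
	"go.opentelemetry.io/otel/codes"
)

// tracer is package-scoped and named after the instrumented package.
var tracer = otel.Tracer("tercul/internal/app")

// AuthorService is a stand-in type; the real services live in internal/app.
type AuthorService struct{}

// GetAuthorName shows the pattern applied to a single service method:
// start a span, propagate the traced context, record failures, end the span.
func (s *AuthorService) GetAuthorName(ctx context.Context, id string) (string, error) {
	ctx, span := tracer.Start(ctx, "AuthorService.GetAuthorName")
	defer span.End()

	name, err := s.loadName(ctx, id)
	if err != nil {
		span.RecordError(err)
		span.SetStatus(codes.Error, "author lookup failed")
		return "", err
	}
	return name, nil
}

// loadName stands in for a repository call that would receive the traced ctx.
func (s *AuthorService) loadName(ctx context.Context, id string) (string, error) {
	if id == "" {
		return "", errors.New("empty author id")
	}
	return "unknown author", nil
}
```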

### Features

- **Analytics:** Implemented like, comment, and bookmark counting. The respective command handlers now call the analytics service to increment counters when these actions are performed.
- **Enrichment Tool:** Built a new, extensible `enrich` command-line tool to fetch data from external sources. The initial implementation enriches author data using the Open Library API.
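
For context, a minimal author lookup against the public Open Library search API might look like the sketch below. The `lookupAuthor` helper and the response struct fields are assumptions based on the documented `search/authors.json` endpoint, not the actual `enrich` tool code; the tool's flag parsing and persistence are omitted.

```go
// Rough sketch of an Open Library author lookup, not the real enrich tool.
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

// authorDoc holds the subset of the Open Library response used here.
type authorDoc struct {
	Key       string `json:"key"`
	Name      string `json:"name"`
	BirthDate string `json:"birth_date"`
	TopWork   string `json:"top_work"`
}

type searchResponse struct {
	NumFound int         `json:"numFound"`
	Docs     []authorDoc `json:"docs"`
}

// lookupAuthor queries the Open Library authors search endpoint for a name.
func lookupAuthor(name string) (*authorDoc, error) {
	endpoint := "https://openlibrary.org/search/authors.json?q=" + url.QueryEscape(name)
	resp, err := http.Get(endpoint)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("open library returned status %d", resp.StatusCode)
	}
	var sr searchResponse
	if err := json.NewDecoder(resp.Body).Decode(&sr); err != nil {
		return nil, err
	}
	if sr.NumFound == 0 || len(sr.Docs) == 0 {
		return nil, fmt.Errorf("no Open Library match for %q", name)
	}
	return &sr.Docs[0], nil
}

func main() {
	doc, err := lookupAuthor("Orhan Pamuk")
	if err != nil {
		fmt.Println("enrich failed:", err)
		return
	}
	fmt.Printf("%s (%s): %s\n", doc.Name, doc.Key, doc.TopWork)
}
```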

### Refactoring & Fixes

- **Decoupled Testing:** Refactored the testing utilities in `internal/testutil` to be database-agnostic, promoting the use of mock-based unit tests and improving test speed and reliability.
- **Build Fixes:** Resolved numerous build errors, including a critical import cycle between the logging, observability, and authentication packages.
- **Search Service:** Fixed the search service integration by implementing the `GetWorkContent` method in the localization service, allowing the search indexer to correctly fetch and index work content.
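
The shape of that fix, in outline, is sketched below. The interface names and the `GetWorkContent` signature are hypothetical stand-ins; only the flow (the indexer asks the localization service for content, then indexes it) comes from the description above.

```go
// Hypothetical shapes only; the real interfaces live in tercul's
// localization and search packages.
package search

import (
	"context"
	"fmt"
)

// LocalizationService is a stand-in interface; GetWorkContent is the method
// the commit describes adding, with an assumed (not confirmed) signature.
type LocalizationService interface {
	GetWorkContent(ctx context.Context, workID uint, lang string) (string, error)
}

// SearchIndex is a stand-in for whatever backend the indexer writes to.
type SearchIndex interface {
	IndexDocument(ctx context.Context, id string, body string) error
}

// indexWork sketches the fixed flow: fetch localized content, then index it.
func indexWork(ctx context.Context, loc LocalizationService, idx SearchIndex, workID uint, lang string) error {
	content, err := loc.GetWorkContent(ctx, workID, lang)
	if err != nil {
		return fmt.Errorf("fetch work %d content: %w", workID, err)
	}
	return idx.IndexDocument(ctx, fmt.Sprintf("work-%d-%s", workID, lang), content)
}
```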
2025-10-05 05:26:27 +00:00

package linguistics

import (
	"context"
	"fmt"
	"sync"
	"time"

	"github.com/hashicorp/golang-lru/v2"
	"tercul/internal/platform/cache"
	"tercul/internal/platform/config"
	"tercul/internal/platform/log"
)

// AnalysisCache defines the interface for caching analysis results
type AnalysisCache interface {
	// Get retrieves cached analysis result
	Get(ctx context.Context, key string) (*AnalysisResult, error)
	// Set stores analysis result in cache
	Set(ctx context.Context, key string, result *AnalysisResult) error
	// IsEnabled returns whether caching is enabled
	IsEnabled() bool
}

// MemoryAnalysisCache implements in-memory caching for analysis results
type MemoryAnalysisCache struct {
	cache   *lru.Cache[string, *AnalysisResult]
	mutex   sync.RWMutex
	enabled bool
}

// NewMemoryAnalysisCache creates a new MemoryAnalysisCache
func NewMemoryAnalysisCache(enabled bool) *MemoryAnalysisCache {
	// capacity from config
	capacity := config.Cfg.NLPMemoryCacheCap
	if capacity <= 0 {
		capacity = 1024
	}
	l, _ := lru.New[string, *AnalysisResult](capacity)
	return &MemoryAnalysisCache{
		cache:   l,
		enabled: enabled,
	}
}

// Get retrieves cached analysis result from memory
func (c *MemoryAnalysisCache) Get(ctx context.Context, key string) (*AnalysisResult, error) {
	if !c.enabled {
		return nil, fmt.Errorf("cache disabled")
	}
	c.mutex.RLock()
	defer c.mutex.RUnlock()
	if result, exists := c.cache.Get(key); exists {
		return result, nil
	}
	return nil, fmt.Errorf("cache miss")
}

// Set stores analysis result in memory cache
func (c *MemoryAnalysisCache) Set(ctx context.Context, key string, result *AnalysisResult) error {
	if !c.enabled {
		return nil
	}
	c.mutex.Lock()
	defer c.mutex.Unlock()
	c.cache.Add(key, result)
	return nil
}

// IsEnabled returns whether caching is enabled
func (c *MemoryAnalysisCache) IsEnabled() bool {
	return c.enabled
}

// RedisAnalysisCache implements Redis-based caching for analysis results
type RedisAnalysisCache struct {
	cache   cache.Cache
	enabled bool
}

// NewRedisAnalysisCache creates a new RedisAnalysisCache
func NewRedisAnalysisCache(cache cache.Cache, enabled bool) *RedisAnalysisCache {
	return &RedisAnalysisCache{
		cache:   cache,
		enabled: enabled,
	}
}

// Get retrieves cached analysis result from Redis
func (c *RedisAnalysisCache) Get(ctx context.Context, key string) (*AnalysisResult, error) {
	if !c.enabled || c.cache == nil {
		return nil, fmt.Errorf("cache disabled or unavailable")
	}
	var result AnalysisResult
	err := c.cache.Get(ctx, key, &result)
	if err != nil {
		return nil, fmt.Errorf("cache miss: %w", err)
	}
	return &result, nil
}

// Set stores analysis result in Redis cache
func (c *RedisAnalysisCache) Set(ctx context.Context, key string, result *AnalysisResult) error {
	if !c.enabled || c.cache == nil {
		return nil
	}
	// TTL from config
	ttlSeconds := config.Cfg.NLPRedisCacheTTLSeconds
	err := c.cache.Set(ctx, key, result, time.Duration(ttlSeconds)*time.Second)
	if err != nil {
		log.FromContext(ctx).With("key", key).Error(err, "Failed to cache analysis result")
		return err
	}
	return nil
}

// IsEnabled returns whether caching is enabled
func (c *RedisAnalysisCache) IsEnabled() bool {
	return c.enabled && c.cache != nil
}

// CompositeAnalysisCache combines multiple cache layers
type CompositeAnalysisCache struct {
	memoryCache AnalysisCache
	redisCache  AnalysisCache
	enabled     bool
}

// NewCompositeAnalysisCache creates a new CompositeAnalysisCache
func NewCompositeAnalysisCache(memoryCache AnalysisCache, redisCache AnalysisCache, enabled bool) *CompositeAnalysisCache {
	return &CompositeAnalysisCache{
		memoryCache: memoryCache,
		redisCache:  redisCache,
		enabled:     enabled,
	}
}

// Get retrieves cached analysis result from memory first, then Redis
func (c *CompositeAnalysisCache) Get(ctx context.Context, key string) (*AnalysisResult, error) {
	if !c.enabled {
		return nil, fmt.Errorf("cache disabled")
	}
	// Try memory cache first
	if result, err := c.memoryCache.Get(ctx, key); err == nil {
		return result, nil
	}
	// Try Redis cache
	if result, err := c.redisCache.Get(ctx, key); err == nil {
		// Populate memory cache with Redis result
		_ = c.memoryCache.Set(ctx, key, result)
		return result, nil
	}
	return nil, fmt.Errorf("cache miss")
}

// Set stores analysis result in both memory and Redis caches
func (c *CompositeAnalysisCache) Set(ctx context.Context, key string, result *AnalysisResult) error {
	if !c.enabled {
		return nil
	}
	// Set in memory cache
	if err := c.memoryCache.Set(ctx, key, result); err != nil {
		log.FromContext(ctx).With("key", key).Error(err, "Failed to set memory cache")
	}
	// Set in Redis cache
	if err := c.redisCache.Set(ctx, key, result); err != nil {
		log.FromContext(ctx).With("key", key).Error(err, "Failed to set Redis cache")
		return err
	}
	return nil
}

// IsEnabled returns whether caching is enabled
func (c *CompositeAnalysisCache) IsEnabled() bool {
	return c.enabled
}
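
For readers wiring this up, a rough usage sketch follows. The key-derivation scheme, the `buildAnalysisCache` helper, and the `analyze` callback are assumptions added for illustration; only the cache types and constructors come from the file above.

```go
// Illustrative wiring only; the Redis-backed cache.Cache implementation and
// the analysis key scheme (a SHA-256 of language + text) are assumptions.
package linguistics

import (
	"context"
	"crypto/sha256"
	"encoding/hex"

	"tercul/internal/platform/cache"
)

// analysisKey derives a stable cache key from the text being analysed.
func analysisKey(lang, text string) string {
	sum := sha256.Sum256([]byte(lang + "\x00" + text))
	return "nlp:analysis:" + hex.EncodeToString(sum[:])
}

// buildAnalysisCache layers the in-memory LRU in front of Redis.
func buildAnalysisCache(redis cache.Cache) AnalysisCache {
	memory := NewMemoryAnalysisCache(true)
	remote := NewRedisAnalysisCache(redis, redis != nil)
	return NewCompositeAnalysisCache(memory, remote, true)
}

// analyzeWithCache shows the read-through pattern a caller would follow.
func analyzeWithCache(
	ctx context.Context,
	c AnalysisCache,
	lang, text string,
	analyze func(context.Context, string, string) (*AnalysisResult, error),
) (*AnalysisResult, error) {
	key := analysisKey(lang, text)
	if cached, err := c.Get(ctx, key); err == nil {
		return cached, nil
	}
	result, err := analyze(ctx, lang, text)
	if err != nil {
		return nil, err
	}
	// Best effort: a failed Set should not fail the analysis itself.
	_ = c.Set(ctx, key, result)
	return result, nil
}
```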