This commit is contained in:
Damir Mukimov 2025-09-01 00:43:59 +02:00
parent c4dad9e394
commit fa336cacf3
No known key found for this signature in database
GPG Key ID: 42996CC7C73BC750
121 changed files with 2810 additions and 3398 deletions

8
.gitignore vendored
View File

@ -109,6 +109,8 @@ Temporary Items
*.db *.db
*.sqlite *.sqlite
*.sqlite3 *.sqlite3
*.db-shm
*.db-wal
tercul_data.db tercul_data.db
tercul_export.json tercul_export.json
extracted_data.json extracted_data.json
@ -118,6 +120,7 @@ data_analysis_report.json
tercul_data.sql tercul_data.sql
tercul_schema.sql tercul_schema.sql
current_schema.sql current_schema.sql
full_database_backup.sql
# Migration data # Migration data
migration_data/ migration_data/
@ -173,3 +176,8 @@ yarn-error.log*
# Generated files # Generated files
graph/generated.go graph/generated.go
graph/model/models_gen.go graph/model/models_gen.go
# Additional files that should be ignored
refactor.md
report.md
requirements.txt

1
.tool-versions Normal file
View File

@ -0,0 +1 @@
golang 1.25.0

153
TODO.md
View File

@ -2,131 +2,53 @@
--- ---
## [x] Performance Improvements ## [ ] Performance Improvements
- [x] **COMPLETED: Add pagination to all repository list operations** (High, 2d)
- [x] /works: Add limit/offset support to repository and resolver
- [x] /translations: Add limit/offset support to repository and resolver
- [x] /authors: Add limit/offset support to repository and resolver
- [x] /users: Add limit/offset support to repository and resolver
- [x] /collections: Add limit/offset support to repository and resolver
- [x] /tags: Add limit/offset support to repository and resolver
- [x] /categories: Add limit/offset support to repository and resolver
- [x] /comments: Add limit/offset support to repository and resolver
- [x] /search: Add limit/offset support to repository and resolver
- [x] Validate all endpoints for correct pagination and total count
- [x] Add unit tests for paginated list operations
- [x] Document pagination parameters in API docs
- [x] **COMPLETED: Refactor raw SQL queries to use GORM structured methods** (High, 1d)
- [x] Identify all usages of raw SQL queries in repositories and sync jobs
- [x] Refactor syncEntities in syncjob/entities_sync.go to use GORM methods
- [x] Refactor any string-concatenated queries to parameterized GORM queries
- [x] Validate correctness and performance of refactored queries
- [x] Add unit tests for refactored query logic
- [x] Document query changes and migration steps
- [ ] Implement batching for Weaviate operations (Medium, 2d) - [ ] Implement batching for Weaviate operations (Medium, 2d)
- [x] **COMPLETED: Optimize linguistic analysis algorithms** (Medium, 2d)
- [x] Introduced clean NLP ports/adapters (`LanguageDetector`, `SentimentProvider`, `KeywordProvider`)
- [x] Integrated lingua-go (language detection) and GoVADER (sentiment) behind adapters
- [x] Added TF-IDF-based keyword provider (lightweight, state-free)
- [x] Bounded in-memory cache via LRU with config-driven capacity
- [x] Switched text cache keys to SHA-256 content hashes
- [x] Concurrent analysis: provider-aware and context-cancellable
- [x] Config toggles for providers and cache TTL
- [x] **COMPLETED: Add database indexes for frequently queried fields** (Medium, 1d)
- [x] Foreign key indexes for all relationships
- [x] Unique indexes for constraint enforcement
- [x] Timestamp indexes for sorting and filtering
- [x] Composite indexes for complex queries
- [x] Linguistic analysis indexes for performance
- [x] **COMPLETED: Implement Redis caching for hot data** (Medium, 2d)
## [x] Security Enhancements ## [ ] Security Enhancements
- [x] **COMPLETED: Implement password hashing in User model** (Critical, 1d)
- [x] bcrypt password hashing in BeforeSave hook
- [x] CheckPassword method for password verification
- [x] Automatic password hashing on model save
- [x] **COMPLETED: Move hardcoded credentials to environment variables/config** (Critical, 1d)
- [x] Fixed internal/cmd/enrich/main.go to use config package
- [x] Fixed internal/testutil/testutil.go to use config package
- [x] All database connections now use environment variables
- [ ] Add comprehensive input validation for all GraphQL mutations (High, 2d) - [ ] Add comprehensive input validation for all GraphQL mutations (High, 2d)
- [x] **COMPLETED: Implement rate limiting for API and background jobs** (High, 2d)
- [x] Rate limiting middleware implemented
- [x] Configuration for rate limits in config package
- [x] **COMPLETED: Replace raw SQL with safe query builders to prevent SQL injection** (Critical, 1d)
- [x] All repositories use GORM structured methods
- [x] No raw SQL queries in production code
## [ ] Code Quality & Architecture ## [ ] Code Quality & Architecture
- [x] Remove duplicate GraphQL folder and legacy server helper; keep single GraphQL layer under `graph/` for now
- [x] **REFACTORED: Split linguistics/analyzer.go into focused components** (Completed)
- [x] **COMPLETED: Clean NLP infrastructure and factory wiring**
- [x] Ports for NLP capabilities with SRP/DRY boundaries
- [x] Adapters for lingua-go and GoVADER with fallbacks
- [x] Factory respects config toggles and wires providers
- [x] Repository no longer leaks GORM into services; added methods for fetching work and analysis data
- [x] Created `linguistics/text_analyzer.go` - Pure text analysis logic
- [x] Created `linguistics/analysis_cache.go` - Caching logic with multiple strategies
- [x] Created `linguistics/analysis_repository.go` - Database operations
- [x] Created `linguistics/work_analysis_service.go` - Work-specific analysis coordination
- [x] Created `linguistics/types.go` - Shared data structures
- [x] Created `linguistics/text_utils.go` - Text processing utilities
- [x] Created `linguistics/factory.go` - Component factory with dependency injection
- [x] **REFACTORED: Split main.go into focused components** (Completed)
- [x] Created `internal/app/application_builder.go` - Application initialization
- [x] Created `internal/app/server_factory.go` - Server creation and configuration
- [x] Refactored `main.go` to use dependency injection and builders
- [x] **REFACTORED: Standardize repository implementation** (Completed)
- [x] Improved BaseRepository with comprehensive error handling, validation, logging, and transaction support
- [x] Removed GenericRepository wrapper (unnecessary duplication)
- [x] Updated CachedRepository to use BaseRepository interface
- [x] Refactored WorkRepository and UserRepository to use BaseRepository pattern
- [x] Updated WorkService to use context in all repository calls
- [x] Fixed GraphQL resolvers to use context for WorkRepository calls
- [x] **REFACTORED: All repositories completed!** (Author, Tag, Category, Translation, Comment, Like, Bookmark, Collection, Book, Publisher, Country, Place, City, Source, Edition, UserProfile, UserSession, EmailVerification, PasswordReset, Contribution, Copyright, CopyrightClaim, Monetization, Edge)
- [x] **COMPLETED: Updated mock repositories for testing**
- [x] **COMPLETED: Updated services to use context in repository calls**
- [x] **COMPLETED: Updated GraphQL resolvers to use context and handle pagination**
- [x] **COMPLETED: Fixed linguistics package model field mismatches**
- [x] **COMPLETED: Fixed application builder CopyrightRepository initialization**
- [x] **COMPLETED: Fixed server factory configuration and interface issues**
- [x] **COMPLETED: Removed all legacy code and interfaces**
- [x] **COMPLETED: Project builds successfully!**
- [x] **COMPLETED: Add a service layer for business logic and validation** (High, 2d)
- [x] Comprehensive validation in all service methods
- [x] Business logic separation from repositories
- [x] Input validation for all service operations
- [x] Refactor duplicate code in sync jobs (Medium, 1d)
- [x] **COMPLETED: Improve error handling with custom error types and propagation** (High, 2d)
- [x] Custom error types defined in BaseRepository
- [x] Error wrapping and propagation throughout codebase
- [x] Standardized error handling patterns
- [ ] Expand Weaviate client to support all models (Medium, 2d) - [ ] Expand Weaviate client to support all models (Medium, 2d)
- [ ] Add code documentation and API docs (Medium, 2d) - [ ] Add code documentation and API docs (Medium, 2d)
## [ ] Architecture Refactor (DDD-lite)
- [ ] Create skeleton packages: `cmd/`, `internal/platform/`, `internal/domain/`, `internal/app/`, `internal/data/`, `internal/adapters/graphql/`, `internal/jobs/`
- [x] Move infra to `internal/platform/*` (`config`, `db`, `cache`, `auth`, `http`, `log`, `search`)
- [ ] Wire DI in `cmd/api/main.go` and expose an `Application` facade to adapters
- [ ] Unify GraphQL under `internal/adapters/graphql` and update `gqlgen.yml`; move `schema.graphqls` and resolvers
- [ ] Resolvers call application services only; add dataloaders per aggregate
- [ ] Introduce Unit-of-Work: `platform/db.WithTx(ctx, func(ctx) error)` and repo factory for `*sql.DB` / `*sql.Tx`
- [ ] Split write vs read paths for `work` (commands.go, queries.go); make read models cacheable
- [ ] Replace bespoke cached repositories with decorators in `internal/data/cache` (reads only; deterministic invalidation)
- [ ] Restructure `models/*` into domain aggregates with constructors and invariants
- [ ] Adopt migrations tool (goose/atlas/migrate); move SQL to `internal/data/migrations`; delete `migrations.go`
- [ ] Observability: centralize logging; add Prometheus metrics and OpenTelemetry tracing; request IDs
- [ ] Config: replace ad-hoc config with env parsing + validation (e.g., koanf/envconfig); no globals
- [ ] Security: move JWT/middleware to `internal/platform/auth`; add authz policy helpers (e.g., `CanEditWork`)
- [ ] Search: move Weaviate client/schema to `internal/platform/search`, optional domain interface
- [ ] Background jobs: move to `cmd/worker` and `internal/jobs/*`; ensure idempotency and lease
- [ ] Python ops: move scripts to `/ops/migration` and `/ops/analysis`; keep outputs under `/ops/migration/outputs/`
- [ ] Cleanup: delete dead packages (`store`, duplicate `repositories`); consolidate to `internal/data/sql`
- [ ] CI: add `make lint test test-integration` and integration tests with Docker compose
## [ ] Testing ## [ ] Testing
- [ ] Add unit tests for all models, repositories, and services (High, 3d) - [ ] Add unit tests for all models, repositories, and services (High, 3d)
- [ ] Add integration tests for GraphQL API and background jobs (High, 3d) - [ ] Add integration tests for GraphQL API and background jobs (High, 3d)
- [ ] Add performance benchmarks for critical paths (Medium, 2d) - [ ] Add performance benchmarks for critical paths (Medium, 2d)
- [x] Added unit tests for linguistics adapters (lingua-go, GoVADER) and utilities
- [ ] Add benchmarks for text analysis (sequential vs concurrent) and cache hit/miss rates - [ ] Add benchmarks for text analysis (sequential vs concurrent) and cache hit/miss rates
## [x] Monitoring & Logging ## [ ] Monitoring & Logging
- [x] **COMPLETED: Integrate a structured logging framework** (Medium, 1d)
- [x] Structured logging implemented throughout codebase
- [x] Performance timing and debug logging in repositories
- [x] Error logging with context and structured fields
- [ ] Add monitoring for background jobs and API endpoints (Medium, 2d) - [ ] Add monitoring for background jobs and API endpoints (Medium, 2d)
- [ ] Add metrics for linguistics: analysis duration, cache hit/miss, provider usage - [ ] Add metrics for linguistics: analysis duration, cache hit/miss, provider usage
--- ## [ ] Next Objective Proposal
## Next Objective Proposal
- [ ] Stabilize non-linguistics tests and interfaces (High, 2d) - [ ] Stabilize non-linguistics tests and interfaces (High, 2d)
- [ ] Fix `graph` mocks to accept context in service interfaces - [ ] Fix `graph` mocks to accept context in service interfaces
@ -139,31 +61,6 @@
- [ ] Document NLP provider toggles and defaults in README/config docs - [ ] Document NLP provider toggles and defaults in README/config docs
- [ ] Describe SRP/DRY design and extension points for new providers - [ ] Describe SRP/DRY design and extension points for new providers
## [x] Security & Auth
- [x] **COMPLETED: Implement JWT authentication and role-based authorization** (High, 2d)
- [x] JWT token generation and validation with proper error handling
- [x] Role-based authorization with hierarchy (reader < contributor < reviewer < editor < admin)
- [x] Authentication middleware for GraphQL and HTTP with context validation
- [x] Login and registration mutations with comprehensive input validation
- [x] Password hashing with bcrypt (already implemented in User model)
- [x] Environment variable configuration for JWT with secure defaults
- [x] Comprehensive authentication service following SRP and clean code principles
- [x] Structured logging with proper error context and performance timing
- [x] Input sanitization and validation using govalidator
- [x] Context validation and proper error propagation
- [x] Integration with existing rate limiting system
- [x] GraphQL schema alignment with Go models
- [x] Comprehensive test coverage for authentication components
- [x] Production-ready error handling and security practices
- [x] **COMPLETED: Add rate limiting middleware** (High, 1d)
- [x] Rate limiting middleware implemented and tested
- [x] Configuration-driven rate limits
- [x] **COMPLETED: Use environment variables for all sensitive config** (Critical, 1d)
- [x] All database credentials use environment variables
- [x] Redis configuration uses environment variables
- [x] Centralized configuration management
--- ---
> TODO items include context, priority, and estimated effort. Update this list after each milestone. > TODO items include context, priority, and estimated effort. Update this list after each milestone.

6
go.mod
View File

@ -17,9 +17,10 @@ require (
github.com/vektah/gqlparser/v2 v2.5.26 github.com/vektah/gqlparser/v2 v2.5.26
github.com/weaviate/weaviate v1.30.2 github.com/weaviate/weaviate v1.30.2
github.com/weaviate/weaviate-go-client/v5 v5.1.0 github.com/weaviate/weaviate-go-client/v5 v5.1.0
go.uber.org/zap v1.27.0
golang.org/x/crypto v0.37.0 golang.org/x/crypto v0.37.0
gorm.io/driver/postgres v1.5.11 gorm.io/driver/postgres v1.5.11
gorm.io/gorm v1.26.0 gorm.io/gorm v1.30.0
) )
require ( require (
@ -49,6 +50,7 @@ require (
github.com/jinzhu/now v1.1.5 // indirect github.com/jinzhu/now v1.1.5 // indirect
github.com/josharian/intern v1.0.0 // indirect github.com/josharian/intern v1.0.0 // indirect
github.com/mailru/easyjson v0.7.7 // indirect github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-sqlite3 v1.14.22 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/oklog/ulid v1.3.1 // indirect github.com/oklog/ulid v1.3.1 // indirect
github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect
@ -63,6 +65,7 @@ require (
github.com/urfave/cli/v2 v2.27.6 // indirect github.com/urfave/cli/v2 v2.27.6 // indirect
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
go.mongodb.org/mongo-driver v1.14.0 // indirect go.mongodb.org/mongo-driver v1.14.0 // indirect
go.uber.org/multierr v1.10.0 // indirect
golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa // indirect golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa // indirect
golang.org/x/mod v0.24.0 // indirect golang.org/x/mod v0.24.0 // indirect
golang.org/x/net v0.39.0 // indirect golang.org/x/net v0.39.0 // indirect
@ -78,4 +81,5 @@ require (
google.golang.org/protobuf v1.36.6 // indirect google.golang.org/protobuf v1.36.6 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect
gorm.io/driver/sqlite v1.6.0 // indirect
) )

10
go.sum
View File

@ -163,6 +163,8 @@ github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE= github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE=
github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0=
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
@ -252,6 +254,10 @@ go.opentelemetry.io/otel/trace v1.33.0 h1:cCJuF7LRjUFso9LPnEAHJDB2pqzp+hbO8eu1qq
go.opentelemetry.io/otel/trace v1.33.0/go.mod h1:uIcdVUZMpTAmz0tI1z04GoVSezK37CbGV4fr1f2nBck= go.opentelemetry.io/otel/trace v1.33.0/go.mod h1:uIcdVUZMpTAmz0tI1z04GoVSezK37CbGV4fr1f2nBck=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ=
go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
@ -346,6 +352,10 @@ gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/postgres v1.5.11 h1:ubBVAfbKEUld/twyKZ0IYn9rSQh448EdelLYk9Mv314= gorm.io/driver/postgres v1.5.11 h1:ubBVAfbKEUld/twyKZ0IYn9rSQh448EdelLYk9Mv314=
gorm.io/driver/postgres v1.5.11/go.mod h1:DX3GReXH+3FPWGrrgffdvCk3DQ1dwDPdmbenSkweRGI= gorm.io/driver/postgres v1.5.11/go.mod h1:DX3GReXH+3FPWGrrgffdvCk3DQ1dwDPdmbenSkweRGI=
gorm.io/driver/sqlite v1.6.0 h1:WHRRrIiulaPiPFmDcod6prc4l2VGVWHz80KspNsxSfQ=
gorm.io/driver/sqlite v1.6.0/go.mod h1:AO9V1qIQddBESngQUKWL9yoH93HIeA1X6V633rBwyT8=
gorm.io/gorm v1.26.0 h1:9lqQVPG5aNNS6AyHdRiwScAVnXHg/L/Srzx55G5fOgs= gorm.io/gorm v1.26.0 h1:9lqQVPG5aNNS6AyHdRiwScAVnXHg/L/Srzx55G5fOgs=
gorm.io/gorm v1.26.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE= gorm.io/gorm v1.26.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=
gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs=
gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=

View File

@ -2,33 +2,20 @@ package graph_test
import ( import (
"bytes" "bytes"
"context"
"encoding/json" "encoding/json"
"fmt" "fmt"
"net/http" "net/http"
"net/http/httptest" "net/http/httptest"
"testing" "testing"
"context"
"tercul/graph" "tercul/graph"
"tercul/internal/testutil" "tercul/internal/testutil"
"tercul/models"
"tercul/services"
"github.com/99designs/gqlgen/graphql/handler" "github.com/99designs/gqlgen/graphql/handler"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
) )
// MockLocalizationService provides mock localization for tests
type MockLocalizationService struct{}
func (m *MockLocalizationService) GetWorkContent(ctx context.Context, workID uint, preferredLanguage string) (string, error) {
return "Test content", nil
}
func (m *MockLocalizationService) GetAuthorBiography(ctx context.Context, authorID uint, preferredLanguage string) (string, error) {
return "Test biography", nil
}
// GraphQLRequest represents a GraphQL request // GraphQLRequest represents a GraphQL request
type GraphQLRequest struct { type GraphQLRequest struct {
Query string `json:"query"` Query string `json:"query"`
@ -44,25 +31,18 @@ type GraphQLResponse struct {
// GraphQLIntegrationSuite is a test suite for GraphQL integration tests // GraphQLIntegrationSuite is a test suite for GraphQL integration tests
type GraphQLIntegrationSuite struct { type GraphQLIntegrationSuite struct {
testutil.BaseSuite testutil.SimpleTestSuite
server *httptest.Server server *httptest.Server
client *http.Client client *http.Client
workRepo *testutil.UnifiedMockWorkRepository // direct access to mock repo
} }
// SetupSuite sets up the test suite // SetupSuite sets up the test suite
func (s *GraphQLIntegrationSuite) SetupSuite() { func (s *GraphQLIntegrationSuite) SetupSuite() {
// Use in-memory/mock repositories and services // Use the simple test utilities
workRepo := &testutil.UnifiedMockWorkRepository{} s.SimpleTestSuite.SetupSuite()
workService := services.NewWorkService(workRepo, nil)
mockLocalization := &MockLocalizationService{}
resolver := &graph.Resolver{
WorkRepo: workRepo,
WorkService: workService,
Localization: mockLocalization,
}
// Create GraphQL server with the test resolver
resolver := s.GetResolver()
srv := handler.NewDefaultServer(graph.NewExecutableSchema(graph.Config{Resolvers: resolver})) srv := handler.NewDefaultServer(graph.NewExecutableSchema(graph.Config{Resolvers: resolver}))
s.server = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { s.server = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
@ -70,28 +50,16 @@ func (s *GraphQLIntegrationSuite) SetupSuite() {
})) }))
s.client = s.server.Client() s.client = s.server.Client()
s.workRepo = workRepo
} }
// TearDownSuite tears down the test suite // TearDownSuite tears down the test suite
func (s *GraphQLIntegrationSuite) TearDownSuite() { func (s *GraphQLIntegrationSuite) TearDownSuite() {
s.server.Close() s.server.Close()
s.BaseSuite.TearDownSuite()
} }
// SetupTest sets up each test // SetupTest sets up each test
func (s *GraphQLIntegrationSuite) SetupTest() { func (s *GraphQLIntegrationSuite) SetupTest() {
s.workRepo.Reset() s.SimpleTestSuite.SetupTest()
}
// createTestWork creates a test work
func (s *GraphQLIntegrationSuite) createTestWork(title, language string) *models.Work {
work := &models.Work{
Title: title,
}
work.Language = language // set via embedded TranslatableModel
s.workRepo.AddWork(work)
return work
} }
// executeGraphQL executes a GraphQL query // executeGraphQL executes a GraphQL query
@ -134,8 +102,8 @@ func (s *GraphQLIntegrationSuite) executeGraphQL(query string, variables map[str
// TestQueryWork tests the work query // TestQueryWork tests the work query
func (s *GraphQLIntegrationSuite) TestQueryWork() { func (s *GraphQLIntegrationSuite) TestQueryWork() {
// Create a test work // Create a test work with content
work := s.createTestWork("Test Work", "en") work := s.CreateTestWork("Test Work", "en", "Test content for work")
// Define the query // Define the query
query := ` query := `
@ -151,7 +119,7 @@ func (s *GraphQLIntegrationSuite) TestQueryWork() {
// Define the variables // Define the variables
variables := map[string]interface{}{ variables := map[string]interface{}{
"id": work.ID, "id": fmt.Sprintf("%d", work.ID),
} }
// Execute the query // Execute the query
@ -165,16 +133,16 @@ func (s *GraphQLIntegrationSuite) TestQueryWork() {
workData, ok := response.Data["work"].(map[string]interface{}) workData, ok := response.Data["work"].(map[string]interface{})
s.Require().True(ok, "GraphQL response should contain work data") s.Require().True(ok, "GraphQL response should contain work data")
s.Equal("Test Work", workData["name"], "Work name should match") s.Equal("Test Work", workData["name"], "Work name should match")
s.Equal("Test content", workData["content"], "Work content should match via localization") s.Equal("Test content for work", workData["content"], "Work content should match")
s.Equal("en", workData["language"], "Work language should match") s.Equal("en", workData["language"], "Work language should match")
} }
// TestQueryWorks tests the works query // TestQueryWorks tests the works query
func (s *GraphQLIntegrationSuite) TestQueryWorks() { func (s *GraphQLIntegrationSuite) TestQueryWorks() {
// Create test works // Create test works
work1 := s.createTestWork("Test Work 1", "en") work1 := s.CreateTestWork("Test Work 1", "en", "Test content for work 1")
work2 := s.createTestWork("Test Work 2", "en") work2 := s.CreateTestWork("Test Work 2", "en", "Test content for work 2")
work3 := s.createTestWork("Test Work 3", "fr") work3 := s.CreateTestWork("Test Work 3", "fr", "Test content for work 3")
// Define the query // Define the query
query := ` query := `
@ -183,6 +151,7 @@ func (s *GraphQLIntegrationSuite) TestQueryWorks() {
id id
name name
language language
content
} }
} }
` `
@ -197,7 +166,7 @@ func (s *GraphQLIntegrationSuite) TestQueryWorks() {
// Verify the response // Verify the response
worksData, ok := response.Data["works"].([]interface{}) worksData, ok := response.Data["works"].([]interface{})
s.Require().True(ok, "GraphQL response should contain works data") s.Require().True(ok, "GraphQL response should contain works data")
s.Equal(3, len(worksData), "GraphQL response should contain 3 works") s.True(len(worksData) >= 3, "GraphQL response should contain at least 3 works")
// Verify each work // Verify each work
foundWork1 := false foundWork1 := false
@ -208,18 +177,15 @@ func (s *GraphQLIntegrationSuite) TestQueryWorks() {
work, ok := workData.(map[string]interface{}) work, ok := workData.(map[string]interface{})
s.Require().True(ok, "Work data should be a map") s.Require().True(ok, "Work data should be a map")
id := work["id"].(string) // fix: treat id as string name := work["name"].(string)
if id == fmt.Sprintf("%d", work1.ID) { if name == "Test Work 1" {
foundWork1 = true foundWork1 = true
s.Equal("Test Work 1", work["name"], "Work 1 name should match")
s.Equal("en", work["language"], "Work 1 language should match") s.Equal("en", work["language"], "Work 1 language should match")
} else if id == fmt.Sprintf("%d", work2.ID) { } else if name == "Test Work 2" {
foundWork2 = true foundWork2 = true
s.Equal("Test Work 2", work["name"], "Work 2 name should match")
s.Equal("en", work["language"], "Work 2 language should match") s.Equal("en", work["language"], "Work 2 language should match")
} else if id == fmt.Sprintf("%d", work3.ID) { } else if name == "Test Work 3" {
foundWork3 = true foundWork3 = true
s.Equal("Test Work 3", work["name"], "Work 3 name should match")
s.Equal("fr", work["language"], "Work 3 language should match") s.Equal("fr", work["language"], "Work 3 language should match")
} }
} }
@ -229,12 +195,6 @@ func (s *GraphQLIntegrationSuite) TestQueryWorks() {
s.True(foundWork3, "GraphQL response should contain work 3") s.True(foundWork3, "GraphQL response should contain work 3")
} }
func stringToUint(s string) uint {
var id uint
fmt.Sscanf(s, "%d", &id)
return id
}
// TestCreateWork tests the createWork mutation // TestCreateWork tests the createWork mutation
func (s *GraphQLIntegrationSuite) TestCreateWork() { func (s *GraphQLIntegrationSuite) TestCreateWork() {
// Define the mutation // Define the mutation
@ -273,18 +233,20 @@ func (s *GraphQLIntegrationSuite) TestCreateWork() {
s.Equal("en", workData["language"], "Work language should match") s.Equal("en", workData["language"], "Work language should match")
s.Equal("New test content", workData["content"], "Work content should match") s.Equal("New test content", workData["content"], "Work content should match")
// Verify that the work was created in the mock repository // Verify that the work was created in the repository
var found *models.Work // Since we're using the real repository interface, we can query it
for _, w := range s.workRepo.Works { works, err := s.WorkRepo.ListAll(context.Background())
s.Require().NoError(err)
var found bool
for _, w := range works {
if w.Title == "New Test Work" { if w.Title == "New Test Work" {
found = w found = true
s.Equal("en", w.Language, "Work language should be set correctly")
break break
} }
} }
s.Require().NotNil(found) s.True(found, "Work should be created in repository")
s.Equal("New Test Work", found.Title)
s.Equal("en", found.Language)
// Content is not stored on Work model; translations hold content
} }
// TestGraphQLIntegrationSuite runs the test suite // TestGraphQLIntegrationSuite runs the test suite

View File

@ -1,7 +1,7 @@
package graph package graph
import ( import (
"tercul/repositories" repositories2 "tercul/internal/repositories"
"tercul/services" "tercul/services"
) )
@ -10,16 +10,16 @@ import (
// It serves as dependency injection for your app, add any dependencies you require here. // It serves as dependency injection for your app, add any dependencies you require here.
type Resolver struct { type Resolver struct {
WorkRepo repositories.WorkRepository WorkRepo repositories2.WorkRepository
UserRepo repositories.UserRepository UserRepo repositories2.UserRepository
AuthorRepo repositories.AuthorRepository AuthorRepo repositories2.AuthorRepository
TranslationRepo repositories.TranslationRepository TranslationRepo repositories2.TranslationRepository
CommentRepo repositories.CommentRepository CommentRepo repositories2.CommentRepository
LikeRepo repositories.LikeRepository LikeRepo repositories2.LikeRepository
BookmarkRepo repositories.BookmarkRepository BookmarkRepo repositories2.BookmarkRepository
CollectionRepo repositories.CollectionRepository CollectionRepo repositories2.CollectionRepository
TagRepo repositories.TagRepository TagRepo repositories2.TagRepository
CategoryRepo repositories.CategoryRepository CategoryRepo repositories2.CategoryRepository
WorkService services.WorkService WorkService services.WorkService
Localization services.LocalizationService Localization services.LocalizationService
AuthService services.AuthService AuthService services.AuthService

View File

@ -9,8 +9,7 @@ import (
"fmt" "fmt"
"strconv" "strconv"
"tercul/graph/model" "tercul/graph/model"
"tercul/internal/testutil" models2 "tercul/internal/models"
"tercul/models"
"tercul/services" "tercul/services"
) )
@ -81,16 +80,49 @@ func (r *mutationResolver) Login(ctx context.Context, email string, password str
// CreateWork is the resolver for the createWork field. // CreateWork is the resolver for the createWork field.
func (r *mutationResolver) CreateWork(ctx context.Context, input model.WorkInput) (*model.Work, error) { func (r *mutationResolver) CreateWork(ctx context.Context, input model.WorkInput) (*model.Work, error) {
work := &model.Work{ // Create work model
ID: fmt.Sprintf("%d", len(r.WorkRepo.(*testutil.UnifiedMockWorkRepository).Works)+1), work := &models2.Work{
Name: input.Name, Title: input.Name,
Language: input.Language,
Content: input.Content,
} }
m := &models.Work{Title: input.Name} work.Language = input.Language // Set language on the embedded TranslatableModel
m.Language = input.Language
r.WorkRepo.(*testutil.UnifiedMockWorkRepository).AddWork(m) // Create work using the work service
return work, nil err := r.WorkService.CreateWork(ctx, work)
if err != nil {
return nil, err
}
// If content is provided and TranslationRepo is available, create a translation for it
if input.Content != nil && *input.Content != "" && r.TranslationRepo != nil {
translation := &models2.Translation{
Title: input.Name,
Content: *input.Content,
Language: input.Language,
TranslatableID: work.ID,
TranslatableType: "Work",
IsOriginalLanguage: true,
}
err = r.TranslationRepo.Create(ctx, translation)
if err != nil {
return nil, fmt.Errorf("failed to create translation: %v", err)
}
}
// Return work with resolved content using the localization service
var content *string
if r.Localization != nil {
if resolvedContent, err := r.Localization.GetWorkContent(ctx, work.ID, input.Language); err == nil && resolvedContent != "" {
content = &resolvedContent
}
}
return &model.Work{
ID: fmt.Sprintf("%d", work.ID),
Name: work.Title,
Language: input.Language,
Content: content,
}, nil
} }
// UpdateWork is the resolver for the updateWork field. // UpdateWork is the resolver for the updateWork field.
@ -265,23 +297,34 @@ func (r *mutationResolver) ChangePassword(ctx context.Context, currentPassword s
// Work is the resolver for the work field. // Work is the resolver for the work field.
func (r *queryResolver) Work(ctx context.Context, id string) (*model.Work, error) { func (r *queryResolver) Work(ctx context.Context, id string) (*model.Work, error) {
for _, w := range r.WorkRepo.(*testutil.UnifiedMockWorkRepository).Works { // Parse ID to uint
if fmt.Sprintf("%d", w.ID) == id { workID, err := strconv.ParseUint(id, 10, 32)
// Content resolved via Localization service when requested later if err != nil {
return &model.Work{ return nil, fmt.Errorf("invalid work ID: %v", err)
ID: id,
Name: w.Title,
Language: w.Language,
Content: r.resolveWorkContent(ctx, w.ID, w.Language),
}, nil
}
} }
return nil, nil
// Get work by ID using repository
work, err := r.WorkRepo.GetByID(ctx, uint(workID))
if err != nil {
return nil, err
}
if work == nil {
return nil, nil
}
// Content resolved via Localization service when requested later
return &model.Work{
ID: id,
Name: work.Title,
Language: work.Language,
Content: r.resolveWorkContent(ctx, work.ID, work.Language),
}, nil
} }
// Works is the resolver for the works field. // Works is the resolver for the works field.
func (r *queryResolver) Works(ctx context.Context, limit *int32, offset *int32, language *string, authorID *string, categoryID *string, tagID *string, search *string) ([]*model.Work, error) { func (r *queryResolver) Works(ctx context.Context, limit *int32, offset *int32, language *string, authorID *string, categoryID *string, tagID *string, search *string) ([]*model.Work, error) {
var works []models.Work var works []models2.Work
var err error var err error
// Set default pagination // Set default pagination
@ -368,7 +411,7 @@ func (r *queryResolver) Author(ctx context.Context, id string) (*model.Author, e
// Authors is the resolver for the authors field. // Authors is the resolver for the authors field.
func (r *queryResolver) Authors(ctx context.Context, limit *int32, offset *int32, search *string, countryID *string) ([]*model.Author, error) { func (r *queryResolver) Authors(ctx context.Context, limit *int32, offset *int32, search *string, countryID *string) ([]*model.Author, error) {
var authors []models.Author var authors []models2.Author
var err error var err error
if countryID != nil { if countryID != nil {
@ -426,23 +469,23 @@ func (r *queryResolver) UserByUsername(ctx context.Context, username string) (*m
// Users is the resolver for the users field. // Users is the resolver for the users field.
func (r *queryResolver) Users(ctx context.Context, limit *int32, offset *int32, role *model.UserRole) ([]*model.User, error) { func (r *queryResolver) Users(ctx context.Context, limit *int32, offset *int32, role *model.UserRole) ([]*model.User, error) {
var users []models.User var users []models2.User
var err error var err error
if role != nil { if role != nil {
// Convert GraphQL role to model role // Convert GraphQL role to model role
var modelRole models.UserRole var modelRole models2.UserRole
switch *role { switch *role {
case model.UserRoleReader: case model.UserRoleReader:
modelRole = models.UserRoleReader modelRole = models2.UserRoleReader
case model.UserRoleContributor: case model.UserRoleContributor:
modelRole = models.UserRoleContributor modelRole = models2.UserRoleContributor
case model.UserRoleReviewer: case model.UserRoleReviewer:
modelRole = models.UserRoleReviewer modelRole = models2.UserRoleReviewer
case model.UserRoleEditor: case model.UserRoleEditor:
modelRole = models.UserRoleEditor modelRole = models2.UserRoleEditor
case model.UserRoleAdmin: case model.UserRoleAdmin:
modelRole = models.UserRoleAdmin modelRole = models2.UserRoleAdmin
default: default:
return nil, fmt.Errorf("invalid user role: %s", *role) return nil, fmt.Errorf("invalid user role: %s", *role)
} }
@ -465,15 +508,15 @@ func (r *queryResolver) Users(ctx context.Context, limit *int32, offset *int32,
// Convert model role to GraphQL role // Convert model role to GraphQL role
var graphqlRole model.UserRole var graphqlRole model.UserRole
switch u.Role { switch u.Role {
case models.UserRoleReader: case models2.UserRoleReader:
graphqlRole = model.UserRoleReader graphqlRole = model.UserRoleReader
case models.UserRoleContributor: case models2.UserRoleContributor:
graphqlRole = model.UserRoleContributor graphqlRole = model.UserRoleContributor
case models.UserRoleReviewer: case models2.UserRoleReviewer:
graphqlRole = model.UserRoleReviewer graphqlRole = model.UserRoleReviewer
case models.UserRoleEditor: case models2.UserRoleEditor:
graphqlRole = model.UserRoleEditor graphqlRole = model.UserRoleEditor
case models.UserRoleAdmin: case models2.UserRoleAdmin:
graphqlRole = model.UserRoleAdmin graphqlRole = model.UserRoleAdmin
default: default:
graphqlRole = model.UserRoleReader graphqlRole = model.UserRoleReader

View File

@ -3,19 +3,18 @@ package graph
import ( import (
"net/http" "net/http"
"tercul/internal/platform/auth"
"github.com/99designs/gqlgen/graphql/handler" "github.com/99designs/gqlgen/graphql/handler"
"github.com/99designs/gqlgen/graphql/playground"
"tercul/auth"
) )
// NewServer creates a new GraphQL server with the given resolver // NewServer creates a new GraphQL server with the given resolver
func NewServer(resolver *Resolver) http.Handler { func NewServer(resolver *Resolver) http.Handler {
srv := handler.NewDefaultServer(NewExecutableSchema(Config{Resolvers: resolver})) srv := handler.NewDefaultServer(NewExecutableSchema(Config{Resolvers: resolver}))
// Create a mux to handle both GraphQL and playground // Create a mux to handle GraphQL endpoint only (no playground here; served separately in production)
mux := http.NewServeMux() mux := http.NewServeMux()
mux.Handle("/query", srv) mux.Handle("/query", srv)
mux.Handle("/", playground.Handler("GraphQL playground", "/query"))
return mux return mux
} }
@ -27,10 +26,9 @@ func NewServerWithAuth(resolver *Resolver, jwtManager *auth.JWTManager) http.Han
// Apply authentication middleware to GraphQL endpoint // Apply authentication middleware to GraphQL endpoint
authHandler := auth.GraphQLAuthMiddleware(jwtManager)(srv) authHandler := auth.GraphQLAuthMiddleware(jwtManager)(srv)
// Create a mux to handle both GraphQL and playground // Create a mux to handle GraphQL endpoint only (no playground here; served separately in production)
mux := http.NewServeMux() mux := http.NewServeMux()
mux.Handle("/query", authHandler) mux.Handle("/query", authHandler)
mux.Handle("/", playground.Handler("GraphQL playground", "/query"))
return mux return mux
} }

View File

@ -1,12 +1,12 @@
package app package app
import ( import (
"tercul/cache" "tercul/internal/platform/cache"
"tercul/config" "tercul/internal/platform/config"
"tercul/db" "tercul/internal/platform/db"
"tercul/internal/platform/log"
repositories2 "tercul/internal/repositories"
"tercul/linguistics" "tercul/linguistics"
"tercul/logger"
"tercul/repositories"
"tercul/services" "tercul/services"
"time" "time"
@ -28,17 +28,17 @@ type ApplicationBuilder struct {
// RepositoryContainer holds all repository instances // RepositoryContainer holds all repository instances
type RepositoryContainer struct { type RepositoryContainer struct {
WorkRepository repositories.WorkRepository WorkRepository repositories2.WorkRepository
UserRepository repositories.UserRepository UserRepository repositories2.UserRepository
AuthorRepository repositories.AuthorRepository AuthorRepository repositories2.AuthorRepository
TranslationRepository repositories.TranslationRepository TranslationRepository repositories2.TranslationRepository
CommentRepository repositories.CommentRepository CommentRepository repositories2.CommentRepository
LikeRepository repositories.LikeRepository LikeRepository repositories2.LikeRepository
BookmarkRepository repositories.BookmarkRepository BookmarkRepository repositories2.BookmarkRepository
CollectionRepository repositories.CollectionRepository CollectionRepository repositories2.CollectionRepository
TagRepository repositories.TagRepository TagRepository repositories2.TagRepository
CategoryRepository repositories.CategoryRepository CategoryRepository repositories2.CategoryRepository
CopyrightRepository repositories.CopyrightRepository CopyrightRepository repositories2.CopyrightRepository
} }
// ServiceContainer holds all service instances // ServiceContainer holds all service instances
@ -56,38 +56,38 @@ func NewApplicationBuilder() *ApplicationBuilder {
// BuildDatabase initializes the database connection // BuildDatabase initializes the database connection
func (b *ApplicationBuilder) BuildDatabase() error { func (b *ApplicationBuilder) BuildDatabase() error {
logger.LogInfo("Initializing database connection") log.LogInfo("Initializing database connection")
dbConn, err := db.InitDB() dbConn, err := db.InitDB()
if err != nil { if err != nil {
logger.LogFatal("Failed to initialize database - application cannot start without database connection", log.LogFatal("Failed to initialize database - application cannot start without database connection",
logger.F("error", err), log.F("error", err),
logger.F("host", config.Cfg.DBHost), log.F("host", config.Cfg.DBHost),
logger.F("database", config.Cfg.DBName)) log.F("database", config.Cfg.DBName))
return err return err
} }
b.dbConn = dbConn b.dbConn = dbConn
logger.LogInfo("Database initialized successfully", log.LogInfo("Database initialized successfully",
logger.F("host", config.Cfg.DBHost), log.F("host", config.Cfg.DBHost),
logger.F("database", config.Cfg.DBName)) log.F("database", config.Cfg.DBName))
return nil return nil
} }
// BuildCache initializes the Redis cache // BuildCache initializes the Redis cache
func (b *ApplicationBuilder) BuildCache() error { func (b *ApplicationBuilder) BuildCache() error {
logger.LogInfo("Initializing Redis cache") log.LogInfo("Initializing Redis cache")
redisCache, err := cache.NewDefaultRedisCache() redisCache, err := cache.NewDefaultRedisCache()
if err != nil { if err != nil {
logger.LogWarn("Failed to initialize Redis cache, continuing without caching - performance may be degraded", log.LogWarn("Failed to initialize Redis cache, continuing without caching - performance may be degraded",
logger.F("error", err), log.F("error", err),
logger.F("redisAddr", config.Cfg.RedisAddr)) log.F("redisAddr", config.Cfg.RedisAddr))
} else { } else {
b.redisCache = redisCache b.redisCache = redisCache
logger.LogInfo("Redis cache initialized successfully", log.LogInfo("Redis cache initialized successfully",
logger.F("redisAddr", config.Cfg.RedisAddr)) log.F("redisAddr", config.Cfg.RedisAddr))
} }
return nil return nil
@ -95,32 +95,32 @@ func (b *ApplicationBuilder) BuildCache() error {
// BuildWeaviate initializes the Weaviate client // BuildWeaviate initializes the Weaviate client
func (b *ApplicationBuilder) BuildWeaviate() error { func (b *ApplicationBuilder) BuildWeaviate() error {
logger.LogInfo("Connecting to Weaviate", log.LogInfo("Connecting to Weaviate",
logger.F("host", config.Cfg.WeaviateHost), log.F("host", config.Cfg.WeaviateHost),
logger.F("scheme", config.Cfg.WeaviateScheme)) log.F("scheme", config.Cfg.WeaviateScheme))
wClient, err := weaviate.NewClient(weaviate.Config{ wClient, err := weaviate.NewClient(weaviate.Config{
Scheme: config.Cfg.WeaviateScheme, Scheme: config.Cfg.WeaviateScheme,
Host: config.Cfg.WeaviateHost, Host: config.Cfg.WeaviateHost,
}) })
if err != nil { if err != nil {
logger.LogFatal("Failed to create Weaviate client - vector search capabilities will not be available", log.LogFatal("Failed to create Weaviate client - vector search capabilities will not be available",
logger.F("error", err), log.F("error", err),
logger.F("host", config.Cfg.WeaviateHost), log.F("host", config.Cfg.WeaviateHost),
logger.F("scheme", config.Cfg.WeaviateScheme)) log.F("scheme", config.Cfg.WeaviateScheme))
return err return err
} }
b.weaviateClient = wClient b.weaviateClient = wClient
logger.LogInfo("Weaviate client initialized successfully") log.LogInfo("Weaviate client initialized successfully")
return nil return nil
} }
// BuildBackgroundJobs initializes Asynq for background job processing // BuildBackgroundJobs initializes Asynq for background job processing
func (b *ApplicationBuilder) BuildBackgroundJobs() error { func (b *ApplicationBuilder) BuildBackgroundJobs() error {
logger.LogInfo("Setting up background job processing", log.LogInfo("Setting up background job processing",
logger.F("redisAddr", config.Cfg.RedisAddr)) log.F("redisAddr", config.Cfg.RedisAddr))
redisOpt := asynq.RedisClientOpt{ redisOpt := asynq.RedisClientOpt{
Addr: config.Cfg.RedisAddr, Addr: config.Cfg.RedisAddr,
@ -131,41 +131,41 @@ func (b *ApplicationBuilder) BuildBackgroundJobs() error {
asynqClient := asynq.NewClient(redisOpt) asynqClient := asynq.NewClient(redisOpt)
b.asynqClient = asynqClient b.asynqClient = asynqClient
logger.LogInfo("Background job client initialized successfully") log.LogInfo("Background job client initialized successfully")
return nil return nil
} }
// BuildRepositories initializes all repositories // BuildRepositories initializes all repositories
func (b *ApplicationBuilder) BuildRepositories() error { func (b *ApplicationBuilder) BuildRepositories() error {
logger.LogInfo("Initializing repositories") log.LogInfo("Initializing repositories")
// Initialize base repositories // Initialize base repositories
baseWorkRepo := repositories.NewWorkRepository(b.dbConn) baseWorkRepo := repositories2.NewWorkRepository(b.dbConn)
userRepo := repositories.NewUserRepository(b.dbConn) userRepo := repositories2.NewUserRepository(b.dbConn)
authorRepo := repositories.NewAuthorRepository(b.dbConn) authorRepo := repositories2.NewAuthorRepository(b.dbConn)
translationRepo := repositories.NewTranslationRepository(b.dbConn) translationRepo := repositories2.NewTranslationRepository(b.dbConn)
commentRepo := repositories.NewCommentRepository(b.dbConn) commentRepo := repositories2.NewCommentRepository(b.dbConn)
likeRepo := repositories.NewLikeRepository(b.dbConn) likeRepo := repositories2.NewLikeRepository(b.dbConn)
bookmarkRepo := repositories.NewBookmarkRepository(b.dbConn) bookmarkRepo := repositories2.NewBookmarkRepository(b.dbConn)
collectionRepo := repositories.NewCollectionRepository(b.dbConn) collectionRepo := repositories2.NewCollectionRepository(b.dbConn)
tagRepo := repositories.NewTagRepository(b.dbConn) tagRepo := repositories2.NewTagRepository(b.dbConn)
categoryRepo := repositories.NewCategoryRepository(b.dbConn) categoryRepo := repositories2.NewCategoryRepository(b.dbConn)
copyrightRepo := repositories.NewCopyrightRepository(b.dbConn) copyrightRepo := repositories2.NewCopyrightRepository(b.dbConn)
// Wrap work repository with cache if available // Wrap work repository with cache if available
var workRepo repositories.WorkRepository var workRepo repositories2.WorkRepository
if b.redisCache != nil { if b.redisCache != nil {
workRepo = repositories.NewCachedWorkRepository( workRepo = repositories2.NewCachedWorkRepository(
baseWorkRepo, baseWorkRepo,
b.redisCache, b.redisCache,
nil, nil,
30*time.Minute, // Cache work data for 30 minutes 30*time.Minute, // Cache work data for 30 minutes
) )
logger.LogInfo("Using cached work repository") log.LogInfo("Using cached work repository")
} else { } else {
workRepo = baseWorkRepo workRepo = baseWorkRepo
logger.LogInfo("Using non-cached work repository") log.LogInfo("Using non-cached work repository")
} }
b.repositories = &RepositoryContainer{ b.repositories = &RepositoryContainer{
@ -182,14 +182,14 @@ func (b *ApplicationBuilder) BuildRepositories() error {
CopyrightRepository: copyrightRepo, CopyrightRepository: copyrightRepo,
} }
logger.LogInfo("Repositories initialized successfully") log.LogInfo("Repositories initialized successfully")
return nil return nil
} }
// BuildLinguistics initializes the linguistics components // BuildLinguistics initializes the linguistics components
func (b *ApplicationBuilder) BuildLinguistics() error { func (b *ApplicationBuilder) BuildLinguistics() error {
logger.LogInfo("Initializing linguistic analyzer") log.LogInfo("Initializing linguistic analyzer")
b.linguistics = linguistics.NewLinguisticsFactory( b.linguistics = linguistics.NewLinguisticsFactory(
b.dbConn, b.dbConn,
@ -198,14 +198,14 @@ func (b *ApplicationBuilder) BuildLinguistics() error {
true, // Cache enabled true, // Cache enabled
) )
logger.LogInfo("Linguistics components initialized successfully") log.LogInfo("Linguistics components initialized successfully")
return nil return nil
} }
// BuildServices initializes all services // BuildServices initializes all services
func (b *ApplicationBuilder) BuildServices() error { func (b *ApplicationBuilder) BuildServices() error {
logger.LogInfo("Initializing service layer") log.LogInfo("Initializing service layer")
workService := services.NewWorkService(b.repositories.WorkRepository, b.linguistics.GetAnalyzer()) workService := services.NewWorkService(b.repositories.WorkRepository, b.linguistics.GetAnalyzer())
copyrightService := services.NewCopyrightService(b.repositories.CopyrightRepository) copyrightService := services.NewCopyrightService(b.repositories.CopyrightRepository)
@ -219,7 +219,7 @@ func (b *ApplicationBuilder) BuildServices() error {
AuthService: authService, AuthService: authService,
} }
logger.LogInfo("Services initialized successfully") log.LogInfo("Services initialized successfully")
return nil return nil
} }
@ -255,7 +255,7 @@ func (b *ApplicationBuilder) Build() error {
return err return err
} }
logger.LogInfo("Application builder completed successfully") log.LogInfo("Application builder completed successfully")
return nil return nil
} }

View File

@ -2,11 +2,11 @@ package app
import ( import (
"net/http" "net/http"
"tercul/auth" "tercul/internal/platform/auth"
"tercul/config" "tercul/internal/platform/config"
"tercul/graph" "tercul/graph"
"tercul/linguistics" "tercul/linguistics"
"tercul/logger" "tercul/internal/platform/log"
"tercul/syncjob" "tercul/syncjob"
"github.com/99designs/gqlgen/graphql/playground" "github.com/99designs/gqlgen/graphql/playground"
@ -27,7 +27,7 @@ func NewServerFactory(appBuilder *ApplicationBuilder) *ServerFactory {
// CreateGraphQLServer creates and configures the GraphQL server // CreateGraphQLServer creates and configures the GraphQL server
func (f *ServerFactory) CreateGraphQLServer() (*http.Server, error) { func (f *ServerFactory) CreateGraphQLServer() (*http.Server, error) {
logger.LogInfo("Setting up GraphQL server") log.LogInfo("Setting up GraphQL server")
// Create GraphQL resolver with all dependencies // Create GraphQL resolver with all dependencies
resolver := &graph.Resolver{ resolver := &graph.Resolver{
@ -58,15 +58,15 @@ func (f *ServerFactory) CreateGraphQLServer() (*http.Server, error) {
Handler: srv, Handler: srv,
} }
logger.LogInfo("GraphQL server created successfully", log.LogInfo("GraphQL server created successfully",
logger.F("port", config.Cfg.ServerPort)) log.F("port", config.Cfg.ServerPort))
return httpServer, nil return httpServer, nil
} }
// CreateBackgroundJobServers creates and configures background job servers // CreateBackgroundJobServers creates and configures background job servers
func (f *ServerFactory) CreateBackgroundJobServers() ([]*asynq.Server, error) { func (f *ServerFactory) CreateBackgroundJobServers() ([]*asynq.Server, error) {
logger.LogInfo("Setting up background job servers") log.LogInfo("Setting up background job servers")
redisOpt := asynq.RedisClientOpt{ redisOpt := asynq.RedisClientOpt{
Addr: config.Cfg.RedisAddr, Addr: config.Cfg.RedisAddr,
@ -77,8 +77,8 @@ func (f *ServerFactory) CreateBackgroundJobServers() ([]*asynq.Server, error) {
var servers []*asynq.Server var servers []*asynq.Server
// Setup data synchronization server // Setup data synchronization server
logger.LogInfo("Setting up data synchronization server", log.LogInfo("Setting up data synchronization server",
logger.F("concurrency", config.Cfg.MaxRetries)) log.F("concurrency", config.Cfg.MaxRetries))
syncServer := asynq.NewServer(redisOpt, asynq.Config{Concurrency: config.Cfg.MaxRetries}) syncServer := asynq.NewServer(redisOpt, asynq.Config{Concurrency: config.Cfg.MaxRetries})
@ -93,8 +93,8 @@ func (f *ServerFactory) CreateBackgroundJobServers() ([]*asynq.Server, error) {
servers = append(servers, syncServer) servers = append(servers, syncServer)
// Setup linguistic analysis server // Setup linguistic analysis server
logger.LogInfo("Setting up linguistic analysis server", log.LogInfo("Setting up linguistic analysis server",
logger.F("concurrency", config.Cfg.MaxRetries)) log.F("concurrency", config.Cfg.MaxRetries))
// Create linguistic sync job // Create linguistic sync job
linguisticSyncJob := linguistics.NewLinguisticSyncJob( linguisticSyncJob := linguistics.NewLinguisticSyncJob(
@ -114,15 +114,15 @@ func (f *ServerFactory) CreateBackgroundJobServers() ([]*asynq.Server, error) {
// This is a temporary workaround - in production, you'd want to properly configure the server // This is a temporary workaround - in production, you'd want to properly configure the server
servers = append(servers, linguisticServer) servers = append(servers, linguisticServer)
logger.LogInfo("Background job servers created successfully", log.LogInfo("Background job servers created successfully",
logger.F("serverCount", len(servers))) log.F("serverCount", len(servers)))
return servers, nil return servers, nil
} }
// CreatePlaygroundServer creates the GraphQL playground server // CreatePlaygroundServer creates the GraphQL playground server
func (f *ServerFactory) CreatePlaygroundServer() *http.Server { func (f *ServerFactory) CreatePlaygroundServer() *http.Server {
logger.LogInfo("Setting up GraphQL playground") log.LogInfo("Setting up GraphQL playground")
playgroundHandler := playground.Handler("GraphQL", "/query") playgroundHandler := playground.Handler("GraphQL", "/query")
@ -131,8 +131,8 @@ func (f *ServerFactory) CreatePlaygroundServer() *http.Server {
Handler: playgroundHandler, Handler: playgroundHandler,
} }
logger.LogInfo("GraphQL playground created successfully", log.LogInfo("GraphQL playground created successfully",
logger.F("port", config.Cfg.PlaygroundPort)) log.F("port", config.Cfg.PlaygroundPort))
return playgroundServer return playgroundServer
} }

View File

@ -10,9 +10,9 @@ import (
"gorm.io/driver/postgres" "gorm.io/driver/postgres"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/config"
"tercul/internal/enrich" "tercul/internal/enrich"
"tercul/internal/store" "tercul/internal/store"
"tercul/internal/platform/config"
) )
func main() { func main() {

View File

@ -0,0 +1,27 @@
package enrich
import "testing"
// TestKeywordExtractor_Basic verifies that Extract returns ranked keywords,
// filters common stop words, and assigns a positive relevance to each result.
func TestKeywordExtractor_Basic(t *testing.T) {
	extractor := NewKeywordExtractor()
	sample := Text{Body: "The quick brown fox jumps over the lazy dog. The quick brown fox!"}

	keywords, err := extractor.Extract(sample)
	if err != nil {
		t.Fatalf("Extract returned error: %v", err)
	}
	if len(keywords) == 0 {
		t.Fatalf("expected some keywords, got 0")
	}

	// The top-ranked keyword should be one of the frequent content words.
	top := keywords[0].Text
	if top != "quick" && top != "brown" && top != "fox" {
		t.Errorf("expected a content word as top keyword, got %q", keywords[0].Text)
	}

	for _, kw := range keywords {
		switch kw.Text {
		case "the", "over":
			t.Errorf("stop word %q should be filtered out", kw.Text)
		}
		if kw.Relevance <= 0 {
			t.Errorf("keyword %q has non-positive relevance", kw.Text)
		}
	}
}

View File

@ -0,0 +1,48 @@
package enrich
import "testing"
// TestLanguageDetector_Detect_EN checks that a body of English function
// words is detected as "en" with a positive confidence score.
func TestLanguageDetector_Detect_EN(t *testing.T) {
	detector := NewLanguageDetector()

	lang, conf, err := detector.Detect(Text{Body: " the and is in to of that for the "})
	if err != nil {
		t.Fatalf("Detect returned error: %v", err)
	}
	if lang != "en" {
		t.Fatalf("expected language 'en', got %q", lang)
	}
	if conf <= 0 {
		t.Errorf("expected positive confidence, got %f", conf)
	}
}
// TestLanguageDetector_Detect_ES checks that Spanish function words are
// detected as "es"; confidence and error are not asserted here.
func TestLanguageDetector_Detect_ES(t *testing.T) {
	detector := NewLanguageDetector()
	body := Text{Body: " el la es en de que por para el "}
	lang, _, _ := detector.Detect(body)
	if lang != "es" {
		t.Fatalf("expected language 'es', got %q", lang)
	}
}
// TestLanguageDetector_Detect_FR checks that French function words are
// detected as "fr"; confidence and error are not asserted here.
func TestLanguageDetector_Detect_FR(t *testing.T) {
	detector := NewLanguageDetector()
	body := Text{Body: " le la est en de que pour dans le "}
	lang, _, _ := detector.Detect(body)
	if lang != "fr" {
		t.Fatalf("expected language 'fr', got %q", lang)
	}
}
// TestLanguageDetector_Detect_DefaultEnglish verifies that text matching no
// language profile falls back to English with the fixed 0.5 confidence.
func TestLanguageDetector_Detect_DefaultEnglish(t *testing.T) {
	detector := NewLanguageDetector()

	// Balanced/unknown should default to English per implementation
	lang, conf, err := detector.Detect(Text{Body: " lorem ipsum dolor sit amet "})
	if err != nil {
		t.Fatalf("Detect returned error: %v", err)
	}
	if lang != "en" {
		t.Fatalf("expected default language 'en', got %q", lang)
	}
	if conf != 0.5 {
		t.Errorf("expected default confidence 0.5, got %f", conf)
	}
}

View File

@ -0,0 +1,47 @@
package enrich
import "testing"
// TestLemmatizer_English verifies English lemmatization across common
// inflection patterns: an irregular verb, -ing, -ies, a plural, and -ed.
// (Reformatted to be gofmt-clean: `[]struct {` spacing.)
func TestLemmatizer_English(t *testing.T) {
	l := NewLemmatizer()
	cases := []struct {
		in, want string
	}{
		{"Went", "go"},
		{"running", "run"},
		{"studies", "study"},
		{"cars", "car"},
		{"looked", "look"},
	}
	for _, c := range cases {
		got, err := l.Lemma(c.in, "en")
		if err != nil {
			t.Fatalf("Lemma returned error: %v", err)
		}
		if got != c.want {
			t.Errorf("Lemma(%q) = %q; want %q", c.in, got, c.want)
		}
	}
}
// TestLemmatizer_Spanish verifies stemming of a Spanish gerund form.
func TestLemmatizer_Spanish(t *testing.T) {
	lem := NewLemmatizer()
	stem, err := lem.Lemma("hablando", "es")
	if err != nil {
		t.Fatalf("Lemma returned error: %v", err)
	}
	if stem != "habl" {
		t.Errorf("Lemma(hablando) = %q; want 'habl'", stem)
	}
}
// TestLemmatizer_French verifies stemming of a French conjugated verb.
func TestLemmatizer_French(t *testing.T) {
	lem := NewLemmatizer()
	stem, err := lem.Lemma("parlent", "fr")
	if err != nil {
		t.Fatalf("Lemma returned error: %v", err)
	}
	if stem != "parl" {
		t.Errorf("Lemma(parlent) = %q; want 'parl'", stem)
	}
}

View File

@ -0,0 +1,43 @@
package enrich
import "testing"
// TestPhoneticEncoder_Soundex checks known Soundex codes for reference
// names, plus the zero-padding property for a single-letter input.
func TestPhoneticEncoder_Soundex(t *testing.T) {
	enc := NewPhoneticEncoder()

	cases := map[string]string{
		"Robert":   "R163",
		"Ashcraft": "A261",
	}
	for in, want := range cases {
		if got := enc.Encode(in); got != want {
			t.Errorf("Encode(%q) = %q; want %q", in, got, want)
		}
	}

	// property checks
	if got := enc.Encode("P"); got != "P000" {
		t.Errorf("Encode(P) = %q; want P000", got)
	}
}
// TestPhoneticEncoder_DoubleMetaphoneVariation checks the primary code for
// "Robert" and that the secondary code is a distinct, same-length variant.
func TestPhoneticEncoder_DoubleMetaphoneVariation(t *testing.T) {
	enc := NewPhoneticEncoder()
	primary, secondary := enc.DoubleMetaphone("Robert")
	if primary != "R163" {
		t.Fatalf("primary code = %q; want R163", primary)
	}
	if secondary == primary || len(secondary) != len(primary) {
		t.Errorf("secondary variation should differ but have same length: p=%q s=%q", primary, secondary)
	}
}
// TestPhoneticEncoder_Empty verifies that an empty input string produces
// empty codes from both Encode and DoubleMetaphone.
func TestPhoneticEncoder_Empty(t *testing.T) {
	enc := NewPhoneticEncoder()
	if got := enc.Encode(""); got != "" {
		t.Errorf("Encode(\"\") = %q; want empty", got)
	}
	if p, s := enc.DoubleMetaphone(""); p != "" || s != "" {
		t.Errorf("DoubleMetaphone(\"\") = (%q,%q); want empty codes", p, s)
	}
}

View File

@ -0,0 +1,32 @@
package enrich
import "testing"
// TestPoeticAnalyzer_QuatrainABAB analyses a single four-line stanza and
// checks the detected rhyme scheme, stanza/line counts, structure label,
// and that some meter type was determined.
func TestPoeticAnalyzer_QuatrainABAB(t *testing.T) {
	poem := `In silver light we wander far, light
A gentle breeze across the sea, breeze
At quiet dusk we find a star, night
And in the dark we feel the freeze.`
	// Last words: light, breeze, night, freeze -> ABAB by last 2 letters (ht, ze, ht, ze)

	analyzer := NewPoeticAnalyzer()
	metrics, err := analyzer.Analyse(Text{Body: poem})
	if err != nil {
		t.Fatalf("Analyse returned error: %v", err)
	}

	if metrics.RhymeScheme != "ABAB" {
		t.Errorf("expected rhyme scheme ABAB, got %q", metrics.RhymeScheme)
	}
	if metrics.StanzaCount != 1 {
		t.Errorf("expected 1 stanza, got %d", metrics.StanzaCount)
	}
	if metrics.LineCount != 4 {
		t.Errorf("expected 4 lines, got %d", metrics.LineCount)
	}
	if metrics.Structure != "Quatrain" {
		t.Errorf("expected structure Quatrain, got %q", metrics.Structure)
	}
	// Meter is heuristic; just ensure it's determined (not Unknown)
	if metrics.MeterType == "Unknown" {
		t.Errorf("expected a determined meter type, got %q", metrics.MeterType)
	}
}

View File

@ -0,0 +1,29 @@
package enrich
import "testing"
// TestPOSTagger_Tag_Basic verifies that the tagger assigns the expected
// part-of-speech tag to one token of each rule category.
// (Reformatted to be gofmt-clean: comment spacing/alignment in the literal.)
func TestPOSTagger_Tag_Basic(t *testing.T) {
	pos := NewPOSTagger()
	tokens := []Token{
		{Text: "the"},     // DET
		{Text: "great"},   // ADJ (in common adjectives)
		{Text: "fox"},     // default NOUN
		{Text: "jumps"},   // VERB by suffix
		{Text: "quickly"}, // ADV by -ly
		{Text: "over"},    // PREP
		{Text: "him"},     // PRON
	}
	tags, err := pos.Tag(tokens)
	if err != nil {
		t.Fatalf("Tag returned error: %v", err)
	}
	expected := []string{"DET", "ADJ", "NOUN", "VERB", "ADV", "PREP", "PRON"}
	if len(tags) != len(expected) {
		t.Fatalf("expected %d tags, got %d: %#v", len(expected), len(tags), tags)
	}
	for i := range expected {
		if tags[i] != expected[i] {
			t.Errorf("tag %d: expected %s, got %s", i, expected[i], tags[i])
		}
	}
}

View File

@ -0,0 +1,58 @@
package enrich
import "testing"
func TestTokenizer_Basic(t *testing.T) {
text := Text{Body: "Hello, world! Go1 is great."}
tok := NewTokenizer()
tokens, err := tok.Tokenize(text)
if err != nil {
t.Fatalf("Tokenize returned error: %v", err)
}
expected := []string{"Hello", "world", "Go1", "is", "great"}
if len(tokens) != len(expected) {
t.Fatalf("expected %d tokens, got %d: %#v", len(expected), len(tokens), tokens)
}
for i, e := range expected {
if tokens[i].Text != e {
t.Errorf("token %d text: expected %q, got %q", i, e, tokens[i].Text)
}
if tokens[i].Position != i {
t.Errorf("token %d position: expected %d, got %d", i, i, tokens[i].Position)
}
if tokens[i].Length != len(e) {
t.Errorf("token %d length: expected %d, got %d", i, len(e), tokens[i].Length)
}
}
}
func TestTokenizer_UnicodeAndPunctuation(t *testing.T) {
text := Text{Body: "Привет, мир! — hello?"}
tok := NewTokenizer()
tokens, err := tok.Tokenize(text)
if err != nil {
t.Fatalf("Tokenize returned error: %v", err)
}
expected := []string{"Привет", "мир", "hello"}
if len(tokens) != len(expected) {
t.Fatalf("expected %d tokens, got %d: %#v", len(expected), len(tokens), tokens)
}
for i, e := range expected {
if tokens[i].Text != e {
t.Errorf("token %d text: expected %q, got %q", i, e, tokens[i].Text)
}
}
}
// TestTokenizer_Empty verifies that whitespace-only input yields no tokens.
func TestTokenizer_Empty(t *testing.T) {
	tok := NewTokenizer()
	tokens, err := tok.Tokenize(Text{Body: " \t\n "})
	if err != nil {
		t.Fatalf("Tokenize returned error: %v", err)
	}
	if n := len(tokens); n != 0 {
		t.Fatalf("expected 0 tokens for whitespace-only input, got %d", n)
	}
}

View File

@ -8,7 +8,7 @@ type WorkStats struct {
Comments int64 `gorm:"default:0"` Comments int64 `gorm:"default:0"`
Bookmarks int64 `gorm:"default:0"` Bookmarks int64 `gorm:"default:0"`
Shares int64 `gorm:"default:0"` Shares int64 `gorm:"default:0"`
WorkID uint WorkID uint `gorm:"uniqueIndex;index"`
Work *Work `gorm:"foreignKey:WorkID"` Work *Work `gorm:"foreignKey:WorkID"`
} }
@ -19,7 +19,7 @@ type TranslationStats struct {
Likes int64 `gorm:"default:0"` Likes int64 `gorm:"default:0"`
Comments int64 `gorm:"default:0"` Comments int64 `gorm:"default:0"`
Shares int64 `gorm:"default:0"` Shares int64 `gorm:"default:0"`
TranslationID uint TranslationID uint `gorm:"uniqueIndex;index"`
Translation *Translation `gorm:"foreignKey:TranslationID"` Translation *Translation `gorm:"foreignKey:TranslationID"`
} }
@ -32,7 +32,7 @@ type UserStats struct {
Comments int64 `gorm:"default:0"` // Number of comments posted Comments int64 `gorm:"default:0"` // Number of comments posted
Likes int64 `gorm:"default:0"` // Number of likes given Likes int64 `gorm:"default:0"` // Number of likes given
Bookmarks int64 `gorm:"default:0"` // Number of bookmarks created Bookmarks int64 `gorm:"default:0"` // Number of bookmarks created
UserID uint UserID uint `gorm:"uniqueIndex;index"`
User *User `gorm:"foreignKey:UserID"` User *User `gorm:"foreignKey:UserID"`
} }
@ -42,7 +42,7 @@ type BookStats struct {
Sales int64 `gorm:"default:0"` Sales int64 `gorm:"default:0"`
Views int64 `gorm:"default:0"` Views int64 `gorm:"default:0"`
Likes int64 `gorm:"default:0"` Likes int64 `gorm:"default:0"`
BookID uint BookID uint `gorm:"uniqueIndex;index"`
Book *Book `gorm:"foreignKey:BookID"` Book *Book `gorm:"foreignKey:BookID"`
} }
@ -52,7 +52,7 @@ type CollectionStats struct {
Items int64 `gorm:"default:0"` // Number of works in the collection Items int64 `gorm:"default:0"` // Number of works in the collection
Views int64 `gorm:"default:0"` Views int64 `gorm:"default:0"`
Likes int64 `gorm:"default:0"` Likes int64 `gorm:"default:0"`
CollectionID uint CollectionID uint `gorm:"uniqueIndex;index"`
Collection *Collection `gorm:"foreignKey:CollectionID"` Collection *Collection `gorm:"foreignKey:CollectionID"`
} }
@ -62,6 +62,6 @@ type MediaStats struct {
Views int64 `gorm:"default:0"` Views int64 `gorm:"default:0"`
Downloads int64 `gorm:"default:0"` Downloads int64 `gorm:"default:0"`
Shares int64 `gorm:"default:0"` Shares int64 `gorm:"default:0"`
MediaID uint MediaID uint `gorm:"uniqueIndex;index"`
Media interface{} `gorm:"-"` // This would be a pointer to a Media type if it existed Media interface{} `gorm:"-"` // This would be a pointer to a Media type if it existed
} }

View File

@ -20,11 +20,26 @@ func (j JSONB) Value() (driver.Value, error) {
// Scan unmarshals a JSONB value. // Scan unmarshals a JSONB value.
func (j *JSONB) Scan(value interface{}) error { func (j *JSONB) Scan(value interface{}) error {
bytes, ok := value.([]byte) if value == nil {
if !ok { *j = JSONB{}
return fmt.Errorf("failed to unmarshal JSONB value: %v", value) return nil
}
switch v := value.(type) {
case []byte:
if len(v) == 0 {
*j = JSONB{}
return nil
}
return json.Unmarshal(v, j)
case string:
if v == "" {
*j = JSONB{}
return nil
}
return json.Unmarshal([]byte(v), j)
default:
return fmt.Errorf("failed to unmarshal JSONB value of type %T: %v", value, value)
} }
return json.Unmarshal(bytes, j)
} }
// BaseModel contains common fields for all models // BaseModel contains common fields for all models

View File

@ -26,13 +26,13 @@ type Comment struct {
// Like represents a user like on a work, translation, or comment // Like represents a user like on a work, translation, or comment
type Like struct { type Like struct {
BaseModel BaseModel
UserID uint UserID uint `gorm:"index;uniqueIndex:uniq_like_user_target"`
User *User `gorm:"foreignKey:UserID"` User *User `gorm:"foreignKey:UserID"`
WorkID *uint WorkID *uint `gorm:"index;uniqueIndex:uniq_like_user_target"`
Work *Work `gorm:"foreignKey:WorkID"` Work *Work `gorm:"foreignKey:WorkID"`
TranslationID *uint TranslationID *uint `gorm:"index;uniqueIndex:uniq_like_user_target"`
Translation *Translation `gorm:"foreignKey:TranslationID"` Translation *Translation `gorm:"foreignKey:TranslationID"`
CommentID *uint CommentID *uint `gorm:"index;uniqueIndex:uniq_like_user_target"`
Comment *Comment `gorm:"foreignKey:CommentID"` Comment *Comment `gorm:"foreignKey:CommentID"`
} }
@ -40,9 +40,9 @@ type Like struct {
type Bookmark struct { type Bookmark struct {
BaseModel BaseModel
Name string `gorm:"size:100"` Name string `gorm:"size:100"`
UserID uint UserID uint `gorm:"index;uniqueIndex:uniq_bookmark_user_work"`
User *User `gorm:"foreignKey:UserID"` User *User `gorm:"foreignKey:UserID"`
WorkID uint WorkID uint `gorm:"index;uniqueIndex:uniq_bookmark_user_work"`
Work *Work `gorm:"foreignKey:WorkID"` Work *Work `gorm:"foreignKey:WorkID"`
Notes string `gorm:"type:text"` Notes string `gorm:"type:text"`
LastReadAt *time.Time LastReadAt *time.Time

View File

@ -3,9 +3,9 @@ package models
// BookWork represents the many-to-many relationship between books and works // BookWork represents the many-to-many relationship between books and works
type BookWork struct { type BookWork struct {
BaseModel BaseModel
BookID uint BookID uint `gorm:"index;uniqueIndex:uniq_book_work"`
Book *Book `gorm:"foreignKey:BookID"` Book *Book `gorm:"foreignKey:BookID"`
WorkID uint WorkID uint `gorm:"index;uniqueIndex:uniq_book_work"`
Work *Work `gorm:"foreignKey:WorkID"` Work *Work `gorm:"foreignKey:WorkID"`
Order int `gorm:"default:0"` // For ordering works in books Order int `gorm:"default:0"` // For ordering works in books
} }
@ -13,30 +13,30 @@ type BookWork struct {
// AuthorCountry represents the many-to-many relationship between authors and countries // AuthorCountry represents the many-to-many relationship between authors and countries
type AuthorCountry struct { type AuthorCountry struct {
BaseModel BaseModel
AuthorID uint AuthorID uint `gorm:"index;uniqueIndex:uniq_author_country"`
Author *Author `gorm:"foreignKey:AuthorID"` Author *Author `gorm:"foreignKey:AuthorID"`
CountryID uint CountryID uint `gorm:"index;uniqueIndex:uniq_author_country"`
Country *Country `gorm:"foreignKey:CountryID"` Country *Country `gorm:"foreignKey:CountryID"`
} }
// WorkAuthor represents authorship with role and order for a work // WorkAuthor represents authorship with role and order for a work
type WorkAuthor struct { type WorkAuthor struct {
BaseModel BaseModel
WorkID uint WorkID uint `gorm:"index;uniqueIndex:uniq_work_author_role"`
Work *Work `gorm:"foreignKey:WorkID"` Work *Work `gorm:"foreignKey:WorkID"`
AuthorID uint AuthorID uint `gorm:"index;uniqueIndex:uniq_work_author_role"`
Author *Author `gorm:"foreignKey:AuthorID"` Author *Author `gorm:"foreignKey:AuthorID"`
Role string `gorm:"size:50;default:'author'"` Role string `gorm:"size:50;default:'author';uniqueIndex:uniq_work_author_role"`
Ordinal int `gorm:"default:0"` Ordinal int `gorm:"default:0"`
} }
// BookAuthor represents book-level contributor role and order // BookAuthor represents book-level contributor role and order
type BookAuthor struct { type BookAuthor struct {
BaseModel BaseModel
BookID uint BookID uint `gorm:"index;uniqueIndex:uniq_book_author_role"`
Book *Book `gorm:"foreignKey:BookID"` Book *Book `gorm:"foreignKey:BookID"`
AuthorID uint AuthorID uint `gorm:"index;uniqueIndex:uniq_book_author_role"`
Author *Author `gorm:"foreignKey:AuthorID"` Author *Author `gorm:"foreignKey:AuthorID"`
Role string `gorm:"size:50;default:'author'"` Role string `gorm:"size:50;default:'author';uniqueIndex:uniq_book_author_role"`
Ordinal int `gorm:"default:0"` Ordinal int `gorm:"default:0"`
} }

View File

@ -23,13 +23,13 @@ type WritingStyle struct {
// LinguisticLayer represents a linguistic layer of analysis // LinguisticLayer represents a linguistic layer of analysis
type LinguisticLayer struct { type LinguisticLayer struct {
BaseModel BaseModel
Name string `gorm:"size:100;not null"` Name string `gorm:"size:100;not null"`
Description string `gorm:"type:text"` Description string `gorm:"type:text"`
Language string `gorm:"size:50;not null"` Language string `gorm:"size:50;not null"`
Type string `gorm:"size:50"` // e.g., morphological, syntactic, semantic, etc. Type string `gorm:"size:50"` // e.g., morphological, syntactic, semantic, etc.
WorkID uint WorkID uint
Work *Work `gorm:"foreignKey:WorkID"` Work *Work `gorm:"foreignKey:WorkID"`
Data JSONB `gorm:"type:jsonb;default:'{}'"` Data JSONB `gorm:"type:jsonb;default:'{}'"`
} }
// TextBlock represents a fine-grained unit of text // TextBlock represents a fine-grained unit of text
@ -91,8 +91,8 @@ type WordOccurrence struct {
TextBlockID uint TextBlockID uint
TextBlock *TextBlock `gorm:"foreignKey:TextBlockID"` TextBlock *TextBlock `gorm:"foreignKey:TextBlockID"`
WordID *uint WordID *uint
Word *Word `gorm:"foreignKey:WordID"` Word *Word `gorm:"foreignKey:WordID"`
StartOffset int `gorm:"default:0"` StartOffset int `gorm:"default:0"`
EndOffset int `gorm:"default:0"` EndOffset int `gorm:"default:0"`
Lemma string `gorm:"size:100"` Lemma string `gorm:"size:100"`
PartOfSpeech string `gorm:"size:20"` PartOfSpeech string `gorm:"size:20"`

View File

@ -7,35 +7,35 @@ import (
// LanguageAnalysis represents language analysis for a work // LanguageAnalysis represents language analysis for a work
type LanguageAnalysis struct { type LanguageAnalysis struct {
BaseModel BaseModel
Language string `gorm:"size:50;not null"` Language string `gorm:"size:50;not null;uniqueIndex:uniq_work_language_analysis"`
Analysis JSONB `gorm:"type:jsonb;default:'{}'"` Analysis JSONB `gorm:"type:jsonb;default:'{}'"`
WorkID uint WorkID uint `gorm:"index;uniqueIndex:uniq_work_language_analysis"`
Work *Work `gorm:"foreignKey:WorkID"` Work *Work `gorm:"foreignKey:WorkID"`
} }
// Gamification represents gamification elements for a user // Gamification represents gamification elements for a user
type Gamification struct { type Gamification struct {
BaseModel BaseModel
Points int `gorm:"default:0"` Points int `gorm:"default:0"`
Level int `gorm:"default:1"` Level int `gorm:"default:1"`
Badges JSONB `gorm:"type:jsonb;default:'{}'"` Badges JSONB `gorm:"type:jsonb;default:'{}'"`
Streaks int `gorm:"default:0"` Streaks int `gorm:"default:0"`
LastActive *time.Time LastActive *time.Time
UserID uint UserID uint `gorm:"uniqueIndex;index"`
User *User `gorm:"foreignKey:UserID"` User *User `gorm:"foreignKey:UserID"`
} }
// Stats represents general statistics // Stats represents general statistics
type Stats struct { type Stats struct {
BaseModel BaseModel
Data JSONB `gorm:"type:jsonb;default:'{}'"` Data JSONB `gorm:"type:jsonb;default:'{}'"`
Period string `gorm:"size:50"` // e.g., daily, weekly, monthly, etc. Period string `gorm:"size:50"` // e.g., daily, weekly, monthly, etc.
StartDate time.Time StartDate time.Time
EndDate time.Time EndDate time.Time
UserID *uint UserID *uint
User *User `gorm:"foreignKey:UserID"` User *User `gorm:"foreignKey:UserID"`
WorkID *uint WorkID *uint
Work *Work `gorm:"foreignKey:WorkID"` Work *Work `gorm:"foreignKey:WorkID"`
} }
// SearchDocument is a denormalized text representation for indexing // SearchDocument is a denormalized text representation for indexing

View File

@ -32,9 +32,9 @@ type Series struct {
// WorkSeries is a join capturing a work's position in a series // WorkSeries is a join capturing a work's position in a series
type WorkSeries struct { type WorkSeries struct {
BaseModel BaseModel
WorkID uint WorkID uint `gorm:"index;uniqueIndex:uniq_work_series"`
Work *Work `gorm:"foreignKey:WorkID"` Work *Work `gorm:"foreignKey:WorkID"`
SeriesID uint SeriesID uint `gorm:"index;uniqueIndex:uniq_work_series"`
Series *Series `gorm:"foreignKey:SeriesID"` Series *Series `gorm:"foreignKey:SeriesID"`
NumberInSeries int `gorm:"default:0"` NumberInSeries int `gorm:"default:0"`
} }

View File

@ -3,12 +3,12 @@ package models
// Edge represents a polymorphic relationship between entities // Edge represents a polymorphic relationship between entities
type Edge struct { type Edge struct {
BaseModel BaseModel
SourceTable string `gorm:"size:50;not null"` SourceTable string `gorm:"size:50;not null;index:idx_edge_source;uniqueIndex:uniq_edge"`
SourceID uint `gorm:"not null"` SourceID uint `gorm:"not null;index:idx_edge_source;uniqueIndex:uniq_edge"`
TargetTable string `gorm:"size:50;not null"` TargetTable string `gorm:"size:50;not null;index:idx_edge_target;uniqueIndex:uniq_edge"`
TargetID uint `gorm:"not null"` TargetID uint `gorm:"not null;index:idx_edge_target;uniqueIndex:uniq_edge"`
Relation string `gorm:"size:50;default:'ASSOCIATED_WITH';not null"` Relation string `gorm:"size:50;default:'ASSOCIATED_WITH';not null;index;uniqueIndex:uniq_edge"`
Language string `gorm:"size:10;default:'en'"` Language string `gorm:"size:10;default:'en';index;uniqueIndex:uniq_edge"`
Extra JSONB `gorm:"type:jsonb;default:'{}'"` Extra JSONB `gorm:"type:jsonb;default:'{}'"`
} }
@ -18,9 +18,9 @@ type Embedding struct {
BaseModel BaseModel
// External vector storage reference (e.g., Weaviate object UUID) // External vector storage reference (e.g., Weaviate object UUID)
ExternalID string `gorm:"size:64;index"` ExternalID string `gorm:"size:64;index"`
EntityType string `gorm:"size:50;not null"` EntityType string `gorm:"size:50;not null;index:idx_embedding_entity;uniqueIndex:uniq_embedding"`
EntityID uint `gorm:"not null"` EntityID uint `gorm:"not null;index:idx_embedding_entity;uniqueIndex:uniq_embedding"`
Model string `gorm:"size:50;not null"` // e.g., bert, gpt, etc. Model string `gorm:"size:50;not null;uniqueIndex:uniq_embedding"` // e.g., bert, gpt, etc.
Dim int `gorm:"default:0"` Dim int `gorm:"default:0"`
WorkID *uint WorkID *uint
Work *Work `gorm:"foreignKey:WorkID"` Work *Work `gorm:"foreignKey:WorkID"`

View File

@ -52,13 +52,13 @@ const (
// CopyrightClaim represents a copyright claim // CopyrightClaim represents a copyright claim
type CopyrightClaim struct { type CopyrightClaim struct {
BaseModel BaseModel
Details string `gorm:"type:text;not null"` Details string `gorm:"type:text;not null"`
Status CopyrightClaimStatus `gorm:"size:50;default:'pending'"` Status CopyrightClaimStatus `gorm:"size:50;default:'pending'"`
ClaimDate time.Time `gorm:"not null"` ClaimDate time.Time `gorm:"not null"`
Resolution string `gorm:"type:text"` Resolution string `gorm:"type:text"`
ResolvedAt *time.Time ResolvedAt *time.Time
UserID *uint UserID *uint
User *User `gorm:"foreignKey:UserID"` User *User `gorm:"foreignKey:UserID"`
// Polymorphic relationship - can attach to any entity // Polymorphic relationship - can attach to any entity
Claimables []Copyrightable `gorm:"polymorphic:Copyrightable"` Claimables []Copyrightable `gorm:"polymorphic:Copyrightable"`
} }
@ -130,12 +130,12 @@ type ModerationFlag struct {
// AuditLog captures changes for governance and traceability // AuditLog captures changes for governance and traceability
type AuditLog struct { type AuditLog struct {
BaseModel BaseModel
ActorID *uint ActorID *uint
Actor *User `gorm:"foreignKey:ActorID"` Actor *User `gorm:"foreignKey:ActorID"`
Action string `gorm:"size:50;not null"` Action string `gorm:"size:50;not null"`
EntityType string `gorm:"size:50;not null"` EntityType string `gorm:"size:50;not null"`
EntityID uint `gorm:"not null"` EntityID uint `gorm:"not null"`
Before JSONB `gorm:"type:jsonb;default:'{}'"` Before JSONB `gorm:"type:jsonb;default:'{}'"`
After JSONB `gorm:"type:jsonb;default:'{}'"` After JSONB `gorm:"type:jsonb;default:'{}'"`
At time.Time `gorm:"autoCreateTime"` At time.Time `gorm:"autoCreateTime"`
} }

View File

@ -12,8 +12,8 @@ type Notification struct {
Read bool `gorm:"default:false"` Read bool `gorm:"default:false"`
Language string `gorm:"size:50;not null"` Language string `gorm:"size:50;not null"`
UserID uint UserID uint
User *User `gorm:"foreignKey:UserID"` User *User `gorm:"foreignKey:UserID"`
RelatedID *uint // ID of the related entity (work, comment, etc.) RelatedID *uint // ID of the related entity (work, comment, etc.)
RelatedType string `gorm:"size:50"` // Type of the related entity RelatedType string `gorm:"size:50"` // Type of the related entity
} }
@ -75,8 +75,8 @@ type Contributor struct {
type InteractionEvent struct { type InteractionEvent struct {
BaseModel BaseModel
UserID *uint UserID *uint
User *User `gorm:"foreignKey:UserID"` User *User `gorm:"foreignKey:UserID"`
TargetType string `gorm:"size:50;not null"` // work|translation|comment|collection|media TargetType string `gorm:"size:50;not null"` // work|translation|comment|collection|media
TargetID uint `gorm:"not null"` TargetID uint `gorm:"not null"`
Kind string `gorm:"size:30;not null"` // view|like|comment|share|bookmark Kind string `gorm:"size:30;not null"` // view|like|comment|share|bookmark
OccurredAt time.Time `gorm:"index"` OccurredAt time.Time `gorm:"index"`

View File

@ -58,9 +58,9 @@ type UserProfile struct {
// UserSession represents a user session // UserSession represents a user session
type UserSession struct { type UserSession struct {
BaseModel BaseModel
UserID uint `gorm:"index"` UserID uint `gorm:"index"`
User *User `gorm:"foreignKey:UserID"` User *User `gorm:"foreignKey:UserID"`
Token string `gorm:"size:255;not null;uniqueIndex"` Token string `gorm:"size:255;not null;uniqueIndex"`
IP string `gorm:"size:50"` IP string `gorm:"size:50"`
UserAgent string `gorm:"size:255"` UserAgent string `gorm:"size:255"`
ExpiresAt time.Time `gorm:"not null"` ExpiresAt time.Time `gorm:"not null"`
@ -69,9 +69,9 @@ type UserSession struct {
// PasswordReset represents a password reset request // PasswordReset represents a password reset request
type PasswordReset struct { type PasswordReset struct {
BaseModel BaseModel
UserID uint `gorm:"index"` UserID uint `gorm:"index"`
User *User `gorm:"foreignKey:UserID"` User *User `gorm:"foreignKey:UserID"`
Token string `gorm:"size:255;not null;uniqueIndex"` Token string `gorm:"size:255;not null;uniqueIndex"`
ExpiresAt time.Time `gorm:"not null"` ExpiresAt time.Time `gorm:"not null"`
Used bool `gorm:"default:false"` Used bool `gorm:"default:false"`
} }
@ -79,9 +79,9 @@ type PasswordReset struct {
// EmailVerification represents an email verification request // EmailVerification represents an email verification request
type EmailVerification struct { type EmailVerification struct {
BaseModel BaseModel
UserID uint `gorm:"index"` UserID uint `gorm:"index"`
User *User `gorm:"foreignKey:UserID"` User *User `gorm:"foreignKey:UserID"`
Token string `gorm:"size:255;not null;uniqueIndex"` Token string `gorm:"size:255;not null;uniqueIndex"`
ExpiresAt time.Time `gorm:"not null"` ExpiresAt time.Time `gorm:"not null"`
Used bool `gorm:"default:false"` Used bool `gorm:"default:false"`
} }

View File

@ -1,14 +1,13 @@
package models_test package models_test
import ( import (
models2 "tercul/internal/models"
"testing" "testing"
"tercul/internal/testutil"
"tercul/models"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"golang.org/x/crypto/bcrypt" "golang.org/x/crypto/bcrypt"
"tercul/internal/testutil"
) )
// UserModelSuite is a test suite for the User model // UserModelSuite is a test suite for the User model
@ -16,28 +15,28 @@ import (
type UserModelSuite struct { type UserModelSuite struct {
suite.Suite suite.Suite
users []*models.User users []*models2.User
} }
func (s *UserModelSuite) SetupSuite() { func (s *UserModelSuite) SetupSuite() {
s.users = []*models.User{} s.users = []*models2.User{}
} }
func (s *UserModelSuite) SetupTest() { func (s *UserModelSuite) SetupTest() {
s.users = []*models.User{} s.users = []*models2.User{}
} }
// createTestUser creates a test user and stores it in-memory // createTestUser creates a test user and stores it in-memory
func (s *UserModelSuite) createTestUser(username, email, password string) *models.User { func (s *UserModelSuite) createTestUser(username, email, password string) *models2.User {
hashed, _ := hashPassword(password) hashed, _ := hashPassword(password)
user := &models.User{ user := &models2.User{
Username: username, Username: username,
Email: email, Email: email,
Password: hashed, Password: hashed,
FirstName: "Test", FirstName: "Test",
LastName: "User", LastName: "User",
DisplayName: "Test User", DisplayName: "Test User",
Role: models.UserRoleReader, Role: models2.UserRoleReader,
Active: true, Active: true,
} }
s.users = append(s.users, user) s.users = append(s.users, user)
@ -109,14 +108,14 @@ func (s *UserModelSuite) TestUserValidation() {
s.NotNil(user.Username, "User should be created with a valid Username") s.NotNil(user.Username, "User should be created with a valid Username")
// Invalid email // Invalid email
invalidEmailUser := &models.User{ invalidEmailUser := &models2.User{
Username: "testuser2", Username: "testuser2",
Email: "invalid-email", Email: "invalid-email",
Password: "password123", Password: "password123",
FirstName: "Test", FirstName: "Test",
LastName: "User", LastName: "User",
DisplayName: "Test User", DisplayName: "Test User",
Role: models.UserRoleReader, Role: models2.UserRoleReader,
Active: true, Active: true,
} }
isValidEmail := func(email string) bool { isValidEmail := func(email string) bool {
@ -125,14 +124,14 @@ func (s *UserModelSuite) TestUserValidation() {
s.False(isValidEmail(invalidEmailUser.Email), "User with invalid email should not be created") s.False(isValidEmail(invalidEmailUser.Email), "User with invalid email should not be created")
// Duplicate username // Duplicate username
duplicateUsernameUser := &models.User{ duplicateUsernameUser := &models2.User{
Username: "testuser", Username: "testuser",
Email: "another@example.com", Email: "another@example.com",
Password: "password123", Password: "password123",
FirstName: "Test", FirstName: "Test",
LastName: "User", LastName: "User",
DisplayName: "Test User", DisplayName: "Test User",
Role: models.UserRoleReader, Role: models2.UserRoleReader,
Active: true, Active: true,
} }
isDuplicateUsername := false isDuplicateUsername := false
@ -145,14 +144,14 @@ func (s *UserModelSuite) TestUserValidation() {
s.True(isDuplicateUsername, "User with duplicate username should not be created") s.True(isDuplicateUsername, "User with duplicate username should not be created")
// Duplicate email // Duplicate email
duplicateEmailUser := &models.User{ duplicateEmailUser := &models2.User{
Username: "testuser3", Username: "testuser3",
Email: "test@example.com", Email: "test@example.com",
Password: "password123", Password: "password123",
FirstName: "Test", FirstName: "Test",
LastName: "User", LastName: "User",
DisplayName: "Test User", DisplayName: "Test User",
Role: models.UserRoleReader, Role: models2.UserRoleReader,
Active: true, Active: true,
} }
isDuplicateEmail := false isDuplicateEmail := false
@ -167,15 +166,15 @@ func (s *UserModelSuite) TestUserValidation() {
// TestUserRoles tests the user role enum // TestUserRoles tests the user role enum
func (s *UserModelSuite) TestUserRoles() { func (s *UserModelSuite) TestUserRoles() {
roles := []models.UserRole{ roles := []models2.UserRole{
models.UserRoleReader, models2.UserRoleReader,
models.UserRoleContributor, models2.UserRoleContributor,
models.UserRoleReviewer, models2.UserRoleReviewer,
models.UserRoleEditor, models2.UserRoleEditor,
models.UserRoleAdmin, models2.UserRoleAdmin,
} }
for i, role := range roles { for i, role := range roles {
user := &models.User{ user := &models2.User{
Username: "testuser" + string(rune(i+'0')), Username: "testuser" + string(rune(i+'0')),
Email: "test" + string(rune(i+'0')) + "@example.com", Email: "test" + string(rune(i+'0')) + "@example.com",
Password: "password123", Password: "password123",
@ -200,14 +199,14 @@ func TestUserModelSuite(t *testing.T) {
// TestUserBeforeSave tests the BeforeSave hook directly // TestUserBeforeSave tests the BeforeSave hook directly
func TestUserBeforeSave(t *testing.T) { func TestUserBeforeSave(t *testing.T) {
// Create a user with a plain text password // Create a user with a plain text password
user := &models.User{ user := &models2.User{
Username: "testuser", Username: "testuser",
Email: "test@example.com", Email: "test@example.com",
Password: "password123", Password: "password123",
FirstName: "Test", FirstName: "Test",
LastName: "User", LastName: "User",
DisplayName: "Test User", DisplayName: "Test User",
Role: models.UserRoleReader, Role: models2.UserRoleReader,
Active: true, Active: true,
} }

View File

@ -4,11 +4,11 @@ import (
"errors" "errors"
"fmt" "fmt"
"strings" "strings"
"tercul/internal/models"
"time" "time"
"github.com/golang-jwt/jwt/v5" "github.com/golang-jwt/jwt/v5"
"tercul/config" "tercul/internal/platform/config"
"tercul/models"
) )
var ( var (

View File

@ -5,7 +5,7 @@ import (
"net/http" "net/http"
"strings" "strings"
"tercul/logger" "tercul/internal/platform/log"
) )
// ContextKey is a type for context keys // ContextKey is a type for context keys
@ -32,9 +32,9 @@ func AuthMiddleware(jwtManager *JWTManager) func(http.Handler) http.Handler {
authHeader := r.Header.Get("Authorization") authHeader := r.Header.Get("Authorization")
tokenString, err := jwtManager.ExtractTokenFromHeader(authHeader) tokenString, err := jwtManager.ExtractTokenFromHeader(authHeader)
if err != nil { if err != nil {
logger.LogWarn("Authentication failed - missing or invalid token", log.LogWarn("Authentication failed - missing or invalid token",
logger.F("path", r.URL.Path), log.F("path", r.URL.Path),
logger.F("error", err)) log.F("error", err))
http.Error(w, "Unauthorized", http.StatusUnauthorized) http.Error(w, "Unauthorized", http.StatusUnauthorized)
return return
} }
@ -42,9 +42,9 @@ func AuthMiddleware(jwtManager *JWTManager) func(http.Handler) http.Handler {
// Validate token // Validate token
claims, err := jwtManager.ValidateToken(tokenString) claims, err := jwtManager.ValidateToken(tokenString)
if err != nil { if err != nil {
logger.LogWarn("Authentication failed - invalid token", log.LogWarn("Authentication failed - invalid token",
logger.F("path", r.URL.Path), log.F("path", r.URL.Path),
logger.F("error", err)) log.F("error", err))
http.Error(w, "Unauthorized", http.StatusUnauthorized) http.Error(w, "Unauthorized", http.StatusUnauthorized)
return return
} }
@ -62,19 +62,19 @@ func RoleMiddleware(requiredRole string) func(http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
claims, ok := r.Context().Value(ClaimsContextKey).(*Claims) claims, ok := r.Context().Value(ClaimsContextKey).(*Claims)
if !ok { if !ok {
logger.LogWarn("Authorization failed - no claims in context", log.LogWarn("Authorization failed - no claims in context",
logger.F("path", r.URL.Path), log.F("path", r.URL.Path),
logger.F("required_role", requiredRole)) log.F("required_role", requiredRole))
http.Error(w, "Forbidden", http.StatusForbidden) http.Error(w, "Forbidden", http.StatusForbidden)
return return
} }
jwtManager := NewJWTManager() jwtManager := NewJWTManager()
if err := jwtManager.RequireRole(claims.Role, requiredRole); err != nil { if err := jwtManager.RequireRole(claims.Role, requiredRole); err != nil {
logger.LogWarn("Authorization failed - insufficient role", log.LogWarn("Authorization failed - insufficient role",
logger.F("path", r.URL.Path), log.F("path", r.URL.Path),
logger.F("user_role", claims.Role), log.F("user_role", claims.Role),
logger.F("required_role", requiredRole)) log.F("required_role", requiredRole))
http.Error(w, "Forbidden", http.StatusForbidden) http.Error(w, "Forbidden", http.StatusForbidden)
return return
} }
@ -103,8 +103,8 @@ func GraphQLAuthMiddleware(jwtManager *JWTManager) func(http.Handler) http.Handl
} }
} }
// If token is invalid, log warning but continue // If token is invalid, log warning but continue
logger.LogWarn("GraphQL authentication failed - continuing with anonymous access", log.LogWarn("GraphQL authentication failed - continuing with anonymous access",
logger.F("path", r.URL.Path)) log.F("path", r.URL.Path))
} }
// Continue without authentication // Continue without authentication

68
internal/platform/cache/cache_test.go vendored Normal file
View File

@ -0,0 +1,68 @@
package cache
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestDefaultKeyGenerator_DefaultPrefix verifies that an empty prefix falls
// back to the default "tercul:" namespace for entity, list, and query keys.
func TestDefaultKeyGenerator_DefaultPrefix(t *testing.T) {
	gen := NewDefaultKeyGenerator("")
	require.NotNil(t, gen)

	type testCase struct {
		name       string
		entity     string
		id         uint
		page       int
		pageSize   int
		queryName  string
		params     []interface{}
		wantEntity string
		wantList   string
		wantQuery  string
	}

	cases := []testCase{
		{
			name:       "basic",
			entity:     "user",
			id:         42,
			page:       1,
			pageSize:   20,
			queryName:  "byEmail",
			params:     []interface{}{"foo@bar.com"},
			wantEntity: "tercul:user:id:42",
			wantList:   "tercul:user:list:1:20",
			wantQuery:  "tercul:user:byEmail:foo@bar.com",
		},
		{
			name:       "different entity and multiple params",
			entity:     "work",
			id:         7,
			page:       3,
			pageSize:   15,
			queryName:  "search",
			params:     []interface{}{"abc", 2020, true},
			wantEntity: "tercul:work:id:7",
			wantList:   "tercul:work:list:3:15",
			wantQuery:  "tercul:work:search:abc:2020:true",
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.wantEntity, gen.EntityKey(tc.entity, tc.id))
			assert.Equal(t, tc.wantList, gen.ListKey(tc.entity, tc.page, tc.pageSize))
			assert.Equal(t, tc.wantQuery, gen.QueryKey(tc.entity, tc.queryName, tc.params...))
		})
	}
}
// TestDefaultKeyGenerator_CustomPrefix verifies that a caller-supplied prefix
// is used verbatim in every generated key form.
func TestDefaultKeyGenerator_CustomPrefix(t *testing.T) {
	gen := NewDefaultKeyGenerator("mypfx:")
	require.NotNil(t, gen)

	assert.Equal(t, "mypfx:book:id:1", gen.EntityKey("book", 1))
	assert.Equal(t, "mypfx:book:list:2:10", gen.ListKey("book", 2, 10))
	assert.Equal(t, "mypfx:book:find:tag:99", gen.QueryKey("book", "find", "tag", 99))
}

View File

@ -8,8 +8,8 @@ import (
"time" "time"
"github.com/redis/go-redis/v9" "github.com/redis/go-redis/v9"
"tercul/config" "tercul/internal/platform/config"
"tercul/logger" "tercul/internal/platform/log"
) )
// RedisCache implements the Cache interface using Redis // RedisCache implements the Cache interface using Redis
@ -112,9 +112,9 @@ func (c *RedisCache) GetMulti(ctx context.Context, keys []string) (map[string][]
str, ok := values[i].(string) str, ok := values[i].(string)
if !ok { if !ok {
logger.LogWarn("Invalid type in Redis cache", log.LogWarn("Invalid type in Redis cache",
logger.F("key", key), log.F("key", key),
logger.F("type", fmt.Sprintf("%T", values[i]))) log.F("type", fmt.Sprintf("%T", values[i])))
continue continue
} }

View File

@ -53,8 +53,8 @@ type Config struct {
NLPUseTFIDF bool NLPUseTFIDF bool
// NLP cache configuration // NLP cache configuration
NLPMemoryCacheCap int NLPMemoryCacheCap int
NLPRedisCacheTTLSeconds int NLPRedisCacheTTLSeconds int
} }
// Cfg is the global configuration instance // Cfg is the global configuration instance
@ -146,16 +146,16 @@ func getEnvAsInt(key string, defaultValue int) int {
// getEnvAsBool gets an environment variable as a boolean or returns a default value // getEnvAsBool gets an environment variable as a boolean or returns a default value
func getEnvAsBool(key string, defaultValue bool) bool { func getEnvAsBool(key string, defaultValue bool) bool {
valueStr := getEnv(key, "") valueStr := getEnv(key, "")
if valueStr == "" { if valueStr == "" {
return defaultValue return defaultValue
} }
switch valueStr { switch valueStr {
case "1", "true", "TRUE", "True", "yes", "YES", "Yes", "on", "ON", "On": case "1", "true", "TRUE", "True", "yes", "YES", "Yes", "on", "ON", "On":
return true return true
case "0", "false", "FALSE", "False", "no", "NO", "No", "off", "OFF", "Off": case "0", "false", "FALSE", "False", "no", "NO", "No", "off", "OFF", "Off":
return false return false
default: default:
return defaultValue return defaultValue
} }
} }

View File

@ -7,8 +7,8 @@ import (
"gorm.io/driver/postgres" "gorm.io/driver/postgres"
"gorm.io/gorm" "gorm.io/gorm"
gormlogger "gorm.io/gorm/logger" gormlogger "gorm.io/gorm/logger"
"tercul/config" "tercul/internal/platform/config"
"tercul/logger" "tercul/internal/platform/log"
) )
// DB is a global database connection instance // DB is a global database connection instance
@ -17,9 +17,9 @@ var DB *gorm.DB
// Connect establishes a connection to the database using configuration settings // Connect establishes a connection to the database using configuration settings
// It returns the database connection and any error encountered // It returns the database connection and any error encountered
func Connect() (*gorm.DB, error) { func Connect() (*gorm.DB, error) {
logger.LogInfo("Connecting to database", log.LogInfo("Connecting to database",
logger.F("host", config.Cfg.DBHost), log.F("host", config.Cfg.DBHost),
logger.F("database", config.Cfg.DBName)) log.F("database", config.Cfg.DBName))
dsn := config.Cfg.GetDSN() dsn := config.Cfg.GetDSN()
db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{ db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{
@ -43,9 +43,9 @@ func Connect() (*gorm.DB, error) {
sqlDB.SetMaxIdleConns(5) // Idle connections sqlDB.SetMaxIdleConns(5) // Idle connections
sqlDB.SetConnMaxLifetime(30 * time.Minute) sqlDB.SetConnMaxLifetime(30 * time.Minute)
logger.LogInfo("Successfully connected to database", log.LogInfo("Successfully connected to database",
logger.F("host", config.Cfg.DBHost), log.F("host", config.Cfg.DBHost),
logger.F("database", config.Cfg.DBName)) log.F("database", config.Cfg.DBName))
return db, nil return db, nil
} }

View File

@ -2,142 +2,142 @@ package db
import ( import (
"gorm.io/gorm" "gorm.io/gorm"
"tercul/logger" models2 "tercul/internal/models"
"tercul/models" "tercul/internal/platform/log"
) )
// RunMigrations runs all database migrations // RunMigrations runs all database migrations
func RunMigrations(db *gorm.DB) error { func RunMigrations(db *gorm.DB) error {
logger.LogInfo("Running database migrations") log.LogInfo("Running database migrations")
// First, create all tables using GORM AutoMigrate // First, create all tables using GORM AutoMigrate
if err := createTables(db); err != nil { if err := createTables(db); err != nil {
logger.LogError("Failed to create tables", logger.F("error", err)) log.LogError("Failed to create tables", log.F("error", err))
return err return err
} }
// Then add indexes to improve query performance // Then add indexes to improve query performance
if err := addIndexes(db); err != nil { if err := addIndexes(db); err != nil {
logger.LogError("Failed to add indexes", logger.F("error", err)) log.LogError("Failed to add indexes", log.F("error", err))
return err return err
} }
logger.LogInfo("Database migrations completed successfully") log.LogInfo("Database migrations completed successfully")
return nil return nil
} }
// createTables creates all database tables using GORM AutoMigrate // createTables creates all database tables using GORM AutoMigrate
func createTables(db *gorm.DB) error { func createTables(db *gorm.DB) error {
logger.LogInfo("Creating database tables") log.LogInfo("Creating database tables")
// Enable recommended extensions // Enable recommended extensions
if err := db.Exec("CREATE EXTENSION IF NOT EXISTS pg_trgm").Error; err != nil { if err := db.Exec("CREATE EXTENSION IF NOT EXISTS pg_trgm").Error; err != nil {
logger.LogError("Failed to enable pg_trgm extension", logger.F("error", err)) log.LogError("Failed to enable pg_trgm extension", log.F("error", err))
return err return err
} }
// Create all models/tables // Create all models/tables
err := db.AutoMigrate( err := db.AutoMigrate(
// User-related models // User-related models
&models.User{}, &models2.User{},
&models.UserProfile{}, &models2.UserProfile{},
&models.UserSession{}, &models2.UserSession{},
&models.PasswordReset{}, &models2.PasswordReset{},
&models.EmailVerification{}, &models2.EmailVerification{},
// Literary models // Literary models
&models.Work{}, &models2.Work{},
&models.Translation{}, &models2.Translation{},
&models.Author{}, &models2.Author{},
&models.Book{}, &models2.Book{},
&models.Publisher{}, &models2.Publisher{},
&models.Source{}, &models2.Source{},
&models.Edition{}, &models2.Edition{},
&models.Series{}, &models2.Series{},
&models.WorkSeries{}, &models2.WorkSeries{},
// Organization models // Organization models
&models.Tag{}, &models2.Tag{},
&models.Category{}, &models2.Category{},
// Interaction models // Interaction models
&models.Comment{}, &models2.Comment{},
&models.Like{}, &models2.Like{},
&models.Bookmark{}, &models2.Bookmark{},
&models.Collection{}, &models2.Collection{},
&models.Contribution{}, &models2.Contribution{},
&models.InteractionEvent{}, &models2.InteractionEvent{},
// Location models // Location models
&models.Country{}, &models2.Country{},
&models.City{}, &models2.City{},
&models.Place{}, &models2.Place{},
&models.Address{}, &models2.Address{},
&models.Language{}, &models2.Language{},
// Linguistic models // Linguistic models
&models.ReadabilityScore{}, &models2.ReadabilityScore{},
&models.WritingStyle{}, &models2.WritingStyle{},
&models.LinguisticLayer{}, &models2.LinguisticLayer{},
&models.TextMetadata{}, &models2.TextMetadata{},
&models.PoeticAnalysis{}, &models2.PoeticAnalysis{},
&models.Word{}, &models2.Word{},
&models.Concept{}, &models2.Concept{},
&models.LanguageEntity{}, &models2.LanguageEntity{},
&models.TextBlock{}, &models2.TextBlock{},
&models.WordOccurrence{}, &models2.WordOccurrence{},
&models.EntityOccurrence{}, &models2.EntityOccurrence{},
// Relationship models // Relationship models
&models.Edge{}, &models2.Edge{},
&models.Embedding{}, &models2.Embedding{},
&models.Media{}, &models2.Media{},
&models.BookWork{}, &models2.BookWork{},
&models.AuthorCountry{}, &models2.AuthorCountry{},
&models.WorkAuthor{}, &models2.WorkAuthor{},
&models.BookAuthor{}, &models2.BookAuthor{},
// System models // System models
&models.Notification{}, &models2.Notification{},
&models.EditorialWorkflow{}, &models2.EditorialWorkflow{},
&models.Admin{}, &models2.Admin{},
&models.Vote{}, &models2.Vote{},
&models.Contributor{}, &models2.Contributor{},
&models.HybridEntityWork{}, &models2.HybridEntityWork{},
&models.ModerationFlag{}, &models2.ModerationFlag{},
&models.AuditLog{}, &models2.AuditLog{},
// Rights models // Rights models
&models.Copyright{}, &models2.Copyright{},
&models.CopyrightClaim{}, &models2.CopyrightClaim{},
&models.Monetization{}, &models2.Monetization{},
&models.License{}, &models2.License{},
// Analytics models // Analytics models
&models.WorkStats{}, &models2.WorkStats{},
&models.TranslationStats{}, &models2.TranslationStats{},
&models.UserStats{}, &models2.UserStats{},
&models.BookStats{}, &models2.BookStats{},
&models.CollectionStats{}, &models2.CollectionStats{},
&models.MediaStats{}, &models2.MediaStats{},
// Metadata models // Metadata models
&models.LanguageAnalysis{}, &models2.LanguageAnalysis{},
&models.Gamification{}, &models2.Gamification{},
&models.Stats{}, &models2.Stats{},
&models.SearchDocument{}, &models2.SearchDocument{},
// Psychological models // Psychological models
&models.Emotion{}, &models2.Emotion{},
&models.Mood{}, &models2.Mood{},
&models.TopicCluster{}, &models2.TopicCluster{},
) )
if err != nil { if err != nil {
return err return err
} }
logger.LogInfo("Database tables created successfully") log.LogInfo("Database tables created successfully")
return nil return nil
} }
@ -326,6 +326,6 @@ func addIndexes(db *gorm.DB) error {
return err return err
} }
logger.LogInfo("Database indexes added successfully") log.LogInfo("Database indexes added successfully")
return nil return nil
} }

View File

@ -1,10 +1,10 @@
package middleware package http
import ( import (
"net/http" "net/http"
"sync" "sync"
"tercul/config" "tercul/internal/platform/config"
"tercul/logger" "tercul/internal/platform/log"
"time" "time"
) )
@ -85,9 +85,9 @@ func RateLimitMiddleware(next http.Handler) http.Handler {
// Check if request is allowed // Check if request is allowed
if !rateLimiter.Allow(clientID) { if !rateLimiter.Allow(clientID) {
logger.LogWarn("Rate limit exceeded", log.LogWarn("Rate limit exceeded",
logger.F("clientID", clientID), log.F("clientID", clientID),
logger.F("path", r.URL.Path)) log.F("path", r.URL.Path))
w.WriteHeader(http.StatusTooManyRequests) w.WriteHeader(http.StatusTooManyRequests)
w.Write([]byte("Rate limit exceeded. Please try again later.")) w.Write([]byte("Rate limit exceeded. Please try again later."))

View File

@ -1,4 +1,4 @@
package middleware_test package http_test
import ( import (
"net/http" "net/http"
@ -6,8 +6,8 @@ import (
"testing" "testing"
"time" "time"
"tercul/config" "tercul/internal/platform/config"
"tercul/middleware" platformhttp "tercul/internal/platform/http"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
@ -21,7 +21,7 @@ type RateLimiterSuite struct {
// TestRateLimiter tests the RateLimiter // TestRateLimiter tests the RateLimiter
func (s *RateLimiterSuite) TestRateLimiter() { func (s *RateLimiterSuite) TestRateLimiter() {
// Create a new rate limiter with 2 requests per second and a burst of 3 // Create a new rate limiter with 2 requests per second and a burst of 3
limiter := middleware.NewRateLimiter(2, 3) limiter := platformhttp.NewRateLimiter(2, 3)
// Test that the first 3 requests are allowed (burst) // Test that the first 3 requests are allowed (burst)
for i := 0; i < 3; i++ { for i := 0; i < 3; i++ {
@ -50,7 +50,7 @@ func (s *RateLimiterSuite) TestRateLimiter() {
// TestRateLimiterMultipleClients tests the RateLimiter with multiple clients // TestRateLimiterMultipleClients tests the RateLimiter with multiple clients
func (s *RateLimiterSuite) TestRateLimiterMultipleClients() { func (s *RateLimiterSuite) TestRateLimiterMultipleClients() {
// Create a new rate limiter with 2 requests per second and a burst of 3 // Create a new rate limiter with 2 requests per second and a burst of 3
limiter := middleware.NewRateLimiter(2, 3) limiter := platformhttp.NewRateLimiter(2, 3)
// Test that the first 3 requests for client1 are allowed (burst) // Test that the first 3 requests for client1 are allowed (burst)
for i := 0; i < 3; i++ { for i := 0; i < 3; i++ {
@ -85,7 +85,7 @@ func (s *RateLimiterSuite) TestRateLimiterMiddleware() {
}) })
// Create a rate limiter middleware with 2 requests per second and a burst of 3 // Create a rate limiter middleware with 2 requests per second and a burst of 3
middleware := middleware.RateLimitMiddleware(testHandler) middleware := platformhttp.RateLimitMiddleware(testHandler)
// Create a test server // Create a test server
server := httptest.NewServer(middleware) server := httptest.NewServer(middleware)
@ -144,22 +144,22 @@ func TestRateLimiterSuite(t *testing.T) {
// TestNewRateLimiter tests the NewRateLimiter function // TestNewRateLimiter tests the NewRateLimiter function
func TestNewRateLimiter(t *testing.T) { func TestNewRateLimiter(t *testing.T) {
// Test with valid parameters // Test with valid parameters
limiter := middleware.NewRateLimiter(10, 20) limiter := platformhttp.NewRateLimiter(10, 20)
assert.NotNil(t, limiter, "NewRateLimiter should return a non-nil limiter") assert.NotNil(t, limiter, "NewRateLimiter should return a non-nil limiter")
// Test with zero rate (should use default) // Test with zero rate (should use default)
limiter = middleware.NewRateLimiter(0, 20) limiter = platformhttp.NewRateLimiter(0, 20)
assert.NotNil(t, limiter, "NewRateLimiter should return a non-nil limiter with default rate") assert.NotNil(t, limiter, "NewRateLimiter should return a non-nil limiter with default rate")
// Test with zero capacity (should use default) // Test with zero capacity (should use default)
limiter = middleware.NewRateLimiter(10, 0) limiter = platformhttp.NewRateLimiter(10, 0)
assert.NotNil(t, limiter, "NewRateLimiter should return a non-nil limiter with default capacity") assert.NotNil(t, limiter, "NewRateLimiter should return a non-nil limiter with default capacity")
// Test with negative rate (should use default) // Test with negative rate (should use default)
limiter = middleware.NewRateLimiter(-10, 20) limiter = platformhttp.NewRateLimiter(-10, 20)
assert.NotNil(t, limiter, "NewRateLimiter should return a non-nil limiter with default rate") assert.NotNil(t, limiter, "NewRateLimiter should return a non-nil limiter with default rate")
// Test with negative capacity (should use default) // Test with negative capacity (should use default)
limiter = middleware.NewRateLimiter(10, -20) limiter = platformhttp.NewRateLimiter(10, -20)
assert.NotNil(t, limiter, "NewRateLimiter should return a non-nil limiter with default capacity") assert.NotNil(t, limiter, "NewRateLimiter should return a non-nil limiter with default capacity")
} }

View File

@ -1,4 +1,4 @@
package logger package log
import ( import (
"fmt" "fmt"

View File

@ -1,8 +1,8 @@
package weaviate package search
import ( import (
"context" "context"
"log" "fmt"
"github.com/weaviate/weaviate-go-client/v5/weaviate" "github.com/weaviate/weaviate-go-client/v5/weaviate"
"github.com/weaviate/weaviate/entities/models" "github.com/weaviate/weaviate/entities/models"
@ -520,9 +520,9 @@ func CreateSchema(client *weaviate.Client) {
for _, class := range classes { for _, class := range classes {
err := client.Schema().ClassCreator().WithClass(class).Do(context.Background()) err := client.Schema().ClassCreator().WithClass(class).Do(context.Background())
if err != nil { if err != nil {
log.Printf("Failed to create class %s: %v", class.Class, err) fmt.Printf("Failed to create class %s: %v", class.Class, err)
} }
} }
log.Println("Weaviate schema created successfully.") fmt.Println("Weaviate schema created successfully.")
} }

View File

@ -1,14 +1,13 @@
package weaviate package search
import ( import (
"context" "context"
"fmt" "fmt"
"log" "log"
"tercul/models" "tercul/internal/models"
"tercul/internal/platform/config"
"time" "time"
"tercul/config"
"github.com/weaviate/weaviate-go-client/v5/weaviate" "github.com/weaviate/weaviate-go-client/v5/weaviate"
) )

View File

@ -3,7 +3,7 @@ package repositories
import ( import (
"context" "context"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// AuthorRepository defines CRUD methods specific to Author. // AuthorRepository defines CRUD methods specific to Author.

View File

@ -7,8 +7,8 @@ import (
"time" "time"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/config" "tercul/internal/platform/config"
"tercul/logger" "tercul/internal/platform/log"
) )
// Common repository errors // Common repository errors
@ -198,14 +198,14 @@ func (r *BaseRepositoryImpl[T]) Create(ctx context.Context, entity *T) error {
duration := time.Since(start) duration := time.Since(start)
if err != nil { if err != nil {
logger.LogError("Failed to create entity", log.LogError("Failed to create entity",
logger.F("error", err), log.F("error", err),
logger.F("duration", duration)) log.F("duration", duration))
return fmt.Errorf("%w: %v", ErrDatabaseOperation, err) return fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
} }
logger.LogDebug("Entity created successfully", log.LogDebug("Entity created successfully",
logger.F("duration", duration)) log.F("duration", duration))
return nil return nil
} }
@ -226,14 +226,14 @@ func (r *BaseRepositoryImpl[T]) CreateInTx(ctx context.Context, tx *gorm.DB, ent
duration := time.Since(start) duration := time.Since(start)
if err != nil { if err != nil {
logger.LogError("Failed to create entity in transaction", log.LogError("Failed to create entity in transaction",
logger.F("error", err), log.F("error", err),
logger.F("duration", duration)) log.F("duration", duration))
return fmt.Errorf("%w: %v", ErrDatabaseOperation, err) return fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
} }
logger.LogDebug("Entity created successfully in transaction", log.LogDebug("Entity created successfully in transaction",
logger.F("duration", duration)) log.F("duration", duration))
return nil return nil
} }
@ -253,21 +253,21 @@ func (r *BaseRepositoryImpl[T]) GetByID(ctx context.Context, id uint) (*T, error
if err != nil { if err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) { if errors.Is(err, gorm.ErrRecordNotFound) {
logger.LogDebug("Entity not found", log.LogDebug("Entity not found",
logger.F("id", id), log.F("id", id),
logger.F("duration", duration)) log.F("duration", duration))
return nil, ErrEntityNotFound return nil, ErrEntityNotFound
} }
logger.LogError("Failed to get entity by ID", log.LogError("Failed to get entity by ID",
logger.F("id", id), log.F("id", id),
logger.F("error", err), log.F("error", err),
logger.F("duration", duration)) log.F("duration", duration))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err) return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
} }
logger.LogDebug("Entity retrieved successfully", log.LogDebug("Entity retrieved successfully",
logger.F("id", id), log.F("id", id),
logger.F("duration", duration)) log.F("duration", duration))
return &entity, nil return &entity, nil
} }
@ -288,21 +288,21 @@ func (r *BaseRepositoryImpl[T]) GetByIDWithOptions(ctx context.Context, id uint,
if err != nil { if err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) { if errors.Is(err, gorm.ErrRecordNotFound) {
logger.LogDebug("Entity not found with options", log.LogDebug("Entity not found with options",
logger.F("id", id), log.F("id", id),
logger.F("duration", duration)) log.F("duration", duration))
return nil, ErrEntityNotFound return nil, ErrEntityNotFound
} }
logger.LogError("Failed to get entity by ID with options", log.LogError("Failed to get entity by ID with options",
logger.F("id", id), log.F("id", id),
logger.F("error", err), log.F("error", err),
logger.F("duration", duration)) log.F("duration", duration))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err) return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
} }
logger.LogDebug("Entity retrieved successfully with options", log.LogDebug("Entity retrieved successfully with options",
logger.F("id", id), log.F("id", id),
logger.F("duration", duration)) log.F("duration", duration))
return &entity, nil return &entity, nil
} }
@ -320,14 +320,14 @@ func (r *BaseRepositoryImpl[T]) Update(ctx context.Context, entity *T) error {
duration := time.Since(start) duration := time.Since(start)
if err != nil { if err != nil {
logger.LogError("Failed to update entity", log.LogError("Failed to update entity",
logger.F("error", err), log.F("error", err),
logger.F("duration", duration)) log.F("duration", duration))
return fmt.Errorf("%w: %v", ErrDatabaseOperation, err) return fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
} }
logger.LogDebug("Entity updated successfully", log.LogDebug("Entity updated successfully",
logger.F("duration", duration)) log.F("duration", duration))
return nil return nil
} }
@ -348,14 +348,14 @@ func (r *BaseRepositoryImpl[T]) UpdateInTx(ctx context.Context, tx *gorm.DB, ent
duration := time.Since(start) duration := time.Since(start)
if err != nil { if err != nil {
logger.LogError("Failed to update entity in transaction", log.LogError("Failed to update entity in transaction",
logger.F("error", err), log.F("error", err),
logger.F("duration", duration)) log.F("duration", duration))
return fmt.Errorf("%w: %v", ErrDatabaseOperation, err) return fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
} }
logger.LogDebug("Entity updated successfully in transaction", log.LogDebug("Entity updated successfully in transaction",
logger.F("duration", duration)) log.F("duration", duration))
return nil return nil
} }
@ -374,24 +374,24 @@ func (r *BaseRepositoryImpl[T]) Delete(ctx context.Context, id uint) error {
duration := time.Since(start) duration := time.Since(start)
if result.Error != nil { if result.Error != nil {
logger.LogError("Failed to delete entity", log.LogError("Failed to delete entity",
logger.F("id", id), log.F("id", id),
logger.F("error", result.Error), log.F("error", result.Error),
logger.F("duration", duration)) log.F("duration", duration))
return fmt.Errorf("%w: %v", ErrDatabaseOperation, result.Error) return fmt.Errorf("%w: %v", ErrDatabaseOperation, result.Error)
} }
if result.RowsAffected == 0 { if result.RowsAffected == 0 {
logger.LogDebug("No entity found to delete", log.LogDebug("No entity found to delete",
logger.F("id", id), log.F("id", id),
logger.F("duration", duration)) log.F("duration", duration))
return ErrEntityNotFound return ErrEntityNotFound
} }
logger.LogDebug("Entity deleted successfully", log.LogDebug("Entity deleted successfully",
logger.F("id", id), log.F("id", id),
logger.F("rowsAffected", result.RowsAffected), log.F("rowsAffected", result.RowsAffected),
logger.F("duration", duration)) log.F("duration", duration))
return nil return nil
} }
@ -413,24 +413,24 @@ func (r *BaseRepositoryImpl[T]) DeleteInTx(ctx context.Context, tx *gorm.DB, id
duration := time.Since(start) duration := time.Since(start)
if result.Error != nil { if result.Error != nil {
logger.LogError("Failed to delete entity in transaction", log.LogError("Failed to delete entity in transaction",
logger.F("id", id), log.F("id", id),
logger.F("error", result.Error), log.F("error", result.Error),
logger.F("duration", duration)) log.F("duration", duration))
return fmt.Errorf("%w: %v", ErrDatabaseOperation, result.Error) return fmt.Errorf("%w: %v", ErrDatabaseOperation, result.Error)
} }
if result.RowsAffected == 0 { if result.RowsAffected == 0 {
logger.LogDebug("No entity found to delete in transaction", log.LogDebug("No entity found to delete in transaction",
logger.F("id", id), log.F("id", id),
logger.F("duration", duration)) log.F("duration", duration))
return ErrEntityNotFound return ErrEntityNotFound
} }
logger.LogDebug("Entity deleted successfully in transaction", log.LogDebug("Entity deleted successfully in transaction",
logger.F("id", id), log.F("id", id),
logger.F("rowsAffected", result.RowsAffected), log.F("rowsAffected", result.RowsAffected),
logger.F("duration", duration)) log.F("duration", duration))
return nil return nil
} }
@ -451,9 +451,9 @@ func (r *BaseRepositoryImpl[T]) List(ctx context.Context, page, pageSize int) (*
// Get total count // Get total count
if err := r.db.WithContext(ctx).Model(new(T)).Count(&totalCount).Error; err != nil { if err := r.db.WithContext(ctx).Model(new(T)).Count(&totalCount).Error; err != nil {
logger.LogError("Failed to count entities", log.LogError("Failed to count entities",
logger.F("error", err), log.F("error", err),
logger.F("duration", time.Since(start))) log.F("duration", time.Since(start)))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err) return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
} }
@ -462,11 +462,11 @@ func (r *BaseRepositoryImpl[T]) List(ctx context.Context, page, pageSize int) (*
// Get paginated data // Get paginated data
if err := r.db.WithContext(ctx).Offset(offset).Limit(pageSize).Find(&entities).Error; err != nil { if err := r.db.WithContext(ctx).Offset(offset).Limit(pageSize).Find(&entities).Error; err != nil {
logger.LogError("Failed to get paginated entities", log.LogError("Failed to get paginated entities",
logger.F("page", page), log.F("page", page),
logger.F("pageSize", pageSize), log.F("pageSize", pageSize),
logger.F("error", err), log.F("error", err),
logger.F("duration", time.Since(start))) log.F("duration", time.Since(start)))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err) return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
} }
@ -481,14 +481,14 @@ func (r *BaseRepositoryImpl[T]) List(ctx context.Context, page, pageSize int) (*
hasNext := page < totalPages hasNext := page < totalPages
hasPrev := page > 1 hasPrev := page > 1
logger.LogDebug("Paginated entities retrieved successfully", log.LogDebug("Paginated entities retrieved successfully",
logger.F("page", page), log.F("page", page),
logger.F("pageSize", pageSize), log.F("pageSize", pageSize),
logger.F("totalCount", totalCount), log.F("totalCount", totalCount),
logger.F("totalPages", totalPages), log.F("totalPages", totalPages),
logger.F("hasNext", hasNext), log.F("hasNext", hasNext),
logger.F("hasPrev", hasPrev), log.F("hasPrev", hasPrev),
logger.F("duration", duration)) log.F("duration", duration))
return &PaginatedResult[T]{ return &PaginatedResult[T]{
Items: entities, Items: entities,
@ -512,16 +512,16 @@ func (r *BaseRepositoryImpl[T]) ListWithOptions(ctx context.Context, options *Qu
query := r.buildQuery(r.db.WithContext(ctx), options) query := r.buildQuery(r.db.WithContext(ctx), options)
if err := query.Find(&entities).Error; err != nil { if err := query.Find(&entities).Error; err != nil {
logger.LogError("Failed to get entities with options", log.LogError("Failed to get entities with options",
logger.F("error", err), log.F("error", err),
logger.F("duration", time.Since(start))) log.F("duration", time.Since(start)))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err) return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
} }
duration := time.Since(start) duration := time.Since(start)
logger.LogDebug("Entities retrieved successfully with options", log.LogDebug("Entities retrieved successfully with options",
logger.F("count", len(entities)), log.F("count", len(entities)),
logger.F("duration", duration)) log.F("duration", duration))
return entities, nil return entities, nil
} }
@ -535,16 +535,16 @@ func (r *BaseRepositoryImpl[T]) ListAll(ctx context.Context) ([]T, error) {
start := time.Now() start := time.Now()
var entities []T var entities []T
if err := r.db.WithContext(ctx).Find(&entities).Error; err != nil { if err := r.db.WithContext(ctx).Find(&entities).Error; err != nil {
logger.LogError("Failed to get all entities", log.LogError("Failed to get all entities",
logger.F("error", err), log.F("error", err),
logger.F("duration", time.Since(start))) log.F("duration", time.Since(start)))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err) return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
} }
duration := time.Since(start) duration := time.Since(start)
logger.LogDebug("All entities retrieved successfully", log.LogDebug("All entities retrieved successfully",
logger.F("count", len(entities)), log.F("count", len(entities)),
logger.F("duration", duration)) log.F("duration", duration))
return entities, nil return entities, nil
} }
@ -558,16 +558,16 @@ func (r *BaseRepositoryImpl[T]) Count(ctx context.Context) (int64, error) {
start := time.Now() start := time.Now()
var count int64 var count int64
if err := r.db.WithContext(ctx).Model(new(T)).Count(&count).Error; err != nil { if err := r.db.WithContext(ctx).Model(new(T)).Count(&count).Error; err != nil {
logger.LogError("Failed to count entities", log.LogError("Failed to count entities",
logger.F("error", err), log.F("error", err),
logger.F("duration", time.Since(start))) log.F("duration", time.Since(start)))
return 0, fmt.Errorf("%w: %v", ErrDatabaseOperation, err) return 0, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
} }
duration := time.Since(start) duration := time.Since(start)
logger.LogDebug("Entity count retrieved successfully", log.LogDebug("Entity count retrieved successfully",
logger.F("count", count), log.F("count", count),
logger.F("duration", duration)) log.F("duration", duration))
return count, nil return count, nil
} }
@ -583,16 +583,16 @@ func (r *BaseRepositoryImpl[T]) CountWithOptions(ctx context.Context, options *Q
query := r.buildQuery(r.db.WithContext(ctx), options) query := r.buildQuery(r.db.WithContext(ctx), options)
if err := query.Model(new(T)).Count(&count).Error; err != nil { if err := query.Model(new(T)).Count(&count).Error; err != nil {
logger.LogError("Failed to count entities with options", log.LogError("Failed to count entities with options",
logger.F("error", err), log.F("error", err),
logger.F("duration", time.Since(start))) log.F("duration", time.Since(start)))
return 0, fmt.Errorf("%w: %v", ErrDatabaseOperation, err) return 0, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
} }
duration := time.Since(start) duration := time.Since(start)
logger.LogDebug("Entity count retrieved successfully with options", log.LogDebug("Entity count retrieved successfully with options",
logger.F("count", count), log.F("count", count),
logger.F("duration", duration)) log.F("duration", duration))
return count, nil return count, nil
} }
@ -616,25 +616,25 @@ func (r *BaseRepositoryImpl[T]) FindWithPreload(ctx context.Context, preloads []
if err := query.First(&entity, id).Error; err != nil { if err := query.First(&entity, id).Error; err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) { if errors.Is(err, gorm.ErrRecordNotFound) {
logger.LogDebug("Entity not found with preloads", log.LogDebug("Entity not found with preloads",
logger.F("id", id), log.F("id", id),
logger.F("preloads", preloads), log.F("preloads", preloads),
logger.F("duration", time.Since(start))) log.F("duration", time.Since(start)))
return nil, ErrEntityNotFound return nil, ErrEntityNotFound
} }
logger.LogError("Failed to get entity with preloads", log.LogError("Failed to get entity with preloads",
logger.F("id", id), log.F("id", id),
logger.F("preloads", preloads), log.F("preloads", preloads),
logger.F("error", err), log.F("error", err),
logger.F("duration", time.Since(start))) log.F("duration", time.Since(start)))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err) return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
} }
duration := time.Since(start) duration := time.Since(start)
logger.LogDebug("Entity retrieved successfully with preloads", log.LogDebug("Entity retrieved successfully with preloads",
logger.F("id", id), log.F("id", id),
logger.F("preloads", preloads), log.F("preloads", preloads),
logger.F("duration", duration)) log.F("duration", duration))
return &entity, nil return &entity, nil
} }
@ -659,20 +659,20 @@ func (r *BaseRepositoryImpl[T]) GetAllForSync(ctx context.Context, batchSize, of
start := time.Now() start := time.Now()
var entities []T var entities []T
if err := r.db.WithContext(ctx).Offset(offset).Limit(batchSize).Find(&entities).Error; err != nil { if err := r.db.WithContext(ctx).Offset(offset).Limit(batchSize).Find(&entities).Error; err != nil {
logger.LogError("Failed to get entities for sync", log.LogError("Failed to get entities for sync",
logger.F("batchSize", batchSize), log.F("batchSize", batchSize),
logger.F("offset", offset), log.F("offset", offset),
logger.F("error", err), log.F("error", err),
logger.F("duration", time.Since(start))) log.F("duration", time.Since(start)))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err) return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
} }
duration := time.Since(start) duration := time.Since(start)
logger.LogDebug("Entities retrieved successfully for sync", log.LogDebug("Entities retrieved successfully for sync",
logger.F("batchSize", batchSize), log.F("batchSize", batchSize),
logger.F("offset", offset), log.F("offset", offset),
logger.F("count", len(entities)), log.F("count", len(entities)),
logger.F("duration", duration)) log.F("duration", duration))
return entities, nil return entities, nil
} }
@ -689,20 +689,20 @@ func (r *BaseRepositoryImpl[T]) Exists(ctx context.Context, id uint) (bool, erro
start := time.Now() start := time.Now()
var count int64 var count int64
if err := r.db.WithContext(ctx).Model(new(T)).Where("id = ?", id).Count(&count).Error; err != nil { if err := r.db.WithContext(ctx).Model(new(T)).Where("id = ?", id).Count(&count).Error; err != nil {
logger.LogError("Failed to check entity existence", log.LogError("Failed to check entity existence",
logger.F("id", id), log.F("id", id),
logger.F("error", err), log.F("error", err),
logger.F("duration", time.Since(start))) log.F("duration", time.Since(start)))
return false, fmt.Errorf("%w: %v", ErrDatabaseOperation, err) return false, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
} }
duration := time.Since(start) duration := time.Since(start)
exists := count > 0 exists := count > 0
logger.LogDebug("Entity existence checked", log.LogDebug("Entity existence checked",
logger.F("id", id), log.F("id", id),
logger.F("exists", exists), log.F("exists", exists),
logger.F("duration", duration)) log.F("duration", duration))
return exists, nil return exists, nil
} }
@ -715,12 +715,12 @@ func (r *BaseRepositoryImpl[T]) BeginTx(ctx context.Context) (*gorm.DB, error) {
tx := r.db.WithContext(ctx).Begin() tx := r.db.WithContext(ctx).Begin()
if tx.Error != nil { if tx.Error != nil {
logger.LogError("Failed to begin transaction", log.LogError("Failed to begin transaction",
logger.F("error", tx.Error)) log.F("error", tx.Error))
return nil, fmt.Errorf("%w: %v", ErrTransactionFailed, tx.Error) return nil, fmt.Errorf("%w: %v", ErrTransactionFailed, tx.Error)
} }
logger.LogDebug("Transaction started successfully") log.LogDebug("Transaction started successfully")
return tx, nil return tx, nil
} }
@ -738,29 +738,29 @@ func (r *BaseRepositoryImpl[T]) WithTx(ctx context.Context, fn func(tx *gorm.DB)
defer func() { defer func() {
if r := recover(); r != nil { if r := recover(); r != nil {
tx.Rollback() tx.Rollback()
logger.LogError("Transaction panic recovered", log.LogError("Transaction panic recovered",
logger.F("panic", r)) log.F("panic", r))
} }
}() }()
if err := fn(tx); err != nil { if err := fn(tx); err != nil {
if rbErr := tx.Rollback().Error; rbErr != nil { if rbErr := tx.Rollback().Error; rbErr != nil {
logger.LogError("Failed to rollback transaction", log.LogError("Failed to rollback transaction",
logger.F("originalError", err), log.F("originalError", err),
logger.F("rollbackError", rbErr)) log.F("rollbackError", rbErr))
return fmt.Errorf("transaction failed and rollback failed: %v (rollback: %v)", err, rbErr) return fmt.Errorf("transaction failed and rollback failed: %v (rollback: %v)", err, rbErr)
} }
logger.LogDebug("Transaction rolled back due to error", log.LogDebug("Transaction rolled back due to error",
logger.F("error", err)) log.F("error", err))
return err return err
} }
if err := tx.Commit().Error; err != nil { if err := tx.Commit().Error; err != nil {
logger.LogError("Failed to commit transaction", log.LogError("Failed to commit transaction",
logger.F("error", err)) log.F("error", err))
return fmt.Errorf("%w: %v", ErrTransactionFailed, err) return fmt.Errorf("%w: %v", ErrTransactionFailed, err)
} }
logger.LogDebug("Transaction committed successfully") log.LogDebug("Transaction committed successfully")
return nil return nil
} }

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"errors" "errors"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// BookRepository defines CRUD methods specific to Book. // BookRepository defines CRUD methods specific to Book.

View File

@ -3,7 +3,7 @@ package repositories
import ( import (
"context" "context"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// BookmarkRepository defines CRUD methods specific to Bookmark. // BookmarkRepository defines CRUD methods specific to Bookmark.

View File

@ -6,9 +6,8 @@ import (
"time" "time"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/internal/platform/cache"
"tercul/cache" "tercul/internal/platform/log"
"tercul/logger"
) )
// simpleKeyGenerator implements the cache.KeyGenerator interface // simpleKeyGenerator implements the cache.KeyGenerator interface
@ -37,8 +36,8 @@ func (g *simpleKeyGenerator) QueryKey(entityType string, queryName string, param
// CachedRepository wraps a BaseRepository with caching functionality // CachedRepository wraps a BaseRepository with caching functionality
type CachedRepository[T any] struct { type CachedRepository[T any] struct {
repo BaseRepository[T] repo BaseRepository[T]
cache cache.Cache cache cache.Cache
keyGenerator cache.KeyGenerator keyGenerator cache.KeyGenerator
entityType string entityType string
cacheExpiry time.Duration cacheExpiry time.Duration
@ -93,9 +92,9 @@ func (r *CachedRepository[T]) Create(ctx context.Context, entity *T) error {
if r.cacheEnabled { if r.cacheEnabled {
if redisCache, ok := r.cache.(*cache.RedisCache); ok { if redisCache, ok := r.cache.(*cache.RedisCache); ok {
if err := redisCache.InvalidateEntityType(ctx, r.entityType); err != nil { if err := redisCache.InvalidateEntityType(ctx, r.entityType); err != nil {
logger.LogWarn("Failed to invalidate cache", log.LogWarn("Failed to invalidate cache",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("error", err)) log.F("error", err))
} }
} }
} }
@ -120,16 +119,16 @@ func (r *CachedRepository[T]) GetByID(ctx context.Context, id uint) (*T, error)
err := r.cache.Get(ctx, cacheKey, &entity) err := r.cache.Get(ctx, cacheKey, &entity)
if err == nil { if err == nil {
// Cache hit // Cache hit
logger.LogDebug("Cache hit", log.LogDebug("Cache hit",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("id", id)) log.F("id", id))
return &entity, nil return &entity, nil
} }
// Cache miss, get from database // Cache miss, get from database
logger.LogDebug("Cache miss", log.LogDebug("Cache miss",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("id", id)) log.F("id", id))
entity_ptr, err := r.repo.GetByID(ctx, id) entity_ptr, err := r.repo.GetByID(ctx, id)
if err != nil { if err != nil {
@ -138,10 +137,10 @@ func (r *CachedRepository[T]) GetByID(ctx context.Context, id uint) (*T, error)
// Store in cache // Store in cache
if err := r.cache.Set(ctx, cacheKey, entity_ptr, r.cacheExpiry); err != nil { if err := r.cache.Set(ctx, cacheKey, entity_ptr, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache entity", log.LogWarn("Failed to cache entity",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("id", id), log.F("id", id),
logger.F("error", err)) log.F("error", err))
} }
return entity_ptr, nil return entity_ptr, nil
@ -165,17 +164,17 @@ func (r *CachedRepository[T]) Update(ctx context.Context, entity *T) error {
// Invalidate specific entity cache // Invalidate specific entity cache
cacheKey := r.keyGenerator.EntityKey(r.entityType, 0) // We don't have ID here, so invalidate all cacheKey := r.keyGenerator.EntityKey(r.entityType, 0) // We don't have ID here, so invalidate all
if err := r.cache.Delete(ctx, cacheKey); err != nil { if err := r.cache.Delete(ctx, cacheKey); err != nil {
logger.LogWarn("Failed to invalidate entity cache", log.LogWarn("Failed to invalidate entity cache",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("error", err)) log.F("error", err))
} }
// Invalidate list caches // Invalidate list caches
if redisCache, ok := r.cache.(*cache.RedisCache); ok { if redisCache, ok := r.cache.(*cache.RedisCache); ok {
if err := redisCache.InvalidateEntityType(ctx, r.entityType); err != nil { if err := redisCache.InvalidateEntityType(ctx, r.entityType); err != nil {
logger.LogWarn("Failed to invalidate cache", log.LogWarn("Failed to invalidate cache",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("error", err)) log.F("error", err))
} }
} }
} }
@ -199,18 +198,18 @@ func (r *CachedRepository[T]) Delete(ctx context.Context, id uint) error {
if r.cacheEnabled { if r.cacheEnabled {
cacheKey := r.keyGenerator.EntityKey(r.entityType, id) cacheKey := r.keyGenerator.EntityKey(r.entityType, id)
if err := r.cache.Delete(ctx, cacheKey); err != nil { if err := r.cache.Delete(ctx, cacheKey); err != nil {
logger.LogWarn("Failed to invalidate entity cache", log.LogWarn("Failed to invalidate entity cache",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("id", id), log.F("id", id),
logger.F("error", err)) log.F("error", err))
} }
// Invalidate list caches // Invalidate list caches
if redisCache, ok := r.cache.(*cache.RedisCache); ok { if redisCache, ok := r.cache.(*cache.RedisCache); ok {
if err := redisCache.InvalidateEntityType(ctx, r.entityType); err != nil { if err := redisCache.InvalidateEntityType(ctx, r.entityType); err != nil {
logger.LogWarn("Failed to invalidate cache", log.LogWarn("Failed to invalidate cache",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("error", err)) log.F("error", err))
} }
} }
} }
@ -235,18 +234,18 @@ func (r *CachedRepository[T]) List(ctx context.Context, page, pageSize int) (*Pa
err := r.cache.Get(ctx, cacheKey, &result) err := r.cache.Get(ctx, cacheKey, &result)
if err == nil { if err == nil {
// Cache hit // Cache hit
logger.LogDebug("Cache hit for list", log.LogDebug("Cache hit for list",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("page", page), log.F("page", page),
logger.F("pageSize", pageSize)) log.F("pageSize", pageSize))
return &result, nil return &result, nil
} }
// Cache miss, get from database // Cache miss, get from database
logger.LogDebug("Cache miss for list", log.LogDebug("Cache miss for list",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("page", page), log.F("page", page),
logger.F("pageSize", pageSize)) log.F("pageSize", pageSize))
result_ptr, err := r.repo.List(ctx, page, pageSize) result_ptr, err := r.repo.List(ctx, page, pageSize)
if err != nil { if err != nil {
@ -255,11 +254,11 @@ func (r *CachedRepository[T]) List(ctx context.Context, page, pageSize int) (*Pa
// Store in cache // Store in cache
if err := r.cache.Set(ctx, cacheKey, result_ptr, r.cacheExpiry); err != nil { if err := r.cache.Set(ctx, cacheKey, result_ptr, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache list", log.LogWarn("Failed to cache list",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("page", page), log.F("page", page),
logger.F("pageSize", pageSize), log.F("pageSize", pageSize),
logger.F("error", err)) log.F("error", err))
} }
return result_ptr, nil return result_ptr, nil
@ -283,14 +282,14 @@ func (r *CachedRepository[T]) ListAll(ctx context.Context) ([]T, error) {
err := r.cache.Get(ctx, cacheKey, &entities) err := r.cache.Get(ctx, cacheKey, &entities)
if err == nil { if err == nil {
// Cache hit // Cache hit
logger.LogDebug("Cache hit for listAll", log.LogDebug("Cache hit for listAll",
logger.F("entityType", r.entityType)) log.F("entityType", r.entityType))
return entities, nil return entities, nil
} }
// Cache miss, get from database // Cache miss, get from database
logger.LogDebug("Cache miss for listAll", log.LogDebug("Cache miss for listAll",
logger.F("entityType", r.entityType)) log.F("entityType", r.entityType))
entities, err = r.repo.ListAll(ctx) entities, err = r.repo.ListAll(ctx)
if err != nil { if err != nil {
@ -299,9 +298,9 @@ func (r *CachedRepository[T]) ListAll(ctx context.Context) ([]T, error) {
// Store in cache // Store in cache
if err := r.cache.Set(ctx, cacheKey, entities, r.cacheExpiry); err != nil { if err := r.cache.Set(ctx, cacheKey, entities, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache listAll", log.LogWarn("Failed to cache listAll",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("error", err)) log.F("error", err))
} }
return entities, nil return entities, nil
@ -319,14 +318,14 @@ func (r *CachedRepository[T]) Count(ctx context.Context) (int64, error) {
err := r.cache.Get(ctx, cacheKey, &count) err := r.cache.Get(ctx, cacheKey, &count)
if err == nil { if err == nil {
// Cache hit // Cache hit
logger.LogDebug("Cache hit for count", log.LogDebug("Cache hit for count",
logger.F("entityType", r.entityType)) log.F("entityType", r.entityType))
return count, nil return count, nil
} }
// Cache miss, get from database // Cache miss, get from database
logger.LogDebug("Cache miss for count", log.LogDebug("Cache miss for count",
logger.F("entityType", r.entityType)) log.F("entityType", r.entityType))
count, err = r.repo.Count(ctx) count, err = r.repo.Count(ctx)
if err != nil { if err != nil {
@ -335,9 +334,9 @@ func (r *CachedRepository[T]) Count(ctx context.Context) (int64, error) {
// Store in cache // Store in cache
if err := r.cache.Set(ctx, cacheKey, count, r.cacheExpiry); err != nil { if err := r.cache.Set(ctx, cacheKey, count, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache count", log.LogWarn("Failed to cache count",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("error", err)) log.F("error", err))
} }
return count, nil return count, nil

View File

@ -3,16 +3,15 @@ package repositories_test
import ( import (
"context" "context"
"errors" "errors"
"tercul/internal/models"
repositories2 "tercul/internal/repositories"
"testing" "testing"
"time" "time"
"tercul/internal/testutil"
"tercul/models"
"tercul/repositories"
"github.com/stretchr/testify/mock" "github.com/stretchr/testify/mock"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/internal/testutil"
) )
// TestModel is a simple entity used for cached repository tests // TestModel is a simple entity used for cached repository tests
@ -77,7 +76,7 @@ func (m *MockRepository[T]) GetByID(ctx context.Context, id uint) (*T, error) {
return args.Get(0).(*T), args.Error(1) return args.Get(0).(*T), args.Error(1)
} }
func (m *MockRepository[T]) GetByIDWithOptions(ctx context.Context, id uint, options *repositories.QueryOptions) (*T, error) { func (m *MockRepository[T]) GetByIDWithOptions(ctx context.Context, id uint, options *repositories2.QueryOptions) (*T, error) {
return nil, nil return nil, nil
} }
@ -95,15 +94,15 @@ func (m *MockRepository[T]) Delete(ctx context.Context, id uint) error {
func (m *MockRepository[T]) DeleteInTx(ctx context.Context, tx *gorm.DB, id uint) error { return nil } func (m *MockRepository[T]) DeleteInTx(ctx context.Context, tx *gorm.DB, id uint) error { return nil }
func (m *MockRepository[T]) List(ctx context.Context, page, pageSize int) (*repositories.PaginatedResult[T], error) { func (m *MockRepository[T]) List(ctx context.Context, page, pageSize int) (*repositories2.PaginatedResult[T], error) {
args := m.Called(ctx, page, pageSize) args := m.Called(ctx, page, pageSize)
if args.Get(0) == nil { if args.Get(0) == nil {
return nil, args.Error(1) return nil, args.Error(1)
} }
return args.Get(0).(*repositories.PaginatedResult[T]), args.Error(1) return args.Get(0).(*repositories2.PaginatedResult[T]), args.Error(1)
} }
func (m *MockRepository[T]) ListWithOptions(ctx context.Context, options *repositories.QueryOptions) ([]T, error) { func (m *MockRepository[T]) ListWithOptions(ctx context.Context, options *repositories2.QueryOptions) ([]T, error) {
var z []T var z []T
return z, nil return z, nil
} }
@ -129,7 +128,7 @@ func (m *MockRepository[T]) Count(ctx context.Context) (int64, error) {
return args.Get(0).(int64), args.Error(1) return args.Get(0).(int64), args.Error(1)
} }
func (m *MockRepository[T]) CountWithOptions(ctx context.Context, options *repositories.QueryOptions) (int64, error) { func (m *MockRepository[T]) CountWithOptions(ctx context.Context, options *repositories2.QueryOptions) (int64, error) {
return 0, nil return 0, nil
} }
@ -141,8 +140,8 @@ func (m *MockRepository[T]) FindWithPreload(ctx context.Context, preloads []stri
return args.Get(0).(*T), args.Error(1) return args.Get(0).(*T), args.Error(1)
} }
func (m *MockRepository[T]) Exists(ctx context.Context, id uint) (bool, error) { return false, nil } func (m *MockRepository[T]) Exists(ctx context.Context, id uint) (bool, error) { return false, nil }
func (m *MockRepository[T]) BeginTx(ctx context.Context) (*gorm.DB, error) { return nil, nil } func (m *MockRepository[T]) BeginTx(ctx context.Context) (*gorm.DB, error) { return nil, nil }
func (m *MockRepository[T]) WithTx(ctx context.Context, fn func(tx *gorm.DB) error) error { return nil } func (m *MockRepository[T]) WithTx(ctx context.Context, fn func(tx *gorm.DB) error) error { return nil }
// CachedRepositorySuite is a test suite for the CachedRepository // CachedRepositorySuite is a test suite for the CachedRepository
@ -150,14 +149,14 @@ type CachedRepositorySuite struct {
testutil.BaseSuite testutil.BaseSuite
mockRepo *MockRepository[TestModel] mockRepo *MockRepository[TestModel]
mockCache *MockCache mockCache *MockCache
repo *repositories.CachedRepository[TestModel] repo *repositories2.CachedRepository[TestModel]
} }
// SetupTest sets up each test // SetupTest sets up each test
func (s *CachedRepositorySuite) SetupTest() { func (s *CachedRepositorySuite) SetupTest() {
s.mockRepo = new(MockRepository[TestModel]) s.mockRepo = new(MockRepository[TestModel])
s.mockCache = new(MockCache) s.mockCache = new(MockCache)
s.repo = repositories.NewCachedRepository[TestModel]( s.repo = repositories2.NewCachedRepository[TestModel](
s.mockRepo, s.mockRepo,
s.mockCache, s.mockCache,
nil, nil,
@ -279,20 +278,20 @@ func (s *CachedRepositorySuite) TestUpdate() {
} }
// Mock repository // Mock repository
s.mockRepo.On("Update", mock.Anything, model). s.mockRepo.On("Update", mock.Anything, model).
Return(nil) Return(nil)
// Execute // Execute
ctx := context.Background() ctx := context.Background()
// Expect cache delete during update invalidation // Expect cache delete during update invalidation
s.mockCache.On("Delete", mock.Anything, mock.Anything).Return(nil) s.mockCache.On("Delete", mock.Anything, mock.Anything).Return(nil)
err := s.repo.Update(ctx, model) err := s.repo.Update(ctx, model)
// Assert // Assert
s.Require().NoError(err) s.Require().NoError(err)
// Verify mocks // Verify mocks
s.mockRepo.AssertCalled(s.T(), "Update", mock.Anything, model) s.mockRepo.AssertCalled(s.T(), "Update", mock.Anything, model)
} }
// TestDelete tests the Delete method // TestDelete tests the Delete method
@ -300,9 +299,9 @@ func (s *CachedRepositorySuite) TestDelete() {
// Setup // Setup
id := uint(1) id := uint(1)
// Mock repository and cache delete // Mock repository and cache delete
s.mockRepo.On("Delete", mock.Anything, id).Return(nil) s.mockRepo.On("Delete", mock.Anything, id).Return(nil)
s.mockCache.On("Delete", mock.Anything, mock.Anything).Return(nil) s.mockCache.On("Delete", mock.Anything, mock.Anything).Return(nil)
// Execute // Execute
ctx := context.Background() ctx := context.Background()
@ -320,7 +319,7 @@ func (s *CachedRepositorySuite) TestListCacheHit() {
// Setup // Setup
page := 1 page := 1
pageSize := 10 pageSize := 10
expectedResult := &repositories.PaginatedResult[TestModel]{ expectedResult := &repositories2.PaginatedResult[TestModel]{
Items: []TestModel{ Items: []TestModel{
{ {
BaseModel: models.BaseModel{ BaseModel: models.BaseModel{
@ -347,7 +346,7 @@ func (s *CachedRepositorySuite) TestListCacheHit() {
s.mockCache.On("Get", mock.Anything, mock.Anything, mock.Anything). s.mockCache.On("Get", mock.Anything, mock.Anything, mock.Anything).
Run(func(args mock.Arguments) { Run(func(args mock.Arguments) {
// Set the value to simulate cache hit // Set the value to simulate cache hit
value := args.Get(2).(*repositories.PaginatedResult[TestModel]) value := args.Get(2).(*repositories2.PaginatedResult[TestModel])
*value = *expectedResult *value = *expectedResult
}). }).
Return(nil) Return(nil)
@ -375,7 +374,7 @@ func (s *CachedRepositorySuite) TestListCacheMiss() {
// Setup // Setup
page := 1 page := 1
pageSize := 10 pageSize := 10
expectedResult := &repositories.PaginatedResult[TestModel]{ expectedResult := &repositories2.PaginatedResult[TestModel]{
Items: []TestModel{ Items: []TestModel{
{ {
BaseModel: models.BaseModel{ BaseModel: models.BaseModel{

View File

@ -2,11 +2,11 @@ package repositories
import ( import (
"context" "context"
"tercul/internal/models"
"time" "time"
"tercul/cache" "tercul/internal/platform/cache"
"tercul/logger" "tercul/internal/platform/log"
"tercul/models"
) )
// CachedWorkRepository wraps a WorkRepository with caching functionality // CachedWorkRepository wraps a WorkRepository with caching functionality
@ -54,16 +54,16 @@ func (r *CachedWorkRepository) FindByTitle(ctx context.Context, title string) ([
err := r.cache.Get(ctx, cacheKey, &result) err := r.cache.Get(ctx, cacheKey, &result)
if err == nil { if err == nil {
// Cache hit // Cache hit
logger.LogDebug("Cache hit for FindByTitle", log.LogDebug("Cache hit for FindByTitle",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("title", title)) log.F("title", title))
return result, nil return result, nil
} }
// Cache miss, get from database // Cache miss, get from database
logger.LogDebug("Cache miss for FindByTitle", log.LogDebug("Cache miss for FindByTitle",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("title", title)) log.F("title", title))
result, err = r.workRepo.FindByTitle(ctx, title) result, err = r.workRepo.FindByTitle(ctx, title)
if err != nil { if err != nil {
@ -72,10 +72,10 @@ func (r *CachedWorkRepository) FindByTitle(ctx context.Context, title string) ([
// Store in cache // Store in cache
if err := r.cache.Set(ctx, cacheKey, result, r.cacheExpiry); err != nil { if err := r.cache.Set(ctx, cacheKey, result, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache FindByTitle result", log.LogWarn("Failed to cache FindByTitle result",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("title", title), log.F("title", title),
logger.F("error", err)) log.F("error", err))
} }
return result, nil return result, nil
@ -93,16 +93,16 @@ func (r *CachedWorkRepository) FindByAuthor(ctx context.Context, authorID uint)
err := r.cache.Get(ctx, cacheKey, &result) err := r.cache.Get(ctx, cacheKey, &result)
if err == nil { if err == nil {
// Cache hit // Cache hit
logger.LogDebug("Cache hit for FindByAuthor", log.LogDebug("Cache hit for FindByAuthor",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("authorID", authorID)) log.F("authorID", authorID))
return result, nil return result, nil
} }
// Cache miss, get from database // Cache miss, get from database
logger.LogDebug("Cache miss for FindByAuthor", log.LogDebug("Cache miss for FindByAuthor",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("authorID", authorID)) log.F("authorID", authorID))
result, err = r.workRepo.FindByAuthor(ctx, authorID) result, err = r.workRepo.FindByAuthor(ctx, authorID)
if err != nil { if err != nil {
@ -111,10 +111,10 @@ func (r *CachedWorkRepository) FindByAuthor(ctx context.Context, authorID uint)
// Store in cache // Store in cache
if err := r.cache.Set(ctx, cacheKey, result, r.cacheExpiry); err != nil { if err := r.cache.Set(ctx, cacheKey, result, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache FindByAuthor result", log.LogWarn("Failed to cache FindByAuthor result",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("authorID", authorID), log.F("authorID", authorID),
logger.F("error", err)) log.F("error", err))
} }
return result, nil return result, nil
@ -132,16 +132,16 @@ func (r *CachedWorkRepository) FindByCategory(ctx context.Context, categoryID ui
err := r.cache.Get(ctx, cacheKey, &result) err := r.cache.Get(ctx, cacheKey, &result)
if err == nil { if err == nil {
// Cache hit // Cache hit
logger.LogDebug("Cache hit for FindByCategory", log.LogDebug("Cache hit for FindByCategory",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("categoryID", categoryID)) log.F("categoryID", categoryID))
return result, nil return result, nil
} }
// Cache miss, get from database // Cache miss, get from database
logger.LogDebug("Cache miss for FindByCategory", log.LogDebug("Cache miss for FindByCategory",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("categoryID", categoryID)) log.F("categoryID", categoryID))
result, err = r.workRepo.FindByCategory(ctx, categoryID) result, err = r.workRepo.FindByCategory(ctx, categoryID)
if err != nil { if err != nil {
@ -150,10 +150,10 @@ func (r *CachedWorkRepository) FindByCategory(ctx context.Context, categoryID ui
// Store in cache // Store in cache
if err := r.cache.Set(ctx, cacheKey, result, r.cacheExpiry); err != nil { if err := r.cache.Set(ctx, cacheKey, result, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache FindByCategory result", log.LogWarn("Failed to cache FindByCategory result",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("categoryID", categoryID), log.F("categoryID", categoryID),
logger.F("error", err)) log.F("error", err))
} }
return result, nil return result, nil
@ -171,20 +171,20 @@ func (r *CachedWorkRepository) FindByLanguage(ctx context.Context, language stri
err := r.cache.Get(ctx, cacheKey, &result) err := r.cache.Get(ctx, cacheKey, &result)
if err == nil { if err == nil {
// Cache hit // Cache hit
logger.LogDebug("Cache hit for FindByLanguage", log.LogDebug("Cache hit for FindByLanguage",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("language", language), log.F("language", language),
logger.F("page", page), log.F("page", page),
logger.F("pageSize", pageSize)) log.F("pageSize", pageSize))
return &result, nil return &result, nil
} }
// Cache miss, get from database // Cache miss, get from database
logger.LogDebug("Cache miss for FindByLanguage", log.LogDebug("Cache miss for FindByLanguage",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("language", language), log.F("language", language),
logger.F("page", page), log.F("page", page),
logger.F("pageSize", pageSize)) log.F("pageSize", pageSize))
result_ptr, err := r.workRepo.FindByLanguage(ctx, language, page, pageSize) result_ptr, err := r.workRepo.FindByLanguage(ctx, language, page, pageSize)
if err != nil { if err != nil {
@ -193,12 +193,12 @@ func (r *CachedWorkRepository) FindByLanguage(ctx context.Context, language stri
// Store in cache // Store in cache
if err := r.cache.Set(ctx, cacheKey, result_ptr, r.cacheExpiry); err != nil { if err := r.cache.Set(ctx, cacheKey, result_ptr, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache FindByLanguage result", log.LogWarn("Failed to cache FindByLanguage result",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("language", language), log.F("language", language),
logger.F("page", page), log.F("page", page),
logger.F("pageSize", pageSize), log.F("pageSize", pageSize),
logger.F("error", err)) log.F("error", err))
} }
return result_ptr, nil return result_ptr, nil
@ -216,16 +216,16 @@ func (r *CachedWorkRepository) GetWithTranslations(ctx context.Context, id uint)
err := r.cache.Get(ctx, cacheKey, &result) err := r.cache.Get(ctx, cacheKey, &result)
if err == nil { if err == nil {
// Cache hit // Cache hit
logger.LogDebug("Cache hit for GetWithTranslations", log.LogDebug("Cache hit for GetWithTranslations",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("id", id)) log.F("id", id))
return &result, nil return &result, nil
} }
// Cache miss, get from database // Cache miss, get from database
logger.LogDebug("Cache miss for GetWithTranslations", log.LogDebug("Cache miss for GetWithTranslations",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("id", id)) log.F("id", id))
result_ptr, err := r.workRepo.GetWithTranslations(ctx, id) result_ptr, err := r.workRepo.GetWithTranslations(ctx, id)
if err != nil { if err != nil {
@ -234,10 +234,10 @@ func (r *CachedWorkRepository) GetWithTranslations(ctx context.Context, id uint)
// Store in cache // Store in cache
if err := r.cache.Set(ctx, cacheKey, result_ptr, r.cacheExpiry); err != nil { if err := r.cache.Set(ctx, cacheKey, result_ptr, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache GetWithTranslations result", log.LogWarn("Failed to cache GetWithTranslations result",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("id", id), log.F("id", id),
logger.F("error", err)) log.F("error", err))
} }
return result_ptr, nil return result_ptr, nil
@ -255,18 +255,18 @@ func (r *CachedWorkRepository) ListWithTranslations(ctx context.Context, page, p
err := r.cache.Get(ctx, cacheKey, &result) err := r.cache.Get(ctx, cacheKey, &result)
if err == nil { if err == nil {
// Cache hit // Cache hit
logger.LogDebug("Cache hit for ListWithTranslations", log.LogDebug("Cache hit for ListWithTranslations",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("page", page), log.F("page", page),
logger.F("pageSize", pageSize)) log.F("pageSize", pageSize))
return &result, nil return &result, nil
} }
// Cache miss, get from database // Cache miss, get from database
logger.LogDebug("Cache miss for ListWithTranslations", log.LogDebug("Cache miss for ListWithTranslations",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("page", page), log.F("page", page),
logger.F("pageSize", pageSize)) log.F("pageSize", pageSize))
result_ptr, err := r.workRepo.ListWithTranslations(ctx, page, pageSize) result_ptr, err := r.workRepo.ListWithTranslations(ctx, page, pageSize)
if err != nil { if err != nil {
@ -275,11 +275,11 @@ func (r *CachedWorkRepository) ListWithTranslations(ctx context.Context, page, p
// Store in cache // Store in cache
if err := r.cache.Set(ctx, cacheKey, result_ptr, r.cacheExpiry); err != nil { if err := r.cache.Set(ctx, cacheKey, result_ptr, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache ListWithTranslations result", log.LogWarn("Failed to cache ListWithTranslations result",
logger.F("entityType", r.entityType), log.F("entityType", r.entityType),
logger.F("page", page), log.F("page", page),
logger.F("pageSize", pageSize), log.F("pageSize", pageSize),
logger.F("error", err)) log.F("error", err))
} }
return result_ptr, nil return result_ptr, nil

View File

@ -4,14 +4,13 @@ import (
"context" "context"
"encoding/json" "encoding/json"
"errors" "errors"
models2 "tercul/internal/models"
repositories2 "tercul/internal/repositories"
"testing" "testing"
"time" "time"
"tercul/internal/testutil"
"tercul/models"
"tercul/repositories"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
"tercul/internal/testutil"
) )
// ErrCacheMiss is returned when a key is not found in the cache // ErrCacheMiss is returned when a key is not found in the cache
@ -84,19 +83,19 @@ func (c *testCache) SetMulti(ctx context.Context, items map[string]interface{},
// MockWorkRepository for testing // MockWorkRepository for testing
type MockWorkRepository struct { type MockWorkRepository struct {
works []*models.Work works []*models2.Work
} }
func NewMockWorkRepository() *MockWorkRepository { func NewMockWorkRepository() *MockWorkRepository {
return &MockWorkRepository{works: []*models.Work{}} return &MockWorkRepository{works: []*models2.Work{}}
} }
func (m *MockWorkRepository) AddWork(work *models.Work) { func (m *MockWorkRepository) AddWork(work *models2.Work) {
work.ID = uint(len(m.works) + 1) work.ID = uint(len(m.works) + 1)
m.works = append(m.works, work) m.works = append(m.works, work)
} }
func (m *MockWorkRepository) GetByID(id uint) (*models.Work, error) { func (m *MockWorkRepository) GetByID(id uint) (*models2.Work, error) {
for _, w := range m.works { for _, w := range m.works {
if w.ID == id { if w.ID == id {
return w, nil return w, nil
@ -105,8 +104,8 @@ func (m *MockWorkRepository) GetByID(id uint) (*models.Work, error) {
return nil, errors.New("not found") return nil, errors.New("not found")
} }
func (m *MockWorkRepository) FindByTitle(title string) ([]*models.Work, error) { func (m *MockWorkRepository) FindByTitle(title string) ([]*models2.Work, error) {
var result []*models.Work var result []*models2.Work
for _, w := range m.works { for _, w := range m.works {
if len(title) == 0 || (len(w.Title) >= len(title) && w.Title[:len(title)] == title) { if len(title) == 0 || (len(w.Title) >= len(title) && w.Title[:len(title)] == title) {
result = append(result, w) result = append(result, w)
@ -115,8 +114,8 @@ func (m *MockWorkRepository) FindByTitle(title string) ([]*models.Work, error) {
return result, nil return result, nil
} }
func (m *MockWorkRepository) FindByLanguage(language string, page, pageSize int) (*repositories.PaginatedResult[*models.Work], error) { func (m *MockWorkRepository) FindByLanguage(language string, page, pageSize int) (*repositories2.PaginatedResult[*models2.Work], error) {
var filtered []*models.Work var filtered []*models2.Work
for _, w := range m.works { for _, w := range m.works {
if w.Language == language { if w.Language == language {
filtered = append(filtered, w) filtered = append(filtered, w)
@ -126,12 +125,12 @@ func (m *MockWorkRepository) FindByLanguage(language string, page, pageSize int)
start := (page - 1) * pageSize start := (page - 1) * pageSize
end := start + pageSize end := start + pageSize
if start > len(filtered) { if start > len(filtered) {
return &repositories.PaginatedResult[*models.Work]{Items: []*models.Work{}, TotalCount: total}, nil return &repositories2.PaginatedResult[*models2.Work]{Items: []*models2.Work{}, TotalCount: total}, nil
} }
if end > len(filtered) { if end > len(filtered) {
end = len(filtered) end = len(filtered)
} }
return &repositories.PaginatedResult[*models.Work]{Items: filtered[start:end], TotalCount: total}, nil return &repositories2.PaginatedResult[*models2.Work]{Items: filtered[start:end], TotalCount: total}, nil
} }
func (m *MockWorkRepository) Count() (int64, error) { func (m *MockWorkRepository) Count() (int64, error) {
@ -145,7 +144,7 @@ type CachedWorkRepositorySuite struct {
suite.Suite suite.Suite
baseRepo *testutil.UnifiedMockWorkRepository baseRepo *testutil.UnifiedMockWorkRepository
cache *testCache cache *testCache
repo *repositories.CachedWorkRepository repo *repositories2.CachedWorkRepository
} }
func (s *CachedWorkRepositorySuite) SetupSuite() { func (s *CachedWorkRepositorySuite) SetupSuite() {
@ -156,7 +155,7 @@ func (s *CachedWorkRepositorySuite) SetupTest() {
s.baseRepo = testutil.NewUnifiedMockWorkRepository() s.baseRepo = testutil.NewUnifiedMockWorkRepository()
s.cache = &testCache{data: make(map[string][]byte)} s.cache = &testCache{data: make(map[string][]byte)}
s.repo = repositories.NewCachedWorkRepository( s.repo = repositories2.NewCachedWorkRepository(
s.baseRepo, s.baseRepo,
s.cache, s.cache,
nil, nil,
@ -165,9 +164,9 @@ func (s *CachedWorkRepositorySuite) SetupTest() {
} }
// createTestWork creates a test work and adds it to the mock repo // createTestWork creates a test work and adds it to the mock repo
func (s *CachedWorkRepositorySuite) createTestWork(title, language string) *models.Work { func (s *CachedWorkRepositorySuite) createTestWork(title, language string) *models2.Work {
work := &models.Work{ work := &models2.Work{
TranslatableModel: models.TranslatableModel{BaseModel: models.BaseModel{ID: 0}, Language: language}, TranslatableModel: models2.TranslatableModel{BaseModel: models2.BaseModel{ID: 0}, Language: language},
Title: title, Title: title,
Description: "Test description", Description: "Test description",
Status: "published", Status: "published",
@ -180,13 +179,13 @@ func (s *CachedWorkRepositorySuite) createTestWork(title, language string) *mode
func (s *CachedWorkRepositorySuite) TestGetByID() { func (s *CachedWorkRepositorySuite) TestGetByID() {
work := s.createTestWork("Test Work", "en") work := s.createTestWork("Test Work", "en")
result1, err := s.repo.GetByID(context.Background(), work.ID) result1, err := s.repo.GetByID(context.Background(), work.ID)
s.Require().NoError(err) s.Require().NoError(err)
s.Require().NotNil(result1) s.Require().NotNil(result1)
s.Equal(work.ID, result1.ID) s.Equal(work.ID, result1.ID)
s.Equal(work.Title, result1.Title) s.Equal(work.Title, result1.Title)
result2, err := s.repo.GetByID(context.Background(), work.ID) result2, err := s.repo.GetByID(context.Background(), work.ID)
s.Require().NoError(err) s.Require().NoError(err)
s.Require().NotNil(result2) s.Require().NotNil(result2)
s.Equal(work.ID, result2.ID) s.Equal(work.ID, result2.ID)
@ -202,11 +201,11 @@ func (s *CachedWorkRepositorySuite) TestFindByTitle() {
work2 := s.createTestWork("Test Work 2", "en") work2 := s.createTestWork("Test Work 2", "en")
_ = s.createTestWork("Another Work", "en") _ = s.createTestWork("Another Work", "en")
works1, err := s.repo.FindByTitle(context.Background(), "Test") works1, err := s.repo.FindByTitle(context.Background(), "Test")
s.Require().NoError(err) s.Require().NoError(err)
s.Require().Len(works1, 2) s.Require().Len(works1, 2)
works2, err := s.repo.FindByTitle(context.Background(), "Test") works2, err := s.repo.FindByTitle(context.Background(), "Test")
s.Require().NoError(err) s.Require().NoError(err)
s.Require().Len(works2, 2) s.Require().Len(works2, 2)
@ -232,13 +231,13 @@ func (s *CachedWorkRepositorySuite) TestFindByLanguage() {
s.createTestWork("Work 4", "fr") s.createTestWork("Work 4", "fr")
s.createTestWork("Work 5", "es") s.createTestWork("Work 5", "es")
result1, err := s.repo.FindByLanguage(context.Background(), "en", 1, 10) result1, err := s.repo.FindByLanguage(context.Background(), "en", 1, 10)
s.Require().NoError(err) s.Require().NoError(err)
s.Require().NotNil(result1) s.Require().NotNil(result1)
s.Equal(int64(2), result1.TotalCount) s.Equal(int64(2), result1.TotalCount)
s.Equal(2, len(result1.Items)) s.Equal(2, len(result1.Items))
result2, err := s.repo.FindByLanguage(context.Background(), "en", 1, 10) result2, err := s.repo.FindByLanguage(context.Background(), "en", 1, 10)
s.Require().NoError(err) s.Require().NoError(err)
s.Require().NotNil(result2) s.Require().NotNil(result2)
s.Equal(int64(2), result2.TotalCount) s.Equal(int64(2), result2.TotalCount)

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"errors" "errors"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// CategoryRepository defines CRUD methods specific to Category. // CategoryRepository defines CRUD methods specific to Category.

View File

@ -3,7 +3,7 @@ package repositories
import ( import (
"context" "context"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// CityRepository defines CRUD methods specific to City. // CityRepository defines CRUD methods specific to City.

View File

@ -3,7 +3,7 @@ package repositories
import ( import (
"context" "context"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// CollectionRepository defines CRUD methods specific to Collection. // CollectionRepository defines CRUD methods specific to Collection.

View File

@ -3,7 +3,7 @@ package repositories
import ( import (
"context" "context"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// CommentRepository defines CRUD methods specific to Comment. // CommentRepository defines CRUD methods specific to Comment.

View File

@ -3,7 +3,7 @@ package repositories
import ( import (
"context" "context"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// ContributionRepository defines CRUD methods specific to Contribution. // ContributionRepository defines CRUD methods specific to Contribution.

View File

@ -3,7 +3,7 @@ package repositories
import ( import (
"context" "context"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// CopyrightClaimRepository defines CRUD methods specific to CopyrightClaim. // CopyrightClaimRepository defines CRUD methods specific to CopyrightClaim.

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"errors" "errors"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// CopyrightRepository defines CRUD methods specific to Copyright. // CopyrightRepository defines CRUD methods specific to Copyright.

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"errors" "errors"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// CountryRepository defines CRUD methods specific to Country. // CountryRepository defines CRUD methods specific to Country.

View File

@ -3,7 +3,7 @@ package repositories
import ( import (
"context" "context"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// EdgeRepository defines CRUD operations for the polymorphic edge table. // EdgeRepository defines CRUD operations for the polymorphic edge table.

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"errors" "errors"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// EditionRepository defines CRUD methods specific to Edition. // EditionRepository defines CRUD methods specific to Edition.

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"errors" "errors"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
"time" "time"
) )

View File

@ -3,7 +3,7 @@ package repositories
import ( import (
"context" "context"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// LikeRepository defines CRUD methods specific to Like. // LikeRepository defines CRUD methods specific to Like.

View File

@ -3,7 +3,7 @@ package repositories
import ( import (
"context" "context"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// MonetizationRepository defines CRUD methods specific to Monetization. // MonetizationRepository defines CRUD methods specific to Monetization.

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"errors" "errors"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
"time" "time"
) )

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"gorm.io/gorm" "gorm.io/gorm"
"math" "math"
"tercul/models" "tercul/internal/models"
) )
// PlaceRepository defines CRUD methods specific to Place. // PlaceRepository defines CRUD methods specific to Place.

View File

@ -3,7 +3,7 @@ package repositories
import ( import (
"context" "context"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// PublisherRepository defines CRUD methods specific to Publisher. // PublisherRepository defines CRUD methods specific to Publisher.

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"errors" "errors"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// SourceRepository defines CRUD methods specific to Source. // SourceRepository defines CRUD methods specific to Source.

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"errors" "errors"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// TagRepository defines CRUD methods specific to Tag. // TagRepository defines CRUD methods specific to Tag.

View File

@ -3,34 +3,34 @@ package repositories
import ( import (
"context" "context"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" models2 "tercul/internal/models"
) )
// TranslationRepository defines CRUD methods specific to Translation. // TranslationRepository defines CRUD methods specific to Translation.
type TranslationRepository interface { type TranslationRepository interface {
BaseRepository[models.Translation] BaseRepository[models2.Translation]
ListByWorkID(ctx context.Context, workID uint) ([]models.Translation, error) ListByWorkID(ctx context.Context, workID uint) ([]models2.Translation, error)
ListByEntity(ctx context.Context, entityType string, entityID uint) ([]models.Translation, error) ListByEntity(ctx context.Context, entityType string, entityID uint) ([]models2.Translation, error)
ListByTranslatorID(ctx context.Context, translatorID uint) ([]models.Translation, error) ListByTranslatorID(ctx context.Context, translatorID uint) ([]models2.Translation, error)
ListByStatus(ctx context.Context, status models.TranslationStatus) ([]models.Translation, error) ListByStatus(ctx context.Context, status models2.TranslationStatus) ([]models2.Translation, error)
} }
type translationRepository struct { type translationRepository struct {
BaseRepository[models.Translation] BaseRepository[models2.Translation]
db *gorm.DB db *gorm.DB
} }
// NewTranslationRepository creates a new TranslationRepository. // NewTranslationRepository creates a new TranslationRepository.
func NewTranslationRepository(db *gorm.DB) TranslationRepository { func NewTranslationRepository(db *gorm.DB) TranslationRepository {
return &translationRepository{ return &translationRepository{
BaseRepository: NewBaseRepositoryImpl[models.Translation](db), BaseRepository: NewBaseRepositoryImpl[models2.Translation](db),
db: db, db: db,
} }
} }
// ListByWorkID finds translations by work ID // ListByWorkID finds translations by work ID
func (r *translationRepository) ListByWorkID(ctx context.Context, workID uint) ([]models.Translation, error) { func (r *translationRepository) ListByWorkID(ctx context.Context, workID uint) ([]models2.Translation, error) {
var translations []models.Translation var translations []models2.Translation
if err := r.db.WithContext(ctx).Where("translatable_id = ? AND translatable_type = ?", workID, "Work").Find(&translations).Error; err != nil { if err := r.db.WithContext(ctx).Where("translatable_id = ? AND translatable_type = ?", workID, "Work").Find(&translations).Error; err != nil {
return nil, err return nil, err
} }
@ -38,8 +38,8 @@ func (r *translationRepository) ListByWorkID(ctx context.Context, workID uint) (
} }
// ListByEntity finds translations by entity type and ID // ListByEntity finds translations by entity type and ID
func (r *translationRepository) ListByEntity(ctx context.Context, entityType string, entityID uint) ([]models.Translation, error) { func (r *translationRepository) ListByEntity(ctx context.Context, entityType string, entityID uint) ([]models2.Translation, error) {
var translations []models.Translation var translations []models2.Translation
if err := r.db.WithContext(ctx).Where("translatable_id = ? AND translatable_type = ?", entityID, entityType).Find(&translations).Error; err != nil { if err := r.db.WithContext(ctx).Where("translatable_id = ? AND translatable_type = ?", entityID, entityType).Find(&translations).Error; err != nil {
return nil, err return nil, err
} }
@ -47,8 +47,8 @@ func (r *translationRepository) ListByEntity(ctx context.Context, entityType str
} }
// ListByTranslatorID finds translations by translator ID // ListByTranslatorID finds translations by translator ID
func (r *translationRepository) ListByTranslatorID(ctx context.Context, translatorID uint) ([]models.Translation, error) { func (r *translationRepository) ListByTranslatorID(ctx context.Context, translatorID uint) ([]models2.Translation, error) {
var translations []models.Translation var translations []models2.Translation
if err := r.db.WithContext(ctx).Where("translator_id = ?", translatorID).Find(&translations).Error; err != nil { if err := r.db.WithContext(ctx).Where("translator_id = ?", translatorID).Find(&translations).Error; err != nil {
return nil, err return nil, err
} }
@ -56,8 +56,8 @@ func (r *translationRepository) ListByTranslatorID(ctx context.Context, translat
} }
// ListByStatus finds translations by status // ListByStatus finds translations by status
func (r *translationRepository) ListByStatus(ctx context.Context, status models.TranslationStatus) ([]models.Translation, error) { func (r *translationRepository) ListByStatus(ctx context.Context, status models2.TranslationStatus) ([]models2.Translation, error) {
var translations []models.Translation var translations []models2.Translation
if err := r.db.WithContext(ctx).Where("status = ?", status).Find(&translations).Error; err != nil { if err := r.db.WithContext(ctx).Where("status = ?", status).Find(&translations).Error; err != nil {
return nil, err return nil, err
} }

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"errors" "errors"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// UserProfileRepository defines CRUD methods specific to UserProfile. // UserProfileRepository defines CRUD methods specific to UserProfile.

View File

@ -4,33 +4,33 @@ import (
"context" "context"
"errors" "errors"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" models2 "tercul/internal/models"
) )
// UserRepository defines CRUD methods specific to User. // UserRepository defines CRUD methods specific to User.
type UserRepository interface { type UserRepository interface {
BaseRepository[models.User] BaseRepository[models2.User]
FindByUsername(ctx context.Context, username string) (*models.User, error) FindByUsername(ctx context.Context, username string) (*models2.User, error)
FindByEmail(ctx context.Context, email string) (*models.User, error) FindByEmail(ctx context.Context, email string) (*models2.User, error)
ListByRole(ctx context.Context, role models.UserRole) ([]models.User, error) ListByRole(ctx context.Context, role models2.UserRole) ([]models2.User, error)
} }
type userRepository struct { type userRepository struct {
BaseRepository[models.User] BaseRepository[models2.User]
db *gorm.DB db *gorm.DB
} }
// NewUserRepository creates a new UserRepository. // NewUserRepository creates a new UserRepository.
func NewUserRepository(db *gorm.DB) UserRepository { func NewUserRepository(db *gorm.DB) UserRepository {
return &userRepository{ return &userRepository{
BaseRepository: NewBaseRepositoryImpl[models.User](db), BaseRepository: NewBaseRepositoryImpl[models2.User](db),
db: db, db: db,
} }
} }
// FindByUsername finds a user by username // FindByUsername finds a user by username
func (r *userRepository) FindByUsername(ctx context.Context, username string) (*models.User, error) { func (r *userRepository) FindByUsername(ctx context.Context, username string) (*models2.User, error) {
var user models.User var user models2.User
if err := r.db.WithContext(ctx).Where("username = ?", username).First(&user).Error; err != nil { if err := r.db.WithContext(ctx).Where("username = ?", username).First(&user).Error; err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) { if errors.Is(err, gorm.ErrRecordNotFound) {
return nil, ErrEntityNotFound return nil, ErrEntityNotFound
@ -41,8 +41,8 @@ func (r *userRepository) FindByUsername(ctx context.Context, username string) (*
} }
// FindByEmail finds a user by email // FindByEmail finds a user by email
func (r *userRepository) FindByEmail(ctx context.Context, email string) (*models.User, error) { func (r *userRepository) FindByEmail(ctx context.Context, email string) (*models2.User, error) {
var user models.User var user models2.User
if err := r.db.WithContext(ctx).Where("email = ?", email).First(&user).Error; err != nil { if err := r.db.WithContext(ctx).Where("email = ?", email).First(&user).Error; err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) { if errors.Is(err, gorm.ErrRecordNotFound) {
return nil, ErrEntityNotFound return nil, ErrEntityNotFound
@ -53,8 +53,8 @@ func (r *userRepository) FindByEmail(ctx context.Context, email string) (*models
} }
// ListByRole lists users by role // ListByRole lists users by role
func (r *userRepository) ListByRole(ctx context.Context, role models.UserRole) ([]models.User, error) { func (r *userRepository) ListByRole(ctx context.Context, role models2.UserRole) ([]models2.User, error) {
var users []models.User var users []models2.User
if err := r.db.WithContext(ctx).Where("role = ?", role).Find(&users).Error; err != nil { if err := r.db.WithContext(ctx).Where("role = ?", role).Find(&users).Error; err != nil {
return nil, err return nil, err
} }

View File

@ -4,7 +4,7 @@ import (
"context" "context"
"errors" "errors"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
"time" "time"
) )

View File

@ -3,7 +3,7 @@ package repositories
import ( import (
"context" "context"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
) )
// WorkRepository defines methods specific to Work. // WorkRepository defines methods specific to Work.

View File

@ -1,12 +1,12 @@
package repositories_test package repositories_test
import ( import (
"context" "context"
"tercul/internal/testutil" models2 "tercul/internal/models"
"tercul/models" "tercul/internal/testutil"
"testing" "testing"
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
) )
// WorkRepositorySuite is a test suite for the MockWorkRepository // WorkRepositorySuite is a test suite for the MockWorkRepository
@ -20,31 +20,31 @@ func (s *WorkRepositorySuite) SetupTest() {
} }
func (s *WorkRepositorySuite) TestCreate() { func (s *WorkRepositorySuite) TestCreate() {
work := &models.Work{Title: "Test Work"} work := &models2.Work{Title: "Test Work"}
err := s.repo.Create(context.Background(), work) err := s.repo.Create(context.Background(), work)
s.Require().NoError(err) s.Require().NoError(err)
} }
func (s *WorkRepositorySuite) TestGetByID() { func (s *WorkRepositorySuite) TestGetByID() {
work := &models.Work{Title: "Test Work"} work := &models2.Work{Title: "Test Work"}
s.repo.Create(context.Background(), work) s.repo.Create(context.Background(), work)
got, err := s.repo.GetByID(context.Background(), work.ID) got, err := s.repo.GetByID(context.Background(), work.ID)
s.Require().NoError(err) s.Require().NoError(err)
s.Require().Equal(work.ID, got.ID) s.Require().Equal(work.ID, got.ID)
} }
func (s *WorkRepositorySuite) TestFindByTitle() { func (s *WorkRepositorySuite) TestFindByTitle() {
work := &models.Work{Title: "Test"} work := &models2.Work{Title: "Test"}
s.repo.Create(context.Background(), work) s.repo.Create(context.Background(), work)
works, err := s.repo.FindByTitle(context.Background(), "Test") works, err := s.repo.FindByTitle(context.Background(), "Test")
s.Require().NoError(err) s.Require().NoError(err)
s.Require().Len(works, 1) s.Require().Len(works, 1)
} }
func (s *WorkRepositorySuite) TestFindByLanguage() { func (s *WorkRepositorySuite) TestFindByLanguage() {
work := &models.Work{TranslatableModel: models.TranslatableModel{Language: "en"}, Title: "Test"} work := &models2.Work{TranslatableModel: models2.TranslatableModel{Language: "en"}, Title: "Test"}
s.repo.Create(context.Background(), work) s.repo.Create(context.Background(), work)
result, err := s.repo.FindByLanguage(context.Background(), "en", 1, 10) result, err := s.repo.FindByLanguage(context.Background(), "en", 1, 10)
s.Require().NoError(err) s.Require().NoError(err)
s.Require().Len(result.Items, 1) s.Require().Len(result.Items, 1)
} }

View File

@ -3,7 +3,7 @@ package store
import ( import (
"gorm.io/gorm" "gorm.io/gorm"
"strings" "strings"
"tercul/models" models2 "tercul/internal/models"
) )
// DB represents a database connection // DB represents a database connection
@ -24,14 +24,14 @@ func ListPendingWorks(db *DB) []Work {
var works []Work var works []Work
// Query for works that haven't been enriched yet // Query for works that haven't been enriched yet
var modelWorks []models.Work var modelWorks []models2.Work
db.Where("id NOT IN (SELECT work_id FROM language_analyses)").Find(&modelWorks) db.Where("id NOT IN (SELECT work_id FROM language_analyses)").Find(&modelWorks)
// Convert to store.Work // Convert to store.Work
for _, work := range modelWorks { for _, work := range modelWorks {
// Prefer original language translation; fallback to work language; then any // Prefer original language translation; fallback to work language; then any
var content string var content string
var t models.Translation var t models2.Translation
// Try original // Try original
if err := db.Where("translatable_type = ? AND translatable_id = ? AND is_original_language = ?", "Work", work.ID, true). if err := db.Where("translatable_type = ? AND translatable_id = ? AND is_original_language = ?", "Work", work.ID, true).
First(&t).Error; err == nil { First(&t).Error; err == nil {
@ -61,7 +61,7 @@ func ListPendingWorks(db *DB) []Work {
// UpsertWord creates or updates a word in the database // UpsertWord creates or updates a word in the database
func UpsertWord(db *DB, workID uint, text, lemma, pos, phonetic string) error { func UpsertWord(db *DB, workID uint, text, lemma, pos, phonetic string) error {
// Check if the word already exists // Check if the word already exists
var word models.Word var word models2.Word
result := db.Where("text = ? AND language = ?", text, "auto").First(&word) result := db.Where("text = ? AND language = ?", text, "auto").First(&word)
if result.Error != nil && result.Error != gorm.ErrRecordNotFound { if result.Error != nil && result.Error != gorm.ErrRecordNotFound {
@ -71,7 +71,7 @@ func UpsertWord(db *DB, workID uint, text, lemma, pos, phonetic string) error {
// Create or update the word // Create or update the word
if result.Error == gorm.ErrRecordNotFound { if result.Error == gorm.ErrRecordNotFound {
// Create new word // Create new word
word = models.Word{ word = models2.Word{
Text: text, Text: text,
Language: "auto", // This would be set to the detected language Language: "auto", // This would be set to the detected language
PartOfSpeech: pos, PartOfSpeech: pos,
@ -101,7 +101,7 @@ func SaveKeywords(db *DB, workID uint, keywords []string) error {
} }
// Create a topic cluster for the keywords // Create a topic cluster for the keywords
cluster := models.TopicCluster{ cluster := models2.TopicCluster{
Name: "Auto-generated", Name: "Auto-generated",
Description: "Automatically generated keywords", Description: "Automatically generated keywords",
Keywords: strings.Join(keywords, ", "), Keywords: strings.Join(keywords, ", "),
@ -117,7 +117,7 @@ func SaveKeywords(db *DB, workID uint, keywords []string) error {
// SavePoetics saves poetic analysis for a work // SavePoetics saves poetic analysis for a work
func SavePoetics(db *DB, workID uint, metrics PoeticMetrics) error { func SavePoetics(db *DB, workID uint, metrics PoeticMetrics) error {
poetics := models.PoeticAnalysis{ poetics := models2.PoeticAnalysis{
WorkID: workID, WorkID: workID,
Language: "auto", // This would be set to the detected language Language: "auto", // This would be set to the detected language
RhymeScheme: metrics.RhymeScheme, RhymeScheme: metrics.RhymeScheme,
@ -133,10 +133,10 @@ func SavePoetics(db *DB, workID uint, metrics PoeticMetrics) error {
// MarkEnriched marks a work as enriched with the detected language // MarkEnriched marks a work as enriched with the detected language
func MarkEnriched(db *DB, workID uint, language string) error { func MarkEnriched(db *DB, workID uint, language string) error {
// Create a language analysis record to mark the work as processed // Create a language analysis record to mark the work as processed
analysis := models.LanguageAnalysis{ analysis := models2.LanguageAnalysis{
WorkID: workID, WorkID: workID,
Language: language, Language: language,
Analysis: models.JSONB{ Analysis: models2.JSONB{
"enriched": true, "enriched": true,
"language": language, "language": language,
}, },

View File

@ -0,0 +1,348 @@
package testutil
import (
"context"
"fmt"
"log"
"os"
"path/filepath"
"testing"
"time"
"github.com/stretchr/testify/suite"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"gorm.io/gorm/logger"
"tercul/internal/models"
"tercul/internal/repositories"
"tercul/services"
"tercul/graph"
)
// IntegrationTestSuite provides a comprehensive test environment backed either
// by an in-memory SQLite database or by mock repositories, depending on the
// TestConfig passed to SetupSuite.
//
// NOTE(review): in mock mode only WorkRepo is currently wired (see
// setupMockRepositories); the remaining repository fields stay nil — confirm
// before depending on them in tests.
type IntegrationTestSuite struct {
	suite.Suite

	// DB is the GORM handle; nil when mock repositories are used instead of SQLite.
	DB *gorm.DB

	// Repository instances under test.
	WorkRepo        repositories.WorkRepository
	UserRepo        repositories.UserRepository
	AuthorRepo      repositories.AuthorRepository
	TranslationRepo repositories.TranslationRepository
	CommentRepo     repositories.CommentRepository
	LikeRepo        repositories.LikeRepository
	BookmarkRepo    repositories.BookmarkRepository
	CollectionRepo  repositories.CollectionRepository
	TagRepo         repositories.TagRepository
	CategoryRepo    repositories.CategoryRepository

	// Services built on top of the repositories (see setupServices; only
	// WorkService is currently constructed).
	WorkService  services.WorkService
	Localization services.LocalizationService
	AuthService  services.AuthService

	// Test fixtures created by setupTestData and reused across tests.
	TestWorks        []*models.Work
	TestUsers        []*models.User
	TestAuthors      []*models.Author
	TestTranslations []*models.Translation
}
// TestConfig holds configuration for the test environment.
type TestConfig struct {
	// UseInMemoryDB selects the backing store: true runs a real SQLite
	// database (in-memory unless DBPath is set), false uses mock repositories.
	UseInMemoryDB bool
	// DBPath is an optional on-disk SQLite file path; when empty and
	// UseInMemoryDB is true, ":memory:" is used.
	DBPath string
	// LogLevel controls GORM query logging verbosity during tests.
	LogLevel logger.LogLevel
}
// DefaultTestConfig returns the configuration used when SetupSuite receives
// nil: an in-memory SQLite database with silent query logging.
func DefaultTestConfig() *TestConfig {
	cfg := &TestConfig{}
	cfg.UseInMemoryDB = true
	cfg.DBPath = ""
	cfg.LogLevel = logger.Silent
	return cfg
}
// SetupSuite initializes the suite according to config (nil means
// DefaultTestConfig): it wires the storage layer, the services, and finally
// seeds the shared test fixtures.
func (s *IntegrationTestSuite) SetupSuite(config *TestConfig) {
	if config == nil {
		config = DefaultTestConfig()
	}

	// Choose the storage backend first; services and fixtures build on it.
	switch {
	case config.UseInMemoryDB:
		s.setupInMemoryDB(config)
	default:
		s.setupMockRepositories()
	}

	s.setupServices()
	s.setupTestData()
}
// setupInMemoryDB opens a SQLite database for testing (":memory:" unless
// config.DBPath names a file), runs the schema migrations, and wires the
// GORM-backed repository implementations.
func (s *IntegrationTestSuite) setupInMemoryDB(config *TestConfig) {
	// Resolve the DSN: a file path (creating its directory) or pure in-memory.
	dsn := ":memory:"
	if config.DBPath != "" {
		if err := os.MkdirAll(filepath.Dir(config.DBPath), 0755); err != nil {
			s.T().Fatalf("Failed to create database directory: %v", err)
		}
		dsn = config.DBPath
	}

	// Test-friendly GORM logger: quiet by default, never colorized.
	gormLogger := logger.New(
		log.New(os.Stdout, "\r\n", log.LstdFlags),
		logger.Config{
			SlowThreshold:             time.Second,
			LogLevel:                  config.LogLevel,
			IgnoreRecordNotFoundError: true,
			Colorful:                  false,
		},
	)

	db, err := gorm.Open(sqlite.Open(dsn), &gorm.Config{Logger: gormLogger})
	if err != nil {
		s.T().Fatalf("Failed to connect to test database: %v", err)
	}
	s.DB = db

	// Migrate every model the integration tests touch.
	migrations := []interface{}{
		&models.Work{},
		&models.User{},
		&models.Author{},
		&models.Translation{},
		&models.Comment{},
		&models.Like{},
		&models.Bookmark{},
		&models.Collection{},
		&models.Tag{},
		&models.Category{},
		&models.Country{},
		&models.City{},
		&models.Place{},
		&models.Address{},
		&models.Copyright{},
		&models.CopyrightClaim{},
		&models.Monetization{},
		&models.Book{},
		&models.Publisher{},
		&models.Source{},
		// &models.WorkAnalytics{}, // Commented out as it's not in models package
		&models.ReadabilityScore{},
		&models.WritingStyle{},
		&models.Emotion{},
		&models.TopicCluster{},
		&models.Mood{},
		&models.Concept{},
		&models.LinguisticLayer{},
		&models.WorkStats{},
		&models.TextMetadata{},
		&models.PoeticAnalysis{},
		&models.TranslationField{},
	}
	if err := db.AutoMigrate(migrations...); err != nil {
		s.T().Fatalf("Failed to run migrations: %v", err)
	}

	// Wire the real repository implementations against the fresh database.
	s.WorkRepo = repositories.NewWorkRepository(db)
	s.UserRepo = repositories.NewUserRepository(db)
	s.AuthorRepo = repositories.NewAuthorRepository(db)
	s.TranslationRepo = repositories.NewTranslationRepository(db)
	s.CommentRepo = repositories.NewCommentRepository(db)
	s.LikeRepo = repositories.NewLikeRepository(db)
	s.BookmarkRepo = repositories.NewBookmarkRepository(db)
	s.CollectionRepo = repositories.NewCollectionRepository(db)
	s.TagRepo = repositories.NewTagRepository(db)
	s.CategoryRepo = repositories.NewCategoryRepository(db)
}
// setupMockRepositories wires mock repositories for testing without a
// database. Only the work repository is currently available; the other mocks
// are disabled until their interface implementations are fixed.
func (s *IntegrationTestSuite) setupMockRepositories() {
	s.WorkRepo = NewUnifiedMockWorkRepository()

	// Temporarily disabled until the mock implementations satisfy their
	// repository interfaces:
	// s.UserRepo = NewMockUserRepository()
	// s.AuthorRepo = NewMockAuthorRepository()
	// s.TranslationRepo = NewMockTranslationRepository()
	// s.CommentRepo = NewMockCommentRepository()
	// s.LikeRepo = NewMockLikeRepository()
	// s.BookmarkRepo = NewMockBookmarkRepository()
	// s.CollectionRepo = NewMockCollectionRepository()
	// s.TagRepo = NewMockTagRepository()
	// s.CategoryRepo = NewMockCategoryRepository()
}
// setupServices constructs the service layer on top of the repositories.
// Only WorkService is built today; the others depend on repositories that are
// not yet available in mock mode.
func (s *IntegrationTestSuite) setupServices() {
	s.WorkService = services.NewWorkService(s.WorkRepo, nil)

	// Disabled until their repository dependencies are reliable:
	// s.Localization = services.NewLocalizationService(s.TranslationRepo)
	// s.AuthService = services.NewAuthService(s.UserRepo, "test-secret-key")
}
// setupTestData seeds the shared fixtures (users, authors, works, and one
// original-language translation per work). Creation failures are logged as
// warnings rather than failing the suite, so tests that do not need a given
// fixture can still run.
//
// Fix: in mock mode setupMockRepositories wires only WorkRepo, leaving the
// other repository interfaces nil; calling Create on a nil interface panics.
// Each repository is therefore nil-guarded here, preserving the intended
// "warn and continue" behavior instead of crashing the whole suite.
func (s *IntegrationTestSuite) setupTestData() {
	ctx := context.Background()

	// Create test users.
	s.TestUsers = []*models.User{
		{Username: "testuser1", Email: "test1@example.com", FirstName: "Test", LastName: "User1"},
		{Username: "testuser2", Email: "test2@example.com", FirstName: "Test", LastName: "User2"},
	}
	if s.UserRepo != nil {
		for _, user := range s.TestUsers {
			if err := s.UserRepo.Create(ctx, user); err != nil {
				s.T().Logf("Warning: Failed to create test user: %v", err)
			}
		}
	} else {
		s.T().Log("Warning: UserRepo not configured; skipping test user creation")
	}

	// Create test authors.
	s.TestAuthors = []*models.Author{
		{Name: "Test Author 1", Language: "en"},
		{Name: "Test Author 2", Language: "fr"},
	}
	if s.AuthorRepo != nil {
		for _, author := range s.TestAuthors {
			if err := s.AuthorRepo.Create(ctx, author); err != nil {
				s.T().Logf("Warning: Failed to create test author: %v", err)
			}
		}
	} else {
		s.T().Log("Warning: AuthorRepo not configured; skipping test author creation")
	}

	// Create test works.
	s.TestWorks = []*models.Work{
		{Title: "Test Work 1", Language: "en"},
		{Title: "Test Work 2", Language: "en"},
		{Title: "Test Work 3", Language: "fr"},
	}
	if s.WorkRepo != nil {
		for _, work := range s.TestWorks {
			if err := s.WorkRepo.Create(ctx, work); err != nil {
				s.T().Logf("Warning: Failed to create test work: %v", err)
			}
		}
	} else {
		s.T().Log("Warning: WorkRepo not configured; skipping test work creation")
	}

	// Create one original-language translation per work, linked by the IDs
	// assigned during work creation above.
	s.TestTranslations = []*models.Translation{
		{
			Title:              "Test Work 1",
			Content:            "Test content for work 1",
			Language:           "en",
			TranslatableID:     s.TestWorks[0].ID,
			TranslatableType:   "Work",
			IsOriginalLanguage: true,
		},
		{
			Title:              "Test Work 2",
			Content:            "Test content for work 2",
			Language:           "en",
			TranslatableID:     s.TestWorks[1].ID,
			TranslatableType:   "Work",
			IsOriginalLanguage: true,
		},
		{
			Title:              "Test Work 3",
			Content:            "Test content for work 3",
			Language:           "fr",
			TranslatableID:     s.TestWorks[2].ID,
			TranslatableType:   "Work",
			IsOriginalLanguage: true,
		},
	}
	if s.TranslationRepo != nil {
		for _, translation := range s.TestTranslations {
			if err := s.TranslationRepo.Create(ctx, translation); err != nil {
				s.T().Logf("Warning: Failed to create test translation: %v", err)
			}
		}
	} else {
		s.T().Log("Warning: TranslationRepo not configured; skipping test translation creation")
	}
}
// TearDownSuite releases resources held by the suite, closing the underlying
// SQL connection when a real database was used. It is a no-op for mock runs.
func (s *IntegrationTestSuite) TearDownSuite() {
	if s.DB == nil {
		return
	}
	if sqlDB, err := s.DB.DB(); err == nil {
		sqlDB.Close()
	}
}
// SetupTest resets state before each test. With a real database it wipes all
// fixture tables and reseeds the baseline data; with mocks it clears their
// in-memory state.
func (s *IntegrationTestSuite) SetupTest() {
	if s.DB != nil {
		// Reuse CleanupTestData so the list of wiped tables (and its
		// FK-safe ordering) lives in exactly one place.
		s.CleanupTestData()
		s.setupTestData()
		return
	}
	// Reset mock repositories.
	if mockRepo, ok := s.WorkRepo.(*UnifiedMockWorkRepository); ok {
		mockRepo.Reset()
	}
	// TODO: add similar reset logic for other mock repositories once they exist.
}
// GetResolver builds a GraphQL resolver wired to every repository and service
// held by the suite, ready for executing test queries and mutations.
func (s *IntegrationTestSuite) GetResolver() *graph.Resolver {
	resolver := &graph.Resolver{
		// Repositories
		WorkRepo:        s.WorkRepo,
		UserRepo:        s.UserRepo,
		AuthorRepo:      s.AuthorRepo,
		TranslationRepo: s.TranslationRepo,
		CommentRepo:     s.CommentRepo,
		LikeRepo:        s.LikeRepo,
		BookmarkRepo:    s.BookmarkRepo,
		CollectionRepo:  s.CollectionRepo,
		TagRepo:         s.TagRepo,
		CategoryRepo:    s.CategoryRepo,
		// Services
		WorkService:  s.WorkService,
		Localization: s.Localization,
		AuthService:  s.AuthService,
	}
	return resolver
}
// CreateTestWork persists a work with the given title and language and, when
// content is non-empty, an original-language translation for it. A failure to
// create the work aborts the test; a failed translation only logs a warning.
func (s *IntegrationTestSuite) CreateTestWork(title, language string, content string) *models.Work {
	ctx := context.Background()
	w := &models.Work{
		Title: title,
	}
	w.Language = language
	if err := s.WorkRepo.Create(ctx, w); err != nil {
		s.T().Fatalf("Failed to create test work: %v", err)
	}
	if content == "" {
		return w
	}
	tr := &models.Translation{
		Title:              title,
		Content:            content,
		Language:           language,
		TranslatableID:     w.ID,
		TranslatableType:   "Work",
		IsOriginalLanguage: true,
	}
	if err := s.TranslationRepo.Create(ctx, tr); err != nil {
		s.T().Logf("Warning: Failed to create test translation: %v", err)
	}
	return w
}
// CleanupTestData wipes every fixture table. Child tables are deleted before
// their parents to respect foreign-key constraints. It is a no-op when the
// suite runs without a real database.
func (s *IntegrationTestSuite) CleanupTestData() {
	if s.DB == nil {
		return
	}
	for _, table := range []string{"translations", "works", "authors", "users"} {
		s.DB.Exec("DELETE FROM " + table)
	}
}

View File

@ -0,0 +1,72 @@
package testutil
import (
"context"
"errors"
"fmt"
"tercul/internal/repositories"
"gorm.io/gorm"
)
// MockBaseRepository provides common mock implementations for BaseRepository
// methods. It is intended to be embedded in concrete mock repositories so
// they satisfy the shared BaseRepository interface without re-implementing
// every method; each stub simply reports that the operation is unsupported,
// and embedders override the methods their tests actually exercise.
type MockBaseRepository[T any] struct {
	// This is a helper struct that can be embedded in mock repositories
	// to provide common mock implementations
}
// BeginTx starts a new transaction (mock implementation). It always fails
// because the in-memory mocks have no transactional backend.
func (m *MockBaseRepository[T]) BeginTx(ctx context.Context) (*gorm.DB, error) {
	return nil, fmt.Errorf("transactions not supported in mock repository")
}
// WithTx executes a function within a transaction (mock implementation).
// The callback fn is never invoked; the call always returns an error.
func (m *MockBaseRepository[T]) WithTx(ctx context.Context, fn func(tx *gorm.DB) error) error {
	return fmt.Errorf("transactions not supported in mock repository")
}
// CreateInTx creates an entity within a transaction (mock implementation).
// Always returns a not-implemented error; the entity is not stored.
func (m *MockBaseRepository[T]) CreateInTx(ctx context.Context, tx *gorm.DB, entity *T) error {
	return fmt.Errorf("CreateInTx not implemented in mock repository")
}
// UpdateInTx updates an entity within a transaction (mock implementation).
// Always returns a not-implemented error; the entity is not modified.
func (m *MockBaseRepository[T]) UpdateInTx(ctx context.Context, tx *gorm.DB, entity *T) error {
	return fmt.Errorf("UpdateInTx not implemented in mock repository")
}
// DeleteInTx removes an entity by its ID within a transaction (mock
// implementation). Always returns a not-implemented error.
func (m *MockBaseRepository[T]) DeleteInTx(ctx context.Context, tx *gorm.DB, id uint) error {
	return fmt.Errorf("DeleteInTx not implemented in mock repository")
}
// GetByIDWithOptions retrieves an entity by its ID with query options (mock
// implementation). Always returns a nil entity and a not-implemented error.
func (m *MockBaseRepository[T]) GetByIDWithOptions(ctx context.Context, id uint, options *repositories.QueryOptions) (*T, error) {
	return nil, fmt.Errorf("GetByIDWithOptions not implemented in mock repository")
}
// ListWithOptions returns entities with query options (mock implementation).
// Always returns a nil slice and a not-implemented error.
func (m *MockBaseRepository[T]) ListWithOptions(ctx context.Context, options *repositories.QueryOptions) ([]T, error) {
	return nil, fmt.Errorf("ListWithOptions not implemented in mock repository")
}
// CountWithOptions returns the count with query options (mock implementation).
// Always returns zero and a not-implemented error.
func (m *MockBaseRepository[T]) CountWithOptions(ctx context.Context, options *repositories.QueryOptions) (int64, error) {
	return 0, fmt.Errorf("CountWithOptions not implemented in mock repository")
}
// Exists checks if an entity exists by ID (mock implementation).
// Always returns false and a not-implemented error.
func (m *MockBaseRepository[T]) Exists(ctx context.Context, id uint) (bool, error) {
	return false, fmt.Errorf("Exists not implemented in mock repository")
}
// GetAllForSync returns entities in batches for synchronization (mock
// implementation). Always returns a nil slice and a not-implemented error.
func (m *MockBaseRepository[T]) GetAllForSync(ctx context.Context, batchSize, offset int) ([]T, error) {
	return nil, fmt.Errorf("GetAllForSync not implemented in mock repository")
}
// AddMockBaseRepositoryMethods adds all the missing BaseRepository methods to a mock repository.
// This is a helper function to avoid duplicating code.
//
// NOTE(review): currently a no-op placeholder — the repo argument is never
// used and the function body is empty. The intended reflection-based
// implementation was never written; methods are implemented manually in each
// mock repository instead. Consider removing this once all mocks embed
// MockBaseRepository.
func AddMockBaseRepositoryMethods[T any](repo interface{}) {
	// This function would use reflection to add methods, but for now
	// we'll implement them manually in each repository
}

View File

@ -4,23 +4,23 @@ import (
"context" "context"
"errors" "errors"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" models2 "tercul/internal/models"
"tercul/repositories" repositories2 "tercul/internal/repositories"
) )
// MockTranslationRepository is an in-memory implementation of TranslationRepository // MockTranslationRepository is an in-memory implementation of TranslationRepository
type MockTranslationRepository struct { type MockTranslationRepository struct {
items []models.Translation items []models2.Translation
} }
func NewMockTranslationRepository() *MockTranslationRepository { func NewMockTranslationRepository() *MockTranslationRepository {
return &MockTranslationRepository{items: []models.Translation{}} return &MockTranslationRepository{items: []models2.Translation{}}
} }
var _ repositories.TranslationRepository = (*MockTranslationRepository)(nil) var _ repositories2.TranslationRepository = (*MockTranslationRepository)(nil)
// BaseRepository methods with context support // BaseRepository methods with context support
func (m *MockTranslationRepository) Create(ctx context.Context, t *models.Translation) error { func (m *MockTranslationRepository) Create(ctx context.Context, t *models2.Translation) error {
if t == nil { if t == nil {
return errors.New("nil translation") return errors.New("nil translation")
} }
@ -29,24 +29,24 @@ func (m *MockTranslationRepository) Create(ctx context.Context, t *models.Transl
return nil return nil
} }
func (m *MockTranslationRepository) GetByID(ctx context.Context, id uint) (*models.Translation, error) { func (m *MockTranslationRepository) GetByID(ctx context.Context, id uint) (*models2.Translation, error) {
for i := range m.items { for i := range m.items {
if m.items[i].ID == id { if m.items[i].ID == id {
cp := m.items[i] cp := m.items[i]
return &cp, nil return &cp, nil
} }
} }
return nil, repositories.ErrEntityNotFound return nil, repositories2.ErrEntityNotFound
} }
func (m *MockTranslationRepository) Update(ctx context.Context, t *models.Translation) error { func (m *MockTranslationRepository) Update(ctx context.Context, t *models2.Translation) error {
for i := range m.items { for i := range m.items {
if m.items[i].ID == t.ID { if m.items[i].ID == t.ID {
m.items[i] = *t m.items[i] = *t
return nil return nil
} }
} }
return repositories.ErrEntityNotFound return repositories2.ErrEntityNotFound
} }
func (m *MockTranslationRepository) Delete(ctx context.Context, id uint) error { func (m *MockTranslationRepository) Delete(ctx context.Context, id uint) error {
@ -56,57 +56,57 @@ func (m *MockTranslationRepository) Delete(ctx context.Context, id uint) error {
return nil return nil
} }
} }
return repositories.ErrEntityNotFound return repositories2.ErrEntityNotFound
} }
func (m *MockTranslationRepository) List(ctx context.Context, page, pageSize int) (*repositories.PaginatedResult[models.Translation], error) { func (m *MockTranslationRepository) List(ctx context.Context, page, pageSize int) (*repositories2.PaginatedResult[models2.Translation], error) {
all := append([]models.Translation(nil), m.items...) all := append([]models2.Translation(nil), m.items...)
total := int64(len(all)) total := int64(len(all))
start := (page - 1) * pageSize start := (page - 1) * pageSize
end := start + pageSize end := start + pageSize
if start > len(all) { if start > len(all) {
return &repositories.PaginatedResult[models.Translation]{Items: []models.Translation{}, TotalCount: total}, nil return &repositories2.PaginatedResult[models2.Translation]{Items: []models2.Translation{}, TotalCount: total}, nil
} }
if end > len(all) { if end > len(all) {
end = len(all) end = len(all)
} }
return &repositories.PaginatedResult[models.Translation]{Items: all[start:end], TotalCount: total}, nil return &repositories2.PaginatedResult[models2.Translation]{Items: all[start:end], TotalCount: total}, nil
} }
func (m *MockTranslationRepository) ListAll(ctx context.Context) ([]models.Translation, error) { func (m *MockTranslationRepository) ListAll(ctx context.Context) ([]models2.Translation, error) {
return append([]models.Translation(nil), m.items...), nil return append([]models2.Translation(nil), m.items...), nil
} }
func (m *MockTranslationRepository) Count(ctx context.Context) (int64, error) { func (m *MockTranslationRepository) Count(ctx context.Context) (int64, error) {
return int64(len(m.items)), nil return int64(len(m.items)), nil
} }
func (m *MockTranslationRepository) FindWithPreload(ctx context.Context, preloads []string, id uint) (*models.Translation, error) { func (m *MockTranslationRepository) FindWithPreload(ctx context.Context, preloads []string, id uint) (*models2.Translation, error) {
return m.GetByID(ctx, id) return m.GetByID(ctx, id)
} }
func (m *MockTranslationRepository) GetAllForSync(ctx context.Context, batchSize, offset int) ([]models.Translation, error) { func (m *MockTranslationRepository) GetAllForSync(ctx context.Context, batchSize, offset int) ([]models2.Translation, error) {
all := append([]models.Translation(nil), m.items...) all := append([]models2.Translation(nil), m.items...)
end := offset + batchSize end := offset + batchSize
if end > len(all) { if end > len(all) {
end = len(all) end = len(all)
} }
if offset > len(all) { if offset > len(all) {
return []models.Translation{}, nil return []models2.Translation{}, nil
} }
return all[offset:end], nil return all[offset:end], nil
} }
// New BaseRepository methods // New BaseRepository methods
func (m *MockTranslationRepository) CreateInTx(ctx context.Context, tx *gorm.DB, entity *models.Translation) error { func (m *MockTranslationRepository) CreateInTx(ctx context.Context, tx *gorm.DB, entity *models2.Translation) error {
return m.Create(ctx, entity) return m.Create(ctx, entity)
} }
func (m *MockTranslationRepository) GetByIDWithOptions(ctx context.Context, id uint, options *repositories.QueryOptions) (*models.Translation, error) { func (m *MockTranslationRepository) GetByIDWithOptions(ctx context.Context, id uint, options *repositories2.QueryOptions) (*models2.Translation, error) {
return m.GetByID(ctx, id) return m.GetByID(ctx, id)
} }
func (m *MockTranslationRepository) UpdateInTx(ctx context.Context, tx *gorm.DB, entity *models.Translation) error { func (m *MockTranslationRepository) UpdateInTx(ctx context.Context, tx *gorm.DB, entity *models2.Translation) error {
return m.Update(ctx, entity) return m.Update(ctx, entity)
} }
@ -114,7 +114,7 @@ func (m *MockTranslationRepository) DeleteInTx(ctx context.Context, tx *gorm.DB,
return m.Delete(ctx, id) return m.Delete(ctx, id)
} }
func (m *MockTranslationRepository) ListWithOptions(ctx context.Context, options *repositories.QueryOptions) ([]models.Translation, error) { func (m *MockTranslationRepository) ListWithOptions(ctx context.Context, options *repositories2.QueryOptions) ([]models2.Translation, error) {
result, err := m.List(ctx, 1, 1000) result, err := m.List(ctx, 1, 1000)
if err != nil { if err != nil {
return nil, err return nil, err
@ -122,7 +122,7 @@ func (m *MockTranslationRepository) ListWithOptions(ctx context.Context, options
return result.Items, nil return result.Items, nil
} }
func (m *MockTranslationRepository) CountWithOptions(ctx context.Context, options *repositories.QueryOptions) (int64, error) { func (m *MockTranslationRepository) CountWithOptions(ctx context.Context, options *repositories2.QueryOptions) (int64, error) {
return m.Count(ctx) return m.Count(ctx)
} }
@ -140,12 +140,12 @@ func (m *MockTranslationRepository) WithTx(ctx context.Context, fn func(tx *gorm
} }
// TranslationRepository specific methods // TranslationRepository specific methods
func (m *MockTranslationRepository) ListByWorkID(ctx context.Context, workID uint) ([]models.Translation, error) { func (m *MockTranslationRepository) ListByWorkID(ctx context.Context, workID uint) ([]models2.Translation, error) {
return m.ListByEntity(ctx, "Work", workID) return m.ListByEntity(ctx, "Work", workID)
} }
func (m *MockTranslationRepository) ListByEntity(ctx context.Context, entityType string, entityID uint) ([]models.Translation, error) { func (m *MockTranslationRepository) ListByEntity(ctx context.Context, entityType string, entityID uint) ([]models2.Translation, error) {
var out []models.Translation var out []models2.Translation
for i := range m.items { for i := range m.items {
tr := m.items[i] tr := m.items[i]
if tr.TranslatableType == entityType && tr.TranslatableID == entityID { if tr.TranslatableType == entityType && tr.TranslatableID == entityID {
@ -155,8 +155,8 @@ func (m *MockTranslationRepository) ListByEntity(ctx context.Context, entityType
return out, nil return out, nil
} }
func (m *MockTranslationRepository) ListByTranslatorID(ctx context.Context, translatorID uint) ([]models.Translation, error) { func (m *MockTranslationRepository) ListByTranslatorID(ctx context.Context, translatorID uint) ([]models2.Translation, error) {
var out []models.Translation var out []models2.Translation
for i := range m.items { for i := range m.items {
if m.items[i].TranslatorID != nil && *m.items[i].TranslatorID == translatorID { if m.items[i].TranslatorID != nil && *m.items[i].TranslatorID == translatorID {
out = append(out, m.items[i]) out = append(out, m.items[i])
@ -165,8 +165,8 @@ func (m *MockTranslationRepository) ListByTranslatorID(ctx context.Context, tran
return out, nil return out, nil
} }
func (m *MockTranslationRepository) ListByStatus(ctx context.Context, status models.TranslationStatus) ([]models.Translation, error) { func (m *MockTranslationRepository) ListByStatus(ctx context.Context, status models2.TranslationStatus) ([]models2.Translation, error) {
var out []models.Translation var out []models2.Translation
for i := range m.items { for i := range m.items {
if m.items[i].Status == status { if m.items[i].Status == status {
out = append(out, m.items[i]) out = append(out, m.items[i])
@ -177,12 +177,12 @@ func (m *MockTranslationRepository) ListByStatus(ctx context.Context, status mod
// Test helper: add a translation for a Work // Test helper: add a translation for a Work
func (m *MockTranslationRepository) AddTranslationForWork(workID uint, language string, content string, isOriginal bool) { func (m *MockTranslationRepository) AddTranslationForWork(workID uint, language string, content string, isOriginal bool) {
m.Create(context.Background(), &models.Translation{ m.Create(context.Background(), &models2.Translation{
Title: "", Title: "",
Content: content, Content: content,
Description: "", Description: "",
Language: language, Language: language,
Status: models.TranslationStatusPublished, Status: models2.TranslationStatusPublished,
TranslatableID: workID, TranslatableID: workID,
TranslatableType: "Work", TranslatableType: "Work",
IsOriginalLanguage: isOriginal, IsOriginalLanguage: isOriginal,

View File

@ -3,8 +3,8 @@ package testutil
import ( import (
"context" "context"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models" "tercul/internal/models"
"tercul/repositories" "tercul/internal/repositories"
) )
// UnifiedMockWorkRepository is a shared mock for WorkRepository tests // UnifiedMockWorkRepository is a shared mock for WorkRepository tests

View File

@ -0,0 +1,52 @@
package testutil
import (
"tercul/graph"
"tercul/internal/models"
"tercul/services"
"github.com/stretchr/testify/suite"
)
// SimpleTestSuite provides a minimal test environment with just the
// essentials: an in-memory work repository and the work service built on top
// of it. Embed it in a testify suite when the full integration environment
// is unnecessary.
type SimpleTestSuite struct {
	suite.Suite
	WorkRepo    *UnifiedMockWorkRepository // in-memory mock, reset before each test
	WorkService services.WorkService       // service under test, backed by WorkRepo
}
// SetupSuite runs once before the suite: it creates the in-memory work
// repository and constructs the work service on top of it.
func (s *SimpleTestSuite) SetupSuite() {
	repo := NewUnifiedMockWorkRepository()
	s.WorkRepo = repo
	s.WorkService = services.NewWorkService(repo, nil)
}
// SetupTest resets test data for each test by clearing the in-memory work
// repository, so tests never observe works created by earlier tests.
func (s *SimpleTestSuite) SetupTest() {
	s.WorkRepo.Reset()
}
// GetResolver returns a minimal GraphQL resolver for testing. Only the work
// repository and work service are populated; every other resolver dependency
// is left nil, which is sufficient for basic work-related tests.
func (s *SimpleTestSuite) GetResolver() *graph.Resolver {
	r := &graph.Resolver{
		WorkRepo:    s.WorkRepo,
		WorkService: s.WorkService,
	}
	return r
}
// CreateTestWork registers a work with the given title and language in the
// mock repository and returns it. The content argument is currently ignored:
// the mock repository has no translation support yet, so no translation is
// created for it.
func (s *SimpleTestSuite) CreateTestWork(title, language string, content string) *models.Work {
	w := &models.Work{
		Title: title,
	}
	w.Language = language
	s.WorkRepo.AddWork(w)
	return w
}

View File

@ -12,7 +12,7 @@ import (
"gorm.io/driver/postgres" "gorm.io/driver/postgres"
"gorm.io/gorm" "gorm.io/gorm"
"gorm.io/gorm/logger" "gorm.io/gorm/logger"
"tercul/config" "tercul/internal/platform/config"
) )
// TestDB holds the test database connection // TestDB holds the test database connection

View File

@ -1,24 +1,22 @@
package linguistics package linguistics
import ( import (
"github.com/jonreiter/govader" "github.com/jonreiter/govader"
) )
// GoVADERSentimentProvider implements SentimentProvider using VADER // GoVADERSentimentProvider implements SentimentProvider using VADER
type GoVADERSentimentProvider struct { type GoVADERSentimentProvider struct {
analyzer *govader.SentimentIntensityAnalyzer analyzer *govader.SentimentIntensityAnalyzer
} }
// NewGoVADERSentimentProvider constructs a VADER-based sentiment provider // NewGoVADERSentimentProvider constructs a VADER-based sentiment provider
func NewGoVADERSentimentProvider() (*GoVADERSentimentProvider, error) { func NewGoVADERSentimentProvider() (*GoVADERSentimentProvider, error) {
analyzer := govader.NewSentimentIntensityAnalyzer() analyzer := govader.NewSentimentIntensityAnalyzer()
return &GoVADERSentimentProvider{analyzer: analyzer}, nil return &GoVADERSentimentProvider{analyzer: analyzer}, nil
} }
// Score returns the compound VADER polarity score in [-1, 1] // Score returns the compound VADER polarity score in [-1, 1]
func (p *GoVADERSentimentProvider) Score(text string, _ string) (float64, error) { func (p *GoVADERSentimentProvider) Score(text string, _ string) (float64, error) {
scores := p.analyzer.PolarityScores(text) scores := p.analyzer.PolarityScores(text)
return scores.Compound, nil return scores.Compound, nil
} }

View File

@ -1,19 +1,17 @@
package linguistics package linguistics
import ( import (
"testing" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/require" "testing"
) )
func TestGoVADERSentimentProvider_Score(t *testing.T) { func TestGoVADERSentimentProvider_Score(t *testing.T) {
sp, err := NewGoVADERSentimentProvider() sp, err := NewGoVADERSentimentProvider()
require.NoError(t, err) require.NoError(t, err)
pos, err := sp.Score("I love this wonderful product!", "en") pos, err := sp.Score("I love this wonderful product!", "en")
require.NoError(t, err) require.NoError(t, err)
require.Greater(t, pos, 0.0) require.Greater(t, pos, 0.0)
neg, err := sp.Score("This is the worst thing ever. I hate it.", "en") neg, err := sp.Score("This is the worst thing ever. I hate it.", "en")
require.NoError(t, err) require.NoError(t, err)
require.Less(t, neg, 0.0) require.Less(t, neg, 0.0)
} }

View File

@ -1,36 +1,34 @@
package linguistics package linguistics
import ( import (
"strings" lingua "github.com/pemistahl/lingua-go"
lingua "github.com/pemistahl/lingua-go" "strings"
) )
// LinguaLanguageDetector implements LanguageDetector using lingua-go // LinguaLanguageDetector implements LanguageDetector using lingua-go
type LinguaLanguageDetector struct { type LinguaLanguageDetector struct {
detector lingua.LanguageDetector detector lingua.LanguageDetector
} }
// NewLinguaLanguageDetector builds a detector for all supported languages // NewLinguaLanguageDetector builds a detector for all supported languages
func NewLinguaLanguageDetector() *LinguaLanguageDetector { func NewLinguaLanguageDetector() *LinguaLanguageDetector {
det := lingua.NewLanguageDetectorBuilder().FromAllLanguages().Build() det := lingua.NewLanguageDetectorBuilder().FromAllLanguages().Build()
return &LinguaLanguageDetector{detector: det} return &LinguaLanguageDetector{detector: det}
} }
// DetectLanguage returns a lowercase ISO 639-1 code if possible // DetectLanguage returns a lowercase ISO 639-1 code if possible
func (l *LinguaLanguageDetector) DetectLanguage(text string) (string, bool) { func (l *LinguaLanguageDetector) DetectLanguage(text string) (string, bool) {
lang, ok := l.detector.DetectLanguageOf(text) lang, ok := l.detector.DetectLanguageOf(text)
if !ok { if !ok {
return "", false return "", false
} }
// Prefer ISO 639-1 when available else fallback to ISO 639-3 // Prefer ISO 639-1 when available else fallback to ISO 639-3
if s := lang.IsoCode639_1().String(); s != "" { if s := lang.IsoCode639_1().String(); s != "" {
return s, true return s, true
} }
if s := lang.IsoCode639_3().String(); s != "" { if s := lang.IsoCode639_3().String(); s != "" {
return s, true return s, true
} }
// fallback to language name // fallback to language name
return strings.ToLower(lang.String()), true return strings.ToLower(lang.String()), true
} }

View File

@ -1,15 +1,13 @@
package linguistics package linguistics
import ( import (
"testing" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/require" "testing"
) )
func TestLinguaLanguageDetector_DetectLanguage(t *testing.T) { func TestLinguaLanguageDetector_DetectLanguage(t *testing.T) {
d := NewLinguaLanguageDetector() d := NewLinguaLanguageDetector()
code, ok := d.DetectLanguage("This is an English sentence.") code, ok := d.DetectLanguage("This is an English sentence.")
require.True(t, ok) require.True(t, ok)
require.NotEmpty(t, code) require.NotEmpty(t, code)
} }

View File

@ -1,8 +1,8 @@
package linguistics package linguistics
import ( import (
"sort" "sort"
"strings" "strings"
) )
// TFIDFKeywordProvider is a lightweight keyword provider using local term frequencies as a proxy. // TFIDFKeywordProvider is a lightweight keyword provider using local term frequencies as a proxy.
@ -13,33 +13,31 @@ type TFIDFKeywordProvider struct{}
func NewTFIDFKeywordProvider() *TFIDFKeywordProvider { return &TFIDFKeywordProvider{} } func NewTFIDFKeywordProvider() *TFIDFKeywordProvider { return &TFIDFKeywordProvider{} }
func (p *TFIDFKeywordProvider) Extract(text string, language string) ([]Keyword, error) { func (p *TFIDFKeywordProvider) Extract(text string, language string) ([]Keyword, error) {
tokens := tokenizeWords(text) tokens := tokenizeWords(text)
if len(tokens) == 0 { if len(tokens) == 0 {
return nil, nil return nil, nil
} }
freq := make(map[string]int, len(tokens)) freq := make(map[string]int, len(tokens))
for _, t := range tokens { for _, t := range tokens {
if len(t) <= 2 || isStopWord(t, language) { if len(t) <= 2 || isStopWord(t, language) {
continue continue
} }
freq[strings.ToLower(t)]++ freq[strings.ToLower(t)]++
} }
total := 0 total := 0
for _, c := range freq { for _, c := range freq {
total += c total += c
} }
keywords := make([]Keyword, 0, len(freq)) keywords := make([]Keyword, 0, len(freq))
for w, c := range freq { for w, c := range freq {
rel := float64(c) / float64(len(tokens)) rel := float64(c) / float64(len(tokens))
if rel > 0 { if rel > 0 {
keywords = append(keywords, Keyword{Text: w, Relevance: rel}) keywords = append(keywords, Keyword{Text: w, Relevance: rel})
} }
} }
sort.Slice(keywords, func(i, j int) bool { return keywords[i].Relevance > keywords[j].Relevance }) sort.Slice(keywords, func(i, j int) bool { return keywords[i].Relevance > keywords[j].Relevance })
if len(keywords) > 10 { if len(keywords) > 10 {
keywords = keywords[:10] keywords = keywords[:10]
} }
return keywords, nil return keywords, nil
} }

View File

@ -1,8 +1,8 @@
package linguistics package linguistics
import ( import (
"errors" "errors"
"strings" "strings"
) )
// --- LanguageDetector Adapters --- // --- LanguageDetector Adapters ---
@ -11,7 +11,7 @@ import (
type NullLanguageDetector struct{} type NullLanguageDetector struct{}
func (n NullLanguageDetector) DetectLanguage(text string) (string, bool) { func (n NullLanguageDetector) DetectLanguage(text string) (string, bool) {
return "", false return "", false
} }
// --- SentimentProvider Adapters --- // --- SentimentProvider Adapters ---
@ -20,10 +20,8 @@ func (n NullLanguageDetector) DetectLanguage(text string) (string, bool) {
type RuleBasedSentimentProvider struct{} type RuleBasedSentimentProvider struct{}
func (r RuleBasedSentimentProvider) Score(text string, language string) (float64, error) { func (r RuleBasedSentimentProvider) Score(text string, language string) (float64, error) {
if strings.TrimSpace(text) == "" { if strings.TrimSpace(text) == "" {
return 0, errors.New("empty text") return 0, errors.New("empty text")
} }
return estimateSentimentOptimized(text, language), nil return estimateSentimentOptimized(text, language), nil
} }

View File

@ -3,12 +3,12 @@ package linguistics
import ( import (
"context" "context"
"fmt" "fmt"
"sync"
"github.com/hashicorp/golang-lru/v2" "github.com/hashicorp/golang-lru/v2"
"tercul/cache" "sync"
"tercul/logger" "tercul/internal/platform/cache"
"tercul/config" "tercul/internal/platform/config"
"time" "tercul/internal/platform/log"
"time"
) )
// AnalysisCache defines the interface for caching analysis results // AnalysisCache defines the interface for caching analysis results
@ -32,12 +32,12 @@ type MemoryAnalysisCache struct {
// NewMemoryAnalysisCache creates a new MemoryAnalysisCache // NewMemoryAnalysisCache creates a new MemoryAnalysisCache
func NewMemoryAnalysisCache(enabled bool) *MemoryAnalysisCache { func NewMemoryAnalysisCache(enabled bool) *MemoryAnalysisCache {
// capacity from config // capacity from config
cap := config.Cfg.NLPMemoryCacheCap cap := config.Cfg.NLPMemoryCacheCap
if cap <= 0 { if cap <= 0 {
cap = 1024 cap = 1024
} }
l, _ := lru.New[string, *AnalysisResult](cap) l, _ := lru.New[string, *AnalysisResult](cap)
return &MemoryAnalysisCache{ return &MemoryAnalysisCache{
cache: l, cache: l,
enabled: enabled, enabled: enabled,
@ -53,9 +53,9 @@ func (c *MemoryAnalysisCache) Get(ctx context.Context, key string) (*AnalysisRes
c.mutex.RLock() c.mutex.RLock()
defer c.mutex.RUnlock() defer c.mutex.RUnlock()
if result, exists := c.cache.Get(key); exists { if result, exists := c.cache.Get(key); exists {
return result, nil return result, nil
} }
return nil, fmt.Errorf("cache miss") return nil, fmt.Errorf("cache miss")
} }
@ -113,13 +113,13 @@ func (c *RedisAnalysisCache) Set(ctx context.Context, key string, result *Analys
return nil return nil
} }
// TTL from config // TTL from config
ttlSeconds := config.Cfg.NLPRedisCacheTTLSeconds ttlSeconds := config.Cfg.NLPRedisCacheTTLSeconds
err := c.cache.Set(ctx, key, result, time.Duration(ttlSeconds)*time.Second) err := c.cache.Set(ctx, key, result, time.Duration(ttlSeconds)*time.Second)
if err != nil { if err != nil {
logger.LogWarn("Failed to cache analysis result", log.LogWarn("Failed to cache analysis result",
logger.F("key", key), log.F("key", key),
logger.F("error", err)) log.F("error", err))
return err return err
} }
@ -176,16 +176,16 @@ func (c *CompositeAnalysisCache) Set(ctx context.Context, key string, result *An
// Set in memory cache // Set in memory cache
if err := c.memoryCache.Set(ctx, key, result); err != nil { if err := c.memoryCache.Set(ctx, key, result); err != nil {
logger.LogWarn("Failed to set memory cache", log.LogWarn("Failed to set memory cache",
logger.F("key", key), log.F("key", key),
logger.F("error", err)) log.F("error", err))
} }
// Set in Redis cache // Set in Redis cache
if err := c.redisCache.Set(ctx, key, result); err != nil { if err := c.redisCache.Set(ctx, key, result); err != nil {
logger.LogWarn("Failed to set Redis cache", log.LogWarn("Failed to set Redis cache",
logger.F("key", key), log.F("key", key),
logger.F("error", err)) log.F("error", err))
return err return err
} }

View File

@ -3,9 +3,10 @@ package linguistics
import ( import (
"context" "context"
"fmt" "fmt"
models2 "tercul/internal/models"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/logger" "tercul/internal/platform/log"
"tercul/models"
) )
// AnalysisRepository defines the interface for database operations related to analysis // AnalysisRepository defines the interface for database operations related to analysis
@ -17,14 +18,14 @@ type AnalysisRepository interface {
GetWorkContent(ctx context.Context, workID uint, language string) (string, error) GetWorkContent(ctx context.Context, workID uint, language string) (string, error)
// StoreWorkAnalysis stores work-specific analysis results // StoreWorkAnalysis stores work-specific analysis results
StoreWorkAnalysis(ctx context.Context, workID uint, textMetadata *models.TextMetadata, StoreWorkAnalysis(ctx context.Context, workID uint, textMetadata *models2.TextMetadata,
readabilityScore *models.ReadabilityScore, languageAnalysis *models.LanguageAnalysis) error readabilityScore *models2.ReadabilityScore, languageAnalysis *models2.LanguageAnalysis) error
// GetWorkByID fetches a work by ID // GetWorkByID fetches a work by ID
GetWorkByID(ctx context.Context, workID uint) (*models.Work, error) GetWorkByID(ctx context.Context, workID uint) (*models2.Work, error)
// GetAnalysisData fetches persisted analysis data for a work // GetAnalysisData fetches persisted analysis data for a work
GetAnalysisData(ctx context.Context, workID uint) (*models.TextMetadata, *models.ReadabilityScore, *models.LanguageAnalysis, error) GetAnalysisData(ctx context.Context, workID uint) (*models2.TextMetadata, *models2.ReadabilityScore, *models2.LanguageAnalysis, error)
} }
// GORMAnalysisRepository implements AnalysisRepository using GORM // GORMAnalysisRepository implements AnalysisRepository using GORM
@ -43,19 +44,19 @@ func (r *GORMAnalysisRepository) StoreAnalysisResults(ctx context.Context, workI
return fmt.Errorf("analysis result cannot be nil") return fmt.Errorf("analysis result cannot be nil")
} }
// Determine language from the work record to avoid hardcoded defaults // Determine language from the work record to avoid hardcoded defaults
var work models.Work var work models2.Work
if err := r.db.WithContext(ctx).First(&work, workID).Error; err != nil { if err := r.db.WithContext(ctx).First(&work, workID).Error; err != nil {
logger.LogError("Failed to fetch work for language", log.LogError("Failed to fetch work for language",
logger.F("workID", workID), log.F("workID", workID),
logger.F("error", err)) log.F("error", err))
return fmt.Errorf("failed to fetch work for language: %w", err) return fmt.Errorf("failed to fetch work for language: %w", err)
} }
// Create text metadata // Create text metadata
textMetadata := &models.TextMetadata{ textMetadata := &models2.TextMetadata{
WorkID: workID, WorkID: workID,
Language: work.Language, Language: work.Language,
WordCount: result.WordCount, WordCount: result.WordCount,
SentenceCount: result.SentenceCount, SentenceCount: result.SentenceCount,
ParagraphCount: result.ParagraphCount, ParagraphCount: result.ParagraphCount,
@ -64,18 +65,18 @@ func (r *GORMAnalysisRepository) StoreAnalysisResults(ctx context.Context, workI
} }
// Create readability score // Create readability score
readabilityScore := &models.ReadabilityScore{ readabilityScore := &models2.ReadabilityScore{
WorkID: workID, WorkID: workID,
Language: work.Language, Language: work.Language,
Score: result.ReadabilityScore, Score: result.ReadabilityScore,
Method: result.ReadabilityMethod, Method: result.ReadabilityMethod,
} }
// Create language analysis // Create language analysis
languageAnalysis := &models.LanguageAnalysis{ languageAnalysis := &models2.LanguageAnalysis{
WorkID: workID, WorkID: workID,
Language: work.Language, Language: work.Language,
Analysis: models.JSONB{ Analysis: models2.JSONB{
"sentiment": result.Sentiment, "sentiment": result.Sentiment,
"keywords": extractKeywordsAsJSON(result.Keywords), "keywords": extractKeywordsAsJSON(result.Keywords),
"topics": extractTopicsAsJSON(result.Topics), "topics": extractTopicsAsJSON(result.Topics),
@ -88,11 +89,11 @@ func (r *GORMAnalysisRepository) StoreAnalysisResults(ctx context.Context, workI
// GetWorkContent retrieves content for a work from translations // GetWorkContent retrieves content for a work from translations
func (r *GORMAnalysisRepository) GetWorkContent(ctx context.Context, workID uint, language string) (string, error) { func (r *GORMAnalysisRepository) GetWorkContent(ctx context.Context, workID uint, language string) (string, error) {
// First, get the work to determine its language // First, get the work to determine its language
var work models.Work var work models2.Work
if err := r.db.First(&work, workID).Error; err != nil { if err := r.db.First(&work, workID).Error; err != nil {
logger.LogError("Failed to fetch work for content retrieval", log.LogError("Failed to fetch work for content retrieval",
logger.F("workID", workID), log.F("workID", workID),
logger.F("error", err)) log.F("error", err))
return "", fmt.Errorf("failed to fetch work: %w", err) return "", fmt.Errorf("failed to fetch work: %w", err)
} }
@ -101,7 +102,7 @@ func (r *GORMAnalysisRepository) GetWorkContent(ctx context.Context, workID uint
// 2. Work's language translation // 2. Work's language translation
// 3. Any available translation // 3. Any available translation
var translation models.Translation var translation models2.Translation
// Try original language first // Try original language first
if err := r.db.Where("translatable_type = ? AND translatable_id = ? AND is_original_language = ?", if err := r.db.Where("translatable_type = ? AND translatable_id = ? AND is_original_language = ?",
@ -125,107 +126,107 @@ func (r *GORMAnalysisRepository) GetWorkContent(ctx context.Context, workID uint
} }
// GetWorkByID fetches a work by ID // GetWorkByID fetches a work by ID
func (r *GORMAnalysisRepository) GetWorkByID(ctx context.Context, workID uint) (*models.Work, error) { func (r *GORMAnalysisRepository) GetWorkByID(ctx context.Context, workID uint) (*models2.Work, error) {
var work models.Work var work models2.Work
if err := r.db.WithContext(ctx).First(&work, workID).Error; err != nil { if err := r.db.WithContext(ctx).First(&work, workID).Error; err != nil {
return nil, fmt.Errorf("failed to fetch work: %w", err) return nil, fmt.Errorf("failed to fetch work: %w", err)
} }
return &work, nil return &work, nil
} }
// GetAnalysisData fetches persisted analysis data for a work // GetAnalysisData fetches persisted analysis data for a work
func (r *GORMAnalysisRepository) GetAnalysisData(ctx context.Context, workID uint) (*models.TextMetadata, *models.ReadabilityScore, *models.LanguageAnalysis, error) { func (r *GORMAnalysisRepository) GetAnalysisData(ctx context.Context, workID uint) (*models2.TextMetadata, *models2.ReadabilityScore, *models2.LanguageAnalysis, error) {
var textMetadata models.TextMetadata var textMetadata models2.TextMetadata
var readabilityScore models.ReadabilityScore var readabilityScore models2.ReadabilityScore
var languageAnalysis models.LanguageAnalysis var languageAnalysis models2.LanguageAnalysis
if err := r.db.WithContext(ctx).Where("work_id = ?", workID).First(&textMetadata).Error; err != nil { if err := r.db.WithContext(ctx).Where("work_id = ?", workID).First(&textMetadata).Error; err != nil {
logger.LogWarn("No text metadata found for work", log.LogWarn("No text metadata found for work",
logger.F("workID", workID)) log.F("workID", workID))
} }
if err := r.db.WithContext(ctx).Where("work_id = ?", workID).First(&readabilityScore).Error; err != nil { if err := r.db.WithContext(ctx).Where("work_id = ?", workID).First(&readabilityScore).Error; err != nil {
logger.LogWarn("No readability score found for work", log.LogWarn("No readability score found for work",
logger.F("workID", workID)) log.F("workID", workID))
} }
if err := r.db.WithContext(ctx).Where("work_id = ?", workID).First(&languageAnalysis).Error; err != nil { if err := r.db.WithContext(ctx).Where("work_id = ?", workID).First(&languageAnalysis).Error; err != nil {
logger.LogWarn("No language analysis found for work", log.LogWarn("No language analysis found for work",
logger.F("workID", workID)) log.F("workID", workID))
} }
return &textMetadata, &readabilityScore, &languageAnalysis, nil return &textMetadata, &readabilityScore, &languageAnalysis, nil
} }
// StoreWorkAnalysis stores work-specific analysis results // StoreWorkAnalysis stores work-specific analysis results
func (r *GORMAnalysisRepository) StoreWorkAnalysis(ctx context.Context, workID uint, func (r *GORMAnalysisRepository) StoreWorkAnalysis(ctx context.Context, workID uint,
textMetadata *models.TextMetadata, readabilityScore *models.ReadabilityScore, textMetadata *models2.TextMetadata, readabilityScore *models2.ReadabilityScore,
languageAnalysis *models.LanguageAnalysis) error { languageAnalysis *models2.LanguageAnalysis) error {
// Use a transaction to ensure all data is stored atomically // Use a transaction to ensure all data is stored atomically
return r.db.WithContext(ctx).Transaction(func(tx *gorm.DB) error { return r.db.WithContext(ctx).Transaction(func(tx *gorm.DB) error {
// Store text metadata // Store text metadata
if textMetadata != nil { if textMetadata != nil {
if err := tx.Where("work_id = ?", workID).Delete(&models.TextMetadata{}).Error; err != nil { if err := tx.Where("work_id = ?", workID).Delete(&models2.TextMetadata{}).Error; err != nil {
logger.LogError("Failed to delete existing text metadata", log.LogError("Failed to delete existing text metadata",
logger.F("workID", workID), log.F("workID", workID),
logger.F("error", err)) log.F("error", err))
return fmt.Errorf("failed to delete existing text metadata: %w", err) return fmt.Errorf("failed to delete existing text metadata: %w", err)
} }
if err := tx.Create(textMetadata).Error; err != nil { if err := tx.Create(textMetadata).Error; err != nil {
logger.LogError("Failed to store text metadata", log.LogError("Failed to store text metadata",
logger.F("workID", workID), log.F("workID", workID),
logger.F("error", err)) log.F("error", err))
return fmt.Errorf("failed to store text metadata: %w", err) return fmt.Errorf("failed to store text metadata: %w", err)
} }
} }
// Store readability score // Store readability score
if readabilityScore != nil { if readabilityScore != nil {
if err := tx.Where("work_id = ?", workID).Delete(&models.ReadabilityScore{}).Error; err != nil { if err := tx.Where("work_id = ?", workID).Delete(&models2.ReadabilityScore{}).Error; err != nil {
logger.LogError("Failed to delete existing readability score", log.LogError("Failed to delete existing readability score",
logger.F("workID", workID), log.F("workID", workID),
logger.F("error", err)) log.F("error", err))
return fmt.Errorf("failed to delete existing readability score: %w", err) return fmt.Errorf("failed to delete existing readability score: %w", err)
} }
if err := tx.Create(readabilityScore).Error; err != nil { if err := tx.Create(readabilityScore).Error; err != nil {
logger.LogError("Failed to store readability score", log.LogError("Failed to store readability score",
logger.F("workID", workID), log.F("workID", workID),
logger.F("error", err)) log.F("error", err))
return fmt.Errorf("failed to store readability score: %w", err) return fmt.Errorf("failed to store readability score: %w", err)
} }
} }
// Store language analysis // Store language analysis
if languageAnalysis != nil { if languageAnalysis != nil {
if err := tx.Where("work_id = ?", workID).Delete(&models.LanguageAnalysis{}).Error; err != nil { if err := tx.Where("work_id = ?", workID).Delete(&models2.LanguageAnalysis{}).Error; err != nil {
logger.LogError("Failed to delete existing language analysis", log.LogError("Failed to delete existing language analysis",
logger.F("workID", workID), log.F("workID", workID),
logger.F("error", err)) log.F("error", err))
return fmt.Errorf("failed to delete existing language analysis: %w", err) return fmt.Errorf("failed to delete existing language analysis: %w", err)
} }
if err := tx.Create(languageAnalysis).Error; err != nil { if err := tx.Create(languageAnalysis).Error; err != nil {
logger.LogError("Failed to store language analysis", log.LogError("Failed to store language analysis",
logger.F("workID", workID), log.F("workID", workID),
logger.F("error", err)) log.F("error", err))
return fmt.Errorf("failed to store language analysis: %w", err) return fmt.Errorf("failed to store language analysis: %w", err)
} }
} }
logger.LogInfo("Successfully stored analysis results", log.LogInfo("Successfully stored analysis results",
logger.F("workID", workID)) log.F("workID", workID))
return nil return nil
}) })
} }
// Helper functions for data conversion // Helper functions for data conversion
func extractKeywordsAsJSON(keywords []Keyword) models.JSONB { func extractKeywordsAsJSON(keywords []Keyword) models2.JSONB {
if len(keywords) == 0 { if len(keywords) == 0 {
return models.JSONB{} return models2.JSONB{}
} }
keywordData := make([]map[string]interface{}, len(keywords)) keywordData := make([]map[string]interface{}, len(keywords))
@ -236,12 +237,12 @@ func extractKeywordsAsJSON(keywords []Keyword) models.JSONB {
} }
} }
return models.JSONB{"keywords": keywordData} return models2.JSONB{"keywords": keywordData}
} }
func extractTopicsAsJSON(topics []Topic) models.JSONB { func extractTopicsAsJSON(topics []Topic) models2.JSONB {
if len(topics) == 0 { if len(topics) == 0 {
return models.JSONB{} return models2.JSONB{}
} }
topicData := make([]map[string]interface{}, len(topics)) topicData := make([]map[string]interface{}, len(topics))
@ -252,5 +253,5 @@ func extractTopicsAsJSON(topics []Topic) models.JSONB {
} }
} }
return models.JSONB{"topics": topicData} return models2.JSONB{"topics": topicData}
} }

View File

@ -1,12 +1,13 @@
package linguistics package linguistics
import ( import (
"context" "context"
"crypto/sha256" "crypto/sha256"
"encoding/hex" "encoding/hex"
"sync" "sync"
"tercul/cache"
"tercul/logger" "tercul/internal/platform/cache"
"tercul/internal/platform/log"
) )
// Analyzer defines the interface for linguistic analysis services // Analyzer defines the interface for linguistic analysis services
@ -22,34 +23,34 @@ type Analyzer interface {
// It delegates pure text analysis to TextAnalyzer and work analysis to WorkAnalysisService, // It delegates pure text analysis to TextAnalyzer and work analysis to WorkAnalysisService,
// and only handles caching and orchestration concerns here to preserve SRP/DRY. // and only handles caching and orchestration concerns here to preserve SRP/DRY.
type BasicAnalyzer struct { type BasicAnalyzer struct {
textAnalyzer TextAnalyzer textAnalyzer TextAnalyzer
workAnalysisService WorkAnalysisService workAnalysisService WorkAnalysisService
cache cache.Cache cache cache.Cache
resultCache map[string]*AnalysisResult resultCache map[string]*AnalysisResult
cacheMutex sync.RWMutex cacheMutex sync.RWMutex
concurrency int concurrency int
cacheEnabled bool cacheEnabled bool
} }
// NewBasicAnalyzer creates a new BasicAnalyzer // NewBasicAnalyzer creates a new BasicAnalyzer
func NewBasicAnalyzer( func NewBasicAnalyzer(
textAnalyzer TextAnalyzer, textAnalyzer TextAnalyzer,
workService WorkAnalysisService, workService WorkAnalysisService,
redis cache.Cache, redis cache.Cache,
concurrency int, concurrency int,
cacheEnabled bool, cacheEnabled bool,
) *BasicAnalyzer { ) *BasicAnalyzer {
if concurrency <= 0 { if concurrency <= 0 {
concurrency = 4 concurrency = 4
} }
return &BasicAnalyzer{ return &BasicAnalyzer{
textAnalyzer: textAnalyzer, textAnalyzer: textAnalyzer,
workAnalysisService: workService, workAnalysisService: workService,
cache: redis, cache: redis,
resultCache: make(map[string]*AnalysisResult), resultCache: make(map[string]*AnalysisResult),
concurrency: concurrency, concurrency: concurrency,
cacheEnabled: cacheEnabled, cacheEnabled: cacheEnabled,
} }
} }
// WithCache adds a cache to the analyzer // WithCache adds a cache to the analyzer
@ -78,86 +79,86 @@ func (a *BasicAnalyzer) DisableCache() {
// AnalyzeText performs basic linguistic analysis on the given text // AnalyzeText performs basic linguistic analysis on the given text
func (a *BasicAnalyzer) AnalyzeText(ctx context.Context, text string, language string) (*AnalysisResult, error) { func (a *BasicAnalyzer) AnalyzeText(ctx context.Context, text string, language string) (*AnalysisResult, error) {
// Check in-memory cache first if enabled // Check in-memory cache first if enabled
if a.cacheEnabled { if a.cacheEnabled {
cacheKey := makeTextCacheKey(language, text) cacheKey := makeTextCacheKey(language, text)
// Try to get from in-memory cache // Try to get from in-memory cache
a.cacheMutex.RLock() a.cacheMutex.RLock()
cachedResult, found := a.resultCache[cacheKey] cachedResult, found := a.resultCache[cacheKey]
a.cacheMutex.RUnlock() a.cacheMutex.RUnlock()
if found { if found {
logger.LogDebug("In-memory cache hit for text analysis", log.LogDebug("In-memory cache hit for text analysis",
logger.F("language", language), log.F("language", language),
logger.F("textLength", len(text))) log.F("textLength", len(text)))
return cachedResult, nil return cachedResult, nil
} }
// Try to get from Redis cache if available // Try to get from Redis cache if available
if a.cache != nil { if a.cache != nil {
var cachedResult AnalysisResult var cachedResult AnalysisResult
err := a.cache.Get(ctx, "text_analysis:"+cacheKey, &cachedResult) err := a.cache.Get(ctx, "text_analysis:"+cacheKey, &cachedResult)
if err == nil { if err == nil {
logger.LogDebug("Redis cache hit for text analysis", log.LogDebug("Redis cache hit for text analysis",
logger.F("language", language), log.F("language", language),
logger.F("textLength", len(text))) log.F("textLength", len(text)))
// Store in in-memory cache too // Store in in-memory cache too
a.cacheMutex.Lock() a.cacheMutex.Lock()
a.resultCache[cacheKey] = &cachedResult a.resultCache[cacheKey] = &cachedResult
a.cacheMutex.Unlock() a.cacheMutex.Unlock()
return &cachedResult, nil return &cachedResult, nil
} }
} }
} }
// Cache miss or caching disabled, perform analysis using the pure TextAnalyzer // Cache miss or caching disabled, perform analysis using the pure TextAnalyzer
logger.LogDebug("Performing text analysis", log.LogDebug("Performing text analysis",
logger.F("language", language), log.F("language", language),
logger.F("textLength", len(text))) log.F("textLength", len(text)))
var ( var (
result *AnalysisResult result *AnalysisResult
err error err error
) )
if len(text) > 10000 && a.concurrency > 1 { if len(text) > 10000 && a.concurrency > 1 {
result, err = a.textAnalyzer.AnalyzeTextConcurrently(ctx, text, language, a.concurrency) result, err = a.textAnalyzer.AnalyzeTextConcurrently(ctx, text, language, a.concurrency)
} else { } else {
result, err = a.textAnalyzer.AnalyzeText(ctx, text, language) result, err = a.textAnalyzer.AnalyzeText(ctx, text, language)
} }
if err != nil { if err != nil {
return nil, err return nil, err
} }
// Cache the result if caching is enabled // Cache the result if caching is enabled
if a.cacheEnabled { if a.cacheEnabled {
cacheKey := makeTextCacheKey(language, text) cacheKey := makeTextCacheKey(language, text)
// Store in in-memory cache // Store in in-memory cache
a.cacheMutex.Lock() a.cacheMutex.Lock()
a.resultCache[cacheKey] = result a.resultCache[cacheKey] = result
a.cacheMutex.Unlock() a.cacheMutex.Unlock()
// Store in Redis cache if available // Store in Redis cache if available
if a.cache != nil { if a.cache != nil {
if err := a.cache.Set(ctx, "text_analysis:"+cacheKey, result, 0); err != nil { if err := a.cache.Set(ctx, "text_analysis:"+cacheKey, result, 0); err != nil {
logger.LogWarn("Failed to cache text analysis result", log.LogWarn("Failed to cache text analysis result",
logger.F("language", language), log.F("language", language),
logger.F("textLength", len(text)), log.F("textLength", len(text)),
logger.F("error", err)) log.F("error", err))
} }
} }
} }
return result, nil return result, nil
} }
// AnalyzeWork performs linguistic analysis on a work and stores the results // AnalyzeWork performs linguistic analysis on a work and stores the results
func (a *BasicAnalyzer) AnalyzeWork(ctx context.Context, workID uint) error { func (a *BasicAnalyzer) AnalyzeWork(ctx context.Context, workID uint) error {
// Delegate to the WorkAnalysisService to preserve single ownership // Delegate to the WorkAnalysisService to preserve single ownership
return a.workAnalysisService.AnalyzeWork(ctx, workID) return a.workAnalysisService.AnalyzeWork(ctx, workID)
} }
// Helper functions for text analysis // Helper functions for text analysis
@ -174,6 +175,6 @@ func min(a, b int) int {
// makeTextCacheKey builds a stable cache key using a content hash to avoid collisions/leaks // makeTextCacheKey builds a stable cache key using a content hash to avoid collisions/leaks
func makeTextCacheKey(language, text string) string { func makeTextCacheKey(language, text string) string {
h := sha256.Sum256([]byte(text)) h := sha256.Sum256([]byte(text))
return language + ":" + hex.EncodeToString(h[:]) return language + ":" + hex.EncodeToString(h[:])
} }

View File

@ -1,9 +1,10 @@
package linguistics package linguistics
import ( import (
"gorm.io/gorm" "tercul/internal/platform/cache"
"tercul/cache" "tercul/internal/platform/config"
"tercul/config"
"gorm.io/gorm"
) )
// LinguisticsFactory provides easy access to all linguistics components // LinguisticsFactory provides easy access to all linguistics components
@ -22,29 +23,29 @@ func NewLinguisticsFactory(
concurrency int, concurrency int,
cacheEnabled bool, cacheEnabled bool,
) *LinguisticsFactory { ) *LinguisticsFactory {
// Create text analyzer and wire providers (prefer external libs when available) // Create text analyzer and wire providers (prefer external libs when available)
textAnalyzer := NewBasicTextAnalyzer() textAnalyzer := NewBasicTextAnalyzer()
// Wire sentiment provider: GoVADER (configurable) // Wire sentiment provider: GoVADER (configurable)
if config.Cfg.NLPUseVADER { if config.Cfg.NLPUseVADER {
if sp, err := NewGoVADERSentimentProvider(); err == nil { if sp, err := NewGoVADERSentimentProvider(); err == nil {
textAnalyzer = textAnalyzer.WithSentimentProvider(sp) textAnalyzer = textAnalyzer.WithSentimentProvider(sp)
} else { } else {
textAnalyzer = textAnalyzer.WithSentimentProvider(RuleBasedSentimentProvider{}) textAnalyzer = textAnalyzer.WithSentimentProvider(RuleBasedSentimentProvider{})
} }
} else { } else {
textAnalyzer = textAnalyzer.WithSentimentProvider(RuleBasedSentimentProvider{}) textAnalyzer = textAnalyzer.WithSentimentProvider(RuleBasedSentimentProvider{})
} }
// Wire language detector: lingua-go (configurable) // Wire language detector: lingua-go (configurable)
if config.Cfg.NLPUseLingua { if config.Cfg.NLPUseLingua {
textAnalyzer = textAnalyzer.WithLanguageDetector(NewLinguaLanguageDetector()) textAnalyzer = textAnalyzer.WithLanguageDetector(NewLinguaLanguageDetector())
} }
// Wire keyword provider: lightweight TF-IDF approximation (configurable) // Wire keyword provider: lightweight TF-IDF approximation (configurable)
if config.Cfg.NLPUseTFIDF { if config.Cfg.NLPUseTFIDF {
textAnalyzer = textAnalyzer.WithKeywordProvider(NewTFIDFKeywordProvider()) textAnalyzer = textAnalyzer.WithKeywordProvider(NewTFIDFKeywordProvider())
} }
// Create cache components // Create cache components
memoryCache := NewMemoryAnalysisCache(cacheEnabled) memoryCache := NewMemoryAnalysisCache(cacheEnabled)
@ -64,13 +65,13 @@ func NewLinguisticsFactory(
) )
// Create analyzer that combines text analysis and work analysis // Create analyzer that combines text analysis and work analysis
analyzer := NewBasicAnalyzer( analyzer := NewBasicAnalyzer(
textAnalyzer, textAnalyzer,
workAnalysisService, workAnalysisService,
cache, cache,
concurrency, concurrency,
cacheEnabled, cacheEnabled,
) )
return &LinguisticsFactory{ return &LinguisticsFactory{
textAnalyzer: textAnalyzer, textAnalyzer: textAnalyzer,

View File

@ -1,15 +1,13 @@
package linguistics package linguistics
import ( import (
"testing" "github.com/stretchr/testify/require"
"github.com/stretchr/testify/require" "testing"
) )
func TestFactory_WiresProviders(t *testing.T) { func TestFactory_WiresProviders(t *testing.T) {
// We won't spin a DB/cache here; this is a smoke test of wiring methods // We won't spin a DB/cache here; this is a smoke test of wiring methods
f := NewLinguisticsFactory(nil, nil, 2, true) f := NewLinguisticsFactory(nil, nil, 2, true)
ta := f.GetTextAnalyzer().(*BasicTextAnalyzer) ta := f.GetTextAnalyzer().(*BasicTextAnalyzer)
require.NotNil(t, ta) require.NotNil(t, ta)
} }

View File

@ -2,20 +2,18 @@ package linguistics
// LanguageDetector defines a provider that can detect the language of a text // LanguageDetector defines a provider that can detect the language of a text
type LanguageDetector interface { type LanguageDetector interface {
// DetectLanguage returns a BCP-47 or ISO-like code and whether detection was confident // DetectLanguage returns a BCP-47 or ISO-like code and whether detection was confident
DetectLanguage(text string) (string, bool) DetectLanguage(text string) (string, bool)
} }
// SentimentProvider defines a provider that scores sentiment in [-1, 1] // SentimentProvider defines a provider that scores sentiment in [-1, 1]
type SentimentProvider interface { type SentimentProvider interface {
// Score returns sentiment for the text (optionally using language) // Score returns sentiment for the text (optionally using language)
Score(text string, language string) (float64, error) Score(text string, language string) (float64, error)
} }
// KeywordProvider defines a provider that extracts keywords from text // KeywordProvider defines a provider that extracts keywords from text
type KeywordProvider interface { type KeywordProvider interface {
// Extract returns a list of keywords with relevance in [0,1] // Extract returns a list of keywords with relevance in [0,1]
Extract(text string, language string) ([]Keyword, error) Extract(text string, language string) ([]Keyword, error)
} }

View File

@ -5,11 +5,11 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"log" "log"
models2 "tercul/internal/models"
"time" "time"
"github.com/hibiken/asynq" "github.com/hibiken/asynq"
"gorm.io/gorm" "gorm.io/gorm"
"tercul/models"
) )
const ( const (
@ -60,7 +60,7 @@ func (j *LinguisticSyncJob) EnqueueAnalysisForAllWorks() error {
log.Println("Enqueueing linguistic analysis jobs for all works...") log.Println("Enqueueing linguistic analysis jobs for all works...")
var workIDs []uint var workIDs []uint
if err := j.DB.Model(&models.Work{}).Pluck("id", &workIDs).Error; err != nil { if err := j.DB.Model(&models2.Work{}).Pluck("id", &workIDs).Error; err != nil {
return fmt.Errorf("error fetching work IDs: %w", err) return fmt.Errorf("error fetching work IDs: %w", err)
} }
@ -87,7 +87,7 @@ func (j *LinguisticSyncJob) HandleLinguisticAnalysis(ctx context.Context, t *asy
// Check if analysis already exists // Check if analysis already exists
var count int64 var count int64
if err := j.DB.Model(&models.LanguageAnalysis{}).Where("work_id = ?", payload.WorkID).Count(&count).Error; err != nil { if err := j.DB.Model(&models2.LanguageAnalysis{}).Where("work_id = ?", payload.WorkID).Count(&count).Error; err != nil {
return fmt.Errorf("error checking existing analysis: %w", err) return fmt.Errorf("error checking existing analysis: %w", err)
} }

Some files were not shown because too many files have changed in this diff Show More