This commit is contained in:
Damir Mukimov 2025-09-01 00:43:59 +02:00
parent c4dad9e394
commit fa336cacf3
No known key found for this signature in database
GPG Key ID: 42996CC7C73BC750
121 changed files with 2810 additions and 3398 deletions

8
.gitignore vendored
View File

@ -109,6 +109,8 @@ Temporary Items
*.db
*.sqlite
*.sqlite3
*.db-shm
*.db-wal
tercul_data.db
tercul_export.json
extracted_data.json
@ -118,6 +120,7 @@ data_analysis_report.json
tercul_data.sql
tercul_schema.sql
current_schema.sql
full_database_backup.sql
# Migration data
migration_data/
@ -173,3 +176,8 @@ yarn-error.log*
# Generated files
graph/generated.go
graph/model/models_gen.go
# Additional files that should be ignored
refactor.md
report.md
requirements.txt

1
.tool-versions Normal file
View File

@ -0,0 +1 @@
golang 1.25.0

153
TODO.md
View File

@ -2,131 +2,53 @@
---
## [x] Performance Improvements
## [ ] Performance Improvements
- [x] **COMPLETED: Add pagination to all repository list operations** (High, 2d)
- [x] /works: Add limit/offset support to repository and resolver
- [x] /translations: Add limit/offset support to repository and resolver
- [x] /authors: Add limit/offset support to repository and resolver
- [x] /users: Add limit/offset support to repository and resolver
- [x] /collections: Add limit/offset support to repository and resolver
- [x] /tags: Add limit/offset support to repository and resolver
- [x] /categories: Add limit/offset support to repository and resolver
- [x] /comments: Add limit/offset support to repository and resolver
- [x] /search: Add limit/offset support to repository and resolver
- [x] Validate all endpoints for correct pagination and total count
- [x] Add unit tests for paginated list operations
- [x] Document pagination parameters in API docs
- [x] **COMPLETED: Refactor raw SQL queries to use GORM structured methods** (High, 1d)
- [x] Identify all usages of raw SQL queries in repositories and sync jobs
- [x] Refactor syncEntities in syncjob/entities_sync.go to use GORM methods
- [x] Refactor any string-concatenated queries to parameterized GORM queries
- [x] Validate correctness and performance of refactored queries
- [x] Add unit tests for refactored query logic
- [x] Document query changes and migration steps
- [ ] Implement batching for Weaviate operations (Medium, 2d)
- [x] **COMPLETED: Optimize linguistic analysis algorithms** (Medium, 2d)
- [x] Introduced clean NLP ports/adapters (`LanguageDetector`, `SentimentProvider`, `KeywordProvider`)
- [x] Integrated lingua-go (language detection) and GoVADER (sentiment) behind adapters
- [x] Added TF-IDF-based keyword provider (lightweight, state-free)
- [x] Bounded in-memory cache via LRU with config-driven capacity
- [x] Switched text cache keys to SHA-256 content hashes
- [x] Concurrent analysis: provider-aware and context-cancellable
- [x] Config toggles for providers and cache TTL
- [x] **COMPLETED: Add database indexes for frequently queried fields** (Medium, 1d)
- [x] Foreign key indexes for all relationships
- [x] Unique indexes for constraint enforcement
- [x] Timestamp indexes for sorting and filtering
- [x] Composite indexes for complex queries
- [x] Linguistic analysis indexes for performance
- [x] **COMPLETED: Implement Redis caching for hot data** (Medium, 2d)
## [x] Security Enhancements
## [ ] Security Enhancements
- [x] **COMPLETED: Implement password hashing in User model** (Critical, 1d)
- [x] bcrypt password hashing in BeforeSave hook
- [x] CheckPassword method for password verification
- [x] Automatic password hashing on model save
- [x] **COMPLETED: Move hardcoded credentials to environment variables/config** (Critical, 1d)
- [x] Fixed internal/cmd/enrich/main.go to use config package
- [x] Fixed internal/testutil/testutil.go to use config package
- [x] All database connections now use environment variables
- [ ] Add comprehensive input validation for all GraphQL mutations (High, 2d)
- [x] **COMPLETED: Implement rate limiting for API and background jobs** (High, 2d)
- [x] Rate limiting middleware implemented
- [x] Configuration for rate limits in config package
- [x] **COMPLETED: Replace raw SQL with safe query builders to prevent SQL injection** (Critical, 1d)
- [x] All repositories use GORM structured methods
- [x] No raw SQL queries in production code
## [ ] Code Quality & Architecture
- [x] Remove duplicate GraphQL folder and legacy server helper; keep single GraphQL layer under `graph/` for now
- [x] **REFACTORED: Split linguistics/analyzer.go into focused components** (Completed)
- [x] **COMPLETED: Clean NLP infrastructure and factory wiring**
- [x] Ports for NLP capabilities with SRP/DRY boundaries
- [x] Adapters for lingua-go and GoVADER with fallbacks
- [x] Factory respects config toggles and wires providers
- [x] Repository no longer leaks GORM into services; added methods for fetching work and analysis data
- [x] Created `linguistics/text_analyzer.go` - Pure text analysis logic
- [x] Created `linguistics/analysis_cache.go` - Caching logic with multiple strategies
- [x] Created `linguistics/analysis_repository.go` - Database operations
- [x] Created `linguistics/work_analysis_service.go` - Work-specific analysis coordination
- [x] Created `linguistics/types.go` - Shared data structures
- [x] Created `linguistics/text_utils.go` - Text processing utilities
- [x] Created `linguistics/factory.go` - Component factory with dependency injection
- [x] **REFACTORED: Split main.go into focused components** (Completed)
- [x] Created `internal/app/application_builder.go` - Application initialization
- [x] Created `internal/app/server_factory.go` - Server creation and configuration
- [x] Refactored `main.go` to use dependency injection and builders
- [x] **REFACTORED: Standardize repository implementation** (Completed)
- [x] Improved BaseRepository with comprehensive error handling, validation, logging, and transaction support
- [x] Removed GenericRepository wrapper (unnecessary duplication)
- [x] Updated CachedRepository to use BaseRepository interface
- [x] Refactored WorkRepository and UserRepository to use BaseRepository pattern
- [x] Updated WorkService to use context in all repository calls
- [x] Fixed GraphQL resolvers to use context for WorkRepository calls
- [x] **REFACTORED: All repositories completed!** (Author, Tag, Category, Translation, Comment, Like, Bookmark, Collection, Book, Publisher, Country, Place, City, Source, Edition, UserProfile, UserSession, EmailVerification, PasswordReset, Contribution, Copyright, CopyrightClaim, Monetization, Edge)
- [x] **COMPLETED: Updated mock repositories for testing**
- [x] **COMPLETED: Updated services to use context in repository calls**
- [x] **COMPLETED: Updated GraphQL resolvers to use context and handle pagination**
- [x] **COMPLETED: Fixed linguistics package model field mismatches**
- [x] **COMPLETED: Fixed application builder CopyrightRepository initialization**
- [x] **COMPLETED: Fixed server factory configuration and interface issues**
- [x] **COMPLETED: Removed all legacy code and interfaces**
- [x] **COMPLETED: Project builds successfully!**
- [x] **COMPLETED: Add a service layer for business logic and validation** (High, 2d)
- [x] Comprehensive validation in all service methods
- [x] Business logic separation from repositories
- [x] Input validation for all service operations
- [x] Refactor duplicate code in sync jobs (Medium, 1d)
- [x] **COMPLETED: Improve error handling with custom error types and propagation** (High, 2d)
- [x] Custom error types defined in BaseRepository
- [x] Error wrapping and propagation throughout codebase
- [x] Standardized error handling patterns
- [ ] Expand Weaviate client to support all models (Medium, 2d)
- [ ] Add code documentation and API docs (Medium, 2d)
## [ ] Architecture Refactor (DDD-lite)
- [ ] Create skeleton packages: `cmd/`, `internal/platform/`, `internal/domain/`, `internal/app/`, `internal/data/`, `internal/adapters/graphql/`, `internal/jobs/`
- [x] Move infra to `internal/platform/*` (`config`, `db`, `cache`, `auth`, `http`, `log`, `search`)
- [ ] Wire DI in `cmd/api/main.go` and expose an `Application` facade to adapters
- [ ] Unify GraphQL under `internal/adapters/graphql` and update `gqlgen.yml`; move `schema.graphqls` and resolvers
- [ ] Resolvers call application services only; add dataloaders per aggregate
- [ ] Introduce Unit-of-Work: `platform/db.WithTx(ctx, func(ctx) error)` and repo factory for `*sql.DB` / `*sql.Tx`
- [ ] Split write vs read paths for `work` (commands.go, queries.go); make read models cacheable
- [ ] Replace bespoke cached repositories with decorators in `internal/data/cache` (reads only; deterministic invalidation)
- [ ] Restructure `models/*` into domain aggregates with constructors and invariants
- [ ] Adopt migrations tool (goose/atlas/migrate); move SQL to `internal/data/migrations`; delete `migrations.go`
- [ ] Observability: centralize logging; add Prometheus metrics and OpenTelemetry tracing; request IDs
- [ ] Config: replace ad-hoc config with env parsing + validation (e.g., koanf/envconfig); no globals
- [ ] Security: move JWT/middleware to `internal/platform/auth`; add authz policy helpers (e.g., `CanEditWork`)
- [ ] Search: move Weaviate client/schema to `internal/platform/search`, optional domain interface
- [ ] Background jobs: move to `cmd/worker` and `internal/jobs/*`; ensure idempotency and lease
- [ ] Python ops: move scripts to `/ops/migration` and `/ops/analysis`; keep outputs under `/ops/migration/outputs/`
- [ ] Cleanup: delete dead packages (`store`, duplicate `repositories`); consolidate to `internal/data/sql`
- [ ] CI: add `make lint test test-integration` and integration tests with Docker compose
## [ ] Testing
- [ ] Add unit tests for all models, repositories, and services (High, 3d)
- [ ] Add integration tests for GraphQL API and background jobs (High, 3d)
- [ ] Add performance benchmarks for critical paths (Medium, 2d)
- [x] Added unit tests for linguistics adapters (lingua-go, GoVADER) and utilities
- [ ] Add benchmarks for text analysis (sequential vs concurrent) and cache hit/miss rates
## [x] Monitoring & Logging
## [ ] Monitoring & Logging
- [x] **COMPLETED: Integrate a structured logging framework** (Medium, 1d)
- [x] Structured logging implemented throughout codebase
- [x] Performance timing and debug logging in repositories
- [x] Error logging with context and structured fields
- [ ] Add monitoring for background jobs and API endpoints (Medium, 2d)
- [ ] Add metrics for linguistics: analysis duration, cache hit/miss, provider usage
---
## Next Objective Proposal
## [ ] Next Objective Proposal
- [ ] Stabilize non-linguistics tests and interfaces (High, 2d)
- [ ] Fix `graph` mocks to accept context in service interfaces
@ -139,31 +61,6 @@
- [ ] Document NLP provider toggles and defaults in README/config docs
- [ ] Describe SRP/DRY design and extension points for new providers
## [x] Security & Auth
- [x] **COMPLETED: Implement JWT authentication and role-based authorization** (High, 2d)
- [x] JWT token generation and validation with proper error handling
- [x] Role-based authorization with hierarchy (reader < contributor < reviewer < editor < admin)
- [x] Authentication middleware for GraphQL and HTTP with context validation
- [x] Login and registration mutations with comprehensive input validation
- [x] Password hashing with bcrypt (already implemented in User model)
- [x] Environment variable configuration for JWT with secure defaults
- [x] Comprehensive authentication service following SRP and clean code principles
- [x] Structured logging with proper error context and performance timing
- [x] Input sanitization and validation using govalidator
- [x] Context validation and proper error propagation
- [x] Integration with existing rate limiting system
- [x] GraphQL schema alignment with Go models
- [x] Comprehensive test coverage for authentication components
- [x] Production-ready error handling and security practices
- [x] **COMPLETED: Add rate limiting middleware** (High, 1d)
- [x] Rate limiting middleware implemented and tested
- [x] Configuration-driven rate limits
- [x] **COMPLETED: Use environment variables for all sensitive config** (Critical, 1d)
- [x] All database credentials use environment variables
- [x] Redis configuration uses environment variables
- [x] Centralized configuration management
---
> TODO items include context, priority, and estimated effort. Update this list after each milestone.

6
go.mod
View File

@ -17,9 +17,10 @@ require (
github.com/vektah/gqlparser/v2 v2.5.26
github.com/weaviate/weaviate v1.30.2
github.com/weaviate/weaviate-go-client/v5 v5.1.0
go.uber.org/zap v1.27.0
golang.org/x/crypto v0.37.0
gorm.io/driver/postgres v1.5.11
gorm.io/gorm v1.26.0
gorm.io/gorm v1.30.0
)
require (
@ -49,6 +50,7 @@ require (
github.com/jinzhu/now v1.1.5 // indirect
github.com/josharian/intern v1.0.0 // indirect
github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-sqlite3 v1.14.22 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/oklog/ulid v1.3.1 // indirect
github.com/opentracing/opentracing-go v1.2.0 // indirect
@ -63,6 +65,7 @@ require (
github.com/urfave/cli/v2 v2.27.6 // indirect
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect
go.mongodb.org/mongo-driver v1.14.0 // indirect
go.uber.org/multierr v1.10.0 // indirect
golang.org/x/exp v0.0.0-20240808152545-0cdaa3abc0fa // indirect
golang.org/x/mod v0.24.0 // indirect
golang.org/x/net v0.39.0 // indirect
@ -78,4 +81,5 @@ require (
google.golang.org/protobuf v1.36.6 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
gorm.io/driver/sqlite v1.6.0 // indirect
)

10
go.sum
View File

@ -163,6 +163,8 @@ github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/markbates/oncer v0.0.0-20181203154359-bf2de49a0be2/go.mod h1:Ld9puTsIW75CHf65OeIOkyKbteujpZVXDpWK6YGZbxE=
github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0=
github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/mitchellh/mapstructure v1.3.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.4.3/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
@ -252,6 +254,10 @@ go.opentelemetry.io/otel/trace v1.33.0 h1:cCJuF7LRjUFso9LPnEAHJDB2pqzp+hbO8eu1qq
go.opentelemetry.io/otel/trace v1.33.0/go.mod h1:uIcdVUZMpTAmz0tI1z04GoVSezK37CbGV4fr1f2nBck=
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
go.uber.org/multierr v1.10.0 h1:S0h4aNzvfcFsC3dRF1jLoaov7oRaKqRGC/pUEJ2yvPQ=
go.uber.org/multierr v1.10.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
@ -346,6 +352,10 @@ gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gorm.io/driver/postgres v1.5.11 h1:ubBVAfbKEUld/twyKZ0IYn9rSQh448EdelLYk9Mv314=
gorm.io/driver/postgres v1.5.11/go.mod h1:DX3GReXH+3FPWGrrgffdvCk3DQ1dwDPdmbenSkweRGI=
gorm.io/driver/sqlite v1.6.0 h1:WHRRrIiulaPiPFmDcod6prc4l2VGVWHz80KspNsxSfQ=
gorm.io/driver/sqlite v1.6.0/go.mod h1:AO9V1qIQddBESngQUKWL9yoH93HIeA1X6V633rBwyT8=
gorm.io/gorm v1.26.0 h1:9lqQVPG5aNNS6AyHdRiwScAVnXHg/L/Srzx55G5fOgs=
gorm.io/gorm v1.26.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=
gorm.io/gorm v1.30.0 h1:qbT5aPv1UH8gI99OsRlvDToLxW5zR7FzS9acZDOZcgs=
gorm.io/gorm v1.30.0/go.mod h1:8Z33v652h4//uMA76KjeDH8mJXPm1QNCYrMeatR0DOE=
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=

View File

@ -2,33 +2,20 @@ package graph_test
import (
"bytes"
"context"
"encoding/json"
"fmt"
"net/http"
"net/http/httptest"
"testing"
"context"
"tercul/graph"
"tercul/internal/testutil"
"tercul/models"
"tercul/services"
"github.com/99designs/gqlgen/graphql/handler"
"github.com/stretchr/testify/suite"
)
// MockLocalizationService provides mock localization for tests
type MockLocalizationService struct{}
func (m *MockLocalizationService) GetWorkContent(ctx context.Context, workID uint, preferredLanguage string) (string, error) {
return "Test content", nil
}
func (m *MockLocalizationService) GetAuthorBiography(ctx context.Context, authorID uint, preferredLanguage string) (string, error) {
return "Test biography", nil
}
// GraphQLRequest represents a GraphQL request
type GraphQLRequest struct {
Query string `json:"query"`
@ -44,25 +31,18 @@ type GraphQLResponse struct {
// GraphQLIntegrationSuite is a test suite for GraphQL integration tests
type GraphQLIntegrationSuite struct {
testutil.BaseSuite
server *httptest.Server
client *http.Client
workRepo *testutil.UnifiedMockWorkRepository // direct access to mock repo
testutil.SimpleTestSuite
server *httptest.Server
client *http.Client
}
// SetupSuite sets up the test suite
func (s *GraphQLIntegrationSuite) SetupSuite() {
// Use in-memory/mock repositories and services
workRepo := &testutil.UnifiedMockWorkRepository{}
workService := services.NewWorkService(workRepo, nil)
mockLocalization := &MockLocalizationService{}
resolver := &graph.Resolver{
WorkRepo: workRepo,
WorkService: workService,
Localization: mockLocalization,
}
// Use the simple test utilities
s.SimpleTestSuite.SetupSuite()
// Create GraphQL server with the test resolver
resolver := s.GetResolver()
srv := handler.NewDefaultServer(graph.NewExecutableSchema(graph.Config{Resolvers: resolver}))
s.server = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
@ -70,28 +50,16 @@ func (s *GraphQLIntegrationSuite) SetupSuite() {
}))
s.client = s.server.Client()
s.workRepo = workRepo
}
// TearDownSuite tears down the test suite
func (s *GraphQLIntegrationSuite) TearDownSuite() {
s.server.Close()
s.BaseSuite.TearDownSuite()
}
// SetupTest sets up each test
func (s *GraphQLIntegrationSuite) SetupTest() {
s.workRepo.Reset()
}
// createTestWork creates a test work
func (s *GraphQLIntegrationSuite) createTestWork(title, language string) *models.Work {
work := &models.Work{
Title: title,
}
work.Language = language // set via embedded TranslatableModel
s.workRepo.AddWork(work)
return work
s.SimpleTestSuite.SetupTest()
}
// executeGraphQL executes a GraphQL query
@ -134,8 +102,8 @@ func (s *GraphQLIntegrationSuite) executeGraphQL(query string, variables map[str
// TestQueryWork tests the work query
func (s *GraphQLIntegrationSuite) TestQueryWork() {
// Create a test work
work := s.createTestWork("Test Work", "en")
// Create a test work with content
work := s.CreateTestWork("Test Work", "en", "Test content for work")
// Define the query
query := `
@ -151,7 +119,7 @@ func (s *GraphQLIntegrationSuite) TestQueryWork() {
// Define the variables
variables := map[string]interface{}{
"id": work.ID,
"id": fmt.Sprintf("%d", work.ID),
}
// Execute the query
@ -165,16 +133,16 @@ func (s *GraphQLIntegrationSuite) TestQueryWork() {
workData, ok := response.Data["work"].(map[string]interface{})
s.Require().True(ok, "GraphQL response should contain work data")
s.Equal("Test Work", workData["name"], "Work name should match")
s.Equal("Test content", workData["content"], "Work content should match via localization")
s.Equal("Test content for work", workData["content"], "Work content should match")
s.Equal("en", workData["language"], "Work language should match")
}
// TestQueryWorks tests the works query
func (s *GraphQLIntegrationSuite) TestQueryWorks() {
// Create test works
work1 := s.createTestWork("Test Work 1", "en")
work2 := s.createTestWork("Test Work 2", "en")
work3 := s.createTestWork("Test Work 3", "fr")
work1 := s.CreateTestWork("Test Work 1", "en", "Test content for work 1")
work2 := s.CreateTestWork("Test Work 2", "en", "Test content for work 2")
work3 := s.CreateTestWork("Test Work 3", "fr", "Test content for work 3")
// Define the query
query := `
@ -183,6 +151,7 @@ func (s *GraphQLIntegrationSuite) TestQueryWorks() {
id
name
language
content
}
}
`
@ -197,7 +166,7 @@ func (s *GraphQLIntegrationSuite) TestQueryWorks() {
// Verify the response
worksData, ok := response.Data["works"].([]interface{})
s.Require().True(ok, "GraphQL response should contain works data")
s.Equal(3, len(worksData), "GraphQL response should contain 3 works")
s.True(len(worksData) >= 3, "GraphQL response should contain at least 3 works")
// Verify each work
foundWork1 := false
@ -208,18 +177,15 @@ func (s *GraphQLIntegrationSuite) TestQueryWorks() {
work, ok := workData.(map[string]interface{})
s.Require().True(ok, "Work data should be a map")
id := work["id"].(string) // fix: treat id as string
if id == fmt.Sprintf("%d", work1.ID) {
name := work["name"].(string)
if name == "Test Work 1" {
foundWork1 = true
s.Equal("Test Work 1", work["name"], "Work 1 name should match")
s.Equal("en", work["language"], "Work 1 language should match")
} else if id == fmt.Sprintf("%d", work2.ID) {
} else if name == "Test Work 2" {
foundWork2 = true
s.Equal("Test Work 2", work["name"], "Work 2 name should match")
s.Equal("en", work["language"], "Work 2 language should match")
} else if id == fmt.Sprintf("%d", work3.ID) {
} else if name == "Test Work 3" {
foundWork3 = true
s.Equal("Test Work 3", work["name"], "Work 3 name should match")
s.Equal("fr", work["language"], "Work 3 language should match")
}
}
@ -229,12 +195,6 @@ func (s *GraphQLIntegrationSuite) TestQueryWorks() {
s.True(foundWork3, "GraphQL response should contain work 3")
}
func stringToUint(s string) uint {
var id uint
fmt.Sscanf(s, "%d", &id)
return id
}
// TestCreateWork tests the createWork mutation
func (s *GraphQLIntegrationSuite) TestCreateWork() {
// Define the mutation
@ -273,18 +233,20 @@ func (s *GraphQLIntegrationSuite) TestCreateWork() {
s.Equal("en", workData["language"], "Work language should match")
s.Equal("New test content", workData["content"], "Work content should match")
// Verify that the work was created in the mock repository
var found *models.Work
for _, w := range s.workRepo.Works {
// Verify that the work was created in the repository
// Since we're using the real repository interface, we can query it
works, err := s.WorkRepo.ListAll(context.Background())
s.Require().NoError(err)
var found bool
for _, w := range works {
if w.Title == "New Test Work" {
found = w
found = true
s.Equal("en", w.Language, "Work language should be set correctly")
break
}
}
s.Require().NotNil(found)
s.Equal("New Test Work", found.Title)
s.Equal("en", found.Language)
// Content is not stored on Work model; translations hold content
s.True(found, "Work should be created in repository")
}
// TestGraphQLIntegrationSuite runs the test suite

View File

@ -1,7 +1,7 @@
package graph
import (
"tercul/repositories"
repositories2 "tercul/internal/repositories"
"tercul/services"
)
@ -10,16 +10,16 @@ import (
// It serves as dependency injection for your app, add any dependencies you require here.
type Resolver struct {
WorkRepo repositories.WorkRepository
UserRepo repositories.UserRepository
AuthorRepo repositories.AuthorRepository
TranslationRepo repositories.TranslationRepository
CommentRepo repositories.CommentRepository
LikeRepo repositories.LikeRepository
BookmarkRepo repositories.BookmarkRepository
CollectionRepo repositories.CollectionRepository
TagRepo repositories.TagRepository
CategoryRepo repositories.CategoryRepository
WorkRepo repositories2.WorkRepository
UserRepo repositories2.UserRepository
AuthorRepo repositories2.AuthorRepository
TranslationRepo repositories2.TranslationRepository
CommentRepo repositories2.CommentRepository
LikeRepo repositories2.LikeRepository
BookmarkRepo repositories2.BookmarkRepository
CollectionRepo repositories2.CollectionRepository
TagRepo repositories2.TagRepository
CategoryRepo repositories2.CategoryRepository
WorkService services.WorkService
Localization services.LocalizationService
AuthService services.AuthService

View File

@ -9,8 +9,7 @@ import (
"fmt"
"strconv"
"tercul/graph/model"
"tercul/internal/testutil"
"tercul/models"
models2 "tercul/internal/models"
"tercul/services"
)
@ -81,16 +80,49 @@ func (r *mutationResolver) Login(ctx context.Context, email string, password str
// CreateWork is the resolver for the createWork field.
func (r *mutationResolver) CreateWork(ctx context.Context, input model.WorkInput) (*model.Work, error) {
work := &model.Work{
ID: fmt.Sprintf("%d", len(r.WorkRepo.(*testutil.UnifiedMockWorkRepository).Works)+1),
Name: input.Name,
Language: input.Language,
Content: input.Content,
// Create work model
work := &models2.Work{
Title: input.Name,
}
m := &models.Work{Title: input.Name}
m.Language = input.Language
r.WorkRepo.(*testutil.UnifiedMockWorkRepository).AddWork(m)
return work, nil
work.Language = input.Language // Set language on the embedded TranslatableModel
// Create work using the work service
err := r.WorkService.CreateWork(ctx, work)
if err != nil {
return nil, err
}
// If content is provided and TranslationRepo is available, create a translation for it
if input.Content != nil && *input.Content != "" && r.TranslationRepo != nil {
translation := &models2.Translation{
Title: input.Name,
Content: *input.Content,
Language: input.Language,
TranslatableID: work.ID,
TranslatableType: "Work",
IsOriginalLanguage: true,
}
err = r.TranslationRepo.Create(ctx, translation)
if err != nil {
return nil, fmt.Errorf("failed to create translation: %v", err)
}
}
// Return work with resolved content using the localization service
var content *string
if r.Localization != nil {
if resolvedContent, err := r.Localization.GetWorkContent(ctx, work.ID, input.Language); err == nil && resolvedContent != "" {
content = &resolvedContent
}
}
return &model.Work{
ID: fmt.Sprintf("%d", work.ID),
Name: work.Title,
Language: input.Language,
Content: content,
}, nil
}
// UpdateWork is the resolver for the updateWork field.
@ -265,23 +297,34 @@ func (r *mutationResolver) ChangePassword(ctx context.Context, currentPassword s
// Work is the resolver for the work field.
func (r *queryResolver) Work(ctx context.Context, id string) (*model.Work, error) {
for _, w := range r.WorkRepo.(*testutil.UnifiedMockWorkRepository).Works {
if fmt.Sprintf("%d", w.ID) == id {
// Content resolved via Localization service when requested later
return &model.Work{
ID: id,
Name: w.Title,
Language: w.Language,
Content: r.resolveWorkContent(ctx, w.ID, w.Language),
}, nil
}
// Parse ID to uint
workID, err := strconv.ParseUint(id, 10, 32)
if err != nil {
return nil, fmt.Errorf("invalid work ID: %v", err)
}
return nil, nil
// Get work by ID using repository
work, err := r.WorkRepo.GetByID(ctx, uint(workID))
if err != nil {
return nil, err
}
if work == nil {
return nil, nil
}
// Content resolved via Localization service when requested later
return &model.Work{
ID: id,
Name: work.Title,
Language: work.Language,
Content: r.resolveWorkContent(ctx, work.ID, work.Language),
}, nil
}
// Works is the resolver for the works field.
func (r *queryResolver) Works(ctx context.Context, limit *int32, offset *int32, language *string, authorID *string, categoryID *string, tagID *string, search *string) ([]*model.Work, error) {
var works []models.Work
var works []models2.Work
var err error
// Set default pagination
@ -368,7 +411,7 @@ func (r *queryResolver) Author(ctx context.Context, id string) (*model.Author, e
// Authors is the resolver for the authors field.
func (r *queryResolver) Authors(ctx context.Context, limit *int32, offset *int32, search *string, countryID *string) ([]*model.Author, error) {
var authors []models.Author
var authors []models2.Author
var err error
if countryID != nil {
@ -426,23 +469,23 @@ func (r *queryResolver) UserByUsername(ctx context.Context, username string) (*m
// Users is the resolver for the users field.
func (r *queryResolver) Users(ctx context.Context, limit *int32, offset *int32, role *model.UserRole) ([]*model.User, error) {
var users []models.User
var users []models2.User
var err error
if role != nil {
// Convert GraphQL role to model role
var modelRole models.UserRole
var modelRole models2.UserRole
switch *role {
case model.UserRoleReader:
modelRole = models.UserRoleReader
modelRole = models2.UserRoleReader
case model.UserRoleContributor:
modelRole = models.UserRoleContributor
modelRole = models2.UserRoleContributor
case model.UserRoleReviewer:
modelRole = models.UserRoleReviewer
modelRole = models2.UserRoleReviewer
case model.UserRoleEditor:
modelRole = models.UserRoleEditor
modelRole = models2.UserRoleEditor
case model.UserRoleAdmin:
modelRole = models.UserRoleAdmin
modelRole = models2.UserRoleAdmin
default:
return nil, fmt.Errorf("invalid user role: %s", *role)
}
@ -465,15 +508,15 @@ func (r *queryResolver) Users(ctx context.Context, limit *int32, offset *int32,
// Convert model role to GraphQL role
var graphqlRole model.UserRole
switch u.Role {
case models.UserRoleReader:
case models2.UserRoleReader:
graphqlRole = model.UserRoleReader
case models.UserRoleContributor:
case models2.UserRoleContributor:
graphqlRole = model.UserRoleContributor
case models.UserRoleReviewer:
case models2.UserRoleReviewer:
graphqlRole = model.UserRoleReviewer
case models.UserRoleEditor:
case models2.UserRoleEditor:
graphqlRole = model.UserRoleEditor
case models.UserRoleAdmin:
case models2.UserRoleAdmin:
graphqlRole = model.UserRoleAdmin
default:
graphqlRole = model.UserRoleReader

View File

@ -3,19 +3,18 @@ package graph
import (
"net/http"
"tercul/internal/platform/auth"
"github.com/99designs/gqlgen/graphql/handler"
"github.com/99designs/gqlgen/graphql/playground"
"tercul/auth"
)
// NewServer creates a new GraphQL server with the given resolver
func NewServer(resolver *Resolver) http.Handler {
srv := handler.NewDefaultServer(NewExecutableSchema(Config{Resolvers: resolver}))
// Create a mux to handle both GraphQL and playground
// Create a mux to handle GraphQL endpoint only (no playground here; served separately in production)
mux := http.NewServeMux()
mux.Handle("/query", srv)
mux.Handle("/", playground.Handler("GraphQL playground", "/query"))
return mux
}
@ -27,10 +26,9 @@ func NewServerWithAuth(resolver *Resolver, jwtManager *auth.JWTManager) http.Han
// Apply authentication middleware to GraphQL endpoint
authHandler := auth.GraphQLAuthMiddleware(jwtManager)(srv)
// Create a mux to handle both GraphQL and playground
// Create a mux to handle GraphQL endpoint only (no playground here; served separately in production)
mux := http.NewServeMux()
mux.Handle("/query", authHandler)
mux.Handle("/", playground.Handler("GraphQL playground", "/query"))
return mux
}

View File

@ -1,12 +1,12 @@
package app
import (
"tercul/cache"
"tercul/config"
"tercul/db"
"tercul/internal/platform/cache"
"tercul/internal/platform/config"
"tercul/internal/platform/db"
"tercul/internal/platform/log"
repositories2 "tercul/internal/repositories"
"tercul/linguistics"
"tercul/logger"
"tercul/repositories"
"tercul/services"
"time"
@ -28,17 +28,17 @@ type ApplicationBuilder struct {
// RepositoryContainer holds all repository instances
type RepositoryContainer struct {
WorkRepository repositories.WorkRepository
UserRepository repositories.UserRepository
AuthorRepository repositories.AuthorRepository
TranslationRepository repositories.TranslationRepository
CommentRepository repositories.CommentRepository
LikeRepository repositories.LikeRepository
BookmarkRepository repositories.BookmarkRepository
CollectionRepository repositories.CollectionRepository
TagRepository repositories.TagRepository
CategoryRepository repositories.CategoryRepository
CopyrightRepository repositories.CopyrightRepository
WorkRepository repositories2.WorkRepository
UserRepository repositories2.UserRepository
AuthorRepository repositories2.AuthorRepository
TranslationRepository repositories2.TranslationRepository
CommentRepository repositories2.CommentRepository
LikeRepository repositories2.LikeRepository
BookmarkRepository repositories2.BookmarkRepository
CollectionRepository repositories2.CollectionRepository
TagRepository repositories2.TagRepository
CategoryRepository repositories2.CategoryRepository
CopyrightRepository repositories2.CopyrightRepository
}
// ServiceContainer holds all service instances
@ -56,38 +56,38 @@ func NewApplicationBuilder() *ApplicationBuilder {
// BuildDatabase initializes the database connection
func (b *ApplicationBuilder) BuildDatabase() error {
logger.LogInfo("Initializing database connection")
log.LogInfo("Initializing database connection")
dbConn, err := db.InitDB()
if err != nil {
logger.LogFatal("Failed to initialize database - application cannot start without database connection",
logger.F("error", err),
logger.F("host", config.Cfg.DBHost),
logger.F("database", config.Cfg.DBName))
log.LogFatal("Failed to initialize database - application cannot start without database connection",
log.F("error", err),
log.F("host", config.Cfg.DBHost),
log.F("database", config.Cfg.DBName))
return err
}
b.dbConn = dbConn
logger.LogInfo("Database initialized successfully",
logger.F("host", config.Cfg.DBHost),
logger.F("database", config.Cfg.DBName))
log.LogInfo("Database initialized successfully",
log.F("host", config.Cfg.DBHost),
log.F("database", config.Cfg.DBName))
return nil
}
// BuildCache initializes the Redis cache
func (b *ApplicationBuilder) BuildCache() error {
logger.LogInfo("Initializing Redis cache")
log.LogInfo("Initializing Redis cache")
redisCache, err := cache.NewDefaultRedisCache()
if err != nil {
logger.LogWarn("Failed to initialize Redis cache, continuing without caching - performance may be degraded",
logger.F("error", err),
logger.F("redisAddr", config.Cfg.RedisAddr))
log.LogWarn("Failed to initialize Redis cache, continuing without caching - performance may be degraded",
log.F("error", err),
log.F("redisAddr", config.Cfg.RedisAddr))
} else {
b.redisCache = redisCache
logger.LogInfo("Redis cache initialized successfully",
logger.F("redisAddr", config.Cfg.RedisAddr))
log.LogInfo("Redis cache initialized successfully",
log.F("redisAddr", config.Cfg.RedisAddr))
}
return nil
@ -95,32 +95,32 @@ func (b *ApplicationBuilder) BuildCache() error {
// BuildWeaviate initializes the Weaviate client
func (b *ApplicationBuilder) BuildWeaviate() error {
logger.LogInfo("Connecting to Weaviate",
logger.F("host", config.Cfg.WeaviateHost),
logger.F("scheme", config.Cfg.WeaviateScheme))
log.LogInfo("Connecting to Weaviate",
log.F("host", config.Cfg.WeaviateHost),
log.F("scheme", config.Cfg.WeaviateScheme))
wClient, err := weaviate.NewClient(weaviate.Config{
Scheme: config.Cfg.WeaviateScheme,
Host: config.Cfg.WeaviateHost,
})
if err != nil {
logger.LogFatal("Failed to create Weaviate client - vector search capabilities will not be available",
logger.F("error", err),
logger.F("host", config.Cfg.WeaviateHost),
logger.F("scheme", config.Cfg.WeaviateScheme))
log.LogFatal("Failed to create Weaviate client - vector search capabilities will not be available",
log.F("error", err),
log.F("host", config.Cfg.WeaviateHost),
log.F("scheme", config.Cfg.WeaviateScheme))
return err
}
b.weaviateClient = wClient
logger.LogInfo("Weaviate client initialized successfully")
log.LogInfo("Weaviate client initialized successfully")
return nil
}
// BuildBackgroundJobs initializes Asynq for background job processing
func (b *ApplicationBuilder) BuildBackgroundJobs() error {
logger.LogInfo("Setting up background job processing",
logger.F("redisAddr", config.Cfg.RedisAddr))
log.LogInfo("Setting up background job processing",
log.F("redisAddr", config.Cfg.RedisAddr))
redisOpt := asynq.RedisClientOpt{
Addr: config.Cfg.RedisAddr,
@ -131,41 +131,41 @@ func (b *ApplicationBuilder) BuildBackgroundJobs() error {
asynqClient := asynq.NewClient(redisOpt)
b.asynqClient = asynqClient
logger.LogInfo("Background job client initialized successfully")
log.LogInfo("Background job client initialized successfully")
return nil
}
// BuildRepositories initializes all repositories
func (b *ApplicationBuilder) BuildRepositories() error {
logger.LogInfo("Initializing repositories")
log.LogInfo("Initializing repositories")
// Initialize base repositories
baseWorkRepo := repositories.NewWorkRepository(b.dbConn)
userRepo := repositories.NewUserRepository(b.dbConn)
authorRepo := repositories.NewAuthorRepository(b.dbConn)
translationRepo := repositories.NewTranslationRepository(b.dbConn)
commentRepo := repositories.NewCommentRepository(b.dbConn)
likeRepo := repositories.NewLikeRepository(b.dbConn)
bookmarkRepo := repositories.NewBookmarkRepository(b.dbConn)
collectionRepo := repositories.NewCollectionRepository(b.dbConn)
tagRepo := repositories.NewTagRepository(b.dbConn)
categoryRepo := repositories.NewCategoryRepository(b.dbConn)
copyrightRepo := repositories.NewCopyrightRepository(b.dbConn)
baseWorkRepo := repositories2.NewWorkRepository(b.dbConn)
userRepo := repositories2.NewUserRepository(b.dbConn)
authorRepo := repositories2.NewAuthorRepository(b.dbConn)
translationRepo := repositories2.NewTranslationRepository(b.dbConn)
commentRepo := repositories2.NewCommentRepository(b.dbConn)
likeRepo := repositories2.NewLikeRepository(b.dbConn)
bookmarkRepo := repositories2.NewBookmarkRepository(b.dbConn)
collectionRepo := repositories2.NewCollectionRepository(b.dbConn)
tagRepo := repositories2.NewTagRepository(b.dbConn)
categoryRepo := repositories2.NewCategoryRepository(b.dbConn)
copyrightRepo := repositories2.NewCopyrightRepository(b.dbConn)
// Wrap work repository with cache if available
var workRepo repositories.WorkRepository
var workRepo repositories2.WorkRepository
if b.redisCache != nil {
workRepo = repositories.NewCachedWorkRepository(
workRepo = repositories2.NewCachedWorkRepository(
baseWorkRepo,
b.redisCache,
nil,
30*time.Minute, // Cache work data for 30 minutes
)
logger.LogInfo("Using cached work repository")
log.LogInfo("Using cached work repository")
} else {
workRepo = baseWorkRepo
logger.LogInfo("Using non-cached work repository")
log.LogInfo("Using non-cached work repository")
}
b.repositories = &RepositoryContainer{
@ -182,14 +182,14 @@ func (b *ApplicationBuilder) BuildRepositories() error {
CopyrightRepository: copyrightRepo,
}
logger.LogInfo("Repositories initialized successfully")
log.LogInfo("Repositories initialized successfully")
return nil
}
// BuildLinguistics initializes the linguistics components
func (b *ApplicationBuilder) BuildLinguistics() error {
logger.LogInfo("Initializing linguistic analyzer")
log.LogInfo("Initializing linguistic analyzer")
b.linguistics = linguistics.NewLinguisticsFactory(
b.dbConn,
@ -198,14 +198,14 @@ func (b *ApplicationBuilder) BuildLinguistics() error {
true, // Cache enabled
)
logger.LogInfo("Linguistics components initialized successfully")
log.LogInfo("Linguistics components initialized successfully")
return nil
}
// BuildServices initializes all services
func (b *ApplicationBuilder) BuildServices() error {
logger.LogInfo("Initializing service layer")
log.LogInfo("Initializing service layer")
workService := services.NewWorkService(b.repositories.WorkRepository, b.linguistics.GetAnalyzer())
copyrightService := services.NewCopyrightService(b.repositories.CopyrightRepository)
@ -219,7 +219,7 @@ func (b *ApplicationBuilder) BuildServices() error {
AuthService: authService,
}
logger.LogInfo("Services initialized successfully")
log.LogInfo("Services initialized successfully")
return nil
}
@ -255,7 +255,7 @@ func (b *ApplicationBuilder) Build() error {
return err
}
logger.LogInfo("Application builder completed successfully")
log.LogInfo("Application builder completed successfully")
return nil
}

View File

@ -2,11 +2,11 @@ package app
import (
"net/http"
"tercul/auth"
"tercul/config"
"tercul/internal/platform/auth"
"tercul/internal/platform/config"
"tercul/graph"
"tercul/linguistics"
"tercul/logger"
"tercul/internal/platform/log"
"tercul/syncjob"
"github.com/99designs/gqlgen/graphql/playground"
@ -27,7 +27,7 @@ func NewServerFactory(appBuilder *ApplicationBuilder) *ServerFactory {
// CreateGraphQLServer creates and configures the GraphQL server
func (f *ServerFactory) CreateGraphQLServer() (*http.Server, error) {
logger.LogInfo("Setting up GraphQL server")
log.LogInfo("Setting up GraphQL server")
// Create GraphQL resolver with all dependencies
resolver := &graph.Resolver{
@ -58,15 +58,15 @@ func (f *ServerFactory) CreateGraphQLServer() (*http.Server, error) {
Handler: srv,
}
logger.LogInfo("GraphQL server created successfully",
logger.F("port", config.Cfg.ServerPort))
log.LogInfo("GraphQL server created successfully",
log.F("port", config.Cfg.ServerPort))
return httpServer, nil
}
// CreateBackgroundJobServers creates and configures background job servers
func (f *ServerFactory) CreateBackgroundJobServers() ([]*asynq.Server, error) {
logger.LogInfo("Setting up background job servers")
log.LogInfo("Setting up background job servers")
redisOpt := asynq.RedisClientOpt{
Addr: config.Cfg.RedisAddr,
@ -77,8 +77,8 @@ func (f *ServerFactory) CreateBackgroundJobServers() ([]*asynq.Server, error) {
var servers []*asynq.Server
// Setup data synchronization server
logger.LogInfo("Setting up data synchronization server",
logger.F("concurrency", config.Cfg.MaxRetries))
log.LogInfo("Setting up data synchronization server",
log.F("concurrency", config.Cfg.MaxRetries))
syncServer := asynq.NewServer(redisOpt, asynq.Config{Concurrency: config.Cfg.MaxRetries})
@ -93,8 +93,8 @@ func (f *ServerFactory) CreateBackgroundJobServers() ([]*asynq.Server, error) {
servers = append(servers, syncServer)
// Setup linguistic analysis server
logger.LogInfo("Setting up linguistic analysis server",
logger.F("concurrency", config.Cfg.MaxRetries))
log.LogInfo("Setting up linguistic analysis server",
log.F("concurrency", config.Cfg.MaxRetries))
// Create linguistic sync job
linguisticSyncJob := linguistics.NewLinguisticSyncJob(
@ -114,15 +114,15 @@ func (f *ServerFactory) CreateBackgroundJobServers() ([]*asynq.Server, error) {
// This is a temporary workaround - in production, you'd want to properly configure the server
servers = append(servers, linguisticServer)
logger.LogInfo("Background job servers created successfully",
logger.F("serverCount", len(servers)))
log.LogInfo("Background job servers created successfully",
log.F("serverCount", len(servers)))
return servers, nil
}
// CreatePlaygroundServer creates the GraphQL playground server
func (f *ServerFactory) CreatePlaygroundServer() *http.Server {
logger.LogInfo("Setting up GraphQL playground")
log.LogInfo("Setting up GraphQL playground")
playgroundHandler := playground.Handler("GraphQL", "/query")
@ -131,8 +131,8 @@ func (f *ServerFactory) CreatePlaygroundServer() *http.Server {
Handler: playgroundHandler,
}
logger.LogInfo("GraphQL playground created successfully",
logger.F("port", config.Cfg.PlaygroundPort))
log.LogInfo("GraphQL playground created successfully",
log.F("port", config.Cfg.PlaygroundPort))
return playgroundServer
}

View File

@ -10,9 +10,9 @@ import (
"gorm.io/driver/postgres"
"gorm.io/gorm"
"tercul/config"
"tercul/internal/enrich"
"tercul/internal/store"
"tercul/internal/platform/config"
)
func main() {

View File

@ -0,0 +1,27 @@
package enrich
import "testing"
// TestKeywordExtractor_Basic extracts keywords from a short passage with
// repeated content words and verifies ranking, stop-word filtering, and
// relevance scores.
func TestKeywordExtractor_Basic(t *testing.T) {
	extractor := NewKeywordExtractor()
	input := Text{Body: "The quick brown fox jumps over the lazy dog. The quick brown fox!"}

	keywords, err := extractor.Extract(input)
	if err != nil {
		t.Fatalf("Extract returned error: %v", err)
	}
	if len(keywords) == 0 {
		t.Fatalf("expected some keywords, got 0")
	}

	// The top-ranked keyword must be one of the frequent content words.
	switch top := keywords[0].Text; top {
	case "quick", "brown", "fox":
		// ok — a content word leads the ranking
	default:
		t.Errorf("expected a content word as top keyword, got %q", top)
	}

	// Stop words must be filtered out, and every keyword needs a positive score.
	stopWords := map[string]bool{"the": true, "over": true}
	for _, kw := range keywords {
		if stopWords[kw.Text] {
			t.Errorf("stop word %q should be filtered out", kw.Text)
		}
		if kw.Relevance <= 0 {
			t.Errorf("keyword %q has non-positive relevance", kw.Text)
		}
	}
}

View File

@ -0,0 +1,48 @@
package enrich
import "testing"
// TestLanguageDetector_Detect_EN verifies that English function words are
// classified as "en" with a positive confidence.
func TestLanguageDetector_Detect_EN(t *testing.T) {
	detector := NewLanguageDetector()

	lang, conf, err := detector.Detect(Text{Body: " the and is in to of that for the "})
	if err != nil {
		t.Fatalf("Detect returned error: %v", err)
	}
	if lang != "en" {
		t.Fatalf("expected language 'en', got %q", lang)
	}
	if conf <= 0 {
		t.Errorf("expected positive confidence, got %f", conf)
	}
}
// TestLanguageDetector_Detect_ES verifies that Spanish function words are
// classified as "es".
func TestLanguageDetector_Detect_ES(t *testing.T) {
	d := NewLanguageDetector()
	// Check the error instead of discarding it, consistent with the EN test;
	// an unexpected Detect failure should fail loudly, not mask the assertion.
	lang, _, err := d.Detect(Text{Body: " el la es en de que por para el "})
	if err != nil {
		t.Fatalf("Detect returned error: %v", err)
	}
	if lang != "es" {
		t.Fatalf("expected language 'es', got %q", lang)
	}
}
// TestLanguageDetector_Detect_FR verifies that French function words are
// classified as "fr".
func TestLanguageDetector_Detect_FR(t *testing.T) {
	d := NewLanguageDetector()
	// Check the error instead of discarding it, consistent with the EN test;
	// an unexpected Detect failure should fail loudly, not mask the assertion.
	lang, _, err := d.Detect(Text{Body: " le la est en de que pour dans le "})
	if err != nil {
		t.Fatalf("Detect returned error: %v", err)
	}
	if lang != "fr" {
		t.Fatalf("expected language 'fr', got %q", lang)
	}
}
// TestLanguageDetector_Detect_DefaultEnglish verifies the fallback: text with
// no recognizable stop words defaults to English with confidence 0.5, per the
// implementation.
func TestLanguageDetector_Detect_DefaultEnglish(t *testing.T) {
	detector := NewLanguageDetector()

	lang, conf, err := detector.Detect(Text{Body: " lorem ipsum dolor sit amet "})
	if err != nil {
		t.Fatalf("Detect returned error: %v", err)
	}
	if lang != "en" {
		t.Fatalf("expected default language 'en', got %q", lang)
	}
	if conf != 0.5 {
		t.Errorf("expected default confidence 0.5, got %f", conf)
	}
}

View File

@ -0,0 +1,47 @@
package enrich
import "testing"
// TestLemmatizer_English verifies that common English inflected forms reduce
// to their dictionary lemma.
func TestLemmatizer_English(t *testing.T) {
	lem := NewLemmatizer()

	// Input word -> expected lemma. Iteration order is irrelevant: each case
	// is asserted independently.
	expected := map[string]string{
		"Went":    "go",
		"running": "run",
		"studies": "study",
		"cars":    "car",
		"looked":  "look",
	}
	for word, want := range expected {
		got, err := lem.Lemma(word, "en")
		if err != nil {
			t.Fatalf("Lemma returned error: %v", err)
		}
		if got != want {
			t.Errorf("Lemma(%q) = %q; want %q", word, got, want)
		}
	}
}
// TestLemmatizer_Spanish verifies that the Spanish gerund "hablando" is
// stemmed to "habl".
func TestLemmatizer_Spanish(t *testing.T) {
	lem := NewLemmatizer()
	got, err := lem.Lemma("hablando", "es")
	if err != nil {
		t.Fatalf("Lemma returned error: %v", err)
	}
	if want := "habl"; got != want {
		t.Errorf("Lemma(hablando) = %q; want 'habl'", got)
	}
}
// TestLemmatizer_French verifies that the French verb form "parlent" is
// stemmed to "parl".
func TestLemmatizer_French(t *testing.T) {
	lem := NewLemmatizer()
	got, err := lem.Lemma("parlent", "fr")
	if err != nil {
		t.Fatalf("Lemma returned error: %v", err)
	}
	if want := "parl"; got != want {
		t.Errorf("Lemma(parlent) = %q; want 'parl'", got)
	}
}

View File

@ -0,0 +1,43 @@
package enrich
import "testing"
// TestPhoneticEncoder_Soundex checks known Soundex codes for reference names
// and the zero-padding rule for a single-letter input.
func TestPhoneticEncoder_Soundex(t *testing.T) {
	enc := NewPhoneticEncoder()

	// Deterministic case order (a slice rather than a map).
	cases := []struct {
		in   string
		want string
	}{
		{in: "Robert", want: "R163"},
		{in: "Ashcraft", want: "A261"},
	}
	for _, c := range cases {
		if got := enc.Encode(c.in); got != c.want {
			t.Errorf("Encode(%q) = %q; want %q", c.in, got, c.want)
		}
	}

	// A lone letter pads with zeros to the four-character code.
	if got := enc.Encode("P"); got != "P000" {
		t.Errorf("Encode(P) = %q; want P000", got)
	}
}
// TestPhoneticEncoder_DoubleMetaphoneVariation checks that the primary code
// for "Robert" matches the expected value and that the secondary code is a
// distinct variation of the same length.
func TestPhoneticEncoder_DoubleMetaphoneVariation(t *testing.T) {
	enc := NewPhoneticEncoder()

	primary, secondary := enc.DoubleMetaphone("Robert")
	if primary != "R163" {
		t.Fatalf("primary code = %q; want R163", primary)
	}
	if secondary == primary || len(secondary) != len(primary) {
		t.Errorf("secondary variation should differ but have same length: p=%q s=%q", primary, secondary)
	}
}
// TestPhoneticEncoder_Empty verifies that empty input yields empty codes from
// both Encode and DoubleMetaphone.
func TestPhoneticEncoder_Empty(t *testing.T) {
	enc := NewPhoneticEncoder()

	if got := enc.Encode(""); got != "" {
		t.Errorf("Encode(\"\") = %q; want empty", got)
	}

	primary, secondary := enc.DoubleMetaphone("")
	if primary != "" || secondary != "" {
		t.Errorf("DoubleMetaphone(\"\") = (%q,%q); want empty codes", primary, secondary)
	}
}

View File

@ -0,0 +1,32 @@
package enrich
import "testing"
func TestPoeticAnalyzer_QuatrainABAB(t *testing.T) {
poem := `In silver light we wander far, light
A gentle breeze across the sea, breeze
At quiet dusk we find a star, night
And in the dark we feel the freeze.`
// Last words: light, breeze, night, freeze -> ABAB by last 2 letters (ht, ze, ht, ze)
p := NewPoeticAnalyzer()
m, err := p.Analyse(Text{Body: poem})
if err != nil {
t.Fatalf("Analyse returned error: %v", err)
}
if m.RhymeScheme != "ABAB" {
t.Errorf("expected rhyme scheme ABAB, got %q", m.RhymeScheme)
}
if m.StanzaCount != 1 {
t.Errorf("expected 1 stanza, got %d", m.StanzaCount)
}
if m.LineCount != 4 {
t.Errorf("expected 4 lines, got %d", m.LineCount)
}
if m.Structure != "Quatrain" {
t.Errorf("expected structure Quatrain, got %q", m.Structure)
}
// Meter is heuristic; just ensure it's determined (not Unknown)
if m.MeterType == "Unknown" {
t.Errorf("expected a determined meter type, got %q", m.MeterType)
}
}

View File

@ -0,0 +1,29 @@
package enrich
import "testing"
func TestPOSTagger_Tag_Basic(t *testing.T) {
pos := NewPOSTagger()
tokens := []Token{
{Text: "the"}, // DET
{Text: "great"}, // ADJ (in common adjectives)
{Text: "fox"}, // default NOUN
{Text: "jumps"}, // VERB by suffix
{Text: "quickly"},// ADV by -ly
{Text: "over"}, // PREP
{Text: "him"}, // PRON
}
tags, err := pos.Tag(tokens)
if err != nil {
t.Fatalf("Tag returned error: %v", err)
}
expected := []string{"DET", "ADJ", "NOUN", "VERB", "ADV", "PREP", "PRON"}
if len(tags) != len(expected) {
t.Fatalf("expected %d tags, got %d: %#v", len(expected), len(tags), tags)
}
for i := range expected {
if tags[i] != expected[i] {
t.Errorf("tag %d: expected %s, got %s", i, expected[i], tags[i])
}
}
}

View File

@ -0,0 +1,58 @@
package enrich
import "testing"
func TestTokenizer_Basic(t *testing.T) {
text := Text{Body: "Hello, world! Go1 is great."}
tok := NewTokenizer()
tokens, err := tok.Tokenize(text)
if err != nil {
t.Fatalf("Tokenize returned error: %v", err)
}
expected := []string{"Hello", "world", "Go1", "is", "great"}
if len(tokens) != len(expected) {
t.Fatalf("expected %d tokens, got %d: %#v", len(expected), len(tokens), tokens)
}
for i, e := range expected {
if tokens[i].Text != e {
t.Errorf("token %d text: expected %q, got %q", i, e, tokens[i].Text)
}
if tokens[i].Position != i {
t.Errorf("token %d position: expected %d, got %d", i, i, tokens[i].Position)
}
if tokens[i].Length != len(e) {
t.Errorf("token %d length: expected %d, got %d", i, len(e), tokens[i].Length)
}
}
}
func TestTokenizer_UnicodeAndPunctuation(t *testing.T) {
text := Text{Body: "Привет, мир! — hello?"}
tok := NewTokenizer()
tokens, err := tok.Tokenize(text)
if err != nil {
t.Fatalf("Tokenize returned error: %v", err)
}
expected := []string{"Привет", "мир", "hello"}
if len(tokens) != len(expected) {
t.Fatalf("expected %d tokens, got %d: %#v", len(expected), len(tokens), tokens)
}
for i, e := range expected {
if tokens[i].Text != e {
t.Errorf("token %d text: expected %q, got %q", i, e, tokens[i].Text)
}
}
}
// TestTokenizer_Empty verifies that whitespace-only input produces no tokens.
func TestTokenizer_Empty(t *testing.T) {
	tok := NewTokenizer()
	tokens, err := tok.Tokenize(Text{Body: " \t\n "})
	if err != nil {
		t.Fatalf("Tokenize returned error: %v", err)
	}
	if got := len(tokens); got != 0 {
		t.Fatalf("expected 0 tokens for whitespace-only input, got %d", got)
	}
}

View File

@ -8,7 +8,7 @@ type WorkStats struct {
Comments int64 `gorm:"default:0"`
Bookmarks int64 `gorm:"default:0"`
Shares int64 `gorm:"default:0"`
WorkID uint
WorkID uint `gorm:"uniqueIndex;index"`
Work *Work `gorm:"foreignKey:WorkID"`
}
@ -19,7 +19,7 @@ type TranslationStats struct {
Likes int64 `gorm:"default:0"`
Comments int64 `gorm:"default:0"`
Shares int64 `gorm:"default:0"`
TranslationID uint
TranslationID uint `gorm:"uniqueIndex;index"`
Translation *Translation `gorm:"foreignKey:TranslationID"`
}
@ -32,7 +32,7 @@ type UserStats struct {
Comments int64 `gorm:"default:0"` // Number of comments posted
Likes int64 `gorm:"default:0"` // Number of likes given
Bookmarks int64 `gorm:"default:0"` // Number of bookmarks created
UserID uint
UserID uint `gorm:"uniqueIndex;index"`
User *User `gorm:"foreignKey:UserID"`
}
@ -42,7 +42,7 @@ type BookStats struct {
Sales int64 `gorm:"default:0"`
Views int64 `gorm:"default:0"`
Likes int64 `gorm:"default:0"`
BookID uint
BookID uint `gorm:"uniqueIndex;index"`
Book *Book `gorm:"foreignKey:BookID"`
}
@ -52,7 +52,7 @@ type CollectionStats struct {
Items int64 `gorm:"default:0"` // Number of works in the collection
Views int64 `gorm:"default:0"`
Likes int64 `gorm:"default:0"`
CollectionID uint
CollectionID uint `gorm:"uniqueIndex;index"`
Collection *Collection `gorm:"foreignKey:CollectionID"`
}
@ -62,6 +62,6 @@ type MediaStats struct {
Views int64 `gorm:"default:0"`
Downloads int64 `gorm:"default:0"`
Shares int64 `gorm:"default:0"`
MediaID uint
MediaID uint `gorm:"uniqueIndex;index"`
Media interface{} `gorm:"-"` // This would be a pointer to a Media type if it existed
}

View File

@ -20,11 +20,26 @@ func (j JSONB) Value() (driver.Value, error) {
// Scan unmarshals a JSONB value.
func (j *JSONB) Scan(value interface{}) error {
bytes, ok := value.([]byte)
if !ok {
return fmt.Errorf("failed to unmarshal JSONB value: %v", value)
if value == nil {
*j = JSONB{}
return nil
}
switch v := value.(type) {
case []byte:
if len(v) == 0 {
*j = JSONB{}
return nil
}
return json.Unmarshal(v, j)
case string:
if v == "" {
*j = JSONB{}
return nil
}
return json.Unmarshal([]byte(v), j)
default:
return fmt.Errorf("failed to unmarshal JSONB value of type %T: %v", value, value)
}
return json.Unmarshal(bytes, j)
}
// BaseModel contains common fields for all models

View File

@ -26,13 +26,13 @@ type Comment struct {
// Like represents a user like on a work, translation, or comment
type Like struct {
BaseModel
UserID uint
UserID uint `gorm:"index;uniqueIndex:uniq_like_user_target"`
User *User `gorm:"foreignKey:UserID"`
WorkID *uint
WorkID *uint `gorm:"index;uniqueIndex:uniq_like_user_target"`
Work *Work `gorm:"foreignKey:WorkID"`
TranslationID *uint
TranslationID *uint `gorm:"index;uniqueIndex:uniq_like_user_target"`
Translation *Translation `gorm:"foreignKey:TranslationID"`
CommentID *uint
CommentID *uint `gorm:"index;uniqueIndex:uniq_like_user_target"`
Comment *Comment `gorm:"foreignKey:CommentID"`
}
@ -40,9 +40,9 @@ type Like struct {
type Bookmark struct {
BaseModel
Name string `gorm:"size:100"`
UserID uint
User *User `gorm:"foreignKey:UserID"`
WorkID uint
UserID uint `gorm:"index;uniqueIndex:uniq_bookmark_user_work"`
User *User `gorm:"foreignKey:UserID"`
WorkID uint `gorm:"index;uniqueIndex:uniq_bookmark_user_work"`
Work *Work `gorm:"foreignKey:WorkID"`
Notes string `gorm:"type:text"`
LastReadAt *time.Time

View File

@ -3,9 +3,9 @@ package models
// BookWork represents the many-to-many relationship between books and works
type BookWork struct {
BaseModel
BookID uint
BookID uint `gorm:"index;uniqueIndex:uniq_book_work"`
Book *Book `gorm:"foreignKey:BookID"`
WorkID uint
WorkID uint `gorm:"index;uniqueIndex:uniq_book_work"`
Work *Work `gorm:"foreignKey:WorkID"`
Order int `gorm:"default:0"` // For ordering works in books
}
@ -13,30 +13,30 @@ type BookWork struct {
// AuthorCountry represents the many-to-many relationship between authors and countries
type AuthorCountry struct {
BaseModel
AuthorID uint
AuthorID uint `gorm:"index;uniqueIndex:uniq_author_country"`
Author *Author `gorm:"foreignKey:AuthorID"`
CountryID uint
CountryID uint `gorm:"index;uniqueIndex:uniq_author_country"`
Country *Country `gorm:"foreignKey:CountryID"`
}
// WorkAuthor represents authorship with role and order for a work
type WorkAuthor struct {
BaseModel
WorkID uint
WorkID uint `gorm:"index;uniqueIndex:uniq_work_author_role"`
Work *Work `gorm:"foreignKey:WorkID"`
AuthorID uint
AuthorID uint `gorm:"index;uniqueIndex:uniq_work_author_role"`
Author *Author `gorm:"foreignKey:AuthorID"`
Role string `gorm:"size:50;default:'author'"`
Role string `gorm:"size:50;default:'author';uniqueIndex:uniq_work_author_role"`
Ordinal int `gorm:"default:0"`
}
// BookAuthor represents book-level contributor role and order
type BookAuthor struct {
BaseModel
BookID uint
BookID uint `gorm:"index;uniqueIndex:uniq_book_author_role"`
Book *Book `gorm:"foreignKey:BookID"`
AuthorID uint
AuthorID uint `gorm:"index;uniqueIndex:uniq_book_author_role"`
Author *Author `gorm:"foreignKey:AuthorID"`
Role string `gorm:"size:50;default:'author'"`
Role string `gorm:"size:50;default:'author';uniqueIndex:uniq_book_author_role"`
Ordinal int `gorm:"default:0"`
}

View File

@ -23,13 +23,13 @@ type WritingStyle struct {
// LinguisticLayer represents a linguistic layer of analysis
type LinguisticLayer struct {
BaseModel
Name string `gorm:"size:100;not null"`
Name string `gorm:"size:100;not null"`
Description string `gorm:"type:text"`
Language string `gorm:"size:50;not null"`
Type string `gorm:"size:50"` // e.g., morphological, syntactic, semantic, etc.
WorkID uint
Work *Work `gorm:"foreignKey:WorkID"`
Data JSONB `gorm:"type:jsonb;default:'{}'"`
WorkID uint
Work *Work `gorm:"foreignKey:WorkID"`
Data JSONB `gorm:"type:jsonb;default:'{}'"`
}
// TextBlock represents a fine-grained unit of text
@ -91,8 +91,8 @@ type WordOccurrence struct {
TextBlockID uint
TextBlock *TextBlock `gorm:"foreignKey:TextBlockID"`
WordID *uint
Word *Word `gorm:"foreignKey:WordID"`
StartOffset int `gorm:"default:0"`
Word *Word `gorm:"foreignKey:WordID"`
StartOffset int `gorm:"default:0"`
EndOffset int `gorm:"default:0"`
Lemma string `gorm:"size:100"`
PartOfSpeech string `gorm:"size:20"`

View File

@ -7,35 +7,35 @@ import (
// LanguageAnalysis represents language analysis for a work
type LanguageAnalysis struct {
BaseModel
Language string `gorm:"size:50;not null"`
Language string `gorm:"size:50;not null;uniqueIndex:uniq_work_language_analysis"`
Analysis JSONB `gorm:"type:jsonb;default:'{}'"`
WorkID uint
Work *Work `gorm:"foreignKey:WorkID"`
WorkID uint `gorm:"index;uniqueIndex:uniq_work_language_analysis"`
Work *Work `gorm:"foreignKey:WorkID"`
}
// Gamification represents gamification elements for a user
type Gamification struct {
BaseModel
Points int `gorm:"default:0"`
Level int `gorm:"default:1"`
Badges JSONB `gorm:"type:jsonb;default:'{}'"`
Streaks int `gorm:"default:0"`
Points int `gorm:"default:0"`
Level int `gorm:"default:1"`
Badges JSONB `gorm:"type:jsonb;default:'{}'"`
Streaks int `gorm:"default:0"`
LastActive *time.Time
UserID uint
User *User `gorm:"foreignKey:UserID"`
UserID uint `gorm:"uniqueIndex;index"`
User *User `gorm:"foreignKey:UserID"`
}
// Stats represents general statistics
type Stats struct {
BaseModel
Data JSONB `gorm:"type:jsonb;default:'{}'"`
Period string `gorm:"size:50"` // e.g., daily, weekly, monthly, etc.
Data JSONB `gorm:"type:jsonb;default:'{}'"`
Period string `gorm:"size:50"` // e.g., daily, weekly, monthly, etc.
StartDate time.Time
EndDate time.Time
UserID *uint
User *User `gorm:"foreignKey:UserID"`
WorkID *uint
Work *Work `gorm:"foreignKey:WorkID"`
UserID *uint
User *User `gorm:"foreignKey:UserID"`
WorkID *uint
Work *Work `gorm:"foreignKey:WorkID"`
}
// SearchDocument is a denormalized text representation for indexing

View File

@ -32,9 +32,9 @@ type Series struct {
// WorkSeries is a join capturing a work's position in a series
type WorkSeries struct {
BaseModel
WorkID uint
Work *Work `gorm:"foreignKey:WorkID"`
SeriesID uint
WorkID uint `gorm:"index;uniqueIndex:uniq_work_series"`
Work *Work `gorm:"foreignKey:WorkID"`
SeriesID uint `gorm:"index;uniqueIndex:uniq_work_series"`
Series *Series `gorm:"foreignKey:SeriesID"`
NumberInSeries int `gorm:"default:0"`
}

View File

@ -3,12 +3,12 @@ package models
// Edge represents a polymorphic relationship between entities
type Edge struct {
BaseModel
SourceTable string `gorm:"size:50;not null"`
SourceID uint `gorm:"not null"`
TargetTable string `gorm:"size:50;not null"`
TargetID uint `gorm:"not null"`
Relation string `gorm:"size:50;default:'ASSOCIATED_WITH';not null"`
Language string `gorm:"size:10;default:'en'"`
SourceTable string `gorm:"size:50;not null;index:idx_edge_source;uniqueIndex:uniq_edge"`
SourceID uint `gorm:"not null;index:idx_edge_source;uniqueIndex:uniq_edge"`
TargetTable string `gorm:"size:50;not null;index:idx_edge_target;uniqueIndex:uniq_edge"`
TargetID uint `gorm:"not null;index:idx_edge_target;uniqueIndex:uniq_edge"`
Relation string `gorm:"size:50;default:'ASSOCIATED_WITH';not null;index;uniqueIndex:uniq_edge"`
Language string `gorm:"size:10;default:'en';index;uniqueIndex:uniq_edge"`
Extra JSONB `gorm:"type:jsonb;default:'{}'"`
}
@ -18,9 +18,9 @@ type Embedding struct {
BaseModel
// External vector storage reference (e.g., Weaviate object UUID)
ExternalID string `gorm:"size:64;index"`
EntityType string `gorm:"size:50;not null"`
EntityID uint `gorm:"not null"`
Model string `gorm:"size:50;not null"` // e.g., bert, gpt, etc.
EntityType string `gorm:"size:50;not null;index:idx_embedding_entity;uniqueIndex:uniq_embedding"`
EntityID uint `gorm:"not null;index:idx_embedding_entity;uniqueIndex:uniq_embedding"`
Model string `gorm:"size:50;not null;uniqueIndex:uniq_embedding"` // e.g., bert, gpt, etc.
Dim int `gorm:"default:0"`
WorkID *uint
Work *Work `gorm:"foreignKey:WorkID"`

View File

@ -52,13 +52,13 @@ const (
// CopyrightClaim represents a copyright claim
type CopyrightClaim struct {
BaseModel
Details string `gorm:"type:text;not null"`
Status CopyrightClaimStatus `gorm:"size:50;default:'pending'"`
ClaimDate time.Time `gorm:"not null"`
Details string `gorm:"type:text;not null"`
Status CopyrightClaimStatus `gorm:"size:50;default:'pending'"`
ClaimDate time.Time `gorm:"not null"`
Resolution string `gorm:"type:text"`
ResolvedAt *time.Time
UserID *uint
User *User `gorm:"foreignKey:UserID"`
UserID *uint
User *User `gorm:"foreignKey:UserID"`
// Polymorphic relationship - can attach to any entity
Claimables []Copyrightable `gorm:"polymorphic:Copyrightable"`
}
@ -130,12 +130,12 @@ type ModerationFlag struct {
// AuditLog captures changes for governance and traceability
type AuditLog struct {
BaseModel
ActorID *uint
Actor *User `gorm:"foreignKey:ActorID"`
Action string `gorm:"size:50;not null"`
ActorID *uint
Actor *User `gorm:"foreignKey:ActorID"`
Action string `gorm:"size:50;not null"`
EntityType string `gorm:"size:50;not null"`
EntityID uint `gorm:"not null"`
Before JSONB `gorm:"type:jsonb;default:'{}'"`
After JSONB `gorm:"type:jsonb;default:'{}'"`
At time.Time `gorm:"autoCreateTime"`
EntityID uint `gorm:"not null"`
Before JSONB `gorm:"type:jsonb;default:'{}'"`
After JSONB `gorm:"type:jsonb;default:'{}'"`
At time.Time `gorm:"autoCreateTime"`
}

View File

@ -12,8 +12,8 @@ type Notification struct {
Read bool `gorm:"default:false"`
Language string `gorm:"size:50;not null"`
UserID uint
User *User `gorm:"foreignKey:UserID"`
RelatedID *uint // ID of the related entity (work, comment, etc.)
User *User `gorm:"foreignKey:UserID"`
RelatedID *uint // ID of the related entity (work, comment, etc.)
RelatedType string `gorm:"size:50"` // Type of the related entity
}
@ -75,8 +75,8 @@ type Contributor struct {
type InteractionEvent struct {
BaseModel
UserID *uint
User *User `gorm:"foreignKey:UserID"`
TargetType string `gorm:"size:50;not null"` // work|translation|comment|collection|media
User *User `gorm:"foreignKey:UserID"`
TargetType string `gorm:"size:50;not null"` // work|translation|comment|collection|media
TargetID uint `gorm:"not null"`
Kind string `gorm:"size:30;not null"` // view|like|comment|share|bookmark
OccurredAt time.Time `gorm:"index"`

View File

@ -58,9 +58,9 @@ type UserProfile struct {
// UserSession represents a user session
type UserSession struct {
BaseModel
UserID uint `gorm:"index"`
User *User `gorm:"foreignKey:UserID"`
Token string `gorm:"size:255;not null;uniqueIndex"`
UserID uint `gorm:"index"`
User *User `gorm:"foreignKey:UserID"`
Token string `gorm:"size:255;not null;uniqueIndex"`
IP string `gorm:"size:50"`
UserAgent string `gorm:"size:255"`
ExpiresAt time.Time `gorm:"not null"`
@ -69,9 +69,9 @@ type UserSession struct {
// PasswordReset represents a password reset request.
// A single-use token is issued to the user and becomes invalid once
// Used is set or ExpiresAt has passed.
type PasswordReset struct {
	BaseModel
	UserID    uint      `gorm:"index"`
	User      *User     `gorm:"foreignKey:UserID"`
	Token     string    `gorm:"size:255;not null;uniqueIndex"` // opaque reset token, unique across all requests
	ExpiresAt time.Time `gorm:"not null"`
	Used      bool      `gorm:"default:false"` // marked true after a successful reset
}
@ -79,9 +79,9 @@ type PasswordReset struct {
// EmailVerification represents an email verification request.
// Mirrors PasswordReset: a single-use, expiring token tied to a user.
type EmailVerification struct {
	BaseModel
	UserID    uint      `gorm:"index"`
	User      *User     `gorm:"foreignKey:UserID"`
	Token     string    `gorm:"size:255;not null;uniqueIndex"` // opaque verification token, unique across all requests
	ExpiresAt time.Time `gorm:"not null"`
	Used      bool      `gorm:"default:false"` // marked true once the email has been verified
}

View File

@ -1,14 +1,13 @@
package models_test
import (
models2 "tercul/internal/models"
"testing"
"tercul/internal/testutil"
"tercul/models"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
"golang.org/x/crypto/bcrypt"
"tercul/internal/testutil"
)
// UserModelSuite is a test suite for the User model
@ -16,28 +15,28 @@ import (
type UserModelSuite struct {
suite.Suite
users []*models.User
users []*models2.User
}
func (s *UserModelSuite) SetupSuite() {
s.users = []*models.User{}
s.users = []*models2.User{}
}
func (s *UserModelSuite) SetupTest() {
s.users = []*models.User{}
s.users = []*models2.User{}
}
// createTestUser creates a test user and stores it in-memory
func (s *UserModelSuite) createTestUser(username, email, password string) *models.User {
func (s *UserModelSuite) createTestUser(username, email, password string) *models2.User {
hashed, _ := hashPassword(password)
user := &models.User{
user := &models2.User{
Username: username,
Email: email,
Password: hashed,
FirstName: "Test",
LastName: "User",
DisplayName: "Test User",
Role: models.UserRoleReader,
Role: models2.UserRoleReader,
Active: true,
}
s.users = append(s.users, user)
@ -109,14 +108,14 @@ func (s *UserModelSuite) TestUserValidation() {
s.NotNil(user.Username, "User should be created with a valid Username")
// Invalid email
invalidEmailUser := &models.User{
invalidEmailUser := &models2.User{
Username: "testuser2",
Email: "invalid-email",
Password: "password123",
FirstName: "Test",
LastName: "User",
DisplayName: "Test User",
Role: models.UserRoleReader,
Role: models2.UserRoleReader,
Active: true,
}
isValidEmail := func(email string) bool {
@ -125,14 +124,14 @@ func (s *UserModelSuite) TestUserValidation() {
s.False(isValidEmail(invalidEmailUser.Email), "User with invalid email should not be created")
// Duplicate username
duplicateUsernameUser := &models.User{
duplicateUsernameUser := &models2.User{
Username: "testuser",
Email: "another@example.com",
Password: "password123",
FirstName: "Test",
LastName: "User",
DisplayName: "Test User",
Role: models.UserRoleReader,
Role: models2.UserRoleReader,
Active: true,
}
isDuplicateUsername := false
@ -145,14 +144,14 @@ func (s *UserModelSuite) TestUserValidation() {
s.True(isDuplicateUsername, "User with duplicate username should not be created")
// Duplicate email
duplicateEmailUser := &models.User{
duplicateEmailUser := &models2.User{
Username: "testuser3",
Email: "test@example.com",
Password: "password123",
FirstName: "Test",
LastName: "User",
DisplayName: "Test User",
Role: models.UserRoleReader,
Role: models2.UserRoleReader,
Active: true,
}
isDuplicateEmail := false
@ -167,15 +166,15 @@ func (s *UserModelSuite) TestUserValidation() {
// TestUserRoles tests the user role enum
func (s *UserModelSuite) TestUserRoles() {
roles := []models.UserRole{
models.UserRoleReader,
models.UserRoleContributor,
models.UserRoleReviewer,
models.UserRoleEditor,
models.UserRoleAdmin,
roles := []models2.UserRole{
models2.UserRoleReader,
models2.UserRoleContributor,
models2.UserRoleReviewer,
models2.UserRoleEditor,
models2.UserRoleAdmin,
}
for i, role := range roles {
user := &models.User{
user := &models2.User{
Username: "testuser" + string(rune(i+'0')),
Email: "test" + string(rune(i+'0')) + "@example.com",
Password: "password123",
@ -200,14 +199,14 @@ func TestUserModelSuite(t *testing.T) {
// TestUserBeforeSave tests the BeforeSave hook directly
func TestUserBeforeSave(t *testing.T) {
// Create a user with a plain text password
user := &models.User{
user := &models2.User{
Username: "testuser",
Email: "test@example.com",
Password: "password123",
FirstName: "Test",
LastName: "User",
DisplayName: "Test User",
Role: models.UserRoleReader,
Role: models2.UserRoleReader,
Active: true,
}

View File

@ -4,11 +4,11 @@ import (
"errors"
"fmt"
"strings"
"tercul/internal/models"
"time"
"github.com/golang-jwt/jwt/v5"
"tercul/config"
"tercul/models"
"tercul/internal/platform/config"
)
var (

View File

@ -5,7 +5,7 @@ import (
"net/http"
"strings"
"tercul/logger"
"tercul/internal/platform/log"
)
// ContextKey is a type for context keys
@ -32,9 +32,9 @@ func AuthMiddleware(jwtManager *JWTManager) func(http.Handler) http.Handler {
authHeader := r.Header.Get("Authorization")
tokenString, err := jwtManager.ExtractTokenFromHeader(authHeader)
if err != nil {
logger.LogWarn("Authentication failed - missing or invalid token",
logger.F("path", r.URL.Path),
logger.F("error", err))
log.LogWarn("Authentication failed - missing or invalid token",
log.F("path", r.URL.Path),
log.F("error", err))
http.Error(w, "Unauthorized", http.StatusUnauthorized)
return
}
@ -42,9 +42,9 @@ func AuthMiddleware(jwtManager *JWTManager) func(http.Handler) http.Handler {
// Validate token
claims, err := jwtManager.ValidateToken(tokenString)
if err != nil {
logger.LogWarn("Authentication failed - invalid token",
logger.F("path", r.URL.Path),
logger.F("error", err))
log.LogWarn("Authentication failed - invalid token",
log.F("path", r.URL.Path),
log.F("error", err))
http.Error(w, "Unauthorized", http.StatusUnauthorized)
return
}
@ -62,19 +62,19 @@ func RoleMiddleware(requiredRole string) func(http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
claims, ok := r.Context().Value(ClaimsContextKey).(*Claims)
if !ok {
logger.LogWarn("Authorization failed - no claims in context",
logger.F("path", r.URL.Path),
logger.F("required_role", requiredRole))
log.LogWarn("Authorization failed - no claims in context",
log.F("path", r.URL.Path),
log.F("required_role", requiredRole))
http.Error(w, "Forbidden", http.StatusForbidden)
return
}
jwtManager := NewJWTManager()
if err := jwtManager.RequireRole(claims.Role, requiredRole); err != nil {
logger.LogWarn("Authorization failed - insufficient role",
logger.F("path", r.URL.Path),
logger.F("user_role", claims.Role),
logger.F("required_role", requiredRole))
log.LogWarn("Authorization failed - insufficient role",
log.F("path", r.URL.Path),
log.F("user_role", claims.Role),
log.F("required_role", requiredRole))
http.Error(w, "Forbidden", http.StatusForbidden)
return
}
@ -103,8 +103,8 @@ func GraphQLAuthMiddleware(jwtManager *JWTManager) func(http.Handler) http.Handl
}
}
// If token is invalid, log warning but continue
logger.LogWarn("GraphQL authentication failed - continuing with anonymous access",
logger.F("path", r.URL.Path))
log.LogWarn("GraphQL authentication failed - continuing with anonymous access",
log.F("path", r.URL.Path))
}
// Continue without authentication

68
internal/platform/cache/cache_test.go vendored Normal file
View File

@ -0,0 +1,68 @@
package cache
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestDefaultKeyGenerator_DefaultPrefix verifies that a generator built with
// an empty prefix falls back to the default "tercul:" prefix for entity,
// list, and query keys.
func TestDefaultKeyGenerator_DefaultPrefix(t *testing.T) {
	gen := NewDefaultKeyGenerator("")
	require.NotNil(t, gen)

	// Table-driven cases covering each key shape the generator produces.
	cases := []struct {
		name       string
		entity     string
		id         uint
		page       int
		pageSize   int
		queryName  string
		params     []interface{}
		wantEntity string
		wantList   string
		wantQuery  string
	}{
		{
			name:       "basic",
			entity:     "user",
			id:         42,
			page:       1,
			pageSize:   20,
			queryName:  "byEmail",
			params:     []interface{}{"foo@bar.com"},
			wantEntity: "tercul:user:id:42",
			wantList:   "tercul:user:list:1:20",
			wantQuery:  "tercul:user:byEmail:foo@bar.com",
		},
		{
			name:       "different entity and multiple params",
			entity:     "work",
			id:         7,
			page:       3,
			pageSize:   15,
			queryName:  "search",
			params:     []interface{}{"abc", 2020, true},
			wantEntity: "tercul:work:id:7",
			wantList:   "tercul:work:list:3:15",
			wantQuery:  "tercul:work:search:abc:2020:true",
		},
	}

	for _, tc := range cases {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.wantEntity, gen.EntityKey(tc.entity, tc.id))
			assert.Equal(t, tc.wantList, gen.ListKey(tc.entity, tc.page, tc.pageSize))
			assert.Equal(t, tc.wantQuery, gen.QueryKey(tc.entity, tc.queryName, tc.params...))
		})
	}
}
// TestDefaultKeyGenerator_CustomPrefix verifies that a caller-supplied prefix
// is used verbatim in every generated key variant.
func TestDefaultKeyGenerator_CustomPrefix(t *testing.T) {
	gen := NewDefaultKeyGenerator("mypfx:")
	require.NotNil(t, gen)

	// Each key shape must carry the custom prefix, not the default one.
	assert.Equal(t, "mypfx:book:id:1", gen.EntityKey("book", 1))
	assert.Equal(t, "mypfx:book:list:2:10", gen.ListKey("book", 2, 10))
	assert.Equal(t, "mypfx:book:find:tag:99", gen.QueryKey("book", "find", "tag", 99))
}

View File

@ -8,8 +8,8 @@ import (
"time"
"github.com/redis/go-redis/v9"
"tercul/config"
"tercul/logger"
"tercul/internal/platform/config"
"tercul/internal/platform/log"
)
// RedisCache implements the Cache interface using Redis
@ -112,9 +112,9 @@ func (c *RedisCache) GetMulti(ctx context.Context, keys []string) (map[string][]
str, ok := values[i].(string)
if !ok {
logger.LogWarn("Invalid type in Redis cache",
logger.F("key", key),
logger.F("type", fmt.Sprintf("%T", values[i])))
log.LogWarn("Invalid type in Redis cache",
log.F("key", key),
log.F("type", fmt.Sprintf("%T", values[i])))
continue
}

View File

@ -53,8 +53,8 @@ type Config struct {
NLPUseTFIDF bool
// NLP cache configuration
NLPMemoryCacheCap int
NLPRedisCacheTTLSeconds int
NLPMemoryCacheCap int
NLPRedisCacheTTLSeconds int
}
// Cfg is the global configuration instance
@ -146,16 +146,16 @@ func getEnvAsInt(key string, defaultValue int) int {
// getEnvAsBool gets an environment variable as a boolean or returns a
// default value. Recognizes common truthy ("1", "true", "yes", "on") and
// falsy ("0", "false", "no", "off") spellings in their usual casings;
// any other value — including an unset/empty variable — yields defaultValue.
func getEnvAsBool(key string, defaultValue bool) bool {
	valueStr := getEnv(key, "")
	if valueStr == "" {
		return defaultValue
	}
	switch valueStr {
	case "1", "true", "TRUE", "True", "yes", "YES", "Yes", "on", "ON", "On":
		return true
	case "0", "false", "FALSE", "False", "no", "NO", "No", "off", "OFF", "Off":
		return false
	default:
		// Unrecognized spelling: fail safe to the caller-provided default.
		return defaultValue
	}
}

View File

@ -7,8 +7,8 @@ import (
"gorm.io/driver/postgres"
"gorm.io/gorm"
gormlogger "gorm.io/gorm/logger"
"tercul/config"
"tercul/logger"
"tercul/internal/platform/config"
"tercul/internal/platform/log"
)
// DB is a global database connection instance
@ -17,9 +17,9 @@ var DB *gorm.DB
// Connect establishes a connection to the database using configuration settings
// It returns the database connection and any error encountered
func Connect() (*gorm.DB, error) {
logger.LogInfo("Connecting to database",
logger.F("host", config.Cfg.DBHost),
logger.F("database", config.Cfg.DBName))
log.LogInfo("Connecting to database",
log.F("host", config.Cfg.DBHost),
log.F("database", config.Cfg.DBName))
dsn := config.Cfg.GetDSN()
db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{
@ -43,9 +43,9 @@ func Connect() (*gorm.DB, error) {
sqlDB.SetMaxIdleConns(5) // Idle connections
sqlDB.SetConnMaxLifetime(30 * time.Minute)
logger.LogInfo("Successfully connected to database",
logger.F("host", config.Cfg.DBHost),
logger.F("database", config.Cfg.DBName))
log.LogInfo("Successfully connected to database",
log.F("host", config.Cfg.DBHost),
log.F("database", config.Cfg.DBName))
return db, nil
}

View File

@ -2,142 +2,142 @@ package db
import (
"gorm.io/gorm"
"tercul/logger"
"tercul/models"
models2 "tercul/internal/models"
"tercul/internal/platform/log"
)
// RunMigrations runs all database migrations: first it creates/updates all
// tables via GORM AutoMigrate, then it adds performance indexes. It returns
// the first error encountered; a nil return means both phases succeeded.
func RunMigrations(db *gorm.DB) error {
	log.LogInfo("Running database migrations")

	// First, create all tables using GORM AutoMigrate.
	if err := createTables(db); err != nil {
		log.LogError("Failed to create tables", log.F("error", err))
		return err
	}

	// Then add indexes to improve query performance.
	if err := addIndexes(db); err != nil {
		log.LogError("Failed to add indexes", log.F("error", err))
		return err
	}

	log.LogInfo("Database migrations completed successfully")
	return nil
}
// createTables creates all database tables using GORM AutoMigrate.
// It first enables the pg_trgm extension (used by trigram indexes added
// later), then migrates every model in the schema, grouped by domain.
// Returns the first error from the extension DDL or AutoMigrate.
func createTables(db *gorm.DB) error {
	log.LogInfo("Creating database tables")

	// Enable recommended extensions (pg_trgm supports trigram text search).
	if err := db.Exec("CREATE EXTENSION IF NOT EXISTS pg_trgm").Error; err != nil {
		log.LogError("Failed to enable pg_trgm extension", log.F("error", err))
		return err
	}

	// Create all models/tables.
	err := db.AutoMigrate(
		// User-related models
		&models2.User{},
		&models2.UserProfile{},
		&models2.UserSession{},
		&models2.PasswordReset{},
		&models2.EmailVerification{},

		// Literary models
		&models2.Work{},
		&models2.Translation{},
		&models2.Author{},
		&models2.Book{},
		&models2.Publisher{},
		&models2.Source{},
		&models2.Edition{},
		&models2.Series{},
		&models2.WorkSeries{},

		// Organization models
		&models2.Tag{},
		&models2.Category{},

		// Interaction models
		&models2.Comment{},
		&models2.Like{},
		&models2.Bookmark{},
		&models2.Collection{},
		&models2.Contribution{},
		&models2.InteractionEvent{},

		// Location models
		&models2.Country{},
		&models2.City{},
		&models2.Place{},
		&models2.Address{},
		&models2.Language{},

		// Linguistic models
		&models2.ReadabilityScore{},
		&models2.WritingStyle{},
		&models2.LinguisticLayer{},
		&models2.TextMetadata{},
		&models2.PoeticAnalysis{},
		&models2.Word{},
		&models2.Concept{},
		&models2.LanguageEntity{},
		&models2.TextBlock{},
		&models2.WordOccurrence{},
		&models2.EntityOccurrence{},

		// Relationship models
		&models2.Edge{},
		&models2.Embedding{},
		&models2.Media{},
		&models2.BookWork{},
		&models2.AuthorCountry{},
		&models2.WorkAuthor{},
		&models2.BookAuthor{},

		// System models
		&models2.Notification{},
		&models2.EditorialWorkflow{},
		&models2.Admin{},
		&models2.Vote{},
		&models2.Contributor{},
		&models2.HybridEntityWork{},
		&models2.ModerationFlag{},
		&models2.AuditLog{},

		// Rights models
		&models2.Copyright{},
		&models2.CopyrightClaim{},
		&models2.Monetization{},
		&models2.License{},

		// Analytics models
		&models2.WorkStats{},
		&models2.TranslationStats{},
		&models2.UserStats{},
		&models2.BookStats{},
		&models2.CollectionStats{},
		&models2.MediaStats{},

		// Metadata models
		&models2.LanguageAnalysis{},
		&models2.Gamification{},
		&models2.Stats{},
		&models2.SearchDocument{},

		// Psychological models
		&models2.Emotion{},
		&models2.Mood{},
		&models2.TopicCluster{},
	)
	if err != nil {
		return err
	}

	log.LogInfo("Database tables created successfully")
	return nil
}
@ -326,6 +326,6 @@ func addIndexes(db *gorm.DB) error {
return err
}
logger.LogInfo("Database indexes added successfully")
log.LogInfo("Database indexes added successfully")
return nil
}

View File

@ -1,10 +1,10 @@
package middleware
package http
import (
"net/http"
"sync"
"tercul/config"
"tercul/logger"
"tercul/internal/platform/config"
"tercul/internal/platform/log"
"time"
)
@ -85,9 +85,9 @@ func RateLimitMiddleware(next http.Handler) http.Handler {
// Check if request is allowed
if !rateLimiter.Allow(clientID) {
logger.LogWarn("Rate limit exceeded",
logger.F("clientID", clientID),
logger.F("path", r.URL.Path))
log.LogWarn("Rate limit exceeded",
log.F("clientID", clientID),
log.F("path", r.URL.Path))
w.WriteHeader(http.StatusTooManyRequests)
w.Write([]byte("Rate limit exceeded. Please try again later."))

View File

@ -1,4 +1,4 @@
package middleware_test
package http_test
import (
"net/http"
@ -6,8 +6,8 @@ import (
"testing"
"time"
"tercul/config"
"tercul/middleware"
"tercul/internal/platform/config"
platformhttp "tercul/internal/platform/http"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
@ -21,7 +21,7 @@ type RateLimiterSuite struct {
// TestRateLimiter tests the RateLimiter
func (s *RateLimiterSuite) TestRateLimiter() {
// Create a new rate limiter with 2 requests per second and a burst of 3
limiter := middleware.NewRateLimiter(2, 3)
limiter := platformhttp.NewRateLimiter(2, 3)
// Test that the first 3 requests are allowed (burst)
for i := 0; i < 3; i++ {
@ -50,7 +50,7 @@ func (s *RateLimiterSuite) TestRateLimiter() {
// TestRateLimiterMultipleClients tests the RateLimiter with multiple clients
func (s *RateLimiterSuite) TestRateLimiterMultipleClients() {
// Create a new rate limiter with 2 requests per second and a burst of 3
limiter := middleware.NewRateLimiter(2, 3)
limiter := platformhttp.NewRateLimiter(2, 3)
// Test that the first 3 requests for client1 are allowed (burst)
for i := 0; i < 3; i++ {
@ -85,7 +85,7 @@ func (s *RateLimiterSuite) TestRateLimiterMiddleware() {
})
// Create a rate limiter middleware with 2 requests per second and a burst of 3
middleware := middleware.RateLimitMiddleware(testHandler)
middleware := platformhttp.RateLimitMiddleware(testHandler)
// Create a test server
server := httptest.NewServer(middleware)
@ -144,22 +144,22 @@ func TestRateLimiterSuite(t *testing.T) {
// TestNewRateLimiter tests the NewRateLimiter function
func TestNewRateLimiter(t *testing.T) {
// Test with valid parameters
limiter := middleware.NewRateLimiter(10, 20)
limiter := platformhttp.NewRateLimiter(10, 20)
assert.NotNil(t, limiter, "NewRateLimiter should return a non-nil limiter")
// Test with zero rate (should use default)
limiter = middleware.NewRateLimiter(0, 20)
limiter = platformhttp.NewRateLimiter(0, 20)
assert.NotNil(t, limiter, "NewRateLimiter should return a non-nil limiter with default rate")
// Test with zero capacity (should use default)
limiter = middleware.NewRateLimiter(10, 0)
limiter = platformhttp.NewRateLimiter(10, 0)
assert.NotNil(t, limiter, "NewRateLimiter should return a non-nil limiter with default capacity")
// Test with negative rate (should use default)
limiter = middleware.NewRateLimiter(-10, 20)
limiter = platformhttp.NewRateLimiter(-10, 20)
assert.NotNil(t, limiter, "NewRateLimiter should return a non-nil limiter with default rate")
// Test with negative capacity (should use default)
limiter = middleware.NewRateLimiter(10, -20)
limiter = platformhttp.NewRateLimiter(10, -20)
assert.NotNil(t, limiter, "NewRateLimiter should return a non-nil limiter with default capacity")
}

View File

@ -1,4 +1,4 @@
package logger
package log
import (
"fmt"

View File

@ -1,8 +1,8 @@
package weaviate
package search
import (
"context"
"log"
"fmt"
"github.com/weaviate/weaviate-go-client/v5/weaviate"
"github.com/weaviate/weaviate/entities/models"
@ -520,9 +520,9 @@ func CreateSchema(client *weaviate.Client) {
for _, class := range classes {
err := client.Schema().ClassCreator().WithClass(class).Do(context.Background())
if err != nil {
log.Printf("Failed to create class %s: %v", class.Class, err)
fmt.Printf("Failed to create class %s: %v", class.Class, err)
}
}
log.Println("Weaviate schema created successfully.")
fmt.Println("Weaviate schema created successfully.")
}

View File

@ -1,14 +1,13 @@
package weaviate
package search
import (
"context"
"fmt"
"log"
"tercul/models"
"tercul/internal/models"
"tercul/internal/platform/config"
"time"
"tercul/config"
"github.com/weaviate/weaviate-go-client/v5/weaviate"
)

View File

@ -3,7 +3,7 @@ package repositories
import (
"context"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// AuthorRepository defines CRUD methods specific to Author.

View File

@ -7,8 +7,8 @@ import (
"time"
"gorm.io/gorm"
"tercul/config"
"tercul/logger"
"tercul/internal/platform/config"
"tercul/internal/platform/log"
)
// Common repository errors
@ -198,14 +198,14 @@ func (r *BaseRepositoryImpl[T]) Create(ctx context.Context, entity *T) error {
duration := time.Since(start)
if err != nil {
logger.LogError("Failed to create entity",
logger.F("error", err),
logger.F("duration", duration))
log.LogError("Failed to create entity",
log.F("error", err),
log.F("duration", duration))
return fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
}
logger.LogDebug("Entity created successfully",
logger.F("duration", duration))
log.LogDebug("Entity created successfully",
log.F("duration", duration))
return nil
}
@ -226,14 +226,14 @@ func (r *BaseRepositoryImpl[T]) CreateInTx(ctx context.Context, tx *gorm.DB, ent
duration := time.Since(start)
if err != nil {
logger.LogError("Failed to create entity in transaction",
logger.F("error", err),
logger.F("duration", duration))
log.LogError("Failed to create entity in transaction",
log.F("error", err),
log.F("duration", duration))
return fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
}
logger.LogDebug("Entity created successfully in transaction",
logger.F("duration", duration))
log.LogDebug("Entity created successfully in transaction",
log.F("duration", duration))
return nil
}
@ -253,21 +253,21 @@ func (r *BaseRepositoryImpl[T]) GetByID(ctx context.Context, id uint) (*T, error
if err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
logger.LogDebug("Entity not found",
logger.F("id", id),
logger.F("duration", duration))
log.LogDebug("Entity not found",
log.F("id", id),
log.F("duration", duration))
return nil, ErrEntityNotFound
}
logger.LogError("Failed to get entity by ID",
logger.F("id", id),
logger.F("error", err),
logger.F("duration", duration))
log.LogError("Failed to get entity by ID",
log.F("id", id),
log.F("error", err),
log.F("duration", duration))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
}
logger.LogDebug("Entity retrieved successfully",
logger.F("id", id),
logger.F("duration", duration))
log.LogDebug("Entity retrieved successfully",
log.F("id", id),
log.F("duration", duration))
return &entity, nil
}
@ -288,21 +288,21 @@ func (r *BaseRepositoryImpl[T]) GetByIDWithOptions(ctx context.Context, id uint,
if err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
logger.LogDebug("Entity not found with options",
logger.F("id", id),
logger.F("duration", duration))
log.LogDebug("Entity not found with options",
log.F("id", id),
log.F("duration", duration))
return nil, ErrEntityNotFound
}
logger.LogError("Failed to get entity by ID with options",
logger.F("id", id),
logger.F("error", err),
logger.F("duration", duration))
log.LogError("Failed to get entity by ID with options",
log.F("id", id),
log.F("error", err),
log.F("duration", duration))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
}
logger.LogDebug("Entity retrieved successfully with options",
logger.F("id", id),
logger.F("duration", duration))
log.LogDebug("Entity retrieved successfully with options",
log.F("id", id),
log.F("duration", duration))
return &entity, nil
}
@ -320,14 +320,14 @@ func (r *BaseRepositoryImpl[T]) Update(ctx context.Context, entity *T) error {
duration := time.Since(start)
if err != nil {
logger.LogError("Failed to update entity",
logger.F("error", err),
logger.F("duration", duration))
log.LogError("Failed to update entity",
log.F("error", err),
log.F("duration", duration))
return fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
}
logger.LogDebug("Entity updated successfully",
logger.F("duration", duration))
log.LogDebug("Entity updated successfully",
log.F("duration", duration))
return nil
}
@ -348,14 +348,14 @@ func (r *BaseRepositoryImpl[T]) UpdateInTx(ctx context.Context, tx *gorm.DB, ent
duration := time.Since(start)
if err != nil {
logger.LogError("Failed to update entity in transaction",
logger.F("error", err),
logger.F("duration", duration))
log.LogError("Failed to update entity in transaction",
log.F("error", err),
log.F("duration", duration))
return fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
}
logger.LogDebug("Entity updated successfully in transaction",
logger.F("duration", duration))
log.LogDebug("Entity updated successfully in transaction",
log.F("duration", duration))
return nil
}
@ -374,24 +374,24 @@ func (r *BaseRepositoryImpl[T]) Delete(ctx context.Context, id uint) error {
duration := time.Since(start)
if result.Error != nil {
logger.LogError("Failed to delete entity",
logger.F("id", id),
logger.F("error", result.Error),
logger.F("duration", duration))
log.LogError("Failed to delete entity",
log.F("id", id),
log.F("error", result.Error),
log.F("duration", duration))
return fmt.Errorf("%w: %v", ErrDatabaseOperation, result.Error)
}
if result.RowsAffected == 0 {
logger.LogDebug("No entity found to delete",
logger.F("id", id),
logger.F("duration", duration))
log.LogDebug("No entity found to delete",
log.F("id", id),
log.F("duration", duration))
return ErrEntityNotFound
}
logger.LogDebug("Entity deleted successfully",
logger.F("id", id),
logger.F("rowsAffected", result.RowsAffected),
logger.F("duration", duration))
log.LogDebug("Entity deleted successfully",
log.F("id", id),
log.F("rowsAffected", result.RowsAffected),
log.F("duration", duration))
return nil
}
@ -413,24 +413,24 @@ func (r *BaseRepositoryImpl[T]) DeleteInTx(ctx context.Context, tx *gorm.DB, id
duration := time.Since(start)
if result.Error != nil {
logger.LogError("Failed to delete entity in transaction",
logger.F("id", id),
logger.F("error", result.Error),
logger.F("duration", duration))
log.LogError("Failed to delete entity in transaction",
log.F("id", id),
log.F("error", result.Error),
log.F("duration", duration))
return fmt.Errorf("%w: %v", ErrDatabaseOperation, result.Error)
}
if result.RowsAffected == 0 {
logger.LogDebug("No entity found to delete in transaction",
logger.F("id", id),
logger.F("duration", duration))
log.LogDebug("No entity found to delete in transaction",
log.F("id", id),
log.F("duration", duration))
return ErrEntityNotFound
}
logger.LogDebug("Entity deleted successfully in transaction",
logger.F("id", id),
logger.F("rowsAffected", result.RowsAffected),
logger.F("duration", duration))
log.LogDebug("Entity deleted successfully in transaction",
log.F("id", id),
log.F("rowsAffected", result.RowsAffected),
log.F("duration", duration))
return nil
}
@ -451,9 +451,9 @@ func (r *BaseRepositoryImpl[T]) List(ctx context.Context, page, pageSize int) (*
// Get total count
if err := r.db.WithContext(ctx).Model(new(T)).Count(&totalCount).Error; err != nil {
logger.LogError("Failed to count entities",
logger.F("error", err),
logger.F("duration", time.Since(start)))
log.LogError("Failed to count entities",
log.F("error", err),
log.F("duration", time.Since(start)))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
}
@ -462,11 +462,11 @@ func (r *BaseRepositoryImpl[T]) List(ctx context.Context, page, pageSize int) (*
// Get paginated data
if err := r.db.WithContext(ctx).Offset(offset).Limit(pageSize).Find(&entities).Error; err != nil {
logger.LogError("Failed to get paginated entities",
logger.F("page", page),
logger.F("pageSize", pageSize),
logger.F("error", err),
logger.F("duration", time.Since(start)))
log.LogError("Failed to get paginated entities",
log.F("page", page),
log.F("pageSize", pageSize),
log.F("error", err),
log.F("duration", time.Since(start)))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
}
@ -481,14 +481,14 @@ func (r *BaseRepositoryImpl[T]) List(ctx context.Context, page, pageSize int) (*
hasNext := page < totalPages
hasPrev := page > 1
logger.LogDebug("Paginated entities retrieved successfully",
logger.F("page", page),
logger.F("pageSize", pageSize),
logger.F("totalCount", totalCount),
logger.F("totalPages", totalPages),
logger.F("hasNext", hasNext),
logger.F("hasPrev", hasPrev),
logger.F("duration", duration))
log.LogDebug("Paginated entities retrieved successfully",
log.F("page", page),
log.F("pageSize", pageSize),
log.F("totalCount", totalCount),
log.F("totalPages", totalPages),
log.F("hasNext", hasNext),
log.F("hasPrev", hasPrev),
log.F("duration", duration))
return &PaginatedResult[T]{
Items: entities,
@ -512,16 +512,16 @@ func (r *BaseRepositoryImpl[T]) ListWithOptions(ctx context.Context, options *Qu
query := r.buildQuery(r.db.WithContext(ctx), options)
if err := query.Find(&entities).Error; err != nil {
logger.LogError("Failed to get entities with options",
logger.F("error", err),
logger.F("duration", time.Since(start)))
log.LogError("Failed to get entities with options",
log.F("error", err),
log.F("duration", time.Since(start)))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
}
duration := time.Since(start)
logger.LogDebug("Entities retrieved successfully with options",
logger.F("count", len(entities)),
logger.F("duration", duration))
log.LogDebug("Entities retrieved successfully with options",
log.F("count", len(entities)),
log.F("duration", duration))
return entities, nil
}
@ -535,16 +535,16 @@ func (r *BaseRepositoryImpl[T]) ListAll(ctx context.Context) ([]T, error) {
start := time.Now()
var entities []T
if err := r.db.WithContext(ctx).Find(&entities).Error; err != nil {
logger.LogError("Failed to get all entities",
logger.F("error", err),
logger.F("duration", time.Since(start)))
log.LogError("Failed to get all entities",
log.F("error", err),
log.F("duration", time.Since(start)))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
}
duration := time.Since(start)
logger.LogDebug("All entities retrieved successfully",
logger.F("count", len(entities)),
logger.F("duration", duration))
log.LogDebug("All entities retrieved successfully",
log.F("count", len(entities)),
log.F("duration", duration))
return entities, nil
}
@ -558,16 +558,16 @@ func (r *BaseRepositoryImpl[T]) Count(ctx context.Context) (int64, error) {
start := time.Now()
var count int64
if err := r.db.WithContext(ctx).Model(new(T)).Count(&count).Error; err != nil {
logger.LogError("Failed to count entities",
logger.F("error", err),
logger.F("duration", time.Since(start)))
log.LogError("Failed to count entities",
log.F("error", err),
log.F("duration", time.Since(start)))
return 0, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
}
duration := time.Since(start)
logger.LogDebug("Entity count retrieved successfully",
logger.F("count", count),
logger.F("duration", duration))
log.LogDebug("Entity count retrieved successfully",
log.F("count", count),
log.F("duration", duration))
return count, nil
}
@ -583,16 +583,16 @@ func (r *BaseRepositoryImpl[T]) CountWithOptions(ctx context.Context, options *Q
query := r.buildQuery(r.db.WithContext(ctx), options)
if err := query.Model(new(T)).Count(&count).Error; err != nil {
logger.LogError("Failed to count entities with options",
logger.F("error", err),
logger.F("duration", time.Since(start)))
log.LogError("Failed to count entities with options",
log.F("error", err),
log.F("duration", time.Since(start)))
return 0, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
}
duration := time.Since(start)
logger.LogDebug("Entity count retrieved successfully with options",
logger.F("count", count),
logger.F("duration", duration))
log.LogDebug("Entity count retrieved successfully with options",
log.F("count", count),
log.F("duration", duration))
return count, nil
}
@ -616,25 +616,25 @@ func (r *BaseRepositoryImpl[T]) FindWithPreload(ctx context.Context, preloads []
if err := query.First(&entity, id).Error; err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
logger.LogDebug("Entity not found with preloads",
logger.F("id", id),
logger.F("preloads", preloads),
logger.F("duration", time.Since(start)))
log.LogDebug("Entity not found with preloads",
log.F("id", id),
log.F("preloads", preloads),
log.F("duration", time.Since(start)))
return nil, ErrEntityNotFound
}
logger.LogError("Failed to get entity with preloads",
logger.F("id", id),
logger.F("preloads", preloads),
logger.F("error", err),
logger.F("duration", time.Since(start)))
log.LogError("Failed to get entity with preloads",
log.F("id", id),
log.F("preloads", preloads),
log.F("error", err),
log.F("duration", time.Since(start)))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
}
duration := time.Since(start)
logger.LogDebug("Entity retrieved successfully with preloads",
logger.F("id", id),
logger.F("preloads", preloads),
logger.F("duration", duration))
log.LogDebug("Entity retrieved successfully with preloads",
log.F("id", id),
log.F("preloads", preloads),
log.F("duration", duration))
return &entity, nil
}
@ -659,20 +659,20 @@ func (r *BaseRepositoryImpl[T]) GetAllForSync(ctx context.Context, batchSize, of
start := time.Now()
var entities []T
if err := r.db.WithContext(ctx).Offset(offset).Limit(batchSize).Find(&entities).Error; err != nil {
logger.LogError("Failed to get entities for sync",
logger.F("batchSize", batchSize),
logger.F("offset", offset),
logger.F("error", err),
logger.F("duration", time.Since(start)))
log.LogError("Failed to get entities for sync",
log.F("batchSize", batchSize),
log.F("offset", offset),
log.F("error", err),
log.F("duration", time.Since(start)))
return nil, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
}
duration := time.Since(start)
logger.LogDebug("Entities retrieved successfully for sync",
logger.F("batchSize", batchSize),
logger.F("offset", offset),
logger.F("count", len(entities)),
logger.F("duration", duration))
log.LogDebug("Entities retrieved successfully for sync",
log.F("batchSize", batchSize),
log.F("offset", offset),
log.F("count", len(entities)),
log.F("duration", duration))
return entities, nil
}
@ -689,20 +689,20 @@ func (r *BaseRepositoryImpl[T]) Exists(ctx context.Context, id uint) (bool, erro
start := time.Now()
var count int64
if err := r.db.WithContext(ctx).Model(new(T)).Where("id = ?", id).Count(&count).Error; err != nil {
logger.LogError("Failed to check entity existence",
logger.F("id", id),
logger.F("error", err),
logger.F("duration", time.Since(start)))
log.LogError("Failed to check entity existence",
log.F("id", id),
log.F("error", err),
log.F("duration", time.Since(start)))
return false, fmt.Errorf("%w: %v", ErrDatabaseOperation, err)
}
duration := time.Since(start)
exists := count > 0
logger.LogDebug("Entity existence checked",
logger.F("id", id),
logger.F("exists", exists),
logger.F("duration", duration))
log.LogDebug("Entity existence checked",
log.F("id", id),
log.F("exists", exists),
log.F("duration", duration))
return exists, nil
}
@ -715,12 +715,12 @@ func (r *BaseRepositoryImpl[T]) BeginTx(ctx context.Context) (*gorm.DB, error) {
tx := r.db.WithContext(ctx).Begin()
if tx.Error != nil {
logger.LogError("Failed to begin transaction",
logger.F("error", tx.Error))
log.LogError("Failed to begin transaction",
log.F("error", tx.Error))
return nil, fmt.Errorf("%w: %v", ErrTransactionFailed, tx.Error)
}
logger.LogDebug("Transaction started successfully")
log.LogDebug("Transaction started successfully")
return tx, nil
}
@ -738,29 +738,29 @@ func (r *BaseRepositoryImpl[T]) WithTx(ctx context.Context, fn func(tx *gorm.DB)
defer func() {
if r := recover(); r != nil {
tx.Rollback()
logger.LogError("Transaction panic recovered",
logger.F("panic", r))
log.LogError("Transaction panic recovered",
log.F("panic", r))
}
}()
if err := fn(tx); err != nil {
if rbErr := tx.Rollback().Error; rbErr != nil {
logger.LogError("Failed to rollback transaction",
logger.F("originalError", err),
logger.F("rollbackError", rbErr))
log.LogError("Failed to rollback transaction",
log.F("originalError", err),
log.F("rollbackError", rbErr))
return fmt.Errorf("transaction failed and rollback failed: %v (rollback: %v)", err, rbErr)
}
logger.LogDebug("Transaction rolled back due to error",
logger.F("error", err))
log.LogDebug("Transaction rolled back due to error",
log.F("error", err))
return err
}
if err := tx.Commit().Error; err != nil {
logger.LogError("Failed to commit transaction",
logger.F("error", err))
log.LogError("Failed to commit transaction",
log.F("error", err))
return fmt.Errorf("%w: %v", ErrTransactionFailed, err)
}
logger.LogDebug("Transaction committed successfully")
log.LogDebug("Transaction committed successfully")
return nil
}

View File

@ -4,7 +4,7 @@ import (
"context"
"errors"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// BookRepository defines CRUD methods specific to Book.

View File

@ -3,7 +3,7 @@ package repositories
import (
"context"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// BookmarkRepository defines CRUD methods specific to Bookmark.

View File

@ -6,9 +6,8 @@ import (
"time"
"gorm.io/gorm"
"tercul/cache"
"tercul/logger"
"tercul/internal/platform/cache"
"tercul/internal/platform/log"
)
// simpleKeyGenerator implements the cache.KeyGenerator interface
@ -37,8 +36,8 @@ func (g *simpleKeyGenerator) QueryKey(entityType string, queryName string, param
// CachedRepository wraps a BaseRepository with caching functionality
type CachedRepository[T any] struct {
repo BaseRepository[T]
cache cache.Cache
repo BaseRepository[T]
cache cache.Cache
keyGenerator cache.KeyGenerator
entityType string
cacheExpiry time.Duration
@ -93,9 +92,9 @@ func (r *CachedRepository[T]) Create(ctx context.Context, entity *T) error {
if r.cacheEnabled {
if redisCache, ok := r.cache.(*cache.RedisCache); ok {
if err := redisCache.InvalidateEntityType(ctx, r.entityType); err != nil {
logger.LogWarn("Failed to invalidate cache",
logger.F("entityType", r.entityType),
logger.F("error", err))
log.LogWarn("Failed to invalidate cache",
log.F("entityType", r.entityType),
log.F("error", err))
}
}
}
@ -120,16 +119,16 @@ func (r *CachedRepository[T]) GetByID(ctx context.Context, id uint) (*T, error)
err := r.cache.Get(ctx, cacheKey, &entity)
if err == nil {
// Cache hit
logger.LogDebug("Cache hit",
logger.F("entityType", r.entityType),
logger.F("id", id))
log.LogDebug("Cache hit",
log.F("entityType", r.entityType),
log.F("id", id))
return &entity, nil
}
// Cache miss, get from database
logger.LogDebug("Cache miss",
logger.F("entityType", r.entityType),
logger.F("id", id))
log.LogDebug("Cache miss",
log.F("entityType", r.entityType),
log.F("id", id))
entity_ptr, err := r.repo.GetByID(ctx, id)
if err != nil {
@ -138,10 +137,10 @@ func (r *CachedRepository[T]) GetByID(ctx context.Context, id uint) (*T, error)
// Store in cache
if err := r.cache.Set(ctx, cacheKey, entity_ptr, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache entity",
logger.F("entityType", r.entityType),
logger.F("id", id),
logger.F("error", err))
log.LogWarn("Failed to cache entity",
log.F("entityType", r.entityType),
log.F("id", id),
log.F("error", err))
}
return entity_ptr, nil
@ -165,17 +164,17 @@ func (r *CachedRepository[T]) Update(ctx context.Context, entity *T) error {
// Invalidate specific entity cache
cacheKey := r.keyGenerator.EntityKey(r.entityType, 0) // We don't have ID here, so invalidate all
if err := r.cache.Delete(ctx, cacheKey); err != nil {
logger.LogWarn("Failed to invalidate entity cache",
logger.F("entityType", r.entityType),
logger.F("error", err))
log.LogWarn("Failed to invalidate entity cache",
log.F("entityType", r.entityType),
log.F("error", err))
}
// Invalidate list caches
if redisCache, ok := r.cache.(*cache.RedisCache); ok {
if err := redisCache.InvalidateEntityType(ctx, r.entityType); err != nil {
logger.LogWarn("Failed to invalidate cache",
logger.F("entityType", r.entityType),
logger.F("error", err))
log.LogWarn("Failed to invalidate cache",
log.F("entityType", r.entityType),
log.F("error", err))
}
}
}
@ -199,18 +198,18 @@ func (r *CachedRepository[T]) Delete(ctx context.Context, id uint) error {
if r.cacheEnabled {
cacheKey := r.keyGenerator.EntityKey(r.entityType, id)
if err := r.cache.Delete(ctx, cacheKey); err != nil {
logger.LogWarn("Failed to invalidate entity cache",
logger.F("entityType", r.entityType),
logger.F("id", id),
logger.F("error", err))
log.LogWarn("Failed to invalidate entity cache",
log.F("entityType", r.entityType),
log.F("id", id),
log.F("error", err))
}
// Invalidate list caches
if redisCache, ok := r.cache.(*cache.RedisCache); ok {
if err := redisCache.InvalidateEntityType(ctx, r.entityType); err != nil {
logger.LogWarn("Failed to invalidate cache",
logger.F("entityType", r.entityType),
logger.F("error", err))
log.LogWarn("Failed to invalidate cache",
log.F("entityType", r.entityType),
log.F("error", err))
}
}
}
@ -235,18 +234,18 @@ func (r *CachedRepository[T]) List(ctx context.Context, page, pageSize int) (*Pa
err := r.cache.Get(ctx, cacheKey, &result)
if err == nil {
// Cache hit
logger.LogDebug("Cache hit for list",
logger.F("entityType", r.entityType),
logger.F("page", page),
logger.F("pageSize", pageSize))
log.LogDebug("Cache hit for list",
log.F("entityType", r.entityType),
log.F("page", page),
log.F("pageSize", pageSize))
return &result, nil
}
// Cache miss, get from database
logger.LogDebug("Cache miss for list",
logger.F("entityType", r.entityType),
logger.F("page", page),
logger.F("pageSize", pageSize))
log.LogDebug("Cache miss for list",
log.F("entityType", r.entityType),
log.F("page", page),
log.F("pageSize", pageSize))
result_ptr, err := r.repo.List(ctx, page, pageSize)
if err != nil {
@ -255,11 +254,11 @@ func (r *CachedRepository[T]) List(ctx context.Context, page, pageSize int) (*Pa
// Store in cache
if err := r.cache.Set(ctx, cacheKey, result_ptr, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache list",
logger.F("entityType", r.entityType),
logger.F("page", page),
logger.F("pageSize", pageSize),
logger.F("error", err))
log.LogWarn("Failed to cache list",
log.F("entityType", r.entityType),
log.F("page", page),
log.F("pageSize", pageSize),
log.F("error", err))
}
return result_ptr, nil
@ -283,14 +282,14 @@ func (r *CachedRepository[T]) ListAll(ctx context.Context) ([]T, error) {
err := r.cache.Get(ctx, cacheKey, &entities)
if err == nil {
// Cache hit
logger.LogDebug("Cache hit for listAll",
logger.F("entityType", r.entityType))
log.LogDebug("Cache hit for listAll",
log.F("entityType", r.entityType))
return entities, nil
}
// Cache miss, get from database
logger.LogDebug("Cache miss for listAll",
logger.F("entityType", r.entityType))
log.LogDebug("Cache miss for listAll",
log.F("entityType", r.entityType))
entities, err = r.repo.ListAll(ctx)
if err != nil {
@ -299,9 +298,9 @@ func (r *CachedRepository[T]) ListAll(ctx context.Context) ([]T, error) {
// Store in cache
if err := r.cache.Set(ctx, cacheKey, entities, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache listAll",
logger.F("entityType", r.entityType),
logger.F("error", err))
log.LogWarn("Failed to cache listAll",
log.F("entityType", r.entityType),
log.F("error", err))
}
return entities, nil
@ -319,14 +318,14 @@ func (r *CachedRepository[T]) Count(ctx context.Context) (int64, error) {
err := r.cache.Get(ctx, cacheKey, &count)
if err == nil {
// Cache hit
logger.LogDebug("Cache hit for count",
logger.F("entityType", r.entityType))
log.LogDebug("Cache hit for count",
log.F("entityType", r.entityType))
return count, nil
}
// Cache miss, get from database
logger.LogDebug("Cache miss for count",
logger.F("entityType", r.entityType))
log.LogDebug("Cache miss for count",
log.F("entityType", r.entityType))
count, err = r.repo.Count(ctx)
if err != nil {
@ -335,9 +334,9 @@ func (r *CachedRepository[T]) Count(ctx context.Context) (int64, error) {
// Store in cache
if err := r.cache.Set(ctx, cacheKey, count, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache count",
logger.F("entityType", r.entityType),
logger.F("error", err))
log.LogWarn("Failed to cache count",
log.F("entityType", r.entityType),
log.F("error", err))
}
return count, nil

View File

@ -3,16 +3,15 @@ package repositories_test
import (
"context"
"errors"
"tercul/internal/models"
repositories2 "tercul/internal/repositories"
"testing"
"time"
"tercul/internal/testutil"
"tercul/models"
"tercul/repositories"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/suite"
"gorm.io/gorm"
"tercul/internal/testutil"
)
// TestModel is a simple entity used for cached repository tests
@ -77,7 +76,7 @@ func (m *MockRepository[T]) GetByID(ctx context.Context, id uint) (*T, error) {
return args.Get(0).(*T), args.Error(1)
}
func (m *MockRepository[T]) GetByIDWithOptions(ctx context.Context, id uint, options *repositories.QueryOptions) (*T, error) {
func (m *MockRepository[T]) GetByIDWithOptions(ctx context.Context, id uint, options *repositories2.QueryOptions) (*T, error) {
return nil, nil
}
@ -95,15 +94,15 @@ func (m *MockRepository[T]) Delete(ctx context.Context, id uint) error {
func (m *MockRepository[T]) DeleteInTx(ctx context.Context, tx *gorm.DB, id uint) error { return nil }
func (m *MockRepository[T]) List(ctx context.Context, page, pageSize int) (*repositories.PaginatedResult[T], error) {
func (m *MockRepository[T]) List(ctx context.Context, page, pageSize int) (*repositories2.PaginatedResult[T], error) {
args := m.Called(ctx, page, pageSize)
if args.Get(0) == nil {
return nil, args.Error(1)
}
return args.Get(0).(*repositories.PaginatedResult[T]), args.Error(1)
return args.Get(0).(*repositories2.PaginatedResult[T]), args.Error(1)
}
func (m *MockRepository[T]) ListWithOptions(ctx context.Context, options *repositories.QueryOptions) ([]T, error) {
func (m *MockRepository[T]) ListWithOptions(ctx context.Context, options *repositories2.QueryOptions) ([]T, error) {
var z []T
return z, nil
}
@ -129,7 +128,7 @@ func (m *MockRepository[T]) Count(ctx context.Context) (int64, error) {
return args.Get(0).(int64), args.Error(1)
}
func (m *MockRepository[T]) CountWithOptions(ctx context.Context, options *repositories.QueryOptions) (int64, error) {
func (m *MockRepository[T]) CountWithOptions(ctx context.Context, options *repositories2.QueryOptions) (int64, error) {
return 0, nil
}
@ -141,8 +140,8 @@ func (m *MockRepository[T]) FindWithPreload(ctx context.Context, preloads []stri
return args.Get(0).(*T), args.Error(1)
}
func (m *MockRepository[T]) Exists(ctx context.Context, id uint) (bool, error) { return false, nil }
func (m *MockRepository[T]) BeginTx(ctx context.Context) (*gorm.DB, error) { return nil, nil }
func (m *MockRepository[T]) Exists(ctx context.Context, id uint) (bool, error) { return false, nil }
func (m *MockRepository[T]) BeginTx(ctx context.Context) (*gorm.DB, error) { return nil, nil }
func (m *MockRepository[T]) WithTx(ctx context.Context, fn func(tx *gorm.DB) error) error { return nil }
// CachedRepositorySuite is a test suite for the CachedRepository
@ -150,14 +149,14 @@ type CachedRepositorySuite struct {
testutil.BaseSuite
mockRepo *MockRepository[TestModel]
mockCache *MockCache
repo *repositories.CachedRepository[TestModel]
repo *repositories2.CachedRepository[TestModel]
}
// SetupTest sets up each test
func (s *CachedRepositorySuite) SetupTest() {
s.mockRepo = new(MockRepository[TestModel])
s.mockCache = new(MockCache)
s.repo = repositories.NewCachedRepository[TestModel](
s.repo = repositories2.NewCachedRepository[TestModel](
s.mockRepo,
s.mockCache,
nil,
@ -279,20 +278,20 @@ func (s *CachedRepositorySuite) TestUpdate() {
}
// Mock repository
s.mockRepo.On("Update", mock.Anything, model).
s.mockRepo.On("Update", mock.Anything, model).
Return(nil)
// Execute
ctx := context.Background()
// Expect cache delete during update invalidation
s.mockCache.On("Delete", mock.Anything, mock.Anything).Return(nil)
err := s.repo.Update(ctx, model)
// Execute
ctx := context.Background()
// Expect cache delete during update invalidation
s.mockCache.On("Delete", mock.Anything, mock.Anything).Return(nil)
err := s.repo.Update(ctx, model)
// Assert
s.Require().NoError(err)
// Verify mocks
s.mockRepo.AssertCalled(s.T(), "Update", mock.Anything, model)
s.mockRepo.AssertCalled(s.T(), "Update", mock.Anything, model)
}
// TestDelete tests the Delete method
@ -300,9 +299,9 @@ func (s *CachedRepositorySuite) TestDelete() {
// Setup
id := uint(1)
// Mock repository and cache delete
s.mockRepo.On("Delete", mock.Anything, id).Return(nil)
s.mockCache.On("Delete", mock.Anything, mock.Anything).Return(nil)
// Mock repository and cache delete
s.mockRepo.On("Delete", mock.Anything, id).Return(nil)
s.mockCache.On("Delete", mock.Anything, mock.Anything).Return(nil)
// Execute
ctx := context.Background()
@ -320,7 +319,7 @@ func (s *CachedRepositorySuite) TestListCacheHit() {
// Setup
page := 1
pageSize := 10
expectedResult := &repositories.PaginatedResult[TestModel]{
expectedResult := &repositories2.PaginatedResult[TestModel]{
Items: []TestModel{
{
BaseModel: models.BaseModel{
@ -347,7 +346,7 @@ func (s *CachedRepositorySuite) TestListCacheHit() {
s.mockCache.On("Get", mock.Anything, mock.Anything, mock.Anything).
Run(func(args mock.Arguments) {
// Set the value to simulate cache hit
value := args.Get(2).(*repositories.PaginatedResult[TestModel])
value := args.Get(2).(*repositories2.PaginatedResult[TestModel])
*value = *expectedResult
}).
Return(nil)
@ -375,7 +374,7 @@ func (s *CachedRepositorySuite) TestListCacheMiss() {
// Setup
page := 1
pageSize := 10
expectedResult := &repositories.PaginatedResult[TestModel]{
expectedResult := &repositories2.PaginatedResult[TestModel]{
Items: []TestModel{
{
BaseModel: models.BaseModel{

View File

@ -2,11 +2,11 @@ package repositories
import (
"context"
"tercul/internal/models"
"time"
"tercul/cache"
"tercul/logger"
"tercul/models"
"tercul/internal/platform/cache"
"tercul/internal/platform/log"
)
// CachedWorkRepository wraps a WorkRepository with caching functionality
@ -54,16 +54,16 @@ func (r *CachedWorkRepository) FindByTitle(ctx context.Context, title string) ([
err := r.cache.Get(ctx, cacheKey, &result)
if err == nil {
// Cache hit
logger.LogDebug("Cache hit for FindByTitle",
logger.F("entityType", r.entityType),
logger.F("title", title))
log.LogDebug("Cache hit for FindByTitle",
log.F("entityType", r.entityType),
log.F("title", title))
return result, nil
}
// Cache miss, get from database
logger.LogDebug("Cache miss for FindByTitle",
logger.F("entityType", r.entityType),
logger.F("title", title))
log.LogDebug("Cache miss for FindByTitle",
log.F("entityType", r.entityType),
log.F("title", title))
result, err = r.workRepo.FindByTitle(ctx, title)
if err != nil {
@ -72,10 +72,10 @@ func (r *CachedWorkRepository) FindByTitle(ctx context.Context, title string) ([
// Store in cache
if err := r.cache.Set(ctx, cacheKey, result, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache FindByTitle result",
logger.F("entityType", r.entityType),
logger.F("title", title),
logger.F("error", err))
log.LogWarn("Failed to cache FindByTitle result",
log.F("entityType", r.entityType),
log.F("title", title),
log.F("error", err))
}
return result, nil
@ -93,16 +93,16 @@ func (r *CachedWorkRepository) FindByAuthor(ctx context.Context, authorID uint)
err := r.cache.Get(ctx, cacheKey, &result)
if err == nil {
// Cache hit
logger.LogDebug("Cache hit for FindByAuthor",
logger.F("entityType", r.entityType),
logger.F("authorID", authorID))
log.LogDebug("Cache hit for FindByAuthor",
log.F("entityType", r.entityType),
log.F("authorID", authorID))
return result, nil
}
// Cache miss, get from database
logger.LogDebug("Cache miss for FindByAuthor",
logger.F("entityType", r.entityType),
logger.F("authorID", authorID))
log.LogDebug("Cache miss for FindByAuthor",
log.F("entityType", r.entityType),
log.F("authorID", authorID))
result, err = r.workRepo.FindByAuthor(ctx, authorID)
if err != nil {
@ -111,10 +111,10 @@ func (r *CachedWorkRepository) FindByAuthor(ctx context.Context, authorID uint)
// Store in cache
if err := r.cache.Set(ctx, cacheKey, result, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache FindByAuthor result",
logger.F("entityType", r.entityType),
logger.F("authorID", authorID),
logger.F("error", err))
log.LogWarn("Failed to cache FindByAuthor result",
log.F("entityType", r.entityType),
log.F("authorID", authorID),
log.F("error", err))
}
return result, nil
@ -132,16 +132,16 @@ func (r *CachedWorkRepository) FindByCategory(ctx context.Context, categoryID ui
err := r.cache.Get(ctx, cacheKey, &result)
if err == nil {
// Cache hit
logger.LogDebug("Cache hit for FindByCategory",
logger.F("entityType", r.entityType),
logger.F("categoryID", categoryID))
log.LogDebug("Cache hit for FindByCategory",
log.F("entityType", r.entityType),
log.F("categoryID", categoryID))
return result, nil
}
// Cache miss, get from database
logger.LogDebug("Cache miss for FindByCategory",
logger.F("entityType", r.entityType),
logger.F("categoryID", categoryID))
log.LogDebug("Cache miss for FindByCategory",
log.F("entityType", r.entityType),
log.F("categoryID", categoryID))
result, err = r.workRepo.FindByCategory(ctx, categoryID)
if err != nil {
@ -150,10 +150,10 @@ func (r *CachedWorkRepository) FindByCategory(ctx context.Context, categoryID ui
// Store in cache
if err := r.cache.Set(ctx, cacheKey, result, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache FindByCategory result",
logger.F("entityType", r.entityType),
logger.F("categoryID", categoryID),
logger.F("error", err))
log.LogWarn("Failed to cache FindByCategory result",
log.F("entityType", r.entityType),
log.F("categoryID", categoryID),
log.F("error", err))
}
return result, nil
@ -171,20 +171,20 @@ func (r *CachedWorkRepository) FindByLanguage(ctx context.Context, language stri
err := r.cache.Get(ctx, cacheKey, &result)
if err == nil {
// Cache hit
logger.LogDebug("Cache hit for FindByLanguage",
logger.F("entityType", r.entityType),
logger.F("language", language),
logger.F("page", page),
logger.F("pageSize", pageSize))
log.LogDebug("Cache hit for FindByLanguage",
log.F("entityType", r.entityType),
log.F("language", language),
log.F("page", page),
log.F("pageSize", pageSize))
return &result, nil
}
// Cache miss, get from database
logger.LogDebug("Cache miss for FindByLanguage",
logger.F("entityType", r.entityType),
logger.F("language", language),
logger.F("page", page),
logger.F("pageSize", pageSize))
log.LogDebug("Cache miss for FindByLanguage",
log.F("entityType", r.entityType),
log.F("language", language),
log.F("page", page),
log.F("pageSize", pageSize))
result_ptr, err := r.workRepo.FindByLanguage(ctx, language, page, pageSize)
if err != nil {
@ -193,12 +193,12 @@ func (r *CachedWorkRepository) FindByLanguage(ctx context.Context, language stri
// Store in cache
if err := r.cache.Set(ctx, cacheKey, result_ptr, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache FindByLanguage result",
logger.F("entityType", r.entityType),
logger.F("language", language),
logger.F("page", page),
logger.F("pageSize", pageSize),
logger.F("error", err))
log.LogWarn("Failed to cache FindByLanguage result",
log.F("entityType", r.entityType),
log.F("language", language),
log.F("page", page),
log.F("pageSize", pageSize),
log.F("error", err))
}
return result_ptr, nil
@ -216,16 +216,16 @@ func (r *CachedWorkRepository) GetWithTranslations(ctx context.Context, id uint)
err := r.cache.Get(ctx, cacheKey, &result)
if err == nil {
// Cache hit
logger.LogDebug("Cache hit for GetWithTranslations",
logger.F("entityType", r.entityType),
logger.F("id", id))
log.LogDebug("Cache hit for GetWithTranslations",
log.F("entityType", r.entityType),
log.F("id", id))
return &result, nil
}
// Cache miss, get from database
logger.LogDebug("Cache miss for GetWithTranslations",
logger.F("entityType", r.entityType),
logger.F("id", id))
log.LogDebug("Cache miss for GetWithTranslations",
log.F("entityType", r.entityType),
log.F("id", id))
result_ptr, err := r.workRepo.GetWithTranslations(ctx, id)
if err != nil {
@ -234,10 +234,10 @@ func (r *CachedWorkRepository) GetWithTranslations(ctx context.Context, id uint)
// Store in cache
if err := r.cache.Set(ctx, cacheKey, result_ptr, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache GetWithTranslations result",
logger.F("entityType", r.entityType),
logger.F("id", id),
logger.F("error", err))
log.LogWarn("Failed to cache GetWithTranslations result",
log.F("entityType", r.entityType),
log.F("id", id),
log.F("error", err))
}
return result_ptr, nil
@ -255,18 +255,18 @@ func (r *CachedWorkRepository) ListWithTranslations(ctx context.Context, page, p
err := r.cache.Get(ctx, cacheKey, &result)
if err == nil {
// Cache hit
logger.LogDebug("Cache hit for ListWithTranslations",
logger.F("entityType", r.entityType),
logger.F("page", page),
logger.F("pageSize", pageSize))
log.LogDebug("Cache hit for ListWithTranslations",
log.F("entityType", r.entityType),
log.F("page", page),
log.F("pageSize", pageSize))
return &result, nil
}
// Cache miss, get from database
logger.LogDebug("Cache miss for ListWithTranslations",
logger.F("entityType", r.entityType),
logger.F("page", page),
logger.F("pageSize", pageSize))
log.LogDebug("Cache miss for ListWithTranslations",
log.F("entityType", r.entityType),
log.F("page", page),
log.F("pageSize", pageSize))
result_ptr, err := r.workRepo.ListWithTranslations(ctx, page, pageSize)
if err != nil {
@ -275,11 +275,11 @@ func (r *CachedWorkRepository) ListWithTranslations(ctx context.Context, page, p
// Store in cache
if err := r.cache.Set(ctx, cacheKey, result_ptr, r.cacheExpiry); err != nil {
logger.LogWarn("Failed to cache ListWithTranslations result",
logger.F("entityType", r.entityType),
logger.F("page", page),
logger.F("pageSize", pageSize),
logger.F("error", err))
log.LogWarn("Failed to cache ListWithTranslations result",
log.F("entityType", r.entityType),
log.F("page", page),
log.F("pageSize", pageSize),
log.F("error", err))
}
return result_ptr, nil

View File

@ -4,14 +4,13 @@ import (
"context"
"encoding/json"
"errors"
models2 "tercul/internal/models"
repositories2 "tercul/internal/repositories"
"testing"
"time"
"tercul/internal/testutil"
"tercul/models"
"tercul/repositories"
"github.com/stretchr/testify/suite"
"tercul/internal/testutil"
)
// ErrCacheMiss is returned when a key is not found in the cache
@ -84,19 +83,19 @@ func (c *testCache) SetMulti(ctx context.Context, items map[string]interface{},
// MockWorkRepository for testing
type MockWorkRepository struct {
works []*models.Work
works []*models2.Work
}
func NewMockWorkRepository() *MockWorkRepository {
return &MockWorkRepository{works: []*models.Work{}}
return &MockWorkRepository{works: []*models2.Work{}}
}
func (m *MockWorkRepository) AddWork(work *models.Work) {
func (m *MockWorkRepository) AddWork(work *models2.Work) {
work.ID = uint(len(m.works) + 1)
m.works = append(m.works, work)
}
func (m *MockWorkRepository) GetByID(id uint) (*models.Work, error) {
func (m *MockWorkRepository) GetByID(id uint) (*models2.Work, error) {
for _, w := range m.works {
if w.ID == id {
return w, nil
@ -105,8 +104,8 @@ func (m *MockWorkRepository) GetByID(id uint) (*models.Work, error) {
return nil, errors.New("not found")
}
func (m *MockWorkRepository) FindByTitle(title string) ([]*models.Work, error) {
var result []*models.Work
func (m *MockWorkRepository) FindByTitle(title string) ([]*models2.Work, error) {
var result []*models2.Work
for _, w := range m.works {
if len(title) == 0 || (len(w.Title) >= len(title) && w.Title[:len(title)] == title) {
result = append(result, w)
@ -115,8 +114,8 @@ func (m *MockWorkRepository) FindByTitle(title string) ([]*models.Work, error) {
return result, nil
}
func (m *MockWorkRepository) FindByLanguage(language string, page, pageSize int) (*repositories.PaginatedResult[*models.Work], error) {
var filtered []*models.Work
func (m *MockWorkRepository) FindByLanguage(language string, page, pageSize int) (*repositories2.PaginatedResult[*models2.Work], error) {
var filtered []*models2.Work
for _, w := range m.works {
if w.Language == language {
filtered = append(filtered, w)
@ -126,12 +125,12 @@ func (m *MockWorkRepository) FindByLanguage(language string, page, pageSize int)
start := (page - 1) * pageSize
end := start + pageSize
if start > len(filtered) {
return &repositories.PaginatedResult[*models.Work]{Items: []*models.Work{}, TotalCount: total}, nil
return &repositories2.PaginatedResult[*models2.Work]{Items: []*models2.Work{}, TotalCount: total}, nil
}
if end > len(filtered) {
end = len(filtered)
}
return &repositories.PaginatedResult[*models.Work]{Items: filtered[start:end], TotalCount: total}, nil
return &repositories2.PaginatedResult[*models2.Work]{Items: filtered[start:end], TotalCount: total}, nil
}
func (m *MockWorkRepository) Count() (int64, error) {
@ -145,7 +144,7 @@ type CachedWorkRepositorySuite struct {
suite.Suite
baseRepo *testutil.UnifiedMockWorkRepository
cache *testCache
repo *repositories.CachedWorkRepository
repo *repositories2.CachedWorkRepository
}
func (s *CachedWorkRepositorySuite) SetupSuite() {
@ -156,7 +155,7 @@ func (s *CachedWorkRepositorySuite) SetupTest() {
s.baseRepo = testutil.NewUnifiedMockWorkRepository()
s.cache = &testCache{data: make(map[string][]byte)}
s.repo = repositories.NewCachedWorkRepository(
s.repo = repositories2.NewCachedWorkRepository(
s.baseRepo,
s.cache,
nil,
@ -165,9 +164,9 @@ func (s *CachedWorkRepositorySuite) SetupTest() {
}
// createTestWork creates a test work and adds it to the mock repo
func (s *CachedWorkRepositorySuite) createTestWork(title, language string) *models.Work {
work := &models.Work{
TranslatableModel: models.TranslatableModel{BaseModel: models.BaseModel{ID: 0}, Language: language},
func (s *CachedWorkRepositorySuite) createTestWork(title, language string) *models2.Work {
work := &models2.Work{
TranslatableModel: models2.TranslatableModel{BaseModel: models2.BaseModel{ID: 0}, Language: language},
Title: title,
Description: "Test description",
Status: "published",
@ -180,13 +179,13 @@ func (s *CachedWorkRepositorySuite) createTestWork(title, language string) *mode
func (s *CachedWorkRepositorySuite) TestGetByID() {
work := s.createTestWork("Test Work", "en")
result1, err := s.repo.GetByID(context.Background(), work.ID)
result1, err := s.repo.GetByID(context.Background(), work.ID)
s.Require().NoError(err)
s.Require().NotNil(result1)
s.Equal(work.ID, result1.ID)
s.Equal(work.Title, result1.Title)
result2, err := s.repo.GetByID(context.Background(), work.ID)
result2, err := s.repo.GetByID(context.Background(), work.ID)
s.Require().NoError(err)
s.Require().NotNil(result2)
s.Equal(work.ID, result2.ID)
@ -202,11 +201,11 @@ func (s *CachedWorkRepositorySuite) TestFindByTitle() {
work2 := s.createTestWork("Test Work 2", "en")
_ = s.createTestWork("Another Work", "en")
works1, err := s.repo.FindByTitle(context.Background(), "Test")
works1, err := s.repo.FindByTitle(context.Background(), "Test")
s.Require().NoError(err)
s.Require().Len(works1, 2)
works2, err := s.repo.FindByTitle(context.Background(), "Test")
works2, err := s.repo.FindByTitle(context.Background(), "Test")
s.Require().NoError(err)
s.Require().Len(works2, 2)
@ -232,13 +231,13 @@ func (s *CachedWorkRepositorySuite) TestFindByLanguage() {
s.createTestWork("Work 4", "fr")
s.createTestWork("Work 5", "es")
result1, err := s.repo.FindByLanguage(context.Background(), "en", 1, 10)
result1, err := s.repo.FindByLanguage(context.Background(), "en", 1, 10)
s.Require().NoError(err)
s.Require().NotNil(result1)
s.Equal(int64(2), result1.TotalCount)
s.Equal(2, len(result1.Items))
result2, err := s.repo.FindByLanguage(context.Background(), "en", 1, 10)
result2, err := s.repo.FindByLanguage(context.Background(), "en", 1, 10)
s.Require().NoError(err)
s.Require().NotNil(result2)
s.Equal(int64(2), result2.TotalCount)

View File

@ -4,7 +4,7 @@ import (
"context"
"errors"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// CategoryRepository defines CRUD methods specific to Category.

View File

@ -3,7 +3,7 @@ package repositories
import (
"context"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// CityRepository defines CRUD methods specific to City.

View File

@ -3,7 +3,7 @@ package repositories
import (
"context"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// CollectionRepository defines CRUD methods specific to Collection.

View File

@ -3,7 +3,7 @@ package repositories
import (
"context"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// CommentRepository defines CRUD methods specific to Comment.

View File

@ -3,7 +3,7 @@ package repositories
import (
"context"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// ContributionRepository defines CRUD methods specific to Contribution.

View File

@ -3,7 +3,7 @@ package repositories
import (
"context"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// CopyrightClaimRepository defines CRUD methods specific to CopyrightClaim.

View File

@ -4,7 +4,7 @@ import (
"context"
"errors"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// CopyrightRepository defines CRUD methods specific to Copyright.

View File

@ -4,7 +4,7 @@ import (
"context"
"errors"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// CountryRepository defines CRUD methods specific to Country.

View File

@ -3,7 +3,7 @@ package repositories
import (
"context"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// EdgeRepository defines CRUD operations for the polymorphic edge table.

View File

@ -4,7 +4,7 @@ import (
"context"
"errors"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// EditionRepository defines CRUD methods specific to Edition.

View File

@ -4,7 +4,7 @@ import (
"context"
"errors"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
"time"
)

View File

@ -3,7 +3,7 @@ package repositories
import (
"context"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// LikeRepository defines CRUD methods specific to Like.

View File

@ -3,7 +3,7 @@ package repositories
import (
"context"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// MonetizationRepository defines CRUD methods specific to Monetization.

View File

@ -4,7 +4,7 @@ import (
"context"
"errors"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
"time"
)

View File

@ -4,7 +4,7 @@ import (
"context"
"gorm.io/gorm"
"math"
"tercul/models"
"tercul/internal/models"
)
// PlaceRepository defines CRUD methods specific to Place.

View File

@ -3,7 +3,7 @@ package repositories
import (
"context"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// PublisherRepository defines CRUD methods specific to Publisher.

View File

@ -4,7 +4,7 @@ import (
"context"
"errors"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// SourceRepository defines CRUD methods specific to Source.

View File

@ -4,7 +4,7 @@ import (
"context"
"errors"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// TagRepository defines CRUD methods specific to Tag.

View File

@ -3,34 +3,34 @@ package repositories
import (
"context"
"gorm.io/gorm"
"tercul/models"
models2 "tercul/internal/models"
)
// TranslationRepository defines CRUD methods specific to Translation.
type TranslationRepository interface {
BaseRepository[models.Translation]
ListByWorkID(ctx context.Context, workID uint) ([]models.Translation, error)
ListByEntity(ctx context.Context, entityType string, entityID uint) ([]models.Translation, error)
ListByTranslatorID(ctx context.Context, translatorID uint) ([]models.Translation, error)
ListByStatus(ctx context.Context, status models.TranslationStatus) ([]models.Translation, error)
BaseRepository[models2.Translation]
ListByWorkID(ctx context.Context, workID uint) ([]models2.Translation, error)
ListByEntity(ctx context.Context, entityType string, entityID uint) ([]models2.Translation, error)
ListByTranslatorID(ctx context.Context, translatorID uint) ([]models2.Translation, error)
ListByStatus(ctx context.Context, status models2.TranslationStatus) ([]models2.Translation, error)
}
type translationRepository struct {
BaseRepository[models.Translation]
BaseRepository[models2.Translation]
db *gorm.DB
}
// NewTranslationRepository creates a new TranslationRepository.
func NewTranslationRepository(db *gorm.DB) TranslationRepository {
return &translationRepository{
BaseRepository: NewBaseRepositoryImpl[models.Translation](db),
BaseRepository: NewBaseRepositoryImpl[models2.Translation](db),
db: db,
}
}
// ListByWorkID finds translations by work ID
func (r *translationRepository) ListByWorkID(ctx context.Context, workID uint) ([]models.Translation, error) {
var translations []models.Translation
func (r *translationRepository) ListByWorkID(ctx context.Context, workID uint) ([]models2.Translation, error) {
var translations []models2.Translation
if err := r.db.WithContext(ctx).Where("translatable_id = ? AND translatable_type = ?", workID, "Work").Find(&translations).Error; err != nil {
return nil, err
}
@ -38,8 +38,8 @@ func (r *translationRepository) ListByWorkID(ctx context.Context, workID uint) (
}
// ListByEntity finds translations by entity type and ID
func (r *translationRepository) ListByEntity(ctx context.Context, entityType string, entityID uint) ([]models.Translation, error) {
var translations []models.Translation
func (r *translationRepository) ListByEntity(ctx context.Context, entityType string, entityID uint) ([]models2.Translation, error) {
var translations []models2.Translation
if err := r.db.WithContext(ctx).Where("translatable_id = ? AND translatable_type = ?", entityID, entityType).Find(&translations).Error; err != nil {
return nil, err
}
@ -47,8 +47,8 @@ func (r *translationRepository) ListByEntity(ctx context.Context, entityType str
}
// ListByTranslatorID finds translations by translator ID
func (r *translationRepository) ListByTranslatorID(ctx context.Context, translatorID uint) ([]models.Translation, error) {
var translations []models.Translation
func (r *translationRepository) ListByTranslatorID(ctx context.Context, translatorID uint) ([]models2.Translation, error) {
var translations []models2.Translation
if err := r.db.WithContext(ctx).Where("translator_id = ?", translatorID).Find(&translations).Error; err != nil {
return nil, err
}
@ -56,8 +56,8 @@ func (r *translationRepository) ListByTranslatorID(ctx context.Context, translat
}
// ListByStatus finds translations by status
func (r *translationRepository) ListByStatus(ctx context.Context, status models.TranslationStatus) ([]models.Translation, error) {
var translations []models.Translation
func (r *translationRepository) ListByStatus(ctx context.Context, status models2.TranslationStatus) ([]models2.Translation, error) {
var translations []models2.Translation
if err := r.db.WithContext(ctx).Where("status = ?", status).Find(&translations).Error; err != nil {
return nil, err
}

View File

@ -4,7 +4,7 @@ import (
"context"
"errors"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// UserProfileRepository defines CRUD methods specific to UserProfile.

View File

@ -4,33 +4,33 @@ import (
"context"
"errors"
"gorm.io/gorm"
"tercul/models"
models2 "tercul/internal/models"
)
// UserRepository defines CRUD methods specific to User.
type UserRepository interface {
BaseRepository[models.User]
FindByUsername(ctx context.Context, username string) (*models.User, error)
FindByEmail(ctx context.Context, email string) (*models.User, error)
ListByRole(ctx context.Context, role models.UserRole) ([]models.User, error)
BaseRepository[models2.User]
FindByUsername(ctx context.Context, username string) (*models2.User, error)
FindByEmail(ctx context.Context, email string) (*models2.User, error)
ListByRole(ctx context.Context, role models2.UserRole) ([]models2.User, error)
}
type userRepository struct {
BaseRepository[models.User]
BaseRepository[models2.User]
db *gorm.DB
}
// NewUserRepository creates a new UserRepository.
func NewUserRepository(db *gorm.DB) UserRepository {
return &userRepository{
BaseRepository: NewBaseRepositoryImpl[models.User](db),
BaseRepository: NewBaseRepositoryImpl[models2.User](db),
db: db,
}
}
// FindByUsername finds a user by username
func (r *userRepository) FindByUsername(ctx context.Context, username string) (*models.User, error) {
var user models.User
func (r *userRepository) FindByUsername(ctx context.Context, username string) (*models2.User, error) {
var user models2.User
if err := r.db.WithContext(ctx).Where("username = ?", username).First(&user).Error; err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
return nil, ErrEntityNotFound
@ -41,8 +41,8 @@ func (r *userRepository) FindByUsername(ctx context.Context, username string) (*
}
// FindByEmail finds a user by email
func (r *userRepository) FindByEmail(ctx context.Context, email string) (*models.User, error) {
var user models.User
func (r *userRepository) FindByEmail(ctx context.Context, email string) (*models2.User, error) {
var user models2.User
if err := r.db.WithContext(ctx).Where("email = ?", email).First(&user).Error; err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
return nil, ErrEntityNotFound
@ -53,8 +53,8 @@ func (r *userRepository) FindByEmail(ctx context.Context, email string) (*models
}
// ListByRole lists users by role
func (r *userRepository) ListByRole(ctx context.Context, role models.UserRole) ([]models.User, error) {
var users []models.User
func (r *userRepository) ListByRole(ctx context.Context, role models2.UserRole) ([]models2.User, error) {
var users []models2.User
if err := r.db.WithContext(ctx).Where("role = ?", role).Find(&users).Error; err != nil {
return nil, err
}

View File

@ -4,7 +4,7 @@ import (
"context"
"errors"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
"time"
)

View File

@ -3,7 +3,7 @@ package repositories
import (
"context"
"gorm.io/gorm"
"tercul/models"
"tercul/internal/models"
)
// WorkRepository defines methods specific to Work.

View File

@ -1,12 +1,12 @@
package repositories_test
import (
"context"
"tercul/internal/testutil"
"tercul/models"
"testing"
"context"
models2 "tercul/internal/models"
"tercul/internal/testutil"
"testing"
"github.com/stretchr/testify/suite"
"github.com/stretchr/testify/suite"
)
// WorkRepositorySuite is a test suite for the MockWorkRepository
@ -20,31 +20,31 @@ func (s *WorkRepositorySuite) SetupTest() {
}
func (s *WorkRepositorySuite) TestCreate() {
work := &models.Work{Title: "Test Work"}
err := s.repo.Create(context.Background(), work)
work := &models2.Work{Title: "Test Work"}
err := s.repo.Create(context.Background(), work)
s.Require().NoError(err)
}
func (s *WorkRepositorySuite) TestGetByID() {
work := &models.Work{Title: "Test Work"}
s.repo.Create(context.Background(), work)
got, err := s.repo.GetByID(context.Background(), work.ID)
work := &models2.Work{Title: "Test Work"}
s.repo.Create(context.Background(), work)
got, err := s.repo.GetByID(context.Background(), work.ID)
s.Require().NoError(err)
s.Require().Equal(work.ID, got.ID)
}
func (s *WorkRepositorySuite) TestFindByTitle() {
work := &models.Work{Title: "Test"}
s.repo.Create(context.Background(), work)
works, err := s.repo.FindByTitle(context.Background(), "Test")
work := &models2.Work{Title: "Test"}
s.repo.Create(context.Background(), work)
works, err := s.repo.FindByTitle(context.Background(), "Test")
s.Require().NoError(err)
s.Require().Len(works, 1)
}
func (s *WorkRepositorySuite) TestFindByLanguage() {
work := &models.Work{TranslatableModel: models.TranslatableModel{Language: "en"}, Title: "Test"}
s.repo.Create(context.Background(), work)
result, err := s.repo.FindByLanguage(context.Background(), "en", 1, 10)
work := &models2.Work{TranslatableModel: models2.TranslatableModel{Language: "en"}, Title: "Test"}
s.repo.Create(context.Background(), work)
result, err := s.repo.FindByLanguage(context.Background(), "en", 1, 10)
s.Require().NoError(err)
s.Require().Len(result.Items, 1)
}

View File

@ -3,7 +3,7 @@ package store
import (
"gorm.io/gorm"
"strings"
"tercul/models"
models2 "tercul/internal/models"
)
// DB represents a database connection
@ -24,14 +24,14 @@ func ListPendingWorks(db *DB) []Work {
var works []Work
// Query for works that haven't been enriched yet
var modelWorks []models.Work
var modelWorks []models2.Work
db.Where("id NOT IN (SELECT work_id FROM language_analyses)").Find(&modelWorks)
// Convert to store.Work
for _, work := range modelWorks {
// Prefer original language translation; fallback to work language; then any
var content string
var t models.Translation
var t models2.Translation
// Try original
if err := db.Where("translatable_type = ? AND translatable_id = ? AND is_original_language = ?", "Work", work.ID, true).
First(&t).Error; err == nil {
@ -61,7 +61,7 @@ func ListPendingWorks(db *DB) []Work {
// UpsertWord creates or updates a word in the database
func UpsertWord(db *DB, workID uint, text, lemma, pos, phonetic string) error {
// Check if the word already exists
var word models.Word
var word models2.Word
result := db.Where("text = ? AND language = ?", text, "auto").First(&word)
if result.Error != nil && result.Error != gorm.ErrRecordNotFound {
@ -71,7 +71,7 @@ func UpsertWord(db *DB, workID uint, text, lemma, pos, phonetic string) error {
// Create or update the word
if result.Error == gorm.ErrRecordNotFound {
// Create new word
word = models.Word{
word = models2.Word{
Text: text,
Language: "auto", // This would be set to the detected language
PartOfSpeech: pos,
@ -101,7 +101,7 @@ func SaveKeywords(db *DB, workID uint, keywords []string) error {
}
// Create a topic cluster for the keywords
cluster := models.TopicCluster{
cluster := models2.TopicCluster{
Name: "Auto-generated",
Description: "Automatically generated keywords",
Keywords: strings.Join(keywords, ", "),
@ -117,7 +117,7 @@ func SaveKeywords(db *DB, workID uint, keywords []string) error {
// SavePoetics saves poetic analysis for a work
func SavePoetics(db *DB, workID uint, metrics PoeticMetrics) error {
poetics := models.PoeticAnalysis{
poetics := models2.PoeticAnalysis{
WorkID: workID,
Language: "auto", // This would be set to the detected language
RhymeScheme: metrics.RhymeScheme,
@ -133,10 +133,10 @@ func SavePoetics(db *DB, workID uint, metrics PoeticMetrics) error {
// MarkEnriched marks a work as enriched with the detected language
func MarkEnriched(db *DB, workID uint, language string) error {
// Create a language analysis record to mark the work as processed
analysis := models.LanguageAnalysis{
analysis := models2.LanguageAnalysis{
WorkID: workID,
Language: language,
Analysis: models.JSONB{
Analysis: models2.JSONB{
"enriched": true,
"language": language,
},

View File

@ -0,0 +1,348 @@
package testutil
import (
"context"
"fmt"
"log"
"os"
"path/filepath"
"testing"
"time"
"github.com/stretchr/testify/suite"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"gorm.io/gorm/logger"
"tercul/internal/models"
"tercul/internal/repositories"
"tercul/services"
"tercul/graph"
)
// IntegrationTestSuite provides a comprehensive test environment with either in-memory SQLite or mock repositories
type IntegrationTestSuite struct {
	suite.Suite

	// DB is the GORM connection; nil when the suite runs against mock repositories.
	DB *gorm.DB

	// Repositories exercised by the tests. In mock mode only WorkRepo is
	// currently wired up (see setupMockRepositories); the others remain nil.
	WorkRepo        repositories.WorkRepository
	UserRepo        repositories.UserRepository
	AuthorRepo      repositories.AuthorRepository
	TranslationRepo repositories.TranslationRepository
	CommentRepo     repositories.CommentRepository
	LikeRepo        repositories.LikeRepository
	BookmarkRepo    repositories.BookmarkRepository
	CollectionRepo  repositories.CollectionRepository
	TagRepo         repositories.TagRepository
	CategoryRepo    repositories.CategoryRepository

	// Services built on top of the repositories (see setupServices).
	WorkService  services.WorkService
	Localization services.LocalizationService
	AuthService  services.AuthService

	// Test data seeded by setupTestData and reseeded in SetupTest.
	TestWorks        []*models.Work
	TestUsers        []*models.User
	TestAuthors      []*models.Author
	TestTranslations []*models.Translation
}
// TestConfig holds configuration for the test environment
type TestConfig struct {
	UseInMemoryDB bool            // If true, use SQLite (file or in-memory), otherwise use mock repositories
	DBPath        string          // SQLite file path; read only when UseInMemoryDB is true — empty means ":memory:"
	LogLevel      logger.LogLevel // GORM log verbosity for the test database
}
// DefaultTestConfig returns the configuration used when SetupSuite receives
// nil: an in-memory SQLite database with silent GORM logging.
func DefaultTestConfig() *TestConfig {
	cfg := new(TestConfig)
	cfg.UseInMemoryDB = true
	cfg.DBPath = ""
	cfg.LogLevel = logger.Silent
	return cfg
}
// SetupSuite initializes the suite according to config; a nil config falls
// back to DefaultTestConfig. It then builds services and seeds test data.
func (s *IntegrationTestSuite) SetupSuite(config *TestConfig) {
	cfg := config
	if cfg == nil {
		cfg = DefaultTestConfig()
	}

	// Choose the storage backend, then wire everything on top of it.
	if !cfg.UseInMemoryDB {
		s.setupMockRepositories()
	} else {
		s.setupInMemoryDB(cfg)
	}

	s.setupServices()
	s.setupTestData()
}
// setupInMemoryDB sets up an in-memory SQLite database for testing.
// When config.DBPath is set, a file-backed SQLite database is used instead
// (the parent directory is created if necessary). It runs migrations for all
// test-relevant models and instantiates the real repository implementations.
func (s *IntegrationTestSuite) setupInMemoryDB(config *TestConfig) {
	var dbPath string
	if config.DBPath != "" {
		// Ensure directory exists
		dir := filepath.Dir(config.DBPath)
		if err := os.MkdirAll(dir, 0755); err != nil {
			s.T().Fatalf("Failed to create database directory: %v", err)
		}
		dbPath = config.DBPath
	} else {
		// Use in-memory database
		dbPath = ":memory:"
	}
	// Custom logger for tests; verbosity is controlled by config.LogLevel.
	newLogger := logger.New(
		log.New(os.Stdout, "\r\n", log.LstdFlags),
		logger.Config{
			SlowThreshold:             time.Second,
			LogLevel:                  config.LogLevel,
			IgnoreRecordNotFoundError: true,
			Colorful:                  false,
		},
	)
	db, err := gorm.Open(sqlite.Open(dbPath), &gorm.Config{
		Logger: newLogger,
	})
	if err != nil {
		s.T().Fatalf("Failed to connect to test database: %v", err)
	}
	s.DB = db
	// Run migrations
	if err := db.AutoMigrate(
		&models.Work{},
		&models.User{},
		&models.Author{},
		&models.Translation{},
		&models.Comment{},
		&models.Like{},
		&models.Bookmark{},
		&models.Collection{},
		&models.Tag{},
		&models.Category{},
		&models.Country{},
		&models.City{},
		&models.Place{},
		&models.Address{},
		&models.Copyright{},
		&models.CopyrightClaim{},
		&models.Monetization{},
		&models.Book{},
		&models.Publisher{},
		&models.Source{},
		// &models.WorkAnalytics{}, // Commented out as it's not in models package
		&models.ReadabilityScore{},
		&models.WritingStyle{},
		&models.Emotion{},
		&models.TopicCluster{},
		&models.Mood{},
		&models.Concept{},
		&models.LinguisticLayer{},
		&models.WorkStats{},
		&models.TextMetadata{},
		&models.PoeticAnalysis{},
		&models.TranslationField{},
	); err != nil {
		s.T().Fatalf("Failed to run migrations: %v", err)
	}
	// Create repository instances backed by the freshly migrated database.
	s.WorkRepo = repositories.NewWorkRepository(db)
	s.UserRepo = repositories.NewUserRepository(db)
	s.AuthorRepo = repositories.NewAuthorRepository(db)
	s.TranslationRepo = repositories.NewTranslationRepository(db)
	s.CommentRepo = repositories.NewCommentRepository(db)
	s.LikeRepo = repositories.NewLikeRepository(db)
	s.BookmarkRepo = repositories.NewBookmarkRepository(db)
	s.CollectionRepo = repositories.NewCollectionRepository(db)
	s.TagRepo = repositories.NewTagRepository(db)
	s.CategoryRepo = repositories.NewCategoryRepository(db)
}
// setupMockRepositories sets up mock repositories for testing.
//
// NOTE(review): only WorkRepo is wired up; the remaining repository fields
// stay nil until their mock implementations satisfy the current interfaces.
// Code running in mock mode must nil-check those fields before use.
func (s *IntegrationTestSuite) setupMockRepositories() {
	s.WorkRepo = NewUnifiedMockWorkRepository()
	// Temporarily comment out problematic repositories until we fix the interface implementations
	// s.UserRepo = NewMockUserRepository()
	// s.AuthorRepo = NewMockAuthorRepository()
	// s.TranslationRepo = NewMockTranslationRepository()
	// s.CommentRepo = NewMockCommentRepository()
	// s.LikeRepo = NewMockLikeRepository()
	// s.BookmarkRepo = NewMockBookmarkRepository()
	// s.CollectionRepo = NewMockCollectionRepository()
	// s.TagRepo = NewMockTagRepository()
	// s.CategoryRepo = NewMockCategoryRepository()
}
// setupServices sets up service instances on top of the repositories.
// Only WorkService is constructed at present; the other services depend on
// repositories that are not available in mock mode (see setupMockRepositories).
func (s *IntegrationTestSuite) setupServices() {
	s.WorkService = services.NewWorkService(s.WorkRepo, nil)
	// Temporarily comment out services that depend on problematic repositories
	// s.Localization = services.NewLocalizationService(s.TranslationRepo)
	// s.AuthService = services.NewAuthService(s.UserRepo, "test-secret-key")
}
// setupTestData creates initial test data: users, authors, works and their
// original-language translations.
//
// Repositories that are not configured are skipped: in mock mode only the
// work repository is wired up (see setupMockRepositories), and calling
// Create on a nil repository interface would panic. Failed creations are
// logged as warnings, matching the original best-effort behavior.
func (s *IntegrationTestSuite) setupTestData() {
	ctx := context.Background()

	// Create test users
	s.TestUsers = []*models.User{
		{Username: "testuser1", Email: "test1@example.com", FirstName: "Test", LastName: "User1"},
		{Username: "testuser2", Email: "test2@example.com", FirstName: "Test", LastName: "User2"},
	}
	if s.UserRepo != nil {
		for _, user := range s.TestUsers {
			if err := s.UserRepo.Create(ctx, user); err != nil {
				s.T().Logf("Warning: Failed to create test user: %v", err)
			}
		}
	}

	// Create test authors
	s.TestAuthors = []*models.Author{
		{Name: "Test Author 1", Language: "en"},
		{Name: "Test Author 2", Language: "fr"},
	}
	if s.AuthorRepo != nil {
		for _, author := range s.TestAuthors {
			if err := s.AuthorRepo.Create(ctx, author); err != nil {
				s.T().Logf("Warning: Failed to create test author: %v", err)
			}
		}
	}

	// Create test works.
	// NOTE(review): Language appears to live on the embedded TranslatableModel
	// elsewhere in the tests (and CreateTestWork assigns it after the literal);
	// confirm these literals compile with a direct Language field.
	s.TestWorks = []*models.Work{
		{Title: "Test Work 1", Language: "en"},
		{Title: "Test Work 2", Language: "en"},
		{Title: "Test Work 3", Language: "fr"},
	}
	if s.WorkRepo != nil {
		for _, work := range s.TestWorks {
			if err := s.WorkRepo.Create(ctx, work); err != nil {
				s.T().Logf("Warning: Failed to create test work: %v", err)
			}
		}
	}

	// Create test translations. If a work failed to persist, its ID is the
	// zero value and a warning was already logged above.
	s.TestTranslations = []*models.Translation{
		{
			Title:              "Test Work 1",
			Content:            "Test content for work 1",
			Language:           "en",
			TranslatableID:     s.TestWorks[0].ID,
			TranslatableType:   "Work",
			IsOriginalLanguage: true,
		},
		{
			Title:              "Test Work 2",
			Content:            "Test content for work 2",
			Language:           "en",
			TranslatableID:     s.TestWorks[1].ID,
			TranslatableType:   "Work",
			IsOriginalLanguage: true,
		},
		{
			Title:              "Test Work 3",
			Content:            "Test content for work 3",
			Language:           "fr",
			TranslatableID:     s.TestWorks[2].ID,
			TranslatableType:   "Work",
			IsOriginalLanguage: true,
		},
	}
	if s.TranslationRepo != nil {
		for _, translation := range s.TestTranslations {
			if err := s.TranslationRepo.Create(ctx, translation); err != nil {
				s.T().Logf("Warning: Failed to create test translation: %v", err)
			}
		}
	}
}
// TearDownSuite releases the underlying database connection, if any.
func (s *IntegrationTestSuite) TearDownSuite() {
	if s.DB == nil {
		return
	}
	if sqlDB, err := s.DB.DB(); err == nil {
		sqlDB.Close()
	}
}
// SetupTest restores a clean, freshly seeded data set before every test.
func (s *IntegrationTestSuite) SetupTest() {
	if s.DB == nil {
		// Mock mode: clear the in-memory work repository.
		if mockRepo, ok := s.WorkRepo.(*UnifiedMockWorkRepository); ok {
			mockRepo.Reset()
		}
		// Add similar reset logic for other mock repositories
		return
	}

	// DB mode: wipe dependent tables first, then reseed.
	for _, table := range []string{"translations", "works", "authors", "users"} {
		s.DB.Exec("DELETE FROM " + table)
	}
	s.setupTestData()
}
// GetResolver returns a properly configured GraphQL resolver for testing.
// All suite repositories and services are passed through as-is; in mock mode
// some of them are nil (see setupMockRepositories / setupServices).
func (s *IntegrationTestSuite) GetResolver() *graph.Resolver {
	return &graph.Resolver{
		WorkRepo:        s.WorkRepo,
		UserRepo:        s.UserRepo,
		AuthorRepo:      s.AuthorRepo,
		TranslationRepo: s.TranslationRepo,
		CommentRepo:     s.CommentRepo,
		LikeRepo:        s.LikeRepo,
		BookmarkRepo:    s.BookmarkRepo,
		CollectionRepo:  s.CollectionRepo,
		TagRepo:         s.TagRepo,
		CategoryRepo:    s.CategoryRepo,
		WorkService:     s.WorkService,
		Localization:    s.Localization,
		AuthService:     s.AuthService,
	}
}
// CreateTestWork creates a test work and, when content is non-empty, an
// accompanying original-language translation. Work creation failure is fatal;
// translation creation failure only logs a warning.
//
// The translation step is additionally skipped when TranslationRepo is nil
// (mock mode wires up only the work repository), which previously caused a
// nil-interface panic.
func (s *IntegrationTestSuite) CreateTestWork(title, language string, content string) *models.Work {
	work := &models.Work{
		Title: title,
	}
	// Language is assigned after the literal; it appears to be promoted from
	// the embedded TranslatableModel (see the struct literals in sibling tests).
	work.Language = language
	if err := s.WorkRepo.Create(context.Background(), work); err != nil {
		s.T().Fatalf("Failed to create test work: %v", err)
	}
	if content != "" && s.TranslationRepo != nil {
		translation := &models.Translation{
			Title:              title,
			Content:            content,
			Language:           language,
			TranslatableID:     work.ID,
			TranslatableType:   "Work",
			IsOriginalLanguage: true,
		}
		if err := s.TranslationRepo.Create(context.Background(), translation); err != nil {
			s.T().Logf("Warning: Failed to create test translation: %v", err)
		}
	}
	return work
}
// CleanupTestData removes all test data; it is a no-op in mock mode.
func (s *IntegrationTestSuite) CleanupTestData() {
	if s.DB == nil {
		return
	}
	for _, stmt := range []string{
		"DELETE FROM translations",
		"DELETE FROM works",
		"DELETE FROM authors",
		"DELETE FROM users",
	} {
		s.DB.Exec(stmt)
	}
}

View File

@ -0,0 +1,72 @@
package testutil
import (
"context"
"errors"
"fmt"
"tercul/internal/repositories"
"gorm.io/gorm"
)
// MockBaseRepository provides common mock implementations for BaseRepository methods
type MockBaseRepository[T any] struct {
	// This is a helper struct that can be embedded in mock repositories
	// to provide common mock implementations. It is intentionally stateless:
	// it contributes only stub methods that report "not implemented".
}
// BeginTx starts a new transaction (mock implementation).
// Mocks are purely in-memory, so transactions are unsupported.
func (m *MockBaseRepository[T]) BeginTx(ctx context.Context) (*gorm.DB, error) {
	// errors.New: the message has no format verbs, so fmt.Errorf is unnecessary,
	// and this keeps the file's "errors" import in use.
	return nil, errors.New("transactions not supported in mock repository")
}
// WithTx executes a function within a transaction (mock implementation).
// Always fails: transactional flows are not exercised against mocks.
func (m *MockBaseRepository[T]) WithTx(ctx context.Context, fn func(tx *gorm.DB) error) error {
	// errors.New suffices — the message contains no format verbs.
	return errors.New("transactions not supported in mock repository")
}
// CreateInTx creates an entity within a transaction (mock implementation).
// Always returns an error; embedders needing this behavior must override it.
func (m *MockBaseRepository[T]) CreateInTx(ctx context.Context, tx *gorm.DB, entity *T) error {
	return fmt.Errorf("CreateInTx not implemented in mock repository")
}
// UpdateInTx updates an entity within a transaction (mock implementation).
// Always returns an error; embedders needing this behavior must override it.
func (m *MockBaseRepository[T]) UpdateInTx(ctx context.Context, tx *gorm.DB, entity *T) error {
	return fmt.Errorf("UpdateInTx not implemented in mock repository")
}
// DeleteInTx removes an entity by its ID within a transaction (mock implementation).
// Always returns an error; embedders needing this behavior must override it.
func (m *MockBaseRepository[T]) DeleteInTx(ctx context.Context, tx *gorm.DB, id uint) error {
	return fmt.Errorf("DeleteInTx not implemented in mock repository")
}
// GetByIDWithOptions retrieves an entity by its ID with query options (mock implementation).
// Always returns a nil entity and an error; override in the embedding mock if needed.
func (m *MockBaseRepository[T]) GetByIDWithOptions(ctx context.Context, id uint, options *repositories.QueryOptions) (*T, error) {
	return nil, fmt.Errorf("GetByIDWithOptions not implemented in mock repository")
}
// ListWithOptions returns entities with query options (mock implementation).
// Always returns a nil slice and an error; override in the embedding mock if needed.
func (m *MockBaseRepository[T]) ListWithOptions(ctx context.Context, options *repositories.QueryOptions) ([]T, error) {
	return nil, fmt.Errorf("ListWithOptions not implemented in mock repository")
}
// CountWithOptions returns the count with query options (mock implementation).
// Always returns zero and an error; override in the embedding mock if needed.
func (m *MockBaseRepository[T]) CountWithOptions(ctx context.Context, options *repositories.QueryOptions) (int64, error) {
	return 0, fmt.Errorf("CountWithOptions not implemented in mock repository")
}
// Exists checks if an entity exists by ID (mock implementation).
// Always reports false with an error; override in the embedding mock if needed.
func (m *MockBaseRepository[T]) Exists(ctx context.Context, id uint) (bool, error) {
	return false, fmt.Errorf("Exists not implemented in mock repository")
}
// GetAllForSync returns entities in batches for synchronization (mock implementation).
// Always returns a nil slice and an error; override in the embedding mock if needed.
func (m *MockBaseRepository[T]) GetAllForSync(ctx context.Context, batchSize, offset int) ([]T, error) {
	return nil, fmt.Errorf("GetAllForSync not implemented in mock repository")
}
// AddMockBaseRepositoryMethods adds all the missing BaseRepository methods to a mock repository.
// This is a helper function to avoid duplicating code.
//
// NOTE(review): currently a no-op — the methods are implemented manually in
// each mock repository; repo is accepted only to preserve the intended
// signature for future use. `any` replaces `interface{}` (identical type),
// matching the file's existing use of `any` in type constraints.
func AddMockBaseRepositoryMethods[T any](repo any) {
	// Intentionally empty; each mock implements the methods directly.
}

View File

@ -4,23 +4,23 @@ import (
"context"
"errors"
"gorm.io/gorm"
"tercul/models"
"tercul/repositories"
models2 "tercul/internal/models"
repositories2 "tercul/internal/repositories"
)
// MockTranslationRepository is an in-memory implementation of TranslationRepository
type MockTranslationRepository struct {
items []models.Translation
items []models2.Translation
}
func NewMockTranslationRepository() *MockTranslationRepository {
return &MockTranslationRepository{items: []models.Translation{}}
return &MockTranslationRepository{items: []models2.Translation{}}
}
var _ repositories.TranslationRepository = (*MockTranslationRepository)(nil)
var _ repositories2.TranslationRepository = (*MockTranslationRepository)(nil)
// BaseRepository methods with context support
func (m *MockTranslationRepository) Create(ctx context.Context, t *models.Translation) error {
func (m *MockTranslationRepository) Create(ctx context.Context, t *models2.Translation) error {
if t == nil {
return errors.New("nil translation")
}
@ -29,24 +29,24 @@ func (m *MockTranslationRepository) Create(ctx context.Context, t *models.Transl
return nil
}
func (m *MockTranslationRepository) GetByID(ctx context.Context, id uint) (*models.Translation, error) {
func (m *MockTranslationRepository) GetByID(ctx context.Context, id uint) (*models2.Translation, error) {
for i := range m.items {
if m.items[i].ID == id {
cp := m.items[i]
return &cp, nil
}
}
return nil, repositories.ErrEntityNotFound
return nil, repositories2.ErrEntityNotFound
}
func (m *MockTranslationRepository) Update(ctx context.Context, t *models.Translation) error {
func (m *MockTranslationRepository) Update(ctx context.Context, t *models2.Translation) error {
for i := range m.items {
if m.items[i].ID == t.ID {
m.items[i] = *t
return nil
}
}
return repositories.ErrEntityNotFound
return repositories2.ErrEntityNotFound
}
func (m *MockTranslationRepository) Delete(ctx context.Context, id uint) error {
@ -56,57 +56,57 @@ func (m *MockTranslationRepository) Delete(ctx context.Context, id uint) error {
return nil
}
}
return repositories.ErrEntityNotFound
return repositories2.ErrEntityNotFound
}
func (m *MockTranslationRepository) List(ctx context.Context, page, pageSize int) (*repositories.PaginatedResult[models.Translation], error) {
all := append([]models.Translation(nil), m.items...)
func (m *MockTranslationRepository) List(ctx context.Context, page, pageSize int) (*repositories2.PaginatedResult[models2.Translation], error) {
all := append([]models2.Translation(nil), m.items...)
total := int64(len(all))
start := (page - 1) * pageSize
end := start + pageSize
if start > len(all) {
return &repositories.PaginatedResult[models.Translation]{Items: []models.Translation{}, TotalCount: total}, nil
return &repositories2.PaginatedResult[models2.Translation]{Items: []models2.Translation{}, TotalCount: total}, nil
}
if end > len(all) {
end = len(all)
}
return &repositories.PaginatedResult[models.Translation]{Items: all[start:end], TotalCount: total}, nil
return &repositories2.PaginatedResult[models2.Translation]{Items: all[start:end], TotalCount: total}, nil
}
func (m *MockTranslationRepository) ListAll(ctx context.Context) ([]models.Translation, error) {
return append([]models.Translation(nil), m.items...), nil
func (m *MockTranslationRepository) ListAll(ctx context.Context) ([]models2.Translation, error) {
return append([]models2.Translation(nil), m.items...), nil
}
func (m *MockTranslationRepository) Count(ctx context.Context) (int64, error) {
return int64(len(m.items)), nil
}
func (m *MockTranslationRepository) FindWithPreload(ctx context.Context, preloads []string, id uint) (*models.Translation, error) {
func (m *MockTranslationRepository) FindWithPreload(ctx context.Context, preloads []string, id uint) (*models2.Translation, error) {
return m.GetByID(ctx, id)
}
func (m *MockTranslationRepository) GetAllForSync(ctx context.Context, batchSize, offset int) ([]models.Translation, error) {
all := append([]models.Translation(nil), m.items...)
func (m *MockTranslationRepository) GetAllForSync(ctx context.Context, batchSize, offset int) ([]models2.Translation, error) {
all := append([]models2.Translation(nil), m.items...)
end := offset + batchSize
if end > len(all) {
end = len(all)
}
if offset > len(all) {
return []models.Translation{}, nil
return []models2.Translation{}, nil
}
return all[offset:end], nil
}
// New BaseRepository methods
func (m *MockTranslationRepository) CreateInTx(ctx context.Context, tx *gorm.DB, entity *models.Translation) error {
func (m *MockTranslationRepository) CreateInTx(ctx context.Context, tx *gorm.DB, entity *models2.Translation) error {
return m.Create(ctx, entity)
}
func (m *MockTranslationRepository) GetByIDWithOptions(ctx context.Context, id uint, options *repositories.QueryOptions) (*models.Translation, error) {
func (m *MockTranslationRepository) GetByIDWithOptions(ctx context.Context, id uint, options *repositories2.QueryOptions) (*models2.Translation, error) {
return m.GetByID(ctx, id)
}
func (m *MockTranslationRepository) UpdateInTx(ctx context.Context, tx *gorm.DB, entity *models.Translation) error {
func (m *MockTranslationRepository) UpdateInTx(ctx context.Context, tx *gorm.DB, entity *models2.Translation) error {
return m.Update(ctx, entity)
}
@ -114,7 +114,7 @@ func (m *MockTranslationRepository) DeleteInTx(ctx context.Context, tx *gorm.DB,
return m.Delete(ctx, id)
}
func (m *MockTranslationRepository) ListWithOptions(ctx context.Context, options *repositories.QueryOptions) ([]models.Translation, error) {
func (m *MockTranslationRepository) ListWithOptions(ctx context.Context, options *repositories2.QueryOptions) ([]models2.Translation, error) {
result, err := m.List(ctx, 1, 1000)
if err != nil {
return nil, err
@ -122,7 +122,7 @@ func (m *MockTranslationRepository) ListWithOptions(ctx context.Context, options
return result.Items, nil
}
func (m *MockTranslationRepository) CountWithOptions(ctx context.Context, options *repositories.QueryOptions) (int64, error) {
func (m *MockTranslationRepository) CountWithOptions(ctx context.Context, options *repositories2.QueryOptions) (int64, error) {
return m.Count(ctx)
}
@ -140,12 +140,12 @@ func (m *MockTranslationRepository) WithTx(ctx context.Context, fn func(tx *gorm
}
// TranslationRepository specific methods
func (m *MockTranslationRepository) ListByWorkID(ctx context.Context, workID uint) ([]models.Translation, error) {
func (m *MockTranslationRepository) ListByWorkID(ctx context.Context, workID uint) ([]models2.Translation, error) {
return m.ListByEntity(ctx, "Work", workID)
}
func (m *MockTranslationRepository) ListByEntity(ctx context.Context, entityType string, entityID uint) ([]models.Translation, error) {
var out []models.Translation
func (m *MockTranslationRepository) ListByEntity(ctx context.Context, entityType string, entityID uint) ([]models2.Translation, error) {
var out []models2.Translation
for i := range m.items {
tr := m.items[i]
if tr.TranslatableType == entityType && tr.TranslatableID == entityID {
@ -155,8 +155,8 @@ func (m *MockTranslationRepository) ListByEntity(ctx context.Context, entityType
return out, nil
}
func (m *MockTranslationRepository) ListByTranslatorID(ctx context.Context, translatorID uint) ([]models.Translation, error) {
var out []models.Translation
func (m *MockTranslationRepository) ListByTranslatorID(ctx context.Context, translatorID uint) ([]models2.Translation, error) {
var out []models2.Translation
for i := range m.items {
if m.items[i].TranslatorID != nil && *m.items[i].TranslatorID == translatorID {
out = append(out, m.items[i])
@ -165,8 +165,8 @@ func (m *MockTranslationRepository) ListByTranslatorID(ctx context.Context, tran
return out, nil
}
func (m *MockTranslationRepository) ListByStatus(ctx context.Context, status models.TranslationStatus) ([]models.Translation, error) {
var out []models.Translation
func (m *MockTranslationRepository) ListByStatus(ctx context.Context, status models2.TranslationStatus) ([]models2.Translation, error) {
var out []models2.Translation
for i := range m.items {
if m.items[i].Status == status {
out = append(out, m.items[i])
@ -177,12 +177,12 @@ func (m *MockTranslationRepository) ListByStatus(ctx context.Context, status mod
// Test helper: add a translation for a Work
func (m *MockTranslationRepository) AddTranslationForWork(workID uint, language string, content string, isOriginal bool) {
m.Create(context.Background(), &models.Translation{
m.Create(context.Background(), &models2.Translation{
Title: "",
Content: content,
Description: "",
Language: language,
Status: models.TranslationStatusPublished,
Status: models2.TranslationStatusPublished,
TranslatableID: workID,
TranslatableType: "Work",
IsOriginalLanguage: isOriginal,

View File

@ -3,8 +3,8 @@ package testutil
import (
"context"
"gorm.io/gorm"
"tercul/models"
"tercul/repositories"
"tercul/internal/models"
"tercul/internal/repositories"
)
// UnifiedMockWorkRepository is a shared mock for WorkRepository tests

View File

@ -0,0 +1,52 @@
package testutil
import (
"tercul/graph"
"tercul/internal/models"
"tercul/services"
"github.com/stretchr/testify/suite"
)
// SimpleTestSuite provides a minimal test environment with just the essentials
type SimpleTestSuite struct {
suite.Suite
WorkRepo *UnifiedMockWorkRepository
WorkService services.WorkService
}
// SetupSuite sets up the test suite
func (s *SimpleTestSuite) SetupSuite() {
s.WorkRepo = NewUnifiedMockWorkRepository()
s.WorkService = services.NewWorkService(s.WorkRepo, nil)
}
// SetupTest resets test data for each test
func (s *SimpleTestSuite) SetupTest() {
s.WorkRepo.Reset()
}
// GetResolver returns a minimal GraphQL resolver for testing
func (s *SimpleTestSuite) GetResolver() *graph.Resolver {
return &graph.Resolver{
WorkRepo: s.WorkRepo,
WorkService: s.WorkService,
// Other fields will be nil, but that's okay for basic tests
}
}
// CreateTestWork creates a test work with optional content
func (s *SimpleTestSuite) CreateTestWork(title, language string, content string) *models.Work {
work := &models.Work{
Title: title,
}
work.Language = language
// Add work to the mock repository
s.WorkRepo.AddWork(work)
// If content is provided, we'll need to handle it differently
// since the mock repository doesn't support translations yet
// For now, just return the work
return work
}

View File

@ -12,7 +12,7 @@ import (
"gorm.io/driver/postgres"
"gorm.io/gorm"
"gorm.io/gorm/logger"
"tercul/config"
"tercul/internal/platform/config"
)
// TestDB holds the test database connection

View File

@ -1,24 +1,22 @@
package linguistics
import (
"github.com/jonreiter/govader"
"github.com/jonreiter/govader"
)
// GoVADERSentimentProvider implements SentimentProvider using VADER
type GoVADERSentimentProvider struct {
analyzer *govader.SentimentIntensityAnalyzer
analyzer *govader.SentimentIntensityAnalyzer
}
// NewGoVADERSentimentProvider constructs a VADER-based sentiment provider
func NewGoVADERSentimentProvider() (*GoVADERSentimentProvider, error) {
analyzer := govader.NewSentimentIntensityAnalyzer()
return &GoVADERSentimentProvider{analyzer: analyzer}, nil
analyzer := govader.NewSentimentIntensityAnalyzer()
return &GoVADERSentimentProvider{analyzer: analyzer}, nil
}
// Score returns the compound VADER polarity score in [-1, 1]
func (p *GoVADERSentimentProvider) Score(text string, _ string) (float64, error) {
scores := p.analyzer.PolarityScores(text)
return scores.Compound, nil
scores := p.analyzer.PolarityScores(text)
return scores.Compound, nil
}

View File

@ -1,19 +1,17 @@
package linguistics
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/require"
"testing"
)
func TestGoVADERSentimentProvider_Score(t *testing.T) {
sp, err := NewGoVADERSentimentProvider()
require.NoError(t, err)
pos, err := sp.Score("I love this wonderful product!", "en")
require.NoError(t, err)
require.Greater(t, pos, 0.0)
neg, err := sp.Score("This is the worst thing ever. I hate it.", "en")
require.NoError(t, err)
require.Less(t, neg, 0.0)
sp, err := NewGoVADERSentimentProvider()
require.NoError(t, err)
pos, err := sp.Score("I love this wonderful product!", "en")
require.NoError(t, err)
require.Greater(t, pos, 0.0)
neg, err := sp.Score("This is the worst thing ever. I hate it.", "en")
require.NoError(t, err)
require.Less(t, neg, 0.0)
}

View File

@ -1,36 +1,34 @@
package linguistics
import (
"strings"
lingua "github.com/pemistahl/lingua-go"
lingua "github.com/pemistahl/lingua-go"
"strings"
)
// LinguaLanguageDetector implements LanguageDetector using lingua-go
type LinguaLanguageDetector struct {
detector lingua.LanguageDetector
detector lingua.LanguageDetector
}
// NewLinguaLanguageDetector builds a detector for all supported languages
func NewLinguaLanguageDetector() *LinguaLanguageDetector {
det := lingua.NewLanguageDetectorBuilder().FromAllLanguages().Build()
return &LinguaLanguageDetector{detector: det}
det := lingua.NewLanguageDetectorBuilder().FromAllLanguages().Build()
return &LinguaLanguageDetector{detector: det}
}
// DetectLanguage returns a lowercase ISO 639-1 code if possible
func (l *LinguaLanguageDetector) DetectLanguage(text string) (string, bool) {
lang, ok := l.detector.DetectLanguageOf(text)
if !ok {
return "", false
}
// Prefer ISO 639-1 when available else fallback to ISO 639-3
if s := lang.IsoCode639_1().String(); s != "" {
return s, true
}
if s := lang.IsoCode639_3().String(); s != "" {
return s, true
}
// fallback to language name
return strings.ToLower(lang.String()), true
lang, ok := l.detector.DetectLanguageOf(text)
if !ok {
return "", false
}
// Prefer ISO 639-1 when available else fallback to ISO 639-3
if s := lang.IsoCode639_1().String(); s != "" {
return s, true
}
if s := lang.IsoCode639_3().String(); s != "" {
return s, true
}
// fallback to language name
return strings.ToLower(lang.String()), true
}

View File

@ -1,15 +1,13 @@
package linguistics
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/require"
"testing"
)
func TestLinguaLanguageDetector_DetectLanguage(t *testing.T) {
d := NewLinguaLanguageDetector()
code, ok := d.DetectLanguage("This is an English sentence.")
require.True(t, ok)
require.NotEmpty(t, code)
d := NewLinguaLanguageDetector()
code, ok := d.DetectLanguage("This is an English sentence.")
require.True(t, ok)
require.NotEmpty(t, code)
}

View File

@ -1,8 +1,8 @@
package linguistics
import (
"sort"
"strings"
"sort"
"strings"
)
// TFIDFKeywordProvider is a lightweight keyword provider using local term frequencies as a proxy.
@ -13,33 +13,31 @@ type TFIDFKeywordProvider struct{}
func NewTFIDFKeywordProvider() *TFIDFKeywordProvider { return &TFIDFKeywordProvider{} }
func (p *TFIDFKeywordProvider) Extract(text string, language string) ([]Keyword, error) {
tokens := tokenizeWords(text)
if len(tokens) == 0 {
return nil, nil
}
freq := make(map[string]int, len(tokens))
for _, t := range tokens {
if len(t) <= 2 || isStopWord(t, language) {
continue
}
freq[strings.ToLower(t)]++
}
total := 0
for _, c := range freq {
total += c
}
keywords := make([]Keyword, 0, len(freq))
for w, c := range freq {
rel := float64(c) / float64(len(tokens))
if rel > 0 {
keywords = append(keywords, Keyword{Text: w, Relevance: rel})
}
}
sort.Slice(keywords, func(i, j int) bool { return keywords[i].Relevance > keywords[j].Relevance })
if len(keywords) > 10 {
keywords = keywords[:10]
}
return keywords, nil
tokens := tokenizeWords(text)
if len(tokens) == 0 {
return nil, nil
}
freq := make(map[string]int, len(tokens))
for _, t := range tokens {
if len(t) <= 2 || isStopWord(t, language) {
continue
}
freq[strings.ToLower(t)]++
}
total := 0
for _, c := range freq {
total += c
}
keywords := make([]Keyword, 0, len(freq))
for w, c := range freq {
rel := float64(c) / float64(len(tokens))
if rel > 0 {
keywords = append(keywords, Keyword{Text: w, Relevance: rel})
}
}
sort.Slice(keywords, func(i, j int) bool { return keywords[i].Relevance > keywords[j].Relevance })
if len(keywords) > 10 {
keywords = keywords[:10]
}
return keywords, nil
}

View File

@ -1,8 +1,8 @@
package linguistics
import (
"errors"
"strings"
"errors"
"strings"
)
// --- LanguageDetector Adapters ---
@ -11,7 +11,7 @@ import (
type NullLanguageDetector struct{}
func (n NullLanguageDetector) DetectLanguage(text string) (string, bool) {
return "", false
return "", false
}
// --- SentimentProvider Adapters ---
@ -20,10 +20,8 @@ func (n NullLanguageDetector) DetectLanguage(text string) (string, bool) {
type RuleBasedSentimentProvider struct{}
func (r RuleBasedSentimentProvider) Score(text string, language string) (float64, error) {
if strings.TrimSpace(text) == "" {
return 0, errors.New("empty text")
}
return estimateSentimentOptimized(text, language), nil
if strings.TrimSpace(text) == "" {
return 0, errors.New("empty text")
}
return estimateSentimentOptimized(text, language), nil
}

View File

@ -3,12 +3,12 @@ package linguistics
import (
"context"
"fmt"
"sync"
"github.com/hashicorp/golang-lru/v2"
"tercul/cache"
"tercul/logger"
"tercul/config"
"time"
"sync"
"tercul/internal/platform/cache"
"tercul/internal/platform/config"
"tercul/internal/platform/log"
"time"
)
// AnalysisCache defines the interface for caching analysis results
@ -32,12 +32,12 @@ type MemoryAnalysisCache struct {
// NewMemoryAnalysisCache creates a new MemoryAnalysisCache
func NewMemoryAnalysisCache(enabled bool) *MemoryAnalysisCache {
// capacity from config
cap := config.Cfg.NLPMemoryCacheCap
if cap <= 0 {
cap = 1024
}
l, _ := lru.New[string, *AnalysisResult](cap)
// capacity from config
cap := config.Cfg.NLPMemoryCacheCap
if cap <= 0 {
cap = 1024
}
l, _ := lru.New[string, *AnalysisResult](cap)
return &MemoryAnalysisCache{
cache: l,
enabled: enabled,
@ -53,9 +53,9 @@ func (c *MemoryAnalysisCache) Get(ctx context.Context, key string) (*AnalysisRes
c.mutex.RLock()
defer c.mutex.RUnlock()
if result, exists := c.cache.Get(key); exists {
return result, nil
}
if result, exists := c.cache.Get(key); exists {
return result, nil
}
return nil, fmt.Errorf("cache miss")
}
@ -113,13 +113,13 @@ func (c *RedisAnalysisCache) Set(ctx context.Context, key string, result *Analys
return nil
}
// TTL from config
ttlSeconds := config.Cfg.NLPRedisCacheTTLSeconds
err := c.cache.Set(ctx, key, result, time.Duration(ttlSeconds)*time.Second)
// TTL from config
ttlSeconds := config.Cfg.NLPRedisCacheTTLSeconds
err := c.cache.Set(ctx, key, result, time.Duration(ttlSeconds)*time.Second)
if err != nil {
logger.LogWarn("Failed to cache analysis result",
logger.F("key", key),
logger.F("error", err))
log.LogWarn("Failed to cache analysis result",
log.F("key", key),
log.F("error", err))
return err
}
@ -176,16 +176,16 @@ func (c *CompositeAnalysisCache) Set(ctx context.Context, key string, result *An
// Set in memory cache
if err := c.memoryCache.Set(ctx, key, result); err != nil {
logger.LogWarn("Failed to set memory cache",
logger.F("key", key),
logger.F("error", err))
log.LogWarn("Failed to set memory cache",
log.F("key", key),
log.F("error", err))
}
// Set in Redis cache
if err := c.redisCache.Set(ctx, key, result); err != nil {
logger.LogWarn("Failed to set Redis cache",
logger.F("key", key),
logger.F("error", err))
log.LogWarn("Failed to set Redis cache",
log.F("key", key),
log.F("error", err))
return err
}

View File

@ -3,9 +3,10 @@ package linguistics
import (
"context"
"fmt"
models2 "tercul/internal/models"
"gorm.io/gorm"
"tercul/logger"
"tercul/models"
"tercul/internal/platform/log"
)
// AnalysisRepository defines the interface for database operations related to analysis
@ -17,14 +18,14 @@ type AnalysisRepository interface {
GetWorkContent(ctx context.Context, workID uint, language string) (string, error)
// StoreWorkAnalysis stores work-specific analysis results
StoreWorkAnalysis(ctx context.Context, workID uint, textMetadata *models.TextMetadata,
readabilityScore *models.ReadabilityScore, languageAnalysis *models.LanguageAnalysis) error
StoreWorkAnalysis(ctx context.Context, workID uint, textMetadata *models2.TextMetadata,
readabilityScore *models2.ReadabilityScore, languageAnalysis *models2.LanguageAnalysis) error
// GetWorkByID fetches a work by ID
GetWorkByID(ctx context.Context, workID uint) (*models.Work, error)
// GetWorkByID fetches a work by ID
GetWorkByID(ctx context.Context, workID uint) (*models2.Work, error)
// GetAnalysisData fetches persisted analysis data for a work
GetAnalysisData(ctx context.Context, workID uint) (*models.TextMetadata, *models.ReadabilityScore, *models.LanguageAnalysis, error)
// GetAnalysisData fetches persisted analysis data for a work
GetAnalysisData(ctx context.Context, workID uint) (*models2.TextMetadata, *models2.ReadabilityScore, *models2.LanguageAnalysis, error)
}
// GORMAnalysisRepository implements AnalysisRepository using GORM
@ -43,19 +44,19 @@ func (r *GORMAnalysisRepository) StoreAnalysisResults(ctx context.Context, workI
return fmt.Errorf("analysis result cannot be nil")
}
// Determine language from the work record to avoid hardcoded defaults
var work models.Work
if err := r.db.WithContext(ctx).First(&work, workID).Error; err != nil {
logger.LogError("Failed to fetch work for language",
logger.F("workID", workID),
logger.F("error", err))
return fmt.Errorf("failed to fetch work for language: %w", err)
}
// Determine language from the work record to avoid hardcoded defaults
var work models2.Work
if err := r.db.WithContext(ctx).First(&work, workID).Error; err != nil {
log.LogError("Failed to fetch work for language",
log.F("workID", workID),
log.F("error", err))
return fmt.Errorf("failed to fetch work for language: %w", err)
}
// Create text metadata
textMetadata := &models.TextMetadata{
textMetadata := &models2.TextMetadata{
WorkID: workID,
Language: work.Language,
Language: work.Language,
WordCount: result.WordCount,
SentenceCount: result.SentenceCount,
ParagraphCount: result.ParagraphCount,
@ -64,18 +65,18 @@ func (r *GORMAnalysisRepository) StoreAnalysisResults(ctx context.Context, workI
}
// Create readability score
readabilityScore := &models.ReadabilityScore{
WorkID: workID,
Language: work.Language,
Score: result.ReadabilityScore,
Method: result.ReadabilityMethod,
readabilityScore := &models2.ReadabilityScore{
WorkID: workID,
Language: work.Language,
Score: result.ReadabilityScore,
Method: result.ReadabilityMethod,
}
// Create language analysis
languageAnalysis := &models.LanguageAnalysis{
languageAnalysis := &models2.LanguageAnalysis{
WorkID: workID,
Language: work.Language,
Analysis: models.JSONB{
Language: work.Language,
Analysis: models2.JSONB{
"sentiment": result.Sentiment,
"keywords": extractKeywordsAsJSON(result.Keywords),
"topics": extractTopicsAsJSON(result.Topics),
@ -88,11 +89,11 @@ func (r *GORMAnalysisRepository) StoreAnalysisResults(ctx context.Context, workI
// GetWorkContent retrieves content for a work from translations
func (r *GORMAnalysisRepository) GetWorkContent(ctx context.Context, workID uint, language string) (string, error) {
// First, get the work to determine its language
var work models.Work
var work models2.Work
if err := r.db.First(&work, workID).Error; err != nil {
logger.LogError("Failed to fetch work for content retrieval",
logger.F("workID", workID),
logger.F("error", err))
log.LogError("Failed to fetch work for content retrieval",
log.F("workID", workID),
log.F("error", err))
return "", fmt.Errorf("failed to fetch work: %w", err)
}
@ -101,7 +102,7 @@ func (r *GORMAnalysisRepository) GetWorkContent(ctx context.Context, workID uint
// 2. Work's language translation
// 3. Any available translation
var translation models.Translation
var translation models2.Translation
// Try original language first
if err := r.db.Where("translatable_type = ? AND translatable_id = ? AND is_original_language = ?",
@ -125,107 +126,107 @@ func (r *GORMAnalysisRepository) GetWorkContent(ctx context.Context, workID uint
}
// GetWorkByID fetches a work by ID
func (r *GORMAnalysisRepository) GetWorkByID(ctx context.Context, workID uint) (*models.Work, error) {
var work models.Work
if err := r.db.WithContext(ctx).First(&work, workID).Error; err != nil {
return nil, fmt.Errorf("failed to fetch work: %w", err)
}
return &work, nil
func (r *GORMAnalysisRepository) GetWorkByID(ctx context.Context, workID uint) (*models2.Work, error) {
var work models2.Work
if err := r.db.WithContext(ctx).First(&work, workID).Error; err != nil {
return nil, fmt.Errorf("failed to fetch work: %w", err)
}
return &work, nil
}
// GetAnalysisData fetches persisted analysis data for a work
func (r *GORMAnalysisRepository) GetAnalysisData(ctx context.Context, workID uint) (*models.TextMetadata, *models.ReadabilityScore, *models.LanguageAnalysis, error) {
var textMetadata models.TextMetadata
var readabilityScore models.ReadabilityScore
var languageAnalysis models.LanguageAnalysis
func (r *GORMAnalysisRepository) GetAnalysisData(ctx context.Context, workID uint) (*models2.TextMetadata, *models2.ReadabilityScore, *models2.LanguageAnalysis, error) {
var textMetadata models2.TextMetadata
var readabilityScore models2.ReadabilityScore
var languageAnalysis models2.LanguageAnalysis
if err := r.db.WithContext(ctx).Where("work_id = ?", workID).First(&textMetadata).Error; err != nil {
logger.LogWarn("No text metadata found for work",
logger.F("workID", workID))
}
if err := r.db.WithContext(ctx).Where("work_id = ?", workID).First(&textMetadata).Error; err != nil {
log.LogWarn("No text metadata found for work",
log.F("workID", workID))
}
if err := r.db.WithContext(ctx).Where("work_id = ?", workID).First(&readabilityScore).Error; err != nil {
logger.LogWarn("No readability score found for work",
logger.F("workID", workID))
}
if err := r.db.WithContext(ctx).Where("work_id = ?", workID).First(&readabilityScore).Error; err != nil {
log.LogWarn("No readability score found for work",
log.F("workID", workID))
}
if err := r.db.WithContext(ctx).Where("work_id = ?", workID).First(&languageAnalysis).Error; err != nil {
logger.LogWarn("No language analysis found for work",
logger.F("workID", workID))
}
if err := r.db.WithContext(ctx).Where("work_id = ?", workID).First(&languageAnalysis).Error; err != nil {
log.LogWarn("No language analysis found for work",
log.F("workID", workID))
}
return &textMetadata, &readabilityScore, &languageAnalysis, nil
return &textMetadata, &readabilityScore, &languageAnalysis, nil
}
// StoreWorkAnalysis stores work-specific analysis results
func (r *GORMAnalysisRepository) StoreWorkAnalysis(ctx context.Context, workID uint,
textMetadata *models.TextMetadata, readabilityScore *models.ReadabilityScore,
languageAnalysis *models.LanguageAnalysis) error {
textMetadata *models2.TextMetadata, readabilityScore *models2.ReadabilityScore,
languageAnalysis *models2.LanguageAnalysis) error {
// Use a transaction to ensure all data is stored atomically
return r.db.WithContext(ctx).Transaction(func(tx *gorm.DB) error {
// Store text metadata
if textMetadata != nil {
if err := tx.Where("work_id = ?", workID).Delete(&models.TextMetadata{}).Error; err != nil {
logger.LogError("Failed to delete existing text metadata",
logger.F("workID", workID),
logger.F("error", err))
if err := tx.Where("work_id = ?", workID).Delete(&models2.TextMetadata{}).Error; err != nil {
log.LogError("Failed to delete existing text metadata",
log.F("workID", workID),
log.F("error", err))
return fmt.Errorf("failed to delete existing text metadata: %w", err)
}
if err := tx.Create(textMetadata).Error; err != nil {
logger.LogError("Failed to store text metadata",
logger.F("workID", workID),
logger.F("error", err))
log.LogError("Failed to store text metadata",
log.F("workID", workID),
log.F("error", err))
return fmt.Errorf("failed to store text metadata: %w", err)
}
}
// Store readability score
if readabilityScore != nil {
if err := tx.Where("work_id = ?", workID).Delete(&models.ReadabilityScore{}).Error; err != nil {
logger.LogError("Failed to delete existing readability score",
logger.F("workID", workID),
logger.F("error", err))
if err := tx.Where("work_id = ?", workID).Delete(&models2.ReadabilityScore{}).Error; err != nil {
log.LogError("Failed to delete existing readability score",
log.F("workID", workID),
log.F("error", err))
return fmt.Errorf("failed to delete existing readability score: %w", err)
}
if err := tx.Create(readabilityScore).Error; err != nil {
logger.LogError("Failed to store readability score",
logger.F("workID", workID),
logger.F("error", err))
log.LogError("Failed to store readability score",
log.F("workID", workID),
log.F("error", err))
return fmt.Errorf("failed to store readability score: %w", err)
}
}
// Store language analysis
if languageAnalysis != nil {
if err := tx.Where("work_id = ?", workID).Delete(&models.LanguageAnalysis{}).Error; err != nil {
logger.LogError("Failed to delete existing language analysis",
logger.F("workID", workID),
logger.F("error", err))
if err := tx.Where("work_id = ?", workID).Delete(&models2.LanguageAnalysis{}).Error; err != nil {
log.LogError("Failed to delete existing language analysis",
log.F("workID", workID),
log.F("error", err))
return fmt.Errorf("failed to delete existing language analysis: %w", err)
}
if err := tx.Create(languageAnalysis).Error; err != nil {
logger.LogError("Failed to store language analysis",
logger.F("workID", workID),
logger.F("error", err))
log.LogError("Failed to store language analysis",
log.F("workID", workID),
log.F("error", err))
return fmt.Errorf("failed to store language analysis: %w", err)
}
}
logger.LogInfo("Successfully stored analysis results",
logger.F("workID", workID))
log.LogInfo("Successfully stored analysis results",
log.F("workID", workID))
return nil
})
}
// Helper functions for data conversion
func extractKeywordsAsJSON(keywords []Keyword) models.JSONB {
func extractKeywordsAsJSON(keywords []Keyword) models2.JSONB {
if len(keywords) == 0 {
return models.JSONB{}
return models2.JSONB{}
}
keywordData := make([]map[string]interface{}, len(keywords))
@ -236,12 +237,12 @@ func extractKeywordsAsJSON(keywords []Keyword) models.JSONB {
}
}
return models.JSONB{"keywords": keywordData}
return models2.JSONB{"keywords": keywordData}
}
func extractTopicsAsJSON(topics []Topic) models.JSONB {
func extractTopicsAsJSON(topics []Topic) models2.JSONB {
if len(topics) == 0 {
return models.JSONB{}
return models2.JSONB{}
}
topicData := make([]map[string]interface{}, len(topics))
@ -252,5 +253,5 @@ func extractTopicsAsJSON(topics []Topic) models.JSONB {
}
}
return models.JSONB{"topics": topicData}
return models2.JSONB{"topics": topicData}
}

View File

@ -1,12 +1,13 @@
package linguistics
import (
"context"
"crypto/sha256"
"encoding/hex"
"sync"
"tercul/cache"
"tercul/logger"
"context"
"crypto/sha256"
"encoding/hex"
"sync"
"tercul/internal/platform/cache"
"tercul/internal/platform/log"
)
// Analyzer defines the interface for linguistic analysis services
@ -22,34 +23,34 @@ type Analyzer interface {
// It delegates pure text analysis to TextAnalyzer and work analysis to WorkAnalysisService,
// and only handles caching and orchestration concerns here to preserve SRP/DRY.
type BasicAnalyzer struct {
textAnalyzer TextAnalyzer
workAnalysisService WorkAnalysisService
cache cache.Cache
resultCache map[string]*AnalysisResult
cacheMutex sync.RWMutex
concurrency int
cacheEnabled bool
textAnalyzer TextAnalyzer
workAnalysisService WorkAnalysisService
cache cache.Cache
resultCache map[string]*AnalysisResult
cacheMutex sync.RWMutex
concurrency int
cacheEnabled bool
}
// NewBasicAnalyzer creates a new BasicAnalyzer
func NewBasicAnalyzer(
textAnalyzer TextAnalyzer,
workService WorkAnalysisService,
redis cache.Cache,
concurrency int,
cacheEnabled bool,
textAnalyzer TextAnalyzer,
workService WorkAnalysisService,
redis cache.Cache,
concurrency int,
cacheEnabled bool,
) *BasicAnalyzer {
if concurrency <= 0 {
concurrency = 4
}
return &BasicAnalyzer{
textAnalyzer: textAnalyzer,
workAnalysisService: workService,
cache: redis,
resultCache: make(map[string]*AnalysisResult),
concurrency: concurrency,
cacheEnabled: cacheEnabled,
}
if concurrency <= 0 {
concurrency = 4
}
return &BasicAnalyzer{
textAnalyzer: textAnalyzer,
workAnalysisService: workService,
cache: redis,
resultCache: make(map[string]*AnalysisResult),
concurrency: concurrency,
cacheEnabled: cacheEnabled,
}
}
// WithCache adds a cache to the analyzer
@ -78,86 +79,86 @@ func (a *BasicAnalyzer) DisableCache() {
// AnalyzeText performs basic linguistic analysis on the given text
func (a *BasicAnalyzer) AnalyzeText(ctx context.Context, text string, language string) (*AnalysisResult, error) {
// Check in-memory cache first if enabled
if a.cacheEnabled {
cacheKey := makeTextCacheKey(language, text)
// Check in-memory cache first if enabled
if a.cacheEnabled {
cacheKey := makeTextCacheKey(language, text)
// Try to get from in-memory cache
a.cacheMutex.RLock()
cachedResult, found := a.resultCache[cacheKey]
a.cacheMutex.RUnlock()
// Try to get from in-memory cache
a.cacheMutex.RLock()
cachedResult, found := a.resultCache[cacheKey]
a.cacheMutex.RUnlock()
if found {
logger.LogDebug("In-memory cache hit for text analysis",
logger.F("language", language),
logger.F("textLength", len(text)))
return cachedResult, nil
}
if found {
log.LogDebug("In-memory cache hit for text analysis",
log.F("language", language),
log.F("textLength", len(text)))
return cachedResult, nil
}
// Try to get from Redis cache if available
if a.cache != nil {
var cachedResult AnalysisResult
err := a.cache.Get(ctx, "text_analysis:"+cacheKey, &cachedResult)
if err == nil {
logger.LogDebug("Redis cache hit for text analysis",
logger.F("language", language),
logger.F("textLength", len(text)))
// Try to get from Redis cache if available
if a.cache != nil {
var cachedResult AnalysisResult
err := a.cache.Get(ctx, "text_analysis:"+cacheKey, &cachedResult)
if err == nil {
log.LogDebug("Redis cache hit for text analysis",
log.F("language", language),
log.F("textLength", len(text)))
// Store in in-memory cache too
a.cacheMutex.Lock()
a.resultCache[cacheKey] = &cachedResult
a.cacheMutex.Unlock()
// Store in in-memory cache too
a.cacheMutex.Lock()
a.resultCache[cacheKey] = &cachedResult
a.cacheMutex.Unlock()
return &cachedResult, nil
}
}
}
return &cachedResult, nil
}
}
}
// Cache miss or caching disabled, perform analysis using the pure TextAnalyzer
logger.LogDebug("Performing text analysis",
logger.F("language", language),
logger.F("textLength", len(text)))
// Cache miss or caching disabled, perform analysis using the pure TextAnalyzer
log.LogDebug("Performing text analysis",
log.F("language", language),
log.F("textLength", len(text)))
var (
result *AnalysisResult
err error
)
if len(text) > 10000 && a.concurrency > 1 {
result, err = a.textAnalyzer.AnalyzeTextConcurrently(ctx, text, language, a.concurrency)
} else {
result, err = a.textAnalyzer.AnalyzeText(ctx, text, language)
}
if err != nil {
return nil, err
}
var (
result *AnalysisResult
err error
)
if len(text) > 10000 && a.concurrency > 1 {
result, err = a.textAnalyzer.AnalyzeTextConcurrently(ctx, text, language, a.concurrency)
} else {
result, err = a.textAnalyzer.AnalyzeText(ctx, text, language)
}
if err != nil {
return nil, err
}
// Cache the result if caching is enabled
if a.cacheEnabled {
cacheKey := makeTextCacheKey(language, text)
// Cache the result if caching is enabled
if a.cacheEnabled {
cacheKey := makeTextCacheKey(language, text)
// Store in in-memory cache
a.cacheMutex.Lock()
a.resultCache[cacheKey] = result
a.cacheMutex.Unlock()
// Store in in-memory cache
a.cacheMutex.Lock()
a.resultCache[cacheKey] = result
a.cacheMutex.Unlock()
// Store in Redis cache if available
if a.cache != nil {
if err := a.cache.Set(ctx, "text_analysis:"+cacheKey, result, 0); err != nil {
logger.LogWarn("Failed to cache text analysis result",
logger.F("language", language),
logger.F("textLength", len(text)),
logger.F("error", err))
}
}
}
// Store in Redis cache if available
if a.cache != nil {
if err := a.cache.Set(ctx, "text_analysis:"+cacheKey, result, 0); err != nil {
log.LogWarn("Failed to cache text analysis result",
log.F("language", language),
log.F("textLength", len(text)),
log.F("error", err))
}
}
}
return result, nil
return result, nil
}
// AnalyzeWork performs linguistic analysis on a work and stores the results.
//
// It delegates to the WorkAnalysisService so work-level analysis logic has a
// single owner; BasicAnalyzer only orchestrates text-level analysis itself.
func (a *BasicAnalyzer) AnalyzeWork(ctx context.Context, workID uint) error {
	// Delegate to the WorkAnalysisService to preserve single ownership.
	return a.workAnalysisService.AnalyzeWork(ctx, workID)
}
// Helper functions for text analysis
@ -174,6 +175,6 @@ func min(a, b int) int {
// makeTextCacheKey builds a stable cache key using a content hash to avoid
// collisions and leaks.
//
// The key has the form "<language>:<sha256-hex-of-text>", so distinct texts
// never share a key and raw user text never appears in cache keys or logs.
func makeTextCacheKey(language, text string) string {
	h := sha256.Sum256([]byte(text))
	return language + ":" + hex.EncodeToString(h[:])
}

View File

@ -1,9 +1,10 @@
package linguistics
import (
"gorm.io/gorm"
"tercul/cache"
"tercul/config"
"tercul/internal/platform/cache"
"tercul/internal/platform/config"
"gorm.io/gorm"
)
// LinguisticsFactory provides easy access to all linguistics components
@ -22,29 +23,29 @@ func NewLinguisticsFactory(
concurrency int,
cacheEnabled bool,
) *LinguisticsFactory {
// Create text analyzer and wire providers (prefer external libs when available)
textAnalyzer := NewBasicTextAnalyzer()
// Create text analyzer and wire providers (prefer external libs when available)
textAnalyzer := NewBasicTextAnalyzer()
// Wire sentiment provider: GoVADER (configurable)
if config.Cfg.NLPUseVADER {
if sp, err := NewGoVADERSentimentProvider(); err == nil {
textAnalyzer = textAnalyzer.WithSentimentProvider(sp)
} else {
textAnalyzer = textAnalyzer.WithSentimentProvider(RuleBasedSentimentProvider{})
}
} else {
textAnalyzer = textAnalyzer.WithSentimentProvider(RuleBasedSentimentProvider{})
}
// Wire sentiment provider: GoVADER (configurable)
if config.Cfg.NLPUseVADER {
if sp, err := NewGoVADERSentimentProvider(); err == nil {
textAnalyzer = textAnalyzer.WithSentimentProvider(sp)
} else {
textAnalyzer = textAnalyzer.WithSentimentProvider(RuleBasedSentimentProvider{})
}
} else {
textAnalyzer = textAnalyzer.WithSentimentProvider(RuleBasedSentimentProvider{})
}
// Wire language detector: lingua-go (configurable)
if config.Cfg.NLPUseLingua {
textAnalyzer = textAnalyzer.WithLanguageDetector(NewLinguaLanguageDetector())
}
// Wire language detector: lingua-go (configurable)
if config.Cfg.NLPUseLingua {
textAnalyzer = textAnalyzer.WithLanguageDetector(NewLinguaLanguageDetector())
}
// Wire keyword provider: lightweight TF-IDF approximation (configurable)
if config.Cfg.NLPUseTFIDF {
textAnalyzer = textAnalyzer.WithKeywordProvider(NewTFIDFKeywordProvider())
}
// Wire keyword provider: lightweight TF-IDF approximation (configurable)
if config.Cfg.NLPUseTFIDF {
textAnalyzer = textAnalyzer.WithKeywordProvider(NewTFIDFKeywordProvider())
}
// Create cache components
memoryCache := NewMemoryAnalysisCache(cacheEnabled)
@ -64,13 +65,13 @@ func NewLinguisticsFactory(
)
// Create analyzer that combines text analysis and work analysis
analyzer := NewBasicAnalyzer(
textAnalyzer,
workAnalysisService,
cache,
concurrency,
cacheEnabled,
)
analyzer := NewBasicAnalyzer(
textAnalyzer,
workAnalysisService,
cache,
concurrency,
cacheEnabled,
)
return &LinguisticsFactory{
textAnalyzer: textAnalyzer,

View File

@ -1,15 +1,13 @@
package linguistics
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/require"
"testing"
)
// TestFactory_WiresProviders is a smoke test that the factory wires a usable
// text analyzer without requiring a real DB or cache.
func TestFactory_WiresProviders(t *testing.T) {
	// We won't spin a DB/cache here; this is a smoke test of wiring methods.
	f := NewLinguisticsFactory(nil, nil, 2, true)
	ta := f.GetTextAnalyzer().(*BasicTextAnalyzer)
	require.NotNil(t, ta)
}

View File

@ -2,20 +2,18 @@ package linguistics
// LanguageDetector defines a provider that can detect the language of a text.
type LanguageDetector interface {
	// DetectLanguage returns a BCP-47 or ISO-like code and whether detection was confident.
	DetectLanguage(text string) (string, bool)
}
// SentimentProvider defines a provider that scores sentiment in [-1, 1]
type SentimentProvider interface {
// Score returns sentiment for the text (optionally using language)
Score(text string, language string) (float64, error)
// Score returns sentiment for the text (optionally using language)
Score(text string, language string) (float64, error)
}
// KeywordProvider defines a provider that extracts keywords from text
type KeywordProvider interface {
// Extract returns a list of keywords with relevance in [0,1]
Extract(text string, language string) ([]Keyword, error)
// Extract returns a list of keywords with relevance in [0,1]
Extract(text string, language string) ([]Keyword, error)
}

View File

@ -5,11 +5,11 @@ import (
"encoding/json"
"fmt"
"log"
models2 "tercul/internal/models"
"time"
"github.com/hibiken/asynq"
"gorm.io/gorm"
"tercul/models"
)
const (
@ -60,7 +60,7 @@ func (j *LinguisticSyncJob) EnqueueAnalysisForAllWorks() error {
log.Println("Enqueueing linguistic analysis jobs for all works...")
var workIDs []uint
if err := j.DB.Model(&models.Work{}).Pluck("id", &workIDs).Error; err != nil {
if err := j.DB.Model(&models2.Work{}).Pluck("id", &workIDs).Error; err != nil {
return fmt.Errorf("error fetching work IDs: %w", err)
}
@ -87,7 +87,7 @@ func (j *LinguisticSyncJob) HandleLinguisticAnalysis(ctx context.Context, t *asy
// Check if analysis already exists
var count int64
if err := j.DB.Model(&models.LanguageAnalysis{}).Where("work_id = ?", payload.WorkID).Count(&count).Error; err != nil {
if err := j.DB.Model(&models2.LanguageAnalysis{}).Where("work_id = ?", payload.WorkID).Count(&count).Error; err != nil {
return fmt.Errorf("error checking existing analysis: %w", err)
}

Some files were not shown because too many files have changed in this diff Show More