Merge pull request #7 from SamyRai/feature/refactor-analytics-service

Refactor: Expose Analytics Service via GraphQL
Damir Mukimov 2025-10-03 11:22:02 +02:00 committed by GitHub
commit c26d86ae80
25 changed files with 1052 additions and 829 deletions

View File

@@ -6,7 +6,7 @@
 - [x] **Complete the Architecture Refactor (High, 5d):** Finalize the transition to a clean, domain-driven architecture. This will significantly improve maintainability, scalability, and developer velocity.
   - [x] Ensure resolvers call application services only and add dataloaders per aggregate.
-  - [ ] Adopt a migrations tool and move all SQL to migration files.
+  - [x] Adopt a migrations tool and move all SQL to migration files.
   - [ ] Implement full observability with centralized logging, metrics, and tracing.
 - [x] **Full Test Coverage (High, 5d):** Increase test coverage across the application to ensure stability and prevent regressions.
   - [x] Write unit tests for all models, repositories, and services.
@@ -33,8 +33,8 @@
 - [x] `monetization` domain
 - [x] `search` domain
 - [x] `work` domain
-- [ ] Resolvers call application services only; add dataloaders per aggregate (High, 3d)
+- [x] Resolvers call application services only; add dataloaders per aggregate (High, 3d)
-- [ ] Adopt migrations tool (goose/atlas/migrate); move SQL to `internal/data/migrations`; delete `migrations.go` (High, 2d)
+- [x] Adopt migrations tool (goose/atlas/migrate); move SQL to `internal/data/migrations`; delete `migrations.go` (High, 2d)
 - [ ] Observability: centralize logging; add Prometheus metrics and OpenTelemetry tracing; request IDs (High, 3d)
 - [ ] CI: add `make lint test test-integration` and integration tests with Docker compose (High, 2d)

View File

@@ -5,11 +5,13 @@ import (
 	"net/http"
 	"os"
 	"os/signal"
+	"path/filepath"
+	"runtime"
 	"syscall"
 	"tercul/internal/app"
 	"tercul/internal/app/analytics"
 	graph "tercul/internal/adapters/graphql"
-	"tercul/internal/data/sql"
+	dbsql "tercul/internal/data/sql"
 	"tercul/internal/jobs/linguistics"
 	"tercul/internal/platform/auth"
 	"tercul/internal/platform/config"
@@ -19,9 +21,34 @@ import (
 	"time"

 	"github.com/99designs/gqlgen/graphql/playground"
+	"github.com/pressly/goose/v3"
 	"github.com/weaviate/weaviate-go-client/v5/weaviate"
+	"gorm.io/gorm"
 )

+// runMigrations applies database migrations using goose.
+func runMigrations(gormDB *gorm.DB) error {
+	sqlDB, err := gormDB.DB()
+	if err != nil {
+		return err
+	}
+	if err := goose.SetDialect("postgres"); err != nil {
+		return err
+	}
+	// This is brittle. A better approach might be to use an env var or config.
+	_, b, _, _ := runtime.Caller(0)
+	migrationsDir := filepath.Join(filepath.Dir(b), "../../internal/data/migrations")
+	log.LogInfo("Applying database migrations", log.F("directory", migrationsDir))
+	if err := goose.Up(sqlDB, migrationsDir); err != nil {
+		return err
+	}
+	log.LogInfo("Database migrations applied successfully")
+	return nil
+}
+
 // main is the entry point for the Tercul application.
 func main() {
 	// Load configuration from environment variables
@@ -40,6 +67,10 @@ func main() {
 	}
 	defer db.Close()

+	if err := runMigrations(database); err != nil {
+		log.LogFatal("Failed to apply database migrations", log.F("error", err))
+	}
+
 	// Initialize Weaviate client
 	weaviateCfg := weaviate.Config{
 		Host: config.Cfg.WeaviateHost,
@@ -54,7 +85,7 @@ func main() {
 	searchClient := search.NewWeaviateWrapper(weaviateClient)

 	// Create repositories
-	repos := sql.NewRepositories(database)
+	repos := dbsql.NewRepositories(database)

 	// Create linguistics dependencies
 	analysisRepo := linguistics.NewGORMAnalysisRepository(database)
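Note on the path lookup in runMigrations above: resolving the directory via runtime.Caller ties the binary to the source tree, which the in-code comment already flags as brittle. A minimal sketch of the env-var alternative that comment suggests, reusing imports already present in this file (the MIGRATIONS_DIR name is hypothetical, not something this PR defines):

// resolveMigrationsDir prefers an explicit environment variable and only
// falls back to the source-relative lookup used above.
func resolveMigrationsDir() string {
	// MIGRATIONS_DIR is a hypothetical variable; in this codebase it would
	// more naturally live on config.Cfg next to the other settings.
	if dir := os.Getenv("MIGRATIONS_DIR"); dir != "" {
		return dir
	}
	_, b, _, _ := runtime.Caller(0)
	return filepath.Join(filepath.Dir(b), "../../internal/data/migrations")
}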

View File

@@ -1,143 +0,0 @@
//go:build tools

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	sqlDir := "internal/data/sql"
	domainDir := "internal/domain"

	files, err := ioutil.ReadDir(sqlDir)
	if err != nil {
		fmt.Println("Error reading sql directory:", err)
		return
	}

	for _, file := range files {
		if strings.HasSuffix(file.Name(), "_repository.go") {
			repoName := strings.TrimSuffix(file.Name(), "_repository.go")
			repoInterfaceName := strings.Title(repoName) + "Repository"
			domainPackageName := repoName

			// Create domain directory
			domainRepoDir := filepath.Join(domainDir, domainPackageName)
			if err := os.MkdirAll(domainRepoDir, 0755); err != nil {
				fmt.Printf("Error creating directory %s: %v\n", domainRepoDir, err)
				continue
			}

			// Read the sql repository file
			filePath := filepath.Join(sqlDir, file.Name())
			src, err := ioutil.ReadFile(filePath)
			if err != nil {
				fmt.Printf("Error reading file %s: %v\n", filePath, err)
				continue
			}

			// Parse the file
			fset := token.NewFileSet()
			node, err := parser.ParseFile(fset, "", src, parser.ParseComments)
			if err != nil {
				fmt.Printf("Error parsing file %s: %v\n", filePath, err)
				continue
			}

			// Find public methods
			var methods []string
			ast.Inspect(node, func(n ast.Node) bool {
				if fn, ok := n.(*ast.FuncDecl); ok {
					if fn.Recv != nil && len(fn.Recv.List) > 0 {
						if star, ok := fn.Recv.List[0].Type.(*ast.StarExpr); ok {
							if ident, ok := star.X.(*ast.Ident); ok {
								if strings.HasSuffix(ident.Name, "Repository") && fn.Name.IsExported() {
									methods = append(methods, getFuncSignature(fn))
								}
							}
						}
					}
				}
				return true
			})

			// Create the repo.go file
			repoFilePath := filepath.Join(domainRepoDir, "repo.go")
			repoFileContent := fmt.Sprintf(`package %s

import (
	"context"
	"tercul/internal/domain"
)

// %s defines CRUD methods specific to %s.
type %s interface {
	domain.BaseRepository[domain.%s]
%s
}
`, domainPackageName, repoInterfaceName, strings.Title(repoName), repoInterfaceName, strings.Title(repoName), formatMethods(methods))

			if err := ioutil.WriteFile(repoFilePath, []byte(repoFileContent), 0644); err != nil {
				fmt.Printf("Error writing file %s: %v\n", repoFilePath, err)
			} else {
				fmt.Printf("Created %s\n", repoFilePath)
			}
		}
	}
}

func getFuncSignature(fn *ast.FuncDecl) string {
	params := ""
	for _, p := range fn.Type.Params.List {
		if len(p.Names) > 0 {
			params += p.Names[0].Name + " "
		}
		params += getTypeString(p.Type) + ", "
	}
	if len(params) > 0 {
		params = params[:len(params)-2]
	}

	results := ""
	if fn.Type.Results != nil {
		for _, r := range fn.Type.Results.List {
			results += getTypeString(r.Type) + ", "
		}
		if len(results) > 0 {
			results = "(" + results[:len(results)-2] + ")"
		}
	}

	return fmt.Sprintf("\t%s(%s) %s", fn.Name.Name, params, results)
}

func getTypeString(expr ast.Expr) string {
	switch t := expr.(type) {
	case *ast.Ident:
		return t.Name
	case *ast.SelectorExpr:
		return getTypeString(t.X) + "." + t.Sel.Name
	case *ast.StarExpr:
		return "*" + getTypeString(t.X)
	case *ast.ArrayType:
		return "[]" + getTypeString(t.Elt)
	case *ast.InterfaceType:
		return "interface{}"
	default:
		return ""
	}
}

func formatMethods(methods []string) string {
	if len(methods) == 0 {
		return ""
	}
	return "\n" + strings.Join(methods, "\n")
}

View File

@@ -1,51 +0,0 @@
//go:build tools

package main

import (
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	domainDir := "internal/domain"

	dirs, err := ioutil.ReadDir(domainDir)
	if err != nil {
		fmt.Println("Error reading domain directory:", err)
		return
	}

	for _, dir := range dirs {
		if dir.IsDir() {
			repoFilePath := filepath.Join(domainDir, dir.Name(), "repo.go")
			if _, err := os.Stat(repoFilePath); err == nil {
				content, err := ioutil.ReadFile(repoFilePath)
				if err != nil {
					fmt.Printf("Error reading file %s: %v\n", repoFilePath, err)
					continue
				}

				newContent := strings.Replace(string(content), "domain.Base", "domain.BaseRepository", -1)
				newContent = strings.Replace(newContent, "domain."+strings.Title(dir.Name()), "domain."+strings.Title(dir.Name()), -1)

				// Fix for names with underscore
				newContent = strings.Replace(newContent, "domain.Copyright_claim", "domain.CopyrightClaim", -1)
				newContent = strings.Replace(newContent, "domain.Email_verification", "domain.EmailVerification", -1)
				newContent = strings.Replace(newContent, "domain.Password_reset", "domain.PasswordReset", -1)
				newContent = strings.Replace(newContent, "domain.User_profile", "domain.UserProfile", -1)
				newContent = strings.Replace(newContent, "domain.User_session", "domain.UserSession", -1)

				if err := ioutil.WriteFile(repoFilePath, []byte(newContent), 0644); err != nil {
					fmt.Printf("Error writing file %s: %v\n", repoFilePath, err)
				} else {
					fmt.Printf("Fixed repo %s\n", repoFilePath)
				}
			}
		}
	}
}

View File

@@ -1,42 +0,0 @@
//go:build tools

package main

import (
	"fmt"
	"io/ioutil"
	"path/filepath"
	"strings"
)

func main() {
	sqlDir := "internal/data/sql"

	files, err := ioutil.ReadDir(sqlDir)
	if err != nil {
		fmt.Println("Error reading sql directory:", err)
		return
	}

	for _, file := range files {
		if strings.HasSuffix(file.Name(), "_repository.go") {
			repoName := strings.TrimSuffix(file.Name(), "_repository.go")
			filePath := filepath.Join(sqlDir, file.Name())

			content, err := ioutil.ReadFile(filePath)
			if err != nil {
				fmt.Printf("Error reading file %s: %v\n", filePath, err)
				continue
			}

			newContent := strings.Replace(string(content), `"tercul/internal/domain"`, fmt.Sprintf(`"%s"`, filepath.Join("tercul/internal/domain", repoName)), 1)
			newContent = strings.Replace(newContent, "domain."+strings.Title(repoName)+"Repository", repoName+"."+strings.Title(repoName)+"Repository", 1)

			if err := ioutil.WriteFile(filePath, []byte(newContent), 0644); err != nil {
				fmt.Printf("Error writing file %s: %v\n", filePath, err)
			} else {
				fmt.Printf("Fixed imports in %s\n", filePath)
			}
		}
	}
}

go.mod
View File

@@ -25,6 +25,7 @@ require (
 )

 require (
+	ariga.io/atlas-go-sdk v0.5.1 // indirect
 	filippo.io/edwards25519 v1.1.0 // indirect
 	github.com/ClickHouse/ch-go v0.67.0 // indirect
 	github.com/ClickHouse/clickhouse-go/v2 v2.40.1 // indirect

go.sum
View File

@@ -1,3 +1,5 @@
+ariga.io/atlas-go-sdk v0.5.1 h1:I3iRshdwSODVWwMS4zvXObnfCQrEOY8BLRwynJQA+qE=
+ariga.io/atlas-go-sdk v0.5.1/go.mod h1:UZXG++2NQCDAetk+oIitYIGpL/VsBVCt4GXbtWBA/GY=
 cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=

View File

@@ -0,0 +1,97 @@
package graphql_test

import (
	"bytes"
	"encoding/json"
	"net/http"
	"net/http/httptest"
)

// GraphQLRequest represents a GraphQL request
type GraphQLRequest struct {
	Query         string                 `json:"query"`
	OperationName string                 `json:"operationName,omitempty"`
	Variables     map[string]interface{} `json:"variables,omitempty"`
}

// GraphQLResponse represents a generic GraphQL response
type GraphQLResponse[T any] struct {
	Data   T                        `json:"data,omitempty"`
	Errors []map[string]interface{} `json:"errors,omitempty"`
}

// graphQLTestServer defines the interface for a test server that can execute GraphQL requests.
type graphQLTestServer interface {
	getURL() string
	getClient() *http.Client
}

// executeGraphQL executes a GraphQL query against a test server and decodes the response.
func executeGraphQL[T any](s graphQLTestServer, query string, variables map[string]interface{}, token *string) (*GraphQLResponse[T], error) {
	request := GraphQLRequest{
		Query:     query,
		Variables: variables,
	}

	requestBody, err := json.Marshal(request)
	if err != nil {
		return nil, err
	}

	req, err := http.NewRequest("POST", s.getURL(), bytes.NewBuffer(requestBody))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	if token != nil {
		req.Header.Set("Authorization", "Bearer "+*token)
	}

	resp, err := s.getClient().Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	var response GraphQLResponse[T]
	if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
		return nil, err
	}

	return &response, nil
}

// Implement the graphQLTestServer interface for GraphQLIntegrationSuite
func (s *GraphQLIntegrationSuite) getURL() string {
	return s.server.URL
}

func (s *GraphQLIntegrationSuite) getClient() *http.Client {
	return s.client
}

// MockGraphQLServer provides a mock server for unit tests that don't require the full integration suite.
type MockGraphQLServer struct {
	Server *httptest.Server
	Client *http.Client
}

func NewMockGraphQLServer(h http.Handler) *MockGraphQLServer {
	ts := httptest.NewServer(h)
	return &MockGraphQLServer{
		Server: ts,
		Client: ts.Client(),
	}
}

func (s *MockGraphQLServer) getURL() string {
	return s.Server.URL
}

func (s *MockGraphQLServer) getClient() *http.Client {
	return s.Client
}

func (s *MockGraphQLServer) Close() {
	s.Server.Close()
}
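For illustration, a unit test could drive the shared helper through MockGraphQLServer like this; the gqlgen handler and the health query are placeholders, not part of this change:

func TestHealthQuery(t *testing.T) {
	// handler is a placeholder for a real gqlgen HTTP handler.
	srv := NewMockGraphQLServer(handler)
	defer srv.Close()

	type healthData struct {
		Health string `json:"health"`
	}

	resp, err := executeGraphQL[healthData](srv, "query { health }", nil, nil)
	if err != nil {
		t.Fatal(err)
	}
	if len(resp.Errors) > 0 {
		t.Fatalf("unexpected GraphQL errors: %v", resp.Errors)
	}
}

Because both GraphQLIntegrationSuite and MockGraphQLServer satisfy graphQLTestServer, the same generic helper now serves the integration suite and the new unit tests.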

View File

@ -1,9 +1,7 @@
package graphql_test package graphql_test
import ( import (
"bytes"
"context" "context"
"encoding/json"
"fmt" "fmt"
"net/http" "net/http"
"net/http/httptest" "net/http/httptest"
@ -26,19 +24,6 @@ import (
"github.com/stretchr/testify/suite" "github.com/stretchr/testify/suite"
) )
// GraphQLRequest represents a GraphQL request
type GraphQLRequest struct {
Query string `json:"query"`
OperationName string `json:"operationName,omitempty"`
Variables map[string]interface{} `json:"variables,omitempty"`
}
// GraphQLResponse represents a generic GraphQL response
type GraphQLResponse[T any] struct {
Data T `json:"data,omitempty"`
Errors []map[string]interface{} `json:"errors,omitempty"`
}
// GraphQLIntegrationSuite is a test suite for GraphQL integration tests // GraphQLIntegrationSuite is a test suite for GraphQL integration tests
type GraphQLIntegrationSuite struct { type GraphQLIntegrationSuite struct {
testutil.IntegrationTestSuite testutil.IntegrationTestSuite
@ -103,47 +88,6 @@ func (s *GraphQLIntegrationSuite) SetupTest() {
s.DB.Exec("DELETE FROM trendings") s.DB.Exec("DELETE FROM trendings")
} }
// executeGraphQL executes a GraphQL query and decodes the response into a generic type
func executeGraphQL[T any](s *GraphQLIntegrationSuite, query string, variables map[string]interface{}, token *string) (*GraphQLResponse[T], error) {
// Create the request
request := GraphQLRequest{
Query: query,
Variables: variables,
}
// Marshal the request to JSON
requestBody, err := json.Marshal(request)
if err != nil {
return nil, err
}
// Create an HTTP request
req, err := http.NewRequest("POST", s.server.URL, bytes.NewBuffer(requestBody))
if err != nil {
return nil, err
}
req.Header.Set("Content-Type", "application/json")
if token != nil {
req.Header.Set("Authorization", "Bearer "+*token)
}
// Execute the request
resp, err := s.client.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
// Parse the response
var response GraphQLResponse[T]
err = json.NewDecoder(resp.Body).Decode(&response)
if err != nil {
return nil, err
}
return &response, nil
}
type GetWorkResponse struct { type GetWorkResponse struct {
Work struct { Work struct {
ID string `json:"id"` ID string `json:"id"`
@ -1048,7 +992,7 @@ func (s *GraphQLIntegrationSuite) TestTrendingWorksQuery() {
func (s *GraphQLIntegrationSuite) TestCollectionMutations() { func (s *GraphQLIntegrationSuite) TestCollectionMutations() {
// Create users for testing authorization // Create users for testing authorization
_, ownerToken := s.CreateAuthenticatedUser("collectionowner", "owner@test.com", domain.UserRoleReader) owner, ownerToken := s.CreateAuthenticatedUser("collectionowner", "owner@test.com", domain.UserRoleReader)
otherUser, otherToken := s.CreateAuthenticatedUser("otheruser", "other@test.com", domain.UserRoleReader) otherUser, otherToken := s.CreateAuthenticatedUser("otheruser", "other@test.com", domain.UserRoleReader)
_ = otherUser _ = otherUser
@ -1175,6 +1119,7 @@ func (s *GraphQLIntegrationSuite) TestCollectionMutations() {
err = s.App.Collection.Commands.AddWorkToCollection(context.Background(), collection.AddWorkToCollectionInput{ err = s.App.Collection.Commands.AddWorkToCollection(context.Background(), collection.AddWorkToCollectionInput{
CollectionID: uint(collectionIDInt), CollectionID: uint(collectionIDInt),
WorkID: work.ID, WorkID: work.ID,
UserID: owner.ID,
}) })
s.Require().NoError(err) s.Require().NoError(err)

View File

@@ -0,0 +1,120 @@
package graphql_test

import (
	"context"
	"fmt"
	"strconv"
	"testing"

	"tercul/internal/adapters/graphql"
	"tercul/internal/adapters/graphql/model"
	"tercul/internal/app"
	"tercul/internal/app/analytics"
	"tercul/internal/app/like"
	"tercul/internal/domain"
	platform_auth "tercul/internal/platform/auth"
	"tercul/internal/testutil"

	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/suite"
)

// LikeResolversUnitSuite is a unit test suite for the like resolvers.
type LikeResolversUnitSuite struct {
	suite.Suite
	resolver         *graphql.Resolver
	mockLikeRepo     *testutil.MockLikeRepository
	mockWorkRepo     *testutil.MockWorkRepository
	mockAnalyticsSvc *testutil.MockAnalyticsService
}

func (s *LikeResolversUnitSuite) SetupTest() {
	// 1. Create mock repositories
	s.mockLikeRepo = new(testutil.MockLikeRepository)
	s.mockWorkRepo = new(testutil.MockWorkRepository)
	s.mockAnalyticsSvc = new(testutil.MockAnalyticsService)

	// 2. Create real services with mock repositories
	likeService := like.NewService(s.mockLikeRepo)
	analyticsService := analytics.NewService(s.mockAnalyticsSvc, nil, nil, nil, nil)

	// 3. Create the resolver with the services
	s.resolver = &graphql.Resolver{
		App: &app.Application{
			Like:      likeService,
			Analytics: analyticsService,
		},
	}
}

func TestLikeResolversUnitSuite(t *testing.T) {
	suite.Run(t, new(LikeResolversUnitSuite))
}

func (s *LikeResolversUnitSuite) TestCreateLike() {
	// 1. Setup
	workIDStr := "1"
	workIDUint64, _ := strconv.ParseUint(workIDStr, 10, 32)
	workIDUint := uint(workIDUint64)
	userID := uint(123)

	// Mock repository responses
	s.mockWorkRepo.On("Exists", mock.Anything, workIDUint).Return(true, nil)
	s.mockLikeRepo.On("Create", mock.Anything, mock.AnythingOfType("*domain.Like")).Run(func(args mock.Arguments) {
		arg := args.Get(1).(*domain.Like)
		arg.ID = 1 // Simulate database assigning an ID
	}).Return(nil)
	s.mockAnalyticsSvc.On("IncrementWorkCounter", mock.Anything, workIDUint, "likes", 1).Return(nil)

	// Create a context with an authenticated user
	ctx := platform_auth.ContextWithUserID(context.Background(), userID)

	// 2. Execution
	likeInput := model.LikeInput{
		WorkID: &workIDStr,
	}
	createdLike, err := s.resolver.Mutation().CreateLike(ctx, likeInput)

	// 3. Assertions
	s.Require().NoError(err)
	s.Require().NotNil(createdLike)
	s.Equal("1", createdLike.ID)
	s.Equal(fmt.Sprintf("%d", userID), createdLike.User.ID)

	// Verify that the repository's Create method was called
	s.mockLikeRepo.AssertCalled(s.T(), "Create", mock.Anything, mock.MatchedBy(func(l *domain.Like) bool {
		return *l.WorkID == workIDUint && l.UserID == userID
	}))

	// Verify that analytics was called
	s.mockAnalyticsSvc.AssertCalled(s.T(), "IncrementWorkCounter", mock.Anything, workIDUint, "likes", 1)
}

func (s *LikeResolversUnitSuite) TestDeleteLike() {
	// 1. Setup
	likeIDStr := "1"
	likeIDUint, _ := strconv.ParseUint(likeIDStr, 10, 32)
	userID := uint(123)

	// Mock the repository response for the initial 'find'
	s.mockLikeRepo.On("GetByID", mock.Anything, uint(likeIDUint)).Return(&domain.Like{
		BaseModel: domain.BaseModel{ID: uint(likeIDUint)},
		UserID:    userID,
	}, nil)

	// Mock the repository response for the 'delete'
	s.mockLikeRepo.On("Delete", mock.Anything, uint(likeIDUint)).Return(nil)

	// Create a context with an authenticated user
	ctx := platform_auth.ContextWithUserID(context.Background(), userID)

	// 2. Execution
	deleted, err := s.resolver.Mutation().DeleteLike(ctx, likeIDStr)

	// 3. Assertions
	s.Require().NoError(err)
	s.True(deleted)

	// Verify that the repository's Delete method was called
	s.mockLikeRepo.AssertCalled(s.T(), "Delete", mock.Anything, uint(likeIDUint))
}

View File

@@ -395,24 +395,11 @@ func (r *mutationResolver) UpdateCollection(ctx context.Context, id string, inpu
 		return nil, fmt.Errorf("invalid collection ID: %v", err)
 	}

-	// Fetch the existing collection
-	collectionModel, err := r.App.Collection.Queries.Collection(ctx, uint(collectionID))
-	if err != nil {
-		return nil, err
-	}
-	if collectionModel == nil {
-		return nil, fmt.Errorf("collection not found")
-	}
-
-	// Check ownership
-	if collectionModel.UserID != userID {
-		return nil, fmt.Errorf("unauthorized")
-	}
-
 	// Call collection service
 	updateInput := collection.UpdateCollectionInput{
 		ID:     uint(collectionID),
 		Name:   input.Name,
+		UserID: userID,
 	}

 	if input.Description != nil {
 		updateInput.Description = *input.Description
@@ -447,22 +434,8 @@ func (r *mutationResolver) DeleteCollection(ctx context.Context, id string) (boo
 		return false, fmt.Errorf("invalid collection ID: %v", err)
 	}

-	// Fetch the existing collection
-	collection, err := r.App.Collection.Queries.Collection(ctx, uint(collectionID))
-	if err != nil {
-		return false, err
-	}
-	if collection == nil {
-		return false, fmt.Errorf("collection not found")
-	}
-
-	// Check ownership
-	if collection.UserID != userID {
-		return false, fmt.Errorf("unauthorized")
-	}
-
-	// Call collection repository
-	err = r.App.Collection.Commands.DeleteCollection(ctx, uint(collectionID))
+	// Call collection service
+	err = r.App.Collection.Commands.DeleteCollection(ctx, uint(collectionID), userID)
 	if err != nil {
 		return false, err
 	}
@@ -488,24 +461,11 @@ func (r *mutationResolver) AddWorkToCollection(ctx context.Context, collectionID
 		return nil, fmt.Errorf("invalid work ID: %v", err)
 	}

-	// Fetch the existing collection
-	collectionModel, err := r.App.Collection.Queries.Collection(ctx, uint(collID))
-	if err != nil {
-		return nil, err
-	}
-	if collectionModel == nil {
-		return nil, fmt.Errorf("collection not found")
-	}
-
-	// Check ownership
-	if collectionModel.UserID != userID {
-		return nil, fmt.Errorf("unauthorized")
-	}
-
 	// Add work to collection
 	addInput := collection.AddWorkToCollectionInput{
 		CollectionID: uint(collID),
 		WorkID:       uint(wID),
+		UserID:       userID,
 	}
 	err = r.App.Collection.Commands.AddWorkToCollection(ctx, addInput)
 	if err != nil {
@@ -544,24 +504,11 @@ func (r *mutationResolver) RemoveWorkFromCollection(ctx context.Context, collect
 		return nil, fmt.Errorf("invalid work ID: %v", err)
 	}

-	// Fetch the existing collection
-	collectionModel, err := r.App.Collection.Queries.Collection(ctx, uint(collID))
-	if err != nil {
-		return nil, err
-	}
-	if collectionModel == nil {
-		return nil, fmt.Errorf("collection not found")
-	}
-
-	// Check ownership
-	if collectionModel.UserID != userID {
-		return nil, fmt.Errorf("unauthorized")
-	}
-
 	// Remove work from collection
 	removeInput := collection.RemoveWorkFromCollectionInput{
 		CollectionID: uint(collID),
 		WorkID:       uint(wID),
+		UserID:       userID,
 	}
 	err = r.App.Collection.Commands.RemoveWorkFromCollection(ctx, removeInput)
 	if err != nil {
@@ -1325,16 +1272,27 @@ type queryResolver struct{ *Resolver }
 // it when you're done.
 // - You have helper methods in this file. Move them out to keep these resolver files clean.
 /*
-func (r *Resolver) Work() WorkResolver { return &workResolver{r} }
-func (r *Resolver) Translation() TranslationResolver { return &translationResolver{r} }
-type workResolver struct{ *Resolver }
-type translationResolver struct{ *Resolver }
-func toInt32(i int64) *int {
-	val := int(i)
-	return &val
-}
-func toInt(i int) *int {
-	return &i
-}
+func (r *translationResolver) Stats(ctx context.Context, obj *model.Translation) (*model.TranslationStats, error) {
+	translationID, err := strconv.ParseUint(obj.ID, 10, 32)
+	if err != nil {
+		return nil, fmt.Errorf("invalid translation ID: %v", err)
+	}
+
+	stats, err := r.App.Analytics.GetOrCreateTranslationStats(ctx, uint(translationID))
+	if err != nil {
+		return nil, err
+	}
+
+	// Convert domain model to GraphQL model
+	return &model.TranslationStats{
+		ID:          fmt.Sprintf("%d", stats.ID),
+		Views:       toInt32(stats.Views),
+		Likes:       toInt32(stats.Likes),
+		Comments:    toInt32(stats.Comments),
+		Shares:      toInt32(stats.Shares),
+		ReadingTime: toInt32(int64(stats.ReadingTime)),
+		Sentiment:   &stats.Sentiment,
+	}, nil
+}

 func (r *workResolver) Stats(ctx context.Context, obj *model.Work) (*model.WorkStats, error) {
 	workID, err := strconv.ParseUint(obj.ID, 10, 32)
@@ -1356,31 +1314,13 @@ func (r *workResolver) Stats(ctx context.Context, obj *model.Work) (*model.WorkS
 		Bookmarks:        toInt32(stats.Bookmarks),
 		Shares:           toInt32(stats.Shares),
 		TranslationCount: toInt32(stats.TranslationCount),
-		ReadingTime:      toInt(stats.ReadingTime),
+		ReadingTime:      toInt32(int64(stats.ReadingTime)),
 		Complexity:       &stats.Complexity,
 		Sentiment:        &stats.Sentiment,
 	}, nil
 }

-func (r *translationResolver) Stats(ctx context.Context, obj *model.Translation) (*model.TranslationStats, error) {
-	translationID, err := strconv.ParseUint(obj.ID, 10, 32)
-	if err != nil {
-		return nil, fmt.Errorf("invalid translation ID: %v", err)
-	}
-
-	stats, err := r.App.Analytics.GetOrCreateTranslationStats(ctx, uint(translationID))
-	if err != nil {
-		return nil, err
-	}
-
-	// Convert domain model to GraphQL model
-	return &model.TranslationStats{
-		ID:          fmt.Sprintf("%d", stats.ID),
-		Views:       toInt32(stats.Views),
-		Likes:       toInt32(stats.Likes),
-		Comments:    toInt32(stats.Comments),
-		Shares:      toInt32(stats.Shares),
-		ReadingTime: toInt(stats.ReadingTime),
-		Sentiment:   &stats.Sentiment,
-	}, nil
-}
+func (r *Resolver) Translation() TranslationResolver { return &translationResolver{r} }
+func (r *Resolver) Work() WorkResolver { return &workResolver{r} }
+type translationResolver struct{ *Resolver }
+type workResolver struct{ *Resolver }
 */

View File

@@ -2,6 +2,7 @@ package collection
 import (
 	"context"
+	"fmt"
 	"tercul/internal/domain"
 )
@@ -47,6 +48,7 @@
 	Description   string
 	IsPublic      bool
 	CoverImageURL string
+	UserID        uint
 }

 // UpdateCollection updates an existing collection.
@@ -55,6 +57,9 @@ func (c *CollectionCommands) UpdateCollection(ctx context.Context, input UpdateC
 	if err != nil {
 		return nil, err
 	}
+	if collection.UserID != input.UserID {
+		return nil, fmt.Errorf("unauthorized: user %d cannot update collection %d", input.UserID, input.ID)
+	}
 	collection.Name = input.Name
 	collection.Description = input.Description
 	collection.IsPublic = input.IsPublic
@@ -67,7 +72,14 @@ func (c *CollectionCommands) UpdateCollection(ctx context.Context, input UpdateC
 }

 // DeleteCollection deletes a collection by ID.
-func (c *CollectionCommands) DeleteCollection(ctx context.Context, id uint) error {
+func (c *CollectionCommands) DeleteCollection(ctx context.Context, id uint, userID uint) error {
+	collection, err := c.repo.GetByID(ctx, id)
+	if err != nil {
+		return err
+	}
+	if collection.UserID != userID {
+		return fmt.Errorf("unauthorized: user %d cannot delete collection %d", userID, id)
+	}
 	return c.repo.Delete(ctx, id)
 }

@@ -75,10 +87,18 @@ func (c *CollectionCommands) DeleteCollection(ctx context.Context, id uint) erro
 type AddWorkToCollectionInput struct {
 	CollectionID uint
 	WorkID       uint
+	UserID       uint
 }

 // AddWorkToCollection adds a work to a collection.
 func (c *CollectionCommands) AddWorkToCollection(ctx context.Context, input AddWorkToCollectionInput) error {
+	collection, err := c.repo.GetByID(ctx, input.CollectionID)
+	if err != nil {
+		return err
+	}
+	if collection.UserID != input.UserID {
+		return fmt.Errorf("unauthorized: user %d cannot add work to collection %d", input.UserID, input.CollectionID)
+	}
 	return c.repo.AddWorkToCollection(ctx, input.CollectionID, input.WorkID)
 }

@@ -86,9 +106,17 @@ func (c *CollectionCommands) AddWorkToCollection(ctx context.Context, input AddW
 type RemoveWorkFromCollectionInput struct {
 	CollectionID uint
 	WorkID       uint
+	UserID       uint
 }

 // RemoveWorkFromCollection removes a work from a collection.
 func (c *CollectionCommands) RemoveWorkFromCollection(ctx context.Context, input RemoveWorkFromCollectionInput) error {
+	collection, err := c.repo.GetByID(ctx, input.CollectionID)
+	if err != nil {
+		return err
+	}
+	if collection.UserID != input.UserID {
+		return fmt.Errorf("unauthorized: user %d cannot remove work from collection %d", input.UserID, input.CollectionID)
+	}
 	return c.repo.RemoveWorkFromCollection(ctx, input.CollectionID, input.WorkID)
 }
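The load-and-compare guard above is now repeated in four commands. If it keeps growing, it could be factored into a single helper; a sketch under the assumption that the repository exposes GetByID as used above (collectionGetter and requireOwner are hypothetical names, not part of this PR):

// collectionGetter is an illustrative stand-in for the slice of the real
// repository interface that the guard needs.
type collectionGetter interface {
	GetByID(ctx context.Context, id uint) (*domain.Collection, error)
}

// requireOwner loads the collection and refuses the operation unless the
// caller owns it, mirroring the checks in the commands above.
func requireOwner(ctx context.Context, repo collectionGetter, collectionID, userID uint, action string) error {
	collection, err := repo.GetByID(ctx, collectionID)
	if err != nil {
		return err
	}
	if collection.UserID != userID {
		return fmt.Errorf("unauthorized: user %d cannot %s collection %d", userID, action, collectionID)
	}
	return nil
}

Each command body would then shrink to a requireOwner call followed by the repository operation.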

View File

@@ -0,0 +1,185 @@
-- +goose Up
CREATE TABLE "countries" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"language" text DEFAULT 'multi',"slug" text,"name" text NOT NULL,"code" text NOT NULL,"phone_code" text,"currency" text,"continent" text);
CREATE TABLE "cities" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"language" text DEFAULT 'multi',"slug" text,"name" text NOT NULL,"country_id" bigint,CONSTRAINT "fk_countries_cities" FOREIGN KEY ("country_id") REFERENCES "countries"("id"));
CREATE TABLE "addresses" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"street" text,"street_number" text,"postal_code" text,"country_id" bigint,"city_id" bigint,"latitude" real,"longitude" real,CONSTRAINT "fk_cities_addresses" FOREIGN KEY ("city_id") REFERENCES "cities"("id"),CONSTRAINT "fk_countries_addresses" FOREIGN KEY ("country_id") REFERENCES "countries"("id"));
CREATE TABLE "users" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"username" text NOT NULL,"email" text NOT NULL,"password" text NOT NULL,"first_name" text,"last_name" text,"display_name" text,"bio" text,"avatar_url" text,"role" text DEFAULT 'reader',"last_login_at" timestamptz,"verified" boolean DEFAULT false,"active" boolean DEFAULT true,"country_id" bigint,"city_id" bigint,"address_id" bigint,CONSTRAINT "fk_users_city" FOREIGN KEY ("city_id") REFERENCES "cities"("id"),CONSTRAINT "fk_users_address" FOREIGN KEY ("address_id") REFERENCES "addresses"("id"),CONSTRAINT "fk_users_country" FOREIGN KEY ("country_id") REFERENCES "countries"("id"),CONSTRAINT "uni_users_username" UNIQUE ("username"),CONSTRAINT "uni_users_email" UNIQUE ("email"));
CREATE TABLE "user_sessions" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"user_id" bigint,"token" text NOT NULL,"ip" text,"user_agent" text,"expires_at" timestamptz NOT NULL,CONSTRAINT "fk_user_sessions_user" FOREIGN KEY ("user_id") REFERENCES "users"("id"));
CREATE TABLE "password_resets" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"user_id" bigint,"token" text NOT NULL,"expires_at" timestamptz NOT NULL,"used" boolean DEFAULT false,CONSTRAINT "fk_password_resets_user" FOREIGN KEY ("user_id") REFERENCES "users"("id"));
CREATE TABLE "email_verifications" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"user_id" bigint,"token" text NOT NULL,"expires_at" timestamptz NOT NULL,"used" boolean DEFAULT false,CONSTRAINT "fk_email_verifications_user" FOREIGN KEY ("user_id") REFERENCES "users"("id"));
CREATE TABLE "works" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"language" text DEFAULT 'multi',"slug" text,"title" text NOT NULL,"description" text,"type" text DEFAULT 'other',"status" text DEFAULT 'draft',"published_at" timestamptz);
CREATE TABLE "work_copyrights" ("work_id" bigint,"copyright_id" bigint,"created_at" timestamptz,PRIMARY KEY ("work_id","copyright_id"));
CREATE TABLE "categories" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"name" text NOT NULL,"description" text,"parent_id" bigint,"path" text,"slug" text,CONSTRAINT "fk_categories_children" FOREIGN KEY ("parent_id") REFERENCES "categories"("id"));
CREATE TABLE "work_categories" ("category_id" bigint,"work_id" bigint,PRIMARY KEY ("category_id","work_id"),CONSTRAINT "fk_work_categories_category" FOREIGN KEY ("category_id") REFERENCES "categories"("id"),CONSTRAINT "fk_work_categories_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "tags" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"name" text NOT NULL,"description" text,"slug" text);
CREATE TABLE "work_tags" ("tag_id" bigint,"work_id" bigint,PRIMARY KEY ("tag_id","work_id"),CONSTRAINT "fk_work_tags_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"),CONSTRAINT "fk_work_tags_tag" FOREIGN KEY ("tag_id") REFERENCES "tags"("id"));
CREATE TABLE "places" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"language" text DEFAULT 'multi',"slug" text,"name" text NOT NULL,"description" text,"latitude" real,"longitude" real,"country_id" bigint,"city_id" bigint,CONSTRAINT "fk_cities_places" FOREIGN KEY ("city_id") REFERENCES "cities"("id"),CONSTRAINT "fk_countries_places" FOREIGN KEY ("country_id") REFERENCES "countries"("id"));
CREATE TABLE "authors" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"language" text DEFAULT 'multi',"slug" text,"name" text NOT NULL,"status" text DEFAULT 'active',"birth_date" timestamptz,"death_date" timestamptz,"country_id" bigint,"city_id" bigint,"place_id" bigint,"address_id" bigint,CONSTRAINT "fk_authors_country" FOREIGN KEY ("country_id") REFERENCES "countries"("id"),CONSTRAINT "fk_authors_city" FOREIGN KEY ("city_id") REFERENCES "cities"("id"),CONSTRAINT "fk_authors_place" FOREIGN KEY ("place_id") REFERENCES "places"("id"),CONSTRAINT "fk_authors_address" FOREIGN KEY ("address_id") REFERENCES "addresses"("id"));
CREATE TABLE "work_authors" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"work_id" bigint,"author_id" bigint,"role" text DEFAULT 'author',"ordinal" integer DEFAULT 0,CONSTRAINT "fk_work_authors_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"),CONSTRAINT "fk_work_authors_author" FOREIGN KEY ("author_id") REFERENCES "authors"("id"));
CREATE TABLE "work_monetizations" ("work_id" bigint,"monetization_id" bigint,"created_at" timestamptz,PRIMARY KEY ("work_id","monetization_id"));
CREATE TABLE "publishers" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"language" text DEFAULT 'multi',"slug" text,"name" text NOT NULL,"description" text,"status" text DEFAULT 'active',"country_id" bigint,CONSTRAINT "fk_publishers_country" FOREIGN KEY ("country_id") REFERENCES "countries"("id"));
CREATE TABLE "books" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"language" text DEFAULT 'multi',"slug" text,"title" text NOT NULL,"description" text,"isbn" text,"format" text DEFAULT 'paperback',"status" text DEFAULT 'draft',"published_at" timestamptz,"publisher_id" bigint,CONSTRAINT "fk_publishers_books" FOREIGN KEY ("publisher_id") REFERENCES "publishers"("id"));
CREATE TABLE "book_authors" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"book_id" bigint,"author_id" bigint,"role" text DEFAULT 'author',"ordinal" integer DEFAULT 0,CONSTRAINT "fk_book_authors_book" FOREIGN KEY ("book_id") REFERENCES "books"("id"),CONSTRAINT "fk_book_authors_author" FOREIGN KEY ("author_id") REFERENCES "authors"("id"));
CREATE TABLE "author_monetizations" ("author_id" bigint,"monetization_id" bigint,"created_at" timestamptz,PRIMARY KEY ("author_id","monetization_id"));
CREATE TABLE "author_copyrights" ("author_id" bigint,"copyright_id" bigint,"created_at" timestamptz,PRIMARY KEY ("author_id","copyright_id"));
CREATE TABLE "book_works" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"book_id" bigint,"work_id" bigint,"order" integer DEFAULT 0,CONSTRAINT "fk_book_works_book" FOREIGN KEY ("book_id") REFERENCES "books"("id"),CONSTRAINT "fk_book_works_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "book_monetizations" ("book_id" bigint,"monetization_id" bigint,"created_at" timestamptz,PRIMARY KEY ("book_id","monetization_id"));
CREATE TABLE "book_copyrights" ("book_id" bigint,"copyright_id" bigint,"created_at" timestamptz,PRIMARY KEY ("book_id","copyright_id"));
CREATE TABLE "publisher_monetizations" ("publisher_id" bigint,"monetization_id" bigint,"created_at" timestamptz,PRIMARY KEY ("publisher_id","monetization_id"));
CREATE TABLE "publisher_copyrights" ("publisher_id" bigint,"copyright_id" bigint,"created_at" timestamptz,PRIMARY KEY ("publisher_id","copyright_id"));
CREATE TABLE "sources" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"language" text DEFAULT 'multi',"slug" text,"name" text NOT NULL,"description" text,"url" text,"status" text DEFAULT 'active');
CREATE TABLE "source_monetizations" ("source_id" bigint,"monetization_id" bigint,"created_at" timestamptz,PRIMARY KEY ("source_id","monetization_id"));
CREATE TABLE "source_copyrights" ("source_id" bigint,"copyright_id" bigint,"created_at" timestamptz,PRIMARY KEY ("source_id","copyright_id"));
CREATE TABLE "work_sources" ("source_id" bigint,"work_id" bigint,PRIMARY KEY ("source_id","work_id"),CONSTRAINT "fk_work_sources_source" FOREIGN KEY ("source_id") REFERENCES "sources"("id"),CONSTRAINT "fk_work_sources_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "editions" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"title" text NOT NULL,"description" text,"isbn" text,"version" text,"format" text DEFAULT 'paperback',"status" text DEFAULT 'draft',"published_at" timestamptz,"book_id" bigint,CONSTRAINT "fk_editions_book" FOREIGN KEY ("book_id") REFERENCES "books"("id"));
CREATE TABLE "translations" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"title" text NOT NULL,"content" text,"description" text,"language" text NOT NULL,"status" text DEFAULT 'draft',"published_at" timestamptz,"translatable_id" bigint NOT NULL,"translatable_type" text NOT NULL,"translator_id" bigint,"is_original_language" boolean DEFAULT false,"audio_url" text,"date_translated" timestamptz,CONSTRAINT "fk_users_translations" FOREIGN KEY ("translator_id") REFERENCES "users"("id"));
CREATE TABLE "text_blocks" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"work_id" bigint,"translation_id" bigint,"index" bigint,"type" text,"start_offset" integer DEFAULT 0,"end_offset" integer DEFAULT 0,"text" text,CONSTRAINT "fk_text_blocks_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"),CONSTRAINT "fk_text_blocks_translation" FOREIGN KEY ("translation_id") REFERENCES "translations"("id"));
CREATE TABLE "comments" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"text" text NOT NULL,"user_id" bigint,"work_id" bigint,"translation_id" bigint,"line_number" bigint,"text_block_id" bigint,"parent_id" bigint,CONSTRAINT "fk_comments_translation" FOREIGN KEY ("translation_id") REFERENCES "translations"("id"),CONSTRAINT "fk_comments_text_block" FOREIGN KEY ("text_block_id") REFERENCES "text_blocks"("id"),CONSTRAINT "fk_comments_children" FOREIGN KEY ("parent_id") REFERENCES "comments"("id"),CONSTRAINT "fk_users_comments" FOREIGN KEY ("user_id") REFERENCES "users"("id"),CONSTRAINT "fk_comments_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "likes" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"user_id" bigint,"work_id" bigint,"translation_id" bigint,"comment_id" bigint,CONSTRAINT "fk_users_likes" FOREIGN KEY ("user_id") REFERENCES "users"("id"),CONSTRAINT "fk_likes_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"),CONSTRAINT "fk_likes_translation" FOREIGN KEY ("translation_id") REFERENCES "translations"("id"),CONSTRAINT "fk_comments_likes" FOREIGN KEY ("comment_id") REFERENCES "comments"("id"));
CREATE TABLE "bookmarks" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"name" text,"user_id" bigint,"work_id" bigint,"notes" text,"last_read_at" timestamptz,"progress" integer DEFAULT 0,CONSTRAINT "fk_bookmarks_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"),CONSTRAINT "fk_users_bookmarks" FOREIGN KEY ("user_id") REFERENCES "users"("id"));
CREATE TABLE "collections" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"language" text DEFAULT 'multi',"slug" text,"name" text NOT NULL,"description" text,"user_id" bigint,"is_public" boolean DEFAULT true,"cover_image_url" text,CONSTRAINT "fk_users_collections" FOREIGN KEY ("user_id") REFERENCES "users"("id"));
CREATE TABLE "collection_works" ("collection_id" bigint,"work_id" bigint,PRIMARY KEY ("collection_id","work_id"),CONSTRAINT "fk_collection_works_collection" FOREIGN KEY ("collection_id") REFERENCES "collections"("id"),CONSTRAINT "fk_collection_works_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "contributions" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"name" text NOT NULL,"status" text DEFAULT 'draft',"user_id" bigint,"work_id" bigint,"translation_id" bigint,"reviewer_id" bigint,"reviewed_at" timestamptz,"feedback" text,CONSTRAINT "fk_contributions_reviewer" FOREIGN KEY ("reviewer_id") REFERENCES "users"("id"),CONSTRAINT "fk_users_contributions" FOREIGN KEY ("user_id") REFERENCES "users"("id"),CONSTRAINT "fk_contributions_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"),CONSTRAINT "fk_contributions_translation" FOREIGN KEY ("translation_id") REFERENCES "translations"("id"));
CREATE TABLE "languages" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"code" text NOT NULL,"name" text NOT NULL,"script" text,"direction" text);
CREATE TABLE "series" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"name" text NOT NULL,"description" text);
CREATE TABLE "work_series" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"work_id" bigint,"series_id" bigint,"number_in_series" integer DEFAULT 0,CONSTRAINT "fk_work_series_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"),CONSTRAINT "fk_work_series_series" FOREIGN KEY ("series_id") REFERENCES "series"("id"));
CREATE TABLE "translation_fields" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"translation_id" bigint,"field_name" text NOT NULL,"field_value" text NOT NULL,"language" text NOT NULL,CONSTRAINT "fk_translation_fields_translation" FOREIGN KEY ("translation_id") REFERENCES "translations"("id"));
CREATE TABLE "copyrights" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"identificator" text NOT NULL,"name" text NOT NULL,"description" text,"license" text,"start_date" timestamptz,"end_date" timestamptz);
CREATE TABLE "copyright_translations" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"copyright_id" bigint,"language_code" text NOT NULL,"message" text NOT NULL,"description" text,CONSTRAINT "fk_copyrights_translations" FOREIGN KEY ("copyright_id") REFERENCES "copyrights"("id"));
CREATE TABLE "copyright_claims" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"details" text NOT NULL,"status" text DEFAULT 'pending',"claim_date" timestamptz NOT NULL,"resolution" text,"resolved_at" timestamptz,"user_id" bigint,CONSTRAINT "fk_copyright_claims_user" FOREIGN KEY ("user_id") REFERENCES "users"("id"));
CREATE TABLE "monetizations" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"amount" decimal(10,2) DEFAULT 0,"currency" text DEFAULT 'USD',"type" text,"status" text DEFAULT 'active',"start_date" timestamptz,"end_date" timestamptz,"language" text NOT NULL);
CREATE TABLE "licenses" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"spdx_identifier" text,"name" text NOT NULL,"url" text,"description" text);
CREATE TABLE "moderation_flags" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"target_type" text NOT NULL,"target_id" bigint NOT NULL,"reason" text,"status" text DEFAULT 'open',"reviewer_id" bigint,"notes" text,CONSTRAINT "fk_moderation_flags_reviewer" FOREIGN KEY ("reviewer_id") REFERENCES "users"("id"));
CREATE TABLE "audit_logs" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"actor_id" bigint,"action" text NOT NULL,"entity_type" text NOT NULL,"entity_id" bigint NOT NULL,"before" jsonb DEFAULT '{}',"after" jsonb DEFAULT '{}',"at" timestamptz,CONSTRAINT "fk_audit_logs_actor" FOREIGN KEY ("actor_id") REFERENCES "users"("id"));
CREATE TABLE "work_stats" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"views" bigint DEFAULT 0,"likes" bigint DEFAULT 0,"comments" bigint DEFAULT 0,"bookmarks" bigint DEFAULT 0,"shares" bigint DEFAULT 0,"translation_count" bigint DEFAULT 0,"reading_time" integer DEFAULT 0,"complexity" decimal(5,2) DEFAULT 0,"sentiment" decimal(5,2) DEFAULT 0,"work_id" bigint,CONSTRAINT "fk_work_stats_work" FOREIGN KEY ("work_id") REFERENCES "works"("id") ON DELETE CASCADE ON UPDATE CASCADE);
CREATE TABLE "translation_stats" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"views" bigint DEFAULT 0,"likes" bigint DEFAULT 0,"comments" bigint DEFAULT 0,"shares" bigint DEFAULT 0,"reading_time" integer DEFAULT 0,"sentiment" decimal(5,2) DEFAULT 0,"translation_id" bigint,CONSTRAINT "fk_translation_stats_translation" FOREIGN KEY ("translation_id") REFERENCES "translations"("id") ON DELETE CASCADE ON UPDATE CASCADE);
CREATE TABLE "user_engagements" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"user_id" bigint,"date" date,"works_read" integer DEFAULT 0,"comments_made" integer DEFAULT 0,"likes_given" integer DEFAULT 0,"bookmarks_made" integer DEFAULT 0,"translations_made" integer DEFAULT 0,CONSTRAINT "fk_user_engagements_user" FOREIGN KEY ("user_id") REFERENCES "users"("id"));
CREATE TABLE "trendings" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"entity_type" text NOT NULL,"entity_id" bigint NOT NULL,"rank" integer NOT NULL,"score" decimal(10,2) DEFAULT 0,"time_period" text NOT NULL,"date" date);
CREATE TABLE "book_stats" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"sales" bigint DEFAULT 0,"views" bigint DEFAULT 0,"likes" bigint DEFAULT 0,"book_id" bigint,CONSTRAINT "fk_book_stats_book" FOREIGN KEY ("book_id") REFERENCES "books"("id"));
CREATE TABLE "collection_stats" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"items" bigint DEFAULT 0,"views" bigint DEFAULT 0,"likes" bigint DEFAULT 0,"collection_id" bigint,CONSTRAINT "fk_collection_stats_collection" FOREIGN KEY ("collection_id") REFERENCES "collections"("id"));
CREATE TABLE "media_stats" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"views" bigint DEFAULT 0,"downloads" bigint DEFAULT 0,"shares" bigint DEFAULT 0,"media_id" bigint);
CREATE TABLE "author_countries" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"author_id" bigint,"country_id" bigint,CONSTRAINT "fk_author_countries_author" FOREIGN KEY ("author_id") REFERENCES "authors"("id"),CONSTRAINT "fk_author_countries_country" FOREIGN KEY ("country_id") REFERENCES "countries"("id"));
CREATE TABLE "readability_scores" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"score" decimal(5,2),"language" text NOT NULL,"method" text,"work_id" bigint,CONSTRAINT "fk_readability_scores_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "writing_styles" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"name" text NOT NULL,"description" text,"language" text NOT NULL,"work_id" bigint,CONSTRAINT "fk_writing_styles_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "linguistic_layers" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"name" text NOT NULL,"description" text,"language" text NOT NULL,"type" text,"work_id" bigint,"data" jsonb DEFAULT '{}',CONSTRAINT "fk_linguistic_layers_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "text_metadata" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"analysis" text,"language" text NOT NULL,"word_count" integer DEFAULT 0,"sentence_count" integer DEFAULT 0,"paragraph_count" integer DEFAULT 0,"average_word_length" decimal(5,2),"average_sentence_length" decimal(5,2),"work_id" bigint,CONSTRAINT "fk_text_metadata_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "poetic_analyses" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"structure" text,"language" text NOT NULL,"rhyme_scheme" text,"meter_type" text,"stanza_count" integer DEFAULT 0,"line_count" integer DEFAULT 0,"work_id" bigint,CONSTRAINT "fk_poetic_analyses_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "concepts" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"name" text NOT NULL,"description" text);
CREATE TABLE "words" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"text" text NOT NULL,"language" text NOT NULL,"part_of_speech" text,"lemma" text,"concept_id" bigint,CONSTRAINT "fk_concepts_words" FOREIGN KEY ("concept_id") REFERENCES "concepts"("id"));
CREATE TABLE "work_words" ("word_id" bigint,"work_id" bigint,PRIMARY KEY ("word_id","work_id"),CONSTRAINT "fk_work_words_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"),CONSTRAINT "fk_work_words_word" FOREIGN KEY ("word_id") REFERENCES "words"("id"));
CREATE TABLE "word_occurrences" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"text_block_id" bigint,"word_id" bigint,"start_offset" integer DEFAULT 0,"end_offset" integer DEFAULT 0,"lemma" text,"part_of_speech" text,CONSTRAINT "fk_word_occurrences_text_block" FOREIGN KEY ("text_block_id") REFERENCES "text_blocks"("id"),CONSTRAINT "fk_word_occurrences_word" FOREIGN KEY ("word_id") REFERENCES "words"("id"));
CREATE TABLE "work_concepts" ("concept_id" bigint,"work_id" bigint,PRIMARY KEY ("concept_id","work_id"),CONSTRAINT "fk_work_concepts_concept" FOREIGN KEY ("concept_id") REFERENCES "concepts"("id"),CONSTRAINT "fk_work_concepts_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "language_entities" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"name" text NOT NULL,"type" text,"language" text NOT NULL);
CREATE TABLE "work_language_entities" ("language_entity_id" bigint,"work_id" bigint,PRIMARY KEY ("language_entity_id","work_id"),CONSTRAINT "fk_work_language_entities_language_entity" FOREIGN KEY ("language_entity_id") REFERENCES "language_entities"("id"),CONSTRAINT "fk_work_language_entities_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "entity_occurrences" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"text_block_id" bigint,"language_entity_id" bigint,"start_offset" integer DEFAULT 0,"end_offset" integer DEFAULT 0,CONSTRAINT "fk_entity_occurrences_text_block" FOREIGN KEY ("text_block_id") REFERENCES "text_blocks"("id"),CONSTRAINT "fk_entity_occurrences_language_entity" FOREIGN KEY ("language_entity_id") REFERENCES "language_entities"("id"));
CREATE TABLE "language_analyses" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"language" text NOT NULL,"analysis" jsonb DEFAULT '{}',"work_id" bigint,CONSTRAINT "fk_language_analyses_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "gamifications" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"points" integer DEFAULT 0,"level" integer DEFAULT 1,"badges" jsonb DEFAULT '{}',"streaks" integer DEFAULT 0,"last_active" timestamptz,"user_id" bigint,CONSTRAINT "fk_gamifications_user" FOREIGN KEY ("user_id") REFERENCES "users"("id"));
CREATE TABLE "stats" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"data" jsonb DEFAULT '{}',"period" text,"start_date" timestamptz,"end_date" timestamptz,"user_id" bigint,"work_id" bigint,CONSTRAINT "fk_stats_user" FOREIGN KEY ("user_id") REFERENCES "users"("id"),CONSTRAINT "fk_stats_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "search_documents" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"entity_type" text,"entity_id" bigint,"language_code" text,"title" text,"body" text,"keywords" text);
CREATE TABLE "emotions" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"name" text NOT NULL,"description" text,"language" text NOT NULL,"intensity" decimal(5,2) DEFAULT 0,"user_id" bigint,"work_id" bigint,"collection_id" bigint,CONSTRAINT "fk_emotions_user" FOREIGN KEY ("user_id") REFERENCES "users"("id"),CONSTRAINT "fk_emotions_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"),CONSTRAINT "fk_emotions_collection" FOREIGN KEY ("collection_id") REFERENCES "collections"("id"));
CREATE TABLE "moods" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"name" text NOT NULL,"description" text,"language" text NOT NULL);
CREATE TABLE "work_moods" ("mood_id" bigint,"work_id" bigint,PRIMARY KEY ("mood_id","work_id"),CONSTRAINT "fk_work_moods_mood" FOREIGN KEY ("mood_id") REFERENCES "moods"("id"),CONSTRAINT "fk_work_moods_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "topic_clusters" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"name" text NOT NULL,"description" text,"keywords" text);
CREATE TABLE "work_topic_clusters" ("topic_cluster_id" bigint,"work_id" bigint,PRIMARY KEY ("topic_cluster_id","work_id"),CONSTRAINT "fk_work_topic_clusters_topic_cluster" FOREIGN KEY ("topic_cluster_id") REFERENCES "topic_clusters"("id"),CONSTRAINT "fk_work_topic_clusters_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"));
CREATE TABLE "edges" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"source_table" text NOT NULL,"source_id" bigint NOT NULL,"target_table" text NOT NULL,"target_id" bigint NOT NULL,"relation" text NOT NULL DEFAULT 'ASSOCIATED_WITH',"language" text DEFAULT 'en',"extra" jsonb DEFAULT '{}');
CREATE TABLE "embeddings" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"external_id" text,"entity_type" text NOT NULL,"entity_id" bigint NOT NULL,"model" text NOT NULL,"dim" integer DEFAULT 0,"work_id" bigint,"translation_id" bigint,CONSTRAINT "fk_embeddings_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"),CONSTRAINT "fk_embeddings_translation" FOREIGN KEY ("translation_id") REFERENCES "translations"("id"));
CREATE TABLE "localizations" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"key" text NOT NULL,"value" text NOT NULL,"language" text NOT NULL);
CREATE TABLE "media" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"url" text NOT NULL,"type" text NOT NULL,"mime_type" text,"size" bigint DEFAULT 0,"title" text,"description" text,"language" text NOT NULL,"author_id" bigint,"translation_id" bigint,"country_id" bigint,"city_id" bigint,CONSTRAINT "fk_media_city" FOREIGN KEY ("city_id") REFERENCES "cities"("id"),CONSTRAINT "fk_media_author" FOREIGN KEY ("author_id") REFERENCES "authors"("id"),CONSTRAINT "fk_media_translation" FOREIGN KEY ("translation_id") REFERENCES "translations"("id"),CONSTRAINT "fk_media_country" FOREIGN KEY ("country_id") REFERENCES "countries"("id"));
CREATE TABLE "notifications" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"message" text NOT NULL,"type" text,"read" boolean DEFAULT false,"language" text NOT NULL,"user_id" bigint,"related_id" bigint,"related_type" text,CONSTRAINT "fk_notifications_user" FOREIGN KEY ("user_id") REFERENCES "users"("id"));
CREATE TABLE "editorial_workflows" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"stage" text NOT NULL,"notes" text,"language" text NOT NULL,"work_id" bigint,"translation_id" bigint,"user_id" bigint,"assigned_to_id" bigint,"due_date" timestamptz,"completed_at" timestamptz,CONSTRAINT "fk_editorial_workflows_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"),CONSTRAINT "fk_editorial_workflows_translation" FOREIGN KEY ("translation_id") REFERENCES "translations"("id"),CONSTRAINT "fk_editorial_workflows_user" FOREIGN KEY ("user_id") REFERENCES "users"("id"),CONSTRAINT "fk_editorial_workflows_assigned_to" FOREIGN KEY ("assigned_to_id") REFERENCES "users"("id"));
CREATE TABLE "admins" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"user_id" bigint,"role" text NOT NULL,"permissions" jsonb DEFAULT '{}',CONSTRAINT "fk_admins_user" FOREIGN KEY ("user_id") REFERENCES "users"("id"));
CREATE TABLE "votes" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"value" integer DEFAULT 0,"user_id" bigint,"work_id" bigint,"translation_id" bigint,"comment_id" bigint,CONSTRAINT "fk_votes_user" FOREIGN KEY ("user_id") REFERENCES "users"("id"),CONSTRAINT "fk_votes_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"),CONSTRAINT "fk_votes_translation" FOREIGN KEY ("translation_id") REFERENCES "translations"("id"),CONSTRAINT "fk_votes_comment" FOREIGN KEY ("comment_id") REFERENCES "comments"("id"));
CREATE TABLE "contributors" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"name" text NOT NULL,"role" text,"user_id" bigint,"work_id" bigint,"translation_id" bigint,CONSTRAINT "fk_contributors_user" FOREIGN KEY ("user_id") REFERENCES "users"("id"),CONSTRAINT "fk_contributors_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"),CONSTRAINT "fk_contributors_translation" FOREIGN KEY ("translation_id") REFERENCES "translations"("id"));
CREATE TABLE "interaction_events" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"user_id" bigint,"target_type" text NOT NULL,"target_id" bigint NOT NULL,"kind" text NOT NULL,"occurred_at" timestamptz,CONSTRAINT "fk_interaction_events_user" FOREIGN KEY ("user_id") REFERENCES "users"("id"));
CREATE TABLE "hybrid_entity_works" ("id" SERIAL PRIMARY KEY,"created_at" timestamptz,"updated_at" timestamptz,"name" text NOT NULL,"type" text,"work_id" bigint,"translation_id" bigint,CONSTRAINT "fk_hybrid_entity_works_work" FOREIGN KEY ("work_id") REFERENCES "works"("id"),CONSTRAINT "fk_hybrid_entity_works_translation" FOREIGN KEY ("translation_id") REFERENCES "translations"("id"));
-- +goose Down
DROP TABLE IF EXISTS "hybrid_entity_works";
DROP TABLE IF EXISTS "interaction_events";
DROP TABLE IF EXISTS "contributors";
DROP TABLE IF EXISTS "votes";
DROP TABLE IF EXISTS "admins";
DROP TABLE IF EXISTS "editorial_workflows";
DROP TABLE IF EXISTS "notifications";
DROP TABLE IF EXISTS "media";
DROP TABLE IF EXISTS "localizations";
DROP TABLE IF EXISTS "embeddings";
DROP TABLE IF EXISTS "edges";
DROP TABLE IF EXISTS "work_topic_clusters";
DROP TABLE IF EXISTS "topic_clusters";
DROP TABLE IF EXISTS "work_moods";
DROP TABLE IF EXISTS "moods";
DROP TABLE IF EXISTS "emotions";
DROP TABLE IF EXISTS "search_documents";
DROP TABLE IF EXISTS "stats";
DROP TABLE IF EXISTS "gamifications";
DROP TABLE IF EXISTS "language_analyses";
DROP TABLE IF EXISTS "entity_occurrences";
DROP TABLE IF EXISTS "work_language_entities";
DROP TABLE IF EXISTS "language_entities";
DROP TABLE IF EXISTS "work_concepts";
DROP TABLE IF EXISTS "word_occurrences";
DROP TABLE IF EXISTS "work_words";
DROP TABLE IF EXISTS "words";
DROP TABLE IF EXISTS "concepts";
DROP TABLE IF EXISTS "poetic_analyses";
DROP TABLE IF EXISTS "text_metadata";
DROP TABLE IF EXISTS "linguistic_layers";
DROP TABLE IF EXISTS "writing_styles";
DROP TABLE IF EXISTS "readability_scores";
DROP TABLE IF EXISTS "author_countries";
DROP TABLE IF EXISTS "media_stats";
DROP TABLE IF EXISTS "collection_stats";
DROP TABLE IF EXISTS "book_stats";
DROP TABLE IF EXISTS "trendings";
DROP TABLE IF EXISTS "user_engagements";
DROP TABLE IF EXISTS "translation_stats";
DROP TABLE IF EXISTS "work_stats";
DROP TABLE IF EXISTS "audit_logs";
DROP TABLE IF EXISTS "moderation_flags";
DROP TABLE IF EXISTS "licenses";
DROP TABLE IF EXISTS "monetizations";
DROP TABLE IF EXISTS "copyright_claims";
DROP TABLE IF EXISTS "copyright_translations";
DROP TABLE IF EXISTS "copyrights";
DROP TABLE IF EXISTS "translation_fields";
DROP TABLE IF EXISTS "work_series";
DROP TABLE IF EXISTS "series";
DROP TABLE IF EXISTS "languages";
DROP TABLE IF EXISTS "contributions";
DROP TABLE IF EXISTS "collection_works";
DROP TABLE IF EXISTS "collections";
DROP TABLE IF EXISTS "bookmarks";
DROP TABLE IF EXISTS "likes";
DROP TABLE IF EXISTS "comments";
DROP TABLE IF EXISTS "text_blocks";
DROP TABLE IF EXISTS "translations";
DROP TABLE IF EXISTS "editions";
DROP TABLE IF EXISTS "work_sources";
DROP TABLE IF EXISTS "source_copyrights";
DROP TABLE IF EXISTS "source_monetizations";
DROP TABLE IF EXISTS "sources";
DROP TABLE IF EXISTS "publisher_copyrights";
DROP TABLE IF EXISTS "publisher_monetizations";
DROP TABLE IF EXISTS "book_copyrights";
DROP TABLE IF EXISTS "book_monetizations";
DROP TABLE IF EXISTS "book_works";
DROP TABLE IF EXISTS "author_copyrights";
DROP TABLE IF EXISTS "author_monetizations";
DROP TABLE IF EXISTS "book_authors";
DROP TABLE IF EXISTS "work_authors";
DROP TABLE IF EXISTS "authors";
DROP TABLE IF EXISTS "places";
DROP TABLE IF EXISTS "work_tags";
DROP TABLE IF EXISTS "tags";
DROP TABLE IF EXISTS "work_categories";
DROP TABLE IF EXISTS "categories";
DROP TABLE IF EXISTS "work_copyrights";
DROP TABLE IF EXISTS "works";
DROP TABLE IF EXISTS "email_verifications";
DROP TABLE IF EXISTS "password_resets";
DROP TABLE IF EXISTS "user_sessions";
DROP TABLE IF EXISTS "users";
DROP TABLE IF EXISTS "addresses";
DROP TABLE IF EXISTS "cities";
DROP TABLE IF EXISTS "countries";
DROP TABLE IF EXISTS "sqlite_sequence";

View File

@ -790,17 +790,6 @@ type Trending struct {
Date time.Time `gorm:"type:date;index:idx_trending_entity_period_date,uniqueIndex:uniq_trending_rank"`
}
type UserStats struct {
BaseModel
Activity int64 `gorm:"default:0"`
Works int64 `gorm:"default:0"`
Translations int64 `gorm:"default:0"`
Comments int64 `gorm:"default:0"`
Likes int64 `gorm:"default:0"`
Bookmarks int64 `gorm:"default:0"`
UserID uint `gorm:"uniqueIndex;index"`
User *User `gorm:"foreignKey:UserID"`
}
type BookStats struct {
BaseModel
Sales int64 `gorm:"default:0"`

View File

@ -181,3 +181,9 @@ func shouldSkipAuth(path string) bool {
return false
}
// ContextWithUserID adds a user ID to the context for testing purposes.
func ContextWithUserID(ctx context.Context, userID uint) context.Context {
claims := &Claims{UserID: userID}
return context.WithValue(ctx, ClaimsContextKey, claims)
}
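A hypothetical snippet showing the intended use in a resolver or middleware test (the user ID is arbitrary):

// Simulate an authenticated request for user 42.
ctx := auth.ContextWithUserID(context.Background(), 42)
// Code under test that reads Claims from ctx now sees UserID == 42,
// with no token generation or validation involved.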

View File

@ -0,0 +1,101 @@
package testutil
import (
"context"
"tercul/internal/domain"
"time"
"github.com/stretchr/testify/mock"
)
// MockAnalyticsService is a mock implementation of the analytics.Service interface.
type MockAnalyticsService struct {
mock.Mock
}
func (m *MockAnalyticsService) GetTrendingWorks(ctx context.Context, timePeriod string, limit int) ([]*domain.Work, error) {
args := m.Called(ctx, timePeriod, limit)
if args.Get(0) == nil {
return nil, args.Error(1)
}
return args.Get(0).([]*domain.Work), args.Error(1)
}
func (m *MockAnalyticsService) UpdateTrending(ctx context.Context) error {
args := m.Called(ctx)
return args.Error(0)
}
func (m *MockAnalyticsService) IncrementWorkLikes(ctx context.Context, workID uint) {
m.Called(ctx, workID)
}
func (m *MockAnalyticsService) IncrementTranslationLikes(ctx context.Context, translationID uint) {
m.Called(ctx, translationID)
}
func (m *MockAnalyticsService) IncrementWorkComments(ctx context.Context, workID uint) {
m.Called(ctx, workID)
}
func (m *MockAnalyticsService) IncrementTranslationComments(ctx context.Context, translationID uint) {
m.Called(ctx, translationID)
}
func (m *MockAnalyticsService) IncrementWorkBookmarks(ctx context.Context, workID uint) {
m.Called(ctx, workID)
}
func (m *MockAnalyticsService) GetOrCreateWorkStats(ctx context.Context, workID uint) (*domain.WorkStats, error) {
args := m.Called(ctx, workID)
if args.Get(0) == nil {
return nil, args.Error(1)
}
return args.Get(0).(*domain.WorkStats), args.Error(1)
}
func (m *MockAnalyticsService) GetOrCreateTranslationStats(ctx context.Context, translationID uint) (*domain.TranslationStats, error) {
args := m.Called(ctx, translationID)
if args.Get(0) == nil {
return nil, args.Error(1)
}
return args.Get(0).(*domain.TranslationStats), args.Error(1)
}
func (m *MockAnalyticsService) GetOrCreateUserEngagement(ctx context.Context, userID uint, date time.Time) (*domain.UserEngagement, error) {
args := m.Called(ctx, userID, date)
if args.Get(0) == nil {
return nil, args.Error(1)
}
return args.Get(0).(*domain.UserEngagement), args.Error(1)
}
func (m *MockAnalyticsService) UpdateUserEngagement(ctx context.Context, engagement *domain.UserEngagement) error {
args := m.Called(ctx, engagement)
return args.Error(0)
}
func (m *MockAnalyticsService) IncrementWorkCounter(ctx context.Context, workID uint, counter string, value int) error {
args := m.Called(ctx, workID, counter, value)
return args.Error(0)
}
func (m *MockAnalyticsService) IncrementTranslationCounter(ctx context.Context, translationID uint, counter string, value int) error {
args := m.Called(ctx, translationID, counter, value)
return args.Error(0)
}
func (m *MockAnalyticsService) UpdateWorkStats(ctx context.Context, workID uint, stats domain.WorkStats) error {
args := m.Called(ctx, workID, stats)
return args.Error(0)
}
func (m *MockAnalyticsService) UpdateTranslationStats(ctx context.Context, translationID uint, stats domain.TranslationStats) error {
args := m.Called(ctx, translationID, stats)
return args.Error(0)
}
func (m *MockAnalyticsService) UpdateTrendingWorks(ctx context.Context, timePeriod string, trendingWorks []*domain.Trending) error {
args := m.Called(ctx, timePeriod, trendingWorks)
return args.Error(0)
}
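A sketch of driving this mock from a test with testify expectations; the package path `tercul/internal/testutil`, the "weekly" period string, and the asserted fields are assumptions for illustration:

package testutil_test

import (
	"context"
	"testing"

	"tercul/internal/domain"
	"tercul/internal/testutil"

	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
)

func TestTrendingWorks(t *testing.T) {
	svc := new(testutil.MockAnalyticsService)
	// Program the expected call and its canned result.
	svc.On("GetTrendingWorks", mock.Anything, "weekly", 10).
		Return([]*domain.Work{{Title: "Example Work"}}, nil)

	works, err := svc.GetTrendingWorks(context.Background(), "weekly", 10)

	require.NoError(t, err)
	require.Len(t, works, 1)
	svc.AssertExpectations(t) // fails the test if the expected call never happened
}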

View File

@ -0,0 +1,40 @@
package testutil
import (
"tercul/internal/domain"
"tercul/internal/platform/auth"
"time"
"github.com/golang-jwt/jwt/v5"
)
// MockJWTManager is a mock implementation of the JWTManagement interface.
type MockJWTManager struct{}
// NewMockJWTManager creates a new MockJWTManager.
func NewMockJWTManager() auth.JWTManagement {
return &MockJWTManager{}
}
// GenerateToken generates a dummy token for a user.
func (m *MockJWTManager) GenerateToken(user *domain.User) (string, error) {
return "dummy-token-for-" + user.Username, nil
}
// ValidateToken validates a dummy token.
func (m *MockJWTManager) ValidateToken(tokenString string) (*auth.Claims, error) {
if tokenString != "" {
// A real implementation would parse the user from the token.
// For this mock, we'll just return a generic user.
return &auth.Claims{
UserID: 1,
Username: "testuser",
Email: "test@test.com",
Role: "reader",
RegisteredClaims: jwt.RegisteredClaims{
ExpiresAt: jwt.NewNumericDate(time.Now().Add(24 * time.Hour)),
},
}, nil
}
return nil, auth.ErrInvalidToken
}
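In use, the mock collapses the whole token lifecycle: any non-empty string validates to the same canned claims, so tests never touch real signing keys. A sketch (imports as in the previous example):

func TestMockJWTRoundTrip(t *testing.T) {
	mgr := testutil.NewMockJWTManager()
	token, err := mgr.GenerateToken(&domain.User{Username: "alice"})
	require.NoError(t, err)
	require.Equal(t, "dummy-token-for-alice", token)

	claims, err := mgr.ValidateToken(token) // any non-empty string passes
	require.NoError(t, err)
	require.Equal(t, uint(1), claims.UserID) // canned claims, not "alice"
}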

View File

@ -0,0 +1,152 @@
package testutil
import (
"context"
"tercul/internal/domain"
"github.com/stretchr/testify/mock"
"gorm.io/gorm"
)
// MockLikeRepository is a mock implementation of the LikeRepository interface.
type MockLikeRepository struct {
mock.Mock
Likes []*domain.Like // Keep for other potential tests, but new mocks will use testify
}
// NewMockLikeRepository creates a new MockLikeRepository.
func NewMockLikeRepository() *MockLikeRepository {
return &MockLikeRepository{Likes: []*domain.Like{}}
}
// Create uses the mock's Called method.
func (m *MockLikeRepository) Create(ctx context.Context, like *domain.Like) error {
args := m.Called(ctx, like)
return args.Error(0)
}
// GetByID retrieves a like by its ID from the mock repository.
func (m *MockLikeRepository) GetByID(ctx context.Context, id uint) (*domain.Like, error) {
args := m.Called(ctx, id)
if args.Get(0) == nil {
return nil, args.Error(1)
}
return args.Get(0).(*domain.Like), args.Error(1)
}
// ListByUserID retrieves likes by their user ID from the mock repository.
func (m *MockLikeRepository) ListByUserID(ctx context.Context, userID uint) ([]domain.Like, error) {
var likes []domain.Like
for _, l := range m.Likes {
if l.UserID == userID {
likes = append(likes, *l)
}
}
return likes, nil
}
// ListByWorkID retrieves likes by their work ID from the mock repository.
func (m *MockLikeRepository) ListByWorkID(ctx context.Context, workID uint) ([]domain.Like, error) {
var likes []domain.Like
for _, l := range m.Likes {
if l.WorkID != nil && *l.WorkID == workID {
likes = append(likes, *l)
}
}
return likes, nil
}
// ListByTranslationID retrieves likes by their translation ID from the mock repository.
func (m *MockLikeRepository) ListByTranslationID(ctx context.Context, translationID uint) ([]domain.Like, error) {
var likes []domain.Like
for _, l := range m.Likes {
if l.TranslationID != nil && *l.TranslationID == translationID {
likes = append(likes, *l)
}
}
return likes, nil
}
// ListByCommentID retrieves likes by their comment ID from the mock repository.
func (m *MockLikeRepository) ListByCommentID(ctx context.Context, commentID uint) ([]domain.Like, error) {
var likes []domain.Like
for _, l := range m.Likes {
if l.CommentID != nil && *l.CommentID == commentID {
likes = append(likes, *l)
}
}
return likes, nil
}
// The rest of the BaseRepository methods can be stubbed out or implemented as needed.
func (m *MockLikeRepository) CreateInTx(ctx context.Context, tx *gorm.DB, entity *domain.Like) error {
return m.Create(ctx, entity)
}
func (m *MockLikeRepository) GetByIDWithOptions(ctx context.Context, id uint, options *domain.QueryOptions) (*domain.Like, error) {
return m.GetByID(ctx, id)
}
func (m *MockLikeRepository) Update(ctx context.Context, entity *domain.Like) error {
args := m.Called(ctx, entity)
return args.Error(0)
}
func (m *MockLikeRepository) UpdateInTx(ctx context.Context, tx *gorm.DB, entity *domain.Like) error {
return m.Update(ctx, entity)
}
func (m *MockLikeRepository) Delete(ctx context.Context, id uint) error {
args := m.Called(ctx, id)
return args.Error(0)
}
func (m *MockLikeRepository) DeleteInTx(ctx context.Context, tx *gorm.DB, id uint) error {
return m.Delete(ctx, id)
}
func (m *MockLikeRepository) List(ctx context.Context, page, pageSize int) (*domain.PaginatedResult[domain.Like], error) {
panic("not implemented")
}
func (m *MockLikeRepository) ListWithOptions(ctx context.Context, options *domain.QueryOptions) ([]domain.Like, error) {
panic("not implemented")
}
func (m *MockLikeRepository) ListAll(ctx context.Context) ([]domain.Like, error) {
var likes []domain.Like
for _, l := range m.Likes {
likes = append(likes, *l)
}
return likes, nil
}
func (m *MockLikeRepository) Count(ctx context.Context) (int64, error) {
return int64(len(m.Likes)), nil
}
func (m *MockLikeRepository) CountWithOptions(ctx context.Context, options *domain.QueryOptions) (int64, error) {
panic("not implemented")
}
func (m *MockLikeRepository) FindWithPreload(ctx context.Context, preloads []string, id uint) (*domain.Like, error) {
return m.GetByID(ctx, id)
}
func (m *MockLikeRepository) GetAllForSync(ctx context.Context, batchSize, offset int) ([]domain.Like, error) {
panic("not implemented")
}
func (m *MockLikeRepository) Exists(ctx context.Context, id uint) (bool, error) {
args := m.Called(ctx, id)
return args.Bool(0), args.Error(1)
}
func (m *MockLikeRepository) BeginTx(ctx context.Context) (*gorm.DB, error) {
return nil, nil
}
func (m *MockLikeRepository) WithTx(ctx context.Context, fn func(tx *gorm.DB) error) error {
return fn(nil)
}
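The hybrid design matters in use: reads are served from the seeded Likes slice, while writes go through testify expectations. A sketch (imports as in the earlier analytics example; field values are illustrative):

func TestLikesByWork(t *testing.T) {
	ctx := context.Background()
	repo := testutil.NewMockLikeRepository()
	workID := uint(7)

	// Reads: served straight from the in-memory slice.
	repo.Likes = append(repo.Likes, &domain.Like{UserID: 1, WorkID: &workID})
	likes, err := repo.ListByWorkID(ctx, workID)
	require.NoError(t, err)
	require.Len(t, likes, 1)

	// Writes: programmed and verified via testify.
	repo.On("Create", mock.Anything, mock.AnythingOfType("*domain.Like")).Return(nil)
	require.NoError(t, repo.Create(ctx, &domain.Like{UserID: 2, WorkID: &workID}))
	repo.AssertExpectations(t)
}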

View File

@ -0,0 +1,27 @@
package testutil
import (
"context"
"tercul/internal/app/like"
"tercul/internal/domain"
"github.com/stretchr/testify/mock"
)
// MockLikeService is a mock implementation of the like.Commands interface.
type MockLikeService struct {
mock.Mock
}
func (m *MockLikeService) CreateLike(ctx context.Context, input like.CreateLikeInput) (*domain.Like, error) {
args := m.Called(ctx, input)
if args.Get(0) == nil {
return nil, args.Error(1)
}
return args.Get(0).(*domain.Like), args.Error(1)
}
func (m *MockLikeService) DeleteLike(ctx context.Context, likeID uint) error {
args := m.Called(ctx, likeID)
return args.Error(0)
}

View File

@ -0,0 +1,134 @@
package testutil
import (
"context"
"strings"
"tercul/internal/domain"
"gorm.io/gorm"
)
// MockUserRepository is a mock implementation of the UserRepository interface.
type MockUserRepository struct {
Users []*domain.User
}
// NewMockUserRepository creates a new MockUserRepository.
func NewMockUserRepository() *MockUserRepository {
return &MockUserRepository{Users: []*domain.User{}}
}
// Create adds a new user to the mock repository.
func (m *MockUserRepository) Create(ctx context.Context, user *domain.User) error {
user.ID = uint(len(m.Users) + 1)
m.Users = append(m.Users, user)
return nil
}
// GetByID retrieves a user by their ID from the mock repository.
func (m *MockUserRepository) GetByID(ctx context.Context, id uint) (*domain.User, error) {
for _, u := range m.Users {
if u.ID == id {
return u, nil
}
}
return nil, gorm.ErrRecordNotFound
}
// FindByUsername retrieves a user by their username from the mock repository.
func (m *MockUserRepository) FindByUsername(ctx context.Context, username string) (*domain.User, error) {
for _, u := range m.Users {
if strings.EqualFold(u.Username, username) {
return u, nil
}
}
return nil, gorm.ErrRecordNotFound
}
// FindByEmail retrieves a user by their email from the mock repository.
func (m *MockUserRepository) FindByEmail(ctx context.Context, email string) (*domain.User, error) {
for _, u := range m.Users {
if strings.EqualFold(u.Email, email) {
return u, nil
}
}
return nil, gorm.ErrRecordNotFound
}
// ListByRole retrieves users by their role from the mock repository.
func (m *MockUserRepository) ListByRole(ctx context.Context, role domain.UserRole) ([]domain.User, error) {
var users []domain.User
for _, u := range m.Users {
if u.Role == role {
users = append(users, *u)
}
}
return users, nil
}
// The rest of the BaseRepository methods can be stubbed out or implemented as needed.
func (m *MockUserRepository) CreateInTx(ctx context.Context, tx *gorm.DB, entity *domain.User) error {
return m.Create(ctx, entity)
}
func (m *MockUserRepository) GetByIDWithOptions(ctx context.Context, id uint, options *domain.QueryOptions) (*domain.User, error) {
return m.GetByID(ctx, id)
}
func (m *MockUserRepository) Update(ctx context.Context, entity *domain.User) error {
for i, u := range m.Users {
if u.ID == entity.ID {
m.Users[i] = entity
return nil
}
}
return gorm.ErrRecordNotFound
}
func (m *MockUserRepository) UpdateInTx(ctx context.Context, tx *gorm.DB, entity *domain.User) error {
return m.Update(ctx, entity)
}
func (m *MockUserRepository) Delete(ctx context.Context, id uint) error {
for i, u := range m.Users {
if u.ID == id {
m.Users = append(m.Users[:i], m.Users[i+1:]...)
return nil
}
}
return gorm.ErrRecordNotFound
}
func (m *MockUserRepository) DeleteInTx(ctx context.Context, tx *gorm.DB, id uint) error {
return m.Delete(ctx, id)
}
func (m *MockUserRepository) List(ctx context.Context, page, pageSize int) (*domain.PaginatedResult[domain.User], error) {
panic("not implemented")
}
func (m *MockUserRepository) ListWithOptions(ctx context.Context, options *domain.QueryOptions) ([]domain.User, error) {
panic("not implemented")
}
func (m *MockUserRepository) ListAll(ctx context.Context) ([]domain.User, error) {
var users []domain.User
for _, u := range m.Users {
users = append(users, *u)
}
return users, nil
}
func (m *MockUserRepository) Count(ctx context.Context) (int64, error) {
return int64(len(m.Users)), nil
}
func (m *MockUserRepository) CountWithOptions(ctx context.Context, options *domain.QueryOptions) (int64, error) {
panic("not implemented")
}
func (m *MockUserRepository) FindWithPreload(ctx context.Context, preloads []string, id uint) (*domain.User, error) {
return m.GetByID(ctx, id)
}
func (m *MockUserRepository) GetAllForSync(ctx context.Context, batchSize, offset int) ([]domain.User, error) {
panic("not implemented")
}
func (m *MockUserRepository) Exists(ctx context.Context, id uint) (bool, error) {
_, err := m.GetByID(ctx, id)
return err == nil, nil
}
func (m *MockUserRepository) BeginTx(ctx context.Context) (*gorm.DB, error) {
return nil, nil
}
func (m *MockUserRepository) WithTx(ctx context.Context, fn func(tx *gorm.DB) error) error {
return fn(nil)
}
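This one is a plain in-memory fake rather than a testify mock; the strings.EqualFold lookups make username and email matching case-insensitive, which the sketch below relies on (imports as above):

func TestFindByEmailIsCaseInsensitive(t *testing.T) {
	repo := testutil.NewMockUserRepository()
	err := repo.Create(context.Background(), &domain.User{Username: "Alice", Email: "alice@example.com"})
	require.NoError(t, err)

	u, err := repo.FindByEmail(context.Background(), "ALICE@EXAMPLE.COM")
	require.NoError(t, err)
	require.Equal(t, "Alice", u.Username) // EqualFold ignores case
}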

View File

@ -2,254 +2,123 @@ package testutil
import (
"context"
"gorm.io/gorm"
"tercul/internal/domain"
)
// UnifiedMockWorkRepository is a shared mock for WorkRepository tests
// Implements all required methods and uses an in-memory slice
type UnifiedMockWorkRepository struct {
Works []*domain.Work
}
func NewUnifiedMockWorkRepository() *UnifiedMockWorkRepository {
return &UnifiedMockWorkRepository{Works: []*domain.Work{}}
}
func (m *UnifiedMockWorkRepository) AddWork(work *domain.Work) {
work.ID = uint(len(m.Works) + 1)
m.Works = append(m.Works, work)
}
// BaseRepository methods with context support
func (m *UnifiedMockWorkRepository) Create(ctx context.Context, entity *domain.Work) error {
m.AddWork(entity)
return nil
}
func (m *UnifiedMockWorkRepository) GetByID(ctx context.Context, id uint) (*domain.Work, error) {
for _, w := range m.Works {
if w.ID == id {
return w, nil
}
}
return nil, ErrEntityNotFound
}
func (m *UnifiedMockWorkRepository) Update(ctx context.Context, entity *domain.Work) error {
for i, w := range m.Works {
if w.ID == entity.ID {
m.Works[i] = entity
return nil
}
}
return ErrEntityNotFound
}
func (m *UnifiedMockWorkRepository) Delete(ctx context.Context, id uint) error {
for i, w := range m.Works {
if w.ID == id {
m.Works = append(m.Works[:i], m.Works[i+1:]...)
return nil
}
}
return ErrEntityNotFound
}
func (m *UnifiedMockWorkRepository) List(ctx context.Context, page, pageSize int) (*domain.PaginatedResult[domain.Work], error) {
var all []domain.Work
for _, w := range m.Works {
if w != nil {
all = append(all, *w)
}
}
total := int64(len(all))
start := (page - 1) * pageSize
end := start + pageSize
if start > len(all) {
return &domain.PaginatedResult[domain.Work]{Items: []domain.Work{}, TotalCount: total}, nil
}
if end > len(all) {
end = len(all)
}
return &domain.PaginatedResult[domain.Work]{Items: all[start:end], TotalCount: total}, nil
}
func (m *UnifiedMockWorkRepository) ListAll(ctx context.Context) ([]domain.Work, error) {
var all []domain.Work
for _, w := range m.Works {
if w != nil {
all = append(all, *w)
}
}
return all, nil
}
func (m *UnifiedMockWorkRepository) Count(ctx context.Context) (int64, error) {
return int64(len(m.Works)), nil
}
func (m *UnifiedMockWorkRepository) FindWithPreload(ctx context.Context, preloads []string, id uint) (*domain.Work, error) {
for _, w := range m.Works {
if w.ID == id {
return w, nil
}
}
return nil, ErrEntityNotFound
}
func (m *UnifiedMockWorkRepository) GetAllForSync(ctx context.Context, batchSize, offset int) ([]domain.Work, error) {
var result []domain.Work
end := offset + batchSize
if end > len(m.Works) {
end = len(m.Works)
}
for i := offset; i < end; i++ {
if m.Works[i] != nil {
result = append(result, *m.Works[i])
}
}
return result, nil
}
// New BaseRepository methods
func (m *UnifiedMockWorkRepository) CreateInTx(ctx context.Context, tx *gorm.DB, entity *domain.Work) error {
return m.Create(ctx, entity)
}
func (m *UnifiedMockWorkRepository) GetByIDWithOptions(ctx context.Context, id uint, options *domain.QueryOptions) (*domain.Work, error) {
return m.GetByID(ctx, id)
}
func (m *UnifiedMockWorkRepository) UpdateInTx(ctx context.Context, tx *gorm.DB, entity *domain.Work) error {
return m.Update(ctx, entity)
}
func (m *UnifiedMockWorkRepository) DeleteInTx(ctx context.Context, tx *gorm.DB, id uint) error {
return m.Delete(ctx, id)
}
func (m *UnifiedMockWorkRepository) ListWithOptions(ctx context.Context, options *domain.QueryOptions) ([]domain.Work, error) {
result, err := m.List(ctx, 1, 1000)
if err != nil {
return nil, err
}
return result.Items, nil
}
func (m *UnifiedMockWorkRepository) CountWithOptions(ctx context.Context, options *domain.QueryOptions) (int64, error) {
return m.Count(ctx)
}
func (m *UnifiedMockWorkRepository) Exists(ctx context.Context, id uint) (bool, error) {
_, err := m.GetByID(ctx, id)
return err == nil, nil
}
func (m *UnifiedMockWorkRepository) BeginTx(ctx context.Context) (*gorm.DB, error) {
return nil, nil
}
func (m *UnifiedMockWorkRepository) WithTx(ctx context.Context, fn func(tx *gorm.DB) error) error {
return fn(nil)
}
// WorkRepository specific methods
func (m *UnifiedMockWorkRepository) FindByTitle(ctx context.Context, title string) ([]domain.Work, error) {
var result []domain.Work
for _, w := range m.Works {
if len(title) == 0 || (len(w.Title) >= len(title) && w.Title[:len(title)] == title) {
result = append(result, *w)
}
}
return result, nil
}
func (m *UnifiedMockWorkRepository) FindByLanguage(ctx context.Context, language string, page, pageSize int) (*domain.PaginatedResult[domain.Work], error) {
var filtered []domain.Work
for _, w := range m.Works {
if w.Language == language {
filtered = append(filtered, *w)
}
}
total := int64(len(filtered))
start := (page - 1) * pageSize
end := start + pageSize
if start > len(filtered) {
return &domain.PaginatedResult[domain.Work]{Items: []domain.Work{}, TotalCount: total}, nil
}
if end > len(filtered) {
end = len(filtered)
}
return &domain.PaginatedResult[domain.Work]{Items: filtered[start:end], TotalCount: total}, nil
}
func (m *UnifiedMockWorkRepository) FindByAuthor(ctx context.Context, authorID uint) ([]domain.Work, error) {
result := make([]domain.Work, len(m.Works))
for i, w := range m.Works {
if w != nil {
result[i] = *w
}
}
return result, nil
}
func (m *UnifiedMockWorkRepository) FindByCategory(ctx context.Context, categoryID uint) ([]domain.Work, error) {
result := make([]domain.Work, len(m.Works))
for i, w := range m.Works {
if w != nil {
result[i] = *w
}
}
return result, nil
}
func (m *UnifiedMockWorkRepository) GetWithTranslations(ctx context.Context, id uint) (*domain.Work, error) {
for _, w := range m.Works {
if w.ID == id {
return w, nil
}
}
return nil, ErrEntityNotFound
}
func (m *UnifiedMockWorkRepository) ListWithTranslations(ctx context.Context, page, pageSize int) (*domain.PaginatedResult[domain.Work], error) {
var all []domain.Work
for _, w := range m.Works {
if w != nil {
all = append(all, *w)
}
}
total := int64(len(all))
start := (page - 1) * pageSize
end := start + pageSize
if start > len(all) {
return &domain.PaginatedResult[domain.Work]{Items: []domain.Work{}, TotalCount: total}, nil
}
if end > len(all) {
end = len(all)
}
return &domain.PaginatedResult[domain.Work]{Items: all[start:end], TotalCount: total}, nil
}
func (m *UnifiedMockWorkRepository) Reset() {
m.Works = []*domain.Work{}
}
// Add helper to get GraphQL-style Work with Name mapped from Title
func (m *UnifiedMockWorkRepository) GetGraphQLWorkByID(id uint) map[string]interface{} {
for _, w := range m.Works {
if w.ID == id {
return map[string]interface{}{
"id": w.ID,
"name": w.Title,
"language": w.Language,
"content": "",
}
}
}
return nil
}
// Add other interface methods as needed for your tests
import (
"context"
"tercul/internal/domain"
"github.com/stretchr/testify/mock"
"gorm.io/gorm"
)
// MockWorkRepository is a mock implementation of the WorkRepository interface.
type MockWorkRepository struct {
mock.Mock
Works []*domain.Work
}
// NewMockWorkRepository creates a new MockWorkRepository.
func NewMockWorkRepository() *MockWorkRepository {
return &MockWorkRepository{Works: []*domain.Work{}}
}
// Create adds a new work to the mock repository.
func (m *MockWorkRepository) Create(ctx context.Context, work *domain.Work) error {
work.ID = uint(len(m.Works) + 1)
m.Works = append(m.Works, work)
return nil
}
// GetByID retrieves a work by its ID from the mock repository.
func (m *MockWorkRepository) GetByID(ctx context.Context, id uint) (*domain.Work, error) {
for _, w := range m.Works {
if w.ID == id {
return w, nil
}
}
return nil, gorm.ErrRecordNotFound
}
// Exists uses the mock's Called method.
func (m *MockWorkRepository) Exists(ctx context.Context, id uint) (bool, error) {
args := m.Called(ctx, id)
return args.Bool(0), args.Error(1)
}
// The rest of the WorkRepository and BaseRepository methods can be stubbed out.
func (m *MockWorkRepository) FindByTitle(ctx context.Context, title string) ([]domain.Work, error) {
panic("not implemented")
}
func (m *MockWorkRepository) FindByAuthor(ctx context.Context, authorID uint) ([]domain.Work, error) {
panic("not implemented")
}
func (m *MockWorkRepository) FindByCategory(ctx context.Context, categoryID uint) ([]domain.Work, error) {
panic("not implemented")
}
func (m *MockWorkRepository) FindByLanguage(ctx context.Context, language string, page, pageSize int) (*domain.PaginatedResult[domain.Work], error) {
panic("not implemented")
}
func (m *MockWorkRepository) GetWithTranslations(ctx context.Context, id uint) (*domain.Work, error) {
return m.GetByID(ctx, id)
}
func (m *MockWorkRepository) ListWithTranslations(ctx context.Context, page, pageSize int) (*domain.PaginatedResult[domain.Work], error) {
panic("not implemented")
}
func (m *MockWorkRepository) CreateInTx(ctx context.Context, tx *gorm.DB, entity *domain.Work) error {
return m.Create(ctx, entity)
}
func (m *MockWorkRepository) GetByIDWithOptions(ctx context.Context, id uint, options *domain.QueryOptions) (*domain.Work, error) {
return m.GetByID(ctx, id)
}
func (m *MockWorkRepository) Update(ctx context.Context, entity *domain.Work) error {
for i, w := range m.Works {
if w.ID == entity.ID {
m.Works[i] = entity
return nil
}
}
return gorm.ErrRecordNotFound
}
func (m *MockWorkRepository) UpdateInTx(ctx context.Context, tx *gorm.DB, entity *domain.Work) error {
return m.Update(ctx, entity)
}
func (m *MockWorkRepository) Delete(ctx context.Context, id uint) error {
for i, w := range m.Works {
if w.ID == id {
m.Works = append(m.Works[:i], m.Works[i+1:]...)
return nil
}
}
return gorm.ErrRecordNotFound
}
func (m *MockWorkRepository) DeleteInTx(ctx context.Context, tx *gorm.DB, id uint) error {
return m.Delete(ctx, id)
}
func (m *MockWorkRepository) List(ctx context.Context, page, pageSize int) (*domain.PaginatedResult[domain.Work], error) {
panic("not implemented")
}
func (m *MockWorkRepository) ListWithOptions(ctx context.Context, options *domain.QueryOptions) ([]domain.Work, error) {
panic("not implemented")
}
func (m *MockWorkRepository) ListAll(ctx context.Context) ([]domain.Work, error) {
var works []domain.Work
for _, w := range m.Works {
works = append(works, *w)
}
return works, nil
}
func (m *MockWorkRepository) Count(ctx context.Context) (int64, error) {
return int64(len(m.Works)), nil
}
func (m *MockWorkRepository) CountWithOptions(ctx context.Context, options *domain.QueryOptions) (int64, error) {
panic("not implemented")
}
func (m *MockWorkRepository) FindWithPreload(ctx context.Context, preloads []string, id uint) (*domain.Work, error) {
return m.GetByID(ctx, id)
}
func (m *MockWorkRepository) GetAllForSync(ctx context.Context, batchSize, offset int) ([]domain.Work, error) {
panic("not implemented")
}
func (m *MockWorkRepository) BeginTx(ctx context.Context) (*gorm.DB, error) {
return nil, nil
}
func (m *MockWorkRepository) WithTx(ctx context.Context, fn func(tx *gorm.DB) error) error {
return fn(nil)
}
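Design note: embedding mock.Mock next to the Works slice lets each test pick the cheapest tool per method: deterministic in-memory behaviour for Create, GetByID, Update, and Delete; programmable expectations where call verification matters (Exists); and panic("not implemented") stubs that fail loudly if a test wanders onto an unstubbed path instead of silently returning zero values.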

View File

@ -15,7 +15,7 @@ import (
// SimpleTestSuite provides a minimal test environment with just the essentials
type SimpleTestSuite struct {
suite.Suite
WorkRepo *UnifiedMockWorkRepository
WorkRepo *MockWorkRepository
WorkService *work.Service
MockSearchClient *MockSearchClient
}
@ -30,14 +30,14 @@ func (m *MockSearchClient) IndexWork(ctx context.Context, work *domain.Work, pip
// SetupSuite sets up the test suite
func (s *SimpleTestSuite) SetupSuite() {
s.WorkRepo = NewUnifiedMockWorkRepository()
s.WorkRepo = NewMockWorkRepository()
s.MockSearchClient = &MockSearchClient{}
s.WorkService = work.NewService(s.WorkRepo, s.MockSearchClient)
}
// SetupTest resets test data for each test
func (s *SimpleTestSuite) SetupTest() {
s.WorkRepo.Reset()
s.WorkRepo = NewMockWorkRepository()
}
// MockLocalizationRepository is a mock implementation of the localization repository.
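The suite plugs into go test via testify's runner; a typical entry point, assuming the suite is consumed from a sibling test package:

func TestSimpleSuite(t *testing.T) {
	suite.Run(t, new(testutil.SimpleTestSuite))
}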

View File

@ -84,11 +84,11 @@ Short, sharp audit. You've got good bones but too many cross-cutting seams: du
# 2) Specific refactors (high ROI)
1. **Unify GraphQL**
1. **Unify GraphQL** `[COMPLETED]`
* Delete one of: `/graph` or `/graphql`. Keep **gqlgen** in `internal/adapters/graphql`.
* Delete one of: `/graph` or `/graphql`. Keep **gqlgen** in `internal/adapters/graphql`. `[COMPLETED]`
* Put `schema.graphqls` there. Configure `gqlgen.yml` to output generated code in the same package.
* Put `schema.graphqls` there. Configure `gqlgen.yml` to output generated code in the same package. `[COMPLETED]`
* Resolvers should call `internal/app/*` use-cases (not repos), returning **read models** tailored for GraphQL.
* Resolvers should call `internal/app/*` use-cases (not repos), returning **read models** tailored for GraphQL. `[COMPLETED]`
2. **Introduce Unit-of-Work (UoW) + Transaction boundaries**
@ -114,10 +114,10 @@ Short, sharp audit. You've got good bones but too many cross-cutting seams: du
* Current `models/*.go` mixes everything. Group by aggregate (`work`, `author`, `user`, …). Co-locate value objects and invariants. Keep **constructors** that validate invariants (no anemic structs).
6. **Migrations**
6. **Migrations** `[COMPLETED]`
* Move raw SQL to `internal/data/migrations` (or `/migrations` at repo root) and adopt a tool (goose, atlas, migrate). Delete `migrations.go` hand-rollers.
* Move raw SQL to `internal/data/migrations` (or `/migrations` at repo root) and adopt a tool (goose, atlas, migrate). Delete `migrations.go` hand-rollers. `[COMPLETED]`
* Version generated `tercul_schema.sql` as **snapshots** in `/ops/migration/outputs/` instead of in runtime code.
* Version generated `tercul_schema.sql` as **snapshots** in `/ops/migration/outputs/` instead of in runtime code. `[COMPLETED]`
7. **Observability**

report.md
View File

@ -1,163 +0,0 @@
# Tercul Go Application Analysis Report
## Current Status
### Overview
The Tercul backend is a Go-based application for literary text analysis and management. It uses a combination of technologies:
1. **PostgreSQL with GORM**: For relational data storage
2. **Weaviate**: For vector search capabilities
3. **GraphQL with gqlgen**: For API layer
4. **Asynq with Redis**: For asynchronous job processing
### Core Components
#### 1. Data Models
The application has a comprehensive set of models organized in separate files in the `models` package, including:
- Core literary content: Work, Translation, Author, Book
- User interaction: Comment, Like, Bookmark, Collection, Contribution
- Analytics: WorkStats, TranslationStats, UserStats
- Linguistic analysis: TextMetadata, PoeticAnalysis, ReadabilityScore, LinguisticLayer
- Location: Country, City, Place, Address
- System: Notification, EditorialWorkflow, Copyright, CopyrightClaim
The models use inheritance patterns with BaseModel and TranslatableModel providing common fields. The models are well-structured with appropriate relationships between entities.
#### 2. Repositories
The application uses the repository pattern for data access:
- `GenericRepository`: Provides a generic implementation of CRUD operations using Go generics
- `WorkRepository`: CRUD operations for Work model
- Various other repositories for specific entity types
The repositories provide a clean abstraction over the database operations.
#### 3. Synchronization Jobs
The application includes a synchronization mechanism between PostgreSQL and Weaviate:
- `SyncJob`: Manages synchronization process
- `SyncAllEntities`: Syncs entities from PostgreSQL to Weaviate
- `SyncAllEdges`: Syncs edges (relationships) between entities
The synchronization process uses Asynq for background job processing, allowing for scalable asynchronous operations.
#### 4. Linguistic Analysis
The application includes a linguistic analysis system:
- `Analyzer` interface: Defines methods for text analysis
- `BasicAnalyzer`: Implements simple text analysis algorithms
- `LinguisticSyncJob`: Manages background jobs for linguistic analysis
The linguistic analysis includes basic text statistics, readability metrics, keyword extraction, and sentiment analysis, though the implementations are simplified.
#### 5. GraphQL API
The GraphQL API is well-defined with a comprehensive schema that includes types, queries, and mutations for all major entities. The schema supports operations like creating and updating works, translations, and authors, as well as social features like comments, likes, and bookmarks.
## Areas for Improvement
### 1. Performance Concerns
1. **Lack of pagination in repositories**: Many repository methods retrieve all records without pagination, which could cause performance issues with large datasets. For example, the `List()` and `GetAllForSync()` methods in repositories return all records without any limit.
2. **Raw SQL queries in entity synchronization**: The `syncEntities` function in `syncjob/entities_sync.go` uses raw SQL queries with string concatenation instead of GORM's structured query methods, which could lead to SQL injection vulnerabilities and is less efficient.
3. **Loading all records at once**: The synchronization process loads all records of each entity type at once, which could cause memory issues with large datasets. There's no batching or pagination for large datasets.
4. **No batching in Weaviate operations**: The Weaviate client doesn't use batching for operations, which could be inefficient for large datasets. Each entity is sent to Weaviate in a separate API call.
5. **Inefficient linguistic analysis algorithms**: The linguistic analysis algorithms in `linguistics/analyzer.go` are very simplified and not optimized for performance. For example, the sentiment analysis algorithm checks each word against a small list of positive and negative words, which is inefficient.
### 2. Security Concerns
1. **Hardcoded database credentials**: The `main.go` file contains hardcoded database credentials, which is a security risk. These should be moved to environment variables or a secure configuration system.
2. **SQL injection risk**: The `syncEntities` function in `syncjob/entities_sync.go` uses raw SQL queries with string concatenation, which could lead to SQL injection vulnerabilities.
3. **No input validation**: There doesn't appear to be comprehensive input validation for GraphQL mutations, which could lead to data integrity issues or security vulnerabilities.
4. **No rate limiting**: There's no rate limiting for API requests or background jobs, which could make the system vulnerable to denial-of-service attacks.
### 3. Code Quality Issues
1. **Incomplete Weaviate integration**: The Weaviate client in `weaviate/weaviate_client.go` only supports the Work model, not other models, which limits the search capabilities.
2. **Simplified linguistic analysis**: The linguistic analysis algorithms in `linguistics/analyzer.go` are very basic and not suitable for production use. They use simplified approaches that don't leverage modern NLP techniques.
3. **Hardcoded string mappings**: The `toSnakeCase` function in `syncjob/entities_sync.go` has hardcoded mappings for many entity types, which is not maintainable.
### 4. Testing and Documentation
1. **Lack of API documentation**: The GraphQL schema lacks documentation for types, queries, and mutations, which makes it harder for developers to use the API.
2. **Missing code documentation**: Many functions and packages lack proper documentation, which makes the codebase harder to understand and maintain.
3. **No performance benchmarks**: There are no performance benchmarks to identify bottlenecks and measure improvements.
## Recommendations for Future Development
### 1. Architecture Improvements
1. **Implement a service layer**: Add a service layer between repositories and resolvers to encapsulate business logic and improve separation of concerns. This would include services for each domain entity (WorkService, UserService, etc.) that handle validation, business rules, and coordination between repositories.
2. **Improve error handling**: Implement consistent error handling with proper error types and recovery mechanisms. Create custom error types for common scenarios (NotFoundError, ValidationError, etc.) and ensure errors are properly propagated and logged.
3. **Add configuration management**: Use a proper configuration management system instead of hardcoded values. Implement a configuration struct that can be loaded from environment variables, config files, or other sources, with support for defaults and validation.
4. **Implement a logging framework**: Use a structured logging framework for better observability. A library like zap or logrus would provide structured logging with different log levels, contextual information, and better performance than the standard log package.
### 2. Performance Optimizations
1. **Add pagination to all list operations**: Implement pagination for all repository methods that return lists. This would include adding page and pageSize parameters to List methods, calculating the total count, and returning both the paginated results and the total count.
2. **Use GORM's structured query methods**: Replace raw SQL queries with GORM's structured query methods. Instead of using raw SQL queries with string concatenation, use GORM's Table(), Find(), Where(), and other methods to build queries in a structured and safe way.
3. **Implement batching for Weaviate operations**: Use batching for Weaviate operations to reduce the number of API calls. Process entities in batches of a configurable size (e.g., 100) to reduce the number of API calls and improve performance.
4. **Add caching for frequently accessed data**: Implement Redis caching for frequently accessed data. Use Redis to cache frequently accessed data like works, authors, and other entities, with appropriate TTL values and cache invalidation strategies.
5. **Optimize linguistic analysis algorithms**: Replace simplified algorithms with more efficient implementations or use external NLP libraries. The current sentiment analysis and keyword extraction algorithms are very basic and inefficient. Use established NLP libraries like spaCy, NLTK, or specialized sentiment analysis libraries.
6. **Implement database indexing**: Add appropriate indexes to database tables for better query performance. Add indexes to frequently queried fields like title, language, and foreign keys to improve query performance.
### 3. Code Quality Enhancements
1. **Add input validation**: Implement input validation for all GraphQL mutations. Validate required fields, field formats, and business rules before processing data to ensure data integrity and security.
2. **Improve error messages**: Provide more descriptive error messages for better debugging. Include context information in error messages, distinguish between different types of errors (not found, validation, database, etc.), and use error wrapping to preserve the error chain.
3. **Add code documentation**: Add comprehensive documentation to all packages and functions. Include descriptions of function purpose, parameters, return values, and examples where appropriate. Follow Go's documentation conventions for godoc compatibility.
4. **Refactor duplicate code**: Identify and refactor duplicate code, especially in the synchronization process. Extract common functionality into reusable functions or methods, and consider using interfaces for common behavior patterns.
### 4. Testing Improvements
1. **Add integration tests**: Implement integration tests for the GraphQL API and background jobs. Test the entire request-response cycle for GraphQL queries and mutations, including error handling and validation. For background jobs, test the job enqueuing, processing, and completion.
2. **Add performance tests**: Implement performance tests to identify bottlenecks. Use Go's built-in benchmarking tools to measure the performance of critical operations like database queries, synchronization processes, and linguistic analysis. Set performance baselines and monitor for regressions.
### 5. Security Enhancements
1. **Implement proper authentication**: Add JWT authentication with proper token validation. Implement a middleware that validates JWT tokens in the Authorization header, extracts user information from claims, and adds it to the request context for use in resolvers.
2. **Add authorization checks**: Implement role-based access control for all operations. Add checks in resolvers to verify that the authenticated user has the appropriate role and permissions to perform the requested operation, especially for mutations that modify data.
3. **Use environment variables for credentials**: Move hardcoded credentials to environment variables. Replace hardcoded database credentials, API keys, and other sensitive information with values loaded from environment variables or a secure configuration system.
4. **Implement rate limiting**: Add rate limiting for API requests and background jobs. Use a rate limiting middleware to prevent abuse of the API, with configurable limits based on user role, IP address, or other criteria. Also implement rate limiting for background job processing to prevent resource exhaustion.
## Conclusion
The Tercul Go application has a solid foundation with a well-structured domain model, repository pattern, and GraphQL API. The application demonstrates good architectural decisions such as using background job processing for synchronization and having a modular design for linguistic analysis.
A comprehensive suite of unit tests has been added for all models, repositories, and services, which significantly improves the code quality and will help prevent regressions. The password hashing for users has also been implemented.
However, there are still several areas that need improvement:
1. **Performance**: The application has potential performance issues with lack of pagination, inefficient database queries, and simplified algorithms.
2. **Security**: There are security vulnerabilities such as hardcoded credentials and SQL injection risks in some parts of the application.
3. **Code Quality**: The codebase has some inconsistencies in repository implementation, limited error handling, and incomplete features.
4. **Testing**: While unit test coverage is now good, integration and performance tests are still lacking.
By addressing these issues and implementing the recommended improvements, the Tercul Go application can become more robust, secure, and scalable. With password hashing now in place, the most critical issues to address are adding pagination to list operations, improving error handling, removing the remaining hardcoded credentials, and enhancing the linguistic analysis capabilities.
The application has the potential to be a powerful platform for literary text analysis and management, but it requires significant development to reach production readiness.

View File

@ -1,45 +0,0 @@
import json
import os
from jsonschema import validate
from referencing import Registry, Resource
from referencing.jsonschema import DRAFT202012
def main():
"""
Validates the example blog posts against the blog.json schema.
"""
schemas_dir = "schemas"
content_dir = "content/blog"
# Create a resource for each schema
blog_schema_path = os.path.join(schemas_dir, "blog.json")
with open(blog_schema_path, "r") as f:
blog_schema_resource = Resource.from_contents(json.load(f), default_specification=DRAFT202012)
defs_schema_path = os.path.join(schemas_dir, "_defs.json")
with open(defs_schema_path, "r") as f:
defs_schema_resource = Resource.from_contents(json.load(f), default_specification=DRAFT202012)
# Create a registry and add the resources
registry = Registry().with_resources(
[
("blog.json", blog_schema_resource),
("_defs.json", defs_schema_resource),
]
)
# Validate each blog post
for filename in os.listdir(content_dir):
if filename.endswith(".json"):
filepath = os.path.join(content_dir, filename)
with open(filepath, "r") as f:
instance = json.load(f)
try:
validate(instance=instance, schema=blog_schema_resource.contents, registry=registry)
print(f"Successfully validated {filename}")
except Exception as e:
print(f"Validation failed for {filename}: {e}")
if __name__ == "__main__":
main()