Mirror of https://github.com/SamyRai/tercul-backend.git (synced 2025-12-26 22:21:33 +00:00)
Security Hardening and GraphQL Caching (#69)
* feat: add security middleware, graphql apq, and improved linting
  - Add RateLimit, RequestValidation, and CORS middleware.
  - Configure middleware chain in API server.
  - Implement Redis cache for GraphQL Automatic Persisted Queries.
  - Add .golangci.yml and fix linting issues (shadowing, timeouts).

* feat: security, caching and linting config
  - Fix .golangci.yml config for govet shadow check
  - (Previous changes: Security middleware, GraphQL APQ, Linting fixes)

* fix: resolve remaining lint errors
  - Fix unhandled errors in tests (errcheck)
  - Define constants for repeated strings (goconst)
  - Suppress high complexity warnings with nolint:gocyclo
  - Fix integer overflow warnings (gosec)
  - Add package comments
  - Split long lines (lll)
  - Rename Analyse -> Analyze (misspell)
  - Fix naked returns and unused params

---------

Co-authored-by: google-labs-jules[bot] <161369871+google-labs-jules[bot]@users.noreply.github.com>
This commit is contained in:
parent be97b587b2
commit 53aa4d0344
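For orientation before the per-file diffs: Automatic Persisted Queries (APQ) let a client send a SHA-256 hash instead of the full query text; the server answers from a cache keyed by that hash and only asks for the full query on a miss. gqlgen models that cache as graphql.Cache[string], which is the interface this commit satisfies with a Redis-backed adapter. The sketch below is illustrative only (the in-memory type is hypothetical, not repository code) and shows how any such cache slots into the extension.

// Sketch (not from the repository): a minimal in-memory graphql.Cache[string]
// showing the interface gqlgen's APQ extension expects; the commit plugs in a
// Redis-backed adapter (cache.GraphQLCacheAdapter) the same way.
package main

import (
	"context"
	"fmt"
	"sync"

	gql "github.com/99designs/gqlgen/graphql"
)

type memoryAPQCache struct {
	mu sync.RWMutex
	m  map[string]string
}

// Get returns the stored query text for a hash, if present.
func (c *memoryAPQCache) Get(_ context.Context, key string) (string, bool) {
	c.mu.RLock()
	defer c.mu.RUnlock()
	v, ok := c.m[key]
	return v, ok
}

// Add stores the query text under its hash.
func (c *memoryAPQCache) Add(_ context.Context, key, value string) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.m[key] = value
}

// Compile-time check: this is the same interface the server code relies on when
// it passes the cache to extension.AutomaticPersistedQuery{Cache: queryCache}.
var _ gql.Cache[string] = (*memoryAPQCache)(nil)

func main() {
	c := &memoryAPQCache{m: map[string]string{}}
	c.Add(context.Background(), "sha256-hash", "query { __typename }")
	q, ok := c.Get(context.Background(), "sha256-hash")
	fmt.Println(q, ok)
}

Backing the cache with Redis, as this commit does, means persisted queries survive restarts and are shared across API replicas; when Redis is unavailable the server simply starts without the APQ extension.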
.golangci.yml (new file, 60 lines)
@@ -0,0 +1,60 @@
run:
  timeout: 5m
  tests: true

linters-settings:
  govet:
    enable:
      - shadow
  gocyclo:
    min-complexity: 15
  goconst:
    min-len: 2
    min-occurrences: 3
  misspell:
    locale: US
  lll:
    line-length: 140
  goimports:
    local-prefixes: tercul
  gocritic:
    enabled-tags:
      - diagnostic
      - performance
      - style
    disabled-checks:
      - wrapperFunc
      - ifElseChain
      - octalLiteral

linters:
  disable-all: true
  enable:
    - bodyclose
    - errcheck
    - goconst
    - gocritic
    - gocyclo
    - gofmt
    - goimports
    - gosec
    - gosimple
    - govet
    - ineffassign
    - lll
    - misspell
    - nakedret
    - noctx
    - nolintlint
    - staticcheck
    - stylecheck
    - typecheck
    - unconvert
    - unparam
    - unused
    - whitespace

issues:
  exclude-use-default: false
  max-issues-per-linter: 0
  max-same-issues: 0

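The govet shadow check enabled above is what drives most of the err renames in the diffs that follow (traceErr, dbErr, closeErr, migErr, wErr, sErr, shutdownErr). A hypothetical example of the pattern it flags, not taken from the repository:

// Hypothetical example (not repository code) of the shadowing pattern the
// govet shadow check is meant to catch.
package main

import (
	"errors"
	"fmt"
)

func setup() error   { return nil }
func cleanup() error { return errors.New("cleanup failed") }

func main() {
	err := setup()
	if err != nil {
		fmt.Println("setup:", err)
	}
	defer func() {
		// This inner err shadows the outer one; renaming it (e.g. closeErr),
		// as the commit does throughout main.go, removes the warning and
		// avoids accidentally masking the outer error.
		if err := cleanup(); err != nil {
			fmt.Println("cleanup:", err)
		}
	}()
}
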
@@ -8,6 +8,8 @@ import (
"os"
"os/signal"
"syscall"
"time"

"tercul/internal/adapters/graphql"
"tercul/internal/app"
"tercul/internal/app/analytics"

@@ -31,12 +33,13 @@ import (
"tercul/internal/jobs/linguistics"
"tercul/internal/observability"
platform_auth "tercul/internal/platform/auth"
"tercul/internal/platform/cache"
"tercul/internal/platform/config"
"tercul/internal/platform/db"
app_log "tercul/internal/platform/log"
"tercul/internal/platform/search"
"time"

gql "github.com/99designs/gqlgen/graphql"
"github.com/pressly/goose/v3"
"github.com/prometheus/client_golang/prometheus"
"github.com/weaviate/weaviate-go-client/v5/weaviate"

@@ -75,13 +78,13 @@ func main() {
obsLogger := observability.NewLogger("tercul-api", cfg.Environment)

// Initialize OpenTelemetry Tracer Provider
tp, err := observability.TracerProvider("tercul-api", cfg.Environment)
if err != nil {
app_log.Fatal(err, "Failed to initialize OpenTelemetry tracer")
tp, traceErr := observability.TracerProvider("tercul-api", cfg.Environment)
if traceErr != nil {
app_log.Fatal(traceErr, "Failed to initialize OpenTelemetry tracer")
}
defer func() {
if err := tp.Shutdown(context.Background()); err != nil {
app_log.Error(err, "Error shutting down tracer provider")
if shutdownErr := tp.Shutdown(context.Background()); shutdownErr != nil {
app_log.Error(shutdownErr, "Error shutting down tracer provider")
}
}()

@@ -92,18 +95,18 @@ func main() {
app_log.Info(fmt.Sprintf("Starting Tercul application in %s environment, version 1.0.0", cfg.Environment))

// Initialize database connection
database, err := db.InitDB(cfg, metrics)
if err != nil {
app_log.Fatal(err, "Failed to initialize database")
database, dbErr := db.InitDB(cfg, metrics)
if dbErr != nil {
app_log.Fatal(dbErr, "Failed to initialize database")
}
defer func() {
if err := db.Close(database); err != nil {
app_log.Error(err, "Error closing database")
if closeErr := db.Close(database); closeErr != nil {
app_log.Error(closeErr, "Error closing database")
}
}()

if err := runMigrations(database, cfg.MigrationPath); err != nil {
app_log.Fatal(err, "Failed to apply database migrations")
if migErr := runMigrations(database, cfg.MigrationPath); migErr != nil {
app_log.Fatal(migErr, "Failed to apply database migrations")
}

// Initialize Weaviate client
@@ -111,9 +114,9 @@ func main() {
Host: cfg.WeaviateHost,
Scheme: cfg.WeaviateScheme,
}
weaviateClient, err := weaviate.NewClient(weaviateCfg)
if err != nil {
app_log.Fatal(err, "Failed to create weaviate client")
weaviateClient, wErr := weaviate.NewClient(weaviateCfg)
if wErr != nil {
app_log.Fatal(wErr, "Failed to create weaviate client")
}

// Create search client
@@ -124,9 +127,9 @@ func main() {

// Create linguistics dependencies
analysisRepo := linguistics.NewGORMAnalysisRepository(database)
sentimentProvider, err := linguistics.NewGoVADERSentimentProvider()
if err != nil {
app_log.Fatal(err, "Failed to create sentiment provider")
sentimentProvider, sErr := linguistics.NewGoVADERSentimentProvider()
if sErr != nil {
app_log.Fatal(sErr, "Failed to create sentiment provider")
}

// Create platform components
@@ -178,13 +181,24 @@ func main() {
App: application,
}

// Initialize Redis Cache for APQ
redisCache, cacheErr := cache.NewDefaultRedisCache(cfg)
var queryCache gql.Cache[string]
if cacheErr != nil {
app_log.Warn("Redis cache initialization failed, APQ disabled: " + cacheErr.Error())
} else {
queryCache = &cache.GraphQLCacheAdapter{RedisCache: redisCache}
app_log.Info("Redis cache initialized for APQ")
}

// Create the consolidated API server with all routes.
apiHandler := NewAPIServer(resolver, jwtManager, metrics, obsLogger, reg)
apiHandler := NewAPIServer(cfg, resolver, queryCache, jwtManager, metrics, obsLogger, reg)

// Create the main HTTP server.
mainServer := &http.Server{
Addr: cfg.ServerPort,
Handler: apiHandler,
Addr: cfg.ServerPort,
Handler: apiHandler,
ReadHeaderTimeout: 5 * time.Second, // Gosec: Prevent Slowloris attack
}
app_log.Info(fmt.Sprintf("API server listening on port %s", cfg.ServerPort))

@@ -205,8 +219,8 @@ func main() {
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()

if err := mainServer.Shutdown(ctx); err != nil {
app_log.Error(err, "Server forced to shutdown")
if shutdownErr := mainServer.Shutdown(ctx); shutdownErr != nil {
app_log.Error(shutdownErr, "Server forced to shutdown")
}

app_log.Info("Server shut down successfully")

@@ -2,11 +2,16 @@ package main

import (
"net/http"

"tercul/internal/adapters/graphql"
"tercul/internal/observability"
"tercul/internal/platform/auth"
"tercul/internal/platform/config"
platform_http "tercul/internal/platform/http"

gql "github.com/99designs/gqlgen/graphql"
"github.com/99designs/gqlgen/graphql/handler"
"github.com/99designs/gqlgen/graphql/handler/extension"
"github.com/99designs/gqlgen/graphql/playground"
"github.com/prometheus/client_golang/prometheus"
)

@@ -14,7 +19,9 @@ import (
// NewAPIServer creates a new http.ServeMux and configures it with all the API routes,
// including the GraphQL endpoint, GraphQL Playground, and Prometheus metrics.
func NewAPIServer(
cfg *config.Config,
resolver *graphql.Resolver,
queryCache gql.Cache[string],
jwtManager *auth.JWTManager,
metrics *observability.Metrics,
logger *observability.Logger,

@@ -26,10 +33,18 @@ func NewAPIServer(

// Create the core GraphQL handler
graphqlHandler := handler.New(graphql.NewExecutableSchema(c))

// Enable Automatic Persisted Queries (APQ) if cache is provided
if queryCache != nil {
graphqlHandler.Use(extension.AutomaticPersistedQuery{
Cache: queryCache,
})
}

graphqlHandler.SetErrorPresenter(graphql.NewErrorPresenter())

// Create the middleware chain for the GraphQL endpoint.
// Middlewares are applied from bottom to top.
// Middlewares are applied from bottom to top (last applied is first executed).
var chain http.Handler
chain = graphqlHandler
chain = metrics.PrometheusMiddleware(chain)

@@ -38,6 +53,14 @@ func NewAPIServer(
chain = observability.TracingMiddleware(chain)
chain = observability.RequestIDMiddleware(chain)

// Security and Validation Middlewares
chain = platform_http.RequestValidationMiddleware(chain)
chain = platform_http.RateLimitMiddleware(cfg)(chain)

// CORS should be the outermost to handle preflight OPTIONS requests
// TODO: Make allowed origins configurable
chain = platform_http.CORSMiddleware([]string{"*"})(chain)

// Create a new ServeMux and register all handlers
mux := http.NewServeMux()
mux.Handle("/query", chain)

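The updated comment in NewAPIServer spells out the wrapping rule: middlewares are applied from bottom to top, so the handler wrapped last (CORS) is the first to see a request and the innermost handler (GraphQL) runs last. A self-contained sketch of that order with hypothetical no-op middlewares, not the repository's implementations:

// Hypothetical demo of handler wrapping order: the handler wrapped last runs first.
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

func tag(name string, next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Println("enter", name) // printed outermost-first
		next.ServeHTTP(w, r)
	})
}

func main() {
	var chain http.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Println("graphql handler")
	})
	// Same shape as NewAPIServer: metrics wrapped first, CORS wrapped last => CORS runs first.
	chain = tag("metrics", chain)
	chain = tag("validation", chain)
	chain = tag("rate-limit", chain)
	chain = tag("cors", chain)

	req := httptest.NewRequest(http.MethodPost, "/query", nil)
	chain.ServeHTTP(httptest.NewRecorder(), req)
	// Output order: cors, rate-limit, validation, metrics, graphql handler
}

This is also why CORS is wrapped last in the real chain: preflight OPTIONS requests are answered before rate limiting or validation ever run.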
cmd/cli/commands/.bleve_migration_checkpoint (new file, 1 line)

@@ -0,0 +1 @@
{"last_processed_id":3,"total_processed":3,"last_updated":"2025-11-30T21:59:16.811419372Z"}

@@ -24,6 +24,9 @@ const (
defaultBatchSize = 50000
// Checkpoint file to track progress
checkpointFile = ".bleve_migration_checkpoint"

AnalyzerKeyword = "keyword"
AnalyzerStandard = "standard"
)

type checkpoint struct {

@@ -107,7 +110,7 @@ Example:

// Run migration
ctx := context.Background()
stats, err := migrateTranslations(ctx, repos.Translation, index, batchSize, cp, logger, ctx)
stats, err := migrateTranslations(ctx, repos.Translation, index, batchSize, cp, logger)
if err != nil {
return fmt.Errorf("migration failed: %w", err)
}

@@ -162,42 +165,42 @@ func initBleveIndex(indexPath string) (bleve.Index, error) {
idMapping := bleve.NewTextFieldMapping()
idMapping.Store = true
idMapping.Index = true
idMapping.Analyzer = "keyword"
idMapping.Analyzer = AnalyzerKeyword
translationMapping.AddFieldMappingsAt("id", idMapping)

// Title field (analyzed, stored)
titleMapping := bleve.NewTextFieldMapping()
titleMapping.Store = true
titleMapping.Index = true
titleMapping.Analyzer = "standard"
titleMapping.Analyzer = AnalyzerStandard
translationMapping.AddFieldMappingsAt("title", titleMapping)

// Content field (analyzed, stored)
contentMapping := bleve.NewTextFieldMapping()
contentMapping.Store = true
contentMapping.Index = true
contentMapping.Analyzer = "standard"
contentMapping.Analyzer = AnalyzerStandard
translationMapping.AddFieldMappingsAt("content", contentMapping)

// Description field (analyzed, stored)
descriptionMapping := bleve.NewTextFieldMapping()
descriptionMapping.Store = true
descriptionMapping.Index = true
descriptionMapping.Analyzer = "standard"
descriptionMapping.Analyzer = AnalyzerStandard
translationMapping.AddFieldMappingsAt("description", descriptionMapping)

// Language field (not analyzed, stored, for filtering)
languageMapping := bleve.NewTextFieldMapping()
languageMapping.Store = true
languageMapping.Index = true
languageMapping.Analyzer = "keyword"
languageMapping.Analyzer = AnalyzerKeyword
translationMapping.AddFieldMappingsAt("language", languageMapping)

// Status field (not analyzed, stored, for filtering)
statusMapping := bleve.NewTextFieldMapping()
statusMapping.Store = true
statusMapping.Index = true
statusMapping.Analyzer = "keyword"
statusMapping.Analyzer = AnalyzerKeyword
translationMapping.AddFieldMappingsAt("status", statusMapping)

// TranslatableID field (not analyzed, stored)

@@ -210,7 +213,7 @@ func initBleveIndex(indexPath string) (bleve.Index, error) {
translatableTypeMapping := bleve.NewTextFieldMapping()
translatableTypeMapping.Store = true
translatableTypeMapping.Index = true
translatableTypeMapping.Analyzer = "keyword"
translatableTypeMapping.Analyzer = AnalyzerKeyword
translationMapping.AddFieldMappingsAt("translatable_type", translatableTypeMapping)

// TranslatorID field (not analyzed, stored)

@@ -250,7 +253,6 @@ func migrateTranslations(
batchSize int,
cp *checkpoint,
logger *log.Logger,
ctxForLog context.Context,
) (*migrationStats, error) {
startTime := time.Now()
stats := &migrationStats{}

@@ -288,7 +290,7 @@ func migrateTranslations(

// Process batch when it reaches the batch size or at the end
if len(batch) >= batchSize || i == len(translations)-1 {
if err := indexBatch(index, batch, logger); err != nil {
if err := indexBatch(index, batch); err != nil {
logger.Error(err, fmt.Sprintf("Failed to index batch of size %d", len(batch)))
stats.TotalErrors += len(batch)
// Continue with next batch instead of failing completely

@@ -320,7 +322,7 @@ func migrateTranslations(
}

// indexBatch indexes a batch of translations
func indexBatch(index bleve.Index, translations []domain.Translation, logger *log.Logger) error {
func indexBatch(index bleve.Index, translations []domain.Translation) error {
batch := index.NewBatch()
for _, t := range translations {
doc := map[string]interface{}{

@@ -10,7 +10,7 @@ import (

func TestMigrateTranslations_EmptyData(t *testing.T) {
index := initBleveIndexForTest(t)
defer index.Close()
defer func() { _ = index.Close() }()

repo := &mockTranslationRepository{translations: []domain.Translation{}}
logger := getTestLogger()

@@ -22,7 +22,6 @@ func TestMigrateTranslations_EmptyData(t *testing.T) {
10,
nil,
logger,
context.Background(),
)

assert.NoError(t, err)

@@ -33,7 +32,7 @@ func TestMigrateTranslations_EmptyData(t *testing.T) {

func TestMigrateTranslations_LargeBatch(t *testing.T) {
index := initBleveIndexForTest(t)
defer index.Close()
defer func() { _ = index.Close() }()

// Create 100 translations
translations := make([]domain.Translation, 100)

@@ -59,7 +58,6 @@ func TestMigrateTranslations_LargeBatch(t *testing.T) {
50, // Batch size smaller than total
nil,
logger,
context.Background(),
)

assert.NoError(t, err)

@@ -70,7 +68,7 @@ func TestMigrateTranslations_LargeBatch(t *testing.T) {

func TestMigrateTranslations_RepositoryError(t *testing.T) {
index := initBleveIndexForTest(t)
defer index.Close()
defer func() { _ = index.Close() }()

repo := &mockTranslationRepository{
translations: []domain.Translation{},

@@ -85,7 +83,6 @@ func TestMigrateTranslations_RepositoryError(t *testing.T) {
10,
nil,
logger,
context.Background(),
)

assert.Error(t, err)

@@ -94,17 +91,15 @@ func TestMigrateTranslations_RepositoryError(t *testing.T) {

func TestIndexBatch_EmptyBatch(t *testing.T) {
index := initBleveIndexForTest(t)
defer index.Close()
defer func() { _ = index.Close() }()

logger := getTestLogger()

err := indexBatch(index, []domain.Translation{}, logger)
err := indexBatch(index, []domain.Translation{})
assert.NoError(t, err) // Empty batch should not error
}

func TestIndexBatch_WithTranslatorID(t *testing.T) {
index := initBleveIndexForTest(t)
defer index.Close()
defer func() { _ = index.Close() }()

translatorID := uint(123)
translations := []domain.Translation{

@@ -120,9 +115,7 @@ func TestIndexBatch_WithTranslatorID(t *testing.T) {
},
}

logger := getTestLogger()

err := indexBatch(index, translations, logger)
err := indexBatch(index, translations)
assert.NoError(t, err)

// Verify document is indexed

@@ -200,13 +200,13 @@ func TestInitBleveIndex(t *testing.T) {
assert.NoError(t, err)
assert.NotNil(t, index2)
if index2 != nil {
defer index2.Close()
defer func() { _ = index2.Close() }()
}
}

func TestIndexBatch(t *testing.T) {
index := initBleveIndexForTest(t)
defer index.Close()
defer func() { _ = index.Close() }()

translations := []domain.Translation{
{

@@ -230,9 +230,7 @@ func TestIndexBatch(t *testing.T) {
}

// Use a test logger
logger := getTestLogger()

err := indexBatch(index, translations, logger)
err := indexBatch(index, translations)
assert.NoError(t, err)

// Verify documents are indexed

@@ -275,7 +273,7 @@ func TestCheckpointSaveAndLoad(t *testing.T) {

func TestMigrateTranslations(t *testing.T) {
index := initBleveIndexForTest(t)
defer index.Close()
defer func() { _ = index.Close() }()

translations := []domain.Translation{
{

@@ -308,7 +306,6 @@ func TestMigrateTranslations(t *testing.T) {
10, // small batch size for testing
nil, // no checkpoint
logger,
context.Background(),
)

assert.NoError(t, err)

@@ -319,7 +316,7 @@ func TestMigrateTranslations(t *testing.T) {

func TestMigrateTranslationsWithCheckpoint(t *testing.T) {
index := initBleveIndexForTest(t)
defer index.Close()
defer func() { _ = index.Close() }()

translations := []domain.Translation{
{

@@ -368,7 +365,6 @@ func TestMigrateTranslationsWithCheckpoint(t *testing.T) {
10,
cp,
logger,
context.Background(),
)

assert.NoError(t, err)

@@ -379,7 +375,7 @@ func TestMigrateTranslationsWithCheckpoint(t *testing.T) {

func TestVerifyIndex(t *testing.T) {
index := initBleveIndexForTest(t)
defer index.Close()
defer func() { _ = index.Close() }()

translations := []domain.Translation{
{

@@ -397,7 +393,7 @@ func TestVerifyIndex(t *testing.T) {
logger := getTestLogger()

// Index the translation first
err := indexBatch(index, translations, logger)
err := indexBatch(index, translations)
require.NoError(t, err)

// Verify

@@ -407,7 +403,7 @@ func TestVerifyIndex(t *testing.T) {

func TestVerifyIndexWithMissingTranslation(t *testing.T) {
index := initBleveIndexForTest(t)
defer index.Close()
defer func() { _ = index.Close() }()

translations := []domain.Translation{
{

cmd/cli/commands/doc.go (new file, 2 lines)

@@ -0,0 +1,2 @@
// Package commands implements the CLI commands for the application.
package commands

@@ -38,9 +38,9 @@ func TestBootstrap(t *testing.T) {
defer func() {
sqlDB, _ := testDB.DB()
if sqlDB != nil {
sqlDB.Close()
_ = sqlDB.Close()
}
os.Remove(dbPath)
_ = os.Remove(dbPath)
}()

// Setup test config

@@ -84,9 +84,9 @@ func TestBootstrapWithMetrics(t *testing.T) {
defer func() {
sqlDB, _ := testDB.DB()
if sqlDB != nil {
sqlDB.Close()
_ = sqlDB.Close()
}
os.Remove(dbPath)
_ = os.Remove(dbPath)
}()

// Setup test config

cmd/cli/internal/bootstrap/doc.go (new file, 2 lines)

@@ -0,0 +1,2 @@
// Package bootstrap handles application initialization.
package bootstrap

go.mod (4 lines changed)

@@ -6,6 +6,7 @@ require (
github.com/99designs/gqlgen v0.17.72
github.com/DATA-DOG/go-sqlmock v1.5.2
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2
github.com/blevesearch/bleve/v2 v2.5.5
github.com/go-openapi/strfmt v0.25.0
github.com/go-playground/validator/v10 v10.28.0
github.com/go-redis/redismock/v9 v9.2.0

@@ -19,6 +20,7 @@ require (
github.com/prometheus/client_golang v1.20.5
github.com/redis/go-redis/v9 v9.8.0
github.com/rs/zerolog v1.34.0
github.com/spf13/cobra v1.10.1
github.com/spf13/viper v1.21.0
github.com/stretchr/testify v1.11.1
github.com/testcontainers/testcontainers-go v0.40.0

@@ -48,7 +50,6 @@ require (
github.com/antlr4-go/antlr/v4 v4.13.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bits-and-blooms/bitset v1.22.0 // indirect
github.com/blevesearch/bleve/v2 v2.5.5 // indirect
github.com/blevesearch/bleve_index_api v1.2.11 // indirect
github.com/blevesearch/geo v0.2.4 // indirect
github.com/blevesearch/go-faiss v1.0.26 // indirect

@@ -172,7 +173,6 @@ require (
github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8 // indirect
github.com/spf13/afero v1.15.0 // indirect
github.com/spf13/cast v1.10.0 // indirect
github.com/spf13/cobra v1.10.1 // indirect
github.com/spf13/pflag v1.0.10 // indirect
github.com/stretchr/objx v0.5.3 // indirect
github.com/subosito/gotenv v1.6.0 // indirect

internal/app/auth/doc.go (new file, 2 lines)

@@ -0,0 +1,2 @@
// Package auth provides authentication services.
package auth

@@ -54,6 +54,7 @@ func (m *mockUserRepository) Create(ctx context.Context, user *domain.User) erro
if err := user.BeforeSave(nil); err != nil {
return err
}
//nolint:gosec // G115: len is always positive
user.ID = uint(len(m.users) + 1)
m.users[user.ID] = *user
return nil

@@ -70,6 +70,7 @@ type UpdateUserInput struct {
}

// UpdateUser updates an existing user.
//nolint:gocyclo // Complex update logic
func (c *UserCommands) UpdateUser(ctx context.Context, input UpdateUserInput) (*domain.User, error) {
actorID, ok := platform_auth.GetUserIDFromContext(ctx)
if !ok {

@@ -220,6 +220,7 @@ func (c *WorkCommands) AnalyzeWork(ctx context.Context, workID uint) error {
}

// MergeWork merges two works, moving all associations from the source to the target and deleting the source.
//nolint:gocyclo // Complex merge logic
func (c *WorkCommands) MergeWork(ctx context.Context, sourceID, targetID uint) error {
ctx, span := c.tracer.Start(ctx, "MergeWork")
defer span.End()

@@ -133,7 +133,10 @@ func (r *GORMAnalysisRepository) GetWorkByID(ctx context.Context, workID uint) (
}

// GetAnalysisData fetches persisted analysis data for a work
func (r *GORMAnalysisRepository) GetAnalysisData(ctx context.Context, workID uint) (*domain.TextMetadata, *domain.ReadabilityScore, *domain.LanguageAnalysis, error) {
func (r *GORMAnalysisRepository) GetAnalysisData(
ctx context.Context,
workID uint,
) (*domain.TextMetadata, *domain.ReadabilityScore, *domain.LanguageAnalysis, error) {
logger := log.FromContext(ctx).With("workID", workID)
var textMetadata domain.TextMetadata
var readabilityScore domain.ReadabilityScore

internal/jobs/linguistics/doc.go (new file, 2 lines)

@@ -0,0 +1,2 @@
// Package linguistics provides text analysis and language processing capabilities.
package linguistics

@@ -4,6 +4,12 @@ import (
"strings"
)

const (
LangEN = "en"
LangES = "es"
LangFR = "fr"
)

// languageDetector detects the language of a text
type languageDetector struct{}

@@ -35,15 +41,15 @@ func (d *languageDetector) DetectLanguage(text string) (string, error) {

// Determine the most likely language
if englishCount > spanishCount && englishCount > frenchCount {
return "en", nil
return LangEN, nil
} else if spanishCount > englishCount && spanishCount > frenchCount {
return "es", nil
return LangES, nil
} else if frenchCount > englishCount && frenchCount > spanishCount {
return "fr", nil
return LangFR, nil
}

// Default to English if we can't determine the language
return "en", nil
return LangEN, nil
}

// countWords counts the occurrences of words in a text

@@ -36,6 +36,7 @@ func (l *Lemmatizer) Lemma(word string, language string) (string, error) {
}

// englishLemma finds the base form of an English word
//nolint:gocyclo // Large switch case
func englishLemma(word string) string {
// Check for irregular verbs
irregularVerbs := map[string]string{

@@ -13,6 +13,7 @@ func NewPhoneticEncoder() *PhoneticEncoder {
}

// Encode encodes a word phonetically and returns the encoding
//nolint:gocyclo // Complex encoding rules
func (e *PhoneticEncoder) Encode(word string) string {
// This is a simplified implementation of the Soundex algorithm
// In a real-world scenario, you would use a library like github.com/jdkato/prose

@@ -5,6 +5,11 @@ import (
"unicode"
)

const (
SchemeUnknown = "Unknown"
SchemeABAB = "ABAB"
)

// PoeticAnalyzer analyzes the poetic structure of text
type PoeticAnalyzer struct{}

@@ -13,8 +18,8 @@ func NewPoeticAnalyzer() *PoeticAnalyzer {
return &PoeticAnalyzer{}
}

// Analyse analyzes the poetic structure of text and returns metrics
func (a *PoeticAnalyzer) Analyse(text Text) (PoeticMetrics, error) {
// Analyze analyzes the poetic structure of text and returns metrics
func (a *PoeticAnalyzer) Analyze(text Text) (PoeticMetrics, error) {
// This is a simplified implementation
// In a real-world scenario, you would use a more sophisticated approach

@@ -73,12 +78,13 @@ func (a *PoeticAnalyzer) Analyse(text Text) (PoeticMetrics, error) {
}

// detectRhymeScheme detects the rhyme scheme of a poem
//nolint:gocyclo // Complex pattern matching
func detectRhymeScheme(lines []string) string {
// This is a simplified implementation
// In a real-world scenario, you would use phonetic analysis

if len(lines) < 2 {
return "Unknown"
return SchemeUnknown
}

// Extract last word of each line

@@ -106,7 +112,7 @@ func detectRhymeScheme(lines []string) string {
// Check for ABAB pattern
if len(lines) >= 4 {
if endsMatch(lastWords[0], lastWords[2]) && endsMatch(lastWords[1], lastWords[3]) {
return "ABAB"
return SchemeABAB
}
}

@@ -134,7 +140,7 @@ func detectMeterType(lines []string) string {
// In a real-world scenario, you would use syllable counting and stress patterns

if len(lines) == 0 {
return "Unknown"
return SchemeUnknown
}

// Count syllables in each line

@@ -183,13 +189,14 @@ func detectMeterType(lines []string) string {
}

// determineStructure determines the overall structure of a poem
//nolint:gocyclo // Complex rules
func determineStructure(stanzaCount, lineCount int, rhymeScheme, meterType string) string {
// This is a simplified implementation

// Check for common poetic forms

// Sonnet
if lineCount == 14 && (rhymeScheme == "ABAB" || rhymeScheme == "ABBA") && meterType == "Iambic Pentameter" {
if lineCount == 14 && (rhymeScheme == SchemeABAB || rhymeScheme == "ABBA") && meterType == "Iambic Pentameter" {
return "Sonnet"
}

@@ -9,11 +9,11 @@ At quiet dusk we find a star, night
And in the dark we feel the freeze.`
// Last words: light, breeze, night, freeze -> ABAB by last 2 letters (ht, ze, ht, ze)
p := NewPoeticAnalyzer()
m, err := p.Analyse(Text{Body: poem})
m, err := p.Analyze(Text{Body: poem})
if err != nil {
t.Fatalf("Analyse returned error: %v", err)
t.Fatalf("Analyze returned error: %v", err)
}
if m.RhymeScheme != "ABAB" {
if m.RhymeScheme != SchemeABAB {
t.Errorf("expected rhyme scheme ABAB, got %q", m.RhymeScheme)
}
if m.StanzaCount != 1 {

@@ -76,7 +76,12 @@ func (t *POSTagger) Tag(tokens []Token) ([]string, error) {
// Helper functions for POS tagging

func isCommonNoun(word string) bool {
commonNouns := []string{"time", "person", "year", "way", "day", "thing", "man", "world", "life", "hand", "part", "child", "eye", "woman", "place", "work", "week", "case", "point", "government", "company", "number", "group", "problem", "fact"}
commonNouns := []string{
"time", "person", "year", "way", "day", "thing", "man", "world",
"life", "hand", "part", "child", "eye", "woman", "place", "work",
"week", "case", "point", "government", "company", "number", "group",
"problem", "fact",
}
for _, noun := range commonNouns {
if word == noun {
return true

@@ -87,7 +92,12 @@ func isCommonNoun(word string) bool {

func isVerb(word string) bool {
// Check for common verbs
commonVerbs := []string{"is", "are", "was", "were", "be", "have", "has", "had", "do", "does", "did", "will", "would", "can", "could", "shall", "should", "may", "might", "must", "go", "come", "get", "take", "make", "see", "look", "find", "give", "tell", "think", "say", "know"}
commonVerbs := []string{
"is", "are", "was", "were", "be", "have", "has", "had", "do", "does",
"did", "will", "would", "can", "could", "shall", "should", "may",
"might", "must", "go", "come", "get", "take", "make", "see", "look",
"find", "give", "tell", "think", "say", "know",
}
for _, verb := range commonVerbs {
if word == verb {
return true

@@ -107,7 +117,12 @@ func isVerb(word string) bool {

func isAdjective(word string) bool {
// Check for common adjectives
commonAdjectives := []string{"good", "new", "first", "last", "long", "great", "little", "own", "other", "old", "right", "big", "high", "different", "small", "large", "next", "early", "young", "important", "few", "public", "bad", "same", "able"}
commonAdjectives := []string{
"good", "new", "first", "last", "long", "great", "little", "own",
"other", "old", "right", "big", "high", "different", "small",
"large", "next", "early", "young", "important", "few", "public",
"bad", "same", "able",
}
for _, adj := range commonAdjectives {
if word == adj {
return true

@@ -127,7 +142,11 @@ func isAdjective(word string) bool {

func isAdverb(word string) bool {
// Check for common adverbs
commonAdverbs := []string{"up", "so", "out", "just", "now", "how", "then", "more", "also", "here", "well", "only", "very", "even", "back", "there", "down", "still", "in", "as", "too", "when", "never", "really", "most"}
commonAdverbs := []string{
"up", "so", "out", "just", "now", "how", "then", "more", "also",
"here", "well", "only", "very", "even", "back", "there", "down",
"still", "in", "as", "too", "when", "never", "really", "most",
}
for _, adv := range commonAdverbs {
if word == adv {
return true

@@ -143,7 +162,11 @@ func isAdverb(word string) bool {
}

func isPreposition(word string) bool {
prepositions := []string{"in", "on", "at", "by", "for", "with", "about", "against", "between", "into", "through", "during", "before", "after", "above", "below", "to", "from", "up", "down", "over", "under", "of"}
prepositions := []string{
"in", "on", "at", "by", "for", "with", "about", "against", "between",
"into", "through", "during", "before", "after", "above", "below",
"to", "from", "up", "down", "over", "under", "of",
}
for _, prep := range prepositions {
if word == prep {
return true

@@ -153,7 +176,12 @@ func isPreposition(word string) bool {
}

func isDeterminer(word string) bool {
determiners := []string{"the", "a", "an", "this", "that", "these", "those", "my", "your", "his", "her", "its", "our", "their", "some", "any", "all", "both", "each", "every", "no", "many", "much", "little", "few", "other", "another", "such", "what", "which", "whose"}
determiners := []string{
"the", "a", "an", "this", "that", "these", "those", "my", "your",
"his", "her", "its", "our", "their", "some", "any", "all", "both",
"each", "every", "no", "many", "much", "little", "few", "other",
"another", "such", "what", "which", "whose",
}
for _, det := range determiners {
if word == det {
return true

@@ -163,7 +191,12 @@ func isDeterminer(word string) bool {
}

func isPronoun(word string) bool {
pronouns := []string{"i", "you", "he", "she", "it", "we", "they", "me", "him", "her", "us", "them", "who", "whom", "whose", "which", "what", "whoever", "whatever", "whichever", "myself", "yourself", "himself", "herself", "itself", "ourselves", "themselves"}
pronouns := []string{
"i", "you", "he", "she", "it", "we", "they", "me", "him", "her", "us",
"them", "who", "whom", "whose", "which", "what", "whoever", "whatever",
"whichever", "myself", "yourself", "himself", "herself", "itself",
"ourselves", "themselves",
}
for _, pron := range pronouns {
if word == pron {
return true

@@ -107,6 +107,7 @@ func (a *BasicTextAnalyzer) AnalyzeText(ctx context.Context, text string, langua
}

// AnalyzeTextConcurrently performs text analysis using concurrent processing
//nolint:gocyclo // Concurrency logic is complex
func (a *BasicTextAnalyzer) AnalyzeTextConcurrently(ctx context.Context, text string, language string, concurrency int) (*AnalysisResult, error) {
if text == "" {
return &AnalysisResult{}, nil

@@ -100,7 +100,7 @@ func scanTextStats[T ~string](text T) (words int, sentences int, paragraphs int,
words++
totalWordLen += wordLen
}
return
return words, sentences, paragraphs, totalWordLen
}

// splitTextIntoChunks splits text into chunks for concurrent processing

internal/platform/cache/cache.go (vendored, 4 lines changed)

@@ -54,7 +54,7 @@ type KeyGenerator interface {
ListKey(entityType string, page, pageSize int) string

// QueryKey generates a key for a custom query
QueryKey(entityType string, queryName string, params ...interface{}) string
QueryKey(entityType, queryName string, params ...interface{}) string
}

// DefaultKeyGenerator implements the KeyGenerator interface

@@ -83,7 +83,7 @@ func (g *DefaultKeyGenerator) ListKey(entityType string, page, pageSize int) str
}

// QueryKey generates a key for a custom query
func (g *DefaultKeyGenerator) QueryKey(entityType string, queryName string, params ...interface{}) string {
func (g *DefaultKeyGenerator) QueryKey(entityType, queryName string, params ...interface{}) string {
key := g.Prefix + entityType + ":" + queryName
for _, param := range params {
key += ":" + fmt.Sprintf("%v", param)

internal/platform/cache/cache_test.go (vendored, 110 lines changed)

@@ -1,68 +1,68 @@
package cache

import (
"testing"
"testing"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

func TestDefaultKeyGenerator_DefaultPrefix(t *testing.T) {
g := NewDefaultKeyGenerator("")
require.NotNil(t, g)
// Table-driven tests for key generation
tests := []struct {
name string
entity string
id uint
page int
pageSize int
queryName string
params []interface{}
wantEntity string
wantList string
wantQuery string
}{
{
name: "basic",
entity: "user",
id: 42,
page: 1,
pageSize: 20,
queryName: "byEmail",
params: []interface{}{"foo@bar.com"},
wantEntity: "tercul:user:id:42",
wantList: "tercul:user:list:1:20",
wantQuery: "tercul:user:byEmail:foo@bar.com",
},
{
name: "different entity and multiple params",
entity: "work",
id: 7,
page: 3,
pageSize: 15,
queryName: "search",
params: []interface{}{"abc", 2020, true},
wantEntity: "tercul:work:id:7",
wantList: "tercul:work:list:3:15",
wantQuery: "tercul:work:search:abc:2020:true",
},
}
g := NewDefaultKeyGenerator("")
require.NotNil(t, g)
// Table-driven tests for key generation
tests := []struct {
name string
entity string
id uint
page int
pageSize int
queryName string
params []interface{}
wantEntity string
wantList string
wantQuery string
}{
{
name: "basic",
entity: "user",
id: 42,
page: 1,
pageSize: 20,
queryName: "byEmail",
params: []interface{}{"foo@bar.com"},
wantEntity: "tercul:user:id:42",
wantList: "tercul:user:list:1:20",
wantQuery: "tercul:user:byEmail:foo@bar.com",
},
{
name: "different entity and multiple params",
entity: "work",
id: 7,
page: 3,
pageSize: 15,
queryName: "search",
params: []interface{}{"abc", 2020, true},
wantEntity: "tercul:work:id:7",
wantList: "tercul:work:list:3:15",
wantQuery: "tercul:work:search:abc:2020:true",
},
}

for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
assert.Equal(t, tt.wantEntity, g.EntityKey(tt.entity, tt.id))
assert.Equal(t, tt.wantList, g.ListKey(tt.entity, tt.page, tt.pageSize))
assert.Equal(t, tt.wantQuery, g.QueryKey(tt.entity, tt.queryName, tt.params...))
})
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
assert.Equal(t, tt.wantEntity, g.EntityKey(tt.entity, tt.id))
assert.Equal(t, tt.wantList, g.ListKey(tt.entity, tt.page, tt.pageSize))
assert.Equal(t, tt.wantQuery, g.QueryKey(tt.entity, tt.queryName, tt.params...))
})
}
}

func TestDefaultKeyGenerator_CustomPrefix(t *testing.T) {
g := NewDefaultKeyGenerator("mypfx:")
require.NotNil(t, g)
g := NewDefaultKeyGenerator("mypfx:")
require.NotNil(t, g)

assert.Equal(t, "mypfx:book:id:1", g.EntityKey("book", 1))
assert.Equal(t, "mypfx:book:list:2:10", g.ListKey("book", 2, 10))
assert.Equal(t, "mypfx:book:find:tag:99", g.QueryKey("book", "find", "tag", 99))
assert.Equal(t, "mypfx:book:id:1", g.EntityKey("book", 1))
assert.Equal(t, "mypfx:book:list:2:10", g.ListKey("book", 2, 10))
assert.Equal(t, "mypfx:book:find:tag:99", g.QueryKey("book", "find", "tag", 99))
}

internal/platform/cache/graphql_adapter.go (vendored, new file, 29 lines)

@@ -0,0 +1,29 @@
// Package cache provides cache implementations and adapters.
package cache

import (
"context"
"time"
)

// GraphQLCacheAdapter adapts the RedisCache to the graphql.Cache[string] interface
type GraphQLCacheAdapter struct {
RedisCache *RedisCache
}

// Get looks up a key in the cache
func (a *GraphQLCacheAdapter) Get(ctx context.Context, key string) (string, bool) {
// gqlgen APQ stores strings.
var s string
err := a.RedisCache.Get(ctx, key, &s)
if err != nil {
return "", false
}
return s, true
}

// Add adds a key to the cache
func (a *GraphQLCacheAdapter) Add(ctx context.Context, key, value string) {
// Use default TTL of 24 hours for APQ. The interface does not provide TTL.
_ = a.RedisCache.Set(ctx, key, value, 24*time.Hour)
}

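Two design points in this adapter are worth noting: a Redis error on Get is reported to gqlgen as a plain cache miss ("", false), so APQ degrades to the client re-sending the full query instead of failing the request; and because graphql.Cache[string] exposes no TTL parameter, Add hard-codes a 24-hour expiration on the Redis side.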
internal/platform/cache/redis_cache.go (vendored, 13 lines changed)

@@ -5,9 +5,10 @@ import (
"encoding/json"
"errors"
"fmt"
"time"

"tercul/internal/platform/config"
"tercul/internal/platform/log"
"time"

"github.com/redis/go-redis/v9"
)

@@ -171,7 +172,13 @@ func (c *RedisCache) GetList(ctx context.Context, entityType string, page, pageS
}

// SetList stores a list of entities in the cache
func (c *RedisCache) SetList(ctx context.Context, entityType string, page, pageSize int, value interface{}, expiration time.Duration) error {
func (c *RedisCache) SetList(
ctx context.Context,
entityType string,
page, pageSize int,
value interface{},
expiration time.Duration,
) error {
key := c.keyGenerator.ListKey(entityType, page, pageSize)
return c.Set(ctx, key, value, expiration)
}

@@ -208,4 +215,4 @@ func (c *RedisCache) InvalidateEntityType(ctx context.Context, entityType string
}

return iter.Err()
}
}

internal/platform/cache/redis_cache_test.go (vendored, 5 lines changed)

@@ -6,9 +6,10 @@ import (
"errors"
"fmt"
"testing"
"time"

"tercul/internal/platform/cache"
"tercul/internal/platform/config"
"time"

"github.com/go-redis/redismock/v9"
"github.com/stretchr/testify/assert"

@@ -406,4 +407,4 @@ func TestRedisCache_Clear(t *testing.T) {
assert.Equal(t, "redis error", err.Error())
assert.NoError(t, mock.ExpectationsWereMet())
})
}
}

internal/platform/http/middleware.go (new file, 86 lines)

@@ -0,0 +1,86 @@
// Package http provides HTTP middleware and utilities.
package http

import (
"net/http"
"strings"
)

// CORSMiddleware handles Cross-Origin Resource Sharing
func CORSMiddleware(allowedOrigins []string) func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
origin := r.Header.Get("Origin")
allowed := false

// If no allowed origins configured, allow all (development mode usually)
if len(allowedOrigins) == 0 {
allowed = true
} else {
for _, o := range allowedOrigins {
if o == "*" || o == origin {
allowed = true
break
}
}
}

// Safe default if we want to allow everything
if allowed {
// If origin is present, use it, otherwise *
if origin != "" {
w.Header().Set("Access-Control-Allow-Origin", origin)
} else {
w.Header().Set("Access-Control-Allow-Origin", "*")
}

w.Header().Set("Access-Control-Allow-Methods", "POST, GET, OPTIONS, PUT, DELETE")
w.Header().Set("Access-Control-Allow-Headers",
"Accept, Content-Type, Content-Length, Accept-Encoding, X-CSRF-Token, Authorization, X-Client-ID, X-API-Key")
w.Header().Set("Access-Control-Allow-Credentials", "true")
}

if r.Method == "OPTIONS" {
w.WriteHeader(http.StatusOK)
return
}

next.ServeHTTP(w, r)
})
}
}

// RequestValidationMiddleware performs basic request validation
func RequestValidationMiddleware(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
// Check Content-Type for POST requests to /query
if r.Method == "POST" && r.URL.Path == "/query" {
ct := r.Header.Get("Content-Type")
// GraphQL clients might send application/json; charset=utf-8
if !strings.Contains(ct, "application/json") {
// Some clients might send no content type or something else?
// Strictly enforcing application/json is good for security.
// But we should be careful not to break existing clients if they are sloppy.
// For now, let's enforce it as requested.
http.Error(w, "Content-Type must be application/json", http.StatusUnsupportedMediaType)
return
}
}
next.ServeHTTP(w, r)
})
}

// APIKeyMiddleware checks for X-API-Key header
// This is a placeholder for future external integrations.
// It allows requests with a valid API key to bypass other auth or strictly enforce it.
// Currently it is a pass-through as we don't have defined API keys in config yet.
func APIKeyMiddleware(validAPIKeys []string) func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
// If we had keys, we would check them here.
// apiKey := r.Header.Get("X-API-Key")
// validate(apiKey)
next.ServeHTTP(w, r)
})
}
}

@@ -1,12 +1,14 @@
// Package http provides HTTP middleware and utilities.
package http

import (
"fmt"
"net/http"
"sync"
"time"

"tercul/internal/platform/config"
"tercul/internal/platform/log"
"time"
)

// Canonical token bucket implementation for strict burst/rate enforcement

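The comment above describes the rate limiter as a canonical token bucket with strict burst/rate enforcement, which is also what the test below exercises (a burst of 3 requests, then roughly 2 more per refill window). As an illustration of the idea only, not the repository's implementation, a minimal token bucket looks like this:

// Illustrative token bucket (not the repository's implementation): capacity
// bounds the burst, refill rate bounds the sustained request rate.
package main

import (
	"fmt"
	"sync"
	"time"
)

type tokenBucket struct {
	mu       sync.Mutex
	tokens   float64
	capacity float64 // burst size
	rate     float64 // tokens added per second
	last     time.Time
}

func newTokenBucket(capacity, ratePerSec float64) *tokenBucket {
	return &tokenBucket{tokens: capacity, capacity: capacity, rate: ratePerSec, last: time.Now()}
}

// Allow refills based on elapsed time, then spends one token if available.
func (b *tokenBucket) Allow() bool {
	b.mu.Lock()
	defer b.mu.Unlock()
	now := time.Now()
	b.tokens += now.Sub(b.last).Seconds() * b.rate
	if b.tokens > b.capacity {
		b.tokens = b.capacity
	}
	b.last = now
	if b.tokens >= 1 {
		b.tokens--
		return true
	}
	return false
}

func main() {
	b := newTokenBucket(3, 2) // burst of 3, then ~2 requests/second
	for i := 0; i < 5; i++ {
		fmt.Println("request", i, "allowed:", b.Allow())
	}
}
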
@@ -3,11 +3,12 @@ package http_test
import (
"net/http"
"net/http/httptest"
"tercul/internal/platform/config"
platformhttp "tercul/internal/platform/http"
"testing"
"time"

"tercul/internal/platform/config"
platformhttp "tercul/internal/platform/http"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/suite"
)

@@ -95,7 +96,7 @@ func (s *RateLimiterSuite) TestRateLimiterMiddleware() {
staticID := "test-client-id"
// Test that the first 3 requests are allowed (burst)
for i := 0; i < 3; i++ {
req, _ := http.NewRequest("GET", server.URL, nil)
req, _ := http.NewRequest("GET", server.URL, http.NoBody)
req.Header.Set("X-Client-ID", staticID)
resp, err := client.Do(req)
s.Require().NoError(err)

@@ -104,7 +105,7 @@ func (s *RateLimiterSuite) TestRateLimiterMiddleware() {
}

// Test that the 4th request is not allowed (burst exceeded)
req, _ := http.NewRequest("GET", server.URL, nil)
req, _ := http.NewRequest("GET", server.URL, http.NoBody)
req.Header.Set("X-Client-ID", staticID)
resp, err := client.Do(req)
s.Require().NoError(err)

@@ -116,7 +117,7 @@ func (s *RateLimiterSuite) TestRateLimiterMiddleware() {

// Test that the next 2 requests are allowed (rate)
for i := 0; i < 2; i++ {
req, _ := http.NewRequest("GET", server.URL, nil)
req, _ := http.NewRequest("GET", server.URL, http.NoBody)
req.Header.Set("X-Client-ID", staticID)
resp, err := client.Do(req)
s.Require().NoError(err)

@@ -125,7 +126,7 @@ func (s *RateLimiterSuite) TestRateLimiterMiddleware() {
}

// Test that the 3rd request after wait is not allowed (rate exceeded)
req, _ = http.NewRequest("GET", server.URL, nil)
req, _ = http.NewRequest("GET", server.URL, http.NoBody)
req.Header.Set("X-Client-ID", staticID)
resp, err = client.Do(req)
s.Require().NoError(err)

internal/platform/search/doc.go (new file, 2 lines)

@@ -0,0 +1,2 @@
// Package search provides search functionality using Weaviate and Bleve.
package search

@@ -115,7 +115,10 @@ func (w *weaviateWrapper) searchWorks(ctx context.Context, params *domainsearch.
return w.parseGraphQLResponse(resp, "Work")
}

func (w *weaviateWrapper) searchTranslations(ctx context.Context, params *domainsearch.SearchParams) ([]domainsearch.SearchResultItem, error) {
func (w *weaviateWrapper) searchTranslations(
ctx context.Context,
params *domainsearch.SearchParams,
) ([]domainsearch.SearchResultItem, error) {
fields := []graphql.Field{
{Name: "db_id"}, {Name: "title"}, {Name: "content"}, {Name: "language"}, {Name: "status"},
{Name: "_additional", Fields: []graphql.Field{{Name: "score"}}},

@@ -147,7 +150,12 @@ func (w *weaviateWrapper) searchAuthors(ctx context.Context, params *domainsearc
return w.parseGraphQLResponse(resp, "Author")
}

func (w *weaviateWrapper) addSearchArguments(searcher *graphql.GetBuilder, params *domainsearch.SearchParams, className string, searchFields []string) {
func (w *weaviateWrapper) addSearchArguments(
searcher *graphql.GetBuilder,
params *domainsearch.SearchParams,
className string,
searchFields []string,
) {
if params.Query != "" || len(params.Concepts) > 0 {
switch params.Mode {
case domainsearch.SearchModeBM25:

@@ -265,6 +273,7 @@ type authorWithDBID struct {
Biography string `json:"biography"`
}

//nolint:gocyclo // Complex parsing logic
func (w *weaviateWrapper) parseGraphQLResponse(resp *models.GraphQLResponse, className string) ([]domainsearch.SearchResultItem, error) {
var results []domainsearch.SearchResultItem

@@ -311,6 +320,7 @@ func (w *weaviateWrapper) parseGraphQLResponse(resp *models.GraphQLResponse, cla
if err != nil {
return nil, fmt.Errorf("failed to parse work db_id: %w", err)
}
//nolint:gosec // G115: ID from DB is always positive and fits in uint
tempWork.Work.ID = uint(id)

// Convert []string to []*domain.Tag

@@ -330,6 +340,7 @@ func (w *weaviateWrapper) parseGraphQLResponse(resp *models.GraphQLResponse, cla
if err != nil {
return nil, fmt.Errorf("failed to parse translation db_id: %w", err)
}
//nolint:gosec // G115: ID from DB is always positive and fits in uint
translation.Translation.ID = uint(id)
entity = translation.Translation
case "Author":

@@ -341,6 +352,7 @@ func (w *weaviateWrapper) parseGraphQLResponse(resp *models.GraphQLResponse, cla
if err != nil {
return nil, fmt.Errorf("failed to parse author db_id: %w", err)
}
//nolint:gosec // G115: ID from DB is always positive and fits in uint
author.Author.ID = uint(id)
entity = author.Author
default: