Mirror of https://github.com/SamyRai/tercul-backend.git, synced 2025-12-27 05:11:34 +00:00
This commit marks the completion of a major refactoring effort to stabilize the codebase, improve its structure, and prepare it for production. The key changes include:

- **Domain Layer Consolidation:** The `Work` entity and its related types, along with all other domain entities and repository interfaces, have been consolidated into the main `internal/domain` package. This eliminates import cycles and provides a single, coherent source of truth for the domain model.
- **Data Access Layer Refactoring:** The repository implementations in `internal/data/sql` have been updated to align with the new domain layer. The `BaseRepositoryImpl` has been corrected to use pointer receivers, and all concrete repositories now embed it, ensuring consistent behavior (a hypothetical sketch of this pattern follows the list).
- **Application Layer Stabilization:** All application services in `internal/app` have been updated to use the new domain types and repository interfaces. Dependency injection has been corrected throughout the application, so every service is initialized with the correct dependencies.
- **GraphQL Adapter Fixes:** The GraphQL resolver implementation in `internal/adapters/graphql` has been updated to handle the new domain types and service methods. The auto-generated GraphQL code has been regenerated so it stays in sync with the schema and runtime.
- **Test Suite Overhaul:** All test suites have been fixed to correctly implement their respective interfaces and use the updated domain model. Mock repositories and test suites now properly embed the `testify` base types, resolving numerous build and linter errors (see the second sketch after the list).
- **Dependency Management:** The Go modules have been tidied and the module cache cleaned to ensure a consistent, correct dependency graph.
- **Code Quality and Verification:** The entire codebase now passes all builds, tests, and linter checks.

This comprehensive effort has resulted in a more robust, maintainable, and production-ready application.
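As illustration for the data-access bullet above, here is a minimal sketch of the pointer-receiver-plus-embedding pattern the commit describes. Only `BaseRepositoryImpl`, the `internal/data/sql` location, and `domain.Work` come from the commit and the file below; `WorkRepository`, the `DB` helper, and `ListWorkIDs` are hypothetical illustrations, not the project's actual code.

```go
package sql

import (
	"context"

	"gorm.io/gorm"

	"tercul/internal/domain"
)

// BaseRepositoryImpl holds the shared *gorm.DB handle. Its methods use
// pointer receivers, so every embedding repository operates on this one
// value rather than on a copy.
type BaseRepositoryImpl struct {
	db *gorm.DB
}

// DB returns the handle scoped to the caller's context (hypothetical helper).
func (r *BaseRepositoryImpl) DB(ctx context.Context) *gorm.DB {
	return r.db.WithContext(ctx)
}

// WorkRepository embeds the base implementation by pointer and layers
// Work-specific queries on top of it.
type WorkRepository struct {
	*BaseRepositoryImpl
}

// ListWorkIDs is a hypothetical query method reusing the embedded handle.
func (r *WorkRepository) ListWorkIDs(ctx context.Context) ([]uint, error) {
	var ids []uint
	err := r.DB(ctx).Model(&domain.Work{}).Pluck("id", &ids).Error
	return ids, err
}
```

And for the test-suite bullet, a sketch of the `testify` embedding fix, assuming a mock repository with a single hypothetical method; embedding `mock.Mock` and `suite.Suite` is what makes the expectation and suite machinery work:

```go
package app_test

import (
	"testing"

	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/suite"
)

// MockWorkRepository embeds mock.Mock so On, Called, and AssertExpectations work.
type MockWorkRepository struct {
	mock.Mock
}

// ListWorkIDs is a hypothetical repository method recorded by the mock.
func (m *MockWorkRepository) ListWorkIDs() ([]uint, error) {
	args := m.Called()
	return args.Get(0).([]uint), args.Error(1)
}

// WorkRepositorySuite embeds suite.Suite, which suite.Run requires.
type WorkRepositorySuite struct {
	suite.Suite
	repo *MockWorkRepository
}

func (s *WorkRepositorySuite) SetupTest() {
	s.repo = new(MockWorkRepository)
}

func (s *WorkRepositorySuite) TestListWorkIDs() {
	s.repo.On("ListWorkIDs").Return([]uint{1, 2}, nil)

	ids, err := s.repo.ListWorkIDs()

	s.NoError(err)
	s.Equal([]uint{1, 2}, ids)
	s.repo.AssertExpectations(s.T())
}

func TestWorkRepositorySuite(t *testing.T) {
	suite.Run(t, new(WorkRepositorySuite))
}
```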
113 lines
3.2 KiB
Go
package linguistics

import (
	"context"
	"encoding/json"
	"fmt"
	"log"
	"time"

	"github.com/hibiken/asynq"
	"gorm.io/gorm"

	"tercul/internal/domain"
)

const (
	TaskLinguisticAnalysis = "analysis:linguistic"
)

// LinguisticSyncJob manages the linguistic analysis sync process.
type LinguisticSyncJob struct {
	DB       *gorm.DB
	Analyzer Analyzer
	Client   *asynq.Client
}

// NewLinguisticSyncJob creates a new LinguisticSyncJob.
func NewLinguisticSyncJob(db *gorm.DB, analyzer Analyzer, client *asynq.Client) *LinguisticSyncJob {
	return &LinguisticSyncJob{
		DB:       db,
		Analyzer: analyzer,
		Client:   client,
	}
}

// AnalysisPayload contains data for the linguistic analysis task.
type AnalysisPayload struct {
	WorkID uint `json:"work_id"`
}

// EnqueueAnalysisForWork enqueues a linguistic analysis task for a specific work.
func EnqueueAnalysisForWork(client *asynq.Client, workID uint) error {
	payload := AnalysisPayload{WorkID: workID}
	data, err := json.Marshal(payload)
	if err != nil {
		return err
	}

	task := asynq.NewTask(TaskLinguisticAnalysis, data)
	_, err = client.Enqueue(task, asynq.ProcessIn(5*time.Second))
	if err != nil {
		return err
	}

	log.Printf("Enqueued linguistic analysis task for work ID %d", workID)
	return nil
}

// EnqueueAnalysisForAllWorks enqueues linguistic analysis tasks for all works.
func (j *LinguisticSyncJob) EnqueueAnalysisForAllWorks() error {
	log.Println("Enqueueing linguistic analysis jobs for all works...")

	var workIDs []uint
	if err := j.DB.Model(&domain.Work{}).Pluck("id", &workIDs).Error; err != nil {
		return fmt.Errorf("error fetching work IDs: %w", err)
	}

	for _, workID := range workIDs {
		if err := EnqueueAnalysisForWork(j.Client, workID); err != nil {
			log.Printf("Error enqueueing linguistic analysis for work ID %d: %v", workID, err)
		} else {
			log.Printf("Enqueued linguistic analysis for work ID %d", workID)
		}
	}

	log.Println("Linguistic analysis jobs enqueued successfully.")
	return nil
}

// HandleLinguisticAnalysis handles the linguistic analysis task.
func (j *LinguisticSyncJob) HandleLinguisticAnalysis(ctx context.Context, t *asynq.Task) error {
	var payload AnalysisPayload
	if err := json.Unmarshal(t.Payload(), &payload); err != nil {
		return fmt.Errorf("failed to unmarshal linguistic analysis payload: %w", err)
	}

	log.Printf("Processing linguistic analysis for work ID %d", payload.WorkID)

	// Check whether an analysis already exists for this work.
	var count int64
	if err := j.DB.Model(&domain.LanguageAnalysis{}).Where("work_id = ?", payload.WorkID).Count(&count).Error; err != nil {
		return fmt.Errorf("error checking existing analysis: %w", err)
	}

	// Skip if an analysis already exists.
	if count > 0 {
		log.Printf("Linguistic analysis already exists for work ID %d, skipping", payload.WorkID)
		return nil
	}

	// Perform the analysis.
	if err := j.Analyzer.AnalyzeWork(ctx, payload.WorkID); err != nil {
		return fmt.Errorf("error analyzing work ID %d: %w", payload.WorkID, err)
	}

	log.Printf("Completed linguistic analysis for work ID %d", payload.WorkID)
	return nil
}

// RegisterLinguisticHandlers registers the linguistic analysis task handlers.
func RegisterLinguisticHandlers(mux *asynq.ServeMux, job *LinguisticSyncJob) {
	mux.HandleFunc(TaskLinguisticAnalysis, job.HandleLinguisticAnalysis)
}
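For orientation, a minimal sketch of how a worker binary might wire this file together. The import path `tercul/internal/linguistics`, the Redis and Postgres addresses, and the `noopAnalyzer` stand-in (which assumes `Analyzer` requires only the `AnalyzeWork` method used above) are all placeholders; only `NewLinguisticSyncJob`, `EnqueueAnalysisForAllWorks`, `RegisterLinguisticHandlers`, and the `asynq`/`gorm` calls come from the file itself.

```go
package main

import (
	"context"
	"log"

	"github.com/hibiken/asynq"
	"gorm.io/driver/postgres"
	"gorm.io/gorm"

	"tercul/internal/linguistics" // assumed import path for this package
)

// noopAnalyzer stands in for the real Analyzer implementation, assuming the
// interface needs only the AnalyzeWork method used in this file.
type noopAnalyzer struct{}

func (noopAnalyzer) AnalyzeWork(ctx context.Context, workID uint) error { return nil }

func main() {
	redis := asynq.RedisClientOpt{Addr: "localhost:6379"} // placeholder address

	db, err := gorm.Open(postgres.Open("host=localhost dbname=tercul"), &gorm.Config{})
	if err != nil {
		log.Fatalf("open database: %v", err)
	}

	client := asynq.NewClient(redis)
	defer client.Close()

	job := linguistics.NewLinguisticSyncJob(db, noopAnalyzer{}, client)

	// Enqueue analysis tasks for every work, then serve the handler.
	if err := job.EnqueueAnalysisForAllWorks(); err != nil {
		log.Printf("enqueue all works: %v", err)
	}

	mux := asynq.NewServeMux()
	linguistics.RegisterLinguisticHandlers(mux, job)

	srv := asynq.NewServer(redis, asynq.Config{Concurrency: 10})
	if err := srv.Run(mux); err != nil {
		log.Fatalf("asynq server: %v", err)
	}
}
```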