mirror of
https://github.com/SamyRai/tercul-backend.git
synced 2025-12-27 05:11:34 +00:00
This commit introduces a new application layer to the codebase, which decouples the GraphQL resolvers from the data layer. The resolvers now call application services, which in turn call the repositories. This change improves the separation of concerns and makes the code more testable and maintainable. Additionally, this commit introduces dataloaders to solve the N+1 problem in the GraphQL resolvers. The dataloaders are used to batch and cache database queries, which significantly improves the performance of the API. The following changes were made: - Created application services for most of the domains. - Refactored the GraphQL resolvers to use the new application services. - Implemented dataloaders for the `Author` aggregate. - Updated the `app.Application` struct to hold the application services instead of the repositories. - Fixed a large number of compilation errors in the test files that arose from these changes. There are still some compilation errors in the `internal/adapters/graphql/integration_test.go` file. These errors are due to the test files still trying to access the repositories directly from the `app.Application` struct. The remaining work is to update these tests to use the new application services.
186 lines
4.7 KiB
Go
package sql
|
|
|
|
import (
|
|
"context"
|
|
"tercul/internal/domain"
|
|
|
|
"gorm.io/gorm"
|
|
)
|
|
|
|
// workRepository is the GORM-backed implementation of domain.WorkRepository.
// It embeds the generic BaseRepository for the standard CRUD operations and
// keeps its own *gorm.DB handle for the work-specific queries defined below.
type workRepository struct {
	domain.BaseRepository[domain.Work]
	db *gorm.DB
}
|
|
|
|
// NewWorkRepository creates a new WorkRepository.
|
|
func NewWorkRepository(db *gorm.DB) domain.WorkRepository {
|
|
return &workRepository{
|
|
BaseRepository: NewBaseRepositoryImpl[domain.Work](db),
|
|
db: db,
|
|
}
|
|
}
|
|
|
|
// FindByTitle finds works by title (partial match)
|
|
func (r *workRepository) FindByTitle(ctx context.Context, title string) ([]domain.Work, error) {
|
|
var works []domain.Work
|
|
if err := r.db.WithContext(ctx).Where("title LIKE ?", "%"+title+"%").Find(&works).Error; err != nil {
|
|
return nil, err
|
|
}
|
|
return works, nil
|
|
}
|
|
|
|
// FindByAuthor finds works by author ID
|
|
func (r *workRepository) FindByAuthor(ctx context.Context, authorID uint) ([]domain.Work, error) {
|
|
var works []domain.Work
|
|
if err := r.db.WithContext(ctx).Joins("JOIN work_authors ON work_authors.work_id = works.id").
|
|
Where("work_authors.author_id = ?", authorID).
|
|
Find(&works).Error; err != nil {
|
|
return nil, err
|
|
}
|
|
return works, nil
|
|
}
|
|
|
|
// FindByCategory finds works by category ID
|
|
func (r *workRepository) FindByCategory(ctx context.Context, categoryID uint) ([]domain.Work, error) {
|
|
var works []domain.Work
|
|
if err := r.db.WithContext(ctx).Joins("JOIN work_categories ON work_categories.work_id = works.id").
|
|
Where("work_categories.category_id = ?", categoryID).
|
|
Find(&works).Error; err != nil {
|
|
return nil, err
|
|
}
|
|
return works, nil
|
|
}
|
|
|
|
// FindByLanguage finds works by language with pagination
|
|
func (r *workRepository) FindByLanguage(ctx context.Context, language string, page, pageSize int) (*domain.PaginatedResult[domain.Work], error) {
|
|
if page < 1 {
|
|
page = 1
|
|
}
|
|
|
|
if pageSize < 1 {
|
|
pageSize = 20
|
|
}
|
|
|
|
var works []domain.Work
|
|
var totalCount int64
|
|
|
|
// Get total count
|
|
if err := r.db.WithContext(ctx).Model(&domain.Work{}).Where("language = ?", language).Count(&totalCount).Error; err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
// Calculate offset
|
|
offset := (page - 1) * pageSize
|
|
|
|
// Get paginated data
|
|
if err := r.db.WithContext(ctx).Where("language = ?", language).
|
|
Offset(offset).Limit(pageSize).
|
|
Find(&works).Error; err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
// Calculate total pages
|
|
totalPages := int(totalCount) / pageSize
|
|
if int(totalCount)%pageSize > 0 {
|
|
totalPages++
|
|
}
|
|
|
|
hasNext := page < totalPages
|
|
hasPrev := page > 1
|
|
|
|
return &domain.PaginatedResult[domain.Work]{
|
|
Items: works,
|
|
TotalCount: totalCount,
|
|
Page: page,
|
|
PageSize: pageSize,
|
|
TotalPages: totalPages,
|
|
HasNext: hasNext,
|
|
HasPrev: hasPrev,
|
|
}, nil
|
|
}
|
|
|
|
// GetByIDs finds works by a list of IDs
|
|
func (r *workRepository) GetByIDs(ctx context.Context, ids []uint) ([]domain.Work, error) {
|
|
var works []domain.Work
|
|
if err := r.db.WithContext(ctx).Where("id IN (?)", ids).Find(&works).Error; err != nil {
|
|
return nil, err
|
|
}
|
|
return works, nil
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Delete removes a work and its associations inside a single transaction;
// gorm rolls the transaction back if any step returns an error.
func (r *workRepository) Delete(ctx context.Context, id uint) error {
	return r.db.WithContext(ctx).Transaction(func(tx *gorm.DB) error {
		// Select(...) tells gorm to also delete the listed associations
		// (join-table rows / dependent records) together with the work.
		// NOTE(review): with Select, this Delete already targets the work
		// row itself as well — confirm whether the second Delete below is
		// redundant, or deliberately kept (e.g. for soft-delete semantics).
		if err := tx.Select("Copyrights", "Monetizations", "Authors", "Tags", "Categories").Delete(&domain.Work{TranslatableModel: domain.TranslatableModel{BaseModel: domain.BaseModel{ID: id}}}).Error; err != nil {
			return err
		}
		// Also delete the work itself
		if err := tx.Delete(&domain.Work{}, id).Error; err != nil {
			return err
		}
		return nil
	})
}
|
|
|
|
// GetWithTranslations gets a work with its translations
|
|
func (r *workRepository) GetWithTranslations(ctx context.Context, id uint) (*domain.Work, error) {
|
|
return r.FindWithPreload(ctx, []string{"Translations"}, id)
|
|
}
|
|
|
|
// ListWithTranslations lists works with their translations
|
|
func (r *workRepository) ListWithTranslations(ctx context.Context, page, pageSize int) (*domain.PaginatedResult[domain.Work], error) {
|
|
if page < 1 {
|
|
page = 1
|
|
}
|
|
|
|
if pageSize < 1 {
|
|
pageSize = 20
|
|
}
|
|
|
|
var works []domain.Work
|
|
var totalCount int64
|
|
|
|
// Get total count
|
|
if err := r.db.WithContext(ctx).Model(&domain.Work{}).Count(&totalCount).Error; err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
// Calculate offset
|
|
offset := (page - 1) * pageSize
|
|
|
|
// Get paginated data with preloaded translations
|
|
if err := r.db.WithContext(ctx).Preload("Translations").
|
|
Offset(offset).Limit(pageSize).
|
|
Find(&works).Error; err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
// Calculate total pages
|
|
totalPages := int(totalCount) / pageSize
|
|
if int(totalCount)%pageSize > 0 {
|
|
totalPages++
|
|
}
|
|
|
|
hasNext := page < totalPages
|
|
hasPrev := page > 1
|
|
|
|
return &domain.PaginatedResult[domain.Work]{
|
|
Items: works,
|
|
TotalCount: totalCount,
|
|
Page: page,
|
|
PageSize: pageSize,
|
|
TotalPages: totalPages,
|
|
HasNext: hasNext,
|
|
HasPrev: hasPrev,
|
|
}, nil
|
|
}
|