Mirror of https://github.com/idanoo/autobrr, synced 2025-07-22 16:29:12 +00:00
feat(filters): skip duplicates (#1711)
* feat(filters): skip duplicates
* fix: add interface instead of any
* fix(filters): tonullint
* feat(filters): skip dupes check month day
* chore: cleanup
* feat(db): set autoincrement id
* feat(filters): add repack and proper to dupe profile
* feat(filters): add default dupe profiles
* feat(duplicates): check audio and website
* feat(duplicates): update tests
* feat(duplicates): add toggles on addform
* feat(duplicates): fix sqlite upgrade path and initialize duplicate profiles
* feat(duplicates): simplify sqlite upgrade avoiding temp table and unwieldy select. Besides, FK constraints are turned off anyway in #229.
* feat(duplicates): change CheckIsDuplicateRelease treatment of PROPER and REPACK

  "Proper" and "Repack" are not parallel to the other conditions like "Title", so they do not belong as dedup conditions. "PROPER" means there was an issue in the previous release, and so a PROPER is never a duplicate, even if it replaces another PROPER. Similarly, "REPACK" means there was an issue in the previous release by that group, and so it is a duplicate only if we previously took a release from a DIFFERENT group. I have not removed Proper and Repack from the UI or the schema yet.

* feat(duplicates): update postgres schema to match sqlite
* feat(duplicates): fix web build errors
* feat(duplicates): fix postgres errors
* feat(filters): do leftjoin for duplicate profile
* fix(filters): partial update dupe profile
* go fmt `internal/domain/filter.go`
* feat(duplicates): restore straightforward logic for proper/repack
* feat(duplicates): remove mostly duplicate TV duplicate profiles

  Having one profile seems the cleanest. If somebody wants multiple resolutions then they can add Resolution to the duplicate profile. Tested this profile with both weekly episodic releases and daily show releases.

* feat(release): add db indexes and sub_title
* feat(release): add IsDuplicate tests
* feat(release): update action handler
* feat(release): add more tests for skip duplicates
* feat(duplicates): check audio
* feat(duplicates): add more tests
* feat(duplicates): match edition cut and more
* fix(duplicates): tests
* fix(duplicates): missing imports
* fix(duplicates): tests
* feat(duplicates): handle sub_title edition and language in ui
* fix(duplicates): tests
* feat(duplicates): check name against normalized hash
* fix(duplicates): tests
* chore: update .gitignore to ignore .pnpm-store
* fix: tests
* fix(filters): tests
* fix: bad conflict merge
* fix: update release type in test
* fix: use vendored hot-toast
* fix: release_test.go
* fix: rss_test.go
* feat(duplicates): improve title hashing for unique check
* feat(duplicates): further improve title hashing for unique check with lang
* feat(duplicates): fix tests
* feat(duplicates): add macros IsDuplicate and DuplicateProfile ID and name
* feat(duplicates): add normalized hash match option
* fix: headlessui-state prop warning
* fix(duplicates): add missing year in daily ep normalize
* fix(duplicates): check rejections len

---------

Co-authored-by: ze0s <ze0s@riseup.net>
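Since the message above packs the reasoning about duplicate handling into prose, here is a small, self-contained Go sketch of two of the ideas it describes: the PROPER/REPACK rule for CheckIsDuplicateRelease and the idea of matching release names on a normalized hash. Everything below (the Release struct, isDuplicateOf, normalizedHash, and the normalization rules) is an illustrative simplification written for this log, not autobrr's actual types or implementation.

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"strings"
	"unicode"
)

// Release is a pared-down stand-in for autobrr's release type (illustration only).
type Release struct {
	Title  string
	Group  string
	Proper bool
	Repack bool
}

// isDuplicateOf assumes prev already matched candidate on the fields enabled in the
// duplicate profile, and only layers the PROPER/REPACK rule described above on top.
func isDuplicateOf(prev, candidate Release) bool {
	if candidate.Proper {
		// A PROPER fixes a problem in the earlier release, so it is never a duplicate.
		return false
	}
	if candidate.Repack {
		// A REPACK fixes a problem in the earlier release by the same group, so it only
		// counts as a duplicate when the release we already took came from a different group.
		return prev.Group != candidate.Group
	}
	// Otherwise the profile match alone decides: skip it as a duplicate.
	return true
}

// normalizedHash shows one way a "normalized hash" of a release name could be built:
// lowercase, keep only letters and digits, then hash. The actual normalization in this
// PR also folds in fields such as year, episode and language, which is not shown here.
func normalizedHash(name string) string {
	var b strings.Builder
	for _, r := range strings.ToLower(name) {
		if unicode.IsLetter(r) || unicode.IsDigit(r) {
			b.WriteRune(r)
		}
	}
	sum := sha256.Sum256([]byte(b.String()))
	return hex.EncodeToString(sum[:])
}

func main() {
	prev := Release{Title: "That Show S01E02", Group: "GROUPA"}

	fmt.Println(isDuplicateOf(prev, Release{Group: "GROUPB"}))               // true: plain duplicate
	fmt.Println(isDuplicateOf(prev, Release{Proper: true, Group: "GROUPB"})) // false: a PROPER is never a duplicate
	fmt.Println(isDuplicateOf(prev, Release{Repack: true, Group: "GROUPA"})) // false: repack of the release we took
	fmt.Println(isDuplicateOf(prev, Release{Repack: true, Group: "GROUPB"})) // true: we took a different group's release

	// Dots, dashes and case differences hash to the same value.
	fmt.Println(normalizedHash("That Show S01E02 720p WEB h264-GROUPA") ==
		normalizedHash("That.Show.S01E02.720p.WEB.h264-GROUPA")) // true
}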
This commit is contained in:
parent d153ac44b8
commit 4009554d10
49 changed files with 3792 additions and 743 deletions
.gitignore (vendored): 1 change
@@ -29,6 +29,7 @@ node_modules/
 !.yarn/versions
 # Ditto for yarn, except we're using pnpm
 yarn.lock
+.pnpm-store
 # If needed, package-lock.json shall be added manually using an explicit git add command
 package-lock.json
 
@@ -21,7 +21,7 @@ import (
 )
 
 type Service interface {
-    Store(ctx context.Context, action domain.Action) (*domain.Action, error)
+    Store(ctx context.Context, action *domain.Action) error
     StoreFilterActions(ctx context.Context, filterID int64, actions []*domain.Action) ([]*domain.Action, error)
     List(ctx context.Context) ([]domain.Action, error)
     Get(ctx context.Context, req *domain.GetActionRequest) (*domain.Action, error)
@@ -63,7 +63,7 @@ func NewService(log logger.Logger, repo domain.ActionRepo, clientSvc download_cl
     return s
 }
 
-func (s *service) Store(ctx context.Context, action domain.Action) (*domain.Action, error) {
+func (s *service) Store(ctx context.Context, action *domain.Action) error {
     return s.repo.Store(ctx, action)
 }
 
@@ -706,7 +706,7 @@ func (r *ActionRepo) DeleteByFilterID(ctx context.Context, filterID int) error {
     return nil
 }
 
-func (r *ActionRepo) Store(ctx context.Context, action domain.Action) (*domain.Action, error) {
+func (r *ActionRepo) Store(ctx context.Context, action *domain.Action) error {
     queryBuilder := r.db.squirrel.
         Insert("action").
         Columns(
@@ -783,14 +783,14 @@ func (r *ActionRepo) Store(ctx context.Context, action domain.Action) (*domain.A
     var retID int64
 
     if err := queryBuilder.QueryRowContext(ctx).Scan(&retID); err != nil {
-        return nil, errors.Wrap(err, "error executing query")
+        return errors.Wrap(err, "error executing query")
     }
 
     action.ID = int(retID)
 
     r.log.Debug().Msgf("action.store: added new %d", retID)
 
-    return &action, nil
+    return nil
 }
 
 func (r *ActionRepo) Update(ctx context.Context, action domain.Action) (*domain.Action, error) {
@@ -16,8 +16,8 @@ import (
     "github.com/stretchr/testify/assert"
 )
 
-func getMockAction() domain.Action {
-    return domain.Action{
+func getMockAction() *domain.Action {
+    return &domain.Action{
         Name: "randomAction",
         Type: domain.ActionTypeTest,
         Enabled: true,
@@ -78,29 +78,29 @@ func TestActionRepo_Store(t *testing.T) {
         mockData.FilterID = createdFilters[0].ID
 
         // Actual test for Store
-        createdAction, err := repo.Store(context.Background(), mockData)
+        err = repo.Store(context.Background(), mockData)
         assert.NoError(t, err)
-        assert.NotNil(t, createdAction)
+        assert.NotNil(t, mockData)
 
         // Cleanup
-        _ = repo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: createdAction.ID})
+        _ = repo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: mockData.ID})
         _ = filterRepo.Delete(context.Background(), createdFilters[0].ID)
         _ = downloadClientRepo.Delete(context.Background(), mock.ID)
     })
 
     t.Run(fmt.Sprintf("Store_Succeeds_With_Missing_or_empty_fields [%s]", dbType), func(t *testing.T) {
-        mockData := domain.Action{}
-        createdAction, err := repo.Store(context.Background(), mockData)
+        mockData := &domain.Action{}
+        err := repo.Store(context.Background(), mockData)
         assert.NoError(t, err)
 
         // Cleanup
-        _ = repo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: createdAction.ID})
+        _ = repo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: mockData.ID})
     })
 
     t.Run(fmt.Sprintf("Store_Fails_With_Invalid_ClientID [%s]", dbType), func(t *testing.T) {
         mockData := getMockAction()
         mockData.ClientID = 9999
-        _, err := repo.Store(context.Background(), mockData)
+        err := repo.Store(context.Background(), mockData)
         assert.Error(t, err)
     })
 
@@ -110,7 +110,7 @@ func TestActionRepo_Store(t *testing.T) {
         ctx, cancel := context.WithTimeout(context.Background(), 1*time.Nanosecond)
         defer cancel()
 
-        _, err := repo.Store(ctx, mockData)
+        err := repo.Store(ctx, mockData)
         assert.Error(t, err)
     })
 }
@@ -142,7 +142,7 @@ func TestActionRepo_StoreFilterActions(t *testing.T) {
         mockData.FilterID = createdFilters[0].ID
 
         // Actual test for StoreFilterActions
-        createdActions, err := repo.StoreFilterActions(context.Background(), int64(createdFilters[0].ID), []*domain.Action{&mockData})
+        createdActions, err := repo.StoreFilterActions(context.Background(), int64(createdFilters[0].ID), []*domain.Action{mockData})
 
         assert.NoError(t, err)
         assert.NotNil(t, createdActions)
@@ -154,7 +154,7 @@ func TestActionRepo_StoreFilterActions(t *testing.T) {
     })
 
     t.Run(fmt.Sprintf("StoreFilterActions_Fails_Invalid_FilterID [%s]", dbType), func(t *testing.T) {
-        _, err := repo.StoreFilterActions(context.Background(), 9999, []*domain.Action{&mockData})
+        _, err := repo.StoreFilterActions(context.Background(), 9999, []*domain.Action{mockData})
         assert.NoError(t, err)
     })
 
@@ -186,7 +186,7 @@ func TestActionRepo_StoreFilterActions(t *testing.T) {
         assert.NoError(t, err)
         assert.NotNil(t, createdFilters)
 
-        _, err = repo.StoreFilterActions(ctx, int64(createdFilters[0].ID), []*domain.Action{&mockData})
+        _, err = repo.StoreFilterActions(ctx, int64(createdFilters[0].ID), []*domain.Action{mockData})
         assert.Error(t, err)
 
         // Cleanup
@@ -219,7 +219,7 @@ func TestActionRepo_FindByFilterID(t *testing.T) {
 
         mockData.ClientID = mock.ID
         mockData.FilterID = createdFilters[0].ID
-        createdActions, err := repo.StoreFilterActions(context.Background(), int64(createdFilters[0].ID), []*domain.Action{&mockData})
+        createdActions, err := repo.StoreFilterActions(context.Background(), int64(createdFilters[0].ID), []*domain.Action{mockData})
         assert.NoError(t, err)
 
         // Actual test for FindByFilterID
@@ -294,7 +294,7 @@ func TestActionRepo_List(t *testing.T) {
 
         mockData.ClientID = mock.ID
         mockData.FilterID = createdFilters[0].ID
-        createdActions, err := repo.StoreFilterActions(context.Background(), int64(createdFilters[0].ID), []*domain.Action{&mockData})
+        createdActions, err := repo.StoreFilterActions(context.Background(), int64(createdFilters[0].ID), []*domain.Action{mockData})
         assert.NoError(t, err)
 
         // Actual test for List
@@ -344,7 +344,7 @@ func TestActionRepo_Get(t *testing.T) {
 
         mockData.ClientID = mock.ID
         mockData.FilterID = createdFilters[0].ID
-        createdActions, err := repo.StoreFilterActions(context.Background(), int64(createdFilters[0].ID), []*domain.Action{&mockData})
+        createdActions, err := repo.StoreFilterActions(context.Background(), int64(createdFilters[0].ID), []*domain.Action{mockData})
         assert.NoError(t, err)
 
         // Actual test for Get
@@ -401,7 +401,7 @@ func TestActionRepo_Delete(t *testing.T) {
 
         mockData.ClientID = mock.ID
         mockData.FilterID = createdFilters[0].ID
-        createdActions, err := repo.StoreFilterActions(context.Background(), int64(createdFilters[0].ID), []*domain.Action{&mockData})
+        createdActions, err := repo.StoreFilterActions(context.Background(), int64(createdFilters[0].ID), []*domain.Action{mockData})
         assert.NoError(t, err)
 
         // Actual test for Delete
@@ -455,7 +455,7 @@ func TestActionRepo_DeleteByFilterID(t *testing.T) {
 
         mockData.ClientID = mock.ID
         mockData.FilterID = createdFilters[0].ID
-        createdActions, err := repo.StoreFilterActions(context.Background(), int64(createdFilters[0].ID), []*domain.Action{&mockData})
+        createdActions, err := repo.StoreFilterActions(context.Background(), int64(createdFilters[0].ID), []*domain.Action{mockData})
         assert.NoError(t, err)
 
         err = repo.DeleteByFilterID(context.Background(), mockData.FilterID)
@@ -508,7 +508,7 @@ func TestActionRepo_ToggleEnabled(t *testing.T) {
         mockData.ClientID = mock.ID
         mockData.FilterID = createdFilters[0].ID
         mockData.Enabled = false
-        createdActions, err := repo.StoreFilterActions(context.Background(), int64(createdFilters[0].ID), []*domain.Action{&mockData})
+        createdActions, err := repo.StoreFilterActions(context.Background(), int64(createdFilters[0].ID), []*domain.Action{mockData})
        assert.NoError(t, err)
 
         // Actual test for ToggleEnabled
@@ -129,13 +129,9 @@ type Tx struct {
     handler *DB
 }
 
-type ILikeDynamic interface {
-    ToSql() (sql string, args []interface{}, err error)
-}
-
 // ILike is a wrapper for sq.Like and sq.ILike
 // SQLite does not support ILike but postgres does so this checks what database is being used
-func (db *DB) ILike(col string, val string) ILikeDynamic {
+func (db *DB) ILike(col string, val string) sq.Sqlizer {
     //if databaseDriver == "sqlite" {
     if db.Driver == "sqlite" {
         return sq.Like{col: val}
@@ -240,6 +240,7 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
         "f.max_seeders",
         "f.min_leechers",
         "f.max_leechers",
+        "f.release_profile_duplicate_id",
         "f.created_at",
         "f.updated_at",
     ).
@@ -266,6 +267,7 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
     var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, months, days, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, matchRecordLabels, exceptRecordLabels, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString
     var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
     var delay, maxDownloads, logScore sql.NullInt32
+    var releaseProfileDuplicateId sql.NullInt64
 
     err = row.Scan(
         &f.ID,
@@ -335,6 +337,7 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
         &f.MaxSeeders,
         &f.MinLeechers,
         &f.MaxLeechers,
+        &releaseProfileDuplicateId,
         &f.CreatedAt,
         &f.UpdatedAt,
     )
@@ -385,6 +388,7 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
     f.UseRegex = useRegex.Bool
     f.Scene = scene.Bool
     f.Freeleech = freeleech.Bool
+    f.ReleaseProfileDuplicateID = releaseProfileDuplicateId.Int64
 
     return &f, nil
 }
@@ -466,10 +470,35 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
         "f.max_leechers",
         "f.created_at",
         "f.updated_at",
+        "f.release_profile_duplicate_id",
+        "rdp.id",
+        "rdp.name",
+        "rdp.release_name",
+        "rdp.hash",
+        "rdp.title",
+        "rdp.sub_title",
+        "rdp.year",
+        "rdp.month",
+        "rdp.day",
+        "rdp.source",
+        "rdp.resolution",
+        "rdp.codec",
+        "rdp.container",
+        "rdp.dynamic_range",
+        "rdp.audio",
+        "rdp.release_group",
+        "rdp.season",
+        "rdp.episode",
+        "rdp.website",
+        "rdp.proper",
+        "rdp.repack",
+        "rdp.edition",
+        "rdp.language",
     ).
     From("filter f").
     Join("filter_indexer fi ON f.id = fi.filter_id").
     Join("indexer i ON i.id = fi.indexer_id").
+    LeftJoin("release_profile_duplicate rdp ON rdp.id = f.release_profile_duplicate_id").
     Where(sq.Eq{"i.identifier": indexer}).
     Where(sq.Eq{"i.enabled": true}).
     Where(sq.Eq{"f.enabled": true}).
@@ -495,6 +524,10 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
         var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, months, days, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, matchRecordLabels, exceptRecordLabels, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString
         var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
         var delay, maxDownloads, logScore sql.NullInt32
+        var releaseProfileDuplicateID, rdpId sql.NullInt64
+
+        var rdpName sql.NullString
+        var rdpRelName, rdpHash, rdpTitle, rdpSubTitle, rdpYear, rdpMonth, rdpDay, rdpSource, rdpResolution, rdpCodec, rdpContainer, rdpDynRange, rdpAudio, rdpGroup, rdpSeason, rdpEpisode, rdpWebsite, rdpProper, rdpRepack, rdpEdition, rdpLanguage sql.NullBool
 
         err := rows.Scan(
             &f.ID,
@@ -566,6 +599,30 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
             &f.MaxLeechers,
             &f.CreatedAt,
             &f.UpdatedAt,
+            &releaseProfileDuplicateID,
+            &rdpId,
+            &rdpName,
+            &rdpRelName,
+            &rdpHash,
+            &rdpTitle,
+            &rdpSubTitle,
+            &rdpYear,
+            &rdpMonth,
+            &rdpDay,
+            &rdpSource,
+            &rdpResolution,
+            &rdpCodec,
+            &rdpContainer,
+            &rdpDynRange,
+            &rdpAudio,
+            &rdpGroup,
+            &rdpSeason,
+            &rdpEpisode,
+            &rdpWebsite,
+            &rdpProper,
+            &rdpRepack,
+            &rdpEdition,
+            &rdpLanguage,
         )
         if err != nil {
             return nil, errors.Wrap(err, "error scanning row")
@@ -610,9 +667,40 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
         f.UseRegex = useRegex.Bool
         f.Scene = scene.Bool
         f.Freeleech = freeleech.Bool
+        f.ReleaseProfileDuplicateID = releaseProfileDuplicateID.Int64
 
         f.Rejections = []string{}
 
+        if releaseProfileDuplicateID.Valid {
+            profile := domain.DuplicateReleaseProfile{
+                ID: rdpId.Int64,
+                //Protocol: rdpName.String,
+                Name: rdpName.String,
+                ReleaseName: rdpRelName.Bool,
+                Hash: rdpHash.Bool,
+                Title: rdpTitle.Bool,
+                SubTitle: rdpSubTitle.Bool,
+                Year: rdpYear.Bool,
+                Month: rdpMonth.Bool,
+                Day: rdpDay.Bool,
+                Source: rdpSource.Bool,
+                Resolution: rdpResolution.Bool,
+                Codec: rdpCodec.Bool,
+                Container: rdpContainer.Bool,
+                DynamicRange: rdpDynRange.Bool,
+                Audio: rdpAudio.Bool,
+                Group: rdpGroup.Bool,
+                Season: rdpSeason.Bool,
+                Episode: rdpEpisode.Bool,
+                Website: rdpWebsite.Bool,
+                Proper: rdpProper.Bool,
+                Repack: rdpRepack.Bool,
+                Edition: rdpEdition.Bool,
+                Language: rdpLanguage.Bool,
+            }
+            f.DuplicateHandling = &profile
+        }
+
         filters = append(filters, &f)
     }
 
@@ -774,6 +862,7 @@ func (r *FilterRepo) Store(ctx context.Context, filter *domain.Filter) error {
             "max_seeders",
             "min_leechers",
             "max_leechers",
+            "release_profile_duplicate_id",
         ).
         Values(
             filter.Name,
@@ -842,6 +931,7 @@ func (r *FilterRepo) Store(ctx context.Context, filter *domain.Filter) error {
             filter.MaxSeeders,
             filter.MinLeechers,
             filter.MaxLeechers,
+            toNullInt64(filter.ReleaseProfileDuplicateID),
         ).
         Suffix("RETURNING id").RunWith(r.db.handler)
 
@@ -928,6 +1018,7 @@ func (r *FilterRepo) Update(ctx context.Context, filter *domain.Filter) error {
         Set("max_seeders", filter.MaxSeeders).
         Set("min_leechers", filter.MinLeechers).
         Set("max_leechers", filter.MaxLeechers).
+        Set("release_profile_duplicate_id", toNullInt64(filter.ReleaseProfileDuplicateID)).
         Set("updated_at", time.Now().Format(time.RFC3339)).
         Where(sq.Eq{"id": filter.ID})
 
@@ -1153,6 +1244,9 @@ func (r *FilterRepo) UpdatePartial(ctx context.Context, filter domain.FilterUpda
     if filter.MaxLeechers != nil {
         q = q.Set("max_leechers", filter.MaxLeechers)
     }
+    if filter.ReleaseProfileDuplicateID != nil {
+        q = q.Set("release_profile_duplicate_id", filter.ReleaseProfileDuplicateID)
+    }
 
     q = q.Where(sq.Eq{"id": filter.ID})
 
@@ -800,7 +800,7 @@ func TestFilterRepo_GetDownloadsByFilterId(t *testing.T) {
         mockAction.FilterID = mockData.ID
         mockAction.ClientID = mockClient.ID
 
-        action, err := actionRepo.Store(context.Background(), mockAction)
+        err = actionRepo.Store(context.Background(), mockAction)
 
         mockReleaseActionStatus.FilterID = int64(mockData.ID)
         mockRelease.FilterID = mockData.ID
@@ -808,7 +808,7 @@ func TestFilterRepo_GetDownloadsByFilterId(t *testing.T) {
         err = releaseRepo.Store(context.Background(), mockRelease)
         assert.NoError(t, err)
 
-        mockReleaseActionStatus.ActionID = int64(action.ID)
+        mockReleaseActionStatus.ActionID = int64(mockAction.ID)
         mockReleaseActionStatus.ReleaseID = mockRelease.ID
 
         err = releaseRepo.StoreReleaseActionStatus(context.Background(), mockReleaseActionStatus)
@@ -827,7 +827,7 @@ func TestFilterRepo_GetDownloadsByFilterId(t *testing.T) {
         })
 
         // Cleanup
-        _ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: action.ID})
+        _ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: mockAction.ID})
         _ = repo.Delete(context.Background(), mockData.ID)
         _ = downloadClientRepo.Delete(context.Background(), mockClient.ID)
         _ = releaseRepo.Delete(context.Background(), &domain.DeleteReleaseRequest{OlderThan: 0})
@@ -861,13 +861,15 @@ func TestFilterRepo_GetDownloadsByFilterId(t *testing.T) {
         mockAction1.FilterID = mockData.ID
         mockAction1.ClientID = mockClient.ID
 
-        action1, err := actionRepo.Store(context.Background(), mockAction1)
+        actionErr := actionRepo.Store(context.Background(), mockAction1)
+        assert.NoError(t, actionErr)
 
         mockAction2 := getMockAction()
         mockAction2.FilterID = mockData.ID
         mockAction2.ClientID = mockClient.ID
 
-        action2, err := actionRepo.Store(context.Background(), mockAction2)
+        action2Err := actionRepo.Store(context.Background(), mockAction2)
+        assert.NoError(t, action2Err)
 
         mockRelease.FilterID = mockData.ID
 
@@ -875,7 +877,7 @@ func TestFilterRepo_GetDownloadsByFilterId(t *testing.T) {
         assert.NoError(t, err)
 
         mockReleaseActionStatus1 := getMockReleaseActionStatus()
-        mockReleaseActionStatus1.ActionID = int64(action1.ID)
+        mockReleaseActionStatus1.ActionID = int64(mockAction1.ID)
         mockReleaseActionStatus1.FilterID = int64(mockData.ID)
         mockReleaseActionStatus1.ReleaseID = mockRelease.ID
 
@@ -883,7 +885,7 @@ func TestFilterRepo_GetDownloadsByFilterId(t *testing.T) {
         assert.NoError(t, err)
 
         mockReleaseActionStatus2 := getMockReleaseActionStatus()
-        mockReleaseActionStatus2.ActionID = int64(action2.ID)
+        mockReleaseActionStatus2.ActionID = int64(mockAction2.ID)
         mockReleaseActionStatus2.FilterID = int64(mockData.ID)
         mockReleaseActionStatus2.ReleaseID = mockRelease.ID
 
@@ -903,8 +905,8 @@ func TestFilterRepo_GetDownloadsByFilterId(t *testing.T) {
         })
 
         // Cleanup
-        _ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: action1.ID})
-        _ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: action2.ID})
+        _ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: mockAction1.ID})
+        _ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: mockAction2.ID})
         _ = repo.Delete(context.Background(), mockData.ID)
         _ = downloadClientRepo.Delete(context.Background(), mockClient.ID)
         _ = releaseRepo.Delete(context.Background(), &domain.DeleteReleaseRequest{OlderThan: 0})
@@ -924,13 +926,15 @@ func TestFilterRepo_GetDownloadsByFilterId(t *testing.T) {
         mockAction.FilterID = mockData.ID
         mockAction.ClientID = mockClient.ID
 
-        action, err := actionRepo.Store(context.Background(), mockAction)
+        err = actionRepo.Store(context.Background(), mockAction)
+        assert.NoError(t, err)
 
         mockAction2 := getMockAction()
         mockAction2.FilterID = mockData.ID
         mockAction2.ClientID = mockClient.ID
 
-        action2, err := actionRepo.Store(context.Background(), mockAction2)
+        err = actionRepo.Store(context.Background(), mockAction2)
+        assert.NoError(t, err)
 
         mockRelease.FilterID = mockData.ID
 
@@ -938,7 +942,7 @@ func TestFilterRepo_GetDownloadsByFilterId(t *testing.T) {
         assert.NoError(t, err)
 
         mockReleaseActionStatus = getMockReleaseActionStatus()
-        mockReleaseActionStatus.ActionID = int64(action.ID)
+        mockReleaseActionStatus.ActionID = int64(mockAction.ID)
         mockReleaseActionStatus.FilterID = int64(mockData.ID)
         mockReleaseActionStatus.ReleaseID = mockRelease.ID
         mockReleaseActionStatus.Timestamp = mockReleaseActionStatus.Timestamp.AddDate(0, -1, 0)
@@ -947,7 +951,7 @@ func TestFilterRepo_GetDownloadsByFilterId(t *testing.T) {
         assert.NoError(t, err)
 
         mockReleaseActionStatus2 := getMockReleaseActionStatus()
-        mockReleaseActionStatus2.ActionID = int64(action2.ID)
+        mockReleaseActionStatus2.ActionID = int64(mockAction2.ID)
         mockReleaseActionStatus2.FilterID = int64(mockData.ID)
         mockReleaseActionStatus2.ReleaseID = mockRelease.ID
         mockReleaseActionStatus2.Timestamp = mockReleaseActionStatus2.Timestamp.AddDate(0, -1, 0)
@@ -968,7 +972,7 @@ func TestFilterRepo_GetDownloadsByFilterId(t *testing.T) {
         })
 
         // Cleanup
-        _ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: action.ID})
+        _ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: mockAction.ID})
         _ = repo.Delete(context.Background(), mockData.ID)
         _ = downloadClientRepo.Delete(context.Background(), mockClient.ID)
         _ = releaseRepo.Delete(context.Background(), &domain.DeleteReleaseRequest{OlderThan: 0})
@@ -88,6 +88,39 @@ CREATE TABLE irc_channel
     UNIQUE (network_id, name)
 );
 
+CREATE TABLE release_profile_duplicate
+(
+    id SERIAL PRIMARY KEY,
+    name TEXT NOT NULL,
+    protocol BOOLEAN DEFAULT FALSE,
+    release_name BOOLEAN DEFAULT FALSE,
+    hash BOOLEAN DEFAULT FALSE,
+    title BOOLEAN DEFAULT FALSE,
+    sub_title BOOLEAN DEFAULT FALSE,
+    year BOOLEAN DEFAULT FALSE,
+    month BOOLEAN DEFAULT FALSE,
+    day BOOLEAN DEFAULT FALSE,
+    source BOOLEAN DEFAULT FALSE,
+    resolution BOOLEAN DEFAULT FALSE,
+    codec BOOLEAN DEFAULT FALSE,
+    container BOOLEAN DEFAULT FALSE,
+    dynamic_range BOOLEAN DEFAULT FALSE,
+    audio BOOLEAN DEFAULT FALSE,
+    release_group BOOLEAN DEFAULT FALSE,
+    season BOOLEAN DEFAULT FALSE,
+    episode BOOLEAN DEFAULT FALSE,
+    website BOOLEAN DEFAULT FALSE,
+    proper BOOLEAN DEFAULT FALSE,
+    repack BOOLEAN DEFAULT FALSE,
+    edition BOOLEAN DEFAULT FALSE,
+    language BOOLEAN DEFAULT FALSE
+);
+
+INSERT INTO release_profile_duplicate (id, name, protocol, release_name, hash, title, sub_title, year, month, day, source, resolution, codec, container, dynamic_range, audio, release_group, season, episode, website, proper, repack, edition, language)
+VALUES (1, 'Exact release', 'f', 't', 't', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f'),
+       (2, 'Movie', 'f', 'f', 'f', 't', 'f', 't', 'f', 'f', 'f', 't', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f'),
+       (3, 'TV', 'f', 'f', 'f', 't', 'f', 't', 't', 't', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 't', 't', 'f', 'f', 'f', 'f', 'f');
+
 CREATE TABLE filter
 (
     id SERIAL PRIMARY KEY,
@@ -159,7 +192,9 @@ CREATE TABLE filter
     min_seeders INTEGER DEFAULT 0,
     max_seeders INTEGER DEFAULT 0,
     min_leechers INTEGER DEFAULT 0,
-    max_leechers INTEGER DEFAULT 0
+    max_leechers INTEGER DEFAULT 0,
+    release_profile_duplicate_id INTEGER,
+    FOREIGN KEY (release_profile_duplicate_id) REFERENCES release_profile_duplicate(id) ON DELETE SET NULL
 );
 
 CREATE INDEX filter_enabled_index
@@ -270,9 +305,11 @@ CREATE TABLE "release"
     group_id TEXT,
     torrent_id TEXT,
     torrent_name TEXT,
+    normalized_hash TEXT,
     size BIGINT,
     raw TEXT,
     title TEXT,
+    sub_title TEXT,
     category TEXT,
     season INTEGER,
     episode INTEGER,
@@ -285,15 +322,18 @@ CREATE TABLE "release"
     container TEXT,
     hdr TEXT,
     audio TEXT,
+    audio_channels TEXT,
     release_group TEXT,
     region TEXT,
     language TEXT,
     edition TEXT,
+    cut TEXT,
     unrated BOOLEAN,
     hybrid BOOLEAN,
     proper BOOLEAN,
     repack BOOLEAN,
     website TEXT,
+    media_processing TEXT,
     artists TEXT [] DEFAULT '{}' NOT NULL,
     type TEXT,
     format TEXT,
@@ -308,6 +348,7 @@ CREATE TABLE "release"
     freeleech_percent INTEGER,
     uploader TEXT,
     pre_time TEXT,
+    other TEXT [] DEFAULT '{}' NOT NULL,
     filter_id INTEGER
         CONSTRAINT release_filter_id_fk
             REFERENCES filter
@@ -326,6 +367,81 @@ CREATE INDEX release_timestamp_index
 CREATE INDEX release_torrent_name_index
     ON "release" (torrent_name);
 
+CREATE INDEX release_normalized_hash_index
+    ON "release" (normalized_hash);
+
+CREATE INDEX release_title_index
+    ON "release" (title);
+
+CREATE INDEX release_sub_title_index
+    ON "release" (sub_title);
+
+CREATE INDEX release_season_index
+    ON "release" (season);
+
+CREATE INDEX release_episode_index
+    ON "release" (episode);
+
+CREATE INDEX release_year_index
+    ON "release" (year);
+
+CREATE INDEX release_month_index
+    ON "release" (month);
+
+CREATE INDEX release_day_index
+    ON "release" (day);
+
+CREATE INDEX release_resolution_index
+    ON "release" (resolution);
+
+CREATE INDEX release_source_index
+    ON "release" (source);
+
+CREATE INDEX release_codec_index
+    ON "release" (codec);
+
+CREATE INDEX release_container_index
+    ON "release" (container);
+
+CREATE INDEX release_hdr_index
+    ON "release" (hdr);
+
+CREATE INDEX release_audio_index
+    ON "release" (audio);
+
+CREATE INDEX release_audio_channels_index
+    ON "release" (audio_channels);
+
+CREATE INDEX release_release_group_index
+    ON "release" (release_group);
+
+CREATE INDEX release_language_index
+    ON "release" (language);
+
+CREATE INDEX release_proper_index
+    ON "release" (proper);
+
+CREATE INDEX release_repack_index
+    ON "release" (repack);
+
+CREATE INDEX release_website_index
+    ON "release" (website);
+
+CREATE INDEX release_media_processing_index
+    ON "release" (media_processing);
+
+CREATE INDEX release_region_index
+    ON "release" (region);
+
+CREATE INDEX release_edition_index
+    ON "release" (edition);
+
+CREATE INDEX release_cut_index
+    ON "release" (cut);
+
+CREATE INDEX release_hybrid_index
+    ON "release" (hybrid);
+
 CREATE TABLE release_action_status
 (
     id SERIAL PRIMARY KEY,
|
||||||
|
|
||||||
ALTER TABLE filter
|
ALTER TABLE filter
|
||||||
ADD COLUMN except_record_labels TEXT DEFAULT '';
|
ADD COLUMN except_record_labels TEXT DEFAULT '';
|
||||||
|
`,
|
||||||
|
`CREATE TABLE release_profile_duplicate
|
||||||
|
(
|
||||||
|
id SERIAL PRIMARY KEY,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
protocol BOOLEAN DEFAULT FALSE,
|
||||||
|
release_name BOOLEAN DEFAULT FALSE,
|
||||||
|
hash BOOLEAN DEFAULT FALSE,
|
||||||
|
title BOOLEAN DEFAULT FALSE,
|
||||||
|
sub_title BOOLEAN DEFAULT FALSE,
|
||||||
|
year BOOLEAN DEFAULT FALSE,
|
||||||
|
month BOOLEAN DEFAULT FALSE,
|
||||||
|
day BOOLEAN DEFAULT FALSE,
|
||||||
|
source BOOLEAN DEFAULT FALSE,
|
||||||
|
resolution BOOLEAN DEFAULT FALSE,
|
||||||
|
codec BOOLEAN DEFAULT FALSE,
|
||||||
|
container BOOLEAN DEFAULT FALSE,
|
||||||
|
dynamic_range BOOLEAN DEFAULT FALSE,
|
||||||
|
audio BOOLEAN DEFAULT FALSE,
|
||||||
|
release_group BOOLEAN DEFAULT FALSE,
|
||||||
|
season BOOLEAN DEFAULT FALSE,
|
||||||
|
episode BOOLEAN DEFAULT FALSE,
|
||||||
|
website BOOLEAN DEFAULT FALSE,
|
||||||
|
proper BOOLEAN DEFAULT FALSE,
|
||||||
|
repack BOOLEAN DEFAULT FALSE,
|
||||||
|
edition BOOLEAN DEFAULT FALSE,
|
||||||
|
language BOOLEAN DEFAULT FALSE
|
||||||
|
);
|
||||||
|
|
||||||
|
INSERT INTO release_profile_duplicate (id, name, protocol, release_name, hash, title, sub_title, year, month, day, source, resolution, codec, container, dynamic_range, audio, release_group, season, episode, website, proper, repack, edition, language)
|
||||||
|
VALUES (1, 'Exact release', 'f', 't', 't', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f'),
|
||||||
|
(2, 'Movie', 'f', 'f', 'f', 't', 'f', 't', 'f', 'f', 'f', 't', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 'f'),
|
||||||
|
(3, 'TV', 'f', 'f', 'f', 't', 'f', 't', 't', 't', 'f', 'f', 'f', 'f', 'f', 'f', 'f', 't', 't', 'f', 'f', 'f', 'f', 'f');
|
||||||
|
|
||||||
|
ALTER TABLE filter
|
||||||
|
ADD release_profile_duplicate_id INTEGER;
|
||||||
|
|
||||||
|
ALTER TABLE filter
|
||||||
|
ADD CONSTRAINT filter_release_profile_duplicate_id_fk
|
||||||
|
FOREIGN KEY (release_profile_duplicate_id) REFERENCES release_profile_duplicate (id)
|
||||||
|
ON DELETE SET NULL;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD normalized_hash TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD sub_title TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD COLUMN IF NOT EXISTS audio TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD audio_channels TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD IF NOT EXISTS language TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD media_processing TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD IF NOT EXISTS edition TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD IF NOT EXISTS cut TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD IF NOT EXISTS hybrid TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD IF NOT EXISTS region TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD IF NOT EXISTS other TEXT [] DEFAULT '{}' NOT NULL;
|
||||||
|
|
||||||
|
CREATE INDEX release_normalized_hash_index
|
||||||
|
ON "release" (normalized_hash);
|
||||||
|
|
||||||
|
CREATE INDEX release_title_index
|
||||||
|
ON "release" (title);
|
||||||
|
|
||||||
|
CREATE INDEX release_sub_title_index
|
||||||
|
ON "release" (sub_title);
|
||||||
|
|
||||||
|
CREATE INDEX release_season_index
|
||||||
|
ON "release" (season);
|
||||||
|
|
||||||
|
CREATE INDEX release_episode_index
|
||||||
|
ON "release" (episode);
|
||||||
|
|
||||||
|
CREATE INDEX release_year_index
|
||||||
|
ON "release" (year);
|
||||||
|
|
||||||
|
CREATE INDEX release_month_index
|
||||||
|
ON "release" (month);
|
||||||
|
|
||||||
|
CREATE INDEX release_day_index
|
||||||
|
ON "release" (day);
|
||||||
|
|
||||||
|
CREATE INDEX release_resolution_index
|
||||||
|
ON "release" (resolution);
|
||||||
|
|
||||||
|
CREATE INDEX release_source_index
|
||||||
|
ON "release" (source);
|
||||||
|
|
||||||
|
CREATE INDEX release_codec_index
|
||||||
|
ON "release" (codec);
|
||||||
|
|
||||||
|
CREATE INDEX release_container_index
|
||||||
|
ON "release" (container);
|
||||||
|
|
||||||
|
CREATE INDEX release_hdr_index
|
||||||
|
ON "release" (hdr);
|
||||||
|
|
||||||
|
CREATE INDEX release_audio_index
|
||||||
|
ON "release" (audio);
|
||||||
|
|
||||||
|
CREATE INDEX release_audio_channels_index
|
||||||
|
ON "release" (audio_channels);
|
||||||
|
|
||||||
|
CREATE INDEX release_release_group_index
|
||||||
|
ON "release" (release_group);
|
||||||
|
|
||||||
|
CREATE INDEX release_proper_index
|
||||||
|
ON "release" (proper);
|
||||||
|
|
||||||
|
CREATE INDEX release_repack_index
|
||||||
|
ON "release" (repack);
|
||||||
|
|
||||||
|
CREATE INDEX release_website_index
|
||||||
|
ON "release" (website);
|
||||||
|
|
||||||
|
CREATE INDEX release_media_processing_index
|
||||||
|
ON "release" (media_processing);
|
||||||
|
|
||||||
|
CREATE INDEX release_language_index
|
||||||
|
ON "release" (language);
|
||||||
|
|
||||||
|
CREATE INDEX release_region_index
|
||||||
|
ON "release" (region);
|
||||||
|
|
||||||
|
CREATE INDEX release_edition_index
|
||||||
|
ON "release" (edition);
|
||||||
|
|
||||||
|
CREATE INDEX release_cut_index
|
||||||
|
ON "release" (cut);
|
||||||
|
|
||||||
|
CREATE INDEX release_hybrid_index
|
||||||
|
ON "release" (hybrid);
|
||||||
`,
|
`,
|
||||||
}
|
}
|
||||||
|
|
|
@@ -33,23 +33,31 @@ func NewReleaseRepo(log logger.Logger, db *DB) domain.ReleaseRepo {
 }
 
 func (repo *ReleaseRepo) Store(ctx context.Context, r *domain.Release) error {
-    codecStr := strings.Join(r.Codec, ",")
-    hdrStr := strings.Join(r.HDR, ",")
+    var (
+        codecStr = strings.Join(r.Codec, ",")
+        hdrStr = strings.Join(r.HDR, ",")
+        audioStr = strings.Join(r.Audio, ",")
+        editionStr = strings.Join(r.Edition, ",")
+        cutStr = strings.Join(r.Cut, ",")
+        languageStr = strings.Join(r.Language, ",")
+    )
 
     queryBuilder := repo.db.squirrel.
         Insert("release").
-        Columns("filter_status", "rejections", "indexer", "filter", "protocol", "implementation", "timestamp", "announce_type", "group_id", "torrent_id", "info_url", "download_url", "torrent_name", "size", "title", "category", "season", "episode", "year", "month", "day", "resolution", "source", "codec", "container", "hdr", "release_group", "proper", "repack", "website", "type", "origin", "tags", "uploader", "pre_time", "filter_id").
-        Values(r.FilterStatus, pq.Array(r.Rejections), r.Indexer.Identifier, r.FilterName, r.Protocol, r.Implementation, r.Timestamp.Format(time.RFC3339), r.AnnounceType, r.GroupID, r.TorrentID, r.InfoURL, r.DownloadURL, r.TorrentName, r.Size, r.Title, r.Category, r.Season, r.Episode, r.Year, r.Month, r.Day, r.Resolution, r.Source, codecStr, r.Container, hdrStr, r.Group, r.Proper, r.Repack, r.Website, r.Type, r.Origin, pq.Array(r.Tags), r.Uploader, r.PreTime, r.FilterID).
+        Columns("filter_status", "rejections", "indexer", "filter", "protocol", "implementation", "timestamp", "announce_type", "group_id", "torrent_id", "info_url", "download_url", "torrent_name", "normalized_hash", "size", "title", "sub_title", "category", "season", "episode", "year", "month", "day", "resolution", "source", "codec", "container", "hdr", "audio", "audio_channels", "release_group", "proper", "repack", "region", "language", "cut", "edition", "hybrid", "media_processing", "website", "type", "origin", "tags", "uploader", "pre_time", "other", "filter_id").
+        Values(r.FilterStatus, pq.Array(r.Rejections), r.Indexer.Identifier, r.FilterName, r.Protocol, r.Implementation, r.Timestamp.Format(time.RFC3339), r.AnnounceType, r.GroupID, r.TorrentID, r.InfoURL, r.DownloadURL, r.TorrentName, r.NormalizedHash, r.Size, r.Title, r.SubTitle, r.Category, r.Season, r.Episode, r.Year, r.Month, r.Day, r.Resolution, r.Source, codecStr, r.Container, hdrStr, audioStr, r.AudioChannels, r.Group, r.Proper, r.Repack, r.Region, languageStr, cutStr, editionStr, r.Hybrid, r.MediaProcessing, r.Website, r.Type.String(), r.Origin, pq.Array(r.Tags), r.Uploader, r.PreTime, pq.Array(r.Other), r.FilterID).
         Suffix("RETURNING id").RunWith(repo.db.handler)
 
-    // return values
-    var retID int64
-
-    if err := queryBuilder.QueryRowContext(ctx).Scan(&retID); err != nil {
-        return errors.Wrap(err, "error executing query")
+    q, args, err := queryBuilder.ToSql()
+    if err != nil {
+        return errors.Wrap(err, "error building query")
     }
 
-    r.ID = retID
+    repo.log.Debug().Msgf("release.store: %s %v", q, args)
+
+    if err := queryBuilder.QueryRowContext(ctx).Scan(&r.ID); err != nil {
+        return errors.Wrap(err, "error executing query")
+    }
 
     repo.log.Debug().Msgf("release.store: %+v", r)
 
@@ -102,14 +110,9 @@ func (repo *ReleaseRepo) StoreReleaseActionStatus(ctx context.Context, status *d
         Values(status.Status, status.Action, status.ActionID, status.Type, status.Client, status.Filter, status.FilterID, pq.Array(status.Rejections), status.Timestamp.Format(time.RFC3339), status.ReleaseID).
         Suffix("RETURNING id").RunWith(repo.db.handler)
 
-        // return values
-        var retID int64
-
-        if err := queryBuilder.QueryRowContext(ctx).Scan(&retID); err != nil {
+        if err := queryBuilder.QueryRowContext(ctx).Scan(&status.ID); err != nil {
             return errors.Wrap(err, "error executing query")
         }
-
-        status.ID = retID
     }
 
     repo.log.Trace().Msgf("release.store_release_action_status: %+v", status)
@ -117,6 +120,62 @@ func (repo *ReleaseRepo) StoreReleaseActionStatus(ctx context.Context, status *d
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (repo *ReleaseRepo) StoreDuplicateProfile(ctx context.Context, profile *domain.DuplicateReleaseProfile) error {
|
||||||
|
if profile.ID == 0 {
|
||||||
|
queryBuilder := repo.db.squirrel.
|
||||||
|
Insert("release_profile_duplicate").
|
||||||
|
Columns("name", "protocol", "release_name", "hash", "title", "sub_title", "season", "episode", "year", "month", "day", "resolution", "source", "codec", "container", "dynamic_range", "audio", "release_group", "website", "proper", "repack").
|
||||||
|
Values(profile.Name, profile.Protocol, profile.ReleaseName, profile.Hash, profile.Title, profile.SubTitle, profile.Season, profile.Episode, profile.Year, profile.Month, profile.Day, profile.Resolution, profile.Source, profile.Codec, profile.Container, profile.DynamicRange, profile.Audio, profile.Group, profile.Website, profile.Proper, profile.Repack).
|
||||||
|
Suffix("RETURNING id").
|
||||||
|
RunWith(repo.db.handler)
|
||||||
|
|
||||||
|
// return values
|
||||||
|
var retID int64
|
||||||
|
|
||||||
|
err := queryBuilder.QueryRowContext(ctx).Scan(&retID)
|
||||||
|
if err != nil {
|
||||||
|
return errors.Wrap(err, "error executing query")
|
||||||
|
}
|
||||||
|
|
||||||
|
profile.ID = retID
|
||||||
|
} else {
|
||||||
|
queryBuilder := repo.db.squirrel.
|
||||||
|
Update("release_profile_duplicate").
|
||||||
|
Set("name", profile.Name).
|
||||||
|
Set("protocol", profile.Protocol).
|
||||||
|
Set("release_name", profile.ReleaseName).
|
||||||
|
Set("hash", profile.Hash).
|
||||||
|
Set("title", profile.Title).
|
||||||
|
Set("sub_title", profile.SubTitle).
|
||||||
|
Set("season", profile.Season).
|
||||||
|
Set("episode", profile.Episode).
|
||||||
|
Set("year", profile.Year).
|
||||||
|
Set("month", profile.Month).
|
||||||
|
Set("day", profile.Day).
|
||||||
|
Set("resolution", profile.Resolution).
|
||||||
|
Set("source", profile.Source).
|
||||||
|
Set("codec", profile.Codec).
|
||||||
|
Set("container", profile.Container).
|
||||||
|
Set("dynamic_range", profile.DynamicRange).
|
||||||
|
Set("audio", profile.Audio).
|
||||||
|
Set("release_group", profile.Group).
|
||||||
|
Set("website", profile.Website).
|
||||||
|
Set("proper", profile.Proper).
|
||||||
|
Set("repack", profile.Repack).
|
||||||
|
Where(sq.Eq{"id": profile.ID}).
|
||||||
|
RunWith(repo.db.handler)
|
||||||
|
|
||||||
|
_, err := queryBuilder.ExecContext(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return errors.Wrap(err, "error executing query")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
repo.log.Debug().Msgf("release.StoreDuplicateProfile: %+v", profile)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
func (repo *ReleaseRepo) Find(ctx context.Context, params domain.ReleaseQueryParams) (*domain.FindReleasesResponse, error) {
|
func (repo *ReleaseRepo) Find(ctx context.Context, params domain.ReleaseQueryParams) (*domain.FindReleasesResponse, error) {
|
||||||
tx, err := repo.db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelReadCommitted})
|
tx, err := repo.db.BeginTx(ctx, &sql.TxOptions{Isolation: sql.LevelReadCommitted})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -192,7 +251,7 @@ func (repo *ReleaseRepo) findReleases(ctx context.Context, tx *Tx, params domain

 	whereQuery, _, err := whereQueryBuilder.ToSql()
 	if err != nil {
-		return nil, errors.Wrap(err, "error building wherequery")
+		return nil, errors.Wrap(err, "error building where query")
 	}

 	subQueryBuilder := repo.db.squirrel.
@ -230,8 +289,49 @@ func (repo *ReleaseRepo) findReleases(ctx context.Context, tx *Tx, params domain
 	}

 	queryBuilder := repo.db.squirrel.
-		Select("r.id", "r.filter_status", "r.rejections", "r.indexer", "i.id", "i.name", "i.identifier_external", "r.filter", "r.protocol", "r.announce_type", "r.info_url", "r.download_url", "r.title", "r.torrent_name", "r.size", "r.category", "r.season", "r.episode", "r.year", "r.resolution", "r.source", "r.codec", "r.container", "r.release_group", "r.timestamp",
-			"ras.id", "ras.status", "ras.action", "ras.action_id", "ras.type", "ras.client", "ras.filter", "ras.filter_id", "ras.release_id", "ras.rejections", "ras.timestamp").
+		Select(
+			"r.id",
+			"r.filter_status",
+			"r.rejections",
+			"r.indexer",
+			"i.id",
+			"i.name",
+			"i.identifier_external",
+			"r.filter",
+			"r.protocol",
+			"r.announce_type",
+			"r.info_url",
+			"r.download_url",
+			"r.title",
+			"r.sub_title",
+			"r.torrent_name",
+			"r.normalized_hash",
+			"r.size",
+			"r.category",
+			"r.season",
+			"r.episode",
+			"r.year",
+			"r.resolution",
+			"r.source",
+			"r.codec",
+			"r.container",
+			"r.hdr",
+			"r.audio",
+			"r.audio_channels",
+			"r.release_group",
+			"r.region",
+			"r.language",
+			"r.edition",
+			"r.cut",
+			"r.hybrid",
+			"r.proper",
+			"r.repack",
+			"r.website",
+			"r.media_processing",
+			"r.type",
+			"r.timestamp",
+			"ras.id", "ras.status", "ras.action", "ras.action_id", "ras.type", "ras.client", "ras.filter", "ras.filter_id", "ras.release_id", "ras.rejections", "ras.timestamp",
+		).
 		Column(sq.Alias(countQuery, "page_total")).
 		From("release r").
 		OrderBy("r.id DESC").
@ -267,7 +367,7 @@ func (repo *ReleaseRepo) findReleases(ctx context.Context, tx *Tx, params domain
 		var rls domain.Release
 		var ras domain.ReleaseActionStatus

-		var rlsIndexer, rlsIndexerName, rlsIndexerExternalName, rlsFilter, rlsAnnounceType, infoUrl, downloadUrl, codec sql.NullString
+		var rlsIndexer, rlsIndexerName, rlsIndexerExternalName, rlsFilter, rlsAnnounceType, infoUrl, downloadUrl, subTitle, normalizedHash, codec, hdr, rlsType, audioStr, languageStr, editionStr, cutStr, website sql.NullString

 		var rlsIndexerID sql.NullInt64
 		var rasId, rasFilterId, rasReleaseId, rasActionId sql.NullInt64
@ -275,7 +375,49 @@ func (repo *ReleaseRepo) findReleases(ctx context.Context, tx *Tx, params domain
 		var rasRejections []sql.NullString
 		var rasTimestamp sql.NullTime

-		if err := rows.Scan(&rls.ID, &rls.FilterStatus, pq.Array(&rls.Rejections), &rlsIndexer, &rlsIndexerID, &rlsIndexerName, &rlsIndexerExternalName, &rlsFilter, &rls.Protocol, &rlsAnnounceType, &infoUrl, &downloadUrl, &rls.Title, &rls.TorrentName, &rls.Size, &rls.Category, &rls.Season, &rls.Episode, &rls.Year, &rls.Resolution, &rls.Source, &codec, &rls.Container, &rls.Group, &rls.Timestamp, &rasId, &rasStatus, &rasAction, &rasActionId, &rasType, &rasClient, &rasFilter, &rasFilterId, &rasReleaseId, pq.Array(&rasRejections), &rasTimestamp, &resp.TotalCount); err != nil {
+		if err := rows.Scan(
+			&rls.ID,
+			&rls.FilterStatus,
+			pq.Array(&rls.Rejections),
+			&rlsIndexer,
+			&rlsIndexerID,
+			&rlsIndexerName,
+			&rlsIndexerExternalName,
+			&rlsFilter,
+			&rls.Protocol,
+			&rlsAnnounceType,
+			&infoUrl,
+			&downloadUrl,
+			&rls.Title,
+			&subTitle,
+			&rls.TorrentName,
+			&normalizedHash,
+			&rls.Size,
+			&rls.Category,
+			&rls.Season,
+			&rls.Episode,
+			&rls.Year,
+			&rls.Resolution,
+			&rls.Source,
+			&codec,
+			&rls.Container,
+			&hdr,
+			&audioStr,
+			&rls.AudioChannels,
+			&rls.Group,
+			&rls.Region,
+			&languageStr,
+			&editionStr,
+			&cutStr,
+			&rls.Hybrid,
+			&rls.Proper,
+			&rls.Repack,
+			&website,
+			&rls.MediaProcessing,
+			&rlsType,
+			&rls.Timestamp,
+			&rasId, &rasStatus, &rasAction, &rasActionId, &rasType, &rasClient, &rasFilter, &rasFilterId, &rasReleaseId, pq.Array(&rasRejections), &rasTimestamp, &resp.TotalCount,
+		); err != nil {
 			return resp, errors.Wrap(err, "error scanning row")
 		}

@ -324,7 +466,19 @@ func (repo *ReleaseRepo) findReleases(ctx context.Context, tx *Tx, params domain
 		rls.ActionStatus = make([]domain.ReleaseActionStatus, 0)
 		rls.InfoURL = infoUrl.String
 		rls.DownloadURL = downloadUrl.String
+		rls.SubTitle = subTitle.String
+		rls.NormalizedHash = normalizedHash.String
 		rls.Codec = strings.Split(codec.String, ",")
+		rls.HDR = strings.Split(hdr.String, ",")
+		rls.Audio = strings.Split(audioStr.String, ",")
+		rls.Language = strings.Split(languageStr.String, ",")
+		rls.Edition = strings.Split(editionStr.String, ",")
+		rls.Cut = strings.Split(cutStr.String, ",")
+		rls.Website = website.String
+		//rls.Type = rlsType.String
+		if rlsType.Valid {
+			rls.ParseType(rlsType.String)
+		}

 		// only add ActionStatus if it's not empty
 		if ras.ID > 0 {
@ -342,6 +496,66 @@ func (repo *ReleaseRepo) findReleases(ctx context.Context, tx *Tx, params domain
 	return resp, nil
 }

+func (repo *ReleaseRepo) FindDuplicateReleaseProfiles(ctx context.Context) ([]*domain.DuplicateReleaseProfile, error) {
+	queryBuilder := repo.db.squirrel.
+		Select(
+			"id",
+			"name",
+			"protocol",
+			"release_name",
+			"hash",
+			"title",
+			"sub_title",
+			"year",
+			"month",
+			"day",
+			"source",
+			"resolution",
+			"codec",
+			"container",
+			"dynamic_range",
+			"audio",
+			"release_group",
+			"season",
+			"episode",
+			"website",
+			"proper",
+			"repack",
+		).
+		From("release_profile_duplicate")
+
+	query, args, err := queryBuilder.ToSql()
+	if err != nil {
+		return nil, errors.Wrap(err, "error building query")
+	}
+
+	rows, err := repo.db.handler.QueryContext(ctx, query, args...)
+	if err != nil {
+		return nil, errors.Wrap(err, "error executing query")
+	}
+
+	defer rows.Close()
+
+	if err := rows.Err(); err != nil {
+		return nil, errors.Wrap(err, "error rows FindDuplicateReleaseProfiles")
+	}
+
+	res := make([]*domain.DuplicateReleaseProfile, 0)
+
+	for rows.Next() {
+		var p domain.DuplicateReleaseProfile
+
+		err := rows.Scan(&p.ID, &p.Name, &p.Protocol, &p.ReleaseName, &p.Hash, &p.Title, &p.SubTitle, &p.Year, &p.Month, &p.Day, &p.Source, &p.Resolution, &p.Codec, &p.Container, &p.DynamicRange, &p.Audio, &p.Group, &p.Season, &p.Episode, &p.Website, &p.Proper, &p.Repack)
+		if err != nil {
+			return nil, errors.Wrap(err, "error scanning row")
+		}
+
+		res = append(res, &p)
+	}
+
+	return res, nil
+}
+
 func (repo *ReleaseRepo) GetIndexerOptions(ctx context.Context) ([]string, error) {
 	query := `SELECT DISTINCT indexer FROM "release" UNION SELECT DISTINCT identifier indexer FROM indexer;`

@ -420,7 +634,7 @@ func (repo *ReleaseRepo) GetActionStatusByReleaseID(ctx context.Context, release

 func (repo *ReleaseRepo) Get(ctx context.Context, req *domain.GetReleaseRequest) (*domain.Release, error) {
 	queryBuilder := repo.db.squirrel.
-		Select("r.id", "r.filter_status", "r.rejections", "r.indexer", "r.filter", "r.filter_id", "r.protocol", "r.implementation", "r.announce_type", "r.info_url", "r.download_url", "r.title", "r.torrent_name", "r.category", "r.size", "r.group_id", "r.torrent_id", "r.uploader", "r.timestamp").
+		Select("r.id", "r.filter_status", "r.rejections", "r.indexer", "r.filter", "r.filter_id", "r.protocol", "r.implementation", "r.announce_type", "r.info_url", "r.download_url", "r.title", "r.sub_title", "r.torrent_name", "r.category", "r.size", "r.group_id", "r.torrent_id", "r.uploader", "r.timestamp").
 		From("release r").
 		OrderBy("r.id DESC").
 		Where(sq.Eq{"r.id": req.Id})

@ -439,10 +653,10 @@ func (repo *ReleaseRepo) Get(ctx context.Context, req *domain.GetReleaseRequest)

 	var rls domain.Release

-	var indexerName, filterName, announceType, infoUrl, downloadUrl, groupId, torrentId, category, uploader sql.NullString
+	var indexerName, filterName, announceType, infoUrl, downloadUrl, subTitle, groupId, torrentId, category, uploader sql.NullString
 	var filterId sql.NullInt64

-	if err := row.Scan(&rls.ID, &rls.FilterStatus, pq.Array(&rls.Rejections), &indexerName, &filterName, &filterId, &rls.Protocol, &rls.Implementation, &announceType, &infoUrl, &downloadUrl, &rls.Title, &rls.TorrentName, &category, &rls.Size, &groupId, &torrentId, &uploader, &rls.Timestamp); err != nil {
+	if err := row.Scan(&rls.ID, &rls.FilterStatus, pq.Array(&rls.Rejections), &indexerName, &filterName, &filterId, &rls.Protocol, &rls.Implementation, &announceType, &infoUrl, &downloadUrl, &rls.Title, &subTitle, &rls.TorrentName, &category, &rls.Size, &groupId, &torrentId, &uploader, &rls.Timestamp); err != nil {
 		if errors.Is(err, sql.ErrNoRows) {
 			return nil, domain.ErrRecordNotFound
 		}

@ -457,6 +671,7 @@ func (repo *ReleaseRepo) Get(ctx context.Context, req *domain.GetReleaseRequest)
 	rls.AnnounceType = domain.AnnounceType(announceType.String)
 	rls.InfoURL = infoUrl.String
 	rls.DownloadURL = downloadUrl.String
+	rls.SubTitle = subTitle.String
 	rls.Category = category.String
 	rls.GroupID = groupId.String
 	rls.TorrentID = torrentId.String
@ -670,6 +885,31 @@ func (repo *ReleaseRepo) Delete(ctx context.Context, req *domain.DeleteReleaseRe
 	return nil
 }

+func (repo *ReleaseRepo) DeleteReleaseProfileDuplicate(ctx context.Context, id int64) error {
+	qb := repo.db.squirrel.Delete("release_profile_duplicate").Where(sq.Eq{"id": id})
+
+	query, args, err := qb.ToSql()
+	if err != nil {
+		return errors.Wrap(err, "error building SQL query")
+	}
+
+	_, err = repo.db.handler.ExecContext(ctx, query, args...)
+	if err != nil {
+		return errors.Wrap(err, "error executing delete query")
+	}
+
+	//deletedRows, err := result.RowsAffected()
+	//if err != nil {
+	//	return errors.Wrap(err, "error fetching rows affected")
+	//}
+	//
+	//repo.log.Debug().Msgf("deleted %d rows from release table", deletedRows)
+
+	repo.log.Debug().Msgf("deleted duplicate release profile: %d", id)
+
+	return nil
+}
+
 func (repo *ReleaseRepo) CheckSmartEpisodeCanDownload(ctx context.Context, p *domain.SmartEpisodeParams) (bool, error) {
 	queryBuilder := repo.db.squirrel.
 		Select("COUNT(*)").
@ -793,3 +1033,200 @@ func (repo *ReleaseRepo) UpdateBaseURL(ctx context.Context, indexer string, oldB

 	return nil
 }
+
+func (repo *ReleaseRepo) CheckIsDuplicateRelease(ctx context.Context, profile *domain.DuplicateReleaseProfile, release *domain.Release) (bool, error) {
+	queryBuilder := repo.db.squirrel.
+		Select("r.id, r.torrent_name, r.normalized_hash, r.title, ras.action, ras.status").
+		From("release r").
+		LeftJoin("release_action_status ras ON r.id = ras.release_id").
+		Where("ras.status = 'PUSH_APPROVED'")
+
+	if profile.ReleaseName && profile.Hash {
+		//queryBuilder = queryBuilder.Where(repo.db.ILike("r.torrent_name", release.TorrentName))
+		queryBuilder = queryBuilder.Where(sq.Eq{"r.normalized_hash": release.NormalizedHash})
+	} else {
+		if profile.Title {
+			queryBuilder = queryBuilder.Where(repo.db.ILike("r.title", release.Title))
+		}
+
+		if profile.SubTitle {
+			queryBuilder = queryBuilder.Where(repo.db.ILike("r.sub_title", release.SubTitle))
+		}
+
+		if profile.ReleaseName && profile.Hash {
+			//queryBuilder = queryBuilder.Where(repo.db.ILike("r.torrent_name", release.TorrentName))
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.normalized_hash": release.NormalizedHash})
+		}
+
+		if profile.Year {
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.year": release.Year})
+		}
+
+		if profile.Month {
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.month": release.Month})
+		}
+
+		if profile.Day {
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.day": release.Day})
+		}
+
+		if profile.Source {
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.source": release.Source})
+		}
+
+		if profile.Container {
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.container": release.Container})
+		}
+
+		if profile.Edition {
+			//queryBuilder = queryBuilder.Where(sq.Eq{"r.cut": release.Cut})
+			if len(release.Cut) > 1 {
+				var and sq.And
+				for _, cut := range release.Cut {
+					//and = append(and, sq.Eq{"r.cut": "%" + cut + "%"})
+					and = append(and, repo.db.ILike("r.cut", "%"+cut+"%"))
+				}
+				queryBuilder = queryBuilder.Where(and)
+			} else if len(release.Cut) == 1 {
+				queryBuilder = queryBuilder.Where(repo.db.ILike("r.cut", "%"+release.Cut[0]+"%"))
+			}
+
+			//queryBuilder = queryBuilder.Where(sq.Eq{"r.edition": release.Edition})
+			if len(release.Edition) > 1 {
+				var and sq.And
+				for _, edition := range release.Edition {
+					and = append(and, repo.db.ILike("r.edition", "%"+edition+"%"))
+				}
+				queryBuilder = queryBuilder.Where(and)
+			} else if len(release.Edition) == 1 {
+				queryBuilder = queryBuilder.Where(repo.db.ILike("r.edition", "%"+release.Edition[0]+"%"))
+			}
+		}
+
+		// video features (hybrid, remux)
+		if release.IsTypeVideo() {
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.hybrid": release.Hybrid})
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.media_processing": release.MediaProcessing})
+		}
+
+		if profile.Language {
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.region": release.Region})
+
+			if len(release.Language) > 0 {
+				var and sq.And
+				for _, lang := range release.Language {
+					and = append(and, repo.db.ILike("r.language", "%"+lang+"%"))
+				}
+
+				queryBuilder = queryBuilder.Where(and)
+			} else {
+				queryBuilder = queryBuilder.Where(sq.Eq{"r.language": ""})
+			}
+		}
+
+		if profile.Codec {
+			if len(release.Codec) > 1 {
+				var and sq.And
+				for _, codec := range release.Codec {
+					and = append(and, repo.db.ILike("r.codec", "%"+codec+"%"))
+				}
+				queryBuilder = queryBuilder.Where(and)
+			} else {
+				// FIXME this does an IN (arg)
+				queryBuilder = queryBuilder.Where(sq.Eq{"r.codec": release.Codec})
+			}
+		}
+
+		if profile.Resolution {
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.resolution": release.Resolution})
+		}
+
+		if profile.DynamicRange {
+			//if len(release.HDR) > 1 {
+			//	var and sq.And
+			//	for _, hdr := range release.HDR {
+			//		and = append(and, repo.db.ILike("r.hdr", "%"+hdr+"%"))
+			//	}
+			//	queryBuilder = queryBuilder.Where(and)
+			//} else {
+			//	queryBuilder = queryBuilder.Where(sq.Eq{"r.hdr": release.HDR})
+			//}
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.hdr": strings.Join(release.HDR, ",")})
+		}
+
+		if profile.Audio {
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.audio": strings.Join(release.Audio, ",")})
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.audio_channels": release.AudioChannels})
+		}
+
+		if profile.Group {
+			queryBuilder = queryBuilder.Where(repo.db.ILike("r.release_group", release.Group))
+		}
+
+		if profile.Season {
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.season": release.Season})
+		}
+
+		if profile.Episode {
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.episode": release.Episode})
+		}
+
+		if profile.Website {
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.website": release.Website})
+		}
+
+		if profile.Proper {
+			queryBuilder = queryBuilder.Where(sq.Eq{"r.proper": release.Proper})
+		}
+
+		if profile.Repack {
+			queryBuilder = queryBuilder.Where(sq.And{
+				sq.Eq{"r.repack": release.Repack},
+				repo.db.ILike("r.release_group", release.Group),
+			})
+		}
+	}
+
+	query, args, err := queryBuilder.ToSql()
+	if err != nil {
+		return false, errors.Wrap(err, "error building query")
+	}
+
+	repo.log.Trace().Str("database", "release.FindDuplicateReleases").Msgf("query: %q, args: %q", query, args)
+
+	rows, err := repo.db.handler.QueryContext(ctx, query, args...)
+	if err != nil {
+		return false, err
+	}
+
+	if err := rows.Err(); err != nil {
+		return false, errors.Wrap(err, "error rows CheckIsDuplicateRelease")
+	}
+
+	type result struct {
+		id      int
+		release string
+		hash    string
+		title   string
+		action  string
+		status  string
+	}
+
+	var res []result
+
+	for rows.Next() {
+		r := result{}
+		if err := rows.Scan(&r.id, &r.release, &r.hash, &r.title, &r.action, &r.status); err != nil {
+			return false, errors.Wrap(err, "error scan CheckIsDuplicateRelease")
+		}
+		res = append(res, r)
+	}
+
+	repo.log.Trace().Str("database", "release.FindDuplicateReleases").Msgf("found duplicate releases: %+v", res)
+
+	if len(res) == 0 {
+		return false, nil
+	}
+
+	return true, nil
+}
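A minimal sketch of how the CheckIsDuplicateRelease query above might be used as a gate before approving a release. The releaseStore interface and skipIfDuplicate helper are invented for illustration and are not autobrr's actual service layer; only the method signature and the "true means a previously PUSH_APPROVED release already matches" semantics come from the code above.

    package example

    import "context"

    // Release and DuplicateReleaseProfile mirror (subsets of) the domain types used by the repo.
    type Release struct {
    	Title          string
    	NormalizedHash string
    }

    type DuplicateReleaseProfile struct {
    	ID    int64
    	Title bool
    }

    // releaseStore is a hypothetical interface matching the repo method added in this commit.
    type releaseStore interface {
    	CheckIsDuplicateRelease(ctx context.Context, profile *DuplicateReleaseProfile, release *Release) (bool, error)
    }

    // skipIfDuplicate reports whether the release should be skipped because an
    // earlier approved release already matches the fields enabled in the profile.
    func skipIfDuplicate(ctx context.Context, store releaseStore, profile *DuplicateReleaseProfile, r *Release) (bool, error) {
    	if profile == nil {
    		return false, nil // no duplicate profile attached to the filter: never skip
    	}
    	isDupe, err := store.CheckIsDuplicateRelease(ctx, profile, r)
    	if err != nil {
    		return false, err
    	}
    	return isDupe, nil
    }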
@ -13,6 +13,7 @@ import (

 	"github.com/autobrr/autobrr/internal/domain"

+	"github.com/moistari/rls"
 	"github.com/stretchr/testify/assert"
 )

@ -49,12 +50,13 @@ func getMockRelease() *domain.Release {
 		Proper: true,
 		Repack: false,
 		Website: "https://example.com",
-		Type: "Movie",
+		Type: rls.Movie,
 		Origin: "P2P",
 		Tags: []string{"Action", "Adventure"},
 		Uploader: "john_doe",
 		PreTime: "10m",
 		FilterID: 1,
+		Other: []string{},
 	}
 }

@ -108,11 +110,11 @@ func TestReleaseRepo_Store(t *testing.T) {
 			// Execute
 			err = repo.Store(context.Background(), mockData)
 			assert.NoError(t, err)
-			createdAction, err := actionRepo.Store(context.Background(), actionMockData)
+			err = actionRepo.Store(context.Background(), actionMockData)
 			assert.NoError(t, err)

 			releaseActionMockData.ReleaseID = mockData.ID
-			releaseActionMockData.ActionID = int64(createdAction.ID)
+			releaseActionMockData.ActionID = int64(actionMockData.ID)
 			releaseActionMockData.FilterID = int64(createdFilters[0].ID)

 			err = repo.StoreReleaseActionStatus(context.Background(), releaseActionMockData)

@ -123,7 +125,7 @@ func TestReleaseRepo_Store(t *testing.T) {

 			// Cleanup
 			_ = repo.Delete(context.Background(), &domain.DeleteReleaseRequest{OlderThan: 0})
-			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: createdAction.ID})
+			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: actionMockData.ID})
 			_ = filterRepo.Delete(context.Background(), createdFilters[0].ID)
 			_ = downloadClientRepo.Delete(context.Background(), mock.ID)
 		})
@ -164,11 +166,11 @@ func TestReleaseRepo_StoreReleaseActionStatus(t *testing.T) {
 			// Execute
 			err = repo.Store(context.Background(), mockData)
 			assert.NoError(t, err)
-			createdAction, err := actionRepo.Store(context.Background(), actionMockData)
+			err = actionRepo.Store(context.Background(), actionMockData)
 			assert.NoError(t, err)

 			releaseActionMockData.ReleaseID = mockData.ID
-			releaseActionMockData.ActionID = int64(createdAction.ID)
+			releaseActionMockData.ActionID = int64(actionMockData.ID)
 			releaseActionMockData.FilterID = int64(createdFilters[0].ID)

 			err = repo.StoreReleaseActionStatus(context.Background(), releaseActionMockData)

@ -179,7 +181,7 @@ func TestReleaseRepo_StoreReleaseActionStatus(t *testing.T) {

 			// Cleanup
 			_ = repo.Delete(context.Background(), &domain.DeleteReleaseRequest{OlderThan: 0})
-			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: createdAction.ID})
+			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: actionMockData.ID})
 			_ = filterRepo.Delete(context.Background(), createdFilters[0].ID)
 			_ = downloadClientRepo.Delete(context.Background(), mock.ID)
 		})
@ -328,11 +330,11 @@ func TestReleaseRepo_GetIndexerOptions(t *testing.T) {

 			err = repo.Store(context.Background(), mockData)
 			assert.NoError(t, err)
-			createdAction, err := actionRepo.Store(context.Background(), actionMockData)
+			err = actionRepo.Store(context.Background(), actionMockData)
 			assert.NoError(t, err)

 			releaseActionMockData.ReleaseID = mockData.ID
-			releaseActionMockData.ActionID = int64(createdAction.ID)
+			releaseActionMockData.ActionID = int64(actionMockData.ID)
 			releaseActionMockData.FilterID = int64(createdFilters[0].ID)

 			err = repo.StoreReleaseActionStatus(context.Background(), releaseActionMockData)

@ -347,7 +349,7 @@ func TestReleaseRepo_GetIndexerOptions(t *testing.T) {

 			// Cleanup
 			_ = repo.Delete(context.Background(), &domain.DeleteReleaseRequest{OlderThan: 0})
-			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: createdAction.ID})
+			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: actionMockData.ID})
 			_ = filterRepo.Delete(context.Background(), createdFilters[0].ID)
 			_ = downloadClientRepo.Delete(context.Background(), mock.ID)
 		})
@ -387,11 +389,11 @@ func TestReleaseRepo_GetActionStatusByReleaseID(t *testing.T) {

 			err = repo.Store(context.Background(), mockData)
 			assert.NoError(t, err)
-			createdAction, err := actionRepo.Store(context.Background(), actionMockData)
+			err = actionRepo.Store(context.Background(), actionMockData)
 			assert.NoError(t, err)

 			releaseActionMockData.ReleaseID = mockData.ID
-			releaseActionMockData.ActionID = int64(createdAction.ID)
+			releaseActionMockData.ActionID = int64(actionMockData.ID)
 			releaseActionMockData.FilterID = int64(createdFilters[0].ID)

 			err = repo.StoreReleaseActionStatus(context.Background(), releaseActionMockData)

@ -407,7 +409,7 @@ func TestReleaseRepo_GetActionStatusByReleaseID(t *testing.T) {

 			// Cleanup
 			_ = repo.Delete(context.Background(), &domain.DeleteReleaseRequest{OlderThan: 0})
-			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: createdAction.ID})
+			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: actionMockData.ID})
 			_ = filterRepo.Delete(context.Background(), createdFilters[0].ID)
 			_ = downloadClientRepo.Delete(context.Background(), mock.ID)
 		})
@ -447,11 +449,11 @@ func TestReleaseRepo_Get(t *testing.T) {

 			err = repo.Store(context.Background(), mockData)
 			assert.NoError(t, err)
-			createdAction, err := actionRepo.Store(context.Background(), actionMockData)
+			err = actionRepo.Store(context.Background(), actionMockData)
 			assert.NoError(t, err)

 			releaseActionMockData.ReleaseID = mockData.ID
-			releaseActionMockData.ActionID = int64(createdAction.ID)
+			releaseActionMockData.ActionID = int64(actionMockData.ID)
 			releaseActionMockData.FilterID = int64(createdFilters[0].ID)

 			err = repo.StoreReleaseActionStatus(context.Background(), releaseActionMockData)

@ -467,7 +469,7 @@ func TestReleaseRepo_Get(t *testing.T) {

 			// Cleanup
 			_ = repo.Delete(context.Background(), &domain.DeleteReleaseRequest{OlderThan: 0})
-			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: createdAction.ID})
+			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: actionMockData.ID})
 			_ = filterRepo.Delete(context.Background(), createdFilters[0].ID)
 			_ = downloadClientRepo.Delete(context.Background(), mock.ID)
 		})
@ -507,11 +509,11 @@ func TestReleaseRepo_Stats(t *testing.T) {

 			err = repo.Store(context.Background(), mockData)
 			assert.NoError(t, err)
-			createdAction, err := actionRepo.Store(context.Background(), actionMockData)
+			err = actionRepo.Store(context.Background(), actionMockData)
 			assert.NoError(t, err)

 			releaseActionMockData.ReleaseID = mockData.ID
-			releaseActionMockData.ActionID = int64(createdAction.ID)
+			releaseActionMockData.ActionID = int64(actionMockData.ID)
 			releaseActionMockData.FilterID = int64(createdFilters[0].ID)

 			err = repo.StoreReleaseActionStatus(context.Background(), releaseActionMockData)

@ -527,7 +529,7 @@ func TestReleaseRepo_Stats(t *testing.T) {

 			// Cleanup
 			_ = repo.Delete(context.Background(), &domain.DeleteReleaseRequest{OlderThan: 0})
-			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: createdAction.ID})
+			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: actionMockData.ID})
 			_ = filterRepo.Delete(context.Background(), createdFilters[0].ID)
 			_ = downloadClientRepo.Delete(context.Background(), mock.ID)
 		})
@ -567,11 +569,11 @@ func TestReleaseRepo_Delete(t *testing.T) {

 			err = repo.Store(context.Background(), mockData)
 			assert.NoError(t, err)
-			createdAction, err := actionRepo.Store(context.Background(), actionMockData)
+			err = actionRepo.Store(context.Background(), actionMockData)
 			assert.NoError(t, err)

 			releaseActionMockData.ReleaseID = mockData.ID
-			releaseActionMockData.ActionID = int64(createdAction.ID)
+			releaseActionMockData.ActionID = int64(actionMockData.ID)
 			releaseActionMockData.FilterID = int64(createdFilters[0].ID)

 			err = repo.StoreReleaseActionStatus(context.Background(), releaseActionMockData)

@ -584,7 +586,7 @@ func TestReleaseRepo_Delete(t *testing.T) {
 			assert.NoError(t, err)

 			// Cleanup
-			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: createdAction.ID})
+			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: actionMockData.ID})
 			_ = filterRepo.Delete(context.Background(), createdFilters[0].ID)
 			_ = downloadClientRepo.Delete(context.Background(), mock.ID)
 		})
@ -624,11 +626,11 @@ func TestReleaseRepo_CheckSmartEpisodeCanDownloadShow(t *testing.T) {

 			err = repo.Store(context.Background(), mockData)
 			assert.NoError(t, err)
-			createdAction, err := actionRepo.Store(context.Background(), actionMockData)
+			err = actionRepo.Store(context.Background(), actionMockData)
 			assert.NoError(t, err)

 			releaseActionMockData.ReleaseID = mockData.ID
-			releaseActionMockData.ActionID = int64(createdAction.ID)
+			releaseActionMockData.ActionID = int64(actionMockData.ID)
 			releaseActionMockData.FilterID = int64(createdFilters[0].ID)

 			err = repo.StoreReleaseActionStatus(context.Background(), releaseActionMockData)

@ -652,9 +654,724 @@ func TestReleaseRepo_CheckSmartEpisodeCanDownloadShow(t *testing.T) {

 			// Cleanup
 			_ = repo.Delete(context.Background(), &domain.DeleteReleaseRequest{OlderThan: 0})
-			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: createdAction.ID})
+			_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: actionMockData.ID})
 			_ = filterRepo.Delete(context.Background(), createdFilters[0].ID)
 			_ = downloadClientRepo.Delete(context.Background(), mock.ID)
 		})
 	}
 }
+
+func getMockDuplicateReleaseProfileTV() *domain.DuplicateReleaseProfile {
+	return &domain.DuplicateReleaseProfile{
+		ID: 0,
+		Name: "TV",
+		Protocol: false,
+		ReleaseName: false,
+		Hash: false,
+		Title: true,
+		SubTitle: false,
+		Year: false,
+		Month: false,
+		Day: false,
+		Source: false,
+		Resolution: false,
+		Codec: false,
+		Container: false,
+		DynamicRange: false,
+		Audio: false,
+		Group: false,
+		Season: true,
+		Episode: true,
+		Website: false,
+		Proper: false,
+		Repack: false,
+		Edition: false,
+		Language: false,
+	}
+}
+
+func getMockDuplicateReleaseProfileTVDaily() *domain.DuplicateReleaseProfile {
+	return &domain.DuplicateReleaseProfile{
+		ID: 0,
+		Name: "TV",
+		Protocol: false,
+		ReleaseName: false,
+		Hash: false,
+		Title: true,
+		SubTitle: false,
+		Year: true,
+		Month: true,
+		Day: true,
+		Source: false,
+		Resolution: false,
+		Codec: false,
+		Container: false,
+		DynamicRange: false,
+		Audio: false,
+		Group: false,
+		Season: false,
+		Episode: false,
+		Website: false,
+		Proper: false,
+		Repack: false,
+		Edition: false,
+		Language: false,
+	}
+}
+
+func getMockFilterDuplicates() *domain.Filter {
+	return &domain.Filter{
+		Name: "New Filter",
+		Enabled: true,
+		CreatedAt: time.Now(),
+		UpdatedAt: time.Now(),
+		MinSize: "10mb",
+		MaxSize: "20mb",
+		Delay: 60,
+		Priority: 1,
+		MaxDownloads: 100,
+		MaxDownloadsUnit: domain.FilterMaxDownloadsHour,
+		MatchReleases: "BRRip",
+		ExceptReleases: "BRRip",
+		UseRegex: false,
+		MatchReleaseGroups: "AMIABLE",
+		ExceptReleaseGroups: "NTb",
+		Scene: false,
+		Origins: nil,
+		ExceptOrigins: nil,
+		Bonus: nil,
+		Freeleech: false,
+		FreeleechPercent: "100%",
+		SmartEpisode: false,
+		Shows: "Is It Wrong to Try to Pick Up Girls in a Dungeon?",
+		Seasons: "4",
+		Episodes: "500",
+		Resolutions: []string{"1080p"},
+		Codecs: []string{"x264"},
+		Sources: []string{"BluRay"},
+		Containers: []string{"mkv"},
+		MatchHDR: []string{"HDR10"},
+		ExceptHDR: []string{"HDR10"},
+		MatchOther: []string{"Atmos"},
+		ExceptOther: []string{"Atmos"},
+		Years: "2023",
+		Months: "",
+		Days: "",
+		Artists: "",
+		Albums: "",
+		MatchReleaseTypes: []string{"Remux"},
+		ExceptReleaseTypes: "Remux",
+		Formats: []string{"FLAC"},
+		Quality: []string{"Lossless"},
+		Media: []string{"CD"},
+		PerfectFlac: true,
+		Cue: true,
+		Log: true,
+		LogScore: 100,
+		MatchCategories: "Anime",
+		ExceptCategories: "Anime",
+		MatchUploaders: "SubsPlease",
+		ExceptUploaders: "SubsPlease",
+		MatchLanguage: []string{"English", "Japanese"},
+		ExceptLanguage: []string{"English", "Japanese"},
+		Tags: "Anime, x264",
+		ExceptTags: "Anime, x264",
+		TagsAny: "Anime, x264",
+		ExceptTagsAny: "Anime, x264",
+		TagsMatchLogic: "AND",
+		ExceptTagsMatchLogic: "AND",
+		MatchReleaseTags: "Anime, x264",
+		ExceptReleaseTags: "Anime, x264",
+		UseRegexReleaseTags: true,
+		MatchDescription: "Anime, x264",
+		ExceptDescription: "Anime, x264",
+		UseRegexDescription: true,
+	}
+}
+
+func TestReleaseRepo_CheckIsDuplicateRelease(t *testing.T) {
+	for dbType, db := range testDBs {
+		log := setupLoggerForTest()
+
+		downloadClientRepo := NewDownloadClientRepo(log, db)
+		filterRepo := NewFilterRepo(log, db)
+		actionRepo := NewActionRepo(log, db, downloadClientRepo)
+		releaseRepo := NewReleaseRepo(log, db)
+
+		// reset
+		//db.handler.Exec("DELETE FROM release")
+		//db.handler.Exec("DELETE FROM action")
+		//db.handler.Exec("DELETE FROM release_action_status")
+
+		mockIndexer := domain.IndexerMinimal{ID: 0, Name: "Mock", Identifier: "mock", IdentifierExternal: "Mock"}
+		actionMock := &domain.Action{Name: "Test", Type: domain.ActionTypeTest, Enabled: true}
+		filterMock := getMockFilterDuplicates()
+
+		// Setup
+		err := filterRepo.Store(context.Background(), filterMock)
+		assert.NoError(t, err)
+
+		createdFilters, err := filterRepo.ListFilters(context.Background())
+		assert.NoError(t, err)
+		assert.NotNil(t, createdFilters)
+
+		actionMock.FilterID = filterMock.ID
+
+		err = actionRepo.Store(context.Background(), actionMock)
+		assert.NoError(t, err)
+
+		type fields struct {
+			releaseTitles []string
+			releaseTitle  string
+			profile       *domain.DuplicateReleaseProfile
+		}
+
+		tests := []struct {
+			name        string
+			fields      fields
+			isDuplicate bool
+		}{
+			{
+				name: "1",
+				fields: fields{
+					releaseTitles: []string{
+						"Inkheart 2008 BluRay 1080p DD5.1 x264-BADGROUP",
+					},
+					releaseTitle: "Inkheart 2008 BluRay 1080p DD5.1 x264-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Group: true},
+				},
+				isDuplicate: false,
+			},
+			{
+				name: "2",
+				fields: fields{
+					releaseTitles: []string{
+						"That.Movie.2023.BluRay.2160p.x265.DTS-HD-GROUP",
+						"That.Movie.2023.BluRay.720p.x265.DTS-HD-GROUP",
+						"That.Movie.2023.WEB.2160p.x265.DTS-HD-GROUP",
+					},
+					releaseTitle: "That.Movie.2023.BluRay.2160p.x265.DTS-HD-GROUP1",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Source: true, Resolution: true},
+				},
+				isDuplicate: true,
+			},
+			{
+				name: "3",
+				fields: fields{
+					releaseTitles: []string{
+						"That.Movie.2023.BluRay.2160p.x265.DTS-HD-GROUP",
+						"That.Movie.2023.BluRay.720p.x265.DTS-HD-GROUP",
+						"That.Movie.2023.WEB.2160p.x265.DTS-HD-GROUP",
+					},
+					releaseTitle: "That.Movie.2023.BluRay.2160p.x265.DTS-HD-GROUP1",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true},
+				},
+				isDuplicate: true,
+			},
+			{
+				name: "4",
+				fields: fields{
+					releaseTitles: []string{
+						"That.Movie.2023.BluRay.2160p.x265.DTS-HD-GROUP",
+						"That.Movie.2023.BluRay.720p.x265.DTS-HD-GROUP",
+						"That.Movie.2023.WEB.2160p.x265.DTS-HD-GROUP",
+					},
+					releaseTitle: "That.Movie.2023.BluRay.2160p.x265.DTS-HD-GROUP1",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, Group: true},
+				},
+				isDuplicate: false,
+			},
+			{
+				name: "5",
+				fields: fields{
+					releaseTitles: []string{
+						"That.Tv.Show.2023.S01E01.BluRay.2160p.x265.DTS-HD-GROUP",
+					},
+					releaseTitle: "That.Tv.Show.2023.S01E01.BluRay.2160p.x265.DTS-HD-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Season: true, Episode: true, Source: true, Codec: true, Resolution: true, Group: true},
+				},
+				isDuplicate: true,
+			},
+			{
+				name: "6",
+				fields: fields{
+					releaseTitles: []string{
+						"That.Tv.Show.2023.S01E01.BluRay.2160p.x265.DTS-HD-GROUP",
+					},
+					releaseTitle: "That.Tv.Show.2023.S01E02.BluRay.2160p.x265.DTS-HD-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Season: true, Episode: true, Source: true, Codec: true, Resolution: true, Group: true},
+				},
+				isDuplicate: false,
+			},
+			{
+				name: "7",
+				fields: fields{
+					releaseTitles: []string{
+						"That.Tv.Show.2023.S01.BluRay.2160p.x265.DTS-HD-GROUP",
+					},
+					releaseTitle: "That.Tv.Show.2023.S01.BluRay.2160p.x265.DTS-HD-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Season: true, Episode: true, Source: true, Codec: true, Resolution: true, Group: true},
+				},
+				isDuplicate: true,
+			},
+			{
+				name: "8",
+				fields: fields{
+					releaseTitles: []string{
+						"The Best Show 2020 S04E10 1080p AMZN WEB-DL DDP 5.1 SDR H.264-GROUP",
+					},
+					releaseTitle: "The Best Show 2020 S04E10 1080p AMZN WEB-DL DDP 5.1 SDR H.264-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Season: true, Episode: true, Source: true, Codec: true, Resolution: true, Website: true, Group: true},
+				},
+				isDuplicate: true,
+			},
+			{
+				name: "9",
+				fields: fields{
+					releaseTitles: []string{
+						"The Best Show 2020 S04E10 1080p HULU WEB-DL DDP 5.1 SDR H.264-GROUP",
+						"The.Best.Show.2020.S04E10.1080p.HULU.WEB-DL.DDP.5.1.SDR.H.264-GROUP",
+					},
+					releaseTitle: "The Best Show 2020 S04E10 1080p AMZN WEB-DL DDP 5.1 SDR H.264-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Season: true, Episode: true, Source: true, Codec: true, Resolution: true, Website: true, Group: true},
+				},
+				isDuplicate: false,
+			},
+			{
+				name: "10",
+				fields: fields{
+					releaseTitles: []string{
+						"The Best Show 2020 S04E10 1080p HULU WEB-DL DDP 5.1 H.264-GROUP",
+						"The.Best.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.H.264-GROUP",
+						"The.Best.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.HDR.DV.H.264-GROUP",
+					},
+					releaseTitle: "The Best Show 2020 S04E10 1080p AMZN WEB-DL DDP 5.1 H.264-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Season: true, Episode: true, Source: true, Codec: true, Resolution: true, Website: true, DynamicRange: true, Group: true},
+				},
+				isDuplicate: true,
+			},
+			{
+				name: "11",
+				fields: fields{
+					releaseTitles: []string{
+						"The Best Show 2020 S04E10 1080p HULU WEB-DL DDP 5.1 SDR H.264-GROUP",
+						"The.Best.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.SDR.H.264-GROUP",
+						"The.Best.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.HDR.DV.H.264-GROUP",
+						"The Best Show 2020 S04E10 1080p amzn web-dl ddp 5.1 hdr dv h.264-group",
+					},
+					releaseTitle: "The Best Show 2020 S04E10 1080p AMZN WEB-DL DDP 5.1 HDR DV H.264-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Season: true, Episode: true, Source: true, Codec: true, Resolution: true, Website: true, DynamicRange: true},
+				},
+				isDuplicate: true,
+			},
+			{
+				name: "12",
+				fields: fields{
+					releaseTitles: []string{
+						"The Best Show 2020 S04E10 1080p HULU WEB-DL DDP 5.1 SDR H.264-GROUP",
+						"The.Best.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.SDR.H.264-GROUP",
+						"The.Best.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.HDR.DV.H.264-GROUP",
+					},
+					releaseTitle: "The Best Show 2020 S04E10 1080p AMZN WEB-DL DDP 5.1 DV H.264-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Season: true, Episode: true, Source: true, Codec: true, Resolution: true, Website: true, DynamicRange: true, Group: true},
+				},
+				isDuplicate: false,
+			},
+			{
+				name: "13",
+				fields: fields{
+					releaseTitles: []string{
+						"The Best Show 2020 S04E10 1080p HULU WEB-DL DDP 5.1 SDR H.264-GROUP",
+						"The.Best.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.SDR.H.264-GROUP",
+						"The.Best.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.HDR.DV.H.264-GROUP",
+					},
+					releaseTitle: "The Best Show 2020 S04E10 Episode Title 1080p AMZN WEB-DL DDP 5.1 HDR DV H.264-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, SubTitle: true, Year: true, Season: true, Episode: true, Source: true, Codec: true, Resolution: true, Website: true, DynamicRange: true, Group: true},
+				},
+				isDuplicate: false,
+			},
+			{
+				name: "14",
+				fields: fields{
+					releaseTitles: []string{
+						"The Best Show 2020 S04E10 1080p HULU WEB-DL DDP 5.1 SDR H.264-GROUP",
+						"The.Best.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.SDR.H.264-GROUP",
+						"The.Best.Show.2020.S04E10.Episode.Title.1080p.AMZN.WEB-DL.DDP.5.1.HDR.DV.H.264-GROUP",
+					},
+					releaseTitle: "The Best Show 2020 S04E10 Episode Title 1080p AMZN WEB-DL DDP 5.1 HDR DV H.264-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, SubTitle: true, Year: true, Season: true, Episode: true, Source: true, Codec: true, Resolution: true, Website: true, DynamicRange: true, Group: true},
+				},
+				isDuplicate: true,
+			},
+			{
+				name: "15",
+				fields: fields{
+					releaseTitles: []string{
+						"The Best Show 2020 S04E10 1080p HULU WEB-DL DDP 5.1 SDR H.264-GROUP",
+						"The.Best.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.SDR.H.264-GROUP",
+						"The.Best.Show.2020.S04E10.Episode.Title.1080p.AMZN.WEB-DL.DDP.5.1.HDR.DV.H.264-GROUP",
+					},
+					releaseTitle: "The Best Show 2020 S04E10 Episode Title 1080p AMZN WEB-DL DDP 5.1 HDR DV H.264-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, SubTitle: true, Season: true, Episode: true, DynamicRange: true},
+				},
+				isDuplicate: true,
+			},
+			{
+				name: "16",
+				fields: fields{
+					releaseTitles: []string{
+						"The Best Show 2020 S04E10 1080p HULU WEB-DL DDP 5.1 SDR H.264-GROUP",
+						"The.Best.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.SDR.H.264-GROUP",
+						"The.Best.Show.2020.S04E10.Episode.Title.1080p.AMZN.WEB-DL.DDP.5.1.HDR.DV.H.264-GROUP",
+					},
+					releaseTitle: "The Best Show 2020 S04E11 Episode Title 1080p AMZN WEB-DL DDP 5.1 HDR DV H.264-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, SubTitle: true, Season: true, Episode: true, DynamicRange: true},
+				},
+				isDuplicate: false,
+			},
+			{
+				name: "17",
+				fields: fields{
+					releaseTitles: []string{
+						"The Best Show 2020 S04E10 1080p HULU WEB-DL DDP 5.1 SDR H.264-GROUP",
+						"The.Best.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.SDR.H.264-GROUP",
+						"The.Best.Show.2020.S04E10.Episode.Title.1080p.AMZN.WEB-DL.DDP.5.1.HDR.DV.H.264-GROUP",
+					},
+					releaseTitle: "The Best Show 2020 S04E10 Episode Title REPACK 1080p AMZN WEB-DL DDP 5.1 HDR DV H.264-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, SubTitle: true, Season: true, Episode: true, DynamicRange: true},
+				},
+				isDuplicate: true,
+			},
+			{
+				name: "18",
+				fields: fields{
+					releaseTitles: []string{
+						"The Best Show 2020 S04E10 1080p HULU WEB-DL DDP 5.1 SDR H.264-GROUP",
+						"The.Best.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.SDR.H.264-GROUP",
+						"The.Best.Show.2020.S04E10.Episode.Title.REPACK.1080p.AMZN.WEB-DL.DDP.5.1.HDR.DV.H.264-GROUP",
+					},
+					releaseTitle: "The Best Show 2020 S04E10 Episode Title REPACK 1080p AMZN WEB-DL DDP 5.1 DV H.264-OTHERGROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Season: true, Episode: true, Repack: true},
+				},
+				isDuplicate: false, // not a match because REPACK checks for the same group
+			},
+			{
+				name: "19",
+				fields: fields{
+					releaseTitles: []string{
+						"The Daily Show 2024-09-21 1080p HULU WEB-DL DDP 5.1 SDR H.264-GROUP",
+						"The Daily Show 2024-09-21.1080p.AMZN.WEB-DL.DDP.5.1.SDR.H.264-GROUP",
+						"The Daily Show 2024-09-21.Guest.1080p.AMZN.WEB-DL.DDP.5.1.H.264-GROUP1",
+					},
+					releaseTitle: "The Daily Show 2024-09-21.Other.Guest.1080p.AMZN.WEB-DL.DDP.5.1.H.264-GROUP1",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Season: true, Episode: true, Year: true, Month: true, Day: true},
+				},
+				isDuplicate: true,
+			},
+			{
+				name: "20",
+				fields: fields{
+					releaseTitles: []string{
+						"The Daily Show 2024-09-21 1080p HULU WEB-DL DDP 5.1 SDR H.264-GROUP",
+						"The Daily Show 2024-09-21.1080p.AMZN.WEB-DL.DDP.5.1.SDR.H.264-GROUP",
+						"The Daily Show 2024-09-21.Guest.1080p.AMZN.WEB-DL.DDP.5.1.H.264-GROUP1",
+					},
+					releaseTitle: "The Daily Show 2024-09-21 Other Guest 1080p AMZN WEB-DL DDP 5.1 H.264-GROUP1",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Season: true, Episode: true, Year: true, Month: true, Day: true, SubTitle: true},
+				},
+				isDuplicate: false,
+			},
+			{
+				name: "21",
+				fields: fields{
+					releaseTitles: []string{
+						"The Daily Show 2024-09-21 1080p HULU WEB-DL DDP 5.1 SDR H.264-GROUP",
+						"The Daily Show 2024-09-21.1080p.AMZN.WEB-DL.DDP.5.1.SDR.H.264-GROUP",
+						"The Daily Show 2024-09-21.Guest.1080p.AMZN.WEB-DL.DDP.5.1.H.264-GROUP1",
+					},
+					releaseTitle: "The Daily Show 2024-09-22 Other Guest 1080p AMZN WEB-DL DDP 5.1 H.264-GROUP1",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Season: true, Episode: true, Year: true, Month: true, Day: true, SubTitle: true},
+				},
+				isDuplicate: false,
+			},
+			{
+				name: "22",
+				fields: fields{
+					releaseTitles: []string{
+						"That.Movie.2023.BluRay.2160p.x265.DTS-HD-GROUP",
+						"That.Movie.2023.BluRay.720p.x265.DTS-HD-GROUP",
+						"That.Movie.2023.2160p.BluRay.DTS-HD.5.1.x265-GROUP",
+					},
+					releaseTitle: "That.Movie.2023.2160p.BluRay.DD.2.0.x265-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, Audio: true, Group: true},
+				},
+				isDuplicate: false,
+			},
+			{
+				name: "23",
+				fields: fields{
+					releaseTitles: []string{
+						"That.Movie.2023.BluRay.2160p.x265.DTS-HD-GROUP",
+						"That.Movie.2023.BluRay.720p.x265.DTS-HD-GROUP",
+						"That.Movie.2023.2160p.BluRay.DTS-HD.5.1.x265-GROUP",
+					},
+					releaseTitle: "That.Movie.2023.2160p.BluRay.DTS-HD.5.1.x265-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, Audio: true, Group: true},
+				},
+				isDuplicate: true,
+			},
+			{
+				name: "24",
+				fields: fields{
+					releaseTitles: []string{
+						"That.Movie.2023.BluRay.2160p.x265.DTS-HD-GROUP",
+						"That.Movie.2023.BluRay.720p.x265.DTS-HD-GROUP",
+						"That.Movie.2023.2160p.BluRay.DD.5.1.x265-GROUP",
+					},
+					releaseTitle: "That.Movie.2023.2160p.BluRay.AC3.5.1.x265-GROUP",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, Audio: true, Group: true},
+				},
+				isDuplicate: true,
+			},
+			{
+				name: "25",
+				fields: fields{
+					releaseTitles: []string{
+						//"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 DV HEVC REMUX-FraMeSToR",
+						"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 DV HEVC-FraMeSToR",
+					},
+					releaseTitle: "Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 DV HEVC REMUX-FraMeSToR",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, Audio: true, Group: true},
+				},
+				isDuplicate: false,
+			},
+			{
+				name: "26",
+				fields: fields{
+					releaseTitles: []string{
+						//"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 DV HEVC REMUX-FraMeSToR",
+						"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 DV HEVC REMUX-FraMeSToR",
+					},
+					releaseTitle: "Despicable Me 4 2024 Collectors Edition UHD BluRay 2160p TrueHD Atmos 7.1 DV HEVC REMUX Hybrid-FraMeSToR",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, Audio: true, Group: true},
+				},
+				isDuplicate: false,
+			},
+			{
+				name: "27",
+				fields: fields{
+					releaseTitles: []string{
+						"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 DV HEVC REMUX Hybrid-FraMeSToR",
+					},
+					releaseTitle: "Despicable Me 4 2024 Collectors Edition UHD BluRay 2160p TrueHD Atmos 7.1 DV HEVC REMUX Hybrid-FraMeSToR",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Edition: false, Source: true, Codec: true, Resolution: true, Audio: true, Group: true},
+				},
+				isDuplicate: true,
+			},
+			{
+				name: "28",
+				fields: fields{
+					releaseTitles: []string{
+						"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 DV HEVC REMUX-FraMeSToR",
+						"Despicable Me 4 2024 Collectors Edition UHD BluRay 2160p TrueHD Atmos 7.1 DV HEVC REMUX Hybrid-FraMeSToR",
+					},
+					releaseTitle: "Despicable Me 4 2024 Collectors Edition UHD BluRay 2160p TrueHD Atmos 7.1 DV HEVC REMUX Hybrid-FraMeSToR",
+					profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Edition: true, Source: true, Codec: true, Resolution: true, Audio: true, Group: true},
+				},
+				isDuplicate: true,
+			},
+			{
+				name: "29",
+				fields: fields{
+					releaseTitles: []string{
+						"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HDR10 HEVC REMUX Hybrid-FraMeSToR",
|
||||||
|
"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HDR HEVC REMUX Hybrid-FraMeSToR",
|
||||||
|
"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HDR DV HEVC REMUX Hybrid-FraMeSToR",
|
||||||
|
},
|
||||||
|
releaseTitle: "Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 DV HEVC REMUX Hybrid-FraMeSToR",
|
||||||
|
profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, DynamicRange: true, Audio: true, Group: true},
|
||||||
|
},
|
||||||
|
isDuplicate: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "30",
|
||||||
|
fields: fields{
|
||||||
|
releaseTitles: []string{
|
||||||
|
"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HDR10 HEVC REMUX Hybrid-FraMeSToR",
|
||||||
|
"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HDR HEVC REMUX Hybrid-FraMeSToR",
|
||||||
|
"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 DV HEVC REMUX Hybrid-FraMeSToR",
|
||||||
|
},
|
||||||
|
releaseTitle: "Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HDR DV HEVC REMUX Hybrid-FraMeSToR",
|
||||||
|
profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, DynamicRange: true, Audio: true, Group: true},
|
||||||
|
},
|
||||||
|
isDuplicate: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "31",
|
||||||
|
fields: fields{
|
||||||
|
releaseTitles: []string{
|
||||||
|
"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HDR10 HEVC REMUX Hybrid-FraMeSToR",
|
||||||
|
"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HDR HEVC REMUX Hybrid-FraMeSToR",
|
||||||
|
"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 DV HEVC REMUX Hybrid-FraMeSToR",
|
||||||
|
},
|
||||||
|
releaseTitle: "Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HEVC REMUX Hybrid-FraMeSToR",
|
||||||
|
profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, DynamicRange: true, Audio: true, Group: true},
|
||||||
|
},
|
||||||
|
isDuplicate: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "32",
|
||||||
|
fields: fields{
|
||||||
|
releaseTitles: []string{
|
||||||
|
"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HDR10 HEVC REMUX Hybrid-FraMeSToR",
|
||||||
|
"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HDR HEVC REMUX Hybrid-FraMeSToR",
|
||||||
|
"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HEVC REMUX Hybrid-FraMeSToR",
|
||||||
|
},
|
||||||
|
releaseTitle: "Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HEVC DV REMUX Hybrid-FraMeSToR",
|
||||||
|
profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, DynamicRange: true, Audio: true, Group: true},
|
||||||
|
},
|
||||||
|
isDuplicate: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "33",
|
||||||
|
fields: fields{
|
||||||
|
releaseTitles: []string{
|
||||||
|
"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HEVC DV REMUX Hybrid-FraMeSToR",
|
||||||
|
"Despicable Me 4 2024 FRENCH UHD BluRay 2160p TrueHD Atmos 7.1 HEVC DV REMUX Hybrid-FraMeSToR",
|
||||||
|
},
|
||||||
|
releaseTitle: "Despicable Me 4 2024 GERMAN UHD BluRay 2160p TrueHD Atmos 7.1 HEVC DV REMUX Hybrid-FraMeSToR",
|
||||||
|
profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, DynamicRange: true, Audio: true, Group: true, Language: true},
|
||||||
|
},
|
||||||
|
isDuplicate: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "34",
|
||||||
|
fields: fields{
|
||||||
|
releaseTitles: []string{
|
||||||
|
"Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HEVC DV REMUX Hybrid-FraMeSToR",
|
||||||
|
"Despicable Me 4 2024 FRENCH UHD BluRay 2160p TrueHD Atmos 7.1 HEVC DV REMUX Hybrid-FraMeSToR",
|
||||||
|
"Despicable Me 4 2024 GERMAN UHD BluRay 2160p TrueHD Atmos 7.1 HEVC DV REMUX Hybrid-FraMeSToR",
|
||||||
|
},
|
||||||
|
releaseTitle: "Despicable Me 4 2024 GERMAN UHD BluRay 2160p TrueHD Atmos 7.1 HEVC DV REMUX Hybrid-FraMeSToR",
|
||||||
|
profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, DynamicRange: true, Audio: true, Group: true, Language: true},
|
||||||
|
},
|
||||||
|
isDuplicate: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "35",
|
||||||
|
fields: fields{
|
||||||
|
releaseTitles: []string{
|
||||||
|
"Despicable Me 4 2024 FRENCH UHD BluRay 2160p TrueHD Atmos 7.1 HEVC DV REMUX Hybrid-FraMeSToR",
|
||||||
|
"Despicable Me 4 2024 GERMAN UHD BluRay 2160p TrueHD Atmos 7.1 HEVC DV REMUX Hybrid-FraMeSToR",
|
||||||
|
},
|
||||||
|
releaseTitle: "Despicable Me 4 2024 UHD BluRay 2160p TrueHD Atmos 7.1 HEVC DV REMUX Hybrid-FraMeSToR",
|
||||||
|
profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, DynamicRange: true, Audio: true, Group: true, Language: true},
|
||||||
|
},
|
||||||
|
isDuplicate: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "36",
|
||||||
|
fields: fields{
|
||||||
|
releaseTitles: []string{
|
||||||
|
"Road House 1989 1080p GER Blu-ray AVC LPCM 2.0-MONUMENT",
|
||||||
|
},
|
||||||
|
releaseTitle: "Road House 1989 1080p Blu-ray AVC LPCM 2.0-MONUMENT",
|
||||||
|
profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, Group: true, Language: true},
|
||||||
|
},
|
||||||
|
isDuplicate: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "37",
|
||||||
|
fields: fields{
|
||||||
|
releaseTitles: []string{
|
||||||
|
"Road House 1989 1080p ITA Blu-ray AVC LPCM 2.0-MONUMENT",
|
||||||
|
"Road House 1989 1080p GER Blu-ray AVC LPCM 2.0-MONUMENT",
|
||||||
|
},
|
||||||
|
releaseTitle: "Road House 1989 1080p NOR Blu-ray AVC LPCM 2.0-MONUMENT",
|
||||||
|
profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, Group: true, Language: true},
|
||||||
|
},
|
||||||
|
isDuplicate: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "38",
|
||||||
|
fields: fields{
|
||||||
|
releaseTitles: []string{
|
||||||
|
"Road House 1989 1080p GER Blu-ray AVC LPCM 2.0-MONUMENT",
|
||||||
|
},
|
||||||
|
releaseTitle: "Road House 1989 1080p GER Blu-ray AVC LPCM 2.0-MONUMENT",
|
||||||
|
profile: &domain.DuplicateReleaseProfile{Title: true, Year: true, Source: true, Codec: true, Resolution: true, Group: true, Language: true},
|
||||||
|
},
|
||||||
|
isDuplicate: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: "39",
|
||||||
|
fields: fields{
|
||||||
|
releaseTitles: []string{
|
||||||
|
"The Best Show 2020 S04E10 1080p AMZN WEB-DL DDP 5.1 H.264-GROUP",
|
||||||
|
"The.Best.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.H.264-GROUP",
|
||||||
|
"The.Best.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.HDR.DV.H.264-GROUP",
|
||||||
|
},
|
||||||
|
releaseTitle: "The Best Show 2020 S04E10 1080p AMZN WEB-DL DDP 5.1 H.264-GROUP",
|
||||||
|
profile: &domain.DuplicateReleaseProfile{ReleaseName: true},
|
||||||
|
},
|
||||||
|
isDuplicate: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(fmt.Sprintf("Check_Is_Duplicate_Release %s [%s]", tt.name, dbType), func(t *testing.T) {
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
// Setup
|
||||||
|
for _, rel := range tt.fields.releaseTitles {
|
||||||
|
mockRel := domain.NewRelease(mockIndexer)
|
||||||
|
mockRel.ParseString(rel)
|
||||||
|
|
||||||
|
mockRel.FilterID = filterMock.ID
|
||||||
|
|
||||||
|
err = releaseRepo.Store(ctx, mockRel)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
|
||||||
|
ras := &domain.ReleaseActionStatus{
|
||||||
|
ID: 0,
|
||||||
|
Status: domain.ReleasePushStatusApproved,
|
||||||
|
Action: "test",
|
||||||
|
ActionID: int64(actionMock.ID),
|
||||||
|
Type: domain.ActionTypeTest,
|
||||||
|
Client: "",
|
||||||
|
Filter: "Test filter",
|
||||||
|
FilterID: int64(filterMock.ID),
|
||||||
|
Rejections: []string{},
|
||||||
|
ReleaseID: mockRel.ID,
|
||||||
|
Timestamp: time.Now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
err = releaseRepo.StoreReleaseActionStatus(ctx, ras)
|
||||||
|
assert.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
releases, err := releaseRepo.Find(ctx, domain.ReleaseQueryParams{})
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Len(t, releases.Data, len(tt.fields.releaseTitles))
|
||||||
|
|
||||||
|
compareRel := domain.NewRelease(mockIndexer)
|
||||||
|
compareRel.ParseString(tt.fields.releaseTitle)
|
||||||
|
|
||||||
|
// Execute
|
||||||
|
isDuplicate, err := releaseRepo.CheckIsDuplicateRelease(ctx, tt.fields.profile, compareRel)
|
||||||
|
|
||||||
|
// Verify
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Equal(t, tt.isDuplicate, isDuplicate)
|
||||||
|
|
||||||
|
// Cleanup
|
||||||
|
_ = releaseRepo.Delete(ctx, &domain.DeleteReleaseRequest{OlderThan: 0})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cleanup
|
||||||
|
//_ = releaseRepo.Delete(context.Background(), &domain.DeleteReleaseRequest{OlderThan: 0})
|
||||||
|
_ = actionRepo.Delete(context.Background(), &domain.DeleteActionRequest{ActionId: actionMock.ID})
|
||||||
|
_ = filterRepo.Delete(context.Background(), createdFilters[0].ID)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -88,9 +88,42 @@ CREATE TABLE irc_channel
    UNIQUE (network_id, name)
);

+CREATE TABLE release_profile_duplicate
+(
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    name TEXT NOT NULL,
+    protocol BOOLEAN DEFAULT FALSE,
+    release_name BOOLEAN DEFAULT FALSE,
+    hash BOOLEAN DEFAULT FALSE,
+    title BOOLEAN DEFAULT FALSE,
+    sub_title BOOLEAN DEFAULT FALSE,
+    year BOOLEAN DEFAULT FALSE,
+    month BOOLEAN DEFAULT FALSE,
+    day BOOLEAN DEFAULT FALSE,
+    source BOOLEAN DEFAULT FALSE,
+    resolution BOOLEAN DEFAULT FALSE,
+    codec BOOLEAN DEFAULT FALSE,
+    container BOOLEAN DEFAULT FALSE,
+    dynamic_range BOOLEAN DEFAULT FALSE,
+    audio BOOLEAN DEFAULT FALSE,
+    release_group BOOLEAN DEFAULT FALSE,
+    season BOOLEAN DEFAULT FALSE,
+    episode BOOLEAN DEFAULT FALSE,
+    website BOOLEAN DEFAULT FALSE,
+    proper BOOLEAN DEFAULT FALSE,
+    repack BOOLEAN DEFAULT FALSE,
+    edition BOOLEAN DEFAULT FALSE,
+    language BOOLEAN DEFAULT FALSE
+);
+
+INSERT INTO release_profile_duplicate (id, name, protocol, release_name, hash, title, sub_title, year, month, day, source, resolution, codec, container, dynamic_range, audio, release_group, season, episode, website, proper, repack, edition, language)
+VALUES (1, 'Exact release', 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
+       (2, 'Movie', 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
+       (3, 'TV', 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0);
+
CREATE TABLE filter
(
-    id INTEGER PRIMARY KEY,
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
    enabled BOOLEAN,
    name TEXT NOT NULL,
    min_size TEXT,
@ -159,7 +192,9 @@ CREATE TABLE filter
    min_seeders INTEGER DEFAULT 0,
    max_seeders INTEGER DEFAULT 0,
    min_leechers INTEGER DEFAULT 0,
-    max_leechers INTEGER DEFAULT 0
+    max_leechers INTEGER DEFAULT 0,
+    release_profile_duplicate_id INTEGER,
+    FOREIGN KEY (release_profile_duplicate_id) REFERENCES release_profile_duplicate(id) ON DELETE SET NULL
);

CREATE INDEX filter_enabled_index
@ -273,8 +308,10 @@ CREATE TABLE "release"
    group_id TEXT,
    torrent_id TEXT,
    torrent_name TEXT,
+    normalized_hash TEXT,
    size INTEGER,
    title TEXT,
+    sub_title TEXT,
    category TEXT,
    season INTEGER,
    episode INTEGER,
@ -286,15 +323,24 @@ CREATE TABLE "release"
    codec TEXT,
    container TEXT,
    hdr TEXT,
+    audio TEXT,
+    audio_channels TEXT,
    release_group TEXT,
+    region TEXT,
+    language TEXT,
+    edition TEXT,
+    cut TEXT,
+    hybrid BOOLEAN,
    proper BOOLEAN,
    repack BOOLEAN,
    website TEXT,
+    media_processing TEXT,
    type TEXT,
    origin TEXT,
    tags TEXT [] DEFAULT '{}' NOT NULL,
    uploader TEXT,
    pre_time TEXT,
+    other TEXT [] DEFAULT '{}' NOT NULL,
    filter_id INTEGER
        REFERENCES filter
            ON DELETE SET NULL
@ -312,6 +358,81 @@ CREATE INDEX release_timestamp_index
CREATE INDEX release_torrent_name_index
    ON "release" (torrent_name);

+CREATE INDEX release_normalized_hash_index
+    ON "release" (normalized_hash);
+
+CREATE INDEX release_title_index
+    ON "release" (title);
+
+CREATE INDEX release_sub_title_index
+    ON "release" (sub_title);
+
+CREATE INDEX release_season_index
+    ON "release" (season);
+
+CREATE INDEX release_episode_index
+    ON "release" (episode);
+
+CREATE INDEX release_year_index
+    ON "release" (year);
+
+CREATE INDEX release_month_index
+    ON "release" (month);
+
+CREATE INDEX release_day_index
+    ON "release" (day);
+
+CREATE INDEX release_resolution_index
+    ON "release" (resolution);
+
+CREATE INDEX release_source_index
+    ON "release" (source);
+
+CREATE INDEX release_codec_index
+    ON "release" (codec);
+
+CREATE INDEX release_container_index
+    ON "release" (container);
+
+CREATE INDEX release_hdr_index
+    ON "release" (hdr);
+
+CREATE INDEX release_audio_index
+    ON "release" (audio);
+
+CREATE INDEX release_audio_channels_index
+    ON "release" (audio_channels);
+
+CREATE INDEX release_release_group_index
+    ON "release" (release_group);
+
+CREATE INDEX release_language_index
+    ON "release" (language);
+
+CREATE INDEX release_proper_index
+    ON "release" (proper);
+
+CREATE INDEX release_repack_index
+    ON "release" (repack);
+
+CREATE INDEX release_website_index
+    ON "release" (website);
+
+CREATE INDEX release_media_processing_index
+    ON "release" (media_processing);
+
+CREATE INDEX release_region_index
+    ON "release" (region);
+
+CREATE INDEX release_edition_index
+    ON "release" (edition);
+
+CREATE INDEX release_cut_index
+    ON "release" (cut);
+
+CREATE INDEX release_hybrid_index
+    ON "release" (hybrid);
+
CREATE TABLE release_action_status
(
    id INTEGER PRIMARY KEY,
@ -1716,5 +1837,152 @@ CREATE TABLE list_filter
|
||||||
|
|
||||||
ALTER TABLE filter
|
ALTER TABLE filter
|
||||||
ADD COLUMN except_record_labels TEXT DEFAULT '';
|
ADD COLUMN except_record_labels TEXT DEFAULT '';
|
||||||
|
`,
|
||||||
|
`CREATE TABLE release_profile_duplicate
|
||||||
|
(
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
protocol BOOLEAN DEFAULT FALSE,
|
||||||
|
release_name BOOLEAN DEFAULT FALSE,
|
||||||
|
hash BOOLEAN DEFAULT FALSE,
|
||||||
|
title BOOLEAN DEFAULT FALSE,
|
||||||
|
sub_title BOOLEAN DEFAULT FALSE,
|
||||||
|
year BOOLEAN DEFAULT FALSE,
|
||||||
|
month BOOLEAN DEFAULT FALSE,
|
||||||
|
day BOOLEAN DEFAULT FALSE,
|
||||||
|
source BOOLEAN DEFAULT FALSE,
|
||||||
|
resolution BOOLEAN DEFAULT FALSE,
|
||||||
|
codec BOOLEAN DEFAULT FALSE,
|
||||||
|
container BOOLEAN DEFAULT FALSE,
|
||||||
|
dynamic_range BOOLEAN DEFAULT FALSE,
|
||||||
|
audio BOOLEAN DEFAULT FALSE,
|
||||||
|
release_group BOOLEAN DEFAULT FALSE,
|
||||||
|
season BOOLEAN DEFAULT FALSE,
|
||||||
|
episode BOOLEAN DEFAULT FALSE,
|
||||||
|
website BOOLEAN DEFAULT FALSE,
|
||||||
|
proper BOOLEAN DEFAULT FALSE,
|
||||||
|
repack BOOLEAN DEFAULT FALSE,
|
||||||
|
edition BOOLEAN DEFAULT FALSE,
|
||||||
|
language BOOLEAN DEFAULT FALSE
|
||||||
|
);
|
||||||
|
|
||||||
|
INSERT INTO release_profile_duplicate (id, name, protocol, release_name, hash, title, sub_title, year, month, day, source, resolution, codec, container, dynamic_range, audio, release_group, season, episode, website, proper, repack, edition, language)
|
||||||
|
VALUES (1, 'Exact release', 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
|
||||||
|
(2, 'Movie', 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0),
|
||||||
|
(3, 'TV', 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0);
|
||||||
|
|
||||||
|
ALTER TABLE filter
|
||||||
|
ADD COLUMN release_profile_duplicate_id INTEGER
|
||||||
|
CONSTRAINT filter_release_profile_duplicate_id_fk
|
||||||
|
REFERENCES release_profile_duplicate (id)
|
||||||
|
ON DELETE SET NULL;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD normalized_hash TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD sub_title TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD audio TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD audio_channels TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD language TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD media_processing TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD edition TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD cut TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD hybrid TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD region TEXT;
|
||||||
|
|
||||||
|
ALTER TABLE "release"
|
||||||
|
ADD other TEXT [] DEFAULT '{}' NOT NULL;
|
||||||
|
|
||||||
|
CREATE INDEX release_normalized_hash_index
|
||||||
|
ON "release" (normalized_hash);
|
||||||
|
|
||||||
|
CREATE INDEX release_title_index
|
||||||
|
ON "release" (title);
|
||||||
|
|
||||||
|
CREATE INDEX release_sub_title_index
|
||||||
|
ON "release" (sub_title);
|
||||||
|
|
||||||
|
CREATE INDEX release_season_index
|
||||||
|
ON "release" (season);
|
||||||
|
|
||||||
|
CREATE INDEX release_episode_index
|
||||||
|
ON "release" (episode);
|
||||||
|
|
||||||
|
CREATE INDEX release_year_index
|
||||||
|
ON "release" (year);
|
||||||
|
|
||||||
|
CREATE INDEX release_month_index
|
||||||
|
ON "release" (month);
|
||||||
|
|
||||||
|
CREATE INDEX release_day_index
|
||||||
|
ON "release" (day);
|
||||||
|
|
||||||
|
CREATE INDEX release_resolution_index
|
||||||
|
ON "release" (resolution);
|
||||||
|
|
||||||
|
CREATE INDEX release_source_index
|
||||||
|
ON "release" (source);
|
||||||
|
|
||||||
|
CREATE INDEX release_codec_index
|
||||||
|
ON "release" (codec);
|
||||||
|
|
||||||
|
CREATE INDEX release_container_index
|
||||||
|
ON "release" (container);
|
||||||
|
|
||||||
|
CREATE INDEX release_hdr_index
|
||||||
|
ON "release" (hdr);
|
||||||
|
|
||||||
|
CREATE INDEX release_audio_index
|
||||||
|
ON "release" (audio);
|
||||||
|
|
||||||
|
CREATE INDEX release_audio_channels_index
|
||||||
|
ON "release" (audio_channels);
|
||||||
|
|
||||||
|
CREATE INDEX release_release_group_index
|
||||||
|
ON "release" (release_group);
|
||||||
|
|
||||||
|
CREATE INDEX release_proper_index
|
||||||
|
ON "release" (proper);
|
||||||
|
|
||||||
|
CREATE INDEX release_repack_index
|
||||||
|
ON "release" (repack);
|
||||||
|
|
||||||
|
CREATE INDEX release_website_index
|
||||||
|
ON "release" (website);
|
||||||
|
|
||||||
|
CREATE INDEX release_media_processing_index
|
||||||
|
ON "release" (media_processing);
|
||||||
|
|
||||||
|
CREATE INDEX release_language_index
|
||||||
|
ON "release" (language);
|
||||||
|
|
||||||
|
CREATE INDEX release_region_index
|
||||||
|
ON "release" (region);
|
||||||
|
|
||||||
|
CREATE INDEX release_edition_index
|
||||||
|
ON "release" (edition);
|
||||||
|
|
||||||
|
CREATE INDEX release_cut_index
|
||||||
|
ON "release" (cut);
|
||||||
|
|
||||||
|
CREATE INDEX release_hybrid_index
|
||||||
|
ON "release" (hybrid);
|
||||||
`,
|
`,
|
||||||
}
|
}
|
||||||
|
|
|
@ -29,6 +29,7 @@ func toNullInt32(s int32) sql.NullInt32 {
		Valid: s != 0,
	}
}
+
func toNullInt64(s int64) sql.NullInt64 {
	return sql.NullInt64{
		Int64: s,
@ -11,7 +11,7 @@ import (
)

type ActionRepo interface {
-	Store(ctx context.Context, action Action) (*Action, error)
+	Store(ctx context.Context, action *Action) error
	StoreFilterActions(ctx context.Context, filterID int64, actions []*Action) ([]*Action, error)
	FindByFilterID(ctx context.Context, filterID int, active *bool, withClient bool) ([]*Action, error)
	List(ctx context.Context) ([]Action, error)
@ -170,6 +170,8 @@ type Filter struct {
	Actions []*Action `json:"actions,omitempty"`
	External []FilterExternal `json:"external,omitempty"`
	Indexers []Indexer `json:"indexers"`
+	ReleaseProfileDuplicateID int64 `json:"release_profile_duplicate_id,omitempty"`
+	DuplicateHandling *DuplicateReleaseProfile `json:"release_profile_duplicate"`
	Downloads *FilterDownloads `json:"-"`
	Rejections []string `json:"-"`
	RejectReasons *RejectionReasons `json:"-"`
@ -290,6 +292,7 @@ type FilterUpdate struct {
	MaxSeeders *int `json:"max_seeders,omitempty"`
	MinLeechers *int `json:"min_leechers,omitempty"`
	MaxLeechers *int `json:"max_leechers,omitempty"`
+	ReleaseProfileDuplicateID *int64 `json:"release_profile_duplicate_id,omitempty"`
	Actions []*Action `json:"actions,omitempty"`
	External []FilterExternal `json:"external,omitempty"`
	Indexers []Indexer `json:"indexers,omitempty"`
@ -50,6 +50,7 @@ type Macro struct {
	IndexerIdentifierExternal string
	IndexerName string
	InfoUrl string
+	IsDuplicate bool
	Language []string
	Leechers int
	LogScore int
@ -66,6 +67,8 @@ type Macro struct {
	Seeders int
	Size uint64
	SizeString string
+	SkipDuplicateProfileID int64
+	SkipDuplicateProfileName string
	Source string
	Tags string
	Title string
@ -123,6 +126,7 @@ func NewMacro(release Release) Macro {
		IndexerIdentifierExternal: release.Indexer.IdentifierExternal,
		IndexerName: release.Indexer.Name,
		InfoUrl: release.InfoURL,
+		IsDuplicate: release.IsDuplicate,
		Language: release.Language,
		Leechers: release.Leechers,
		LogScore: release.LogScore,
@ -140,6 +144,8 @@ func NewMacro(release Release) Macro {
		Size: release.Size,
		SizeString: humanize.Bytes(release.Size),
		Source: release.Source,
+		SkipDuplicateProfileID: release.SkipDuplicateProfileID,
+		SkipDuplicateProfileName: release.SkipDuplicateProfileName,
		Tags: strings.Join(release.Tags, ", "),
		Title: release.Title,
		TorrentDataRawBytes: release.TorrentDataRawBytes,
@ -149,7 +155,7 @@ func NewMacro(release Release) Macro {
		TorrentPathName: release.TorrentTmpFile,
		TorrentUrl: release.DownloadURL,
		TorrentTmpFile: release.TorrentTmpFile,
-		Type: release.Type,
+		Type: release.Type.String(),
		Uploader: release.Uploader,
		RecordLabel: release.RecordLabel,
		Website: release.Website,
@ -5,6 +5,7 @@ package domain

import (
	"fmt"
+	"github.com/moistari/rls"
	"testing"
	"time"

@ -231,7 +232,7 @@ func TestMacros_Parse(t *testing.T) {
		{
			name: "test_type",
			release: Release{
-				Type: "episode",
+				Type: rls.Episode,
			},
			args: args{text: "Type: {{ .Type }}"},
			want: "Type: episode",
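The new Macro fields above (IsDuplicate, SkipDuplicateProfileID, SkipDuplicateProfileName) are referenced from action templates like any other field. A minimal sketch, assuming the Macro.Parse(text string) (string, error) helper that the test above exercises; the template text and output are illustrative, not part of this commit:

	m := domain.NewMacro(release)
	out, err := m.Parse("Duplicate: {{ .IsDuplicate }} (profile {{ .SkipDuplicateProfileID }}: {{ .SkipDuplicateProfileName }})")
	if err != nil {
		return err
	}
	fmt.Println(out) // e.g. "Duplicate: true (profile 3: TV)"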
@ -7,7 +7,11 @@ import (
	"bufio"
	"bytes"
	"context"
+	"crypto/md5"
+	"encoding/hex"
	"fmt"
+	"golang.org/x/text/transform"
+	"golang.org/x/text/unicode/norm"
	"html"
	"io"
	"math"
@ -18,6 +22,7 @@ import (
	"strconv"
	"strings"
	"time"
+	"unicode"

	"github.com/autobrr/autobrr/pkg/errors"
	"github.com/autobrr/autobrr/pkg/sharedhttp"
@ -43,6 +48,11 @@ type ReleaseRepo interface {

	GetActionStatus(ctx context.Context, req *GetReleaseActionStatusRequest) (*ReleaseActionStatus, error)
	StoreReleaseActionStatus(ctx context.Context, status *ReleaseActionStatus) error
+
+	StoreDuplicateProfile(ctx context.Context, profile *DuplicateReleaseProfile) error
+	FindDuplicateReleaseProfiles(ctx context.Context) ([]*DuplicateReleaseProfile, error)
+	DeleteReleaseProfileDuplicate(ctx context.Context, id int64) error
+	CheckIsDuplicateRelease(ctx context.Context, profile *DuplicateReleaseProfile, release *Release) (bool, error)
}

type Release struct {
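A sketch of how a caller can combine the new CheckIsDuplicateRelease method with a filter's duplicate profile; the wrapper function is hypothetical, only the repo method, Filter.DuplicateHandling and Release.IsDuplicate come from this commit:

	func skipDuplicates(ctx context.Context, repo domain.ReleaseRepo, release *domain.Release) (bool, error) {
		profile := release.Filter.DuplicateHandling // nil when the filter has no duplicate profile attached
		if profile == nil {
			return false, nil
		}
		isDuplicate, err := repo.CheckIsDuplicateRelease(ctx, profile, release)
		if err != nil {
			return false, err
		}
		// surfaced to actions via the IsDuplicate macro
		release.IsDuplicate = isDuplicate
		return isDuplicate, nil
	}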
@ -55,6 +65,7 @@ type Release struct {
	Implementation ReleaseImplementation `json:"implementation"` // irc, rss, api
	Timestamp time.Time `json:"timestamp"`
	AnnounceType AnnounceType `json:"announce_type"`
+	Type rls.Type `json:"type"` // rls.Type
	InfoURL string `json:"info_url"`
	DownloadURL string `json:"download_url"`
	MagnetURI string `json:"-"`
@ -64,8 +75,10 @@ type Release struct {
	TorrentDataRawBytes []byte `json:"-"`
	TorrentHash string `json:"-"`
	TorrentName string `json:"name"` // full release name
+	NormalizedHash string `json:"normalized_hash"` // normalized torrent name and md5 hashed
	Size uint64 `json:"size"`
	Title string `json:"title"` // Parsed title
+	SubTitle string `json:"sub_title"` // Parsed secondary title for shows e.g. episode name
	Description string `json:"-"`
	Category string `json:"category"`
	Categories []string `json:"categories,omitempty"`
@ -89,8 +102,11 @@ type Release struct {
	Proper bool `json:"proper"`
	Repack bool `json:"repack"`
	Website string `json:"website"`
+	Hybrid bool `json:"hybrid"`
+	Edition []string `json:"edition"`
+	Cut []string `json:"cut"`
+	MediaProcessing string `json:"media_processing"` // Remux, Encode, Untouched
	Artists string `json:"-"`
-	Type string `json:"type"` // Album,Single,EP
	LogScore int `json:"-"`
	HasCue bool `json:"-"`
	HasLog bool `json:"-"`
@ -110,15 +126,183 @@ type Release struct {
	AdditionalSizeCheckRequired bool `json:"-"`
	AdditionalUploaderCheckRequired bool `json:"-"`
	AdditionalRecordLabelCheckRequired bool `json:"-"`
+	IsDuplicate bool `json:"-"`
+	SkipDuplicateProfileID int64 `json:"-"`
+	SkipDuplicateProfileName string `json:"-"`
	FilterID int `json:"-"`
	Filter *Filter `json:"-"`
	ActionStatus []ReleaseActionStatus `json:"action_status"`
}

+// Hash return md5 hashed normalized release name
+func (r *Release) Hash() string {
+	formatted := r.TorrentName
+
+	// for tv and movies we create the formatted title to have the best chance of matching
+	if r.IsTypeVideo() {
+		formatted = r.NormalizedTitle()
+	}
+
+	normalized := MustNormalize(formatted)
+	h := md5.Sum([]byte(normalized))
+	str := hex.EncodeToString(h[:])
+	return str
+}
+
+// MustNormalize applies the Normalize transform to s, returning a lower cased,
+// clean form of s useful for matching titles.
+func MustNormalize(s string) string {
+	s, _, err := transform.String(NewNormalizer(), s)
+	if err != nil {
+		panic(err)
+	}
+	return s
+}
+
+// NewNormalizer is a custom rls.Normalizer that keeps plus sign + for HDR10+ fx
+// It creates a new a text transformer chain (similiar to
+// NewCleaner) that normalizes text to lower case clean form useful for
+// matching titles.
+//
+// See: https://go.dev/blog/normalization
+func NewNormalizer() transform.Transformer {
+	return transform.Chain(
+		norm.NFD,
+		rls.NewCollapser(
+			true, true,
+			"`"+`':;~!@#%^*=()[]{}<>/?|\",`, " \t\r\n\f._",
+			func(r, prev, next rune) rune {
+				switch {
+				case r == '-' && unicode.IsSpace(prev):
+					return -1
+				case r == '$' && (unicode.IsLetter(prev) || unicode.IsLetter(next)):
+					return 'S'
+				case r == '£' && (unicode.IsLetter(prev) || unicode.IsLetter(next)):
+					return 'L'
+				case r == '$', r == '£':
+					return -1
+				}
+				return r
+			},
+		),
+		norm.NFC,
+	)
+}
+
+func (r *Release) NormalizedTitle() string {
+	var v []string
+
+	v = append(v, r.Title)
+
+	if r.Year > 0 && r.Month > 0 && r.Day > 0 {
+		v = append(v, fmt.Sprintf("%d %d %d", r.Year, r.Month, r.Day))
+	} else if r.Year > 0 {
+		v = append(v, fmt.Sprintf("%d", r.Year))
+	}
+
+	if len(r.Language) > 0 {
+		v = append(v, strings.Join(r.Language, " "))
+	}
+
+	if len(r.Cut) > 0 {
+		v = append(v, strings.Join(r.Cut, " "))
+	}
+
+	if len(r.Edition) > 0 {
+		v = append(v, strings.Join(r.Edition, " "))
+	}
+
+	if r.Season > 0 && r.Episode > 0 {
+		v = append(v, fmt.Sprintf("S%dE%d", r.Season, r.Episode))
+	} else if r.Season > 0 && r.Episode == 0 {
+		v = append(v, fmt.Sprintf("S%d", r.Season))
+	}
+
+	if r.Proper {
+		v = append(v, "PROPER")
+	}
+
+	if r.Repack {
+		v = append(v, r.RepackStr())
+	}
+
+	if r.Hybrid {
+		v = append(v, "HYBRiD")
+	}
+
+	if r.SubTitle != "" {
+		v = append(v, r.SubTitle)
+	}
+
+	if r.Resolution != "" {
+		v = append(v, r.Resolution)
+	}
+
+	if r.Website != "" {
+		v = append(v, r.Website)
+	}
+
+	if r.Region != "" {
+		v = append(v, r.Region)
+	}
+
+	if r.Source != "" {
+		v = append(v, r.Source)
+	}
+
+	// remux
+	if r.MediaProcessing == "REMUX" {
+		v = append(v, "REMUX")
+	}
+
+	if len(r.Codec) > 0 {
+		v = append(v, strings.Join(r.Codec, " "))
+	}
+
+	if len(r.HDR) > 0 {
+		v = append(v, strings.Join(r.HDR, " "))
+	}
+
+	if len(r.Audio) > 0 {
+		v = append(v, r.AudioString())
+	}
+
+	str := strings.Join(v, " ")
+
+	if r.Group != "" {
+		str = fmt.Sprintf("%s-%s", str, r.Group)
+	}
+
+	return str
+}
+
+func (r *Release) RepackStr() string {
+	if r.Other != nil {
+		if slices.Contains(r.Other, "REPACK") {
+			return "REPACK"
+		} else if slices.Contains(r.Other, "REREPACK") {
+			return "REREPACK"
+		} else if slices.Contains(r.Other, "REPACK2") {
+			return "REPACK2"
+		} else if slices.Contains(r.Other, "REPACK3") {
+			return "REPACK3"
+		}
+	}
+	return ""
+}
+
func (r *Release) Raw(s string) rls.Release {
	return rls.ParseString(s)
}

+func (r *Release) ParseType(s string) {
+	r.Type = rls.ParseType(s)
+}
+
+func (r *Release) IsTypeVideo() bool {
+	return r.Type.Is(rls.Movie, rls.Series, rls.Episode)
+}
+
type AnnounceType string

const (
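A minimal usage sketch of the normalization above: two spellings of the same episode should end up with the same NormalizedHash once ParseString has run, which is what the hash option in a duplicate profile compares. The IndexerMinimal field values here are assumptions for the example:

	indexer := domain.IndexerMinimal{ID: 1, Name: "Mock Indexer", Identifier: "mock"}

	a := domain.NewRelease(indexer)
	a.ParseString("That Show 2020 S04E10 1080p AMZN WEB-DL DDP 5.1 H.264-GROUP")

	b := domain.NewRelease(indexer)
	b.ParseString("That.Show.2020.S04E10.1080p.AMZN.WEB-DL.DDP.5.1.H.264-GROUP")

	fmt.Println(a.NormalizedHash == b.NormalizedHash) // expected: true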
@ -361,6 +545,10 @@ func NewRelease(indexer IndexerMinimal) *Release {
		Implementation: ReleaseImplementationIRC,
		Timestamp: time.Now(),
		Tags: []string{},
+		Language: []string{},
+		Edition: []string{},
+		Cut: []string{},
+		Other: []string{},
		Size: 0,
		AnnounceType: AnnounceTypeNew,
	}
@ -371,28 +559,42 @@ func (r *Release) ParseString(title string) {
func (r *Release) ParseString(title string) {
	rel := rls.ParseString(title)

-	r.Type = rel.Type.String()
+	r.Type = rel.Type

	r.TorrentName = title

	r.Source = rel.Source
	r.Resolution = rel.Resolution
	r.Region = rel.Region
+
+	if rel.Language != nil {
+		r.Language = rel.Language
+	}
+
	r.Audio = rel.Audio
	r.AudioChannels = rel.Channels
	r.Codec = rel.Codec
	r.Container = rel.Container
	r.HDR = rel.HDR
	r.Artists = rel.Artist
-	r.Language = rel.Language

+	if rel.Other != nil {
		r.Other = rel.Other
+	}

	r.Proper = slices.Contains(r.Other, "PROPER")
-	r.Repack = slices.Contains(r.Other, "REPACK")
+	r.Repack = slices.Contains(r.Other, "REPACK") || slices.Contains(r.Other, "REREPACK")
+	r.Hybrid = slices.Contains(r.Other, "HYBRiD")
+
+	// TODO default to Encode and set Untouched for discs
+	if slices.Contains(r.Other, "REMUX") {
+		r.MediaProcessing = "REMUX"
+	}

	if r.Title == "" {
		r.Title = rel.Title
	}
+	r.SubTitle = rel.Subtitle

	if r.Season == 0 {
		r.Season = rel.Series
@ -415,8 +617,22 @@ func (r *Release) ParseString(title string) {
		r.Group = rel.Group
	}
+
+	if r.Website == "" {
+		r.Website = rel.Collection
+	}
+
+	if rel.Cut != nil {
+		r.Cut = rel.Cut
+	}
+
+	if rel.Edition != nil {
+		r.Edition = rel.Edition
+	}
+
	r.ParseReleaseTagsString(r.ReleaseTags)
	r.extraParseSource(rel)
+
+	r.NormalizedHash = r.Hash()
}

func (r *Release) extraParseSource(rel rls.Release) {
@ -451,7 +667,7 @@ func (r *Release) extraParseSource(rel rls.Release) {
	}

	// check res to be 1080p or 2160p and codec to be AVC, HEVC or if other contains Remux, then set source to BluRay if it differs
-	if !basicContainsSlice(r.Source, []string{"WEB-DL", "BluRay", "UHD.BluRay"}) && basicContainsSlice(r.Resolution, []string{"1080p", "2160p"}) && basicContainsMatch(r.Codec, []string{"AVC", "HEVC"}) && basicContainsMatch(r.Other, []string{"REMUX"}) {
+	if !basicContainsSlice(r.Source, []string{"WEB-DL", "BluRay", "UHD.BluRay"}) && basicContainsSlice(r.Resolution, []string{"1080p", "2160p"}) && basicContainsMatch(r.Codec, []string{"AVC", "H.264", "H.265", "HEVC"}) && basicContainsMatch(r.Other, []string{"REMUX"}) {
		// handle missing or unexpected source for some bluray releases
		if r.Resolution == "1080p" {
			r.Source = "BluRay"
@ -463,6 +679,10 @@ func (r *Release) extraParseSource(rel rls.Release) {
}

func (r *Release) ParseReleaseTagsString(tags string) {
+	if tags == "" {
+		return
+	}
+
	cleanTags := CleanReleaseTags(tags)
	t := ParseReleaseTagString(cleanTags)

@ -543,6 +763,20 @@ func (r *Release) OpenTorrentFile() error {
	return nil
}

+// AudioString takes r.Audio and r.AudioChannels and returns a string like "DDP Atmos 5.1"
+func (r *Release) AudioString() string {
+	var audio []string
+
+	audio = append(audio, r.Audio...)
+	audio = append(audio, r.AudioChannels)
+
+	if len(audio) > 0 {
+		return strings.Join(audio, " ")
+	}
+
+	return ""
+}
+
func (r *Release) DownloadTorrentFileCtx(ctx context.Context) error {
	return r.downloadTorrentFile(ctx)
}
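As a quick illustration of the AudioString comment above (field values made up for the example):

	r := domain.Release{Audio: []string{"TrueHD", "Atmos"}, AudioChannels: "7.1"}
	fmt.Println(r.AudioString()) // "TrueHD Atmos 7.1"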
@ -992,3 +1226,30 @@ func getUniqueTags(target []string, source []string) []string {

	return target
}
+
+type DuplicateReleaseProfile struct {
+	ID int64 `json:"id"`
+	Name string `json:"name"`
+	Protocol bool `json:"protocol"`
+	ReleaseName bool `json:"release_name"`
+	Hash bool `json:"hash"`
+	Title bool `json:"title"`
+	SubTitle bool `json:"sub_title"`
+	Year bool `json:"year"`
+	Month bool `json:"month"`
+	Day bool `json:"day"`
+	Source bool `json:"source"`
+	Resolution bool `json:"resolution"`
+	Codec bool `json:"codec"`
+	Container bool `json:"container"`
+	DynamicRange bool `json:"dynamic_range"`
+	Audio bool `json:"audio"`
+	Group bool `json:"group"`
+	Season bool `json:"season"`
+	Episode bool `json:"episode"`
+	Website bool `json:"website"`
+	Proper bool `json:"proper"`
+	Repack bool `json:"repack"`
+	Edition bool `json:"edition"`
+	Language bool `json:"language"`
+}
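For reference, the default "TV" profile seeded by the schema earlier in this commit, expressed with the struct above (mapping read off the INSERT column order):

	tv := domain.DuplicateReleaseProfile{
		ID:      3,
		Name:    "TV",
		Title:   true,
		Year:    true,
		Month:   true,
		Day:     true,
		Season:  true,
		Episode: true,
	}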
@ -15,6 +15,7 @@ import (
	"testing"
	"time"

+	"github.com/moistari/rls"
	"github.com/rs/zerolog"
)

@ -127,7 +128,7 @@ func TestRelease_DownloadTorrentFile(t *testing.T) {
		Repack bool
		Website string
		Artists string
-		Type string
+		Type rls.Type
		LogScore int
		Origin string
		Tags []string
@ -6,6 +6,7 @@ package domain
import (
	"testing"

+	"github.com/moistari/rls"
	"github.com/stretchr/testify/assert"
)

@ -22,6 +23,7 @@ func TestRelease_Parse(t *testing.T) {
|
||||||
},
|
},
|
||||||
want: Release{
|
want: Release{
|
||||||
TorrentName: "Servant S01 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-FLUX",
|
TorrentName: "Servant S01 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-FLUX",
|
||||||
|
NormalizedHash: "9b73e77d51fb0b69367ea96c761577b0",
|
||||||
Title: "Servant",
|
Title: "Servant",
|
||||||
Season: 1,
|
Season: 1,
|
||||||
Episode: 0,
|
Episode: 0,
|
||||||
|
@ -32,8 +34,8 @@ func TestRelease_Parse(t *testing.T) {
|
||||||
AudioChannels: "5.1",
|
AudioChannels: "5.1",
|
||||||
HDR: []string{"DV"},
|
HDR: []string{"DV"},
|
||||||
Group: "FLUX",
|
Group: "FLUX",
|
||||||
//Website: "ATVP",
|
Website: "ATVP",
|
||||||
Type: "series",
|
Type: rls.Series,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -43,6 +45,7 @@ func TestRelease_Parse(t *testing.T) {
|
||||||
},
|
},
|
||||||
want: Release{
|
want: Release{
|
||||||
TorrentName: "Servant.S01.2160p.ATVP.WEB-DL.DDP.5.1.Atmos.DV.HEVC-FLUX",
|
TorrentName: "Servant.S01.2160p.ATVP.WEB-DL.DDP.5.1.Atmos.DV.HEVC-FLUX",
|
||||||
|
NormalizedHash: "9b73e77d51fb0b69367ea96c761577b0",
|
||||||
Title: "Servant",
|
Title: "Servant",
|
||||||
Season: 1,
|
Season: 1,
|
||||||
Episode: 0,
|
Episode: 0,
|
||||||
|
@ -53,7 +56,8 @@ func TestRelease_Parse(t *testing.T) {
|
||||||
AudioChannels: "5.1",
|
AudioChannels: "5.1",
|
||||||
HDR: []string{"DV"},
|
HDR: []string{"DV"},
|
||||||
Group: "FLUX",
|
Group: "FLUX",
|
||||||
Type: "series",
|
Website: "ATVP",
|
||||||
|
Type: rls.Series,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -64,6 +68,7 @@ func TestRelease_Parse(t *testing.T) {
|
||||||
},
|
},
|
||||||
want: Release{
|
want: Release{
|
||||||
TorrentName: "Servant.S01.2160p.ATVP.WEB-DL.DDP.5.1.Atmos.DV.HEVC-FLUX",
|
TorrentName: "Servant.S01.2160p.ATVP.WEB-DL.DDP.5.1.Atmos.DV.HEVC-FLUX",
|
||||||
|
NormalizedHash: "9b73e77d51fb0b69367ea96c761577b0",
|
||||||
ReleaseTags: "MKV / 2160p / WEB-DL",
|
ReleaseTags: "MKV / 2160p / WEB-DL",
|
||||||
Title: "Servant",
|
Title: "Servant",
|
||||||
Season: 1,
|
Season: 1,
|
||||||
|
@ -76,7 +81,8 @@ func TestRelease_Parse(t *testing.T) {
|
||||||
AudioChannels: "5.1",
|
AudioChannels: "5.1",
|
||||||
HDR: []string{"DV"},
|
HDR: []string{"DV"},
|
||||||
Group: "FLUX",
|
Group: "FLUX",
|
||||||
Type: "series",
|
Website: "ATVP",
|
||||||
|
Type: rls.Series,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -87,6 +93,7 @@ func TestRelease_Parse(t *testing.T) {
|
||||||
},
|
},
|
||||||
want: Release{
|
want: Release{
|
||||||
TorrentName: "Servant.S01.2160p.ATVP.WEB-DL.DDP.5.1.Atmos.DV.HEVC-FLUX",
|
TorrentName: "Servant.S01.2160p.ATVP.WEB-DL.DDP.5.1.Atmos.DV.HEVC-FLUX",
|
||||||
|
NormalizedHash: "9b73e77d51fb0b69367ea96c761577b0",
|
||||||
ReleaseTags: "MKV | 2160p | WEB-DL",
|
ReleaseTags: "MKV | 2160p | WEB-DL",
|
||||||
Title: "Servant",
|
Title: "Servant",
|
||||||
Season: 1,
|
Season: 1,
|
||||||
|
@ -99,7 +106,8 @@ func TestRelease_Parse(t *testing.T) {
|
||||||
AudioChannels: "5.1",
|
AudioChannels: "5.1",
|
||||||
HDR: []string{"DV"},
|
HDR: []string{"DV"},
|
||||||
Group: "FLUX",
|
Group: "FLUX",
|
||||||
Type: "series",
|
Website: "ATVP",
|
||||||
|
Type: rls.Series,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -110,6 +118,7 @@ func TestRelease_Parse(t *testing.T) {
|
||||||
},
|
},
|
||||||
want: Release{
|
want: Release{
|
||||||
TorrentName: "Servant.S01.2160p.ATVP.WEB-DL.DDP.5.1.Atmos.DV.HEVC-FLUX",
|
TorrentName: "Servant.S01.2160p.ATVP.WEB-DL.DDP.5.1.Atmos.DV.HEVC-FLUX",
|
||||||
|
NormalizedHash: "9b73e77d51fb0b69367ea96c761577b0",
|
||||||
ReleaseTags: "MP4 | 2160p | WEB-DL",
|
ReleaseTags: "MP4 | 2160p | WEB-DL",
|
||||||
Title: "Servant",
|
Title: "Servant",
|
||||||
Season: 1,
|
Season: 1,
|
||||||
|
@ -122,7 +131,8 @@ func TestRelease_Parse(t *testing.T) {
|
||||||
AudioChannels: "5.1",
|
AudioChannels: "5.1",
|
||||||
HDR: []string{"DV"},
|
HDR: []string{"DV"},
|
||||||
Group: "FLUX",
|
Group: "FLUX",
|
||||||
Type: "series",
|
Website: "ATVP",
|
||||||
|
Type: rls.Series,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
@ -133,6 +143,7 @@ func TestRelease_Parse(t *testing.T) {
|
||||||
},
|
},
|
||||||
want: Release{
|
want: Release{
|
||||||
TorrentName: "Servant.S01.2160p.ATVP.WEB-DL.DDP.5.1.Atmos.DV.HEVC-FLUX",
|
TorrentName: "Servant.S01.2160p.ATVP.WEB-DL.DDP.5.1.Atmos.DV.HEVC-FLUX",
|
||||||
|
+ NormalizedHash: "9b73e77d51fb0b69367ea96c761577b0",
ReleaseTags: "MP4 | 2160p | WEB-DL | Freeleech!",
Title: "Servant",
Season: 1,
@@ -147,7 +158,8 @@ func TestRelease_Parse(t *testing.T) {
Group: "FLUX",
Freeleech: true,
Bonus: []string{"Freeleech"},
- Type: "series",
+ Website: "ATVP",
+ Type: rls.Series,
},
},
{
@@ -157,6 +169,7 @@ func TestRelease_Parse(t *testing.T) {
},
want: Release{
TorrentName: "Rippers.Revenge.2023.German.DL.1080p.BluRay.MPEG2-GROUP",
+ NormalizedHash: "cb28c69ce117723d01ea6562ef3bae67",
Title: "Rippers Revenge",
Year: 2023,
Language: []string{"GERMAN", "DL"},
@@ -164,7 +177,7 @@ func TestRelease_Parse(t *testing.T) {
Source: "BluRay",
Codec: []string{"MPEG-2"},
Group: "GROUP",
- Type: "movie",
+ Type: rls.Movie,
},
},
{
@@ -174,12 +187,13 @@ func TestRelease_Parse(t *testing.T) {
},
want: Release{
TorrentName: "Analogue.1080i.AHDTV.H264-ABCDEF",
+ NormalizedHash: "1a1eb25b15269d3189138326658aafe0",
Title: "Analogue",
Resolution: "1080p", // rls does not differentiate between 1080i and 1080p which results in all 1080 releases being parsed as 1080p
Source: "AHDTV",
Codec: []string{"H.264"},
Group: "ABCDEF",
- Type: "movie",
+ Type: rls.Movie,
},
},
{
@@ -190,6 +204,7 @@ func TestRelease_Parse(t *testing.T) {
},
want: Release{
TorrentName: "Artist - Albumname",
+ NormalizedHash: "5de820bfae23f2bdc3a56c827a5230ac",
ReleaseTags: "FLAC / Lossless / Log / 100% / Cue / CD",
Title: "Artist",
Group: "Albumname",
@@ -211,6 +226,7 @@ func TestRelease_Parse(t *testing.T) {
},
want: Release{
TorrentName: "Various Artists - Music '21",
+ NormalizedHash: "fbf7713532f4d2d03b62897fc990e37e",
Tags: []string{"house, techno, tech.house, electro.house, future.house, bass.house, melodic.house"},
ReleaseTags: "MP3 / 320 / Cassette",
Title: "Various Artists - Music '21",
@@ -228,6 +244,7 @@ func TestRelease_Parse(t *testing.T) {
},
want: Release{
TorrentName: "The artist (ザ・フリーダムユニティ) - Long album name",
+ NormalizedHash: "6b4051de7524f65d3c25be535d2c95ed",
ReleaseTags: "MP3 / V0 (VBR) / CD",
Title: "The artist",
Group: "name",
@@ -245,6 +262,7 @@ func TestRelease_Parse(t *testing.T) {
},
want: Release{
TorrentName: "Artist - Albumname",
+ NormalizedHash: "5de820bfae23f2bdc3a56c827a5230ac",
ReleaseTags: "FLAC / Lossless / Log / 100% / Cue / CD",
Title: "Artist",
Group: "Albumname",
@@ -265,6 +283,7 @@ func TestRelease_Parse(t *testing.T) {
},
want: Release{
TorrentName: "Artist - Albumname",
+ NormalizedHash: "5de820bfae23f2bdc3a56c827a5230ac",
ReleaseTags: "FLAC / 24bit Lossless / Log / 100% / Cue / CD",
Title: "Artist",
Group: "Albumname",
@@ -285,6 +304,7 @@ func TestRelease_Parse(t *testing.T) {
},
want: Release{
TorrentName: "Artist - Albumname",
+ NormalizedHash: "5de820bfae23f2bdc3a56c827a5230ac",
ReleaseTags: "FLAC / 24bit Lossless / Log / 78% / Cue / CD",
Title: "Artist",
Group: "Albumname",
@@ -304,6 +324,7 @@ func TestRelease_Parse(t *testing.T) {
},
want: Release{
TorrentName: "I Am Movie 2007 Theatrical UHD BluRay 2160p DTS-HD MA 5.1 DV HEVC HYBRID REMUX-GROUP1",
+ NormalizedHash: "f70840f42149d7d2b539c75f5e853493",
Title: "I Am Movie",
Resolution: "2160p",
Source: "UHD.BluRay",
@@ -313,8 +334,11 @@ func TestRelease_Parse(t *testing.T) {
AudioChannels: "5.1",
Year: 2007,
Group: "GROUP1",
+ Hybrid: true,
+ Cut: []string{"Theatrical.Cut"},
+ MediaProcessing: "REMUX",
Other: []string{"HYBRiD", "REMUX"},
- Type: "movie",
+ Type: rls.Movie,
},
},
{
@@ -324,6 +348,7 @@ func TestRelease_Parse(t *testing.T) {
},
want: Release{
TorrentName: "The Peripheral (2022) S01 (2160p AMZN WEB-DL H265 HDR10+ DDP 5.1 English - GROUP1)",
+ NormalizedHash: "33fe914c00ea5a2945883cffb4183923",
Title: "The Peripheral",
Resolution: "2160p",
Source: "WEB-DL",
@@ -335,7 +360,8 @@ func TestRelease_Parse(t *testing.T) {
Group: "GROUP1",
Season: 1,
Language: []string{"ENGLiSH"},
- Type: "series",
+ Website: "AMZN",
+ Type: rls.Series,
},
},
{
@@ -346,6 +372,7 @@ func TestRelease_Parse(t *testing.T) {
want: Release{
TorrentName: "Old Movie 1954 2160p Remux DoVi HDR10 HEVC DTS-HD MA 5.1-CiNEPHiLES",
Title: "Old Movie",
+ NormalizedHash: "9990678babd4f24f268dfe7856b29f0e",
Year: 1954,
Source: "UHD.BluRay",
Resolution: "2160p",
@@ -355,7 +382,8 @@ func TestRelease_Parse(t *testing.T) {
Audio: []string{"DTS-HD.MA"},
AudioChannels: "5.1",
Group: "CiNEPHiLES",
- Type: "movie",
+ Type: rls.Movie,
+ MediaProcessing: "REMUX",
},
},
{
@@ -366,6 +394,7 @@ func TestRelease_Parse(t *testing.T) {
want: Release{
TorrentName: "Death Hunt 1981 1080p Remux AVC DTS-HD MA 2.0-playBD",
Title: "Death Hunt",
+ NormalizedHash: "e6b1b708b8ea39470de6d327da2920d6",
Year: 1981,
Source: "BluRay",
Resolution: "1080p",
@@ -374,7 +403,8 @@ func TestRelease_Parse(t *testing.T) {
Audio: []string{"DTS-HD.MA"},
AudioChannels: "2.0",
Group: "playBD",
- Type: "movie",
+ Type: rls.Movie,
+ MediaProcessing: "REMUX",
},
},
{
@@ -385,6 +415,7 @@ func TestRelease_Parse(t *testing.T) {
want: Release{
TorrentName: "Old Movie 1954 2160p Remux DoVi HDR10 HEVC DTS-HD MA 5.1-VHS",
Title: "Old Movie",
+ NormalizedHash: "1ea814c8c21979e7692efb1b24298e95",
Year: 1954,
Source: "UHD.BluRay",
Resolution: "2160p",
@@ -394,7 +425,8 @@ func TestRelease_Parse(t *testing.T) {
Audio: []string{"DTS-HD.MA"},
AudioChannels: "5.1",
Group: "VHS",
- Type: "movie",
+ Type: rls.Movie,
+ MediaProcessing: "REMUX",
},
},
{
@@ -405,6 +437,7 @@ func TestRelease_Parse(t *testing.T) {
want: Release{
TorrentName: "Old Movie 1954 2160p Remux DoVi HDR10 HEVC DTS-HD MA 5.1 VHS",
Title: "Old Movie",
+ NormalizedHash: "1ea814c8c21979e7692efb1b24298e95",
Year: 1954,
Source: "UHD.BluRay",
Resolution: "2160p",
@@ -414,7 +447,8 @@ func TestRelease_Parse(t *testing.T) {
Audio: []string{"DTS-HD.MA"},
AudioChannels: "5.1",
Group: "VHS",
- Type: "movie",
+ Type: rls.Movie,
+ MediaProcessing: "REMUX",
},
},
}
@@ -863,3 +897,220 @@ func Test_getUniqueTags(t *testing.T) {
})
}
}
+
+func TestRelease_Hash(t *testing.T) {
+type fields struct {
+TorrentName string
+}
+tests := []struct {
+name string
+fields fields
+want string
+}{
+{
+name: "1",
+fields: fields{
+TorrentName: "That.Movie.2023.2160p.BluRay.DTS-HD.5.1.x265-GROUP",
+},
+want: "833e6c4c78e960ccbce4ef68f6564082",
+},
+{
+name: "2",
+fields: fields{
+TorrentName: "That Movie 2023 2160p BluRay DTS-HD 5.1 x265-GROUP",
+},
+want: "833e6c4c78e960ccbce4ef68f6564082",
+},
+{
+name: "3",
+fields: fields{
+TorrentName: "That Movie 2023 2160p BluRay DTS-HD 5 1 x265-GROUP",
+},
+want: "833e6c4c78e960ccbce4ef68f6564082",
+},
+{
+name: "4",
+fields: fields{
+TorrentName: "That Movie 2023 1080p Remux DTS-HD 5 1 AVC-GROUP",
+},
+want: "ee15fe3c926a5b40370aec970f6b4c43",
+},
+{
+name: "5",
+fields: fields{
+TorrentName: "That Movie 2023 Bluray 1080p Remux DTS-HD 5.1 AVC-GROUP",
+},
+want: "ee15fe3c926a5b40370aec970f6b4c43",
+},
+{
+name: "6",
+fields: fields{
+TorrentName: "That Movie 2023 Theatrical Cut Bluray 1080p Remux DTS-HD 5.1 AVC-GROUP",
+},
+want: "db7e55fb3471e76ba27bf8a2e1e59236",
+},
+{
+name: "7",
+fields: fields{
+TorrentName: "That Movie 2023 Directors Cut Bluray 1080p Remux DTS-HD 5.1 AVC-GROUP",
+},
+want: "bccfe6762b34cbc3a54784351b5424fd",
+},
+{
+name: "8",
+fields: fields{
+TorrentName: "That Show 2023 S01 Bluray 1080p Remux DTS-HD 5.1 AVC-GROUP",
+},
+want: "d1c74c33e1c36f3545d06aa8857610b1",
+},
+{
+name: "9",
+fields: fields{
+TorrentName: "That Show 2023 S01 Bluray 1080p Remux DTS-HD 5.1 AVC-GROUP2",
+},
+want: "29e228f705f070041e72ebaa7b29239e",
+},
+{
+name: "10",
+fields: fields{
+TorrentName: "That Show 2023 S1 Bluray 1080p Remux DTS-HD 5.1 AVC-GROUP2",
+},
+want: "29e228f705f070041e72ebaa7b29239e",
+},
+{
+name: "11",
+fields: fields{
+TorrentName: "That Show 2023 S2 Bluray 1080p Remux DTS-HD 5.1 AVC-GROUP2",
+},
+want: "cbc67f4eec93428361eddf040654e077",
+},
+{
+name: "12",
+fields: fields{
+TorrentName: "That Show 2023 S01E01 2160p WEB-DL DTS-HD 5.1 HEVC-GROUP",
+},
+want: "806240c76cc32413efd5bcf033abf512",
+},
+{
+name: "13",
+fields: fields{
+TorrentName: "That Show 2023 S01E01 This is the start 2160p WEB-DL DTS-HD 5.1 HEVC-GROUP",
+},
+want: "33c866b898a59d255a09c10bd23a5e3f",
+},
+{
+name: "14",
+fields: fields{
+TorrentName: "That Show 2023 REPACK S01E01 This is the start 2160p WEB-DL DTS-HD 5.1 HEVC-GROUP",
+},
+want: "7be5ebeee948820f53f24091054fc7cd",
+},
+{
+name: "15",
+fields: fields{
+TorrentName: "That Show 2023 REPACK2 S01E01 This is the start 2160p WEB-DL DTS-HD 5.1 HEVC-GROUP",
+},
+want: "84e3e70f6c983f96f10cba50213a338d",
+},
+{
+name: "16",
+fields: fields{
+TorrentName: "That Show 2023 PROPER S01E01 This is the start 2160p WEB-DL DTS-HD 5.1 HEVC-GROUP",
+},
+want: "52d5d1a28a340bc5a836c0244c206c3b",
+},
+{
+name: "17",
+fields: fields{
+TorrentName: "That Show 2023 S01E01 This is the start 2160p WEB-DL DV DTS-HD 5.1 HEVC-GROUP",
+},
+want: "5e196039d426649ab5221c3e87ed108d",
+},
+{
+name: "18",
+fields: fields{
+TorrentName: "That Show 2023 S01E01 This is the start 2160p WEB-DL DoVi DTS-HD 5.1 HEVC-GROUP",
+},
+want: "5e196039d426649ab5221c3e87ed108d",
+},
+{
+name: "19",
+fields: fields{
+TorrentName: "That Show 2023 S01E01 This is the start 2160p WEB-DL DoVi HDR10 DTS-HD 5.1 HEVC-GROUP",
+},
+want: "f52a9a2f26e94c90bdaa8eaca2136cc3",
+},
+{
+name: "20",
+fields: fields{
+TorrentName: "That Show 2023 S01E01 This is the start 2160p WEB-DL DV HDR10 DTS-HD 5.1 HEVC-GROUP",
+},
+want: "f52a9a2f26e94c90bdaa8eaca2136cc3",
+},
+{
+name: "21",
+fields: fields{
+TorrentName: "that show 2023 S01E01 this is the start 2160p WEB-DL DV HDR10 DTS-HD 5.1 HEVC-GROUP",
+},
+want: "f52a9a2f26e94c90bdaa8eaca2136cc3",
+},
+{
+name: "22",
+fields: fields{
+TorrentName: "That Show 2023 S01E01 This is the start 2160p HYBRID WEB-DL DV HDR10 DTS-HD 5.1 HEVC-GROUP",
+},
+want: "414f792a345148ed4c8e070ef77340ff",
+},
+{
+name: "23",
+fields: fields{
+TorrentName: "That Movie 2008 1080p GBR Blu-ray AVC TrueHD 5.1",
+},
+want: "70bb23ff5da4209419b6d83628310a43",
+},
+{
+name: "24",
+fields: fields{
+TorrentName: "That Movie 2008 1080p GER Blu-ray AVC TrueHD 5.1",
+},
+want: "369ba9f92fb7819e0d07791fddf3c85f",
+},
+{
+name: "25",
+fields: fields{
+TorrentName: "That Show 2023 PROPER S01E01 This is the start 2160p AMZN WEB-DL DTS-HD 5.1 HEVC-GROUP",
+},
+want: "28e215fee790fbad3957383162f7c5a6",
+},
+{
+name: "26",
+fields: fields{
+TorrentName: "That Show 2023 PROPER S01E01 This is the start 2160p DSNP WEB-DL DTS-HD 5.1 HEVC-GROUP",
+},
+want: "6a26cd652170d0efc4b0e0320fbd38c1",
+},
+{
+name: "27",
+fields: fields{
+TorrentName: "That Other Show 2023 S01E01 2160p WEB-DL DV HDR10 DTS-HD 5.1 HEVC-GROUP",
+},
+want: "544c2fc4024ece5664c0f623a5feaeab",
+},
+{
+name: "28",
+fields: fields{
+TorrentName: "That Other Show 2023 S01E01 2160p WEB-DL DV HDR10+ DTS-HD 5.1 HEVC-GROUP",
+},
+want: "63b5d87abe5fb49131785de426708d31",
+},
+}
+for _, tt := range tests {
+t.Run(tt.name, func(t *testing.T) {
+r := &Release{
+TorrentName: tt.fields.TorrentName,
+}
+r.ParseString(tt.fields.TorrentName)
+assert.Equalf(t, tt.want, r.NormalizedHash, "Hash()")
+})
+}
+}
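Note on the test above: the cases pin down the normalization rules behind the duplicate check. Dot, space and case differences, S1 vs S01, and DV vs DoVi hash identically, while a different cut, HDR set, website, language tag or release group changes the hash. The sketch below is illustrative only; the real parser hashes a set of parsed release fields, and the helper name and field list here are assumptions, not the project's implementation.

package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"strings"
)

// normalizedHash is a hypothetical stand-in: join the fields that define
// "the same release", strip separators and case, then hash the result.
func normalizedHash(fields ...string) string {
	joined := strings.ToLower(strings.Join(fields, "|"))
	joined = strings.NewReplacer(".", "", " ", "", "-", "", "_", "").Replace(joined)
	sum := md5.Sum([]byte(joined))
	return hex.EncodeToString(sum[:])
}

func main() {
	a := normalizedHash("that movie", "2023", "2160p", "bluray", "dts-hd 5.1", "x265", "group")
	b := normalizedHash("That.Movie", "2023", "2160p", "BluRay", "DTS-HD 5 1", "x265", "GROUP")
	fmt.Println(a == b) // true: separator and case differences do not change the hash
}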
@@ -425,9 +425,14 @@ func ParseReleaseTags(tags []string) ReleaseTags {

return releaseTags
}

func ParseReleaseTagString(tags string) ReleaseTags {
releaseTags := ReleaseTags{}

+if tags == "" {
+return releaseTags
+}
+
for tagType, tagInfos := range types {

for _, info := range tagInfos {
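The empty-string guard added to ParseReleaseTagString means a release with no tags now returns the zero-value ReleaseTags without scanning the tag tables. A minimal test-style sketch of that behaviour (testify's assert is already used by the package's tests; the test name itself is illustrative):

func TestParseReleaseTagString_Empty(t *testing.T) {
	// early return added above: empty input yields the zero value
	assert.Equal(t, ReleaseTags{}, ParseReleaseTagString(""))
}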
@@ -12,6 +12,7 @@ import (

"github.com/dustin/go-humanize"
"github.com/mmcdole/gofeed"
+"github.com/moistari/rls"
"github.com/rs/zerolog"
"github.com/stretchr/testify/assert"
)
@@ -73,7 +74,67 @@ func TestRSSJob_processItem(t *testing.T) {
Link: "/details.php?id=00000&hit=1",
GUID: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
}},
-want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed", "Mock Indexer"}, FilterName: "", Protocol: "torrent", Implementation: "RSS", AnnounceType: domain.AnnounceTypeNew, Timestamp: now, GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 1490000000, Title: "Some Release Title", Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n", Category: "", Season: 0, Episode: 0, Year: 2022, Month: 9, Day: 22, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, AdditionalUploaderCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
+want: &domain.Release{
+ID: 0,
+FilterStatus: "PENDING",
+Rejections: []string{},
+Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed", "Mock Indexer"},
+FilterName: "",
+Protocol: "torrent",
+Implementation: "RSS",
+AnnounceType: domain.AnnounceTypeNew,
+Timestamp: now,
+GroupID: "",
+TorrentID: "",
+DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1",
+TorrentTmpFile: "",
+TorrentDataRawBytes: []uint8(nil),
+TorrentHash: "",
+TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
+NormalizedHash: "edfbe552ccde335f34b801e15930bc35",
+Size: 1490000000,
+Title: "Some Release Title",
+Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n",
+Category: "",
+Season: 0,
+Episode: 0,
+Year: 2022,
+Month: 9,
+Day: 22,
+Resolution: "720p",
+Source: "WEB",
+Codec: []string{"H.264"},
+Container: "",
+HDR: []string(nil),
+Audio: []string(nil),
+AudioChannels: "",
+Group: "GROUP",
+Region: "",
+Language: []string{},
+Proper: false,
+Repack: false,
+Edition: []string{},
+Cut: []string{},
+Website: "",
+Artists: "",
+Type: rls.Episode,
+LogScore: 0,
+Origin: "",
+Tags: []string{},
+ReleaseTags: "",
+Freeleech: false,
+FreeleechPercent: 0,
+Bonus: []string(nil),
+Uploader: "",
+PreTime: "",
+Other: []string{},
+RawCookie: "",
+AdditionalSizeCheckRequired: false,
+AdditionalUploaderCheckRequired: false,
+FilterID: 0,
+Filter: (*domain.Filter)(nil),
+ActionStatus: []domain.ReleaseActionStatus(nil),
+},
},
{
name: "with_baseurl",
@@ -107,7 +168,67 @@ func TestRSSJob_processItem(t *testing.T) {
Link: "https://fake-feed.com/details.php?id=00000&hit=1",
GUID: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
}},
-want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed", "Mock Indexer"}, FilterName: "", Protocol: "torrent", Implementation: "RSS", AnnounceType: domain.AnnounceTypeNew, Timestamp: now, GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 1490000000, Title: "Some Release Title", Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n", Category: "", Season: 0, Episode: 0, Year: 2022, Month: 9, Day: 22, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, AdditionalUploaderCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
+want: &domain.Release{
+ID: 0,
+FilterStatus: "PENDING",
+Rejections: []string{},
+Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed", "Mock Indexer"},
+FilterName: "",
+Protocol: "torrent",
+Implementation: "RSS",
+AnnounceType: domain.AnnounceTypeNew,
+Timestamp: now,
+GroupID: "",
+TorrentID: "",
+DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1",
+TorrentTmpFile: "",
+TorrentDataRawBytes: []uint8(nil),
+TorrentHash: "",
+TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
+NormalizedHash: "edfbe552ccde335f34b801e15930bc35",
+Size: 1490000000,
+Title: "Some Release Title",
+Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n",
+Category: "",
+Season: 0,
+Episode: 0,
+Year: 2022,
+Month: 9,
+Day: 22,
+Resolution: "720p",
+Source: "WEB",
+Codec: []string{"H.264"},
+Container: "",
+HDR: []string(nil),
+Audio: []string(nil),
+AudioChannels: "",
+Group: "GROUP",
+Region: "",
+Language: []string{},
+Proper: false,
+Repack: false,
+Edition: []string{},
+Cut: []string{},
+Website: "",
+Artists: "",
+Type: rls.Episode,
+LogScore: 0,
+Origin: "",
+Tags: []string{},
+ReleaseTags: "",
+Freeleech: false,
+FreeleechPercent: 0,
+Bonus: []string(nil),
+Uploader: "",
+PreTime: "",
+Other: []string{},
+RawCookie: "",
+AdditionalSizeCheckRequired: false,
+AdditionalUploaderCheckRequired: false,
+FilterID: 0,
+Filter: (*domain.Filter)(nil),
+ActionStatus: []domain.ReleaseActionStatus(nil),
+},
},
{
name: "time_parse",
@@ -142,7 +263,67 @@ func TestRSSJob_processItem(t *testing.T) {
GUID: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
//PublishedParsed: &nowMinusTime,
}},
-want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed", "Mock Indexer"}, FilterName: "", Protocol: "torrent", Implementation: "RSS", AnnounceType: domain.AnnounceTypeNew, Timestamp: now, GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 1490000000, Title: "Some Release Title", Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n", Category: "", Season: 0, Episode: 0, Year: 2022, Month: 9, Day: 22, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, AdditionalUploaderCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
+want: &domain.Release{
+ID: 0,
+FilterStatus: "PENDING",
+Rejections: []string{},
+Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed", "Mock Indexer"},
+FilterName: "",
+Protocol: "torrent",
+Implementation: "RSS",
+AnnounceType: domain.AnnounceTypeNew,
+Timestamp: now,
+GroupID: "",
+TorrentID: "",
+DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1",
+TorrentTmpFile: "",
+TorrentDataRawBytes: []uint8(nil),
+TorrentHash: "",
+TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
+NormalizedHash: "edfbe552ccde335f34b801e15930bc35",
+Size: 1490000000,
+Title: "Some Release Title",
+Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n",
+Category: "",
+Season: 0,
+Episode: 0,
+Year: 2022,
+Month: 9,
+Day: 22,
+Resolution: "720p",
+Source: "WEB",
+Codec: []string{"H.264"},
+Container: "",
+HDR: []string(nil),
+Audio: []string(nil),
+AudioChannels: "",
+Group: "GROUP",
+Region: "",
+Language: []string{},
+Proper: false,
+Repack: false,
+Edition: []string{},
+Cut: []string{},
+Website: "",
+Artists: "",
+Type: rls.Episode,
+LogScore: 0,
+Origin: "",
+Tags: []string{},
+ReleaseTags: "",
+Freeleech: false,
+FreeleechPercent: 0,
+Bonus: []string(nil),
+Uploader: "",
+PreTime: "",
+Other: []string{},
+RawCookie: "",
+AdditionalSizeCheckRequired: false,
+AdditionalUploaderCheckRequired: false,
+FilterID: 0,
+Filter: (*domain.Filter)(nil),
+ActionStatus: []domain.ReleaseActionStatus(nil),
+},
},
{
name: "time_parse",
@@ -208,7 +389,68 @@ func TestRSSJob_processItem(t *testing.T) {
},
},
}},
-want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed", "Mock Indexer"}, FilterName: "", Protocol: "torrent", Implementation: "RSS", AnnounceType: domain.AnnounceTypeNew, Timestamp: now, MagnetURI: "magnet:?xt=this-not-a-valid-magnet", GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 0, Title: "Some Release Title", Description: "Category: Example", Category: "", Season: 0, Episode: 0, Year: 2022, Month: 9, Day: 22, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
+want: &domain.Release{
+ID: 0,
+FilterStatus: "PENDING",
+Rejections: []string{},
+Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed", "Mock Indexer"},
+FilterName: "",
+Protocol: "torrent",
+Implementation: "RSS",
+AnnounceType: domain.AnnounceTypeNew,
+Timestamp: now,
+GroupID: "",
+TorrentID: "",
+DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1",
+MagnetURI: "magnet:?xt=this-not-a-valid-magnet",
+TorrentTmpFile: "",
+TorrentDataRawBytes: []uint8(nil),
+TorrentHash: "",
+TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
+NormalizedHash: "edfbe552ccde335f34b801e15930bc35",
+Size: 0,
+Title: "Some Release Title",
+Description: "Category: Example",
+Category: "",
+Season: 0,
+Episode: 0,
+Year: 2022,
+Month: 9,
+Day: 22,
+Resolution: "720p",
+Source: "WEB",
+Codec: []string{"H.264"},
+Container: "",
+HDR: []string(nil),
+Audio: []string(nil),
+AudioChannels: "",
+Group: "GROUP",
+Region: "",
+Language: []string{},
+Proper: false,
+Repack: false,
+Edition: []string{},
+Cut: []string{},
+Website: "",
+Artists: "",
+Type: rls.Episode,
+LogScore: 0,
+Origin: "",
+Tags: []string{},
+ReleaseTags: "",
+Freeleech: false,
+FreeleechPercent: 0,
+Bonus: []string(nil),
+Uploader: "",
+PreTime: "",
+Other: []string{},
+RawCookie: "",
+AdditionalSizeCheckRequired: false,
+AdditionalUploaderCheckRequired: false,
+FilterID: 0,
+Filter: (*domain.Filter)(nil),
+ActionStatus: []domain.ReleaseActionStatus(nil),
+},
},
}
for _, tt := range tests {
@@ -46,6 +46,7 @@ type Service interface {
AdditionalRecordLabelCheck(ctx context.Context, f *domain.Filter, release *domain.Release) (bool, error)
CheckSmartEpisodeCanDownload(ctx context.Context, params *domain.SmartEpisodeParams) (bool, error)
GetDownloadsByFilterId(ctx context.Context, filterID int) (*domain.FilterDownloads, error)
+CheckIsDuplicateRelease(ctx context.Context, profile *domain.DuplicateReleaseProfile, release *domain.Release) (bool, error)
}

type service struct {
@@ -374,6 +375,8 @@ func (s *service) Delete(ctx context.Context, filterID int) error {
func (s *service) CheckFilter(ctx context.Context, f *domain.Filter, release *domain.Release) (bool, error) {
l := s.log.With().Str("method", "CheckFilter").Logger()

+l.Debug().Msgf("checking filter: %s with release %s", f.Name, release.TorrentName)
+
l.Trace().Msgf("checking filter: %s %+v", f.Name, f)
l.Trace().Msgf("checking filter: %s for release: %+v", f.Name, release)

@@ -393,7 +396,11 @@ func (s *service) CheckFilter(ctx context.Context, f *domain.Filter, release *do
return false, nil
}

-if matchedFilter {
+if !matchedFilter {
+// if no match, return nil
+return false, nil
+}
+
// smartEpisode check
if f.SmartEpisode {
params := &domain.SmartEpisodeParams{
@@ -415,17 +422,37 @@ func (s *service) CheckFilter(ctx context.Context, f *domain.Filter, release *do

if !canDownloadShow {
l.Trace().Msgf("failed smart episode check: %s", f.Name)

if params.IsDailyEpisode() {
f.RejectReasons.Add("smart episode", fmt.Sprintf("not new (%s) daily: %d-%d-%d", release.Title, release.Year, release.Month, release.Day), fmt.Sprintf("expected newer than (%s) daily: %d-%d-%d", release.Title, release.Year, release.Month, release.Day))
} else {
f.RejectReasons.Add("smart episode", fmt.Sprintf("not new (%s) season: %d ep: %d", release.Title, release.Season, release.Episode), fmt.Sprintf("expected newer than (%s) season: %d ep: %d", release.Title, release.Season, release.Episode))
}

return false, nil
}
}

+// check duplicates
+if f.DuplicateHandling != nil {
+l.Debug().Msgf("(%s) check is duplicate with profile %s", f.Name, f.DuplicateHandling.Name)
+
+release.SkipDuplicateProfileID = f.DuplicateHandling.ID
+release.SkipDuplicateProfileName = f.DuplicateHandling.Name
+
+isDuplicate, err := s.CheckIsDuplicateRelease(ctx, f.DuplicateHandling, release)
+if err != nil {
+return false, errors.Wrap(err, "error finding duplicate handle")
+}
+
+if isDuplicate {
+l.Debug().Msgf("filter %s rejected release %q as duplicate with profile %q", f.Name, release.TorrentName, f.DuplicateHandling.Name)
+f.RejectReasons.Add("duplicate", "duplicate", "not duplicate")
+
+// let it continue so external filters can trigger checks
+//return false, nil
+release.IsDuplicate = true
+}
+}
+
// if matched, do additional size check if needed, attach actions and return the filter

l.Debug().Msgf("found and matched filter: %s", f.Name)
@@ -493,10 +520,6 @@ func (s *service) CheckFilter(ctx context.Context, f *domain.Filter, release *do
}

return true, nil
-}
-
-// if no match, return nil
-return false, nil
}

// AdditionalSizeCheck performs additional out-of-band checks to determine the
@@ -732,10 +755,18 @@ func (s *service) CheckSmartEpisodeCanDownload(ctx context.Context, params *doma
return s.releaseRepo.CheckSmartEpisodeCanDownload(ctx, params)
}

+func (s *service) CheckIsDuplicateRelease(ctx context.Context, profile *domain.DuplicateReleaseProfile, release *domain.Release) (bool, error) {
+return s.releaseRepo.CheckIsDuplicateRelease(ctx, profile, release)
+}
+
func (s *service) RunExternalFilters(ctx context.Context, f *domain.Filter, externalFilters []domain.FilterExternal, release *domain.Release) (ok bool, err error) {
defer func() {
// try recover panic if anything went wrong with the external filter checks
errors.RecoverPanic(recover(), &err)
+if err != nil {
+s.log.Error().Err(err).Msgf("filter %s external filter check panic", f.Name)
+ok = false
+}
}()

// sort filters by index
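Worth noting about the duplicate branch above: a duplicate no longer short-circuits CheckFilter. The release is flagged (IsDuplicate, SkipDuplicateProfileID/Name) and a rejection is recorded, but the function still returns true so external filters can run; the release processor further down in this diff then treats a match with non-empty RejectReasons as a skip. A rough sketch of how a caller sees that contract; filterSvc and the logger here are placeholders for illustration, not code from the PR:

// Sketch: placeholder wiring around the CheckFilter contract shown above.
matched, err := filterSvc.CheckFilter(ctx, f, release)
if err != nil {
	return err
}
if matched && f.RejectReasons.Len() > 0 {
	// matched the filter but was flagged, for example as a duplicate
	log.Debug().Msgf("skipping %s (duplicate profile: %s)", release.TorrentName, release.SkipDuplicateProfileName)
} else if matched {
	// run actions as usual
}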
@@ -17,7 +17,7 @@ import (

type actionService interface {
List(ctx context.Context) ([]domain.Action, error)
-Store(ctx context.Context, action domain.Action) (*domain.Action, error)
+Store(ctx context.Context, action *domain.Action) error
Delete(ctx context.Context, req *domain.DeleteActionRequest) error
ToggleEnabled(actionID int) error
}
@@ -56,35 +56,35 @@ func (h actionHandler) getActions(w http.ResponseWriter, r *http.Request) {
}

func (h actionHandler) storeAction(w http.ResponseWriter, r *http.Request) {
-var data domain.Action
+var data *domain.Action
if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
h.encoder.Error(w, err)
return
}

-action, err := h.service.Store(r.Context(), data)
+err := h.service.Store(r.Context(), data)
if err != nil {
h.encoder.Error(w, err)
return
}

-h.encoder.StatusResponse(w, http.StatusCreated, action)
+h.encoder.StatusResponse(w, http.StatusCreated, data)
}

func (h actionHandler) updateAction(w http.ResponseWriter, r *http.Request) {
-var data domain.Action
+var data *domain.Action
if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
h.encoder.Error(w, err)
return
}

-action, err := h.service.Store(r.Context(), data)
+err := h.service.Store(r.Context(), data)
if err != nil {
h.encoder.Error(w, err)
return
}

-h.encoder.StatusResponse(w, http.StatusCreated, action)
+h.encoder.StatusResponse(w, http.StatusCreated, data)
}

func (h actionHandler) deleteAction(w http.ResponseWriter, r *http.Request) {
@@ -25,6 +25,10 @@ type releaseService interface {
Delete(ctx context.Context, req *domain.DeleteReleaseRequest) error
Retry(ctx context.Context, req *domain.ReleaseActionRetryReq) error
ProcessManual(ctx context.Context, req *domain.ReleaseProcessReq) error
+
+StoreReleaseProfileDuplicate(ctx context.Context, profile *domain.DuplicateReleaseProfile) error
+FindDuplicateReleaseProfiles(ctx context.Context) ([]*domain.DuplicateReleaseProfile, error)
+DeleteReleaseProfileDuplicate(ctx context.Context, id int64) error
}

type releaseHandler struct {
@@ -52,6 +56,13 @@ func (h releaseHandler) Routes(r chi.Router) {
r.Get("/", h.getReleaseByID)
r.Post("/actions/{actionStatusID}/retry", h.retryAction)
})
+
+r.Route("/profiles/duplicate", func(r chi.Router) {
+r.Get("/", h.findReleaseProfileDuplicate)
+r.Post("/", h.storeReleaseProfileDuplicate)
+
+r.Delete("/{profileId}", h.deleteReleaseProfileDuplicate)
+})
}

func (h releaseHandler) findReleases(w http.ResponseWriter, r *http.Request) {
@@ -309,3 +320,55 @@ func (h releaseHandler) retryAction(w http.ResponseWriter, r *http.Request) {

h.encoder.NoContent(w)
}
+
+func (h releaseHandler) storeReleaseProfileDuplicate(w http.ResponseWriter, r *http.Request) {
+var data *domain.DuplicateReleaseProfile
+
+if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
+h.encoder.Error(w, err)
+return
+}
+
+if err := h.service.StoreReleaseProfileDuplicate(r.Context(), data); err != nil {
+h.encoder.Error(w, err)
+return
+}
+
+h.encoder.StatusCreatedData(w, data)
+}
+
+func (h releaseHandler) findReleaseProfileDuplicate(w http.ResponseWriter, r *http.Request) {
+profiles, err := h.service.FindDuplicateReleaseProfiles(r.Context())
+if err != nil {
+h.encoder.StatusResponse(w, http.StatusInternalServerError, map[string]interface{}{
+"code": "INTERNAL_SERVER_ERROR",
+"message": err.Error(),
+})
+return
+}
+
+//ret := struct {
+//	Data []*domain.DuplicateReleaseProfile `json:"data"`
+//}{
+//	Data: profiles,
+//}
+
+h.encoder.StatusResponse(w, http.StatusOK, profiles)
+}
+
+func (h releaseHandler) deleteReleaseProfileDuplicate(w http.ResponseWriter, r *http.Request) {
+//profileIdParam := chi.URLParam(r, "releaseId")
+
+profileId, err := strconv.Atoi(chi.URLParam(r, "profileId"))
+if err != nil {
+h.encoder.StatusError(w, http.StatusBadRequest, err)
+return
+}
+
+if err := h.service.DeleteReleaseProfileDuplicate(r.Context(), int64(profileId)); err != nil {
+h.encoder.Error(w, err)
+return
+}
+
+h.encoder.NoContent(w)
+}
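These handlers expose duplicate profiles at api/release/profiles/duplicate: GET lists profiles, POST stores one (201 Created), and DELETE /{profileId} removes one, which is exactly what the web client calls further down. A rough client sketch in Go; the base URL, port, API-key header name and JSON body shape are assumptions for illustration, not taken from this diff:

package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// assumed local autobrr instance and API key header
	url := "http://localhost:7474/api/release/profiles/duplicate"
	payload := bytes.NewBufferString(`{"name":"TV","title":true,"season":true,"episode":true}`)

	req, err := http.NewRequest(http.MethodPost, url, payload)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("X-API-Token", "your-api-key") // header name assumed

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // expect 201 Created from storeReleaseProfileDuplicate
}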
@@ -32,6 +32,10 @@ type Service interface {
ProcessMultiple(releases []*domain.Release)
ProcessManual(ctx context.Context, req *domain.ReleaseProcessReq) error
Retry(ctx context.Context, req *domain.ReleaseActionRetryReq) error
+
+StoreReleaseProfileDuplicate(ctx context.Context, profile *domain.DuplicateReleaseProfile) error
+FindDuplicateReleaseProfiles(ctx context.Context) ([]*domain.DuplicateReleaseProfile, error)
+DeleteReleaseProfileDuplicate(ctx context.Context, id int64) error
}

type actionClientTypeKey struct {
@@ -94,6 +98,18 @@ func (s *service) Delete(ctx context.Context, req *domain.DeleteReleaseRequest)
return s.repo.Delete(ctx, req)
}

+func (s *service) FindDuplicateReleaseProfiles(ctx context.Context) ([]*domain.DuplicateReleaseProfile, error) {
+return s.repo.FindDuplicateReleaseProfiles(ctx)
+}
+
+func (s *service) StoreReleaseProfileDuplicate(ctx context.Context, profile *domain.DuplicateReleaseProfile) error {
+return s.repo.StoreDuplicateProfile(ctx, profile)
+}
+
+func (s *service) DeleteReleaseProfileDuplicate(ctx context.Context, id int64) error {
+return s.repo.DeleteReleaseProfileDuplicate(ctx, id)
+}
+
func (s *service) ProcessManual(ctx context.Context, req *domain.ReleaseProcessReq) error {
// get indexer definition with data
def, err := s.indexerSvc.GetMappedDefinitionByName(req.IndexerIdentifier)
@@ -183,8 +199,6 @@ func (s *service) Process(release *domain.Release) {
s.log.Error().Err(err).Msgf("release.Process: error processing filters for indexer: %s", release.Indexer.Name)
return
}
-
-return
}

func (s *service) processFilters(ctx context.Context, filters []*domain.Filter, release *domain.Release) error {
@@ -201,6 +215,11 @@ func (s *service) processFilters(ctx context.Context, filters []*domain.Filter,
release.FilterName = f.Name
release.FilterID = f.ID

+// reset IsDuplicate
+release.IsDuplicate = false
+release.SkipDuplicateProfileID = 0
+release.SkipDuplicateProfileName = ""
+
// test filter
match, err := s.filterSvc.CheckFilter(ctx, f, release)
if err != nil {
@@ -208,10 +227,10 @@ func (s *service) processFilters(ctx context.Context, filters []*domain.Filter,
return err
}

-if !match {
+if !match || f.RejectReasons.Len() > 0 {
l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s, no match. rejections: %s", release.Indexer.Name, release.FilterName, release.TorrentName, f.RejectReasons.String())

-l.Debug().Msgf("filter %s rejected release: %s", f.Name, release.TorrentName)
+l.Debug().Msgf("filter %s rejected release: %s with reasons: %s", f.Name, release.TorrentName, f.RejectReasons.String())
continue
}

@@ -231,13 +250,6 @@ func (s *service) processFilters(ctx context.Context, filters []*domain.Filter,
continue
}

-// sleep for the delay period specified in the filter before running actions
-delay := release.Filter.Delay
-if delay > 0 {
-l.Debug().Msgf("release.Process: delaying processing of '%s' (%s) for %s by %d seconds as specified in the filter", release.TorrentName, release.FilterName, release.Indexer.Name, delay)
-time.Sleep(time.Duration(delay) * time.Second)
-}
-
// save release here to only save those with rejections from actions instead of all releases
if release.ID == 0 {
release.FilterStatus = domain.ReleaseStatusFilterApproved
@@ -251,24 +263,40 @@ func (s *service) processFilters(ctx context.Context, filters []*domain.Filter,
var rejections []string

// run actions (watchFolder, test, exec, qBittorrent, Deluge, arr etc.)
-for _, act := range actions {
+for idx, act := range actions {
// only run enabled actions
if !act.Enabled {
l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s action '%s' not enabled, skip", release.Indexer.Name, release.FilterName, release.TorrentName, act.Name)
continue
}

+// add action status as pending
+actionStatus := domain.NewReleaseActionStatus(act, release)
+
+if err := s.StoreReleaseActionStatus(ctx, actionStatus); err != nil {
+s.log.Error().Err(err).Msgf("release.runAction: error storing action for filter: %s", release.FilterName)
+}
+
+if idx == 0 {
+// sleep for the delay period specified in the filter before running actions
+delay := release.Filter.Delay
+if delay > 0 {
+l.Debug().Msgf("release.Process: delaying processing of '%s' (%s) for %s by %d seconds as specified in the filter", release.TorrentName, release.FilterName, release.Indexer.Name, delay)
+time.Sleep(time.Duration(delay) * time.Second)
+}
+}
+
l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s , run action: %s", release.Indexer.Name, release.FilterName, release.TorrentName, act.Name)

// keep track of action clients to avoid sending the same thing all over again
_, tried := triedActionClients[actionClientTypeKey{Type: act.Type, ClientID: act.ClientID}]
if tried {
-l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s action client already tried, skip", release.Indexer.Name, release.FilterName, release.TorrentName)
+l.Debug().Msgf("release.Process: indexer: %s, filter: %s release: %s action client already tried, skip", release.Indexer.Name, release.FilterName, release.TorrentName)
continue
}

// run action
-status, err := s.runAction(ctx, act, release)
+status, err := s.runAction(ctx, act, release, actionStatus)
if err != nil {
l.Error().Err(err).Msgf("release.Process: error running actions for filter: %s", release.FilterName)
//continue
@@ -320,13 +348,13 @@ func (s *service) ProcessMultiple(releases []*domain.Release) {
}
}

-func (s *service) runAction(ctx context.Context, action *domain.Action, release *domain.Release) (*domain.ReleaseActionStatus, error) {
+func (s *service) runAction(ctx context.Context, action *domain.Action, release *domain.Release, status *domain.ReleaseActionStatus) (*domain.ReleaseActionStatus, error) {
// add action status as pending
-status := domain.NewReleaseActionStatus(action, release)
+//status := domain.NewReleaseActionStatus(action, release)
+//
-if err := s.StoreReleaseActionStatus(ctx, status); err != nil {
+//if err := s.StoreReleaseActionStatus(ctx, status); err != nil {
-s.log.Error().Err(err).Msgf("release.runAction: error storing action for filter: %s", release.FilterName)
+// s.log.Error().Err(err).Msgf("release.runAction: error storing action for filter: %s", release.FilterName)
-}
+//}

rejections, err := s.actionSvc.RunAction(ctx, action, release)
if err != nil {
@@ -351,7 +379,14 @@ func (s *service) runAction(ctx context.Context, action *domain.Action, release
}

func (s *service) retryAction(ctx context.Context, action *domain.Action, release *domain.Release) error {
-actionStatus, err := s.runAction(ctx, action, release)
+// add action status as pending
+status := domain.NewReleaseActionStatus(action, release)
+
+if err := s.StoreReleaseActionStatus(ctx, status); err != nil {
+s.log.Error().Err(err).Msgf("release.runAction: error storing action for filter: %s", release.FilterName)
+}
+
+actionStatus, err := s.runAction(ctx, action, release, status)
if err != nil {
s.log.Error().Err(err).Msgf("release.retryAction: error running actions for filter: %s", release.FilterName)
@ -498,7 +498,16 @@ export const APIClient = {
|
||||||
},
|
},
|
||||||
replayAction: (releaseId: number, actionId: number) => appClient.Post(
|
replayAction: (releaseId: number, actionId: number) => appClient.Post(
|
||||||
`api/release/${releaseId}/actions/${actionId}/retry`
|
`api/release/${releaseId}/actions/${actionId}/retry`
|
||||||
)
|
),
|
||||||
|
profiles: {
|
||||||
|
duplicates: {
|
||||||
|
list: () => appClient.Get<ReleaseProfileDuplicate[]>(`api/release/profiles/duplicate`),
|
||||||
|
delete: (id: number) => appClient.Delete(`api/release/profiles/duplicate/${id}`),
|
||||||
|
store: (profile: ReleaseProfileDuplicate) => appClient.Post(`api/release/profiles/duplicate`, {
|
||||||
|
body: profile
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
}
|
||||||
},
|
},
|
||||||
updates: {
|
updates: {
|
||||||
check: () => appClient.Get("api/updates/check"),
|
check: () => appClient.Get("api/updates/check"),
|
||||||
|
|
|
@ -12,7 +12,7 @@ import {
   FilterKeys,
   IndexerKeys,
   IrcKeys, ListKeys, NotificationKeys, ProxyKeys,
-  ReleaseKeys,
+  ReleaseKeys, ReleaseProfileDuplicateKeys,
   SettingsKeys
 } from "@api/query_keys";
 import { ColumnFilter } from "@tanstack/react-table";

@ -165,6 +165,14 @@ export const ReleasesIndexersQueryOptions = () =>
     staleTime: Infinity
   });

+export const ReleaseProfileDuplicateList = () =>
+  queryOptions({
+    queryKey: ReleaseProfileDuplicateKeys.lists(),
+    queryFn: () => APIClient.release.profiles.duplicates.list(),
+    staleTime: 5000,
+    refetchOnWindowFocus: true,
+  });
+
 export const ProxiesQueryOptions = () =>
   queryOptions({
     queryKey: ProxyKeys.lists(),
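As an illustration, the new query options are consumed through TanStack Query in the same way the Releases settings screen later in this diff does; a trimmed sketch:

// Sketch of consuming the new query options; mirrors the settings screen usage in this commit.
import { useSuspenseQuery } from "@tanstack/react-query";
import { ReleaseProfileDuplicateList } from "@api/queries";

function useDuplicateProfiles() {
  const query = useSuspenseQuery(ReleaseProfileDuplicateList());
  return query.data; // ReleaseProfileDuplicate[]
}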
@ -35,6 +35,13 @@ export const ReleaseKeys = {
   latestActivity: () => [...ReleaseKeys.all, "latest-activity"] as const,
 };

+export const ReleaseProfileDuplicateKeys = {
+  all: ["releaseProfileDuplicate"] as const,
+  lists: () => [...ReleaseProfileDuplicateKeys.all, "list"] as const,
+  details: () => [...ReleaseProfileDuplicateKeys.all, "detail"] as const,
+  detail: (id: number) => [...ReleaseProfileDuplicateKeys.details(), id] as const,
+};
+
 export const ApiKeys = {
   all: ["api_keys"] as const,
   lists: () => [...ApiKeys.all, "list"] as const,
@ -251,7 +251,7 @@ export function DownloadClientSelect({

 export interface SelectFieldOption {
   label: string;
-  value: string;
+  value: string | number | null;
 }

 export interface SelectFieldProps {

@ -293,7 +293,7 @@ export const Select = ({
       onChange={(value) => setFieldValue(field.name, value)}
     >
       {({ open }) => (
-        <>
+        <div>
           <Label className="flex text-xs font-bold text-gray-800 dark:text-gray-100 uppercase tracking-wide">
             {tooltip ? (
               <DocsTooltip label={label}>{tooltip}</DocsTooltip>

@ -364,7 +364,7 @@ export const Select = ({
             </ListboxOptions>
           </Transition>
         </div>
-      </>
+      </div>
     )}
   </Listbox>
 )}
web/src/forms/_shared.ts (new file, 15 lines)
@ -0,0 +1,15 @@
+/*
+ * Copyright (c) 2021 - 2024, Ludvig Lundgren and the autobrr contributors.
+ * SPDX-License-Identifier: GPL-2.0-or-later
+ */
+
+export interface AddFormProps {
+  isOpen: boolean;
+  toggle: () => void;
+}
+
+export interface UpdateFormProps<T> {
+  isOpen: boolean;
+  toggle: () => void;
+  data: T;
+}
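A brief sketch (illustrative, not from the diff) of the pattern these shared props enable; the forms below for filters, API keys, download clients, feeds, indexers, IRC networks, notifications, proxies and release profiles are all converted to it:

// Hypothetical helpers showing the shared prop shapes; update forms now receive their record via `data`.
import { AddFormProps, UpdateFormProps } from "@forms/_shared";

const describeAdd = ({ isOpen }: AddFormProps) =>
  `add panel is ${isOpen ? "open" : "closed"}`;

const describeUpdate = <T extends { name: string }>({ isOpen, data }: UpdateFormProps<T>) =>
  `editing ${data.name}, panel is ${isOpen ? "open" : "closed"}`;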
@ -16,14 +16,9 @@ import { FilterKeys } from "@api/query_keys";
 import { DEBUG } from "@components/debug";
 import { toast } from "@components/hot-toast";
 import Toast from "@components/notifications/Toast";
+import { AddFormProps } from "@forms/_shared";

-interface filterAddFormProps {
-  isOpen: boolean;
-  toggle: () => void;
-}
-
-export function FilterAddForm({ isOpen, toggle }: filterAddFormProps) {
+export function FilterAddForm({ isOpen, toggle }: AddFormProps) {
   const inputRef = useRef(null)
   const queryClient = useQueryClient();
   const navigate = useNavigate();
@ -15,13 +15,9 @@ import { ApiKeys } from "@api/query_keys";
 import { DEBUG } from "@components/debug";
 import { toast } from "@components/hot-toast";
 import Toast from "@components/notifications/Toast";
+import { AddFormProps } from "@forms/_shared";

-interface apiKeyAddFormProps {
-  isOpen: boolean;
-  toggle: () => void;
-}
-
-export function APIKeyAddForm({ isOpen, toggle }: apiKeyAddFormProps) {
+export function APIKeyAddForm({ isOpen, toggle }: AddFormProps) {
   const queryClient = useQueryClient();

   const mutation = useMutation({
@ -27,6 +27,7 @@ import {
 } from "@components/inputs";
 import { DocsLink, ExternalLink } from "@components/ExternalLink";
 import { SelectFieldBasic } from "@components/inputs/select_wide";
+import { AddFormProps, UpdateFormProps } from "@forms/_shared";

 interface InitialValuesSettings {
   basic?: {

@ -691,12 +692,7 @@ function DownloadClientFormButtons({
   );
 }

-interface formProps {
-  isOpen: boolean;
-  toggle: () => void;
-}
-
-export function DownloadClientAddForm({ isOpen, toggle }: formProps) {
+export function DownloadClientAddForm({ isOpen, toggle }: AddFormProps) {
   const [isTesting, setIsTesting] = useState(false);
   const [isSuccessfulTest, setIsSuccessfulTest] = useState(false);
   const [isErrorTest, setIsErrorTest] = useState(false);

@ -856,13 +852,7 @@ export function DownloadClientAddForm({ isOpen, toggle }: formProps) {
   );
 }

-interface updateFormProps {
-  isOpen: boolean;
-  toggle: () => void;
-  client: DownloadClient;
-}
-
-export function DownloadClientUpdateForm({ client, isOpen, toggle }: updateFormProps) {
+export function DownloadClientUpdateForm({ isOpen, toggle, data: client}: UpdateFormProps<DownloadClient>) {
   const [isTesting, setIsTesting] = useState(false);
   const [isSuccessfulTest, setIsSuccessfulTest] = useState(false);
   const [isErrorTest, setIsErrorTest] = useState(false);
@ -18,13 +18,7 @@ import { componentMapType } from "./DownloadClientForms";
 import { sleep } from "@utils";
 import { ImplementationBadges } from "@screens/settings/Indexer";
 import { FeedDownloadTypeOptions } from "@domain/constants";
+import { UpdateFormProps } from "@forms/_shared";

-interface UpdateProps {
-  isOpen: boolean;
-  toggle: () => void;
-  feed: Feed;
-}
-
 interface InitialValues {
   id: number;

@ -41,7 +35,8 @@ interface InitialValues {
   settings: FeedSettings;
 }

-export function FeedUpdateForm({ isOpen, toggle, feed }: UpdateProps) {
+export function FeedUpdateForm({ isOpen, toggle, data}: UpdateFormProps<Feed>) {
+  const feed = data;
   const [isTesting, setIsTesting] = useState(false);
   const [isTestSuccessful, setIsSuccessfulTest] = useState(false);
   const [isTestError, setIsErrorTest] = useState(false);
@ -25,6 +25,7 @@ import { FeedDownloadTypeOptions } from "@domain/constants";
 import { DocsLink } from "@components/ExternalLink";
 import * as common from "@components/inputs/common";
 import { SelectField } from "@forms/settings/IrcForms";
+import { AddFormProps, UpdateFormProps } from "@forms/_shared";

 // const isRequired = (message: string) => (value?: string | undefined) => (!!value ? undefined : message);

@ -255,12 +256,7 @@ type SelectValue = {
   value: string;
 };

-export interface AddProps {
-  isOpen: boolean;
-  toggle: () => void;
-}
-
-export function IndexerAddForm({ isOpen, toggle }: AddProps) {
+export function IndexerAddForm({ isOpen, toggle }: AddFormProps) {
   const [indexer, setIndexer] = useState<IndexerDefinition>({} as IndexerDefinition);

   const queryClient = useQueryClient();

@ -729,13 +725,7 @@ interface IndexerUpdateInitialValues {
   }
 }

-interface UpdateProps {
-  isOpen: boolean;
-  toggle: () => void;
-  indexer: IndexerDefinition;
-}
-
-export function IndexerUpdateForm({ isOpen, toggle, indexer }: UpdateProps) {
+export function IndexerUpdateForm({ isOpen, toggle, data: indexer }: UpdateFormProps<IndexerDefinition>) {
   const queryClient = useQueryClient();

   const proxies = useQuery(ProxiesQueryOptions());
@ -22,6 +22,7 @@ import Toast from "@components/notifications/Toast";
 import * as common from "@components/inputs/common";
 import { classNames } from "@utils";
 import { ProxiesQueryOptions } from "@api/queries";
+import { AddFormProps, UpdateFormProps } from "@forms/_shared";

 interface ChannelsFieldArrayProps {
   channels: IrcChannel[];

@ -122,11 +123,6 @@ interface IrcNetworkAddFormValues {
   channels: IrcChannel[];
 }

-interface AddFormProps {
-  isOpen: boolean;
-  toggle: () => void;
-}
-
 export function IrcNetworkAddForm({ isOpen, toggle }: AddFormProps) {
   const queryClient = useQueryClient();

@ -275,17 +271,11 @@ interface IrcNetworkUpdateFormValues {
   proxy_id: number;
 }

-interface IrcNetworkUpdateFormProps {
-  isOpen: boolean;
-  toggle: () => void;
-  network: IrcNetwork;
-}
-
 export function IrcNetworkUpdateForm({
   isOpen,
   toggle,
-  network
-}: IrcNetworkUpdateFormProps) {
+  data: network
+}: UpdateFormProps<IrcNetwork>) {
   const queryClient = useQueryClient();

   const proxies = useQuery(ProxiesQueryOptions());
@ -23,6 +23,7 @@ import * as common from "@components/inputs/common";
 import { NumberFieldWide, PasswordFieldWide, SwitchGroupWide, TextFieldWide } from "@components/inputs";

 import { componentMapType } from "./DownloadClientForms";
+import { AddFormProps, UpdateFormProps } from "@forms/_shared";

 function FormFieldsDiscord() {
   return (

@ -311,12 +312,7 @@ interface NotificationAddFormValues {
   enabled: boolean;
 }

-interface AddProps {
-  isOpen: boolean;
-  toggle: () => void;
-}
-
-export function NotificationAddForm({ isOpen, toggle }: AddProps) {
+export function NotificationAddForm({ isOpen, toggle }: AddFormProps) {
   const queryClient = useQueryClient();

   const createMutation = useMutation({

@ -565,12 +561,6 @@ const EventCheckBoxes = () => (
   </fieldset>
 );

-interface UpdateProps {
-  isOpen: boolean;
-  toggle: () => void;
-  notification: ServiceNotification;
-}
-
 interface InitialValues {
   id: number;
   enabled: boolean;

@ -587,7 +577,7 @@ interface InitialValues {
   username?: string
 }

-export function NotificationUpdateForm({ isOpen, toggle, notification }: UpdateProps) {
+export function NotificationUpdateForm({ isOpen, toggle, data: notification }: UpdateFormProps<ServiceNotification>) {
   const queryClient = useQueryClient();

   const mutation = useMutation({
@ -9,7 +9,7 @@ import { Dialog, DialogPanel, DialogTitle, Transition, TransitionChild } from "@
 import { XMarkIcon } from "@heroicons/react/24/solid";
 import { useMutation, useQueryClient } from "@tanstack/react-query";

-import { AddProps } from "@forms/settings/IndexerForms";
+import { AddFormProps } from "@forms/_shared";
 import { DEBUG } from "@components/debug.tsx";
 import { PasswordFieldWide, SwitchGroupWide, TextFieldWide } from "@components/inputs";
 import { SelectFieldBasic } from "@components/inputs/select_wide";

@ -20,7 +20,7 @@ import { toast } from "@components/hot-toast";
 import Toast from "@components/notifications/Toast";
 import { SlideOver } from "@components/panels";

-export function ProxyAddForm({ isOpen, toggle }: AddProps) {
+export function ProxyAddForm({ isOpen, toggle }: AddFormProps) {
   const queryClient = useQueryClient();

   const createMutation = useMutation({
web/src/forms/settings/ReleaseForms.tsx (new file, 198 lines)
@ -0,0 +1,198 @@
+/*
+ * Copyright (c) 2021 - 2024, Ludvig Lundgren and the autobrr contributors.
+ * SPDX-License-Identifier: GPL-2.0-or-later
+ */
+
+import { useMutation, useQueryClient } from "@tanstack/react-query";
+import { APIClient } from "@api/APIClient.ts";
+import { ReleaseProfileDuplicateKeys } from "@api/query_keys.ts";
+import { toast } from "@components/hot-toast";
+import Toast from "@components/notifications/Toast.tsx";
+import { SwitchGroupWide, TextFieldWide } from "@components/inputs";
+import { SlideOver } from "@components/panels";
+import { AddFormProps, UpdateFormProps } from "@forms/_shared";
+
+export function ReleaseProfileDuplicateAddForm({ isOpen, toggle }: AddFormProps) {
+  const queryClient = useQueryClient();
+
+  const addMutation = useMutation({
+    mutationFn: (profile: ReleaseProfileDuplicate) => APIClient.release.profiles.duplicates.store(profile),
+    onSuccess: () => {
+      queryClient.invalidateQueries({ queryKey: ReleaseProfileDuplicateKeys.lists() });
+      toast.custom((t) => <Toast type="success" body="Profile was added" t={t} />);
+
+      toggle();
+    },
+    onError: () => {
+      toast.custom((t) => <Toast type="error" body="Profile could not be added" t={t} />);
+    }
+  });
+
+  const onSubmit = (data: unknown) => addMutation.mutate(data as ReleaseProfileDuplicate);
+
+  const initialValues: ReleaseProfileDuplicate = {
+    id: 0,
+    name: "",
+    protocol: false,
+    release_name: false,
+    hash: false,
+    title: false,
+    sub_title: false,
+    year: false,
+    month: false,
+    day: false,
+    source: false,
+    resolution: false,
+    codec: false,
+    container: false,
+    dynamic_range: false,
+    audio: false,
+    group: false,
+    season: false,
+    episode: false,
+    website: false,
+    proper: false,
+    repack: false,
+    edition: false,
+    language: false,
+  };
+
+  return (
+    <SlideOver
+      type="CREATE"
+      title="Duplicate Profile"
+      isOpen={isOpen}
+      toggle={toggle}
+      onSubmit={onSubmit}
+      initialValues={initialValues}
+    >
+      {() => (
+        <div className="py-2 space-y-6 sm:py-0 sm:space-y-0 divide-y divide-gray-200 dark:divide-gray-700">
+          <TextFieldWide required name="name" label="Name"/>
+
+          <SwitchGroupWide name="release_name" label="Release name" description="Full release name" />
+          <SwitchGroupWide name="hash" label="Hash" description="Normalized hash of the release name. Use with Release name for exact match" />
+          <SwitchGroupWide name="title" label="Title" description="Parsed title" />
+          <SwitchGroupWide name="sub_title" label="Sub Title" description="Parsed Sub Title like Episode Name" />
+          <SwitchGroupWide name="year" label="Year" />
+          <SwitchGroupWide name="month" label="Month" description="For daily releases" />
+          <SwitchGroupWide name="day" label="Day" description="For daily releases" />
+          <SwitchGroupWide name="source" label="Source" />
+          <SwitchGroupWide name="resolution" label="Resolution" />
+          <SwitchGroupWide name="codec" label="Codec" />
+          <SwitchGroupWide name="container" label="Container" />
+          <SwitchGroupWide name="dynamic_range" label="Dynamic Range" />
+          <SwitchGroupWide name="audio" label="Audio" />
+          <SwitchGroupWide name="group" label="Group" description="Release group" />
+          <SwitchGroupWide name="season" label="Season" />
+          <SwitchGroupWide name="episode" label="Episode" />
+          <SwitchGroupWide name="website" label="Website/Service" description="Services such as AMZN/HULU/NF" />
+          <SwitchGroupWide name="proper" label="Proper" />
+          <SwitchGroupWide name="repack" label="Repack" />
+          <SwitchGroupWide name="edition" label="Edition" />
+          <SwitchGroupWide name="language" label="Language" />
+        </div>
+      )}
+    </SlideOver>
+  );
+}
+
+export function ReleaseProfileDuplicateUpdateForm({ isOpen, toggle, data: profile }: UpdateFormProps<ReleaseProfileDuplicate>) {
+  const queryClient = useQueryClient();
+
+  const storeMutation = useMutation({
+    mutationFn: (profile: ReleaseProfileDuplicate) => APIClient.release.profiles.duplicates.store(profile),
+    onSuccess: () => {
+      queryClient.invalidateQueries({ queryKey: ReleaseProfileDuplicateKeys.lists() });
+      toast.custom((t) => <Toast type="success" body="Profile was added" t={t} />);
+
+      toggle();
+    },
+    onError: () => {
+      toast.custom((t) => <Toast type="error" body="Profile could not be added" t={t} />);
+    }
+  });
+
+  const onSubmit = (data: unknown) => storeMutation.mutate(data as ReleaseProfileDuplicate);
+
+  const deleteMutation = useMutation({
+    mutationFn: (profileId: number) => APIClient.release.profiles.duplicates.delete(profileId),
+    onSuccess: () => {
+      queryClient.invalidateQueries({ queryKey: ReleaseProfileDuplicateKeys.lists() });
+      queryClient.invalidateQueries({ queryKey: ReleaseProfileDuplicateKeys.detail(profile.id) });
+
+      toast.custom((t) => <Toast type="success" body={`Profile ${profile.name} was deleted!`} t={t} />);
+
+      toggle();
+    },
+  });
+
+  const onDelete = () => deleteMutation.mutate(profile.id);
+
+  const initialValues: ReleaseProfileDuplicate = {
+    id: profile.id,
+    name: profile.name,
+    protocol: profile.protocol,
+    release_name: profile.release_name,
+    hash: profile.hash,
+    title: profile.title,
+    sub_title: profile.sub_title,
+    year: profile.year,
+    month: profile.month,
+    day: profile.day,
+    source: profile.source,
+    resolution: profile.resolution,
+    codec: profile.codec,
+    container: profile.container,
+    dynamic_range: profile.dynamic_range,
+    audio: profile.audio,
+    group: profile.group,
+    season: profile.season,
+    episode: profile.episode,
+    website: profile.website,
+    proper: profile.proper,
+    repack: profile.repack,
+    edition: profile.edition,
+    language: profile.language,
+  };
+
+  return (
+    <SlideOver
+      type="UPDATE"
+      title="Duplicate Profile"
+      isOpen={isOpen}
+      toggle={toggle}
+      deleteAction={onDelete}
+      onSubmit={onSubmit}
+      initialValues={initialValues}
+    >
+      {() => (
+        <div className="py-2 space-y-6 sm:py-0 sm:space-y-0 divide-y divide-gray-200 dark:divide-gray-700">
+          <TextFieldWide required name="name" label="Name"/>
+
+          <SwitchGroupWide name="release_name" label="Release name" description="Full release name" />
+          <SwitchGroupWide name="hash" label="Hash" description="Normalized hash of the release name. Use with Release name for exact match" />
+          <SwitchGroupWide name="title" label="Title" description="Parsed title" />
+          <SwitchGroupWide name="sub_title" label="Sub Title" description="Parsed Sub Title like Episode Name" />
+          <SwitchGroupWide name="year" label="Year" />
+          <SwitchGroupWide name="month" label="Month" description="For daily releases" />
+          <SwitchGroupWide name="day" label="Day" description="For daily releases" />
+          <SwitchGroupWide name="source" label="Source" />
+          <SwitchGroupWide name="resolution" label="Resolution" />
+          <SwitchGroupWide name="codec" label="Codec" />
+          <SwitchGroupWide name="container" label="Container" />
+          <SwitchGroupWide name="dynamic_range" label="Dynamic Range (HDR,DV etc)" />
+          <SwitchGroupWide name="audio" label="Audio" />
+          <SwitchGroupWide name="group" label="Group" description="Release group" />
+          <SwitchGroupWide name="season" label="Season" />
+          <SwitchGroupWide name="episode" label="Episode" />
+          <SwitchGroupWide name="website" label="Website/Service" description="Services such as AMZN/HULU/NF" />
+          <SwitchGroupWide name="repack" label="Repack" />
+          <SwitchGroupWide name="proper" label="Proper" />
+          <SwitchGroupWide name="edition" label="Edition and Cut" />
+          <SwitchGroupWide name="language" label="Language and Region" />
+        </div>
+      )}
+    </SlideOver>
+  );
+}
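For context, these two forms are mounted from the Releases settings screen further down in this diff; a trimmed sketch of that wiring (using the repo's useToggle hook) looks roughly like this:

// Rough wiring sketch; follows how web/src/screens/settings/Releases.tsx in this diff mounts the add form.
import { useToggle } from "@hooks/hooks";
import { ReleaseProfileDuplicateAddForm } from "@forms/settings/ReleaseForms.tsx";

function AddProfileButton() {
  const [addPanelIsOpen, toggleAdd] = useToggle(false);
  return (
    <>
      <ReleaseProfileDuplicateAddForm isOpen={addPanelIsOpen} toggle={toggleAdd} />
      <button type="button" onClick={toggleAdd}>Add new</button>
    </>
  );
}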
@ -455,7 +455,8 @@ export const FilterDetails = () => {
           max_leechers: filter.max_leechers,
           indexers: filter.indexers || [],
           actions: filter.actions || [],
-          external: filter.external || []
+          external: filter.external || [],
+          release_profile_duplicate_id: filter.release_profile_duplicate_id,
         } as Filter}
         onSubmit={handleSubmit}
         enableReinitialize={true}
@ -6,7 +6,7 @@
 import { useSuspenseQuery } from "@tanstack/react-query";

 import { downloadsPerUnitOptions } from "@domain/constants";
-import { IndexersOptionsQueryOptions } from "@api/queries";
+import { IndexersOptionsQueryOptions, ReleaseProfileDuplicateList } from "@api/queries";

 import { DocsLink } from "@components/ExternalLink";
 import { FilterLayout, FilterPage, FilterSection } from "./_components";

@ -16,20 +16,27 @@ import {
   MultiSelectOption,
   NumberField,
   Select,
+  SelectFieldOption,
   SwitchGroup,
   TextField
 } from "@components/inputs";
 import * as CONSTS from "@domain/constants.ts";


 const MapIndexer = (indexer: Indexer) => (
   { label: indexer.name, value: indexer.id } as MultiSelectOption
 );

+const MapReleaseProfile = (profile: ReleaseProfileDuplicate) => (
+  { label: profile.name, value: profile.id } as SelectFieldOption
+);
+
 export const General = () => {
   const indexersQuery = useSuspenseQuery(IndexersOptionsQueryOptions())
   const indexerOptions = indexersQuery.data && indexersQuery.data.map(MapIndexer)

+  const duplicateProfilesQuery = useSuspenseQuery(ReleaseProfileDuplicateList())
+  const duplicateProfilesOptions = duplicateProfilesQuery.data && duplicateProfilesQuery.data.map(MapReleaseProfile)
+
   // const indexerOptions = data?.map(MapIndexer) ?? [];

   return (

@ -129,6 +136,13 @@ export const General = () => {
             </div>
           }
         />
+        <Select
+          name={`release_profile_duplicate_id`}
+          label="Skip Duplicates profile"
+          optionDefaultText="Select profile"
+          options={[{label: "Select profile", value: null}, ...duplicateProfilesOptions]}
+          tooltip={<div><p>Select the skip duplicate profile.</p></div>}
+        />
       </FilterLayout>

       <FilterLayout>
@ -106,9 +106,9 @@ function ListItem({ client }: DLSettingsItemProps) {
     <li>
       <div className="grid grid-cols-12 items-center py-2">
         <DownloadClientUpdateForm
-          client={client}
           isOpen={updateClientIsOpen}
           toggle={toggleUpdateClient}
+          data={client}
         />
         <div className="col-span-2 sm:col-span-1 pl-1 sm:pl-6 flex items-center">
           <Checkbox
@ -167,7 +167,7 @@ function ListItem({ feed }: ListItemProps) {

   return (
     <li key={feed.id}>
-      <FeedUpdateForm isOpen={updateFormIsOpen} toggle={toggleUpdateForm} feed={feed} />
+      <FeedUpdateForm isOpen={updateFormIsOpen} toggle={toggleUpdateForm} data={feed} />

       <div className="grid grid-cols-12 items-center text-sm font-medium text-gray-900 dark:text-gray-500">
         <div className="col-span-2 sm:col-span-1 pl-6 flex items-center">
@ -136,7 +136,7 @@ const ListItem = ({ indexer }: ListItemProps) => {
       <IndexerUpdateForm
         isOpen={updateIsOpen}
         toggle={toggleUpdate}
-        indexer={indexer}
+        data={indexer}
       />
       <div className="col-span-2 sm:col-span-1 flex pl-1 sm:pl-5 items-center">
         <Checkbox value={indexer.enabled ?? false} setValue={onToggleMutation} />
@ -237,7 +237,7 @@ const ListItem = ({ network, expanded }: ListItemProps) => {
       <IrcNetworkUpdateForm
         isOpen={updateIsOpen}
         toggle={toggleUpdate}
-        network={network}
+        data={network}
       />
       <div className="col-span-2 md:col-span-1 flex pl-1 sm:pl-2.5 text-gray-500 dark:text-gray-400">
         <Checkbox
@ -105,7 +105,7 @@ function ListItem({ notification }: ListItemProps) {

   return (
     <li key={notification.id} className="text-gray-500 dark:text-gray-400">
-      <NotificationUpdateForm isOpen={updateFormIsOpen} toggle={toggleUpdateForm} notification={notification} />
+      <NotificationUpdateForm isOpen={updateFormIsOpen} toggle={toggleUpdateForm} data={notification} />

       <div className="grid grid-cols-12 items-center py-2">
         <div className="col-span-2 sm:col-span-1 pl-1 py-0.5 sm:pl-6 flex items-center">
@ -4,7 +4,7 @@
  */

 import { useRef, useState } from "react";
-import { useMutation, useQueryClient, useQuery } from "@tanstack/react-query";
+import { useMutation, useQueryClient, useQuery, useSuspenseQuery } from "@tanstack/react-query";
 import { MultiSelect as RMSC } from "react-multi-select-component";
 import { AgeSelect } from "@components/inputs"

@ -15,21 +15,150 @@ import Toast from "@components/notifications/Toast";
 import { useToggle } from "@hooks/hooks";
 import { DeleteModal } from "@components/modals";
 import { Section } from "./_components";
+import { ReleaseProfileDuplicateList } from "@api/queries.ts";
+import { EmptySimple } from "@components/emptystates";
+import { PlusIcon } from "@heroicons/react/24/solid";
+import { ReleaseProfileDuplicateAddForm, ReleaseProfileDuplicateUpdateForm } from "@forms/settings/ReleaseForms.tsx";
+import { classNames } from "@utils";

 const ReleaseSettings = () => (
-  <Section
-    title="Releases"
-    description="Manage release history."
-  >
-    <div className="border border-red-500 rounded">
-      <div className="py-6 px-4 sm:p-6">
-        <DeleteReleases />
-      </div>
-    </div>
-  </Section>
+  <div className="lg:col-span-9">
+    <ReleaseProfileDuplicates/>
+
+    <div className="py-6 px-4 sm:p-6">
+      <div className="border border-red-500 rounded">
+        <div className="py-6 px-4 sm:p-6">
+          <DeleteReleases/>
+        </div>
+      </div>
+    </div>
+  </div>
 );

+interface ReleaseProfileProps {
+  profile: ReleaseProfileDuplicate;
+}
+
+function ReleaseProfileListItem({ profile }: ReleaseProfileProps) {
+  const [updatePanelIsOpen, toggleUpdatePanel] = useToggle(false);
+
+  return (
+    <li>
+      <div className="grid grid-cols-12 items-center py-2">
+        <ReleaseProfileDuplicateUpdateForm isOpen={updatePanelIsOpen} toggle={toggleUpdatePanel} data={profile}/>
+        <div
+          className="col-span-2 sm:col-span-2 lg:col-span-2 pl-4 sm:pl-4 pr-6 py-3 block flex-col text-sm font-medium text-gray-900 dark:text-white truncate"
+          title={profile.name}>
+          {profile.name}
+        </div>
+        <div className="col-span-9 sm:col-span-9 lg:col-span-9 pl-4 sm:pl-4 pr-6 py-3 flex gap-x-0.5 flex-row text-sm font-medium text-gray-900 dark:text-white truncate">
+          {profile.release_name && <EnabledPill value={profile.release_name} label="RLS" title="Release name" />}
+          {profile.hash && <EnabledPill value={profile.hash} label="Hash" title="Normalized hash of the release name. Use with Release name for exact match" />}
+          {profile.title && <EnabledPill value={profile.title} label="Title" title="Parsed title" />}
+          {profile.sub_title && <EnabledPill value={profile.sub_title} label="Sub Title" title="Parsed sub title like Episode name" />}
+          {profile.group && <EnabledPill value={profile.group} label="Group" title="Release group" />}
+          {profile.year && <EnabledPill value={profile.year} label="Year" title="Year" />}
+          {profile.month && <EnabledPill value={profile.month} label="Month" title="Month" />}
+          {profile.day && <EnabledPill value={profile.day} label="Day" title="Day" />}
+          {profile.source && <EnabledPill value={profile.source} label="Source" title="Source" />}
+          {profile.resolution && <EnabledPill value={profile.resolution} label="Resolution" title="Resolution" />}
+          {profile.codec && <EnabledPill value={profile.codec} label="Codec" title="Codec" />}
+          {profile.container && <EnabledPill value={profile.container} label="Container" title="Container" />}
+          {profile.dynamic_range && <EnabledPill value={profile.dynamic_range} label="Dynamic Range" title="Dynamic Range (HDR,DV)" />}
+          {profile.audio && <EnabledPill value={profile.audio} label="Audio" title="Audio formats" />}
+          {profile.season && <EnabledPill value={profile.season} label="Season" title="Season number" />}
+          {profile.episode && <EnabledPill value={profile.episode} label="Episode" title="Episode number" />}
+          {profile.website && <EnabledPill value={profile.website} label="Website" title="Website/Service" />}
+          {profile.proper && <EnabledPill value={profile.proper} label="Proper" title="Scene proper" />}
+          {profile.repack && <EnabledPill value={profile.repack} label="Repack" title="Scene repack" />}
+          {profile.edition && <EnabledPill value={profile.edition} label="Edition" title="Edition (eg. Collectors Edition) and Cut (eg. Directors Cut)" />}
+          {profile.language && <EnabledPill value={profile.language} label="Language" title="Language and Region" />}
+        </div>
+        <div className="col-span-1 pl-0.5 whitespace-nowrap text-center text-sm font-medium">
+          <span className="text-blue-600 dark:text-gray-300 hover:text-blue-900 cursor-pointer"
+                onClick={toggleUpdatePanel}
+          >
+            Edit
+          </span>
+        </div>
+      </div>
+    </li>
+  )
+}
+
+interface PillProps {
+  value: boolean;
+  label: string;
+  title: string;
+}
+
+const EnabledPill = ({ value, label, title }: PillProps) => (
+  <span title={title} className={classNames("inline-flex items-center rounded-md px-1.5 py-0.5 text-xs font-medium ring-1 ring-inset", value ? "bg-blue-100 dark:bg-blue-400/10 text-blue-700 dark:text-blue-400 ring-blue-700/10 dark:ring-blue-400/30" : "bg-gray-100 dark:bg-gray-400/10 text-gray-600 dark:text-gray-400 ring-gray-500/10 dark:ring-gray-400/30")}>
+    {label}
+  </span>
+);
+
+function ReleaseProfileDuplicates() {
+  const [addPanelIsOpen, toggleAdd] = useToggle(false);
+
+  const releaseProfileQuery = useSuspenseQuery(ReleaseProfileDuplicateList())
+
+  return (
+    <Section
+      title="Release Duplicate Profiles"
+      description="Manage duplicate profiles."
+      rightSide={
+        <button
+          type="button"
+          className="relative inline-flex items-center px-4 py-2 border border-transparent shadow-sm text-sm font-medium rounded-md text-white bg-blue-600 dark:bg-blue-600 hover:bg-blue-700 dark:hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500 dark:focus:ring-blue-500"
+          onClick={toggleAdd}
+        >
+          <PlusIcon className="h-5 w-5 mr-1"/>
+          Add new
+        </button>
+      }
+    >
+      <ReleaseProfileDuplicateAddForm isOpen={addPanelIsOpen} toggle={toggleAdd}/>
+
+      <div className="flex flex-col">
+        {releaseProfileQuery.data.length > 0 ? (
+          <ul className="min-w-full relative">
+            <li className="grid grid-cols-12 border-b border-gray-200 dark:border-gray-700">
+              <div
+                className="col-span-2 sm:col-span-1 pl-1 sm:pl-4 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">Name
+              </div>
+              {/*<div*/}
+              {/*  className="col-span-6 sm:col-span-4 lg:col-span-4 pl-10 sm:pl-12 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider cursor-pointer"*/}
+              {/*  // onClick={() => sortedClients.requestSort("name")}*/}
+              {/*>*/}
+              {/*  Name*/}
+              {/*</div>*/}
+
+              {/*<div*/}
+              {/*  className="hidden sm:flex col-span-4 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider cursor-pointer"*/}
+              {/*  onClick={() => sortedClients.requestSort("host")}*/}
+              {/*>*/}
+              {/*  Host <span className="sort-indicator">{sortedClients.getSortIndicator("host")}</span>*/}
+              {/*</div>*/}
+              {/*<div className="hidden sm:flex col-span-3 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider cursor-pointer"*/}
+              {/*  onClick={() => sortedClients.requestSort("type")}*/}
+              {/*>*/}
+              {/*  Type <span className="sort-indicator">{sortedClients.getSortIndicator("type")}</span>*/}
+              {/*</div>*/}
+            </li>
+            {releaseProfileQuery.data.map((profile) => (
+              <ReleaseProfileListItem key={profile.id} profile={profile}/>
+            ))}
+          </ul>
+        ) : (
+          <EmptySimple title="No duplicate release profiles" subtitle="" buttonText="Add new profile"
+                       buttonAction={toggleAdd}/>
+        )}
+      </div>
+    </Section>
+  )
+}
+
 const getDurationLabel = (durationValue: number): string => {
   const durationOptions: Record<number, string> = {

@ -87,11 +216,12 @@ function DeleteReleases() {
     onSuccess: () => {
       if (parsedDuration === 0) {
         toast.custom((t) => (
-          <Toast type="success" body={"All releases based on criteria were deleted."} t={t} />
+          <Toast type="success" body={"All releases based on criteria were deleted."} t={t}/>
         ));
       } else {
         toast.custom((t) => (
-          <Toast type="success" body={`Releases older than ${getDurationLabel(parsedDuration ?? 0)} were deleted.`} t={t} />
+          <Toast type="success" body={`Releases older than ${getDurationLabel(parsedDuration ?? 0)} were deleted.`}
+                 t={t}/>
         ));
       }

@ -101,11 +231,15 @@ function DeleteReleases() {

   const deleteOlderReleases = () => {
     if (parsedDuration === undefined || isNaN(parsedDuration) || parsedDuration < 0) {
-      toast.custom((t) => <Toast type="error" body={"Please select a valid age."} t={t} />);
+      toast.custom((t) => <Toast type="error" body={"Please select a valid age."} t={t}/>);
       return;
     }

-    deleteOlderMutation.mutate({ olderThan: parsedDuration, indexers: indexers.map(i => i.value), releaseStatuses: releaseStatuses.map(rs => rs.value) });
+    deleteOlderMutation.mutate({
+      olderThan: parsedDuration,
+      indexers: indexers.map(i => i.value),
+      releaseStatuses: releaseStatuses.map(rs => rs.value)
+    });
   };

   return (

@ -122,19 +256,22 @@ function DeleteReleases() {
       <div className="flex flex-col gap-2 w-full">
         <div>
           <h2 className="text-lg leading-4 font-bold text-gray-900 dark:text-white">Delete release history</h2>
-          <p className="text-sm mt-1 text-gray-500 dark:text-gray-400">
-            Select the criteria below to permanently delete release history records that are older than the chosen age and optionally match the selected indexers and release statuses:
-            <ul className="list-disc pl-5 mt-2">
+          <p className="text-sm mt-2 text-gray-500 dark:text-gray-400">
+            Select the criteria below to permanently delete release history records that are older than the chosen age
+            and optionally match the selected indexers and release statuses:
+          </p>
+          <ul className="list-disc pl-5 my-4 text-sm text-gray-500 dark:text-gray-400">
             <li>
-              Older than (e.g., 6 months - all records older than 6 months will be deleted) - <strong className="text-gray-600 dark:text-gray-300">Required</strong>
+              Older than (e.g., 6 months - all records older than 6 months will be deleted) - <strong
+              className="text-gray-600 dark:text-gray-300">Required</strong>
             </li>
             <li>Indexers - Optional (if none selected, applies to all indexers)</li>
             <li>Release statuses - Optional (if none selected, applies to all release statuses)</li>
           </ul>
-          <p className="mt-2 text-red-600 dark:text-red-500">
-            <strong>Warning:</strong> If no indexers or release statuses are selected, all release history records older than the selected age will be permanently deleted, regardless of indexer or status.
-          </p>
+          <span className="pt-2 text-red-600 dark:text-red-500">
+            <strong>Warning:</strong> If no indexers or release statuses are selected, all release history records
+            older than the selected age will be permanently deleted, regardless of indexer or status.
+          </span>
         </div>

         <div className="flex flex-col sm:flex-row gap-2 pt-4 items-center text-sm">

@ -146,19 +283,23 @@ function DeleteReleases() {
               <span className="text-red-600 dark:text-red-500"> *</span>
             </>
           ),
-          content: <AgeSelect duration={duration} setDuration={setDuration} setParsedDuration={setParsedDuration} />
+          content: <AgeSelect duration={duration} setDuration={setDuration} setParsedDuration={setParsedDuration}/>
         },
         {
           label: 'Indexers:',
-          content: <RMSC options={indexerOptions?.map(option => ({ value: option.identifier, label: option.name })) || []} value={indexers} onChange={setIndexers} labelledBy="Select indexers" />
+          content: <RMSC
+            options={indexerOptions?.map(option => ({ value: option.identifier, label: option.name })) || []}
+            value={indexers} onChange={setIndexers} labelledBy="Select indexers"/>
         },
         {
           label: 'Release statuses:',
-          content: <RMSC options={releaseStatusOptions} value={releaseStatuses} onChange={setReleaseStatuses} labelledBy="Select release statuses" />
+          content: <RMSC options={releaseStatusOptions} value={releaseStatuses} onChange={setReleaseStatuses}
+                         labelledBy="Select release statuses"/>
         }
       ].map((item, index) => (
         <div key={index} className="flex flex-col w-full">
-          <p className="text-xs font-bold text-gray-800 dark:text-gray-100 uppercase p-1 cursor-default">{item.label}</p>
+          <p
+            className="text-xs font-bold text-gray-800 dark:text-gray-100 uppercase p-1 cursor-default">{item.label}</p>
           {item.content}
         </div>
       ))}
web/src/types/Filter.d.ts (vendored, 1 line added)
@ -82,6 +82,7 @@ interface Filter {
   actions: Action[];
   indexers: Indexer[];
   external: ExternalFilter[];
+  release_profile_duplicate_id?: number;
 }

 interface Action {
web/src/types/Release.d.ts (vendored, 27 lines added)
@ -75,3 +75,30 @@ interface DeleteParams {
   indexers?: string[];
   releaseStatuses?: string[];
 }
+
+interface ReleaseProfileDuplicate {
+  id: number;
+  name: string;
+  protocol: boolean;
+  release_name: boolean;
+  hash: boolean;
+  title: boolean;
+  sub_title: boolean;
+  year: boolean;
+  month: boolean;
+  day: boolean;
+  source: boolean;
+  resolution: boolean;
+  codec: boolean;
+  container: boolean;
+  dynamic_range: boolean;
+  audio: boolean;
+  group: boolean;
+  season: boolean;
+  episode: boolean;
+  website: boolean;
+  proper: boolean;
+  repack: boolean;
+  edition: boolean;
+  language: boolean;
+}
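Purely as an illustration (not part of the commit), a value of this shape that flags a release as a duplicate when title, season and episode all match might look like the following; the name and field choices are hypothetical:

// Hypothetical example object of the ReleaseProfileDuplicate shape declared above.
const exampleTvProfile: ReleaseProfileDuplicate = {
  id: 0,
  name: "TV by episode",
  protocol: false,
  release_name: false,
  hash: false,
  title: true,       // match on parsed title
  sub_title: false,
  year: false,
  month: false,
  day: false,
  source: false,
  resolution: false,
  codec: false,
  container: false,
  dynamic_range: false,
  audio: false,
  group: false,
  season: true,      // match on season number
  episode: true,     // match on episode number
  website: false,
  proper: false,
  repack: false,
  edition: false,
  language: false,
};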