refactor(filters): move rejections from release to filter (#1272)

* refactor: size check

* refactor(filters): move CheckFilter rejections from release to filter

---------

Co-authored-by: ze0s <ze0s@riseup.net>
Frederick Robinson, 2023-12-01 12:04:23 -05:00, committed by GitHub
parent df5fb37c9c
commit 365055fbe1
5 changed files with 186 additions and 161 deletions


@@ -431,11 +431,11 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
 }
 
 // FindByIndexerIdentifier find active filters with active indexer only
-func (r *FilterRepo) FindByIndexerIdentifier(ctx context.Context, indexer string) ([]domain.Filter, error) {
+func (r *FilterRepo) FindByIndexerIdentifier(ctx context.Context, indexer string) ([]*domain.Filter, error) {
 	return r.findByIndexerIdentifier(ctx, indexer)
 }
 
-func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string) ([]domain.Filter, error) {
+func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string) ([]*domain.Filter, error) {
 	queryBuilder := r.db.squirrel.
 		Select(
 			"f.id",
@@ -537,9 +537,7 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
 	defer rows.Close()
 
-	var filters []domain.Filter
-
-	externalMap := make(map[int][]domain.FilterExternal)
+	filtersMap := make(map[int]*domain.Filter)
 
 	for rows.Next() {
 		var f domain.Filter
@@ -671,6 +669,14 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
 		f.Scene = scene.Bool
 		f.Freeleech = freeleech.Bool
 
+		f.Rejections = []string{}
+
+		filter, ok := filtersMap[f.ID]
+		if !ok {
+			filter = &f
+			filtersMap[f.ID] = filter
+		}
+
 		if extId.Valid {
 			external := domain.FilterExternal{
 				ID: int(extId.Int32),
@@ -691,21 +697,15 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
 				WebhookRetryDelaySeconds: int(extWebhookDelaySeconds.Int32),
 				FilterId: int(extFilterId.Int32),
 			}
-			externalMap[external.FilterId] = append(externalMap[external.FilterId], external)
-		}
-
-		filters = append(filters, f)
-	}
-
-	for i, filter := range filters {
-		v, ok := externalMap[filter.ID]
-		if !ok {
-			continue
-		}
-		filter.External = v
-		filters[i] = filter
-	}
+			filter.External = append(filter.External, external)
+		}
+	}
+
+	var filters []*domain.Filter
+	for _, filter := range filtersMap {
+		filter := filter
+		filters = append(filters, filter)
+	}
 
 	return filters, nil
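
The repository change above collapses the old two-pass approach (a filter slice plus an externalMap keyed by filter ID) into a single map of *domain.Filter, so each joined row either creates the filter entry or appends another external filter to it, and the function now hands back pointers. A minimal, self-contained sketch of that grouping pattern, using simplified stand-in types rather than the real domain structs:

```go
package main

import "fmt"

// Simplified stand-ins for the domain types, just to illustrate the grouping
// pattern; the real code scans many more columns per row.
type FilterExternal struct{ ID int }

type Filter struct {
	ID       int
	External []FilterExternal
}

func main() {
	// Rows as a LEFT JOIN might return them: the same filter ID repeats once
	// per attached external filter, and 0 stands for "no external on this row".
	rows := []struct{ filterID, externalID int }{{1, 0}, {2, 10}, {2, 11}}

	filtersMap := make(map[int]*Filter)
	for _, row := range rows {
		filter, ok := filtersMap[row.filterID]
		if !ok {
			filter = &Filter{ID: row.filterID}
			filtersMap[row.filterID] = filter
		}
		if row.externalID != 0 {
			filter.External = append(filter.External, FilterExternal{ID: row.externalID})
		}
	}

	var filters []*Filter
	for _, filter := range filtersMap {
		filters = append(filters, filter)
	}

	fmt.Println(len(filters), len(filtersMap[2].External)) // 2 2
}
```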


@@ -5,13 +5,13 @@ package domain
 import (
 	"context"
-	"errors"
 	"fmt"
 	"regexp"
 	"strconv"
 	"strings"
 	"time"
 
+	"github.com/autobrr/autobrr/pkg/errors"
 	"github.com/autobrr/autobrr/pkg/wildcard"
 
 	"github.com/dustin/go-humanize"
@@ -26,7 +26,7 @@ type FilterRepo interface {
 	ListFilters(ctx context.Context) ([]Filter, error)
 	Find(ctx context.Context, params FilterQueryParams) ([]Filter, error)
 	FindByID(ctx context.Context, filterID int) (*Filter, error)
-	FindByIndexerIdentifier(ctx context.Context, indexer string) ([]Filter, error)
+	FindByIndexerIdentifier(ctx context.Context, indexer string) ([]*Filter, error)
 	FindExternalFiltersByID(ctx context.Context, filterId int) ([]FilterExternal, error)
 	Store(ctx context.Context, filter *Filter) error
 	Update(ctx context.Context, filter *Filter) error
@@ -137,6 +137,7 @@ type Filter struct {
 	External []FilterExternal `json:"external,omitempty"`
 	Indexers []Indexer `json:"indexers"`
 	Downloads *FilterDownloads `json:"-"`
+	Rejections []string `json:"-"`
 }
 
 type FilterExternal struct {
@@ -245,7 +246,7 @@ type FilterUpdate struct {
 	Indexers []Indexer `json:"indexers,omitempty"`
 }
 
-func (f Filter) Validate() error {
+func (f *Filter) Validate() error {
 	if f.Name == "" {
 		return errors.New("validation: name can't be empty")
 	}
@@ -257,14 +258,11 @@ func (f Filter) Validate() error {
 	return nil
 }
 
-func (f Filter) CheckFilter(r *Release) ([]string, bool) {
-	// reset rejections first to clean previous checks
-	r.resetRejections()
-
+func (f *Filter) CheckFilter(r *Release) ([]string, bool) {
 	// max downloads check. If reached return early
 	if f.MaxDownloads > 0 && !f.checkMaxDownloads(f.MaxDownloads, f.MaxDownloadsUnit) {
-		r.addRejectionF("max downloads (%d) this (%v) reached", f.MaxDownloads, f.MaxDownloadsUnit)
-		return r.Rejections, false
+		f.addRejectionF("max downloads (%d) this (%v) reached", f.MaxDownloads, f.MaxDownloadsUnit)
+		return f.Rejections, false
 	}
 
 	if len(f.Bonus) > 0 && !sliceContainsSlice(r.Bonus, f.Bonus) {
@@ -272,136 +270,136 @@ func (f Filter) CheckFilter(r *Release) ([]string, bool) {
 	}
 
 	if f.Freeleech && r.Freeleech != f.Freeleech {
-		r.addRejection("wanted: freeleech")
+		f.addRejection("wanted: freeleech")
 	}
 
 	if f.FreeleechPercent != "" && !checkFreeleechPercent(r.FreeleechPercent, f.FreeleechPercent) {
-		r.addRejectionF("freeleech percent not matching. got: %v want: %v", r.FreeleechPercent, f.FreeleechPercent)
+		f.addRejectionF("freeleech percent not matching. got: %v want: %v", r.FreeleechPercent, f.FreeleechPercent)
 	}
 
 	if len(f.Origins) > 0 && !containsSlice(r.Origin, f.Origins) {
-		r.addRejectionF("origin not matching. got: %v want: %v", r.Origin, f.Origins)
+		f.addRejectionF("origin not matching. got: %v want: %v", r.Origin, f.Origins)
 	}
 
 	if len(f.ExceptOrigins) > 0 && containsSlice(r.Origin, f.ExceptOrigins) {
-		r.addRejectionF("except origin not matching. got: %v unwanted: %v", r.Origin, f.ExceptOrigins)
+		f.addRejectionF("except origin not matching. got: %v unwanted: %v", r.Origin, f.ExceptOrigins)
 	}
 
 	// title is the parsed title
 	if f.Shows != "" && !contains(r.Title, f.Shows) {
-		r.addRejectionF("shows not matching. got: %v want: %v", r.Title, f.Shows)
+		f.addRejectionF("shows not matching. got: %v want: %v", r.Title, f.Shows)
 	}
 
 	if f.Seasons != "" && !containsIntStrings(r.Season, f.Seasons) {
-		r.addRejectionF("season not matching. got: %d want: %v", r.Season, f.Seasons)
+		f.addRejectionF("season not matching. got: %d want: %v", r.Season, f.Seasons)
 	}
 
 	if f.Episodes != "" && !containsIntStrings(r.Episode, f.Episodes) {
-		r.addRejectionF("episodes not matching. got: %d want: %v", r.Episode, f.Episodes)
+		f.addRejectionF("episodes not matching. got: %d want: %v", r.Episode, f.Episodes)
 	}
 
 	// matchRelease
 	// match against regex
 	if f.UseRegex {
 		if f.MatchReleases != "" && !matchRegex(r.TorrentName, f.MatchReleases) {
-			r.addRejectionF("match release regex not matching. got: %v want: %v", r.TorrentName, f.MatchReleases)
+			f.addRejectionF("match release regex not matching. got: %v want: %v", r.TorrentName, f.MatchReleases)
 		}
 
 		if f.ExceptReleases != "" && matchRegex(r.TorrentName, f.ExceptReleases) {
-			r.addRejectionF("except releases regex: unwanted release. got: %v want: %v", r.TorrentName, f.ExceptReleases)
+			f.addRejectionF("except releases regex: unwanted release. got: %v want: %v", r.TorrentName, f.ExceptReleases)
 		}
 	} else {
 		if f.MatchReleases != "" && !containsFuzzy(r.TorrentName, f.MatchReleases) {
-			r.addRejectionF("match release not matching. got: %v want: %v", r.TorrentName, f.MatchReleases)
+			f.addRejectionF("match release not matching. got: %v want: %v", r.TorrentName, f.MatchReleases)
 		}
 
 		if f.ExceptReleases != "" && containsFuzzy(r.TorrentName, f.ExceptReleases) {
-			r.addRejectionF("except releases: unwanted release. got: %v want: %v", r.TorrentName, f.ExceptReleases)
+			f.addRejectionF("except releases: unwanted release. got: %v want: %v", r.TorrentName, f.ExceptReleases)
 		}
 	}
 
 	if f.MatchReleaseGroups != "" && !contains(r.Group, f.MatchReleaseGroups) {
-		r.addRejectionF("release groups not matching. got: %v want: %v", r.Group, f.MatchReleaseGroups)
+		f.addRejectionF("release groups not matching. got: %v want: %v", r.Group, f.MatchReleaseGroups)
 	}
 
 	if f.ExceptReleaseGroups != "" && contains(r.Group, f.ExceptReleaseGroups) {
-		r.addRejectionF("unwanted release group. got: %v unwanted: %v", r.Group, f.ExceptReleaseGroups)
+		f.addRejectionF("unwanted release group. got: %v unwanted: %v", r.Group, f.ExceptReleaseGroups)
 	}
 
 	// check raw releaseTags string
 	if f.UseRegexReleaseTags {
 		if f.MatchReleaseTags != "" && !matchRegex(r.ReleaseTags, f.MatchReleaseTags) {
-			r.addRejectionF("match release tags regex not matching. got: %v want: %v", r.ReleaseTags, f.MatchReleaseTags)
+			f.addRejectionF("match release tags regex not matching. got: %v want: %v", r.ReleaseTags, f.MatchReleaseTags)
 		}
 
 		if f.ExceptReleaseTags != "" && matchRegex(r.ReleaseTags, f.ExceptReleaseTags) {
-			r.addRejectionF("except release tags regex: unwanted release. got: %v want: %v", r.ReleaseTags, f.ExceptReleaseTags)
+			f.addRejectionF("except release tags regex: unwanted release. got: %v want: %v", r.ReleaseTags, f.ExceptReleaseTags)
 		}
 	} else {
 		if f.MatchReleaseTags != "" && !containsFuzzy(r.ReleaseTags, f.MatchReleaseTags) {
-			r.addRejectionF("match release tags not matching. got: %v want: %v", r.ReleaseTags, f.MatchReleaseTags)
+			f.addRejectionF("match release tags not matching. got: %v want: %v", r.ReleaseTags, f.MatchReleaseTags)
 		}
 
 		if f.ExceptReleaseTags != "" && containsFuzzy(r.ReleaseTags, f.ExceptReleaseTags) {
-			r.addRejectionF("except release tags: unwanted release. got: %v want: %v", r.ReleaseTags, f.ExceptReleaseTags)
+			f.addRejectionF("except release tags: unwanted release. got: %v want: %v", r.ReleaseTags, f.ExceptReleaseTags)
 		}
 	}
 
 	if f.MatchUploaders != "" && !contains(r.Uploader, f.MatchUploaders) {
-		r.addRejectionF("uploaders not matching. got: %v want: %v", r.Uploader, f.MatchUploaders)
+		f.addRejectionF("uploaders not matching. got: %v want: %v", r.Uploader, f.MatchUploaders)
 	}
 
 	if f.ExceptUploaders != "" && contains(r.Uploader, f.ExceptUploaders) {
-		r.addRejectionF("unwanted uploaders. got: %v unwanted: %v", r.Uploader, f.ExceptUploaders)
+		f.addRejectionF("unwanted uploaders. got: %v unwanted: %v", r.Uploader, f.ExceptUploaders)
 	}
 
 	if len(f.MatchLanguage) > 0 && !sliceContainsSlice(r.Language, f.MatchLanguage) {
-		r.addRejectionF("language not matching. got: %v want: %v", r.Language, f.MatchLanguage)
+		f.addRejectionF("language not matching. got: %v want: %v", r.Language, f.MatchLanguage)
 	}
 
 	if len(f.ExceptLanguage) > 0 && sliceContainsSlice(r.Language, f.ExceptLanguage) {
-		r.addRejectionF("language unwanted. got: %v want: %v", r.Language, f.ExceptLanguage)
+		f.addRejectionF("language unwanted. got: %v want: %v", r.Language, f.ExceptLanguage)
 	}
 
 	if len(f.Resolutions) > 0 && !containsSlice(r.Resolution, f.Resolutions) {
-		r.addRejectionF("resolution not matching. got: %v want: %v", r.Resolution, f.Resolutions)
+		f.addRejectionF("resolution not matching. got: %v want: %v", r.Resolution, f.Resolutions)
 	}
 
 	if len(f.Codecs) > 0 && !sliceContainsSlice(r.Codec, f.Codecs) {
-		r.addRejectionF("codec not matching. got: %v want: %v", r.Codec, f.Codecs)
+		f.addRejectionF("codec not matching. got: %v want: %v", r.Codec, f.Codecs)
 	}
 
 	if len(f.Sources) > 0 && !containsSlice(r.Source, f.Sources) {
-		r.addRejectionF("source not matching. got: %v want: %v", r.Source, f.Sources)
+		f.addRejectionF("source not matching. got: %v want: %v", r.Source, f.Sources)
 	}
 
 	if len(f.Containers) > 0 && !containsSlice(r.Container, f.Containers) {
-		r.addRejectionF("container not matching. got: %v want: %v", r.Container, f.Containers)
+		f.addRejectionF("container not matching. got: %v want: %v", r.Container, f.Containers)
 	}
 
 	// HDR is parsed into the Codec slice from rls
 	if len(f.MatchHDR) > 0 && !matchHDR(r.HDR, f.MatchHDR) {
-		r.addRejectionF("hdr not matching. got: %v want: %v", r.HDR, f.MatchHDR)
+		f.addRejectionF("hdr not matching. got: %v want: %v", r.HDR, f.MatchHDR)
 	}
 
 	// HDR is parsed into the Codec slice from rls
 	if len(f.ExceptHDR) > 0 && matchHDR(r.HDR, f.ExceptHDR) {
-		r.addRejectionF("hdr unwanted. got: %v want: %v", r.HDR, f.ExceptHDR)
+		f.addRejectionF("hdr unwanted. got: %v want: %v", r.HDR, f.ExceptHDR)
 	}
 
 	// Other is parsed into the Other slice from rls
 	if len(f.MatchOther) > 0 && !sliceContainsSlice(r.Other, f.MatchOther) {
-		r.addRejectionF("match other not matching. got: %v want: %v", r.Other, f.MatchOther)
+		f.addRejectionF("match other not matching. got: %v want: %v", r.Other, f.MatchOther)
 	}
 
 	// Other is parsed into the Other slice from rls
 	if len(f.ExceptOther) > 0 && sliceContainsSlice(r.Other, f.ExceptOther) {
-		r.addRejectionF("except other unwanted. got: %v unwanted: %v", r.Other, f.ExceptOther)
+		f.addRejectionF("except other unwanted. got: %v unwanted: %v", r.Other, f.ExceptOther)
 	}
 
 	if f.Years != "" && !containsIntStrings(r.Year, f.Years) {
-		r.addRejectionF("year not matching. got: %d want: %v", r.Year, f.Years)
+		f.addRejectionF("year not matching. got: %d want: %v", r.Year, f.Years)
 	}
 
 	if f.MatchCategories != "" {
@@ -411,7 +409,7 @@ func (f Filter) CheckFilter(r *Release) ([]string, bool) {
 			categories = append(categories, r.Category)
 		}
 
 		if !contains(r.Category, f.MatchCategories) && !containsAny(categories, f.MatchCategories) {
-			r.addRejectionF("category not matching. got: %v want: %v", strings.Join(categories, ","), f.MatchCategories)
+			f.addRejectionF("category not matching. got: %v want: %v", strings.Join(categories, ","), f.MatchCategories)
 		}
 	}
@@ -422,99 +420,99 @@ func (f Filter) CheckFilter(r *Release) ([]string, bool) {
 			categories = append(categories, r.Category)
 		}
 
 		if contains(r.Category, f.ExceptCategories) && containsAny(categories, f.ExceptCategories) {
-			r.addRejectionF("category unwanted. got: %v unwanted: %v", strings.Join(categories, ","), f.ExceptCategories)
+			f.addRejectionF("category unwanted. got: %v unwanted: %v", strings.Join(categories, ","), f.ExceptCategories)
 		}
 	}
 
 	if len(f.MatchReleaseTypes) > 0 && !containsSlice(r.Category, f.MatchReleaseTypes) {
-		r.addRejectionF("release type not matching. got: %v want: %v", r.Category, f.MatchReleaseTypes)
+		f.addRejectionF("release type not matching. got: %v want: %v", r.Category, f.MatchReleaseTypes)
 	}
 
-	if (f.MinSize != "" || f.MaxSize != "") && !f.checkSizeFilter(r, f.MinSize, f.MaxSize) {
-		r.addRejectionF("size not matching. got: %v want min: %v max: %v", r.Size, f.MinSize, f.MaxSize)
+	if (f.MinSize != "" || f.MaxSize != "") && !f.checkSizeFilter(r) {
+		f.addRejectionF("size not matching. got: %v want min: %v max: %v", r.Size, f.MinSize, f.MaxSize)
 	}
 
 	if f.Tags != "" {
 		if f.TagsMatchLogic == "ALL" && !containsAll(r.Tags, f.Tags) {
-			r.addRejectionF("tags not matching. got: %v want(all): %v", r.Tags, f.Tags)
+			f.addRejectionF("tags not matching. got: %v want(all): %v", r.Tags, f.Tags)
 		} else if !containsAny(r.Tags, f.Tags) { // TagsMatchLogic is set to "" by default, this makes sure that "" and "ANY" are treated the same way.
-			r.addRejectionF("tags not matching. got: %v want: %v", r.Tags, f.Tags)
+			f.addRejectionF("tags not matching. got: %v want: %v", r.Tags, f.Tags)
 		}
 	}
 
 	if f.ExceptTags != "" {
 		if f.ExceptTagsMatchLogic == "ALL" && containsAll(r.Tags, f.ExceptTags) {
-			r.addRejectionF("tags unwanted. got: %v don't want: %v", r.Tags, f.ExceptTags)
+			f.addRejectionF("tags unwanted. got: %v don't want: %v", r.Tags, f.ExceptTags)
 		} else if containsAny(r.Tags, f.ExceptTags) { // ExceptTagsMatchLogic is set to "" by default, this makes sure that "" and "ANY" are treated the same way.
-			r.addRejectionF("tags unwanted. got: %v don't want: %v", r.Tags, f.ExceptTags)
+			f.addRejectionF("tags unwanted. got: %v don't want: %v", r.Tags, f.ExceptTags)
 		}
 	}
 
 	if len(f.Artists) > 0 && !contains(r.Artists, f.Artists) {
-		r.addRejectionF("artists not matching. got: %v want: %v", r.Artists, f.Artists)
+		f.addRejectionF("artists not matching. got: %v want: %v", r.Artists, f.Artists)
 	}
 
 	if len(f.Albums) > 0 && !contains(r.Title, f.Albums) {
-		r.addRejectionF("albums not matching. got: %v want: %v", r.Title, f.Albums)
+		f.addRejectionF("albums not matching. got: %v want: %v", r.Title, f.Albums)
 	}
 
 	// Perfect flac requires Cue, Log, Log Score 100, FLAC and 24bit Lossless
 	if f.PerfectFlac && !f.isPerfectFLAC(r) {
-		r.addRejectionF("wanted: perfect flac. got: %v", r.Audio)
+		f.addRejectionF("wanted: perfect flac. got: %v", r.Audio)
 	}
 
 	if len(f.Formats) > 0 && !sliceContainsSlice(r.Audio, f.Formats) {
-		r.addRejectionF("formats not matching. got: %v want: %v", r.Audio, f.Formats)
+		f.addRejectionF("formats not matching. got: %v want: %v", r.Audio, f.Formats)
 	}
 
 	if len(f.Quality) > 0 && !sliceContainsSlice(r.Audio, f.Quality) {
-		r.addRejectionF("quality not matching. got: %v want: %v", r.Audio, f.Quality)
+		f.addRejectionF("quality not matching. got: %v want: %v", r.Audio, f.Quality)
 	}
 
 	if len(f.Media) > 0 && !containsSlice(r.Source, f.Media) {
-		r.addRejectionF("media not matching. got: %v want: %v", r.Source, f.Media)
+		f.addRejectionF("media not matching. got: %v want: %v", r.Source, f.Media)
 	}
 
 	if f.Cue && !containsAny(r.Audio, "Cue") {
-		r.addRejection("wanted: cue")
+		f.addRejection("wanted: cue")
 	}
 
 	if f.Log && !containsAny(r.Audio, "Log") {
-		r.addRejection("wanted: log")
+		f.addRejection("wanted: log")
 	}
 
 	if f.Log && f.LogScore != 0 && r.LogScore != f.LogScore {
-		r.addRejectionF("log score. got: %v want: %v", r.LogScore, f.LogScore)
+		f.addRejectionF("log score. got: %v want: %v", r.LogScore, f.LogScore)
 	}
 
 	// check description string
 	if f.UseRegexDescription {
 		if f.MatchDescription != "" && !matchRegex(r.Description, f.MatchDescription) {
-			r.addRejectionF("match description regex not matching. got: %v want: %v", r.Description, f.MatchDescription)
+			f.addRejectionF("match description regex not matching. got: %v want: %v", r.Description, f.MatchDescription)
 		}
 
 		if f.ExceptDescription != "" && matchRegex(r.Description, f.ExceptDescription) {
-			r.addRejectionF("except description regex: unwanted release. got: %v want: %v", r.Description, f.ExceptDescription)
+			f.addRejectionF("except description regex: unwanted release. got: %v want: %v", r.Description, f.ExceptDescription)
 		}
 	} else {
 		if f.MatchDescription != "" && !containsFuzzy(r.Description, f.MatchDescription) {
-			r.addRejectionF("match description not matching. got: %v want: %v", r.Description, f.MatchDescription)
+			f.addRejectionF("match description not matching. got: %v want: %v", r.Description, f.MatchDescription)
 		}
 
 		if f.ExceptDescription != "" && containsFuzzy(r.Description, f.ExceptDescription) {
-			r.addRejectionF("except description: unwanted release. got: %v want: %v", r.Description, f.ExceptDescription)
+			f.addRejectionF("except description: unwanted release. got: %v want: %v", r.Description, f.ExceptDescription)
 		}
 	}
 
-	if len(r.Rejections) > 0 {
-		return r.Rejections, false
+	if len(f.Rejections) > 0 {
+		return f.Rejections, false
 	}
 
 	return nil, true
 }
 
-func (f Filter) checkMaxDownloads(max int, perTimeUnit FilterMaxDownloadsUnit) bool {
+func (f *Filter) checkMaxDownloads(max int, perTimeUnit FilterMaxDownloadsUnit) bool {
 	if f.Downloads == nil {
 		return false
 	}
@@ -548,7 +546,7 @@ func (f Filter) checkMaxDownloads(max int, perTimeUnit FilterMaxDownloadsUnit) b
 }
 
 // isPerfectFLAC Perfect is "CD FLAC Cue Log 100% Lossless or 24bit Lossless"
-func (f Filter) isPerfectFLAC(r *Release) bool {
+func (f *Filter) isPerfectFLAC(r *Release) bool {
 	if !contains(r.Source, "CD") {
 		return false
 	}
@@ -573,7 +571,7 @@ func (f Filter) isPerfectFLAC(r *Release) bool {
 
 // checkSizeFilter compares the filter size limits to a release's size if it is
 // known from the announce line.
-func (f Filter) checkSizeFilter(r *Release, minSize string, maxSize string) bool {
+func (f *Filter) checkSizeFilter(r *Release) bool {
 	if r.Size == 0 {
 		r.AdditionalSizeCheckRequired = true
 		return true
@@ -581,19 +579,48 @@ func (f Filter) checkSizeFilter(r *Release, minSize string, maxSize string) bool
 		r.AdditionalSizeCheckRequired = false
 	}
 
-	sizeErr, err := f.CheckReleaseSize(r.Size)
+	sizeOK, err := f.CheckReleaseSize(r.Size)
 	if err != nil {
-		r.addRejectionF("size: error checking release size against filter: %+v", err)
+		f.addRejectionF("size: error checking release size against filter: %+v", err)
 		return false
 	}
 
-	if sizeErr != nil {
-		r.addRejectionF("%+v", sizeErr)
+	if !sizeOK {
 		return false
 	}
 
 	return true
 }
 
+func (f *Filter) addRejection(reason string) {
+	f.Rejections = append(f.Rejections, reason)
+}
+
+func (f *Filter) AddRejectionF(format string, v ...interface{}) {
+	f.addRejectionF(format, v...)
+}
+
+func (f *Filter) addRejectionF(format string, v ...interface{}) {
+	f.Rejections = append(f.Rejections, fmt.Sprintf(format, v...))
+}
+
+// ResetRejections reset rejections
+func (f *Filter) resetRejections() {
+	f.Rejections = []string{}
+}
+
+func (f *Filter) RejectionsString(trim bool) string {
+	if len(f.Rejections) > 0 {
+		out := strings.Join(f.Rejections, ", ")
+		if trim && len(out) > 1024 {
+			out = out[:1024]
+		}
+		return out
+	}
+	return ""
+}
+
 func matchRegex(tag string, filterList string) bool {
 	if tag == "" {
 		return false
@@ -923,21 +950,23 @@ func matchHDR(releaseValues []string, filterValues []string) bool {
 	return false
 }
 
-func (f Filter) CheckReleaseSize(releaseSize uint64) (sizeErr, err error) {
-	min, max, err := f.parsedSizeLimits()
+func (f *Filter) CheckReleaseSize(releaseSize uint64) (bool, error) {
+	minBytes, maxBytes, err := f.parsedSizeLimits()
 	if err != nil {
-		return err, err
+		return false, err
 	}
 
-	if min != nil && releaseSize <= *min {
-		return fmt.Errorf("release size %d bytes <= min size %d bytes", releaseSize, *min), nil
+	if minBytes != nil && releaseSize <= *minBytes {
+		f.addRejectionF("release size %d bytes smaller than filter min size %d bytes", releaseSize, *minBytes)
+		return false, nil
 	}
 
-	if max != nil && releaseSize >= *max {
-		return fmt.Errorf("release size %d bytes <= max size %d bytes", releaseSize, *max), nil
+	if maxBytes != nil && releaseSize >= *maxBytes {
+		f.addRejectionF("release size %d bytes is larger than filter max size %d bytes", releaseSize, *maxBytes)
+		return false, nil
 	}
 
-	return nil, nil
+	return true, nil
 }
 
 // parsedSizeLimits parses filter bytes limits (expressed as a string) into a
@@ -945,18 +974,18 @@ func (f Filter) CheckReleaseSize(releaseSize uint64) (sizeErr, err error) {
 // with "nil" representing "no limit". We break out filter size limit parsing
 // into a discrete step so that we can more easily check parsability at filter
 // creation time.
-func (f Filter) parsedSizeLimits() (*uint64, *uint64, error) {
-	min, err := parseBytes(f.MinSize)
+func (f *Filter) parsedSizeLimits() (*uint64, *uint64, error) {
+	minBytes, err := parseBytes(f.MinSize)
 	if err != nil {
-		return nil, nil, fmt.Errorf("trouble parsing min size: %w", err)
+		return nil, nil, errors.Wrap(err, "could not parse filter min size")
 	}
 
-	max, err := parseBytes(f.MaxSize)
+	maxBytes, err := parseBytes(f.MaxSize)
 	if err != nil {
-		return nil, nil, fmt.Errorf("trouble parsing max size: %w", err)
+		return nil, nil, errors.Wrap(err, "could not parse filter max size")
 	}
 
-	return min, max, nil
+	return minBytes, maxBytes, nil
 }
 
 // parseBytes parses a string representation of a file size into a number of
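
With the domain changes above, rejection reasons accumulate on the Filter itself and CheckReleaseSize now answers with a plain bool, keeping the error return for unparsable limits. A rough test-style sketch of that contract, assuming it sits inside the same domain package as the code it exercises; the field and method names come from the diff, everything else is illustrative:

```go
package domain

import "testing"

// Illustrative only: exercises the (bool, error) contract of CheckReleaseSize
// and the rejection bookkeeping that now lives on Filter.
func TestCheckReleaseSize_RecordsRejectionOnFilter(t *testing.T) {
	f := Filter{Name: "example", MinSize: "1GB", MaxSize: "10GB"}

	ok, err := f.CheckReleaseSize(500_000_000) // 500 MB, below the 1GB floor
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if ok {
		t.Fatal("expected the release to be rejected on min size")
	}
	if f.RejectionsString(false) == "" {
		t.Fatal("expected a rejection reason to be recorded on the filter")
	}
}
```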


@@ -2135,39 +2135,31 @@ func Test_validation(t *testing.T) {
 }
 
 func Test_checkSizeFilter(t *testing.T) {
-	type args struct {
-		minSize     string
-		maxSize     string
-		releaseSize uint64
-	}
 	tests := []struct {
 		name        string
 		filter      Filter
 		releaseSize uint64
 		want        bool
-		wantErr     bool
+		wantErr     string
 	}{
-		{name: "test_1", filter: Filter{MinSize: "1GB", MaxSize: ""}, releaseSize: 100, want: false, wantErr: false},
-		{name: "test_2", filter: Filter{MinSize: "1GB", MaxSize: ""}, releaseSize: 2000000000, want: true, wantErr: false},
-		{name: "test_3", filter: Filter{MinSize: "1GB", MaxSize: "2.2GB"}, releaseSize: 2000000000, want: true, wantErr: false},
-		{name: "test_4", filter: Filter{MinSize: "1GB", MaxSize: "2GIB"}, releaseSize: 2000000000, want: true, wantErr: false},
-		{name: "test_5", filter: Filter{MinSize: "1GB", MaxSize: "2GB"}, releaseSize: 2000000010, want: false, wantErr: false},
-		{name: "test_6", filter: Filter{MinSize: "1GB", MaxSize: "2GB"}, releaseSize: 2000000000, want: false, wantErr: false},
-		{name: "test_7", filter: Filter{MaxSize: "2GB"}, releaseSize: 2500000000, want: false, wantErr: false},
-		{name: "test_8", filter: Filter{MaxSize: "20GB"}, releaseSize: 2500000000, want: true, wantErr: false},
-		{name: "test_9", filter: Filter{MinSize: "unparseable", MaxSize: "20GB"}, releaseSize: 2500000000, want: false, wantErr: true},
+		{name: "test_1", filter: Filter{MinSize: "1GB", MaxSize: ""}, releaseSize: 100, want: false},
+		{name: "test_2", filter: Filter{MinSize: "1GB", MaxSize: ""}, releaseSize: 2000000000, want: true},
+		{name: "test_3", filter: Filter{MinSize: "1GB", MaxSize: "2.2GB"}, releaseSize: 2000000000, want: true},
+		{name: "test_4", filter: Filter{MinSize: "1GB", MaxSize: "2GIB"}, releaseSize: 2000000000, want: true},
+		{name: "test_5", filter: Filter{MinSize: "1GB", MaxSize: "2GB"}, releaseSize: 2000000010, want: false},
+		{name: "test_6", filter: Filter{MinSize: "1GB", MaxSize: "2GB"}, releaseSize: 2000000000, want: false},
+		{name: "test_7", filter: Filter{MaxSize: "2GB"}, releaseSize: 2500000000, want: false},
+		{name: "test_8", filter: Filter{MaxSize: "20GB"}, releaseSize: 2500000000, want: true},
+		{name: "test_9", filter: Filter{MinSize: "unparseable", MaxSize: "20GB"}, releaseSize: 2500000000, want: false, wantErr: "could not parse filter min size: strconv.ParseFloat: parsing \"\": invalid syntax"},
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
-			checkErr, err := tt.filter.CheckReleaseSize(tt.releaseSize)
-			if err != nil != tt.wantErr {
-				t.Errorf("checkSizeFilter() error = %v, wantErr %v", err, tt.wantErr)
-				return
-			}
-			if checkErr == nil != tt.want {
-				t.Errorf("checkSizeFilter() got = %v, want %v", checkErr, tt.want)
+			got, err := tt.filter.CheckReleaseSize(tt.releaseSize)
+			if tt.wantErr != "" && assert.Error(t, err) {
+				assert.EqualErrorf(t, err, tt.wantErr, "Error should be: %v, got: %v", tt.wantErr, err)
 			}
+			assert.Equal(t, tt.want, got)
 		})
 	}
 }
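
The updated test above switches wantErr from a bool to the full expected error string and leans on testify for the assertions. A small sketch of that pattern in isolation, assuming the project's pkg/errors wraps messages as "context: cause", which is what the expected string in test_9 implies:

```go
package domain

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/autobrr/autobrr/pkg/errors"
)

// Sketch of the wantErr-as-string pattern: assert on the full wrapped error
// text instead of a bare "did it error" bool.
func TestWantErrAsString(t *testing.T) {
	cause := errors.New("strconv.ParseFloat: parsing \"\": invalid syntax")
	err := errors.Wrap(cause, "could not parse filter min size")

	wantErr := "could not parse filter min size: strconv.ParseFloat: parsing \"\": invalid syntax"
	if assert.Error(t, err) {
		assert.EqualErrorf(t, err, wantErr, "Error should be: %v, got: %v", wantErr, err)
	}
}
```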


@@ -30,9 +30,9 @@ import (
 type Service interface {
 	FindByID(ctx context.Context, filterID int) (*domain.Filter, error)
-	FindByIndexerIdentifier(ctx context.Context, indexer string) ([]domain.Filter, error)
+	FindByIndexerIdentifier(ctx context.Context, indexer string) ([]*domain.Filter, error)
 	Find(ctx context.Context, params domain.FilterQueryParams) ([]domain.Filter, error)
-	CheckFilter(ctx context.Context, f domain.Filter, release *domain.Release) (bool, error)
+	CheckFilter(ctx context.Context, f *domain.Filter, release *domain.Release) (bool, error)
 	ListFilters(ctx context.Context) ([]domain.Filter, error)
 	Store(ctx context.Context, filter *domain.Filter) error
 	Update(ctx context.Context, filter *domain.Filter) error
@@ -40,7 +40,7 @@ type Service interface {
 	Duplicate(ctx context.Context, filterID int) (*domain.Filter, error)
 	ToggleEnabled(ctx context.Context, filterID int, enabled bool) error
 	Delete(ctx context.Context, filterID int) error
-	AdditionalSizeCheck(ctx context.Context, f domain.Filter, release *domain.Release) (bool, error)
+	AdditionalSizeCheck(ctx context.Context, f *domain.Filter, release *domain.Release) (bool, error)
 	CanDownloadShow(ctx context.Context, release *domain.Release) (bool, error)
 	GetDownloadsByFilterId(ctx context.Context, filterID int) (*domain.FilterDownloads, error)
 }
@@ -134,7 +134,7 @@ func (s *service) FindByID(ctx context.Context, filterID int) (*domain.Filter, e
 	return filter, nil
 }
 
-func (s *service) FindByIndexerIdentifier(ctx context.Context, indexer string) ([]domain.Filter, error) {
+func (s *service) FindByIndexerIdentifier(ctx context.Context, indexer string) ([]*domain.Filter, error) {
 	// get filters for indexer
 	// we do not load actions here since we do not need it at this stage
 	// only load those after filter has matched
@@ -332,16 +332,17 @@ func (s *service) Delete(ctx context.Context, filterID int) error {
 	return nil
 }
 
-func (s *service) CheckFilter(ctx context.Context, f domain.Filter, release *domain.Release) (bool, error) {
+func (s *service) CheckFilter(ctx context.Context, f *domain.Filter, release *domain.Release) (bool, error) {
+	l := s.log.With().Str("method", "CheckFilter").Logger()
 
-	s.log.Trace().Msgf("filter.Service.CheckFilter: checking filter: %s %+v", f.Name, f)
-	s.log.Trace().Msgf("filter.Service.CheckFilter: checking filter: %s for release: %+v", f.Name, release)
+	l.Trace().Msgf("checking filter: %s %+v", f.Name, f)
+	l.Trace().Msgf("checking filter: %s for release: %+v", f.Name, release)
 
 	// do additional fetch to get download counts for filter
 	if f.MaxDownloads > 0 {
 		downloadCounts, err := s.repo.GetDownloadsByFilterId(ctx, f.ID)
 		if err != nil {
-			s.log.Error().Err(err).Msg("filter.Service.CheckFilter: error getting download counters for filter")
+			l.Error().Err(err).Msg("error getting download counters for filter")
 			return false, nil
 		}
 		f.Downloads = downloadCounts
@@ -349,7 +350,7 @@ func (s *service) CheckFilter(ctx context.Context, f domain.Filter, release *dom
 	rejections, matchedFilter := f.CheckFilter(release)
 	if len(rejections) > 0 {
-		s.log.Debug().Msgf("filter.Service.CheckFilter: (%s) for release: %v rejections: (%s)", f.Name, release.TorrentName, release.RejectionsString(true))
+		l.Debug().Msgf("(%s) for release: %v rejections: (%s)", f.Name, release.TorrentName, f.RejectionsString(true))
 		return false, nil
 	}
@@ -358,12 +359,12 @@ func (s *service) CheckFilter(ctx context.Context, f domain.Filter, release *dom
 	if f.SmartEpisode {
 		canDownloadShow, err := s.CanDownloadShow(ctx, release)
 		if err != nil {
-			s.log.Trace().Msgf("filter.Service.CheckFilter: failed smart episode check: %s", f.Name)
+			l.Trace().Msgf("failed smart episode check: %s", f.Name)
 			return false, nil
 		}
 
 		if !canDownloadShow {
-			s.log.Trace().Msgf("filter.Service.CheckFilter: failed smart episode check: %s", f.Name)
+			l.Trace().Msgf("failed smart episode check: %s", f.Name)
 			release.AddRejectionF("smart episode check: not new: (%s) season: %d ep: %d", release.Title, release.Season, release.Episode)
 			return false, nil
 		}
@@ -371,36 +372,36 @@ func (s *service) CheckFilter(ctx context.Context, f domain.Filter, release *dom
 	// if matched, do additional size check if needed, attach actions and return the filter
-	s.log.Debug().Msgf("filter.Service.CheckFilter: found and matched filter: %s", f.Name)
+	l.Debug().Msgf("found and matched filter: %s", f.Name)
 
 	// If size constraints are set in a filter and the indexer did not
 	// announce the size, we need to do an additional out of band size
 	// check.
 	if release.AdditionalSizeCheckRequired {
-		s.log.Debug().Msgf("filter.Service.CheckFilter: (%s) additional size check required", f.Name)
+		l.Debug().Msgf("(%s) additional size check required", f.Name)
 
 		ok, err := s.AdditionalSizeCheck(ctx, f, release)
 		if err != nil {
-			s.log.Error().Err(err).Msgf("filter.Service.CheckFilter: (%s) additional size check error", f.Name)
+			l.Error().Err(err).Msgf("(%s) additional size check error", f.Name)
 			return false, err
 		}
 
 		if !ok {
-			s.log.Trace().Msgf("filter.Service.CheckFilter: (%s) additional size check not matching what filter wanted", f.Name)
+			l.Trace().Msgf("(%s) additional size check not matching what filter wanted", f.Name)
 			return false, nil
 		}
 	}
 
 	// run external filters
 	if f.External != nil {
-		externalOk, err := s.RunExternalFilters(ctx, f.External, release)
+		externalOk, err := s.RunExternalFilters(ctx, f, f.External, release)
 		if err != nil {
-			s.log.Error().Err(err).Msgf("filter.Service.CheckFilter: (%s) external filter check error", f.Name)
+			l.Error().Err(err).Msgf("(%s) external filter check error", f.Name)
 			return false, err
 		}
 
 		if !externalOk {
-			s.log.Trace().Msgf("filter.Service.CheckFilter: (%s) additional size check not matching what filter wanted", f.Name)
+			l.Debug().Msgf("(%s) external filter check not matching what filter wanted", f.Name)
 			return false, nil
 		}
 	}
@@ -419,7 +420,7 @@ func (s *service) CheckFilter(ctx context.Context, f domain.Filter, release *dom
 // it is necessary to download the torrent file and parse it to make the size
 // check. We use the API where available to minimize the number of torrents we
 // need to download.
-func (s *service) AdditionalSizeCheck(ctx context.Context, f domain.Filter, release *domain.Release) (bool, error) {
+func (s *service) AdditionalSizeCheck(ctx context.Context, f *domain.Filter, release *domain.Release) (bool, error) {
 	var err error
 	defer func() {
 		// try recover panic if anything went wrong with API or size checks
@@ -427,44 +428,45 @@ func (s *service) AdditionalSizeCheck(ctx context.Context, f domain.Filter, rele
 	}()
 
 	// do additional size check against indexer api or torrent for size
-	s.log.Debug().Msgf("filter.Service.AdditionalSizeCheck: (%s) additional size check required", f.Name)
+	l := s.log.With().Str("method", "AdditionalSizeCheck").Logger()
+
+	l.Debug().Msgf("(%s) additional size check required", f.Name)
 
 	switch release.Indexer {
 	case "ptp", "btn", "ggn", "redacted", "ops", "mock":
 		if release.Size == 0 {
-			s.log.Trace().Msgf("filter.Service.AdditionalSizeCheck: (%s) preparing to check via api", f.Name)
+			l.Trace().Msgf("(%s) preparing to check via api", f.Name)
 
 			torrentInfo, err := s.apiService.GetTorrentByID(ctx, release.Indexer, release.TorrentID)
 			if err != nil || torrentInfo == nil {
-				s.log.Error().Stack().Err(err).Msgf("filter.Service.AdditionalSizeCheck: (%s) could not get torrent info from api: '%s' from: %s", f.Name, release.TorrentID, release.Indexer)
+				l.Error().Err(err).Msgf("(%s) could not get torrent info from api: '%s' from: %s", f.Name, release.TorrentID, release.Indexer)
 				return false, err
 			}
 
-			s.log.Debug().Msgf("filter.Service.AdditionalSizeCheck: (%s) got torrent info from api: %+v", f.Name, torrentInfo)
+			l.Debug().Msgf("(%s) got torrent info from api: %+v", f.Name, torrentInfo)
 
 			release.Size = torrentInfo.ReleaseSizeBytes()
 		}
 
 	default:
-		s.log.Trace().Msgf("filter.Service.AdditionalSizeCheck: (%s) preparing to download torrent metafile", f.Name)
+		l.Trace().Msgf("(%s) preparing to download torrent metafile", f.Name)
 
 		// if indexer doesn't have api, download torrent and add to tmpPath
 		if err := release.DownloadTorrentFileCtx(ctx); err != nil {
-			s.log.Error().Stack().Err(err).Msgf("filter.Service.AdditionalSizeCheck: (%s) could not download torrent file with id: '%s' from: %s", f.Name, release.TorrentID, release.Indexer)
+			l.Error().Err(err).Msgf("(%s) could not download torrent file with id: '%s' from: %s", f.Name, release.TorrentID, release.Indexer)
 			return false, err
 		}
 	}
 
-	// compare size against filter
-	sizeErr, err := f.CheckReleaseSize(release.Size)
+	sizeOk, err := f.CheckReleaseSize(release.Size)
 	if err != nil {
-		s.log.Error().Stack().Err(err).Msgf("filter.Service.AdditionalSizeCheck: (%s) error checking extra size filter", f.Name)
+		l.Error().Err(err).Msgf("(%s) error comparing release and filter size", f.Name)
 		return false, err
 	}
 
-	//no match, lets continue to next filter
-	if sizeErr != nil {
-		s.log.Debug().Msgf("filter.Service.AdditionalSizeCheck: (%s) filter did not match after additional size check, trying next", f.Name)
-		return false, nil
+	if !sizeOk {
+		l.Debug().Msgf("(%s) filter did not match after additional size check, trying next", f.Name)
+		return false, err
 	}
 
 	return true, nil
@@ -474,7 +476,7 @@ func (s *service) CanDownloadShow(ctx context.Context, release *domain.Release)
 	return s.releaseRepo.CanDownloadShow(ctx, release.Title, release.Season, release.Episode)
 }
 
-func (s *service) RunExternalFilters(ctx context.Context, externalFilters []domain.FilterExternal, release *domain.Release) (bool, error) {
+func (s *service) RunExternalFilters(ctx context.Context, f *domain.Filter, externalFilters []domain.FilterExternal, release *domain.Release) (bool, error) {
 	var err error
 
 	defer func() {
@@ -504,7 +506,7 @@ func (s *service) RunExternalFilters(ctx context.Context, externalFilters []doma
 		if exitCode != external.ExecExpectStatus {
 			s.log.Trace().Msgf("filter.Service.CheckFilter: external script unexpected exit code. got: %d want: %d", exitCode, external.ExecExpectStatus)
-			release.AddRejectionF("external script unexpected exit code. got: %d want: %d", exitCode, external.ExecExpectStatus)
+			f.AddRejectionF("external script unexpected exit code. got: %d want: %d", exitCode, external.ExecExpectStatus)
 			return false, nil
 		}
@@ -517,7 +519,7 @@ func (s *service) RunExternalFilters(ctx context.Context, externalFilters []doma
 		if statusCode != external.WebhookExpectStatus {
 			s.log.Trace().Msgf("filter.Service.CheckFilter: external webhook unexpected status code. got: %d want: %d", statusCode, external.WebhookExpectStatus)
-			release.AddRejectionF("external webhook unexpected status code. got: %d want: %d", statusCode, external.WebhookExpectStatus)
+			f.AddRejectionF("external webhook unexpected status code. got: %d want: %d", statusCode, external.WebhookExpectStatus)
 			return false, nil
 		}
 	}
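
Besides moving rejections onto the filter, the service methods above stop repeating the "filter.Service.CheckFilter:" prefix in every message and instead derive a scoped child logger once per method. A minimal sketch of that pattern with the zerolog-style API the diff uses; the writer and the example field values here are made up:

```go
package main

import (
	"os"

	"github.com/rs/zerolog"
)

func main() {
	// Root logger, roughly what s.log stands in for inside the service.
	log := zerolog.New(os.Stdout).Level(zerolog.TraceLevel).With().Timestamp().Logger()

	// Child logger scoped to one method: every event carries method=CheckFilter
	// without repeating the prefix in each Msgf call.
	l := log.With().Str("method", "CheckFilter").Logger()

	l.Trace().Msgf("checking filter: %s", "example filter")
	l.Debug().Msgf("(%s) additional size check required", "example filter")
}
```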


@@ -130,17 +130,19 @@ func (s *service) Process(release *domain.Release) {
 	return
 }
 
-func (s *service) processFilters(ctx context.Context, filters []domain.Filter, release *domain.Release) error {
+func (s *service) processFilters(ctx context.Context, filters []*domain.Filter, release *domain.Release) error {
 	// keep track of action clients to avoid sending the same thing all over again
 	// save both client type and client id to potentially try another client of same type
 	triedActionClients := map[actionClientTypeKey]struct{}{}
 
 	// loop over and check filters
 	for _, f := range filters {
+		f := f
+
 		l := s.log.With().Str("indexer", release.Indexer).Str("filter", f.Name).Str("release", release.TorrentName).Logger()
 
 		// save filter on release
-		release.Filter = &f
+		release.Filter = f
 		release.FilterName = f.Name
 		release.FilterID = f.ID
@@ -152,9 +154,9 @@ func (s *service) processFilters(ctx context.Context, filters []domain.Filter, r
 		}
 
 		if !match {
-			l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s, no match. rejections: %s", release.Indexer, release.FilterName, release.TorrentName, release.RejectionsString(false))
-			l.Debug().Msgf("release rejected: %s", release.RejectionsString(true))
+			l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s, no match. rejections: %s", release.Indexer, release.FilterName, release.TorrentName, f.RejectionsString(false))
+			l.Debug().Msgf("filter %s rejected release: %s", f.Name, f.RejectionsString(true))
 			continue
 		}
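
One more note on the loop above: filters is now a slice of pointers and each iteration re-declares f before storing it on the release. That copy guards against the classic pre-Go 1.22 loop-variable aliasing that the old `release.Filter = &f` was exposed to. A tiny self-contained illustration of the difference:

```go
package main

import "fmt"

type filter struct{ Name string }

func main() {
	filters := []filter{{Name: "a"}, {Name: "b"}, {Name: "c"}}

	// Taking the address of the range variable directly: before Go 1.22 every
	// element ends up pointing at the same reused variable (the last value).
	var aliased []*filter
	for _, f := range filters {
		aliased = append(aliased, &f)
	}

	// Re-declaring f per iteration, the same defensive move as in the loop
	// above, gives each element its own copy regardless of Go version.
	var copied []*filter
	for _, f := range filters {
		f := f
		copied = append(copied, &f)
	}

	fmt.Println(aliased[0].Name, copied[0].Name) // Go < 1.22: "c a"; Go >= 1.22: "a a"
}
```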