feat(filters): RED and OPS fetch uploader from API (#1348)

* feat: added uploader when fetching torrent details on RED indexer

* revert

* tests

* refactor(filters): size and uploader api checks

* refactor(filters): fix test

* refactor(filters): add mutex to rejections

---------

Co-authored-by: Kyle Sanderson <kyle.leet@gmail.com>
Co-authored-by: ze0s <ze0s@riseup.net>
Authored by Alexander Chernov on 2024-12-17 19:39:09 +00:00, committed by GitHub
parent 04c4bd482f
commit acef4ac624
8 changed files with 251 additions and 88 deletions
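
In rough terms, the change defers uploader matching for RED and OPS when the announce does not carry an uploader: instead of rejecting the release, the filter flags it for a follow-up lookup, and the filter service later resolves the uploader via the indexer API and re-runs the uploader check. A minimal sketch of that deferred-check pattern (simplified stand-in types and names, not the actual autobrr code):

package main

import "fmt"

// Simplified stand-ins for the real domain types.
type release struct {
	Uploader            string
	Indexer             string
	NeedsUploaderLookup bool // plays the role of AdditionalUploaderCheckRequired
}

type filter struct {
	MatchUploaders string
}

// checkUploader mirrors the idea in the diff: if the announce has no uploader
// but the indexer can be queried (RED/OPS), defer the decision instead of rejecting.
func (f filter) checkUploader(r *release) bool {
	if r.Uploader == "" && (r.Indexer == "redacted" || r.Indexer == "ops") {
		r.NeedsUploaderLookup = true
		return true // decided later, once the API lookup fills in r.Uploader
	}
	return r.Uploader == f.MatchUploaders // the real matching is list/wildcard based
}

func main() {
	r := &release{Indexer: "redacted"} // uploader missing from the announce
	f := filter{MatchUploaders: "Uploader"}

	fmt.Println(f.checkUploader(r), r.NeedsUploaderLookup) // true true -> API lookup runs next
	r.Uploader = "Uploader"                                // pretend the API returned this
	fmt.Println(f.checkUploader(r))                        // true -> filter matches
}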


@@ -460,12 +460,8 @@ func (f *Filter) CheckFilter(r *Release) (*RejectionReasons, bool) {
 		}
 	}
 
-	if f.MatchUploaders != "" && !contains(r.Uploader, f.MatchUploaders) {
-		f.RejectReasons.Add("match uploaders", r.Uploader, f.MatchUploaders)
-	}
-
-	if f.ExceptUploaders != "" && contains(r.Uploader, f.ExceptUploaders) {
-		f.RejectReasons.Add("except uploaders", r.Uploader, f.ExceptUploaders)
+	if (f.MatchUploaders != "" || f.ExceptUploaders != "") && !f.checkUploader(r) {
+		// f.checkUploader sets the rejections
 	}
 
 	if len(f.MatchLanguage) > 0 && !sliceContainsSlice(r.Language, f.MatchLanguage) {
@@ -731,6 +727,27 @@ func (f *Filter) checkSizeFilter(r *Release) bool {
 	return true
 }
 
+// checkUploader checks if the uploader is within the given list.
+// if the haystack is not empty but the uploader is, then a further
+// investigation is needed
+func (f *Filter) checkUploader(r *Release) bool {
+	// only support additional uploader check for RED and OPS
+	if r.Uploader == "" && (r.Indexer.Identifier == "redacted" || r.Indexer.Identifier == "ops") {
+		r.AdditionalUploaderCheckRequired = true
+		return true
+	}
+
+	if f.MatchUploaders != "" && !contains(r.Uploader, f.MatchUploaders) {
+		f.RejectReasons.Add("match uploaders", r.Uploader, f.MatchUploaders)
+	}
+
+	if f.ExceptUploaders != "" && contains(r.Uploader, f.ExceptUploaders) {
+		f.RejectReasons.Add("except uploaders", r.Uploader, f.ExceptUploaders)
+	}
+
+	return true
+}
+
 // IsPerfectFLAC Perfect is "CD FLAC Cue Log 100% Lossless or 24bit Lossless"
 func (f *Filter) IsPerfectFLAC(r *Release) ([]string, bool) {
 	rejections := []string{}
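
For context, MatchUploaders and ExceptUploaders are comma-separated lists that the existing contains helper matches the uploader against. A rough, self-contained approximation of that list matching (case-insensitive, simple * wildcards; an illustration, not the real helper):

package main

import (
	"fmt"
	"path"
	"strings"
)

// matchesList is a hypothetical stand-in for the contains helper used above:
// it checks a value against a comma-separated list of patterns.
func matchesList(value, list string) bool {
	for _, item := range strings.Split(list, ",") {
		pattern := strings.ToLower(strings.TrimSpace(item))
		if pattern == "" {
			continue
		}
		if ok, _ := path.Match(pattern, strings.ToLower(value)); ok {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(matchesList("Uploader", "someone,Uploader")) // true
	fmt.Println(matchesList("Uploader", "upload*"))          // true
	fmt.Println(matchesList("Uploader", "other"))            // false
}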
@@ -1168,6 +1185,20 @@ func (f *Filter) CheckReleaseSize(releaseSize uint64) (bool, error) {
 	return true, nil
 }
 
+func (f *Filter) CheckUploader(uploader string) (bool, error) {
+	if f.MatchUploaders != "" && !contains(uploader, f.MatchUploaders) {
+		f.RejectReasons.Add("match uploader", uploader, f.MatchUploaders)
+		return false, nil
+	}
+
+	if f.ExceptUploaders != "" && contains(uploader, f.ExceptUploaders) {
+		f.RejectReasons.Add("except uploader", uploader, f.ExceptUploaders)
+		return false, nil
+	}
+
+	return true, nil
+}
+
 // parsedSizeLimits parses filter bytes limits (expressed as a string) into a
 // uint64 number of bytes. The bounds are returned as *uint64 number of bytes,
 // with "nil" representing "no limit". We break out filter size limit parsing


@@ -7,6 +7,7 @@ import (
 	"encoding/json"
 	"fmt"
 	"strings"
+	"sync"
 )
 
 type Rejection struct {
@@ -17,6 +18,7 @@ type Rejection struct {
 }
 
 type RejectionReasons struct {
+	m    sync.RWMutex
 	data []Rejection
 }
@@ -31,6 +33,9 @@ func NewRejectionReasons() *RejectionReasons {
 }
 
 func (r *RejectionReasons) String() string {
+	r.m.RLock()
+	defer r.m.RUnlock()
+
 	if len(r.data) == 0 {
 		return ""
 	}
@@ -53,6 +58,9 @@ func (r *RejectionReasons) String() string {
 }
 
 func (r *RejectionReasons) StringTruncated() string {
+	r.m.RLock()
+	defer r.m.RUnlock()
+
 	if len(r.data) == 0 {
 		return ""
 	}
@@ -85,6 +93,9 @@ func (r *RejectionReasons) StringTruncated() string {
 }
 
 func (r *RejectionReasons) WriteString() string {
+	r.m.RLock()
+	defer r.m.RUnlock()
+
 	var output []string
 	for _, rejection := range r.data {
 		output = append(output, fmt.Sprintf("[%s] not matching: got %v want: %v", rejection.key, rejection.got, rejection.want))
@@ -94,7 +105,10 @@ func (r *RejectionReasons) WriteString() string {
 }
 
 func (r *RejectionReasons) WriteJSON() ([]byte, error) {
+	r.m.RLock()
+	defer r.m.RUnlock()
+
 	var output map[string]string
 	for _, rejection := range r.data {
 		output[rejection.key] = fmt.Sprintf("[%s] not matching: got %v want: %v", rejection.key, rejection.got, rejection.want)
 	}
@@ -103,6 +117,9 @@ func (r *RejectionReasons) WriteJSON() ([]byte, error) {
 }
 
 func (r *RejectionReasons) Add(key string, got any, want any) {
+	r.m.Lock()
+	defer r.m.Unlock()
+
 	r.data = append(r.data, Rejection{
 		key: key,
 		got: got,
@@ -111,6 +128,9 @@ func (r *RejectionReasons) Add(key string, got any, want any) {
 }
 
 func (r *RejectionReasons) Addf(key string, format string, got any, want any) {
+	r.m.Lock()
+	defer r.m.Unlock()
+
 	r.data = append(r.data, Rejection{
 		key:    key,
 		format: format,
@@ -120,6 +140,9 @@ func (r *RejectionReasons) Addf(key string, format string, got any, want any) {
 }
 
 func (r *RejectionReasons) AddTruncated(key string, got any, want any) {
+	r.m.Lock()
+	defer r.m.Unlock()
+
 	switch wanted := want.(type) {
 	case string:
 		if len(wanted) > 1024 {
@@ -139,5 +162,7 @@ func (r *RejectionReasons) AddTruncated(key string, got any, want any) {
 
 // Clear rejections
 func (r *RejectionReasons) Clear() {
+	r.m.Lock()
+	defer r.m.Unlock()
 	r.data = make([]Rejection, 0)
 }
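
The new mutex matters because rejection reasons can now be appended from the deferred API check while other code reads them for logging, and unsynchronized access to a shared slice is a data race in Go. A minimal, generic illustration of the same locking pattern (not the autobrr type itself):

package main

import (
	"fmt"
	"sync"
)

// reasons is a cut-down analogue of RejectionReasons: a slice guarded by an RWMutex.
type reasons struct {
	m    sync.RWMutex
	data []string
}

func (r *reasons) Add(s string) {
	r.m.Lock()
	defer r.m.Unlock()
	r.data = append(r.data, s)
}

func (r *reasons) String() string {
	r.m.RLock()
	defer r.m.RUnlock()
	return fmt.Sprint(r.data)
}

func main() {
	r := &reasons{}
	var wg sync.WaitGroup
	for i := 0; i < 10; i++ {
		wg.Add(1)
		go func(i int) {
			defer wg.Done()
			r.Add(fmt.Sprintf("reason %d", i)) // concurrent writers are safe because Add locks
		}(i)
	}
	wg.Wait()
	fmt.Println(r.String())
}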


@@ -107,6 +107,7 @@ type Release struct {
 	Seeders                         int                   `json:"-"`
 	Leechers                        int                   `json:"-"`
 	AdditionalSizeCheckRequired     bool                  `json:"-"`
+	AdditionalUploaderCheckRequired bool                  `json:"-"`
 	FilterID                        int                   `json:"-"`
 	Filter                          *Filter               `json:"-"`
 	ActionStatus                    []ReleaseActionStatus `json:"action_status"`


@@ -73,7 +73,7 @@ func TestRSSJob_processItem(t *testing.T) {
 				Link: "/details.php?id=00000&hit=1",
 				GUID: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
 			}},
-			want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed", "Mock Indexer"}, FilterName: "", Protocol: "torrent", Implementation: "RSS", AnnounceType: domain.AnnounceTypeNew, Timestamp: now, GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 1490000000, Title: "Some Release Title", Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n", Category: "", Season: 0, Episode: 0, Year: 2022, Month: 9, Day: 22, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
+			want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed", "Mock Indexer"}, FilterName: "", Protocol: "torrent", Implementation: "RSS", AnnounceType: domain.AnnounceTypeNew, Timestamp: now, GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 1490000000, Title: "Some Release Title", Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n", Category: "", Season: 0, Episode: 0, Year: 2022, Month: 9, Day: 22, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, AdditionalUploaderCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
 		},
 		{
 			name: "with_baseurl",
@@ -107,7 +107,7 @@ func TestRSSJob_processItem(t *testing.T) {
 				Link: "https://fake-feed.com/details.php?id=00000&hit=1",
 				GUID: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
 			}},
-			want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed", "Mock Indexer"}, FilterName: "", Protocol: "torrent", Implementation: "RSS", AnnounceType: domain.AnnounceTypeNew, Timestamp: now, GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 1490000000, Title: "Some Release Title", Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n", Category: "", Season: 0, Episode: 0, Year: 2022, Month: 9, Day: 22, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
+			want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed", "Mock Indexer"}, FilterName: "", Protocol: "torrent", Implementation: "RSS", AnnounceType: domain.AnnounceTypeNew, Timestamp: now, GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 1490000000, Title: "Some Release Title", Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n", Category: "", Season: 0, Episode: 0, Year: 2022, Month: 9, Day: 22, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, AdditionalUploaderCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
 		},
 		{
 			name: "time_parse",
@@ -142,7 +142,7 @@ func TestRSSJob_processItem(t *testing.T) {
 				GUID: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
 				//PublishedParsed: &nowMinusTime,
 			}},
-			want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed", "Mock Indexer"}, FilterName: "", Protocol: "torrent", Implementation: "RSS", AnnounceType: domain.AnnounceTypeNew, Timestamp: now, GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 1490000000, Title: "Some Release Title", Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n", Category: "", Season: 0, Episode: 0, Year: 2022, Month: 9, Day: 22, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
+			want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed", "Mock Indexer"}, FilterName: "", Protocol: "torrent", Implementation: "RSS", AnnounceType: domain.AnnounceTypeNew, Timestamp: now, GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 1490000000, Title: "Some Release Title", Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n", Category: "", Season: 0, Episode: 0, Year: 2022, Month: 9, Day: 22, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, AdditionalUploaderCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
 		},
 		{
 			name: "time_parse",


@@ -42,6 +42,7 @@ type Service interface {
 	ToggleEnabled(ctx context.Context, filterID int, enabled bool) error
 	Delete(ctx context.Context, filterID int) error
 	AdditionalSizeCheck(ctx context.Context, f *domain.Filter, release *domain.Release) (bool, error)
+	AdditionalUploaderCheck(ctx context.Context, f *domain.Filter, release *domain.Release) (bool, error)
 	CheckSmartEpisodeCanDownload(ctx context.Context, params *domain.SmartEpisodeParams) (bool, error)
 	GetDownloadsByFilterId(ctx context.Context, filterID int) (*domain.FilterDownloads, error)
 }
@@ -429,8 +430,7 @@ func (s *service) CheckFilter(ctx context.Context, f *domain.Filter, release *do
 	l.Debug().Msgf("found and matched filter: %s", f.Name)
 
 	// If size constraints are set in a filter and the indexer did not
-	// announce the size, we need to do an additional out of band size
-	// check.
+	// announce the size, we need to do an additional out of band size check.
 	if release.AdditionalSizeCheckRequired {
 		l.Debug().Msgf("(%s) additional size check required", f.Name)
@@ -446,6 +446,22 @@ func (s *service) CheckFilter(ctx context.Context, f *domain.Filter, release *do
 		}
 	}
 
+	// check uploader if the indexer supports check via api
+	if release.AdditionalUploaderCheckRequired {
+		l.Debug().Msgf("(%s) additional uploader check required", f.Name)
+
+		ok, err := s.AdditionalUploaderCheck(ctx, f, release)
+		if err != nil {
+			l.Error().Err(err).Msgf("(%s) additional uploader check error", f.Name)
+			return false, err
+		}
+
+		if !ok {
+			l.Trace().Msgf("(%s) additional uploader check not matching what filter wanted", f.Name)
+			return false, nil
+		}
+	}
+
 	// run external filters
 	if f.External != nil {
 		externalOk, err := s.RunExternalFilters(ctx, f, f.External, release)
@@ -467,8 +483,8 @@ func (s *service) CheckFilter(ctx context.Context, f *domain.Filter, release *do
 	return false, nil
 }
 
-// AdditionalSizeCheck performs additional out of band checks to determine the
-// size of a torrent. Some indexers do not announce torrent size, so it is
+// AdditionalSizeCheck performs additional out-of-band checks to determine the
+// values of a torrent. Some indexers do not announce torrent size, so it is
 // necessary to determine the size of the torrent in some other way. Some
 // indexers have an API implemented to fetch this data. For those which don't,
 // it is necessary to download the torrent file and parse it to make the size
@@ -484,11 +500,93 @@ func (s *service) AdditionalSizeCheck(ctx context.Context, f *domain.Filter, rel
 	// do additional size check against indexer api or torrent for size
 	l := s.log.With().Str("method", "AdditionalSizeCheck").Logger()
 
-	l.Debug().Msgf("(%s) additional size check required", f.Name)
+	l.Debug().Msgf("(%s) additional api size check required", f.Name)
 
 	switch release.Indexer.Identifier {
-	case "ptp", "btn", "ggn", "redacted", "ops", "mock":
-		if release.Size == 0 {
+	case "btn", "ggn", "redacted", "ops", "mock":
+		if (release.Size == 0 && release.AdditionalSizeCheckRequired) || (release.Uploader == "" && release.AdditionalUploaderCheckRequired) {
+			l.Trace().Msgf("(%s) preparing to check size via api", f.Name)
+
+			torrentInfo, err := s.apiService.GetTorrentByID(ctx, release.Indexer.Identifier, release.TorrentID)
+			if err != nil || torrentInfo == nil {
+				l.Error().Err(err).Msgf("(%s) could not get torrent info from api: '%s' from: %s", f.Name, release.TorrentID, release.Indexer.Identifier)
+				return false, err
+			}
+
+			l.Debug().Msgf("(%s) got torrent info from api: %+v", f.Name, torrentInfo)
+
+			torrentSize := torrentInfo.ReleaseSizeBytes()
+			if release.Size == 0 && torrentSize > 0 {
+				release.Size = torrentSize
+			}
+
+			if release.Uploader == "" {
+				release.Uploader = torrentInfo.Uploader
+			}
+		}
+
+	default:
+		if release.Size == 0 && release.AdditionalSizeCheckRequired {
+			l.Trace().Msgf("(%s) preparing to download torrent metafile", f.Name)
+
+			// if indexer doesn't have api, download torrent and add to tmpPath
+			if err := s.downloadSvc.DownloadRelease(ctx, release); err != nil {
+				l.Error().Err(err).Msgf("(%s) could not download torrent file with id: '%s' from: %s", f.Name, release.TorrentID, release.Indexer.Identifier)
+				return false, errors.Wrap(err, "could not download torrent file for release: %s", release.TorrentName)
+			}
+		}
+	}
+
+	sizeOk, err := f.CheckReleaseSize(release.Size)
+	if err != nil {
+		l.Error().Err(err).Msgf("(%s) error comparing release and filter size", f.Name)
+		return false, err
+	}
+
+	// reset AdditionalSizeCheckRequired to not re-trigger check
+	release.AdditionalSizeCheckRequired = false
+
+	if !sizeOk {
+		l.Debug().Msgf("(%s) filter did not match after additional size check, trying next", f.Name)
+		return false, err
+	}
+
+	return true, nil
+}
+
+func (s *service) AdditionalUploaderCheck(ctx context.Context, f *domain.Filter, release *domain.Release) (bool, error) {
+	var err error
+
+	defer func() {
+		// try recover panic if anything went wrong with API or size checks
+		errors.RecoverPanic(recover(), &err)
+	}()
+
+	// do additional check against indexer api
+	l := s.log.With().Str("method", "AdditionalUploaderCheck").Logger()
+
+	// if uploader was fetched before during size check we check it and return early
+	if release.Uploader != "" {
+		uploaderOk, err := f.CheckUploader(release.Uploader)
+		if err != nil {
+			l.Error().Err(err).Msgf("(%s) error comparing release and uploaders", f.Name)
+			return false, err
+		}
+
+		// reset AdditionalUploaderCheckRequired to not re-trigger check
+		release.AdditionalUploaderCheckRequired = false
+
+		if !uploaderOk {
+			l.Debug().Msgf("(%s) filter did not match after additional uploaders check, trying next", f.Name)
+			return false, err
+		}
+
+		return true, nil
+	}
+
+	l.Debug().Msgf("(%s) additional api size check required", f.Name)
+
+	switch release.Indexer.Identifier {
+	case "redacted", "ops", "mock":
 		l.Trace().Msgf("(%s) preparing to check via api", f.Name)
 
 		torrentInfo, err := s.apiService.GetTorrentByID(ctx, release.Indexer.Identifier, release.TorrentID)
@@ -499,27 +597,33 @@ func (s *service) AdditionalSizeCheck(ctx context.Context, f *domain.Filter, rel
 
 		l.Debug().Msgf("(%s) got torrent info from api: %+v", f.Name, torrentInfo)
 
-		release.Size = torrentInfo.ReleaseSizeBytes()
+		torrentSize := torrentInfo.ReleaseSizeBytes()
+		if release.Size == 0 && torrentSize > 0 {
+			release.Size = torrentSize
+			// reset AdditionalSizeCheckRequired to not re-trigger check
+			release.AdditionalSizeCheckRequired = false
+		}
+
+		if release.Uploader == "" {
+			release.Uploader = torrentInfo.Uploader
 		}
 
 	default:
-		l.Trace().Msgf("(%s) preparing to download torrent metafile", f.Name)
-
-		// if indexer doesn't have api, download torrent and add to tmpPath
-		if err := s.downloadSvc.DownloadRelease(ctx, release); err != nil {
-			l.Error().Err(err).Msgf("(%s) could not download torrent file with id: '%s' from: %s", f.Name, release.TorrentID, release.Indexer.Identifier)
-			return false, errors.Wrap(err, "could not download torrent file for release: %s", release.TorrentName)
-		}
+		return false, errors.New("additional uploader check not supported for this indexer: %s", release.Indexer.Identifier)
 	}
 
-	sizeOk, err := f.CheckReleaseSize(release.Size)
+	uploaderOk, err := f.CheckUploader(release.Uploader)
 	if err != nil {
-		l.Error().Err(err).Msgf("(%s) error comparing release and filter size", f.Name)
+		l.Error().Err(err).Msgf("(%s) error comparing release and uploaders", f.Name)
 		return false, err
 	}
 
-	if !sizeOk {
-		l.Debug().Msgf("(%s) filter did not match after additional size check, trying next", f.Name)
+	// reset AdditionalUploaderCheckRequired to not re-trigger check
+	release.AdditionalUploaderCheckRequired = false
+
+	if !uploaderOk {
+		l.Debug().Msgf("(%s) filter did not match after additional uploaders check, trying next", f.Name)
 		return false, err
 	}
 


@@ -234,6 +234,7 @@ func (c *Client) GetTorrentByID(ctx context.Context, torrentID string) (*domain.
 		Id:       strconv.Itoa(response.Response.Torrent.Id),
 		InfoHash: response.Response.Torrent.InfoHash,
 		Size:     strconv.Itoa(response.Response.Torrent.Size),
+		Uploader: response.Response.Torrent.Username,
 	}, nil
 }
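
The uploader comes straight out of the Gazelle-style torrent details response, where it is exposed as the username field of the torrent object (see the JSON fixture further down). A trimmed-down sketch of decoding just the fields touched by this change (an approximation of the response shape, not the client's full struct):

package main

import (
	"encoding/json"
	"fmt"
)

// torrentDetails models only the fields relevant here.
type torrentDetails struct {
	Response struct {
		Torrent struct {
			ID       int    `json:"id"`
			InfoHash string `json:"infoHash"`
			Size     int    `json:"size"`
			Username string `json:"username"` // the uploader, now copied into Uploader
		} `json:"torrent"`
	} `json:"response"`
}

func main() {
	payload := []byte(`{"response":{"torrent":{"id":29991962,"infoHash":"B2BA...","size":527749302,"username":"Uploader"}}}`)

	var d torrentDetails
	if err := json.Unmarshal(payload, &d); err != nil {
		panic(err)
	}
	fmt.Println(d.Response.Torrent.Username) // "Uploader"
}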


@@ -75,6 +75,7 @@ func TestREDClient_GetTorrentByID(t *testing.T) {
 				Id:       "29991962",
 				InfoHash: "B2BABD3A361EAFC6C4E9142C422DF7DDF5D7E163",
 				Size:     "527749302",
+				Uploader: "Uploader",
 			},
 			wantErr: "",
 		},


@@ -71,7 +71,7 @@
 			"fileList": "00-logistics-fear_not-cd-flac-2012.jpg{{{1233205}}}|||00-logistics-fear_not-cd-flac-2012.m3u{{{538}}}|||00-logistics-fear_not-cd-flac-2012.nfo{{{1607}}}|||00-logistics-fear_not-cd-flac-2012.sfv{{{688}}}|||01-logistics-fear_not.flac{{{38139451}}}|||02-logistics-timelapse.flac{{{39346037}}}|||03-logistics-2999_(wherever_you_go).flac{{{41491133}}}|||04-logistics-try_again.flac{{{32151567}}}|||05-logistics-we_are_one.flac{{{40778041}}}|||06-logistics-crystal_skies_(feat_nightshade_and_sarah_callander).flac{{{34544405}}}|||07-logistics-feels_so_good.flac{{{41363732}}}|||08-logistics-running_late.flac{{{16679269}}}|||09-logistics-early_again.flac{{{35373278}}}|||10-logistics-believe_in_me.flac{{{39495420}}}|||11-logistics-letting_go.flac{{{30846730}}}|||12-logistics-sendai_song.flac{{{35021141}}}|||13-logistics-over_and_out.flac{{{44621200}}}|||14-logistics-destination_unknown.flac{{{13189493}}}|||15-logistics-watching_the_world_go_by_(feat_alice_smith).flac{{{43472367}}}",
 			"filePath": "Logistics-Fear_Not-CD-FLAC-2012-TaBoo",
 			"userId": 567,
-			"username": null
+			"username": "Uploader"
 		}
 	}
 }