Mirror of https://github.com/idanoo/autobrr, synced 2025-07-22 08:19:12 +00:00
feat(filters): RED and OPS fetch record label from API (#1881)
* feat(filters): RED and OPS fetch record label from API
* test: add record label to RED and OPS test data
* refactor: record label check
---------
Co-authored-by: ze0s <ze0s@riseup.net>
parent 221bc35371, commit d153ac44b8
16 changed files with 380 additions and 154 deletions
@@ -226,6 +226,8 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
 "f.except_categories",
 "f.match_uploaders",
 "f.except_uploaders",
+"f.match_record_labels",
+"f.except_record_labels",
 "f.match_language",
 "f.except_language",
 "f.tags",

@@ -261,7 +263,7 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
 var f domain.Filter

 // filter
-var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, months, days, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString
+var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, months, days, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, matchRecordLabels, exceptRecordLabels, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString
 var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
 var delay, maxDownloads, logScore sql.NullInt32

@@ -319,6 +321,8 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
 &exceptCategories,
 &matchUploaders,
 &exceptUploaders,
+&matchRecordLabels,
+&exceptRecordLabels,
 pq.Array(&f.MatchLanguage),
 pq.Array(&f.ExceptLanguage),
 &tags,

@@ -372,6 +376,8 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
 f.ExceptCategories = exceptCategories.String
 f.MatchUploaders = matchUploaders.String
 f.ExceptUploaders = exceptUploaders.String
+f.MatchRecordLabels = matchRecordLabels.String
+f.ExceptRecordLabels = exceptRecordLabels.String
 f.Tags = tags.String
 f.ExceptTags = exceptTags.String
 f.TagsMatchLogic = tagsMatchLogic.String

@@ -444,6 +450,8 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
 "f.except_categories",
 "f.match_uploaders",
 "f.except_uploaders",
+"f.match_record_labels",
+"f.except_record_labels",
 "f.match_language",
 "f.except_language",
 "f.tags",

@@ -484,7 +492,7 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
 for rows.Next() {
 var f domain.Filter

-var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, months, days, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString
+var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, months, days, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, matchRecordLabels, exceptRecordLabels, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString
 var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
 var delay, maxDownloads, logScore sql.NullInt32

@@ -542,6 +550,8 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
 &exceptCategories,
 &matchUploaders,
 &exceptUploaders,
+&matchRecordLabels,
+&exceptRecordLabels,
 pq.Array(&f.MatchLanguage),
 pq.Array(&f.ExceptLanguage),
 &tags,

@@ -591,6 +601,8 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
 f.ExceptCategories = exceptCategories.String
 f.MatchUploaders = matchUploaders.String
 f.ExceptUploaders = exceptUploaders.String
+f.MatchRecordLabels = matchRecordLabels.String
+f.ExceptRecordLabels = exceptRecordLabels.String
 f.Tags = tags.String
 f.ExceptTags = exceptTags.String
 f.TagsMatchLogic = tagsMatchLogic.String

@@ -738,6 +750,8 @@ func (r *FilterRepo) Store(ctx context.Context, filter *domain.Filter) error {
 "except_categories",
 "match_uploaders",
 "except_uploaders",
+"match_record_labels",
+"except_record_labels",
 "match_language",
 "except_language",
 "tags",

@@ -804,6 +818,8 @@ func (r *FilterRepo) Store(ctx context.Context, filter *domain.Filter) error {
 filter.ExceptCategories,
 filter.MatchUploaders,
 filter.ExceptUploaders,
+filter.MatchRecordLabels,
+filter.ExceptRecordLabels,
 pq.Array(filter.MatchLanguage),
 pq.Array(filter.ExceptLanguage),
 filter.Tags,

@@ -888,6 +904,8 @@ func (r *FilterRepo) Update(ctx context.Context, filter *domain.Filter) error {
 Set("except_categories", filter.ExceptCategories).
 Set("match_uploaders", filter.MatchUploaders).
 Set("except_uploaders", filter.ExceptUploaders).
+Set("match_record_labels", filter.MatchRecordLabels).
+Set("except_record_labels", filter.ExceptRecordLabels).
 Set("match_language", pq.Array(filter.MatchLanguage)).
 Set("except_language", pq.Array(filter.ExceptLanguage)).
 Set("tags", filter.Tags).

@@ -1063,6 +1081,12 @@ func (r *FilterRepo) UpdatePartial(ctx context.Context, filter domain.FilterUpda
 if filter.ExceptUploaders != nil {
 q = q.Set("except_uploaders", filter.ExceptUploaders)
 }
+if filter.MatchRecordLabels != nil {
+q = q.Set("match_record_labels", filter.MatchRecordLabels)
+}
+if filter.ExceptRecordLabels != nil {
+q = q.Set("except_record_labels", filter.ExceptRecordLabels)
+}
 if filter.MatchLanguage != nil {
 q = q.Set("match_language", pq.Array(filter.MatchLanguage))
 }
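For context on the UpdatePartial changes above: only non-nil pointer fields end up in the generated SET clause, so a caller can change just the record label lists without touching any other column. A minimal, self-contained sketch of that pattern (the struct and the strPtr helper here are illustrative stand-ins, not autobrr's code):

package main

import "fmt"

// Illustrative mirror of the pointer-based update struct used above; only the
// fields relevant to this commit are shown.
type FilterUpdate struct {
	MatchRecordLabels  *string
	ExceptRecordLabels *string
}

// strPtr is a tiny local helper (an assumption for this sketch, not part of autobrr).
func strPtr(s string) *string { return &s }

func main() {
	// Only MatchRecordLabels is set, so a builder like the one above would
	// emit a single "match_record_labels" SET clause and leave the other
	// column untouched.
	upd := FilterUpdate{MatchRecordLabels: strPtr("Anjunabeats, Armada")}

	if upd.MatchRecordLabels != nil {
		fmt.Println("SET match_record_labels =", *upd.MatchRecordLabels)
	}
	if upd.ExceptRecordLabels != nil {
		fmt.Println("SET except_record_labels =", *upd.ExceptRecordLabels)
	}
}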
@@ -144,6 +144,8 @@ CREATE TABLE filter
 except_categories TEXT,
 match_uploaders TEXT,
 except_uploaders TEXT,
+match_record_labels TEXT,
+except_record_labels TEXT,
 match_language TEXT [] DEFAULT '{}',
 except_language TEXT [] DEFAULT '{}',
 tags TEXT,

@@ -1066,5 +1068,11 @@ CREATE TABLE list_filter
 FOREIGN KEY (filter_id) REFERENCES filter(id) ON DELETE CASCADE,
 PRIMARY KEY (list_id, filter_id)
 );
 `,
+`ALTER TABLE filter
+ADD COLUMN match_record_labels TEXT DEFAULT '';
+
+ALTER TABLE filter
+ADD COLUMN except_record_labels TEXT DEFAULT '';
+`,
 }
@@ -144,6 +144,8 @@ CREATE TABLE filter
 except_categories TEXT,
 match_uploaders TEXT,
 except_uploaders TEXT,
+match_record_labels TEXT,
+except_record_labels TEXT,
 match_language TEXT [] DEFAULT '{}',
 except_language TEXT [] DEFAULT '{}',
 tags TEXT,

@@ -1708,5 +1710,11 @@ CREATE TABLE list_filter
 FOREIGN KEY (filter_id) REFERENCES filter(id) ON DELETE CASCADE,
 PRIMARY KEY (list_id, filter_id)
 );
 `,
+`ALTER TABLE filter
+ADD COLUMN match_record_labels TEXT DEFAULT '';
+
+ALTER TABLE filter
+ADD COLUMN except_record_labels TEXT DEFAULT '';
+`,
 }
@@ -144,6 +144,8 @@ type Filter struct {
 ExceptCategories string `json:"except_categories,omitempty"`
 MatchUploaders string `json:"match_uploaders,omitempty"`
 ExceptUploaders string `json:"except_uploaders,omitempty"`
+MatchRecordLabels string `json:"match_record_labels,omitempty"`
+ExceptRecordLabels string `json:"except_record_labels,omitempty"`
 MatchLanguage []string `json:"match_language,omitempty"`
 ExceptLanguage []string `json:"except_language,omitempty"`
 Tags string `json:"tags,omitempty"`

@@ -274,6 +276,8 @@ type FilterUpdate struct {
 ExceptCategories *string `json:"except_categories,omitempty"`
 MatchUploaders *string `json:"match_uploaders,omitempty"`
 ExceptUploaders *string `json:"except_uploaders,omitempty"`
+MatchRecordLabels *string `json:"match_record_labels,omitempty"`
+ExceptRecordLabels *string `json:"except_record_labels,omitempty"`
 MatchLanguage *[]string `json:"match_language,omitempty"`
 ExceptLanguage *[]string `json:"except_language,omitempty"`
 Tags *string `json:"tags,omitempty"`

@@ -364,6 +368,9 @@ func (f *Filter) Sanitize() error {
 f.Artists = sanitize.FilterString(f.Artists)
 f.Albums = sanitize.FilterString(f.Albums)

+f.MatchRecordLabels = sanitize.FilterString(f.MatchRecordLabels)
+f.ExceptRecordLabels = sanitize.FilterString(f.ExceptRecordLabels)
+
 return nil
 }

@@ -465,6 +472,10 @@ func (f *Filter) CheckFilter(r *Release) (*RejectionReasons, bool) {
 // f.checkUploader sets the rejections
 }

+if (f.MatchRecordLabels != "" || f.ExceptRecordLabels != "") && !f.checkRecordLabel(r) {
+// f.checkRecordLabel sets the rejections
+}
+
 if len(f.MatchLanguage) > 0 && !sliceContainsSlice(r.Language, f.MatchLanguage) {
 f.RejectReasons.Add("match language", r.Language, f.MatchLanguage)
 }

@@ -749,6 +760,26 @@ func (f *Filter) checkUploader(r *Release) bool {
 return true
 }

+// checkRecordLabel checks if the record label is within the given list.
+// if the haystack is not empty but the record label is, then a further
+// investigation is needed
+func (f *Filter) checkRecordLabel(r *Release) bool {
+if r.RecordLabel == "" && (r.Indexer.Identifier == "redacted" || r.Indexer.Identifier == "ops") {
+r.AdditionalRecordLabelCheckRequired = true
+return true
+}
+
+if f.MatchRecordLabels != "" && !contains(r.RecordLabel, f.MatchRecordLabels) {
+f.RejectReasons.Add("match record labels", r.RecordLabel, f.MatchRecordLabels)
+}
+
+if f.ExceptRecordLabels != "" && contains(r.RecordLabel, f.ExceptRecordLabels) {
+f.RejectReasons.Add("except record labels", r.RecordLabel, f.ExceptRecordLabels)
+}
+
+return true
+}
+
 // IsPerfectFLAC Perfect is "CD FLAC Cue Log 100% Lossless or 24bit Lossless"
 func (f *Filter) IsPerfectFLAC(r *Release) ([]string, bool) {
 rejections := []string{}

@@ -1200,6 +1231,20 @@ func (f *Filter) CheckUploader(uploader string) (bool, error) {
 return true, nil
 }

+func (f *Filter) CheckRecordLabel(recordLabel string) (bool, error) {
+if f.MatchRecordLabels != "" && !contains(recordLabel, f.MatchRecordLabels) {
+f.RejectReasons.Add("match record label", recordLabel, f.MatchRecordLabels)
+return false, nil
+}
+
+if f.ExceptRecordLabels != "" && contains(recordLabel, f.ExceptRecordLabels) {
+f.RejectReasons.Add("except record label", recordLabel, f.ExceptRecordLabels)
+return false, nil
+}
+
+return true, nil
+}
+
 // parsedSizeLimits parses filter bytes limits (expressed as a string) into a
 // uint64 number of bytes. The bounds are returned as *uint64 number of bytes,
 // with "nil" representing "no limit". We break out filter size limit parsing
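The checkRecordLabel and CheckRecordLabel methods above compare the release's record label against comma-separated filter lists via the package's existing contains helper. As a rough, self-contained illustration of that kind of comma-separated, case-insensitive matching (the real helper may differ, for example by also supporting wildcard patterns):

package main

import (
	"fmt"
	"strings"
)

// containsIgnoreCase is an illustrative stand-in for the contains helper
// referenced above: it splits the filter value on commas and compares each
// entry to the candidate case-insensitively.
func containsIgnoreCase(tag string, filterList string) bool {
	for _, entry := range strings.Split(filterList, ",") {
		if strings.EqualFold(strings.TrimSpace(entry), strings.TrimSpace(tag)) {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(containsIgnoreCase("Anjunabeats", "Anjunabeats, Armada")) // true
	fmt.Println(containsIgnoreCase("FAJo Music", "Anjunabeats, Armada"))  // false
}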
@@ -429,11 +429,12 @@ func (p *IndexerIRCParse) Parse(def *IndexerDefinition, vars map[string]string,
 }

 type TorrentBasic struct {
 Id string `json:"Id"`
 TorrentId string `json:"TorrentId,omitempty"`
 InfoHash string `json:"InfoHash"`
 Size string `json:"Size"`
 Uploader string `json:"Uploader"`
+RecordLabel string `json:"RecordLabel"`
 }

 func (t TorrentBasic) ReleaseSizeBytes() uint64 {
@@ -32,7 +32,7 @@ type Macro struct {
 CurrentMonth int
 CurrentSecond int
 CurrentYear int
 Description string
 DownloadUrl string
 Episode int
 FilterID int

@@ -78,6 +78,7 @@ type Macro struct {
 TorrentTmpFile string
 Type string
 Uploader string
+RecordLabel string
 Website string
 Year int
 Month int

@@ -150,6 +151,7 @@ func NewMacro(release Release) Macro {
 TorrentTmpFile: release.TorrentTmpFile,
 Type: release.Type,
 Uploader: release.Uploader,
+RecordLabel: release.RecordLabel,
 Website: release.Website,
 Year: release.Year,
 Month: release.Month,
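With RecordLabel added to Macro and populated in NewMacro, the value becomes available to action macros the same way as Uploader or Website. A minimal sketch of rendering such a macro with Go's text/template (the struct here is trimmed down and the template string is only an example, not an autobrr default):

package main

import (
	"os"
	"text/template"
)

// Trimmed-down illustration of the macro struct shown above.
type Macro struct {
	TorrentName string
	Uploader    string
	RecordLabel string // newly exposed by this commit
}

func main() {
	m := Macro{
		TorrentName: "Artist - Album (2024) [FLAC]",
		Uploader:    "uploader",
		RecordLabel: "FAJo Music",
	}

	// Example macro string, e.g. for a webhook payload or exec argument.
	tmpl := template.Must(template.New("macro").Parse(
		"{{ .TorrentName }} released on {{ .RecordLabel }}\n"))
	_ = tmpl.Execute(os.Stdout, m)
}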
@@ -46,71 +46,73 @@ type ReleaseRepo interface {
 }

 type Release struct {
 ID int64 `json:"id"`
 FilterStatus ReleaseFilterStatus `json:"filter_status"`
 Rejections []string `json:"rejections"`
 Indexer IndexerMinimal `json:"indexer"`
 FilterName string `json:"filter"`
 Protocol ReleaseProtocol `json:"protocol"`
 Implementation ReleaseImplementation `json:"implementation"` // irc, rss, api
 Timestamp time.Time `json:"timestamp"`
 AnnounceType AnnounceType `json:"announce_type"`
 InfoURL string `json:"info_url"`
 DownloadURL string `json:"download_url"`
 MagnetURI string `json:"-"`
 GroupID string `json:"group_id"`
 TorrentID string `json:"torrent_id"`
 TorrentTmpFile string `json:"-"`
 TorrentDataRawBytes []byte `json:"-"`
 TorrentHash string `json:"-"`
 TorrentName string `json:"name"` // full release name
 Size uint64 `json:"size"`
 Title string `json:"title"` // Parsed title
 Description string `json:"-"`
 Category string `json:"category"`
 Categories []string `json:"categories,omitempty"`
 Season int `json:"season"`
 Episode int `json:"episode"`
 Year int `json:"year"`
 Month int `json:"month"`
 Day int `json:"day"`
 Resolution string `json:"resolution"`
 Source string `json:"source"`
 Codec []string `json:"codec"`
 Container string `json:"container"`
 HDR []string `json:"hdr"`
 Audio []string `json:"-"`
 AudioChannels string `json:"-"`
 AudioFormat string `json:"-"`
 Bitrate string `json:"-"`
 Group string `json:"group"`
 Region string `json:"-"`
 Language []string `json:"-"`
 Proper bool `json:"proper"`
 Repack bool `json:"repack"`
 Website string `json:"website"`
 Artists string `json:"-"`
 Type string `json:"type"` // Album,Single,EP
 LogScore int `json:"-"`
 HasCue bool `json:"-"`
 HasLog bool `json:"-"`
 Origin string `json:"origin"` // P2P, Internal
 Tags []string `json:"-"`
 ReleaseTags string `json:"-"`
 Freeleech bool `json:"-"`
 FreeleechPercent int `json:"-"`
 Bonus []string `json:"-"`
 Uploader string `json:"uploader"`
+RecordLabel string `json:"record_label"`
 PreTime string `json:"pre_time"`
 Other []string `json:"-"`
 RawCookie string `json:"-"`
 Seeders int `json:"-"`
 Leechers int `json:"-"`
 AdditionalSizeCheckRequired bool `json:"-"`
 AdditionalUploaderCheckRequired bool `json:"-"`
+AdditionalRecordLabelCheckRequired bool `json:"-"`
 FilterID int `json:"-"`
 Filter *Filter `json:"-"`
 ActionStatus []ReleaseActionStatus `json:"action_status"`
 }

 func (r *Release) Raw(s string) rls.Release {

@@ -846,6 +848,10 @@ func (r *Release) MapVars(def *IndexerDefinition, varMap map[string]string) erro
 r.Uploader = uploader
 }

+if recordLabel, err := getStringMapValue(varMap, "recordLabel"); err == nil {
+r.RecordLabel = recordLabel
+}
+
 if torrentSize, err := getStringMapValue(varMap, "torrentSize"); err == nil {
 // Some indexers like BTFiles announces size with comma. Humanize does not handle that well and strips it.
 torrentSize = strings.Replace(torrentSize, ",", ".", 1)
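For IRC announces, the record label reaches the release through the same varMap mechanism as the other fields: if an indexer definition captures a recordLabel variable, MapVars copies it onto the release. A small self-contained sketch of that lookup pattern, assuming getStringMapValue behaves like a plain map lookup that errors on a missing key:

package main

import (
	"errors"
	"fmt"
)

// Assumed behaviour of the helper used above: return the value, or an error
// when the key is absent.
func getStringMapValue(m map[string]string, key string) (string, error) {
	if v, ok := m[key]; ok {
		return v, nil
	}
	return "", errors.New("key not found: " + key)
}

func main() {
	varMap := map[string]string{"recordLabel": "FAJo Music"}

	recordLabel := ""
	if v, err := getStringMapValue(varMap, "recordLabel"); err == nil {
		recordLabel = v // mirrors r.RecordLabel = recordLabel above
	}
	fmt.Println(recordLabel)
}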
@@ -43,6 +43,7 @@ type Service interface {
 Delete(ctx context.Context, filterID int) error
 AdditionalSizeCheck(ctx context.Context, f *domain.Filter, release *domain.Release) (bool, error)
 AdditionalUploaderCheck(ctx context.Context, f *domain.Filter, release *domain.Release) (bool, error)
+AdditionalRecordLabelCheck(ctx context.Context, f *domain.Filter, release *domain.Release) (bool, error)
 CheckSmartEpisodeCanDownload(ctx context.Context, params *domain.SmartEpisodeParams) (bool, error)
 GetDownloadsByFilterId(ctx context.Context, filterID int) (*domain.FilterDownloads, error)
 }

@@ -462,6 +463,21 @@ func (s *service) CheckFilter(ctx context.Context, f *domain.Filter, release *do
 }
 }

+if release.AdditionalRecordLabelCheckRequired {
+l.Debug().Msgf("(%s) additional record label check required", f.Name)
+
+ok, err := s.AdditionalRecordLabelCheck(ctx, f, release)
+if err != nil {
+l.Error().Err(err).Msgf("(%s) additional record label check error", f.Name)
+return false, err
+}
+
+if !ok {
+l.Trace().Msgf("(%s) additional record label check not matching what filter wanted", f.Name)
+return false, nil
+}
+}
+
 // run external filters
 if f.External != nil {
 externalOk, err := s.RunExternalFilters(ctx, f, f.External, release)

@@ -503,7 +519,7 @@ func (s *service) AdditionalSizeCheck(ctx context.Context, f *domain.Filter, rel

 switch release.Indexer.Identifier {
 case "btn", "ggn", "redacted", "ops", "mock":
-if (release.Size == 0 && release.AdditionalSizeCheckRequired) || (release.Uploader == "" && release.AdditionalUploaderCheckRequired) {
+if (release.Size == 0 && release.AdditionalSizeCheckRequired) || (release.Uploader == "" && release.AdditionalUploaderCheckRequired) || (release.RecordLabel == "" && release.AdditionalRecordLabelCheckRequired) {
 l.Trace().Msgf("(%s) preparing to check size via api", f.Name)

 torrentInfo, err := s.apiService.GetTorrentByID(ctx, release.Indexer.Identifier, release.TorrentID)

@@ -522,6 +538,10 @@ func (s *service) AdditionalSizeCheck(ctx context.Context, f *domain.Filter, rel
 if release.Uploader == "" {
 release.Uploader = torrentInfo.Uploader
 }
+
+if release.RecordLabel == "" {
+release.RecordLabel = torrentInfo.RecordLabel
+}
 }

 default:

@@ -547,7 +567,7 @@ func (s *service) AdditionalSizeCheck(ctx context.Context, f *domain.Filter, rel

 if !sizeOk {
 l.Debug().Msgf("(%s) filter did not match after additional size check, trying next", f.Name)
-return false, err
+return false, nil
 }

 return true, nil

@@ -575,13 +595,13 @@ func (s *service) AdditionalUploaderCheck(ctx context.Context, f *domain.Filter,

 if !uploaderOk {
 l.Debug().Msgf("(%s) filter did not match after additional uploaders check, trying next", f.Name)
-return false, err
+return false, nil
 }

 return true, nil
 }

-l.Debug().Msgf("(%s) additional api size check required", f.Name)
+l.Debug().Msgf("(%s) additional api uploader check required", f.Name)

 switch release.Indexer.Identifier {
 case "redacted", "ops", "mock":

@@ -598,9 +618,10 @@ func (s *service) AdditionalUploaderCheck(ctx context.Context, f *domain.Filter,
 torrentSize := torrentInfo.ReleaseSizeBytes()
 if release.Size == 0 && torrentSize > 0 {
 release.Size = torrentSize
 }

-// reset AdditionalSizeCheckRequired to not re-trigger check
-release.AdditionalSizeCheckRequired = false
+if release.RecordLabel == "" {
+release.RecordLabel = torrentInfo.RecordLabel
+}

 if release.Uploader == "" {

@@ -622,9 +643,88 @@ func (s *service) AdditionalUploaderCheck(ctx context.Context, f *domain.Filter,

 if !uploaderOk {
 l.Debug().Msgf("(%s) filter did not match after additional uploaders check, trying next", f.Name)
 return false, nil
 }

 return true, nil
 }

+func (s *service) AdditionalRecordLabelCheck(ctx context.Context, f *domain.Filter, release *domain.Release) (ok bool, err error) {
+defer func() {
+// try recover panic if anything went wrong with API or size checks
+errors.RecoverPanic(recover(), &err)
+if err != nil {
+ok = false
+}
+}()
+
+// do additional check against indexer api
+l := s.log.With().Str("method", "AdditionalRecordLabelCheck").Logger()
+
+// if record label was fetched before during size check or uploader check we check it and return early
+if release.RecordLabel != "" {
+recordLabelOk, err := f.CheckRecordLabel(release.RecordLabel)
+if err != nil {
+l.Error().Err(err).Msgf("(%s) error comparing release and record label", f.Name)
+return false, err
+}
+
+// reset AdditionalRecordLabelCheckRequired to not re-trigger check
+release.AdditionalRecordLabelCheckRequired = false
+
+if !recordLabelOk {
+l.Debug().Msgf("(%s) filter did not match after additional record label check, trying next", f.Name)
+return false, nil
+}
+
+return true, nil
+}
+
+l.Debug().Msgf("(%s) additional api record label check required", f.Name)
+
+switch release.Indexer.Identifier {
+case "redacted", "ops", "mock":
+l.Trace().Msgf("(%s) preparing to check via api", f.Name)
+
+torrentInfo, err := s.apiService.GetTorrentByID(ctx, release.Indexer.Identifier, release.TorrentID)
+if err != nil || torrentInfo == nil {
+l.Error().Err(err).Msgf("(%s) could not get torrent info from api: '%s' from: %s", f.Name, release.TorrentID, release.Indexer.Identifier)
+return false, err
+}
+
+l.Debug().Msgf("(%s) got torrent info from api: %+v", f.Name, torrentInfo)
+
+torrentSize := torrentInfo.ReleaseSizeBytes()
+if release.Size == 0 && torrentSize > 0 {
+release.Size = torrentSize
+}
+
+if release.Uploader == "" {
+release.Uploader = torrentInfo.Uploader
+}
+
+if release.RecordLabel == "" {
+release.RecordLabel = torrentInfo.RecordLabel
+}
+
+default:
+return false, errors.New("additional record label check not supported for this indexer: %s", release.Indexer.Identifier)
+}
+
+recordLabelOk, err := f.CheckRecordLabel(release.RecordLabel)
+if err != nil {
+l.Error().Err(err).Msgf("(%s) error comparing release and record label", f.Name)
+return false, err
+}
+
+// reset AdditionalRecordLabelCheckRequired to not re-trigger check
+release.AdditionalRecordLabelCheckRequired = false
+
+if !recordLabelOk {
+l.Debug().Msgf("(%s) filter did not match after additional record label check, trying next", f.Name)
+return false, nil
+}
+
+return true, nil
+}
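Taken together, the service changes work like this: when a RED or OPS announce carries no record label, checkRecordLabel only flags the release, and CheckFilter later calls AdditionalRecordLabelCheck, which fetches the torrent from the indexer API, backfills size, uploader and record label, and re-runs the comparison. A condensed, self-contained sketch of that flow (the types and the fetch function are stand-ins, not the real service):

package main

import "fmt"

type release struct {
	Indexer                            string
	TorrentID                          string
	RecordLabel                        string
	AdditionalRecordLabelCheckRequired bool
}

// fetchTorrentInfo stands in for apiService.GetTorrentByID.
func fetchTorrentInfo(indexer, torrentID string) (recordLabel string) {
	return "FAJo Music" // pretend the indexer API returned this
}

func checkRecordLabel(r *release, matchList string) bool {
	if r.RecordLabel == "" && (r.Indexer == "redacted" || r.Indexer == "ops") {
		// defer the decision until the API has been asked
		r.AdditionalRecordLabelCheckRequired = true
		return true
	}
	return r.RecordLabel == matchList // simplified comparison
}

func main() {
	r := &release{Indexer: "redacted", TorrentID: "29991962"}

	ok := checkRecordLabel(r, "FAJo Music")
	if ok && r.AdditionalRecordLabelCheckRequired {
		// second pass: backfill from the API, then compare for real
		r.RecordLabel = fetchTorrentInfo(r.Indexer, r.TorrentID)
		r.AdditionalRecordLabelCheckRequired = false
		ok = checkRecordLabel(r, "FAJo Music")
	}
	fmt.Println("record label accepted:", ok)
}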
@@ -105,32 +105,32 @@ type Group struct {
 }

 type Torrent struct {
 Id int `json:"id"`
 InfoHash string `json:"infoHash"`
 Media string `json:"media"`
 Format string `json:"format"`
 Encoding string `json:"encoding"`
 Remastered bool `json:"remastered"`
 RemasterYear int `json:"remasterYear"`
 RemasterTitle string `json:"remasterTitle"`
-RemasterRecordLabel string `json:"remasterRecordLabel"`
-RemasterCatalogueNumber string `json:"remasterCatalogueNumber"`
+RecordLabel string `json:"remasterRecordLabel"` // remasterRecordLabel is the record label of the release, which should be used instead of the record label of the group
+CatalogueNumber string `json:"remasterCatalogueNumber"` // remasterCatalogueNumber is the catalogue number of the release, which should be used instead of the catalogue number of the group
 Scene bool `json:"scene"`
 HasLog bool `json:"hasLog"`
 HasCue bool `json:"hasCue"`
 LogScore int `json:"logScore"`
 FileCount int `json:"fileCount"`
 Size int `json:"size"`
 Seeders int `json:"seeders"`
 Leechers int `json:"leechers"`
 Snatched int `json:"snatched"`
 FreeTorrent string `json:"freeTorrent"`
 Time string `json:"time"`
 Description string `json:"description"`
 FileList string `json:"fileList"`
 FilePath string `json:"filePath"`
 UserId int `json:"userId"`
 Username string `json:"username"`
 }

 type GetIndexResponse struct {

@@ -243,10 +243,11 @@ func (c *Client) GetTorrentByID(ctx context.Context, torrentID string) (*domain.
 }

 res := &domain.TorrentBasic{
 Id: strconv.Itoa(response.Response.Torrent.Id),
 InfoHash: response.Response.Torrent.InfoHash,
 Size: strconv.Itoa(response.Response.Torrent.Size),
 Uploader: response.Response.Torrent.Username,
+RecordLabel: response.Response.Torrent.RecordLabel,
 }

 return res, nil
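The struct tag change above is the heart of the API side: the OPS (and RED) torrent payloads expose the release-level label as remasterRecordLabel, and that field now decodes into RecordLabel instead of the previously unused RemasterRecordLabel field. A small sketch of that decoding against a trimmed-down payload shape:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed-down illustration of the torrent payload; only the fields relevant
// to this commit are included.
type torrent struct {
	Id          int    `json:"id"`
	Username    string `json:"username"`
	RecordLabel string `json:"remasterRecordLabel"` // release-level label
}

func main() {
	raw := []byte(`{"id": 29991962, "username": "Uploader", "remasterRecordLabel": "FAJo Music"}`)

	var t torrent
	if err := json.Unmarshal(raw, &t); err != nil {
		panic(err)
	}
	fmt.Printf("%d uploaded by %s on %s\n", t.Id, t.Username, t.RecordLabel)
}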
@@ -72,10 +72,11 @@ func TestOrpheusClient_GetTorrentByID(t *testing.T) {
 },
 args: args{torrentID: "2156788"},
 want: &domain.TorrentBasic{
 Id: "2156788",
 InfoHash: "",
 Size: "255299244",
 Uploader: "uploader",
+RecordLabel: "FAJo Music",
 },
 wantErr: "",
 },
@@ -105,34 +105,34 @@ type Group struct {
 }

 type Torrent struct {
 Id int `json:"id"`
 InfoHash string `json:"infoHash"`
 Media string `json:"media"`
 Format string `json:"format"`
 Encoding string `json:"encoding"`
 Remastered bool `json:"remastered"`
 RemasterYear int `json:"remasterYear"`
 RemasterTitle string `json:"remasterTitle"`
-RemasterRecordLabel string `json:"remasterRecordLabel"`
-RemasterCatalogueNumber string `json:"remasterCatalogueNumber"`
+RecordLabel string `json:"remasterRecordLabel"` // remasterRecordLabel is the record label of the release, which should be used instead of the record label of the group
+CatalogueNumber string `json:"remasterCatalogueNumber"` // remasterCatalogueNumber is the catalogue number of the release, which should be used instead of the catalogue number of the group
 Scene bool `json:"scene"`
 HasLog bool `json:"hasLog"`
 HasCue bool `json:"hasCue"`
 LogScore int `json:"logScore"`
 FileCount int `json:"fileCount"`
 Size int `json:"size"`
 Seeders int `json:"seeders"`
 Leechers int `json:"leechers"`
 Snatched int `json:"snatched"`
 FreeTorrent bool `json:"freeTorrent"`
 IsNeutralleech bool `json:"isNeutralleech"`
 IsFreeload bool `json:"isFreeload"`
 Time string `json:"time"`
 Description string `json:"description"`
 FileList string `json:"fileList"`
 FilePath string `json:"filePath"`
 UserId int `json:"userId"`
 Username string `json:"username"`
 }

 type IndexResponse struct {

@@ -231,10 +231,11 @@ func (c *Client) GetTorrentByID(ctx context.Context, torrentID string) (*domain.
 }

 return &domain.TorrentBasic{
 Id: strconv.Itoa(response.Response.Torrent.Id),
 InfoHash: response.Response.Torrent.InfoHash,
 Size: strconv.Itoa(response.Response.Torrent.Size),
 Uploader: response.Response.Torrent.Username,
+RecordLabel: response.Response.Torrent.RecordLabel,
 }, nil

 }
@@ -72,10 +72,11 @@ func TestREDClient_GetTorrentByID(t *testing.T) {
 },
 args: args{torrentID: "29991962"},
 want: &domain.TorrentBasic{
 Id: "29991962",
 InfoHash: "B2BABD3A361EAFC6C4E9142C422DF7DDF5D7E163",
 Size: "527749302",
 Uploader: "Uploader",
+RecordLabel: "FAJo Music",
 },
 wantErr: "",
 },
pkg/red/testdata/get_torrent_by_id.json (vendored, 2 changes)

@@ -52,7 +52,7 @@
 "remastered": false,
 "remasterYear": 0,
 "remasterTitle": "",
-"remasterRecordLabel": "",
+"remasterRecordLabel": "FAJo Music",
 "remasterCatalogueNumber": "",
 "scene": true,
 "hasLog": false,
@@ -431,6 +431,8 @@ export const FilterDetails = () => {
 except_tags_match_logic: filter.except_tags_match_logic,
 match_uploaders: filter.match_uploaders,
 except_uploaders: filter.except_uploaders,
+match_record_labels: filter.match_record_labels,
+except_record_labels: filter.except_record_labels,
 match_language: filter.match_language || [],
 except_language: filter.except_language || [],
 freeleech: filter.freeleech,
@@ -43,6 +43,30 @@ export const Music = () => {
 </div>
 }
 />
+<TextAreaAutoResize
+name="match_record_labels"
+label="Match record labels"
+columns={6}
+placeholder="eg. Anjunabeats, Armada"
+tooltip={
+<div>
+<p>Comma separated list of record labels to match. Only Orpheus and Redacted support this.</p>
+<DocsLink href="https://autobrr.com/filters#music" />
+</div>
+}
+/>
+<TextAreaAutoResize
+name="except_record_labels"
+label="Except record labels"
+columns={6}
+placeholder="eg. Anjunadeep, Armind"
+tooltip={
+<div>
+<p>Comma separated list of record labels to ignore (takes priority over Match record labels). Only Orpheus and Redacted support this.</p>
+<DocsLink href="https://autobrr.com/filters#music" />
+</div>
+}
+/>
 </FilterLayout>
 </FilterSection>

@@ -168,8 +192,8 @@ export const Music = () => {
 <div className="col-span-12 flex items-center justify-center">
 <span className="border-b border-gray-150 dark:border-gray-750 w-full" />
 <span className="flex mx-2 shrink-0 text-lg font-bold uppercase tracking-wide text-gray-700 dark:text-gray-200">
 OR
 </span>
 <span className="border-b border-gray-150 dark:border-gray-750 w-full" />
 </div>

@@ -188,8 +212,8 @@ export const Music = () => {
 />

 <span className="col-span-12 sm:col-span-6 self-center ml-0 text-center sm:text-left text-sm text-gray-500 dark:text-gray-425 underline underline-offset-2">
 This is what you want in 90% of cases (instead of options above).
 </span>
 </FilterLayout>
 </FilterSection>
 </FilterPage>
web/src/types/Filter.d.ts (vendored, 2 changes)

@@ -62,6 +62,8 @@ interface Filter {
 except_categories: string;
 match_uploaders: string;
 except_uploaders: string;
+match_record_labels: string;
+except_record_labels: string;
 match_language: string[];
 except_language: string[];
 tags: string;