Mirror of https://github.com/idanoo/autobrr, synced 2025-07-22 16:29:12 +00:00
feat(filters): RED and OPS fetch record label from API (#1881)
* feat(filters): RED and OPS fetch record label from API
* test: add record label to RED and OPS test data
* refactor: record label check

Co-authored-by: ze0s <ze0s@riseup.net>
parent 221bc35371
commit d153ac44b8
16 changed files with 380 additions and 154 deletions
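In short, filters gain two comma-separated fields, match_record_labels and except_record_labels, and a RED or OPS announce that arrives without a label is re-checked against the indexer API before the filter gives a final verdict. A self-contained sketch of the accept/reject rule (the field names and comma-separated semantics come from the diff below; containsAny is an illustrative stand-in, not autobrr's own list-matching helper):

package main

import (
	"fmt"
	"strings"
)

// containsAny reports whether value appears in a comma-separated list,
// case-insensitively. Illustrative stand-in for autobrr's own matching helper.
func containsAny(value, list string) bool {
	for _, want := range strings.Split(list, ",") {
		if strings.EqualFold(strings.TrimSpace(want), value) {
			return true
		}
	}
	return false
}

// checkRecordLabel is a simplified version of the rule behind the new
// CheckRecordLabel method: reject when a match list is set and the label is
// not in it, or when the label appears in the except list.
func checkRecordLabel(label, match, except string) bool {
	if match != "" && !containsAny(label, match) {
		return false
	}
	if except != "" && containsAny(label, except) {
		return false
	}
	return true
}

func main() {
	fmt.Println(checkRecordLabel("Anjunabeats", "Anjunabeats, Armada", "")) // true
	fmt.Println(checkRecordLabel("Anjunadeep", "", "Anjunadeep, Armind"))   // false
}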
@ -226,6 +226,8 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
     "f.except_categories",
     "f.match_uploaders",
     "f.except_uploaders",
+    "f.match_record_labels",
+    "f.except_record_labels",
     "f.match_language",
     "f.except_language",
     "f.tags",
@ -261,7 +263,7 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
     var f domain.Filter

     // filter
-    var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, months, days, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString
+    var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, months, days, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, matchRecordLabels, exceptRecordLabels, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString
     var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
     var delay, maxDownloads, logScore sql.NullInt32
@ -319,6 +321,8 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
     &exceptCategories,
     &matchUploaders,
     &exceptUploaders,
+    &matchRecordLabels,
+    &exceptRecordLabels,
     pq.Array(&f.MatchLanguage),
     pq.Array(&f.ExceptLanguage),
     &tags,
@ -372,6 +376,8 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
     f.ExceptCategories = exceptCategories.String
     f.MatchUploaders = matchUploaders.String
     f.ExceptUploaders = exceptUploaders.String
+    f.MatchRecordLabels = matchRecordLabels.String
+    f.ExceptRecordLabels = exceptRecordLabels.String
     f.Tags = tags.String
     f.ExceptTags = exceptTags.String
     f.TagsMatchLogic = tagsMatchLogic.String
@ -444,6 +450,8 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
     "f.except_categories",
     "f.match_uploaders",
     "f.except_uploaders",
+    "f.match_record_labels",
+    "f.except_record_labels",
     "f.match_language",
     "f.except_language",
     "f.tags",
@ -484,7 +492,7 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
     for rows.Next() {
         var f domain.Filter

-        var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, months, days, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString
+        var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, months, days, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, matchRecordLabels, exceptRecordLabels, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString
         var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
         var delay, maxDownloads, logScore sql.NullInt32
@ -542,6 +550,8 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
     &exceptCategories,
     &matchUploaders,
     &exceptUploaders,
+    &matchRecordLabels,
+    &exceptRecordLabels,
     pq.Array(&f.MatchLanguage),
     pq.Array(&f.ExceptLanguage),
     &tags,
@ -591,6 +601,8 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
     f.ExceptCategories = exceptCategories.String
     f.MatchUploaders = matchUploaders.String
     f.ExceptUploaders = exceptUploaders.String
+    f.MatchRecordLabels = matchRecordLabels.String
+    f.ExceptRecordLabels = exceptRecordLabels.String
     f.Tags = tags.String
     f.ExceptTags = exceptTags.String
     f.TagsMatchLogic = tagsMatchLogic.String
@ -738,6 +750,8 @@ func (r *FilterRepo) Store(ctx context.Context, filter *domain.Filter) error {
     "except_categories",
     "match_uploaders",
     "except_uploaders",
+    "match_record_labels",
+    "except_record_labels",
     "match_language",
     "except_language",
     "tags",
@ -804,6 +818,8 @@ func (r *FilterRepo) Store(ctx context.Context, filter *domain.Filter) error {
     filter.ExceptCategories,
     filter.MatchUploaders,
     filter.ExceptUploaders,
+    filter.MatchRecordLabels,
+    filter.ExceptRecordLabels,
     pq.Array(filter.MatchLanguage),
     pq.Array(filter.ExceptLanguage),
     filter.Tags,
@ -888,6 +904,8 @@ func (r *FilterRepo) Update(ctx context.Context, filter *domain.Filter) error {
     Set("except_categories", filter.ExceptCategories).
     Set("match_uploaders", filter.MatchUploaders).
     Set("except_uploaders", filter.ExceptUploaders).
+    Set("match_record_labels", filter.MatchRecordLabels).
+    Set("except_record_labels", filter.ExceptRecordLabels).
     Set("match_language", pq.Array(filter.MatchLanguage)).
     Set("except_language", pq.Array(filter.ExceptLanguage)).
     Set("tags", filter.Tags).
@ -1063,6 +1081,12 @@ func (r *FilterRepo) UpdatePartial(ctx context.Context, filter domain.FilterUpda
     if filter.ExceptUploaders != nil {
         q = q.Set("except_uploaders", filter.ExceptUploaders)
     }
+    if filter.MatchRecordLabels != nil {
+        q = q.Set("match_record_labels", filter.MatchRecordLabels)
+    }
+    if filter.ExceptRecordLabels != nil {
+        q = q.Set("except_record_labels", filter.ExceptRecordLabels)
+    }
     if filter.MatchLanguage != nil {
         q = q.Set("match_language", pq.Array(filter.MatchLanguage))
     }
@ -144,6 +144,8 @@ CREATE TABLE filter
     except_categories TEXT,
     match_uploaders TEXT,
     except_uploaders TEXT,
+    match_record_labels TEXT,
+    except_record_labels TEXT,
     match_language TEXT [] DEFAULT '{}',
     except_language TEXT [] DEFAULT '{}',
     tags TEXT,
@ -1066,5 +1068,11 @@ CREATE TABLE list_filter
     FOREIGN KEY (filter_id) REFERENCES filter(id) ON DELETE CASCADE,
     PRIMARY KEY (list_id, filter_id)
 );
+`,
+`ALTER TABLE filter
+    ADD COLUMN match_record_labels TEXT DEFAULT '';
+
+ALTER TABLE filter
+    ADD COLUMN except_record_labels TEXT DEFAULT '';
 `,
 }
@ -144,6 +144,8 @@ CREATE TABLE filter
     except_categories TEXT,
     match_uploaders TEXT,
     except_uploaders TEXT,
+    match_record_labels TEXT,
+    except_record_labels TEXT,
     match_language TEXT [] DEFAULT '{}',
     except_language TEXT [] DEFAULT '{}',
     tags TEXT,
@ -1708,5 +1710,11 @@ CREATE TABLE list_filter
     FOREIGN KEY (filter_id) REFERENCES filter(id) ON DELETE CASCADE,
     PRIMARY KEY (list_id, filter_id)
 );
+`,
+`ALTER TABLE filter
+    ADD COLUMN match_record_labels TEXT DEFAULT '';
+
+ALTER TABLE filter
+    ADD COLUMN except_record_labels TEXT DEFAULT '';
 `,
 }
@ -144,6 +144,8 @@ type Filter struct {
     ExceptCategories string `json:"except_categories,omitempty"`
     MatchUploaders string `json:"match_uploaders,omitempty"`
     ExceptUploaders string `json:"except_uploaders,omitempty"`
+    MatchRecordLabels string `json:"match_record_labels,omitempty"`
+    ExceptRecordLabels string `json:"except_record_labels,omitempty"`
     MatchLanguage []string `json:"match_language,omitempty"`
     ExceptLanguage []string `json:"except_language,omitempty"`
     Tags string `json:"tags,omitempty"`
@ -274,6 +276,8 @@ type FilterUpdate struct {
     ExceptCategories *string `json:"except_categories,omitempty"`
     MatchUploaders *string `json:"match_uploaders,omitempty"`
     ExceptUploaders *string `json:"except_uploaders,omitempty"`
+    MatchRecordLabels *string `json:"match_record_labels,omitempty"`
+    ExceptRecordLabels *string `json:"except_record_labels,omitempty"`
     MatchLanguage *[]string `json:"match_language,omitempty"`
     ExceptLanguage *[]string `json:"except_language,omitempty"`
     Tags *string `json:"tags,omitempty"`
@ -364,6 +368,9 @@ func (f *Filter) Sanitize() error {
     f.Artists = sanitize.FilterString(f.Artists)
     f.Albums = sanitize.FilterString(f.Albums)

+    f.MatchRecordLabels = sanitize.FilterString(f.MatchRecordLabels)
+    f.ExceptRecordLabels = sanitize.FilterString(f.ExceptRecordLabels)
+
     return nil
 }
@ -465,6 +472,10 @@ func (f *Filter) CheckFilter(r *Release) (*RejectionReasons, bool) {
         // f.checkUploader sets the rejections
     }

+    if (f.MatchRecordLabels != "" || f.ExceptRecordLabels != "") && !f.checkRecordLabel(r) {
+        // f.checkRecordLabel sets the rejections
+    }
+
     if len(f.MatchLanguage) > 0 && !sliceContainsSlice(r.Language, f.MatchLanguage) {
         f.RejectReasons.Add("match language", r.Language, f.MatchLanguage)
     }
@ -749,6 +760,26 @@ func (f *Filter) checkUploader(r *Release) bool {
     return true
 }

+// checkRecordLabel checks if the record label is within the given list.
+// if the haystack is not empty but the record label is, then a further
+// investigation is needed
+func (f *Filter) checkRecordLabel(r *Release) bool {
+    if r.RecordLabel == "" && (r.Indexer.Identifier == "redacted" || r.Indexer.Identifier == "ops") {
+        r.AdditionalRecordLabelCheckRequired = true
+        return true
+    }
+
+    if f.MatchRecordLabels != "" && !contains(r.RecordLabel, f.MatchRecordLabels) {
+        f.RejectReasons.Add("match record labels", r.RecordLabel, f.MatchRecordLabels)
+    }
+
+    if f.ExceptRecordLabels != "" && contains(r.RecordLabel, f.ExceptRecordLabels) {
+        f.RejectReasons.Add("except record labels", r.RecordLabel, f.ExceptRecordLabels)
+    }
+
+    return true
+}
+
 // IsPerfectFLAC Perfect is "CD FLAC Cue Log 100% Lossless or 24bit Lossless"
 func (f *Filter) IsPerfectFLAC(r *Release) ([]string, bool) {
     rejections := []string{}
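Worth noting above: when a RED or OPS announce has no label yet, checkRecordLabel passes provisionally and sets AdditionalRecordLabelCheckRequired, leaving the real decision to the service layer (see the filter service hunks further down) once the label has been fetched from the API. A compact sketch of that two-step flow, with stand-in types and a hypothetical fetchLabel callback instead of the real API client:

package main

import "fmt"

// release is a stand-in for just the fields this flow needs.
type release struct {
	indexer           string
	recordLabel       string
	needsLabelFromAPI bool
}

// preCheck mirrors checkRecordLabel's early exit: no label yet on a RED/OPS
// announce means "pass for now" and mark the release for an API follow-up.
func preCheck(r *release) bool {
	if r.recordLabel == "" && (r.indexer == "redacted" || r.indexer == "ops") {
		r.needsLabelFromAPI = true
		return true
	}
	// otherwise the match/except lists would be evaluated right here
	return true
}

// finalCheck stands in for AdditionalRecordLabelCheck: fill the label from
// the indexer API, clear the flag, then apply the filter for real.
func finalCheck(r *release, fetchLabel func() string, wantLabel string) bool {
	if r.recordLabel == "" {
		r.recordLabel = fetchLabel() // torrentInfo.RecordLabel in the real code
	}
	r.needsLabelFromAPI = false
	// single-label comparison as a placeholder for the comma-separated match
	return wantLabel == "" || r.recordLabel == wantLabel
}

func main() {
	r := &release{indexer: "redacted"}
	fmt.Println(preCheck(r), r.needsLabelFromAPI)                                   // true true
	fmt.Println(finalCheck(r, func() string { return "FAJo Music" }, "FAJo Music")) // true
}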
@ -1200,6 +1231,20 @@ func (f *Filter) CheckUploader(uploader string) (bool, error) {
     return true, nil
 }

+func (f *Filter) CheckRecordLabel(recordLabel string) (bool, error) {
+    if f.MatchRecordLabels != "" && !contains(recordLabel, f.MatchRecordLabels) {
+        f.RejectReasons.Add("match record label", recordLabel, f.MatchRecordLabels)
+        return false, nil
+    }
+
+    if f.ExceptRecordLabels != "" && contains(recordLabel, f.ExceptRecordLabels) {
+        f.RejectReasons.Add("except record label", recordLabel, f.ExceptRecordLabels)
+        return false, nil
+    }
+
+    return true, nil
+}
+
 // parsedSizeLimits parses filter bytes limits (expressed as a string) into a
 // uint64 number of bytes. The bounds are returned as *uint64 number of bytes,
 // with "nil" representing "no limit". We break out filter size limit parsing
@ -429,11 +429,12 @@ func (p *IndexerIRCParse) Parse(def *IndexerDefinition, vars map[string]string,
 }

 type TorrentBasic struct {
     Id string `json:"Id"`
     TorrentId string `json:"TorrentId,omitempty"`
     InfoHash string `json:"InfoHash"`
     Size string `json:"Size"`
     Uploader string `json:"Uploader"`
+    RecordLabel string `json:"RecordLabel"`
 }

 func (t TorrentBasic) ReleaseSizeBytes() uint64 {
@ -32,7 +32,7 @@ type Macro struct {
     CurrentMonth int
     CurrentSecond int
     CurrentYear int
     Description string
     DownloadUrl string
     Episode int
     FilterID int
@ -78,6 +78,7 @@ type Macro struct {
     TorrentTmpFile string
     Type string
     Uploader string
+    RecordLabel string
     Website string
     Year int
     Month int
@ -150,6 +151,7 @@ func NewMacro(release Release) Macro {
     TorrentTmpFile: release.TorrentTmpFile,
     Type: release.Type,
     Uploader: release.Uploader,
+    RecordLabel: release.RecordLabel,
     Website: release.Website,
     Year: release.Year,
     Month: release.Month,
@ -46,71 +46,73 @@ type ReleaseRepo interface {
 }

 type Release struct {
     ID int64 `json:"id"`
     FilterStatus ReleaseFilterStatus `json:"filter_status"`
     Rejections []string `json:"rejections"`
     Indexer IndexerMinimal `json:"indexer"`
     FilterName string `json:"filter"`
     Protocol ReleaseProtocol `json:"protocol"`
     Implementation ReleaseImplementation `json:"implementation"` // irc, rss, api
     Timestamp time.Time `json:"timestamp"`
     AnnounceType AnnounceType `json:"announce_type"`
     InfoURL string `json:"info_url"`
     DownloadURL string `json:"download_url"`
     MagnetURI string `json:"-"`
     GroupID string `json:"group_id"`
     TorrentID string `json:"torrent_id"`
     TorrentTmpFile string `json:"-"`
     TorrentDataRawBytes []byte `json:"-"`
     TorrentHash string `json:"-"`
     TorrentName string `json:"name"` // full release name
     Size uint64 `json:"size"`
     Title string `json:"title"` // Parsed title
     Description string `json:"-"`
     Category string `json:"category"`
     Categories []string `json:"categories,omitempty"`
     Season int `json:"season"`
     Episode int `json:"episode"`
     Year int `json:"year"`
     Month int `json:"month"`
     Day int `json:"day"`
     Resolution string `json:"resolution"`
     Source string `json:"source"`
     Codec []string `json:"codec"`
     Container string `json:"container"`
     HDR []string `json:"hdr"`
     Audio []string `json:"-"`
     AudioChannels string `json:"-"`
     AudioFormat string `json:"-"`
     Bitrate string `json:"-"`
     Group string `json:"group"`
     Region string `json:"-"`
     Language []string `json:"-"`
     Proper bool `json:"proper"`
     Repack bool `json:"repack"`
     Website string `json:"website"`
     Artists string `json:"-"`
     Type string `json:"type"` // Album,Single,EP
     LogScore int `json:"-"`
     HasCue bool `json:"-"`
     HasLog bool `json:"-"`
     Origin string `json:"origin"` // P2P, Internal
     Tags []string `json:"-"`
     ReleaseTags string `json:"-"`
     Freeleech bool `json:"-"`
     FreeleechPercent int `json:"-"`
     Bonus []string `json:"-"`
     Uploader string `json:"uploader"`
-    PreTime string `json:"pre_time"`
-    Other []string `json:"-"`
-    RawCookie string `json:"-"`
-    Seeders int `json:"-"`
-    Leechers int `json:"-"`
-    AdditionalSizeCheckRequired bool `json:"-"`
-    AdditionalUploaderCheckRequired bool `json:"-"`
-    FilterID int `json:"-"`
-    Filter *Filter `json:"-"`
-    ActionStatus []ReleaseActionStatus `json:"action_status"`
+    RecordLabel string `json:"record_label"`
+    PreTime string `json:"pre_time"`
+    Other []string `json:"-"`
+    RawCookie string `json:"-"`
+    Seeders int `json:"-"`
+    Leechers int `json:"-"`
+    AdditionalSizeCheckRequired bool `json:"-"`
+    AdditionalUploaderCheckRequired bool `json:"-"`
+    AdditionalRecordLabelCheckRequired bool `json:"-"`
+    FilterID int `json:"-"`
+    Filter *Filter `json:"-"`
+    ActionStatus []ReleaseActionStatus `json:"action_status"`
 }

 func (r *Release) Raw(s string) rls.Release {
@ -846,6 +848,10 @@ func (r *Release) MapVars(def *IndexerDefinition, varMap map[string]string) erro
         r.Uploader = uploader
     }

+    if recordLabel, err := getStringMapValue(varMap, "recordLabel"); err == nil {
+        r.RecordLabel = recordLabel
+    }
+
     if torrentSize, err := getStringMapValue(varMap, "torrentSize"); err == nil {
         // Some indexers like BTFiles announces size with comma. Humanize does not handle that well and strips it.
         torrentSize = strings.Replace(torrentSize, ",", ".", 1)
@ -43,6 +43,7 @@ type Service interface {
     Delete(ctx context.Context, filterID int) error
     AdditionalSizeCheck(ctx context.Context, f *domain.Filter, release *domain.Release) (bool, error)
     AdditionalUploaderCheck(ctx context.Context, f *domain.Filter, release *domain.Release) (bool, error)
+    AdditionalRecordLabelCheck(ctx context.Context, f *domain.Filter, release *domain.Release) (bool, error)
     CheckSmartEpisodeCanDownload(ctx context.Context, params *domain.SmartEpisodeParams) (bool, error)
     GetDownloadsByFilterId(ctx context.Context, filterID int) (*domain.FilterDownloads, error)
 }
@ -462,6 +463,21 @@ func (s *service) CheckFilter(ctx context.Context, f *domain.Filter, release *do
         }
     }

+    if release.AdditionalRecordLabelCheckRequired {
+        l.Debug().Msgf("(%s) additional record label check required", f.Name)
+
+        ok, err := s.AdditionalRecordLabelCheck(ctx, f, release)
+        if err != nil {
+            l.Error().Err(err).Msgf("(%s) additional record label check error", f.Name)
+            return false, err
+        }
+
+        if !ok {
+            l.Trace().Msgf("(%s) additional record label check not matching what filter wanted", f.Name)
+            return false, nil
+        }
+    }
+
     // run external filters
     if f.External != nil {
         externalOk, err := s.RunExternalFilters(ctx, f, f.External, release)
@ -503,7 +519,7 @@ func (s *service) AdditionalSizeCheck(ctx context.Context, f *domain.Filter, rel
     switch release.Indexer.Identifier {
     case "btn", "ggn", "redacted", "ops", "mock":
-        if (release.Size == 0 && release.AdditionalSizeCheckRequired) || (release.Uploader == "" && release.AdditionalUploaderCheckRequired) {
+        if (release.Size == 0 && release.AdditionalSizeCheckRequired) || (release.Uploader == "" && release.AdditionalUploaderCheckRequired) || (release.RecordLabel == "" && release.AdditionalRecordLabelCheckRequired) {
             l.Trace().Msgf("(%s) preparing to check size via api", f.Name)

             torrentInfo, err := s.apiService.GetTorrentByID(ctx, release.Indexer.Identifier, release.TorrentID)
@ -522,6 +538,10 @@ func (s *service) AdditionalSizeCheck(ctx context.Context, f *domain.Filter, rel
             if release.Uploader == "" {
                 release.Uploader = torrentInfo.Uploader
             }
+
+            if release.RecordLabel == "" {
+                release.RecordLabel = torrentInfo.RecordLabel
+            }
         }

     default:
@ -547,7 +567,7 @@ func (s *service) AdditionalSizeCheck(ctx context.Context, f *domain.Filter, rel
     if !sizeOk {
         l.Debug().Msgf("(%s) filter did not match after additional size check, trying next", f.Name)
-        return false, err
+        return false, nil
     }

     return true, nil
@ -575,13 +595,13 @@ func (s *service) AdditionalUploaderCheck(ctx context.Context, f *domain.Filter,
     if !uploaderOk {
         l.Debug().Msgf("(%s) filter did not match after additional uploaders check, trying next", f.Name)
-        return false, err
+        return false, nil
     }

     return true, nil
 }

-    l.Debug().Msgf("(%s) additional api size check required", f.Name)
+    l.Debug().Msgf("(%s) additional api uploader check required", f.Name)

     switch release.Indexer.Identifier {
     case "redacted", "ops", "mock":
@ -598,9 +618,10 @@ func (s *service) AdditionalUploaderCheck(ctx context.Context, f *domain.Filter,
         torrentSize := torrentInfo.ReleaseSizeBytes()
         if release.Size == 0 && torrentSize > 0 {
             release.Size = torrentSize
+        }
+
-        // reset AdditionalSizeCheckRequired to not re-trigger check
-        release.AdditionalSizeCheckRequired = false
+        if release.RecordLabel == "" {
+            release.RecordLabel = torrentInfo.RecordLabel
         }

         if release.Uploader == "" {
@ -622,9 +643,88 @@ func (s *service) AdditionalUploaderCheck(ctx context.Context, f *domain.Filter,
     if !uploaderOk {
         l.Debug().Msgf("(%s) filter did not match after additional uploaders check, trying next", f.Name)
+        return false, nil
+    }
+
+    return true, nil
+}
+
+func (s *service) AdditionalRecordLabelCheck(ctx context.Context, f *domain.Filter, release *domain.Release) (ok bool, err error) {
+    defer func() {
+        // try recover panic if anything went wrong with API or size checks
+        errors.RecoverPanic(recover(), &err)
+        if err != nil {
+            ok = false
+        }
+    }()
+
+    // do additional check against indexer api
+    l := s.log.With().Str("method", "AdditionalRecordLabelCheck").Logger()
+
+    // if record label was fetched before during size check or uploader check we check it and return early
+    if release.RecordLabel != "" {
+        recordLabelOk, err := f.CheckRecordLabel(release.RecordLabel)
+        if err != nil {
+            l.Error().Err(err).Msgf("(%s) error comparing release and record label", f.Name)
+            return false, err
+        }
+
+        // reset AdditionalRecordLabelCheckRequired to not re-trigger check
+        release.AdditionalRecordLabelCheckRequired = false
+
+        if !recordLabelOk {
+            l.Debug().Msgf("(%s) filter did not match after additional record label check, trying next", f.Name)
+            return false, nil
+        }
+
+        return true, nil
+    }
+
+    l.Debug().Msgf("(%s) additional api record label check required", f.Name)
+
+    switch release.Indexer.Identifier {
+    case "redacted", "ops", "mock":
+        l.Trace().Msgf("(%s) preparing to check via api", f.Name)
+
+        torrentInfo, err := s.apiService.GetTorrentByID(ctx, release.Indexer.Identifier, release.TorrentID)
+        if err != nil || torrentInfo == nil {
+            l.Error().Err(err).Msgf("(%s) could not get torrent info from api: '%s' from: %s", f.Name, release.TorrentID, release.Indexer.Identifier)
+            return false, err
+        }
+
+        l.Debug().Msgf("(%s) got torrent info from api: %+v", f.Name, torrentInfo)
+
+        torrentSize := torrentInfo.ReleaseSizeBytes()
+        if release.Size == 0 && torrentSize > 0 {
+            release.Size = torrentSize
+        }
+
+        if release.Uploader == "" {
+            release.Uploader = torrentInfo.Uploader
+        }
+
+        if release.RecordLabel == "" {
+            release.RecordLabel = torrentInfo.RecordLabel
+        }
+
+    default:
+        return false, errors.New("additional record label check not supported for this indexer: %s", release.Indexer.Identifier)
+    }
+
+    recordLabelOk, err := f.CheckRecordLabel(release.RecordLabel)
+    if err != nil {
+        l.Error().Err(err).Msgf("(%s) error comparing release and record label", f.Name)
         return false, err
     }

+    // reset AdditionalRecordLabelCheckRequired to not re-trigger check
+    release.AdditionalRecordLabelCheckRequired = false
+
+    if !recordLabelOk {
+        l.Debug().Msgf("(%s) filter did not match after additional record label check, trying next", f.Name)
+        return false, nil
+    }
+
     return true, nil
 }
@ -105,32 +105,32 @@ type Group struct {
 }

 type Torrent struct {
     Id int `json:"id"`
     InfoHash string `json:"infoHash"`
     Media string `json:"media"`
     Format string `json:"format"`
     Encoding string `json:"encoding"`
     Remastered bool `json:"remastered"`
     RemasterYear int `json:"remasterYear"`
     RemasterTitle string `json:"remasterTitle"`
-    RemasterRecordLabel string `json:"remasterRecordLabel"`
-    RemasterCatalogueNumber string `json:"remasterCatalogueNumber"`
+    RecordLabel string `json:"remasterRecordLabel"` // remasterRecordLabel is the record label of the release, which should be used instead of the record label of the group
+    CatalogueNumber string `json:"remasterCatalogueNumber"` // remasterCatalogueNumber is the catalogue number of the release, which should be used instead of the catalogue number of the group
     Scene bool `json:"scene"`
     HasLog bool `json:"hasLog"`
     HasCue bool `json:"hasCue"`
     LogScore int `json:"logScore"`
     FileCount int `json:"fileCount"`
     Size int `json:"size"`
     Seeders int `json:"seeders"`
     Leechers int `json:"leechers"`
     Snatched int `json:"snatched"`
     FreeTorrent string `json:"freeTorrent"`
     Time string `json:"time"`
     Description string `json:"description"`
     FileList string `json:"fileList"`
     FilePath string `json:"filePath"`
     UserId int `json:"userId"`
     Username string `json:"username"`
 }

 type GetIndexResponse struct {
@ -243,10 +243,11 @@ func (c *Client) GetTorrentByID(ctx context.Context, torrentID string) (*domain.
     }

     res := &domain.TorrentBasic{
         Id: strconv.Itoa(response.Response.Torrent.Id),
         InfoHash: response.Response.Torrent.InfoHash,
         Size: strconv.Itoa(response.Response.Torrent.Size),
         Uploader: response.Response.Torrent.Username,
+        RecordLabel: response.Response.Torrent.RecordLabel,
     }

     return res, nil
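The struct rename above is what lets the label flow through: the JSON key stays remasterRecordLabel while the Go field is now RecordLabel. A trimmed-down check of that mapping (only the two renamed fields are modelled; the fixture value mirrors the updated test data):

package main

import (
	"encoding/json"
	"fmt"
)

// torrent models only the two renamed fields from the API payload.
type torrent struct {
	RecordLabel     string `json:"remasterRecordLabel"`
	CatalogueNumber string `json:"remasterCatalogueNumber"`
}

func main() {
	payload := []byte(`{"remasterRecordLabel": "FAJo Music", "remasterCatalogueNumber": ""}`)

	var t torrent
	if err := json.Unmarshal(payload, &t); err != nil {
		panic(err)
	}
	fmt.Println(t.RecordLabel) // FAJo Music
}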
@ -72,10 +72,11 @@ func TestOrpheusClient_GetTorrentByID(t *testing.T) {
         },
         args: args{torrentID: "2156788"},
         want: &domain.TorrentBasic{
             Id: "2156788",
             InfoHash: "",
             Size: "255299244",
             Uploader: "uploader",
+            RecordLabel: "FAJo Music",
         },
         wantErr: "",
     },
@ -105,34 +105,34 @@ type Group struct {
 }

 type Torrent struct {
     Id int `json:"id"`
     InfoHash string `json:"infoHash"`
     Media string `json:"media"`
     Format string `json:"format"`
     Encoding string `json:"encoding"`
     Remastered bool `json:"remastered"`
     RemasterYear int `json:"remasterYear"`
     RemasterTitle string `json:"remasterTitle"`
-    RemasterRecordLabel string `json:"remasterRecordLabel"`
-    RemasterCatalogueNumber string `json:"remasterCatalogueNumber"`
+    RecordLabel string `json:"remasterRecordLabel"` // remasterRecordLabel is the record label of the release, which should be used instead of the record label of the group
+    CatalogueNumber string `json:"remasterCatalogueNumber"` // remasterCatalogueNumber is the catalogue number of the release, which should be used instead of the catalogue number of the group
     Scene bool `json:"scene"`
     HasLog bool `json:"hasLog"`
     HasCue bool `json:"hasCue"`
     LogScore int `json:"logScore"`
     FileCount int `json:"fileCount"`
     Size int `json:"size"`
     Seeders int `json:"seeders"`
     Leechers int `json:"leechers"`
     Snatched int `json:"snatched"`
     FreeTorrent bool `json:"freeTorrent"`
     IsNeutralleech bool `json:"isNeutralleech"`
     IsFreeload bool `json:"isFreeload"`
     Time string `json:"time"`
     Description string `json:"description"`
     FileList string `json:"fileList"`
     FilePath string `json:"filePath"`
     UserId int `json:"userId"`
     Username string `json:"username"`
 }

 type IndexResponse struct {
@ -231,10 +231,11 @@ func (c *Client) GetTorrentByID(ctx context.Context, torrentID string) (*domain.
     }

     return &domain.TorrentBasic{
         Id: strconv.Itoa(response.Response.Torrent.Id),
         InfoHash: response.Response.Torrent.InfoHash,
         Size: strconv.Itoa(response.Response.Torrent.Size),
         Uploader: response.Response.Torrent.Username,
+        RecordLabel: response.Response.Torrent.RecordLabel,
     }, nil

 }
@ -72,10 +72,11 @@ func TestREDClient_GetTorrentByID(t *testing.T) {
         },
         args: args{torrentID: "29991962"},
         want: &domain.TorrentBasic{
             Id: "29991962",
             InfoHash: "B2BABD3A361EAFC6C4E9142C422DF7DDF5D7E163",
             Size: "527749302",
             Uploader: "Uploader",
+            RecordLabel: "FAJo Music",
         },
         wantErr: "",
     },
pkg/red/testdata/get_torrent_by_id.json (vendored, 2 lines changed)
@ -52,7 +52,7 @@
     "remastered": false,
     "remasterYear": 0,
     "remasterTitle": "",
-    "remasterRecordLabel": "",
+    "remasterRecordLabel": "FAJo Music",
     "remasterCatalogueNumber": "",
     "scene": true,
     "hasLog": false,
@ -431,6 +431,8 @@ export const FilterDetails = () => {
     except_tags_match_logic: filter.except_tags_match_logic,
     match_uploaders: filter.match_uploaders,
     except_uploaders: filter.except_uploaders,
+    match_record_labels: filter.match_record_labels,
+    except_record_labels: filter.except_record_labels,
     match_language: filter.match_language || [],
     except_language: filter.except_language || [],
     freeleech: filter.freeleech,
@ -43,6 +43,30 @@ export const Music = () => {
         </div>
       }
     />
+    <TextAreaAutoResize
+      name="match_record_labels"
+      label="Match record labels"
+      columns={6}
+      placeholder="eg. Anjunabeats, Armada"
+      tooltip={
+        <div>
+          <p>Comma separated list of record labels to match. Only Orpheus and Redacted support this.</p>
+          <DocsLink href="https://autobrr.com/filters#music" />
+        </div>
+      }
+    />
+    <TextAreaAutoResize
+      name="except_record_labels"
+      label="Except record labels"
+      columns={6}
+      placeholder="eg. Anjunadeep, Armind"
+      tooltip={
+        <div>
+          <p>Comma separated list of record labels to ignore (takes priority over Match record labels). Only Orpheus and Redacted support this.</p>
+          <DocsLink href="https://autobrr.com/filters#music" />
+        </div>
+      }
+    />
   </FilterLayout>
 </FilterSection>
|
@ -168,8 +192,8 @@ export const Music = () => {
|
||||||
<div className="col-span-12 flex items-center justify-center">
|
<div className="col-span-12 flex items-center justify-center">
|
||||||
<span className="border-b border-gray-150 dark:border-gray-750 w-full" />
|
<span className="border-b border-gray-150 dark:border-gray-750 w-full" />
|
||||||
<span className="flex mx-2 shrink-0 text-lg font-bold uppercase tracking-wide text-gray-700 dark:text-gray-200">
|
<span className="flex mx-2 shrink-0 text-lg font-bold uppercase tracking-wide text-gray-700 dark:text-gray-200">
|
||||||
OR
|
OR
|
||||||
</span>
|
</span>
|
||||||
<span className="border-b border-gray-150 dark:border-gray-750 w-full" />
|
<span className="border-b border-gray-150 dark:border-gray-750 w-full" />
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
@ -188,8 +212,8 @@ export const Music = () => {
 />

 <span className="col-span-12 sm:col-span-6 self-center ml-0 text-center sm:text-left text-sm text-gray-500 dark:text-gray-425 underline underline-offset-2">
   This is what you want in 90% of cases (instead of options above).
 </span>
 </FilterLayout>
 </FilterSection>
 </FilterPage>
web/src/types/Filter.d.ts (vendored, 2 lines changed)
@ -62,6 +62,8 @@ interface Filter {
     except_categories: string;
     match_uploaders: string;
     except_uploaders: string;
+    match_record_labels: string;
+    except_record_labels: string;
     match_language: string[];
     except_language: string[];
     tags: string;