mirror of https://github.com/idanoo/autobrr
synced 2025-07-23 00:39:13 +00:00

feat: improve release parsing and filtering (#257)

* feat(releases): improve parsing
* refactor: extend filtering add more tests
* feat: improve macro
* feat: add and remove fields
* feat: add freeleech percent to bonus
* feat: filter by origin

parent bb62e724a1
commit e6c151a029

26 changed files with 3210 additions and 3201 deletions
go.mod (2 changes)

@@ -52,6 +52,7 @@ require (
 	github.com/mattn/go-colorable v0.1.8 // indirect
 	github.com/mattn/go-isatty v0.0.14 // indirect
 	github.com/mitchellh/mapstructure v1.4.2 // indirect
+	github.com/moistari/rls v0.2.0 // indirect
 	github.com/nxadm/tail v1.4.6 // indirect
 	github.com/onsi/ginkgo v1.14.2 // indirect
 	github.com/onsi/gomega v1.10.1 // indirect
@@ -65,6 +66,7 @@ require (
 	github.com/spf13/jwalterweatherman v1.1.0 // indirect
 	github.com/subosito/gotenv v1.2.0 // indirect
 	golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 // indirect
+	golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect
 	golang.org/x/sys v0.0.0-20220318055525-2edf467146b5 // indirect
 	golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 // indirect
 	golang.org/x/text v0.3.7 // indirect
go.sum (6 changes)

@@ -299,6 +299,7 @@ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
 github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ=
 github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.7 h1:81/ik6ipDQS2aGcBfIN5dHDB36BwrStyeAQquSYCV4o=
 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
 github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
@@ -471,6 +472,10 @@ github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJ
 github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
 github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
 github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
+github.com/moistari/rls v0.1.20 h1:RsLbJqzev3O/BnSuuiAkSL1VcsymHW1i1z6/rB4Lz6c=
+github.com/moistari/rls v0.1.20/go.mod h1:2oVpWLhkuUzu2xqRINGnvvlcAGizZGMfMv8UYnntUCg=
+github.com/moistari/rls v0.2.0 h1:0+aJk8yNBKi/eAOhnQSz1pZoYcYYVnBUicl0WE75oAg=
+github.com/moistari/rls v0.2.0/go.mod h1:2oVpWLhkuUzu2xqRINGnvvlcAGizZGMfMv8UYnntUCg=
 github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg=
 github.com/mschoch/smat v0.2.0/go.mod h1:kc9mz7DoBKqDyiRL7VZN8KvXQMWeTaVnttLRXOlotKw=
 github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
@@ -771,6 +776,7 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ
 golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -2,6 +2,7 @@ package action

 import (
 	"bytes"
+	"strings"
 	"text/template"
 	"time"

@@ -14,6 +15,7 @@ type Macro struct {
 	TorrentHash string
 	TorrentUrl  string
 	Indexer     string
+	Title       string
 	Resolution  string
 	Source      string
 	HDR         string
@@ -36,9 +38,10 @@ func NewMacro(release domain.Release) Macro {
 		TorrentPathName: release.TorrentTmpFile,
 		TorrentHash:     release.TorrentHash,
 		Indexer:         release.Indexer,
+		Title:           release.Title,
 		Resolution:      release.Resolution,
 		Source:          release.Source,
-		HDR:             release.HDR,
+		HDR:             strings.Join(release.HDR, ", "),
 		Season:          release.Season,
 		Episode:         release.Episode,
 		Year:            currentTime.Year(),
@@ -133,7 +133,7 @@ func TestMacros_Parse(t *testing.T) {
 				TorrentURL: "https://some.site/download/fakeid",
 				Indexer:    "mock1",
 				Resolution: "2160p",
-				HDR:        "DV",
+				HDR:        []string{"DV"},
 			},
 			args: args{text: "movies-{{.Resolution}}{{ if .HDR }}-{{.HDR}}{{ end }}"},
 			want: "movies-2160p-DV",
@@ -146,7 +146,7 @@ func TestMacros_Parse(t *testing.T) {
 				TorrentURL: "https://some.site/download/fakeid",
 				Indexer:    "mock1",
 				Resolution: "2160p",
-				HDR:        "HDR",
+				HDR:        []string{"HDR"},
 			},
 			args: args{text: "movies-{{.Resolution}}{{ if .HDR }}-{{.HDR}}{{ end }}"},
 			want: "movies-2160p-HDR",
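Taken together, the macro changes above mean release.HDR is now a string slice that gets joined into one string before template expansion, and a Title field is exposed to action macros. A minimal, self-contained sketch of that behaviour (the macro type here is reduced to the fields touched by this diff; everything else is assumed for illustration):

package main

import (
	"bytes"
	"fmt"
	"strings"
	"text/template"
)

// reduced stand-in for the action Macro type in this diff
type macro struct {
	Title      string
	Resolution string
	HDR        string
}

func main() {
	hdrSlice := []string{"DV"} // release.HDR is now []string

	m := macro{
		Title:      "Some Release",                // new Title field
		Resolution: "2160p",
		HDR:        strings.Join(hdrSlice, ", "), // joined as in NewMacro
	}

	tmpl := template.Must(template.New("action").
		Parse("movies-{{.Resolution}}{{ if .HDR }}-{{.HDR}}{{ end }}"))

	var out bytes.Buffer
	if err := tmpl.Execute(&out, m); err != nil {
		panic(err)
	}
	fmt.Println(out.String()) // movies-2160p-DV
}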
@@ -98,21 +98,21 @@ func (a *announceProcessor) processQueue(queue chan string) {
 			continue
 		}

-		newRelease, err := domain.NewRelease(a.indexer.Identifier, "")
+		rls, err := domain.NewRelease(a.indexer.Identifier)
 		if err != nil {
 			log.Error().Err(err).Msg("could not create new release")
 			continue
 		}

 		// on lines matched
-		err = a.onLinesMatched(a.indexer, tmpVars, newRelease)
+		err = a.onLinesMatched(a.indexer, tmpVars, rls)
 		if err != nil {
 			log.Debug().Msgf("error match line: %v", "")
 			continue
 		}

 		// process release in a new go routine
-		go a.releaseSvc.Process(newRelease)
+		go a.releaseSvc.Process(rls)
 	}
 }

@@ -166,24 +166,24 @@ func (a *announceProcessor) parseExtract(pattern string, vars []string, tmpVars
 }

 // onLinesMatched process vars into release
-func (a *announceProcessor) onLinesMatched(def domain.IndexerDefinition, vars map[string]string, release *domain.Release) error {
+func (a *announceProcessor) onLinesMatched(def domain.IndexerDefinition, vars map[string]string, rls *domain.Release) error {
 	var err error

-	err = release.MapVars(def, vars)
+	err = rls.MapVars(def, vars)
 	if err != nil {
 		log.Error().Stack().Err(err).Msg("announce: could not map vars for release")
 		return err
 	}

 	// parse fields
-	err = release.Parse()
+	err = rls.ParseString(rls.TorrentName)
 	if err != nil {
 		log.Error().Stack().Err(err).Msg("announce: could not parse release")
 		return err
 	}

 	// parse torrentUrl
-	err = release.ParseTorrentUrl(def.Parse.Match.TorrentURL, vars, def.SettingsMap, def.Parse.Match.Encode)
+	err = def.Parse.ParseTorrentUrl(vars, def.SettingsMap, rls)
 	if err != nil {
 		log.Error().Stack().Err(err).Msg("announce: could not parse torrent url")
 		return err
@@ -235,41 +235,6 @@ func (a *announceProcessor) processTorrentUrl(match string, vars map[string]stri
 	return b.String(), nil
 }

-func split(r rune) bool {
-	return r == ' ' || r == '.'
-}
-
-func Splitter(s string, splits string) []string {
-	m := make(map[rune]int)
-	for _, r := range splits {
-		m[r] = 1
-	}
-
-	splitter := func(r rune) bool {
-		return m[r] == 1
-	}
-
-	return strings.FieldsFunc(s, splitter)
-}
-
-func canonicalizeString(s string) []string {
-	//a := strings.FieldsFunc(s, split)
-	a := Splitter(s, " .")
-
-	return a
-}
-
-func cleanReleaseName(input string) string {
-	// Make a Regex to say we only want letters and numbers
-	reg, err := regexp.Compile("[^a-zA-Z0-9]+")
-	if err != nil {
-		//log.Fatal(err)
-	}
-	processedString := reg.ReplaceAllString(input, " ")
-
-	return processedString
-}
-
 func removeElement(s []string, i int) ([]string, error) {
 	// s is [1,2,3,4,5,6], i is 2
@@ -3,10 +3,9 @@ package database
 import (
 	"context"
 	"database/sql"
-	sq "github.com/Masterminds/squirrel"
-	"strings"
 	"time"

+	sq "github.com/Masterminds/squirrel"
 	"github.com/lib/pq"
 	"github.com/rs/zerolog/log"

@@ -100,6 +99,8 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
 			"containers",
 			"match_hdr",
 			"except_hdr",
+			"match_other",
+			"except_other",
 			"years",
 			"artists",
 			"albums",
@@ -117,6 +118,7 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
 			"except_uploaders",
 			"tags",
 			"except_tags",
+			"origins",
 			"created_at",
 			"updated_at",
 		).
@@ -140,7 +142,7 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
 	var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
 	var delay, logScore sql.NullInt32

-	if err := row.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, &f.CreatedAt, &f.UpdatedAt); err != nil {
+	if err := row.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), pq.Array(&f.MatchOther), pq.Array(&f.ExceptOther), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, pq.Array(&f.Origins), &f.CreatedAt, &f.UpdatedAt); err != nil {
 		log.Error().Stack().Err(err).Msgf("filter.findByID: %v : error scanning row", filterID)
 		return nil, err
 	}
@@ -204,6 +206,8 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
 			"f.containers",
 			"f.match_hdr",
 			"f.except_hdr",
+			"f.match_other",
+			"f.except_other",
 			"f.years",
 			"f.artists",
 			"f.albums",
@@ -221,6 +225,7 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
 			"f.except_uploaders",
 			"f.tags",
 			"f.except_tags",
+			"f.origins",
 			"f.created_at",
 			"f.updated_at",
 		).
@@ -254,7 +259,7 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
 		var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
 		var delay, logScore sql.NullInt32

-		if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, &f.CreatedAt, &f.UpdatedAt); err != nil {
+		if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), pq.Array(&f.MatchOther), pq.Array(&f.ExceptOther), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, pq.Array(&f.Origins), &f.CreatedAt, &f.UpdatedAt); err != nil {
 			log.Error().Stack().Err(err).Msg("filter.findByIndexerIdentifier: error scanning row")
 			return nil, err
 		}
@@ -320,6 +325,8 @@ func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.F
 			"containers",
 			"match_hdr",
 			"except_hdr",
+			"match_other",
+			"except_other",
 			"years",
 			"match_categories",
 			"except_categories",
@@ -337,6 +344,7 @@ func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.F
 			"has_log",
 			"has_cue",
 			"perfect_flac",
+			"origins",
 		).
 		Values(
 			filter.Name,
@@ -362,6 +370,8 @@ func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.F
 			pq.Array(filter.Containers),
 			pq.Array(filter.MatchHDR),
 			pq.Array(filter.ExceptHDR),
+			pq.Array(filter.MatchOther),
+			pq.Array(filter.ExceptOther),
 			filter.Years,
 			filter.MatchCategories,
 			filter.ExceptCategories,
@@ -379,6 +389,7 @@ func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.F
 			filter.Log,
 			filter.Cue,
 			filter.PerfectFlac,
+			pq.Array(filter.Origins),
 		).
 		Suffix("RETURNING id").RunWith(r.db.handler)

@@ -424,6 +435,8 @@ func (r *FilterRepo) Update(ctx context.Context, filter domain.Filter) (*domain.
 		Set("containers", pq.Array(filter.Containers)).
 		Set("match_hdr", pq.Array(filter.MatchHDR)).
 		Set("except_hdr", pq.Array(filter.ExceptHDR)).
+		Set("match_other", pq.Array(filter.MatchOther)).
+		Set("except_other", pq.Array(filter.ExceptOther)).
 		Set("years", filter.Years).
 		Set("match_categories", filter.MatchCategories).
 		Set("except_categories", filter.ExceptCategories).
@@ -441,6 +454,7 @@ func (r *FilterRepo) Update(ctx context.Context, filter domain.Filter) (*domain.
 		Set("has_log", filter.Log).
 		Set("has_cue", filter.Cue).
 		Set("perfect_flac", filter.PerfectFlac).
+		Set("origins", pq.Array(filter.Origins)).
 		Set("updated_at", time.Now().Format(time.RFC3339)).
 		Where("id = ?", filter.ID)

@@ -596,14 +610,14 @@ func (r *FilterRepo) Delete(ctx context.Context, filterID int) error {
 }

 // Split string to slice. We store comma separated strings and convert to slice
-func stringToSlice(str string) []string {
-	if str == "" {
-		return []string{}
-	} else if !strings.Contains(str, ",") {
-		return []string{str}
-	}
-
-	split := strings.Split(str, ",")
-
-	return split
-}
+//func stringToSlice(str string) []string {
+//	if str == "" {
+//		return []string{}
+//	} else if !strings.Contains(str, ",") {
+//		return []string{str}
+//	}
+//
+//	split := strings.Split(str, ",")
+//
+//	return split
+//}
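The new match_other, except_other and origins columns are text arrays, so they go through pq.Array on both the insert/update path and the scan path, just like the existing match_hdr/except_hdr columns. A minimal, hedged sketch of that round trip with github.com/lib/pq (the DSN and the use of a bare *sql.DB here are illustrative, not the project's actual setup):

package main

import (
	"database/sql"
	"log"

	"github.com/lib/pq" // registers the "postgres" driver and provides pq.Array
)

func main() {
	db, err := sql.Open("postgres", "postgres://localhost/autobrr?sslmode=disable") // placeholder DSN
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	origins := []string{"SCENE", "P2P"}

	// driver.Valuer side: pq.Array(slice) serialises a Go slice into a text[] value
	if _, err := db.Exec(`UPDATE filter SET origins = $1 WHERE id = $2`, pq.Array(origins), 1); err != nil {
		log.Fatal(err)
	}

	// sql.Scanner side: pq.Array(&slice) decodes the text[] back into the slice
	var loaded []string
	if err := db.QueryRow(`SELECT origins FROM filter WHERE id = $1`, 1).Scan(pq.Array(&loaded)); err != nil {
		log.Fatal(err)
	}
	log.Println(loaded) // [SCENE P2P]
}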
@@ -81,6 +81,8 @@ CREATE TABLE filter
     containers        TEXT []   DEFAULT '{}' NOT NULL,
     match_hdr         TEXT []   DEFAULT '{}',
     except_hdr        TEXT []   DEFAULT '{}',
+    match_other       TEXT []   DEFAULT '{}',
+    except_other      TEXT []   DEFAULT '{}',
     years             TEXT,
     artists           TEXT,
     albums            TEXT,
@@ -99,6 +101,7 @@ CREATE TABLE filter
     except_uploaders  TEXT,
     tags              TEXT,
     except_tags       TEXT,
+    origins           TEXT []   DEFAULT '{}' NOT NULL,
     created_at        TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
     updated_at        TIMESTAMP DEFAULT CURRENT_TIMESTAMP
 );
@@ -169,7 +172,6 @@ CREATE TABLE "release"
     torrent_id        TEXT,
     torrent_name      TEXT,
     size              INTEGER,
-    raw               TEXT,
     title             TEXT,
     category          TEXT,
     season            INTEGER,
@@ -180,28 +182,13 @@ CREATE TABLE "release"
     codec             TEXT,
     container         TEXT,
     hdr               TEXT,
-    audio             TEXT,
+    group             TEXT,
-    release_group     TEXT,
-    region            TEXT,
-    language          TEXT,
-    edition           TEXT,
-    unrated           BOOLEAN,
-    hybrid            BOOLEAN,
     proper            BOOLEAN,
     repack            BOOLEAN,
     website           TEXT,
-    artists           TEXT []   DEFAULT '{}' NOT NULL,
     type              TEXT,
-    format            TEXT,
-    quality           TEXT,
-    log_score         INTEGER,
-    has_log           BOOLEAN,
-    has_cue           BOOLEAN,
-    is_scene          BOOLEAN,
     origin            TEXT,
     tags              TEXT []   DEFAULT '{}' NOT NULL,
-    freeleech         BOOLEAN,
-    freeleech_percent INTEGER,
     uploader          TEXT,
     pre_time          TEXT
 );
@@ -595,6 +582,68 @@ ALTER TABLE release_action_status_dg_tmp
 	ALTER TABLE indexer
 		ADD COLUMN implementation TEXT;
 	`,
+	`
+	ALTER TABLE release
+	RENAME COLUMN release_group TO "group";
+
+	ALTER TABLE release
+	DROP COLUMN raw;
+
+	ALTER TABLE release
+	DROP COLUMN audio;
+
+	ALTER TABLE release
+	DROP COLUMN region;
+
+	ALTER TABLE release
+	DROP COLUMN language;
+
+	ALTER TABLE release
+	DROP COLUMN edition;
+
+	ALTER TABLE release
+	DROP COLUMN unrated;
+
+	ALTER TABLE release
+	DROP COLUMN hybrid;
+
+	ALTER TABLE release
+	DROP COLUMN artists;
+
+	ALTER TABLE release
+	DROP COLUMN format;
+
+	ALTER TABLE release
+	DROP COLUMN quality;
+
+	ALTER TABLE release
+	DROP COLUMN log_score;
+
+	ALTER TABLE release
+	DROP COLUMN has_log;
+
+	ALTER TABLE release
+	DROP COLUMN has_cue;
+
+	ALTER TABLE release
+	DROP COLUMN is_scene;
+
+	ALTER TABLE release
+	DROP COLUMN freeleech;
+
+	ALTER TABLE release
+	DROP COLUMN freeleech_percent;
+
+	ALTER TABLE "filter"
+	ADD COLUMN origins TEXT [] DEFAULT '{}';
+	`,
+	`
+	ALTER TABLE "filter"
+	ADD COLUMN match_other TEXT [] DEFAULT '{}';
+
+	ALTER TABLE "filter"
+	ADD COLUMN except_other TEXT [] DEFAULT '{}';
+	`,
 }

 const postgresSchema = `
@@ -678,6 +727,8 @@ CREATE TABLE filter
     containers        TEXT []   DEFAULT '{}' NOT NULL,
     match_hdr         TEXT []   DEFAULT '{}',
     except_hdr        TEXT []   DEFAULT '{}',
+    match_other       TEXT []   DEFAULT '{}',
+    except_other      TEXT []   DEFAULT '{}',
     years             TEXT,
     artists           TEXT,
     albums            TEXT,
@@ -696,6 +747,7 @@ CREATE TABLE filter
     except_uploaders  TEXT,
     tags              TEXT,
     except_tags       TEXT,
+    origins           TEXT []   DEFAULT '{}',
     created_at        TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
     updated_at        TIMESTAMP DEFAULT CURRENT_TIMESTAMP
 );
@@ -926,4 +978,66 @@ var postgresMigrations = []string{
 	ALTER TABLE indexer
 		ADD COLUMN implementation TEXT;
 	`,
+	`
+	ALTER TABLE release
+	RENAME COLUMN release_group TO "group";
+
+	ALTER TABLE release
+	DROP COLUMN raw;
+
+	ALTER TABLE release
+	DROP COLUMN audio;
+
+	ALTER TABLE release
+	DROP COLUMN region;
+
+	ALTER TABLE release
+	DROP COLUMN language;
+
+	ALTER TABLE release
+	DROP COLUMN edition;
+
+	ALTER TABLE release
+	DROP COLUMN unrated;
+
+	ALTER TABLE release
+	DROP COLUMN hybrid;
+
+	ALTER TABLE release
+	DROP COLUMN artists;
+
+	ALTER TABLE release
+	DROP COLUMN format;
+
+	ALTER TABLE release
+	DROP COLUMN quality;
+
+	ALTER TABLE release
+	DROP COLUMN log_score;
+
+	ALTER TABLE release
+	DROP COLUMN has_log;
+
+	ALTER TABLE release
+	DROP COLUMN has_cue;
+
+	ALTER TABLE release
+	DROP COLUMN is_scene;
+
+	ALTER TABLE release
+	DROP COLUMN freeleech;
+
+	ALTER TABLE release
+	DROP COLUMN freeleech_percent;
+
+	ALTER TABLE "filter"
+	ADD COLUMN origins TEXT [] DEFAULT '{}';
+	`,
+	`
+	ALTER TABLE "filter"
+	ADD COLUMN match_other TEXT [] DEFAULT '{}';
+
+	ALTER TABLE "filter"
+	ADD COLUMN except_other TEXT [] DEFAULT '{}';
+	`,
 }
@@ -3,6 +3,8 @@ package database
 import (
 	"context"
 	"database/sql"
+	"strings"
+
 	sq "github.com/Masterminds/squirrel"
 	"github.com/autobrr/autobrr/internal/domain"
 	"github.com/lib/pq"
@@ -18,10 +20,13 @@ func NewReleaseRepo(db *DB) domain.ReleaseRepo {
 }

 func (repo *ReleaseRepo) Store(ctx context.Context, r *domain.Release) (*domain.Release, error) {
+	codecStr := strings.Join(r.Codec, ",")
+	hdrStr := strings.Join(r.HDR, ",")
+
 	queryBuilder := repo.db.squirrel.
 		Insert("release").
-		Columns("filter_status", "rejections", "indexer", "filter", "protocol", "implementation", "timestamp", "group_id", "torrent_id", "torrent_name", "size", "raw", "title", "category", "season", "episode", "year", "resolution", "source", "codec", "container", "hdr", "audio", "release_group", "region", "language", "edition", "unrated", "hybrid", "proper", "repack", "website", "artists", "type", "format", "quality", "log_score", "has_log", "has_cue", "is_scene", "origin", "tags", "freeleech", "freeleech_percent", "uploader", "pre_time").
-		Values(r.FilterStatus, pq.Array(r.Rejections), r.Indexer, r.FilterName, r.Protocol, r.Implementation, r.Timestamp, r.GroupID, r.TorrentID, r.TorrentName, r.Size, r.Raw, r.Title, r.Category, r.Season, r.Episode, r.Year, r.Resolution, r.Source, r.Codec, r.Container, r.HDR, r.Audio, r.Group, r.Region, r.Language, r.Edition, r.Unrated, r.Hybrid, r.Proper, r.Repack, r.Website, pq.Array(r.Artists), r.Type, r.Format, r.Quality, r.LogScore, r.HasLog, r.HasCue, r.IsScene, r.Origin, pq.Array(r.Tags), r.Freeleech, r.FreeleechPercent, r.Uploader, r.PreTime).
+		Columns("filter_status", "rejections", "indexer", "filter", "protocol", "implementation", "timestamp", "group_id", "torrent_id", "torrent_name", "size", "title", "category", "season", "episode", "year", "resolution", "source", "codec", "container", "hdr", "group", "proper", "repack", "website", "type", "origin", "tags", "uploader", "pre_time").
+		Values(r.FilterStatus, pq.Array(r.Rejections), r.Indexer, r.FilterName, r.Protocol, r.Implementation, r.Timestamp, r.GroupID, r.TorrentID, r.TorrentName, r.Size, r.Title, r.Category, r.Season, r.Episode, r.Year, r.Resolution, r.Source, codecStr, r.Container, hdrStr, r.Group, r.Proper, r.Repack, r.Website, r.Type, r.Origin, pq.Array(r.Tags), r.Uploader, r.PreTime).
 		Suffix("RETURNING id").RunWith(repo.db.handler)

 	// return values
@@ -2,7 +2,13 @@ package domain

 import (
 	"context"
+	"strconv"
+	"strings"
 	"time"
+
+	"github.com/autobrr/autobrr/pkg/wildcard"
+
+	"github.com/dustin/go-humanize"
 )

 /*
@@ -39,7 +45,7 @@ type Filter struct {
 	MatchReleaseGroups  string   `json:"match_release_groups"`
 	ExceptReleaseGroups string   `json:"except_release_groups"`
 	Scene               bool     `json:"scene"`
-	Origins             string   `json:"origins"`
+	Origins             []string `json:"origins"`
 	Freeleech           bool     `json:"freeleech"`
 	FreeleechPercent    string   `json:"freeleech_percent"`
 	Shows               string   `json:"shows"`
@@ -51,6 +57,8 @@ type Filter struct {
 	Containers          []string `json:"containers"`
 	MatchHDR            []string `json:"match_hdr"`
 	ExceptHDR           []string `json:"except_hdr"`
+	MatchOther          []string `json:"match_other"`
+	ExceptOther         []string `json:"except_other"`
 	Years               string   `json:"years"`
 	Artists             string   `json:"artists"`
 	Albums              string   `json:"albums"`
@@ -74,3 +82,433 @@ type Filter struct {
 	Actions  []*Action `json:"actions"`
 	Indexers []Indexer `json:"indexers"`
 }
+
+func (f Filter) CheckFilter(r *Release) ([]string, bool) {
+	// reset rejections first to clean previous checks
+	r.resetRejections()
+
+	if f.Freeleech && r.Freeleech != f.Freeleech {
+		r.addRejection("wanted: freeleech")
+	}
+
+	if f.FreeleechPercent != "" && !checkFreeleechPercent(r.FreeleechPercent, f.FreeleechPercent) {
+		r.addRejectionF("freeleech percent not matching. got: %v want: %v", r.FreeleechPercent, f.FreeleechPercent)
+	}
+
+	if len(f.Origins) > 0 && !containsSlice(r.Origin, f.Origins) {
+		r.addRejectionF("origin not matching. got: %v want: %v", r.Origin, f.Origins)
+	}
+
+	// title is the parsed title
+	if f.Shows != "" && !contains(r.Title, f.Shows) {
+		r.addRejectionF("shows not matching. got: %v want: %v", r.Title, f.Shows)
+	}
+
+	if f.Seasons != "" && !containsIntStrings(r.Season, f.Seasons) {
+		r.addRejectionF("season not matching. got: %d want: %v", r.Season, f.Seasons)
+	}
+
+	if f.Episodes != "" && !containsIntStrings(r.Episode, f.Episodes) {
+		r.addRejectionF("episodes not matching. got: %d want: %v", r.Episode, f.Episodes)
+	}
+
+	// matchRelease
+	// TODO allow to match against regex
+	if f.MatchReleases != "" && !containsFuzzy(r.TorrentName, f.MatchReleases) {
+		r.addRejectionF("match release not matching. got: %v want: %v", r.TorrentName, f.MatchReleases)
+	}
+
+	if f.ExceptReleases != "" && containsFuzzy(r.TorrentName, f.ExceptReleases) {
+		r.addRejectionF("except releases: unwanted release. got: %v want: %v", r.TorrentName, f.ExceptReleases)
+	}
+
+	if f.MatchReleaseGroups != "" && !contains(r.Group, f.MatchReleaseGroups) {
+		r.addRejectionF("release groups not matching. got: %v want: %v", r.Group, f.MatchReleaseGroups)
+	}
+
+	if f.ExceptReleaseGroups != "" && contains(r.Group, f.ExceptReleaseGroups) {
+		r.addRejectionF("unwanted release group. got: %v unwanted: %v", r.Group, f.ExceptReleaseGroups)
+	}
+
+	if f.MatchUploaders != "" && !contains(r.Uploader, f.MatchUploaders) {
+		r.addRejectionF("uploaders not matching. got: %v want: %v", r.Uploader, f.MatchUploaders)
+	}
+
+	if f.ExceptUploaders != "" && contains(r.Uploader, f.ExceptUploaders) {
+		r.addRejectionF("unwanted uploaders. got: %v unwanted: %v", r.Uploader, f.ExceptUploaders)
+	}
+
+	if len(f.Resolutions) > 0 && !containsSlice(r.Resolution, f.Resolutions) {
+		r.addRejectionF("resolution not matching. got: %v want: %v", r.Resolution, f.Resolutions)
+	}
+
+	if len(f.Codecs) > 0 && !sliceContainsSlice(r.Codec, f.Codecs) {
+		r.addRejectionF("codec not matching. got: %v want: %v", r.Codec, f.Codecs)
+	}
+
+	if len(f.Sources) > 0 && !containsSlice(r.Source, f.Sources) {
+		r.addRejectionF("source not matching. got: %v want: %v", r.Source, f.Sources)
+	}
+
+	if len(f.Containers) > 0 && !containsSlice(r.Container, f.Containers) {
+		r.addRejectionF("container not matching. got: %v want: %v", r.Container, f.Containers)
+	}
+
+	// HDR is parsed into the Codec slice from rls
+	if len(f.MatchHDR) > 0 && !sliceContainsSlice(r.HDR, f.MatchHDR) {
+		r.addRejectionF("hdr not matching. got: %v want: %v", r.HDR, f.MatchHDR)
+	}
+
+	// HDR is parsed into the Codec slice from rls
+	if len(f.ExceptHDR) > 0 && sliceContainsSlice(r.HDR, f.ExceptHDR) {
+		r.addRejectionF("hdr unwanted. got: %v want: %v", r.HDR, f.ExceptHDR)
+	}
+
+	if f.Years != "" && !containsIntStrings(r.Year, f.Years) {
+		r.addRejectionF("year not matching. got: %d want: %v", r.Year, f.Years)
+	}
+
+	if f.MatchCategories != "" && !contains(r.Category, f.MatchCategories) {
+		r.addRejectionF("category not matching. got: %v want: %v", r.Category, f.MatchCategories)
+	}
+
+	if f.ExceptCategories != "" && contains(r.Category, f.ExceptCategories) {
+		r.addRejectionF("category unwanted. got: %v want: %v", r.Category, f.ExceptCategories)
+	}
+
+	if len(f.MatchReleaseTypes) > 0 && !containsSlice(r.Category, f.MatchReleaseTypes) {
+		r.addRejectionF("release type not matching. got: %v want: %v", r.Category, f.MatchReleaseTypes)
+	}
+
+	if (f.MinSize != "" || f.MaxSize != "") && !f.checkSizeFilter(r, f.MinSize, f.MaxSize) {
+		r.addRejectionF("size not matching. got: %v want min: %v max: %v", r.Size, f.MinSize, f.MaxSize)
+	}
+
+	if f.Tags != "" && !containsAny(r.Tags, f.Tags) {
+		r.addRejectionF("tags not matching. got: %v want: %v", r.Tags, f.Tags)
+	}
+
+	if f.ExceptTags != "" && containsAny(r.Tags, f.ExceptTags) {
+		r.addRejectionF("tags unwanted. got: %v want: %v", r.Tags, f.ExceptTags)
+	}
+
+	if len(f.Artists) > 0 && !containsFuzzy(r.TorrentName, f.Artists) {
+		r.addRejectionF("artists not matching. got: %v want: %v", r.TorrentName, f.Artists)
+	}
+
+	if len(f.Albums) > 0 && !containsFuzzy(r.TorrentName, f.Albums) {
+		r.addRejectionF("albums not matching. got: %v want: %v", r.TorrentName, f.Albums)
+	}
+
+	// Perfect flac requires Cue, Log, Log Score 100, FLAC and 24bit Lossless
+	if f.PerfectFlac && !f.isPerfectFLAC(r) {
+		r.addRejectionF("wanted: perfect flac. got: %v", r.Audio)
+	}
+
+	if len(f.Formats) > 0 && !sliceContainsSlice(r.Audio, f.Formats) {
+		r.addRejectionF("formats not matching. got: %v want: %v", r.Audio, f.Formats)
+	}
+
+	if len(f.Quality) > 0 && !sliceContainsSlice(r.Audio, f.Quality) {
+		r.addRejectionF("quality not matching. got: %v want: %v", r.Audio, f.Quality)
+	}
+
+	if len(f.Media) > 0 && !containsSlice(r.Source, f.Media) {
+		r.addRejectionF("media not matching. got: %v want: %v", r.Source, f.Media)
+	}
+
+	if f.Cue && !containsAny(r.Audio, "Cue") {
+		r.addRejection("wanted: cue")
+	}
+
+	if f.Log && !containsAny(r.Audio, "Log") {
+		r.addRejection("wanted: log")
+	}
+
+	if f.Log && f.LogScore != 0 && r.LogScore != f.LogScore {
+		r.addRejectionF("log score. got: %v want: %v", r.LogScore, f.LogScore)
+	}
+
+	if len(r.Rejections) > 0 {
+		return r.Rejections, false
+	}
+
+	return nil, true
+}
+
+// isPerfectFLAC Perfect is "CD FLAC Cue Log 100% Lossless or 24bit Lossless"
+func (f Filter) isPerfectFLAC(r *Release) bool {
+	if !contains(r.Source, "CD") {
+		return false
+	}
+	if !containsAny(r.Audio, "Cue") {
+		return false
+	}
+	if !containsAny(r.Audio, "Log") {
+		return false
+	}
+	if !containsAny(r.Audio, "Log100") {
+		return false
+	}
+	if !containsAny(r.Audio, "FLAC") {
+		return false
+	}
+	if !containsAnySlice(r.Audio, []string{"Lossless", "24bit Lossless"}) {
+		return false
+	}
+
+	return true
+}
+
+// checkSizeFilter additional size check
+// for indexers that doesn't announce size, like some gazelle based
+// set flag r.AdditionalSizeCheckRequired if there's a size in the filter, otherwise go a head
+// implement API for ptp,btn,ggn to check for size if needed
+// for others pull down torrent and do check
+func (f Filter) checkSizeFilter(r *Release, minSize string, maxSize string) bool {
+
+	if r.Size == 0 {
+		r.AdditionalSizeCheckRequired = true
+
+		return true
+	} else {
+		r.AdditionalSizeCheckRequired = false
+	}
+
+	// if r.Size parse filter to bytes and compare
+	// handle both min and max
+	if minSize != "" {
+		// string to bytes
+		minSizeBytes, err := humanize.ParseBytes(minSize)
+		if err != nil {
+			// log could not parse into bytes
+		}
+
+		if r.Size <= minSizeBytes {
+			r.addRejection("size: smaller than min size")
+			return false
+		}
+
+	}
+
+	if maxSize != "" {
+		// string to bytes
+		maxSizeBytes, err := humanize.ParseBytes(maxSize)
+		if err != nil {
+			// log could not parse into bytes
+		}
+
+		if r.Size >= maxSizeBytes {
+			r.addRejection("size: larger than max size")
+			return false
+		}
+	}
+
+	return true
+}
+
+// checkFilterIntStrings "1,2,3-20"
+func containsIntStrings(value int, filterList string) bool {
+	filters := strings.Split(filterList, ",")
+
+	for _, s := range filters {
+		s = strings.Replace(s, "%", "", -1)
+		s = strings.Trim(s, " ")
+
+		if strings.Contains(s, "-") {
+			minMax := strings.Split(s, "-")
+
+			// to int
+			min, err := strconv.ParseInt(minMax[0], 10, 32)
+			if err != nil {
+				return false
+			}
+
+			max, err := strconv.ParseInt(minMax[1], 10, 32)
+			if err != nil {
+				return false
+			}
+
+			if min > max {
+				// handle error
+				return false
+			} else {
+				// if announcePercent is greater than min and less than max return true
+				if value >= int(min) && value <= int(max) {
+					return true
+				}
+			}
+		}
+
+		filterInt, err := strconv.ParseInt(s, 10, 32)
+		if err != nil {
+			return false
+		}
+
+		if int(filterInt) == value {
+			return true
+		}
+	}
+
+	return false
+}
+
+func contains(tag string, filter string) bool {
+	return containsMatch([]string{tag}, strings.Split(filter, ","))
+}
+
+func containsFuzzy(tag string, filter string) bool {
+	return containsMatchFuzzy([]string{tag}, strings.Split(filter, ","))
+}
+
+func containsSlice(tag string, filters []string) bool {
+	return containsMatch([]string{tag}, filters)
+}
+
+func containsAny(tags []string, filter string) bool {
+	return containsMatch(tags, strings.Split(filter, ","))
+}
+
+func sliceContainsSlice(tags []string, filters []string) bool {
+	return containsMatchBasic(tags, filters)
+}
+
+func containsMatchFuzzy(tags []string, filters []string) bool {
+	for _, tag := range tags {
+		tag = strings.ToLower(tag)
+
+		for _, filter := range filters {
+			filter = strings.ToLower(filter)
+			filter = strings.Trim(filter, " ")
+			// check if line contains * or ?, if so try wildcard match, otherwise try substring match
+			a := strings.ContainsAny(filter, "?|*")
+			if a {
+				match := wildcard.Match(filter, tag)
+				if match {
+					return true
+				}
+			} else if strings.Contains(tag, filter) {
+				return true
+			}
+		}
+	}
+
+	return false
+}
+
+func containsMatch(tags []string, filters []string) bool {
+	for _, tag := range tags {
+		tag = strings.ToLower(tag)
+
+		for _, filter := range filters {
+			filter = strings.ToLower(filter)
+			filter = strings.Trim(filter, " ")
+			// check if line contains * or ?, if so try wildcard match, otherwise try substring match
+			a := strings.ContainsAny(filter, "?|*")
+			if a {
+				match := wildcard.Match(filter, tag)
+				if match {
+					return true
+				}
+			} else if tag == filter {
+				return true
+			}
+		}
+	}
+
+	return false
+}
+
+func containsMatchBasic(tags []string, filters []string) bool {
+	for _, tag := range tags {
+		tag = strings.ToLower(tag)
+
+		for _, filter := range filters {
+			filter = strings.ToLower(filter)
+			filter = strings.Trim(filter, " ")
+
+			if tag == filter {
+				return true
+			}
+		}
+	}
+
+	return false
+}
+
+func containsAnySlice(tags []string, filters []string) bool {
+	for _, tag := range tags {
+		tag = strings.ToLower(tag)
+
+		for _, filter := range filters {
+			filter = strings.ToLower(filter)
+			filter = strings.Trim(filter, " ")
+			// check if line contains * or ?, if so try wildcard match, otherwise try substring match
+			wild := strings.ContainsAny(filter, "?|*")
+			if wild {
+				match := wildcard.Match(filter, tag)
+				if match {
+					return true
+				}
+			} else if tag == filter {
+				return true
+			}
+		}
+	}
+
+	return false
+}
+
+func checkFreeleechPercent(announcePercent int, filterPercent string) bool {
+	filters := strings.Split(filterPercent, ",")
+
+	// remove % and trim spaces
+	//announcePercent = strings.Replace(announcePercent, "%", "", -1)
+	//announcePercent = strings.Trim(announcePercent, " ")
+
+	//announcePercentInt, err := strconv.ParseInt(announcePercent, 10, 32)
+	//if err != nil {
+	//	return false
+	//}
+
+	for _, s := range filters {
+		s = strings.Replace(s, "%", "", -1)
+		s = strings.Trim(s, " ")
+
+		if strings.Contains(s, "-") {
+			minMax := strings.Split(s, "-")
+
+			// to int
+			min, err := strconv.ParseInt(minMax[0], 10, 32)
+			if err != nil {
+				return false
+			}
+
+			max, err := strconv.ParseInt(minMax[1], 10, 32)
+			if err != nil {
+				return false
+			}
+
+			if min > max {
+				// handle error
+				return false
+			} else {
+				// if announcePercent is greater than min and less than max return true
+				if announcePercent >= int(min) && announcePercent <= int(max) {
+					return true
+				}
+			}
+		}
+
+		filterPercentInt, err := strconv.ParseInt(s, 10, 32)
+		if err != nil {
+			return false
+		}
+
+		if int(filterPercentInt) == announcePercent {
+			return true
+		}
+	}
+
+	return false
+}
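The season/episode/year and freeleech-percent checks added above all accept a comma-separated filter list that can mix single values and ranges, e.g. "1,2,3-20" or "25-75,100". A small standalone, slightly tidied sketch of that range-matching logic (mirroring containsIntStrings above, stripped of the project-specific types; not the project's exact function):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// matchesIntList reports whether value matches a filter list such as
// "1,2,3-20" or "25-75,100" (percent signs and spaces are ignored).
func matchesIntList(value int, filterList string) bool {
	for _, s := range strings.Split(filterList, ",") {
		s = strings.TrimSpace(strings.ReplaceAll(s, "%", ""))

		// range entry like "3-20"
		if lo, hi, ok := strings.Cut(s, "-"); ok {
			min, err1 := strconv.Atoi(lo)
			max, err2 := strconv.Atoi(hi)
			if err1 != nil || err2 != nil || min > max {
				return false
			}
			if value >= min && value <= max {
				return true
			}
			continue
		}

		// single value entry like "1"
		n, err := strconv.Atoi(s)
		if err != nil {
			return false
		}
		if n == value {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(matchesIntList(5, "1,2,3-20"))   // true
	fmt.Println(matchesIntList(50, "25-75,100")) // true
	fmt.Println(matchesIntList(21, "1,2,3-20"))  // false
}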
internal/domain/filter_test.go (new file, 1537 additions)
File diff suppressed because it is too large.
@@ -1,9 +1,13 @@
 package domain

 import (
+	"bytes"
 	"context"
+	"net/url"
+	"text/template"

 	"github.com/dustin/go-humanize"
+	"github.com/rs/zerolog/log"
 )

 type IndexerRepo interface {
@@ -115,6 +119,56 @@ type IndexerParseMatch {
 	Encode []string `json:"encode"`
 }

+func (p *IndexerParse) ParseTorrentUrl(vars map[string]string, extraVars map[string]string, release *Release) error {
+	tmpVars := map[string]string{}
+
+	// copy vars to new tmp map
+	for k, v := range vars {
+		tmpVars[k] = v
+	}
+
+	// merge extra vars with vars
+	if extraVars != nil {
+		for k, v := range extraVars {
+			tmpVars[k] = v
+		}
+	}
+
+	// handle url encode of values
+	if p.Match.Encode != nil {
+		for _, e := range p.Match.Encode {
+			if v, ok := tmpVars[e]; ok {
+				// url encode value
+				t := url.QueryEscape(v)
+				tmpVars[e] = t
+			}
+		}
+	}
+
+	// setup text template to inject variables into
+	tmpl, err := template.New("torrenturl").Parse(p.Match.TorrentURL)
+	if err != nil {
+		log.Error().Err(err).Msg("could not create torrent url template")
+		return err
+	}
+
+	var urlBytes bytes.Buffer
+	err = tmpl.Execute(&urlBytes, &tmpVars)
+	if err != nil {
+		log.Error().Err(err).Msg("could not write torrent url template output")
+		return err
+	}
+
+	release.TorrentURL = urlBytes.String()
+
+	// handle cookies
+	if v, ok := extraVars["cookie"]; ok {
+		release.RawCookie = v
+	}
+
+	return nil
+}
+
 type TorrentBasic struct {
 	Id        string `json:"Id"`
 	TorrentId string `json:"TorrentId,omitempty"`
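The torrent URL handling in ParseTorrentUrl above is plain text/template expansion over the merged announce variables, with the values listed under "encode" passed through url.QueryEscape first. A small self-contained sketch of that expansion, using made-up variable names and a made-up URL pattern rather than a real indexer definition:

package main

import (
	"bytes"
	"fmt"
	"net/url"
	"text/template"
)

func main() {
	// merged announce vars + indexer settings; names are illustrative only
	vars := map[string]string{
		"torrentId": "123456",
		"rsskey":    "abc def/123", // value that needs escaping
	}

	// variables listed under "encode" in the indexer definition get url-encoded
	for _, key := range []string{"rsskey"} {
		if v, ok := vars[key]; ok {
			vars[key] = url.QueryEscape(v)
		}
	}

	// hypothetical torrentUrl pattern from an indexer definition
	pattern := "/torrent/{{ .torrentId }}/download/{{ .rsskey }}"

	tmpl, err := template.New("torrenturl").Parse(pattern)
	if err != nil {
		panic(err)
	}

	var out bytes.Buffer
	if err := tmpl.Execute(&out, vars); err != nil {
		panic(err)
	}
	fmt.Println(out.String()) // /torrent/123456/download/abc+def%2F123
}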
File diff suppressed because it is too large.
File diff suppressed because it is too large.
377
internal/domain/releasetags.go
Normal file
377
internal/domain/releasetags.go
Normal file
|
@ -0,0 +1,377 @@
package domain

import (
    "fmt"
    "regexp"
)

var types map[string][]*TagInfo

func init() {
    types = make(map[string][]*TagInfo)

    audio := []*TagInfo{
        {tag: "24BIT", title: "", regexp: "(?-i:24BIT)", re: nil},
        {tag: "24BIT Lossless", title: "", regexp: "(?:24BIT lossless)", re: nil},
        {tag: "16BIT", title: "", regexp: "(?-i:16BIT)", re: nil},
        {tag: "320", title: "320 Kbps", regexp: "320[\\\\-\\\\._ kbps]?", re: nil},
        {tag: "256", title: "256 Kbps", regexp: "256[\\\\-\\\\._ kbps]?", re: nil},
        {tag: "192", title: "192 Kbps", regexp: "192[\\\\-\\\\._ kbps]?", re: nil},
        {tag: "128", title: "128 Kbps", regexp: "128[\\\\-\\\\._ kbps]?", re: nil},
        {tag: "AAC-LC", title: "Advanced Audio Coding (LC)", regexp: "aac[\\-\\._ ]?lc", re: nil},
        {tag: "AAC", title: "Advanced Audio Coding (LC)", regexp: "", re: nil},
        {tag: "AC3D", title: "", regexp: "ac[\\-\\._ ]?3d", re: nil},
        {tag: "Atmos", title: "Dolby Atmos", regexp: "", re: nil},
        {tag: "CBR", title: "Constant Bit Rate", regexp: "", re: nil},
        {tag: "Cue", title: "Cue File", regexp: "", re: nil},
        {tag: "DDPA", title: "Dolby Digital+ Atmos (E-AC-3+Atmos)", regexp: "dd[p\\+]a", re: nil},
        {tag: "DDP", title: "Dolby Digital+ (E-AC-3)", regexp: "dd[p\\+]|e[\\-\\._ ]?ac3", re: nil},
        {tag: "DD", title: "Dolby Digital (AC-3)", regexp: "dd|ac3|dolby[\\-\\._ ]?digital", re: nil},
        {tag: "DTS-HD.HRA", title: "DTS (HD HRA)", regexp: "dts[\\-\\._ ]?hd[\\-\\._ ]?hra", re: nil},
        {tag: "DTS-HD.HR", title: "DTS (HD HR)", regexp: "dts[\\-\\._ ]?hd[\\-\\._ ]?hr", re: nil},
        {tag: "DTS-HD.MA", title: "DTS (HD MA)", regexp: "dts[\\-\\._ ]?hd[\\-\\._ ]?ma", re: nil},
        {tag: "DTS-HD", title: "DTS (HD)", regexp: "dts[\\-\\._ ]?hd[\\-\\._ ]?", re: nil},
        {tag: "DTS-MA", title: "DTS (MA)", regexp: "dts[\\-\\._ ]?ma[\\-\\._ ]?", re: nil},
        {tag: "DTS-X", title: "DTS (X)", regexp: "dts[\\-\\._ ]?x", re: nil},
        {tag: "DTS", title: "", regexp: "", re: nil},
        {tag: "DUAL.AUDIO", title: "Dual Audio", regexp: "dual(?:[\\-\\._ ]?audio)?", re: nil},
        {tag: "EAC3D", title: "", regexp: "", re: nil},
        {tag: "ES", title: "Dolby Digital (ES)", regexp: "(?-i:ES)", re: nil},
        {tag: "EX", title: "Dolby Digital (EX)", regexp: "(?-i:EX)", re: nil},
        {tag: "FLAC", title: "Free Lossless Audio Codec", regexp: "", re: nil},
        {tag: "LiNE", title: "Line", regexp: "(?-i:L[iI]NE)", re: nil},
        {tag: "Lossless", title: "", regexp: "(?i:Lossless)", re: nil},
        {tag: "Log100", title: "", regexp: "(log 100%)", re: nil},
        {tag: "Log", title: "", regexp: "(?:log)", re: nil},
        {tag: "LPCM", title: "Linear Pulse-Code Modulation", regexp: "", re: nil},
        {tag: "MP3", title: "", regexp: "", re: nil},
        {tag: "OGG", title: "", regexp: "", re: nil},
        {tag: "OPUS", title: "", regexp: "", re: nil},
        {tag: "TrueHD", title: "Dolby TrueHD", regexp: "(?:dolby[\\-\\._ ]?)?true[\\-\\._ ]?hd", re: nil},
        {tag: "VBR", title: "Variable Bit Rate", regexp: "", re: nil},
    }
    types["audio"] = audio

    bonus := []*TagInfo{
        {tag: "Freeleech", title: "Freeleech", regexp: "freeleech", re: nil},
    }
    types["bonus"] = bonus

    channels := []*TagInfo{
        {tag: "7.1", title: "", regexp: "7\\.1(?:[\\-\\._ ]?audios)?", re: nil},
        {tag: "6.1", title: "", regexp: "6\\.1(?:[\\-\\._ ]?audios)?", re: nil},
        {tag: "6.0", title: "", regexp: "6\\.0(?:[\\-\\._ ]?audios)?", re: nil},
        {tag: "5.1", title: "", regexp: "5\\.1(?:[\\-\\._ ]?audios)?", re: nil},
        {tag: "5.0", title: "", regexp: "5\\.0(?:[\\-\\._ ]?audios)?", re: nil},
        {tag: "4.1", title: "", regexp: "4\\.1(?:[\\-\\._ ]?audios)?", re: nil},
        {tag: "4.0", title: "", regexp: "4\\.0(?:[\\-\\._ ]?audios)?", re: nil},
        {tag: "3.1", title: "", regexp: "3\\.1(?:[\\-\\._ ]?audios)?", re: nil},
        {tag: "3.0", title: "", regexp: "3\\.0(?:[\\-\\._ ]?audios)?", re: nil},
        {tag: "2.1", title: "", regexp: "2\\.1(?:[\\-\\._ ]?audios)?", re: nil},
        {tag: "2.0", title: "", regexp: "2\\.0(?:[\\-\\._ ]?audios)?", re: nil},
        {tag: "1.0", title: "", regexp: "1\\.0(?:[\\-\\._ ]?audios)?", re: nil},
    }
    types["channels"] = channels

    codecs := []*TagInfo{
        {tag: "DiVX.SBC", title: "DivX SBC", regexp: "(?:divx[\\-\\._ ]?)?sbc", re: nil},
        {tag: "x264.HQ", title: "x264 (HQ)", regexp: "x[\\\\-\\\\._ ]?264[\\\\-\\\\._ ]?hq", re: nil},
        {tag: "MPEG-2", title: "", regexp: "mpe?g(?:[\\-\\._ ]?2)?", re: nil},
        {tag: "H.265", title: "", regexp: "h[\\-\\._ ]?265", re: nil},
        {tag: "H.264", title: "", regexp: "h[\\-\\._ ]?264", re: nil},
        {tag: "H.263", title: "", regexp: "h[\\-\\._ ]?263", re: nil},
        {tag: "H.262", title: "", regexp: "h[\\-\\._ ]?2[26]2", re: nil},
        {tag: "H.261", title: "", regexp: "h[\\-\\._ ]?261", re: nil},
        {tag: "dxva", title: "Direct-X Video Acceleration", regexp: "", re: nil},
        {tag: "HEVC", title: "High Efficiency Video Coding", regexp: "", re: nil},
        {tag: "VC-1", title: "", regexp: "vc[\\-\\._ ]?1", re: nil},
        {tag: "x265", title: "", regexp: "x[\\-\\._ ]?265", re: nil},
        {tag: "x264", title: "", regexp: "x[\\-\\._ ]?264", re: nil},
        {tag: "XViD", title: "Xvid", regexp: "", re: nil},
        {tag: "AVC", title: "Advanced Video Coding", regexp: "avc(?:[\\-\\._ ]?1)?", re: nil},
        {tag: "VP9", title: "", regexp: "vp[\\-\\._ ]?9", re: nil},
        {tag: "VP8", title: "", regexp: "vp[\\-\\._ ]?8", re: nil},
        {tag: "VP7", title: "", regexp: "vp[\\-\\._ ]?7", re: nil},
    }
    types["codecs"] = codecs

    container := []*TagInfo{
        {tag: "avi", title: "Audio Video Interleave (avi)", regexp: "", re: nil},
        {tag: "img", title: "IMG", regexp: "", re: nil},
        {tag: "iso", title: "ISO", regexp: "", re: nil},
        {tag: "mkv", title: "Matroska (mkv)", regexp: "", re: nil},
        {tag: "mov", title: "MOV", regexp: "", re: nil},
        {tag: "mp4", title: "MP4", regexp: "", re: nil},
        {tag: "mpg", title: "MPEG", regexp: "mpe?g", re: nil},
        {tag: "m2ts", title: "BluRay Disc (m2ts)", regexp: "", re: nil},
        {tag: "vob", title: "VOB", regexp: "", re: nil},
    }
    types["container"] = container

    hdr := []*TagInfo{
        {tag: "HDR10+", title: "High Dynamic Range (10-bit+)", regexp: "hdr[\\-\\.]?10\\+|10\\+[\\-\\.]?bit|hdr10plus|hi10p", re: nil},
        {tag: "HDR10", title: "High Dynamic Range (10-bit)", regexp: "hdr[\\-\\.]?10|10[\\-\\.]?bit|hi10", re: nil},
        {tag: "HDR+", title: "High Dynamic Range+", regexp: "hdr\\+", re: nil},
        {tag: "HDR", title: "High Dynamic Range", regexp: "", re: nil},
        {tag: "SDR", title: "Standard Dynamic Range", regexp: "", re: nil},
        {tag: "DV", title: "Dolby Vision", regexp: "(?i:dolby[\\-\\._ ]vision|dovi|\\Qdv\\E\\b)", re: nil},
    }
    types["hdr"] = hdr

    other := []*TagInfo{
        {tag: "HYBRID", title: "Hybrid", regexp: "", re: nil},
        {tag: "REMUX", title: "Remux", regexp: "", re: nil},
        {tag: "REPACK", title: "Repack", regexp: "repack(?:ed)?", re: nil},
        {tag: "REREPACK", title: "Rerepack", regexp: "rerepack(?:ed)?", re: nil},
    }
    types["other"] = other

    source := []*TagInfo{
        {tag: "Cassette", title: "Cassette", regexp: "", re: nil},
        {tag: "CD", title: "Compact Disc", regexp: "cd[\\-\\._ ]?(?:album)?", re: nil},
        {tag: "BDRiP", title: "BluRay (rip)", regexp: "b[dr]?[\\-\\._ ]?rip", re: nil},
        {tag: "BDSCR", title: "BluRay (screener)", regexp: "b[dr][\\-\\._ ]?scr(?:eener)?", re: nil},
        {tag: "BluRay3D", title: "", regexp: "blu[\\-\\._ ]?ray[\\-\\._ ]?3d|bd3d", re: nil},
        {tag: "BluRayRiP", title: "BluRay (rip)", regexp: "", re: nil},
        {tag: "BluRay", title: "", regexp: "blu[\\-\\._ ]?ray|bd", re: nil},
        {tag: "BRDRip", title: "BluRay Disc (rip)", regexp: "", re: nil},
        {tag: "DAT", title: "Datacable", regexp: "(?-i:DAT)", re: nil},
        {tag: "DVBRiP", title: "Digital Video Broadcasting (rip)", regexp: "dvb[\\-\\._ ]?rip", re: nil},
        {tag: "DVDA", title: "Audio DVD", regexp: "", re: nil},
        {tag: "DVDRiP", title: "Digital Video Disc (rip)", regexp: "dvd[\\-\\._ ]?rip", re: nil},
        {tag: "DVDSCRRiP", title: "Digital Video Disc (screener rip)", regexp: "(?:dvd[\\-\\._ ]?)?scr(?:eener)?[\\-\\._ ]?rip", re: nil},
        {tag: "DVDSCR", title: "Digital Video Disc (screener)", regexp: "(?:dvd[\\-\\._ ]?)?scr(?:eener)?", re: nil},
        {tag: "DVDS", title: "Digital Video Disc (single)", regexp: "dvds(?:ingle)?", re: nil},
        {tag: "DVD", title: "Digital Video Disc", regexp: "dvd", re: nil},
        {tag: "SACD", title: "Super Audio Compact Disc", regexp: "", re: nil},
        {tag: "RADIO", title: "Radio", regexp: "(?-i:R[aA]D[iI][oO])", re: nil},
        {tag: "SATRiP", title: "Satellite (rip)", regexp: "sat[\\-\\._ ]?rip", re: nil},
        {tag: "SAT", title: "Satellite Radio", regexp: "(?-i:SAT)", re: nil},
        {tag: "SBD", title: "Soundboard", regexp: "(?-i:SBD|DAB|Soundboard)", re: nil},
        {tag: "UHD.BDRiP", title: "Ultra High-Definition BluRay (rip)", regexp: "uhd[\\-\\._ ]?(?:bd)?rip", re: nil},
        {tag: "UHD.BluRay", title: "Ultra High-Definition BluRay", regexp: "uhd[\\-\\._ ]?(?:blu[\\-\\._ ]?ray|bd)", re: nil},
        {tag: "UHDTV", title: "Ultra High-Definition TV", regexp: "", re: nil},
        {tag: "UMDMOVIE", title: "Universal Media Disc Movie", regexp: "", re: nil},
        {tag: "Vinyl", title: "Vinyl", regexp: "vinyl|vl", re: nil},
        {tag: "WEB-DL", title: "Web (DL)", regexp: "web[\\-\\._ ]?dl", re: nil},
        {tag: "WEB-HD", title: "Web (HD)", regexp: "web[\\-\\._ ]?hd", re: nil},
        {tag: "WEBFLAC", title: "Web (FLAC)", regexp: "", re: nil},
        {tag: "WebHDRiP", title: "Web (HD rip)", regexp: "", re: nil},
        {tag: "WEBRiP", title: "Web (rip)", regexp: "web[\\-\\._ ]?rip", re: nil},
        {tag: "WEBSCR", title: "Web (screener)", regexp: "web[\\-\\._ ]?scr(?:eener)?", re: nil},
        {tag: "WebUHD", title: "Web (UHD)", regexp: "", re: nil},
        {tag: "WEB", title: "Web", regexp: "", re: nil},
    }
    types["source"] = source

    resolution := []*TagInfo{
        {tag: "PN", title: "Selector", regexp: "p(?:al)?[\\-\\._ ]?n(?:tsc)?[\\-\\._ ]selector", re: nil},
        {tag: "DCI4K", title: "DCI 4k", regexp: "dci[\\-\\._ ]?4k|4096x2160", re: nil},
        {tag: "DCI2K", title: "DCI 2k", regexp: "dci[\\-\\._ ]?2k|2048x1080", re: nil},
        {tag: "4320p", title: "UltraHD 8K (4320p)", regexp: "4320p|7680x4320", re: nil},
        {tag: "2880p", title: "5k (2880p)", regexp: "2880p|5k|5120x2880", re: nil},
        {tag: "2160p", title: "UltraHD 4K (2160p)", regexp: "2160p|3840x2160|uhd|4k", re: nil},
        {tag: "1800p", title: "QHD+ (1800p)", regexp: "1800p|3200x1800", re: nil},
        {tag: "1440p", title: "QHD (1440p)", regexp: "1440p|2560x1440", re: nil},
        {tag: "1080p", title: "FullHD (1080p)", regexp: "1080[ip]|1920x1080", re: nil},
        {tag: "900p", title: "900[ip]|1600x900", regexp: "900[ip]|1600x900", re: nil},
        {tag: "720p", title: "HD (720p)", regexp: "720[ip]|1280x720", re: nil},
        {tag: "576p", title: "PAL (576p)", regexp: "576[ip]|720x576|pal", re: nil},
        {tag: "540p", title: "qHD (540p)", regexp: "540[ip]|960x540", re: nil},
        {tag: "480p", title: "NTSC (480p)", regexp: "480[ip]|720x480|848x480|854x480|ntsc", re: nil},
        {tag: "360p", title: "nHD (360p)", regexp: "360[ip]|640x360", re: nil},
        {tag: "$1p", title: "Other ($1p)", regexp: "([123]\\d{3})p", re: nil},
    }
    types["resolution"] = resolution

    // language `(?i)\b((DK|DKSUBS|DANiSH|DUTCH|NL|NLSUBBED|ENG|FI|FLEMiSH|FiNNiSH|DE|FRENCH|GERMAN|HE|HEBREW|HebSub|HiNDi|iCELANDiC|KOR|MULTi|MULTiSUBS|NORWEGiAN|NO|NORDiC|PL|PO|POLiSH|PLDUB|RO|ROMANiAN|RUS|SPANiSH|SE|SWEDiSH|SWESUB||))\b`)
    // websites `(?i)\b((AMBC|AS|AMZN|AMC|ANPL|ATVP|iP|CORE|BCORE|CMOR|CN|CBC|CBS|CMAX|CNBC|CC|CRIT|CR|CSPN|CW|DAZN|DCU|DISC|DSCP|DSNY|DSNP|DPLY|ESPN|FOX|FUNI|PLAY|HBO|HMAX|HIST|HS|HOTSTAR|HULU|iT|MNBC|MTV|NATG|NBC|NF|NICK|NRK|PMNT|PMNP|PCOK|PBS|PBSK|PSN|QIBI|SBS|SHO|STAN|STZ|SVT|SYFY|TLC|TRVL|TUBI|TV3|TV4|TVL|VH1|VICE|VMEO|UFC|USAN|VIAP|VIAPLAY|VL|WWEN|XBOX|YHOO|YT|RED))\b`)

    for s, infos := range types {
        for _, info := range infos {
            var err error
            //if info.re, err = regexp.Compile(`(?i)^(?:` + info.RE() + `)$`); err != nil {
            if info.re, err = regexp.Compile(`(?i)(?:` + info.RE() + `)`); err != nil {
                fmt.Errorf("tag %q has invalid regexp %q\n", s, info.re)
            }
        }
    }
}

type TagInfo struct {
    tag    string
    title  string
    regexp string
    re     *regexp.Regexp
}

// Tag returns the tag info tag.
func (info *TagInfo) Tag() string {
    return info.tag
}

// Title returns the tag info title.
func (info *TagInfo) Title() string {
    return info.title
}

// Regexp returns the tag info regexp.
func (info *TagInfo) Regexp() string {
    return info.regexp
}

//// Other returns the tag info other.
//func (info *TagInfo) Other() string {
//	return info.other
//}
//
//// Type returns the tag info type.
//func (info *TagInfo) Type() int {
//	return info.typ
//}

//// Excl returns the tag info excl.
//func (info *TagInfo) Excl() bool {
//	return info.excl
//}

// RE returns the tag info regexp string.
func (info *TagInfo) RE() string {
    if info.regexp != "" {
        return info.regexp
    }
    return `\Q` + info.tag + `\E`
}

// Match matches the tag info to s.
func (info *TagInfo) Match(s string) bool {
    return info.re.MatchString(s)
}

// FindFunc is the find signature..
type FindFunc func(string) *TagInfo

// Find returns a func to find tag info.
func Find(infos ...*TagInfo) FindFunc {
    n := len(infos)
    return func(s string) *TagInfo {
        for i := 0; i < n; i++ {
            if infos[i].Match(s) {
                return infos[i]
            }
        }
        return nil
    }
}

type ReleaseTags struct {
    Audio      []string
    Channels   string
    Source     string
    Resolution string
    Container  string
    Codec      string
    HDR        []string
    Other      []string
    Bonus      []string
}

func ParseReleaseTags(tags []string) ReleaseTags {
    releaseTags := ReleaseTags{}

    for _, tag := range tags {

        for tagType, tagInfos := range types {

            for _, info := range tagInfos {
                // check tag
                match := info.Match(tag)
                if match {
                    fmt.Printf("match: %v, info: %v\n", tag, info.Tag())
                    switch tagType {
                    case "audio":
                        releaseTags.Audio = append(releaseTags.Audio, info.Tag())
                        continue
                    case "bonus":
                        releaseTags.Bonus = append(releaseTags.Bonus, info.Tag())
                        continue
                    case "channels":
                        releaseTags.Channels = info.Tag()
                        break
                    case "codecs":
                        releaseTags.Codec = info.Tag()
                        break
                    case "container":
                        releaseTags.Container = info.Tag()
                        break
                    case "hdr":
                        releaseTags.HDR = append(releaseTags.HDR, info.Tag())
                        continue
                    case "other":
                        releaseTags.Other = append(releaseTags.Other, info.Tag())
                        continue
                    case "source":
                        releaseTags.Source = info.Tag()
                        break
                    case "resolution":
                        releaseTags.Resolution = info.Tag()
                        break
                    }
                    break
                }
            }
        }
    }

    return releaseTags
}

func ParseReleaseTagString(tags string) ReleaseTags {
    releaseTags := ReleaseTags{}

    for tagType, tagInfos := range types {
        //fmt.Printf("tagType: %v\n", tagType)

        for _, info := range tagInfos {
            // check tag
            match := info.Match(tags)
            if !match {
                continue
            }

            //fmt.Printf("match: info: %v\n", info.Tag())
            switch tagType {
            case "audio":
                releaseTags.Audio = append(releaseTags.Audio, info.Tag())
                continue
            case "bonus":
                releaseTags.Bonus = append(releaseTags.Bonus, info.Tag())
                continue
            case "channels":
                releaseTags.Channels = info.Tag()
                break
            case "codecs":
                releaseTags.Codec = info.Tag()
                break
            case "container":
                releaseTags.Container = info.Tag()
                break
            case "hdr":
                releaseTags.HDR = append(releaseTags.HDR, info.Tag())
                continue
            case "other":
                releaseTags.Other = append(releaseTags.Other, info.Tag())
                continue
            case "source":
                releaseTags.Source = info.Tag()
                break
            case "resolution":
                releaseTags.Resolution = info.Tag()
                break
            }
            break

        }

    }

    return releaseTags
}
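For orientation (not part of the commit), a minimal example of the call shape from inside the domain package, written as a Go example test; the input string and the expected fields mirror the music_3 case asserted in the test file below:

package domain

import "fmt"

// ExampleParseReleaseTagString exercises the tag parser added above.
// The input mirrors the "music_3" case in releasetags_test.go.
func ExampleParseReleaseTagString() {
    tags := ParseReleaseTagString("FLAC Lossless Log 100% Cue CD")
    fmt.Println(tags.Source, tags.Audio)
    // Output: CD [Cue FLAC Lossless Log100 Log]
}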
internal/domain/releasetags_test.go (new file, 56 lines)
@@ -0,0 +1,56 @@
package domain

import (
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestParseReleaseTags(t *testing.T) {
    type args struct {
        tags []string
    }
    tests := []struct {
        name string
        args args
        want ReleaseTags
    }{
        {name: "test_1", args: args{tags: []string{"CD", "FLAC", "Lossless"}}, want: ReleaseTags{Source: "CD", Audio: []string{"FLAC", "Lossless"}}},
        {name: "test_2", args: args{tags: []string{"MP4", "2160p", "BluRay", "DV"}}, want: ReleaseTags{Source: "BluRay", Resolution: "2160p", Container: "mp4", HDR: []string{"DV"}}},
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            assert.Equalf(t, tt.want, ParseReleaseTags(tt.args.tags), "ParseReleaseTags(%v)", tt.args.tags)
        })
    }
}

func TestParseReleaseTagString(t *testing.T) {
    type args struct {
        tags string
    }
    tests := []struct {
        name string
        args args
        want ReleaseTags
    }{
        {name: "music_1", args: args{tags: "FLAC / Lossless / Log / 80% / Cue / CD"}, want: ReleaseTags{Audio: []string{"Cue", "FLAC", "Lossless", "Log"}, Source: "CD"}},
        {name: "music_2", args: args{tags: "FLAC Lossless Log 80% Cue CD"}, want: ReleaseTags{Audio: []string{"Cue", "FLAC", "Lossless", "Log"}, Source: "CD"}},
        {name: "music_3", args: args{tags: "FLAC Lossless Log 100% Cue CD"}, want: ReleaseTags{Audio: []string{"Cue", "FLAC", "Lossless", "Log100", "Log"}, Source: "CD"}},
        {name: "music_4", args: args{tags: "FLAC 24bit Lossless Log 100% Cue CD"}, want: ReleaseTags{Audio: []string{"24BIT Lossless", "Cue", "FLAC", "Lossless", "Log100", "Log"}, Source: "CD"}},
        {name: "music_5", args: args{tags: "MP3 320 WEB"}, want: ReleaseTags{Audio: []string{"320", "MP3"}, Source: "WEB"}},
        {name: "movies_1", args: args{tags: "x264 Blu-ray MKV 1080p"}, want: ReleaseTags{Codec: "x264", Source: "BluRay", Resolution: "1080p", Container: "mkv"}},
        {name: "movies_2", args: args{tags: "HEVC HDR Blu-ray mp4 2160p"}, want: ReleaseTags{Codec: "HEVC", Source: "BluRay", Resolution: "2160p", Container: "mp4", HDR: []string{"HDR"}}},
        {name: "movies_3", args: args{tags: "HEVC HDR DV Blu-ray mp4 2160p"}, want: ReleaseTags{Codec: "HEVC", Source: "BluRay", Resolution: "2160p", Container: "mp4", HDR: []string{"HDR", "DV"}}},
        {name: "movies_4", args: args{tags: "H.264, Blu-ray/HD DVD"}, want: ReleaseTags{Codec: "H.264", Source: "BluRay"}},
        {name: "movies_5", args: args{tags: "H.264, Remux"}, want: ReleaseTags{Codec: "H.264", Other: []string{"REMUX"}}},
        {name: "movies_6", args: args{tags: "H.264, DVD"}, want: ReleaseTags{Codec: "H.264", Source: "DVD"}},
        {name: "movies_7", args: args{tags: "H.264, DVD, Freeleech"}, want: ReleaseTags{Codec: "H.264", Source: "DVD", Bonus: []string{"Freeleech"}}},
        {name: "movies_8", args: args{tags: "H.264, DVD, Freeleech!"}, want: ReleaseTags{Codec: "H.264", Source: "DVD", Bonus: []string{"Freeleech"}}},
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            assert.Equalf(t, tt.want, ParseReleaseTagString(tt.args.tags), "ParseReleaseTagString(%v)", tt.args.tags)
        })
    }
}
@@ -56,7 +56,7 @@ func (j *TorznabJob) process() error {
 	releases := make([]*domain.Release, 0)

 	for _, item := range items {
-		rls, err := domain.NewRelease(item.Title, "")
+		rls, err := domain.NewRelease(j.IndexerIdentifier)
 		if err != nil {
 			continue
 		}
@@ -69,7 +69,7 @@ func (j *TorznabJob) process() error {
 		// parse size bytes string
 		rls.ParseSizeBytesString(item.Size)

-		if err := rls.Parse(); err != nil {
+		if err := rls.ParseString(item.Title); err != nil {
 			j.Log.Error().Err(err).Msgf("torznab.process: error parsing release")
 			continue
 		}
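Read together, the two hunks change the per-item flow in the Torznab job. A condensed sketch of that flow (loop body only, logging trimmed; the final append is assumed rather than shown in the hunks):

for _, item := range items {
    // NewRelease now takes only the indexer identifier; the title is parsed in a separate step.
    rls, err := domain.NewRelease(j.IndexerIdentifier)
    if err != nil {
        continue
    }

    // parse size bytes string
    rls.ParseSizeBytesString(item.Size)

    // ParseString replaces the old rls.Parse() and works from the raw item title.
    if err := rls.ParseString(item.Title); err != nil {
        continue
    }

    releases = append(releases, rls) // assumed: the release joins the batch built above
}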
@@ -234,7 +234,7 @@ func (s *service) CheckFilter(f domain.Filter, release *domain.Release) (bool, e
 	log.Trace().Msgf("filter.Service.CheckFilter: checking filter: %v %+v", f.Name, f)
 	log.Trace().Msgf("filter.Service.CheckFilter: checking filter: %v for release: %+v", f.Name, release)

-	rejections, matchedFilter := release.CheckFilter(f)
+	rejections, matchedFilter := f.CheckFilter(release)
 	if len(rejections) > 0 {
 		log.Trace().Msgf("filter.Service.CheckFilter: (%v) for release: %v rejections: (%v)", f.Name, release.TorrentName, release.RejectionsString())
 		return false, nil
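Filter evaluation now hangs off the domain.Filter rather than the release. For callers of the service, a short illustrative fragment, assuming the (bool, error) return suggested by the hunk header (s, f and release are stand-in names, not code from this commit):

// illustrative caller of the service method above
ok, err := s.CheckFilter(f, release)
if err != nil {
    return err
}
if !ok {
    return nil // no match: nothing to do for this filter
}
// matched: proceed to the filter's actions (not part of this hunk)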
@@ -1,644 +1,38 @@
 package filter

+import "testing"
+
+func Test_checkSizeFilter(t *testing.T) {
+	type args struct {
+		minSize     string
+		maxSize     string
+		releaseSize uint64
+	}
+	tests := []struct {
+		name    string
+		args    args
+		want    bool
+		wantErr bool
+	}{
+		{name: "test_1", args: args{minSize: "1GB", maxSize: "", releaseSize: 100}, want: false, wantErr: false},
+		{name: "test_2", args: args{minSize: "1GB", maxSize: "", releaseSize: 2000000000}, want: true, wantErr: false},
+		{name: "test_3", args: args{minSize: "1GB", maxSize: "2.2GB", releaseSize: 2000000000}, want: true, wantErr: false},
+		{name: "test_4", args: args{minSize: "1GB", maxSize: "2GIB", releaseSize: 2000000000}, want: true, wantErr: false},
+		{name: "test_5", args: args{minSize: "1GB", maxSize: "2GB", releaseSize: 2000000010}, want: false, wantErr: false},
+		{name: "test_6", args: args{minSize: "1GB", maxSize: "2GB", releaseSize: 2000000000}, want: false, wantErr: false},
+		{name: "test_7", args: args{minSize: "", maxSize: "2GB", releaseSize: 2500000000}, want: false, wantErr: false},
+		{name: "test_8", args: args{minSize: "", maxSize: "20GB", releaseSize: 2500000000}, want: true, wantErr: false},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			got, err := checkSizeFilter(tt.args.minSize, tt.args.maxSize, tt.args.releaseSize)
+			if (err != nil) != tt.wantErr {
+				t.Errorf("checkSizeFilter() error = %v, wantErr %v", err, tt.wantErr)
+				return
+			}
+			if got != tt.want {
+				t.Errorf("checkSizeFilter() got = %v, want %v", got, tt.want)
+			}
+		})
+	}
+}
[the rest of the hunk removes roughly 640 lines of commented-out legacy tests: //Test_checkFilterStrings (cases test_01 to test_03) and a commented //Test_service_checkFilter table (cases freeleech, scene, not_scene, shows_1 to shows_7, match_releases_single, match_releases_single_wildcard, match_releases_multiple, match_release_groups, match_release_groups_multiple, match_release_groups_dont_match, except_release_groups, match_uploaders, non_match_uploaders, except_uploaders, resolutions_1080p, resolutions_2160p, resolutions_no_match, codecs_1_match, codecs_2_no_match, sources_1_match, sources_2_no_match, years_1 to years_4, match_categories_1 to match_categories_4, except_categories_1, match_multiple_fields_1)]
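checkSizeFilter itself is not part of this diff. The following is a rough approximation reconstructed from the test table above (a release must be strictly larger than min and strictly smaller than max), using github.com/dustin/go-humanize to parse the size strings; the real implementation may differ:

package filter

import "github.com/dustin/go-humanize"

// checkSizeFilterSketch is an illustrative approximation of checkSizeFilter,
// derived only from the expectations in the test table; not the actual code.
func checkSizeFilterSketch(minSize string, maxSize string, releaseSize uint64) (bool, error) {
    if minSize != "" {
        minBytes, err := humanize.ParseBytes(minSize) // handles both "1GB" and "2GIB"
        if err != nil {
            return false, err
        }
        if releaseSize <= minBytes {
            return false, nil
        }
    }
    if maxSize != "" {
        maxBytes, err := humanize.ParseBytes(maxSize)
        if err != nil {
            return false, err
        }
        if releaseSize >= maxBytes {
            return false, nil
        }
    }
    return true, nil
}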
@@ -50,7 +50,7 @@ parse:
       vars:
         - torrentName
         - freeleech
-        - internal
+        - origin
         - category
         - torrentSize
         - baseUrl
@@ -53,12 +53,12 @@ parse:
         - "New Torrent: PilotsEYE tv: QUITO 2014 1080p Blu-ray AVC DD 2.0 - Type: Documentary (H.264, Blu-ray/HD DVD) - Uploaded by: Anonymous - Size: 23.14 GiB - https://hdbits.org/details.php?id=12345&hit=1"
         - "New Torrent: Xiao Q 2019 720p BluRay DD-EX 5.1 x264-Anonymous - Type: Movie (H.264, Encode) Internal! - Uploaded by: Anonymous - Size: 4.54 GiB - https://hdbits.org/details.php?id=12345&hit=1"
         - "New Torrent: The Gentlemen 2019 UHD Blu-ray English TrueHD 7.1 - Type: Audio Track - Uploaded by: Anonymous - Size: 3.19 GiB - https://hdbits.org/details.php?id=519896&hit=1"
-      pattern: '^New Torrent: (.+) - Type: (.+?) (?:\((.+)\))?\s?(Internal!)?\s?- Uploaded by: (.+) - Size: (.+) - (https://.+?/).+id=(\d+)'
+      pattern: '^New Torrent: (.+) - Type: (.+?) (?:\((.+)\))?\s?(?:(Internal)!?)?\s?- Uploaded by: (.+) - Size: (.+) - (https://.+?/).+id=(\d+)'
       vars:
         - torrentName
         - category
         - releaseTags
-        - internal
+        - origin
         - uploader
         - torrentSize
         - baseUrl
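Not part of the commit, but a quick way to sanity-check the reworked pattern against the second announce test line above; Go's regexp package uses the same RE2 syntax as these indexer definitions, and the capture indices here are only illustrative:

package main

import (
    "fmt"
    "regexp"
)

func main() {
    pattern := regexp.MustCompile(`^New Torrent: (.+) - Type: (.+?) (?:\((.+)\))?\s?(?:(Internal)!?)?\s?- Uploaded by: (.+) - Size: (.+) - (https://.+?/).+id=(\d+)`)
    line := "New Torrent: Xiao Q 2019 720p BluRay DD-EX 5.1 x264-Anonymous - Type: Movie (H.264, Encode) Internal! - Uploaded by: Anonymous - Size: 4.54 GiB - https://hdbits.org/details.php?id=12345&hit=1"

    m := pattern.FindStringSubmatch(line)
    if m == nil {
        fmt.Println("no match")
        return
    }
    // The fourth group now carries "Internal" (mapped to the origin var),
    // instead of the old "Internal!" capture.
    fmt.Println(m[1], "|", m[3], "|", m[4])
}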
@@ -14,7 +14,7 @@
     "react-cookie": "^4.1.1",
     "react-dom": "^17.0.2",
     "react-hot-toast": "^2.1.1",
-    "react-multi-select-component": "^4.0.2",
+    "react-multi-select-component": "4.2.5",
     "react-query": "^3.18.1",
     "react-ridge-state": "4.2.2",
     "react-router-dom": "^5.2.0",
@@ -124,4 +124,4 @@
   },
   "globals": {}
 }
 }
@@ -8,10 +8,11 @@ import { classNames, COL_WIDTHS } from "../../utils";
 import { SettingsContext } from "../../utils/Context";

 interface MultiSelectProps {
+    name: string;
     label?: string;
     options?: [] | any;
-    name: string;
     columns?: COL_WIDTHS;
+    creatable?: boolean;
 }

 export const MultiSelect = ({
@@ -19,8 +20,16 @@ export const MultiSelect = ({
     label,
     options,
     columns,
+    creatable,
 }: MultiSelectProps) => {
     const settingsContext = SettingsContext.useValue();

+    const handleNewField = (value: string) => ({
+        value: value.toUpperCase(),
+        label: value.toUpperCase(),
+        key: value,
+    });
+
     return (
         <div
             className={classNames(
@@ -42,11 +51,17 @@ export const MultiSelect = ({
                 <RMSC
                     {...field}
                     type="select"
-                    options={options}
+                    options={[...[...options, ...field.value.map((i: any) => ({ value: i.value ?? i, label: i.label ?? i}))].reduce((map, obj) => map.set(obj.value, obj), new Map()).values()]}
                     labelledBy={name}
-                    value={field.value && field.value.map((item: any) => options.find((o: any) => o.value === item))}
+                    isCreatable={creatable}
+                    onCreateOption={handleNewField}
+                    value={field.value && field.value.map((item: any) => ({
+                        value: item.value ? item.value : item,
+                        label: item.label ? item.label : item,
+                    }))}
                     onChange={(values: any) => {
                         const am = values && values.map((i: any) => i.value);

                         setFieldValue(field.name, am);
                     }}
                     className={settingsContext.darkTheme ? "dark" : ""}
@@ -12,29 +12,25 @@ export const resolutions = [
 export const RESOLUTION_OPTIONS = resolutions.map(r => ({ value: r, label: r, key: r}));

 export const codecs = [
-    "AVC",
-    "Remux",
-    "h.264 Remux",
-    "h.265 Remux",
     "HEVC",
-    "VC-1",
+    "H.264",
-    "VC-1 Remux",
+    "H.265",
-    "h264",
-    "h265",
     "x264",
     "x265",
-    "h264 10-bit",
+    "AVC",
-    "h265 10-bit",
+    "VC-1",
-    "x264 10-bit",
+    "AV1",
-    "x265 10-bit",
     "XviD"
 ];

 export const CODECS_OPTIONS = codecs.map(v => ({ value: v, label: v, key: v}));

 export const sources = [
-    "WEB-DL",
     "BluRay",
+    "UHD.BluRay",
+    "WEB-DL",
+    "WEB",
+    "WEBRip",
     "BD5",
     "BD9",
     "BDr",
@@ -51,7 +47,6 @@ export const sources = [
     "HDTV",
     "Mixed",
     "SiteRip",
-    "Webrip",
 ];

 export const SOURCES_OPTIONS = sources.map(v => ({ value: v, label: v, key: v}));
@@ -68,6 +63,7 @@ export const hdr = [
     "HDR",
     "HDR10",
     "HDR10+",
+    "HLG",
     "DV",
     "DV HDR",
     "DV HDR10",
@@ -78,6 +74,13 @@ export const hdr = [

 export const HDR_OPTIONS = hdr.map(v => ({ value: v, label: v, key: v}));

+export const quality_other = [
+    "REMUX",
+    "HYBRID",
+    "REPACK",
+];
+
+export const OTHER_OPTIONS = quality_other.map(v => ({ value: v, label: v, key: v}));
+
 export const formatMusic = [
     "MP3",
@@ -135,11 +138,20 @@ export const releaseTypeMusic = [
     "Demo",
     "Concert Recording",
     "DJ Mix",
-    "Unkown",
+    "Unknown",
 ];

 export const RELEASE_TYPE_MUSIC_OPTIONS = releaseTypeMusic.map(v => ({ value: v, label: v, key: v}));

+export const originOptions = [
+    "P2P",
+    "Internal",
+    "SCENE",
+    "O-SCENE",
+];
+
+export const ORIGIN_OPTIONS = originOptions.map(v => ({ value: v, label: v, key: v}));
+
 export interface RadioFieldsetOption {
     label: string;
     description: string;
@@ -26,7 +26,7 @@ import {
     FORMATS_OPTIONS,
     SOURCES_MUSIC_OPTIONS,
     QUALITY_MUSIC_OPTIONS,
-    RELEASE_TYPE_MUSIC_OPTIONS
+    RELEASE_TYPE_MUSIC_OPTIONS, OTHER_OPTIONS, ORIGIN_OPTIONS
 } from "../../domain/constants";
 import { queryClient } from "../../App";
 import { APIClient } from "../../api/APIClient";
@@ -264,6 +264,8 @@ export default function FilterDetails() {
                     containers: filter.containers || [],
                     match_hdr: filter.match_hdr || [],
                     except_hdr: filter.except_hdr || [],
+                    match_other: filter.match_other || [],
+                    except_other: filter.except_other || [],
                     seasons: filter.seasons,
                     episodes: filter.episodes,
                     match_releases: filter.match_releases,
@@ -288,6 +290,7 @@ export default function FilterDetails() {
                     perfect_flac: filter.perfect_flac,
                     artists: filter.artists,
                     albums: filter.albums,
+                    origins: filter.origins || [],
                     indexers: filter.indexers || [],
                     actions: filter.actions || [],
                 } as Filter}
@@ -403,18 +406,23 @@ function MoviesTv() {
         <TitleSubtitle title="Quality" subtitle="Set resolution, source, codec and related match constraints" />

         <div className="mt-6 grid grid-cols-12 gap-6">
-            <MultiSelect name="resolutions" options={RESOLUTION_OPTIONS} label="resolutions" columns={6} />
+            <MultiSelect name="resolutions" options={RESOLUTION_OPTIONS} label="resolutions" columns={6} creatable={true} />
-            <MultiSelect name="sources" options={SOURCES_OPTIONS} label="sources" columns={6} />
+            <MultiSelect name="sources" options={SOURCES_OPTIONS} label="sources" columns={6} creatable={true} />
         </div>

         <div className="mt-6 grid grid-cols-12 gap-6">
-            <MultiSelect name="codecs" options={CODECS_OPTIONS} label="codecs" columns={6} />
+            <MultiSelect name="codecs" options={CODECS_OPTIONS} label="codecs" columns={6} creatable={true} />
-            <MultiSelect name="containers" options={CONTAINER_OPTIONS} label="containers" columns={6} />
+            <MultiSelect name="containers" options={CONTAINER_OPTIONS} label="containers" columns={6} creatable={true} />
         </div>

         <div className="mt-6 grid grid-cols-12 gap-6">
-            <MultiSelect name="match_hdr" options={HDR_OPTIONS} label="Match HDR" columns={6} />
+            <MultiSelect name="match_hdr" options={HDR_OPTIONS} label="Match HDR" columns={6} creatable={true} />
-            <MultiSelect name="except_hdr" options={HDR_OPTIONS} label="Except HDR" columns={6} />
+            <MultiSelect name="except_hdr" options={HDR_OPTIONS} label="Except HDR" columns={6} creatable={true} />
+        </div>
+
+        <div className="mt-6 grid grid-cols-12 gap-6">
+            <MultiSelect name="match_other" options={OTHER_OPTIONS} label="Match Other" columns={6} creatable={true} />
+            <MultiSelect name="except_other" options={OTHER_OPTIONS} label="Except Other" columns={6} creatable={true} />
         </div>
     </div>
 </div>
@@ -474,134 +482,76 @@ function Music()
 }

 function Advanced() {
-  const [releasesIsOpen, toggleReleases] = useToggle(false)
-  const [groupsIsOpen, toggleGroups] = useToggle(false)
-  const [categoriesIsOpen, toggleCategories] = useToggle(false)
-  const [uploadersIsOpen, toggleUploaders] = useToggle(false)
-  const [freeleechIsOpen, toggleFreeleech] = useToggle(false)
-
   return (
     <div>
-      <div className="mt-6 lg:pb-6 border-b border-gray-200 dark:border-gray-700">
-        <div className="flex justify-between items-center cursor-pointer" onClick={toggleReleases}>
-          <div className="-ml-2 -mt-2 flex flex-wrap items-baseline">
-            <h3 className="ml-2 mt-2 text-lg leading-6 font-medium text-gray-900 dark:text-gray-200">Releases</h3>
-            <p className="ml-2 mt-1 text-sm text-gray-500 dark:text-gray-400 truncate">Match only certain release names and/or ignore other release names</p>
-          </div>
-          <div className="mt-3 sm:mt-0 sm:ml-4">
-            <button
-              type="button"
-              className="inline-flex items-center px-4 py-2 border-transparent text-sm font-medium text-white"
-            >
-              {releasesIsOpen ? <ChevronDownIcon className="h-6 w-6 text-gray-500" aria-hidden="true" /> : <ChevronRightIcon className="h-6 w-6 text-gray-500" aria-hidden="true" />}
-            </button>
-          </div>
-        </div>
-        {releasesIsOpen && (
-          <div className="mt-6 grid grid-cols-12 gap-6">
-            <TextField name="match_releases" label="Match releases" columns={6} placeholder="eg. *some?movie*,*some?show*s01*" />
-            <TextField name="except_releases" label="Except releases" columns={6} placeholder="" />
-          </div>
-        )}
-      </div>
-      <div className="mt-6 lg:pb-6 border-b border-gray-200 dark:border-gray-700">
-        <div className="flex justify-between items-center cursor-pointer" onClick={toggleGroups}>
-          <div className="-ml-2 -mt-2 flex flex-wrap items-baseline">
-            <h3 className="ml-2 mt-2 text-lg leading-6 font-medium text-gray-900 dark:text-gray-200">Groups</h3>
-            <p className="ml-2 mt-1 text-sm text-gray-500 dark:text-gray-400 truncate">Match only certain groups and/or ignore other groups</p>
-          </div>
-          <div className="mt-3 sm:mt-0 sm:ml-4">
-            <button
-              type="button"
-              className="inline-flex items-center px-4 py-2 border-transparent text-sm font-medium text-white"
-            >
-              {groupsIsOpen ? <ChevronDownIcon className="h-6 w-6 text-gray-500" aria-hidden="true" /> : <ChevronRightIcon className="h-6 w-6 text-gray-500" aria-hidden="true" />}
-            </button>
-          </div>
-        </div>
-        {groupsIsOpen && (
-          <div className="mt-6 grid grid-cols-12 gap-6">
-            <TextField name="match_release_groups" label="Match release groups" columns={6} placeholder="eg. group1,group2" />
-            <TextField name="except_release_groups" label="Except release groups" columns={6} placeholder="eg. badgroup1,badgroup2" />
-          </div>
-        )}
-      </div>
-      <div className="mt-6 lg:pb-6 border-b border-gray-200 dark:border-gray-700">
-        <div className="flex justify-between items-center cursor-pointer" onClick={toggleCategories}>
-          <div className="-ml-2 -mt-2 flex flex-wrap items-baseline">
-            <h3 className="ml-2 mt-2 text-lg leading-6 font-medium text-gray-900 dark:text-gray-200">Categories and tags</h3>
-            <p className="ml-2 mt-1 text-sm text-gray-500 dark:text-gray-400 truncate">Match or ignore categories or tags</p>
-          </div>
-          <div className="mt-3 sm:mt-0 sm:ml-4">
-            <button
-              type="button"
-              className="inline-flex items-center px-4 py-2 border-transparent text-sm font-medium text-white"
-            >
-              {categoriesIsOpen ? <ChevronDownIcon className="h-6 w-6 text-gray-500" aria-hidden="true" /> : <ChevronRightIcon className="h-6 w-6 text-gray-500" aria-hidden="true" />}
-            </button>
-          </div>
-        </div>
-        {categoriesIsOpen && (
-          <div className="mt-6 grid grid-cols-12 gap-6">
-            <TextField name="match_categories" label="Match categories" columns={6} placeholder="eg. *category*,category1" />
-            <TextField name="except_categories" label="Except categories" columns={6} placeholder="eg. *category*" />
-            <TextField name="tags" label="Match tags" columns={6} placeholder="eg. tag1,tag2" />
-            <TextField name="except_tags" label="Except tags" columns={6} placeholder="eg. tag1,tag2" />
-          </div>
-        )}
-      </div>
-      <div className="mt-6 lg:pb-6 border-b border-gray-200 dark:border-gray-700">
-        <div className="flex justify-between items-center cursor-pointer" onClick={toggleUploaders}>
-          <div className="-ml-2 -mt-2 flex flex-wrap items-baseline">
-            <h3 className="ml-2 mt-2 text-lg leading-6 font-medium text-gray-900 dark:text-gray-200">Uploaders</h3>
-            <p className="ml-2 mt-1 text-sm text-gray-500 dark:text-gray-400 truncate">Match or ignore uploaders</p>
-          </div>
-          <div className="mt-3 sm:mt-0 sm:ml-4">
-            <button
-              type="button"
-              className="inline-flex items-center px-4 py-2 border-transparent text-sm font-medium text-white"
-            >
-              {uploadersIsOpen ? <ChevronDownIcon className="h-6 w-6 text-gray-500" aria-hidden="true" /> : <ChevronRightIcon className="h-6 w-6 text-gray-500" aria-hidden="true" />}
-            </button>
-          </div>
-        </div>
-        {uploadersIsOpen && (
-          <div className="mt-6 grid grid-cols-12 gap-6">
-            <TextField name="match_uploaders" label="Match uploaders" columns={6} placeholder="eg. uploader1" />
-            <TextField name="except_uploaders" label="Except uploaders" columns={6} placeholder="eg. anonymous" />
-          </div>
-        )}
-      </div>
-      <div className="mt-6 lg:pb-6 border-b border-gray-200 dark:border-gray-700">
-        <div className="flex justify-between items-center cursor-pointer" onClick={toggleFreeleech}>
-          <div className="-ml-2 -mt-2 flex flex-wrap items-baseline">
-            <h3 className="ml-2 mt-2 text-lg leading-6 font-medium text-gray-900 dark:text-gray-200">Freeleech</h3>
-            <p className="ml-2 mt-1 text-sm text-gray-500 dark:text-gray-400 truncate">Match only freeleech and freeleech percent</p>
-          </div>
-          <div className="mt-3 sm:mt-0 sm:ml-4">
-            <button
-              type="button"
-              className="inline-flex items-center px-4 py-2 border-transparent text-sm font-medium text-white"
-            >
-              {freeleechIsOpen ? <ChevronDownIcon className="h-6 w-6 text-gray-500" aria-hidden="true" /> : <ChevronRightIcon className="h-6 w-6 text-gray-500" aria-hidden="true" />}
-            </button>
-          </div>
-        </div>
-        {freeleechIsOpen && (
-          <div className="mt-6 grid grid-cols-12 gap-6">
-            <div className="col-span-6">
-              <SwitchGroup name="freeleech" label="Freeleech" />
-            </div>
-            <TextField name="freeleech_percent" label="Freeleech percent" columns={6} />
-          </div>
-        )}
-      </div>
+      <CollapsableSection title="Releases" subtitle="Match only certain release names and/or ignore other release names">
+        <TextField name="match_releases" label="Match releases" columns={6} placeholder="eg. *some?movie*,*some?show*s01*" />
+        <TextField name="except_releases" label="Except releases" columns={6} placeholder="" />
+      </CollapsableSection>
+
+      <CollapsableSection title="Groups" subtitle="Match only certain groups and/or ignore other groups">
+        <TextField name="match_release_groups" label="Match release groups" columns={6} placeholder="eg. group1,group2" />
+        <TextField name="except_release_groups" label="Except release groups" columns={6} placeholder="eg. badgroup1,badgroup2" />
+      </CollapsableSection>
+
+      <CollapsableSection title="Categories and tags" subtitle="Match or ignore categories or tags">
+        <TextField name="match_categories" label="Match categories" columns={6} placeholder="eg. *category*,category1" />
+        <TextField name="except_categories" label="Except categories" columns={6} placeholder="eg. *category*" />
+        <TextField name="tags" label="Match tags" columns={6} placeholder="eg. tag1,tag2" />
+        <TextField name="except_tags" label="Except tags" columns={6} placeholder="eg. tag1,tag2" />
+      </CollapsableSection>
+
+      <CollapsableSection title="Uploaders" subtitle="Match or ignore uploaders">
+        <TextField name="match_uploaders" label="Match uploaders" columns={6} placeholder="eg. uploader1" />
+        <TextField name="except_uploaders" label="Except uploaders" columns={6} placeholder="eg. anonymous" />
+      </CollapsableSection>
+
+      <CollapsableSection title="Origins" subtitle="Match Internals, scene, p2p etc if announced">
+        <MultiSelect name="origins" options={ORIGIN_OPTIONS} label="Origins" columns={6} />
+      </CollapsableSection>
+
+      <CollapsableSection title="Freeleech" subtitle="Match only freeleech and freeleech percent">
+        <div className="col-span-6">
+          <SwitchGroup name="freeleech" label="Freeleech" />
+        </div>
+        <TextField name="freeleech_percent" label="Freeleech percent" columns={6} />
+      </CollapsableSection>
     </div>
   )
 }
+
+interface CollapsableSectionProps {
+  title: string;
+  subtitle: string;
+  children: any;
+}
+
+function CollapsableSection({ title, subtitle, children }: CollapsableSectionProps) {
+  const [isOpen, toggleOpen] = useToggle(false)
+
+  return(
+    <div className="mt-6 lg:pb-6 border-b border-gray-200 dark:border-gray-700">
+      <div className="flex justify-between items-center cursor-pointer" onClick={toggleOpen}>
+        <div className="-ml-2 -mt-2 flex flex-wrap items-baseline">
+          <h3 className="ml-2 mt-2 text-lg leading-6 font-medium text-gray-900 dark:text-gray-200">{title}</h3>
+          <p className="ml-2 mt-1 text-sm text-gray-500 dark:text-gray-400 truncate">{subtitle}</p>
+        </div>
+        <div className="mt-3 sm:mt-0 sm:ml-4">
+          <button
+            type="button"
+            className="inline-flex items-center px-4 py-2 border-transparent text-sm font-medium text-white"
+          >
+            {isOpen ? <ChevronDownIcon className="h-6 w-6 text-gray-500" aria-hidden="true" /> : <ChevronRightIcon className="h-6 w-6 text-gray-500" aria-hidden="true" />}
+          </button>
+        </div>
+      </div>
+      {isOpen && (
+        <div className="mt-6 grid grid-cols-12 gap-6">
+          {children}
+        </div>
+      )}
+    </div>
+  )
+}
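The net effect of this hunk: five hand-rolled header/chevron/toggle blocks collapse into one reusable CollapsableSection, and the new Origins section rides on the same component. Any future section only supplies a title, a subtitle and its fields; children render inside the 12-column grid once the section is expanded, so fields keep sizing themselves via columns / col-span. A minimal usage sketch, with field names that are illustrative and not part of this commit:

{/* Illustrative only — not a section added by this commit. */}
<CollapsableSection title="Example" subtitle="Anything dropped in here renders in the collapsible grid">
  <TextField name="example_match" label="Match something" columns={6} placeholder="eg. value1,value2" />
  <div className="col-span-6">
    <SwitchGroup name="example_flag" label="Some flag" />
  </div>
</CollapsableSection>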
4 web/src/types/Filter.d.ts (vendored)

@@ -14,7 +14,7 @@ interface Filter {
   match_release_groups: string;
   except_release_groups: string;
   scene: boolean;
-  origins: string;
+  origins: string[];
   freeleech: boolean;
   freeleech_percent: string;
   shows: string;
@@ -26,6 +26,8 @@ interface Filter {
   containers: string[];
   match_hdr: string[];
   except_hdr: string[];
+  match_other: string[];
+  except_other: string[];
   years: string;
   artists: string;
   albums: string;
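With origins switched from a plain string to a string array, and match_other / except_other added alongside the existing array fields, the filter object produced by the form now carries these selections as arrays. A rough example payload under the updated interface; the concrete option values are illustrative, not taken from this commit:

// Illustrative values; the shape follows the updated Filter interface above.
const filter: Partial<Filter> = {
  origins: ["SCENE", "P2P", "INTERNAL"], // previously a single string
  match_other: ["REMUX", "HYBRID"],      // new field
  except_other: ["DV"],                  // new field
  match_hdr: ["HDR", "HDR10"],
  freeleech: true,
  freeleech_percent: "100",
};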
@@ -7334,10 +7334,10 @@ react-is@^17.0.1:
   resolved "https://registry.yarnpkg.com/react-is/-/react-is-17.0.2.tgz#e691d4a8e9c789365655539ab372762b0efb54f0"
   integrity sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==

-react-multi-select-component@^4.0.2:
-  version "4.2.4"
-  resolved "https://registry.yarnpkg.com/react-multi-select-component/-/react-multi-select-component-4.2.4.tgz#4eb11f0c1b0d94b05738b21f1c09a4a6ec8089c6"
-  integrity sha512-HhXV3lLi5k2FCGuVUsM8KoFLPCGhb2JAz3HWS8jg7IY1LKr+5/W54+0w7MlsyjeMS0r+vg4CYnv315dn3B20IA==
+react-multi-select-component@4.2.5:
+  version "4.2.5"
+  resolved "https://registry.yarnpkg.com/react-multi-select-component/-/react-multi-select-component-4.2.5.tgz#507a0814baa856bfbd98e48a854f14e6b4d0f0d8"
+  integrity sha512-/rfyCqp+Q01BSDlzfkF8PWpuAxhIf7650CnW8xD01+NC5nEUj8/JcFL3MgMlM8bpYqrSiyMX/v3hKZ9nDNsZdA==

 react-query@^3.18.1:
   version "3.34.19"