Mirror of https://github.com/idanoo/autobrr (synced 2025-07-23 08:49:13 +00:00)
fix(releases): releasetags freeleech parsing (#306)

* refactor(releases): remove err from constructor
* fix(releases): freeleech parsing and filtering
* chore: remove unused releaseinfo package

parent fd3f10f95a
commit 6675a1df3e

10 changed files with 79 additions and 618 deletions
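The headline refactor is that domain.NewRelease no longer returns an error, and the parsing helpers on Release no longer do either, so call sites lose their error branches. A minimal sketch of the new call pattern, assuming an in-module caller (module path, indexer identifier and titles below are illustrative, not taken from this commit):

package example // hypothetical in-module package; internal/ packages are only importable within the module

import (
    "fmt"

    "github.com/autobrr/autobrr/internal/domain" // module path assumed
)

func newReleaseSketch() {
    // After this commit the constructor returns just *Release.
    rls := domain.NewRelease("mock-indexer")

    // Parsing no longer returns an error, so these are plain statements now.
    rls.ParseString("That Show S01E01 1080p WEB-DL DDP 5.1 H.264-GROUP")
    rls.ParseReleaseTagsString("Web / MKV / h264 / 1080p / AAC 2.0 / Freeleech")

    fmt.Println(rls.Freeleech, rls.Bonus) // expected: true, with a Bonus slice containing "Freeleech"
}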
@@ -100,14 +100,10 @@ func (a *announceProcessor) processQueue(queue chan string) {
                continue
            }

-           rls, err := domain.NewRelease(a.indexer.Identifier)
-           if err != nil {
-               a.log.Error().Err(err).Msg("could not create new release")
-               continue
-           }
+           rls := domain.NewRelease(a.indexer.Identifier)

            // on lines matched
-           err = a.onLinesMatched(a.indexer, tmpVars, rls)
+           err := a.onLinesMatched(a.indexer, tmpVars, rls)
            if err != nil {
                a.log.Debug().Msgf("error match line: %v", "")
                continue
@@ -178,11 +174,7 @@ func (a *announceProcessor) onLinesMatched(def *domain.IndexerDefinition, vars m
    }

    // parse fields
-   err = rls.ParseString(rls.TorrentName)
-   if err != nil {
-       a.log.Error().Stack().Err(err).Msg("announce: could not parse release")
-       return err
-   }
+   rls.ParseString(rls.TorrentName)

    // parse torrentUrl
    err = def.Parse.ParseTorrentUrl(vars, def.SettingsMap, rls)
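The surrounding onLinesMatched maps the captured announce variables onto the release before parsing; with the error returns gone, the visible part of that sequence reduces to roughly the sketch below (MapVars and ParseTorrentUrl signatures are taken from hunks in this commit; the rest is simplified and is not the full upstream function). It reuses the assumed imports from the sketch above.

// parseAnnouncedRelease is a trimmed-down sketch of the MapVars -> ParseString -> ParseTorrentUrl
// sequence shown in the hunk above.
func parseAnnouncedRelease(def *domain.IndexerDefinition, vars map[string]string, rls *domain.Release) error {
    // map indexer-specific announce variables onto the release
    if err := rls.MapVars(def, vars); err != nil {
        return err
    }

    // parse fields from the torrent name; ParseString no longer returns an error
    rls.ParseString(rls.TorrentName)

    // parse torrentUrl from the indexer definition
    return def.Parse.ParseTorrentUrl(vars, def.SettingsMap, rls)
}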
@@ -67,6 +67,7 @@ type Filter struct {
    ExceptReleaseGroups string   `json:"except_release_groups"`
    Scene               bool     `json:"scene"`
    Origins             []string `json:"origins"`
+   Bonus               []string `json:"bonus"`
    Freeleech           bool     `json:"freeleech"`
    FreeleechPercent    string   `json:"freeleech_percent"`
    Shows               string   `json:"shows"`
@@ -115,6 +116,10 @@ func (f Filter) CheckFilter(r *Release) ([]string, bool) {
        return r.Rejections, false
    }

+   if len(f.Bonus) > 0 && !sliceContainsSlice(r.Bonus, f.Bonus) {
+       r.addRejectionF("bonus not matching. got: %v want: %v", r.Bonus, f.Bonus)
+   }
+
    if f.Freeleech && r.Freeleech != f.Freeleech {
        r.addRejection("wanted: freeleech")
    }
@@ -575,15 +580,6 @@ func containsAnySlice(tags []string, filters []string) bool {
 func checkFreeleechPercent(announcePercent int, filterPercent string) bool {
    filters := strings.Split(filterPercent, ",")

-   // remove % and trim spaces
-   //announcePercent = strings.Replace(announcePercent, "%", "", -1)
-   //announcePercent = strings.Trim(announcePercent, " ")
-
-   //announcePercentInt, err := strconv.ParseInt(announcePercent, 10, 32)
-   //if err != nil {
-   //  return false
-   //}
-
    for _, s := range filters {
        s = strings.Replace(s, "%", "", -1)
        s = strings.Trim(s, " ")
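checkFreeleechPercent compares the percent parsed from the announce against a comma-separated filter value such as "25,50,75,100"; only the split-and-clean part of the loop is visible above. A self-contained re-implementation of that idea, under the assumption that each cleaned entry is matched as an exact integer (the upstream comparison may be richer):

package main

import (
    "fmt"
    "strconv"
    "strings"
)

// freeleechPercentMatches mirrors the visible parts of checkFreeleechPercent:
// split the filter on commas, strip "%" and spaces, then compare.
func freeleechPercentMatches(announcePercent int, filterPercent string) bool {
    for _, s := range strings.Split(filterPercent, ",") {
        s = strings.Replace(s, "%", "", -1)
        s = strings.Trim(s, " ")

        want, err := strconv.Atoi(s)
        if err != nil {
            continue
        }
        if want == announcePercent {
            return true
        }
    }
    return false
}

func main() {
    fmt.Println(freeleechPercentMatches(100, "50%, 75%, 100%")) // true
    fmt.Println(freeleechPercentMatches(25, "50,100"))          // false
}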
@@ -954,12 +954,41 @@ func TestFilter_CheckFilter(t *testing.T) {
            },
            want: true,
        },
+       {
+           name: "match_anime_1",
+           fields: &Release{
+               TorrentName: "Kaginado",
+               ReleaseTags: "Web / MKV / h264 / 1080p / AAC 2.0 / Softsubs (SubsPlease) / Episode 22 / Freeleech",
+           },
+           args: args{
+               filter: Filter{
+                   Enabled:   true,
+                   Freeleech: true,
+               },
+           },
+           want: true,
+       },
+       {
+           name: "match_anime_2",
+           fields: &Release{
+               TorrentName: "Kaginado",
+               ReleaseTags: "Web / MKV / h264 / 1080p / AAC 2.0 / Softsubs (SubsPlease) / Episode 22",
+           },
+           args: args{
+               filter: Filter{
+                   Enabled:   true,
+                   Freeleech: true,
+               },
+               rejections: []string{"wanted: freeleech"},
+           },
+           want: false,
+       },
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            r := tt.fields // Release

-           _ = r.ParseString(tt.fields.TorrentName) // Parse TorrentName into struct
+           r.ParseString(tt.fields.TorrentName) // Parse TorrentName into struct
            rejections, got := tt.args.filter.CheckFilter(r)

            assert.Equal(t, tt.want, got)
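The two new cases exercise the freeleech path end to end: the release tags are parsed while the release is built, and CheckFilter then either passes or records the "wanted: freeleech" rejection. A hedged usage sketch with invented values, again assuming the same in-module package and imports as the earlier sketches:

func freeleechFilterExample() {
    rls := domain.NewRelease("mock-indexer")
    rls.ParseString("Kaginado")                                      // no freeleech hint in the name
    rls.ParseReleaseTagsString("Web / MKV / h264 / 1080p / AAC 2.0") // ...or in the tags

    f := domain.Filter{
        Enabled:   true,
        Freeleech: true,
    }

    rejections, ok := f.CheckFilter(rls)
    fmt.Println(ok, rejections) // expected: false ["wanted: freeleech"], as in the match_anime_2 case above
}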
@@ -116,7 +116,8 @@ const (
    ReleasePushStatusApproved ReleasePushStatus = "PUSH_APPROVED"
    ReleasePushStatusRejected ReleasePushStatus = "PUSH_REJECTED"
    ReleasePushStatusErr      ReleasePushStatus = "PUSH_ERROR"
-   ReleasePushStatusPending  ReleasePushStatus = "PENDING" // Initial status
+
+   //ReleasePushStatusPending ReleasePushStatus = "PENDING" // Initial status
 )

 func (r ReleasePushStatus) String() string {
@@ -136,8 +137,9 @@ type ReleaseFilterStatus string

 const (
    ReleaseStatusFilterApproved ReleaseFilterStatus = "FILTER_APPROVED"
-   ReleaseStatusFilterRejected ReleaseFilterStatus = "FILTER_REJECTED"
    ReleaseStatusFilterPending  ReleaseFilterStatus = "PENDING"
+
+   //ReleaseStatusFilterRejected ReleaseFilterStatus = "FILTER_REJECTED"
 )

 type ReleaseProtocol string
@@ -165,7 +167,7 @@ type ReleaseQueryParams struct {
    Search string
 }

-func NewRelease(indexer string) (*Release, error) {
+func NewRelease(indexer string) *Release {
    r := &Release{
        Indexer:      indexer,
        FilterStatus: ReleaseStatusFilterPending,
@@ -176,10 +178,10 @@ func NewRelease(indexer string) (*Release, error) {
        Tags:         []string{},
    }

-   return r, nil
+   return r
 }

-func (r *Release) ParseString(title string) error {
+func (r *Release) ParseString(title string) {
    rel := rls.ParseString(title)

    r.TorrentName = title
@@ -204,12 +206,12 @@ func (r *Release) ParseString(title string) error {

    r.ParseReleaseTagsString(r.ReleaseTags)

-   return nil
+   return
 }

-func (r *Release) ParseReleaseTagsString(tags string) error {
+func (r *Release) ParseReleaseTagsString(tags string) {
    // trim delimiters and closest space
-   re := regexp.MustCompile(`\| |\/ |, `)
+   re := regexp.MustCompile(`\| |/ |, `)
    cleanTags := re.ReplaceAllString(tags, "")

    t := ParseReleaseTagString(cleanTags)
@@ -218,6 +220,11 @@ func (r *Release) ParseReleaseTagsString(tags string) error {
        r.Audio = append(r.Audio, t.Audio...)
    }
    if len(t.Bonus) > 0 {
+       if sliceContainsSlice([]string{"Freeleech"}, t.Bonus) {
+           r.Freeleech = true
+       }
+       // TODO handle percent and other types
+
        r.Bonus = append(r.Bonus, t.Bonus...)
    }
    if len(t.Codec) > 0 {
@@ -240,7 +247,7 @@ func (r *Release) ParseReleaseTagsString(tags string) error {
        r.AudioChannels = t.Channels
    }

-   return nil
+   return
 }

 func (r *Release) ParseSizeBytesString(size string) {
@@ -385,8 +392,11 @@ func (r *Release) MapVars(def *IndexerDefinition, varMap map[string]string) erro
            //log.Debug().Msgf("bad freeleechPercent var: %v", year)
        }

+       r.Freeleech = true
        r.FreeleechPercent = freeleechPercentInt
+
+       r.Bonus = append(r.Bonus, "Freeleech")

        switch freeleechPercentInt {
        case 25:
            r.Bonus = append(r.Bonus, "Freeleech25")
@@ -472,39 +482,6 @@ func getStringMapValue(stringMap map[string]string, key string) (string, error)
    return "", fmt.Errorf("key was not found in map: %q", lowerKey)
 }

-func findLast(input string, pattern string) (string, error) {
-   matched := make([]string, 0)
-   //for _, s := range arr {
-
-   rxp, err := regexp.Compile(pattern)
-   if err != nil {
-       return "", err
-       //return errors.Wrapf(err, "invalid regex: %s", value)
-   }
-
-   matches := rxp.FindStringSubmatch(input)
-   if matches != nil {
-       // first value is the match, second value is the text
-       if len(matches) >= 1 {
-           last := matches[len(matches)-1]
-
-           // add to temp slice
-           matched = append(matched, last)
-       }
-   }
-
-   //}
-
-   // check if multiple values in temp slice, if so get the last one
-   if len(matched) >= 1 {
-       last := matched[len(matched)-1]
-
-       return last, nil
-   }
-
-   return "", nil
-}
-
 func SplitAny(s string, seps string) []string {
    splitter := func(r rune) bool {
        return strings.ContainsRune(seps, r)
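Taken together, the release.go changes make a Freeleech entry in the release tags set both Release.Freeleech and Release.Bonus, while MapVars now records a generic "Freeleech" bonus next to the percent bucket it already derived (Freeleech25/50/75/100 in the visible switch). A short sketch of the tag side, reusing the same assumed package and imports as the earlier sketches:

func freeleechTagSketch() {
    rls := domain.NewRelease("mock-indexer")
    rls.ParseReleaseTagsString("Web / MKV / h264 / 1080p / AAC 2.0 / Freeleech")

    fmt.Println(rls.Freeleech) // expected: true
    fmt.Println(rls.Bonus)     // expected to contain "Freeleech"
}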
@@ -8,10 +8,9 @@ import (

 func TestRelease_Parse(t *testing.T) {
    tests := []struct {
-       name    string
-       fields  Release
-       want    Release
-       wantErr bool
+       name   string
+       fields Release
+       want   Release
    }{
        {
            name: "parse_1",
@@ -32,7 +31,6 @@ func TestRelease_Parse(t *testing.T) {
                Group: "FLUX",
                //Website: "ATVP",
            },
-           wantErr: false,
        },
        {
            name: "parse_2",
@@ -52,7 +50,6 @@ func TestRelease_Parse(t *testing.T) {
                HDR:   []string{"DV"},
                Group: "FLUX",
            },
-           wantErr: false,
        },
        {
            name: "parse_3",
@@ -75,7 +72,6 @@ func TestRelease_Parse(t *testing.T) {
                HDR:   []string{"DV"},
                Group: "FLUX",
            },
-           wantErr: false,
        },
        {
            name: "parse_4",
@@ -98,7 +94,6 @@ func TestRelease_Parse(t *testing.T) {
                HDR:   []string{"DV"},
                Group: "FLUX",
            },
-           wantErr: false,
        },
        {
            name: "parse_5",
@@ -121,7 +116,6 @@ func TestRelease_Parse(t *testing.T) {
                HDR:   []string{"DV"},
                Group: "FLUX",
            },
-           wantErr: false,
        },
        {
            name: "parse_6",
@@ -143,9 +137,9 @@ func TestRelease_Parse(t *testing.T) {
                AudioChannels: "5.1",
                HDR:           []string{"DV"},
                Group:         "FLUX",
+               Freeleech:     true,
                Bonus:         []string{"Freeleech"},
            },
-           wantErr: false,
        },
        {
            name: "parse_music_1",
@@ -161,7 +155,6 @@ func TestRelease_Parse(t *testing.T) {
                Audio:  []string{"Cue", "FLAC", "Lossless", "Log100", "Log"},
                Source: "CD",
            },
-           wantErr: false,
        },
        {
            name: "parse_music_2",
@@ -178,7 +171,6 @@ func TestRelease_Parse(t *testing.T) {
                Source: "Cassette",
                Audio:  []string{"320", "MP3"},
            },
-           wantErr: false,
        },
        {
            name: "parse_music_3",
@@ -194,7 +186,6 @@ func TestRelease_Parse(t *testing.T) {
                Source: "CD",
                Audio:  []string{"MP3", "VBR"},
            },
-           wantErr: false,
        },
        {
            name: "parse_music_4",
@@ -210,7 +201,6 @@ func TestRelease_Parse(t *testing.T) {
                Audio:  []string{"Cue", "FLAC", "Lossless", "Log100", "Log"},
                Source: "CD",
            },
-           wantErr: false,
        },
        {
            name: "parse_music_5",
@@ -226,7 +216,6 @@ func TestRelease_Parse(t *testing.T) {
                Audio:  []string{"24BIT Lossless", "Cue", "FLAC", "Lossless", "Log100", "Log"},
                Source: "CD",
            },
-           wantErr: false,
        },
        {
            name: "parse_movies_case_1",
@@ -246,15 +235,12 @@ func TestRelease_Parse(t *testing.T) {
                Group: "GROUP1",
                Other: []string{"HYBRiD", "REMUX"},
            },
-           wantErr: false,
        },
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            r := tt.fields
-           if err := r.ParseString(tt.fields.TorrentName); (err != nil) != tt.wantErr {
-               t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantErr)
-           }
+           r.ParseString(tt.fields.TorrentName)

            assert.Equal(t, tt.want, r)
        })
@@ -307,8 +293,9 @@ func TestRelease_MapVars(t *testing.T) {
            want: &Release{
                TorrentName:      "Good show S02 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-GROUP2",
                Category:         "tv",
+               Freeleech:        true,
                FreeleechPercent: 100,
-               Bonus:            []string{"Freeleech100"},
+               Bonus:            []string{"Freeleech", "Freeleech100"},
                Uploader:         "Anon",
                Size:             uint64(10000000000),
            },
@@ -326,8 +313,9 @@ func TestRelease_MapVars(t *testing.T) {
            want: &Release{
                TorrentName:      "Good show S02 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-GROUP2",
                Category:         "tv",
+               Freeleech:        true,
                FreeleechPercent: 50,
-               Bonus:            []string{"Freeleech50"},
+               Bonus:            []string{"Freeleech", "Freeleech50"},
                Uploader:         "Anon",
                Size:             uint64(10000000000),
                Tags:             []string{"foreign", "tv"},
@@ -347,8 +335,9 @@ func TestRelease_MapVars(t *testing.T) {
            want: &Release{
                TorrentName:      "Good show S02 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-GROUP2",
                Category:         "tv",
+               Freeleech:        true,
                FreeleechPercent: 100,
-               Bonus:            []string{"Freeleech100"},
+               Bonus:            []string{"Freeleech", "Freeleech100"},
                Uploader:         "Anon",
                Size:             uint64(10000000000),
                Tags:             []string{"foreign", "tv"},
@@ -369,8 +358,9 @@ func TestRelease_MapVars(t *testing.T) {
                TorrentName:      "Good show S02 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-GROUP2",
                Category:         "tv",
                Year:             2020,
+               Freeleech:        true,
                FreeleechPercent: 100,
-               Bonus:            []string{"Freeleech100"},
+               Bonus:            []string{"Freeleech", "Freeleech100"},
                Uploader:         "Anon",
                Size:             uint64(10000000000),
                Tags:             []string{"foreign", "tv"},
@@ -392,8 +382,9 @@ func TestRelease_MapVars(t *testing.T) {
                TorrentName:      "Good show S02 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-GROUP2",
                Category:         "tv",
                Year:             2020,
+               Freeleech:        true,
                FreeleechPercent: 25,
-               Bonus:            []string{"Freeleech25"},
+               Bonus:            []string{"Freeleech", "Freeleech25"},
                Uploader:         "Anon",
                Size:             uint64(10000000000),
                Tags:             []string{"hip.hop", "rhythm.and.blues", "2000s"},
@@ -415,8 +406,9 @@ func TestRelease_MapVars(t *testing.T) {
                TorrentName:      "Good show S02 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-GROUP2",
                Category:         "tv",
                Year:             2020,
+               Freeleech:        true,
                FreeleechPercent: 100,
-               Bonus:            []string{"Freeleech100"},
+               Bonus:            []string{"Freeleech", "Freeleech100"},
                Uploader:         "Anon",
                Size:             uint64(10000000000),
                Tags:             []string{"hip.hop", "rhythm.and.blues", "2000s"},
@@ -572,8 +564,7 @@ func TestRelease_ParseString(t *testing.T) {
                Filter:       tt.fields.Filter,
                ActionStatus: tt.fields.ActionStatus,
            }
-           _ = r.ParseString(tt.args.title)
-           //fmt.Sprintf("ParseString(%v)", tt.args.title)
+           r.ParseString(tt.args.title)
        })
    }
 }
@@ -47,6 +47,8 @@ func TestParseReleaseTagString(t *testing.T) {
        {name: "movies_6", args: args{tags: "H.264, DVD"}, want: ReleaseTags{Codec: "H.264", Source: "DVD"}},
        {name: "movies_7", args: args{tags: "H.264, DVD, Freeleech"}, want: ReleaseTags{Codec: "H.264", Source: "DVD", Bonus: []string{"Freeleech"}}},
        {name: "movies_8", args: args{tags: "H.264, DVD, Freeleech!"}, want: ReleaseTags{Codec: "H.264", Source: "DVD", Bonus: []string{"Freeleech"}}},
+       {name: "anime_1", args: args{tags: "Web / MKV / h264 / 1080p / AAC 2.0 / Softsubs (SubsPlease) / Episode 22 / Freeleech"}, want: ReleaseTags{Audio: []string{"AAC"}, Channels: "2.0", Source: "WEB", Resolution: "1080p", Container: "iso", Codec: "H.264", Bonus: []string{"Freeleech"}}},
+       {name: "anime_2", args: args{tags: "Web | MKV | h264 | 1080p | AAC 2.0 | Softsubs (SubsPlease) | Episode 22 | Freeleech"}, want: ReleaseTags{Audio: []string{"AAC"}, Channels: "2.0", Source: "WEB", Resolution: "1080p", Container: "iso", Codec: "H.264", Bonus: []string{"Freeleech"}}},
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
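The tag strings arrive pipe-, slash- or comma-separated; ParseReleaseTagsString strips the delimiter together with its trailing space (the regexp fixed above) before handing the result to ParseReleaseTagString. A standalone look at just that normalization step, using the same expression as the new code:

package main

import (
    "fmt"
    "regexp"
)

func main() {
    // Same expression as the updated ParseReleaseTagsString: drop "| ", "/ " or ", ".
    re := regexp.MustCompile(`\| |/ |, `)

    fmt.Println(re.ReplaceAllString("Web / MKV / h264 / 1080p / AAC 2.0 / Freeleech", ""))
    fmt.Println(re.ReplaceAllString("Web | MKV | h264 | 1080p | AAC 2.0 | Freeleech", ""))
    // Both print "Web MKV h264 1080p AAC 2.0 Freeleech", which ParseReleaseTagString then parses.
}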
@@ -56,10 +56,7 @@ func (j *TorznabJob) process() error {
    releases := make([]*domain.Release, 0)

    for _, item := range items {
-       rls, err := domain.NewRelease(j.IndexerIdentifier)
-       if err != nil {
-           continue
-       }
+       rls := domain.NewRelease(j.IndexerIdentifier)

        rls.TorrentName = item.Title
        rls.TorrentURL = item.GUID
@@ -69,10 +66,7 @@ func (j *TorznabJob) process() error {
        // parse size bytes string
        rls.ParseSizeBytesString(item.Size)

-       if err := rls.ParseString(item.Title); err != nil {
-           j.Log.Error().Err(err).Msgf("torznab.process: error parsing release")
-           continue
-       }
+       rls.ParseString(item.Title)

        releases = append(releases, rls)
    }
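On the torznab side the mapping from feed item to release is now a straight-line loop. A compact sketch with a stand-in item type (the real job uses the torznab client's item struct) and the same assumed domain package as the earlier sketches:

// feedItem is a hypothetical stand-in for the torznab feed item.
type feedItem struct {
    Title string
    GUID  string
    Size  string
}

// mapFeedItems mirrors the loop above: build the release, copy the obvious
// fields, then parse size and title.
func mapFeedItems(indexerIdentifier string, items []feedItem) []*domain.Release {
    releases := make([]*domain.Release, 0, len(items))
    for _, item := range items {
        rls := domain.NewRelease(indexerIdentifier)

        rls.TorrentName = item.Title
        rls.TorrentURL = item.GUID

        rls.ParseSizeBytesString(item.Size)
        rls.ParseString(item.Title)

        releases = append(releases, rls)
    }
    return releases
}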
@@ -1,109 +0,0 @@
-package releaseinfo
-
-import (
-   "reflect"
-   "strconv"
-   "strings"
-)
-
-// ReleaseInfo is the resulting structure returned by Parse
-type ReleaseInfo struct {
-   Title      string
-   Season     int
-   Episode    int
-   Year       int
-   Resolution string
-   Source     string
-   Codec      string
-   Container  string
-   Audio      string
-   Group      string
-   Region     string
-   Extended   bool
-   Hardcoded  bool
-   Proper     bool
-   Repack     bool
-   Widescreen bool
-   Website    string
-   Language   string
-   Sbs        string
-   Unrated    bool
-   Size       string
-   ThreeD     bool
-}
-
-func setField(tor *ReleaseInfo, field, raw, val string) {
-   ttor := reflect.TypeOf(tor)
-   torV := reflect.ValueOf(tor)
-   field = strings.Title(field)
-   v, _ := ttor.Elem().FieldByName(field)
-   //fmt.Printf("  field=%v, type=%+v, value=%v, raw=%v\n", field, v.Type, val, raw)
-   switch v.Type.Kind() {
-   case reflect.Bool:
-       torV.Elem().FieldByName(field).SetBool(true)
-   case reflect.Int:
-       clean, _ := strconv.ParseInt(val, 10, 64)
-       torV.Elem().FieldByName(field).SetInt(clean)
-   case reflect.Uint:
-       clean, _ := strconv.ParseUint(val, 10, 64)
-       torV.Elem().FieldByName(field).SetUint(clean)
-   case reflect.String:
-       torV.Elem().FieldByName(field).SetString(val)
-   }
-}
-
-// Parse breaks up the given filename in TorrentInfo
-func Parse(filename string) (*ReleaseInfo, error) {
-   tor := &ReleaseInfo{}
-   //fmt.Printf("filename %q\n", filename)
-
-   var startIndex, endIndex = 0, len(filename)
-   cleanName := strings.Replace(filename, "_", " ", -1)
-   for _, pattern := range patterns {
-       matches := pattern.re.FindAllStringSubmatch(cleanName, -1)
-       if len(matches) == 0 {
-           continue
-       }
-       matchIdx := 0
-       if pattern.last {
-           // Take last occurrence of element.
-           matchIdx = len(matches) - 1
-       }
-       //fmt.Printf("  %s: pattern:%q match:%#v\n", pattern.name, pattern.re, matches[matchIdx])
-
-       index := strings.Index(cleanName, matches[matchIdx][1])
-       if index == 0 {
-           startIndex = len(matches[matchIdx][1])
-           //fmt.Printf("  startIndex moved to %d [%q]\n", startIndex, filename[startIndex:endIndex])
-       } else if index < endIndex {
-           endIndex = index
-           //fmt.Printf("  endIndex moved to %d [%q]\n", endIndex, filename[startIndex:endIndex])
-       }
-       setField(tor, pattern.name, matches[matchIdx][1], matches[matchIdx][2])
-   }
-
-   if startIndex > endIndex {
-       // FIXME temp solution to not panic if the are the reverse
-       tmpStart := startIndex
-       tmpEnd := endIndex
-
-       startIndex = tmpEnd
-       endIndex = tmpStart
-   }
-
-   // Start process for title
-   //fmt.Println("  title: <internal>")
-   raw := strings.Split(filename[startIndex:endIndex], "(")[0]
-   cleanName = raw
-   if strings.HasPrefix(cleanName, "- ") {
-       cleanName = raw[2:]
-   }
-   if strings.ContainsRune(cleanName, '.') && !strings.ContainsRune(cleanName, ' ') {
-       cleanName = strings.Replace(cleanName, ".", " ", -1)
-   }
-   cleanName = strings.Replace(cleanName, "_", " ", -1)
-   //cleanName = re.sub('([\[\(_]|- )$', '', cleanName).strip()
-   setField(tor, "title", raw, strings.TrimSpace(cleanName))
-
-   return tor, nil
-}
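With the in-tree releaseinfo parser deleted, title parsing rests entirely on the rls library that release.go already calls (rel := rls.ParseString(title) in the hunks above). For reference, a minimal standalone use of that parser, with the import path assumed to be github.com/moistari/rls and an invented title:

package main

import (
    "fmt"

    "github.com/moistari/rls" // import path assumed
)

func main() {
    r := rls.ParseString("That Show S01E01 1080p WEB-DL DDP 5.1 H.264-GROUP")
    fmt.Println(r.Title, r.Series, r.Episode, r.Resolution, r.Group)
}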
@@ -1,353 +0,0 @@
-package releaseinfo
-
-import (
-   "flag"
-   "testing"
-
-   "github.com/stretchr/testify/assert"
-)
-
-var updateGoldenFiles = flag.Bool("update", false, "update golden files in testdata/")
-
-var testData = []string{
-   "The Walking Dead S05E03 720p HDTV x264-ASAP[ettv]",
-   "Hercules (2014) 1080p BrRip H264 - YIFY",
-   "Dawn.of.the.Planet.of.the.Apes.2014.HDRip.XViD-EVO",
-   "The Big Bang Theory S08E06 HDTV XviD-LOL [eztv]",
-   "22 Jump Street (2014) 720p BrRip x264 - YIFY",
-   "Hercules.2014.EXTENDED.1080p.WEB-DL.DD5.1.H264-RARBG",
-   "Hercules.2014.Extended.Cut.HDRip.XViD-juggs[ETRG]",
-   "Hercules (2014) WEBDL DVDRip XviD-MAX",
-   "WWE Hell in a Cell 2014 PPV WEB-DL x264-WD -={SPARROW}=-",
-   "UFC.179.PPV.HDTV.x264-Ebi[rartv]",
-   "Marvels Agents of S H I E L D S02E05 HDTV x264-KILLERS [eztv]",
-   "X-Men.Days.of.Future.Past.2014.1080p.WEB-DL.DD5.1.H264-RARBG",
-   "Guardians Of The Galaxy 2014 R6 720p HDCAM x264-JYK",
-   "Marvel's.Agents.of.S.H.I.E.L.D.S02E01.Shadows.1080p.WEB-DL.DD5.1",
-   "Marvels Agents of S.H.I.E.L.D. S02E06 HDTV x264-KILLERS[ettv]",
-   "Guardians of the Galaxy (CamRip / 2014)",
-   "The.Walking.Dead.S05E03.1080p.WEB-DL.DD5.1.H.264-Cyphanix[rartv]",
-   "Brave.2012.R5.DVDRip.XViD.LiNE-UNiQUE",
-   "Lets.Be.Cops.2014.BRRip.XViD-juggs[ETRG]",
-   "These.Final.Hours.2013.WBBRip XViD",
-   "Downton Abbey 5x06 HDTV x264-FoV [eztv]",
-   "Annabelle.2014.HC.HDRip.XViD.AC3-juggs[ETRG]",
-   "Lucy.2014.HC.HDRip.XViD-juggs[ETRG]",
-   "The Flash 2014 S01E04 HDTV x264-FUM[ettv]",
-   "South Park S18E05 HDTV x264-KILLERS [eztv]",
-   "The Flash 2014 S01E03 HDTV x264-LOL[ettv]",
-   "The Flash 2014 S01E01 HDTV x264-LOL[ettv]",
-   "Lucy 2014 Dual-Audio WEBRip 1400Mb",
-   "Teenage Mutant Ninja Turtles (HdRip / 2014)",
-   "Teenage Mutant Ninja Turtles (unknown_release_type / 2014)",
-   "The Simpsons S26E05 HDTV x264 PROPER-LOL [eztv]",
-   "2047 - Sights of Death (2014) 720p BrRip x264 - YIFY",
-   "Two and a Half Men S12E01 HDTV x264 REPACK-LOL [eztv]",
-   "Dinosaur 13 2014 WEBrip XviD AC3 MiLLENiUM",
-   "Teenage.Mutant.Ninja.Turtles.2014.HDRip.XviD.MP3-RARBG",
-   "Dawn.Of.The.Planet.of.The.Apes.2014.1080p.WEB-DL.DD51.H264-RARBG",
-   "Teenage.Mutant.Ninja.Turtles.2014.720p.HDRip.x264.AC3.5.1-RARBG",
-   "Gotham.S01E05.Viper.WEB-DL.x264.AAC",
-   "Into.The.Storm.2014.1080p.WEB-DL.AAC2.0.H264-RARBG",
-   "Lucy 2014 Dual-Audio 720p WEBRip",
-   "Into The Storm 2014 1080p BRRip x264 DTS-JYK",
-   "Sin.City.A.Dame.to.Kill.For.2014.1080p.BluRay.x264-SPARKS",
-   "WWE Monday Night Raw 3rd Nov 2014 HDTV x264-Sir Paul",
-   "Jack.And.The.Cuckoo-Clock.Heart.2013.BRRip XViD",
-   "WWE Hell in a Cell 2014 HDTV x264 SNHD",
-   "Dracula.Untold.2014.TS.XViD.AC3.MrSeeN-SiMPLE",
-   "The Missing 1x01 Pilot HDTV x264-FoV [eztv]",
-   "Doctor.Who.2005.8x11.Dark.Water.720p.HDTV.x264-FoV[rartv]",
-   "Gotham.S01E07.Penguins.Umbrella.WEB-DL.x264.AAC",
-   "One Shot [2014] DVDRip XViD-ViCKY",
-   "The Shaukeens 2014 Hindi (1CD) DvDScr x264 AAC...Hon3y",
-   "The Shaukeens (2014) 1CD DvDScr Rip x264 [DDR]",
-   "Annabelle.2014.1080p.PROPER.HC.WEBRip.x264.AAC.2.0-RARBG",
-   "Interstellar (2014) CAM ENG x264 AAC-CPG",
-   "Guardians of the Galaxy (2014) Dual Audio DVDRip AVI",
-   "Eliza Graves (2014) Dual Audio WEB-DL 720p MKV x264",
-   "WWE Monday Night Raw 2014 11 10 WS PDTV x264-RKOFAN1990 -={SPARR",
-   "Sons.of.Anarchy.S01E03",
-   "doctor_who_2005.8x12.death_in_heaven.720p_hdtv_x264-fov",
-   "breaking.bad.s01e01.720p.bluray.x264-reward",
-   "Game of Thrones - 4x03 - Breaker of Chains",
-   "[720pMkv.Com]_sons.of.anarchy.s05e10.480p.BluRay.x264-GAnGSteR",
-   "[ www.Speed.cd ] -Sons.of.Anarchy.S07E07.720p.HDTV.X264-DIMENSION",
-   "Community.s02e20.rus.eng.720p.Kybik.v.Kybe",
-   "The.Jungle.Book.2016.3D.1080p.BRRip.SBS.x264.AAC-ETRG",
-   "Ant-Man.2015.3D.1080p.BRRip.Half-SBS.x264.AAC-m2g",
-   "Ice.Age.Collision.Course.2016.READNFO.720p.HDRIP.X264.AC3.TiTAN",
-   "Red.Sonja.Queen.Of.Plagues.2016.BDRip.x264-W4F[PRiME]",
-   "The Purge: Election Year (2016) HC - 720p HDRiP - 900MB - ShAaNi",
-   "War Dogs (2016) HDTS 600MB - NBY",
-   "The Hateful Eight (2015) 720p BluRay - x265 HEVC - 999MB - ShAaN",
-   "The.Boss.2016.UNRATED.720p.BRRip.x264.AAC-ETRG",
-   "Return.To.Snowy.River.1988.iNTERNAL.DVDRip.x264-W4F[PRiME]",
-   "Akira (2016) - UpScaled - 720p - DesiSCR-Rip - Hindi - x264 - AC3 - 5.1 - Mafiaking - M2Tv",
-   "Ben Hur 2016 TELESYNC x264 AC3 MAXPRO",
-   "The.Secret.Life.of.Pets.2016.HDRiP.AAC-LC.x264-LEGi0N",
-   "[HorribleSubs] Clockwork Planet - 10 [480p].mkv",
-   "[HorribleSubs] Detective Conan - 862 [1080p].mkv",
-   "thomas.and.friends.s19e09_s20e14.convert.hdtv.x264-w4f[eztv].mkv",
-   "Blade.Runner.2049.2017.1080p.WEB-DL.DD5.1.H264-FGT-[rarbg.to]",
-   "2012(2009).1080p.Dual Audio(Hindi+English) 5.1 Audios",
-   "2012 (2009) 1080p BrRip x264 - 1.7GB - YIFY",
-   "2012 2009 x264 720p Esub BluRay 6.0 Dual Audio English Hindi GOPISAHI",
-}
-
-var moreTestData = []string{
-   "Tokyo Olympics 2020 Street Skateboarding Prelims and Final 25 07 2021 1080p WEB-DL AAC2 0 H 264-playWEB",
-   "Tokyo Olympics 2020 Taekwondo Day3 Finals 26 07 720pEN25fps ES",
-   "Die Freundin der Haie 2021 German DUBBED DL DOKU 1080p WEB x264-WiSHTV",
-}
-
-var movieTests = []string{
-   "The Last Letter from Your Lover 2021 2160p NF WEBRip DDP5 1 Atmos x265-KiNGS",
-   "Blade 1998 Hybrid 1080p BluRay REMUX AVC Atmos-EPSiLON",
-   "Forrest Gump 1994 1080p BluRay DDP7 1 x264-Geek",
-   "Deux sous de violettes 1951 1080p Blu-ray Remux AVC FLAC 2 0-EDPH",
-   "Predator 1987 2160p UHD BluRay DTS-HD MA 5 1 HDR x265-W4NK3R",
-   "Final Destination 2 2003 1080p BluRay x264-ETHOS",
-   "Hellboy.II.The.Golden.Army.2008.REMASTERED.NORDiC.1080p.BluRay.x264-PANDEMONiUM",
-   "Wonders of the Sea 2017 BluRay 1080p AVC DTS-HD MA 2.0-BeyondHD",
-   "A Week Away 2021 1080p NF WEB-DL DDP 5.1 Atmos DV H.265-SymBiOTes",
-   "Control 2004 BluRay 1080p DTS-HD MA 5.1 AVC REMUX-FraMeSToR",
-   "Mimi 2021 1080p Hybrid WEB-DL DDP 5.1 x264-Telly",
-   "She's So Lovely 1997 BluRay 1080p DTS-HD MA 5.1 AVC REMUX-FraMeSToR",
-   "Those Who Wish Me Dead 2021 BluRay 1080p DD5.1 x264-BHDStudio",
-   "The Last Letter from Your Lover 2021 2160p NF WEBRip DDP 5.1 Atmos x265-KiNGS",
-   "Spinning Man 2018 BluRay 1080p DTS 5.1 x264-MTeam",
-   "The Wicker Man 1973 Final Cut 1080p BluRay FLAC 1.0 x264-NTb",
-   "New Police Story 2004 720p BluRay DTS x264-HiFi",
-   "La Cienaga 2001 Criterion Collection NTSC DVD9 DD 2.0",
-   "The Thin Blue Line 1988 Criterion Collection NTSC DVD9 DD 2.0",
-   "The Thin Red Line 1998 Criterion Collection NTSC 2xDVD9 DD 5.1",
-   "The Sword of Doom AKA daibosatsu 1966 Criterion Collection NTSC DVD9 DD 1.0",
-   "Freaks 2018 Hybrid REPACK 1080p BluRay REMUX AVC DTS-HD MA 5.1-EPSiLON",
-   "The Oxford Murders 2008 1080p BluRay Remux AVC DTS-HD MA 7.1-Pootis",
-   "Berlin Babylon 2001 PAL DVD9 DD 5.1",
-   "Dillinger 1973 1080p BluRay REMUX AVC DTS-HD MA 1.0-HiDeFZeN",
-   "True Romance 1993 2160p UHD Blu-ray DV HDR HEVC DTS-HD MA 5.1",
-   "Family 2019 1080p AMZN WEB-DL DD+ 5.1 H.264-TEPES",
-   "Family 2019 720p AMZN WEB-DL DD+ 5.1 H.264-TEPES",
-   "The Banana Splits Movie 2019 NTSC DVD9 DD 5.1-(_10_)",
-   "Sex Is Zero AKA saegjeugsigong 2002 720p BluRay DD 5.1 x264-KiR",
-   "Sex Is Zero AKA saegjeugsigong 2002 1080p BluRay DTS 5.1 x264-KiR",
-   "Sex Is Zero AKA saegjeugsigong 2002 1080p KOR Blu-ray AVC DTS-HD MA 5.1-ARiN",
-   "The Stranger AKA aagntuk 1991 Criterion Collection NTSC DVD9 DD 1.0",
-   "The Taking of Power by Louis XIV AKA La prise de pouvoir par Louis XIV 1966 Criterion Collection NTSC DVD9 DD 1.0",
-   "La Cienaga 2001 Criterion Collection NTSC DVD9 DD 2.0",
-   "The Thin Blue Line 1988 Criterion Collection NTSC DVD9 DD 2.0",
-   "The Thin Red Line 1998 Criterion Collection NTSC 2xDVD9 DD 5.1",
-   "The Sword of Doom AKA daibosatsu 1966 Criterion Collection NTSC DVD9 DD 1.0",
-   "Freaks 2018 Hybrid REPACK 1080p BluRay REMUX AVC DTS-HD MA 5.1-EPSiLON",
-   "The Oxford Murders 2008 1080p BluRay Remux AVC DTS-HD MA 7.1-Pootis",
-   "Berlin Babylon 2001 PAL DVD9 DD 5.1",
-   "Dillinger 1973 1080p BluRay REMUX AVC DTS-HD MA 1.0-HiDeFZeN",
-   "True Romance 1993 2160p UHD Blu-ray DV HDR HEVC DTS-HD MA 5.1",
-   "La Cienaga 2001 Criterion Collection NTSC DVD9 DD 2.0",
-   "Freaks 2018 Hybrid REPACK 1080p BluRay REMUX AVC DTS-HD MA 5.1-EPSiLON",
-   "The Oxford Murders 2008 1080p BluRay Remux AVC DTS-HD MA 7.1-Pootis",
-}
-
-//func TestParse_Movies(t *testing.T) {
-// type args struct {
-//     filename string
-// }
-// tests := []struct {
-//     filename string
-//     want     *ReleaseInfo
-//     wantErr  bool
-// }{
-//     {filename: "", want: nil, wantErr: false},
-// }
-// for _, tt := range tests {
-//     t.Run(tt.filename, func(t *testing.T) {
-//         got, err := Parse(tt.filename)
-//         if (err != nil) != tt.wantErr {
-//             t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantErr)
-//             return
-//         }
-//         if !reflect.DeepEqual(got, tt.want) {
-//             t.Errorf("Parse() got = %v, want %v", got, tt.want)
-//         }
-//     })
-// }
-//}
-
-var tvTests = []string{
-   "Melrose Place S04 480p web-dl eac3 x264",
-   "Privileged.S01E17.1080p.WEB.h264-DiRT",
-   "Banshee S02 BluRay 720p DD5.1 x264-NTb",
-   "Banshee S04 BluRay 720p DTS x264-NTb",
-   "Servant S01 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-FLUX",
-   "South Park S06 1080p BluRay DD5.1 x264-W4NK3R",
-   "The Walking Dead: Origins S01E01 1080p WEB-DL DDP 2.0 H.264-GOSSIP",
-   "Mythic Quest S01 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-FLUX",
-   "Masameer County S01 1080p NF WEB-DL DD+ 5.1 H.264-XIQ",
-   "Kevin Can F**K Himself 2021 S01 1080p AMZN WEB-DL DD+ 5.1 H.264-SaiTama",
-   "How to Sell Drugs Online (Fast) S03 1080p NF WEB-DL DD+ 5.1 x264-KnightKing",
-   "Power Book III: Raising Kanan S01E01 2160p WEB-DL DD+ 5.1 H265-GGEZ",
-   "Power Book III: Raising Kanan S01E02 2160p WEB-DL DD+ 5.1 H265-GGWP",
-   "Thea Walking Dead: Origins S01E01 1080p WEB-DL DD+ 2.0 H.264-GOSSIP",
-   "Mean Mums S01 1080p AMZN WEB-DL DD+ 2.0 H.264-FLUX",
-   "[BBT-RMX] Servant x Service",
-}
-
-func TestParse_TV(t *testing.T) {
-   tests := []struct {
-       filename string
-       want     *ReleaseInfo
-       wantErr  bool
-   }{
-       {
-           filename: "Melrose Place S04 480p web-dl eac3 x264",
-           want: &ReleaseInfo{
-               Title:      "Melrose Place",
-               Season:     4,
-               Resolution: "480p",
-               Source:     "web-dl",
-               Codec:      "x264",
-               Group:      "dl eac3 x264",
-           },
-           wantErr: false,
-       },
-       {
-           filename: "Privileged.S01E17.1080p.WEB.h264-DiRT",
-           want: &ReleaseInfo{
-               Title:      "Privileged",
-               Season:     1,
-               Episode:    17,
-               Resolution: "1080p",
-               Source:     "WEB",
-               Codec:      "h264",
-               Group:      "DiRT",
-           },
-           wantErr: false,
-       },
-       {
-           filename: "Banshee S02 BluRay 720p DD5.1 x264-NTb",
-           want: &ReleaseInfo{
-               Title:      "Banshee",
-               Season:     2,
-               Resolution: "720p",
-               Source:     "BluRay",
-               Codec:      "x264",
-               Audio:      "DD5.1",
-               Group:      "NTb",
-           },
-           wantErr: false,
-       },
-       {
-           filename: "Banshee Season 2 BluRay 720p DD5.1 x264-NTb",
-           want: &ReleaseInfo{
-               Title:      "Banshee",
-               Season:     2,
-               Resolution: "720p",
-               Source:     "BluRay",
-               Codec:      "x264",
-               Audio:      "DD5.1",
-               Group:      "NTb",
-           },
-           wantErr: false,
-       },
-       {
-           filename: "[BBT-RMX] Servant x Service",
-           want: &ReleaseInfo{
-               Title: "",
-           },
-           wantErr: false,
-       },
-       {
-           filename: "[Dekinai] Dungeon Ni Deai O Motomeru No Wa Machigatte Iru Darouka ~Familia Myth~ (2015) [BD 1080p x264 10bit - FLAC 2 0]",
-           want: &ReleaseInfo{
-               Title: "",
-           },
-           wantErr: false,
-       },
-       {
-           filename: "[SubsPlease] Higurashi no Naku Koro ni Sotsu - 09 (1080p) [C00D6C68]",
-           want: &ReleaseInfo{
-               Title: "",
-           },
-           wantErr: false,
-       },
-   }
-   for _, tt := range tests {
-       t.Run(tt.filename, func(t *testing.T) {
-           got, err := Parse(tt.filename)
-
-           if (err != nil) != tt.wantErr {
-               t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantErr)
-               return
-           }
-
-           assert.Equal(t, tt.want, got)
-           //if !reflect.DeepEqual(got, tt.want) {
-           //  t.Errorf("Parse() got = %v, want %v", got, tt.want)
-           //}
-       })
-   }
-}
-
-var gamesTests = []string{
-   "Night Book NSW-LUMA",
-   "Evdeki Lanet-DARKSiDERS",
-   "Evdeki.Lanet-DARKSiDERS",
-}
-
-//func TestParser(t *testing.T) {
-// for i, fname := range testData {
-//     t.Run(fmt.Sprintf("golden_file_%03d", i), func(t *testing.T) {
-//         tor, err := Parse(fname)
-//         if err != nil {
-//             t.Fatalf("test %v: parser error:\n %v", i, err)
-//         }
-//
-//         var want ReleaseInfo
-//
-//         if !reflect.DeepEqual(*tor, want) {
-//             t.Fatalf("test %v: wrong result for %q\nwant:\n %v\ngot:\n %v", i, fname, want, *tor)
-//         }
-//     })
-// }
-//}
-
-//func TestParserWriteToFiles(t *testing.T) {
-// for i, fname := range testData {
-//     t.Run(fmt.Sprintf("golden_file_%03d", i), func(t *testing.T) {
-//         tor, err := Parse(fname)
-//         if err != nil {
-//             t.Fatalf("test %v: parser error:\n %v", i, err)
-//         }
-//
-//         goldenFilename := filepath.Join("testdata", fmt.Sprintf("golden_file_%03d.json", i))
-//
-//         if *updateGoldenFiles {
-//             buf, err := json.MarshalIndent(tor, "", " ")
-//             if err != nil {
-//                 t.Fatalf("error marshaling result: %v", err)
-//             }
-//
-//             if err = ioutil.WriteFile(goldenFilename, buf, 0644); err != nil {
-//                 t.Fatalf("unable to update golden file: %v", err)
-//             }
-//         }
-//
-//         buf, err := ioutil.ReadFile(goldenFilename)
-//         if err != nil {
-//             t.Fatalf("error loading golden file: %v", err)
-//         }
-//
-//         var want ReleaseInfo
-//         err = json.Unmarshal(buf, &want)
-//         if err != nil {
-//             t.Fatalf("error unmarshalling golden file %v: %v", goldenFilename, err)
-//         }
-//
-//         if !reflect.DeepEqual(*tor, want) {
-//             t.Fatalf("test %v: wrong result for %q\nwant:\n %v\ngot:\n %v", i, fname, want, *tor)
-//         }
-//     })
-// }
-//}
@@ -1,58 +0,0 @@
-package releaseinfo
-
-import (
-   "fmt"
-   "os"
-   "reflect"
-   "regexp"
-)
-
-var patterns = []struct {
-   name string
-   // Use the last matching pattern. E.g. Year.
-   last bool
-   kind reflect.Kind
-   // REs need to have 2 sub expressions (groups), the first one is "raw", and
-   // the second one for the "clean" value.
-   // E.g. Epiode matching on "S01E18" will result in: raw = "E18", clean = "18".
-   re *regexp.Regexp
-}{
-   //{"season", false, reflect.Int, regexp.MustCompile(`(?i)(s?([0-9]{1,2}))[ex]`)},
-   {"season", false, reflect.Int, regexp.MustCompile(`(?i)((?:S|Season\s*)(\d{1,3}))`)},
-   {"episode", false, reflect.Int, regexp.MustCompile(`(?i)([ex]([0-9]{2})(?:[^0-9]|$))`)},
-   {"episode", false, reflect.Int, regexp.MustCompile(`(-\s+([0-9]+)(?:[^0-9]|$))`)},
-   {"year", true, reflect.Int, regexp.MustCompile(`\b(((?:19[0-9]|20[0-9])[0-9]))\b`)},
-
-   {"resolution", false, reflect.String, regexp.MustCompile(`\b(([0-9]{3,4}p|i))\b`)},
-   {"source", false, reflect.String, regexp.MustCompile(`(?i)\b(((?:PPV\.)?[HP]DTV|(?:HD)?CAM|B[DR]Rip|(?:HD-?)?TS|(?:PPV )?WEB-?DL(?: DVDRip)?|HDRip|DVDRip|DVDRIP|CamRip|WEB|W[EB]BRip|BluRay|DvDScr|telesync))\b`)},
-   {"codec", false, reflect.String, regexp.MustCompile(`(?i)\b((xvid|HEVC|[hx]\.?26[45]))\b`)},
-   {"container", false, reflect.String, regexp.MustCompile(`(?i)\b((MKV|AVI|MP4))\b`)},
-
-   {"audio", false, reflect.String, regexp.MustCompile(`(?i)\b((MP3|DD5\.?1|Dual[\- ]Audio|LiNE|DTS|AAC[.-]LC|AAC(?:\.?2\.0)?|AC3(?:\.5\.1)?))\b`)},
-   {"region", false, reflect.String, regexp.MustCompile(`(?i)\b(R([0-9]))\b`)},
-   {"size", false, reflect.String, regexp.MustCompile(`(?i)\b((\d+(?:\.\d+)?(?:GB|MB)))\b`)},
-   {"website", false, reflect.String, regexp.MustCompile(`^(\[ ?([^\]]+?) ?\])`)},
-   {"language", false, reflect.String, regexp.MustCompile(`(?i)\b((rus\.eng|ita\.eng))\b`)},
-   {"sbs", false, reflect.String, regexp.MustCompile(`(?i)\b(((?:Half-)?SBS))\b`)},
-
-   {"group", false, reflect.String, regexp.MustCompile(`\b(- ?([^-]+(?:-={[^-]+-?$)?))$`)},
-
-   {"extended", false, reflect.Bool, regexp.MustCompile(`(?i)\b(EXTENDED(:?.CUT)?)\b`)},
-   {"hardcoded", false, reflect.Bool, regexp.MustCompile(`(?i)\b((HC))\b`)},
-
-   {"proper", false, reflect.Bool, regexp.MustCompile(`(?i)\b((PROPER))\b`)},
-   {"repack", false, reflect.Bool, regexp.MustCompile(`(?i)\b((REPACK))\b`)},
-
-   {"widescreen", false, reflect.Bool, regexp.MustCompile(`(?i)\b((WS))\b`)},
-   {"unrated", false, reflect.Bool, regexp.MustCompile(`(?i)\b((UNRATED))\b`)},
-   {"threeD", false, reflect.Bool, regexp.MustCompile(`(?i)\b((3D))\b`)},
-}
-
-func init() {
-   for _, pat := range patterns {
-       if pat.re.NumSubexp() != 2 {
-           fmt.Printf("Pattern %q does not have enough capture groups. want 2, got %d\n", pat.name, pat.re.NumSubexp())
-           os.Exit(1)
-       }
-   }
-}