feat(feeds): torznab parse category and freeleech (#492)

* feat(feeds): torznab parse freeleech

* feat(feeds): torznab parse categories
ze0s 2022-10-14 18:01:04 +02:00 committed by GitHub
parent 924899d9f3
commit b7d2161fdb
10 changed files with 478 additions and 57 deletions


@@ -309,12 +309,26 @@ func (f Filter) CheckFilter(r *Release) ([]string, bool) {
		r.addRejectionF("year not matching. got: %d want: %v", r.Year, f.Years)
	}

-	if f.MatchCategories != "" && !contains(r.Category, f.MatchCategories) {
-		r.addRejectionF("category not matching. got: %v want: %v", r.Category, f.MatchCategories)
+	if f.MatchCategories != "" {
+		var categories []string
+		categories = append(categories, r.Categories...)
+		if r.Category != "" {
+			categories = append(categories, r.Category)
+		}
+		if !contains(r.Category, f.MatchCategories) && !containsAny(r.Categories, f.MatchCategories) {
+			r.addRejectionF("category not matching. got: %v want: %v", strings.Join(categories, ","), f.MatchCategories)
+		}
	}

-	if f.ExceptCategories != "" && contains(r.Category, f.ExceptCategories) {
-		r.addRejectionF("category unwanted. got: %v want: %v", r.Category, f.ExceptCategories)
+	if f.ExceptCategories != "" {
+		var categories []string
+		categories = append(categories, r.Categories...)
+		if r.Category != "" {
+			categories = append(categories, r.Category)
+		}
+		if !contains(r.Category, f.ExceptCategories) && !containsAny(r.Categories, f.ExceptCategories) {
+			r.addRejectionF("category unwanted. got: %v want: %v", strings.Join(categories, ","), f.ExceptCategories)
+		}
	}

	if len(f.MatchReleaseTypes) > 0 && !containsSlice(r.Category, f.MatchReleaseTypes) {
@@ -566,15 +580,25 @@ func containsAny(tags []string, filter string) bool {
	return containsMatch(tags, strings.Split(filter, ","))
}

+func containsAnyOther(filter string, tags ...string) bool {
+	return containsMatch(tags, strings.Split(filter, ","))
+}
+
func sliceContainsSlice(tags []string, filters []string) bool {
	return containsMatchBasic(tags, filters)
}

func containsMatchFuzzy(tags []string, filters []string) bool {
	for _, tag := range tags {
+		if tag == "" {
+			continue
+		}
		tag = strings.ToLower(tag)

		for _, filter := range filters {
+			if filter == "" {
+				continue
+			}
			filter = strings.ToLower(filter)
			filter = strings.Trim(filter, " ")
			// check if line contains * or ?, if so try wildcard match, otherwise try substring match
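A minimal standalone sketch of the new category matching behaviour, assuming the unexported contains/containsAny helpers work as the comment in containsMatchFuzzy describes (wildcard match when the filter value has * or ?, substring match otherwise). matchCategory below is an illustrative stand-in, not part of the commit.

package main

import (
	"fmt"
	"regexp"
	"strings"
)

// matchCategory merges Release.Category and Release.Categories and tests them
// against a comma-separated filter value, mirroring the new CheckFilter blocks.
func matchCategory(releaseCategory string, releaseCategories []string, filter string) bool {
	candidates := append([]string{}, releaseCategories...)
	if releaseCategory != "" {
		candidates = append(candidates, releaseCategory)
	}

	for _, want := range strings.Split(filter, ",") {
		want = strings.ToLower(strings.TrimSpace(want))
		if want == "" {
			continue
		}

		if strings.ContainsAny(want, "*?") {
			// wildcard: *tv* becomes ^.*tv.*$
			pattern := "^" + strings.ReplaceAll(strings.ReplaceAll(regexp.QuoteMeta(want), `\*`, ".*"), `\?`, ".") + "$"
			re := regexp.MustCompile(pattern)
			for _, got := range candidates {
				if re.MatchString(strings.ToLower(got)) {
					return true
				}
			}
			continue
		}

		// plain values fall back to substring matching
		for _, got := range candidates {
			if strings.Contains(strings.ToLower(got), want) {
				return true
			}
		}
	}

	return false
}

func main() {
	cats := []string{"Movies/HD", "2040"}
	fmt.Println(matchCategory("", cats, "*tv*"))     // false -> rejection "category not matching"
	fmt.Println(matchCategory("", cats, "*Movies*")) // true
	fmt.Println(matchCategory("", cats, "2040"))     // true
}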


@@ -252,6 +252,111 @@ func TestFilter_CheckFilter(t *testing.T) {
},
want: false,
},
{
name: "movie_bad_category_2",
fields: &Release{
TorrentName: "That Movie 2020 2160p BluRay DD5.1 x264-GROUP1",
//Category: "Movies",
Categories: []string{"Movies/HD", "2040"},
Freeleech: true,
Size: uint64(30000000001), // 30GB
},
args: args{
filter: Filter{
Enabled: true,
MatchCategories: "*tv*",
Freeleech: true,
MinSize: "10 GB",
MaxSize: "40GB",
Resolutions: []string{"1080p", "2160p"},
Sources: []string{"BluRay"},
Codecs: []string{"x264"},
Years: "2015,2018-2022",
MatchReleaseGroups: "GROUP1,BADGROUP",
Shows: "*Movie*, good story, bad movie",
},
rejections: []string{"category not matching. got: Movies/HD,2040 want: *tv*"},
},
want: false,
},
{
name: "movie_category_2",
fields: &Release{
TorrentName: "That Movie 2020 2160p BluRay DD5.1 x264-GROUP1",
//Category: "Movies",
Categories: []string{"Movies/HD", "2040"},
Freeleech: true,
Size: uint64(30000000001), // 30GB
},
args: args{
filter: Filter{
Enabled: true,
MatchCategories: "*Movies*",
Freeleech: true,
MinSize: "10 GB",
MaxSize: "40GB",
Resolutions: []string{"1080p", "2160p"},
Sources: []string{"BluRay"},
Codecs: []string{"x264"},
Years: "2015,2018-2022",
MatchReleaseGroups: "GROUP1,BADGROUP",
Shows: "*Movie*, good story, bad movie",
},
},
want: true,
},
{
name: "movie_category_3",
fields: &Release{
TorrentName: "That Movie 2020 2160p BluRay DD5.1 x264-GROUP1",
//Category: "Movies",
Categories: []string{"Movies/HD", "2040"},
Freeleech: true,
Size: uint64(30000000001), // 30GB
},
args: args{
filter: Filter{
Enabled: true,
MatchCategories: "2040",
Freeleech: true,
MinSize: "10 GB",
MaxSize: "40GB",
Resolutions: []string{"1080p", "2160p"},
Sources: []string{"BluRay"},
Codecs: []string{"x264"},
Years: "2015,2018-2022",
MatchReleaseGroups: "GROUP1,BADGROUP",
Shows: "*Movie*, good story, bad movie",
},
},
want: true,
},
{
name: "movie_category_4",
fields: &Release{
TorrentName: "That Movie 2020 2160p BluRay DD5.1 x264-GROUP1",
//Category: "Movies",
Categories: []string{"Movies/HD", "2040"},
Freeleech: true,
Size: uint64(30000000001), // 30GB
},
args: args{
filter: Filter{
Enabled: true,
MatchCategories: "*HD*",
Freeleech: true,
MinSize: "10 GB",
MaxSize: "40GB",
Resolutions: []string{"1080p", "2160p"},
Sources: []string{"BluRay"},
Codecs: []string{"x264"},
Years: "2015,2018-2022",
MatchReleaseGroups: "GROUP1,BADGROUP",
Shows: "*Movie*, good story, bad movie",
},
},
want: true,
},
{
name: "tv_match_season_episode",
fields: &Release{


@@ -52,6 +52,7 @@ type Release struct {
	Size uint64 `json:"size"`
	Title string `json:"title"` // Parsed title
	Category string `json:"category"`
+	Categories []string `json:"categories,omitempty"`
	Season int `json:"season"`
	Episode int `json:"episode"`
	Year int `json:"year"`


@@ -226,15 +226,10 @@ func (s *service) Test(ctx context.Context, feed *domain.Feed) error {
	if feed.Type == string(domain.FeedTypeTorznab) {
		// setup torznab Client
		c := torznab.NewClient(torznab.Config{Host: feed.URL, ApiKey: feed.ApiKey, Log: subLogger})

-		caps, err := c.GetCaps()
-		if err != nil {
-			s.log.Error().Err(err).Msg("error testing feed")
-			return err
-		}
-
-		if caps == nil {
-			s.log.Error().Msg("could not test feed and get caps")
-			return errors.New("could not test feed and get caps")
+		if _, err := c.FetchFeed(); err != nil {
+			s.log.Error().Err(err).Msg("error getting torznab feed")
+			return err
		}
	}


@@ -2,6 +2,7 @@ package feed

import (
	"sort"
+	"strconv"
	"time"

	"github.com/autobrr/autobrr/internal/domain"

@@ -79,6 +80,17 @@ func (j *TorznabJob) process() error {

		rls.ParseString(item.Title)

+		if parseFreeleech(item) {
+			rls.Freeleech = true
+			rls.Bonus = []string{"Freeleech"}
+		}
+
+		// map torznab categories ID and Name into rls.Categories
+		// so we can filter on both ID and Name
+		for _, category := range item.Categories {
+			rls.Categories = append(rls.Categories, []string{category.Name, strconv.Itoa(category.ID)}...)
+		}
+
		releases = append(releases, rls)
	}

@@ -88,9 +100,21 @@ func (j *TorznabJob) process() error {
	return nil
}

+func parseFreeleech(item torznab.FeedItem) bool {
+	for _, attr := range item.Attributes {
+		if attr.Name == "downloadvolumefactor" {
+			if attr.Value == "0" {
+				return true
+			}
+		}
+	}
+
+	return false
+}
+
func (j *TorznabJob) getFeed() ([]torznab.FeedItem, error) {
	// get feed
-	feedItems, err := j.Client.GetFeed()
+	feedItems, err := j.Client.FetchFeed()
	if err != nil {
		j.Log.Error().Err(err).Msgf("error fetching feed items")
		return nil, errors.Wrap(err, "error fetching feed items")
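A rough sketch of what the feed job now derives from a torznab item. The types below are simplified stand-ins for torznab.FeedItem, torznab.Category and the release fields touched above (field names follow the diff; the glue code is illustrative).

package main

import (
	"fmt"
	"strconv"
)

type attr struct{ Name, Value string }

type category struct {
	ID   int
	Name string
}

type feedItem struct {
	Title      string
	Categories []category
	Attributes []attr
}

// parseFreeleech follows the rule in the commit:
// downloadvolumefactor == "0" means the release is freeleech.
func parseFreeleech(item feedItem) bool {
	for _, a := range item.Attributes {
		if a.Name == "downloadvolumefactor" && a.Value == "0" {
			return true
		}
	}
	return false
}

func main() {
	item := feedItem{
		Title:      "That Movie 2020 2160p BluRay DD5.1 x264-GROUP1",
		Categories: []category{{ID: 2040, Name: "Movies/HD"}},
		Attributes: []attr{{Name: "downloadvolumefactor", Value: "0"}},
	}

	var releaseCategories []string
	for _, c := range item.Categories {
		// both the category name and the numeric ID become filterable values
		releaseCategories = append(releaseCategories, c.Name, strconv.Itoa(c.ID))
	}

	fmt.Println(parseFreeleech(item)) // true
	fmt.Println(releaseCategories)    // [Movies/HD 2040]
}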


@@ -36,19 +36,14 @@ type Search struct {
	SupportedParams string `xml:"supportedParams,attr"`
}

-type Categories struct {
-	Category []Category `xml:"category"`
+type CapCategories struct {
+	Categories []Category `xml:"category"`
}

-type Category struct {
+type CapCategory struct {
	ID string `xml:"id,attr"`
	Name string `xml:"name,attr"`
-	Subcat []SubCategory `xml:"subcat"`
-}
-
-type SubCategory struct {
-	ID string `xml:"id,attr"`
-	Name string `xml:"name,attr"`
+	SubCategories []CapCategory `xml:"subcat"`
}

type Groups struct {

@@ -85,14 +80,14 @@ type CapsResponse struct {
}

type Caps struct {
	XMLName xml.Name `xml:"caps"`
	Server Server `xml:"server"`
	Limits Limits `xml:"limits"`
	Retention Retention `xml:"retention"`
	Registration Registration `xml:"registration"`
	Searching Searching `xml:"searching"`
-	Categories Categories `xml:"categories"`
+	Categories CapCategories `xml:"categories"`
	Groups Groups `xml:"groups"`
	Genres Genres `xml:"genres"`
	Tags Tags `xml:"tags"`
}

pkg/torznab/category.go (new file, 219 lines added)

@@ -0,0 +1,219 @@
package torznab
import (
"fmt"
"regexp"
"strconv"
)
type Category struct {
ID int `xml:"id,attr"`
Name string `xml:"name,attr"`
SubCategories []Category `xml:"subcat"`
}
func (c Category) String() string {
return fmt.Sprintf("%s[%d]", c.Name, c.ID)
}
func (c Category) FromString(str string) {
var re = regexp.MustCompile(`(?m)(.+)\[(.+)\]`)
match := re.FindAllString(str, -1)
c.Name = match[1]
c.ID, _ = strconv.Atoi(match[2])
}
const (
CustomCategoryOffset = 100000
)
// Categories from the Newznab spec
// https://github.com/nZEDb/nZEDb/blob/0.x/docs/newznab_api_specification.txt#L627
var (
CategoryOther = Category{0, "Other", nil}
CategoryOther_Misc = Category{10, "Other/Misc", nil}
CategoryOther_Hashed = Category{20, "Other/Hashed", nil}
CategoryConsole = Category{1000, "Console", nil}
CategoryConsole_NDS = Category{1010, "Console/NDS", nil}
CategoryConsole_PSP = Category{1020, "Console/PSP", nil}
CategoryConsole_Wii = Category{1030, "Console/Wii", nil}
CategoryConsole_XBOX = Category{1040, "Console/Xbox", nil}
CategoryConsole_XBOX360 = Category{1050, "Console/Xbox360", nil}
CategoryConsole_WiiwareVC = Category{1060, "Console/Wiiware/V", nil}
CategoryConsole_XBOX360DLC = Category{1070, "Console/Xbox360", nil}
CategoryConsole_PS3 = Category{1080, "Console/PS3", nil}
CategoryConsole_Other = Category{1999, "Console/Other", nil}
CategoryConsole_3DS = Category{1110, "Console/3DS", nil}
CategoryConsole_PSVita = Category{1120, "Console/PS Vita", nil}
CategoryConsole_WiiU = Category{1130, "Console/WiiU", nil}
CategoryConsole_XBOXOne = Category{1140, "Console/XboxOne", nil}
CategoryConsole_PS4 = Category{1180, "Console/PS4", nil}
CategoryMovies = Category{2000, "Movies", nil}
CategoryMovies_Foreign = Category{2010, "Movies/Foreign", nil}
CategoryMovies_Other = Category{2020, "Movies/Other", nil}
CategoryMovies_SD = Category{2030, "Movies/SD", nil}
CategoryMovies_HD = Category{2040, "Movies/HD", nil}
CategoryMovies_3D = Category{2050, "Movies/3D", nil}
CategoryMovies_BluRay = Category{2060, "Movies/BluRay", nil}
CategoryMovies_DVD = Category{2070, "Movies/DVD", nil}
CategoryMovies_WEBDL = Category{2080, "Movies/WEBDL", nil}
CategoryAudio = Category{3000, "Audio", nil}
CategoryAudio_MP3 = Category{3010, "Audio/MP3", nil}
CategoryAudio_Video = Category{3020, "Audio/Video", nil}
CategoryAudio_Audiobook = Category{3030, "Audio/Audiobook", nil}
CategoryAudio_Lossless = Category{3040, "Audio/Lossless", nil}
CategoryAudio_Other = Category{3999, "Audio/Other", nil}
CategoryAudio_Foreign = Category{3060, "Audio/Foreign", nil}
CategoryPC = Category{4000, "PC", nil}
CategoryPC_0day = Category{4010, "PC/0day", nil}
CategoryPC_ISO = Category{4020, "PC/ISO", nil}
CategoryPC_Mac = Category{4030, "PC/Mac", nil}
CategoryPC_PhoneOther = Category{4040, "PC/Phone-Other", nil}
CategoryPC_Games = Category{4050, "PC/Games", nil}
CategoryPC_PhoneIOS = Category{4060, "PC/Phone-IOS", nil}
CategoryPC_PhoneAndroid = Category{4070, "PC/Phone-Android", nil}
CategoryTV = Category{5000, "TV", nil}
CategoryTV_WEBDL = Category{5010, "TV/WEB-DL", nil}
CategoryTV_FOREIGN = Category{5020, "TV/Foreign", nil}
CategoryTV_SD = Category{5030, "TV/SD", nil}
CategoryTV_HD = Category{5040, "TV/HD", nil}
CategoryTV_Other = Category{5999, "TV/Other", nil}
CategoryTV_Sport = Category{5060, "TV/Sport", nil}
CategoryTV_Anime = Category{5070, "TV/Anime", nil}
CategoryTV_Documentary = Category{5080, "TV/Documentary", nil}
CategoryXXX = Category{6000, "XXX", nil}
CategoryXXX_DVD = Category{6010, "XXX/DVD", nil}
CategoryXXX_WMV = Category{6020, "XXX/WMV", nil}
CategoryXXX_XviD = Category{6030, "XXX/XviD", nil}
CategoryXXX_x264 = Category{6040, "XXX/x264", nil}
CategoryXXX_Other = Category{6999, "XXX/Other", nil}
CategoryXXX_Imageset = Category{6060, "XXX/Imageset", nil}
CategoryXXX_Packs = Category{6070, "XXX/Packs", nil}
CategoryBooks = Category{7000, "Books", nil}
CategoryBooks_Magazines = Category{7010, "Books/Magazines", nil}
CategoryBooks_Ebook = Category{7020, "Books/Ebook", nil}
CategoryBooks_Comics = Category{7030, "Books/Comics", nil}
CategoryBooks_Technical = Category{7040, "Books/Technical", nil}
CategoryBooks_Foreign = Category{7060, "Books/Foreign", nil}
CategoryBooks_Unknown = Category{7999, "Books/Unknown", nil}
)
var AllCategories = Categories{
CategoryOther,
CategoryOther_Misc,
CategoryOther_Hashed,
CategoryConsole,
CategoryConsole_NDS,
CategoryConsole_PSP,
CategoryConsole_Wii,
CategoryConsole_XBOX,
CategoryConsole_XBOX360,
CategoryConsole_WiiwareVC,
CategoryConsole_XBOX360DLC,
CategoryConsole_PS3,
CategoryConsole_Other,
CategoryConsole_3DS,
CategoryConsole_PSVita,
CategoryConsole_WiiU,
CategoryConsole_XBOXOne,
CategoryConsole_PS4,
CategoryMovies,
CategoryMovies_Foreign,
CategoryMovies_Other,
CategoryMovies_SD,
CategoryMovies_HD,
CategoryMovies_3D,
CategoryMovies_BluRay,
CategoryMovies_DVD,
CategoryMovies_WEBDL,
CategoryAudio,
CategoryAudio_MP3,
CategoryAudio_Video,
CategoryAudio_Audiobook,
CategoryAudio_Lossless,
CategoryAudio_Other,
CategoryAudio_Foreign,
CategoryPC,
CategoryPC_0day,
CategoryPC_ISO,
CategoryPC_Mac,
CategoryPC_PhoneOther,
CategoryPC_Games,
CategoryPC_PhoneIOS,
CategoryPC_PhoneAndroid,
CategoryTV,
CategoryTV_WEBDL,
CategoryTV_FOREIGN,
CategoryTV_SD,
CategoryTV_HD,
CategoryTV_Other,
CategoryTV_Sport,
CategoryTV_Anime,
CategoryTV_Documentary,
CategoryXXX,
CategoryXXX_DVD,
CategoryXXX_WMV,
CategoryXXX_XviD,
CategoryXXX_x264,
CategoryXXX_Other,
CategoryXXX_Imageset,
CategoryXXX_Packs,
CategoryBooks,
CategoryBooks_Magazines,
CategoryBooks_Ebook,
CategoryBooks_Comics,
CategoryBooks_Technical,
CategoryBooks_Foreign,
CategoryBooks_Unknown,
}
func ParentCategory(c Category) Category {
switch {
case c.ID < 1000:
return CategoryOther
case c.ID < 2000:
return CategoryConsole
case c.ID < 3000:
return CategoryMovies
case c.ID < 4000:
return CategoryAudio
case c.ID < 5000:
return CategoryPC
case c.ID < 6000:
return CategoryTV
case c.ID < 7000:
return CategoryXXX
case c.ID < 8000:
return CategoryBooks
}
return CategoryOther
}
type Categories []Category
func (slice Categories) Subset(ids ...int) Categories {
cats := Categories{}
for _, cat := range AllCategories {
for _, id := range ids {
if cat.ID == id {
cats = append(cats, cat)
}
}
}
return cats
}
func (slice Categories) Len() int {
return len(slice)
}
func (slice Categories) Less(i, j int) bool {
return slice[i].ID < slice[j].ID
}
func (slice Categories) Swap(i, j int) {
slice[i], slice[j] = slice[j], slice[i]
}
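A short usage sketch against the exported names in this new file; the import path is assumed from the module path seen elsewhere in the diff and the pkg/torznab location above.

package main

import (
	"fmt"

	"github.com/autobrr/autobrr/pkg/torznab"
)

func main() {
	// IDs below 10000 are standard Newznab categories; ParentCategory buckets
	// a sub-category ID into its top-level group by thousands.
	fmt.Println(torznab.ParentCategory(torznab.Category{ID: 5070})) // TV[5000]
	fmt.Println(torznab.ParentCategory(torznab.Category{ID: 2040})) // Movies[2000]

	// Subset picks known categories by ID out of AllCategories.
	fmt.Println(torznab.AllCategories.Subset(2000, 5040)) // [Movies[2000] TV/HD[5040]]
}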


@@ -21,11 +21,11 @@ type FeedItem struct {
		Text string `xml:",chardata"`
		ID string `xml:"id,attr"`
	} `xml:"prowlarrindexer"`
	Comments string `xml:"comments"`
	Size string `xml:"size"`
	Link string `xml:"link"`
-	Category []string `xml:"category,omitempty"`
-	Categories []string
+	Category []int `xml:"category,omitempty"`
+	Categories Categories

	// attributes
	TvdbId string `xml:"tvdb,omitempty"`

@@ -33,11 +33,33 @@ type FeedItem struct {
	ImdbId string `xml:"imdb,omitempty"`
	TmdbId string `xml:"tmdb,omitempty"`

-	Attributes []struct {
-		XMLName xml.Name
-		Name string `xml:"name,attr"`
-		Value string `xml:"value,attr"`
-	} `xml:"attr"`
+	Attributes []ItemAttr `xml:"attr"`
+}
+
+type ItemAttr struct {
+	Name string `xml:"name,attr"`
+	Value string `xml:"value,attr"`
+}
+
+func (f FeedItem) MapCategories(categories []Category) {
+	for _, category := range f.Category {
+		// less than 10000 it's default categories
+		if category < 10000 {
+			f.Categories = append(f.Categories, ParentCategory(Category{ID: category}))
+			continue
+		}
+
+		// categories 10000+ are custom tracker specific
+		for _, capCat := range categories {
+			if capCat.ID == category {
+				f.Categories = append(f.Categories, Category{
+					ID: capCat.ID,
+					Name: capCat.Name,
+				})
+				break
+			}
+		}
+	}
}

// Time credits: https://github.com/mrobinsn/go-newznab/blob/cd89d9c56447859fa1298dc9a0053c92c45ac7ef/newznab/structs.go#L150
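The mapping rule in MapCategories, restated as a standalone helper for illustration; the tracker-specific category 100001 used below is hypothetical.

package main

import (
	"fmt"

	"github.com/autobrr/autobrr/pkg/torznab"
)

// mapCategory mirrors the rule above for a single ID: IDs under 10000 resolve
// to the standard Newznab parent category, while 10000+ IDs are tracker
// specific and must be looked up in the caps response.
func mapCategory(id int, capsCategories []torznab.Category) torznab.Category {
	if id < 10000 {
		return torznab.ParentCategory(torznab.Category{ID: id})
	}

	for _, capCat := range capsCategories {
		if capCat.ID == id {
			return torznab.Category{ID: capCat.ID, Name: capCat.Name}
		}
	}

	return torznab.Category{}
}

func main() {
	caps := []torznab.Category{{ID: 100001, Name: "Tracker/FLAC"}}

	fmt.Println(mapCategory(5040, caps))   // TV[5000]
	fmt.Println(mapCategory(100001, caps)) // Tracker/FLAC[100001]
}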


@@ -15,8 +15,9 @@ import (
)

type Client interface {
-	GetFeed() ([]FeedItem, error)
-	GetCaps() (*Caps, error)
+	FetchFeed() ([]FeedItem, error)
+	FetchCaps() (*Caps, error)
+	GetCaps() *Caps
}

type client struct {

@@ -28,6 +29,8 @@ type client struct {
	UseBasicAuth bool
	BasicAuth BasicAuth

+	Capabilities *Caps
+
	Log *log.Logger
}

@@ -47,6 +50,11 @@ type Config struct {
	Log *log.Logger
}

+type Capabilities struct {
+	Search Searching
+	Categories Categories
+}
+
func NewClient(config Config) Client {
	httpClient := &http.Client{
		Timeout: config.Timeout,

@@ -101,6 +109,13 @@ func (c *client) get(endpoint string, opts map[string]string) (int, *Response, error) {
	defer resp.Body.Close()

+	dump, err := httputil.DumpResponse(resp, true)
+	if err != nil {
+		return 0, nil, errors.Wrap(err, "could not dump response")
+	}
+
+	c.Log.Printf("torznab get feed response dump: %q", dump)
+
	var buf bytes.Buffer
	if _, err = io.Copy(&buf, resp.Body); err != nil {
		return resp.StatusCode, nil, errors.Wrap(err, "torznab.io.Copy")

@@ -114,7 +129,20 @@ func (c *client) get(endpoint string, opts map[string]string) (int, *Response, error) {
	return resp.StatusCode, &response, nil
}

-func (c *client) GetFeed() ([]FeedItem, error) {
+func (c *client) FetchFeed() ([]FeedItem, error) {
+	if c.Capabilities == nil {
+		status, caps, err := c.getCaps("?t=caps", nil)
+		if err != nil {
+			return nil, errors.Wrap(err, "could not get caps for feed")
+		}
+
+		if status != http.StatusOK {
+			return nil, errors.Wrap(err, "could not get caps for feed")
+		}
+
+		c.Capabilities = caps
+	}
+
	status, res, err := c.get("", nil)
	if err != nil {
		return nil, errors.Wrap(err, "could not get feed")

@@ -124,6 +152,10 @@ func (c *client) GetFeed() ([]FeedItem, error) {
		return nil, errors.New("could not get feed")
	}

+	for _, item := range res.Channel.Items {
+		item.MapCategories(c.Capabilities.Categories.Categories)
+	}
+
	return res.Channel.Items, nil
}

@@ -167,7 +199,7 @@ func (c *client) getCaps(endpoint string, opts map[string]string) (int, *Caps, error) {
		return 0, nil, errors.Wrap(err, "could not dump response")
	}

-	c.Log.Printf("get torrent trackers response dump: %q", dump)
+	c.Log.Printf("torznab get caps response dump: %q", dump)

	if resp.StatusCode == http.StatusUnauthorized {
		return resp.StatusCode, nil, errors.New("unauthorized")

@@ -188,7 +220,7 @@ func (c *client) getCaps(endpoint string, opts map[string]string) (int, *Caps, error) {
	return resp.StatusCode, &response, nil
}

-func (c *client) GetCaps() (*Caps, error) {
+func (c *client) FetchCaps() (*Caps, error) {
	status, res, err := c.getCaps("?t=caps", nil)
	if err != nil {

@@ -202,6 +234,10 @@ func (c *client) GetCaps() (*Caps, error) {
	return res, nil
}

+func (c *client) GetCaps() *Caps {
+	return c.Capabilities
+}
+
func (c *client) Search(query string) ([]FeedItem, error) {
	v := url.Values{}
	v.Add("q", query)
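A hedged usage sketch of the renamed client API, with a hypothetical indexer URL and API key. FetchFeed now lazily fetches and caches caps before returning items, and GetCaps simply returns the cached value; only names shown in the diff are used.

package main

import (
	"log"
	"os"
	"time"

	"github.com/autobrr/autobrr/pkg/torznab"
)

func main() {
	// Host and ApiKey are placeholders; Config fields follow the diff above.
	c := torznab.NewClient(torznab.Config{
		Host:    "https://indexer.example.com/api",
		ApiKey:  "abc123",
		Timeout: 60 * time.Second,
		Log:     log.New(os.Stdout, "torznab ", log.LstdFlags),
	})

	// First call fetches caps (?t=caps), caches them on the client, then maps
	// each item's numeric category IDs onto named categories.
	items, err := c.FetchFeed()
	if err != nil {
		log.Fatal(err)
	}

	for _, item := range items {
		log.Printf("%s -> %v", item.Title, item.Categories)
	}

	// Returns the cached capabilities without another round trip.
	_ = c.GetCaps()
}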


@@ -160,23 +160,23 @@ func TestClient_GetCaps(t *testing.T) {
				SupportedParams: "q",
			},
		},
-		Categories: Categories{Category: []Category{
+		Categories: CapCategories{Categories: []Category{
			{
-				ID: "2000",
+				ID: 2000,
				Name: "Movies",
-				Subcat: []SubCategory{
+				SubCategories: []Category{
					{
-						ID: "2010",
+						ID: 2010,
						Name: "Foreign",
					},
				},
			},
			{
-				ID: "5000",
+				ID: 5000,
				Name: "TV",
-				Subcat: []SubCategory{
+				SubCategories: []Category{
					{
-						ID: "5040",
+						ID: 5040,
						Name: "HD",
					},
					{

@@ -232,7 +232,7 @@ func TestClient_GetCaps(t *testing.T) {
		t.Run(tt.name, func(t *testing.T) {
			c := NewClient(Config{Host: tt.fields.Host, ApiKey: tt.fields.ApiKey})

-			got, err := c.GetCaps()
+			got, err := c.FetchCaps()
			if tt.wantErr && assert.Error(t, err) {
				assert.EqualErrorf(t, err, tt.expectedErr, "Error should be: %v, got: %v", tt.wantErr, err)
			}