mirror of
https://github.com/idanoo/autobrr
synced 2025-07-23 08:49:13 +00:00
feat: add usenet support (#543)
* feat(autobrr): implement usenet support * feat(sonarr): implement usenet support * feat(radarr): implement usenet support * feat(announce): implement usenet support * announce: cast a line * feat(release): prevent unknown protocol transfer * release: lines for days. * feat: add newznab and sabnzbd support * feat: add category to sabnzbd * feat(newznab): map categories * feat(newznab): map categories --------- Co-authored-by: ze0s <43699394+zze0s@users.noreply.github.com> Co-authored-by: ze0s <ze0s@riseup.net>
This commit is contained in:
parent
b2d93d50c5
commit
13a74f7cc8
29 changed files with 1588 additions and 37 deletions
93
pkg/newznab/caps.go
Normal file
93
pkg/newznab/caps.go
Normal file
|
@ -0,0 +1,93 @@
|
|||
package newznab
|
||||
|
||||
import "encoding/xml"
|
||||
|
||||
// Server describes the indexer software in a caps response
// (all values arrive as XML attributes on <server>).
type Server struct {
	Version   string `xml:"version,attr"`
	Title     string `xml:"title,attr"`
	Strapline string `xml:"strapline,attr"`
	Email     string `xml:"email,attr"`
	URL       string `xml:"url,attr"`
	Image     string `xml:"image,attr"`
}

// Limits reports the indexer's paging limits for search requests.
type Limits struct {
	Max     string `xml:"max,attr"`
	Default string `xml:"default,attr"`
}

// Retention reports how many days of usenet posts the indexer keeps.
type Retention struct {
	Days string `xml:"days,attr"`
}

// Registration reports whether the indexer accepts new accounts.
type Registration struct {
	Available string `xml:"available,attr"`
	Open      string `xml:"open,attr"`
}
|
||||
|
||||
// Searching lists the search modes an indexer supports in its caps response.
type Searching struct {
	Search      Search `xml:"search"`
	TvSearch    Search `xml:"tv-search"`
	MovieSearch Search `xml:"movie-search"`
	AudioSearch Search `xml:"audio-search"`
	BookSearch  Search `xml:"book-search"`
}

// Search describes one search mode: whether it is available and which
// query parameters it accepts.
type Search struct {
	Available       string `xml:"available,attr"`
	SupportedParams string `xml:"supportedParams,attr"`
}
|
||||
|
||||
type CapCategories struct {
|
||||
Categories []Category `xml:"category"`
|
||||
}
|
||||
|
||||
type CapCategory struct {
|
||||
ID string `xml:"id,attr"`
|
||||
Name string `xml:"name,attr"`
|
||||
SubCategories []CapCategory `xml:"subcat"`
|
||||
}
|
||||
|
||||
type Groups struct {
|
||||
Group Group `xml:"group"`
|
||||
}
|
||||
type Group struct {
|
||||
ID string `xml:"id,attr"`
|
||||
Name string `xml:"name,attr"`
|
||||
Description string `xml:"description,attr"`
|
||||
Lastupdate string `xml:"lastupdate,attr"`
|
||||
}
|
||||
|
||||
// Genres holds the <genres> section of a caps response.
type Genres struct {
	Genre Genre `xml:"genre"`
}

// Genre maps a named genre to the category it belongs to.
type Genre struct {
	ID         string `xml:"id,attr"`
	Categoryid string `xml:"categoryid,attr"`
	Name       string `xml:"name,attr"`
}

// Tags holds the <tags> section of a caps response.
type Tags struct {
	Tag []Tag `xml:"tag"`
}

// Tag is a single release tag an indexer may report.
type Tag struct {
	Name        string `xml:"name,attr"`
	Description string `xml:"description,attr"`
}
|
||||
|
||||
type CapsResponse struct {
|
||||
Caps Caps `xml:"caps"`
|
||||
}
|
||||
|
||||
type Caps struct {
|
||||
XMLName xml.Name `xml:"caps"`
|
||||
Server Server `xml:"server"`
|
||||
Limits Limits `xml:"limits"`
|
||||
Retention Retention `xml:"retention"`
|
||||
Registration Registration `xml:"registration"`
|
||||
Searching Searching `xml:"searching"`
|
||||
Categories CapCategories `xml:"categories"`
|
||||
Groups Groups `xml:"groups"`
|
||||
Genres Genres `xml:"genres"`
|
||||
Tags Tags `xml:"tags"`
|
||||
}
|
219
pkg/newznab/category.go
Normal file
219
pkg/newznab/category.go
Normal file
|
@ -0,0 +1,219 @@
|
|||
package newznab
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// Category is a newznab category: a numeric ID, a display name, and any
// nested subcategories from the caps response.
type Category struct {
	ID            int        `xml:"id,attr"`
	Name          string     `xml:"name,attr"`
	SubCategories []Category `xml:"subcat"`
}

// String renders the category as "Name[ID]", the format FromString parses.
func (c Category) String() string {
	return fmt.Sprintf("%s[%d]", c.Name, c.ID)
}

// categoryRe matches the "Name[ID]" format produced by Category.String.
// Compiled once at package scope rather than per call.
var categoryRe = regexp.MustCompile(`(?m)(.+)\[(.+)\]`)

// FromString parses a "Name[ID]" string into c. It is a no-op when str
// does not match the expected format.
//
// Fixes two bugs in the original: it used a value receiver, so the parsed
// Name/ID were assigned to a copy and discarded; and it used
// FindAllString (whole matches only), so match[1]/match[2] were never the
// capture groups and could panic on a single match.
func (c *Category) FromString(str string) {
	match := categoryRe.FindStringSubmatch(str)
	if len(match) != 3 {
		return
	}

	c.Name = match[1]
	c.ID, _ = strconv.Atoi(match[2])
}
|
||||
|
||||
const (
	// CustomCategoryOffset marks the start of the ID space reserved for
	// indexer-specific custom categories (outside the newznab spec).
	CustomCategoryOffset = 100000
)

// Categories from the Newznab spec
// https://github.com/nZEDb/nZEDb/blob/0.x/docs/newznab_api_specification.txt#L627
var (
	CategoryOther        = Category{0, "Other", nil}
	CategoryOther_Misc   = Category{10, "Other/Misc", nil}
	CategoryOther_Hashed = Category{20, "Other/Hashed", nil}

	CategoryConsole         = Category{1000, "Console", nil}
	CategoryConsole_NDS     = Category{1010, "Console/NDS", nil}
	CategoryConsole_PSP     = Category{1020, "Console/PSP", nil}
	CategoryConsole_Wii     = Category{1030, "Console/Wii", nil}
	CategoryConsole_XBOX    = Category{1040, "Console/Xbox", nil}
	CategoryConsole_XBOX360 = Category{1050, "Console/Xbox360", nil}
	// NOTE(review): spec names 1060 "Console/Wiiware/VC"; this name looks
	// truncated — confirm against upstream before changing the string.
	CategoryConsole_WiiwareVC = Category{1060, "Console/Wiiware/V", nil}
	// NOTE(review): same display name as CategoryConsole_XBOX360 (1050);
	// the spec calls 1070 "Xbox 360 DLC" — confirm before changing.
	CategoryConsole_XBOX360DLC = Category{1070, "Console/Xbox360", nil}
	CategoryConsole_PS3        = Category{1080, "Console/PS3", nil}
	CategoryConsole_Other      = Category{1999, "Console/Other", nil}
	CategoryConsole_3DS        = Category{1110, "Console/3DS", nil}
	CategoryConsole_PSVita     = Category{1120, "Console/PS Vita", nil}
	CategoryConsole_WiiU       = Category{1130, "Console/WiiU", nil}
	CategoryConsole_XBOXOne    = Category{1140, "Console/XboxOne", nil}
	CategoryConsole_PS4        = Category{1180, "Console/PS4", nil}

	CategoryMovies         = Category{2000, "Movies", nil}
	CategoryMovies_Foreign = Category{2010, "Movies/Foreign", nil}
	CategoryMovies_Other   = Category{2020, "Movies/Other", nil}
	CategoryMovies_SD      = Category{2030, "Movies/SD", nil}
	CategoryMovies_HD      = Category{2040, "Movies/HD", nil}
	CategoryMovies_3D      = Category{2050, "Movies/3D", nil}
	CategoryMovies_BluRay  = Category{2060, "Movies/BluRay", nil}
	CategoryMovies_DVD     = Category{2070, "Movies/DVD", nil}
	CategoryMovies_WEBDL   = Category{2080, "Movies/WEBDL", nil}

	CategoryAudio           = Category{3000, "Audio", nil}
	CategoryAudio_MP3       = Category{3010, "Audio/MP3", nil}
	CategoryAudio_Video     = Category{3020, "Audio/Video", nil}
	CategoryAudio_Audiobook = Category{3030, "Audio/Audiobook", nil}
	CategoryAudio_Lossless  = Category{3040, "Audio/Lossless", nil}
	CategoryAudio_Other     = Category{3999, "Audio/Other", nil}
	CategoryAudio_Foreign   = Category{3060, "Audio/Foreign", nil}

	CategoryPC              = Category{4000, "PC", nil}
	CategoryPC_0day         = Category{4010, "PC/0day", nil}
	CategoryPC_ISO          = Category{4020, "PC/ISO", nil}
	CategoryPC_Mac          = Category{4030, "PC/Mac", nil}
	CategoryPC_PhoneOther   = Category{4040, "PC/Phone-Other", nil}
	CategoryPC_Games        = Category{4050, "PC/Games", nil}
	CategoryPC_PhoneIOS     = Category{4060, "PC/Phone-IOS", nil}
	CategoryPC_PhoneAndroid = Category{4070, "PC/Phone-Android", nil}

	CategoryTV             = Category{5000, "TV", nil}
	CategoryTV_WEBDL       = Category{5010, "TV/WEB-DL", nil}
	CategoryTV_FOREIGN     = Category{5020, "TV/Foreign", nil}
	CategoryTV_SD          = Category{5030, "TV/SD", nil}
	CategoryTV_HD          = Category{5040, "TV/HD", nil}
	CategoryTV_Other       = Category{5999, "TV/Other", nil}
	CategoryTV_Sport       = Category{5060, "TV/Sport", nil}
	CategoryTV_Anime       = Category{5070, "TV/Anime", nil}
	CategoryTV_Documentary = Category{5080, "TV/Documentary", nil}

	CategoryXXX          = Category{6000, "XXX", nil}
	CategoryXXX_DVD      = Category{6010, "XXX/DVD", nil}
	CategoryXXX_WMV      = Category{6020, "XXX/WMV", nil}
	CategoryXXX_XviD     = Category{6030, "XXX/XviD", nil}
	CategoryXXX_x264     = Category{6040, "XXX/x264", nil}
	CategoryXXX_Other    = Category{6999, "XXX/Other", nil}
	CategoryXXX_Imageset = Category{6060, "XXX/Imageset", nil}
	CategoryXXX_Packs    = Category{6070, "XXX/Packs", nil}

	CategoryBooks           = Category{7000, "Books", nil}
	CategoryBooks_Magazines = Category{7010, "Books/Magazines", nil}
	CategoryBooks_Ebook     = Category{7020, "Books/Ebook", nil}
	CategoryBooks_Comics    = Category{7030, "Books/Comics", nil}
	CategoryBooks_Technical = Category{7040, "Books/Technical", nil}
	CategoryBooks_Foreign   = Category{7060, "Books/Foreign", nil}
	CategoryBooks_Unknown   = Category{7999, "Books/Unknown", nil}
)

// AllCategories lists every predefined spec category above, in the same
// declaration order. Subset and friends operate on this master list.
var AllCategories = Categories{
	CategoryOther,
	CategoryOther_Misc,
	CategoryOther_Hashed,
	CategoryConsole,
	CategoryConsole_NDS,
	CategoryConsole_PSP,
	CategoryConsole_Wii,
	CategoryConsole_XBOX,
	CategoryConsole_XBOX360,
	CategoryConsole_WiiwareVC,
	CategoryConsole_XBOX360DLC,
	CategoryConsole_PS3,
	CategoryConsole_Other,
	CategoryConsole_3DS,
	CategoryConsole_PSVita,
	CategoryConsole_WiiU,
	CategoryConsole_XBOXOne,
	CategoryConsole_PS4,
	CategoryMovies,
	CategoryMovies_Foreign,
	CategoryMovies_Other,
	CategoryMovies_SD,
	CategoryMovies_HD,
	CategoryMovies_3D,
	CategoryMovies_BluRay,
	CategoryMovies_DVD,
	CategoryMovies_WEBDL,
	CategoryAudio,
	CategoryAudio_MP3,
	CategoryAudio_Video,
	CategoryAudio_Audiobook,
	CategoryAudio_Lossless,
	CategoryAudio_Other,
	CategoryAudio_Foreign,
	CategoryPC,
	CategoryPC_0day,
	CategoryPC_ISO,
	CategoryPC_Mac,
	CategoryPC_PhoneOther,
	CategoryPC_Games,
	CategoryPC_PhoneIOS,
	CategoryPC_PhoneAndroid,
	CategoryTV,
	CategoryTV_WEBDL,
	CategoryTV_FOREIGN,
	CategoryTV_SD,
	CategoryTV_HD,
	CategoryTV_Other,
	CategoryTV_Sport,
	CategoryTV_Anime,
	CategoryTV_Documentary,
	CategoryXXX,
	CategoryXXX_DVD,
	CategoryXXX_WMV,
	CategoryXXX_XviD,
	CategoryXXX_x264,
	CategoryXXX_Other,
	CategoryXXX_Imageset,
	CategoryXXX_Packs,
	CategoryBooks,
	CategoryBooks_Magazines,
	CategoryBooks_Ebook,
	CategoryBooks_Comics,
	CategoryBooks_Technical,
	CategoryBooks_Foreign,
	CategoryBooks_Unknown,
}
|
||||
|
||||
func ParentCategory(c Category) Category {
|
||||
switch {
|
||||
case c.ID < 1000:
|
||||
return CategoryOther
|
||||
case c.ID < 2000:
|
||||
return CategoryConsole
|
||||
case c.ID < 3000:
|
||||
return CategoryMovies
|
||||
case c.ID < 4000:
|
||||
return CategoryAudio
|
||||
case c.ID < 5000:
|
||||
return CategoryPC
|
||||
case c.ID < 6000:
|
||||
return CategoryTV
|
||||
case c.ID < 7000:
|
||||
return CategoryXXX
|
||||
case c.ID < 8000:
|
||||
return CategoryBooks
|
||||
}
|
||||
return CategoryOther
|
||||
}
|
||||
|
||||
type Categories []Category
|
||||
|
||||
func (slice Categories) Subset(ids ...int) Categories {
|
||||
cats := Categories{}
|
||||
|
||||
for _, cat := range AllCategories {
|
||||
for _, id := range ids {
|
||||
if cat.ID == id {
|
||||
cats = append(cats, cat)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return cats
|
||||
}
|
||||
|
||||
func (slice Categories) Len() int {
|
||||
return len(slice)
|
||||
}
|
||||
|
||||
func (slice Categories) Less(i, j int) bool {
|
||||
return slice[i].ID < slice[j].ID
|
||||
}
|
||||
|
||||
func (slice Categories) Swap(i, j int) {
|
||||
slice[i], slice[j] = slice[j], slice[i]
|
||||
}
|
144
pkg/newznab/feed.go
Normal file
144
pkg/newznab/feed.go
Normal file
|
@ -0,0 +1,144 @@
|
|||
package newznab
|
||||
|
||||
import (
|
||||
"encoding/xml"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/autobrr/autobrr/pkg/errors"
|
||||
)
|
||||
|
||||
type Feed struct {
|
||||
Channel Channel `xml:"channel"`
|
||||
Raw string
|
||||
}
|
||||
|
||||
func (f Feed) Len() int {
|
||||
return len(f.Channel.Items)
|
||||
}
|
||||
|
||||
type Channel struct {
|
||||
Title string `xml:"title"`
|
||||
Items []FeedItem `xml:"item"`
|
||||
}
|
||||
|
||||
type Response struct {
|
||||
Channel struct {
|
||||
Items []FeedItem `xml:"item"`
|
||||
} `xml:"channel"`
|
||||
}
|
||||
|
||||
type FeedItem struct {
|
||||
Title string `xml:"title,omitempty"`
|
||||
GUID string `xml:"guid,omitempty"`
|
||||
PubDate Time `xml:"pub_date,omitempty"`
|
||||
Prowlarrindexer struct {
|
||||
Text string `xml:",chardata"`
|
||||
ID string `xml:"id,attr"`
|
||||
} `xml:"prowlarrindexer,omitempty"`
|
||||
Comments string `xml:"comments"`
|
||||
Size string `xml:"size"`
|
||||
Link string `xml:"link"`
|
||||
Enclosure *Enclosure `xml:"enclosure,omitempty"`
|
||||
Category []string `xml:"category,omitempty"`
|
||||
Categories Categories
|
||||
|
||||
// attributes
|
||||
TvdbId string `xml:"tvdb,omitempty"`
|
||||
//TvMazeId string
|
||||
ImdbId string `xml:"imdb,omitempty"`
|
||||
TmdbId string `xml:"tmdb,omitempty"`
|
||||
|
||||
Attributes []ItemAttr `xml:"attr"`
|
||||
}
|
||||
|
||||
// ItemAttr is a newznab <attr name="..." value="..."/> extension element.
type ItemAttr struct {
	Name  string `xml:"name,attr"`
	Value string `xml:"value,attr"`
}

// Enclosure is the RSS <enclosure> element carrying the download link.
type Enclosure struct {
	Url    string `xml:"url,attr"`
	Length string `xml:"length,attr"`
	Type   string `xml:"type,attr"`
}
|
||||
|
||||
func (f FeedItem) MapCategoriesFromAttr() {
|
||||
for _, attr := range f.Attributes {
|
||||
if attr.Name == "category" {
|
||||
catId, err := strconv.Atoi(attr.Value)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
if catId > 0 && catId < 10000 {
|
||||
f.Categories = append(f.Categories, ParentCategory(Category{ID: catId}))
|
||||
}
|
||||
} else if attr.Name == "size" {
|
||||
if f.Size == "" && attr.Value != "" {
|
||||
f.Size = attr.Value
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (f FeedItem) MapCustomCategoriesFromAttr(categories []Category) {
|
||||
for _, attr := range f.Attributes {
|
||||
if attr.Name == "category" {
|
||||
catId, err := strconv.Atoi(attr.Value)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
if catId > 0 && catId < 10000 {
|
||||
f.Categories = append(f.Categories, ParentCategory(Category{ID: catId}))
|
||||
} else if catId > 10000 {
|
||||
// categories 10000+ are custom indexer specific
|
||||
for _, capCat := range categories {
|
||||
if capCat.ID == catId {
|
||||
f.Categories = append(f.Categories, Category{
|
||||
ID: capCat.ID,
|
||||
Name: capCat.Name,
|
||||
})
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Time credits: https://github.com/mrobinsn/go-newznab/blob/cd89d9c56447859fa1298dc9a0053c92c45ac7ef/newznab/structs.go#L150
|
||||
type Time struct {
|
||||
time.Time
|
||||
}
|
||||
|
||||
func (t *Time) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
|
||||
if err := e.EncodeToken(start); err != nil {
|
||||
return errors.Wrap(err, "failed to encode xml token")
|
||||
}
|
||||
if err := e.EncodeToken(xml.CharData([]byte(t.UTC().Format(time.RFC1123Z)))); err != nil {
|
||||
return errors.Wrap(err, "failed to encode xml token")
|
||||
}
|
||||
if err := e.EncodeToken(xml.EndElement{Name: start.Name}); err != nil {
|
||||
return errors.Wrap(err, "failed to encode xml token")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (t *Time) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {
|
||||
var raw string
|
||||
|
||||
err := d.DecodeElement(&raw, &start)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "could not decode element")
|
||||
}
|
||||
|
||||
date, err := time.Parse(time.RFC1123Z, raw)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "could not parse date")
|
||||
}
|
||||
|
||||
*t = Time{date}
|
||||
return nil
|
||||
}
|
355
pkg/newznab/newznab.go
Normal file
355
pkg/newznab/newznab.go
Normal file
|
@ -0,0 +1,355 @@
|
|||
package newznab
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/xml"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/http/httputil"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/autobrr/autobrr/pkg/errors"
|
||||
)
|
||||
|
||||
const DefaultTimeout = 60
|
||||
|
||||
type Client interface {
|
||||
GetFeed(ctx context.Context) (*Feed, error)
|
||||
GetCaps(ctx context.Context) (*Caps, error)
|
||||
Caps() *Caps
|
||||
}
|
||||
|
||||
type client struct {
|
||||
http *http.Client
|
||||
|
||||
Host string
|
||||
ApiKey string
|
||||
|
||||
UseBasicAuth bool
|
||||
BasicAuth BasicAuth
|
||||
|
||||
Capabilities *Caps
|
||||
|
||||
Log *log.Logger
|
||||
}
|
||||
|
||||
type BasicAuth struct {
|
||||
Username string
|
||||
Password string
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
Host string
|
||||
ApiKey string
|
||||
Timeout time.Duration
|
||||
|
||||
UseBasicAuth bool
|
||||
BasicAuth BasicAuth
|
||||
|
||||
Log *log.Logger
|
||||
}
|
||||
|
||||
type Capabilities struct {
|
||||
Search Searching
|
||||
Categories Categories
|
||||
}
|
||||
|
||||
func NewClient(config Config) Client {
|
||||
httpClient := &http.Client{
|
||||
Timeout: time.Second * DefaultTimeout,
|
||||
}
|
||||
|
||||
if config.Timeout > 0 {
|
||||
httpClient.Timeout = time.Second * config.Timeout
|
||||
}
|
||||
|
||||
c := &client{
|
||||
http: httpClient,
|
||||
Host: config.Host,
|
||||
ApiKey: config.ApiKey,
|
||||
Log: log.New(io.Discard, "", log.LstdFlags),
|
||||
}
|
||||
|
||||
if config.Log != nil {
|
||||
c.Log = config.Log
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
func (c *client) get(ctx context.Context, endpoint string, queryParams map[string]string) (int, *Feed, error) {
|
||||
params := url.Values{}
|
||||
params.Set("t", "search")
|
||||
|
||||
for k, v := range queryParams {
|
||||
params.Add(k, v)
|
||||
}
|
||||
|
||||
if c.ApiKey != "" {
|
||||
params.Add("apikey", c.ApiKey)
|
||||
}
|
||||
|
||||
u, err := url.Parse(c.Host)
|
||||
u.Path = strings.TrimSuffix(u.Path, "/")
|
||||
u.RawQuery = params.Encode()
|
||||
reqUrl := u.String()
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, reqUrl, nil)
|
||||
if err != nil {
|
||||
return 0, nil, errors.Wrap(err, "could not build request")
|
||||
}
|
||||
|
||||
if c.UseBasicAuth {
|
||||
req.SetBasicAuth(c.BasicAuth.Username, c.BasicAuth.Password)
|
||||
}
|
||||
|
||||
resp, err := c.http.Do(req)
|
||||
if err != nil {
|
||||
return 0, nil, errors.Wrap(err, "could not make request. %+v", req)
|
||||
}
|
||||
|
||||
defer resp.Body.Close()
|
||||
|
||||
dump, err := httputil.DumpResponse(resp, true)
|
||||
if err != nil {
|
||||
return 0, nil, errors.Wrap(err, "could not dump response")
|
||||
}
|
||||
|
||||
c.Log.Printf("newznab get feed response dump: %q", dump)
|
||||
|
||||
var buf bytes.Buffer
|
||||
if _, err = io.Copy(&buf, resp.Body); err != nil {
|
||||
return resp.StatusCode, nil, errors.Wrap(err, "newznab.io.Copy")
|
||||
}
|
||||
|
||||
var response Feed
|
||||
if err := xml.Unmarshal(buf.Bytes(), &response); err != nil {
|
||||
return resp.StatusCode, nil, errors.Wrap(err, "newznab: could not decode feed")
|
||||
}
|
||||
|
||||
response.Raw = buf.String()
|
||||
|
||||
return resp.StatusCode, &response, nil
|
||||
}
|
||||
|
||||
func (c *client) getData(ctx context.Context, endpoint string, queryParams map[string]string) (*http.Response, error) {
|
||||
u, err := url.Parse(c.Host)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "could not build request")
|
||||
}
|
||||
u.Path = strings.TrimSuffix(u.Path, "/")
|
||||
|
||||
qp, err := url.ParseQuery(u.RawQuery)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "could not build request")
|
||||
}
|
||||
|
||||
if c.ApiKey != "" {
|
||||
qp.Add("apikey", c.ApiKey)
|
||||
}
|
||||
|
||||
for k, v := range queryParams {
|
||||
if qp.Has("t") {
|
||||
continue
|
||||
}
|
||||
qp.Add(k, v)
|
||||
}
|
||||
|
||||
u.RawQuery = qp.Encode()
|
||||
reqUrl := u.String()
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, reqUrl, nil)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "could not build request")
|
||||
}
|
||||
|
||||
if c.UseBasicAuth {
|
||||
req.SetBasicAuth(c.BasicAuth.Username, c.BasicAuth.Password)
|
||||
}
|
||||
|
||||
resp, err := c.http.Do(req)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "could not make request. %+v", req)
|
||||
}
|
||||
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
func (c *client) GetFeed(ctx context.Context) (*Feed, error) {
|
||||
|
||||
p := map[string]string{"t": "search"}
|
||||
|
||||
resp, err := c.getData(ctx, "", p)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "could not get feed")
|
||||
}
|
||||
|
||||
defer resp.Body.Close()
|
||||
|
||||
dump, err := httputil.DumpResponse(resp, true)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "could not dump response")
|
||||
}
|
||||
|
||||
c.Log.Printf("newznab get feed response dump: %q", dump)
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return nil, errors.New("could not get feed")
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
if _, err = io.Copy(&buf, resp.Body); err != nil {
|
||||
return nil, errors.Wrap(err, "newznab.io.Copy")
|
||||
}
|
||||
|
||||
var response Feed
|
||||
if err := xml.Unmarshal(buf.Bytes(), &response); err != nil {
|
||||
return nil, errors.Wrap(err, "newznab: could not decode feed")
|
||||
}
|
||||
|
||||
response.Raw = buf.String()
|
||||
|
||||
if c.Capabilities != nil {
|
||||
for _, item := range response.Channel.Items {
|
||||
item.MapCustomCategoriesFromAttr(c.Capabilities.Categories.Categories)
|
||||
}
|
||||
} else {
|
||||
for _, item := range response.Channel.Items {
|
||||
item.MapCategoriesFromAttr()
|
||||
}
|
||||
}
|
||||
|
||||
return &response, nil
|
||||
}
|
||||
|
||||
func (c *client) GetFeedAndCaps(ctx context.Context) (*Feed, error) {
|
||||
if c.Capabilities == nil {
|
||||
status, caps, err := c.getCaps(ctx, "?t=caps", nil)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "could not get caps for feed")
|
||||
}
|
||||
|
||||
if status != http.StatusOK {
|
||||
return nil, errors.Wrap(err, "could not get caps for feed")
|
||||
}
|
||||
|
||||
c.Capabilities = caps
|
||||
}
|
||||
|
||||
p := map[string]string{"t": "search"}
|
||||
|
||||
status, res, err := c.get(ctx, "", p)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "could not get feed")
|
||||
}
|
||||
|
||||
if status != http.StatusOK {
|
||||
return nil, errors.New("could not get feed")
|
||||
}
|
||||
|
||||
for _, item := range res.Channel.Items {
|
||||
item.MapCustomCategoriesFromAttr(c.Capabilities.Categories.Categories)
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (c *client) getCaps(ctx context.Context, endpoint string, opts map[string]string) (int, *Caps, error) {
|
||||
params := url.Values{
|
||||
"t": {"caps"},
|
||||
}
|
||||
|
||||
if c.ApiKey != "" {
|
||||
params.Add("apikey", c.ApiKey)
|
||||
}
|
||||
|
||||
u, err := url.Parse(c.Host)
|
||||
u.Path = strings.TrimSuffix(u.Path, "/")
|
||||
u.RawQuery = params.Encode()
|
||||
reqUrl := u.String()
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, reqUrl, nil)
|
||||
if err != nil {
|
||||
return 0, nil, errors.Wrap(err, "could not build request")
|
||||
}
|
||||
|
||||
if c.UseBasicAuth {
|
||||
req.SetBasicAuth(c.BasicAuth.Username, c.BasicAuth.Password)
|
||||
}
|
||||
|
||||
// Jackett only supports api key via url param while Prowlarr does that and via header
|
||||
//if c.ApiKey != "" {
|
||||
// req.Header.Add("X-API-Key", c.ApiKey)
|
||||
//}
|
||||
|
||||
resp, err := c.http.Do(req)
|
||||
if err != nil {
|
||||
return 0, nil, errors.Wrap(err, "could not make request. %+v", req)
|
||||
}
|
||||
|
||||
defer resp.Body.Close()
|
||||
|
||||
dump, err := httputil.DumpResponse(resp, true)
|
||||
if err != nil {
|
||||
return 0, nil, errors.Wrap(err, "could not dump response")
|
||||
}
|
||||
|
||||
c.Log.Printf("newznab get caps response dump: %q", dump)
|
||||
|
||||
if resp.StatusCode == http.StatusUnauthorized {
|
||||
return resp.StatusCode, nil, errors.New("unauthorized")
|
||||
} else if resp.StatusCode != http.StatusOK {
|
||||
return resp.StatusCode, nil, errors.New("bad status: %d", resp.StatusCode)
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
if _, err = io.Copy(&buf, resp.Body); err != nil {
|
||||
return resp.StatusCode, nil, errors.Wrap(err, "newznab.io.Copy")
|
||||
}
|
||||
|
||||
var response Caps
|
||||
if err := xml.Unmarshal(buf.Bytes(), &response); err != nil {
|
||||
return resp.StatusCode, nil, errors.Wrap(err, "newznab: could not decode feed")
|
||||
}
|
||||
|
||||
return resp.StatusCode, &response, nil
|
||||
}
|
||||
|
||||
func (c *client) GetCaps(ctx context.Context) (*Caps, error) {
|
||||
|
||||
status, res, err := c.getCaps(ctx, "?t=caps", nil)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "could not get caps for feed")
|
||||
}
|
||||
|
||||
if status != http.StatusOK {
|
||||
return nil, errors.Wrap(err, "could not get caps for feed")
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}
|
||||
|
||||
// Caps returns the cached capabilities, or nil when none have been
// fetched yet.
func (c *client) Caps() *Caps {
	return c.Capabilities
}
|
||||
|
||||
//func (c *client) Search(ctx context.Context, query string) ([]FeedItem, error) {
|
||||
// v := url.Values{}
|
||||
// v.Add("q", query)
|
||||
// params := v.Encode()
|
||||
//
|
||||
// status, res, err := c.get(ctx, "&t=search&"+params, nil)
|
||||
// if err != nil {
|
||||
// return nil, errors.Wrap(err, "could not search feed")
|
||||
// }
|
||||
//
|
||||
// if status != http.StatusOK {
|
||||
// return nil, errors.New("could not search feed")
|
||||
// }
|
||||
//
|
||||
// return res.Channel.Items, nil
|
||||
//}
|
173
pkg/sabnzbd/sabnzbd.go
Normal file
173
pkg/sabnzbd/sabnzbd.go
Normal file
|
@ -0,0 +1,173 @@
|
|||
package sabnzbd
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Client talks to a SABnzbd instance over its HTTP JSON API.
type Client struct {
	addr   string // base URL of the SABnzbd instance
	apiKey string

	basicUser string
	basicPass string

	log *log.Logger

	Http *http.Client
}

// Options configures a new SABnzbd Client.
type Options struct {
	Addr   string
	ApiKey string

	BasicUser string
	BasicPass string

	Log *log.Logger
}

// New returns a Client for the given Options. When Options.Log is nil,
// logging is discarded. The HTTP client uses a 60 second timeout.
func New(opts Options) *Client {
	client := &Client{
		addr:      opts.Addr,
		apiKey:    opts.ApiKey,
		basicUser: opts.BasicUser,
		basicPass: opts.BasicPass,
		log:       log.New(io.Discard, "", log.LstdFlags),
		Http: &http.Client{
			Timeout: time.Second * 60,
		},
	}

	if opts.Log != nil {
		client.log = opts.Log
	}

	return client
}
|
||||
|
||||
func (c *Client) AddFromUrl(ctx context.Context, r AddNzbRequest) (*AddFileResponse, error) {
|
||||
v := url.Values{}
|
||||
v.Set("mode", "addurl")
|
||||
v.Set("name", r.Url)
|
||||
v.Set("output", "json")
|
||||
v.Set("apikey", c.apiKey)
|
||||
v.Set("cat", "*")
|
||||
|
||||
if r.Category != "" {
|
||||
v.Set("cat", r.Category)
|
||||
}
|
||||
|
||||
addr, err := url.JoinPath(c.addr, "/api")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
u, err := url.Parse(addr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
u.RawQuery = v.Encode()
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if c.basicUser != "" && c.basicPass != "" {
|
||||
req.SetBasicAuth(c.basicUser, c.basicPass)
|
||||
}
|
||||
|
||||
res, err := c.Http.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
defer res.Body.Close()
|
||||
|
||||
body, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
fmt.Print(body)
|
||||
|
||||
var data AddFileResponse
|
||||
if err := json.Unmarshal(body, &data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &data, nil
|
||||
}
|
||||
|
||||
func (c *Client) Version(ctx context.Context) (*VersionResponse, error) {
|
||||
v := url.Values{}
|
||||
v.Set("mode", "version")
|
||||
v.Set("output", "json")
|
||||
v.Set("apikey", c.apiKey)
|
||||
|
||||
addr, err := url.JoinPath(c.addr, "/api")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
u, err := url.Parse(addr)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
u.RawQuery = v.Encode()
|
||||
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if c.basicUser != "" && c.basicPass != "" {
|
||||
req.SetBasicAuth(c.basicUser, c.basicPass)
|
||||
}
|
||||
|
||||
res, err := c.Http.Do(req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
defer res.Body.Close()
|
||||
|
||||
body, err := io.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var data VersionResponse
|
||||
if err := json.Unmarshal(body, &data); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &data, nil
|
||||
}
|
||||
|
||||
// VersionResponse is the JSON payload of a mode=version call.
type VersionResponse struct {
	Version string `json:"version"`
}

// AddFileResponse is the JSON payload of an add (mode=addurl) call,
// including any API-level error message.
type AddFileResponse struct {
	NzoIDs []string `json:"nzo_ids"`
	ApiError
}

// ApiError carries SABnzbd's API-level error message, when present.
type ApiError struct {
	ErrorMsg string `json:"error,omitempty"`
}

// AddNzbRequest describes an NZB to enqueue by URL, with an optional
// SABnzbd category.
type AddNzbRequest struct {
	Url      string
	Category string
}
|
Loading…
Add table
Add a link
Reference in a new issue