feat: add usenet support (#543)

* feat(autobrr): implement usenet support

* feat(sonarr): implement usenet support

* feat(radarr): implement usenet support

* feat(announce): implement usenet support

* announce: cast a line

* feat(release): prevent unknown protocol transfer

* release: lines for days.

* feat: add newznab and sabnzbd support

* feat: add category to sabnzbd

* feat(newznab): map categories

* feat(newznab): map categories

---------

Co-authored-by: ze0s <43699394+zze0s@users.noreply.github.com>
Co-authored-by: ze0s <ze0s@riseup.net>
Author: Kyle Sanderson, 2023-03-04 11:27:18 -08:00 (committed via GitHub)
Commit: 13a74f7cc8 (parent b2d93d50c5)
29 changed files with 1588 additions and 37 deletions

@@ -47,8 +47,8 @@ func (s *service) radarr(ctx context.Context, action *domain.Action, release dom
 		MagnetUrl:        release.MagnetURI,
 		Size:             int64(release.Size),
 		Indexer:          release.Indexer,
-		DownloadProtocol: "torrent",
-		Protocol:         "torrent",
+		DownloadProtocol: string(release.Protocol),
+		Protocol:         string(release.Protocol),
 		PublishDate:      time.Now().Format(time.RFC3339),
 	}

@@ -85,6 +85,9 @@ func (s *service) RunAction(ctx context.Context, action *domain.Action, release
 	case domain.ActionTypeReadarr:
 		rejections, err = s.readarr(ctx, action, release)
+	case domain.ActionTypeSabnzbd:
+		rejections, err = s.sabnzbd(ctx, action, release)
 	default:
 		s.log.Warn().Msgf("unsupported action type: %v", action.Type)
 		return rejections, err

@@ -0,0 +1,52 @@
package action
import (
"context"
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/pkg/errors"
"github.com/autobrr/autobrr/pkg/sabnzbd"
)
func (s *service) sabnzbd(ctx context.Context, action *domain.Action, release domain.Release) ([]string, error) {
s.log.Trace().Msg("action Sabnzbd")
if release.Protocol != domain.ReleaseProtocolNzb {
return nil, errors.New("action type: %s invalid protocol: %s", action.Type, release.Protocol)
}
// get client for action
client, err := s.clientSvc.FindByID(ctx, action.ClientID)
if err != nil {
return nil, errors.Wrap(err, "sonarr could not find client: %d", action.ClientID)
}
// return early if no client found
if client == nil {
return nil, errors.New("no sabnzbd client found by id: %d", action.ClientID)
}
opts := sabnzbd.Options{
Addr: client.Host,
ApiKey: client.Settings.APIKey,
Log: nil,
}
if client.Settings.Basic.Auth {
opts.BasicUser = client.Settings.Basic.Username
opts.BasicPass = client.Settings.Basic.Password
}
sab := sabnzbd.New(opts)
ids, err := sab.AddFromUrl(ctx, sabnzbd.AddNzbRequest{Url: release.TorrentURL, Category: action.Category})
if err != nil {
return nil, errors.Wrap(err, "could not add nzb to sabnzbd")
}
s.log.Trace().Msgf("nzb successfully added to client: '%+v'", ids)
s.log.Info().Msgf("nzb successfully added to client: '%s'", client.Name)
return nil, nil
}

@@ -47,8 +47,8 @@ func (s *service) sonarr(ctx context.Context, action *domain.Action, release dom
 		MagnetUrl:        release.MagnetURI,
 		Size:             int64(release.Size),
 		Indexer:          release.Indexer,
-		DownloadProtocol: "torrent",
-		Protocol:         "torrent",
+		DownloadProtocol: string(release.Protocol),
+		Protocol:         string(release.Protocol),
 		PublishDate:      time.Now().Format(time.RFC3339),
 	}

@@ -99,6 +99,7 @@ func (a *announceProcessor) processQueue(queue chan string) {
 		}
 		rls := domain.NewRelease(a.indexer.Identifier)
+		rls.Protocol = domain.ReleaseProtocol(a.indexer.Protocol)
 		// on lines matched
 		if err := a.onLinesMatched(a.indexer, tmpVars, rls); err != nil {

@@ -89,6 +89,7 @@ const (
 	ActionTypeLidarr   ActionType = "LIDARR"
 	ActionTypeWhisparr ActionType = "WHISPARR"
 	ActionTypeReadarr  ActionType = "READARR"
+	ActionTypeSabnzbd  ActionType = "SABNZBD"
 )
 type ActionContentLayout string

@@ -79,6 +79,7 @@ const (
 	DownloadClientTypeLidarr   DownloadClientType = "LIDARR"
 	DownloadClientTypeWhisparr DownloadClientType = "WHISPARR"
 	DownloadClientTypeReadarr  DownloadClientType = "READARR"
+	DownloadClientTypeSabnzbd  DownloadClientType = "SABNZBD"
 )
 // Validate basic validation of client

@@ -64,6 +64,7 @@ type FeedType string
 const (
 	FeedTypeTorznab FeedType = "TORZNAB"
+	FeedTypeNewznab FeedType = "NEWZNAB"
 	FeedTypeRSS     FeedType = "RSS"
 )

@@ -48,9 +48,37 @@ type IndexerDefinition struct {
 	SettingsMap map[string]string `json:"-"`
 	IRC         *IndexerIRC       `json:"irc,omitempty"`
 	Torznab     *Torznab          `json:"torznab,omitempty"`
+	Newznab     *Newznab          `json:"newznab,omitempty"`
 	RSS         *FeedSettings     `json:"rss,omitempty"`
 }
+
+type IndexerImplementation string
+
+const (
+	IndexerImplementationIRC     IndexerImplementation = "irc"
+	IndexerImplementationTorznab IndexerImplementation = "torznab"
+	IndexerImplementationNewznab IndexerImplementation = "newznab"
+	IndexerImplementationRSS     IndexerImplementation = "rss"
+	IndexerImplementationLegacy  IndexerImplementation = ""
+)
+
+func (i IndexerImplementation) String() string {
+	switch i {
+	case IndexerImplementationIRC:
+		return "irc"
+	case IndexerImplementationTorznab:
+		return "torznab"
+	case IndexerImplementationNewznab:
+		return "newznab"
+	case IndexerImplementationRSS:
+		return "rss"
+	case IndexerImplementationLegacy:
+		return ""
+	}
+	return ""
+}
+
 func (i IndexerDefinition) HasApi() bool {
 	for _, a := range i.Supports {
 		if a == "api" {
@@ -77,6 +105,7 @@ type IndexerDefinitionCustom struct {
 	SettingsMap map[string]string `json:"-"`
 	IRC         *IndexerIRC       `json:"irc,omitempty"`
 	Torznab     *Torznab          `json:"torznab,omitempty"`
+	Newznab     *Newznab          `json:"newznab,omitempty"`
 	RSS         *FeedSettings     `json:"rss,omitempty"`
 	Parse       *IndexerIRCParse  `json:"parse,omitempty"`
 }
@@ -99,6 +128,7 @@ func (i *IndexerDefinitionCustom) ToIndexerDefinition() *IndexerDefinition {
 		SettingsMap: i.SettingsMap,
 		IRC:         i.IRC,
 		Torznab:     i.Torznab,
+		Newznab:     i.Newznab,
 		RSS:         i.RSS,
 	}
@@ -126,6 +156,11 @@ type Torznab struct {
 	Settings    []IndexerSetting `json:"settings"`
 }
+
+type Newznab struct {
+	MinInterval int              `json:"minInterval"`
+	Settings    []IndexerSetting `json:"settings"`
+}
 type FeedSettings struct {
 	MinInterval int              `json:"minInterval"`
 	Settings    []IndexerSetting `json:"settings"`

@@ -154,16 +154,44 @@ type ReleaseProtocol string
 const (
 	ReleaseProtocolTorrent ReleaseProtocol = "torrent"
+	ReleaseProtocolNzb     ReleaseProtocol = "nzb"
 )
+
+func (r ReleaseProtocol) String() string {
+	switch r {
+	case ReleaseProtocolTorrent:
+		return "torrent"
+	case ReleaseProtocolNzb:
+		return "nzb"
+	default:
+		return "torrent"
+	}
+}
 type ReleaseImplementation string
 const (
 	ReleaseImplementationIRC     ReleaseImplementation = "IRC"
 	ReleaseImplementationTorznab ReleaseImplementation = "TORZNAB"
+	ReleaseImplementationNewznab ReleaseImplementation = "NEWZNAB"
 	ReleaseImplementationRSS     ReleaseImplementation = "RSS"
 )
+
+func (r ReleaseImplementation) String() string {
+	switch r {
+	case ReleaseImplementationIRC:
+		return "IRC"
+	case ReleaseImplementationTorznab:
+		return "TORZNAB"
+	case ReleaseImplementationNewznab:
+		return "NEWZNAB"
+	case ReleaseImplementationRSS:
+		return "RSS"
+	default:
+		return "IRC"
+	}
+}
 type ReleaseQueryParams struct {
 	Limit  uint64
 	Offset uint64
@@ -291,7 +319,9 @@ func (r *Release) DownloadTorrentFile() error {
 }
 func (r *Release) downloadTorrentFile(ctx context.Context) error {
-	if r.HasMagnetUri() {
+	if r.Protocol != ReleaseProtocolTorrent {
+		return errors.New("download_file: protocol is not %s: %s", ReleaseProtocolTorrent, r.Protocol)
+	} else if r.HasMagnetUri() {
 		return fmt.Errorf("error trying to download magnet link: %s", r.MagnetURI)
 	}

@@ -10,6 +10,7 @@ import (
 	"github.com/autobrr/autobrr/pkg/porla"
 	"github.com/autobrr/autobrr/pkg/radarr"
 	"github.com/autobrr/autobrr/pkg/readarr"
+	"github.com/autobrr/autobrr/pkg/sabnzbd"
 	"github.com/autobrr/autobrr/pkg/sonarr"
 	"github.com/autobrr/autobrr/pkg/whisparr"
 	"github.com/autobrr/go-qbittorrent"
@@ -51,6 +52,9 @@ func (s *service) testConnection(ctx context.Context, client domain.DownloadClie
 	case domain.DownloadClientTypeReadarr:
 		return s.testReadarrConnection(ctx, client)
+	case domain.DownloadClientTypeSabnzbd:
+		return s.testSabnzbdConnection(ctx, client)
 	default:
 		return errors.New("unsupported client")
 	}
@@ -285,3 +289,23 @@ func (s *service) testPorlaConnection(client domain.DownloadClient) error {
 	return nil
 }
+
+func (s *service) testSabnzbdConnection(ctx context.Context, client domain.DownloadClient) error {
+	opts := sabnzbd.Options{
+		Addr:      client.Host,
+		ApiKey:    client.Settings.APIKey,
+		BasicUser: client.Settings.Basic.Username,
+		BasicPass: client.Settings.Basic.Password,
+		Log:       nil,
+	}
+
+	sab := sabnzbd.New(opts)
+	version, err := sab.Version(ctx)
+	if err != nil {
+		return errors.Wrap(err, "error getting version from sabnzbd")
+	}
+
+	s.log.Debug().Msgf("test client connection for sabnzbd: success got version: %s", version.Version)
+
+	return nil
+}

internal/feed/newznab.go (new file, 168 lines)

@@ -0,0 +1,168 @@
package feed
import (
"context"
"sort"
"strconv"
"time"
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/internal/release"
"github.com/autobrr/autobrr/internal/scheduler"
"github.com/autobrr/autobrr/pkg/errors"
"github.com/autobrr/autobrr/pkg/newznab"
"github.com/rs/zerolog"
)
type NewznabJob struct {
Feed *domain.Feed
Name string
IndexerIdentifier string
Log zerolog.Logger
URL string
Client newznab.Client
Repo domain.FeedRepo
CacheRepo domain.FeedCacheRepo
ReleaseSvc release.Service
SchedulerSvc scheduler.Service
attempts int
errors []error
JobID int
}
func NewNewznabJob(feed *domain.Feed, name string, indexerIdentifier string, log zerolog.Logger, url string, client newznab.Client, repo domain.FeedRepo, cacheRepo domain.FeedCacheRepo, releaseSvc release.Service) *NewznabJob {
return &NewznabJob{
Feed: feed,
Name: name,
IndexerIdentifier: indexerIdentifier,
Log: log,
URL: url,
Client: client,
Repo: repo,
CacheRepo: cacheRepo,
ReleaseSvc: releaseSvc,
}
}
func (j *NewznabJob) Run() {
ctx := context.Background()
if err := j.process(ctx); err != nil {
j.Log.Err(err).Int("attempts", j.attempts).Msg("newznab process error")
j.errors = append(j.errors, err)
}
j.attempts = 0
j.errors = j.errors[:0]
}
func (j *NewznabJob) process(ctx context.Context) error {
// get feed
items, err := j.getFeed(ctx)
if err != nil {
j.Log.Error().Err(err).Msgf("error fetching feed items")
return errors.Wrap(err, "error getting feed items")
}
j.Log.Debug().Msgf("found (%d) new items to process", len(items))
if len(items) == 0 {
return nil
}
releases := make([]*domain.Release, 0)
for _, item := range items {
rls := domain.NewRelease(j.IndexerIdentifier)
rls.TorrentName = item.Title
rls.InfoURL = item.GUID
rls.Implementation = domain.ReleaseImplementationNewznab
rls.Protocol = domain.ReleaseProtocolNzb
// parse size bytes string
rls.ParseSizeBytesString(item.Size)
rls.ParseString(item.Title)
if item.Enclosure != nil {
if item.Enclosure.Type == "application/x-nzb" {
rls.TorrentURL = item.Enclosure.Url
}
}
// map newznab categories ID and Name into rls.Categories
// so we can filter on both ID and Name
for _, category := range item.Categories {
rls.Categories = append(rls.Categories, []string{category.Name, strconv.Itoa(category.ID)}...)
}
releases = append(releases, rls)
}
// process all new releases
go j.ReleaseSvc.ProcessMultiple(releases)
return nil
}
func (j *NewznabJob) getFeed(ctx context.Context) ([]newznab.FeedItem, error) {
// get feed
feed, err := j.Client.GetFeed(ctx)
if err != nil {
j.Log.Error().Err(err).Msgf("error fetching feed items")
return nil, errors.Wrap(err, "error fetching feed items")
}
if err := j.Repo.UpdateLastRunWithData(ctx, j.Feed.ID, feed.Raw); err != nil {
j.Log.Error().Err(err).Msgf("error updating last run for feed id: %v", j.Feed.ID)
}
j.Log.Debug().Msgf("refreshing feed: %v, found (%d) items", j.Name, len(feed.Channel.Items))
items := make([]newznab.FeedItem, 0)
if len(feed.Channel.Items) == 0 {
return items, nil
}
sort.SliceStable(feed.Channel.Items, func(i, j int) bool {
return feed.Channel.Items[i].PubDate.After(feed.Channel.Items[j].PubDate.Time)
})
for _, i := range feed.Channel.Items {
if i.GUID == "" {
j.Log.Error().Err(err).Msgf("missing GUID from feed: %s", j.Feed.Name)
continue
}
exists, err := j.CacheRepo.Exists(j.Name, i.GUID)
if err != nil {
j.Log.Error().Err(err).Msg("could not check if item exists")
continue
}
if exists {
j.Log.Trace().Msgf("cache item exists, skipping release: %s", i.Title)
continue
}
j.Log.Debug().Msgf("found new release: %s", i.Title)
// set ttl to 1 month
ttl := time.Now().AddDate(0, 1, 0)
if err := j.CacheRepo.Put(j.Name, i.GUID, []byte(i.Title), ttl); err != nil {
j.Log.Error().Stack().Err(err).Str("guid", i.GUID).Msg("cache.Put: error storing item in cache")
continue
}
// only append if we successfully added to cache
items = append(items, i)
}
// send to filters
return items, nil
}

@@ -12,6 +12,7 @@ import (
 	"github.com/autobrr/autobrr/internal/release"
 	"github.com/autobrr/autobrr/internal/scheduler"
 	"github.com/autobrr/autobrr/pkg/errors"
+	"github.com/autobrr/autobrr/pkg/newznab"
 	"github.com/autobrr/autobrr/pkg/torznab"
 	"github.com/dcarbone/zadapters/zstdlog"
@@ -222,17 +223,27 @@ func (s *service) test(ctx context.Context, feed *domain.Feed) error {
 	subLogger := zstdlog.NewStdLoggerWithLevel(s.log.With().Logger(), zerolog.DebugLevel)
 	// test feeds
-	if feed.Type == string(domain.FeedTypeTorznab) {
+	switch feed.Type {
+	case string(domain.FeedTypeTorznab):
 		if err := s.testTorznab(ctx, feed, subLogger); err != nil {
 			return err
 		}
-	} else if feed.Type == string(domain.FeedTypeRSS) {
+	case string(domain.FeedTypeNewznab):
+		if err := s.testNewznab(ctx, feed, subLogger); err != nil {
+			return err
+		}
+	case string(domain.FeedTypeRSS):
 		if err := s.testRSS(ctx, feed); err != nil {
 			return err
 		}
+	default:
+		return errors.New("unsupported feed type: %s", feed.Type)
 	}
-	s.log.Info().Msgf("feed test successful - connected to feed: %v", feed.URL)
+	s.log.Info().Msgf("feed test successful - connected to feed: %s", feed.URL)
 	return nil
 }
@@ -264,6 +275,21 @@ func (s *service) testTorznab(ctx context.Context, feed *domain.Feed, subLogger
 	return nil
 }
+
+func (s *service) testNewznab(ctx context.Context, feed *domain.Feed, subLogger *log.Logger) error {
+	// setup newznab Client
+	c := newznab.NewClient(newznab.Config{Host: feed.URL, ApiKey: feed.ApiKey, Log: subLogger})
+
+	items, err := c.GetFeed(ctx)
+	if err != nil {
+		s.log.Error().Err(err).Msg("error getting newznab feed")
+		return err
+	}
+
+	s.log.Info().Msgf("refreshing newznab feed: %v, found (%d) items", feed.Name, len(items.Channel.Items))
+
+	return nil
+}
+
 func (s *service) start() error {
 	// get all torznab indexer definitions
 	feeds, err := s.repo.Find(context.TODO())
@@ -335,6 +361,13 @@ func (s *service) startJob(f *domain.Feed) error {
 			s.log.Error().Err(err).Msg("failed to initialize torznab feed")
 			return err
 		}
+	case string(domain.FeedTypeNewznab):
+		if err := s.addNewznabJob(fi); err != nil {
+			s.log.Error().Err(err).Msg("failed to initialize newznab feed")
+			return err
+		}
 	case string(domain.FeedTypeRSS):
 		if err := s.addRSSJob(fi); err != nil {
 			s.log.Error().Err(err).Msg("failed to initialize rss feed")
@@ -380,6 +413,37 @@ func (s *service) addTorznabJob(f feedInstance) error {
 	return nil
 }
+
+func (s *service) addNewznabJob(f feedInstance) error {
+	if f.URL == "" {
+		return errors.New("newznab feed requires URL")
+	}
+
+	// setup logger
+	l := s.log.With().Str("feed", f.Name).Logger()
+
+	// setup newznab Client
+	c := newznab.NewClient(newznab.Config{Host: f.URL, ApiKey: f.ApiKey, Timeout: f.Timeout})
+
+	// create job
+	job := NewNewznabJob(f.Feed, f.Name, f.IndexerIdentifier, l, f.URL, c, s.repo, s.cacheRepo, s.releaseSvc)
+
+	identifierKey := feedKey{f.Feed.ID, f.Feed.Indexer, f.Feed.Name}.ToString()
+
+	// schedule job
+	id, err := s.scheduler.AddJob(job, f.CronSchedule, identifierKey)
+	if err != nil {
+		return errors.Wrap(err, "feed.AddNewznabJob: add job failed")
+	}
+	job.JobID = id
+
+	// add to job map
+	s.jobs[identifierKey] = id
+
+	s.log.Debug().Msgf("add newznab job: %v", f.Name)
+
+	return nil
+}
+
 func (s *service) addRSSJob(f feedInstance) error {
 	if f.URL == "" {
 		return errors.New("rss feed requires URL")

@@ -49,6 +49,8 @@ type service struct {
 	lookupIRCServerDefinition map[string]map[string]*domain.IndexerDefinition
 	// torznab indexers
 	torznabIndexers map[string]*domain.IndexerDefinition
+	// newznab indexers
+	newznabIndexers map[string]*domain.IndexerDefinition
 	// rss indexers
 	rssIndexers map[string]*domain.IndexerDefinition
 }
@@ -62,6 +64,7 @@ func NewService(log logger.Logger, config *domain.Config, repo domain.IndexerRep
 		scheduler:                 scheduler,
 		lookupIRCServerDefinition: make(map[string]map[string]*domain.IndexerDefinition),
 		torznabIndexers:           make(map[string]*domain.IndexerDefinition),
+		newznabIndexers:           make(map[string]*domain.IndexerDefinition),
 		rssIndexers:               make(map[string]*domain.IndexerDefinition),
 		definitions:               make(map[string]domain.IndexerDefinition),
 		mappedDefinitions:         make(map[string]*domain.IndexerDefinition),
@@ -72,7 +75,7 @@ func (s *service) Store(ctx context.Context, indexer domain.Indexer) (*domain.In
 	// if indexer is rss or torznab do additional cleanup for identifier
 	switch indexer.Implementation {
-	case "torznab", "rss":
+	case "torznab", "newznab", "rss":
 		// make lowercase
 		cleanName := strings.ToLower(indexer.Name)
@@ -213,6 +216,8 @@ func (s *service) mapIndexer(indexer domain.Indexer) (*domain.IndexerDefinition,
 	definitionName := indexer.Identifier
 	if indexer.Implementation == "torznab" {
 		definitionName = "torznab"
+	} else if indexer.Implementation == "newznab" {
+		definitionName = "newznab"
 	} else if indexer.Implementation == "rss" {
 		definitionName = "rss"
 	}
@@ -336,6 +341,8 @@ func (s *service) Start() error {
 		// handle Torznab
 		if indexer.Implementation == "torznab" {
 			s.torznabIndexers[indexer.Identifier] = indexer
+		} else if indexer.Implementation == "newznab" {
+			s.newznabIndexers[indexer.Identifier] = indexer
 		} else if indexer.Implementation == "rss" {
 			s.rssIndexers[indexer.Identifier] = indexer
 		}
@@ -350,6 +357,8 @@ func (s *service) removeIndexer(indexer domain.Indexer) {
 	// remove Torznab
 	if indexer.Implementation == "torznab" {
 		delete(s.torznabIndexers, indexer.Identifier)
+	} else if indexer.Implementation == "newznab" {
+		delete(s.newznabIndexers, indexer.Identifier)
 	} else if indexer.Implementation == "rss" {
 		delete(s.rssIndexers, indexer.Identifier)
 	}
@@ -383,6 +392,8 @@ func (s *service) addIndexer(indexer domain.Indexer) error {
 	// handle Torznab and RSS
 	if indexerDefinition.Implementation == "torznab" {
 		s.torznabIndexers[indexer.Identifier] = indexerDefinition
+	} else if indexer.Implementation == "newznab" {
+		s.newznabIndexers[indexer.Identifier] = indexerDefinition
 	} else if indexerDefinition.Implementation == "rss" {
 		s.rssIndexers[indexer.Identifier] = indexerDefinition
 	}
@@ -417,6 +428,8 @@ func (s *service) updateIndexer(indexer domain.Indexer) error {
 	// handle Torznab
 	if indexerDefinition.Implementation == "torznab" {
 		s.torznabIndexers[indexer.Identifier] = indexerDefinition
+	} else if indexer.Implementation == "newznab" {
+		s.newznabIndexers[indexer.Identifier] = indexerDefinition
 	} else if indexerDefinition.Implementation == "rss" {
 		s.rssIndexers[indexer.Identifier] = indexerDefinition
 	}

pkg/newznab/caps.go (new file, 93 lines)

@@ -0,0 +1,93 @@
package newznab
import "encoding/xml"
type Server struct {
Version string `xml:"version,attr"`
Title string `xml:"title,attr"`
Strapline string `xml:"strapline,attr"`
Email string `xml:"email,attr"`
URL string `xml:"url,attr"`
Image string `xml:"image,attr"`
}
type Limits struct {
Max string `xml:"max,attr"`
Default string `xml:"default,attr"`
}
type Retention struct {
Days string `xml:"days,attr"`
}
type Registration struct {
Available string `xml:"available,attr"`
Open string `xml:"open,attr"`
}
type Searching struct {
Search Search `xml:"search"`
TvSearch Search `xml:"tv-search"`
MovieSearch Search `xml:"movie-search"`
AudioSearch Search `xml:"audio-search"`
BookSearch Search `xml:"book-search"`
}
type Search struct {
Available string `xml:"available,attr"`
SupportedParams string `xml:"supportedParams,attr"`
}
type CapCategories struct {
Categories []Category `xml:"category"`
}
type CapCategory struct {
ID string `xml:"id,attr"`
Name string `xml:"name,attr"`
SubCategories []CapCategory `xml:"subcat"`
}
type Groups struct {
Group Group `xml:"group"`
}
type Group struct {
ID string `xml:"id,attr"`
Name string `xml:"name,attr"`
Description string `xml:"description,attr"`
Lastupdate string `xml:"lastupdate,attr"`
}
type Genres struct {
Genre Genre `xml:"genre"`
}
type Genre struct {
ID string `xml:"id,attr"`
Categoryid string `xml:"categoryid,attr"`
Name string `xml:"name,attr"`
}
type Tags struct {
Tag []Tag `xml:"tag"`
}
type Tag struct {
Name string `xml:"name,attr"`
Description string `xml:"description,attr"`
}
type CapsResponse struct {
Caps Caps `xml:"caps"`
}
type Caps struct {
XMLName xml.Name `xml:"caps"`
Server Server `xml:"server"`
Limits Limits `xml:"limits"`
Retention Retention `xml:"retention"`
Registration Registration `xml:"registration"`
Searching Searching `xml:"searching"`
Categories CapCategories `xml:"categories"`
Groups Groups `xml:"groups"`
Genres Genres `xml:"genres"`
Tags Tags `xml:"tags"`
}

pkg/newznab/category.go (new file, 219 lines)

@@ -0,0 +1,219 @@
package newznab
import (
"fmt"
"regexp"
"strconv"
)
type Category struct {
ID int `xml:"id,attr"`
Name string `xml:"name,attr"`
SubCategories []Category `xml:"subcat"`
}
func (c Category) String() string {
return fmt.Sprintf("%s[%d]", c.Name, c.ID)
}
func (c Category) FromString(str string) {
var re = regexp.MustCompile(`(?m)(.+)\[(.+)\]`)
match := re.FindAllString(str, -1)
c.Name = match[1]
c.ID, _ = strconv.Atoi(match[2])
}
const (
CustomCategoryOffset = 100000
)
// Categories from the Newznab spec
// https://github.com/nZEDb/nZEDb/blob/0.x/docs/newznab_api_specification.txt#L627
var (
CategoryOther = Category{0, "Other", nil}
CategoryOther_Misc = Category{10, "Other/Misc", nil}
CategoryOther_Hashed = Category{20, "Other/Hashed", nil}
CategoryConsole = Category{1000, "Console", nil}
CategoryConsole_NDS = Category{1010, "Console/NDS", nil}
CategoryConsole_PSP = Category{1020, "Console/PSP", nil}
CategoryConsole_Wii = Category{1030, "Console/Wii", nil}
CategoryConsole_XBOX = Category{1040, "Console/Xbox", nil}
CategoryConsole_XBOX360 = Category{1050, "Console/Xbox360", nil}
CategoryConsole_WiiwareVC = Category{1060, "Console/Wiiware/V", nil}
CategoryConsole_XBOX360DLC = Category{1070, "Console/Xbox360", nil}
CategoryConsole_PS3 = Category{1080, "Console/PS3", nil}
CategoryConsole_Other = Category{1999, "Console/Other", nil}
CategoryConsole_3DS = Category{1110, "Console/3DS", nil}
CategoryConsole_PSVita = Category{1120, "Console/PS Vita", nil}
CategoryConsole_WiiU = Category{1130, "Console/WiiU", nil}
CategoryConsole_XBOXOne = Category{1140, "Console/XboxOne", nil}
CategoryConsole_PS4 = Category{1180, "Console/PS4", nil}
CategoryMovies = Category{2000, "Movies", nil}
CategoryMovies_Foreign = Category{2010, "Movies/Foreign", nil}
CategoryMovies_Other = Category{2020, "Movies/Other", nil}
CategoryMovies_SD = Category{2030, "Movies/SD", nil}
CategoryMovies_HD = Category{2040, "Movies/HD", nil}
CategoryMovies_3D = Category{2050, "Movies/3D", nil}
CategoryMovies_BluRay = Category{2060, "Movies/BluRay", nil}
CategoryMovies_DVD = Category{2070, "Movies/DVD", nil}
CategoryMovies_WEBDL = Category{2080, "Movies/WEBDL", nil}
CategoryAudio = Category{3000, "Audio", nil}
CategoryAudio_MP3 = Category{3010, "Audio/MP3", nil}
CategoryAudio_Video = Category{3020, "Audio/Video", nil}
CategoryAudio_Audiobook = Category{3030, "Audio/Audiobook", nil}
CategoryAudio_Lossless = Category{3040, "Audio/Lossless", nil}
CategoryAudio_Other = Category{3999, "Audio/Other", nil}
CategoryAudio_Foreign = Category{3060, "Audio/Foreign", nil}
CategoryPC = Category{4000, "PC", nil}
CategoryPC_0day = Category{4010, "PC/0day", nil}
CategoryPC_ISO = Category{4020, "PC/ISO", nil}
CategoryPC_Mac = Category{4030, "PC/Mac", nil}
CategoryPC_PhoneOther = Category{4040, "PC/Phone-Other", nil}
CategoryPC_Games = Category{4050, "PC/Games", nil}
CategoryPC_PhoneIOS = Category{4060, "PC/Phone-IOS", nil}
CategoryPC_PhoneAndroid = Category{4070, "PC/Phone-Android", nil}
CategoryTV = Category{5000, "TV", nil}
CategoryTV_WEBDL = Category{5010, "TV/WEB-DL", nil}
CategoryTV_FOREIGN = Category{5020, "TV/Foreign", nil}
CategoryTV_SD = Category{5030, "TV/SD", nil}
CategoryTV_HD = Category{5040, "TV/HD", nil}
CategoryTV_Other = Category{5999, "TV/Other", nil}
CategoryTV_Sport = Category{5060, "TV/Sport", nil}
CategoryTV_Anime = Category{5070, "TV/Anime", nil}
CategoryTV_Documentary = Category{5080, "TV/Documentary", nil}
CategoryXXX = Category{6000, "XXX", nil}
CategoryXXX_DVD = Category{6010, "XXX/DVD", nil}
CategoryXXX_WMV = Category{6020, "XXX/WMV", nil}
CategoryXXX_XviD = Category{6030, "XXX/XviD", nil}
CategoryXXX_x264 = Category{6040, "XXX/x264", nil}
CategoryXXX_Other = Category{6999, "XXX/Other", nil}
CategoryXXX_Imageset = Category{6060, "XXX/Imageset", nil}
CategoryXXX_Packs = Category{6070, "XXX/Packs", nil}
CategoryBooks = Category{7000, "Books", nil}
CategoryBooks_Magazines = Category{7010, "Books/Magazines", nil}
CategoryBooks_Ebook = Category{7020, "Books/Ebook", nil}
CategoryBooks_Comics = Category{7030, "Books/Comics", nil}
CategoryBooks_Technical = Category{7040, "Books/Technical", nil}
CategoryBooks_Foreign = Category{7060, "Books/Foreign", nil}
CategoryBooks_Unknown = Category{7999, "Books/Unknown", nil}
)
var AllCategories = Categories{
CategoryOther,
CategoryOther_Misc,
CategoryOther_Hashed,
CategoryConsole,
CategoryConsole_NDS,
CategoryConsole_PSP,
CategoryConsole_Wii,
CategoryConsole_XBOX,
CategoryConsole_XBOX360,
CategoryConsole_WiiwareVC,
CategoryConsole_XBOX360DLC,
CategoryConsole_PS3,
CategoryConsole_Other,
CategoryConsole_3DS,
CategoryConsole_PSVita,
CategoryConsole_WiiU,
CategoryConsole_XBOXOne,
CategoryConsole_PS4,
CategoryMovies,
CategoryMovies_Foreign,
CategoryMovies_Other,
CategoryMovies_SD,
CategoryMovies_HD,
CategoryMovies_3D,
CategoryMovies_BluRay,
CategoryMovies_DVD,
CategoryMovies_WEBDL,
CategoryAudio,
CategoryAudio_MP3,
CategoryAudio_Video,
CategoryAudio_Audiobook,
CategoryAudio_Lossless,
CategoryAudio_Other,
CategoryAudio_Foreign,
CategoryPC,
CategoryPC_0day,
CategoryPC_ISO,
CategoryPC_Mac,
CategoryPC_PhoneOther,
CategoryPC_Games,
CategoryPC_PhoneIOS,
CategoryPC_PhoneAndroid,
CategoryTV,
CategoryTV_WEBDL,
CategoryTV_FOREIGN,
CategoryTV_SD,
CategoryTV_HD,
CategoryTV_Other,
CategoryTV_Sport,
CategoryTV_Anime,
CategoryTV_Documentary,
CategoryXXX,
CategoryXXX_DVD,
CategoryXXX_WMV,
CategoryXXX_XviD,
CategoryXXX_x264,
CategoryXXX_Other,
CategoryXXX_Imageset,
CategoryXXX_Packs,
CategoryBooks,
CategoryBooks_Magazines,
CategoryBooks_Ebook,
CategoryBooks_Comics,
CategoryBooks_Technical,
CategoryBooks_Foreign,
CategoryBooks_Unknown,
}
func ParentCategory(c Category) Category {
switch {
case c.ID < 1000:
return CategoryOther
case c.ID < 2000:
return CategoryConsole
case c.ID < 3000:
return CategoryMovies
case c.ID < 4000:
return CategoryAudio
case c.ID < 5000:
return CategoryPC
case c.ID < 6000:
return CategoryTV
case c.ID < 7000:
return CategoryXXX
case c.ID < 8000:
return CategoryBooks
}
return CategoryOther
}
type Categories []Category
func (slice Categories) Subset(ids ...int) Categories {
cats := Categories{}
for _, cat := range AllCategories {
for _, id := range ids {
if cat.ID == id {
cats = append(cats, cat)
}
}
}
return cats
}
func (slice Categories) Len() int {
return len(slice)
}
func (slice Categories) Less(i, j int) bool {
return slice[i].ID < slice[j].ID
}
func (slice Categories) Swap(i, j int) {
slice[i], slice[j] = slice[j], slice[i]
}
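
A quick standalone sketch of how these category helpers behave (the printed output follows the String method above; everything else is from this new package): ParentCategory rolls a sub-category up to its top-level parent, and Subset picks categories out of AllCategories by ID.

package main

import (
	"fmt"

	"github.com/autobrr/autobrr/pkg/newznab"
)

func main() {
	// TV/HD (5040) rolls up to the TV parent category (5000).
	fmt.Println(newznab.ParentCategory(newznab.CategoryTV_HD)) // TV[5000]

	// Pick a handful of movie categories by ID from the full list.
	movies := newznab.AllCategories.Subset(2000, 2040, 2080)
	fmt.Println(movies) // [Movies[2000] Movies/HD[2040] Movies/WEBDL[2080]]
}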

pkg/newznab/feed.go (new file, 144 lines)

@@ -0,0 +1,144 @@
package newznab
import (
"encoding/xml"
"strconv"
"time"
"github.com/autobrr/autobrr/pkg/errors"
)
type Feed struct {
Channel Channel `xml:"channel"`
Raw string
}
func (f Feed) Len() int {
return len(f.Channel.Items)
}
type Channel struct {
Title string `xml:"title"`
Items []FeedItem `xml:"item"`
}
type Response struct {
Channel struct {
Items []FeedItem `xml:"item"`
} `xml:"channel"`
}
type FeedItem struct {
Title string `xml:"title,omitempty"`
GUID string `xml:"guid,omitempty"`
PubDate Time `xml:"pub_date,omitempty"`
Prowlarrindexer struct {
Text string `xml:",chardata"`
ID string `xml:"id,attr"`
} `xml:"prowlarrindexer,omitempty"`
Comments string `xml:"comments"`
Size string `xml:"size"`
Link string `xml:"link"`
Enclosure *Enclosure `xml:"enclosure,omitempty"`
Category []string `xml:"category,omitempty"`
Categories Categories
// attributes
TvdbId string `xml:"tvdb,omitempty"`
//TvMazeId string
ImdbId string `xml:"imdb,omitempty"`
TmdbId string `xml:"tmdb,omitempty"`
Attributes []ItemAttr `xml:"attr"`
}
type ItemAttr struct {
Name string `xml:"name,attr"`
Value string `xml:"value,attr"`
}
type Enclosure struct {
Url string `xml:"url,attr"`
Length string `xml:"length,attr"`
Type string `xml:"type,attr"`
}
func (f FeedItem) MapCategoriesFromAttr() {
for _, attr := range f.Attributes {
if attr.Name == "category" {
catId, err := strconv.Atoi(attr.Value)
if err != nil {
continue
}
if catId > 0 && catId < 10000 {
f.Categories = append(f.Categories, ParentCategory(Category{ID: catId}))
}
} else if attr.Name == "size" {
if f.Size == "" && attr.Value != "" {
f.Size = attr.Value
}
}
}
}
func (f FeedItem) MapCustomCategoriesFromAttr(categories []Category) {
for _, attr := range f.Attributes {
if attr.Name == "category" {
catId, err := strconv.Atoi(attr.Value)
if err != nil {
continue
}
if catId > 0 && catId < 10000 {
f.Categories = append(f.Categories, ParentCategory(Category{ID: catId}))
} else if catId > 10000 {
// categories 10000+ are custom indexer specific
for _, capCat := range categories {
if capCat.ID == catId {
f.Categories = append(f.Categories, Category{
ID: capCat.ID,
Name: capCat.Name,
})
break
}
}
}
}
}
}
// Time credits: https://github.com/mrobinsn/go-newznab/blob/cd89d9c56447859fa1298dc9a0053c92c45ac7ef/newznab/structs.go#L150
type Time struct {
time.Time
}
func (t *Time) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
if err := e.EncodeToken(start); err != nil {
return errors.Wrap(err, "failed to encode xml token")
}
if err := e.EncodeToken(xml.CharData([]byte(t.UTC().Format(time.RFC1123Z)))); err != nil {
return errors.Wrap(err, "failed to encode xml token")
}
if err := e.EncodeToken(xml.EndElement{Name: start.Name}); err != nil {
return errors.Wrap(err, "failed to encode xml token")
}
return nil
}
func (t *Time) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {
var raw string
err := d.DecodeElement(&raw, &start)
if err != nil {
return errors.Wrap(err, "could not decode element")
}
date, err := time.Parse(time.RFC1123Z, raw)
if err != nil {
return errors.Wrap(err, "could not parse date")
}
*t = Time{date}
return nil
}
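
For reference, a small sketch of how the custom Time type parses a publish date during XML decoding. The element names follow the struct tags above (note the pub_date tag on FeedItem), and the sample values are made up.

package main

import (
	"encoding/xml"
	"fmt"
	"log"

	"github.com/autobrr/autobrr/pkg/newznab"
)

func main() {
	raw := `<item>
		<title>That.Show.S01E01.1080p.WEB.H264-GROUP</title>
		<guid>https://indexer.example.com/details/abc123</guid>
		<pub_date>Sat, 04 Mar 2023 11:27:18 -0800</pub_date>
		<size>1073741824</size>
	</item>`

	var item newznab.FeedItem
	if err := xml.Unmarshal([]byte(raw), &item); err != nil {
		log.Fatal(err)
	}

	// PubDate embeds time.Time, so the parsed value can be used directly.
	fmt.Println(item.Title, item.Size, item.PubDate.UTC())
}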

pkg/newznab/newznab.go (new file, 355 lines)

@@ -0,0 +1,355 @@
package newznab
import (
"bytes"
"context"
"encoding/xml"
"io"
"log"
"net/http"
"net/http/httputil"
"net/url"
"strings"
"time"
"github.com/autobrr/autobrr/pkg/errors"
)
const DefaultTimeout = 60
type Client interface {
GetFeed(ctx context.Context) (*Feed, error)
GetCaps(ctx context.Context) (*Caps, error)
Caps() *Caps
}
type client struct {
http *http.Client
Host string
ApiKey string
UseBasicAuth bool
BasicAuth BasicAuth
Capabilities *Caps
Log *log.Logger
}
type BasicAuth struct {
Username string
Password string
}
type Config struct {
Host string
ApiKey string
Timeout time.Duration
UseBasicAuth bool
BasicAuth BasicAuth
Log *log.Logger
}
type Capabilities struct {
Search Searching
Categories Categories
}
func NewClient(config Config) Client {
httpClient := &http.Client{
Timeout: time.Second * DefaultTimeout,
}
if config.Timeout > 0 {
httpClient.Timeout = time.Second * config.Timeout
}
c := &client{
http: httpClient,
Host: config.Host,
ApiKey: config.ApiKey,
Log: log.New(io.Discard, "", log.LstdFlags),
}
if config.Log != nil {
c.Log = config.Log
}
return c
}
func (c *client) get(ctx context.Context, endpoint string, queryParams map[string]string) (int, *Feed, error) {
params := url.Values{}
params.Set("t", "search")
for k, v := range queryParams {
params.Add(k, v)
}
if c.ApiKey != "" {
params.Add("apikey", c.ApiKey)
}
u, err := url.Parse(c.Host)
u.Path = strings.TrimSuffix(u.Path, "/")
u.RawQuery = params.Encode()
reqUrl := u.String()
req, err := http.NewRequestWithContext(ctx, http.MethodGet, reqUrl, nil)
if err != nil {
return 0, nil, errors.Wrap(err, "could not build request")
}
if c.UseBasicAuth {
req.SetBasicAuth(c.BasicAuth.Username, c.BasicAuth.Password)
}
resp, err := c.http.Do(req)
if err != nil {
return 0, nil, errors.Wrap(err, "could not make request. %+v", req)
}
defer resp.Body.Close()
dump, err := httputil.DumpResponse(resp, true)
if err != nil {
return 0, nil, errors.Wrap(err, "could not dump response")
}
c.Log.Printf("newznab get feed response dump: %q", dump)
var buf bytes.Buffer
if _, err = io.Copy(&buf, resp.Body); err != nil {
return resp.StatusCode, nil, errors.Wrap(err, "newznab.io.Copy")
}
var response Feed
if err := xml.Unmarshal(buf.Bytes(), &response); err != nil {
return resp.StatusCode, nil, errors.Wrap(err, "newznab: could not decode feed")
}
response.Raw = buf.String()
return resp.StatusCode, &response, nil
}
func (c *client) getData(ctx context.Context, endpoint string, queryParams map[string]string) (*http.Response, error) {
u, err := url.Parse(c.Host)
if err != nil {
return nil, errors.Wrap(err, "could not build request")
}
u.Path = strings.TrimSuffix(u.Path, "/")
qp, err := url.ParseQuery(u.RawQuery)
if err != nil {
return nil, errors.Wrap(err, "could not build request")
}
if c.ApiKey != "" {
qp.Add("apikey", c.ApiKey)
}
for k, v := range queryParams {
if qp.Has("t") {
continue
}
qp.Add(k, v)
}
u.RawQuery = qp.Encode()
reqUrl := u.String()
req, err := http.NewRequestWithContext(ctx, http.MethodGet, reqUrl, nil)
if err != nil {
return nil, errors.Wrap(err, "could not build request")
}
if c.UseBasicAuth {
req.SetBasicAuth(c.BasicAuth.Username, c.BasicAuth.Password)
}
resp, err := c.http.Do(req)
if err != nil {
return nil, errors.Wrap(err, "could not make request. %+v", req)
}
return resp, nil
}
func (c *client) GetFeed(ctx context.Context) (*Feed, error) {
p := map[string]string{"t": "search"}
resp, err := c.getData(ctx, "", p)
if err != nil {
return nil, errors.Wrap(err, "could not get feed")
}
defer resp.Body.Close()
dump, err := httputil.DumpResponse(resp, true)
if err != nil {
return nil, errors.Wrap(err, "could not dump response")
}
c.Log.Printf("newznab get feed response dump: %q", dump)
if resp.StatusCode != http.StatusOK {
return nil, errors.New("could not get feed")
}
var buf bytes.Buffer
if _, err = io.Copy(&buf, resp.Body); err != nil {
return nil, errors.Wrap(err, "newznab.io.Copy")
}
var response Feed
if err := xml.Unmarshal(buf.Bytes(), &response); err != nil {
return nil, errors.Wrap(err, "newznab: could not decode feed")
}
response.Raw = buf.String()
if c.Capabilities != nil {
for _, item := range response.Channel.Items {
item.MapCustomCategoriesFromAttr(c.Capabilities.Categories.Categories)
}
} else {
for _, item := range response.Channel.Items {
item.MapCategoriesFromAttr()
}
}
return &response, nil
}
func (c *client) GetFeedAndCaps(ctx context.Context) (*Feed, error) {
if c.Capabilities == nil {
status, caps, err := c.getCaps(ctx, "?t=caps", nil)
if err != nil {
return nil, errors.Wrap(err, "could not get caps for feed")
}
if status != http.StatusOK {
return nil, errors.Wrap(err, "could not get caps for feed")
}
c.Capabilities = caps
}
p := map[string]string{"t": "search"}
status, res, err := c.get(ctx, "", p)
if err != nil {
return nil, errors.Wrap(err, "could not get feed")
}
if status != http.StatusOK {
return nil, errors.New("could not get feed")
}
for _, item := range res.Channel.Items {
item.MapCustomCategoriesFromAttr(c.Capabilities.Categories.Categories)
}
return res, nil
}
func (c *client) getCaps(ctx context.Context, endpoint string, opts map[string]string) (int, *Caps, error) {
params := url.Values{
"t": {"caps"},
}
if c.ApiKey != "" {
params.Add("apikey", c.ApiKey)
}
u, err := url.Parse(c.Host)
u.Path = strings.TrimSuffix(u.Path, "/")
u.RawQuery = params.Encode()
reqUrl := u.String()
req, err := http.NewRequestWithContext(ctx, http.MethodGet, reqUrl, nil)
if err != nil {
return 0, nil, errors.Wrap(err, "could not build request")
}
if c.UseBasicAuth {
req.SetBasicAuth(c.BasicAuth.Username, c.BasicAuth.Password)
}
// Jackett only supports api key via url param while Prowlarr does that and via header
//if c.ApiKey != "" {
// req.Header.Add("X-API-Key", c.ApiKey)
//}
resp, err := c.http.Do(req)
if err != nil {
return 0, nil, errors.Wrap(err, "could not make request. %+v", req)
}
defer resp.Body.Close()
dump, err := httputil.DumpResponse(resp, true)
if err != nil {
return 0, nil, errors.Wrap(err, "could not dump response")
}
c.Log.Printf("newznab get caps response dump: %q", dump)
if resp.StatusCode == http.StatusUnauthorized {
return resp.StatusCode, nil, errors.New("unauthorized")
} else if resp.StatusCode != http.StatusOK {
return resp.StatusCode, nil, errors.New("bad status: %d", resp.StatusCode)
}
var buf bytes.Buffer
if _, err = io.Copy(&buf, resp.Body); err != nil {
return resp.StatusCode, nil, errors.Wrap(err, "newznab.io.Copy")
}
var response Caps
if err := xml.Unmarshal(buf.Bytes(), &response); err != nil {
return resp.StatusCode, nil, errors.Wrap(err, "newznab: could not decode feed")
}
return resp.StatusCode, &response, nil
}
func (c *client) GetCaps(ctx context.Context) (*Caps, error) {
status, res, err := c.getCaps(ctx, "?t=caps", nil)
if err != nil {
return nil, errors.Wrap(err, "could not get caps for feed")
}
if status != http.StatusOK {
return nil, errors.Wrap(err, "could not get caps for feed")
}
return res, nil
}
func (c *client) Caps() *Caps {
return c.Capabilities
}
//func (c *client) Search(ctx context.Context, query string) ([]FeedItem, error) {
// v := url.Values{}
// v.Add("q", query)
// params := v.Encode()
//
// status, res, err := c.get(ctx, "&t=search&"+params, nil)
// if err != nil {
// return nil, errors.Wrap(err, "could not search feed")
// }
//
// if status != http.StatusOK {
// return nil, errors.New("could not search feed")
// }
//
// return res.Channel.Items, nil
//}
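
Outside the feed service, the client can also be driven on its own. A minimal sketch with a placeholder indexer URL and API key; Config.Timeout is left unset so the 60-second default applies.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/autobrr/autobrr/pkg/newznab"
)

func main() {
	c := newznab.NewClient(newznab.Config{
		Host:   "https://indexer.example.com/api", // placeholder host
		ApiKey: "your-api-key",                    // placeholder key
	})

	ctx := context.Background()

	// t=caps: what the indexer supports.
	caps, err := c.GetCaps(ctx)
	if err != nil {
		log.Fatalf("caps: %v", err)
	}
	fmt.Printf("server %q version %s\n", caps.Server.Title, caps.Server.Version)

	// t=search: latest items from the feed.
	feed, err := c.GetFeed(ctx)
	if err != nil {
		log.Fatalf("feed: %v", err)
	}
	for _, item := range feed.Channel.Items {
		fmt.Println(item.Title, item.Size)
	}
}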

pkg/sabnzbd/sabnzbd.go (new file, 173 lines)

@@ -0,0 +1,173 @@
package sabnzbd
import (
"context"
"encoding/json"
"fmt"
"io"
"log"
"net/http"
"net/url"
"time"
)
type Client struct {
addr string
apiKey string
basicUser string
basicPass string
log *log.Logger
Http *http.Client
}
type Options struct {
Addr string
ApiKey string
BasicUser string
BasicPass string
Log *log.Logger
}
func New(opts Options) *Client {
c := &Client{
addr: opts.Addr,
apiKey: opts.ApiKey,
basicUser: opts.BasicUser,
basicPass: opts.BasicPass,
log: log.New(io.Discard, "", log.LstdFlags),
Http: &http.Client{
Timeout: time.Second * 60,
},
}
if opts.Log != nil {
c.log = opts.Log
}
return c
}
func (c *Client) AddFromUrl(ctx context.Context, r AddNzbRequest) (*AddFileResponse, error) {
v := url.Values{}
v.Set("mode", "addurl")
v.Set("name", r.Url)
v.Set("output", "json")
v.Set("apikey", c.apiKey)
v.Set("cat", "*")
if r.Category != "" {
v.Set("cat", r.Category)
}
addr, err := url.JoinPath(c.addr, "/api")
if err != nil {
return nil, err
}
u, err := url.Parse(addr)
if err != nil {
return nil, err
}
u.RawQuery = v.Encode()
req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil)
if err != nil {
return nil, err
}
if c.basicUser != "" && c.basicPass != "" {
req.SetBasicAuth(c.basicUser, c.basicPass)
}
res, err := c.Http.Do(req)
if err != nil {
return nil, err
}
defer res.Body.Close()
body, err := io.ReadAll(res.Body)
if err != nil {
return nil, err
}
fmt.Print(body)
var data AddFileResponse
if err := json.Unmarshal(body, &data); err != nil {
return nil, err
}
return &data, nil
}
func (c *Client) Version(ctx context.Context) (*VersionResponse, error) {
v := url.Values{}
v.Set("mode", "version")
v.Set("output", "json")
v.Set("apikey", c.apiKey)
addr, err := url.JoinPath(c.addr, "/api")
if err != nil {
return nil, err
}
u, err := url.Parse(addr)
if err != nil {
return nil, err
}
u.RawQuery = v.Encode()
req, err := http.NewRequestWithContext(ctx, http.MethodGet, u.String(), nil)
if err != nil {
return nil, err
}
if c.basicUser != "" && c.basicPass != "" {
req.SetBasicAuth(c.basicUser, c.basicPass)
}
res, err := c.Http.Do(req)
if err != nil {
return nil, err
}
defer res.Body.Close()
body, err := io.ReadAll(res.Body)
if err != nil {
return nil, err
}
var data VersionResponse
if err := json.Unmarshal(body, &data); err != nil {
return nil, err
}
return &data, nil
}
type VersionResponse struct {
Version string `json:"version"`
}
type AddFileResponse struct {
NzoIDs []string `json:"nzo_ids"`
ApiError
}
type ApiError struct {
ErrorMsg string `json:"error,omitempty"`
}
type AddNzbRequest struct {
Url string
Category string
}
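
And a minimal sketch of driving this client directly, mirroring what the action and download-client services above do (host, API key, NZB URL and category are placeholders):

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/autobrr/autobrr/pkg/sabnzbd"
)

func main() {
	client := sabnzbd.New(sabnzbd.Options{
		Addr:   "http://localhost:8080/", // placeholder SABnzbd address
		ApiKey: "your-api-key",           // placeholder key
	})

	ctx := context.Background()

	// mode=version doubles as a connection test.
	version, err := client.Version(ctx)
	if err != nil {
		log.Fatalf("version: %v", err)
	}
	fmt.Println("sabnzbd version:", version.Version)

	// mode=addurl queues an NZB by URL into the given category.
	res, err := client.AddFromUrl(ctx, sabnzbd.AddNzbRequest{
		Url:      "https://indexer.example.com/getnzb/abc123", // placeholder NZB link
		Category: "tv",
	})
	if err != nil {
		log.Fatalf("addurl: %v", err)
	}
	fmt.Println("queued nzo ids:", res.NzoIDs)
}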

@@ -6,6 +6,7 @@ export interface radioFieldsetOption {
   label: string;
   description: string;
   value: string;
+  type?: string;
 }
 interface props {
@@ -75,7 +76,7 @@ function RadioFieldsetWide({ name, legend, options }: props) {
         )}
         aria-hidden="true"
       />
-      <div className="ml-3 flex flex-col">
+      <div className="ml-3 flex flex-col w-full">
         <RadioGroup.Label
           as="span"
           className={classNames(
@@ -83,7 +84,10 @@ function RadioFieldsetWide({ name, legend, options }: props) {
             checked ? "font-bold" : "font-medium"
           )}
         >
-          {setting.label}
+          <div className="flex justify-between">
+            {setting.label}
+            {setting.type && <span className="rounded bg-orange-500 text-orange-900 px-1 ml-2 text-sm">{setting.type}</span>}
+          </div>
         </RadioGroup.Label>
         <RadioGroup.Description
           as="span"

@@ -217,6 +217,7 @@ export interface RadioFieldsetOption {
   label: string;
   description: string;
   value: ActionType;
+  type?: string;
 }
 export const DownloadClientTypeOptions: RadioFieldsetOption[] = [
@@ -274,6 +275,12 @@ export const DownloadClientTypeOptions: RadioFieldsetOption[] = [
     label: "Readarr",
     description: "Send to Readarr and let it decide",
     value: "READARR"
+  },
+  {
+    label: "Sabnzbd",
+    description: "Add nzbs directly to Sabnzbd",
+    value: "SABNZBD",
+    type: "nzb"
   }
 ];
@@ -288,7 +295,8 @@ export const DownloadClientTypeNameMap: Record<DownloadClientType | string, stri
   "SONARR": "Sonarr",
   "LIDARR": "Lidarr",
   "WHISPARR": "Whisparr",
-  "READARR": "Readarr"
+  "READARR": "Readarr",
+  "SABNZBD": "Sabnzbd"
 };
 export const ActionTypeOptions: RadioFieldsetOption[] = [
@@ -306,7 +314,8 @@ export const ActionTypeOptions: RadioFieldsetOption[] = [
   { label: "Sonarr", description: "Send to Sonarr and let it decide", value: "SONARR" },
   { label: "Lidarr", description: "Send to Lidarr and let it decide", value: "LIDARR" },
   { label: "Whisparr", description: "Send to Whisparr and let it decide", value: "WHISPARR" },
-  { label: "Readarr", description: "Send to Readarr and let it decide", value: "READARR" }
+  { label: "Readarr", description: "Send to Readarr and let it decide", value: "READARR" },
+  { label: "Sabnzbd", description: "Add to Sabnzbd", value: "SABNZBD" }
 ];
 export const ActionTypeNameMap = {
@@ -324,7 +333,8 @@ export const ActionTypeNameMap = {
   "SONARR": "Sonarr",
   "LIDARR": "Lidarr",
   "WHISPARR": "Whisparr",
-  "READARR": "Readarr"
+  "READARR": "Readarr",
+  "SABNZBD": "Sabnzbd"
 };
 export const ActionContentLayoutOptions: SelectGenericOption<ActionContentLayout>[] = [

@@ -240,22 +240,71 @@ function FormFieldsTransmission() {
   );
 }
function FormFieldsSabnzbd() {
const {
values: { port, tls, settings }
} = useFormikContext<InitialValues>();
return (
<div className="flex flex-col space-y-4 px-1 py-6 sm:py-0 sm:space-y-0">
<TextFieldWide
name="host"
label="Host"
help="Eg. ip:port"
// tooltip={<div><p>See guides for how to connect to qBittorrent for various server types in our docs.</p><br /><p>Dedicated servers:</p><a href='https://autobrr.com/configuration/download-clients/dedicated#qbittorrent' className='text-blue-400 visited:text-blue-400' target='_blank'>https://autobrr.com/configuration/download-clients/dedicated#qbittorrent</a><p>Shared seedbox providers:</p><a href='https://autobrr.com/configuration/download-clients/shared-seedboxes#qbittorrent' className='text-blue-400 visited:text-blue-400' target='_blank'>https://autobrr.com/configuration/download-clients/shared-seedboxes#qbittorrent</a></div>}
/>
{port > 0 && (
<NumberFieldWide
name="port"
label="Port"
help="port for Sabnzbd"
/>
)}
<SwitchGroupWide name="tls" label="TLS" />
{tls && (
<SwitchGroupWide
name="tls_skip_verify"
label="Skip TLS verification (insecure)"
/>
)}
{/*<TextFieldWide name="username" label="Username" />*/}
{/*<PasswordFieldWide name="password" label="Password" />*/}
<PasswordFieldWide name="settings.apikey" label="API key" />
<SwitchGroupWide name="settings.basic.auth" label="Basic auth" />
{settings.basic?.auth === true && (
<>
<TextFieldWide name="settings.basic.username" label="Username" />
<PasswordFieldWide name="settings.basic.password" label="Password" />
</>
)}
</div>
);
}
 export interface componentMapType {
   [key: string]: React.ReactElement;
 }
 export const componentMap: componentMapType = {
-  DELUGE_V1: <FormFieldsDeluge/>,
-  DELUGE_V2: <FormFieldsDeluge/>,
-  QBITTORRENT: <FormFieldsQbit/>,
+  DELUGE_V1: <FormFieldsDeluge />,
+  DELUGE_V2: <FormFieldsDeluge />,
+  QBITTORRENT: <FormFieldsQbit />,
   RTORRENT: <FormFieldsRTorrent />,
-  TRANSMISSION: <FormFieldsTransmission/>,
+  TRANSMISSION: <FormFieldsTransmission />,
   PORLA: <FormFieldsPorla />,
-  RADARR: <FormFieldsArr/>,
-  SONARR: <FormFieldsArr/>,
-  LIDARR: <FormFieldsArr/>,
-  WHISPARR: <FormFieldsArr/>,
-  READARR: <FormFieldsArr/>
+  RADARR: <FormFieldsArr />,
+  SONARR: <FormFieldsArr />,
+  LIDARR: <FormFieldsArr />,
+  WHISPARR: <FormFieldsArr />,
+  READARR: <FormFieldsArr />,
+  SABNZBD: <FormFieldsSabnzbd />
 };
 function FormFieldsRulesBasic() {

@@ -203,6 +203,30 @@ function FormFieldsTorznab() {
   );
 }
+
+function FormFieldsNewznab() {
+  const {
+    values: { interval }
+  } = useFormikContext<InitialValues>();
+
+  return (
+    <div className="border-t border-gray-200 dark:border-gray-700 py-5">
+      <TextFieldWide
+        name="url"
+        label="URL"
+        help="Newznab url"
+      />
+      <PasswordFieldWide name="api_key" label="API key" />
+
+      {interval < 15 && <WarningLabel />}
+      <NumberFieldWide name="interval" label="Refresh interval" help="Minutes. Recommended 15-30. Too low and risk ban."/>
+      <NumberFieldWide name="timeout" label="Refresh timeout" help="Seconds to wait before cancelling refresh."/>
+      <NumberFieldWide name="max_age" label="Max age" help="Seconds. Will not grab older than this value."/>
+    </div>
+  );
+}
+
 function FormFieldsRSS() {
   const {
     values: { interval }
@@ -230,5 +254,6 @@ function FormFieldsRSS() {
 const componentMap: componentMapType = {
   TORZNAB: <FormFieldsTorznab />,
+  NEWZNAB: <FormFieldsNewznab />,
   RSS: <FormFieldsRSS />
 };

@@ -100,7 +100,7 @@ const IrcSettingFields = (ind: IndexerDefinition, indexer: string) => {
   }
 };
-const FeedSettingFields = (ind: IndexerDefinition, indexer: string) => {
+const TorznabFeedSettingFields = (ind: IndexerDefinition, indexer: string) => {
   if (indexer !== "") {
     return (
       <Fragment>
@@ -139,6 +139,37 @@ const FeedSettingFields = (ind: IndexerDefinition, indexer: string) => {
   }
 };
const NewznabFeedSettingFields = (ind: IndexerDefinition, indexer: string) => {
if (indexer !== "") {
return (
<Fragment>
{ind && ind.newznab && ind.newznab.settings && (
<div className="">
<div className="px-4 space-y-1">
<Dialog.Title className="text-lg font-medium text-gray-900 dark:text-white">Newznab</Dialog.Title>
<p className="text-sm text-gray-500 dark:text-gray-200">
Newznab feed
</p>
</div>
<TextFieldWide name="name" label="Name" defaultValue="" />
{ind.newznab.settings.map((f: IndexerSetting, idx: number) => {
switch (f.type) {
case "text":
return <TextFieldWide name={`feed.${f.name}`} label={f.label} required={f.required} key={idx} help={f.help} validate={validateField(f)} />;
case "secret":
return <PasswordFieldWide name={`feed.${f.name}`} label={f.label} required={f.required} key={idx} help={f.help} defaultValue={f.default} validate={validateField(f)} />;
}
return null;
})}
</div>
)}
</Fragment>
);
}
};
 const RSSFeedSettingFields = (ind: IndexerDefinition, indexer: string) => {
   if (indexer !== "") {
     return (
@@ -274,6 +305,31 @@ export function IndexerAddForm({ isOpen, toggle }: AddProps) {
       });
       return;
+    } else if (formData.implementation === "newznab") {
+      formData.url = formData.feed.url;
+
+      const createFeed: FeedCreate = {
+        name: formData.name,
+        enabled: false,
+        type: "NEWZNAB",
+        url: formData.feed.newznab_url,
+        api_key: formData.feed.api_key,
+        interval: 30,
+        timeout: 60,
+        indexer_id: 0,
+        settings: formData.feed.settings
+      };
+
+      mutation.mutate(formData as Indexer, {
+        onSuccess: (indexer) => {
+          // @eslint-ignore
+          createFeed.indexer_id = indexer.id;
+
+          feedMutation.mutate(createFeed);
+        }
+      });
+
+      return;
     } else if (formData.implementation === "rss") {
       const createFeed: FeedCreate = {
         name: formData.name,
@@ -482,7 +538,8 @@ export function IndexerAddForm({ isOpen, toggle }: AddProps) {
             </div>
             {IrcSettingFields(indexer, values.identifier)}
-            {FeedSettingFields(indexer, values.identifier)}
+            {TorznabFeedSettingFields(indexer, values.identifier)}
+            {NewznabFeedSettingFields(indexer, values.identifier)}
             {RSSFeedSettingFields(indexer, values.identifier)}
           </div>

@@ -449,6 +449,26 @@ const TypeForm = ({ action, idx, clients }: TypeFormProps) => {
         </div>
       );
+    case "SABNZBD":
+      return (
+        <div>
+          <div className="mt-6 grid grid-cols-12 gap-6">
+            <DownloadClientSelect
+              name={`actions.${idx}.client_id`}
+              action={action}
+              clients={clients}
+            />
+            <TextField
+              name={`actions.${idx}.category`}
+              label="Category"
+              columns={6}
+              placeholder="eg. category"
+              tooltip={<CustomTooltip anchorId={`actions.${idx}.category`} clickable={true}><p>Category must exist already.</p></CustomTooltip>} />
+          </div>
+        </div>
+      );
     default:
       return null;
   }

@@ -19,6 +19,12 @@ const ImplementationBadgeTorznab = () => (
   </span>
 );
+
+const ImplementationBadgeNewznab = () => (
+  <span className="inline-flex items-center px-2.5 py-0.5 rounded-md text-sm font-medium bg-blue-200 dark:bg-blue-400 text-blue-800 dark:text-blue-800">
+    Newznab
+  </span>
+);
 const ImplementationBadgeRSS = () => (
   <span className="inline-flex items-center px-2.5 py-0.5 rounded-md text-sm font-medium bg-amber-200 dark:bg-amber-400 text-amber-800 dark:text-amber-800">
     RSS
@@ -28,6 +34,7 @@ const ImplementationBadgeRSS = () => (
 export const ImplementationBadges: componentMapType = {
   irc: <ImplementationBadgeIRC />,
   torznab: <ImplementationBadgeTorznab />,
+  newznab: <ImplementationBadgeNewznab />,
   rss: <ImplementationBadgeRSS />
 };

@@ -1,15 +1,16 @@
 type DownloadClientType =
   "QBITTORRENT" |
   "DELUGE_V1" |
   "DELUGE_V2" |
   "RTORRENT" |
   "TRANSMISSION" |
   "PORLA" |
   "RADARR" |
   "SONARR" |
   "LIDARR" |
   "WHISPARR" |
-  "READARR";
+  "READARR" |
+  "SABNZBD";
 // export enum DownloadClientTypeEnum {
 //   QBITTORRENT = "QBITTORRENT",

@@ -24,7 +24,7 @@ interface FeedSettings {
 type FeedDownloadType = "MAGNET" | "TORRENT";
-type FeedType = "TORZNAB" | "RSS";
+type FeedType = "TORZNAB" | "NEWZNAB" | "RSS";
 interface FeedCreate {
   name: string;

@@ -24,6 +24,7 @@ interface IndexerDefinition {
   settings: IndexerSetting[];
   irc: IndexerIRC;
   torznab: IndexerTorznab;
+  newznab?: IndexerTorznab;
   rss: IndexerFeed;
   parse: IndexerParse;
 }