feat: add support for proxies to use with IRC and Indexers (#1421)

* feat: add support for proxies

* fix(http): release handler

* fix(migrations): define proxy early

* fix(migrations): pg proxy

* fix(proxy): list update delete

* fix(proxy): remove log and imports

* feat(irc): use proxy

* feat(irc): tests

* fix(web): update imports for ProxyForms.tsx

* fix(database): migration

* feat(proxy): test

* feat(proxy): validate proxy type

* feat(proxy): validate and test

* feat(proxy): improve validate and test

* feat(proxy): fix db schema

* feat(proxy): add db tests

* feat(proxy): handle http errors

* fix(http): imports

* feat(proxy): use proxy for indexer downloads

* feat(proxy): indexerforms select proxy

* feat(proxy): handle torrent download

* feat(proxy): skip if disabled

* feat(proxy): imports

* feat(proxy): implement in Feeds

* feat(proxy): update helper text indexer proxy

* feat(proxy): add internal cache
This commit is contained in:
ze0s 2024-09-02 11:10:45 +02:00 committed by GitHub
parent 472d327308
commit bc0f4cc055
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
59 changed files with 2533 additions and 371 deletions

View file

@@ -42,10 +42,23 @@ func NewFeedParser(timeout time.Duration, cookie string) *RSSParser {
}
c.http.Timeout = timeout
c.parser.Client = httpClient
return c
}
// WithHTTPClient swaps the parser's HTTP client for the given one (e.g. a
// proxied client). If the client has no cookie jar, a public-suffix-aware
// jar is attached so cookie-protected feeds keep working. The caller's
// client is mutated directly; a nil client is ignored.
func (c *RSSParser) WithHTTPClient(client *http.Client) {
	if client == nil {
		// Nothing sensible to install; keep the existing client rather
		// than panic on a nil dereference below.
		return
	}
	if client.Jar == nil {
		// cookiejar.New only fails on invalid options; with a valid
		// PublicSuffixList the error is always nil, hence ignored.
		jar, _ := cookiejar.New(&cookiejar.Options{PublicSuffixList: publicsuffix.List})
		client.Jar = jar
	}
	c.http = client
	c.parser.Client = client
}
func (c *RSSParser) ParseURLWithContext(ctx context.Context, feedURL string) (feed *gofeed.Feed, err error) {
req, err := http.NewRequestWithContext(ctx, http.MethodGet, feedURL, nil)
if err != nil {

View file

@@ -10,6 +10,7 @@ import (
"time"
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/internal/proxy"
"github.com/autobrr/autobrr/internal/release"
"github.com/autobrr/autobrr/internal/scheduler"
"github.com/autobrr/autobrr/pkg/errors"
@@ -129,6 +130,18 @@ func (j *NewznabJob) process(ctx context.Context) error {
}
func (j *NewznabJob) getFeed(ctx context.Context) ([]newznab.FeedItem, error) {
// add proxy if enabled and exists
if j.Feed.UseProxy && j.Feed.Proxy != nil {
proxyClient, err := proxy.GetProxiedHTTPClient(j.Feed.Proxy)
if err != nil {
return nil, errors.Wrap(err, "could not get proxy client")
}
j.Client.WithHTTPClient(proxyClient)
j.Log.Debug().Msgf("using proxy %s for feed %s", j.Feed.Proxy.Name, j.Feed.Name)
}
// get feed
feed, err := j.Client.GetFeed(ctx)
if err != nil {
@@ -156,36 +169,34 @@ func (j *NewznabJob) getFeed(ctx context.Context) ([]newznab.FeedItem, error) {
// set ttl to 1 month
ttl := time.Now().AddDate(0, 1, 0)
for _, i := range feed.Channel.Items {
i := i
if i.GUID == "" {
for _, item := range feed.Channel.Items {
if item.GUID == "" {
j.Log.Error().Msgf("missing GUID from feed: %s", j.Feed.Name)
continue
}
exists, err := j.CacheRepo.Exists(j.Feed.ID, i.GUID)
exists, err := j.CacheRepo.Exists(j.Feed.ID, item.GUID)
if err != nil {
j.Log.Error().Err(err).Msg("could not check if item exists")
continue
}
if exists {
j.Log.Trace().Msgf("cache item exists, skipping release: %s", i.Title)
j.Log.Trace().Msgf("cache item exists, skipping release: %s", item.Title)
continue
}
j.Log.Debug().Msgf("found new release: %s", i.Title)
j.Log.Debug().Msgf("found new release: %s", item.Title)
toCache = append(toCache, domain.FeedCacheItem{
FeedId: strconv.Itoa(j.Feed.ID),
Key: i.GUID,
Value: []byte(i.Title),
Key: item.GUID,
Value: []byte(item.Title),
TTL: ttl,
})
// only append if we successfully added to cache
items = append(items, *i)
items = append(items, *item)
}
if len(toCache) > 0 {

View file

@@ -13,6 +13,7 @@ import (
"time"
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/internal/proxy"
"github.com/autobrr/autobrr/internal/release"
"github.com/autobrr/autobrr/pkg/errors"
@@ -93,7 +94,6 @@ func (j *RSSJob) process(ctx context.Context) error {
releases := make([]*domain.Release, 0)
for _, item := range items {
item := item
j.Log.Debug().Msgf("item: %v", item.Title)
rls := j.processItem(item)
@ -139,7 +139,7 @@ func (j *RSSJob) processItem(item *gofeed.Item) *domain.Release {
}
if j.Feed.Settings != nil && j.Feed.Settings.DownloadType == domain.FeedDownloadTypeMagnet {
if !strings.HasPrefix(rls.MagnetURI, "magnet:?") && strings.HasPrefix(e.URL, "magnet:?") {
if !strings.HasPrefix(rls.MagnetURI, domain.MagnetURIPrefix) && strings.HasPrefix(e.URL, domain.MagnetURIPrefix) {
rls.MagnetURI = e.URL
rls.DownloadURL = ""
}
@ -232,7 +232,20 @@ func (j *RSSJob) getFeed(ctx context.Context) (items []*gofeed.Item, err error)
ctx, cancel := context.WithTimeout(ctx, j.Timeout)
defer cancel()
feed, err := NewFeedParser(j.Timeout, j.Feed.Cookie).ParseURLWithContext(ctx, j.URL)
feedParser := NewFeedParser(j.Timeout, j.Feed.Cookie)
if j.Feed.UseProxy && j.Feed.Proxy != nil {
proxyClient, err := proxy.GetProxiedHTTPClient(j.Feed.Proxy)
if err != nil {
return nil, errors.Wrap(err, "could not get proxy client")
}
feedParser.WithHTTPClient(proxyClient)
j.Log.Debug().Msgf("using proxy %s for feed %s", j.Feed.Proxy.Name, j.Feed.Name)
}
feed, err := feedParser.ParseURLWithContext(ctx, j.URL)
if err != nil {
return nil, errors.Wrap(err, "error fetching rss feed items")
}
@ -257,9 +270,7 @@ func (j *RSSJob) getFeed(ctx context.Context) (items []*gofeed.Item, err error)
// set ttl to 1 month
ttl := time.Now().AddDate(0, 1, 0)
for _, i := range feed.Items {
item := i
for _, item := range feed.Items {
key := item.GUID
if len(key) == 0 {
key = item.Link
@ -278,12 +289,12 @@ func (j *RSSJob) getFeed(ctx context.Context) (items []*gofeed.Item, err error)
continue
}
j.Log.Debug().Msgf("found new release: %s", i.Title)
j.Log.Debug().Msgf("found new release: %s", item.Title)
toCache = append(toCache, domain.FeedCacheItem{
FeedId: strconv.Itoa(j.Feed.ID),
Key: key,
Value: []byte(i.Title),
Value: []byte(item.Title),
TTL: ttl,
})

View file

@ -11,6 +11,7 @@ import (
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/internal/logger"
"github.com/autobrr/autobrr/internal/proxy"
"github.com/autobrr/autobrr/internal/release"
"github.com/autobrr/autobrr/internal/scheduler"
"github.com/autobrr/autobrr/pkg/errors"
@ -68,16 +69,18 @@ type service struct {
repo domain.FeedRepo
cacheRepo domain.FeedCacheRepo
releaseSvc release.Service
proxySvc proxy.Service
scheduler scheduler.Service
}
func NewService(log logger.Logger, repo domain.FeedRepo, cacheRepo domain.FeedCacheRepo, releaseSvc release.Service, scheduler scheduler.Service) Service {
// NewService constructs the feed service with its repositories and
// collaborating services wired in. The supplied logger is scoped to the
// "feed" module, and the job registry starts out empty.
func NewService(log logger.Logger, repo domain.FeedRepo, cacheRepo domain.FeedCacheRepo, releaseSvc release.Service, proxySvc proxy.Service, scheduler scheduler.Service) Service {
	svc := &service{
		log:        log.With().Str("module", "feed").Logger(),
		jobs:       make(map[string]int),
		repo:       repo,
		cacheRepo:  cacheRepo,
		releaseSvc: releaseSvc,
		proxySvc:   proxySvc,
		scheduler:  scheduler,
	}
	return svc
}
@ -150,6 +153,13 @@ func (s *service) update(ctx context.Context, feed *domain.Feed) error {
return err
}
// get Feed again for ProxyID and UseProxy to be correctly populated
feed, err := s.repo.FindByID(ctx, feed.ID)
if err != nil {
s.log.Error().Err(err).Msg("error finding feed")
return err
}
if err := s.restartJob(feed); err != nil {
s.log.Error().Err(err).Msg("error restarting feed")
return err
@ -227,6 +237,18 @@ func (s *service) test(ctx context.Context, feed *domain.Feed) error {
// create sub logger
subLogger := zstdlog.NewStdLoggerWithLevel(s.log.With().Logger(), zerolog.DebugLevel)
// add proxy conf
if feed.UseProxy {
proxyConf, err := s.proxySvc.FindByID(ctx, feed.ProxyID)
if err != nil {
return errors.Wrap(err, "could not find proxy for indexer feed")
}
if proxyConf.Enabled {
feed.Proxy = proxyConf
}
}
// test feeds
switch feed.Type {
case string(domain.FeedTypeTorznab):
@ -254,13 +276,27 @@ func (s *service) test(ctx context.Context, feed *domain.Feed) error {
}
func (s *service) testRSS(ctx context.Context, feed *domain.Feed) error {
f, err := NewFeedParser(time.Duration(feed.Timeout)*time.Second, feed.Cookie).ParseURLWithContext(ctx, feed.URL)
feedParser := NewFeedParser(time.Duration(feed.Timeout)*time.Second, feed.Cookie)
// add proxy if enabled and exists
if feed.UseProxy && feed.Proxy != nil {
proxyClient, err := proxy.GetProxiedHTTPClient(feed.Proxy)
if err != nil {
return errors.Wrap(err, "could not get proxy client")
}
feedParser.WithHTTPClient(proxyClient)
s.log.Debug().Msgf("using proxy %s for feed %s", feed.Proxy.Name, feed.Name)
}
feedResponse, err := feedParser.ParseURLWithContext(ctx, feed.URL)
if err != nil {
s.log.Error().Err(err).Msgf("error fetching rss feed items")
return errors.Wrap(err, "error fetching rss feed items")
}
s.log.Info().Msgf("refreshing rss feed: %s, found (%d) items", feed.Name, len(f.Items))
s.log.Info().Msgf("refreshing rss feed: %s, found (%d) items", feed.Name, len(feedResponse.Items))
return nil
}
@ -269,6 +305,18 @@ func (s *service) testTorznab(ctx context.Context, feed *domain.Feed, subLogger
// setup torznab Client
c := torznab.NewClient(torznab.Config{Host: feed.URL, ApiKey: feed.ApiKey, Log: subLogger})
// add proxy if enabled and exists
if feed.UseProxy && feed.Proxy != nil {
proxyClient, err := proxy.GetProxiedHTTPClient(feed.Proxy)
if err != nil {
return errors.Wrap(err, "could not get proxy client")
}
c.WithHTTPClient(proxyClient)
s.log.Debug().Msgf("using proxy %s for feed %s", feed.Proxy.Name, feed.Name)
}
items, err := c.FetchFeed(ctx)
if err != nil {
s.log.Error().Err(err).Msg("error getting torznab feed")
@ -284,6 +332,18 @@ func (s *service) testNewznab(ctx context.Context, feed *domain.Feed, subLogger
// setup newznab Client
c := newznab.NewClient(newznab.Config{Host: feed.URL, ApiKey: feed.ApiKey, Log: subLogger})
// add proxy if enabled and exists
if feed.UseProxy && feed.Proxy != nil {
proxyClient, err := proxy.GetProxiedHTTPClient(feed.Proxy)
if err != nil {
return errors.Wrap(err, "could not get proxy client")
}
c.WithHTTPClient(proxyClient)
s.log.Debug().Msgf("using proxy %s for feed %s", feed.Proxy.Name, feed.Name)
}
items, err := c.GetFeed(ctx)
if err != nil {
s.log.Error().Err(err).Msg("error getting newznab feed")
@ -316,8 +376,6 @@ func (s *service) start() error {
s.log.Debug().Msgf("preparing staggered start of %d feeds", len(feeds))
for _, feed := range feeds {
feed := feed
if !feed.Enabled {
s.log.Trace().Msgf("feed disabled, skipping... %s", feed.Name)
continue
@ -408,6 +466,18 @@ func (s *service) startJob(f *domain.Feed) error {
return errors.New("no URL provided for feed: %s", f.Name)
}
// add proxy conf
if f.UseProxy {
proxyConf, err := s.proxySvc.FindByID(context.Background(), f.ProxyID)
if err != nil {
return errors.Wrap(err, "could not find proxy for indexer feed")
}
if proxyConf.Enabled {
f.Proxy = proxyConf
}
}
fi := newFeedInstance(f)
job, err := s.initializeFeedJob(fi)

View file

@ -5,6 +5,7 @@ package feed
import (
"context"
"github.com/autobrr/autobrr/internal/proxy"
"math"
"sort"
"strconv"
@ -224,6 +225,18 @@ func mapFreeleechToBonus(percentage int) string {
}
func (j *TorznabJob) getFeed(ctx context.Context) ([]torznab.FeedItem, error) {
// add proxy if enabled and exists
if j.Feed.UseProxy && j.Feed.Proxy != nil {
proxyClient, err := proxy.GetProxiedHTTPClient(j.Feed.Proxy)
if err != nil {
return nil, errors.Wrap(err, "could not get proxy client")
}
j.Client.WithHTTPClient(proxyClient)
j.Log.Debug().Msgf("using proxy %s for feed %s", j.Feed.Proxy.Name, j.Feed.Name)
}
// get feed
feed, err := j.Client.FetchFeed(ctx)
if err != nil {
@ -251,35 +264,33 @@ func (j *TorznabJob) getFeed(ctx context.Context) ([]torznab.FeedItem, error) {
// set ttl to 1 month
ttl := time.Now().AddDate(0, 1, 0)
for _, i := range feed.Channel.Items {
i := i
if i.GUID == "" {
for _, item := range feed.Channel.Items {
if item.GUID == "" {
j.Log.Error().Msgf("missing GUID from feed: %s", j.Feed.Name)
continue
}
exists, err := j.CacheRepo.Exists(j.Feed.ID, i.GUID)
exists, err := j.CacheRepo.Exists(j.Feed.ID, item.GUID)
if err != nil {
j.Log.Error().Err(err).Msg("could not check if item exists")
continue
}
if exists {
j.Log.Trace().Msgf("cache item exists, skipping release: %s", i.Title)
j.Log.Trace().Msgf("cache item exists, skipping release: %s", item.Title)
continue
}
j.Log.Debug().Msgf("found new release: %s", i.Title)
j.Log.Debug().Msgf("found new release: %s", item.Title)
toCache = append(toCache, domain.FeedCacheItem{
FeedId: strconv.Itoa(j.Feed.ID),
Key: i.GUID,
Value: []byte(i.Title),
Key: item.GUID,
Value: []byte(item.Title),
TTL: ttl,
})
// only append if we successfully added to cache
items = append(items, *i)
items = append(items, *item)
}
if len(toCache) > 0 {