Mirror of https://github.com/idanoo/autobrr (synced 2025-07-22 16:29:12 +00:00)
feat: add support for proxies to use with IRC and Indexers (#1421)
* feat: add support for proxies
* fix(http): release handler
* fix(migrations): define proxy early
* fix(migrations): pg proxy
* fix(proxy): list update delete
* fix(proxy): remove log and imports
* feat(irc): use proxy
* feat(irc): tests
* fix(web): update imports for ProxyForms.tsx
* fix(database): migration
* feat(proxy): test
* feat(proxy): validate proxy type
* feat(proxy): validate and test
* feat(proxy): improve validate and test
* feat(proxy): fix db schema
* feat(proxy): add db tests
* feat(proxy): handle http errors
* fix(http): imports
* feat(proxy): use proxy for indexer downloads
* feat(proxy): indexerforms select proxy
* feat(proxy): handle torrent download
* feat(proxy): skip if disabled
* feat(proxy): imports
* feat(proxy): implement in Feeds
* feat(proxy): update helper text indexer proxy
* feat(proxy): add internal cache
Parent: 472d327308
Commit: bc0f4cc055

59 changed files with 2533 additions and 371 deletions
```diff
@@ -11,6 +11,7 @@ import (
 	"github.com/autobrr/autobrr/internal/domain"
 	"github.com/autobrr/autobrr/internal/logger"
+	"github.com/autobrr/autobrr/internal/proxy"
 	"github.com/autobrr/autobrr/internal/release"
 	"github.com/autobrr/autobrr/internal/scheduler"
 	"github.com/autobrr/autobrr/pkg/errors"
```
```diff
@@ -68,16 +69,18 @@ type service struct {
 	repo domain.FeedRepo
 	cacheRepo domain.FeedCacheRepo
 	releaseSvc release.Service
+	proxySvc proxy.Service
 	scheduler scheduler.Service
 }
 
-func NewService(log logger.Logger, repo domain.FeedRepo, cacheRepo domain.FeedCacheRepo, releaseSvc release.Service, scheduler scheduler.Service) Service {
+func NewService(log logger.Logger, repo domain.FeedRepo, cacheRepo domain.FeedCacheRepo, releaseSvc release.Service, proxySvc proxy.Service, scheduler scheduler.Service) Service {
 	return &service{
 		log: log.With().Str("module", "feed").Logger(),
 		jobs: map[string]int{},
 		repo: repo,
 		cacheRepo: cacheRepo,
 		releaseSvc: releaseSvc,
+		proxySvc: proxySvc,
 		scheduler: scheduler,
 	}
 }
```
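The feed service now takes a proxy service as a constructor dependency. For orientation, here is a sketch of the minimal surface this file actually relies on; the ID parameter type is an assumption, and the real interface in `internal/proxy` may expose more methods:

```go
package feed

import (
	"context"

	"github.com/autobrr/autobrr/internal/domain"
)

// proxyService is a sketch only: the subset of the proxy service that the
// feed service calls in this diff. The int64 ID type is an assumption.
type proxyService interface {
	FindByID(ctx context.Context, id int64) (*domain.Proxy, error)
}
```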
```diff
@@ -150,6 +153,13 @@ func (s *service) update(ctx context.Context, feed *domain.Feed) error {
 		return err
 	}
 
+	// get Feed again for ProxyID and UseProxy to be correctly populated
+	feed, err := s.repo.FindByID(ctx, feed.ID)
+	if err != nil {
+		s.log.Error().Err(err).Msg("error finding feed")
+		return err
+	}
+
 	if err := s.restartJob(feed); err != nil {
 		s.log.Error().Err(err).Msg("error restarting feed")
 		return err
```
```diff
@@ -227,6 +237,18 @@ func (s *service) test(ctx context.Context, feed *domain.Feed) error {
 	// create sub logger
 	subLogger := zstdlog.NewStdLoggerWithLevel(s.log.With().Logger(), zerolog.DebugLevel)
 
+	// add proxy conf
+	if feed.UseProxy {
+		proxyConf, err := s.proxySvc.FindByID(ctx, feed.ProxyID)
+		if err != nil {
+			return errors.Wrap(err, "could not find proxy for indexer feed")
+		}
+
+		if proxyConf.Enabled {
+			feed.Proxy = proxyConf
+		}
+	}
+
 	// test feeds
 	switch feed.Type {
 	case string(domain.FeedTypeTorznab):
```
```diff
@@ -254,13 +276,27 @@ func (s *service) test(ctx context.Context, feed *domain.Feed) error {
 }
 
 func (s *service) testRSS(ctx context.Context, feed *domain.Feed) error {
-	f, err := NewFeedParser(time.Duration(feed.Timeout)*time.Second, feed.Cookie).ParseURLWithContext(ctx, feed.URL)
+	feedParser := NewFeedParser(time.Duration(feed.Timeout)*time.Second, feed.Cookie)
+
+	// add proxy if enabled and exists
+	if feed.UseProxy && feed.Proxy != nil {
+		proxyClient, err := proxy.GetProxiedHTTPClient(feed.Proxy)
+		if err != nil {
+			return errors.Wrap(err, "could not get proxy client")
+		}
+
+		feedParser.WithHTTPClient(proxyClient)
+
+		s.log.Debug().Msgf("using proxy %s for feed %s", feed.Proxy.Name, feed.Name)
+	}
+
+	feedResponse, err := feedParser.ParseURLWithContext(ctx, feed.URL)
 	if err != nil {
 		s.log.Error().Err(err).Msgf("error fetching rss feed items")
 		return errors.Wrap(err, "error fetching rss feed items")
 	}
 
-	s.log.Info().Msgf("refreshing rss feed: %s, found (%d) items", feed.Name, len(f.Items))
+	s.log.Info().Msgf("refreshing rss feed: %s, found (%d) items", feed.Name, len(feedResponse.Items))
 
 	return nil
 }
```
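`proxy.GetProxiedHTTPClient` itself is not shown in these hunks. As a rough illustration of what such a helper typically returns for a SOCKS5 proxy, here is a minimal sketch built on `golang.org/x/net/proxy`; the config type and its field names (`Addr`, `User`, `Pass`) are placeholders, not the actual `domain.Proxy` definition:

```go
package proxysketch

import (
	"net/http"
	"time"

	xproxy "golang.org/x/net/proxy"
)

// proxyConf stands in for domain.Proxy; field names are assumptions.
type proxyConf struct {
	Addr string // e.g. "127.0.0.1:1080"
	User string
	Pass string
}

// socks5HTTPClient returns an *http.Client whose connections are dialed
// through the given SOCKS5 proxy, similar in spirit to what a helper like
// proxy.GetProxiedHTTPClient is expected to produce.
func socks5HTTPClient(conf proxyConf) (*http.Client, error) {
	var auth *xproxy.Auth
	if conf.User != "" {
		auth = &xproxy.Auth{User: conf.User, Password: conf.Pass}
	}

	dialer, err := xproxy.SOCKS5("tcp", conf.Addr, auth, xproxy.Direct)
	if err != nil {
		return nil, err
	}

	transport := &http.Transport{}
	if cd, ok := dialer.(xproxy.ContextDialer); ok {
		transport.DialContext = cd.DialContext
	} else {
		transport.Dial = dialer.Dial
	}

	return &http.Client{Transport: transport, Timeout: 60 * time.Second}, nil
}
```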
```diff
@@ -269,6 +305,18 @@ func (s *service) testTorznab(ctx context.Context, feed *domain.Feed, subLogger
 	// setup torznab Client
 	c := torznab.NewClient(torznab.Config{Host: feed.URL, ApiKey: feed.ApiKey, Log: subLogger})
 
+	// add proxy if enabled and exists
+	if feed.UseProxy && feed.Proxy != nil {
+		proxyClient, err := proxy.GetProxiedHTTPClient(feed.Proxy)
+		if err != nil {
+			return errors.Wrap(err, "could not get proxy client")
+		}
+
+		c.WithHTTPClient(proxyClient)
+
+		s.log.Debug().Msgf("using proxy %s for feed %s", feed.Proxy.Name, feed.Name)
+	}
+
 	items, err := c.FetchFeed(ctx)
 	if err != nil {
 		s.log.Error().Err(err).Msg("error getting torznab feed")
```
```diff
@@ -284,6 +332,18 @@ func (s *service) testNewznab(ctx context.Context, feed *domain.Feed, subLogger
 	// setup newznab Client
 	c := newznab.NewClient(newznab.Config{Host: feed.URL, ApiKey: feed.ApiKey, Log: subLogger})
 
+	// add proxy if enabled and exists
+	if feed.UseProxy && feed.Proxy != nil {
+		proxyClient, err := proxy.GetProxiedHTTPClient(feed.Proxy)
+		if err != nil {
+			return errors.Wrap(err, "could not get proxy client")
+		}
+
+		c.WithHTTPClient(proxyClient)
+
+		s.log.Debug().Msgf("using proxy %s for feed %s", feed.Proxy.Name, feed.Name)
+	}
+
 	items, err := c.GetFeed(ctx)
 	if err != nil {
 		s.log.Error().Err(err).Msg("error getting newznab feed")
```
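The torznab and newznab clients, like the RSS `feedParser` above, follow the same pattern: a default `http.Client` that `WithHTTPClient` can swap for the proxied one. A generic sketch of that pattern, with illustrative names rather than the actual client internals:

```go
package sketch

import (
	"net/http"
	"time"
)

// Client illustrates the WithHTTPClient pattern used by the feed clients;
// names are illustrative, not the actual torznab/newznab implementation.
type Client struct {
	http *http.Client
}

func NewClient() *Client {
	// default client used when no proxy is configured
	return &Client{http: &http.Client{Timeout: 60 * time.Second}}
}

// WithHTTPClient swaps in a custom client, e.g. one returned by
// proxy.GetProxiedHTTPClient, so every request goes through the proxy.
func (c *Client) WithHTTPClient(client *http.Client) {
	c.http = client
}
```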
```diff
@@ -316,8 +376,6 @@ func (s *service) start() error {
 	s.log.Debug().Msgf("preparing staggered start of %d feeds", len(feeds))
 
 	for _, feed := range feeds {
-		feed := feed
-
 		if !feed.Enabled {
 			s.log.Trace().Msgf("feed disabled, skipping... %s", feed.Name)
 			continue
```
```diff
@@ -408,6 +466,18 @@ func (s *service) startJob(f *domain.Feed) error {
 		return errors.New("no URL provided for feed: %s", f.Name)
 	}
 
+	// add proxy conf
+	if f.UseProxy {
+		proxyConf, err := s.proxySvc.FindByID(context.Background(), f.ProxyID)
+		if err != nil {
+			return errors.Wrap(err, "could not find proxy for indexer feed")
+		}
+
+		if proxyConf.Enabled {
+			f.Proxy = proxyConf
+		}
+	}
+
 	fi := newFeedInstance(f)
 
 	job, err := s.initializeFeedJob(fi)
```
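The proxy lookup in `startJob` mirrors the one in `test()` above: resolve the configured proxy by ID and attach it to the feed only when it is enabled. Restated as a hypothetical helper (it does not exist in the diff) purely to make the flow explicit:

```go
// attachProxy is a hypothetical helper; the diff inlines this logic in
// both test() and startJob().
func (s *service) attachProxy(ctx context.Context, feed *domain.Feed) error {
	if !feed.UseProxy {
		return nil
	}

	proxyConf, err := s.proxySvc.FindByID(ctx, feed.ProxyID)
	if err != nil {
		return errors.Wrap(err, "could not find proxy for indexer feed")
	}

	// only attach the proxy when it is enabled; otherwise the feed
	// falls back to a direct connection
	if proxyConf.Enabled {
		feed.Proxy = proxyConf
	}

	return nil
}
```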