Mirror of https://github.com/idanoo/autobrr (synced 2025-07-23 00:39:13 +00:00)
feat(feeds): add scheduled cleanup (#1073)

* refactor(feeds): make feed scheduling more robust
* feat(feeds): add daily cleanup job
* removes feed cache older than 30 days
* fix(feeds): fmt wrong type

parent cfc2436d50
commit 6fd8626507

11 changed files with 301 additions and 215 deletions
internal/feed/cleanup.go (new file, 35 lines)
@@ -0,0 +1,35 @@
+// Copyright (c) 2021 - 2023, Ludvig Lundgren and the autobrr contributors.
+// SPDX-License-Identifier: GPL-2.0-or-later
+
+package feed
+
+import (
+    "context"
+    "time"
+
+    "github.com/autobrr/autobrr/internal/domain"
+
+    "github.com/rs/zerolog"
+)
+
+type CleanupJob struct {
+    log       zerolog.Logger
+    cacheRepo domain.FeedCacheRepo
+
+    CronSchedule time.Duration
+}
+
+func NewCleanupJob(log zerolog.Logger, cacheRepo domain.FeedCacheRepo) *CleanupJob {
+    return &CleanupJob{
+        log:       log,
+        cacheRepo: cacheRepo,
+    }
+}
+
+func (j *CleanupJob) Run() {
+    if err := j.cacheRepo.DeleteStale(context.Background()); err != nil {
+        j.log.Error().Err(err).Msg("error when running feed cache cleanup job")
+    }
+
+    j.log.Info().Msg("successfully ran feed-cache-cleanup job")
+}

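The Run method above hands the actual pruning to cacheRepo.DeleteStale, whose repository implementation is not part of this diff. Since cache entries are written with a TTL of one month (see the CacheRepo.Put calls in the hunks below), a SQL-backed DeleteStale would plausibly just drop rows whose TTL has passed. A minimal sketch under that assumption; the feed_cache table and ttl column names are guesses, not the project's actual schema:

package feedsketch

import (
    "context"
    "database/sql"
    "log"
    "time"
)

// deleteStaleSketch removes feed cache rows whose TTL has expired.
// Hypothetical stand-in for domain.FeedCacheRepo.DeleteStale; the real
// repository lives outside this commit, so the table and column names
// below are assumptions.
func deleteStaleSketch(ctx context.Context, db *sql.DB) error {
    // Entries are stored with ttl = time.Now().AddDate(0, 1, 0), so any row
    // whose ttl is already in the past is roughly 30 days old or older.
    res, err := db.ExecContext(ctx, "DELETE FROM feed_cache WHERE ttl < ?", time.Now())
    if err != nil {
        return err
    }

    rows, _ := res.RowsAffected()
    log.Printf("feed cache cleanup removed %d stale entries", rows)
    return nil
}
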
@@ -133,7 +133,7 @@ func (j *NewznabJob) getFeed(ctx context.Context) ([]newznab.FeedItem, error) {
        j.Log.Error().Err(err).Msgf("error updating last run for feed id: %v", j.Feed.ID)
    }

-    j.Log.Debug().Msgf("refreshing feed: %v, found (%d) items", j.Name, len(feed.Channel.Items))
+    j.Log.Debug().Msgf("refreshing feed: %s, found (%d) items", j.Name, len(feed.Channel.Items))

    items := make([]newznab.FeedItem, 0)
    if len(feed.Channel.Items) == 0 {
@@ -146,15 +146,16 @@ func (j *NewznabJob) getFeed(ctx context.Context) ([]newznab.FeedItem, error) {

    for _, i := range feed.Channel.Items {
        if i.GUID == "" {
-            j.Log.Error().Err(err).Msgf("missing GUID from feed: %s", j.Feed.Name)
+            j.Log.Error().Msgf("missing GUID from feed: %s", j.Feed.Name)
            continue
        }

-        exists, err := j.CacheRepo.Exists(j.Name, i.GUID)
+        exists, err := j.CacheRepo.Exists(j.Feed.ID, i.GUID)
        if err != nil {
            j.Log.Error().Err(err).Msg("could not check if item exists")
            continue
        }

        if exists {
            j.Log.Trace().Msgf("cache item exists, skipping release: %s", i.Title)
            continue
@@ -165,7 +166,7 @@ func (j *NewznabJob) getFeed(ctx context.Context) ([]newznab.FeedItem, error) {
        // set ttl to 1 month
        ttl := time.Now().AddDate(0, 1, 0)

-        if err := j.CacheRepo.Put(j.Name, i.GUID, []byte(i.Title), ttl); err != nil {
+        if err := j.CacheRepo.Put(j.Feed.ID, i.GUID, []byte(i.Title), ttl); err != nil {
            j.Log.Error().Stack().Err(err).Str("guid", i.GUID).Msg("cache.Put: error storing item in cache")
            continue
        }

@@ -6,7 +6,6 @@ package feed
import (
    "context"
    "encoding/xml"
-    "fmt"
    "net/url"
    "regexp"
    "time"
@@ -227,8 +226,6 @@ func (j *RSSJob) getFeed(ctx context.Context) (items []*gofeed.Item, err error)
        return
    }

-    bucketKey := fmt.Sprintf("%v+%v", j.IndexerIdentifier, j.Name)
-
    //sort.Sort(feed)

    // set ttl to 1 month
@@ -245,7 +242,7 @@ func (j *RSSJob) getFeed(ctx context.Context) (items []*gofeed.Item, err error)
            }
        }

-        exists, err := j.CacheRepo.Exists(bucketKey, key)
+        exists, err := j.CacheRepo.Exists(j.Feed.ID, key)
        if err != nil {
            j.Log.Error().Err(err).Msg("could not check if item exists")
            continue
@@ -257,7 +254,7 @@ func (j *RSSJob) getFeed(ctx context.Context) (items []*gofeed.Item, err error)

        j.Log.Debug().Msgf("found new release: %s", i.Title)

-        if err := j.CacheRepo.Put(bucketKey, key, []byte(item.Title), ttl); err != nil {
+        if err := j.CacheRepo.Put(j.Feed.ID, key, []byte(item.Title), ttl); err != nil {
            j.Log.Error().Err(err).Str("entry", key).Msg("cache.Put: error storing item in cache")
            continue
        }

@@ -7,7 +7,6 @@ import (
    "context"
    "fmt"
    "log"
-    "strconv"
    "time"

    "github.com/autobrr/autobrr/internal/domain"
@@ -20,6 +19,7 @@ import (

    "github.com/dcarbone/zadapters/zstdlog"
    "github.com/mmcdole/gofeed"
+    "github.com/robfig/cron/v3"
    "github.com/rs/zerolog"
)

@@ -27,7 +27,7 @@ type Service interface {
    FindByID(ctx context.Context, id int) (*domain.Feed, error)
    FindByIndexerIdentifier(ctx context.Context, indexer string) (*domain.Feed, error)
    Find(ctx context.Context) ([]domain.Feed, error)
-    GetCacheByID(ctx context.Context, bucket string) ([]domain.FeedCacheItem, error)
+    GetCacheByID(ctx context.Context, feedId int) ([]domain.FeedCacheItem, error)
    Store(ctx context.Context, feed *domain.Feed) error
    Update(ctx context.Context, feed *domain.Feed) error
    Test(ctx context.Context, feed *domain.Feed) error
@@ -35,6 +35,7 @@ type Service interface {
    Delete(ctx context.Context, id int) error
    DeleteFeedCache(ctx context.Context, id int) error
    GetLastRunData(ctx context.Context, id int) (string, error)
+    DeleteFeedCacheStale(ctx context.Context) error

    Start() error
}
@@ -50,14 +51,14 @@ type feedInstance struct {
    Timeout time.Duration
}

+// feedKey creates a unique identifier to be used for controlling jobs in the scheduler
type feedKey struct {
-    id      int
-    indexer string
-    name    string
+    id int
}

+// ToString creates a string of the unique id to be used for controlling jobs in the scheduler
func (k feedKey) ToString() string {
-    return fmt.Sprintf("%v+%v+%v", k.id, k.indexer, k.name)
+    return fmt.Sprintf("feed-%d", k.id)
}

type service struct {
@@ -93,22 +94,8 @@ func (s *service) Find(ctx context.Context) ([]domain.Feed, error) {
    return s.repo.Find(ctx)
}

-func (s *service) GetCacheByID(ctx context.Context, bucket string) ([]domain.FeedCacheItem, error) {
-    id, _ := strconv.Atoi(bucket)
-
-    feed, err := s.repo.FindByID(ctx, id)
-    if err != nil {
-        s.log.Error().Err(err).Msgf("could not find feed by id: %v", id)
-        return nil, err
-    }
-
-    data, err := s.cacheRepo.GetByBucket(ctx, feed.Name)
-    if err != nil {
-        s.log.Error().Err(err).Msg("could not get feed cache")
-        return nil, err
-    }
-
-    return data, err
+func (s *service) GetCacheByID(ctx context.Context, feedId int) ([]domain.FeedCacheItem, error) {
+    return s.cacheRepo.GetByFeed(ctx, feedId)
}

func (s *service) Store(ctx context.Context, feed *domain.Feed) error {
@@ -124,18 +111,11 @@ func (s *service) Delete(ctx context.Context, id int) error {
}

func (s *service) DeleteFeedCache(ctx context.Context, id int) error {
-    feed, err := s.repo.FindByID(ctx, id)
-    if err != nil {
-        s.log.Error().Err(err).Msgf("could not find feed by id: %d", id)
-        return err
-    }
+    return s.cacheRepo.DeleteByFeed(ctx, id)
+}

-    if err := s.cacheRepo.DeleteBucket(ctx, feed.Name); err != nil {
-        s.log.Error().Err(err).Msgf("could not clear feed cache: %d", id)
-        return err
-    }
-
-    return nil
+func (s *service) DeleteFeedCacheStale(ctx context.Context) error {
+    return s.cacheRepo.DeleteStale(ctx)
}

func (s *service) ToggleEnabled(ctx context.Context, id int, enabled bool) error {
@@ -171,22 +151,16 @@ func (s *service) delete(ctx context.Context, id int) error {
        return err
    }

-    s.log.Debug().Msgf("stopping and removing feed: %v", f.Name)
+    s.log.Debug().Msgf("stopping and removing feed: %s", f.Name)

-    identifierKey := feedKey{f.ID, f.Indexer, f.Name}.ToString()
-
-    if err := s.stopFeedJob(identifierKey); err != nil {
-        s.log.Error().Err(err).Msg("error stopping rss job")
+    if err := s.stopFeedJob(f.ID); err != nil {
+        s.log.Error().Err(err).Msgf("error stopping rss job: %s id: %d", f.Name, f.ID)
        return err
    }

-    if err := s.repo.Delete(ctx, id); err != nil {
-        s.log.Error().Err(err).Msg("error deleting feed")
-        return err
-    }
-
-    if err := s.cacheRepo.DeleteBucket(ctx, f.Name); err != nil {
-        s.log.Error().Err(err).Msgf("could not delete feedCache bucket by id: %v", id)
+    // delete feed and cascade delete feed_cache by fk
+    if err := s.repo.Delete(ctx, f.ID); err != nil {
+        s.log.Error().Err(err).Msgf("error deleting feed: %s", f.Name)
        return err
    }

@@ -215,23 +189,21 @@ func (s *service) toggleEnabled(ctx context.Context, id int, enabled bool) error
            return err
        }

-        s.log.Debug().Msgf("feed started: %v", f.Name)
-
-        return nil
-    } else {
-        s.log.Debug().Msgf("stopping feed: %v", f.Name)
-
-        identifierKey := feedKey{f.ID, f.Indexer, f.Name}.ToString()
-
-        if err := s.stopFeedJob(identifierKey); err != nil {
-            s.log.Error().Err(err).Msg("error stopping feed job")
-            return err
-        }
-
-        s.log.Debug().Msgf("feed stopped: %v", f.Name)
+        s.log.Debug().Msgf("feed started: %s", f.Name)
+
+        return nil
+    }
+
+    s.log.Debug().Msgf("stopping feed: %s", f.Name)
+
+    if err := s.stopFeedJob(f.ID); err != nil {
+        s.log.Error().Err(err).Msg("error stopping feed job")
+        return err
+    }
+
+    s.log.Debug().Msgf("feed stopped: %s", f.Name)

        return nil
    }

    return nil
@@ -274,7 +246,7 @@ func (s *service) testRSS(ctx context.Context, feed *domain.Feed) error {
        return errors.Wrap(err, "error fetching rss feed items")
    }

-    s.log.Info().Msgf("refreshing rss feed: %v, found (%d) items", feed.Name, len(f.Items))
+    s.log.Info().Msgf("refreshing rss feed: %s, found (%d) items", feed.Name, len(f.Items))

    return nil
}
@@ -289,7 +261,7 @@ func (s *service) testTorznab(ctx context.Context, feed *domain.Feed, subLogger
        return err
    }

-    s.log.Info().Msgf("refreshing torznab feed: %v, found (%d) items", feed.Name, len(items.Channel.Items))
+    s.log.Info().Msgf("refreshing torznab feed: %s, found (%d) items", feed.Name, len(items.Channel.Items))

    return nil
}
@@ -304,13 +276,18 @@ func (s *service) testNewznab(ctx context.Context, feed *domain.Feed, subLogger
        return err
    }

-    s.log.Info().Msgf("refreshing newznab feed: %v, found (%d) items", feed.Name, len(items.Channel.Items))
+    s.log.Info().Msgf("refreshing newznab feed: %s, found (%d) items", feed.Name, len(items.Channel.Items))

    return nil
}

func (s *service) start() error {
-    // get all torznab indexer definitions
+    // always run feed cache maintenance job
+    if err := s.createCleanupJob(); err != nil {
+        s.log.Error().Err(err).Msg("could not start feed cache cleanup job")
+    }
+
+    // get all feeds
    feeds, err := s.repo.Find(context.TODO())
    if err != nil {
        s.log.Error().Err(err).Msg("error finding feeds")
@@ -329,12 +306,10 @@ func (s *service) start() error {
}

func (s *service) restartJob(f *domain.Feed) error {
-    s.log.Debug().Msgf("stopping feed: %v", f.Name)
-
-    identifierKey := feedKey{f.ID, f.Indexer, f.Name}.ToString()
+    s.log.Debug().Msgf("stopping feed: %s", f.Name)

    // stop feed job
-    if err := s.stopFeedJob(identifierKey); err != nil {
+    if err := s.stopFeedJob(f.ID); err != nil {
        s.log.Error().Err(err).Msg("error stopping feed job")
        return err
    }
@@ -345,21 +320,21 @@ func (s *service) restartJob(f *domain.Feed) error {
            return err
        }

-        s.log.Debug().Msgf("restarted feed: %v", f.Name)
+        s.log.Debug().Msgf("restarted feed: %s", f.Name)
    }

    return nil
}

func (s *service) startJob(f *domain.Feed) error {
-    // get all torznab indexer definitions
+    // if it's not enabled we should not start it
    if !f.Enabled {
        return nil
    }

    // get torznab_url from settings
    if f.URL == "" {
-        return errors.New("no URL provided for feed: %v", f.Name)
+        return errors.New("no URL provided for feed: %s", f.Name)
    }

    // cron schedule to run every X minutes
@@ -374,32 +349,49 @@ func (s *service) startJob(f *domain.Feed) error {
        Timeout: time.Duration(f.Timeout) * time.Second,
    }

+    var err error
+    var job cron.Job
+
    switch fi.Implementation {
    case string(domain.FeedTypeTorznab):
-        if err := s.addTorznabJob(fi); err != nil {
-            s.log.Error().Err(err).Msg("failed to initialize torznab feed")
-            return err
-        }
+        job, err = s.createTorznabJob(fi)

    case string(domain.FeedTypeNewznab):
-        if err := s.addNewznabJob(fi); err != nil {
-            s.log.Error().Err(err).Msg("failed to initialize newznab feed")
-            return err
-        }
+        job, err = s.createNewznabJob(fi)

    case string(domain.FeedTypeRSS):
-        if err := s.addRSSJob(fi); err != nil {
-            s.log.Error().Err(err).Msg("failed to initialize rss feed")
-            return err
-        }
+        job, err = s.createRSSJob(fi)

    default:
        return errors.New("unsupported feed type: %s", fi.Implementation)
    }

+    if err != nil {
+        s.log.Error().Err(err).Msgf("failed to initialize %s feed", fi.Implementation)
+        return err
+    }
+
+    identifierKey := feedKey{f.ID}.ToString()
+
+    // schedule job
+    id, err := s.scheduler.ScheduleJob(job, fi.CronSchedule, identifierKey)
+    if err != nil {
+        return errors.Wrap(err, "add job %s failed", identifierKey)
+    }
+
+    // add to job map
+    s.jobs[identifierKey] = id
+
+    s.log.Debug().Msgf("successfully started feed: %s", f.Name)
+
    return nil
}

-func (s *service) addTorznabJob(f feedInstance) error {
+func (s *service) createTorznabJob(f feedInstance) (cron.Job, error) {
+    s.log.Debug().Msgf("create torznab job: %s", f.Name)

    if f.URL == "" {
-        return errors.New("torznab feed requires URL")
+        return nil, errors.New("torznab feed requires URL")
    }

    //if f.CronSchedule < 5*time.Minute {
@@ -410,62 +402,38 @@ func (s *service) addTorznabJob(f feedInstance) error {
    l := s.log.With().Str("feed", f.Name).Logger()

    // setup torznab Client
-    c := torznab.NewClient(torznab.Config{Host: f.URL, ApiKey: f.ApiKey, Timeout: f.Timeout})
+    client := torznab.NewClient(torznab.Config{Host: f.URL, ApiKey: f.ApiKey, Timeout: f.Timeout})

    // create job
-    job := NewTorznabJob(f.Feed, f.Name, f.IndexerIdentifier, l, f.URL, c, s.repo, s.cacheRepo, s.releaseSvc)
+    job := NewTorznabJob(f.Feed, f.Name, f.IndexerIdentifier, l, f.URL, client, s.repo, s.cacheRepo, s.releaseSvc)

-    identifierKey := feedKey{f.Feed.ID, f.Feed.Indexer, f.Feed.Name}.ToString()
-
-    // schedule job
-    id, err := s.scheduler.AddJob(job, f.CronSchedule, identifierKey)
-    if err != nil {
-        return errors.Wrap(err, "feed.AddTorznabJob: add job failed")
-    }
-    job.JobID = id
-
-    // add to job map
-    s.jobs[identifierKey] = id
-
-    s.log.Debug().Msgf("add torznab job: %v", f.Name)
-
-    return nil
+    return job, nil
}

-func (s *service) addNewznabJob(f feedInstance) error {
+func (s *service) createNewznabJob(f feedInstance) (cron.Job, error) {
    s.log.Debug().Msgf("add newznab job: %s", f.Name)

    if f.URL == "" {
-        return errors.New("newznab feed requires URL")
+        return nil, errors.New("newznab feed requires URL")
    }

    // setup logger
    l := s.log.With().Str("feed", f.Name).Logger()

    // setup newznab Client
-    c := newznab.NewClient(newznab.Config{Host: f.URL, ApiKey: f.ApiKey, Timeout: f.Timeout})
+    client := newznab.NewClient(newznab.Config{Host: f.URL, ApiKey: f.ApiKey, Timeout: f.Timeout})

    // create job
-    job := NewNewznabJob(f.Feed, f.Name, f.IndexerIdentifier, l, f.URL, c, s.repo, s.cacheRepo, s.releaseSvc)
+    job := NewNewznabJob(f.Feed, f.Name, f.IndexerIdentifier, l, f.URL, client, s.repo, s.cacheRepo, s.releaseSvc)

-    identifierKey := feedKey{f.Feed.ID, f.Feed.Indexer, f.Feed.Name}.ToString()
-
-    // schedule job
-    id, err := s.scheduler.AddJob(job, f.CronSchedule, identifierKey)
-    if err != nil {
-        return errors.Wrap(err, "feed.AddNewznabJob: add job failed")
-    }
-    job.JobID = id
-
-    // add to job map
-    s.jobs[identifierKey] = id
-
-    s.log.Debug().Msgf("add newznab job: %v", f.Name)
-
-    return nil
+    return job, nil
}

-func (s *service) addRSSJob(f feedInstance) error {
+func (s *service) createRSSJob(f feedInstance) (cron.Job, error) {
    s.log.Debug().Msgf("add rss job: %s", f.Name)

    if f.URL == "" {
-        return errors.New("rss feed requires URL")
+        return nil, errors.New("rss feed requires URL")
    }

    //if f.CronSchedule < time.Duration(5*time.Minute) {
@@ -478,36 +446,43 @@ func (s *service) addRSSJob(f feedInstance) error {
    // create job
    job := NewRSSJob(f.Feed, f.Name, f.IndexerIdentifier, l, f.URL, s.repo, s.cacheRepo, s.releaseSvc, f.Timeout)

-    identifierKey := feedKey{f.Feed.ID, f.Feed.Indexer, f.Feed.Name}.ToString()
+    return job, nil
+}

-    // schedule job
-    id, err := s.scheduler.AddJob(job, f.CronSchedule, identifierKey)
+func (s *service) createCleanupJob() error {
+    // setup logger
+    l := s.log.With().Str("job", "feed-cache-cleanup").Logger()
+
+    // create job
+    job := NewCleanupJob(l, s.cacheRepo)
+
+    identifierKey := "feed-cache-cleanup"
+
+    // schedule job for every day at 03:05
+    id, err := s.scheduler.AddJob(job, "5 3 * * *", identifierKey)
    if err != nil {
-        return errors.Wrap(err, "feed.AddRSSJob: add job failed")
+        return errors.Wrap(err, "add job %s failed", identifierKey)
    }
-    job.JobID = id

    // add to job map
    s.jobs[identifierKey] = id

-    s.log.Debug().Msgf("add rss job: %v", f.Name)
-
    return nil
}

-func (s *service) stopFeedJob(indexer string) error {
+func (s *service) stopFeedJob(id int) error {
    // remove job from scheduler
-    if err := s.scheduler.RemoveJobByIdentifier(indexer); err != nil {
+    if err := s.scheduler.RemoveJobByIdentifier(feedKey{id}.ToString()); err != nil {
        return errors.Wrap(err, "stop job failed")
    }

-    s.log.Debug().Msgf("stop feed job: %v", indexer)
+    s.log.Debug().Msgf("stop feed job: %d", id)

    return nil
}

-func (s *service) GetNextRun(indexer string) (time.Time, error) {
-    return s.scheduler.GetNextRun(indexer)
+func (s *service) GetNextRun(id int) (time.Time, error) {
+    return s.scheduler.GetNextRun(feedKey{id}.ToString())
}

func (s *service) GetLastRunData(ctx context.Context, id int) (string, error) {

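For reference, createCleanupJob above registers the cleanup under the key "feed-cache-cleanup" with the cron spec "5 3 * * *", i.e. 03:05 every day. The internal scheduler.AddJob wrapper is not shown in this commit, but the spec itself can be checked with the robfig/cron/v3 parser that service.go imports; a small illustrative sketch, not code from the commit:

package feedsketch

import (
    "time"

    "github.com/robfig/cron/v3"
)

// nextCleanupRuns shows how the "5 3 * * *" spec maps onto wall-clock
// times: minute 5, hour 3, every day of every month. Starting from
// 2023-06-01 12:00 UTC it yields 2023-06-02 03:05 UTC, then
// 2023-06-03 03:05 UTC, and so on.
func nextCleanupRuns(from time.Time, n int) ([]time.Time, error) {
    schedule, err := cron.ParseStandard("5 3 * * *")
    if err != nil {
        return nil, err
    }

    runs := make([]time.Time, 0, n)
    next := from
    for i := 0; i < n; i++ {
        next = schedule.Next(next)
        runs = append(runs, next)
    }
    return runs, nil
}
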
@@ -214,7 +214,7 @@ func (j *TorznabJob) getFeed(ctx context.Context) ([]torznab.FeedItem, error) {
            continue
        }

-        exists, err := j.CacheRepo.Exists(j.Name, i.GUID)
+        exists, err := j.CacheRepo.Exists(j.Feed.ID, i.GUID)
        if err != nil {
            j.Log.Error().Err(err).Msg("could not check if item exists")
            continue
@@ -229,7 +229,7 @@ func (j *TorznabJob) getFeed(ctx context.Context) ([]torznab.FeedItem, error) {
        // set ttl to 1 month
        ttl := time.Now().AddDate(0, 1, 0)

-        if err := j.CacheRepo.Put(j.Name, i.GUID, []byte(i.Title), ttl); err != nil {
+        if err := j.CacheRepo.Put(j.Feed.ID, i.GUID, []byte(i.Title), ttl); err != nil {
            j.Log.Error().Stack().Err(err).Str("guid", i.GUID).Msg("cache.Put: error storing item in cache")
            continue
        }