feat(feed): Configurable request timeout (#456)

* feat(feed): Add field for setting request timeout

* fix: missing type in interface

* feat: add postgres migration and column to base schema
This commit is contained in:
paperclip-go-brr 2022-10-04 17:33:35 +02:00 committed by GitHub
parent 47eaeaa635
commit 72be86a34f
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
10 changed files with 42 additions and 14 deletions

View file

@@ -34,6 +34,7 @@ func (r *FeedRepo) FindByID(ctx context.Context, id int) (*domain.Feed, error) {
"enabled",
"url",
"interval",
"timeout",
"api_key",
"created_at",
"updated_at",
@@ -55,7 +56,7 @@ func (r *FeedRepo) FindByID(ctx context.Context, id int) (*domain.Feed, error) {
var apiKey sql.NullString
if err := row.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &apiKey, &f.CreatedAt, &f.UpdatedAt); err != nil {
if err := row.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &apiKey, &f.CreatedAt, &f.UpdatedAt); err != nil {
return nil, errors.Wrap(err, "error scanning row")
}
@@ -75,6 +76,7 @@ func (r *FeedRepo) FindByIndexerIdentifier(ctx context.Context, indexer string)
"enabled",
"url",
"interval",
"timeout",
"api_key",
"created_at",
"updated_at",
@@ -96,7 +98,7 @@ func (r *FeedRepo) FindByIndexerIdentifier(ctx context.Context, indexer string)
var apiKey sql.NullString
if err := row.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &apiKey, &f.CreatedAt, &f.UpdatedAt); err != nil {
if err := row.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &apiKey, &f.CreatedAt, &f.UpdatedAt); err != nil {
return nil, errors.Wrap(err, "error scanning row")
}
@@ -116,6 +118,7 @@ func (r *FeedRepo) Find(ctx context.Context) ([]domain.Feed, error) {
"enabled",
"url",
"interval",
"timeout",
"api_key",
"created_at",
"updated_at",
@@ -141,7 +144,7 @@ func (r *FeedRepo) Find(ctx context.Context) ([]domain.Feed, error) {
var apiKey sql.NullString
if err := rows.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &apiKey, &f.CreatedAt, &f.UpdatedAt); err != nil {
if err := rows.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &apiKey, &f.CreatedAt, &f.UpdatedAt); err != nil {
return nil, errors.Wrap(err, "error scanning row")
}
@@ -164,6 +167,7 @@ func (r *FeedRepo) Store(ctx context.Context, feed *domain.Feed) error {
"enabled",
"url",
"interval",
"timeout",
"api_key",
"indexer_id",
).
@@ -174,6 +178,7 @@ func (r *FeedRepo) Store(ctx context.Context, feed *domain.Feed) error {
feed.Enabled,
feed.URL,
feed.Interval,
feed.Timeout,
feed.ApiKey,
feed.IndexerID,
).
@@ -199,6 +204,7 @@ func (r *FeedRepo) Update(ctx context.Context, feed *domain.Feed) error {
Set("enabled", feed.Enabled).
Set("url", feed.URL).
Set("interval", feed.Interval).
Set("timeout", feed.Timeout).
Set("api_key", feed.ApiKey).
Where("id = ?", feed.ID)

View file

@@ -298,6 +298,7 @@ CREATE TABLE feed
enabled BOOLEAN,
url TEXT,
interval INTEGER,
timeout INTEGER DEFAULT 60,
categories TEXT [] DEFAULT '{}' NOT NULL,
capabilities TEXT [] DEFAULT '{}' NOT NULL,
api_key TEXT,
@@ -557,4 +558,7 @@ CREATE INDEX indexer_identifier_index
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
`,
`ALTER TABLE feed
ADD COLUMN timeout INTEGER DEFAULT 60;
`,
}

View file

@@ -281,6 +281,7 @@ CREATE TABLE feed
enabled BOOLEAN,
url TEXT,
interval INTEGER,
timeout INTEGER DEFAULT 60,
categories TEXT [] DEFAULT '{}' NOT NULL,
capabilities TEXT [] DEFAULT '{}' NOT NULL,
api_key TEXT,
@@ -877,4 +878,7 @@ CREATE INDEX indexer_identifier_index
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
`,
`ALTER TABLE feed
ADD COLUMN timeout INTEGER DEFAULT 60;
`,
}

View file

@@ -31,6 +31,7 @@ type Feed struct {
Enabled bool `json:"enabled"`
URL string `json:"url"`
Interval int `json:"interval"`
Timeout int `json:"timeout"`
Capabilities []string `json:"capabilities"`
ApiKey string `json:"api_key"`
Settings map[string]string `json:"settings"`

View file

@@ -21,6 +21,7 @@ type RSSJob struct {
URL string
Repo domain.FeedCacheRepo
ReleaseSvc release.Service
Timeout time.Duration
attempts int
errors []error
@@ -28,7 +29,7 @@ type RSSJob struct {
JobID int
}
func NewRSSJob(name string, indexerIdentifier string, log zerolog.Logger, url string, repo domain.FeedCacheRepo, releaseSvc release.Service) *RSSJob {
func NewRSSJob(name string, indexerIdentifier string, log zerolog.Logger, url string, repo domain.FeedCacheRepo, releaseSvc release.Service, timeout time.Duration) *RSSJob {
return &RSSJob{
Name: name,
IndexerIdentifier: indexerIdentifier,
@@ -36,6 +37,7 @@ func NewRSSJob(name string, indexerIdentifier string, log zerolog.Logger, url st
URL: url,
Repo: repo,
ReleaseSvc: releaseSvc,
Timeout: timeout,
}
}
@@ -140,7 +142,7 @@ func (j *RSSJob) processItem(item *gofeed.Item) *domain.Release {
}
func (j *RSSJob) getFeed() (items []*gofeed.Item, err error) {
ctx, cancel := context.WithTimeout(context.Background(), 60*time.Second)
ctx, cancel := context.WithTimeout(context.Background(), j.Timeout)
defer cancel()
feed, err := gofeed.NewParser().ParseURLWithContext(j.URL, ctx) // there's an RSS specific parser as well.

View file

@@ -35,6 +35,7 @@ type feedInstance struct {
ApiKey string
Implementation string
CronSchedule time.Duration
Timeout time.Duration
}
type service struct {
@@ -300,6 +301,7 @@ func (s *service) startJob(f domain.Feed) error {
URL: f.URL,
ApiKey: f.ApiKey,
CronSchedule: time.Duration(f.Interval) * time.Minute,
Timeout: time.Duration(f.Timeout) * time.Second,
}
switch fi.Implementation {
@@ -330,7 +332,7 @@ func (s *service) addTorznabJob(f feedInstance) error {
l := s.log.With().Str("feed", f.Name).Logger()
// setup torznab Client
c := torznab.NewClient(torznab.Config{Host: f.URL, ApiKey: f.ApiKey})
c := torznab.NewClient(torznab.Config{Host: f.URL, ApiKey: f.ApiKey, Timeout: f.Timeout})
// create job
job := NewTorznabJob(f.Name, f.IndexerIdentifier, l, f.URL, c, s.cacheRepo, s.releaseSvc)
@@ -373,7 +375,7 @@ func (s *service) addRSSJob(f feedInstance) error {
l := s.log.With().Str("feed", f.Name).Logger()
// create job
job := NewRSSJob(f.Name, f.IndexerIdentifier, l, f.URL, s.cacheRepo, s.releaseSvc)
job := NewRSSJob(f.Name, f.IndexerIdentifier, l, f.URL, s.cacheRepo, s.releaseSvc, f.Timeout)
// schedule job
id, err := s.scheduler.AddJob(job, f.CronSchedule, f.IndexerIdentifier)