Mirror of https://github.com/idanoo/autobrr (synced 2025-07-23 16:59:12 +00:00)
feat(feeds): improve RSS (#502)
* feat(feeds): improve rss
* save last_run time
* remove interval check
* refactor feed job keys
* add rss test
* add max_age check
* feat(feeds): rss basic freeleech parsing
* feat(feeds): rss cookie support
* feat(feeds): db get max_age
* feat(feeds): update log messages
* feat(feeds): pass cookie to release for download
* feat(feeds): improve size parsing
* feat(feeds): improve datetime check
Parent: ac988f28f4
Commit: e2bb14afa4

15 changed files with 741 additions and 209 deletions
internal/database/feed.go:

@@ -35,7 +35,9 @@ func (r *FeedRepo) FindByID(ctx context.Context, id int) (*domain.Feed, error) {
 		"url",
 		"interval",
 		"timeout",
+		"max_age",
 		"api_key",
+		"cookie",
 		"created_at",
 		"updated_at",
 	).
@@ -54,14 +56,15 @@ func (r *FeedRepo) FindByID(ctx context.Context, id int) (*domain.Feed, error) {

 	var f domain.Feed

-	var apiKey sql.NullString
+	var apiKey, cookie sql.NullString

-	if err := row.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &apiKey, &f.CreatedAt, &f.UpdatedAt); err != nil {
+	if err := row.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &f.MaxAge, &apiKey, &cookie, &f.CreatedAt, &f.UpdatedAt); err != nil {
 		return nil, errors.Wrap(err, "error scanning row")
 	}

 	f.ApiKey = apiKey.String
+	f.Cookie = cookie.String

 	return &f, nil
 }
@@ -77,7 +80,9 @@ func (r *FeedRepo) FindByIndexerIdentifier(ctx context.Context, indexer string)
 		"url",
 		"interval",
 		"timeout",
+		"max_age",
 		"api_key",
+		"cookie",
 		"created_at",
 		"updated_at",
 	).
@@ -96,14 +101,15 @@ func (r *FeedRepo) FindByIndexerIdentifier(ctx context.Context, indexer string)

 	var f domain.Feed

-	var apiKey sql.NullString
+	var apiKey, cookie sql.NullString

-	if err := row.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &apiKey, &f.CreatedAt, &f.UpdatedAt); err != nil {
+	if err := row.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &f.MaxAge, &apiKey, &cookie, &f.CreatedAt, &f.UpdatedAt); err != nil {
 		return nil, errors.Wrap(err, "error scanning row")
 	}

 	f.ApiKey = apiKey.String
+	f.Cookie = cookie.String

 	return &f, nil
 }
@@ -119,7 +125,11 @@ func (r *FeedRepo) Find(ctx context.Context) ([]domain.Feed, error) {
 		"url",
 		"interval",
 		"timeout",
+		"max_age",
 		"api_key",
+		"cookie",
+		"last_run",
+		"last_run_data",
 		"created_at",
 		"updated_at",
 	).
@@ -142,14 +152,17 @@ func (r *FeedRepo) Find(ctx context.Context) ([]domain.Feed, error) {
 	for rows.Next() {
 		var f domain.Feed

-		var apiKey sql.NullString
+		var apiKey, cookie, lastRunData sql.NullString
+		var lastRun sql.NullTime

-		if err := rows.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &apiKey, &f.CreatedAt, &f.UpdatedAt); err != nil {
+		if err := rows.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &f.MaxAge, &apiKey, &cookie, &lastRun, &lastRunData, &f.CreatedAt, &f.UpdatedAt); err != nil {
 			return nil, errors.Wrap(err, "error scanning row")
 		}

+		f.LastRun = lastRun.Time
+		f.LastRunData = lastRunData.String
 		f.ApiKey = apiKey.String
+		f.Cookie = cookie.String

 		feeds = append(feeds, f)
 	}
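Reviewer note: max_age ships with a column default, but cookie, last_run, and last_run_data are NULL on rows that predate this migration, so the scans above go through database/sql's null wrappers instead of plain string/time.Time targets. A minimal, self-contained sketch of that unwrap pattern (names hypothetical):

    package main

    import (
        "database/sql"
        "fmt"
    )

    func main() {
        // For a NULL column, Scan leaves Valid == false and the zero value
        // in place, so unwrapping via .String needs no explicit Valid check.
        var cookie sql.NullString // populated by row.Scan(&cookie, ...) in the repo code

        feedCookie := cookie.String // "" when the column was NULL
        fmt.Printf("cookie: %q\n", feedCookie)
    }

sql.NullTime behaves the same way for last_run, yielding the zero time.Time when the column is NULL.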
@@ -205,7 +218,10 @@ func (r *FeedRepo) Update(ctx context.Context, feed *domain.Feed) error {
 		Set("url", feed.URL).
 		Set("interval", feed.Interval).
 		Set("timeout", feed.Timeout).
+		Set("max_age", feed.MaxAge).
 		Set("api_key", feed.ApiKey).
+		Set("cookie", feed.Cookie).
+		Set("updated_at", sq.Expr("CURRENT_TIMESTAMP")).
 		Where("id = ?", feed.ID)

 	query, args, err := queryBuilder.ToSql()
@@ -221,6 +237,45 @@ func (r *FeedRepo) Update(ctx context.Context, feed *domain.Feed) error {
 	return nil
 }

+func (r *FeedRepo) UpdateLastRun(ctx context.Context, feedID int) error {
+	queryBuilder := r.db.squirrel.
+		Update("feed").
+		Set("last_run", sq.Expr("CURRENT_TIMESTAMP")).
+		Where("id = ?", feedID)
+
+	query, args, err := queryBuilder.ToSql()
+	if err != nil {
+		return errors.Wrap(err, "error building query")
+	}
+
+	_, err = r.db.handler.ExecContext(ctx, query, args...)
+	if err != nil {
+		return errors.Wrap(err, "error executing query")
+	}
+
+	return nil
+}
+
+func (r *FeedRepo) UpdateLastRunWithData(ctx context.Context, feedID int, data string) error {
+	queryBuilder := r.db.squirrel.
+		Update("feed").
+		Set("last_run", sq.Expr("CURRENT_TIMESTAMP")).
+		Set("last_run_data", data).
+		Where("id = ?", feedID)
+
+	query, args, err := queryBuilder.ToSql()
+	if err != nil {
+		return errors.Wrap(err, "error building query")
+	}
+
+	_, err = r.db.handler.ExecContext(ctx, query, args...)
+	if err != nil {
+		return errors.Wrap(err, "error executing query")
+	}
+
+	return nil
+}
+
 func (r *FeedRepo) ToggleEnabled(ctx context.Context, id int, enabled bool) error {
 	var err error
internal/database/feed_cache.go:

@@ -55,6 +55,74 @@ func (r *FeedCacheRepo) Get(bucket string, key string) ([]byte, error) {
 	return value, nil
 }

+func (r *FeedCacheRepo) GetByBucket(ctx context.Context, bucket string) ([]domain.FeedCacheItem, error) {
+	queryBuilder := r.db.squirrel.
+		Select(
+			"bucket",
+			"key",
+			"value",
+			"ttl",
+		).
+		From("feed_cache").
+		Where("bucket = ?", bucket)
+
+	query, args, err := queryBuilder.ToSql()
+	if err != nil {
+		return nil, errors.Wrap(err, "error building query")
+	}
+
+	rows, err := r.db.handler.QueryContext(ctx, query, args...)
+	if err != nil {
+		return nil, errors.Wrap(err, "error executing query")
+	}
+
+	defer rows.Close()
+
+	var data []domain.FeedCacheItem
+
+	for rows.Next() {
+		var d domain.FeedCacheItem
+
+		if err := rows.Scan(&d.Bucket, &d.Key, &d.Value, &d.TTL); err != nil {
+			return nil, errors.Wrap(err, "error scanning row")
+		}
+
+		data = append(data, d)
+	}
+
+	if err := rows.Err(); err != nil {
+		return nil, errors.Wrap(err, "row error")
+	}
+
+	return data, nil
+}
+
+func (r *FeedCacheRepo) GetCountByBucket(ctx context.Context, bucket string) (int, error) {
+	queryBuilder := r.db.squirrel.
+		Select("COUNT(*)").
+		From("feed_cache").
+		Where("bucket = ?", bucket)
+
+	query, args, err := queryBuilder.ToSql()
+	if err != nil {
+		return 0, errors.Wrap(err, "error building query")
+	}
+
+	row := r.db.handler.QueryRowContext(ctx, query, args...)
+	if err != nil {
+		return 0, errors.Wrap(err, "error executing query")
+	}
+
+	var count = 0
+
+	if err := row.Scan(&count); err != nil {
+		return 0, errors.Wrap(err, "error scanning row")
+	}
+
+	return count, nil
+}
+
 func (r *FeedCacheRepo) Exists(bucket string, key string) (bool, error) {
 	queryBuilder := r.db.squirrel.
 		Select("1").
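GetByBucket appears to back the new GetCacheByID service method further down, while GetCountByBucket lets the reworked RSS job distinguish a first fetch from a refresh: on the very first run the bucket count is 0, so items are cached but not forwarded to the filters, which avoids flooding on a newly added feed (see the bucketCount check in internal/feed/rss.go below).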
internal/database/postgres_migrate.go:

@@ -291,21 +291,25 @@ CREATE TABLE notification

 CREATE TABLE feed
 (
     id            SERIAL PRIMARY KEY,
     indexer       TEXT,
     name          TEXT,
     type          TEXT,
     enabled       BOOLEAN,
     url           TEXT,
     interval      INTEGER,
     timeout       INTEGER DEFAULT 60,
+    max_age       INTEGER DEFAULT 3600,
     categories    TEXT []   DEFAULT '{}' NOT NULL,
     capabilities  TEXT []   DEFAULT '{}' NOT NULL,
     api_key       TEXT,
+    cookie        TEXT,
     settings      TEXT,
     indexer_id    INTEGER,
+    last_run      TIMESTAMP,
+    last_run_data TEXT,
     created_at    TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
     updated_at    TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
     FOREIGN KEY (indexer_id) REFERENCES indexer(id) ON DELETE SET NULL
 );
@@ -561,4 +565,16 @@ CREATE INDEX indexer_identifier_index
 	`ALTER TABLE feed
 		ADD COLUMN timeout INTEGER DEFAULT 60;
 	`,
+	`ALTER TABLE feed
+		ADD COLUMN max_age INTEGER DEFAULT 3600;
+
+	ALTER TABLE feed
+		ADD COLUMN last_run TIMESTAMP;
+
+	ALTER TABLE feed
+		ADD COLUMN last_run_data TEXT;
+
+	ALTER TABLE feed
+		ADD COLUMN cookie TEXT;
+	`,
 }
internal/database/sqlite_migrate.go:

@@ -274,21 +274,25 @@ CREATE TABLE notification

 CREATE TABLE feed
 (
     id            INTEGER PRIMARY KEY,
     indexer       TEXT,
     name          TEXT,
     type          TEXT,
     enabled       BOOLEAN,
     url           TEXT,
     interval      INTEGER,
     timeout       INTEGER DEFAULT 60,
+    max_age       INTEGER DEFAULT 3600,
     categories    TEXT []   DEFAULT '{}' NOT NULL,
     capabilities  TEXT []   DEFAULT '{}' NOT NULL,
     api_key       TEXT,
+    cookie        TEXT,
     settings      TEXT,
     indexer_id    INTEGER,
+    last_run      TIMESTAMP,
+    last_run_data TEXT,
     created_at    TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
     updated_at    TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
     FOREIGN KEY (indexer_id) REFERENCES indexer(id) ON DELETE SET NULL
 );
@@ -881,4 +885,16 @@ CREATE INDEX indexer_identifier_index
 	`ALTER TABLE feed
 		ADD COLUMN timeout INTEGER DEFAULT 60;
 	`,
+	`ALTER TABLE feed
+		ADD COLUMN max_age INTEGER DEFAULT 3600;
+
+	ALTER TABLE feed
+		ADD COLUMN last_run TIMESTAMP;
+
+	ALTER TABLE feed
+		ADD COLUMN last_run_data TEXT;
+
+	ALTER TABLE feed
+		ADD COLUMN cookie TEXT;
+	`,
 }
internal/domain/feed.go:

@@ -7,6 +7,8 @@ import (

 type FeedCacheRepo interface {
 	Get(bucket string, key string) ([]byte, error)
+	GetByBucket(ctx context.Context, bucket string) ([]FeedCacheItem, error)
+	GetCountByBucket(ctx context.Context, bucket string) (int, error)
 	Exists(bucket string, key string) (bool, error)
 	Put(bucket string, key string, val []byte, ttl time.Time) error
 	Delete(ctx context.Context, bucket string, key string) error

@@ -19,6 +21,8 @@ type FeedRepo interface {
 	Find(ctx context.Context) ([]Feed, error)
 	Store(ctx context.Context, feed *Feed) error
 	Update(ctx context.Context, feed *Feed) error
+	UpdateLastRun(ctx context.Context, feedID int) error
+	UpdateLastRunWithData(ctx context.Context, feedID int, data string) error
 	ToggleEnabled(ctx context.Context, id int, enabled bool) error
 	Delete(ctx context.Context, id int) error
 }

@@ -31,14 +35,18 @@ type Feed struct {
 	Enabled           bool              `json:"enabled"`
 	URL               string            `json:"url"`
 	Interval          int               `json:"interval"`
-	Timeout           int               `json:"timeout"`
+	Timeout           int               `json:"timeout"` // seconds
+	MaxAge            int               `json:"max_age"` // seconds
 	Capabilities      []string          `json:"capabilities"`
 	ApiKey            string            `json:"api_key"`
+	Cookie            string            `json:"cookie"`
 	Settings          map[string]string `json:"settings"`
 	CreatedAt         time.Time         `json:"created_at"`
 	UpdatedAt         time.Time         `json:"updated_at"`
 	IndexerID         int               `json:"indexer_id,omitempty"`
 	Indexerr          FeedIndexer       `json:"-"`
+	LastRun           time.Time         `json:"last_run"`
+	LastRunData       string            `json:"last_run_data"`
 }

 type FeedIndexer struct {

@@ -53,3 +61,10 @@ const (
 	FeedTypeTorznab FeedType = "TORZNAB"
 	FeedTypeRSS     FeedType = "RSS"
 )
+
+type FeedCacheItem struct {
+	Bucket string    `json:"bucket"`
+	Key    string    `json:"key"`
+	Value  []byte    `json:"value"`
+	TTL    time.Time `json:"ttl"`
+}
internal/feed/client.go (new file, 80 lines):

@@ -0,0 +1,80 @@
+package feed
+
+import (
+	"context"
+	"crypto/tls"
+	"net/http"
+	"net/http/cookiejar"
+	"time"
+
+	"github.com/mmcdole/gofeed"
+	"golang.org/x/net/publicsuffix"
+)
+
+type RSSParser struct {
+	parser *gofeed.Parser
+	http   *http.Client
+	cookie string
+}
+
+// NewFeedParser wraps the gofeed.Parser using our own http client for full control
+func NewFeedParser(timeout time.Duration, cookie string) *RSSParser {
+	//store cookies in jar
+	jarOptions := &cookiejar.Options{PublicSuffixList: publicsuffix.List}
+	jar, _ := cookiejar.New(jarOptions)
+
+	customTransport := http.DefaultTransport.(*http.Transport).Clone()
+	customTransport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
+	httpClient := &http.Client{
+		Timeout:   time.Second * 60,
+		Transport: customTransport,
+		Jar:       jar,
+	}
+
+	c := &RSSParser{
+		parser: gofeed.NewParser(),
+		http:   httpClient,
+		cookie: cookie,
+	}
+
+	c.http.Timeout = timeout
+
+	return c
+}
+
+func (c *RSSParser) ParseURLWithContext(ctx context.Context, feedURL string) (feed *gofeed.Feed, err error) {
+	req, err := http.NewRequestWithContext(ctx, http.MethodGet, feedURL, nil)
+	if err != nil {
+		return nil, err
+	}
+
+	req.Header.Set("User-Agent", "Gofeed/1.0")
+
+	if c.cookie != "" {
+		// set raw cookie as header
+		req.Header.Set("Cookie", c.cookie)
+	}
+
+	resp, err := c.http.Do(req)
+	if err != nil {
+		return nil, err
+	}
+
+	if resp != nil {
+		defer func() {
+			ce := resp.Body.Close()
+			if ce != nil {
+				err = ce
+			}
+		}()
+	}
+
+	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
+		return nil, gofeed.HTTPError{
+			StatusCode: resp.StatusCode,
+			Status:     resp.Status,
+		}
+	}
+
+	return c.parser.Parse(resp.Body)
+}
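For context, the reworked RSS job below consumes this parser as NewFeedParser(j.Timeout, j.Feed.Cookie).ParseURLWithContext(ctx, j.URL). A minimal caller sketch (URL and cookie values hypothetical, assuming the feed package's existing imports); note the deliberate tradeoff above: InsecureSkipVerify disables certificate validation so feeds behind self-signed certs still work:

    // hypothetical caller inside package feed
    func exampleFetch(ctx context.Context) error {
        ctx, cancel := context.WithTimeout(ctx, 60*time.Second)
        defer cancel()

        // the cookie string is sent verbatim as a Cookie header
        parser := NewFeedParser(60*time.Second, "uid=1; pass=abc")

        feed, err := parser.ParseURLWithContext(ctx, "https://tracker.example/rss")
        if err != nil {
            return err
        }

        fmt.Printf("fetched %d items\n", len(feed.Items))
        return nil
    }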
internal/feed/rss.go:

@@ -2,8 +2,10 @@ package feed

 import (
 	"context"
+	"encoding/xml"
+	"fmt"
 	"net/url"
-	"sort"
+	"regexp"
 	"time"

 	"github.com/autobrr/autobrr/internal/domain"

@@ -15,11 +17,13 @@ import (
 )

 type RSSJob struct {
+	Feed              *domain.Feed
 	Name              string
 	IndexerIdentifier string
 	Log               zerolog.Logger
 	URL               string
-	Repo              domain.FeedCacheRepo
+	Repo              domain.FeedRepo
+	CacheRepo         domain.FeedCacheRepo
 	ReleaseSvc        release.Service
 	Timeout           time.Duration

@@ -29,13 +33,15 @@ type RSSJob struct {
 	JobID int
 }

-func NewRSSJob(name string, indexerIdentifier string, log zerolog.Logger, url string, repo domain.FeedCacheRepo, releaseSvc release.Service, timeout time.Duration) *RSSJob {
+func NewRSSJob(feed *domain.Feed, name string, indexerIdentifier string, log zerolog.Logger, url string, repo domain.FeedRepo, cacheRepo domain.FeedCacheRepo, releaseSvc release.Service, timeout time.Duration) *RSSJob {
 	return &RSSJob{
+		Feed:              feed,
 		Name:              name,
 		IndexerIdentifier: indexerIdentifier,
 		Log:               log,
 		URL:               url,
 		Repo:              repo,
+		CacheRepo:         cacheRepo,
 		ReleaseSvc:        releaseSvc,
 		Timeout:           timeout,
 	}

@@ -43,7 +49,7 @@ func NewRSSJob(name string, indexerIdentifier string, log zerolog.Logger, url st

 func (j *RSSJob) Run() {
 	if err := j.process(); err != nil {
-		j.Log.Err(err).Int("attempts", j.attempts).Msg("rss feed process error")
+		j.Log.Error().Err(err).Int("attempts", j.attempts).Msg("rss feed process error")

 		j.errors = append(j.errors, err)
 		return

@@ -71,9 +77,13 @@ func (j *RSSJob) process() error {
 	releases := make([]*domain.Release, 0)

 	for _, item := range items {
-		rls := j.processItem(item)
+		item := item
+		j.Log.Debug().Msgf("item: %v", item.Title)

-		releases = append(releases, rls)
+		rls := j.processItem(item)
+		if rls != nil {
+			releases = append(releases, rls)
+		}
 	}

 	// process all new releases

@@ -83,6 +93,16 @@ func (j *RSSJob) process() error {
 }

 func (j *RSSJob) processItem(item *gofeed.Item) *domain.Release {
+	now := time.Now()
+
+	if j.Feed.MaxAge > 0 {
+		if item.PublishedParsed != nil {
+			if !isNewerThanMaxAge(j.Feed.MaxAge, *item.PublishedParsed, now) {
+				return nil
+			}
+		}
+	}
+
 	rls := domain.NewRelease(j.IndexerIdentifier)
 	rls.Implementation = domain.ReleaseImplementationRSS

@@ -117,6 +137,8 @@ func (j *RSSJob) processItem(item *gofeed.Item) *domain.Release {
 	}

 	for _, v := range item.Categories {
+		rls.Categories = append(rls.Categories, item.Categories...)
+
 		if len(rls.Category) != 0 {
 			rls.Category += ", "
 		}

@@ -138,6 +160,38 @@ func (j *RSSJob) processItem(item *gofeed.Item) *domain.Release {
 			rls.ParseSizeBytesString(sz)
 		}
 	}

+	// additional size parsing
+	// some feeds have a fixed size for enclosure so lets check for custom elements
+	// and parse size from there if it differs
+	if customTorrent, ok := item.Custom["torrent"]; ok {
+		var element itemCustomElement
+		if err := xml.Unmarshal([]byte("<torrent>"+customTorrent+"</torrent>"), &element); err != nil {
+			j.Log.Error().Err(err).Msg("could not unmarshal item.Custom.Torrent")
+		}
+
+		if element.ContentLength > 0 {
+			if uint64(element.ContentLength) != rls.Size {
+				rls.Size = uint64(element.ContentLength)
+			}
+		}
+
+		if rls.TorrentHash == "" && element.InfoHash != "" {
+			rls.TorrentHash = element.InfoHash
+		}
+	}
+
+	// basic freeleech parsing
+	if isFreeleech([]string{item.Title, item.Description}) {
+		rls.Freeleech = true
+		rls.Bonus = []string{"Freeleech"}
+	}
+
+	// add cookie to release for download if needed
+	if j.Feed.Cookie != "" {
+		rls.RawCookie = j.Feed.Cookie
+	}
+
 	return rls
 }

@@ -145,51 +199,103 @@ func (j *RSSJob) getFeed() (items []*gofeed.Item, err error) {
 	ctx, cancel := context.WithTimeout(context.Background(), j.Timeout)
 	defer cancel()

-	feed, err := gofeed.NewParser().ParseURLWithContext(j.URL, ctx) // there's an RSS specific parser as well.
+	feed, err := NewFeedParser(j.Timeout, j.Feed.Cookie).ParseURLWithContext(ctx, j.URL)
 	if err != nil {
-		j.Log.Error().Err(err).Msgf("error fetching rss feed items")
 		return nil, errors.Wrap(err, "error fetching rss feed items")
 	}

+	// get feed as JSON string
+	feedData := feed.String()
+
+	if err := j.Repo.UpdateLastRunWithData(context.Background(), j.Feed.ID, feedData); err != nil {
+		j.Log.Error().Err(err).Msgf("error updating last run for feed id: %v", j.Feed.ID)
+	}
+
 	j.Log.Debug().Msgf("refreshing rss feed: %v, found (%d) items", j.Name, len(feed.Items))

 	if len(feed.Items) == 0 {
 		return
 	}

-	sort.Sort(feed)
+	bucketKey := fmt.Sprintf("%v+%v", j.IndexerIdentifier, j.Name)
+
+	//sort.Sort(feed)
+
+	bucketCount, err := j.CacheRepo.GetCountByBucket(ctx, bucketKey)
+	if err != nil {
+		j.Log.Error().Err(err).Msg("could not check if item exists")
+		return nil, err
+	}
+
+	// set ttl to 1 month
+	ttl := time.Now().AddDate(0, 1, 0)

 	for _, i := range feed.Items {
-		s := i.GUID
-		if len(s) == 0 {
-			s = i.Title
-			if len(s) == 0 {
+		item := i
+
+		key := item.GUID
+		if len(key) == 0 {
+			key = item.Title
+			if len(key) == 0 {
 				continue
 			}
 		}

-		exists, err := j.Repo.Exists(j.Name, s)
+		exists, err := j.CacheRepo.Exists(bucketKey, key)
 		if err != nil {
 			j.Log.Error().Err(err).Msg("could not check if item exists")
 			continue
 		}
 		if exists {
-			j.Log.Trace().Msgf("cache item exists, skipping release: %v", i.Title)
+			j.Log.Trace().Msgf("cache item exists, skipping release: %v", item.Title)
 			continue
 		}

-		// set ttl to 1 month
-		ttl := time.Now().AddDate(0, 1, 0)
-
-		if err := j.Repo.Put(j.Name, s, []byte(i.Title), ttl); err != nil {
-			j.Log.Error().Stack().Err(err).Str("entry", s).Msg("cache.Put: error storing item in cache")
+		if err := j.CacheRepo.Put(bucketKey, key, []byte(item.Title), ttl); err != nil {
+			j.Log.Error().Err(err).Str("entry", key).Msg("cache.Put: error storing item in cache")
 			continue
 		}

-		// only append if we successfully added to cache
-		items = append(items, i)
+		// first time we fetch the feed the cached bucket count will be 0
+		// only append to items if it's bigger than 0, so we get new items only
+		if bucketCount > 0 {
+			items = append(items, item)
+		}
 	}

 	// send to filters
 	return
 }
+
+func isNewerThanMaxAge(maxAge int, item, now time.Time) bool {
+	// now minus max age
+	nowMaxAge := now.Add(time.Duration(-maxAge) * time.Second)
+
+	if item.After(nowMaxAge) {
+		return true
+	}
+
+	return false
+}
+
+// isFreeleech basic freeleech parsing
+func isFreeleech(str []string) bool {
+	for _, s := range str {
+		var re = regexp.MustCompile(`(?mi)(\bfreeleech\b)`)
+
+		match := re.FindAllString(s, -1)
+
+		if len(match) > 0 {
+			return true
+		}
+	}
+
+	return false
+}
+
+// itemCustomElement
+// used for some feeds like Aviztas network
+type itemCustomElement struct {
+	ContentLength int64  `xml:"contentLength"`
+	InfoHash      string `xml:"infoHash"`
+}
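For reference, the item.Custom["torrent"] branch in processItem wraps the raw inner XML in <torrent>...</torrent> before unmarshalling, so a payload shaped like the hypothetical one below maps onto itemCustomElement's struct tags. A self-contained sketch:

    package main

    import (
        "encoding/xml"
        "fmt"
    )

    // mirrors itemCustomElement in internal/feed/rss.go
    type itemCustomElement struct {
        ContentLength int64  `xml:"contentLength"`
        InfoHash      string `xml:"infoHash"`
    }

    func main() {
        // hypothetical value of item.Custom["torrent"]
        raw := "<contentLength>1565232128</contentLength><infoHash>aabbccddeeff00112233445566778899aabbccdd</infoHash>"

        var element itemCustomElement
        if err := xml.Unmarshal([]byte("<torrent>"+raw+"</torrent>"), &element); err != nil {
            fmt.Println("unmarshal failed:", err)
            return
        }

        // the job prefers this size/hash over the enclosure values when present
        fmt.Println(element.ContentLength, element.InfoHash)
    }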
internal/feed/rss_test.go:

@@ -14,8 +14,10 @@ import (

 func TestRSSJob_processItem(t *testing.T) {
 	now := time.Now()
+	nowMinusTime := time.Now().Add(time.Duration(-3000) * time.Second)

 	type fields struct {
+		Feed              *domain.Feed
 		Name              string
 		IndexerIdentifier string
 		Log               zerolog.Logger

@@ -38,6 +40,9 @@ func TestRSSJob_processItem(t *testing.T) {
 		{
 			name: "no_baseurl",
 			fields: fields{
+				Feed: &domain.Feed{
+					MaxAge: 3600,
+				},
 				Name:              "test feed",
 				IndexerIdentifier: "mock-feed",
 				Log:               zerolog.Logger{},

@@ -64,6 +69,9 @@ func TestRSSJob_processItem(t *testing.T) {
 		{
 			name: "with_baseurl",
 			fields: fields{
+				Feed: &domain.Feed{
+					MaxAge: 3600,
+				},
 				Name:              "test feed",
 				IndexerIdentifier: "mock-feed",
 				Log:               zerolog.Logger{},

@@ -87,24 +95,124 @@ func TestRSSJob_processItem(t *testing.T) {
 			}},
 			want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: "mock-feed", FilterName: "", Protocol: "torrent", Implementation: "RSS", Timestamp: now, GroupID: "", TorrentID: "", TorrentURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 0x0, Title: "Some Release Title", Category: "", Season: 0, Episode: 0, Year: 2022, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: "", Proper: false, Repack: false, Website: "", Artists: "", Type: "", LogScore: 0, IsScene: false, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
 		},
+		{
+			name: "time_parse",
+			fields: fields{
+				Feed: &domain.Feed{
+					MaxAge: 360,
+				},
+				Name:              "test feed",
+				IndexerIdentifier: "mock-feed",
+				Log:               zerolog.Logger{},
+				URL:               "https://fake-feed.com/rss",
+				Repo:              nil,
+				ReleaseSvc:        nil,
+				attempts:          0,
+				errors:            nil,
+				JobID:             0,
+			},
+			args: args{item: &gofeed.Item{
+				Title: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
+				Description: `Category: Example
+Size: 1.49 GB
+Status: 27 seeders and 1 leechers
+Speed: 772.16 kB/s
+Added: 2022-09-29 16:06:08
+`,
+				Link: "https://fake-feed.com/details.php?id=00000&hit=1",
+				GUID: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
+				//PublishedParsed: &nowMinusTime,
+			}},
+			want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: "mock-feed", FilterName: "", Protocol: "torrent", Implementation: "RSS", Timestamp: now, GroupID: "", TorrentID: "", TorrentURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 0x0, Title: "Some Release Title", Category: "", Season: 0, Episode: 0, Year: 2022, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: "", Proper: false, Repack: false, Website: "", Artists: "", Type: "", LogScore: 0, IsScene: false, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
+		},
+		{
+			name: "time_parse",
+			fields: fields{
+				Feed: &domain.Feed{
+					MaxAge: 360,
+				},
+				Name:              "test feed",
+				IndexerIdentifier: "mock-feed",
+				Log:               zerolog.Logger{},
+				URL:               "https://fake-feed.com/rss",
+				Repo:              nil,
+				ReleaseSvc:        nil,
+				attempts:          0,
+				errors:            nil,
+				JobID:             0,
+			},
+			args: args{item: &gofeed.Item{
+				Title: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
+				Description: `Category: Example
+Size: 1.49 GB
+Status: 27 seeders and 1 leechers
+Speed: 772.16 kB/s
+Added: 2022-09-29 16:06:08
+`,
+				Link:            "https://fake-feed.com/details.php?id=00000&hit=1",
+				GUID:            "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
+				PublishedParsed: &nowMinusTime,
+			}},
+			want: nil,
+		},
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			j := &RSSJob{
+				Feed:              tt.fields.Feed,
 				Name:              tt.fields.Name,
 				IndexerIdentifier: tt.fields.IndexerIdentifier,
 				Log:               tt.fields.Log,
 				URL:               tt.fields.URL,
-				Repo:              tt.fields.Repo,
+				CacheRepo:         tt.fields.Repo,
 				ReleaseSvc:        tt.fields.ReleaseSvc,
 				attempts:          tt.fields.attempts,
 				errors:            tt.fields.errors,
 				JobID:             tt.fields.JobID,
 			}
 			got := j.processItem(tt.args.item)
-			got.Timestamp = now // override to match
+			if got != nil {
+				got.Timestamp = now // override to match
+			}
+
 			assert.Equal(t, tt.want, got)
 		})
 	}
 }
+
+func Test_isMaxAge(t *testing.T) {
+	type args struct {
+		maxAge int
+		item   time.Time
+		now    time.Time
+	}
+	tests := []struct {
+		name string
+		args args
+		want bool
+	}{
+		{
+			name: "01",
+			args: args{
+				maxAge: 3600,
+				item:   time.Now().Add(time.Duration(-500) * time.Second),
+				now:    time.Now(),
+			},
+			want: true,
+		},
+		{
+			name: "02",
+			args: args{
+				maxAge: 3600,
+				item:   time.Now().Add(time.Duration(-5000) * time.Second),
+				now:    time.Now(),
+			},
+			want: false,
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			assert.Equalf(t, tt.want, isNewerThanMaxAge(tt.args.maxAge, tt.args.item, tt.args.now), "isNewerThanMaxAge(%v, %v, %v)", tt.args.maxAge, tt.args.item, tt.args.now)
+		})
+	}
+}
internal/feed/service.go:

@@ -2,6 +2,9 @@ package feed

 import (
 	"context"
+	"fmt"
+	"log"
+	"strconv"
 	"time"

 	"github.com/autobrr/autobrr/internal/domain"

@@ -12,6 +15,7 @@ import (
 	"github.com/autobrr/autobrr/pkg/torznab"

 	"github.com/dcarbone/zadapters/zstdlog"
+	"github.com/mmcdole/gofeed"
 	"github.com/rs/zerolog"
 )

@@ -19,6 +23,7 @@ type Service interface {
 	FindByID(ctx context.Context, id int) (*domain.Feed, error)
 	FindByIndexerIdentifier(ctx context.Context, indexer string) (*domain.Feed, error)
 	Find(ctx context.Context) ([]domain.Feed, error)
+	GetCacheByID(ctx context.Context, bucket string) ([]domain.FeedCacheItem, error)
 	Store(ctx context.Context, feed *domain.Feed) error
 	Update(ctx context.Context, feed *domain.Feed) error
 	Test(ctx context.Context, feed *domain.Feed) error

@@ -29,6 +34,7 @@ type Service interface {
 }

 type feedInstance struct {
+	Feed              *domain.Feed
 	Name              string
 	IndexerIdentifier string
 	URL               string

@@ -38,6 +44,16 @@ type feedInstance struct {
 	Timeout time.Duration
 }

+type feedKey struct {
+	id      int
+	indexer string
+	name    string
+}
+
+func (k feedKey) ToString() string {
+	return fmt.Sprintf("%v+%v+%v", k.id, k.indexer, k.name)
+}
+
 type service struct {
 	log  zerolog.Logger
 	jobs map[string]int
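The composite feedKey replaces the bare indexer identifier as the scheduler key (the commit message's "refactor feed job keys"), making job identifiers unique per feed rather than per indexer. For example:

    // inside package feed
    key := feedKey{id: 1, indexer: "mock-feed", name: "test feed"}.ToString()
    // key == "1+mock-feed+test feed"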
@ -60,82 +76,67 @@ func NewService(log logger.Logger, repo domain.FeedRepo, cacheRepo domain.FeedCa
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *service) FindByID(ctx context.Context, id int) (*domain.Feed, error) {
|
func (s *service) FindByID(ctx context.Context, id int) (*domain.Feed, error) {
|
||||||
|
return s.repo.FindByID(ctx, id)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *service) FindByIndexerIdentifier(ctx context.Context, indexer string) (*domain.Feed, error) {
|
||||||
|
return s.repo.FindByIndexerIdentifier(ctx, indexer)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *service) Find(ctx context.Context) ([]domain.Feed, error) {
|
||||||
|
return s.repo.Find(ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *service) GetCacheByID(ctx context.Context, bucket string) ([]domain.FeedCacheItem, error) {
|
||||||
|
id, _ := strconv.Atoi(bucket)
|
||||||
|
|
||||||
feed, err := s.repo.FindByID(ctx, id)
|
feed, err := s.repo.FindByID(ctx, id)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
s.log.Error().Err(err).Msgf("could not find feed by id: %v", id)
|
s.log.Error().Err(err).Msgf("could not find feed by id: %v", id)
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return feed, nil
|
data, err := s.cacheRepo.GetByBucket(ctx, feed.Name)
|
||||||
}
|
|
||||||
|
|
||||||
func (s *service) FindByIndexerIdentifier(ctx context.Context, indexer string) (*domain.Feed, error) {
|
|
||||||
feed, err := s.repo.FindByIndexerIdentifier(ctx, indexer)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
s.log.Error().Err(err).Msgf("could not find feed by indexer: %v", indexer)
|
s.log.Error().Err(err).Msg("could not get feed cache")
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
return feed, nil
|
return data, err
|
||||||
}
|
|
||||||
|
|
||||||
func (s *service) Find(ctx context.Context) ([]domain.Feed, error) {
|
|
||||||
feeds, err := s.repo.Find(ctx)
|
|
||||||
if err != nil {
|
|
||||||
s.log.Error().Err(err).Msg("could not find feeds")
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return feeds, err
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *service) Store(ctx context.Context, feed *domain.Feed) error {
|
func (s *service) Store(ctx context.Context, feed *domain.Feed) error {
|
||||||
if err := s.repo.Store(ctx, feed); err != nil {
|
return s.repo.Store(ctx, feed)
|
||||||
s.log.Error().Err(err).Msgf("could not store feed: %+v", feed)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
s.log.Debug().Msgf("successfully added feed: %+v", feed)
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *service) Update(ctx context.Context, feed *domain.Feed) error {
|
func (s *service) Update(ctx context.Context, feed *domain.Feed) error {
|
||||||
if err := s.update(ctx, feed); err != nil {
|
return s.update(ctx, feed)
|
||||||
s.log.Error().Err(err).Msgf("could not update feed: %+v", feed)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
s.log.Debug().Msgf("successfully updated feed: %+v", feed)
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *service) Delete(ctx context.Context, id int) error {
|
func (s *service) Delete(ctx context.Context, id int) error {
|
||||||
if err := s.delete(ctx, id); err != nil {
|
return s.delete(ctx, id)
|
||||||
s.log.Error().Err(err).Msgf("could not delete feed by id: %v", id)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *service) ToggleEnabled(ctx context.Context, id int, enabled bool) error {
|
func (s *service) ToggleEnabled(ctx context.Context, id int, enabled bool) error {
|
||||||
if err := s.toggleEnabled(ctx, id, enabled); err != nil {
|
return s.toggleEnabled(ctx, id, enabled)
|
||||||
s.log.Error().Err(err).Msgf("could not toggle feed by id: %v", id)
|
}
|
||||||
return err
|
|
||||||
}
|
func (s *service) Test(ctx context.Context, feed *domain.Feed) error {
|
||||||
return nil
|
return s.test(ctx, feed)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *service) Start() error {
|
||||||
|
return s.start()
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *service) update(ctx context.Context, feed *domain.Feed) error {
|
func (s *service) update(ctx context.Context, feed *domain.Feed) error {
|
||||||
if err := s.repo.Update(ctx, feed); err != nil {
|
if err := s.repo.Update(ctx, feed); err != nil {
|
||||||
s.log.Error().Err(err).Msg("feed.Update: error updating feed")
|
s.log.Error().Err(err).Msg("error updating feed")
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := s.restartJob(feed); err != nil {
|
if err := s.restartJob(feed); err != nil {
|
||||||
s.log.Error().Err(err).Msg("feed.Update: error restarting feed")
|
s.log.Error().Err(err).Msg("error restarting feed")
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -149,17 +150,13 @@ func (s *service) delete(ctx context.Context, id int) error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
switch f.Type {
|
s.log.Debug().Msgf("stopping and removing feed: %v", f.Name)
|
||||||
case string(domain.FeedTypeTorznab):
|
|
||||||
if err := s.stopTorznabJob(f.Indexer); err != nil {
|
identifierKey := feedKey{f.ID, f.Indexer, f.Name}.ToString()
|
||||||
s.log.Error().Err(err).Msg("error stopping torznab job")
|
|
||||||
return err
|
if err := s.stopFeedJob(identifierKey); err != nil {
|
||||||
}
|
s.log.Error().Err(err).Msg("error stopping rss job")
|
||||||
case string(domain.FeedTypeRSS):
|
return err
|
||||||
if err := s.stopRSSJob(f.Indexer); err != nil {
|
|
||||||
s.log.Error().Err(err).Msg("error stopping rss job")
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := s.repo.Delete(ctx, id); err != nil {
|
if err := s.repo.Delete(ctx, id); err != nil {
|
||||||
|
@ -172,83 +169,112 @@ func (s *service) delete(ctx context.Context, id int) error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
s.log.Debug().Msgf("feed.Delete: stopping and removing feed: %v", f.Name)
|
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *service) toggleEnabled(ctx context.Context, id int, enabled bool) error {
|
func (s *service) toggleEnabled(ctx context.Context, id int, enabled bool) error {
|
||||||
f, err := s.repo.FindByID(ctx, id)
|
f, err := s.repo.FindByID(ctx, id)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
s.log.Error().Err(err).Msg("feed.ToggleEnabled: error finding feed")
|
s.log.Error().Err(err).Msg("error finding feed")
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := s.repo.ToggleEnabled(ctx, id, enabled); err != nil {
|
if err := s.repo.ToggleEnabled(ctx, id, enabled); err != nil {
|
||||||
s.log.Error().Err(err).Msg("feed.ToggleEnabled: error toggle enabled")
|
s.log.Error().Err(err).Msg("error feed toggle enabled")
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if f.Enabled && !enabled {
|
if f.Enabled != enabled {
|
||||||
switch f.Type {
|
if enabled {
|
||||||
case string(domain.FeedTypeTorznab):
|
// override enabled
|
||||||
if err := s.stopTorznabJob(f.Indexer); err != nil {
|
f.Enabled = true
|
||||||
s.log.Error().Err(err).Msg("feed.ToggleEnabled: error stopping torznab job")
|
|
||||||
|
if err := s.startJob(f); err != nil {
|
||||||
|
s.log.Error().Err(err).Msg("error starting feed job")
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
case string(domain.FeedTypeRSS):
|
|
||||||
if err := s.stopRSSJob(f.Indexer); err != nil {
|
s.log.Debug().Msgf("feed started: %v", f.Name)
|
||||||
s.log.Error().Err(err).Msg("feed.ToggleEnabled: error stopping rss job")
|
|
||||||
|
return nil
|
||||||
|
} else {
|
||||||
|
s.log.Debug().Msgf("stopping feed: %v", f.Name)
|
||||||
|
|
||||||
|
identifierKey := feedKey{f.ID, f.Indexer, f.Name}.ToString()
|
||||||
|
|
||||||
|
if err := s.stopFeedJob(identifierKey); err != nil {
|
||||||
|
s.log.Error().Err(err).Msg("error stopping feed job")
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
s.log.Debug().Msgf("feed stopped: %v", f.Name)
|
||||||
|
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
s.log.Debug().Msgf("feed.ToggleEnabled: stopping feed: %v", f.Name)
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := s.startJob(*f); err != nil {
|
|
||||||
s.log.Error().Err(err).Msg("feed.ToggleEnabled: error starting torznab job")
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
s.log.Debug().Msgf("feed.ToggleEnabled: started feed: %v", f.Name)
|
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *service) Test(ctx context.Context, feed *domain.Feed) error {
|
func (s *service) test(ctx context.Context, feed *domain.Feed) error {
|
||||||
|
// create sub logger
|
||||||
subLogger := zstdlog.NewStdLoggerWithLevel(s.log.With().Logger(), zerolog.DebugLevel)
|
subLogger := zstdlog.NewStdLoggerWithLevel(s.log.With().Logger(), zerolog.DebugLevel)
|
||||||
|
|
||||||
// implementation == TORZNAB
|
// test feeds
|
||||||
if feed.Type == string(domain.FeedTypeTorznab) {
|
if feed.Type == string(domain.FeedTypeTorznab) {
|
||||||
// setup torznab Client
|
if err := s.testTorznab(feed, subLogger); err != nil {
|
||||||
c := torznab.NewClient(torznab.Config{Host: feed.URL, ApiKey: feed.ApiKey, Log: subLogger})
|
return err
|
||||||
|
}
|
||||||
if _, err := c.FetchFeed(); err != nil {
|
} else if feed.Type == string(domain.FeedTypeRSS) {
|
||||||
s.log.Error().Err(err).Msg("error getting torznab feed")
|
if err := s.testRSS(ctx, feed); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
s.log.Debug().Msgf("test successful - connected to feed: %+v", feed.URL)
|
s.log.Info().Msgf("feed test successful - connected to feed: %v", feed.URL)
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *service) Start() error {
|
func (s *service) testRSS(ctx context.Context, feed *domain.Feed) error {
|
||||||
// get all torznab indexer definitions
|
f, err := gofeed.NewParser().ParseURLWithContext(feed.URL, ctx)
|
||||||
feeds, err := s.repo.Find(context.TODO())
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
s.log.Error().Err(err).Msg("feed.Start: error finding feeds")
|
s.log.Error().Err(err).Msgf("error fetching rss feed items")
|
||||||
|
return errors.Wrap(err, "error fetching rss feed items")
|
||||||
|
}
|
||||||
|
|
||||||
|
s.log.Info().Msgf("refreshing rss feed: %v, found (%d) items", feed.Name, len(f.Items))
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *service) testTorznab(feed *domain.Feed, subLogger *log.Logger) error {
|
||||||
|
// setup torznab Client
|
||||||
|
c := torznab.NewClient(torznab.Config{Host: feed.URL, ApiKey: feed.ApiKey, Log: subLogger})
|
||||||
|
|
||||||
|
items, err := c.FetchFeed()
|
||||||
|
if err != nil {
|
||||||
|
s.log.Error().Err(err).Msg("error getting torznab feed")
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, i := range feeds {
|
s.log.Info().Msgf("refreshing torznab feed: %v, found (%d) items", feed.Name, len(items))
|
||||||
if err := s.startJob(i); err != nil {
|
|
||||||
s.log.Error().Err(err).Msg("feed.Start: failed to initialize torznab job")
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (s *service) start() error {
|
||||||
|
// get all torznab indexer definitions
|
||||||
|
feeds, err := s.repo.Find(context.TODO())
|
||||||
|
if err != nil {
|
||||||
|
s.log.Error().Err(err).Msg("error finding feeds")
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, feed := range feeds {
|
||||||
|
feed := feed
|
||||||
|
if err := s.startJob(&feed); err != nil {
|
||||||
|
s.log.Error().Err(err).Msg("failed to initialize torznab job")
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -257,27 +283,29 @@ func (s *service) Start() error {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *service) restartJob(f *domain.Feed) error {
|
func (s *service) restartJob(f *domain.Feed) error {
|
||||||
// stop feed
|
s.log.Debug().Msgf("stopping feed: %v", f.Name)
|
||||||
if err := s.stopTorznabJob(f.Indexer); err != nil {
|
|
||||||
s.log.Error().Err(err).Msg("feed.restartJob: error stopping torznab job")
|
identifierKey := feedKey{f.ID, f.Indexer, f.Name}.ToString()
|
||||||
|
|
||||||
|
// stop feed job
|
||||||
|
if err := s.stopFeedJob(identifierKey); err != nil {
|
||||||
|
s.log.Error().Err(err).Msg("error stopping feed job")
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
s.log.Debug().Msgf("feed.restartJob: stopping feed: %v", f.Name)
|
|
||||||
|
|
||||||
if f.Enabled {
|
if f.Enabled {
|
||||||
if err := s.startJob(*f); err != nil {
|
if err := s.startJob(f); err != nil {
|
||||||
s.log.Error().Err(err).Msg("feed.restartJob: error starting torznab job")
|
s.log.Error().Err(err).Msg("error starting feed job")
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
s.log.Debug().Msgf("feed.restartJob: restarted feed: %v", f.Name)
|
s.log.Debug().Msgf("restarted feed: %v", f.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (s *service) startJob(f domain.Feed) error {
|
func (s *service) startJob(f *domain.Feed) error {
|
||||||
// get all torznab indexer definitions
|
// get all torznab indexer definitions
|
||||||
if !f.Enabled {
|
if !f.Enabled {
|
||||||
return nil
|
return nil
|
||||||
|
@ -285,11 +313,12 @@ func (s *service) startJob(f domain.Feed) error {
|
||||||
|
|
||||||
// get torznab_url from settings
|
// get torznab_url from settings
|
||||||
if f.URL == "" {
|
if f.URL == "" {
|
||||||
return nil
|
return errors.New("no URL provided for feed: %v", f.Name)
|
||||||
}
|
}
|
||||||
|
|
||||||
// cron schedule to run every X minutes
|
// cron schedule to run every X minutes
|
||||||
fi := feedInstance{
|
fi := feedInstance{
|
||||||
|
Feed: f,
|
||||||
Name: f.Name,
|
Name: f.Name,
|
||||||
IndexerIdentifier: f.Indexer,
|
IndexerIdentifier: f.Indexer,
|
||||||
Implementation: f.Type,
|
Implementation: f.Type,
|
||||||
|
@ -302,12 +331,12 @@ func (s *service) startJob(f domain.Feed) error {
|
||||||
switch fi.Implementation {
|
switch fi.Implementation {
|
||||||
case string(domain.FeedTypeTorznab):
|
case string(domain.FeedTypeTorznab):
|
||||||
if err := s.addTorznabJob(fi); err != nil {
|
if err := s.addTorznabJob(fi); err != nil {
|
||||||
s.log.Error().Err(err).Msg("feed.startJob: failed to initialize torznab feed")
|
s.log.Error().Err(err).Msg("failed to initialize torznab feed")
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
case string(domain.FeedTypeRSS):
|
case string(domain.FeedTypeRSS):
|
||||||
if err := s.addRSSJob(fi); err != nil {
|
if err := s.addRSSJob(fi); err != nil {
|
||||||
s.log.Error().Err(err).Msg("feed.startJob: failed to initialize rss feed")
|
s.log.Error().Err(err).Msg("failed to initialize rss feed")
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
@@ -319,9 +348,10 @@ func (s *service) addTorznabJob(f feedInstance) error {
 	if f.URL == "" {
 		return errors.New("torznab feed requires URL")
 	}
-	if f.CronSchedule < time.Duration(5*time.Minute) {
-		f.CronSchedule = time.Duration(15 * time.Minute)
-	}
+	//if f.CronSchedule < 5*time.Minute {
+	//	f.CronSchedule = 15 * time.Minute
+	//}

 	// setup logger
 	l := s.log.With().Str("feed", f.Name).Logger()

@@ -332,28 +362,19 @@ func (s *service) addTorznabJob(f feedInstance) error {
 	// create job
 	job := NewTorznabJob(f.Name, f.IndexerIdentifier, l, f.URL, c, s.cacheRepo, s.releaseSvc)

+	identifierKey := feedKey{f.Feed.ID, f.Feed.Indexer, f.Feed.Name}.ToString()

 	// schedule job
-	id, err := s.scheduler.AddJob(job, f.CronSchedule, f.IndexerIdentifier)
+	id, err := s.scheduler.AddJob(job, f.CronSchedule, identifierKey)
 	if err != nil {
 		return errors.Wrap(err, "feed.AddTorznabJob: add job failed")
 	}
 	job.JobID = id

 	// add to job map
-	s.jobs[f.IndexerIdentifier] = id
+	s.jobs[identifierKey] = id

-	s.log.Debug().Msgf("feed.AddTorznabJob: %v", f.Name)
+	s.log.Debug().Msgf("add torznab job: %v", f.Name)

-	return nil
-}
-
-func (s *service) stopTorznabJob(indexer string) error {
-	// remove job from scheduler
-	if err := s.scheduler.RemoveJobByIdentifier(indexer); err != nil {
-		return errors.Wrap(err, "feed.stopTorznabJob: stop job failed")
-	}
-
-	s.log.Debug().Msgf("feed.stopTorznabJob: %v", indexer)
-
 	return nil
 }
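Both job constructors now derive their scheduler identifier from a feedKey composite instead of the bare indexer identifier. The feedKey type itself is not part of this excerpt, so the following is a hypothetical reconstruction; only the three components (ID, Indexer, Name) are visible above, and the field names and join format are assumptions:

package feed

import "fmt"

// feedKey is a sketch of the composite-key helper referenced in the hunks
// above.
type feedKey struct {
	id      int
	indexer string
	name    string
}

// ToString flattens the key into a single string for the scheduler and the
// service's jobs map.
func (k feedKey) ToString() string {
	return fmt.Sprintf("%v+%v+%v", k.id, k.indexer, k.name)
}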
@@ -362,38 +383,41 @@ func (s *service) addRSSJob(f feedInstance) error {
 	if f.URL == "" {
 		return errors.New("rss feed requires URL")
 	}
-	if f.CronSchedule < time.Duration(5*time.Minute) {
-		f.CronSchedule = time.Duration(15 * time.Minute)
-	}
+	//if f.CronSchedule < time.Duration(5*time.Minute) {
+	//	f.CronSchedule = time.Duration(15 * time.Minute)
+	//}

 	// setup logger
 	l := s.log.With().Str("feed", f.Name).Logger()

 	// create job
-	job := NewRSSJob(f.Name, f.IndexerIdentifier, l, f.URL, s.cacheRepo, s.releaseSvc, f.Timeout)
+	job := NewRSSJob(f.Feed, f.Name, f.IndexerIdentifier, l, f.URL, s.repo, s.cacheRepo, s.releaseSvc, f.Timeout)

+	identifierKey := feedKey{f.Feed.ID, f.Feed.Indexer, f.Feed.Name}.ToString()

 	// schedule job
-	id, err := s.scheduler.AddJob(job, f.CronSchedule, f.IndexerIdentifier)
+	id, err := s.scheduler.AddJob(job, f.CronSchedule, identifierKey)
 	if err != nil {
 		return errors.Wrap(err, "feed.AddRSSJob: add job failed")
 	}
 	job.JobID = id

 	// add to job map
-	s.jobs[f.IndexerIdentifier] = id
+	s.jobs[identifierKey] = id

-	s.log.Debug().Msgf("feed.AddRSSJob: %v", f.Name)
+	s.log.Debug().Msgf("add rss job: %v", f.Name)

 	return nil
 }

-func (s *service) stopRSSJob(indexer string) error {
+func (s *service) stopFeedJob(indexer string) error {
 	// remove job from scheduler
 	if err := s.scheduler.RemoveJobByIdentifier(indexer); err != nil {
-		return errors.Wrap(err, "feed.stopRSSJob: stop job failed")
+		return errors.Wrap(err, "stop job failed")
 	}

-	s.log.Debug().Msgf("feed.stopRSSJob: %v", indexer)
+	s.log.Debug().Msgf("stop feed job: %v", indexer)

 	return nil
 }
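The motivation for the composite key is visible in the old lines: jobs used to be stored under s.jobs[f.IndexerIdentifier], so two feeds on the same indexer would overwrite each other, and stopping one would leave the other unaddressable. A self-contained illustration of that collision (key values invented for the example):

package main

import "fmt"

func main() {
	// Old scheme: one map slot per indexer identifier.
	jobs := map[string]int{}
	jobs["some-indexer"] = 1 // first feed's job ID
	jobs["some-indexer"] = 2 // second feed on the same indexer clobbers it
	fmt.Println(len(jobs))   // 1: the first job ID is gone

	// New scheme: one slot per feed, keyed by id+indexer+name.
	jobs = map[string]int{}
	jobs["1+some-indexer+Feed A"] = 1
	jobs["2+some-indexer+Feed B"] = 2
	fmt.Println(len(jobs)) // 2: both jobs stay addressable
}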
@@ -80,7 +80,7 @@ func (j *TorznabJob) process() error {

 		rls.ParseString(item.Title)

-		if parseFreeleech(item) {
+		if parseFreeleechTorznab(item) {
 			rls.Freeleech = true
 			rls.Bonus = []string{"Freeleech"}
 		}

@@ -100,7 +100,7 @@ func (j *TorznabJob) process() error {
 	return nil
 }

-func parseFreeleech(item torznab.FeedItem) bool {
+func parseFreeleechTorznab(item torznab.FeedItem) bool {
 	for _, attr := range item.Attributes {
 		if attr.Name == "downloadvolumefactor" {
 			if attr.Value == "0" {
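The hunk is cut off inside parseFreeleechTorznab, but the visible lines imply the whole check: an item counts as freeleech when its downloadvolumefactor attribute equals "0". A self-contained sketch with simplified stand-in types (the real FeedItem and its attributes live in the torznab package and are not part of this excerpt):

package feed

// feedAttr and feedItem are simplified stand-ins for torznab.FeedItem's
// attribute structure, just enough to show the check.
type feedAttr struct {
	Name  string
	Value string
}

type feedItem struct {
	Attributes []feedAttr
}

// parseFreeleechTorznab reports whether the item's downloadvolumefactor
// attribute is "0"; anything beyond the lines shown above is assumed.
func parseFreeleechTorznab(item feedItem) bool {
	for _, attr := range item.Attributes {
		if attr.Name == "downloadvolumefactor" && attr.Value == "0" {
			return true
		}
	}
	return false
}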
@@ -200,6 +200,7 @@ func (s *service) ProcessMultiple(releases []*domain.Release) {
 	s.log.Debug().Msgf("process (%v) new releases from feed", len(releases))

 	for _, rls := range releases {
+		rls := rls
 		if rls == nil {
 			continue
 		}
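The single added line, rls := rls, re-declares the range variable so each iteration owns its copy. Before Go 1.22 the loop variable is reused across iterations, so holding its address or capturing it in a goroutine makes every iteration observe the final release. A minimal demonstration of the hazard this guards against (the goroutine here is illustrative; ProcessMultiple's body is not fully shown in this diff):

package main

import (
	"fmt"
	"sync"
)

func main() {
	releases := []string{"rls1", "rls2", "rls3"}

	var wg sync.WaitGroup
	for _, rls := range releases {
		rls := rls // per-iteration copy; without it (pre-Go 1.22) all goroutines may see "rls3"
		wg.Add(1)
		go func() {
			defer wg.Done()
			fmt.Println(rls) // prints rls1, rls2, rls3 in some order
		}()
	}
	wg.Wait()
}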
@ -180,7 +180,7 @@ func TestClient_GetCaps(t *testing.T) {
|
||||||
Name: "HD",
|
Name: "HD",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
ID: "5070",
|
ID: 5070,
|
||||||
Name: "Anime",
|
Name: "Anime",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|
|
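The fixture change from ID: "5070" to ID: 5070 implies the torznab category ID field switched from string to int, which is why this test needed updating. A minimal matching type (the xml struct tags are an assumption; the real definition is not in this excerpt):

package torznab

// Category as the updated test implies it: a numeric ID plus a name.
type Category struct {
	ID   int    `xml:"id,attr"`
	Name string `xml:"name,attr"`
}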
@ -9,6 +9,7 @@ import { componentMapType } from "./DownloadClientForms";
|
||||||
import { sleep } from "../../utils";
|
import { sleep } from "../../utils";
|
||||||
import { useState } from "react";
|
import { useState } from "react";
|
||||||
import { ImplementationBadges } from "../../screens/settings/Indexer";
|
import { ImplementationBadges } from "../../screens/settings/Indexer";
|
||||||
|
import { useFormikContext } from "formik";
|
||||||
|
|
||||||
interface UpdateProps {
|
interface UpdateProps {
|
||||||
isOpen: boolean;
|
isOpen: boolean;
|
||||||
|
@ -24,8 +25,10 @@ interface InitialValues {
|
||||||
name: string;
|
name: string;
|
||||||
url: string;
|
url: string;
|
||||||
api_key: string;
|
api_key: string;
|
||||||
|
cookie: string;
|
||||||
interval: number;
|
interval: number;
|
||||||
timeout: number;
|
timeout: number;
|
||||||
|
max_age: number;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function FeedUpdateForm({ isOpen, toggle, feed }: UpdateProps) {
|
export function FeedUpdateForm({ isOpen, toggle, feed }: UpdateProps) {
|
||||||
|
@ -104,8 +107,10 @@ export function FeedUpdateForm({ isOpen, toggle, feed }: UpdateProps) {
|
||||||
name: feed.name,
|
name: feed.name,
|
||||||
url: feed.url,
|
url: feed.url,
|
||||||
api_key: feed.api_key,
|
api_key: feed.api_key,
|
||||||
|
cookie: feed.cookie || "",
|
||||||
interval: feed.interval,
|
interval: feed.interval,
|
||||||
timeout: feed.timeout
|
timeout: feed.timeout,
|
||||||
|
max_age: feed.max_age
|
||||||
};
|
};
|
||||||
|
|
||||||
return (
|
return (
|
||||||
|
@ -153,7 +158,26 @@ export function FeedUpdateForm({ isOpen, toggle, feed }: UpdateProps) {
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function WarningLabel() {
|
||||||
|
return (
|
||||||
|
<div className="px-4 py-1">
|
||||||
|
<span className="w-full block px-2 py-2 bg-red-300 dark:bg-red-400 text-red-900 dark:text-red-900 text-sm rounded">
|
||||||
|
<span className="font-semibold">
|
||||||
|
Warning: Indexers might ban you for too low interval!
|
||||||
|
</span>
|
||||||
|
<span className="ml-1">
|
||||||
|
Read the indexer rules.
|
||||||
|
</span>
|
||||||
|
</span>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
function FormFieldsTorznab() {
|
function FormFieldsTorznab() {
|
||||||
|
const {
|
||||||
|
values: { interval }
|
||||||
|
} = useFormikContext<InitialValues>();
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="border-t border-gray-200 dark:border-gray-700 py-5">
|
<div className="border-t border-gray-200 dark:border-gray-700 py-5">
|
||||||
<TextFieldWide
|
<TextFieldWide
|
||||||
|
@ -164,14 +188,20 @@ function FormFieldsTorznab() {
|
||||||
|
|
||||||
<PasswordFieldWide name="api_key" label="API key" />
|
<PasswordFieldWide name="api_key" label="API key" />
|
||||||
|
|
||||||
|
{interval < 15 && <WarningLabel />}
|
||||||
<NumberFieldWide name="interval" label="Refresh interval" help="Minutes. Recommended 15-30. Too low and risk ban."/>
|
<NumberFieldWide name="interval" label="Refresh interval" help="Minutes. Recommended 15-30. Too low and risk ban."/>
|
||||||
|
|
||||||
<NumberFieldWide name="timeout" label="Refresh timeout" help="Seconds to wait before cancelling refresh."/>
|
<NumberFieldWide name="timeout" label="Refresh timeout" help="Seconds to wait before cancelling refresh."/>
|
||||||
|
<NumberFieldWide name="max_age" label="Max age" help="Seconds. Will not grab older than this value."/>
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
function FormFieldsRSS() {
|
function FormFieldsRSS() {
|
||||||
|
const {
|
||||||
|
values: { interval }
|
||||||
|
} = useFormikContext<InitialValues>();
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="border-t border-gray-200 dark:border-gray-700 py-5">
|
<div className="border-t border-gray-200 dark:border-gray-700 py-5">
|
||||||
<TextFieldWide
|
<TextFieldWide
|
||||||
|
@ -180,8 +210,12 @@ function FormFieldsRSS() {
|
||||||
help="RSS url"
|
help="RSS url"
|
||||||
/>
|
/>
|
||||||
|
|
||||||
|
{interval < 15 && <WarningLabel />}
|
||||||
<NumberFieldWide name="interval" label="Refresh interval" help="Minutes. Recommended 15-30. Too low and risk ban."/>
|
<NumberFieldWide name="interval" label="Refresh interval" help="Minutes. Recommended 15-30. Too low and risk ban."/>
|
||||||
<NumberFieldWide name="timeout" label="Refresh timeout" help="Seconds to wait before cancelling refresh."/>
|
<NumberFieldWide name="timeout" label="Refresh timeout" help="Seconds to wait before cancelling refresh."/>
|
||||||
|
<NumberFieldWide name="max_age" label="Max age" help="Seconds. Will not grab older than this value."/>
|
||||||
|
|
||||||
|
<PasswordFieldWide name="cookie" label="Cookie" help="Not commonly used" />
|
||||||
</div>
|
</div>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
@@ -3,7 +3,7 @@ import { useMutation, useQuery, useQueryClient } from "react-query";
 import { APIClient } from "../../api/APIClient";
 import { Menu, Switch, Transition } from "@headlessui/react";

-import { classNames } from "../../utils";
+import { classNames, IsEmptyDate, simplifyDate } from "../../utils";
 import { Fragment, useRef, useState } from "react";
 import { toast } from "react-hot-toast";
 import Toast from "../../components/notifications/Toast";

@@ -44,10 +44,10 @@ function FeedSettings() {
           className="col-span-4 px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">Name
         </div>
         <div
-          className="col-span-3 px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">Indexer
+          className="col-span-2 px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">Type
         </div>
         <div
-          className="col-span-3 px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">Type
+          className="col-span-3 px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">Last run
         </div>
         {/*<div className="col-span-4 px-6 py-3 text-left text-xs font-medium text-gray-500 dark:text-gray-400 uppercase tracking-wider">Events</div>*/}
       </li>

@@ -115,15 +115,20 @@ function ListItem({ feed }: ListItemProps) {
         />
       </Switch>
     </div>
-    <div className="col-span-4 flex items-center sm:px-6 text-sm font-medium text-gray-900 dark:text-white">
-      {feed.name}
-    </div>
-    <div className="col-span-3 flex items-center sm:px-6 text-sm font-medium text-gray-900 dark:text-gray-500">
-      {feed.indexer}
+    <div className="col-span-4 flex flex-col sm:px-6 text-sm font-medium text-gray-900 dark:text-white">
+      <span>{feed.name}</span>
+      <span className="text-gray-900 dark:text-gray-500 text-xs">
+        {feed.indexer}
+      </span>
     </div>
     <div className="col-span-2 flex items-center sm:px-6">
       {ImplementationBadges[feed.type.toLowerCase()]}
     </div>
+    <div className="col-span-3 flex items-center sm:px-6 text-sm font-medium text-gray-900 dark:text-gray-500">
+      <span title={simplifyDate(feed.last_run)}>
+        {IsEmptyDate(feed.last_run)}
+      </span>
+    </div>
     <div className="col-span-1 flex justify-center items-center sm:px-6">
       <FeedItemDropdown
         feed={feed}
web/src/types/Feed.d.ts (vendored, 4 changes)
@@ -7,7 +7,11 @@ interface Feed {
   url: string;
   interval: number;
   timeout: number;
+  max_age: number;
   api_key: string;
+  cookie: string;
+  last_run: string;
+  last_run_data: string;
   created_at: Date;
   updated_at: Date;
 }
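The four new interface fields mirror what the backend now persists for a feed. For orientation, a hedged sketch of how a Go-side domain struct could line up with this interface; the json tag names follow the TypeScript fields above, while the field order and the exact Go types for last_run and last_run_data are assumptions:

package domain

import "time"

// Feed is a sketch of the backend struct implied by the interface above.
type Feed struct {
	ID          int       `json:"id"`
	Name        string    `json:"name"`
	Indexer     string    `json:"indexer"`
	Type        string    `json:"type"`
	Enabled     bool      `json:"enabled"`
	URL         string    `json:"url"`
	Interval    int       `json:"interval"`
	Timeout     int       `json:"timeout"`
	MaxAge      int       `json:"max_age"`
	ApiKey      string    `json:"api_key"`
	Cookie      string    `json:"cookie"`
	LastRun     time.Time `json:"last_run"`
	LastRunData string    `json:"last_run_data"`
	CreatedAt   time.Time `json:"created_at"`
	UpdatedAt   time.Time `json:"updated_at"`
}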