mirror of
https://github.com/idanoo/autobrr
synced 2025-07-23 08:49:13 +00:00
feat(feeds): improve RSS (#502)
* feat(feeds): improve rss
* save last_run time
* remove interval check
* refactor feed job keys
* add rss test
* add max_age check
* feat(feeds): rss basic freeleech parsing
* feat(feeds): rss cookie support
* feat(feeds): db get max_age
* feat(feeds): update log messages
* feat(feeds): pass cookie to release for download
* feat(feeds): improve size parsing
* feat(feeds): improve datetime check
This commit is contained in:
parent
ac988f28f4
commit
e2bb14afa4
15 changed files with 741 additions and 209 deletions
|
@ -35,7 +35,9 @@ func (r *FeedRepo) FindByID(ctx context.Context, id int) (*domain.Feed, error) {
|
|||
"url",
|
||||
"interval",
|
||||
"timeout",
|
||||
"max_age",
|
||||
"api_key",
|
||||
"cookie",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
).
|
||||
|
@ -54,14 +56,15 @@ func (r *FeedRepo) FindByID(ctx context.Context, id int) (*domain.Feed, error) {
|
|||
|
||||
var f domain.Feed
|
||||
|
||||
var apiKey sql.NullString
|
||||
var apiKey, cookie sql.NullString
|
||||
|
||||
if err := row.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &apiKey, &f.CreatedAt, &f.UpdatedAt); err != nil {
|
||||
if err := row.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &f.MaxAge, &apiKey, &cookie, &f.CreatedAt, &f.UpdatedAt); err != nil {
|
||||
return nil, errors.Wrap(err, "error scanning row")
|
||||
|
||||
}
|
||||
|
||||
f.ApiKey = apiKey.String
|
||||
f.Cookie = cookie.String
|
||||
|
||||
return &f, nil
|
||||
}
|
||||
|
@ -77,7 +80,9 @@ func (r *FeedRepo) FindByIndexerIdentifier(ctx context.Context, indexer string)
|
|||
"url",
|
||||
"interval",
|
||||
"timeout",
|
||||
"max_age",
|
||||
"api_key",
|
||||
"cookie",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
).
|
||||
|
@ -96,14 +101,15 @@ func (r *FeedRepo) FindByIndexerIdentifier(ctx context.Context, indexer string)
|
|||
|
||||
var f domain.Feed
|
||||
|
||||
var apiKey sql.NullString
|
||||
var apiKey, cookie sql.NullString
|
||||
|
||||
if err := row.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &apiKey, &f.CreatedAt, &f.UpdatedAt); err != nil {
|
||||
if err := row.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &f.MaxAge, &apiKey, &cookie, &f.CreatedAt, &f.UpdatedAt); err != nil {
|
||||
return nil, errors.Wrap(err, "error scanning row")
|
||||
|
||||
}
|
||||
|
||||
f.ApiKey = apiKey.String
|
||||
f.Cookie = cookie.String
|
||||
|
||||
return &f, nil
|
||||
}
|
||||
|
@ -119,7 +125,11 @@ func (r *FeedRepo) Find(ctx context.Context) ([]domain.Feed, error) {
|
|||
"url",
|
||||
"interval",
|
||||
"timeout",
|
||||
"max_age",
|
||||
"api_key",
|
||||
"cookie",
|
||||
"last_run",
|
||||
"last_run_data",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
).
|
||||
|
@ -142,14 +152,17 @@ func (r *FeedRepo) Find(ctx context.Context) ([]domain.Feed, error) {
|
|||
for rows.Next() {
|
||||
var f domain.Feed
|
||||
|
||||
var apiKey sql.NullString
|
||||
var apiKey, cookie, lastRunData sql.NullString
|
||||
var lastRun sql.NullTime
|
||||
|
||||
if err := rows.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &apiKey, &f.CreatedAt, &f.UpdatedAt); err != nil {
|
||||
if err := rows.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &f.MaxAge, &apiKey, &cookie, &lastRun, &lastRunData, &f.CreatedAt, &f.UpdatedAt); err != nil {
|
||||
return nil, errors.Wrap(err, "error scanning row")
|
||||
|
||||
}
|
||||
|
||||
f.LastRun = lastRun.Time
|
||||
f.LastRunData = lastRunData.String
|
||||
f.ApiKey = apiKey.String
|
||||
f.Cookie = cookie.String
|
||||
|
||||
feeds = append(feeds, f)
|
||||
}
|
||||
|
@ -205,7 +218,10 @@ func (r *FeedRepo) Update(ctx context.Context, feed *domain.Feed) error {
|
|||
Set("url", feed.URL).
|
||||
Set("interval", feed.Interval).
|
||||
Set("timeout", feed.Timeout).
|
||||
Set("max_age", feed.MaxAge).
|
||||
Set("api_key", feed.ApiKey).
|
||||
Set("cookie", feed.Cookie).
|
||||
Set("updated_at", sq.Expr("CURRENT_TIMESTAMP")).
|
||||
Where("id = ?", feed.ID)
|
||||
|
||||
query, args, err := queryBuilder.ToSql()
|
||||
|
@ -221,6 +237,45 @@ func (r *FeedRepo) Update(ctx context.Context, feed *domain.Feed) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (r *FeedRepo) UpdateLastRun(ctx context.Context, feedID int) error {
|
||||
queryBuilder := r.db.squirrel.
|
||||
Update("feed").
|
||||
Set("last_run", sq.Expr("CURRENT_TIMESTAMP")).
|
||||
Where("id = ?", feedID)
|
||||
|
||||
query, args, err := queryBuilder.ToSql()
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "error building query")
|
||||
}
|
||||
|
||||
_, err = r.db.handler.ExecContext(ctx, query, args...)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "error executing query")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *FeedRepo) UpdateLastRunWithData(ctx context.Context, feedID int, data string) error {
|
||||
queryBuilder := r.db.squirrel.
|
||||
Update("feed").
|
||||
Set("last_run", sq.Expr("CURRENT_TIMESTAMP")).
|
||||
Set("last_run_data", data).
|
||||
Where("id = ?", feedID)
|
||||
|
||||
query, args, err := queryBuilder.ToSql()
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "error building query")
|
||||
}
|
||||
|
||||
_, err = r.db.handler.ExecContext(ctx, query, args...)
|
||||
if err != nil {
|
||||
return errors.Wrap(err, "error executing query")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (r *FeedRepo) ToggleEnabled(ctx context.Context, id int, enabled bool) error {
|
||||
var err error
|
||||
|
||||
|
|
|
@ -55,6 +55,74 @@ func (r *FeedCacheRepo) Get(bucket string, key string) ([]byte, error) {
|
|||
return value, nil
|
||||
}
|
||||
|
||||
func (r *FeedCacheRepo) GetByBucket(ctx context.Context, bucket string) ([]domain.FeedCacheItem, error) {
|
||||
queryBuilder := r.db.squirrel.
|
||||
Select(
|
||||
"bucket",
|
||||
"key",
|
||||
"value",
|
||||
"ttl",
|
||||
).
|
||||
From("feed_cache").
|
||||
Where("bucket = ?", bucket)
|
||||
|
||||
query, args, err := queryBuilder.ToSql()
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error building query")
|
||||
}
|
||||
|
||||
rows, err := r.db.handler.QueryContext(ctx, query, args...)
|
||||
if err != nil {
|
||||
return nil, errors.Wrap(err, "error executing query")
|
||||
}
|
||||
|
||||
defer rows.Close()
|
||||
|
||||
var data []domain.FeedCacheItem
|
||||
|
||||
for rows.Next() {
|
||||
var d domain.FeedCacheItem
|
||||
|
||||
if err := rows.Scan(&d.Bucket, &d.Key, &d.Value, &d.TTL); err != nil {
|
||||
return nil, errors.Wrap(err, "error scanning row")
|
||||
}
|
||||
|
||||
data = append(data, d)
|
||||
}
|
||||
|
||||
if err := rows.Err(); err != nil {
|
||||
return nil, errors.Wrap(err, "row error")
|
||||
}
|
||||
|
||||
return data, nil
|
||||
}
|
||||
|
||||
func (r *FeedCacheRepo) GetCountByBucket(ctx context.Context, bucket string) (int, error) {
|
||||
|
||||
queryBuilder := r.db.squirrel.
|
||||
Select("COUNT(*)").
|
||||
From("feed_cache").
|
||||
Where("bucket = ?", bucket)
|
||||
|
||||
query, args, err := queryBuilder.ToSql()
|
||||
if err != nil {
|
||||
return 0, errors.Wrap(err, "error building query")
|
||||
}
|
||||
|
||||
row := r.db.handler.QueryRowContext(ctx, query, args...)
|
||||
if err != nil {
|
||||
return 0, errors.Wrap(err, "error executing query")
|
||||
}
|
||||
|
||||
var count = 0
|
||||
|
||||
if err := row.Scan(&count); err != nil {
|
||||
return 0, errors.Wrap(err, "error scanning row")
|
||||
}
|
||||
|
||||
return count, nil
|
||||
}
|
||||
|
||||
func (r *FeedCacheRepo) Exists(bucket string, key string) (bool, error) {
|
||||
queryBuilder := r.db.squirrel.
|
||||
Select("1").
|
||||
|
|
|
@ -291,21 +291,25 @@ CREATE TABLE notification
|
|||
|
||||
CREATE TABLE feed
|
||||
(
|
||||
id SERIAL PRIMARY KEY,
|
||||
indexer TEXT,
|
||||
name TEXT,
|
||||
type TEXT,
|
||||
enabled BOOLEAN,
|
||||
url TEXT,
|
||||
interval INTEGER,
|
||||
timeout INTEGER DEFAULT 60,
|
||||
categories TEXT [] DEFAULT '{}' NOT NULL,
|
||||
capabilities TEXT [] DEFAULT '{}' NOT NULL,
|
||||
api_key TEXT,
|
||||
settings TEXT,
|
||||
indexer_id INTEGER,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
id SERIAL PRIMARY KEY,
|
||||
indexer TEXT,
|
||||
name TEXT,
|
||||
type TEXT,
|
||||
enabled BOOLEAN,
|
||||
url TEXT,
|
||||
interval INTEGER,
|
||||
timeout INTEGER DEFAULT 60,
|
||||
max_age INTEGER DEFAULT 3600,
|
||||
categories TEXT [] DEFAULT '{}' NOT NULL,
|
||||
capabilities TEXT [] DEFAULT '{}' NOT NULL,
|
||||
api_key TEXT,
|
||||
cookie TEXT,
|
||||
settings TEXT,
|
||||
indexer_id INTEGER,
|
||||
last_run TIMESTAMP,
|
||||
last_run_data TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (indexer_id) REFERENCES indexer(id) ON DELETE SET NULL
|
||||
);
|
||||
|
||||
|
@ -561,4 +565,16 @@ CREATE INDEX indexer_identifier_index
|
|||
`ALTER TABLE feed
|
||||
ADD COLUMN timeout INTEGER DEFAULT 60;
|
||||
`,
|
||||
`ALTER TABLE feed
|
||||
ADD COLUMN max_age INTEGER DEFAULT 3600;
|
||||
|
||||
ALTER TABLE feed
|
||||
ADD COLUMN last_run TIMESTAMP;
|
||||
|
||||
ALTER TABLE feed
|
||||
ADD COLUMN last_run_data TEXT;
|
||||
|
||||
ALTER TABLE feed
|
||||
ADD COLUMN cookie TEXT;
|
||||
`,
|
||||
}
|
||||
|
|
|
@ -274,21 +274,25 @@ CREATE TABLE notification
|
|||
|
||||
CREATE TABLE feed
|
||||
(
|
||||
id INTEGER PRIMARY KEY,
|
||||
indexer TEXT,
|
||||
name TEXT,
|
||||
type TEXT,
|
||||
enabled BOOLEAN,
|
||||
url TEXT,
|
||||
interval INTEGER,
|
||||
timeout INTEGER DEFAULT 60,
|
||||
categories TEXT [] DEFAULT '{}' NOT NULL,
|
||||
capabilities TEXT [] DEFAULT '{}' NOT NULL,
|
||||
api_key TEXT,
|
||||
settings TEXT,
|
||||
indexer_id INTEGER,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
id INTEGER PRIMARY KEY,
|
||||
indexer TEXT,
|
||||
name TEXT,
|
||||
type TEXT,
|
||||
enabled BOOLEAN,
|
||||
url TEXT,
|
||||
interval INTEGER,
|
||||
timeout INTEGER DEFAULT 60,
|
||||
max_age INTEGER DEFAULT 3600,
|
||||
categories TEXT [] DEFAULT '{}' NOT NULL,
|
||||
capabilities TEXT [] DEFAULT '{}' NOT NULL,
|
||||
api_key TEXT,
|
||||
cookie TEXT,
|
||||
settings TEXT,
|
||||
indexer_id INTEGER,
|
||||
last_run TIMESTAMP,
|
||||
last_run_data TEXT,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (indexer_id) REFERENCES indexer(id) ON DELETE SET NULL
|
||||
);
|
||||
|
||||
|
@ -881,4 +885,16 @@ CREATE INDEX indexer_identifier_index
|
|||
`ALTER TABLE feed
|
||||
ADD COLUMN timeout INTEGER DEFAULT 60;
|
||||
`,
|
||||
`ALTER TABLE feed
|
||||
ADD COLUMN max_age INTEGER DEFAULT 3600;
|
||||
|
||||
ALTER TABLE feed
|
||||
ADD COLUMN last_run TIMESTAMP;
|
||||
|
||||
ALTER TABLE feed
|
||||
ADD COLUMN last_run_data TEXT;
|
||||
|
||||
ALTER TABLE feed
|
||||
ADD COLUMN cookie TEXT;
|
||||
`,
|
||||
}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue