feat: add torznab feed support (#246)

* feat(torznab): initial impl

* feat: torznab processing

* feat: torznab more scheduling

* feat: feeds web

* feat(feeds): create on indexer create

* feat(feeds): update migration

* feat(feeds): restart on update

* feat(feeds): set cron schedule

* feat(feeds): use basic empty state

* chore: remove duplicate migrations

* feat: parse release size from torznab

* chore: cleanup unused code
This commit is contained in:
Ludvig Lundgren 2022-04-25 12:58:54 +02:00 committed by GitHub
parent d4d864cd2c
commit bb62e724a1
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
34 changed files with 2408 additions and 361 deletions

270
internal/database/feed.go Normal file
View file

@ -0,0 +1,270 @@
package database
import (
"context"
"database/sql"
"github.com/autobrr/autobrr/internal/domain"
sq "github.com/Masterminds/squirrel"
"github.com/rs/zerolog/log"
)
// NewFeedRepo returns a FeedRepo backed by the given database handle,
// exposed as the domain.FeedRepo interface.
func NewFeedRepo(db *DB) domain.FeedRepo {
	return &FeedRepo{db: db}
}
// FeedRepo is the SQL-backed implementation of domain.FeedRepo.
// It builds queries with squirrel and executes them through the shared DB handler.
type FeedRepo struct {
	db *DB
}
// FindByID returns the feed row with the given id. The nullable api_key
// column is mapped to the empty string when NULL. Errors from building,
// executing, or scanning the query are logged and returned.
func (r *FeedRepo) FindByID(ctx context.Context, id int) (*domain.Feed, error) {
	query, args, err := r.db.squirrel.
		Select("id", "indexer", "name", "type", "enabled", "url", "interval", "api_key", "created_at", "updated_at").
		From("feed").
		Where("id = ?", id).
		ToSql()
	if err != nil {
		log.Error().Stack().Err(err).Msg("feed.FindById: error building query")
		return nil, err
	}

	row := r.db.handler.QueryRowContext(ctx, query, args...)
	if err := row.Err(); err != nil {
		log.Error().Stack().Err(err).Msg("feed.FindById: error executing query")
		return nil, err
	}

	var feed domain.Feed
	var apiKey sql.NullString

	if err := row.Scan(&feed.ID, &feed.Indexer, &feed.Name, &feed.Type, &feed.Enabled, &feed.URL, &feed.Interval, &apiKey, &feed.CreatedAt, &feed.UpdatedAt); err != nil {
		log.Error().Stack().Err(err).Msg("feed.FindById: error scanning row")
		return nil, err
	}

	feed.ApiKey = apiKey.String

	return &feed, nil
}
// FindByIndexerIdentifier returns the feed row whose indexer column matches
// the given identifier. A NULL api_key is mapped to the empty string.
func (r *FeedRepo) FindByIndexerIdentifier(ctx context.Context, indexer string) (*domain.Feed, error) {
	query, args, err := r.db.squirrel.
		Select("id", "indexer", "name", "type", "enabled", "url", "interval", "api_key", "created_at", "updated_at").
		From("feed").
		Where("indexer = ?", indexer).
		ToSql()
	if err != nil {
		log.Error().Stack().Err(err).Msg("feed.FindByIndexerIdentifier: error building query")
		return nil, err
	}

	row := r.db.handler.QueryRowContext(ctx, query, args...)
	if err := row.Err(); err != nil {
		log.Error().Stack().Err(err).Msg("feed.FindByIndexerIdentifier: error executing query")
		return nil, err
	}

	var feed domain.Feed
	var apiKey sql.NullString

	if err := row.Scan(&feed.ID, &feed.Indexer, &feed.Name, &feed.Type, &feed.Enabled, &feed.URL, &feed.Interval, &apiKey, &feed.CreatedAt, &feed.UpdatedAt); err != nil {
		log.Error().Stack().Err(err).Msg("feed.FindByIndexerIdentifier: error scanning row")
		return nil, err
	}

	feed.ApiKey = apiKey.String

	return &feed, nil
}
// Find returns all feed rows ordered by name. It always returns a non-nil
// (possibly empty) slice on success; NULL api_key values become empty strings.
func (r *FeedRepo) Find(ctx context.Context) ([]domain.Feed, error) {
	query, args, err := r.db.squirrel.
		Select("id", "indexer", "name", "type", "enabled", "url", "interval", "api_key", "created_at", "updated_at").
		From("feed").
		OrderBy("name ASC").
		ToSql()
	if err != nil {
		log.Error().Stack().Err(err).Msg("feed.Find: error building query")
		return nil, err
	}

	rows, err := r.db.handler.QueryContext(ctx, query, args...)
	if err != nil {
		log.Error().Stack().Err(err).Msg("feed.Find: error executing query")
		return nil, err
	}
	defer rows.Close()

	feeds := make([]domain.Feed, 0)
	for rows.Next() {
		var feed domain.Feed
		var apiKey sql.NullString

		if err := rows.Scan(&feed.ID, &feed.Indexer, &feed.Name, &feed.Type, &feed.Enabled, &feed.URL, &feed.Interval, &apiKey, &feed.CreatedAt, &feed.UpdatedAt); err != nil {
			log.Error().Stack().Err(err).Msg("feed.Find: error scanning row")
			return nil, err
		}

		feed.ApiKey = apiKey.String
		feeds = append(feeds, feed)
	}

	return feeds, nil
}
// Store inserts the feed and writes the database-generated id back into
// feed.ID via an INSERT ... RETURNING id.
func (r *FeedRepo) Store(ctx context.Context, feed *domain.Feed) error {
	var id int

	err := r.db.squirrel.
		Insert("feed").
		Columns("name", "indexer", "type", "enabled", "url", "interval", "api_key", "indexer_id").
		Values(feed.Name, feed.Indexer, feed.Type, feed.Enabled, feed.URL, feed.Interval, feed.ApiKey, feed.IndexerID).
		Suffix("RETURNING id").
		RunWith(r.db.handler).
		QueryRowContext(ctx).
		Scan(&id)
	if err != nil {
		log.Error().Stack().Err(err).Msg("feed.Store: error executing query")
		return err
	}

	feed.ID = id

	return nil
}
// Update persists all mutable columns of the given feed, keyed by feed.ID.
// It also bumps updated_at, keeping the column in sync the same way
// ToggleEnabled does (otherwise updated_at would only ever reflect creation
// or toggle time, not edits).
func (r *FeedRepo) Update(ctx context.Context, feed *domain.Feed) error {
	queryBuilder := r.db.squirrel.
		Update("feed").
		Set("name", feed.Name).
		Set("indexer", feed.Indexer).
		Set("type", feed.Type).
		Set("enabled", feed.Enabled).
		Set("url", feed.URL).
		Set("interval", feed.Interval).
		Set("api_key", feed.ApiKey).
		Set("indexer_id", feed.IndexerID).
		Set("updated_at", sq.Expr("CURRENT_TIMESTAMP")).
		Where("id = ?", feed.ID)

	query, args, err := queryBuilder.ToSql()
	if err != nil {
		log.Error().Stack().Err(err).Msg("feed.Update: error building query")
		return err
	}

	// Note: a zero-row update (unknown id) is not treated as an error here,
	// matching the behavior of the other write methods in this repo.
	_, err = r.db.handler.ExecContext(ctx, query, args...)
	if err != nil {
		log.Error().Stack().Err(err).Msg("feed.Update: error executing query")
		return err
	}

	return nil
}
// ToggleEnabled sets the enabled flag for the feed with the given id and
// bumps its updated_at timestamp.
//
// Fix: dropped the redundant `var err error` declaration — the subsequent
// `query, args, err := ...` short declaration already introduces err.
func (r *FeedRepo) ToggleEnabled(ctx context.Context, id int, enabled bool) error {
	queryBuilder := r.db.squirrel.
		Update("feed").
		Set("enabled", enabled).
		Set("updated_at", sq.Expr("CURRENT_TIMESTAMP")).
		Where("id = ?", id)

	query, args, err := queryBuilder.ToSql()
	if err != nil {
		log.Error().Stack().Err(err).Msg("feed.ToggleEnabled: error building query")
		return err
	}

	_, err = r.db.handler.ExecContext(ctx, query, args...)
	if err != nil {
		log.Error().Stack().Err(err).Msg("feed.ToggleEnabled: error executing query")
		return err
	}

	return nil
}
// Delete removes the feed row with the given id and logs the deletion.
// Deleting a non-existent id is not an error (zero rows affected).
func (r *FeedRepo) Delete(ctx context.Context, id int) error {
	query, args, err := r.db.squirrel.
		Delete("feed").
		Where("id = ?", id).
		ToSql()
	if err != nil {
		log.Error().Stack().Err(err).Msg("feed.delete: error building query")
		return err
	}

	if _, err = r.db.handler.ExecContext(ctx, query, args...); err != nil {
		log.Error().Stack().Err(err).Msg("feed.delete: error executing query")
		return err
	}

	log.Info().Msgf("feed.delete: successfully deleted: %v", id)

	return nil
}

View file

@ -0,0 +1,103 @@
package database
import (
"database/sql"
"time"
"github.com/rs/zerolog/log"
"github.com/autobrr/autobrr/internal/domain"
)
// FeedCacheRepo is the SQL-backed implementation of domain.FeedCacheRepo.
// It stores feed cache entries keyed by (bucket, key) with a TTL column.
type FeedCacheRepo struct {
	db *DB
}
// NewFeedCacheRepo returns a FeedCacheRepo backed by the given database
// handle, exposed as the domain.FeedCacheRepo interface.
func NewFeedCacheRepo(db *DB) domain.FeedCacheRepo {
	return &FeedCacheRepo{db: db}
}
// Get returns the cached value for (bucket, key) if it exists and its TTL
// has not expired, or nil (with a nil error) when no live entry exists.
//
// Fix: the ttl column is declared TIMESTAMP in the schema, but the original
// scanned it into a time.Duration; database drivers hand TIMESTAMP back as
// time.Time (or a string), so the Scan would fail for every cache hit.
// Scan into time.Time instead. The scanned ttl itself is unused — expiry is
// enforced by the `ttl > ?` predicate in the query.
func (r *FeedCacheRepo) Get(bucket string, key string) ([]byte, error) {
	queryBuilder := r.db.squirrel.
		Select(
			"value",
			"ttl",
		).
		From("feed_cache").
		Where("bucket = ?", bucket).
		Where("key = ?", key).
		Where("ttl > ?", time.Now())

	query, args, err := queryBuilder.ToSql()
	if err != nil {
		log.Error().Stack().Err(err).Msg("feedCache.Get: error building query")
		return nil, err
	}

	row := r.db.handler.QueryRow(query, args...)
	if err := row.Err(); err != nil {
		log.Error().Stack().Err(err).Msg("feedCache.Get: query error")
		return nil, err
	}

	var value []byte
	var ttl time.Time

	// ErrNoRows is a cache miss, not a failure: return nil value, nil error.
	if err := row.Scan(&value, &ttl); err != nil && err != sql.ErrNoRows {
		log.Error().Stack().Err(err).Msg("feedCache.Get: error scanning row")
		return nil, err
	}

	return value, nil
}
// Exists reports whether a cache entry with the given (bucket, key) exists,
// regardless of its TTL.
//
// Fix: the original logged a query error but then returned `exists, nil`,
// silently reporting "not exists" to the caller on real database failures.
// Propagate the error instead (ErrNoRows is still treated as "not exists").
func (r *FeedCacheRepo) Exists(bucket string, key string) (bool, error) {
	queryBuilder := r.db.squirrel.
		Select("1").
		Prefix("SELECT EXISTS (").
		From("feed_cache").
		Where("bucket = ?", bucket).
		Where("key = ?", key).
		Suffix(")")

	query, args, err := queryBuilder.ToSql()
	if err != nil {
		log.Error().Stack().Err(err).Msg("feedCache.Exists: error building query")
		return false, err
	}

	var exists bool
	err = r.db.handler.QueryRow(query, args...).Scan(&exists)
	if err != nil && err != sql.ErrNoRows {
		log.Error().Stack().Err(err).Msg("feedCache.Exists: query error")
		return false, err
	}

	return exists, nil
}
// Put inserts a cache entry for (bucket, key) with the given value and TTL.
// Note: this is a plain INSERT; it does not upsert an existing (bucket, key).
func (r *FeedCacheRepo) Put(bucket string, key string, val []byte, ttl time.Duration) error {
	query, args, err := r.db.squirrel.
		Insert("feed_cache").
		Columns("bucket", "key", "value", "ttl").
		Values(bucket, key, val, ttl).
		ToSql()
	if err != nil {
		log.Error().Stack().Err(err).Msg("feedCache.Put: error building query")
		return err
	}

	if _, err = r.db.handler.Exec(query, args...); err != nil {
		log.Error().Stack().Err(err).Msg("feedCache.Put: error executing query")
		return err
	}

	return nil
}
// Delete removes the cache entry for (bucket, key). Deleting a non-existent
// entry is not an error (zero rows affected).
//
// Fix: replaced the `panic("implement me")` stub with a real implementation,
// mirroring the query style of the other methods in this repo.
func (r *FeedCacheRepo) Delete(bucket string, key string) error {
	query, args, err := r.db.squirrel.
		Delete("feed_cache").
		Where("bucket = ?", bucket).
		Where("key = ?", key).
		ToSql()
	if err != nil {
		log.Error().Stack().Err(err).Msg("feedCache.Delete: error building query")
		return err
	}

	if _, err = r.db.handler.Exec(query, args...); err != nil {
		log.Error().Stack().Err(err).Msg("feedCache.Delete: error executing query")
		return err
	}

	return nil
}

View file

@ -2,6 +2,7 @@ package database
import (
"context"
"database/sql"
"encoding/json"
"time"
@ -28,8 +29,8 @@ func (r *IndexerRepo) Store(ctx context.Context, indexer domain.Indexer) (*domai
}
queryBuilder := r.db.squirrel.
Insert("indexer").Columns("enabled", "name", "identifier", "settings").
Values(indexer.Enabled, indexer.Name, indexer.Identifier, settings).
Insert("indexer").Columns("enabled", "name", "identifier", "implementation", "settings").
Values(indexer.Enabled, indexer.Name, indexer.Identifier, indexer.Implementation, settings).
Suffix("RETURNING id").RunWith(r.db.handler)
// return values
@ -77,7 +78,7 @@ func (r *IndexerRepo) Update(ctx context.Context, indexer domain.Indexer) (*doma
}
func (r *IndexerRepo) List(ctx context.Context) ([]domain.Indexer, error) {
rows, err := r.db.handler.QueryContext(ctx, "SELECT id, enabled, name, identifier, settings FROM indexer ORDER BY name ASC")
rows, err := r.db.handler.QueryContext(ctx, "SELECT id, enabled, name, identifier, implementation, settings FROM indexer ORDER BY name ASC")
if err != nil {
log.Error().Stack().Err(err).Msg("indexer.list: error query indexer")
return nil, err
@ -89,14 +90,17 @@ func (r *IndexerRepo) List(ctx context.Context) ([]domain.Indexer, error) {
for rows.Next() {
var f domain.Indexer
var implementation sql.NullString
var settings string
var settingsMap map[string]string
if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &f.Identifier, &settings); err != nil {
if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &f.Identifier, &implementation, &settings); err != nil {
log.Error().Stack().Err(err).Msg("indexer.list: error scanning data to struct")
return nil, err
}
f.Implementation = implementation.String
err = json.Unmarshal([]byte(settings), &settingsMap)
if err != nil {
log.Error().Stack().Err(err).Msg("indexer.list: error unmarshal settings")

View file

@ -13,13 +13,14 @@ CREATE TABLE users
CREATE TABLE indexer
(
id INTEGER PRIMARY KEY,
identifier TEXT,
enabled BOOLEAN,
name TEXT NOT NULL,
settings TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
id INTEGER PRIMARY KEY,
identifier TEXT,
implementation TEXT,
enabled BOOLEAN,
name TEXT NOT NULL,
settings TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
UNIQUE (identifier)
);
@ -241,6 +242,33 @@ CREATE TABLE notification
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE feed
(
id INTEGER PRIMARY KEY,
indexer TEXT,
name TEXT,
type TEXT,
enabled BOOLEAN,
url TEXT,
interval INTEGER,
categories TEXT [] DEFAULT '{}' NOT NULL,
capabilities TEXT [] DEFAULT '{}' NOT NULL,
api_key TEXT,
settings TEXT,
indexer_id INTEGER,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (indexer_id) REFERENCES indexer(id) ON DELETE SET NULL
);
CREATE TABLE feed_cache
(
bucket TEXT,
key TEXT,
value TEXT,
ttl TIMESTAMP
);
`
var sqliteMigrations = []string{
@ -535,6 +563,38 @@ ALTER TABLE release_action_status_dg_tmp
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
`,
`
CREATE TABLE feed
(
id INTEGER PRIMARY KEY,
indexer TEXT,
name TEXT,
type TEXT,
enabled BOOLEAN,
url TEXT,
interval INTEGER,
categories TEXT [] DEFAULT '{}' NOT NULL,
capabilities TEXT [] DEFAULT '{}' NOT NULL,
api_key TEXT,
settings TEXT,
indexer_id INTEGER,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (indexer_id) REFERENCES indexer(id) ON DELETE SET NULL
);
CREATE TABLE feed_cache
(
bucket TEXT,
key TEXT,
value TEXT,
ttl TIMESTAMP
);
`,
`
ALTER TABLE indexer
ADD COLUMN implementation TEXT;
`,
}
const postgresSchema = `
@ -550,13 +610,14 @@ CREATE TABLE users
CREATE TABLE indexer
(
id SERIAL PRIMARY KEY,
identifier TEXT,
enabled BOOLEAN,
name TEXT NOT NULL,
settings TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
id SERIAL PRIMARY KEY,
identifier TEXT,
implementation TEXT,
enabled BOOLEAN,
name TEXT NOT NULL,
settings TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
UNIQUE (identifier)
);
@ -778,6 +839,33 @@ CREATE TABLE notification
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE feed
(
id SERIAL PRIMARY KEY,
indexer TEXT,
name TEXT,
type TEXT,
enabled BOOLEAN,
url TEXT,
interval INTEGER,
categories TEXT [] DEFAULT '{}' NOT NULL,
capabilities TEXT [] DEFAULT '{}' NOT NULL,
api_key TEXT,
settings TEXT,
indexer_id INTEGER,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (indexer_id) REFERENCES indexer(id) ON DELETE SET NULL
);
CREATE TABLE feed_cache
(
bucket TEXT,
key TEXT,
value TEXT,
ttl TIMESTAMP
);
`
var postgresMigrations = []string{
@ -806,4 +894,36 @@ var postgresMigrations = []string{
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
`,
`
CREATE TABLE feed
(
id SERIAL PRIMARY KEY,
indexer TEXT,
name TEXT,
type TEXT,
enabled BOOLEAN,
url TEXT,
interval INTEGER,
categories TEXT [] DEFAULT '{}' NOT NULL,
capabilities TEXT [] DEFAULT '{}' NOT NULL,
api_key TEXT,
settings TEXT,
indexer_id INTEGER,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (indexer_id) REFERENCES indexer(id) ON DELETE SET NULL
);
CREATE TABLE feed_cache
(
bucket TEXT,
key TEXT,
value TEXT,
ttl TIMESTAMP
);
`,
`
ALTER TABLE indexer
ADD COLUMN implementation TEXT;
`,
}