fix(feeds): ttl and correct field types (#259)

Ludvig Lundgren 2022-05-01 16:09:00 +02:00 committed by GitHub
parent 5d032dd075
commit b7d1f216c0
6 changed files with 19 additions and 8 deletions

View file

@@ -206,7 +206,6 @@ func (r *FeedRepo) Update(ctx context.Context, feed *domain.Feed) error {
Set("url", feed.URL).
Set("interval", feed.Interval).
Set("api_key", feed.ApiKey).
Set("indexer_id", feed.IndexerID).
Where("id = ?", feed.ID)
query, args, err := queryBuilder.ToSql()

View file

@@ -77,7 +77,7 @@ func (r *FeedCacheRepo) Exists(bucket string, key string) (bool, error) {
return exists, nil
}
-func (r *FeedCacheRepo) Put(bucket string, key string, val []byte, ttl time.Duration) error {
+func (r *FeedCacheRepo) Put(bucket string, key string, val []byte, ttl time.Time) error {
queryBuilder := r.db.squirrel.
Insert("feed_cache").
Columns("bucket", "key", "value", "ttl").

View file

@@ -644,6 +644,10 @@ ALTER TABLE release_action_status_dg_tmp
ALTER TABLE "filter"
ADD COLUMN except_other TEXT [] DEFAULT '{}';
`,
+`
+ALTER TABLE release
+RENAME COLUMN "group" TO "release_group";
+`,
}
const postgresSchema = `
@@ -817,7 +821,7 @@ CREATE TABLE "release"
group_id TEXT,
torrent_id TEXT,
torrent_name TEXT,
-size INTEGER,
+size BIGINT,
raw TEXT,
title TEXT,
category TEXT,
@@ -1040,4 +1044,11 @@ var postgresMigrations = []string{
ALTER TABLE "filter"
ADD COLUMN except_other TEXT [] DEFAULT '{}';
`,
+`
+ALTER TABLE release
+RENAME COLUMN "group" TO "release_group";
+ALTER TABLE release
+ALTER COLUMN size TYPE BIGINT USING size::BIGINT;
+`,
}
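
Two things worth noting about these migrations: group is a reserved SQL keyword, so the old column has to be double-quoted when it is renamed, and the new name release_group avoids that quoting in every later query; size moves to BIGINT because a 32-bit INTEGER tops out at 2147483647, roughly 2 GiB, which is far too small for release sizes in bytes. A tiny illustrative Go snippet (not part of this commit) showing the boundary:

package main

import (
	"fmt"
	"math"
)

func main() {
	var size int64 = 40 << 30 // a 40 GiB release, measured in bytes
	fmt.Println(size > math.MaxInt32)                        // true: does not fit in a 32-bit INTEGER column
	fmt.Printf("%.2f GiB\n", float64(math.MaxInt32)/(1<<30)) // 2.00 -- the ceiling of a 32-bit INTEGER
}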

View file

@@ -25,7 +25,7 @@ func (repo *ReleaseRepo) Store(ctx context.Context, r *domain.Release) (*domain.
queryBuilder := repo.db.squirrel.
Insert("release").
Columns("filter_status", "rejections", "indexer", "filter", "protocol", "implementation", "timestamp", "group_id", "torrent_id", "torrent_name", "size", "title", "category", "season", "episode", "year", "resolution", "source", "codec", "container", "hdr", "group", "proper", "repack", "website", "type", "origin", "tags", "uploader", "pre_time").
Columns("filter_status", "rejections", "indexer", "filter", "protocol", "implementation", "timestamp", "group_id", "torrent_id", "torrent_name", "size", "title", "category", "season", "episode", "year", "resolution", "source", "codec", "container", "hdr", "release_group", "proper", "repack", "website", "type", "origin", "tags", "uploader", "pre_time").
Values(r.FilterStatus, pq.Array(r.Rejections), r.Indexer, r.FilterName, r.Protocol, r.Implementation, r.Timestamp, r.GroupID, r.TorrentID, r.TorrentName, r.Size, r.Title, r.Category, r.Season, r.Episode, r.Year, r.Resolution, r.Source, codecStr, r.Container, hdrStr, r.Group, r.Proper, r.Repack, r.Website, r.Type, r.Origin, pq.Array(r.Tags), r.Uploader, r.PreTime).
Suffix("RETURNING id").RunWith(repo.db.handler)

View file

@@ -8,7 +8,7 @@ import (
type FeedCacheRepo interface {
Get(bucket string, key string) ([]byte, error)
Exists(bucket string, key string) (bool, error)
-Put(bucket string, key string, val []byte, ttl time.Duration) error
+Put(bucket string, key string, val []byte, ttl time.Time) error
Delete(bucket string, key string) error
}
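
With ttl now an absolute time.Time, callers decide the expiry deadline up front and the repository just stores it. One thing this enables (hypothetical sketch, not part of this diff) is pruning stale cache rows with a single comparison against the stored timestamp:

// db is a placeholder *sql.DB; feed_cache and its ttl column come from the repository above.
_, err := db.ExecContext(ctx, `DELETE FROM feed_cache WHERE ttl < ?`, time.Now())
if err != nil {
	return err
}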

View file

@@ -122,10 +122,11 @@ func (j *TorznabJob) getFeed() ([]torznab.FeedItem, error) {
items = append(items, i)
-ttl := (24 * time.Hour) * 28
+// set ttl to 1 month
+ttl := time.Now().AddDate(0, 1, 0)
-if err := j.Repo.Put(j.Name, i.GUID, []byte("test"), ttl); err != nil {
-j.Log.Error().Err(err).Str("guid", i.GUID).Msg("torznab getFeed: cache.Put: error storing item in cache")
+if err := j.Repo.Put(j.Name, i.GUID, []byte(i.Title), ttl); err != nil {
+j.Log.Error().Stack().Err(err).Str("guid", i.GUID).Msg("torznab getFeed: cache.Put: error storing item in cache")
}
}
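
For context, a sketch of how the surrounding getFeed loop presumably reads after this change. Only the lines in the hunk above are from the commit; the Exists check and the feedItems variable are assumptions for illustration:

for _, i := range feedItems {
	exists, err := j.Repo.Exists(j.Name, i.GUID)
	if err != nil {
		j.Log.Error().Err(err).Msg("torznab getFeed: error checking cache")
		continue
	}
	if exists {
		continue // already handled on an earlier run
	}

	items = append(items, i)

	// set ttl to 1 month
	ttl := time.Now().AddDate(0, 1, 0)

	if err := j.Repo.Put(j.Name, i.GUID, []byte(i.Title), ttl); err != nil {
		j.Log.Error().Stack().Err(err).Str("guid", i.GUID).Msg("torznab getFeed: cache.Put: error storing item in cache")
	}
}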