feat: improve release parsing and filtering (#257)

* feat(releases): improve parsing

* refactor: extend filtering and add more tests

* feat: improve macro

* feat: add and remove fields

* feat: add freeleech percent to bonus

* feat: filter by origin
Ludvig Lundgren 2022-04-30 13:43:51 +02:00 committed by GitHub
parent bb62e724a1
commit e6c151a029
26 changed files with 3210 additions and 3201 deletions
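The headline change is the last bullet: releases can now be filtered by their origin string. The diff below adds an origins TEXT [] column to filter, scans it into f.Origins via pq.Array, and writes it on create/update. The domain-side comparison itself is not shown in this excerpt; the following is only a minimal sketch of what such a check could look like, with the function name and the case-insensitive matching being assumptions rather than code from this commit:

package domain

import "strings"

// checkOrigin sketches an origins filter check: an empty list matches
// every release, otherwise the announced origin must equal one of the
// configured values (case-insensitive). Name and semantics are
// illustrative assumptions, not the implementation in this commit.
func checkOrigin(filterOrigins []string, releaseOrigin string) bool {
	if len(filterOrigins) == 0 {
		return true
	}
	for _, origin := range filterOrigins {
		if strings.EqualFold(origin, releaseOrigin) {
			return true
		}
	}
	return false
}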

@@ -3,10 +3,9 @@ package database
import (
"context"
"database/sql"
sq "github.com/Masterminds/squirrel"
"strings"
"time"
sq "github.com/Masterminds/squirrel"
"github.com/lib/pq"
"github.com/rs/zerolog/log"
@@ -100,6 +99,8 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
"containers",
"match_hdr",
"except_hdr",
"match_other",
"except_other",
"years",
"artists",
"albums",
@@ -117,6 +118,7 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
"except_uploaders",
"tags",
"except_tags",
"origins",
"created_at",
"updated_at",
).
@@ -140,7 +142,7 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
var delay, logScore sql.NullInt32
if err := row.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, &f.CreatedAt, &f.UpdatedAt); err != nil {
if err := row.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), pq.Array(&f.MatchOther), pq.Array(&f.ExceptOther), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, pq.Array(&f.Origins), &f.CreatedAt, &f.UpdatedAt); err != nil {
log.Error().Stack().Err(err).Msgf("filter.findByID: %v : error scanning row", filterID)
return nil, err
}
@@ -204,6 +206,8 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
"f.containers",
"f.match_hdr",
"f.except_hdr",
"f.match_other",
"f.except_other",
"f.years",
"f.artists",
"f.albums",
@@ -221,6 +225,7 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
"f.except_uploaders",
"f.tags",
"f.except_tags",
"f.origins",
"f.created_at",
"f.updated_at",
).
@@ -254,7 +259,7 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
var delay, logScore sql.NullInt32
if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, &f.CreatedAt, &f.UpdatedAt); err != nil {
if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), pq.Array(&f.MatchOther), pq.Array(&f.ExceptOther), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, pq.Array(&f.Origins), &f.CreatedAt, &f.UpdatedAt); err != nil {
log.Error().Stack().Err(err).Msg("filter.findByIndexerIdentifier: error scanning row")
return nil, err
}
@@ -320,6 +325,8 @@ func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.F
"containers",
"match_hdr",
"except_hdr",
"match_other",
"except_other",
"years",
"match_categories",
"except_categories",
@@ -337,6 +344,7 @@ func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.F
"has_log",
"has_cue",
"perfect_flac",
"origins",
).
Values(
filter.Name,
@@ -362,6 +370,8 @@ func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.F
pq.Array(filter.Containers),
pq.Array(filter.MatchHDR),
pq.Array(filter.ExceptHDR),
pq.Array(filter.MatchOther),
pq.Array(filter.ExceptOther),
filter.Years,
filter.MatchCategories,
filter.ExceptCategories,
@@ -379,6 +389,7 @@ func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.F
filter.Log,
filter.Cue,
filter.PerfectFlac,
pq.Array(filter.Origins),
).
Suffix("RETURNING id").RunWith(r.db.handler)
@@ -424,6 +435,8 @@ func (r *FilterRepo) Update(ctx context.Context, filter domain.Filter) (*domain.
Set("containers", pq.Array(filter.Containers)).
Set("match_hdr", pq.Array(filter.MatchHDR)).
Set("except_hdr", pq.Array(filter.ExceptHDR)).
Set("match_other", pq.Array(filter.MatchOther)).
Set("except_other", pq.Array(filter.ExceptOther)).
Set("years", filter.Years).
Set("match_categories", filter.MatchCategories).
Set("except_categories", filter.ExceptCategories).
@@ -441,6 +454,7 @@ func (r *FilterRepo) Update(ctx context.Context, filter domain.Filter) (*domain.
Set("has_log", filter.Log).
Set("has_cue", filter.Cue).
Set("perfect_flac", filter.PerfectFlac).
Set("origins", pq.Array(filter.Origins)).
Set("updated_at", time.Now().Format(time.RFC3339)).
Where("id = ?", filter.ID)
@@ -596,14 +610,14 @@ func (r *FilterRepo) Delete(ctx context.Context, filterID int) error {
}
// Split string to slice. We store comma separated strings and convert to slice
func stringToSlice(str string) []string {
if str == "" {
return []string{}
} else if !strings.Contains(str, ",") {
return []string{str}
}
split := strings.Split(str, ",")
return split
}
//func stringToSlice(str string) []string {
// if str == "" {
// return []string{}
// } else if !strings.Contains(str, ",") {
// return []string{str}
// }
//
// split := strings.Split(str, ",")
//
// return split
//}

@@ -81,6 +81,8 @@ CREATE TABLE filter
containers TEXT [] DEFAULT '{}' NOT NULL,
match_hdr TEXT [] DEFAULT '{}',
except_hdr TEXT [] DEFAULT '{}',
match_other TEXT [] DEFAULT '{}',
except_other TEXT [] DEFAULT '{}',
years TEXT,
artists TEXT,
albums TEXT,
@@ -99,6 +101,7 @@ CREATE TABLE filter
except_uploaders TEXT,
tags TEXT,
except_tags TEXT,
origins TEXT [] DEFAULT '{}' NOT NULL,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
@@ -169,7 +172,6 @@ CREATE TABLE "release"
torrent_id TEXT,
torrent_name TEXT,
size INTEGER,
raw TEXT,
title TEXT,
category TEXT,
season INTEGER,
@@ -180,28 +182,13 @@ CREATE TABLE "release"
codec TEXT,
container TEXT,
hdr TEXT,
audio TEXT,
release_group TEXT,
region TEXT,
language TEXT,
edition TEXT,
unrated BOOLEAN,
hybrid BOOLEAN,
group TEXT,
proper BOOLEAN,
repack BOOLEAN,
website TEXT,
artists TEXT [] DEFAULT '{}' NOT NULL,
type TEXT,
format TEXT,
quality TEXT,
log_score INTEGER,
has_log BOOLEAN,
has_cue BOOLEAN,
is_scene BOOLEAN,
origin TEXT,
tags TEXT [] DEFAULT '{}' NOT NULL,
freeleech BOOLEAN,
freeleech_percent INTEGER,
uploader TEXT,
pre_time TEXT
);
@@ -595,6 +582,68 @@ ALTER TABLE release_action_status_dg_tmp
ALTER TABLE indexer
ADD COLUMN implementation TEXT;
`,
`
ALTER TABLE release
RENAME COLUMN release_group TO "group";
ALTER TABLE release
DROP COLUMN raw;
ALTER TABLE release
DROP COLUMN audio;
ALTER TABLE release
DROP COLUMN region;
ALTER TABLE release
DROP COLUMN language;
ALTER TABLE release
DROP COLUMN edition;
ALTER TABLE release
DROP COLUMN unrated;
ALTER TABLE release
DROP COLUMN hybrid;
ALTER TABLE release
DROP COLUMN artists;
ALTER TABLE release
DROP COLUMN format;
ALTER TABLE release
DROP COLUMN quality;
ALTER TABLE release
DROP COLUMN log_score;
ALTER TABLE release
DROP COLUMN has_log;
ALTER TABLE release
DROP COLUMN has_cue;
ALTER TABLE release
DROP COLUMN is_scene;
ALTER TABLE release
DROP COLUMN freeleech;
ALTER TABLE release
DROP COLUMN freeleech_percent;
ALTER TABLE "filter"
ADD COLUMN origins TEXT [] DEFAULT '{}';
`,
`
ALTER TABLE "filter"
ADD COLUMN match_other TEXT [] DEFAULT '{}';
ALTER TABLE "filter"
ADD COLUMN except_other TEXT [] DEFAULT '{}';
`,
}
const postgresSchema = `
@@ -678,6 +727,8 @@ CREATE TABLE filter
containers TEXT [] DEFAULT '{}' NOT NULL,
match_hdr TEXT [] DEFAULT '{}',
except_hdr TEXT [] DEFAULT '{}',
match_other TEXT [] DEFAULT '{}',
except_other TEXT [] DEFAULT '{}',
years TEXT,
artists TEXT,
albums TEXT,
@@ -696,6 +747,7 @@ CREATE TABLE filter
except_uploaders TEXT,
tags TEXT,
except_tags TEXT,
origins TEXT [] DEFAULT '{}',
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
@@ -926,4 +978,66 @@ var postgresMigrations = []string{
ALTER TABLE indexer
ADD COLUMN implementation TEXT;
`,
`
ALTER TABLE release
RENAME COLUMN release_group TO "group";
ALTER TABLE release
DROP COLUMN raw;
ALTER TABLE release
DROP COLUMN audio;
ALTER TABLE release
DROP COLUMN region;
ALTER TABLE release
DROP COLUMN language;
ALTER TABLE release
DROP COLUMN edition;
ALTER TABLE release
DROP COLUMN unrated;
ALTER TABLE release
DROP COLUMN hybrid;
ALTER TABLE release
DROP COLUMN artists;
ALTER TABLE release
DROP COLUMN format;
ALTER TABLE release
DROP COLUMN quality;
ALTER TABLE release
DROP COLUMN log_score;
ALTER TABLE release
DROP COLUMN has_log;
ALTER TABLE release
DROP COLUMN has_cue;
ALTER TABLE release
DROP COLUMN is_scene;
ALTER TABLE release
DROP COLUMN freeleech;
ALTER TABLE release
DROP COLUMN freeleech_percent;
ALTER TABLE "filter"
ADD COLUMN origins TEXT [] DEFAULT '{}';
`,
`
ALTER TABLE "filter"
ADD COLUMN match_other TEXT [] DEFAULT '{}';
ALTER TABLE "filter"
ADD COLUMN except_other TEXT [] DEFAULT '{}';
`,
}
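Both the SQLite and Postgres migration slices gain the same two trailing entries: one that reshapes the release table and adds filter.origins, and one that adds filter.match_other / filter.except_other. How those slices get executed is not part of this diff; as a rough illustration only (a generic pattern, not autobrr's actual runner), an ordered migration slice is usually applied by running every entry past the version already recorded for the database:

package database

import (
	"database/sql"
	"fmt"
)

// applyMigrations runs, in order, every migration the database has not
// seen yet. The "applied" count would come from some version bookkeeping
// (e.g. a schema version table), which is assumed here and not shown in
// this diff.
func applyMigrations(db *sql.DB, applied int, migrations []string) error {
	for i := applied; i < len(migrations); i++ {
		if _, err := db.Exec(migrations[i]); err != nil {
			return fmt.Errorf("migration %d failed: %w", i, err)
		}
	}
	return nil
}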

@@ -3,6 +3,8 @@ package database
import (
"context"
"database/sql"
"strings"
sq "github.com/Masterminds/squirrel"
"github.com/autobrr/autobrr/internal/domain"
"github.com/lib/pq"
@@ -18,10 +20,13 @@ func NewReleaseRepo(db *DB) domain.ReleaseRepo {
}
func (repo *ReleaseRepo) Store(ctx context.Context, r *domain.Release) (*domain.Release, error) {
codecStr := strings.Join(r.Codec, ",")
hdrStr := strings.Join(r.HDR, ",")
queryBuilder := repo.db.squirrel.
Insert("release").
Columns("filter_status", "rejections", "indexer", "filter", "protocol", "implementation", "timestamp", "group_id", "torrent_id", "torrent_name", "size", "raw", "title", "category", "season", "episode", "year", "resolution", "source", "codec", "container", "hdr", "audio", "release_group", "region", "language", "edition", "unrated", "hybrid", "proper", "repack", "website", "artists", "type", "format", "quality", "log_score", "has_log", "has_cue", "is_scene", "origin", "tags", "freeleech", "freeleech_percent", "uploader", "pre_time").
Values(r.FilterStatus, pq.Array(r.Rejections), r.Indexer, r.FilterName, r.Protocol, r.Implementation, r.Timestamp, r.GroupID, r.TorrentID, r.TorrentName, r.Size, r.Raw, r.Title, r.Category, r.Season, r.Episode, r.Year, r.Resolution, r.Source, r.Codec, r.Container, r.HDR, r.Audio, r.Group, r.Region, r.Language, r.Edition, r.Unrated, r.Hybrid, r.Proper, r.Repack, r.Website, pq.Array(r.Artists), r.Type, r.Format, r.Quality, r.LogScore, r.HasLog, r.HasCue, r.IsScene, r.Origin, pq.Array(r.Tags), r.Freeleech, r.FreeleechPercent, r.Uploader, r.PreTime).
Columns("filter_status", "rejections", "indexer", "filter", "protocol", "implementation", "timestamp", "group_id", "torrent_id", "torrent_name", "size", "title", "category", "season", "episode", "year", "resolution", "source", "codec", "container", "hdr", "group", "proper", "repack", "website", "type", "origin", "tags", "uploader", "pre_time").
Values(r.FilterStatus, pq.Array(r.Rejections), r.Indexer, r.FilterName, r.Protocol, r.Implementation, r.Timestamp, r.GroupID, r.TorrentID, r.TorrentName, r.Size, r.Title, r.Category, r.Season, r.Episode, r.Year, r.Resolution, r.Source, codecStr, r.Container, hdrStr, r.Group, r.Proper, r.Repack, r.Website, r.Type, r.Origin, pq.Array(r.Tags), r.Uploader, r.PreTime).
Suffix("RETURNING id").RunWith(repo.db.handler)
// return values
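Store now flattens r.Codec and r.HDR into comma-separated strings (codecStr and hdrStr above) instead of inserting them as separate columns, while tags still go through pq.Array. Reading those comma-joined columns back would require the reverse split, which is not shown in this excerpt; a small self-contained sketch of that assumed round trip, using example values only:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Write side: flatten the slice the same way codecStr/hdrStr are built.
	stored := strings.Join([]string{"H.264", "FLAC"}, ",") // "H.264,FLAC"

	// Read side (assumed, not part of this diff): split back into a slice,
	// guarding against the empty string, which would otherwise yield [""].
	var codecs []string
	if stored != "" {
		codecs = strings.Split(stored, ",")
	}
	fmt.Println(codecs) // [H.264 FLAC]
}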