feat(filters): add download limits (#266)

* feat(filters): add download limits
Ludvig Lundgren 2022-05-18 14:14:13 +02:00 committed by GitHub
parent 2903e7b493
commit 2a23ed0185
9 changed files with 458 additions and 82 deletions


@@ -82,6 +82,8 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
 		"max_size",
 		"delay",
 		"priority",
+		"max_downloads",
+		"max_downloads_unit",
 		"match_releases",
 		"except_releases",
 		"use_regex",
@@ -138,11 +140,11 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
 	}
 	var f domain.Filter
-	var minSize, maxSize, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags sql.NullString
+	var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags sql.NullString
 	var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
-	var delay, logScore sql.NullInt32
+	var delay, maxDownloads, logScore sql.NullInt32
-	if err := row.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), pq.Array(&f.MatchOther), pq.Array(&f.ExceptOther), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, pq.Array(&f.Origins), &f.CreatedAt, &f.UpdatedAt); err != nil {
+	if err := row.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &maxDownloads, &maxDownloadsUnit, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), pq.Array(&f.MatchOther), pq.Array(&f.ExceptOther), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, pq.Array(&f.Origins), &f.CreatedAt, &f.UpdatedAt); err != nil {
 		log.Error().Stack().Err(err).Msgf("filter.findByID: %v : error scanning row", filterID)
 		return nil, err
 	}
@@ -150,6 +152,8 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
 	f.MinSize = minSize.String
 	f.MaxSize = maxSize.String
 	f.Delay = int(delay.Int32)
+	f.MaxDownloads = int(maxDownloads.Int32)
+	f.MaxDownloadsUnit = domain.FilterMaxDownloadsUnit(maxDownloadsUnit.String)
 	f.MatchReleases = matchReleases.String
 	f.ExceptReleases = exceptReleases.String
 	f.MatchReleaseGroups = matchReleaseGroups.String
@@ -180,6 +184,30 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
 // FindByIndexerIdentifier find active filters with active indexer only
 func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, error) {
+	ctx := context.TODO()
+
+	tx, err := r.db.BeginTx(ctx, nil)
+	if err != nil {
+		return nil, err
+	}
+	defer tx.Rollback()
+
+	filters, err := r.findByIndexerIdentifier(ctx, tx, indexer)
+	if err != nil {
+		return nil, err
+	}
+
+	for i, filter := range filters {
+		downloads, err := r.attachDownloadsByFilter(ctx, tx, filter.ID)
+		if err != nil {
+			continue
+		}
+		filters[i].Downloads = downloads
+	}
+
+	return filters, nil
+}
+
+func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, tx *Tx, indexer string) ([]domain.Filter, error) {
 	queryBuilder := r.db.squirrel.
 		Select(
 			"f.id",
@@ -189,6 +217,8 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
 			"f.max_size",
 			"f.delay",
 			"f.priority",
+			"f.max_downloads",
+			"f.max_downloads_unit",
 			"f.match_releases",
 			"f.except_releases",
 			"f.use_regex",
@@ -243,7 +273,7 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
 		return nil, err
 	}
-	rows, err := r.db.handler.Query(query, args...)
+	rows, err := tx.QueryContext(ctx, query, args...)
 	if err != nil {
 		log.Error().Stack().Err(err).Msg("filter.findByIndexerIdentifier: error executing query")
 		return nil, err
@@ -255,11 +285,11 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
 	for rows.Next() {
 		var f domain.Filter
-		var minSize, maxSize, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags sql.NullString
+		var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags sql.NullString
 		var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
-		var delay, logScore sql.NullInt32
+		var delay, maxDownloads, logScore sql.NullInt32
-		if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), pq.Array(&f.MatchOther), pq.Array(&f.ExceptOther), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, pq.Array(&f.Origins), &f.CreatedAt, &f.UpdatedAt); err != nil {
+		if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &maxDownloads, &maxDownloadsUnit, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), pq.Array(&f.MatchOther), pq.Array(&f.ExceptOther), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, pq.Array(&f.Origins), &f.CreatedAt, &f.UpdatedAt); err != nil {
 			log.Error().Stack().Err(err).Msg("filter.findByIndexerIdentifier: error scanning row")
 			return nil, err
 		}
@@ -267,6 +297,8 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
 		f.MinSize = minSize.String
 		f.MaxSize = maxSize.String
 		f.Delay = int(delay.Int32)
+		f.MaxDownloads = int(maxDownloads.Int32)
+		f.MaxDownloadsUnit = domain.FilterMaxDownloadsUnit(maxDownloadsUnit.String)
 		f.MatchReleases = matchReleases.String
 		f.ExceptReleases = exceptReleases.String
 		f.MatchReleaseGroups = matchReleaseGroups.String
@@ -308,6 +340,8 @@ func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.F
 			"max_size",
 			"delay",
 			"priority",
+			"max_downloads",
+			"max_downloads_unit",
 			"match_releases",
 			"except_releases",
 			"use_regex",
@@ -353,6 +387,8 @@ func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.F
 			filter.MaxSize,
 			filter.Delay,
 			filter.Priority,
+			filter.MaxDownloads,
+			filter.MaxDownloadsUnit,
 			filter.MatchReleases,
 			filter.ExceptReleases,
 			filter.UseRegex,
@@ -418,6 +454,8 @@ func (r *FilterRepo) Update(ctx context.Context, filter domain.Filter) (*domain.
 		Set("max_size", filter.MaxSize).
 		Set("delay", filter.Delay).
 		Set("priority", filter.Priority).
+		Set("max_downloads", filter.MaxDownloads).
+		Set("max_downloads_unit", filter.MaxDownloadsUnit).
 		Set("use_regex", filter.UseRegex).
 		Set("match_releases", filter.MatchReleases).
 		Set("except_releases", filter.ExceptReleases).
@@ -609,15 +647,62 @@ func (r *FilterRepo) Delete(ctx context.Context, filterID int) error {
 	return nil
 }
 
-// Split string to slice. We store comma separated strings and convert to slice
-//func stringToSlice(str string) []string {
-//	if str == "" {
-//		return []string{}
-//	} else if !strings.Contains(str, ",") {
-//		return []string{str}
-//	}
-//
-//	split := strings.Split(str, ",")
-//
-//	return split
-//}
+func (r *FilterRepo) attachDownloadsByFilter(ctx context.Context, tx *Tx, filterID int) (*domain.FilterDownloads, error) {
+	if r.db.Driver == "sqlite" {
+		return r.downloadsByFilterSqlite(ctx, tx, filterID)
+	}
+
+	return r.downloadsByFilterPostgres(ctx, tx, filterID)
+}
+
+func (r *FilterRepo) downloadsByFilterSqlite(ctx context.Context, tx *Tx, filterID int) (*domain.FilterDownloads, error) {
+	query := `SELECT
+	IFNULL(SUM(CASE WHEN "release".timestamp >= datetime('now', '-1 hour') THEN 1 ELSE 0 END),0) as "hour_count",
+	IFNULL(SUM(CASE WHEN "release".timestamp >= datetime('now', 'start of day') THEN 1 ELSE 0 END),0) as "day_count",
+	IFNULL(SUM(CASE WHEN "release".timestamp >= datetime('now', 'weekday 0', '-7 days') THEN 1 ELSE 0 END),0) as "week_count",
+	IFNULL(SUM(CASE WHEN "release".timestamp >= datetime('now', 'start of month') THEN 1 ELSE 0 END),0) as "month_count",
+	count(*) as "total_count"
+FROM "release"
+WHERE "release".filter_id = ?;`
+
+	row := tx.QueryRowContext(ctx, query, filterID)
+	if err := row.Err(); err != nil {
+		log.Error().Stack().Err(err).Msg("filter.downloadsByFilterSqlite: error querying stats")
+		return nil, err
+	}
+
+	var f domain.FilterDownloads
+
+	if err := row.Scan(&f.HourCount, &f.DayCount, &f.WeekCount, &f.MonthCount, &f.TotalCount); err != nil {
+		log.Error().Stack().Err(err).Msg("filter.downloadsByFilterSqlite: error scanning stats data to struct")
+		return nil, err
+	}
+
+	return &f, nil
+}
+
+func (r *FilterRepo) downloadsByFilterPostgres(ctx context.Context, tx *Tx, filterID int) (*domain.FilterDownloads, error) {
+	query := `SELECT
+	COALESCE(SUM(CASE WHEN "release".timestamp >= date_trunc('hour', CURRENT_TIMESTAMP) THEN 1 ELSE 0 END),0) as "hour_count",
+	COALESCE(SUM(CASE WHEN "release".timestamp >= date_trunc('day', CURRENT_DATE) THEN 1 ELSE 0 END),0) as "day_count",
+	COALESCE(SUM(CASE WHEN "release".timestamp >= date_trunc('week', CURRENT_DATE) THEN 1 ELSE 0 END),0) as "week_count",
+	COALESCE(SUM(CASE WHEN "release".timestamp >= date_trunc('month', CURRENT_DATE) THEN 1 ELSE 0 END),0) as "month_count",
+	count(*) as "total_count"
+FROM "release"
+WHERE "release".filter_id = ?;`
+
+	row := tx.QueryRowContext(ctx, query, filterID)
+	if err := row.Err(); err != nil {
+		log.Error().Stack().Err(err).Msg("filter.downloadsByFilterPostgres: error querying stats")
+		return nil, err
+	}
+
+	var f domain.FilterDownloads
+
+	if err := row.Scan(&f.HourCount, &f.DayCount, &f.WeekCount, &f.MonthCount, &f.TotalCount); err != nil {
+		log.Error().Stack().Err(err).Msg("filter.downloadsByFilterPostgres: error scanning stats data to struct")
+		return nil, err
+	}
+
+	return &f, nil
+}
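
Note on the two aggregate queries above: the backends do not use identical windows. SQLite's datetime('now', '-1 hour') is a rolling 60-minute window, while Postgres' date_trunc('hour', CURRENT_TIMESTAMP) counts from the top of the current wall-clock hour, so hour (and week) counts can differ slightly between backends for the same data. Also, the Postgres variant keeps the ? placeholder; lib/pq normally expects $1-style parameters, so this presumably relies on a rebinding layer. A minimal Go sketch of the two lower bounds (illustrative only, UTC assumed):

package main

import (
	"fmt"
	"time"
)

func main() {
	now := time.Now().UTC()

	// SQLite: datetime('now', '-1 hour') -> rolling window, the last 60 minutes
	rolling := now.Add(-time.Hour)

	// Postgres: date_trunc('hour', CURRENT_TIMESTAMP) -> calendar window,
	// since the top of the current hour
	calendar := now.Truncate(time.Hour)

	fmt.Printf("sqlite counts releases since   %s\n", rolling.Format(time.RFC3339))
	fmt.Printf("postgres counts releases since %s\n", calendar.Format(time.RFC3339))
}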


@@ -64,6 +64,8 @@ CREATE TABLE filter
 	max_size TEXT,
 	delay INTEGER,
 	priority INTEGER DEFAULT 0 NOT NULL,
+	max_downloads INTEGER DEFAULT 0,
+	max_downloads_unit TEXT,
 	match_releases TEXT,
 	except_releases TEXT,
 	use_regex BOOLEAN,
@@ -196,9 +198,15 @@ CREATE TABLE "release"
 	origin TEXT,
 	tags TEXT [] DEFAULT '{}' NOT NULL,
 	uploader TEXT,
-	pre_time TEXT
+	pre_time TEXT,
+	filter_id INTEGER
+		REFERENCES filter
+		ON DELETE SET NULL
 );
 
+CREATE INDEX release_filter_id_index
+	ON "release" (filter_id);
+
 CREATE TABLE release_action_status
 (
 	id INTEGER PRIMARY KEY,
@@ -674,6 +682,98 @@
 	ALTER TABLE "action"
 		ADD COLUMN limit_seed_time INTEGER DEFAULT 0;
 	`,
+	`
+	alter table filter
+		add max_downloads INTEGER default 0;
+
+	alter table filter
+		add max_downloads_unit TEXT;
+
+	create table release_dg_tmp
+	(
+		id INTEGER
+			primary key,
+		filter_status TEXT,
+		rejections TEXT [] default '{}' not null,
+		indexer TEXT,
+		filter TEXT,
+		protocol TEXT,
+		implementation TEXT,
+		timestamp TIMESTAMP default CURRENT_TIMESTAMP,
+		group_id TEXT,
+		torrent_id TEXT,
+		torrent_name TEXT,
+		size INTEGER,
+		title TEXT,
+		category TEXT,
+		season INTEGER,
+		episode INTEGER,
+		year INTEGER,
+		resolution TEXT,
+		source TEXT,
+		codec TEXT,
+		container TEXT,
+		hdr TEXT,
+		release_group TEXT,
+		proper BOOLEAN,
+		repack BOOLEAN,
+		website TEXT,
+		type TEXT,
+		origin TEXT,
+		tags TEXT [] default '{}' not null,
+		uploader TEXT,
+		pre_time TEXT,
+		filter_id INTEGER
+			CONSTRAINT release_filter_id_fk
+				REFERENCES filter
+				ON DELETE SET NULL
+	);
+
+	INSERT INTO release_dg_tmp(id, filter_status, rejections, indexer, filter, protocol, implementation, timestamp,
+		group_id, torrent_id, torrent_name, size, title, category, season, episode, year, resolution,
+		source, codec, container, hdr, release_group, proper, repack, website, type, origin, tags,
+		uploader, pre_time)
+	SELECT id,
+		filter_status,
+		rejections,
+		indexer,
+		filter,
+		protocol,
+		implementation,
+		timestamp,
+		group_id,
+		torrent_id,
+		torrent_name,
+		size,
+		title,
+		category,
+		season,
+		episode,
+		year,
+		resolution,
+		source,
+		codec,
+		container,
+		hdr,
+		release_group,
+		proper,
+		repack,
+		website,
+		type,
+		origin,
+		tags,
+		uploader,
+		pre_time
+	FROM "release";
+
+	DROP TABLE "release";
+
+	ALTER TABLE release_dg_tmp
+		RENAME TO "release";
+
+	CREATE INDEX release_filter_id_index
+		ON "release" (filter_id);
+	`,
 }
 
 const postgresSchema = `
@@ -740,6 +840,8 @@ CREATE TABLE filter
 	max_size TEXT,
 	delay INTEGER,
 	priority INTEGER DEFAULT 0 NOT NULL,
+	max_downloads INTEGER DEFAULT 0,
+	max_downloads_unit TEXT,
 	match_releases TEXT,
 	except_releases TEXT,
 	use_regex BOOLEAN,
@@ -888,9 +990,16 @@ CREATE TABLE "release"
 	freeleech BOOLEAN,
 	freeleech_percent INTEGER,
 	uploader TEXT,
-	pre_time TEXT
+	pre_time TEXT,
+	filter_id INTEGER
+		CONSTRAINT release_filter_id_fk
+			REFERENCES filter
+			ON DELETE SET NULL
 );
 
+CREATE INDEX release_filter_id_index
+	ON release (filter_id);
+
 CREATE TABLE release_action_status
 (
 	id SERIAL PRIMARY KEY,
@@ -1103,4 +1212,22 @@ var postgresMigrations = []string{
 	ALTER TABLE "action"
 		ADD COLUMN limit_seed_time INTEGER DEFAULT 0;
 	`,
+	`
+	ALTER TABLE filter
+		ADD max_downloads INTEGER default 0;
+
+	ALTER TABLE filter
+		ADD max_downloads_unit TEXT;
+
+	ALTER TABLE release
+		add filter_id INTEGER;
+
+	CREATE INDEX release_filter_id_index
+		ON release (filter_id);
+
+	ALTER TABLE release
+		ADD CONSTRAINT release_filter_id_fk
+			FOREIGN KEY (filter_id) REFERENCES FILTER
+			ON DELETE SET NULL;
+	`,
 }
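
The SQLite branch of this migration rebuilds release into release_dg_tmp and renames it back because SQLite's ALTER TABLE support is too limited to attach the constraint in place, so it follows the documented rebuild pattern (create new table, copy rows, drop, rename); Postgres simply adds the column and constraint directly. The diff does not show how these migration strings are executed; a minimal runner sketch, assuming plain ordered SQL strings and PRAGMA user_version for version tracking (autobrr's actual runner may differ):

package main

import (
	"database/sql"
	"fmt"

	_ "github.com/mattn/go-sqlite3" // driver choice is an assumption, not autobrr's
)

// migrate applies any SQL strings that have not run yet, tracking progress
// with PRAGMA user_version. Each step runs in its own transaction.
func migrate(db *sql.DB, migrations []string) error {
	var version int
	if err := db.QueryRow("PRAGMA user_version").Scan(&version); err != nil {
		return err
	}

	for i := version; i < len(migrations); i++ {
		tx, err := db.Begin()
		if err != nil {
			return err
		}
		if _, err := tx.Exec(migrations[i]); err != nil {
			tx.Rollback()
			return fmt.Errorf("migration %d: %w", i+1, err)
		}
		if _, err := tx.Exec(fmt.Sprintf("PRAGMA user_version = %d", i+1)); err != nil {
			tx.Rollback()
			return err
		}
		if err := tx.Commit(); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	db, err := sql.Open("sqlite3", "autobrr.db")
	if err != nil {
		panic(err)
	}
	defer db.Close()

	if err := migrate(db, []string{ /* ordered migration strings */ }); err != nil {
		panic(err)
	}
}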


@@ -25,8 +25,8 @@ func (repo *ReleaseRepo) Store(ctx context.Context, r *domain.Release) (*domain.
 	queryBuilder := repo.db.squirrel.
 		Insert("release").
-		Columns("filter_status", "rejections", "indexer", "filter", "protocol", "implementation", "timestamp", "group_id", "torrent_id", "torrent_name", "size", "title", "category", "season", "episode", "year", "resolution", "source", "codec", "container", "hdr", "release_group", "proper", "repack", "website", "type", "origin", "tags", "uploader", "pre_time").
-		Values(r.FilterStatus, pq.Array(r.Rejections), r.Indexer, r.FilterName, r.Protocol, r.Implementation, r.Timestamp, r.GroupID, r.TorrentID, r.TorrentName, r.Size, r.Title, r.Category, r.Season, r.Episode, r.Year, r.Resolution, r.Source, codecStr, r.Container, hdrStr, r.Group, r.Proper, r.Repack, r.Website, r.Type, r.Origin, pq.Array(r.Tags), r.Uploader, r.PreTime).
+		Columns("filter_status", "rejections", "indexer", "filter", "protocol", "implementation", "timestamp", "group_id", "torrent_id", "torrent_name", "size", "title", "category", "season", "episode", "year", "resolution", "source", "codec", "container", "hdr", "release_group", "proper", "repack", "website", "type", "origin", "tags", "uploader", "pre_time", "filter_id").
+		Values(r.FilterStatus, pq.Array(r.Rejections), r.Indexer, r.FilterName, r.Protocol, r.Implementation, r.Timestamp, r.GroupID, r.TorrentID, r.TorrentName, r.Size, r.Title, r.Category, r.Season, r.Episode, r.Year, r.Resolution, r.Source, codecStr, r.Container, hdrStr, r.Group, r.Proper, r.Repack, r.Website, r.Type, r.Origin, pq.Array(r.Tags), r.Uploader, r.PreTime, r.FilterID).
 		Suffix("RETURNING id").RunWith(repo.db.handler)
 
 	// return values


@@ -30,64 +30,91 @@ type FilterRepo interface {
 	DeleteIndexerConnections(ctx context.Context, filterID int) error
 }
 
+type FilterDownloads struct {
+	HourCount  int
+	DayCount   int
+	WeekCount  int
+	MonthCount int
+	TotalCount int
+}
+
+type FilterMaxDownloadsUnit string
+
+const (
+	FilterMaxDownloadsHour  FilterMaxDownloadsUnit = "HOUR"
+	FilterMaxDownloadsDay   FilterMaxDownloadsUnit = "DAY"
+	FilterMaxDownloadsWeek  FilterMaxDownloadsUnit = "WEEK"
+	FilterMaxDownloadsMonth FilterMaxDownloadsUnit = "MONTH"
+	FilterMaxDownloadsEver  FilterMaxDownloadsUnit = "EVER"
+)
+
 type Filter struct {
 	ID int `json:"id"`
 	Name string `json:"name"`
 	Enabled bool `json:"enabled"`
 	CreatedAt time.Time `json:"created_at"`
 	UpdatedAt time.Time `json:"updated_at"`
 	MinSize string `json:"min_size"`
 	MaxSize string `json:"max_size"`
 	Delay int `json:"delay"`
 	Priority int32 `json:"priority"`
+	MaxDownloads int `json:"max_downloads"`
+	MaxDownloadsUnit FilterMaxDownloadsUnit `json:"max_downloads_unit"`
 	MatchReleases string `json:"match_releases"`
 	ExceptReleases string `json:"except_releases"`
 	UseRegex bool `json:"use_regex"`
 	MatchReleaseGroups string `json:"match_release_groups"`
 	ExceptReleaseGroups string `json:"except_release_groups"`
 	Scene bool `json:"scene"`
 	Origins []string `json:"origins"`
 	Freeleech bool `json:"freeleech"`
 	FreeleechPercent string `json:"freeleech_percent"`
 	Shows string `json:"shows"`
 	Seasons string `json:"seasons"`
 	Episodes string `json:"episodes"`
 	Resolutions []string `json:"resolutions"` // SD, 480i, 480p, 576p, 720p, 810p, 1080i, 1080p.
 	Codecs []string `json:"codecs"` // XviD, DivX, x264, h.264 (or h264), mpeg2 (or mpeg-2), VC-1 (or VC1), WMV, Remux, h.264 Remux (or h264 Remux), VC-1 Remux (or VC1 Remux).
 	Sources []string `json:"sources"` // DSR, PDTV, HDTV, HR.PDTV, HR.HDTV, DVDRip, DVDScr, BDr, BD5, BD9, BDRip, BRRip, DVDR, MDVDR, HDDVD, HDDVDRip, BluRay, WEB-DL, TVRip, CAM, R5, TELESYNC, TS, TELECINE, TC. TELESYNC and TS are synonyms (you don't need both). Same for TELECINE and TC
 	Containers []string `json:"containers"`
 	MatchHDR []string `json:"match_hdr"`
 	ExceptHDR []string `json:"except_hdr"`
 	MatchOther []string `json:"match_other"`
 	ExceptOther []string `json:"except_other"`
 	Years string `json:"years"`
 	Artists string `json:"artists"`
 	Albums string `json:"albums"`
 	MatchReleaseTypes []string `json:"match_release_types"` // Album,Single,EP
 	ExceptReleaseTypes string `json:"except_release_types"`
 	Formats []string `json:"formats"` // MP3, FLAC, Ogg, AAC, AC3, DTS
 	Quality []string `json:"quality"` // 192, 320, APS (VBR), V2 (VBR), V1 (VBR), APX (VBR), V0 (VBR), q8.x (VBR), Lossless, 24bit Lossless, Other
 	Media []string `json:"media"` // CD, DVD, Vinyl, Soundboard, SACD, DAT, Cassette, WEB, Other
 	PerfectFlac bool `json:"perfect_flac"`
 	Cue bool `json:"cue"`
 	Log bool `json:"log"`
 	LogScore int `json:"log_score"`
 	MatchCategories string `json:"match_categories"`
 	ExceptCategories string `json:"except_categories"`
 	MatchUploaders string `json:"match_uploaders"`
 	ExceptUploaders string `json:"except_uploaders"`
 	Tags string `json:"tags"`
 	ExceptTags string `json:"except_tags"`
 	TagsAny string `json:"tags_any"`
 	ExceptTagsAny string `json:"except_tags_any"`
 	Actions []*Action `json:"actions"`
 	Indexers []Indexer `json:"indexers"`
+	Downloads *FilterDownloads `json:"-"`
 }
 
 func (f Filter) CheckFilter(r *Release) ([]string, bool) {
 	// reset rejections first to clean previous checks
 	r.resetRejections()
 
+	// max downloads check. If reached return early
+	if f.MaxDownloads > 0 && !f.checkMaxDownloads(f.MaxDownloads, f.MaxDownloadsUnit) {
+		r.addRejectionF("max downloads (%d) this (%v) reached", f.MaxDownloads, f.MaxDownloadsUnit)
+		return r.Rejections, false
+	}
+
 	if f.Freeleech && r.Freeleech != f.Freeleech {
 		r.addRejection("wanted: freeleech")
 	}
@@ -248,6 +275,39 @@ func (f Filter) CheckFilter(r *Release) ([]string, bool) {
 	return nil, true
 }
 
+func (f Filter) checkMaxDownloads(max int, perTimeUnit FilterMaxDownloadsUnit) bool {
+	if f.Downloads == nil {
+		return false
+	}
+
+	switch perTimeUnit {
+	case FilterMaxDownloadsHour:
+		if f.Downloads.HourCount > 0 && f.Downloads.HourCount >= max {
+			return false
+		}
+	case FilterMaxDownloadsDay:
+		if f.Downloads.DayCount > 0 && f.Downloads.DayCount >= max {
+			return false
+		}
+	case FilterMaxDownloadsWeek:
+		if f.Downloads.WeekCount > 0 && f.Downloads.WeekCount >= max {
+			return false
+		}
+	case FilterMaxDownloadsMonth:
+		if f.Downloads.MonthCount > 0 && f.Downloads.MonthCount >= max {
+			return false
+		}
+	case FilterMaxDownloadsEver:
+		if f.Downloads.TotalCount > 0 && f.Downloads.TotalCount >= max {
+			return false
+		}
+	default:
+		return true
+	}
+
+	return true
+}
+
 // isPerfectFLAC Perfect is "CD FLAC Cue Log 100% Lossless or 24bit Lossless"
 func (f Filter) isPerfectFLAC(r *Release) bool {
 	if !contains(r.Source, "CD") {
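
Two edge cases in checkMaxDownloads are worth noting: when Downloads is nil (for example, the stats query failed and FindByIndexerIdentifier left it unset), the check returns false, so a filter with a limit set rejects every release until stats load again; an empty or unrecognized unit passes. A self-contained sketch of the same decision table, using simplified stand-ins for the domain types (illustrative, not the package API):

package main

import "fmt"

type FilterDownloads struct {
	HourCount, DayCount, WeekCount, MonthCount, TotalCount int
}

// checkMaxDownloads mirrors the switch above: nil stats reject,
// an unknown unit passes.
func checkMaxDownloads(max int, unit string, d *FilterDownloads) bool {
	if d == nil {
		return false
	}
	switch unit {
	case "HOUR":
		return !(d.HourCount > 0 && d.HourCount >= max)
	case "DAY":
		return !(d.DayCount > 0 && d.DayCount >= max)
	case "WEEK":
		return !(d.WeekCount > 0 && d.WeekCount >= max)
	case "MONTH":
		return !(d.MonthCount > 0 && d.MonthCount >= max)
	case "EVER":
		return !(d.TotalCount > 0 && d.TotalCount >= max)
	}
	return true
}

func main() {
	d := &FilterDownloads{HourCount: 20, MonthCount: 50}
	fmt.Println(checkMaxDownloads(15, "HOUR", d))   // false: 20 >= 15, limit reached
	fmt.Println(checkMaxDownloads(100, "MONTH", d)) // true: 50 < 100
	fmt.Println(checkMaxDownloads(1, "YEAR", d))    // true: unknown unit passes
	fmt.Println(checkMaxDownloads(1, "HOUR", nil))  // false: no stats attached
}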


@@ -977,6 +977,8 @@ func TestFilter_CheckFilter1(t *testing.T) {
 		UpdatedAt time.Time
 		MinSize string
 		MaxSize string
+		MaxDownloads int
+		MaxDownloadsPer FilterMaxDownloadsUnit
 		Delay int
 		Priority int32
 		MatchReleases string
@@ -1019,6 +1021,7 @@ func TestFilter_CheckFilter1(t *testing.T) {
 		ExceptTagsAny string
 		Actions []*Action
 		Indexers []Indexer
+		State *FilterDownloads
 	}
 	type args struct {
 		r *Release
@@ -1411,6 +1414,73 @@ func TestFilter_CheckFilter1(t *testing.T) {
 			wantRejections: nil,
 			wantMatch: true,
 		},
+		{
+			name: "test_32",
+			fields: fields{
+				MaxDownloads: 1,
+				MaxDownloadsPer: FilterMaxDownloadsMonth,
+				State: &FilterDownloads{
+					MonthCount: 0,
+				},
+			},
+			args: args{&Release{TorrentName: "Show.Name.S01.DV.2160p.ATVP.WEB-DL.DDPA5.1.x265-GROUP2"}},
+			wantRejections: nil,
+			wantMatch: true,
+		},
+		{
+			name: "test_33",
+			fields: fields{
+				MaxDownloads: 10,
+				MaxDownloadsPer: FilterMaxDownloadsMonth,
+				State: &FilterDownloads{
+					MonthCount: 10,
+				},
+			},
+			args: args{&Release{TorrentName: "Show.Name.S01.DV.2160p.ATVP.WEB-DL.DDPA5.1.x265-GROUP2"}},
+			wantRejections: []string{"max downloads (10) this (MONTH) reached"},
+			wantMatch: false,
+		},
+		{
+			name: "test_34",
+			fields: fields{
+				MaxDownloads: 10,
+				MaxDownloadsPer: FilterMaxDownloadsMonth,
+				State: &FilterDownloads{
+					MonthCount: 50,
+				},
+			},
+			args: args{&Release{TorrentName: "Show.Name.S01.DV.2160p.ATVP.WEB-DL.DDPA5.1.x265-GROUP2"}},
+			wantRejections: []string{"max downloads (10) this (MONTH) reached"},
+			wantMatch: false,
+		},
+		{
+			name: "test_35",
+			fields: fields{
+				MaxDownloads: 15,
+				MaxDownloadsPer: FilterMaxDownloadsHour,
+				State: &FilterDownloads{
+					HourCount: 20,
+					MonthCount: 50,
+				},
+			},
+			args: args{&Release{TorrentName: "Show.Name.S01.DV.2160p.ATVP.WEB-DL.DDPA5.1.x265-GROUP2"}},
+			wantRejections: []string{"max downloads (15) this (HOUR) reached"},
+			wantMatch: false,
+		},
+		{
+			name: "test_36",
+			fields: fields{
+				MaxDownloads: 15,
+				MaxDownloadsPer: FilterMaxDownloadsHour,
+				State: &FilterDownloads{
+					HourCount: 14,
+					MonthCount: 50,
+				},
+			},
+			args: args{&Release{TorrentName: "Show.Name.S01.DV.2160p.ATVP.WEB-DL.DDPA5.1.x265-GROUP2"}},
+			wantRejections: nil,
+			wantMatch: true,
+		},
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
@@ -1424,6 +1494,8 @@ func TestFilter_CheckFilter1(t *testing.T) {
 				MaxSize: tt.fields.MaxSize,
 				Delay: tt.fields.Delay,
 				Priority: tt.fields.Priority,
+				MaxDownloads: tt.fields.MaxDownloads,
+				MaxDownloadsUnit: tt.fields.MaxDownloadsPer,
 				MatchReleases: tt.fields.MatchReleases,
 				ExceptReleases: tt.fields.ExceptReleases,
 				UseRegex: tt.fields.UseRegex,
@@ -1464,6 +1536,7 @@ func TestFilter_CheckFilter1(t *testing.T) {
 				ExceptTagsAny: tt.fields.ExceptTagsAny,
 				Actions: tt.fields.Actions,
 				Indexers: tt.fields.Indexers,
+				Downloads: tt.fields.State,
 			}
 			tt.args.r.ParseString(tt.args.r.TorrentName)
 			rejections, match := f.CheckFilter(tt.args.r)


@@ -267,7 +267,7 @@ export const Select = ({
 				{label}
 			</Listbox.Label>
 			<div className="mt-2 relative">
-				<Listbox.Button className="bg-white dark:bg-gray-800 relative w-full border border-gray-300 dark:border-gray-700 rounded-md shadow-sm pl-3 pr-10 py-2 text-left cursor-default focus:outline-none focus:ring-1 focus:ring-indigo-500 dark:focus:ring-blue-500 focus:border-indigo-500 dark:focus:border-blue-500 dark:text-gray-200 sm:text-sm">
+				<Listbox.Button className="bg-white dark:bg-gray-800 relative w-full border border-gray-300 dark:border-gray-700 rounded-md shadow-sm pl-3 pr-10 py-2.5 text-left cursor-default focus:outline-none focus:ring-1 focus:ring-indigo-500 dark:focus:ring-blue-500 focus:border-indigo-500 dark:focus:border-blue-500 dark:text-gray-200 sm:text-sm">
 					<span className="block truncate">
 						{field.value
 							? options.find((c) => c.value === field.value)?.label


@@ -263,6 +263,33 @@ export const NotificationTypeOptions: OptionBasic[] = [
 	}
 ];
 
+export const downloadsPerUnitOptions: OptionBasic[] = [
+	{
+		label: "Select",
+		value: ""
+	},
+	{
+		label: "HOUR",
+		value: "HOUR"
+	},
+	{
+		label: "DAY",
+		value: "DAY"
+	},
+	{
+		label: "WEEK",
+		value: "WEEK"
+	},
+	{
+		label: "MONTH",
+		value: "MONTH"
+	},
+	{
+		label: "EVER",
+		value: "EVER"
+	}
+];
+
 export interface SelectOption {
 	label: string;
 	description: string;


@@ -25,7 +25,10 @@ import {
 	FORMATS_OPTIONS,
 	SOURCES_MUSIC_OPTIONS,
 	QUALITY_MUSIC_OPTIONS,
-	RELEASE_TYPE_MUSIC_OPTIONS, OTHER_OPTIONS, ORIGIN_OPTIONS
+	RELEASE_TYPE_MUSIC_OPTIONS,
+	OTHER_OPTIONS,
+	ORIGIN_OPTIONS,
+	downloadsPerUnitOptions
 } from "../../domain/constants";
 import { queryClient } from "../../App";
 import { APIClient } from "../../api/APIClient";
@@ -247,6 +250,8 @@ export default function FilterDetails() {
 			max_size: filter.max_size,
 			delay: filter.delay,
 			priority: filter.priority,
+			max_downloads: filter.max_downloads,
+			max_downloads_unit: filter.max_downloads_unit,
 			use_regex: filter.use_regex || false,
 			shows: filter.shows,
 			years: filter.years,
@@ -338,12 +343,6 @@ function General() {
 	const opts = indexers && indexers.length > 0 ? indexers.map(v => ({
 		label: v.name,
 		value: v.id
-		// value: {
-		//	id: v.id,
-		//	name: v.name,
-		//	identifier: v.identifier,
-		//	enabled: v.enabled
-		// }
 	})) : [];
 
 	return (
@@ -367,6 +366,9 @@ function General() {
 				<TextField name="max_size" label="Max size" columns={6} placeholder="" />
 				<NumberField name="delay" label="Delay" placeholder="" />
 				<NumberField name="priority" label="Priority" placeholder="" />
+				<NumberField name="max_downloads" label="Max downloads" placeholder="" />
+				<Select name="max_downloads_unit" label="Max downloads per" options={downloadsPerUnitOptions} optionDefaultText="Select unit" />
 			</div>
 		</div>


@@ -8,6 +8,8 @@ interface Filter {
 	max_size: string;
 	delay: number;
 	priority: number;
+	max_downloads: number;
+	max_downloads_unit: string;
 	match_releases: string;
 	except_releases: string;
 	use_regex: boolean;