mirror of
https://github.com/idanoo/autobrr
synced 2025-07-22 16:29:12 +00:00
feat(filters): add download limits (#266)
* feat(filters): add download limits
This commit is contained in:
parent
2903e7b493
commit
2a23ed0185
9 changed files with 458 additions and 82 deletions
|
@ -82,6 +82,8 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
|
|||
"max_size",
|
||||
"delay",
|
||||
"priority",
|
||||
"max_downloads",
|
||||
"max_downloads_unit",
|
||||
"match_releases",
|
||||
"except_releases",
|
||||
"use_regex",
|
||||
|
@ -138,11 +140,11 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
|
|||
}
|
||||
|
||||
var f domain.Filter
|
||||
var minSize, maxSize, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags sql.NullString
|
||||
var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags sql.NullString
|
||||
var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
|
||||
var delay, logScore sql.NullInt32
|
||||
var delay, maxDownloads, logScore sql.NullInt32
|
||||
|
||||
if err := row.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), pq.Array(&f.MatchOther), pq.Array(&f.ExceptOther), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, pq.Array(&f.Origins), &f.CreatedAt, &f.UpdatedAt); err != nil {
|
||||
if err := row.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &maxDownloads, &maxDownloadsUnit, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), pq.Array(&f.MatchOther), pq.Array(&f.ExceptOther), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, pq.Array(&f.Origins), &f.CreatedAt, &f.UpdatedAt); err != nil {
|
||||
log.Error().Stack().Err(err).Msgf("filter.findByID: %v : error scanning row", filterID)
|
||||
return nil, err
|
||||
}
|
||||
|
@ -150,6 +152,8 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
|
|||
f.MinSize = minSize.String
|
||||
f.MaxSize = maxSize.String
|
||||
f.Delay = int(delay.Int32)
|
||||
f.MaxDownloads = int(maxDownloads.Int32)
|
||||
f.MaxDownloadsUnit = domain.FilterMaxDownloadsUnit(maxDownloadsUnit.String)
|
||||
f.MatchReleases = matchReleases.String
|
||||
f.ExceptReleases = exceptReleases.String
|
||||
f.MatchReleaseGroups = matchReleaseGroups.String
|
||||
|
@ -180,6 +184,30 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
|
|||
|
||||
// FindByIndexerIdentifier find active filters with active indexer only
|
||||
func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, error) {
|
||||
ctx := context.TODO()
|
||||
tx, err := r.db.BeginTx(ctx, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer tx.Rollback()
|
||||
|
||||
filters, err := r.findByIndexerIdentifier(ctx, tx, indexer)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for i, filter := range filters {
|
||||
downloads, err := r.attachDownloadsByFilter(ctx, tx, filter.ID)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
filters[i].Downloads = downloads
|
||||
}
|
||||
|
||||
return filters, nil
|
||||
}
|
||||
|
||||
func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, tx *Tx, indexer string) ([]domain.Filter, error) {
|
||||
queryBuilder := r.db.squirrel.
|
||||
Select(
|
||||
"f.id",
|
||||
|
@ -189,6 +217,8 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
|
|||
"f.max_size",
|
||||
"f.delay",
|
||||
"f.priority",
|
||||
"f.max_downloads",
|
||||
"f.max_downloads_unit",
|
||||
"f.match_releases",
|
||||
"f.except_releases",
|
||||
"f.use_regex",
|
||||
|
@ -243,7 +273,7 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
|
|||
return nil, err
|
||||
}
|
||||
|
||||
rows, err := r.db.handler.Query(query, args...)
|
||||
rows, err := tx.QueryContext(ctx, query, args...)
|
||||
if err != nil {
|
||||
log.Error().Stack().Err(err).Msg("filter.findByIndexerIdentifier: error executing query")
|
||||
return nil, err
|
||||
|
@ -255,11 +285,11 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
|
|||
for rows.Next() {
|
||||
var f domain.Filter
|
||||
|
||||
var minSize, maxSize, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags sql.NullString
|
||||
var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags sql.NullString
|
||||
var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
|
||||
var delay, logScore sql.NullInt32
|
||||
var delay, maxDownloads, logScore sql.NullInt32
|
||||
|
||||
if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), pq.Array(&f.MatchOther), pq.Array(&f.ExceptOther), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, pq.Array(&f.Origins), &f.CreatedAt, &f.UpdatedAt); err != nil {
|
||||
if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &maxDownloads, &maxDownloadsUnit, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), pq.Array(&f.MatchOther), pq.Array(&f.ExceptOther), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, pq.Array(&f.Origins), &f.CreatedAt, &f.UpdatedAt); err != nil {
|
||||
log.Error().Stack().Err(err).Msg("filter.findByIndexerIdentifier: error scanning row")
|
||||
return nil, err
|
||||
}
|
||||
|
@ -267,6 +297,8 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
|
|||
f.MinSize = minSize.String
|
||||
f.MaxSize = maxSize.String
|
||||
f.Delay = int(delay.Int32)
|
||||
f.MaxDownloads = int(maxDownloads.Int32)
|
||||
f.MaxDownloadsUnit = domain.FilterMaxDownloadsUnit(maxDownloadsUnit.String)
|
||||
f.MatchReleases = matchReleases.String
|
||||
f.ExceptReleases = exceptReleases.String
|
||||
f.MatchReleaseGroups = matchReleaseGroups.String
|
||||
|
@ -308,6 +340,8 @@ func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.F
|
|||
"max_size",
|
||||
"delay",
|
||||
"priority",
|
||||
"max_downloads",
|
||||
"max_downloads_unit",
|
||||
"match_releases",
|
||||
"except_releases",
|
||||
"use_regex",
|
||||
|
@ -353,6 +387,8 @@ func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.F
|
|||
filter.MaxSize,
|
||||
filter.Delay,
|
||||
filter.Priority,
|
||||
filter.MaxDownloads,
|
||||
filter.MaxDownloadsUnit,
|
||||
filter.MatchReleases,
|
||||
filter.ExceptReleases,
|
||||
filter.UseRegex,
|
||||
|
@ -418,6 +454,8 @@ func (r *FilterRepo) Update(ctx context.Context, filter domain.Filter) (*domain.
|
|||
Set("max_size", filter.MaxSize).
|
||||
Set("delay", filter.Delay).
|
||||
Set("priority", filter.Priority).
|
||||
Set("max_downloads", filter.MaxDownloads).
|
||||
Set("max_downloads_unit", filter.MaxDownloadsUnit).
|
||||
Set("use_regex", filter.UseRegex).
|
||||
Set("match_releases", filter.MatchReleases).
|
||||
Set("except_releases", filter.ExceptReleases).
|
||||
|
@ -609,15 +647,62 @@ func (r *FilterRepo) Delete(ctx context.Context, filterID int) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
// Split string to slice. We store comma separated strings and convert to slice
|
||||
//func stringToSlice(str string) []string {
|
||||
// if str == "" {
|
||||
// return []string{}
|
||||
// } else if !strings.Contains(str, ",") {
|
||||
// return []string{str}
|
||||
// }
|
||||
//
|
||||
// split := strings.Split(str, ",")
|
||||
//
|
||||
// return split
|
||||
//}
|
||||
func (r *FilterRepo) attachDownloadsByFilter(ctx context.Context, tx *Tx, filterID int) (*domain.FilterDownloads, error) {
|
||||
if r.db.Driver == "sqlite" {
|
||||
return r.downloadsByFilterSqlite(ctx, tx, filterID)
|
||||
}
|
||||
|
||||
return r.downloadsByFilterPostgres(ctx, tx, filterID)
|
||||
}
|
||||
|
||||
func (r *FilterRepo) downloadsByFilterSqlite(ctx context.Context, tx *Tx, filterID int) (*domain.FilterDownloads, error) {
|
||||
query := `SELECT
|
||||
IFNULL(SUM(CASE WHEN "release".timestamp >= datetime('now', '-1 hour') THEN 1 ELSE 0 END),0) as "hour_count",
|
||||
IFNULL(SUM(CASE WHEN "release".timestamp >= datetime('now', 'start of day') THEN 1 ELSE 0 END),0) as "day_count",
|
||||
IFNULL(SUM(CASE WHEN "release".timestamp >= datetime('now', 'weekday 0', '-7 days') THEN 1 ELSE 0 END),0) as "week_count",
|
||||
IFNULL(SUM(CASE WHEN "release".timestamp >= datetime('now', 'start of month') THEN 1 ELSE 0 END),0) as "month_count",
|
||||
count(*) as "total_count"
|
||||
FROM "release"
|
||||
WHERE "release".filter_id = ?;`
|
||||
|
||||
row := tx.QueryRowContext(ctx, query, filterID)
|
||||
if err := row.Err(); err != nil {
|
||||
log.Error().Stack().Err(err).Msg("filter.downloadsByFilterSqlite: error querying stats")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var f domain.FilterDownloads
|
||||
|
||||
if err := row.Scan(&f.HourCount, &f.DayCount, &f.WeekCount, &f.MonthCount, &f.TotalCount); err != nil {
|
||||
log.Error().Stack().Err(err).Msg("filter.downloadsByFilterSqlite: error scanning stats data to struct")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &f, nil
|
||||
}
|
||||
|
||||
func (r *FilterRepo) downloadsByFilterPostgres(ctx context.Context, tx *Tx, filterID int) (*domain.FilterDownloads, error) {
|
||||
query := `SELECT
|
||||
COALESCE(SUM(CASE WHEN "release".timestamp >= date_trunc('hour', CURRENT_TIMESTAMP) THEN 1 ELSE 0 END),0) as "hour_count",
|
||||
COALESCE(SUM(CASE WHEN "release".timestamp >= date_trunc('day', CURRENT_DATE) THEN 1 ELSE 0 END),0) as "day_count",
|
||||
COALESCE(SUM(CASE WHEN "release".timestamp >= date_trunc('week', CURRENT_DATE) THEN 1 ELSE 0 END),0) as "week_count",
|
||||
COALESCE(SUM(CASE WHEN "release".timestamp >= date_trunc('month', CURRENT_DATE) THEN 1 ELSE 0 END),0) as "month_count",
|
||||
count(*) as "total_count"
|
||||
FROM "release"
|
||||
WHERE "release".filter_id = ?;`
|
||||
|
||||
row := tx.QueryRowContext(ctx, query, filterID)
|
||||
if err := row.Err(); err != nil {
|
||||
log.Error().Stack().Err(err).Msg("filter.downloadsByFilterPostgres: error querying stats")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var f domain.FilterDownloads
|
||||
|
||||
if err := row.Scan(&f.HourCount, &f.DayCount, &f.WeekCount, &f.MonthCount, &f.TotalCount); err != nil {
|
||||
log.Error().Stack().Err(err).Msg("filter.downloadsByFilterPostgres: error scanning stats data to struct")
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &f, nil
|
||||
}
|
||||
|
|
|
@ -64,6 +64,8 @@ CREATE TABLE filter
|
|||
max_size TEXT,
|
||||
delay INTEGER,
|
||||
priority INTEGER DEFAULT 0 NOT NULL,
|
||||
max_downloads INTEGER DEFAULT 0,
|
||||
max_downloads_unit TEXT,
|
||||
match_releases TEXT,
|
||||
except_releases TEXT,
|
||||
use_regex BOOLEAN,
|
||||
|
@ -196,9 +198,15 @@ CREATE TABLE "release"
|
|||
origin TEXT,
|
||||
tags TEXT [] DEFAULT '{}' NOT NULL,
|
||||
uploader TEXT,
|
||||
pre_time TEXT
|
||||
pre_time TEXT,
|
||||
filter_id INTEGER
|
||||
REFERENCES filter
|
||||
ON DELETE SET NULL
|
||||
);
|
||||
|
||||
CREATE INDEX release_filter_id_index
|
||||
ON "release" (filter_id);
|
||||
|
||||
CREATE TABLE release_action_status
|
||||
(
|
||||
id INTEGER PRIMARY KEY,
|
||||
|
@ -674,6 +682,98 @@ ALTER TABLE release_action_status_dg_tmp
|
|||
ALTER TABLE "action"
|
||||
ADD COLUMN limit_seed_time INTEGER DEFAULT 0;
|
||||
`,
|
||||
`
|
||||
alter table filter
|
||||
add max_downloads INTEGER default 0;
|
||||
|
||||
alter table filter
|
||||
add max_downloads_unit TEXT;
|
||||
|
||||
create table release_dg_tmp
|
||||
(
|
||||
id INTEGER
|
||||
primary key,
|
||||
filter_status TEXT,
|
||||
rejections TEXT [] default '{}' not null,
|
||||
indexer TEXT,
|
||||
filter TEXT,
|
||||
protocol TEXT,
|
||||
implementation TEXT,
|
||||
timestamp TIMESTAMP default CURRENT_TIMESTAMP,
|
||||
group_id TEXT,
|
||||
torrent_id TEXT,
|
||||
torrent_name TEXT,
|
||||
size INTEGER,
|
||||
title TEXT,
|
||||
category TEXT,
|
||||
season INTEGER,
|
||||
episode INTEGER,
|
||||
year INTEGER,
|
||||
resolution TEXT,
|
||||
source TEXT,
|
||||
codec TEXT,
|
||||
container TEXT,
|
||||
hdr TEXT,
|
||||
release_group TEXT,
|
||||
proper BOOLEAN,
|
||||
repack BOOLEAN,
|
||||
website TEXT,
|
||||
type TEXT,
|
||||
origin TEXT,
|
||||
tags TEXT [] default '{}' not null,
|
||||
uploader TEXT,
|
||||
pre_time TEXT,
|
||||
filter_id INTEGER
|
||||
CONSTRAINT release_filter_id_fk
|
||||
REFERENCES filter
|
||||
ON DELETE SET NULL
|
||||
);
|
||||
|
||||
INSERT INTO release_dg_tmp(id, filter_status, rejections, indexer, filter, protocol, implementation, timestamp,
|
||||
group_id, torrent_id, torrent_name, size, title, category, season, episode, year, resolution,
|
||||
source, codec, container, hdr, release_group, proper, repack, website, type, origin, tags,
|
||||
uploader, pre_time)
|
||||
SELECT id,
|
||||
filter_status,
|
||||
rejections,
|
||||
indexer,
|
||||
filter,
|
||||
protocol,
|
||||
implementation,
|
||||
timestamp,
|
||||
group_id,
|
||||
torrent_id,
|
||||
torrent_name,
|
||||
size,
|
||||
title,
|
||||
category,
|
||||
season,
|
||||
episode,
|
||||
year,
|
||||
resolution,
|
||||
source,
|
||||
codec,
|
||||
container,
|
||||
hdr,
|
||||
release_group,
|
||||
proper,
|
||||
repack,
|
||||
website,
|
||||
type,
|
||||
origin,
|
||||
tags,
|
||||
uploader,
|
||||
pre_time
|
||||
FROM "release";
|
||||
|
||||
DROP TABLE "release";
|
||||
|
||||
ALTER TABLE release_dg_tmp
|
||||
RENAME TO "release";
|
||||
|
||||
CREATE INDEX release_filter_id_index
|
||||
ON "release" (filter_id);
|
||||
`,
|
||||
}
|
||||
|
||||
const postgresSchema = `
|
||||
|
@ -740,6 +840,8 @@ CREATE TABLE filter
|
|||
max_size TEXT,
|
||||
delay INTEGER,
|
||||
priority INTEGER DEFAULT 0 NOT NULL,
|
||||
max_downloads INTEGER DEFAULT 0,
|
||||
max_downloads_unit TEXT,
|
||||
match_releases TEXT,
|
||||
except_releases TEXT,
|
||||
use_regex BOOLEAN,
|
||||
|
@ -888,9 +990,16 @@ CREATE TABLE "release"
|
|||
freeleech BOOLEAN,
|
||||
freeleech_percent INTEGER,
|
||||
uploader TEXT,
|
||||
pre_time TEXT
|
||||
pre_time TEXT,
|
||||
filter_id INTEGER
|
||||
CONSTRAINT release_filter_id_fk
|
||||
REFERENCES filter
|
||||
ON DELETE SET NULL
|
||||
);
|
||||
|
||||
CREATE INDEX release_filter_id_index
|
||||
ON release (filter_id);
|
||||
|
||||
CREATE TABLE release_action_status
|
||||
(
|
||||
id SERIAL PRIMARY KEY,
|
||||
|
@ -1103,4 +1212,22 @@ var postgresMigrations = []string{
|
|||
ALTER TABLE "action"
|
||||
ADD COLUMN limit_seed_time INTEGER DEFAULT 0;
|
||||
`,
|
||||
`
|
||||
ALTER TABLE filter
|
||||
ADD max_downloads INTEGER default 0;
|
||||
|
||||
ALTER TABLE filter
|
||||
ADD max_downloads_unit TEXT;
|
||||
|
||||
ALTER TABLE release
|
||||
add filter_id INTEGER;
|
||||
|
||||
CREATE INDEX release_filter_id_index
|
||||
ON release (filter_id);
|
||||
|
||||
ALTER TABLE release
|
||||
ADD CONSTRAINT release_filter_id_fk
|
||||
FOREIGN KEY (filter_id) REFERENCES FILTER
|
||||
ON DELETE SET NULL;
|
||||
`,
|
||||
}
|
||||
|
|
|
@ -25,8 +25,8 @@ func (repo *ReleaseRepo) Store(ctx context.Context, r *domain.Release) (*domain.
|
|||
|
||||
queryBuilder := repo.db.squirrel.
|
||||
Insert("release").
|
||||
Columns("filter_status", "rejections", "indexer", "filter", "protocol", "implementation", "timestamp", "group_id", "torrent_id", "torrent_name", "size", "title", "category", "season", "episode", "year", "resolution", "source", "codec", "container", "hdr", "release_group", "proper", "repack", "website", "type", "origin", "tags", "uploader", "pre_time").
|
||||
Values(r.FilterStatus, pq.Array(r.Rejections), r.Indexer, r.FilterName, r.Protocol, r.Implementation, r.Timestamp, r.GroupID, r.TorrentID, r.TorrentName, r.Size, r.Title, r.Category, r.Season, r.Episode, r.Year, r.Resolution, r.Source, codecStr, r.Container, hdrStr, r.Group, r.Proper, r.Repack, r.Website, r.Type, r.Origin, pq.Array(r.Tags), r.Uploader, r.PreTime).
|
||||
Columns("filter_status", "rejections", "indexer", "filter", "protocol", "implementation", "timestamp", "group_id", "torrent_id", "torrent_name", "size", "title", "category", "season", "episode", "year", "resolution", "source", "codec", "container", "hdr", "release_group", "proper", "repack", "website", "type", "origin", "tags", "uploader", "pre_time", "filter_id").
|
||||
Values(r.FilterStatus, pq.Array(r.Rejections), r.Indexer, r.FilterName, r.Protocol, r.Implementation, r.Timestamp, r.GroupID, r.TorrentID, r.TorrentName, r.Size, r.Title, r.Category, r.Season, r.Episode, r.Year, r.Resolution, r.Source, codecStr, r.Container, hdrStr, r.Group, r.Proper, r.Repack, r.Website, r.Type, r.Origin, pq.Array(r.Tags), r.Uploader, r.PreTime, r.FilterID).
|
||||
Suffix("RETURNING id").RunWith(repo.db.handler)
|
||||
|
||||
// return values
|
||||
|
|
|
@ -30,64 +30,91 @@ type FilterRepo interface {
|
|||
DeleteIndexerConnections(ctx context.Context, filterID int) error
|
||||
}
|
||||
|
||||
// FilterDownloads holds aggregated download counts for a filter over several
// time windows. It is used to enforce a filter's max-downloads limit.
type FilterDownloads struct {
	HourCount  int
	DayCount   int
	WeekCount  int
	MonthCount int
	TotalCount int
}

// FilterMaxDownloadsUnit is the time window a max-downloads limit applies to.
type FilterMaxDownloadsUnit string

const (
	FilterMaxDownloadsHour  FilterMaxDownloadsUnit = "HOUR"
	FilterMaxDownloadsDay   FilterMaxDownloadsUnit = "DAY"
	FilterMaxDownloadsWeek  FilterMaxDownloadsUnit = "WEEK"
	FilterMaxDownloadsMonth FilterMaxDownloadsUnit = "MONTH"
	FilterMaxDownloadsEver  FilterMaxDownloadsUnit = "EVER"
)
|
||||
|
||||
type Filter struct {
|
||||
ID int `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Enabled bool `json:"enabled"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
MinSize string `json:"min_size"`
|
||||
MaxSize string `json:"max_size"`
|
||||
Delay int `json:"delay"`
|
||||
Priority int32 `json:"priority"`
|
||||
MatchReleases string `json:"match_releases"`
|
||||
ExceptReleases string `json:"except_releases"`
|
||||
UseRegex bool `json:"use_regex"`
|
||||
MatchReleaseGroups string `json:"match_release_groups"`
|
||||
ExceptReleaseGroups string `json:"except_release_groups"`
|
||||
Scene bool `json:"scene"`
|
||||
Origins []string `json:"origins"`
|
||||
Freeleech bool `json:"freeleech"`
|
||||
FreeleechPercent string `json:"freeleech_percent"`
|
||||
Shows string `json:"shows"`
|
||||
Seasons string `json:"seasons"`
|
||||
Episodes string `json:"episodes"`
|
||||
Resolutions []string `json:"resolutions"` // SD, 480i, 480p, 576p, 720p, 810p, 1080i, 1080p.
|
||||
Codecs []string `json:"codecs"` // XviD, DivX, x264, h.264 (or h264), mpeg2 (or mpeg-2), VC-1 (or VC1), WMV, Remux, h.264 Remux (or h264 Remux), VC-1 Remux (or VC1 Remux).
|
||||
Sources []string `json:"sources"` // DSR, PDTV, HDTV, HR.PDTV, HR.HDTV, DVDRip, DVDScr, BDr, BD5, BD9, BDRip, BRRip, DVDR, MDVDR, HDDVD, HDDVDRip, BluRay, WEB-DL, TVRip, CAM, R5, TELESYNC, TS, TELECINE, TC. TELESYNC and TS are synonyms (you don't need both). Same for TELECINE and TC
|
||||
Containers []string `json:"containers"`
|
||||
MatchHDR []string `json:"match_hdr"`
|
||||
ExceptHDR []string `json:"except_hdr"`
|
||||
MatchOther []string `json:"match_other"`
|
||||
ExceptOther []string `json:"except_other"`
|
||||
Years string `json:"years"`
|
||||
Artists string `json:"artists"`
|
||||
Albums string `json:"albums"`
|
||||
MatchReleaseTypes []string `json:"match_release_types"` // Album,Single,EP
|
||||
ExceptReleaseTypes string `json:"except_release_types"`
|
||||
Formats []string `json:"formats"` // MP3, FLAC, Ogg, AAC, AC3, DTS
|
||||
Quality []string `json:"quality"` // 192, 320, APS (VBR), V2 (VBR), V1 (VBR), APX (VBR), V0 (VBR), q8.x (VBR), Lossless, 24bit Lossless, Other
|
||||
Media []string `json:"media"` // CD, DVD, Vinyl, Soundboard, SACD, DAT, Cassette, WEB, Other
|
||||
PerfectFlac bool `json:"perfect_flac"`
|
||||
Cue bool `json:"cue"`
|
||||
Log bool `json:"log"`
|
||||
LogScore int `json:"log_score"`
|
||||
MatchCategories string `json:"match_categories"`
|
||||
ExceptCategories string `json:"except_categories"`
|
||||
MatchUploaders string `json:"match_uploaders"`
|
||||
ExceptUploaders string `json:"except_uploaders"`
|
||||
Tags string `json:"tags"`
|
||||
ExceptTags string `json:"except_tags"`
|
||||
TagsAny string `json:"tags_any"`
|
||||
ExceptTagsAny string `json:"except_tags_any"`
|
||||
Actions []*Action `json:"actions"`
|
||||
Indexers []Indexer `json:"indexers"`
|
||||
ID int `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Enabled bool `json:"enabled"`
|
||||
CreatedAt time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
MinSize string `json:"min_size"`
|
||||
MaxSize string `json:"max_size"`
|
||||
Delay int `json:"delay"`
|
||||
Priority int32 `json:"priority"`
|
||||
MaxDownloads int `json:"max_downloads"`
|
||||
MaxDownloadsUnit FilterMaxDownloadsUnit `json:"max_downloads_unit"`
|
||||
MatchReleases string `json:"match_releases"`
|
||||
ExceptReleases string `json:"except_releases"`
|
||||
UseRegex bool `json:"use_regex"`
|
||||
MatchReleaseGroups string `json:"match_release_groups"`
|
||||
ExceptReleaseGroups string `json:"except_release_groups"`
|
||||
Scene bool `json:"scene"`
|
||||
Origins []string `json:"origins"`
|
||||
Freeleech bool `json:"freeleech"`
|
||||
FreeleechPercent string `json:"freeleech_percent"`
|
||||
Shows string `json:"shows"`
|
||||
Seasons string `json:"seasons"`
|
||||
Episodes string `json:"episodes"`
|
||||
Resolutions []string `json:"resolutions"` // SD, 480i, 480p, 576p, 720p, 810p, 1080i, 1080p.
|
||||
Codecs []string `json:"codecs"` // XviD, DivX, x264, h.264 (or h264), mpeg2 (or mpeg-2), VC-1 (or VC1), WMV, Remux, h.264 Remux (or h264 Remux), VC-1 Remux (or VC1 Remux).
|
||||
Sources []string `json:"sources"` // DSR, PDTV, HDTV, HR.PDTV, HR.HDTV, DVDRip, DVDScr, BDr, BD5, BD9, BDRip, BRRip, DVDR, MDVDR, HDDVD, HDDVDRip, BluRay, WEB-DL, TVRip, CAM, R5, TELESYNC, TS, TELECINE, TC. TELESYNC and TS are synonyms (you don't need both). Same for TELECINE and TC
|
||||
Containers []string `json:"containers"`
|
||||
MatchHDR []string `json:"match_hdr"`
|
||||
ExceptHDR []string `json:"except_hdr"`
|
||||
MatchOther []string `json:"match_other"`
|
||||
ExceptOther []string `json:"except_other"`
|
||||
Years string `json:"years"`
|
||||
Artists string `json:"artists"`
|
||||
Albums string `json:"albums"`
|
||||
MatchReleaseTypes []string `json:"match_release_types"` // Album,Single,EP
|
||||
ExceptReleaseTypes string `json:"except_release_types"`
|
||||
Formats []string `json:"formats"` // MP3, FLAC, Ogg, AAC, AC3, DTS
|
||||
Quality []string `json:"quality"` // 192, 320, APS (VBR), V2 (VBR), V1 (VBR), APX (VBR), V0 (VBR), q8.x (VBR), Lossless, 24bit Lossless, Other
|
||||
Media []string `json:"media"` // CD, DVD, Vinyl, Soundboard, SACD, DAT, Cassette, WEB, Other
|
||||
PerfectFlac bool `json:"perfect_flac"`
|
||||
Cue bool `json:"cue"`
|
||||
Log bool `json:"log"`
|
||||
LogScore int `json:"log_score"`
|
||||
MatchCategories string `json:"match_categories"`
|
||||
ExceptCategories string `json:"except_categories"`
|
||||
MatchUploaders string `json:"match_uploaders"`
|
||||
ExceptUploaders string `json:"except_uploaders"`
|
||||
Tags string `json:"tags"`
|
||||
ExceptTags string `json:"except_tags"`
|
||||
TagsAny string `json:"tags_any"`
|
||||
ExceptTagsAny string `json:"except_tags_any"`
|
||||
Actions []*Action `json:"actions"`
|
||||
Indexers []Indexer `json:"indexers"`
|
||||
Downloads *FilterDownloads `json:"-"`
|
||||
}
|
||||
|
||||
func (f Filter) CheckFilter(r *Release) ([]string, bool) {
|
||||
// reset rejections first to clean previous checks
|
||||
r.resetRejections()
|
||||
|
||||
// max downloads check. If reached return early
|
||||
if f.MaxDownloads > 0 && !f.checkMaxDownloads(f.MaxDownloads, f.MaxDownloadsUnit) {
|
||||
r.addRejectionF("max downloads (%d) this (%v) reached", f.MaxDownloads, f.MaxDownloadsUnit)
|
||||
return r.Rejections, false
|
||||
}
|
||||
|
||||
if f.Freeleech && r.Freeleech != f.Freeleech {
|
||||
r.addRejection("wanted: freeleech")
|
||||
}
|
||||
|
@ -248,6 +275,39 @@ func (f Filter) CheckFilter(r *Release) ([]string, bool) {
|
|||
return nil, true
|
||||
}
|
||||
|
||||
func (f Filter) checkMaxDownloads(max int, perTimeUnit FilterMaxDownloadsUnit) bool {
|
||||
if f.Downloads == nil {
|
||||
return false
|
||||
}
|
||||
|
||||
switch perTimeUnit {
|
||||
case FilterMaxDownloadsHour:
|
||||
if f.Downloads.HourCount > 0 && f.Downloads.HourCount >= max {
|
||||
return false
|
||||
}
|
||||
case FilterMaxDownloadsDay:
|
||||
if f.Downloads.DayCount > 0 && f.Downloads.DayCount >= max {
|
||||
return false
|
||||
}
|
||||
case FilterMaxDownloadsWeek:
|
||||
if f.Downloads.WeekCount > 0 && f.Downloads.WeekCount >= max {
|
||||
return false
|
||||
}
|
||||
case FilterMaxDownloadsMonth:
|
||||
if f.Downloads.MonthCount > 0 && f.Downloads.MonthCount >= max {
|
||||
return false
|
||||
}
|
||||
case FilterMaxDownloadsEver:
|
||||
if f.Downloads.TotalCount > 0 && f.Downloads.TotalCount >= max {
|
||||
return false
|
||||
}
|
||||
default:
|
||||
return true
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
// isPerfectFLAC Perfect is "CD FLAC Cue Log 100% Lossless or 24bit Lossless"
|
||||
func (f Filter) isPerfectFLAC(r *Release) bool {
|
||||
if !contains(r.Source, "CD") {
|
||||
|
|
|
@ -977,6 +977,8 @@ func TestFilter_CheckFilter1(t *testing.T) {
|
|||
UpdatedAt time.Time
|
||||
MinSize string
|
||||
MaxSize string
|
||||
MaxDownloads int
|
||||
MaxDownloadsPer FilterMaxDownloadsUnit
|
||||
Delay int
|
||||
Priority int32
|
||||
MatchReleases string
|
||||
|
@ -1019,6 +1021,7 @@ func TestFilter_CheckFilter1(t *testing.T) {
|
|||
ExceptTagsAny string
|
||||
Actions []*Action
|
||||
Indexers []Indexer
|
||||
State *FilterDownloads
|
||||
}
|
||||
type args struct {
|
||||
r *Release
|
||||
|
@ -1411,6 +1414,73 @@ func TestFilter_CheckFilter1(t *testing.T) {
|
|||
wantRejections: nil,
|
||||
wantMatch: true,
|
||||
},
|
||||
{
|
||||
name: "test_32",
|
||||
fields: fields{
|
||||
MaxDownloads: 1,
|
||||
MaxDownloadsPer: FilterMaxDownloadsMonth,
|
||||
State: &FilterDownloads{
|
||||
MonthCount: 0,
|
||||
},
|
||||
},
|
||||
args: args{&Release{TorrentName: "Show.Name.S01.DV.2160p.ATVP.WEB-DL.DDPA5.1.x265-GROUP2"}},
|
||||
wantRejections: nil,
|
||||
wantMatch: true,
|
||||
},
|
||||
{
|
||||
name: "test_33",
|
||||
fields: fields{
|
||||
MaxDownloads: 10,
|
||||
MaxDownloadsPer: FilterMaxDownloadsMonth,
|
||||
State: &FilterDownloads{
|
||||
MonthCount: 10,
|
||||
},
|
||||
},
|
||||
args: args{&Release{TorrentName: "Show.Name.S01.DV.2160p.ATVP.WEB-DL.DDPA5.1.x265-GROUP2"}},
|
||||
wantRejections: []string{"max downloads (10) this (MONTH) reached"},
|
||||
wantMatch: false,
|
||||
},
|
||||
{
|
||||
name: "test_34",
|
||||
fields: fields{
|
||||
MaxDownloads: 10,
|
||||
MaxDownloadsPer: FilterMaxDownloadsMonth,
|
||||
State: &FilterDownloads{
|
||||
MonthCount: 50,
|
||||
},
|
||||
},
|
||||
args: args{&Release{TorrentName: "Show.Name.S01.DV.2160p.ATVP.WEB-DL.DDPA5.1.x265-GROUP2"}},
|
||||
wantRejections: []string{"max downloads (10) this (MONTH) reached"},
|
||||
wantMatch: false,
|
||||
},
|
||||
{
|
||||
name: "test_35",
|
||||
fields: fields{
|
||||
MaxDownloads: 15,
|
||||
MaxDownloadsPer: FilterMaxDownloadsHour,
|
||||
State: &FilterDownloads{
|
||||
HourCount: 20,
|
||||
MonthCount: 50,
|
||||
},
|
||||
},
|
||||
args: args{&Release{TorrentName: "Show.Name.S01.DV.2160p.ATVP.WEB-DL.DDPA5.1.x265-GROUP2"}},
|
||||
wantRejections: []string{"max downloads (15) this (HOUR) reached"},
|
||||
wantMatch: false,
|
||||
},
|
||||
{
|
||||
name: "test_36",
|
||||
fields: fields{
|
||||
MaxDownloads: 15,
|
||||
MaxDownloadsPer: FilterMaxDownloadsHour,
|
||||
State: &FilterDownloads{
|
||||
HourCount: 14,
|
||||
MonthCount: 50,
|
||||
},
|
||||
},
|
||||
args: args{&Release{TorrentName: "Show.Name.S01.DV.2160p.ATVP.WEB-DL.DDPA5.1.x265-GROUP2"}},
|
||||
wantRejections: nil,
|
||||
wantMatch: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
|
@ -1424,6 +1494,8 @@ func TestFilter_CheckFilter1(t *testing.T) {
|
|||
MaxSize: tt.fields.MaxSize,
|
||||
Delay: tt.fields.Delay,
|
||||
Priority: tt.fields.Priority,
|
||||
MaxDownloads: tt.fields.MaxDownloads,
|
||||
MaxDownloadsUnit: tt.fields.MaxDownloadsPer,
|
||||
MatchReleases: tt.fields.MatchReleases,
|
||||
ExceptReleases: tt.fields.ExceptReleases,
|
||||
UseRegex: tt.fields.UseRegex,
|
||||
|
@ -1464,6 +1536,7 @@ func TestFilter_CheckFilter1(t *testing.T) {
|
|||
ExceptTagsAny: tt.fields.ExceptTagsAny,
|
||||
Actions: tt.fields.Actions,
|
||||
Indexers: tt.fields.Indexers,
|
||||
Downloads: tt.fields.State,
|
||||
}
|
||||
tt.args.r.ParseString(tt.args.r.TorrentName)
|
||||
rejections, match := f.CheckFilter(tt.args.r)
|
||||
|
|
|
@ -267,7 +267,7 @@ export const Select = ({
|
|||
{label}
|
||||
</Listbox.Label>
|
||||
<div className="mt-2 relative">
|
||||
<Listbox.Button className="bg-white dark:bg-gray-800 relative w-full border border-gray-300 dark:border-gray-700 rounded-md shadow-sm pl-3 pr-10 py-2 text-left cursor-default focus:outline-none focus:ring-1 focus:ring-indigo-500 dark:focus:ring-blue-500 focus:border-indigo-500 dark:focus:border-blue-500 dark:text-gray-200 sm:text-sm">
|
||||
<Listbox.Button className="bg-white dark:bg-gray-800 relative w-full border border-gray-300 dark:border-gray-700 rounded-md shadow-sm pl-3 pr-10 py-2.5 text-left cursor-default focus:outline-none focus:ring-1 focus:ring-indigo-500 dark:focus:ring-blue-500 focus:border-indigo-500 dark:focus:border-blue-500 dark:text-gray-200 sm:text-sm">
|
||||
<span className="block truncate">
|
||||
{field.value
|
||||
? options.find((c) => c.value === field.value)?.label
|
||||
|
|
|
@ -263,6 +263,33 @@ export const NotificationTypeOptions: OptionBasic[] = [
|
|||
}
|
||||
];
|
||||
|
||||
export const downloadsPerUnitOptions: OptionBasic[] = [
|
||||
{
|
||||
label: "Select",
|
||||
value: ""
|
||||
},
|
||||
{
|
||||
label: "HOUR",
|
||||
value: "HOUR"
|
||||
},
|
||||
{
|
||||
label: "DAY",
|
||||
value: "DAY"
|
||||
},
|
||||
{
|
||||
label: "WEEK",
|
||||
value: "WEEK"
|
||||
},
|
||||
{
|
||||
label: "MONTH",
|
||||
value: "MONTH"
|
||||
},
|
||||
{
|
||||
label: "EVER",
|
||||
value: "EVER"
|
||||
}
|
||||
];
|
||||
|
||||
export interface SelectOption {
|
||||
label: string;
|
||||
description: string;
|
||||
|
|
|
@ -25,7 +25,10 @@ import {
|
|||
FORMATS_OPTIONS,
|
||||
SOURCES_MUSIC_OPTIONS,
|
||||
QUALITY_MUSIC_OPTIONS,
|
||||
RELEASE_TYPE_MUSIC_OPTIONS, OTHER_OPTIONS, ORIGIN_OPTIONS
|
||||
RELEASE_TYPE_MUSIC_OPTIONS,
|
||||
OTHER_OPTIONS,
|
||||
ORIGIN_OPTIONS,
|
||||
downloadsPerUnitOptions
|
||||
} from "../../domain/constants";
|
||||
import { queryClient } from "../../App";
|
||||
import { APIClient } from "../../api/APIClient";
|
||||
|
@ -247,6 +250,8 @@ export default function FilterDetails() {
|
|||
max_size: filter.max_size,
|
||||
delay: filter.delay,
|
||||
priority: filter.priority,
|
||||
max_downloads: filter.max_downloads,
|
||||
max_downloads_unit: filter.max_downloads_unit,
|
||||
use_regex: filter.use_regex || false,
|
||||
shows: filter.shows,
|
||||
years: filter.years,
|
||||
|
@ -338,12 +343,6 @@ function General() {
|
|||
const opts = indexers && indexers.length > 0 ? indexers.map(v => ({
|
||||
label: v.name,
|
||||
value: v.id
|
||||
// value: {
|
||||
// id: v.id,
|
||||
// name: v.name,
|
||||
// identifier: v.identifier,
|
||||
// enabled: v.enabled
|
||||
// }
|
||||
})) : [];
|
||||
|
||||
return (
|
||||
|
@ -367,6 +366,9 @@ function General() {
|
|||
<TextField name="max_size" label="Max size" columns={6} placeholder="" />
|
||||
<NumberField name="delay" label="Delay" placeholder="" />
|
||||
<NumberField name="priority" label="Priority" placeholder="" />
|
||||
|
||||
<NumberField name="max_downloads" label="Max downloads" placeholder="" />
|
||||
<Select name="max_downloads_unit" label="Max downloads per" options={downloadsPerUnitOptions} optionDefaultText="Select unit" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
|
2
web/src/types/Filter.d.ts
vendored
2
web/src/types/Filter.d.ts
vendored
|
@ -8,6 +8,8 @@ interface Filter {
|
|||
max_size: string;
|
||||
delay: number;
|
||||
priority: number;
|
||||
max_downloads: number;
|
||||
max_downloads_unit: string;
|
||||
match_releases: string;
|
||||
except_releases: string;
|
||||
use_regex: boolean;
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue