Mirror of https://github.com/idanoo/autobrr
Synced 2025-07-23 00:39:13 +00:00
Feature: Save releases (#36)
* chore: tidy deps
* refactor: database migration
* refactor: store release
* refactor: save release
* chore: add packages
* feat(web): show stats and recent releases
* refactor: simplify filter struct
* feat: add eventbus
* chore: cleanup logging
* chore: update packages
This commit is contained in:
parent d22dd2fe84
commit 7177e48c02

40 changed files with 5859 additions and 3328 deletions
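One of the bullet points above adds an eventbus, presumably so stored releases can be surfaced to the new stats/recent-releases view. As a rough, hypothetical sketch of the idea only (none of the names below are taken from autobrr's code), an in-process publish/subscribe bus in Go can be as small as:

package main

import (
    "fmt"
    "sync"
)

// EventBus is a minimal in-process publish/subscribe hub.
// Hypothetical illustration only; autobrr's actual events package may differ.
type EventBus struct {
    mu          sync.RWMutex
    subscribers map[string][]chan interface{}
}

func NewEventBus() *EventBus {
    return &EventBus{subscribers: make(map[string][]chan interface{})}
}

// Subscribe returns a channel that receives every payload published on topic.
func (b *EventBus) Subscribe(topic string) <-chan interface{} {
    ch := make(chan interface{}, 16)
    b.mu.Lock()
    b.subscribers[topic] = append(b.subscribers[topic], ch)
    b.mu.Unlock()
    return ch
}

// Publish fans payload out to all subscribers of topic without blocking.
func (b *EventBus) Publish(topic string, payload interface{}) {
    b.mu.RLock()
    defer b.mu.RUnlock()
    for _, ch := range b.subscribers[topic] {
        select {
        case ch <- payload:
        default: // drop if a subscriber is not keeping up
        }
    }
}

func main() {
    bus := NewEventBus()
    releases := bus.Subscribe("release:store")

    bus.Publish("release:store", "Some.Release.2021.1080p.WEB-DL.x264-GROUP")
    fmt.Println(<-releases)
}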
@@ -1,20 +1,16 @@
package filter

import (
    "strings"

    "github.com/rs/zerolog/log"

    "github.com/autobrr/autobrr/internal/domain"
    "github.com/autobrr/autobrr/internal/indexer"
    "github.com/autobrr/autobrr/pkg/wildcard"
)

type Service interface {
    //FindFilter(announce domain.Announce) (*domain.Filter, error)

    FindByID(filterID int) (*domain.Filter, error)
    FindByIndexerIdentifier(announce domain.Announce) (*domain.Filter, error)
    FindByIndexerIdentifier(indexer string) ([]domain.Filter, error)
    FindAndCheckFilters(release *domain.Release) (bool, *domain.Filter, error)
    ListFilters() ([]domain.Filter, error)
    Store(filter domain.Filter) (*domain.Filter, error)
    Update(filter domain.Filter) (*domain.Filter, error)
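The interface above swaps the announce-based lookup for an indexer-keyed lookup plus a combined FindAndCheckFilters call that returns the matched filter with its actions already attached. A minimal sketch of how a caller might drive it (the releaseProcessor type is hypothetical glue code; only the filter.Service methods and the domain types it touches appear in this diff, and the internal/filter import path is assumed from the sibling imports above):

package example

import (
    "github.com/autobrr/autobrr/internal/domain"
    "github.com/autobrr/autobrr/internal/filter"
)

// releaseProcessor is hypothetical glue code around the filter service.
type releaseProcessor struct {
    filterSvc filter.Service
}

func (p *releaseProcessor) Process(release *domain.Release) error {
    // ask for the first filter that matches this release; on a match the
    // filter comes back with its actions attached
    match, f, err := p.filterSvc.FindAndCheckFilters(release)
    if err != nil {
        return err
    }
    if !match || f == nil {
        return nil // no filter matched, nothing to do for this release
    }

    // the matched filter's actions would be handed off to whatever executes
    // them (queue a download, run a program, push to a client, ...)
    _ = f.Actions
    return nil
}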
@@ -65,7 +61,6 @@ func (s *service) FindByID(filterID int) (*domain.Filter, error) {
    }

    // find actions and attach
    //actions, err := s.actionRepo.FindFilterActions(filter.ID)
    actions, err := s.actionRepo.FindByFilterID(filter.ID)
    if err != nil {
        log.Error().Msgf("could not find filter actions: %+v", &filter.ID)
@@ -80,79 +75,20 @@ func (s *service) FindByID(filterID int) (*domain.Filter, error) {
    }
    filter.Indexers = indexers

    //log.Debug().Msgf("found filter: %+v", filter)

    return filter, nil
}

func (s *service) FindByIndexerIdentifier(announce domain.Announce) (*domain.Filter, error) {
    // get filter for tracker
    filters, err := s.repo.FindByIndexerIdentifier(announce.Site)
func (s *service) FindByIndexerIdentifier(indexer string) ([]domain.Filter, error) {
    // get filters for indexer
    filters, err := s.repo.FindByIndexerIdentifier(indexer)
    if err != nil {
        log.Error().Err(err).Msgf("could not find filters for indexer: %v", announce.Site)
        log.Error().Err(err).Msgf("could not find filters for indexer: %v", indexer)
        return nil, err
    }

    // match against announce/releaseInfo
    for _, filter := range filters {
        // if match, return the filter
        matchedFilter := s.checkFilter(filter, announce)
        if matchedFilter {
            log.Trace().Msgf("found matching filter: %+v", &filter)
            log.Debug().Msgf("found matching filter: %v", &filter.Name)

            // find actions and attach
            actions, err := s.actionRepo.FindByFilterID(filter.ID)
            if err != nil {
                log.Error().Err(err).Msgf("could not find filter actions: %+v", &filter.ID)
                return nil, err
            }

            // if no actions found, check next filter
            if actions == nil {
                continue
            }

            filter.Actions = actions

            return &filter, nil
        }
    }

    // if no match, return nil
    return nil, nil
    return filters, nil
}

//func (s *service) FindFilter(announce domain.Announce) (*domain.Filter, error) {
//    // get filter for tracker
//    filters, err := s.repo.FindFiltersForSite(announce.Site)
//    if err != nil {
//        return nil, err
//    }
//
//    // match against announce/releaseInfo
//    for _, filter := range filters {
//        // if match, return the filter
//        matchedFilter := s.checkFilter(filter, announce)
//        if matchedFilter {
//
//            log.Debug().Msgf("found filter: %+v", &filter)
//
//            // find actions and attach
//            actions, err := s.actionRepo.FindByFilterID(filter.ID)
//            if err != nil {
//                log.Error().Msgf("could not find filter actions: %+v", &filter.ID)
//            }
//            filter.Actions = actions
//
//            return &filter, nil
//        }
//    }
//
//    // if no match, return nil
//    return nil, nil
//}

func (s *service) Store(filter domain.Filter) (*domain.Filter, error) {
    // validate data

@@ -216,137 +152,49 @@ func (s *service) Delete(filterID int) error {
    return nil
}

// checkFilter tries to match filter against announce
func (s *service) checkFilter(filter domain.Filter, announce domain.Announce) bool {
func (s *service) FindAndCheckFilters(release *domain.Release) (bool, *domain.Filter, error) {

    if !filter.Enabled {
        return false
    filters, err := s.repo.FindByIndexerIdentifier(release.Indexer)
    if err != nil {
        log.Error().Err(err).Msgf("could not find filters for indexer: %v", release.Indexer)
        return false, nil, err
    }

    if filter.Scene && announce.Scene != filter.Scene {
        return false
    }
    // loop and check release to filter until match
    for _, f := range filters {
        log.Trace().Msgf("checking filter: %+v", f.Name)

    if filter.Freeleech && announce.Freeleech != filter.Freeleech {
        return false
    }
        matchedFilter := release.CheckFilter(f)
        // if matched, attach actions and return the f
        if matchedFilter {
            //release.Filter = &f
            //release.FilterID = f.ID
            //release.FilterName = f.Name

    if filter.Shows != "" && !checkFilterStrings(announce.TorrentName, filter.Shows) {
        return false
    }
            log.Debug().Msgf("found and matched filter: %+v", f.Name)

    //if filter.Seasons != "" && !checkFilterStrings(announce.TorrentName, filter.Seasons) {
    //    return false
    //}
    //
    //if filter.Episodes != "" && !checkFilterStrings(announce.TorrentName, filter.Episodes) {
    //    return false
    //}

    // matchRelease
    if filter.MatchReleases != "" && !checkFilterStrings(announce.TorrentName, filter.MatchReleases) {
        return false
    }

    if filter.MatchReleaseGroups != "" && !checkFilterStrings(announce.TorrentName, filter.MatchReleaseGroups) {
        return false
    }

    if filter.ExceptReleaseGroups != "" && checkFilterStrings(announce.TorrentName, filter.ExceptReleaseGroups) {
        return false
    }

    if filter.MatchUploaders != "" && !checkFilterStrings(announce.Uploader, filter.MatchUploaders) {
        return false
    }

    if filter.ExceptUploaders != "" && checkFilterStrings(announce.Uploader, filter.ExceptUploaders) {
        return false
    }

    if len(filter.Resolutions) > 0 && !checkFilterSlice(announce.TorrentName, filter.Resolutions) {
        return false
    }

    if len(filter.Codecs) > 0 && !checkFilterSlice(announce.TorrentName, filter.Codecs) {
        return false
    }

    if len(filter.Sources) > 0 && !checkFilterSlice(announce.TorrentName, filter.Sources) {
        return false
    }

    if len(filter.Containers) > 0 && !checkFilterSlice(announce.TorrentName, filter.Containers) {
        return false
    }

    if filter.Years != "" && !checkFilterStrings(announce.TorrentName, filter.Years) {
        return false
    }

    if filter.MatchCategories != "" && !checkFilterStrings(announce.Category, filter.MatchCategories) {
        return false
    }

    if filter.ExceptCategories != "" && checkFilterStrings(announce.Category, filter.ExceptCategories) {
        return false
    }

    if filter.Tags != "" && !checkFilterStrings(announce.Tags, filter.Tags) {
        return false
    }

    if filter.ExceptTags != "" && checkFilterStrings(announce.Tags, filter.ExceptTags) {
        return false
    }

    return true
}

func checkFilterSlice(name string, filterList []string) bool {
    name = strings.ToLower(name)

    for _, filter := range filterList {
        filter = strings.ToLower(filter)
        // check if line contains * or ?, if so try wildcard match, otherwise try substring match
        a := strings.ContainsAny(filter, "?|*")
        if a {
            match := wildcard.Match(filter, name)
            if match {
                return true
            // TODO do additional size check against indexer api or torrent for size
            if release.AdditionalSizeCheckRequired {
                log.Debug().Msgf("additional size check required for: %+v", f.Name)
                // check if indexer = btn,ptp,ggn,red
                // fetch api for data
                // else download torrent and add to tmpPath
                // if size != response.size
                // r.RecheckSizeFilter(f)
                //continue
            }
        } else {
            b := strings.Contains(name, filter)
            if b {
                return true

            // find actions and attach
            actions, err := s.actionRepo.FindByFilterID(f.ID)
            if err != nil {
                log.Error().Err(err).Msgf("could not find actions for filter: %+v", f.Name)
            }
            f.Actions = actions

            return true, &f, nil
            }
        }

    return false
}

func checkFilterStrings(name string, filterList string) bool {
    filterSplit := strings.Split(filterList, ",")
    name = strings.ToLower(name)

    for _, s := range filterSplit {
        s = strings.ToLower(s)
        // check if line contains * or ?, if so try wildcard match, otherwise try substring match
        a := strings.ContainsAny(s, "?|*")
        if a {
            match := wildcard.Match(s, name)
            if match {
                return true
            }
        } else {
            b := strings.Contains(name, s)
            if b {
                return true
            }
        }

    }

    return false
    // if no match, return nil
    return false, nil, nil
}
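The checkFilterSlice and checkFilterStrings helpers shown above fall back to a plain substring match unless the pattern contains ? or *, in which case they try a wildcard match; both sides are lowercased first and string filters are comma-separated lists. A self-contained sketch of that matching rule (filepath.Match stands in for autobrr's pkg/wildcard here, so the wildcard semantics are close but not identical):

package main

import (
    "fmt"
    "path/filepath"
    "strings"
)

// matchesAny mirrors the checkFilterStrings logic above: the filter value is a
// comma-separated list, both sides are lowercased, patterns containing ? or *
// are wildcard-matched, everything else is a plain substring match.
func matchesAny(name string, filterList string) bool {
    name = strings.ToLower(name)
    for _, pattern := range strings.Split(filterList, ",") {
        pattern = strings.ToLower(pattern)
        if strings.ContainsAny(pattern, "?*") {
            if ok, _ := filepath.Match(pattern, name); ok {
                return true
            }
        } else if strings.Contains(name, pattern) {
            return true
        }
    }
    return false
}

func main() {
    name := "That.Show.S04E02.1080p.WEB-DL.x264-GROUP"
    fmt.Println(matchesAny(name, "That Show,Other Show")) // false: substring with spaces never appears in the dotted name
    fmt.Println(matchesAny(name, "That.Show.*"))          // true: wildcard pattern matches the whole name
    fmt.Println(matchesAny(name, "x264,x265"))            // true: plain substring "x264"
}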