Feature: Save releases (#36)

* chore: tidy deps

* refactor: database migration

* refactor: store release

* refactor: save release

* chore: add packages

* feat(web): show stats and recent releases

* refactor: simply filter struct

* feat: add eventbus

* chore: cleanup logging

* chore: update packages
Ludvig Lundgren 2021-11-24 23:18:12 +01:00 committed by GitHub
parent d22dd2fe84
commit 7177e48c02
40 changed files with 5859 additions and 3328 deletions

View file

@@ -7,6 +7,7 @@ import (
"os/signal"
"syscall"
"github.com/asaskevich/EventBus"
"github.com/r3labs/sse/v2"
"github.com/rs/zerolog/log"
"github.com/spf13/pflag"
@@ -19,6 +20,7 @@ import (
"github.com/autobrr/autobrr/internal/database"
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/internal/download_client"
"github.com/autobrr/autobrr/internal/events"
"github.com/autobrr/autobrr/internal/filter"
"github.com/autobrr/autobrr/internal/http"
"github.com/autobrr/autobrr/internal/indexer"
@@ -47,6 +49,9 @@ func main() {
serverEvents.CreateStream("logs")
// setup internal eventbus
bus := EventBus.New()
// setup logger
logger.Setup(cfg, serverEvents)
@@ -65,34 +70,37 @@ func main() {
}
// setup repos
// var announceRepo = database.NewAnnounceRepo(db)
var (
actionRepo = database.NewActionRepo(db)
downloadClientRepo = database.NewDownloadClientRepo(db)
filterRepo = database.NewFilterRepo(db)
indexerRepo = database.NewIndexerRepo(db)
ircRepo = database.NewIrcRepo(db)
releaseRepo = database.NewReleaseRepo(db)
userRepo = database.NewUserRepo(db)
)
var (
downloadClientService = download_client.NewService(downloadClientRepo)
actionService = action.NewService(actionRepo, downloadClientService)
actionService = action.NewService(actionRepo, downloadClientService, bus)
indexerService = indexer.NewService(indexerRepo)
filterService = filter.NewService(filterRepo, actionRepo, indexerService)
releaseService = release.NewService(actionService)
releaseService = release.NewService(releaseRepo, actionService)
announceService = announce.NewService(filterService, indexerService, releaseService)
ircService = irc.NewService(ircRepo, announceService)
userService = user.NewService(userRepo)
authService = auth.NewService(userService)
)
// register event subscribers
events.NewSubscribers(bus, releaseService)
addr := fmt.Sprintf("%v:%v", cfg.Host, cfg.Port)
errorChannel := make(chan error)
go func() {
httpServer := http.NewServer(serverEvents, addr, cfg.BaseURL, actionService, authService, downloadClientService, filterService, indexerService, ircService)
httpServer := http.NewServer(serverEvents, addr, cfg.BaseURL, actionService, authService, downloadClientService, filterService, indexerService, ircService, releaseService)
errorChannel <- httpServer.Open()
}()
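The new internal/events package is referenced above (events.NewSubscribers(bus, releaseService)) but its contents are not part of this excerpt. A minimal sketch of how such subscribers could be wired with the asaskevich/EventBus API, assuming a small local interface over the release service; the type, interface, and handler names here are illustrative, not the commit's actual code:

package events

import (
	"context"

	"github.com/asaskevich/EventBus"
	"github.com/rs/zerolog/log"

	"github.com/autobrr/autobrr/internal/domain"
)

// releaseService is the slice of the release service the subscribers need.
// The exact interface is an assumption; it is not shown in this diff.
type releaseService interface {
	UpdatePushStatus(ctx context.Context, id int64, status domain.ReleasePushStatus) error
	UpdatePushStatusRejected(ctx context.Context, id int64, rejections string) error
}

type Subscriber struct {
	releaseSvc releaseService
}

// NewSubscribers attaches handlers for the topics published by the action
// service: "release:update-push-status" and "release:update-push-status-rejected".
func NewSubscribers(bus EventBus.Bus, releaseSvc releaseService) Subscriber {
	s := Subscriber{releaseSvc: releaseSvc}

	if err := bus.Subscribe("release:update-push-status", s.releasePushStatus); err != nil {
		log.Error().Err(err).Msg("could not subscribe to release:update-push-status")
	}
	if err := bus.Subscribe("release:update-push-status-rejected", s.releasePushStatusRejected); err != nil {
		log.Error().Err(err).Msg("could not subscribe to release:update-push-status-rejected")
	}

	return s
}

func (s Subscriber) releasePushStatus(id int64, status domain.ReleasePushStatus) {
	if err := s.releaseSvc.UpdatePushStatus(context.Background(), id, status); err != nil {
		log.Error().Err(err).Msgf("events: error updating push status for release %d", id)
	}
}

func (s Subscriber) releasePushStatusRejected(id int64, rejections string) {
	if err := s.releaseSvc.UpdatePushStatusRejected(context.Background(), id, rejections); err != nil {
		log.Error().Err(err).Msgf("events: error marking release %d as rejected", id)
	}
}

Keeping the dependency behind a narrow local interface avoids an import cycle between internal/events and internal/release; whether the commit structures it this way is not visible in this diff.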

go.mod (34 changed lines)
View file

@@ -3,26 +3,32 @@ module github.com/autobrr/autobrr
go 1.16
require (
github.com/anacrolix/torrent v1.29.1
github.com/Masterminds/squirrel v1.5.1
github.com/anacrolix/torrent v1.38.0
github.com/asaskevich/EventBus v0.0.0-20200907212545-49d423059eef
github.com/dustin/go-humanize v1.0.0
github.com/gdm85/go-libdeluge v0.5.5
github.com/go-chi/chi v1.5.4
github.com/gorilla/sessions v1.2.1
github.com/gorilla/websocket v1.4.2
github.com/lib/pq v1.10.2
github.com/pelletier/go-toml v1.6.0 // indirect
github.com/lib/pq v1.10.4
github.com/mattn/go-isatty v0.0.14 // indirect
github.com/pkg/errors v0.9.1
github.com/r3labs/sse/v2 v2.3.6
github.com/r3labs/sse/v2 v2.7.2
github.com/rs/cors v1.8.0
github.com/rs/zerolog v1.20.0
github.com/smartystreets/assertions v1.0.0 // indirect
github.com/spf13/pflag v1.0.3
github.com/spf13/viper v1.7.1
github.com/rs/zerolog v1.26.0
github.com/spf13/pflag v1.0.5
github.com/spf13/viper v1.9.0
github.com/stretchr/testify v1.7.0
golang.org/x/crypto v0.0.0-20210812204632-0ba0e8f03122
golang.org/x/net v0.0.0-20210427231257-85d9c07bbe3a
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c // indirect
gopkg.in/irc.v3 v3.1.1
golang.org/x/crypto v0.0.0-20211117183948-ae814b36b871
golang.org/x/mod v0.5.1 // indirect
golang.org/x/net v0.0.0-20211123203042-d83791d6bcd9
golang.org/x/sys v0.0.0-20211124211545-fe61309f8881 // indirect
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 // indirect
golang.org/x/text v0.3.7 // indirect
gopkg.in/ini.v1 v1.64.0 // indirect
gopkg.in/irc.v3 v3.1.4
gopkg.in/natefinch/lumberjack.v2 v2.0.0
gopkg.in/yaml.v2 v2.4.0
modernc.org/sqlite v1.12.0
modernc.org/ccgo/v3 v3.12.73 // indirect
modernc.org/sqlite v1.14.1
)

go.sum (663 changed lines)

File diff suppressed because it is too large.

View file

@@ -10,8 +10,8 @@ import (
"github.com/rs/zerolog/log"
)
func (s *service) execCmd(announce domain.Announce, action domain.Action, torrentFile string) {
log.Debug().Msgf("action exec: %v release: %v", action.Name, announce.TorrentName)
func (s *service) execCmd(release domain.Release, action domain.Action, torrentFile string) {
log.Debug().Msgf("action exec: %v release: %v", action.Name, release.TorrentName)
// check if program exists
cmd, err := exec.LookPath(action.ExecCmd)
@@ -22,9 +22,9 @@ func (s *service) execCmd(announce domain.Announce, action domain.Action, torren
// handle args and replace vars
m := Macro{
TorrentName: announce.TorrentName,
TorrentName: release.TorrentName,
TorrentPathName: torrentFile,
TorrentUrl: announce.TorrentUrl,
TorrentUrl: release.TorrentURL,
}
// parse and replace values in argument string before continuing
@@ -53,5 +53,5 @@ func (s *service) execCmd(announce domain.Announce, action domain.Action, torren
duration := time.Since(start)
log.Info().Msgf("executed command: '%v', args: '%v' %v,%v, total time %v", cmd, parsedArgs, announce.TorrentName, announce.Site, duration)
log.Info().Msgf("executed command: '%v', args: '%v' %v,%v, total time %v", cmd, parsedArgs, release.TorrentName, release.Indexer, duration)
}
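execCmd builds a Macro from the release and then parses the action's argument string before running the command. The macro parser itself is not shown in this hunk; a self-contained sketch of the idea, assuming Go's text/template is used for substitution (an assumption, not necessarily what this commit does):

package main

import (
	"bytes"
	"fmt"
	"text/template"
)

// Macro mirrors the fields set in execCmd above.
type Macro struct {
	TorrentName     string
	TorrentPathName string
	TorrentUrl      string
}

// Parse substitutes the Macro fields into an action's argument string.
func (m Macro) Parse(args string) (string, error) {
	tmpl, err := template.New("macro").Parse(args)
	if err != nil {
		return "", err
	}
	var buf bytes.Buffer
	if err := tmpl.Execute(&buf, m); err != nil {
		return "", err
	}
	return buf.String(), nil
}

func main() {
	m := Macro{
		TorrentName:     "Example.Release.2021-GROUP",
		TorrentPathName: "/tmp/example.torrent",
	}
	out, err := m.Parse("add {{ .TorrentPathName }} --label {{ .TorrentName }}")
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // add /tmp/example.torrent --label Example.Release.2021-GROUP
}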

View file

@@ -9,7 +9,7 @@ import (
"github.com/rs/zerolog/log"
)
func (s *service) lidarr(announce domain.Announce, action domain.Action) error {
func (s *service) lidarr(release domain.Release, action domain.Action) error {
log.Trace().Msg("action LIDARR")
// TODO validate data
@@ -39,28 +39,35 @@ func (s *service) lidarr(announce domain.Announce, action domain.Action) error {
cfg.Password = client.Settings.Basic.Password
}
r := lidarr.New(cfg)
arr := lidarr.New(cfg)
release := lidarr.Release{
Title: announce.TorrentName,
DownloadUrl: announce.TorrentUrl,
Size: 0,
Indexer: announce.Site,
r := lidarr.Release{
Title: release.TorrentName,
DownloadUrl: release.TorrentURL,
Size: int64(release.Size),
Indexer: release.Indexer,
DownloadProtocol: "torrent",
Protocol: "torrent",
PublishDate: time.Now().Format(time.RFC3339),
}
success, err := r.Push(release)
success, rejections, err := arr.Push(r)
if err != nil {
log.Error().Stack().Err(err).Msgf("lidarr: failed to push release: %v", release)
log.Error().Stack().Err(err).Msgf("lidarr: failed to push release: %v", r)
return err
}
if success {
// TODO save pushed release
log.Debug().Msgf("lidarr: successfully pushed release: %v, indexer %v to %v", release.Title, release.Indexer, client.Host)
if !success {
log.Debug().Msgf("lidarr: release push rejected: %v, indexer %v to %v reasons: '%v'", r.Title, r.Indexer, client.Host, rejections)
// save pushed release
s.bus.Publish("release:update-push-status-rejected", release.ID, rejections)
return nil
}
log.Debug().Msgf("lidarr: successfully pushed release: %v, indexer %v to %v", r.Title, r.Indexer, client.Host)
s.bus.Publish("release:update-push-status", release.ID, domain.ReleasePushStatusApproved)
return nil
}

View file

@@ -9,7 +9,7 @@ import (
"github.com/rs/zerolog/log"
)
func (s *service) radarr(announce domain.Announce, action domain.Action) error {
func (s *service) radarr(release domain.Release, action domain.Action) error {
log.Trace().Msg("action RADARR")
// TODO validate data
@@ -39,28 +39,35 @@ func (s *service) radarr(announce domain.Announce, action domain.Action) error {
cfg.Password = client.Settings.Basic.Password
}
r := radarr.New(cfg)
arr := radarr.New(cfg)
release := radarr.Release{
Title: announce.TorrentName,
DownloadUrl: announce.TorrentUrl,
Size: 0,
Indexer: announce.Site,
r := radarr.Release{
Title: release.TorrentName,
DownloadUrl: release.TorrentURL,
Size: int64(release.Size),
Indexer: release.Indexer,
DownloadProtocol: "torrent",
Protocol: "torrent",
PublishDate: time.Now().Format(time.RFC3339),
}
success, err := r.Push(release)
success, rejections, err := arr.Push(r)
if err != nil {
log.Error().Stack().Err(err).Msgf("radarr: failed to push release: %v", release)
log.Error().Stack().Err(err).Msgf("radarr: failed to push release: %v", r)
return err
}
if success {
// TODO save pushed release
log.Debug().Msgf("radarr: successfully pushed release: %v, indexer %v to %v", release.Title, release.Indexer, client.Host)
if !success {
log.Debug().Msgf("radarr: release push rejected: %v, indexer %v to %v reasons: '%v'", r.Title, r.Indexer, client.Host, rejections)
// save pushed release
s.bus.Publish("release:update-push-status-rejected", release.ID, rejections)
return nil
}
log.Debug().Msgf("radarr: successfully pushed release: %v, indexer %v to %v", r.Title, r.Indexer, client.Host)
s.bus.Publish("release:update-push-status", release.ID, domain.ReleasePushStatusApproved)
return nil
}

View file

@@ -12,7 +12,7 @@ import (
"github.com/autobrr/autobrr/internal/domain"
)
func (s *service) RunActions(actions []domain.Action, announce domain.Announce) error {
func (s *service) RunActions(actions []domain.Action, release domain.Release) error {
var err error
var tmpFile string
@@ -24,33 +24,36 @@ func (s *service) RunActions(actions []domain.Action, announce domain.Announce)
continue
}
log.Debug().Msgf("process action: %v", action.Name)
log.Debug().Msgf("process action: %v for '%v'", action.Name, release.TorrentName)
switch action.Type {
case domain.ActionTypeTest:
s.test(action.Name)
s.bus.Publish("release:update-push-status", release.ID, domain.ReleasePushStatusApproved)
case domain.ActionTypeExec:
if tmpFile == "" {
tmpFile, hash, err = downloadFile(announce.TorrentUrl)
tmpFile, hash, err = downloadFile(release.TorrentURL)
if err != nil {
log.Error().Stack().Err(err)
return err
}
}
go func(announce domain.Announce, action domain.Action, tmpFile string) {
s.execCmd(announce, action, tmpFile)
}(announce, action, tmpFile)
go func(release domain.Release, action domain.Action, tmpFile string) {
s.execCmd(release, action, tmpFile)
s.bus.Publish("release:update-push-status", release.ID, domain.ReleasePushStatusApproved)
}(release, action, tmpFile)
case domain.ActionTypeWatchFolder:
if tmpFile == "" {
tmpFile, hash, err = downloadFile(announce.TorrentUrl)
tmpFile, hash, err = downloadFile(release.TorrentURL)
if err != nil {
log.Error().Stack().Err(err)
return err
}
}
s.watchFolder(action.WatchFolder, tmpFile)
s.bus.Publish("release:update-push-status", release.ID, domain.ReleasePushStatusApproved)
case domain.ActionTypeDelugeV1, domain.ActionTypeDelugeV2:
canDownload, err := s.delugeCheckRulesCanDownload(action)
@@ -58,22 +61,25 @@ func (s *service) RunActions(actions []domain.Action, announce domain.Announce)
log.Error().Stack().Err(err).Msgf("error checking client rules: %v", action.Name)
continue
}
if canDownload {
if tmpFile == "" {
tmpFile, hash, err = downloadFile(announce.TorrentUrl)
if err != nil {
log.Error().Stack().Err(err)
return err
}
}
go func(action domain.Action, tmpFile string) {
err = s.deluge(action, tmpFile)
if err != nil {
log.Error().Stack().Err(err).Msg("error sending torrent to Deluge")
}
}(action, tmpFile)
if !canDownload {
s.bus.Publish("release:update-push-status-rejected", release.ID, "deluge busy")
continue
}
if tmpFile == "" {
tmpFile, hash, err = downloadFile(release.TorrentURL)
if err != nil {
log.Error().Stack().Err(err)
return err
}
}
go func(action domain.Action, tmpFile string) {
err = s.deluge(action, tmpFile)
if err != nil {
log.Error().Stack().Err(err).Msg("error sending torrent to Deluge")
}
s.bus.Publish("release:update-push-status", release.ID, domain.ReleasePushStatusApproved)
}(action, tmpFile)
case domain.ActionTypeQbittorrent:
canDownload, err := s.qbittorrentCheckRulesCanDownload(action)
@@ -81,49 +87,53 @@ func (s *service) RunActions(actions []domain.Action, announce domain.Announce)
log.Error().Stack().Err(err).Msgf("error checking client rules: %v", action.Name)
continue
}
if canDownload {
if tmpFile == "" {
tmpFile, hash, err = downloadFile(announce.TorrentUrl)
if err != nil {
log.Error().Stack().Err(err)
return err
}
}
go func(action domain.Action, hash string, tmpFile string) {
err = s.qbittorrent(action, hash, tmpFile)
if err != nil {
log.Error().Stack().Err(err).Msg("error sending torrent to qBittorrent")
}
}(action, hash, tmpFile)
if !canDownload {
s.bus.Publish("release:update-push-status-rejected", release.ID, "qbittorrent busy")
continue
}
if tmpFile == "" {
tmpFile, hash, err = downloadFile(release.TorrentURL)
if err != nil {
log.Error().Stack().Err(err)
return err
}
}
go func(action domain.Action, hash string, tmpFile string) {
err = s.qbittorrent(action, hash, tmpFile)
if err != nil {
log.Error().Stack().Err(err).Msg("error sending torrent to qBittorrent")
}
s.bus.Publish("release:update-push-status", release.ID, domain.ReleasePushStatusApproved)
}(action, hash, tmpFile)
case domain.ActionTypeRadarr:
go func(announce domain.Announce, action domain.Action) {
err = s.radarr(announce, action)
go func(release domain.Release, action domain.Action) {
err = s.radarr(release, action)
if err != nil {
log.Error().Stack().Err(err).Msg("error sending torrent to radarr")
//continue
}
}(announce, action)
}(release, action)
case domain.ActionTypeSonarr:
go func(announce domain.Announce, action domain.Action) {
err = s.sonarr(announce, action)
go func(release domain.Release, action domain.Action) {
err = s.sonarr(release, action)
if err != nil {
log.Error().Stack().Err(err).Msg("error sending torrent to sonarr")
//continue
}
}(announce, action)
}(release, action)
case domain.ActionTypeLidarr:
go func(announce domain.Announce, action domain.Action) {
err = s.lidarr(announce, action)
go func(release domain.Release, action domain.Action) {
err = s.lidarr(release, action)
if err != nil {
log.Error().Stack().Err(err).Msg("error sending torrent to lidarr")
//continue
}
}(announce, action)
}(release, action)
default:
log.Warn().Msgf("unsupported action: %v type: %v", action.Name, action.Type)

View file

@@ -1,6 +1,7 @@
package action
import (
"github.com/asaskevich/EventBus"
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/internal/download_client"
)
@@ -11,16 +12,17 @@ type Service interface {
Delete(actionID int) error
ToggleEnabled(actionID int) error
RunActions(actions []domain.Action, announce domain.Announce) error
RunActions(actions []domain.Action, release domain.Release) error
}
type service struct {
repo domain.ActionRepo
clientSvc download_client.Service
bus EventBus.Bus
}
func NewService(repo domain.ActionRepo, clientSvc download_client.Service) Service {
return &service{repo: repo, clientSvc: clientSvc}
func NewService(repo domain.ActionRepo, clientSvc download_client.Service, bus EventBus.Bus) Service {
return &service{repo: repo, clientSvc: clientSvc, bus: bus}
}
func (s *service) Store(action domain.Action) (*domain.Action, error) {

View file

@@ -9,7 +9,7 @@ import (
"github.com/rs/zerolog/log"
)
func (s *service) sonarr(announce domain.Announce, action domain.Action) error {
func (s *service) sonarr(release domain.Release, action domain.Action) error {
log.Trace().Msg("action SONARR")
// TODO validate data
@@ -39,28 +39,35 @@ func (s *service) sonarr(announce domain.Announce, action domain.Action) error {
cfg.Password = client.Settings.Basic.Password
}
r := sonarr.New(cfg)
arr := sonarr.New(cfg)
release := sonarr.Release{
Title: announce.TorrentName,
DownloadUrl: announce.TorrentUrl,
Size: 0,
Indexer: announce.Site,
r := sonarr.Release{
Title: release.TorrentName,
DownloadUrl: release.TorrentURL,
Size: int64(release.Size),
Indexer: release.Indexer,
DownloadProtocol: "torrent",
Protocol: "torrent",
PublishDate: time.Now().Format(time.RFC3339),
}
success, err := r.Push(release)
success, rejections, err := arr.Push(r)
if err != nil {
log.Error().Stack().Err(err).Msgf("sonarr: failed to push release: %v", release)
log.Error().Stack().Err(err).Msgf("sonarr: failed to push release: %v", r)
return err
}
if success {
// TODO save pushed release
log.Debug().Msgf("sonarr: successfully pushed release: %v, indexer %v to %v", release.Title, release.Indexer, client.Host)
if !success {
log.Debug().Msgf("sonarr: release push rejected: %v, indexer %v to %v reasons: '%v'", r.Title, r.Indexer, client.Host, rejections)
// save pushed release
s.bus.Publish("release:update-push-status-rejected", release.ID, rejections)
return nil
}
log.Debug().Msgf("sonarr: successfully pushed release: %v, indexer %v to %v", r.Title, r.Indexer, client.Host)
s.bus.Publish("release:update-push-status", release.ID, domain.ReleasePushStatusApproved)
return nil
}

View file

@@ -2,32 +2,35 @@ package announce
import (
"bytes"
"errors"
"fmt"
"html"
"net/url"
"regexp"
"strconv"
"strings"
"text/template"
"github.com/autobrr/autobrr/internal/domain"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
func (s *service) parseLineSingle(def *domain.IndexerDefinition, announce *domain.Announce, line string) error {
func (s *service) parseLineSingle(def *domain.IndexerDefinition, release *domain.Release, line string) error {
for _, extract := range def.Parse.Lines {
tmpVars := map[string]string{}
var err error
err = s.parseExtract(extract.Pattern, extract.Vars, tmpVars, line)
match, err := s.parseExtract(extract.Pattern, extract.Vars, tmpVars, line)
if err != nil {
log.Debug().Msgf("error parsing extract: %v", line)
return err
}
if !match {
log.Debug().Msgf("line not matching expected regex pattern: %v", line)
return errors.New("line not matching expected regex pattern")
}
// on lines matched
err = s.onLinesMatched(def, tmpVars, announce)
err = s.onLinesMatched(def, tmpVars, release)
if err != nil {
log.Debug().Msgf("error match line: %v", line)
return err
@@ -41,7 +44,7 @@ func (s *service) parseMultiLine() error {
return nil
}
func (s *service) parseExtract(pattern string, vars []string, tmpVars map[string]string, line string) error {
func (s *service) parseExtract(pattern string, vars []string, tmpVars map[string]string, line string) (bool, error) {
rxp, err := regExMatch(pattern, line)
if err != nil {
@@ -50,7 +53,7 @@ func (s *service) parseExtract(pattern string, vars []string, tmpVars map[string
if rxp == nil {
//return nil, nil
return nil
return false, nil
}
// extract matched
@@ -64,44 +67,34 @@ func (s *service) parseExtract(pattern string, vars []string, tmpVars map[string
tmpVars[v] = value
}
return nil
return true, nil
}
func (s *service) onLinesMatched(def *domain.IndexerDefinition, vars map[string]string, announce *domain.Announce) error {
// TODO implement set tracker.lastAnnounce = now
func (s *service) onLinesMatched(def *domain.IndexerDefinition, vars map[string]string, release *domain.Release) error {
var err error
announce.TorrentName = vars["torrentName"]
err = release.MapVars(vars)
//err := s.postProcess(ti, vars, *announce)
//if err != nil {
// return err
//}
// TODO extractReleaseInfo
err := s.extractReleaseInfo(vars, announce.TorrentName)
if err != nil {
return err
}
// resolution
// source
// encoder
// TODO is this even needed anymore
// canonicalize name
//canonReleaseName := cleanReleaseName(release.TorrentName)
//log.Trace().Msgf("canonicalize release name: %v", canonReleaseName)
err = s.mapToAnnounce(vars, announce)
err = release.Parse()
if err != nil {
log.Error().Err(err).Msg("announce: could not parse release")
return err
}
// torrent url
torrentUrl, err := s.processTorrentUrl(def.Parse.Match.TorrentURL, vars, def.SettingsMap, def.Parse.Match.Encode)
if err != nil {
log.Debug().Msgf("error torrent url: %v", err)
log.Error().Err(err).Msg("announce: could not process torrent url")
return err
}
if torrentUrl != "" {
announce.TorrentUrl = torrentUrl
release.TorrentURL = torrentUrl
}
return nil
@@ -185,367 +178,6 @@ func cleanReleaseName(input string) string {
return processedString
}
func findLast(input string, pattern string) (string, error) {
matched := make([]string, 0)
//for _, s := range arr {
rxp, err := regexp.Compile(pattern)
if err != nil {
return "", err
//return errors.Wrapf(err, "invalid regex: %s", value)
}
matches := rxp.FindStringSubmatch(input)
if matches != nil {
log.Trace().Msgf("matches: %v", matches)
// first value is the match, second value is the text
if len(matches) >= 1 {
last := matches[len(matches)-1]
// add to temp slice
matched = append(matched, last)
}
}
//}
// check if multiple values in temp slice, if so get the last one
if len(matched) >= 1 {
last := matched[len(matched)-1]
return last, nil
}
return "", nil
}
func extractYear(releaseName string) (string, bool) {
yearMatch, err := findLast(releaseName, "(?:^|\\D)(19[3-9]\\d|20[012]\\d)(?:\\D|$)")
if err != nil {
return "", false
}
log.Trace().Msgf("year matches: %v", yearMatch)
return yearMatch, true
}
func extractSeason(releaseName string) (string, bool) {
seasonMatch, err := findLast(releaseName, "\\sS(\\d+)\\s?[ED]\\d+/i")
sm2, err := findLast(releaseName, "\\s(?:S|Season\\s*)(\\d+)/i")
//sm3, err := findLast(releaseName, "\\s((?<!\\d)\\d{1,2})x\\d+/i")
if err != nil {
return "", false
}
log.Trace().Msgf("season matches: %v", seasonMatch)
log.Trace().Msgf("season matches: %v", sm2)
return seasonMatch, false
}
func extractEpisode(releaseName string) (string, bool) {
epMatch, err := findLast(releaseName, "\\sS\\d+\\s?E(\\d+)/i")
ep2, err := findLast(releaseName, "\\s(?:E|Episode\\s*)(\\d+)/i")
//ep3, err := findLast(releaseName, "\\s(?<!\\d)\\d{1,2}x(\\d+)/i")
if err != nil {
return "", false
}
log.Trace().Msgf("ep matches: %v", epMatch)
log.Trace().Msgf("ep matches: %v", ep2)
return epMatch, false
}
func (s *service) extractReleaseInfo(varMap map[string]string, releaseName string) error {
// https://github.com/middelink/go-parse-torrent-name
canonReleaseName := cleanReleaseName(releaseName)
log.Trace().Msgf("canonicalize release name: %v", canonReleaseName)
//release, err := releaseinfo.Parse(releaseName)
//if err != nil {
// return err
//}
//
//log.Trace().Msgf("release: %+v", release)
// https://github.com/autodl-community/autodl-irssi/pull/194/files
// year
//year, yearMatch := extractYear(canonReleaseName)
//if yearMatch {
// setVariable("year", year, varMap, nil)
//}
//log.Trace().Msgf("year matches: %v", year)
// season
//season, seasonMatch := extractSeason(canonReleaseName)
//if seasonMatch {
// // set var
// log.Trace().Msgf("season matches: %v", season)
//}
// episode
//episode, episodeMatch := extractEpisode(canonReleaseName)
//if episodeMatch {
// // set var
// log.Trace().Msgf("episode matches: %v", episode)
//}
// resolution
// source
// encoder
// ignore
// tv or movie
// music stuff
// game stuff
return nil
}
func (s *service) mapToAnnounce(varMap map[string]string, ann *domain.Announce) error {
if torrentName, err := getFirstStringMapValue(varMap, []string{"torrentName"}); err != nil {
return errors.Wrap(err, "failed parsing required field")
} else {
ann.TorrentName = html.UnescapeString(torrentName)
}
if category, err := getFirstStringMapValue(varMap, []string{"category"}); err == nil {
ann.Category = category
}
if freeleech, err := getFirstStringMapValue(varMap, []string{"freeleech"}); err == nil {
ann.Freeleech = strings.EqualFold(freeleech, "freeleech") || strings.EqualFold(freeleech, "yes")
}
if freeleechPercent, err := getFirstStringMapValue(varMap, []string{"freeleechPercent"}); err == nil {
ann.FreeleechPercent = freeleechPercent
}
if uploader, err := getFirstStringMapValue(varMap, []string{"uploader"}); err == nil {
ann.Uploader = uploader
}
if scene, err := getFirstStringMapValue(varMap, []string{"scene"}); err == nil {
ann.Scene = strings.EqualFold(scene, "true") || strings.EqualFold(scene, "yes")
}
if year, err := getFirstStringMapValue(varMap, []string{"year"}); err == nil {
yearI, err := strconv.Atoi(year)
if err != nil {
//log.Debug().Msgf("bad year var: %v", year)
}
ann.Year = yearI
}
if tags, err := getFirstStringMapValue(varMap, []string{"releaseTags", "tags"}); err == nil {
ann.Tags = tags
}
return nil
}
func (s *service) mapToAnnounceObj(varMap map[string]string, ann *domain.Announce) error {
if torrentName, err := getFirstStringMapValue(varMap, []string{"torrentName", "$torrentName"}); err != nil {
return errors.Wrap(err, "failed parsing required field")
} else {
ann.TorrentName = html.UnescapeString(torrentName)
}
if torrentUrl, err := getFirstStringMapValue(varMap, []string{"torrentUrl", "$torrentUrl"}); err != nil {
return errors.Wrap(err, "failed parsing required field")
} else {
ann.TorrentUrl = torrentUrl
}
if releaseType, err := getFirstStringMapValue(varMap, []string{"releaseType", "$releaseType"}); err == nil {
ann.ReleaseType = releaseType
}
if name1, err := getFirstStringMapValue(varMap, []string{"name1", "$name1"}); err == nil {
ann.Name1 = name1
}
if name2, err := getFirstStringMapValue(varMap, []string{"name2", "$name2"}); err == nil {
ann.Name2 = name2
}
if category, err := getFirstStringMapValue(varMap, []string{"category", "$category"}); err == nil {
ann.Category = category
}
if freeleech, err := getFirstStringMapValue(varMap, []string{"freeleech", "$freeleech"}); err == nil {
ann.Freeleech = strings.EqualFold(freeleech, "true")
}
if uploader, err := getFirstStringMapValue(varMap, []string{"uploader", "$uploader"}); err == nil {
ann.Uploader = uploader
}
if tags, err := getFirstStringMapValue(varMap, []string{"$releaseTags", "$tags", "releaseTags", "tags"}); err == nil {
ann.Tags = tags
}
if cue, err := getFirstStringMapValue(varMap, []string{"cue", "$cue"}); err == nil {
ann.Cue = strings.EqualFold(cue, "true")
}
if logVar, err := getFirstStringMapValue(varMap, []string{"log", "$log"}); err == nil {
ann.Log = logVar
}
if media, err := getFirstStringMapValue(varMap, []string{"media", "$media"}); err == nil {
ann.Media = media
}
if format, err := getFirstStringMapValue(varMap, []string{"format", "$format"}); err == nil {
ann.Format = format
}
if bitRate, err := getFirstStringMapValue(varMap, []string{"bitrate", "$bitrate"}); err == nil {
ann.Bitrate = bitRate
}
if resolution, err := getFirstStringMapValue(varMap, []string{"resolution"}); err == nil {
ann.Resolution = resolution
}
if source, err := getFirstStringMapValue(varMap, []string{"source"}); err == nil {
ann.Source = source
}
if encoder, err := getFirstStringMapValue(varMap, []string{"encoder"}); err == nil {
ann.Encoder = encoder
}
if container, err := getFirstStringMapValue(varMap, []string{"container"}); err == nil {
ann.Container = container
}
if scene, err := getFirstStringMapValue(varMap, []string{"scene", "$scene"}); err == nil {
ann.Scene = strings.EqualFold(scene, "true")
}
if year, err := getFirstStringMapValue(varMap, []string{"year", "$year"}); err == nil {
yearI, err := strconv.Atoi(year)
if err != nil {
//log.Debug().Msgf("bad year var: %v", year)
}
ann.Year = yearI
}
//return &ann, nil
return nil
}
func setVariable(varName string, value string, varMap map[string]string, settings map[string]string) bool {
// check in instance options (auth)
//optVal, ok := settings[name]
//if !ok {
// //return ""
//}
////ret = optVal
//if optVal != "" {
// return false
//}
// else in varMap
val, ok := varMap[varName]
if !ok {
//return ""
varMap[varName] = value
} else {
// do something else?
}
log.Trace().Msgf("setVariable: %v", val)
return true
}
func getVariable(name string, varMap map[string]string, obj domain.Announce, settings map[string]string) string {
var ret string
// check in announce obj
// TODO reflect struct
// check in instance options (auth)
optVal, ok := settings[name]
if !ok {
//return ""
}
//ret = optVal
if optVal != "" {
return optVal
}
// else in varMap
val, ok := varMap[name]
if !ok {
//return ""
}
ret = val
return ret
}
//func contains(s []string, str string) bool {
// for _, v := range s {
// if v == str {
// return true
// }
// }
//
// return false
//}
func listContains(list []string, key string) bool {
for _, lKey := range list {
if strings.EqualFold(lKey, key) {
return true
}
}
return false
}
func getStringMapValue(stringMap map[string]string, key string) (string, error) {
lowerKey := strings.ToLower(key)
// case sensitive match
//if caseSensitive {
// v, ok := stringMap[key]
// if !ok {
// return "", fmt.Errorf("key was not found in map: %q", key)
// }
//
// return v, nil
//}
// case insensitive match
for k, v := range stringMap {
if strings.ToLower(k) == lowerKey {
return v, nil
}
}
return "", fmt.Errorf("key was not found in map: %q", lowerKey)
}
func getFirstStringMapValue(stringMap map[string]string, keys []string) (string, error) {
for _, k := range keys {
if val, err := getStringMapValue(stringMap, k); err == nil {
return val, nil
}
}
return "", fmt.Errorf("key were not found in map: %q", strings.Join(keys, ", "))
}
func removeElement(s []string, i int) ([]string, error) {
// s is [1,2,3,4,5,6], i is 2
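For context, parseExtract compiles the indexer definition's pattern, maps the regex capture groups onto the declared variable names, and (after this change) also reports whether the line matched at all. A standalone sketch of that mapping; the pattern, variable names, and announce line below are made up for illustration and are not taken from an indexer definition:

package main

import (
	"fmt"
	"regexp"
)

// matchToVars compiles pattern, matches it against line and maps the capture
// groups onto the given variable names, mirroring what parseExtract does.
func matchToVars(pattern string, vars []string, line string) (map[string]string, bool, error) {
	rxp, err := regexp.Compile(pattern)
	if err != nil {
		return nil, false, err
	}
	matches := rxp.FindStringSubmatch(line)
	if matches == nil {
		// no match: the caller can reject the line instead of continuing
		return nil, false, nil
	}
	out := map[string]string{}
	for i, v := range vars {
		if i+1 < len(matches) {
			out[v] = matches[i+1]
		}
	}
	return out, true, nil
}

func main() {
	line := "New Torrent: Debian 11 amd64 ISO -- https://tracker.test/download/1234"
	vars, ok, err := matchToVars(`New Torrent: (.*) -- (https?://.*)`, []string{"torrentName", "torrentUrl"}, line)
	fmt.Println(ok, err, vars["torrentName"], vars["torrentUrl"])
}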

View file

@ -1,9 +1,5 @@
package announce
import (
"testing"
)
//func Test_service_OnNewLine(t *testing.T) {
// tfiles := tracker.NewService()
// tfiles.ReadFiles()
@@ -287,7 +283,7 @@ var (
// want: &domain.Announce{
// Freeleech: false,
// Category: "PC :: Iso",
// TorrentName: "debian live 10 6 0 amd64 standard iso",
// Name: "debian live 10 6 0 amd64 standard iso",
// Uploader: "Anonymous",
// TorrentUrl: "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
// Site: "T01",
@@ -307,7 +303,7 @@ var (
// want: &domain.Announce{
// Freeleech: true,
// Category: "PC :: Iso",
// TorrentName: "debian live 10 6 0 amd64 standard iso",
// Name: "debian live 10 6 0 amd64 standard iso",
// Uploader: "Anonymous",
// TorrentUrl: "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
// Site: "T01",
@@ -328,7 +324,7 @@ var (
// Name1: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name2: "Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Freeleech: false,
// TorrentName: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// TorrentUrl: "https://tracker05.test/torrents.php?action=download&id=1382972&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
// Site: "T05",
// Tags: "blues, rock, classic rock,jazz,blues rock,electric blues",
@@ -357,7 +353,7 @@ var (
// Name1: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name2: "Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Freeleech: false,
// TorrentName: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// TorrentUrl: "https://tracker05.test/torrents.php?action=download&id=29910415&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
// Site: "T05",
// Tags: "1990s, folk, world music, celtic",
@@ -421,7 +417,7 @@ var (
// want: &domain.Announce{
// Freeleech: false,
// Category: "PC :: Iso",
// TorrentName: "debian live 10 6 0 amd64 standard iso",
// Name: "debian live 10 6 0 amd64 standard iso",
// Uploader: "Anonymous",
// TorrentUrl: "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
// Site: "T01",
@@ -441,7 +437,7 @@ var (
// want: &domain.Announce{
// Freeleech: true,
// Category: "PC :: Iso",
// TorrentName: "debian live 10 6 0 amd64 standard iso",
// Name: "debian live 10 6 0 amd64 standard iso",
// Uploader: "Anonymous",
// TorrentUrl: "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
// Site: "T01",
@@ -462,7 +458,7 @@ var (
// Name1: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name2: "Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Freeleech: false,
// TorrentName: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// TorrentUrl: "https://tracker05.test/torrents.php?action=download&id=1382972&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
// Site: "T05",
// Tags: "blues, rock, classic rock,jazz,blues rock,electric blues",
@@ -496,7 +492,7 @@ var (
// Name1: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name2: "Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Freeleech: false,
// TorrentName: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// TorrentUrl: "https://tracker05.test/torrents.php?action=download&id=29910415&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
// Site: "T05",
// Tags: "1990s, folk, world music, celtic",
@@ -534,52 +530,52 @@ var (
// }
//}
func Test_service_extractReleaseInfo(t *testing.T) {
type fields struct {
name string
queues map[string]chan string
}
type args struct {
varMap map[string]string
releaseName string
}
tests := []struct {
name string
fields fields
args args
wantErr bool
}{
{
name: "test_01",
fields: fields{
name: "", queues: nil,
},
args: args{
varMap: map[string]string{},
releaseName: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
},
wantErr: false,
},
{
name: "test_02",
fields: fields{
name: "", queues: nil,
},
args: args{
varMap: map[string]string{},
releaseName: "Lost S06E07 720p WEB-DL DD 5.1 H.264 - LP",
},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
s := &service{
queues: tt.fields.queues,
}
if err := s.extractReleaseInfo(tt.args.varMap, tt.args.releaseName); (err != nil) != tt.wantErr {
t.Errorf("extractReleaseInfo() error = %v, wantErr %v", err, tt.wantErr)
}
})
}
}
//func Test_service_extractReleaseInfo(t *testing.T) {
// type fields struct {
// name string
// queues map[string]chan string
// }
// type args struct {
// varMap map[string]string
// releaseName string
// }
// tests := []struct {
// name string
// fields fields
// args args
// wantErr bool
// }{
// {
// name: "test_01",
// fields: fields{
// name: "", queues: nil,
// },
// args: args{
// varMap: map[string]string{},
// releaseName: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// },
// wantErr: false,
// },
// {
// name: "test_02",
// fields: fields{
// name: "", queues: nil,
// },
// args: args{
// varMap: map[string]string{},
// releaseName: "Lost S06E07 720p WEB-DL DD 5.1 H.264 - LP",
// },
// wantErr: false,
// },
// }
// for _, tt := range tests {
// t.Run(tt.name, func(t *testing.T) {
// s := &service{
// queues: tt.fields.queues,
// }
// if err := s.extractReleaseInfo(tt.args.varMap, tt.args.releaseName); (err != nil) != tt.wantErr {
// t.Errorf("extractReleaseInfo() error = %v, wantErr %v", err, tt.wantErr)
// }
// })
// }
//}

View file

@@ -1,11 +1,11 @@
package announce
import (
"context"
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/internal/filter"
"github.com/autobrr/autobrr/internal/indexer"
"github.com/autobrr/autobrr/internal/release"
"github.com/rs/zerolog/log"
)
@@ -36,54 +36,73 @@ func NewService(filterService filter.Service, indexerSvc indexer.Service, releas
// Parse announce line
func (s *service) Parse(announceID string, msg string) error {
ctx := context.Background()
// make simpler by injecting indexer, or indexerdefinitions
// announceID (server:channel:announcer)
def := s.indexerSvc.GetIndexerByAnnounce(announceID)
if def == nil {
definition := s.indexerSvc.GetIndexerByAnnounce(announceID)
if definition == nil {
log.Debug().Msgf("could not find indexer definition: %v", announceID)
return nil
}
announce := domain.Announce{
Site: def.Identifier,
Line: msg,
newRelease, err := domain.NewRelease(definition.Identifier, msg)
if err != nil {
log.Error().Err(err).Msg("could not create new release")
return err
}
// parse lines
if def.Parse.Type == "single" {
err := s.parseLineSingle(def, &announce, msg)
if definition.Parse.Type == "single" {
err = s.parseLineSingle(definition, newRelease, msg)
if err != nil {
log.Debug().Msgf("could not parse single line: %v", msg)
log.Error().Err(err).Msgf("could not parse single line: %v", msg)
return err
}
}
// implement multiline parsing
// find filter
foundFilter, err := s.filterSvc.FindByIndexerIdentifier(announce)
// TODO implement multiline parsing
filterOK, foundFilter, err := s.filterSvc.FindAndCheckFilters(newRelease)
if err != nil {
log.Error().Err(err).Msg("could not find filter")
return err
}
// no filter found, lets return
if foundFilter == nil {
// no foundFilter found, lets return
if !filterOK || foundFilter == nil {
log.Trace().Msg("no matching filter found")
// TODO check in config for "Save all releases"
// Save as rejected
//newRelease.FilterStatus = domain.ReleaseStatusFilterRejected
//err = s.releaseSvc.Store(ctx, newRelease)
//if err != nil {
// log.Error().Err(err).Msgf("error writing release to database: %+v", newRelease)
// return nil
//}
return nil
}
announce.Filter = foundFilter
log.Trace().Msgf("announce: %+v", announce)
// save release
newRelease.Filter = foundFilter
newRelease.FilterName = foundFilter.Name
newRelease.FilterID = foundFilter.ID
log.Info().Msgf("Matched '%v' (%v) for %v", announce.TorrentName, announce.Filter.Name, announce.Site)
newRelease.FilterStatus = domain.ReleaseStatusFilterApproved
err = s.releaseSvc.Store(ctx, newRelease)
if err != nil {
log.Error().Err(err).Msgf("error writing release to database: %+v", newRelease)
return nil
}
// match release
log.Info().Msgf("Matched '%v' (%v) for %v", newRelease.TorrentName, newRelease.Filter.Name, newRelease.Indexer)
// process release
go func() {
err = s.releaseSvc.Process(announce)
err = s.releaseSvc.Process(*newRelease)
if err != nil {
log.Error().Err(err).Msgf("could not process release: %+v", announce)
log.Error().Err(err).Msgf("could not process release: %+v", newRelease)
}
}()

View file

@@ -1,19 +0,0 @@
package database
import (
"database/sql"
"github.com/autobrr/autobrr/internal/domain"
)
type AnnounceRepo struct {
db *sql.DB
}
func NewAnnounceRepo(db *sql.DB) domain.AnnounceRepo {
return &AnnounceRepo{db: db}
}
func (a *AnnounceRepo) Store(announce domain.Announce) error {
return nil
}

View file

@@ -17,10 +17,6 @@ func NewIrcRepo(db *sql.DB) domain.IrcRepo {
return &IrcRepo{db: db}
}
func (ir *IrcRepo) Store(announce domain.Announce) error {
return nil
}
func (ir *IrcRepo) GetNetworkByID(id int64) (*domain.IrcNetwork, error) {
row := ir.db.QueryRow("SELECT id, enabled, name, server, port, tls, pass, invite_command, nickserv_account, nickserv_password FROM irc_network WHERE id = ?", id)

View file

@@ -138,10 +138,115 @@ CREATE TABLE action
FOREIGN KEY (client_id) REFERENCES client(id),
FOREIGN KEY (filter_id) REFERENCES filter(id)
);
CREATE TABLE "release"
(
id INTEGER PRIMARY KEY,
filter_status TEXT,
push_status TEXT,
rejections TEXT [] DEFAULT '{}' NOT NULL,
indexer TEXT,
filter TEXT,
protocol TEXT,
implementation TEXT,
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
group_id TEXT,
torrent_id TEXT,
torrent_name TEXT,
size INTEGER,
raw TEXT,
title TEXT,
category TEXT,
season INTEGER,
episode INTEGER,
year INTEGER,
resolution TEXT,
source TEXT,
codec TEXT,
container TEXT,
hdr TEXT,
audio TEXT,
release_group TEXT,
region TEXT,
language TEXT,
edition TEXT,
unrated BOOLEAN,
hybrid BOOLEAN,
proper BOOLEAN,
repack BOOLEAN,
website TEXT,
artists TEXT [] DEFAULT '{}' NOT NULL,
type TEXT,
format TEXT,
bitrate TEXT,
log_score INTEGER,
has_log BOOLEAN,
has_cue BOOLEAN,
is_scene BOOLEAN,
origin TEXT,
tags TEXT [] DEFAULT '{}' NOT NULL,
freeleech BOOLEAN,
freeleech_percent INTEGER,
uploader TEXT,
pre_time TEXT
);
`
var migrations = []string{
"",
`
CREATE TABLE "release"
(
id INTEGER PRIMARY KEY,
filter_status TEXT,
push_status TEXT,
rejections TEXT [] DEFAULT '{}' NOT NULL,
indexer TEXT,
filter TEXT,
protocol TEXT,
implementation TEXT,
timestamp TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
group_id TEXT,
torrent_id TEXT,
torrent_name TEXT,
size INTEGER,
raw TEXT,
title TEXT,
category TEXT,
season INTEGER,
episode INTEGER,
year INTEGER,
resolution TEXT,
source TEXT,
codec TEXT,
container TEXT,
hdr TEXT,
audio TEXT,
release_group TEXT,
region TEXT,
language TEXT,
edition TEXT,
unrated BOOLEAN,
hybrid BOOLEAN,
proper BOOLEAN,
repack BOOLEAN,
website TEXT,
artists TEXT [] DEFAULT '{}' NOT NULL,
type TEXT,
format TEXT,
bitrate TEXT,
log_score INTEGER,
has_log BOOLEAN,
has_cue BOOLEAN,
is_scene BOOLEAN,
origin TEXT,
tags TEXT [] DEFAULT '{}' NOT NULL,
freeleech BOOLEAN,
freeleech_percent INTEGER,
uploader TEXT,
pre_time TEXT
);
`,
}
func Migrate(db *sql.DB) error {
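The body of Migrate is cut off in this view. Given the empty first entry in the migrations slice, one plausible scheme is to treat the slice index as the schema version and track it with SQLite's user_version pragma. A minimal sketch along those lines, assuming a schema constant holds the base DDL shown above and that fmt is imported; this is an illustration, not the commit's actual implementation:

func Migrate(db *sql.DB) error {
	var version int
	if err := db.QueryRow("PRAGMA user_version").Scan(&version); err != nil {
		return err
	}
	if version == len(migrations) {
		return nil
	}

	tx, err := db.Begin()
	if err != nil {
		return err
	}
	defer tx.Rollback()

	if version == 0 {
		// fresh database: create everything from the base schema
		if _, err := tx.Exec(schema); err != nil {
			return fmt.Errorf("failed to initialize schema: %w", err)
		}
	} else {
		// existing database: apply only the migrations that have not run yet
		for i := version; i < len(migrations); i++ {
			if _, err := tx.Exec(migrations[i]); err != nil {
				return fmt.Errorf("failed to execute migration #%v: %w", i, err)
			}
		}
	}

	if _, err := tx.Exec(fmt.Sprintf("PRAGMA user_version = %v", len(migrations))); err != nil {
		return err
	}

	return tx.Commit()
}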

View file

@@ -0,0 +1,175 @@
package database
import (
"context"
"database/sql"
"time"
sq "github.com/Masterminds/squirrel"
"github.com/lib/pq"
"github.com/rs/zerolog/log"
"github.com/autobrr/autobrr/internal/domain"
)
type ReleaseRepo struct {
db *sql.DB
}
func NewReleaseRepo(db *sql.DB) domain.ReleaseRepo {
return &ReleaseRepo{db: db}
}
func (repo *ReleaseRepo) Store(ctx context.Context, r *domain.Release) (*domain.Release, error) {
query, args, err := sq.
Insert("release").
Columns("filter_status", "push_status", "rejections", "indexer", "filter", "protocol", "implementation", "timestamp", "group_id", "torrent_id", "torrent_name", "size", "raw", "title", "category", "season", "episode", "year", "resolution", "source", "codec", "container", "hdr", "audio", "release_group", "region", "language", "edition", "unrated", "hybrid", "proper", "repack", "website", "artists", "type", "format", "bitrate", "log_score", "has_log", "has_cue", "is_scene", "origin", "tags", "freeleech", "freeleech_percent", "uploader", "pre_time").
Values(r.FilterStatus, r.PushStatus, pq.Array(r.Rejections), r.Indexer, r.FilterName, r.Protocol, r.Implementation, r.Timestamp, r.GroupID, r.TorrentID, r.TorrentName, r.Size, r.Raw, r.Title, r.Category, r.Season, r.Episode, r.Year, r.Resolution, r.Source, r.Codec, r.Container, r.HDR, r.Audio, r.Group, r.Region, r.Language, r.Edition, r.Unrated, r.Hybrid, r.Proper, r.Repack, r.Website, pq.Array(r.Artists), r.Type, r.Format, r.Bitrate, r.LogScore, r.HasLog, r.HasCue, r.IsScene, r.Origin, pq.Array(r.Tags), r.Freeleech, r.FreeleechPercent, r.Uploader, r.PreTime).
ToSql()
res, err := repo.db.ExecContext(ctx, query, args...)
if err != nil {
log.Error().Stack().Err(err).Msg("error inserting release")
return nil, err
}
resId, _ := res.LastInsertId()
r.ID = resId
log.Trace().Msgf("release.store: %+v", r)
return r, nil
}
func (repo *ReleaseRepo) UpdatePushStatus(ctx context.Context, id int64, status domain.ReleasePushStatus) error {
query, args, err := sq.Update("release").Set("push_status", status).Where("id = ?", id).ToSql()
_, err = repo.db.ExecContext(ctx, query, args...)
if err != nil {
log.Error().Stack().Err(err).Msg("error updating status of release")
return err
}
log.Trace().Msgf("release.update_push_status: id %+v", id)
return nil
}
func (repo *ReleaseRepo) UpdatePushStatusRejected(ctx context.Context, id int64, rejections string) error {
r := []string{rejections}
query, args, err := sq.
Update("release").
Set("push_status", domain.ReleasePushStatusRejected).
Set("rejections", pq.Array(r)).
Where("id = ?", id).
ToSql()
_, err = repo.db.ExecContext(ctx, query, args...)
if err != nil {
log.Error().Stack().Err(err).Msg("error updating status of release")
return err
}
log.Trace().Msgf("release.update_push_status_rejected: id %+v", id)
return nil
}
func (repo *ReleaseRepo) Find(ctx context.Context, params domain.QueryParams) ([]domain.Release, int64, error) {
queryBuilder := sq.Select("id", "filter_status", "push_status", "rejections", "indexer", "filter", "protocol", "title", "torrent_name", "size", "timestamp").From("release").OrderBy("timestamp DESC")
if params.Limit > 0 {
queryBuilder = queryBuilder.Limit(params.Limit)
} else {
queryBuilder = queryBuilder.Limit(20)
}
if params.Cursor > 0 {
//queryBuilder = queryBuilder.Where(sq.Gt{"id": params.Cursor})
queryBuilder = queryBuilder.Where(sq.Lt{"id": params.Cursor})
}
if params.Filter != nil {
filter := sq.And{}
for k, v := range params.Filter {
filter = append(filter, sq.Eq{k: v})
}
queryBuilder = queryBuilder.Where(filter)
}
query, args, err := queryBuilder.ToSql()
log.Trace().Str("database", "release.find").Msgf("query: '%v', args: '%v'", query, args)
res := make([]domain.Release, 0)
rows, err := repo.db.QueryContext(ctx, query, args...)
if err != nil {
log.Error().Stack().Err(err).Msg("error fetching releases")
//return
return res, 0, nil
}
defer rows.Close()
if err := rows.Err(); err != nil {
log.Error().Stack().Err(err)
return res, 0, err
}
for rows.Next() {
var rls domain.Release
var indexer, filter sql.NullString
var timestamp string
if err := rows.Scan(&rls.ID, &rls.FilterStatus, &rls.PushStatus, pq.Array(&rls.Rejections), &indexer, &filter, &rls.Protocol, &rls.Title, &rls.TorrentName, &rls.Size, &timestamp); err != nil {
log.Error().Stack().Err(err).Msg("release.find: error scanning data to struct")
return res, 0, err
}
rls.Indexer = indexer.String
rls.FilterName = filter.String
ca, _ := time.Parse(time.RFC3339, timestamp)
rls.Timestamp = ca
res = append(res, rls)
}
nextCursor := int64(0)
if len(res) > 0 {
lastID := res[len(res)-1].ID
nextCursor = lastID
//nextCursor, _ = strconv.ParseInt(lastID, 10, 64)
}
return res, nextCursor, nil
}
func (repo *ReleaseRepo) Stats(ctx context.Context) (*domain.ReleaseStats, error) {
query := `SELECT
COUNT(*) total,
IFNULL(SUM(CASE WHEN push_status = 'PUSH_APPROVED' THEN 1 ELSE 0 END), 0) push_approved_count,
IFNULL(SUM(CASE WHEN push_status = 'PUSH_REJECTED' THEN 1 ELSE 0 END), 0) push_rejected_count,
IFNULL(SUM(CASE WHEN filter_status = 'FILTER_APPROVED' THEN 1 ELSE 0 END), 0) filtered_count,
IFNULL(SUM(CASE WHEN filter_status = 'FILTER_REJECTED' THEN 1 ELSE 0 END), 0) filter_rejected_count
FROM "release";`
row := repo.db.QueryRowContext(ctx, query)
if err := row.Err(); err != nil {
log.Error().Stack().Err(err).Msg("release.stats: error querying stats")
return nil, err
}
var rls domain.ReleaseStats
if err := row.Scan(&rls.TotalCount, &rls.PushApprovedCount, &rls.PushRejectedCount, &rls.FilteredCount, &rls.FilterRejectedCount); err != nil {
log.Error().Stack().Err(err).Msg("release.stats: error scanning stats data to struct")
return nil, err
}
return &rls, nil
}
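Find returns the id of the last scanned row as a cursor, so callers can page backwards through release history (newest first, 20 rows by default). An illustrative pager built on that contract; the exact field types of QueryParams are assumptions here, not taken from this diff:

package example

import (
	"context"

	"github.com/autobrr/autobrr/internal/domain"
)

// allReleases pages through the release history using the cursor returned
// by Find. The Limit and Cursor types are assumed from the repo code above.
func allReleases(ctx context.Context, repo domain.ReleaseRepo) ([]domain.Release, error) {
	var out []domain.Release

	params := domain.QueryParams{Limit: 50}
	for {
		page, nextCursor, err := repo.Find(ctx, params)
		if err != nil {
			return nil, err
		}
		out = append(out, page...)

		// an empty page means we have walked past the oldest stored release
		if len(page) == 0 || nextCursor == 0 {
			return out, nil
		}
		params.Cursor = uint64(nextCursor)
	}
}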

View file

@@ -1,51 +0,0 @@
package domain
type Announce struct {
ReleaseType string
Freeleech bool
FreeleechPercent string
Origin string
ReleaseGroup string
Category string
TorrentName string
Uploader string
TorrentSize string
PreTime string
TorrentUrl string
TorrentUrlSSL string
Year int
Name1 string // artist, show, movie
Name2 string // album
Season int
Episode int
Resolution string
Source string
Encoder string
Container string
Format string
Bitrate string
Media string
Tags string
Scene bool
Log string
LogScore string
Cue bool
Line string
OrigLine string
Site string
HttpHeaders string
Filter *Filter
}
//type Announce struct {
// Channel string
// Announcer string
// Message string
// CreatedAt time.Time
//}
//
type AnnounceRepo interface {
Store(announce Announce) error
}

View file

@@ -20,71 +20,49 @@ type FilterRepo interface {
}
type Filter struct {
ID int `json:"id"`
Name string `json:"name"`
Enabled bool `json:"enabled"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
FilterGeneral
FilterP2P
FilterTVMovies
FilterMusic
FilterAdvanced
Actions []Action `json:"actions"`
Indexers []Indexer `json:"indexers"`
}
type FilterGeneral struct {
MinSize string `json:"min_size"`
MaxSize string `json:"max_size"`
Delay int `json:"delay"`
}
type FilterP2P struct {
MatchReleases string `json:"match_releases"`
ExceptReleases string `json:"except_releases"`
UseRegex bool `json:"use_regex"`
MatchReleaseGroups string `json:"match_release_groups"`
ExceptReleaseGroups string `json:"except_release_groups"`
Scene bool `json:"scene"`
Origins string `json:"origins"`
Freeleech bool `json:"freeleech"`
FreeleechPercent string `json:"freeleech_percent"`
}
type FilterTVMovies struct {
Shows string `json:"shows"`
Seasons string `json:"seasons"`
Episodes string `json:"episodes"`
Resolutions []string `json:"resolutions"` // SD, 480i, 480p, 576p, 720p, 810p, 1080i, 1080p.
Codecs []string `json:"codecs"` // XviD, DivX, x264, h.264 (or h264), mpeg2 (or mpeg-2), VC-1 (or VC1), WMV, Remux, h.264 Remux (or h264 Remux), VC-1 Remux (or VC1 Remux).
Sources []string `json:"sources"` // DSR, PDTV, HDTV, HR.PDTV, HR.HDTV, DVDRip, DVDScr, BDr, BD5, BD9, BDRip, BRRip, DVDR, MDVDR, HDDVD, HDDVDRip, BluRay, WEB-DL, TVRip, CAM, R5, TELESYNC, TS, TELECINE, TC. TELESYNC and TS are synonyms (you don't need both). Same for TELECINE and TC
Containers []string `json:"containers"`
Years string `json:"years"`
}
type FilterMusic struct {
Artists string `json:"artists"`
Albums string `json:"albums"`
MatchReleaseTypes string `json:"match_release_types"` // Album,Single,EP
ExceptReleaseTypes string `json:"except_release_types"`
Formats []string `json:"formats"` // MP3, FLAC, Ogg, AAC, AC3, DTS
Bitrates []string `json:"bitrates"` // 192, 320, APS (VBR), V2 (VBR), V1 (VBR), APX (VBR), V0 (VBR), q8.x (VBR), Lossless, 24bit Lossless, Other
Media []string `json:"media"` // CD, DVD, Vinyl, Soundboard, SACD, DAT, Cassette, WEB, Other
Cue bool `json:"cue"`
Log bool `json:"log"`
LogScores string `json:"log_scores"`
}
type FilterAdvanced struct {
MatchCategories string `json:"match_categories"`
ExceptCategories string `json:"except_categories"`
MatchUploaders string `json:"match_uploaders"`
ExceptUploaders string `json:"except_uploaders"`
Tags string `json:"tags"`
ExceptTags string `json:"except_tags"`
TagsAny string `json:"tags_any"`
ExceptTagsAny string `json:"except_tags_any"`
ID int `json:"id"`
Name string `json:"name"`
Enabled bool `json:"enabled"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
MinSize string `json:"min_size"`
MaxSize string `json:"max_size"`
Delay int `json:"delay"`
MatchReleases string `json:"match_releases"`
ExceptReleases string `json:"except_releases"`
UseRegex bool `json:"use_regex"`
MatchReleaseGroups string `json:"match_release_groups"`
ExceptReleaseGroups string `json:"except_release_groups"`
Scene bool `json:"scene"`
Origins string `json:"origins"`
Freeleech bool `json:"freeleech"`
FreeleechPercent string `json:"freeleech_percent"`
Shows string `json:"shows"`
Seasons string `json:"seasons"`
Episodes string `json:"episodes"`
Resolutions []string `json:"resolutions"` // SD, 480i, 480p, 576p, 720p, 810p, 1080i, 1080p.
Codecs []string `json:"codecs"` // XviD, DivX, x264, h.264 (or h264), mpeg2 (or mpeg-2), VC-1 (or VC1), WMV, Remux, h.264 Remux (or h264 Remux), VC-1 Remux (or VC1 Remux).
Sources []string `json:"sources"` // DSR, PDTV, HDTV, HR.PDTV, HR.HDTV, DVDRip, DVDScr, BDr, BD5, BD9, BDRip, BRRip, DVDR, MDVDR, HDDVD, HDDVDRip, BluRay, WEB-DL, TVRip, CAM, R5, TELESYNC, TS, TELECINE, TC. TELESYNC and TS are synonyms (you don't need both). Same for TELECINE and TC
Containers []string `json:"containers"`
Years string `json:"years"`
Artists string `json:"artists"`
Albums string `json:"albums"`
MatchReleaseTypes string `json:"match_release_types"` // Album,Single,EP
ExceptReleaseTypes string `json:"except_release_types"`
Formats []string `json:"formats"` // MP3, FLAC, Ogg, AAC, AC3, DTS
Bitrates []string `json:"bitrates"` // 192, 320, APS (VBR), V2 (VBR), V1 (VBR), APX (VBR), V0 (VBR), q8.x (VBR), Lossless, 24bit Lossless, Other
Media []string `json:"media"` // CD, DVD, Vinyl, Soundboard, SACD, DAT, Cassette, WEB, Other
Cue bool `json:"cue"`
Log bool `json:"log"`
LogScores string `json:"log_scores"`
MatchCategories string `json:"match_categories"`
ExceptCategories string `json:"except_categories"`
MatchUploaders string `json:"match_uploaders"`
ExceptUploaders string `json:"except_uploaders"`
Tags string `json:"tags"`
ExceptTags string `json:"except_tags"`
TagsAny string `json:"tags_any"`
ExceptTagsAny string `json:"except_tags_any"`
Actions []Action `json:"actions"`
Indexers []Indexer `json:"indexers"`
}

View file

@@ -35,7 +35,6 @@ type IrcNetwork struct {
}
type IrcRepo interface {
Store(announce Announce) error
StoreNetwork(network *IrcNetwork) error
StoreChannel(networkID int64, channel *IrcChannel) error
ListNetworks(ctx context.Context) ([]IrcNetwork, error)

internal/domain/release.go (new file, 920 lines)
View file

@@ -0,0 +1,920 @@
package domain
import (
"context"
"fmt"
"html"
"regexp"
"strconv"
"strings"
"time"
"github.com/autobrr/autobrr/pkg/wildcard"
"github.com/dustin/go-humanize"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
type ReleaseRepo interface {
Store(ctx context.Context, release *Release) (*Release, error)
Find(ctx context.Context, params QueryParams) (res []Release, nextCursor int64, err error)
Stats(ctx context.Context) (*ReleaseStats, error)
UpdatePushStatus(ctx context.Context, id int64, status ReleasePushStatus) error
UpdatePushStatusRejected(ctx context.Context, id int64, rejections string) error
}
type Release struct {
ID int64 `json:"id"`
FilterStatus ReleaseFilterStatus `json:"filter_status"`
PushStatus ReleasePushStatus `json:"push_status"`
Rejections []string `json:"rejections"`
Indexer string `json:"indexer"`
FilterName string `json:"filter"`
Protocol ReleaseProtocol `json:"protocol"`
Implementation ReleaseImplementation `json:"implementation"` // irc, rss, api
Timestamp time.Time `json:"timestamp"`
GroupID string `json:"group_id"`
TorrentID string `json:"torrent_id"`
TorrentURL string `json:"-"`
TorrentName string `json:"torrent_name"` // full release name
Size uint64 `json:"size"`
Raw string `json:"raw"` // Raw release
Title string `json:"title"` // Parsed title
Category string `json:"category"`
Season int `json:"season"`
Episode int `json:"episode"`
Year int `json:"year"`
Resolution string `json:"resolution"`
Source string `json:"source"` // CD, DVD, Vinyl, DAT, Cassette, WEB, Other
Codec string `json:"codec"`
Container string `json:"container"`
HDR string `json:"hdr"`
Audio string `json:"audio"`
Group string `json:"group"`
Region string `json:"region"`
Language string `json:"language"`
Edition string `json:"edition"` // Extended, directors cut
Unrated bool `json:"unrated"`
Hybrid bool `json:"hybrid"`
Proper bool `json:"proper"`
Repack bool `json:"repack"`
Website string `json:"website"`
ThreeD bool `json:"-"`
Artists []string `json:"artists"`
Type string `json:"type"` // Album,Single,EP
Format string `json:"format"` // music only
Bitrate string `json:"bitrate"` // bitrate
LogScore int `json:"log_score"`
HasLog bool `json:"has_log"`
HasCue bool `json:"has_cue"`
IsScene bool `json:"is_scene"`
Origin string `json:"origin"` // P2P, Internal
Tags []string `json:"tags"`
Freeleech bool `json:"freeleech"`
FreeleechPercent int `json:"freeleech_percent"`
Uploader string `json:"uploader"`
PreTime string `json:"pre_time"`
AdditionalSizeCheckRequired bool `json:"-"`
FilterID int `json:"-"`
Filter *Filter `json:"-"`
}
func NewRelease(indexer string, line string) (*Release, error) {
r := &Release{
Indexer: indexer,
Raw: line,
FilterStatus: ReleaseStatusFilterPending,
PushStatus: ReleasePushStatusPending,
Rejections: []string{},
Protocol: ReleaseProtocolTorrent,
Implementation: ReleaseImplementationIRC,
Timestamp: time.Now(),
Artists: []string{},
Tags: []string{},
}
return r, nil
}
func (r *Release) Parse() error {
var err error
err = r.extractYear()
err = r.extractSeason()
err = r.extractEpisode()
err = r.extractResolution()
err = r.extractSource()
err = r.extractCodec()
err = r.extractContainer()
err = r.extractHDR()
err = r.extractAudio()
err = r.extractGroup()
err = r.extractRegion()
err = r.extractLanguage()
err = r.extractEdition()
err = r.extractUnrated()
err = r.extractHybrid()
err = r.extractProper()
err = r.extractRepack()
err = r.extractWebsite()
if err != nil {
log.Trace().Msgf("could not parse release: %v", r.TorrentName)
return err
}
return nil
}
func (r *Release) extractYear() error {
y, err := findLastInt(r.TorrentName, `\b(((?:19[0-9]|20[0-9])[0-9]))\b`)
if err != nil {
return err
}
r.Year = y
return nil
}
func (r *Release) extractSeason() error {
s, err := findLastInt(r.TorrentName, `(?:S|Season\s*)(\d{1,3})`)
if err != nil {
return err
}
r.Season = s
return nil
}
func (r *Release) extractEpisode() error {
e, err := findLastInt(r.TorrentName, `(?i)[ex]([0-9]{2})(?:[^0-9]|$)`)
if err != nil {
return err
}
r.Episode = e
return nil
}
func (r *Release) extractResolution() error {
v, err := findLast(r.TorrentName, `\b(([0-9]{3,4}p|i))\b`)
if err != nil {
return err
}
r.Resolution = v
return nil
}
func (r *Release) extractSource() error {
v, err := findLast(r.TorrentName, `(?i)\b(((?:PPV\.)?[HP]DTV|(?:HD)?CAM|B[DR]Rip|(?:HD-?)?TS|(?:PPV )?WEB-?DL(?: DVDRip)?|HDRip|DVDRip|DVDRIP|CamRip|WEB|W[EB]BRip|Blu-?Ray|DvDScr|telesync|CD|DVD|Vinyl|DAT|Cassette))\b`)
if err != nil {
return err
}
r.Source = v
return nil
}
func (r *Release) extractCodec() error {
v, err := findLast(r.TorrentName, `(?i)\b(HEVC|[hx]\.?26[45]|xvid|divx|AVC|MPEG-?2|AV1|VC-?1|VP9|WebP)\b`)
if err != nil {
return err
}
r.Codec = v
return nil
}
func (r *Release) extractContainer() error {
v, err := findLast(r.TorrentName, `(?i)\b(AVI|MPG|MKV|MP4|VOB|m2ts|ISO|IMG)\b`)
if err != nil {
return err
}
r.Container = v
return nil
}
func (r *Release) extractHDR() error {
v, err := findLast(r.TorrentName, `(?i)(HDR10\+|HDR10|DoVi HDR|DV HDR|HDR|DV|DoVi|Dolby Vision \+ HDR10|Dolby Vision)`)
if err != nil {
return err
}
r.HDR = v
return nil
}
func (r *Release) extractAudio() error {
v, err := findLast(r.TorrentName, `(?i)(MP3|FLAC[\. ][1-7][\. ][0-2]|FLAC|Opus|DD-EX|DDP[\. ]?[124567][\. ][012] Atmos|DDP[\. ]?[124567][\. ][012]|DDP|DD[1-7][\. ][0-2]|Dual[\- ]Audio|LiNE|PCM|Dolby TrueHD [0-9][\. ][0-4]|TrueHD [0-9][\. ][0-4] Atmos|TrueHD [0-9][\. ][0-4]|DTS X|DTS-HD MA [0-9][\. ][0-4]|DTS-HD MA|DTS-ES|DTS [1-7][\. ][0-2]|DTS|DD|DD[12][\. ]0|Dolby Atmos|TrueHD ATMOS|TrueHD|Atmos|Dolby Digital Plus|Dolby Digital Audio|Dolby Digital|AAC[.-]LC|AAC (?:\.?[1-7]\.[0-2])?|AAC|eac3|AC3(?:\.5\.1)?)`)
if err != nil {
return err
}
r.Audio = v
return nil
}
func (r *Release) extractGroup() error {
	// try first for weird anime group names like "[group] show name", or a group in brackets at the end
group := ""
g, err := findLast(r.TorrentName, `\[(.*?)\]`)
if err != nil {
return err
}
group = g
if group == "" {
g2, err := findLast(r.TorrentName, `(- ?([^-]+(?:-={[^-]+-?$)?))$`)
if err != nil {
return err
}
group = g2
}
r.Group = group
return nil
}
func (r *Release) extractRegion() error {
v, err := findLast(r.TorrentName, `(?i)\b(R([0-9]))\b`)
if err != nil {
return err
}
r.Region = v
return nil
}
func (r *Release) extractLanguage() error {
v, err := findLast(r.TorrentName, `(?i)\b((DK|DKSUBS|DANiSH|DUTCH|NL|NLSUBBED|ENG|FI|FLEMiSH|FiNNiSH|DE|FRENCH|GERMAN|HE|HEBREW|HebSub|HiNDi|iCELANDiC|KOR|MULTi|MULTiSUBS|NORWEGiAN|NO|NORDiC|PL|PO|POLiSH|PLDUB|RO|ROMANiAN|RUS|SPANiSH|SE|SWEDiSH|SWESUB||))\b`)
if err != nil {
return err
}
r.Language = v
return nil
}
func (r *Release) extractEdition() error {
v, err := findLast(r.TorrentName, `(?i)\b((?:DIRECTOR'?S|EXTENDED|INTERNATIONAL|THEATRICAL|ORIGINAL|FINAL|BOOTLEG)(?:.CUT)?)\b`)
if err != nil {
return err
}
r.Edition = v
return nil
}
func (r *Release) extractUnrated() error {
v, err := findLastBool(r.TorrentName, `(?i)\b((UNRATED))\b`)
if err != nil {
return err
}
r.Unrated = v
return nil
}
func (r *Release) extractHybrid() error {
v, err := findLastBool(r.TorrentName, `(?i)\b((HYBRID))\b`)
if err != nil {
return err
}
r.Hybrid = v
return nil
}
func (r *Release) extractProper() error {
v, err := findLastBool(r.TorrentName, `(?i)\b((PROPER))\b`)
if err != nil {
return err
}
r.Proper = v
return nil
}
func (r *Release) extractRepack() error {
v, err := findLastBool(r.TorrentName, `(?i)\b((REPACK))\b`)
if err != nil {
return err
}
r.Repack = v
return nil
}
func (r *Release) extractWebsite() error {
// Start with the basic most common ones
v, err := findLast(r.TorrentName, `(?i)\b((AMBC|AS|AMZN|AMC|ANPL|ATVP|iP|CORE|BCORE|CMOR|CN|CBC|CBS|CMAX|CNBC|CC|CRIT|CR|CSPN|CW|DAZN|DCU|DISC|DSCP|DSNY|DSNP|DPLY|ESPN|FOX|FUNI|PLAY|HBO|HMAX|HIST|HS|HOTSTAR|HULU|iT|MNBC|MTV|NATG|NBC|NF|NICK|NRK|PMNT|PMNP|PCOK|PBS|PBSK|PSN|QIBI|SBS|SHO|STAN|STZ|SVT|SYFY|TLC|TRVL|TUBI|TV3|TV4|TVL|VH1|VICE|VMEO|UFC|USAN|VIAP|VIAPLAY|VL|WWEN|XBOX|YHOO|YT|RED))\b`)
if err != nil {
return err
}
r.Website = v
return nil
}
func (r *Release) addRejection(reason string) {
r.Rejections = append(r.Rejections, reason)
}
// resetRejections resets rejections between filter checks
func (r *Release) resetRejections() {
r.Rejections = []string{}
}
func (r *Release) CheckFilter(filter Filter) bool {
// reset rejections first to clean previous checks
r.resetRejections()
if !filter.Enabled {
return false
}
	// FIXME what if someone explicitly doesn't want scene, or toggles it in the filter. Make enum? 0,1,2? Yes, No, Don't care
if filter.Scene && r.IsScene != filter.Scene {
r.addRejection("wanted: scene")
return false
}
if filter.Freeleech && r.Freeleech != filter.Freeleech {
r.addRejection("wanted: freeleech")
return false
}
if filter.FreeleechPercent != "" && !checkFreeleechPercent(r.FreeleechPercent, filter.FreeleechPercent) {
r.addRejection("freeleech percent not matching")
return false
}
// check against title when parsed correctly
if filter.Shows != "" && !checkFilterStrings(r.TorrentName, filter.Shows) {
r.addRejection("shows not matching")
return false
}
if filter.Seasons != "" && !checkFilterIntStrings(r.Season, filter.Seasons) {
r.addRejection("season not matching")
return false
}
if filter.Episodes != "" && !checkFilterIntStrings(r.Episode, filter.Episodes) {
r.addRejection("episode not matching")
return false
}
// matchRelease
// TODO allow to match against regex
if filter.MatchReleases != "" && !checkFilterStrings(r.TorrentName, filter.MatchReleases) {
r.addRejection("match release not matching")
return false
}
if filter.ExceptReleases != "" && checkFilterStrings(r.TorrentName, filter.ExceptReleases) {
r.addRejection("except_releases: unwanted release")
return false
}
if filter.MatchReleaseGroups != "" && !checkFilterStrings(r.Group, filter.MatchReleaseGroups) {
r.addRejection("release groups not matching")
return false
}
if filter.ExceptReleaseGroups != "" && checkFilterStrings(r.Group, filter.ExceptReleaseGroups) {
r.addRejection("unwanted release group")
return false
}
if filter.MatchUploaders != "" && !checkFilterStrings(r.Uploader, filter.MatchUploaders) {
r.addRejection("uploaders not matching")
return false
}
if filter.ExceptUploaders != "" && checkFilterStrings(r.Uploader, filter.ExceptUploaders) {
r.addRejection("unwanted uploaders")
return false
}
if len(filter.Resolutions) > 0 && !checkFilterSlice(r.Resolution, filter.Resolutions) {
r.addRejection("resolution not matching")
return false
}
if len(filter.Codecs) > 0 && !checkFilterSlice(r.Codec, filter.Codecs) {
r.addRejection("codec not matching")
return false
}
if len(filter.Sources) > 0 && !checkFilterSlice(r.Source, filter.Sources) {
r.addRejection("source not matching")
return false
}
if len(filter.Containers) > 0 && !checkFilterSlice(r.Container, filter.Containers) {
r.addRejection("container not matching")
return false
}
if filter.Years != "" && !checkFilterIntStrings(r.Year, filter.Years) {
r.addRejection("year not matching")
return false
}
if filter.MatchCategories != "" && !checkFilterStrings(r.Category, filter.MatchCategories) {
r.addRejection("category not matching")
return false
}
if filter.ExceptCategories != "" && checkFilterStrings(r.Category, filter.ExceptCategories) {
r.addRejection("unwanted category")
return false
}
if (filter.MinSize != "" || filter.MaxSize != "") && !r.CheckSizeFilter(filter.MinSize, filter.MaxSize) {
return false
}
//if filter.Tags != "" && !checkFilterStrings(r.Tags, filter.Tags) {
// r.addRejection("tags not matching")
// return false
//}
//
//if filter.ExceptTags != "" && checkFilterStrings(r.Tags, filter.ExceptTags) {
// r.addRejection("unwanted tags")
// return false
//}
return true
}
// CheckSizeFilter is an additional size check
// for indexers that don't announce size, like some cabals.
// It sets r.AdditionalSizeCheckRequired when the filter has a size limit but no size was announced, otherwise it goes ahead.
// TODO: implement API lookups for ptp, btn, bhd, ggn to check size when needed;
// for the others, pull down the torrent and check.
func (r *Release) CheckSizeFilter(minSize string, maxSize string) bool {
if r.Size == 0 {
r.AdditionalSizeCheckRequired = true
return true
} else {
r.AdditionalSizeCheckRequired = false
}
// if r.Size parse filter to bytes and compare
// handle both min and max
if minSize != "" {
// string to bytes
minSizeBytes, err := humanize.ParseBytes(minSize)
if err != nil {
// log could not parse into bytes
}
if r.Size <= minSizeBytes {
r.addRejection("size: smaller than min size")
return false
}
}
if maxSize != "" {
// string to bytes
maxSizeBytes, err := humanize.ParseBytes(maxSize)
if err != nil {
// log could not parse into bytes
}
if r.Size >= maxSizeBytes {
r.addRejection("size: larger than max size")
return false
}
}
return true
}
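To make the branches above concrete, a sketch in the style of the package tests (not part of this commit; humanize.ParseBytes treats "GB" as 10^9 bytes):

package domain

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestCheckSizeFilter_Sketch(t *testing.T) {
	// announced size of 15 GB sits between the 10 GB and 20 GB limits
	r := Release{Size: 15_000_000_000}
	assert.True(t, r.CheckSizeFilter("10 GB", "20GB"))
	assert.False(t, r.AdditionalSizeCheckRequired)

	// no announced size: the check passes for now, but the release is flagged
	// so the caller can do the additional size check later
	r = Release{Size: 0}
	assert.True(t, r.CheckSizeFilter("10 GB", "20GB"))
	assert.True(t, r.AdditionalSizeCheckRequired)
}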
// MapVars maps announce variables onto the release fields (TODO: find a better name)
func (r *Release) MapVars(varMap map[string]string) error {
if torrentName, err := getFirstStringMapValue(varMap, []string{"torrentName"}); err != nil {
return errors.Wrap(err, "failed parsing required field")
} else {
r.TorrentName = html.UnescapeString(torrentName)
}
if category, err := getFirstStringMapValue(varMap, []string{"category"}); err == nil {
r.Category = category
}
if freeleech, err := getFirstStringMapValue(varMap, []string{"freeleech"}); err == nil {
r.Freeleech = strings.EqualFold(freeleech, "freeleech") || strings.EqualFold(freeleech, "yes")
}
if freeleechPercent, err := getFirstStringMapValue(varMap, []string{"freeleechPercent"}); err == nil {
// remove % and trim spaces
freeleechPercent = strings.Replace(freeleechPercent, "%", "", -1)
freeleechPercent = strings.Trim(freeleechPercent, " ")
freeleechPercentInt, err := strconv.Atoi(freeleechPercent)
if err != nil {
//log.Debug().Msgf("bad freeleechPercent var: %v", year)
}
r.FreeleechPercent = freeleechPercentInt
}
if uploader, err := getFirstStringMapValue(varMap, []string{"uploader"}); err == nil {
r.Uploader = uploader
}
if torrentSize, err := getFirstStringMapValue(varMap, []string{"torrentSize"}); err == nil {
size, err := humanize.ParseBytes(torrentSize)
if err != nil {
// log could not parse into bytes
}
r.Size = size
// TODO implement other size checks in filter
}
if scene, err := getFirstStringMapValue(varMap, []string{"scene"}); err == nil {
r.IsScene = strings.EqualFold(scene, "true") || strings.EqualFold(scene, "yes")
}
//if year, err := getFirstStringMapValue(varMap, []string{"year"}); err == nil {
// yearI, err := strconv.Atoi(year)
// if err != nil {
// //log.Debug().Msgf("bad year var: %v", year)
// }
// r.Year = yearI
//}
// TODO split this into two
if tags, err := getFirstStringMapValue(varMap, []string{"releaseTags", "tags"}); err == nil {
r.Tags = []string{tags}
}
// TODO parse releaseType
//if releaseType, err := getFirstStringMapValue(varMap, []string{"releaseType", "$releaseType"}); err == nil {
// r.Type = releaseType
//}
//if cue, err := getFirstStringMapValue(varMap, []string{"cue", "$cue"}); err == nil {
// r.Cue = strings.EqualFold(cue, "true")
//}
//if logVar, err := getFirstStringMapValue(varMap, []string{"log", "$log"}); err == nil {
// r.Log = logVar
//}
//if media, err := getFirstStringMapValue(varMap, []string{"media", "$media"}); err == nil {
// r.Media = media
//}
//if format, err := getFirstStringMapValue(varMap, []string{"format", "$format"}); err == nil {
// r.Format = format
//}
//if bitRate, err := getFirstStringMapValue(varMap, []string{"bitrate", "$bitrate"}); err == nil {
// r.Bitrate = bitRate
//}
return nil
}
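A sketch of the variable map MapVars expects; the keys are the ones looked up above, while the values are made up for illustration (in practice the map comes from a parsed IRC announce):

package domain

import "testing"

func TestMapVars_Sketch(t *testing.T) {
	vars := map[string]string{
		"torrentName":      "That Movie 2020 2160p BluRay DD5.1 x264-GROUP1",
		"category":         "Movies",
		"freeleech":        "yes",
		"freeleechPercent": "100%",
		"uploader":         "Uploader1",
		"torrentSize":      "30GB",
	}

	r := Release{}
	if err := r.MapVars(vars); err != nil {
		t.Fatalf("could not map vars: %v", err)
	}

	// Expected: Freeleech=true, FreeleechPercent=100, Size=30000000000
	t.Logf("%+v", r)
}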
func checkFilterSlice(name string, filterList []string) bool {
name = strings.ToLower(name)
for _, filter := range filterList {
filter = strings.ToLower(filter)
filter = strings.Trim(filter, " ")
// check if line contains * or ?, if so try wildcard match, otherwise try substring match
a := strings.ContainsAny(filter, "?|*")
if a {
match := wildcard.Match(filter, name)
if match {
return true
}
} else {
b := strings.Contains(name, filter)
if b {
return true
}
}
}
return false
}
func checkFilterStrings(name string, filterList string) bool {
filterSplit := strings.Split(filterList, ",")
name = strings.ToLower(name)
for _, s := range filterSplit {
s = strings.ToLower(s)
s = strings.Trim(s, " ")
// check if line contains * or ?, if so try wildcard match, otherwise try substring match
a := strings.ContainsAny(s, "?|*")
if a {
match := wildcard.Match(s, name)
if match {
return true
}
} else {
b := strings.Contains(name, s)
if b {
return true
}
}
}
return false
}
// checkFilterIntStrings matches value against a comma-separated list of ints and ranges, e.g. "1,2,3-20"
func checkFilterIntStrings(value int, filterList string) bool {
filters := strings.Split(filterList, ",")
for _, s := range filters {
s = strings.Replace(s, "%", "", -1)
s = strings.Trim(s, " ")
if strings.Contains(s, "-") {
minMax := strings.Split(s, "-")
// to int
min, err := strconv.ParseInt(minMax[0], 10, 32)
if err != nil {
return false
}
max, err := strconv.ParseInt(minMax[1], 10, 32)
if err != nil {
return false
}
if min > max {
// handle error
return false
} else {
// if announcePercent is greater than min and less than max return true
if value >= int(min) && value <= int(max) {
return true
}
}
}
filterInt, err := strconv.ParseInt(s, 10, 32)
if err != nil {
return false
}
if int(filterInt) == value {
return true
}
}
return false
}
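A worked example of the "1,2,3-20" syntax handled above (a sketch, not part of this commit):

package domain

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestCheckFilterIntStrings_Sketch(t *testing.T) {
	// 7 falls inside the 3-20 range entry
	assert.True(t, checkFilterIntStrings(7, "1,2,3-20"))

	// 25 matches no entry; note that once the 3-20 range fails to match,
	// strconv.ParseInt("3-20", 10, 32) errors and the function returns false,
	// so entries listed after a non-matching range are never reached.
	assert.False(t, checkFilterIntStrings(25, "1,2,3-20"))
}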
func checkFreeleechPercent(announcePercent int, filterPercent string) bool {
filters := strings.Split(filterPercent, ",")
// remove % and trim spaces
//announcePercent = strings.Replace(announcePercent, "%", "", -1)
//announcePercent = strings.Trim(announcePercent, " ")
//announcePercentInt, err := strconv.ParseInt(announcePercent, 10, 32)
//if err != nil {
// return false
//}
for _, s := range filters {
s = strings.Replace(s, "%", "", -1)
s = strings.Trim(s, " ")
if strings.Contains(s, "-") {
minMax := strings.Split(s, "-")
// to int
min, err := strconv.ParseInt(minMax[0], 10, 32)
if err != nil {
return false
}
max, err := strconv.ParseInt(minMax[1], 10, 32)
if err != nil {
return false
}
if min > max {
// handle error
return false
} else {
// if announcePercent is greater than min and less than max return true
if announcePercent >= int(min) && announcePercent <= int(max) {
return true
}
}
}
filterPercentInt, err := strconv.ParseInt(s, 10, 32)
if err != nil {
return false
}
if int(filterPercentInt) == announcePercent {
return true
}
}
return false
}
func getStringMapValue(stringMap map[string]string, key string) (string, error) {
lowerKey := strings.ToLower(key)
// case-sensitive match
//if caseSensitive {
// v, ok := stringMap[key]
// if !ok {
// return "", fmt.Errorf("key was not found in map: %q", key)
// }
//
// return v, nil
//}
// case-insensitive match
for k, v := range stringMap {
if strings.ToLower(k) == lowerKey {
return v, nil
}
}
return "", fmt.Errorf("key was not found in map: %q", lowerKey)
}
func getFirstStringMapValue(stringMap map[string]string, keys []string) (string, error) {
for _, k := range keys {
if val, err := getStringMapValue(stringMap, k); err == nil {
return val, nil
}
}
return "", fmt.Errorf("key were not found in map: %q", strings.Join(keys, ", "))
}
func findLast(input string, pattern string) (string, error) {
matched := make([]string, 0)
//for _, s := range arr {
rxp, err := regexp.Compile(pattern)
if err != nil {
return "", err
//return errors.Wrapf(err, "invalid regex: %s", value)
}
matches := rxp.FindStringSubmatch(input)
if matches != nil {
		// first value is the full match, the rest are capture groups
if len(matches) >= 1 {
last := matches[len(matches)-1]
// add to temp slice
matched = append(matched, last)
}
}
//}
// check if multiple values in temp slice, if so get the last one
if len(matched) >= 1 {
last := matched[len(matched)-1]
return last, nil
}
return "", nil
}
func findLastBool(input string, pattern string) (bool, error) {
matched := make([]string, 0)
rxp, err := regexp.Compile(pattern)
if err != nil {
return false, err
}
matches := rxp.FindStringSubmatch(input)
if matches != nil {
		// first value is the full match, the rest are capture groups
if len(matches) >= 1 {
last := matches[len(matches)-1]
// add to temp slice
matched = append(matched, last)
}
}
//}
// check if multiple values in temp slice, if so get the last one
if len(matched) >= 1 {
//last := matched[len(matched)-1]
return true, nil
}
return false, nil
}
func findLastInt(input string, pattern string) (int, error) {
matched := make([]string, 0)
//for _, s := range arr {
rxp, err := regexp.Compile(pattern)
if err != nil {
return 0, err
//return errors.Wrapf(err, "invalid regex: %s", value)
}
matches := rxp.FindStringSubmatch(input)
if matches != nil {
		// first value is the full match, the rest are capture groups
if len(matches) >= 1 {
last := matches[len(matches)-1]
// add to temp slice
matched = append(matched, last)
}
}
//}
// check if multiple values in temp slice, if so get the last one
if len(matched) >= 1 {
last := matched[len(matched)-1]
i, err := strconv.Atoi(last)
if err != nil {
return 0, err
}
return i, nil
}
return 0, nil
}
type ReleaseStats struct {
TotalCount int64 `json:"total_count"`
FilteredCount int64 `json:"filtered_count"`
FilterRejectedCount int64 `json:"filter_rejected_count"`
PushApprovedCount int64 `json:"push_approved_count"`
PushRejectedCount int64 `json:"push_rejected_count"`
}
type ReleasePushStatus string
const (
ReleasePushStatusApproved ReleasePushStatus = "PUSH_APPROVED"
ReleasePushStatusRejected ReleasePushStatus = "PUSH_REJECTED"
ReleasePushStatusMixed ReleasePushStatus = "MIXED" // For multiple actions, one might go and the other not
ReleasePushStatusPending ReleasePushStatus = "PENDING" // Initial status
)
type ReleaseFilterStatus string
const (
ReleaseStatusFilterApproved ReleaseFilterStatus = "FILTER_APPROVED"
ReleaseStatusFilterRejected ReleaseFilterStatus = "FILTER_REJECTED"
ReleaseStatusFilterPending ReleaseFilterStatus = "PENDING"
)
type ReleaseProtocol string
const (
ReleaseProtocolTorrent ReleaseProtocol = "torrent"
)
type ReleaseImplementation string
const (
ReleaseImplementationIRC ReleaseImplementation = "IRC"
)
type QueryParams struct {
Limit uint64
Cursor uint64
Sort map[string]string
Filter map[string]string
Search string
}


@ -0,0 +1,364 @@
package domain
import (
"github.com/stretchr/testify/assert"
"testing"
"time"
)
func TestRelease_Parse(t *testing.T) {
tests := []struct {
name string
fields Release
wantErr bool
}{
{name: "parse_1", fields: Release{
ID: 0,
Rejections: nil,
Indexer: "",
FilterName: "",
Protocol: "",
Implementation: "",
Timestamp: time.Time{},
TorrentID: "",
GroupID: "",
TorrentName: "Servant S01 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-FLUX",
Raw: "",
Title: "",
Category: "",
Season: 0,
Episode: 0,
Year: 0,
Resolution: "",
Source: "",
Codec: "",
Container: "",
HDR: "",
Audio: "",
Group: "",
Region: "",
Edition: "",
Proper: false,
Repack: false,
Website: "",
Language: "",
Unrated: false,
Hybrid: false,
Size: 0,
ThreeD: false,
Artists: nil,
Type: "",
Format: "",
Bitrate: "",
LogScore: 0,
HasLog: false,
HasCue: false,
IsScene: false,
Origin: "",
Tags: nil,
Freeleech: false,
FreeleechPercent: 0,
Uploader: "",
PreTime: "",
TorrentURL: "",
Filter: nil,
}, wantErr: false},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
r := tt.fields
if err := r.Parse(); (err != nil) != tt.wantErr {
t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantErr)
}
})
}
}
func TestRelease_CheckFilter(t *testing.T) {
type args struct {
filter Filter
}
tests := []struct {
name string
fields *Release
args args
want bool
}{
{
name: "size_between_max_min",
fields: &Release{Size: uint64(10000000001)},
args: args{
filter: Filter{
Enabled: true,
MinSize: "10 GB",
MaxSize: "20GB",
},
},
want: true,
},
{
name: "size_larger_than_max",
fields: &Release{Size: uint64(30000000001)},
args: args{
filter: Filter{
Enabled: true,
MinSize: "10 GB",
MaxSize: "20GB",
},
},
want: false,
},
//{
// name: "test_no_size",
// fields: &Release{Size: uint64(0)},
// args: args{
// filter: Filter{
// Enabled: true,
// FilterGeneral: FilterGeneral{MinSize: "10 GB", MaxSize: "20GB"},
// },
// },
// want: false, // additional checks
//},
{
name: "movie_parse_1",
fields: &Release{
TorrentName: "That Movie 2020 2160p BluRay DD5.1 x264-GROUP1",
Category: "Movies",
Freeleech: true,
Size: uint64(30000000001),
},
args: args{
filter: Filter{
Enabled: true,
MatchCategories: "Movies",
Freeleech: true,
MinSize: "10 GB",
MaxSize: "40GB",
Resolutions: []string{"2160p"},
Sources: []string{"BluRay"},
Codecs: []string{"x264"},
Years: "2020",
MatchReleaseGroups: "GROUP1",
},
},
want: true,
},
{
name: "movie_parse_shows",
fields: &Release{
TorrentName: "That Movie 2020 2160p BluRay DD5.1 x264-GROUP1",
Category: "Movies",
Freeleech: true,
Size: uint64(30000000001),
},
args: args{
filter: Filter{
Enabled: true,
MatchCategories: "Movies",
Freeleech: true,
MinSize: "10 GB",
MaxSize: "40GB",
Resolutions: []string{"2160p"},
Sources: []string{"BluRay"},
Codecs: []string{"x264"},
Years: "2020",
MatchReleaseGroups: "GROUP1",
Shows: "That Movie",
},
},
want: true,
},
{
name: "movie_parse_multiple_shows",
fields: &Release{
TorrentName: "That Movie 2020 2160p BluRay DD5.1 x264-GROUP1",
Category: "Movies",
Freeleech: true,
Size: uint64(30000000001),
},
args: args{
filter: Filter{
Enabled: true,
MatchCategories: "Movies",
Freeleech: true,
MinSize: "10 GB",
MaxSize: "40GB",
Resolutions: []string{"2160p"},
Sources: []string{"BluRay"},
Codecs: []string{"x264"},
Years: "2020",
MatchReleaseGroups: "GROUP1",
Shows: "That Movie, good story, bad movie",
},
},
want: true,
},
{
name: "movie_parse_wildcard_shows",
fields: &Release{
TorrentName: "That Movie 2020 2160p BluRay DD5.1 x264-GROUP1",
Category: "Movies",
Freeleech: true,
Size: uint64(30000000001), // 30GB
},
args: args{
filter: Filter{
Enabled: true,
MatchCategories: "Movies, tv",
Freeleech: true,
MinSize: "10 GB",
MaxSize: "40GB",
Resolutions: []string{"1080p", "2160p"},
Sources: []string{"BluRay"},
Codecs: []string{"x264"},
Years: "2015,2018-2022",
MatchReleaseGroups: "GROUP1,BADGROUP",
Shows: "*Movie*, good story, bad movie",
},
},
want: true,
},
{
name: "movie_bad_category",
fields: &Release{
TorrentName: "That Movie 2020 2160p BluRay DD5.1 x264-GROUP1",
Category: "Movies",
Freeleech: true,
Size: uint64(30000000001), // 30GB
},
args: args{
filter: Filter{
Enabled: true,
MatchCategories: "*tv*",
Freeleech: true,
MinSize: "10 GB",
MaxSize: "40GB",
Resolutions: []string{"1080p", "2160p"},
Sources: []string{"BluRay"},
Codecs: []string{"x264"},
Years: "2015,2018-2022",
MatchReleaseGroups: "GROUP1,BADGROUP",
Shows: "*Movie*, good story, bad movie",
},
},
want: false,
},
{
name: "tv_match_season_episode",
fields: &Release{
TorrentName: "Good show S01E01 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-GROUP2",
Category: "TV",
},
args: args{
filter: Filter{
Enabled: true,
MatchCategories: "*tv*",
Resolutions: []string{"1080p", "2160p"},
Sources: []string{"WEB-DL"},
Codecs: []string{"HEVC"},
MatchReleaseGroups: "GROUP1,GROUP2",
Seasons: "1,2",
Episodes: "1",
},
},
want: true,
},
{
name: "tv_match_season",
fields: &Release{
TorrentName: "Good show S01 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-GROUP2",
Category: "TV",
},
args: args{
filter: Filter{
Enabled: true,
MatchCategories: "*tv*",
Resolutions: []string{"1080p", "2160p"},
Sources: []string{"WEB-DL"},
Codecs: []string{"HEVC"},
MatchReleaseGroups: "GROUP1,GROUP2",
Seasons: "1,2",
},
},
want: true,
},
{
name: "tv_bad_match_season",
fields: &Release{
TorrentName: "Good show S02 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-GROUP2",
Category: "TV",
},
args: args{
filter: Filter{
Enabled: true,
MatchCategories: "*tv*",
Resolutions: []string{"1080p", "2160p"},
Sources: []string{"WEB-DL"},
Codecs: []string{"HEVC"},
MatchReleaseGroups: "GROUP1,GROUP2",
Seasons: "1",
},
},
want: false,
},
{
name: "match_uploader",
fields: &Release{
TorrentName: "Good show S02 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-GROUP2",
Category: "TV",
Uploader: "Uploader1",
},
args: args{
filter: Filter{
Enabled: true,
MatchCategories: "*tv*",
MatchUploaders: "Uploader1",
},
},
want: true,
},
{
name: "except_uploader",
fields: &Release{
TorrentName: "Good show S02 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-GROUP2",
Category: "TV",
Uploader: "Anonymous",
},
args: args{
filter: Filter{
Enabled: true,
MatchCategories: "*tv*",
ExceptUploaders: "Anonymous",
},
},
want: false,
},
{
name: "match_except_uploader",
fields: &Release{
TorrentName: "Good show S02 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-GROUP2",
Category: "TV",
Uploader: "Uploader1",
},
args: args{
filter: Filter{
Enabled: true,
MatchCategories: "*tv*",
MatchUploaders: "Uploader1,Uploader2",
ExceptUploaders: "Anonymous",
},
},
want: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
r := tt.fields // Release
_ = r.Parse() // Parse TorrentName into struct
got := r.CheckFilter(tt.args.filter)
assert.Equal(t, tt.want, got)
})
}
}


@ -0,0 +1,46 @@
package events
import (
"context"
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/internal/release"
"github.com/asaskevich/EventBus"
"github.com/rs/zerolog/log"
)
type Subscriber struct {
eventbus EventBus.Bus
releaseSvc release.Service
}
func NewSubscribers(eventbus EventBus.Bus, releaseSvc release.Service) Subscriber {
s := Subscriber{eventbus: eventbus, releaseSvc: releaseSvc}
s.Register()
return s
}
func (s Subscriber) Register() {
s.eventbus.Subscribe("release:update-push-status", s.releaseUpdatePushStatus)
s.eventbus.Subscribe("release:update-push-status-rejected", s.releaseUpdatePushStatusRejected)
}
func (s Subscriber) releaseUpdatePushStatus(id int64, status domain.ReleasePushStatus) {
log.Trace().Msgf("event: 'release:update-push-status' release ID '%v' update push status: '%v'", id, status)
err := s.releaseSvc.UpdatePushStatus(context.Background(), id, status)
if err != nil {
log.Error().Err(err).Msgf("events: error")
}
}
func (s Subscriber) releaseUpdatePushStatusRejected(id int64, rejections string) {
log.Trace().Msgf("event: 'release:update-push-status-rejected' release ID '%v' update push status rejected rejections: '%v'", id, rejections)
err := s.releaseSvc.UpdatePushStatusRejected(context.Background(), id, rejections)
if err != nil {
log.Error().Err(err).Msgf("events: error")
}
}
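The publishing side is not shown in this file; a hedged sketch of how a component holding the bus could emit the events these subscribers handle (topic names and argument types taken from the handlers above):

package events

import (
	"github.com/asaskevich/EventBus"

	"github.com/autobrr/autobrr/internal/domain"
)

// publishSketch is illustrative only.
func publishSketch(bus EventBus.Bus, releaseID int64) {
	// mark the push as approved
	bus.Publish("release:update-push-status", releaseID, domain.ReleasePushStatusApproved)

	// or mark it rejected, with the reasons joined into a single string
	bus.Publish("release:update-push-status-rejected", releaseID, "quality not wanted, already grabbed")
}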


@ -1,20 +1,16 @@
package filter
import (
"strings"
"github.com/rs/zerolog/log"
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/internal/indexer"
"github.com/autobrr/autobrr/pkg/wildcard"
)
type Service interface {
//FindFilter(announce domain.Announce) (*domain.Filter, error)
FindByID(filterID int) (*domain.Filter, error)
FindByIndexerIdentifier(announce domain.Announce) (*domain.Filter, error)
FindByIndexerIdentifier(indexer string) ([]domain.Filter, error)
FindAndCheckFilters(release *domain.Release) (bool, *domain.Filter, error)
ListFilters() ([]domain.Filter, error)
Store(filter domain.Filter) (*domain.Filter, error)
Update(filter domain.Filter) (*domain.Filter, error)
@ -65,7 +61,6 @@ func (s *service) FindByID(filterID int) (*domain.Filter, error) {
}
// find actions and attach
//actions, err := s.actionRepo.FindFilterActions(filter.ID)
actions, err := s.actionRepo.FindByFilterID(filter.ID)
if err != nil {
log.Error().Msgf("could not find filter actions: %+v", &filter.ID)
@ -80,79 +75,20 @@ func (s *service) FindByID(filterID int) (*domain.Filter, error) {
}
filter.Indexers = indexers
//log.Debug().Msgf("found filter: %+v", filter)
return filter, nil
}
func (s *service) FindByIndexerIdentifier(announce domain.Announce) (*domain.Filter, error) {
// get filter for tracker
filters, err := s.repo.FindByIndexerIdentifier(announce.Site)
func (s *service) FindByIndexerIdentifier(indexer string) ([]domain.Filter, error) {
// get filters for indexer
filters, err := s.repo.FindByIndexerIdentifier(indexer)
if err != nil {
log.Error().Err(err).Msgf("could not find filters for indexer: %v", announce.Site)
log.Error().Err(err).Msgf("could not find filters for indexer: %v", indexer)
return nil, err
}
// match against announce/releaseInfo
for _, filter := range filters {
// if match, return the filter
matchedFilter := s.checkFilter(filter, announce)
if matchedFilter {
log.Trace().Msgf("found matching filter: %+v", &filter)
log.Debug().Msgf("found matching filter: %v", &filter.Name)
// find actions and attach
actions, err := s.actionRepo.FindByFilterID(filter.ID)
if err != nil {
log.Error().Err(err).Msgf("could not find filter actions: %+v", &filter.ID)
return nil, err
}
// if no actions found, check next filter
if actions == nil {
continue
}
filter.Actions = actions
return &filter, nil
}
}
// if no match, return nil
return nil, nil
return filters, nil
}
//func (s *service) FindFilter(announce domain.Announce) (*domain.Filter, error) {
// // get filter for tracker
// filters, err := s.repo.FindFiltersForSite(announce.Site)
// if err != nil {
// return nil, err
// }
//
// // match against announce/releaseInfo
// for _, filter := range filters {
// // if match, return the filter
// matchedFilter := s.checkFilter(filter, announce)
// if matchedFilter {
//
// log.Debug().Msgf("found filter: %+v", &filter)
//
// // find actions and attach
// actions, err := s.actionRepo.FindByFilterID(filter.ID)
// if err != nil {
// log.Error().Msgf("could not find filter actions: %+v", &filter.ID)
// }
// filter.Actions = actions
//
// return &filter, nil
// }
// }
//
// // if no match, return nil
// return nil, nil
//}
func (s *service) Store(filter domain.Filter) (*domain.Filter, error) {
// validate data
@ -216,137 +152,49 @@ func (s *service) Delete(filterID int) error {
return nil
}
// checkFilter tries to match filter against announce
func (s *service) checkFilter(filter domain.Filter, announce domain.Announce) bool {
func (s *service) FindAndCheckFilters(release *domain.Release) (bool, *domain.Filter, error) {
if !filter.Enabled {
return false
filters, err := s.repo.FindByIndexerIdentifier(release.Indexer)
if err != nil {
log.Error().Err(err).Msgf("could not find filters for indexer: %v", release.Indexer)
return false, nil, err
}
if filter.Scene && announce.Scene != filter.Scene {
return false
}
// loop and check release to filter until match
for _, f := range filters {
log.Trace().Msgf("checking filter: %+v", f.Name)
if filter.Freeleech && announce.Freeleech != filter.Freeleech {
return false
}
matchedFilter := release.CheckFilter(f)
// if matched, attach actions and return the f
if matchedFilter {
//release.Filter = &f
//release.FilterID = f.ID
//release.FilterName = f.Name
if filter.Shows != "" && !checkFilterStrings(announce.TorrentName, filter.Shows) {
return false
}
log.Debug().Msgf("found and matched filter: %+v", f.Name)
//if filter.Seasons != "" && !checkFilterStrings(announce.TorrentName, filter.Seasons) {
// return false
//}
//
//if filter.Episodes != "" && !checkFilterStrings(announce.TorrentName, filter.Episodes) {
// return false
//}
// matchRelease
if filter.MatchReleases != "" && !checkFilterStrings(announce.TorrentName, filter.MatchReleases) {
return false
}
if filter.MatchReleaseGroups != "" && !checkFilterStrings(announce.TorrentName, filter.MatchReleaseGroups) {
return false
}
if filter.ExceptReleaseGroups != "" && checkFilterStrings(announce.TorrentName, filter.ExceptReleaseGroups) {
return false
}
if filter.MatchUploaders != "" && !checkFilterStrings(announce.Uploader, filter.MatchUploaders) {
return false
}
if filter.ExceptUploaders != "" && checkFilterStrings(announce.Uploader, filter.ExceptUploaders) {
return false
}
if len(filter.Resolutions) > 0 && !checkFilterSlice(announce.TorrentName, filter.Resolutions) {
return false
}
if len(filter.Codecs) > 0 && !checkFilterSlice(announce.TorrentName, filter.Codecs) {
return false
}
if len(filter.Sources) > 0 && !checkFilterSlice(announce.TorrentName, filter.Sources) {
return false
}
if len(filter.Containers) > 0 && !checkFilterSlice(announce.TorrentName, filter.Containers) {
return false
}
if filter.Years != "" && !checkFilterStrings(announce.TorrentName, filter.Years) {
return false
}
if filter.MatchCategories != "" && !checkFilterStrings(announce.Category, filter.MatchCategories) {
return false
}
if filter.ExceptCategories != "" && checkFilterStrings(announce.Category, filter.ExceptCategories) {
return false
}
if filter.Tags != "" && !checkFilterStrings(announce.Tags, filter.Tags) {
return false
}
if filter.ExceptTags != "" && checkFilterStrings(announce.Tags, filter.ExceptTags) {
return false
}
return true
}
func checkFilterSlice(name string, filterList []string) bool {
name = strings.ToLower(name)
for _, filter := range filterList {
filter = strings.ToLower(filter)
// check if line contains * or ?, if so try wildcard match, otherwise try substring match
a := strings.ContainsAny(filter, "?|*")
if a {
match := wildcard.Match(filter, name)
if match {
return true
// TODO do additional size check against indexer api or torrent for size
if release.AdditionalSizeCheckRequired {
log.Debug().Msgf("additional size check required for: %+v", f.Name)
// check if indexer = btn,ptp,ggn,red
// fetch api for data
// else download torrent and add to tmpPath
// if size != response.size
// r.RecheckSizeFilter(f)
//continue
}
} else {
b := strings.Contains(name, filter)
if b {
return true
// find actions and attach
actions, err := s.actionRepo.FindByFilterID(f.ID)
if err != nil {
log.Error().Err(err).Msgf("could not find actions for filter: %+v", f.Name)
}
f.Actions = actions
return true, &f, nil
}
}
return false
}
func checkFilterStrings(name string, filterList string) bool {
filterSplit := strings.Split(filterList, ",")
name = strings.ToLower(name)
for _, s := range filterSplit {
s = strings.ToLower(s)
// check if line contains * or ?, if so try wildcard match, otherwise try substring match
a := strings.ContainsAny(s, "?|*")
if a {
match := wildcard.Match(s, name)
if match {
return true
}
} else {
b := strings.Contains(name, s)
if b {
return true
}
}
}
return false
// if no match, return nil
return false, nil, nil
}
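For context, a hypothetical caller could tie this together roughly as below; the announce service is not part of this excerpt, so the function and its placement are assumptions, not the commit's actual code:

package announce // assumed location, illustrative only

import (
	"github.com/rs/zerolog/log"

	"github.com/autobrr/autobrr/internal/domain"
	"github.com/autobrr/autobrr/internal/filter"
	"github.com/autobrr/autobrr/internal/release"
)

// handleRelease finds a matching filter (with actions attached), stamps it on
// the release and hands the release off for processing.
func handleRelease(filterSvc filter.Service, releaseSvc release.Service, r domain.Release) {
	matched, f, err := filterSvc.FindAndCheckFilters(&r)
	if err != nil || !matched || f == nil {
		return
	}

	r.Filter = f
	r.FilterID = f.ID
	r.FilterName = f.Name

	if err := releaseSvc.Process(r); err != nil {
		log.Error().Err(err).Msgf("could not process release: %v", r.TorrentName)
	}
}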

File diff suppressed because it is too large

internal/http/release.go (new file, 90 lines)

@ -0,0 +1,90 @@
package http
import (
"context"
"net/http"
"strconv"
"github.com/autobrr/autobrr/internal/domain"
"github.com/go-chi/chi"
)
type releaseService interface {
Find(ctx context.Context, query domain.QueryParams) (res []domain.Release, nextCursor int64, err error)
Stats(ctx context.Context) (*domain.ReleaseStats, error)
}
type releaseHandler struct {
encoder encoder
service releaseService
}
func newReleaseHandler(encoder encoder, service releaseService) *releaseHandler {
return &releaseHandler{
encoder: encoder,
service: service,
}
}
func (h releaseHandler) Routes(r chi.Router) {
r.Get("/", h.findReleases)
r.Get("/stats", h.getStats)
}
func (h releaseHandler) findReleases(w http.ResponseWriter, r *http.Request) {
limitP := r.URL.Query().Get("limit")
limit, err := strconv.Atoi(limitP)
if err != nil && limitP != "" {
h.encoder.StatusResponse(r.Context(), w, map[string]interface{}{
"code": "BAD_REQUEST_PARAMS",
"message": "limit parameter is invalid",
}, http.StatusBadRequest)
}
if limit == 0 {
limit = 20
}
cursorP := r.URL.Query().Get("cursor")
cursor, err := strconv.Atoi(cursorP)
if err != nil && cursorP != "" {
h.encoder.StatusResponse(r.Context(), w, map[string]interface{}{
"code": "BAD_REQUEST_PARAMS",
"message": "cursor parameter is invalid",
}, http.StatusBadRequest)
}
query := domain.QueryParams{
Limit: uint64(limit),
Cursor: uint64(cursor),
Sort: nil,
//Filter: "",
}
releases, nextCursor, err := h.service.Find(r.Context(), query)
if err != nil {
h.encoder.StatusNotFound(r.Context(), w)
return
}
ret := struct {
Data []domain.Release `json:"data"`
NextCursor int64 `json:"next_cursor"`
}{
Data: releases,
NextCursor: nextCursor,
}
h.encoder.StatusResponse(r.Context(), w, ret, http.StatusOK)
}
func (h releaseHandler) getStats(w http.ResponseWriter, r *http.Request) {
stats, err := h.service.Stats(r.Context())
if err != nil {
h.encoder.StatusNotFound(r.Context(), w)
return
}
h.encoder.StatusResponse(r.Context(), w, stats, http.StatusOK)
}
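A quick sketch of exercising the new endpoints; the /api/release prefix matches the web client further down, while the host, port and the lack of authentication are assumptions for illustration only:

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// assumed local address; the real one comes from autobrr's config,
	// and the API normally sits behind session authentication
	base := "http://localhost:8989/api/release"

	for _, url := range []string{base + "?limit=20", base + "/stats"} {
		resp, err := http.Get(url)
		if err != nil {
			fmt.Println("request failed:", err)
			continue
		}
		body, _ := io.ReadAll(resp.Body)
		resp.Body.Close()
		fmt.Printf("%s -> %s\n", url, body)
	}
}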


@ -25,9 +25,10 @@ type Server struct {
filterService filterService
indexerService indexerService
ircService ircService
releaseService releaseService
}
func NewServer(sse *sse.Server, address string, baseUrl string, actionService actionService, authService authService, downloadClientSvc downloadClientService, filterSvc filterService, indexerSvc indexerService, ircSvc ircService) Server {
func NewServer(sse *sse.Server, address string, baseUrl string, actionService actionService, authService authService, downloadClientSvc downloadClientService, filterSvc filterService, indexerSvc indexerService, ircSvc ircService, releaseSvc releaseService) Server {
return Server{
sse: sse,
address: address,
@ -39,6 +40,7 @@ func NewServer(sse *sse.Server, address string, baseUrl string, actionService ac
filterService: filterSvc,
indexerService: indexerSvc,
ircService: ircSvc,
releaseService: releaseSvc,
}
}
@ -94,6 +96,7 @@ func (s Server) Handler() http.Handler {
r.Route("/filters", newFilterHandler(encoder, s.filterService).Routes)
r.Route("/irc", newIrcHandler(encoder, s.ircService).Routes)
r.Route("/indexer", newIndexerHandler(encoder, s.indexerService, s.ircService).Routes)
r.Route("/release", newReleaseHandler(encoder, s.releaseService).Routes)
r.HandleFunc("/events", func(w http.ResponseWriter, r *http.Request) {
@ -105,7 +108,7 @@ func (s Server) Handler() http.Handler {
"X-Accel-Buffering": "no",
}
s.sse.HTTPHandler(w, r)
s.sse.ServeHTTP(w, r)
})
})
})


@ -1,41 +0,0 @@
package release
import (
"fmt"
"github.com/rs/zerolog/log"
"github.com/autobrr/autobrr/internal/action"
"github.com/autobrr/autobrr/internal/domain"
)
type Service interface {
Process(announce domain.Announce) error
}
type service struct {
actionSvc action.Service
}
func NewService(actionService action.Service) Service {
return &service{actionSvc: actionService}
}
func (s *service) Process(announce domain.Announce) error {
log.Trace().Msgf("start to process release: %+v", announce)
if announce.Filter.Actions == nil {
return fmt.Errorf("no actions for filter: %v", announce.Filter.Name)
}
// smart episode?
// run actions (watchFolder, test, exec, qBittorrent, Deluge etc.)
err := s.actionSvc.RunActions(announce.Filter.Actions, announce)
if err != nil {
log.Error().Stack().Err(err).Msgf("error running actions for filter: %v", announce.Filter.Name)
return err
}
return nil
}


@ -0,0 +1,99 @@
package release
import (
"context"
"fmt"
"github.com/rs/zerolog/log"
"github.com/autobrr/autobrr/internal/action"
"github.com/autobrr/autobrr/internal/domain"
)
type Service interface {
Find(ctx context.Context, query domain.QueryParams) (res []domain.Release, nextCursor int64, err error)
Stats(ctx context.Context) (*domain.ReleaseStats, error)
Store(ctx context.Context, release *domain.Release) error
UpdatePushStatus(ctx context.Context, id int64, status domain.ReleasePushStatus) error
UpdatePushStatusRejected(ctx context.Context, id int64, rejections string) error
Process(release domain.Release) error
}
type service struct {
repo domain.ReleaseRepo
actionSvc action.Service
}
func NewService(repo domain.ReleaseRepo, actionService action.Service) Service {
return &service{
repo: repo,
actionSvc: actionService,
}
}
func (s *service) Find(ctx context.Context, query domain.QueryParams) (res []domain.Release, nextCursor int64, err error) {
//releases, err := s.repo.Find(ctx, query)
res, nextCursor, err = s.repo.Find(ctx, query)
if err != nil {
//return nil, err
return
}
return
//return releases, nil
}
func (s *service) Stats(ctx context.Context) (*domain.ReleaseStats, error) {
stats, err := s.repo.Stats(ctx)
if err != nil {
return nil, err
}
return stats, nil
}
func (s *service) Store(ctx context.Context, release *domain.Release) error {
_, err := s.repo.Store(ctx, release)
if err != nil {
return err
}
return nil
}
func (s *service) UpdatePushStatus(ctx context.Context, id int64, status domain.ReleasePushStatus) error {
err := s.repo.UpdatePushStatus(ctx, id, status)
if err != nil {
return err
}
return nil
}
func (s *service) UpdatePushStatusRejected(ctx context.Context, id int64, rejections string) error {
err := s.repo.UpdatePushStatusRejected(ctx, id, rejections)
if err != nil {
return err
}
return nil
}
func (s *service) Process(release domain.Release) error {
log.Trace().Msgf("start to process release: %+v", release)
if release.Filter.Actions == nil {
return fmt.Errorf("no actions for filter: %v", release.Filter.Name)
}
// smart episode?
// run actions (watchFolder, test, exec, qBittorrent, Deluge etc.)
err := s.actionSvc.RunActions(release.Filter.Actions, release)
if err != nil {
log.Error().Stack().Err(err).Msgf("error running actions for filter: %v", release.Filter.Name)
return err
}
return nil
}


@ -22,7 +22,7 @@ type Config struct {
type Client interface {
Test() (*SystemStatusResponse, error)
Push(release Release) (bool, error)
Push(release Release) (bool, string, error)
}
type client struct {
@ -93,15 +93,15 @@ func (c *client) Test() (*SystemStatusResponse, error) {
return &response, nil
}
func (c *client) Push(release Release) (bool, error) {
func (c *client) Push(release Release) (bool, string, error) {
res, err := c.post("release/push", release)
if err != nil {
log.Error().Stack().Err(err).Msg("lidarr client post error")
return false, err
return false, "", err
}
if res == nil {
return false, nil
return false, "", nil
}
defer res.Body.Close()
@ -109,14 +109,14 @@ func (c *client) Push(release Release) (bool, error) {
body, err := io.ReadAll(res.Body)
if err != nil {
log.Error().Stack().Err(err).Msg("lidarr client error reading body")
return false, err
return false, "", err
}
pushResponse := PushResponse{}
err = json.Unmarshal(body, &pushResponse)
if err != nil {
log.Error().Stack().Err(err).Msg("lidarr client error json unmarshal")
return false, err
return false, "", err
}
log.Trace().Msgf("lidarr release/push response body: %+v", string(body))
@ -126,8 +126,8 @@ func (c *client) Push(release Release) (bool, error) {
rejections := strings.Join(pushResponse.Rejections, ", ")
log.Trace().Msgf("lidarr push rejected: %s - reasons: %q", release.Title, rejections)
return false, nil
return false, rejections, nil
}
return true, nil
return true, "", nil
}
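The updated callers are not visible in this excerpt; a hedged sketch of consuming the new three-value return (package and import path assumed from the log messages above):

package sketch // illustrative only

import (
	"fmt"

	"github.com/autobrr/autobrr/pkg/lidarr"
)

func pushRelease(c lidarr.Client, r lidarr.Release) error {
	approved, rejections, err := c.Push(r)
	if err != nil {
		return err
	}
	if !approved {
		// rejections arrives as one comma-separated string, ready to store or publish
		fmt.Println("push rejected:", rejections)
		return nil
	}
	fmt.Println("push approved")
	return nil
}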


@ -103,7 +103,7 @@ func Test_client_Push(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
c := New(tt.fields.config)
_, err := c.Push(tt.args.release)
_, _, err := c.Push(tt.args.release)
if tt.wantErr && assert.Error(t, err) {
assert.Equal(t, tt.err, err)
}


@ -22,7 +22,7 @@ type Config struct {
type Client interface {
Test() (*SystemStatusResponse, error)
Push(release Release) (bool, error)
Push(release Release) (bool, string, error)
}
type client struct {
@ -92,15 +92,15 @@ func (c *client) Test() (*SystemStatusResponse, error) {
return &response, nil
}
func (c *client) Push(release Release) (bool, error) {
func (c *client) Push(release Release) (bool, string, error) {
res, err := c.post("release/push", release)
if err != nil {
log.Error().Stack().Err(err).Msg("radarr client post error")
return false, err
return false, "", err
}
if res == nil {
return false, nil
return false, "", nil
}
defer res.Body.Close()
@ -108,14 +108,14 @@ func (c *client) Push(release Release) (bool, error) {
body, err := io.ReadAll(res.Body)
if err != nil {
log.Error().Stack().Err(err).Msg("radarr client error reading body")
return false, err
return false, "", err
}
pushResponse := make([]PushResponse, 0)
err = json.Unmarshal(body, &pushResponse)
if err != nil {
log.Error().Stack().Err(err).Msg("radarr client error json unmarshal")
return false, err
return false, "", err
}
log.Trace().Msgf("radarr release/push response body: %+v", string(body))
@ -125,9 +125,9 @@ func (c *client) Push(release Release) (bool, error) {
rejections := strings.Join(pushResponse[0].Rejections, ", ")
log.Trace().Msgf("radarr push rejected: %s - reasons: %q", release.Title, rejections)
return false, nil
return false, rejections, nil
}
// success true
return true, nil
return true, "", nil
}


@ -104,7 +104,7 @@ func Test_client_Push(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
c := New(tt.fields.config)
_, err := c.Push(tt.args.release)
_, _, err := c.Push(tt.args.release)
if tt.wantErr && assert.Error(t, err) {
assert.Equal(t, tt.err, err)
}


@ -22,7 +22,7 @@ type Config struct {
type Client interface {
Test() (*SystemStatusResponse, error)
Push(release Release) (bool, error)
Push(release Release) (bool, string, error)
}
type client struct {
@ -93,15 +93,15 @@ func (c *client) Test() (*SystemStatusResponse, error) {
return &response, nil
}
func (c *client) Push(release Release) (bool, error) {
func (c *client) Push(release Release) (bool, string, error) {
res, err := c.post("release/push", release)
if err != nil {
log.Error().Stack().Err(err).Msg("sonarr client post error")
return false, err
return false, "", err
}
if res == nil {
return false, nil
return false, "", nil
}
defer res.Body.Close()
@ -109,14 +109,14 @@ func (c *client) Push(release Release) (bool, error) {
body, err := io.ReadAll(res.Body)
if err != nil {
log.Error().Stack().Err(err).Msg("sonarr client error reading body")
return false, err
return false, "", err
}
pushResponse := make([]PushResponse, 0)
err = json.Unmarshal(body, &pushResponse)
if err != nil {
log.Error().Stack().Err(err).Msg("sonarr client error json unmarshal")
return false, err
return false, "", err
}
log.Trace().Msgf("sonarr release/push response body: %+v", string(body))
@ -126,9 +126,9 @@ func (c *client) Push(release Release) (bool, error) {
rejections := strings.Join(pushResponse[0].Rejections, ", ")
log.Trace().Msgf("sonarr push rejected: %s - reasons: %q", release.Title, rejections)
return false, nil
return false, rejections, nil
}
// successful push
return true, nil
return true, "", nil
}


@ -104,7 +104,7 @@ func Test_client_Push(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
c := New(tt.fields.config)
_, err := c.Push(tt.args.release)
_, _, err := c.Push(tt.args.release)
if tt.wantErr && assert.Error(t, err) {
assert.Equal(t, tt.err, err)
}


@ -15,6 +15,7 @@
"@types/node": "^12.0.0",
"@types/react": "^17.0.0",
"@types/react-dom": "^17.0.0",
"date-fns": "^2.25.0",
"final-form": "^4.20.2",
"final-form-arrays": "^3.0.2",
"formik": "^2.2.9",
@ -29,6 +30,7 @@
"react-router-dom": "^5.2.0",
"react-scripts": "4.0.3",
"react-select": "5.0.0-beta.0",
"react-table": "^7.7.0",
"recoil": "^0.4.0",
"typescript": "^4.1.2",
"web-vitals": "^1.0.1"
@ -60,6 +62,7 @@
"devDependencies": {
"@tailwindcss/forms": "^0.3.2",
"@types/react-router-dom": "^5.1.7",
"@types/react-table": "^7.7.7",
"autoprefixer": "^9",
"postcss": "^7",
"tailwindcss": "npm:@tailwindcss/postcss7-compat"


@ -108,6 +108,10 @@ const APIClient = {
},
events: {
logs: () => new EventSource(`${sseBaseUrl()}api/events?stream=logs`, { withCredentials: true })
},
release: {
find: (query?: string) => appClient.Get(`api/release${query}`),
stats: () => appClient.Get(`api/release/stats`)
}
}


@ -169,3 +169,30 @@ export interface Config {
log_path: string;
base_url: string;
}
export interface Release {
id: number;
filter_status: string;
push_status: string;
rejections: string[];
indexer: string;
filter: string;
protocol: string;
title: string;
size: number;
raw: string;
timestamp: Date
}
export interface ReleaseFindResponse {
data: Release[];
next_cursor: number;
}
export interface ReleaseStats {
total_count: number;
filtered_count: number;
filter_rejected_count: number;
push_approved_count: number;
push_rejected_count: number;
}

web/src/domain/react-table-config.d.ts (new vendored file, 120 lines)

@ -0,0 +1,120 @@
import {
UseColumnOrderInstanceProps,
UseColumnOrderState,
UseExpandedHooks,
UseExpandedInstanceProps,
UseExpandedOptions,
UseExpandedRowProps,
UseExpandedState,
UseFiltersColumnOptions,
UseFiltersColumnProps,
UseFiltersInstanceProps,
UseFiltersOptions,
UseFiltersState,
UseGlobalFiltersColumnOptions,
UseGlobalFiltersInstanceProps,
UseGlobalFiltersOptions,
UseGlobalFiltersState,
UseGroupByCellProps,
UseGroupByColumnOptions,
UseGroupByColumnProps,
UseGroupByHooks,
UseGroupByInstanceProps,
UseGroupByOptions,
UseGroupByRowProps,
UseGroupByState,
UsePaginationInstanceProps,
UsePaginationOptions,
UsePaginationState,
UseResizeColumnsColumnOptions,
UseResizeColumnsColumnProps,
UseResizeColumnsOptions,
UseResizeColumnsState,
UseRowSelectHooks,
UseRowSelectInstanceProps,
UseRowSelectOptions,
UseRowSelectRowProps,
UseRowSelectState,
UseRowStateCellProps,
UseRowStateInstanceProps,
UseRowStateOptions,
UseRowStateRowProps,
UseRowStateState,
UseSortByColumnOptions,
UseSortByColumnProps,
UseSortByHooks,
UseSortByInstanceProps,
UseSortByOptions,
UseSortByState
} from 'react-table'
declare module 'react-table' {
// take this file as-is, or comment out the sections that don't apply to your plugin configuration
export interface TableOptions<D extends Record<string, unknown>>
extends UseExpandedOptions<D>,
UseFiltersOptions<D>,
UseGlobalFiltersOptions<D>,
UseGroupByOptions<D>,
UsePaginationOptions<D>,
UseResizeColumnsOptions<D>,
UseRowSelectOptions<D>,
UseRowStateOptions<D>,
UseSortByOptions<D>,
// note that having Record here allows you to add anything to the options, this matches the spirit of the
// underlying js library, but might be cleaner if it's replaced by a more specific type that matches your
// feature set, this is a safe default.
Record<string, any> {}
export interface Hooks<D extends Record<string, unknown> = Record<string, unknown>>
extends UseExpandedHooks<D>,
UseGroupByHooks<D>,
UseRowSelectHooks<D>,
UseSortByHooks<D> {}
export interface TableInstance<D extends Record<string, unknown> = Record<string, unknown>>
extends UseColumnOrderInstanceProps<D>,
UseExpandedInstanceProps<D>,
UseFiltersInstanceProps<D>,
UseGlobalFiltersInstanceProps<D>,
UseGroupByInstanceProps<D>,
UsePaginationInstanceProps<D>,
UseRowSelectInstanceProps<D>,
UseRowStateInstanceProps<D>,
UseSortByInstanceProps<D> {}
export interface TableState<D extends Record<string, unknown> = Record<string, unknown>>
extends UseColumnOrderState<D>,
UseExpandedState<D>,
UseFiltersState<D>,
UseGlobalFiltersState<D>,
UseGroupByState<D>,
UsePaginationState<D>,
UseResizeColumnsState<D>,
UseRowSelectState<D>,
UseRowStateState<D>,
UseSortByState<D> {}
export interface ColumnInterface<D extends Record<string, unknown> = Record<string, unknown>>
extends UseFiltersColumnOptions<D>,
UseGlobalFiltersColumnOptions<D>,
UseGroupByColumnOptions<D>,
UseResizeColumnsColumnOptions<D>,
UseSortByColumnOptions<D> {}
export interface ColumnInstance<D extends Record<string, unknown> = Record<string, unknown>>
extends UseFiltersColumnProps<D>,
UseGroupByColumnProps<D>,
UseResizeColumnsColumnProps<D>,
UseSortByColumnProps<D> {}
export interface Cell<D extends Record<string, unknown> = Record<string, unknown>, V = any>
extends UseGroupByCellProps<D>,
UseRowStateCellProps<D> {}
export interface Row<D extends Record<string, unknown> = Record<string, unknown>>
extends UseExpandedRowProps<D>,
UseGroupByRowProps<D>,
UseRowSelectRowProps<D>,
UseRowStateRowProps<D> {}
}


@ -1,16 +1,696 @@
import formatDistanceToNowStrict from 'date-fns/formatDistanceToNowStrict'
import React from 'react'
import App from '../App'
import { useTable, useFilters, useGlobalFilter, useSortBy, usePagination } from 'react-table'
import APIClient from '../api/APIClient'
import { useQuery } from 'react-query'
import { ReleaseFindResponse, ReleaseStats } from '../domain/interfaces'
import { EmptyListState } from '../components/EmptyListState'
export function Dashboard() {
return (
<main className="-mt-48">
<header className="py-10">
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8">
<h1 className="text-3xl font-bold text-white capitalize">Dashboard</h1>
</div>
</header>
<div className="max-w-7xl mx-auto pb-12 px-4 sm:px-6 lg:px-8">
<div className="bg-white dark:bg-gray-800 rounded-lg shadow px-5 py-6 sm:px-6">
<div className="border-4 border-dashed border-gray-200 dark:border-gray-700 rounded-lg h-96" />
</div>
return (
<main className="py-10 -mt-48">
<div className="px-4 pb-8 mx-auto max-w-7xl sm:px-6 lg:px-8">
<Stats />
<DataTablee />
</div>
</main>
)
}
const StatsItem = ({ name, stat }: any) => (
<div
className="relative px-4 pt-5 pb-2 overflow-hidden bg-white rounded-lg shadow dark:bg-gray-800 sm:pt-6 sm:px-6"
title="All time"
>
<dt>
<p className="pb-1 text-sm font-medium text-gray-500 truncate dark:text-gray-600">{name}</p>
</dt>
<dd className="flex items-baseline pb-6 sm:pb-7">
<p className="text-2xl font-semibold text-gray-900 dark:text-gray-300">{stat}</p>
</dd>
</div>
)
function Stats() {
const { isLoading, data } = useQuery<ReleaseStats, Error>('dash_release_stats', () => APIClient.release.stats(),
{
refetchOnWindowFocus: false
}
)
if (isLoading) {
return null
}
return (
<div>
<h3 className="text-lg font-medium leading-6 text-gray-900 dark:text-gray-600">Stats</h3>
<dl className="grid grid-cols-1 gap-5 mt-5 sm:grid-cols-2 lg:grid-cols-3">
<StatsItem name="Filtered Releases" stat={data?.filtered_count} />
{/* <StatsItem name="Filter Rejected Releases" stat={data?.filter_rejected_count} /> */}
<StatsItem name="Rejected Pushes" stat={data?.push_rejected_count} />
<StatsItem name="Approved Pushes" stat={data?.push_approved_count} />
</dl>
</div>
)
}
/* function RecentActivity() {
let data: any[] = [
{
id: 1,
status: "FILTERED",
created_at: "2021-10-16 20:25:26",
indexer: "tl",
title: "That movie 2019 1080p x264-GROUP",
},
{
id: 2,
status: "PUSH_APPROVED",
created_at: "2021-10-15 16:16:23",
indexer: "tl",
title: "That great movie 2009 1080p x264-1GROUP",
},
{
id: 3,
status: "FILTER_REJECTED",
created_at: "2021-10-15 10:16:23",
indexer: "tl",
title: "Movie 1 2002 720p x264-1GROUP",
},
{
id: 4,
status: "PUSH_APPROVED",
created_at: "2021-10-14 16:16:23",
indexer: "tl",
title: "That bad movie 2019 2160p x265-1GROUP",
},
{
id: 5,
status: "PUSH_REJECTED",
created_at: "2021-10-13 16:16:23",
indexer: "tl",
title: "That really bad movie 20010 1080p x264-GROUP2",
},
]
return (
<div className="flex flex-col mt-12">
<h3 className="text-lg font-medium leading-6 text-gray-900 dark:text-gray-600">Recent activity</h3>
<div className="mt-3 overflow-x-auto sm:-mx-6 lg:-mx-8">
<div className="inline-block min-w-full py-2 sm:px-6 lg:px-8">
<div className="overflow-hidden light:shadow light:border-b light:border-gray-200 sm:rounded-lg">
<table className="min-w-full divide-y divide-gray-200 dark:divide-gray-700">
<thead className="light:bg-gray-50 dark:bg-gray-800">
<tr>
<th
scope="col"
className="px-6 py-3 text-xs font-medium tracking-wider text-left text-gray-500 uppercase dark:text-gray-400"
>
Age
</th>
<th
scope="col"
className="px-6 py-3 text-xs font-medium tracking-wider text-left text-gray-500 uppercase dark:text-gray-400"
>
Release
</th>
<th
scope="col"
className="px-6 py-3 text-xs font-medium tracking-wider text-left text-gray-500 uppercase dark:text-gray-400"
>
Status
</th>
<th
scope="col"
className="px-6 py-3 text-xs font-medium tracking-wider text-left text-gray-500 uppercase dark:text-gray-400"
>
Indexer
</th>
</tr>
</thead>
<tbody className="bg-gray-800 divide-y divide-gray-200 light:bg-white dark:divide-gray-700">
{data && data.length > 0 ?
data.map((release: any, idx) => (
<ListItem key={idx} idx={idx} release={release} />
))
: <span>No recent activity</span>}
</tbody>
</table>
<nav
className="flex items-center justify-between px-4 py-3 bg-white border-t border-gray-200 dark:bg-gray-800 dark:border-gray-700 sm:px-6"
aria-label="Pagination"
>
<div className="hidden sm:block">
<p className="text-sm text-gray-700 dark:text-gray-500">
Showing <span className="font-medium">1</span> to <span className="font-medium">10</span> of{' '}
<span className="font-medium">20</span> results
</p>
</div>
<div className="flex items-center justify-between flex-1 sm:justify-end">
<p className="relative text-sm text-gray-700 dark:text-gray-500">
Show <span className="font-medium">10</span>
</p>
<Menu as="div" className="relative text-left">
<Menu.Button className="flex items-center text-sm font-medium text-gray-900 rounded-md focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-600">
<span>Show</span>
<ChevronDownIcon className="w-5 h-5 ml-1 text-gray-500" aria-hidden="true" />
</Menu.Button>
<Transition
as={Fragment}
enter="transition ease-out duration-100"
enterFrom="transform opacity-0 scale-95"
enterTo="transform opacity-100 scale-100"
leave="transition ease-in duration-75"
leaveFrom="transform opacity-100 scale-100"
leaveTo="transform opacity-0 scale-95"
>
<Menu.Items className="absolute right-0 z-30 w-40 mt-2 origin-top-right bg-white rounded-md shadow-lg ring-1 ring-black ring-opacity-5 focus:outline-none">
<div className="py-1">
{[5, 10, 25, 50].map((child) => (
<Menu.Item key={child}>
{({ active }) => (
<a
// href={child.href}
className={classNames(
active ? 'bg-gray-100' : '',
'block px-4 py-2 text-sm text-gray-700'
)}
>
{child}
</a>
)}
</Menu.Item>
))}
</div>
</Menu.Items>
</Transition>
</Menu>
<a
href="#"
// className="px-4 py-2 mr-4 text-sm font-medium text-gray-700 bg-white border border-gray-300 rounded-md shadow-sm dark:bg-gray-700 dark:border-gray-600 hover:bg-gray-50 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-indigo-500 dark:focus:ring-blue-500"
className="relative inline-flex items-center px-4 py-2 ml-5 text-sm font-medium text-gray-700 bg-white border border-gray-300 rounded-md dark:border-gray-600 dark:text-gray-400 dark:bg-gray-700 hover:bg-gray-50 dark:hover:bg-gray-600"
>
Previous
</a>
<a
href="#"
// className="relative inline-flex items-center px-4 py-2 ml-3 text-sm font-medium text-gray-700 bg-white border border-gray-300 rounded-md hover:bg-gray-50"
className="relative inline-flex items-center px-4 py-2 ml-3 text-sm font-medium text-gray-700 bg-white border border-gray-300 rounded-md dark:border-gray-600 dark:text-gray-400 dark:bg-gray-700 hover:bg-gray-50 dark:hover:bg-gray-600"
>
Next
</a>
</div>
</nav>
</div>
</div>
</div>
</div>
)
} */
/* const ListItem = ({ idx, release }: any) => {
const formatDate = formatDistanceToNowStrict(
new Date(release.created_at),
{ addSuffix: true }
)
return (
<tr key={release.id} className={idx % 2 === 0 ? 'light:bg-white' : 'light:bg-gray-50'}>
<td className="px-6 py-4 text-sm text-gray-500 whitespace-nowrap dark:text-gray-400" title={release.created_at}>{formatDate}</td>
<td className="px-6 py-4 text-sm font-medium text-gray-900 whitespace-nowrap dark:text-gray-300">{release.title}</td>
<td className="px-6 py-4 text-sm text-gray-500 whitespace-nowrap dark:text-gray-300">{statusMap[release.status]}</td>
<td className="px-6 py-4 text-sm text-gray-500 whitespace-nowrap dark:text-gray-300">{release.indexer}</td>
</tr>
)
} */
/*
const getData = () => {
const data: any[] = [
{
id: 1,
status: "FILTERED",
created_at: "2021-10-16 20:25:26",
indexer: "tl",
title: "That movie 2019 1080p x264-GROUP",
},
{
id: 2,
status: "PUSH_APPROVED",
created_at: "2021-10-15 16:16:23",
indexer: "tl",
title: "That great movie 2009 1080p x264-1GROUP",
},
{
id: 3,
status: "FILTER_REJECTED",
created_at: "2021-10-15 10:16:23",
indexer: "tl",
title: "Movie 1 2002 720p x264-1GROUP",
},
{
id: 4,
status: "PUSH_APPROVED",
created_at: "2021-10-14 16:16:23",
indexer: "tl",
title: "That bad movie 2019 2160p x265-1GROUP",
},
{
id: 5,
status: "PUSH_REJECTED",
created_at: "2021-10-13 16:16:23",
indexer: "tl",
title: "That really bad movie 20010 1080p x264-GROUP2",
},
]
return [...data, ...data, ...data]
} */
// Define a default UI for filtering
/* function GlobalFilter({
preGlobalFilteredRows,
globalFilter,
setGlobalFilter,
}: any) {
const count = preGlobalFilteredRows.length
const [value, setValue] = React.useState(globalFilter)
const onChange = useAsyncDebounce((value: any) => {
setGlobalFilter(value || undefined)
}, 200)
return (
<label className="flex items-baseline gap-x-2">
<span className="text-gray-700">Search: </span>
<input
type="text"
className="border-gray-300 rounded-md shadow-sm focus:border-indigo-300 focus:ring focus:ring-indigo-200 focus:ring-opacity-50"
value={value || ""}
onChange={e => {
setValue(e.target.value);
onChange(e.target.value);
}}
placeholder={`${count} records...`}
/>
</label>
)
} */
// This is a custom filter UI for selecting
// a unique option from a list
export function SelectColumnFilter({
column: { filterValue, setFilter, preFilteredRows, id, render },
}: any) {
// Calculate the options for filtering
// using the preFilteredRows
const options = React.useMemo(() => {
const options: any = new Set()
preFilteredRows.forEach((row: { values: { [x: string]: unknown } }) => {
options.add(row.values[id])
})
return [...options.values()]
}, [id, preFilteredRows])
// Render a multi-select box
return (
<label className="flex items-baseline gap-x-2">
<span className="text-gray-700">{render("Header")}: </span>
<select
className="border-gray-300 rounded-md shadow-sm focus:border-indigo-300 focus:ring focus:ring-indigo-200 focus:ring-opacity-50"
name={id}
id={id}
        value={filterValue || ""}
onChange={e => {
setFilter(e.target.value || undefined)
}}
>
<option value="">All</option>
{options.map((option, i) => (
<option key={i} value={option}>
{option}
</option>
))}
</select>
</label>
)
}
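// Illustrative only: how a column opts in to SelectColumnFilter. This mirrors the
// "Indexer" column defined in DataTablee further down; the names here are examples,
// not additional columns shipped in this commit. The dropdown only renders when the
// table instance is created with the useFilters plugin hook (see Table below).
//
//   const indexerColumn = {
//     Header: "Indexer",
//     accessor: "indexer",
//     Filter: SelectColumnFilter, // renders the dropdown in the table header
//     filter: "includes",         // built-in react-table filter type
//   }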
// export function StatusPill({ value }: any) {
// const status = value ? value.toLowerCase() : "unknown";
// return (
// <span
// className={
// classNames(
// "px-3 py-1 uppercase leading-wide font-bold text-xs rounded-full shadow-sm",
// status.startsWith("active") ? "bg-green-100 text-green-800" : "",
// status.startsWith("inactive") ? "bg-yellow-100 text-yellow-800" : "",
// status.startsWith("offline") ? "bg-red-100 text-red-800" : "",
// )
// }
// >
// {status}
// </span>
// );
// };
export function StatusPill({ value }: any) {
const statusMap: any = {
"FILTER_APPROVED": <span className="inline-flex items-center px-2 py-0.5 rounded text-xs font-semibold uppercase bg-blue-100 text-blue-800 ">Approved</span>,
"FILTER_REJECTED": <span className="inline-flex items-center px-2 py-0.5 rounded text-xs font-semibold uppercase bg-red-100 text-red-800">Rejected</span>,
"PUSH_REJECTED": <span className="inline-flex items-center px-2 py-0.5 rounded text-xs font-semibold uppercase bg-pink-100 text-pink-800">Rejected</span>,
"PUSH_APPROVED": <span className="inline-flex items-center px-2 py-0.5 rounded text-xs font-semibold uppercase bg-green-100 text-green-800">Approved</span>,
"PENDING": <span className="inline-flex items-center px-2 py-0.5 rounded text-xs font-semibold uppercase bg-yellow-100 text-yellow-800">PENDING</span>,
"MIXED": <span className="inline-flex items-center px-2 py-0.5 rounded text-xs font-semibold uppercase bg-yellow-100 text-yellow-800">MIXED</span>,
}
  // Fall back to rendering the raw status text for values not present in the map
  return statusMap[value] ?? <span className="inline-flex items-center px-2 py-0.5 rounded text-xs font-semibold uppercase bg-gray-100 text-gray-800">{value}</span>;
}
export function AgeCell({ value, column, row }: any) {
const formatDate = formatDistanceToNowStrict(
new Date(value),
{ addSuffix: true }
)
return (
<div className="text-sm text-gray-500" title={value}>{formatDate}</div>
)
}
export function ReleaseCell({ value, column, row }: any) {
return (
<div className="text-sm font-medium text-gray-900 dark:text-gray-300">{value}</div>
)
}
function Table({ columns, data }: any) {
// Use the state and functions returned from useTable to build your UI
const {
getTableProps,
getTableBodyProps,
headerGroups,
prepareRow,
page, // Instead of using 'rows', we'll use page,
// which has only the rows for the active page
// The rest of these things are super handy, too ;)
// canPreviousPage,
// canNextPage,
// pageOptions,
// pageCount,
// gotoPage,
// nextPage,
// previousPage,
// setPageSize,
// state,
// preGlobalFilteredRows,
// setGlobalFilter,
  } = useTable(
    {
      columns,
      data,
    },
    useFilters,
    useGlobalFilter,
    useSortBy,
    usePagination,
  )
// Render the UI for your table
return (
<>
<div className="sm:flex sm:gap-x-2">
{/* <GlobalFilter
preGlobalFilteredRows={preGlobalFilteredRows}
globalFilter={state.globalFilter}
setGlobalFilter={setGlobalFilter}
/> */}
{/* {headerGroups.map((headerGroup: { headers: any[] }) =>
headerGroup.headers.map((column) =>
column.Filter ? (
<div className="mt-2 sm:mt-0" key={column.id}>
{column.render("Filter")}
</div>
) : null
)
)} */}
</div>
{page.length > 0 ?
<div className="flex flex-col mt-4">
<div className="-mx-4 -my-2 overflow-x-auto sm:-mx-6 lg:-mx-8">
<div className="inline-block min-w-full py-2 align-middle sm:px-6 lg:px-8">
<div className="overflow-hidden bg-white shadow dark:bg-gray-800 sm:rounded-lg">
<table {...getTableProps()} className="min-w-full divide-y divide-gray-200 dark:divide-gray-700">
<thead className="bg-gray-50 dark:bg-gray-800">
{headerGroups.map((headerGroup: { getHeaderGroupProps: () => JSX.IntrinsicAttributes & React.ClassAttributes<HTMLTableRowElement> & React.HTMLAttributes<HTMLTableRowElement>; headers: any[] }) => (
<tr {...headerGroup.getHeaderGroupProps()}>
{headerGroup.headers.map(column => (
// Add the sorting props to control sorting. For this example
// we can add them into the header props
<th
scope="col"
className="px-6 py-3 text-xs font-medium tracking-wider text-left text-gray-500 uppercase group"
{...column.getHeaderProps(column.getSortByToggleProps())}
>
<div className="flex items-center justify-between">
{column.render('Header')}
{/* Add a sort direction indicator */}
<span>
{column.isSorted
? column.isSortedDesc
? <SortDownIcon className="w-4 h-4 text-gray-400" />
: <SortUpIcon className="w-4 h-4 text-gray-400" />
: (
<SortIcon className="w-4 h-4 text-gray-400 opacity-0 group-hover:opacity-100" />
)}
</span>
</div>
</th>
))}
</tr>
))}
</thead>
<tbody
{...getTableBodyProps()}
className="divide-y divide-gray-200 dark:divide-gray-700"
>
              {page.map((row: any) => {
prepareRow(row)
return (
<tr {...row.getRowProps()}>
{row.cells.map((cell: any) => {
return (
<td
{...cell.getCellProps()}
className="px-6 py-4 whitespace-nowrap"
role="cell"
>
{cell.column.Cell.name === "defaultRenderer"
? <div className="text-sm text-gray-500">{cell.render('Cell')}</div>
: cell.render('Cell')
}
</td>
)
})}
</tr>
)
})}
</tbody>
</table>
{/* Pagination */}
{/* <div className="flex items-center justify-between px-6 py-3 border-t border-gray-200 dark:border-gray-700">
<div className="flex justify-between flex-1 sm:hidden">
<Button onClick={() => previousPage()} disabled={!canPreviousPage}>Previous</Button>
<Button onClick={() => nextPage()} disabled={!canNextPage}>Next</Button>
</div>
<div className="hidden sm:flex-1 sm:flex sm:items-center sm:justify-between">
<div className="flex items-baseline gap-x-2">
<span className="text-sm text-gray-700">
Page <span className="font-medium">{state.pageIndex + 1}</span> of <span className="font-medium">{pageOptions.length}</span>
</span>
<label>
<span className="sr-only">Items Per Page</span>
<select
className="block w-full border-gray-300 rounded-md shadow-sm cursor-pointer dark:bg-gray-800 dark:border-gray-800 dark:text-gray-600 dark:hover:text-gray-500 focus:border-blue-300 focus:ring focus:ring-blue-200 focus:ring-opacity-50"
value={state.pageSize}
onChange={e => {
setPageSize(Number(e.target.value))
}}
>
{[5, 10, 20].map(pageSize => (
<option key={pageSize} value={pageSize}>
Show {pageSize}
</option>
))}
</select>
</label>
</div>
<div>
<nav className="relative z-0 inline-flex -space-x-px rounded-md shadow-sm" aria-label="Pagination">
<PageButton
className="rounded-l-md"
onClick={() => gotoPage(0)}
disabled={!canPreviousPage}
>
<span className="sr-only">First</span>
<ChevronDoubleLeftIcon className="w-5 h-5 text-gray-400" aria-hidden="true" />
</PageButton>
<PageButton
onClick={() => previousPage()}
disabled={!canPreviousPage}
>
<span className="sr-only">Previous</span>
<ChevronLeftIcon className="w-5 h-5 text-gray-400" aria-hidden="true" />
</PageButton>
<PageButton
onClick={() => nextPage()}
disabled={!canNextPage
}>
<span className="sr-only">Next</span>
<ChevronRightIcon className="w-5 h-5 text-gray-400" aria-hidden="true" />
</PageButton>
<PageButton
className="rounded-r-md"
onClick={() => gotoPage(pageCount - 1)}
disabled={!canNextPage}
>
<span className="sr-only">Last</span>
<ChevronDoubleRightIcon className="w-5 h-5 text-gray-400" aria-hidden="true" />
</PageButton>
</nav>
</div>
</div>
</div> */}
</div>
</div>
</div>
</div>
: <EmptyListState text="No recent activity"/>}
</>
)
}
function SortIcon({ className }: any) {
return (
<svg className={className} stroke="currentColor" fill="currentColor" strokeWidth="0" viewBox="0 0 320 512" height="1em" width="1em" xmlns="http://www.w3.org/2000/svg"><path d="M41 288h238c21.4 0 32.1 25.9 17 41L177 448c-9.4 9.4-24.6 9.4-33.9 0L24 329c-15.1-15.1-4.4-41 17-41zm255-105L177 64c-9.4-9.4-24.6-9.4-33.9 0L24 183c-15.1 15.1-4.4 41 17 41h238c21.4 0 32.1-25.9 17-41z"></path></svg>
)
}
function SortUpIcon({ className }: any) {
return (
<svg className={className} stroke="currentColor" fill="currentColor" strokeWidth="0" viewBox="0 0 320 512" height="1em" width="1em" xmlns="http://www.w3.org/2000/svg"><path d="M279 224H41c-21.4 0-32.1-25.9-17-41L143 64c9.4-9.4 24.6-9.4 33.9 0l119 119c15.2 15.1 4.5 41-16.9 41z"></path></svg>
)
}
function SortDownIcon({ className }: any) {
return (
<svg className={className} stroke="currentColor" fill="currentColor" strokeWidth="0" viewBox="0 0 320 512" height="1em" width="1em" xmlns="http://www.w3.org/2000/svg"><path d="M41 288h238c21.4 0 32.1 25.9 17 41L177 448c-9.4 9.4-24.6 9.4-33.9 0L24 329c-15.1-15.1-4.4-41 17-41z"></path></svg>
)
}
/* function Button({ children, className, ...rest }: any) {
return (
<button
type="button"
className={
classNames(
"relative inline-flex items-center px-4 py-2 border border-gray-300 text-sm font-medium rounded-md text-gray-700 bg-white hover:bg-gray-50",
className
)}
{...rest}
>
{children}
</button>
)
}
function PageButton({ children, className, ...rest }: any) {
return (
<button
type="button"
className={
classNames(
"relative inline-flex items-center px-2 py-2 border border-gray-300 dark:border-gray-600 bg-white dark:bg-gray-700 text-sm font-medium text-gray-500 dark:text-gray-400 hover:bg-gray-50 dark:hover:bg-gray-600",
className
)}
{...rest}
>
{children}
</button>
)
} */
function DataTablee() {
const columns = React.useMemo(() => [
{
Header: "Age",
accessor: 'timestamp',
Cell: AgeCell,
},
{
Header: "Release",
accessor: 'torrent_name',
Cell: ReleaseCell,
},
// {
// Header: "Filter Status",
// accessor: 'filter_status',
// Cell: StatusPill,
// },
{
Header: "Push Status",
accessor: 'push_status',
Cell: StatusPill,
},
{
Header: "Indexer",
accessor: 'indexer',
      Filter: SelectColumnFilter,
filter: 'includes',
},
], [])
// const data = React.useMemo(() => getData(), [])
const { isLoading, data } = useQuery<ReleaseFindResponse, Error>('dash_release', () => APIClient.release.find("?limit=10"),
{
refetchOnWindowFocus: false
}
)
if (isLoading) {
return null
}
return (
<div className="flex flex-col mt-12">
<h3 className="text-lg font-medium leading-6 text-gray-900 dark:text-gray-600">Recent activity</h3>
<Table columns={columns} data={data?.data} />
</div>
)
}
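// Note on the assumed data shape: the column accessors above (timestamp,
// torrent_name, push_status, indexer) and the data?.data access imply release
// rows roughly like the sketch below. This is inferred from this file, not from
// a documented API contract — treat the field names and types as assumptions.
//
//   interface ReleaseRow {
//     timestamp: string     // when the release was seen; AgeCell renders it relatively
//     torrent_name: string  // release title, rendered by ReleaseCell
//     push_status: string   // e.g. "PUSH_APPROVED" | "PUSH_REJECTED", rendered by StatusPill
//     indexer: string       // short indexer identifier, e.g. "tl"
//   }
//
//   // ReleaseFindResponse is assumed to wrap the rows as { data: ReleaseRow[] },
//   // matching the data?.data passed to <Table /> above.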
export default App;

File diff suppressed because it is too large