feat: add backend

This commit is contained in:
Ludvig Lundgren 2021-08-11 15:26:17 +02:00
parent bc418ff248
commit a838d994a6
68 changed files with 9561 additions and 0 deletions

109
cmd/autobrr/main.go Normal file
View file

@ -0,0 +1,109 @@
package main
import (
"database/sql"
"fmt"
"os"
"os/signal"
"syscall"
"github.com/rs/zerolog/log"
"github.com/spf13/pflag"
_ "modernc.org/sqlite"
"github.com/autobrr/autobrr/internal/action"
"github.com/autobrr/autobrr/internal/announce"
"github.com/autobrr/autobrr/internal/config"
"github.com/autobrr/autobrr/internal/database"
"github.com/autobrr/autobrr/internal/download_client"
"github.com/autobrr/autobrr/internal/filter"
"github.com/autobrr/autobrr/internal/http"
"github.com/autobrr/autobrr/internal/indexer"
"github.com/autobrr/autobrr/internal/irc"
"github.com/autobrr/autobrr/internal/logger"
"github.com/autobrr/autobrr/internal/release"
"github.com/autobrr/autobrr/internal/server"
)
var (
cfg config.Cfg
)
// main is the autobrr entry point: it loads configuration, opens and
// migrates the SQLite database, wires up repositories and services,
// starts the HTTP API and the IRC server, then blocks until it receives
// a termination signal or the HTTP server fails.
func main() {
	var configPath string

	pflag.StringVar(&configPath, "config", "", "path to configuration file")
	pflag.Parse()

	// read config
	cfg = config.Read(configPath)

	// setup logger
	logger.Setup(cfg)

	// if configPath is set then put database inside that path, otherwise create wherever it's run
	var dataSource = database.DataSourceName(configPath, "autobrr.db")

	// open database connection
	db, err := sql.Open("sqlite", dataSource)
	if err != nil {
		log.Fatal().Err(err).Msg("could not open db connection")
	}
	defer db.Close()

	if err = database.Migrate(db); err != nil {
		log.Fatal().Err(err).Msg("could not migrate db")
	}

	// setup repos
	// var announceRepo = database.NewAnnounceRepo(db)
	var (
		actionRepo         = database.NewActionRepo(db)
		downloadClientRepo = database.NewDownloadClientRepo(db)
		filterRepo         = database.NewFilterRepo(db)
		indexerRepo        = database.NewIndexerRepo(db)
		ircRepo            = database.NewIrcRepo(db)
	)

	// setup services; declaration order matters, later services depend on earlier ones
	var (
		downloadClientService = download_client.NewService(downloadClientRepo)
		actionService         = action.NewService(actionRepo, downloadClientService)
		indexerService        = indexer.NewService(indexerRepo)
		filterService         = filter.NewService(filterRepo, actionRepo, indexerService)
		releaseService        = release.NewService(actionService)
		announceService       = announce.NewService(filterService, indexerService, releaseService)
		ircService            = irc.NewService(ircRepo, announceService)
	)

	addr := fmt.Sprintf("%v:%v", cfg.Host, cfg.Port)

	errorChannel := make(chan error)
	go func() {
		httpServer := http.NewServer(addr, cfg.BaseURL, actionService, downloadClientService, filterService, indexerService, ircService)
		errorChannel <- httpServer.Open()
	}()

	srv := server.NewServer(ircService, indexerService)
	srv.Hostname = cfg.Host
	srv.Port = cfg.Port

	sigCh := make(chan os.Signal, 1)
	signal.Notify(sigCh, syscall.SIGINT, syscall.SIGTERM, syscall.SIGHUP)

	if err := srv.Start(); err != nil {
		log.Fatal().Err(err).Msg("could not start server")
	}

	// block until we are told to shut down, or until the HTTP server dies
	for {
		select {
		case err := <-errorChannel:
			// previously this channel was never read, so the HTTP goroutine
			// blocked forever on the send and server failures went unnoticed
			log.Fatal().Err(err).Msg("http server failed")
		case sig := <-sigCh:
			switch sig {
			case syscall.SIGHUP, syscall.SIGINT, syscall.SIGTERM:
				log.Print("shutting down server")
				//srv.Shutdown()
				// exit 0: shutting down on a signal is a normal exit, not a
				// failure (was os.Exit(1) followed by an unreachable return)
				os.Exit(0)
			}
		}
	}
}

36
config.toml Normal file
View file

@ -0,0 +1,36 @@
# config.toml
# Hostname / IP
#
# Default: "localhost"
#
host = "127.0.0.1"
# Port
#
# Default: 8989
#
port = 8989
# Base url
# Set a custom baseUrl, e.g. /autobrr/, to serve the app from a subdirectory.
# Not needed when using a subdomain or when accessing the app directly via its :port.
#
# Optional
#
#baseUrl = "/autobrr/"
# autobrr logs file
# If not defined, logs to stdout
#
# Optional
#
#logPath = "log/autobrr.log"
# Log level
#
# Default: "DEBUG"
#
# Options: "ERROR", "DEBUG", "INFO", "WARN"
#
logLevel = "DEBUG"

23
go.mod Normal file
View file

@ -0,0 +1,23 @@
module github.com/autobrr/autobrr
go 1.16
require (
github.com/anacrolix/torrent v1.29.1
github.com/fluffle/goirc v1.0.3
github.com/go-chi/chi v1.5.4
github.com/lib/pq v1.10.2
github.com/pelletier/go-toml v1.6.0 // indirect
github.com/pkg/errors v0.9.1
github.com/rs/zerolog v1.20.0
github.com/smartystreets/assertions v1.0.0 // indirect
github.com/spf13/pflag v1.0.3
github.com/spf13/viper v1.7.1
github.com/stretchr/testify v1.7.0
golang.org/x/net v0.0.0-20210427231257-85d9c07bbe3a
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c // indirect
gopkg.in/irc.v3 v3.1.1
gopkg.in/natefinch/lumberjack.v2 v2.0.0
gopkg.in/yaml.v2 v2.4.0
modernc.org/sqlite v1.12.0
)

1053
go.sum Normal file

File diff suppressed because it is too large Load diff

278
internal/action/service.go Normal file
View file

@ -0,0 +1,278 @@
package action
import (
	"fmt"
	"io"
	"os"
	"path/filepath"
	"strconv"
	"strings"
	"time"

	"github.com/rs/zerolog/log"

	"github.com/autobrr/autobrr/internal/domain"
	"github.com/autobrr/autobrr/internal/download_client"
	"github.com/autobrr/autobrr/pkg/qbittorrent"
)
// REANNOUNCE_MAX_ATTEMPTS is the maximum number of re-announce retries in
// checkTrackerStatus before the torrent is deleted from the client.
const REANNOUNCE_MAX_ATTEMPTS = 30

// REANNOUNCE_INTERVAL is the delay between re-announce attempts, in milliseconds.
const REANNOUNCE_INTERVAL = 7000
// Service exposes the action operations: running the actions attached to a
// filter when a release matches, plus CRUD-style management of stored actions.
type Service interface {
	// RunActions executes every enabled action of the filter for the given
	// downloaded torrent file and info hash.
	RunActions(torrentFile string, hash string, filter domain.Filter) error
	Store(action domain.Action) (*domain.Action, error)
	Fetch() ([]domain.Action, error)
	Delete(actionID int) error
	ToggleEnabled(actionID int) error
}

// service implements Service on top of an action repository and the
// download-client service (used to resolve the client an action targets).
type service struct {
	repo      domain.ActionRepo
	clientSvc download_client.Service
}

// NewService returns a Service backed by the given repository and
// download-client service.
func NewService(repo domain.ActionRepo, clientSvc download_client.Service) Service {
	return &service{repo: repo, clientSvc: clientSvc}
}
// RunActions executes every enabled action of the given filter against the
// downloaded torrent. Each action runs in its own goroutine; errors from
// individual actions are logged, not returned.
//
// Supported action types so far: TEST, WATCH_FOLDER and QBITTORRENT.
func (s *service) RunActions(torrentFile string, hash string, filter domain.Filter) error {
	for _, action := range filter.Actions {
		if !action.Enabled {
			// only run active actions
			continue
		}

		log.Debug().Msgf("process action: %v", action.Name)

		switch action.Type {
		case domain.ActionTypeTest:
			go s.test(torrentFile)

		case domain.ActionTypeWatchFolder:
			go s.watchFolder(action.WatchFolder, torrentFile)

		case domain.ActionTypeQbittorrent:
			// pass action as an argument: with go.mod at 1.16 the closure
			// would otherwise capture the shared loop variable
			go func(action domain.Action) {
				err := s.qbittorrent(action, hash, torrentFile)
				if err != nil {
					log.Error().Err(err).Msg("error sending torrent to client")
				}
			}(action)

		// deluge
		// pvr *arr
		// exec

		default:
			// was panic("implement me"): one unknown action type must not
			// crash the whole application — log it and keep going
			log.Error().Msgf("unsupported action type: %v", action.Type)
		}
	}

	return nil
}
// Store persists the given action via the repository and returns the
// stored version.
func (s *service) Store(action domain.Action) (*domain.Action, error) {
	// validate data
	stored, err := s.repo.Store(action)
	if err != nil {
		return nil, err
	}
	return stored, nil
}
// Delete removes the action with the given id from the repository.
func (s *service) Delete(actionID int) error {
	return s.repo.Delete(actionID)
}
// Fetch returns all stored actions.
func (s *service) Fetch() ([]domain.Action, error) {
	list, err := s.repo.List()
	if err != nil {
		return nil, err
	}
	return list, nil
}
// ToggleEnabled flips the enabled flag of the action with the given id.
func (s *service) ToggleEnabled(actionID int) error {
	return s.repo.ToggleEnabled(actionID)
}
// test implements the TEST action type: it performs no work and only logs
// the torrent file path, so a filter setup can be verified safely.
func (s *service) test(torrentFile string) {
	log.Info().Msgf("action TEST: %v", torrentFile)
}
// watchFolder implements the WATCH_FOLDER action type: it copies the
// downloaded torrent file into the given watch directory, where the
// download client is expected to pick it up.
//
// Errors abort the copy and are logged; they are not returned to the caller.
func (s *service) watchFolder(dir string, torrentFile string) {
	log.Debug().Msgf("action WATCH_FOLDER: %v file: %v", dir, torrentFile)

	// Open original file
	original, err := os.Open(torrentFile)
	if err != nil {
		// was log.Fatal().Err(err) — without a Msg()/Send() terminator the
		// zerolog event never fires, so these errors were silently dropped.
		// Also, killing the whole process over one failed action is too
		// drastic; log and bail out of this action instead.
		log.Error().Err(err).Msgf("could not open torrent file: %v", torrentFile)
		return
	}
	defer original.Close()

	// was strings.Split(torrentFile, "/")[1], which picks the wrong segment
	// for any path that does not contain exactly one separator;
	// filepath.Base/Join handle arbitrary paths on any OS
	fullFileName := filepath.Join(dir, filepath.Base(torrentFile))

	// Create new file
	newFile, err := os.Create(fullFileName)
	if err != nil {
		log.Error().Err(err).Msgf("could not create file: %v", fullFileName)
		return
	}
	defer newFile.Close()

	// Copy file
	if _, err := io.Copy(newFile, original); err != nil {
		log.Error().Err(err).Msgf("could not copy file: %v", fullFileName)
		return
	}

	log.Info().Msgf("action WATCH_FOLDER: wrote file: %v", fullFileName)
}
// qbittorrent implements the QBITTORRENT action type: it looks up the client
// configured on the action, logs in, adds the torrent file with options
// derived from the action (paused, save path, category, tags, speed limits)
// and — unless the torrent was added paused — verifies the tracker announce
// via checkTrackerStatus.
func (s *service) qbittorrent(action domain.Action, hash string, torrentFile string) error {
	log.Debug().Msgf("action QBITTORRENT: %v", torrentFile)

	// get client for action
	client, err := s.clientSvc.FindByID(action.ClientID)
	if err != nil {
		log.Error().Err(err).Msgf("error finding client: %v", action.ClientID)
		return err
	}

	// NOTE(review): err is nil here, so a missing client silently returns
	// nil instead of a real error — confirm this is intended.
	if client == nil {
		return err
	}

	qbtSettings := qbittorrent.Settings{
		Hostname: client.Host,
		Port:     uint(client.Port),
		Username: client.Username,
		Password: client.Password,
		SSL:      client.SSL,
	}

	qbt := qbittorrent.NewClient(qbtSettings)
	// save cookies?
	err = qbt.Login()
	if err != nil {
		log.Error().Err(err).Msgf("error logging into client: %v", action.ClientID)
		return err
	}

	// TODO check for active downloads and other rules

	// translate the action settings into qBittorrent "add torrent" options
	options := map[string]string{}

	if action.Paused {
		options["paused"] = "true"
	}
	if action.SavePath != "" {
		options["savepath"] = action.SavePath
		// explicit save path implies manual torrent management
		options["autoTMM"] = "false"
	}
	if action.Category != "" {
		options["category"] = action.Category
	}
	if action.Tags != "" {
		options["tags"] = action.Tags
	}
	if action.LimitUploadSpeed > 0 {
		options["upLimit"] = strconv.FormatInt(action.LimitUploadSpeed, 10)
	}
	if action.LimitDownloadSpeed > 0 {
		options["dlLimit"] = strconv.FormatInt(action.LimitDownloadSpeed, 10)
	}

	err = qbt.AddTorrentFromFile(torrentFile, options)
	if err != nil {
		log.Error().Err(err).Msgf("error sending to client: %v", action.ClientID)
		return err
	}

	// re-announce checking only makes sense for torrents that start running;
	// a paused torrent never contacts the tracker
	if !action.Paused && hash != "" {
		err = checkTrackerStatus(*qbt, hash)
		if err != nil {
			log.Error().Err(err).Msgf("could not get tracker status for torrent: %v", hash)
			return err
		}
	}

	log.Debug().Msgf("torrent %v successfully added to: %v", hash, client.Name)

	return nil
}
// checkTrackerStatus polls the torrent's tracker status after it was added,
// re-announcing until the tracker reports OK. It sleeps REANNOUNCE_INTERVAL
// milliseconds between attempts and gives up after REANNOUNCE_MAX_ATTEMPTS,
// in which case the torrent is deleted from the client (keeping the data).
func checkTrackerStatus(qb qbittorrent.Client, hash string) error {
	announceOK := false
	attempts := 0

	for attempts < REANNOUNCE_MAX_ATTEMPTS {
		log.Debug().Msgf("RE-ANNOUNCE %v attempt: %v", hash, attempts)

		// initial sleep to give tracker a head start
		time.Sleep(REANNOUNCE_INTERVAL * time.Millisecond)

		trackers, err := qb.GetTorrentTrackers(hash)
		if err != nil {
			log.Error().Err(err).Msgf("could not get trackers of torrent: %v", hash)
			return err
		}

		// check if status not working or something else
		_, working := findTrackerStatus(trackers, qbittorrent.TrackerStatusOK)
		if !working {
			// not announced yet: ask the client to re-announce and try again
			err = qb.ReAnnounceTorrents([]string{hash})
			if err != nil {
				log.Error().Err(err).Msgf("could not get re-announce torrent: %v", hash)
				return err
			}
			attempts++
			continue
		} else {
			log.Debug().Msgf("RE-ANNOUNCE %v OK", hash)
			announceOK = true
			break
		}
	}

	if !announceOK {
		// the tracker never confirmed the torrent; remove it from the client
		log.Debug().Msgf("RE-ANNOUNCE %v took too long, deleting torrent", hash)
		err := qb.DeleteTorrents([]string{hash}, false)
		if err != nil {
			log.Error().Err(err).Msgf("could not delete torrent: %v", hash)
			return err
		}
	}

	return nil
}
// findTrackerStatus returns the index of the first tracker whose status
// equals the wanted status, and whether such a tracker exists.
//
// qBittorrent tracker status values
// https://github.com/qbittorrent/qBittorrent/wiki/WebUI-API-(qBittorrent-4.1)#get-torrent-trackers
//  0 Tracker is disabled (used for DHT, PeX, and LSD)
//  1 Tracker has not been contacted yet
//  2 Tracker has been contacted and is working
//  3 Tracker is updating
//  4 Tracker has been contacted, but it is not working (or doesn't send proper replies)
func findTrackerStatus(slice []qbittorrent.TorrentTracker, status qbittorrent.TrackerStatus) (int, bool) {
	for idx := range slice {
		if slice[idx].Status == status {
			return idx, true
		}
	}
	return -1, false
}

588
internal/announce/parse.go Normal file
View file

@ -0,0 +1,588 @@
package announce
import (
"bytes"
"fmt"
"html"
"net/url"
"regexp"
"strconv"
"strings"
"text/template"
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/pkg/releaseinfo"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
// parseLineSingle runs every single-line extract pattern of the indexer
// definition against one announce line: it captures the pattern variables
// into a temporary map and then feeds them through onLinesMatched to
// populate the announce object.
func (s *service) parseLineSingle(def *domain.IndexerDefinition, announce *domain.Announce, line string) error {
	for _, extract := range def.Parse.Lines {
		tmpVars := map[string]string{}

		var err error
		err = s.parseExtract(extract.Pattern, extract.Vars, tmpVars, line)
		if err != nil {
			log.Debug().Msgf("error parsing extract: %v", line)
			return err
		}

		// on lines matched
		err = s.onLinesMatched(def, tmpVars, announce)
		if err != nil {
			log.Debug().Msgf("error match line: %v", line)
			return err
		}
	}

	return nil
}
// parseMultiLine handles indexers whose announces span multiple IRC lines.
// TODO: not implemented yet; always returns nil.
func (s *service) parseMultiLine() error {
	return nil
}
// parseExtract runs one extract pattern over an announce line and stores
// the captured groups into tmpVars under the given variable names.
//
// A pattern that fails to compile or does not match is not treated as an
// error here: the line is simply skipped (best effort, matching the other
// parse helpers).
func (s *service) parseExtract(pattern string, vars []string, tmpVars map[string]string, line string) error {
	rxp, err := regExMatch(pattern, line)
	if err != nil {
		log.Debug().Msgf("did not match expected line: %v", line)
	}

	if rxp == nil {
		//return nil, nil
		return nil
	}

	// extract matched; guard the index so a definition that declares more
	// vars than the pattern has capture groups can no longer panic with
	// "index out of range"
	for i, v := range vars {
		value := ""
		if i < len(rxp) && rxp[i] != "" {
			value = rxp[i]
		}
		tmpVars[v] = value
	}
	return nil
}
// onLinesMatched runs after an announce line matched an extract pattern:
// it copies the torrent name, extracts extra release info, maps the parsed
// vars onto the announce object and renders the torrent download URL from
// the definition's template.
func (s *service) onLinesMatched(def *domain.IndexerDefinition, vars map[string]string, announce *domain.Announce) error {
	// TODO implement set tracker.lastAnnounce = now

	announce.TorrentName = vars["torrentName"]

	//err := s.postProcess(ti, vars, *announce)
	//if err != nil {
	//	return err
	//}

	// TODO extractReleaseInfo
	err := s.extractReleaseInfo(vars, announce.TorrentName)
	if err != nil {
		return err
	}

	// resolution
	// source
	// encoder
	// canonicalize name

	err = s.mapToAnnounce(vars, announce)
	if err != nil {
		return err
	}

	// torrent url
	torrentUrl, err := s.processTorrentUrl(def.Parse.Match.TorrentURL, vars, def.SettingsMap, def.Parse.Match.Encode)
	if err != nil {
		log.Debug().Msgf("error torrent url: %v", err)
		return err
	}

	if torrentUrl != "" {
		announce.TorrentUrl = torrentUrl
	}

	return nil
}
// processTorrentUrl renders the indexer's torrent-URL template. It merges
// the parsed announce vars with the indexer's static settings (settings win
// on key collision), URL-encodes the values listed in encode, and executes
// the template against the merged map.
func (s *service) processTorrentUrl(match string, vars map[string]string, extraVars map[string]string, encode []string) (string, error) {
	merged := make(map[string]string, len(vars)+len(extraVars))

	// announce vars first, then indexer settings on top
	for key, val := range vars {
		merged[key] = val
	}
	for key, val := range extraVars {
		merged[key] = val
	}

	// url-encode the values the definition marked for encoding
	for _, name := range encode {
		if val, ok := merged[name]; ok {
			merged[name] = url.QueryEscape(val)
		}
	}

	// setup text template to inject variables into
	tmpl, err := template.New("torrenturl").Parse(match)
	if err != nil {
		log.Error().Err(err).Msg("could not create torrent url template")
		return "", err
	}

	var out bytes.Buffer
	if err := tmpl.Execute(&out, &merged); err != nil {
		log.Error().Err(err).Msg("could not write torrent url template output")
		return "", err
	}

	return out.String(), nil
}
// split reports whether r is one of the separators used when splitting
// release names (space or dot).
func split(r rune) bool {
	switch r {
	case ' ', '.':
		return true
	}
	return false
}
// Splitter breaks s into fields, treating every rune that occurs in splits
// as a separator. Empty fields are dropped (strings.FieldsFunc semantics).
func Splitter(s string, splits string) []string {
	seps := make(map[rune]int, len(splits))
	for _, sep := range splits {
		seps[sep] = 1
	}
	return strings.FieldsFunc(s, func(r rune) bool {
		return seps[r] == 1
	})
}
// canonicalizeString splits a release name on spaces and dots into its
// individual words.
func canonicalizeString(s string) []string {
	//a := strings.FieldsFunc(s, split)
	return Splitter(s, " .")
}
// cleanReleaseRe matches every run of characters that is not a letter or a
// digit. Compiled once at package level instead of on every call.
var cleanReleaseRe = regexp.MustCompile("[^a-zA-Z0-9]+")

// cleanReleaseName replaces every run of non-alphanumeric characters in
// input with a single space, e.g. "Foo.Bar-Baz" -> "Foo Bar Baz".
//
// The original compiled the regex on each call and ignored the compile
// error, which would have left reg nil and panicked on use; MustCompile on
// a constant pattern makes the invariant explicit and removes the per-call
// compilation cost.
func cleanReleaseName(input string) string {
	return cleanReleaseRe.ReplaceAllString(input, " ")
}
// findLast applies pattern to input and returns the last capture group of
// the first match, or "" when the pattern does not match. A pattern that
// fails to compile yields an error.
func findLast(input string, pattern string) (string, error) {
	rxp, err := regexp.Compile(pattern)
	if err != nil {
		return "", err
	}

	matches := rxp.FindStringSubmatch(input)
	if matches == nil {
		return "", nil
	}

	log.Trace().Msgf("matches: %v", matches)

	// element 0 is the whole match, the rest are capture groups;
	// the caller gets the last one
	return matches[len(matches)-1], nil
}
// extractYear pulls a plausible year (1930-2029) out of releaseName.
// The bool is false only when the regex fails to compile.
func extractYear(releaseName string) (string, bool) {
	year, err := findLast(releaseName, "(?:^|\\D)(19[3-9]\\d|20[012]\\d)(?:\\D|$)")
	if err != nil {
		return "", false
	}
	log.Trace().Msgf("year matches: %v", year)
	return year, true
}
// extractSeason tries to pull a season number out of releaseName.
//
// NOTE(review): the patterns end in a literal "/i" — that is Perl-style
// case-insensitive syntax; Go's regexp matches it as the literal characters
// "/i", so these patterns almost certainly never match (the Go equivalent
// is a "(?i)" prefix). The error from the first findLast call is also
// overwritten by the second, and the bool result is always false — this
// looks like unfinished work; confirm before relying on it.
func extractSeason(releaseName string) (string, bool) {
	seasonMatch, err := findLast(releaseName, "\\sS(\\d+)\\s?[ED]\\d+/i")
	sm2, err := findLast(releaseName, "\\s(?:S|Season\\s*)(\\d+)/i")
	//sm3, err := findLast(releaseName, "\\s((?<!\\d)\\d{1,2})x\\d+/i")
	if err != nil {
		return "", false
	}
	log.Trace().Msgf("season matches: %v", seasonMatch)
	log.Trace().Msgf("season matches: %v", sm2)
	return seasonMatch, false
}
// extractEpisode tries to pull an episode number out of releaseName.
//
// NOTE(review): same issues as extractSeason — the trailing "/i" is matched
// literally by Go's regexp (use a "(?i)" prefix instead), the first findLast
// error is overwritten by the second, and the bool result is always false.
func extractEpisode(releaseName string) (string, bool) {
	epMatch, err := findLast(releaseName, "\\sS\\d+\\s?E(\\d+)/i")
	ep2, err := findLast(releaseName, "\\s(?:E|Episode\\s*)(\\d+)/i")
	//ep3, err := findLast(releaseName, "\\s(?<!\\d)\\d{1,2}x(\\d+)/i")
	if err != nil {
		return "", false
	}
	log.Trace().Msgf("ep matches: %v", epMatch)
	log.Trace().Msgf("ep matches: %v", ep2)
	return epMatch, false
}
// extractReleaseInfo parses additional metadata (year, season, episode,
// resolution, ...) out of the raw release name. Currently it only runs the
// releaseinfo parser and logs the result; the individual extractors below
// are still commented out and varMap is not yet written to.
func (s *service) extractReleaseInfo(varMap map[string]string, releaseName string) error {
	// https://github.com/middelink/go-parse-torrent-name
	canonReleaseName := cleanReleaseName(releaseName)
	log.Trace().Msgf("canonicalize release name: %v", canonReleaseName)

	release, err := releaseinfo.Parse(releaseName)
	if err != nil {
		return err
	}

	log.Debug().Msgf("release: %+v", release)

	// https://github.com/autodl-community/autodl-irssi/pull/194/files

	// year
	//year, yearMatch := extractYear(canonReleaseName)
	//if yearMatch {
	//	setVariable("year", year, varMap, nil)
	//}
	//log.Trace().Msgf("year matches: %v", year)

	// season
	//season, seasonMatch := extractSeason(canonReleaseName)
	//if seasonMatch {
	//	// set var
	//	log.Trace().Msgf("season matches: %v", season)
	//}

	// episode
	//episode, episodeMatch := extractEpisode(canonReleaseName)
	//if episodeMatch {
	//	// set var
	//	log.Trace().Msgf("episode matches: %v", episode)
	//}

	// resolution

	// source

	// encoder

	// ignore

	// tv or movie

	// music stuff

	// game stuff

	return nil
}
// mapToAnnounce copies the parsed announce variables onto the announce
// object. Only torrentName is required; every other field is optional and
// left untouched when missing from varMap. Lookups are case-insensitive.
func (s *service) mapToAnnounce(varMap map[string]string, ann *domain.Announce) error {

	if torrentName, err := getFirstStringMapValue(varMap, []string{"torrentName"}); err != nil {
		return errors.Wrap(err, "failed parsing required field")
	} else {
		// announce lines may contain HTML entities (&amp; etc.)
		ann.TorrentName = html.UnescapeString(torrentName)
	}

	if category, err := getFirstStringMapValue(varMap, []string{"category"}); err == nil {
		ann.Category = category
	}

	if freeleech, err := getFirstStringMapValue(varMap, []string{"freeleech"}); err == nil {
		ann.Freeleech = strings.EqualFold(freeleech, "freeleech") || strings.EqualFold(freeleech, "yes")
	}

	if freeleechPercent, err := getFirstStringMapValue(varMap, []string{"freeleechPercent"}); err == nil {
		ann.FreeleechPercent = freeleechPercent
	}

	if uploader, err := getFirstStringMapValue(varMap, []string{"uploader"}); err == nil {
		ann.Uploader = uploader
	}

	if scene, err := getFirstStringMapValue(varMap, []string{"scene"}); err == nil {
		ann.Scene = strings.EqualFold(scene, "true") || strings.EqualFold(scene, "yes")
	}

	if year, err := getFirstStringMapValue(varMap, []string{"year"}); err == nil {
		yearI, err := strconv.Atoi(year)
		if err != nil {
			// NOTE(review): a non-numeric year is silently ignored and
			// ann.Year ends up 0 — confirm this is intended
			//log.Debug().Msgf("bad year var: %v", year)
		}
		ann.Year = yearI
	}

	if tags, err := getFirstStringMapValue(varMap, []string{"releaseTags", "tags"}); err == nil {
		ann.Tags = tags
	}

	return nil
}
// mapToAnnounceObj is the extended variant of mapToAnnounce used for
// definitions whose variables carry a "$" prefix: torrentName and
// torrentUrl are required, all other fields are optional and left
// untouched when missing from varMap. Lookups are case-insensitive.
func (s *service) mapToAnnounceObj(varMap map[string]string, ann *domain.Announce) error {

	if torrentName, err := getFirstStringMapValue(varMap, []string{"torrentName", "$torrentName"}); err != nil {
		return errors.Wrap(err, "failed parsing required field")
	} else {
		// announce lines may contain HTML entities (&amp; etc.)
		ann.TorrentName = html.UnescapeString(torrentName)
	}

	if torrentUrl, err := getFirstStringMapValue(varMap, []string{"torrentUrl", "$torrentUrl"}); err != nil {
		return errors.Wrap(err, "failed parsing required field")
	} else {
		ann.TorrentUrl = torrentUrl
	}

	if releaseType, err := getFirstStringMapValue(varMap, []string{"releaseType", "$releaseType"}); err == nil {
		ann.ReleaseType = releaseType
	}

	if name1, err := getFirstStringMapValue(varMap, []string{"name1", "$name1"}); err == nil {
		ann.Name1 = name1
	}

	if name2, err := getFirstStringMapValue(varMap, []string{"name2", "$name2"}); err == nil {
		ann.Name2 = name2
	}

	if category, err := getFirstStringMapValue(varMap, []string{"category", "$category"}); err == nil {
		ann.Category = category
	}
	if freeleech, err := getFirstStringMapValue(varMap, []string{"freeleech", "$freeleech"}); err == nil {
		ann.Freeleech = strings.EqualFold(freeleech, "true")
	}

	if uploader, err := getFirstStringMapValue(varMap, []string{"uploader", "$uploader"}); err == nil {
		ann.Uploader = uploader
	}

	if tags, err := getFirstStringMapValue(varMap, []string{"$releaseTags", "$tags", "releaseTags", "tags"}); err == nil {
		ann.Tags = tags
	}

	if cue, err := getFirstStringMapValue(varMap, []string{"cue", "$cue"}); err == nil {
		ann.Cue = strings.EqualFold(cue, "true")
	}

	if logVar, err := getFirstStringMapValue(varMap, []string{"log", "$log"}); err == nil {
		ann.Log = logVar
	}

	if media, err := getFirstStringMapValue(varMap, []string{"media", "$media"}); err == nil {
		ann.Media = media
	}

	if format, err := getFirstStringMapValue(varMap, []string{"format", "$format"}); err == nil {
		ann.Format = format
	}

	if bitRate, err := getFirstStringMapValue(varMap, []string{"bitrate", "$bitrate"}); err == nil {
		ann.Bitrate = bitRate
	}

	if resolution, err := getFirstStringMapValue(varMap, []string{"resolution"}); err == nil {
		ann.Resolution = resolution
	}

	if source, err := getFirstStringMapValue(varMap, []string{"source"}); err == nil {
		ann.Source = source
	}

	if encoder, err := getFirstStringMapValue(varMap, []string{"encoder"}); err == nil {
		ann.Encoder = encoder
	}

	if container, err := getFirstStringMapValue(varMap, []string{"container"}); err == nil {
		ann.Container = container
	}

	if scene, err := getFirstStringMapValue(varMap, []string{"scene", "$scene"}); err == nil {
		ann.Scene = strings.EqualFold(scene, "true")
	}

	if year, err := getFirstStringMapValue(varMap, []string{"year", "$year"}); err == nil {
		yearI, err := strconv.Atoi(year)
		if err != nil {
			// NOTE(review): a non-numeric year is silently ignored and
			// ann.Year ends up 0 — confirm this is intended
			//log.Debug().Msgf("bad year var: %v", year)
		}
		ann.Year = yearI
	}

	//return &ann, nil
	return nil
}
// setVariable stores value under varName in varMap, but only when the
// variable is not already set; an existing value is left untouched.
// Always returns true. The settings parameter is currently unused
// (the instance-options lookup is still commented out in the original).
func setVariable(varName string, value string, varMap map[string]string, settings map[string]string) bool {
	// check in instance options (auth)
	//optVal, ok := settings[name]
	//if !ok {
	//	//return ""
	//}
	////ret = optVal
	//if optVal != "" {
	//	return false
	//}

	// else in varMap
	existing, ok := varMap[varName]
	if !ok {
		varMap[varName] = value
	}

	log.Trace().Msgf("setVariable: %v", existing)

	return true
}
// getVariable looks name up first in the indexer settings and then in the
// parsed vars, returning the first non-empty hit ("" when absent in both).
// The announce object is not consulted yet (TODO in the original).
func getVariable(name string, varMap map[string]string, obj domain.Announce, settings map[string]string) string {
	// check in announce obj
	// TODO reflect struct

	// check in instance options (auth)
	if fromSettings := settings[name]; fromSettings != "" {
		return fromSettings
	}

	// else in varMap; the zero value "" doubles as "not found"
	return varMap[name]
}
//func contains(s []string, str string) bool {
// for _, v := range s {
// if v == str {
// return true
// }
// }
//
// return false
//}
// listContains reports whether key occurs in list, ignoring case.
func listContains(list []string, key string) bool {
	for _, candidate := range list {
		if strings.EqualFold(candidate, key) {
			return true
		}
	}
	return false
}
// getStringMapValue returns the value stored under key in stringMap using
// a case-insensitive key comparison; it errors when no key matches.
func getStringMapValue(stringMap map[string]string, key string) (string, error) {
	wanted := strings.ToLower(key)
	for candidate, value := range stringMap {
		if strings.ToLower(candidate) == wanted {
			return value, nil
		}
	}
	return "", fmt.Errorf("key was not found in map: %q", wanted)
}
// getFirstStringMapValue tries each key in order and returns the first
// case-insensitive hit in stringMap; it errors when none of the keys match.
func getFirstStringMapValue(stringMap map[string]string, keys []string) (string, error) {
	for _, key := range keys {
		if value, err := getStringMapValue(stringMap, key); err == nil {
			return value, nil
		}
	}
	return "", fmt.Errorf("key were not found in map: %q", strings.Join(keys, ", "))
}
// removeElement returns a copy of s with the element at index i removed.
// It errors when i is out of range.
//
// The original used append(s[:i], s[i+1:]...), which shifts elements inside
// the caller's backing array — a classic Go slice-aliasing pitfall that
// silently corrupts the input. Building a fresh slice keeps the input
// intact for the same returned values.
func removeElement(s []string, i int) ([]string, error) {
	// bounds check first to prevent a panic
	if i < 0 || i >= len(s) {
		return nil, fmt.Errorf("index is out of range: index is %d with slice length %d", i, len(s))
	}
	out := make([]string, 0, len(s)-1)
	out = append(out, s[:i]...)
	out = append(out, s[i+1:]...)
	return out, nil
}
// regExMatch compiles pattern, applies it to value and returns only the
// capture groups (the full-match element is dropped). A non-matching
// pattern yields (nil, nil); an invalid pattern yields the compile error.
func regExMatch(pattern string, value string) ([]string, error) {
	rxp, err := regexp.Compile(pattern)
	if err != nil {
		return nil, err
		//return errors.Wrapf(err, "invalid regex: %s", value)
	}

	matches := rxp.FindStringSubmatch(value)
	if matches == nil {
		return nil, nil
	}

	// drop element 0 (the whole match), keeping only the capture groups
	groups := make([]string, 0, len(matches)-1)
	groups = append(groups, matches[1:]...)
	return groups, nil
}

View file

@ -0,0 +1,585 @@
package announce
import (
"testing"
)
//func Test_service_OnNewLine(t *testing.T) {
// tfiles := tracker.NewService()
// tfiles.ReadFiles()
//
// type fields struct {
// trackerSvc tracker.Service
// }
// type args struct {
// msg string
// }
// tests := []struct {
// name string
// fields fields
// args args
// wantErr bool
// }{
// // TODO: Add test cases.
// {
// name: "parse announce",
// fields: fields{
// trackerSvc: tfiles,
// },
// args: args{
// msg: "New Torrent Announcement: <PC :: Iso> Name:'debian live 10 6 0 amd64 standard iso' uploaded by 'Anonymous' - http://www.tracker01.test/torrent/263302",
// },
// // expect struct: category, torrentName uploader freeleech baseurl torrentId
// wantErr: false,
// },
// }
// for _, tt := range tests {
// t.Run(tt.name, func(t *testing.T) {
// s := &service{
// trackerSvc: tt.fields.trackerSvc,
// }
// if err := s.OnNewLine(tt.args.msg); (err != nil) != tt.wantErr {
// t.Errorf("OnNewLine() error = %v, wantErr %v", err, tt.wantErr)
// }
// })
// }
//}
//func Test_service_parse(t *testing.T) {
// type fields struct {
// trackerSvc tracker.Service
// }
// type args struct {
// serverName string
// channelName string
// announcer string
// line string
// }
// tests := []struct {
// name string
// fields fields
// args args
// wantErr bool
// }{
// // TODO: Add test cases.
// }
// for _, tt := range tests {
// t.Run(tt.name, func(t *testing.T) {
// s := &service{
// trackerSvc: tt.fields.trackerSvc,
// }
// if err := s.parse(tt.args.serverName, tt.args.channelName, tt.args.announcer, tt.args.line); (err != nil) != tt.wantErr {
// t.Errorf("parse() error = %v, wantErr %v", err, tt.wantErr)
// }
// })
// }
//}
/*
var (
tracker01 = domain.TrackerInstance{
Name: "T01",
Enabled: true,
Settings: nil,
Auth: map[string]string{"rsskey": "000aaa111bbb222ccc333ddd"},
//IRC: nil,
Info: &domain.TrackerInfo{
Type: "t01",
ShortName: "T01",
LongName: "Tracker01",
SiteName: "www.tracker01.test",
IRC: domain.TrackerIRCServer{
Network: "Tracker01.test",
ServerNames: []string{"irc.tracker01.test"},
ChannelNames: []string{"#tracker01", "#t01announces"},
AnnouncerNames: []string{"_AnnounceBot_"},
},
ParseInfo: domain.ParseInfo{
LinePatterns: []domain.TrackerExtractPattern{
{
PatternType: "linepattern",
Optional: false,
Regex: regexp.MustCompile("New Torrent Announcement:\\s*<([^>]*)>\\s*Name:'(.*)' uploaded by '([^']*)'\\s*(freeleech)*\\s*-\\s*https?\\:\\/\\/([^\\/]+\\/)torrent\\/(\\d+)"),
Vars: []string{"category", "torrentName", "uploader", "$freeleech", "$baseUrl", "$torrentId"},
},
},
MultiLinePatterns: nil,
LineMatched: domain.LineMatched{
Vars: []domain.LineMatchVars{
{
Name: "freeleech",
Vars: []domain.LineMatchVarElem{
{Type: "string", Value: "false"},
},
},
{
Name: "torrentUrl",
Vars: []domain.LineMatchVarElem{
{Type: "string", Value: "https://"},
{Type: "var", Value: "$baseUrl"},
{Type: "string", Value: "rss/download/"},
{Type: "var", Value: "$torrentId"},
{Type: "string", Value: "/"},
{Type: "var", Value: "rsskey"},
{Type: "string", Value: "/"},
{Type: "varenc", Value: "torrentName"},
{Type: "string", Value: ".torrent"},
},
},
},
Extract: nil,
LineMatchIf: nil,
VarReplace: nil,
SetRegex: &domain.SetRegex{
SrcVar: "$freeleech",
Regex: regexp.MustCompile("freeleech"),
VarName: "freeleech",
NewValue: "true",
},
ExtractOne: domain.ExtractOne{Extract: nil},
ExtractTags: domain.ExtractTags{
Name: "",
SrcVar: "",
Split: "",
Regex: nil,
SetVarIf: nil,
},
},
Ignore: []domain.TrackerIgnore{},
},
},
}
tracker05 = domain.TrackerInstance{
Name: "T05",
Enabled: true,
Settings: nil,
Auth: map[string]string{"authkey": "000aaa111bbb222ccc333ddd", "torrent_pass": "eee444fff555ggg666hhh777"},
//IRC: nil,
Info: &domain.TrackerInfo{
Type: "t05",
ShortName: "T05",
LongName: "Tracker05",
SiteName: "tracker05.test",
IRC: domain.TrackerIRCServer{
Network: "Tracker05.test",
ServerNames: []string{"irc.tracker05.test"},
ChannelNames: []string{"#t05-announce"},
AnnouncerNames: []string{"Drone"},
},
ParseInfo: domain.ParseInfo{
LinePatterns: []domain.TrackerExtractPattern{
{
PatternType: "linepattern",
Optional: false,
Regex: regexp.MustCompile("^(.*)\\s+-\\s+https?:.*[&amp;\\?]id=.*https?\\:\\/\\/([^\\/]+\\/).*[&amp;\\?]id=(\\d+)\\s*-\\s*(.*)"),
Vars: []string{"torrentName", "$baseUrl", "$torrentId", "tags"},
},
},
MultiLinePatterns: nil,
LineMatched: domain.LineMatched{
Vars: []domain.LineMatchVars{
{
Name: "scene",
Vars: []domain.LineMatchVarElem{
{Type: "string", Value: "false"},
},
},
{
Name: "log",
Vars: []domain.LineMatchVarElem{
{Type: "string", Value: "false"},
},
},
{
Name: "cue",
Vars: []domain.LineMatchVarElem{
{Type: "string", Value: "false"},
},
},
{
Name: "freeleech",
Vars: []domain.LineMatchVarElem{
{Type: "string", Value: "false"},
},
},
{
Name: "torrentUrl",
Vars: []domain.LineMatchVarElem{
{Type: "string", Value: "https://"},
{Type: "var", Value: "$baseUrl"},
{Type: "string", Value: "torrents.php?action=download&id="},
{Type: "var", Value: "$torrentId"},
{Type: "string", Value: "&authkey="},
{Type: "var", Value: "authkey"},
{Type: "string", Value: "&torrent_pass="},
{Type: "var", Value: "torrent_pass"},
},
},
},
Extract: []domain.Extract{
{SrcVar: "torrentName", Optional: true, Regex: regexp.MustCompile("[(\\[]((?:19|20)\\d\\d)[)\\]]"), Vars: []string{"year"}},
{SrcVar: "$releaseTags", Optional: true, Regex: regexp.MustCompile("([\\d.]+)%"), Vars: []string{"logScore"}},
},
LineMatchIf: nil,
VarReplace: []domain.ParseVarReplace{
{Name: "tags", SrcVar: "tags", Regex: regexp.MustCompile("[._]"), Replace: " "},
},
SetRegex: nil,
ExtractOne: domain.ExtractOne{Extract: []domain.Extract{
{SrcVar: "torrentName", Optional: false, Regex: regexp.MustCompile("^(.+?) - ([^\\[]+).*\\[(\\d{4})\\] \\[([^\\[]+)\\] - ([^\\-\\[\\]]+)"), Vars: []string{"name1", "name2", "year", "releaseType", "$releaseTags"}},
{SrcVar: "torrentName", Optional: false, Regex: regexp.MustCompile("^([^\\-]+)\\s+-\\s+(.+)"), Vars: []string{"name1", "name2"}},
{SrcVar: "torrentName", Optional: false, Regex: regexp.MustCompile("(.*)"), Vars: []string{"name1"}},
}},
ExtractTags: domain.ExtractTags{
Name: "",
SrcVar: "$releaseTags",
Split: "/",
Regex: []*regexp.Regexp{regexp.MustCompile("^(?:5\\.1 Audio|\\.m4a|Various.*|~.*|&gt;.*)$")},
SetVarIf: []domain.SetVarIf{
{VarName: "format", Value: "", NewValue: "", Regex: regexp.MustCompile("^(?:MP3|FLAC|Ogg Vorbis|AAC|AC3|DTS)$")},
{VarName: "bitrate", Value: "", NewValue: "", Regex: regexp.MustCompile("Lossless$")},
{VarName: "bitrate", Value: "", NewValue: "", Regex: regexp.MustCompile("^(?:vbr|aps|apx|v\\d|\\d{2,4}|\\d+\\.\\d+|q\\d+\\.[\\dx]+|Other)?(?:\\s*kbps|\\s*kbits?|\\s*k)?(?:\\s*\\(?(?:vbr|cbr)\\)?)?$")},
{VarName: "media", Value: "", NewValue: "", Regex: regexp.MustCompile("^(?:CD|DVD|Vinyl|Soundboard|SACD|DAT|Cassette|WEB|Blu-ray|Other)$")},
{VarName: "scene", Value: "Scene", NewValue: "true", Regex: nil},
{VarName: "log", Value: "Log", NewValue: "true", Regex: nil},
{VarName: "cue", Value: "Cue", NewValue: "true", Regex: nil},
{VarName: "freeleech", Value: "Freeleech!", NewValue: "true", Regex: nil},
},
},
},
Ignore: []domain.TrackerIgnore{},
},
},
}
)
*/
//func Test_service_parse(t *testing.T) {
// type fields struct {
// name string
// trackerSvc tracker.Service
// queues map[string]chan string
// }
// type args struct {
// ti *domain.TrackerInstance
// message string
// }
//
// tests := []struct {
// name string
// fields fields
// args args
// want *domain.Announce
// wantErr bool
// }{
// {
// name: "tracker01_no_freeleech",
// fields: fields{
// name: "T01",
// trackerSvc: nil,
// queues: make(map[string]chan string),
// }, args: args{
// ti: &tracker01,
// message: "New Torrent Announcement: <PC :: Iso> Name:'debian live 10 6 0 amd64 standard iso' uploaded by 'Anonymous' - http://www.tracker01.test/torrent/263302",
// },
// want: &domain.Announce{
// Freeleech: false,
// Category: "PC :: Iso",
// TorrentName: "debian live 10 6 0 amd64 standard iso",
// Uploader: "Anonymous",
// TorrentUrl: "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
// Site: "T01",
// },
// wantErr: false,
// },
// {
// name: "tracker01_freeleech",
// fields: fields{
// name: "T01",
// trackerSvc: nil,
// queues: make(map[string]chan string),
// }, args: args{
// ti: &tracker01,
// message: "New Torrent Announcement: <PC :: Iso> Name:'debian live 10 6 0 amd64 standard iso' uploaded by 'Anonymous' freeleech - http://www.tracker01.test/torrent/263302",
// },
// want: &domain.Announce{
// Freeleech: true,
// Category: "PC :: Iso",
// TorrentName: "debian live 10 6 0 amd64 standard iso",
// Uploader: "Anonymous",
// TorrentUrl: "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
// Site: "T01",
// },
// wantErr: false,
// },
// {
// name: "tracker05_01",
// fields: fields{
// name: "T05",
// trackerSvc: nil,
// queues: make(map[string]chan string),
// }, args: args{
// ti: &tracker05,
// message: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD - http://passtheheadphones.me/torrents.php?id=97614 / http://tracker05.test/torrents.php?action=download&id=1382972 - blues, rock, classic.rock,jazz,blues.rock,electric.blues",
// },
// want: &domain.Announce{
// Name1: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name2: "Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Freeleech: false,
// TorrentName: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// TorrentUrl: "https://tracker05.test/torrents.php?action=download&id=1382972&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
// Site: "T05",
// Tags: "blues, rock, classic rock,jazz,blues rock,electric blues",
// Log: "true",
// Cue: true,
// Format: "FLAC",
// Bitrate: "Lossless",
// Media: "CD",
// Scene: false,
// Year: 1977,
// },
// wantErr: false,
// },
// {
// name: "tracker05_02",
// fields: fields{
// name: "T05",
// trackerSvc: nil,
// queues: make(map[string]chan string),
// }, args: args{
// ti: &tracker05,
// message: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD - http://tracker05.test/torrents.php?id=72158898 / http://tracker05.test/torrents.php?action=download&id=29910415 - 1990s, folk, world_music, celtic",
// },
// want: &domain.Announce{
// ReleaseType: "Album",
// Name1: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name2: "Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Freeleech: false,
// TorrentName: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// TorrentUrl: "https://tracker05.test/torrents.php?action=download&id=29910415&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
// Site: "T05",
// Tags: "1990s, folk, world music, celtic",
// Log: "true",
// Cue: true,
// Format: "FLAC",
// Bitrate: "Lossless",
// Media: "CD",
// Scene: false,
// Year: 1998,
// },
// wantErr: false,
// },
// }
// for _, tt := range tests {
// t.Run(tt.name, func(t *testing.T) {
// s := &service{
// name: tt.fields.name,
// trackerSvc: tt.fields.trackerSvc,
// queues: tt.fields.queues,
// }
// got, err := s.parse(tt.args.ti, tt.args.message)
//
// if (err != nil) != tt.wantErr {
// t.Errorf("parse() error = %v, wantErr %v", err, tt.wantErr)
// return
// }
// assert.Equal(t, tt.want, got)
// })
// }
//}
//func Test_service_parseSingleLine(t *testing.T) {
// type fields struct {
// name string
// ts tracker.Service
// queues map[string]chan string
// }
// type args struct {
// ti *domain.TrackerInstance
// line string
// }
//
// tests := []struct {
// name string
// fields fields
// args args
// want *domain.Announce
// wantErr bool
// }{
// {
// name: "tracker01_no_freeleech",
// fields: fields{
// name: "T01",
// ts: nil,
// queues: make(map[string]chan string),
// }, args: args{
// ti: &tracker01,
// line: "New Torrent Announcement: <PC :: Iso> Name:'debian live 10 6 0 amd64 standard iso' uploaded by 'Anonymous' - http://www.tracker01.test/torrent/263302",
// },
// want: &domain.Announce{
// Freeleech: false,
// Category: "PC :: Iso",
// TorrentName: "debian live 10 6 0 amd64 standard iso",
// Uploader: "Anonymous",
// TorrentUrl: "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
// Site: "T01",
// },
// wantErr: false,
// },
// {
// name: "tracker01_freeleech",
// fields: fields{
// name: "T01",
// ts: nil,
// queues: make(map[string]chan string),
// }, args: args{
// ti: &tracker01,
// line: "New Torrent Announcement: <PC :: Iso> Name:'debian live 10 6 0 amd64 standard iso' uploaded by 'Anonymous' freeleech - http://www.tracker01.test/torrent/263302",
// },
// want: &domain.Announce{
// Freeleech: true,
// Category: "PC :: Iso",
// TorrentName: "debian live 10 6 0 amd64 standard iso",
// Uploader: "Anonymous",
// TorrentUrl: "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
// Site: "T01",
// },
// wantErr: false,
// },
// {
// name: "tracker05_01",
// fields: fields{
// name: "T05",
// ts: nil,
// queues: make(map[string]chan string),
// }, args: args{
// ti: &tracker05,
// line: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD - http://passtheheadphones.me/torrents.php?id=97614 / http://tracker05.test/torrents.php?action=download&id=1382972 - blues, rock, classic.rock,jazz,blues.rock,electric.blues",
// },
// want: &domain.Announce{
// Name1: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name2: "Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Freeleech: false,
// TorrentName: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// TorrentUrl: "https://tracker05.test/torrents.php?action=download&id=1382972&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
// Site: "T05",
// Tags: "blues, rock, classic rock,jazz,blues rock,electric blues",
// //Log: "true",
// //Cue: true,
// //Format: "FLAC",
// //Bitrate: "Lossless",
// //Media: "CD",
// Log: "false",
// Cue: false,
// Format: "",
// Bitrate: "",
// Media: "",
// Scene: false,
// Year: 1977,
// },
// wantErr: false,
// },
// {
// name: "tracker05_02",
// fields: fields{
// name: "T05",
// ts: nil,
// queues: make(map[string]chan string),
// }, args: args{
// ti: &tracker05,
// line: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD - http://tracker05.test/torrents.php?id=72158898 / http://tracker05.test/torrents.php?action=download&id=29910415 - 1990s, folk, world_music, celtic",
// },
// want: &domain.Announce{
// ReleaseType: "Album",
// Name1: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name2: "Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Freeleech: false,
// TorrentName: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// TorrentUrl: "https://tracker05.test/torrents.php?action=download&id=29910415&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
// Site: "T05",
// Tags: "1990s, folk, world music, celtic",
// Log: "true",
// Cue: true,
// Format: "FLAC",
// Bitrate: "Lossless",
// Media: "CD",
// Scene: false,
// Year: 1998,
// },
// wantErr: false,
// },
// }
// for _, tt := range tests {
// t.Run(tt.name, func(t *testing.T) {
// s := &service{
// name: tt.fields.name,
// trackerSvc: tt.fields.ts,
// queues: tt.fields.queues,
// }
//
// announce := domain.Announce{
// Site: tt.fields.name,
// //Line: msg,
// }
// got, err := s.parseSingleLine(tt.args.ti, tt.args.line, &announce)
// if (err != nil) != tt.wantErr {
// t.Errorf("parseSingleLine() error = %v, wantErr %v", err, tt.wantErr)
// return
// }
//
// assert.Equal(t, tt.want, got)
// })
// }
//}
// Test_service_extractReleaseInfo verifies that extractReleaseInfo runs
// without error for representative music and TV release names.
func Test_service_extractReleaseInfo(t *testing.T) {
	type fields struct {
		name   string
		queues map[string]chan string
	}
	type args struct {
		varMap      map[string]string
		releaseName string
	}
	cases := []struct {
		name    string
		fields  fields
		args    args
		wantErr bool
	}{
		{
			name:   "test_01",
			fields: fields{name: "", queues: nil},
			args: args{
				varMap:      map[string]string{},
				releaseName: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
			},
			wantErr: false,
		},
		{
			name:   "test_02",
			fields: fields{name: "", queues: nil},
			args: args{
				varMap:      map[string]string{},
				releaseName: "Lost S06E07 720p WEB-DL DD 5.1 H.264 - LP",
			},
			wantErr: false,
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			svc := &service{
				queues: tc.fields.queues,
			}
			err := svc.extractReleaseInfo(tc.args.varMap, tc.args.releaseName)
			if (err != nil) != tc.wantErr {
				t.Errorf("extractReleaseInfo() error = %v, wantErr %v", err, tc.wantErr)
			}
		})
	}
}

View file

@ -0,0 +1,91 @@
package announce
import (
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/internal/filter"
"github.com/autobrr/autobrr/internal/indexer"
"github.com/autobrr/autobrr/internal/release"
"github.com/rs/zerolog/log"
)
// Service parses incoming IRC announce lines and hands matched releases off
// for processing.
type Service interface {
	// Parse handles one announce line. announceID is composed as
	// server:channel:announcer (see Parse implementation below).
	Parse(announceID string, msg string) error
}

// service implements Service.
type service struct {
	filterSvc  filter.Service  // filter lookup/matching
	indexerSvc indexer.Service // indexer definitions keyed by announce id
	releaseSvc release.Service // downstream release processing
	// queues holds per-channel line queues; not populated by NewService yet.
	queues map[string]chan string
}
// NewService wires up the announce service with its filter, indexer and
// release service dependencies.
func NewService(filterService filter.Service, indexerSvc indexer.Service, releaseService release.Service) Service {
	svc := &service{
		filterSvc:  filterService,
		indexerSvc: indexerSvc,
		releaseSvc: releaseService,
	}
	return svc
}
// Parse parses a single announce line for the indexer identified by
// announceID (server:channel:announcer), matches it against stored filters
// and, on a match, processes the release in a background goroutine.
// Returns nil when no indexer definition or no filter matches.
func (s *service) Parse(announceID string, msg string) error {
	// announceID (server:channel:announcer)
	def := s.indexerSvc.GetIndexerByAnnounce(announceID)
	if def == nil {
		log.Debug().Msgf("could not find indexer definition: %v", announceID)
		return nil
	}

	announce := domain.Announce{
		Site: def.Identifier,
		Line: msg,
	}

	// parse lines
	if def.Parse.Type == "single" {
		// log once at error level; the previous extra Debug line duplicated
		// the same message.
		if err := s.parseLineSingle(def, &announce, msg); err != nil {
			log.Error().Err(err).Msgf("could not parse single line: %v", msg)
			return err
		}
	}

	// TODO: implement multiline parsing

	// find filter
	foundFilter, err := s.filterSvc.FindByIndexerIdentifier(announce)
	if err != nil {
		log.Error().Err(err).Msg("could not find filter")
		return err
	}

	// no filter found, lets return
	if foundFilter == nil {
		log.Debug().Msg("no matching filter found")
		return nil
	}

	announce.Filter = foundFilter

	log.Trace().Msgf("announce: %+v", announce)
	log.Info().Msgf("Matched %v (%v) for %v", announce.TorrentName, announce.Filter.Name, announce.Site)

	// process release in the background; use a goroutine-local err so the
	// goroutine does not write to the enclosing err variable (data race with
	// any later use of it).
	go func() {
		if err := s.releaseSvc.Process(announce); err != nil {
			log.Error().Err(err).Msgf("could not process release: %+v", announce)
		}
	}()

	return nil
}

81
internal/client/http.go Normal file
View file

@ -0,0 +1,81 @@
package client
import (
"crypto/md5"
"encoding/hex"
"fmt"
"io"
"net/http"
"os"
"time"
"github.com/rs/zerolog/log"
)
// DownloadFileResponse is the result of HttpClient.DownloadFile.
type DownloadFileResponse struct {
	// Body points at the HTTP response body. NOTE(review): the body is
	// closed by a defer inside DownloadFile before this is returned —
	// callers likely should read FileName instead; confirm intent.
	Body *io.ReadCloser
	// FileName is the path of the temp file the payload was written to.
	FileName string
}
// HttpClient wraps an *http.Client preconfigured with a request timeout.
type HttpClient struct {
	http *http.Client
}

// NewHttpClient returns an HttpClient whose underlying client enforces a
// 10 second timeout on each request.
func NewHttpClient() *HttpClient {
	return &HttpClient{
		http: &http.Client{
			Timeout: time.Second * 10,
		},
	}
}
// DownloadFile downloads url into a temp file named by the md5 of the url
// and returns the temp file name together with the (already consumed)
// response body. Returns (nil, nil) when url is empty. opts is currently
// unused. The temp file is removed again when the download fails.
func (c *HttpClient) DownloadFile(url string, opts map[string]string) (*DownloadFileResponse, error) {
	if url == "" {
		return nil, nil
	}

	// create md5 hash of url for tmp file
	hash := md5.Sum([]byte(url))
	hashString := hex.EncodeToString(hash[:])
	tmpFileName := fmt.Sprintf("/tmp/%v", hashString)

	log.Debug().Msgf("tmpFileName: %v", tmpFileName)

	// Create the file
	out, err := os.Create(tmpFileName)
	if err != nil {
		return nil, err
	}
	defer out.Close()

	// Use the configured client so the 10s timeout actually applies;
	// the previous http.Get bypassed c.http entirely.
	resp, err := c.http.Get(url)
	if err != nil {
		os.Remove(tmpFileName)
		return nil, fmt.Errorf("could not download file %v: %w", url, err)
	}
	defer resp.Body.Close()

	// The old code returned `nil, err` here with err == nil, silently
	// signalling success on a bad status.
	if resp.StatusCode != http.StatusOK {
		os.Remove(tmpFileName)
		return nil, fmt.Errorf("unexpected status %d downloading %v", resp.StatusCode, url)
	}

	// Write the body to file
	if _, err = io.Copy(out, resp.Body); err != nil {
		os.Remove(tmpFileName)
		return nil, err
	}

	res := DownloadFileResponse{
		Body:     &resp.Body,
		FileName: tmpFileName,
	}
	return &res, nil
}

154
internal/config/config.go Normal file
View file

@ -0,0 +1,154 @@
package config
import (
"errors"
"log"
"os"
"path"
"path/filepath"
"github.com/spf13/viper"
)
// Cfg holds the application configuration read from config.toml.
type Cfg struct {
	Host     string `toml:"host"`     // listen hostname / IP
	Port     int    `toml:"port"`     // listen port
	LogLevel string `toml:"logLevel"` // one of ERROR, DEBUG, INFO, WARN
	LogPath  string `toml:"logPath"`  // log file path; empty logs to stdout
	BaseURL  string `toml:"baseUrl"`  // base URL when served from a subdirectory
}

// Config is the package-level copy of the configuration last loaded by Read.
var Config Cfg
// Defaults returns a Cfg populated with built-in default values. Host
// defaults to the machine hostname, falling back to "localhost" when the
// hostname cannot be determined.
func Defaults() Cfg {
	host, err := os.Hostname()
	if err != nil {
		host = "localhost"
	}
	cfg := Cfg{
		Host:     host,
		Port:     8989,
		LogLevel: "DEBUG",
		LogPath:  "",
		BaseURL:  "/",
	}
	return cfg
}
// writeConfig ensures configPath exists (creating it if needed) and writes a
// commented default config file named configFile into it, unless one is
// already present. Returns any filesystem error encountered.
func writeConfig(configPath string, configFile string) error {
	// cfgPath: renamed from `path`, which shadowed the imported path package.
	cfgPath := filepath.Join(configPath, configFile)

	// check if configPath exists, if not create it
	if _, err := os.Stat(configPath); errors.Is(err, os.ErrNotExist) {
		if err := os.MkdirAll(configPath, os.ModePerm); err != nil {
			log.Println(err)
			return err
		}
	}

	// check if config exists, if not create it with the default template
	if _, err := os.Stat(cfgPath); errors.Is(err, os.ErrNotExist) {
		f, err := os.Create(cfgPath)
		if err != nil { // perm 0666
			// handle failed create
			log.Printf("error creating file: %q", err)
			return err
		}
		defer f.Close()

		_, err = f.WriteString(`# config.toml
# Hostname / IP
#
# Default: "localhost"
#
host = "127.0.0.1"
# Port
#
# Default: 8989
#
port = 8989
# Base url
# Set custom baseUrl eg /autobrr/ to serve in subdirectory.
# Not needed for subdomain, or by accessing with the :port directly.
#
# Optional
#
#baseUrl = "/autobrr/"
# autobrr logs file
# If not defined, logs to stdout
#
# Optional
#
#logPath = "log/autobrr.log"
# Log level
#
# Default: "DEBUG"
#
# Options: "ERROR", "DEBUG", "INFO", "WARN"
#
logLevel = "DEBUG"`)
		if err != nil {
			log.Printf("error writing contents to file: %v %q", configPath, err)
			return err
		}

		// flush to disk before returning success
		return f.Sync()
	}

	return nil
}
// Read loads the configuration. When configPath is non-empty the file is
// created there if missing and loaded from it; otherwise viper searches the
// working directory and the user's config directories. The result is stored
// in the package-level Config and returned. Read errors are logged and the
// defaults are kept; an unmarshal failure is fatal.
func Read(configPath string) Cfg {
	config := Defaults()

	viper.SetConfigType("toml")

	if configPath != "" {
		// Clean only a non-empty path: path.Clean("") returns ".", which
		// previously made this branch unconditional and left the search-path
		// branch below unreachable.
		configPath = path.Clean(configPath)

		// ensure path and config file exist
		if err := writeConfig(configPath, "config.toml"); err != nil {
			log.Printf("write error: %q", err)
		}

		viper.SetConfigFile(path.Join(configPath, "config.toml"))
	} else {
		viper.SetConfigName("config")

		// Search config in directories
		viper.AddConfigPath(".")
		viper.AddConfigPath("$HOME/.config/autobrr")
		viper.AddConfigPath("$HOME/.autobrr")
	}

	// read config
	if err := viper.ReadInConfig(); err != nil {
		log.Printf("config read error: %q", err)
	}

	if err := viper.Unmarshal(&config); err != nil {
		log.Fatalf("Could not unmarshal config file: %v", viper.ConfigFileUsed())
	}

	Config = config
	return config
}

197
internal/database/action.go Normal file
View file

@ -0,0 +1,197 @@
package database
import (
"database/sql"
"github.com/autobrr/autobrr/internal/domain"
"github.com/rs/zerolog/log"
)
// ActionRepo is a SQL-backed implementation of domain.ActionRepo.
type ActionRepo struct {
	db *sql.DB
}

// NewActionRepo returns an ActionRepo backed by db.
func NewActionRepo(db *sql.DB) domain.ActionRepo {
	return &ActionRepo{db: db}
}
// FindByFilterID returns all actions attached to the given filter.
func (r *ActionRepo) FindByFilterID(filterID int) ([]domain.Action, error) {
	rows, err := r.db.Query("SELECT id, name, type, enabled, exec_cmd, exec_args, watch_folder, category, tags, label, save_path, paused, ignore_rules, limit_download_speed, limit_upload_speed, client_id FROM action WHERE action.filter_id = ?", filterID)
	if err != nil {
		// Return the error instead of log.Fatal: zerolog's Fatal event
		// without a terminating Msg neither logs nor exits, and a repository
		// should not kill the process anyway.
		log.Error().Err(err).Msg("could not query actions by filter id")
		return nil, err
	}
	defer rows.Close()

	var actions []domain.Action
	for rows.Next() {
		var a domain.Action

		// nullable columns
		var execCmd, execArgs, watchFolder, category, tags, label, savePath sql.NullString
		var limitUl, limitDl sql.NullInt64
		var clientID sql.NullInt32
		var paused, ignoreRules sql.NullBool

		if err := rows.Scan(&a.ID, &a.Name, &a.Type, &a.Enabled, &execCmd, &execArgs, &watchFolder, &category, &tags, &label, &savePath, &paused, &ignoreRules, &limitDl, &limitUl, &clientID); err != nil {
			log.Error().Err(err).Msg("could not scan action row")
			return nil, err
		}

		a.ExecCmd = execCmd.String
		a.ExecArgs = execArgs.String
		a.WatchFolder = watchFolder.String
		a.Category = category.String
		a.Tags = tags.String
		a.Label = label.String
		a.SavePath = savePath.String
		a.Paused = paused.Bool
		a.IgnoreRules = ignoreRules.Bool
		a.LimitUploadSpeed = limitUl.Int64
		a.LimitDownloadSpeed = limitDl.Int64
		a.ClientID = clientID.Int32

		actions = append(actions, a)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}

	return actions, nil
}
// List returns all stored actions.
func (r *ActionRepo) List() ([]domain.Action, error) {
	rows, err := r.db.Query("SELECT id, name, type, enabled, exec_cmd, exec_args, watch_folder, category, tags, label, save_path, paused, ignore_rules, limit_download_speed, limit_upload_speed, client_id FROM action")
	if err != nil {
		// Return the error instead of a no-op log.Fatal (zerolog events
		// without Msg are dropped).
		log.Error().Err(err).Msg("could not query actions")
		return nil, err
	}
	defer rows.Close()

	var actions []domain.Action
	for rows.Next() {
		var a domain.Action

		// nullable columns
		var execCmd, execArgs, watchFolder, category, tags, label, savePath sql.NullString
		var limitUl, limitDl sql.NullInt64
		var clientID sql.NullInt32
		var paused, ignoreRules sql.NullBool

		if err := rows.Scan(&a.ID, &a.Name, &a.Type, &a.Enabled, &execCmd, &execArgs, &watchFolder, &category, &tags, &label, &savePath, &paused, &ignoreRules, &limitDl, &limitUl, &clientID); err != nil {
			log.Error().Err(err).Msg("could not scan action row")
			return nil, err
		}

		// Populate all nullable fields; ExecCmd/ExecArgs/WatchFolder were
		// previously dropped here (unlike FindByFilterID).
		a.ExecCmd = execCmd.String
		a.ExecArgs = execArgs.String
		a.WatchFolder = watchFolder.String
		a.Category = category.String
		a.Tags = tags.String
		a.Label = label.String
		a.SavePath = savePath.String
		a.Paused = paused.Bool
		a.IgnoreRules = ignoreRules.Bool
		a.LimitUploadSpeed = limitUl.Int64
		a.LimitDownloadSpeed = limitDl.Int64
		a.ClientID = clientID.Int32

		actions = append(actions, a)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}

	return actions, nil
}
// Delete removes the action with the given id.
func (r *ActionRepo) Delete(actionID int) error {
	result, err := r.db.Exec(`DELETE FROM action WHERE action.id = ?`, actionID)
	if err != nil {
		return err
	}
	affected, _ := result.RowsAffected()
	log.Info().Msgf("rows affected %v", affected)
	return nil
}
// Store inserts the action, or updates it in place when action.ID is set,
// and returns the stored action (with ID populated on insert).
func (r *ActionRepo) Store(action domain.Action) (*domain.Action, error) {
	// map empty/zero values to SQL NULL
	execCmd := toNullString(action.ExecCmd)
	execArgs := toNullString(action.ExecArgs)
	watchFolder := toNullString(action.WatchFolder)
	category := toNullString(action.Category)
	tags := toNullString(action.Tags)
	label := toNullString(action.Label)
	savePath := toNullString(action.SavePath)
	limitDL := toNullInt64(action.LimitDownloadSpeed)
	limitUL := toNullInt64(action.LimitUploadSpeed)
	clientID := toNullInt32(action.ClientID)
	filterID := toNullInt32(int32(action.FilterID))

	if action.ID != 0 {
		log.Info().Msg("UPDATE existing record")
		_, err := r.db.Exec(`UPDATE action SET name = ?, type = ?, enabled = ?, exec_cmd = ?, exec_args = ?, watch_folder = ? , category =? , tags = ?, label = ?, save_path = ?, paused = ?, ignore_rules = ?, limit_upload_speed = ?, limit_download_speed = ?, client_id = ?
			WHERE id = ?`, action.Name, action.Type, action.Enabled, execCmd, execArgs, watchFolder, category, tags, label, savePath, action.Paused, action.IgnoreRules, limitUL, limitDL, clientID, action.ID)
		if err != nil {
			// previously this error was silently dropped and the action
			// returned as if the update had succeeded
			log.Error().Err(err).Msg("could not update action")
			return nil, err
		}
	} else {
		res, err := r.db.Exec(`INSERT INTO action(name, type, enabled, exec_cmd, exec_args, watch_folder, category, tags, label, save_path, paused, ignore_rules, limit_upload_speed, limit_download_speed, client_id, filter_id)
			VALUES (?, ?, ?, ?, ?,? ,?, ?,?,?,?,?,?,?,?,?) ON CONFLICT DO NOTHING`, action.Name, action.Type, action.Enabled, execCmd, execArgs, watchFolder, category, tags, label, savePath, action.Paused, action.IgnoreRules, limitUL, limitDL, clientID, filterID)
		if err != nil {
			log.Error().Err(err).Msg("could not insert action")
			return nil, err
		}

		resId, _ := res.LastInsertId()
		log.Info().Msgf("LAST INSERT ID %v", resId)
		action.ID = int(resId)
	}

	return &action, nil
}
// ToggleEnabled flips the enabled flag of the action with the given id.
func (r *ActionRepo) ToggleEnabled(actionID int) error {
	res, err := r.db.Exec(`UPDATE action SET enabled = NOT enabled WHERE id = ?`, actionID)
	if err != nil {
		log.Error().Err(err).Msg("could not toggle action enabled")
		return err
	}

	// RowsAffected is the meaningful result of an UPDATE; the previous
	// LastInsertId call is only defined for INSERTs.
	affected, _ := res.RowsAffected()
	log.Info().Msgf("rows affected %v", affected)

	return nil
}
// toNullString wraps s as a sql.NullString, treating "" as NULL.
func toNullString(s string) sql.NullString {
	valid := len(s) > 0
	return sql.NullString{String: s, Valid: valid}
}

// toNullInt32 wraps s as a sql.NullInt32, treating 0 as NULL.
func toNullInt32(s int32) sql.NullInt32 {
	valid := s != 0
	return sql.NullInt32{Int32: s, Valid: valid}
}

// toNullInt64 wraps s as a sql.NullInt64, treating 0 as NULL.
func toNullInt64(s int64) sql.NullInt64 {
	valid := s != 0
	return sql.NullInt64{Int64: s, Valid: valid}
}

View file

@ -0,0 +1,19 @@
package database
import (
"database/sql"
"github.com/autobrr/autobrr/internal/domain"
)
// AnnounceRepo is a SQL-backed implementation of domain.AnnounceRepo.
// Persistence is not implemented yet.
type AnnounceRepo struct {
	db *sql.DB
}

// NewAnnounceRepo returns an AnnounceRepo backed by db.
func NewAnnounceRepo(db *sql.DB) domain.AnnounceRepo {
	return &AnnounceRepo{db: db}
}

// Store is a stub: it discards the announce and always returns nil.
func (a *AnnounceRepo) Store(announce domain.Announce) error {
	return nil
}

View file

@ -0,0 +1,134 @@
package database
import (
"database/sql"
"github.com/autobrr/autobrr/internal/domain"
"github.com/rs/zerolog/log"
)
// DownloadClientRepo is a SQL-backed implementation of
// domain.DownloadClientRepo.
type DownloadClientRepo struct {
	db *sql.DB
}

// NewDownloadClientRepo returns a DownloadClientRepo backed by db.
func NewDownloadClientRepo(db *sql.DB) domain.DownloadClientRepo {
	return &DownloadClientRepo{db: db}
}
// List returns all stored download clients (empty, non-nil slice when none).
func (r *DownloadClientRepo) List() ([]domain.DownloadClient, error) {
	rows, err := r.db.Query("SELECT id, name, type, enabled, host, port, ssl, username, password FROM client")
	if err != nil {
		// Return the error instead of a no-op log.Fatal (zerolog events
		// without Msg are dropped and Fatal never fires).
		log.Error().Err(err).Msg("could not query download clients")
		return nil, err
	}
	defer rows.Close()

	clients := make([]domain.DownloadClient, 0)
	for rows.Next() {
		var f domain.DownloadClient
		if err := rows.Scan(&f.ID, &f.Name, &f.Type, &f.Enabled, &f.Host, &f.Port, &f.SSL, &f.Username, &f.Password); err != nil {
			log.Error().Err(err).Msg("could not scan download client row")
			return nil, err
		}
		clients = append(clients, f)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}

	return clients, nil
}
// FindByID returns the download client with the given id, or an error when
// the row is missing or cannot be scanned.
func (r *DownloadClientRepo) FindByID(id int32) (*domain.DownloadClient, error) {
	query := `
		SELECT id, name, type, enabled, host, port, ssl, username, password FROM client WHERE id = ?
	`
	row := r.db.QueryRow(query, id)
	if err := row.Err(); err != nil {
		return nil, err
	}

	var client domain.DownloadClient

	if err := row.Scan(&client.ID, &client.Name, &client.Type, &client.Enabled, &client.Host, &client.Port, &client.SSL, &client.Username, &client.Password); err != nil {
		log.Error().Err(err).Msg("could not scan download client to struct")
		return nil, err
	}

	return &client, nil
}
// FindByActionID returns the download clients linked to the given action via
// the action_client join table.
func (r *DownloadClientRepo) FindByActionID(actionID int) ([]domain.DownloadClient, error) {
	rows, err := r.db.Query("SELECT id, name, type, enabled, host, port, ssl, username, password FROM client, action_client WHERE client.id = action_client.client_id AND action_client.action_id = ?", actionID)
	if err != nil {
		// Return the error instead of a no-op log.Fatal (zerolog events
		// without Msg are dropped).
		log.Error().Err(err).Msg("could not query download clients by action id")
		return nil, err
	}
	defer rows.Close()

	var clients []domain.DownloadClient
	for rows.Next() {
		var f domain.DownloadClient
		if err := rows.Scan(&f.ID, &f.Name, &f.Type, &f.Enabled, &f.Host, &f.Port, &f.SSL, &f.Username, &f.Password); err != nil {
			log.Error().Err(err).Msg("could not scan download client row")
			return nil, err
		}
		clients = append(clients, f)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}

	return clients, nil
}
// Store inserts the download client, or updates it in place when client.ID is
// set, and returns the stored client (with ID populated on insert).
func (r *DownloadClientRepo) Store(client domain.DownloadClient) (*domain.DownloadClient, error) {
	if client.ID != 0 {
		log.Info().Msg("UPDATE existing record")
		_, err := r.db.Exec(`UPDATE client SET name = ?, type = ?, enabled = ?, host = ?, port = ?, ssl = ?, username = ?, password = ? WHERE id = ?`, client.Name, client.Type, client.Enabled, client.Host, client.Port, client.SSL, client.Username, client.Password, client.ID)
		if err != nil {
			// previously this error was silently dropped
			log.Error().Err(err).Msg("could not update download client")
			return nil, err
		}
	} else {
		res, err := r.db.Exec(`INSERT INTO client(name, type, enabled, host, port, ssl, username, password)
			VALUES (?, ?, ?, ?, ?, ? , ?, ?) ON CONFLICT DO NOTHING`, client.Name, client.Type, client.Enabled, client.Host, client.Port, client.SSL, client.Username, client.Password)
		if err != nil {
			log.Error().Err(err).Msg("could not insert download client")
			return nil, err
		}

		resId, _ := res.LastInsertId()
		log.Info().Msgf("LAST INSERT ID %v", resId)
		client.ID = int(resId)
	}

	return &client, nil
}
// Delete removes the download client with the given id.
func (r *DownloadClientRepo) Delete(clientID int) error {
	result, err := r.db.Exec(`DELETE FROM client WHERE client.id = ?`, clientID)
	if err != nil {
		return err
	}
	affected, _ := result.RowsAffected()
	log.Info().Msgf("rows affected %v", affected)
	return nil
}

441
internal/database/filter.go Normal file
View file

@ -0,0 +1,441 @@
package database
import (
"database/sql"
"strings"
"time"
"github.com/lib/pq"
"github.com/rs/zerolog/log"
"github.com/autobrr/autobrr/internal/domain"
)
// FilterRepo is a SQL-backed implementation of domain.FilterRepo.
type FilterRepo struct {
	db *sql.DB
}

// NewFilterRepo returns a FilterRepo backed by db.
func NewFilterRepo(db *sql.DB) domain.FilterRepo {
	return &FilterRepo{db: db}
}
// ListFilters returns a summary (id, enabled, name, match/except releases,
// timestamps) of every stored filter.
func (r *FilterRepo) ListFilters() ([]domain.Filter, error) {
	rows, err := r.db.Query("SELECT id, enabled, name, match_releases, except_releases, created_at, updated_at FROM filter")
	if err != nil {
		// Return the error instead of a no-op log.Fatal (zerolog events
		// without Msg are dropped).
		log.Error().Stack().Err(err).Msg("filters_list: error querying filters")
		return nil, err
	}
	defer rows.Close()

	var filters []domain.Filter
	for rows.Next() {
		var f domain.Filter
		var matchReleases, exceptReleases sql.NullString
		var createdAt, updatedAt string

		if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &matchReleases, &exceptReleases, &createdAt, &updatedAt); err != nil {
			log.Error().Stack().Err(err).Msg("filters_list: error scanning data to struct")
			return nil, err
		}

		f.MatchReleases = matchReleases.String
		f.ExceptReleases = exceptReleases.String

		// timestamps are stored as RFC3339 strings; parse failures leave
		// zero times
		ua, _ := time.Parse(time.RFC3339, updatedAt)
		ca, _ := time.Parse(time.RFC3339, createdAt)
		f.UpdatedAt = ua
		f.CreatedAt = ca

		filters = append(filters, f)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}

	return filters, nil
}
// FindByID returns the fully-populated filter with the given id.
func (r *FilterRepo) FindByID(filterID int) (*domain.Filter, error) {
	row := r.db.QueryRow("SELECT id, enabled, name, min_size, max_size, delay, match_releases, except_releases, use_regex, match_release_groups, except_release_groups, scene, freeleech, freeleech_percent, shows, seasons, episodes, resolutions, codecs, sources, containers, years, match_categories, except_categories, match_uploaders, except_uploaders, tags, except_tags, created_at, updated_at FROM filter WHERE id = ?", filterID)

	if err := row.Err(); err != nil {
		return nil, err
	}

	var f domain.Filter

	// nullable columns
	var minSize, maxSize, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, freeleechPercent, shows, seasons, episodes, years, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags sql.NullString
	var useRegex, scene, freeleech sql.NullBool
	var delay sql.NullInt32
	var createdAt, updatedAt string

	if err := row.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), &years, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, &createdAt, &updatedAt); err != nil {
		log.Error().Stack().Err(err).Msgf("filter: %v : error scanning data to struct", filterID)
		return nil, err
	}

	f.MinSize = minSize.String
	f.MaxSize = maxSize.String
	f.Delay = int(delay.Int32)
	f.MatchReleases = matchReleases.String
	f.ExceptReleases = exceptReleases.String
	f.MatchReleaseGroups = matchReleaseGroups.String
	f.ExceptReleaseGroups = exceptReleaseGroups.String
	f.FreeleechPercent = freeleechPercent.String
	f.Shows = shows.String
	f.Seasons = seasons.String
	// bugfix: Episodes was assigned from minSize (copy-paste error)
	f.Episodes = episodes.String
	f.Years = years.String
	f.MatchCategories = matchCategories.String
	f.ExceptCategories = exceptCategories.String
	f.MatchUploaders = matchUploaders.String
	f.ExceptUploaders = exceptUploaders.String
	f.Tags = tags.String
	f.ExceptTags = exceptTags.String
	f.UseRegex = useRegex.Bool
	f.Scene = scene.Bool
	f.Freeleech = freeleech.Bool

	// timestamps are stored as RFC3339 strings
	updatedTime, _ := time.Parse(time.RFC3339, updatedAt)
	createdTime, _ := time.Parse(time.RFC3339, createdAt)
	f.UpdatedAt = updatedTime
	f.CreatedAt = createdTime

	return &f, nil
}
// FindFiltersForSite returns the filters whose match_sites column matches
// site.
// TODO remove: superseded by FindByIndexerIdentifier.
func (r *FilterRepo) FindFiltersForSite(site string) ([]domain.Filter, error) {
	rows, err := r.db.Query("SELECT id, enabled, name, match_releases, except_releases, created_at, updated_at FROM filter WHERE match_sites LIKE ?", site)
	if err != nil {
		// Return the error instead of a no-op log.Fatal (zerolog events
		// without Msg are dropped).
		log.Error().Err(err).Msg("could not query filters for site")
		return nil, err
	}
	defer rows.Close()

	var filters []domain.Filter
	for rows.Next() {
		var f domain.Filter
		if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, pq.Array(&f.MatchReleases), pq.Array(&f.ExceptReleases), &f.CreatedAt, &f.UpdatedAt); err != nil {
			log.Error().Stack().Err(err).Msg("error scanning data to struct")
			return nil, err
		}
		filters = append(filters, f)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}

	return filters, nil
}
// FindByIndexerIdentifier returns all filters linked (via filter_indexer) to
// the indexer with the given identifier.
func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, error) {
	rows, err := r.db.Query(`
		SELECT
		       f.id,
		       f.enabled,
		       f.name,
		       f.min_size,
		       f.max_size,
		       f.delay,
		       f.match_releases,
		       f.except_releases,
		       f.use_regex,
		       f.match_release_groups,
		       f.except_release_groups,
		       f.scene,
		       f.freeleech,
		       f.freeleech_percent,
		       f.shows,
		       f.seasons,
		       f.episodes,
		       f.resolutions,
		       f.codecs,
		       f.sources,
		       f.containers,
		       f.years,
		       f.match_categories,
		       f.except_categories,
		       f.match_uploaders,
		       f.except_uploaders,
		       f.tags,
		       f.except_tags,
		       f.created_at,
		       f.updated_at
		FROM filter f
		JOIN filter_indexer fi on f.id = fi.filter_id
		JOIN indexer i on i.id = fi.indexer_id
		WHERE i.identifier = ?`, indexer)
	if err != nil {
		// Return the error instead of a no-op log.Fatal (zerolog events
		// without Msg are dropped).
		log.Error().Err(err).Msg("could not query filters by indexer identifier")
		return nil, err
	}
	defer rows.Close()

	var filters []domain.Filter
	for rows.Next() {
		var f domain.Filter

		// nullable columns
		var minSize, maxSize, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, freeleechPercent, shows, seasons, episodes, years, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags sql.NullString
		var useRegex, scene, freeleech sql.NullBool
		var delay sql.NullInt32
		var createdAt, updatedAt string

		if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), &years, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, &createdAt, &updatedAt); err != nil {
			log.Error().Stack().Err(err).Msg("error scanning data to struct")
			return nil, err
		}

		f.MinSize = minSize.String
		f.MaxSize = maxSize.String
		f.Delay = int(delay.Int32)
		f.MatchReleases = matchReleases.String
		f.ExceptReleases = exceptReleases.String
		f.MatchReleaseGroups = matchReleaseGroups.String
		f.ExceptReleaseGroups = exceptReleaseGroups.String
		f.FreeleechPercent = freeleechPercent.String
		f.Shows = shows.String
		f.Seasons = seasons.String
		// bugfix: Episodes was assigned from minSize (copy-paste error)
		f.Episodes = episodes.String
		f.Years = years.String
		f.MatchCategories = matchCategories.String
		f.ExceptCategories = exceptCategories.String
		f.MatchUploaders = matchUploaders.String
		f.ExceptUploaders = exceptUploaders.String
		f.Tags = tags.String
		f.ExceptTags = exceptTags.String
		f.UseRegex = useRegex.Bool
		f.Scene = scene.Bool
		f.Freeleech = freeleech.Bool

		// timestamps are stored as RFC3339 strings
		updatedTime, _ := time.Parse(time.RFC3339, updatedAt)
		createdTime, _ := time.Parse(time.RFC3339, createdAt)
		f.UpdatedAt = updatedTime
		f.CreatedAt = createdTime

		filters = append(filters, f)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}

	return filters, nil
}
// Store persists a new filter and returns it with its ID populated.
// NOTE(review): when filter.ID != 0 this branch only logs and performs no
// UPDATE — the separate Update method appears to handle that case; confirm
// this is intentional before relying on Store for updates.
func (r *FilterRepo) Store(filter domain.Filter) (*domain.Filter, error) {
	var err error
	if filter.ID != 0 {
		log.Debug().Msg("update existing record")
	} else {
		var res sql.Result

		res, err = r.db.Exec(`INSERT INTO filter (
			name,
			enabled,
			min_size,
			max_size,
			delay,
			match_releases,
			except_releases,
			use_regex,
			match_release_groups,
			except_release_groups,
			scene,
			freeleech,
			freeleech_percent,
			shows,
			seasons,
			episodes,
			resolutions,
			codecs,
			sources,
			containers,
			years,
			match_categories,
			except_categories,
			match_uploaders,
			except_uploaders,
			tags,
			except_tags
		)
		VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, $21, $22, $23, $24, $25, $26, $27) ON CONFLICT DO NOTHING`,
			filter.Name,
			filter.Enabled,
			filter.MinSize,
			filter.MaxSize,
			filter.Delay,
			filter.MatchReleases,
			filter.ExceptReleases,
			filter.UseRegex,
			filter.MatchReleaseGroups,
			filter.ExceptReleaseGroups,
			filter.Scene,
			filter.Freeleech,
			filter.FreeleechPercent,
			filter.Shows,
			filter.Seasons,
			filter.Episodes,
			// array-valued columns go through pq.Array
			pq.Array(filter.Resolutions),
			pq.Array(filter.Codecs),
			pq.Array(filter.Sources),
			pq.Array(filter.Containers),
			filter.Years,
			filter.MatchCategories,
			filter.ExceptCategories,
			filter.MatchUploaders,
			filter.ExceptUploaders,
			filter.Tags,
			filter.ExceptTags,
		)
		if err != nil {
			log.Error().Stack().Err(err).Msg("error executing query")
			return nil, err
		}

		resId, _ := res.LastInsertId()
		filter.ID = int(resId)
	}

	return &filter, nil
}
// Update overwrites every mutable column of an existing filter row, selected
// by filter.ID, and bumps updated_at.
func (r *FilterRepo) Update(filter domain.Filter) (*domain.Filter, error) {
	query := `
UPDATE filter SET
name = ?,
enabled = ?,
min_size = ?,
max_size = ?,
delay = ?,
match_releases = ?,
except_releases = ?,
use_regex = ?,
match_release_groups = ?,
except_release_groups = ?,
scene = ?,
freeleech = ?,
freeleech_percent = ?,
shows = ?,
seasons = ?,
episodes = ?,
resolutions = ?,
codecs = ?,
sources = ?,
containers = ?,
years = ?,
match_categories = ?,
except_categories = ?,
match_uploaders = ?,
except_uploaders = ?,
tags = ?,
except_tags = ?,
updated_at = CURRENT_TIMESTAMP
WHERE id = ?`

	args := []interface{}{
		filter.Name,
		filter.Enabled,
		filter.MinSize,
		filter.MaxSize,
		filter.Delay,
		filter.MatchReleases,
		filter.ExceptReleases,
		filter.UseRegex,
		filter.MatchReleaseGroups,
		filter.ExceptReleaseGroups,
		filter.Scene,
		filter.Freeleech,
		filter.FreeleechPercent,
		filter.Shows,
		filter.Seasons,
		filter.Episodes,
		pq.Array(filter.Resolutions),
		pq.Array(filter.Codecs),
		pq.Array(filter.Sources),
		pq.Array(filter.Containers),
		filter.Years,
		filter.MatchCategories,
		filter.ExceptCategories,
		filter.MatchUploaders,
		filter.ExceptUploaders,
		filter.Tags,
		filter.ExceptTags,
		filter.ID,
	}

	if _, err := r.db.Exec(query, args...); err != nil {
		log.Error().Stack().Err(err).Msg("error executing query")
		return nil, err
	}

	return &filter, nil
}
// StoreIndexerConnection links an indexer to a filter in the join table.
func (r *FilterRepo) StoreIndexerConnection(filterID int, indexerID int) error {
	_, err := r.db.Exec(`INSERT INTO filter_indexer (filter_id, indexer_id) VALUES ($1, $2)`, filterID, indexerID)
	if err != nil {
		log.Error().Stack().Err(err).Msg("error executing query")
		return err
	}

	return nil
}
// DeleteIndexerConnections removes every indexer link for the given filter.
func (r *FilterRepo) DeleteIndexerConnections(filterID int) error {
	if _, err := r.db.Exec(`DELETE FROM filter_indexer WHERE filter_id = ?`, filterID); err != nil {
		log.Error().Stack().Err(err).Msg("error executing query")
		return err
	}

	return nil
}
// Delete removes the filter row with the given id. Deleting a non-existent
// id is not an error; the affected row count is logged for visibility.
func (r *FilterRepo) Delete(filterID int) error {
	res, err := r.db.Exec(`DELETE FROM filter WHERE id = ?`, filterID)
	if err != nil {
		log.Error().Stack().Err(err).Msg("error executing query")
		return err
	}

	// FIX: RowsAffected error was previously discarded with `_`
	rows, err := res.RowsAffected()
	if err != nil {
		log.Error().Stack().Err(err).Msg("error getting rows affected")
		return err
	}

	log.Info().Msgf("rows affected %v", rows)

	return nil
}
// stringToSlice splits a comma-separated string into a slice. An empty input
// yields an empty (non-nil) slice so JSON encodes it as [] rather than null.
// The previous explicit single-element branch was redundant: strings.Split
// already returns a one-element slice when the separator is absent.
func stringToSlice(str string) []string {
	if str == "" {
		return []string{}
	}

	return strings.Split(str, ",")
}

View file

@ -0,0 +1,152 @@
package database
import (
"database/sql"
"encoding/json"
"github.com/autobrr/autobrr/internal/domain"
"github.com/rs/zerolog/log"
)
// IndexerRepo provides SQLite-backed persistence for indexer configurations.
type IndexerRepo struct {
	db *sql.DB
}

// NewIndexerRepo wires an IndexerRepo to the given database handle.
func NewIndexerRepo(db *sql.DB) domain.IndexerRepo {
	return &IndexerRepo{
		db: db,
	}
}
// Store inserts a new indexer row; the settings map is persisted as a JSON blob.
func (r *IndexerRepo) Store(indexer domain.Indexer) (*domain.Indexer, error) {
	settings, err := json.Marshal(indexer.Settings)
	if err != nil {
		log.Error().Stack().Err(err).Msg("error marshaling json data")
		return nil, err
	}

	if _, err = r.db.Exec(`INSERT INTO indexer (enabled, name, identifier, settings) VALUES (?, ?, ?, ?)`, indexer.Enabled, indexer.Name, indexer.Identifier, settings); err != nil {
		log.Error().Stack().Err(err).Msg("error executing query")
		return nil, err
	}

	return &indexer, nil
}
// Update persists changes to an existing indexer row and refreshes
// updated_at. Settings are stored as a JSON blob.
// NOTE(review): identifier is not part of the UPDATE — confirm it is meant
// to be immutable after creation.
func (r *IndexerRepo) Update(indexer domain.Indexer) (*domain.Indexer, error) {
	sett, err := json.Marshal(indexer.Settings)
	if err != nil {
		log.Error().Stack().Err(err).Msg("error marshaling json data")
		return nil, err
	}
	_, err = r.db.Exec(`UPDATE indexer SET enabled = ?, name = ?, settings = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?`, indexer.Enabled, indexer.Name, sett, indexer.ID)
	if err != nil {
		log.Error().Stack().Err(err).Msg("error executing query")
		return nil, err
	}
	return &indexer, nil
}
// List returns all indexers with their settings decoded from JSON.
//
// FIX: the previous error handling used log.Fatal().Err(err) with no Msg()
// call — in zerolog an event is never written (and Fatal never exits) until
// Msg/Send is called, so a query error was silently ignored and execution
// continued with nil rows. Errors are now logged and returned.
func (r *IndexerRepo) List() ([]domain.Indexer, error) {
	rows, err := r.db.Query("SELECT id, enabled, name, identifier, settings FROM indexer")
	if err != nil {
		log.Error().Stack().Err(err).Msg("indexer.list: error executing query")
		return nil, err
	}
	defer rows.Close()

	var indexers []domain.Indexer
	for rows.Next() {
		var f domain.Indexer

		var settings string
		var settingsMap map[string]string

		if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &f.Identifier, &settings); err != nil {
			log.Error().Stack().Err(err).Msg("indexer.list: error scanning data to struct")
			return nil, err
		}

		if err = json.Unmarshal([]byte(settings), &settingsMap); err != nil {
			log.Error().Stack().Err(err).Msg("indexer.list: error unmarshal settings")
			return nil, err
		}
		f.Settings = settingsMap

		indexers = append(indexers, f)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}

	return indexers, nil
}
// FindByFilterID returns the indexers connected to a filter through the
// filter_indexer join table. Settings are intentionally not loaded here.
//
// FIX: log.Fatal().Err(err) without Msg() never emits in zerolog, so query
// and scan errors were silently swallowed; they are now logged and returned.
func (r *IndexerRepo) FindByFilterID(id int) ([]domain.Indexer, error) {
	rows, err := r.db.Query(`
SELECT i.id, i.enabled, i.name, i.identifier
FROM indexer i
JOIN filter_indexer fi on i.id = fi.indexer_id
WHERE fi.filter_id = ?`, id)
	if err != nil {
		log.Error().Stack().Err(err).Msg("indexer.findByFilterID: error executing query")
		return nil, err
	}
	defer rows.Close()

	var indexers []domain.Indexer
	for rows.Next() {
		var f domain.Indexer

		if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &f.Identifier); err != nil {
			log.Error().Stack().Err(err).Msg("indexer.findByFilterID: error scanning data to struct")
			return nil, err
		}

		indexers = append(indexers, f)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}

	return indexers, nil
}
// Delete removes the indexer row with the given id and logs the affected count.
func (r *IndexerRepo) Delete(id int) error {
	res, err := r.db.Exec(`DELETE FROM indexer WHERE id = ?`, id)
	if err != nil {
		log.Error().Stack().Err(err).Msg("error executing query")
		return err
	}

	rows, _ := res.RowsAffected()
	log.Info().Msgf("rows affected %v", rows)

	return nil
}

277
internal/database/irc.go Normal file
View file

@ -0,0 +1,277 @@
package database
import (
"context"
"database/sql"
"strings"
"github.com/autobrr/autobrr/internal/domain"
"github.com/rs/zerolog/log"
)
// IrcRepo provides SQLite-backed persistence for IRC networks and channels.
type IrcRepo struct {
	db *sql.DB
}

// NewIrcRepo wires an IrcRepo to the given database handle.
func NewIrcRepo(db *sql.DB) domain.IrcRepo {
	return &IrcRepo{db: db}
}

// Store is a stub: announces are currently not persisted.
func (ir *IrcRepo) Store(announce domain.Announce) error {
	return nil
}
// GetNetworkByID fetches a single IRC network by id, including SASL
// settings. Channels are loaded separately via ListChannels.
//
// FIX: log.Fatal().Err(err) without Msg() never emits in zerolog, so a scan
// failure previously returned a partially-populated struct with a nil error.
func (ir *IrcRepo) GetNetworkByID(id int64) (*domain.IrcNetwork, error) {
	row := ir.db.QueryRow("SELECT id, enabled, name, addr, tls, nick, pass, connect_commands, sasl_mechanism, sasl_plain_username, sasl_plain_password FROM irc_network WHERE id = ?", id)
	if err := row.Err(); err != nil {
		log.Error().Stack().Err(err).Msg("irc.getNetworkByID: error executing query")
		return nil, err
	}

	var n domain.IrcNetwork

	var pass, connectCommands sql.NullString
	var saslMechanism, saslPlainUsername, saslPlainPassword sql.NullString
	var tls sql.NullBool

	if err := row.Scan(&n.ID, &n.Enabled, &n.Name, &n.Addr, &tls, &n.Nick, &pass, &connectCommands, &saslMechanism, &saslPlainUsername, &saslPlainPassword); err != nil {
		log.Error().Stack().Err(err).Msg("irc.getNetworkByID: error scanning row")
		return nil, err
	}

	n.TLS = tls.Bool
	n.Pass = pass.String

	// connect commands are stored CRLF-joined in a single column
	if connectCommands.Valid {
		n.ConnectCommands = strings.Split(connectCommands.String, "\r\n")
	}

	n.SASL.Mechanism = saslMechanism.String
	n.SASL.Plain.Username = saslPlainUsername.String
	n.SASL.Plain.Password = saslPlainPassword.String

	return &n, nil
}
// DeleteNetwork removes an IRC network and its channels in one transaction;
// the deferred Rollback is a no-op once Commit succeeds.
func (ir *IrcRepo) DeleteNetwork(ctx context.Context, id int64) error {
	tx, err := ir.db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	defer tx.Rollback()

	if _, err = tx.ExecContext(ctx, `DELETE FROM irc_network WHERE id = ?`, id); err != nil {
		log.Error().Stack().Err(err).Msgf("error deleting network: %v", id)
		return err
	}

	if _, err = tx.ExecContext(ctx, `DELETE FROM irc_channel WHERE network_id = ?`, id); err != nil {
		log.Error().Stack().Err(err).Msgf("error deleting channels for network: %v", id)
		return err
	}

	if err = tx.Commit(); err != nil {
		log.Error().Stack().Err(err).Msgf("error deleting network: %v", id)
		return err
	}

	return nil
}
// ListNetworks returns every configured IRC network (channels not included).
//
// FIX: log.Fatal().Err(err) without Msg() never emits in zerolog, so a query
// error previously continued into rows iteration with nil rows; errors are
// now logged and returned.
func (ir *IrcRepo) ListNetworks(ctx context.Context) ([]domain.IrcNetwork, error) {
	rows, err := ir.db.QueryContext(ctx, "SELECT id, enabled, name, addr, tls, nick, pass, connect_commands FROM irc_network")
	if err != nil {
		log.Error().Stack().Err(err).Msg("irc.listNetworks: error executing query")
		return nil, err
	}
	defer rows.Close()

	var networks []domain.IrcNetwork
	for rows.Next() {
		var net domain.IrcNetwork

		var pass, connectCommands sql.NullString
		var tls sql.NullBool

		if err := rows.Scan(&net.ID, &net.Enabled, &net.Name, &net.Addr, &tls, &net.Nick, &pass, &connectCommands); err != nil {
			log.Error().Stack().Err(err).Msg("irc.listNetworks: error scanning row")
			return nil, err
		}
		net.TLS = tls.Bool
		net.Pass = pass.String

		// connect commands are stored CRLF-joined in a single column
		if connectCommands.Valid {
			net.ConnectCommands = strings.Split(connectCommands.String, "\r\n")
		}

		networks = append(networks, net)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}

	return networks, nil
}
// ListChannels returns the channels stored for a given network.
//
// FIX: log.Fatal().Err(err) without Msg() never emits in zerolog, so query
// and scan errors were silently swallowed; they are now logged and returned.
func (ir *IrcRepo) ListChannels(networkID int64) ([]domain.IrcChannel, error) {
	rows, err := ir.db.Query("SELECT id, name, enabled FROM irc_channel WHERE network_id = ?", networkID)
	if err != nil {
		log.Error().Stack().Err(err).Msg("irc.listChannels: error executing query")
		return nil, err
	}
	defer rows.Close()

	var channels []domain.IrcChannel
	for rows.Next() {
		var ch domain.IrcChannel

		if err := rows.Scan(&ch.ID, &ch.Name, &ch.Enabled); err != nil {
			log.Error().Stack().Err(err).Msg("irc.listChannels: error scanning row")
			return nil, err
		}

		channels = append(channels, ch)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}

	return channels, nil
}
// StoreNetwork inserts or updates an IRC network depending on whether
// network.ID is set. Connect commands are persisted CRLF-joined in a single
// column; SASL credentials are only stored for the PLAIN mechanism. On
// insert, the generated row id is written back to network.ID.
func (ir *IrcRepo) StoreNetwork(network *domain.IrcNetwork) error {
	// empty strings are stored as NULL via toNullString
	netName := toNullString(network.Name)
	pass := toNullString(network.Pass)
	connectCommands := toNullString(strings.Join(network.ConnectCommands, "\r\n"))

	var saslMechanism, saslPlainUsername, saslPlainPassword sql.NullString
	if network.SASL.Mechanism != "" {
		saslMechanism = toNullString(network.SASL.Mechanism)
		switch network.SASL.Mechanism {
		case "PLAIN":
			saslPlainUsername = toNullString(network.SASL.Plain.Username)
			saslPlainPassword = toNullString(network.SASL.Plain.Password)
		default:
			// unsupported mechanisms are stored without credentials (warn only)
			log.Warn().Msgf("unsupported SASL mechanism: %q", network.SASL.Mechanism)
			//return fmt.Errorf("cannot store network: unsupported SASL mechanism %q", network.SASL.Mechanism)
		}
	}

	var err error
	if network.ID != 0 {
		// update record
		_, err = ir.db.Exec(`UPDATE irc_network
SET enabled = ?,
name = ?,
addr = ?,
tls = ?,
nick = ?,
pass = ?,
connect_commands = ?,
sasl_mechanism = ?,
sasl_plain_username = ?,
sasl_plain_password = ?,
updated_at = CURRENT_TIMESTAMP
WHERE id = ?`,
			network.Enabled,
			netName,
			network.Addr,
			network.TLS,
			network.Nick,
			pass,
			connectCommands,
			saslMechanism,
			saslPlainUsername,
			saslPlainPassword,
			network.ID,
		)
	} else {
		var res sql.Result
		res, err = ir.db.Exec(`INSERT INTO irc_network (
enabled,
name,
addr,
tls,
nick,
pass,
connect_commands,
sasl_mechanism,
sasl_plain_username,
sasl_plain_password
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
			network.Enabled,
			netName,
			network.Addr,
			network.TLS,
			network.Nick,
			pass,
			connectCommands,
			saslMechanism,
			saslPlainUsername,
			saslPlainPassword,
		)
		if err != nil {
			log.Error().Stack().Err(err).Msg("error executing query")
			return err
		}
		// propagate the generated id back to the caller's struct
		network.ID, err = res.LastInsertId()
	}

	return err
}
// StoreChannel inserts or updates an IRC channel depending on whether
// channel.ID is set. On insert, the generated row id is written back to
// channel.ID.
func (ir *IrcRepo) StoreChannel(networkID int64, channel *domain.IrcChannel) error {
	// empty password is stored as NULL
	pass := toNullString(channel.Password)

	var err error
	if channel.ID != 0 {
		// update record
		_, err = ir.db.Exec(`UPDATE irc_channel
SET
enabled = ?,
detached = ?,
name = ?,
password = ?
WHERE
id = ?`,
			channel.Enabled,
			channel.Detached,
			channel.Name,
			pass,
			channel.ID,
		)
	} else {
		var res sql.Result
		res, err = ir.db.Exec(`INSERT INTO irc_channel (
enabled,
detached,
name,
password,
network_id
) VALUES (?, ?, ?, ?, ?)`,
			channel.Enabled,
			true, // NOTE(review): detached is hard-coded true on insert while UPDATE uses channel.Detached — confirm intended
			channel.Name,
			pass,
			networkID,
		)
		if err != nil {
			log.Error().Stack().Err(err).Msg("error executing query")
			return err
		}
		// propagate the generated id back to the caller's struct
		channel.ID, err = res.LastInsertId()
	}

	return err
}

View file

@ -0,0 +1,175 @@
package database
import (
"database/sql"
"fmt"
"github.com/rs/zerolog/log"
)
// schema is the initial database layout, applied in one shot when the
// database reports PRAGMA user_version 0 (see Migrate).
const schema = `
CREATE TABLE indexer
(
id INTEGER PRIMARY KEY,
identifier TEXT,
enabled BOOLEAN,
name TEXT NOT NULL,
settings TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE irc_network
(
id INTEGER PRIMARY KEY,
enabled BOOLEAN,
name TEXT NOT NULL,
addr TEXT NOT NULL,
nick TEXT NOT NULL,
tls BOOLEAN,
pass TEXT,
connect_commands TEXT,
sasl_mechanism TEXT,
sasl_plain_username TEXT,
sasl_plain_password TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
unique (addr, nick)
);
CREATE TABLE irc_channel
(
id INTEGER PRIMARY KEY,
enabled BOOLEAN,
name TEXT NOT NULL,
password TEXT,
detached BOOLEAN,
network_id INTEGER NOT NULL,
FOREIGN KEY (network_id) REFERENCES irc_network(id),
unique (network_id, name)
);
CREATE TABLE filter
(
id INTEGER PRIMARY KEY,
enabled BOOLEAN,
name TEXT NOT NULL,
min_size TEXT,
max_size TEXT,
delay INTEGER,
match_releases TEXT,
except_releases TEXT,
use_regex BOOLEAN,
match_release_groups TEXT,
except_release_groups TEXT,
scene BOOLEAN,
freeleech BOOLEAN,
freeleech_percent TEXT,
shows TEXT,
seasons TEXT,
episodes TEXT,
resolutions TEXT [] DEFAULT '{}' NOT NULL,
codecs TEXT [] DEFAULT '{}' NOT NULL,
sources TEXT [] DEFAULT '{}' NOT NULL,
containers TEXT [] DEFAULT '{}' NOT NULL,
years TEXT,
match_categories TEXT,
except_categories TEXT,
match_uploaders TEXT,
except_uploaders TEXT,
tags TEXT,
except_tags TEXT,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE filter_indexer
(
filter_id INTEGER,
indexer_id INTEGER,
FOREIGN KEY (filter_id) REFERENCES filter(id),
FOREIGN KEY (indexer_id) REFERENCES indexer(id),
PRIMARY KEY (filter_id, indexer_id)
);
CREATE TABLE client
(
id INTEGER PRIMARY KEY,
name TEXT NOT NULL,
enabled BOOLEAN,
type TEXT,
host TEXT NOT NULL,
port INTEGER,
ssl BOOLEAN,
username TEXT,
password TEXT,
settings TEXT
);
CREATE TABLE action
(
id INTEGER PRIMARY KEY,
name TEXT,
type TEXT,
enabled BOOLEAN,
exec_cmd TEXT,
exec_args TEXT,
watch_folder TEXT,
category TEXT,
tags TEXT,
label TEXT,
save_path TEXT,
paused BOOLEAN,
ignore_rules BOOLEAN,
limit_upload_speed INT,
limit_download_speed INT,
client_id INTEGER,
filter_id INTEGER,
FOREIGN KEY (client_id) REFERENCES client(id),
FOREIGN KEY (filter_id) REFERENCES filter(id)
);
`

// migrations holds one SQL script per schema version; index 0 is an empty
// placeholder for the initial schema above, so len(migrations) is the
// current schema version written to PRAGMA user_version.
var migrations = []string{
	"",
}
// Migrate brings the database schema up to date. A fresh database (version
// 0) gets the full schema; otherwise any pending migration scripts run in
// order. The new version is recorded in PRAGMA user_version, all inside one
// transaction.
func Migrate(db *sql.DB) error {
	log.Info().Msg("Migrating database...")

	var version int
	err := db.QueryRow("PRAGMA user_version").Scan(&version)
	if err != nil {
		return fmt.Errorf("failed to query schema version: %v", err)
	}

	switch {
	case version == len(migrations):
		// already up to date
		return nil
	case version > len(migrations):
		return fmt.Errorf("autobrr (version %d) older than schema (version: %d)", len(migrations), version)
	}

	tx, err := db.Begin()
	if err != nil {
		return err
	}
	defer tx.Rollback()

	if version == 0 {
		if _, err := tx.Exec(schema); err != nil {
			return fmt.Errorf("failed to initialize schema: %v", err)
		}
	} else {
		for i := version; i < len(migrations); i++ {
			if _, err := tx.Exec(migrations[i]); err != nil {
				return fmt.Errorf("failed to execute migration #%v: %v", i, err)
			}
		}
	}

	// PRAGMA cannot take bind parameters, hence Sprintf
	if _, err = tx.Exec(fmt.Sprintf("PRAGMA user_version = %d", len(migrations))); err != nil {
		return fmt.Errorf("failed to bump schema version: %v", err)
	}

	return tx.Commit()
}

View file

@ -0,0 +1,13 @@
package database
import (
"path"
)
// DataSourceName builds the SQLite datasource path. With an empty
// configPath the database file lives in the current working directory;
// otherwise it is joined (and cleaned) under configPath.
func DataSourceName(configPath string, name string) string {
	if configPath == "" {
		return name
	}

	return path.Join(configPath, name)
}

View file

@ -0,0 +1,58 @@
package database
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestDataSourceName covers the empty, plain, trailing-slash and
// double-slash config path cases of DataSourceName.
func TestDataSourceName(t *testing.T) {
	cases := []struct {
		name       string
		configPath string
		dbName     string
		want       string
	}{
		{name: "default", configPath: "", dbName: "autobrr.db", want: "autobrr.db"},
		{name: "path_1", configPath: "/config", dbName: "autobrr.db", want: "/config/autobrr.db"},
		{name: "path_2", configPath: "/config/", dbName: "autobrr.db", want: "/config/autobrr.db"},
		{name: "path_3", configPath: "/config//", dbName: "autobrr.db", want: "/config/autobrr.db"},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got := DataSourceName(tc.configPath, tc.dbName)
			assert.Equal(t, tc.want, got)
		})
	}
}

39
internal/domain/action.go Normal file
View file

@ -0,0 +1,39 @@
package domain
// ActionRepo persists and retrieves the actions attached to filters.
type ActionRepo interface {
	Store(action Action) (*Action, error)
	FindByFilterID(filterID int) ([]Action, error)
	List() ([]Action, error)
	Delete(actionID int) error
	ToggleEnabled(actionID int) error
}

// Action describes what to do when a filter matches an announce. Which
// fields apply depends on Type (e.g. exec fields for EXEC, client fields
// for download-client actions).
type Action struct {
	ID                 int        `json:"id"`
	Name               string     `json:"name"`
	Type               ActionType `json:"type"`
	Enabled            bool       `json:"enabled"`
	ExecCmd            string     `json:"exec_cmd,omitempty"`
	ExecArgs           string     `json:"exec_args,omitempty"`
	WatchFolder        string     `json:"watch_folder,omitempty"`
	Category           string     `json:"category,omitempty"`
	Tags               string     `json:"tags,omitempty"`
	Label              string     `json:"label,omitempty"`
	SavePath           string     `json:"save_path,omitempty"`
	Paused             bool       `json:"paused,omitempty"`
	IgnoreRules        bool       `json:"ignore_rules,omitempty"`
	LimitUploadSpeed   int64      `json:"limit_upload_speed,omitempty"`
	LimitDownloadSpeed int64      `json:"limit_download_speed,omitempty"`
	FilterID           int        `json:"filter_id,omitempty"`
	ClientID           int32      `json:"client_id,omitempty"`
}

// ActionType enumerates the supported action kinds.
type ActionType string

const (
	ActionTypeTest        ActionType = "TEST"
	ActionTypeExec        ActionType = "EXEC"
	ActionTypeQbittorrent ActionType = "QBITTORRENT"
	ActionTypeDeluge      ActionType = "DELUGE"
	ActionTypeWatchFolder ActionType = "WATCH_FOLDER"
)

View file

@ -0,0 +1,51 @@
package domain
// Announce is the parsed representation of a tracker IRC announce line.
// Fields are filled by indexer-specific parse rules; most are optional and
// stay at their zero value when the tracker does not provide them.
type Announce struct {
	ReleaseType      string
	Freeleech        bool
	FreeleechPercent string
	Origin           string
	ReleaseGroup     string
	Category         string
	TorrentName      string
	Uploader         string
	TorrentSize      string
	PreTime          string
	TorrentUrl       string
	TorrentUrlSSL    string
	Year             int
	Name1            string // artist, show, movie
	Name2            string // album
	Season           int
	Episode          int
	Resolution       string
	Source           string
	Encoder          string
	Container        string
	Format           string
	Bitrate          string
	Media            string
	Tags             string
	Scene            bool
	Log              string
	LogScore         string
	Cue              bool
	Line             string
	OrigLine         string
	Site             string
	HttpHeaders      string
	Filter           *Filter
}

//type Announce struct {
//	Channel   string
//	Announcer string
//	Message   string
//	CreatedAt time.Time
//}
//

// AnnounceRepo persists processed announces.
type AnnounceRepo interface {
	Store(announce Announce) error
}

28
internal/domain/client.go Normal file
View file

@ -0,0 +1,28 @@
package domain
// DownloadClientRepo persists and retrieves download client configurations.
type DownloadClientRepo interface {
	//FindByActionID(actionID int) ([]DownloadClient, error)
	List() ([]DownloadClient, error)
	FindByID(id int32) (*DownloadClient, error)
	Store(client DownloadClient) (*DownloadClient, error)
	Delete(clientID int) error
}

// DownloadClient holds the connection settings for a torrent client that
// actions can send releases to.
type DownloadClient struct {
	ID       int                `json:"id"`
	Name     string             `json:"name"`
	Type     DownloadClientType `json:"type"`
	Enabled  bool               `json:"enabled"`
	Host     string             `json:"host"`
	Port     int                `json:"port"`
	SSL      bool               `json:"ssl"`
	Username string             `json:"username"`
	Password string             `json:"password"`
}

// DownloadClientType enumerates the supported download client kinds.
type DownloadClientType string

const (
	DownloadClientTypeQbittorrent DownloadClientType = "QBITTORRENT"
	DownloadClientTypeDeluge      DownloadClientType = "DELUGE"
)

11
internal/domain/config.go Normal file
View file

@ -0,0 +1,11 @@
package domain
// Settings holds core application configuration values read from the
// config file.
type Settings struct {
	Host string `toml:"host"`
	// Debug toggles debug behavior; note it carries no toml tag.
	Debug bool
}

//type AppConfig struct {
//	Settings `toml:"settings"`
//	Trackers []Tracker `mapstructure:"tracker"`
//}

90
internal/domain/filter.go Normal file
View file

@ -0,0 +1,90 @@
package domain
import "time"
/*
Works the same way as for autodl-irssi
https://autodl-community.github.io/autodl-irssi/configuration/filter/
*/
// FilterRepo persists filters and their indexer connections.
type FilterRepo interface {
	FindByID(filterID int) (*Filter, error)
	FindFiltersForSite(site string) ([]Filter, error)
	FindByIndexerIdentifier(indexer string) ([]Filter, error)
	ListFilters() ([]Filter, error)
	Store(filter Filter) (*Filter, error)
	Update(filter Filter) (*Filter, error)
	Delete(filterID int) error
	StoreIndexerConnection(filterID int, indexerID int) error
	DeleteIndexerConnections(filterID int) error
}

// Filter is a set of matching criteria applied to incoming announces,
// composed of embedded per-category groups plus the actions to run and the
// indexers it is connected to.
type Filter struct {
	ID        int       `json:"id"`
	Name      string    `json:"name"`
	Enabled   bool      `json:"enabled"`
	CreatedAt time.Time `json:"created_at"`
	UpdatedAt time.Time `json:"updated_at"`
	FilterGeneral
	FilterP2P
	FilterTVMovies
	FilterMusic
	FilterAdvanced
	Actions  []Action  `json:"actions"`
	Indexers []Indexer `json:"indexers"`
}

// FilterGeneral holds size and timing constraints.
type FilterGeneral struct {
	MinSize string `json:"min_size"`
	MaxSize string `json:"max_size"`
	Delay   int    `json:"delay"`
}

// FilterP2P holds release-name, group, scene and freeleech criteria.
type FilterP2P struct {
	MatchReleases       string `json:"match_releases"`
	ExceptReleases      string `json:"except_releases"`
	UseRegex            bool   `json:"use_regex"`
	MatchReleaseGroups  string `json:"match_release_groups"`
	ExceptReleaseGroups string `json:"except_release_groups"`
	Scene               bool   `json:"scene"`
	Origins             string `json:"origins"`
	Freeleech           bool   `json:"freeleech"`
	FreeleechPercent    string `json:"freeleech_percent"`
}

// FilterTVMovies holds TV/movie-specific criteria.
type FilterTVMovies struct {
	Shows       string   `json:"shows"`
	Seasons     string   `json:"seasons"`
	Episodes    string   `json:"episodes"`
	Resolutions []string `json:"resolutions"` // SD, 480i, 480p, 576p, 720p, 810p, 1080i, 1080p.
	Codecs      []string `json:"codecs"`      // XviD, DivX, x264, h.264 (or h264), mpeg2 (or mpeg-2), VC-1 (or VC1), WMV, Remux, h.264 Remux (or h264 Remux), VC-1 Remux (or VC1 Remux).
	Sources     []string `json:"sources"`     // DSR, PDTV, HDTV, HR.PDTV, HR.HDTV, DVDRip, DVDScr, BDr, BD5, BD9, BDRip, BRRip, DVDR, MDVDR, HDDVD, HDDVDRip, BluRay, WEB-DL, TVRip, CAM, R5, TELESYNC, TS, TELECINE, TC. TELESYNC and TS are synonyms (you don't need both). Same for TELECINE and TC
	Containers  []string `json:"containers"`
	Years       string   `json:"years"`
}

// FilterMusic holds music-specific criteria.
type FilterMusic struct {
	Artists            string   `json:"artists"`
	Albums             string   `json:"albums"`
	MatchReleaseTypes  string   `json:"match_release_types"` // Album,Single,EP
	ExceptReleaseTypes string   `json:"except_release_types"`
	Formats            []string `json:"formats"`  // MP3, FLAC, Ogg, AAC, AC3, DTS
	Bitrates           []string `json:"bitrates"` // 192, 320, APS (VBR), V2 (VBR), V1 (VBR), APX (VBR), V0 (VBR), q8.x (VBR), Lossless, 24bit Lossless, Other
	Media              []string `json:"media"`    // CD, DVD, Vinyl, Soundboard, SACD, DAT, Cassette, WEB, Other
	Cue                bool     `json:"cue"`
	Log                bool     `json:"log"`
	LogScores          string   `json:"log_scores"`
}

// FilterAdvanced holds category, uploader and tag criteria.
type FilterAdvanced struct {
	MatchCategories  string `json:"match_categories"`
	ExceptCategories string `json:"except_categories"`
	MatchUploaders   string `json:"match_uploaders"`
	ExceptUploaders  string `json:"except_uploaders"`
	Tags             string `json:"tags"`
	ExceptTags       string `json:"except_tags"`
	TagsAny          string `json:"tags_any"`
	ExceptTagsAny    string `json:"except_tags_any"`
}

View file

@ -0,0 +1,68 @@
package domain
// IndexerRepo persists and retrieves indexer configurations.
type IndexerRepo interface {
	Store(indexer Indexer) (*Indexer, error)
	Update(indexer Indexer) (*Indexer, error)
	List() ([]Indexer, error)
	Delete(id int) error
	FindByFilterID(id int) ([]Indexer, error)
}

// Indexer is a stored indexer configuration; Settings carries
// indexer-specific key/value options.
type Indexer struct {
	ID         int               `json:"id"`
	Name       string            `json:"name"`
	Identifier string            `json:"identifier"`
	Enabled    bool              `json:"enabled"`
	Type       string            `json:"type,omitempty"`
	Settings   map[string]string `json:"settings,omitempty"`
}

// IndexerDefinition describes an indexer as loaded from its definition
// (metadata, available settings, IRC details and parse rules).
type IndexerDefinition struct {
	ID          int               `json:"id,omitempty"`
	Name        string            `json:"name"`
	Identifier  string            `json:"identifier"`
	Enabled     bool              `json:"enabled,omitempty"`
	Description string            `json:"description"`
	Language    string            `json:"language"`
	Privacy     string            `json:"privacy"`
	Protocol    string            `json:"protocol"`
	URLS        []string          `json:"urls"`
	Settings    []IndexerSetting  `json:"settings"`
	SettingsMap map[string]string `json:"-"`
	IRC         *IndexerIRC       `json:"irc"`
	Parse       IndexerParse      `json:"parse"`
}

// IndexerSetting describes one configurable option of an indexer.
type IndexerSetting struct {
	Name        string `json:"name"`
	Required    bool   `json:"required,omitempty"`
	Type        string `json:"type"`
	Value       string `json:"value,omitempty"`
	Label       string `json:"label"`
	Description string `json:"description"`
	Regex       string `json:"regex,omitempty"`
}

// IndexerIRC holds the IRC network, channels and announcer bots of an indexer.
type IndexerIRC struct {
	Network    string
	Server     string
	Channels   []string
	Announcers []string
}

// IndexerParse groups the rules used to parse announce lines.
type IndexerParse struct {
	Type  string                `json:"type"`
	Lines []IndexerParseExtract `json:"lines"`
	Match IndexerParseMatch     `json:"match"`
}

// IndexerParseExtract is one pattern with test lines and output variables.
type IndexerParseExtract struct {
	Test    []string `json:"test"`
	Pattern string   `json:"pattern"`
	Vars    []string `json:"vars"`
}

// IndexerParseMatch describes how the torrent URL is assembled.
type IndexerParseMatch struct {
	TorrentURL string   `json:"torrenturl"`
	Encode     []string `json:"encode"`
}

43
internal/domain/irc.go Normal file
View file

@ -0,0 +1,43 @@
package domain
import "context"
// IrcChannel is a channel joined on an IRC network.
type IrcChannel struct {
	ID       int64  `json:"id"`
	Enabled  bool   `json:"enabled"`
	Detached bool   `json:"detached"`
	Name     string `json:"name"`
	Password string `json:"password"`
}

// SASL holds SASL authentication settings for an IRC network.
type SASL struct {
	Mechanism string `json:"mechanism,omitempty"`
	Plain     struct {
		Username string `json:"username,omitempty"`
		Password string `json:"password,omitempty"`
	} `json:"plain,omitempty"`
}

// IrcNetwork is a configured IRC network connection and its channels.
type IrcNetwork struct {
	ID              int64        `json:"id"`
	Name            string       `json:"name"`
	Enabled         bool         `json:"enabled"`
	Addr            string       `json:"addr"`
	TLS             bool         `json:"tls"`
	Nick            string       `json:"nick"`
	Pass            string       `json:"pass"`
	ConnectCommands []string     `json:"connect_commands"`
	SASL            SASL         `json:"sasl,omitempty"`
	Channels        []IrcChannel `json:"channels"`
}

// IrcRepo persists IRC networks, channels and announces.
type IrcRepo interface {
	Store(announce Announce) error
	StoreNetwork(network *IrcNetwork) error
	StoreChannel(networkID int64, channel *IrcChannel) error
	ListNetworks(ctx context.Context) ([]IrcNetwork, error)
	ListChannels(networkID int64) ([]IrcChannel, error)
	GetNetworkByID(id int64) (*IrcNetwork, error)
	DeleteNetwork(ctx context.Context, id int64) error
}

View file

@ -0,0 +1,95 @@
package download_client
import (
"github.com/rs/zerolog/log"
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/pkg/qbittorrent"
)
// Service exposes download client management: CRUD plus connectivity tests.
type Service interface {
	List() ([]domain.DownloadClient, error)
	FindByID(id int32) (*domain.DownloadClient, error)
	Store(client domain.DownloadClient) (*domain.DownloadClient, error)
	Delete(clientID int) error
	Test(client domain.DownloadClient) error
}

// service is the default Service implementation backed by a repository.
type service struct {
	repo domain.DownloadClientRepo
}

// NewService returns a Service backed by the given repository.
func NewService(repo domain.DownloadClientRepo) Service {
	return &service{repo: repo}
}
// List returns all configured download clients.
func (s *service) List() ([]domain.DownloadClient, error) {
	clients, err := s.repo.List()
	if err != nil {
		return nil, err
	}
	return clients, nil
}

// FindByID returns a single download client by id.
func (s *service) FindByID(id int32) (*domain.DownloadClient, error) {
	client, err := s.repo.FindByID(id)
	if err != nil {
		return nil, err
	}
	return client, nil
}

// Store persists a download client.
// TODO(review): input validation is noted below but not yet implemented.
func (s *service) Store(client domain.DownloadClient) (*domain.DownloadClient, error) {
	// validate data
	// store
	c, err := s.repo.Store(client)
	if err != nil {
		return nil, err
	}
	return c, nil
}

// Delete removes a download client by id.
func (s *service) Delete(clientID int) error {
	if err := s.repo.Delete(clientID); err != nil {
		return err
	}
	log.Debug().Msgf("delete client: %v", clientID)
	return nil
}
// Test verifies that autobrr can reach and authenticate with the given
// download client.
func (s *service) Test(client domain.DownloadClient) error {
	return s.testConnection(client)
}
// testConnection performs a real login against the download client.
// Unsupported client types are treated as trivially OK.
// Consistency fix: compare against the domain constant instead of the raw
// string "QBITTORRENT".
func (s *service) testConnection(client domain.DownloadClient) error {
	if client.Type != domain.DownloadClientTypeQbittorrent {
		return nil
	}

	qbtSettings := qbittorrent.Settings{
		Hostname: client.Host,
		Port:     uint(client.Port),
		Username: client.Username,
		Password: client.Password,
		SSL:      client.SSL,
	}

	qbt := qbittorrent.NewClient(qbtSettings)
	if err := qbt.Login(); err != nil {
		log.Error().Err(err).Msgf("error logging into client: %v", client.Host)
		return err
	}

	return nil
}

342
internal/filter/service.go Normal file
View file

@ -0,0 +1,342 @@
package filter
import (
"strings"
"github.com/rs/zerolog/log"
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/internal/indexer"
"github.com/autobrr/autobrr/pkg/wildcard"
)
// Service orchestrates filter lookup, announce matching and persistence.
type Service interface {
	//FindFilter(announce domain.Announce) (*domain.Filter, error)
	FindByID(filterID int) (*domain.Filter, error)
	FindByIndexerIdentifier(announce domain.Announce) (*domain.Filter, error)
	ListFilters() ([]domain.Filter, error)
	Store(filter domain.Filter) (*domain.Filter, error)
	Update(filter domain.Filter) (*domain.Filter, error)
	Delete(filterID int) error
}

// service implements Service on top of the filter and action stores and the
// indexer service.
type service struct {
	repo       domain.FilterRepo
	actionRepo domain.ActionRepo
	indexerSvc indexer.Service
}

// NewService wires a filter Service to its dependencies.
func NewService(repo domain.FilterRepo, actionRepo domain.ActionRepo, indexerSvc indexer.Service) Service {
	return &service{
		repo:       repo,
		actionRepo: actionRepo,
		indexerSvc: indexerSvc,
	}
}
// ListFilters returns all filters with their connected indexers attached.
func (s *service) ListFilters() ([]domain.Filter, error) {
	filters, err := s.repo.ListFilters()
	if err != nil {
		return nil, err
	}

	var ret []domain.Filter
	for _, f := range filters {
		indexers, err := s.indexerSvc.FindByFilterID(f.ID)
		if err != nil {
			return nil, err
		}
		f.Indexers = indexers

		ret = append(ret, f)
	}

	return ret, nil
}
// FindByID fetches a single filter and attaches its actions and indexers.
// A missing/failed action lookup is non-fatal (best effort); an indexer
// lookup failure aborts the call.
//
// FIX: log arguments previously passed &filter.ID / &filter.Name with %+v,
// which prints pointer addresses rather than values.
func (s *service) FindByID(filterID int) (*domain.Filter, error) {
	// find filter
	filter, err := s.repo.FindByID(filterID)
	if err != nil {
		return nil, err
	}

	// find actions and attach; continue without them on error (best effort)
	actions, err := s.actionRepo.FindByFilterID(filter.ID)
	if err != nil {
		log.Error().Err(err).Msgf("could not find filter actions: %v", filter.ID)
	}
	filter.Actions = actions

	// find indexers and attach
	indexers, err := s.indexerSvc.FindByFilterID(filter.ID)
	if err != nil {
		log.Error().Err(err).Msgf("could not find indexers for filter: %v", filter.Name)
		return nil, err
	}
	filter.Indexers = indexers

	return filter, nil
}
// FindByIndexerIdentifier returns the first enabled filter that matches the
// announce for its indexer and has at least one action attached. Returns
// (nil, nil) when nothing matches.
//
// FIX: log arguments previously passed &filter.ID / &filter.Name, which
// prints pointer addresses rather than values.
func (s *service) FindByIndexerIdentifier(announce domain.Announce) (*domain.Filter, error) {
	// get filters for tracker
	filters, err := s.repo.FindByIndexerIdentifier(announce.Site)
	if err != nil {
		log.Error().Err(err).Msgf("could not find filters for indexer: %v", announce.Site)
		return nil, err
	}

	// match against announce/releaseInfo
	for _, filter := range filters {
		// if match, return the filter
		matchedFilter := s.checkFilter(filter, announce)
		if matchedFilter {
			log.Trace().Msgf("found filter: %+v", filter)
			log.Debug().Msgf("found filter: %v", filter.Name)

			// find actions and attach
			actions, err := s.actionRepo.FindByFilterID(filter.ID)
			if err != nil {
				log.Error().Err(err).Msgf("could not find filter actions: %v", filter.ID)
				return nil, err
			}

			// a filter without actions is not actionable; check next filter
			if actions == nil {
				continue
			}
			filter.Actions = actions

			return &filter, nil
		}
	}

	// if no match, return nil
	return nil, nil
}
//func (s *service) FindFilter(announce domain.Announce) (*domain.Filter, error) {
// // get filter for tracker
// filters, err := s.repo.FindFiltersForSite(announce.Site)
// if err != nil {
// return nil, err
// }
//
// // match against announce/releaseInfo
// for _, filter := range filters {
// // if match, return the filter
// matchedFilter := s.checkFilter(filter, announce)
// if matchedFilter {
//
// log.Debug().Msgf("found filter: %+v", &filter)
//
// // find actions and attach
// actions, err := s.actionRepo.FindByFilterID(filter.ID)
// if err != nil {
// log.Error().Msgf("could not find filter actions: %+v", &filter.ID)
// }
// filter.Actions = actions
//
// return &filter, nil
// }
// }
//
// // if no match, return nil
// return nil, nil
//}
// Store validates and persists a new filter, returning the stored filter
// (with its generated ID) on success.
func (s *service) Store(filter domain.Filter) (*domain.Filter, error) {
	// TODO: validate data before storing

	f, err := s.repo.Store(filter)
	if err != nil {
		// consistency fix: log the filter name like Update/Delete do,
		// instead of dumping the whole struct
		log.Error().Err(err).Msgf("could not store filter: %v", filter.Name)
		return nil, err
	}

	return f, nil
}
// Update persists changes to an existing filter and rewrites its indexer
// connections so they mirror the indexers on the incoming filter.
func (s *service) Update(filter domain.Filter) (*domain.Filter, error) {
	// TODO: validate data before updating

	f, err := s.repo.Update(filter)
	if err != nil {
		log.Error().Err(err).Msgf("could not update filter: %v", filter.Name)
		return nil, err
	}

	// replace connected indexers: drop every existing connection first,
	// then store one connection per indexer on the incoming filter
	if err = s.repo.DeleteIndexerConnections(f.ID); err != nil {
		log.Error().Err(err).Msgf("could not delete filter indexer connections: %v", filter.Name)
		return nil, err
	}

	for _, indexer := range filter.Indexers {
		if err = s.repo.StoreIndexerConnection(f.ID, indexer.ID); err != nil {
			log.Error().Err(err).Msgf("could not store filter indexer connections: %v", filter.Name)
			return nil, err
		}
	}

	return f, nil
}
// Delete removes the filter with the given ID. A zero ID can never reference
// a stored filter and is treated as a no-op.
func (s *service) Delete(filterID int) error {
	if filterID == 0 {
		return nil
	}

	err := s.repo.Delete(filterID)
	if err != nil {
		log.Error().Err(err).Msgf("could not delete filter: %v", filterID)
	}
	return err
}
// checkFilter reports whether the announce satisfies every condition set on
// the filter. Unset (zero-value) conditions are skipped; a disabled filter
// never matches. Any single failing condition rejects the announce.
func (s *service) checkFilter(filter domain.Filter, announce domain.Announce) bool {
	if !filter.Enabled {
		return false
	}

	switch {
	case filter.Scene && announce.Scene != filter.Scene:
		return false
	case filter.Freeleech && announce.Freeleech != filter.Freeleech:
		return false
	case filter.Shows != "" && !checkFilterStrings(announce.TorrentName, filter.Shows):
		return false

	//case filter.Seasons != "" && !checkFilterStrings(announce.TorrentName, filter.Seasons):
	//	return false
	//case filter.Episodes != "" && !checkFilterStrings(announce.TorrentName, filter.Episodes):
	//	return false

	// matchRelease
	case filter.MatchReleases != "" && !checkFilterStrings(announce.TorrentName, filter.MatchReleases):
		return false
	case filter.MatchReleaseGroups != "" && !checkFilterStrings(announce.TorrentName, filter.MatchReleaseGroups):
		return false
	case filter.ExceptReleaseGroups != "" && checkFilterStrings(announce.TorrentName, filter.ExceptReleaseGroups):
		return false
	case filter.MatchUploaders != "" && !checkFilterStrings(announce.Uploader, filter.MatchUploaders):
		return false
	case filter.ExceptUploaders != "" && checkFilterStrings(announce.Uploader, filter.ExceptUploaders):
		return false
	case len(filter.Resolutions) > 0 && !checkFilterSlice(announce.TorrentName, filter.Resolutions):
		return false
	case len(filter.Codecs) > 0 && !checkFilterSlice(announce.TorrentName, filter.Codecs):
		return false
	case len(filter.Sources) > 0 && !checkFilterSlice(announce.TorrentName, filter.Sources):
		return false
	case len(filter.Containers) > 0 && !checkFilterSlice(announce.TorrentName, filter.Containers):
		return false
	case filter.Years != "" && !checkFilterStrings(announce.TorrentName, filter.Years):
		return false
	case filter.MatchCategories != "" && !checkFilterStrings(announce.Category, filter.MatchCategories):
		return false
	case filter.ExceptCategories != "" && checkFilterStrings(announce.Category, filter.ExceptCategories):
		return false
	case filter.Tags != "" && !checkFilterStrings(announce.Tags, filter.Tags):
		return false
	case filter.ExceptTags != "" && checkFilterStrings(announce.Tags, filter.ExceptTags):
		return false
	}

	return true
}
// checkFilterSlice reports whether name matches any entry of filterList.
// Entries containing wildcard characters are matched with wildcard.Match,
// plain entries with a substring test. Comparison is case-insensitive.
func checkFilterSlice(name string, filterList []string) bool {
	lowered := strings.ToLower(name)

	for _, pattern := range filterList {
		pattern = strings.ToLower(pattern)

		// entries containing * or ? are wildcard patterns, everything else
		// is a plain substring.
		// NOTE(review): "?|*" also makes a literal '|' trigger wildcard
		// matching — confirm that is intended.
		if strings.ContainsAny(pattern, "?|*") {
			if wildcard.Match(pattern, lowered) {
				return true
			}
			continue
		}

		if strings.Contains(lowered, pattern) {
			return true
		}
	}

	return false
}
// checkFilterStrings reports whether name matches any entry of the
// comma-separated filterList. Entries containing wildcard characters are
// matched with wildcard.Match, plain entries with a substring test.
// Comparison is case-insensitive; entries are NOT trimmed, so a space after
// a comma becomes part of the entry.
func checkFilterStrings(name string, filterList string) bool {
	lowered := strings.ToLower(name)

	for _, pattern := range strings.Split(filterList, ",") {
		pattern = strings.ToLower(pattern)

		// entries containing * or ? (or |) are treated as wildcard
		// patterns, everything else as a plain substring
		if strings.ContainsAny(pattern, "?|*") {
			if wildcard.Match(pattern, lowered) {
				return true
			}
			continue
		}

		if strings.Contains(lowered, pattern) {
			return true
		}
	}

	return false
}

View file

@ -0,0 +1,651 @@
package filter
import (
"testing"
"github.com/autobrr/autobrr/internal/domain"
"github.com/stretchr/testify/assert"
)
func Test_checkFilterStrings(t *testing.T) {
type args struct {
name string
filterList string
}
tests := []struct {
name string
args args
want bool
}{
{
name: "test_01",
args: args{
name: "The End",
filterList: "The End, Other movie",
},
want: true,
},
{
name: "test_02",
args: args{
name: "The Simpsons S12",
filterList: "The End, Other movie",
},
want: false,
},
{
name: "test_03",
args: args{
name: "The.Simpsons.S12",
filterList: "The?Simpsons*, Other movie",
},
want: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := checkFilterStrings(tt.args.name, tt.args.filterList)
assert.Equal(t, tt.want, got)
})
}
}
// Test_service_checkFilter exercises checkFilter across every supported
// condition: freeleech/scene flags, show names (plain and wildcard),
// match/except release groups, uploaders, resolutions, codecs, sources,
// years, categories, and one combined multi-field filter. The service is
// constructed with nil repos because checkFilter touches none of them.
func Test_service_checkFilter(t *testing.T) {
	type args struct {
		filter   domain.Filter
		announce domain.Announce
	}

	// checkFilter never dereferences the repos, so nil dependencies are safe
	svcMock := &service{
		repo:       nil,
		actionRepo: nil,
		indexerSvc: nil,
	}

	tests := []struct {
		name     string
		args     args
		expected bool
	}{
		{
			name: "freeleech",
			args: args{
				announce: domain.Announce{
					Freeleech: true,
				},
				filter: domain.Filter{
					Enabled: true,
					FilterP2P: domain.FilterP2P{
						Freeleech: true,
					},
				},
			},
			expected: true,
		},
		{
			name: "scene",
			args: args{
				announce: domain.Announce{
					Scene: true,
				},
				filter: domain.Filter{
					Enabled: true,
					FilterP2P: domain.FilterP2P{
						Scene: true,
					},
				},
			},
			expected: true,
		},
		{
			name: "not_scene",
			args: args{
				announce: domain.Announce{
					Scene: false,
				},
				filter: domain.Filter{
					Enabled: true,
					FilterP2P: domain.FilterP2P{
						Scene: true,
					},
				},
			},
			expected: false,
		},
		{
			name: "shows_1",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 1080p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Shows: "That show",
					},
				},
			},
			expected: true,
		},
		{
			name: "shows_2",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 1080p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Shows: "That show, The Other show",
					},
				},
			},
			expected: true,
		},
		{
			name: "shows_3",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 1080p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Shows: "That?show*, The?Other?show",
					},
				},
			},
			expected: true,
		},
		{
			name: "shows_4",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 1080p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Shows: "The Other show",
					},
				},
			},
			expected: false,
		},
		{
			name: "shows_5",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 1080p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Shows: "*show*",
					},
				},
			},
			expected: true,
		},
		{
			name: "shows_6",
			args: args{
				announce: domain.Announce{
					TorrentName: "That.Show.S06.1080p.BluRay.DD5.1.x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Shows: "*show*",
					},
				},
			},
			expected: true,
		},
		{
			name: "shows_7",
			args: args{
				announce: domain.Announce{
					TorrentName: "That.Show.S06.1080p.BluRay.DD5.1.x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Shows: "That?show*",
					},
				},
			},
			expected: true,
		},
		{
			name: "match_releases_single",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 1080p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterP2P: domain.FilterP2P{
						MatchReleases: "That show",
					},
				},
			},
			expected: true,
		},
		{
			name: "match_releases_single_wildcard",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 1080p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterP2P: domain.FilterP2P{
						MatchReleases: "That show*",
					},
				},
			},
			expected: true,
		},
		{
			name: "match_releases_multiple",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 1080p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterP2P: domain.FilterP2P{
						MatchReleases: "That show*, Other one",
					},
				},
			},
			expected: true,
		},
		{
			name: "match_release_groups",
			args: args{
				announce: domain.Announce{
					TorrentName:  "That Show S06 1080p BluRay DD5.1 x264-GROUP1",
					ReleaseGroup: "GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterP2P: domain.FilterP2P{
						MatchReleaseGroups: "GROUP1",
					},
				},
			},
			expected: true,
		},
		{
			name: "match_release_groups_multiple",
			args: args{
				announce: domain.Announce{
					TorrentName:  "That Show S06 1080p BluRay DD5.1 x264-GROUP1",
					ReleaseGroup: "GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterP2P: domain.FilterP2P{
						MatchReleaseGroups: "GROUP1,GROUP2",
					},
				},
			},
			expected: true,
		},
		{
			name: "match_release_groups_dont_match",
			args: args{
				announce: domain.Announce{
					TorrentName:  "That Show S06 1080p BluRay DD5.1 x264-GROUP1",
					ReleaseGroup: "GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterP2P: domain.FilterP2P{
						MatchReleaseGroups: "GROUP2",
					},
				},
			},
			expected: false,
		},
		{
			name: "except_release_groups",
			args: args{
				announce: domain.Announce{
					TorrentName:  "That Show S06 1080p BluRay DD5.1 x264-GROUP1",
					ReleaseGroup: "GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterP2P: domain.FilterP2P{
						ExceptReleaseGroups: "GROUP1",
					},
				},
			},
			expected: false,
		},
		{
			name: "match_uploaders",
			args: args{
				announce: domain.Announce{
					Uploader: "Uploader1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterAdvanced: domain.FilterAdvanced{
						MatchUploaders: "Uploader1",
					},
				},
			},
			expected: true,
		},
		{
			name: "non_match_uploaders",
			args: args{
				announce: domain.Announce{
					Uploader: "Uploader2",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterAdvanced: domain.FilterAdvanced{
						MatchUploaders: "Uploader1",
					},
				},
			},
			expected: false,
		},
		{
			name: "except_uploaders",
			args: args{
				announce: domain.Announce{
					Uploader: "Uploader1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterAdvanced: domain.FilterAdvanced{
						ExceptUploaders: "Uploader1",
					},
				},
			},
			expected: false,
		},
		{
			name: "resolutions_1080p",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 1080p BluRay DD5.1 x264-GROUP1",
					Resolution:  "1080p",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Resolutions: []string{"1080p"},
					},
				},
			},
			expected: true,
		},
		{
			name: "resolutions_2160p",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 2160p BluRay DD5.1 x264-GROUP1",
					Resolution:  "2160p",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Resolutions: []string{"2160p"},
					},
				},
			},
			expected: true,
		},
		{
			name: "resolutions_no_match",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 2160p BluRay DD5.1 x264-GROUP1",
					Resolution:  "2160p",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Resolutions: []string{"1080p"},
					},
				},
			},
			expected: false,
		},
		{
			name: "codecs_1_match",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 2160p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Codecs: []string{"x264"},
					},
				},
			},
			expected: true,
		},
		{
			name: "codecs_2_no_match",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 2160p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Codecs: []string{"h264"},
					},
				},
			},
			expected: false,
		},
		{
			name: "sources_1_match",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 2160p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Sources: []string{"BluRay"},
					},
				},
			},
			expected: true,
		},
		{
			name: "sources_2_no_match",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 2160p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Sources: []string{"WEB"},
					},
				},
			},
			expected: false,
		},
		{
			name: "years_1",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Movie 2020 2160p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Years: "2020",
					},
				},
			},
			expected: true,
		},
		{
			name: "years_2",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Movie 2020 2160p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Years: "2020,1990",
					},
				},
			},
			expected: true,
		},
		{
			name: "years_3_no_match",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Movie 2020 2160p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Years: "1990",
					},
				},
			},
			expected: false,
		},
		{
			name: "years_4_no_match",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Show S06 2160p BluRay DD5.1 x264-GROUP1",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterTVMovies: domain.FilterTVMovies{
						Years: "2020",
					},
				},
			},
			expected: false,
		},
		{
			name: "match_categories_1",
			args: args{
				announce: domain.Announce{
					Category: "TV",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterAdvanced: domain.FilterAdvanced{
						MatchCategories: "TV",
					},
				},
			},
			expected: true,
		},
		{
			name: "match_categories_2",
			args: args{
				announce: domain.Announce{
					Category: "TV :: HD",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterAdvanced: domain.FilterAdvanced{
						MatchCategories: "*TV*",
					},
				},
			},
			expected: true,
		},
		{
			name: "match_categories_3",
			args: args{
				announce: domain.Announce{
					Category: "TV :: HD",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterAdvanced: domain.FilterAdvanced{
						MatchCategories: "*TV*, *HD*",
					},
				},
			},
			expected: true,
		},
		{
			name: "match_categories_4_no_match",
			args: args{
				announce: domain.Announce{
					Category: "TV :: HD",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterAdvanced: domain.FilterAdvanced{
						MatchCategories: "Movies",
					},
				},
			},
			expected: false,
		},
		{
			name: "except_categories_1",
			args: args{
				announce: domain.Announce{
					Category: "Movies",
				},
				filter: domain.Filter{
					Enabled: true,
					FilterAdvanced: domain.FilterAdvanced{
						ExceptCategories: "Movies",
					},
				},
			},
			expected: false,
		},
		{
			name: "match_multiple_fields_1",
			args: args{
				announce: domain.Announce{
					TorrentName: "That Movie 2020 2160p BluRay DD5.1 x264-GROUP1",
					Category:    "Movies",
					Freeleech:   true,
				},
				filter: domain.Filter{
					Enabled: true,
					FilterAdvanced: domain.FilterAdvanced{
						MatchCategories: "Movies",
					},
					FilterTVMovies: domain.FilterTVMovies{
						Resolutions: []string{"2160p"},
						Sources:     []string{"BluRay"},
						Years:       "2020",
					},
					FilterP2P: domain.FilterP2P{
						MatchReleaseGroups: "GROUP1",
						MatchReleases:      "That movie",
						Freeleech:          true,
					},
				},
			},
			expected: true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := svcMock.checkFilter(tt.args.filter, tt.args.announce)
			assert.Equal(t, tt.expected, got)
		})
	}
}

113
internal/http/action.go Normal file
View file

@ -0,0 +1,113 @@
package http
import (
"encoding/json"
"net/http"
"strconv"
"github.com/autobrr/autobrr/internal/domain"
"github.com/go-chi/chi"
)
// actionService is the consumer-side contract this handler needs from the
// action service layer.
type actionService interface {
	Fetch() ([]domain.Action, error)
	Store(action domain.Action) (*domain.Action, error)
	Delete(actionID int) error
	ToggleEnabled(actionID int) error
}

// actionHandler exposes actions over HTTP as JSON.
type actionHandler struct {
	encoder       encoder
	actionService actionService
}

// Routes mounts the action endpoints on the given router.
func (h actionHandler) Routes(r chi.Router) {
	r.Get("/", h.getActions)
	r.Post("/", h.storeAction)
	r.Delete("/{actionID}", h.deleteAction)
	r.Put("/{actionID}", h.updateAction)
	r.Patch("/{actionID}/toggleEnabled", h.toggleActionEnabled)
}
// getActions lists all actions.
func (h actionHandler) getActions(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()

	actions, err := h.actionService.Fetch()
	if err != nil {
		// previously the error was swallowed and a nil list encoded as 200
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, actions, http.StatusOK)
}

// storeAction decodes an action from the body and persists it.
func (h actionHandler) storeAction(w http.ResponseWriter, r *http.Request) {
	var (
		ctx  = r.Context()
		data domain.Action
	)

	if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	action, err := h.actionService.Store(data)
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, action, http.StatusCreated)
}

// updateAction decodes an action from the body and persists it.
// NOTE(review): the service interface exposes no Update, so this delegates
// to Store just like the original — confirm Store upserts by ID.
func (h actionHandler) updateAction(w http.ResponseWriter, r *http.Request) {
	var (
		ctx  = r.Context()
		data domain.Action
	)

	if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	action, err := h.actionService.Store(data)
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, action, http.StatusCreated)
}

// deleteAction removes the action addressed by the {actionID} URL parameter.
func (h actionHandler) deleteAction(w http.ResponseWriter, r *http.Request) {
	var (
		ctx      = r.Context()
		actionID = chi.URLParam(r, "actionID")
	)

	id, err := strconv.Atoi(actionID)
	if err != nil {
		// previously a malformed ID silently became 0
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	if err := h.actionService.Delete(id); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, nil, http.StatusNoContent)
}

// toggleActionEnabled flips the enabled flag of the addressed action.
func (h actionHandler) toggleActionEnabled(w http.ResponseWriter, r *http.Request) {
	var (
		ctx      = r.Context()
		actionID = chi.URLParam(r, "actionID")
	)

	id, err := strconv.Atoi(actionID)
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	if err := h.actionService.ToggleEnabled(id); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	// success status kept as 201 to preserve the existing API contract
	h.encoder.StatusResponse(ctx, w, nil, http.StatusCreated)
}

41
internal/http/config.go Normal file
View file

@ -0,0 +1,41 @@
package http
import (
"net/http"
"github.com/autobrr/autobrr/internal/config"
"github.com/go-chi/chi"
)
// configJson is the JSON shape of the application config exposed over HTTP.
type configJson struct {
	Host     string `json:"host"`
	Port     int    `json:"port"`
	LogLevel string `json:"log_level"`
	LogPath  string `json:"log_path"`
	BaseURL  string `json:"base_url"`
}

// configHandler serves the (read-only) application configuration.
type configHandler struct {
	encoder encoder
}

// Routes mounts the config endpoints on the given router.
func (h configHandler) Routes(r chi.Router) {
	r.Get("/", h.getConfig)
}
// getConfig replies with the current application configuration as JSON.
func (h configHandler) getConfig(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()

	cfg := config.Config

	h.encoder.StatusResponse(ctx, w, configJson{
		Host:     cfg.Host,
		Port:     cfg.Port,
		LogLevel: cfg.LogLevel,
		LogPath:  cfg.LogPath,
		BaseURL:  cfg.BaseURL,
	}, http.StatusOK)
}

View file

@ -0,0 +1,119 @@
package http
import (
"encoding/json"
"net/http"
"strconv"
"github.com/go-chi/chi"
"github.com/autobrr/autobrr/internal/domain"
)
// downloadClientService is the consumer-side contract this handler needs
// from the download client service layer.
type downloadClientService interface {
	List() ([]domain.DownloadClient, error)
	Store(client domain.DownloadClient) (*domain.DownloadClient, error)
	Delete(clientID int) error
	Test(client domain.DownloadClient) error
}

// downloadClientHandler exposes download clients over HTTP as JSON.
type downloadClientHandler struct {
	encoder               encoder
	downloadClientService downloadClientService
}

// Routes mounts the download client endpoints on the given router.
func (h downloadClientHandler) Routes(r chi.Router) {
	r.Get("/", h.listDownloadClients)
	r.Post("/", h.store)
	r.Put("/", h.update)
	r.Post("/test", h.test)
	r.Delete("/{clientID}", h.delete)
}
// listDownloadClients lists all configured download clients.
func (h downloadClientHandler) listDownloadClients(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()

	clients, err := h.downloadClientService.List()
	if err != nil {
		// previously the error was swallowed and a nil list encoded as 200
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, clients, http.StatusOK)
}

// store decodes a download client from the body and persists it.
func (h downloadClientHandler) store(w http.ResponseWriter, r *http.Request) {
	var (
		ctx  = r.Context()
		data domain.DownloadClient
	)

	if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	client, err := h.downloadClientService.Store(data)
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, client, http.StatusCreated)
}

// test decodes a download client from the body and runs a connectivity test
// against it without persisting anything.
func (h downloadClientHandler) test(w http.ResponseWriter, r *http.Request) {
	var (
		ctx  = r.Context()
		data domain.DownloadClient
	)

	if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	if err := h.downloadClientService.Test(data); err != nil {
		// the supplied client could not be reached or authenticated
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	h.encoder.StatusResponse(ctx, w, nil, http.StatusNoContent)
}

// update decodes a download client from the body and persists it.
// NOTE(review): the service interface exposes no Update, so this delegates
// to Store just like the original — confirm Store upserts by ID.
func (h downloadClientHandler) update(w http.ResponseWriter, r *http.Request) {
	var (
		ctx  = r.Context()
		data domain.DownloadClient
	)

	if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	client, err := h.downloadClientService.Store(data)
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, client, http.StatusCreated)
}

// delete removes the client addressed by the {clientID} URL parameter.
func (h downloadClientHandler) delete(w http.ResponseWriter, r *http.Request) {
	var (
		ctx      = r.Context()
		clientID = chi.URLParam(r, "clientID")
	)

	id, err := strconv.Atoi(clientID)
	if err != nil {
		// previously a malformed ID silently became 0
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	if err := h.downloadClientService.Delete(id); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, nil, http.StatusNoContent)
}

26
internal/http/encoder.go Normal file
View file

@ -0,0 +1,26 @@
package http
import (
"context"
"encoding/json"
"net/http"
)
// encoder centralizes JSON response writing for the HTTP handlers.
type encoder struct {
}

// StatusResponse writes the given status code and, when response is non-nil,
// a JSON-encoded body with a JSON Content-Type header. Headers must be set
// before WriteHeader, which flushes them.
func (e encoder) StatusResponse(ctx context.Context, w http.ResponseWriter, response interface{}, status int) {
	if response != nil {
		// bug fix: header previously read "charset=utf=8"
		w.Header().Set("Content-Type", "application/json; charset=utf-8")
		w.WriteHeader(status)

		if err := json.NewEncoder(w).Encode(response); err != nil {
			// status and headers are already sent; nothing can be reported
			// to the client here. TODO: log once a logger is wired in.
		}
	} else {
		w.WriteHeader(status)
	}
}

// StatusNotFound replies with 404 and an empty body.
func (e encoder) StatusNotFound(ctx context.Context, w http.ResponseWriter) {
	w.WriteHeader(http.StatusNotFound)
}

132
internal/http/filter.go Normal file
View file

@ -0,0 +1,132 @@
package http
import (
"encoding/json"
"net/http"
"strconv"
"github.com/go-chi/chi"
"github.com/autobrr/autobrr/internal/domain"
)
// filterService is the consumer-side contract this handler needs from the
// filter service layer.
type filterService interface {
	ListFilters() ([]domain.Filter, error)
	FindByID(filterID int) (*domain.Filter, error)
	Store(filter domain.Filter) (*domain.Filter, error)
	Delete(filterID int) error
	Update(filter domain.Filter) (*domain.Filter, error)
	//StoreFilterAction(action domain.Action) error
}

// filterHandler exposes filters over HTTP as JSON.
type filterHandler struct {
	encoder       encoder
	filterService filterService
}

// Routes mounts the filter endpoints on the given router.
// NOTE(review): storeFilterAction exists on this handler but is not routed
// here — confirm whether it should be registered or removed.
func (h filterHandler) Routes(r chi.Router) {
	r.Get("/", h.getFilters)
	r.Get("/{filterID}", h.getByID)
	r.Post("/", h.store)
	r.Put("/{filterID}", h.update)
	r.Delete("/{filterID}", h.delete)
}
// getFilters lists all filters.
func (h filterHandler) getFilters(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()

	trackers, err := h.filterService.ListFilters()
	if err != nil {
		// previously the error was swallowed and a nil list encoded as 200
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, trackers, http.StatusOK)
}

// getByID returns the filter addressed by the {filterID} URL parameter.
func (h filterHandler) getByID(w http.ResponseWriter, r *http.Request) {
	var (
		ctx      = r.Context()
		filterID = chi.URLParam(r, "filterID")
	)

	id, err := strconv.Atoi(filterID)
	if err != nil {
		// previously a malformed ID silently became 0
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	filter, err := h.filterService.FindByID(id)
	if err != nil {
		h.encoder.StatusNotFound(ctx, w)
		return
	}

	h.encoder.StatusResponse(ctx, w, filter, http.StatusOK)
}

// storeFilterAction looks up the addressed filter and echoes it with 201.
// NOTE(review): this handler is not registered in Routes and does not store
// anything despite its name — confirm its intended behavior.
func (h filterHandler) storeFilterAction(w http.ResponseWriter, r *http.Request) {
	var (
		ctx      = r.Context()
		filterID = chi.URLParam(r, "filterID")
	)

	id, err := strconv.Atoi(filterID)
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	filter, err := h.filterService.FindByID(id)
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, filter, http.StatusCreated)
}

// store decodes a filter from the body and persists it.
func (h filterHandler) store(w http.ResponseWriter, r *http.Request) {
	var (
		ctx  = r.Context()
		data domain.Filter
	)

	if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	filter, err := h.filterService.Store(data)
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, filter, http.StatusCreated)
}

// update decodes a filter from the body and persists the changes.
func (h filterHandler) update(w http.ResponseWriter, r *http.Request) {
	var (
		ctx  = r.Context()
		data domain.Filter
	)

	if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	filter, err := h.filterService.Update(data)
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, filter, http.StatusOK)
}

// delete removes the filter addressed by the {filterID} URL parameter.
func (h filterHandler) delete(w http.ResponseWriter, r *http.Request) {
	var (
		ctx      = r.Context()
		filterID = chi.URLParam(r, "filterID")
	)

	id, err := strconv.Atoi(filterID)
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	if err := h.filterService.Delete(id); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, nil, http.StatusNoContent)
}

118
internal/http/indexer.go Normal file
View file

@ -0,0 +1,118 @@
package http
import (
"encoding/json"
"net/http"
"strconv"
"github.com/autobrr/autobrr/internal/domain"
"github.com/go-chi/chi"
)
// indexerService is the consumer-side contract this handler needs from the
// indexer service layer.
type indexerService interface {
	Store(indexer domain.Indexer) (*domain.Indexer, error)
	Update(indexer domain.Indexer) (*domain.Indexer, error)
	List() ([]domain.Indexer, error)
	GetAll() ([]*domain.IndexerDefinition, error)
	GetTemplates() ([]domain.IndexerDefinition, error)
	Delete(id int) error
}

// indexerHandler exposes indexers and their definitions over HTTP as JSON.
type indexerHandler struct {
	encoder        encoder
	indexerService indexerService
}

// Routes mounts the indexer endpoints on the given router.
func (h indexerHandler) Routes(r chi.Router) {
	r.Get("/schema", h.getSchema)
	r.Post("/", h.store)
	r.Put("/", h.update)
	r.Get("/", h.getAll)
	r.Get("/options", h.list)
	r.Delete("/{indexerID}", h.delete)
}
// getSchema returns the available indexer definition templates.
func (h indexerHandler) getSchema(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()

	indexers, err := h.indexerService.GetTemplates()
	if err != nil {
		// previously the error was swallowed and a nil list encoded as 200
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, indexers, http.StatusOK)
}

// store decodes an indexer from the body and persists it.
func (h indexerHandler) store(w http.ResponseWriter, r *http.Request) {
	var (
		ctx  = r.Context()
		data domain.Indexer
	)

	if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	indexer, err := h.indexerService.Store(data)
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, indexer, http.StatusCreated)
}

// update decodes an indexer from the body and persists the changes.
func (h indexerHandler) update(w http.ResponseWriter, r *http.Request) {
	var (
		ctx  = r.Context()
		data domain.Indexer
	)

	if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	indexer, err := h.indexerService.Update(data)
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, indexer, http.StatusOK)
}

// delete removes the indexer addressed by the {indexerID} URL parameter.
func (h indexerHandler) delete(w http.ResponseWriter, r *http.Request) {
	var (
		ctx     = r.Context()
		idParam = chi.URLParam(r, "indexerID")
	)

	id, err := strconv.Atoi(idParam)
	if err != nil {
		// previously a malformed ID silently became 0
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	if err := h.indexerService.Delete(id); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, nil, http.StatusNoContent)
}

// getAll returns all configured indexers with their full definitions.
func (h indexerHandler) getAll(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()

	indexers, err := h.indexerService.GetAll()
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, indexers, http.StatusOK)
}

// list returns a compact listing of configured indexers (for options UIs).
func (h indexerHandler) list(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()

	indexers, err := h.indexerService.List()
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, indexers, http.StatusOK)
}

132
internal/http/irc.go Normal file
View file

@ -0,0 +1,132 @@
package http
import (
"context"
"encoding/json"
"net/http"
"strconv"
"github.com/go-chi/chi"
"github.com/autobrr/autobrr/internal/domain"
)
// ircService is the consumer-side contract this handler needs from the IRC
// service layer.
type ircService interface {
	ListNetworks(ctx context.Context) ([]domain.IrcNetwork, error)
	DeleteNetwork(ctx context.Context, id int64) error
	GetNetworkByID(id int64) (*domain.IrcNetwork, error)
	StoreNetwork(network *domain.IrcNetwork) error
	StoreChannel(networkID int64, channel *domain.IrcChannel) error
	StopNetwork(name string) error
}

// ircHandler exposes IRC networks and channels over HTTP as JSON.
type ircHandler struct {
	encoder    encoder
	ircService ircService
}

// Routes mounts the IRC endpoints on the given router.
func (h ircHandler) Routes(r chi.Router) {
	r.Get("/", h.listNetworks)
	r.Post("/", h.storeNetwork)
	r.Put("/network/{networkID}", h.storeNetwork)
	r.Post("/network/{networkID}/channel", h.storeChannel)
	r.Get("/network/{networkID}/stop", h.stopNetwork)
	r.Get("/network/{networkID}", h.getNetworkByID)
	r.Delete("/network/{networkID}", h.deleteNetwork)
}
// listNetworks lists all configured IRC networks.
func (h ircHandler) listNetworks(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()

	networks, err := h.ircService.ListNetworks(ctx)
	if err != nil {
		// previously the error was swallowed and a nil list encoded as 200
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, networks, http.StatusOK)
}

// getNetworkByID returns the network addressed by the {networkID} parameter.
func (h ircHandler) getNetworkByID(w http.ResponseWriter, r *http.Request) {
	var (
		ctx       = r.Context()
		networkID = chi.URLParam(r, "networkID")
	)

	id, err := strconv.Atoi(networkID)
	if err != nil {
		// previously a malformed ID silently became 0
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	network, err := h.ircService.GetNetworkByID(int64(id))
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, network, http.StatusOK)
}

// storeNetwork decodes a network from the body and persists it.
func (h ircHandler) storeNetwork(w http.ResponseWriter, r *http.Request) {
	var (
		ctx  = r.Context()
		data domain.IrcNetwork
	)

	if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	if err := h.ircService.StoreNetwork(&data); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, nil, http.StatusCreated)
}

// storeChannel decodes a channel from the body and attaches it to the
// network addressed by the {networkID} parameter.
func (h ircHandler) storeChannel(w http.ResponseWriter, r *http.Request) {
	var (
		ctx       = r.Context()
		data      domain.IrcChannel
		networkID = chi.URLParam(r, "networkID")
	)

	id, err := strconv.Atoi(networkID)
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	if err := h.ircService.StoreChannel(int64(id), &data); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, nil, http.StatusCreated)
}

// stopNetwork stops the running network addressed by the {networkID}
// parameter (passed through to the service as-is, like the original).
func (h ircHandler) stopNetwork(w http.ResponseWriter, r *http.Request) {
	var (
		ctx       = r.Context()
		networkID = chi.URLParam(r, "networkID")
	)

	if err := h.ircService.StopNetwork(networkID); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	// success status kept as 201 to preserve the existing API contract
	h.encoder.StatusResponse(ctx, w, nil, http.StatusCreated)
}

// deleteNetwork removes the network addressed by the {networkID} parameter.
func (h ircHandler) deleteNetwork(w http.ResponseWriter, r *http.Request) {
	var (
		ctx       = r.Context()
		networkID = chi.URLParam(r, "networkID")
	)

	id, err := strconv.Atoi(networkID)
	if err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusBadRequest)
		return
	}

	if err := h.ircService.DeleteNetwork(ctx, int64(id)); err != nil {
		h.encoder.StatusResponse(ctx, w, nil, http.StatusInternalServerError)
		return
	}

	h.encoder.StatusResponse(ctx, w, nil, http.StatusNoContent)
}

123
internal/http/service.go Normal file
View file

@ -0,0 +1,123 @@
package http
import (
"io/fs"
"net"
"net/http"
"github.com/autobrr/autobrr/internal/config"
"github.com/autobrr/autobrr/web"
"github.com/go-chi/chi"
)
// Server wires the HTTP layer together: it owns the listen address, the base
// URL and the service dependencies the individual handlers delegate to.
type Server struct {
	address string
	baseUrl string

	actionService         actionService
	downloadClientService downloadClientService
	filterService         filterService
	indexerService        indexerService
	ircService            ircService
}

// NewServer assembles a Server from its listen address, base URL and the
// services the HTTP handlers delegate to.
func NewServer(address string, baseUrl string, actionService actionService, downloadClientSvc downloadClientService, filterSvc filterService, indexerSvc indexerService, ircSvc ircService) Server {
	return Server{
		address:               address,
		baseUrl:               baseUrl,
		actionService:         actionService,
		downloadClientService: downloadClientSvc,
		filterService:         filterSvc,
		indexerService:        indexerSvc,
		ircService:            ircSvc,
	}
}
// Open binds a TCP listener on the configured address and serves HTTP on it.
// This call blocks until the server stops or fails.
//
// NOTE(review): no Read/Write timeouts are set on http.Server — confirm
// whether long-lived requests are expected before adding them.
func (s Server) Open() error {
	listener, err := net.Listen("tcp", s.address)
	if err != nil {
		return err
	}

	server := http.Server{
		Handler: s.Handler(),
	}

	return server.Serve(listener)
}
// Handler builds the root router: embedded static assets under /static/,
// the JSON API groups under /api/, and a catch-all that serves the SPA
// index page for client-side routing.
func (s Server) Handler() http.Handler {
	r := chi.NewRouter()

	//r.Get("/", index)
	//r.Get("/dashboard", dashboard)

	//handler := web.AssetHandler("/", "build")

	encoder := encoder{}

	// serve the embedded frontend build.
	// NOTE(review): the fs.Sub error is discarded — if "build/static" is
	// missing, assets is nil and static requests will fail silently; confirm.
	assets, _ := fs.Sub(web.Assets, "build/static")
	r.HandleFunc("/static/*", func(w http.ResponseWriter, r *http.Request) {
		fileSystem := http.StripPrefix("/static/", http.FileServer(http.FS(assets)))

		fileSystem.ServeHTTP(w, r)
	})

	// JSON API routes, one sub-router per resource
	r.Group(func(r chi.Router) {
		actionHandler := actionHandler{
			encoder:       encoder,
			actionService: s.actionService,
		}

		r.Route("/api/actions", actionHandler.Routes)

		downloadClientHandler := downloadClientHandler{
			encoder:               encoder,
			downloadClientService: s.downloadClientService,
		}

		r.Route("/api/download_clients", downloadClientHandler.Routes)

		filterHandler := filterHandler{
			encoder:       encoder,
			filterService: s.filterService,
		}

		r.Route("/api/filters", filterHandler.Routes)

		ircHandler := ircHandler{
			encoder:    encoder,
			ircService: s.ircService,
		}

		r.Route("/api/irc", ircHandler.Routes)

		indexerHandler := indexerHandler{
			encoder:        encoder,
			indexerService: s.indexerService,
		}

		r.Route("/api/indexer", indexerHandler.Routes)

		configHandler := configHandler{
			encoder: encoder,
		}

		r.Route("/api/config", configHandler.Routes)
	})

	//r.HandleFunc("/*", handler.ServeHTTP)

	// everything else falls through to the SPA entry point
	r.Get("/", index)
	r.Get("/*", index)

	return r
}
// index renders the SPA entry page with the configured base URL.
func index(w http.ResponseWriter, r *http.Request) {
	params := web.IndexParams{
		Title:   "Dashboard",
		Version: "thisistheversion",
		BaseUrl: config.Config.BaseURL,
	}
	web.Index(w, params)
}

View file

@ -0,0 +1,6 @@
package indexer
import "embed"
// Definitions embeds the bundled indexer definition files found in the
// definitions/ directory, so they ship inside the binary.
//
//go:embed definitions
var Definitions embed.FS

View file

@ -0,0 +1,60 @@
---
#id: alpharatio
name: AlphaRatio
identifier: alpharatio
description: AlphaRatio (AR) is a private torrent tracker for 0DAY / GENERAL
language: en-us
urls:
- https://alpharatio.cc/
privacy: private
protocol: torrent
supports:
- irc
- rss
source: gazelle
settings:
- name: authkey
type: text
label: Auth key
tooltip: Right click DL on a torrent and get the authkey.
description: Right click DL on a torrent and get the authkey.
- name: torrent_pass
type: text
label: Torrent pass
tooltip: Right click DL on a torrent and get the torrent_pass.
description: Right click DL on a torrent and get the torrent_pass.
irc:
network: AlphaRatio
server: irc.alpharatio.cc:6697
port: 6697
channels:
- "#Announce"
announcers:
- Voyager
parse:
type: multi
lines:
-
test:
- "[New Release]-[MovieHD]-[War.For.The.Planet.Of.The.Apes.2017.INTERNAL.1080p.BluRay.CRF.x264-SAPHiRE]-[URL]-[ https://alpharatio.cc/torrents.php?id=699463 ]-[ 699434 ]-[ Uploaded 2 Mins, 59 Secs after pre. ]"
pattern: \[New Release\]-\[(.*)\]-\[(.*)\]-\[URL\]-\[ (https?://.*)id=\d+ \]-\[ (\d+) \](?:-\[ Uploaded (.*) after pre. ])?
vars:
- category
- torrentName
- baseUrl
- torrentId
- preTime
-
test:
- "[AutoDL]-[MovieHD]-[699434]-[ 1 | 10659 | 1 | 1 ]-[War.For.The.Planet.Of.The.Apes.2017.INTERNAL.1080p.BluRay.CRF.x264-SAPHiRE]"
pattern: \[AutoDL\]-\[.*\]-\[.*\]-\[ ([01]) \| (\d+) \| ([01]) \| ([01]) \]-\[.+\]
vars:
- scene
- torrentSize
- freeleech
- auto
match:
torrenturl: "{{ .baseUrl }}action=download&id={{ .torrentId }}&authkey={{ .authkey }}&torrent_pass={{ .torrent_pass }}"

View file

@ -0,0 +1,48 @@
---
#id: beyondhd
name: BeyondHD
identifier: beyondhd
description: BeyondHD (BHD) is a private torrent tracker for HD MOVIES / TV
language: en-us
urls:
- https://beyond-hd.me/
privacy: private
protocol: torrent
supports:
- irc
- rss
source: UNIT3D (F3NIX)
settings:
- name: passkey
type: text
label: Passkey
tooltip: The passkey in your BeyondHD RSS feed.
description: "Go to your profile and copy and paste your RSS link to extract the rsskey."
irc:
network: BeyondHD-IRC
server: irc.beyond-hd.me:6697
port: 6697
channels:
- "#bhd_announce"
announcers:
- Willie
- Millie
parse:
type: single
lines:
-
test:
- "New Torrent: Orange.Is.the.New.Black.S01.1080p.Blu-ray.AVC.DTS-HD.MA.5.1-Test Category: TV By: Uploader Size: 137.73 GB Link: https://beyond-hd.me/details.php?id=25918"
pattern: 'New Torrent:(.*)Category:(.*)By:(.*)Size:(.*)Link: https?\:\/\/([^\/]+\/).*[&\?]id=(\d+)'
vars:
- torrentName
- category
- uploader
- torrentSize
- baseUrl
- torrentId
match:
torrenturl: "https://{{ .baseUrl }}torrent/download/auto.{{ .torrentId }}.{{ .passkey }}"

View file

@ -0,0 +1,68 @@
---
#id: btn
name: BroadcasTheNet
identifier: btn
description: BroadcasTheNet (BTN) is a private torrent tracker focused on TV shows
language: en-us
urls:
- https://broadcasthe.net/
privacy: private
protocol: torrent
supports:
- irc
- rss
source: gazelle
settings:
- name: authkey
type: text
label: Auth key
description: Right click DL on a torrent and get the authkey.
- name: torrent_pass
type: text
label: Torrent pass
description: Right click DL on a torrent and get the torrent_pass.
irc:
network: BroadcasTheNet
server: irc.broadcasthenet.net:6697
port: 6697
channels:
- "#BTN-Announce"
announcers:
- Barney
parse:
type: multi
lines:
-
test:
- "NOW BROADCASTING! [ Lost S06E07 720p WEB-DL DD 5.1 H.264 - LP ]"
pattern: ^NOW BROADCASTING! \[(.*)\]
vars:
- torrentName
-
test:
- "[ Title: S06E07 ] [ Series: Lost ]"
pattern: '^\[ Title: (.*) \] \[ Series: (.*) \]'
vars:
- title
- name1
-
test:
- "[ 2010 ] [ Episode ] [ MKV | x264 | WEB ] [ Uploader: Uploader1 ]"
pattern: '^(?:\[ (\d+) \] )?\[ (.*) \] \[ (.*) \] \[ Uploader: (.*?) \](?: \[ Pretime: (.*) \])?'
vars:
- year
- category
- tags
- uploader
- preTime
-
test:
- "[ https://XXXXXXXXX/torrents.php?id=7338 / https://XXXXXXXXX/torrents.php?action=download&id=9116 ]"
pattern: ^\[ .* / (https?://.*id=\d+) \]
vars:
- baseUrl
match:
torrenturl: "{{ .baseUrl }}&authkey={{ .authkey }}&torrent_pass={{ .torrent_pass }}"

View file

@ -0,0 +1,48 @@
---
#id: emp
name: Empornium
identifier: emp
description: Empornium (EMP) is a private torrent tracker for XXX
language: en-us
urls:
- https://www.empornium.is
privacy: private
protocol: torrent
supports:
- irc
- rss
source: gazelle
settings:
- name: authkey
type: text
label: Auth key
description: Right click DL on a torrent and get the authkey.
- name: torrent_pass
type: text
label: Torrent pass
description: Right click DL on a torrent and get the torrent_pass.
irc:
network: DigitalIRC
server: irc.empornium.is:6697
port: 6697
channels:
- "#empornium-announce"
announcers:
- "^Wizard^"
parse:
type: single
lines:
-
pattern: '^(.*?) - Size: ([0-9]+?.*?) - Uploader: (.*?) - Tags: (.*?) - (https://.*torrents.php\?)id=(.*)$'
vars:
- torrentName
- torrentSize
- uploader
- tags
- baseUrl
- torrentId
match:
torrenturl: "{{ .baseUrl }}action=download&id={{ .torrentId }}&authkey={{ .authkey }}&torrent_pass={{ .torrent_pass }}"

View file

@ -0,0 +1,53 @@
---
#id: filelist
name: FileList
identifier: fl
description: FileList (FL) is a ROMANIAN private torrent tracker for MOVIES / TV / GENERAL
language: en-us
urls:
- https://filelist.io
privacy: private
protocol: torrent
supports:
- irc
- rss
source: custom
settings:
- name: passkey
type: text
label: Passkey
tooltip: The passkey in your profile.
description: "The passkey in your profile."
irc:
network: FileList
server: irc.filelist.io:6697
port: 6697
channels:
- "#announce"
announcers:
- Announce
parse:
type: single
lines:
-
test:
- 'New Torrent: This.Really.Old.Movie.1965.DVDRip.DD1.0.x264 -- [Filme SD] [1.91 GB] -- https://filelist.io/details.php?id=746781 -- by uploader1'
- 'New Torrent: This.New.Movie.2021.1080p.Blu-ray.AVC.DTS-HD.MA.5.1-BEATRIX -- [FreeLeech!] -- [Filme Blu-Ray] [26.78 GB] -- https://filelist.io/details.php?id=746782 -- by uploader1'
- 'New Torrent: This.New.Movie.2021.1080p.Remux.AVC.DTS-HD.MA.5.1-playBD -- [FreeLeech!] -- [Internal!] -- [Filme Blu-Ray] [17.69 GB] -- https://filelist.io/details.php?id=746789 -- by uploader1'
pattern: 'New Torrent: (.*?) (?:-- \[(FreeLeech!)] )?(?:-- \[(Internal!)] )?-- \[(.*)] \[(.*)] -- (https?:\/\/filelist.io\/).*id=(.*) -- by (.*)'
vars:
- torrentName
- freeleech
- internal
- category
- torrentSize
- baseUrl
- torrentId
- uploader
match:
torrenturl: "{{ .baseUrl }}download.php?id={{ .torrentId }}&file={{ .torrentName }}.torrent&passkey={{ .passkey }}"
encode:
- torrentName

View file

@ -0,0 +1,56 @@
---
#id: gazellegames
name: GazelleGames
identifier: ggn
description: GazelleGames (GGn) is a private torrent tracker for GAMES
language: en-us
urls:
- https://gazellegames.net/
privacy: private
protocol: torrent
supports:
- irc
- rss
source: gazelle
settings:
- name: authkey
type: text
label: Auth key
tooltip: Right click DL on a torrent and get the authkey.
description: Right click DL on a torrent and get the authkey.
- name: torrent_pass
type: text
label: Torrent pass
tooltip: Right click DL on a torrent and get the torrent_pass.
description: Right click DL on a torrent and get the torrent_pass.
irc:
network: GGn
server: irc.gazellegames.net:7000
port: 7000
channels:
- "#GGn-Announce"
announcers:
- Vertigo
parse:
type: single
lines:
-
test:
- "Uploader :-: Nintendo 3DS :-: Yo-Kai.Watch.KOR.3DS-BigBlueBox in Yo-kai Watch [2013] ::Korean, Multi-Region, Scene:: https://gazellegames.net/torrents.php?torrentid=78851 - adventure, role_playing_game, nintendo;"
- "Uploader :-: Windows :-: Warriors.Wrath.Evil.Challenge-HI2U in Warriors' Wrath [2016] ::English, Scene:: FREELEECH! :: https://gazellegames.net/torrents.php?torrentid=78902 - action, adventure, casual, indie, role.playing.game;"
pattern: '^(.+) :-: (.+) :-: (.+) \[(\d+)\] ::(.+?):: ?(.+? ::)? https?:\/\/([^\/]+\/)torrents.php\?torrentid=(\d+) ?-? ?(.*?)?;?$'
vars:
- uploader
- category
- torrentName
- year
- flags
- bonus
- baseUrl
- torrentId
- tags
match:
torrenturl: "{{ .baseUrl }}torrents.php?action=download&id={{ .torrentId }}&authkey={{ .authkey }}&torrent_pass={{ .torrent_pass }}"

View file

@ -0,0 +1,49 @@
---
#id: hdt
name: HD-Torrents
identifier: hdt
description: HD-Torrents (HD-T) is a private torrent tracker for HD MOVIES / TV
language: en-us
urls:
- https://hd-torrents.org/
- https://hdts.ru
privacy: private
protocol: torrent
supports:
- irc
- rss
source: xbtit
settings:
- name: cookie
type: text
label: Cookie
description: "FireFox -> Preferences -> Privacy -> Show Cookies and find the uid and pass cookies. Example: uid=1234; pass=asdf12347asdf13"
irc:
network: P2P-NET
server: irc.p2p-network.net:6697
port: 6697
channels:
- "#HD-Torrents.Announce"
announcers:
- HoboLarry
parse:
type: single
lines:
-
test:
- "New Torrent in category [XXX/Blu-ray] Erotische Fantasien 3D (2008) Blu-ray 1080p AVC DTS-HD MA 7 1 (14.60 GB) uploaded! Download: https://hd-torrents.org/download.php?id=806bc36530d146969d300c5352483a5e6e0639e9"
pattern: 'New Torrent in category \[([^\]]*)\] (.*) \(([^\)]*)\) uploaded! Download\: https?\:\/\/([^\/]+\/).*[&\?]id=([a-f0-9]+)'
vars:
- category
- torrentName
- torrentSize
- baseUrl
- torrentId
match:
torrenturl: "https://{{ .baseUrl }}download.php?id={{ .torrentId }}&f={{ .torrentName }}.torrent"
cookie: true
encode:
- torrentName

View file

@ -0,0 +1,53 @@
---
#id: iptorrents
name: IPTorrents
identifier: ipt
description: IPTorrents (IPT) is a private torrent tracker for 0DAY / GENERAL.
language: en-us
urls:
- https://iptorrents.com/
- https://iptorrents.me/
privacy: private
protocol: torrent
supports:
- irc
- rss
source: unknown
settings:
- name: passkey
type: text
label: Passkey
tooltip: Copy the passkey from your details page
description: "Copy the passkey from your details page."
irc:
network: IPTorrents
server: irc.iptorrents.com:6697
port: 6697
channels:
- "#ipt.announce"
- "#ipt.announce2"
announcers:
- IPT
- FunTimes
parse:
type: single
lines:
-
test:
- "[Movie/XXX] Audrey Bitoni HD Pack FREELEECH - http://www.iptorrents.com/details.php?id=789421 - 14.112 GB"
- "[Movies/XviD] The First Men In The Moon 2010 DVDRip XviD-VoMiT - http://www.iptorrents.com/details.php?id=396589 - 716.219 MB"
pattern: '^\[([^\]]*)](.*?)\s*(FREELEECH)*\s*-\s+https?\:\/\/([^\/]+).*[&\?]id=(\d+)\s*-(.*)'
vars:
- category
- torrentName
- freeleech
- baseUrl
- torrentId
- torrentSize
match:
torrenturl: "{{ .baseUrl }}download.php?id={{ .torrentId }}&file={{ .torrentName }}.torrent&passkey={{ .passkey }}"
encode:
- torrentName

View file

@ -0,0 +1,55 @@
---
#id: nebulance
name: Nebulance
identifier: nbl
description: Nebulance (NBL) is a ratioless private torrent tracker for TV
language: en-us
urls:
- https://nebulance.io/
privacy: private
protocol: torrent
supports:
- irc
- rss
source: gazelle
settings:
- name: authkey
type: text
label: Auth key
tooltip: Right click DL on a torrent and get the authkey.
description: Right click DL on a torrent and get the authkey.
- name: torrent_pass
type: text
label: Torrent pass
tooltip: Right click DL on a torrent and get the torrent_pass.
description: Right click DL on a torrent and get the torrent_pass.
irc:
network: Nebulance
server: irc.nebulance.cc:6697
port: 6697
channels:
- "#nbl-announce"
announcers:
- DRADIS
parse:
type: single
lines:
-
test:
- "[Episodes] The Vet Life - S02E08 [WebRip / x264 / MKV / 720p / HD / VLAD / The.Vet.Life.S02E08.Tuskegee.Reunion.720p.ANPL.WEBRip.AAC2.0.x264-VLAD.mkv] [702.00 MB - Uploader: UPLOADER] - http://nebulance.io/torrents.php?id=147 [Tags: comedy,subtitles,cbs]"
- "[Seasons] Police Interceptors - S10 [HDTV / x264 / MKV / MP4 / 480p / SD / BTN / Police.Interceptors.S10.HDTV.x264-BTN] [5.27 GB - Uploader: UPLOADER] - http://nebulance.io/torrents.php?id=1472 [Tags: comedy,subtitles,cbs]"
pattern: '\[(.*?)\] (.*?) \[(.*?)\] \[(.*?) - Uploader: (.*?)\] - (https?://.*)id=(\d+) \[Tags: (.*)\]'
vars:
- category
- torrentName
- releaseTags
- torrentSize
- uploader
- baseUrl
- torrentId
- tags
match:
torrenturl: "{{ .baseUrl }}action=download&id={{ .torrentId }}&authkey={{ .authkey }}&torrent_pass={{ .torrent_pass }}"

View file

@ -0,0 +1,50 @@
---
#id: orpheus
name: Orpheus
identifier: ops
description: Orpheus (OPS) is a Private Torrent Tracker for MUSIC
language: en-us
urls:
- https://orpheus.network/
privacy: private
protocol: torrent
supports:
- irc
- rss
source: gazelle
settings:
- name: authkey
type: text
label: Auth key
tooltip: Right click DL on a torrent and get the authkey.
description: Right click DL on a torrent and get the authkey.
- name: torrent_pass
type: text
label: Torrent pass
tooltip: Right click DL on a torrent and get the torrent_pass.
description: Right click DL on a torrent and get the torrent_pass.
irc:
network: Orpheus
server: irc.orpheus.network:7000
port: 7000
channels:
- "#announce"
announcers:
- hermes
parse:
type: single
lines:
-
test:
- "TORRENT: Todd Edwards - You Came To Me [2002] [Single] - FLAC / Lossless / WEB - 2000s,house,uk.garage,garage.house - https://orpheus.network/torrents.php?id=756102 / https://orpheus.network/torrents.php?action=download&id=1647868"
- "TORRENT: THE BOOK [2021] [Album] - FLAC / Lossless / CD - - https://orpheus.network/torrents.php?id=693523 / https://orpheus.network/torrents.php?action=download&id=1647867"
pattern: 'TORRENT: (.*) - (.*) - https?://.* / (https?://.*id=\d+)'
vars:
- torrentName
- tags
- baseUrl
match:
torrenturl: "{{ .baseUrl }}&authkey={{ .authkey }}&torrent_pass={{ .torrent_pass }}"

View file

@ -0,0 +1,51 @@
---
#id: ptp
name: PassThePopcorn
identifier: ptp
description: PassThePopcorn (PTP) is a private torrent tracker for MOVIES
language: en-us
urls:
- https://passthepopcorn.me
privacy: private
protocol: torrent
supports:
- irc
- rss
source: gazelle
settings:
- name: authkey
type: text
label: Auth key
tooltip: Right click DL on a torrent and get the authkey.
description: Right click DL on a torrent and get the authkey.
- name: torrent_pass
type: text
label: Torrent pass
tooltip: Right click DL on a torrent and get the torrent_pass.
description: Right click DL on a torrent and get the torrent_pass.
irc:
network: PassThePopcorn
server: irc.passthepopcorn.me:7000
port: 7000
channels:
- "#ptp-announce"
announcers:
- Hummingbird
parse:
type: single
lines:
-
test:
- "Irene Huss - Nattrond AKA The Night Round [2008] by Anders Engström - XviD / DVD / AVI / 640x352 - http://passthepopcorn.me/torrents.php?id=51627 / http://passthepopcorn.me/torrents.php?action=download&id=97333 - crime, drama, mystery"
- "Dirty Rotten Scoundrels [1988] by Frank Oz - x264 / Blu-ray / MKV / 720p - http://passthepopcorn.me/torrents.php?id=10735 / http://passthepopcorn.me/torrents.php?action=download&id=97367 - comedy, crime"
pattern: '^(.*)-\s*https?:.*[&\?]id=.*https?\:\/\/([^\/]+\/).*[&\?]id=(\d+)\s*-\s*(.*)'
vars:
- torrentName
- baseUrl
- torrentId
- tags
match:
torrenturl: "https://{{ .baseUrl }}torrents.php?action=download&id={{ .torrentId }}&authkey={{ .authkey }}&torrent_pass={{ .torrent_pass }}"

View file

@ -0,0 +1,51 @@
---
#id: red
name: Redacted
identifier: redacted
description: Redacted (RED) is a private torrent tracker for MUSIC
language: en-us
urls:
- https://redacted.ch/
privacy: private
protocol: torrent
supports:
- irc
- rss
source: gazelle
settings:
- name: authkey
type: text
label: Auth key
tooltip: Right click DL on a torrent and get the authkey.
description: Right click DL on a torrent and get the authkey.
- name: torrent_pass
type: text
label: Torrent pass
tooltip: Right click DL on a torrent and get the torrent_pass.
description: Right click DL on a torrent and get the torrent_pass.
irc:
network: Scratch-Network
server: irc.scratch-network.net:6697
port: 6697
channels:
- "#red-announce"
announcers:
- Drone
parse:
type: single
lines:
-
test:
- "JR Get Money - Nobody But You [2008] [Single] - FLAC / Lossless / Log / 100% / Cue / CD - https://redacted.ch/torrents.php?id=1592366 / https://redacted.ch/torrents.php?action=download&id=3372962 - hip.hop,rhythm.and.blues,2000s"
- "Johann Sebastian Bach performed by Festival Strings Lucerne under Rudolf Baumgartner - Brandenburg Concertos 5 and 6, Suite No 2 [1991] [Album] - FLAC / Lossless / Log / 100% / Cue / CD - https://redacted.ch/torrents.php?id=1592367 / https://redacted.ch/torrents.php?action=download&id=3372963 - classical"
pattern: '^(.*)\s+-\s+https?:.*[&\?]id=.*https?\:\/\/([^\/]+\/).*[&\?]id=(\d+)\s*-\s*(.*)'
vars:
- torrentName
- baseUrl
- torrentId
- tags
match:
torrenturl: "https://{{ .baseUrl }}torrents.php?action=download&id={{ .torrentId }}&authkey={{ .authkey }}&torrent_pass={{ .torrent_pass }}"

View file

@ -0,0 +1,49 @@
---
#id: superbits
name: SuperBits
identifier: superbits
description: Superbits is a SWEDISH private torrent tracker for MOVIES / TV / 0DAY / GENERAL
language: en-us
urls:
- https://superbits.org/
privacy: private
protocol: torrent
supports:
- irc
- rss
source: rartracker
settings:
- name: passkey
type: text
label: Passkey
tooltip: Copy the passkey from the /rss page
description: "Copy the passkey from the /rss page."
irc:
network: SuperBits
server: irc.superbits.org:6697
port: 6697
channels:
- "#autodl"
announcers:
- SuperBits
parse:
type: single
lines:
-
test:
- "-[archive Film 1080]2[Asterix.Et.La.Surprise.De.Cesar.1985.FRENCH.1080p.BluRay.x264-TSuNaMi]3[844551]4[Size: 4.41 GB]5[FL: no]6[Scene: yes]"
- "-[new TV]2[Party.Down.South.S05E05.720p.WEB.h264-DiRT]3[844557]4[Size: 964.04 MB]5[FL: no]6[Scene: yes]7[Pred 1m 30s ago]"
pattern: '\-\[(.*)\]2\[(.*)\]3\[(\d+)\]4\[Size\:\s(.*)\]5\[FL\:\s(no|yes)\]6\[Scene\:\s(no|yes)\](?:7\[Pred\s(.*)\sago\])?'
vars:
- category
- torrentName
- torrentId
- torrentSize
- freeleech
- scene
- preTime
match:
torrenturl: "https://superbits.org/download.php?id={{ .torrentId }}&passkey={{ .passkey }}"

View file

@ -0,0 +1,53 @@
---
#id: tracker01
name: TorrentLeech
identifier: torrentleech
description: TorrentLeech (TL) is a private torrent tracker for 0DAY / GENERAL.
language: en-us
urls:
- https://www.torrentleech.org
privacy: private
protocol: torrent
supports:
- irc
- rss
source: custom
settings:
- name: rsskey
type: text
label: RSS key
tooltip: The rsskey in your TorrentLeech RSS feed link.
description: "Go to your profile and copy and paste your RSS link to extract the rsskey."
regex: /([\da-fA-F]{20})
irc:
network: TorrentLeech.org
server: irc.torrentleech.org:7021
port: 7021
channels:
- "#tlannounces"
announcers:
- _AnnounceBot_
parse:
type: single
lines:
-
test:
- "New Torrent Announcement: <PC :: Iso> Name:'debian live 10 6 0 amd64 standard iso' uploaded by 'Anonymous' - http://www.tracker01.test/torrent/263302"
- "New Torrent Announcement: <PC :: Iso> Name:'debian live 10 6 0 amd64 standard iso' uploaded by 'Anonymous' freeleech - http://www.tracker01.test/torrent/263302"
pattern: New Torrent Announcement:\s*<([^>]*)>\s*Name:'(.*)' uploaded by '([^']*)'\s*(freeleech)*\s*-\s*https?\:\/\/([^\/]+\/)torrent\/(\d+)
vars:
- category
- torrentName
- uploader
- freeleech
- baseUrl
- torrentId
match:
torrenturl: "https://{{ .baseUrl }}rss/download/{{ .torrentId }}/{{ .rsskey }}/{{ .torrentName }}.torrent"
encode:
- torrentName

View file

@ -0,0 +1,52 @@
---
#id: uhd
name: UHDBits
identifier: uhdbits
description: UHDBits (UHD) is a private torrent tracker for HD MOVIES / TV
language: en-us
urls:
- https://uhdbits.org/
privacy: private
protocol: torrent
supports:
- irc
- rss
source: gazelle
settings:
- name: authkey
type: text
label: Auth key
tooltip: Right click DL on a torrent and get the authkey.
description: Right click DL on a torrent and get the authkey.
- name: torrent_pass
type: text
label: Torrent pass
tooltip: Right click DL on a torrent and get the torrent_pass.
description: Right click DL on a torrent and get the torrent_pass.
irc:
network: P2P-Network
server: irc.p2p-network.net:6697
port: 6697
channels:
- "#UHD.Announce"
announcers:
- UHDBot
- cr0nusbot
parse:
type: single
lines:
-
test:
- "New Torrent: D'Ardennen [2015] - TayTO Type: Movie / 1080p / Encode / Freeleech: 100 Size: 7.00GB - https://uhdbits.org/torrents.php?id=13882 / https://uhdbits.org/torrents.php?action=download&id=20488"
pattern: 'New Torrent: (.*) Type: (.*?) Freeleech: (.*) Size: (.*) - https?:\/\/.* \/ (https?:\/\/.*id=\d+)'
vars:
- torrentName
- releaseTags
- freeleechPercent
- torrentSize
- baseUrl
match:
torrenturl: "{{ .baseUrl }}&authkey={{ .authkey }}&torrent_pass={{ .torrent_pass }}"

252
internal/indexer/service.go Normal file
View file

@ -0,0 +1,252 @@
package indexer
import (
"fmt"
"io/fs"
"strings"
"gopkg.in/yaml.v2"
"github.com/rs/zerolog/log"
"github.com/autobrr/autobrr/internal/domain"
)
// Service manages stored indexers and the embedded indexer definitions.
type Service interface {
	Store(indexer domain.Indexer) (*domain.Indexer, error)
	Update(indexer domain.Indexer) (*domain.Indexer, error)
	Delete(id int) error
	FindByFilterID(id int) ([]domain.Indexer, error)
	List() ([]domain.Indexer, error)
	// GetAll returns stored indexers joined with their definition data.
	GetAll() ([]*domain.IndexerDefinition, error)
	// GetTemplates returns every embedded definition template.
	GetTemplates() ([]domain.IndexerDefinition, error)
	// LoadIndexerDefinitions parses the embedded YAML definition files.
	LoadIndexerDefinitions() error
	// GetIndexerByAnnounce resolves an indexer from an IRC announce key.
	GetIndexerByAnnounce(name string) *domain.IndexerDefinition
	// Start loads definitions and builds runtime state for enabled indexers.
	Start() error
}
// service implements Service, caching definitions and live instances in
// memory, keyed by indexer identifier.
type service struct {
	repo domain.IndexerRepo
	// indexerDefinitions holds every parsed YAML template, by identifier.
	indexerDefinitions map[string]domain.IndexerDefinition
	// indexerInstances holds enabled, configured indexers, by identifier.
	indexerInstances map[string]domain.IndexerDefinition
	// mapIndexerIRCToName maps "server:channel:announcer" keys to identifiers.
	mapIndexerIRCToName map[string]string
}
// NewService wires an indexer service around the given repository with
// empty in-memory caches.
func NewService(repo domain.IndexerRepo) Service {
	svc := &service{
		repo:                repo,
		indexerDefinitions:  map[string]domain.IndexerDefinition{},
		indexerInstances:    map[string]domain.IndexerDefinition{},
		mapIndexerIRCToName: map[string]string{},
	}
	return svc
}
// Store persists a new indexer via the repository.
func (s *service) Store(indexer domain.Indexer) (*domain.Indexer, error) {
	stored, err := s.repo.Store(indexer)
	if err != nil {
		return nil, err
	}
	return stored, nil
}
// Update saves changes to an existing indexer via the repository.
func (s *service) Update(indexer domain.Indexer) (*domain.Indexer, error) {
	updated, err := s.repo.Update(indexer)
	if err != nil {
		return nil, err
	}
	return updated, nil
}
// Delete removes the indexer with the given id.
func (s *service) Delete(id int) error {
	return s.repo.Delete(id)
}
// FindByFilterID returns the indexers associated with a filter.
func (s *service) FindByFilterID(id int) ([]domain.Indexer, error) {
	indexers, err := s.repo.FindByFilterID(id)
	if err != nil {
		return nil, err
	}
	return indexers, nil
}
// List returns every stored indexer.
func (s *service) List() ([]domain.Indexer, error) {
	indexers, err := s.repo.List()
	if err != nil {
		return nil, err
	}
	return indexers, nil
}
// GetAll returns every stored indexer joined with its embedded definition:
// template metadata comes from the definition, enabled state and setting
// values from the stored row. Indexers without a definition are skipped.
func (s *service) GetAll() ([]*domain.IndexerDefinition, error) {
	indexers, err := s.repo.List()
	if err != nil {
		return nil, err
	}

	var res = make([]*domain.IndexerDefinition, 0)

	for _, indexer := range indexers {
		in := s.getDefinitionByName(indexer.Identifier)
		if in == nil {
			// if no indexerDefinition found, continue
			continue
		}

		// start from the template, overlaying the stored indexer's identity
		// and enabled flag
		temp := domain.IndexerDefinition{
			ID:          indexer.ID,
			Name:        in.Name,
			Identifier:  in.Identifier,
			Enabled:     indexer.Enabled,
			Description: in.Description,
			Language:    in.Language,
			Privacy:     in.Privacy,
			Protocol:    in.Protocol,
			URLS:        in.URLS,
			Settings:    nil,
			SettingsMap: make(map[string]string),
			IRC:         in.IRC,
			Parse:       in.Parse,
		}

		// map settings
		// add value to settings objects; settings with no stored value are
		// still appended (with their zero Value)
		for _, setting := range in.Settings {
			if v, ok := indexer.Settings[setting.Name]; ok {
				setting.Value = v
				temp.SettingsMap[setting.Name] = v
			}

			temp.Settings = append(temp.Settings, setting)
		}

		res = append(res, &temp)
	}

	return res, nil
}
// GetTemplates returns every loaded definition template. Order is not
// deterministic (map iteration), and an empty cache yields a nil slice.
func (s *service) GetTemplates() ([]domain.IndexerDefinition, error) {
	var templates []domain.IndexerDefinition
	for _, def := range s.indexerDefinitions {
		templates = append(templates, def)
	}
	return templates, nil
}
// Start loads all definitions, then builds runtime state for every enabled
// indexer: the instance cache and the IRC announce lookup map.
func (s *service) Start() error {
	err := s.LoadIndexerDefinitions()
	if err != nil {
		return err
	}

	indexers, err := s.GetAll()
	if err != nil {
		return err
	}

	for _, indexer := range indexers {
		if !indexer.Enabled {
			continue
		}

		s.indexerInstances[indexer.Identifier] = *indexer

		// map irc stuff to indexer.name
		if indexer.IRC != nil {
			server := indexer.IRC.Server

			// key format: server:channel:announcer
			// NOTE(review): lookups elsewhere build keys from the network
			// addr — confirm the key parts agree or announces won't resolve
			for _, channel := range indexer.IRC.Channels {
				for _, announcer := range indexer.IRC.Announcers {
					val := fmt.Sprintf("%v:%v:%v", server, channel, announcer)
					s.mapIndexerIRCToName[val] = indexer.Identifier
				}
			}
		}
	}

	return nil
}
// LoadIndexerDefinitions parses every embedded YAML definition file and
// caches it in s.indexerDefinitions keyed by identifier.
//
// Unlike the earlier version, failures return an error instead of calling
// log.Fatal (which would terminate the whole process from a library method),
// and the empty-directory branch no longer returns a nil error.
func (s *service) LoadIndexerDefinitions() error {
	entries, err := fs.ReadDir(Definitions, "definitions")
	if err != nil {
		log.Error().Err(err).Msg("failed reading definitions directory")
		return err
	}

	if len(entries) == 0 {
		log.Error().Msg("no indexer definitions found")
		return fmt.Errorf("no indexer definitions found")
	}

	for _, f := range entries {
		// only parse YAML definition files (suffix match, not substring)
		if !strings.HasSuffix(f.Name(), ".yaml") {
			continue
		}

		filePath := "definitions/" + f.Name()
		log.Debug().Msgf("parsing: %v", filePath)

		data, err := fs.ReadFile(Definitions, filePath)
		if err != nil {
			log.Debug().Err(err).Msgf("failed reading file: %v", filePath)
			return err
		}

		var d domain.IndexerDefinition
		if err := yaml.Unmarshal(data, &d); err != nil {
			log.Error().Err(err).Msgf("failed unmarshal file: %v", filePath)
			return err
		}

		s.indexerDefinitions[d.Identifier] = d
	}

	return nil
}
// GetIndexerByAnnounce resolves an enabled indexer instance from an IRC
// announce key ("server:channel:announcer"), or nil if unknown.
func (s *service) GetIndexerByAnnounce(name string) *domain.IndexerDefinition {
	identifier, found := s.mapIndexerIRCToName[name]
	if !found {
		return nil
	}

	indexer, ok := s.indexerInstances[identifier]
	if !ok {
		return nil
	}
	return &indexer
}
// getDefinitionByName returns the loaded definition for an identifier, or
// nil when no such definition was loaded.
func (s *service) getDefinitionByName(name string) *domain.IndexerDefinition {
	def, found := s.indexerDefinitions[name]
	if !found {
		return nil
	}
	return &def
}
// getDefinitionForAnnounce resolves a definition for an announce key
// (map[network:channel:announcer] = indexer01). The lookup is identical to
// getDefinitionByName, so delegate instead of duplicating the map access.
func (s *service) getDefinitionForAnnounce(name string) *domain.IndexerDefinition {
	return s.getDefinitionByName(name)
}

260
internal/irc/handler.go Normal file
View file

@ -0,0 +1,260 @@
package irc
import (
"context"
"crypto/tls"
"errors"
"fmt"
"net"
"regexp"
"strings"
"time"
"github.com/autobrr/autobrr/internal/announce"
"github.com/autobrr/autobrr/internal/domain"
"github.com/rs/zerolog/log"
"gopkg.in/irc.v3"
)
var (
	// connectTimeout bounds how long a dial to an IRC server may take.
	connectTimeout = 15 * time.Second
)
// Handler maintains a single IRC network connection and forwards announce
// lines to the announce service.
type Handler struct {
	network         *domain.IrcNetwork
	announceService announce.Service
	conn            net.Conn
	ctx             context.Context // connection lifetime; cancelled by Stop
	// stopped signals shutdown to isStopped; NOTE(review): nothing closes it
	// currently (the close in Stop is commented out)
	stopped chan struct{}
	cancel  context.CancelFunc
}
// NewHandler builds an IRC handler for a single network. The connection and
// context remain unset (zero values) until Run is called.
func NewHandler(network domain.IrcNetwork, announceService announce.Service) *Handler {
	h := &Handler{
		network:         &network,
		announceService: announceService,
		stopped:         make(chan struct{}),
	}
	return h
}
// Run dials the network (optionally wrapping the socket in TLS), then runs
// the IRC client loop until the context is cancelled or the connection
// drops. It blocks for the lifetime of the connection.
func (s *Handler) Run() error {
	//log.Debug().Msgf("server %+v", s.network)

	if s.network.Addr == "" {
		return errors.New("addr not set")
	}

	// context used to cancel the dial and the client loop from Stop
	ctx, cancel := context.WithCancel(context.Background())
	s.ctx = ctx
	s.cancel = cancel

	dialer := net.Dialer{
		Timeout: connectTimeout,
	}

	var netConn net.Conn
	var err error
	addr := s.network.Addr

	// decide to use SSL or not
	if s.network.TLS {
		// NOTE(review): certificate verification is disabled for all TLS
		// trackers — confirm this is intentional
		tlsConf := &tls.Config{
			InsecureSkipVerify: true,
		}

		netConn, err = dialer.DialContext(s.ctx, "tcp", addr)
		if err != nil {
			log.Error().Err(err).Msgf("failed to dial %v", addr)
			return fmt.Errorf("failed to dial %q: %v", addr, err)
		}

		netConn = tls.Client(netConn, tlsConf)
		s.conn = netConn
	} else {
		netConn, err = dialer.DialContext(s.ctx, "tcp", addr)
		if err != nil {
			log.Error().Err(err).Msgf("failed to dial %v", addr)
			return fmt.Errorf("failed to dial %q: %v", addr, err)
		}

		s.conn = netConn
	}

	log.Info().Msgf("Connected to: %v", addr)

	config := irc.ClientConfig{
		Nick: s.network.Nick,
		User: s.network.Nick,
		Name: s.network.Nick,
		Pass: s.network.Pass,
		Handler: irc.HandlerFunc(func(c *irc.Client, m *irc.Message) {
			// dispatch on the IRC command / numeric reply
			switch m.Command {
			case "001":
				// 001 is a welcome event, so we join channels there
				err := s.onConnect(c, s.network.Channels)
				if err != nil {
					log.Error().Msgf("error joining channels %v", err)
				}

			case "366":
				// TODO: handle joined
				log.Debug().Msgf("JOINED: %v", m)

			case "433":
				// TODO: handle nick in use
				log.Debug().Msgf("NICK IN USE: %v", m)

			case "448", "475", "477":
				// TODO: handle join failed
				log.Debug().Msgf("JOIN FAILED: %v", m)

			case "KICK":
				log.Debug().Msgf("KICK: %v", m)

			case "MODE":
				// TODO: handle mode change
				log.Debug().Msgf("MODE CHANGE: %v", m)

			case "INVITE":
				// TODO: handle invite
				log.Debug().Msgf("INVITE: %v", m)

			case "PART":
				// TODO: handle parted
				log.Debug().Msgf("PART: %v", m)

			case "PRIVMSG":
				// announce lines arrive as channel messages
				err := s.onMessage(m)
				if err != nil {
					log.Error().Msgf("error on message %v", err)
				}
			}
		}),
	}

	// Create the client
	client := irc.NewClient(s.conn, config)

	// Connect
	err = client.RunContext(ctx)
	if err != nil {
		log.Error().Err(err).Msgf("could not connect to %v", addr)
		return err
	}

	return nil
}
// GetNetwork returns the network this handler manages.
func (s *Handler) GetNetwork() *domain.IrcNetwork {
	return s.network
}
// Stop cancels the handler's context (aborting the client loop) and closes
// the network connection if one was established.
func (s *Handler) Stop() {
	s.cancel()

	if c := s.conn; c != nil {
		c.Close()
	}
}
// isStopped reports whether the stopped channel has been closed, without
// blocking.
func (s *Handler) isStopped() bool {
	select {
	case <-s.stopped:
		return true
	default:
	}
	return false
}
// onConnect runs the network's connect commands, then joins every channel,
// pausing a second between writes to avoid flooding the server. Write
// failures are logged and skipped rather than aborting the sequence.
func (s *Handler) onConnect(client *irc.Client, channels []domain.IrcChannel) error {
	// TODO check commands like nickserv before joining
	for _, raw := range s.network.ConnectCommands {
		cmd := strings.TrimLeft(raw, "/")

		log.Info().Msgf("send connect command: %v to network: %s", cmd, s.network.Name)

		if err := client.Write(cmd); err != nil {
			log.Error().Err(err).Msgf("error sending connect command %v to network: %v", raw, s.network.Name)
			continue
		}

		time.Sleep(1 * time.Second)
	}

	for _, ch := range channels {
		join := fmt.Sprintf("JOIN %s", ch.Name)

		// handle channel password
		if ch.Password != "" {
			join = fmt.Sprintf("JOIN %s %s", ch.Name, ch.Password)
		}

		if err := client.Write(join); err != nil {
			log.Error().Err(err).Msgf("error joining channel: %v", ch.Name)
			continue
		}

		log.Info().Msgf("Monitoring channel %s", ch.Name)
		time.Sleep(1 * time.Second)
	}

	return nil
}
// OnJoin is a placeholder hook for join events; it currently does nothing.
func (s *Handler) OnJoin(msg string) (interface{}, error) {
	return nil, nil
}
// onMessage handles a PRIVMSG: it builds the announce lookup key, strips
// IRC formatting codes from the line, and hands it to the announce service
// asynchronously so parsing never blocks the IRC read loop.
func (s *Handler) onMessage(msg *irc.Message) error {
	log.Debug().Msgf("msg: %v", msg)
	// parse announce
	channel := &msg.Params[0]
	announcer := &msg.Name
	message := msg.Trailing()
	// TODO add network
	// add correlationID and tracing

	// key format: addr:channel:announcer — must match the indexer lookup map
	announceID := fmt.Sprintf("%v:%v:%v", s.network.Addr, *channel, *announcer)

	// clean message
	cleanedMsg := cleanMessage(message)

	go func() {
		err := s.announceService.Parse(announceID, cleanedMsg)
		if err != nil {
			log.Error().Err(err).Msgf("could not parse line: %v", cleanedMsg)
		}
	}()

	return nil
}
// messageCleanRegexp strips mIRC formatting codes: reset (0x0f), underline
// (0x1f), bold (0x02) and color (0x03 with optional fg[,bg] digit codes).
// Compiled once at package init — the previous version recompiled it for
// every incoming IRC line on the hot path.
var messageCleanRegexp = regexp.MustCompile(`\x0f|\x1f|\x02|\x03(?:[\d]{1,2}(?:,[\d]{1,2})?)?`)

// cleanMessage removes IRC control/color codes from an announce line so the
// parser sees plain text.
func cleanMessage(message string) string {
	return messageCleanRegexp.ReplaceAllString(message, "")
}

221
internal/irc/service.go Normal file
View file

@ -0,0 +1,221 @@
package irc
import (
"context"
"fmt"
"sync"
"github.com/autobrr/autobrr/internal/announce"
"github.com/autobrr/autobrr/internal/domain"
"github.com/rs/zerolog/log"
)
// Service manages IRC network configuration and the lifecycle of the
// per-network connection handlers.
type Service interface {
	// StartHandlers connects every enabled network from the repository.
	StartHandlers()
	// StopNetwork stops the handler for the named network, if running.
	StopNetwork(name string) error
	ListNetworks(ctx context.Context) ([]domain.IrcNetwork, error)
	GetNetworkByID(id int64) (*domain.IrcNetwork, error)
	DeleteNetwork(ctx context.Context, id int64) error
	StoreNetwork(network *domain.IrcNetwork) error
	StoreChannel(networkID int64, channel *domain.IrcChannel) error
}
// service implements Service with one Handler per network, keyed by
// network name.
type service struct {
	repo            domain.IrcRepo
	announceService announce.Service
	indexerMap      map[string]string
	// handlers maps network name -> running handler; guarded by lock
	handlers map[string]*Handler
	stopWG   sync.WaitGroup
	lock     sync.Mutex
}
// NewService creates an IRC service that manages one Handler per network.
func NewService(repo domain.IrcRepo, announceService announce.Service) Service {
	svc := &service{
		repo:            repo,
		announceService: announceService,
		handlers:        map[string]*Handler{},
	}
	return svc
}
// StartHandlers loads every enabled network, builds a Handler for each and
// runs it in its own goroutine.
//
// Fixes vs the previous version: stopWG.Done() now fires when Run returns
// (it used to be called immediately after spawning, so the WaitGroup never
// actually waited for anything), and the goroutine receives the handler and
// network name as arguments instead of capturing the shared loop variable
// (a data race / wrong-value bug on Go versions before 1.22).
func (s *service) StartHandlers() {
	networks, err := s.repo.ListNetworks(context.Background())
	if err != nil {
		log.Error().Msgf("failed to list networks: %v", err)
	}

	for _, network := range networks {
		if !network.Enabled {
			continue
		}

		// check if already in handlers
		//v, ok := s.handlers[network.Name]

		s.lock.Lock()
		channels, err := s.repo.ListChannels(network.ID)
		if err != nil {
			log.Error().Err(err).Msgf("failed to list channels for network %q", network.Addr)
		}
		network.Channels = channels

		handler := NewHandler(network, s.announceService)
		s.handlers[network.Name] = handler
		s.lock.Unlock()

		log.Debug().Msgf("starting network: %+v", network.Name)

		s.stopWG.Add(1)
		go func(h *Handler, name string) {
			defer s.stopWG.Done()
			if err := h.Run(); err != nil {
				log.Error().Err(err).Msgf("failed to start handler for network %q", name)
			}
		}(handler, network.Name)
	}
}
// startNetwork starts (or restarts) the handler for one network. If the
// network has no handler yet, one is created and registered first.
func (s *service) startNetwork(network domain.IrcNetwork) error {
	s.lock.Lock()
	handler, found := s.handlers[network.Name]
	s.lock.Unlock()

	if found {
		log.Debug().Msgf("starting network: %+v", network.Name)
		// only restart when a connection was previously established; a nil
		// conn means the handler has not finished its initial start yet
		if handler.conn != nil {
			go func() {
				if err := handler.Run(); err != nil {
					log.Error().Err(err).Msgf("failed to start handler for network %q", handler.network.Name)
				}
			}()
		}
		return nil
	}

	// not seen before: register a new handler and run it
	newHandler := NewHandler(network, s.announceService)

	s.lock.Lock()
	s.handlers[network.Name] = newHandler
	s.lock.Unlock()

	log.Debug().Msgf("starting network: %+v", network.Name)

	s.stopWG.Add(1)
	go func() {
		// Done when the handler exits; calling it right after launch (as the
		// original did) makes the WaitGroup useless.
		defer s.stopWG.Done()
		if err := newHandler.Run(); err != nil {
			log.Error().Err(err).Msgf("failed to start handler for network %q", network.Name)
		}
	}()

	return nil
}
// StopNetwork stops the handler for the named network if one is running.
// Unknown names are a no-op.
func (s *service) StopNetwork(name string) error {
	// guard the handlers map: StartHandlers/startNetwork write it concurrently
	s.lock.Lock()
	handler, found := s.handlers[name]
	s.lock.Unlock()

	if found {
		handler.Stop()
		log.Debug().Msgf("stopped network: %+v", name)
	}
	return nil
}
// GetNetworkByID loads a single network by id and attaches its channels.
func (s *service) GetNetworkByID(id int64) (*domain.IrcNetwork, error) {
	network, err := s.repo.GetNetworkByID(id)
	if err != nil {
		log.Error().Err(err).Msgf("failed to get network: %v", id)
		return nil, err
	}

	chans, err := s.repo.ListChannels(network.ID)
	if err != nil {
		log.Error().Err(err).Msgf("failed to list channels for network %q", network.Addr)
		return nil, err
	}
	network.Channels = append(network.Channels, chans...)

	return network, nil
}
// ListNetworks returns all stored networks, each with its channels attached.
func (s *service) ListNetworks(ctx context.Context) ([]domain.IrcNetwork, error) {
	networks, err := s.repo.ListNetworks(ctx)
	if err != nil {
		log.Error().Err(err).Msgf("failed to list networks: %v", err)
		return nil, err
	}

	var withChannels []domain.IrcNetwork
	for _, network := range networks {
		channels, chanErr := s.repo.ListChannels(network.ID)
		if chanErr != nil {
			log.Error().Msgf("failed to list channels for network %q: %v", network.Addr, chanErr)
			return nil, chanErr
		}
		network.Channels = append(network.Channels, channels...)
		withChannels = append(withChannels, network)
	}

	return withChannels, nil
}
// DeleteNetwork removes the network with the given id from storage.
func (s *service) DeleteNetwork(ctx context.Context, id int64) error {
	err := s.repo.DeleteNetwork(ctx, id)
	if err != nil {
		return err
	}

	log.Debug().Msgf("delete network: %+v", id)
	return nil
}
// StoreNetwork persists the network and its channels, then starts or stops
// its handler to match network.Enabled.
func (s *service) StoreNetwork(network *domain.IrcNetwork) error {
	if err := s.repo.StoreNetwork(network); err != nil {
		return err
	}
	log.Debug().Msgf("store network: %+v", network)

	// index into the slice instead of taking the address of the range
	// variable, which is reused across iterations (ranging a nil slice is a
	// no-op, so the old nil guard is unnecessary)
	for i := range network.Channels {
		if err := s.repo.StoreChannel(network.ID, &network.Channels[i]); err != nil {
			return err
		}
	}

	// reconcile the running handler with the enabled flag
	if !network.Enabled {
		log.Debug().Msgf("stopping network: %+v", network.Name)
		if err := s.StopNetwork(network.Name); err != nil {
			log.Error().Err(err).Msgf("could not stop network: %+v", network.Name)
			return fmt.Errorf("could not stop network: %v", network.Name)
		}
	} else {
		log.Debug().Msgf("starting network: %+v", network.Name)
		if err := s.startNetwork(*network); err != nil {
			log.Error().Err(err).Msgf("could not start network: %+v", network.Name)
			return fmt.Errorf("could not start network: %v", network.Name)
		}
	}

	return nil
}
// StoreChannel persists a single channel under the given network id.
func (s *service) StoreChannel(networkID int64, channel *domain.IrcChannel) error {
	return s.repo.StoreChannel(networkID, channel)
}

52
internal/logger/logger.go Normal file
View file

@ -0,0 +1,52 @@
package logger
import (
"io"
"os"
"time"
"github.com/autobrr/autobrr/internal/config"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"gopkg.in/natefinch/lumberjack.v2"
)
// Setup configures the global zerolog logger from the application config:
// log level, console output, and an optional size-rotated log file.
func Setup(cfg config.Cfg) {
	zerolog.TimeFieldFormat = time.RFC3339

	// unknown or empty level falls back to ERROR
	level := zerolog.ErrorLevel
	switch cfg.LogLevel {
	case "INFO":
		level = zerolog.InfoLevel
	case "DEBUG":
		level = zerolog.DebugLevel
	case "ERROR":
		level = zerolog.ErrorLevel
	case "WARN":
		level = zerolog.WarnLevel
	}
	zerolog.SetGlobalLevel(level)

	// always log to the console
	console := zerolog.ConsoleWriter{Out: os.Stderr, TimeFormat: time.RFC3339}
	writers := io.MultiWriter(console)

	// additionally log to a rotated file when a path is configured
	if cfg.LogPath != "" {
		rotated := &lumberjack.Logger{
			Filename:   cfg.LogPath,
			MaxSize:    100, // megabytes
			MaxBackups: 3,
		}
		writers = io.MultiWriter(console, rotated)
	}

	log.Logger = log.Output(writers)

	log.Print("Starting autobrr")
	log.Printf("Log-level: %v", cfg.LogLevel)
}

View file

@ -0,0 +1,79 @@
package release
import (
"fmt"
"github.com/anacrolix/torrent/metainfo"
"github.com/rs/zerolog/log"
"github.com/autobrr/autobrr/internal/action"
"github.com/autobrr/autobrr/internal/client"
"github.com/autobrr/autobrr/internal/domain"
)
// Service processes announced releases end-to-end: download, hash, run actions.
type Service interface {
	Process(announce domain.Announce) error
}
// service is the default release Service implementation.
type service struct {
	// actionSvc runs the filter's configured actions on a downloaded torrent.
	actionSvc action.Service
}
// NewService wires the release processor to the given action service.
func NewService(actionService action.Service) Service {
	svc := &service{actionSvc: actionService}
	return svc
}
// Process downloads the announced torrent file, extracts its info hash, and
// runs the matched filter's actions against it. Returns an error when the
// filter has no actions, the download fails, or an action fails.
func (s *service) Process(announce domain.Announce) error {
	log.Debug().Msgf("start to process release: %+v", announce)

	if announce.Filter.Actions == nil {
		return fmt.Errorf("no actions for filter: %v", announce.Filter.Name)
	}

	// TODO: check can download / smart episode / rules like active downloads

	// create http client
	c := client.NewHttpClient()

	// download torrent file
	// TODO check extra headers, cookie
	res, err := c.DownloadFile(announce.TorrentUrl, nil)
	if err != nil {
		log.Error().Err(err).Msgf("could not download file: %v", announce.TorrentName)
		return err
	}

	if res.FileName == "" {
		// the original returned the (nil) err here, silently reporting success
		return fmt.Errorf("download returned no file name for: %v", announce.TorrentName)
	}

	// onTorrentDownloaded: TODO match more filters like torrent size

	// Get meta info from file to find out the hash for later use
	meta, err := metainfo.LoadFromFile(res.FileName)
	if err != nil {
		log.Error().Err(err).Msgf("metainfo could not open file: %v", res.FileName)
		return err
	}

	// torrent info hash used for re-announce
	hash := meta.HashInfoBytes().String()

	// take action (watchFolder, test, runProgram, qBittorrent, Deluge etc)
	if err := s.actionSvc.RunActions(res.FileName, hash, *announce.Filter); err != nil {
		log.Error().Err(err).Msgf("error running actions for filter: %v", announce.Filter.Name)
		return err
	}

	// TODO: safe to delete tmp file

	return nil
}

43
internal/server/server.go Normal file
View file

@ -0,0 +1,43 @@
package server
import (
"sync"
"github.com/rs/zerolog/log"
"github.com/autobrr/autobrr/internal/indexer"
"github.com/autobrr/autobrr/internal/irc"
)
// Server coordinates application startup: indexer definitions first, then
// IRC network handlers.
type Server struct {
	// Hostname and Port are only used in the startup log line here —
	// TODO confirm whether they are consumed elsewhere.
	Hostname string
	Port     int

	indexerService indexer.Service
	ircService     irc.Service

	stopWG sync.WaitGroup
	lock   sync.Mutex
}
// NewServer builds a Server from its service dependencies.
func NewServer(ircSvc irc.Service, indexerSvc indexer.Service) *Server {
	srv := &Server{
		indexerService: indexerSvc,
		ircService:     ircSvc,
	}
	return srv
}
// Start boots the indexer definitions and then launches the IRC handlers.
func (s *Server) Start() error {
	log.Info().Msgf("Starting server. Listening on %v:%v", s.Hostname, s.Port)

	// instantiate indexers
	if err := s.indexerService.Start(); err != nil {
		return err
	}

	// instantiate and start irc networks
	s.ircService.StartHandlers()

	return nil
}

12
internal/utils/strings.go Normal file
View file

@ -0,0 +1,12 @@
package utils
// StrSliceContains reports whether str is present in s.
func StrSliceContains(s []string, str string) bool {
	for i := range s {
		if s[i] == str {
			return true
		}
	}
	return false
}

176
pkg/qbittorrent/client.go Normal file
View file

@ -0,0 +1,176 @@
package qbittorrent
import (
"bytes"
"fmt"
"io"
"mime/multipart"
"net/http"
"net/http/cookiejar"
"net/url"
"os"
"strings"
"time"
"github.com/rs/zerolog/log"
"golang.org/x/net/publicsuffix"
)
// Client is a qBittorrent WebUI API v2 client. The embedded http.Client's
// cookie jar holds the SID session cookie set by Login.
type Client struct {
	settings Settings
	http     *http.Client
}
// Settings holds connection parameters for a qBittorrent instance.
type Settings struct {
	Hostname string
	Port     uint
	Username string
	Password string
	// SSL selects https over http.
	SSL bool
	// protocol is derived from SSL in NewClient; not set by callers.
	protocol string
}
// NewClient builds a qBittorrent API client with a 10s request timeout and
// an in-memory cookie jar for the login session cookie.
func NewClient(s Settings) *Client {
	// store cookies in jar
	jar, err := cookiejar.New(&cookiejar.Options{PublicSuffixList: publicsuffix.List})
	if err != nil {
		log.Error().Err(err).Msg("new client cookie error")
	}

	c := &Client{
		settings: s,
		http: &http.Client{
			Timeout: time.Second * 10,
			Jar:     jar,
		},
	}

	// derive the URL scheme from the SSL flag
	if c.settings.SSL {
		c.settings.protocol = "https"
	} else {
		c.settings.protocol = "http"
	}

	return c
}
// get performs a GET request against the given API v2 endpoint. opts is
// currently unused; callers encode query parameters into endpoint directly.
func (c *Client) get(endpoint string, opts map[string]string) (*http.Response, error) {
	reqUrl := fmt.Sprintf("%v://%v:%v/api/v2/%v", c.settings.protocol, c.settings.Hostname, c.settings.Port, endpoint)

	req, err := http.NewRequest(http.MethodGet, reqUrl, nil)
	if err != nil {
		log.Error().Err(err).Msgf("GET: error %v", reqUrl)
		return nil, err
	}

	res, err := c.http.Do(req)
	if err != nil {
		log.Error().Err(err).Msgf("GET: do %v", reqUrl)
		return nil, err
	}

	return res, nil
}
// post performs a form-encoded POST against the given API v2 endpoint,
// with opts becoming the form fields.
func (c *Client) post(endpoint string, opts map[string]string) (*http.Response, error) {
	// add optional parameters that the user wants
	form := url.Values{}
	for k, v := range opts {
		form.Add(k, v)
	}

	reqUrl := fmt.Sprintf("%v://%v:%v/api/v2/%v", c.settings.protocol, c.settings.Hostname, c.settings.Port, endpoint)

	req, err := http.NewRequest(http.MethodPost, reqUrl, strings.NewReader(form.Encode()))
	if err != nil {
		log.Error().Err(err).Msgf("POST: req %v", reqUrl)
		return nil, err
	}

	// add the content-type so qbittorrent knows what to expect
	req.Header.Add("Content-Type", "application/x-www-form-urlencoded")

	res, err := c.http.Do(req)
	if err != nil {
		log.Error().Err(err).Msgf("POST: do %v", reqUrl)
		return nil, err
	}

	return res, nil
}
// postFile uploads the named file as a multipart "torrents" form field to
// the given API v2 endpoint, with opts as extra form fields.
func (c *Client) postFile(endpoint string, fileName string, opts map[string]string) (*http.Response, error) {
	file, err := os.Open(fileName)
	if err != nil {
		log.Error().Err(err).Msgf("POST file: opening file %v", fileName)
		return nil, err
	}
	// Close the file later
	defer file.Close()

	// Buffer to store our request body as bytes
	var requestBody bytes.Buffer

	// Store a multipart writer
	multiPartWriter := multipart.NewWriter(&requestBody)

	// Initialize file field
	fileWriter, err := multiPartWriter.CreateFormFile("torrents", fileName)
	if err != nil {
		log.Error().Err(err).Msgf("POST file: initializing file field %v", fileName)
		return nil, err
	}

	// Copy the actual file content to the fields writer
	_, err = io.Copy(fileWriter, file)
	if err != nil {
		log.Error().Err(err).Msgf("POST file: could not copy file to writer %v", fileName)
		return nil, err
	}

	// Populate other fields
	for key, val := range opts {
		fieldWriter, err := multiPartWriter.CreateFormField(key)
		if err != nil {
			log.Error().Err(err).Msgf("POST file: could not add other fields %v", fileName)
			return nil, err
		}

		_, err = fieldWriter.Write([]byte(val))
		if err != nil {
			log.Error().Err(err).Msgf("POST file: could not write field %v", fileName)
			return nil, err
		}
	}

	// Close the writer: this finalizes the multipart boundary — an error
	// here means a truncated body, so it must not be ignored.
	if err := multiPartWriter.Close(); err != nil {
		log.Error().Err(err).Msgf("POST file: could not close multipart writer %v", fileName)
		return nil, err
	}

	reqUrl := fmt.Sprintf("%v://%v:%v/api/v2/%v", c.settings.protocol, c.settings.Hostname, c.settings.Port, endpoint)
	req, err := http.NewRequest("POST", reqUrl, &requestBody)
	if err != nil {
		log.Error().Err(err).Msgf("POST file: could not create request object %v", fileName)
		return nil, err
	}

	// Set correct content type
	req.Header.Set("Content-Type", multiPartWriter.FormDataContentType())

	res, err := c.http.Do(req)
	if err != nil {
		log.Error().Err(err).Msgf("POST file: could not perform request %v", fileName)
		return nil, err
	}

	return res, nil
}
// setCookies stores the given cookies (e.g. the session cookie returned by
// login) in the client's jar, keyed by the API base URL.
func (c *Client) setCookies(cookies []*http.Cookie) {
	// parse error deliberately ignored: the URL is built from our own settings
	cookieURL, _ := url.Parse(fmt.Sprintf("%v://%v:%v", c.settings.protocol, c.settings.Hostname, c.settings.Port))
	c.http.Jar.SetCookies(cookieURL, cookies)
}

179
pkg/qbittorrent/domain.go Normal file
View file

@ -0,0 +1,179 @@
package qbittorrent
// Torrent mirrors one entry of the qBittorrent /torrents/info response.
// Field meanings follow the WebUI API; sizes are in bytes and times are
// unix timestamps per that API — TODO confirm against the qBittorrent docs.
type Torrent struct {
	AddedOn            int          `json:"added_on"`
	AmountLeft         int          `json:"amount_left"`
	AutoManaged        bool         `json:"auto_tmm"`
	Availability       float32      `json:"availability"`
	Category           string       `json:"category"`
	Completed          int          `json:"completed"`
	CompletionOn       int          `json:"completion_on"`
	DlLimit            int          `json:"dl_limit"`
	DlSpeed            int          `json:"dl_speed"`
	Downloaded         int          `json:"downloaded"`
	DownloadedSession  int          `json:"downloaded_session"`
	ETA                int          `json:"eta"`
	FirstLastPiecePrio bool         `json:"f_l_piece_prio"`
	ForceStart         bool         `json:"force_start"`
	Hash               string       `json:"hash"`
	LastActivity       int          `json:"last_activity"`
	MagnetURI          string       `json:"magnet_uri"`
	MaxRatio           float32      `json:"max_ratio"`
	MaxSeedingTime     int          `json:"max_seeding_time"`
	Name               string       `json:"name"`
	NumComplete        int          `json:"num_complete"`
	NumIncomplete      int          `json:"num_incomplete"`
	NumSeeds           int          `json:"num_seeds"`
	Priority           int          `json:"priority"`
	Progress           float32      `json:"progress"`
	Ratio              float32      `json:"ratio"`
	RatioLimit         float32      `json:"ratio_limit"`
	SavePath           string       `json:"save_path"`
	SeedingTimeLimit   int          `json:"seeding_time_limit"`
	SeenComplete       int          `json:"seen_complete"`
	SequentialDownload bool         `json:"seq_dl"`
	Size               int          `json:"size"`
	State              TorrentState `json:"state"`
	SuperSeeding       bool         `json:"super_seeding"`
	Tags               string       `json:"tags"`
	TimeActive         int          `json:"time_active"`
	TotalSize          int          `json:"total_size"`
	// pointer: the API may return null for torrents without a tracker
	Tracker   *string `json:"tracker"`
	UpLimit   int     `json:"up_limit"`
	Uploaded  int     `json:"uploaded"`
	UploadedSession int `json:"uploaded_session"`
	UpSpeed   int     `json:"upspeed"`
}
// TorrentTrackersResponse wraps the tracker list returned by /torrents/trackers.
type TorrentTrackersResponse struct {
	Trackers []TorrentTracker `json:"trackers"`
}
// TorrentTracker is one tracker entry for a torrent.
type TorrentTracker struct {
	//Tier uint `json:"tier"` // can be both empty "" and int
	Url           string        `json:"url"`
	Status        TrackerStatus `json:"status"`
	NumPeers      int           `json:"num_peers"`
	NumSeeds      int           `json:"num_seeds"`
	NumLeechers   int           `json:"num_leechers"`
	NumDownloaded int           `json:"num_downloaded"`
	Message       string        `json:"msg"`
}
// TorrentState is the qBittorrent torrent state string as reported by the
// WebUI API.
type TorrentState string

const (
	// Some error occurred, applies to paused torrents
	TorrentStateError TorrentState = "error"

	// Torrent data files is missing
	TorrentStateMissingFiles TorrentState = "missingFiles"

	// Torrent is being seeded and data is being transferred
	TorrentStateUploading TorrentState = "uploading"

	// Torrent is paused and has finished downloading
	TorrentStatePausedUp TorrentState = "pausedUP"

	// Queuing is enabled and torrent is queued for upload
	TorrentStateQueuedUp TorrentState = "queuedUP"

	// Torrent is being seeded, but no connection were made
	TorrentStateStalledUp TorrentState = "stalledUP"

	// Torrent has finished downloading and is being checked
	TorrentStateCheckingUp TorrentState = "checkingUP"

	// Torrent is forced to uploading and ignore queue limit
	TorrentStateForcedUp TorrentState = "forcedUP"

	// Torrent is allocating disk space for download
	TorrentStateAllocating TorrentState = "allocating"

	// Torrent is being downloaded and data is being transferred
	TorrentStateDownloading TorrentState = "downloading"

	// Torrent has just started downloading and is fetching metadata
	TorrentStateMetaDl TorrentState = "metaDL"

	// Torrent is paused and has NOT finished downloading
	TorrentStatePausedDl TorrentState = "pausedDL"

	// Queuing is enabled and torrent is queued for download
	TorrentStateQueuedDl TorrentState = "queuedDL"

	// Torrent is being downloaded, but no connection were made
	TorrentStateStalledDl TorrentState = "stalledDL"

	// Same as checkingUP, but torrent has NOT finished downloading
	TorrentStateCheckingDl TorrentState = "checkingDL"

	// Torrent is forced to downloading to ignore queue limit
	TorrentStateForceDl TorrentState = "forceDL"

	// Checking resume data on qBt startup
	TorrentStateCheckingResumeData TorrentState = "checkingResumeData"

	// Torrent is moving to another location
	TorrentStateMoving TorrentState = "moving"

	// Unknown status
	TorrentStateUnknown TorrentState = "unknown"
)
// TorrentFilter selects which torrents /torrents/info returns.
type TorrentFilter string

const (
	// No filtering; all torrents are returned
	TorrentFilterAll TorrentFilter = "all"

	// Torrent is active
	TorrentFilterActive TorrentFilter = "active"

	// Torrent is inactive
	TorrentFilterInactive TorrentFilter = "inactive"

	// Torrent is completed
	TorrentFilterCompleted TorrentFilter = "completed"

	// Torrent is resumed
	TorrentFilterResumed TorrentFilter = "resumed"

	// Torrent is paused
	TorrentFilterPaused TorrentFilter = "paused"

	// Torrent is stalled
	TorrentFilterStalled TorrentFilter = "stalled"

	// Torrent is being seeded and data is being transferred
	TorrentFilterUploading TorrentFilter = "uploading"

	// Torrent is being seeded, but no connection were made
	TorrentFilterStalledUploading TorrentFilter = "stalled_uploading"

	// Torrent is being downloaded and data is being transferred
	TorrentFilterDownloading TorrentFilter = "downloading"

	// Torrent is being downloaded, but no connection were made
	TorrentFilterStalledDownloading TorrentFilter = "stalled_downloading"
)
// TrackerStatus https://github.com/qbittorrent/qBittorrent/wiki/WebUI-API-(qBittorrent-4.1)#get-torrent-trackers
type TrackerStatus int

const (
	// 0 Tracker is disabled (used for DHT, PeX, and LSD)
	TrackerStatusDisabled TrackerStatus = 0

	// 1 Tracker has not been contacted yet
	TrackerStatusNotContacted TrackerStatus = 1

	// 2 Tracker has been contacted and is working
	TrackerStatusOK TrackerStatus = 2

	// 3 Tracker is updating
	TrackerStatusUpdating TrackerStatus = 3

	// 4 Tracker has been contacted, but it is not working (or doesn't send proper replies)
	TrackerStatusNotWorking TrackerStatus = 4
)

222
pkg/qbittorrent/methods.go Normal file
View file

@ -0,0 +1,222 @@
package qbittorrent
import (
"encoding/json"
"errors"
"io/ioutil"
"net/http"
"net/url"
"strconv"
"strings"
"github.com/rs/zerolog/log"
)
// Login https://github.com/qbittorrent/qBittorrent/wiki/WebUI-API-(qBittorrent-4.1)#authentication
//
// Authenticates against the WebUI and stores the returned session cookie
// in the client's jar for subsequent requests.
func (c *Client) Login() error {
	credentials := map[string]string{
		"username": c.settings.Username,
		"password": c.settings.Password,
	}

	resp, err := c.post("auth/login", credentials)
	if err != nil {
		log.Error().Err(err).Msg("login error")
		return err
	}
	// close the body on every path, including the status-error returns below
	defer resp.Body.Close()

	// the original returned the nil transport err on these status failures,
	// which callers saw as success — return real errors instead
	if resp.StatusCode == http.StatusForbidden {
		log.Error().Msg("User's IP is banned for too many failed login attempts")
		return errors.New("qbittorrent login: IP banned for too many failed login attempts")
	} else if resp.StatusCode != http.StatusOK { // check for correct status code
		log.Error().Msgf("login bad status %v", resp.StatusCode)
		return errors.New("qbittorrent login: unexpected status")
	}

	bodyBytes, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return err
	}
	bodyString := string(bodyBytes)

	// the API answers "Ok." on success and "Fails." on bad credentials
	if bodyString == "Fails." {
		return errors.New("bad credentials")
	}

	// place cookies in jar for future requests
	if cookies := resp.Cookies(); len(cookies) > 0 {
		c.setCookies(cookies)
	} else {
		return errors.New("bad credentials")
	}

	return nil
}
// GetTorrents returns all torrents known to the client.
func (c *Client) GetTorrents() ([]Torrent, error) {
	resp, err := c.get("torrents/info", nil)
	if err != nil {
		log.Error().Err(err).Msg("get torrents error")
		return nil, err
	}
	defer resp.Body.Close()

	body, readErr := ioutil.ReadAll(resp.Body)
	if readErr != nil {
		// log the read error itself (the original logged the nil transport err)
		log.Error().Err(readErr).Msg("get torrents read error")
		return nil, readErr
	}

	var torrents []Torrent
	if err := json.Unmarshal(body, &torrents); err != nil {
		log.Error().Err(err).Msg("get torrents unmarshal error")
		return nil, err
	}

	return torrents, nil
}
// GetTorrentsFilter returns torrents matching the given state filter.
func (c *Client) GetTorrentsFilter(filter TorrentFilter) ([]Torrent, error) {
	v := url.Values{}
	v.Add("filter", string(filter))
	params := v.Encode()

	resp, err := c.get("torrents/info?"+params, nil)
	if err != nil {
		log.Error().Err(err).Msgf("get filtered torrents error: %v", filter)
		return nil, err
	}
	defer resp.Body.Close()

	body, readErr := ioutil.ReadAll(resp.Body)
	if readErr != nil {
		// log the read error itself (the original logged the nil transport err)
		log.Error().Err(readErr).Msgf("get filtered torrents read error: %v", filter)
		return nil, readErr
	}

	var torrents []Torrent
	if err := json.Unmarshal(body, &torrents); err != nil {
		log.Error().Err(err).Msgf("get filtered torrents unmarshal error: %v", filter)
		return nil, err
	}

	return torrents, nil
}
// GetTorrentsRaw returns the raw JSON body of /torrents/info as a string.
func (c *Client) GetTorrentsRaw() (string, error) {
	resp, err := c.get("torrents/info", nil)
	if err != nil {
		log.Error().Err(err).Msg("get torrent trackers raw error")
		return "", err
	}
	defer resp.Body.Close()

	// the original discarded this error, returning a truncated/empty string
	data, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		log.Error().Err(err).Msg("get torrent trackers raw read error")
		return "", err
	}

	return string(data), nil
}
// GetTorrentTrackers returns the tracker list for the torrent with the
// given info hash.
func (c *Client) GetTorrentTrackers(hash string) ([]TorrentTracker, error) {
	params := url.Values{}
	params.Add("hash", hash)
	p := params.Encode()

	resp, err := c.get("torrents/trackers?"+p, nil)
	if err != nil {
		log.Error().Err(err).Msgf("get torrent trackers error: %v", hash)
		return nil, err
	}
	defer resp.Body.Close()

	body, readErr := ioutil.ReadAll(resp.Body)
	if readErr != nil {
		// log the read error itself (the original logged the nil transport err)
		log.Error().Err(readErr).Msgf("get torrent trackers read error: %v", hash)
		return nil, readErr
	}

	var trackers []TorrentTracker
	if err := json.Unmarshal(body, &trackers); err != nil {
		log.Error().Err(err).Msgf("get torrent trackers: %v", hash)
		return nil, err
	}

	return trackers, nil
}
// AddTorrentFromFile add new torrent from torrent file
func (c *Client) AddTorrentFromFile(file string, options map[string]string) error {
	res, err := c.postFile("torrents/add", file, options)
	if err != nil {
		log.Error().Err(err).Msgf("add torrents error: %v", file)
		return err
	}
	// close the body on every path, including the status-error return below
	defer res.Body.Close()

	if res.StatusCode != http.StatusOK {
		// the original returned the nil err here, reporting success on failure
		log.Error().Msgf("add torrents bad status: %v", file)
		return errors.New("qbittorrent add torrent: unexpected status")
	}

	return nil
}
// DeleteTorrents removes the torrents with the given info hashes, and their
// data too when deleteFiles is true.
func (c *Client) DeleteTorrents(hashes []string, deleteFiles bool) error {
	v := url.Values{}

	// Add hashes together with | separator
	hv := strings.Join(hashes, "|")
	v.Add("hashes", hv)
	v.Add("deleteFiles", strconv.FormatBool(deleteFiles))

	encodedHashes := v.Encode()

	resp, err := c.get("torrents/delete?"+encodedHashes, nil)
	if err != nil {
		log.Error().Err(err).Msgf("delete torrents error: %v", hashes)
		return err
	}
	// close the body on every path, including the status-error return below
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		// the original returned the nil err here, reporting success on failure
		log.Error().Msgf("delete torrents bad code: %v", hashes)
		return errors.New("qbittorrent delete torrents: unexpected status")
	}

	return nil
}
// ReAnnounceTorrents forces a tracker re-announce for the torrents with the
// given info hashes.
func (c *Client) ReAnnounceTorrents(hashes []string) error {
	v := url.Values{}

	// Add hashes together with | separator
	hv := strings.Join(hashes, "|")
	v.Add("hashes", hv)

	encodedHashes := v.Encode()

	resp, err := c.get("torrents/reannounce?"+encodedHashes, nil)
	if err != nil {
		log.Error().Err(err).Msgf("re-announce error: %v", hashes)
		return err
	}
	// close the body on every path, including the status-error return below
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		// the original returned the nil err here, reporting success on failure
		log.Error().Msgf("re-announce error bad status: %v", hashes)
		return errors.New("qbittorrent re-announce: unexpected status")
	}

	return nil
}

100
pkg/releaseinfo/parser.go Normal file
View file

@ -0,0 +1,100 @@
package releaseinfo
import (
"reflect"
"strconv"
"strings"
)
// ReleaseInfo is the resulting structure returned by Parse.
// Fields are populated by setField from regex pattern matches; unmatched
// fields keep their zero value.
type ReleaseInfo struct {
	Title      string
	Season     int
	Episode    int
	Year       int
	Resolution string
	Source     string
	Codec      string
	Container  string
	Audio      string
	Group      string
	Region     string
	Extended   bool
	Hardcoded  bool
	Proper     bool
	Repack     bool
	Widescreen bool
	Website    string
	Language   string
	Sbs        string
	Unrated    bool
	Size       string
	ThreeD     bool
}
// setField assigns val to the ReleaseInfo field named field (title-cased)
// via reflection. Bool fields act as flags: any match sets them true,
// ignoring val. raw is only used for (commented-out) debugging.
//
// NOTE(review): assumes every pattern name title-cases to an existing
// struct field — an unknown name would leave v's Type nil and panic at
// v.Type.Kind(); confirm against the package-level pattern table.
func setField(tor *ReleaseInfo, field, raw, val string) {
	ttor := reflect.TypeOf(tor)
	torV := reflect.ValueOf(tor)
	// pattern names are lowercase; exported fields are title-case
	field = strings.Title(field)
	// ok result deliberately discarded — see NOTE above
	v, _ := ttor.Elem().FieldByName(field)
	//fmt.Printf("  field=%v, type=%+v, value=%v, raw=%v\n", field, v.Type, val, raw)
	switch v.Type.Kind() {
	case reflect.Bool:
		torV.Elem().FieldByName(field).SetBool(true)
	case reflect.Int:
		// parse errors leave clean at 0; the field is then set to 0
		clean, _ := strconv.ParseInt(val, 10, 64)
		torV.Elem().FieldByName(field).SetInt(clean)
	case reflect.Uint:
		clean, _ := strconv.ParseUint(val, 10, 64)
		torV.Elem().FieldByName(field).SetUint(clean)
	case reflect.String:
		torV.Elem().FieldByName(field).SetString(val)
	}
}
// Parse breaks up the given filename in TorrentInfo.
//
// It runs every entry of the package-level `patterns` table (defined
// elsewhere in this package) against the underscore-normalized filename,
// assigning each match to the corresponding ReleaseInfo field via setField.
// The title is whatever remains between the last prefix match and the first
// suffix match. Always returns a nil error.
func Parse(filename string) (*ReleaseInfo, error) {
	tor := &ReleaseInfo{}
	//fmt.Printf("filename %q\n", filename)

	// startIndex/endIndex delimit the slice of filename holding the title
	var startIndex, endIndex = 0, len(filename)
	cleanName := strings.Replace(filename, "_", " ", -1)
	for _, pattern := range patterns {
		matches := pattern.re.FindAllStringSubmatch(cleanName, -1)
		if len(matches) == 0 {
			continue
		}

		matchIdx := 0
		if pattern.last {
			// Take last occurrence of element.
			matchIdx = len(matches) - 1
		}
		//fmt.Printf("  %s: pattern:%q match:%#v\n", pattern.name, pattern.re, matches[matchIdx])

		// a match at position 0 pushes the title start right; any other
		// match pulls the title end left
		index := strings.Index(cleanName, matches[matchIdx][1])
		if index == 0 {
			startIndex = len(matches[matchIdx][1])
			//fmt.Printf("  startIndex moved to %d [%q]\n", startIndex, filename[startIndex:endIndex])
		} else if index < endIndex {
			endIndex = index
			//fmt.Printf("  endIndex moved to %d [%q]\n", endIndex, filename[startIndex:endIndex])
		}
		setField(tor, pattern.name, matches[matchIdx][1], matches[matchIdx][2])
	}

	// Start process for title
	//fmt.Println("  title: <internal>")
	raw := strings.Split(filename[startIndex:endIndex], "(")[0]
	cleanName = raw
	if strings.HasPrefix(cleanName, "- ") {
		cleanName = raw[2:]
	}
	// dotted names with no spaces are dot-separated words
	if strings.ContainsRune(cleanName, '.') && !strings.ContainsRune(cleanName, ' ') {
		cleanName = strings.Replace(cleanName, ".", " ", -1)
	}
	cleanName = strings.Replace(cleanName, "_", " ", -1)
	//cleanName = re.sub('([\[\(_]|- )$', '', cleanName).strip()
	setField(tor, "title", raw, strings.TrimSpace(cleanName))

	return tor, nil
}

View file

@ -0,0 +1,331 @@
package releaseinfo
import (
"flag"
"testing"
"github.com/stretchr/testify/assert"
)
var updateGoldenFiles = flag.Bool("update", false, "update golden files in testdata/")
var testData = []string{
"The Walking Dead S05E03 720p HDTV x264-ASAP[ettv]",
"Hercules (2014) 1080p BrRip H264 - YIFY",
"Dawn.of.the.Planet.of.the.Apes.2014.HDRip.XViD-EVO",
"The Big Bang Theory S08E06 HDTV XviD-LOL [eztv]",
"22 Jump Street (2014) 720p BrRip x264 - YIFY",
"Hercules.2014.EXTENDED.1080p.WEB-DL.DD5.1.H264-RARBG",
"Hercules.2014.Extended.Cut.HDRip.XViD-juggs[ETRG]",
"Hercules (2014) WEBDL DVDRip XviD-MAX",
"WWE Hell in a Cell 2014 PPV WEB-DL x264-WD -={SPARROW}=-",
"UFC.179.PPV.HDTV.x264-Ebi[rartv]",
"Marvels Agents of S H I E L D S02E05 HDTV x264-KILLERS [eztv]",
"X-Men.Days.of.Future.Past.2014.1080p.WEB-DL.DD5.1.H264-RARBG",
"Guardians Of The Galaxy 2014 R6 720p HDCAM x264-JYK",
"Marvel's.Agents.of.S.H.I.E.L.D.S02E01.Shadows.1080p.WEB-DL.DD5.1",
"Marvels Agents of S.H.I.E.L.D. S02E06 HDTV x264-KILLERS[ettv]",
"Guardians of the Galaxy (CamRip / 2014)",
"The.Walking.Dead.S05E03.1080p.WEB-DL.DD5.1.H.264-Cyphanix[rartv]",
"Brave.2012.R5.DVDRip.XViD.LiNE-UNiQUE",
"Lets.Be.Cops.2014.BRRip.XViD-juggs[ETRG]",
"These.Final.Hours.2013.WBBRip XViD",
"Downton Abbey 5x06 HDTV x264-FoV [eztv]",
"Annabelle.2014.HC.HDRip.XViD.AC3-juggs[ETRG]",
"Lucy.2014.HC.HDRip.XViD-juggs[ETRG]",
"The Flash 2014 S01E04 HDTV x264-FUM[ettv]",
"South Park S18E05 HDTV x264-KILLERS [eztv]",
"The Flash 2014 S01E03 HDTV x264-LOL[ettv]",
"The Flash 2014 S01E01 HDTV x264-LOL[ettv]",
"Lucy 2014 Dual-Audio WEBRip 1400Mb",
"Teenage Mutant Ninja Turtles (HdRip / 2014)",
"Teenage Mutant Ninja Turtles (unknown_release_type / 2014)",
"The Simpsons S26E05 HDTV x264 PROPER-LOL [eztv]",
"2047 - Sights of Death (2014) 720p BrRip x264 - YIFY",
"Two and a Half Men S12E01 HDTV x264 REPACK-LOL [eztv]",
"Dinosaur 13 2014 WEBrip XviD AC3 MiLLENiUM",
"Teenage.Mutant.Ninja.Turtles.2014.HDRip.XviD.MP3-RARBG",
"Dawn.Of.The.Planet.of.The.Apes.2014.1080p.WEB-DL.DD51.H264-RARBG",
"Teenage.Mutant.Ninja.Turtles.2014.720p.HDRip.x264.AC3.5.1-RARBG",
"Gotham.S01E05.Viper.WEB-DL.x264.AAC",
"Into.The.Storm.2014.1080p.WEB-DL.AAC2.0.H264-RARBG",
"Lucy 2014 Dual-Audio 720p WEBRip",
"Into The Storm 2014 1080p BRRip x264 DTS-JYK",
"Sin.City.A.Dame.to.Kill.For.2014.1080p.BluRay.x264-SPARKS",
"WWE Monday Night Raw 3rd Nov 2014 HDTV x264-Sir Paul",
"Jack.And.The.Cuckoo-Clock.Heart.2013.BRRip XViD",
"WWE Hell in a Cell 2014 HDTV x264 SNHD",
"Dracula.Untold.2014.TS.XViD.AC3.MrSeeN-SiMPLE",
"The Missing 1x01 Pilot HDTV x264-FoV [eztv]",
"Doctor.Who.2005.8x11.Dark.Water.720p.HDTV.x264-FoV[rartv]",
"Gotham.S01E07.Penguins.Umbrella.WEB-DL.x264.AAC",
"One Shot [2014] DVDRip XViD-ViCKY",
"The Shaukeens 2014 Hindi (1CD) DvDScr x264 AAC...Hon3y",
"The Shaukeens (2014) 1CD DvDScr Rip x264 [DDR]",
"Annabelle.2014.1080p.PROPER.HC.WEBRip.x264.AAC.2.0-RARBG",
"Interstellar (2014) CAM ENG x264 AAC-CPG",
"Guardians of the Galaxy (2014) Dual Audio DVDRip AVI",
"Eliza Graves (2014) Dual Audio WEB-DL 720p MKV x264",
"WWE Monday Night Raw 2014 11 10 WS PDTV x264-RKOFAN1990 -={SPARR",
"Sons.of.Anarchy.S01E03",
"doctor_who_2005.8x12.death_in_heaven.720p_hdtv_x264-fov",
"breaking.bad.s01e01.720p.bluray.x264-reward",
"Game of Thrones - 4x03 - Breaker of Chains",
"[720pMkv.Com]_sons.of.anarchy.s05e10.480p.BluRay.x264-GAnGSteR",
"[ www.Speed.cd ] -Sons.of.Anarchy.S07E07.720p.HDTV.X264-DIMENSION",
"Community.s02e20.rus.eng.720p.Kybik.v.Kybe",
"The.Jungle.Book.2016.3D.1080p.BRRip.SBS.x264.AAC-ETRG",
"Ant-Man.2015.3D.1080p.BRRip.Half-SBS.x264.AAC-m2g",
"Ice.Age.Collision.Course.2016.READNFO.720p.HDRIP.X264.AC3.TiTAN",
"Red.Sonja.Queen.Of.Plagues.2016.BDRip.x264-W4F[PRiME]",
"The Purge: Election Year (2016) HC - 720p HDRiP - 900MB - ShAaNi",
"War Dogs (2016) HDTS 600MB - NBY",
"The Hateful Eight (2015) 720p BluRay - x265 HEVC - 999MB - ShAaN",
"The.Boss.2016.UNRATED.720p.BRRip.x264.AAC-ETRG",
"Return.To.Snowy.River.1988.iNTERNAL.DVDRip.x264-W4F[PRiME]",
"Akira (2016) - UpScaled - 720p - DesiSCR-Rip - Hindi - x264 - AC3 - 5.1 - Mafiaking - M2Tv",
"Ben Hur 2016 TELESYNC x264 AC3 MAXPRO",
"The.Secret.Life.of.Pets.2016.HDRiP.AAC-LC.x264-LEGi0N",
"[HorribleSubs] Clockwork Planet - 10 [480p].mkv",
"[HorribleSubs] Detective Conan - 862 [1080p].mkv",
"thomas.and.friends.s19e09_s20e14.convert.hdtv.x264-w4f[eztv].mkv",
"Blade.Runner.2049.2017.1080p.WEB-DL.DD5.1.H264-FGT-[rarbg.to]",
"2012(2009).1080p.Dual Audio(Hindi+English) 5.1 Audios",
"2012 (2009) 1080p BrRip x264 - 1.7GB - YIFY",
"2012 2009 x264 720p Esub BluRay 6.0 Dual Audio English Hindi GOPISAHI",
}
// moreTestData holds additional raw release names (sports/dubbed content);
// not referenced by a visible test in this chunk — TODO confirm usage.
var moreTestData = []string{
	"Tokyo Olympics 2020 Street Skateboarding Prelims and Final 25 07 2021 1080p WEB-DL AAC2 0 H 264-playWEB",
	"Tokyo Olympics 2020 Taekwondo Day3 Finals 26 07 720pEN25fps ES",
	"Die Freundin der Haie 2021 German DUBBED DL DOKU 1080p WEB x264-WiSHTV",
}
var movieTests = []string{
"The Last Letter from Your Lover 2021 2160p NF WEBRip DDP5 1 Atmos x265-KiNGS",
"Blade 1998 Hybrid 1080p BluRay REMUX AVC Atmos-EPSiLON",
"Forrest Gump 1994 1080p BluRay DDP7 1 x264-Geek",
"Deux sous de violettes 1951 1080p Blu-ray Remux AVC FLAC 2 0-EDPH",
"Predator 1987 2160p UHD BluRay DTS-HD MA 5 1 HDR x265-W4NK3R",
"Final Destination 2 2003 1080p BluRay x264-ETHOS",
"Hellboy.II.The.Golden.Army.2008.REMASTERED.NORDiC.1080p.BluRay.x264-PANDEMONiUM",
"Wonders of the Sea 2017 BluRay 1080p AVC DTS-HD MA 2.0-BeyondHD",
"A Week Away 2021 1080p NF WEB-DL DDP 5.1 Atmos DV H.265-SymBiOTes",
"Control 2004 BluRay 1080p DTS-HD MA 5.1 AVC REMUX-FraMeSToR",
"Mimi 2021 1080p Hybrid WEB-DL DDP 5.1 x264-Telly",
"She's So Lovely 1997 BluRay 1080p DTS-HD MA 5.1 AVC REMUX-FraMeSToR",
"Those Who Wish Me Dead 2021 BluRay 1080p DD5.1 x264-BHDStudio",
"The Last Letter from Your Lover 2021 2160p NF WEBRip DDP 5.1 Atmos x265-KiNGS",
"Spinning Man 2018 BluRay 1080p DTS 5.1 x264-MTeam",
"The Wicker Man 1973 Final Cut 1080p BluRay FLAC 1.0 x264-NTb",
"New Police Story 2004 720p BluRay DTS x264-HiFi",
"La Cienaga 2001 Criterion Collection NTSC DVD9 DD 2.0",
"The Thin Blue Line 1988 Criterion Collection NTSC DVD9 DD 2.0",
"The Thin Red Line 1998 Criterion Collection NTSC 2xDVD9 DD 5.1",
"The Sword of Doom AKA daibosatsu 1966 Criterion Collection NTSC DVD9 DD 1.0",
"Freaks 2018 Hybrid REPACK 1080p BluRay REMUX AVC DTS-HD MA 5.1-EPSiLON",
"The Oxford Murders 2008 1080p BluRay Remux AVC DTS-HD MA 7.1-Pootis",
"Berlin Babylon 2001 PAL DVD9 DD 5.1",
"Dillinger 1973 1080p BluRay REMUX AVC DTS-HD MA 1.0-HiDeFZeN",
"True Romance 1993 2160p UHD Blu-ray DV HDR HEVC DTS-HD MA 5.1",
"Family 2019 1080p AMZN WEB-DL DD+ 5.1 H.264-TEPES",
"Family 2019 720p AMZN WEB-DL DD+ 5.1 H.264-TEPES",
"The Banana Splits Movie 2019 NTSC DVD9 DD 5.1-(_10_)",
"Sex Is Zero AKA saegjeugsigong 2002 720p BluRay DD 5.1 x264-KiR",
"Sex Is Zero AKA saegjeugsigong 2002 1080p BluRay DTS 5.1 x264-KiR",
"Sex Is Zero AKA saegjeugsigong 2002 1080p KOR Blu-ray AVC DTS-HD MA 5.1-ARiN",
"The Stranger AKA aagntuk 1991 Criterion Collection NTSC DVD9 DD 1.0",
"The Taking of Power by Louis XIV AKA La prise de pouvoir par Louis XIV 1966 Criterion Collection NTSC DVD9 DD 1.0",
"La Cienaga 2001 Criterion Collection NTSC DVD9 DD 2.0",
"The Thin Blue Line 1988 Criterion Collection NTSC DVD9 DD 2.0",
"The Thin Red Line 1998 Criterion Collection NTSC 2xDVD9 DD 5.1",
"The Sword of Doom AKA daibosatsu 1966 Criterion Collection NTSC DVD9 DD 1.0",
"Freaks 2018 Hybrid REPACK 1080p BluRay REMUX AVC DTS-HD MA 5.1-EPSiLON",
"The Oxford Murders 2008 1080p BluRay Remux AVC DTS-HD MA 7.1-Pootis",
"Berlin Babylon 2001 PAL DVD9 DD 5.1",
"Dillinger 1973 1080p BluRay REMUX AVC DTS-HD MA 1.0-HiDeFZeN",
"True Romance 1993 2160p UHD Blu-ray DV HDR HEVC DTS-HD MA 5.1",
"La Cienaga 2001 Criterion Collection NTSC DVD9 DD 2.0",
"Freaks 2018 Hybrid REPACK 1080p BluRay REMUX AVC DTS-HD MA 5.1-EPSiLON",
"The Oxford Murders 2008 1080p BluRay Remux AVC DTS-HD MA 7.1-Pootis",
}
//func TestParse_Movies(t *testing.T) {
// type args struct {
// filename string
// }
// tests := []struct {
// filename string
// want *ReleaseInfo
// wantErr bool
// }{
// {filename: "", want: nil, wantErr: false},
// }
// for _, tt := range tests {
// t.Run(tt.filename, func(t *testing.T) {
// got, err := Parse(tt.filename)
// if (err != nil) != tt.wantErr {
// t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantErr)
// return
// }
// if !reflect.DeepEqual(got, tt.want) {
// t.Errorf("Parse() got = %v, want %v", got, tt.want)
// }
// })
// }
//}
// tvTests holds sample TV release names (season packs, single episodes,
// assorted sources/codecs/groups) kept as parser input fixtures.
// NOTE(review): not referenced by the visible tests in this file — confirm
// whether it is used elsewhere or is a fixture awaiting a test.
var tvTests = []string{
	"Melrose Place S04 480p web-dl eac3 x264",
	"Privileged.S01E17.1080p.WEB.h264-DiRT",
	"Banshee S02 BluRay 720p DD5.1 x264-NTb",
	"Banshee S04 BluRay 720p DTS x264-NTb",
	"Servant S01 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-FLUX",
	"South Park S06 1080p BluRay DD5.1 x264-W4NK3R",
	"The Walking Dead: Origins S01E01 1080p WEB-DL DDP 2.0 H.264-GOSSIP",
	"Mythic Quest S01 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-FLUX",
	"Masameer County S01 1080p NF WEB-DL DD+ 5.1 H.264-XIQ",
	"Kevin Can F**K Himself 2021 S01 1080p AMZN WEB-DL DD+ 5.1 H.264-SaiTama",
	"How to Sell Drugs Online (Fast) S03 1080p NF WEB-DL DD+ 5.1 x264-KnightKing",
	"Power Book III: Raising Kanan S01E01 2160p WEB-DL DD+ 5.1 H265-GGEZ",
	"Power Book III: Raising Kanan S01E02 2160p WEB-DL DD+ 5.1 H265-GGWP",
	// NOTE(review): "Thea" below looks like a typo of "The" — confirm it is
	// an intentional parser fixture and not an accidental duplicate.
	"Thea Walking Dead: Origins S01E01 1080p WEB-DL DD+ 2.0 H.264-GOSSIP",
	"Mean Mums S01 1080p AMZN WEB-DL DD+ 2.0 H.264-FLUX",
}
// TestParse_TV exercises Parse against a table of TV release names and
// asserts the full ReleaseInfo produced for each (season/episode numbers,
// resolution, source, codec, audio, and release group).
func TestParse_TV(t *testing.T) {
	tests := []struct {
		filename string
		want     *ReleaseInfo
		wantErr  bool
	}{
		{
			filename: "Melrose Place S04 480p web-dl eac3 x264",
			want: &ReleaseInfo{
				Title:      "Melrose Place",
				Season:     4,
				Resolution: "480p",
				Source:     "web-dl",
				Codec:      "x264",
				// Pins current parser behavior: the hyphen in "web-dl" is
				// picked up by the group pattern as well.
				Group: "dl eac3 x264",
			},
			wantErr: false,
		},
		{
			filename: "Privileged.S01E17.1080p.WEB.h264-DiRT",
			want: &ReleaseInfo{
				Title:      "Privileged",
				Season:     1,
				Episode:    17,
				Resolution: "1080p",
				Source:     "WEB",
				Codec:      "h264",
				Group:      "DiRT",
			},
			wantErr: false,
		},
		{
			filename: "Banshee S02 BluRay 720p DD5.1 x264-NTb",
			want: &ReleaseInfo{
				Title:      "Banshee",
				Season:     2,
				Resolution: "720p",
				Source:     "BluRay",
				Codec:      "x264",
				Audio:      "DD5.1",
				Group:      "NTb",
			},
			wantErr: false,
		},
		{
			// Spelled-out "Season 2" must parse the same as "S02".
			filename: "Banshee Season 2 BluRay 720p DD5.1 x264-NTb",
			want: &ReleaseInfo{
				Title:      "Banshee",
				Season:     2,
				Resolution: "720p",
				Source:     "BluRay",
				Codec:      "x264",
				Audio:      "DD5.1",
				Group:      "NTb",
			},
			wantErr: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.filename, func(t *testing.T) {
			got, err := Parse(tt.filename)
			if (err != nil) != tt.wantErr {
				t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			assert.Equal(t, tt.want, got)
		})
	}
}
// gamesTests holds sample game release names (both spaced and dotted
// variants of the same title) kept as parser input fixtures.
// NOTE(review): not referenced by the visible tests in this file — confirm
// whether it is used elsewhere or is a fixture awaiting a test.
var gamesTests = []string{
	"Night Book NSW-LUMA",
	"Evdeki Lanet-DARKSiDERS",
	"Evdeki.Lanet-DARKSiDERS",
}
//func TestParser(t *testing.T) {
// for i, fname := range testData {
// t.Run(fmt.Sprintf("golden_file_%03d", i), func(t *testing.T) {
// tor, err := Parse(fname)
// if err != nil {
// t.Fatalf("test %v: parser error:\n %v", i, err)
// }
//
// var want ReleaseInfo
//
// if !reflect.DeepEqual(*tor, want) {
// t.Fatalf("test %v: wrong result for %q\nwant:\n %v\ngot:\n %v", i, fname, want, *tor)
// }
// })
// }
//}
//func TestParserWriteToFiles(t *testing.T) {
// for i, fname := range testData {
// t.Run(fmt.Sprintf("golden_file_%03d", i), func(t *testing.T) {
// tor, err := Parse(fname)
// if err != nil {
// t.Fatalf("test %v: parser error:\n %v", i, err)
// }
//
// goldenFilename := filepath.Join("testdata", fmt.Sprintf("golden_file_%03d.json", i))
//
// if *updateGoldenFiles {
// buf, err := json.MarshalIndent(tor, "", " ")
// if err != nil {
// t.Fatalf("error marshaling result: %v", err)
// }
//
// if err = ioutil.WriteFile(goldenFilename, buf, 0644); err != nil {
// t.Fatalf("unable to update golden file: %v", err)
// }
// }
//
// buf, err := ioutil.ReadFile(goldenFilename)
// if err != nil {
// t.Fatalf("error loading golden file: %v", err)
// }
//
// var want ReleaseInfo
// err = json.Unmarshal(buf, &want)
// if err != nil {
// t.Fatalf("error unmarshalling golden file %v: %v", goldenFilename, err)
// }
//
// if !reflect.DeepEqual(*tor, want) {
// t.Fatalf("test %v: wrong result for %q\nwant:\n %v\ngot:\n %v", i, fname, want, *tor)
// }
// })
// }
//}

View file

@ -0,0 +1,58 @@
package releaseinfo
import (
"fmt"
"os"
"reflect"
"regexp"
)
// patterns drives the release-name tokenizer: each entry names a field,
// says whether the last match (rather than the first) wins, gives the
// reflect.Kind the captured text is converted to, and the regexp that
// locates it in the release name.
var patterns = []struct {
	name string
	// Use the last matching pattern. E.g. Year.
	last bool
	kind reflect.Kind
	// REs need to have 2 sub expressions (groups), the first one is "raw", and
	// the second one for the "clean" value.
	// E.g. Episode matching on "S01E18" will result in: raw = "E18", clean = "18".
	re *regexp.Regexp
}{
	//{"season", false, reflect.Int, regexp.MustCompile(`(?i)(s?([0-9]{1,2}))[ex]`)},
	// Matches both "S02" and "Season 2" style markers.
	{"season", false, reflect.Int, regexp.MustCompile(`(?i)((?:S|Season\s*)(\d{1,3}))`)},
	{"episode", false, reflect.Int, regexp.MustCompile(`(?i)([ex]([0-9]{2})(?:[^0-9]|$))`)},
	// Fallback: a trailing "- 07" style absolute episode number.
	{"episode", false, reflect.Int, regexp.MustCompile(`(-\s+([0-9]+)(?:[^0-9]|$))`)},
	// last=true so the final year wins when a title itself contains a year.
	{"year", true, reflect.Int, regexp.MustCompile(`\b(((?:19[0-9]|20[0-9])[0-9]))\b`)},
	{"resolution", false, reflect.String, regexp.MustCompile(`\b(([0-9]{3,4}p|i))\b`)},
	{"source", false, reflect.String, regexp.MustCompile(`(?i)\b(((?:PPV\.)?[HP]DTV|(?:HD)?CAM|B[DR]Rip|(?:HD-?)?TS|(?:PPV )?WEB-?DL(?: DVDRip)?|HDRip|DVDRip|DVDRIP|CamRip|WEB|W[EB]BRip|BluRay|DvDScr|telesync))\b`)},
	{"codec", false, reflect.String, regexp.MustCompile(`(?i)\b((xvid|HEVC|[hx]\.?26[45]))\b`)},
	{"container", false, reflect.String, regexp.MustCompile(`(?i)\b((MKV|AVI|MP4))\b`)},
	{"audio", false, reflect.String, regexp.MustCompile(`(?i)\b((MP3|DD5\.?1|Dual[\- ]Audio|LiNE|DTS|AAC[.-]LC|AAC(?:\.?2\.0)?|AC3(?:\.5\.1)?))\b`)},
	{"region", false, reflect.String, regexp.MustCompile(`(?i)\b(R([0-9]))\b`)},
	{"size", false, reflect.String, regexp.MustCompile(`(?i)\b((\d+(?:\.\d+)?(?:GB|MB)))\b`)},
	// Leading "[site]" prefix, e.g. "[rarbg] ...".
	{"website", false, reflect.String, regexp.MustCompile(`^(\[ ?([^\]]+?) ?\])`)},
	{"language", false, reflect.String, regexp.MustCompile(`(?i)\b((rus\.eng|ita\.eng))\b`)},
	{"sbs", false, reflect.String, regexp.MustCompile(`(?i)\b(((?:Half-)?SBS))\b`)},
	// Trailing "-GROUP" suffix.
	// NOTE(review): the `-={` fragment below contains an unmatched `{` inside
	// the optional alternative and looks garbled — confirm against the
	// upstream parse-torrent-name pattern before relying on that branch.
	{"group", false, reflect.String, regexp.MustCompile(`\b(- ?([^-]+(?:-={[^-]+-?$)?))$`)},
	{"extended", false, reflect.Bool, regexp.MustCompile(`(?i)\b(EXTENDED(:?.CUT)?)\b`)},
	{"hardcoded", false, reflect.Bool, regexp.MustCompile(`(?i)\b((HC))\b`)},
	{"proper", false, reflect.Bool, regexp.MustCompile(`(?i)\b((PROPER))\b`)},
	{"repack", false, reflect.Bool, regexp.MustCompile(`(?i)\b((REPACK))\b`)},
	{"widescreen", false, reflect.Bool, regexp.MustCompile(`(?i)\b((WS))\b`)},
	{"unrated", false, reflect.Bool, regexp.MustCompile(`(?i)\b((UNRATED))\b`)},
	{"threeD", false, reflect.Bool, regexp.MustCompile(`(?i)\b((3D))\b`)},
}
// init validates at startup that every pattern exposes exactly two capture
// groups ("raw" and "clean"); the extraction code relies on that invariant.
// A malformed pattern is a programming error, so the process aborts.
func init() {
	for _, pat := range patterns {
		if pat.re.NumSubexp() != 2 {
			// Diagnostics go to stderr (not stdout) so they are not lost
			// when stdout is redirected or consumed by another process.
			fmt.Fprintf(os.Stderr, "Pattern %q does not have enough capture groups. want 2, got %d\n", pat.name, pat.re.NumSubexp())
			os.Exit(1)
		}
	}
}

51
pkg/wildcard/match.go Normal file
View file

@ -0,0 +1,51 @@
package wildcard
// MatchSimple - reports whether name satisfies the pattern string,
// honoring only the '*' wildcard.
// A file system path is considered a flat name space.
func MatchSimple(pattern, name string) bool {
	switch pattern {
	case "":
		// An empty pattern matches only an empty name.
		return name == ""
	case "*":
		// A lone '*' matches everything.
		return true
	}
	// Delegate to the rune matcher in simple ('*'-only) mode.
	return deepMatchRune([]rune(name), []rune(pattern), true)
}
// Match - reports whether name satisfies the pattern string, honoring
// the '*' and '?' wildcards.
// Unlike path.Match(), a path is considered a flat name space while
// matching. The difference is illustrated in the example here
// https://play.golang.org/p/Ega9qgD4Qz .
func Match(pattern, name string) (matched bool) {
	switch pattern {
	case "":
		// An empty pattern matches only an empty name.
		return name == ""
	case "*":
		// A lone '*' matches everything.
		return true
	}
	// Delegate to the rune matcher with both '*' and '?' enabled.
	return deepMatchRune([]rune(name), []rune(pattern), false)
}
// deepMatchRune reports whether str satisfies pattern. '*' matches any
// (possibly empty) run of runes; '?' matches exactly one rune. The simple
// flag is carried for MatchSimple compatibility; previously, simple mode
// skipped the empty-input guard under '?', so str[1:] on an exhausted str
// panicked with a slice-bounds error (e.g. MatchSimple("?", "")). Both
// modes now report a non-match instead.
func deepMatchRune(str, pattern []rune, simple bool) bool {
	for len(pattern) > 0 {
		switch pattern[0] {
		case '*':
			// Either '*' matches nothing (advance the pattern) or it
			// consumes one rune of str and stays in place.
			return deepMatchRune(str, pattern[1:], simple) ||
				(len(str) > 0 && deepMatchRune(str[1:], pattern, simple))
		case '?':
			// '?' must consume one rune; fail (not panic) on empty input.
			if len(str) == 0 {
				return false
			}
		default:
			if len(str) == 0 || str[0] != pattern[0] {
				return false
			}
		}
		str = str[1:]
		pattern = pattern[1:]
	}
	// A full match requires both inputs to be exhausted together.
	return len(str) == 0 && len(pattern) == 0
}

View file

@ -0,0 +1,37 @@
package wildcard
import "testing"
// TestMatch - Tests validate the logic of wild card matching.
// `Match` supports '*' and '?' wildcards.
// Sample usage: In resource matching for bucket policy validation.
func TestMatch(t *testing.T) {
	cases := []struct {
		pattern string
		text    string
		matched bool
	}{
		{pattern: "The?Simpsons*", text: "The Simpsons S12", matched: true},
		{pattern: "The?Simpsons*", text: "The.Simpsons.S12", matched: true},
		{pattern: "The?Simpsons*", text: "The.Simps.S12", matched: false},
	}
	// Iterate over the cases, call the function under test and assert the output.
	for i, tc := range cases {
		if got := Match(tc.pattern, tc.text); got != tc.matched {
			t.Errorf("Test %d: Expected the result to be `%v`, but instead found it to be `%v`", i+1, tc.matched, got)
		}
	}
}