Mirror of https://github.com/idanoo/autobrr (synced 2025-07-23 08:49:13 +00:00)
feat: add backend
This commit is contained in: parent bc418ff248, commit a838d994a6
68 changed files with 9561 additions and 0 deletions
588  internal/announce/parse.go  Normal file
@@ -0,0 +1,588 @@
package announce

import (
	"bytes"
	"fmt"
	"html"
	"net/url"
	"regexp"
	"strconv"
	"strings"
	"text/template"

	"github.com/autobrr/autobrr/internal/domain"
	"github.com/autobrr/autobrr/pkg/releaseinfo"

	"github.com/pkg/errors"
	"github.com/rs/zerolog/log"
)

func (s *service) parseLineSingle(def *domain.IndexerDefinition, announce *domain.Announce, line string) error {
	for _, extract := range def.Parse.Lines {
		tmpVars := map[string]string{}

		var err error
		err = s.parseExtract(extract.Pattern, extract.Vars, tmpVars, line)
		if err != nil {
			log.Debug().Msgf("error parsing extract: %v", line)
			return err
		}

		// on lines matched
		err = s.onLinesMatched(def, tmpVars, announce)
		if err != nil {
			log.Debug().Msgf("error match line: %v", line)
			return err
		}
	}

	return nil
}

func (s *service) parseMultiLine() error {
	return nil
}

func (s *service) parseExtract(pattern string, vars []string, tmpVars map[string]string, line string) error {

	rxp, err := regExMatch(pattern, line)
	if err != nil {
		log.Debug().Msgf("did not match expected line: %v", line)
	}

	if rxp == nil {
		//return nil, nil
		return nil
	}

	// extract matched
	for i, v := range vars {
		value := ""

		if rxp[i] != "" {
			value = rxp[i]
			// tmpVars[v] = rxp[i]
		}

		tmpVars[v] = value
	}
	return nil
}

func (s *service) onLinesMatched(def *domain.IndexerDefinition, vars map[string]string, announce *domain.Announce) error {
	// TODO implement set tracker.lastAnnounce = now

	announce.TorrentName = vars["torrentName"]

	//err := s.postProcess(ti, vars, *announce)
	//if err != nil {
	//	return err
	//}

	// TODO extractReleaseInfo
	err := s.extractReleaseInfo(vars, announce.TorrentName)
	if err != nil {
		return err
	}

	// resolution
	// source
	// encoder
	// canonicalize name

	err = s.mapToAnnounce(vars, announce)
	if err != nil {
		return err
	}

	// torrent url
	torrentUrl, err := s.processTorrentUrl(def.Parse.Match.TorrentURL, vars, def.SettingsMap, def.Parse.Match.Encode)
	if err != nil {
		log.Debug().Msgf("error torrent url: %v", err)
		return err
	}

	if torrentUrl != "" {
		announce.TorrentUrl = torrentUrl
	}

	return nil
}

func (s *service) processTorrentUrl(match string, vars map[string]string, extraVars map[string]string, encode []string) (string, error) {
	tmpVars := map[string]string{}

	// copy vars to new tmp map
	for k, v := range vars {
		tmpVars[k] = v
	}

	// merge extra vars with vars
	if extraVars != nil {
		for k, v := range extraVars {
			tmpVars[k] = v
		}
	}

	// handle url encode of values
	if encode != nil {
		for _, e := range encode {
			if v, ok := tmpVars[e]; ok {
				// url encode value
				t := url.QueryEscape(v)
				tmpVars[e] = t
			}
		}
	}

	// setup text template to inject variables into
	tmpl, err := template.New("torrenturl").Parse(match)
	if err != nil {
		log.Error().Err(err).Msg("could not create torrent url template")
		return "", err
	}

	var b bytes.Buffer
	err = tmpl.Execute(&b, &tmpVars)
	if err != nil {
		log.Error().Err(err).Msg("could not write torrent url template output")
		return "", err
	}

	return b.String(), nil
}
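For context (not part of parse.go): processTorrentUrl merges the extracted announce variables with the indexer's settings, URL-encodes the values named in the definition's encode list, and then runs the download-URL template through Go's text/template. A minimal standalone sketch of that flow; the template string and variable names are illustrative, loosely modelled on the test fixtures further down, not taken from a real indexer definition:

package main

import (
	"bytes"
	"fmt"
	"net/url"
	"text/template"
)

func main() {
	// hypothetical download-URL template and announce variables
	match := "https://{{ .baseUrl }}rss/download/{{ .torrentId }}/{{ .rsskey }}/{{ .torrentName }}.torrent"
	vars := map[string]string{
		"baseUrl":     "www.tracker01.test/",
		"torrentId":   "263302",
		"rsskey":      "000aaa111bbb222ccc333ddd",
		"torrentName": "debian live 10 6 0 amd64 standard iso",
	}

	// url-encode the values named in the (hypothetical) encode list
	for _, e := range []string{"torrentName"} {
		if v, ok := vars[e]; ok {
			vars[e] = url.QueryEscape(v)
		}
	}

	tmpl, err := template.New("torrenturl").Parse(match)
	if err != nil {
		panic(err)
	}

	var b bytes.Buffer
	if err := tmpl.Execute(&b, vars); err != nil {
		panic(err)
	}

	// prints https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent
	fmt.Println(b.String())
}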

func split(r rune) bool {
	return r == ' ' || r == '.'
}

func Splitter(s string, splits string) []string {
	m := make(map[rune]int)
	for _, r := range splits {
		m[r] = 1
	}

	splitter := func(r rune) bool {
		return m[r] == 1
	}

	return strings.FieldsFunc(s, splitter)
}

func canonicalizeString(s string) []string {
	//a := strings.FieldsFunc(s, split)
	a := Splitter(s, " .")

	return a
}

func cleanReleaseName(input string) string {
	// Make a Regex to say we only want letters and numbers
	reg, err := regexp.Compile("[^a-zA-Z0-9]+")
	if err != nil {
		//log.Fatal(err)
	}
	processedString := reg.ReplaceAllString(input, " ")

	return processedString
}
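As an aside (again not part of the file): Splitter builds a rune-set splitter for strings.FieldsFunc, and cleanReleaseName collapses every non-alphanumeric run into a single space. A small standalone sketch, using a dotted variant of a release name from the tests below as an arbitrary input:

package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	name := "Lost.S06E07.720p.WEB-DL.DD5.1.H.264-LP"

	// Splitter-style: split on any rune in " ."
	fields := strings.FieldsFunc(name, func(r rune) bool {
		return strings.ContainsRune(" .", r)
	})
	fmt.Println(fields) // [Lost S06E07 720p WEB-DL DD5 1 H 264-LP]

	// cleanReleaseName-style: collapse anything non-alphanumeric to a space
	reg := regexp.MustCompile("[^a-zA-Z0-9]+")
	fmt.Println(reg.ReplaceAllString(name, " ")) // Lost S06E07 720p WEB DL DD5 1 H 264 LP
}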

func findLast(input string, pattern string) (string, error) {
	matched := make([]string, 0)
	//for _, s := range arr {

	rxp, err := regexp.Compile(pattern)
	if err != nil {
		return "", err
		//return errors.Wrapf(err, "invalid regex: %s", value)
	}

	matches := rxp.FindStringSubmatch(input)
	if matches != nil {
		log.Trace().Msgf("matches: %v", matches)
		// first value is the match, second value is the text
		if len(matches) >= 1 {
			last := matches[len(matches)-1]

			// add to temp slice
			matched = append(matched, last)
		}
	}

	//}

	// check if multiple values in temp slice, if so get the last one
	if len(matched) >= 1 {
		last := matched[len(matched)-1]

		return last, nil
	}

	return "", nil
}

func extractYear(releaseName string) (string, bool) {
	yearMatch, err := findLast(releaseName, "(?:^|\\D)(19[3-9]\\d|20[012]\\d)(?:\\D|$)")
	if err != nil {
		return "", false
	}
	log.Trace().Msgf("year matches: %v", yearMatch)
	return yearMatch, true
}

func extractSeason(releaseName string) (string, bool) {
	seasonMatch, err := findLast(releaseName, "\\sS(\\d+)\\s?[ED]\\d+/i")
	sm2, err := findLast(releaseName, "\\s(?:S|Season\\s*)(\\d+)/i")
	//sm3, err := findLast(releaseName, "\\s((?<!\\d)\\d{1,2})x\\d+/i")
	if err != nil {
		return "", false
	}

	log.Trace().Msgf("season matches: %v", seasonMatch)
	log.Trace().Msgf("season matches: %v", sm2)
	return seasonMatch, false
}

func extractEpisode(releaseName string) (string, bool) {
	epMatch, err := findLast(releaseName, "\\sS\\d+\\s?E(\\d+)/i")
	ep2, err := findLast(releaseName, "\\s(?:E|Episode\\s*)(\\d+)/i")
	//ep3, err := findLast(releaseName, "\\s(?<!\\d)\\d{1,2}x(\\d+)/i")
	if err != nil {
		return "", false
	}

	log.Trace().Msgf("ep matches: %v", epMatch)
	log.Trace().Msgf("ep matches: %v", ep2)
	return epMatch, false
}

func (s *service) extractReleaseInfo(varMap map[string]string, releaseName string) error {
	// https://github.com/middelink/go-parse-torrent-name

	canonReleaseName := cleanReleaseName(releaseName)
	log.Trace().Msgf("canonicalize release name: %v", canonReleaseName)

	release, err := releaseinfo.Parse(releaseName)
	if err != nil {
		return err
	}

	log.Debug().Msgf("release: %+v", release)

	// https://github.com/autodl-community/autodl-irssi/pull/194/files
	// year
	//year, yearMatch := extractYear(canonReleaseName)
	//if yearMatch {
	//	setVariable("year", year, varMap, nil)
	//}
	//log.Trace().Msgf("year matches: %v", year)

	// season
	//season, seasonMatch := extractSeason(canonReleaseName)
	//if seasonMatch {
	//	// set var
	//	log.Trace().Msgf("season matches: %v", season)
	//}

	// episode
	//episode, episodeMatch := extractEpisode(canonReleaseName)
	//if episodeMatch {
	//	// set var
	//	log.Trace().Msgf("episode matches: %v", episode)
	//}

	// resolution

	// source

	// encoder

	// ignore

	// tv or movie

	// music stuff

	// game stuff

	return nil
}
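As a quick, standalone illustration of the findLast/extractYear approach (not part of the file): the year pattern above captures a four-digit year, and findLast returns the last element of the submatch slice, i.e. the capture group rather than the full match. The release name reuses one of the test fixtures from parse_test.go:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	release := "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD"

	// same pattern extractYear hands to findLast
	rxp := regexp.MustCompile(`(?:^|\D)(19[3-9]\d|20[012]\d)(?:\D|$)`)

	matches := rxp.FindStringSubmatch(release)
	if matches != nil {
		// index 0 is the full match; the last element is the year capture group
		fmt.Println(matches[len(matches)-1]) // 1998
	}
}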

func (s *service) mapToAnnounce(varMap map[string]string, ann *domain.Announce) error {

	if torrentName, err := getFirstStringMapValue(varMap, []string{"torrentName"}); err != nil {
		return errors.Wrap(err, "failed parsing required field")
	} else {
		ann.TorrentName = html.UnescapeString(torrentName)
	}

	if category, err := getFirstStringMapValue(varMap, []string{"category"}); err == nil {
		ann.Category = category
	}

	if freeleech, err := getFirstStringMapValue(varMap, []string{"freeleech"}); err == nil {
		ann.Freeleech = strings.EqualFold(freeleech, "freeleech") || strings.EqualFold(freeleech, "yes")
	}

	if freeleechPercent, err := getFirstStringMapValue(varMap, []string{"freeleechPercent"}); err == nil {
		ann.FreeleechPercent = freeleechPercent
	}

	if uploader, err := getFirstStringMapValue(varMap, []string{"uploader"}); err == nil {
		ann.Uploader = uploader
	}

	if scene, err := getFirstStringMapValue(varMap, []string{"scene"}); err == nil {
		ann.Scene = strings.EqualFold(scene, "true") || strings.EqualFold(scene, "yes")
	}

	if year, err := getFirstStringMapValue(varMap, []string{"year"}); err == nil {
		yearI, err := strconv.Atoi(year)
		if err != nil {
			//log.Debug().Msgf("bad year var: %v", year)
		}
		ann.Year = yearI
	}

	if tags, err := getFirstStringMapValue(varMap, []string{"releaseTags", "tags"}); err == nil {
		ann.Tags = tags
	}

	return nil
}

func (s *service) mapToAnnounceObj(varMap map[string]string, ann *domain.Announce) error {

	if torrentName, err := getFirstStringMapValue(varMap, []string{"torrentName", "$torrentName"}); err != nil {
		return errors.Wrap(err, "failed parsing required field")
	} else {
		ann.TorrentName = html.UnescapeString(torrentName)
	}

	if torrentUrl, err := getFirstStringMapValue(varMap, []string{"torrentUrl", "$torrentUrl"}); err != nil {
		return errors.Wrap(err, "failed parsing required field")
	} else {
		ann.TorrentUrl = torrentUrl
	}

	if releaseType, err := getFirstStringMapValue(varMap, []string{"releaseType", "$releaseType"}); err == nil {
		ann.ReleaseType = releaseType
	}

	if name1, err := getFirstStringMapValue(varMap, []string{"name1", "$name1"}); err == nil {
		ann.Name1 = name1
	}

	if name2, err := getFirstStringMapValue(varMap, []string{"name2", "$name2"}); err == nil {
		ann.Name2 = name2
	}

	if category, err := getFirstStringMapValue(varMap, []string{"category", "$category"}); err == nil {
		ann.Category = category
	}
	if freeleech, err := getFirstStringMapValue(varMap, []string{"freeleech", "$freeleech"}); err == nil {
		ann.Freeleech = strings.EqualFold(freeleech, "true")
	}

	if uploader, err := getFirstStringMapValue(varMap, []string{"uploader", "$uploader"}); err == nil {
		ann.Uploader = uploader
	}

	if tags, err := getFirstStringMapValue(varMap, []string{"$releaseTags", "$tags", "releaseTags", "tags"}); err == nil {
		ann.Tags = tags
	}

	if cue, err := getFirstStringMapValue(varMap, []string{"cue", "$cue"}); err == nil {
		ann.Cue = strings.EqualFold(cue, "true")
	}

	if logVar, err := getFirstStringMapValue(varMap, []string{"log", "$log"}); err == nil {
		ann.Log = logVar
	}

	if media, err := getFirstStringMapValue(varMap, []string{"media", "$media"}); err == nil {
		ann.Media = media
	}

	if format, err := getFirstStringMapValue(varMap, []string{"format", "$format"}); err == nil {
		ann.Format = format
	}

	if bitRate, err := getFirstStringMapValue(varMap, []string{"bitrate", "$bitrate"}); err == nil {
		ann.Bitrate = bitRate
	}

	if resolution, err := getFirstStringMapValue(varMap, []string{"resolution"}); err == nil {
		ann.Resolution = resolution
	}

	if source, err := getFirstStringMapValue(varMap, []string{"source"}); err == nil {
		ann.Source = source
	}

	if encoder, err := getFirstStringMapValue(varMap, []string{"encoder"}); err == nil {
		ann.Encoder = encoder
	}

	if container, err := getFirstStringMapValue(varMap, []string{"container"}); err == nil {
		ann.Container = container
	}

	if scene, err := getFirstStringMapValue(varMap, []string{"scene", "$scene"}); err == nil {
		ann.Scene = strings.EqualFold(scene, "true")
	}

	if year, err := getFirstStringMapValue(varMap, []string{"year", "$year"}); err == nil {
		yearI, err := strconv.Atoi(year)
		if err != nil {
			//log.Debug().Msgf("bad year var: %v", year)
		}
		ann.Year = yearI
	}

	//return &ann, nil
	return nil
}

func setVariable(varName string, value string, varMap map[string]string, settings map[string]string) bool {

	// check in instance options (auth)
	//optVal, ok := settings[name]
	//if !ok {
	//	//return ""
	//}
	////ret = optVal
	//if optVal != "" {
	//	return false
	//}

	// else in varMap
	val, ok := varMap[varName]
	if !ok {
		//return ""
		varMap[varName] = value
	} else {
		// do something else?
	}
	log.Trace().Msgf("setVariable: %v", val)

	return true
}

func getVariable(name string, varMap map[string]string, obj domain.Announce, settings map[string]string) string {
	var ret string

	// check in announce obj
	// TODO reflect struct

	// check in instance options (auth)
	optVal, ok := settings[name]
	if !ok {
		//return ""
	}
	//ret = optVal
	if optVal != "" {
		return optVal
	}

	// else in varMap
	val, ok := varMap[name]
	if !ok {
		//return ""
	}
	ret = val

	return ret
}

//func contains(s []string, str string) bool {
//	for _, v := range s {
//		if v == str {
//			return true
//		}
//	}
//
//	return false
//}

func listContains(list []string, key string) bool {
	for _, lKey := range list {
		if strings.EqualFold(lKey, key) {
			return true
		}
	}

	return false
}

func getStringMapValue(stringMap map[string]string, key string) (string, error) {
	lowerKey := strings.ToLower(key)

	// case sensitive match
	//if caseSensitive {
	//	v, ok := stringMap[key]
	//	if !ok {
	//		return "", fmt.Errorf("key was not found in map: %q", key)
	//	}
	//
	//	return v, nil
	//}

	// case insensitive match
	for k, v := range stringMap {
		if strings.ToLower(k) == lowerKey {
			return v, nil
		}
	}

	return "", fmt.Errorf("key was not found in map: %q", lowerKey)
}

func getFirstStringMapValue(stringMap map[string]string, keys []string) (string, error) {
	for _, k := range keys {
		if val, err := getStringMapValue(stringMap, k); err == nil {
			return val, nil
		}
	}

	return "", fmt.Errorf("keys were not found in map: %q", strings.Join(keys, ", "))
}

func removeElement(s []string, i int) ([]string, error) {
	// s is [1,2,3,4,5,6], i is 2

	// perform bounds checking first to prevent a panic!
	if i >= len(s) || i < 0 {
		return nil, fmt.Errorf("Index is out of range. Index is %d with slice length %d", i, len(s))
	}

	// This creates a new slice by creating 2 slices from the original:
	// s[:i] -> [1, 2]
	// s[i+1:] -> [4, 5, 6]
	// and joining them together using `append`
	return append(s[:i], s[i+1:]...), nil
}

func regExMatch(pattern string, value string) ([]string, error) {

	rxp, err := regexp.Compile(pattern)
	if err != nil {
		return nil, err
		//return errors.Wrapf(err, "invalid regex: %s", value)
	}

	matches := rxp.FindStringSubmatch(value)
	if matches == nil {
		return nil, nil
	}

	res := make([]string, 0)
	if matches != nil {
		res, err = removeElement(matches, 0)
		if err != nil {
			return nil, err
		}
	}

	return res, nil
}
585  internal/announce/parse_test.go  Normal file
@@ -0,0 +1,585 @@
package announce

import (
	"testing"
)

//func Test_service_OnNewLine(t *testing.T) {
//	tfiles := tracker.NewService()
//	tfiles.ReadFiles()
//
//	type fields struct {
//		trackerSvc tracker.Service
//	}
//	type args struct {
//		msg string
//	}
//	tests := []struct {
//		name    string
//		fields  fields
//		args    args
//		wantErr bool
//	}{
//		// TODO: Add test cases.
//		{
//			name: "parse announce",
//			fields: fields{
//				trackerSvc: tfiles,
//			},
//			args: args{
//				msg: "New Torrent Announcement: <PC :: Iso> Name:'debian live 10 6 0 amd64 standard iso' uploaded by 'Anonymous' - http://www.tracker01.test/torrent/263302",
//			},
//			// expect struct: category, torrentName uploader freeleech baseurl torrentId
//			wantErr: false,
//		},
//	}
//	for _, tt := range tests {
//		t.Run(tt.name, func(t *testing.T) {
//			s := &service{
//				trackerSvc: tt.fields.trackerSvc,
//			}
//			if err := s.OnNewLine(tt.args.msg); (err != nil) != tt.wantErr {
//				t.Errorf("OnNewLine() error = %v, wantErr %v", err, tt.wantErr)
//			}
//		})
//	}
//}

//func Test_service_parse(t *testing.T) {
//	type fields struct {
//		trackerSvc tracker.Service
//	}
//	type args struct {
//		serverName  string
//		channelName string
//		announcer   string
//		line        string
//	}
//	tests := []struct {
//		name    string
//		fields  fields
//		args    args
//		wantErr bool
//	}{
//		// TODO: Add test cases.
//	}
//	for _, tt := range tests {
//		t.Run(tt.name, func(t *testing.T) {
//			s := &service{
//				trackerSvc: tt.fields.trackerSvc,
//			}
//			if err := s.parse(tt.args.serverName, tt.args.channelName, tt.args.announcer, tt.args.line); (err != nil) != tt.wantErr {
//				t.Errorf("parse() error = %v, wantErr %v", err, tt.wantErr)
//			}
//		})
//	}
//}

/*
var (
	tracker01 = domain.TrackerInstance{
		Name:     "T01",
		Enabled:  true,
		Settings: nil,
		Auth:     map[string]string{"rsskey": "000aaa111bbb222ccc333ddd"},
		//IRC:      nil,
		Info: &domain.TrackerInfo{
			Type:      "t01",
			ShortName: "T01",
			LongName:  "Tracker01",
			SiteName:  "www.tracker01.test",
			IRC: domain.TrackerIRCServer{
				Network:        "Tracker01.test",
				ServerNames:    []string{"irc.tracker01.test"},
				ChannelNames:   []string{"#tracker01", "#t01announces"},
				AnnouncerNames: []string{"_AnnounceBot_"},
			},
			ParseInfo: domain.ParseInfo{
				LinePatterns: []domain.TrackerExtractPattern{

					{
						PatternType: "linepattern",
						Optional:    false,
						Regex:       regexp.MustCompile("New Torrent Announcement:\\s*<([^>]*)>\\s*Name:'(.*)' uploaded by '([^']*)'\\s*(freeleech)*\\s*-\\s*https?\\:\\/\\/([^\\/]+\\/)torrent\\/(\\d+)"),
						Vars:        []string{"category", "torrentName", "uploader", "$freeleech", "$baseUrl", "$torrentId"},
					},
				},
				MultiLinePatterns: nil,
				LineMatched: domain.LineMatched{
					Vars: []domain.LineMatchVars{
						{
							Name: "freeleech",
							Vars: []domain.LineMatchVarElem{
								{Type: "string", Value: "false"},
							},
						},
						{
							Name: "torrentUrl",
							Vars: []domain.LineMatchVarElem{
								{Type: "string", Value: "https://"},
								{Type: "var", Value: "$baseUrl"},
								{Type: "string", Value: "rss/download/"},
								{Type: "var", Value: "$torrentId"},
								{Type: "string", Value: "/"},
								{Type: "var", Value: "rsskey"},
								{Type: "string", Value: "/"},
								{Type: "varenc", Value: "torrentName"},
								{Type: "string", Value: ".torrent"},
							},
						},
					},
					Extract:     nil,
					LineMatchIf: nil,
					VarReplace:  nil,
					SetRegex: &domain.SetRegex{
						SrcVar:   "$freeleech",
						Regex:    regexp.MustCompile("freeleech"),
						VarName:  "freeleech",
						NewValue: "true",
					},
					ExtractOne: domain.ExtractOne{Extract: nil},
					ExtractTags: domain.ExtractTags{
						Name:     "",
						SrcVar:   "",
						Split:    "",
						Regex:    nil,
						SetVarIf: nil,
					},
				},
				Ignore: []domain.TrackerIgnore{},
			},
		},
	}
	tracker05 = domain.TrackerInstance{
		Name:     "T05",
		Enabled:  true,
		Settings: nil,
		Auth:     map[string]string{"authkey": "000aaa111bbb222ccc333ddd", "torrent_pass": "eee444fff555ggg666hhh777"},
		//IRC:      nil,
		Info: &domain.TrackerInfo{
			Type:      "t05",
			ShortName: "T05",
			LongName:  "Tracker05",
			SiteName:  "tracker05.test",
			IRC: domain.TrackerIRCServer{
				Network:        "Tracker05.test",
				ServerNames:    []string{"irc.tracker05.test"},
				ChannelNames:   []string{"#t05-announce"},
				AnnouncerNames: []string{"Drone"},
			},
			ParseInfo: domain.ParseInfo{
				LinePatterns: []domain.TrackerExtractPattern{

					{
						PatternType: "linepattern",
						Optional:    false,
						Regex:       regexp.MustCompile("^(.*)\\s+-\\s+https?:.*[&\\?]id=.*https?\\:\\/\\/([^\\/]+\\/).*[&\\?]id=(\\d+)\\s*-\\s*(.*)"),
						Vars:        []string{"torrentName", "$baseUrl", "$torrentId", "tags"},
					},
				},
				MultiLinePatterns: nil,
				LineMatched: domain.LineMatched{
					Vars: []domain.LineMatchVars{
						{
							Name: "scene",
							Vars: []domain.LineMatchVarElem{
								{Type: "string", Value: "false"},
							},
						},
						{
							Name: "log",
							Vars: []domain.LineMatchVarElem{
								{Type: "string", Value: "false"},
							},
						},
						{
							Name: "cue",
							Vars: []domain.LineMatchVarElem{
								{Type: "string", Value: "false"},
							},
						},
						{
							Name: "freeleech",
							Vars: []domain.LineMatchVarElem{
								{Type: "string", Value: "false"},
							},
						},
						{
							Name: "torrentUrl",
							Vars: []domain.LineMatchVarElem{
								{Type: "string", Value: "https://"},
								{Type: "var", Value: "$baseUrl"},
								{Type: "string", Value: "torrents.php?action=download&id="},
								{Type: "var", Value: "$torrentId"},
								{Type: "string", Value: "&authkey="},
								{Type: "var", Value: "authkey"},
								{Type: "string", Value: "&torrent_pass="},
								{Type: "var", Value: "torrent_pass"},
							},
						},
					},
					Extract: []domain.Extract{
						{SrcVar: "torrentName", Optional: true, Regex: regexp.MustCompile("[(\\[]((?:19|20)\\d\\d)[)\\]]"), Vars: []string{"year"}},
						{SrcVar: "$releaseTags", Optional: true, Regex: regexp.MustCompile("([\\d.]+)%"), Vars: []string{"logScore"}},
					},
					LineMatchIf: nil,
					VarReplace: []domain.ParseVarReplace{
						{Name: "tags", SrcVar: "tags", Regex: regexp.MustCompile("[._]"), Replace: " "},
					},
					SetRegex: nil,
					ExtractOne: domain.ExtractOne{Extract: []domain.Extract{
						{SrcVar: "torrentName", Optional: false, Regex: regexp.MustCompile("^(.+?) - ([^\\[]+).*\\[(\\d{4})\\] \\[([^\\[]+)\\] - ([^\\-\\[\\]]+)"), Vars: []string{"name1", "name2", "year", "releaseType", "$releaseTags"}},
						{SrcVar: "torrentName", Optional: false, Regex: regexp.MustCompile("^([^\\-]+)\\s+-\\s+(.+)"), Vars: []string{"name1", "name2"}},
						{SrcVar: "torrentName", Optional: false, Regex: regexp.MustCompile("(.*)"), Vars: []string{"name1"}},
					}},
					ExtractTags: domain.ExtractTags{
						Name:   "",
						SrcVar: "$releaseTags",
						Split:  "/",
						Regex:  []*regexp.Regexp{regexp.MustCompile("^(?:5\\.1 Audio|\\.m4a|Various.*|~.*|>.*)$")},
						SetVarIf: []domain.SetVarIf{
							{VarName: "format", Value: "", NewValue: "", Regex: regexp.MustCompile("^(?:MP3|FLAC|Ogg Vorbis|AAC|AC3|DTS)$")},
							{VarName: "bitrate", Value: "", NewValue: "", Regex: regexp.MustCompile("Lossless$")},
							{VarName: "bitrate", Value: "", NewValue: "", Regex: regexp.MustCompile("^(?:vbr|aps|apx|v\\d|\\d{2,4}|\\d+\\.\\d+|q\\d+\\.[\\dx]+|Other)?(?:\\s*kbps|\\s*kbits?|\\s*k)?(?:\\s*\\(?(?:vbr|cbr)\\)?)?$")},
							{VarName: "media", Value: "", NewValue: "", Regex: regexp.MustCompile("^(?:CD|DVD|Vinyl|Soundboard|SACD|DAT|Cassette|WEB|Blu-ray|Other)$")},
							{VarName: "scene", Value: "Scene", NewValue: "true", Regex: nil},
							{VarName: "log", Value: "Log", NewValue: "true", Regex: nil},
							{VarName: "cue", Value: "Cue", NewValue: "true", Regex: nil},
							{VarName: "freeleech", Value: "Freeleech!", NewValue: "true", Regex: nil},
						},
					},
				},
				Ignore: []domain.TrackerIgnore{},
			},
		},
	}
)
*/

//func Test_service_parse(t *testing.T) {
//	type fields struct {
//		name       string
//		trackerSvc tracker.Service
//		queues     map[string]chan string
//	}
//	type args struct {
//		ti      *domain.TrackerInstance
//		message string
//	}
//
//	tests := []struct {
//		name    string
//		fields  fields
//		args    args
//		want    *domain.Announce
//		wantErr bool
//	}{
//		{
//			name: "tracker01_no_freeleech",
//			fields: fields{
//				name:       "T01",
//				trackerSvc: nil,
//				queues:     make(map[string]chan string),
//			}, args: args{
//				ti:      &tracker01,
//				message: "New Torrent Announcement: <PC :: Iso> Name:'debian live 10 6 0 amd64 standard iso' uploaded by 'Anonymous' - http://www.tracker01.test/torrent/263302",
//			},
//			want: &domain.Announce{
//				Freeleech:   false,
//				Category:    "PC :: Iso",
//				TorrentName: "debian live 10 6 0 amd64 standard iso",
//				Uploader:    "Anonymous",
//				TorrentUrl:  "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
//				Site:        "T01",
//			},
//			wantErr: false,
//		},
//		{
//			name: "tracker01_freeleech",
//			fields: fields{
//				name:       "T01",
//				trackerSvc: nil,
//				queues:     make(map[string]chan string),
//			}, args: args{
//				ti:      &tracker01,
//				message: "New Torrent Announcement: <PC :: Iso> Name:'debian live 10 6 0 amd64 standard iso' uploaded by 'Anonymous' freeleech - http://www.tracker01.test/torrent/263302",
//			},
//			want: &domain.Announce{
//				Freeleech:   true,
//				Category:    "PC :: Iso",
//				TorrentName: "debian live 10 6 0 amd64 standard iso",
//				Uploader:    "Anonymous",
//				TorrentUrl:  "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
//				Site:        "T01",
//			},
//			wantErr: false,
//		},
//		{
//			name: "tracker05_01",
//			fields: fields{
//				name:       "T05",
//				trackerSvc: nil,
//				queues:     make(map[string]chan string),
//			}, args: args{
//				ti:      &tracker05,
//				message: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD - http://passtheheadphones.me/torrents.php?id=97614 / http://tracker05.test/torrents.php?action=download&id=1382972 - blues, rock, classic.rock,jazz,blues.rock,electric.blues",
//			},
//			want: &domain.Announce{
//				Name1:       "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
//				Name2:       "Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
//				Freeleech:   false,
//				TorrentName: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
//				TorrentUrl:  "https://tracker05.test/torrents.php?action=download&id=1382972&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
//				Site:        "T05",
//				Tags:        "blues, rock, classic rock,jazz,blues rock,electric blues",
//				Log:         "true",
//				Cue:         true,
//				Format:      "FLAC",
//				Bitrate:     "Lossless",
//				Media:       "CD",
//				Scene:       false,
//				Year:        1977,
//			},
//			wantErr: false,
//		},
//		{
//			name: "tracker05_02",
//			fields: fields{
//				name:       "T05",
//				trackerSvc: nil,
//				queues:     make(map[string]chan string),
//			}, args: args{
//				ti:      &tracker05,
//				message: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD - http://tracker05.test/torrents.php?id=72158898 / http://tracker05.test/torrents.php?action=download&id=29910415 - 1990s, folk, world_music, celtic",
//			},
//			want: &domain.Announce{
//				ReleaseType: "Album",
//				Name1:       "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
//				Name2:       "Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
//				Freeleech:   false,
//				TorrentName: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
//				TorrentUrl:  "https://tracker05.test/torrents.php?action=download&id=29910415&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
//				Site:        "T05",
//				Tags:        "1990s, folk, world music, celtic",
//				Log:         "true",
//				Cue:         true,
//				Format:      "FLAC",
//				Bitrate:     "Lossless",
//				Media:       "CD",
//				Scene:       false,
//				Year:        1998,
//			},
//			wantErr: false,
//		},
//	}
//	for _, tt := range tests {
//		t.Run(tt.name, func(t *testing.T) {
//			s := &service{
//				name:       tt.fields.name,
//				trackerSvc: tt.fields.trackerSvc,
//				queues:     tt.fields.queues,
//			}
//			got, err := s.parse(tt.args.ti, tt.args.message)
//
//			if (err != nil) != tt.wantErr {
//				t.Errorf("parse() error = %v, wantErr %v", err, tt.wantErr)
//				return
//			}
//			assert.Equal(t, tt.want, got)
//		})
//	}
//}

//func Test_service_parseSingleLine(t *testing.T) {
//	type fields struct {
//		name   string
//		ts     tracker.Service
//		queues map[string]chan string
//	}
//	type args struct {
//		ti   *domain.TrackerInstance
//		line string
//	}
//
//	tests := []struct {
//		name    string
//		fields  fields
//		args    args
//		want    *domain.Announce
//		wantErr bool
//	}{
//		{
//			name: "tracker01_no_freeleech",
//			fields: fields{
//				name:   "T01",
//				ts:     nil,
//				queues: make(map[string]chan string),
//			}, args: args{
//				ti:   &tracker01,
//				line: "New Torrent Announcement: <PC :: Iso> Name:'debian live 10 6 0 amd64 standard iso' uploaded by 'Anonymous' - http://www.tracker01.test/torrent/263302",
//			},
//			want: &domain.Announce{
//				Freeleech:   false,
//				Category:    "PC :: Iso",
//				TorrentName: "debian live 10 6 0 amd64 standard iso",
//				Uploader:    "Anonymous",
//				TorrentUrl:  "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
//				Site:        "T01",
//			},
//			wantErr: false,
//		},
//		{
//			name: "tracker01_freeleech",
//			fields: fields{
//				name:   "T01",
//				ts:     nil,
//				queues: make(map[string]chan string),
//			}, args: args{
//				ti:   &tracker01,
//				line: "New Torrent Announcement: <PC :: Iso> Name:'debian live 10 6 0 amd64 standard iso' uploaded by 'Anonymous' freeleech - http://www.tracker01.test/torrent/263302",
//			},
//			want: &domain.Announce{
//				Freeleech:   true,
//				Category:    "PC :: Iso",
//				TorrentName: "debian live 10 6 0 amd64 standard iso",
//				Uploader:    "Anonymous",
//				TorrentUrl:  "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
//				Site:        "T01",
//			},
//			wantErr: false,
//		},
//		{
//			name: "tracker05_01",
//			fields: fields{
//				name:   "T05",
//				ts:     nil,
//				queues: make(map[string]chan string),
//			}, args: args{
//				ti:   &tracker05,
//				line: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD - http://passtheheadphones.me/torrents.php?id=97614 / http://tracker05.test/torrents.php?action=download&id=1382972 - blues, rock, classic.rock,jazz,blues.rock,electric.blues",
//			},
//			want: &domain.Announce{
//				Name1:       "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
//				Name2:       "Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
//				Freeleech:   false,
//				TorrentName: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
//				TorrentUrl:  "https://tracker05.test/torrents.php?action=download&id=1382972&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
//				Site:        "T05",
//				Tags:        "blues, rock, classic rock,jazz,blues rock,electric blues",
//				//Log:         "true",
//				//Cue:         true,
//				//Format:      "FLAC",
//				//Bitrate:     "Lossless",
//				//Media:       "CD",
//				Log:     "false",
//				Cue:     false,
//				Format:  "",
//				Bitrate: "",
//				Media:   "",
//				Scene:   false,
//				Year:    1977,
//			},
//			wantErr: false,
//		},
//		{
//			name: "tracker05_02",
//			fields: fields{
//				name:   "T05",
//				ts:     nil,
//				queues: make(map[string]chan string),
//			}, args: args{
//				ti:   &tracker05,
//				line: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD - http://tracker05.test/torrents.php?id=72158898 / http://tracker05.test/torrents.php?action=download&id=29910415 - 1990s, folk, world_music, celtic",
//			},
//			want: &domain.Announce{
//				ReleaseType: "Album",
//				Name1:       "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
//				Name2:       "Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
//				Freeleech:   false,
//				TorrentName: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
//				TorrentUrl:  "https://tracker05.test/torrents.php?action=download&id=29910415&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
//				Site:        "T05",
//				Tags:        "1990s, folk, world music, celtic",
//				Log:         "true",
//				Cue:         true,
//				Format:      "FLAC",
//				Bitrate:     "Lossless",
//				Media:       "CD",
//				Scene:       false,
//				Year:        1998,
//			},
//			wantErr: false,
//		},
//	}
//	for _, tt := range tests {
//		t.Run(tt.name, func(t *testing.T) {
//			s := &service{
//				name:       tt.fields.name,
//				trackerSvc: tt.fields.ts,
//				queues:     tt.fields.queues,
//			}
//
//			announce := domain.Announce{
//				Site: tt.fields.name,
//				//Line: msg,
//			}
//			got, err := s.parseSingleLine(tt.args.ti, tt.args.line, &announce)
//			if (err != nil) != tt.wantErr {
//				t.Errorf("parseSingleLine() error = %v, wantErr %v", err, tt.wantErr)
//				return
//			}
//
//			assert.Equal(t, tt.want, got)
//		})
//	}
//}

func Test_service_extractReleaseInfo(t *testing.T) {
	type fields struct {
		name   string
		queues map[string]chan string
	}
	type args struct {
		varMap      map[string]string
		releaseName string
	}
	tests := []struct {
		name    string
		fields  fields
		args    args
		wantErr bool
	}{
		{
			name: "test_01",
			fields: fields{
				name: "", queues: nil,
			},
			args: args{
				varMap:      map[string]string{},
				releaseName: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
			},
			wantErr: false,
		},
		{
			name: "test_02",
			fields: fields{
				name: "", queues: nil,
			},
			args: args{
				varMap:      map[string]string{},
				releaseName: "Lost S06E07 720p WEB-DL DD 5.1 H.264 - LP",
			},
			wantErr: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			s := &service{
				queues: tt.fields.queues,
			}
			if err := s.extractReleaseInfo(tt.args.varMap, tt.args.releaseName); (err != nil) != tt.wantErr {
				t.Errorf("extractReleaseInfo() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}
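If more of the helpers in parse.go get direct coverage, a table-driven test in the same style could sit alongside the one above. A sketch for cleanReleaseName; the test name and expected strings are suggestions (the inputs reuse release names already used in this file), not part of the commit:

func Test_cleanReleaseName(t *testing.T) {
	tests := []struct {
		name  string
		input string
		want  string
	}{
		{
			name:  "music_release",
			input: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
			want:  "Heirloom Road to the Isles 1998 Album FLAC Lossless Log 100 Cue CD",
		},
		{
			name:  "tv_release",
			input: "Lost S06E07 720p WEB-DL DD 5.1 H.264 - LP",
			want:  "Lost S06E07 720p WEB DL DD 5 1 H 264 LP",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := cleanReleaseName(tt.input); got != tt.want {
				t.Errorf("cleanReleaseName() = %q, want %q", got, tt.want)
			}
		})
	}
}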
91  internal/announce/service.go  Normal file
@@ -0,0 +1,91 @@
package announce

import (
	"github.com/autobrr/autobrr/internal/domain"
	"github.com/autobrr/autobrr/internal/filter"
	"github.com/autobrr/autobrr/internal/indexer"
	"github.com/autobrr/autobrr/internal/release"

	"github.com/rs/zerolog/log"
)

type Service interface {
	Parse(announceID string, msg string) error
}

type service struct {
	filterSvc  filter.Service
	indexerSvc indexer.Service
	releaseSvc release.Service
	queues     map[string]chan string
}

func NewService(filterService filter.Service, indexerSvc indexer.Service, releaseService release.Service) Service {

	//queues := make(map[string]chan string)
	//for _, channel := range tinfo {
	//
	//}

	return &service{
		filterSvc:  filterService,
		indexerSvc: indexerSvc,
		releaseSvc: releaseService,
	}
}

// Parse announce line
func (s *service) Parse(announceID string, msg string) error {
	// announceID (server:channel:announcer)
	def := s.indexerSvc.GetIndexerByAnnounce(announceID)
	if def == nil {
		log.Debug().Msgf("could not find indexer definition: %v", announceID)
		return nil
	}

	announce := domain.Announce{
		Site: def.Identifier,
		Line: msg,
	}

	// parse lines
	if def.Parse.Type == "single" {
		err := s.parseLineSingle(def, &announce, msg)
		if err != nil {
			log.Debug().Msgf("could not parse single line: %v", msg)
			log.Error().Err(err).Msgf("could not parse single line: %v", msg)
			return err
		}
	}
	// implement multiline parsing

	// find filter
	foundFilter, err := s.filterSvc.FindByIndexerIdentifier(announce)
	if err != nil {
		log.Error().Err(err).Msg("could not find filter")
		return err
	}

	// no filter found, let's return
	if foundFilter == nil {
		log.Debug().Msg("no matching filter found")
		return nil
	}
	announce.Filter = foundFilter

	log.Trace().Msgf("announce: %+v", announce)

	log.Info().Msgf("Matched %v (%v) for %v", announce.TorrentName, announce.Filter.Name, announce.Site)

	// match release

	// process release
	go func() {
		err = s.releaseSvc.Process(announce)
		if err != nil {
			log.Error().Err(err).Msgf("could not process release: %+v", announce)
		}
	}()

	return nil
}
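Parse looks indexers up by an announceID that, per the comment above, is the colon-joined server:channel:announcer triple. A minimal standalone sketch of how a caller might build and split such a key; the helper name and the lowercasing are assumptions for illustration, not taken from this commit:

package main

import (
	"fmt"
	"strings"
)

// buildAnnounceID joins the IRC coordinates into a single lookup key.
func buildAnnounceID(server, channel, announcer string) string {
	return strings.ToLower(fmt.Sprintf("%v:%v:%v", server, channel, announcer))
}

func main() {
	id := buildAnnounceID("irc.tracker01.test", "#tracker01", "_AnnounceBot_")
	fmt.Println(id) // irc.tracker01.test:#tracker01:_announcebot_

	parts := strings.SplitN(id, ":", 3)
	fmt.Println(parts[0], parts[1], parts[2]) // irc.tracker01.test #tracker01 _announcebot_
}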