Mirror of https://github.com/idanoo/autobrr, synced 2025-07-23 08:49:13 +00:00

feat(indexers): GGn improve release name parsing (#1366)

* feat(indexers): GGn improve IRC parsing
* chore: organize imports

This commit is contained in:
parent dea0b32b89
commit fffd5bbf56

5 changed files with 435 additions and 403 deletions
@@ -4,10 +4,7 @@
 package announce
 
 import (
-    "bytes"
-    "net/url"
     "strings"
-    "text/template"
 
     "github.com/autobrr/autobrr/internal/domain"
     "github.com/autobrr/autobrr/internal/indexer"
@@ -105,8 +102,8 @@ func (a *announceProcessor) processQueue(queue chan string) {
         rls.Protocol = domain.ReleaseProtocol(a.indexer.Protocol)
 
         // on lines matched
-        if err := a.onLinesMatched(a.indexer, tmpVars, rls); err != nil {
-            a.log.Error().Err(err).Msg("error match line")
+        if err := a.indexer.IRC.Parse.Parse(a.indexer, tmpVars, rls); err != nil {
+            a.log.Error().Err(err).Msg("announce: could not parse announce for release")
             continue
         }
 
@@ -138,113 +135,3 @@ func (a *announceProcessor) AddLineToQueue(channel string, line string) error {
 
     return nil
 }
-
-// onLinesMatched process vars into release
-func (a *announceProcessor) onLinesMatched(def *domain.IndexerDefinition, vars map[string]string, rls *domain.Release) error {
-    // map variables from regex capture onto release struct
-    if err := rls.MapVars(def, vars); err != nil {
-        a.log.Error().Err(err).Msg("announce: could not map vars for release")
-        return err
-    }
-
-    // since OPS uses en-dashes as separators, which causes moistari/rls to not the torrentName properly,
-    // we replace the en-dashes with hyphens here
-    if def.Identifier == "ops" {
-        rls.TorrentName = strings.ReplaceAll(rls.TorrentName, "–", "-")
-    }
-
-    // parse fields
-    // run before ParseMatch to not potentially use a reconstructed TorrentName
-    rls.ParseString(rls.TorrentName)
-
-    // set baseUrl to default domain
-    baseUrl := def.URLS[0]
-
-    // override baseUrl
-    if def.BaseURL != "" {
-        baseUrl = def.BaseURL
-    }
-
-    // merge vars from regex captures on announce and vars from settings
-    mergedVars := mergeVars(vars, def.SettingsMap)
-
-    // parse torrentUrl
-    matched, err := def.IRC.Parse.ParseMatch(baseUrl, mergedVars)
-    if err != nil {
-        a.log.Error().Err(err).Msgf("announce: %v", err)
-        return err
-    }
-
-    if matched != nil {
-        rls.DownloadURL = matched.TorrentURL
-
-        if matched.InfoURL != "" {
-            rls.InfoURL = matched.InfoURL
-        }
-
-        // only used by few indexers
-        if matched.TorrentName != "" {
-            rls.TorrentName = matched.TorrentName
-        }
-    }
-
-    // handle optional cookies
-    if v, ok := def.SettingsMap["cookie"]; ok {
-        rls.RawCookie = v
-    }
-
-    return nil
-}
-
-func (a *announceProcessor) processTorrentUrl(match string, vars map[string]string, extraVars map[string]string, encode []string) (string, error) {
-    tmpVars := map[string]string{}
-
-    // copy vars to new tmp map
-    for k, v := range vars {
-        tmpVars[k] = v
-    }
-
-    // merge extra vars with vars
-    for k, v := range extraVars {
-        tmpVars[k] = v
-    }
-
-    // handle url encode of values
-    for _, e := range encode {
-        if v, ok := tmpVars[e]; ok {
-            // url encode value
-            t := url.QueryEscape(v)
-            tmpVars[e] = t
-        }
-    }
-
-    // setup text template to inject variables into
-    tmpl, err := template.New("torrenturl").Parse(match)
-    if err != nil {
-        a.log.Error().Err(err).Msg("could not create torrent url template")
-        return "", err
-    }
-
-    var b bytes.Buffer
-    if err := tmpl.Execute(&b, &tmpVars); err != nil {
-        a.log.Error().Err(err).Msg("could not write torrent url template output")
-        return "", err
-    }
-
-    a.log.Trace().Msg("torrenturl processed")
-
-    return b.String(), nil
-}
-
-// mergeVars merge maps
-func mergeVars(data ...map[string]string) map[string]string {
-    tmpVars := map[string]string{}
-
-    for _, vars := range data {
-        // copy vars to new tmp map
-        for k, v := range vars {
-            tmpVars[k] = v
-        }
-    }
-    return tmpVars
-}
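For readers following the refactor: the removed processTorrentUrl above did two things, URL-encode the listed capture variables and run them through a plain text/template. Below is a minimal standalone sketch of that behaviour, not the project's code; buildTorrentURL and main are illustrative names, and the input values are taken from the deleted announce test that follows. In the new layout this responsibility moves into the domain package (see the indexer diff further down), with sprig template functions and base-URL handling added on top.

package main

import (
    "bytes"
    "fmt"
    "net/url"
    "text/template"
)

// buildTorrentURL mirrors the deleted processTorrentUrl: encode selected
// variables, then inject everything into the download-link template.
func buildTorrentURL(match string, vars map[string]string, encode []string) (string, error) {
    // url encode the listed values before template execution
    for _, key := range encode {
        if v, ok := vars[key]; ok {
            vars[key] = url.QueryEscape(v)
        }
    }

    tmpl, err := template.New("torrenturl").Parse(match)
    if err != nil {
        return "", err
    }

    var b bytes.Buffer
    if err := tmpl.Execute(&b, vars); err != nil {
        return "", err
    }
    return b.String(), nil
}

func main() {
    out, err := buildTorrentURL(
        "{{ .baseUrl }}api/v1/torrents/{{ .torrentId }}/torrent?key={{ .apikey }}",
        map[string]string{"baseUrl": "https://example.test/", "torrentId": "000000", "apikey": "abababab+01010101"},
        []string{"apikey"},
    )
    fmt.Println(out, err)
    // https://example.test/api/v1/torrents/000000/torrent?key=abababab%2B01010101
}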
@@ -1,71 +0,0 @@
-// Copyright (c) 2021 - 2024, Ludvig Lundgren and the autobrr contributors.
-// SPDX-License-Identifier: GPL-2.0-or-later
-
-package announce
-
-import (
-    "testing"
-
-    "github.com/stretchr/testify/assert"
-)
-
-func Test_announceProcessor_processTorrentUrl(t *testing.T) {
-    type args struct {
-        match     string
-        vars      map[string]string
-        extraVars map[string]string
-        encode    []string
-    }
-    tests := []struct {
-        name    string
-        args    args
-        want    string
-        wantErr bool
-    }{
-        {
-            name: "passing with vars_1",
-            args: args{
-                match: "{{ .baseUrl }}api/v1/torrents/{{ .torrentId }}/torrent?key={{ .apikey }}",
-                vars: map[string]string{
-                    "baseUrl":   "https://example.test/",
-                    "torrentId": "000000",
-                },
-                extraVars: map[string]string{
-                    "apikey": "abababab+01010101",
-                },
-                encode: []string{"apikey"},
-            },
-            want:    "https://example.test/api/v1/torrents/000000/torrent?key=abababab%2B01010101",
-            wantErr: false,
-        },
-        {
-            name: "passing with vars_2",
-            args: args{
-                match: "{{ .baseUrl }}/download.php/{{ .torrentId }}/{{ .torrentName }}.torrent?torrent_pass={{ .passkey }}",
-                vars: map[string]string{
-                    "baseUrl":     "https://example.test",
-                    "torrentId":   "000000",
-                    "torrentName": "That Movie 2020 Blu-ray 1080p REMUX AVC DTS-HD MA 7 1 GROUP",
-                },
-                extraVars: map[string]string{
-                    "passkey": "abababab01010101",
-                },
-                encode: []string{"torrentName"},
-            },
-            want:    "https://example.test/download.php/000000/That+Movie+2020+Blu-ray+1080p+REMUX+AVC+DTS-HD+MA+7+1+GROUP.torrent?torrent_pass=abababab01010101",
-            wantErr: false,
-        },
-    }
-    for _, tt := range tests {
-        t.Run(tt.name, func(t *testing.T) {
-            a := &announceProcessor{}
-            got, err := a.processTorrentUrl(tt.args.match, tt.args.vars, tt.args.extraVars, tt.args.encode)
-            if (err != nil) != tt.wantErr {
-                t.Errorf("processTorrentUrl() error = %v, wantErr %v", err, tt.wantErr)
-                return
-            }
-
-            assert.Equal(t, tt.want, got)
-        })
-    }
-}
@@ -232,11 +232,52 @@ type IndexerIRCParseMatched struct {
     TorrentName string
 }
 
-func (p *IndexerIRCParse) ParseMatch(baseURL string, vars map[string]string) (*IndexerIRCParseMatched, error) {
-    matched := &IndexerIRCParseMatched{}
+func parseTemplateURL(baseURL, sourceURL string, vars map[string]string, basename string) (*url.URL, error) {
+    // setup text template to inject variables into
+    tmpl, err := template.New(basename).Funcs(sprig.TxtFuncMap()).Parse(sourceURL)
+    if err != nil {
+        return nil, errors.New("could not create %s url template", basename)
+    }
 
+    var urlBytes bytes.Buffer
+    if err := tmpl.Execute(&urlBytes, &vars); err != nil {
+        return nil, errors.New("could not write %s url template output", basename)
+    }
+
+    templateUrl := urlBytes.String()
+    parsedUrl, err := url.Parse(templateUrl)
+    if err != nil {
+        return nil, errors.Wrap(err, "could not parse template url: %q", templateUrl)
+    }
+
+    // for backwards compatibility remove Host and Scheme to rebuild url
+    if parsedUrl.Host != "" {
+        parsedUrl.Host = ""
+    }
+    if parsedUrl.Scheme != "" {
+        parsedUrl.Scheme = ""
+    }
+
+    // join baseURL with query
+    baseUrlPath, err := url.JoinPath(baseURL, parsedUrl.Path)
+    if err != nil {
+        return nil, errors.Wrap(err, "could not join %s url", basename)
+    }
+
+    // reconstruct url
+    infoUrl, err := url.Parse(baseUrlPath)
+    if err != nil {
+        return nil, errors.Wrap(err, "could not parse %s url", basename)
+    }
+
+    infoUrl.RawQuery = parsedUrl.RawQuery
+
+    return infoUrl, nil
+}
+
+func (p *IndexerIRCParseMatch) ParseURLs(baseURL string, vars map[string]string, rls *Release) error {
     // handle url encode of values
-    for _, e := range p.Match.Encode {
+    for _, e := range p.Encode {
         if v, ok := vars[e]; ok {
             // url encode value
             t := url.QueryEscape(v)
@@ -244,100 +285,86 @@ func (p *IndexerIRCParse) ParseMatch(baseURL string, vars map[string]string) (*I
             }
         }
 
-    if p.Match.InfoURL != "" {
-        // setup text template to inject variables into
-        tmpl, err := template.New("infourl").Funcs(sprig.TxtFuncMap()).Parse(p.Match.InfoURL)
+    if p.InfoURL != "" {
+        infoURL, err := parseTemplateURL(baseURL, p.InfoURL, vars, "infourl")
         if err != nil {
-            return nil, errors.New("could not create info url template")
+            return err
         }
 
-        var urlBytes bytes.Buffer
-        if err := tmpl.Execute(&urlBytes, &vars); err != nil {
-            return nil, errors.New("could not write info url template output")
-        }
-
-        templateUrl := urlBytes.String()
-        parsedUrl, err := url.Parse(templateUrl)
-        if err != nil {
-            return nil, err
-        }
-
-        // for backwards compatibility remove Host and Scheme to rebuild url
-        if parsedUrl.Host != "" {
-            parsedUrl.Host = ""
-        }
-        if parsedUrl.Scheme != "" {
-            parsedUrl.Scheme = ""
-        }
-
-        // join baseURL with query
-        baseUrlPath, err := url.JoinPath(baseURL, parsedUrl.Path)
-        if err != nil {
-            return nil, errors.Wrap(err, "could not join info url")
-        }
-
-        // reconstruct url
-        infoUrl, _ := url.Parse(baseUrlPath)
-        infoUrl.RawQuery = parsedUrl.RawQuery
-
-        matched.InfoURL = infoUrl.String()
+        rls.InfoURL = infoURL.String()
     }
 
-    if p.Match.TorrentURL != "" {
-        // setup text template to inject variables into
-        tmpl, err := template.New("torrenturl").Funcs(sprig.TxtFuncMap()).Parse(p.Match.TorrentURL)
+    if p.TorrentURL != "" {
+        downloadURL, err := parseTemplateURL(baseURL, p.TorrentURL, vars, "torrenturl")
         if err != nil {
-            return nil, errors.New("could not create torrent url template")
+            return err
         }
 
-        var urlBytes bytes.Buffer
-        if err := tmpl.Execute(&urlBytes, &vars); err != nil {
-            return nil, errors.New("could not write torrent url template output")
-        }
-
-        templateUrl := urlBytes.String()
-        parsedUrl, err := url.Parse(templateUrl)
-        if err != nil {
-            return nil, err
-        }
-
-        // for backwards compatibility remove Host and Scheme to rebuild url
-        if parsedUrl.Host != "" {
-            parsedUrl.Host = ""
-        }
-        if parsedUrl.Scheme != "" {
-            parsedUrl.Scheme = ""
-        }
-
-        // join baseURL with query
-        baseUrlPath, err := url.JoinPath(baseURL, parsedUrl.Path)
-        if err != nil {
-            return nil, errors.Wrap(err, "could not join torrent url")
-        }
-
-        // reconstruct url
-        torrentUrl, _ := url.Parse(baseUrlPath)
-        torrentUrl.RawQuery = parsedUrl.RawQuery
-
-        matched.TorrentURL = torrentUrl.String()
+        rls.DownloadURL = downloadURL.String()
     }
 
-    if p.Match.TorrentName != "" {
+    return nil
+}
+
+func (p *IndexerIRCParseMatch) ParseTorrentName(vars map[string]string, rls *Release) error {
+    if p.TorrentName != "" {
         // setup text template to inject variables into
-        tmplName, err := template.New("torrentname").Funcs(sprig.TxtFuncMap()).Parse(p.Match.TorrentName)
+        tmplName, err := template.New("torrentname").Funcs(sprig.TxtFuncMap()).Parse(p.TorrentName)
         if err != nil {
-            return nil, err
+            return err
         }
 
         var nameBytes bytes.Buffer
         if err := tmplName.Execute(&nameBytes, &vars); err != nil {
-            return nil, errors.New("could not write torrent name template output")
+            return errors.New("could not write torrent name template output")
        }
 
-        matched.TorrentName = nameBytes.String()
+        rls.TorrentName = nameBytes.String()
     }
 
-    return matched, nil
+    return nil
+}
+
+func (p *IndexerIRCParse) Parse(def *IndexerDefinition, vars map[string]string, rls *Release) error {
+    if err := rls.MapVars(def, vars); err != nil {
+        return errors.Wrap(err, "could not map variables for release")
+    }
+
+    baseUrl := def.URLS[0]
+
+    // merge vars from regex captures on announce and vars from settings
+    mergedVars := mergeVars(vars, def.SettingsMap)
+
+    // parse urls
+    if err := def.IRC.Parse.Match.ParseURLs(baseUrl, mergedVars, rls); err != nil {
+        return errors.Wrap(err, "could not parse urls for release")
+    }
+
+    // parse torrent var
+    if err := def.IRC.Parse.Match.ParseTorrentName(mergedVars, rls); err != nil {
+        return errors.Wrap(err, "could not parse release name")
+    }
+
+    var parser IRCParser
+
+    switch def.Identifier {
+    case "ggn":
+        parser = IRCParserGazelleGames{}
+    case "ops":
+        parser = IRCParserOrpheus{}
+    default:
+        parser = IRCParserDefault{}
+    }
+
+    if err := parser.Parse(rls, vars); err != nil {
+        return errors.Wrap(err, "could not parse release")
+    }
+
+    if v, ok := def.SettingsMap["cookie"]; ok {
+        rls.RawCookie = v
+    }
+
+    return nil
 }
 
 type TorrentBasic struct {
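The new parseTemplateURL centralises what ParseMatch previously duplicated for info and torrent links: render the templated URL, drop any host and scheme for backwards compatibility, join the remaining path onto the indexer's base URL, and carry the query string over. Below is a standalone sketch of that flow using only the standard library; rebuildURL is an illustrative name, the real function also wires in sprig template functions and wrapped errors, and url.JoinPath requires Go 1.19 or newer.

package main

import (
    "bytes"
    "fmt"
    "net/url"
    "text/template"
)

// rebuildURL renders a templated (possibly relative) URL and reattaches it
// to the indexer base URL, keeping the rendered query string intact.
func rebuildURL(baseURL, source string, vars map[string]string) (string, error) {
    tmpl, err := template.New("url").Parse(source)
    if err != nil {
        return "", err
    }

    var buf bytes.Buffer
    if err := tmpl.Execute(&buf, vars); err != nil {
        return "", err
    }

    parsed, err := url.Parse(buf.String())
    if err != nil {
        return "", err
    }
    // for backwards compatibility, drop host and scheme and rebuild against baseURL
    parsed.Host = ""
    parsed.Scheme = ""

    joined, err := url.JoinPath(baseURL, parsed.Path)
    if err != nil {
        return "", err
    }

    out, err := url.Parse(joined)
    if err != nil {
        return "", err
    }
    out.RawQuery = parsed.RawQuery
    return out.String(), nil
}

func main() {
    u, err := rebuildURL(
        "https://mock.local/",
        "/rss/?action=download&key={{ .key }}&hash={{ .torrentId }}",
        map[string]string{"key": "KEY", "torrentId": "240860011"},
    )
    fmt.Println(u, err)
    // https://mock.local/rss/?action=download&key=KEY&hash=240860011
}

This is the same shape of absolute download URL the updated tests below assert against.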
@@ -9,46 +9,29 @@ import (
     "github.com/stretchr/testify/assert"
 )
 
-func TestIndexerIRCParse_ParseMatch(t *testing.T) {
+func TestIndexerIRCParseMatch_ParseUrls(t *testing.T) {
     type fields struct {
-        Type          string
-        ForceSizeUnit string
-        Lines         []IndexerIRCParseLine
-        Match         IndexerIRCParseMatch
+        TorrentURL  string
+        TorrentName string
+        InfoURL     string
+        Encode      []string
     }
     type args struct {
         baseURL string
         vars    map[string]string
+        rls     *Release
     }
     tests := []struct {
         name    string
         fields  fields
         args    args
-        want    *IndexerIRCParseMatched
-        wantErr bool
+        want    *Release
     }{
         {
-            name: "test_01",
+            name: "",
             fields: fields{
-                Type:          "",
-                ForceSizeUnit: "",
-                Lines: []IndexerIRCParseLine{
-                    {
-                        Pattern: "New Torrent Announcement:\\s*<([^>]*)>\\s*Name:'(.*)' uploaded by '([^']*)'\\s*(freeleech)*\\s*-\\s*(https?\\:\\/\\/[^\\/]+\\/)torrent\\/(\\d+)",
-                        Vars: []string{
-                            "category",
-                            "torrentName",
-                            "uploader",
-                            "freeleech",
-                            "baseUrl",
-                            "torrentId",
-                        },
-                    },
-                },
-                Match: IndexerIRCParseMatch{
-                    TorrentURL: "rss/download/{{ .torrentId }}/{{ .rsskey }}/{{ .torrentName }}.torrent",
-                    Encode:     []string{"torrentName"},
-                },
+                TorrentURL: "rss/download/{{ .torrentId }}/{{ .rsskey }}/{{ .torrentName }}.torrent",
+                Encode:     []string{"torrentName"},
             },
             args: args{
                 baseURL: "https://mock.local/",
@@ -61,40 +44,17 @@ func TestIndexerIRCParse_ParseMatch(t *testing.T) {
                 "torrentId": "240860011",
                 "rsskey":    "00000000000000000000",
             },
+            rls: &Release{},
         },
-        want: &IndexerIRCParseMatched{
-            TorrentURL: "https://mock.local/rss/download/240860011/00000000000000000000/The+Show+2019+S03E08+2160p+DV+WEBRip+6CH+x265+HEVC-GROUP.torrent",
+        want: &Release{
+            DownloadURL: "https://mock.local/rss/download/240860011/00000000000000000000/The+Show+2019+S03E08+2160p+DV+WEBRip+6CH+x265+HEVC-GROUP.torrent",
         },
-        wantErr: false,
     },
     {
-        name: "test_02",
+        name: "",
         fields: fields{
-            Type:          "",
-            ForceSizeUnit: "",
-            Lines: []IndexerIRCParseLine{
-                {
-                    Pattern: `(.*?)(?: - )?(Visual Novel|Light Novel|TV.*|Movie|Manga|OVA|ONA|DVD Special|BD Special|Oneshot|Anthology|Manhwa|Manhua|Artbook|Game|Live Action.*|)[\s\p{Zs}]{2,}\[(\d+)\] :: (.*?)(?: \/ (?:RAW|Softsubs|Hardsubs|Translated)\s\((.+)\)(?:.*Episode\s(\d+))?(?:.*(Freeleech))?.*)? \|\| (https.*)\/torrents.*\?id=\d+&torrentid=(\d+) \|\| (.+?(?:(?:\|\| Uploaded by|$))?) (?:\|\| Uploaded by: (.*))?$`,
-                    Vars: []string{
-                        "torrentName",
-                        "category",
-                        "year",
-                        "releaseTags",
-                        "releaseGroup",
-                        "releaseEpisode",
-                        "freeleech",
-                        "baseUrl",
-                        "torrentId",
-                        "tags",
-                        "uploader",
-                    },
-                },
-            },
-            Match: IndexerIRCParseMatch{
-                TorrentURL:  "/torrent/{{ .torrentId }}/download/{{ .passkey }}",
-                TorrentName: `{{ if .releaseGroup }}[{{ .releaseGroup }}] {{ end }}{{ .torrentName }} [{{ .year }}] {{ if .releaseEpisode }}{{ printf "- %02s " .releaseEpisode }}{{ end }}{{ print "[" .releaseTags "]" | replace " / " "][" }}`,
-                Encode:      nil,
-            },
+            TorrentURL: "/torrent/{{ .torrentId }}/download/{{ .passkey }}",
+            Encode:     nil,
         },
         args: args{
             baseURL: "https://mock.local/",
@@ -112,35 +72,17 @@ func TestIndexerIRCParse_ParseMatch(t *testing.T) {
                 "uploader": "Uploader",
                 "passkey":  "00000000000000000000",
             },
+            rls: &Release{},
         },
-        want: &IndexerIRCParseMatched{
-            TorrentURL:  "https://mock.local/torrent/240860011/download/00000000000000000000",
-            TorrentName: "[Softsubs] Great BluRay SoftSubbed Anime [2020] [Blu-ray][MKV][h264 10-bit][1080p][FLAC 2.0][Dual Audio][Softsubs (Sub Group)][Freeleech]",
+        want: &Release{
+            DownloadURL: "https://mock.local/torrent/240860011/download/00000000000000000000",
         },
-        wantErr: false,
     },
     {
-        name: "test_03",
+        name: "",
         fields: fields{
-            Type:          "",
-            ForceSizeUnit: "",
-            Lines: []IndexerIRCParseLine{
-                {
-                    Pattern: "New Torrent Announcement:\\s*<([^>]*)>\\s*Name:'(.*)' uploaded by '([^']*)'\\s*(freeleech)*\\s*-\\s*(https?\\:\\/\\/[^\\/]+\\/)torrent\\/(\\d+)",
-                    Vars: []string{
-                        "category",
-                        "torrentName",
-                        "uploader",
-                        "freeleech",
-                        "baseUrl",
-                        "torrentId",
-                    },
-                },
-            },
-            Match: IndexerIRCParseMatch{
-                TorrentURL: "{{ .baseUrl }}rss/download/{{ .torrentId }}/{{ .rsskey }}/{{ .torrentName }}.torrent",
-                Encode:     []string{"torrentName"},
-            },
+            TorrentURL: "{{ .baseUrl }}rss/download/{{ .torrentId }}/{{ .rsskey }}/{{ .torrentName }}.torrent",
+            Encode:     []string{"torrentName"},
         },
         args: args{
             baseURL: "https://mock.local/",
@@ -153,34 +95,17 @@ func TestIndexerIRCParse_ParseMatch(t *testing.T) {
                 "torrentId": "240860011",
                 "rsskey":    "00000000000000000000",
             },
+            rls: &Release{},
         },
-        want: &IndexerIRCParseMatched{
-            TorrentURL: "https://mock.local/rss/download/240860011/00000000000000000000/The+Show+2019+S03E08+2160p+DV+WEBRip+6CH+x265+HEVC-GROUP.torrent",
+        want: &Release{
+            DownloadURL: "https://mock.local/rss/download/240860011/00000000000000000000/The+Show+2019+S03E08+2160p+DV+WEBRip+6CH+x265+HEVC-GROUP.torrent",
         },
-        wantErr: false,
     },
     {
-        name: "test_04",
+        name: "",
         fields: fields{
-            Type:          "",
-            ForceSizeUnit: "",
-            Lines: []IndexerIRCParseLine{
-                {
-                    Pattern: "New Torrent Announcement:\\s*<([^>]*)>\\s*Name:'(.*)' uploaded by '([^']*)'\\s*(freeleech)*\\s*-\\s*(https?\\:\\/\\/[^\\/]+\\/)torrent\\/(\\d+)",
-                    Vars: []string{
-                        "category",
-                        "torrentName",
-                        "uploader",
-                        "freeleech",
-                        "baseUrl",
-                        "torrentId",
-                    },
-                },
-            },
-            Match: IndexerIRCParseMatch{
-                TorrentURL: "https://mock.local/rss/download/{{ .torrentId }}/{{ .rsskey }}/{{ .torrentName }}.torrent",
-                Encode:     []string{"torrentName"},
-            },
+            TorrentURL: "https://mock.local/rss/download/{{ .torrentId }}/{{ .rsskey }}/{{ .torrentName }}.torrent",
+            Encode:     []string{"torrentName"},
         },
         args: args{
             baseURL: "https://mock.local/",
@@ -193,34 +118,17 @@ func TestIndexerIRCParse_ParseMatch(t *testing.T) {
                 "torrentId": "240860011",
                 "rsskey":    "00000000000000000000",
             },
+            rls: &Release{},
         },
-        want: &IndexerIRCParseMatched{
-            TorrentURL: "https://mock.local/rss/download/240860011/00000000000000000000/The+Show+2019+S03E08+2160p+DV+WEBRip+6CH+x265+HEVC-GROUP.torrent",
+        want: &Release{
+            DownloadURL: "https://mock.local/rss/download/240860011/00000000000000000000/The+Show+2019+S03E08+2160p+DV+WEBRip+6CH+x265+HEVC-GROUP.torrent",
         },
-        wantErr: false,
     },
     {
-        name: "test_04",
+        name: "",
         fields: fields{
-            Type:          "",
-            ForceSizeUnit: "",
-            Lines: []IndexerIRCParseLine{
-                {
-                    Pattern: "New Torrent in category \\[([^\\]]*)\\] (.*) \\(([^\\)]*)\\) uploaded! Download\\: (https?\\:\\/\\/[^\\/]+\\/).+id=(.+)",
-                    Vars: []string{
-                        "category",
-                        "torrentName",
-                        "uploader",
-                        "freeleech",
-                        "baseUrl",
-                        "torrentId",
-                    },
-                },
-            },
-            Match: IndexerIRCParseMatch{
-                TorrentURL: "/rss/?action=download&key={{ .key }}&token={{ .token }}&hash={{ .torrentId }}&title={{ .torrentName }}",
-                Encode:     []string{"torrentName"},
-            },
+            TorrentURL: "/rss/?action=download&key={{ .key }}&token={{ .token }}&hash={{ .torrentId }}&title={{ .torrentName }}",
+            Encode:     []string{"torrentName"},
         },
         args: args{
             baseURL: "https://mock.local/",
@@ -235,24 +143,225 @@ func TestIndexerIRCParse_ParseMatch(t *testing.T) {
                 "token":  "TOKEN",
                 "rsskey": "00000000000000000000",
             },
+            rls: &Release{},
         },
-        want: &IndexerIRCParseMatched{
-            TorrentURL: "https://mock.local/rss/?action=download&key=KEY&token=TOKEN&hash=240860011&title=The+Show+2019+S03E08+2160p+DV+WEBRip+6CH+x265+HEVC-GROUP",
+        want: &Release{
+            DownloadURL: "https://mock.local/rss/?action=download&key=KEY&token=TOKEN&hash=240860011&title=The+Show+2019+S03E08+2160p+DV+WEBRip+6CH+x265+HEVC-GROUP",
         },
-        wantErr: false,
     },
 }
 for _, tt := range tests {
     t.Run(tt.name, func(t *testing.T) {
-        p := &IndexerIRCParse{
-            Type:          tt.fields.Type,
-            ForceSizeUnit: tt.fields.ForceSizeUnit,
-            Lines:         tt.fields.Lines,
-            Match:         tt.fields.Match,
+        p := &IndexerIRCParseMatch{
+            TorrentURL:  tt.fields.TorrentURL,
+            TorrentName: tt.fields.TorrentName,
+            InfoURL:     tt.fields.InfoURL,
+            Encode:      tt.fields.Encode,
         }
-
-        got, _ := p.ParseMatch(tt.args.baseURL, tt.args.vars)
-        assert.Equal(t, tt.want, got)
+        p.ParseURLs(tt.args.baseURL, tt.args.vars, tt.args.rls)
+        assert.Equal(t, tt.want, tt.args.rls)
+    })
+}
+}
+
+func TestIndexerIRCParseMatch_ParseTorrentName(t *testing.T) {
+    type fields struct {
+        TorrentURL  string
+        TorrentName string
+        InfoURL     string
+        Encode      []string
+    }
+    type args struct {
+        vars map[string]string
+        rls  *Release
+    }
+    tests := []struct {
+        name   string
+        fields fields
+        args   args
+        want   *Release
+    }{
+        {
+            name: "",
+            fields: fields{
+                TorrentName: "",
+            },
+            args: args{
+                vars: map[string]string{
+                    "torrentName": "The Show 2019 S03E08 2160p DV WEBRip 6CH x265 HEVC-GROUP",
+                },
+                rls: &Release{},
+            },
+            want: &Release{
+                TorrentName: "",
+            },
+        },
+        {
+            name: "",
+            fields: fields{
+                TorrentName: `{{ if .releaseGroup }}[{{ .releaseGroup }}] {{ end }}{{ .torrentName }} [{{ .year }}] {{ if .releaseEpisode }}{{ printf "- %02s " .releaseEpisode }}{{ end }}{{ print "[" .releaseTags "]" | replace " / " "][" }}`,
+            },
+            args: args{
+                vars: map[string]string{
+                    "torrentName":    "Great BluRay SoftSubbed Anime",
+                    "category":       "TV Series",
+                    "year":           "2020",
+                    "releaseTags":    "Blu-ray / MKV / h264 10-bit / 1080p / FLAC 2.0 / Dual Audio / Softsubs (Sub Group) / Freeleech",
+                    "releaseGroup":   "Softsubs",
+                    "releaseEpisode": "",
+                    "freeleech":      "freeleech",
+                    "baseUrl":        "https://mock.local",
+                    "torrentId":      "240860011",
+                    "tags":           "comedy, drama, school.life, sports",
+                    "uploader":       "Uploader",
+                    "passkey":        "00000000000000000000",
+                },
+                rls: &Release{},
+            },
+            want: &Release{
+                TorrentName: "[Softsubs] Great BluRay SoftSubbed Anime [2020] [Blu-ray][MKV][h264 10-bit][1080p][FLAC 2.0][Dual Audio][Softsubs (Sub Group)][Freeleech]",
+            },
+        },
+    }
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            p := &IndexerIRCParseMatch{
+                TorrentURL:  tt.fields.TorrentURL,
+                TorrentName: tt.fields.TorrentName,
+                InfoURL:     tt.fields.InfoURL,
+                Encode:      tt.fields.Encode,
+            }
+            p.ParseTorrentName(tt.args.vars, tt.args.rls)
+            assert.Equal(t, tt.want, tt.args.rls)
+        })
+    }
+}
+
+func TestIRCParserGazelleGames_Parse(t *testing.T) {
+    type args struct {
+        rls  *Release
+        vars map[string]string
+    }
+    type want struct {
+        title   string
+        release string
+    }
+    tests := []struct {
+        name string
+        args args
+        want want
+    }{
+        {
+            name: "",
+            args: args{
+                rls: NewRelease("ggn"),
+                vars: map[string]string{
+                    "torrentName": "Trouble.in.Paradise-GROUP in Trouble in Paradise",
+                },
+            },
+            want: want{
+                title:   "Trouble in Paradise",
+                release: "Trouble.in.Paradise-GROUP",
+            },
+        },
+        {
+            name: "",
+            args: args{
+                rls: NewRelease("ggn"),
+                vars: map[string]string{
+                    "torrentName": "F.I.L.F. Game Walkthrough v.0.18 in F.I.L.F.",
+                },
+            },
+            want: want{
+                title:   "F.I.L.F.",
+                release: "F.I.L.F. Game Walkthrough v.0.18",
+            },
+        },
+        {
+            name: "",
+            args: args{
+                rls: NewRelease("ggn"),
+                vars: map[string]string{
+                    "torrentName": "Ni no Kuni: Dominion of the Dark Djinn in Ni no Kuni: Dominion of the Dark Djinn",
+                },
+            },
+            want: want{
+                title:   "Ni no Kuni: Dominion of the Dark Djinn",
+                release: "Ni no Kuni: Dominion of the Dark Djinn",
+            },
+        },
+        {
+            name: "",
+            args: args{
+                rls: NewRelease("ggn"),
+                vars: map[string]string{
+                    "torrentName": "Year 2 Remastered by Insaneintherainmusic",
+                    "category":    "OST",
+                },
+            },
+            want: want{
+                title:   "Year 2 Remastered by Insaneintherainmusic",
+                release: "Year 2 Remastered by Insaneintherainmusic",
+            },
+        },
+    }
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            p := IRCParserGazelleGames{}
+            p.Parse(tt.args.rls, tt.args.vars)
+            assert.Equal(t, tt.want.release, tt.args.rls.TorrentName)
+            assert.Equal(t, tt.want.title, tt.args.rls.Title)
+        })
+    }
+}
+
+func TestIRCParserOrpheus_Parse(t *testing.T) {
+    type args struct {
+        rls  *Release
+        vars map[string]string
+    }
+    type want struct {
+        title   string
+        release string
+    }
+    tests := []struct {
+        name string
+        args args
+        want want
+    }{
+        {
+            name: "",
+            args: args{
+                rls: NewRelease("ops"),
+                vars: map[string]string{
+                    "torrentName": "Busta Rhymes – BEACH BALL (feat. BIA) – [2023] [Single] WEB/FLAC/24bit Lossless",
+                },
+            },
+            want: want{
+                title:   "BEACH BALL",
+                release: "Busta Rhymes - BEACH BALL (feat. BIA) - [2023] [Single] WEB/FLAC/24bit Lossless",
+            },
+        },
+        {
+            name: "",
+            args: args{
+                rls: NewRelease("ops"),
+                vars: map[string]string{
+                    "torrentName": "Busta Rhymes – BEACH BALL (feat. BIA) – [2023] [Single] CD/FLAC/Lossless",
+                },
+            },
+            want: want{
+                title:   "BEACH BALL",
+                release: "Busta Rhymes - BEACH BALL (feat. BIA) - [2023] [Single] CD/FLAC/Lossless",
+            },
+        },
+    }
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            p := IRCParserOrpheus{}
+            p.Parse(tt.args.rls, tt.args.vars)
+            assert.Equal(t, tt.want.release, tt.args.rls.TorrentName)
+            assert.Equal(t, tt.want.title, tt.args.rls.Title)
         })
     }
 }
@@ -6,6 +6,7 @@ package domain
 import (
     "context"
     "encoding/json"
+    "strings"
     "time"
 )
 
@@ -137,3 +138,82 @@ type IrcRepo interface {
     GetNetworkByID(ctx context.Context, id int64) (*IrcNetwork, error)
     DeleteNetwork(ctx context.Context, id int64) error
 }
+
+type IRCParser interface {
+    Parse(rls *Release, vars map[string]string) error
+}
+
+type IRCParserDefault struct{}
+
+func (p IRCParserDefault) Parse(rls *Release, _ map[string]string) error {
+    // parse fields
+    // run before ParseMatch to not potentially use a reconstructed TorrentName
+    rls.ParseString(rls.TorrentName)
+
+    return nil
+}
+
+type IRCParserGazelleGames struct{}
+
+func (p IRCParserGazelleGames) Parse(rls *Release, vars map[string]string) error {
+    torrentName := vars["torrentName"]
+    category := vars["category"]
+
+    releaseName := ""
+    title := ""
+
+    switch category {
+    case "OST":
+        // OST does not have the Title in Group naming convention
+        releaseName = torrentName
+    default:
+        releaseName, title = splitInMiddle(torrentName, " in ")
+
+        if releaseName == "" && title != "" {
+            releaseName = torrentName
+        }
+    }
+
+    rls.ParseString(releaseName)
+
+    if title != "" {
+        rls.Title = title
+    }
+
+    return nil
+}
+
+type IRCParserOrpheus struct{}
+
+func (p IRCParserOrpheus) Parse(rls *Release, vars map[string]string) error {
+    // OPS uses en-dashes as separators, which causes moistari/rls to not parse the torrentName properly,
+    // we replace the en-dashes with hyphens here
+    torrentName := vars["torrentName"]
+    rls.TorrentName = strings.ReplaceAll(torrentName, "–", "-")
+
+    rls.ParseString(rls.TorrentName)
+
+    return nil
+}
+
+// mergeVars merge maps
+func mergeVars(data ...map[string]string) map[string]string {
+    tmpVars := map[string]string{}
+
+    for _, vars := range data {
+        // copy vars to new tmp map
+        for k, v := range vars {
+            tmpVars[k] = v
+        }
+    }
+    return tmpVars
+}
+
+// splitInMiddle utility for GGn that tries to split the announced release name
+// torrent name consists of "This.Game-GRP in This Game Group" but titles can include "in"
+// this function tries to split in the correct place
+func splitInMiddle(s, sep string) (string, string) {
+    parts := strings.Split(s, sep)
+    l := len(parts)
+    return strings.Join(parts[:l/2], sep), strings.Join(parts[l/2:], sep)
+}
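The GGn-specific behaviour hinges on splitInMiddle: a GazelleGames announce carries "<release> in <group title>", but either half can itself contain the word "in", so the split happens at the middle occurrence of " in " rather than the first. Below is a small standalone demonstration; the splitInMiddle body is copied from the hunk above and the inputs come from the new tests, while the surrounding main is only illustrative.

package main

import (
    "fmt"
    "strings"
)

// splitInMiddle splits s at the middle occurrence of sep, as in the diff above.
func splitInMiddle(s, sep string) (string, string) {
    parts := strings.Split(s, sep)
    l := len(parts)
    return strings.Join(parts[:l/2], sep), strings.Join(parts[l/2:], sep)
}

func main() {
    release, title := splitInMiddle("Trouble.in.Paradise-GROUP in Trouble in Paradise", " in ")
    fmt.Printf("release=%q title=%q\n", release, title)
    // release="Trouble.in.Paradise-GROUP" title="Trouble in Paradise"

    release, title = splitInMiddle("Ni no Kuni: Dominion of the Dark Djinn in Ni no Kuni: Dominion of the Dark Djinn", " in ")
    fmt.Printf("release=%q title=%q\n", release, title)
    // both halves come back identical for "<title> in <title>" announces
}

The OST category skips the split entirely, and the Orpheus parser instead normalises en-dashes to hyphens before handing the name to rls.ParseString, as shown in the hunk above.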