feat(indexers): update PTP announce to new format (#1738)

* feat(indexers): update PTP to new format

* fix: update expect line

* feat: use unique key for dl link

* feat: update pattern

* fix: definition tests

* feat: rename var

* feat: add custom vars map

* feat: start mapCustomVars

* Update internal/indexer/definitions/ptp.yaml

Co-authored-by: nuxen <felix.schubert1998@gmail.com>

* feat(indexers): map custom indexer vars

* feat: support upload unix epoch time

* feat(releases): update mapvars

* feat(indexers): remove ptp api init

* feat(indexers): update ptp category mapping

* feat(releases): show announce type in details

* feat(releases): mapvars announcetype

---------

Co-authored-by: nuxen <felix.schubert1998@gmail.com>
ze0s 2024-12-08 23:35:32 +01:00 committed by GitHub
parent f644b3a4d6
commit 24f31574e5
6 changed files with 342 additions and 108 deletions


@@ -228,10 +228,11 @@ func (i IndexerIRC) ValidChannel(channel string) bool {
}
type IndexerIRCParse struct {
Type string `json:"type"`
ForceSizeUnit string `json:"forcesizeunit"`
Lines []IndexerIRCParseLine `json:"lines"`
Match IndexerIRCParseMatch `json:"match"`
Type string `json:"type"`
ForceSizeUnit string `json:"forcesizeunit"`
Lines []IndexerIRCParseLine `json:"lines"`
Match IndexerIRCParseMatch `json:"match"`
Mappings map[string]map[string]map[string]string `json:"mappings"`
}
type LineTest struct {
@@ -352,7 +353,31 @@ func (p *IndexerIRCParseMatch) ParseTorrentName(vars map[string]string, rls *Rel
return nil
}
func (p *IndexerIRCParse) MapCustomVariables(vars map[string]string) error {
for varsKey, varsKeyMap := range p.Mappings {
varsValue, ok := vars[varsKey]
if !ok {
continue
}
keyValueMap, ok := varsKeyMap[varsValue]
if !ok {
continue
}
for k, v := range keyValueMap {
vars[k] = v
}
}
return nil
}
func (p *IndexerIRCParse) Parse(def *IndexerDefinition, vars map[string]string, rls *Release) error {
if err := p.MapCustomVariables(vars); err != nil {
return errors.Wrap(err, "could not map custom variables for release")
}
if err := rls.MapVars(def, vars); err != nil {
return errors.Wrap(err, "could not map variables for release")
}
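
For reference, the new Mappings table is keyed three levels deep: the captured variable name, then the raw value matched from the announce, then the substitution variables to write back. A minimal standalone sketch of that resolution, using a hypothetical freeleechEnum table rather than the shipped ptp.yaml definition:

package main

import "fmt"

// Hypothetical mapping table in the same three-level shape as the new
// Mappings field: captured var name -> raw matched value -> vars to set.
var mappings = map[string]map[string]map[string]string{
	"freeleechEnum": {
		"1": {"downloadVolumeFactor": "0", "uploadVolumeFactor": "1.0"},
	},
}

// mapCustomVariables resolves the table against the vars captured from an
// announce line, adding or overwriting keys in place.
func mapCustomVariables(vars map[string]string) {
	for key, valueMap := range mappings {
		raw, ok := vars[key]
		if !ok {
			continue
		}
		for k, v := range valueMap[raw] {
			vars[k] = v
		}
	}
}

func main() {
	vars := map[string]string{"freeleechEnum": "1"}
	mapCustomVariables(vars)
	fmt.Println(vars) // map[downloadVolumeFactor:0 freeleechEnum:1 uploadVolumeFactor:1.0]
}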


@@ -375,3 +375,107 @@ func TestIRCParserOrpheus_Parse(t *testing.T) {
})
}
}
func TestIndexerIRCParse_MapCustomVariables1(t *testing.T) {
type fields struct {
Type string
ForceSizeUnit string
Lines []IndexerIRCParseLine
Match IndexerIRCParseMatch
Mappings map[string]map[string]map[string]string
}
type args struct {
vars map[string]string
expectVars map[string]string
}
tests := []struct {
name string
fields fields
args args
wantErr bool
}{
{
name: "",
fields: fields{
Mappings: map[string]map[string]map[string]string{
"announceType": {
"0": map[string]string{
"announceType": "NEW",
},
"1": map[string]string{
"announceType": "PROMO",
},
},
"categoryEnum": {
"0": map[string]string{
"category": "Feature Film",
},
"1": map[string]string{
"category": "Short Film",
},
"2": map[string]string{
"category": "Miniseries",
},
"3": map[string]string{
"category": "Stand-up Comedy",
},
"4": map[string]string{
"category": "Live Performance",
},
"5": map[string]string{
"category": "Movie Collection",
},
},
"freeleechEnum": {
"0": map[string]string{
"downloadVolumeFactor": "1.0",
"uploadVolumeFactor": "1.0",
},
"1": map[string]string{
"downloadVolumeFactor": "0",
"uploadVolumeFactor": "1.0",
},
"2": map[string]string{
"downloadVolumeFactor": "0.5",
"uploadVolumeFactor": "1.0",
},
"3": map[string]string{
"downloadVolumeFactor": "0",
"uploadVolumeFactor": "0",
},
},
},
},
args: args{
vars: map[string]string{
"announceType": "1",
"categoryEnum": "0",
"freeleechEnum": "1",
},
expectVars: map[string]string{
"announceType": "PROMO",
"category": "Feature Film",
"categoryEnum": "0",
"freeleechEnum": "1",
"downloadVolumeFactor": "0",
"uploadVolumeFactor": "1.0",
},
},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
p := &IndexerIRCParse{
Type: tt.fields.Type,
ForceSizeUnit: tt.fields.ForceSizeUnit,
Lines: tt.fields.Lines,
Match: tt.fields.Match,
Mappings: tt.fields.Mappings,
}
err := p.MapCustomVariables(tt.args.vars)
assert.NoError(t, err)
assert.Equal(t, tt.args.expectVars, tt.args.vars)
})
}
}


@@ -733,7 +733,7 @@ func (r *Release) MapVars(def *IndexerDefinition, varMap map[string]string) erro
r.Category = category
}
if announceType, err := getStringMapValue(varMap, "announceTypeEnum"); err == nil {
if announceType, err := getStringMapValue(varMap, "announceType"); err == nil {
annType, parseErr := ParseAnnounceType(announceType)
if parseErr == nil {
r.AnnounceType = annType
@@ -764,30 +764,73 @@ func (r *Release) MapVars(def *IndexerDefinition, varMap map[string]string) erro
freeleechPercent = strings.Replace(freeleechPercent, "%", "", -1)
freeleechPercent = strings.Trim(freeleechPercent, " ")
freeleechPercentInt, err := strconv.Atoi(freeleechPercent)
if err != nil {
//log.Debug().Msgf("bad freeleechPercent var: %v", year)
}
freeleechPercentInt, parseErr := strconv.Atoi(freeleechPercent)
if parseErr == nil {
if freeleechPercentInt > 0 {
r.Freeleech = true
r.FreeleechPercent = freeleechPercentInt
if freeleechPercentInt > 0 {
r.Freeleech = true
r.FreeleechPercent = freeleechPercentInt
r.Bonus = append(r.Bonus, "Freeleech")
r.Bonus = append(r.Bonus, "Freeleech")
switch freeleechPercentInt {
case 25:
r.Bonus = append(r.Bonus, "Freeleech25")
case 50:
r.Bonus = append(r.Bonus, "Freeleech50")
case 75:
r.Bonus = append(r.Bonus, "Freeleech75")
case 100:
r.Bonus = append(r.Bonus, "Freeleech100")
switch freeleechPercentInt {
case 25:
r.Bonus = append(r.Bonus, "Freeleech25")
case 50:
r.Bonus = append(r.Bonus, "Freeleech50")
case 75:
r.Bonus = append(r.Bonus, "Freeleech75")
case 100:
r.Bonus = append(r.Bonus, "Freeleech100")
}
}
}
}
//if downloadVolumeFactor, err := getStringMapValue(varMap, "downloadVolumeFactor"); err == nil {
if downloadVolumeFactor, ok := varMap["downloadVolumeFactor"]; ok {
// special handling for BHD to map their freeleech into percent
//if def.Identifier == "beyondhd" {
// if freeleechPercent == "Capped FL" {
// freeleechPercent = "100%"
// } else if strings.Contains(freeleechPercent, "% FL") {
// freeleechPercent = strings.Replace(freeleechPercent, " FL", "", -1)
// }
//}
//r.downloadVolumeFactor = downloadVolumeFactor
value, parseErr := strconv.ParseInt(downloadVolumeFactor, 10, 64)
if parseErr == nil {
percentage := value * 100
r.FreeleechPercent = int(percentage)
}
r.Freeleech = true
}
//if uploadVolumeFactor, err := getStringMapValue(varMap, "uploadVolumeFactor"); err == nil {
// // special handling for BHD to map their freeleech into percent
// //if def.Identifier == "beyondhd" {
// // if freeleechPercent == "Capped FL" {
// // freeleechPercent = "100%"
// // } else if strings.Contains(freeleechPercent, "% FL") {
// // freeleechPercent = strings.Replace(freeleechPercent, " FL", "", -1)
// // }
// //}
//
// r.uploadVolumeFactor = uploadVolumeFactor
//
// //freeleechPercentInt, err := strconv.Atoi(freeleechPercent)
// //if err != nil {
// // //log.Debug().Msgf("bad freeleechPercent var: %v", year)
// //}
// //
// //if freeleechPercentInt > 0 {
// // r.Freeleech = true
// // r.FreeleechPercent = freeleechPercentInt
// //}
//}
if uploader, err := getStringMapValue(varMap, "uploader"); err == nil {
r.Uploader = uploader
}
@@ -801,11 +844,17 @@ func (r *Release) MapVars(def *IndexerDefinition, varMap map[string]string) erro
torrentSize = fmt.Sprintf("%s %s", torrentSize, def.IRC.Parse.ForceSizeUnit)
}
size, err := humanize.ParseBytes(torrentSize)
if err != nil {
// log could not parse into bytes
size, parseErr := humanize.ParseBytes(torrentSize)
if parseErr == nil {
r.Size = size
}
}
if torrentSizeBytes, err := getStringMapValue(varMap, "torrentSizeBytes"); err == nil {
size, parseErr := strconv.ParseUint(torrentSizeBytes, 10, 64)
if parseErr == nil {
r.Size = size
}
r.Size = size
}
if scene, err := getStringMapValue(varMap, "scene"); err == nil {
@@ -826,24 +875,27 @@ func (r *Release) MapVars(def *IndexerDefinition, varMap map[string]string) erro
}
if yearVal, err := getStringMapValue(varMap, "year"); err == nil {
year, err := strconv.Atoi(yearVal)
if err != nil {
//log.Debug().Msgf("bad year var: %v", year)
year, parseErr := strconv.Atoi(yearVal)
if parseErr == nil {
r.Year = year
}
r.Year = year
}
if tags, err := getStringMapValue(varMap, "tags"); err == nil {
tagsArr := []string{}
s := strings.Split(tags, ",")
for _, t := range s {
tagsArr = append(tagsArr, strings.Trim(t, " "))
if tags != "" && tags != "*" {
tagsArr := []string{}
s := strings.Split(tags, ",")
for _, t := range s {
tagsArr = append(tagsArr, strings.Trim(t, " "))
}
r.Tags = tagsArr
}
r.Tags = tagsArr
}
if title, err := getStringMapValue(varMap, "title"); err == nil {
r.Title = title
if title != "" && title != "*" {
r.Title = title
}
}
// handle releaseTags. Most of them are redundant but some are useful
@@ -864,6 +916,10 @@ func (r *Release) MapVars(def *IndexerDefinition, varMap map[string]string) erro
r.Episode = episode
}
//if metaImdb, err := getStringMapValue(varMap, "imdb"); err == nil {
// r.MetaIMDB = metaImdb
//}
return nil
}
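
Size can now arrive either as a humanized torrentSize string or as the raw torrentSizeBytes count from the new announce. A standalone sketch of that precedence, assuming github.com/dustin/go-humanize as in the hunk above; resolveSize is an illustrative helper, not an autobrr function:

package main

import (
	"fmt"
	"strconv"

	"github.com/dustin/go-humanize"
)

// resolveSize mirrors the order used above: a humanized string first,
// then the raw byte count sent in the new PTP announce.
func resolveSize(vars map[string]string) uint64 {
	var size uint64
	if s, ok := vars["torrentSize"]; ok {
		if parsed, err := humanize.ParseBytes(s); err == nil {
			size = parsed
		}
	}
	if s, ok := vars["torrentSizeBytes"]; ok {
		if parsed, err := strconv.ParseUint(s, 10, 64); err == nil {
			size = parsed
		}
	}
	return size
}

func main() {
	fmt.Println(resolveSize(map[string]string{"torrentSizeBytes": "1899875175"})) // 1899875175
}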


@@ -13,7 +13,6 @@ import (
"github.com/autobrr/autobrr/pkg/errors"
"github.com/autobrr/autobrr/pkg/ggn"
"github.com/autobrr/autobrr/pkg/ops"
"github.com/autobrr/autobrr/pkg/ptp"
"github.com/autobrr/autobrr/pkg/red"
"github.com/rs/zerolog"
@@ -94,18 +93,6 @@ func (s *apiService) AddClient(indexer string, settings map[string]string) error
}
s.apiClients[indexer] = btn.NewClient(key)
case "ptp":
user, ok := settings["api_user"]
if !ok || user == "" {
return errors.New("api.Service.AddClient: could not initialize ptp client: missing var 'api_user'")
}
key, ok := settings["api_key"]
if !ok || key == "" {
return errors.New("api.Service.AddClient: could not initialize ptp client: missing var 'api_key'")
}
s.apiClients[indexer] = ptp.NewClient(user, key)
case "ggn":
key, ok := settings["api_key"]
if !ok || key == "" {
@@ -156,16 +143,6 @@ func (s *apiService) getClientForTest(req domain.IndexerTestApiRequest) (apiClie
}
return btn.NewClient(req.ApiKey), nil
case "ptp":
if req.ApiUser == "" {
return nil, errors.New("api.Service.AddClient: could not initialize ptp client: missing var 'api_user'")
}
if req.ApiKey == "" {
return nil, errors.New("api.Service.AddClient: could not initialize ptp client: missing var 'api_key'")
}
return ptp.NewClient(req.ApiUser, req.ApiKey), nil
case "ggn":
if req.ApiKey == "" {
return nil, errors.New("api.Service.AddClient: could not initialize ggn client: missing var 'api_key'")


@@ -11,7 +11,6 @@ protocol: torrent
supports:
- irc
- rss
- api
# source: gazelle
settings:
- name: authkey
@@ -26,34 +25,6 @@ settings:
label: Torrent pass
help: Right click DL on a torrent and get the torrent_pass.
- name: api_user
type: secret
required: true
label: API User
help: Edit profile -> Security -> Generate new api keys
- name: api_key
type: secret
required: true
label: API Key
help: Edit profile -> Security -> Generate new api keys
# api:
# url: https://passthepopcorn.me/
# type: json
# limits:
# max: 60
# per: minute
# settings:
# - name: api_user
# type: secret
# label: API User
# help: Edit profile -> Security -> Generate new api keys
# - name: api_key
# type: secret
# label: API Key
# help: Edit profile -> Security -> Generate new api keys
irc:
network: PassThePopcorn
server: irc.passthepopcorn.me
@@ -93,31 +64,131 @@ irc:
type: single
lines:
- tests:
- line: That Movie [1972] by Some Director | x264 / Blu-ray / MKV / 1080p | 204371 | 964303 | That.Movie.1972.1080p.BluRay.FLAC.x264-GROUP | comedy, drama, romance
- line: 0:2:0:0:357805:1332009:9k6p:tt24249072:1899875175:1728081334|H.264/MKV/WEB/720p||2023|Last Straw|Last.Straw.2023.REPACK.720p.AMZN.WEB-DL.DDP5.1.H.264-FLUX|thriller,horror
expect:
year: "1972"
releaseTags: x264 / Blu-ray / MKV / 1080p
freeleech: ""
torrentId: "964303"
torrentName: That.Movie.1972.1080p.BluRay.FLAC.x264-GROUP
tags: comedy, drama, romance
- line: That Other Movie [1972] | x264 / Blu-ray / MKV / 1080p / Freeleech! | 204371 | 964303 | That.Other.Movie.1972.1080p.BluRay.FLAC.x264-GROUP | comedy, drama, romance
announceTypeEnum: 0
categoryEnum: 2
originEnum: 0
freeleechEnum: 0
groupId: "357805"
torrentId: "1332009"
key: "9k6p"
imdb: "tt24249072"
torrentSizeBytes: "1899875175"
uploadedDate: "1728081334"
releaseTags: H.264/MKV/WEB/720p
editionTags: ""
year: "2023"
title: "Last Straw"
torrentName: Last.Straw.2023.REPACK.720p.AMZN.WEB-DL.DDP5.1.H.264-FLUX
tags: thriller,horror
- line: 2:1:0:1:357805:1332009:9k6p:tt24249072:1899875175:1728081334|H.264/MKV/WEB/720p||2023|Last Straw|Last.Straw.2023.REPACK.720p.AMZN.WEB-DL.DDP5.1.H.264-FLUX|thriller,horror
expect:
year: "1972"
releaseTags: x264 / Blu-ray / MKV / 1080p / Freeleech!
freeleech: Freeleech
torrentId: "964303"
torrentName: That.Other.Movie.1972.1080p.BluRay.FLAC.x264-GROUP
tags: comedy, drama, romance
pattern: '.* \[(.*)\] (?:by .*)?\| (.*?(?: \/ (Freeleech)!)?) \| .* \| (.*) \| (.*) \| (.*)'
announceTypeEnum: 2
categoryEnum: 1
originEnum: 0
freeleechEnum: 1
groupId: "357805"
torrentId: "1332009"
key: "9k6p"
imdb: "tt24249072"
torrentSizeBytes: "1899875175"
uploadedDate: "1728081334"
releaseTags: H.264/MKV/WEB/720p
editionTags: ""
year: "2023"
title: "Last Straw"
torrentName: Last.Straw.2023.REPACK.720p.AMZN.WEB-DL.DDP5.1.H.264-FLUX
tags: thriller,horror
- line: 1:1:2:1:21108:1332339:ncfe:tt0107426:63996413804:1728081334|BD66/m2ts/Blu-ray/2160p|Dolby Atmos/Dolby Vision|1993|Little Buddha|Little.Buddha.1993.2160p.FRA.UHD.Blu-ray.DV.HDR.HEVC.DTS-HD.MA.5.1|drama,italian
expect:
announceTypeEnum: 1
categoryEnum: 1
originEnum: 2
freeleechEnum: 1
groupId: "21108"
torrentId: "1332339"
key: "ncfe"
imdb: "tt0107426"
torrentSizeBytes: "63996413804"
uploadedDate: "1728081334"
releaseTags: BD66/m2ts/Blu-ray/2160p
editionTags: "Dolby Atmos/Dolby Vision"
year: "1993"
title: "Little Buddha"
torrentName: Little.Buddha.1993.2160p.FRA.UHD.Blu-ray.DV.HDR.HEVC.DTS-HD.MA.5.1
tags: drama,italian
# <announceType>:<category>:<origin>:<freeleech_type>:<gid>:<tid>:<key>:tt<imdbid>:<sizebytes>|<format>/<container>/<source>/<resolution>|<edition_tags>|<year>|<title>|<releasename>|<taglist>
pattern: '(\d):(\d):(\d):(\d):(\d+):(\d+):(\w+):(tt\d+)?:(\d+):(\d+)\|(.*)\|(.*)\|(\d+)\|(.+)\|(.+)\|(.+)'
vars:
- year
- releaseTags
- freeleech
- announceTypeEnum
- categoryEnum
- originEnum
- freeleechEnum
- groupId
- torrentId
- key
- imdb
- torrentSizeBytes
- uploadedDate
- releaseTags
- editionTags
- year
- title
- torrentName
- tags
mappings:
announceTypeEnum:
"0":
announceType: NEW
"1":
announceType: CHECKED
"2":
announceType: PROMO # X-leeched (could be neutral/half/free)
"3":
announceType: PROMO_GP # marked golden popcorn
"4":
announceType: RESURRECTED # reseeded/revived from dead
# Category IDs are Zero+1 indexed from:
# ( 'Feature Film', 'Short Film', 'Miniseries', 'Stand-up Comedy', 'Live Performance', 'Movie Collection' )
categoryEnum:
"1":
category: "Feature Film"
"2":
category: "Short Film"
"3":
category: "Miniseries"
"4":
category: "Stand-up Comedy"
"5":
category: "Live Performance"
"6":
category: "Movie Collection"
originEnum:
"0":
origin: "P2P"
"1":
origin: "SCENE"
"2":
origin: "INTERNAL" # PERSONAL RIP
freeleechEnum:
"0": # Normal
downloadVolumeFactor: 1
uploadVolumeFactor: 1
"1": # Freeleech
downloadVolumeFactor: 0
uploadVolumeFactor: 1
"2": # Halfleech
downloadVolumeFactor: 0.5
uploadVolumeFactor: 1
"3": # Neutral
downloadVolumeFactor: 0
uploadVolumeFactor: 0
match:
infourl: "/torrents.php?torrentid={{ .torrentId }}"
torrenturl: "/torrents.php?action=download&id={{ .torrentId }}&authkey={{ .authkey }}&torrent_pass={{ .torrent_pass }}"
torrenturl: "/torrents.php?action=download&id={{ .torrentId }}&authkey={{ .authkey }}&torrent_pass={{ .torrent_pass }}&key={{ .key }}"


@@ -63,6 +63,7 @@ export const LinksCell = (props: CellContext<Release, unknown>) => {
<CellLine title="Indexer">{props.row.original.indexer.identifier}</CellLine>
<CellLine title="Protocol">{props.row.original.protocol}</CellLine>
<CellLine title="Implementation">{props.row.original.implementation}</CellLine>
<CellLine title="Announce Type">{props.row.original.announce_type}</CellLine>
<CellLine title="Category">{props.row.original.category}</CellLine>
<CellLine title="Uploader">{props.row.original.uploader}</CellLine>
<CellLine title="Size">{humanFileSize(props.row.original.size)}</CellLine>