feat(indexers): update PTP announce to new format (#1738)

* feat(indexers): update PTP to new format

* fix: update expect line

* feat: use unique key for dl link

* feat: update pattern

* fix: definition tests

* feat: rename var

* feat: add custom vars map

* feat: start mapCustomVars

* Update internal/indexer/definitions/ptp.yaml

Co-authored-by: nuxen <felix.schubert1998@gmail.com>

* feat(indexers): map custom indexer vars

* feat: support upload unix epoch time

* feat(releases): update mapvars

* feat(indexers): remove ptp api init

* feat(indexers): update ptp category mapping

* feat(releases): show announce type in details

* feat(releases): mapvars announcetype

---------

Co-authored-by: nuxen <felix.schubert1998@gmail.com>
Authored by ze0s on 2024-12-08 23:35:32 +01:00, committed via GitHub
parent f644b3a4d6
commit 24f31574e5
6 changed files with 342 additions and 108 deletions

@@ -228,10 +228,11 @@ func (i IndexerIRC) ValidChannel(channel string) bool {
 }
 
 type IndexerIRCParse struct {
-    Type          string                `json:"type"`
-    ForceSizeUnit string                `json:"forcesizeunit"`
-    Lines         []IndexerIRCParseLine `json:"lines"`
-    Match         IndexerIRCParseMatch  `json:"match"`
+    Type          string                                  `json:"type"`
+    ForceSizeUnit string                                  `json:"forcesizeunit"`
+    Lines         []IndexerIRCParseLine                   `json:"lines"`
+    Match         IndexerIRCParseMatch                    `json:"match"`
+    Mappings      map[string]map[string]map[string]string `json:"mappings"`
 }
 
 type LineTest struct {
@@ -352,7 +353,31 @@ func (p *IndexerIRCParseMatch) ParseTorrentName(vars map[string]string, rls *Release) error {
     return nil
 }
 
+func (p *IndexerIRCParse) MapCustomVariables(vars map[string]string) error {
+    for varsKey, varsKeyMap := range p.Mappings {
+        varsValue, ok := vars[varsKey]
+        if !ok {
+            continue
+        }
+
+        keyValueMap, ok := varsKeyMap[varsValue]
+        if !ok {
+            continue
+        }
+
+        for k, v := range keyValueMap {
+            vars[k] = v
+        }
+    }
+
+    return nil
+}
+
 func (p *IndexerIRCParse) Parse(def *IndexerDefinition, vars map[string]string, rls *Release) error {
+    if err := p.MapCustomVariables(vars); err != nil {
+        return errors.Wrap(err, "could not map custom variables for release")
+    }
+
     if err := rls.MapVars(def, vars); err != nil {
         return errors.Wrap(err, "could not map variables for release")
     }
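
For orientation: the new Mappings table is keyed first by the announce variable's name, then by the raw announced value, and the innermost map lists the variables to write back. A minimal standalone sketch of that lookup, using example values taken from the unit test below rather than from the shipped ptp.yaml:

package main

import "fmt"

// mapCustomVars mirrors the loop in MapCustomVariables above, written
// against plain maps so it can run on its own. Level 1: variable name,
// level 2: announced value, level 3: variables to set.
func mapCustomVars(mappings map[string]map[string]map[string]string, vars map[string]string) {
    for name, byValue := range mappings {
        raw, ok := vars[name]
        if !ok {
            continue
        }
        newVars, ok := byValue[raw]
        if !ok {
            continue
        }
        for k, v := range newVars {
            vars[k] = v
        }
    }
}

func main() {
    // Example mapping values as in the test below; the real ptp.yaml is not shown in this diff.
    mappings := map[string]map[string]map[string]string{
        "announceType": {
            "0": {"announceType": "NEW"},
            "1": {"announceType": "PROMO"},
        },
        "freeleechEnum": {
            "1": {"downloadVolumeFactor": "0", "uploadVolumeFactor": "1.0"},
        },
    }

    vars := map[string]string{"announceType": "1", "freeleechEnum": "1"}
    mapCustomVars(mappings, vars)

    fmt.Println(vars)
    // map[announceType:PROMO downloadVolumeFactor:0 freeleechEnum:1 uploadVolumeFactor:1.0]
}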

@@ -375,3 +375,107 @@ func TestIRCParserOrpheus_Parse(t *testing.T) {
         })
     }
 }
+
+func TestIndexerIRCParse_MapCustomVariables1(t *testing.T) {
+    type fields struct {
+        Type          string
+        ForceSizeUnit string
+        Lines         []IndexerIRCParseLine
+        Match         IndexerIRCParseMatch
+        Mappings      map[string]map[string]map[string]string
+    }
+    type args struct {
+        vars       map[string]string
+        expectVars map[string]string
+    }
+    tests := []struct {
+        name    string
+        fields  fields
+        args    args
+        wantErr bool
+    }{
+        {
+            name: "",
+            fields: fields{
+                Mappings: map[string]map[string]map[string]string{
+                    "announceType": {
+                        "0": map[string]string{
+                            "announceType": "NEW",
+                        },
+                        "1": map[string]string{
+                            "announceType": "PROMO",
+                        },
+                    },
+                    "categoryEnum": {
+                        "0": map[string]string{
+                            "category": "Feature Film",
+                        },
+                        "1": map[string]string{
+                            "category": "Short Film",
+                        },
+                        "2": map[string]string{
+                            "category": "Miniseries",
+                        },
+                        "3": map[string]string{
+                            "category": "Stand-up Comedy",
+                        },
+                        "4": map[string]string{
+                            "category": "Live Performance",
+                        },
+                        "5": map[string]string{
+                            "category": "Movie Collection",
+                        },
+                    },
+                    "freeleechEnum": {
+                        "0": map[string]string{
+                            "downloadVolumeFactor": "1.0",
+                            "uploadVolumeFactor":   "1.0",
+                        },
+                        "1": map[string]string{
+                            "downloadVolumeFactor": "0",
+                            "uploadVolumeFactor":   "1.0",
+                        },
+                        "2": map[string]string{
+                            "downloadVolumeFactor": "0.5",
+                            "uploadVolumeFactor":   "1.0",
+                        },
+                        "3": map[string]string{
+                            "downloadVolumeFactor": "0",
+                            "uploadVolumeFactor":   "0",
+                        },
+                    },
+                },
+            },
+            args: args{
+                vars: map[string]string{
+                    "announceType":  "1",
+                    "categoryEnum":  "0",
+                    "freeleechEnum": "1",
+                },
+                expectVars: map[string]string{
+                    "announceType":         "PROMO",
+                    "category":             "Feature Film",
+                    "categoryEnum":         "0",
+                    "freeleechEnum":        "1",
+                    "downloadVolumeFactor": "0",
+                    "uploadVolumeFactor":   "1.0",
+                },
+            },
+            wantErr: false,
+        },
+    }
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            p := &IndexerIRCParse{
+                Type:          tt.fields.Type,
+                ForceSizeUnit: tt.fields.ForceSizeUnit,
+                Lines:         tt.fields.Lines,
+                Match:         tt.fields.Match,
+                Mappings:      tt.fields.Mappings,
+            }
+            err := p.MapCustomVariables(tt.args.vars)
+            assert.NoError(t, err)
+            assert.Equal(t, tt.args.expectVars, tt.args.vars)
+        })
+    }
+}
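
Not part of the commit, but the fall-through behaviour (MapCustomVariables skips values that have no mapping entry) is easy to pin down with a companion test. An illustrative sketch, assuming it sits in the same package and uses the same testify import as the test above:

func TestIndexerIRCParse_MapCustomVariables_PassThrough(t *testing.T) {
    p := &IndexerIRCParse{
        Mappings: map[string]map[string]map[string]string{
            "announceType": {
                "0": map[string]string{"announceType": "NEW"},
            },
        },
    }

    // "2" has no entry in the mapping, so the var must come back unchanged.
    vars := map[string]string{"announceType": "2"}

    err := p.MapCustomVariables(vars)
    assert.NoError(t, err)
    assert.Equal(t, map[string]string{"announceType": "2"}, vars)
}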

@@ -733,7 +733,7 @@ func (r *Release) MapVars(def *IndexerDefinition, varMap map[string]string) error {
         r.Category = category
     }
 
-    if announceType, err := getStringMapValue(varMap, "announceTypeEnum"); err == nil {
+    if announceType, err := getStringMapValue(varMap, "announceType"); err == nil {
         annType, parseErr := ParseAnnounceType(announceType)
         if parseErr == nil {
             r.AnnounceType = annType
@@ -764,30 +764,73 @@ func (r *Release) MapVars(def *IndexerDefinition, varMap map[string]string) error {
         freeleechPercent = strings.Replace(freeleechPercent, "%", "", -1)
         freeleechPercent = strings.Trim(freeleechPercent, " ")
 
-        freeleechPercentInt, err := strconv.Atoi(freeleechPercent)
-        if err != nil {
-            //log.Debug().Msgf("bad freeleechPercent var: %v", year)
-        }
-
-        if freeleechPercentInt > 0 {
-            r.Freeleech = true
-            r.FreeleechPercent = freeleechPercentInt
+        freeleechPercentInt, parseErr := strconv.Atoi(freeleechPercent)
+        if parseErr == nil {
+            if freeleechPercentInt > 0 {
+                r.Freeleech = true
+                r.FreeleechPercent = freeleechPercentInt
 
-            r.Bonus = append(r.Bonus, "Freeleech")
+                r.Bonus = append(r.Bonus, "Freeleech")
 
-            switch freeleechPercentInt {
-            case 25:
-                r.Bonus = append(r.Bonus, "Freeleech25")
-            case 50:
-                r.Bonus = append(r.Bonus, "Freeleech50")
-            case 75:
-                r.Bonus = append(r.Bonus, "Freeleech75")
-            case 100:
-                r.Bonus = append(r.Bonus, "Freeleech100")
+                switch freeleechPercentInt {
+                case 25:
+                    r.Bonus = append(r.Bonus, "Freeleech25")
+                case 50:
+                    r.Bonus = append(r.Bonus, "Freeleech50")
+                case 75:
+                    r.Bonus = append(r.Bonus, "Freeleech75")
+                case 100:
+                    r.Bonus = append(r.Bonus, "Freeleech100")
+                }
             }
         }
     }
 
+    //if downloadVolumeFactor, err := getStringMapValue(varMap, "downloadVolumeFactor"); err == nil {
+    if downloadVolumeFactor, ok := varMap["downloadVolumeFactor"]; ok {
+        // special handling for BHD to map their freeleech into percent
+        //if def.Identifier == "beyondhd" {
+        //    if freeleechPercent == "Capped FL" {
+        //        freeleechPercent = "100%"
+        //    } else if strings.Contains(freeleechPercent, "% FL") {
+        //        freeleechPercent = strings.Replace(freeleechPercent, " FL", "", -1)
+        //    }
+        //}
+
+        //r.downloadVolumeFactor = downloadVolumeFactor
+
+        value, parseErr := strconv.ParseInt(downloadVolumeFactor, 10, 64)
+        if parseErr == nil {
+            percentage := value * 100
+            r.FreeleechPercent = int(percentage)
+        }
+
+        r.Freeleech = true
+    }
+
+    //if uploadVolumeFactor, err := getStringMapValue(varMap, "uploadVolumeFactor"); err == nil {
+    //    // special handling for BHD to map their freeleech into percent
+    //    //if def.Identifier == "beyondhd" {
+    //    //    if freeleechPercent == "Capped FL" {
+    //    //        freeleechPercent = "100%"
+    //    //    } else if strings.Contains(freeleechPercent, "% FL") {
+    //    //        freeleechPercent = strings.Replace(freeleechPercent, " FL", "", -1)
+    //    //    }
+    //    //}
+    //
+    //    r.uploadVolumeFactor = uploadVolumeFactor
+    //
+    //    //freeleechPercentInt, err := strconv.Atoi(freeleechPercent)
+    //    //if err != nil {
+    //    //    //log.Debug().Msgf("bad freeleechPercent var: %v", year)
+    //    //}
+    //    //
+    //    //if freeleechPercentInt > 0 {
+    //    //    r.Freeleech = true
+    //    //    r.FreeleechPercent = freeleechPercentInt
+    //    //}
+    //}
+
     if uploader, err := getStringMapValue(varMap, "uploader"); err == nil {
         r.Uploader = uploader
     }
@@ -801,11 +844,17 @@ func (r *Release) MapVars(def *IndexerDefinition, varMap map[string]string) error {
             torrentSize = fmt.Sprintf("%s %s", torrentSize, def.IRC.Parse.ForceSizeUnit)
         }
 
-        size, err := humanize.ParseBytes(torrentSize)
-        if err != nil {
-            // log could not parse into bytes
-        }
-        r.Size = size
+        size, parseErr := humanize.ParseBytes(torrentSize)
+        if parseErr == nil {
+            r.Size = size
+        }
     }
 
+    if torrentSizeBytes, err := getStringMapValue(varMap, "torrentSizeBytes"); err == nil {
+        size, parseErr := strconv.ParseUint(torrentSizeBytes, 10, 64)
+        if parseErr == nil {
+            r.Size = size
+        }
+    }
+
     if scene, err := getStringMapValue(varMap, "scene"); err == nil {
@@ -826,24 +875,27 @@ func (r *Release) MapVars(def *IndexerDefinition, varMap map[string]string) error {
     }
 
     if yearVal, err := getStringMapValue(varMap, "year"); err == nil {
-        year, err := strconv.Atoi(yearVal)
-        if err != nil {
-            //log.Debug().Msgf("bad year var: %v", year)
+        year, parseErr := strconv.Atoi(yearVal)
+        if parseErr == nil {
+            r.Year = year
         }
-        r.Year = year
     }
 
     if tags, err := getStringMapValue(varMap, "tags"); err == nil {
-        tagsArr := []string{}
-        s := strings.Split(tags, ",")
-        for _, t := range s {
-            tagsArr = append(tagsArr, strings.Trim(t, " "))
+        if tags != "" && tags != "*" {
+            tagsArr := []string{}
+            s := strings.Split(tags, ",")
+            for _, t := range s {
+                tagsArr = append(tagsArr, strings.Trim(t, " "))
+            }
+            r.Tags = tagsArr
         }
-        r.Tags = tagsArr
     }
 
     if title, err := getStringMapValue(varMap, "title"); err == nil {
-        r.Title = title
+        if title != "" && title != "*" {
+            r.Title = title
+        }
     }
 
     // handle releaseTags. Most of them are redundant but some are useful
@@ -864,6 +916,10 @@ func (r *Release) MapVars(def *IndexerDefinition, varMap map[string]string) error {
         r.Episode = episode
     }
 
+    //if metaImdb, err := getStringMapValue(varMap, "imdb"); err == nil {
+    //    r.MetaIMDB = metaImdb
+    //}
+
     return nil
 }
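
One detail worth calling out in the size handling above: the humanized torrentSize string is parsed first, and an exact torrentSizeBytes value, when the announce provides one, is parsed afterwards and therefore wins. A small standalone sketch of that precedence (parseSize is not the actual MapVars code; humanize is github.com/dustin/go-humanize, already used in the hunk above):

package main

import (
    "fmt"
    "strconv"

    "github.com/dustin/go-humanize"
)

// parseSize applies the same precedence as MapVars: a humanized size
// string is used when it parses, and an exact byte count overrides it.
func parseSize(vars map[string]string) uint64 {
    var size uint64

    if torrentSize, ok := vars["torrentSize"]; ok {
        if parsed, err := humanize.ParseBytes(torrentSize); err == nil {
            size = parsed
        }
    }

    if torrentSizeBytes, ok := vars["torrentSizeBytes"]; ok {
        if parsed, err := strconv.ParseUint(torrentSizeBytes, 10, 64); err == nil {
            size = parsed
        }
    }

    return size
}

func main() {
    fmt.Println(parseSize(map[string]string{"torrentSize": "1.5 GB"}))                           // 1500000000
    fmt.Println(parseSize(map[string]string{"torrentSizeBytes": "1610612736"}))                  // 1610612736
    fmt.Println(parseSize(map[string]string{"torrentSize": "1.5 GB", "torrentSizeBytes": "42"})) // 42
}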