Feature: Save releases (#36)

* chore: tidy deps

* refactor: database migration

* refactor: store release

* refactor: save release

* chore: add packages

* feat(web): show stats and recent releases

* refactor: simplify filter struct

* feat: add eventbus

* chore: cleanup logging

* chore: update packages
Ludvig Lundgren 2021-11-24 23:18:12 +01:00 committed by GitHub
parent d22dd2fe84
commit 7177e48c02
40 changed files with 5859 additions and 3328 deletions
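
The service diff at the bottom of this page is where the new behaviour comes together: instead of building a domain.Announce and handing it to the filter service, Parse now builds a domain.Release, runs it through FindAndCheckFilters, saves the approved release, and only then processes it. A condensed sketch of that flow, using the names visible in the diff below (error logging trimmed, surrounding types assumed):

```go
// Sketch only: a condensed view of the new Parse flow shown in the diff below.
// Signatures and types outside the diff are assumptions, not the real API.
func (s *service) Parse(announceID string, msg string) error {
	ctx := context.Background()

	// Resolve which indexer this announce line belongs to (server:channel:announcer).
	definition := s.indexerSvc.GetIndexerByAnnounce(announceID)
	if definition == nil {
		return nil // unknown indexer, nothing to do
	}

	// Build a Release instead of the old Announce struct.
	newRelease, err := domain.NewRelease(definition.Identifier, msg)
	if err != nil {
		return err
	}

	// Fill the release from the indexer's line patterns.
	if definition.Parse.Type == "single" {
		if err := s.parseLineSingle(definition, newRelease, msg); err != nil {
			return err
		}
	}

	// Filters now return an explicit ok flag alongside the matched filter.
	filterOK, foundFilter, err := s.filterSvc.FindAndCheckFilters(newRelease)
	if err != nil || !filterOK || foundFilter == nil {
		return err
	}

	// New in this commit: approved releases are persisted before processing.
	newRelease.Filter = foundFilter
	newRelease.FilterStatus = domain.ReleaseStatusFilterApproved
	if err := s.releaseSvc.Store(ctx, newRelease); err != nil {
		return nil
	}

	go s.releaseSvc.Process(*newRelease) // push to download clients asynchronously
	return nil
}
```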


@@ -2,32 +2,35 @@ package announce
import (
"bytes"
"errors"
"fmt"
"html"
"net/url"
"regexp"
"strconv"
"strings"
"text/template"
"github.com/autobrr/autobrr/internal/domain"
"github.com/pkg/errors"
"github.com/rs/zerolog/log"
)
func (s *service) parseLineSingle(def *domain.IndexerDefinition, announce *domain.Announce, line string) error {
func (s *service) parseLineSingle(def *domain.IndexerDefinition, release *domain.Release, line string) error {
for _, extract := range def.Parse.Lines {
tmpVars := map[string]string{}
var err error
err = s.parseExtract(extract.Pattern, extract.Vars, tmpVars, line)
match, err := s.parseExtract(extract.Pattern, extract.Vars, tmpVars, line)
if err != nil {
log.Debug().Msgf("error parsing extract: %v", line)
return err
}
if !match {
log.Debug().Msgf("line not matching expected regex pattern: %v", line)
return errors.New("line not matching expected regex pattern")
}
// on lines matched
err = s.onLinesMatched(def, tmpVars, announce)
err = s.onLinesMatched(def, tmpVars, release)
if err != nil {
log.Debug().Msgf("error match line: %v", line)
return err
@@ -41,7 +44,7 @@ func (s *service) parseMultiLine() error {
return nil
}
func (s *service) parseExtract(pattern string, vars []string, tmpVars map[string]string, line string) error {
func (s *service) parseExtract(pattern string, vars []string, tmpVars map[string]string, line string) (bool, error) {
rxp, err := regExMatch(pattern, line)
if err != nil {
@@ -50,7 +53,7 @@ func (s *service) parseExtract(pattern string, vars []string, tmpVars map[string
if rxp == nil {
//return nil, nil
return nil
return false, nil
}
// extract matched
@@ -64,44 +67,34 @@ func (s *service) parseExtract(pattern string, vars []string, tmpVars map[string
tmpVars[v] = value
}
return nil
return true, nil
}
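
parseExtract relies on a regExMatch helper that is not part of this hunk. For orientation only, a minimal sketch of what such a helper might look like, assuming it compiles the pattern and returns the capture groups (the repository's actual implementation may differ):

```go
// Hypothetical sketch of the regExMatch helper referenced above; the actual
// function in the repository is not shown in this diff and may differ.
func regExMatch(pattern string, value string) ([]string, error) {
	rxp, err := regexp.Compile(pattern)
	if err != nil {
		return nil, err
	}

	matches := rxp.FindStringSubmatch(value)
	if matches == nil {
		return nil, nil // no match: caller treats a nil slice as "line did not match"
	}

	// Drop the full-line match at index 0 and keep only the capture groups,
	// which parseExtract maps onto its vars.
	return matches[1:], nil
}
```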
func (s *service) onLinesMatched(def *domain.IndexerDefinition, vars map[string]string, announce *domain.Announce) error {
// TODO implement set tracker.lastAnnounce = now
func (s *service) onLinesMatched(def *domain.IndexerDefinition, vars map[string]string, release *domain.Release) error {
var err error
announce.TorrentName = vars["torrentName"]
err = release.MapVars(vars)
//err := s.postProcess(ti, vars, *announce)
//if err != nil {
// return err
//}
// TODO extractReleaseInfo
err := s.extractReleaseInfo(vars, announce.TorrentName)
if err != nil {
return err
}
// resolution
// source
// encoder
// TODO is this even needed anymore
// canonicalize name
//canonReleaseName := cleanReleaseName(release.TorrentName)
//log.Trace().Msgf("canonicalize release name: %v", canonReleaseName)
err = s.mapToAnnounce(vars, announce)
err = release.Parse()
if err != nil {
log.Error().Err(err).Msg("announce: could not parse release")
return err
}
// torrent url
torrentUrl, err := s.processTorrentUrl(def.Parse.Match.TorrentURL, vars, def.SettingsMap, def.Parse.Match.Encode)
if err != nil {
log.Debug().Msgf("error torrent url: %v", err)
log.Error().Err(err).Msg("announce: could not process torrent url")
return err
}
if torrentUrl != "" {
announce.TorrentUrl = torrentUrl
release.TorrentURL = torrentUrl
}
return nil
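
processTorrentUrl is also outside this hunk; given the text/template and net/url imports at the top of the file, it presumably renders the indexer's TorrentURL pattern against the extracted vars and settings. A rough sketch under that assumption, not the repository's actual code:

```go
// Hypothetical sketch: render a torrent URL template from extracted vars and
// indexer settings. The real processTorrentUrl may handle more cases, and the
// meaning of the encode parameter is assumed here.
func processTorrentURLSketch(tmplText string, vars map[string]string, settings map[string]string, encode []string) (string, error) {
	// Optionally URL-encode selected variables before templating.
	for _, key := range encode {
		if v, ok := vars[key]; ok {
			vars[key] = url.QueryEscape(v)
		}
	}

	// Merge vars and settings (e.g. passkeys) into one template context.
	data := map[string]string{}
	for k, v := range vars {
		data[k] = v
	}
	for k, v := range settings {
		data[k] = v
	}

	tmpl, err := template.New("torrenturl").Parse(tmplText)
	if err != nil {
		return "", err
	}

	var b bytes.Buffer
	if err := tmpl.Execute(&b, data); err != nil {
		return "", err
	}
	return b.String(), nil
}
```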
@@ -185,367 +178,6 @@ func cleanReleaseName(input string) string {
return processedString
}
func findLast(input string, pattern string) (string, error) {
matched := make([]string, 0)
//for _, s := range arr {
rxp, err := regexp.Compile(pattern)
if err != nil {
return "", err
//return errors.Wrapf(err, "invalid regex: %s", value)
}
matches := rxp.FindStringSubmatch(input)
if matches != nil {
log.Trace().Msgf("matches: %v", matches)
// first value is the match, second value is the text
if len(matches) >= 1 {
last := matches[len(matches)-1]
// add to temp slice
matched = append(matched, last)
}
}
//}
// check if multiple values in temp slice, if so get the last one
if len(matched) >= 1 {
last := matched[len(matched)-1]
return last, nil
}
return "", nil
}
func extractYear(releaseName string) (string, bool) {
yearMatch, err := findLast(releaseName, "(?:^|\\D)(19[3-9]\\d|20[012]\\d)(?:\\D|$)")
if err != nil {
return "", false
}
log.Trace().Msgf("year matches: %v", yearMatch)
return yearMatch, true
}
func extractSeason(releaseName string) (string, bool) {
seasonMatch, err := findLast(releaseName, "\\sS(\\d+)\\s?[ED]\\d+/i")
sm2, err := findLast(releaseName, "\\s(?:S|Season\\s*)(\\d+)/i")
//sm3, err := findLast(releaseName, "\\s((?<!\\d)\\d{1,2})x\\d+/i")
if err != nil {
return "", false
}
log.Trace().Msgf("season matches: %v", seasonMatch)
log.Trace().Msgf("season matches: %v", sm2)
return seasonMatch, false
}
func extractEpisode(releaseName string) (string, bool) {
epMatch, err := findLast(releaseName, "\\sS\\d+\\s?E(\\d+)/i")
ep2, err := findLast(releaseName, "\\s(?:E|Episode\\s*)(\\d+)/i")
//ep3, err := findLast(releaseName, "\\s(?<!\\d)\\d{1,2}x(\\d+)/i")
if err != nil {
return "", false
}
log.Trace().Msgf("ep matches: %v", epMatch)
log.Trace().Msgf("ep matches: %v", ep2)
return epMatch, false
}
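
A side note on the season/episode helpers above: Go's regexp package has no Perl-style /i suffix, so a pattern like "\sS(\d+)\s?[ED]\d+/i" only matches when the literal characters "/i" appear in the release name. Should this logic return in some form, case-insensitivity belongs in an inline flag instead, for example:

```go
// Case-insensitive matching in Go uses the (?i) inline flag, not a /i suffix.
var seasonRx = regexp.MustCompile(`(?i)\s(?:S|Season\s*)(\d+)`)

func extractSeasonSketch(releaseName string) (string, bool) {
	m := seasonRx.FindStringSubmatch(releaseName)
	if m == nil {
		return "", false
	}
	return m[1], true
}
```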
func (s *service) extractReleaseInfo(varMap map[string]string, releaseName string) error {
// https://github.com/middelink/go-parse-torrent-name
canonReleaseName := cleanReleaseName(releaseName)
log.Trace().Msgf("canonicalize release name: %v", canonReleaseName)
//release, err := releaseinfo.Parse(releaseName)
//if err != nil {
// return err
//}
//
//log.Trace().Msgf("release: %+v", release)
// https://github.com/autodl-community/autodl-irssi/pull/194/files
// year
//year, yearMatch := extractYear(canonReleaseName)
//if yearMatch {
// setVariable("year", year, varMap, nil)
//}
//log.Trace().Msgf("year matches: %v", year)
// season
//season, seasonMatch := extractSeason(canonReleaseName)
//if seasonMatch {
// // set var
// log.Trace().Msgf("season matches: %v", season)
//}
// episode
//episode, episodeMatch := extractEpisode(canonReleaseName)
//if episodeMatch {
// // set var
// log.Trace().Msgf("episode matches: %v", episode)
//}
// resolution
// source
// encoder
// ignore
// tv or movie
// music stuff
// game stuff
return nil
}
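
The resolution/source/encoder TODOs in extractReleaseInfo were never filled in. Purely for illustration, one way such tokens could be picked out of a canonicalized name with plain regular expressions (patterns and names below are made up for the sketch, not taken from the codebase):

```go
// Illustrative sketch only: naive extraction of a few common tokens from a
// cleaned release name. Not part of the commit; patterns are examples.
var (
	resolutionRx = regexp.MustCompile(`(?i)\b(2160p|1080p|720p|576p|480p)\b`)
	sourceRx     = regexp.MustCompile(`(?i)\b(WEB-?DL|WEBRip|BluRay|HDTV|DVDRip)\b`)
)

func extractBasicInfo(canonReleaseName string) (resolution, source string) {
	if m := resolutionRx.FindStringSubmatch(canonReleaseName); m != nil {
		resolution = m[1]
	}
	if m := sourceRx.FindStringSubmatch(canonReleaseName); m != nil {
		source = m[1]
	}
	return resolution, source
}
```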
func (s *service) mapToAnnounce(varMap map[string]string, ann *domain.Announce) error {
if torrentName, err := getFirstStringMapValue(varMap, []string{"torrentName"}); err != nil {
return errors.Wrap(err, "failed parsing required field")
} else {
ann.TorrentName = html.UnescapeString(torrentName)
}
if category, err := getFirstStringMapValue(varMap, []string{"category"}); err == nil {
ann.Category = category
}
if freeleech, err := getFirstStringMapValue(varMap, []string{"freeleech"}); err == nil {
ann.Freeleech = strings.EqualFold(freeleech, "freeleech") || strings.EqualFold(freeleech, "yes")
}
if freeleechPercent, err := getFirstStringMapValue(varMap, []string{"freeleechPercent"}); err == nil {
ann.FreeleechPercent = freeleechPercent
}
if uploader, err := getFirstStringMapValue(varMap, []string{"uploader"}); err == nil {
ann.Uploader = uploader
}
if scene, err := getFirstStringMapValue(varMap, []string{"scene"}); err == nil {
ann.Scene = strings.EqualFold(scene, "true") || strings.EqualFold(scene, "yes")
}
if year, err := getFirstStringMapValue(varMap, []string{"year"}); err == nil {
yearI, err := strconv.Atoi(year)
if err != nil {
//log.Debug().Msgf("bad year var: %v", year)
}
ann.Year = yearI
}
if tags, err := getFirstStringMapValue(varMap, []string{"releaseTags", "tags"}); err == nil {
ann.Tags = tags
}
return nil
}
func (s *service) mapToAnnounceObj(varMap map[string]string, ann *domain.Announce) error {
if torrentName, err := getFirstStringMapValue(varMap, []string{"torrentName", "$torrentName"}); err != nil {
return errors.Wrap(err, "failed parsing required field")
} else {
ann.TorrentName = html.UnescapeString(torrentName)
}
if torrentUrl, err := getFirstStringMapValue(varMap, []string{"torrentUrl", "$torrentUrl"}); err != nil {
return errors.Wrap(err, "failed parsing required field")
} else {
ann.TorrentUrl = torrentUrl
}
if releaseType, err := getFirstStringMapValue(varMap, []string{"releaseType", "$releaseType"}); err == nil {
ann.ReleaseType = releaseType
}
if name1, err := getFirstStringMapValue(varMap, []string{"name1", "$name1"}); err == nil {
ann.Name1 = name1
}
if name2, err := getFirstStringMapValue(varMap, []string{"name2", "$name2"}); err == nil {
ann.Name2 = name2
}
if category, err := getFirstStringMapValue(varMap, []string{"category", "$category"}); err == nil {
ann.Category = category
}
if freeleech, err := getFirstStringMapValue(varMap, []string{"freeleech", "$freeleech"}); err == nil {
ann.Freeleech = strings.EqualFold(freeleech, "true")
}
if uploader, err := getFirstStringMapValue(varMap, []string{"uploader", "$uploader"}); err == nil {
ann.Uploader = uploader
}
if tags, err := getFirstStringMapValue(varMap, []string{"$releaseTags", "$tags", "releaseTags", "tags"}); err == nil {
ann.Tags = tags
}
if cue, err := getFirstStringMapValue(varMap, []string{"cue", "$cue"}); err == nil {
ann.Cue = strings.EqualFold(cue, "true")
}
if logVar, err := getFirstStringMapValue(varMap, []string{"log", "$log"}); err == nil {
ann.Log = logVar
}
if media, err := getFirstStringMapValue(varMap, []string{"media", "$media"}); err == nil {
ann.Media = media
}
if format, err := getFirstStringMapValue(varMap, []string{"format", "$format"}); err == nil {
ann.Format = format
}
if bitRate, err := getFirstStringMapValue(varMap, []string{"bitrate", "$bitrate"}); err == nil {
ann.Bitrate = bitRate
}
if resolution, err := getFirstStringMapValue(varMap, []string{"resolution"}); err == nil {
ann.Resolution = resolution
}
if source, err := getFirstStringMapValue(varMap, []string{"source"}); err == nil {
ann.Source = source
}
if encoder, err := getFirstStringMapValue(varMap, []string{"encoder"}); err == nil {
ann.Encoder = encoder
}
if container, err := getFirstStringMapValue(varMap, []string{"container"}); err == nil {
ann.Container = container
}
if scene, err := getFirstStringMapValue(varMap, []string{"scene", "$scene"}); err == nil {
ann.Scene = strings.EqualFold(scene, "true")
}
if year, err := getFirstStringMapValue(varMap, []string{"year", "$year"}); err == nil {
yearI, err := strconv.Atoi(year)
if err != nil {
//log.Debug().Msgf("bad year var: %v", year)
}
ann.Year = yearI
}
//return &ann, nil
return nil
}
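
Both mapToAnnounce variants above run the year through strconv.Atoi and then use the result even when the conversion fails; the error branch is an empty block with the log call commented out. A safer shape, sketched with the helpers already present in this file, is to assign only on success:

```go
// Sketch: parse an optional numeric variable defensively and only report a
// value when it actually converts, instead of using a zero from a failed Atoi.
func parseYear(varMap map[string]string) (int, bool) {
	year, err := getFirstStringMapValue(varMap, []string{"year", "$year"})
	if err != nil {
		return 0, false
	}

	yearI, err := strconv.Atoi(year)
	if err != nil {
		log.Debug().Msgf("bad year var: %v", year)
		return 0, false
	}
	return yearI, true
}
```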
func setVariable(varName string, value string, varMap map[string]string, settings map[string]string) bool {
// check in instance options (auth)
//optVal, ok := settings[name]
//if !ok {
// //return ""
//}
////ret = optVal
//if optVal != "" {
// return false
//}
// else in varMap
val, ok := varMap[varName]
if !ok {
//return ""
varMap[varName] = value
} else {
// do something else?
}
log.Trace().Msgf("setVariable: %v", val)
return true
}
func getVariable(name string, varMap map[string]string, obj domain.Announce, settings map[string]string) string {
var ret string
// check in announce obj
// TODO reflect struct
// check in instance options (auth)
optVal, ok := settings[name]
if !ok {
//return ""
}
//ret = optVal
if optVal != "" {
return optVal
}
// else in varMap
val, ok := varMap[name]
if !ok {
//return ""
}
ret = val
return ret
}
//func contains(s []string, str string) bool {
// for _, v := range s {
// if v == str {
// return true
// }
// }
//
// return false
//}
func listContains(list []string, key string) bool {
for _, lKey := range list {
if strings.EqualFold(lKey, key) {
return true
}
}
return false
}
func getStringMapValue(stringMap map[string]string, key string) (string, error) {
lowerKey := strings.ToLower(key)
// case sensitive match
//if caseSensitive {
// v, ok := stringMap[key]
// if !ok {
// return "", fmt.Errorf("key was not found in map: %q", key)
// }
//
// return v, nil
//}
// case insensitive match
for k, v := range stringMap {
if strings.ToLower(k) == lowerKey {
return v, nil
}
}
return "", fmt.Errorf("key was not found in map: %q", lowerKey)
}
func getFirstStringMapValue(stringMap map[string]string, keys []string) (string, error) {
for _, k := range keys {
if val, err := getStringMapValue(stringMap, k); err == nil {
return val, nil
}
}
return "", fmt.Errorf("key were not found in map: %q", strings.Join(keys, ", "))
}
func removeElement(s []string, i int) ([]string, error) {
// s is [1,2,3,4,5,6], i is 2


@@ -1,9 +1,5 @@
package announce
import (
"testing"
)
//func Test_service_OnNewLine(t *testing.T) {
// tfiles := tracker.NewService()
// tfiles.ReadFiles()
@@ -287,7 +283,7 @@ var (
// want: &domain.Announce{
// Freeleech: false,
// Category: "PC :: Iso",
// TorrentName: "debian live 10 6 0 amd64 standard iso",
// Name: "debian live 10 6 0 amd64 standard iso",
// Uploader: "Anonymous",
// TorrentUrl: "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
// Site: "T01",
@@ -307,7 +303,7 @@ var (
// want: &domain.Announce{
// Freeleech: true,
// Category: "PC :: Iso",
// TorrentName: "debian live 10 6 0 amd64 standard iso",
// Name: "debian live 10 6 0 amd64 standard iso",
// Uploader: "Anonymous",
// TorrentUrl: "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
// Site: "T01",
@@ -328,7 +324,7 @@ var (
// Name1: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name2: "Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Freeleech: false,
// TorrentName: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// TorrentUrl: "https://tracker05.test/torrents.php?action=download&id=1382972&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
// Site: "T05",
// Tags: "blues, rock, classic rock,jazz,blues rock,electric blues",
@@ -357,7 +353,7 @@ var (
// Name1: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name2: "Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Freeleech: false,
// TorrentName: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// TorrentUrl: "https://tracker05.test/torrents.php?action=download&id=29910415&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
// Site: "T05",
// Tags: "1990s, folk, world music, celtic",
@@ -421,7 +417,7 @@ var (
// want: &domain.Announce{
// Freeleech: false,
// Category: "PC :: Iso",
// TorrentName: "debian live 10 6 0 amd64 standard iso",
// Name: "debian live 10 6 0 amd64 standard iso",
// Uploader: "Anonymous",
// TorrentUrl: "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
// Site: "T01",
@@ -441,7 +437,7 @@ var (
// want: &domain.Announce{
// Freeleech: true,
// Category: "PC :: Iso",
// TorrentName: "debian live 10 6 0 amd64 standard iso",
// Name: "debian live 10 6 0 amd64 standard iso",
// Uploader: "Anonymous",
// TorrentUrl: "https://www.tracker01.test/rss/download/263302/000aaa111bbb222ccc333ddd/debian+live+10+6+0+amd64+standard+iso.torrent",
// Site: "T01",
@@ -462,7 +458,7 @@ var (
// Name1: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name2: "Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Freeleech: false,
// TorrentName: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name: "Roy Buchanan - Loading Zone [1977] - FLAC / Lossless / Log / 100% / Cue / CD",
// TorrentUrl: "https://tracker05.test/torrents.php?action=download&id=1382972&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
// Site: "T05",
// Tags: "blues, rock, classic rock,jazz,blues rock,electric blues",
@@ -496,7 +492,7 @@ var (
// Name1: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name2: "Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Freeleech: false,
// TorrentName: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// Name: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// TorrentUrl: "https://tracker05.test/torrents.php?action=download&id=29910415&authkey=000aaa111bbb222ccc333ddd&torrent_pass=eee444fff555ggg666hhh777",
// Site: "T05",
// Tags: "1990s, folk, world music, celtic",
@@ -534,52 +530,52 @@ var (
// }
//}
func Test_service_extractReleaseInfo(t *testing.T) {
type fields struct {
name string
queues map[string]chan string
}
type args struct {
varMap map[string]string
releaseName string
}
tests := []struct {
name string
fields fields
args args
wantErr bool
}{
{
name: "test_01",
fields: fields{
name: "", queues: nil,
},
args: args{
varMap: map[string]string{},
releaseName: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
},
wantErr: false,
},
{
name: "test_02",
fields: fields{
name: "", queues: nil,
},
args: args{
varMap: map[string]string{},
releaseName: "Lost S06E07 720p WEB-DL DD 5.1 H.264 - LP",
},
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
s := &service{
queues: tt.fields.queues,
}
if err := s.extractReleaseInfo(tt.args.varMap, tt.args.releaseName); (err != nil) != tt.wantErr {
t.Errorf("extractReleaseInfo() error = %v, wantErr %v", err, tt.wantErr)
}
})
}
}
//func Test_service_extractReleaseInfo(t *testing.T) {
// type fields struct {
// name string
// queues map[string]chan string
// }
// type args struct {
// varMap map[string]string
// releaseName string
// }
// tests := []struct {
// name string
// fields fields
// args args
// wantErr bool
// }{
// {
// name: "test_01",
// fields: fields{
// name: "", queues: nil,
// },
// args: args{
// varMap: map[string]string{},
// releaseName: "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD",
// },
// wantErr: false,
// },
// {
// name: "test_02",
// fields: fields{
// name: "", queues: nil,
// },
// args: args{
// varMap: map[string]string{},
// releaseName: "Lost S06E07 720p WEB-DL DD 5.1 H.264 - LP",
// },
// wantErr: false,
// },
// }
// for _, tt := range tests {
// t.Run(tt.name, func(t *testing.T) {
// s := &service{
// queues: tt.fields.queues,
// }
// if err := s.extractReleaseInfo(tt.args.varMap, tt.args.releaseName); (err != nil) != tt.wantErr {
// t.Errorf("extractReleaseInfo() error = %v, wantErr %v", err, tt.wantErr)
// }
// })
// }
//}
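
With the Announce-based tests now commented out, a natural follow-up would exercise the new Release path directly. A rough table-driven sketch, assuming domain.NewRelease and the MapVars/Parse methods keep the signatures shown in the parse.go diff above (the domain import would need to be added to this test file):

```go
// Hypothetical follow-up test; relies on the domain.NewRelease / MapVars /
// Parse calls visible in this commit. Field names beyond TorrentName are
// assumptions, so only errors are asserted here.
func Test_release_parse_sketch(t *testing.T) {
	tests := []struct {
		name    string
		indexer string
		line    string
		vars    map[string]string
		wantErr bool
	}{
		{
			name:    "music_release",
			indexer: "t05",
			line:    "raw announce line",
			vars:    map[string]string{"torrentName": "Heirloom - Road to the Isles [1998] [Album] - FLAC / Lossless / Log / 100% / Cue / CD"},
			wantErr: false,
		},
		{
			name:    "tv_release",
			indexer: "t01",
			line:    "raw announce line",
			vars:    map[string]string{"torrentName": "Lost S06E07 720p WEB-DL DD 5.1 H.264 - LP"},
			wantErr: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			r, err := domain.NewRelease(tt.indexer, tt.line)
			if err != nil {
				t.Fatalf("NewRelease() error = %v", err)
			}
			if err := r.MapVars(tt.vars); err != nil {
				t.Fatalf("MapVars() error = %v", err)
			}
			if err := r.Parse(); (err != nil) != tt.wantErr {
				t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}
```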


@@ -1,11 +1,11 @@
package announce
import (
"context"
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/internal/filter"
"github.com/autobrr/autobrr/internal/indexer"
"github.com/autobrr/autobrr/internal/release"
"github.com/rs/zerolog/log"
)
@@ -36,54 +36,73 @@ func NewService(filterService filter.Service, indexerSvc indexer.Service, releas
// Parse announce line
func (s *service) Parse(announceID string, msg string) error {
ctx := context.Background()
// make simpler by injecting indexer, or indexerdefinitions
// announceID (server:channel:announcer)
def := s.indexerSvc.GetIndexerByAnnounce(announceID)
if def == nil {
definition := s.indexerSvc.GetIndexerByAnnounce(announceID)
if definition == nil {
log.Debug().Msgf("could not find indexer definition: %v", announceID)
return nil
}
announce := domain.Announce{
Site: def.Identifier,
Line: msg,
newRelease, err := domain.NewRelease(definition.Identifier, msg)
if err != nil {
log.Error().Err(err).Msg("could not create new release")
return err
}
// parse lines
if def.Parse.Type == "single" {
err := s.parseLineSingle(def, &announce, msg)
if definition.Parse.Type == "single" {
err = s.parseLineSingle(definition, newRelease, msg)
if err != nil {
log.Debug().Msgf("could not parse single line: %v", msg)
log.Error().Err(err).Msgf("could not parse single line: %v", msg)
return err
}
}
// implement multiline parsing
// find filter
foundFilter, err := s.filterSvc.FindByIndexerIdentifier(announce)
// TODO implement multiline parsing
filterOK, foundFilter, err := s.filterSvc.FindAndCheckFilters(newRelease)
if err != nil {
log.Error().Err(err).Msg("could not find filter")
return err
}
// no filter found, lets return
if foundFilter == nil {
// no foundFilter found, lets return
if !filterOK || foundFilter == nil {
log.Trace().Msg("no matching filter found")
// TODO check in config for "Save all releases"
// Save as rejected
//newRelease.FilterStatus = domain.ReleaseStatusFilterRejected
//err = s.releaseSvc.Store(ctx, newRelease)
//if err != nil {
// log.Error().Err(err).Msgf("error writing release to database: %+v", newRelease)
// return nil
//}
return nil
}
announce.Filter = foundFilter
log.Trace().Msgf("announce: %+v", announce)
// save release
newRelease.Filter = foundFilter
newRelease.FilterName = foundFilter.Name
newRelease.FilterID = foundFilter.ID
log.Info().Msgf("Matched '%v' (%v) for %v", announce.TorrentName, announce.Filter.Name, announce.Site)
newRelease.FilterStatus = domain.ReleaseStatusFilterApproved
err = s.releaseSvc.Store(ctx, newRelease)
if err != nil {
log.Error().Err(err).Msgf("error writing release to database: %+v", newRelease)
return nil
}
// match release
log.Info().Msgf("Matched '%v' (%v) for %v", newRelease.TorrentName, newRelease.Filter.Name, newRelease.Indexer)
// process release
go func() {
err = s.releaseSvc.Process(announce)
err = s.releaseSvc.Process(*newRelease)
if err != nil {
log.Error().Err(err).Msgf("could not process release: %+v", announce)
log.Error().Err(err).Msgf("could not process release: %+v", newRelease)
}
}()
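
One detail worth flagging in the new processing goroutine: it assigns to the err variable captured from the enclosing Parse scope. Giving the closure its own local error keeps the asynchronous path independent of Parse's return value, for example:

```go
// Sketch: keep the goroutine's error local instead of writing to the
// enclosing function's err variable.
go func() {
	if procErr := s.releaseSvc.Process(*newRelease); procErr != nil {
		log.Error().Err(procErr).Msgf("could not process release: %+v", newRelease)
	}
}()
```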