mirror of
https://github.com/idanoo/autobrr
synced 2025-07-24 17:29:12 +00:00
Feature: Support multiline irc parsing (#39)
* feat: initial multiline support * refactor: handle multiple indexers per network * wip: setup indexer * build: add docker compose for testing * chore: remove temp mock indexers * chore: update deps * refactor: update and store network handler * build: update test compose * chore: minor cleanup
This commit is contained in:
parent
506cef6f0f
commit
c4d580eb03
17 changed files with 1100 additions and 1042 deletions
|
@ -1,110 +0,0 @@
|
|||
package announce
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/autobrr/autobrr/internal/domain"
|
||||
"github.com/autobrr/autobrr/internal/filter"
|
||||
"github.com/autobrr/autobrr/internal/indexer"
|
||||
"github.com/autobrr/autobrr/internal/release"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
// Service parses raw IRC announce lines into releases and runs them
// through filtering and processing.
type Service interface {
	// Parse handles a single announce line msg for the source identified by
	// announceID (format: server:channel:announcer). Unknown announce IDs
	// are not treated as errors.
	Parse(announceID string, msg string) error
}
|
||||
|
||||
// service is the announce.Service implementation, wiring together the
// filter, indexer and release services.
type service struct {
	filterSvc  filter.Service  // matches parsed releases against user filters
	indexerSvc indexer.Service // resolves announceID to an indexer definition
	releaseSvc release.Service // stores and processes matched releases
	// queues holds per-source line channels.
	// NOTE(review): never initialized in NewService and unused in this file —
	// presumably reserved for the multiline parsing TODO; confirm before use
	// (writing to the nil map would panic).
	queues map[string]chan string
}
|
||||
|
||||
func NewService(filterService filter.Service, indexerSvc indexer.Service, releaseService release.Service) Service {
|
||||
|
||||
//queues := make(map[string]chan string)
|
||||
//for _, channel := range tinfo {
|
||||
//
|
||||
//}
|
||||
|
||||
return &service{
|
||||
filterSvc: filterService,
|
||||
indexerSvc: indexerSvc,
|
||||
releaseSvc: releaseService,
|
||||
}
|
||||
}
|
||||
|
||||
// Parse announce line
|
||||
func (s *service) Parse(announceID string, msg string) error {
|
||||
ctx := context.Background()
|
||||
// make simpler by injecting indexer, or indexerdefinitions
|
||||
|
||||
// announceID (server:channel:announcer)
|
||||
definition := s.indexerSvc.GetIndexerByAnnounce(announceID)
|
||||
if definition == nil {
|
||||
log.Debug().Msgf("could not find indexer definition: %v", announceID)
|
||||
return nil
|
||||
}
|
||||
|
||||
newRelease, err := domain.NewRelease(definition.Identifier, msg)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("could not create new release")
|
||||
return err
|
||||
}
|
||||
|
||||
// parse lines
|
||||
if definition.Parse.Type == "single" {
|
||||
err = s.parseLineSingle(definition, newRelease, msg)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msgf("could not parse single line: %v", msg)
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// TODO implement multiline parsing
|
||||
|
||||
filterOK, foundFilter, err := s.filterSvc.FindAndCheckFilters(newRelease)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("could not find filter")
|
||||
return err
|
||||
}
|
||||
|
||||
// no foundFilter found, lets return
|
||||
if !filterOK || foundFilter == nil {
|
||||
log.Trace().Msg("no matching filter found")
|
||||
|
||||
// TODO check in config for "Save all releases"
|
||||
// Save as rejected
|
||||
//newRelease.FilterStatus = domain.ReleaseStatusFilterRejected
|
||||
//err = s.releaseSvc.Store(ctx, newRelease)
|
||||
//if err != nil {
|
||||
// log.Error().Err(err).Msgf("error writing release to database: %+v", newRelease)
|
||||
// return nil
|
||||
//}
|
||||
return nil
|
||||
}
|
||||
|
||||
// save release
|
||||
newRelease.Filter = foundFilter
|
||||
newRelease.FilterName = foundFilter.Name
|
||||
newRelease.FilterID = foundFilter.ID
|
||||
|
||||
newRelease.FilterStatus = domain.ReleaseStatusFilterApproved
|
||||
err = s.releaseSvc.Store(ctx, newRelease)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msgf("error writing release to database: %+v", newRelease)
|
||||
return nil
|
||||
}
|
||||
|
||||
log.Info().Msgf("Matched '%v' (%v) for %v", newRelease.TorrentName, newRelease.Filter.Name, newRelease.Indexer)
|
||||
|
||||
// process release
|
||||
go func() {
|
||||
err = s.releaseSvc.Process(*newRelease)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msgf("could not process release: %+v", newRelease)
|
||||
}
|
||||
}()
|
||||
|
||||
return nil
|
||||
}
|
Loading…
Add table
Add a link
Reference in a new issue