feat(macros): add IndexerName (#1511)

* feat(macros): add IndexerName

* fix: tests

* fix: tests
Authored by ze0s on 2024-04-16 17:35:17 +02:00, committed by GitHub
parent c43e2c76d6
commit 3c3b47fa10
37 changed files with 310 additions and 235 deletions
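
The user-facing piece of this commit is the pair of macro fields IndexerName and IndexerIdentifier, populated from the release's new IndexerMinimal value, while the existing Indexer macro keeps resolving to the identifier for backwards compatibility. The sketch below is not autobrr code; it mirrors only the indexer-related Macro fields with a local struct and Go's text/template to show what the new macros resolve to, matching the test_args_indexer case added further down in domain/macros_test.go.

package main

import (
	"bytes"
	"fmt"
	"text/template"
)

// macroFields mirrors the indexer-related fields added to domain.Macro in this
// commit; the real struct carries many more release fields.
type macroFields struct {
	Indexer           string // identifier, kept for backwards compatibility, e.g. "mock1"
	IndexerName       string // new: indexer display name, e.g. "Mock Indexer"
	IndexerIdentifier string // new: explicit identifier alias
}

func main() {
	m := macroFields{Indexer: "mock1", IndexerName: "Mock Indexer", IndexerIdentifier: "mock1"}

	// Templates referencing the new fields now resolve to the display name
	// and identifier respectively.
	tmpl := template.Must(template.New("macro").Parse("indexer={{.IndexerName}} ({{.IndexerIdentifier}})"))

	var out bytes.Buffer
	if err := tmpl.Execute(&out, m); err != nil {
		panic(err)
	}

	fmt.Println(out.String()) // indexer=Mock Indexer (mock1)
}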


@ -48,7 +48,7 @@ func (s *service) execCmd(ctx context.Context, action *domain.Action, release do
duration := time.Since(start)
s.log.Info().Msgf("executed command: '%s', args: '%s' %s,%s, total time %v", cmd, args, release.TorrentName, release.Indexer, duration)
s.log.Info().Msgf("executed command: '%s', args: '%s' %s,%s, total time %v", cmd, args, release.TorrentName, release.Indexer.Name, duration)
return nil
}


@ -81,7 +81,11 @@ func Test_service_execCmd(t *testing.T) {
release: domain.Release{
TorrentName: "This is a test",
TorrentTmpFile: "tmp-10000",
Indexer: "mock",
Indexer: domain.IndexerMinimal{
ID: 0,
Name: "Mock Indexer",
Identifier: "mock",
},
},
action: &domain.Action{
Name: "echo",


@ -59,7 +59,7 @@ func (s *service) lidarr(ctx context.Context, action *domain.Action, release dom
DownloadUrl: release.DownloadURL,
MagnetUrl: release.MagnetURI,
Size: int64(release.Size),
Indexer: release.Indexer,
Indexer: release.Indexer.Identifier,
DownloadClientId: externalClientId,
DownloadClient: externalClient,
DownloadProtocol: string(release.Protocol),


@ -58,7 +58,7 @@ func (s *service) radarr(ctx context.Context, action *domain.Action, release dom
DownloadUrl: release.DownloadURL,
MagnetUrl: release.MagnetURI,
Size: int64(release.Size),
Indexer: release.Indexer,
Indexer: release.Indexer.Identifier,
DownloadClientId: externalClientId,
DownloadClient: externalClient,
DownloadProtocol: string(release.Protocol),


@ -58,7 +58,7 @@ func (s *service) readarr(ctx context.Context, action *domain.Action, release do
DownloadUrl: release.DownloadURL,
MagnetUrl: release.MagnetURI,
Size: int64(release.Size),
Indexer: release.Indexer,
Indexer: release.Indexer.Identifier,
DownloadClientId: externalClientId,
DownloadClient: externalClient,
DownloadProtocol: string(release.Protocol),


@ -98,7 +98,7 @@ func (s *service) RunAction(ctx context.Context, action *domain.Action, release
Event: domain.NotificationEventPushApproved,
ReleaseName: release.TorrentName,
Filter: release.FilterName,
Indexer: release.Indexer,
Indexer: release.Indexer.Name,
InfoHash: release.TorrentHash,
Size: release.Size,
Status: domain.ReleasePushStatusApproved,


@ -58,7 +58,7 @@ func (s *service) sonarr(ctx context.Context, action *domain.Action, release dom
DownloadUrl: release.DownloadURL,
MagnetUrl: release.MagnetURI,
Size: int64(release.Size),
Indexer: release.Indexer,
Indexer: release.Indexer.Identifier,
DownloadClientId: externalClientId,
DownloadClient: externalClient,
DownloadProtocol: string(release.Protocol),


@ -58,7 +58,7 @@ func (s *service) whisparr(ctx context.Context, action *domain.Action, release d
DownloadUrl: release.DownloadURL,
MagnetUrl: release.MagnetURI,
Size: int64(release.Size),
Indexer: release.Indexer,
Indexer: release.Indexer.Identifier,
DownloadClientId: externalClientId,
DownloadClient: externalClient,
DownloadProtocol: string(release.Protocol),


@ -103,7 +103,7 @@ func (a *announceProcessor) processQueue(queue chan string) {
continue
}
rls := domain.NewRelease(a.indexer.Identifier)
rls := domain.NewRelease(domain.IndexerMinimal{ID: a.indexer.ID, Name: a.indexer.Name, Identifier: a.indexer.Identifier})
rls.Protocol = domain.ReleaseProtocol(a.indexer.Protocol)
// on lines matched


@ -32,7 +32,9 @@ func (r *FeedRepo) FindByID(ctx context.Context, id int) (*domain.Feed, error) {
queryBuilder := r.db.squirrel.
Select(
"f.id",
"i.id",
"i.identifier",
"i.name",
"f.name",
"f.type",
"f.enabled",
@ -64,7 +66,7 @@ func (r *FeedRepo) FindByID(ctx context.Context, id int) (*domain.Feed, error) {
var apiKey, cookie, settings sql.NullString
if err := row.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &f.MaxAge, &apiKey, &cookie, &settings, &f.CreatedAt, &f.UpdatedAt); err != nil {
if err := row.Scan(&f.ID, &f.Indexer.ID, &f.Indexer.Identifier, &f.Indexer.Name, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &f.MaxAge, &apiKey, &cookie, &settings, &f.CreatedAt, &f.UpdatedAt); err != nil {
return nil, errors.Wrap(err, "error scanning row")
}
@ -87,7 +89,9 @@ func (r *FeedRepo) FindByIndexerIdentifier(ctx context.Context, indexer string)
queryBuilder := r.db.squirrel.
Select(
"f.id",
"i.id",
"i.identifier",
"i.name",
"f.name",
"f.type",
"f.enabled",
@ -119,7 +123,7 @@ func (r *FeedRepo) FindByIndexerIdentifier(ctx context.Context, indexer string)
var apiKey, cookie, settings sql.NullString
if err := row.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &f.MaxAge, &apiKey, &cookie, &settings, &f.CreatedAt, &f.UpdatedAt); err != nil {
if err := row.Scan(&f.ID, &f.Indexer.ID, &f.Indexer.Identifier, &f.Indexer.Name, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &f.MaxAge, &apiKey, &cookie, &settings, &f.CreatedAt, &f.UpdatedAt); err != nil {
return nil, errors.Wrap(err, "error scanning row")
}
@ -140,7 +144,9 @@ func (r *FeedRepo) Find(ctx context.Context) ([]domain.Feed, error) {
queryBuilder := r.db.squirrel.
Select(
"f.id",
"i.id",
"i.identifier",
"i.name",
"f.name",
"f.type",
"f.enabled",
@ -179,7 +185,7 @@ func (r *FeedRepo) Find(ctx context.Context) ([]domain.Feed, error) {
var apiKey, cookie, lastRunData, settings sql.NullString
var lastRun sql.NullTime
if err := rows.Scan(&f.ID, &f.Indexer, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &f.MaxAge, &apiKey, &cookie, &lastRun, &lastRunData, &settings, &f.CreatedAt, &f.UpdatedAt); err != nil {
if err := rows.Scan(&f.ID, &f.Indexer.ID, &f.Indexer.Identifier, &f.Indexer.Name, &f.Name, &f.Type, &f.Enabled, &f.URL, &f.Interval, &f.Timeout, &f.MaxAge, &apiKey, &cookie, &lastRun, &lastRunData, &settings, &f.CreatedAt, &f.UpdatedAt); err != nil {
return nil, errors.Wrap(err, "error scanning row")
}


@ -153,7 +153,6 @@ func (r *IndexerRepo) FindByID(ctx context.Context, id int) (*domain.Indexer, er
i.Settings = settingsMap
return &i, nil
}
func (r *IndexerRepo) FindByFilterID(ctx context.Context, id int) ([]domain.Indexer, error) {


@ -39,7 +39,7 @@ func (repo *ReleaseRepo) Store(ctx context.Context, r *domain.Release) error {
queryBuilder := repo.db.squirrel.
Insert("release").
Columns("filter_status", "rejections", "indexer", "filter", "protocol", "implementation", "timestamp", "group_id", "torrent_id", "info_url", "download_url", "torrent_name", "size", "title", "category", "season", "episode", "year", "resolution", "source", "codec", "container", "hdr", "release_group", "proper", "repack", "website", "type", "origin", "tags", "uploader", "pre_time", "filter_id").
Values(r.FilterStatus, pq.Array(r.Rejections), r.Indexer, r.FilterName, r.Protocol, r.Implementation, r.Timestamp.Format(time.RFC3339), r.GroupID, r.TorrentID, r.InfoURL, r.DownloadURL, r.TorrentName, r.Size, r.Title, r.Category, r.Season, r.Episode, r.Year, r.Resolution, r.Source, codecStr, r.Container, hdrStr, r.Group, r.Proper, r.Repack, r.Website, r.Type, r.Origin, pq.Array(r.Tags), r.Uploader, r.PreTime, r.FilterID).
Values(r.FilterStatus, pq.Array(r.Rejections), r.Indexer.Identifier, r.FilterName, r.Protocol, r.Implementation, r.Timestamp.Format(time.RFC3339), r.GroupID, r.TorrentID, r.InfoURL, r.DownloadURL, r.TorrentName, r.Size, r.Title, r.Category, r.Season, r.Episode, r.Year, r.Resolution, r.Source, codecStr, r.Container, hdrStr, r.Group, r.Proper, r.Repack, r.Website, r.Type, r.Origin, pq.Array(r.Tags), r.Uploader, r.PreTime, r.FilterID).
Suffix("RETURNING id").RunWith(repo.db.handler)
// return values
@ -291,7 +291,7 @@ func (repo *ReleaseRepo) findReleases(ctx context.Context, tx *Tx, params domain
continue
}
rls.Indexer = rlsindexer.String
rls.Indexer.Identifier = rlsindexer.String
rls.FilterName = rlsfilter.String
rls.ActionStatus = make([]domain.ReleaseActionStatus, 0)
rls.InfoURL = infoUrl.String
@ -443,7 +443,7 @@ func (repo *ReleaseRepo) Get(ctx context.Context, req *domain.GetReleaseRequest)
return nil, errors.Wrap(err, "error scanning row")
}
rls.Indexer = indexerName.String
rls.Indexer.Identifier = indexerName.String
rls.FilterName = filterName.String
rls.FilterID = int(filterId.Int64)
rls.ActionStatus = make([]domain.ReleaseActionStatus, 0)


@ -18,9 +18,13 @@ import (
func getMockRelease() *domain.Release {
return &domain.Release{
FilterStatus: domain.ReleaseStatusFilterApproved,
Rejections: []string{"test", "not-a-match"},
Indexer: "BTN",
FilterStatus: domain.ReleaseStatusFilterApproved,
Rejections: []string{"test", "not-a-match"},
Indexer: domain.IndexerMinimal{
ID: 0,
Name: "BTN",
Identifier: "btn",
},
FilterName: "ExampleFilter",
Protocol: domain.ReleaseProtocolTorrent,
Implementation: domain.ReleaseImplementationIRC,


@ -36,7 +36,7 @@ type FeedRepo interface {
type Feed struct {
ID int `json:"id"`
Name string `json:"name"`
Indexer string `json:"indexer"`
Indexer IndexerMinimal `json:"indexer"`
Type string `json:"type"`
Enabled bool `json:"enabled"`
URL string `json:"url"`
@ -50,7 +50,6 @@ type Feed struct {
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
IndexerID int `json:"indexer_id,omitempty"`
Indexerr FeedIndexer `json:"-"`
LastRun time.Time `json:"last_run"`
LastRunData string `json:"last_run_data"`
NextRun time.Time `json:"next_run"`


@ -35,6 +35,12 @@ type Indexer struct {
Settings map[string]string `json:"settings,omitempty"`
}
type IndexerMinimal struct {
ID int `json:"id"`
Name string `json:"name"`
Identifier string `json:"identifier"`
}
type IndexerDefinition struct {
ID int `json:"id,omitempty"`
Name string `json:"name"`

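The new IndexerMinimal type above replaces the bare identifier string on both Release and Feed. A minimal sketch (local mirrors of the assumed type shapes, not the actual internal/domain package) of how it flows through NewRelease, so the display name and the identifier are both available downstream -- logs and notifications use Name, while *arr pushes and API lookups use Identifier:

package main

import "fmt"

// Local mirrors of the domain types touched by this commit, trimmed to the
// relevant fields (assumed shape for illustration only).
type IndexerMinimal struct {
	ID         int
	Name       string
	Identifier string
}

type Release struct {
	Indexer      IndexerMinimal
	FilterStatus string
}

// NewRelease now takes the minimal indexer instead of a bare identifier string.
func NewRelease(indexer IndexerMinimal) *Release {
	return &Release{Indexer: indexer, FilterStatus: "PENDING"}
}

func main() {
	rls := NewRelease(IndexerMinimal{ID: 0, Name: "Mock Indexer", Identifier: "mock"})

	fmt.Println(rls.Indexer.Name)       // used in logs and notifications
	fmt.Println(rls.Indexer.Identifier) // used for *arr pushes and API lookups
}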

@ -254,7 +254,7 @@ func TestIRCParserGazelleGames_Parse(t *testing.T) {
{
name: "",
args: args{
rls: NewRelease("ggn"),
rls: NewRelease(IndexerMinimal{0, "GazelleGames", "ggn"}),
vars: map[string]string{
"torrentName": "Trouble.in.Paradise-GROUP in Trouble in Paradise",
},
@ -267,7 +267,7 @@ func TestIRCParserGazelleGames_Parse(t *testing.T) {
{
name: "",
args: args{
rls: NewRelease("ggn"),
rls: NewRelease(IndexerMinimal{0, "GazelleGames", "ggn"}),
vars: map[string]string{
"torrentName": "F.I.L.F. Game Walkthrough v.0.18 in F.I.L.F.",
},
@ -280,7 +280,7 @@ func TestIRCParserGazelleGames_Parse(t *testing.T) {
{
name: "",
args: args{
rls: NewRelease("ggn"),
rls: NewRelease(IndexerMinimal{0, "GazelleGames", "ggn"}),
vars: map[string]string{
"torrentName": "Ni no Kuni: Dominion of the Dark Djinn in Ni no Kuni: Dominion of the Dark Djinn",
},
@ -293,7 +293,7 @@ func TestIRCParserGazelleGames_Parse(t *testing.T) {
{
name: "",
args: args{
rls: NewRelease("ggn"),
rls: NewRelease(IndexerMinimal{0, "GazelleGames", "ggn"}),
vars: map[string]string{
"torrentName": "Year 2 Remastered by Insaneintherainmusic",
"category": "OST",
@ -332,7 +332,7 @@ func TestIRCParserOrpheus_Parse(t *testing.T) {
{
name: "",
args: args{
rls: NewRelease("ops"),
rls: NewRelease(IndexerMinimal{0, "Orpheus", "ops"}),
vars: map[string]string{
"torrentName": "Busta Rhymes BEACH BALL (feat. BIA) [2023] [Single] WEB/FLAC/24bit Lossless",
"title": "Busta Rhymes BEACH BALL (feat. BIA)",
@ -348,7 +348,7 @@ func TestIRCParserOrpheus_Parse(t *testing.T) {
{
name: "",
args: args{
rls: NewRelease("ops"),
rls: NewRelease(IndexerMinimal{0, "Orpheus", "ops"}),
vars: map[string]string{
"torrentName": "Busta Rhymes BEACH BALL (feat. BIA) [2023] [Single] CD/FLAC/Lossless",
"title": "Busta Rhymes BEACH BALL (feat. BIA)",


@ -28,6 +28,8 @@ type Macro struct {
DownloadUrl string
InfoUrl string
Indexer string
IndexerName string
IndexerIdentifier string
Title string
Type string
Category string
@ -67,7 +69,9 @@ func NewMacro(release Release) Macro {
GroupID: release.GroupID,
InfoUrl: release.InfoURL,
DownloadUrl: release.DownloadURL,
Indexer: release.Indexer,
Indexer: release.Indexer.Identifier,
IndexerName: release.Indexer.Name,
IndexerIdentifier: release.Indexer.Identifier,
Title: release.Title,
Type: release.Type,
Category: release.Category,


@ -36,7 +36,7 @@ func TestMacros_Parse(t *testing.T) {
release: Release{
TorrentName: "This movie 2021",
TorrentTmpFile: "/tmp/a-temporary-file.torrent",
Indexer: "mock1",
Indexer: IndexerMinimal{0, "Mock Indexer", "mock1"},
},
args: args{text: "Print mee {{.TorrentPathName}}"},
want: "Print mee /tmp/a-temporary-file.torrent",
@ -47,7 +47,7 @@ func TestMacros_Parse(t *testing.T) {
release: Release{
TorrentName: "This movie 2021",
TorrentTmpFile: "/tmp/a-temporary-file.torrent",
Indexer: "mock1",
Indexer: IndexerMinimal{0, "Mock Indexer", "mock1"},
},
args: args{text: "Print mee {{TorrentPathName}}"},
want: "",
@ -58,7 +58,7 @@ func TestMacros_Parse(t *testing.T) {
release: Release{
TorrentName: "This movie 2021",
TorrentTmpFile: "/tmp/a-temporary-file.torrent",
Indexer: "mock1",
Indexer: IndexerMinimal{0, "Mock Indexer", "mock1"},
},
args: args{text: "add {{.TorrentPathName}} --category test"},
want: "add /tmp/a-temporary-file.torrent --category test",
@ -68,7 +68,7 @@ func TestMacros_Parse(t *testing.T) {
name: "test_program_arg_bad",
release: Release{
TorrentTmpFile: "/tmp/a-temporary-file.torrent",
Indexer: "mock1",
Indexer: IndexerMinimal{0, "Mock Indexer", "mock1"},
},
args: args{text: "add {{.TorrenttPathName}} --category test"},
want: "",
@ -79,7 +79,7 @@ func TestMacros_Parse(t *testing.T) {
release: Release{
TorrentName: "This movie 2021",
TorrentTmpFile: "/tmp/a-temporary-file.torrent",
Indexer: "mock1",
Indexer: IndexerMinimal{0, "Mock Indexer", "mock1"},
},
args: args{text: "add {{.TorrentPathName}} --category test --other {{.TorrentName}}"},
want: "add /tmp/a-temporary-file.torrent --category test --other This movie 2021",
@ -90,7 +90,7 @@ func TestMacros_Parse(t *testing.T) {
release: Release{
TorrentName: "This movie 2021",
DownloadURL: "https://some.site/download/fakeid",
Indexer: "mock1",
Indexer: IndexerMinimal{0, "Mock Indexer", "mock1"},
},
args: args{text: "{{.TorrentName}} {{.TorrentUrl}} SOME_LONG_TOKEN"},
want: "This movie 2021 https://some.site/download/fakeid SOME_LONG_TOKEN",
@ -101,7 +101,7 @@ func TestMacros_Parse(t *testing.T) {
release: Release{
TorrentName: "This movie 2021",
DownloadURL: "https://some.site/download/fakeid",
Indexer: "mock1",
Indexer: IndexerMinimal{0, "Mock Indexer", "mock1"},
},
args: args{text: "{{.Indexer}} {{.TorrentName}} {{.TorrentUrl}} SOME_LONG_TOKEN"},
want: "mock1 This movie 2021 https://some.site/download/fakeid SOME_LONG_TOKEN",
@ -112,7 +112,7 @@ func TestMacros_Parse(t *testing.T) {
release: Release{
TorrentName: "This movie 2021",
DownloadURL: "https://some.site/download/fakeid",
Indexer: "mock1",
Indexer: IndexerMinimal{0, "Mock Indexer", "mock1"},
},
args: args{text: "{{.Indexer}}-race"},
want: "mock1-race",
@ -123,7 +123,7 @@ func TestMacros_Parse(t *testing.T) {
release: Release{
TorrentName: "This movie 2021",
DownloadURL: "https://some.site/download/fakeid",
Indexer: "mock1",
Indexer: IndexerMinimal{0, "Mock Indexer", "mock1"},
},
args: args{text: "{{.Indexer}}-{{.CurrentYear}}-race"},
want: fmt.Sprintf("mock1-%v-race", currentTime.Year()),
@ -134,7 +134,7 @@ func TestMacros_Parse(t *testing.T) {
release: Release{
TorrentName: "This movie 2021",
DownloadURL: "https://some.site/download/fakeid",
Indexer: "mock1",
Indexer: IndexerMinimal{0, "Mock Indexer", "mock1"},
Resolution: "2160p",
HDR: []string{"DV"},
},
@ -147,7 +147,7 @@ func TestMacros_Parse(t *testing.T) {
release: Release{
TorrentName: "This movie 2021",
DownloadURL: "https://some.site/download/fakeid",
Indexer: "mock1",
Indexer: IndexerMinimal{0, "Mock Indexer", "mock1"},
Resolution: "2160p",
HDR: []string{"HDR"},
},
@ -160,7 +160,7 @@ func TestMacros_Parse(t *testing.T) {
release: Release{
TorrentName: "This movie 2021",
DownloadURL: "https://some.site/download/fakeid",
Indexer: "mock1",
Indexer: IndexerMinimal{0, "Mock Indexer", "mock1"},
Resolution: "2160p",
HDR: []string{"HDR"},
Year: 2021,
@ -220,7 +220,7 @@ func TestMacros_Parse(t *testing.T) {
TorrentName: "This movie 2021",
DownloadURL: "https://some.site/download/fakeid",
Group: "thisgrp",
Indexer: "mock1",
Indexer: IndexerMinimal{0, "Mock Indexer", "mock1"},
Year: 2021,
},
args: args{text: "movies-{{.Group}}"},
@ -263,6 +263,15 @@ func TestMacros_Parse(t *testing.T) {
want: "Artists: Jon Boy",
wantErr: false,
},
{
name: "test_args_indexer",
release: Release{
Indexer: IndexerMinimal{0, "Mock Indexer", "mock1"},
},
args: args{text: "indexer={{.IndexerName}}"},
want: fmt.Sprintf("indexer=Mock Indexer"),
wantErr: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {


@ -45,7 +45,7 @@ type Release struct {
ID int64 `json:"id"`
FilterStatus ReleaseFilterStatus `json:"filter_status"`
Rejections []string `json:"rejections"`
Indexer string `json:"indexer"`
Indexer IndexerMinimal `json:"indexer"`
FilterName string `json:"filter"`
Protocol ReleaseProtocol `json:"protocol"`
Implementation ReleaseImplementation `json:"implementation"` // irc, rss, api
@ -284,7 +284,7 @@ type GetReleaseActionStatusRequest struct {
Id int
}
func NewRelease(indexer string) *Release {
func NewRelease(indexer IndexerMinimal) *Release {
r := &Release{
Indexer: indexer,
FilterStatus: ReleaseStatusFilterPending,
@ -493,25 +493,24 @@ func (r *Release) downloadTorrentFile(ctx context.Context) error {
// Continue processing the response
//case http.StatusMovedPermanently, http.StatusFound, http.StatusSeeOther, http.StatusTemporaryRedirect, http.StatusPermanentRedirect:
// // Handle redirect
// return retry.Unrecoverable(errors.New("redirect encountered for torrent (%s) file (%s) - status code: %d - check indexer keys for %s", r.TorrentName, r.DownloadURL, resp.StatusCode, r.Indexer))
// return retry.Unrecoverable(errors.New("redirect encountered for torrent (%s) file (%s) - status code: %d - check indexer keys for %s", r.TorrentName, r.DownloadURL, resp.StatusCode, r.Indexer.Name))
case http.StatusUnauthorized, http.StatusForbidden:
return retry.Unrecoverable(errors.New("unrecoverable error downloading torrent (%s) file (%s) - status code: %d - check indexer keys for %s", r.TorrentName, r.DownloadURL, resp.StatusCode, r.Indexer))
return retry.Unrecoverable(errors.New("unrecoverable error downloading torrent (%s) file (%s) - status code: %d - check indexer keys for %s", r.TorrentName, r.DownloadURL, resp.StatusCode, r.Indexer.Name))
case http.StatusMethodNotAllowed:
return retry.Unrecoverable(errors.New("unrecoverable error downloading torrent (%s) file (%s) from '%s' - status code: %d. Check if the request method is correct", r.TorrentName, r.DownloadURL, r.Indexer, resp.StatusCode))
return retry.Unrecoverable(errors.New("unrecoverable error downloading torrent (%s) file (%s) from '%s' - status code: %d. Check if the request method is correct", r.TorrentName, r.DownloadURL, r.Indexer.Name, resp.StatusCode))
case http.StatusNotFound:
return errors.New("torrent %s not found on %s (%d) - retrying", r.TorrentName, r.Indexer, resp.StatusCode)
return errors.New("torrent %s not found on %s (%d) - retrying", r.TorrentName, r.Indexer.Name, resp.StatusCode)
case http.StatusBadGateway, http.StatusServiceUnavailable, http.StatusGatewayTimeout:
return errors.New("server error (%d) encountered while downloading torrent (%s) file (%s) from '%s' - retrying", resp.StatusCode, r.TorrentName, r.DownloadURL, r.Indexer)
return errors.New("server error (%d) encountered while downloading torrent (%s) file (%s) from '%s' - retrying", resp.StatusCode, r.TorrentName, r.DownloadURL, r.Indexer.Name)
case http.StatusInternalServerError:
return errors.New("server error (%d) encountered while downloading torrent (%s) file (%s) - check indexer keys for %s", resp.StatusCode, r.TorrentName, r.DownloadURL, r.Indexer)
return errors.New("server error (%d) encountered while downloading torrent (%s) file (%s) - check indexer keys for %s", resp.StatusCode, r.TorrentName, r.DownloadURL, r.Indexer.Name)
default:
return retry.Unrecoverable(errors.New("unexpected status code %d: check indexer keys for %s", resp.StatusCode, r.Indexer))
return retry.Unrecoverable(errors.New("unexpected status code %d: check indexer keys for %s", resp.StatusCode, r.Indexer.Name))
}
resetTmpFile := func() {
@ -537,10 +536,10 @@ func (r *Release) downloadTorrentFile(ctx context.Context) error {
var bse *bencode.SyntaxError
if errors.As(err, &bse) {
// regular error so we can retry if we receive html first run
return errors.Wrap(err, "metainfo unexpected content type, got HTML expected a bencoded torrent. check indexer keys for %s - %s", r.Indexer, r.TorrentName)
return errors.Wrap(err, "metainfo unexpected content type, got HTML expected a bencoded torrent. check indexer keys for %s - %s", r.Indexer.Name, r.TorrentName)
}
return retry.Unrecoverable(errors.Wrap(err, "metainfo unexpected content type. check indexer keys for %s - %s", r.Indexer, r.TorrentName))
return retry.Unrecoverable(errors.Wrap(err, "metainfo unexpected content type. check indexer keys for %s - %s", r.Indexer.Name, r.TorrentName))
}
// Write the body to file


@ -93,7 +93,7 @@ func TestRelease_DownloadTorrentFile(t *testing.T) {
ID int64
FilterStatus ReleaseFilterStatus
Rejections []string
Indexer string
Indexer IndexerMinimal
FilterName string
Protocol ReleaseProtocol
Implementation ReleaseImplementation
@ -151,7 +151,11 @@ func TestRelease_DownloadTorrentFile(t *testing.T) {
{
name: "401",
fields: fields{
Indexer: "mock-indexer",
Indexer: IndexerMinimal{
ID: 0,
Name: "Mock Indexer",
Identifier: "mock-indexer",
},
TorrentName: "Test.Release-GROUP",
DownloadURL: fmt.Sprintf("%s/%d", ts.URL, 401),
Protocol: ReleaseProtocolTorrent,
@ -161,7 +165,11 @@ func TestRelease_DownloadTorrentFile(t *testing.T) {
{
name: "403",
fields: fields{
Indexer: "mock-indexer",
Indexer: IndexerMinimal{
ID: 0,
Name: "Mock Indexer",
Identifier: "mock-indexer",
},
TorrentName: "Test.Release-GROUP",
DownloadURL: fmt.Sprintf("%s/%d", ts.URL, 403),
Protocol: ReleaseProtocolTorrent,
@ -171,7 +179,11 @@ func TestRelease_DownloadTorrentFile(t *testing.T) {
{
name: "500",
fields: fields{
Indexer: "mock-indexer",
Indexer: IndexerMinimal{
ID: 0,
Name: "Mock Indexer",
Identifier: "mock-indexer",
},
TorrentName: "Test.Release-GROUP",
DownloadURL: fmt.Sprintf("%s/%d", ts.URL, 500),
Protocol: ReleaseProtocolTorrent,
@ -181,7 +193,11 @@ func TestRelease_DownloadTorrentFile(t *testing.T) {
{
name: "ok",
fields: fields{
Indexer: "mock-indexer",
Indexer: IndexerMinimal{
ID: 0,
Name: "Mock Indexer",
Identifier: "mock-indexer",
},
TorrentName: "Test.Release-GROUP",
DownloadURL: fmt.Sprintf("%s/%s", ts.URL, "file.torrent"),
Protocol: ReleaseProtocolTorrent,
@ -191,7 +207,11 @@ func TestRelease_DownloadTorrentFile(t *testing.T) {
{
name: "valid_torrent_with_text-html_header",
fields: fields{
Indexer: "mock-indexer",
Indexer: IndexerMinimal{
ID: 0,
Name: "Mock Indexer",
Identifier: "mock-indexer",
},
TorrentName: "Test.Release-GROUP",
DownloadURL: fmt.Sprintf("%s/files/%s", ts.URL, "valid_torrent_as_html"),
Protocol: ReleaseProtocolTorrent,
@ -201,7 +221,11 @@ func TestRelease_DownloadTorrentFile(t *testing.T) {
{
name: "invalid_torrent_with_text-html_header",
fields: fields{
Indexer: "mock-indexer",
Indexer: IndexerMinimal{
ID: 0,
Name: "Mock Indexer",
Identifier: "mock-indexer",
},
TorrentName: "Test.Release-GROUP",
DownloadURL: fmt.Sprintf("%s/files/%s", ts.URL, "invalid_torrent_as_html"),
Protocol: ReleaseProtocolTorrent,


@ -19,16 +19,15 @@ import (
)
type NewznabJob struct {
Feed *domain.Feed
Name string
IndexerIdentifier string
Log zerolog.Logger
URL string
Client newznab.Client
Repo domain.FeedRepo
CacheRepo domain.FeedCacheRepo
ReleaseSvc release.Service
SchedulerSvc scheduler.Service
Feed *domain.Feed
Name string
Log zerolog.Logger
URL string
Client newznab.Client
Repo domain.FeedRepo
CacheRepo domain.FeedCacheRepo
ReleaseSvc release.Service
SchedulerSvc scheduler.Service
attempts int
errors []error
@ -36,17 +35,16 @@ type NewznabJob struct {
JobID int
}
func NewNewznabJob(feed *domain.Feed, name string, indexerIdentifier string, log zerolog.Logger, url string, client newznab.Client, repo domain.FeedRepo, cacheRepo domain.FeedCacheRepo, releaseSvc release.Service) FeedJob {
func NewNewznabJob(feed *domain.Feed, name string, log zerolog.Logger, url string, client newznab.Client, repo domain.FeedRepo, cacheRepo domain.FeedCacheRepo, releaseSvc release.Service) FeedJob {
return &NewznabJob{
Feed: feed,
Name: name,
IndexerIdentifier: indexerIdentifier,
Log: log,
URL: url,
Client: client,
Repo: repo,
CacheRepo: cacheRepo,
ReleaseSvc: releaseSvc,
Feed: feed,
Name: name,
Log: log,
URL: url,
Client: client,
Repo: repo,
CacheRepo: cacheRepo,
ReleaseSvc: releaseSvc,
}
}
@ -97,12 +95,12 @@ func (j *NewznabJob) process(ctx context.Context) error {
}
}
rls := domain.NewRelease(j.IndexerIdentifier)
rls := domain.NewRelease(domain.IndexerMinimal{ID: j.Feed.Indexer.ID, Name: j.Feed.Indexer.Name, Identifier: j.Feed.Indexer.Identifier})
rls.Implementation = domain.ReleaseImplementationNewznab
rls.Protocol = domain.ReleaseProtocolNzb
rls.TorrentName = item.Title
rls.InfoURL = item.GUID
rls.Implementation = domain.ReleaseImplementationNewznab
rls.Protocol = domain.ReleaseProtocolNzb
// parse size bytes string
rls.ParseSizeBytesString(item.Size)


@ -26,15 +26,14 @@ var (
)
type RSSJob struct {
Feed *domain.Feed
Name string
IndexerIdentifier string
Log zerolog.Logger
URL string
Repo domain.FeedRepo
CacheRepo domain.FeedCacheRepo
ReleaseSvc release.Service
Timeout time.Duration
Feed *domain.Feed
Name string
Log zerolog.Logger
URL string
Repo domain.FeedRepo
CacheRepo domain.FeedCacheRepo
ReleaseSvc release.Service
Timeout time.Duration
attempts int
errors []error
@ -42,17 +41,16 @@ type RSSJob struct {
JobID int
}
func NewRSSJob(feed *domain.Feed, name string, indexerIdentifier string, log zerolog.Logger, url string, repo domain.FeedRepo, cacheRepo domain.FeedCacheRepo, releaseSvc release.Service, timeout time.Duration) FeedJob {
func NewRSSJob(feed *domain.Feed, name string, log zerolog.Logger, url string, repo domain.FeedRepo, cacheRepo domain.FeedCacheRepo, releaseSvc release.Service, timeout time.Duration) FeedJob {
return &RSSJob{
Feed: feed,
Name: name,
IndexerIdentifier: indexerIdentifier,
Log: log,
URL: url,
Repo: repo,
CacheRepo: cacheRepo,
ReleaseSvc: releaseSvc,
Timeout: timeout,
Feed: feed,
Name: name,
Log: log,
URL: url,
Repo: repo,
CacheRepo: cacheRepo,
ReleaseSvc: releaseSvc,
Timeout: timeout,
}
}
@ -120,7 +118,7 @@ func (j *RSSJob) processItem(item *gofeed.Item) *domain.Release {
}
}
rls := domain.NewRelease(j.IndexerIdentifier)
rls := domain.NewRelease(domain.IndexerMinimal{ID: j.Feed.Indexer.ID, Name: j.Feed.Indexer.Name, Identifier: j.Feed.Indexer.Identifier})
rls.Implementation = domain.ReleaseImplementationRSS
rls.ParseString(item.Title)


@ -21,16 +21,15 @@ func TestRSSJob_processItem(t *testing.T) {
nowMinusTime := time.Now().Add(time.Duration(-3000) * time.Second)
type fields struct {
Feed *domain.Feed
Name string
IndexerIdentifier string
Log zerolog.Logger
URL string
Repo domain.FeedCacheRepo
ReleaseSvc release.Service
attempts int
errors []error
JobID int
Feed *domain.Feed
Name string
Log zerolog.Logger
URL string
Repo domain.FeedCacheRepo
ReleaseSvc release.Service
attempts int
errors []error
JobID int
}
type args struct {
item *gofeed.Item
@ -46,16 +45,20 @@ func TestRSSJob_processItem(t *testing.T) {
fields: fields{
Feed: &domain.Feed{
MaxAge: 3600,
Indexer: domain.IndexerMinimal{
ID: 0,
Name: "Mock Feed",
Identifier: "mock-feed",
},
},
Name: "test feed",
IndexerIdentifier: "mock-feed",
Log: zerolog.Logger{},
URL: "https://fake-feed.com/rss",
Repo: nil,
ReleaseSvc: nil,
attempts: 0,
errors: nil,
JobID: 0,
Name: "test feed",
Log: zerolog.Logger{},
URL: "https://fake-feed.com/rss",
Repo: nil,
ReleaseSvc: nil,
attempts: 0,
errors: nil,
JobID: 0,
},
args: args{item: &gofeed.Item{
Title: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
@ -68,23 +71,27 @@ func TestRSSJob_processItem(t *testing.T) {
Link: "/details.php?id=00000&hit=1",
GUID: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
}},
want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: "mock-feed", FilterName: "", Protocol: "torrent", Implementation: "RSS", Timestamp: now, GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 1490000000, Title: "Some Release Title", Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n", Category: "", Season: 0, Episode: 0, Year: 2022, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed"}, FilterName: "", Protocol: "torrent", Implementation: "RSS", Timestamp: now, GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 1490000000, Title: "Some Release Title", Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n", Category: "", Season: 0, Episode: 0, Year: 2022, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
},
{
name: "with_baseurl",
fields: fields{
Feed: &domain.Feed{
MaxAge: 3600,
Indexer: domain.IndexerMinimal{
ID: 0,
Name: "Mock Feed",
Identifier: "mock-feed",
},
},
Name: "test feed",
IndexerIdentifier: "mock-feed",
Log: zerolog.Logger{},
URL: "https://fake-feed.com/rss",
Repo: nil,
ReleaseSvc: nil,
attempts: 0,
errors: nil,
JobID: 0,
Name: "test feed",
Log: zerolog.Logger{},
URL: "https://fake-feed.com/rss",
Repo: nil,
ReleaseSvc: nil,
attempts: 0,
errors: nil,
JobID: 0,
},
args: args{item: &gofeed.Item{
Title: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
@ -97,23 +104,27 @@ func TestRSSJob_processItem(t *testing.T) {
Link: "https://fake-feed.com/details.php?id=00000&hit=1",
GUID: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
}},
want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: "mock-feed", FilterName: "", Protocol: "torrent", Implementation: "RSS", Timestamp: now, GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 1490000000, Title: "Some Release Title", Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n", Category: "", Season: 0, Episode: 0, Year: 2022, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed"}, FilterName: "", Protocol: "torrent", Implementation: "RSS", Timestamp: now, GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 1490000000, Title: "Some Release Title", Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n", Category: "", Season: 0, Episode: 0, Year: 2022, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
},
{
name: "time_parse",
fields: fields{
Feed: &domain.Feed{
MaxAge: 360,
Indexer: domain.IndexerMinimal{
ID: 0,
Name: "Mock Feed",
Identifier: "mock-feed",
},
},
Name: "test feed",
IndexerIdentifier: "mock-feed",
Log: zerolog.Logger{},
URL: "https://fake-feed.com/rss",
Repo: nil,
ReleaseSvc: nil,
attempts: 0,
errors: nil,
JobID: 0,
Name: "test feed",
Log: zerolog.Logger{},
URL: "https://fake-feed.com/rss",
Repo: nil,
ReleaseSvc: nil,
attempts: 0,
errors: nil,
JobID: 0,
},
args: args{item: &gofeed.Item{
Title: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
@ -127,7 +138,7 @@ func TestRSSJob_processItem(t *testing.T) {
GUID: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
//PublishedParsed: &nowMinusTime,
}},
want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: "mock-feed", FilterName: "", Protocol: "torrent", Implementation: "RSS", Timestamp: now, GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 1490000000, Title: "Some Release Title", Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n", Category: "", Season: 0, Episode: 0, Year: 2022, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
want: &domain.Release{ID: 0, FilterStatus: "PENDING", Rejections: []string{}, Indexer: domain.IndexerMinimal{0, "Mock Feed", "mock-feed"}, FilterName: "", Protocol: "torrent", Implementation: "RSS", Timestamp: now, GroupID: "", TorrentID: "", DownloadURL: "https://fake-feed.com/details.php?id=00000&hit=1", TorrentTmpFile: "", TorrentDataRawBytes: []uint8(nil), TorrentHash: "", TorrentName: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP", Size: 1490000000, Title: "Some Release Title", Description: "Category: Example\n Size: 1.49 GB\n Status: 27 seeders and 1 leechers\n Speed: 772.16 kB/s\n Added: 2022-09-29 16:06:08\n", Category: "", Season: 0, Episode: 0, Year: 2022, Resolution: "720p", Source: "WEB", Codec: []string{"H.264"}, Container: "", HDR: []string(nil), Audio: []string(nil), AudioChannels: "", Group: "GROUP", Region: "", Language: nil, Proper: false, Repack: false, Website: "", Artists: "", Type: "episode", LogScore: 0, Origin: "", Tags: []string{}, ReleaseTags: "", Freeleech: false, FreeleechPercent: 0, Bonus: []string(nil), Uploader: "", PreTime: "", Other: []string(nil), RawCookie: "", AdditionalSizeCheckRequired: false, FilterID: 0, Filter: (*domain.Filter)(nil), ActionStatus: []domain.ReleaseActionStatus(nil)},
},
{
name: "time_parse",
@ -135,15 +146,14 @@ func TestRSSJob_processItem(t *testing.T) {
Feed: &domain.Feed{
MaxAge: 360,
},
Name: "test feed",
IndexerIdentifier: "mock-feed",
Log: zerolog.Logger{},
URL: "https://fake-feed.com/rss",
Repo: nil,
ReleaseSvc: nil,
attempts: 0,
errors: nil,
JobID: 0,
Name: "test feed",
Log: zerolog.Logger{},
URL: "https://fake-feed.com/rss",
Repo: nil,
ReleaseSvc: nil,
attempts: 0,
errors: nil,
JobID: 0,
},
args: args{item: &gofeed.Item{
Title: "Some.Release.Title.2022.09.22.720p.WEB.h264-GROUP",
@ -163,16 +173,15 @@ func TestRSSJob_processItem(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
j := &RSSJob{
Feed: tt.fields.Feed,
Name: tt.fields.Name,
IndexerIdentifier: tt.fields.IndexerIdentifier,
Log: tt.fields.Log,
URL: tt.fields.URL,
CacheRepo: tt.fields.Repo,
ReleaseSvc: tt.fields.ReleaseSvc,
attempts: tt.fields.attempts,
errors: tt.fields.errors,
JobID: tt.fields.JobID,
Feed: tt.fields.Feed,
Name: tt.fields.Name,
Log: tt.fields.Log,
URL: tt.fields.URL,
CacheRepo: tt.fields.Repo,
ReleaseSvc: tt.fields.ReleaseSvc,
attempts: tt.fields.attempts,
errors: tt.fields.errors,
JobID: tt.fields.JobID,
}
got := j.processItem(tt.args.item)
if got != nil {


@ -42,14 +42,14 @@ type Service interface {
}
type feedInstance struct {
Feed *domain.Feed
Name string
IndexerIdentifier string
URL string
ApiKey string
Implementation string
CronSchedule time.Duration
Timeout time.Duration
Feed *domain.Feed
Name string
Indexer domain.IndexerMinimal
URL string
ApiKey string
Implementation string
CronSchedule time.Duration
Timeout time.Duration
}
// feedKey creates a unique identifier to be used for controlling jobs in the scheduler
@ -349,14 +349,14 @@ func (s *service) restartJob(f *domain.Feed) error {
func newFeedInstance(f *domain.Feed) feedInstance {
// cron schedule to run every X minutes
fi := feedInstance{
Feed: f,
Name: f.Name,
IndexerIdentifier: f.Indexer,
Implementation: f.Type,
URL: f.URL,
ApiKey: f.ApiKey,
CronSchedule: time.Duration(f.Interval) * time.Minute,
Timeout: time.Duration(f.Timeout) * time.Second,
Feed: f,
Name: f.Name,
Indexer: f.Indexer,
Implementation: f.Type,
URL: f.URL,
ApiKey: f.ApiKey,
CronSchedule: time.Duration(f.Interval) * time.Minute,
Timeout: time.Duration(f.Timeout) * time.Second,
}
return fi
@ -403,11 +403,11 @@ func (s *service) startJob(f *domain.Feed) error {
job, err := s.initializeFeedJob(fi)
if err != nil {
return errors.Wrap(err, "initialize job %s failed", f.Indexer)
return errors.Wrap(err, "initialize job %s failed", f.Name)
}
if err := s.scheduleJob(fi, job); err != nil {
return errors.Wrap(err, "schedule job %s failed", f.Indexer)
return errors.Wrap(err, "schedule job %s failed", f.Name)
}
s.log.Debug().Msgf("successfully started feed: %s", f.Name)
@ -448,7 +448,7 @@ func (s *service) createTorznabJob(f feedInstance) (FeedJob, error) {
client := torznab.NewClient(torznab.Config{Host: f.URL, ApiKey: f.ApiKey, Timeout: f.Timeout})
// create job
job := NewTorznabJob(f.Feed, f.Name, f.IndexerIdentifier, l, f.URL, client, s.repo, s.cacheRepo, s.releaseSvc)
job := NewTorznabJob(f.Feed, f.Name, l, f.URL, client, s.repo, s.cacheRepo, s.releaseSvc)
return job, nil
}
@ -467,7 +467,7 @@ func (s *service) createNewznabJob(f feedInstance) (FeedJob, error) {
client := newznab.NewClient(newznab.Config{Host: f.URL, ApiKey: f.ApiKey, Timeout: f.Timeout})
// create job
job := NewNewznabJob(f.Feed, f.Name, f.IndexerIdentifier, l, f.URL, client, s.repo, s.cacheRepo, s.releaseSvc)
job := NewNewznabJob(f.Feed, f.Name, l, f.URL, client, s.repo, s.cacheRepo, s.releaseSvc)
return job, nil
}
@ -487,7 +487,7 @@ func (s *service) createRSSJob(f feedInstance) (FeedJob, error) {
l := s.log.With().Str("feed", f.Name).Logger()
// create job
job := NewRSSJob(f.Feed, f.Name, f.IndexerIdentifier, l, f.URL, s.repo, s.cacheRepo, s.releaseSvc, f.Timeout)
job := NewRSSJob(f.Feed, f.Name, l, f.URL, s.repo, s.cacheRepo, s.releaseSvc, f.Timeout)
return job, nil
}


@ -20,16 +20,15 @@ import (
)
type TorznabJob struct {
Feed *domain.Feed
Name string
IndexerIdentifier string
Log zerolog.Logger
URL string
Client torznab.Client
Repo domain.FeedRepo
CacheRepo domain.FeedCacheRepo
ReleaseSvc release.Service
SchedulerSvc scheduler.Service
Feed *domain.Feed
Name string
Log zerolog.Logger
URL string
Client torznab.Client
Repo domain.FeedRepo
CacheRepo domain.FeedCacheRepo
ReleaseSvc release.Service
SchedulerSvc scheduler.Service
attempts int
errors []error
@ -42,17 +41,16 @@ type FeedJob interface {
RunE(ctx context.Context) error
}
func NewTorznabJob(feed *domain.Feed, name string, indexerIdentifier string, log zerolog.Logger, url string, client torznab.Client, repo domain.FeedRepo, cacheRepo domain.FeedCacheRepo, releaseSvc release.Service) FeedJob {
func NewTorznabJob(feed *domain.Feed, name string, log zerolog.Logger, url string, client torznab.Client, repo domain.FeedRepo, cacheRepo domain.FeedCacheRepo, releaseSvc release.Service) FeedJob {
return &TorznabJob{
Feed: feed,
Name: name,
IndexerIdentifier: indexerIdentifier,
Log: log,
URL: url,
Client: client,
Repo: repo,
CacheRepo: cacheRepo,
ReleaseSvc: releaseSvc,
Feed: feed,
Name: name,
Log: log,
URL: url,
Client: client,
Repo: repo,
CacheRepo: cacheRepo,
ReleaseSvc: releaseSvc,
}
}
@ -103,11 +101,11 @@ func (j *TorznabJob) process(ctx context.Context) error {
}
}
rls := domain.NewRelease(j.IndexerIdentifier)
rls := domain.NewRelease(domain.IndexerMinimal{ID: j.Feed.Indexer.ID, Name: j.Feed.Indexer.Name, Identifier: j.Feed.Indexer.Identifier})
rls.Implementation = domain.ReleaseImplementationTorznab
rls.TorrentName = item.Title
rls.DownloadURL = item.Link
rls.Implementation = domain.ReleaseImplementationTorznab
// parse size bytes string
rls.ParseSizeBytesString(item.Size)


@ -466,14 +466,14 @@ func (s *service) AdditionalSizeCheck(ctx context.Context, f *domain.Filter, rel
l.Debug().Msgf("(%s) additional size check required", f.Name)
switch release.Indexer {
switch release.Indexer.Identifier {
case "ptp", "btn", "ggn", "redacted", "ops", "mock":
if release.Size == 0 {
l.Trace().Msgf("(%s) preparing to check via api", f.Name)
torrentInfo, err := s.apiService.GetTorrentByID(ctx, release.Indexer, release.TorrentID)
torrentInfo, err := s.apiService.GetTorrentByID(ctx, release.Indexer.Identifier, release.TorrentID)
if err != nil || torrentInfo == nil {
l.Error().Err(err).Msgf("(%s) could not get torrent info from api: '%s' from: %s", f.Name, release.TorrentID, release.Indexer)
l.Error().Err(err).Msgf("(%s) could not get torrent info from api: '%s' from: %s", f.Name, release.TorrentID, release.Indexer.Identifier)
return false, err
}
@ -487,7 +487,7 @@ func (s *service) AdditionalSizeCheck(ctx context.Context, f *domain.Filter, rel
// if indexer doesn't have api, download torrent and add to tmpPath
if err := release.DownloadTorrentFileCtx(ctx); err != nil {
l.Error().Err(err).Msgf("(%s) could not download torrent file with id: '%s' from: %s", f.Name, release.TorrentID, release.Indexer)
l.Error().Err(err).Msgf("(%s) could not download torrent file with id: '%s' from: %s", f.Name, release.TorrentID, release.Indexer.Identifier)
return false, err
}
}
@ -654,7 +654,7 @@ func (s *service) execCmd(ctx context.Context, external domain.FilterExternal, r
return 0, err
}
s.log.Debug().Msgf("executed external script: (%s), args: (%s) for release: (%s) indexer: (%s) total time (%s)", cmd, parsedArgs, release.TorrentName, release.Indexer, duration)
s.log.Debug().Msgf("executed external script: (%s), args: (%s) for release: (%s) indexer: (%s) total time (%s)", cmd, parsedArgs, release.TorrentName, release.Indexer.Name, duration)
return 0, nil
}


@ -327,7 +327,7 @@ func TestIndexersParseAndFilter(t *testing.T) {
return
}
rls := domain.NewRelease(i.Identifier)
rls := domain.NewRelease(domain.IndexerMinimal{ID: i.ID, Name: i.Name, Identifier: i.Identifier})
rls.Protocol = domain.ReleaseProtocol(i.Protocol)
// on lines matched


@ -101,7 +101,7 @@ func (s *service) ProcessManual(ctx context.Context, req *domain.ReleaseProcessR
return err
}
rls := domain.NewRelease(def.Identifier)
rls := domain.NewRelease(domain.IndexerMinimal{ID: def.ID, Name: def.Name, Identifier: def.Identifier})
switch req.IndexerImplementation {
case string(domain.IndexerImplementationIRC):
@ -168,19 +168,19 @@ func (s *service) Process(release *domain.Release) {
// TODO dupe checks
// get filters by priority
filters, err := s.filterSvc.FindByIndexerIdentifier(ctx, release.Indexer)
filters, err := s.filterSvc.FindByIndexerIdentifier(ctx, release.Indexer.Identifier)
if err != nil {
s.log.Error().Err(err).Msgf("release.Process: error finding filters for indexer: %s", release.Indexer)
s.log.Error().Err(err).Msgf("release.Process: error finding filters for indexer: %s", release.Indexer.Name)
return
}
if len(filters) == 0 {
s.log.Warn().Msgf("no active filters found for indexer: %s", release.Indexer)
s.log.Warn().Msgf("no active filters found for indexer: %s", release.Indexer.Name)
return
}
if err := s.processFilters(ctx, filters, release); err != nil {
s.log.Error().Err(err).Msgf("release.Process: error processing filters for indexer: %s", release.Indexer)
s.log.Error().Err(err).Msgf("release.Process: error processing filters for indexer: %s", release.Indexer.Name)
return
}
@ -196,7 +196,7 @@ func (s *service) processFilters(ctx context.Context, filters []*domain.Filter,
for _, f := range filters {
f := f
l := s.log.With().Str("indexer", release.Indexer).Str("filter", f.Name).Str("release", release.TorrentName).Logger()
l := s.log.With().Str("indexer", release.Indexer.Identifier).Str("filter", f.Name).Str("release", release.TorrentName).Logger()
// save filter on release
release.Filter = f
@ -211,13 +211,13 @@ func (s *service) processFilters(ctx context.Context, filters []*domain.Filter,
}
if !match {
l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s, no match. rejections: %s", release.Indexer, release.FilterName, release.TorrentName, f.RejectionsString(false))
l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s, no match. rejections: %s", release.Indexer.Name, release.FilterName, release.TorrentName, f.RejectionsString(false))
l.Debug().Msgf("filter %s rejected release: %s", f.Name, f.RejectionsString(true))
continue
}
l.Info().Msgf("Matched '%s' (%s) for %s", release.TorrentName, release.FilterName, release.Indexer)
l.Info().Msgf("Matched '%s' (%s) for %s", release.TorrentName, release.FilterName, release.Indexer.Name)
// found matching filter, lets find the filter actions and attach
active := true
@ -236,7 +236,7 @@ func (s *service) processFilters(ctx context.Context, filters []*domain.Filter,
// sleep for the delay period specified in the filter before running actions
delay := release.Filter.Delay
if delay > 0 {
l.Debug().Msgf("release.Process: delaying processing of '%s' (%s) for %s by %d seconds as specified in the filter", release.TorrentName, release.FilterName, release.Indexer, delay)
l.Debug().Msgf("release.Process: delaying processing of '%s' (%s) for %s by %d seconds as specified in the filter", release.TorrentName, release.FilterName, release.Indexer.Name, delay)
time.Sleep(time.Duration(delay) * time.Second)
}
@ -258,16 +258,16 @@ func (s *service) processFilters(ctx context.Context, filters []*domain.Filter,
// only run enabled actions
if !act.Enabled {
l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s action '%s' not enabled, skip", release.Indexer, release.FilterName, release.TorrentName, act.Name)
l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s action '%s' not enabled, skip", release.Indexer.Name, release.FilterName, release.TorrentName, act.Name)
continue
}
l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s , run action: %s", release.Indexer, release.FilterName, release.TorrentName, act.Name)
l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s , run action: %s", release.Indexer.Name, release.FilterName, release.TorrentName, act.Name)
// keep track of action clients to avoid sending the same thing all over again
_, tried := triedActionClients[actionClientTypeKey{Type: act.Type, ClientID: act.ClientID}]
if tried {
l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s action client already tried, skip", release.Indexer, release.FilterName, release.TorrentName)
l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s action client already tried, skip", release.Indexer.Name, release.FilterName, release.TorrentName)
continue
}


@ -317,7 +317,7 @@ export const APIClient = {
if (!filter.value)
return;
if (filter.id == "indexer") {
if (filter.id == "indexer.identifier") {
params["indexer"].push(filter.value);
} else if (filter.id === "action_status") {
params["push_status"].push(filter.value); // push_status is the correct value here otherwise the releases table won't load when filtered by push status


@ -60,7 +60,7 @@ export const LinksCell = (props: CellProps<Release>) => {
>
<div className="mb-1">
<CellLine title="Release">{props.row.original.name}</CellLine>
<CellLine title="Indexer">{props.row.original.indexer}</CellLine>
<CellLine title="Indexer">{props.row.original.indexer.identifier}</CellLine>
<CellLine title="Protocol">{props.row.original.protocol}</CellLine>
<CellLine title="Implementation">{props.row.original.implementation}</CellLine>
<CellLine title="Category">{props.row.original.category}</CellLine>


@ -28,7 +28,7 @@ interface UpdateProps {
interface InitialValues {
id: number;
indexer: string;
indexer: IndexerMinimal;
enabled: boolean;
type: FeedType;
name: string;


@ -207,7 +207,7 @@ export const ActivityTable = () => {
},
{
Header: "Indexer",
accessor: "indexer",
accessor: "indexer.identifier",
Cell: DataTable.TitleCell,
Filter: SelectColumnFilter,
filter: "includes"
@ -239,7 +239,11 @@ export const ActivityTable = () => {
const newData: Release[] = data.data.map((item, index) => ({
...item,
name: `${randomNames[index]}.iso`,
indexer: index % 2 === 0 ? "distrowatch" : "linuxtracker"
indexer: {
id: 0,
name: index % 2 === 0 ? "distrowatch" : "linuxtracker",
identifier: index % 2 === 0 ? "distrowatch" : "linuxtracker",
},
}));
setModifiedData(newData);
}
@ -290,7 +294,7 @@ export const ActivityTableContent = () => {
},
{
Header: "Indexer",
accessor: "indexer",
accessor: "indexer.identifier",
Cell: DataTable.TitleCell,
Filter: SelectColumnFilter,
filter: "includes"
@ -315,7 +319,11 @@ export const ActivityTableContent = () => {
const newData: Release[] = data.data.map((item, index) => ({
...item,
name: `${randomNames[index]}.iso`,
indexer: index % 2 === 0 ? "distrowatch" : "linuxtracker"
indexer: {
id: 0,
name: index % 2 === 0 ? "distrowatch" : "linuxtracker",
identifier: index % 2 === 0 ? "distrowatch" : "linuxtracker",
},
}));
setModifiedData(newData);
}


@ -122,7 +122,7 @@ export const ReleaseTable = () => {
},
{
Header: "Indexer",
accessor: "indexer",
accessor: "indexer.identifier",
Cell: DataTable.IndexerCell,
Filter: IndexerSelectColumnFilter,
filter: "equal"
@ -148,7 +148,11 @@ export const ReleaseTable = () => {
const newData: Release[] = data.data.map((item, index) => ({
...item,
name: `${randomNames[index]}.iso`,
indexer: index % 2 === 0 ? "distrowatch" : "linuxtracker",
indexer: {
id: 0,
name: index % 2 === 0 ? "distrowatch" : "linuxtracker",
identifier: index % 2 === 0 ? "distrowatch" : "linuxtracker",
},
category: "Linux ISOs",
size: index % 2 === 0 ? 4566784529 : (index % 3 === 0 ? 7427019812 : 2312122455),
source: "",


@ -179,7 +179,7 @@ function ListItem({ feed }: ListItemProps) {
<div className="col-span-9 md:col-span-4 pl-10 sm:pl-12 py-3 flex flex-col">
<span className="pr-2 dark:text-white truncate">{feed.name}</span>
<span className="pr-3 text-xs truncate">
{feed.indexer}
{feed.indexer.identifier}
</span>
</div>
<div className="hidden md:flex col-span-2 py-3 items-center">


@ -5,7 +5,7 @@
interface Feed {
id: number;
indexer: string;
indexer: IndexerMinimal;
name: string;
type: FeedType;
enabled: boolean;


@ -13,6 +13,12 @@ interface Indexer {
settings: Array<IndexerSetting>;
}
interface IndexerMinimal {
id: number;
name: string;
identifier: string;
}
interface IndexerDefinition {
id: number;
name: string;


@ -7,7 +7,7 @@ interface Release {
id: number;
filter_status: string;
rejections: string[];
indexer: string;
indexer: IndexerMinimal;
filter: string;
protocol: string;
implementation: string;