feat(web): move from react-router to @tanstack/router (#1338)

* fix(auth): invalid cookie handling and wrongful basic auth invalidation

* fix(auth): fix test to reflect new HTTP status code

* fix(auth/web): do not throw on error

* fix(http): replace http codes in middleware to prevent basic auth invalidation
fix typo in comment

* fix test

* fix(web): api client handle 403

* refactor(http): auth_test use testify.assert

* refactor(http): set session opts after valid login

* refactor(http): send more client headers

* fix(http): test

* refactor(web): move router to tanstack/router

* refactor(web): use route loaders and suspense

* refactor(web): useSuspense for settings

* refactor(web): invalidate cookie in middleware

* fix: lockfile

* fix: load filter/id

* fix(web): login, onboard, types, imports

* fix(web): filter load

* fix(web): build errors

* fix(web): ts-expect-error

* fix(tests): filter_test.go

* fix(filters): tests

* refactor: remove duplicate spinner components
refactor: ReleaseTable.tsx loading animation
refactor: remove dedicated `pendingComponent` for `settingsRoute`

* fix: refactor missed SectionLoader to RingResizeSpinner

* fix: substitute divides with borders to account for unloaded elements

* fix(api): action status URL param

* revert: action status URL param
add comment

* fix(routing): notfound handling and split files

* fix(filters): notfound get params

* fix(queries): colon

* fix(queries): comments ts-ignore

* fix(queries): extract queryKeys

* fix(queries): remove err

* fix(routes): move zod schema inline

* fix(auth): middleware and redirect to login

* fix(auth): failing test

* fix(logs): invalidate correct key

* fix(logs): invalidate correct key

* fix(logs): invalidate correct key

* fix: JSX element stealing focus from searchbar

* reimplement empty release table state text

* fix(context): use deep-copy

* fix(releases): empty state and filter input warnings

* fix(releases): empty states

* fix(auth): onboarding

* fix(cache): invalidate queries

---------

Co-authored-by: ze0s <43699394+zze0s@users.noreply.github.com>
Author: martylukyy — 2024-02-12 13:07:00 +01:00, committed by GitHub
parent cc9656cd41
commit 1a23b69bcf
64 changed files with 2543 additions and 2091 deletions


@@ -12,13 +12,11 @@ import (
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/internal/logger"
"github.com/autobrr/autobrr/pkg/cmp"
"github.com/autobrr/autobrr/pkg/errors"
sq "github.com/Masterminds/squirrel"
"github.com/lib/pq"
"github.com/rs/zerolog"
"golang.org/x/exp/slices"
)
type FilterRepo struct {
@@ -245,25 +243,8 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
"f.max_leechers",
"f.created_at",
"f.updated_at",
"fe.id as external_id",
"fe.name",
"fe.idx",
"fe.type",
"fe.enabled",
"fe.exec_cmd",
"fe.exec_args",
"fe.exec_expect_status",
"fe.webhook_host",
"fe.webhook_method",
"fe.webhook_data",
"fe.webhook_headers",
"fe.webhook_expect_status",
"fe.webhook_retry_status",
"fe.webhook_retry_attempts",
"fe.webhook_retry_delay_seconds",
).
From("filter f").
LeftJoin("filter_external fe ON f.id = fe.filter_id").
Where(sq.Eq{"f.id": filterID})
query, args, err := queryBuilder.ToSql()
@@ -271,176 +252,132 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
return nil, errors.Wrap(err, "error building query")
}
rows, err := r.db.handler.QueryContext(ctx, query, args...)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
row := r.db.handler.QueryRowContext(ctx, query, args...)
if row.Err() != nil {
if errors.Is(row.Err(), sql.ErrNoRows) {
return nil, domain.ErrRecordNotFound
}
return nil, errors.Wrap(err, "error executing query")
return nil, errors.Wrap(row.Err(), "error row")
}
var f domain.Filter
externalMap := make(map[int]domain.FilterExternal)
// filter
var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString
var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
var delay, maxDownloads, logScore sql.NullInt32
for rows.Next() {
// filter
var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString
var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
var delay, maxDownloads, logScore sql.NullInt32
// filter external
var extName, extType, extExecCmd, extExecArgs, extWebhookHost, extWebhookMethod, extWebhookHeaders, extWebhookData, extWebhookRetryStatus sql.NullString
var extId, extIndex, extWebhookStatus, extWebhookRetryAttempts, extWebhookDelaySeconds, extExecStatus sql.NullInt32
var extEnabled sql.NullBool
if err := rows.Scan(
&f.ID,
&f.Enabled,
&f.Name,
&minSize,
&maxSize,
&delay,
&f.Priority,
&maxDownloads,
&maxDownloadsUnit,
&matchReleases,
&exceptReleases,
&useRegex,
&matchReleaseGroups,
&exceptReleaseGroups,
&matchReleaseTags,
&exceptReleaseTags,
&f.UseRegexReleaseTags,
&matchDescription,
&exceptDescription,
&f.UseRegexDescription,
&scene,
&freeleech,
&freeleechPercent,
&f.SmartEpisode,
&shows,
&seasons,
&episodes,
pq.Array(&f.Resolutions),
pq.Array(&f.Codecs),
pq.Array(&f.Sources),
pq.Array(&f.Containers),
pq.Array(&f.MatchHDR),
pq.Array(&f.ExceptHDR),
pq.Array(&f.MatchOther),
pq.Array(&f.ExceptOther),
&years,
&artists,
&albums,
pq.Array(&f.MatchReleaseTypes),
pq.Array(&f.Formats),
pq.Array(&f.Quality),
pq.Array(&f.Media),
&logScore,
&hasLog,
&hasCue,
&perfectFlac,
&matchCategories,
&exceptCategories,
&matchUploaders,
&exceptUploaders,
pq.Array(&f.MatchLanguage),
pq.Array(&f.ExceptLanguage),
&tags,
&exceptTags,
&tagsMatchLogic,
&exceptTagsMatchLogic,
pq.Array(&f.Origins),
pq.Array(&f.ExceptOrigins),
&f.MinSeeders,
&f.MaxSeeders,
&f.MinLeechers,
&f.MaxLeechers,
&f.CreatedAt,
&f.UpdatedAt,
&extId,
&extName,
&extIndex,
&extType,
&extEnabled,
&extExecCmd,
&extExecArgs,
&extExecStatus,
&extWebhookHost,
&extWebhookMethod,
&extWebhookData,
&extWebhookHeaders,
&extWebhookStatus,
&extWebhookRetryStatus,
&extWebhookRetryAttempts,
&extWebhookDelaySeconds,
); err != nil {
return nil, errors.Wrap(err, "error scanning row")
err = row.Scan(
&f.ID,
&f.Enabled,
&f.Name,
&minSize,
&maxSize,
&delay,
&f.Priority,
&maxDownloads,
&maxDownloadsUnit,
&matchReleases,
&exceptReleases,
&useRegex,
&matchReleaseGroups,
&exceptReleaseGroups,
&matchReleaseTags,
&exceptReleaseTags,
&f.UseRegexReleaseTags,
&matchDescription,
&exceptDescription,
&f.UseRegexDescription,
&scene,
&freeleech,
&freeleechPercent,
&f.SmartEpisode,
&shows,
&seasons,
&episodes,
pq.Array(&f.Resolutions),
pq.Array(&f.Codecs),
pq.Array(&f.Sources),
pq.Array(&f.Containers),
pq.Array(&f.MatchHDR),
pq.Array(&f.ExceptHDR),
pq.Array(&f.MatchOther),
pq.Array(&f.ExceptOther),
&years,
&artists,
&albums,
pq.Array(&f.MatchReleaseTypes),
pq.Array(&f.Formats),
pq.Array(&f.Quality),
pq.Array(&f.Media),
&logScore,
&hasLog,
&hasCue,
&perfectFlac,
&matchCategories,
&exceptCategories,
&matchUploaders,
&exceptUploaders,
pq.Array(&f.MatchLanguage),
pq.Array(&f.ExceptLanguage),
&tags,
&exceptTags,
&tagsMatchLogic,
&exceptTagsMatchLogic,
pq.Array(&f.Origins),
pq.Array(&f.ExceptOrigins),
&f.MinSeeders,
&f.MaxSeeders,
&f.MinLeechers,
&f.MaxLeechers,
&f.CreatedAt,
&f.UpdatedAt,
)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, domain.ErrRecordNotFound
}
f.MinSize = minSize.String
f.MaxSize = maxSize.String
f.Delay = int(delay.Int32)
f.MaxDownloads = int(maxDownloads.Int32)
f.MaxDownloadsUnit = domain.FilterMaxDownloadsUnit(maxDownloadsUnit.String)
f.MatchReleases = matchReleases.String
f.ExceptReleases = exceptReleases.String
f.MatchReleaseGroups = matchReleaseGroups.String
f.ExceptReleaseGroups = exceptReleaseGroups.String
f.MatchReleaseTags = matchReleaseTags.String
f.ExceptReleaseTags = exceptReleaseTags.String
f.MatchDescription = matchDescription.String
f.ExceptDescription = exceptDescription.String
f.FreeleechPercent = freeleechPercent.String
f.Shows = shows.String
f.Seasons = seasons.String
f.Episodes = episodes.String
f.Years = years.String
f.Artists = artists.String
f.Albums = albums.String
f.LogScore = int(logScore.Int32)
f.Log = hasLog.Bool
f.Cue = hasCue.Bool
f.PerfectFlac = perfectFlac.Bool
f.MatchCategories = matchCategories.String
f.ExceptCategories = exceptCategories.String
f.MatchUploaders = matchUploaders.String
f.ExceptUploaders = exceptUploaders.String
f.Tags = tags.String
f.ExceptTags = exceptTags.String
f.TagsMatchLogic = tagsMatchLogic.String
f.ExceptTagsMatchLogic = exceptTagsMatchLogic.String
f.UseRegex = useRegex.Bool
f.Scene = scene.Bool
f.Freeleech = freeleech.Bool
if extId.Valid {
external := domain.FilterExternal{
ID: int(extId.Int32),
Name: extName.String,
Index: int(extIndex.Int32),
Type: domain.FilterExternalType(extType.String),
Enabled: extEnabled.Bool,
ExecCmd: extExecCmd.String,
ExecArgs: extExecArgs.String,
ExecExpectStatus: int(extExecStatus.Int32),
WebhookHost: extWebhookHost.String,
WebhookMethod: extWebhookMethod.String,
WebhookData: extWebhookData.String,
WebhookHeaders: extWebhookHeaders.String,
WebhookExpectStatus: int(extWebhookStatus.Int32),
WebhookRetryStatus: extWebhookRetryStatus.String,
WebhookRetryAttempts: int(extWebhookRetryAttempts.Int32),
WebhookRetryDelaySeconds: int(extWebhookDelaySeconds.Int32),
}
externalMap[external.ID] = external
}
return nil, errors.Wrap(err, "error scanning row")
}
for _, external := range externalMap {
f.External = append(f.External, external)
}
f.MinSize = minSize.String
f.MaxSize = maxSize.String
f.Delay = int(delay.Int32)
f.MaxDownloads = int(maxDownloads.Int32)
f.MaxDownloadsUnit = domain.FilterMaxDownloadsUnit(maxDownloadsUnit.String)
f.MatchReleases = matchReleases.String
f.ExceptReleases = exceptReleases.String
f.MatchReleaseGroups = matchReleaseGroups.String
f.ExceptReleaseGroups = exceptReleaseGroups.String
f.MatchReleaseTags = matchReleaseTags.String
f.ExceptReleaseTags = exceptReleaseTags.String
f.MatchDescription = matchDescription.String
f.ExceptDescription = exceptDescription.String
f.FreeleechPercent = freeleechPercent.String
f.Shows = shows.String
f.Seasons = seasons.String
f.Episodes = episodes.String
f.Years = years.String
f.Artists = artists.String
f.Albums = albums.String
f.LogScore = int(logScore.Int32)
f.Log = hasLog.Bool
f.Cue = hasCue.Bool
f.PerfectFlac = perfectFlac.Bool
f.MatchCategories = matchCategories.String
f.ExceptCategories = exceptCategories.String
f.MatchUploaders = matchUploaders.String
f.ExceptUploaders = exceptUploaders.String
f.Tags = tags.String
f.ExceptTags = exceptTags.String
f.TagsMatchLogic = tagsMatchLogic.String
f.ExceptTagsMatchLogic = exceptTagsMatchLogic.String
f.UseRegex = useRegex.Bool
f.Scene = scene.Bool
f.Freeleech = freeleech.Bool
return &f, nil
}
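Note: the change above swaps the previous rows loop for a single-row lookup. As a rough illustration of the pattern (not the repository's exact code), a QueryRowContext-based lookup lets Scan surface sql.ErrNoRows, which is then translated to domain.ErrRecordNotFound instead of being wrapped as a query error; sql.ErrNoRows is only reported by Scan, not by Row.Err. Table and column names below are illustrative only.

package database

import (
	"context"
	"database/sql"
	"errors"
	"fmt"
)

// errRecordNotFound stands in for domain.ErrRecordNotFound from the diff above.
var errRecordNotFound = errors.New("record not found")

// findNameByID sketches the single-row lookup pattern used by FindByID:
// QueryRowContext + Scan, with sql.ErrNoRows mapped to a domain-level
// not-found error.
func findNameByID(ctx context.Context, db *sql.DB, filterID int) (string, error) {
	var name string
	err := db.QueryRowContext(ctx, "SELECT name FROM filter WHERE id = $1", filterID).Scan(&name)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return "", errRecordNotFound
		}
		return "", fmt.Errorf("error scanning row: %w", err)
	}
	return name, nil
}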
@@ -517,28 +454,10 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
"f.max_leechers",
"f.created_at",
"f.updated_at",
"fe.id as external_id",
"fe.name",
"fe.idx",
"fe.type",
"fe.enabled",
"fe.exec_cmd",
"fe.exec_args",
"fe.exec_expect_status",
"fe.webhook_host",
"fe.webhook_method",
"fe.webhook_data",
"fe.webhook_headers",
"fe.webhook_expect_status",
"fe.webhook_retry_status",
"fe.webhook_retry_attempts",
"fe.webhook_retry_delay_seconds",
"fe.filter_id",
).
From("filter f").
Join("filter_indexer fi ON f.id = fi.filter_id").
Join("indexer i ON i.id = fi.indexer_id").
LeftJoin("filter_external fe ON f.id = fe.filter_id").
Where(sq.Eq{"i.identifier": indexer}).
Where(sq.Eq{"i.enabled": true}).
Where(sq.Eq{"f.enabled": true}).
@@ -556,7 +475,7 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
defer rows.Close()
filtersMap := make(map[int]*domain.Filter)
var filters []*domain.Filter
for rows.Next() {
var f domain.Filter
@@ -565,12 +484,7 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
var delay, maxDownloads, logScore sql.NullInt32
// filter external
var extName, extType, extExecCmd, extExecArgs, extWebhookHost, extWebhookMethod, extWebhookHeaders, extWebhookData, extWebhookRetryStatus sql.NullString
var extId, extIndex, extWebhookStatus, extWebhookRetryAttempts, extWebhookDelaySeconds, extExecStatus, extFilterId sql.NullInt32
var extEnabled sql.NullBool
if err := rows.Scan(
err := rows.Scan(
&f.ID,
&f.Enabled,
&f.Name,
@@ -635,108 +549,52 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
&f.MaxLeechers,
&f.CreatedAt,
&f.UpdatedAt,
&extId,
&extName,
&extIndex,
&extType,
&extEnabled,
&extExecCmd,
&extExecArgs,
&extExecStatus,
&extWebhookHost,
&extWebhookMethod,
&extWebhookData,
&extWebhookHeaders,
&extWebhookStatus,
&extWebhookRetryStatus,
&extWebhookRetryAttempts,
&extWebhookDelaySeconds,
&extFilterId,
); err != nil {
)
if err != nil {
return nil, errors.Wrap(err, "error scanning row")
}
filter, ok := filtersMap[f.ID]
if !ok {
f.MinSize = minSize.String
f.MaxSize = maxSize.String
f.Delay = int(delay.Int32)
f.MaxDownloads = int(maxDownloads.Int32)
f.MaxDownloadsUnit = domain.FilterMaxDownloadsUnit(maxDownloadsUnit.String)
f.MatchReleases = matchReleases.String
f.ExceptReleases = exceptReleases.String
f.MatchReleaseGroups = matchReleaseGroups.String
f.ExceptReleaseGroups = exceptReleaseGroups.String
f.MatchReleaseTags = matchReleaseTags.String
f.ExceptReleaseTags = exceptReleaseTags.String
f.MatchDescription = matchDescription.String
f.ExceptDescription = exceptDescription.String
f.FreeleechPercent = freeleechPercent.String
f.Shows = shows.String
f.Seasons = seasons.String
f.Episodes = episodes.String
f.Years = years.String
f.Artists = artists.String
f.Albums = albums.String
f.LogScore = int(logScore.Int32)
f.Log = hasLog.Bool
f.Cue = hasCue.Bool
f.PerfectFlac = perfectFlac.Bool
f.MatchCategories = matchCategories.String
f.ExceptCategories = exceptCategories.String
f.MatchUploaders = matchUploaders.String
f.ExceptUploaders = exceptUploaders.String
f.Tags = tags.String
f.ExceptTags = exceptTags.String
f.TagsMatchLogic = tagsMatchLogic.String
f.ExceptTagsMatchLogic = exceptTagsMatchLogic.String
f.UseRegex = useRegex.Bool
f.Scene = scene.Bool
f.Freeleech = freeleech.Bool
f.MinSize = minSize.String
f.MaxSize = maxSize.String
f.Delay = int(delay.Int32)
f.MaxDownloads = int(maxDownloads.Int32)
f.MaxDownloadsUnit = domain.FilterMaxDownloadsUnit(maxDownloadsUnit.String)
f.MatchReleases = matchReleases.String
f.ExceptReleases = exceptReleases.String
f.MatchReleaseGroups = matchReleaseGroups.String
f.ExceptReleaseGroups = exceptReleaseGroups.String
f.MatchReleaseTags = matchReleaseTags.String
f.ExceptReleaseTags = exceptReleaseTags.String
f.MatchDescription = matchDescription.String
f.ExceptDescription = exceptDescription.String
f.FreeleechPercent = freeleechPercent.String
f.Shows = shows.String
f.Seasons = seasons.String
f.Episodes = episodes.String
f.Years = years.String
f.Artists = artists.String
f.Albums = albums.String
f.LogScore = int(logScore.Int32)
f.Log = hasLog.Bool
f.Cue = hasCue.Bool
f.PerfectFlac = perfectFlac.Bool
f.MatchCategories = matchCategories.String
f.ExceptCategories = exceptCategories.String
f.MatchUploaders = matchUploaders.String
f.ExceptUploaders = exceptUploaders.String
f.Tags = tags.String
f.ExceptTags = exceptTags.String
f.TagsMatchLogic = tagsMatchLogic.String
f.ExceptTagsMatchLogic = exceptTagsMatchLogic.String
f.UseRegex = useRegex.Bool
f.Scene = scene.Bool
f.Freeleech = freeleech.Bool
f.Rejections = []string{}
f.Rejections = []string{}
filter = &f
filtersMap[f.ID] = filter
}
if extId.Valid {
external := domain.FilterExternal{
ID: int(extId.Int32),
Name: extName.String,
Index: int(extIndex.Int32),
Type: domain.FilterExternalType(extType.String),
Enabled: extEnabled.Bool,
ExecCmd: extExecCmd.String,
ExecArgs: extExecArgs.String,
ExecExpectStatus: int(extExecStatus.Int32),
WebhookHost: extWebhookHost.String,
WebhookMethod: extWebhookMethod.String,
WebhookData: extWebhookData.String,
WebhookHeaders: extWebhookHeaders.String,
WebhookExpectStatus: int(extWebhookStatus.Int32),
WebhookRetryStatus: extWebhookRetryStatus.String,
WebhookRetryAttempts: int(extWebhookRetryAttempts.Int32),
WebhookRetryDelaySeconds: int(extWebhookDelaySeconds.Int32),
FilterId: int(extFilterId.Int32),
}
filter.External = append(filter.External, external)
}
filters = append(filters, &f)
}
var filters []*domain.Filter
for _, filter := range filtersMap {
filter := filter
filters = append(filters, filter)
}
// the filterMap messes up the order, so we need to sort the filters slice
slices.SortStableFunc(filters, func(a, b *domain.Filter) int {
// TODO remove with Go 1.21 and use std lib cmp
return cmp.Compare(b.Priority, a.Priority)
})
return filters, nil
}
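For reference, the stable priority sort shown above can be written against the Go 1.21 standard library, which is what the TODO in that comment points at. A sketch with a cut-down struct, not the project's code:

package main

import (
	"cmp"
	"fmt"
	"slices"
)

type filter struct {
	Name     string
	Priority int32
}

func main() {
	filters := []*filter{
		{Name: "b", Priority: 5},
		{Name: "a", Priority: 10},
		{Name: "c", Priority: 5},
	}

	// Highest priority first; the stable sort preserves the relative order
	// of filters that share a priority value.
	slices.SortStableFunc(filters, func(a, b *filter) int {
		return cmp.Compare(b.Priority, a.Priority)
	})

	for _, f := range filters {
		fmt.Println(f.Name, f.Priority) // a 10, b 5, c 5
	}
}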
@@ -1232,39 +1090,6 @@ func (r *FilterRepo) UpdatePartial(ctx context.Context, filter domain.FilterUpda
if filter.ExceptOrigins != nil {
q = q.Set("except_origins", pq.Array(filter.ExceptOrigins))
}
if filter.ExternalScriptEnabled != nil {
q = q.Set("external_script_enabled", filter.ExternalScriptEnabled)
}
if filter.ExternalScriptCmd != nil {
q = q.Set("external_script_cmd", filter.ExternalScriptCmd)
}
if filter.ExternalScriptArgs != nil {
q = q.Set("external_script_args", filter.ExternalScriptArgs)
}
if filter.ExternalScriptExpectStatus != nil {
q = q.Set("external_script_expect_status", filter.ExternalScriptExpectStatus)
}
if filter.ExternalWebhookEnabled != nil {
q = q.Set("external_webhook_enabled", filter.ExternalWebhookEnabled)
}
if filter.ExternalWebhookHost != nil {
q = q.Set("external_webhook_host", filter.ExternalWebhookHost)
}
if filter.ExternalWebhookData != nil {
q = q.Set("external_webhook_data", filter.ExternalWebhookData)
}
if filter.ExternalWebhookExpectStatus != nil {
q = q.Set("external_webhook_expect_status", filter.ExternalWebhookExpectStatus)
}
if filter.ExternalWebhookRetryStatus != nil {
q = q.Set("external_webhook_retry_status", filter.ExternalWebhookRetryStatus)
}
if filter.ExternalWebhookRetryAttempts != nil {
q = q.Set("external_webhook_retry_attempts", filter.ExternalWebhookRetryAttempts)
}
if filter.ExternalWebhookRetryDelaySeconds != nil {
q = q.Set("external_webhook_retry_delay_seconds", filter.ExternalWebhookRetryDelaySeconds)
}
if filter.MinSeeders != nil {
q = q.Set("min_seeders", filter.MinSeeders)
}


@@ -205,11 +205,10 @@ func TestFilterRepo_Delete(t *testing.T) {
err = repo.Delete(context.Background(), createdFilters[0].ID)
assert.NoError(t, err)
// Verify that the filter is deleted
// Verify that the filter is deleted and that FindByID returns ErrRecordNotFound
filter, err := repo.FindByID(context.Background(), createdFilters[0].ID)
assert.NoError(t, err)
assert.NotNil(t, filter)
assert.Equal(t, 0, filter.ID)
assert.ErrorIs(t, err, domain.ErrRecordNotFound)
assert.Nil(t, filter)
})
t.Run(fmt.Sprintf("Delete_Fails_No_Record [%s]", dbType), func(t *testing.T) {
@@ -451,12 +450,11 @@ func TestFilterRepo_FindByID(t *testing.T) {
_ = repo.Delete(context.Background(), createdFilters[0].ID)
})
// TODO: This should succeed, but it fails because we are not handling the error correctly. Fix this.
t.Run(fmt.Sprintf("FindByID_Fails_Invalid_ID [%s]", dbType), func(t *testing.T) {
// Test using an invalid ID
filter, err := repo.FindByID(context.Background(), -1)
assert.NoError(t, err) // should return an error
assert.NotNil(t, filter) // should be nil
assert.ErrorIs(t, err, domain.ErrRecordNotFound) // should return an error
assert.Nil(t, filter) // should be nil
})
}


@@ -174,86 +174,75 @@ const (
)
type FilterUpdate struct {
ID int `json:"id"`
Name *string `json:"name,omitempty"`
Enabled *bool `json:"enabled,omitempty"`
MinSize *string `json:"min_size,omitempty"`
MaxSize *string `json:"max_size,omitempty"`
Delay *int `json:"delay,omitempty"`
Priority *int32 `json:"priority,omitempty"`
MaxDownloads *int `json:"max_downloads,omitempty"`
MaxDownloadsUnit *FilterMaxDownloadsUnit `json:"max_downloads_unit,omitempty"`
MatchReleases *string `json:"match_releases,omitempty"`
ExceptReleases *string `json:"except_releases,omitempty"`
UseRegex *bool `json:"use_regex,omitempty"`
MatchReleaseGroups *string `json:"match_release_groups,omitempty"`
ExceptReleaseGroups *string `json:"except_release_groups,omitempty"`
MatchReleaseTags *string `json:"match_release_tags,omitempty"`
ExceptReleaseTags *string `json:"except_release_tags,omitempty"`
UseRegexReleaseTags *bool `json:"use_regex_release_tags,omitempty"`
MatchDescription *string `json:"match_description,omitempty"`
ExceptDescription *string `json:"except_description,omitempty"`
UseRegexDescription *bool `json:"use_regex_description,omitempty"`
Scene *bool `json:"scene,omitempty"`
Origins *[]string `json:"origins,omitempty"`
ExceptOrigins *[]string `json:"except_origins,omitempty"`
Bonus *[]string `json:"bonus,omitempty"`
Freeleech *bool `json:"freeleech,omitempty"`
FreeleechPercent *string `json:"freeleech_percent,omitempty"`
SmartEpisode *bool `json:"smart_episode,omitempty"`
Shows *string `json:"shows,omitempty"`
Seasons *string `json:"seasons,omitempty"`
Episodes *string `json:"episodes,omitempty"`
Resolutions *[]string `json:"resolutions,omitempty"` // SD, 480i, 480p, 576p, 720p, 810p, 1080i, 1080p.
Codecs *[]string `json:"codecs,omitempty"` // XviD, DivX, x264, h.264 (or h264), mpeg2 (or mpeg-2), VC-1 (or VC1), WMV, Remux, h.264 Remux (or h264 Remux), VC-1 Remux (or VC1 Remux).
Sources *[]string `json:"sources,omitempty"` // DSR, PDTV, HDTV, HR.PDTV, HR.HDTV, DVDRip, DVDScr, BDr, BD5, BD9, BDRip, BRRip, DVDR, MDVDR, HDDVD, HDDVDRip, BluRay, WEB-DL, TVRip, CAM, R5, TELESYNC, TS, TELECINE, TC. TELESYNC and TS are synonyms (you don't need both). Same for TELECINE and TC
Containers *[]string `json:"containers,omitempty"`
MatchHDR *[]string `json:"match_hdr,omitempty"`
ExceptHDR *[]string `json:"except_hdr,omitempty"`
MatchOther *[]string `json:"match_other,omitempty"`
ExceptOther *[]string `json:"except_other,omitempty"`
Years *string `json:"years,omitempty"`
Artists *string `json:"artists,omitempty"`
Albums *string `json:"albums,omitempty"`
MatchReleaseTypes *[]string `json:"match_release_types,omitempty"` // Album,Single,EP
ExceptReleaseTypes *string `json:"except_release_types,omitempty"`
Formats *[]string `json:"formats,omitempty"` // MP3, FLAC, Ogg, AAC, AC3, DTS
Quality *[]string `json:"quality,omitempty"` // 192, 320, APS (VBR), V2 (VBR), V1 (VBR), APX (VBR), V0 (VBR), q8.x (VBR), Lossless, 24bit Lossless, Other
Media *[]string `json:"media,omitempty"` // CD, DVD, Vinyl, Soundboard, SACD, DAT, Cassette, WEB, Other
PerfectFlac *bool `json:"perfect_flac,omitempty"`
Cue *bool `json:"cue,omitempty"`
Log *bool `json:"log,omitempty"`
LogScore *int `json:"log_score,omitempty"`
MatchCategories *string `json:"match_categories,omitempty"`
ExceptCategories *string `json:"except_categories,omitempty"`
MatchUploaders *string `json:"match_uploaders,omitempty"`
ExceptUploaders *string `json:"except_uploaders,omitempty"`
MatchLanguage *[]string `json:"match_language,omitempty"`
ExceptLanguage *[]string `json:"except_language,omitempty"`
Tags *string `json:"tags,omitempty"`
ExceptTags *string `json:"except_tags,omitempty"`
TagsAny *string `json:"tags_any,omitempty"`
ExceptTagsAny *string `json:"except_tags_any,omitempty"`
TagsMatchLogic *string `json:"tags_match_logic,omitempty"`
ExceptTagsMatchLogic *string `json:"except_tags_match_logic,omitempty"`
MinSeeders *int `json:"min_seeders,omitempty"`
MaxSeeders *int `json:"max_seeders,omitempty"`
MinLeechers *int `json:"min_leechers,omitempty"`
MaxLeechers *int `json:"max_leechers,omitempty"`
ExternalScriptEnabled *bool `json:"external_script_enabled,omitempty"`
ExternalScriptCmd *string `json:"external_script_cmd,omitempty"`
ExternalScriptArgs *string `json:"external_script_args,omitempty"`
ExternalScriptExpectStatus *int `json:"external_script_expect_status,omitempty"`
ExternalWebhookEnabled *bool `json:"external_webhook_enabled,omitempty"`
ExternalWebhookHost *string `json:"external_webhook_host,omitempty"`
ExternalWebhookData *string `json:"external_webhook_data,omitempty"`
ExternalWebhookExpectStatus *int `json:"external_webhook_expect_status,omitempty"`
ExternalWebhookRetryStatus *string `json:"external_webhook_retry_status,omitempty"`
ExternalWebhookRetryAttempts *int `json:"external_webhook_retry_attempts,omitempty"`
ExternalWebhookRetryDelaySeconds *int `json:"external_webhook_retry_delay_seconds,omitempty"`
Actions []*Action `json:"actions,omitempty"`
External []FilterExternal `json:"external,omitempty"`
Indexers []Indexer `json:"indexers,omitempty"`
ID int `json:"id"`
Name *string `json:"name,omitempty"`
Enabled *bool `json:"enabled,omitempty"`
MinSize *string `json:"min_size,omitempty"`
MaxSize *string `json:"max_size,omitempty"`
Delay *int `json:"delay,omitempty"`
Priority *int32 `json:"priority,omitempty"`
MaxDownloads *int `json:"max_downloads,omitempty"`
MaxDownloadsUnit *FilterMaxDownloadsUnit `json:"max_downloads_unit,omitempty"`
MatchReleases *string `json:"match_releases,omitempty"`
ExceptReleases *string `json:"except_releases,omitempty"`
UseRegex *bool `json:"use_regex,omitempty"`
MatchReleaseGroups *string `json:"match_release_groups,omitempty"`
ExceptReleaseGroups *string `json:"except_release_groups,omitempty"`
MatchReleaseTags *string `json:"match_release_tags,omitempty"`
ExceptReleaseTags *string `json:"except_release_tags,omitempty"`
UseRegexReleaseTags *bool `json:"use_regex_release_tags,omitempty"`
MatchDescription *string `json:"match_description,omitempty"`
ExceptDescription *string `json:"except_description,omitempty"`
UseRegexDescription *bool `json:"use_regex_description,omitempty"`
Scene *bool `json:"scene,omitempty"`
Origins *[]string `json:"origins,omitempty"`
ExceptOrigins *[]string `json:"except_origins,omitempty"`
Bonus *[]string `json:"bonus,omitempty"`
Freeleech *bool `json:"freeleech,omitempty"`
FreeleechPercent *string `json:"freeleech_percent,omitempty"`
SmartEpisode *bool `json:"smart_episode,omitempty"`
Shows *string `json:"shows,omitempty"`
Seasons *string `json:"seasons,omitempty"`
Episodes *string `json:"episodes,omitempty"`
Resolutions *[]string `json:"resolutions,omitempty"` // SD, 480i, 480p, 576p, 720p, 810p, 1080i, 1080p.
Codecs *[]string `json:"codecs,omitempty"` // XviD, DivX, x264, h.264 (or h264), mpeg2 (or mpeg-2), VC-1 (or VC1), WMV, Remux, h.264 Remux (or h264 Remux), VC-1 Remux (or VC1 Remux).
Sources *[]string `json:"sources,omitempty"` // DSR, PDTV, HDTV, HR.PDTV, HR.HDTV, DVDRip, DVDScr, BDr, BD5, BD9, BDRip, BRRip, DVDR, MDVDR, HDDVD, HDDVDRip, BluRay, WEB-DL, TVRip, CAM, R5, TELESYNC, TS, TELECINE, TC. TELESYNC and TS are synonyms (you don't need both). Same for TELECINE and TC
Containers *[]string `json:"containers,omitempty"`
MatchHDR *[]string `json:"match_hdr,omitempty"`
ExceptHDR *[]string `json:"except_hdr,omitempty"`
MatchOther *[]string `json:"match_other,omitempty"`
ExceptOther *[]string `json:"except_other,omitempty"`
Years *string `json:"years,omitempty"`
Artists *string `json:"artists,omitempty"`
Albums *string `json:"albums,omitempty"`
MatchReleaseTypes *[]string `json:"match_release_types,omitempty"` // Album,Single,EP
ExceptReleaseTypes *string `json:"except_release_types,omitempty"`
Formats *[]string `json:"formats,omitempty"` // MP3, FLAC, Ogg, AAC, AC3, DTS
Quality *[]string `json:"quality,omitempty"` // 192, 320, APS (VBR), V2 (VBR), V1 (VBR), APX (VBR), V0 (VBR), q8.x (VBR), Lossless, 24bit Lossless, Other
Media *[]string `json:"media,omitempty"` // CD, DVD, Vinyl, Soundboard, SACD, DAT, Cassette, WEB, Other
PerfectFlac *bool `json:"perfect_flac,omitempty"`
Cue *bool `json:"cue,omitempty"`
Log *bool `json:"log,omitempty"`
LogScore *int `json:"log_score,omitempty"`
MatchCategories *string `json:"match_categories,omitempty"`
ExceptCategories *string `json:"except_categories,omitempty"`
MatchUploaders *string `json:"match_uploaders,omitempty"`
ExceptUploaders *string `json:"except_uploaders,omitempty"`
MatchLanguage *[]string `json:"match_language,omitempty"`
ExceptLanguage *[]string `json:"except_language,omitempty"`
Tags *string `json:"tags,omitempty"`
ExceptTags *string `json:"except_tags,omitempty"`
TagsAny *string `json:"tags_any,omitempty"`
ExceptTagsAny *string `json:"except_tags_any,omitempty"`
TagsMatchLogic *string `json:"tags_match_logic,omitempty"`
ExceptTagsMatchLogic *string `json:"except_tags_match_logic,omitempty"`
MinSeeders *int `json:"min_seeders,omitempty"`
MaxSeeders *int `json:"max_seeders,omitempty"`
MinLeechers *int `json:"min_leechers,omitempty"`
MaxLeechers *int `json:"max_leechers,omitempty"`
Actions []*Action `json:"actions,omitempty"`
External []FilterExternal `json:"external,omitempty"`
Indexers []Indexer `json:"indexers,omitempty"`
}
func (f *Filter) Validate() error {
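The pointer-typed fields in FilterUpdate are what make UpdatePartial work: a field left out of the JSON payload stays nil and is skipped, while a field that is present (even with a zero value such as false) is applied. A minimal sketch of that distinction, using a cut-down stand-in for FilterUpdate:

package main

import (
	"encoding/json"
	"fmt"
)

// filterUpdate is a cut-down stand-in for domain.FilterUpdate.
type filterUpdate struct {
	ID      int     `json:"id"`
	Name    *string `json:"name,omitempty"`
	Enabled *bool   `json:"enabled,omitempty"`
}

func main() {
	var u filterUpdate
	// "name" is absent, so Name stays nil and a partial update would skip it;
	// "enabled" is present, so it is applied even though the value is false.
	_ = json.Unmarshal([]byte(`{"id": 1, "enabled": false}`), &u)

	fmt.Println(u.Name == nil)                // true  -> field untouched
	fmt.Println(u.Enabled != nil, *u.Enabled) // true false -> explicitly disabled
}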


@@ -124,6 +124,12 @@ func (s *service) FindByID(ctx context.Context, filterID int) (*domain.Filter, e
return nil, err
}
externalFilters, err := s.repo.FindExternalFiltersByID(ctx, filter.ID)
if err != nil {
s.log.Error().Err(err).Msgf("could not find external filters for filter id: %v", filter.ID)
}
filter.External = externalFilters
actions, err := s.actionRepo.FindByFilterID(ctx, filter.ID, nil)
if err != nil {
s.log.Error().Err(err).Msgf("could not find filter actions for filter id: %v", filter.ID)
@@ -142,9 +148,25 @@ func (s *service) FindByID(ctx context.Context, filterID int) (*domain.Filter, e
func (s *service) FindByIndexerIdentifier(ctx context.Context, indexer string) ([]*domain.Filter, error) {
// get filters for indexer
filters, err := s.repo.FindByIndexerIdentifier(ctx, indexer)
if err != nil {
return nil, err
}
// we do not load actions here since we do not need it at this stage
// only load those after filter has matched
return s.repo.FindByIndexerIdentifier(ctx, indexer)
for _, filter := range filters {
filter := filter
externalFilters, err := s.repo.FindExternalFiltersByID(ctx, filter.ID)
if err != nil {
s.log.Error().Err(err).Msgf("could not find external filters for filter id: %v", filter.ID)
}
filter.External = externalFilters
}
return filters, nil
}
func (s *service) GetDownloadsByFilterId(ctx context.Context, filterID int) (*domain.FilterDownloads, error) {
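Since the LEFT JOIN on filter_external was dropped from the repository queries, the service now attaches external filters with a separate lookup per filter. A rough sketch of what such a lookup could look like, using plain database/sql and only columns listed in the removed join; the real FindExternalFiltersByID implementation may differ:

package filter

import (
	"context"
	"database/sql"

	"github.com/autobrr/autobrr/internal/domain"
)

// findExternalFiltersByID is a sketch only: it assumes a filter_external
// table with (id, name, idx, type, enabled, filter_id) among its columns
// and the domain.FilterExternal fields seen elsewhere in this diff.
func findExternalFiltersByID(ctx context.Context, db *sql.DB, filterID int) ([]domain.FilterExternal, error) {
	rows, err := db.QueryContext(ctx,
		"SELECT id, name, idx, type, enabled FROM filter_external WHERE filter_id = $1", filterID)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var externals []domain.FilterExternal
	for rows.Next() {
		var e domain.FilterExternal
		var extType string
		if err := rows.Scan(&e.ID, &e.Name, &e.Index, &extType, &e.Enabled); err != nil {
			return nil, err
		}
		e.Type = domain.FilterExternalType(extType)
		externals = append(externals, e)
	}
	return externals, rows.Err()
}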


@@ -21,6 +21,7 @@ import (
"github.com/go-chi/chi/v5"
"github.com/gorilla/sessions"
"github.com/rs/zerolog"
"github.com/stretchr/testify/assert"
)
type authServiceMock struct {
@@ -144,9 +145,7 @@ func TestAuthHandlerLogin(t *testing.T) {
defer resp.Body.Close()
// check for response, here we'll just check for 204 NoContent
if status := resp.StatusCode; status != http.StatusNoContent {
t.Errorf("login: handler returned wrong status code: got %v want %v", status, http.StatusNoContent)
}
assert.Equalf(t, http.StatusNoContent, resp.StatusCode, "login handler: unexpected http status")
if v := resp.Header.Get("Set-Cookie"); v == "" {
t.Errorf("handler returned no cookie")
@@ -207,12 +206,10 @@ func TestAuthHandlerValidateOK(t *testing.T) {
defer resp.Body.Close()
// check for response, here we'll just check for 204 NoContent
if status := resp.StatusCode; status != http.StatusNoContent {
t.Errorf("login: handler returned wrong status code: got %v want %v", status, http.StatusNoContent)
}
assert.Equalf(t, http.StatusNoContent, resp.StatusCode, "login handler: bad response")
if v := resp.Header.Get("Set-Cookie"); v == "" {
t.Errorf("handler returned no cookie")
assert.Equalf(t, "", v, "login handler: expected Set-Cookie header")
}
// validate token
@@ -223,9 +220,7 @@ func TestAuthHandlerValidateOK(t *testing.T) {
defer resp.Body.Close()
if status := resp.StatusCode; status != http.StatusNoContent {
t.Errorf("validate: handler returned wrong status code: got %v want %v", status, http.StatusNoContent)
}
assert.Equalf(t, http.StatusNoContent, resp.StatusCode, "validate handler: unexpected http status")
}
func TestAuthHandlerValidateBad(t *testing.T) {
@@ -272,9 +267,7 @@ func TestAuthHandlerValidateBad(t *testing.T) {
defer resp.Body.Close()
if status := resp.StatusCode; status != http.StatusNoContent {
t.Errorf("validate: handler returned wrong status code: got %v want %v", status, http.StatusNoContent)
}
assert.Equalf(t, http.StatusForbidden, resp.StatusCode, "validate handler: unexpected http status")
}
func TestAuthHandlerLoginBad(t *testing.T) {
@@ -321,9 +314,7 @@ func TestAuthHandlerLoginBad(t *testing.T) {
defer resp.Body.Close()
// check for response, here we'll just check for 403 Forbidden
if status := resp.StatusCode; status != http.StatusForbidden {
t.Errorf("handler returned wrong status code: got %v want %v", status, http.StatusForbidden)
}
assert.Equalf(t, http.StatusForbidden, resp.StatusCode, "login handler: unexpected http status")
}
func TestAuthHandlerLogout(t *testing.T) {
@@ -384,6 +375,8 @@ func TestAuthHandlerLogout(t *testing.T) {
t.Errorf("login: handler returned wrong status code: got %v want %v", status, http.StatusNoContent)
}
assert.Equalf(t, http.StatusNoContent, resp.StatusCode, "login handler: unexpected http status")
if v := resp.Header.Get("Set-Cookie"); v == "" {
t.Errorf("handler returned no cookie")
}
@@ -396,9 +389,7 @@ func TestAuthHandlerLogout(t *testing.T) {
defer resp.Body.Close()
if status := resp.StatusCode; status != http.StatusNoContent {
t.Errorf("validate: handler returned wrong status code: got %v want %v", status, http.StatusNoContent)
}
assert.Equalf(t, http.StatusNoContent, resp.StatusCode, "validate handler: unexpected http status")
// logout
resp, err = client.Post(testServer.URL+"/auth/logout", "application/json", nil)
@@ -408,9 +399,7 @@ func TestAuthHandlerLogout(t *testing.T) {
defer resp.Body.Close()
if status := resp.StatusCode; status != http.StatusNoContent {
t.Errorf("logout: handler returned wrong status code: got %v want %v", status, http.StatusNoContent)
}
assert.Equalf(t, http.StatusNoContent, resp.StatusCode, "logout handler: unexpected http status")
//if v := resp.Header.Get("Set-Cookie"); v != "" {
// t.Errorf("logout handler returned cookie")


@@ -67,6 +67,16 @@ func (e encoder) StatusNotFound(w http.ResponseWriter) {
w.WriteHeader(http.StatusNotFound)
}
func (e encoder) NotFoundErr(w http.ResponseWriter, err error) {
res := errorResponse{
Message: err.Error(),
}
w.Header().Set("Content-Type", "application/json; charset=utf-8")
w.WriteHeader(http.StatusNotFound)
json.NewEncoder(w).Encode(res)
}
func (e encoder) StatusInternalError(w http.ResponseWriter) {
w.WriteHeader(http.StatusInternalServerError)
}


@@ -119,7 +119,7 @@ func (h filterHandler) getByID(w http.ResponseWriter, r *http.Request) {
filter, err := h.service.FindByID(ctx, id)
if err != nil {
if errors.Is(err, domain.ErrRecordNotFound) {
h.encoder.StatusNotFound(w)
h.encoder.NotFoundErr(w, errors.New("filter with id %d not found", id))
return
}


@@ -38,7 +38,6 @@ func (s Server) IsAuthenticated(next http.Handler) http.Handler {
// MaxAge<0 means delete cookie immediately
session.Options.MaxAge = -1
session.Options.Path = s.config.Config.BaseURL
if err := session.Save(r, w); err != nil {
@@ -50,13 +49,10 @@ func (s Server) IsAuthenticated(next http.Handler) http.Handler {
return
}
if session.IsNew {
http.Error(w, http.StatusText(http.StatusNoContent), http.StatusNoContent)
return
}
// Check if user is authenticated
if auth, ok := session.Values["authenticated"].(bool); !ok || !auth {
s.log.Warn().Msg("session not authenticated")
http.Error(w, http.StatusText(http.StatusForbidden), http.StatusForbidden)
return
}
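The status-code handling above ties back to the "prevent basic auth invalidation" commits: replying 401 Unauthorized from the API can make a browser that holds HTTP basic auth credentials for a fronting proxy re-prompt for (or drop) them, so the middleware deletes the stale session cookie and answers 403 Forbidden, leaving the SPA to redirect to its login route. A condensed sketch of that shape, with placeholder names for the store, session name and base URL rather than autobrr's actual fields:

package http

import (
	"net/http"

	"github.com/gorilla/sessions"
)

// isAuthenticated sketches the middleware behaviour described above.
func isAuthenticated(store sessions.Store, baseURL string, next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		session, err := store.Get(r, "user_session")
		if err != nil {
			// Invalid or expired cookie: expire it (MaxAge < 0 deletes it)
			// and reject with 403 so any basic auth credentials the browser
			// holds for a reverse proxy are left untouched.
			session.Options.MaxAge = -1
			session.Options.Path = baseURL
			_ = session.Save(r, w)
			http.Error(w, http.StatusText(http.StatusForbidden), http.StatusForbidden)
			return
		}

		// Reject sessions that never authenticated.
		if auth, ok := session.Values["authenticated"].(bool); !ok || !auth {
			http.Error(w, http.StatusText(http.StatusForbidden), http.StatusForbidden)
			return
		}

		next.ServeHTTP(w, r)
	})
}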