feat(web): move from react-router to @tanstack/router (#1338)

* fix(auth): invalid cookie handling and wrongful basic auth invalidation

* fix(auth): fix test to reflect new HTTP status code

* fix(auth/web): do not throw on error

* fix(http): replace http codes in middleware to prevent basic auth invalidation
fix typo in comment

* fix test

* fix(web): api client handle 403

* refactor(http): auth_test use testify.assert

* refactor(http): set session opts after valid login

* refactor(http): send more client headers

* fix(http): test

* refactor(web): move router to tanstack/router

* refactor(web): use route loaders and suspense

* refactor(web): useSuspense for settings

* refactor(web): invalidate cookie in middleware

* fix: lockfile

* fix: load filter/id

* fix(web): login, onboard, types, imports

* fix(web): filter load

* fix(web): build errors

* fix(web): ts-expect-error

* fix(tests): filter_test.go

* fix(filters): tests

* refactor: remove duplicate spinner components
refactor: ReleaseTable.tsx loading animation
refactor: remove dedicated `pendingComponent` for `settingsRoute`

* fix: refactor missed SectionLoader to RingResizeSpinner

* fix: substitute divides with borders to account for unloaded elements

* fix(api): action status URL param

* revert: action status URL param
add comment

* fix(routing): notfound handling and split files

* fix(filters): notfound get params

* fix(queries): colon

* fix(queries): comments ts-ignore

* fix(queries): extract queryKeys

* fix(queries): remove err

* fix(routes): move zod schema inline

* fix(auth): middleware and redirect to login

* fix(auth): failing test

* fix(logs): invalidate correct key

* fix(logs): invalidate correct key

* fix(logs): invalidate correct key

* fix: JSX element stealing focus from searchbar

* reimplement empty release table state text

* fix(context): use deep-copy

* fix(releases): empty state and filter input warnings

* fix(releases): empty states

* fix(auth): onboarding

* fix(cache): invalidate queries

---------

Co-authored-by: ze0s <43699394+zze0s@users.noreply.github.com>
Author: martylukyy, 2024-02-12 13:07:00 +01:00 (committed by GitHub)
Parent: cc9656cd41
Commit: 1a23b69bcf
64 changed files with 2543 additions and 2091 deletions
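Several of the auth commits above (invalid cookie handling, the middleware status-code change, handling 403 in the API client, invalidating the cookie in middleware) revolve around the same idea: the backend answers a failed session check with 403 Forbidden instead of 401 Unauthorized, since a 401 can trigger the browser's basic-auth prompt and clobber credentials injected by a reverse proxy in front of autobrr, and it expires the stale session cookie in the same response. A minimal net/http sketch of that behaviour; the handler and cookie names are illustrative, not autobrr's actual code:

```go
package middleware

import "net/http"

// requireSession is a hypothetical sketch of the behaviour described above:
// reject unauthenticated requests with 403 Forbidden (not 401, which would
// invalidate basic-auth credentials supplied by a proxy) and expire the
// stale session cookie in the same response.
func requireSession(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if !hasValidSession(r) {
			// Expire the cookie so the client starts from a clean state.
			http.SetCookie(w, &http.Cookie{
				Name:   "user_session", // assumed cookie name
				Value:  "",
				Path:   "/",
				MaxAge: -1, // delete immediately
			})
			http.Error(w, http.StatusText(http.StatusForbidden), http.StatusForbidden)
			return
		}
		next.ServeHTTP(w, r)
	})
}

// hasValidSession stands in for the real session store lookup.
func hasValidSession(r *http.Request) bool {
	c, err := r.Cookie("user_session")
	return err == nil && c.Value != ""
}
```

The web client then treats a 403 from the API as "not authenticated" and redirects to the login route, which appears to be what the `fix(web): api client handle 403` and `fix(auth): middleware and redirect to login` entries address.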

@@ -12,13 +12,11 @@ import (
"github.com/autobrr/autobrr/internal/domain"
"github.com/autobrr/autobrr/internal/logger"
"github.com/autobrr/autobrr/pkg/cmp"
"github.com/autobrr/autobrr/pkg/errors"
sq "github.com/Masterminds/squirrel"
"github.com/lib/pq"
"github.com/rs/zerolog"
"golang.org/x/exp/slices"
)
type FilterRepo struct {
@@ -245,25 +243,8 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
"f.max_leechers",
"f.created_at",
"f.updated_at",
"fe.id as external_id",
"fe.name",
"fe.idx",
"fe.type",
"fe.enabled",
"fe.exec_cmd",
"fe.exec_args",
"fe.exec_expect_status",
"fe.webhook_host",
"fe.webhook_method",
"fe.webhook_data",
"fe.webhook_headers",
"fe.webhook_expect_status",
"fe.webhook_retry_status",
"fe.webhook_retry_attempts",
"fe.webhook_retry_delay_seconds",
).
From("filter f").
LeftJoin("filter_external fe ON f.id = fe.filter_id").
Where(sq.Eq{"f.id": filterID})
query, args, err := queryBuilder.ToSql()
@@ -271,176 +252,132 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
return nil, errors.Wrap(err, "error building query")
}
rows, err := r.db.handler.QueryContext(ctx, query, args...)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
row := r.db.handler.QueryRowContext(ctx, query, args...)
if row.Err() != nil {
if errors.Is(row.Err(), sql.ErrNoRows) {
return nil, domain.ErrRecordNotFound
}
return nil, errors.Wrap(err, "error executing query")
return nil, errors.Wrap(row.Err(), "error row")
}
var f domain.Filter
externalMap := make(map[int]domain.FilterExternal)
// filter
var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString
var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
var delay, maxDownloads, logScore sql.NullInt32
for rows.Next() {
// filter
var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString
var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
var delay, maxDownloads, logScore sql.NullInt32
// filter external
var extName, extType, extExecCmd, extExecArgs, extWebhookHost, extWebhookMethod, extWebhookHeaders, extWebhookData, extWebhookRetryStatus sql.NullString
var extId, extIndex, extWebhookStatus, extWebhookRetryAttempts, extWebhookDelaySeconds, extExecStatus sql.NullInt32
var extEnabled sql.NullBool
if err := rows.Scan(
&f.ID,
&f.Enabled,
&f.Name,
&minSize,
&maxSize,
&delay,
&f.Priority,
&maxDownloads,
&maxDownloadsUnit,
&matchReleases,
&exceptReleases,
&useRegex,
&matchReleaseGroups,
&exceptReleaseGroups,
&matchReleaseTags,
&exceptReleaseTags,
&f.UseRegexReleaseTags,
&matchDescription,
&exceptDescription,
&f.UseRegexDescription,
&scene,
&freeleech,
&freeleechPercent,
&f.SmartEpisode,
&shows,
&seasons,
&episodes,
pq.Array(&f.Resolutions),
pq.Array(&f.Codecs),
pq.Array(&f.Sources),
pq.Array(&f.Containers),
pq.Array(&f.MatchHDR),
pq.Array(&f.ExceptHDR),
pq.Array(&f.MatchOther),
pq.Array(&f.ExceptOther),
&years,
&artists,
&albums,
pq.Array(&f.MatchReleaseTypes),
pq.Array(&f.Formats),
pq.Array(&f.Quality),
pq.Array(&f.Media),
&logScore,
&hasLog,
&hasCue,
&perfectFlac,
&matchCategories,
&exceptCategories,
&matchUploaders,
&exceptUploaders,
pq.Array(&f.MatchLanguage),
pq.Array(&f.ExceptLanguage),
&tags,
&exceptTags,
&tagsMatchLogic,
&exceptTagsMatchLogic,
pq.Array(&f.Origins),
pq.Array(&f.ExceptOrigins),
&f.MinSeeders,
&f.MaxSeeders,
&f.MinLeechers,
&f.MaxLeechers,
&f.CreatedAt,
&f.UpdatedAt,
&extId,
&extName,
&extIndex,
&extType,
&extEnabled,
&extExecCmd,
&extExecArgs,
&extExecStatus,
&extWebhookHost,
&extWebhookMethod,
&extWebhookData,
&extWebhookHeaders,
&extWebhookStatus,
&extWebhookRetryStatus,
&extWebhookRetryAttempts,
&extWebhookDelaySeconds,
); err != nil {
return nil, errors.Wrap(err, "error scanning row")
err = row.Scan(
&f.ID,
&f.Enabled,
&f.Name,
&minSize,
&maxSize,
&delay,
&f.Priority,
&maxDownloads,
&maxDownloadsUnit,
&matchReleases,
&exceptReleases,
&useRegex,
&matchReleaseGroups,
&exceptReleaseGroups,
&matchReleaseTags,
&exceptReleaseTags,
&f.UseRegexReleaseTags,
&matchDescription,
&exceptDescription,
&f.UseRegexDescription,
&scene,
&freeleech,
&freeleechPercent,
&f.SmartEpisode,
&shows,
&seasons,
&episodes,
pq.Array(&f.Resolutions),
pq.Array(&f.Codecs),
pq.Array(&f.Sources),
pq.Array(&f.Containers),
pq.Array(&f.MatchHDR),
pq.Array(&f.ExceptHDR),
pq.Array(&f.MatchOther),
pq.Array(&f.ExceptOther),
&years,
&artists,
&albums,
pq.Array(&f.MatchReleaseTypes),
pq.Array(&f.Formats),
pq.Array(&f.Quality),
pq.Array(&f.Media),
&logScore,
&hasLog,
&hasCue,
&perfectFlac,
&matchCategories,
&exceptCategories,
&matchUploaders,
&exceptUploaders,
pq.Array(&f.MatchLanguage),
pq.Array(&f.ExceptLanguage),
&tags,
&exceptTags,
&tagsMatchLogic,
&exceptTagsMatchLogic,
pq.Array(&f.Origins),
pq.Array(&f.ExceptOrigins),
&f.MinSeeders,
&f.MaxSeeders,
&f.MinLeechers,
&f.MaxLeechers,
&f.CreatedAt,
&f.UpdatedAt,
)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
return nil, domain.ErrRecordNotFound
}
f.MinSize = minSize.String
f.MaxSize = maxSize.String
f.Delay = int(delay.Int32)
f.MaxDownloads = int(maxDownloads.Int32)
f.MaxDownloadsUnit = domain.FilterMaxDownloadsUnit(maxDownloadsUnit.String)
f.MatchReleases = matchReleases.String
f.ExceptReleases = exceptReleases.String
f.MatchReleaseGroups = matchReleaseGroups.String
f.ExceptReleaseGroups = exceptReleaseGroups.String
f.MatchReleaseTags = matchReleaseTags.String
f.ExceptReleaseTags = exceptReleaseTags.String
f.MatchDescription = matchDescription.String
f.ExceptDescription = exceptDescription.String
f.FreeleechPercent = freeleechPercent.String
f.Shows = shows.String
f.Seasons = seasons.String
f.Episodes = episodes.String
f.Years = years.String
f.Artists = artists.String
f.Albums = albums.String
f.LogScore = int(logScore.Int32)
f.Log = hasLog.Bool
f.Cue = hasCue.Bool
f.PerfectFlac = perfectFlac.Bool
f.MatchCategories = matchCategories.String
f.ExceptCategories = exceptCategories.String
f.MatchUploaders = matchUploaders.String
f.ExceptUploaders = exceptUploaders.String
f.Tags = tags.String
f.ExceptTags = exceptTags.String
f.TagsMatchLogic = tagsMatchLogic.String
f.ExceptTagsMatchLogic = exceptTagsMatchLogic.String
f.UseRegex = useRegex.Bool
f.Scene = scene.Bool
f.Freeleech = freeleech.Bool
if extId.Valid {
external := domain.FilterExternal{
ID: int(extId.Int32),
Name: extName.String,
Index: int(extIndex.Int32),
Type: domain.FilterExternalType(extType.String),
Enabled: extEnabled.Bool,
ExecCmd: extExecCmd.String,
ExecArgs: extExecArgs.String,
ExecExpectStatus: int(extExecStatus.Int32),
WebhookHost: extWebhookHost.String,
WebhookMethod: extWebhookMethod.String,
WebhookData: extWebhookData.String,
WebhookHeaders: extWebhookHeaders.String,
WebhookExpectStatus: int(extWebhookStatus.Int32),
WebhookRetryStatus: extWebhookRetryStatus.String,
WebhookRetryAttempts: int(extWebhookRetryAttempts.Int32),
WebhookRetryDelaySeconds: int(extWebhookDelaySeconds.Int32),
}
externalMap[external.ID] = external
}
return nil, errors.Wrap(err, "error scanning row")
}
for _, external := range externalMap {
f.External = append(f.External, external)
}
f.MinSize = minSize.String
f.MaxSize = maxSize.String
f.Delay = int(delay.Int32)
f.MaxDownloads = int(maxDownloads.Int32)
f.MaxDownloadsUnit = domain.FilterMaxDownloadsUnit(maxDownloadsUnit.String)
f.MatchReleases = matchReleases.String
f.ExceptReleases = exceptReleases.String
f.MatchReleaseGroups = matchReleaseGroups.String
f.ExceptReleaseGroups = exceptReleaseGroups.String
f.MatchReleaseTags = matchReleaseTags.String
f.ExceptReleaseTags = exceptReleaseTags.String
f.MatchDescription = matchDescription.String
f.ExceptDescription = exceptDescription.String
f.FreeleechPercent = freeleechPercent.String
f.Shows = shows.String
f.Seasons = seasons.String
f.Episodes = episodes.String
f.Years = years.String
f.Artists = artists.String
f.Albums = albums.String
f.LogScore = int(logScore.Int32)
f.Log = hasLog.Bool
f.Cue = hasCue.Bool
f.PerfectFlac = perfectFlac.Bool
f.MatchCategories = matchCategories.String
f.ExceptCategories = exceptCategories.String
f.MatchUploaders = matchUploaders.String
f.ExceptUploaders = exceptUploaders.String
f.Tags = tags.String
f.ExceptTags = exceptTags.String
f.TagsMatchLogic = tagsMatchLogic.String
f.ExceptTagsMatchLogic = exceptTagsMatchLogic.String
f.UseRegex = useRegex.Bool
f.Scene = scene.Bool
f.Freeleech = freeleech.Bool
return &f, nil
}
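The FindByID changes above replace the QueryContext/rows.Next loop with a single QueryRowContext call, drop the filter_external join from the query, and map sql.ErrNoRows to domain.ErrRecordNotFound so callers can tell "missing record" apart from "query failed". The same pattern in isolation, as a minimal sketch against a made-up table rather than autobrr's schema:

```go
package store

import (
	"context"
	"database/sql"
	"errors"
	"fmt"
)

// ErrRecordNotFound mirrors domain.ErrRecordNotFound from the diff above.
var ErrRecordNotFound = errors.New("record not found")

// findNameByID scans a single row with QueryRowContext and translates
// sql.ErrNoRows into a domain-level error that callers can check with
// errors.Is, instead of returning an empty struct and a nil error.
func findNameByID(ctx context.Context, dbh *sql.DB, id int) (string, error) {
	var name string
	err := dbh.QueryRowContext(ctx, "SELECT name FROM example WHERE id = $1", id).Scan(&name)
	if err != nil {
		if errors.Is(err, sql.ErrNoRows) {
			return "", ErrRecordNotFound
		}
		return "", fmt.Errorf("error scanning row: %w", err)
	}
	return name, nil
}
```

The updated filter_test.go further down asserts exactly this: a deleted or unknown filter ID now yields ErrRecordNotFound and a nil filter instead of an empty struct.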
@@ -517,28 +454,10 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
"f.max_leechers",
"f.created_at",
"f.updated_at",
"fe.id as external_id",
"fe.name",
"fe.idx",
"fe.type",
"fe.enabled",
"fe.exec_cmd",
"fe.exec_args",
"fe.exec_expect_status",
"fe.webhook_host",
"fe.webhook_method",
"fe.webhook_data",
"fe.webhook_headers",
"fe.webhook_expect_status",
"fe.webhook_retry_status",
"fe.webhook_retry_attempts",
"fe.webhook_retry_delay_seconds",
"fe.filter_id",
).
From("filter f").
Join("filter_indexer fi ON f.id = fi.filter_id").
Join("indexer i ON i.id = fi.indexer_id").
LeftJoin("filter_external fe ON f.id = fe.filter_id").
Where(sq.Eq{"i.identifier": indexer}).
Where(sq.Eq{"i.enabled": true}).
Where(sq.Eq{"f.enabled": true}).
@@ -556,7 +475,7 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
defer rows.Close()
filtersMap := make(map[int]*domain.Filter)
var filters []*domain.Filter
for rows.Next() {
var f domain.Filter
@@ -565,12 +484,7 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool
var delay, maxDownloads, logScore sql.NullInt32
// filter external
var extName, extType, extExecCmd, extExecArgs, extWebhookHost, extWebhookMethod, extWebhookHeaders, extWebhookData, extWebhookRetryStatus sql.NullString
var extId, extIndex, extWebhookStatus, extWebhookRetryAttempts, extWebhookDelaySeconds, extExecStatus, extFilterId sql.NullInt32
var extEnabled sql.NullBool
if err := rows.Scan(
err := rows.Scan(
&f.ID,
&f.Enabled,
&f.Name,
@@ -635,108 +549,52 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string
&f.MaxLeechers,
&f.CreatedAt,
&f.UpdatedAt,
&extId,
&extName,
&extIndex,
&extType,
&extEnabled,
&extExecCmd,
&extExecArgs,
&extExecStatus,
&extWebhookHost,
&extWebhookMethod,
&extWebhookData,
&extWebhookHeaders,
&extWebhookStatus,
&extWebhookRetryStatus,
&extWebhookRetryAttempts,
&extWebhookDelaySeconds,
&extFilterId,
); err != nil {
)
if err != nil {
return nil, errors.Wrap(err, "error scanning row")
}
filter, ok := filtersMap[f.ID]
if !ok {
f.MinSize = minSize.String
f.MaxSize = maxSize.String
f.Delay = int(delay.Int32)
f.MaxDownloads = int(maxDownloads.Int32)
f.MaxDownloadsUnit = domain.FilterMaxDownloadsUnit(maxDownloadsUnit.String)
f.MatchReleases = matchReleases.String
f.ExceptReleases = exceptReleases.String
f.MatchReleaseGroups = matchReleaseGroups.String
f.ExceptReleaseGroups = exceptReleaseGroups.String
f.MatchReleaseTags = matchReleaseTags.String
f.ExceptReleaseTags = exceptReleaseTags.String
f.MatchDescription = matchDescription.String
f.ExceptDescription = exceptDescription.String
f.FreeleechPercent = freeleechPercent.String
f.Shows = shows.String
f.Seasons = seasons.String
f.Episodes = episodes.String
f.Years = years.String
f.Artists = artists.String
f.Albums = albums.String
f.LogScore = int(logScore.Int32)
f.Log = hasLog.Bool
f.Cue = hasCue.Bool
f.PerfectFlac = perfectFlac.Bool
f.MatchCategories = matchCategories.String
f.ExceptCategories = exceptCategories.String
f.MatchUploaders = matchUploaders.String
f.ExceptUploaders = exceptUploaders.String
f.Tags = tags.String
f.ExceptTags = exceptTags.String
f.TagsMatchLogic = tagsMatchLogic.String
f.ExceptTagsMatchLogic = exceptTagsMatchLogic.String
f.UseRegex = useRegex.Bool
f.Scene = scene.Bool
f.Freeleech = freeleech.Bool
f.MinSize = minSize.String
f.MaxSize = maxSize.String
f.Delay = int(delay.Int32)
f.MaxDownloads = int(maxDownloads.Int32)
f.MaxDownloadsUnit = domain.FilterMaxDownloadsUnit(maxDownloadsUnit.String)
f.MatchReleases = matchReleases.String
f.ExceptReleases = exceptReleases.String
f.MatchReleaseGroups = matchReleaseGroups.String
f.ExceptReleaseGroups = exceptReleaseGroups.String
f.MatchReleaseTags = matchReleaseTags.String
f.ExceptReleaseTags = exceptReleaseTags.String
f.MatchDescription = matchDescription.String
f.ExceptDescription = exceptDescription.String
f.FreeleechPercent = freeleechPercent.String
f.Shows = shows.String
f.Seasons = seasons.String
f.Episodes = episodes.String
f.Years = years.String
f.Artists = artists.String
f.Albums = albums.String
f.LogScore = int(logScore.Int32)
f.Log = hasLog.Bool
f.Cue = hasCue.Bool
f.PerfectFlac = perfectFlac.Bool
f.MatchCategories = matchCategories.String
f.ExceptCategories = exceptCategories.String
f.MatchUploaders = matchUploaders.String
f.ExceptUploaders = exceptUploaders.String
f.Tags = tags.String
f.ExceptTags = exceptTags.String
f.TagsMatchLogic = tagsMatchLogic.String
f.ExceptTagsMatchLogic = exceptTagsMatchLogic.String
f.UseRegex = useRegex.Bool
f.Scene = scene.Bool
f.Freeleech = freeleech.Bool
f.Rejections = []string{}
f.Rejections = []string{}
filter = &f
filtersMap[f.ID] = filter
}
if extId.Valid {
external := domain.FilterExternal{
ID: int(extId.Int32),
Name: extName.String,
Index: int(extIndex.Int32),
Type: domain.FilterExternalType(extType.String),
Enabled: extEnabled.Bool,
ExecCmd: extExecCmd.String,
ExecArgs: extExecArgs.String,
ExecExpectStatus: int(extExecStatus.Int32),
WebhookHost: extWebhookHost.String,
WebhookMethod: extWebhookMethod.String,
WebhookData: extWebhookData.String,
WebhookHeaders: extWebhookHeaders.String,
WebhookExpectStatus: int(extWebhookStatus.Int32),
WebhookRetryStatus: extWebhookRetryStatus.String,
WebhookRetryAttempts: int(extWebhookRetryAttempts.Int32),
WebhookRetryDelaySeconds: int(extWebhookDelaySeconds.Int32),
FilterId: int(extFilterId.Int32),
}
filter.External = append(filter.External, external)
}
filters = append(filters, &f)
}
var filters []*domain.Filter
for _, filter := range filtersMap {
filter := filter
filters = append(filters, filter)
}
// the filterMap messes up the order, so we need to sort the filters slice
slices.SortStableFunc(filters, func(a, b *domain.Filter) int {
// TODO remove with Go 1.21 and use std lib cmp
return cmp.Compare(b.Priority, a.Priority)
})
return filters, nil
}
@@ -1232,39 +1090,6 @@ func (r *FilterRepo) UpdatePartial(ctx context.Context, filter domain.FilterUpda
if filter.ExceptOrigins != nil {
q = q.Set("except_origins", pq.Array(filter.ExceptOrigins))
}
if filter.ExternalScriptEnabled != nil {
q = q.Set("external_script_enabled", filter.ExternalScriptEnabled)
}
if filter.ExternalScriptCmd != nil {
q = q.Set("external_script_cmd", filter.ExternalScriptCmd)
}
if filter.ExternalScriptArgs != nil {
q = q.Set("external_script_args", filter.ExternalScriptArgs)
}
if filter.ExternalScriptExpectStatus != nil {
q = q.Set("external_script_expect_status", filter.ExternalScriptExpectStatus)
}
if filter.ExternalWebhookEnabled != nil {
q = q.Set("external_webhook_enabled", filter.ExternalWebhookEnabled)
}
if filter.ExternalWebhookHost != nil {
q = q.Set("external_webhook_host", filter.ExternalWebhookHost)
}
if filter.ExternalWebhookData != nil {
q = q.Set("external_webhook_data", filter.ExternalWebhookData)
}
if filter.ExternalWebhookExpectStatus != nil {
q = q.Set("external_webhook_expect_status", filter.ExternalWebhookExpectStatus)
}
if filter.ExternalWebhookRetryStatus != nil {
q = q.Set("external_webhook_retry_status", filter.ExternalWebhookRetryStatus)
}
if filter.ExternalWebhookRetryAttempts != nil {
q = q.Set("external_webhook_retry_attempts", filter.ExternalWebhookRetryAttempts)
}
if filter.ExternalWebhookRetryDelaySeconds != nil {
q = q.Set("external_webhook_retry_delay_seconds", filter.ExternalWebhookRetryDelaySeconds)
}
if filter.MinSeeders != nil {
q = q.Set("min_seeders", filter.MinSeeders)
}
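The UpdatePartial hunk above drops the legacy external_script_*/external_webhook_* SET clauses; the remaining fields keep the same squirrel pattern, where a column is only updated when the corresponding pointer field is non-nil. A reduced sketch of that pattern with made-up fields, not the full domain.FilterUpdate struct:

```go
package store

import (
	sq "github.com/Masterminds/squirrel"
)

// filterUpdate stands in for domain.FilterUpdate: optional fields are
// pointers, so "not provided" (nil) stays distinct from "set to zero".
type filterUpdate struct {
	MinSeeders *int
	MaxSeeders *int
}

// buildPartialUpdate adds a SET clause only for fields that were provided,
// mirroring the conditional q.Set(...) calls in UpdatePartial above.
func buildPartialUpdate(id int, u filterUpdate) (string, []interface{}, error) {
	q := sq.Update("filter").Where(sq.Eq{"id": id})
	if u.MinSeeders != nil {
		q = q.Set("min_seeders", u.MinSeeders)
	}
	if u.MaxSeeders != nil {
		q = q.Set("max_seeders", u.MaxSeeders)
	}
	return q.PlaceholderFormat(sq.Dollar).ToSql()
}
```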

@@ -205,11 +205,10 @@ func TestFilterRepo_Delete(t *testing.T) {
err = repo.Delete(context.Background(), createdFilters[0].ID)
assert.NoError(t, err)
// Verify that the filter is deleted
// Verify that the filter is deleted and that FindByID returns ErrRecordNotFound
filter, err := repo.FindByID(context.Background(), createdFilters[0].ID)
assert.NoError(t, err)
assert.NotNil(t, filter)
assert.Equal(t, 0, filter.ID)
assert.ErrorIs(t, err, domain.ErrRecordNotFound)
assert.Nil(t, filter)
})
t.Run(fmt.Sprintf("Delete_Fails_No_Record [%s]", dbType), func(t *testing.T) {
@@ -451,12 +450,11 @@ func TestFilterRepo_FindByID(t *testing.T) {
_ = repo.Delete(context.Background(), createdFilters[0].ID)
})
// TODO: This should succeed, but it fails because we are not handling the error correctly. Fix this.
t.Run(fmt.Sprintf("FindByID_Fails_Invalid_ID [%s]", dbType), func(t *testing.T) {
// Test using an invalid ID
filter, err := repo.FindByID(context.Background(), -1)
assert.NoError(t, err) // should return an error
assert.NotNil(t, filter) // should be nil
assert.ErrorIs(t, err, domain.ErrRecordNotFound) // should return an error
assert.Nil(t, filter) // should be nil
})
}