From dde0d0ed61b4918c8684583a22b95f0aff936d62 Mon Sep 17 00:00:00 2001 From: ze0s <43699394+zze0s@users.noreply.github.com> Date: Tue, 15 Aug 2023 23:07:39 +0200 Subject: [PATCH] feat(filters): add support for multiple external filters (#1030) * feat(filters): add support for multiple ext filters * refactor(filters): crud and check * feat(filters): add postgres migrations * fix(filters): field array types * fix(filters): formatting * fix(filters): formatting * feat(filters): external webhook improve logs --- internal/action/service.go | 5 + internal/database/filter.go | 713 +++++++++++++++++++------- internal/database/postgres_migrate.go | 76 ++- internal/database/release.go | 9 +- internal/database/sqlite_migrate.go | 195 ++++++- internal/domain/error.go | 10 + internal/domain/filter.go | 183 ++++--- internal/domain/release.go | 2 +- internal/filter/service.go | 211 +++++--- internal/http/filter.go | 8 +- internal/release/service.go | 44 +- web/src/domain/constants.ts | 18 + web/src/screens/filters/Details.tsx | 164 ++---- web/src/screens/filters/External.tsx | 333 ++++++++++++ web/src/types/Filter.d.ts | 21 + 15 files changed, 1514 insertions(+), 478 deletions(-) create mode 100644 internal/domain/error.go create mode 100644 web/src/screens/filters/External.tsx diff --git a/internal/action/service.go b/internal/action/service.go index 867b27d..a97c54d 100644 --- a/internal/action/service.go +++ b/internal/action/service.go @@ -20,6 +20,7 @@ type Service interface { Store(ctx context.Context, action domain.Action) (*domain.Action, error) List(ctx context.Context) ([]domain.Action, error) Get(ctx context.Context, req *domain.GetActionRequest) (*domain.Action, error) + FindByFilterID(ctx context.Context, filterID int) ([]*domain.Action, error) Delete(ctx context.Context, req *domain.DeleteActionRequest) error DeleteByFilterID(ctx context.Context, filterID int) error ToggleEnabled(actionID int) error @@ -75,6 +76,10 @@ func (s *service) Get(ctx context.Context, req 
*domain.GetActionRequest) (*domai return a, nil } +func (s *service) FindByFilterID(ctx context.Context, filterID int) ([]*domain.Action, error) { + return s.repo.FindByFilterID(ctx, filterID) +} + func (s *service) Delete(ctx context.Context, req *domain.DeleteActionRequest) error { return s.repo.Delete(ctx, req) } diff --git a/internal/database/filter.go b/internal/database/filter.go index 34bec5b..0b84b16 100644 --- a/internal/database/filter.go +++ b/internal/database/filter.go @@ -163,6 +163,7 @@ func (r *FilterRepo) ListFilters(ctx context.Context) ([]domain.Filter, error) { filters = append(filters, f) } + if err := rows.Err(); err != nil { return nil, errors.Wrap(err, "row error") } @@ -173,142 +174,249 @@ func (r *FilterRepo) ListFilters(ctx context.Context) ([]domain.Filter, error) { func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter, error) { queryBuilder := r.db.squirrel. Select( - "id", - "enabled", - "name", - "min_size", - "max_size", - "delay", - "priority", - "max_downloads", - "max_downloads_unit", - "match_releases", - "except_releases", - "use_regex", - "match_release_groups", - "except_release_groups", - "match_release_tags", - "except_release_tags", - "use_regex_release_tags", - "match_description", - "except_description", - "use_regex_description", - "scene", - "freeleech", - "freeleech_percent", - "smart_episode", - "shows", - "seasons", - "episodes", - "resolutions", - "codecs", - "sources", - "containers", - "match_hdr", - "except_hdr", - "match_other", - "except_other", - "years", - "artists", - "albums", - "release_types_match", - "formats", - "quality", - "media", - "log_score", - "has_log", - "has_cue", - "perfect_flac", - "match_categories", - "except_categories", - "match_uploaders", - "except_uploaders", - "match_language", - "except_language", - "tags", - "except_tags", - "tags_match_logic", - "except_tags_match_logic", - "origins", - "except_origins", - "external_script_enabled", - 
"external_script_cmd", - "external_script_args", - "external_script_expect_status", - "external_webhook_enabled", - "external_webhook_host", - "external_webhook_data", - "external_webhook_expect_status", - "created_at", - "updated_at", + "f.id", + "f.enabled", + "f.name", + "f.min_size", + "f.max_size", + "f.delay", + "f.priority", + "f.max_downloads", + "f.max_downloads_unit", + "f.match_releases", + "f.except_releases", + "f.use_regex", + "f.match_release_groups", + "f.except_release_groups", + "f.match_release_tags", + "f.except_release_tags", + "f.use_regex_release_tags", + "f.match_description", + "f.except_description", + "f.use_regex_description", + "f.scene", + "f.freeleech", + "f.freeleech_percent", + "f.smart_episode", + "f.shows", + "f.seasons", + "f.episodes", + "f.resolutions", + "f.codecs", + "f.sources", + "f.containers", + "f.match_hdr", + "f.except_hdr", + "f.match_other", + "f.except_other", + "f.years", + "f.artists", + "f.albums", + "f.release_types_match", + "f.formats", + "f.quality", + "f.media", + "f.log_score", + "f.has_log", + "f.has_cue", + "f.perfect_flac", + "f.match_categories", + "f.except_categories", + "f.match_uploaders", + "f.except_uploaders", + "f.match_language", + "f.except_language", + "f.tags", + "f.except_tags", + "f.tags_match_logic", + "f.except_tags_match_logic", + "f.origins", + "f.except_origins", + "f.created_at", + "f.updated_at", + "fe.id as external_id", + "fe.name", + "fe.idx", + "fe.type", + "fe.enabled", + "fe.exec_cmd", + "fe.exec_args", + "fe.exec_expect_status", + "fe.webhook_host", + "fe.webhook_method", + "fe.webhook_data", + "fe.webhook_headers", + "fe.webhook_expect_status", ). - From("filter"). - Where(sq.Eq{"id": filterID}) + From("filter f"). + LeftJoin("filter_external fe ON f.id = fe.filter_id"). + Where(sq.Eq{"f.id": filterID}) query, args, err := queryBuilder.ToSql() if err != nil { return nil, errors.Wrap(err, "error building query") } - row := r.db.handler.QueryRowContext(ctx, query, args...) 
- if err := row.Err(); err != nil { + rows, err := r.db.handler.QueryContext(ctx, query, args...) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, domain.ErrRecordNotFound + } return nil, errors.Wrap(err, "error executing query") } var f domain.Filter - var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic, extScriptCmd, extScriptArgs, extWebhookHost, extWebhookData sql.NullString - var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac, extScriptEnabled, extWebhookEnabled sql.NullBool - var delay, maxDownloads, logScore, extWebhookStatus, extScriptStatus sql.NullInt32 - if err := row.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &maxDownloads, &maxDownloadsUnit, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &matchReleaseTags, &exceptReleaseTags, &f.UseRegexReleaseTags, &matchDescription, &exceptDescription, &f.UseRegexDescription, &scene, &freeleech, &freeleechPercent, &f.SmartEpisode, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), pq.Array(&f.MatchOther), pq.Array(&f.ExceptOther), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, pq.Array(&f.MatchLanguage), pq.Array(&f.ExceptLanguage), &tags, &exceptTags, &tagsMatchLogic, &exceptTagsMatchLogic, pq.Array(&f.Origins), pq.Array(&f.ExceptOrigins), &extScriptEnabled, &extScriptCmd, &extScriptArgs, 
&extScriptStatus, &extWebhookEnabled, &extWebhookHost, &extWebhookData, &extWebhookStatus, &f.CreatedAt, &f.UpdatedAt); err != nil { - return nil, errors.Wrap(err, "error scanning row") + externalMap := make(map[int]domain.FilterExternal) + + for rows.Next() { + // filter + var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString + var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool + var delay, maxDownloads, logScore sql.NullInt32 + + // filter external + var extName, extType, extExecCmd, extExecArgs, extWebhookHost, extWebhookMethod, extWebhookHeaders, extWebhookData sql.NullString + var extId, extIndex, extWebhookStatus, extExecStatus sql.NullInt32 + var extEnabled sql.NullBool + + if err := rows.Scan( + &f.ID, + &f.Enabled, + &f.Name, + &minSize, + &maxSize, + &delay, + &f.Priority, + &maxDownloads, + &maxDownloadsUnit, + &matchReleases, + &exceptReleases, + &useRegex, + &matchReleaseGroups, + &exceptReleaseGroups, + &matchReleaseTags, + &exceptReleaseTags, + &f.UseRegexReleaseTags, + &matchDescription, + &exceptDescription, + &f.UseRegexDescription, + &scene, + &freeleech, + &freeleechPercent, + &f.SmartEpisode, + &shows, + &seasons, + &episodes, + pq.Array(&f.Resolutions), + pq.Array(&f.Codecs), + pq.Array(&f.Sources), + pq.Array(&f.Containers), + pq.Array(&f.MatchHDR), + pq.Array(&f.ExceptHDR), + pq.Array(&f.MatchOther), + pq.Array(&f.ExceptOther), + &years, + &artists, + &albums, + pq.Array(&f.MatchReleaseTypes), + pq.Array(&f.Formats), + pq.Array(&f.Quality), + pq.Array(&f.Media), + &logScore, + &hasLog, + &hasCue, + &perfectFlac, + &matchCategories, + &exceptCategories, + &matchUploaders, + 
&exceptUploaders, + pq.Array(&f.MatchLanguage), + pq.Array(&f.ExceptLanguage), + &tags, + &exceptTags, + &tagsMatchLogic, + &exceptTagsMatchLogic, + pq.Array(&f.Origins), + pq.Array(&f.ExceptOrigins), + &f.CreatedAt, + &f.UpdatedAt, + &extId, + &extName, + &extIndex, + &extType, + &extEnabled, + &extExecCmd, + &extExecArgs, + &extExecStatus, + &extWebhookHost, + &extWebhookMethod, + &extWebhookData, + &extWebhookHeaders, + &extWebhookStatus, + ); err != nil { + return nil, errors.Wrap(err, "error scanning row") + } + + f.MinSize = minSize.String + f.MaxSize = maxSize.String + f.Delay = int(delay.Int32) + f.MaxDownloads = int(maxDownloads.Int32) + f.MaxDownloadsUnit = domain.FilterMaxDownloadsUnit(maxDownloadsUnit.String) + f.MatchReleases = matchReleases.String + f.ExceptReleases = exceptReleases.String + f.MatchReleaseGroups = matchReleaseGroups.String + f.ExceptReleaseGroups = exceptReleaseGroups.String + f.MatchReleaseTags = matchReleaseTags.String + f.ExceptReleaseTags = exceptReleaseTags.String + f.MatchDescription = matchDescription.String + f.ExceptDescription = exceptDescription.String + f.FreeleechPercent = freeleechPercent.String + f.Shows = shows.String + f.Seasons = seasons.String + f.Episodes = episodes.String + f.Years = years.String + f.Artists = artists.String + f.Albums = albums.String + f.LogScore = int(logScore.Int32) + f.Log = hasLog.Bool + f.Cue = hasCue.Bool + f.PerfectFlac = perfectFlac.Bool + f.MatchCategories = matchCategories.String + f.ExceptCategories = exceptCategories.String + f.MatchUploaders = matchUploaders.String + f.ExceptUploaders = exceptUploaders.String + f.Tags = tags.String + f.ExceptTags = exceptTags.String + f.TagsMatchLogic = tagsMatchLogic.String + f.ExceptTagsMatchLogic = exceptTagsMatchLogic.String + f.UseRegex = useRegex.Bool + f.Scene = scene.Bool + f.Freeleech = freeleech.Bool + + if extId.Valid { + external := domain.FilterExternal{ + ID: int(extId.Int32), + Name: extName.String, + Index: int(extIndex.Int32), + 
Type: domain.FilterExternalType(extType.String), + Enabled: extEnabled.Bool, + ExecCmd: extExecCmd.String, + ExecArgs: extExecArgs.String, + ExecExpectStatus: int(extExecStatus.Int32), + WebhookHost: extWebhookHost.String, + WebhookMethod: extWebhookMethod.String, + WebhookData: extWebhookData.String, + WebhookHeaders: extWebhookHeaders.String, + WebhookExpectStatus: int(extWebhookStatus.Int32), + } + externalMap[external.ID] = external + } } - f.MinSize = minSize.String - f.MaxSize = maxSize.String - f.Delay = int(delay.Int32) - f.MaxDownloads = int(maxDownloads.Int32) - f.MaxDownloadsUnit = domain.FilterMaxDownloadsUnit(maxDownloadsUnit.String) - f.MatchReleases = matchReleases.String - f.ExceptReleases = exceptReleases.String - f.MatchReleaseGroups = matchReleaseGroups.String - f.ExceptReleaseGroups = exceptReleaseGroups.String - f.MatchReleaseTags = matchReleaseTags.String - f.ExceptReleaseTags = exceptReleaseTags.String - f.MatchDescription = matchDescription.String - f.ExceptDescription = exceptDescription.String - f.FreeleechPercent = freeleechPercent.String - f.Shows = shows.String - f.Seasons = seasons.String - f.Episodes = episodes.String - f.Years = years.String - f.Artists = artists.String - f.Albums = albums.String - f.LogScore = int(logScore.Int32) - f.Log = hasLog.Bool - f.Cue = hasCue.Bool - f.PerfectFlac = perfectFlac.Bool - f.MatchCategories = matchCategories.String - f.ExceptCategories = exceptCategories.String - f.MatchUploaders = matchUploaders.String - f.ExceptUploaders = exceptUploaders.String - f.Tags = tags.String - f.ExceptTags = exceptTags.String - f.TagsMatchLogic = tagsMatchLogic.String - f.ExceptTagsMatchLogic = exceptTagsMatchLogic.String - f.UseRegex = useRegex.Bool - f.Scene = scene.Bool - f.Freeleech = freeleech.Bool - - f.ExternalScriptEnabled = extScriptEnabled.Bool - f.ExternalScriptCmd = extScriptCmd.String - f.ExternalScriptArgs = extScriptArgs.String - f.ExternalScriptExpectStatus = int(extScriptStatus.Int32) - - 
f.ExternalWebhookEnabled = extWebhookEnabled.Bool - f.ExternalWebhookHost = extWebhookHost.String - f.ExternalWebhookData = extWebhookData.String - f.ExternalWebhookExpectStatus = int(extWebhookStatus.Int32) + for _, external := range externalMap { + f.External = append(f.External, external) + } return &f, nil } @@ -379,20 +487,27 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string "f.except_tags_match_logic", "f.origins", "f.except_origins", - "f.external_script_enabled", - "f.external_script_cmd", - "f.external_script_args", - "f.external_script_expect_status", - "f.external_webhook_enabled", - "f.external_webhook_host", - "f.external_webhook_data", - "f.external_webhook_expect_status", "f.created_at", "f.updated_at", + "fe.id as external_id", + "fe.name", + "fe.idx", + "fe.type", + "fe.enabled", + "fe.exec_cmd", + "fe.exec_args", + "fe.exec_expect_status", + "fe.webhook_host", + "fe.webhook_method", + "fe.webhook_data", + "fe.webhook_headers", + "fe.webhook_expect_status", + "fe.filter_id", ). From("filter f"). Join("filter_indexer fi ON f.id = fi.filter_id"). Join("indexer i ON i.id = fi.indexer_id"). + LeftJoin("filter_external fe ON f.id = fe.filter_id"). Where(sq.Eq{"i.identifier": indexer}). Where(sq.Eq{"i.enabled": true}). Where(sq.Eq{"f.enabled": true}). 
@@ -411,14 +526,97 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string defer rows.Close() var filters []domain.Filter + + externalMap := make(map[int][]domain.FilterExternal) + for rows.Next() { var f domain.Filter - var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic, extScriptCmd, extScriptArgs, extWebhookHost, extWebhookData sql.NullString - var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac, extScriptEnabled, extWebhookEnabled sql.NullBool - var delay, maxDownloads, logScore, extWebhookStatus, extScriptStatus sql.NullInt32 + var minSize, maxSize, maxDownloadsUnit, matchReleases, exceptReleases, matchReleaseGroups, exceptReleaseGroups, matchReleaseTags, exceptReleaseTags, matchDescription, exceptDescription, freeleechPercent, shows, seasons, episodes, years, artists, albums, matchCategories, exceptCategories, matchUploaders, exceptUploaders, tags, exceptTags, tagsMatchLogic, exceptTagsMatchLogic sql.NullString + var useRegex, scene, freeleech, hasLog, hasCue, perfectFlac sql.NullBool + var delay, maxDownloads, logScore sql.NullInt32 - if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &maxDownloads, &maxDownloadsUnit, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &matchReleaseTags, &exceptReleaseTags, &f.UseRegexReleaseTags, &matchDescription, &exceptDescription, &f.UseRegexDescription, &scene, &freeleech, &freeleechPercent, &f.SmartEpisode, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), 
pq.Array(&f.MatchOther), pq.Array(&f.ExceptOther), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, pq.Array(&f.MatchLanguage), pq.Array(&f.ExceptLanguage), &tags, &exceptTags, &tagsMatchLogic, &exceptTagsMatchLogic, pq.Array(&f.Origins), pq.Array(&f.ExceptOrigins), &extScriptEnabled, &extScriptCmd, &extScriptArgs, &extScriptStatus, &extWebhookEnabled, &extWebhookHost, &extWebhookData, &extWebhookStatus, &f.CreatedAt, &f.UpdatedAt); err != nil { + // filter external + var extName, extType, extExecCmd, extExecArgs, extWebhookHost, extWebhookMethod, extWebhookHeaders, extWebhookData sql.NullString + var extId, extIndex, extWebhookStatus, extExecStatus, extFilterId sql.NullInt32 + var extEnabled sql.NullBool + + if err := rows.Scan( + &f.ID, + &f.Enabled, + &f.Name, + &minSize, + &maxSize, + &delay, + &f.Priority, + &maxDownloads, + &maxDownloadsUnit, + &matchReleases, + &exceptReleases, + &useRegex, + &matchReleaseGroups, + &exceptReleaseGroups, + &matchReleaseTags, + &exceptReleaseTags, + &f.UseRegexReleaseTags, + &matchDescription, + &exceptDescription, + &f.UseRegexDescription, + &scene, + &freeleech, + &freeleechPercent, + &f.SmartEpisode, + &shows, + &seasons, + &episodes, + pq.Array(&f.Resolutions), + pq.Array(&f.Codecs), + pq.Array(&f.Sources), + pq.Array(&f.Containers), + pq.Array(&f.MatchHDR), + pq.Array(&f.ExceptHDR), + pq.Array(&f.MatchOther), + pq.Array(&f.ExceptOther), + &years, + &artists, + &albums, + pq.Array(&f.MatchReleaseTypes), + pq.Array(&f.Formats), + pq.Array(&f.Quality), + pq.Array(&f.Media), + &logScore, + &hasLog, + &hasCue, + &perfectFlac, + &matchCategories, + &exceptCategories, + &matchUploaders, + &exceptUploaders, + pq.Array(&f.MatchLanguage), + pq.Array(&f.ExceptLanguage), + &tags, + &exceptTags, + &tagsMatchLogic, + &exceptTagsMatchLogic, + 
pq.Array(&f.Origins), + pq.Array(&f.ExceptOrigins), + &f.CreatedAt, + &f.UpdatedAt, + &extId, + &extName, + &extIndex, + &extType, + &extEnabled, + &extExecCmd, + &extExecArgs, + &extExecStatus, + &extWebhookHost, + &extWebhookMethod, + &extWebhookData, + &extWebhookHeaders, + &extWebhookStatus, + &extFilterId, + ); err != nil { return nil, errors.Wrap(err, "error scanning row") } @@ -458,22 +656,119 @@ func (r *FilterRepo) findByIndexerIdentifier(ctx context.Context, indexer string f.Scene = scene.Bool f.Freeleech = freeleech.Bool - f.ExternalScriptEnabled = extScriptEnabled.Bool - f.ExternalScriptCmd = extScriptCmd.String - f.ExternalScriptArgs = extScriptArgs.String - f.ExternalScriptExpectStatus = int(extScriptStatus.Int32) - - f.ExternalWebhookEnabled = extWebhookEnabled.Bool - f.ExternalWebhookHost = extWebhookHost.String - f.ExternalWebhookData = extWebhookData.String - f.ExternalWebhookExpectStatus = int(extWebhookStatus.Int32) + if extId.Valid { + external := domain.FilterExternal{ + ID: int(extId.Int32), + Name: extName.String, + Index: int(extIndex.Int32), + Type: domain.FilterExternalType(extType.String), + Enabled: extEnabled.Bool, + ExecCmd: extExecCmd.String, + ExecArgs: extExecArgs.String, + ExecExpectStatus: int(extExecStatus.Int32), + WebhookHost: extWebhookHost.String, + WebhookMethod: extWebhookMethod.String, + WebhookData: extWebhookData.String, + WebhookHeaders: extWebhookHeaders.String, + WebhookExpectStatus: int(extWebhookStatus.Int32), + FilterId: int(extFilterId.Int32), + } + externalMap[external.FilterId] = append(externalMap[external.FilterId], external) + } filters = append(filters, f) } + for i, filter := range filters { + v, ok := externalMap[filter.ID] + if !ok { + continue + } + + filter.External = v + + filters[i] = filter + } + return filters, nil } +func (r *FilterRepo) FindExternalFiltersByID(ctx context.Context, filterId int) ([]domain.FilterExternal, error) { + queryBuilder := r.db.squirrel. 
+ Select( + "fe.id", + "fe.name", + "fe.idx", + "fe.type", + "fe.enabled", + "fe.exec_cmd", + "fe.exec_args", + "fe.exec_expect_status", + "fe.webhook_host", + "fe.webhook_method", + "fe.webhook_data", + "fe.webhook_headers", + "fe.webhook_expect_status", + ). + From("filter_external fe"). + Where(sq.Eq{"fe.filter_id": filterId}) + + query, args, err := queryBuilder.ToSql() + if err != nil { + return nil, errors.Wrap(err, "error building query") + } + + rows, err := r.db.handler.QueryContext(ctx, query, args...) + if err != nil { + if errors.Is(err, sql.ErrNoRows) { + return nil, domain.ErrRecordNotFound + } + return nil, errors.Wrap(err, "error executing query") + } + + var externalFilters []domain.FilterExternal + + for rows.Next() { + var external domain.FilterExternal + + // filter external + var extExecCmd, extExecArgs, extWebhookHost, extWebhookMethod, extWebhookHeaders, extWebhookData sql.NullString + var extWebhookStatus, extExecStatus sql.NullInt32 + + if err := rows.Scan( + &external.ID, + &external.Name, + &external.Index, + &external.Type, + &external.Enabled, + &extExecCmd, + &extExecArgs, + &extExecStatus, + &extWebhookHost, + &extWebhookMethod, + &extWebhookData, + &extWebhookHeaders, + &extWebhookStatus, + ); err != nil { + return nil, errors.Wrap(err, "error scanning row") + } + + external.ExecCmd = extExecCmd.String + external.ExecArgs = extExecArgs.String + external.ExecExpectStatus = int(extExecStatus.Int32) + + external.WebhookHost = extWebhookHost.String + external.WebhookMethod = extWebhookMethod.String + external.WebhookData = extWebhookData.String + external.WebhookHeaders = extWebhookHeaders.String + external.WebhookExpectStatus = int(extWebhookStatus.Int32) + + externalFilters = append(externalFilters, external) + } + + return externalFilters, nil +} + func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.Filter, error) { queryBuilder := r.db.squirrel. Insert("filter"). 
@@ -535,14 +830,6 @@ func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.F "perfect_flac", "origins", "except_origins", - "external_script_enabled", - "external_script_cmd", - "external_script_args", - "external_script_expect_status", - "external_webhook_enabled", - "external_webhook_host", - "external_webhook_data", - "external_webhook_expect_status", ). Values( filter.Name, @@ -602,22 +889,13 @@ func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.F filter.PerfectFlac, pq.Array(filter.Origins), pq.Array(filter.ExceptOrigins), - filter.ExternalScriptEnabled, - filter.ExternalScriptCmd, - filter.ExternalScriptArgs, - filter.ExternalScriptExpectStatus, - filter.ExternalWebhookEnabled, - filter.ExternalWebhookHost, - filter.ExternalWebhookData, - filter.ExternalWebhookExpectStatus, ). Suffix("RETURNING id").RunWith(r.db.handler) // return values var retID int - err := queryBuilder.QueryRowContext(ctx).Scan(&retID) - if err != nil { + if err := queryBuilder.QueryRowContext(ctx).Scan(&retID); err != nil { return nil, errors.Wrap(err, "error executing query") } @@ -688,14 +966,6 @@ func (r *FilterRepo) Update(ctx context.Context, filter domain.Filter) (*domain. Set("perfect_flac", filter.PerfectFlac). Set("origins", pq.Array(filter.Origins)). Set("except_origins", pq.Array(filter.ExceptOrigins)). - Set("external_script_enabled", filter.ExternalScriptEnabled). - Set("external_script_cmd", filter.ExternalScriptCmd). - Set("external_script_args", filter.ExternalScriptArgs). - Set("external_script_expect_status", filter.ExternalScriptExpectStatus). - Set("external_webhook_enabled", filter.ExternalWebhookEnabled). - Set("external_webhook_host", filter.ExternalWebhookHost). - Set("external_webhook_data", filter.ExternalWebhookData). - Set("external_webhook_expect_status", filter.ExternalWebhookExpectStatus). Set("updated_at", time.Now().Format(time.RFC3339)). 
Where(sq.Eq{"id": filter.ID}) @@ -950,6 +1220,7 @@ func (r *FilterRepo) ToggleEnabled(ctx context.Context, filterID int, enabled bo if err != nil { return errors.Wrap(err, "error building query") } + _, err = r.db.handler.ExecContext(ctx, query, args...) if err != nil { return errors.Wrap(err, "error executing query") @@ -979,27 +1250,28 @@ func (r *FilterRepo) StoreIndexerConnections(ctx context.Context, filterID int, return errors.Wrap(err, "error executing query") } + queryBuilder := r.db.squirrel. + Insert("filter_indexer").Columns("filter_id", "indexer_id") + for _, indexer := range indexers { - queryBuilder := r.db.squirrel. - Insert("filter_indexer").Columns("filter_id", "indexer_id"). - Values(filterID, indexer.ID) + queryBuilder = queryBuilder.Values(filterID, indexer.ID) + } - query, args, err := queryBuilder.ToSql() - if err != nil { - return errors.Wrap(err, "error building query") - } - _, err = tx.ExecContext(ctx, query, args...) - if err != nil { - return errors.Wrap(err, "error executing query") - } + query, args, err := queryBuilder.ToSql() + if err != nil { + return errors.Wrap(err, "error building query") + } - r.log.Debug().Msgf("filter.StoreIndexerConnections: store '%v' on filter: %v", indexer.Name, filterID) + if _, err = tx.ExecContext(ctx, query, args...); err != nil { + return errors.Wrap(err, "error executing query") } if err := tx.Commit(); err != nil { - return errors.Wrap(err, "error store indexers for filter: %v", filterID) + return errors.Wrap(err, "error store indexers for filter: %d", filterID) } + r.log.Debug().Msgf("filter.StoreIndexerConnections: indexers on filter: %d", filterID) + return nil } @@ -1116,3 +1388,80 @@ WHERE (release_action_status.status = 'PUSH_APPROVED' OR release_action_status.s return &f, nil } + +func (r *FilterRepo) StoreFilterExternal(ctx context.Context, filterID int, externalFilters []domain.FilterExternal) error { + tx, err := r.db.handler.BeginTx(ctx, nil) + if err != nil { + return err + } + + defer 
tx.Rollback() + + deleteQueryBuilder := r.db.squirrel. + Delete("filter_external"). + Where(sq.Eq{"filter_id": filterID}) + + deleteQuery, deleteArgs, err := deleteQueryBuilder.ToSql() + if err != nil { + return errors.Wrap(err, "error building query") + } + + _, err = tx.ExecContext(ctx, deleteQuery, deleteArgs...) + if err != nil { + return errors.Wrap(err, "error executing query") + } + + qb := r.db.squirrel. + Insert("filter_external"). + Columns( + "name", + "idx", + "type", + "enabled", + "exec_cmd", + "exec_args", + "exec_expect_status", + "webhook_host", + "webhook_method", + "webhook_data", + "webhook_headers", + "webhook_expect_status", + "filter_id", + ) + + for _, external := range externalFilters { + qb = qb.Values( + external.Name, + external.Index, + external.Type, + external.Enabled, + toNullString(external.ExecCmd), + toNullString(external.ExecArgs), + toNullInt32(int32(external.ExecExpectStatus)), + toNullString(external.WebhookHost), + toNullString(external.WebhookMethod), + toNullString(external.WebhookData), + toNullString(external.WebhookHeaders), + toNullInt32(int32(external.WebhookExpectStatus)), + filterID, + ) + } + + query, args, err := qb.ToSql() + if err != nil { + return errors.Wrap(err, "error building query") + } + + _, err = tx.ExecContext(ctx, query, args...) 
+ if err != nil { + return errors.Wrap(err, "error executing query") + } + + if err := tx.Commit(); err != nil { + return errors.Wrap(err, "error store external filters for filter: %d", filterID) + } + + r.log.Debug().Msgf("filter.StoreFilterExternal: store external filters on filter: %d", filterID) + + return nil +} diff --git a/internal/database/postgres_migrate.go b/internal/database/postgres_migrate.go index 52dd871..2eda0a4 100644 --- a/internal/database/postgres_migrate.go +++ b/internal/database/postgres_migrate.go @@ -127,18 +127,29 @@ CREATE TABLE filter except_tags_match_logic TEXT, origins TEXT [] DEFAULT '{}', except_origins TEXT [] DEFAULT '{}', - external_script_enabled BOOLEAN DEFAULT FALSE, - external_script_cmd TEXT, - external_script_args TEXT, - external_script_expect_status INTEGER, - external_webhook_enabled BOOLEAN DEFAULT FALSE, - external_webhook_host TEXT, - external_webhook_data TEXT, - external_webhook_expect_status INTEGER, created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ); +CREATE TABLE filter_external +( + id SERIAL PRIMARY KEY, + name TEXT NOT NULL, + idx INTEGER, + type TEXT, + enabled BOOLEAN, + exec_cmd TEXT, + exec_args TEXT, + exec_expect_status INTEGER, + webhook_host TEXT, + webhook_method TEXT, + webhook_data TEXT, + webhook_headers TEXT, + webhook_expect_status INTEGER, + filter_id INTEGER NOT NULL, + FOREIGN KEY (filter_id) REFERENCES filter(id) ON DELETE CASCADE +); + CREATE TABLE filter_indexer ( filter_id INTEGER, @@ -716,4 +727,53 @@ ALTER TABLE release_action_status ADD FOREIGN KEY (action_id) REFERENCES action ON DELETE SET NULL; `, + `CREATE TABLE filter_external + ( + id SERIAL PRIMARY KEY, + name TEXT NOT NULL, + idx INTEGER, + type TEXT, + enabled BOOLEAN, + exec_cmd TEXT, + exec_args TEXT, + exec_expect_status INTEGER, + webhook_host TEXT, + webhook_method TEXT, + webhook_data TEXT, + webhook_headers TEXT, + webhook_expect_status INTEGER, + filter_id INTEGER NOT NULL, 
+ FOREIGN KEY (filter_id) REFERENCES filter(id) ON DELETE CASCADE + ); + + INSERT INTO "filter_external" (name, type, enabled, exec_cmd, exec_args, exec_expect_status, filter_id) + SELECT 'exec', 'EXEC', external_script_enabled, external_script_cmd, external_script_args, external_script_expect_status, id FROM "filter" WHERE external_script_enabled = true; + + INSERT INTO "filter_external" (name, type, enabled, webhook_host, webhook_data, webhook_method, webhook_expect_status, filter_id) + SELECT 'webhook', 'WEBHOOK', external_webhook_enabled, external_webhook_host, external_webhook_data, 'POST', external_webhook_expect_status, id FROM "filter" WHERE external_webhook_enabled = true; + + ALTER TABLE filter + DROP COLUMN IF EXISTS external_script_enabled; + + ALTER TABLE filter + DROP COLUMN IF EXISTS external_script_cmd; + + ALTER TABLE filter + DROP COLUMN IF EXISTS external_script_args; + + ALTER TABLE filter + DROP COLUMN IF EXISTS external_script_expect_status; + + ALTER TABLE filter + DROP COLUMN IF EXISTS external_webhook_enabled; + + ALTER TABLE filter + DROP COLUMN IF EXISTS external_webhook_host; + + ALTER TABLE filter + DROP COLUMN IF EXISTS external_webhook_data; + + ALTER TABLE filter + DROP COLUMN IF EXISTS external_webhook_expect_status; + `, } diff --git a/internal/database/release.go b/internal/database/release.go index 3e7502b..a47cefd 100644 --- a/internal/database/release.go +++ b/internal/database/release.go @@ -32,7 +32,7 @@ func NewReleaseRepo(log logger.Logger, db *DB) domain.ReleaseRepo { } } -func (repo *ReleaseRepo) Store(ctx context.Context, r *domain.Release) (*domain.Release, error) { +func (repo *ReleaseRepo) Store(ctx context.Context, r *domain.Release) error { codecStr := strings.Join(r.Codec, ",") hdrStr := strings.Join(r.HDR, ",") @@ -45,16 +45,15 @@ func (repo *ReleaseRepo) Store(ctx context.Context, r *domain.Release) (*domain. 
// return values var retID int64 - err := queryBuilder.QueryRowContext(ctx).Scan(&retID) - if err != nil { - return nil, errors.Wrap(err, "error executing query") + if err := queryBuilder.QueryRowContext(ctx).Scan(&retID); err != nil { + return errors.Wrap(err, "error executing query") } r.ID = retID repo.log.Debug().Msgf("release.store: %+v", r) - return r, nil + return nil } func (repo *ReleaseRepo) StoreReleaseActionStatus(ctx context.Context, status *domain.ReleaseActionStatus) error { diff --git a/internal/database/sqlite_migrate.go b/internal/database/sqlite_migrate.go index 1ac7608..16d0184 100644 --- a/internal/database/sqlite_migrate.go +++ b/internal/database/sqlite_migrate.go @@ -127,18 +127,29 @@ CREATE TABLE filter except_tags_match_logic TEXT, origins TEXT [] DEFAULT '{}', except_origins TEXT [] DEFAULT '{}', - external_script_enabled BOOLEAN DEFAULT FALSE, - external_script_cmd TEXT, - external_script_args TEXT, - external_script_expect_status INTEGER, - external_webhook_enabled BOOLEAN DEFAULT FALSE, - external_webhook_host TEXT, - external_webhook_data TEXT, - external_webhook_expect_status INTEGER, created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ); +CREATE TABLE filter_external +( + id INTEGER PRIMARY KEY, + name TEXT NOT NULL, + idx INTEGER, + type TEXT, + enabled BOOLEAN, + exec_cmd TEXT, + exec_args TEXT, + exec_expect_status INTEGER, + webhook_host TEXT, + webhook_method TEXT, + webhook_data TEXT, + webhook_headers TEXT, + webhook_expect_status INTEGER, + filter_id INTEGER NOT NULL, + FOREIGN KEY (filter_id) REFERENCES filter(id) ON DELETE CASCADE +); + CREATE TABLE filter_indexer ( filter_id INTEGER, @@ -1148,4 +1159,172 @@ ADD COLUMN use_bouncer BOOLEAN DEFAULT FALSE; ALTER TABLE irc_network ADD COLUMN bouncer_addr TEXT;`, + `CREATE TABLE filter_external +( + id INTEGER PRIMARY KEY, + name TEXT NOT NULL, + idx INTEGER, + type TEXT, + enabled BOOLEAN, + exec_cmd TEXT, + exec_args TEXT, + 
exec_expect_status INTEGER, + webhook_host TEXT, + webhook_method TEXT, + webhook_data TEXT, + webhook_headers TEXT, + webhook_expect_status INTEGER, + filter_id INTEGER NOT NULL, + FOREIGN KEY (filter_id) REFERENCES filter(id) ON DELETE CASCADE +); + +INSERT INTO "filter_external" (name, type, enabled, exec_cmd, exec_args, exec_expect_status, filter_id) +SELECT 'exec', 'EXEC', external_script_enabled, external_script_cmd, external_script_args, external_script_expect_status, id FROM "filter" WHERE external_script_enabled = true; + +INSERT INTO "filter_external" (name, type, enabled, webhook_host, webhook_data, webhook_method, webhook_expect_status, filter_id) +SELECT 'webhook', 'WEBHOOK', external_webhook_enabled, external_webhook_host, external_webhook_data, 'POST', external_webhook_expect_status, id FROM "filter" WHERE external_webhook_enabled = true; + +create table filter_dg_tmp +( + id INTEGER primary key, + enabled BOOLEAN, + name TEXT not null, + min_size TEXT, + max_size TEXT, + delay INTEGER, + match_releases TEXT, + except_releases TEXT, + use_regex BOOLEAN, + match_release_groups TEXT, + except_release_groups TEXT, + scene BOOLEAN, + freeleech BOOLEAN, + freeleech_percent TEXT, + shows TEXT, + seasons TEXT, + episodes TEXT, + resolutions TEXT default '{}' not null, + codecs TEXT default '{}' not null, + sources TEXT default '{}' not null, + containers TEXT default '{}' not null, + match_hdr TEXT default '{}', + except_hdr TEXT default '{}', + years TEXT, + artists TEXT, + albums TEXT, + release_types_match TEXT default '{}', + release_types_ignore TEXT default '{}', + formats TEXT default '{}', + quality TEXT default '{}', + media TEXT default '{}', + log_score INTEGER, + has_log BOOLEAN, + has_cue BOOLEAN, + perfect_flac BOOLEAN, + match_categories TEXT, + except_categories TEXT, + match_uploaders TEXT, + except_uploaders TEXT, + tags TEXT, + except_tags TEXT, + created_at TIMESTAMP default CURRENT_TIMESTAMP, + updated_at TIMESTAMP default 
CURRENT_TIMESTAMP, + priority INTEGER default 0 not null, + origins TEXT default '{}', + match_other TEXT default '{}', + except_other TEXT default '{}', + max_downloads INTEGER default 0, + max_downloads_unit TEXT, + except_origins TEXT default '{}', + match_release_tags TEXT, + except_release_tags TEXT, + use_regex_release_tags BOOLEAN default FALSE, + smart_episode BOOLEAN default false, + match_language TEXT default '{}', + except_language TEXT default '{}', + tags_match_logic TEXT, + except_tags_match_logic TEXT, + match_description TEXT, + except_description TEXT, + use_regex_description BOOLEAN default FALSE +); + +insert into filter_dg_tmp(id, enabled, name, min_size, max_size, delay, match_releases, except_releases, use_regex, + match_release_groups, except_release_groups, scene, freeleech, freeleech_percent, shows, + seasons, episodes, resolutions, codecs, sources, containers, match_hdr, except_hdr, years, + artists, albums, release_types_match, release_types_ignore, formats, quality, media, + log_score, has_log, has_cue, perfect_flac, match_categories, except_categories, + match_uploaders, except_uploaders, tags, except_tags, created_at, updated_at, priority, + origins, match_other, except_other, max_downloads, max_downloads_unit, except_origins, + match_release_tags, except_release_tags, use_regex_release_tags, smart_episode, + match_language, except_language, tags_match_logic, except_tags_match_logic, match_description, + except_description, use_regex_description) +select id, + enabled, + name, + min_size, + max_size, + delay, + match_releases, + except_releases, + use_regex, + match_release_groups, + except_release_groups, + scene, + freeleech, + freeleech_percent, + shows, + seasons, + episodes, + resolutions, + codecs, + sources, + containers, + match_hdr, + except_hdr, + years, + artists, + albums, + release_types_match, + release_types_ignore, + formats, + quality, + media, + log_score, + has_log, + has_cue, + perfect_flac, + match_categories, + 
except_categories, + match_uploaders, + except_uploaders, + tags, + except_tags, + created_at, + updated_at, + priority, + origins, + match_other, + except_other, + max_downloads, + max_downloads_unit, + except_origins, + match_release_tags, + except_release_tags, + use_regex_release_tags, + smart_episode, + match_language, + except_language, + tags_match_logic, + except_tags_match_logic, + match_description, + except_description, + use_regex_description +from filter; + +drop table filter; + +alter table filter_dg_tmp + rename to filter; +`, } diff --git a/internal/domain/error.go b/internal/domain/error.go new file mode 100644 index 0000000..9f76bbd --- /dev/null +++ b/internal/domain/error.go @@ -0,0 +1,10 @@ +// Copyright (c) 2021 - 2023, Ludvig Lundgren and the autobrr contributors. +// SPDX-License-Identifier: GPL-2.0-or-later + +package domain + +import "database/sql" + +var ( + ErrRecordNotFound = sql.ErrNoRows +) diff --git a/internal/domain/filter.go b/internal/domain/filter.go index 3e02a78..230f537 100644 --- a/internal/domain/filter.go +++ b/internal/domain/filter.go @@ -21,10 +21,11 @@ https://autodl-community.github.io/autodl-irssi/configuration/filter/ */ type FilterRepo interface { + ListFilters(ctx context.Context) ([]Filter, error) + Find(ctx context.Context, params FilterQueryParams) ([]Filter, error) FindByID(ctx context.Context, filterID int) (*Filter, error) FindByIndexerIdentifier(ctx context.Context, indexer string) ([]Filter, error) - Find(ctx context.Context, params FilterQueryParams) ([]Filter, error) - ListFilters(ctx context.Context) ([]Filter, error) + FindExternalFiltersByID(ctx context.Context, filterId int) ([]FilterExternal, error) Store(ctx context.Context, filter Filter) (*Filter, error) Update(ctx context.Context, filter Filter) (*Filter, error) UpdatePartial(ctx context.Context, filter FilterUpdate) error @@ -32,6 +33,7 @@ type FilterRepo interface { Delete(ctx context.Context, filterID int) error StoreIndexerConnection(ctx 
context.Context, filterID int, indexerID int) error StoreIndexerConnections(ctx context.Context, filterID int, indexers []Indexer) error + StoreFilterExternal(ctx context.Context, filterID int, externalFilters []FilterExternal) error DeleteIndexerConnections(ctx context.Context, filterID int) error GetDownloadsByFilterId(ctx context.Context, filterID int) (*FilterDownloads, error) } @@ -63,84 +65,109 @@ type FilterQueryParams struct { } type Filter struct { - ID int `json:"id"` - Name string `json:"name"` - Enabled bool `json:"enabled"` - CreatedAt time.Time `json:"created_at"` - UpdatedAt time.Time `json:"updated_at"` - MinSize string `json:"min_size,omitempty"` - MaxSize string `json:"max_size,omitempty"` - Delay int `json:"delay,omitempty"` - Priority int32 `json:"priority"` - MaxDownloads int `json:"max_downloads,omitempty"` - MaxDownloadsUnit FilterMaxDownloadsUnit `json:"max_downloads_unit,omitempty"` - MatchReleases string `json:"match_releases,omitempty"` - ExceptReleases string `json:"except_releases,omitempty"` - UseRegex bool `json:"use_regex,omitempty"` - MatchReleaseGroups string `json:"match_release_groups,omitempty"` - ExceptReleaseGroups string `json:"except_release_groups,omitempty"` - Scene bool `json:"scene,omitempty"` - Origins []string `json:"origins,omitempty"` - ExceptOrigins []string `json:"except_origins,omitempty"` - Bonus []string `json:"bonus,omitempty"` - Freeleech bool `json:"freeleech,omitempty"` - FreeleechPercent string `json:"freeleech_percent,omitempty"` - SmartEpisode bool `json:"smart_episode"` - Shows string `json:"shows,omitempty"` - Seasons string `json:"seasons,omitempty"` - Episodes string `json:"episodes,omitempty"` - Resolutions []string `json:"resolutions,omitempty"` // SD, 480i, 480p, 576p, 720p, 810p, 1080i, 1080p. - Codecs []string `json:"codecs,omitempty"` // XviD, DivX, x264, h.264 (or h264), mpeg2 (or mpeg-2), VC-1 (or VC1), WMV, Remux, h.264 Remux (or h264 Remux), VC-1 Remux (or VC1 Remux). 
- Sources []string `json:"sources,omitempty"` // DSR, PDTV, HDTV, HR.PDTV, HR.HDTV, DVDRip, DVDScr, BDr, BD5, BD9, BDRip, BRRip, DVDR, MDVDR, HDDVD, HDDVDRip, BluRay, WEB-DL, TVRip, CAM, R5, TELESYNC, TS, TELECINE, TC. TELESYNC and TS are synonyms (you don't need both). Same for TELECINE and TC - Containers []string `json:"containers,omitempty"` - MatchHDR []string `json:"match_hdr,omitempty"` - ExceptHDR []string `json:"except_hdr,omitempty"` - MatchOther []string `json:"match_other,omitempty"` - ExceptOther []string `json:"except_other,omitempty"` - Years string `json:"years,omitempty"` - Artists string `json:"artists,omitempty"` - Albums string `json:"albums,omitempty"` - MatchReleaseTypes []string `json:"match_release_types,omitempty"` // Album,Single,EP - ExceptReleaseTypes string `json:"except_release_types,omitempty"` - Formats []string `json:"formats,omitempty"` // MP3, FLAC, Ogg, AAC, AC3, DTS - Quality []string `json:"quality,omitempty"` // 192, 320, APS (VBR), V2 (VBR), V1 (VBR), APX (VBR), V0 (VBR), q8.x (VBR), Lossless, 24bit Lossless, Other - Media []string `json:"media,omitempty"` // CD, DVD, Vinyl, Soundboard, SACD, DAT, Cassette, WEB, Other - PerfectFlac bool `json:"perfect_flac,omitempty"` - Cue bool `json:"cue,omitempty"` - Log bool `json:"log,omitempty"` - LogScore int `json:"log_score,omitempty"` - MatchCategories string `json:"match_categories,omitempty"` - ExceptCategories string `json:"except_categories,omitempty"` - MatchUploaders string `json:"match_uploaders,omitempty"` - ExceptUploaders string `json:"except_uploaders,omitempty"` - MatchLanguage []string `json:"match_language,omitempty"` - ExceptLanguage []string `json:"except_language,omitempty"` - Tags string `json:"tags,omitempty"` - ExceptTags string `json:"except_tags,omitempty"` - TagsAny string `json:"tags_any,omitempty"` - ExceptTagsAny string `json:"except_tags_any,omitempty"` - TagsMatchLogic string `json:"tags_match_logic,omitempty"` - ExceptTagsMatchLogic string 
`json:"except_tags_match_logic,omitempty"` - MatchReleaseTags string `json:"match_release_tags,omitempty"` - ExceptReleaseTags string `json:"except_release_tags,omitempty"` - UseRegexReleaseTags bool `json:"use_regex_release_tags,omitempty"` - MatchDescription string `json:"match_description,omitempty"` - ExceptDescription string `json:"except_description,omitempty"` - UseRegexDescription bool `json:"use_regex_description,omitempty"` - ExternalScriptEnabled bool `json:"external_script_enabled,omitempty"` - ExternalScriptCmd string `json:"external_script_cmd,omitempty"` - ExternalScriptArgs string `json:"external_script_args,omitempty"` - ExternalScriptExpectStatus int `json:"external_script_expect_status,omitempty"` - ExternalWebhookEnabled bool `json:"external_webhook_enabled,omitempty"` - ExternalWebhookHost string `json:"external_webhook_host,omitempty"` - ExternalWebhookData string `json:"external_webhook_data,omitempty"` - ExternalWebhookExpectStatus int `json:"external_webhook_expect_status,omitempty"` - ActionsCount int `json:"actions_count"` - Actions []*Action `json:"actions,omitempty"` - Indexers []Indexer `json:"indexers"` - Downloads *FilterDownloads `json:"-"` + ID int `json:"id"` + Name string `json:"name"` + Enabled bool `json:"enabled"` + CreatedAt time.Time `json:"created_at"` + UpdatedAt time.Time `json:"updated_at"` + MinSize string `json:"min_size,omitempty"` + MaxSize string `json:"max_size,omitempty"` + Delay int `json:"delay,omitempty"` + Priority int32 `json:"priority"` + MaxDownloads int `json:"max_downloads,omitempty"` + MaxDownloadsUnit FilterMaxDownloadsUnit `json:"max_downloads_unit,omitempty"` + MatchReleases string `json:"match_releases,omitempty"` + ExceptReleases string `json:"except_releases,omitempty"` + UseRegex bool `json:"use_regex,omitempty"` + MatchReleaseGroups string `json:"match_release_groups,omitempty"` + ExceptReleaseGroups string `json:"except_release_groups,omitempty"` + Scene bool `json:"scene,omitempty"` + Origins 
[]string `json:"origins,omitempty"` + ExceptOrigins []string `json:"except_origins,omitempty"` + Bonus []string `json:"bonus,omitempty"` + Freeleech bool `json:"freeleech,omitempty"` + FreeleechPercent string `json:"freeleech_percent,omitempty"` + SmartEpisode bool `json:"smart_episode"` + Shows string `json:"shows,omitempty"` + Seasons string `json:"seasons,omitempty"` + Episodes string `json:"episodes,omitempty"` + Resolutions []string `json:"resolutions,omitempty"` // SD, 480i, 480p, 576p, 720p, 810p, 1080i, 1080p. + Codecs []string `json:"codecs,omitempty"` // XviD, DivX, x264, h.264 (or h264), mpeg2 (or mpeg-2), VC-1 (or VC1), WMV, Remux, h.264 Remux (or h264 Remux), VC-1 Remux (or VC1 Remux). + Sources []string `json:"sources,omitempty"` // DSR, PDTV, HDTV, HR.PDTV, HR.HDTV, DVDRip, DVDScr, BDr, BD5, BD9, BDRip, BRRip, DVDR, MDVDR, HDDVD, HDDVDRip, BluRay, WEB-DL, TVRip, CAM, R5, TELESYNC, TS, TELECINE, TC. TELESYNC and TS are synonyms (you don't need both). Same for TELECINE and TC + Containers []string `json:"containers,omitempty"` + MatchHDR []string `json:"match_hdr,omitempty"` + ExceptHDR []string `json:"except_hdr,omitempty"` + MatchOther []string `json:"match_other,omitempty"` + ExceptOther []string `json:"except_other,omitempty"` + Years string `json:"years,omitempty"` + Artists string `json:"artists,omitempty"` + Albums string `json:"albums,omitempty"` + MatchReleaseTypes []string `json:"match_release_types,omitempty"` // Album,Single,EP + ExceptReleaseTypes string `json:"except_release_types,omitempty"` + Formats []string `json:"formats,omitempty"` // MP3, FLAC, Ogg, AAC, AC3, DTS + Quality []string `json:"quality,omitempty"` // 192, 320, APS (VBR), V2 (VBR), V1 (VBR), APX (VBR), V0 (VBR), q8.x (VBR), Lossless, 24bit Lossless, Other + Media []string `json:"media,omitempty"` // CD, DVD, Vinyl, Soundboard, SACD, DAT, Cassette, WEB, Other + PerfectFlac bool `json:"perfect_flac,omitempty"` + Cue bool `json:"cue,omitempty"` + Log bool 
`json:"log,omitempty"` + LogScore int `json:"log_score,omitempty"` + MatchCategories string `json:"match_categories,omitempty"` + ExceptCategories string `json:"except_categories,omitempty"` + MatchUploaders string `json:"match_uploaders,omitempty"` + ExceptUploaders string `json:"except_uploaders,omitempty"` + MatchLanguage []string `json:"match_language,omitempty"` + ExceptLanguage []string `json:"except_language,omitempty"` + Tags string `json:"tags,omitempty"` + ExceptTags string `json:"except_tags,omitempty"` + TagsAny string `json:"tags_any,omitempty"` + ExceptTagsAny string `json:"except_tags_any,omitempty"` + TagsMatchLogic string `json:"tags_match_logic,omitempty"` + ExceptTagsMatchLogic string `json:"except_tags_match_logic,omitempty"` + MatchReleaseTags string `json:"match_release_tags,omitempty"` + ExceptReleaseTags string `json:"except_release_tags,omitempty"` + UseRegexReleaseTags bool `json:"use_regex_release_tags,omitempty"` + MatchDescription string `json:"match_description,omitempty"` + ExceptDescription string `json:"except_description,omitempty"` + UseRegexDescription bool `json:"use_regex_description,omitempty"` + //ExternalScriptEnabled bool `json:"external_script_enabled,omitempty"` + //ExternalScriptCmd string `json:"external_script_cmd,omitempty"` + //ExternalScriptArgs string `json:"external_script_args,omitempty"` + //ExternalScriptExpectStatus int `json:"external_script_expect_status,omitempty"` + //ExternalWebhookEnabled bool `json:"external_webhook_enabled,omitempty"` + //ExternalWebhookHost string `json:"external_webhook_host,omitempty"` + //ExternalWebhookData string `json:"external_webhook_data,omitempty"` + //ExternalWebhookExpectStatus int `json:"external_webhook_expect_status,omitempty"` + ActionsCount int `json:"actions_count"` + Actions []*Action `json:"actions,omitempty"` + External []FilterExternal `json:"external,omitempty"` + Indexers []Indexer `json:"indexers"` + Downloads *FilterDownloads `json:"-"` } +type FilterExternal 
struct { + ID int `json:"id"` + Name string `json:"name"` + Index int `json:"index"` + Type FilterExternalType `json:"type"` + Enabled bool `json:"enabled"` + ExecCmd string `json:"exec_cmd,omitempty"` + ExecArgs string `json:"exec_args,omitempty"` + ExecExpectStatus int `json:"exec_expect_status,omitempty"` + WebhookHost string `json:"webhook_host,omitempty"` + WebhookMethod string `json:"webhook_method,omitempty"` + WebhookData string `json:"webhook_data,omitempty"` + WebhookHeaders string `json:"webhook_headers,omitempty"` + WebhookExpectStatus int `json:"webhook_expect_status,omitempty"` + FilterId int `json:"-"` +} + +type FilterExternalType string + +const ( + ExternalFilterTypeExec FilterExternalType = "EXEC" + ExternalFilterTypeWebhook FilterExternalType = "WEBHOOK" +) + type FilterUpdate struct { ID int `json:"id"` Name *string `json:"name,omitempty"` diff --git a/internal/domain/release.go b/internal/domain/release.go index 615bc68..ed88ca6 100644 --- a/internal/domain/release.go +++ b/internal/domain/release.go @@ -29,7 +29,7 @@ import ( ) type ReleaseRepo interface { - Store(ctx context.Context, release *Release) (*Release, error) + Store(ctx context.Context, release *Release) error Find(ctx context.Context, params ReleaseQueryParams) (res []*Release, nextCursor int64, count int64, err error) FindRecent(ctx context.Context) ([]*Release, error) Get(ctx context.Context, req *GetReleaseRequest) (*Release, error) diff --git a/internal/filter/service.go b/internal/filter/service.go index 00ebced..f70a62c 100644 --- a/internal/filter/service.go +++ b/internal/filter/service.go @@ -8,9 +8,11 @@ import ( "context" "crypto/tls" "fmt" + "io" "net/http" "os" "os/exec" + "sort" "strings" "time" @@ -134,13 +136,9 @@ func (s *service) FindByID(ctx context.Context, filterID int) (*domain.Filter, e func (s *service) FindByIndexerIdentifier(ctx context.Context, indexer string) ([]domain.Filter, error) { // get filters for indexer - filters, err := 
s.repo.FindByIndexerIdentifier(ctx, indexer) - if err != nil { - s.log.Error().Err(err).Msgf("could not find filters for indexer: %v", indexer) - return nil, err - } - - return filters, nil + // we do not load actions here since we do not need it at this stage + // only load those after filter has matched + return s.repo.FindByIndexerIdentifier(ctx, indexer) } func (s *service) GetDownloadsByFilterId(ctx context.Context, filterID int) (*domain.FilterDownloads, error) { @@ -169,20 +167,26 @@ func (s *service) Update(ctx context.Context, filter domain.Filter) (*domain.Fil // update f, err := s.repo.Update(ctx, filter) if err != nil { - s.log.Error().Err(err).Msgf("could not update filter: %v", filter.Name) + s.log.Error().Err(err).Msgf("could not update filter: %s", filter.Name) return nil, err } // take care of connected indexers if err = s.repo.StoreIndexerConnections(ctx, f.ID, filter.Indexers); err != nil { - s.log.Error().Err(err).Msgf("could not store filter indexer connections: %v", filter.Name) + s.log.Error().Err(err).Msgf("could not store filter indexer connections: %s", filter.Name) + return nil, err + } + + // take care of connected external filters + if err = s.repo.StoreFilterExternal(ctx, f.ID, filter.External); err != nil { + s.log.Error().Err(err).Msgf("could not store external filters: %s", filter.Name) return nil, err } // take care of filter actions actions, err := s.actionRepo.StoreFilterActions(ctx, filter.Actions, int64(filter.ID)) if err != nil { - s.log.Error().Err(err).Msgf("could not store filter actions: %v", filter.Name) + s.log.Error().Err(err).Msgf("could not store filter actions: %s", filter.Name) return nil, err } @@ -313,8 +317,8 @@ func (s *service) Delete(ctx context.Context, filterID int) error { func (s *service) CheckFilter(ctx context.Context, f domain.Filter, release *domain.Release) (bool, error) { - s.log.Trace().Msgf("filter.Service.CheckFilter: checking filter: %v %+v", f.Name, f) - 
s.log.Trace().Msgf("filter.Service.CheckFilter: checking filter: %v for release: %+v", f.Name, release) + s.log.Trace().Msgf("filter.Service.CheckFilter: checking filter: %s %+v", f.Name, f) + s.log.Trace().Msgf("filter.Service.CheckFilter: checking filter: %s for release: %+v", f.Name, release) // do additional fetch to get download counts for filter if f.MaxDownloads > 0 { @@ -328,7 +332,7 @@ func (s *service) CheckFilter(ctx context.Context, f domain.Filter, release *dom rejections, matchedFilter := f.CheckFilter(release) if len(rejections) > 0 { - s.log.Debug().Msgf("filter.Service.CheckFilter: (%v) for release: %v rejections: (%v)", f.Name, release.TorrentName, release.RejectionsString(true)) + s.log.Debug().Msgf("filter.Service.CheckFilter: (%s) for release: %v rejections: (%s)", f.Name, release.TorrentName, release.RejectionsString(true)) return false, nil } @@ -350,7 +354,7 @@ func (s *service) CheckFilter(ctx context.Context, f domain.Filter, release *dom // if matched, do additional size check if needed, attach actions and return the filter - s.log.Debug().Msgf("filter.Service.CheckFilter: found and matched filter: %+v", f.Name) + s.log.Debug().Msgf("filter.Service.CheckFilter: found and matched filter: %s", f.Name) // Some indexers do not announce the size and if size (min,max) is set in a filter then it will need // additional size check. 
Some indexers have api implemented to fetch this data and for the others @@ -358,65 +362,34 @@ func (s *service) CheckFilter(ctx context.Context, f domain.Filter, release *dom // do additional size check against indexer api or download torrent for size check if release.AdditionalSizeCheckRequired { - s.log.Debug().Msgf("filter.Service.CheckFilter: (%v) additional size check required", f.Name) + s.log.Debug().Msgf("filter.Service.CheckFilter: (%s) additional size check required", f.Name) ok, err := s.AdditionalSizeCheck(ctx, f, release) if err != nil { - s.log.Error().Stack().Err(err).Msgf("filter.Service.CheckFilter: (%v) additional size check error", f.Name) + s.log.Error().Err(err).Msgf("filter.Service.CheckFilter: (%s) additional size check error", f.Name) return false, err } if !ok { - s.log.Trace().Msgf("filter.Service.CheckFilter: (%v) additional size check not matching what filter wanted", f.Name) + s.log.Trace().Msgf("filter.Service.CheckFilter: (%s) additional size check not matching what filter wanted", f.Name) return false, nil } } - // run external script - if f.ExternalScriptEnabled && f.ExternalScriptCmd != "" { - exitCode, err := s.execCmd(ctx, release, f.ExternalScriptCmd, f.ExternalScriptArgs) + // run external filters + if f.External != nil { + externalOk, err := s.RunExternalFilters(ctx, f.External, release) if err != nil { - s.log.Error().Err(err).Msgf("filter.Service.CheckFilter: error executing external command for filter: %+v", f.Name) + s.log.Error().Err(err).Msgf("filter.Service.CheckFilter: (%s) external filter check error", f.Name) return false, err } - if exitCode != f.ExternalScriptExpectStatus { - s.log.Trace().Msgf("filter.Service.CheckFilter: external script unexpected exit code. got: %v want: %v", exitCode, f.ExternalScriptExpectStatus) - release.AddRejectionF("external script unexpected exit code. 
got: %v want: %v", exitCode, f.ExternalScriptExpectStatus)
+		if !externalOk {
+			s.log.Trace().Msgf("filter.Service.CheckFilter: (%s) external filter check not matching what filter wanted", f.Name)
 			return false, nil
 		}
 	}
 
-	// run external webhook
-	if f.ExternalWebhookEnabled && f.ExternalWebhookHost != "" && f.ExternalWebhookData != "" {
-		// run external scripts
-		statusCode, err := s.webhook(ctx, release, f.ExternalWebhookHost, f.ExternalWebhookData)
-		if err != nil {
-			s.log.Error().Err(err).Msgf("filter.Service.CheckFilter: error executing external webhook for filter: %v", f.Name)
-			return false, err
-		}
-
-		if statusCode != f.ExternalWebhookExpectStatus {
-			s.log.Trace().Msgf("filter.Service.CheckFilter: external webhook unexpected status code. got: %v want: %v", statusCode, f.ExternalWebhookExpectStatus)
-			release.AddRejectionF("external webhook unexpected status code. got: %v want: %v", statusCode, f.ExternalWebhookExpectStatus)
-			return false, nil
-		}
-	}
-
-	// found matching filter, lets find the filter actions and attach
-	actions, err := s.actionRepo.FindByFilterID(ctx, f.ID)
-	if err != nil {
-		s.log.Error().Err(err).Msgf("filter.Service.CheckFilter: error finding actions for filter: %+v", f.Name)
-		return false, err
-	}
-
-	// if no actions, continue to next filter
-	if len(actions) == 0 {
-		s.log.Trace().Msgf("filter.Service.CheckFilter: no actions found for filter '%v', trying next one..", f.Name)
-		return false, nil
-	}
-
-	release.Filter.Actions = actions
-
 	return true, nil
 }
 
@@ -515,12 +488,64 @@ func (s *service) CanDownloadShow(ctx context.Context, release *domain.Release)
 	return s.releaseRepo.CanDownloadShow(ctx, release.Title, release.Season, release.Episode)
 }
 
-func (s *service) execCmd(ctx context.Context, release *domain.Release, cmd string, args string) (int, error) {
-	s.log.Debug().Msgf("filter exec release: %v", release.TorrentName)
+func (s *service) RunExternalFilters(ctx context.Context, externalFilters []domain.FilterExternal, release 
*domain.Release) (bool, error) { + var err error - if release.TorrentTmpFile == "" && strings.Contains(args, "TorrentPathName") { + defer func() { + // try recover panic if anything went wrong with the external filter checks + errors.RecoverPanic(recover(), &err) + }() + + // sort filters by index + sort.Slice(externalFilters, func(i, j int) bool { + return externalFilters[i].Index < externalFilters[j].Index + }) + + for _, external := range externalFilters { + if !external.Enabled { + s.log.Debug().Msgf("external filter %s not enabled, skipping...", external.Name) + + continue + } + + switch external.Type { + case domain.ExternalFilterTypeExec: + // run external script + exitCode, err := s.execCmd(ctx, external, release) + if err != nil { + return false, errors.Wrap(err, "error executing external command") + } + + if exitCode != external.ExecExpectStatus { + s.log.Trace().Msgf("filter.Service.CheckFilter: external script unexpected exit code. got: %d want: %d", exitCode, external.ExecExpectStatus) + release.AddRejectionF("external script unexpected exit code. got: %d want: %d", exitCode, external.ExecExpectStatus) + return false, nil + } + + case domain.ExternalFilterTypeWebhook: + // run external webhook + statusCode, err := s.webhook(ctx, external, release) + if err != nil { + return false, errors.Wrap(err, "error executing external webhook") + } + + if statusCode != external.WebhookExpectStatus { + s.log.Trace().Msgf("filter.Service.CheckFilter: external webhook unexpected status code. got: %d want: %d", statusCode, external.WebhookExpectStatus) + release.AddRejectionF("external webhook unexpected status code. 
got: %d want: %d", statusCode, external.WebhookExpectStatus)
+				return false, nil
+			}
+		}
+	}
+
+	return true, nil
+}
+
+func (s *service) execCmd(ctx context.Context, external domain.FilterExternal, release *domain.Release) (int, error) {
+	s.log.Trace().Msgf("filter exec release: %s", release.TorrentName)
+
+	if release.TorrentTmpFile == "" && strings.Contains(external.ExecArgs, "TorrentPathName") {
 		if err := release.DownloadTorrentFileCtx(ctx); err != nil {
-			return 0, errors.Wrap(err, "error downloading torrent file for release: %v", release.TorrentName)
+			return 0, errors.Wrap(err, "error downloading torrent file for release: %s", release.TorrentName)
 		}
 	}
 
@@ -528,23 +553,23 @@ func (s *service) execCmd(ctx context.Context, release *domain.Release, cmd stri
 	if len(release.TorrentDataRawBytes) == 0 && release.TorrentTmpFile != "" {
 		t, err := os.ReadFile(release.TorrentTmpFile)
 		if err != nil {
-			return 0, errors.Wrap(err, "could not read torrent file: %v", release.TorrentTmpFile)
+			return 0, errors.Wrap(err, "could not read torrent file: %s", release.TorrentTmpFile)
 		}
 
 		release.TorrentDataRawBytes = t
 	}
 
 	// check if program exists
-	cmd, err := exec.LookPath(cmd)
+	cmd, err := exec.LookPath(external.ExecCmd)
 	if err != nil {
-		return 0, errors.Wrap(err, "exec failed, could not find program: %v", cmd)
+		return 0, errors.Wrap(err, "exec failed, could not find program: %s", cmd)
 	}
 
 	// handle args and replace vars
 	m := domain.NewMacro(*release)
 
 	// parse and replace values in argument string before continuing
-	parsedArgs, err := m.Parse(args)
+	parsedArgs, err := m.Parse(external.ExecArgs)
 	if err != nil {
 		return 0, errors.Wrap(err, "could not parse macro")
 	}
@@ -565,29 +590,33 @@ func (s *service) execCmd(ctx context.Context, release *domain.Release, cmd stri
 	err = command.Run()
 	var exitErr *exec.ExitError
 	if errors.As(err, &exitErr) {
-		s.log.Debug().Msgf("filter script 
command exited with non zero code: %d", exitErr.ExitCode()) return exitErr.ExitCode(), nil } duration := time.Since(start) - s.log.Debug().Msgf("executed external script: (%v), args: (%v) for release: (%v) indexer: (%v) total time (%v)", cmd, args, release.TorrentName, release.Indexer, duration) + s.log.Debug().Msgf("executed external script: (%s), args: (%s) for release: (%s) indexer: (%s) total time (%s)", cmd, external.ExecArgs, release.TorrentName, release.Indexer, duration) return 0, nil } -func (s *service) webhook(ctx context.Context, release *domain.Release, url string, data string) (int, error) { - s.log.Debug().Msgf("preparing to run external webhook filter to: (%s) payload: (%s)", url, data) +func (s *service) webhook(ctx context.Context, external domain.FilterExternal, release *domain.Release) (int, error) { + s.log.Trace().Msgf("preparing to run external webhook filter to: (%s) payload: (%s)", external.WebhookHost, external.WebhookData) + + if external.WebhookHost == "" { + return 0, errors.New("external filter: missing host for webhook") + } // if webhook data contains TorrentPathName or TorrentDataRawBytes, lets download the torrent file - if release.TorrentTmpFile == "" && (strings.Contains(data, "TorrentPathName") || strings.Contains(data, "TorrentDataRawBytes")) { + if release.TorrentTmpFile == "" && (strings.Contains(external.WebhookData, "TorrentPathName") || strings.Contains(external.WebhookData, "TorrentDataRawBytes")) { if err := release.DownloadTorrentFileCtx(ctx); err != nil { return 0, errors.Wrap(err, "webhook: could not download torrent file for release: %s", release.TorrentName) } } // if webhook data contains TorrentDataRawBytes, lets read the file into bytes we can then use in the macro - if len(release.TorrentDataRawBytes) == 0 && strings.Contains(data, "TorrentDataRawBytes") { + if len(release.TorrentDataRawBytes) == 0 && strings.Contains(external.WebhookData, "TorrentDataRawBytes") { t, err := os.ReadFile(release.TorrentTmpFile) if 
err != nil { return 0, errors.Wrap(err, "could not read torrent file: %s", release.TorrentTmpFile) @@ -599,12 +628,12 @@ func (s *service) webhook(ctx context.Context, release *domain.Release, url stri m := domain.NewMacro(*release) // parse and replace values in argument string before continuing - dataArgs, err := m.Parse(data) + dataArgs, err := m.Parse(external.WebhookData) if err != nil { - return 0, errors.Wrap(err, "could not parse webhook data macro: %s", data) + return 0, errors.Wrap(err, "could not parse webhook data macro: %s", external.WebhookData) } - s.log.Debug().Msgf("sending POST to external webhook filter: (%s) payload: (%s)", url, data) + s.log.Trace().Msgf("sending %s to external webhook filter: (%s) payload: (%s)", external.WebhookMethod, external.WebhookHost, external.WebhookData) t := &http.Transport{ TLSClientConfig: &tls.Config{ @@ -614,14 +643,41 @@ func (s *service) webhook(ctx context.Context, release *domain.Release, url stri client := http.Client{Transport: t, Timeout: 120 * time.Second} - req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewBufferString(dataArgs)) + method := http.MethodPost + if external.WebhookMethod != "" { + method = external.WebhookMethod + } + + req, err := http.NewRequestWithContext(ctx, method, external.WebhookHost, nil) if err != nil { return 0, errors.Wrap(err, "could not build request for webhook") } + if external.WebhookData != "" && dataArgs != "" { + req, err = http.NewRequestWithContext(ctx, method, external.WebhookHost, bytes.NewBufferString(dataArgs)) + if err != nil { + return 0, errors.Wrap(err, "could not build request for webhook") + } + } + req.Header.Set("Content-Type", "application/json") req.Header.Set("User-Agent", "autobrr") + if external.WebhookHeaders != "" { + headers := strings.Split(external.WebhookHeaders, ";") + + for _, header := range headers { + h := strings.Split(header, "=") + + if len(h) != 2 { + continue + } + + // add header to req + 
req.Header.Add(http.CanonicalHeaderKey(h[0]), h[1]) + } + } + start := time.Now() res, err := client.Do(req) @@ -631,11 +687,16 @@ func (s *service) webhook(ctx context.Context, release *domain.Release, url stri defer res.Body.Close() - if res.StatusCode > 299 { - return res.StatusCode, nil + body, err := io.ReadAll(res.Body) + if err != nil { + return 0, errors.Wrap(err, "could not read request body") } - s.log.Debug().Msgf("successfully ran external webhook filter to: (%s) payload: (%s) finished in %s", url, dataArgs, time.Since(start)) + if len(body) > 0 { + s.log.Debug().Msgf("filter external webhook response status: %d body: %s", res.StatusCode, body) + } + + s.log.Debug().Msgf("successfully ran external webhook filter to: (%s) payload: (%s) finished in %s", external.WebhookHost, dataArgs, time.Since(start)) return res.StatusCode, nil } diff --git a/internal/http/filter.go b/internal/http/filter.go index ccfbd60..f039b0c 100644 --- a/internal/http/filter.go +++ b/internal/http/filter.go @@ -14,6 +14,7 @@ import ( "github.com/go-chi/chi/v5" "github.com/autobrr/autobrr/internal/domain" + "github.com/autobrr/autobrr/pkg/errors" ) type filterService interface { @@ -117,7 +118,12 @@ func (h filterHandler) getByID(w http.ResponseWriter, r *http.Request) { filter, err := h.service.FindByID(ctx, id) if err != nil { - h.encoder.StatusNotFound(w) + if errors.Is(err, domain.ErrRecordNotFound) { + h.encoder.StatusNotFound(w) + return + } + + h.encoder.StatusInternalError(w) return } diff --git a/internal/release/service.go b/internal/release/service.go index ffa35ac..5abc160 100644 --- a/internal/release/service.go +++ b/internal/release/service.go @@ -78,12 +78,7 @@ func (s *service) Stats(ctx context.Context) (*domain.ReleaseStats, error) { } func (s *service) Store(ctx context.Context, release *domain.Release) error { - _, err := s.repo.Store(ctx, release) - if err != nil { - return err - } - - return nil + return s.repo.Store(ctx, release) } func (s *service) 
StoreReleaseActionStatus(ctx context.Context, status *domain.ReleaseActionStatus) error { @@ -127,6 +122,15 @@ func (s *service) Process(release *domain.Release) { return } + if err := s.processFilters(ctx, filters, release); err != nil { + s.log.Error().Err(err).Msgf("release.Process: error processing filters for indexer: %s", release.Indexer) + return + } + + return +} + +func (s *service) processFilters(ctx context.Context, filters []domain.Filter, release *domain.Release) error { // keep track of action clients to avoid sending the same thing all over again // save both client type and client id to potentially try another client of same type triedActionClients := map[actionClientTypeKey]struct{}{} @@ -144,7 +148,7 @@ func (s *service) Process(release *domain.Release) { match, err := s.filterSvc.CheckFilter(ctx, f, release) if err != nil { l.Error().Err(err).Msg("release.Process: error checking filter") - return + return err } if !match { @@ -159,23 +163,37 @@ func (s *service) Process(release *domain.Release) { // save release here to only save those with rejections from actions instead of all releases if release.ID == 0 { release.FilterStatus = domain.ReleaseStatusFilterApproved + if err = s.Store(ctx, release); err != nil { l.Error().Err(err).Msgf("release.Process: error writing release to database: %+v", release) - return + return err } } + // found matching filter, lets find the filter actions and attach + actions, err := s.actionSvc.FindByFilterID(ctx, f.ID) + if err != nil { + s.log.Error().Err(err).Msgf("release.Process: error finding actions for filter: %s", f.Name) + return err + } + + // if no actions, continue to next filter + if len(actions) == 0 { + s.log.Warn().Msgf("release.Process: no actions found for filter '%s', trying next one..", f.Name) + return nil + } + // sleep for the delay period specified in the filter before running actions delay := release.Filter.Delay if delay > 0 { - l.Debug().Msgf("Delaying processing of '%s' (%s) for %s by %d 
seconds as specified in the filter", release.TorrentName, release.FilterName, release.Indexer, delay) + l.Debug().Msgf("release.Process: delaying processing of '%s' (%s) for %s by %d seconds as specified in the filter", release.TorrentName, release.FilterName, release.Indexer, delay) time.Sleep(time.Duration(delay) * time.Second) } var rejections []string // run actions (watchFolder, test, exec, qBittorrent, Deluge, arr etc.) - for _, a := range release.Filter.Actions { + for _, a := range actions { act := a // only run enabled actions @@ -186,7 +204,7 @@ func (s *service) Process(release *domain.Release) { l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s , run action: %s", release.Indexer, release.FilterName, release.TorrentName, act.Name) - // keep track of actiom clients to avoid sending the same thing all over again + // keep track of action clients to avoid sending the same thing all over again _, tried := triedActionClients[actionClientTypeKey{Type: act.Type, ClientID: act.ClientID}] if tried { l.Trace().Msgf("release.Process: indexer: %s, filter: %s release: %s action client already tried, skip", release.Indexer, release.FilterName, release.TorrentName) @@ -227,11 +245,11 @@ func (s *service) Process(release *domain.Release) { break } - return + return nil } func (s *service) ProcessMultiple(releases []*domain.Release) { - s.log.Debug().Msgf("process (%v) new releases from feed", len(releases)) + s.log.Debug().Msgf("process (%d) new releases from feed", len(releases)) for _, rls := range releases { rls := rls diff --git a/web/src/domain/constants.ts b/web/src/domain/constants.ts index 5428f8e..5fd35a9 100644 --- a/web/src/domain/constants.ts +++ b/web/src/domain/constants.ts @@ -521,3 +521,21 @@ export const tagsMatchLogicOptions: OptionBasic[] = [ value: "ALL" } ]; + +export const ExternalFilterTypeOptions: RadioFieldsetOption[] = [ + { label: "Exec", description: "Run a custom command", value: "EXEC" }, + { label: "Webhook", 
description: "Run webhook", value: "WEBHOOK" }, +]; + +export const ExternalFilterTypeNameMap = { + "EXEC": "Exec", + "WEBHOOK": "Webhook", +}; + +export const ExternalFilterWebhookMethodOptions: OptionBasicTyped[] = [ + { label: "GET", value: "GET" }, + { label: "POST", value: "POST" }, + { label: "PUT", value: "PUT" }, + { label: "PATCH", value: "PATCH" }, + { label: "DELETE", value: "DELETE" }, +]; diff --git a/web/src/screens/filters/Details.tsx b/web/src/screens/filters/Details.tsx index 357bd59..83f4941 100644 --- a/web/src/screens/filters/Details.tsx +++ b/web/src/screens/filters/Details.tsx @@ -3,52 +3,53 @@ * SPDX-License-Identifier: GPL-2.0-or-later */ -import { useEffect, useRef, ReactNode } from "react"; -import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; -import { NavLink, Route, Routes, useLocation, useNavigate, useParams } from "react-router-dom"; -import { toast } from "react-hot-toast"; -import { Form, Formik, FormikValues, useFormikContext } from "formik"; -import { z } from "zod"; -import { toFormikValidationSchema } from "zod-formik-adapter"; -import { ChevronDownIcon, ChevronRightIcon } from "@heroicons/react/24/solid"; +import {ReactNode, useEffect, useRef} from "react"; +import {useMutation, useQuery, useQueryClient} from "@tanstack/react-query"; +import {NavLink, Route, Routes, useLocation, useNavigate, useParams} from "react-router-dom"; +import {toast} from "react-hot-toast"; +import {Form, Formik, FormikValues, useFormikContext} from "formik"; +import {z} from "zod"; +import {toFormikValidationSchema} from "zod-formik-adapter"; +import {ChevronDownIcon, ChevronRightIcon} from "@heroicons/react/24/solid"; import { - CODECS_OPTIONS, - CONTAINER_OPTIONS, - downloadsPerUnitOptions, - FORMATS_OPTIONS, - HDR_OPTIONS, - LANGUAGE_OPTIONS, - ORIGIN_OPTIONS, - OTHER_OPTIONS, - QUALITY_MUSIC_OPTIONS, - RELEASE_TYPE_MUSIC_OPTIONS, - RESOLUTION_OPTIONS, - SOURCES_MUSIC_OPTIONS, - SOURCES_OPTIONS, - tagsMatchLogicOptions + 
CODECS_OPTIONS, + CONTAINER_OPTIONS, + downloadsPerUnitOptions, + FORMATS_OPTIONS, + HDR_OPTIONS, + LANGUAGE_OPTIONS, + ORIGIN_OPTIONS, + OTHER_OPTIONS, + QUALITY_MUSIC_OPTIONS, + RELEASE_TYPE_MUSIC_OPTIONS, + RESOLUTION_OPTIONS, + SOURCES_MUSIC_OPTIONS, + SOURCES_OPTIONS, + tagsMatchLogicOptions } from "@app/domain/constants"; -import { APIClient } from "@api/APIClient"; -import { useToggle } from "@hooks/hooks"; -import { classNames } from "@utils"; +import {APIClient} from "@api/APIClient"; +import {useToggle} from "@hooks/hooks"; +import {classNames} from "@utils"; import { - CheckboxField, - IndexerMultiSelect, - MultiSelect, - NumberField, - Select, - SwitchGroup, - TextField, - RegexField + CheckboxField, + IndexerMultiSelect, + MultiSelect, + NumberField, + RegexField, + Select, + SwitchGroup, + TextField } from "@components/inputs"; import DEBUG from "@components/debug"; import Toast from "@components/notifications/Toast"; -import { DeleteModal } from "@components/modals"; -import { TitleSubtitle } from "@components/headings"; -import { RegexTextAreaField, TextArea, TextAreaAutoResize } from "@components/inputs/input"; -import { FilterActions } from "./Action"; -import { filterKeys } from "./List"; +import {DeleteModal} from "@components/modals"; +import {TitleSubtitle} from "@components/headings"; +import {RegexTextAreaField, TextAreaAutoResize} from "@components/inputs/input"; +import {FilterActions} from "./Action"; +import {filterKeys} from "./List"; +import {External} from "@screens/filters/External"; interface tabType { name: string; @@ -200,6 +201,21 @@ const actionSchema = z.object({ } }); +const externalFilterSchema = z.object({ + enabled: z.boolean(), + index: z.number(), + name: z.string(), + type: z.enum(["EXEC", "WEBHOOK"]), + exec_cmd: z.string().optional(), + exec_args: z.string().optional(), + exec_expect_status: z.number().optional(), + webhook_host: z.string().optional(), + webhook_type: z.string().optional(), + webhook_method: 
z.string().optional(), + webhook_data: z.string().optional(), + webhook_expect_status: z.number().optional(), +}); + const indexerSchema = z.object({ id: z.number(), name: z.string().optional() @@ -209,7 +225,8 @@ const indexerSchema = z.object({ const schema = z.object({ name: z.string(), indexers: z.array(indexerSchema).min(1, { message: "Must select at least one indexer" }), - actions: z.array(actionSchema) + actions: z.array(actionSchema), + external: z.array(externalFilterSchema) }); export function FilterDetails() { @@ -380,6 +397,7 @@ export function FilterDetails() { except_origins: filter.except_origins || [], indexers: filter.indexers || [], actions: filter.actions || [], + external: filter.external || [], external_script_enabled: filter.external_script_enabled || false, external_script_cmd: filter.external_script_cmd || "", external_script_args: filter.external_script_args || "", @@ -726,71 +744,3 @@ export function CollapsableSection({ title, subtitle, children, defaultOpen }: C ); } -export function External() { - const { values } = useFormikContext(); - - return ( -
- -
-

For custom commands you should specify the full path to the binary/program you want to run. And you can include your own static variables:

https://autobrr.com/filters/actions#custom-commands--exec
}/> - -
- - - -
-
- -
-
- -
- -
-
- - -
- -