mirror of
https://github.com/idanoo/autobrr
synced 2025-07-23 00:39:13 +00:00
feat: add postgres support (#215)
* feat: add postgres support and refactor
* feat: improve releases find
* fix: autobrrctl create user
This commit is contained in:
parent f6873e932e
commit 3185832708
30 changed files with 1708 additions and 831 deletions
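
The diff below refactors the filter repository from hand-written SQL strings to the Masterminds/squirrel query builder, which is what lets one code path target both SQLite and Postgres. As a minimal, self-contained sketch of that idea (not code from this commit; the driver names and output comments are illustrative assumptions), a builder can be configured once with the placeholder style each backend expects:

```go
package main

import (
	"fmt"

	sq "github.com/Masterminds/squirrel"
)

// builderFor returns a statement builder whose placeholder style matches the
// database driver: Postgres expects $1, $2, ... while SQLite accepts ?.
// The driver names here are illustrative, not autobrr's actual config keys.
func builderFor(driver string) sq.StatementBuilderType {
	if driver == "postgres" {
		return sq.StatementBuilder.PlaceholderFormat(sq.Dollar)
	}
	return sq.StatementBuilder.PlaceholderFormat(sq.Question)
}

func main() {
	builder := builderFor("postgres")

	// The same builder call renders driver-appropriate placeholders.
	query, args, err := builder.
		Select("id", "name").
		From("filter").
		Where(sq.Eq{"enabled": true}).
		OrderBy("name ASC").
		ToSql()
	if err != nil {
		panic(err)
	}

	fmt.Println(query) // SELECT id, name FROM filter WHERE enabled = $1 ORDER BY name ASC
	fmt.Println(args)  // [true]
}
```
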
@@ -3,7 +3,9 @@ package database
 import (
     "context"
     "database/sql"
+    sq "github.com/Masterminds/squirrel"
     "strings"
     "time"
 
+    "github.com/lib/pq"
     "github.com/rs/zerolog/log"
@@ -20,12 +22,28 @@ func NewFilterRepo(db *DB) domain.FilterRepo {
 }
 
 func (r *FilterRepo) ListFilters(ctx context.Context) ([]domain.Filter, error) {
     //r.db.lock.RLock()
     //defer r.db.lock.RUnlock()
+    queryBuilder := r.db.squirrel.
+        Select(
+            "id",
+            "enabled",
+            "name",
+            "match_releases",
+            "except_releases",
+            "created_at",
+            "updated_at",
+        ).
+        From("filter").
+        OrderBy("name ASC")
 
-    rows, err := r.db.handler.QueryContext(ctx, "SELECT id, enabled, name, match_releases, except_releases, created_at, updated_at FROM filter ORDER BY name ASC")
+    query, args, err := queryBuilder.ToSql()
     if err != nil {
-        log.Error().Stack().Err(err).Msg("filters_list: error query data")
+        log.Error().Stack().Err(err).Msg("filter.list: error building query")
         return nil, err
     }
 
+    rows, err := r.db.handler.QueryContext(ctx, query, args...)
+    if err != nil {
+        log.Error().Stack().Err(err).Msg("filter.list: error executing query")
+        return nil, err
+    }
 
@@ -38,7 +56,7 @@ func (r *FilterRepo) ListFilters(ctx context.Context) ([]domain.Filter, error) {
         var matchReleases, exceptReleases sql.NullString
 
         if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &matchReleases, &exceptReleases, &f.CreatedAt, &f.UpdatedAt); err != nil {
-            log.Error().Stack().Err(err).Msg("filters_list: error scanning data to struct")
+            log.Error().Stack().Err(err).Msg("filter.list: error scanning row")
             return nil, err
         }
 
@@ -48,6 +66,7 @@ func (r *FilterRepo) ListFilters(ctx context.Context) ([]domain.Filter, error) {
         filters = append(filters, f)
     }
     if err := rows.Err(); err != nil {
+        log.Error().Stack().Err(err).Msg("filter.list: row error")
         return nil, err
     }
 
@@ -55,11 +74,64 @@ func (r *FilterRepo) ListFilters(ctx context.Context) ([]domain.Filter, error) {
 }
 
 func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter, error) {
     //r.db.lock.RLock()
     //defer r.db.lock.RUnlock()
+    queryBuilder := r.db.squirrel.
+        Select(
+            "id",
+            "enabled",
+            "name",
+            "min_size",
+            "max_size",
+            "delay",
+            "priority",
+            "match_releases",
+            "except_releases",
+            "use_regex",
+            "match_release_groups",
+            "except_release_groups",
+            "scene",
+            "freeleech",
+            "freeleech_percent",
+            "shows",
+            "seasons",
+            "episodes",
+            "resolutions",
+            "codecs",
+            "sources",
+            "containers",
+            "match_hdr",
+            "except_hdr",
+            "years",
+            "artists",
+            "albums",
+            "release_types_match",
+            "formats",
+            "quality",
+            "media",
+            "log_score",
+            "has_log",
+            "has_cue",
+            "perfect_flac",
+            "match_categories",
+            "except_categories",
+            "match_uploaders",
+            "except_uploaders",
+            "tags",
+            "except_tags",
+            "created_at",
+            "updated_at",
+        ).
+        From("filter").
+        Where("id = ?", filterID)
 
-    row := r.db.handler.QueryRowContext(ctx, "SELECT id, enabled, name, min_size, max_size, delay, priority, match_releases, except_releases, use_regex, match_release_groups, except_release_groups, scene, freeleech, freeleech_percent, shows, seasons, episodes, resolutions, codecs, sources, containers, match_hdr, except_hdr, years, artists, albums, release_types_match, formats, quality, media, log_score, has_log, has_cue, perfect_flac, match_categories, except_categories, match_uploaders, except_uploaders, tags, except_tags, created_at, updated_at FROM filter WHERE id = ?", filterID)
+    query, args, err := queryBuilder.ToSql()
+    if err != nil {
+        log.Error().Stack().Err(err).Msg("filter.findByID: error building query")
+        return nil, err
+    }
+
+    row := r.db.handler.QueryRowContext(ctx, query, args...)
     if err := row.Err(); err != nil {
+        log.Error().Stack().Err(err).Msg("filter.findByID: error query row")
         return nil, err
     }
 
@@ -69,7 +141,7 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
     var delay, logScore sql.NullInt32
 
     if err := row.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, &f.CreatedAt, &f.UpdatedAt); err != nil {
-        log.Error().Stack().Err(err).Msgf("filter: %v : error scanning data to struct", filterID)
+        log.Error().Stack().Err(err).Msgf("filter.findByID: %v : error scanning row", filterID)
         return nil, err
     }
 
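
The Scan call above maps Postgres array columns (resolutions, codecs, and so on) onto Go slices via lib/pq's pq.Array helper. A minimal sketch of that pattern, using a hypothetical example table and DSN rather than autobrr's schema:

```go
package main

import (
	"database/sql"
	"fmt"

	"github.com/lib/pq" // importing lib/pq also registers the "postgres" driver
)

func main() {
	// The DSN is a placeholder; point it at a real database to run this.
	db, err := sql.Open("postgres", "postgres://user:pass@localhost/example?sslmode=disable")
	if err != nil {
		panic(err)
	}
	defer db.Close()

	// pq.Array wraps a Go slice so database/sql can scan a text[] column
	// into it (and, symmetrically, bind a slice as an array argument).
	var tags []string
	row := db.QueryRow("SELECT tags FROM example_table WHERE id = $1", 1)
	if err := row.Scan(pq.Array(&tags)); err != nil {
		panic(err)
	}
	fmt.Println(tags)
}
```
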
@@ -106,63 +178,69 @@ func (r *FilterRepo) FindByID(ctx context.Context, filterID int) (*domain.Filter
 
 // FindByIndexerIdentifier find active filters with active indexer only
 func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, error) {
     //r.db.lock.RLock()
     //defer r.db.lock.RUnlock()
+    queryBuilder := r.db.squirrel.
+        Select(
+            "f.id",
+            "f.enabled",
+            "f.name",
+            "f.min_size",
+            "f.max_size",
+            "f.delay",
+            "f.priority",
+            "f.match_releases",
+            "f.except_releases",
+            "f.use_regex",
+            "f.match_release_groups",
+            "f.except_release_groups",
+            "f.scene",
+            "f.freeleech",
+            "f.freeleech_percent",
+            "f.shows",
+            "f.seasons",
+            "f.episodes",
+            "f.resolutions",
+            "f.codecs",
+            "f.sources",
+            "f.containers",
+            "f.match_hdr",
+            "f.except_hdr",
+            "f.years",
+            "f.artists",
+            "f.albums",
+            "f.release_types_match",
+            "f.formats",
+            "f.quality",
+            "f.media",
+            "f.log_score",
+            "f.has_log",
+            "f.has_cue",
+            "f.perfect_flac",
+            "f.match_categories",
+            "f.except_categories",
+            "f.match_uploaders",
+            "f.except_uploaders",
+            "f.tags",
+            "f.except_tags",
+            "f.created_at",
+            "f.updated_at",
+        ).
+        From("filter f").
+        Join("filter_indexer fi ON f.id = fi.filter_id").
+        Join("indexer i ON i.id = fi.indexer_id").
+        Where("i.identifier = ?", indexer).
+        Where("i.enabled = ?", true).
+        Where("f.enabled = ?", true).
+        OrderBy("f.priority DESC")
 
-    rows, err := r.db.handler.Query(`
-        SELECT
-            f.id,
-            f.enabled,
-            f.name,
-            f.min_size,
-            f.max_size,
-            f.delay,
-            f.priority,
-            f.match_releases,
-            f.except_releases,
-            f.use_regex,
-            f.match_release_groups,
-            f.except_release_groups,
-            f.scene,
-            f.freeleech,
-            f.freeleech_percent,
-            f.shows,
-            f.seasons,
-            f.episodes,
-            f.resolutions,
-            f.codecs,
-            f.sources,
-            f.containers,
-            f.match_hdr,
-            f.except_hdr,
-            f.years,
-            f.artists,
-            f.albums,
-            f.release_types_match,
-            f.formats,
-            f.quality,
-            f.media,
-            f.log_score,
-            f.has_log,
-            f.has_cue,
-            f.perfect_flac,
-            f.match_categories,
-            f.except_categories,
-            f.match_uploaders,
-            f.except_uploaders,
-            f.tags,
-            f.except_tags,
-            f.created_at,
-            f.updated_at
-        FROM filter f
-        JOIN filter_indexer fi on f.id = fi.filter_id
-        JOIN indexer i on i.id = fi.indexer_id
-        WHERE i.identifier = ?
-        AND f.enabled = true
-        AND i.enabled = true
-        ORDER BY f.priority DESC`, indexer)
+    query, args, err := queryBuilder.ToSql()
     if err != nil {
-        log.Error().Stack().Err(err).Msg("error querying filter row")
+        log.Error().Stack().Err(err).Msg("filter.findByIndexerIdentifier: error building query")
         return nil, err
     }
+
+    rows, err := r.db.handler.Query(query, args...)
+    if err != nil {
+        log.Error().Stack().Err(err).Msg("filter.findByIndexerIdentifier: error executing query")
+        return nil, err
+    }
 
@@ -177,7 +255,7 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
         var delay, logScore sql.NullInt32
 
         if err := rows.Scan(&f.ID, &f.Enabled, &f.Name, &minSize, &maxSize, &delay, &f.Priority, &matchReleases, &exceptReleases, &useRegex, &matchReleaseGroups, &exceptReleaseGroups, &scene, &freeleech, &freeleechPercent, &shows, &seasons, &episodes, pq.Array(&f.Resolutions), pq.Array(&f.Codecs), pq.Array(&f.Sources), pq.Array(&f.Containers), pq.Array(&f.MatchHDR), pq.Array(&f.ExceptHDR), &years, &artists, &albums, pq.Array(&f.MatchReleaseTypes), pq.Array(&f.Formats), pq.Array(&f.Quality), pq.Array(&f.Media), &logScore, &hasLog, &hasCue, &perfectFlac, &matchCategories, &exceptCategories, &matchUploaders, &exceptUploaders, &tags, &exceptTags, &f.CreatedAt, &f.UpdatedAt); err != nil {
-            log.Error().Stack().Err(err).Msg("error scanning data to struct")
+            log.Error().Stack().Err(err).Msg("filter.findByIndexerIdentifier: error scanning row")
             return nil, err
         }
 
@@ -211,66 +289,56 @@ func (r *FilterRepo) FindByIndexerIdentifier(indexer string) ([]domain.Filter, e
 
         filters = append(filters, f)
     }
     if err := rows.Err(); err != nil {
         return nil, err
     }
 
     return filters, nil
 }
 
 func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.Filter, error) {
     //r.db.lock.RLock()
     //defer r.db.lock.RUnlock()
 
-    var err error
-    if filter.ID != 0 {
-        log.Debug().Msg("update existing record")
-    } else {
-        var res sql.Result
-
-        res, err = r.db.handler.ExecContext(ctx, `INSERT INTO filter (
-            name,
-            enabled,
-            min_size,
-            max_size,
-            delay,
-            priority,
-            match_releases,
-            except_releases,
-            use_regex,
-            match_release_groups,
-            except_release_groups,
-            scene,
-            freeleech,
-            freeleech_percent,
-            shows,
-            seasons,
-            episodes,
-            resolutions,
-            codecs,
-            sources,
-            containers,
-            match_hdr,
-            except_hdr,
-            years,
-            match_categories,
-            except_categories,
-            match_uploaders,
-            except_uploaders,
-            tags,
-            except_tags,
-            artists,
-            albums,
-            release_types_match,
-            formats,
-            quality,
-            media,
-            log_score,
-            has_log,
-            has_cue,
-            perfect_flac
-        )
-        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20, $21, $22, $23, $24, $25, $26, $27, $28, $29, $30, $31, $32, $33, $34, $35, $36, $37, $38, $39, $40) ON CONFLICT DO NOTHING`,
+    queryBuilder := r.db.squirrel.
+        Insert("filter").
+        Columns(
+            "name",
+            "enabled",
+            "min_size",
+            "max_size",
+            "delay",
+            "priority",
+            "match_releases",
+            "except_releases",
+            "use_regex",
+            "match_release_groups",
+            "except_release_groups",
+            "scene",
+            "freeleech",
+            "freeleech_percent",
+            "shows",
+            "seasons",
+            "episodes",
+            "resolutions",
+            "codecs",
+            "sources",
+            "containers",
+            "match_hdr",
+            "except_hdr",
+            "years",
+            "match_categories",
+            "except_categories",
+            "match_uploaders",
+            "except_uploaders",
+            "tags",
+            "except_tags",
+            "artists",
+            "albums",
+            "release_types_match",
+            "formats",
+            "quality",
+            "media",
+            "log_score",
+            "has_log",
+            "has_cue",
+            "perfect_flac",
+        ).
+        Values(
             filter.Name,
             filter.Enabled,
             filter.MinSize,
@@ -311,114 +379,80 @@ func (r *FilterRepo) Store(ctx context.Context, filter domain.Filter) (*domain.F
             filter.Log,
             filter.Cue,
             filter.PerfectFlac,
-        )
-        if err != nil {
-            log.Error().Stack().Err(err).Msg("error executing query")
-            return nil, err
-        }
+        ).
+        Suffix("RETURNING id").RunWith(r.db.handler)
 
-        resId, _ := res.LastInsertId()
-        filter.ID = int(resId)
+    // return values
+    var retID int
+
+    err := queryBuilder.QueryRowContext(ctx).Scan(&retID)
+    if err != nil {
+        log.Error().Stack().Err(err).Msg("filter.store: error executing query")
+        return nil, err
+    }
+
+    filter.ID = retID
 
     return &filter, nil
 }
 
 func (r *FilterRepo) Update(ctx context.Context, filter domain.Filter) (*domain.Filter, error) {
     //r.db.lock.RLock()
     //defer r.db.lock.RUnlock()
 
     //var res sql.Result
 
-    var err error
-    _, err = r.db.handler.ExecContext(ctx, `
-        UPDATE filter SET
-            name = ?,
-            enabled = ?,
-            min_size = ?,
-            max_size = ?,
-            delay = ?,
-            priority = ?,
-            match_releases = ?,
-            except_releases = ?,
-            use_regex = ?,
-            match_release_groups = ?,
-            except_release_groups = ?,
-            scene = ?,
-            freeleech = ?,
-            freeleech_percent = ?,
-            shows = ?,
-            seasons = ?,
-            episodes = ?,
-            resolutions = ?,
-            codecs = ?,
-            sources = ?,
-            containers = ?,
-            match_hdr = ?,
-            except_hdr = ?,
-            years = ?,
-            match_categories = ?,
-            except_categories = ?,
-            match_uploaders = ?,
-            except_uploaders = ?,
-            tags = ?,
-            except_tags = ?,
-            artists = ?,
-            albums = ?,
-            release_types_match = ?,
-            formats = ?,
-            quality = ?,
-            media = ?,
-            log_score = ?,
-            has_log = ?,
-            has_cue = ?,
-            perfect_flac = ?,
-            updated_at = CURRENT_TIMESTAMP
-        WHERE id = ?`,
-        filter.Name,
-        filter.Enabled,
-        filter.MinSize,
-        filter.MaxSize,
-        filter.Delay,
-        filter.Priority,
-        filter.MatchReleases,
-        filter.ExceptReleases,
-        filter.UseRegex,
-        filter.MatchReleaseGroups,
-        filter.ExceptReleaseGroups,
-        filter.Scene,
-        filter.Freeleech,
-        filter.FreeleechPercent,
-        filter.Shows,
-        filter.Seasons,
-        filter.Episodes,
-        pq.Array(filter.Resolutions),
-        pq.Array(filter.Codecs),
-        pq.Array(filter.Sources),
-        pq.Array(filter.Containers),
-        pq.Array(filter.MatchHDR),
-        pq.Array(filter.ExceptHDR),
-        filter.Years,
-        filter.MatchCategories,
-        filter.ExceptCategories,
-        filter.MatchUploaders,
-        filter.ExceptUploaders,
-        filter.Tags,
-        filter.ExceptTags,
-        filter.Artists,
-        filter.Albums,
-        pq.Array(filter.MatchReleaseTypes),
-        pq.Array(filter.Formats),
-        pq.Array(filter.Quality),
-        pq.Array(filter.Media),
-        filter.LogScore,
-        filter.Log,
-        filter.Cue,
-        filter.PerfectFlac,
-        filter.ID,
-    )
-
+    queryBuilder := r.db.squirrel.
+        Update("filter").
+        Set("name", filter.Name).
+        Set("enabled", filter.Enabled).
+        Set("min_size", filter.MinSize).
+        Set("max_size", filter.MaxSize).
+        Set("delay", filter.Delay).
+        Set("priority", filter.Priority).
+        Set("use_regex", filter.UseRegex).
+        Set("match_releases", filter.MatchReleases).
+        Set("except_releases", filter.ExceptReleases).
+        Set("match_release_groups", filter.MatchReleaseGroups).
+        Set("except_release_groups", filter.ExceptReleaseGroups).
+        Set("scene", filter.Scene).
+        Set("freeleech", filter.Freeleech).
+        Set("freeleech_percent", filter.FreeleechPercent).
+        Set("shows", filter.Shows).
+        Set("seasons", filter.Seasons).
+        Set("episodes", filter.Episodes).
+        Set("resolutions", pq.Array(filter.Resolutions)).
+        Set("codecs", pq.Array(filter.Codecs)).
+        Set("sources", pq.Array(filter.Sources)).
+        Set("containers", pq.Array(filter.Containers)).
+        Set("match_hdr", pq.Array(filter.MatchHDR)).
+        Set("except_hdr", pq.Array(filter.ExceptHDR)).
+        Set("years", filter.Years).
+        Set("match_categories", filter.MatchCategories).
+        Set("except_categories", filter.ExceptCategories).
+        Set("match_uploaders", filter.MatchUploaders).
+        Set("except_uploaders", filter.ExceptUploaders).
+        Set("tags", filter.Tags).
+        Set("except_tags", filter.ExceptTags).
+        Set("artists", filter.Artists).
+        Set("albums", filter.Albums).
+        Set("release_types_match", pq.Array(filter.MatchReleaseTypes)).
+        Set("formats", pq.Array(filter.Formats)).
+        Set("quality", pq.Array(filter.Quality)).
+        Set("media", pq.Array(filter.Media)).
+        Set("log_score", filter.LogScore).
+        Set("has_log", filter.Log).
+        Set("has_cue", filter.Cue).
+        Set("perfect_flac", filter.PerfectFlac).
+        Set("updated_at", time.Now().Format(time.RFC3339)).
+        Where("id = ?", filter.ID)
+
+    query, args, err := queryBuilder.ToSql()
     if err != nil {
-        log.Error().Stack().Err(err).Msg("error executing query")
+        log.Error().Stack().Err(err).Msg("filter.update: error building query")
         return nil, err
     }
 
+    _, err = r.db.handler.ExecContext(ctx, query, args...)
+    if err != nil {
+        log.Error().Stack().Err(err).Msg("filter.update: error executing query")
+        return nil, err
+    }
 
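
The Store refactor in the hunk above swaps sql.Result.LastInsertId, which lib/pq does not support on Postgres, for an INSERT ... RETURNING id read back through squirrel's Suffix and RunWith. A stripped-down sketch of that pattern, with a hypothetical table and column (this is not the commit's code, just the shape of the technique):

```go
package example

import (
	"context"
	"database/sql"

	sq "github.com/Masterminds/squirrel"
)

// insertAndReturnID sketches the INSERT ... RETURNING id pattern used in
// Store above. "example_table" and its "name" column are hypothetical;
// db must be a Postgres connection and builder should use sq.Dollar.
func insertAndReturnID(ctx context.Context, db *sql.DB, builder sq.StatementBuilderType, name string) (int, error) {
	var id int
	err := builder.
		Insert("example_table").
		Columns("name").
		Values(name).
		Suffix("RETURNING id").
		RunWith(db). // attach a runner so QueryRowContext can execute directly
		QueryRowContext(ctx).
		Scan(&id) // scan the generated primary key
	return id, err
}
```

SQLite only gained RETURNING in version 3.35, so the LastInsertId route remains the usual fallback there.
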
@@ -426,20 +460,22 @@ func (r *FilterRepo) Update(ctx context.Context, filter domain.Filter) (*domain.
 }
 
 func (r *FilterRepo) ToggleEnabled(ctx context.Context, filterID int, enabled bool) error {
     //r.db.lock.RLock()
     //defer r.db.lock.RUnlock()
 
-    var err error
-    _, err = r.db.handler.ExecContext(ctx, `
-        UPDATE filter SET
-            enabled = ?,
-            updated_at = CURRENT_TIMESTAMP
-        WHERE id = ?`,
-        enabled,
-        filterID,
-    )
 
+    queryBuilder := r.db.squirrel.
+        Update("filter").
+        Set("enabled", enabled).
+        Set("updated_at", sq.Expr("CURRENT_TIMESTAMP")).
+        Where("id = ?", filterID)
+
+    query, args, err := queryBuilder.ToSql()
     if err != nil {
-        log.Error().Stack().Err(err).Msg("error executing query")
+        log.Error().Stack().Err(err).Msg("filter.toggleEnabled: error building query")
         return err
     }
+    _, err = r.db.handler.ExecContext(ctx, query, args...)
+    if err != nil {
+        log.Error().Stack().Err(err).Msg("filter.toggleEnabled: error executing query")
+        return err
+    }
 
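
ToggleEnabled above uses sq.Expr so that CURRENT_TIMESTAMP is emitted as raw SQL and evaluated by the database, rather than being bound as a string parameter. A small illustration of the difference; the table name and output comments are placeholders, not autobrr's schema:

```go
package main

import (
	"fmt"

	sq "github.com/Masterminds/squirrel"
)

func main() {
	// sq.Expr injects raw SQL, so CURRENT_TIMESTAMP is left for the server
	// to evaluate instead of becoming a bound argument.
	query, args, err := sq.Update("example_table").
		Set("enabled", true).
		Set("updated_at", sq.Expr("CURRENT_TIMESTAMP")).
		Where("id = ?", 1).
		PlaceholderFormat(sq.Dollar).
		ToSql()
	if err != nil {
		panic(err)
	}

	fmt.Println(query) // UPDATE example_table SET enabled = $1, updated_at = CURRENT_TIMESTAMP WHERE id = $2
	fmt.Println(args)  // [true 1]
}
```
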
@@ -447,9 +483,6 @@ func (r *FilterRepo) ToggleEnabled(ctx context.Context, filterID int, enabled bo
 }
 
 func (r *FilterRepo) StoreIndexerConnections(ctx context.Context, filterID int, indexers []domain.Indexer) error {
-    //r.db.lock.RLock()
-    //defer r.db.lock.RUnlock()
-
     tx, err := r.db.handler.BeginTx(ctx, nil)
     if err != nil {
         return err
@@ -457,27 +490,43 @@ func (r *FilterRepo) StoreIndexerConnections(ctx context.Context, filterID int,
 
     defer tx.Rollback()
 
-    deleteQuery := `DELETE FROM filter_indexer WHERE filter_id = ?`
-    _, err = tx.ExecContext(ctx, deleteQuery, filterID)
+    deleteQueryBuilder := r.db.squirrel.
+        Delete("filter_indexer").
+        Where("filter_id = ?", filterID)
+
+    deleteQuery, deleteArgs, err := deleteQueryBuilder.ToSql()
     if err != nil {
-        log.Error().Stack().Err(err).Msgf("error deleting indexers for filter: %v", filterID)
+        log.Error().Stack().Err(err).Msg("filter.StoreIndexerConnections: error building query")
         return err
     }
+    _, err = tx.ExecContext(ctx, deleteQuery, deleteArgs...)
+    if err != nil {
+        log.Error().Stack().Err(err).Msgf("filter.StoreIndexerConnections: error deleting indexers for filter: %v", filterID)
+        return err
+    }
 
     for _, indexer := range indexers {
-        query := `INSERT INTO filter_indexer (filter_id, indexer_id) VALUES ($1, $2)`
-        _, err := tx.ExecContext(ctx, query, filterID, indexer.ID)
+        queryBuilder := r.db.squirrel.
+            Insert("filter_indexer").Columns("filter_id", "indexer_id").
+            Values(filterID, indexer.ID)
+
+        query, args, err := queryBuilder.ToSql()
         if err != nil {
-            log.Error().Stack().Err(err).Msg("error executing query")
+            log.Error().Stack().Err(err).Msg("filter.StoreIndexerConnections: error building query")
            return err
         }
+        _, err = tx.ExecContext(ctx, query, args...)
+        if err != nil {
+            log.Error().Stack().Err(err).Msg("filter.StoreIndexerConnections: error executing query")
+            return err
+        }
 
-        log.Debug().Msgf("filter.indexers: store '%v' on filter: %v", indexer.Name, filterID)
+        log.Debug().Msgf("filter.StoreIndexerConnections: store '%v' on filter: %v", indexer.Name, filterID)
     }
 
     err = tx.Commit()
     if err != nil {
-        log.Error().Stack().Err(err).Msgf("error deleting indexers for filter: %v", filterID)
+        log.Error().Stack().Err(err).Msgf("filter.StoreIndexerConnections: error storing indexers for filter: %v", filterID)
         return err
     }
 
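
StoreIndexerConnections above wraps the delete-then-reinsert of filter/indexer links in a single transaction, so a failed insert rolls the earlier delete back as well. A compact sketch of that shape with builder-generated SQL; the table, columns, and function name are hypothetical stand-ins:

```go
package example

import (
	"context"
	"database/sql"

	sq "github.com/Masterminds/squirrel"
)

// replaceLinks deletes all rows for parentID and re-inserts childIDs inside
// one transaction; any error triggers the deferred rollback. The
// "example_link" table and its columns are hypothetical.
func replaceLinks(ctx context.Context, db *sql.DB, builder sq.StatementBuilderType, parentID int, childIDs []int) error {
	tx, err := db.BeginTx(ctx, nil)
	if err != nil {
		return err
	}
	defer tx.Rollback() // no-op once Commit succeeds

	delQuery, delArgs, err := builder.Delete("example_link").Where("parent_id = ?", parentID).ToSql()
	if err != nil {
		return err
	}
	if _, err := tx.ExecContext(ctx, delQuery, delArgs...); err != nil {
		return err
	}

	for _, childID := range childIDs {
		insQuery, insArgs, err := builder.Insert("example_link").Columns("parent_id", "child_id").Values(parentID, childID).ToSql()
		if err != nil {
			return err
		}
		if _, err := tx.ExecContext(ctx, insQuery, insArgs...); err != nil {
			return err
		}
	}

	return tx.Commit()
}
```
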
@@ -485,13 +534,19 @@ func (r *FilterRepo) StoreIndexerConnections(ctx context.Context, filterID int,
 }
 
 func (r *FilterRepo) StoreIndexerConnection(ctx context.Context, filterID int, indexerID int) error {
     //r.db.lock.RLock()
     //defer r.db.lock.RUnlock()
+    queryBuilder := r.db.squirrel.
+        Insert("filter_indexer").Columns("filter_id", "indexer_id").
+        Values(filterID, indexerID)
 
-    query := `INSERT INTO filter_indexer (filter_id, indexer_id) VALUES ($1, $2)`
-    _, err := r.db.handler.ExecContext(ctx, query, filterID, indexerID)
+    query, args, err := queryBuilder.ToSql()
     if err != nil {
-        log.Error().Stack().Err(err).Msg("error executing query")
+        log.Error().Stack().Err(err).Msg("filter.storeIndexerConnection: error building query")
         return err
     }
 
+    _, err = r.db.handler.ExecContext(ctx, query, args...)
+    if err != nil {
+        log.Error().Stack().Err(err).Msg("filter.storeIndexerConnection: error executing query")
+        return err
+    }
 
@@ -499,13 +554,19 @@ func (r *FilterRepo) StoreIndexerConnection(ctx context.Context, filterID int, i
 }
 
 func (r *FilterRepo) DeleteIndexerConnections(ctx context.Context, filterID int) error {
     //r.db.lock.RLock()
     //defer r.db.lock.RUnlock()
+    queryBuilder := r.db.squirrel.
+        Delete("filter_indexer").
+        Where("filter_id = ?", filterID)
 
-    query := `DELETE FROM filter_indexer WHERE filter_id = ?`
-    _, err := r.db.handler.ExecContext(ctx, query, filterID)
+    query, args, err := queryBuilder.ToSql()
     if err != nil {
-        log.Error().Stack().Err(err).Msg("error executing query")
+        log.Error().Stack().Err(err).Msg("filter.deleteIndexerConnections: error building query")
         return err
     }
 
+    _, err = r.db.handler.ExecContext(ctx, query, args...)
+    if err != nil {
+        log.Error().Stack().Err(err).Msg("filter.deleteIndexerConnections: error executing query")
+        return err
+    }
 
@@ -513,12 +574,19 @@ func (r *FilterRepo) DeleteIndexerConnections(ctx context.Context, filterID int)
 }
 
 func (r *FilterRepo) Delete(ctx context.Context, filterID int) error {
     //r.db.lock.RLock()
     //defer r.db.lock.RUnlock()
+    queryBuilder := r.db.squirrel.
+        Delete("filter").
+        Where("id = ?", filterID)
 
-    _, err := r.db.handler.ExecContext(ctx, `DELETE FROM filter WHERE id = ?`, filterID)
+    query, args, err := queryBuilder.ToSql()
     if err != nil {
-        log.Error().Stack().Err(err).Msg("error executing query")
+        log.Error().Stack().Err(err).Msg("filter.delete: error building query")
         return err
     }
 
+    _, err = r.db.handler.ExecContext(ctx, query, args...)
+    if err != nil {
+        log.Error().Stack().Err(err).Msg("filter.delete: error executing query")
+        return err
+    }
 