feat: add backend

This commit is contained in:
Ludvig Lundgren 2021-08-11 15:26:17 +02:00
parent bc418ff248
commit a838d994a6
68 changed files with 9561 additions and 0 deletions

176
pkg/qbittorrent/client.go Normal file
View file

@ -0,0 +1,176 @@
package qbittorrent
import (
"bytes"
"fmt"
"io"
"mime/multipart"
"net/http"
"net/http/cookiejar"
"net/url"
"os"
"strings"
"time"
"github.com/rs/zerolog/log"
"golang.org/x/net/publicsuffix"
)
// Client is a qBittorrent WebUI API client.
// Requests are sent through a shared *http.Client whose cookie jar
// holds the session cookie obtained via Login.
type Client struct {
	settings Settings
	http     *http.Client
}
// Settings holds the connection parameters for a qBittorrent instance.
type Settings struct {
	Hostname string
	Port     uint
	Username string
	Password string
	SSL      bool
	// protocol is derived from SSL in NewClient ("http" or "https");
	// it is unexported and not meant to be set by callers.
	protocol string
}
// NewClient builds a qBittorrent API client from the given settings.
// Session cookies returned by the login endpoint are kept in an
// in-memory jar so subsequent requests stay authenticated.
func NewClient(s Settings) *Client {
	// Keep cookies between requests.
	jar, err := cookiejar.New(&cookiejar.Options{PublicSuffixList: publicsuffix.List})
	if err != nil {
		log.Error().Err(err).Msg("new client cookie error")
	}

	client := &Client{
		settings: s,
		http: &http.Client{
			Timeout: time.Second * 10,
			Jar:     jar,
		},
	}

	client.settings.protocol = "http"
	if client.settings.SSL {
		client.settings.protocol = "https"
	}

	return client
}
// get performs a GET request against the qBittorrent v2 API endpoint.
// opts, when non-empty, is encoded into the URL query string; passing
// nil preserves the previous behavior of using the endpoint untouched.
// The caller is responsible for closing the response body.
func (c *Client) get(endpoint string, opts map[string]string) (*http.Response, error) {
	reqUrl := fmt.Sprintf("%v://%v:%v/api/v2/%v", c.settings.protocol, c.settings.Hostname, c.settings.Port, endpoint)

	// Previously opts was accepted but silently ignored; encode it so GET
	// callers can pass parameters the same way post callers do.
	if len(opts) > 0 {
		values := url.Values{}
		for k, v := range opts {
			values.Add(k, v)
		}
		sep := "?"
		if strings.Contains(endpoint, "?") {
			sep = "&"
		}
		reqUrl += sep + values.Encode()
	}

	req, err := http.NewRequest("GET", reqUrl, nil)
	if err != nil {
		log.Error().Err(err).Msgf("GET: error %v", reqUrl)
		return nil, err
	}

	resp, err := c.http.Do(req)
	if err != nil {
		log.Error().Err(err).Msgf("GET: do %v", reqUrl)
		return nil, err
	}

	return resp, nil
}
// post sends a POST request to the given API endpoint with opts encoded
// as an application/x-www-form-urlencoded body. The caller must close
// the response body.
func (c *Client) post(endpoint string, opts map[string]string) (*http.Response, error) {
	// Encode the optional user-supplied parameters as a form body
	// (ranging over a nil map is a no-op, so no nil check is needed).
	form := url.Values{}
	for key, value := range opts {
		form.Add(key, value)
	}

	reqUrl := fmt.Sprintf("%v://%v:%v/api/v2/%v", c.settings.protocol, c.settings.Hostname, c.settings.Port, endpoint)

	req, err := http.NewRequest("POST", reqUrl, strings.NewReader(form.Encode()))
	if err != nil {
		log.Error().Err(err).Msgf("POST: req %v", reqUrl)
		return nil, err
	}

	// add the content-type so qbittorrent knows what to expect
	req.Header.Add("Content-Type", "application/x-www-form-urlencoded")

	resp, err := c.http.Do(req)
	if err != nil {
		log.Error().Err(err).Msgf("POST: do %v", reqUrl)
		return nil, err
	}

	return resp, nil
}
// postFile uploads the file at fileName to the given API endpoint as a
// multipart/form-data request. The file content is sent under the
// "torrents" form field; each entry in opts becomes an additional plain
// form field. The caller is responsible for closing the response body.
func (c *Client) postFile(endpoint string, fileName string, opts map[string]string) (*http.Response, error) {
	file, err := os.Open(fileName)
	if err != nil {
		log.Error().Err(err).Msgf("POST file: opening file %v", fileName)
		return nil, err
	}
	// Close the file later
	defer file.Close()

	// Buffer to store our request body as bytes
	var requestBody bytes.Buffer

	// Store a multipart writer
	multiPartWriter := multipart.NewWriter(&requestBody)

	// Initialize file field; "torrents" is the field name qBittorrent
	// expects for torrent uploads.
	fileWriter, err := multiPartWriter.CreateFormFile("torrents", fileName)
	if err != nil {
		log.Error().Err(err).Msgf("POST file: initializing file field %v", fileName)
		return nil, err
	}

	// Copy the actual file content to the fields writer
	_, err = io.Copy(fileWriter, file)
	if err != nil {
		log.Error().Err(err).Msgf("POST file: could not copy file to writer %v", fileName)
		return nil, err
	}

	// Populate other fields
	if opts != nil {
		for key, val := range opts {
			fieldWriter, err := multiPartWriter.CreateFormField(key)
			if err != nil {
				log.Error().Err(err).Msgf("POST file: could not add other fields %v", fileName)
				return nil, err
			}

			_, err = fieldWriter.Write([]byte(val))
			if err != nil {
				log.Error().Err(err).Msgf("POST file: could not write field %v", fileName)
				return nil, err
			}
		}
	}

	// Close multipart writer: this finalizes the body by writing the
	// terminating boundary, so it must happen before the request is built.
	multiPartWriter.Close()

	reqUrl := fmt.Sprintf("%v://%v:%v/api/v2/%v", c.settings.protocol, c.settings.Hostname, c.settings.Port, endpoint)
	req, err := http.NewRequest("POST", reqUrl, &requestBody)
	if err != nil {
		log.Error().Err(err).Msgf("POST file: could not create request object %v", fileName)
		return nil, err
	}

	// Set correct content type (includes the generated multipart boundary)
	req.Header.Set("Content-Type", multiPartWriter.FormDataContentType())

	res, err := c.http.Do(req)
	if err != nil {
		log.Error().Err(err).Msgf("POST file: could not perform request %v", fileName)
		return nil, err
	}

	return res, nil
}
// setCookies stores the given cookies in the client's jar under the base
// URL of the qBittorrent instance so they are replayed on later requests.
func (c *Client) setCookies(cookies []*http.Cookie) {
	base := fmt.Sprintf("%v://%v:%v", c.settings.protocol, c.settings.Hostname, c.settings.Port)
	// The URL is built from our own settings; a parse error is not
	// actionable here, so it is deliberately ignored.
	cookieURL, _ := url.Parse(base)
	c.http.Jar.SetCookies(cookieURL, cookies)
}

179
pkg/qbittorrent/domain.go Normal file
View file

@ -0,0 +1,179 @@
package qbittorrent
// Torrent mirrors a single torrent object as returned by the
// qBittorrent WebUI API torrents/info endpoint; json tags match the
// API field names.
type Torrent struct {
	AddedOn            int          `json:"added_on"`
	AmountLeft         int          `json:"amount_left"`
	AutoManaged        bool         `json:"auto_tmm"`
	Availability       float32      `json:"availability"`
	Category           string       `json:"category"`
	Completed          int          `json:"completed"`
	CompletionOn       int          `json:"completion_on"`
	DlLimit            int          `json:"dl_limit"`
	DlSpeed            int          `json:"dl_speed"`
	Downloaded         int          `json:"downloaded"`
	DownloadedSession  int          `json:"downloaded_session"`
	ETA                int          `json:"eta"`
	FirstLastPiecePrio bool         `json:"f_l_piece_prio"`
	ForceStart         bool         `json:"force_start"`
	Hash               string       `json:"hash"`
	LastActivity       int          `json:"last_activity"`
	MagnetURI          string       `json:"magnet_uri"`
	MaxRatio           float32      `json:"max_ratio"`
	MaxSeedingTime     int          `json:"max_seeding_time"`
	Name               string       `json:"name"`
	NumComplete        int          `json:"num_complete"`
	NumIncomplete      int          `json:"num_incomplete"`
	NumSeeds           int          `json:"num_seeds"`
	Priority           int          `json:"priority"`
	Progress           float32      `json:"progress"`
	Ratio              float32      `json:"ratio"`
	RatioLimit         float32      `json:"ratio_limit"`
	SavePath           string       `json:"save_path"`
	SeedingTimeLimit   int          `json:"seeding_time_limit"`
	SeenComplete       int          `json:"seen_complete"`
	SequentialDownload bool         `json:"seq_dl"`
	Size               int          `json:"size"`
	State              TorrentState `json:"state"`
	SuperSeeding       bool         `json:"super_seeding"`
	Tags               string       `json:"tags"`
	TimeActive         int          `json:"time_active"`
	TotalSize          int          `json:"total_size"`
	// Tracker is a pointer so a JSON null can be distinguished from "".
	Tracker         *string `json:"tracker"`
	UpLimit         int     `json:"up_limit"`
	Uploaded        int     `json:"uploaded"`
	UploadedSession int     `json:"uploaded_session"`
	UpSpeed         int     `json:"upspeed"`
}
// TorrentTrackersResponse wraps the tracker list returned for a torrent.
type TorrentTrackersResponse struct {
	Trackers []TorrentTracker `json:"trackers"`
}
// TorrentTracker describes one tracker entry from torrents/trackers.
type TorrentTracker struct {
	//Tier uint `json:"tier"` // can be both empty "" and int
	Url           string        `json:"url"`
	Status        TrackerStatus `json:"status"`
	NumPeers      int           `json:"num_peers"`
	NumSeeds      int           `json:"num_seeds"`
	NumLeechers   int           `json:"num_leechers"`
	NumDownloaded int           `json:"num_downloaded"`
	Message       string        `json:"msg"`
}
// TorrentState is the torrent lifecycle state string reported by the
// WebUI API in Torrent.State.
type TorrentState string

const (
	// Some error occurred, applies to paused torrents
	TorrentStateError TorrentState = "error"
	// Torrent data files is missing
	TorrentStateMissingFiles TorrentState = "missingFiles"
	// Torrent is being seeded and data is being transferred
	TorrentStateUploading TorrentState = "uploading"
	// Torrent is paused and has finished downloading
	TorrentStatePausedUp TorrentState = "pausedUP"
	// Queuing is enabled and torrent is queued for upload
	TorrentStateQueuedUp TorrentState = "queuedUP"
	// Torrent is being seeded, but no connections were made
	TorrentStateStalledUp TorrentState = "stalledUP"
	// Torrent has finished downloading and is being checked
	TorrentStateCheckingUp TorrentState = "checkingUP"
	// Torrent is forced to uploading and ignore queue limit
	TorrentStateForcedUp TorrentState = "forcedUP"
	// Torrent is allocating disk space for download
	TorrentStateAllocating TorrentState = "allocating"
	// Torrent is being downloaded and data is being transferred
	TorrentStateDownloading TorrentState = "downloading"
	// Torrent has just started downloading and is fetching metadata
	TorrentStateMetaDl TorrentState = "metaDL"
	// Torrent is paused and has NOT finished downloading
	TorrentStatePausedDl TorrentState = "pausedDL"
	// Queuing is enabled and torrent is queued for download
	TorrentStateQueuedDl TorrentState = "queuedDL"
	// Torrent is being downloaded, but no connections were made
	TorrentStateStalledDl TorrentState = "stalledDL"
	// Same as checkingUP, but torrent has NOT finished downloading
	TorrentStateCheckingDl TorrentState = "checkingDL"
	// Torrent is forced to downloading to ignore queue limit
	TorrentStateForceDl TorrentState = "forceDL"
	// Checking resume data on qBt startup
	TorrentStateCheckingResumeData TorrentState = "checkingResumeData"
	// Torrent is moving to another location
	TorrentStateMoving TorrentState = "moving"
	// Unknown status
	TorrentStateUnknown TorrentState = "unknown"
)
// TorrentFilter is the filter value accepted by torrents/info to
// restrict which torrents are listed.
type TorrentFilter string

const (
	// Match all torrents
	TorrentFilterAll TorrentFilter = "all"
	// Torrent is active
	TorrentFilterActive TorrentFilter = "active"
	// Torrent is inactive
	TorrentFilterInactive TorrentFilter = "inactive"
	// Torrent is completed
	TorrentFilterCompleted TorrentFilter = "completed"
	// Torrent is resumed
	TorrentFilterResumed TorrentFilter = "resumed"
	// Torrent is paused
	TorrentFilterPaused TorrentFilter = "paused"
	// Torrent is stalled
	TorrentFilterStalled TorrentFilter = "stalled"
	// Torrent is being seeded and data is being transferred
	TorrentFilterUploading TorrentFilter = "uploading"
	// Torrent is being seeded, but no connections were made
	TorrentFilterStalledUploading TorrentFilter = "stalled_uploading"
	// Torrent is being downloaded and data is being transferred
	TorrentFilterDownloading TorrentFilter = "downloading"
	// Torrent is being downloaded, but no connections were made
	TorrentFilterStalledDownloading TorrentFilter = "stalled_downloading"
)
// TrackerStatus is the numeric tracker state from torrents/trackers.
// See https://github.com/qbittorrent/qBittorrent/wiki/WebUI-API-(qBittorrent-4.1)#get-torrent-trackers
type TrackerStatus int

const (
	// 0 Tracker is disabled (used for DHT, PeX, and LSD)
	TrackerStatusDisabled TrackerStatus = 0
	// 1 Tracker has not been contacted yet
	TrackerStatusNotContacted TrackerStatus = 1
	// 2 Tracker has been contacted and is working
	TrackerStatusOK TrackerStatus = 2
	// 3 Tracker is updating
	TrackerStatusUpdating TrackerStatus = 3
	// 4 Tracker has been contacted, but it is not working (or doesn't send proper replies)
	TrackerStatusNotWorking TrackerStatus = 4
)

222
pkg/qbittorrent/methods.go Normal file
View file

@ -0,0 +1,222 @@
package qbittorrent
import (
"encoding/json"
"errors"
"io/ioutil"
"net/http"
"net/url"
"strconv"
"strings"
"github.com/rs/zerolog/log"
)
// Login https://github.com/qbittorrent/qBittorrent/wiki/WebUI-API-(qBittorrent-4.1)#authentication
// Login authenticates against the qBittorrent WebUI and stores the
// returned session cookie for subsequent requests.
// https://github.com/qbittorrent/qBittorrent/wiki/WebUI-API-(qBittorrent-4.1)#authentication
// It returns an error on transport failure, on a 403 (IP banned), on
// any other non-200 status, and on bad credentials.
func (c *Client) Login() error {
	credentials := map[string]string{
		"username": c.settings.Username,
		"password": c.settings.Password,
	}

	resp, err := c.post("auth/login", credentials)
	if err != nil {
		log.Error().Err(err).Msg("login error")
		return err
	}
	// Close unconditionally; previously the body leaked on the
	// forbidden/bad-status paths below.
	defer resp.Body.Close()

	// BUG FIX: the original returned `err` here, which is nil at this
	// point, so callers saw a successful login on HTTP failures.
	if resp.StatusCode == http.StatusForbidden {
		log.Error().Msg("User's IP is banned for too many failed login attempts")
		return errors.New("login failed: user's IP is banned for too many failed login attempts")
	} else if resp.StatusCode != http.StatusOK { // check for correct status code
		log.Error().Msgf("login bad status: %v", resp.StatusCode)
		return errors.New("login failed: bad status")
	}

	bodyBytes, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		return err
	}
	bodyString := string(bodyBytes)

	// The API answers 200 with the literal body "Fails." on bad credentials.
	if bodyString == "Fails." {
		return errors.New("bad credentials")
	}

	// good response == "Ok."
	// place cookies in jar for future requests
	if cookies := resp.Cookies(); len(cookies) > 0 {
		c.setCookies(cookies)
	} else {
		return errors.New("bad credentials")
	}

	return nil
}
// GetTorrents fetches the full torrent list via torrents/info.
func (c *Client) GetTorrents() ([]Torrent, error) {
	resp, err := c.get("torrents/info", nil)
	if err != nil {
		log.Error().Err(err).Msg("get torrents error")
		return nil, err
	}

	defer resp.Body.Close()

	body, readErr := ioutil.ReadAll(resp.Body)
	if readErr != nil {
		// BUG FIX: log the actual read error; previously this logged
		// `err`, which is nil on this path.
		log.Error().Err(readErr).Msg("get torrents read error")
		return nil, readErr
	}

	var torrents []Torrent
	err = json.Unmarshal(body, &torrents)
	if err != nil {
		log.Error().Err(err).Msg("get torrents unmarshal error")
		return nil, err
	}

	return torrents, nil
}
// GetTorrentsFilter fetches the torrent list restricted to the given
// filter (active, completed, etc.) via torrents/info.
func (c *Client) GetTorrentsFilter(filter TorrentFilter) ([]Torrent, error) {
	v := url.Values{}
	v.Add("filter", string(filter))
	params := v.Encode()

	resp, err := c.get("torrents/info?"+params, nil)
	if err != nil {
		log.Error().Err(err).Msgf("get filtered torrents error: %v", filter)
		return nil, err
	}

	defer resp.Body.Close()

	body, readErr := ioutil.ReadAll(resp.Body)
	if readErr != nil {
		// BUG FIX: log the actual read error; previously this logged
		// `err`, which is nil on this path.
		log.Error().Err(readErr).Msgf("get filtered torrents read error: %v", filter)
		return nil, readErr
	}

	var torrents []Torrent
	err = json.Unmarshal(body, &torrents)
	if err != nil {
		log.Error().Err(err).Msgf("get filtered torrents unmarshal error: %v", filter)
		return nil, err
	}

	return torrents, nil
}
// GetTorrentsRaw returns the torrents/info response body as a raw
// JSON string without decoding it.
func (c *Client) GetTorrentsRaw() (string, error) {
	resp, err := c.get("torrents/info", nil)
	if err != nil {
		log.Error().Err(err).Msg("get torrents raw error")
		return "", err
	}

	defer resp.Body.Close()

	// BUG FIX: the read error was previously discarded with `_`.
	data, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		log.Error().Err(err).Msg("get torrents raw read error")
		return "", err
	}

	return string(data), nil
}
// GetTorrentTrackers returns the tracker list for the torrent
// identified by hash via torrents/trackers.
func (c *Client) GetTorrentTrackers(hash string) ([]TorrentTracker, error) {
	params := url.Values{}
	params.Add("hash", hash)
	p := params.Encode()

	resp, err := c.get("torrents/trackers?"+p, nil)
	if err != nil {
		log.Error().Err(err).Msgf("get torrent trackers error: %v", hash)
		return nil, err
	}

	defer resp.Body.Close()

	body, readErr := ioutil.ReadAll(resp.Body)
	if readErr != nil {
		// BUG FIX: log the actual read error; previously this logged
		// `err`, which is nil on this path.
		log.Error().Err(readErr).Msgf("get torrent trackers read error: %v", hash)
		return nil, readErr
	}

	var trackers []TorrentTracker
	err = json.Unmarshal(body, &trackers)
	if err != nil {
		log.Error().Err(err).Msgf("get torrent trackers: %v", hash)
		return nil, err
	}

	return trackers, nil
}
// AddTorrentFromFile add new torrent from torrent file
func (c *Client) AddTorrentFromFile(file string, options map[string]string) error {
res, err := c.postFile("torrents/add", file, options)
if err != nil {
log.Error().Err(err).Msgf("add torrents error: %v", file)
return err
} else if res.StatusCode != http.StatusOK {
log.Error().Err(err).Msgf("add torrents bad status: %v", file)
return err
}
defer res.Body.Close()
return nil
}
// DeleteTorrents removes the torrents identified by hashes, optionally
// deleting their downloaded data as well.
func (c *Client) DeleteTorrents(hashes []string, deleteFiles bool) error {
	v := url.Values{}

	// Add hashes together with | separator
	hv := strings.Join(hashes, "|")
	v.Add("hashes", hv)
	v.Add("deleteFiles", strconv.FormatBool(deleteFiles))

	encodedHashes := v.Encode()

	resp, err := c.get("torrents/delete?"+encodedHashes, nil)
	if err != nil {
		log.Error().Err(err).Msgf("delete torrents error: %v", hashes)
		return err
	}
	// Close unconditionally; previously the body leaked on the
	// bad-status path.
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		// BUG FIX: the original returned `err` (nil) here, hiding the
		// failure from the caller.
		log.Error().Msgf("delete torrents bad code: %v", hashes)
		return errors.New("delete torrents bad status")
	}

	return nil
}
// ReAnnounceTorrents forces the torrents identified by hashes to
// reannounce to their trackers.
func (c *Client) ReAnnounceTorrents(hashes []string) error {
	v := url.Values{}

	// Add hashes together with | separator
	hv := strings.Join(hashes, "|")
	v.Add("hashes", hv)

	encodedHashes := v.Encode()

	resp, err := c.get("torrents/reannounce?"+encodedHashes, nil)
	if err != nil {
		log.Error().Err(err).Msgf("re-announce error: %v", hashes)
		return err
	}
	// Close unconditionally; previously the body leaked on the
	// bad-status path.
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		// BUG FIX: the original returned `err` (nil) here, hiding the
		// failure from the caller.
		log.Error().Msgf("re-announce error bad status: %v", hashes)
		return errors.New("re-announce bad status")
	}

	return nil
}

100
pkg/releaseinfo/parser.go Normal file
View file

@ -0,0 +1,100 @@
package releaseinfo
import (
"reflect"
"strconv"
"strings"
)
// ReleaseInfo is the resulting structure returned by Parse
type ReleaseInfo struct {
Title string
Season int
Episode int
Year int
Resolution string
Source string
Codec string
Container string
Audio string
Group string
Region string
Extended bool
Hardcoded bool
Proper bool
Repack bool
Widescreen bool
Website string
Language string
Sbs string
Unrated bool
Size string
ThreeD bool
}
// setField assigns val to the ReleaseInfo field whose name matches
// field (title-cased), converting according to the field's kind:
// bool fields are set to true (presence implies the flag), int/uint
// fields are parsed from val, string fields receive val verbatim.
// Parse errors and unknown field names are silently ignored.
// raw is currently unused here beyond the debug line below.
func setField(tor *ReleaseInfo, field, raw, val string) {
	ttor := reflect.TypeOf(tor)
	torV := reflect.ValueOf(tor)

	// Pattern names are lowercase; struct fields are exported, so
	// title-case the name before the reflection lookup.
	// NOTE(review): strings.Title is deprecated in newer Go; fine for
	// ASCII pattern names, but worth revisiting.
	field = strings.Title(field)
	v, _ := ttor.Elem().FieldByName(field)

	//fmt.Printf("  field=%v, type=%+v, value=%v, raw=%v\n", field, v.Type, val, raw)

	switch v.Type.Kind() {
	case reflect.Bool:
		torV.Elem().FieldByName(field).SetBool(true)
	case reflect.Int:
		clean, _ := strconv.ParseInt(val, 10, 64)
		torV.Elem().FieldByName(field).SetInt(clean)
	case reflect.Uint:
		clean, _ := strconv.ParseUint(val, 10, 64)
		torV.Elem().FieldByName(field).SetUint(clean)
	case reflect.String:
		torV.Elem().FieldByName(field).SetString(val)
	}
}
// Parse breaks up the given filename in TorrentInfo
func Parse(filename string) (*ReleaseInfo, error) {
tor := &ReleaseInfo{}
//fmt.Printf("filename %q\n", filename)
var startIndex, endIndex = 0, len(filename)
cleanName := strings.Replace(filename, "_", " ", -1)
for _, pattern := range patterns {
matches := pattern.re.FindAllStringSubmatch(cleanName, -1)
if len(matches) == 0 {
continue
}
matchIdx := 0
if pattern.last {
// Take last occurrence of element.
matchIdx = len(matches) - 1
}
//fmt.Printf(" %s: pattern:%q match:%#v\n", pattern.name, pattern.re, matches[matchIdx])
index := strings.Index(cleanName, matches[matchIdx][1])
if index == 0 {
startIndex = len(matches[matchIdx][1])
//fmt.Printf(" startIndex moved to %d [%q]\n", startIndex, filename[startIndex:endIndex])
} else if index < endIndex {
endIndex = index
//fmt.Printf(" endIndex moved to %d [%q]\n", endIndex, filename[startIndex:endIndex])
}
setField(tor, pattern.name, matches[matchIdx][1], matches[matchIdx][2])
}
// Start process for title
//fmt.Println(" title: <internal>")
raw := strings.Split(filename[startIndex:endIndex], "(")[0]
cleanName = raw
if strings.HasPrefix(cleanName, "- ") {
cleanName = raw[2:]
}
if strings.ContainsRune(cleanName, '.') && !strings.ContainsRune(cleanName, ' ') {
cleanName = strings.Replace(cleanName, ".", " ", -1)
}
cleanName = strings.Replace(cleanName, "_", " ", -1)
//cleanName = re.sub('([\[\(_]|- )$', '', cleanName).strip()
setField(tor, "title", raw, strings.TrimSpace(cleanName))
return tor, nil
}

View file

@ -0,0 +1,331 @@
package releaseinfo
import (
"flag"
"testing"
"github.com/stretchr/testify/assert"
)
// updateGoldenFiles toggles regeneration of the golden files under
// testdata/ (used by the currently commented-out golden-file tests).
var updateGoldenFiles = flag.Bool("update", false, "update golden files in testdata/")
// testData is a corpus of real-world release names used by the
// commented-out golden-file tests below.
var testData = []string{
	"The Walking Dead S05E03 720p HDTV x264-ASAP[ettv]",
	"Hercules (2014) 1080p BrRip H264 - YIFY",
	"Dawn.of.the.Planet.of.the.Apes.2014.HDRip.XViD-EVO",
	"The Big Bang Theory S08E06 HDTV XviD-LOL [eztv]",
	"22 Jump Street (2014) 720p BrRip x264 - YIFY",
	"Hercules.2014.EXTENDED.1080p.WEB-DL.DD5.1.H264-RARBG",
	"Hercules.2014.Extended.Cut.HDRip.XViD-juggs[ETRG]",
	"Hercules (2014) WEBDL DVDRip XviD-MAX",
	"WWE Hell in a Cell 2014 PPV WEB-DL x264-WD -={SPARROW}=-",
	"UFC.179.PPV.HDTV.x264-Ebi[rartv]",
	"Marvels Agents of S H I E L D S02E05 HDTV x264-KILLERS [eztv]",
	"X-Men.Days.of.Future.Past.2014.1080p.WEB-DL.DD5.1.H264-RARBG",
	"Guardians Of The Galaxy 2014 R6 720p HDCAM x264-JYK",
	"Marvel's.Agents.of.S.H.I.E.L.D.S02E01.Shadows.1080p.WEB-DL.DD5.1",
	"Marvels Agents of S.H.I.E.L.D. S02E06 HDTV x264-KILLERS[ettv]",
	"Guardians of the Galaxy (CamRip / 2014)",
	"The.Walking.Dead.S05E03.1080p.WEB-DL.DD5.1.H.264-Cyphanix[rartv]",
	"Brave.2012.R5.DVDRip.XViD.LiNE-UNiQUE",
	"Lets.Be.Cops.2014.BRRip.XViD-juggs[ETRG]",
	"These.Final.Hours.2013.WBBRip XViD",
	"Downton Abbey 5x06 HDTV x264-FoV [eztv]",
	"Annabelle.2014.HC.HDRip.XViD.AC3-juggs[ETRG]",
	"Lucy.2014.HC.HDRip.XViD-juggs[ETRG]",
	"The Flash 2014 S01E04 HDTV x264-FUM[ettv]",
	"South Park S18E05 HDTV x264-KILLERS [eztv]",
	"The Flash 2014 S01E03 HDTV x264-LOL[ettv]",
	"The Flash 2014 S01E01 HDTV x264-LOL[ettv]",
	"Lucy 2014 Dual-Audio WEBRip 1400Mb",
	"Teenage Mutant Ninja Turtles (HdRip / 2014)",
	"Teenage Mutant Ninja Turtles (unknown_release_type / 2014)",
	"The Simpsons S26E05 HDTV x264 PROPER-LOL [eztv]",
	"2047 - Sights of Death (2014) 720p BrRip x264 - YIFY",
	"Two and a Half Men S12E01 HDTV x264 REPACK-LOL [eztv]",
	"Dinosaur 13 2014 WEBrip XviD AC3 MiLLENiUM",
	"Teenage.Mutant.Ninja.Turtles.2014.HDRip.XviD.MP3-RARBG",
	"Dawn.Of.The.Planet.of.The.Apes.2014.1080p.WEB-DL.DD51.H264-RARBG",
	"Teenage.Mutant.Ninja.Turtles.2014.720p.HDRip.x264.AC3.5.1-RARBG",
	"Gotham.S01E05.Viper.WEB-DL.x264.AAC",
	"Into.The.Storm.2014.1080p.WEB-DL.AAC2.0.H264-RARBG",
	"Lucy 2014 Dual-Audio 720p WEBRip",
	"Into The Storm 2014 1080p BRRip x264 DTS-JYK",
	"Sin.City.A.Dame.to.Kill.For.2014.1080p.BluRay.x264-SPARKS",
	"WWE Monday Night Raw 3rd Nov 2014 HDTV x264-Sir Paul",
	"Jack.And.The.Cuckoo-Clock.Heart.2013.BRRip XViD",
	"WWE Hell in a Cell 2014 HDTV x264 SNHD",
	"Dracula.Untold.2014.TS.XViD.AC3.MrSeeN-SiMPLE",
	"The Missing 1x01 Pilot HDTV x264-FoV [eztv]",
	"Doctor.Who.2005.8x11.Dark.Water.720p.HDTV.x264-FoV[rartv]",
	"Gotham.S01E07.Penguins.Umbrella.WEB-DL.x264.AAC",
	"One Shot [2014] DVDRip XViD-ViCKY",
	"The Shaukeens 2014 Hindi (1CD) DvDScr x264 AAC...Hon3y",
	"The Shaukeens (2014) 1CD DvDScr Rip x264 [DDR]",
	"Annabelle.2014.1080p.PROPER.HC.WEBRip.x264.AAC.2.0-RARBG",
	"Interstellar (2014) CAM ENG x264 AAC-CPG",
	"Guardians of the Galaxy (2014) Dual Audio DVDRip AVI",
	"Eliza Graves (2014) Dual Audio WEB-DL 720p MKV x264",
	"WWE Monday Night Raw 2014 11 10 WS PDTV x264-RKOFAN1990 -={SPARR",
	"Sons.of.Anarchy.S01E03",
	"doctor_who_2005.8x12.death_in_heaven.720p_hdtv_x264-fov",
	"breaking.bad.s01e01.720p.bluray.x264-reward",
	"Game of Thrones - 4x03 - Breaker of Chains",
	"[720pMkv.Com]_sons.of.anarchy.s05e10.480p.BluRay.x264-GAnGSteR",
	"[ www.Speed.cd ] -Sons.of.Anarchy.S07E07.720p.HDTV.X264-DIMENSION",
	"Community.s02e20.rus.eng.720p.Kybik.v.Kybe",
	"The.Jungle.Book.2016.3D.1080p.BRRip.SBS.x264.AAC-ETRG",
	"Ant-Man.2015.3D.1080p.BRRip.Half-SBS.x264.AAC-m2g",
	"Ice.Age.Collision.Course.2016.READNFO.720p.HDRIP.X264.AC3.TiTAN",
	"Red.Sonja.Queen.Of.Plagues.2016.BDRip.x264-W4F[PRiME]",
	"The Purge: Election Year (2016) HC - 720p HDRiP - 900MB - ShAaNi",
	"War Dogs (2016) HDTS 600MB - NBY",
	"The Hateful Eight (2015) 720p BluRay - x265 HEVC - 999MB - ShAaN",
	"The.Boss.2016.UNRATED.720p.BRRip.x264.AAC-ETRG",
	"Return.To.Snowy.River.1988.iNTERNAL.DVDRip.x264-W4F[PRiME]",
	"Akira (2016) - UpScaled - 720p - DesiSCR-Rip - Hindi - x264 - AC3 - 5.1 - Mafiaking - M2Tv",
	"Ben Hur 2016 TELESYNC x264 AC3 MAXPRO",
	"The.Secret.Life.of.Pets.2016.HDRiP.AAC-LC.x264-LEGi0N",
	"[HorribleSubs] Clockwork Planet - 10 [480p].mkv",
	"[HorribleSubs] Detective Conan - 862 [1080p].mkv",
	"thomas.and.friends.s19e09_s20e14.convert.hdtv.x264-w4f[eztv].mkv",
	"Blade.Runner.2049.2017.1080p.WEB-DL.DD5.1.H264-FGT-[rarbg.to]",
	"2012(2009).1080p.Dual Audio(Hindi+English) 5.1 Audios",
	"2012 (2009) 1080p BrRip x264 - 1.7GB - YIFY",
	"2012 2009 x264 720p Esub BluRay 6.0 Dual Audio English Hindi GOPISAHI",
}
// moreTestData holds additional, trickier release names (sports events,
// dubbed releases) not yet covered by the golden-file corpus.
var moreTestData = []string{
	"Tokyo Olympics 2020 Street Skateboarding Prelims and Final 25 07 2021 1080p WEB-DL AAC2 0 H 264-playWEB",
	"Tokyo Olympics 2020 Taekwondo Day3 Finals 26 07 720pEN25fps ES",
	"Die Freundin der Haie 2021 German DUBBED DL DOKU 1080p WEB x264-WiSHTV",
}
// movieTests is a corpus of movie release names (several entries are
// intentionally duplicated) reserved for the commented-out
// TestParse_Movies below.
var movieTests = []string{
	"The Last Letter from Your Lover 2021 2160p NF WEBRip DDP5 1 Atmos x265-KiNGS",
	"Blade 1998 Hybrid 1080p BluRay REMUX AVC Atmos-EPSiLON",
	"Forrest Gump 1994 1080p BluRay DDP7 1 x264-Geek",
	"Deux sous de violettes 1951 1080p Blu-ray Remux AVC FLAC 2 0-EDPH",
	"Predator 1987 2160p UHD BluRay DTS-HD MA 5 1 HDR x265-W4NK3R",
	"Final Destination 2 2003 1080p BluRay x264-ETHOS",
	"Hellboy.II.The.Golden.Army.2008.REMASTERED.NORDiC.1080p.BluRay.x264-PANDEMONiUM",
	"Wonders of the Sea 2017 BluRay 1080p AVC DTS-HD MA 2.0-BeyondHD",
	"A Week Away 2021 1080p NF WEB-DL DDP 5.1 Atmos DV H.265-SymBiOTes",
	"Control 2004 BluRay 1080p DTS-HD MA 5.1 AVC REMUX-FraMeSToR",
	"Mimi 2021 1080p Hybrid WEB-DL DDP 5.1 x264-Telly",
	"She's So Lovely 1997 BluRay 1080p DTS-HD MA 5.1 AVC REMUX-FraMeSToR",
	"Those Who Wish Me Dead 2021 BluRay 1080p DD5.1 x264-BHDStudio",
	"The Last Letter from Your Lover 2021 2160p NF WEBRip DDP 5.1 Atmos x265-KiNGS",
	"Spinning Man 2018 BluRay 1080p DTS 5.1 x264-MTeam",
	"The Wicker Man 1973 Final Cut 1080p BluRay FLAC 1.0 x264-NTb",
	"New Police Story 2004 720p BluRay DTS x264-HiFi",
	"La Cienaga 2001 Criterion Collection NTSC DVD9 DD 2.0",
	"The Thin Blue Line 1988 Criterion Collection NTSC DVD9 DD 2.0",
	"The Thin Red Line 1998 Criterion Collection NTSC 2xDVD9 DD 5.1",
	"The Sword of Doom AKA daibosatsu 1966 Criterion Collection NTSC DVD9 DD 1.0",
	"Freaks 2018 Hybrid REPACK 1080p BluRay REMUX AVC DTS-HD MA 5.1-EPSiLON",
	"The Oxford Murders 2008 1080p BluRay Remux AVC DTS-HD MA 7.1-Pootis",
	"Berlin Babylon 2001 PAL DVD9 DD 5.1",
	"Dillinger 1973 1080p BluRay REMUX AVC DTS-HD MA 1.0-HiDeFZeN",
	"True Romance 1993 2160p UHD Blu-ray DV HDR HEVC DTS-HD MA 5.1",
	"Family 2019 1080p AMZN WEB-DL DD+ 5.1 H.264-TEPES",
	"Family 2019 720p AMZN WEB-DL DD+ 5.1 H.264-TEPES",
	"The Banana Splits Movie 2019 NTSC DVD9 DD 5.1-(_10_)",
	"Sex Is Zero AKA saegjeugsigong 2002 720p BluRay DD 5.1 x264-KiR",
	"Sex Is Zero AKA saegjeugsigong 2002 1080p BluRay DTS 5.1 x264-KiR",
	"Sex Is Zero AKA saegjeugsigong 2002 1080p KOR Blu-ray AVC DTS-HD MA 5.1-ARiN",
	"The Stranger AKA aagntuk 1991 Criterion Collection NTSC DVD9 DD 1.0",
	"The Taking of Power by Louis XIV AKA La prise de pouvoir par Louis XIV 1966 Criterion Collection NTSC DVD9 DD 1.0",
	"La Cienaga 2001 Criterion Collection NTSC DVD9 DD 2.0",
	"The Thin Blue Line 1988 Criterion Collection NTSC DVD9 DD 2.0",
	"The Thin Red Line 1998 Criterion Collection NTSC 2xDVD9 DD 5.1",
	"The Sword of Doom AKA daibosatsu 1966 Criterion Collection NTSC DVD9 DD 1.0",
	"Freaks 2018 Hybrid REPACK 1080p BluRay REMUX AVC DTS-HD MA 5.1-EPSiLON",
	"The Oxford Murders 2008 1080p BluRay Remux AVC DTS-HD MA 7.1-Pootis",
	"Berlin Babylon 2001 PAL DVD9 DD 5.1",
	"Dillinger 1973 1080p BluRay REMUX AVC DTS-HD MA 1.0-HiDeFZeN",
	"True Romance 1993 2160p UHD Blu-ray DV HDR HEVC DTS-HD MA 5.1",
	"La Cienaga 2001 Criterion Collection NTSC DVD9 DD 2.0",
	"Freaks 2018 Hybrid REPACK 1080p BluRay REMUX AVC DTS-HD MA 5.1-EPSiLON",
	"The Oxford Murders 2008 1080p BluRay Remux AVC DTS-HD MA 7.1-Pootis",
}
//func TestParse_Movies(t *testing.T) {
// type args struct {
// filename string
// }
// tests := []struct {
// filename string
// want *ReleaseInfo
// wantErr bool
// }{
// {filename: "", want: nil, wantErr: false},
// }
// for _, tt := range tests {
// t.Run(tt.filename, func(t *testing.T) {
// got, err := Parse(tt.filename)
// if (err != nil) != tt.wantErr {
// t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantErr)
// return
// }
// if !reflect.DeepEqual(got, tt.want) {
// t.Errorf("Parse() got = %v, want %v", got, tt.want)
// }
// })
// }
//}
// tvTests is a corpus of TV release names; only a subset is currently
// exercised by TestParse_TV.
var tvTests = []string{
	"Melrose Place S04 480p web-dl eac3 x264",
	"Privileged.S01E17.1080p.WEB.h264-DiRT",
	"Banshee S02 BluRay 720p DD5.1 x264-NTb",
	"Banshee S04 BluRay 720p DTS x264-NTb",
	"Servant S01 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-FLUX",
	"South Park S06 1080p BluRay DD5.1 x264-W4NK3R",
	"The Walking Dead: Origins S01E01 1080p WEB-DL DDP 2.0 H.264-GOSSIP",
	"Mythic Quest S01 2160p ATVP WEB-DL DDP 5.1 Atmos DV HEVC-FLUX",
	"Masameer County S01 1080p NF WEB-DL DD+ 5.1 H.264-XIQ",
	"Kevin Can F**K Himself 2021 S01 1080p AMZN WEB-DL DD+ 5.1 H.264-SaiTama",
	"How to Sell Drugs Online (Fast) S03 1080p NF WEB-DL DD+ 5.1 x264-KnightKing",
	"Power Book III: Raising Kanan S01E01 2160p WEB-DL DD+ 5.1 H265-GGEZ",
	"Power Book III: Raising Kanan S01E02 2160p WEB-DL DD+ 5.1 H265-GGWP",
	"Thea Walking Dead: Origins S01E01 1080p WEB-DL DD+ 2.0 H.264-GOSSIP",
	"Mean Mums S01 1080p AMZN WEB-DL DD+ 2.0 H.264-FLUX",
}
// TestParse_TV checks Parse against a table of TV release names,
// comparing the full ReleaseInfo struct for each case.
func TestParse_TV(t *testing.T) {
	type testCase struct {
		filename string
		want     *ReleaseInfo
		wantErr  bool
	}

	cases := []testCase{
		{
			filename: "Melrose Place S04 480p web-dl eac3 x264",
			want: &ReleaseInfo{
				Title:      "Melrose Place",
				Season:     4,
				Resolution: "480p",
				Source:     "web-dl",
				Codec:      "x264",
				Group:      "dl eac3 x264",
			},
			wantErr: false,
		},
		{
			filename: "Privileged.S01E17.1080p.WEB.h264-DiRT",
			want: &ReleaseInfo{
				Title:      "Privileged",
				Season:     1,
				Episode:    17,
				Resolution: "1080p",
				Source:     "WEB",
				Codec:      "h264",
				Group:      "DiRT",
			},
			wantErr: false,
		},
		{
			filename: "Banshee S02 BluRay 720p DD5.1 x264-NTb",
			want: &ReleaseInfo{
				Title:      "Banshee",
				Season:     2,
				Resolution: "720p",
				Source:     "BluRay",
				Codec:      "x264",
				Audio:      "DD5.1",
				Group:      "NTb",
			},
			wantErr: false,
		},
		{
			filename: "Banshee Season 2 BluRay 720p DD5.1 x264-NTb",
			want: &ReleaseInfo{
				Title:      "Banshee",
				Season:     2,
				Resolution: "720p",
				Source:     "BluRay",
				Codec:      "x264",
				Audio:      "DD5.1",
				Group:      "NTb",
			},
			wantErr: false,
		},
	}

	for _, tc := range cases {
		t.Run(tc.filename, func(t *testing.T) {
			got, err := Parse(tc.filename)
			if (err != nil) != tc.wantErr {
				t.Errorf("Parse() error = %v, wantErr %v", err, tc.wantErr)
				return
			}
			assert.Equal(t, tc.want, got)
		})
	}
}
// gamesTests is a small corpus of game release names; not yet used by
// any active test.
var gamesTests = []string{
	"Night Book NSW-LUMA",
	"Evdeki Lanet-DARKSiDERS",
	"Evdeki.Lanet-DARKSiDERS",
}
//func TestParser(t *testing.T) {
// for i, fname := range testData {
// t.Run(fmt.Sprintf("golden_file_%03d", i), func(t *testing.T) {
// tor, err := Parse(fname)
// if err != nil {
// t.Fatalf("test %v: parser error:\n %v", i, err)
// }
//
// var want ReleaseInfo
//
// if !reflect.DeepEqual(*tor, want) {
// t.Fatalf("test %v: wrong result for %q\nwant:\n %v\ngot:\n %v", i, fname, want, *tor)
// }
// })
// }
//}
//func TestParserWriteToFiles(t *testing.T) {
// for i, fname := range testData {
// t.Run(fmt.Sprintf("golden_file_%03d", i), func(t *testing.T) {
// tor, err := Parse(fname)
// if err != nil {
// t.Fatalf("test %v: parser error:\n %v", i, err)
// }
//
// goldenFilename := filepath.Join("testdata", fmt.Sprintf("golden_file_%03d.json", i))
//
// if *updateGoldenFiles {
// buf, err := json.MarshalIndent(tor, "", " ")
// if err != nil {
// t.Fatalf("error marshaling result: %v", err)
// }
//
// if err = ioutil.WriteFile(goldenFilename, buf, 0644); err != nil {
// t.Fatalf("unable to update golden file: %v", err)
// }
// }
//
// buf, err := ioutil.ReadFile(goldenFilename)
// if err != nil {
// t.Fatalf("error loading golden file: %v", err)
// }
//
// var want ReleaseInfo
// err = json.Unmarshal(buf, &want)
// if err != nil {
// t.Fatalf("error unmarshalling golden file %v: %v", goldenFilename, err)
// }
//
// if !reflect.DeepEqual(*tor, want) {
// t.Fatalf("test %v: wrong result for %q\nwant:\n %v\ngot:\n %v", i, fname, want, *tor)
// }
// })
// }
//}

View file

@ -0,0 +1,58 @@
package releaseinfo
import (
"fmt"
"os"
"reflect"
"regexp"
)
// patterns is the ordered matching table driving Parse. Order matters:
// patterns run top to bottom and each hit narrows the title window,
// so more specific patterns should come first.
var patterns = []struct {
	name string
	// Use the last matching pattern. E.g. Year.
	last bool
	kind reflect.Kind
	// REs need to have 2 sub expressions (groups), the first one is "raw", and
	// the second one for the "clean" value.
	// E.g. Episode matching on "S01E18" will result in: raw = "E18", clean = "18".
	re *regexp.Regexp
}{
	//{"season", false, reflect.Int, regexp.MustCompile(`(?i)(s?([0-9]{1,2}))[ex]`)},
	{"season", false, reflect.Int, regexp.MustCompile(`(?i)((?:S|Season\s*)(\d{1,3}))`)},
	{"episode", false, reflect.Int, regexp.MustCompile(`(?i)([ex]([0-9]{2})(?:[^0-9]|$))`)},
	{"episode", false, reflect.Int, regexp.MustCompile(`(-\s+([0-9]+)(?:[^0-9]|$))`)},
	{"year", true, reflect.Int, regexp.MustCompile(`\b(((?:19[0-9]|20[0-9])[0-9]))\b`)},
	{"resolution", false, reflect.String, regexp.MustCompile(`\b(([0-9]{3,4}p|i))\b`)},
	{"source", false, reflect.String, regexp.MustCompile(`(?i)\b(((?:PPV\.)?[HP]DTV|(?:HD)?CAM|B[DR]Rip|(?:HD-?)?TS|(?:PPV )?WEB-?DL(?: DVDRip)?|HDRip|DVDRip|DVDRIP|CamRip|WEB|W[EB]BRip|BluRay|DvDScr|telesync))\b`)},
	{"codec", false, reflect.String, regexp.MustCompile(`(?i)\b((xvid|HEVC|[hx]\.?26[45]))\b`)},
	{"container", false, reflect.String, regexp.MustCompile(`(?i)\b((MKV|AVI|MP4))\b`)},
	{"audio", false, reflect.String, regexp.MustCompile(`(?i)\b((MP3|DD5\.?1|Dual[\- ]Audio|LiNE|DTS|AAC[.-]LC|AAC(?:\.?2\.0)?|AC3(?:\.5\.1)?))\b`)},
	{"region", false, reflect.String, regexp.MustCompile(`(?i)\b(R([0-9]))\b`)},
	{"size", false, reflect.String, regexp.MustCompile(`(?i)\b((\d+(?:\.\d+)?(?:GB|MB)))\b`)},
	{"website", false, reflect.String, regexp.MustCompile(`^(\[ ?([^\]]+?) ?\])`)},
	{"language", false, reflect.String, regexp.MustCompile(`(?i)\b((rus\.eng|ita\.eng))\b`)},
	{"sbs", false, reflect.String, regexp.MustCompile(`(?i)\b(((?:Half-)?SBS))\b`)},
	{"group", false, reflect.String, regexp.MustCompile(`\b(- ?([^-]+(?:-={[^-]+-?$)?))$`)},
	// Boolean flags: presence of the match sets the field to true.
	{"extended", false, reflect.Bool, regexp.MustCompile(`(?i)\b(EXTENDED(:?.CUT)?)\b`)},
	{"hardcoded", false, reflect.Bool, regexp.MustCompile(`(?i)\b((HC))\b`)},
	{"proper", false, reflect.Bool, regexp.MustCompile(`(?i)\b((PROPER))\b`)},
	{"repack", false, reflect.Bool, regexp.MustCompile(`(?i)\b((REPACK))\b`)},
	{"widescreen", false, reflect.Bool, regexp.MustCompile(`(?i)\b((WS))\b`)},
	{"unrated", false, reflect.Bool, regexp.MustCompile(`(?i)\b((UNRATED))\b`)},
	{"threeD", false, reflect.Bool, regexp.MustCompile(`(?i)\b((3D))\b`)},
}
// init sanity-checks the patterns table at startup: every regexp must
// expose exactly the two capture groups (raw, clean) that setField and
// Parse rely on. A violation is a programming error, so the process
// exits immediately.
func init() {
	for _, pat := range patterns {
		if pat.re.NumSubexp() != 2 {
			fmt.Printf("Pattern %q does not have enough capture groups. want 2, got %d\n", pat.name, pat.re.NumSubexp())
			os.Exit(1)
		}
	}
}

51
pkg/wildcard/match.go Normal file
View file

@ -0,0 +1,51 @@
package wildcard
// MatchSimple reports whether name satisfies pattern, honoring only the
// '*' wildcard. A file system path is treated as a flat name space.
func MatchSimple(pattern, name string) bool {
	switch pattern {
	case "":
		// Empty pattern only matches the empty name.
		return name == ""
	case "*":
		return true
	}
	// Does only wildcard '*' match.
	return deepMatchRune([]rune(name), []rune(pattern), true)
}
// Match reports whether name satisfies pattern, honoring both the '*'
// and '?' wildcards. Unlike path.Match(), a path is treated as a flat
// name space while matching the pattern.
// The difference is illustrated in the example here https://play.golang.org/p/Ega9qgD4Qz .
func Match(pattern, name string) (matched bool) {
	switch pattern {
	case "":
		// Empty pattern only matches the empty name.
		return name == ""
	case "*":
		return true
	}
	// Does extended wildcard '*' and '?' match.
	return deepMatchRune([]rune(name), []rune(pattern), false)
}
// deepMatchRune matches str against pattern rune-by-rune. '*' matches
// any (possibly empty) run of runes. '?' consumes exactly one rune; in
// simple mode an exhausted str still satisfies '?' (historical
// MatchSimple behavior), while in full mode it fails the match.
func deepMatchRune(str, pattern []rune, simple bool) bool {
	for len(pattern) > 0 {
		switch pattern[0] {
		default:
			if len(str) == 0 || str[0] != pattern[0] {
				return false
			}
		case '?':
			// BUG FIX: the previous code only returned early when
			// !simple, so an exhausted str in simple mode fell through
			// to `str = str[1:]` below and panicked with a slice
			// bounds out of range.
			if len(str) == 0 {
				return simple
			}
		case '*':
			return deepMatchRune(str, pattern[1:], simple) ||
				(len(str) > 0 && deepMatchRune(str[1:], pattern, simple))
		}
		str = str[1:]
		pattern = pattern[1:]
	}
	return len(str) == 0 && len(pattern) == 0
}

View file

@ -0,0 +1,37 @@
package wildcard
import "testing"
// TestMatch - Tests validate the logic of wild card matching.
// `Match` supports '*' and '?' wildcards.
// Sample usage: In resource matching for bucket policy validation.
func TestMatch(t *testing.T) {
	cases := []struct {
		pattern string
		text    string
		matched bool
	}{
		{pattern: "The?Simpsons*", text: "The Simpsons S12", matched: true},
		{pattern: "The?Simpsons*", text: "The.Simpsons.S12", matched: true},
		{pattern: "The?Simpsons*", text: "The.Simps.S12", matched: false},
	}

	// Iterating over the test cases, call the function under test and assert the output.
	for i, tc := range cases {
		got := Match(tc.pattern, tc.text)
		if got != tc.matched {
			t.Errorf("Test %d: Expected the result to be `%v`, but instead found it to be `%v`", i+1, tc.matched, got)
		}
	}
}