- Skip invalid webhooks

- Build cache based on row data instead of entire row
- Compare columns instead of entire row
- Removed commitHash check and instead just processes on successful git pull
- No more entire file dumps!
This commit is contained in:
Daniel Mason 2021-09-03 14:16:15 +12:00
parent ae11cc53a3
commit 86eef8cc8e
Signed by: idanoo
GPG key ID: 387387CDBC02F132
5 changed files with 102 additions and 132 deletions

View file

@ -19,49 +19,65 @@ type UpdatedLocations struct {
// Updated data // Updated data
type UpdatedRow struct { type UpdatedRow struct {
ChangeDate time.Time // To order b FromDate time.Time `json:"FromDate"` // Start date
ChangeType string // ADDED, REMOVED, MODIFIED EndDate time.Time `json:"EndDate"` // End date
DiscordData string // Formatted Row data LocationName string `json:"LocationName"` // Location Name
TwitterData string // Formatted Row data LocationAddress string `json:"LocationAddress"` // Location Address
SlackData string // Formatted Row data
DiscordData string `json:"-"` // Formatted Row data
TwitterData string `json:"-"` // Formatted Row data
SlackData string `json:"-"` // Formatted Row data
} }
// Struct of updated locations // Struct of updated locations
var updatedLocations UpdatedLocations var updatedLocations UpdatedLocations
// cache of [exposureID]row of row data // cache of [exposureID]row of row data
var rowCache map[string]string var rowCache map[string]UpdatedRow
// parseCsvRow Build into struct for output later // parseCsvRow Build into struct for output later
func parseCsvRow(changeType string, data string) { func parseCsvRow(data string) {
parsedTime := parseTimeFromRow(data) c, st, et := parseRawRowData(data)
c := parseRawRowData(data) if rowHasChanged(c[4], st, et, c[1], c[2]) {
if rowHasChanged(c[4], data) {
newRow := UpdatedRow{ newRow := UpdatedRow{
ChangeDate: parsedTime, FromDate: st,
ChangeType: changeType, EndDate: et,
DiscordData: formatCsvDiscordRow(c), LocationName: c[1],
TwitterData: formatCsvTwitterRow(c), LocationAddress: c[2],
SlackData: formatCsvSlackRow(c), DiscordData: formatCsvDiscordRow(c),
TwitterData: formatCsvTwitterRow(c),
SlackData: formatCsvSlackRow(c),
} }
// Update row cache // Update row cache! [exposureId]UpdatedRow
rowCache[c[4]] = data rowCache[c[4]] = newRow
// Append row data // Append row data
updatedLocations.Locations = append(updatedLocations.Locations, newRow) updatedLocations.Locations = append(updatedLocations.Locations, newRow)
} }
} }
// rowHasChanged - Determine if row has actually changed // rowHasChanged - Determine if row has actually changed based on raw data
func rowHasChanged(exposureId string, row string) bool { func rowHasChanged(exposureId string, startTime time.Time, endTime time.Time, locationName string, locationAddress string) bool {
val, exists := rowCache[exposureId] val, exists := rowCache[exposureId]
if !exists { if !exists {
return true return true
} }
if val != row { if val.FromDate != startTime {
return true
}
if val.EndDate != endTime {
return true
}
if val.LocationName != locationName {
return true
}
if val.LocationAddress != locationAddress {
return true return true
} }
@ -71,7 +87,9 @@ func rowHasChanged(exposureId string, row string) bool {
// loadRepoIntoCache - reads all CSV data and parses the rows into our cache // loadRepoIntoCache - reads all CSV data and parses the rows into our cache
func loadRepoIntoCache(repoLocation string) { func loadRepoIntoCache(repoLocation string) {
// Init our cache! // Init our cache!
rowCache = make(map[string]string) rowCache = make(map[string]UpdatedRow)
// Load cache file. ELSE load files.
folders, err := ioutil.ReadDir(repoLocation + "/locations-of-interest") folders, err := ioutil.ReadDir(repoLocation + "/locations-of-interest")
if err != nil { if err != nil {
@ -113,12 +131,6 @@ func loadRowsIntoCache(filePath string) {
// Iterate through the records // Iterate through the records
i := 0 i := 0
for { for {
// Skip header row
if i == 0 {
i++
continue
}
// Read each record from csv // Read each record from csv
row, err := r.Read() row, err := r.Read()
if err == io.EOF { if err == io.EOF {
@ -128,14 +140,34 @@ func loadRowsIntoCache(filePath string) {
log.Fatal(err) log.Fatal(err)
} }
// Add to cache var // Skip header row
rowCache[row[0]] = strings.Join(row, ",") if i == 0 {
i++
continue
}
// Parse into our required format
c := make([]string, 0)
c = append(c, row...)
st, et := parseRowTimes(c[4], c[5])
// Build object
newRow := UpdatedRow{
FromDate: st,
EndDate: et,
LocationName: c[1],
LocationAddress: c[2],
}
// Add to cache
rowCache[row[0]] = newRow
} }
} }
func orderRowDataByDate() { func orderRowDataByDate() {
sort.Slice(updatedLocations.Locations, func(i, j int) bool { sort.Slice(updatedLocations.Locations, func(i, j int) bool {
return updatedLocations.Locations[i].ChangeDate.Before(updatedLocations.Locations[j].ChangeDate) return updatedLocations.Locations[i].FromDate.Before(updatedLocations.Locations[j].FromDate)
}) })
} }
@ -154,30 +186,8 @@ func formatCsvSlackRow(c []string) string {
return fmt.Sprintf("*%s* %s on _%s_ - _%s_", c[2], c[3], c[0], c[1]) return fmt.Sprintf("*%s* %s on _%s_ - _%s_", c[2], c[3], c[0], c[1])
} }
func parseTimeFromRow(data string) time.Time {
r := csv.NewReader(strings.NewReader(data))
r.Comma = ','
fields, err := r.Read()
if err != nil {
fmt.Println(err)
return time.Now()
}
c := make([]string, 0)
c = append(c, fields...)
starttime := c[4]
st, err := time.Parse("02/01/2006, 3:04 pm", starttime)
if err != nil {
log.Print(err)
return time.Now()
}
return st
}
// Returns []string of parsed data.. starttime, endtime, name, address, ID // Returns []string of parsed data.. starttime, endtime, name, address, ID
func parseRawRowData(data string) []string { func parseRawRowData(data string) ([]string, time.Time, time.Time) {
output := make([]string, 0) output := make([]string, 0)
r := csv.NewReader(strings.NewReader(data)) r := csv.NewReader(strings.NewReader(data))
@ -185,39 +195,41 @@ func parseRawRowData(data string) []string {
fields, err := r.Read() fields, err := r.Read()
if err != nil { if err != nil {
fmt.Println(err) fmt.Println(err)
return output return output, time.Now(), time.Now()
} }
c := make([]string, 0) c := make([]string, 0)
c = append(c, fields...) c = append(c, fields...)
starttime := c[4] st, et := parseRowTimes(c[4], c[5])
st, err := time.Parse("2/01/2006, 3:04 pm", starttime)
starttime := st.Format("Monday 2 Jan, 3:04PM")
endtime := et.Format("3:04PM")
return append(output, starttime, endtime, c[1], c[2], c[0]), st, et
}
func parseRowTimes(startString string, endString string) (time.Time, time.Time) {
st, err := time.Parse("2/01/2006, 3:04 pm", startString)
if err != nil { if err != nil {
log.Print(err) log.Print(err)
st, err = time.Parse("2006-01-02 15:04:05", starttime) st, err = time.Parse("2006-01-02 15:04:05", startString)
if err != nil { if err != nil {
log.Print(err) log.Print(err)
starttime = c[4] st = time.Now()
} }
} else {
starttime = st.Format("Monday 2 Jan, 3:04PM")
} }
endtime := c[5] et, err := time.Parse("2/01/2006, 3:04 pm", endString)
et, err := time.Parse("2/01/2006, 3:04 pm", endtime)
if err != nil { if err != nil {
log.Print(err) log.Print(err)
et, err = time.Parse("2006-01-02 15:04:05", endtime) et, err = time.Parse("2006-01-02 15:04:05", endString)
if err != nil { if err != nil {
log.Print(err) log.Print(err)
endtime = c[5] et = time.Now()
} }
} else {
endtime = et.Format("3:04PM")
} }
return st, et
return append(output, starttime, endtime, c[1], c[2], c[0])
} }
func getPostableDiscordData() []string { func getPostableDiscordData() []string {
@ -228,11 +240,7 @@ func getPostableDiscordData() []string {
rows := make([]string, 0) rows := make([]string, 0)
for _, location := range updatedLocations.Locations { for _, location := range updatedLocations.Locations {
if location.ChangeType == "REMOVED" { rows = append(rows, location.DiscordData)
rows = append(rows, fmt.Sprintf("REMOVED: %s", location.DiscordData))
} else {
rows = append(rows, location.DiscordData)
}
if len(rows) > 20 { if len(rows) > 20 {
groups = append(groups, strings.Join(rows, "\n")) groups = append(groups, strings.Join(rows, "\n"))
@ -250,11 +258,7 @@ func getPostableSlackData() []string {
} }
for _, location := range updatedLocations.Locations { for _, location := range updatedLocations.Locations {
if location.ChangeType == "REMOVED" { rows = append(rows, location.SlackData)
rows = append(rows, fmt.Sprintf("REMOVED: %s", location.SlackData))
} else {
rows = append(rows, location.SlackData)
}
} }
return rows return rows

View file

@ -12,6 +12,10 @@ import (
var DiscordWebhooks []string var DiscordWebhooks []string
func postToDiscord(webhookString string, msg string) { func postToDiscord(webhookString string, msg string) {
if webhookString == "" {
return
}
tokenParts := strings.Split(webhookString, "/") tokenParts := strings.Split(webhookString, "/")
len := len(tokenParts) len := len(tokenParts)
webhook, err := disgohook.NewWebhookClientByToken(nil, nil, tokenParts[len-2]+"/"+tokenParts[len-1]) webhook, err := disgohook.NewWebhookClientByToken(nil, nil, tokenParts[len-2]+"/"+tokenParts[len-1])

View file

@ -1,22 +1,17 @@
package nzcovidbot package nzcovidbot
import ( import (
"io/ioutil"
"log" "log"
"os" "os"
"strings" "strings"
"github.com/go-git/go-git/v5" "github.com/go-git/go-git/v5"
"github.com/go-git/go-git/v5/plumbing"
"github.com/waigani/diffparser" "github.com/waigani/diffparser"
) )
// Repo URL // Repo URL
var gitRepo *git.Repository var gitRepo *git.Repository
// Current hash we have parsed
var commitHash string
// Location to store tmp data // Location to store tmp data
var tmpDirectory = "./tmp" var tmpDirectory = "./tmp"
@ -33,9 +28,6 @@ func loadRepo(repository string) {
// Preload cache data of current rows // Preload cache data of current rows
loadRepoIntoCache(tmpDirectory + "/repo") loadRepoIntoCache(tmpDirectory + "/repo")
commitHash := getCommitHash()
log.Printf("Last reported hash: %s", commitHash)
gitRepo = r gitRepo = r
} }
@ -57,50 +49,28 @@ func cloneRepo(repository string) *git.Repository {
log.Fatal(err) log.Fatal(err)
} }
ref, err := r.Head()
if err != nil {
log.Fatal(err)
}
setCommitHash(ref.Hash().String())
log.Println("Succesfully cloned repo") log.Println("Succesfully cloned repo")
return r return r
} }
// Set it in memory + write to disk
func setCommitHash(hash string) error {
log.Printf("Update hash to: %s", hash)
commitHash = hash
return os.WriteFile(tmpDirectory+"/commithash", []byte(hash), 0644)
}
// Read from memory, or disk
func getCommitHash() string {
if commitHash != "" {
return commitHash
}
hash, err := ioutil.ReadFile(tmpDirectory + "/commithash")
if err != nil {
log.Fatal(err)
}
commitHash = string(hash)
return commitHash
}
func checkForUpdates() { func checkForUpdates() {
// Fetch updates from remote // Fetch updates from remote
pullOptions := git.PullOptions{ pullOptions := git.PullOptions{
RemoteName: "origin", RemoteName: "origin",
} }
// Get current commit hash PRE PULL
currentHead, err := gitRepo.Head()
if err != nil {
log.Fatal(err)
}
currentHash := currentHead.Hash()
// Pull latest changes if exist // Pull latest changes if exist
worktree, err := gitRepo.Worktree() worktree, err := gitRepo.Worktree()
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }
err = worktree.Pull(&pullOptions) err = worktree.Pull(&pullOptions)
if err != nil { if err != nil {
if err == git.NoErrAlreadyUpToDate { if err == git.NoErrAlreadyUpToDate {
@ -111,7 +81,7 @@ func checkForUpdates() {
} }
} }
// Get current commit hash // Get current commit hash POST PULL
head, err := gitRepo.Head() head, err := gitRepo.Head()
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
@ -124,7 +94,7 @@ func checkForUpdates() {
} }
// Get current commit // Get current commit
currentCommit, err := gitRepo.CommitObject(plumbing.NewHash(getCommitHash())) currentCommit, err := gitRepo.CommitObject(currentHash)
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }
@ -153,27 +123,15 @@ func checkForUpdates() {
} }
if line.Mode == diffparser.ADDED { if line.Mode == diffparser.ADDED {
parseCsvRow("ADDED", line.Content) parseCsvRow(line.Content)
} }
// switch changeType := line.Mode; changeType {
// case diffparser.UNCHANGED:
// continue
// case diffparser.ADDED:
// parseCsvRow("ADDED", line.Content)
// case diffparser.REMOVED:
// continue
// // To re-add in future?
// // parseCsvRow("REMOVED", line.Content)
// }
} }
} }
} }
} }
} }
// Store current hash for future comparisons
setCommitHash(head.Hash().String())
// SEND IT o/----> // SEND IT o/---->
postTheUpdates() postTheUpdates()
} }

View file

@ -51,7 +51,7 @@ func postTheUpdates() {
for _, discordWebhook := range DiscordWebhooks { for _, discordWebhook := range DiscordWebhooks {
for _, postableData := range postableDiscordData { for _, postableData := range postableDiscordData {
go postToDiscord(discordWebhook, postableData) go postToDiscord(discordWebhook, postableData)
time.Sleep(1 * time.Second) time.Sleep(500 * time.Millisecond)
} }
} }

View file

@ -33,6 +33,10 @@ func postToSlack() {
} }
for _, webhook := range SlackWebhooks { for _, webhook := range SlackWebhooks {
if webhook == "" {
continue
}
err := slack.Send(webhook, "", payload) err := slack.Send(webhook, "", payload)
if len(err) > 0 { if len(err) > 0 {
fmt.Print(err) fmt.Print(err)