Mirror of https://github.com/idanoo/NZCovidBot, synced 2025-07-01 19:22:14 +00:00
Add ID based cache to determine if rows have actually changed
This commit is contained in:
parent c79f5e7e7e
commit c2d9a98bf9
3 changed files with 105 additions and 16 deletions
@@ -3,7 +3,10 @@ package nzcovidbot
 import (
     "encoding/csv"
     "fmt"
+    "io"
+    "io/ioutil"
     "log"
+    "os"
     "sort"
     "strings"
     "time"
@@ -26,19 +29,106 @@ type UpdatedRow struct {
 // Struct of updated locations
 var updatedLocations UpdatedLocations
 
+// cache of [exposureID]row of row data
+var rowCache map[string]string
+
 // parseCsvRow Build into struct for output later
 func parseCsvRow(changeType string, data string) {
     parsedTime := parseTimeFromRow(data)
 
-    newRow := UpdatedRow{
-        ChangeDate:  parsedTime,
-        ChangeType:  changeType,
-        DiscordData: formatCsvDiscordRow(data),
-        TwitterData: formatCsvTwitterRow(data),
-        SlackData:   formatCsvSlackRow(data),
-    }
-
-    updatedLocations.Locations = append(updatedLocations.Locations, newRow)
+    c := parseRawRowData(data)
+    if rowHasChanged(c[4], data) {
+        newRow := UpdatedRow{
+            ChangeDate:  parsedTime,
+            ChangeType:  changeType,
+            DiscordData: formatCsvDiscordRow(c),
+            TwitterData: formatCsvTwitterRow(c),
+            SlackData:   formatCsvSlackRow(c),
+        }
+
+        // Update row cache
+        rowCache[c[4]] = data
+
+        // Append row data
+        updatedLocations.Locations = append(updatedLocations.Locations, newRow)
+    }
+}
+
+// rowHasChanged - Determine if row has actually changed
+func rowHasChanged(exposureId string, row string) bool {
+    val, exists := rowCache[exposureId]
+    if !exists {
+        return true
+    }
+
+    if val != row {
+        return true
+    }
+
+    return false
+}
+
+// loadRepoIntoCache - reads all CSV data and parses the rows into our cache
+func loadRepoIntoCache(repoLocation string) {
+    // Init our cache!
+    rowCache = make(map[string]string)
+
+    folders, err := ioutil.ReadDir(repoLocation + "/locations-of-interest")
+    if err != nil {
+        log.Fatal(err)
+    }
+
+    // /august-2021
+    for _, f := range folders {
+        if f.IsDir() {
+            files, err := ioutil.ReadDir(repoLocation + "/locations-of-interest/" + f.Name())
+            if err != nil {
+                log.Fatal(err)
+            }
+
+            // august-2021/locations-of-interest.csv
+            for _, x := range files {
+                fullLocation := repoLocation + "/locations-of-interest/" + f.Name() + "/" + x.Name()
+                if strings.HasSuffix(fullLocation, ".csv") {
+                    loadRowsIntoCache(fullLocation)
+                }
+            }
+        }
+    }
+}
+
+func loadRowsIntoCache(filePath string) {
+    // Open the file
+    csvfile, err := os.Open(filePath)
+    if err != nil {
+        log.Fatal(err)
+    }
+    defer csvfile.Close()
+
+    // Parse the file
+    r := csv.NewReader(csvfile)
+
+    // Iterate through the records
+    i := 0
+    for {
+        // Skip header row
+        if i == 0 {
+            i++
+            continue
+        }
+
+        // Read each record from csv
+        row, err := r.Read()
+        if err == io.EOF {
+            break
+        }
+        if err != nil {
+            log.Fatal(err)
+        }
+
+        // Add to cache var
+        rowCache[row[0]] = strings.Join(row, ",")
+    }
 }
 
 func orderRowDataByDate() {
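For context, a minimal, self-contained sketch of the change-detection idea the hunk above introduces. This is not the bot's actual wiring; the exposure ID and the raw CSV row in main are made up for illustration.

package main

import "fmt"

// rowCache mirrors the diff above: raw CSV row text keyed by exposure ID.
var rowCache = map[string]string{}

// rowHasChanged reports whether a row is unseen or differs from the cached copy.
func rowHasChanged(exposureID string, row string) bool {
    val, exists := rowCache[exposureID]
    if !exists {
        return true
    }
    return val != row
}

func main() {
    // Hypothetical raw row as it might appear in a locations-of-interest CSV.
    row := "a1b2c3,Some Cafe,1 Example St,2021-08-18 09:00:00,2021-08-18 11:00:00"

    fmt.Println(rowHasChanged("a1b2c3", row)) // true: nothing cached yet
    rowCache["a1b2c3"] = row                  // what parseCsvRow does after posting
    fmt.Println(rowHasChanged("a1b2c3", row)) // false: identical to cache
    fmt.Println(rowHasChanged("a1b2c3", row+" updated")) // true: content changed
}

The commit's rowHasChanged spells out the same logic with two separate if blocks; collapsing the second one to return val != row after the existence check is equivalent.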
@@ -48,20 +138,17 @@ func orderRowDataByDate() {
 }
 
 // formatCsvDiscordRow Format the string to a tidy string for the interwebs
-func formatCsvDiscordRow(data string) string {
-    c := parseRawRowData(data)
+func formatCsvDiscordRow(c []string) string {
     return fmt.Sprintf("**%s** %s on _%s_ - _%s_", c[2], c[3], c[0], c[1])
 }
 
 // formatCsvTwitterRow Format the string to a tidy string for the interwebs
-func formatCsvTwitterRow(data string) string {
-    c := parseRawRowData(data)
+func formatCsvTwitterRow(c []string) string {
     return fmt.Sprintf("New Location: %s\n%s\n%s - %s\n#NZCovidTracker #NZCovid", c[2], c[3], c[0], c[1])
 }
 
 // formatCsvSlackRow Format the string to a tidy string for the interwebs
-func formatCsvSlackRow(data string) string {
-    c := parseRawRowData(data)
+func formatCsvSlackRow(c []string) string {
     return fmt.Sprintf("*%s* %s on _%s_ - _%s_", c[2], c[3], c[0], c[1])
 }
 
@@ -87,7 +174,7 @@ func parseTimeFromRow(data string) time.Time {
     return st
 }
 
-// Returns []string of parsed
+// Returns []string of parsed data.. starttime, endtime, name, address, ID
 func parseRawRowData(data string) []string {
     output := make([]string, 0)
 
@@ -119,7 +206,7 @@ func parseRawRowData(data string) []string {
         endtime = et.Format("3:04PM")
     }
 
-    return append(output, starttime, endtime, c[1], c[2])
+    return append(output, starttime, endtime, c[1], c[2], c[0])
 }
 
 func getPostableDiscordData() []string {
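A quick illustrative note on the new calling convention: the formatters now receive the slice parseRawRowData already produced, so each raw row is parsed once instead of three times. In the sketch below the values are invented; only the index order (start time, end time, name, address, exposure ID) comes from the diff above.

package main

import "fmt"

func main() {
    // Hypothetical pre-parsed row, in the order the updated parseRawRowData returns it.
    c := []string{"9:00AM", "11:00AM", "Some Cafe", "1 Example St, Auckland", "a1b2c3"}

    // The same formatting formatCsvDiscordRow applies to the slice.
    fmt.Printf("**%s** %s on _%s_ - _%s_\n", c[2], c[3], c[0], c[1])

    // c[4] is the exposure ID used as the rowCache key.
    fmt.Println("cache key:", c[4])
}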
@@ -31,6 +31,9 @@ func loadRepo(repository string) {
         }
     }
 
+    // Preload cache data of current rows
+    loadRepoIntoCache(tmpDirectory + "/repo")
+
     commitHash := getCommitHash()
     log.Printf("Last reported hash: %s", commitHash)
     gitRepo = r
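As an aside, the directory walk loadRepoIntoCache performs with ioutil.ReadDir could also be expressed with filepath.WalkDir (Go 1.16+). The sketch below is a hypothetical, self-contained variant that only collects the CSV paths it would hand to loadRowsIntoCache; it is not the author's implementation, and the repo path in main is made up.

package main

import (
    "fmt"
    "io/fs"
    "path/filepath"
    "strings"
)

// listLocationCSVs walks <repo>/locations-of-interest and returns every .csv
// file found, i.e. the same files loadRepoIntoCache feeds into loadRowsIntoCache.
func listLocationCSVs(repoLocation string) ([]string, error) {
    root := filepath.Join(repoLocation, "locations-of-interest")
    var csvs []string
    err := filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
        if err != nil {
            return err
        }
        if !d.IsDir() && strings.HasSuffix(path, ".csv") {
            csvs = append(csvs, path)
        }
        return nil
    })
    return csvs, err
}

func main() {
    files, err := listLocationCSVs("/tmp/nzcovidbot/repo") // hypothetical clone path
    if err != nil {
        fmt.Println("walk failed:", err)
        return
    }
    for _, f := range files {
        fmt.Println(f)
    }
}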
@@ -30,7 +30,6 @@ func Lesgoooo() {
             go checkForUpdates()
         }
     }
-
 }
 
 func postTheUpdates() {