package cif

import (
	"io"
	"time"

	"git.fjla.uk/owlboard/timetable-mgr/dbAccess"
	"git.fjla.uk/owlboard/timetable-mgr/helpers"
	"git.fjla.uk/owlboard/timetable-mgr/log"
	"git.fjla.uk/owlboard/timetable-mgr/nrod"
	"go.uber.org/zap"
)

// runCifFullDownload replaces all existing CIF data with a new full download.
func runCifFullDownload(cfg *helpers.Configuration) error {
	log.Info("Downloading all CIF Data")

	// Download the full CIF data file
	url, err := getUpdateUrl("full")
	if err != nil {
		log.Error("Error getting download URL", zap.Error(err))
		return err
	}

	dataStream, err := nrod.NrodStream(url, cfg)
	if err != nil {
		log.Error("Error downloading CIF data", zap.Error(err))
		return err
	}

	// Parse the CIF file
	parsed, err := parseCifDataStream(dataStream)
	if err != nil {
		log.Error("Error parsing CIF data", zap.Error(err))
		return err
	}

	// Drop the timetable collection
	// TODO: avoid removing VSTP entries from the database when dropping the collection
	dbAccess.DropCollection(dbAccess.TimetableCollection)

	// Process the parsed CIF data
	err = processParsedCif(parsed)
	if err != nil {
		log.Error("Error processing CIF data", zap.Error(err))
	}

	newMeta := generateMetadata(&parsed.header)
	ok := dbAccess.PutCifMetadata(newMeta, fullUpdateType)
	if !ok {
		log.Warn("CIF Data updated, but metadata write failed")
	}

	// Set parsed to nil to encourage garbage collection
	parsed = nil

	// Clear out-of-date schedules (scheduleEnd more than one week ago)
	cutoff := time.Now().Add(-time.Hour * 24 * 7)
	log.Debug("Attempting to remove outdated services", zap.Time("scheduleEnd before", cutoff))
	count, err := dbAccess.RemoveOutdatedServices(cutoff)
	if err != nil {
		log.Warn("Out of date services not removed", zap.Error(err))
	} else {
		log.Info("Out of date services removed", zap.Int64("removal count", count))
	}

	return nil
}

// runCifUpdateDownload runs a CIF update for up to five days.
func runCifUpdateDownload(cfg *helpers.Configuration, metadata *dbAccess.CifMetadata, days []time.Time) error {
	log.Info("Downloading CIF Updates")

	// Loop over the requested dates
	for _, day := range days {
		log.Info("Downloading CIF File", zap.Time("CIF Data from", day))

		// Download the CIF data file for this day
		data, err := fetchUpdate(day, cfg)
		if err != nil {
			log.Error("Error fetching CIF update", zap.Error(err))
			return err
		}

		// Parse the CIF file
		parsed, err := parseCifDataStream(data)
		if err != nil {
			log.Error("Error parsing CIF data", zap.Error(err))
			return err
		}

		// Check the CIF metadata to decide whether this file should be processed
		log.Debug("Starting metadata checks")
		reason, update := checkMetadata(metadata, &parsed.header)
		if !update {
			log.Warn("Update file not processed", zap.String("reason", reason))
			continue
		}
		log.Info("CIF Data is suitable for processing", zap.String("reason", reason))

		// Process the parsed CIF data
		err = processParsedCif(parsed)
		if err != nil {
			log.Error("Error processing CIF data", zap.Error(err))
		}

		// Update the in-memory metadata and release the parsed data
		metadata = generateMetadata(&parsed.header)
		parsed = nil
	}

	// Record the metadata from the last processed update
	ok := dbAccess.PutCifMetadata(metadata, dailyUpdateType)
	if !ok {
		log.Warn("CIF Data updated, but metadata write failed")
	}

	return nil
}

// fetchUpdate wraps nrod.NrodStream into a function which downloads the CIF update data for a given day.
func fetchUpdate(t time.Time, cfg *helpers.Configuration) (io.ReadCloser, error) {
	url, err := getUpdateUrl("daily")
	if err != nil {
		return nil, err
	}

	// Append the day string to the URL
	url = url + getDayString(t)

	dataStream, err := nrod.NrodStream(url, cfg)
	if err != nil {
		return nil, err
	}

	return dataStream, nil
}