package cif

import (
	"io"
	"time"

	"git.fjla.uk/owlboard/timetable-mgr/dbAccess"
	"git.fjla.uk/owlboard/timetable-mgr/helpers"
	"git.fjla.uk/owlboard/timetable-mgr/log"
	"git.fjla.uk/owlboard/timetable-mgr/nrod"
	"go.uber.org/zap"
)

// runCifFullDownload replaces all existing CIF data with a fresh full download.
func runCifFullDownload(cfg *helpers.Configuration) error {
	preTime := time.Now()
	log.Info("Downloading all CIF Data")

	// Download the CIF data file
	url, err := getUpdateUrl("full")
	if err != nil {
		log.Error("Error getting download URL", zap.Error(err))
		return err
	}
	dataStream, err := nrod.NrodStream(url, cfg)
	if err != nil {
		log.Error("Error downloading CIF data", zap.Error(err))
		return err
	}

	// Parse the CIF data stream
	parsed, err := parseCifDataStream(dataStream)
	if err != nil {
		log.Error("Error parsing CIF data", zap.Error(err))
		return err
	}

	// Drop the timetable collection
	// I should edit this to prevent removal of VSTP entries in the database.
	dbAccess.DropCollection(dbAccess.TimetableCollection)

	// Process the parsed CIF data
	err = ProcessParsedCif(parsed)
	if err != nil {
		log.Error("Error processing CIF data", zap.Error(err))
		return err
	}

	newMeta := generateMetadata(&parsed.Header)
	ok := dbAccess.PutCifMetadata(newMeta, fullUpdateType)
	if !ok {
		log.Warn("CIF Data updated, but metadata write failed")
	}

	// Set parsed to nil to encourage garbage collection
	parsed = nil

	// Clear out-of-date schedules older than one week
	cutoff := time.Now().Add(-time.Hour * 24 * 7)
	log.Debug("Attempting to remove outdated services", zap.Time("scheduleEnd before", cutoff))
	count, err := dbAccess.RemoveOutdatedServices(cutoff)
	if err != nil {
		log.Warn("Out of date services not removed", zap.Error(err))
	} else {
		log.Info("Out of date services removed", zap.Int64("removal count", count))
	}

	postTime := time.Now()
	updateDuration := postTime.Sub(preTime)
	log.Info("Execution time", zap.Duration("duration", updateDuration))
	return nil
}

// runCifUpdateDownload runs a CIF update for up to five days of daily update files.
func runCifUpdateDownload(cfg *helpers.Configuration, metadata *dbAccess.CifMetadata, days []time.Time) error {
	startTime := time.Now()
	log.Info("Downloading CIF Updates")

	// Loop over the requested dates
	for _, day := range days {
		log.Info("Downloading CIF File", zap.Time("CIF Data from", day))

		// Download the CIF data file for this day
		data, err := fetchUpdate(day, cfg)
		if err != nil {
			log.Error("Error fetching CIF update", zap.Error(err))
			return err
		}

		// Parse the CIF data stream
		parsed, err := parseCifDataStream(data)
		if err != nil {
			log.Error("Error parsing CIF data", zap.Error(err))
			return err
		}

		// Check the CIF metadata before processing
		log.Debug("Starting metadata checks")
		reason, update := checkMetadata(metadata, &parsed.Header)
		if !update {
			log.Warn("Update file not processed", zap.String("reason", reason))
			continue
		}
		log.Info("CIF Data is suitable for processing", zap.String("reason", reason))

		// Process the parsed CIF data
		err = ProcessParsedCif(parsed)
		if err != nil {
			log.Error("Error processing CIF data", zap.Error(err))
			return err
		}

		metadata = generateMetadata(&parsed.Header)

		// Set parsed to nil to encourage garbage collection
		parsed = nil
	}

	ok := dbAccess.PutCifMetadata(metadata, dailyUpdateType)
	if !ok {
		log.Warn("CIF Data updated, but metadata write failed")
	}

	endTime := time.Now()
	duration := endTime.Sub(startTime)
	log.Info("CIF Update process ended", zap.Duration("duration", duration))
	return nil
}

// fetchUpdate wraps nrod.NrodStream in a function which handles downloading the update data for a given day.
func fetchUpdate(t time.Time, cfg *helpers.Configuration) (io.ReadCloser, error) {
	url, err := getUpdateUrl("daily")
	if err != nil {
		return nil, err
	}

	// Append the day string to the URL
	url = url + getDayString(t)

	dataStream, err := nrod.NrodStream(url, cfg)
	if err != nil {
		return nil, err
	}
	return dataStream, nil
}
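
// Usage sketch (not part of the original file): assuming the caller holds a
// populated *helpers.Configuration named cfg, a full refresh could be driven
// like this; the error handling shown is illustrative only.
//
//	if err := runCifFullDownload(cfg); err != nil {
//		log.Error("CIF full download failed", zap.Error(err))
//	}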