Optimise batch size and stream handling

Fred Boniface 2024-04-11 20:50:13 +01:00
parent fb510e1408
commit b2f82b0250
3 changed files with 5 additions and 13 deletions

View File

@@ -12,7 +12,8 @@ import (
// Accepts the CIF data as a stream and outputs parsed data
func parseCifDataStream(dataStream io.ReadCloser) (*parsedData, error) {
log.Msg.Debug("STREAM-Starting CIF Datastream parsing")
defer dataStream.Close()
log.Msg.Debug("Starting CIF Datastream parsing")
if dataStream == nil {
return nil, errors.New("unable to parse nil pointer")
}
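
A minimal standalone sketch of the stream-owning shape this hunk moves the parser towards: accept the io.ReadCloser, reject nil, then defer Close so the caller no longer has to. The parseStream name, the bufio line scan and the fmt output are illustrative assumptions rather than the repo's actual code; note that the sketch only defers Close after the nil check, because Close on a nil io.ReadCloser panics.

```go
package main

import (
	"bufio"
	"errors"
	"fmt"
	"io"
	"strings"
)

// parseStream mirrors the shape of parseCifDataStream: it takes ownership of
// the ReadCloser, rejects nil input, then defers Close and reads line by line.
func parseStream(dataStream io.ReadCloser) ([]string, error) {
	if dataStream == nil {
		return nil, errors.New("unable to parse nil pointer")
	}
	// Deferred only after the nil check: Close on a nil io.ReadCloser panics.
	defer dataStream.Close()

	var lines []string
	scanner := bufio.NewScanner(dataStream)
	for scanner.Scan() {
		lines = append(lines, scanner.Text())
	}
	return lines, scanner.Err()
}

func main() {
	rc := io.NopCloser(strings.NewReader("record1\nrecord2\n"))
	lines, err := parseStream(rc)
	fmt.Println(lines, err)
}
```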
@@ -56,6 +57,8 @@ func parseCifDataStream(dataStream io.ReadCloser) (*parsedData, error) {
continue
}
parsed.sched = append(parsed.sched, schedule)
case "EOF":
log.Msg.Info("Reached EOF")
default:
log.Msg.Warn("Unknown CIF Data type", zap.String("key", key))
}
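
The new EOF case slots into the per-record dispatch. Assuming the feed is newline-delimited JSON with one top-level key naming each record type (the "JsonScheduleV1" key and the fmt-based logging below are stand-ins for the repo's real keys and its zap-backed log.Msg), a sketch of that dispatch looks like this:

```go
package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"strings"
)

// dispatchRecords switches on the top-level key of each JSON line, collecting
// schedules, noting EOF, and warning on anything unrecognised.
func dispatchRecords(scanner *bufio.Scanner) ([]json.RawMessage, error) {
	var schedules []json.RawMessage
	for scanner.Scan() {
		var record map[string]json.RawMessage
		if err := json.Unmarshal(scanner.Bytes(), &record); err != nil {
			continue // skip malformed lines
		}
		for key, value := range record {
			switch key {
			case "JsonScheduleV1": // assumed schedule key
				schedules = append(schedules, value)
			case "EOF":
				fmt.Println("Reached EOF")
			default:
				fmt.Println("Unknown CIF Data type:", key)
			}
		}
	}
	return schedules, scanner.Err()
}

func main() {
	feed := `{"JsonScheduleV1":{"CIF_train_uid":"A12345"}}
{"EOF":true}`
	scheds, err := dispatchRecords(bufio.NewScanner(strings.NewReader(feed)))
	fmt.Println(len(scheds), err)
}
```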

View File

@@ -15,7 +15,7 @@ func processParsedCif(data *parsedData) error {
log.Msg.Debug("Starting CIF Processing")
// Batch size for processing
- batchSize := 750 // Needs optimising for better RAM use. 1000 ~ 5.7GB, 500 ~ 5.5GB, 750 ~ 5.2GB
+ batchSize := 250 // Needs optimising for better RAM use. 1000 ~ 5.7GB, 500 ~ 5.5GB, 750 ~ 5.2GB
// Process deletions in batches
for i := 0; i < len(data.sched); i += batchSize {
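
The loop body isn't shown, but the batching pattern is worth sketching in isolation: step through the slice batchSize items at a time and clamp the end index so the final, shorter batch stays in range. processInBatches is a hypothetical helper, not the repo's code; the trade-off behind dropping from 750 to 250 is that smaller batches hold less in memory at once at the cost of more database round trips.

```go
package main

import "fmt"

// processInBatches walks a slice in fixed-size chunks so that only one
// batch's worth of derived work is held in memory at a time.
func processInBatches[T any](items []T, batchSize int, handle func(batch []T) error) error {
	for i := 0; i < len(items); i += batchSize {
		end := i + batchSize
		if end > len(items) {
			end = len(items) // clamp the final, shorter batch
		}
		if err := handle(items[i:end]); err != nil {
			return err
		}
	}
	return nil
}

func main() {
	ids := make([]int, 10)
	err := processInBatches(ids, 4, func(batch []int) error {
		fmt.Println("processing batch of", len(batch))
		return nil
	})
	fmt.Println(err)
}
```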

View File

@@ -26,12 +26,6 @@ func runCifFullDownload(cfg *helpers.Configuration) error {
log.Msg.Error("Error downloading CIF data", zap.Error(err))
}
- // DOES NOT WORK WITH NEW DOWNLOAD STREAMING
- // If debug mode is on, call debugWriteDownload
- // if helpers.Runtime == "debug" {
- // debugWriteDownload(dataStream)
- // }
// Parse CIF file
parsed, err := parseCifDataStream(dataStream)
if err != nil {
@@ -39,11 +33,6 @@ func runCifFullDownload(cfg *helpers.Configuration) error {
return err
}
- // Look to stream data onwards to the parsing function
- // Make `data` a nil pointer as it is no longer required
- dataStream = nil
// Drop timetable collection
dbAccess.DropCollection(dbAccess.TimetableCollection) // I should edit this to prevent removal of VSTP entries in the database.
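
With the parser now owning and closing the stream, the commented-out debugWriteDownload call and the `dataStream = nil` bookkeeping presumably become dead weight, hence their removal. The inline note about VSTP entries points at swapping the blanket DropCollection for a filtered delete; a speculative sketch, assuming the store is MongoDB behind dbAccess and using the official Go driver directly (the "vstp" marker field is invented for illustration):

```go
package cif

import (
	"context"

	"go.mongodb.org/mongo-driver/bson"
	"go.mongodb.org/mongo-driver/mongo"
)

// clearCifTimetable deletes only CIF-sourced documents from the timetable
// collection, leaving VSTP entries in place, instead of dropping everything.
func clearCifTimetable(ctx context.Context, timetable *mongo.Collection) (int64, error) {
	// "vstp" is an assumed marker field; filter on whatever actually
	// distinguishes VSTP records in the real schema.
	res, err := timetable.DeleteMany(ctx, bson.M{"vstp": bson.M{"$ne": true}})
	if err != nil {
		return 0, err
	}
	return res.DeletedCount, nil
}
```

clearCifTimetable is purely illustrative; an actual change would live behind the existing dbAccess layer rather than taking a *mongo.Collection directly.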