timetable-extension #1
@@ -72,7 +72,6 @@ func parseCifDataStream(dataStream io.ReadCloser) (*parsedData, error) {
		return nil, errors.New("unable to parse nil pointer")
	}

	// Initialise data structures
	var parsed parsedData
	parsed.assoc = make([]upstreamApi.JsonAssociationV1, 0)
	parsed.sched = make([]upstreamApi.JsonScheduleV1, 0)
@@ -99,12 +98,9 @@ func parseCifDataStream(dataStream io.ReadCloser) (*parsedData, error) {
			}
			parsed.header = timetable
		case "JsonAssociationV1":
			var association upstreamApi.JsonAssociationV1
			if err := json.Unmarshal(value, &association); err != nil {
				log.Msg.Error("Error decoding JSONAssociationV1 object", zap.Error(err))
				// Association data is not currently used
				// but may be used in the future
				continue
			}
			parsed.assoc = append(parsed.assoc, association)
		case "JsonScheduleV1":
			var schedule upstreamApi.JsonScheduleV1
			if err := json.Unmarshal(value, &schedule); err != nil {
cif/readme.md (new file)
@@ -0,0 +1,11 @@
# package cif

This package follows a similar pattern to `package corpus`.

First, `CheckCorpus()` retrieves CIF metadata from the database and determines whether an update is required and, if so, which type of update to run.
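
A rough sketch of that decision might look like the following; `UpdateType`, `decideUpdate`, and the staleness thresholds are illustrative assumptions rather than this package's actual API.

```go
package cif

import "time"

// UpdateType enumerates the possible outcomes of the metadata check.
type UpdateType int

const (
	UpdateNone  UpdateType = iota // timetable is current, nothing to do
	UpdateFull                    // drop and rebuild from a full download
	UpdateDaily                   // apply per-day update files
)

// decideUpdate picks an update type from the stored metadata: missing or
// very stale metadata forces a full rebuild, otherwise a daily update
// (or nothing) suffices.
func decideUpdate(lastUpdate *time.Time, now time.Time) UpdateType {
	switch {
	case lastUpdate == nil:
		return UpdateFull
	case now.Sub(*lastUpdate) > 7*24*time.Hour:
		return UpdateFull
	case now.Sub(*lastUpdate) > 24*time.Hour:
		return UpdateDaily
	default:
		return UpdateNone
	}
}
```
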
Then one of the update functions is called to run through the update process. There are two update types, 'full' and 'update': a 'full' update drops the entire timetable collection and rebuilds it from a full CIF download, while an 'update' downloads the CIF update files for the specified days and applies them.
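
Continuing the sketch above (and reusing its `UpdateType` values), dispatch between the two paths could look like this; `runFullUpdate` and `runDailyUpdate` are hypothetical stand-ins for the real update functions.

```go
package cif

// Hypothetical stand-ins for the two update paths; the real function
// names in this package may differ.
func runFullUpdate() error               { return nil } // drop collection, rebuild from a full CIF download
func runDailyUpdate(days []string) error { return nil } // download and apply per-day CIF updates

// runUpdate dispatches to the appropriate update path for the result of
// the metadata check.
func runUpdate(kind UpdateType, days []string) error {
	switch kind {
	case UpdateFull:
		return runFullUpdate()
	case UpdateDaily:
		return runDailyUpdate(days)
	default:
		return nil // timetable already current, nothing to do
	}
}
```
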
Downloads are handled by `package nrod`, which returns an `io.ReadCloser` that is passed to the parsing function.
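
A minimal sketch of that hand-off is below. The downloader is taken as a function value because the exact `package nrod` API is not described here; `parseCifDataStream` and `parsedData` are the parser pieces from this package.

```go
package cif

import "io"

// fetchAndParse wires a download to the parser. The download argument
// stands in for whatever `package nrod` exposes.
func fetchAndParse(download func() (io.ReadCloser, error)) (*parsedData, error) {
	stream, err := download()
	if err != nil {
		return nil, err
	}
	// Always close the stream so the underlying HTTP body is released.
	defer stream.Close()

	return parseCifDataStream(stream)
}
```
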
Currently the parsing function returns a `parsedData` pointer; however, this uses significant memory due to the size of a full CIF download (often around 4.5 GB). The intention is instead to use a worker pool to handle the data.
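
A minimal worker-pool sketch under that intention: the parser would send records down a channel instead of accumulating them, and a fixed set of workers would consume each one as it arrives. The generic `consumeRecords` and its `handle` callback are assumptions for illustration.

```go
package cif

import "sync"

// consumeRecords drains parsed records from a channel with a fixed pool
// of workers, so a full download never has to be held in memory at once.
// In this package T would be e.g. upstreamApi.JsonScheduleV1, and handle
// might batch-insert records into the timetable collection.
func consumeRecords[T any](records <-chan T, workers int, handle func(T)) {
	var wg sync.WaitGroup
	for i := 0; i < workers; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for rec := range records {
				handle(rec)
			}
		}()
	}
	// Block until the producer closes the channel and all workers finish.
	wg.Wait()
}
```
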