import hashlib

import yaml

import logger as log
import mongo

REBUILD: bool = False  # Set to True to force a rebuild

log.out("pis.py: PIS Module Loaded", "DBUG")

file_location: str = "/app/data/pis/gwr.yaml"  # Production & Testing
# file_location: str = "/home/fred.boniface/git/owlboard/db-manager/data/pis/gwr.yaml"  # Local Development
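
# Sketch (assumption, not part of this project): the production and local
# paths above could instead be selected with an environment variable, so the
# two lines don't need toggling by hand. The variable name PIS_FILE_LOCATION
# is hypothetical.
#
#   import os
#   file_location = os.environ.get("PIS_FILE_LOCATION", file_location)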


def runUpdate():
    """Rebuild the 'pis' collection from the YAML file when it has changed."""
    if not requiresUpdate():
        log.out('pis.runUpdate: PIS Codes do not need updating', 'INFO')
        return
    log.out("pis.runUpdate: Update required", "INFO")
    pis_data = load()
    pis_parsed = parse(pis_data)
    pis_indexes = ["stops", "tiplocs"]
    mongo.dropCollection("pis")
    mongo.putMany("pis", pis_parsed, pis_indexes)


def requiresUpdate():
    """Return True when the YAML file's hash differs from the stored hash."""
    if REBUILD:
        return True
    currentHash = mongo.getMetaHash("pis")
    with open(file_location, "r") as f:
        text = f.read()
    newHash = hashlib.md5(text.encode()).hexdigest()
    log.out(f"pis.requiresUpdate: Existing PIS Hash: {currentHash}", "INFO")
    log.out(f"pis.requiresUpdate: New PIS hash: {newHash}", "INFO")
    if currentHash is None or newHash != currentHash:
        log.out("pis.requiresUpdate: PIS Data requires updating", "INFO")
        mongo.putMetaHash("pis", newHash)
        return True
    log.out("pis.requiresUpdate: PIS Data is up to date", "INFO")
    return False


def load():
    """Load the PIS YAML file and return the list stored under its 'pis' key."""
    with open(file_location, "r") as data:
        try:
            pis = yaml.safe_load(data)
            return pis["pis"]
        except yaml.YAMLError as exc:
            log.out(f"pis.load: Error loading YAML: {exc}", "EROR")
            # Re-raise rather than return the exception object, so runUpdate
            # aborts before dropping the existing collection.
            raise


def parse(codeList):
    """Remove duplicate stopping patterns and attach TIPLOCs to each code."""
    startLen = len(codeList)
    log.out(f"pis.parse: codeList starting length: {startLen}", "DBUG")
    log.out("pis.parse: Removing duplicate codes & adding TIPLOCs", "INFO")
    # Iterate over snapshots of the list: removing entries from a list while
    # iterating over it directly would skip elements.
    for i in list(codeList):
        if i not in codeList:
            continue  # Already removed as a duplicate of an earlier entry
        stops = i['stops']
        code = i['code']
        for ii in list(codeList):
            if stops == ii['stops'] and code != ii['code']:
                log.out(f"pis.parse: Identical stopping pattern found: {ii['code']}", "DBUG")
                codeList.remove(ii)  # Instead of removing, I should add a property (duplicate: true),
                # then I can filter this out on the backend when searching by start
                # and end stations and just use query one for other queries, this
                # means that when searching by code, a perfectly valid code won't
                # show 0 results. (A sketch of this approach follows this function.)
        tiplocs = []
        for crs in stops:
            tiplocs.append(getTiploc(crs))
        i['tiplocs'] = tiplocs
    log.out(f"pis.parse: Removed {startLen - len(codeList)} duplicates", "INFO")
    return codeList
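

# Sketch of the alternative described in the comment inside parse(): mark
# duplicates with a 'duplicate' property instead of removing them, so a search
# by code still finds a valid code, while stop-based searches can filter the
# flag out on the backend. A single pass over a dict also avoids the O(n^2)
# nested loops. This is an illustration only; the function name and the
# 'duplicate' key are assumptions, and nothing in this module calls it.
def markDuplicates(codeList):
    # Map each stopping pattern (as a tuple, so it can be used as a dict key)
    # to the first code seen with that pattern.
    seen = {}
    for entry in codeList:
        key = tuple(entry['stops'])
        if key in seen and entry['code'] != seen[key]:
            entry['duplicate'] = True
        else:
            entry['duplicate'] = False
            seen[key] = entry['code']
    return codeList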


def getTiploc(crs: str):
    """Look up the TIPLOC for a CRS (3ALPHA) code in the stations collection."""
    CRS = crs.upper()
    query = {
        '3ALPHA': CRS
    }
    try:
        res = mongo.query("stations", query)
        if 'TIPLOC' in res:
            return res['TIPLOC']
    except Exception as e:
        log.out(f"pis.getTiploc: Error finding tiploc: {query}", "EROR")
        log.out(f"ERROR: {e}", "EROR")
    return None  # Explicit: no match found, or the lookup failed
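

# Usage note (assumption): this module is imported by the db-manager, which
# calls runUpdate(). To exercise the module directly, a guard such as the
# following could be added:
#
#   if __name__ == "__main__":
#       runUpdate()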