Compare commits
188 Commits
Author | SHA1 | Date | |
---|---|---|---|
11f72679a0 | |||
e12b809d04 | |||
9ab5a7be99 | |||
8b00a9afe5 | |||
c4646bc654 | |||
989d14ff95 | |||
a561357fbe | |||
4d2262f349 | |||
eecafee7cf | |||
d54e223369 | |||
78c5c02c0e | |||
a793862aa2 | |||
be1ee0b285 | |||
196251eeb6 | |||
e913db5d57 | |||
fdcb43b5c2 | |||
9031eb53c6 | |||
2a9050940d | |||
e1fc4b1db2 | |||
6cfc42f22e | |||
9d51d4e45e | |||
fde37814a7 | |||
8fa0cf775f | |||
afa4ad7915 | |||
d49a5ae034 | |||
e7b8208edf | |||
dad9f46d86 | |||
c698187cdf | |||
90500b88af | |||
77ca61e178 | |||
2ff822d8eb | |||
09f883a461 | |||
d98b560584 | |||
f02ae3c7cd | |||
1f0a39adc6 | |||
f4b5e9ce37 | |||
a5a73812a9 | |||
87532b001d | |||
236d85648d | |||
91e2657d66 | |||
874b236f09 | |||
5904ee37cd | |||
70c9aa2b1e | |||
8e0b928f27 | |||
ac9372515f | |||
4cc6856a76 | |||
d15b7c3c7a | |||
c147d9c50c | |||
90baf1b13a | |||
0a25ae85f5 | |||
5db9d8e52a | |||
bf9b0ec8d0 | |||
31f104f51b | |||
c7b1c547b0 | |||
74a68e992a | |||
d259bd8a35 | |||
4b7722d5cd | |||
f96c7a7e9d | |||
ae1a467c97 | |||
0673637fa3 | |||
332aff4a17 | |||
a2777f1f37 | |||
5e31361880 | |||
05764d1897 | |||
1c8dfea43d | |||
517ecba7b6 | |||
4b2e7e99b8 | |||
de6f735edd | |||
ca6f245a84 | |||
68207d1e5e | |||
23ad8969f9 | |||
1c0081b570 | |||
4a1b8d12f8 | |||
5504439ce0 | |||
ab2aa09861 | |||
e22af3fa35 | |||
9ab6741243 | |||
2d2fdbb8db | |||
0e748d545e | |||
996221b221 | |||
658b0996bc | |||
1adf24d105 | |||
5380071a17 | |||
6bf8e91633 | |||
edb33153ce | |||
d7d4768663 | |||
8ba127b7c8 | |||
babd4563dc | |||
159c46c4cc | |||
b717917500 | |||
93c8aed105 | |||
d07cd177b3 | |||
81a859a8d9 | |||
96f22efb58 | |||
d8cc7c04f5 | |||
a865410b31 | |||
df0eb0451f | |||
6498f68bdf | |||
3818bc5d62 | |||
86e5c34e7f | |||
24b882ce5f | |||
e79985cb5a | |||
6cbf91c775 | |||
6a40ff90d0 | |||
681cf873dd | |||
08de9bcbc1 | |||
69f72dfff1 | |||
12753d76a1 | |||
45d0e0a2c0 | |||
ea8dd65a8d | |||
27794feb41 | |||
ce3033c455 | |||
cf87c1a2b6 | |||
49ad201522 | |||
55b854a592 | |||
07a0dd6f28 | |||
ab8db7bd06 | |||
a324c6fc63 | |||
3682ca51d8 | |||
04a3915b6c | |||
d46a97ffa5 | |||
72444e9fc9 | |||
eb80d7386e | |||
a4d82b0aa7 | |||
199a1760b7 | |||
34d00ad16f | |||
a04570c9e4 | |||
06259e3d9f | |||
e4255e9cbf | |||
f0b8e46007 | |||
4916c377c6 | |||
43fd6db4b9 | |||
aab3e00eac | |||
e7f6463689 | |||
5baaa5bef2 | |||
849c31af36 | |||
66b2ba002b | |||
c0f0591866 | |||
d879d544fe | |||
c4b3be9086 | |||
dc583a4632 | |||
a1afa9cddc | |||
574d232867 | |||
39dd93edb5 | |||
524e9b271d | |||
4ba93fdfa8 | |||
7f3d4780ff | |||
e2cba37938 | |||
085e6aa80d | |||
b32c34fe7d | |||
f6190bb1c9 | |||
cb8d53ae36 | |||
e76731f18a | |||
b590eb6cf5 | |||
a12951fe4f | |||
1ad96ae20d | |||
848d96bd2e | |||
11ece9c80e | |||
b119f126b9 | |||
d9197763ca | |||
8891a20233 | |||
76ccbba5a8 | |||
7da713526f | |||
fe9eeae7db | |||
95d735205a | |||
07dc5f6d8f | |||
c00be2aeda | |||
45b049714f | |||
ea50577946 | |||
132a8697b1 | |||
47447088a2 | |||
76c40dbbae | |||
7e823fb8be | |||
e0420db03c | |||
04d598f984 | |||
cb2f3aa2ea | |||
303610097e | |||
461b3cde77 | |||
bb61180ab8 | |||
4cb052b244 | |||
48843203b0 | |||
d10d558715 | |||
b1940bcfbb | |||
2326a97d35 | |||
b2b81e869b | |||
8fac608b44 | |||
fd99eb58d8 | |||
fdf489ccf9 |
@ -8,4 +8,6 @@ db-manager
|
||||
run.sh
|
||||
LICENSE
|
||||
*.md
|
||||
static
|
||||
.eslintrc.js
|
||||
.vscode
|
||||
.test-tools
|
35
.eslintrc.js
Normal file
35
.eslintrc.js
Normal file
@ -0,0 +1,35 @@
|
||||
module.exports = {
|
||||
env: {
|
||||
browser: false,
|
||||
node: true,
|
||||
commonjs: true,
|
||||
es2021: true,
|
||||
},
|
||||
extends: "eslint:recommended",
|
||||
overrides: [
|
||||
{
|
||||
files: ["**/*.ts", "**/*.js"],
|
||||
parser: "@typescript-eslint/parser",
|
||||
plugins: ["@typescript-eslint"],
|
||||
extends: [
|
||||
"plugin:@typescript-eslint/eslint-recommended",
|
||||
"plugin:@typescript-eslint/recommended",
|
||||
],
|
||||
},
|
||||
],
|
||||
parserOptions: {
|
||||
ecmaVersion: "latest",
|
||||
},
|
||||
rules: {
|
||||
indent: ["error", 2],
|
||||
"linebreak-style": ["error", "unix"],
|
||||
quotes: ["error", "single"],
|
||||
semi: ["error", "always"],
|
||||
"max-len": [
|
||||
"warn",
|
||||
{
|
||||
code: 80,
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
1
.npmrc
Normal file
1
.npmrc
Normal file
@ -0,0 +1 @@
|
||||
@owlboard:registry=https://git.fjla.uk/api/packages/OwlBoard/npm/
|
File diff suppressed because one or more lines are too long
@ -1 +0,0 @@
|
||||
{"GetStationBoardResult":{"generatedAt":"2023-01-14T11:23:12.6558466+00:00","locationName":"Pilning","crs":"PIL","nrccMessages":{"message":"\nPoor weather affecting services in Wales due to flooding on the railway More details can be found in <a href=\"https://t.co/uBU966PUmX\">Latest Travel News</a>."},"platformAvailable":"true"}}
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -1,4 +1,4 @@
|
||||
version: '3.1'
|
||||
version: "3.1"
|
||||
|
||||
services:
|
||||
mongo:
|
||||
|
@ -1,33 +0,0 @@
|
||||
{"service":
|
||||
[
|
||||
{"sta":"16:07",
|
||||
"eta":"On time",
|
||||
"operator":"South Western Railway",
|
||||
"operatorCode":"SW",
|
||||
"serviceType":"ferry",
|
||||
"serviceID":"37782PHBR____",
|
||||
"origin":
|
||||
{"location":
|
||||
{"locationName":
|
||||
"Ryde Pier Head","crs":"RYP"
|
||||
}
|
||||
},
|
||||
"destination":
|
||||
{"location":
|
||||
{"locationName":"Portsmouth Harbour",
|
||||
"crs":"PMH"
|
||||
}
|
||||
},
|
||||
"previousCallingPoints":
|
||||
{"callingPointList":
|
||||
{"callingPoint":
|
||||
{"locationName":"Ryde Pier Head",
|
||||
"crs":"RYP",
|
||||
"st":"15:45",
|
||||
"et":"On time"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{"std":"16:15","etd":"On time","operator":"South Western Railway","operatorCode":"SW","serviceType":"ferry","serviceID":"37746PHBR____","origin":{"location":{"locationName":"Portsmouth Harbour","crs":"PMH"}},"destination":{"location":{"locationName":"Ryde Pier Head","crs":"RYP"}},"subsequentCallingPoints":{"callingPointList":{"callingPoint":
|
||||
{"locationName":"Ryde Pier Head","crs":"RYP","st":"16:37","et":"On time"}}}}]}
|
20
.vscode/settings.json
vendored
20
.vscode/settings.json
vendored
@ -1,7 +1,15 @@
|
||||
{
|
||||
"git.autofetch": "all",
|
||||
"git.alwaysSignOff": true,
|
||||
"git.enableCommitSigning": false,
|
||||
"git.fetchOnPull": true,
|
||||
"git.pullBeforeCheckout": true
|
||||
}
|
||||
"git.autofetch": "all",
|
||||
"git.alwaysSignOff": true,
|
||||
"git.enableCommitSigning": false,
|
||||
"git.fetchOnPull": true,
|
||||
"git.pullBeforeCheckout": true,
|
||||
"editor.defaultFormatter": "rvest.vs-code-prettier-eslint",
|
||||
"editor.formatOnPaste": false, // required
|
||||
"editor.formatOnType": false, // required
|
||||
"editor.formatOnSave": true, // optional
|
||||
"editor.formatOnSaveMode": "file", // required to format on save
|
||||
"files.autoSave": "onFocusChange", // optional but recommended
|
||||
"vs-code-prettier-eslint.prettierLast": "false",
|
||||
"editor.tabSize": 2 // set as "true" to run 'prettier' last not first
|
||||
}
|
||||
|
22
.vscode/tasks.json
vendored
22
.vscode/tasks.json
vendored
@ -1,12 +1,12 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"type": "npm",
|
||||
"script": "start",
|
||||
"problemMatcher": [],
|
||||
"label": "npm: start",
|
||||
"detail": "node app.js"
|
||||
}
|
||||
]
|
||||
}
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"type": "npm",
|
||||
"script": "start",
|
||||
"problemMatcher": [],
|
||||
"label": "npm: start",
|
||||
"detail": "node app.js"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
20
Dockerfile
20
Dockerfile
@ -1,7 +1,19 @@
|
||||
FROM node:19
|
||||
EXPOSE 8460
|
||||
FROM node:20 as builder
|
||||
WORKDIR /usr/src/app
|
||||
COPY ./package*.json ./
|
||||
RUN npm ci --omit=dev
|
||||
COPY ./.npmrc ./
|
||||
RUN npm install
|
||||
COPY . .
|
||||
CMD [ "node", "app.js" ]
|
||||
# Ideally the tests should be run separately in a CI/CD workflow rather than during the build
|
||||
# Currently, it does prevent a container being published with failing tests
|
||||
RUN npm run test
|
||||
RUN npm run build
|
||||
|
||||
FROM node:20-slim
|
||||
EXPOSE 8460
|
||||
WORKDIR /usr/src/app
|
||||
COPY ./mail-templates/* ./mail-templates/
|
||||
COPY ./package*.json ./
|
||||
RUN npm ci --omit=dev
|
||||
COPY --from=builder /usr/src/app/dist/ ./
|
||||
CMD [ "node" , "app.js" ]
|
||||
|
108
README.md
108
README.md
@ -4,101 +4,41 @@ OwlBoard is both a backend API, and a frontend Arrival/Departure board webapp.
|
||||
|
||||
Powered by Node.JS and using the ldbs-json module, the OwlBoard API provides up to date train departure information for any station in the UK.
|
||||
|
||||
Whilst the application is open source, the webservice (owlboard.fb-infra.uk) is not openly available. National Rail Enquiries have limits on API access so to use this software yourself, you'll need to run your own instance after obtaining your own API key.
|
||||
## Build
|
||||
|
||||
The webservice (owlboard.fb-infra.uk) may contain ads to support the running of the service,
|
||||
if ads are implemented, I intend to avoid 'dubious' advertisers that target and track users.
|
||||
|
||||
Currently only the public API is available as I am currently unable to request a key for the staff version.
|
||||
|
||||
## Requirements:
|
||||
|
||||
To run this server you will need:
|
||||
- Docker or Kubernetes
|
||||
|
||||
## WebApp Colours:
|
||||
|
||||
- See CSS Variables
|
||||
To build the application with Docker, clone the repository and run `docker build`
|
||||
|
||||
## API Endpoints:
|
||||
- /api/v1:
|
||||
|
||||
- /list:
|
||||
- /stations:
|
||||
- GET: Get list of stations
|
||||
- Authenticated: No
|
||||
- Returns JSON: `{"STATION NAME":{"CRS":"code","TIPLOC":"code"}}`
|
||||
|
||||
- /corpus:
|
||||
- GET: Get full CORPUS Data
|
||||
- Authenticated: No
|
||||
- Returns JSON in original CORPUS format minus any blank values.
|
||||
|
||||
- /ldb:
|
||||
- /{crs}:
|
||||
- GET: Get arrival/departure board for {crs}
|
||||
- Authenticated: No
|
||||
- Returns JSON: Formatted as per ldbs-json module.
|
||||
|
||||
- /gitea:
|
||||
- POST: Post issue to Gitea Repo
|
||||
- Authenticated: Yes
|
||||
- Not yet implemented, submit issues at https://git.fjla.uk/fred.boniface/owlboard
|
||||
|
||||
- /kube:
|
||||
- /alive:
|
||||
- GET: Check alive
|
||||
- Authenticated: No
|
||||
- Returns JSON: `{"status":"alive"}`
|
||||
|
||||
- /ready:
|
||||
- GET: Check ready
|
||||
- Authenticated: No
|
||||
- Returns JSON: `{"state":""}` ready or not_ready.
|
||||
|
||||
## Stack:
|
||||
- app.js -> Launches server, Entry Point, defines routers and middlewares.
|
||||
- routes -> Routers - Directs requests to controllers.
|
||||
- controllers -> Checks auth, sends response. Request doesn't pass further.
|
||||
- services -> Provide data and do tasks, uses other services and utils.
|
||||
|
||||
- utils -> Provide utility functions that can be called by services.
|
||||
- configs -> Provide configuration details for other files.
|
||||
- static -> Holds files for static service, should be hosted behind a caching proxy.
|
||||
API Documentation has been removed as it is now out of date. I do intent to re-write the documentation at a later date.
|
||||
|
||||
## Configuration:
|
||||
The app is designed to be run within Kubernetes or within a Docker container, as such configuration is provided with environment variables. See the variable name and default options below. If a required configuration is not present the program will exit when that feature is initialised.
|
||||
|
||||
|VAR|DEFAULT|REQUIRED|PURPOSE|
|
||||
|:-:|:-----:|:------:|:-----:|
|
||||
|OWL_SRV_PORT|8460|NO|Web Server Port|
|
||||
|OWL_SRV_LISTEN|0.0.0.0|NO|Web Server Listen Address|
|
||||
|OWL_DB_USER|owl|NO|Database Username|
|
||||
|OWL_DB_PASS|twittwoo|NO|Database Password - Do not leave as default in production|
|
||||
|OWL_DB_NAME|owlboard|NO|Database Name|
|
||||
|OWL_DB_PORT|27017|NO|Database Server Port|
|
||||
|OWL_DB_HOST|localhost|NO|Database Server Host|
|
||||
|OWL_LDB_KEY||YES|National Rail LDBWS API Key|
|
||||
|OWL_LDB_SVKEY||NO|National Rail LDBSVWS API Key|
|
||||
|OWL_LDB_CORPUSUSER||YES|Network Rail CORPUS API Username|
|
||||
|OWL_LDB_CORPUSPASS||YES|Network Rail CORPUS API Password|
|
||||
|OWL_GIT_ISSUEBOT||NO|Gitea API Key for issue reporting|
|
||||
|OWL_GIT_APIENDPOINT||NO|Gitea API Endpoint|
|
||||
Configuration options are set through environment variables.
|
||||
These configuration options are shared with other programs in the OwlBoard ecosystem.
|
||||
|
||||
| VAR | DEFAULT | REQUIRED | PURPOSE |
|
||||
| :-----------------: | :-------: | :------: | :-------------------------------------------------------: |
|
||||
| OWL_SRV_PORT | 8460 | NO | Web Server Port |
|
||||
| OWL_SRV_LISTEN | 0.0.0.0 | NO | Web Server Listen Address |
|
||||
| OWL_DB_USER | owl | NO | Database Username |
|
||||
| OWL_DB_PASS | twittwoo | NO | Database Password - Do not leave as default in production |
|
||||
| OWL_DB_NAME | owlboard | NO | Database Name |
|
||||
| OWL_DB_PORT | 27017 | NO | Database Server Port |
|
||||
| OWL_DB_HOST | localhost | NO | Database Server Host |
|
||||
| OWL_LDB_KEY | | YES | National Rail LDBWS API Key |
|
||||
| OWL_LDB_SVKEY | | NO | National Rail LDBSVWS API Key |
|
||||
| OWL_LDB_CORPUSUSER | | YES | Network Rail NROD Username |
|
||||
| OWL_LDB_CORPUSPASS | | YES | Network Rail NROD Password |
|
||||
| OWL_GIT_ISSUEBOT | | NO | Gitea API Key for issue reporting |
|
||||
| OWL_GIT_APIENDPOINT | | NO | Gitea API Endpoint |
|
||||
|
||||
In the case that OWL_LDB_SVKEY is not available, staff versions of departure board, etc. will not be available.
|
||||
|
||||
In the case that OWL_GIT_ISSUEBOT is not available, the 'Report Issue' page will not be able to POST data.
|
||||
|
||||
## Database Layout
|
||||
## Database
|
||||
|
||||
The OwlBoard application will build the database if required at startup. All it needs is authentication details for a MongoDB server.
|
||||
OwlBoard uses MongoDB
|
||||
|
||||
### Collections
|
||||
|
||||
|Collection|Contents|Purpose|
|
||||
|:--------:|:------:|:-----:|
|
||||
|corpus|Raw CORPUS data with blank keys removed|Code lookups|
|
||||
|stations|Cleaned CORPUS Data, any objects with blank 3ALPHA & STANOX fields are removed|Validation before fetching Arr/Dep boards|
|
||||
|meta|Lists the update time of corpus and station data|Will be used to update after a predetermined time period|
|
||||
|
||||
Note that even after removing all objects from the CORPUS with a blank 3ALPHA & STANOX, many items remain which are not stations and will not have a board available. Going forwards methods to remove non-stations from this data will be introduced.
|
||||
The OwlBoard database is managed by the dbmanager application which will configure and maintain the database and should be run at least twice a day to ensure timetable data is up to date, see https://git.fjla.uk/owlboard/db-manager.
|
||||
|
40
UpNext.md
40
UpNext.md
@ -1,40 +0,0 @@
|
||||
# What to do next:
|
||||
|
||||
## Frontend:
|
||||
|
||||
* Enable text search for `locationName` on find-code page.
|
||||
* Add security headers - maybe on ingress controller?
|
||||
- see: https://webera.blog/improving-your-website-security-with-http-headers-in-nginx-ingress-369e8f3302cc
|
||||
* Replace close and menu icons with SVG
|
||||
* Service detail page needs style adjustments, the lines overflow on small screens
|
||||
|
||||
### In Progress:
|
||||
|
||||
|
||||
|
||||
### Completed - Testing:
|
||||
|
||||
* Write service worker for full PWA experience.
|
||||
* Implement error pages.
|
||||
* Issue page: Submit using API.
|
||||
* Issue page: Collect diagnostics such as browser features etc.
|
||||
* Add sanitizing to Gitea Issue API, currently considered to be unsafe.
|
||||
* Add Gitea Issue API
|
||||
* Issue page: Check for success and then redirect to /.
|
||||
* Add success test for Gitea Issue API and send the result onto the client.
|
||||
* DB Indexes:
|
||||
- "stations": 3ALPHA, STANOX, TIPLOC
|
||||
- "corpus": 3ALPHA, NLC
|
||||
* DB Indexes:.
|
||||
- "corpus": NLCDESC(TEXT)
|
||||
* Build metrics page
|
||||
* Responsive text sizes for boards.
|
||||
* Undo changed to make everything an array - frontend code to handle this.
|
||||
* Explore compression of API Responses
|
||||
|
||||
## Backend:
|
||||
|
||||
* Rewrite sanitizing functions to remove external dependancy.
|
||||
* DB: Count document creation, should only add date if doesn't already exist.
|
||||
- Then the count doesn't need clearing at each start.
|
||||
- Currently commented out the upsert of the date. This will only work on existing databases.
|
121
app.js
121
app.js
@ -1,75 +1,94 @@
|
||||
// OwlBoard - © Fred Boniface 2022 - Licensed under GPLv3 (or later)
|
||||
// OwlBoard - © Fred Boniface 2022-2023 - Licensed under GPLv3 (or later)
|
||||
// Please see the included LICENSE file
|
||||
|
||||
// Please see the included LICENSE file. Statically served fonts are
|
||||
// licensed separately, each folder contains a license file where a
|
||||
// different license applies.
|
||||
const mode = process.env.NODE_ENV || "development";
|
||||
|
||||
// While the Node app can serve static files, in production a separate
|
||||
// container should be used for this. See the dockerfile under /static
|
||||
// for this.
|
||||
// Logging
|
||||
const logger = require("./src/utils/logger.utils");
|
||||
logger.logger.info("Logger Initialised");
|
||||
|
||||
// External Requires
|
||||
const express = require('express');
|
||||
const app = express();
|
||||
const compression = require('compression')
|
||||
const express = require("express");
|
||||
const app = express();
|
||||
|
||||
// Middleware
|
||||
const rateLimit = require("express-rate-limit");
|
||||
const cors = require("cors");
|
||||
const authenticate = require("./src/middlewares/auth.middlewares");
|
||||
|
||||
// Internal Requires
|
||||
const log = require('./src/utils/log.utils'); // Log Helper
|
||||
const version = require('./src/configs/version.configs'); // Version Strings
|
||||
const listRtr = require('./src/routes/list.routes'); // /list endpoints
|
||||
const ldbRtr = require('./src/routes/ldb.routes'); // /ldb endpoints
|
||||
const kubeRtr = require('./src/routes/kube.routes'); // /kube endpoints
|
||||
const findRtr = require('./src/routes/find.routes'); // /find endpoints
|
||||
const issueRtr = require('./src/routes/issue.routes') // /issue endpoints
|
||||
const statRtr = require('./src/routes/stats.routes'); // /stat endpoints
|
||||
const initDb = require('./src/utils/dbinit.utils'); // DB Init Utility
|
||||
const version = require("./src/configs/version.configs");
|
||||
const pis2Rtr = require("./src/routes/pis.routes"); // API Version 2 Routes
|
||||
const ref2Rtr = require("./src/routes/ref.routes"); // API Version 2 Routes
|
||||
const live2Rtr = require("./src/routes/live.routes"); // API Version 2 Routes
|
||||
const tt2Rtr = require("./src/routes/timetable.routes"); // API Version 2
|
||||
const user2Rtr = require("./src/routes/user.routes"); // API Version 2 Routes
|
||||
const miscRtr = require("./src/routes/misc.routes"); // Non-Public API Routes
|
||||
|
||||
// Set Server Configurations
|
||||
const srvListen = process.env.OWL_SRV_LISTEN || "0.0.0.0"
|
||||
const srvPort = process.env.OWL_SRV_PORT || 8460
|
||||
const srvListen = process.env.OWL_SRV_LISTEN || "0.0.0.0";
|
||||
const srvPort = process.env.OWL_SRV_PORT || 8460;
|
||||
|
||||
const limiter = rateLimit({
|
||||
windowMs: 15 * (60 * 1000), // 15 minutes
|
||||
max: 100, // Limit each IP to 100 requests per `window` (here, per 15 minutes)
|
||||
standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers
|
||||
legacyHeaders: false, // Disable the `X-RateLimit-*` headers
|
||||
});
|
||||
|
||||
// Print version number:
|
||||
log.out(`app: Starting OwlBoard - Backend Version: ${version.app} - API versions: ${version.api}`);
|
||||
logger.logger.info(`Starting version ${version.app} in ${mode} mode`);
|
||||
|
||||
// Test for required vars:
|
||||
// const varTest = require('./src/utils/varTest.utils');
|
||||
// var startTest = await varTest.varTest();
|
||||
//console.log("Required Vars Missing:", startTest.missing_required);
|
||||
//console.log("Desired Vars Missing:", startTest.missing_desired);
|
||||
// if startTest.pass == false
|
||||
// console.log("Unable to start, missing required vars")
|
||||
// exit app
|
||||
|
||||
// DB Init
|
||||
initDb.init();
|
||||
// Remove X-Powered-By header:
|
||||
app.disable("x-powered-by");
|
||||
|
||||
// Express Error Handling:
|
||||
app.use((err, req, res, next) => {
|
||||
const statusCode = err.statuscode || 500;
|
||||
console.error(err.message, err.stack);
|
||||
res.status(statusCode).json({'message': err.message});
|
||||
logger.logger.error(err, "Express Error");
|
||||
res.status(statusCode).json({ message: err.message });
|
||||
return;
|
||||
});
|
||||
|
||||
// Express Submodules:
|
||||
// Pre Middleware:
|
||||
app.use(
|
||||
cors({
|
||||
origin: "*", //[/\.owlboard\.info$/, 'localhost:5173', 'localhost:4173']
|
||||
})
|
||||
);
|
||||
app.use(express.json()); //JSON Parsing for POST Requests
|
||||
app.use(express.static('static')); //Serve static content from /static
|
||||
app.use(compression())
|
||||
//app.use(limiter);
|
||||
app.use(authenticate);
|
||||
|
||||
// Express Routes
|
||||
app.use('/api/v1/list', listRtr);
|
||||
app.use('/api/v1/ldb', ldbRtr);
|
||||
app.use('/api/v1/kube', kubeRtr);
|
||||
app.use('/api/v1/find', findRtr);
|
||||
app.use('/api/v1/issue', issueRtr);
|
||||
app.use('/api/v1/stats', statRtr)
|
||||
// 2023 Rationalisation Routes (/api/v2, /misc)
|
||||
app.use("/api/v2/pis", pis2Rtr); // API Version 2
|
||||
app.use("/api/v2/live", live2Rtr); // API Version 2
|
||||
app.use("/api/v2/ref", ref2Rtr); // API Version 2
|
||||
app.use("/api/v2/timetable", tt2Rtr); // API Version 2
|
||||
app.use("/api/v2/user", user2Rtr); // API Version 2
|
||||
app.use("/misc", miscRtr); // Non public-api endpoints (Stats, Issue, etc.)
|
||||
|
||||
app.use("/api/v1/auth/test", authenticate, (req, res) =>
|
||||
res.status(200).json({
|
||||
status: "ok",
|
||||
message: "Authentication successful",
|
||||
})
|
||||
); // Returns 401 if auth failed, 200 if successful.
|
||||
|
||||
// Number of proxies:
|
||||
app.set("trust proxy", 4);
|
||||
mode === "development"
|
||||
? app.get("/api/v1/ip", (req, res) => res.send(req.ip))
|
||||
: null;
|
||||
|
||||
// Disable etags
|
||||
app.set('etag', false)
|
||||
|
||||
// Start Express
|
||||
app.listen(srvPort, srvListen, (error) =>{
|
||||
if(!error) {
|
||||
log.out(`app.listen: Listening on http://${srvListen}:${srvPort}`);
|
||||
log.out("app.listen: State - alive")
|
||||
app.listen(srvPort, srvListen, (error) => {
|
||||
if (!error) {
|
||||
logger.logger.info(`Listening on http://${srvListen}:${srvPort}`);
|
||||
} else {
|
||||
log.out("app.listen: Error occurred, server can't start", error);
|
||||
logger.logger.error(error, `Error starting server`);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
5
jest.config.js
Normal file
5
jest.config.js
Normal file
@ -0,0 +1,5 @@
|
||||
/** @type {import('ts-jest').JestConfigWithTsJest} */
|
||||
module.exports = {
|
||||
preset: "ts-jest",
|
||||
testEnvironment: "node",
|
||||
};
|
106
mail-templates/register.html
Normal file
106
mail-templates/register.html
Normal file
@ -0,0 +1,106 @@
|
||||
<html lang="en">
|
||||
<head>
|
||||
<title>OwlBoard - Register</title>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||
<style>
|
||||
html {
|
||||
text-align: center;
|
||||
width: 100%;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
background-color: #404c55;
|
||||
background-image: radial-gradient(#2b343c, #404c55);
|
||||
}
|
||||
body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
a {
|
||||
color: azure;
|
||||
}
|
||||
table {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
color: azure;
|
||||
font-family: sans-serif;
|
||||
text-align: center;
|
||||
background-color: #404c55;
|
||||
background-image: radial-gradient(#2b343c, #404c55);
|
||||
}
|
||||
p {
|
||||
margin-left: 40px;
|
||||
margin-right: 40px;
|
||||
}
|
||||
#title {
|
||||
height: 100px;
|
||||
padding-top: 0px;
|
||||
margin-top: 0px;
|
||||
}
|
||||
h1 {
|
||||
color: #00b7b7;
|
||||
}
|
||||
#button {
|
||||
color: azure;
|
||||
font-size: larger;
|
||||
background-color: #007979;
|
||||
padding: 8px;
|
||||
padding-left: 12px;
|
||||
padding-right: 12px;
|
||||
text-decoration: none;
|
||||
border-radius: 14px;
|
||||
}
|
||||
.digits {
|
||||
color: azure;
|
||||
font-size: xx-large;
|
||||
font-weight: bolder;
|
||||
letter-spacing: 0.75ch;
|
||||
margin-left: 0.75ch;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<br><br>
|
||||
<table>
|
||||
<tr>
|
||||
<td>
|
||||
<img
|
||||
src="https://owlboard.info/images/logo/wide_logo.svg"
|
||||
id="title"
|
||||
alt="OwlBoard Logo"
|
||||
/>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
<h1>Register for OwlBoard</h1>
|
||||
<br />
|
||||
<p>
|
||||
You'll need to type your registration code in to the OwlBoard app
|
||||
</p>
|
||||
<br />
|
||||
<h2>Your Code:</h2>
|
||||
<span class="digits">987654</span>
|
||||
<br><br>
|
||||
<p>
|
||||
Go back to OwlBoard and enter your code. Go to the registration page and click the link at the top.
|
||||
</p>
|
||||
<br /><br /><br />
|
||||
<p>
|
||||
This registration is for one device only, you can register again
|
||||
using the same email address for other devices and access OwlBoard
|
||||
from elsewhere.
|
||||
</p>
|
||||
<p>
|
||||
If you did not request to sign up to OwlBoard (Staff Version), you
|
||||
can safely ignore this email. Your email address has not been stored
|
||||
by us.
|
||||
</p>
|
||||
<p>The registration link will expire after 1 hour.</p>
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
<br />
|
||||
</body>
|
||||
</html>
|
10
mail-templates/register.txt
Normal file
10
mail-templates/register.txt
Normal file
@ -0,0 +1,10 @@
|
||||
Complete your OwlBoard (Staff) Registration by entering your six digit code.
|
||||
|
||||
987654
|
||||
|
||||
Go back to the OwlBoard app, goto "Menu > Registration" and click on the link at the top to enter your code.
|
||||
|
||||
If you did not request to register to OwlBoard then you can safely ignore this email.
|
||||
Your email address has not been stored by us and will not be required unless you wish to register again.
|
||||
|
||||
The link will expire after 1 hour.
|
9352
package-lock.json
generated
9352
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
58
package.json
58
package.json
@ -1,25 +1,45 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"axios": "^1.2.1",
|
||||
"compression": "^1.7.4",
|
||||
"express": "^4.18.2",
|
||||
"ldbs-json": "^1.2.1",
|
||||
"mongodb": "^4.13.0",
|
||||
"node-gzip": "^1.1.2",
|
||||
"string-sanitizer-fix": "^2.0.1"
|
||||
},
|
||||
"name": "owlboard",
|
||||
"description": "OwlBoard is an API and PWA for live rail departure board in the UK.",
|
||||
"version": "0.0.1",
|
||||
"main": "express.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1",
|
||||
"start": "node app.js"
|
||||
},
|
||||
"name": "owlboard-backend",
|
||||
"version": "2024.2.2",
|
||||
"description": "Provides LDB, PIS and live train details for the OwlBoard web client",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://git.fjla.uk/fred.boniface/owlboard.git"
|
||||
"url": "https://git.fjla.uk/owlboard/backend.git"
|
||||
},
|
||||
"license": "GPL-3.0-or-later",
|
||||
"author": "Fred Boniface",
|
||||
"license": "GPL-3.0-or-later"
|
||||
"main": "app.js",
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"run": "tsc && node dist/app.js",
|
||||
"dev": "tsc && NODE_ENV=development node dist/app.js",
|
||||
"start": "node app.js",
|
||||
"test": "jest",
|
||||
"format": "npx prettier -w ."
|
||||
},
|
||||
"dependencies": {
|
||||
"compression": "^1.7.4",
|
||||
"cors": "^2.8.5",
|
||||
"express": "^4.18.2",
|
||||
"express-rate-limit": "^6.7.0",
|
||||
"html-minifier": "^4.0.0",
|
||||
"juice": "^9.0.0",
|
||||
"ldbs-json": "^1.2.1",
|
||||
"moment-timezone": "^0.5.43",
|
||||
"mongodb": "^4.13.0",
|
||||
"nodemailer": "^6.9.9",
|
||||
"pino": "^8.15.1",
|
||||
"redis": "^4.6.7",
|
||||
"zlib": "^1.0.5"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@owlboard/ts-types": "^1.1.0",
|
||||
"@types/express": "^4.17.21",
|
||||
"@types/jest": "^29.5.3",
|
||||
"eslint": "^8.39.0",
|
||||
"jest": "^29.6.2",
|
||||
"prettier": "^2.8.8",
|
||||
"ts-jest": "^29.1.1",
|
||||
"typescript": "^5.1.6"
|
||||
}
|
||||
}
|
||||
|
@ -1,10 +0,0 @@
|
||||
module.exports = valid
|
||||
|
||||
const valid = [
|
||||
"owlboard.co.uk",
|
||||
"fjla.uk",
|
||||
"gwr.com",
|
||||
"swrailway.com",
|
||||
"firstrail.com",
|
||||
"networkrail.co.uk"
|
||||
]
|
34
src/configs/domains.configs.ts
Normal file
34
src/configs/domains.configs.ts
Normal file
@ -0,0 +1,34 @@
|
||||
export const valid: string[] = [
|
||||
"owlboard.info",
|
||||
"avantiwestcoast.co.uk",
|
||||
"btp.police.uk",
|
||||
"c2crail.net",
|
||||
"chilternrailways.co.uk",
|
||||
"crosscountrytrains.co.uk",
|
||||
"eastmidlandsrailway.co.uk",
|
||||
"abellio.co.uk",
|
||||
"tfl.gov.uk",
|
||||
"mtrel.co.uk",
|
||||
"eurostar.com",
|
||||
"eurotunnel.com",
|
||||
"ffwhr.com",
|
||||
"gwr.com",
|
||||
"hitachirail-eu.com",
|
||||
"greateranglia.co.uk",
|
||||
"heathrow.com",
|
||||
"swrailway.com",
|
||||
"lsltoc.co.uk",
|
||||
"lner.co.uk",
|
||||
"arrivarl.co.uk",
|
||||
"tube.tfl.gov.uk",
|
||||
"lumo.co.uk",
|
||||
"merseyrail.org",
|
||||
"nrcommcentre.com",
|
||||
"networkrail.co.uk",
|
||||
"northernrailway.co.uk",
|
||||
"scotrail.co.uk",
|
||||
"southeasternrailway.co.uk",
|
||||
"tpeexpress.co.uk",
|
||||
"tfwrail.wales",
|
||||
"wmtrains.co.uk",
|
||||
];
|
31
src/configs/errorCodes.configs.ts
Normal file
31
src/configs/errorCodes.configs.ts
Normal file
@ -0,0 +1,31 @@
|
||||
// statusCodes should be a map, not an object
|
||||
|
||||
export const statusCodes = {
|
||||
400: "data not found",
|
||||
700: "no authentication attempt",
|
||||
701: "invalid credentials",
|
||||
702: "domain not whitelisted",
|
||||
703: "registration request not found, maybe expired",
|
||||
800: "location code not found",
|
||||
801: "unable to fetch location data",
|
||||
900: "invalid request format",
|
||||
901: "email not provided",
|
||||
950: "upstream server error",
|
||||
951: "unknown server error",
|
||||
};
|
||||
|
||||
export const msgCodes = new Map<string, string>([
|
||||
[
|
||||
"LOC_NOT_FOUND",
|
||||
"Location not found. If you are sure that the location exists, there may be a fault with the data provider.",
|
||||
],
|
||||
[
|
||||
"USR_NOT_FOUND",
|
||||
"User is not registered, consider regeristering for access to this resource",
|
||||
],
|
||||
["AUTH_ERR", "Authentication Error"],
|
||||
["OK", "OK"],
|
||||
]);
|
||||
|
||||
module.exports = statusCodes;
|
||||
//export { statusCodes };
|
@ -1,6 +0,0 @@
|
||||
const version = {
|
||||
api: ["/api/v1/",],
|
||||
app: "0.0.2"
|
||||
};
|
||||
|
||||
module.exports = version;
|
12
src/configs/version.configs.ts
Normal file
12
src/configs/version.configs.ts
Normal file
@ -0,0 +1,12 @@
|
||||
interface versions {
|
||||
api: string[];
|
||||
app: string;
|
||||
}
|
||||
|
||||
const version: versions = {
|
||||
api: ["/api/v2"],
|
||||
app: "2025.03.2",
|
||||
};
|
||||
|
||||
module.exports = version;
|
||||
export { version };
|
@ -1,58 +0,0 @@
|
||||
const find = require('../services/find.services');
|
||||
|
||||
async function findName(req, res, next){
|
||||
try {
|
||||
var id = req.params.id
|
||||
res.json(await find.name(id))
|
||||
} catch (err) {
|
||||
console.error(`Unknown Error`, err.message);
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function findCrs(req, res, next){
|
||||
try {
|
||||
var id = req.params.id
|
||||
res.json(await find.crs(id))
|
||||
} catch (err) {
|
||||
console.error(`Unknown Error`, err.message);
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function findNlc(req, res, next){
|
||||
try {
|
||||
var id = req.params.id
|
||||
res.json(await find.nlc(id))
|
||||
} catch (err) {
|
||||
console.error(`Unknown Error`, err.message);
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function findTiploc(req, res, next){
|
||||
try {
|
||||
var id = req.params.id
|
||||
res.json(await find.tiploc(id))
|
||||
} catch (err) {
|
||||
console.error(`Unknown Error`, err.message);
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function findStanox(req, res, next){
|
||||
try {
|
||||
var id = req.params.id
|
||||
res.json(await find.stanox(id))
|
||||
} catch (err) {
|
||||
console.error(`Unknown Error`, err.message);
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
module.exports = {
|
||||
findName,
|
||||
findCrs,
|
||||
findNlc,
|
||||
findTiploc,
|
||||
findStanox
|
||||
}
|
@ -1,14 +1,17 @@
|
||||
const issue = require('../services/issue.services');
|
||||
const issue = require("../services/issue.services");
|
||||
const log = require("../utils/logs.utils");
|
||||
|
||||
async function post(req, res, next){
|
||||
try {
|
||||
res.json(await issue.processor(req.body))
|
||||
} catch (err) {
|
||||
console.error(`Controller Error`, err.message);
|
||||
next(err);
|
||||
}
|
||||
async function post(req, res, next) {
|
||||
try {
|
||||
log.out(`issueControllers.post: Request Body: ${JSON.stringify(req.body)}`);
|
||||
setCache(res, "no-store")
|
||||
res.json(await issue.processor(req.body));
|
||||
} catch (err) {
|
||||
console.error("Controller Error", err.message);
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
post
|
||||
}
|
||||
post,
|
||||
};
|
||||
|
@ -1,34 +0,0 @@
|
||||
const kube = require('../services/kube.services');
|
||||
|
||||
async function getAlive(req, res, next){
|
||||
try {
|
||||
var state = kube.getAlive()
|
||||
res.status((await state).code).send((await state).state)
|
||||
} catch (err) {
|
||||
res.status("503").send({state: "error"})
|
||||
}
|
||||
}
|
||||
|
||||
async function getReady(req, res, next){
|
||||
try {
|
||||
res.json(await kube.getReady(req.body))
|
||||
} catch (err) {
|
||||
console.error(`Unknown Error`, err.message);
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function getTime(req, res, next){
|
||||
try {
|
||||
res.json(await kube.getTime(req.body))
|
||||
} catch (err) {
|
||||
console.error(`Unknown Error`, err.message);
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getAlive,
|
||||
getReady,
|
||||
getTime
|
||||
}
|
@ -1,15 +1,98 @@
|
||||
const ldb = require('../services/ldb.services');
|
||||
const ldb = require("../services/ldb.services");
|
||||
|
||||
async function get(req, res, next){
|
||||
try {
|
||||
var id = req.params.id
|
||||
res.json(await ldb.get(req.body, id))
|
||||
} catch (err) {
|
||||
console.error(`Unknown Error`, err.message);
|
||||
next(err);
|
||||
import { setCache } from "../utils/cacheHeader.utils";
|
||||
import { logger } from "../utils/logger.utils";
|
||||
|
||||
async function getTrain(req, res, next) {
|
||||
// API v2 Only
|
||||
if (!req.isAuthed) {
|
||||
const err = new Error("Unauthorized");
|
||||
err.status = 401;
|
||||
throw err;
|
||||
}
|
||||
setCache(res, "private", 240)
|
||||
let type = req.params.searchType;
|
||||
let id = req.params.id;
|
||||
try {
|
||||
switch (type.toLowerCase()) {
|
||||
case "rid":
|
||||
res.json(await ldb.getServiceByRID(id));
|
||||
break;
|
||||
case "uid":
|
||||
case "headcode":
|
||||
case "rsid":
|
||||
res.json(await ldb.getServicesByOther(id));
|
||||
break;
|
||||
default:
|
||||
res
|
||||
.status(400)
|
||||
.json({ status: "error", message: "Invalid search type" });
|
||||
}
|
||||
} catch (err) {
|
||||
err.status = 500;
|
||||
console.error("Unknown Error", err.message);
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function getStation(req, res, next) {
|
||||
// API v2 Only
|
||||
let type = req.params.type;
|
||||
let id = req.params.id;
|
||||
try {
|
||||
if (type == "staff") {
|
||||
if (!req.isAuthed) {
|
||||
const err = new Error("Unauthorized");
|
||||
err.status = 401;
|
||||
return next(err);
|
||||
}
|
||||
const data = await ldb.get(id, true);
|
||||
// Only cache if data is present
|
||||
if (data.data) {
|
||||
setCache(res, "public", 120);
|
||||
} else {
|
||||
setCache(res, "no-store", 120);
|
||||
}
|
||||
res.json(data);
|
||||
} else {
|
||||
setCache(res, "public", 240)
|
||||
res.json(await ldb.get(id, false));
|
||||
}
|
||||
} catch (err) {
|
||||
setCache(res, "no-store")
|
||||
console.error("Unknown Error", err.message);
|
||||
err.status = 500;
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function getNearest(req, res, next) {
|
||||
// API v2 Only
|
||||
let latitude = req.params.latitude;
|
||||
let longitude = req.params.longitude;
|
||||
try {
|
||||
if (!req.isAuthed) {
|
||||
const err = new Error("Unauthorized");
|
||||
err.status = 401;
|
||||
return next(err)
|
||||
}
|
||||
const data = await ldb.getNearestStations(latitude, longitude)
|
||||
if (data) {
|
||||
setCache(res, "private", 120)
|
||||
} else {
|
||||
setCache(res, "no-store", 120)
|
||||
}
|
||||
res.json(data)
|
||||
} catch (err) {
|
||||
setCache(res, "no-store")
|
||||
logger.Error("Error fetching nearest station")
|
||||
err.status = 500;
|
||||
next(err)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
get
|
||||
}
|
||||
getTrain,
|
||||
getStation,
|
||||
getNearest,
|
||||
};
|
||||
|
@ -1,34 +0,0 @@
|
||||
const list = require('../services/list.services');
|
||||
|
||||
async function getStations(req, res, next){
|
||||
try {
|
||||
res.json(await list.getStations(req.body))
|
||||
} catch (err) {
|
||||
console.error(`Controller Error`, err.message);
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function getCorpus(req, res, next){
|
||||
try {
|
||||
res.json(await list.getCorpus(req.body))
|
||||
} catch (err) {
|
||||
console.error(`Controller Error`, err.message);
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function hits(req, res, next) {
|
||||
try {
|
||||
res.json(await list.hits())
|
||||
} catch (err) {
|
||||
console.error(`Controller Error`, err);
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getStations,
|
||||
getCorpus,
|
||||
hits
|
||||
}
|
40
src/controllers/pis.controllers.ts
Normal file
40
src/controllers/pis.controllers.ts
Normal file
@ -0,0 +1,40 @@
|
||||
import { Request, Response, NextFunction } from "express";
|
||||
import { findPisByOrigDest, findPisByCode } from "../services/pis.services";
|
||||
import { setCache } from "../utils/cacheHeader.utils";
|
||||
|
||||
async function byStartEndCRS(req: Request, res: Response, next: NextFunction) {
|
||||
// if (!req.isAuthed) {
|
||||
// const err = new Error("Unauthorized");
|
||||
// err.status = 401;
|
||||
// return next(err);
|
||||
// }
|
||||
try {
|
||||
let startCrs = req.params.startCrs;
|
||||
let endCrs = req.params.endCrs;
|
||||
setCache(res, "public", 600)
|
||||
res.json(await findPisByOrigDest(startCrs, endCrs));
|
||||
} catch (err: any) {
|
||||
console.error("Unknown Error", err.message);
|
||||
return next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function byCode(req: Request, res: Response, next: NextFunction) {
|
||||
// if (!req.isAuthed) {
|
||||
// const err = new Error("Unauthorized");
|
||||
// err.status = 401;
|
||||
// return next(err);
|
||||
// }
|
||||
try {
|
||||
let code = req.params.code;
|
||||
res.json(await findPisByCode(code));
|
||||
} catch (err: any) {
|
||||
console.error("Unknown Error", err.message);
|
||||
return next(err);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
byStartEndCRS,
|
||||
byCode,
|
||||
};
|
59
src/controllers/ref.controllers.js
Normal file
59
src/controllers/ref.controllers.js
Normal file
@ -0,0 +1,59 @@
|
||||
/* API V2 Exclusive Controller */
|
||||
|
||||
import { setCache } from "../utils/cacheHeader.utils";
|
||||
|
||||
const ldb = require("../services/ldb.services");
|
||||
const find = require("../services/find.services");
|
||||
|
||||
async function getReasonCode(req, res, next) {
|
||||
try {
|
||||
const code = req.params.code;
|
||||
if (code === "all") {
|
||||
setCache(res, "public", 604800)
|
||||
res.json(await ldb.getReasonCodeList());
|
||||
next;
|
||||
}
|
||||
res.json(await ldb.getReasonCode(code));
|
||||
next;
|
||||
} catch (err) {
|
||||
console.error("ERROR", err.message);
|
||||
err.status = 500;
|
||||
setCache(res, "no-store", 5)
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function getLocationReference(req, res, next) {
|
||||
try {
|
||||
const searchType = req.params.searchType;
|
||||
const id = req.params.id;
|
||||
setCache(res, "public", 604800)
|
||||
switch (searchType) {
|
||||
case "name":
|
||||
res.json(await find.name(id));
|
||||
break;
|
||||
case "crs": // Same as 3alpha
|
||||
case "3alpha":
|
||||
res.json(await find.crs(id));
|
||||
break;
|
||||
case "nlc":
|
||||
res.json(await find.nlc(id));
|
||||
break;
|
||||
case "tiploc":
|
||||
res.json(await find.tiploc(id));
|
||||
break;
|
||||
case "stanox":
|
||||
res.json(await find.stanox(id));
|
||||
break;
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("ERROR", err.message);
|
||||
err.status = 500;
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getReasonCode,
|
||||
getLocationReference,
|
||||
};
|
52
src/controllers/registration.controllers.js
Normal file
52
src/controllers/registration.controllers.js
Normal file
@ -0,0 +1,52 @@
|
||||
const reg = require("../services/registration.services");
|
||||
|
||||
async function register(req, res, next) {
|
||||
try {
|
||||
let response = await reg.regUser(req.body);
|
||||
res.status(response.status).json(response);
|
||||
} catch (err) {
|
||||
console.error("Controller Error", err.message);
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function request(req, res, next) {
|
||||
try {
|
||||
let response = await reg.createRegKey(req.body);
|
||||
res.status(response.status).json(response);
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function getUser(req, res, next) {
|
||||
try {
|
||||
let uuid = req.params.uuid;
|
||||
let data = await reg.getUser(uuid);
|
||||
if (data.status) {
|
||||
res.status(data.status).json(data);
|
||||
} else {
|
||||
res.json(data);
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function checkAuth(req, res, next) {
|
||||
if (!req.isAuthed) {
|
||||
res.status(401).body("Not Authorised");
|
||||
} else {
|
||||
res.status(200).body("Authorised Successfully");
|
||||
}
|
||||
next();
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
register,
|
||||
request,
|
||||
getUser,
|
||||
checkAuth,
|
||||
};
|
@ -1,13 +1,32 @@
|
||||
const stat = require('../services/stats.services');
|
||||
import { setCache } from "../utils/cacheHeader.utils";
|
||||
|
||||
async function get(req, res, next) {
|
||||
try {
|
||||
res.json(await stat.hits())
|
||||
} catch (err) {
|
||||
console.error(`Controller Error`, err);
|
||||
next(err);
|
||||
}
|
||||
const stat = require("../services/stats.services");
|
||||
|
||||
async function versions(req, res, next) {
|
||||
// API v2
|
||||
try {
|
||||
setCache(res, "public", 60)
|
||||
res.json(await stat.getVersions());
|
||||
} catch (err) {
|
||||
console.error("Controller Error", err);
|
||||
err.status = 500;
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function statistics(req, res, next) {
|
||||
// Api v2
|
||||
try {
|
||||
setCache(res, "public", 60)
|
||||
res.json(await stat.statistics());
|
||||
} catch (err) {
|
||||
console.error("Controller Error", err);
|
||||
err.status = 500;
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
get}
|
||||
versions,
|
||||
statistics,
|
||||
};
|
||||
|
59
src/controllers/train.controllers.js
Normal file
59
src/controllers/train.controllers.js
Normal file
@ -0,0 +1,59 @@
|
||||
import { setCache } from "../utils/cacheHeader.utils";
|
||||
import { logger } from "../utils/logger.utils";
|
||||
|
||||
const train = require("../services/trainService.services");
|
||||
|
||||
async function getByHeadcodeToday(req, res, next) {
|
||||
// Deprecated - for future removal.
|
||||
logger.warn("Deprecated Function Called - trainService.services-getByHeadcodeToday")
|
||||
// if (!req.isAuthed) {
|
||||
// const err = new Error("Unauthorized");
|
||||
// err.status = 401;
|
||||
// next(err);
|
||||
// }
|
||||
try {
|
||||
var searchHeadcode = req.params.id;
|
||||
res.json(await train.findByHeadcodeToday(searchHeadcode));
|
||||
} catch (err) {
|
||||
console.error("Unknown Error", err.message);
|
||||
err.status = 500;
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
async function get(req, res, next) {
|
||||
// if (!req.isAuthed) {
|
||||
// const err = new Error("Unauthorized");
|
||||
// err.status = 401;
|
||||
// next(err);
|
||||
// }
|
||||
let date = req.params.date;
|
||||
let searchType = req.params.searchType;
|
||||
let id = req.params.id;
|
||||
try {
|
||||
switch (searchType) {
|
||||
case "headcode":
|
||||
setCache(res, "private", 1800)
|
||||
res.json(await train.findByHeadcode(id, date));
|
||||
break;
|
||||
case "byTrainUid":
|
||||
setCache(res, "private", 1800)
|
||||
res.json(await train.findByTrainUid(id, date));
|
||||
break;
|
||||
default:
|
||||
res.status(404).json({
|
||||
status: "error",
|
||||
message: `Invalid search type "${searchType}"`,
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(err.message);
|
||||
err.status = 500;
|
||||
next(err);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getByHeadcodeToday,
|
||||
get,
|
||||
};
|
47
src/middlewares/auth.middlewares.ts
Normal file
47
src/middlewares/auth.middlewares.ts
Normal file
@ -0,0 +1,47 @@
|
||||
import type { NextFunction, Request, Response } from "express";
|
||||
import { logger } from "../utils/logger.utils";
|
||||
import { isAuthed } from "../utils/auth.utils";
|
||||
|
||||
module.exports = async function authCheck(
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction
|
||||
) {
|
||||
logger.debug("auth.middleware: Auth check begun");
|
||||
if (process.env.NODE_ENV === "development") {
|
||||
req.isAuthed = true;
|
||||
logger.warn("auth.middleware: DEV MODE - Access Granted");
|
||||
next();
|
||||
} else {
|
||||
const id: string | string[] | undefined = req.headers.uuid;
|
||||
if (typeof id === "undefined") {
|
||||
req.isAuthed = false;
|
||||
logger.info("auth.middleware: Authentication failed");
|
||||
next();
|
||||
} else if (typeof id === "string") {
|
||||
const authCheck = (await isAuthed(id)) || false;
|
||||
if (authCheck) {
|
||||
// Authenticate
|
||||
req.isAuthed = true;
|
||||
next();
|
||||
} else {
|
||||
req.isAuthed = false;
|
||||
logger.info("auth.middleware: Authentication Failed");
|
||||
next();
|
||||
}
|
||||
// Handle cases where UUID passed as an array
|
||||
} else if (Array.isArray(id)) {
|
||||
const authCheck = (await isAuthed(id[0])) || false;
|
||||
if (authCheck) {
|
||||
req.isAuthed = true;
|
||||
next();
|
||||
} else {
|
||||
req.isAuthed = false;
|
||||
logger.warn(
|
||||
"auth.middleware: UUID Passed as Array - Authentication Failed"
|
||||
);
|
||||
next();
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
12
src/middlewares/requireJson.middlewares.js
Normal file
12
src/middlewares/requireJson.middlewares.js
Normal file
@ -0,0 +1,12 @@
|
||||
const log = require("../utils/logs.utils");
|
||||
|
||||
module.exports = async function requireJson(req, res, next) {
|
||||
if (req.headers["content-type"] !== "application/json") {
|
||||
log.out("requireJson.middlewares: Bad Request: Not in JSON format");
|
||||
res.status(400).send({ status: 400, message: "Server requires JSON" });
|
||||
} else {
|
||||
next();
|
||||
}
|
||||
};
|
||||
|
||||
// Possibly want to check the req type?
|
@ -1,23 +0,0 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const findController = require('../controllers/find.controllers');
|
||||
|
||||
/* GET programming languages. */
|
||||
//router.get('/', programmingLanguagesController.get);
|
||||
|
||||
/* POST programming language */
|
||||
//router.post('/', programmingLanguagesController.create);
|
||||
|
||||
/* PUT programming language */
|
||||
//router.put('/:id', programmingLanguagesController.update);
|
||||
|
||||
/* DELETE programming language */
|
||||
//router.delete('/:id', programmingLanguagesController.remove);
|
||||
|
||||
router.get('/name/:id', findController.findName);
|
||||
router.get('/crs/:id', findController.findCrs);
|
||||
router.get('/nlc/:id', findController.findNlc);
|
||||
router.get('/tiploc/:id', findController.findTiploc);
|
||||
router.get('/stanox/:id', findController.findStanox);
|
||||
|
||||
module.exports = router;
|
@ -1,7 +0,0 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const issueController = require('../controllers/issue.controllers');
|
||||
|
||||
router.post('/', issueController.post);
|
||||
|
||||
module.exports = router;
|
@ -1,9 +0,0 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const kubeController = require('../controllers/kube.controllers');
|
||||
|
||||
router.get('/alive', kubeController.getAlive);
|
||||
router.get('/ready', kubeController.getReady);
|
||||
router.get('/time', kubeController.getTime);
|
||||
|
||||
module.exports = router
|
@ -1,19 +0,0 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const ldbController = require('../controllers/ldb.controllers');
|
||||
|
||||
/* GET programming languages. */
|
||||
//router.get('/', programmingLanguagesController.get);
|
||||
|
||||
/* POST programming language */
|
||||
//router.post('/', programmingLanguagesController.create);
|
||||
|
||||
/* PUT programming language */
|
||||
//router.put('/:id', programmingLanguagesController.update);
|
||||
|
||||
/* DELETE programming language */
|
||||
//router.delete('/:id', programmingLanguagesController.remove);
|
||||
|
||||
router.get('/:id', ldbController.get);
|
||||
|
||||
module.exports = router;
|
@ -1,20 +0,0 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const listController = require('../controllers/list.controllers');
|
||||
|
||||
/* GET programming languages. */
|
||||
//router.get('/', programmingLanguagesController.get);
|
||||
|
||||
/* POST programming language */
|
||||
//router.post('/', programmingLanguagesController.create);
|
||||
|
||||
/* PUT programming language */
|
||||
//router.put('/:id', programmingLanguagesController.update);
|
||||
|
||||
/* DELETE programming language */
|
||||
//router.delete('/:id', programmingLanguagesController.remove);
|
||||
|
||||
router.get('/stations', listController.getStations);
|
||||
router.get('/corpus', listController.getCorpus);
|
||||
|
||||
module.exports = router;
|
10
src/routes/live.routes.ts
Normal file
10
src/routes/live.routes.ts
Normal file
@ -0,0 +1,10 @@
|
||||
const express = require("express");
|
||||
const router = express.Router();
|
||||
const ldbCtr = require("../controllers/ldb.controllers");
|
||||
|
||||
// PIS
|
||||
router.get("/station/:id/:type", ldbCtr.getStation);
|
||||
router.get("/station/nearest/:latitude/:longitude", ldbCtr.getNearest);
|
||||
router.get("/train/:searchType/:id", ldbCtr.getTrain);
|
||||
|
||||
module.exports = router;
|
15
src/routes/misc.routes.ts
Normal file
15
src/routes/misc.routes.ts
Normal file
@ -0,0 +1,15 @@
|
||||
const express = require("express");
|
||||
const router = express.Router();
|
||||
|
||||
// Controller Imports
|
||||
const issueCtr = require("../controllers/issue.controllers");
|
||||
const statCtr = require("../controllers/stats.controllers");
|
||||
|
||||
// Routes
|
||||
|
||||
router.get("/server/stats", statCtr.statistics);
|
||||
router.get("/server/versions", statCtr.versions);
|
||||
|
||||
router.post("/issue", issueCtr.post);
|
||||
|
||||
module.exports = router;
|
9
src/routes/pis.routes.ts
Normal file
9
src/routes/pis.routes.ts
Normal file
@ -0,0 +1,9 @@
|
||||
const express = require("express");
|
||||
const router = express.Router();
|
||||
const pisCtr = require("../controllers/pis.controllers");
|
||||
|
||||
// PIS
|
||||
router.get("/byCode/:code", pisCtr.byCode);
|
||||
router.get("/byStartEnd/:startCrs/:endCrs", pisCtr.byStartEndCRS);
|
||||
|
||||
module.exports = router;
|
9
src/routes/ref.routes.ts
Normal file
9
src/routes/ref.routes.ts
Normal file
@ -0,0 +1,9 @@
|
||||
const express = require("express");
|
||||
const router = express.Router();
|
||||
const refCtr = require("../controllers/ref.controllers");
|
||||
|
||||
// Ref
|
||||
router.get("/reasonCode/:code", refCtr.getReasonCode);
|
||||
router.get("/locationCode/:searchType/:id", refCtr.getLocationReference);
|
||||
|
||||
module.exports = router;
|
@ -1,8 +0,0 @@
|
||||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const statsController = require('../controllers/stats.controllers');
|
||||
|
||||
|
||||
router.get('/', statsController.get);
|
||||
|
||||
module.exports = router;
|
8
src/routes/timetable.routes.ts
Normal file
8
src/routes/timetable.routes.ts
Normal file
@ -0,0 +1,8 @@
|
||||
const express = require("express");
|
||||
const router = express.Router();
|
||||
const ttCtr = require("../controllers/train.controllers");
|
||||
|
||||
// PIS
|
||||
router.get("/train/:date/:searchType/:id", ttCtr.get);
|
||||
|
||||
module.exports = router;
|
12
src/routes/user.routes.ts
Normal file
12
src/routes/user.routes.ts
Normal file
@ -0,0 +1,12 @@
|
||||
const express = require("express");
|
||||
const router = express.Router();
|
||||
const regCtr = require("../controllers/registration.controllers");
|
||||
|
||||
// User
|
||||
router.get("/:uuid", regCtr.getUser);
|
||||
router.get("/checkAuth", regCtr.checkAuth);
|
||||
// Not Implemented router.delete('/:uuid', regCtr.deleteUser);
|
||||
router.post("/request", regCtr.request);
|
||||
router.post("/register", regCtr.register);
|
||||
|
||||
module.exports = router;
|
@ -1,73 +0,0 @@
|
||||
// Get CORPUS data from Network Rail and format the data for OwlBoard
|
||||
|
||||
// Network Rail Datafeed user and pass must be stored in `/srv/keys/owlboard/keys.config.js`
|
||||
|
||||
// FUNCTIONS/
|
||||
// initSubset() : Exported: Uses the internal functions to return a clean CORPUS object.
|
||||
// initAll() : Exported: Uses the internal functions to return a full CORPUS object.
|
||||
// get() : Get the CORPUS data from Network Rail as a gzip file.
|
||||
// extract() : Extract the CORPUS JSON file from the GZIP file.
|
||||
// clean() : Cleans the CORPUS data, removing unneccesary non-stations from the data.
|
||||
|
||||
const log = require('../utils/log.utils'); // Log Helper
|
||||
|
||||
const axios = require('axios')
|
||||
const gz = require('node-gzip')
|
||||
|
||||
const corpusUser = process.env.OWL_LDB_CORPUSUSER
|
||||
const corpusPass = process.env.OWL_LDB_CORPUSPASS
|
||||
|
||||
async function subset(allCorpus) {
|
||||
return (await clean(allCorpus))
|
||||
}
|
||||
|
||||
async function get() {
|
||||
var gzipData = await fetch()
|
||||
return (await extract(gzipData))
|
||||
}
|
||||
|
||||
async function fetch() {
|
||||
log.out("corpus.fetch: Fetching CORPUS Data from Network Rail")
|
||||
authHead = Buffer.from(`${corpusUser}:${corpusPass}`).toString('base64')
|
||||
const url = 'https://publicdatafeeds.networkrail.co.uk/ntrod/SupportingFileAuthenticate?type=CORPUS'
|
||||
const options = {
|
||||
method: 'get',
|
||||
timeout: 20000,
|
||||
headers: {'Authorization': `Basic ${authHead}`},
|
||||
responseType: 'arraybuffer'
|
||||
}
|
||||
try {
|
||||
var { data } = await axios.get(url, options)
|
||||
log.out("corpus.fetch: CORPUS Data fetched")
|
||||
} catch (error) {
|
||||
log.out("corpus.fetch: Error fetching CORPUS")
|
||||
log.out(error)
|
||||
}
|
||||
return data
|
||||
}
|
||||
|
||||
async function extract(input) {
|
||||
log.out(`corpus.extract: Extracting CORPUS archive`)
|
||||
var raw = await gz.ungzip(input)
|
||||
var obj = await JSON.parse(raw)
|
||||
return (obj.TIPLOCDATA)
|
||||
}
|
||||
|
||||
async function clean(input) {
|
||||
log.out(`corpus.clean: Removing non-stations from CORPUS data`)
|
||||
let clean = [];
|
||||
for (const element of input) {
|
||||
if (element.STANOX != ' ' && element['3ALPHA'] != ' '){
|
||||
delete(element.UIC);
|
||||
delete(element.NLCDESC16);
|
||||
delete(element.NLC);
|
||||
clean.push(element);
|
||||
}
|
||||
}
|
||||
return clean;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
get,
|
||||
subset
|
||||
}
|
@ -1,131 +1,143 @@
|
||||
const log = require('../utils/log.utils'); // Log Helper
|
||||
import { logger } from "../utils/logger.utils";
|
||||
|
||||
const dbUser = process.env.OWL_DB_USER || "owl"
|
||||
const dbPass = process.env.OWL_DB_PASS || "twittwoo"
|
||||
const dbName = process.env.OWL_DB_NAME || "owlboard"
|
||||
const dbPort = process.env.OWL_DB_PORT || 27017
|
||||
const dbHost = process.env.OWL_DB_HOST || "localhost"
|
||||
const dbUser = process.env.OWL_DB_USER || "owl";
|
||||
const dbPass = process.env.OWL_DB_PASS || "twittwoo";
|
||||
const dbName = process.env.OWL_DB_NAME || "owlboard";
|
||||
const dbPort = process.env.OWL_DB_PORT || 27017;
|
||||
const dbHost = process.env.OWL_DB_HOST || "localhost";
|
||||
const uri = `mongodb://${dbUser}:${dbPass}@${dbHost}:${dbPort}`;
|
||||
const connOpts = {
|
||||
useUnifiedTopology: true,
|
||||
authSource: "owlboard",
|
||||
}
|
||||
|
||||
const { MongoClient } = require('mongodb');
|
||||
const { MongoClient } = require("mongodb");
|
||||
|
||||
const client = new MongoClient(uri);
|
||||
const client = new MongoClient(uri, connOpts);
|
||||
const db = client.db(dbName);
|
||||
|
||||
async function dropCollection(coll){
|
||||
await client.connect();
|
||||
|
||||
// check if collection contains any documents, if it doesn't, it is either empty or non-existent - it doesn't need dropping.
|
||||
var collection = db.collection(coll);
|
||||
var count = await collection.countDocuments();
|
||||
log.out(`DbAccess.dropCollection: Collection '${coll}' contains ${count} documents`)
|
||||
if (count == 0) {
|
||||
log.out(`DbAccess.dropCollection: Collection '${coll}' is empty. Do not need to drop`)
|
||||
} else {
|
||||
log.out(`DbAccess.dropCollection: dropping collection: '${coll}'`)
|
||||
db.dropCollection(coll);
|
||||
log.out(`DbAccess.dropCollection: dropped collection: '${coll}'`)
|
||||
}
|
||||
async function query(collection, query, returnId = false) {
|
||||
await client.connect();
|
||||
logger.trace(`dbAccess.query: Connecting to collection: '${collection}'`);
|
||||
var qcoll = db.collection(collection);
|
||||
var qcursor = qcoll.find(query);
|
||||
if (!returnId) {
|
||||
qcursor.project({ _id: 0 });
|
||||
}
|
||||
logger.trace(query, "dbAccess.query: Runnung Query");
|
||||
increment(collection);
|
||||
let result = await qcursor.toArray();
|
||||
logger.trace(result, "dbAccess.query: Response");
|
||||
return result;
|
||||
}
|
||||
|
||||
async function putCorpus(data){
|
||||
log.out("DbAccess.putCorpus: Uploading CORPUS data to database")
|
||||
await client.connect();
|
||||
try {
|
||||
var coll = db.collection("corpus");
|
||||
await coll.insertMany(data);
|
||||
} catch (error) {
|
||||
log.out("DbAccess.putCorpus: Error uploading Corpus data to database")
|
||||
log.out(error)
|
||||
}
|
||||
};
|
||||
|
||||
async function putStations(data){
|
||||
log.out("DbAccess.putStations: Uploading Stations data to database")
|
||||
await client.connect();
|
||||
try {
|
||||
var coll = db.collection("stations");
|
||||
coll.insertMany(data);
|
||||
} catch (error) {
|
||||
log.out("DbAccess.putStations: Error uploading Stations data to database")
|
||||
log.out(error)
|
||||
}
|
||||
};
|
||||
|
||||
async function updateMeta(type, target, unixTime){
|
||||
await client.connect();
|
||||
var coll = db.collection("meta");
|
||||
var filter = {type: type, target: target};
|
||||
var update = {$set:{updated: unixTime}};
|
||||
var options = {upsert: true}; // If document isn't present will insert.
|
||||
try {
|
||||
var result = await coll.updateOne(filter,update,options)
|
||||
log.out(`dbAccessServices.updateMeta: ${JSON.stringify(result)}`)
|
||||
log.out(`dbAccessServices.updateMeta: meta for '${target}' updated`)
|
||||
} catch (err) {
|
||||
log.out(`dbAccessServices.updateMeta: Unable to update meta for '${target}'`)
|
||||
log.out(err)
|
||||
}
|
||||
async function queryProject(collection, query, projection) {
|
||||
await client.connect();
|
||||
logger.debug(`dbAccess.queryProject: Connecting to col: '${collection}'`);
|
||||
const qcoll = db.collection(collection);
|
||||
const qcursor = qcoll.find(query).project(projection);
|
||||
logger.debug(
|
||||
projection,
|
||||
`dbAccess.query: Running Query: ${JSON.stringify(query)}`
|
||||
);
|
||||
increment(collection);
|
||||
return await qcursor.toArray();
|
||||
}
|
||||
|
||||
async function query(collection, query){
|
||||
await client.connect();
|
||||
log.out(`dbAccess.query: Connecting to collection: '${collection}'`)
|
||||
var qcoll = db.collection(collection);
|
||||
var qcursor = qcoll.find(query)
|
||||
qcursor.project({_id: 0})
|
||||
log.out(`dbAccess.query: Running Query: ${JSON.stringify(query)}`)
|
||||
increment(collection)
|
||||
return (await qcursor.toArray());
|
||||
}
|
||||
|
||||
async function ensureIndex(col, field, text) {
|
||||
await client.connect();
|
||||
if (!text) {
|
||||
log.out(`dbAccess.ensureIndex: Creating index in collection ${col} for field ${field}`)
|
||||
db.createIndex(col, field);
|
||||
} else {
|
||||
log.out(`dbAccess.ensureIndex: Creating text index in collection ${col} for field ${field}`)
|
||||
let idx = {}
|
||||
idx[field] = "text";
|
||||
db.createIndex(col, idx);
|
||||
}
|
||||
log.out(`dbAccess.ensureIndex: Index created`);
|
||||
return;
|
||||
async function queryAggregate(collection, pipeline) {
|
||||
await client.connect();
|
||||
logger.debug(`dbAccess.queryProject: Connecting to col: '${collection}'`);
|
||||
logger.trace(pipeline, "dbAccess.query: Running Aggregation");
|
||||
increment(collection);
|
||||
return await db.collection(collection).aggregate(pipeline).toArray();
|
||||
}
|
||||
|
||||
async function increment(target) {
|
||||
await client.connect();
|
||||
let col = db.collection("meta");
|
||||
let update = {}
|
||||
update[target] = 1
|
||||
col.updateOne({target: "counters"}, {$inc:update})
|
||||
return;
|
||||
logger.debug(`dbAccess.increment: Incrementing counter for: ${target}`);
|
||||
await client.connect();
|
||||
let col = db.collection("meta");
|
||||
let update = {};
|
||||
update[target] = 1;
|
||||
col.updateOne({ target: "counters" }, { $inc: update });
|
||||
}
|
||||
|
||||
async function createCount() {
|
||||
await client.connect();
|
||||
let col = db.collection("meta");
|
||||
var filter = {type: "count", target: "counters"};
|
||||
var update = {$set:{/*since: new Date,*/ type: "count", target: "counters"}};
|
||||
var options = {upsert: true}; // If document isn't present will insert.
|
||||
try {
|
||||
var result = await col.updateOne(filter,update,options)
|
||||
log.out(`dbAccessServices.updateMeta: ${JSON.stringify(result)}`)
|
||||
log.out(`dbAccessServices.updateMeta: count meta added updated`)
|
||||
} catch (err) {
|
||||
log.out(`dbAccessServices.updateMeta: Unable to add count`)
|
||||
log.out(err)
|
||||
}
|
||||
async function addUser(uuid, domain) {
|
||||
// Needs testing
|
||||
logger.debug("dbAccess.addUser: Adding user to database");
|
||||
let doc = { uuid: uuid, domain: domain, atime: new Date() };
|
||||
await client.connect();
|
||||
let col = db.collection("users");
|
||||
let res = await col.insertOne(doc);
|
||||
if (res.insertedId) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
async function addRegReq(uuid, domain) {
|
||||
// Needs testing
|
||||
logger.debug("dbAccess.addRegReq: Adding registration request");
|
||||
let doc = { uuid: uuid, time: new Date(), domain: domain };
|
||||
await client.connect();
|
||||
let col = db.collection("registrations");
|
||||
let res = col.insertOne(doc);
|
||||
return res;
|
||||
}
|
||||
|
||||
async function userAtime(uuid) {
|
||||
// Needs testing
|
||||
logger.debug("dbAccess.userAtime: Updating access time for user");
|
||||
let q = { uuid: uuid };
|
||||
let n = { $set: { uuid: uuid, atime: new Date() } };
|
||||
await client.connect();
|
||||
let col = db.collection("users");
|
||||
let res = col.updateOne(q, n, { upsert: true });
|
||||
return res;
|
||||
}
|
||||
|
||||
// Deletes one single registration request entry from the DB
|
||||
async function delRegReq(uuid) {
|
||||
logger.debug("dbAccess.delRegReq: Deleting a Registration Request");
|
||||
let collection = "registrations";
|
||||
await client.connect();
|
||||
let col = db.collection(collection);
|
||||
col.deleteOne({ uuid: uuid });
|
||||
}
|
||||
|
||||
async function colCount(collection) {
|
||||
logger.debug(
|
||||
`dbAccess.colCount: Counting entries in collection: ${collection}`
|
||||
);
|
||||
await client.connect();
|
||||
let col = db.collection(collection);
|
||||
let count = col.countDocuments();
|
||||
logger.debug(
|
||||
`dbAccess.colCount: Collection: ${collection} contains ${count}` +
|
||||
" documents"
|
||||
);
|
||||
return await count;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
putCorpus,
|
||||
putStations,
|
||||
dropCollection,
|
||||
updateMeta,
|
||||
query,
|
||||
ensureIndex,
|
||||
increment,
|
||||
createCount
|
||||
}
|
||||
query,
|
||||
queryProject,
|
||||
queryAggregate,
|
||||
increment,
|
||||
addUser,
|
||||
userAtime,
|
||||
addRegReq,
|
||||
delRegReq,
|
||||
colCount,
|
||||
};
|
||||
|
||||
export {
|
||||
query,
|
||||
queryProject,
|
||||
queryAggregate,
|
||||
increment,
|
||||
addUser,
|
||||
userAtime,
|
||||
addRegReq,
|
||||
delRegReq,
|
||||
colCount,
|
||||
};
|
||||
|
@ -1,59 +0,0 @@
|
||||
// Parse and return a find request
|
||||
|
||||
const log = require('../utils/log.utils'); // Log Helper
|
||||
const db = require('../services/dbAccess.services');
|
||||
const san = require('../utils/sanitizer.utils')
|
||||
|
||||
// DB Query: query(collection, query)
|
||||
|
||||
// Define collection as all queries are for the "corpus" collection.
|
||||
const col = "corpus"
|
||||
|
||||
async function name(id){
|
||||
log.out(`findServices.name: Finding station name: ${id}`)
|
||||
var name = san.cleanApiEndpointTxt(id.toUpperCase())
|
||||
query = {NLCDESC: name}
|
||||
var data = await db.query(col,query)
|
||||
return data
|
||||
}
|
||||
|
||||
async function crs(id){
|
||||
log.out(`findServices.crs: Finding crs: ${id}`)
|
||||
var crs = san.cleanApiEndpointTxt(id.toUpperCase())
|
||||
query = {'3ALPHA': crs}
|
||||
var data = await db.query(col,query)
|
||||
return data
|
||||
}
|
||||
|
||||
async function nlc(id){
|
||||
log.out(`findServices.nlc: Finding nlc: ${id}`)
|
||||
var nlc = san.cleanApiEndpointNum(id)
|
||||
query = {NLC: parseInt(nlc)}
|
||||
log.out(`findServices.nlc: NLC Converted to int: ${query}`)
|
||||
var data = await db.query(col,query)
|
||||
return data
|
||||
}
|
||||
|
||||
async function tiploc(id){
|
||||
log.out(`findServices.tiploc: Finding tiploc: ${id}`)
|
||||
var tiploc = san.cleanApiEndpointTxt(id.toUpperCase())
|
||||
query = {TIPLOC: tiploc}
|
||||
var data = await db.query(col,query)
|
||||
return data
|
||||
}
|
||||
|
||||
async function stanox(id){
|
||||
log.out(`findServices.stanox: Finding stanox: ${id}`)
|
||||
var stanox = san.cleanApiEndpointNum(id)
|
||||
query = {STANOX: String(stanox)}
|
||||
var data = await db.query(col,query)
|
||||
return data
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
name,
|
||||
crs,
|
||||
nlc,
|
||||
tiploc,
|
||||
stanox
|
||||
}
|
55
src/services/find.services.ts
Normal file
55
src/services/find.services.ts
Normal file
@ -0,0 +1,55 @@
|
||||
// Parse and return a find request
|
||||
|
||||
import { query } from "../services/dbAccess.services";
|
||||
import {
|
||||
cleanApiEndpointTxt,
|
||||
cleanApiEndpointNum,
|
||||
} from "../utils/sanitizer.utils";
|
||||
import { logger } from "../utils/logger.utils";
|
||||
|
||||
// Define collection as all queries are for the "corpus" collection.
|
||||
const col: string = "corpus";
|
||||
|
||||
async function name(id: string) {
|
||||
logger.debug(`findServices.name: Finding station name: ${id}`);
|
||||
var name = cleanApiEndpointTxt(id.toUpperCase());
|
||||
let queryObj = { NLCDESC: name };
|
||||
return await query(col, queryObj);
|
||||
}
|
||||
|
||||
async function crs(id: string) {
|
||||
logger.debug(`findServices.crs: Finding crs: ${id}`);
|
||||
var crs = cleanApiEndpointTxt(id.toUpperCase());
|
||||
let queryObj = { "3ALPHA": crs };
|
||||
return await query(col, queryObj);
|
||||
}
|
||||
|
||||
async function nlc(id: string) {
|
||||
logger.debug(`findServices.nlc: Finding nlc: ${id}`);
|
||||
var nlc = cleanApiEndpointNum(id);
|
||||
let queryObj = { NLC: parseInt(nlc) };
|
||||
logger.trace(`findServices.nlc: NLC Converted to int: ${query}`);
|
||||
return await query(col, queryObj);
|
||||
}
|
||||
|
||||
async function tiploc(id: string) {
|
||||
logger.debug(`findServices.tiploc: Finding tiploc: ${id}`);
|
||||
var tiploc = cleanApiEndpointTxt(id.toUpperCase());
|
||||
let queryObj = { TIPLOC: tiploc };
|
||||
return await query(col, queryObj);
|
||||
}
|
||||
|
||||
async function stanox(id: string) {
|
||||
logger.debug(`findServices.stanox: Finding stanox: ${id}`);
|
||||
var stanox = cleanApiEndpointNum(id);
|
||||
let queryObj = { STANOX: String(stanox) };
|
||||
return await query(col, queryObj);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
name,
|
||||
crs,
|
||||
nlc,
|
||||
tiploc,
|
||||
stanox,
|
||||
};
|
@ -1,33 +1,52 @@
|
||||
const axios = require('axios')
|
||||
const log = require('../utils/log.utils')
|
||||
import { logger } from "../utils/logger.utils";
|
||||
|
||||
const issueLabels = {
|
||||
bug: 120,
|
||||
enhancement: 122,
|
||||
question: 125,
|
||||
"user-support": 152,
|
||||
"web-user": 153,
|
||||
};
|
||||
|
||||
async function processor(data) {
|
||||
log.out(`issueService.processor: Issue received`)
|
||||
let out = {}
|
||||
out.title = data.subject.replace(/<[^>]+>|[\*\$]/g, '');
|
||||
out.body = data.msg.replace(/<[^>]+>|[\*\$]/g, '')
|
||||
sendToGitea(out);
|
||||
logger.debug("issueService.processor: Issue received");
|
||||
let out = {};
|
||||
out.labels = [issueLabels[data?.label] || 0, issueLabels["web-user"]];
|
||||
out.title = data?.subject.replace(/<[^>]+>|[\*\$]/g, "");
|
||||
out.body = data?.msg.replace(/<[^>]+>|[\*\$]/g, "");
|
||||
return await sendToGitea(out);
|
||||
}
|
||||
|
||||
async function sendToGitea(body) {
|
||||
let key = process.env.OWL_GIT_ISSUEBOT
|
||||
let url = process.env.OWL_GIT_APIENDPOINT
|
||||
let opts = {
|
||||
headers: {
|
||||
Authorization: key
|
||||
}
|
||||
}
|
||||
var res = await axios.post(url, body, opts)
|
||||
// Need to read the output from the POST and pass the result upwards to the client.
|
||||
if (res.status == 201) {
|
||||
log.out("issueService.sendToGitea: Issue sent to Gitea")
|
||||
return {status: res.status,message:"issue created"}
|
||||
try {
|
||||
const key = process.env.OWL_GIT_ISSUEBOT;
|
||||
const url = process.env.OWL_GIT_APIENDPOINT;
|
||||
const opts = {
|
||||
method: "POST",
|
||||
headers: {
|
||||
Authorization: key,
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(body),
|
||||
};
|
||||
|
||||
const res = await fetch(url, opts);
|
||||
|
||||
if (res.status === 201) {
|
||||
logger.debug("issueService.sendToGitea: Issue created");
|
||||
return { status: res.status, message: "issue created" };
|
||||
} else {
|
||||
log.out("issueService.sendToGitea: Failed to send issue to Gitea")
|
||||
return {status: res.status,message:"issue not created"}
|
||||
logger.error(
|
||||
`issueService.sendtoGitea: Error creating issue RETURN: ${res.status}`
|
||||
);
|
||||
return { status: res.status, message: "issue not created" };
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(err, `issueService.sendToGitea`);
|
||||
return { status: 500, message: "Internal Server Error" };
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
processor
|
||||
}
|
||||
processor,
|
||||
};
|
||||
|
@ -1,20 +0,0 @@
|
||||
async function getAlive(){
|
||||
log.out(`kubeServices.getAlive: alive hook checked`)
|
||||
return {code: 200, state: {state: "alive",noise: "twit-twoo"}}
|
||||
}
|
||||
|
||||
async function getReady(){
|
||||
log.out(`kubeServices.getReady: ready hook checked`)
|
||||
return "not_implemented";
|
||||
};
|
||||
|
||||
async function getTime(){
|
||||
var now = new Date()
|
||||
return {responseGenerated: now}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getAlive,
|
||||
getReady,
|
||||
getTime
|
||||
}
|
22
src/services/kube.services.ts
Normal file
22
src/services/kube.services.ts
Normal file
@ -0,0 +1,22 @@
|
||||
import { logger } from "../utils/logger.utils";
|
||||
|
||||
async function getAlive() {
|
||||
logger.trace("kubeServices.getAlive: alive hook checked");
|
||||
return { code: 200, state: { state: "alive", noise: "twit-twoo" } };
|
||||
}
|
||||
|
||||
async function getReady() {
|
||||
logger.trace("kubeServices.getReady: ready hook checked");
|
||||
return "not_implemented";
|
||||
}
|
||||
|
||||
async function getTime() {
|
||||
var now: Date = new Date();
|
||||
return { responseGenerated: now };
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getAlive,
|
||||
getReady,
|
||||
getTime,
|
||||
};
|
@ -1,52 +1,218 @@
|
||||
// Parse and return an LDB Request
|
||||
|
||||
// FUNCTIONS
|
||||
// post(body, id): Exported:
|
||||
// body: [req.body from controller]
|
||||
// id : [req.params.id from controller - this is expected to be CRS or TIPLOC]
|
||||
const ldb = require("ldbs-json");
|
||||
const util = require("../utils/ldb.utils");
|
||||
const san = require("../utils/sanitizer.utils");
|
||||
const db = require("../services/dbAccess.services");
|
||||
|
||||
// convertTiploc(TIPLOC) : Exported: Looks up CRS, Name & STANOX for Tiploc
|
||||
import { findStationsByDistancePipeline } from "../utils/ldbPipeline.utils";
|
||||
import { logger } from "../utils/logger.utils";
|
||||
|
||||
const log = require('../utils/log.utils'); // Log Helper
|
||||
const ldb = require('ldbs-json')
|
||||
const util = require('../utils/ldb.utils')
|
||||
const san = require('../utils/sanitizer.utils')
|
||||
const db = require('../services/dbAccess.services')
|
||||
import { transform as staffStationTransform } from "../utils/processors/ldb/staffStation";
|
||||
|
||||
const ldbKey = process.env.OWL_LDB_KEY
|
||||
const ldbsvKey = process.env.OWL_LDB_SVKEY
|
||||
|
||||
async function get(body, id){
|
||||
var cleanId = san.cleanApiEndpointTxt(id);
|
||||
var obj = await util.checkCrs(cleanId);
|
||||
try {
|
||||
var crs = obj[0]['3ALPHA'];
|
||||
log.out(`ldbService.get: Determined CRS for lookup to be: ${crs}`);
|
||||
var data = await arrDepBoard(crs);
|
||||
db.increment("ldbws") // Need to add creation of this document to the database. >> {type:"count",counting:"api_hit",target:"ldbws",since:"DATE"}
|
||||
} catch (err) {
|
||||
log.out(`ldbService.get: Error, Unable to find CRS: ${err}`)
|
||||
var data = {ERROR:'NOT_FOUND',description:'The entered station was not found. Please check and try again.'};
|
||||
const ldbKey = process.env.OWL_LDB_KEY;
|
||||
const ldbsvKey = process.env.OWL_LDB_SVKEY;
|
||||
|
||||
async function get(id, staff = false) {
|
||||
const cleanId = san.cleanApiEndpointTxt(id);
|
||||
const obj = await util.checkCrs(cleanId);
|
||||
try {
|
||||
const crs = obj[0]["3ALPHA"];
|
||||
logger.debug(`ldbService.get: Determined CRS for lookup to be: ${crs}`);
|
||||
if (staff) {
|
||||
const data = arrDepBoardStaff(crs);
|
||||
db.increment("ldbsvws");
|
||||
return await data;
|
||||
} else {
|
||||
const data = arrDepBoard(crs);
|
||||
db.increment("ldbws");
|
||||
return await data;
|
||||
}
|
||||
return data;
|
||||
} catch (err) {
|
||||
logger.error(err, "ldbService.get: Error, Unable to find CRS");
|
||||
return {
|
||||
obStatus: "LOC_NOT_FOUND",
|
||||
obMsg: "Location is not available",
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async function arrDepBoard(CRS){
|
||||
log.out(`ldbService.arrDepBoard: Trying to fetch ArrDep Board for ${CRS}`)
|
||||
async function arrDepBoard(CRS) {
|
||||
logger.trace(`ldbService.arrDepBoard: Trying to fetch board for ${CRS}`);
|
||||
try {
|
||||
const options = {
|
||||
numRows: 10,
|
||||
crs: CRS.toUpperCase(),
|
||||
};
|
||||
const api = new ldb(ldbKey, false);
|
||||
let d = await api.call("GetArrDepBoardWithDetails", options, false, false);
|
||||
return await util.cleanData(d);
|
||||
} catch (err) {
|
||||
logger.error(err, "ldbService.arrDepBoard: Lookup Failed");
|
||||
return {
|
||||
GetStationBoardResult: "not available",
|
||||
Reason: `The CRS code ${CRS} is not valid`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async function arrDepBoardStaff(CRS) {
|
||||
logger.debug(`ldbService.arrDepBoardStaff: Try to fetch board for ${CRS}`);
|
||||
try {
|
||||
const options = {
|
||||
numRows: 40,
|
||||
crs: CRS.toUpperCase(),
|
||||
getNonPassengerServices: true,
|
||||
time: await getDateTimeString(new Date()),
|
||||
timeWindow: 120,
|
||||
services: "PBS",
|
||||
};
|
||||
const api = new ldb(ldbsvKey, true);
|
||||
console.time(`Fetch Staff LDB for ${CRS.toUpperCase()}`);
|
||||
let result
|
||||
try {
|
||||
var options = {
|
||||
numRows: 10,
|
||||
crs: CRS.toUpperCase()
|
||||
}
|
||||
var api = new ldb(ldbKey,false)
|
||||
var reply = await api.call("GetArrDepBoardWithDetails",options)
|
||||
return reply
|
||||
result = await staffApiCallRetry(
|
||||
api,
|
||||
"GetArrivalDepartureBoardByCRS",
|
||||
options,
|
||||
5,
|
||||
);
|
||||
} catch (err) {
|
||||
log.out(`ldbService.arrDepBoard: Lookup Failed for: ${CRS}`)
|
||||
return {GetStationBoardResult: "not available", Reason: `The CRS code ${CRS} is not valid`, Why: `Sometimes a station will have more than one CRS - for example Filton Abbey Wood has FIT and FAW however schedules are only available when looking up with FIT - this is how the National Rail Enquiries systems work.`};
|
||||
logger.error(err, "Error fetching board data");
|
||||
return {obStatus: "Error", obMsg: "Error fetching data from National Rail", data: null}
|
||||
}
|
||||
};
|
||||
console.timeEnd(`Fetch Staff LDB for ${CRS.toUpperCase()}`);
|
||||
try {
|
||||
const _staffLdb = staffStationTransform(result);
|
||||
logger.debug("StaffLDB Transformed");
|
||||
logger.trace(_staffLdb, "StaffLDB Transformed");
|
||||
return {
|
||||
obStatus: "OK",
|
||||
obMsg: "OK",
|
||||
data: _staffLdb,
|
||||
};
|
||||
} catch (err) {
|
||||
logger.error(err, "Transformation Error");
|
||||
}
|
||||
return result;
|
||||
} catch (err) {
|
||||
logger.error(err, "ldbService.arrDepBoardStaff error");
|
||||
return {
|
||||
GetStationBoardResult: "not available",
|
||||
Reason: `The CRS code ${CRS} is not valid`,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async function getServiceByRID(rid) {
|
||||
logger.debug(`ldbService.getServiceByRID: Finding RID: ${rid}`);
|
||||
try {
|
||||
const options = {
|
||||
rid: String(rid),
|
||||
};
|
||||
const api = new ldb(ldbsvKey, true);
|
||||
return await api.call("GetServiceDetailsByRID", options, false, false);
|
||||
} catch (err) {
|
||||
logger.error(err, `ldbService.queryService`);
|
||||
}
|
||||
}
|
||||
|
||||
async function getServicesByOther(id) {
|
||||
logger.debug(`ldbService.getServiceByOther: Finding services: ${id}`);
|
||||
try {
|
||||
const options = {
|
||||
serviceID: id,
|
||||
sdd: getDateString(new Date()),
|
||||
};
|
||||
const api = new ldb(ldbsvKey, true);
|
||||
return await api.call("QueryServices", options, false, false);
|
||||
} catch (err) {
|
||||
logger.error(err, "ldbService.getServiceByOther");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async function staffApiCallRetry(api, method, options, retries) {
|
||||
for (let i=0; i < retries; i++) {
|
||||
try {
|
||||
return await api.call(method, options, false, false);
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOTFOUND') {
|
||||
logger.warn(err, "DNS ERR")
|
||||
if (i < retries - 1) {
|
||||
logger.debug('Retrying API Call')
|
||||
await delay(500)
|
||||
continue;
|
||||
}
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
throw new Error("Max retries exceeded");
|
||||
}
|
||||
|
||||
function delay(ms) {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
async function getReasonCodeList() {
|
||||
logger.debug("ldbService.getReasonCodeList: Fetching reason code list");
|
||||
try {
|
||||
const dbFilter = {};
|
||||
return await db.query("reasonCodes", dbFilter, false);
|
||||
} catch (err) {
|
||||
logger.error(err, "ldbService.getReasonCodeList");
|
||||
}
|
||||
}
|
||||
|
||||
async function getReasonCode(code) {
|
||||
logger.debug(`ldbService.getReasonCode: Fetching reason code ${code}`);
|
||||
try {
|
||||
const dbFilter = {
|
||||
code: code,
|
||||
};
|
||||
return await db.query("reasonCodes", dbFilter, false);
|
||||
} catch (err) {
|
||||
logger.error(err, "ldbService.getReasonCode");
|
||||
}
|
||||
}
|
||||
|
||||
async function getNearestStations(lat, long) {
|
||||
|
||||
logger.debug(`ldbService.getNearestStations: Fetching nearest stations`)
|
||||
let pipeline = findStationsByDistancePipeline(4, lat, long)
|
||||
try {
|
||||
return await db.queryAggregate("stations", pipeline)
|
||||
} catch (err) {
|
||||
logger.error(err, `ldbService.getNearestStations`)
|
||||
}
|
||||
}
|
||||
|
||||
async function getDateTimeString(date) {
|
||||
const year = date.getFullYear(),
|
||||
month = String(date.getMonth() + 1).padStart(2, "0"),
|
||||
day = String(date.getDate()).padStart(2, "0"),
|
||||
hour = String(date.getHours()).padStart(2, "0"),
|
||||
minute = String(date.getMinutes()).padStart(2, "0"),
|
||||
second = String(date.getSeconds()).padStart(2, "0");
|
||||
const format = `${year}-${month}-${day}T${hour}:${minute}:${second}`;
|
||||
return format;
|
||||
}
|
||||
|
||||
async function getDateString(date) {
|
||||
const year = date.getFullYear(),
|
||||
month = String(date.getMonth() + 1).padStart(2, "0"),
|
||||
day = String(date.getDate()).padStart(2, "0");
|
||||
const format = `${year}-${month}-${day}`;
|
||||
return format;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
get
|
||||
}
|
||||
get,
|
||||
getServiceByRID,
|
||||
getServicesByOther,
|
||||
getReasonCodeList,
|
||||
getReasonCode,
|
||||
getNearestStations,
|
||||
};
|
||||
|
@ -1,20 +0,0 @@
|
||||
const log = require('../utils/log.utils'); // Log Helper
|
||||
const db = require('../services/dbAccess.services')
|
||||
const os = require('os')
|
||||
|
||||
async function getStations(){
|
||||
var out = await db.query("stations")
|
||||
log.out(`listServices.getStations: fetched stations list`)
|
||||
return out;
|
||||
}
|
||||
|
||||
async function getCorpus(){
|
||||
var out = await db.query("corpus")
|
||||
log.out(`listServices.getCorpus: fetched CORPUS list`)
|
||||
return out;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getStations,
|
||||
getCorpus
|
||||
}
|
19
src/services/list.services.ts
Normal file
19
src/services/list.services.ts
Normal file
@ -0,0 +1,19 @@
|
||||
import { query } from "./dbAccess.services";
|
||||
import { logger } from "../utils/logger.utils";
|
||||
|
||||
async function getStations() {
|
||||
var out = query("stations");
|
||||
logger.debug("listServices.getStations: Fetching stations list");
|
||||
return await out;
|
||||
}
|
||||
|
||||
async function getCorpus() {
|
||||
var out = query("corpus");
|
||||
logger.debug("listServices.getCorpus: Fetching CORPUS list");
|
||||
return await out;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getStations,
|
||||
getCorpus,
|
||||
};
|
45
src/services/mail.services.ts
Normal file
45
src/services/mail.services.ts
Normal file
@ -0,0 +1,45 @@
|
||||
const mail = require("nodemailer");
|
||||
import { logger } from "../utils/logger.utils";
|
||||
|
||||
export interface msgDescriptor {
|
||||
to: string;
|
||||
cc?: string;
|
||||
bcc?: string;
|
||||
subject: string;
|
||||
txt: string;
|
||||
html?: string;
|
||||
from: string;
|
||||
}
|
||||
|
||||
const fromAddr = process.env.OWL_EML_FROM;
|
||||
const smtpUser = process.env.OWL_EML_USER;
|
||||
const smtpPass = process.env.OWL_EML_PASS;
|
||||
const smtpHost = process.env.OWL_EML_HOST;
|
||||
const smtpPort = process.env.OWL_EML_PORT;
|
||||
|
||||
const transporter = mail.createTransport({
|
||||
host: smtpHost,
|
||||
port: smtpPort,
|
||||
secure: false, // Must be false for STARTTLS on port 587 which is always secure
|
||||
auth: {
|
||||
user: smtpUser,
|
||||
pass: smtpPass,
|
||||
},
|
||||
});
|
||||
|
||||
async function send(message: msgDescriptor): Promise<boolean> {
|
||||
logger.debug("mailServices.send: Message send request received");
|
||||
message.from = fromAddr || "no-reply@owlboard.info";
|
||||
try {
|
||||
var res = await transporter.sendMail(message);
|
||||
} catch (err) {
|
||||
logger.error(err, "mailServices.send: Message send failed");
|
||||
return false;
|
||||
}
|
||||
logger.debug(res.response, "mailServices.send: SMTP Response");
|
||||
return true;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
send,
|
||||
};
|
142
src/services/pis.services.ts
Normal file
142
src/services/pis.services.ts
Normal file
@ -0,0 +1,142 @@
|
||||
// Finds PIS Codes using DB Lookups
|
||||
|
||||
const db = require("../services/dbAccess.services");
|
||||
const clean = require("../utils/sanitizer.utils");
|
||||
|
||||
import type { OB_Pis_SimpleObject } from "@owlboard/ts-types";
|
||||
|
||||
import { logger } from "../utils/logger.utils";
|
||||
import { queryAggregate } from "./dbAccess.services";
|
||||
import {
|
||||
getPartialEndTiplocMatchPipeline,
|
||||
getFullTiplocMatchPipeline,
|
||||
getPartialStartTiplocMatchPipeline,
|
||||
} from "../utils/pis.utils";
|
||||
import { Document } from "mongodb";
|
||||
|
||||
export const supported = ["GW", "UK", "HX"];
|
||||
|
||||
export async function findPisByOrigDest(start: string, end: string) {
|
||||
logger.debug(
|
||||
`pisServices.findPisByOrigDest: Searching for Orig: ${start}, Dest: ${end}`
|
||||
);
|
||||
const firstCrs = clean.cleanApiEndpointTxt(start.toLowerCase());
|
||||
const lastCrs = clean.cleanApiEndpointTxt(end.toLowerCase());
|
||||
const query = {
|
||||
stops: {
|
||||
$all: [
|
||||
{ $elemMatch: { $eq: firstCrs } },
|
||||
{ $elemMatch: { $eq: lastCrs } },
|
||||
],
|
||||
},
|
||||
$expr: {
|
||||
$and: [
|
||||
{ $eq: [{ $arrayElemAt: ["$stops", -1] }, lastCrs] },
|
||||
{ $eq: [{ $arrayElemAt: ["$stops", 0] }, firstCrs] },
|
||||
],
|
||||
},
|
||||
};
|
||||
const search = await db.query("pis", query);
|
||||
// Check for results, if none then try partial match
|
||||
return search;
|
||||
}
|
||||
|
||||
export async function findPisByCode(
|
||||
code: string
|
||||
): Promise<OB_Pis_SimpleObject | null> {
|
||||
logger.debug(`pisServices.findPisByCode: Searching for PIS code: ${code}`);
|
||||
const cleanCode = clean.removeNonNumeric(code);
|
||||
const query = {
|
||||
code: cleanCode,
|
||||
};
|
||||
const search = db.query("pis", query);
|
||||
return await search;
|
||||
}
|
||||
|
||||
// Queries the database for PIS codes that match the given TIPLOC array
|
||||
export async function findByTiplocArray(
|
||||
tiplocArray: string[]
|
||||
): Promise<OB_Pis_SimpleObject | null> {
|
||||
// Firstly fix errant TIPLOCS such as RDNG4AB which will never return a result
|
||||
// currently only works with RDNG4AB - checks for presence of RDNG4AB first to
|
||||
// avoid iterating over every array ever searched for.
|
||||
if (tiplocArray.includes("RDNG4AB")) {
|
||||
fixErrantTiplocs(tiplocArray);
|
||||
}
|
||||
|
||||
// PERFORMANCE NOTE:
|
||||
// The majority of queries will return a full match,
|
||||
// this means that a more performant pipeline is used
|
||||
// to find a full match and only then are more
|
||||
// complicated pipelines used to find partial matches
|
||||
// if the first pipeline returns nothing.
|
||||
try {
|
||||
const exactMatch = await findExactMatchByTiploc(tiplocArray);
|
||||
if (exactMatch) {
|
||||
return convertDocument(exactMatch, "none");
|
||||
} else {
|
||||
const partialEnd = await findPartialEndMatchByTiploc(tiplocArray);
|
||||
if (partialEnd) {
|
||||
return convertDocument(partialEnd, "first");
|
||||
} else {
|
||||
const partialStart = await findPartialStartMatchByTiploc(tiplocArray);
|
||||
if (partialStart) {
|
||||
return convertDocument(partialStart, "last");
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
logger.error(err, "Error in findByTiplocArray");
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Uses a pipeline to find an exact match
|
||||
async function findExactMatchByTiploc(array: string[]): Promise<Document> {
|
||||
const pipeline = getFullTiplocMatchPipeline(array);
|
||||
const res = await queryAggregate("pis", pipeline);
|
||||
return res[0];
|
||||
}
|
||||
|
||||
// Uses a pipeline to find a partial match - only supporting
|
||||
// codes ending with the correct stops for now.
|
||||
async function findPartialEndMatchByTiploc(array: string[]): Promise<Document> {
|
||||
const pipeline = getPartialEndTiplocMatchPipeline(array);
|
||||
const res = await queryAggregate("pis", pipeline);
|
||||
return res[0];
|
||||
}
|
||||
|
||||
// Uses a pipeline to find a partial match - supporting codes starting with the correct stops.
|
||||
async function findPartialStartMatchByTiploc(array: string[]): Promise<Document> {
|
||||
const pipeline = getPartialStartTiplocMatchPipeline(array);
|
||||
const res = await queryAggregate("pis", pipeline);
|
||||
return res[0];
|
||||
}
|
||||
|
||||
function convertDocument(doc: Document, skipType: string): OB_Pis_SimpleObject {
|
||||
return {
|
||||
code: doc.code.toString(),
|
||||
toc: doc.toc,
|
||||
skipCount: doc.skipStops,
|
||||
skipType: skipType,
|
||||
};
|
||||
}
|
||||
|
||||
// Changes any instance of 'RDNG4AB' to 'RDNGSTN'
|
||||
function fixErrantTiplocs(input: string[]): void {
|
||||
input.forEach((value, index, array) => {
|
||||
if (value === "RDNG4AB") {
|
||||
array[index] = "RDNGSTN";
|
||||
}
|
||||
// Additional substitutions can be applied here
|
||||
});
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
supported,
|
||||
findPisByOrigDest,
|
||||
findPisByCode,
|
||||
findByTiplocArray,
|
||||
};
|
24
src/services/redis.services.ts
Normal file
24
src/services/redis.services.ts
Normal file
@ -0,0 +1,24 @@
|
||||
import { createClient } from "redis";
|
||||
|
||||
import zlib from "zlib";
|
||||
|
||||
const client = createClient({
|
||||
url: "redis:PORT",
|
||||
});
|
||||
|
||||
client.on("error", (err) => console.log("Redis Client Error", err));
|
||||
|
||||
async function addToCache(key: string, value: Object): Promise<boolean> {
|
||||
throw new Error("Unable to post to cache");
|
||||
}
|
||||
|
||||
async function getFromCache(key: string): Promise<Object> {
|
||||
throw new Error("Unable to retreive");
|
||||
}
|
||||
/*
|
||||
await client.connect();
|
||||
|
||||
await client.set('key', 'value');
|
||||
const value = await client.get('key');
|
||||
await client.disconnect();
|
||||
*/
|
77
src/services/registration.services.js
Normal file
77
src/services/registration.services.js
Normal file
@ -0,0 +1,77 @@
|
||||
const auth = require("../utils/auth.utils");
|
||||
const db = require("./dbAccess.services");
|
||||
const mail = require("./mail.services");
|
||||
const errors = require("../configs/errorCodes.configs");
|
||||
|
||||
import { logger } from "../utils/logger.utils";
|
||||
import { getDomainFromEmail } from "../utils/sanitizer.utils";
|
||||
import { valid as validDomains } from "../configs/domains.configs";
|
||||
import { generateCode } from "../utils/auth.utils";
|
||||
|
||||
async function createRegKey(body) {
|
||||
logger.debug("registerServices.createRegKey: Incoming request");
|
||||
if (body.email) {
|
||||
const domain = getDomainFromEmail(body.email);
|
||||
logger.info(`registerServices: Registration request from: ${domain}`);
|
||||
if (validDomains.includes(domain)) {
|
||||
logger.debug(`registerServices.createRegKey: Key from valid: ${domain}`);
|
||||
const key = generateCode()
|
||||
db.addRegReq(key, domain)
|
||||
const message = await auth.generateConfirmationEmail(body.email, key);
|
||||
if (!message) {
|
||||
const err = new Error("Message Generation Error");
|
||||
logger.error(err, "registerServices.createRegKey: Error generating email");
|
||||
return 500;
|
||||
}
|
||||
if ((await mail.send(message)) == true) {
|
||||
return {status: 201, message: "email sent"};
|
||||
}
|
||||
return {status:500, errorCode:950, errorMsg: errors[950]}
|
||||
}
|
||||
return { status: 403, errorCode: 702, errorMsg: errors[702] };
|
||||
} else {
|
||||
return { status: 400, errorCode: 901, errorMsg: errors[902] };
|
||||
}
|
||||
}
|
||||
|
||||
async function regUser(req) {
|
||||
// Add input validation
|
||||
const regCode = req.uuid.toLocaleUpperCase();
|
||||
logger.trace(`Read UUID: ${regCode}`);
|
||||
const res = await auth.checkRequest(regCode);
|
||||
logger.debug(res, "registrationServices");
|
||||
if (res.result) {
|
||||
const uuid = await auth.generateKey();
|
||||
const apiKey = await db.addUser(uuid, res.domain);
|
||||
if (apiKey) {
|
||||
db.delRegReq(req.uuid);
|
||||
return { status: 201, message: "User added", api_key: uuid };
|
||||
}
|
||||
}
|
||||
return { status: 401, errorCode: 703, errorMsg: errors[703] };
|
||||
}
|
||||
|
||||
// Currently errors on a correct code as it cannot be found... Ensure uuid is ALL CAPS
|
||||
|
||||
async function getUser(uuid) {
|
||||
try {
|
||||
const filter = {
|
||||
uuid: uuid,
|
||||
};
|
||||
const res = await db.query("users", filter, false);
|
||||
if (res.length) {
|
||||
return res;
|
||||
} else {
|
||||
return { status: 404, errorCode: 400, errorMsg: errors[400] };
|
||||
}
|
||||
} catch (err) {
|
||||
console.log(err);
|
||||
return { status: 500, errorCode: 951, errorMsg: errors[951] };
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
regUser,
|
||||
createRegKey,
|
||||
getUser,
|
||||
};
|
@ -1,16 +1,111 @@
|
||||
const log = require('../utils/log.utils'); // Log Helper
|
||||
const db = require('../services/dbAccess.services')
|
||||
const os = require('os')
|
||||
const db = require("../services/dbAccess.services");
|
||||
const os = require("os");
|
||||
const vers = require("../configs/version.configs");
|
||||
|
||||
async function hits(){
|
||||
var dat = await db.query("meta", {target: "counters"});
|
||||
log.out(`listServices.meta: fetched server meta`)
|
||||
let out = {}
|
||||
out.host = os.hostname()
|
||||
out.dat = dat
|
||||
return out;
|
||||
import { logger } from "../utils/logger.utils";
|
||||
|
||||
// Assembles the statistics JSON: runtime mode, hostname and the document
// count of each main collection. All counts are started concurrently and
// awaited afterwards.
async function buildJson() {
  // Kick off every collection count before awaiting any of them.
  const pendingCounts = {
    users: db.colCount("users"),
    reg: db.colCount("registrations"),
    pis: db.colCount("pis"),
    corpus: db.colCount("corpus"),
    stations: db.colCount("stations"),
    timetable: db.colCount("timetable"),
  };
  const json = { count: {} };
  json.mode = process.env.NODE_ENV;
  json.host = os.hostname();
  for (const [collection, promise] of Object.entries(pendingCounts)) {
    json.count[collection] = await promise;
  }
  return json;
}
|
||||
|
||||
// Logs the request and returns the statistics JSON built by buildJson().
async function hits() {
  logger.debug("statsServices.hits: Statistics Requested");
  return await buildJson();
}
|
||||
|
||||
// Reports the backend version plus the mq-client (timetable-mgr) version
// recorded in the `versions` collection; "" when no record exists.
async function getVersions() {
  logger.debug("statsServices.getVersions: Fetching versions");
  const rows = await db.query("versions", { target: "timetable-mgr" });
  const mqClientVersion = rows[0]?.["version"] || "";
  return {
    backend: vers.app,
    "mq-client": mqClientVersion,
  };
}
|
||||
|
||||
// Gathers the full statistics document: last-update times of the data
// collections and the document count of each collection, fetched in
// parallel via Promise.all.
async function statistics() {
  logger.debug("statsServices.statistics: Fetching statistics");

  // Metadata documents holding each dataset's last-update timestamp.
  const timetablePromise = db.query("meta", { type: "CifMetadata" });
  const pisPromise = db.query("meta", { type: "PisMetadata" });
  const corpusPromise = db.query("meta", { target: "corpus" });
  const stationsPromise = db.query("meta", {type: "StationsMetadata"});

  // Collection sizes.
  const lengthUsersPromise = db.colCount("users");
  const lengthRegistrationsPromise = db.colCount("registrations");
  const lengthCorpusPromise = db.colCount("corpus");
  const lengthStationsPromise = db.colCount("stations");
  const lengthPisPromise = db.colCount("pis");
  const lengthTimetablePromise = db.colCount("timetable");
  // NOTE(review): lengthReasonCodesPromise is created but never awaited or
  // used below - an unhandled rejection risk; confirm whether reasonCodes
  // should be included in the Promise.all and the response.
  const lengthReasonCodesPromise = db.colCount("reasonCodes");

  // Destructuring order must match the array passed to Promise.all exactly
  // (note `stations` is intentionally last in both lists).
  const [
    timetable,
    pis,
    corpus,
    lengthUsers,
    lengthRegistrations,
    lengthCorpus,
    lengthStations,
    lengthPis,
    lengthTimetable,
    stations,
  ] = await Promise.all([
    timetablePromise,
    pisPromise,
    corpusPromise,
    lengthUsersPromise,
    lengthRegistrationsPromise,
    lengthCorpusPromise,
    lengthStationsPromise,
    lengthPisPromise,
    lengthTimetablePromise,
    stationsPromise,
  ]);

  // NOTE(review): the [0][...] accesses below throw if a meta document is
  // missing - presumably these records always exist; confirm.
  return {
    hostname: os.hostname() || "Unknown",
    runtimeMode: process.env.NODE_ENV || "Unknown",
    updateTimes: {
      timetable: (timetable[0]["lastUpdate"]),
      pis: pis[0]["lastUpdate"],
      corpus: corpus[0]["updated_time"],
      stations: stations[0]["lastUpdate"],
    },
    dbLengths: {
      users: lengthUsers,
      registrations: lengthRegistrations,
      corpus: lengthCorpus,
      stations: lengthStations,
      pis: lengthPis,
      timetable: lengthTimetable,
    },
  };
}
|
||||
|
||||
module.exports = {
|
||||
hits
|
||||
}
|
||||
hits,
|
||||
statistics,
|
||||
getVersions,
|
||||
};
|
||||
|
196
src/services/trainService.services.ts
Normal file
196
src/services/trainService.services.ts
Normal file
@ -0,0 +1,196 @@
|
||||
import { logger } from "../utils/logger.utils";
|
||||
import { findByTiplocArray, supported } from "./pis.services";
|
||||
import { queryAggregate } from "./dbAccess.services";
|
||||
import {
|
||||
getFindByHeadcodePipeline,
|
||||
getFindByTrainUidPipeline,
|
||||
} from "../utils/trainService.utils";
|
||||
import { removeNonAlphanumeric } from "../utils/sanitizer.utils";
|
||||
import { formatTimetableDetail } from "../utils/processors/timetable/timetableProcessor.utils";
|
||||
|
||||
import type {
|
||||
TrainServices,
|
||||
Service,
|
||||
Stop,
|
||||
SimpleService,
|
||||
OB_Pis_SimpleObject,
|
||||
} from "@owlboard/ts-types";
|
||||
|
||||
export async function findByHeadcode(
|
||||
headcode: string,
|
||||
date: Date | string
|
||||
): Promise<SimpleService[]> {
|
||||
const sanitizedHeadcode = removeNonAlphanumeric(headcode);
|
||||
logger.debug(
|
||||
`trainServices.findByHeadcode: Searching for trains by headcode: ${headcode}`
|
||||
);
|
||||
|
||||
// If 'now' then generate a new Date now, else use the provided date, then set time to 1200.
|
||||
const searchDate = date === "now" ? new Date() : new Date(date);
|
||||
searchDate.setHours(12, 0, 0);
|
||||
|
||||
// Get the 'shortDay'
|
||||
const shortDay = getShortDay(searchDate);
|
||||
|
||||
const query = {
|
||||
headcode: sanitizedHeadcode.toUpperCase(),
|
||||
daysRun: { $in: [shortDay] },
|
||||
scheduleStartDate: { $lte: searchDate },
|
||||
scheduleEndDate: { $gte: searchDate },
|
||||
};
|
||||
const pipeline = getFindByHeadcodePipeline(query);
|
||||
|
||||
const result: SimpleService[] = (await queryAggregate(
|
||||
"timetable",
|
||||
pipeline
|
||||
)) as SimpleService[];
|
||||
|
||||
const services = filterServices(result);
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
export async function findByTrainUid(
|
||||
uid: string,
|
||||
date: Date | string = new Date()
|
||||
) {
|
||||
// Set the correct date - whether a date or "now" was passed to function
|
||||
let queryDate: Date;
|
||||
if (date === 'now') {
|
||||
queryDate = new Date();
|
||||
} else if (date instanceof Date) {
|
||||
queryDate = date;
|
||||
} else {
|
||||
queryDate = new Date(date);
|
||||
}
|
||||
|
||||
// Build query
|
||||
const query = {
|
||||
trainUid: uid.toUpperCase(),
|
||||
daysRun: { $in: [getShortDay(queryDate)] },
|
||||
scheduleStartDate: { $lte: queryDate },
|
||||
scheduleEndDate: { $gte: queryDate },
|
||||
};
|
||||
const pipeline = getFindByTrainUidPipeline(query);
|
||||
|
||||
const result = (await queryAggregate("timetable", pipeline)) as Service[];
|
||||
|
||||
let services = filterServices(result) as Service[];
|
||||
|
||||
// Check if the operator is on the supported TOC list for PIS Codes - if so, call the fetchPisCode function.
|
||||
let pis: OB_Pis_SimpleObject | null;
|
||||
if (supported.includes(services[0]?.operator)) {
|
||||
pis = await fetchPisCode(services[0]?.stops);
|
||||
} else {
|
||||
pis = null;
|
||||
}
|
||||
return formatTimetableDetail(services[0], pis);
|
||||
}
|
||||
|
||||
// Internal Functions:
|
||||
|
||||
// Filters out non-passenger stops and then uses the stop array to request a PIS code for the service
|
||||
async function fetchPisCode(
|
||||
stops: Stop[]
|
||||
): Promise<OB_Pis_SimpleObject | null> {
|
||||
let tiplocList: string[] = [];
|
||||
for (const stop in stops) {
|
||||
if (stops[stop]["isPublic"]) tiplocList.push(stops[stop]["tiploc"]);
|
||||
}
|
||||
// Check if no public stops - then it should use an ECS headcode
|
||||
let pisData: OB_Pis_SimpleObject | null;
|
||||
if (tiplocList.length) {
|
||||
pisData = await findByTiplocArray(tiplocList);
|
||||
} else {
|
||||
pisData = {
|
||||
toc: "GW",
|
||||
skipCount: 0,
|
||||
code: randomEcsPis(),
|
||||
};
|
||||
}
|
||||
if (!pisData) {
|
||||
logger.debug(tiplocList, "No PIS found for service")
|
||||
}
|
||||
return pisData;
|
||||
}
|
||||
|
||||
// Picks a random choice of the ECS PIS Codes
|
||||
function randomEcsPis(): string {
|
||||
const options = ["0015", "9997"];
|
||||
const randomValue = Math.floor(Math.random() * 2);
|
||||
return options[randomValue];
|
||||
}
|
||||
|
||||
// Outputs the standard 'shortday' string from a Date.
|
||||
function getShortDay(day: Date): string {
|
||||
const dayMap = ["su", "m", "t", "w", "th", "f", "s"];
|
||||
const shortDay = dayMap[day.getDay()];
|
||||
return shortDay;
|
||||
}
|
||||
|
||||
// Filters services using their STP indicator so that over-riding entries are returned correctly
|
||||
function filterServices(services: SimpleService[]): SimpleService[] {
|
||||
let stpIndicators: Record<
|
||||
string,
|
||||
{ hasC: boolean; hasN: boolean; hasO: boolean; hasP: boolean }
|
||||
> = {};
|
||||
let filteredServices: SimpleService[] = [];
|
||||
|
||||
for (const service of services) {
|
||||
const trainUid = service["trainUid"],
|
||||
stpIndicator = service["stpIndicator"];
|
||||
|
||||
// Creates the stpIndicators array:
|
||||
if (!stpIndicators[trainUid]) {
|
||||
stpIndicators[trainUid] = {
|
||||
hasC: false,
|
||||
hasN: false,
|
||||
hasO: false,
|
||||
hasP: false,
|
||||
};
|
||||
}
|
||||
|
||||
if (stpIndicator === "C") {
|
||||
stpIndicators[trainUid].hasC = true;
|
||||
}
|
||||
if (stpIndicator === "N") {
|
||||
stpIndicators[trainUid].hasN = true;
|
||||
}
|
||||
if (stpIndicator === "O") {
|
||||
stpIndicators[trainUid].hasO = true;
|
||||
}
|
||||
if (stpIndicator === "P") {
|
||||
stpIndicators[trainUid].hasP = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Iterate each service, and only output one service matching each trainUid,
|
||||
// C > N > O > P is the order, with C being prioritised over other STP types.
|
||||
for (const service of services) {
|
||||
const trainUid = service["trainUid"];
|
||||
const thisStpIndicators = stpIndicators[trainUid];
|
||||
const stpIndicator = service["stpIndicator"];
|
||||
|
||||
if (stpIndicator === "C") {
|
||||
filteredServices.push(service);
|
||||
} else if (stpIndicator === "N" && !thisStpIndicators.hasC) {
|
||||
filteredServices.push(service);
|
||||
} else if (
|
||||
stpIndicator === "O" &&
|
||||
!thisStpIndicators.hasC &&
|
||||
!thisStpIndicators.hasN
|
||||
) {
|
||||
filteredServices.push(service);
|
||||
} else if (
|
||||
stpIndicator === "P" &&
|
||||
!thisStpIndicators.hasC &&
|
||||
!thisStpIndicators.hasN &&
|
||||
!thisStpIndicators.hasO
|
||||
) {
|
||||
filteredServices.push(service);
|
||||
}
|
||||
}
|
||||
return filteredServices;
|
||||
}
|
||||
|
||||
// Local Types:
|
17
src/types/index.d.ts
vendored
Normal file
17
src/types/index.d.ts
vendored
Normal file
@ -0,0 +1,17 @@
|
||||
// src/types/express/index.d.ts
|
||||
|
||||
// to make the file a module and avoid the TypeScript error
|
||||
export {};
|
||||
|
||||
declare global {
|
||||
namespace Express {
|
||||
export interface Request {
|
||||
isAuthed: boolean;
|
||||
}
|
||||
|
||||
export interface Response {
|
||||
cacheType: string;
|
||||
cacheSecs: number;
|
||||
}
|
||||
}
|
||||
}
|
82
src/utils/auth.utils.ts
Normal file
82
src/utils/auth.utils.ts
Normal file
@ -0,0 +1,82 @@
|
||||
const crypt = require("crypto");
|
||||
const db = require("../services/dbAccess.services");
|
||||
const fs = require("fs/promises");
|
||||
|
||||
import { minifyMail } from "./minify.utils";
|
||||
import { logger } from "./logger.utils";
|
||||
|
||||
// Checks users registration key against issued keys
|
||||
async function isAuthed(uuid: string): Promise<boolean> {
|
||||
// Needs testing
|
||||
const q = {
|
||||
uuid: uuid,
|
||||
};
|
||||
const res = await db.query("users", q);
|
||||
logger.debug(res, "checkUser: DB Query Result");
|
||||
const authorized = res && res[0] && res[0].domain;
|
||||
if (authorized) db.userAtime(uuid);
|
||||
return authorized;
|
||||
}
|
||||
|
||||
// Checks whether a registration request key is valid
|
||||
async function checkRequest(key: string) {
|
||||
const collection = "registrations";
|
||||
const query = { uuid: key };
|
||||
const res = await db.query(collection, query);
|
||||
logger.debug(res, "checkRequest: DB Lookup result");
|
||||
const result =
|
||||
res.length > 0 && res[0].time
|
||||
? { result: true, domain: res[0].domain }
|
||||
: { result: false };
|
||||
return result;
|
||||
}
|
||||
|
||||
// Creates an API key for a user
|
||||
async function generateKey() {
|
||||
return crypt.randomUUID();
|
||||
}
|
||||
|
||||
export function generateCode(): string {
|
||||
const characters = 'ABCDEFGHJKLMNPQRSTUVWXYZ23456789';
|
||||
const codeLength = 6;
|
||||
|
||||
let code = '';
|
||||
const bytes = crypt.randomBytes(codeLength); // Generate random bytes
|
||||
for (let i = 0; i < codeLength; i++) {
|
||||
const randomIndex = bytes[i] % characters.length; // Map bytes to characters
|
||||
code += characters.charAt(randomIndex);
|
||||
}
|
||||
|
||||
return code;
|
||||
}
|
||||
|
||||
// Renders the registration confirmation email (HTML + plain text) for the
// given address, substituting the registration code `uuid` for the template
// placeholder "987654".
// Returns a mail message object ({to, subject, text, html}), or false if
// reading/rendering the templates fails.
async function generateConfirmationEmail(eml: string, uuid: string) {
  try {
    const htmlTpl = await fs.readFile("mail-templates/register.html", "utf-8");
    const htmlStr = htmlTpl.replace(/987654/g, uuid);
    const htmlMin = await minifyMail(htmlStr);
    // Deliberately not awaited here: the read runs while the message object
    // is assembled and is awaited below.
    const txtTpl = fs.readFile("mail-templates/register.txt", "utf-8");
    return {
      to: eml,
      subject: "OwlBoard Registration",
      text: (await txtTpl).replace(/987654/g, uuid),
      html: htmlMin,
    };
  } catch (err) {
    logger.error(
      err,
      "generateConfirmationEmail: Error rendering email templates"
    );
    return false;
  }
}
|
||||
|
||||
module.exports = {
|
||||
isAuthed,
|
||||
generateKey,
|
||||
generateConfirmationEmail,
|
||||
checkRequest,
|
||||
generateCode
|
||||
};
|
||||
|
||||
export { isAuthed, generateKey, generateConfirmationEmail, checkRequest };
|
9
src/utils/cacheHeader.utils.ts
Normal file
9
src/utils/cacheHeader.utils.ts
Normal file
@ -0,0 +1,9 @@
|
||||
import type { Response } from "express"
|
||||
|
||||
export function setCache(res: Response, type="private", time=120): void {
|
||||
if (type === "no-store") {
|
||||
res.setHeader('Cache-Control', 'no-store')
|
||||
return
|
||||
}
|
||||
res.setHeader('Cache-Control', `${type}, max-age=${time}`)
|
||||
}
|
@ -1,90 +0,0 @@
|
||||
// FUNCTIONS
|
||||
// init() : Exported: Uses the internal functions to initialise databases.
|
||||
// check() : Checks data presence and age.
|
||||
// build() : Builds/Rebuilds collections.
|
||||
|
||||
const log = require('../utils/log.utils'); // Log Helper
|
||||
const time = require('../utils/timeConvert.utils'); // Time Helper
|
||||
const corpus = require('../services/corpus.services');
|
||||
const dbAccess = require('../services/dbAccess.services');
|
||||
|
||||
async function init(){
|
||||
var status = await check('corpus');
|
||||
if (status == "not_ready") {
|
||||
try {
|
||||
await build("corpus")
|
||||
} catch (err) {
|
||||
log.out("dbInitUtils.init: Error building corpus database")
|
||||
log.out(err)
|
||||
}
|
||||
}
|
||||
|
||||
var status = await check('stations')
|
||||
if (status == "not_ready") {
|
||||
try {
|
||||
await build("stations")
|
||||
} catch (err) {
|
||||
log.out("dbInitUtils.init: Error building stations database")
|
||||
log.out(err)
|
||||
}
|
||||
}
|
||||
indexes();
|
||||
dbAccess.createCount();
|
||||
}
|
||||
|
||||
async function check(coll){
|
||||
log.out(`dbInitUtils.check: Checking collection '${coll}'`)
|
||||
try {
|
||||
var queryStr = {'type':'collection','target': coll};
|
||||
var res = await dbAccess.query('meta',queryStr);
|
||||
log.out(`dbInitUtils.check: Last update of ${coll}: ${time.unixLocal(res['0']['updated'])}`)
|
||||
var now = time.jsUnix(Date.now())
|
||||
var delta = now - res['0']['updated']
|
||||
} catch (err) {
|
||||
log.out(`dbInitUtils.check: Unable to find out data age. Presume stale. Error Message:`)
|
||||
log.out(err)
|
||||
var delta = 12096000 // Extra zero to ensure data is updated.
|
||||
}
|
||||
|
||||
var maxAge = 1209600 // 14 Days
|
||||
if (delta > maxAge) {
|
||||
log.out(`dbInitUtils.check: '${coll}' data older than max age ${maxAge} seconds. Update pending`)
|
||||
return "not_ready"
|
||||
} else {
|
||||
log.out(`dbInitUtils.check: '${coll}' data newer than max age ${maxAge} seconds. Update not required`)
|
||||
return "ready"
|
||||
}
|
||||
}
|
||||
|
||||
async function build(db){ // `db` must be one of: `corpus`, `stations`, `all`.
|
||||
log.out("dbInitUtils.build: Building database structure")
|
||||
var corpusAll = await corpus.get();
|
||||
if (db === "corpus") {
|
||||
await dbAccess.dropCollection("corpus");
|
||||
dbAccess.putCorpus(corpusAll);
|
||||
|
||||
log.out(`dbInitUtils.build: Updating corpus meta`);
|
||||
dbAccess.updateMeta("collection", "corpus", time.jsUnix(Date.now()));
|
||||
}
|
||||
if (db === "stations") {
|
||||
await dbAccess.dropCollection("stations");
|
||||
var corpusSubset = await corpus.subset(corpusAll);
|
||||
dbAccess.putStations(corpusSubset);
|
||||
|
||||
log.out(`dbInitUtils.build: Updating stations meta`);
|
||||
dbAccess.updateMeta("collection", "stations", time.jsUnix(Date.now()));
|
||||
}
|
||||
}
|
||||
|
||||
async function indexes() {
|
||||
dbAccess.ensureIndex("corpus", "NLC");
|
||||
dbAccess.ensureIndex("corpus", "3ALPHA");
|
||||
dbAccess.ensureIndex("stations", "3ALPHA");
|
||||
dbAccess.ensureIndex("stations", "STANOX");
|
||||
dbAccess.ensureIndex("stations", "TIPLOC");
|
||||
dbAccess.ensureIndex("corpus", "NLCDESC", "text")
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
init
|
||||
}
|
@ -1,43 +1,93 @@
|
||||
const log = require('../utils/log.utils'); // Log Helper
|
||||
const db = require('../services/dbAccess.services') // DB Access
|
||||
const san = require('../utils/sanitizer.utils') // Sanitiser
|
||||
const log = require("./logs.utils"); // Log Helper
|
||||
const db = require("../services/dbAccess.services"); // DB Access
|
||||
//const san = require('../utils/sanitizer.utils'); // Sanitiser
|
||||
|
||||
async function checkCrs(input){
|
||||
var INPUT = input.toUpperCase()
|
||||
log.out(`ldbUtils.checkCrs: Building database query to find: '${INPUT}'`)
|
||||
var query = {'$or':[{'3ALPHA':INPUT},{'TIPLOC':INPUT},{'STANOX':INPUT}]};
|
||||
var result = await db.query("stations", query)
|
||||
log.out(`ldbUtils.checkCrs: Query results: ${JSON.stringify(result)}`)
|
||||
return result
|
||||
import * as san from "../utils/sanitizer.utils";
|
||||
|
||||
// Looks up a station by 3ALPHA, TIPLOC or STANOX code (case-insensitive).
// Returns the raw query result array.
async function checkCrs(input = "") {
  const code = input.toUpperCase();
  const query = {
    $or: [{ "3ALPHA": code }, { TIPLOC: code }, { STANOX: code }],
  };
  const result = await db.query("stations", query);
  log.out(
    "ldbUtils.checkCrs: Query results: " + JSON.stringify(result),
    "dbug"
  );
  return result;
}
|
||||
|
||||
async function cleanMessages(input){
|
||||
var out = []
|
||||
if (typeof input.message == "string") {
|
||||
out.push(await san.cleanNrcc(input.message))
|
||||
} else if (typeof input.message == "object") {
|
||||
for(var i = 0; i < input.message.length; i++) {
|
||||
out.push(await san.cleanNrcc(input.message[i]))
|
||||
}
|
||||
// Needs to be moved to the frontend `ensureArray() func`
|
||||
// Usage of this function should be migrated to the `translator` utilities.
|
||||
async function cleanMessages(input) {
|
||||
log.out("ldbUtils.cleanMessages: Deprecated function has been called", "err");
|
||||
var out = [];
|
||||
if (typeof input.message == "string") {
|
||||
out.push(san.cleanNrcc(input.message));
|
||||
} else if (typeof input.message == "object") {
|
||||
for (var i = 0; i < input.message.length; i++) {
|
||||
out.push(san.cleanNrcc(input.message[i]));
|
||||
}
|
||||
return out;
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
// Accepts an object but not an Array and returns it wrapped in an array.
|
||||
async function cleanServices(input){
|
||||
var out = []
|
||||
if (!Array.isArray(input)) {
|
||||
log.out(`ldbUtils.cleanServices: Transforming input: ${input}`)
|
||||
out.push(input)
|
||||
log.out(`ldbUtils.cleanServices: Returning output: ${out}`)
|
||||
return out;
|
||||
} else {
|
||||
return input;
|
||||
async function cleanServices(input) {
|
||||
log.out("ldbUtils.cleanServices: Deprecated function has been called", "err");
|
||||
var out = [];
|
||||
if (!Array.isArray(input)) {
|
||||
log.out(`ldbUtils.cleanServices: Transforming input: ${input}`, "dbug");
|
||||
out.push(input);
|
||||
log.out(`ldbUtils.cleanServices: Returning output: ${out}`, "dbug");
|
||||
return out;
|
||||
} else {
|
||||
return input;
|
||||
}
|
||||
}
|
||||
|
||||
// Normalises a GetStationBoardResult so each service category's `service`
// field is always an array (the upstream feed yields a bare object when a
// category contains a single service). Returns the (mutated) input.
async function cleanData(input) {
  const labels = {
    trainServices: "train",
    busServices: "bus",
    ferryServices: "ferry",
  };
  try {
    for (const category of Object.keys(labels)) {
      const group = input?.GetStationBoardResult?.[category];
      if (group) {
        log.out(
          `ldbUtils.cleanData: Changing ${labels[category]} service data to array`,
          "dbug"
        );
        group.service = await ensureArray(group.service);
      }
    }
  } catch (err) {
    log.out(`ldbUtils.cleanData: Error: ${err}`, "eror");
  }
  return input;
}
|
||||
|
||||
// Wraps a non-array value in a single-element array; arrays pass through
// untouched.
async function ensureArray(data) {
  return Array.isArray(data) ? data : [data];
}
|
||||
|
||||
module.exports = {
|
||||
checkCrs,
|
||||
cleanMessages,
|
||||
cleanServices
|
||||
}
|
||||
checkCrs,
|
||||
cleanMessages,
|
||||
cleanServices,
|
||||
cleanData,
|
||||
};
|
||||
|
47
src/utils/ldbPipeline.utils.ts
Normal file
47
src/utils/ldbPipeline.utils.ts
Normal file
@ -0,0 +1,47 @@
|
||||
// Builds an aggregation pipeline returning the `count` stations nearest to
// the given coordinates, each with its distance in miles.
// latitude/longitude arrive as strings (from query parameters) and are
// parsed to numbers; note GeoJSON order is [longitude, latitude].
export function findStationsByDistancePipeline(count: number, latitude: string, longitude: string) {
  const numericLatitude = parseFloat(latitude)
  const numericLongitude = parseFloat(longitude)
  const pipeline = [
    {
      // $geoNear must be the first stage; `distance` is emitted in metres.
      '$geoNear': {
        'near': {
          'type': 'Point',
          'coordinates': [
            numericLongitude, numericLatitude
          ]
        },
        'distanceField': 'distance'
      }
    }, {
      '$limit': count
    }, {
      // miles = round(distance / 1609.34 * 4) / 4, i.e. metres converted to
      // miles and rounded to the nearest quarter mile.
      // NOTE(review): $round is used here in its single-expression (object)
      // form, rounding to 0 decimal places - confirm this matches the
      // intended quarter-mile precision.
      '$addFields': {
        'miles': {
          '$divide': [
            {
              '$round': {
                '$multiply': [
                  {
                    '$divide': [
                      '$distance', 1609.34
                    ]
                  }, 4
                ]
              }
            }, 4
          ]
        }
      }
    }, {
      '$project': {
        '_id': 0,
        '3ALPHA': 1,
        'NLCDESC': 1,
        'miles': 1
      }
    }
  ]

  //console.log(JSON.stringify(pipeline))
  return pipeline
}
|
@ -1,8 +0,0 @@
|
||||
function out(msg) {
|
||||
var time = new Date().toISOString();
|
||||
console.log(`${time} - ${msg}`)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
out
|
||||
}
|
19
src/utils/logger.utils.ts
Normal file
19
src/utils/logger.utils.ts
Normal file
@ -0,0 +1,19 @@
|
||||
import pino from "pino";
|
||||
|
||||
const runtime = process.env.NODE_ENV;
|
||||
let level: string;
|
||||
if (runtime === "production") {
|
||||
level = "info";
|
||||
} else {
|
||||
level = "debug";
|
||||
}
|
||||
|
||||
export const logger = pino({
|
||||
level: level,
|
||||
formatters: {
|
||||
level: (label) => {
|
||||
return { level: label.toUpperCase() };
|
||||
},
|
||||
},
|
||||
timestamp: pino.stdTimeFunctions.isoTime,
|
||||
});
|
21
src/utils/logs.utils.ts
Normal file
21
src/utils/logs.utils.ts
Normal file
@ -0,0 +1,21 @@
|
||||
const environment: string = process.env.NODE_ENV || "Unknown";
|
||||
|
||||
const hideInProduction: string[] = ["info", "dbug"];
|
||||
|
||||
async function out(msg: string, level = "othr") {
|
||||
if (
|
||||
environment === "production" &&
|
||||
hideInProduction.includes(level.toLowerCase())
|
||||
) {
|
||||
return;
|
||||
} else {
|
||||
const time = new Date().toISOString();
|
||||
console.log(`${time} - ${level.toUpperCase()} - ${msg}`);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
out,
|
||||
};
|
||||
|
||||
export { out };
|
17
src/utils/minify.utils.ts
Normal file
17
src/utils/minify.utils.ts
Normal file
@ -0,0 +1,17 @@
|
||||
import { logger } from "./logger.utils";
|
||||
|
||||
const htmlShrink = require("html-minifier").minify;
|
||||
const juice = require("juice");
|
||||
|
||||
// Inlines styles and minifies the inlined HTML
|
||||
// Inlines styles and minifies the inlined HTML for email delivery.
// `juice` inlines stylesheet rules into the markup, then html-minifier
// strips comments and collapses whitespace.
// Returns the minified HTML string.
async function minifyMail(input: string): Promise<string> {
  logger.trace("minifyMail: Minifying mail output");
  const inlined: string = juice(input);
  return htmlShrink(inlined, {
    removeComments: true,
    collapseWhitespace: true,
  });
}
||||
|
||||
module.exports = { minifyMail };
|
||||
export { minifyMail };
|
13
src/utils/newSanitizer.ts
Normal file
13
src/utils/newSanitizer.ts
Normal file
@ -0,0 +1,13 @@
|
||||
import { logger } from "./logger.utils";
|
||||
|
||||
export function removeNewlineAndPTag(input: string): string {
|
||||
logger.debug("removeNewlineAndPTag: Cleaning string");
|
||||
const regex = /[\n\r]|<\/?p[^>]*>/g;
|
||||
return input.replace(regex, function (match) {
|
||||
if (match === "\n" || match === "\r") {
|
||||
return "";
|
||||
} else {
|
||||
return "";
|
||||
}
|
||||
});
|
||||
}
|
157
src/utils/pis.utils.ts
Normal file
157
src/utils/pis.utils.ts
Normal file
@ -0,0 +1,157 @@
|
||||
export function getPartialEndTiplocMatchPipeline(query: string[]) {
|
||||
return [
|
||||
{
|
||||
$match: {
|
||||
tiplocs: {
|
||||
$all: query,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$addFields: {
|
||||
reversedTiplocs: {
|
||||
$reverseArray: "$tiplocs",
|
||||
},
|
||||
query: {
|
||||
$literal: query,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$addFields: {
|
||||
reversedQuery: {
|
||||
$reverseArray: "$query",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$match: {
|
||||
$expr: {
|
||||
$eq: [
|
||||
{
|
||||
$slice: [
|
||||
"$reversedTiplocs",
|
||||
0,
|
||||
{
|
||||
$size: "$reversedQuery",
|
||||
},
|
||||
],
|
||||
},
|
||||
"$reversedQuery",
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$addFields: {
|
||||
skipStops: {
|
||||
$subtract: [
|
||||
{
|
||||
$size: "$tiplocs",
|
||||
},
|
||||
{
|
||||
$size: "$reversedQuery",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
$sort: {
|
||||
skipStops: 1,
|
||||
},
|
||||
},
|
||||
{
|
||||
$limit: 1,
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
code: 1,
|
||||
skipStops: 1,
|
||||
toc: 1,
|
||||
_id: 0,
|
||||
},
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
export function getPartialStartTiplocMatchPipeline(query: string[]) {
|
||||
return [
|
||||
{
|
||||
'$match': {
|
||||
'tiplocs': {
|
||||
'$all': query
|
||||
}
|
||||
}
|
||||
}, {
|
||||
'$addFields': {
|
||||
'query': query
|
||||
}
|
||||
}, {
|
||||
'$match': {
|
||||
'$expr': {
|
||||
'$eq': [
|
||||
{
|
||||
'$slice': [
|
||||
'$tiplocs', {
|
||||
'$size': '$query'
|
||||
}
|
||||
]
|
||||
}, '$query'
|
||||
]
|
||||
}
|
||||
}
|
||||
}, {
|
||||
'$addFields': {
|
||||
'skipStops': {
|
||||
'$subtract': [
|
||||
{
|
||||
'$size': '$tiplocs'
|
||||
}, {
|
||||
'$size': '$query'
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}, {
|
||||
'$sort': {
|
||||
'skipStops': 1
|
||||
}
|
||||
}, {
|
||||
'$limit': 1
|
||||
}, {
|
||||
'$project': {
|
||||
'code': 1,
|
||||
'skipStops': 1,
|
||||
'toc': 1,
|
||||
'_id': 0
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
export function getFullTiplocMatchPipeline(query: string[]) {
|
||||
return [
|
||||
{
|
||||
$match: {
|
||||
tiplocs: query,
|
||||
},
|
||||
},
|
||||
{
|
||||
$limit: 1,
|
||||
},
|
||||
{
|
||||
$addFields: {
|
||||
skipStops: 0,
|
||||
},
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
code: 1,
|
||||
toc: 1,
|
||||
skipStops: 1,
|
||||
_id: 0,
|
||||
},
|
||||
},
|
||||
];
|
||||
}
|
7
src/utils/processors/README.md
Normal file
7
src/utils/processors/README.md
Normal file
@ -0,0 +1,7 @@
|
||||
# Translators
|
||||
|
||||
The utilities in the `processors` folder translate the upstream API into the downstream API.
|
||||
|
||||
The aim of the translators is to ensure a consistent data format while removing any unused data to keep the response sizes as small as possible.
|
||||
|
||||
Translators are kept in separate files so changes can be made in one place. Each translator exports a single function 'transform()'. This function accepts data from the upstream API and uses other functions in the file to build the API response object before returning that object to the caller.
|
220
src/utils/processors/ldb/staffStation.ts
Normal file
220
src/utils/processors/ldb/staffStation.ts
Normal file
@ -0,0 +1,220 @@
|
||||
import type {
|
||||
StaffLdb,
|
||||
NrccMessage,
|
||||
TrainServices,
|
||||
ServiceLocation,
|
||||
} from "@owlboard/ts-types";
|
||||
|
||||
import { tz } from "moment-timezone";
|
||||
import { removeNewlineAndPTag } from "../../newSanitizer";
|
||||
|
||||
import { logger } from "../../logger.utils";
|
||||
|
||||
/// I do not yet have a type defined for any of the input object
|
||||
// Translates an upstream GetBoardResult payload into a StaffLdb object.
// Returns null when translation throws or the location was not found
// (locationName falls back to "Not Found" when absent upstream).
export function transform(input: any): StaffLdb | null {
  console.time("StaffLdb Transformation"); // timing emitted to stdout
  const data = input.GetBoardResult;
  let output: StaffLdb;
  try {
    output = {
      // Fall back to "now" when the upstream timestamp is missing/invalid.
      generatedAt: transformDateTime(data?.generatedAt) || new Date(),
      locationName: data?.locationName || "Not Found",
      stationManagerCode: data?.stationManagerCode || "UK",
      nrccMessages: transformNrcc(data?.nrccMessages) || undefined,
      trainServices: transformTrainServices(data?.trainServices) || undefined,
      busServices: transformTrainServices(data?.busServices) || undefined,
      ferryServices: transformTrainServices(data?.ferryServices) || undefined,
    };
    console.timeEnd("StaffLdb Transformation");
    if (output.locationName !== "Not Found") {
      return output;
    }
  } catch (err) {
    logger.error(err, "utils/translators/ldb/staffLdb.transform");
  }
  // NOTE(review): on the "Not Found" path timeEnd has already fired above,
  // so this second call logs a warning - confirm intended.
  console.timeEnd("StaffLdb Transformation");
  return null;
}
|
||||
|
||||
function transformDateTime(input: string): Date {
|
||||
logger.trace("utils/translators/ldb/staffLdb.transformDateTime: Running");
|
||||
return new Date(input);
|
||||
}
|
||||
|
||||
function transformNrcc(input: any): NrccMessage[] | undefined {
|
||||
logger.trace("utils/translators/ldb/staffLdb.transformNrcc: Running");
|
||||
if (input === undefined) {
|
||||
return input;
|
||||
}
|
||||
let output: NrccMessage[] = [];
|
||||
let messages = input;
|
||||
if (!Array.isArray(input?.message)) {
|
||||
messages = [input?.message];
|
||||
}
|
||||
if (messages.length) {
|
||||
for (const item of messages) {
|
||||
let message: NrccMessage = {
|
||||
severity: item?.severity,
|
||||
xhtmlMessage: removeNewlineAndPTag(item?.xhtmlMessage),
|
||||
};
|
||||
output.push(message);
|
||||
}
|
||||
return output;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// Converts the upstream service list (train/bus/ferry) into TrainServices[].
// A single service arrives as a bare object and is wrapped in an array;
// undefined input yields an empty array. Keys whose value is undefined are
// removed from each output object to keep responses small.
function transformTrainServices(input: any): TrainServices[] {
  logger.trace(
    "utils/translators/ldb/staffLdb.transformTrainServices: Running"
  );
  let services: any = input?.service;
  let output: TrainServices[] = [];
  if (services === undefined) {
    return output;
  }
  if (!Array.isArray(input.service)) {
    services = [input.service];
  }
  for (const service of services) {
    // parseTimes/calculateLength are defined elsewhere in this module.
    const times = parseTimes(service);
    const trainService: TrainServices = {
      rid: service?.rid,
      uid: service?.uid,
      trainid: service?.trainid,
      // Fallbacks: unknown operator becomes "UK", unknown platform "-".
      operatorCode: service?.operatorCode || "UK",
      platform: service?.platform || "-",
      platformIsHidden: service?.platformIsHidden,
      serviceIsSupressed: checkIsSupressed(service),
      origin: transformLocation(service?.origin),
      destination: transformLocation(service?.destination),
      length: calculateLength(service),
      isCancelled: service?.isCancelled,
      cancelReason: service?.cancelReason,
      delayReason: service?.delayReason,
      arrivalType: service?.arrivalType,
      departureType: service?.departureType,
      sta: times.sta,
      eta: times.eta,
      ata: times.ata,
      std: times.std,
      etd: times.etd,
      atd: times.atd,
    };
    // Strip undefined-valued keys so they are omitted from JSON output.
    Object.keys(trainService).forEach(
      (key) => trainService[key] === undefined && delete trainService[key]
    );
    output.push(trainService);
  }
  return output;
}
|
||||
|
||||
function checkIsSupressed(service: TrainServices): string | undefined {
|
||||
logger.trace("utils/translators/ldb/staffStation.checkIsSupressed: Running");
|
||||
if (
|
||||
service.serviceIsSupressed === "true" ||
|
||||
service.isPassengerService === "false"
|
||||
) {
|
||||
return "true";
|
||||
} else {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
function transformLocation(input: any): ServiceLocation[] {
|
||||
logger.trace("utils/translators/ldb/staffStation.transformLocation: Running");
|
||||
let output: ServiceLocation[] = [];
|
||||
let locations: any[] = input.location;
|
||||
if (!Array.isArray(input.location)) {
|
||||
locations = [input.location];
|
||||
}
|
||||
for (const item of locations) {
|
||||
const location: ServiceLocation = {
|
||||
tiploc: item?.tiploc,
|
||||
};
|
||||
if (item?.via) {
|
||||
location.via = item.via;
|
||||
}
|
||||
output.push(location);
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
export function calculateLength(input: any): number | undefined {
|
||||
logger.trace("utils/translators/ldb/staffStation.calculateLength: Running");
|
||||
let length: number;
|
||||
if (input?.length) {
|
||||
length = input.length;
|
||||
return Number(length);
|
||||
}
|
||||
if (input?.formation?.coaches?.coach) {
|
||||
length = input.formation.coaches.coach.length;
|
||||
return Number(length);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function transformUnspecifiedDateTime(input: string): Date | undefined {
|
||||
logger.trace(
|
||||
"utils/translators/ldb/staffStation.transformUnspecifiedDateTime: Running"
|
||||
);
|
||||
if (!input) {
|
||||
return undefined;
|
||||
}
|
||||
const date = tz(input, "Europe/London"); // Want to be creating a moment object using moment.tz(...)
|
||||
return date.toDate();
|
||||
}
|
||||
|
||||
function parseTimes(service: TrainServices) {
|
||||
logger.trace("utils/translators/ldb/staffStation.parseTimes: Running");
|
||||
let { sta, eta, ata, std, etd, atd } = Object.fromEntries(
|
||||
Object.entries(service).map(([key, value]) => [
|
||||
key,
|
||||
transformUnspecifiedDateTime(value),
|
||||
])
|
||||
);
|
||||
|
||||
let etaResult: Date | undefined | string = eta;
|
||||
let ataResult: Date | undefined | string = ata;
|
||||
let etdResult: Date | undefined | string = etd;
|
||||
let atdResult: Date | undefined | string = atd;
|
||||
|
||||
if (sta) {
|
||||
if (
|
||||
eta !== undefined &&
|
||||
Math.abs(eta.getTime() - sta.getTime()) / 60000 <= 1.5
|
||||
) {
|
||||
etaResult = "RT";
|
||||
}
|
||||
if (
|
||||
ata !== undefined &&
|
||||
Math.abs(ata.getTime() - sta.getTime()) / 60000 <= 1.5
|
||||
) {
|
||||
ataResult = "RT";
|
||||
}
|
||||
}
|
||||
|
||||
if (std) {
|
||||
if (
|
||||
etd !== undefined &&
|
||||
Math.abs(etd.getTime() - std.getTime()) / 60000 <= 1.5
|
||||
) {
|
||||
etdResult = "RT";
|
||||
}
|
||||
if (
|
||||
atd !== undefined &&
|
||||
Math.abs(atd.getTime() - std.getTime()) / 60000 <= 1.5
|
||||
) {
|
||||
atdResult = "RT";
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
sta: sta,
|
||||
eta: etaResult,
|
||||
ata: ataResult,
|
||||
std: std,
|
||||
etd: etdResult,
|
||||
atd: atdResult,
|
||||
};
|
||||
}
|
99
src/utils/processors/timetable/timetableProcessor.utils.ts
Normal file
99
src/utils/processors/timetable/timetableProcessor.utils.ts
Normal file
@ -0,0 +1,99 @@
|
||||
import type {
|
||||
Service,
|
||||
OB_TrainTT_service,
|
||||
OB_Pis_SimpleObject,
|
||||
OB_TrainTT_stopDetail,
|
||||
Stop,
|
||||
} from "@owlboard/ts-types";
|
||||
|
||||
export function formatTimetableDetail(
|
||||
service: Service,
|
||||
pis: OB_Pis_SimpleObject | null
|
||||
): OB_TrainTT_service {
|
||||
const formattedService: OB_TrainTT_service = {
|
||||
stpIndicator: service.stpIndicator,
|
||||
operator: service.operator,
|
||||
trainUid: service.trainUid,
|
||||
headcode: service.headcode,
|
||||
powerType: service.powerType,
|
||||
planSpeed: convertStringToNumber(service.planSpeed),
|
||||
scheduleStart: service.scheduleStartDate,
|
||||
scheduleEnd: service.scheduleEndDate,
|
||||
daysRun: service.daysRun,
|
||||
stops: formatStops(service.stops),
|
||||
serviceDetail: service.serviceDetail,
|
||||
};
|
||||
|
||||
if (pis) {
|
||||
formattedService.pis = pis;
|
||||
}
|
||||
|
||||
return formattedService;
|
||||
}
|
||||
|
||||
function formatStops(stops: Stop[]): OB_TrainTT_stopDetail[] {
|
||||
if (!stops) {
|
||||
return []
|
||||
}
|
||||
|
||||
if (!stops.length) {
|
||||
return []
|
||||
}
|
||||
|
||||
// Cleanly coerce Stop[] to OB_TrainTT_stopDetail[]
|
||||
const formattedStops: OB_TrainTT_stopDetail[] = [];
|
||||
|
||||
for (const stop of stops) {
|
||||
formattedStops.push(formatStopTimes(stop));
|
||||
}
|
||||
|
||||
return formattedStops;
|
||||
}
|
||||
|
||||
function formatStopTimes(stop: Stop): OB_TrainTT_stopDetail {
|
||||
// Cleanly converts a single stop to a stopdetail object
|
||||
let formattedStop: OB_TrainTT_stopDetail = {
|
||||
tiploc: stop.tiploc,
|
||||
isPublic: false,
|
||||
};
|
||||
if (stop.publicArrival) {
|
||||
formattedStop.publicArrival = stop.publicArrival;
|
||||
formattedStop.isPublic = true;
|
||||
}
|
||||
if (stop.publicDeparture) {
|
||||
formattedStop.publicDeparture = stop.publicDeparture;
|
||||
formattedStop.isPublic = true;
|
||||
}
|
||||
if (stop.wttArrival) {
|
||||
formattedStop.wttArrival = stop.wttArrival;
|
||||
}
|
||||
if (stop.wttDeparture) {
|
||||
formattedStop.wttDeparture = stop.wttDeparture;
|
||||
}
|
||||
|
||||
if (stop.platform) {
|
||||
formattedStop.platform = stop.platform;
|
||||
}
|
||||
|
||||
if (stop.pass) {
|
||||
formattedStop.pass = stop.pass;
|
||||
}
|
||||
|
||||
if (stop.arrLine) {
|
||||
formattedStop.arrLine = stop.arrLine;
|
||||
}
|
||||
|
||||
if (stop.depLine) {
|
||||
formattedStop.depLine = stop.depLine;
|
||||
}
|
||||
return formattedStop;
|
||||
}
|
||||
|
||||
function convertStringToNumber(str: string): number {
|
||||
const number = parseFloat(str);
|
||||
if (isNaN(number)) {
|
||||
return 0;
|
||||
} else {
|
||||
return number;
|
||||
}
|
||||
}
|
@ -1,45 +0,0 @@
|
||||
const clean = require('string-sanitizer-fix');
|
||||
const log = require('../utils/log.utils');
|
||||
|
||||
/*
|
||||
string.sanitize("a.bc@d efg#h"); // abcdefgh
|
||||
string.sanitize.keepSpace("a.bc@d efg#h"); // abcd efgh
|
||||
string.sanitize.keepUnicode("a.bc@d efg#hক"); // abcd efghক
|
||||
string.sanitize.addFullstop("a.bc@d efg#h"); // abcd.efgh
|
||||
string.sanitize.addUnderscore("a.bc@d efg#h"); // abcd_efgh
|
||||
string.sanitize.addDash("a.bc@d efg#h"); // abcd-efgh
|
||||
string.sanitize.removeNumber("@abcd efgh123"); // abcdefgh
|
||||
string.sanitize.keepNumber("@abcd efgh123"); // abcdefgh123
|
||||
string.addFullstop("abcd efgh"); // abcd.efgh
|
||||
string.addUnderscore("@abcd efgh"); // @abcd_efgh
|
||||
string.addDash("@abcd efgh"); // @abcd-efgh
|
||||
string.removeSpace("@abcd efgh"); // @abcdefgh
|
||||
*/
|
||||
|
||||
// Sanitizes a text API parameter using string-sanitizer-fix's
// sanitize.keepSpace (per the example block above: strips punctuation but
// keeps spaces). Logs a warning when sanitizing altered the input, then
// returns the sanitized string.
function cleanApiEndpointTxt(input) {
  var output = clean.sanitize.keepSpace(input)
  if (output != input){
    log.out(`sanitizerUtils.cleanApiEndpoint: WARN: Sanitizing changed string. Input = ${input}`);
  }
  return output
}
|
||||
|
||||
// Sanitizes a numeric API parameter using string-sanitizer-fix's
// sanitize.keepNumber. NOTE(review): per the example block above,
// keepNumber keeps letters AND digits ("@abcd efgh123" -> "abcdefgh123"),
// not digits only — confirm this is the intended behavior.
// Logs a warning when sanitizing altered the input.
function cleanApiEndpointNum(input) {
  var output = clean.sanitize.keepNumber(input)
  if (output != input){
    log.out(`sanitizerUtils.cleanApiEndpointNum: WARN: Sanitizing changed string. Input = ${input}`);
  }
  return output
}
|
||||
|
||||
// Strips newlines/carriage returns and <p>/</p> tags from an NRCC message.
function cleanNrcc(input) {
  const withoutNewlines = input.replace(/[\n\r]/g, "");
  const withoutParagraphTags = withoutNewlines.replace(/<\/?p[^>]*>/g, "");
  return withoutParagraphTags;
}
|
||||
|
||||
module.exports = {
|
||||
cleanApiEndpointTxt,
|
||||
cleanApiEndpointNum,
|
||||
cleanNrcc
|
||||
}
|
53
src/utils/sanitizer.utils.ts
Normal file
53
src/utils/sanitizer.utils.ts
Normal file
@ -0,0 +1,53 @@
|
||||
import { logger } from "./logger.utils";
|
||||
|
||||
function removeNonAlphanumeric(inputString: string) {
|
||||
logger.debug("removeNonAlphanumeric: Sanitizing string");
|
||||
return inputString.replace(/[^a-zA-Z0-9]/g, "");
|
||||
}
|
||||
|
||||
function removeNonAlpha(inputString: string) {
|
||||
logger.debug("removeNonAlpha: Sanitizing string");
|
||||
return inputString.replace(/[^a-zA-Z]/g, "");
|
||||
}
|
||||
|
||||
function removeNonNumeric(inputString: string) {
|
||||
logger.debug("removeNonNumeric: Sanitizing string");
|
||||
return inputString.replace(/[^0-9]/g, "");
|
||||
}
|
||||
|
||||
const cleanApiEndpointTxt = removeNonAlpha;
|
||||
const cleanApiEndpointNum = removeNonAlphanumeric;
|
||||
|
||||
function cleanNrcc(input: string) {
|
||||
logger.error("DEPRECATED FUNCTION", "cleanNrcc: Converting NRCC Data");
|
||||
// Remove newlines and then <p> tags from input
|
||||
const cleanInput = input.replace(/[\n\r]/g, "").replace(/<\/?p[^>]*>/g, "");
|
||||
return cleanInput;
|
||||
}
|
||||
|
||||
// Returns the lower-cased domain part of an email address.
// NOTE(review): if `mail` contains no "@", split[1] is undefined and
// .toLowerCase() throws a TypeError — confirm callers validate addresses
// first, or add a guard. Original author flagged "Needs testing".
function getDomainFromEmail(mail: string) {
  logger.debug("getDomainFromEmail: Obtaining domain from email address");
  // Needs testing
  let split = mail.split("@");
  return split[1].toLowerCase();
}
|
||||
|
||||
module.exports = {
|
||||
cleanApiEndpointTxt,
|
||||
cleanApiEndpointNum,
|
||||
removeNonAlpha,
|
||||
removeNonAlphanumeric,
|
||||
removeNonNumeric,
|
||||
cleanNrcc,
|
||||
getDomainFromEmail,
|
||||
};
|
||||
|
||||
export {
|
||||
cleanApiEndpointTxt,
|
||||
cleanApiEndpointNum,
|
||||
removeNonAlpha,
|
||||
removeNonAlphanumeric,
|
||||
removeNonNumeric,
|
||||
cleanNrcc,
|
||||
getDomainFromEmail,
|
||||
};
|
@ -1,15 +0,0 @@
|
||||
function unixLocal(unix) {
|
||||
var jsTime = unix*1000
|
||||
var dt = new Date(jsTime)
|
||||
return dt.toLocaleString()
|
||||
}
|
||||
|
||||
function jsUnix(js) {
|
||||
var preRound = js / 1000
|
||||
return Math.round(preRound)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
unixLocal,
|
||||
jsUnix,
|
||||
}
|
20
src/utils/timeConvert.utils.ts
Normal file
20
src/utils/timeConvert.utils.ts
Normal file
@ -0,0 +1,20 @@
|
||||
import { logger } from "./logger.utils";
|
||||
|
||||
function unixLocal(unix: number): string {
|
||||
logger.trace(`unixLocal: Converting time: ${unix}`);
|
||||
var jsTime = unix * 1000;
|
||||
var dt = new Date(jsTime);
|
||||
return dt.toLocaleString();
|
||||
}
|
||||
|
||||
function jsUnix(js: number): number {
|
||||
logger.trace(`jsUnix: Converting time: ${js}`);
|
||||
return Math.floor(js / 1000);
|
||||
}
|
||||
|
||||
export { jsUnix, unixLocal };
|
||||
|
||||
module.exports = {
|
||||
unixLocal,
|
||||
jsUnix,
|
||||
};
|
29
src/utils/trainService.utils.ts
Normal file
29
src/utils/trainService.utils.ts
Normal file
@ -0,0 +1,29 @@
|
||||
export function getFindByHeadcodePipeline(query: any) {
|
||||
return [
|
||||
{
|
||||
$match: query,
|
||||
},
|
||||
{
|
||||
$project: {
|
||||
operator: 1,
|
||||
stops: {
|
||||
$concatArrays: [
|
||||
[{ $first: "$stops" }],
|
||||
[{ $arrayElemAt: ["$stops", -1] }],
|
||||
],
|
||||
},
|
||||
trainUid: 1,
|
||||
stpIndicator: 1,
|
||||
},
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
export function getFindByTrainUidPipeline(query: any) {
|
||||
return [
|
||||
{
|
||||
$match: query,
|
||||
},
|
||||
{ $project: { _id: 0 } },
|
||||
];
|
||||
}
|
3
src/utils/userSetup.utils.ts
Normal file
3
src/utils/userSetup.utils.ts
Normal file
@ -0,0 +1,3 @@
|
||||
// Do I need to setup the database?
|
||||
|
||||
// Possibly not, because every write will create the document if it doesn't exist
|
@ -1,27 +0,0 @@
|
||||
// Checks that all required environment variables are present.
|
||||
// Returns True or False and offers an object detailing what is missing.
|
||||
|
||||
async function varTest(){
|
||||
var required = {
|
||||
OWL_LDB_KEY: process.env.OWL_LDB_KEY,
|
||||
OWL_LDB_CORPUSUSER: process.env.OWL_LDB_CORPUSUSER,
|
||||
OWL_LDB_CORPUSPASS: process.env.OWL_LDB_CORPUSPASS,
|
||||
OWL_NOT_USED: process.env.OWL_NOT_USED
|
||||
}
|
||||
var desired = {
|
||||
OWL_DB_PASS: process.env.OWL_DB_PASS
|
||||
}
|
||||
// DO NOT LOG CREDENTIALS!!!
|
||||
|
||||
// Test that each of required is NOT undefined.
|
||||
// var pass = true if all okay, false if not.
|
||||
// Append any missing values to missing_required = []
|
||||
// Test that each of desired is NOT undefined.
|
||||
// Append any missing values to missing_desired = []
|
||||
|
||||
// Return : {pass: $pass, missong_required = $missing_required, missing_desired = $missing_desired}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
varTest
|
||||
}
|
@ -1,4 +0,0 @@
|
||||
.dockerignore
|
||||
Dockerfile
|
||||
*.xcf
|
||||
*.inkscape.svg
|
@ -1,34 +0,0 @@
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8"/>
|
||||
<meta name="description" content="OwlBoard - Live train departures for traincrew."/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<meta name="application-name" content="OwlBoard">
|
||||
<meta name="author" content="Frederick Boniface">
|
||||
<meta name="theme-color" content="#00b7b7">
|
||||
<link rel="apple-touch-icon" href="/images/app-icons/any/apple-192.png">
|
||||
<link rel="stylesheet" type="text/css" href="./styles/main.css"/>
|
||||
<link rel="icon" type="image/svg+xml" href="./images/icon.svg"/>
|
||||
<link rel="manifest" type="application/json" href="./manifest.json"/>
|
||||
<!-- NO SCRIPTS LOADED - NOT REQUIRED AT PRESENT -->
|
||||
<title>OwlBoard - Error</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="top_button" class="hide_micro">
|
||||
<picture aria-label="Back" class="sidebar_control" onclick="history.back()">
|
||||
<source srcset="/images/nav/back.svg" type="image/svg+xml">
|
||||
<img src="back-40.png" alt="Back">
|
||||
</picture>
|
||||
</div>
|
||||
<picture>
|
||||
<source srcset="/images/logo/wide_logo.svg" type="image/svg+xml">
|
||||
<source media="(max-height: 739px)" srcset="/images/logo/logo-full-200.png" type="image/png">
|
||||
<source srcset="/images/logo/logo-full-250.png" type="image/png">
|
||||
<img class="titleimg" src="/images/logo/logo-full-250.png" alt="OwlBoard Logo">
|
||||
</picture>
|
||||
<h2>Oh no!</h2>
|
||||
<p>That page cannot be found</p>
|
||||
<p>Try going to the <a href="/">homepage</a></p>
|
||||
<p>Error number: 404</p>
|
||||
</body>
|
||||
</html>
|
@ -1,12 +0,0 @@
|
||||
FROM fedora:latest as compressor
|
||||
RUN dnf install brotli nodejs npm jq -y
|
||||
RUN npm i uglifyjs-folder uglifycss html-minifier-terser -g
|
||||
COPY . /data/in
|
||||
RUN bash /data/in/conf/deploy.sh
|
||||
|
||||
FROM fholzer/nginx-brotli:latest
|
||||
RUN rm /etc/nginx/nginx.conf
|
||||
RUN apk update
|
||||
RUN apk add --upgrade libxml2 libxslt
|
||||
COPY ./conf/nginx.conf /etc/nginx/nginx.conf
|
||||
COPY --from=compressor /data/out/ /site-static/
|
@ -1,129 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8"/>
|
||||
<meta name="description" content="OwlBoard - Live train departures for traincrew."/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<meta name="application-name" content="OwlBoard">
|
||||
<meta name="author" content="Frederick Boniface">
|
||||
<meta name="theme-color" content="#00b7b7">
|
||||
<title>OwlBoard - Loading</title>
|
||||
<link rel="apple-touch-icon" href="/images/app-icons/any/apple-192.png">
|
||||
<link rel="stylesheet" type="text/css" href="./styles/main.css"/>
|
||||
<link rel="stylesheet" type="text/css" href="./styles/boards.css"/>
|
||||
<link rel="icon" type="image/svg+xml" href="./images/icon.svg"/>
|
||||
<link rel="manifest" type="application/json" href="./manifest.json"/>
|
||||
<script src="./js/lib.main.js" defer></script>
|
||||
<script src="./js/lib.board.js" defer></script>
|
||||
<script src="./js/simple-board.js" defer></script>
|
||||
</head>
|
||||
<body>
|
||||
<div id="loading">
|
||||
<div class="spinner">
|
||||
</div>
|
||||
<p id="loading_desc">\nLoading</p>
|
||||
</div>
|
||||
|
||||
<div id="content">
|
||||
<div id="header">
|
||||
<div id="station_name">
|
||||
<h1 id="stn_name" class="header-large"></h1>
|
||||
</div>
|
||||
<div id="header-right">
|
||||
<p class="header-small">Data from:</p>
|
||||
<p id="fetch_time" class="header-small">Loading...</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="alerts" onclick="">
|
||||
<div id="alerts_bar" onclick="inflateAlerts()">
|
||||
<picture>
|
||||
<source srcset="./images/nav/alert_icon.svg" type="image/svg+xml">
|
||||
<img id="alert_icon" src="./images/nav/alert_icon.svg" alt="">
|
||||
</picture>
|
||||
<p id="alert_bar_note"></p>
|
||||
<button id="alert_expand_arrow">⋁</button>
|
||||
<div id="alerts_msg" onclick="NULL">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="output">
|
||||
<table>
|
||||
<caption>Train Services</caption>
|
||||
<tr>
|
||||
<th class="name">Origin</th>
|
||||
<th class="name">Dest.</th>
|
||||
<th class="plat">Plat.</th>
|
||||
<th class="time">Sch Arr.</th>
|
||||
<th class="time">Exp Arr.</th>
|
||||
<th class="time">Sch Dep.</th>
|
||||
<th class="time">Exp Dep.</th>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div id="no_services" class="main-notice hidden-whille-loading">
|
||||
<p>There are no scheduled train services from this station</p>
|
||||
</div>
|
||||
|
||||
<div id="ferry" class="hide-when-loading secondary-table">
|
||||
<table>
|
||||
<caption>Ferry Services</caption>
|
||||
<tr>
|
||||
<th class="name">Origin</th>
|
||||
<th class="name">Dest.</th>
|
||||
<th class="plat"></th>
|
||||
<th class="time">Sch Arr.</th>
|
||||
<th class="time">Exp Arr.</th>
|
||||
<th class="time">Sch Dep.</th>
|
||||
<th class="time">Exp Dep.</th>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div id="bus" class="hide-when-loading secondary-table">
|
||||
<table>
|
||||
<caption>Bus Services</caption>
|
||||
<tr>
|
||||
<th class="name">Origin</th>
|
||||
<th class="name">Dest.</th>
|
||||
<th class="plat"></th>
|
||||
<th class="time">Sch Arr.</th>
|
||||
<th class="time">Exp Arr.</th>
|
||||
<th class="time">Sch Dep.</th>
|
||||
<th class="time">Exp Dep.</th>
|
||||
</tr>
|
||||
</table>
|
||||
</div>
|
||||
|
||||
<div id="error_notice" class="main-notice hide-when-loading">
|
||||
<h1 class="error">Oops</h1>
|
||||
<p class="error">There was an error with your request</p>
|
||||
<p id="err_not_found" class="notices-hidden">The station you are searching for cannot be found</p>
|
||||
<p id="err_no_data" class="notices-hidden">The station has no data. It may not be in operation yet/anymore.</p>
|
||||
<p id="err_conn" class="notices-hidden">Connection Error, check your data connection. Retrying.</p>
|
||||
</div>
|
||||
|
||||
<div id="footer">
|
||||
<a href="https://nationalrail.co.uk" target="_blank" rel="nofollow external noreferrer noopener">
|
||||
<picture id="nre_logo">
|
||||
<source srcset="./images/nre/nre-powered_400w.jxl" type="image/jxl">
|
||||
<source srcset="./images/nre/nre-powered_400w.webp" type="image/webp">
|
||||
<img src="./images/nre/nre-powered_400w.png" alt="Powered by National Rail Enquiries">
|
||||
</picture>
|
||||
</a>
|
||||
<a href="/">
|
||||
<picture id="owlboard_logo">
|
||||
<source srcset="./images/logo/mono-logo.svg" type="image/svg+xml">
|
||||
<img src="./images/logo/mono-logo-33.png" alt="OwlBoard Logo">
|
||||
</picture>
|
||||
<picture id="home_icon">
|
||||
<source srcset="./images/nav/home_icon.svg" type="image/svg+xml">
|
||||
<img src="./images/nav/home_icon-40.png" alt="Home">
|
||||
</picture>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
@ -1,38 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
ROOTIN="/data/in"
|
||||
ROOTOUT="/data/out"
|
||||
|
||||
echo "Running UglifyJS on /data/in folder"
|
||||
uglifyjs-folder "$ROOTIN" -x ".js" -eo "$ROOTOUT"
|
||||
|
||||
echo "Running UglifyCSS"
|
||||
CSSIN="/data/in/styles/"
|
||||
CSSOUT="/data/out/styles"
|
||||
|
||||
cd $CSSIN
|
||||
echo "Changed directory"
|
||||
pwd
|
||||
for f in *
|
||||
do
|
||||
if [ -f "$f" ]; then
|
||||
uglifycss "$f" --output "$f";
|
||||
fi
|
||||
done
|
||||
|
||||
echo "Moving 'styles' to 'out'"
|
||||
cp -r /data/in/styles /data/out/styles
|
||||
|
||||
echo "Running html-minifier-terser on /folder"
|
||||
HTMLIN="/data/in/"
|
||||
HTMLOUT="/data/out"
|
||||
html-minifier-terser --collapse-whitespace --remove-comments --file-ext html --input-dir /data/in/ --output-dir /data/out/
|
||||
|
||||
echo "Moving JSON Manifest file from root to output"
|
||||
cat /data/in/manifest.json | jq -c > /data/out/manifest.json
|
||||
|
||||
echo "Moving images folder from in/ to out/"
|
||||
cp -r /data/in/images /data/out/images
|
||||
|
||||
echo "Running GZIP & Brotli on all HTML, JS, CSS, JSON & SVG files"
|
||||
find /data/out -type f -name \*.html -or -name \*.js -or -name \*.css -or -name \*.json -or -name \*.svg -or -name \*.ttf | while read file; do gzip -k -9 $file; brotli -k -q 11 $file; done
|
@ -1,60 +0,0 @@
|
||||
user nginx;
|
||||
worker_processes 1;
|
||||
|
||||
error_log /var/log/nginx/error.log notice;
|
||||
pid /var/run/nginx.pid;
|
||||
|
||||
|
||||
events {
|
||||
worker_connections 1024;
|
||||
}
|
||||
|
||||
http {
|
||||
include /etc/nginx/mime.types;
|
||||
default_type application/octet-stream;
|
||||
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
|
||||
'$status $body_bytes_sent "$http_referer" '
|
||||
'"$http_user_agent" "$http_x_forwarded_for"';
|
||||
|
||||
access_log /var/log/nginx/access.log main;
|
||||
sendfile on;
|
||||
keepalive_timeout 65;
|
||||
|
||||
proxy_cache_path /var/cache/nginx keys_zone=owl_cache:20m inactive=24h;
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
server_name localhost;
|
||||
proxy_cache owl_cache;
|
||||
|
||||
add_header Content-Security-Policy "default-src 'self'";
|
||||
|
||||
location / {
|
||||
root /site-static/;
|
||||
index index.html;
|
||||
gzip_static on;
|
||||
brotli_static on;
|
||||
error_page 404 /404.html;
|
||||
expires 3600;
|
||||
add_header Cache-Control "public, no-transform";
|
||||
}
|
||||
|
||||
location /api/ {
|
||||
proxy_pass http://localhost:8460;
|
||||
proxy_cache_key $scheme://$host$uri$is_args$query_string;
|
||||
proxy_ignore_headers Cache-Control;
|
||||
proxy_cache_valid 200 2m; # Evaluate whether 2m or 1m is more appropriate
|
||||
expires 2m;
|
||||
add_header Cache-Control "private, no-transform";
|
||||
}
|
||||
|
||||
location /api/v1/list/ {
|
||||
proxy_pass http://localhost:8460;
|
||||
proxy_cache_key $scheme://$host$uri$is_args$query_string;
|
||||
proxy_ignore_headers Cache-Control;
|
||||
proxy_cache_valid 200 10080m;
|
||||
expires 3d;
|
||||
add_header Cache-Control "public, no-transform";
|
||||
}
|
||||
}
|
||||
}
|
@ -1,36 +0,0 @@
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="UTF-8"/>
|
||||
<meta name="description" content="OwlBoard - Live train departures for traincrew."/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<meta name="application-name" content="OwlBoard">
|
||||
<meta name="author" content="Frederick Boniface">
|
||||
<meta name="theme-color" content="#00b7b7">
|
||||
<link rel="apple-touch-icon" href="/images/app-icons/any/apple-192.png">
|
||||
<link rel="stylesheet" type="text/css" href="./styles/main.css"/>
|
||||
<link rel="icon" type="image/svg+xml" href="./images/icon.svg"/>
|
||||
<link rel="manifest" type="application/json" href="./manifest.json"/>
|
||||
<!-- NO SCRIPTS LOADED - NOT REQUIRED AT PRESENT -->
|
||||
<title>OwlBoard - Error</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="top_button" class="hide_micro">
|
||||
<picture aria-label="Close Menu" class="sidebar_control" onclick="history.back()">
|
||||
<source srcset="/images/nav/back.svg" type="image/svg+xml">
|
||||
<img src="back-40.png" alt="Close menu">
|
||||
</picture>
|
||||
</div>
|
||||
<picture>
|
||||
<source srcset="/images/logo/wide_logo.svg" type="image/svg+xml">
|
||||
<source media="(max-height: 739px)" srcset="/images/logo/logo-full-200.png" type="image/png">
|
||||
<source srcset="/images/logo/logo-full-250.png" type="image/png">
|
||||
<img class="titleimg" src="/images/logo/logo-full-250.png" alt="OwlBoard Logo">
|
||||
</picture>
|
||||
<h2>Oh no!</h2>
|
||||
<p>OwlBoard has encountered a Connection Error</p>
|
||||
<p>Check your data connection and try again</p>
|
||||
<p>Go to the <a href="/">homepage</a></p>
|
||||
<br>
|
||||
<p>Error Code: CERR</p>
|
||||
</body>
|
||||
</html>
|
@ -1,64 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8"/>
|
||||
<meta name="description" content="OwlBoard - Live train departures for traincrew."/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<meta name="application-name" content="OwlBoard">
|
||||
<meta name="author" content="Frederick Boniface">
|
||||
<meta name="theme-color" content="#00b7b7">
|
||||
<link rel="apple-touch-icon" href="/images/app-icons/any/apple-192.png">
|
||||
<link rel="stylesheet" type="text/css" href="./styles/main.css"/>
|
||||
<link rel="stylesheet" type="text/css" href="./styles/find-code.css"/>
|
||||
<link rel="icon" type="image/svg+xml" href="./images/icon.svg"/>
|
||||
<link rel="manifest" type="application/json" href="./manifest.json"/>
|
||||
<title>OwlBoard - Code Lookup</title>
|
||||
<script src="./js/lib.main.js" defer></script>
|
||||
<script src="./js/find-code.js" defer></script>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<div id="top_button" class="hide_micro">
|
||||
<a href="/">
|
||||
<picture aria-label="Home" class="sidebar_control">
|
||||
<source srcset="/images/nav/back.svg" type="image/svg+xml">
|
||||
<img src="back-40.png" alt="Home">
|
||||
</picture>
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<picture>
|
||||
<source srcset="/images/logo/wide_logo.svg" type="image/svg+xml">
|
||||
<source media="(max-height: 739px)" srcset="/images/logo/logo-full-200.png" type="image/png">
|
||||
<source srcset="/images/logo/logo-full-250.png" type="image/png">
|
||||
<img class="titleimg" src="/images/logo/logo-full-250.png" alt="OwlBoard Logo">
|
||||
</picture>
|
||||
<h2>Code Lookup</h2>
|
||||
<p>Enter one known code in the relevant box below and hit submit.
|
||||
Where they exist, the other code types will be filled in.</p>
|
||||
<p>You cannot yet lookup by location name as the values are not unique.</p>
|
||||
<p>Location name search will be added in the future.</p>
|
||||
|
||||
<div id="loading">
|
||||
<div class="spinner">
|
||||
</div>
|
||||
<p id="loading_desc">Searching</p>
|
||||
</div>
|
||||
|
||||
<label for="name">Location name:</label><br>
|
||||
<input type="text" class="small-lookup-box" id="name" name="name" readonly=""><br>
|
||||
<label for="3alpha">CRS/3ALPHA:</label><br>
|
||||
<input type="text" class="small-lookup-box" id="3alpha" name="3alpha" maxlength="3"><br>
|
||||
<label for="nlc">NLC:</label><br>
|
||||
<input type="number" class="small-lookup-box" id="nlc" name="nlc" min="100000" max="999999"><br>
|
||||
<label for="tiploc">TIPLOC:</label><br>
|
||||
<input type="text" class="small-lookup-box" id="tiploc" name="tiploc" maxlength="7"><br>
|
||||
<label for="stanox">STANOX:</label><br>
|
||||
<input type="number" class="small-lookup-box" id="stanox" name="stanox"><br>
|
||||
<label for="stanme" hidden>STANME:</label><br>
|
||||
<input type="test" class="small-lookup-box" id="stanme" name="stanme" readonly="" hidden><br>
|
||||
<input type="submit" value="Find" class="lookup-button" onclick="fetchEntry()">
|
||||
<input type="submit" value="Clear" class="lookup-button" onclick="clearForm()">
|
||||
</body>
|
||||
</html>
|
@ -1,93 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8"/>
|
||||
<meta name="description" content="OwlBoard - Live train departures for traincrew."/>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<meta name="application-name" content="OwlBoard">
|
||||
<meta name="author" content="Frederick Boniface">
|
||||
<meta name="theme-color" content="#00b7b7">
|
||||
<link rel="apple-touch-icon" href="/images/app-icons/any/apple-192.png">
|
||||
<link rel="stylesheet" type="text/css" href="./styles/main.css"/>
|
||||
<link rel="stylesheet" type="text/css" href="./styles/help.css"/>
|
||||
<link rel="icon" type="image/svg+xml" href="./images/icon.svg"/>
|
||||
<link rel="manifest" type="application/json" href="./manifest.json"/>
|
||||
<!-- NO SCRIPTS LOADED - NOT REQUIRED AT PRESENT -->
|
||||
<title>OwlBoard</title>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<div id="top_button" class="hide_micro">
|
||||
<a href="/">
|
||||
<picture aria-label="Home" class="sidebar_control">
|
||||
<source srcset="/images/nav/back.svg" type="image/svg+xml">
|
||||
<img src="back-40.png" alt="Home">
|
||||
</picture>
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<picture>
|
||||
<source srcset="/images/logo/wide_logo.svg" type="image/svg+xml">
|
||||
<source media="(max-height: 739px)" srcset="/images/logo/logo-full-200.png" type="image/png">
|
||||
<source srcset="/images/logo/logo-full-250.png" type="image/png">
|
||||
<img class="titleimg" src="/images/logo/logo-full-250.png" alt="OwlBoard Logo">
|
||||
</picture>
|
||||
<h2>Help</h2>
|
||||
<p>OwlBoard gives you quick and easy access to departure boards for
|
||||
all National Rail stations in the UK.</p>
|
||||
<p>Just type a CRS, TIPLOC or STANOX into the textbox on the homepage and tap
|
||||
enter on the screen or your keypad. You can also select a different board type,
|
||||
more details on your choices below.</p>
|
||||
<p>For example, Portway Park &
|
||||
Ride's CRS is 'PRI', and its TIPLOC is 'PTWYPR'; Portsmouth Harbour's
|
||||
CRS is 'PMH', and its TIPLOC is 'PHBR'.</p>
|
||||
<p>A CRS is always three letters,
|
||||
a TIPLOC can be between 4-7 letters.</p>
|
||||
<br>
|
||||
<h3>Don't know the CRS or TIPLOC?</h3>
|
||||
<p>Sorry, you can't search by name but you can use our <a href="find-code.html">
|
||||
Code Lookup</a> page to help.</p>
|
||||
<h3>Board Types</h3>
|
||||
<h4>Basic Board - Default</h4>
|
||||
<p>The basic board shows the next 10 train arrival and departures, as well as
|
||||
bus and ferry departures where available.</p>
|
||||
<p>You can tap on a trains origin or destination to see service details.</p>
|
||||
<br>
|
||||
<h3>Glossary</h3>
|
||||
<p>Some of the terms may be new to you or different from those commonly used.</p>
|
||||
<table id="table">
|
||||
<tr>
|
||||
<th>Term</th>
|
||||
<th>Definition</th>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>CAN</td>
|
||||
<td>Cancelled</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>CRS</td>
|
||||
<td>Computer Reservation System Code - correctly termed as '3ALPHA'</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>NLC</td>
|
||||
<td>National Location Code - Used for finance & accounting</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>RT</td>
|
||||
<td>Right time (On time)</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>STANOX</td>
|
||||
<td>Station Number</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>TIPLOC</td>
|
||||
<td>Timing Point Location (Name)</td>
|
||||
</tr>
|
||||
</table>
|
||||
<br>
|
||||
<h3>Spotted an issue with the site?</h3>
|
||||
<p>Let me know by <a href="./report.html">reporting an issue</a>.</p>
|
||||
</body>
|
||||
</html>
|
Binary file not shown.
Before Width: | Height: | Size: 36 KiB |
Binary file not shown.
Before Width: | Height: | Size: 33 KiB |
@ -1,2 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg version="1.1" viewBox="0 0 667.26 706.8" xml:space="preserve" xmlns="http://www.w3.org/2000/svg" xmlns:cc="http://creativecommons.org/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"><metadata><rdf:RDF><cc:Work rdf:about=""><dc:format>image/svg+xml</dc:format><dc:type rdf:resource="http://purl.org/dc/dcmitype/StillImage"/></cc:Work></rdf:RDF></metadata><g transform="translate(-111.19 -90.003)"><rect x="213.88" y="260.42" width="81.997" height="48.755" fill="#fcfc09"/><rect x="167.35" y="187.29" width="181.72" height="68.7" fill="#f0ffff"/></g><g transform="matrix(1.3333 0 0 -1.3333 -111.19 790)" fill="#00b7b7"><g transform="translate(7.5,-7.5)"><path d="m112.25 369.32c12.988-10.101 30.989 0.076 39.875 15.722 9.969-34.512 37.501-53.367 57.648-37.596-6.503-20.085-34.373-34.554-61.522 13.215-15.947-14.598-28.559-14.416-36.001 8.659m14.302-81.369c15.405-6.791 28.15 7.974 30.66 25.885 21.351-31.973 53.097-43.81 65.802-22.905 1.237-21.743-18.954-43.315-60.644-2.464-9.305-18.663-20.837-21.694-35.818-0.516m33.549 175.93 24.381-9.342 24.382 9.342-24.382-24.381zm-14.899 47.955c1.574 0 3.075-0.31 4.448-0.869-1.973-1.1-3.309-3.206-3.309-5.626 0-3.554 2.883-6.436 6.437-6.436 1.608 0 3.079 0.59 4.208 1.566 5e-3 -0.143 8e-3 -0.284 8e-3 -0.428 0-6.512-5.28-11.791-11.792-11.791s-11.791 5.279-11.791 11.791c0 6.513 5.279 11.793 11.791 11.793m77.642 0c1.573 0 3.075-0.31 4.447-0.869-1.973-1.1-3.308-3.206-3.308-5.626 0-3.554 2.882-6.436 6.437-6.436 1.608 0 3.079 0.59 4.207 1.566 6e-3 -0.143 9e-3 -0.284 9e-3 -0.428 0-6.512-5.28-11.791-11.792-11.791-6.513 0-11.792 5.279-11.792 11.791 0 6.513 5.279 11.793 11.792 11.793m0 8.887c11.421 0 20.677-9.259 20.677-20.68 0-11.42-9.256-20.677-20.677-20.677-11.42 0-20.678 9.257-20.678 20.677 0 11.421 9.258 20.68 20.678 20.68m-77.642 0c11.42 0 20.679-9.259 20.679-20.68 0-11.42-9.259-20.677-20.679-20.677s-20.678 9.257-20.678 20.677c0 11.421 9.258 20.68 20.678 
20.68m222.62-271.32c-5.257-16.303-14.169-16.431-25.436-6.118-19.182-33.751-38.872-23.527-43.468-9.336 14.236-11.143 33.688 2.178 40.73 26.562 6.28-11.055 18.998-18.245 28.174-11.108m-7.657 101.75c-5.26-16.304-14.169-16.433-25.436-6.118-19.182-33.751-38.873-23.529-43.469-9.338 14.236-11.142 33.688 2.179 40.731 26.564 6.279-11.055 18.997-18.247 28.174-11.108m3.828-50.877c-5.259-16.302-14.168-16.429-25.435-6.117-19.182-33.752-38.873-23.528-43.469-9.338 14.236-11.14 33.687 2.181 40.731 26.564 6.279-11.055 18.996-18.243 28.173-11.109m-185-126.56 8.456 14.687-2.481 14.064c8.24-6.441 16.897-12.257 25.895-17.419l-13.787-20.682c-5.163 5.163-11.523 8.215-18.083 9.35m214.44 47.276-16.013 24.214v93.753c0 40.019-32.441 72.459-72.458 72.459-37.742 0-68.739-28.855-72.144-65.707-0.563 6.626-0.974 13.336-1.228 20.135 0 36.412 26.858 66.546 61.843 71.684 15.118 42.436 3.44 91.058-31.815 121.88-0.332 0.288-0.668 0.569-1.002 0.854-14.934-8.987-28.921-18.756-41.766-29.467 14.159 17.506 30.102 32.253 47.212 45.198-47.368 32.008-116.69 32.008-164.06 0 17.11-12.946 33.054-27.694 47.212-45.201-11.522 9.609-23.965 18.462-37.188 26.67-40.329-37.119-47.937-98.2-17.545-144.16 4.83-7.304 7.155-15.981 6.624-24.722-3.808-62.683 19.436-123.99 63.84-168.4 3.505-3.505 7.116-6.877 10.824-10.115l-15.66-23.489c-18.988 18.987-54.154 9.494-56.053-19.369 4.94 7.294 12.965 10.334 21.93 10.306 4.624 3.683 10.595 5.668 18.277 5.498-14.197-2.669-23.71-11.684-25.256-26.007 9.826 11.065 21.246 13.643 34.785 11.262-7.045-4.94-12.081-12.841-12.767-23.274 4.941 7.293 12.965 10.335 21.932 10.303 1.457 1.162 3.047 2.155 4.783 2.963 0.557-0.597 1.095-1.202 1.614-1.819-7.504-4.493-12.316-11.823-13.378-21.652 5.924 6.673 12.428 10.259 19.627 11.533 1.471-3.662 2.152-7.45 1.823-11.24 4.057 2.964 6.698 7.077 8.006 11.651 2.475-0.187 5.026-0.574 7.654-1.132 12.8-8.149 22.377-20.41 21.31-32.736 11.849 8.659 11.621 27.116 1.367 38.28l8.659 15.04-2.26 12.808c15.693-7.825 32.28-13.746 49.402-17.626-7.313 17.831-12.59 
36.793-15.633 56.995 19.737-52.989 51.201-99.462 92.224-140.63 3.548-6.147 10.784-9.143 17.64-7.305 6.856 1.837 11.623 8.048 11.623 15.147v65.793c33.793-9.913 62.771-33.463 79.074-66.13l119.85 29.509c-14.567 69.186-99.576 110.14-175.59 96.362z" fill="#00b7b7" fill-rule="evenodd"/></g><path transform="scale(1,-1)" d="m426.75-584.57h11.635v242.67h-11.635z" fill-rule="evenodd" stroke-width=".80733"/><path transform="scale(1,-1)" d="m533.35-584.09h11.635v242.67h-11.635z" fill-rule="evenodd" stroke-width=".80733"/><path transform="scale(1,-1)" d="m404.98-554.66h159.56v34.904h-159.56z" fill-rule="evenodd" stroke-width=".75"/><path transform="scale(1,-1)" d="m404.98-481.16h159.56v34.904h-159.56z" fill-rule="evenodd" stroke-width=".75"/><path transform="scale(1,-1)" d="m404.98-404.66h159.56v34.904h-159.56z" fill-rule="evenodd" stroke-width=".75"/></g></svg>
|
Before Width: | Height: | Size: 4.7 KiB |
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user