Compare commits

..

No commits in common. "main" and "v1.0.0" have entirely different histories.
main ... v1.0.0

94 changed files with 3365 additions and 10023 deletions

View File

@ -8,6 +8,4 @@ db-manager
run.sh run.sh
LICENSE LICENSE
*.md *.md
.eslintrc.js static
.vscode
.test-tools

View File

@ -1,35 +0,0 @@
module.exports = {
env: {
browser: false,
node: true,
commonjs: true,
es2021: true,
},
extends: "eslint:recommended",
overrides: [
{
files: ["**/*.ts", "**/*.js"],
parser: "@typescript-eslint/parser",
plugins: ["@typescript-eslint"],
extends: [
"plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended",
],
},
],
parserOptions: {
ecmaVersion: "latest",
},
rules: {
indent: ["error", 2],
"linebreak-style": ["error", "unix"],
quotes: ["error", "single"],
semi: ["error", "always"],
"max-len": [
"warn",
{
code: 80,
},
],
},
};

1
.npmrc
View File

@ -1 +0,0 @@
@owlboard:registry=https://git.fjla.uk/api/packages/OwlBoard/npm/

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1 @@
{"GetStationBoardResult":{"generatedAt":"2023-01-14T11:23:12.6558466+00:00","locationName":"Pilning","crs":"PIL","nrccMessages":{"message":"\nPoor weather affecting services in Wales due to flooding on the railway More details can be found in <a href=\"https://t.co/uBU966PUmX\">Latest Travel News</a>."},"platformAvailable":"true"}}

1
.test-tools/all.json Normal file

File diff suppressed because one or more lines are too long

1
.test-tools/clean.json Normal file

File diff suppressed because one or more lines are too long

View File

@ -1,4 +1,4 @@
version: "3.1" version: '3.1'
services: services:
mongo: mongo:

33
.test-tools/ferry-vc.json Normal file
View File

@ -0,0 +1,33 @@
{"service":
[
{"sta":"16:07",
"eta":"On time",
"operator":"South Western Railway",
"operatorCode":"SW",
"serviceType":"ferry",
"serviceID":"37782PHBR____",
"origin":
{"location":
{"locationName":
"Ryde Pier Head","crs":"RYP"
}
},
"destination":
{"location":
{"locationName":"Portsmouth Harbour",
"crs":"PMH"
}
},
"previousCallingPoints":
{"callingPointList":
{"callingPoint":
{"locationName":"Ryde Pier Head",
"crs":"RYP",
"st":"15:45",
"et":"On time"
}
}
}
},
{"std":"16:15","etd":"On time","operator":"South Western Railway","operatorCode":"SW","serviceType":"ferry","serviceID":"37746PHBR____","origin":{"location":{"locationName":"Portsmouth Harbour","crs":"PMH"}},"destination":{"location":{"locationName":"Ryde Pier Head","crs":"RYP"}},"subsequentCallingPoints":{"callingPointList":{"callingPoint":
{"locationName":"Ryde Pier Head","crs":"RYP","st":"16:37","et":"On time"}}}}]}

20
.vscode/settings.json vendored
View File

@ -1,15 +1,7 @@
{ {
"git.autofetch": "all", "git.autofetch": "all",
"git.alwaysSignOff": true, "git.alwaysSignOff": true,
"git.enableCommitSigning": false, "git.enableCommitSigning": false,
"git.fetchOnPull": true, "git.fetchOnPull": true,
"git.pullBeforeCheckout": true, "git.pullBeforeCheckout": true
"editor.defaultFormatter": "rvest.vs-code-prettier-eslint", }
"editor.formatOnPaste": false, // required
"editor.formatOnType": false, // required
"editor.formatOnSave": true, // optional
"editor.formatOnSaveMode": "file", // required to format on save
"files.autoSave": "onFocusChange", // optional but recommended
"vs-code-prettier-eslint.prettierLast": "false",
"editor.tabSize": 2 // set as "true" to run 'prettier' last not first
}

22
.vscode/tasks.json vendored
View File

@ -1,12 +1,12 @@
{ {
"version": "2.0.0", "version": "2.0.0",
"tasks": [ "tasks": [
{ {
"type": "npm", "type": "npm",
"script": "start", "script": "start",
"problemMatcher": [], "problemMatcher": [],
"label": "npm: start", "label": "npm: start",
"detail": "node app.js" "detail": "node app.js"
} }
] ]
} }

View File

@ -1,19 +1,7 @@
FROM node:20 as builder FROM node:19
WORKDIR /usr/src/app
COPY ./package*.json ./
COPY ./.npmrc ./
RUN npm install
COPY . .
# Ideally the tests should be run separately in a CI/CD workflow rather than during the build
# Currently, it does prevent a container being published with failing tests
RUN npm run test
RUN npm run build
FROM node:20-slim
EXPOSE 8460 EXPOSE 8460
WORKDIR /usr/src/app WORKDIR /usr/src/app
COPY ./mail-templates/* ./mail-templates/
COPY ./package*.json ./ COPY ./package*.json ./
RUN npm ci --omit=dev RUN npm ci --omit=dev
COPY --from=builder /usr/src/app/dist/ ./ COPY . .
CMD [ "node" , "app.js" ] CMD [ "node", "app.js" ]

108
README.md
View File

@ -4,41 +4,101 @@ OwlBoard is both a backend API, and a frontend Arrival/Departure board webapp.
Powered by Node.JS and using the ldbs-json module, the OwlBoard API provides up to date train departure information for any station in the UK. Powered by Node.JS and using the ldbs-json module, the OwlBoard API provides up to date train departure information for any station in the UK.
## Build Whilst the application is open source, the webservice (owlboard.fb-infra.uk) is not openly available. National Rail Enquiries have limits on API access so to use this software yourself, you'll need to run your own instance after obtaining your own API key.
To build the application with Docker, clone the repository and run `docker build` The webservice (owlboard.fb-infra.uk) may contain ads to support the running of the service,
if ads are implemented, I intend to avoid 'dubious' advertisers that target and track users.
Currently only the public API is available as I am currently unable to request a key for the staff version.
## Requirements:
To run this server you will need:
- Docker or Kubernetes
## WebApp Colours:
- See CSS Variables
## API Endpoints: ## API Endpoints:
- /api/v1:
API Documentation has been removed as it is now out of date. I do intent to re-write the documentation at a later date. - /list:
- /stations:
- GET: Get list of stations
- Authenticated: No
- Returns JSON: `{"STATION NAME":{"CRS":"code","TIPLOC":"code"}}`
- /corpus:
- GET: Get full CORPUS Data
- Authenticated: No
- Returns JSON in original CORPUS format minus any blank values.
- /ldb:
- /{crs}:
- GET: Get arrival/departure board for {crs}
- Authenticated: No
- Returns JSON: Formatted as per ldbs-json module.
- /gitea:
- POST: Post issue to Gitea Repo
- Authenticated: Yes
- Not yet implemented, submit issues at https://git.fjla.uk/fred.boniface/owlboard
- /kube:
- /alive:
- GET: Check alive
- Authenticated: No
- Returns JSON: `{"status":"alive"}`
- /ready:
- GET: Check ready
- Authenticated: No
- Returns JSON: `{"state":""}` ready or not_ready.
## Stack:
- app.js -> Launches server, Entry Point, defines routers and middlewares.
- routes -> Routers - Directs requests to controllers.
- controllers -> Checks auth, sends response. Request doesn't pass further.
- services -> Provide data and do tasks, uses other services and utils.
- utils -> Provide utility functions that can be called by services.
- configs -> Provide configuration details for other files.
- static -> Holds files for static service, should be hosted behind a caching proxy.
## Configuration: ## Configuration:
The app is designed to be run within Kubernetes or within a Docker container, as such configuration is provided with environment variables. See the variable name and default options below. If a required configuration is not present the program will exit when that feature is initialised.
Configuration options are set through environment variables. |VAR|DEFAULT|REQUIRED|PURPOSE|
These configuration options are shared with other programs in the OwlBoard ecosystem. |:-:|:-----:|:------:|:-----:|
|OWL_SRV_PORT|8460|NO|Web Server Port|
| VAR | DEFAULT | REQUIRED | PURPOSE | |OWL_SRV_LISTEN|0.0.0.0|NO|Web Server Listen Address|
| :-----------------: | :-------: | :------: | :-------------------------------------------------------: | |OWL_DB_USER|owl|NO|Database Username|
| OWL_SRV_PORT | 8460 | NO | Web Server Port | |OWL_DB_PASS|twittwoo|NO|Database Password - Do not leave as default in production|
| OWL_SRV_LISTEN | 0.0.0.0 | NO | Web Server Listen Address | |OWL_DB_NAME|owlboard|NO|Database Name|
| OWL_DB_USER | owl | NO | Database Username | |OWL_DB_PORT|27017|NO|Database Server Port|
| OWL_DB_PASS | twittwoo | NO | Database Password - Do not leave as default in production | |OWL_DB_HOST|localhost|NO|Database Server Host|
| OWL_DB_NAME | owlboard | NO | Database Name | |OWL_LDB_KEY||YES|National Rail LDBWS API Key|
| OWL_DB_PORT | 27017 | NO | Database Server Port | |OWL_LDB_SVKEY||NO|National Rail LDBSVWS API Key|
| OWL_DB_HOST | localhost | NO | Database Server Host | |OWL_LDB_CORPUSUSER||YES|Network Rail CORPUS API Username|
| OWL_LDB_KEY | | YES | National Rail LDBWS API Key | |OWL_LDB_CORPUSPASS||YES|Network Rail CORPUS API Password|
| OWL_LDB_SVKEY | | NO | National Rail LDBSVWS API Key | |OWL_GIT_ISSUEBOT||NO|Gitea API Key for issue reporting|
| OWL_LDB_CORPUSUSER | | YES | Network Rail NROD Username | |OWL_GIT_APIENDPOINT||NO|Gitea API Endpoint|
| OWL_LDB_CORPUSPASS | | YES | Network Rail NROD Password |
| OWL_GIT_ISSUEBOT | | NO | Gitea API Key for issue reporting |
| OWL_GIT_APIENDPOINT | | NO | Gitea API Endpoint |
In the case that OWL_LDB_SVKEY is not available, staff versions of departure board, etc. will not be available. In the case that OWL_LDB_SVKEY is not available, staff versions of departure board, etc. will not be available.
In the case that OWL_GIT_ISSUEBOT is not available, the 'Report Issue' page will not be able to POST data. In the case that OWL_GIT_ISSUEBOT is not available, the 'Report Issue' page will not be able to POST data.
## Database ## Database Layout
OwlBoard uses MongoDB The OwlBoard application will build the database if required at startup. All it needs is authentication details for a MongoDB server.
The OwlBoard database is managed by the dbmanager application which will configure and maintain the database and should be run at least twice a day to ensure timetable data is up to date, see https://git.fjla.uk/owlboard/db-manager. ### Collections
|Collection|Contents|Purpose|
|:--------:|:------:|:-----:|
|corpus|Raw CORPUS data with blank keys removed|Code lookups|
|stations|Cleaned CORPUS Data, any objects with blank 3ALPHA & STANOX fields are removed|Validation before fetching Arr/Dep boards|
|meta|Lists the update time of corpus and station data|Will be used to update after a predetermined time period|
Note that even after removing all objects from the CORPUS with a blank 3ALPHA & STANOX, many items remain which are not stations and will not have a board available. Going forwards methods to remove non-stations from this data will be introduced.

3
UpNext.md Normal file
View File

@ -0,0 +1,3 @@
# What to do next:
* Rewrite sanitizing functions to remove external dependancy.

115
app.js
View File

@ -1,94 +1,69 @@
// OwlBoard - © Fred Boniface 2022-2023 - Licensed under GPLv3 (or later) // OwlBoard - © Fred Boniface 2022 - Licensed under GPLv3 (or later)
// Please see the included LICENSE file
const mode = process.env.NODE_ENV || "development"; // Please see the included LICENSE file. Statically served fonts are
// licensed separately, each folder contains a license file where a
// different license applies.
// Logging console.log(`Initialising OwlBoard`)
const logger = require("./src/utils/logger.utils");
logger.logger.info("Logger Initialised");
// External Requires // External Requires
const express = require("express"); const express = require('express');
const app = express(); const app = express();
const compression = require('compression')
// Middleware
const rateLimit = require("express-rate-limit");
const cors = require("cors");
const authenticate = require("./src/middlewares/auth.middlewares");
// Internal Requires // Internal Requires
const version = require("./src/configs/version.configs"); const log = require('./src/utils/log.utils'); // Log Helper
const pis2Rtr = require("./src/routes/pis.routes"); // API Version 2 Routes const version = require('./src/configs/version.configs'); // Version Strings
const ref2Rtr = require("./src/routes/ref.routes"); // API Version 2 Routes const listRtr = require('./src/routes/list.routes'); // /list endpoints
const live2Rtr = require("./src/routes/live.routes"); // API Version 2 Routes const ldbRtr = require('./src/routes/ldb.routes'); // /ldb endpoints
const tt2Rtr = require("./src/routes/timetable.routes"); // API Version 2 const kubeRtr = require('./src/routes/kube.routes'); // /kube endpoints
const user2Rtr = require("./src/routes/user.routes"); // API Version 2 Routes const findRtr = require('./src/routes/find.routes'); // /find endpoints
const miscRtr = require("./src/routes/misc.routes"); // Non-Public API Routes const issueRtr = require('./src/routes/issue.routes') // /issue endpoints
const statRtr = require('./src/routes/stats.routes'); // /stat endpoints
// Set Server Configurations // Set Server Configurations
const srvListen = process.env.OWL_SRV_LISTEN || "0.0.0.0"; const srvListen = process.env.OWL_SRV_LISTEN || "0.0.0.0"
const srvPort = process.env.OWL_SRV_PORT || 8460; const srvPort = process.env.OWL_SRV_PORT || 8460
const limiter = rateLimit({
windowMs: 15 * (60 * 1000), // 15 minutes
max: 100, // Limit each IP to 100 requests per `window` (here, per 15 minutes)
standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers
legacyHeaders: false, // Disable the `X-RateLimit-*` headers
});
// Print version number: // Print version number:
logger.logger.info(`Starting version ${version.app} in ${mode} mode`); log.out(`app: Starting OwlBoard - Backend Version: ${version.app} - API versions: ${version.api}`);
// Remove X-Powered-By header: // Test for required vars:
app.disable("x-powered-by"); // const varTest = require('./src/utils/varTest.utils');
// var startTest = await varTest.varTest();
//console.log("Required Vars Missing:", startTest.missing_required);
//console.log("Desired Vars Missing:", startTest.missing_desired);
// if startTest.pass == false
// console.log("Unable to start, missing required vars")
// exit app
// Express Error Handling: // Express Error Handling:
app.use((err, req, res, next) => { app.use((err, req, res, next) => {
const statusCode = err.statuscode || 500; const statusCode = err.statuscode || 500;
logger.logger.error(err, "Express Error"); console.error(err.message, err.stack);
res.status(statusCode).json({ message: err.message }); res.status(statusCode).json({'message': err.message});
return; return;
}); });
// Pre Middleware: // Express Submodules:
app.use(
cors({
origin: "*", //[/\.owlboard\.info$/, 'localhost:5173', 'localhost:4173']
})
);
app.use(express.json()); //JSON Parsing for POST Requests app.use(express.json()); //JSON Parsing for POST Requests
//app.use(limiter); //STATIC CONTENT NO LONGER SERVED FROM NODE
app.use(authenticate); app.use(compression()) // Compress API Data if supported by client
// 2023 Rationalisation Routes (/api/v2, /misc) // Express Routes
app.use("/api/v2/pis", pis2Rtr); // API Version 2 app.use('/api/v1/list', listRtr);
app.use("/api/v2/live", live2Rtr); // API Version 2 app.use('/api/v1/ldb', ldbRtr);
app.use("/api/v2/ref", ref2Rtr); // API Version 2 app.use('/api/v1/kube', kubeRtr);
app.use("/api/v2/timetable", tt2Rtr); // API Version 2 app.use('/api/v1/find', findRtr);
app.use("/api/v2/user", user2Rtr); // API Version 2 app.use('/api/v1/issue', issueRtr);
app.use("/misc", miscRtr); // Non public-api endpoints (Stats, Issue, etc.) app.use('/api/v1/stats', statRtr)
app.use("/api/v1/auth/test", authenticate, (req, res) =>
res.status(200).json({
status: "ok",
message: "Authentication successful",
})
); // Returns 401 if auth failed, 200 if successful.
// Number of proxies:
app.set("trust proxy", 4);
mode === "development"
? app.get("/api/v1/ip", (req, res) => res.send(req.ip))
: null;
// Disable etags
app.set('etag', false)
// Start Express // Start Express
app.listen(srvPort, srvListen, (error) => { app.listen(srvPort, srvListen, (error) =>{
if (!error) { if(!error) {
logger.logger.info(`Listening on http://${srvListen}:${srvPort}`); log.out(`app.listen: Listening on http://${srvListen}:${srvPort}`);
log.out("app.listen: State - alive")
} else { } else {
logger.logger.error(error, `Error starting server`); log.out("app.listen: Error occurred, server can't start", error);
} }
}); });

View File

@ -1,5 +0,0 @@
/** @type {import('ts-jest').JestConfigWithTsJest} */
module.exports = {
preset: "ts-jest",
testEnvironment: "node",
};

View File

@ -1,106 +0,0 @@
<html lang="en">
<head>
<title>OwlBoard - Register</title>
<meta name="viewport" content="width=device-width, initial-scale=1" />
<style>
html {
text-align: center;
width: 100%;
margin: 0;
padding: 0;
background-color: #404c55;
background-image: radial-gradient(#2b343c, #404c55);
}
body {
margin: 0;
padding: 0;
}
a {
color: azure;
}
table {
width: 100%;
height: 100%;
margin: 0;
padding: 0;
color: azure;
font-family: sans-serif;
text-align: center;
background-color: #404c55;
background-image: radial-gradient(#2b343c, #404c55);
}
p {
margin-left: 40px;
margin-right: 40px;
}
#title {
height: 100px;
padding-top: 0px;
margin-top: 0px;
}
h1 {
color: #00b7b7;
}
#button {
color: azure;
font-size: larger;
background-color: #007979;
padding: 8px;
padding-left: 12px;
padding-right: 12px;
text-decoration: none;
border-radius: 14px;
}
.digits {
color: azure;
font-size: xx-large;
font-weight: bolder;
letter-spacing: 0.75ch;
margin-left: 0.75ch;
}
</style>
</head>
<body>
<br><br>
<table>
<tr>
<td>
<img
src="https://owlboard.info/images/logo/wide_logo.svg"
id="title"
alt="OwlBoard Logo"
/>
</td>
</tr>
<tr>
<td>
<h1>Register for OwlBoard</h1>
<br />
<p>
You'll need to type your registration code in to the OwlBoard app
</p>
<br />
<h2>Your Code:</h2>
<span class="digits">987654</span>
<br><br>
<p>
Go back to OwlBoard and enter your code. Go to the registration page and click the link at the top.
</p>
<br /><br /><br />
<p>
This registration is for one device only, you can register again
using the same email address for other devices and access OwlBoard
from elsewhere.
</p>
<p>
If you did not request to sign up to OwlBoard (Staff Version), you
can safely ignore this email. Your email address has not been stored
by us.
</p>
<p>The registration link will expire after 1 hour.</p>
</td>
</tr>
</table>
<br />
</body>
</html>

View File

@ -1,10 +0,0 @@
Complete your OwlBoard (Staff) Registration by entering your six digit code.
987654
Go back to the OwlBoard app, goto "Menu > Registration" and click on the link at the top to enter your code.
If you did not request to register to OwlBoard then you can safely ignore this email.
Your email address has not been stored by us and will not be required unless you wish to register again.
The link will expire after 1 hour.

9186
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -1,45 +1,24 @@
{ {
"name": "owlboard-backend", "dependencies": {
"version": "2024.2.2", "axios": "^1.2.1",
"description": "Provides LDB, PIS and live train details for the OwlBoard web client", "compression": "^1.7.4",
"express": "^4.18.2",
"ldbs-json": "^1.2.1",
"mongodb": "^4.13.0",
"string-sanitizer-fix": "^2.0.1"
},
"name": "owlboard",
"description": "OwlBoard is an API and PWA for live rail departure board in the UK.",
"version": "0.0.1",
"main": "express.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "node app.js"
},
"repository": { "repository": {
"type": "git", "type": "git",
"url": "https://git.fjla.uk/owlboard/backend.git" "url": "https://git.fjla.uk/fred.boniface/owlboard.git"
}, },
"license": "GPL-3.0-or-later",
"author": "Fred Boniface", "author": "Fred Boniface",
"main": "app.js", "license": "GPL-3.0-or-later"
"scripts": {
"build": "tsc",
"run": "tsc && node dist/app.js",
"dev": "tsc && NODE_ENV=development node dist/app.js",
"start": "node app.js",
"test": "jest",
"format": "npx prettier -w ."
},
"dependencies": {
"compression": "^1.7.4",
"cors": "^2.8.5",
"express": "^4.18.2",
"express-rate-limit": "^6.7.0",
"html-minifier": "^4.0.0",
"juice": "^9.0.0",
"ldbs-json": "^1.2.1",
"moment-timezone": "^0.5.43",
"mongodb": "^4.13.0",
"nodemailer": "^6.9.9",
"pino": "^8.15.1",
"redis": "^4.6.7",
"zlib": "^1.0.5"
},
"devDependencies": {
"@owlboard/ts-types": "^1.1.0",
"@types/express": "^4.17.21",
"@types/jest": "^29.5.3",
"eslint": "^8.39.0",
"jest": "^29.6.2",
"prettier": "^2.8.8",
"ts-jest": "^29.1.1",
"typescript": "^5.1.6"
}
} }

View File

@ -0,0 +1,10 @@
module.exports = valid
const valid = [
"owlboard.co.uk",
"fjla.uk",
"gwr.com",
"swrailway.com",
"firstrail.com",
"networkrail.co.uk"
]

View File

@ -1,34 +0,0 @@
export const valid: string[] = [
"owlboard.info",
"avantiwestcoast.co.uk",
"btp.police.uk",
"c2crail.net",
"chilternrailways.co.uk",
"crosscountrytrains.co.uk",
"eastmidlandsrailway.co.uk",
"abellio.co.uk",
"tfl.gov.uk",
"mtrel.co.uk",
"eurostar.com",
"eurotunnel.com",
"ffwhr.com",
"gwr.com",
"hitachirail-eu.com",
"greateranglia.co.uk",
"heathrow.com",
"swrailway.com",
"lsltoc.co.uk",
"lner.co.uk",
"arrivarl.co.uk",
"tube.tfl.gov.uk",
"lumo.co.uk",
"merseyrail.org",
"nrcommcentre.com",
"networkrail.co.uk",
"northernrailway.co.uk",
"scotrail.co.uk",
"southeasternrailway.co.uk",
"tpeexpress.co.uk",
"tfwrail.wales",
"wmtrains.co.uk",
];

View File

@ -1,31 +0,0 @@
// statusCodes should be a map, not an object
export const statusCodes = {
400: "data not found",
700: "no authentication attempt",
701: "invalid credentials",
702: "domain not whitelisted",
703: "registration request not found, maybe expired",
800: "location code not found",
801: "unable to fetch location data",
900: "invalid request format",
901: "email not provided",
950: "upstream server error",
951: "unknown server error",
};
export const msgCodes = new Map<string, string>([
[
"LOC_NOT_FOUND",
"Location not found. If you are sure that the location exists, there may be a fault with the data provider.",
],
[
"USR_NOT_FOUND",
"User is not registered, consider regeristering for access to this resource",
],
["AUTH_ERR", "Authentication Error"],
["OK", "OK"],
]);
module.exports = statusCodes;
//export { statusCodes };

View File

@ -0,0 +1,6 @@
const version = {
api: ["/api/v1/",],
app: "1.0.0"
};
module.exports = version;

View File

@ -1,12 +0,0 @@
interface versions {
api: string[];
app: string;
}
const version: versions = {
api: ["/api/v2"],
app: "2025.03.2",
};
module.exports = version;
export { version };

View File

@ -0,0 +1,58 @@
const find = require('../services/find.services');
async function findName(req, res, next){
try {
var id = req.params.id
res.json(await find.name(id))
} catch (err) {
console.error(`Unknown Error`, err.message);
next(err);
}
}
async function findCrs(req, res, next){
try {
var id = req.params.id
res.json(await find.crs(id))
} catch (err) {
console.error(`Unknown Error`, err.message);
next(err);
}
}
async function findNlc(req, res, next){
try {
var id = req.params.id
res.json(await find.nlc(id))
} catch (err) {
console.error(`Unknown Error`, err.message);
next(err);
}
}
async function findTiploc(req, res, next){
try {
var id = req.params.id
res.json(await find.tiploc(id))
} catch (err) {
console.error(`Unknown Error`, err.message);
next(err);
}
}
async function findStanox(req, res, next){
try {
var id = req.params.id
res.json(await find.stanox(id))
} catch (err) {
console.error(`Unknown Error`, err.message);
next(err);
}
}
module.exports = {
findName,
findCrs,
findNlc,
findTiploc,
findStanox
}

View File

@ -1,17 +1,14 @@
const issue = require("../services/issue.services"); const issue = require('../services/issue.services');
const log = require("../utils/logs.utils");
async function post(req, res, next) { async function post(req, res, next){
try { try {
log.out(`issueControllers.post: Request Body: ${JSON.stringify(req.body)}`); res.json(await issue.processor(req.body))
setCache(res, "no-store") } catch (err) {
res.json(await issue.processor(req.body)); console.error(`Controller Error`, err.message);
} catch (err) { next(err);
console.error("Controller Error", err.message); }
next(err);
}
} }
module.exports = { module.exports = {
post, post
}; }

View File

@ -0,0 +1,34 @@
const kube = require('../services/kube.services');
async function getAlive(req, res, next){
try {
var state = kube.getAlive()
res.status((await state).code).send((await state).state)
} catch (err) {
res.status("503").send({state: "error"})
}
}
async function getReady(req, res, next){
try {
res.json(await kube.getReady(req.body))
} catch (err) {
console.error(`Unknown Error`, err.message);
next(err);
}
}
async function getTime(req, res, next){
try {
res.json(await kube.getTime(req.body))
} catch (err) {
console.error(`Unknown Error`, err.message);
next(err);
}
}
module.exports = {
getAlive,
getReady,
getTime
}

View File

@ -1,98 +1,15 @@
const ldb = require("../services/ldb.services"); const ldb = require('../services/ldb.services');
import { setCache } from "../utils/cacheHeader.utils"; async function get(req, res, next){
import { logger } from "../utils/logger.utils"; try {
var id = req.params.id
async function getTrain(req, res, next) { res.json(await ldb.get(req.body, id))
// API v2 Only } catch (err) {
if (!req.isAuthed) { console.error(`Unknown Error`, err.message);
const err = new Error("Unauthorized"); next(err);
err.status = 401;
throw err;
}
setCache(res, "private", 240)
let type = req.params.searchType;
let id = req.params.id;
try {
switch (type.toLowerCase()) {
case "rid":
res.json(await ldb.getServiceByRID(id));
break;
case "uid":
case "headcode":
case "rsid":
res.json(await ldb.getServicesByOther(id));
break;
default:
res
.status(400)
.json({ status: "error", message: "Invalid search type" });
} }
} catch (err) {
err.status = 500;
console.error("Unknown Error", err.message);
next(err);
}
}
async function getStation(req, res, next) {
// API v2 Only
let type = req.params.type;
let id = req.params.id;
try {
if (type == "staff") {
if (!req.isAuthed) {
const err = new Error("Unauthorized");
err.status = 401;
return next(err);
}
const data = await ldb.get(id, true);
// Only cache if data is present
if (data.data) {
setCache(res, "public", 120);
} else {
setCache(res, "no-store", 120);
}
res.json(data);
} else {
setCache(res, "public", 240)
res.json(await ldb.get(id, false));
}
} catch (err) {
setCache(res, "no-store")
console.error("Unknown Error", err.message);
err.status = 500;
next(err);
}
}
async function getNearest(req, res, next) {
// API v2 Only
let latitude = req.params.latitude;
let longitude = req.params.longitude;
try {
if (!req.isAuthed) {
const err = new Error("Unauthorized");
err.status = 401;
return next(err)
}
const data = await ldb.getNearestStations(latitude, longitude)
if (data) {
setCache(res, "private", 120)
} else {
setCache(res, "no-store", 120)
}
res.json(data)
} catch (err) {
setCache(res, "no-store")
logger.Error("Error fetching nearest station")
err.status = 500;
next(err)
}
} }
module.exports = { module.exports = {
getTrain, get
getStation, }
getNearest,
};

View File

@ -0,0 +1,34 @@
const list = require('../services/list.services');
async function getStations(req, res, next){
try {
res.json(await list.getStations(req.body))
} catch (err) {
console.error(`Controller Error`, err.message);
next(err);
}
}
async function getCorpus(req, res, next){
try {
res.json(await list.getCorpus(req.body))
} catch (err) {
console.error(`Controller Error`, err.message);
next(err);
}
}
async function hits(req, res, next) {
try {
res.json(await list.hits())
} catch (err) {
console.error(`Controller Error`, err);
next(err);
}
}
module.exports = {
getStations,
getCorpus,
hits
}

View File

@ -1,40 +0,0 @@
import { Request, Response, NextFunction } from "express";
import { findPisByOrigDest, findPisByCode } from "../services/pis.services";
import { setCache } from "../utils/cacheHeader.utils";
async function byStartEndCRS(req: Request, res: Response, next: NextFunction) {
// if (!req.isAuthed) {
// const err = new Error("Unauthorized");
// err.status = 401;
// return next(err);
// }
try {
let startCrs = req.params.startCrs;
let endCrs = req.params.endCrs;
setCache(res, "public", 600)
res.json(await findPisByOrigDest(startCrs, endCrs));
} catch (err: any) {
console.error("Unknown Error", err.message);
return next(err);
}
}
async function byCode(req: Request, res: Response, next: NextFunction) {
// if (!req.isAuthed) {
// const err = new Error("Unauthorized");
// err.status = 401;
// return next(err);
// }
try {
let code = req.params.code;
res.json(await findPisByCode(code));
} catch (err: any) {
console.error("Unknown Error", err.message);
return next(err);
}
}
module.exports = {
byStartEndCRS,
byCode,
};

View File

@ -1,59 +0,0 @@
/* API V2 Exclusive Controller */
import { setCache } from "../utils/cacheHeader.utils";
const ldb = require("../services/ldb.services");
const find = require("../services/find.services");
async function getReasonCode(req, res, next) {
try {
const code = req.params.code;
if (code === "all") {
setCache(res, "public", 604800)
res.json(await ldb.getReasonCodeList());
next;
}
res.json(await ldb.getReasonCode(code));
next;
} catch (err) {
console.error("ERROR", err.message);
err.status = 500;
setCache(res, "no-store", 5)
next(err);
}
}
async function getLocationReference(req, res, next) {
try {
const searchType = req.params.searchType;
const id = req.params.id;
setCache(res, "public", 604800)
switch (searchType) {
case "name":
res.json(await find.name(id));
break;
case "crs": // Same as 3alpha
case "3alpha":
res.json(await find.crs(id));
break;
case "nlc":
res.json(await find.nlc(id));
break;
case "tiploc":
res.json(await find.tiploc(id));
break;
case "stanox":
res.json(await find.stanox(id));
break;
}
} catch (err) {
console.error("ERROR", err.message);
err.status = 500;
next(err);
}
}
module.exports = {
getReasonCode,
getLocationReference,
};

View File

@ -1,52 +0,0 @@
const reg = require("../services/registration.services");
async function register(req, res, next) {
try {
let response = await reg.regUser(req.body);
res.status(response.status).json(response);
} catch (err) {
console.error("Controller Error", err.message);
next(err);
}
}
async function request(req, res, next) {
try {
let response = await reg.createRegKey(req.body);
res.status(response.status).json(response);
} catch (err) {
console.error(err);
next(err);
}
}
async function getUser(req, res, next) {
try {
let uuid = req.params.uuid;
let data = await reg.getUser(uuid);
if (data.status) {
res.status(data.status).json(data);
} else {
res.json(data);
}
} catch (err) {
console.error(err);
next(err);
}
}
async function checkAuth(req, res, next) {
if (!req.isAuthed) {
res.status(401).body("Not Authorised");
} else {
res.status(200).body("Authorised Successfully");
}
next();
}
module.exports = {
register,
request,
getUser,
checkAuth,
};

View File

@ -1,32 +1,13 @@
import { setCache } from "../utils/cacheHeader.utils"; const stat = require('../services/stats.services');
const stat = require("../services/stats.services"); async function get(req, res, next) {
try {
async function versions(req, res, next) { res.json(await stat.hits())
// API v2 } catch (err) {
try { console.error(`Controller Error`, err);
setCache(res, "public", 60) next(err);
res.json(await stat.getVersions()); }
} catch (err) {
console.error("Controller Error", err);
err.status = 500;
next(err);
}
}
async function statistics(req, res, next) {
// Api v2
try {
setCache(res, "public", 60)
res.json(await stat.statistics());
} catch (err) {
console.error("Controller Error", err);
err.status = 500;
next(err);
}
} }
module.exports = { module.exports = {
versions, get}
statistics,
};

View File

@ -1,59 +0,0 @@
import { setCache } from "../utils/cacheHeader.utils";
import { logger } from "../utils/logger.utils";
const train = require("../services/trainService.services");
/**
 * DEPRECATED route handler — slated for removal.
 * Returns today's services matching the :id headcode.
 */
async function getByHeadcodeToday(req, res, next) {
  logger.warn("Deprecated Function Called - trainService.services-getByHeadcodeToday")
  try {
    res.json(await train.findByHeadcodeToday(req.params.id));
  } catch (err) {
    console.error("Unknown Error", err.message);
    err.status = 500;
    next(err);
  }
}
/**
 * Timetable search: GET /:date/:searchType/:id
 * Supports searchType "headcode" or "byTrainUid"; anything else is a 404.
 * Successful responses are privately cacheable for 30 minutes.
 */
async function get(req, res, next) {
  const { date, searchType, id } = req.params;
  try {
    if (searchType === "headcode") {
      setCache(res, "private", 1800)
      res.json(await train.findByHeadcode(id, date));
    } else if (searchType === "byTrainUid") {
      setCache(res, "private", 1800)
      res.json(await train.findByTrainUid(id, date));
    } else {
      res.status(404).json({
        status: "error",
        message: `Invalid search type "${searchType}"`,
      });
    }
  } catch (err) {
    console.error(err.message);
    err.status = 500;
    next(err);
  }
}
module.exports = {
getByHeadcodeToday,
get,
};

View File

@ -1,47 +0,0 @@
import type { NextFunction, Request, Response } from "express";
import { logger } from "../utils/logger.utils";
import { isAuthed } from "../utils/auth.utils";
/**
 * Express middleware: sets `req.isAuthed` from the `uuid` request header.
 * It never blocks the request itself — downstream handlers decide what an
 * unauthenticated request may do. In development mode access is always
 * granted. The previous version duplicated the whole auth flow across
 * separate string/array branches; the header is now normalised first.
 */
module.exports = async function authCheck(
  req: Request,
  res: Response,
  next: NextFunction
) {
  logger.debug("auth.middleware: Auth check begun");
  if (process.env.NODE_ENV === "development") {
    req.isAuthed = true;
    logger.warn("auth.middleware: DEV MODE - Access Granted");
    return next();
  }
  const header: string | string[] | undefined = req.headers.uuid;
  // The header may legitimately arrive as an array; authenticate against
  // its first entry, exactly as the old array branch did.
  const id = Array.isArray(header) ? header[0] : header;
  if (typeof id === "string" && (await isAuthed(id))) {
    req.isAuthed = true;
  } else {
    req.isAuthed = false;
    if (Array.isArray(header)) {
      logger.warn(
        "auth.middleware: UUID Passed as Array - Authentication Failed"
      );
    } else {
      logger.info("auth.middleware: Authentication failed");
    }
  }
  next();
};

View File

@ -1,12 +0,0 @@
const log = require("../utils/logs.utils");
module.exports = async function requireJson(req, res, next) {
if (req.headers["content-type"] !== "application/json") {
log.out("requireJson.middlewares: Bad Request: Not in JSON format");
res.status(400).send({ status: 400, message: "Server requires JSON" });
} else {
next();
}
};
// Possibly want to check the req type?

23
src/routes/find.routes.js Normal file
View File

@ -0,0 +1,23 @@
const express = require('express');
const router = express.Router();
const findController = require('../controllers/find.controllers');

// CORPUS lookup routes — each resolves a location by a different identifier.
// (Removed the dead tutorial boilerplate comments that previously padded
// this file.)
router.get('/name/:id', findController.findName);
router.get('/crs/:id', findController.findCrs);
router.get('/nlc/:id', findController.findNlc);
router.get('/tiploc/:id', findController.findTiploc);
router.get('/stanox/:id', findController.findStanox);

module.exports = router;

View File

@ -0,0 +1,7 @@
const express = require('express');
const router = express.Router();
const issueController = require('../controllers/issue.controllers');
// POST /: forwards a user-submitted issue report to the issue controller.
router.post('/', issueController.post);
module.exports = router;

View File

@ -0,0 +1,9 @@
const express = require('express');
const router = express.Router();
const kubeController = require('../controllers/kube.controllers');

// Kubernetes-style health endpoints.
router.get('/alive', kubeController.getAlive); // liveness probe
router.get('/ready', kubeController.getReady); // readiness probe
router.get('/time', kubeController.getTime); // server clock check

module.exports = router;

19
src/routes/ldb.routes.js Normal file
View File

@ -0,0 +1,19 @@
const express = require('express');
const router = express.Router();
const ldbController = require('../controllers/ldb.controllers');

// GET /:id — live departure board for the given station identifier.
// (Removed the dead tutorial boilerplate comments.)
router.get('/:id', ldbController.get);

module.exports = router;

20
src/routes/list.routes.js Normal file
View File

@ -0,0 +1,20 @@
const express = require('express');
const router = express.Router();
const listController = require('../controllers/list.controllers');

// Static reference lists.
// (Removed the dead tutorial boilerplate comments.)
router.get('/stations', listController.getStations);
router.get('/corpus', listController.getCorpus);

module.exports = router;

View File

@ -1,10 +0,0 @@
const express = require("express");
const router = express.Router();
const ldbCtr = require("../controllers/ldb.controllers");
// PIS
router.get("/station/:id/:type", ldbCtr.getStation);
router.get("/station/nearest/:latitude/:longitude", ldbCtr.getNearest);
router.get("/train/:searchType/:id", ldbCtr.getTrain);
module.exports = router;

View File

@ -1,15 +0,0 @@
const express = require("express");
const router = express.Router();
// Controller Imports
const issueCtr = require("../controllers/issue.controllers");
const statCtr = require("../controllers/stats.controllers");
// Routes
router.get("/server/stats", statCtr.statistics);
router.get("/server/versions", statCtr.versions);
router.post("/issue", issueCtr.post);
module.exports = router;

View File

@ -1,9 +0,0 @@
const express = require("express");
const router = express.Router();
const pisCtr = require("../controllers/pis.controllers");
// PIS
router.get("/byCode/:code", pisCtr.byCode);
router.get("/byStartEnd/:startCrs/:endCrs", pisCtr.byStartEndCRS);
module.exports = router;

View File

@ -1,9 +0,0 @@
const express = require("express");
const router = express.Router();
const refCtr = require("../controllers/ref.controllers");
// Ref
router.get("/reasonCode/:code", refCtr.getReasonCode);
router.get("/locationCode/:searchType/:id", refCtr.getLocationReference);
module.exports = router;

View File

@ -0,0 +1,8 @@
const express = require('express');
const router = express.Router();
const statsController = require('../controllers/stats.controllers');
router.get('/', statsController.get);
module.exports = router;

View File

@ -1,8 +0,0 @@
const express = require("express");
const router = express.Router();
const ttCtr = require("../controllers/train.controllers");
// PIS
router.get("/train/:date/:searchType/:id", ttCtr.get);
module.exports = router;

View File

@ -1,12 +0,0 @@
const express = require("express");
const router = express.Router();
const regCtr = require("../controllers/registration.controllers");
// User
router.get("/:uuid", regCtr.getUser);
router.get("/checkAuth", regCtr.checkAuth);
// Not Implemented router.delete('/:uuid', regCtr.deleteUser);
router.post("/request", regCtr.request);
router.post("/register", regCtr.register);
module.exports = router;

View File

@ -1,143 +1,39 @@
import { logger } from "../utils/logger.utils"; const log = require('../utils/log.utils'); // Log Helper
const dbUser = process.env.OWL_DB_USER || "owl"; const dbUser = process.env.OWL_DB_USER || "owl"
const dbPass = process.env.OWL_DB_PASS || "twittwoo"; const dbPass = process.env.OWL_DB_PASS || "twittwoo"
const dbName = process.env.OWL_DB_NAME || "owlboard"; const dbName = process.env.OWL_DB_NAME || "owlboard"
const dbPort = process.env.OWL_DB_PORT || 27017; const dbPort = process.env.OWL_DB_PORT || 27017
const dbHost = process.env.OWL_DB_HOST || "localhost"; const dbHost = process.env.OWL_DB_HOST || "localhost"
const uri = `mongodb://${dbUser}:${dbPass}@${dbHost}:${dbPort}`; const uri = `mongodb://${dbUser}:${dbPass}@${dbHost}:${dbPort}`;
const connOpts = {
useUnifiedTopology: true,
authSource: "owlboard",
}
const { MongoClient } = require("mongodb"); const { MongoClient } = require('mongodb');
const client = new MongoClient(uri, connOpts); const client = new MongoClient(uri);
const db = client.db(dbName); const db = client.db(dbName);
async function query(collection, query, returnId = false) { async function query(collection, query){
await client.connect(); await client.connect();
logger.trace(`dbAccess.query: Connecting to collection: '${collection}'`); log.out(`dbAccess.query: Connecting to collection: '${collection}'`)
var qcoll = db.collection(collection); var qcoll = db.collection(collection);
var qcursor = qcoll.find(query); var qcursor = qcoll.find(query)
if (!returnId) { qcursor.project({_id: 0})
qcursor.project({ _id: 0 }); log.out(`dbAccess.query: Running Query: ${JSON.stringify(query)}`)
} increment(collection)
logger.trace(query, "dbAccess.query: Runnung Query"); return (await qcursor.toArray());
increment(collection);
let result = await qcursor.toArray();
logger.trace(result, "dbAccess.query: Response");
return result;
}
async function queryProject(collection, query, projection) {
await client.connect();
logger.debug(`dbAccess.queryProject: Connecting to col: '${collection}'`);
const qcoll = db.collection(collection);
const qcursor = qcoll.find(query).project(projection);
logger.debug(
projection,
`dbAccess.query: Running Query: ${JSON.stringify(query)}`
);
increment(collection);
return await qcursor.toArray();
}
async function queryAggregate(collection, pipeline) {
await client.connect();
logger.debug(`dbAccess.queryProject: Connecting to col: '${collection}'`);
logger.trace(pipeline, "dbAccess.query: Running Aggregation");
increment(collection);
return await db.collection(collection).aggregate(pipeline).toArray();
} }
async function increment(target) { async function increment(target) {
logger.debug(`dbAccess.increment: Incrementing counter for: ${target}`); log.out(`dbAccess.increment: Incrementing counter for: ${target}`)
await client.connect(); await client.connect();
let col = db.collection("meta"); let col = db.collection("meta");
let update = {}; let update = {}
update[target] = 1; update[target] = 1
col.updateOne({ target: "counters" }, { $inc: update }); col.updateOne({target: "counters"}, {$inc:update})
} return;
async function addUser(uuid, domain) {
// Needs testing
logger.debug("dbAccess.addUser: Adding user to database");
let doc = { uuid: uuid, domain: domain, atime: new Date() };
await client.connect();
let col = db.collection("users");
let res = await col.insertOne(doc);
if (res.insertedId) {
return true;
}
return false;
}
async function addRegReq(uuid, domain) {
// Needs testing
logger.debug("dbAccess.addRegReq: Adding registration request");
let doc = { uuid: uuid, time: new Date(), domain: domain };
await client.connect();
let col = db.collection("registrations");
let res = col.insertOne(doc);
return res;
}
async function userAtime(uuid) {
// Needs testing
logger.debug("dbAccess.userAtime: Updating access time for user");
let q = { uuid: uuid };
let n = { $set: { uuid: uuid, atime: new Date() } };
await client.connect();
let col = db.collection("users");
let res = col.updateOne(q, n, { upsert: true });
return res;
}
// Deletes one single registration request entry from the DB
async function delRegReq(uuid) {
logger.debug("dbAccess.delRegReq: Deleting a Registration Request");
let collection = "registrations";
await client.connect();
let col = db.collection(collection);
col.deleteOne({ uuid: uuid });
}
async function colCount(collection) {
logger.debug(
`dbAccess.colCount: Counting entries in collection: ${collection}`
);
await client.connect();
let col = db.collection(collection);
let count = col.countDocuments();
logger.debug(
`dbAccess.colCount: Collection: ${collection} contains ${count}` +
" documents"
);
return await count;
} }
module.exports = { module.exports = {
query, query,
queryProject, increment
queryAggregate, }
increment,
addUser,
userAtime,
addRegReq,
delRegReq,
colCount,
};
export {
query,
queryProject,
queryAggregate,
increment,
addUser,
userAtime,
addRegReq,
delRegReq,
colCount,
};

View File

@ -0,0 +1,59 @@
// Parse and return a find request
const log = require('../utils/log.utils'); // Log Helper
const db = require('../services/dbAccess.services');
const san = require('../utils/sanitizer.utils')
// DB Query: query(collection, query)
// Define collection as all queries are for the "corpus" collection.
const col = "corpus"
async function name(id){
log.out(`findServices.name: Finding station name: ${id}`)
var name = san.cleanApiEndpointTxt(id.toUpperCase())
query = {NLCDESC: name}
//var data = await db.query(col,query)
return await db.query(col,query)
}
async function crs(id){
log.out(`findServices.crs: Finding crs: ${id}`)
var crs = san.cleanApiEndpointTxt(id.toUpperCase())
query = {'3ALPHA': crs}
//var data = await db.query(col,query)
return await db.query(col,query)
}
async function nlc(id){
log.out(`findServices.nlc: Finding nlc: ${id}`)
var nlc = san.cleanApiEndpointNum(id)
query = {NLC: parseInt(nlc)}
log.out(`findServices.nlc: NLC Converted to int: ${query}`)
//var data = await db.query(col,query)
return await db.query(col,query)
}
async function tiploc(id){
log.out(`findServices.tiploc: Finding tiploc: ${id}`)
var tiploc = san.cleanApiEndpointTxt(id.toUpperCase())
query = {TIPLOC: tiploc}
//var data = await db.query(col,query)
return await db.query(col,query)
}
async function stanox(id){
log.out(`findServices.stanox: Finding stanox: ${id}`)
var stanox = san.cleanApiEndpointNum(id)
query = {STANOX: String(stanox)}
//var data = await db.query(col,query)
return await db.query(col,query)
}
module.exports = {
name,
crs,
nlc,
tiploc,
stanox
}

View File

@ -1,55 +0,0 @@
// Parse and return a find request
import { query } from "../services/dbAccess.services";
import {
cleanApiEndpointTxt,
cleanApiEndpointNum,
} from "../utils/sanitizer.utils";
import { logger } from "../utils/logger.utils";
// Define collection as all queries are for the "corpus" collection.
const col: string = "corpus";
// Each finder performs a single CORPUS lookup keyed on a different field.

/** Look up CORPUS entries by full location name. */
async function name(id: string) {
  logger.debug(`findServices.name: Finding station name: ${id}`);
  const cleanName = cleanApiEndpointTxt(id.toUpperCase());
  const queryObj = { NLCDESC: cleanName };
  return await query(col, queryObj);
}

/** Look up CORPUS entries by CRS (3-alpha) code. */
async function crs(id: string) {
  logger.debug(`findServices.crs: Finding crs: ${id}`);
  const cleanCrs = cleanApiEndpointTxt(id.toUpperCase());
  const queryObj = { "3ALPHA": cleanCrs };
  return await query(col, queryObj);
}

/** Look up CORPUS entries by NLC (stored numerically in the DB). */
async function nlc(id: string) {
  logger.debug(`findServices.nlc: Finding nlc: ${id}`);
  const cleanNlc = cleanApiEndpointNum(id);
  const queryObj = { NLC: parseInt(cleanNlc, 10) };
  // BUG FIX: this trace previously interpolated the imported `query`
  // *function* rather than the query object being sent to the database.
  logger.trace(
    `findServices.nlc: NLC Converted to int: ${JSON.stringify(queryObj)}`
  );
  return await query(col, queryObj);
}

/** Look up CORPUS entries by TIPLOC. */
async function tiploc(id: string) {
  logger.debug(`findServices.tiploc: Finding tiploc: ${id}`);
  const cleanTiploc = cleanApiEndpointTxt(id.toUpperCase());
  const queryObj = { TIPLOC: cleanTiploc };
  return await query(col, queryObj);
}

/** Look up CORPUS entries by STANOX (stored as a string in the DB). */
async function stanox(id: string) {
  logger.debug(`findServices.stanox: Finding stanox: ${id}`);
  const cleanStanox = cleanApiEndpointNum(id);
  const queryObj = { STANOX: String(cleanStanox) };
  return await query(col, queryObj);
}

module.exports = {
  name,
  crs,
  nlc,
  tiploc,
  stanox,
};

View File

@ -1,52 +1,33 @@
import { logger } from "../utils/logger.utils"; const axios = require('axios')
const log = require('../utils/log.utils')
const issueLabels = {
bug: 120,
enhancement: 122,
question: 125,
"user-support": 152,
"web-user": 153,
};
async function processor(data) { async function processor(data) {
logger.debug("issueService.processor: Issue received"); log.out(`issueService.processor: Issue received`)
let out = {}; let out = {}
out.labels = [issueLabels[data?.label] || 0, issueLabels["web-user"]]; out.title = data.subject.replace(/<[^>]+>|[\*\$]/g, '');
out.title = data?.subject.replace(/<[^>]+>|[\*\$]/g, ""); out.body = data.msg.replace(/<[^>]+>|[\*\$]/g, '')
out.body = data?.msg.replace(/<[^>]+>|[\*\$]/g, ""); sendToGitea(out);
return await sendToGitea(out);
} }
async function sendToGitea(body) { async function sendToGitea(body) {
try { let key = process.env.OWL_GIT_ISSUEBOT
const key = process.env.OWL_GIT_ISSUEBOT; let url = process.env.OWL_GIT_APIENDPOINT
const url = process.env.OWL_GIT_APIENDPOINT; let opts = {
const opts = { headers: {
method: "POST", Authorization: key
headers: { }
Authorization: key, }
"Content-Type": "application/json", var res = await axios.post(url, body, opts)
}, // Need to read the output from the POST and pass the result upwards to the client.
body: JSON.stringify(body), if (res.status == 201) {
}; log.out("issueService.sendToGitea: Issue sent to Gitea")
return {status: res.status,message:"issue created"}
const res = await fetch(url, opts); } else {
log.out("issueService.sendToGitea: Failed to send issue to Gitea")
if (res.status === 201) { return {status: res.status,message:"issue not created"}
logger.debug("issueService.sendToGitea: Issue created");
return { status: res.status, message: "issue created" };
} else {
logger.error(
`issueService.sendtoGitea: Error creating issue RETURN: ${res.status}`
);
return { status: res.status, message: "issue not created" };
} }
} catch (err) {
logger.error(err, `issueService.sendToGitea`);
return { status: 500, message: "Internal Server Error" };
}
} }
module.exports = { module.exports = {
processor, processor
}; }

View File

@ -0,0 +1,20 @@
async function getAlive(){
log.out(`kubeServices.getAlive: alive hook checked`)
return {code: 200, state: {state: "alive",noise: "twit-twoo"}}
}
async function getReady(){
log.out(`kubeServices.getReady: ready hook checked`)
return "not_implemented";
};
async function getTime(){
var now = new Date()
return {responseGenerated: now}
}
module.exports = {
getAlive,
getReady,
getTime
}

View File

@ -1,22 +0,0 @@
import { logger } from "../utils/logger.utils";
/** Liveness probe payload for the /alive endpoint. */
async function getAlive() {
  logger.trace("kubeServices.getAlive: alive hook checked");
  return { code: 200, state: { state: "alive", noise: "twit-twoo" } };
}

/** Readiness probe — real readiness checking is not yet implemented. */
async function getReady() {
  logger.trace("kubeServices.getReady: ready hook checked");
  return "not_implemented";
}

/** Current server time, for client clock-skew checks. */
async function getTime() {
  const generated: Date = new Date();
  return { responseGenerated: generated };
}
module.exports = {
getAlive,
getReady,
getTime,
};

View File

@ -1,218 +1,46 @@
// Parse and return an LDB Request // Parse and return an LDB Request
const ldb = require("ldbs-json"); const log = require('../utils/log.utils'); // Log Helper
const util = require("../utils/ldb.utils"); const ldb = require('ldbs-json')
const san = require("../utils/sanitizer.utils"); const util = require('../utils/ldb.utils')
const db = require("../services/dbAccess.services"); const san = require('../utils/sanitizer.utils')
const db = require('../services/dbAccess.services')
import { findStationsByDistancePipeline } from "../utils/ldbPipeline.utils"; const ldbKey = process.env.OWL_LDB_KEY
import { logger } from "../utils/logger.utils"; const ldbsvKey = process.env.OWL_LDB_SVKEY
import { transform as staffStationTransform } from "../utils/processors/ldb/staffStation"; async function get(body, id){
var cleanId = san.cleanApiEndpointTxt(id);
var obj = await util.checkCrs(cleanId);
const ldbKey = process.env.OWL_LDB_KEY;
const ldbsvKey = process.env.OWL_LDB_SVKEY;
async function get(id, staff = false) {
const cleanId = san.cleanApiEndpointTxt(id);
const obj = await util.checkCrs(cleanId);
try {
const crs = obj[0]["3ALPHA"];
logger.debug(`ldbService.get: Determined CRS for lookup to be: ${crs}`);
if (staff) {
const data = arrDepBoardStaff(crs);
db.increment("ldbsvws");
return await data;
} else {
const data = arrDepBoard(crs);
db.increment("ldbws");
return await data;
}
} catch (err) {
logger.error(err, "ldbService.get: Error, Unable to find CRS");
return {
obStatus: "LOC_NOT_FOUND",
obMsg: "Location is not available",
};
}
}
async function arrDepBoard(CRS) {
logger.trace(`ldbService.arrDepBoard: Trying to fetch board for ${CRS}`);
try {
const options = {
numRows: 10,
crs: CRS.toUpperCase(),
};
const api = new ldb(ldbKey, false);
let d = await api.call("GetArrDepBoardWithDetails", options, false, false);
return await util.cleanData(d);
} catch (err) {
logger.error(err, "ldbService.arrDepBoard: Lookup Failed");
return {
GetStationBoardResult: "not available",
Reason: `The CRS code ${CRS} is not valid`,
};
}
}
async function arrDepBoardStaff(CRS) {
logger.debug(`ldbService.arrDepBoardStaff: Try to fetch board for ${CRS}`);
try {
const options = {
numRows: 40,
crs: CRS.toUpperCase(),
getNonPassengerServices: true,
time: await getDateTimeString(new Date()),
timeWindow: 120,
services: "PBS",
};
const api = new ldb(ldbsvKey, true);
console.time(`Fetch Staff LDB for ${CRS.toUpperCase()}`);
let result
try { try {
result = await staffApiCallRetry( var crs = obj[0]['3ALPHA'];
api, log.out(`ldbService.get: Determined CRS for lookup to be: ${crs}`);
"GetArrivalDepartureBoardByCRS", var data = arrDepBoard(crs);
options, db.increment("ldbws");
5, await data;
);
} catch (err) { } catch (err) {
logger.error(err, "Error fetching board data"); log.out(`ldbService.get: Error, Unable to find CRS: ${err}`)
return {obStatus: "Error", obMsg: "Error fetching data from National Rail", data: null} var data = {ERROR:'NOT_FOUND',description:'The entered station was not found. Please check and try again.'};
} }
console.timeEnd(`Fetch Staff LDB for ${CRS.toUpperCase()}`); return data;
}
async function arrDepBoard(CRS){
log.out(`ldbService.arrDepBoard: Trying to fetch ArrDep Board for ${CRS}`)
try { try {
const _staffLdb = staffStationTransform(result); var options = {
logger.debug("StaffLDB Transformed"); numRows: 10,
logger.trace(_staffLdb, "StaffLDB Transformed"); crs: CRS.toUpperCase()
return {
obStatus: "OK",
obMsg: "OK",
data: _staffLdb,
};
} catch (err) {
logger.error(err, "Transformation Error");
}
return result;
} catch (err) {
logger.error(err, "ldbService.arrDepBoardStaff error");
return {
GetStationBoardResult: "not available",
Reason: `The CRS code ${CRS} is not valid`,
};
}
}
async function getServiceByRID(rid) {
logger.debug(`ldbService.getServiceByRID: Finding RID: ${rid}`);
try {
const options = {
rid: String(rid),
};
const api = new ldb(ldbsvKey, true);
return await api.call("GetServiceDetailsByRID", options, false, false);
} catch (err) {
logger.error(err, `ldbService.queryService`);
}
}
async function getServicesByOther(id) {
logger.debug(`ldbService.getServiceByOther: Finding services: ${id}`);
try {
const options = {
serviceID: id,
sdd: getDateString(new Date()),
};
const api = new ldb(ldbsvKey, true);
return await api.call("QueryServices", options, false, false);
} catch (err) {
logger.error(err, "ldbService.getServiceByOther");
return false;
}
}
async function staffApiCallRetry(api, method, options, retries) {
for (let i=0; i < retries; i++) {
try {
return await api.call(method, options, false, false);
} catch (err) {
if (err.code === 'ENOTFOUND') {
logger.warn(err, "DNS ERR")
if (i < retries - 1) {
logger.debug('Retrying API Call')
await delay(500)
continue;
} }
} var api = new ldb(ldbKey,false)
throw err; var reply = api.call("GetArrDepBoardWithDetails", options, false, false)
return await reply
} catch (err) {
log.out(`ldbService.arrDepBoard: Lookup Failed for: ${CRS}`)
return {GetStationBoardResult: "not available", Reason: `The CRS code ${CRS} is not valid`, Why: `Sometimes a station will have more than one CRS - for example Filton Abbey Wood has FIT and FAW however schedules are only available when looking up with FIT - this is how the National Rail Enquiries systems work.`};
} }
} };
throw new Error("Max retries exceeded");
}
function delay(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
async function getReasonCodeList() {
logger.debug("ldbService.getReasonCodeList: Fetching reason code list");
try {
const dbFilter = {};
return await db.query("reasonCodes", dbFilter, false);
} catch (err) {
logger.error(err, "ldbService.getReasonCodeList");
}
}
async function getReasonCode(code) {
logger.debug(`ldbService.getReasonCode: Fetching reason code ${code}`);
try {
const dbFilter = {
code: code,
};
return await db.query("reasonCodes", dbFilter, false);
} catch (err) {
logger.error(err, "ldbService.getReasonCode");
}
}
async function getNearestStations(lat, long) {
logger.debug(`ldbService.getNearestStations: Fetching nearest stations`)
let pipeline = findStationsByDistancePipeline(4, lat, long)
try {
return await db.queryAggregate("stations", pipeline)
} catch (err) {
logger.error(err, `ldbService.getNearestStations`)
}
}
async function getDateTimeString(date) {
const year = date.getFullYear(),
month = String(date.getMonth() + 1).padStart(2, "0"),
day = String(date.getDate()).padStart(2, "0"),
hour = String(date.getHours()).padStart(2, "0"),
minute = String(date.getMinutes()).padStart(2, "0"),
second = String(date.getSeconds()).padStart(2, "0");
const format = `${year}-${month}-${day}T${hour}:${minute}:${second}`;
return format;
}
async function getDateString(date) {
const year = date.getFullYear(),
month = String(date.getMonth() + 1).padStart(2, "0"),
day = String(date.getDate()).padStart(2, "0");
const format = `${year}-${month}-${day}`;
return format;
}
module.exports = { module.exports = {
get, get
getServiceByRID, }
getServicesByOther,
getReasonCodeList,
getReasonCode,
getNearestStations,
};

View File

@ -0,0 +1,20 @@
const log = require('../utils/log.utils'); // Log Helper
const db = require('../services/dbAccess.services')
const os = require('os')
async function getStations(){
var out = db.query("stations")
log.out(`listServices.getStations: Fetching stations list`)
return await out;
}
async function getCorpus(){
var out = db.query("corpus")
log.out(`listServices.getCorpus: Fetching CORPUS list`)
return await out;
}
module.exports = {
getStations,
getCorpus
}

View File

@ -1,19 +0,0 @@
import { query } from "./dbAccess.services";
import { logger } from "../utils/logger.utils";
// Reference-list fetchers backed by the shared DB access layer.

/** Returns the list of all known stations. */
async function getStations() {
  const pending = query("stations");
  logger.debug("listServices.getStations: Fetching stations list");
  return await pending;
}

/** Returns the full CORPUS location reference data set. */
async function getCorpus() {
  const pending = query("corpus");
  logger.debug("listServices.getCorpus: Fetching CORPUS list");
  return await pending;
}

module.exports = {
  getStations,
  getCorpus,
};

View File

@ -1,45 +0,0 @@
const mail = require("nodemailer");
import { logger } from "../utils/logger.utils";
export interface msgDescriptor {
to: string;
cc?: string;
bcc?: string;
subject: string;
txt: string;
html?: string;
from: string;
}
const fromAddr = process.env.OWL_EML_FROM;
const smtpUser = process.env.OWL_EML_USER;
const smtpPass = process.env.OWL_EML_PASS;
const smtpHost = process.env.OWL_EML_HOST;
const smtpPort = process.env.OWL_EML_PORT;
const transporter = mail.createTransport({
host: smtpHost,
port: smtpPort,
secure: false, // Must be false for STARTTLS on port 587 which is always secure
auth: {
user: smtpUser,
pass: smtpPass,
},
});
/**
 * Sends `message` through the configured SMTP transport.
 * The `from` address is always overwritten with the service address.
 * @returns true when the transport accepts the message, false on failure.
 */
async function send(message: msgDescriptor): Promise<boolean> {
  logger.debug("mailServices.send: Message send request received");
  message.from = fromAddr || "no-reply@owlboard.info";
  let smtpReply;
  try {
    smtpReply = await transporter.sendMail(message);
  } catch (err) {
    logger.error(err, "mailServices.send: Message send failed");
    return false;
  }
  logger.debug(smtpReply.response, "mailServices.send: SMTP Response");
  return true;
}
module.exports = {
send,
};

View File

@ -1,142 +0,0 @@
// Finds PIS Codes using DB Lookups
const db = require("../services/dbAccess.services");
const clean = require("../utils/sanitizer.utils");
import type { OB_Pis_SimpleObject } from "@owlboard/ts-types";
import { logger } from "../utils/logger.utils";
import { queryAggregate } from "./dbAccess.services";
import {
getPartialEndTiplocMatchPipeline,
getFullTiplocMatchPipeline,
getPartialStartTiplocMatchPipeline,
} from "../utils/pis.utils";
import { Document } from "mongodb";
export const supported = ["GW", "UK", "HX"];
// Finds PIS documents whose stop list both contains the two CRS codes and
// starts at `start` / ends at `end` exactly.
export async function findPisByOrigDest(start: string, end: string) {
  logger.debug(
    `pisServices.findPisByOrigDest: Searching for Orig: ${start}, Dest: ${end}`
  );
  const firstCrs = clean.cleanApiEndpointTxt(start.toLowerCase());
  const lastCrs = clean.cleanApiEndpointTxt(end.toLowerCase());
  // $all/$elemMatch: both codes must appear somewhere in `stops`;
  // $expr pins the first and last array elements to origin/destination.
  const query = {
    stops: {
      $all: [
        { $elemMatch: { $eq: firstCrs } },
        { $elemMatch: { $eq: lastCrs } },
      ],
    },
    $expr: {
      $and: [
        { $eq: [{ $arrayElemAt: ["$stops", -1] }, lastCrs] },
        { $eq: [{ $arrayElemAt: ["$stops", 0] }, firstCrs] },
      ],
    },
  };
  const search = await db.query("pis", query);
  // Check for results, if none then try partial match
  // NOTE(review): the partial-match fallback described above is NOT
  // implemented — the raw result (possibly empty) is returned as-is.
  return search;
}
/** Looks up a PIS entry by its numeric code (non-digits are stripped). */
export async function findPisByCode(
  code: string
): Promise<OB_Pis_SimpleObject | null> {
  logger.debug(`pisServices.findPisByCode: Searching for PIS code: ${code}`);
  const numericCode = clean.removeNonNumeric(code);
  return await db.query("pis", { code: numericCode });
}
// Queries the database for PIS codes that match the given TIPLOC array.
// Tries the cheap exact-match pipeline first; the costlier partial-match
// pipelines only run when it comes back empty.
export async function findByTiplocArray(
  tiplocArray: string[]
): Promise<OB_Pis_SimpleObject | null> {
  // Rewrite errant TIPLOCs (currently just RDNG4AB, which never matches)
  // before searching; the includes() check avoids iterating every array
  // ever looked up.
  if (tiplocArray.includes("RDNG4AB")) {
    fixErrantTiplocs(tiplocArray);
  }
  try {
    const exactMatch = await findExactMatchByTiploc(tiplocArray);
    if (exactMatch) {
      return convertDocument(exactMatch, "none");
    }
    const partialEnd = await findPartialEndMatchByTiploc(tiplocArray);
    if (partialEnd) {
      return convertDocument(partialEnd, "first");
    }
    const partialStart = await findPartialStartMatchByTiploc(tiplocArray);
    if (partialStart) {
      return convertDocument(partialStart, "last");
    }
    return null;
  } catch (err) {
    logger.error(err, "Error in findByTiplocArray");
    return null;
  }
}
// Exact-match pipeline: the code's stops must equal `array` exactly.
async function findExactMatchByTiploc(array: string[]): Promise<Document> {
  const matches = await queryAggregate(
    "pis",
    getFullTiplocMatchPipeline(array)
  );
  return matches[0];
}
// Partial-match pipeline — only supports codes that END with the correct
// stops for now.
async function findPartialEndMatchByTiploc(array: string[]): Promise<Document> {
  const matches = await queryAggregate(
    "pis",
    getPartialEndTiplocMatchPipeline(array)
  );
  return matches[0];
}
// Partial-match pipeline — supports codes that START with the correct stops.
async function findPartialStartMatchByTiploc(array: string[]): Promise<Document> {
  const matches = await queryAggregate(
    "pis",
    getPartialStartTiplocMatchPipeline(array)
  );
  return matches[0];
}
// Shapes a raw PIS document into the public OB_Pis_SimpleObject form.
function convertDocument(doc: Document, skipType: string): OB_Pis_SimpleObject {
  const { code, toc, skipStops } = doc;
  return {
    code: code.toString(),
    toc,
    skipCount: skipStops,
    skipType,
  };
}
// Changes any instance of 'RDNG4AB' to 'RDNGSTN', mutating `input` in place.
function fixErrantTiplocs(input: string[]): void {
  for (let i = 0; i < input.length; i += 1) {
    if (input[i] === "RDNG4AB") {
      input[i] = "RDNGSTN";
    }
    // Additional substitutions can be applied here
  }
}
module.exports = {
supported,
findPisByOrigDest,
findPisByCode,
findByTiplocArray,
};

View File

@ -1,24 +0,0 @@
import { createClient } from "redis";
import zlib from "zlib";
// Redis connection for the (currently unimplemented) response cache.
// NOTE(review): "redis:PORT" looks like a placeholder rather than a
// resolvable URL - confirm before wiring this module up.
const client = createClient({
  url: "redis:PORT",
});
// Log client-level errors; connection is never opened in this module yet.
client.on("error", (err) => console.log("Redis Client Error", err));
// Stores `value` under `key` in the cache. Not implemented yet - always
// throws so callers fall back to the live data path.
async function addToCache(key: string, value: Object): Promise<boolean> {
  throw new Error("Unable to post to cache");
}
// Fetches the cached value for `key`. Not implemented yet - always throws
// so callers fall back to the live data path.
// Fixed typo in the error message ("retreive" -> "retrieve").
async function getFromCache(key: string): Promise<Object> {
  throw new Error("Unable to retrieve");
}
/*
await client.connect();
await client.set('key', 'value');
const value = await client.get('key');
await client.disconnect();
*/

View File

@ -1,77 +0,0 @@
const auth = require("../utils/auth.utils");
const db = require("./dbAccess.services");
const mail = require("./mail.services");
const errors = require("../configs/errorCodes.configs");
import { logger } from "../utils/logger.utils";
import { getDomainFromEmail } from "../utils/sanitizer.utils";
import { valid as validDomains } from "../configs/domains.configs";
import { generateCode } from "../utils/auth.utils";
/**
 * Handles a registration request: validates the email's domain, stores a
 * registration key, and emails the key to the requester.
 * @param {object} body - request body; must contain `email`.
 * @returns {Promise<object|number>} a status object, or a bare 500 on
 *   template-rendering failure (NOTE(review): inconsistent with the other
 *   return shapes - confirm callers before changing it).
 */
async function createRegKey(body) {
  logger.debug("registerServices.createRegKey: Incoming request");
  if (!body.email) {
    // Fixed: errorMsg now matches errorCode 901 (was errors[902]).
    return { status: 400, errorCode: 901, errorMsg: errors[901] };
  }
  const domain = getDomainFromEmail(body.email);
  logger.info(`registerServices: Registration request from: ${domain}`);
  if (!validDomains.includes(domain)) {
    return { status: 403, errorCode: 702, errorMsg: errors[702] };
  }
  logger.debug(`registerServices.createRegKey: Key from valid: ${domain}`);
  const key = generateCode();
  // Await the write so the key is stored before the confirmation email
  // can possibly be acted upon (was fire-and-forget).
  await db.addRegReq(key, domain);
  const message = await auth.generateConfirmationEmail(body.email, key);
  if (!message) {
    const err = new Error("Message Generation Error");
    logger.error(err, "registerServices.createRegKey: Error generating email");
    return 500;
  }
  if ((await mail.send(message)) === true) {
    return { status: 201, message: "email sent" };
  }
  return { status: 500, errorCode: 950, errorMsg: errors[950] };
}
/**
 * Completes registration: exchanges a valid registration code for an API key.
 * @param {object} req - must contain `uuid`, the registration code.
 * @returns {Promise<object>} status object, with `api_key` on success.
 */
async function regUser(req) {
  // TODO: add input validation before using req.uuid.
  const regCode = req.uuid.toLocaleUpperCase();
  logger.trace(`Read UUID: ${regCode}`);
  const res = await auth.checkRequest(regCode);
  logger.debug(res, "registrationServices");
  if (res.result) {
    const uuid = await auth.generateKey();
    const apiKey = await db.addUser(uuid, res.domain);
    if (apiKey) {
      // Fixed: delete using the upper-cased code that was actually looked
      // up - generateCode() only issues upper-case characters, so the raw
      // user input may not match the stored request.
      db.delRegReq(regCode);
      return { status: 201, message: "User added", api_key: uuid };
    }
  }
  return { status: 401, errorCode: 703, errorMsg: errors[703] };
}
// Currently errors on a correct code as it cannot be found... Ensure uuid is ALL CAPS
/**
 * Looks up a user document by API key (uuid).
 * @param {string} uuid - API key to look up.
 * @returns {Promise<object[]|object>} matching documents, or a status object
 *   when not found / on error.
 */
async function getUser(uuid) {
  try {
    const filter = {
      uuid: uuid,
    };
    const res = await db.query("users", filter, false);
    if (res.length) {
      return res;
    }
    return { status: 404, errorCode: 400, errorMsg: errors[400] };
  } catch (err) {
    // Use the structured logger rather than console.log, for consistency
    // with the rest of this module.
    logger.error(err, "registerServices.getUser: Query failed");
    return { status: 500, errorCode: 951, errorMsg: errors[951] };
  }
}
// CommonJS surface of the registration service.
module.exports = {
  regUser,
  createRegKey,
  getUser,
};

View File

@ -1,111 +1,16 @@
const db = require("../services/dbAccess.services"); const log = require('../utils/log.utils'); // Log Helper
const os = require("os"); const db = require('../services/dbAccess.services')
const vers = require("../configs/version.configs"); const os = require('os')
import { logger } from "../utils/logger.utils"; async function hits(){
var dat = db.query("meta", {target: "counters"});
async function buildJson() { log.out(`listServices.meta: fetched server meta`)
let json = {}; let out = {}
json.count = {}; out.host = os.hostname()
// Async call all db queries out.dat = await dat
const userCount = db.colCount("users"); return out;
const regCount = db.colCount("registrations");
const pisCount = db.colCount("pis");
const corpusCount = db.colCount("corpus");
const stationsCount = db.colCount("stations");
const timetableCount = db.colCount("timetable");
// Insert data
json.mode = process.env.NODE_ENV;
json.host = os.hostname();
// Await and insert async calls
json.count.users = await userCount;
json.count.reg = await regCount;
json.count.pis = await pisCount;
json.count.corpus = await corpusCount;
json.count.stations = await stationsCount;
json.count.timetable = await timetableCount;
return json;
}
// Legacy stats endpoint: logs the request, then returns the collated counts.
async function hits() {
  logger.debug("statsServices.hits: Statistics Requested");
  return await buildJson();
}
// Reports component versions: the backend version from config, the
// timetable-mgr version from the DB ("" when not recorded).
async function getVersions() {
  logger.debug("statsServices.getVersions: Fetching versions");
  const [mqClient] = await db.query("versions", { target: "timetable-mgr" });
  return {
    backend: vers.app,
    "mq-client": mqClient?.["version"] || "",
  };
}
// Collates server statistics: dataset last-update times plus the size of
// each collection. All queries are started first and awaited together so
// they run in parallel.
async function statistics() {
  logger.debug("statsServices.statistics: Fetching statistics");
  const timetablePromise = db.query("meta", { type: "CifMetadata" });
  const pisPromise = db.query("meta", { type: "PisMetadata" });
  const corpusPromise = db.query("meta", { target: "corpus" });
  const stationsPromise = db.query("meta", {type: "StationsMetadata"});
  const lengthUsersPromise = db.colCount("users");
  const lengthRegistrationsPromise = db.colCount("registrations");
  const lengthCorpusPromise = db.colCount("corpus");
  const lengthStationsPromise = db.colCount("stations");
  const lengthPisPromise = db.colCount("pis");
  const lengthTimetablePromise = db.colCount("timetable");
  // NOTE(review): started but never awaited or used below - dead promise.
  const lengthReasonCodesPromise = db.colCount("reasonCodes");
  // Destructuring order must line up with the Promise.all array below
  // (stations is intentionally last in both).
  const [
    timetable,
    pis,
    corpus,
    lengthUsers,
    lengthRegistrations,
    lengthCorpus,
    lengthStations,
    lengthPis,
    lengthTimetable,
    stations,
  ] = await Promise.all([
    timetablePromise,
    pisPromise,
    corpusPromise,
    lengthUsersPromise,
    lengthRegistrationsPromise,
    lengthCorpusPromise,
    lengthStationsPromise,
    lengthPisPromise,
    lengthTimetablePromise,
    stationsPromise,
  ]);
  return {
    hostname: os.hostname() || "Unknown",
    runtimeMode: process.env.NODE_ENV || "Unknown",
    updateTimes: {
      timetable: (timetable[0]["lastUpdate"]),
      pis: pis[0]["lastUpdate"],
      corpus: corpus[0]["updated_time"],
      stations: stations[0]["lastUpdate"],
    },
    dbLengths: {
      users: lengthUsers,
      registrations: lengthRegistrations,
      corpus: lengthCorpus,
      stations: lengthStations,
      pis: lengthPis,
      timetable: lengthTimetable,
    },
  };
} }
module.exports = { module.exports = {
hits, hits
statistics, }
getVersions,
};

View File

@ -1,196 +0,0 @@
import { logger } from "../utils/logger.utils";
import { findByTiplocArray, supported } from "./pis.services";
import { queryAggregate } from "./dbAccess.services";
import {
getFindByHeadcodePipeline,
getFindByTrainUidPipeline,
} from "../utils/trainService.utils";
import { removeNonAlphanumeric } from "../utils/sanitizer.utils";
import { formatTimetableDetail } from "../utils/processors/timetable/timetableProcessor.utils";
import type {
TrainServices,
Service,
Stop,
SimpleService,
OB_Pis_SimpleObject,
} from "@owlboard/ts-types";
export async function findByHeadcode(
headcode: string,
date: Date | string
): Promise<SimpleService[]> {
const sanitizedHeadcode = removeNonAlphanumeric(headcode);
logger.debug(
`trainServices.findByHeadcode: Searching for trains by headcode: ${headcode}`
);
// If 'now' then generate a new Date now, else use the provided date, then set time to 1200.
const searchDate = date === "now" ? new Date() : new Date(date);
searchDate.setHours(12, 0, 0);
// Get the 'shortDay'
const shortDay = getShortDay(searchDate);
const query = {
headcode: sanitizedHeadcode.toUpperCase(),
daysRun: { $in: [shortDay] },
scheduleStartDate: { $lte: searchDate },
scheduleEndDate: { $gte: searchDate },
};
const pipeline = getFindByHeadcodePipeline(query);
const result: SimpleService[] = (await queryAggregate(
"timetable",
pipeline
)) as SimpleService[];
const services = filterServices(result);
return services;
}
// Finds the full detail of a service by its train UID on a given date
// ("now", a Date, or a parseable date string; defaults to today).
// Returns the formatted timetable detail, or null when no schedule matches.
export async function findByTrainUid(
  uid: string,
  date: Date | string = new Date()
) {
  // Set the correct date - whether a Date, a string or "now" was passed.
  let queryDate: Date;
  if (date === "now") {
    queryDate = new Date();
  } else if (date instanceof Date) {
    queryDate = date;
  } else {
    queryDate = new Date(date);
  }
  const query = {
    trainUid: uid.toUpperCase(),
    daysRun: { $in: [getShortDay(queryDate)] },
    scheduleStartDate: { $lte: queryDate },
    scheduleEndDate: { $gte: queryDate },
  };
  const pipeline = getFindByTrainUidPipeline(query);
  const result = (await queryAggregate("timetable", pipeline)) as Service[];
  const services = filterServices(result) as Service[];
  // Fixed: an empty result previously fell through to
  // formatTimetableDetail(undefined, ...), which throws a TypeError.
  if (!services.length) {
    return null;
  }
  // Only fetch a PIS code when the operator is on the supported TOC list.
  let pis: OB_Pis_SimpleObject | null = null;
  if (supported.includes(services[0].operator)) {
    pis = await fetchPisCode(services[0].stops);
  }
  return formatTimetableDetail(services[0], pis);
}
// Internal Functions:
// Filters out non-passenger stops and then uses the stop array to request a PIS code for the service
// Filters a service's stops to the public ones and looks up its PIS code.
// Services with no public stops are treated as ECS workings and receive a
// random ECS code instead.
async function fetchPisCode(
  stops: Stop[]
): Promise<OB_Pis_SimpleObject | null> {
  // Fixed: use for...of rather than for...in - for...in iterates string
  // indices (and any inherited keys), an array anti-pattern.
  const tiplocList: string[] = [];
  for (const stop of stops) {
    if (stop.isPublic) tiplocList.push(stop.tiploc);
  }
  let pisData: OB_Pis_SimpleObject | null;
  if (tiplocList.length) {
    pisData = await findByTiplocArray(tiplocList);
  } else {
    // No public stops - use an ECS headcode.
    pisData = {
      toc: "GW",
      skipCount: 0,
      code: randomEcsPis(),
    };
  }
  if (!pisData) {
    logger.debug(tiplocList, "No PIS found for service");
  }
  return pisData;
}
// Picks a random choice of the ECS PIS Codes
function randomEcsPis(): string {
const options = ["0015", "9997"];
const randomValue = Math.floor(Math.random() * 2);
return options[randomValue];
}
// Outputs the standard 'shortday' string from a Date.
function getShortDay(day: Date): string {
const dayMap = ["su", "m", "t", "w", "th", "f", "s"];
const shortDay = dayMap[day.getDay()];
return shortDay;
}
// Filters services using their STP indicator so that over-riding entries are returned correctly
function filterServices(services: SimpleService[]): SimpleService[] {
let stpIndicators: Record<
string,
{ hasC: boolean; hasN: boolean; hasO: boolean; hasP: boolean }
> = {};
let filteredServices: SimpleService[] = [];
for (const service of services) {
const trainUid = service["trainUid"],
stpIndicator = service["stpIndicator"];
// Creates the stpIndicators array:
if (!stpIndicators[trainUid]) {
stpIndicators[trainUid] = {
hasC: false,
hasN: false,
hasO: false,
hasP: false,
};
}
if (stpIndicator === "C") {
stpIndicators[trainUid].hasC = true;
}
if (stpIndicator === "N") {
stpIndicators[trainUid].hasN = true;
}
if (stpIndicator === "O") {
stpIndicators[trainUid].hasO = true;
}
if (stpIndicator === "P") {
stpIndicators[trainUid].hasP = true;
}
}
// Iterate each service, and only output one service matching each trainUid,
// C > N > O > P is the order, with C being prioritised over other STP types.
for (const service of services) {
const trainUid = service["trainUid"];
const thisStpIndicators = stpIndicators[trainUid];
const stpIndicator = service["stpIndicator"];
if (stpIndicator === "C") {
filteredServices.push(service);
} else if (stpIndicator === "N" && !thisStpIndicators.hasC) {
filteredServices.push(service);
} else if (
stpIndicator === "O" &&
!thisStpIndicators.hasC &&
!thisStpIndicators.hasN
) {
filteredServices.push(service);
} else if (
stpIndicator === "P" &&
!thisStpIndicators.hasC &&
!thisStpIndicators.hasN &&
!thisStpIndicators.hasO
) {
filteredServices.push(service);
}
}
return filteredServices;
}
// Local Types:

17
src/types/index.d.ts vendored
View File

@ -1,17 +0,0 @@
// src/types/express/index.d.ts
// to make the file a module and avoid the TypeScript error
export {};
declare global {
  namespace Express {
    // Augments the Express request/response types with OwlBoard fields.
    export interface Request {
      // presumably set by the auth middleware after API-key validation -
      // TODO confirm where this is assigned.
      isAuthed: boolean;
    }
    export interface Response {
      // Cache-control metadata; appears related to the setCache helper -
      // NOTE(review): confirm these fields are actually read anywhere.
      cacheType: string;
      cacheSecs: number;
    }
  }
}

View File

@ -1,82 +0,0 @@
const crypt = require("crypto");
const db = require("../services/dbAccess.services");
const fs = require("fs/promises");
import { minifyMail } from "./minify.utils";
import { logger } from "./logger.utils";
// Checks users registration key against issued keys
// Checks a user's API key (uuid) against issued keys; on success records
// the access time. Resolves to a boolean as declared.
async function isAuthed(uuid: string): Promise<boolean> {
  const q = {
    uuid: uuid,
  };
  const res = await db.query("users", q);
  logger.debug(res, "checkUser: DB Query Result");
  // Fixed: coerce to a real boolean - previously the (truthy) domain
  // string leaked out despite the declared Promise<boolean>. Callers that
  // test truthiness are unaffected.
  const authorized = Boolean(res && res[0] && res[0].domain);
  if (authorized) db.userAtime(uuid);
  return authorized;
}
// Checks whether a registration request key is valid
async function checkRequest(key: string) {
const collection = "registrations";
const query = { uuid: key };
const res = await db.query(collection, query);
logger.debug(res, "checkRequest: DB Lookup result");
const result =
res.length > 0 && res[0].time
? { result: true, domain: res[0].domain }
: { result: false };
return result;
}
// Creates an API key for a user
// Creates a fresh API key (random UUIDv4) for a user.
async function generateKey() {
  const apiKey = crypt.randomUUID();
  return apiKey;
}
// Generates a 6-character registration code from an unambiguous alphabet
// (no I, O, 0 or 1). Uses crypto randomness; since 256 % 32 === 0 the
// modulo mapping introduces no bias.
export function generateCode(): string {
  const alphabet = 'ABCDEFGHJKLMNPQRSTUVWXYZ23456789';
  const codeLength = 6;
  const bytes = crypt.randomBytes(codeLength);
  return Array.from(bytes, (byte) => alphabet[byte % alphabet.length]).join('');
}
// Renders the registration confirmation email (HTML + plain text), with the
// user's code substituted for the 987654 placeholder in both templates.
// Returns the mail object, or false on render failure so callers can react.
async function generateConfirmationEmail(eml: string, uuid: string) {
  try {
    // Read both templates in parallel instead of serially.
    const [htmlTpl, txtTpl] = await Promise.all([
      fs.readFile("mail-templates/register.html", "utf-8"),
      fs.readFile("mail-templates/register.txt", "utf-8"),
    ]);
    const htmlMin = await minifyMail(htmlTpl.replace(/987654/g, uuid));
    return {
      to: eml,
      subject: "OwlBoard Registration",
      text: txtTpl.replace(/987654/g, uuid),
      html: htmlMin,
    };
  } catch (err) {
    logger.error(
      err,
      "generateConfirmationEmail: Error rendering email templates"
    );
    return false;
  }
}
// Dual CommonJS/ESM export surface. generateCode is already exported at its
// declaration, which is why it is absent from the ESM list below.
module.exports = {
  isAuthed,
  generateKey,
  generateConfirmationEmail,
  checkRequest,
  generateCode
};
export { isAuthed, generateKey, generateConfirmationEmail, checkRequest };

View File

@ -1,9 +0,0 @@
import type { Response } from "express"
// Applies a Cache-Control header: "no-store" verbatim, otherwise
// "<type>, max-age=<time>" (defaults: private, 120 seconds).
export function setCache(res: Response, type="private", time=120): void {
  const directive =
    type === "no-store" ? "no-store" : `${type}, max-age=${time}`;
  res.setHeader('Cache-Control', directive)
}

View File

View File

@ -1,93 +1,43 @@
const log = require("./logs.utils"); // Log Helper const log = require('../utils/log.utils'); // Log Helper
const db = require("../services/dbAccess.services"); // DB Access const db = require('../services/dbAccess.services') // DB Access
//const san = require('../utils/sanitizer.utils'); // Sanitiser const san = require('../utils/sanitizer.utils') // Sanitiser
import * as san from "../utils/sanitizer.utils"; async function checkCrs(input){
var INPUT = input.toUpperCase()
async function checkCrs(input = "") { log.out(`ldbUtils.checkCrs: Building database query to find: '${INPUT}'`)
var INPUT = input.toUpperCase(); var query = {'$or':[{'3ALPHA':INPUT},{'TIPLOC':INPUT},{'STANOX':INPUT}]};
var query = { var result = await db.query("stations", query)
$or: [{ "3ALPHA": INPUT }, { TIPLOC: INPUT }, { STANOX: INPUT }], log.out(`ldbUtils.checkCrs: Query results: ${JSON.stringify(result)}`)
}; return result
var result = await db.query("stations", query);
log.out(
"ldbUtils.checkCrs: Query results: " + JSON.stringify(result),
"dbug"
);
return result;
} }
// Needs to be moved to the frontend `ensureArray() func` async function cleanMessages(input){ // Needs to be moved to the frontend `ensureArray() func`
// Usage of this function should be migrated to the `translator` utilities. var out = []
async function cleanMessages(input) { if (typeof input.message == "string") {
log.out("ldbUtils.cleanMessages: Deprecated function has been called", "err"); out.push(await san.cleanNrcc(input.message))
var out = []; } else if (typeof input.message == "object") {
if (typeof input.message == "string") { for(var i = 0; i < input.message.length; i++) {
out.push(san.cleanNrcc(input.message)); out.push(await san.cleanNrcc(input.message[i]))
} else if (typeof input.message == "object") { }
for (var i = 0; i < input.message.length; i++) {
out.push(san.cleanNrcc(input.message[i]));
} }
} return out;
return out;
} }
// Accepts an object but not an Array and returns it wrapped in an array. // Accepts an object but not an Array and returns it wrapped in an array.
async function cleanServices(input) { async function cleanServices(input){ // Need to triple check but I don't think this is used anymore.
log.out("ldbUtils.cleanServices: Deprecated function has been called", "err"); var out = []
var out = []; if (!Array.isArray(input)) {
if (!Array.isArray(input)) { log.out(`ldbUtils.cleanServices: Transforming input: ${input}`)
log.out(`ldbUtils.cleanServices: Transforming input: ${input}`, "dbug"); out.push(input)
out.push(input); log.out(`ldbUtils.cleanServices: Returning output: ${out}`)
log.out(`ldbUtils.cleanServices: Returning output: ${out}`, "dbug"); return out;
return out; } else {
} else { return input;
return input;
}
}
// Normalises an upstream GetStationBoardResult so the train, bus and ferry
// service fields are always arrays (the upstream data yields a bare object
// when there is exactly one service).
async function cleanData(input) {
  const board = input?.GetStationBoardResult;
  try {
    if (board?.trainServices) {
      log.out(
        "ldbUtils.cleanData: Changing train service data to array",
        "dbug"
      );
      board.trainServices.service = await ensureArray(
        board.trainServices.service
      );
    }
    if (board?.busServices) {
      log.out("ldbUtils.cleanData: Changing bus service data to array", "dbug");
      board.busServices.service = await ensureArray(board.busServices.service);
    }
    if (board?.ferryServices) {
      log.out(
        "ldbUtils.cleanData: Changing ferry service data to array",
        "dbug"
      );
      board.ferryServices.service = await ensureArray(
        board.ferryServices.service
      );
    }
  } catch (err) {
    log.out(`ldbUtils.cleanData: Error: ${err}`, "eror");
  }
  return input;
}
// Wraps a non-array value in a one-element array; arrays pass through as-is.
async function ensureArray(data) {
  return Array.isArray(data) ? data : [data];
}
module.exports = { module.exports = {
checkCrs, checkCrs,
cleanMessages, cleanMessages,
cleanServices, cleanServices
cleanData, }
};

View File

@ -1,47 +0,0 @@
// Builds an aggregation pipeline returning the `count` nearest stations to
// the given coordinates, with distance reported in miles rounded to the
// nearest quarter mile.
export function findStationsByDistancePipeline(count: number, latitude: string, longitude: string) {
  const near = {
    'type': 'Point',
    'coordinates': [parseFloat(longitude), parseFloat(latitude)],
  };
  // metres/1609.34 = miles; multiply by 4, round, divide by 4 => quarters.
  const quarterMiles = {
    '$round': {
      '$multiply': [{ '$divide': ['$distance', 1609.34] }, 4],
    },
  };
  const pipeline = [
    { '$geoNear': { 'near': near, 'distanceField': 'distance' } },
    { '$limit': count },
    { '$addFields': { 'miles': { '$divide': [quarterMiles, 4] } } },
    { '$project': { '_id': 0, '3ALPHA': 1, 'NLCDESC': 1, 'miles': 1 } },
  ]
  return pipeline
}

8
src/utils/log.utils.js Normal file
View File

@ -0,0 +1,8 @@
// Prints a message prefixed with the current ISO-8601 timestamp.
async function out(msg) {
  console.log(`${new Date().toISOString()} - ${msg}`);
}
// Single export: out(msg), a timestamped console logger.
module.exports = {
  out
}

View File

@ -1,19 +0,0 @@
import pino from "pino";
const runtime = process.env.NODE_ENV;
// info-level in production, debug-level everywhere else.
const level: string = runtime === "production" ? "info" : "debug";
// Shared structured logger: upper-cased level labels, ISO timestamps.
export const logger = pino({
  level: level,
  formatters: {
    level: (label) => ({ level: label.toUpperCase() }),
  },
  timestamp: pino.stdTimeFunctions.isoTime,
});

View File

@ -1,21 +0,0 @@
// Levelled console logger; "info" and "dbug" lines are suppressed when
// NODE_ENV is production.
const environment: string = process.env.NODE_ENV || "Unknown";
const hideInProduction: string[] = ["info", "dbug"];
async function out(msg: string, level = "othr") {
  const suppressed =
    environment === "production" &&
    hideInProduction.includes(level.toLowerCase());
  if (suppressed) {
    return;
  }
  console.log(`${new Date().toISOString()} - ${level.toUpperCase()} - ${msg}`);
}
module.exports = {
  out,
};
export { out };

View File

@ -1,17 +0,0 @@
import { logger } from "./logger.utils";
const htmlShrink = require("html-minifier").minify;
const juice = require("juice");
// Inlines <style> rules into element attributes (required by many mail
// clients), then minifies the inlined HTML.
async function minifyMail(input: string): Promise<string> {
  logger.trace("minifyMail: Minifying mail output");
  const withInlineStyles: string = juice(input);
  const options = {
    removeComments: true,
    collapseWhitespace: true,
  };
  return htmlShrink(withInlineStyles, options);
}
module.exports = { minifyMail };
export { minifyMail };

View File

@ -1,13 +0,0 @@
import { logger } from "./logger.utils";
// Strips newlines, carriage returns and <p>/</p> tags from NRCC text.
export function removeNewlineAndPTag(input: string): string {
  logger.debug("removeNewlineAndPTag: Cleaning string");
  // Fixed: the previous replace callback returned "" on every branch, so a
  // plain replacement string is equivalent and removes the dead conditional.
  return input.replace(/[\n\r]|<\/?p[^>]*>/g, "");
}

View File

@ -1,157 +0,0 @@
// Pipeline: find PIS codes whose stop list ENDS with the queried tiplocs,
// preferring the code with the fewest skipped leading stops.
export function getPartialEndTiplocMatchPipeline(query: string[]) {
  const matchAllTiplocs = { $match: { tiplocs: { $all: query } } };
  const addReversedFields = {
    $addFields: {
      reversedTiplocs: { $reverseArray: "$tiplocs" },
      query: { $literal: query },
    },
  };
  const addReversedQuery = {
    $addFields: { reversedQuery: { $reverseArray: "$query" } },
  };
  // Keep documents whose reversed stop list starts with the reversed query,
  // i.e. whose stop list ends with the query.
  const matchSuffix = {
    $match: {
      $expr: {
        $eq: [
          { $slice: ["$reversedTiplocs", 0, { $size: "$reversedQuery" }] },
          "$reversedQuery",
        ],
      },
    },
  };
  const addSkipStops = {
    $addFields: {
      skipStops: {
        $subtract: [{ $size: "$tiplocs" }, { $size: "$reversedQuery" }],
      },
    },
  };
  return [
    matchAllTiplocs,
    addReversedFields,
    addReversedQuery,
    matchSuffix,
    addSkipStops,
    { $sort: { skipStops: 1 } },
    { $limit: 1 },
    { $project: { code: 1, skipStops: 1, toc: 1, _id: 0 } },
  ];
}
// Pipeline: find PIS codes whose stop list STARTS with the queried tiplocs,
// preferring the code with the fewest skipped trailing stops.
export function getPartialStartTiplocMatchPipeline(query: string[]) {
  return [
    { $match: { tiplocs: { $all: query } } },
    { $addFields: { query: query } },
    // Keep documents whose stop list begins with the query.
    {
      $match: {
        $expr: {
          $eq: [{ $slice: ["$tiplocs", { $size: "$query" }] }, "$query"],
        },
      },
    },
    {
      $addFields: {
        skipStops: {
          $subtract: [{ $size: "$tiplocs" }, { $size: "$query" }],
        },
      },
    },
    { $sort: { skipStops: 1 } },
    { $limit: 1 },
    { $project: { code: 1, skipStops: 1, toc: 1, _id: 0 } },
  ];
}
// Pipeline: exact whole-array match of the stop list. skipStops is 0 by
// definition for a full match.
export function getFullTiplocMatchPipeline(query: string[]) {
  return [
    { $match: { tiplocs: query } },
    { $limit: 1 },
    { $addFields: { skipStops: 0 } },
    { $project: { code: 1, toc: 1, skipStops: 1, _id: 0 } },
  ];
}

View File

@ -1,7 +0,0 @@
# Translators
The utilities in the `translators` folder translate the upstream API into the downstream API.
The aim of the translators is to ensure a consistent data format while removing any unused data to keep the response sizes as small as possible.
Translators are kept in separate files so changes can be made in one place. Each translator exports a single function 'transform()'. This function accepts data from the upstream API and uses other functions in the file to build the API response object before returning that object to the caller.

View File

@ -1,220 +0,0 @@
import type {
StaffLdb,
NrccMessage,
TrainServices,
ServiceLocation,
} from "@owlboard/ts-types";
import { tz } from "moment-timezone";
import { removeNewlineAndPTag } from "../../newSanitizer";
import { logger } from "../../logger.utils";
/// I do not yet have a type defined for any of the input object
export function transform(input: any): StaffLdb | null {
console.time("StaffLdb Transformation");
const data = input.GetBoardResult;
let output: StaffLdb;
try {
output = {
generatedAt: transformDateTime(data?.generatedAt) || new Date(),
locationName: data?.locationName || "Not Found",
stationManagerCode: data?.stationManagerCode || "UK",
nrccMessages: transformNrcc(data?.nrccMessages) || undefined,
trainServices: transformTrainServices(data?.trainServices) || undefined,
busServices: transformTrainServices(data?.busServices) || undefined,
ferryServices: transformTrainServices(data?.ferryServices) || undefined,
};
console.timeEnd("StaffLdb Transformation");
if (output.locationName !== "Not Found") {
return output;
}
} catch (err) {
logger.error(err, "utils/translators/ldb/staffLdb.transform");
}
console.timeEnd("StaffLdb Transformation");
return null;
}
// Parses an upstream timestamp string into a Date.
function transformDateTime(input: string): Date {
  logger.trace("utils/translators/ldb/staffLdb.transformDateTime: Running");
  const parsed = new Date(input);
  return parsed;
}
// Normalises upstream NRCC messages into NrccMessage[], stripping newlines
// and <p> tags from each message body. Returns undefined when there are no
// messages, so the field is dropped from the output object.
function transformNrcc(input: any): NrccMessage[] | undefined {
  logger.trace("utils/translators/ldb/staffLdb.transformNrcc: Running");
  if (input === undefined) {
    return input;
  }
  // Fixed: `messages` was previously initialised to the wrapper object, so
  // when input.message was already an array the function returned undefined
  // and multi-message boards lost their messages. Unwrap .message first,
  // mirroring transformTrainServices' handling of .service.
  let messages = input?.message;
  if (!Array.isArray(messages)) {
    messages = [messages];
  }
  if (messages.length) {
    const output: NrccMessage[] = [];
    for (const item of messages) {
      output.push({
        severity: item?.severity,
        xhtmlMessage: removeNewlineAndPTag(item?.xhtmlMessage),
      });
    }
    return output;
  }
  return undefined;
}
// Normalises an upstream train/bus/ferry service wrapper into
// TrainServices[]. A single bare service object is wrapped into an array,
// and every undefined field is removed from the output records so they are
// not serialised.
function transformTrainServices(input: any): TrainServices[] {
  logger.trace(
    "utils/translators/ldb/staffLdb.transformTrainServices: Running"
  );
  let services: any = input?.service;
  let output: TrainServices[] = [];
  if (services === undefined) {
    return output;
  }
  if (!Array.isArray(input.service)) {
    services = [input.service];
  }
  for (const service of services) {
    // parseTimes collapses near-booked estimates/actuals to "RT".
    const times = parseTimes(service);
    const trainService: TrainServices = {
      rid: service?.rid,
      uid: service?.uid,
      trainid: service?.trainid,
      operatorCode: service?.operatorCode || "UK",
      platform: service?.platform || "-",
      platformIsHidden: service?.platformIsHidden,
      serviceIsSupressed: checkIsSupressed(service),
      origin: transformLocation(service?.origin),
      destination: transformLocation(service?.destination),
      length: calculateLength(service),
      isCancelled: service?.isCancelled,
      cancelReason: service?.cancelReason,
      delayReason: service?.delayReason,
      arrivalType: service?.arrivalType,
      departureType: service?.departureType,
      sta: times.sta,
      eta: times.eta,
      ata: times.ata,
      std: times.std,
      etd: times.etd,
      atd: times.atd,
    };
    // Drop undefined fields to keep the response payload small.
    Object.keys(trainService).forEach(
      (key) => trainService[key] === undefined && delete trainService[key]
    );
    output.push(trainService);
  }
  return output;
}
// The upstream API uses string booleans. Returns "true" when the service is
// suppressed or not a passenger service, otherwise undefined so the field
// is later stripped from the output.
function checkIsSupressed(service: TrainServices): string | undefined {
  logger.trace("utils/translators/ldb/staffStation.checkIsSupressed: Running");
  const suppressed =
    service.serviceIsSupressed === "true" ||
    service.isPassengerService === "false";
  return suppressed ? "true" : undefined;
}
// Flattens an upstream origin/destination wrapper into ServiceLocation[],
// carrying the optional `via` text through.
function transformLocation(input: any): ServiceLocation[] {
  logger.trace("utils/translators/ldb/staffStation.transformLocation: Running");
  const output: ServiceLocation[] = [];
  // Fixed: callers pass service?.origin / service?.destination which can be
  // undefined; reading input.location then threw a TypeError.
  if (!input) {
    return output;
  }
  let locations: any[] = input.location;
  if (!Array.isArray(locations)) {
    locations = [locations];
  }
  for (const item of locations) {
    const location: ServiceLocation = {
      tiploc: item?.tiploc,
    };
    if (item?.via) {
      location.via = item.via;
    }
    output.push(location);
  }
  return output;
}
// Derives the train length: prefer the explicit length field, otherwise
// count the coaches in the formation; undefined when neither is present.
export function calculateLength(input: any): number | undefined {
  logger.trace("utils/translators/ldb/staffStation.calculateLength: Running");
  if (input?.length) {
    return Number(input.length);
  }
  const coaches = input?.formation?.coaches?.coach;
  if (coaches) {
    return Number(coaches.length);
  }
  return undefined;
}
// Parses a zone-unspecified time string as Europe/London wall-clock time.
// Falsy input yields undefined.
function transformUnspecifiedDateTime(input: string): Date | undefined {
  logger.trace(
    "utils/translators/ldb/staffStation.transformUnspecifiedDateTime: Running"
  );
  if (!input) {
    return undefined;
  }
  return tz(input, "Europe/London").toDate();
}
// Converts the six booked/estimated/actual time fields of a service to
// Dates, then collapses each estimate/actual to the string "RT" ("right
// time") when it is within 90 seconds of the corresponding booked time.
// NOTE(review): the map below runs EVERY entry of `service` through
// transformUnspecifiedDateTime and relies on the destructuring to pick out
// the six time keys; non-time fields are converted and discarded.
function parseTimes(service: TrainServices) {
  logger.trace("utils/translators/ldb/staffStation.parseTimes: Running");
  let { sta, eta, ata, std, etd, atd } = Object.fromEntries(
    Object.entries(service).map(([key, value]) => [
      key,
      transformUnspecifiedDateTime(value),
    ])
  );
  let etaResult: Date | undefined | string = eta;
  let ataResult: Date | undefined | string = ata;
  let etdResult: Date | undefined | string = etd;
  let atdResult: Date | undefined | string = atd;
  // Arrival comparisons only make sense against a booked arrival (sta).
  if (sta) {
    if (
      eta !== undefined &&
      Math.abs(eta.getTime() - sta.getTime()) / 60000 <= 1.5
    ) {
      etaResult = "RT";
    }
    if (
      ata !== undefined &&
      Math.abs(ata.getTime() - sta.getTime()) / 60000 <= 1.5
    ) {
      ataResult = "RT";
    }
  }
  // Departure comparisons only make sense against a booked departure (std).
  if (std) {
    if (
      etd !== undefined &&
      Math.abs(etd.getTime() - std.getTime()) / 60000 <= 1.5
    ) {
      etdResult = "RT";
    }
    if (
      atd !== undefined &&
      Math.abs(atd.getTime() - std.getTime()) / 60000 <= 1.5
    ) {
      atdResult = "RT";
    }
  }
  return {
    sta: sta,
    eta: etaResult,
    ata: ataResult,
    std: std,
    etd: etdResult,
    atd: atdResult,
  };
}

View File

@ -1,99 +0,0 @@
import type {
Service,
OB_TrainTT_service,
OB_Pis_SimpleObject,
OB_TrainTT_stopDetail,
Stop,
} from "@owlboard/ts-types";
// Shapes a raw timetable Service (plus an optional PIS code) into the
// public train-timetable detail object.
export function formatTimetableDetail(
  service: Service,
  pis: OB_Pis_SimpleObject | null
): OB_TrainTT_service {
  const formatted: OB_TrainTT_service = {
    stpIndicator: service.stpIndicator,
    operator: service.operator,
    trainUid: service.trainUid,
    headcode: service.headcode,
    powerType: service.powerType,
    planSpeed: convertStringToNumber(service.planSpeed),
    scheduleStart: service.scheduleStartDate,
    scheduleEnd: service.scheduleEndDate,
    daysRun: service.daysRun,
    stops: formatStops(service.stops),
    serviceDetail: service.serviceDetail,
  };
  // Only attach the PIS block when a code was found.
  if (pis) {
    formatted.pis = pis;
  }
  return formatted;
}
// Coerces Stop[] into OB_TrainTT_stopDetail[]; missing or empty input
// yields an empty array.
function formatStops(stops: Stop[]): OB_TrainTT_stopDetail[] {
  if (!stops || !stops.length) {
    return [];
  }
  return stops.map((stop) => formatStopTimes(stop));
}
// Converts a single Stop into a stop-detail record, copying only the
// fields that are present. A stop is public when it has a public arrival
// or departure time.
function formatStopTimes(stop: Stop): OB_TrainTT_stopDetail {
  const detail: OB_TrainTT_stopDetail = {
    tiploc: stop.tiploc,
    isPublic: Boolean(stop.publicArrival || stop.publicDeparture),
  };
  if (stop.publicArrival) detail.publicArrival = stop.publicArrival;
  if (stop.publicDeparture) detail.publicDeparture = stop.publicDeparture;
  if (stop.wttArrival) detail.wttArrival = stop.wttArrival;
  if (stop.wttDeparture) detail.wttDeparture = stop.wttDeparture;
  if (stop.platform) detail.platform = stop.platform;
  if (stop.pass) detail.pass = stop.pass;
  if (stop.arrLine) detail.arrLine = stop.arrLine;
  if (stop.depLine) detail.depLine = stop.depLine;
  return detail;
}
// Parses a numeric string; anything unparsable becomes 0.
function convertStringToNumber(str: string): number {
  const value = parseFloat(str);
  return Number.isNaN(value) ? 0 : value;
}

View File

@ -0,0 +1,45 @@
const clean = require('string-sanitizer-fix');
const log = require('../utils/log.utils');
/*
string.sanitize("a.bc@d efg#h"); // abcdefgh
string.sanitize.keepSpace("a.bc@d efg#h"); // abcd efgh
string.sanitize.keepUnicode("a.bc@d efg#hক"); // abcd efghক
string.sanitize.addFullstop("a.bc@d efg#h"); // abcd.efgh
string.sanitize.addUnderscore("a.bc@d efg#h"); // abcd_efgh
string.sanitize.addDash("a.bc@d efg#h"); // abcd-efgh
string.sanitize.removeNumber("@abcd efgh123"); // abcdefgh
string.sanitize.keepNumber("@abcd efgh123"); // abcdefgh123
string.addFullstop("abcd efgh"); // abcd.efgh
string.addUnderscore("@abcd efgh"); // @abcd_efgh
string.addDash("@abcd efgh"); // @abcd-efgh
string.removeSpace("@abcd efgh"); // @abcdefgh
*/
// Sanitises a free-text endpoint parameter (keeps letters and spaces),
// warning when sanitising altered the input.
function cleanApiEndpointTxt(input) {
  const output = clean.sanitize.keepSpace(input);
  if (output !== input) {
    log.out(`sanitizerUtils.cleanApiEndpoint: WARN: Sanitizing changed string. Input = ${input}`);
  }
  return output;
}
// Sanitises a numeric endpoint parameter (keeps letters and digits),
// warning when sanitising altered the input.
function cleanApiEndpointNum(input) {
  const output = clean.sanitize.keepNumber(input);
  if (output !== input) {
    log.out(`sanitizerUtils.cleanApiEndpointNum: WARN: Sanitizing changed string. Input = ${input}`);
  }
  return output;
}
// Strips newlines/carriage returns, then <p> and </p> tags, from an NRCC
// message body.
function cleanNrcc(input) {
  return input.replace(/[\n\r]/g, "").replace(/<\/?p[^>]*>/g, "");
}
// CommonJS surface of the sanitiser helpers.
module.exports = {
  cleanApiEndpointTxt,
  cleanApiEndpointNum,
  cleanNrcc
}

View File

@ -1,53 +0,0 @@
import { logger } from "./logger.utils";
// Strips every character outside [a-zA-Z0-9].
function removeNonAlphanumeric(inputString: string) {
  logger.debug("removeNonAlphanumeric: Sanitizing string");
  const sanitized = inputString.replace(/[^a-zA-Z0-9]/g, "");
  return sanitized;
}
// Strips every character outside [a-zA-Z].
function removeNonAlpha(inputString: string) {
  logger.debug("removeNonAlpha: Sanitizing string");
  const sanitized = inputString.replace(/[^a-zA-Z]/g, "");
  return sanitized;
}
// Strips every character outside [0-9].
function removeNonNumeric(inputString: string) {
  logger.debug("removeNonNumeric: Sanitizing string");
  const sanitized = inputString.replace(/[^0-9]/g, "");
  return sanitized;
}
// Aliases preserving the older sanitizer API names. Note the mapping:
// "Txt" keeps letters only, while "Num" keeps letters AND digits.
const cleanApiEndpointTxt = removeNonAlpha;
const cleanApiEndpointNum = removeNonAlphanumeric;
// Deprecated helper: strips newlines and <p>/</p> tags from NRCC message text.
function cleanNrcc(input: string) {
  logger.error("DEPRECATED FUNCTION", "cleanNrcc: Converting NRCC Data");
  const withoutNewlines = input.replace(/[\n\r]/g, "");
  const withoutPTags = withoutNewlines.replace(/<\/?p[^>]*>/g, "");
  return withoutPTags;
}
/**
 * Extracts the lower-cased domain from an email address.
 * Takes the segment after the LAST "@" so that quoted local parts which
 * themselves contain "@" are handled correctly (the previous split[1]
 * returned the wrong segment for such addresses).
 * @param mail - Email address.
 * @returns The lower-cased domain portion.
 */
function getDomainFromEmail(mail: string) {
  logger.debug("getDomainFromEmail: Obtaining domain from email address");
  const parts = mail.split("@");
  return parts[parts.length - 1].toLowerCase();
}
// The sanitizer utilities are exported twice: once via CommonJS
// (module.exports) and once as ES module named exports, so both require()
// and import callers can consume them.
module.exports = {
  cleanApiEndpointTxt,
  cleanApiEndpointNum,
  removeNonAlpha,
  removeNonAlphanumeric,
  removeNonNumeric,
  cleanNrcc,
  getDomainFromEmail,
};
export {
  cleanApiEndpointTxt,
  cleanApiEndpointNum,
  removeNonAlpha,
  removeNonAlphanumeric,
  removeNonNumeric,
  cleanNrcc,
  getDomainFromEmail,
};

View File

@ -0,0 +1,15 @@
/**
 * Converts a unix timestamp (seconds) to a locale-formatted date/time string.
 * @param {number} unix - Unix timestamp in seconds.
 * @returns {string} Locale string in the process's local timezone.
 */
function unixLocal(unix) {
  const jsTime = unix * 1000; // JS Dates are millisecond-based
  return new Date(jsTime).toLocaleString();
}
/**
 * Converts a JavaScript timestamp (milliseconds) to unix time (seconds).
 * Truncates with Math.floor rather than rounding: unix timestamps count
 * whole elapsed seconds, and Math.round would report a second that has not
 * yet completed for the half-second 500-999ms.
 * @param {number} js - Timestamp in milliseconds.
 * @returns {number} Unix timestamp in whole seconds.
 */
function jsUnix(js) {
  return Math.floor(js / 1000);
}
// Public API for the time-conversion helpers.
module.exports = { unixLocal, jsUnix };

View File

@ -1,20 +0,0 @@
import { logger } from "./logger.utils";
// Convert unix seconds into a locale-formatted date/time string.
function unixLocal(unix: number): string {
  logger.trace(`unixLocal: Converting time: ${unix}`);
  const milliseconds = unix * 1000;
  const date = new Date(milliseconds);
  return date.toLocaleString();
}
// Convert a millisecond JS timestamp into whole (truncated) unix seconds.
function jsUnix(js: number): number {
  logger.trace(`jsUnix: Converting time: ${js}`);
  const seconds = js / 1000;
  return Math.floor(seconds);
}
// Dual export: ES module named exports plus CommonJS for require() callers.
export { jsUnix, unixLocal };
module.exports = {
  unixLocal,
  jsUnix,
};

View File

@ -1,29 +0,0 @@
/**
 * Builds a MongoDB aggregation pipeline that matches services against the
 * supplied query and projects only their first and last stops (plus the
 * operator, trainUid and stpIndicator fields).
 */
export function getFindByHeadcodePipeline(query: any) {
  const firstStop = { $first: "$stops" };
  const lastStop = { $arrayElemAt: ["$stops", -1] };
  return [
    { $match: query },
    {
      $project: {
        operator: 1,
        stops: { $concatArrays: [[firstStop], [lastStop]] },
        trainUid: 1,
        stpIndicator: 1,
      },
    },
  ];
}
/**
 * Builds a MongoDB aggregation pipeline matching by the supplied query and
 * stripping MongoDB's internal _id field from the results.
 */
export function getFindByTrainUidPipeline(query: any) {
  const stages = [{ $match: query }, { $project: { _id: 0 } }];
  return stages;
}

View File

@ -1,3 +0,0 @@
// Do I need to set up the database?
// Possibly not, because every write will create the document if it doesn't exist

View File

@ -0,0 +1,27 @@
// Checks that all required environment variables are present.
// Resolves to { pass, missing_required, missing_desired } detailing any missing variables.
/**
 * Checks that the required and desired environment variables are present.
 * Implements the contract previously only sketched in comments: the values
 * themselves are never logged or returned — only the variable NAMES of any
 * that are missing.
 * @returns {Promise<{pass: boolean, missing_required: string[], missing_desired: string[]}>}
 *   pass is true when every required variable is set.
 */
async function varTest() {
  // DO NOT LOG CREDENTIALS!!!
  const required = {
    OWL_LDB_KEY: process.env.OWL_LDB_KEY,
    OWL_LDB_CORPUSUSER: process.env.OWL_LDB_CORPUSUSER,
    OWL_LDB_CORPUSPASS: process.env.OWL_LDB_CORPUSPASS,
    OWL_NOT_USED: process.env.OWL_NOT_USED,
  };
  const desired = {
    OWL_DB_PASS: process.env.OWL_DB_PASS,
  };
  // Collect the names (never the values) of unset variables.
  const missing_required = Object.keys(required).filter(
    (key) => required[key] === undefined
  );
  const missing_desired = Object.keys(desired).filter(
    (key) => desired[key] === undefined
  );
  return {
    pass: missing_required.length === 0,
    missing_required,
    missing_desired,
  };
}
// Expose the environment check.
module.exports = { varTest };

View File

@ -1,9 +0,0 @@
import { removeNewlineAndPTag } from "../../src/utils/newSanitizer";
describe("newSanitizer", () => {
  test("Should remove /\n and <p>/</p> elements", () => {
    // Leading newline and paragraph tags should both be stripped.
    expect(removeNewlineAndPTag("\n<p>This is a string</p>")).toEqual(
      "This is a string"
    );
  });
});

View File

@ -1,62 +0,0 @@
import { getDomainFromEmail } from "../../src/utils/sanitizer.utils";
import { removeNonNumeric } from "../../src/utils/sanitizer.utils";
import { removeNonAlpha } from "../../src/utils/sanitizer.utils";
import { removeNonAlphanumeric } from "../../src/utils/sanitizer.utils";
describe("Sanitize Email", () => {
  const expectedOutput = "example.com";
  // A spread of awkward-but-plausible address shapes; all share one domain.
  const inputs = [
    "this+is+an-_email@example.com",
    '"unusual email"@example.com',
    "(brackets)addr@example.com",
    "I%Have{Special}%Characters@example.com",
    "Basic.address@example.com",
    `"very.(),:;<>[]\".VERY.\"very\ \"very\".unusual"@example.com`,
    "THIS_EMAIL_CONTAINS_CAPITALISED_DOMAIN@EXAMPLE.COM",
  ];
  inputs.forEach((addr) => {
    test(`Should return only domain: ${addr}`, () => {
      expect(getDomainFromEmail(addr)).toEqual(expectedOutput);
    });
  });
});
describe("Remove non-numeric", () => {
  // [input, expected] pairs.
  const cases = [
    ["abc123", "123"],
    ["<%43)($£@:}jfkd4", "434"],
  ];
  cases.forEach(([input, desired], key) => {
    test(`Should return only numbers: ${key}`, () => {
      expect(removeNonNumeric(input)).toEqual(desired);
    });
  });
});
describe("Remove non-Alpha", () => {
  // [input, expected] pairs, including SQL-injection-style strings.
  const cases = [
    ["DROP/*comment*/sampletable", "DROPcommentsampletable"],
    ["10; DROP TABLE members /*", "DROPTABLEmembers"],
  ];
  cases.forEach(([input, desired], key) => {
    test(`Should return with only letters: ${key}`, () => {
      expect(removeNonAlpha(input)).toEqual(desired);
    });
  });
});
describe("Remove non-alphanumeric", () => {
  // [input, expected] pairs; digits and letters are both retained.
  const cases = [
    ["DROP/*comment*/sampletable", "DROPcommentsampletable"],
    ["10; DROP TABLE members /*", "10DROPTABLEmembers"],
    ["1F44", "1F44"],
  ];
  cases.forEach(([input, desired], key) => {
    test(`Should return with only alphanumeric: ${key}`, () => {
      expect(removeNonAlphanumeric(input)).toEqual(desired);
    });
  });
});

View File

@ -1,17 +0,0 @@
import { jsUnix, unixLocal } from "../../src/utils/timeConvert.utils";
describe("Time Conversion", () => {
  test("Should return unix time (seconds)", () => {
    const now = new Date();
    const expectedUnix = Math.floor(now.getTime() / 1000);
    expect(jsUnix(now.getTime())).toEqual(expectedUnix);
  });
  test("Should return locale date string", () => {
    const now = new Date();
    const unixSeconds = Math.floor(now.getTime() / 1000);
    // Locale strings have second resolution, so flooring is lossless here.
    expect(unixLocal(unixSeconds)).toEqual(now.toLocaleString());
  });
});

View File

@ -1,43 +0,0 @@
import {
transform,
calculateLength,
} from "../../../../src/utils/processors/ldb/staffStation";
import { inputs } from "./stationInputs";
import { outputs } from "./stationOutputs";
import { noLength as serviceNoLength } from "./trainServiceInputs";
import { trainServices } from "./trainServiceInputs";
describe("transform", () => {
  test("Should return null for empty input", () => {
    expect(transform({})).toBeNull();
  });
  // inputs and outputs are index-aligned fixture arrays.
  for (const testNo in inputs) {
    test(`Should correctly transform data ${testNo}`, () => {
      expect(transform(inputs[testNo])).toEqual(outputs[testNo]);
    });
  }
});
describe("calculateLength", () => {
  // Fixed test name typo ("ubdefined" -> "undefined").
  test("Should return undefined for no length", () => {
    // A service with no length field and no formation data has no
    // derivable length.
    expect(calculateLength(serviceNoLength)).toBeUndefined();
  });
  // Every fixture in trainServices describes a 4-coach service.
  for (const testNo in trainServices) {
    test(`Should correctly calculate ${testNo}`, () => {
      expect(calculateLength(trainServices[testNo])).toEqual(4);
    });
  }
});

View File

@ -1,103 +0,0 @@
// Test fixtures: raw GetBoardResult payloads fed to the staffStation
// transform() tests. Index-aligned with the `outputs` fixture array
// (outputs[i] is the expected transform of inputs[i]).
// Case 0 carries an nrccMessages block; case 1 is the same board without it.
export const inputs: any[] = [
  {
    GetBoardResult: {
      generatedAt: "2023-08-01T20:37:05.559123+01:00",
      locationName: "Railway Station",
      crs: "RLY",
      stationManager: "Network Rail",
      stationManagerCode: "RT",
      nrccMessages: {
        message: {
          severity: "minor",
          xhtmlMessage: "\n<p>Minor Alert</p>",
          type: "station",
        },
      },
      isTruncated: "true",
      trainServices: {
        service: [
          {
            rid: "202308017159276",
            uid: "G59276",
            trainid: "1M83",
            sdd: "2023-08-01",
            operator: "CrossCountry",
            operatorCode: "XC",
            sta: "2023-08-01T20:24:00",
            ata: "2023-08-01T20:27:22",
            arrivalType: "Actual",
            std: "2023-08-01T20:35:00",
            etd: "2023-08-01T20:35:00",
            departureType: "Estimated",
            departureSource: "Darwin",
            platform: "5",
            length: "10",
            origin: {
              location: {
                locationName: "Plymouth",
                crs: "PLY",
                tiploc: "PLYMTH",
              },
            },
            destination: {
              location: {
                locationName: "Birmingham New Street",
                crs: "BHM",
                tiploc: "BHAMNWS",
              },
            },
            category: "XX",
            activities: "T",
          },
        ],
      },
    },
  },
  {
    GetBoardResult: {
      generatedAt: "2023-08-01T20:37:05.559123+01:00",
      locationName: "Railway Station",
      crs: "RLY",
      stationManager: "Network Rail",
      stationManagerCode: "RT",
      isTruncated: "true",
      trainServices: {
        service: [
          {
            rid: "202308017159276",
            uid: "G59276",
            trainid: "1M83",
            sdd: "2023-08-01",
            operator: "CrossCountry",
            operatorCode: "XC",
            sta: "2023-08-01T20:24:00",
            ata: "2023-08-01T20:27:22",
            arrivalType: "Actual",
            std: "2023-08-01T20:35:00",
            etd: "2023-08-01T20:35:00",
            departureType: "Estimated",
            departureSource: "Darwin",
            platform: "5",
            length: "10",
            origin: {
              location: {
                locationName: "Plymouth",
                crs: "PLY",
                tiploc: "PLYMTH",
              },
            },
            destination: {
              location: {
                locationName: "Birmingham New Street",
                crs: "BHM",
                tiploc: "BHAMNWS",
              },
            },
            category: "XX",
            activities: "T",
          },
        ],
      },
    },
  },
];

View File

@ -1,81 +0,0 @@
import type {
StaffLdb,
NrccMessage,
TrainServices,
ServiceLocation,
} from "@owlboard/ts-types";
// Expected StaffLdb results for the staffStation transform() tests,
// index-aligned with the `inputs` fixture array. Date-valued fields use
// jest's expect.any(Date) since their exact values come from parsing at
// run time.
export const outputs: StaffLdb[] = [
  {
    generatedAt: expect.any(Date),
    locationName: "Railway Station",
    stationManagerCode: "RT",
    nrccMessages: [
      {
        severity: "minor",
        xhtmlMessage: "Minor Alert",
      },
    ],
    trainServices: [
      {
        rid: "202308017159276",
        uid: "G59276",
        trainid: "1M83",
        operatorCode: "XC",
        sta: expect.any(Date),
        ata: expect.any(Date),
        arrivalType: "Actual",
        std: expect.any(Date),
        etd: "RT",
        departureType: "Estimated",
        platform: "5",
        length: 10,
        origin: [
          {
            tiploc: "PLYMTH",
          },
        ],
        destination: [
          {
            tiploc: "BHAMNWS",
          },
        ],
      },
    ],
    busServices: [],
    ferryServices: [],
  },
  {
    generatedAt: expect.any(Date),
    locationName: "Railway Station",
    stationManagerCode: "RT",
    trainServices: [
      {
        rid: "202308017159276",
        uid: "G59276",
        trainid: "1M83",
        operatorCode: "XC",
        sta: expect.any(Date),
        ata: expect.any(Date),
        arrivalType: "Actual",
        std: expect.any(Date),
        etd: "RT",
        departureType: "Estimated",
        platform: "5",
        length: 10,
        origin: [
          {
            tiploc: "PLYMTH",
          },
        ],
        destination: [
          {
            tiploc: "BHAMNWS",
          },
        ],
      },
    ],
    busServices: [],
    ferryServices: [],
  },
];

View File

@ -1,149 +0,0 @@
import type { TrainServices } from "@owlboard/ts-types";
// Fixture: a train service carrying neither a `length` field nor formation
// coach data — calculateLength() is expected to return undefined for it.
export const noLength: any = {
  rid: "202308058004480",
  uid: "P04480",
  trainid: "1A39",
  sdd: "2023-08-05",
  operator: "Great Western Railway",
  operatorCode: "GW",
  sta: "2023-08-05T21:51:00",
  eta: "2023-08-05T23:04:18",
  arrivalType: "Forecast",
  std: "2023-08-05T22:00:00",
  etd: "2023-08-05T23:05:18",
  departureType: "Forecast",
  departureSource: "Darwin",
  platform: "7",
  origin: {
    location: {
      locationName: "Penzance",
      crs: "PNZ",
      tiploc: "PENZNCE",
    },
  },
  destination: {
    location: {
      locationName: "London Paddington",
      crs: "PAD",
      tiploc: "PADTON",
    },
  },
  delayReason: "887",
  category: "XX",
  activities: "T",
};
// Fixtures: services for which calculateLength() should resolve a length of
// 4 — via a 4-coach formation (cases 0 and 1) and/or an explicit
// length: "4" field (cases 1 and 2).
export const trainServices: any[] = [
  {
    rid: "202308058005927",
    uid: "P05927",
    trainid: "2T53",
    sdd: "2023-08-05",
    operator: "Great Western Railway",
    operatorCode: "GW",
    sta: "2023-08-05T19:52:00",
    eta: "2023-08-05T19:52:00",
    arrivalType: "Forecast",
    std: "2023-08-05T19:56:00",
    etd: "2023-08-05T19:56:00",
    departureType: "Forecast",
    departureSource: "Darwin",
    platform: "2",
    formation: {
      coaches: {
        coach: [
          { coachClass: "Standard" },
          { coachClass: "Standard" },
          { coachClass: "Standard" },
          { coachClass: "Standard" },
        ],
      },
    },
    origin: {
      location: {
        locationName: "Worcester Foregate Street",
        crs: "WOF",
        tiploc: "WORCSFS",
      },
    },
    destination: {
      location: {
        locationName: "Bristol Temple Meads",
        crs: "BRI",
        tiploc: "BRSTLTM",
        via: "via Gloucester",
      },
    },
    category: "OO",
    activities: "T",
  },
  {
    rid: "202308057126314",
    uid: "G26314",
    trainid: "2V88",
    sdd: "2023-08-05",
    operator: "West Midlands Trains",
    operatorCode: "LM",
    sta: "2023-08-05T18:28:00",
    eta: "2023-08-05T18:28:00",
    arrivalType: "Forecast",
    std: "2023-08-05T18:33:00",
    etd: "2023-08-05T18:33:00",
    departureType: "Forecast",
    departureSource: "Darwin",
    platform: "2",
    formation: {
      coaches: {
        coach: [
          { coachClass: "Standard" },
          { coachClass: "Standard", toilet: "Accessible" },
          { coachClass: "Standard" },
          { coachClass: "Standard", toilet: "Accessible" },
        ],
      },
    },
    origin: {
      location: { locationName: "Dorridge", crs: "DDG", tiploc: "DORIDGE" },
    },
    destination: {
      location: {
        locationName: "Worcester Foregate Street",
        crs: "WOF",
        tiploc: "WORCSFS",
      },
    },
    category: "OO",
    activities: "T RM",
    length: "4",
  },
  {
    rid: "202308057126318",
    uid: "G26318",
    trainid: "2V96",
    sdd: "2023-08-05",
    operator: "West Midlands Trains",
    operatorCode: "LM",
    sta: "2023-08-05T19:28:00",
    eta: "2023-08-05T19:28:00",
    arrivalType: "Forecast",
    std: "2023-08-05T19:33:00",
    etd: "2023-08-05T19:33:00",
    departureType: "Forecast",
    departureSource: "Darwin",
    platform: "2",
    origin: {
      location: { locationName: "Dorridge", crs: "DDG", tiploc: "DORIDGE" },
    },
    destination: {
      location: {
        locationName: "Worcester Foregate Street",
        crs: "WOF",
        tiploc: "WORCSFS",
      },
    },
    category: "OO",
    activities: "T RM",
    length: "4",
  },
];

View File

@ -1,110 +0,0 @@
{
"compilerOptions": {
/* Visit https://aka.ms/tsconfig to read more about this file */
/* Projects */
// "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
// "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
// "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
// "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
// "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
// "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
/* Language and Environment */
"target": "es2016" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
// "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
// "jsx": "preserve", /* Specify what JSX code is generated. */
// "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */
// "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
// "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
// "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
// "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
// "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
// "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
// "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
// "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
/* Modules */
"module": "Node16" /* Specify what module code is generated. */,
// "rootDir": "./", /* Specify the root folder within your source files. */
"moduleResolution": "node16" /* Specify how TypeScript looks up a file from a given module specifier. */,
// "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
// "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
// "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
// "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
// "types": [], /* Specify type package names to be included without being referenced in a source file. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
// "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
// "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */
// "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
// "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
// "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
// "resolveJsonModule": true, /* Enable importing .json files. */
// "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
// "noResolve": true, /* Disallow 'import's, 'require's or '<reference>'s from expanding the number of files TypeScript should add to a project. */
/* JavaScript Support */
"allowJs": true /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */,
"checkJs": false /* Enable error reporting in type-checked JavaScript files. */,
// "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
/* Emit */
// "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
// "declarationMap": true, /* Create sourcemaps for d.ts files. */
// "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
"sourceMap": true /* Create source map files for emitted JavaScript files. */,
// "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
// "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
"outDir": "./dist" /* Specify an output folder for all emitted files. */,
"removeComments": true /* Disable emitting comments. */,
// "noEmit": true, /* Disable emitting files from a compilation. */
// "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
// "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */
// "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
// "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
// "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
// "newLine": "crlf", /* Set the newline character for emitting files. */
// "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
// "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
// "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
// "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
// "declarationDir": "./", /* Specify the output directory for generated declaration files. */
// "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
/* Interop Constraints */
// "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
// "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */
// "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
"esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */,
// "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
"forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */,
/* Type Checking */
"strict": true /* Enable all strict type-checking options. */,
// "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
// "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
// "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
// "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
// "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
// "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
// "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
// "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
// "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
// "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
// "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
// "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
// "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
// "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
// "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
// "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
// "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
// "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
/* Completeness */
// "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
"skipLibCheck": true /* Skip type checking all .d.ts files. */
},
"include": ["src", "test", "./*", "./config"]
}