Merge remote-tracking branch 'origin/master' into accounts
commit e20d4ece14
@@ -156,7 +156,8 @@ services:
         ipv4_address: 10.10.10.60
     environment:
       - HOSTNAME=0.0.0.0
-      - PORTAL_URL=nginx
+      - PORTAL_URL=http://nginx
+      - STATE_DIR=/usr/app/state
     expose:
      - 3100
    depends_on:

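Note on the PORTAL_URL change: the value now carries its scheme (http://nginx rather than the bare hostname nginx), so the JavaScript checks further down can use it directly as a URL prefix instead of hard-coding http:// at every call site. A minimal sketch of the difference (buildUploadUrl is a hypothetical helper, not part of the commit):

    // Before: callers had to prepend the scheme themselves.
    // const url = `http://${process.env.PORTAL_URL}/skynet/skyfile`;

    // After: PORTAL_URL already contains the scheme.
    const buildUploadUrl = () => `${process.env.PORTAL_URL}/skynet/skyfile`;

    process.env.PORTAL_URL = "http://nginx";
    console.log(buildUploadUrl()); // http://nginx/skynet/skyfile
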
@@ -10,6 +10,6 @@
     "punycode": "^2.1.1"
   },
   "devDependencies": {
-    "prettier": "^2.0.5"
+    "prettier": "^2.2.1"
   }
 }

@@ -7,6 +7,9 @@ RUN yarn --no-lockfile
 COPY src src
 COPY cli cli
 
+RUN echo '*/5 * * * * /usr/app/cli/run critical' >> /etc/crontabs/root
+RUN echo '0 * * * * /usr/app/cli/run verbose' >> /etc/crontabs/root
+
 EXPOSE 3100
 ENV NODE_ENV production
-CMD [ "node", "--max-http-header-size=64000", "src/index.js" ]
+CMD [ "sh", "-c", "crond ; node --max-http-header-size=64000 src/index.js" ]

@@ -0,0 +1,5 @@
+#!/usr/bin/env node
+
+process.env.NODE_ENV = process.env.NODE_ENV || "production";
+
+require("../src/run.js");

@@ -5,17 +5,17 @@
   "license": "MIT",
   "dependencies": {
     "body-parser": "^1.19.0",
-    "cron": "^1.8.2",
     "deep-object-diff": "^1.1.0",
     "express": "^4.17.1",
     "http-status-codes": "^2.1.2",
     "lodash": "^4.17.20",
     "lowdb": "^1.0.0",
-    "ms": "^2.1.2",
-    "object-hash": "^2.0.3",
-    "superagent": "^6.0.0"
+    "object-hash": "^2.1.1",
+    "superagent": "^6.0.0",
+    "tmp": "^0.2.1",
+    "yargs": "^16.2.0"
   },
   "devDependencies": {
-    "prettier": "^2.0.5"
+    "prettier": "^2.2.1"
   }
 }

@@ -1,15 +1,22 @@
+const fs = require("fs");
 const superagent = require("superagent");
+const tmp = require('tmp');
 const { StatusCodes } = require("http-status-codes");
 const { calculateElapsedTime, getResponseContent } = require("../utils");
 
 // uploadCheck returns the result of uploading a sample file
 async function uploadCheck(done) {
   const time = process.hrtime();
+  const file = tmp.fileSync();
+
+  fs.writeSync(file.fd, Buffer.from(new Date())); // write current date to temp file
 
   superagent
-    .post(`http://${process.env.PORTAL_URL}/skynet/skyfile`)
-    .attach("file", "package.json", "package.json")
+    .post(`${process.env.PORTAL_URL}/skynet/skyfile`)
+    .attach("file", file.name, file.name)
     .end((error, response) => {
+      file.removeCallback();
+
       const statusCode = (response && response.statusCode) || (error && error.statusCode) || null;
 
       done({

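The upload check now generates its own payload instead of uploading the service's package.json. A minimal sketch of the tmp file lifecycle relied on above, assuming tmp@0.2.x semantics: fileSync() creates the file eagerly and returns its path, an open file descriptor, and a cleanup callback.

    const fs = require("fs");
    const tmp = require("tmp");

    const file = tmp.fileSync();                     // { name, fd, removeCallback }
    fs.writeSync(file.fd, Buffer.from(new Date()));  // Date coerces to its string form
    console.log(fs.readFileSync(file.name, "utf8")); // e.g. "Mon Feb 01 2021 ..."
    file.removeCallback();                           // delete the temp file when done
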
@@ -29,7 +36,7 @@ async function downloadCheck(done) {
   let statusCode, errorResponseContent;
 
   try {
-    const response = await superagent.get(`http://${process.env.PORTAL_URL}/${skylink}?nocache=true`);
+    const response = await superagent.get(`${process.env.PORTAL_URL}/${skylink}?nocache=true`);
 
     statusCode = response.statusCode;
   } catch (error) {

@@ -46,4 +53,4 @@ async function downloadCheck(done) {
   });
 }
 
-module.exports.criticalChecks = [uploadCheck, downloadCheck];
+module.exports = [uploadCheck, downloadCheck];

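Both check modules now export plain arrays rather than named properties. Each check is written as a function taking a `done` callback, which matches the Promise executor signature, so a runner can hand a check straight to the Promise constructor. A minimal sketch of how the arrays are consumed (mirroring the new src/run.js further down):

    const checks = require("./checks/critical"); // now a plain array of check functions

    async function runChecks() {
      // `new Promise(check)` makes each check's `done(result)` act as `resolve(result)`
      return Promise.all(checks.map((check) => new Promise(check)));
    }
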
@@ -1037,7 +1037,7 @@ function skylinkVerification(done, { name, skylink, bodyHash, headers }) {
   const time = process.hrtime();
 
   // Create the query for the skylink
-  const query = `http://${process.env.PORTAL_URL}/${skylink}?nocache=true`;
+  const query = `${process.env.PORTAL_URL}/${skylink}?nocache=true`;
 
   // Get the Skylink
   superagent

@@ -1087,7 +1087,7 @@ function skylinkVerification(done, { name, skylink, bodyHash, headers }) {
   );
 }
 
-module.exports.verboseChecks = [
+module.exports = [
   audioExampleCheck,
   covid19PaperCheck,
   covid19CoroNopePaperCheck,

@@ -1,11 +1,10 @@
 const fs = require("fs");
 const low = require("lowdb");
 const FileSync = require("lowdb/adapters/FileSync");
-const Memory = require("lowdb/adapters/Memory");
 
-if (!fs.existsSync("state")) fs.mkdirSync("state");
+if (!fs.existsSync(process.env.STATE_DIR)) fs.mkdirSync(process.env.STATE_DIR);
 
-const adapter = new FileSync("state/state.json");
+const adapter = new FileSync(`${process.env.STATE_DIR}/state.json`);
 const db = low(adapter);
 
 db.defaults({ disabled: false, critical: [], verbose: [] }).write();

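The state directory is now configurable through STATE_DIR instead of being hard-coded to "state". A minimal sketch of the lowdb v1 flow this file sets up, assuming the "state" fallback that src/run.js defaults to; the pushed entry here is illustrative:

    const low = require("lowdb");
    const FileSync = require("lowdb/adapters/FileSync");

    const db = low(new FileSync(`${process.env.STATE_DIR || "state"}/state.json`));
    db.defaults({ disabled: false, critical: [], verbose: [] }).write();

    // re-read from disk before appending so writes from a concurrent
    // cron run are not silently overwritten
    db.read().get("critical").push({ date: new Date().toISOString(), checks: [] }).write();
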
@@ -8,8 +8,6 @@ const express = require("express");
 const bodyparser = require("body-parser");
 const db = require("./db");
 
-require("./schedule");
-
 const host = process.env.HOSTNAME || "0.0.0.0";
 const port = Number(process.env.PORT) || 3100;
 

@@ -0,0 +1,33 @@
+require('yargs/yargs')(process.argv.slice(2))
+  .command('$0 <type>', 'Skynet portal health checks', (yargs) => {
+    yargs.positional('type', {
+      describe: 'Type of checks to run',
+      type: 'string',
+      choices: ["critical", "verbose"]
+    })
+    .option("portal-url", {
+      describe: "Skynet portal url",
+      default: process.env.PORTAL_URL || "https://siasky.net",
+      type: "string",
+    })
+    .option("state-dir", {
+      describe: "State directory",
+      default: process.env.STATE_DIR || "state",
+      type: "string",
+    })
+  }, async ({ type, portalUrl, stateDir }) => {
+    process.env.PORTAL_URL = portalUrl;
+    process.env.STATE_DIR = stateDir;
+
+    const db = require("../src/db");
+    const checks = require(`../src/checks/${type}`);
+
+    const entry = {
+      date: new Date().toISOString(),
+      checks: await Promise.all(checks.map((check) => new Promise(check))),
+    };
+
+    // read before writing to make sure no external changes are overwritten
+    db.read().get(type).push(entry).write();
+  })
+  .argv

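The handler destructures portalUrl and stateDir even though the options are declared as "portal-url" and "state-dir": yargs exposes every hyphenated option under a camelCase alias as well. A minimal sketch of that behaviour:

    const argv = require("yargs/yargs")(["--portal-url", "http://nginx"])
      .option("portal-url", { type: "string" })
      .argv;

    console.log(argv["portal-url"]); // "http://nginx"
    console.log(argv.portalUrl);     // "http://nginx" — camelCase alias added by yargs
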
@@ -1,45 +0,0 @@
-const { CronJob } = require("cron");
-const ms = require("ms");
-const db = require("./db");
-const { criticalChecks } = require("./checks/critical");
-const { verboseChecks } = require("./checks/verbose");
-
-// use this timezone to run all cron instances at the same time regardless of the server location
-const timezone = "UTC";
-
-// critical health checks job definition
-const criticalJobSchedule = "*/5 * * * *"; // on every 5 minute mark
-const criticalJobOnTick = async () => {
-  const entry = {
-    date: new Date().toISOString(),
-    checks: await Promise.all(criticalChecks.map((check) => new Promise(check))),
-  };
-
-  // read before writing to make sure no external changes are overwritten
-  db.read().get("critical").push(entry).write();
-};
-const criticalJob = new CronJob(criticalJobSchedule, criticalJobOnTick, null, false, timezone);
-
-// verbose health checks job definition
-const verboseJobSchedule = "0 * * * *"; // on every full hour mark
-const verboseJobOnTick = async () => {
-  const entry = {
-    date: new Date().toISOString(),
-    checks: await Promise.all(verboseChecks.map((check) => new Promise(check))),
-  };
-
-  // read before writing to make sure no external changes are overwritten
-  db.read().get("verbose").push(entry).write();
-};
-const verboseJob = new CronJob(verboseJobSchedule, verboseJobOnTick, null, false, timezone);
-
-// fire all health checks on startup (with delay for other services to boot)
-setTimeout(() => {
-  // fire first run manually
-  criticalJob.fireOnTick();
-  verboseJob.fireOnTick();
-
-  // start cron schedule
-  criticalJob.start();
-  verboseJob.start();
-}, ms("1 minute"));