From 6fb2b29bcb66c63c1181e0e7a687ef7d271c1ef4 Mon Sep 17 00:00:00 2001 From: siiky Date: Mon, 10 Mar 2025 14:16:42 +0000 Subject: [PATCH 01/21] feat: start re-working stress testing --- lib/middlewares/populateDeviceId.js | 4 + tests/stress/child.js | 87 ------- tests/stress/cli.js | 59 +++++ tests/stress/consts.js | 10 + tests/stress/db.js | 73 ++++++ tests/stress/env.js | 93 ++++++++ tests/stress/index.js | 82 +++++-- tests/stress/load-tx-dummy-data.js | 47 ---- tests/stress/machines.js | 92 ++++++++ tests/stress/queries-performance-analyzer.js | 231 ------------------- tests/stress/scripts/create-machines.sh | 19 +- tests/stress/scripts/index.js | 32 --- tests/stress/server.js | 59 +++++ tests/stress/test-server.js | 7 - tests/stress/utils/index.js | 5 - tests/stress/utils/init-cert.js | 12 - tests/stress/utils/save-config.js | 3 - tools/lamassu-server-stress-testing | 2 + 18 files changed, 454 insertions(+), 463 deletions(-) delete mode 100644 tests/stress/child.js create mode 100644 tests/stress/cli.js create mode 100644 tests/stress/consts.js create mode 100644 tests/stress/db.js create mode 100644 tests/stress/env.js delete mode 100644 tests/stress/load-tx-dummy-data.js create mode 100644 tests/stress/machines.js delete mode 100644 tests/stress/queries-performance-analyzer.js delete mode 100644 tests/stress/scripts/index.js create mode 100644 tests/stress/server.js delete mode 100644 tests/stress/test-server.js delete mode 100644 tests/stress/utils/index.js delete mode 100644 tests/stress/utils/init-cert.js delete mode 100644 tests/stress/utils/save-config.js create mode 100755 tools/lamassu-server-stress-testing diff --git a/lib/middlewares/populateDeviceId.js b/lib/middlewares/populateDeviceId.js index dd2fd9e2..e406578d 100644 --- a/lib/middlewares/populateDeviceId.js +++ b/lib/middlewares/populateDeviceId.js @@ -3,6 +3,8 @@ const crypto = require('crypto') const logger = require('../logger') +const IS_STRESS_TESTING = process.env.LAMASSU_STRESS_TESTING === "YES" + function sha256 (buf) { if (!buf) return null const hash = crypto.createHash('sha256') @@ -14,6 +16,8 @@ function sha256 (buf) { const populateDeviceId = function (req, res, next) { const deviceId = _.isFunction(req.connection.getPeerCertificate) ? sha256(req.connection.getPeerCertificate()?.raw) + : IS_STRESS_TESTING + ? 'placeholder' /* TODO: req... ? 
*/ : null if (!deviceId) return res.status(500).json({ error: 'Unable to find certificate' }) diff --git a/tests/stress/child.js b/tests/stress/child.js deleted file mode 100644 index c6a7d9a1..00000000 --- a/tests/stress/child.js +++ /dev/null @@ -1,87 +0,0 @@ -const https = require('https') -const path = require('path') -const pify = require('pify') -const fs = pify(require('fs')) -const uuid = require('uuid') -const _ = require('lodash/fp') -const { PerformanceObserver, performance } = require('perf_hooks') - -const utils = require('./utils') -const variables = require('./utils/variables') - -var certificate = {} -var connectionInfo = {} - -const getCert = machineIndex => { - const key = fs.readFile(path.resolve(__dirname, 'machines', `${machineIndex}`, 'client.key')) - const cert = fs.readFile(path.resolve(__dirname, 'machines', `${machineIndex}`, 'client.pem')) - - return Promise.all([key, cert]).then(([key, cert]) => { - return { key, cert } - }).catch(err => { - console.error('The following error when reading the certificate: ', err) - return null - }) -} - -const getConnectionInfo = machineIndex => { - return fs.readFile(path.resolve(__dirname, 'machines', `${machineIndex}`, 'connection_info.json')) -} - -let counter = 0 -const requestTimes = [] -let latestResponseTime = 0 - -const request = (machineIndex, pid) => { - performance.mark('A') - https.get({ - hostname: 'localhost', - port: 3000, - path: '/poll?state=chooseCoin&model=unknown&version=7.5.0-beta.0&idle=true&pid=' + pid + '&sn=' + counter, - method: 'GET', - key: certificate.key, - cert: certificate.cert, - ca: connectionInfo.ca, - headers: { - date: new Date().toISOString(), - 'request-id': uuid.v4() - } - }, res => { - res.on('data', (d) => { - performance.mark('B') - performance.measure('A to B', 'A', 'B') - console.log(`Machine ${machineIndex} || Avg request response time: ${_.mean(requestTimes).toFixed(3)} || Latest response time: ${latestResponseTime.toFixed(3)}`) - process.send({ message: Buffer.from(d).toString() }) - }) - }) - - counter++ -} - -const obs = new PerformanceObserver((items) => { - latestResponseTime = items.getEntries()[0].duration - requestTimes.push(latestResponseTime) - performance.clearMarks() -}) -obs.observe({ entryTypes: ['measure'] }) - -process.on('message', async (msg) => { - console.log('Message from parent:', msg) - - const promises = [getCert(msg.machineIndex), getConnectionInfo(msg.machineIndex)] - Promise.all(promises).then(values => { - certificate = values[0] - connectionInfo = JSON.parse(values[1]) - }).catch(err => { - console.error('The following error occurred during certificate parsing: ', err) - }) - - if (msg.hasVariance) await new Promise(resolve => setTimeout(resolve, utils.randomIntFromInterval(1, variables.POLLING_INTERVAL))) - const pid = uuid.v4() - request(msg.machineIndex, pid) - - setInterval(() => { - const pid = uuid.v4() - request(msg.machineIndex, pid) - }, 5000) -}) diff --git a/tests/stress/cli.js b/tests/stress/cli.js new file mode 100644 index 00000000..10ce1c14 --- /dev/null +++ b/tests/stress/cli.js @@ -0,0 +1,59 @@ +const trimStart = (s, c) => { + let idx = 0 + while (idx < s.length && s[idx] === c) + idx++ + return idx > 0 ? 
s.substring(idx) : s +} + +const optkey = (opt) => trimStart(opt, '-') + +const parse = (default_options, option_arities) => (args) => { + const positional = [] + const parsed_options = {} + + for (let i = 0; i < args.length;) { + const arg = args[i] + i++ + + const arity = option_arities[arg] + if (typeof(arity) === 'number') { + if (arity+i > args.length) + return [`${arg}: not enough arguments.`, parsed_options, positional] + + const opt = optkey(arg) + switch (arity) { + case 0: parsed_options[opt] = true; break + case 1: parsed_options[opt] = args[i]; break + default: parsed_options[opt] = args.slice(i, i+arity); break + } + i += arity + } else { + positional.push(arg) + } + } + + const options = Object.assign({}, default_options, parsed_options) + return [null, options, positional] +} + +const help = ({ grammar, usage }) => () => { + if (usage) console.log(usage) + grammar.forEach( + ([optargs, optdesc, def]) => { + const deftext = def ? ` (default: ${def})` : "" + console.log(`\t${optargs.join(' ')}\t${optdesc}${deftext}`) + } + ) +} + +const CLI = ({ grammar, usage }) => { + const details = grammar.map(([[opt, ...optargs], optdesc, def]) => [opt, optargs.length, def]) + const option_arities = Object.fromEntries(details.map(([opt, arity, _def]) => [opt, arity])) + const default_options = Object.fromEntries(details.map(([opt, _arity, def]) => [optkey(opt), def])) + return { + parse: parse(default_options, option_arities), + help: help({ grammar, usage }), + } +} + +module.exports = CLI diff --git a/tests/stress/consts.js b/tests/stress/consts.js new file mode 100644 index 00000000..6ccb8eb8 --- /dev/null +++ b/tests/stress/consts.js @@ -0,0 +1,10 @@ +const EXIT = { + OK: 0, + EXCEPTION: 1, + UNKNOWN: 2, + BADARGS: 3, +} + +module.exports = { + EXIT, +} diff --git a/tests/stress/db.js b/tests/stress/db.js new file mode 100644 index 00000000..77aaff0b --- /dev/null +++ b/tests/stress/db.js @@ -0,0 +1,73 @@ +const cp = require('node:child_process') +const path = require('node:path') + +require('../../lib/environment-helper') +const db = require('../../lib/db') + +const { EXIT } = require('./consts') +const CLI = require('./cli') + +const help_message = "Setup the DB according to the previously defined environment." 
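+// Usage sketch (assumes the tools/lamassu-server-stress-testing wrapper added
+// in this patch is on PATH; running with no extra arguments performs the
+// migration and then inserts the bare-bones config):
+//   $ lamassu-server-stress-testing db
+//   $ lamassu-server-stress-testing db --help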
+ +const cli = CLI({ + grammar: [ + [["--help"], "Show this help message"], + ], +}) + +const help = (exit_code) => { + console.log("Usage: lamassu-server-stress-testing db ARGS...") + console.log(help_message) + cli.help() + return exit_code +} + +const migrate = async () => { + const lamassu_migrate_path = path.join(__dirname, "../../bin/lamassu-migrate") + const { stdout, stderr, status, signal, error } = cp.spawnSync(lamassu_migrate_path, [], { + cwd: process.cwd(), + encoding: 'utf8', + }) + + if (typeof(status) !== 'number' || typeof(signal) === 'string' || error) { + console.error("stdout:", stdout) + console.error("stderr:", stderr) + console.error("status:", status) + console.error("signal:", signal) + console.error("error:", error) + return EXIT.EXCEPTION + } else { + return EXIT.OK + } +} + +const configure = async () => { + const config = '{"config":{"triggersConfig_expirationTime":"Forever","triggersConfig_automation":"Automatic","locale_timezone":"Pacific/Honolulu","cashIn_cashboxReset":"Manual","notifications_email_security":true,"notifications_sms_security":true,"notifications_notificationCenter_security":true,"wallets_advanced_feeMultiplier":"1","wallets_advanced_cryptoUnits":"full","wallets_advanced_allowTransactionBatching":false,"wallets_advanced_id":"c5e3e61e-71b2-4200-9851-0142db7e6797","triggersConfig_customerAuthentication":"SMS","commissions_cashOutFixedFee":1,"machineScreens_rates_active":true,"wallets_BTC_zeroConfLimit":0,"wallets_BTC_coin":"BTC","wallets_BTC_wallet":"mock-wallet","wallets_BTC_ticker":"mock-ticker","wallets_BTC_exchange":"mock-exchange","wallets_BTC_zeroConf":"none","locale_id":"5f18e5ae-4a5d-45b2-8184-a6d69f4cc237","locale_country":"US","locale_fiatCurrency":"USD","locale_languages":["en-US","ja-JP","de-DE"],"locale_cryptoCurrencies":["BTC"],"commissions_minimumTx":2,"commissions_fixedFee":3,"commissions_cashOut":4,"commissions_cashIn":5,"commissions_id":"06d11aaa-34e5-45ab-956b-9728ccfd9330"}}' + await db.none("INSERT INTO user_config (type, data, created, valid, schema_version) VALUES ('config', $1, now(), 't', 2)", [config]) + return EXIT.OK +} + +const run = async (args) => { + const [err, options, positional] = cli.parse(args) + if (err) { + console.error(err) + return help(EXIT.BADARGS) + } + + if (options.help) + return help(EXIT.OK) + + const funcs = [migrate, configure] + for (let func of funcs) { + const exit_code = await func() + if (exit_code !== EXIT.OK) + return exit_code + } + + return EXIT.OK +} + +module.exports = { + help_message, + run, +} diff --git a/tests/stress/env.js b/tests/stress/env.js new file mode 100644 index 00000000..0d06ba3c --- /dev/null +++ b/tests/stress/env.js @@ -0,0 +1,93 @@ +const fs = require('node:fs') +const os = require('node:os') +const path = require('node:path') + +const dotenv = require('dotenv') + +const { EXIT } = require('./consts') +const CLI = require('./cli') + +const help_message = "Produce an .env file appropriate for stress testing." 
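+// Usage sketch (mirrors the README example in index.js; every option falls
+// back to the default listed in the grammar below, so any of them may be
+// omitted):
+//   $ lamassu-server-stress-testing env --inenv .env.bak --outenv .env --dbname lamassu_stress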
+ +const cli = CLI({ + grammar: [ + [["--help"], "Show this help message"], + [["--inenv", "ENV"], "Environment file path to read", ".env"], + [["--outenv", "ENV"], "Environment file path to write", ".stress.env"], + [["--dbuser", "DBUSER"], "Database username", "postgres"], + [["--dbpass", "DBPASS"], "Database password", "postgres123"], + [["--dbhost", "DBHOST"], "Database hostname", "localhost"], + [["--dbport", "DBPORT"], "Database port", "5432"], + [["--dbname", "DBNAME"], "Database name", "lamassu_stress"], + ], +}) + +const help = (exit_code) => { + console.log("Usage: lamassu-server-stress-testing env ARGS...") + console.log(help_message) + cli.help() + return exit_code +} + +const env_read = (path) => { + const envstr = fs.readFileSync(path, { encoding: 'utf8' }) + return dotenv.parse(envstr) + //const entries = envstr + // .split(os.EOL) + // .flatMap((line) => { + // line = line.trimStart() + // const i = line.indexOf('=') + // + // if (line.startsWith('#') || i <= 0) + // return [] + // + // const varname = line.substring(0, i) + // const value = line.substring(i + 1) + // return [[varname, value]] + // }) + //return Object.fromEntries(entries) +} + +const env_write = (envvars, path) => { + const envcontent = Object.entries(envvars) + .map(([varname, value]) => [varname, value].join('=')) + .join(os.EOL) + os.EOL + fs.writeFileSync(path, envcontent) +} + +const run = async (args) => { + const [err, options, positional] = cli.parse(args) + if (err) { + console.error(err) + return help(EXIT.BADARGS) + } + + if (options.help) + return help(EXIT.OK) + + if (positional.length > 0) { + console.error("Unknown arguments:", positional) + return help(EXIT.BADARGS) + } + + const inenvpath = path.resolve(process.cwd(), options.inenv ?? ".env") + const inenvvars = env_read(inenvpath) + + const outenvpath = path.resolve(process.cwd(), options.outenv ?? ".stress.env") + const outenvvars = { + ...inenvvars, + POSTGRES_USER: options.dbuser ?? "postgres", + POSTGRES_PASSWORD: options.dbpass ?? "postgres123", + POSTGRES_HOST: options.dbhost ?? "localhost", + POSTGRES_PORT: options.dbport ?? "5432", + POSTGRES_DB: options.dbname ?? "lamassu_stress", + } + env_write(outenvvars, outenvpath) + + return EXIT.OK +} + +module.exports = { + help_message, + run, +} diff --git a/tests/stress/index.js b/tests/stress/index.js index afe6de90..2d6acefc 100644 --- a/tests/stress/index.js +++ b/tests/stress/index.js @@ -1,35 +1,69 @@ - -const { fork } = require('child_process') const minimist = require('minimist') -const cmd = require('./scripts') -const variables = require('./utils/variables') +const { EXIT } = require('./consts') -function createMachines (numberOfMachines) { - return cmd.execCommand( - `bash ./scripts/create-machines.sh ${numberOfMachines} ${variables.SERVER_CERT_PATH} ${variables.MACHINE_PATH}` +const SUBCMDS = { + env: require('./env'), + db: require('./db'), + machines: require('./machines'), + server: require('./server'), +} + +const README = ` +This program will help you set the lamassu-server up for stress testing. This +short introduction is meant only as a quickstart guide; the subcommands may +support more options beyond those shown here. Use the --help flag for details +on each subcommand. + +First of all, you need to create a suitable .env file. With the following +commands, .env.bak will be used as a starting point, and the result will be +saved in .env. This is to avoid losing the real configurations. 
+ +$ cp .env .env.bak +$ lamassu-server-stress-testing env --inenv .env.bak --outenv .env + +The database chosen in the command above (by default lamassu_stress) +must be initialized, and must have a bare-bones configuration. The following +command does that: + +$ lamassu-server-stress-testing db + +You also need to create fake machines that will be used later in the actual +stress tests (including certificates, and pairing each to the server). The +following command creates 10 fake machines, and saves their data in +path/to/stress/data/. path/to/real/machine/code/ is the path to the root of the +machine's code. + +$ lamassu-server-stress-testing machines -n 10 --fake_data_dir path/to/stress/data/ --machine path/to/real/machine/code/ +`; + +const help = (exit_code) => { + console.log("Usage: lamassu-server-stress-testing SUBCMD ARGS...",) + console.log("Where SUBCMD is one of the following:") + Object.entries(SUBCMDS).forEach( + ([subcmd, { help_message }]) => { + console.log(`\t${subcmd}\t${help_message ?? ''}`) + } ) + + console.log(README) + + return exit_code } -function startServer () { - const forked = fork('test-server.js') - forked.send('start') -} +const main = async (args) => { + try { + const subcmd = SUBCMDS[args[0]] -async function run (args = minimist(process.argv.slice(2))) { - const NUMBER_OF_MACHINES = args._[0] - const HAS_VARIANCE = args.v || false + const exit_code = (args.length === 0) ? help(EXIT.OK) : + (!subcmd) ? help(EXIT.BADARGS) : + await subcmd.run(args.slice(1)) - await createMachines(NUMBER_OF_MACHINES) - startServer() - - for (let i = 1; i <= NUMBER_OF_MACHINES; i++) { - const forked = fork('child.js') - forked.send({ machineIndex: i, hasVariance: HAS_VARIANCE }) - forked.on('message', msg => { - console.log(`Machine ${i} || ${msg}`) - }) + process.exit(exit_code ?? 
EXIT.UNKNOWN) + } catch (err) { + console.error(err) + process.exit(EXIT.EXCEPTION) } } -run() +module.exports = main diff --git a/tests/stress/load-tx-dummy-data.js b/tests/stress/load-tx-dummy-data.js deleted file mode 100644 index 69021b2c..00000000 --- a/tests/stress/load-tx-dummy-data.js +++ /dev/null @@ -1,47 +0,0 @@ -const db = require('../../lib/db') - -const loadDummyTxData = () => { - const sql = ` - CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; - - INSERT INTO customers - VALUES ('99ac9999-9999-99e9-9999-9f99a9999999', null, null, null, null, null, null, - 'load_test_customers', null, null, null, null, null, null, '2021-04-16 10:51:38', - 'automatic', null, 'automatic', null, 'automatic', null, 'automatic', null, 'automatic', - null, 'automatic', null, null, null, null, null, null, 'automatic', null, null, - null, null, null, null, null, null, null, null, null, null) - ON CONFLICT DO NOTHING; - - INSERT INTO cash_in_txs - SELECT uuid_generate_v4(), md5(random()::text), md5(random()::text), i::integer, 'BTC', - i::integer, 'EUR', null, null, null, null, now() - random() * INTERVAL '2 days', random() > 0.5, - random() > 0.5, random() > 0.5, now() - random() * INTERVAL '2 days', null, random() > 0.5, - random() > 0.5, i::integer, i::integer, 1, '99ac9999-9999-99e9-9999-9f99a9999999', - 6, random() > 0.5, random() * (0.9-0.1) + 0.1::int, i::integer, random() > 0.5, null, null, false, - null, null, null - FROM generate_series(1, 5000000) as t(i); - - INSERT INTO cash_out_txs - SELECT uuid_generate_v4(), md5(random()::text), md5(random()::text), i::integer, 'BTC', - i::integer, 'EUR', 'confirmed', random() > 0.5, random() > 0.5, random() > 0.5, - null, null, now() - random() * INTERVAL '2 days', now() - random() * INTERVAL '2 days', null, - random() > 0.5, random() > 0.5, random() > 0.5, 0, 1, 20, 50, null, '99ac9999-9999-99e9-9999-9f99a9999999', - random() * (40-1) + 1::int, now() - random() * INTERVAL '2 days', random() > 0.5, null, - random() * (0.9-0.1) + 0.1::int, i::integer, i::integer, null, null, null, null, null, null, null, null - FROM generate_series(1, 5000000) as t(i); - - INSERT INTO logs - SELECT uuid_generate_v4(), md5(random()::text), 'info', now() - random() * INTERVAL '2 days', - 'message', now() - random() * INTERVAL '2 days',0 - FROM generate_series(1, 5000000) as t(i); - - INSERT INTO bills - SELECT uuid_generate_v4(), i::integer, 'USD', '3d92c323-58c6-4172-9f30-91b80f0c653c', - i::integer, '2021-04-16 11:51:38', 'BTC', i::integer - FROM generate_series(1, 5000000) as t(i); - - ` - db.none(sql) -} - -loadDummyTxData() diff --git a/tests/stress/machines.js b/tests/stress/machines.js new file mode 100644 index 00000000..11d5e334 --- /dev/null +++ b/tests/stress/machines.js @@ -0,0 +1,92 @@ +const cp = require('node:child_process') +const fs = require('node:fs') +const path = require('node:path') + +const { EXIT } = require('./consts') +const CLI = require('./cli') + +const help_message = "Setup fake machines to be used as stress test clients." 
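+// Usage sketch (same example as the README in index.js; both paths are
+// placeholders for the operator's own directories):
+//   $ lamassu-server-stress-testing machines -n 10 --fake_data_dir path/to/stress/data/ --machine path/to/real/machine/code/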
+ +const cli = CLI({ + grammar: [ + [["--help"], "Show this help message"], + [["--machine", "PATH"], "Path to the machine's source code root"], + [["--fake_data_dir", "PATH"], "Where to save the fake machines' data"], + [["-n", "NUMBER"], "Number of fake machines to create"], + ], +}) + +const help = (exit_code) => { + console.log("Usage: lamassu-server-stress-testing machines ARGS...") + console.log(help_message) + cli.help() + return exit_code +} + +const create_fake_machine = async (gencerts_path, fake_data_dir, i) => + new Promise((resolve, reject) => { + const machine_data_dir = path.join(fake_data_dir, i.toString()) + fs.mkdirSync(machine_data_dir, { recursive: true, mode: 0o750 }) + + console.log("Creating fake machine number", i) + const gc = cp.fork(gencerts_path, [machine_data_dir], { + cwd: process.cwd(), + encoding: 'utf8', + }) + + gc.on('error', (error) => { + console.log(error) + resolve(EXIT.EXCEPTION) + }) + + gc.on('exit', (code, signal) => { + console.error("lamassu-server code:", code) + console.error("lamassu-server signal:", signal) + resolve(typeof(code) === 'number' ? code : EXIT.EXCEPTION) + }) + }) + +const create_fake_machines = async ({ machine, fake_data_dir, n }) => { + n = parseInt(n) + if (Number.isNaN(n) || n <= 0) { + console.error("Expected n to be a positive number, got", n) + return help(EXIT.BADARGS) + } + + /* TODO: Remove all data of previous machines? */ + //fs.rmSync(fake_data_dir, { recursive: true, force: true }) + + /* Create the root data directory */ + fs.mkdirSync(fake_data_dir, { recursive: true, mode: 0o750 }) + + const gencerts_path = path.join(machine, "tools", "generate-certificates") + let exit_code = EXIT.OK + for (let i = 0; i < n && exit_code === EXIT.OK; i++) + exit_code = await create_fake_machine(gencerts_path, fake_data_dir, i) + + return exit_code +} + +const run = async (args) => { + const [err, options, positional] = cli.parse(args) + if (err) { + console.error(err) + return help(EXIT.BADARGS) + } + + if (options.help) + return help(EXIT.OK) + + const missing_options = ["n", "machine", "fake_data_dir"].filter((opt) => !options[opt]) + if (missing_options.length > 0) { + console.error("The following options are required:", missing_options.join(", ")) + return help(EXIT.BADARGS) + } + + return await create_fake_machines(options) +} + +module.exports = { + help_message, + run, +} diff --git a/tests/stress/queries-performance-analyzer.js b/tests/stress/queries-performance-analyzer.js deleted file mode 100644 index d235ff62..00000000 --- a/tests/stress/queries-performance-analyzer.js +++ /dev/null @@ -1,231 +0,0 @@ -const db = require('../../lib/db') -const Pgp = require('pg-promise')() -const _ = require('lodash/fp') -const cashInTx = require('../../lib/cash-in/cash-in-tx') -const { CASH_OUT_TRANSACTION_STATES, REDEEMABLE_AGE } = require('../../lib/cash-out/cash-out-helper') - -const TX_PASSTHROUGH_ERROR_CODES = ['operatorCancel', 'scoreThresholdReached'] - -function filterTransaction () { - const sql = `EXPLAIN ANALYZE - SELECT DISTINCT * FROM ( - SELECT 'type' AS type, 'Cash In' AS value UNION - SELECT 'type' AS type, 'Cash Out' AS value UNION - SELECT 'machine' AS type, name AS value FROM devices d INNER JOIN cash_in_txs t ON d.device_id = t.device_id UNION - SELECT 'machine' AS type, name AS value FROM devices d INNER JOIN cash_out_txs t ON d.device_id = t.device_id UNION - SELECT 'customer' AS type, concat(id_card_data::json->>'firstName', ' ', id_card_data::json->>'lastName') AS value - FROM customers c INNER JOIN 
cash_in_txs t ON c.id = t.customer_id - WHERE c.id_card_data::json->>'firstName' IS NOT NULL or c.id_card_data::json->>'lastName' IS NOT NULL UNION - SELECT 'customer' AS type, concat(id_card_data::json->>'firstName', ' ', id_card_data::json->>'lastName') AS value - FROM customers c INNER JOIN cash_out_txs t ON c.id = t.customer_id - WHERE c.id_card_data::json->>'firstName' IS NOT NULL or c.id_card_data::json->>'lastName' IS NOT NULL UNION - SELECT 'fiat' AS type, fiat_code AS value FROM cash_in_txs UNION - SELECT 'fiat' AS type, fiat_code AS value FROM cash_out_txs UNION - SELECT 'crypto' AS type, crypto_code AS value FROM cash_in_txs UNION - SELECT 'crypto' AS type, crypto_code AS value FROM cash_out_txs UNION - SELECT 'address' AS type, to_address AS value FROM cash_in_txs UNION - SELECT 'address' AS type, to_address AS value FROM cash_out_txs UNION - SELECT 'status' AS type, ${cashInTx.TRANSACTION_STATES} AS value FROM cash_in_txs UNION - SELECT 'status' AS type, ${CASH_OUT_TRANSACTION_STATES} AS value FROM cash_out_txs - ) f` - return db.any(sql) -} - -function filterCustomer () { - const sql = `EXPLAIN ANALYZE - SELECT DISTINCT * FROM ( - SELECT 'phone' AS type, phone AS value FROM customers WHERE phone IS NOT NULL UNION - SELECT 'name' AS type, id_card_data::json->>'firstName' AS value FROM customers WHERE id_card_data::json->>'firstName' IS NOT NULL AND id_card_data::json->>'lastName' IS NULL UNION - SELECT 'name' AS type, id_card_data::json->>'lastName' AS value FROM customers WHERE id_card_data::json->>'firstName' IS NULL AND id_card_data::json->>'lastName' IS NOT NULL UNION - SELECT 'name' AS type, concat(id_card_data::json->>'firstName', ' ', id_card_data::json->>'lastName') AS value FROM customers WHERE id_card_data::json->>'firstName' IS NOT NULL AND id_card_data::json->>'lastName' IS NOT NULL UNION - SELECT 'address' as type, id_card_data::json->>'address' AS value FROM customers WHERE id_card_data::json->>'address' IS NOT NULL UNION - SELECT 'id' AS type, id_card_data::json->>'documentNumber' AS value FROM customers WHERE id_card_data::json->>'documentNumber' IS NOT NULL - ) f` - return db.any(sql) -} - -function getCustomerById (id) { - const passableErrorCodes = _.map(Pgp.as.text, TX_PASSTHROUGH_ERROR_CODES).join(',') - - const sql = `EXPLAIN ANALYZE - select id, authorized_override, days_suspended, is_suspended, front_camera_path, front_camera_override, - phone, sms_override, id_card_data, id_card_data_override, id_card_data_expiration, - id_card_photo_path, id_card_photo_override, us_ssn, us_ssn_override, sanctions, sanctions_at, - sanctions_override, total_txs, total_spent, created as last_active, fiat as last_tx_fiat, - fiat_code as last_tx_fiat_code, tx_class as last_tx_class, subscriber_info - from ( - select c.id, c.authorized_override, - greatest(0, date_part('day', c.suspended_until - now())) as days_suspended, - c.suspended_until > now() as is_suspended, - c.front_camera_path, c.front_camera_override, - c.phone, c.sms_override, c.id_card_data, c.id_card_data_override, c.id_card_data_expiration, - c.id_card_photo_path, c.id_card_photo_override, c.us_ssn, c.us_ssn_override, c.sanctions, - c.sanctions_at, c.sanctions_override, c.subscriber_info, t.tx_class, t.fiat, t.fiat_code, t.created, - row_number() over (partition by c.id order by t.created desc) as rn, - sum(case when t.id is not null then 1 else 0 end) over (partition by c.id) as total_txs, - sum(case when error_code is null or error_code not in ($1^) then t.fiat else 0 end) over (partition by c.id) as 
total_spent - from customers c left outer join ( - select 'cashIn' as tx_class, id, fiat, fiat_code, created, customer_id, error_code - from cash_in_txs where send_confirmed = true union - select 'cashOut' as tx_class, id, fiat, fiat_code, created, customer_id, error_code - from cash_out_txs where confirmed_at is not null) t on c.id = t.customer_id - where c.id = $2 - ) as cl where rn = 1` - return db.any(sql, [passableErrorCodes, id]) -} - -function simpleGetMachineLogs (deviceId, from = new Date(0).toISOString(), until = new Date().toISOString(), limit = null, offset = 0) { - const sql = `EXPLAIN ANALYZE - select id, log_level, timestamp, message from logs - where device_id=$1 - and timestamp >= $2 - and timestamp <= $3 - order by timestamp desc, serial desc - limit $4 - offset $5` - return db.any(sql, [ deviceId, from, until, limit, offset ]) -} - -function batchCashIn ( - from = new Date(0).toISOString(), - until = new Date().toISOString(), - limit = null, - offset = 0, - id = null, - txClass = null, - machineName = null, - customerName = null, - fiatCode = null, - cryptoCode = null, - toAddress = null, - status = null, - simplified = false -) { - const cashInSql = `EXPLAIN ANALYZE - SELECT 'cashIn' AS tx_class, txs.*, - c.phone AS customer_phone, - c.id_card_data_number AS customer_id_card_data_number, - c.id_card_data_expiration AS customer_id_card_data_expiration, - c.id_card_data AS customer_id_card_data, - concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') AS customer_name, - c.front_camera_path AS customer_front_camera_path, - c.id_card_photo_path AS customer_id_card_photo_path, - ((NOT txs.send_confirmed) AND (txs.created <= now() - interval $1)) AS expired - FROM (SELECT *, ${cashInTx.TRANSACTION_STATES} AS txStatus FROM cash_in_txs) AS txs - LEFT OUTER JOIN customers c ON txs.customer_id = c.id - INNER JOIN devices d ON txs.device_id = d.device_id - WHERE txs.created >= $2 AND txs.created <= $3 ${ - id !== null ? 
`AND txs.device_id = $6` : `` -} - AND ($7 is null or $7 = 'Cash In') - AND ($8 is null or d.name = $8) - AND ($9 is null or concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') = $9) - AND ($10 is null or txs.fiat_code = $10) - AND ($11 is null or txs.crypto_code = $11) - AND ($12 is null or txs.to_address = $12) - AND ($13 is null or txs.txStatus = $13) - AND (fiat > 0) - ORDER BY created DESC limit $4 offset $5` - - return db.any(cashInSql, [cashInTx.PENDING_INTERVAL, from, until, limit, offset, id, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status]) -} - -function batchCashOut ( - from = new Date(0).toISOString(), - until = new Date().toISOString(), - limit = null, - offset = 0, - id = null, - txClass = null, - machineName = null, - customerName = null, - fiatCode = null, - cryptoCode = null, - toAddress = null, - status = null, - simplified = false -) { - const cashOutSql = `EXPLAIN ANALYZE - SELECT 'cashOut' AS tx_class, - txs.*, - actions.tx_hash, - c.phone AS customer_phone, - c.id_card_data_number AS customer_id_card_data_number, - c.id_card_data_expiration AS customer_id_card_data_expiration, - c.id_card_data AS customer_id_card_data, - concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') AS customer_name, - c.front_camera_path AS customer_front_camera_path, - c.id_card_photo_path AS customer_id_card_photo_path, - (extract(epoch FROM (now() - greatest(txs.created, txs.confirmed_at))) * 1000) >= $1 AS expired - FROM (SELECT *, ${CASH_OUT_TRANSACTION_STATES} AS txStatus FROM cash_out_txs) txs - INNER JOIN cash_out_actions actions ON txs.id = actions.tx_id - AND actions.action = 'provisionAddress' - LEFT OUTER JOIN customers c ON txs.customer_id = c.id - INNER JOIN devices d ON txs.device_id = d.device_id - WHERE txs.created >= $2 AND txs.created <= $3 ${ - id !== null ? `AND txs.device_id = $6` : `` -} - AND ($7 is null or $7 = 'Cash Out') - AND ($8 is null or d.name = $8) - AND ($9 is null or concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') = $9) - AND ($10 is null or txs.fiat_code = $10) - AND ($11 is null or txs.crypto_code = $11) - AND ($12 is null or txs.to_address = $12) - AND ($13 is null or txs.txStatus = $13) - AND (fiat > 0) - ORDER BY created DESC limit $4 offset $5` - - return db.any(cashOutSql, [REDEEMABLE_AGE, from, until, limit, offset, id, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status]) -} - -function getTx (txId, txClass) { - const cashInSql = `EXPLAIN ANALYZE - select 'cashIn' as tx_class, txs.*, - ((not txs.send_confirmed) and (txs.created <= now() - interval $1)) as expired - from cash_in_txs as txs - where txs.id=$2` - - const cashOutSql = `EXPLAIN ANALYZE - select 'cashOut' as tx_class, - txs.*, - (extract(epoch from (now() - greatest(txs.created, txs.confirmed_at))) * 1000) >= $2 as expired - from cash_out_txs txs - where txs.id=$1` - - return txClass === 'cashIn' - ? db.any(cashInSql, [cashInTx.PENDING_INTERVAL, txId]) - : db.any(cashOutSql, [txId, REDEEMABLE_AGE]) -} - -function getTxAssociatedData (txId, txClass) { - const billsSql = `EXPLAIN ANALYZE select 'bills' as bills, b.* from bills b where cash_in_txs_id = $1` - const actionsSql = `EXPLAIN ANALYZE select 'cash_out_actions' as cash_out_actions, actions.* from cash_out_actions actions where tx_id = $1` - - return txClass === 'cashIn' - ? 
db.any(billsSql, [txId]) - : db.any(actionsSql, [txId]) -} - -const run = () => { - const deviceId = '7526924341dc4a57f02b6411a85923de' // randomly generated by the load script - const customerId = '99ac9999-9999-99e9-9999-9f99a9999999' // hardcoded on the current load script - const cashOutTxId = 'c402a7ae-b8f7-4781-8080-1e9ab76d62b5' // randomly generated by the load script - const cashInTxId = '4d8d89f4-7d77-4d30-87e8-be9de05deea7' // randomly generated by the load script - - const getExecutionTime = _.compose(_.get('QUERY PLAN'), _.last) - Promise.all([filterCustomer(), filterTransaction(), getCustomerById(customerId), simpleGetMachineLogs(deviceId), batchCashIn(), batchCashOut(), - getTx(cashInTxId, 'cashIn'), getTx(cashOutTxId, 'cashOut'), getTxAssociatedData(cashInTxId, 'cashIn'), getTxAssociatedData(cashOutTxId, 'cashOut')]) - .then(([filterCustomer, filterTransaction, getCustomerById, logs, batchCashIn, batchCashOut, getTxCashOut, getTxCashIn, - getTxAssociatedDataCashIn, getTxAssociatedDataCashOut]) => { - console.log(`filterCustomer => ${getExecutionTime(filterCustomer)}`) - console.log(`filterTransaction => ${getExecutionTime(filterTransaction)}`) - console.log(`getCustomerById => ${getExecutionTime(getCustomerById)}`) - console.log(`batchCashOut + batchCashIn => ${getExecutionTime(batchCashOut) + ' + ' + getExecutionTime(batchCashIn)} `) - console.log(`getTx (cash-out) => ${getExecutionTime(getTxCashOut)}`) - console.log(`getTx (cash-in) => ${getExecutionTime(getTxCashIn)}`) - console.log(`getTxAssociatedData (cash-in) => ${getExecutionTime(getTxAssociatedDataCashIn)}`) - console.log(`getTxAssociatedDataCashOut (cash-out) => ${getExecutionTime(getTxAssociatedDataCashOut)}`) - }) -} - -run() diff --git a/tests/stress/scripts/create-machines.sh b/tests/stress/scripts/create-machines.sh index 9c7ac003..9c997e3d 100644 --- a/tests/stress/scripts/create-machines.sh +++ b/tests/stress/scripts/create-machines.sh @@ -6,19 +6,12 @@ if [ $# -eq 0 ] echo "usage: ./build-machines [number_of_machines] /path/to/server/cert/lamassu_op_root_ca.pem /path/to/machine/" && exit 1 fi -case $1 in - ''|*[!0-9]*) echo "usage: ./build-machines [number_of_machines] /path/to/server/cert/lamassu_op_root_ca.pem /path/to/machine/" && exit 1;; -esac - SERVER_CERT=$(perl -pe 's/\n/\\n/' < $2) if [ -z "$SERVER_CERT" ] then echo "Lamassu-op-root-ca.pem is empty" && exit 1 fi -# Remove old folders -rm -rf ./machines/* - # Create stress database sudo -u postgres psql postgres -c "drop database if exists lamassu_stress" sudo -u postgres psql postgres -c "create database lamassu_stress with template lamassu" @@ -33,16 +26,12 @@ do cp "$3"/data/client.sample.pem ./machines/$NUMBER/ cp "$3"/data/client.sample.key ./machines/$NUMBER/ - - cat > ./machines/$NUMBER/connection_info.json << EOL - {"host":"localhost","ca":"$SERVER_CERT"} -EOL - - echo 'Generating certs...' 
- node ./utils/init-cert.js $NUMBER + cat > ./machines/$NUMBER/connection_info.json << EOF +{"host":"localhost","ca":"$SERVER_CERT"} +EOF # Get device_id - DEVICE_ID=`openssl x509 -outform der -in ./machines/$NUMBER/client.pem | sha256sum | cut -d " " -f 1` + DEVICE_ID=`openssl x509 -outform der -in ./machines/$NUMBER/client.pem | sha256sum | cut -d ' ' -f 1` # Update db config NEW_CONFIG=$(node ./utils/save-config.js $NUMBER $DEVICE_ID) diff --git a/tests/stress/scripts/index.js b/tests/stress/scripts/index.js deleted file mode 100644 index 991b2d19..00000000 --- a/tests/stress/scripts/index.js +++ /dev/null @@ -1,32 +0,0 @@ -const exec = require('child_process').exec - -/** - * Execute simple shell command (async wrapper). - * @param {String} cmd - * @return {Object} { stdout: String, stderr: String } - */ -function execCommand (cmd) { - return new Promise(function (resolve, reject) { - const proc = exec(cmd, (err, stdout, stderr) => { - if (err) { - reject(err) - } else { - resolve({ stdout, stderr }) - } - }) - - proc.stdout.on('data', data => { - console.log(data) - }) - - proc.stderr.on('data', data => { - console.log(data) - }) - - proc.on('exit', code => { - console.log('child process exited with code ' + code.toString()) - }) - }) -} - -module.exports = { execCommand } diff --git a/tests/stress/server.js b/tests/stress/server.js new file mode 100644 index 00000000..ae066a6a --- /dev/null +++ b/tests/stress/server.js @@ -0,0 +1,59 @@ +const cp = require('node:child_process') +const path = require('node:path') + +const { EXIT } = require('./consts') +const CLI = require('./cli') + +const help_message = "Start the server configured for stress testing." + +const cli = CLI({ + grammar: [ + [["--help"], "Show this help message"], + ], +}) + +const help = (exit_code) => { + console.log("Usage: lamassu-server-stress-testing server ARGS...") + console.log(help_message) + cli.help() + return exit_code +} + +const start_server = (args) => + new Promise((resolve, reject) => { + const lamassu_server = path.join(__dirname, "../../bin/lamassu-server") + const ls = cp.fork(lamassu_server, args, { + cwd: process.cwd(), + encoding: 'utf8', + env: { LAMASSU_STRESS_TESTING: "YES" }, + }) + + ls.on('error', (error) => { + console.log(error) + resolve(EXIT.EXCEPTION) + }) + + ls.on('exit', (code, signal) => { + console.error("lamassu-server code:", code) + console.error("lamassu-server signal:", signal) + resolve(typeof(code) === 'number' ? 
code : EXIT.EXCEPTION) + }) + }) + +const run = async (args) => { + const [err, options, positional] = cli.parse(args) + if (err) { + console.error(err) + return help(EXIT.BADARGS) + } + + if (options.help) + return help(EXIT.OK) + + return await start_server(positional) +} + +module.exports = { + help_message, + run, +} diff --git a/tests/stress/test-server.js b/tests/stress/test-server.js deleted file mode 100644 index 51569f7e..00000000 --- a/tests/stress/test-server.js +++ /dev/null @@ -1,7 +0,0 @@ -const cmd = require('./scripts') - -process.on('message', async (msg) => { - console.log('Message from parent:', msg) - - await cmd.execCommand(`node --prof LAMASSU_DB=STRESS_TEST ../../bin/lamassu-server`) -}) diff --git a/tests/stress/utils/index.js b/tests/stress/utils/index.js deleted file mode 100644 index e2445705..00000000 --- a/tests/stress/utils/index.js +++ /dev/null @@ -1,5 +0,0 @@ -function randomIntFromInterval (min, max) { - return Math.floor(Math.random() * (max - min + 1) + min) -} - -module.exports = { randomIntFromInterval } diff --git a/tests/stress/utils/init-cert.js b/tests/stress/utils/init-cert.js deleted file mode 100644 index cbfef975..00000000 --- a/tests/stress/utils/init-cert.js +++ /dev/null @@ -1,12 +0,0 @@ -const path = require('path') -const variables = require('./variables') -const { init } = require(`../${variables.MACHINE_PATH}/lib/pairing`) - -const number = process.argv[2] - -const certPath = { - cert: path.resolve(process.cwd(), 'machines', number, 'client.pem'), - key: path.resolve(process.cwd(), 'machines', number, 'client.key') -} - -init(certPath) diff --git a/tests/stress/utils/save-config.js b/tests/stress/utils/save-config.js deleted file mode 100644 index 9a39db0a..00000000 --- a/tests/stress/utils/save-config.js +++ /dev/null @@ -1,3 +0,0 @@ -const config = require('./default-config.json') - -console.log(JSON.stringify(config)) diff --git a/tools/lamassu-server-stress-testing b/tools/lamassu-server-stress-testing new file mode 100755 index 00000000..d58bfddc --- /dev/null +++ b/tools/lamassu-server-stress-testing @@ -0,0 +1,2 @@ +#!/usr/bin/env node +require('../tests/stress')(process.argv.slice(2)) From 0a120203be2667b53b190d8106fc68a6ab3c7784 Mon Sep 17 00:00:00 2001 From: siiky Date: Mon, 17 Mar 2025 16:57:30 +0000 Subject: [PATCH 02/21] feat: in stress testing use device ID sent in headers --- lib/middlewares/populateDeviceId.js | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/lib/middlewares/populateDeviceId.js b/lib/middlewares/populateDeviceId.js index e406578d..e9de4d88 100644 --- a/lib/middlewares/populateDeviceId.js +++ b/lib/middlewares/populateDeviceId.js @@ -14,12 +14,13 @@ function sha256 (buf) { } const populateDeviceId = function (req, res, next) { - const deviceId = _.isFunction(req.connection.getPeerCertificate) + let deviceId = _.isFunction(req.connection.getPeerCertificate) ? sha256(req.connection.getPeerCertificate()?.raw) - : IS_STRESS_TESTING - ? 'placeholder' /* TODO: req... ? 
*/ : null + if (!deviceId && IS_STRESS_TESTING) + deviceId = req.headers.device_id + if (!deviceId) return res.status(500).json({ error: 'Unable to find certificate' }) req.deviceId = deviceId req.deviceTime = req.get('date') From 99dd90d51aeac92a1274772589f70317a9f5a366 Mon Sep 17 00:00:00 2001 From: siiky Date: Mon, 17 Mar 2025 23:33:38 +0000 Subject: [PATCH 03/21] feat: insert stress testing machines into DB --- tests/stress/db.js | 8 +++--- tests/stress/env.js | 18 ++++++------ tests/stress/index.js | 21 +++++++++----- tests/stress/machines.js | 60 ++++++++++++++++++++++++++++------------ tests/stress/server.js | 8 +++--- 5 files changed, 74 insertions(+), 41 deletions(-) diff --git a/tests/stress/db.js b/tests/stress/db.js index 77aaff0b..34f5a79a 100644 --- a/tests/stress/db.js +++ b/tests/stress/db.js @@ -1,5 +1,5 @@ -const cp = require('node:child_process') -const path = require('node:path') +const { spawnSync } = require('node:child_process') +const { join } = require('node:path') require('../../lib/environment-helper') const db = require('../../lib/db') @@ -23,8 +23,8 @@ const help = (exit_code) => { } const migrate = async () => { - const lamassu_migrate_path = path.join(__dirname, "../../bin/lamassu-migrate") - const { stdout, stderr, status, signal, error } = cp.spawnSync(lamassu_migrate_path, [], { + const lamassu_migrate_path = join(__dirname, "../../bin/lamassu-migrate") + const { stdout, stderr, status, signal, error } = spawnSync(lamassu_migrate_path, [], { cwd: process.cwd(), encoding: 'utf8', }) diff --git a/tests/stress/env.js b/tests/stress/env.js index 0d06ba3c..597a8dee 100644 --- a/tests/stress/env.js +++ b/tests/stress/env.js @@ -1,6 +1,6 @@ -const fs = require('node:fs') -const os = require('node:os') -const path = require('node:path') +const { readFileSync, writeFileSync } = require('node:fs') +const { EOL } = require('node:os') +const { resolve } = require('node:path') const dotenv = require('dotenv') @@ -30,10 +30,10 @@ const help = (exit_code) => { } const env_read = (path) => { - const envstr = fs.readFileSync(path, { encoding: 'utf8' }) + const envstr = readFileSync(path, { encoding: 'utf8' }) return dotenv.parse(envstr) //const entries = envstr - // .split(os.EOL) + // .split(EOL) // .flatMap((line) => { // line = line.trimStart() // const i = line.indexOf('=') @@ -51,8 +51,8 @@ const env_read = (path) => { const env_write = (envvars, path) => { const envcontent = Object.entries(envvars) .map(([varname, value]) => [varname, value].join('=')) - .join(os.EOL) + os.EOL - fs.writeFileSync(path, envcontent) + .join(EOL) + EOL + writeFileSync(path, envcontent) } const run = async (args) => { @@ -70,10 +70,10 @@ const run = async (args) => { return help(EXIT.BADARGS) } - const inenvpath = path.resolve(process.cwd(), options.inenv ?? ".env") + const inenvpath = resolve(process.cwd(), options.inenv ?? ".env") const inenvvars = env_read(inenvpath) - const outenvpath = path.resolve(process.cwd(), options.outenv ?? ".stress.env") + const outenvpath = resolve(process.cwd(), options.outenv ?? ".stress.env") const outenvvars = { ...inenvvars, POSTGRES_USER: options.dbuser ?? "postgres", diff --git a/tests/stress/index.js b/tests/stress/index.js index 2d6acefc..df8c6f83 100644 --- a/tests/stress/index.js +++ b/tests/stress/index.js @@ -17,14 +17,14 @@ on each subcommand. First of all, you need to create a suitable .env file. With the following commands, .env.bak will be used as a starting point, and the result will be -saved in .env. 
This is to avoid losing the real configurations. +saved in .env. This helps to avoid losing the real configurations. $ cp .env .env.bak $ lamassu-server-stress-testing env --inenv .env.bak --outenv .env -The database chosen in the command above (by default lamassu_stress) -must be initialized, and must have a bare-bones configuration. The following -command does that: +The database chosen in the command above (by default 'lamassu_stress') must be +initialized, and must have a bare-bones configuration. The following command +takes care of it: $ lamassu-server-stress-testing db @@ -34,15 +34,22 @@ following command creates 10 fake machines, and saves their data in path/to/stress/data/. path/to/real/machine/code/ is the path to the root of the machine's code. -$ lamassu-server-stress-testing machines -n 10 --fake_data_dir path/to/stress/data/ --machine path/to/real/machine/code/ +$ lamassu-server-stress-testing machines -n 10 --fake_data_dir path/to/stress/data/ --machine path/to/real/machine/code/ --replace_existing + +Finally, use the following to start the lamassu-server in stress-testing mode: + +$ lamassu-server-stress-testing server `; +const rightpad = (s, w, c) => s + c.repeat(Math.max(w-s.length, 0)) + const help = (exit_code) => { - console.log("Usage: lamassu-server-stress-testing SUBCMD ARGS...",) + console.log("Usage: lamassu-server-stress-testing SUBCMD ARGS...") console.log("Where SUBCMD is one of the following:") + const max_subcmd_length = Math.max(...Object.keys(SUBCMDS).map(subcmd => subcmd.length)) Object.entries(SUBCMDS).forEach( ([subcmd, { help_message }]) => { - console.log(`\t${subcmd}\t${help_message ?? ''}`) + console.log(`\t${rightpad(subcmd, max_subcmd_length, ' ')}\t${help_message ?? ''}`) } ) diff --git a/tests/stress/machines.js b/tests/stress/machines.js index 11d5e334..f116c50a 100644 --- a/tests/stress/machines.js +++ b/tests/stress/machines.js @@ -1,11 +1,16 @@ -const cp = require('node:child_process') -const fs = require('node:fs') -const path = require('node:path') +const { fork } = require('node:child_process') +const { createHash } = require('node:crypto') +const { mkdirSync, rmSync } = require('node:fs') +const { readFile } = require('node:fs/promises') +const { join } = require('node:path') + +require('../../lib/environment-helper') +const db = require('../../lib/db') const { EXIT } = require('./consts') const CLI = require('./cli') -const help_message = "Setup fake machines to be used as stress test clients." +const help_message = "Create and insert fake machines into the DB." 
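+// Usage sketch (matches the README example updated in this commit; paths are
+// placeholders):
+//   $ lamassu-server-stress-testing machines -n 10 --fake_data_dir path/to/stress/data/ --machine path/to/real/machine/code/ --replace_existing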
const cli = CLI({ grammar: [ @@ -13,6 +18,7 @@ const cli = CLI({ [["--machine", "PATH"], "Path to the machine's source code root"], [["--fake_data_dir", "PATH"], "Where to save the fake machines' data"], [["-n", "NUMBER"], "Number of fake machines to create"], + [["--replace_existing"], "Remove machines of previous runs"], ], }) @@ -23,13 +29,19 @@ const help = (exit_code) => { return exit_code } -const create_fake_machine = async (gencerts_path, fake_data_dir, i) => +const compute_machine_id = pem_path => ( + readFile(pem_path, { encoding: 'utf8' }) + .then(cert => cert.split('\r\n')) + .then(cert => Buffer.from(cert.slice(1, cert.length-2).join(''), 'base64')) + .then(raw => createHash('sha256').update(raw).digest('hex')) +) + +const create_fake_machine = async (gencerts_path, machine_data_dir, i) => ( new Promise((resolve, reject) => { - const machine_data_dir = path.join(fake_data_dir, i.toString()) - fs.mkdirSync(machine_data_dir, { recursive: true, mode: 0o750 }) + mkdirSync(machine_data_dir, { recursive: true, mode: 0o750 }) console.log("Creating fake machine number", i) - const gc = cp.fork(gencerts_path, [machine_data_dir], { + const gc = fork(gencerts_path, [machine_data_dir], { cwd: process.cwd(), encoding: 'utf8', }) @@ -45,26 +57,40 @@ const create_fake_machine = async (gencerts_path, fake_data_dir, i) => resolve(typeof(code) === 'number' ? code : EXIT.EXCEPTION) }) }) +) -const create_fake_machines = async ({ machine, fake_data_dir, n }) => { +const create_fake_machines = async ({ machine, fake_data_dir, n, replace_existing }) => { n = parseInt(n) if (Number.isNaN(n) || n <= 0) { console.error("Expected n to be a positive number, got", n) return help(EXIT.BADARGS) } - /* TODO: Remove all data of previous machines? */ - //fs.rmSync(fake_data_dir, { recursive: true, force: true }) + if (replace_existing) { + rmSync(fake_data_dir, { recursive: true, force: true }) + await db.none("DELETE FROM devices") + } /* Create the root data directory */ - fs.mkdirSync(fake_data_dir, { recursive: true, mode: 0o750 }) + mkdirSync(fake_data_dir, { recursive: true, mode: 0o750 }) - const gencerts_path = path.join(machine, "tools", "generate-certificates") - let exit_code = EXIT.OK - for (let i = 0; i < n && exit_code === EXIT.OK; i++) - exit_code = await create_fake_machine(gencerts_path, fake_data_dir, i) + const gencerts_path = join(machine, "tools", "generate-certificates") + for (let i = 0; i < n; i++) { + const machine_data_dir = join(fake_data_dir, i.toString()) + const exit_code = await create_fake_machine(gencerts_path, machine_data_dir, i) + if (exit_code !== EXIT.OK) + return exit_code - return exit_code + const device_id = await compute_machine_id(join(machine_data_dir, "client.pem")) + + await db.none( + `INSERT INTO devices (device_id, cassette1, cassette2, paired, display, created, name, last_online, location) + VALUES ($1, 0, 0, 't', 't', now(), $2, now(), '{}'::json)`, + [device_id, `machine_${i}`] + ) + } + + return EXIT.OK } const run = async (args) => { diff --git a/tests/stress/server.js b/tests/stress/server.js index ae066a6a..7374195c 100644 --- a/tests/stress/server.js +++ b/tests/stress/server.js @@ -1,5 +1,5 @@ -const cp = require('node:child_process') -const path = require('node:path') +const { fork } = require('node:child_process') +const { join } = require('node:path') const { EXIT } = require('./consts') const CLI = require('./cli') @@ -21,8 +21,8 @@ const help = (exit_code) => { const start_server = (args) => new Promise((resolve, reject) => { - const 
lamassu_server = path.join(__dirname, "../../bin/lamassu-server") - const ls = cp.fork(lamassu_server, args, { + const lamassu_server = join(__dirname, "../../bin/lamassu-server") + const ls = fork(lamassu_server, args, { cwd: process.cwd(), encoding: 'utf8', env: { LAMASSU_STRESS_TESTING: "YES" }, From 27e2f26d11dd024eecdf8430f69846d285093a79 Mon Sep 17 00:00:00 2001 From: siiky Date: Tue, 18 Mar 2025 00:32:30 +0000 Subject: [PATCH 04/21] refactor: skip cert file creation Create the certificate in memory, insert it into the DB, and save it to a text file instead. --- tests/stress/machines.js | 87 +++++++++++++++++----------------------- 1 file changed, 36 insertions(+), 51 deletions(-) diff --git a/tests/stress/machines.js b/tests/stress/machines.js index f116c50a..01ccf8b4 100644 --- a/tests/stress/machines.js +++ b/tests/stress/machines.js @@ -1,7 +1,8 @@ const { fork } = require('node:child_process') const { createHash } = require('node:crypto') -const { mkdirSync, rmSync } = require('node:fs') +const { createWriteStream } = require('node:fs') const { readFile } = require('node:fs/promises') +const { EOL } = require('node:os') const { join } = require('node:path') require('../../lib/environment-helper') @@ -16,7 +17,7 @@ const cli = CLI({ grammar: [ [["--help"], "Show this help message"], [["--machine", "PATH"], "Path to the machine's source code root"], - [["--fake_data_dir", "PATH"], "Where to save the fake machines' data"], + [["--device_ids_path", "PATH"], "Where to save the list of device IDs"], [["-n", "NUMBER"], "Number of fake machines to create"], [["--replace_existing"], "Remove machines of previous runs"], ], @@ -29,66 +30,50 @@ const help = (exit_code) => { return exit_code } -const compute_machine_id = pem_path => ( - readFile(pem_path, { encoding: 'utf8' }) - .then(cert => cert.split('\r\n')) - .then(cert => Buffer.from(cert.slice(1, cert.length-2).join(''), 'base64')) - .then(raw => createHash('sha256').update(raw).digest('hex')) +const close_stream = (stream) => ( + new Promise((resolve, reject) => ( + stream.close((err) => err ? reject(err) : resolve()) + )) ) -const create_fake_machine = async (gencerts_path, machine_data_dir, i) => ( - new Promise((resolve, reject) => { - mkdirSync(machine_data_dir, { recursive: true, mode: 0o750 }) - - console.log("Creating fake machine number", i) - const gc = fork(gencerts_path, [machine_data_dir], { - cwd: process.cwd(), - encoding: 'utf8', - }) +const compute_machine_id = cert => { + cert = cert.split('\r\n') + const raw = Buffer.from(cert.slice(1, cert.length-2).join(''), 'base64') + return createHash('sha256').update(raw).digest('hex') +} - gc.on('error', (error) => { - console.log(error) - resolve(EXIT.EXCEPTION) - }) +const create_fake_machine = async (device_ids_file, self_sign, i) => { + console.log("creating machine", i) + const { cert } = await self_sign.generateCertificate() + const device_id = compute_machine_id(cert) + await db.none( + `INSERT INTO devices (device_id, cassette1, cassette2, paired, display, created, name, last_online, location) + VALUES ($1, 0, 0, 't', 't', now(), $2, now(), '{}'::json)`, + [device_id, `machine_${i}`] + ) + device_ids_file.write(device_id + EOL) + console.log("created machine", i, "with device ID", device_id) +} - gc.on('exit', (code, signal) => { - console.error("lamassu-server code:", code) - console.error("lamassu-server signal:", signal) - resolve(typeof(code) === 'number' ? 
code : EXIT.EXCEPTION) - }) - }) -) - -const create_fake_machines = async ({ machine, fake_data_dir, n, replace_existing }) => { +const create_fake_machines = async ({ machine, device_ids_path, n, replace_existing }) => { n = parseInt(n) if (Number.isNaN(n) || n <= 0) { console.error("Expected n to be a positive number, got", n) return help(EXIT.BADARGS) } - if (replace_existing) { - rmSync(fake_data_dir, { recursive: true, force: true }) - await db.none("DELETE FROM devices") - } + const device_ids_file = createWriteStream(device_ids_path, { + flags: replace_existing ? "w" : "a", + mode: 0o640, + flush: true, + }) + if (replace_existing) await db.none("DELETE FROM devices") - /* Create the root data directory */ - mkdirSync(fake_data_dir, { recursive: true, mode: 0o750 }) + const self_sign = require(join(process.cwd(), machine, "lib", "self_sign")) + for (let i = 0; i < n; i++) + await create_fake_machine(device_ids_file, self_sign, i) - const gencerts_path = join(machine, "tools", "generate-certificates") - for (let i = 0; i < n; i++) { - const machine_data_dir = join(fake_data_dir, i.toString()) - const exit_code = await create_fake_machine(gencerts_path, machine_data_dir, i) - if (exit_code !== EXIT.OK) - return exit_code - - const device_id = await compute_machine_id(join(machine_data_dir, "client.pem")) - - await db.none( - `INSERT INTO devices (device_id, cassette1, cassette2, paired, display, created, name, last_online, location) - VALUES ($1, 0, 0, 't', 't', now(), $2, now(), '{}'::json)`, - [device_id, `machine_${i}`] - ) - } + await close_stream(device_ids_file) return EXIT.OK } @@ -103,7 +88,7 @@ const run = async (args) => { if (options.help) return help(EXIT.OK) - const missing_options = ["n", "machine", "fake_data_dir"].filter((opt) => !options[opt]) + const missing_options = ["n", "machine", "device_ids_path"].filter((opt) => !options[opt]) if (missing_options.length > 0) { console.error("The following options are required:", missing_options.join(", ")) return help(EXIT.BADARGS) From 7581bbb078850ed107c0afa2d0df3b577bbba286 Mon Sep 17 00:00:00 2001 From: siiky Date: Tue, 15 Apr 2025 12:38:34 +0100 Subject: [PATCH 05/21] chore: update usage text --- tests/stress/index.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/stress/index.js b/tests/stress/index.js index df8c6f83..06424aa7 100644 --- a/tests/stress/index.js +++ b/tests/stress/index.js @@ -30,11 +30,11 @@ $ lamassu-server-stress-testing db You also need to create fake machines that will be used later in the actual stress tests (including certificates, and pairing each to the server). The -following command creates 10 fake machines, and saves their data in -path/to/stress/data/. path/to/real/machine/code/ is the path to the root of the -machine's code. +following command creates 10 fake machines, and saves their device IDs in +path/to/machine-ids.txt. path/to/real/machine/code/ is the path to the root of +the machine's code. 
-$ lamassu-server-stress-testing machines -n 10 --fake_data_dir path/to/stress/data/ --machine path/to/real/machine/code/ --replace_existing +$ lamassu-server-stress-testing machines -n 10 --device_ids_path path/to/machine-ids.txt --machine path/to/real/machine/code/ --replace_existing Finally, use the following to start the lamassu-server in stress-testing mode: From d689927b01cece610a08de6ebccb5ce62edcc6ec Mon Sep 17 00:00:00 2001 From: siiky Date: Tue, 20 May 2025 16:56:53 +0100 Subject: [PATCH 06/21] refactor: use matching names --- packages/server/lib/routes/logsRoutes.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/server/lib/routes/logsRoutes.js b/packages/server/lib/routes/logsRoutes.js index e482d014..b9a24401 100644 --- a/packages/server/lib/routes/logsRoutes.js +++ b/packages/server/lib/routes/logsRoutes.js @@ -27,7 +27,7 @@ function getLastSeen(req, res, next) { function updateLogs(req, res, next) { return logs .update(req.deviceId, req.body.logs) - .then(status => res.json({ success: status })) + .then(success => res.json({ success })) .catch(next) } From 6443eafeedf308dc7ff3cf1dc77156d6ab70dbf3 Mon Sep 17 00:00:00 2001 From: siiky Date: Tue, 20 May 2025 17:21:04 +0100 Subject: [PATCH 07/21] refactor: memoize pings --- packages/server/lib/middlewares/recordPing.js | 32 +++++++++++++++---- 1 file changed, 26 insertions(+), 6 deletions(-) diff --git a/packages/server/lib/middlewares/recordPing.js b/packages/server/lib/middlewares/recordPing.js index e74de771..b81358bc 100644 --- a/packages/server/lib/middlewares/recordPing.js +++ b/packages/server/lib/middlewares/recordPing.js @@ -1,7 +1,27 @@ -const plugins = require('../plugins') +const mem = require('mem') -module.exports = (req, res, next) => - plugins(req.settings, req.deviceId) - .recordPing(req.deviceTime, req.query.version, req.query.model) - .then(() => next()) - .catch(() => next()) +const logger = require('../logger') +const plugins = require('../plugins') +const T = require('../time') + +const record = mem( + ({ deviceId, deviceTime, model, version, settings }) => + plugins(settings, deviceId) + .recordPing(deviceTime, version, model) + .catch(logger.error), + { + cacheKey: ({ deviceId }) => deviceId, + maxAge: (3 / 2) * T.minute, // lib/notifier/codes.js + }, +) + +module.exports = (req, res, next) => { + record({ + deviceId: req.deviceId, + deviceTime: req.deviceTime, + model: req.query.model, + version: req.query.version, + settings: req.settings, + }) + next() +} From 0642ee301ed2339cd7e7cf1c09482da8846068f7 Mon Sep 17 00:00:00 2001 From: siiky Date: Tue, 20 May 2025 18:05:49 +0100 Subject: [PATCH 08/21] feat: generate fake machine IDs instead of client certificates --- packages/server/tests/stress/index.js | 5 +- packages/server/tests/stress/machines.js | 79 ++++++++++++++---------- 2 files changed, 48 insertions(+), 36 deletions(-) diff --git a/packages/server/tests/stress/index.js b/packages/server/tests/stress/index.js index 37aeaa63..d351be0d 100644 --- a/packages/server/tests/stress/index.js +++ b/packages/server/tests/stress/index.js @@ -29,10 +29,9 @@ $ lamassu-server-stress-testing db You also need to create fake machines that will be used later in the actual stress tests (including certificates, and pairing each to the server). The following command creates 10 fake machines, and saves their device IDs in -path/to/machine-ids.txt. path/to/real/machine/code/ is the path to the root of -the machine's code. +path/to/machine-ids.txt. 
-$ lamassu-server-stress-testing machines -n 10 --device_ids_path path/to/machine-ids.txt --machine path/to/real/machine/code/ --replace_existing +$ lamassu-server-stress-testing machines -n 10 --device_ids_path path/to/machine-ids.txt --replace_existing Finally, use the following to start the lamassu-server in stress-testing mode: diff --git a/packages/server/tests/stress/machines.js b/packages/server/tests/stress/machines.js index a6f95ec8..9e9605b1 100644 --- a/packages/server/tests/stress/machines.js +++ b/packages/server/tests/stress/machines.js @@ -1,7 +1,5 @@ -const { createHash } = require('node:crypto') const { createWriteStream } = require('node:fs') const { EOL } = require('node:os') -const { join } = require('node:path') require('../../lib/environment-helper') const db = require('../../lib/db') @@ -14,7 +12,6 @@ const help_message = 'Create and insert fake machines into the DB.' const cli = CLI({ grammar: [ [['--help'], 'Show this help message'], - [['--machine', 'PATH'], "Path to the machine's source code root"], [['--device_ids_path', 'PATH'], 'Where to save the list of device IDs'], [['-n', 'NUMBER'], 'Number of fake machines to create'], [['--replace_existing'], 'Remove machines of previous runs'], @@ -33,27 +30,51 @@ const close_stream = stream => stream.close(err => (err ? reject(err) : resolve())), ) -const compute_machine_id = cert => { - cert = cert.split('\r\n') - const raw = Buffer.from(cert.slice(1, cert.length - 2).join(''), 'base64') - return createHash('sha256').update(raw).digest('hex') +const leftpad = (s, w, c) => c.repeat(Math.max(w - s.length, 0)) + s + +function* chunk(arr, n) { + for (let i = 0; i < arr.length; i += n) { + yield arr.slice(i, i + n) + } } -const create_fake_machine = async (device_ids_file, self_sign, i) => { - console.log('creating machine', i) - const { cert } = await self_sign.generateCertificate() - const device_id = compute_machine_id(cert) - await db.none( - `INSERT INTO devices (device_id, cassette1, cassette2, paired, display, created, name, last_online, location) - VALUES ($1, 0, 0, 't', 't', now(), $2, now(), '{}'::json)`, - [device_id, `machine_${i}`], +const fake_machine_id = i => 'machine_' + i + +const save_machines_to_db = async (machines, replace_existing) => { + if (replace_existing) await db.none('DELETE FROM devices') + + console.log('inserting machines into DB') + db.tx(tx => + tx.batch( + machines.map(device_id => + tx.none( + `INSERT INTO devices (device_id, cassette1, cassette2, paired, display, created, name, last_online, location) + VALUES ($1, 0, 0, 't', 't', now(), $2, now(), '{}'::json)`, + [device_id, device_id], + ), + ), + ), ) - device_ids_file.write(device_id + EOL) - console.log('created machine', i, 'with device ID', device_id) +} + +const save_device_ids_to_file = async ( + machines, + { device_ids_path, replace_existing }, +) => { + const device_ids_file = createWriteStream(device_ids_path, { + flags: replace_existing ? 'w' : 'a', + mode: 0o640, + flush: true, + }) + + console.log('saving machine IDs to file') + for (const ids of chunk(machines, 20)) + await device_ids_file.write(ids.join(EOL) + EOL) + + await close_stream(device_ids_file) } const create_fake_machines = async ({ - machine, device_ids_path, n, replace_existing, @@ -63,19 +84,13 @@ const create_fake_machines = async ({ console.error('Expected n to be a positive number, got', n) return help(EXIT.BADARGS) } + const width = Math.log10(n) - const device_ids_file = createWriteStream(device_ids_path, { - flags: replace_existing ? 
'w' : 'a', - mode: 0o640, - flush: true, - }) - if (replace_existing) await db.none('DELETE FROM devices') - - const self_sign = require(join(process.cwd(), machine, 'lib', 'self_sign')) - for (let i = 0; i < n; i++) - await create_fake_machine(device_ids_file, self_sign, i) - - await close_stream(device_ids_file) + const machines = Array(n) + .fill(0) + .map((x, i) => fake_machine_id(leftpad(i.toString(), width, '0'))) + await save_machines_to_db(machines, replace_existing) + await save_device_ids_to_file(machines, { device_ids_path, replace_existing }) return EXIT.OK } @@ -89,9 +104,7 @@ const run = async args => { if (options.help) return help(EXIT.OK) - const missing_options = ['n', 'machine', 'device_ids_path'].filter( - opt => !options[opt], - ) + const missing_options = ['n', 'device_ids_path'].filter(opt => !options[opt]) if (missing_options.length > 0) { console.error( 'The following options are required:', From 2aea5a523c4f5bd3cd6fc7ca3e6ab35354d24f55 Mon Sep 17 00:00:00 2001 From: siiky Date: Mon, 2 Jun 2025 17:04:56 +0100 Subject: [PATCH 09/21] fix: creating large number of fake machines --- packages/server/tests/stress/machines.js | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/packages/server/tests/stress/machines.js b/packages/server/tests/stress/machines.js index 9e9605b1..7f3e75ac 100644 --- a/packages/server/tests/stress/machines.js +++ b/packages/server/tests/stress/machines.js @@ -44,17 +44,18 @@ const save_machines_to_db = async (machines, replace_existing) => { if (replace_existing) await db.none('DELETE FROM devices') console.log('inserting machines into DB') - db.tx(tx => - tx.batch( - machines.map(device_id => - tx.none( - `INSERT INTO devices (device_id, cassette1, cassette2, paired, display, created, name, last_online, location) - VALUES ($1, 0, 0, 't', 't', now(), $2, now(), '{}'::json)`, - [device_id, device_id], + for (const ids of chunk(machines, 20)) + await db.tx(tx => + tx.batch( + ids.map(device_id => + tx.none( + `INSERT INTO devices (device_id, cassette1, cassette2, paired, display, created, name, last_online, location) + VALUES ($1, 0, 0, 't', 't', now(), $1, now(), '{}'::json)`, + [device_id], + ), ), ), - ), - ) + ) } const save_device_ids_to_file = async ( From be06ea5097f3b3b9b0a736a022fdd1a025075454 Mon Sep 17 00:00:00 2001 From: siiky Date: Mon, 2 Jun 2025 17:59:01 +0100 Subject: [PATCH 10/21] fix: parameterize query --- packages/server/lib/operator.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/server/lib/operator.js b/packages/server/lib/operator.js index b6243f16..e715b412 100644 --- a/packages/server/lib/operator.js +++ b/packages/server/lib/operator.js @@ -2,8 +2,8 @@ const db = require('./db') const _ = require('lodash/fp') function getOperatorId(service) { - const sql = `SELECT operator_id FROM operator_ids WHERE service = '${service}'` - return db.oneOrNone(sql).then(_.get('operator_id')) + const sql = 'SELECT operator_id FROM operator_ids WHERE service = ${service}' + return db.oneOrNone(sql, { service }).then(_.get('operator_id')) } module.exports = { getOperatorId } From 1b261504994c9fb0657678085abbd59e2b990226 Mon Sep 17 00:00:00 2001 From: siiky Date: Tue, 3 Jun 2025 09:44:42 +0100 Subject: [PATCH 11/21] refactor: batch record pings --- packages/server/lib/machine-loader.js | 48 +++++++++++++++++++ packages/server/lib/middlewares/recordPing.js | 26 +++------- packages/server/lib/plugins.js | 26 ---------- packages/server/lib/poller.js | 7 +++ 4 files changed, 
61 insertions(+), 46 deletions(-) diff --git a/packages/server/lib/machine-loader.js b/packages/server/lib/machine-loader.js index f5fb29dc..b3d3c43a 100644 --- a/packages/server/lib/machine-loader.js +++ b/packages/server/lib/machine-loader.js @@ -702,6 +702,52 @@ function updatePhotos(dir, photoPairs) { ) } +let pendingRecordPings = new Map() +const enqueueRecordPing = ping => { + pendingRecordPings.set(ping.deviceId, ping) +} + +// from @sindresorhus/is-empty-iterable +const isEmptyIterable = iter => { + for (const _ of iter) return false + return true +} + +const batchRecordPendingPings = () => { + let pings = pendingRecordPings.values() + pendingRecordPings = new Map() + if (isEmptyIterable(pings)) return Promise.resolve() + + const prepareChunk = (t, chunk) => + chunk + .flatMap(({ deviceId, last_online, version, model }) => [ + t.none( + `INSERT INTO machine_pings (device_id, device_time) + VALUES ($1, $2) + ON CONFLICT (device_id) DO + UPDATE SET device_time = $2, + updated = now()`, + [deviceId, last_online], + ), + t.none( + pgp.helpers.update({ last_online, version, model }, null, 'devices') + + 'WHERE device_id = ${deviceId}', + { deviceId }, + ), + ]) + .toArray() + + const MaxBatchSize = 500 + return db + .task(async t => { + while (!isEmptyIterable(pings)) { + const chunk = pings.take(MaxBatchSize) + await t.batch(prepareChunk(t, chunk)).catch(err => logger.error(err)) + } + }) + .catch(err => logger.error(err)) +} + module.exports = { getMachineName, getMachines, @@ -719,4 +765,6 @@ module.exports = { updateDiagnostics, updateFailedQRScans, batchDiagnostics, + enqueueRecordPing, + batchRecordPendingPings, } diff --git a/packages/server/lib/middlewares/recordPing.js b/packages/server/lib/middlewares/recordPing.js index b81358bc..14d21b57 100644 --- a/packages/server/lib/middlewares/recordPing.js +++ b/packages/server/lib/middlewares/recordPing.js @@ -1,27 +1,13 @@ -const mem = require('mem') +const { enqueueRecordPing } = require('../machine-loader') -const logger = require('../logger') -const plugins = require('../plugins') -const T = require('../time') - -const record = mem( - ({ deviceId, deviceTime, model, version, settings }) => - plugins(settings, deviceId) - .recordPing(deviceTime, version, model) - .catch(logger.error), - { - cacheKey: ({ deviceId }) => deviceId, - maxAge: (3 / 2) * T.minute, // lib/notifier/codes.js - }, -) - -module.exports = (req, res, next) => { - record({ +const record = (req, res, next) => { + enqueueRecordPing({ deviceId: req.deviceId, - deviceTime: req.deviceTime, + last_online: req.deviceTime, model: req.query.model, version: req.query.version, - settings: req.settings, }) next() } + +module.exports = record diff --git a/packages/server/lib/plugins.js b/packages/server/lib/plugins.js index f92fce24..29af2440 100644 --- a/packages/server/lib/plugins.js +++ b/packages/server/lib/plugins.js @@ -409,31 +409,6 @@ function plugins(settings, deviceId) { }) } - function recordPing(deviceTime, version, model) { - const devices = { - version, - model, - last_online: deviceTime, - } - - return Promise.all([ - db.none( - `insert into machine_pings(device_id, device_time) - values ($1, $2) - ON CONFLICT (device_id) DO UPDATE SET device_time = $2, - updated = now()`, - [deviceId, deviceTime], - ), - db.none( - pgp.helpers.update(devices, null, 'devices') + - 'WHERE device_id = ${deviceId}', - { - deviceId, - }, - ), - ]) - } - function pruneMachinesHeartbeat() { const sql = `DELETE FROM machine_network_heartbeat h @@ -1062,7 +1037,6 @@ function 
plugins(settings, deviceId) { return { getRates, - recordPing, buildRates, getRawRates, buildRatesNoCommission, diff --git a/packages/server/lib/poller.js b/packages/server/lib/poller.js index 227707a2..4c947c59 100644 --- a/packages/server/lib/poller.js +++ b/packages/server/lib/poller.js @@ -14,6 +14,7 @@ const coinAtmRadar = require('./coinatmradar/coinatmradar') const configManager = require('./new-config-manager') const complianceTriggers = require('./compliance-triggers') const settingsLoader = require('./new-settings-loader') +const machineLoader = require('./machine-loader') const NodeCache = require('node-cache') const db = require('./db') const processBatches = require('./tx-batching-processing') @@ -31,6 +32,7 @@ const PRUNE_MACHINES_HEARTBEAT = 1 * T.day const TRANSACTION_BATCH_LIFECYCLE = 20 * T.minutes const TICKER_RATES_INTERVAL = 59 * T.seconds const FAILED_SCANS_INTERVAL = 1 * T.day +const PENDING_PINGS_INTERVAL = 90 * T.seconds // lib/notifier/codes.js const CHECK_NOTIFICATION_INTERVAL = 20 * T.seconds const PENDING_INTERVAL = 10 * T.seconds @@ -309,6 +311,11 @@ function doPolling() { QUEUE.SLOW, settings, ) + addToQueue( + machineLoader.batchRecordPendingPings, + PENDING_PINGS_INTERVAL, + QUEUE.SLOW, + ) } module.exports = { setup, reload } From 22b6b64fe448c372c970ba0eedf347aac5a0fb3e Mon Sep 17 00:00:00 2001 From: Rafael Taranto Date: Thu, 29 May 2025 16:17:11 +0100 Subject: [PATCH 12/21] fix: bad link on alert --- packages/admin-ui/src/pages/Dashboard/Alerts/AlertsTable.jsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/admin-ui/src/pages/Dashboard/Alerts/AlertsTable.jsx b/packages/admin-ui/src/pages/Dashboard/Alerts/AlertsTable.jsx index 078151f6..504a0449 100644 --- a/packages/admin-ui/src/pages/Dashboard/Alerts/AlertsTable.jsx +++ b/packages/admin-ui/src/pages/Dashboard/Alerts/AlertsTable.jsx @@ -18,7 +18,7 @@ const icons = { const links = { error: '/maintenance/machine-status', - fiatBalance: '/maintenance/cash-cassettes', + fiatBalance: '/maintenance/cash-units', cryptoBalance: '/maintenance/funding', } From 49080b04126f1ce9826fe2c359a1fee09aa907ca Mon Sep 17 00:00:00 2001 From: siiky Date: Tue, 3 Jun 2025 11:32:52 +0100 Subject: [PATCH 13/21] refactor: move `pairing.js:isPaired()` to `machine-loader.js:getPairedMachineName()` --- packages/server/lib/machine-loader.js | 8 ++++++++ packages/server/lib/middlewares/authorize.js | 5 ++--- packages/server/lib/pairing.js | 11 +---------- 3 files changed, 11 insertions(+), 13 deletions(-) diff --git a/packages/server/lib/machine-loader.js b/packages/server/lib/machine-loader.js index b3d3c43a..174ddeb1 100644 --- a/packages/server/lib/machine-loader.js +++ b/packages/server/lib/machine-loader.js @@ -173,6 +173,13 @@ function getMachineName(machineId) { return db.oneOrNone(sql, [machineId]).then(it => it?.name) } +const getPairedMachineName = deviceId => + db.oneOrNone( + 'SELECT name FROM devices WHERE device_id = $1 AND paired = TRUE', + [deviceId], + machine => machine?.name, + ) + function getMachine(machineId, config) { const sql = `${MACHINE_WITH_CALCULATED_FIELD_SQL} WHERE d.device_id = $1` @@ -750,6 +757,7 @@ const batchRecordPendingPings = () => { module.exports = { getMachineName, + getPairedMachineName, getMachines, getUnpairedMachines, getMachine, diff --git a/packages/server/lib/middlewares/authorize.js b/packages/server/lib/middlewares/authorize.js index 1a4d24df..30b95e93 100644 --- a/packages/server/lib/middlewares/authorize.js +++ b/packages/server/lib/middlewares/authorize.js 
@@ -1,9 +1,8 @@ -const pairing = require('../pairing') +const { getPairedMachineName } = require('../machine-loader') const logger = require('../logger') const authorize = function (req, res, next) { - return pairing - .isPaired(req.deviceId) + return getPairedMachineName(req.deviceId) .then(deviceName => { if (deviceName) { req.deviceName = deviceName diff --git a/packages/server/lib/pairing.js b/packages/server/lib/pairing.js index 38eecc9b..b21012e3 100644 --- a/packages/server/lib/pairing.js +++ b/packages/server/lib/pairing.js @@ -81,13 +81,4 @@ function authorizeCaDownload(caToken) { }) } -function isPaired(deviceId) { - const sql = - 'select device_id, name from devices where device_id=$1 and paired=TRUE' - - return db - .oneOrNone(sql, [deviceId]) - .then(row => (row && row.device_id === deviceId ? row.name : false)) -} - -module.exports = { pair, unpair, authorizeCaDownload, isPaired } +module.exports = { pair, unpair, authorizeCaDownload } From afe8299ae71ad0fe69379f438b6b7d2d06ccaf16 Mon Sep 17 00:00:00 2001 From: siiky Date: Tue, 3 Jun 2025 11:36:03 +0100 Subject: [PATCH 14/21] refactor: drop one `.then()` use and `lodash/fp` --- packages/server/lib/operator.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/server/lib/operator.js b/packages/server/lib/operator.js index e715b412..7cf8b8b2 100644 --- a/packages/server/lib/operator.js +++ b/packages/server/lib/operator.js @@ -1,9 +1,8 @@ const db = require('./db') -const _ = require('lodash/fp') function getOperatorId(service) { const sql = 'SELECT operator_id FROM operator_ids WHERE service = ${service}' - return db.oneOrNone(sql, { service }).then(_.get('operator_id')) + return db.oneOrNone(sql, { service }, ({ operator_id }) => operator_id) } module.exports = { getOperatorId } From c233aa9affb4d090eaa6eeac5297af923267bc6e Mon Sep 17 00:00:00 2001 From: siiky Date: Tue, 3 Jun 2025 11:43:30 +0100 Subject: [PATCH 15/21] chore: remove dead code --- packages/server/lib/middlewares/populateSettings.js | 3 --- packages/server/lib/middlewares/state.js | 1 - 2 files changed, 4 deletions(-) diff --git a/packages/server/lib/middlewares/populateSettings.js b/packages/server/lib/middlewares/populateSettings.js index 6a34ca3d..04f3f76d 100644 --- a/packages/server/lib/middlewares/populateSettings.js +++ b/packages/server/lib/middlewares/populateSettings.js @@ -78,9 +78,6 @@ const populateSettings = function (req, res, next) { const { needsSettingsReload, settingsCache } = state const operatorId = res.locals.operatorId const versionId = req.headers['config-version'] - if (versionId !== state.oldVersionId) { - state.oldVersionId = versionId - } try { // Priority of configs to retrieve diff --git a/packages/server/lib/middlewares/state.js b/packages/server/lib/middlewares/state.js index b6419d0f..b4934cc9 100644 --- a/packages/server/lib/middlewares/state.js +++ b/packages/server/lib/middlewares/state.js @@ -3,7 +3,6 @@ const SETTINGS_CACHE_REFRESH = 3600 module.exports = (function () { return { - oldVersionId: 'unset', needsSettingsReload: {}, settingsCache: new NodeCache({ stdTTL: SETTINGS_CACHE_REFRESH, From 593f7fe9495982221e03ff786f49f8d9b2a32614 Mon Sep 17 00:00:00 2001 From: siiky Date: Tue, 3 Jun 2025 12:08:54 +0100 Subject: [PATCH 16/21] refactor: use a single DB task to load settings --- packages/server/lib/new-settings-loader.js | 38 +++++++++++++--------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/packages/server/lib/new-settings-loader.js b/packages/server/lib/new-settings-loader.js 
index 090d80ba..84919715 100644 --- a/packages/server/lib/new-settings-loader.js +++ b/packages/server/lib/new-settings-loader.js @@ -91,7 +91,7 @@ function saveAccounts(accounts) { ) } -function loadAccounts(schemaVersion) { +function _loadAccounts(db, schemaVersion) { const sql = `SELECT data FROM user_config WHERE type = $1 @@ -100,14 +100,15 @@ function loadAccounts(schemaVersion) { ORDER BY id DESC LIMIT 1` - return db - .oneOrNone(sql, [ - 'accounts', - schemaVersion || NEW_SETTINGS_LOADER_SCHEMA_VERSION, - ]) - .then(_.compose(_.defaultTo({}), _.get('data.accounts'))) + return db.oneOrNone( + sql, + ['accounts', schemaVersion || NEW_SETTINGS_LOADER_SCHEMA_VERSION], + row => row?.data?.accounts ?? {}, + ) } +const loadAccounts = schemaVersion => _loadAccounts(db, schemaVersion) + function hideSecretFields(accounts) { return _.flow( _.filter(path => !_.isEmpty(_.get(path, accounts))), @@ -222,7 +223,7 @@ function loadLatestConfigOrNone(schemaVersion) { .then(row => (row ? row.data.config : {})) } -function loadConfig(versionId) { +function loadConfig(db, versionId) { const sql = `SELECT data FROM user_config WHERE id = $1 @@ -231,8 +232,11 @@ function loadConfig(versionId) { AND valid` return db - .one(sql, [versionId, NEW_SETTINGS_LOADER_SCHEMA_VERSION]) - .then(row => row.data.config) + .one( + sql, + [versionId, NEW_SETTINGS_LOADER_SCHEMA_VERSION], + ({ data: { config } }) => config, + ) .catch(err => { if (err.name === 'QueryResultError') { throw new Error('No such config version: ' + versionId) @@ -245,12 +249,14 @@ function loadConfig(versionId) { function load(versionId) { if (!versionId) Promise.reject('versionId is required') - return Promise.all([loadConfig(versionId), loadAccounts()]).then( - ([config, accounts]) => ({ - config, - accounts, - }), - ) + return db.task(t => { + t.batch([loadConfig(t, versionId), _loadAccounts(t)]).then( + ([config, accounts]) => ({ + config, + accounts, + }), + ) + }) } const fetchCurrentConfigVersion = () => { From dddbe98fca1d46ec56335ce084deb48849f37154 Mon Sep 17 00:00:00 2001 From: siiky Date: Tue, 3 Jun 2025 12:20:21 +0100 Subject: [PATCH 17/21] refactor: use a single DB task to load the latest settings --- packages/server/lib/new-settings-loader.js | 25 ++++++++++++---------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/packages/server/lib/new-settings-loader.js b/packages/server/lib/new-settings-loader.js index 84919715..a83ff7db 100644 --- a/packages/server/lib/new-settings-loader.js +++ b/packages/server/lib/new-settings-loader.js @@ -168,16 +168,19 @@ function migrationSaveConfig(config) { }) } -function loadLatest(schemaVersion) { - return Promise.all([ - loadLatestConfigOrNoneReturningVersion(schemaVersion), - loadAccounts(schemaVersion), - ]).then(([configObj, accounts]) => ({ - config: configObj.config, - accounts, - version: configObj.version, - })) -} +const loadLatest = schemaVersion => + db + .task(t => + t.batch([ + loadLatestConfigOrNoneReturningVersion(t, schemaVersion), + _loadAccounts(t, schemaVersion), + ]), + ) + .then(([configObj, accounts]) => ({ + config: configObj.config, + accounts, + version: configObj.version, + })) function loadLatestConfig() { const sql = `SELECT data @@ -196,7 +199,7 @@ function loadLatestConfig() { }) } -function loadLatestConfigOrNoneReturningVersion(schemaVersion) { +function loadLatestConfigOrNoneReturningVersion(db, schemaVersion) { const sql = `SELECT data, id FROM user_config WHERE type = 'config' From 3e83838b5a3b10ad02ccc905c8d492de246f4b39 Mon Sep 17 00:00:00 
2001 From: siiky Date: Tue, 3 Jun 2025 12:29:54 +0100 Subject: [PATCH 18/21] refactor: filter old requests before populating settings --- packages/server/lib/routes.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/server/lib/routes.js b/packages/server/lib/routes.js index 0f68981d..daed749c 100644 --- a/packages/server/lib/routes.js +++ b/packages/server/lib/routes.js @@ -81,8 +81,8 @@ const loadRoutes = async () => { app.use(findOperatorId) app.use(populateDeviceId) app.use(authorize) - app.use(configRequiredRoutes, populateSettings) app.use(filterOldRequests) + app.use(configRequiredRoutes, populateSettings) // other app routes app.use('/graphql', recordPing) From aaa02e3186d60a26dadb7f1ecfd70ce11983184a Mon Sep 17 00:00:00 2001 From: siiky Date: Tue, 3 Jun 2025 12:30:19 +0100 Subject: [PATCH 19/21] refactor: populate operator after filtering old requests --- packages/server/lib/routes.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/server/lib/routes.js b/packages/server/lib/routes.js index daed749c..c4e8f6b0 100644 --- a/packages/server/lib/routes.js +++ b/packages/server/lib/routes.js @@ -78,10 +78,10 @@ const loadRoutes = async () => { // app /pair and /ca routes app.use('/', pairingRoutes) - app.use(findOperatorId) app.use(populateDeviceId) app.use(authorize) app.use(filterOldRequests) + app.use(findOperatorId) app.use(configRequiredRoutes, populateSettings) // other app routes From cb046ea7268e35b44782cc015f43a3e266846957 Mon Sep 17 00:00:00 2001 From: siiky Date: Tue, 3 Jun 2025 12:33:32 +0100 Subject: [PATCH 20/21] chore: drop `if` to idempotent op --- packages/server/lib/middlewares/populateSettings.js | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/packages/server/lib/middlewares/populateSettings.js b/packages/server/lib/middlewares/populateSettings.js index 04f3f76d..8d6c9d0b 100644 --- a/packages/server/lib/middlewares/populateSettings.js +++ b/packages/server/lib/middlewares/populateSettings.js @@ -110,7 +110,7 @@ const populateSettings = function (req, res, next) { const operatorSettings = settingsCache.get(`${operatorId}-latest`) - if (!!needsSettingsReload[operatorId] || !operatorSettings) { + if (needsSettingsReload[operatorId] || !operatorSettings) { needsSettingsReload[operatorId] ? 
logger.debug( 'Fetching and caching a new latest config value, as a reload was requested', @@ -125,8 +125,7 @@ const populateSettings = function (req, res, next) { const versionId = settings.version settingsCache.set(`${operatorId}-latest`, settings) settingsCache.set(`${operatorId}-v${versionId}`, settings) - if (needsSettingsReload[operatorId]) - delete needsSettingsReload[operatorId] + delete needsSettingsReload[operatorId] req.settings = settings }) .then(() => next()) From ab0a5a33e51c8b8fe0bfa7f94f1bf4383013f24c Mon Sep 17 00:00:00 2001 From: siiky Date: Thu, 5 Jun 2025 16:32:36 +0100 Subject: [PATCH 21/21] chore: comment stress testing code by default --- packages/server/lib/middlewares/populateDeviceId.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/server/lib/middlewares/populateDeviceId.js b/packages/server/lib/middlewares/populateDeviceId.js index 9a1d9fdf..f2a57e22 100644 --- a/packages/server/lib/middlewares/populateDeviceId.js +++ b/packages/server/lib/middlewares/populateDeviceId.js @@ -1,6 +1,6 @@ const crypto = require('crypto') -const IS_STRESS_TESTING = process.env.LAMASSU_STRESS_TESTING === 'YES' +//const IS_STRESS_TESTING = process.env.LAMASSU_STRESS_TESTING === 'YES' function sha256(buf) { if (!buf) return null @@ -16,7 +16,7 @@ const populateDeviceId = function (req, res, next) { : null let deviceId = peerCert?.raw ? sha256(peerCert.raw) : null - if (!deviceId && IS_STRESS_TESTING) deviceId = req.headers.device_id + //if (!deviceId && IS_STRESS_TESTING) deviceId = req.headers.device_id if (!deviceId) return res.status(500).json({ error: 'Unable to find certificate' })
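
For completeness, a minimal, hypothetical client sketch of how these pieces could fit together once the commented-out fallback in populateDeviceId.js is re-enabled and the server is started with LAMASSU_STRESS_TESTING=YES: it reads the device IDs written by the `machines` subcommand and polls the server with a `device_id` header, which that middleware would then accept in place of a client certificate. The endpoint path, port, polling interval, and file argument below are placeholder assumptions for illustration only; they are not part of these patches.

// stress-client-sketch.js — hypothetical, not part of this patch series.
// Usage (assumed): node stress-client-sketch.js path/to/machine-ids.txt
const https = require('node:https')
const { readFileSync } = require('node:fs')

// File produced by `lamassu-server-stress-testing machines --device_ids_path ...`
const deviceIdsPath = process.argv[2]
const deviceIds = readFileSync(deviceIdsPath, 'utf8')
  .split('\n')
  .filter(Boolean)

const poll = deviceId =>
  https
    .get(
      {
        hostname: 'localhost',
        port: 3000, // assumed local port; use whatever the stress server listens on
        path: '/placeholder-endpoint', // replace with a real machine-facing route
        rejectUnauthorized: false, // local self-signed certificates in a test setup
        headers: {
          // Read by populateDeviceId.js when the stress-testing fallback is enabled.
          device_id: deviceId,
          // Assumed to keep the request recent enough for filterOldRequests.
          date: new Date().toISOString(),
        },
      },
      res => res.resume(), // drain the response; only the load matters here
    )
    .on('error', console.error)

// Fire one request per fake machine on a fixed, assumed interval.
setInterval(() => deviceIds.forEach(poll), 5000)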