chore: use monorepo organization
This commit is contained in:
parent
deaf7d6ecc
commit
a687827f7e
1099 changed files with 8184 additions and 11535 deletions
88799
packages/server/tests/ofac/dist.all.last.txt
Normal file
88799
packages/server/tests/ofac/dist.all.last.txt
Normal file
File diff suppressed because it is too large
Load diff
4275
packages/server/tests/ofac/dist.female.first.txt
Normal file
4275
packages/server/tests/ofac/dist.female.first.txt
Normal file
File diff suppressed because it is too large
Load diff
1219
packages/server/tests/ofac/dist.male.first.txt
Normal file
1219
packages/server/tests/ofac/dist.male.first.txt
Normal file
File diff suppressed because it is too large
Load diff
362
packages/server/tests/ofac/matching.js
Normal file
362
packages/server/tests/ofac/matching.js
Normal file
|
|
@ -0,0 +1,362 @@
|
|||
const assert = require('assert')
|
||||
const ofac = require('../../lib/ofac')
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const _ = require('lodash/fp')
|
||||
|
||||
let structs
|
||||
let fullNames
|
||||
|
||||
// Uniform random integer in [0, N). NOTE(review): currently unused in this suite.
const rand = N => _.random(0, N - 1)
|
||||
|
||||
// Character-class tables used to build plausible misspellings.
// BUG FIX: lodash's _.range excludes its upper bound, so the original
// `_.range('a'.charCodeAt(0), 'z'.charCodeAt(0))` silently dropped 'z'
// from `letters` (and therefore from `consonants`). Add 1 to include it.
const letters = _.range('a'.charCodeAt(0), 'z'.charCodeAt(0) + 1)
const vowels = _.map(c => c.charCodeAt(0), ['a', 'e', 'i', 'o', 'u'])
const consonants = _.difference(letters, vowels)
|
||||
|
||||
// Double the character at `index`, e.g. ('cat', 1) -> 'caat'.
const duplicate = (word, index) => {
  const doubled = word[index]
  const head = word.slice(0, index)
  const tail = word.slice(index + 1)
  return _.join('', [head, doubled, doubled, tail])
}
|
||||
|
||||
// Delete the character at `index`, e.g. ('cat', 1) -> 'ct'.
const remove = (word, index) =>
  _.join('', [word.slice(0, index), word.slice(index + 1)])
|
||||
|
||||
// Swap the characters at `index` and `index + 1`; past the end of the
// word the missing right-hand neighbour is treated as a space.
const transpose = (word, index) => {
  const left = word[index]
  const right = word[index + 1] || ' '
  const prefix = word.slice(0, index)
  const suffix = word.slice(index + 2)
  return _.join('', [prefix, right, left, suffix])
}
|
||||
|
||||
// Replace the character at `index` with a random character of the same
// class (vowel for vowel, consonant for consonant); spaces are kept.
const alter = (word, index) => {
  if (word[index] === ' ') return word

  const code = word.charCodeAt(index)
  const pool = _.includes(code, vowels) ? vowels : consonants
  const substituteCode = _.sample(pool)
  const substitute = String.fromCharCode(substituteCode)

  return _.join('', [word.slice(0, index), substitute, word.slice(index + 1)])
}
|
||||
|
||||
// The catalogue of single-edit misspelling operations; `misspell`
// samples one of these uniformly at random.
const misspellOps = [duplicate, remove, transpose, alter]
|
||||
|
||||
// Apply one random misspelling operation at a random in-word position.
// BUG FIX: lodash's _.random is inclusive on BOTH ends, so the original
// `_.random(1, len)` could yield `index === len` — one past the last
// character — which made `duplicate`/`remove` silent no-ops and made
// `alter` append a stray character. The upper bound must be `len - 1`.
// Index 0 is deliberately never chosen, so the first letter survives.
const misspell = word => {
  const len = word.length
  const index = _.random(1, len - 1)
  const operation = _.sample(misspellOps)
  return operation(word, index)
}
|
||||
|
||||
// Apply between 1 and sqrt(word.length) successive misspellings.
const misspellRandomly = word => {
  const count = _.random(1, Math.sqrt(word.length))
  const operations = _.times(() => misspell, count)
  return _.flow(...operations)(word)
}
|
||||
|
||||
|
||||
// Replace one randomly chosen vowel in `word` with an adjacent vowel
// from the a/e/i/o/u sequence (e.g. 'e' becomes 'a' or 'i').
// Returns false when the word contains no vowels at all.
const shiftVowel = word => {
  // Collect every vowel with its string position and its position in
  // the `vowels` table (the lodash/fp pipeline of the original, as a loop).
  const indexedVowels = []
  for (let index = 0; index < word.length; index++) {
    const vowelIndex = _.indexOf(word.charCodeAt(index), vowels)
    if (vowelIndex !== -1) {
      indexedVowels.push({letter: word[index], index, vowelIndex})
    }
  }

  if (_.isEmpty(indexedVowels)) return false

  const picked = _.sample(indexedVowels)

  // At either end of the vowel sequence only one direction is possible.
  const offsets = picked.vowelIndex === 0 ? [ +1 ]
    : picked.vowelIndex === 4 ? [ -1 ]
    : [ -1, +1 ]
  const offset = _.sample(offsets)
  const replacement = String.fromCharCode(vowels[picked.vowelIndex + offset])

  const index = picked.index
  return _.join('', [word.slice(0, index), replacement, word.slice(index + 1)])
}
|
||||
|
||||
// Build a replacer that swaps the FIRST occurrence of `a` for `b`
// (String#replace with a string pattern replaces only one occurrence),
// returning false when the word does not contain `a` at all.
const makeReplacer = (a, b) => word => {
  const swapped = word.replace(a, b)
  return swapped === word ? false : swapped
}
|
||||
|
||||
// Both directions of a phonetic equivalence, as a pair of replacers.
const makeReplacerPair = (a, b) => [makeReplacer(a, b), makeReplacer(b, a)]
|
||||
|
||||
// Phonetic-equivalence generators tried by `transcribe`: a vowel shift
// plus both directions of each letter-pair substitution.
const equivalences = [
  shiftVowel,
  ...makeReplacerPair('v', 'f'),
  ...makeReplacerPair('ph', 'f'),
  ...makeReplacerPair('ck', 'k'),
  ...makeReplacerPair('q', 'k')
]
|
||||
|
||||
// Try the phonetic alterations in random order and return the first
// one that applies to `word`; yields undefined when none of them match.
const transcribe = word => {
  const shuffled = _.shuffle(equivalences)
  for (const applyAlteration of shuffled) {
    const result = applyAlteration(word)
    if (result) return result
  }
}
|
||||
|
||||
const threshold = 0.85
|
||||
const fullNameThreshold = 0.95
|
||||
|
||||
describe('OFAC', function () {
|
||||
describe('Matching', function () {
|
||||
|
||||
before(function () {
|
||||
this.timeout(60000)
|
||||
return ofac.load()
|
||||
.then(result => {
|
||||
structs = result
|
||||
const {individuals} = structs
|
||||
fullNames = _.flow(
|
||||
_.flatMap('aliases'),
|
||||
_.map('fullName')
|
||||
)(individuals)
|
||||
})
|
||||
})
|
||||
|
||||
it.skip('should match the exact full names of suspects', function () {
|
||||
this.timeout(0)
|
||||
|
||||
for (const fullName of fullNames) {
|
||||
const matches = ofac.match({firstName: fullName}, null, {
|
||||
threshold,
|
||||
fullNameThreshold,
|
||||
})
|
||||
assert.ok(!_.isEmpty(matches))
|
||||
}
|
||||
})
|
||||
|
||||
it.skip('should match the permutated full names of suspects', function () {
|
||||
this.timeout(0)
|
||||
|
||||
for (const fullName of fullNames) {
|
||||
const reversed = _.flow(
|
||||
_.split(' '),
|
||||
_.reverse,
|
||||
_.join(' ')
|
||||
)(fullName)
|
||||
|
||||
const matches = ofac.match({firstName: reversed}, null, {
|
||||
threshold,
|
||||
fullNameThreshold,
|
||||
})
|
||||
assert.ok(!_.isEmpty(matches))
|
||||
}
|
||||
})
|
||||
|
||||
it('should match despite some misspellings', function () {
|
||||
this.timeout(0)
|
||||
|
||||
let countMatches = 0
|
||||
const failures = []
|
||||
|
||||
for (const fullName of fullNames) {
|
||||
const lightlyMisspelled = misspell(fullName)
|
||||
|
||||
const heavilyMisspelled = _.flow(
|
||||
_.split(' '),
|
||||
_.map(misspell),
|
||||
_.join(' ')
|
||||
)(fullName)
|
||||
|
||||
const matchesA = ofac.match({firstName: lightlyMisspelled}, null, {
|
||||
threshold,
|
||||
fullNameThreshold,
|
||||
})
|
||||
|
||||
if (!_.isEmpty(matchesA)) {
|
||||
countMatches += 1
|
||||
}
|
||||
else {
|
||||
failures.push({fullName, misspelled: lightlyMisspelled})
|
||||
}
|
||||
|
||||
const matchesB = ofac.match({firstName: heavilyMisspelled}, null, {
|
||||
threshold: threshold - 0.1,//: 0.75
|
||||
})
|
||||
|
||||
if (!_.isEmpty(matchesB)) {
|
||||
countMatches += 1
|
||||
}
|
||||
else {
|
||||
failures.push({fullName, heavy: true, misspelled: heavilyMisspelled})
|
||||
}
|
||||
}
|
||||
|
||||
for (const failure of failures) {
|
||||
const {fullName, heavy, misspelled} = failure
|
||||
console.log("Original:", fullName)
|
||||
ofac.match({firstName: misspelled}, null, {
|
||||
threshold: threshold + (heavy ? -0.1 : 0),
|
||||
debug: true
|
||||
})
|
||||
}
|
||||
|
||||
assert.equal(countMatches, fullNames.length * 2)
|
||||
})
|
||||
|
||||
it('should match phonetically similar words', function () {
|
||||
this.timeout(0)
|
||||
|
||||
let countMatches = 0
|
||||
const failures = []
|
||||
|
||||
for (const fullName of fullNames) {
|
||||
const transcribed = transcribe(fullName)
|
||||
|
||||
if (!transcribed) {
|
||||
console.warn(`Couldn't find an appropriate phonetic alteration for '${fullName}'`)
|
||||
countMatches += 1
|
||||
continue
|
||||
}
|
||||
|
||||
const matches = ofac.match({firstName: transcribed}, null, {
|
||||
threshold,
|
||||
fullNameThreshold,
|
||||
})
|
||||
|
||||
if (!_.isEmpty(matches)) {
|
||||
countMatches += 1
|
||||
}
|
||||
else {
|
||||
failures.push({fullName, misspelled: transcribed})
|
||||
}
|
||||
}
|
||||
|
||||
for (const failure of failures) {
|
||||
const {fullName, misspelled} = failure
|
||||
console.log("Original:", fullName)
|
||||
ofac.match({firstName: misspelled}, null, {
|
||||
threshold,
|
||||
fullNameThreshold,
|
||||
debug: true
|
||||
})
|
||||
}
|
||||
|
||||
assert.equal(countMatches, fullNames.length)
|
||||
})
|
||||
|
||||
it('should discard matches with inapropriate birthdates', function () {
|
||||
this.timeout(0)
|
||||
|
||||
const date = new Date()
|
||||
const YYYY = _.padCharsStart('0', 4, date.getFullYear())
|
||||
const MM = _.padCharsStart('0', 2, date.getMonth() + 1)
|
||||
const DD = _.padCharsStart('0', 2, date.getDate())
|
||||
const dateString = `${YYYY}${MM}${DD}`
|
||||
|
||||
const noMatchesWithBirthDates = _.every(_.flow(
|
||||
_.get('birthDatePeriods'),
|
||||
_.every(_.isEmpty)
|
||||
))
|
||||
|
||||
for (const fullName of fullNames) {
|
||||
const matches = ofac.match({firstName: fullName}, dateString, {
|
||||
threshold,
|
||||
fullNameThreshold,
|
||||
})
|
||||
assert.ok(noMatchesWithBirthDates(matches))
|
||||
}
|
||||
})
|
||||
|
||||
// Verifies that a sample of common US first/last name pairs produces
// zero false-positive OFAC matches; failing pairs are re-run in debug
// mode so their scores are printed before the assertion fires.
it('should not match against common names', function () {
  this.timeout(0)

  // Each census file has one name per line, first whitespace-separated
  // token being the name itself.
  const getNamesFromFile = _.flow(
    name => path.resolve(__dirname, name),
    file => fs.readFileSync(file, 'utf-8'),
    _.split('\n'),
    _.map(_.flow(
      _.split(' '),
      _.first
    ))
  )

  const lastNames = getNamesFromFile('dist.all.last.txt')
  const firstNamesMale = getNamesFromFile('dist.male.first.txt')
  const firstNamesFemale = getNamesFromFile('dist.female.first.txt')

  let countMatches = 0
  const failures = []

  for (const lastName of lastNames.slice(0, 100)) {
    // BUG FIX: `firstName` was assigned without any declaration in both
    // inner loops, creating an implicit global (a crash under strict
    // mode / ES modules, and shared mutable state between the loops).
    for (const firstName of firstNamesMale.slice(0, 100)) {
      const matches = ofac.match({firstName, lastName}, null, {
        threshold,
        fullNameThreshold,
      })

      if (!_.isEmpty(matches)) {
        countMatches += 1
        failures.push({firstName, lastName})
      }
    }

    for (const firstName of firstNamesFemale.slice(0, 100)) {
      const matches = ofac.match({firstName, lastName}, null, {
        threshold,
        fullNameThreshold,
      })

      if (!_.isEmpty(matches)) {
        countMatches += 1
        failures.push({firstName, lastName})
      }
    }
  }

  for (const failure of failures) {
    ofac.match(failure, null, {
      threshold,
      fullNameThreshold,
      debug: true
    })
  }

  assert.equal(countMatches, 0)
})
|
||||
|
||||
|
||||
it.skip('test', function () {
|
||||
const firstName = 'hian chariapaporn'
|
||||
ofac.match({firstName}, null, {
|
||||
threshold,
|
||||
fullNameThreshold,
|
||||
debug: true,
|
||||
verboseFor: ['hiran', 'chariapaporn']
|
||||
})
|
||||
})
|
||||
|
||||
|
||||
it.skip('test', function () {
|
||||
const firstName = 'janice smith'
|
||||
ofac.match({firstName}, null, {
|
||||
threshold,
|
||||
fullNameThreshold,
|
||||
debug: true,
|
||||
verboseFor: ['samih', 'anis']
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
})
|
||||
188
packages/server/tests/ofac/parsing.js
Normal file
188
packages/server/tests/ofac/parsing.js
Normal file
|
|
@ -0,0 +1,188 @@
|
|||
const assert = require('assert')
|
||||
const parser = require('../../lib/ofac/parsing')
|
||||
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const util = require('util')
|
||||
const _ = require('lodash/fp')
|
||||
|
||||
// Generate a unique-ish scratch-file path under /tmp (test-only; not
// cryptographically random).
const randomTmpFileName = () => path.join('/tmp', String(Math.random()))
|
||||
|
||||
const writeFile = util.promisify(fs.writeFile)
|
||||
|
||||
// Write each string in `contents` to its own random temp file and
// resolve with the list of file names that were used.
function makeDataFiles (contents) {
  const names = _.times(randomTmpFileName, contents.length)
  const pairs = _.zip(names, contents)
  const writes = _.map(_.spread(writeFile), pairs)
  return Promise.all(writes).then(() => names)
}
|
||||
|
||||
|
||||
const mapLines = _.flow(_.map, _.join(''))
|
||||
|
||||
const partIds = new Map([
|
||||
['lastName', 1520],
|
||||
['firstName', 1521],
|
||||
['middleName', 1522],
|
||||
['maidenName', 1523],
|
||||
['patronymic', 91708],
|
||||
['matronymic', 91709],
|
||||
['nickname', 1528]
|
||||
])
|
||||
|
||||
const getId = part => partIds.get(part.partName)
|
||||
|
||||
const makePart = part => '' +
|
||||
` <DocumentedNamePart>
|
||||
<NamePartValue NamePartGroupID="${getId(part)}">${part.value}</NamePartValue>
|
||||
</DocumentedNamePart>`
|
||||
|
||||
// Render one <Alias> element for a test fixture.
// BUG FIX: the original interpolated the raw array returned by
// `_.map(makePart, alias.parts)` into the template literal; Array's
// default toString joins elements with commas, leaking stray `,` text
// nodes between the name parts. Use `mapLines`, consistent with every
// other element builder in this file.
const makeAlias = alias => '' +
`      <Alias AliasTypeID="1403">
        <DocumentedName ID="${alias.id}" DocNameStatusID="1">
          ${mapLines(makePart, alias.parts)}
        </DocumentedName>
      </Alias>`
|
||||
|
||||
const makePartGroup = part => '' +
|
||||
` <MasterNamePartGroup>
|
||||
<NamePartGroup
|
||||
ID="${getId(part)}"
|
||||
NamePartTypeID="${getId(part)}"/>
|
||||
</MasterNamePartGroup>`
|
||||
|
||||
const makePartGroups = alias => mapLines(makePartGroup, alias.parts)
|
||||
|
||||
const makeBirthDate = birthDate => '' +
|
||||
` <Feature FeatureTypeID="8">
|
||||
<FeatureVersion>
|
||||
<DatePeriod>
|
||||
<Start>
|
||||
<From>
|
||||
<Year>${birthDate.start.year}</Year>
|
||||
<Month>${birthDate.start.month}</Month>
|
||||
<Day>${birthDate.start.day}</Day>
|
||||
</From>
|
||||
<To>
|
||||
<Year>${birthDate.start.year}</Year>
|
||||
<Month>${birthDate.start.month}</Month>
|
||||
<Day>${birthDate.start.day}</Day>
|
||||
</To>
|
||||
</Start>
|
||||
<End>
|
||||
<From>
|
||||
<Year>${birthDate.end.year}</Year>
|
||||
<Month>${birthDate.end.month}</Month>
|
||||
<Day>${birthDate.end.day}</Day>
|
||||
</From>
|
||||
<To>
|
||||
<Year>${birthDate.end.year}</Year>
|
||||
<Month>${birthDate.end.month}</Month>
|
||||
<Day>${birthDate.end.day}</Day>
|
||||
</To>
|
||||
</End>
|
||||
</DatePeriod>
|
||||
</FeatureVersion>
|
||||
</Feature>`
|
||||
|
||||
const makeProfile = profile => {
|
||||
return '' +
|
||||
` <Profile ID="${profile.id}" PartySubTypeID="4">
|
||||
<Identity>
|
||||
${mapLines(makeAlias, profile.aliases)}
|
||||
<NamePartGroups>
|
||||
${mapLines(makePartGroups, profile.aliases)}
|
||||
</NamePartGroups>
|
||||
</Identity>
|
||||
${mapLines(makeBirthDate, profile.birthDatePeriods)}
|
||||
</Profile>`
|
||||
}
|
||||
|
||||
const makeXml = profiles => '' +
|
||||
`<?xml version="1.0" encoding="utf-8"?>
|
||||
<doc>
|
||||
${mapLines(makeProfile, profiles)}
|
||||
</doc>`
|
||||
|
||||
|
||||
const individualA = {id: '9', aliases: [{id: '5',
|
||||
parts: [
|
||||
{partName: 'firstName', value: 'john'},
|
||||
{partName: 'lastName', value: 'doe'}],
|
||||
fullName: 'john doe',
|
||||
words: [
|
||||
{value: 'john', phonetics: ['JN', 'AN']},
|
||||
{value: 'doe', phonetics: ['T']}]}],
|
||||
birthDatePeriods: [{
|
||||
start: {year: 1955, month: 10, day: 5},
|
||||
end: {year: 1955, month: 10, day: 5}}]
|
||||
}
|
||||
|
||||
const individualB = {id: '11', aliases: [{id: '15',
|
||||
parts: [
|
||||
{partName: 'firstName', value: 'john'},
|
||||
{partName: 'middleName', value: 'de'},
|
||||
{partName: 'lastName', value: 'gaul'}],
|
||||
fullName: 'john de gaul',
|
||||
words: [
|
||||
{value: 'john', phonetics: ['JN', 'AN']},
|
||||
{value: 'de', phonetics: ['T']},
|
||||
{value: 'gaul', phonetics: ['KL']}]}],
|
||||
birthDatePeriods: [{
|
||||
start: {year: 1965, month: 11, day: 20},
|
||||
end: {year: 1965, month: 11, day: 20}}]
|
||||
}
|
||||
|
||||
|
||||
// Drive the streaming OFAC parser over `source`, collecting every
// emitted profile. The parser invokes the callback with a falsy
// profile once the stream is exhausted, which resolves the promise
// with everything gathered; any parse error rejects immediately.
const parseIndividuals = source => new Promise((resolve, reject) => {
  const collected = []

  parser.parse(source, (err, profile) => {
    if (err) return reject(err)
    if (!profile) return resolve(collected)
    collected.push(profile)
  })
})
|
||||
|
||||
|
||||
describe('OFAC', function () {
|
||||
describe('Parsing', function () {
|
||||
|
||||
// To detect botched downloads
|
||||
it('should fail on malformed XML', function () {
|
||||
const xml = '<a><b></a>'
|
||||
return makeDataFiles([xml])
|
||||
.then(files => Promise.all(_.map(parseIndividuals, files)))
|
||||
.catch(error => {
|
||||
assert.ok(error instanceof Error)
|
||||
return 'failed'
|
||||
})
|
||||
.then(ret => {
|
||||
assert.equal(ret, 'failed')
|
||||
})
|
||||
})
|
||||
|
||||
it('should return the expected individuals', function () {
|
||||
const xml = makeXml([individualA, individualB])
|
||||
|
||||
return makeDataFiles([xml])
|
||||
.then(files => Promise.all(_.map(parseIndividuals, files)))
|
||||
.then(([individuals]) => {
|
||||
assert.ok(Array.isArray(individuals))
|
||||
assert.equal(individuals.length, 2)
|
||||
assert.deepEqual(individuals, [individualA, individualB])
|
||||
})
|
||||
})
|
||||
|
||||
})
|
||||
})
|
||||
87
packages/server/tests/stress/child.js
Normal file
87
packages/server/tests/stress/child.js
Normal file
|
|
@ -0,0 +1,87 @@
|
|||
const https = require('https')
|
||||
const path = require('path')
|
||||
const pify = require('pify')
|
||||
const fs = pify(require('fs'))
|
||||
const uuid = require('uuid')
|
||||
const _ = require('lodash/fp')
|
||||
const { PerformanceObserver, performance } = require('perf_hooks')
|
||||
|
||||
const utils = require('./utils')
|
||||
const variables = require('./utils/variables')
|
||||
|
||||
var certificate = {}
|
||||
var connectionInfo = {}
|
||||
|
||||
// Load the client TLS key/certificate pair for one simulated machine.
// Resolves with {key, cert}, or null when either file cannot be read
// (the error is logged, not rethrown — this is a best-effort loader).
const getCert = machineIndex => {
  const machineDir = path.resolve(__dirname, 'machines', `${machineIndex}`)
  const reads = [
    fs.readFile(path.join(machineDir, 'client.key')),
    fs.readFile(path.join(machineDir, 'client.pem'))
  ]

  return Promise.all(reads)
    .then(([key, cert]) => ({ key, cert }))
    .catch(err => {
      console.error('The following error when reading the certificate: ', err)
      return null
    })
}
|
||||
|
||||
// Read the raw connection_info.json buffer for one simulated machine.
const getConnectionInfo = machineIndex =>
  fs.readFile(path.resolve(__dirname, 'machines', `${machineIndex}`, 'connection_info.json'))
|
||||
|
||||
let counter = 0
|
||||
const requestTimes = []
|
||||
let latestResponseTime = 0
|
||||
|
||||
// Fire one authenticated /poll request for a simulated machine and log
// timing statistics. `pid` identifies the simulated process; `counter`
// is a module-level serial number incremented once per request.
// NOTE(review): the 'data' handler may fire once per chunk for large
// responses, sending multiple partial messages — confirm responses fit
// in one chunk, or accumulate on 'end'.
const request = (machineIndex, pid) => {
  performance.mark('A')
  const req = https.get({
    hostname: 'localhost',
    port: 3000,
    path: '/poll?state=chooseCoin&model=unknown&version=7.5.0-beta.0&idle=true&pid=' + pid + '&sn=' + counter,
    method: 'GET',
    key: certificate.key,
    cert: certificate.cert,
    ca: connectionInfo.ca,
    headers: {
      date: new Date().toISOString(),
      'request-id': uuid.v4()
    }
  }, res => {
    res.on('data', (d) => {
      performance.mark('B')
      performance.measure('A to B', 'A', 'B')
      console.log(`Machine ${machineIndex} || Avg request response time: ${_.mean(requestTimes).toFixed(3)} || Latest response time: ${latestResponseTime.toFixed(3)}`)
      process.send({ message: Buffer.from(d).toString() })
    })
  })

  // BUG FIX: without an 'error' listener, any connection failure emits
  // an unhandled 'error' event and crashes the whole child process.
  // Log it and let the polling interval retry instead.
  req.on('error', err => {
    console.error(`Machine ${machineIndex} || request failed:`, err)
  })

  counter++
}
|
||||
|
||||
// Record each completed 'A to B' measurement so the request logger can
// report the latest and running-average response times.
const obs = new PerformanceObserver(items => {
  const [entry] = items.getEntries()
  latestResponseTime = entry.duration
  requestTimes.push(latestResponseTime)
  performance.clearMarks()
})
obs.observe({ entryTypes: ['measure'] })
|
||||
|
||||
// Bootstrap one simulated machine: load its TLS material, optionally
// wait a random phase offset, then poll the server every 5 seconds.
process.on('message', async (msg) => {
  console.log('Message from parent:', msg)

  // BUG FIX: the original kicked off Promise.all(...) without awaiting
  // it, so the first request(s) raced the credential load and could go
  // out with an empty `certificate`/`connectionInfo`. Await before
  // polling starts.
  try {
    const [cert, rawConnectionInfo] = await Promise.all([
      getCert(msg.machineIndex),
      getConnectionInfo(msg.machineIndex)
    ])
    certificate = cert
    connectionInfo = JSON.parse(rawConnectionInfo)
  } catch (err) {
    console.error('The following error occurred during certificate parsing: ', err)
  }

  // Spread machine start times across the polling window when -v is set.
  if (msg.hasVariance) await new Promise(resolve => setTimeout(resolve, utils.randomIntFromInterval(1, variables.POLLING_INTERVAL)))

  request(msg.machineIndex, uuid.v4())

  setInterval(() => {
    request(msg.machineIndex, uuid.v4())
  }, 5000)
})
|
||||
35
packages/server/tests/stress/index.js
Normal file
35
packages/server/tests/stress/index.js
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
|
||||
const { fork } = require('child_process')
|
||||
const minimist = require('minimist')
|
||||
|
||||
const cmd = require('./scripts')
|
||||
const variables = require('./utils/variables')
|
||||
|
||||
function createMachines (numberOfMachines) {
|
||||
return cmd.execCommand(
|
||||
`bash ./scripts/create-machines.sh ${numberOfMachines} ${variables.SERVER_CERT_PATH} ${variables.MACHINE_PATH}`
|
||||
)
|
||||
}
|
||||
|
||||
function startServer () {
|
||||
const forked = fork('test-server.js')
|
||||
forked.send('start')
|
||||
}
|
||||
|
||||
// Entry point: provision N machine identities, start the test server,
// then fork one polling child per machine.
//
// Usage: node index.js <numberOfMachines> [-v]
//   -v  adds random phase variance to each machine's polling start.
async function run (args = minimist(process.argv.slice(2))) {
  const NUMBER_OF_MACHINES = Number(args._[0])
  const HAS_VARIANCE = args.v || false

  // ROBUSTNESS: fail fast on a missing/invalid machine count instead of
  // passing `undefined` into the shell script and forking zero children.
  if (!Number.isInteger(NUMBER_OF_MACHINES) || NUMBER_OF_MACHINES < 1) {
    throw new Error('usage: node index.js <numberOfMachines> [-v]')
  }

  await createMachines(NUMBER_OF_MACHINES)
  startServer()

  for (let i = 1; i <= NUMBER_OF_MACHINES; i++) {
    const forked = fork('child.js')
    forked.send({ machineIndex: i, hasVariance: HAS_VARIANCE })
    forked.on('message', msg => {
      console.log(`Machine ${i} || ${msg}`)
    })
  }
}

// BUG FIX: the original called run() without a rejection handler,
// leaving any setup failure as an unhandled promise rejection.
run().catch(err => {
  console.error(err)
  process.exitCode = 1
})
|
||||
47
packages/server/tests/stress/load-tx-dummy-data.js
Normal file
47
packages/server/tests/stress/load-tx-dummy-data.js
Normal file
|
|
@ -0,0 +1,47 @@
|
|||
const db = require('../../lib/db')
|
||||
|
||||
// Seed the database with bulk dummy data (one synthetic customer plus
// 5M rows each of cash-in txs, cash-out txs, logs and bills) for the
// stress-test suite.
// BUG FIX: `db.none(sql)` returned a promise that was neither returned
// nor handled, so failures disappeared as unhandled rejections and
// callers could not tell when seeding finished. Return the promise and
// handle it at the call site.
const loadDummyTxData = () => {
  const sql = `
  CREATE EXTENSION IF NOT EXISTS "uuid-ossp";

  INSERT INTO customers
  VALUES ('99ac9999-9999-99e9-9999-9f99a9999999', null, null, null, null, null, null,
  'load_test_customers', null, null, null, null, null, null, '2021-04-16 10:51:38',
  'automatic', null, 'automatic', null, 'automatic', null, 'automatic', null, 'automatic',
  null, 'automatic', null, null, null, null, null, null, 'automatic', null, null,
  null, null, null, null, null, null, null, null, null, null)
  ON CONFLICT DO NOTHING;

  INSERT INTO cash_in_txs
  SELECT uuid_generate_v4(), md5(random()::text), md5(random()::text), i::integer, 'BTC',
  i::integer, 'EUR', null, null, null, null, now() - random() * INTERVAL '2 days', random() > 0.5,
  random() > 0.5, random() > 0.5, now() - random() * INTERVAL '2 days', null, random() > 0.5,
  random() > 0.5, i::integer, i::integer, 1, '99ac9999-9999-99e9-9999-9f99a9999999',
  6, random() > 0.5, random() * (0.9-0.1) + 0.1::int, i::integer, random() > 0.5, null, null, false,
  null, null, null
  FROM generate_series(1, 5000000) as t(i);

  INSERT INTO cash_out_txs
  SELECT uuid_generate_v4(), md5(random()::text), md5(random()::text), i::integer, 'BTC',
  i::integer, 'EUR', 'confirmed', random() > 0.5, random() > 0.5, random() > 0.5,
  null, null, now() - random() * INTERVAL '2 days', now() - random() * INTERVAL '2 days', null,
  random() > 0.5, random() > 0.5, random() > 0.5, 0, 1, 20, 50, null, '99ac9999-9999-99e9-9999-9f99a9999999',
  random() * (40-1) + 1::int, now() - random() * INTERVAL '2 days', random() > 0.5, null,
  random() * (0.9-0.1) + 0.1::int, i::integer, i::integer, null, null, null, null, null, null, null, null
  FROM generate_series(1, 5000000) as t(i);

  INSERT INTO logs
  SELECT uuid_generate_v4(), md5(random()::text), 'info', now() - random() * INTERVAL '2 days',
  'message', now() - random() * INTERVAL '2 days',0
  FROM generate_series(1, 5000000) as t(i);

  INSERT INTO bills
  SELECT uuid_generate_v4(), i::integer, 'USD', '3d92c323-58c6-4172-9f30-91b80f0c653c',
  i::integer, '2021-04-16 11:51:38', 'BTC', i::integer
  FROM generate_series(1, 5000000) as t(i);

  `
  return db.none(sql)
}

loadDummyTxData()
  .catch(err => {
    console.error('Failed to load dummy tx data:', err)
    process.exitCode = 1
  })
|
||||
231
packages/server/tests/stress/queries-performance-analyzer.js
Normal file
231
packages/server/tests/stress/queries-performance-analyzer.js
Normal file
|
|
@ -0,0 +1,231 @@
|
|||
const db = require('../../lib/db')
|
||||
const Pgp = require('pg-promise')()
|
||||
const _ = require('lodash/fp')
|
||||
const cashInTx = require('../../lib/cash-in/cash-in-tx')
|
||||
const { CASH_OUT_TRANSACTION_STATES, REDEEMABLE_AGE } = require('../../lib/cash-out/cash-out-helper')
|
||||
|
||||
const TX_PASSTHROUGH_ERROR_CODES = ['operatorCancel', 'scoreThresholdReached']
|
||||
|
||||
function filterTransaction () {
|
||||
const sql = `EXPLAIN ANALYZE
|
||||
SELECT DISTINCT * FROM (
|
||||
SELECT 'type' AS type, 'Cash In' AS value UNION
|
||||
SELECT 'type' AS type, 'Cash Out' AS value UNION
|
||||
SELECT 'machine' AS type, name AS value FROM devices d INNER JOIN cash_in_txs t ON d.device_id = t.device_id UNION
|
||||
SELECT 'machine' AS type, name AS value FROM devices d INNER JOIN cash_out_txs t ON d.device_id = t.device_id UNION
|
||||
SELECT 'customer' AS type, concat(id_card_data::json->>'firstName', ' ', id_card_data::json->>'lastName') AS value
|
||||
FROM customers c INNER JOIN cash_in_txs t ON c.id = t.customer_id
|
||||
WHERE c.id_card_data::json->>'firstName' IS NOT NULL or c.id_card_data::json->>'lastName' IS NOT NULL UNION
|
||||
SELECT 'customer' AS type, concat(id_card_data::json->>'firstName', ' ', id_card_data::json->>'lastName') AS value
|
||||
FROM customers c INNER JOIN cash_out_txs t ON c.id = t.customer_id
|
||||
WHERE c.id_card_data::json->>'firstName' IS NOT NULL or c.id_card_data::json->>'lastName' IS NOT NULL UNION
|
||||
SELECT 'fiat' AS type, fiat_code AS value FROM cash_in_txs UNION
|
||||
SELECT 'fiat' AS type, fiat_code AS value FROM cash_out_txs UNION
|
||||
SELECT 'crypto' AS type, crypto_code AS value FROM cash_in_txs UNION
|
||||
SELECT 'crypto' AS type, crypto_code AS value FROM cash_out_txs UNION
|
||||
SELECT 'address' AS type, to_address AS value FROM cash_in_txs UNION
|
||||
SELECT 'address' AS type, to_address AS value FROM cash_out_txs UNION
|
||||
SELECT 'status' AS type, ${cashInTx.TRANSACTION_STATES} AS value FROM cash_in_txs UNION
|
||||
SELECT 'status' AS type, ${CASH_OUT_TRANSACTION_STATES} AS value FROM cash_out_txs
|
||||
) f`
|
||||
return db.any(sql)
|
||||
}
|
||||
|
||||
function filterCustomer () {
|
||||
const sql = `EXPLAIN ANALYZE
|
||||
SELECT DISTINCT * FROM (
|
||||
SELECT 'phone' AS type, phone AS value FROM customers WHERE phone IS NOT NULL UNION
|
||||
SELECT 'name' AS type, id_card_data::json->>'firstName' AS value FROM customers WHERE id_card_data::json->>'firstName' IS NOT NULL AND id_card_data::json->>'lastName' IS NULL UNION
|
||||
SELECT 'name' AS type, id_card_data::json->>'lastName' AS value FROM customers WHERE id_card_data::json->>'firstName' IS NULL AND id_card_data::json->>'lastName' IS NOT NULL UNION
|
||||
SELECT 'name' AS type, concat(id_card_data::json->>'firstName', ' ', id_card_data::json->>'lastName') AS value FROM customers WHERE id_card_data::json->>'firstName' IS NOT NULL AND id_card_data::json->>'lastName' IS NOT NULL UNION
|
||||
SELECT 'address' as type, id_card_data::json->>'address' AS value FROM customers WHERE id_card_data::json->>'address' IS NOT NULL UNION
|
||||
SELECT 'id' AS type, id_card_data::json->>'documentNumber' AS value FROM customers WHERE id_card_data::json->>'documentNumber' IS NOT NULL
|
||||
) f`
|
||||
return db.any(sql)
|
||||
}
|
||||
|
||||
function getCustomerById (id) {
|
||||
const passableErrorCodes = _.map(Pgp.as.text, TX_PASSTHROUGH_ERROR_CODES).join(',')
|
||||
|
||||
const sql = `EXPLAIN ANALYZE
|
||||
select id, authorized_override, days_suspended, is_suspended, front_camera_path, front_camera_override,
|
||||
phone, sms_override, id_card_data, id_card_data_override, id_card_data_expiration,
|
||||
id_card_photo_path, id_card_photo_override, us_ssn, us_ssn_override, sanctions, sanctions_at,
|
||||
sanctions_override, total_txs, total_spent, created as last_active, fiat as last_tx_fiat,
|
||||
fiat_code as last_tx_fiat_code, tx_class as last_tx_class, subscriber_info
|
||||
from (
|
||||
select c.id, c.authorized_override,
|
||||
greatest(0, date_part('day', c.suspended_until - now())) as days_suspended,
|
||||
c.suspended_until > now() as is_suspended,
|
||||
c.front_camera_path, c.front_camera_override,
|
||||
c.phone, c.sms_override, c.id_card_data, c.id_card_data_override, c.id_card_data_expiration,
|
||||
c.id_card_photo_path, c.id_card_photo_override, c.us_ssn, c.us_ssn_override, c.sanctions,
|
||||
c.sanctions_at, c.sanctions_override, c.subscriber_info, t.tx_class, t.fiat, t.fiat_code, t.created,
|
||||
row_number() over (partition by c.id order by t.created desc) as rn,
|
||||
sum(case when t.id is not null then 1 else 0 end) over (partition by c.id) as total_txs,
|
||||
sum(case when error_code is null or error_code not in ($1^) then t.fiat else 0 end) over (partition by c.id) as total_spent
|
||||
from customers c left outer join (
|
||||
select 'cashIn' as tx_class, id, fiat, fiat_code, created, customer_id, error_code
|
||||
from cash_in_txs where send_confirmed = true union
|
||||
select 'cashOut' as tx_class, id, fiat, fiat_code, created, customer_id, error_code
|
||||
from cash_out_txs where confirmed_at is not null) t on c.id = t.customer_id
|
||||
where c.id = $2
|
||||
) as cl where rn = 1`
|
||||
return db.any(sql, [passableErrorCodes, id])
|
||||
}
|
||||
|
||||
// EXPLAIN ANALYZE the paged machine-log query for one device.
// `from`/`until` default to the full history; `limit` null means no cap.
function simpleGetMachineLogs (deviceId, from = new Date(0).toISOString(), until = new Date().toISOString(), limit = null, offset = 0) {
  const sql = `EXPLAIN ANALYZE
  select id, log_level, timestamp, message from logs
  where device_id=$1
  and timestamp >= $2
  and timestamp <= $3
  order by timestamp desc, serial desc
  limit $4
  offset $5`
  const params = [ deviceId, from, until, limit, offset ]
  return db.any(sql, params)
}
|
||||
|
||||
function batchCashIn (
|
||||
from = new Date(0).toISOString(),
|
||||
until = new Date().toISOString(),
|
||||
limit = null,
|
||||
offset = 0,
|
||||
id = null,
|
||||
txClass = null,
|
||||
machineName = null,
|
||||
customerName = null,
|
||||
fiatCode = null,
|
||||
cryptoCode = null,
|
||||
toAddress = null,
|
||||
status = null,
|
||||
simplified = false
|
||||
) {
|
||||
const cashInSql = `EXPLAIN ANALYZE
|
||||
SELECT 'cashIn' AS tx_class, txs.*,
|
||||
c.phone AS customer_phone,
|
||||
c.id_card_data_number AS customer_id_card_data_number,
|
||||
c.id_card_data_expiration AS customer_id_card_data_expiration,
|
||||
c.id_card_data AS customer_id_card_data,
|
||||
concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') AS customer_name,
|
||||
c.front_camera_path AS customer_front_camera_path,
|
||||
c.id_card_photo_path AS customer_id_card_photo_path,
|
||||
((NOT txs.send_confirmed) AND (txs.created <= now() - interval $1)) AS expired
|
||||
FROM (SELECT *, ${cashInTx.TRANSACTION_STATES} AS txStatus FROM cash_in_txs) AS txs
|
||||
LEFT OUTER JOIN customers c ON txs.customer_id = c.id
|
||||
INNER JOIN devices d ON txs.device_id = d.device_id
|
||||
WHERE txs.created >= $2 AND txs.created <= $3 ${
|
||||
id !== null ? `AND txs.device_id = $6` : ``
|
||||
}
|
||||
AND ($7 is null or $7 = 'Cash In')
|
||||
AND ($8 is null or d.name = $8)
|
||||
AND ($9 is null or concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') = $9)
|
||||
AND ($10 is null or txs.fiat_code = $10)
|
||||
AND ($11 is null or txs.crypto_code = $11)
|
||||
AND ($12 is null or txs.to_address = $12)
|
||||
AND ($13 is null or txs.txStatus = $13)
|
||||
AND (fiat > 0)
|
||||
ORDER BY created DESC limit $4 offset $5`
|
||||
|
||||
return db.any(cashInSql, [cashInTx.PENDING_INTERVAL, from, until, limit, offset, id, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status])
|
||||
}
|
||||
|
||||
/**
 * EXPLAIN ANALYZE the batched cash-out query used by the stress-test report.
 *
 * Every filter defaults to null, meaning "no restriction"; each
 * `($n is null or ...)` clause in the SQL deactivates itself when the
 * corresponding filter is unset. `simplified` is accepted for signature
 * parity with the production query but is not read here.
 *
 * @returns {Promise<Array>} rows of the EXPLAIN ANALYZE query plan
 */
function batchCashOut (
  from = new Date(0).toISOString(),
  until = new Date().toISOString(),
  limit = null,
  offset = 0,
  id = null,
  txClass = null,
  machineName = null,
  customerName = null,
  fiatCode = null,
  cryptoCode = null,
  toAddress = null,
  status = null,
  simplified = false
) {
  // $6 (device id) is only interpolated into the SQL when `id` is set, but it
  // is always passed below so the positional parameter numbering stays stable.
  const cashOutSql = `EXPLAIN ANALYZE
  SELECT 'cashOut' AS tx_class,
  txs.*,
  actions.tx_hash,
  c.phone AS customer_phone,
  c.id_card_data_number AS customer_id_card_data_number,
  c.id_card_data_expiration AS customer_id_card_data_expiration,
  c.id_card_data AS customer_id_card_data,
  concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') AS customer_name,
  c.front_camera_path AS customer_front_camera_path,
  c.id_card_photo_path AS customer_id_card_photo_path,
  (extract(epoch FROM (now() - greatest(txs.created, txs.confirmed_at))) * 1000) >= $1 AS expired
  FROM (SELECT *, ${CASH_OUT_TRANSACTION_STATES} AS txStatus FROM cash_out_txs) txs
  INNER JOIN cash_out_actions actions ON txs.id = actions.tx_id
  AND actions.action = 'provisionAddress'
  LEFT OUTER JOIN customers c ON txs.customer_id = c.id
  INNER JOIN devices d ON txs.device_id = d.device_id
  WHERE txs.created >= $2 AND txs.created <= $3 ${
    id !== null ? `AND txs.device_id = $6` : ``
  }
  AND ($7 is null or $7 = 'Cash Out')
  AND ($8 is null or d.name = $8)
  AND ($9 is null or concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') = $9)
  AND ($10 is null or txs.fiat_code = $10)
  AND ($11 is null or txs.crypto_code = $11)
  AND ($12 is null or txs.to_address = $12)
  AND ($13 is null or txs.txStatus = $13)
  AND (fiat > 0)
  ORDER BY created DESC limit $4 offset $5`

  return db.any(cashOutSql, [REDEEMABLE_AGE, from, until, limit, offset, id, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status])
}
|
||||
|
||||
/**
 * EXPLAIN ANALYZE the single-transaction lookup for either class.
 *
 * @param {String} txId    transaction id to fetch
 * @param {String} txClass 'cashIn' selects the cash-in plan; anything else
 *                         selects the cash-out plan
 * @returns {Promise<Array>} rows of the EXPLAIN ANALYZE query plan
 */
function getTx (txId, txClass) {
  // Single cash-in row; $1 is the pending interval, $2 the tx id.
  const cashInSql = `EXPLAIN ANALYZE
  select 'cashIn' as tx_class, txs.*,
  ((not txs.send_confirmed) and (txs.created <= now() - interval $1)) as expired
  from cash_in_txs as txs
  where txs.id=$2`

  // Single cash-out row; note the parameter order is reversed here:
  // $1 is the tx id, $2 the redeemable age.
  const cashOutSql = `EXPLAIN ANALYZE
  select 'cashOut' as tx_class,
  txs.*,
  (extract(epoch from (now() - greatest(txs.created, txs.confirmed_at))) * 1000) >= $2 as expired
  from cash_out_txs txs
  where txs.id=$1`

  if (txClass === 'cashIn') {
    return db.any(cashInSql, [cashInTx.PENDING_INTERVAL, txId])
  }
  return db.any(cashOutSql, [txId, REDEEMABLE_AGE])
}
|
||||
|
||||
/**
 * EXPLAIN ANALYZE the lookup of a transaction's associated rows:
 * bills for cash-in transactions, actions for cash-out transactions.
 *
 * @param {String} txId    transaction id
 * @param {String} txClass 'cashIn' → bills query; anything else → actions
 * @returns {Promise<Array>} rows of the EXPLAIN ANALYZE query plan
 */
function getTxAssociatedData (txId, txClass) {
  const billsSql = `EXPLAIN ANALYZE select 'bills' as bills, b.* from bills b where cash_in_txs_id = $1`
  const actionsSql = `EXPLAIN ANALYZE select 'cash_out_actions' as cash_out_actions, actions.* from cash_out_actions actions where tx_id = $1`

  if (txClass === 'cashIn') {
    return db.any(billsSql, [txId])
  }
  return db.any(actionsSql, [txId])
}
|
||||
|
||||
/**
 * Run every benchmark query once, in parallel, and print each one's
 * EXPLAIN ANALYZE execution summary.
 *
 * Fixes applied: the destructured `getTx` results were swapped relative to
 * the Promise.all order (cash-in timing was labelled cash-out and vice
 * versa), the machine-logs timing was fetched but never printed, and the
 * chain had no rejection handler.
 */
const run = () => {
  const deviceId = '7526924341dc4a57f02b6411a85923de' // randomly generated by the load script
  const customerId = '99ac9999-9999-99e9-9999-9f99a9999999' // hardcoded on the current load script
  const cashOutTxId = 'c402a7ae-b8f7-4781-8080-1e9ab76d62b5' // randomly generated by the load script
  const cashInTxId = '4d8d89f4-7d77-4d30-87e8-be9de05deea7' // randomly generated by the load script

  // The summary line of an EXPLAIN ANALYZE plan is its last row.
  const getExecutionTime = _.compose(_.get('QUERY PLAN'), _.last)

  Promise.all([filterCustomer(), filterTransaction(), getCustomerById(customerId), simpleGetMachineLogs(deviceId), batchCashIn(), batchCashOut(),
    getTx(cashInTxId, 'cashIn'), getTx(cashOutTxId, 'cashOut'), getTxAssociatedData(cashInTxId, 'cashIn'), getTxAssociatedData(cashOutTxId, 'cashOut')])
    // The names below must follow the Promise.all order above — in
    // particular, the cash-in getTx result arrives before the cash-out one.
    .then(([filterCustomer, filterTransaction, getCustomerById, logs, batchCashIn, batchCashOut, getTxCashIn, getTxCashOut,
      getTxAssociatedDataCashIn, getTxAssociatedDataCashOut]) => {
      console.log(`filterCustomer => ${getExecutionTime(filterCustomer)}`)
      console.log(`filterTransaction => ${getExecutionTime(filterTransaction)}`)
      console.log(`getCustomerById => ${getExecutionTime(getCustomerById)}`)
      console.log(`simpleGetMachineLogs => ${getExecutionTime(logs)}`)
      console.log(`batchCashOut + batchCashIn => ${getExecutionTime(batchCashOut) + ' + ' + getExecutionTime(batchCashIn)} `)
      console.log(`getTx (cash-out) => ${getExecutionTime(getTxCashOut)}`)
      console.log(`getTx (cash-in) => ${getExecutionTime(getTxCashIn)}`)
      console.log(`getTxAssociatedData (cash-in) => ${getExecutionTime(getTxAssociatedDataCashIn)}`)
      console.log(`getTxAssociatedDataCashOut (cash-out) => ${getExecutionTime(getTxAssociatedDataCashOut)}`)
    })
    .catch(err => {
      console.error(err)
      process.exitCode = 1
    })
}

run()
|
||||
61
packages/server/tests/stress/scripts/create-machines.sh
Normal file
61
packages/server/tests/stress/scripts/create-machines.sh
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
#!/bin/bash
set -e

# Create N paired stress-test machines against a copy of the lamassu database.
# Args: 1) number of machines  2) server root CA pem  3) lamassu-machine path
USAGE="usage: ./create-machines.sh [number_of_machines] /path/to/server/cert/lamassu_op_root_ca.pem /path/to/machine/"

# All three positional arguments are required (the original only checked
# for zero arguments while reading $1, $2 and $3 below).
if [ $# -lt 3 ]
then
  echo "$USAGE" && exit 1
fi

# First argument must be a positive integer (machine count).
case $1 in
  ''|*[!0-9]*) echo "$USAGE" && exit 1;;
esac

# Flatten the PEM onto one line so it can be embedded into the JSON below.
SERVER_CERT=$(perl -pe 's/\n/\\n/' < "$2")
if [ -z "$SERVER_CERT" ]
then
  echo "Lamassu-op-root-ca.pem is empty" && exit 1
fi

# Remove old folders
rm -rf ./machines/*

# Create stress database
sudo -u postgres psql postgres -c "drop database if exists lamassu_stress"
sudo -u postgres psql postgres -c "create database lamassu_stress with template lamassu"

START=1
END=$1
for (( c=$START; c<=$END; c++ ))
do
  echo "Creating machine $c out of $END..."
  NUMBER=$c
  mkdir -p ./machines/$NUMBER/
  cp "$3"/data/client.sample.pem ./machines/$NUMBER/
  cp "$3"/data/client.sample.key ./machines/$NUMBER/

  cat > ./machines/$NUMBER/connection_info.json << EOL
{"host":"localhost","ca":"$SERVER_CERT"}
EOL

  echo 'Generating certs...'
  node ./utils/init-cert.js $NUMBER

  # Get device_id
  DEVICE_ID=`openssl x509 -outform der -in ./machines/$NUMBER/client.pem | sha256sum | cut -d " " -f 1`

  # Update db config
  NEW_CONFIG=$(node ./utils/save-config.js $NUMBER $DEVICE_ID)
  sudo -u postgres psql "lamassu_stress" << EOF
insert into user_config(type, data, created, valid)
values('config', '$NEW_CONFIG', now(), 't')
EOF

  # Add device on db; quote $NUMBER since devices.name is a text column.
  sudo -u postgres psql "lamassu_stress" << EOF
insert into devices(device_id, cashbox, cassette1, cassette2, paired, display, created, name, last_online, location)
values ('$DEVICE_ID', 0, 0, 0, 't', 't', now(), '$NUMBER', now(), '{}'::json)
EOF
done

echo "Done!"
|
||||
32
packages/server/tests/stress/scripts/index.js
Normal file
32
packages/server/tests/stress/scripts/index.js
Normal file
|
|
@ -0,0 +1,32 @@
|
|||
const exec = require('child_process').exec
|
||||
|
||||
/**
|
||||
* Execute simple shell command (async wrapper).
|
||||
* @param {String} cmd
|
||||
* @return {Object} { stdout: String, stderr: String }
|
||||
*/
|
||||
/**
 * Execute a shell command (async wrapper), streaming the child's output to
 * the parent console while it runs.
 *
 * Fixes applied: the child's stderr is mirrored to the parent's stderr (it
 * was going to stdout), and the exit logger no longer crashes with a
 * TypeError when the child is killed by a signal (exit code is null then).
 *
 * @param {String} cmd shell command line
 * @return {Promise<Object>} { stdout: String, stderr: String }
 */
function execCommand (cmd) {
  return new Promise(function (resolve, reject) {
    const proc = exec(cmd, (err, stdout, stderr) => {
      if (err) {
        reject(err)
      } else {
        resolve({ stdout, stderr })
      }
    })

    proc.stdout.on('data', data => {
      console.log(data)
    })

    proc.stderr.on('data', data => {
      console.error(data)
    })

    proc.on('exit', (code, signal) => {
      // `code` is null when the process was terminated by a signal.
      if (code === null) {
        console.log('child process killed with signal ' + signal)
      } else {
        console.log('child process exited with code ' + code.toString())
      }
    })
  })
}
|
||||
|
||||
module.exports = { execCommand }
|
||||
7
packages/server/tests/stress/test-server.js
Normal file
7
packages/server/tests/stress/test-server.js
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
const cmd = require('./scripts')
|
||||
|
||||
// Launch the server under the V8 profiler when the parent process asks.
// LAMASSU_DB is an environment variable, so it must precede the `node`
// invocation — the original command passed it as a script argument, which
// the server would never see.
process.on('message', async (msg) => {
  console.log('Message from parent:', msg)

  await cmd.execCommand(`LAMASSU_DB=STRESS_TEST node --prof ../../bin/lamassu-server`)
})
|
||||
2
packages/server/tests/stress/utils/default-config.json
Normal file
2
packages/server/tests/stress/utils/default-config.json
Normal file
|
|
@ -0,0 +1,2 @@
|
|||
{"config":{"wallets_BTC_coin":"BTC","wallets_BTC_wallet":"mock-wallet","wallets_BTC_ticker":"mock-ticker","wallets_BTC_exchange":"mock-exchange","wallets_BTC_zeroConf":"all-zero-conf","locale_id":"32cc539a-78e6-4a1d-96d8-31b7aa628e1f","locale_country":"US","locale_fiatCurrency":"USD","locale_languages":["en-US"],"locale_cryptoCurrencies":["BTC"],"commissions_minimumTx":1,"commissions_fixedFee":1,"commissions_cashOut":1,"commissions_cashIn":1,"commissions_id":"719b9dd9-1444-42fc-918a-f8b2265513ac"}}
|
||||
|
||||
5
packages/server/tests/stress/utils/index.js
Normal file
5
packages/server/tests/stress/utils/index.js
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
/**
 * Return a uniformly random integer in the inclusive range [min, max].
 *
 * @param {Number} min lower bound (inclusive)
 * @param {Number} max upper bound (inclusive)
 * @return {Number} integer between min and max
 */
function randomIntFromInterval (min, max) {
  const span = max - min + 1
  return min + Math.floor(Math.random() * span)
}
|
||||
|
||||
module.exports = { randomIntFromInterval }
|
||||
12
packages/server/tests/stress/utils/init-cert.js
Normal file
12
packages/server/tests/stress/utils/init-cert.js
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
const path = require('path')
|
||||
const variables = require('./variables')
|
||||
const { init } = require(`../${variables.MACHINE_PATH}/lib/pairing`)
|
||||
|
||||
// The machine number is the sole CLI argument; it names the machine folder.
const machineNumber = process.argv[2]

// Resolve a file inside this machine's working directory.
const machineFile = name => path.resolve(process.cwd(), 'machines', machineNumber, name)

// Generate the client certificate pair for this machine.
init({
  cert: machineFile('client.pem'),
  key: machineFile('client.key')
})
|
||||
3
packages/server/tests/stress/utils/save-config.js
Normal file
3
packages/server/tests/stress/utils/save-config.js
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
// Print the default machine configuration as a single JSON line so the
// shell scripts (see create-machines.sh) can capture it into a variable.
const config = require('./default-config.json')

console.log(JSON.stringify(config))
|
||||
7
packages/server/tests/stress/utils/variables.js
Normal file
7
packages/server/tests/stress/utils/variables.js
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
// Path to the server's root CA certificate, relative to the stress-test dir.
const SERVER_CERT_PATH = `../../certs/Lamassu_OP_Root_CA.pem`
// Path to a local lamassu-machine checkout, whose pairing code is reused.
const MACHINE_PATH = `../../../lamassu-machine`

// Request timers
const POLLING_INTERVAL = 5000 // ms between simulated machine polls

module.exports = { SERVER_CERT_PATH, MACHINE_PATH, POLLING_INTERVAL }
|
||||
Loading…
Add table
Add a link
Reference in a new issue