Merge branch 'dev' into feat/lam-1291/stress-testing

* dev: (39 commits)
  chore: re-add build files
  fix: mailgun as default
  fix: backwards compatibility on cashout fixed fee
  chore: remove coinbase as a ticker
  fix: expire temporary cookies on browser close
  fix: optimize and normalize bills and blacklist
  chore: data for cypress
  chore: deprecate old unused tables
  chore: remove dependency on async local storage
  fix: proper datetime name
  chore: remove extra comment
  chore: update build
  chore: migrating to nodejs 22
  feat: show unpaired device names on transactions
  chore: deprecate old migrations
  fix: update yup usage on custom info requests
  fix: loading svg on usdc
  chore: lamassu coins version bump
  chore: lamassu coins bump on admin
  fix: fee for sweeps
  ...
Commit 5d24f9b889 by siiky, 2025-04-15 12:43:30 +01:00
124 changed files with 17979 additions and 15339 deletions

View file

@@ -18,7 +18,6 @@ jobs:
key: ${{ runner.os }}-buildx-updatetar
restore-keys: |
${{ runner.os }}-buildx-updatetar
- name: Build Docker image
uses: docker/build-push-action@v5
with:
@@ -34,7 +33,6 @@ jobs:
docker create --name extract_artifact ci_image:latest
docker cp extract_artifact:/lamassu-server.tar.gz ./lamassu-server.tar.gz
docker rm extract_artifact
- name: Upload artifact
uses: actions/upload-artifact@v4
with:

View file

@@ -1 +1 @@
nodejs 14
nodejs 22

View file

@@ -4,27 +4,16 @@ const _ = require('lodash/fp')
require('../lib/environment-helper')
const db = require('../lib/db')
const migrate = require('../lib/migrate')
const { asyncLocalStorage, defaultStore } = require('../lib/async-storage')
const createMigration = `CREATE TABLE IF NOT EXISTS migrations (
id serial PRIMARY KEY,
data json NOT NULL
)`
const select = 'select * from migrations limit 1'
// no need to log the migration process
process.env.SKIP_SERVER_LOGS = true
const getMigrateFile = () => Promise.resolve()
const store = defaultStore()
asyncLocalStorage.run(store, () => {
db.none(createMigration)
.then(() => Promise.all([db.oneOrNone(select), getMigrateFile()]))
.then(([qResult, migrateFile]) => {
process.env.SKIP_SERVER_LOGS = !(qResult && _.find(({ title }) => title === '1572524820075-server-support-logs.js', qResult.data.migrations ?? []))
if (!qResult && migrateFile) {
return db.none('insert into migrations (id, data) values (1, $1)', [migrateFile])
}
})
db.none(createMigration)
.then(() => migrate.run())
.then(() => {
console.log('DB Migration succeeded.')
@@ -34,4 +23,3 @@ asyncLocalStorage.run(store, () => {
console.error('DB Migration failed: %s', err)
process.exit(1)
})
})
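Note: with the async-storage wrapper and the legacy-migration probe removed, the runner above reduces to a plain promise chain. A consolidated sketch of the post-merge script, reconstructed from the surviving lines of the diff (shebang and require paths assumed):

```js
#!/usr/bin/env node
// Sketch of the simplified migration runner: create the bookkeeping
// table if needed, run pending migrations, exit with a status code.
require('../lib/environment-helper')
const db = require('../lib/db')
const migrate = require('../lib/migrate')

// no need to log the migration process
process.env.SKIP_SERVER_LOGS = true

const createMigration = `CREATE TABLE IF NOT EXISTS migrations (
  id serial PRIMARY KEY,
  data json NOT NULL
)`

db.none(createMigration)
  .then(() => migrate.run())
  .then(() => {
    console.log('DB Migration succeeded.')
    process.exit(0)
  })
  .catch(err => {
    console.error('DB Migration failed: %s', err)
    process.exit(1)
  })
```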

View file

@@ -1,9 +1,8 @@
#!/usr/bin/env node
require('../lib/environment-helper')
const { asyncLocalStorage, defaultStore } = require('../lib/async-storage')
const userManagement = require('../lib/new-admin/graphql/modules/userManagement')
const authErrors = require('../lib/new-admin/graphql/errors/authentication')
const authErrors = require('../lib/new-admin/graphql/errors')
const name = process.argv[2]
const role = process.argv[3]
@@ -32,8 +31,7 @@ if (role !== 'user' && role !== 'superuser') {
process.exit(2)
}
asyncLocalStorage.run(defaultStore(), () => {
userManagement.createRegisterToken(name, role).then(token => {
userManagement.createRegisterToken(name, role).then(token => {
if (domain === 'localhost' && process.env.NODE_ENV !== 'production') {
console.log(`https://${domain}:3001/register?t=${token.token}`)
} else {
@@ -41,7 +39,7 @@ asyncLocalStorage.run(defaultStore(), () => {
}
process.exit(0)
}).catch(err => {
}).catch(err => {
if (err instanceof authErrors.UserAlreadyExistsError){
console.log(`A user with email ${name} already exists!`)
@@ -50,5 +48,4 @@ asyncLocalStorage.run(defaultStore(), () => {
console.log('Error: %s', err)
process.exit(3)
})
})

View file

@@ -1,5 +0,0 @@
#!/usr/bin/env node
const adminServer = require('../lib/new-admin/graphql-dev-insecure')
adminServer.run()

View file

@@ -15,7 +15,7 @@ RUN apt-get install -y -q curl \
net-tools \
tar
RUN curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash -
RUN curl -sL https://deb.nodesource.com/setup_22.x | sudo -E bash -
RUN apt-get install nodejs -y -q
WORKDIR lamassu-server

View file

@@ -1,7 +1,7 @@
FROM alpine:3.14 AS build
RUN apk add --no-cache nodejs npm git curl build-base net-tools python3 postgresql-dev
FROM node:22-alpine AS build
RUN apk add --no-cache npm git curl build-base net-tools python3 postgresql-dev
WORKDIR lamassu-server
WORKDIR /lamassu-server
COPY ["package.json", "package-lock.json", "./"]
RUN npm version --allow-same-version --git-tag-version false --commit-hooks false 1.0.0
@@ -10,8 +10,8 @@ RUN npm install --production
COPY . ./
FROM alpine:3.14 AS l-s-base
RUN apk add --no-cache nodejs npm git curl bash libpq openssl ca-certificates
FROM node:22-alpine AS l-s-base
RUN apk add --no-cache npm git curl bash libpq openssl ca-certificates
COPY --from=build /lamassu-server /lamassu-server
@@ -28,6 +28,8 @@ ENTRYPOINT [ "/lamassu-server/bin/lamassu-server-entrypoint.sh" ]
FROM node:22-alpine AS build-ui
RUN apk add --no-cache npm git curl build-base python3
WORKDIR /app
COPY ["new-lamassu-admin/package.json", "new-lamassu-admin/package-lock.json", "./"]
RUN npm version --allow-same-version --git-tag-version false --commit-hooks false 1.0.0
@@ -38,7 +40,7 @@ RUN npm run build
FROM l-s-base AS l-a-s
COPY --from=build-ui /build /lamassu-server/public
COPY --from=build-ui /app/build /lamassu-server/public
RUN chmod +x /lamassu-server/bin/lamassu-admin-server-entrypoint.sh

View file

@@ -1,67 +0,0 @@
const _ = require('lodash/fp')
module.exports = {
unscoped,
cryptoScoped,
machineScoped,
scoped,
scopedValue,
all
}
function matchesValue (crypto, machine, instance) {
return instance.fieldLocator.fieldScope.crypto === crypto &&
instance.fieldLocator.fieldScope.machine === machine
}
function permutations (crypto, machine) {
return _.uniq([
[crypto, machine],
[crypto, 'global'],
['global', machine],
['global', 'global']
])
}
function fallbackValue (crypto, machine, instances) {
const notNil = _.negate(_.isNil)
const pickValue = arr => _.find(instance => matchesValue(arr[0], arr[1], instance), instances)
const fallbackRec = _.find(notNil, _.map(pickValue, permutations(crypto, machine)))
return fallbackRec && fallbackRec.fieldValue.value
}
function scopedValue (crypto, machine, fieldCode, config) {
const allScopes = config.filter(_.pathEq(['fieldLocator', 'code'], fieldCode))
return fallbackValue(crypto, machine, allScopes)
}
function generalScoped (crypto, machine, config) {
const localScopedValue = key =>
scopedValue(crypto, machine, key, config)
const keys = _.uniq(_.map(r => r.fieldLocator.code, config))
const keyedValues = keys.map(localScopedValue)
return _.zipObject(keys, keyedValues)
}
function machineScoped (machine, config) {
return generalScoped('global', machine, config)
}
function unscoped (config) {
return generalScoped('global', 'global', config)
}
function cryptoScoped (crypto, config) {
return generalScoped(crypto, 'global', config)
}
function scoped (crypto, machine, config) {
return generalScoped(crypto, machine, config)
}
function all (code, config) {
return _.uniq(_.map('fieldValue.value', _.filter(i => i.fieldLocator.code === code, config)))
}
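Note: this deleted config-manager resolved each field by trying scope pairs from most to least specific (see `permutations` above). A condensed sketch of that fallback in plain JS, equivalent to the lodash/fp version:

```js
// Fallback order for a scoped field: exact (crypto, machine) match first,
// then crypto-only, machine-only, and finally the global/global scope.
function scopedValueSketch (crypto, machine, fieldCode, config) {
  const order = [
    [crypto, machine],
    [crypto, 'global'],
    ['global', machine],
    ['global', 'global']
  ]
  for (const [c, m] of order) {
    const hit = config.find(i =>
      i.fieldLocator.code === fieldCode &&
      i.fieldLocator.fieldScope.crypto === c &&
      i.fieldLocator.fieldScope.machine === m)
    if (hit) return hit.fieldValue.value
  }
}
```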

View file

@@ -1,191 +0,0 @@
const _ = require('lodash/fp')
const db = require('../db')
const configManager = require('./config-manager')
const logger = require('../logger')
const schema = require('./lamassu-schema.json')
const REMOVED_FIELDS = ['crossRefVerificationActive', 'crossRefVerificationThreshold']
const SETTINGS_LOADER_SCHEMA_VERSION = 1
function allScopes (cryptoScopes, machineScopes) {
const scopes = []
cryptoScopes.forEach(c => {
machineScopes.forEach(m => scopes.push([c, m]))
})
return scopes
}
function allCryptoScopes (cryptos, cryptoScope) {
const cryptoScopes = []
if (cryptoScope === 'global' || cryptoScope === 'both') cryptoScopes.push('global')
if (cryptoScope === 'specific' || cryptoScope === 'both') cryptos.forEach(r => cryptoScopes.push(r))
return cryptoScopes
}
function allMachineScopes (machineList, machineScope) {
const machineScopes = []
if (machineScope === 'global' || machineScope === 'both') machineScopes.push('global')
if (machineScope === 'specific' || machineScope === 'both') machineList.forEach(r => machineScopes.push(r))
return machineScopes
}
function satisfiesRequire (config, cryptos, machineList, field, anyFields, allFields) {
const fieldCode = field.code
const scopes = allScopes(
allCryptoScopes(cryptos, field.cryptoScope),
allMachineScopes(machineList, field.machineScope)
)
return scopes.every(scope => {
const isAnyEnabled = () => _.some(refField => {
return isScopeEnabled(config, cryptos, machineList, refField, scope)
}, anyFields)
const areAllEnabled = () => _.every(refField => {
return isScopeEnabled(config, cryptos, machineList, refField, scope)
}, allFields)
const isBlank = _.isNil(configManager.scopedValue(scope[0], scope[1], fieldCode, config))
const isRequired = (_.isEmpty(anyFields) || isAnyEnabled()) &&
(_.isEmpty(allFields) || areAllEnabled())
const hasDefault = !_.isNil(_.get('default', field))
const isValid = !isRequired || !isBlank || hasDefault
return isValid
})
}
function isScopeEnabled (config, cryptos, machineList, refField, scope) {
const [cryptoScope, machineScope] = scope
const candidateCryptoScopes = cryptoScope === 'global'
? allCryptoScopes(cryptos, refField.cryptoScope)
: [cryptoScope]
const candidateMachineScopes = machineScope === 'global'
? allMachineScopes(machineList, refField.machineScope)
: [ machineScope ]
const allRefCandidateScopes = allScopes(candidateCryptoScopes, candidateMachineScopes)
const getFallbackValue = scope => configManager.scopedValue(scope[0], scope[1], refField.code, config)
const values = allRefCandidateScopes.map(getFallbackValue)
return values.some(r => r)
}
function getCryptos (config, machineList) {
const scopes = allScopes(['global'], allMachineScopes(machineList, 'both'))
const scoped = scope => configManager.scopedValue(scope[0], scope[1], 'cryptoCurrencies', config)
return scopes.reduce((acc, scope) => _.union(acc, scoped(scope)), [])
}
function getGroup (fieldCode) {
return _.find(group => _.includes(fieldCode, group.fields), schema.groups)
}
function getField (fieldCode) {
const group = getGroup(fieldCode)
return getGroupField(group, fieldCode)
}
function getGroupField (group, fieldCode) {
const field = _.find(_.matchesProperty('code', fieldCode), schema.fields)
return _.merge(_.pick(['cryptoScope', 'machineScope'], group), field)
}
// Note: We can't use machine-loader because it relies on settings-loader,
// which relies on this
function getMachines () {
return db.any('select device_id from devices')
}
function fetchMachines () {
return getMachines()
.then(machineList => machineList.map(r => r.device_id))
}
function validateFieldParameter (value, validator) {
switch (validator.code) {
case 'required':
return true // We don't validate this here
case 'min':
return value >= validator.min
case 'max':
return value <= validator.max
default:
throw new Error('Unknown validation type: ' + validator.code)
}
}
function ensureConstraints (config) {
const pickField = fieldCode => schema.fields.find(r => r.code === fieldCode)
return Promise.resolve()
.then(() => {
config.every(fieldInstance => {
const fieldCode = fieldInstance.fieldLocator.code
if (_.includes(fieldCode, REMOVED_FIELDS)) return
const field = pickField(fieldCode)
if (!field) {
logger.warn('No such field: %s, %j', fieldCode, fieldInstance.fieldLocator.fieldScope)
return
}
const fieldValue = fieldInstance.fieldValue
const isValid = field.fieldValidation
.every(validator => validateFieldParameter(fieldValue.value, validator))
if (isValid) return true
throw new Error('Invalid config value')
})
})
}
function validateRequires (config) {
return fetchMachines()
.then(machineList => {
const cryptos = getCryptos(config, machineList)
return schema.groups.filter(group => {
return group.fields.some(fieldCode => {
const field = getGroupField(group, fieldCode)
if (!field.fieldValidation.find(r => r.code === 'required')) return false
const refFieldsAny = _.map(_.partial(getField, group), field.enabledIfAny)
const refFieldsAll = _.map(_.partial(getField, group), field.enabledIfAll)
const isInvalid = !satisfiesRequire(config, cryptos, machineList, field, refFieldsAny, refFieldsAll)
return isInvalid
})
})
})
.then(arr => arr.map(r => r.code))
}
function validate (config) {
return Promise.resolve()
.then(() => ensureConstraints(config))
.then(() => validateRequires(config))
.then(arr => {
if (arr.length === 0) return config
throw new Error('Invalid configuration:' + arr)
})
}
module.exports = {
SETTINGS_LOADER_SCHEMA_VERSION,
validate,
ensureConstraints,
validateRequires
}

View file

@@ -1,230 +0,0 @@
const _ = require('lodash/fp')
const devMode = require('minimist')(process.argv.slice(2)).dev
const currencies = require('../new-admin/config/data/currencies.json')
const languageRec = require('../new-admin/config/data/languages.json')
const countries = require('../new-admin/config/data/countries.json')
const machineLoader = require('../machine-loader')
const configManager = require('./config-manager')
const db = require('../db')
const settingsLoader = require('./settings-loader')
const configValidate = require('./config-validate')
const jsonSchema = require('./lamassu-schema.json')
function fetchSchema () {
return _.cloneDeep(jsonSchema)
}
function fetchConfig () {
const sql = `select data from user_config where type=$1 and schema_version=$2
order by id desc limit 1`
return db.oneOrNone(sql, ['config', configValidate.SETTINGS_LOADER_SCHEMA_VERSION])
.then(row => row ? row.data.config : [])
}
function allScopes (cryptoScopes, machineScopes) {
const scopes = []
cryptoScopes.forEach(c => {
machineScopes.forEach(m => scopes.push([c, m]))
})
return scopes
}
function allMachineScopes (machineList, machineScope) {
const machineScopes = []
if (machineScope === 'global' || machineScope === 'both') machineScopes.push('global')
if (machineScope === 'specific' || machineScope === 'both') machineList.forEach(r => machineScopes.push(r))
return machineScopes
}
function getCryptos (config, machineList) {
const scopes = allScopes(['global'], allMachineScopes(machineList, 'both'))
const scoped = scope => configManager.scopedValue(scope[0], scope[1], 'cryptoCurrencies', config)
return scopes.reduce((acc, scope) => _.union(acc, scoped(scope)), [])
}
function getGroup (schema, fieldCode) {
return schema.groups.find(group => group.fields.find(_.isEqual(fieldCode)))
}
function getField (schema, group, fieldCode) {
if (!group) group = getGroup(schema, fieldCode)
const field = schema.fields.find(r => r.code === fieldCode)
return _.merge(_.pick(['cryptoScope', 'machineScope'], group), field)
}
const fetchMachines = () => machineLoader.getMachines()
.then(machineList => machineList.map(r => r.deviceId))
function validateCurrentConfig () {
return fetchConfig()
.then(configValidate.validateRequires)
}
const decorateEnabledIf = _.curry((schemaFields, schemaField) => {
const code = schemaField.fieldLocator.code
const field = _.find(f => f.code === code, schemaFields)
return _.assign(schemaField, {
fieldEnabledIfAny: field.enabledIfAny || [],
fieldEnabledIfAll: field.enabledIfAll || []
})
})
function fetchConfigGroup (code) {
const fieldLocatorCodeEq = _.matchesProperty(['fieldLocator', 'code'])
return Promise.all([fetchSchema(), fetchData(), fetchConfig(), fetchMachines()])
.then(([schema, data, config, machineList]) => {
const groupSchema = schema.groups.find(r => r.code === code)
if (!groupSchema) throw new Error('No such group schema: ' + code)
const schemaFields = groupSchema.fields
.map(_.curry(getField)(schema, groupSchema))
.map(f => _.assign(f, {
fieldEnabledIfAny: f.enabledIfAny || [],
fieldEnabledIfAll: f.enabledIfAll || []
}))
const candidateFields = [
schemaFields.map(_.get('requiredIf')),
schemaFields.map(_.get('enabledIfAny')),
schemaFields.map(_.get('enabledIfAll')),
groupSchema.fields,
'fiatCurrency'
]
const smush = _.flow(_.flattenDeep, _.compact, _.uniq)
const configFields = smush(candidateFields)
// Expand this to check against full schema
const fieldValidator = field => !_.isNil(_.get('fieldLocator.fieldScope.crypto', field))
const reducer = (acc, configField) => {
return acc.concat(config.filter(fieldLocatorCodeEq(configField)))
}
const reducedFields = _.filter(fieldValidator, configFields.reduce(reducer, []))
const values = _.map(decorateEnabledIf(schema.fields), reducedFields)
groupSchema.fields = undefined
groupSchema.entries = schemaFields
const selectedCryptos = _.defaultTo([], getCryptos(config, machineList))
return {
schema: groupSchema,
values,
selectedCryptos,
data
}
})
}
function massageCurrencies (currencies) {
const convert = r => ({
code: r['Alphabetic Code'],
display: r['Currency']
})
const top5Codes = ['USD', 'EUR', 'GBP', 'CAD', 'AUD']
const mapped = _.map(convert, currencies)
const codeToRec = code => _.find(_.matchesProperty('code', code), mapped)
const top5 = _.map(codeToRec, top5Codes)
const raw = _.uniqBy(_.get('code'), _.concat(top5, mapped))
return raw.filter(r => r.code !== '' && r.code[0] !== 'X' && r.display.indexOf('(') === -1)
}
const mapLanguage = lang => {
const arr = lang.split('-')
const code = arr[0]
const country = arr[1]
const langNameArr = languageRec.lang[code]
if (!langNameArr) return null
const langName = langNameArr[0]
if (!country) return {code: lang, display: langName}
return {code: lang, display: `${langName} [${country}]`}
}
const supportedLanguages = languageRec.supported
const languages = supportedLanguages.map(mapLanguage).filter(r => r)
const ALL_CRYPTOS = ['BTC', 'ETH', 'LTC', 'DASH', 'ZEC', 'BCH']
const filterAccounts = (data, isDevMode) => {
const notAllowed = ['mock-ticker', 'mock-wallet', 'mock-exchange', 'mock-sms', 'mock-id-verify', 'mock-zero-conf']
const filterOut = o => _.includes(o.code, notAllowed)
return isDevMode ? data : {...data, accounts: _.filter(a => !filterOut(a), data.accounts)}
}
function fetchData () {
return machineLoader.getMachineNames()
.then(machineList => ({
currencies: massageCurrencies(currencies),
cryptoCurrencies: [
{crypto: 'BTC', display: 'Bitcoin'},
{crypto: 'ETH', display: 'Ethereum'},
{crypto: 'LTC', display: 'Litecoin'},
{crypto: 'DASH', display: 'Dash'},
{crypto: 'ZEC', display: 'Zcash'},
{crypto: 'BCH', display: 'Bitcoin Cash'}
],
languages: languages,
countries,
accounts: [
{code: 'bitpay', display: 'Bitpay', class: 'ticker', cryptos: ['BTC', 'BCH']},
{code: 'kraken', display: 'Kraken', class: 'ticker', cryptos: ['BTC', 'ETH', 'LTC', 'DASH', 'ZEC', 'BCH']},
{code: 'bitstamp', display: 'Bitstamp', class: 'ticker', cryptos: ['BTC', 'ETH', 'LTC', 'BCH']},
{code: 'coinbase', display: 'Coinbase', class: 'ticker', cryptos: ['BTC', 'ETH', 'LTC', 'BCH', 'ZEC', 'DASH']},
{code: 'itbit', display: 'itBit', class: 'ticker', cryptos: ['BTC', 'ETH']},
{code: 'mock-ticker', display: 'Mock (Caution!)', class: 'ticker', cryptos: ALL_CRYPTOS},
{code: 'bitcoind', display: 'bitcoind', class: 'wallet', cryptos: ['BTC']},
{code: 'no-layer2', display: 'No Layer 2', class: 'layer2', cryptos: ALL_CRYPTOS},
{code: 'infura', display: 'Infura', class: 'wallet', cryptos: ['ETH']},
{code: 'geth', display: 'geth', class: 'wallet', cryptos: ['ETH']},
{code: 'zcashd', display: 'zcashd', class: 'wallet', cryptos: ['ZEC']},
{code: 'litecoind', display: 'litecoind', class: 'wallet', cryptos: ['LTC']},
{code: 'dashd', display: 'dashd', class: 'wallet', cryptos: ['DASH']},
{code: 'bitcoincashd', display: 'bitcoincashd', class: 'wallet', cryptos: ['BCH']},
{code: 'bitgo', display: 'BitGo', class: 'wallet', cryptos: ['BTC', 'ZEC', 'LTC', 'BCH', 'DASH']},
{code: 'bitstamp', display: 'Bitstamp', class: 'exchange', cryptos: ['BTC', 'ETH', 'LTC', 'BCH']},
{code: 'itbit', display: 'itBit', class: 'exchange', cryptos: ['BTC', 'ETH']},
{code: 'kraken', display: 'Kraken', class: 'exchange', cryptos: ['BTC', 'ETH', 'LTC', 'DASH', 'ZEC', 'BCH']},
{code: 'mock-wallet', display: 'Mock (Caution!)', class: 'wallet', cryptos: ALL_CRYPTOS},
{code: 'no-exchange', display: 'No exchange', class: 'exchange', cryptos: ALL_CRYPTOS},
{code: 'mock-exchange', display: 'Mock exchange', class: 'exchange', cryptos: ALL_CRYPTOS},
{code: 'mock-sms', display: 'Mock SMS', class: 'sms'},
{code: 'mock-id-verify', display: 'Mock ID verifier', class: 'idVerifier'},
{code: 'twilio', display: 'Twilio', class: 'sms'},
{code: 'mailgun', display: 'Mailgun', class: 'email'},
{code: 'all-zero-conf', display: 'Always 0-conf', class: 'zeroConf', cryptos: ['BTC', 'ZEC', 'LTC', 'DASH', 'BCH']},
{code: 'no-zero-conf', display: 'Always 1-conf', class: 'zeroConf', cryptos: ALL_CRYPTOS},
{code: 'blockcypher', display: 'Blockcypher', class: 'zeroConf', cryptos: ['BTC']},
{code: 'mock-zero-conf', display: 'Mock 0-conf', class: 'zeroConf', cryptos: ['BTC', 'ZEC', 'LTC', 'DASH', 'BCH', 'ETH']}
],
machines: machineList.map(machine => ({machine: machine.deviceId, display: machine.name}))
}))
.then((data) => {
return filterAccounts(data, devMode)
})
}
function saveConfigGroup (results) {
if (results.values.length === 0) return fetchConfigGroup(results.groupCode)
return settingsLoader.modifyConfig(results.values)
.then(() => fetchConfigGroup(results.groupCode))
}
module.exports = {
fetchConfigGroup,
saveConfigGroup,
validateCurrentConfig,
fetchConfig,
filterAccounts
}

File diff suppressed because it is too large

View file

@@ -1,250 +0,0 @@
const path = require('path')
const fs = require('fs')
const _ = require('lodash/fp')
const argv = require('minimist')(process.argv.slice(2))
const pify = require('pify')
const pgp = require('pg-promise')()
const db = require('../db')
const configValidate = require('./config-validate')
const schema = require('./lamassu-schema.json')
let settingsCache
function loadFixture () {
const fixture = argv.fixture
const machine = argv.machine
if (fixture && !machine) throw new Error('Missing --machine parameter for --fixture')
const fixturePath = fixture => path.resolve(__dirname, '..', 'test', 'fixtures', fixture + '.json')
const promise = fixture
? pify(fs.readFile)(fixturePath(fixture)).then(JSON.parse)
: Promise.resolve([])
return promise
.then(values => _.map(v => {
return (v.fieldLocator.fieldScope.machine === 'machine')
? _.set('fieldLocator.fieldScope.machine', machine, v)
: v
}, values))
}
function isEquivalentField (a, b) {
return _.isEqual(
[a.fieldLocator.code, a.fieldLocator.fieldScope],
[b.fieldLocator.code, b.fieldLocator.fieldScope]
)
}
// b overrides a
function mergeValues (a, b) {
return _.reject(r => _.isNil(r.fieldValue), _.unionWith(isEquivalentField, b, a))
}
function load (versionId) {
if (!versionId) throw new Error('versionId is required')
return Promise.all([loadConfig(versionId), loadAccounts()])
.then(([config, accounts]) => ({
config,
accounts
}))
}
function loadLatest (filterSchemaVersion = true) {
return Promise.all([loadLatestConfig(filterSchemaVersion), loadAccounts(filterSchemaVersion)])
.then(([config, accounts]) => ({
config,
accounts
}))
}
function loadConfig (versionId) {
if (argv.fixture) return loadFixture()
const sql = `select data
from user_config
where id=$1 and type=$2 and schema_version=$3
and valid`
return db.one(sql, [versionId, 'config', configValidate.SETTINGS_LOADER_SCHEMA_VERSION])
.then(row => row.data.config)
.then(configValidate.validate)
.catch(err => {
if (err.name === 'QueryResultError') {
throw new Error('No such config version: ' + versionId)
}
throw err
})
}
function loadLatestConfig (filterSchemaVersion = true) {
if (argv.fixture) return loadFixture()
const sql = `select id, valid, data
from user_config
where type=$1 ${filterSchemaVersion ? 'and schema_version=$2' : ''}
and valid
order by id desc
limit 1`
return db.oneOrNone(sql, ['config', configValidate.SETTINGS_LOADER_SCHEMA_VERSION])
.then(row => row.data.config)
.then(configValidate.validate)
.catch(err => {
if (err.name === 'QueryResultError') {
throw new Error('lamassu-server is not configured')
}
throw err
})
}
function loadRecentConfig () {
if (argv.fixture) return loadFixture()
const sql = `select id, data
from user_config
where type=$1 and schema_version=$2
order by id desc
limit 1`
return db.one(sql, ['config', configValidate.SETTINGS_LOADER_SCHEMA_VERSION])
.then(row => row.data.config)
}
function loadAccounts (filterSchemaVersion = true) {
const toFields = fieldArr => _.fromPairs(_.map(r => [r.code, r.value], fieldArr))
const toPairs = r => [r.code, toFields(r.fields)]
return db.oneOrNone(`select data from user_config where type=$1 ${filterSchemaVersion ? 'and schema_version=$2' : ''}`, ['accounts', configValidate.SETTINGS_LOADER_SCHEMA_VERSION])
.then(function (data) {
if (!data) return {}
return _.fromPairs(_.map(toPairs, data.data.accounts))
})
}
function settings () {
return settingsCache
}
function save (config) {
const sql = 'insert into user_config (type, data, valid) values ($1, $2, $3)'
return configValidate.validate(config)
.then(() => db.none(sql, ['config', {config}, true]))
.catch(() => db.none(sql, ['config', {config}, false]))
}
function configAddField (scope, fieldCode, fieldType, fieldClass, value) {
return {
fieldLocator: {
fieldScope: {
crypto: scope.crypto,
machine: scope.machine
},
code: fieldCode,
fieldType,
fieldClass
},
fieldValue: {fieldType, value}
}
}
function configDeleteField (scope, fieldCode) {
return {
fieldLocator: {
fieldScope: {
crypto: scope.crypto,
machine: scope.machine
},
code: fieldCode
},
fieldValue: null
}
}
function populateScopes (schema) {
const scopeLookup = {}
_.forEach(r => {
const scope = {
cryptoScope: r.cryptoScope,
machineScope: r.machineScope
}
_.forEach(field => { scopeLookup[field] = scope }, r.fields)
}, schema.groups)
return _.map(r => _.assign(scopeLookup[r.code], r), schema.fields)
}
function cryptoDefaultOverride (cryptoCode, code, defaultValue) {
if (cryptoCode === 'ETH' && code === 'zeroConf') {
return 'no-zero-conf'
}
return defaultValue
}
function cryptoCodeDefaults (schema, cryptoCode) {
const scope = {crypto: cryptoCode, machine: 'global'}
const schemaEntries = populateScopes(schema)
const hasCryptoSpecificDefault = r => r.cryptoScope === 'specific' && !_.isNil(r.default)
const cryptoSpecificFields = _.filter(hasCryptoSpecificDefault, schemaEntries)
return _.map(r => {
const defaultValue = cryptoDefaultOverride(cryptoCode, r.code, r.default)
return configAddField(scope, r.code, r.fieldType, r.fieldClass, defaultValue)
}, cryptoSpecificFields)
}
const uniqCompact = _.flow(_.compact, _.uniq)
function addCryptoDefaults (oldConfig, newFields) {
const cryptoCodeEntries = _.filter(v => v.fieldLocator.code === 'cryptoCurrencies', newFields)
const cryptoCodes = _.flatMap(_.get('fieldValue.value'), cryptoCodeEntries)
const uniqueCryptoCodes = uniqCompact(cryptoCodes)
const mapDefaults = cryptoCode => cryptoCodeDefaults(schema, cryptoCode)
const defaults = _.flatMap(mapDefaults, uniqueCryptoCodes)
return mergeValues(defaults, oldConfig)
}
function modifyConfig (newFields) {
const TransactionMode = pgp.txMode.TransactionMode
const isolationLevel = pgp.txMode.isolationLevel
const mode = new TransactionMode({ tiLevel: isolationLevel.serializable })
function transaction (t) {
return loadRecentConfig()
.then(oldConfig => {
const oldConfigWithDefaults = addCryptoDefaults(oldConfig, newFields)
const doSave = _.flow(mergeValues, save)
return doSave(oldConfigWithDefaults, newFields)
})
}
return db.tx({ mode }, transaction)
}
module.exports = {
settings,
loadConfig,
loadRecentConfig,
load,
loadLatest,
loadLatestConfig,
save,
loadFixture,
mergeValues,
modifyConfig,
configAddField,
configDeleteField
}
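Note: the merge rule in this deleted loader is subtle: `_.unionWith(isEquivalentField, b, a)` keeps the first entry seen per field/scope pair, so `b` overrides `a`, and the trailing `_.reject` drops entries with a nil `fieldValue`, which is exactly how `configDeleteField` removes a field. A worked example, using the data shapes above:

```js
// Worked example of mergeValues: the deletion marker from b wins over
// a's value, and is then rejected for having a nil fieldValue.
const scope = { crypto: 'global', machine: 'global' }
const a = [{ fieldLocator: { code: 'fee', fieldScope: scope }, fieldValue: { value: 1 } }]
const b = [{ fieldLocator: { code: 'fee', fieldScope: scope }, fieldValue: null }]

mergeValues(a, b) // => []  ('fee' is gone from the merged config)
```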

View file

@@ -1,11 +1,9 @@
const fs = require('fs')
const http = require('http')
const https = require('https')
const argv = require('minimist')(process.argv.slice(2))
require('./environment-helper')
const { asyncLocalStorage, defaultStore } = require('./async-storage')
const routes = require('./routes')
const { loadRoutes } = require('./routes')
const logger = require('./logger')
const poller = require('./poller')
const settingsLoader = require('./new-settings-loader')
@@ -16,15 +14,12 @@ const ofacUpdate = require('./ofac/update')
const KEY_PATH = process.env.KEY_PATH
const CERT_PATH = process.env.CERT_PATH
const devMode = argv.dev
const CA_PATH = process.env.CA_PATH
const version = require('../package.json').version
logger.info('Version: %s', version)
function run () {
const store = defaultStore()
return asyncLocalStorage.run(store, () => {
return new Promise((resolve, reject) => {
let count = 0
let handler
@@ -40,7 +35,7 @@
.then(settings => {
clearInterval(handler)
return loadSanctions(settings)
.then(() => startServer(settings))
.then(startServer)
.then(resolve)
})
.catch(errorHandler)
@@ -49,7 +44,6 @@
handler = setInterval(runner, 10000)
runner()
})
})
}
function loadSanctions (settings) {
@@ -68,30 +62,27 @@
})
}
function startServer (settings) {
return Promise.resolve()
.then(() => {
poller.setup(['public'])
async function startServer () {
const app = await loadRoutes()
poller.setup()
const httpsServerOptions = {
key: fs.readFileSync(KEY_PATH),
cert: fs.readFileSync(CERT_PATH),
ca: fs.readFileSync(CA_PATH),
requestCert: true,
rejectUnauthorized: false
}
const server = devMode
? http.createServer(routes.app)
: https.createServer(httpsServerOptions, routes.app)
const server = https.createServer(httpsServerOptions, app)
const port = argv.port || 3000
if (devMode) logger.info('In dev mode')
server.listen(port, () => {
logger.info('lamassu-server listening on port ' +
port + ' ' + (devMode ? '(http)' : '(https)'))
})
})
await new Promise((resolve) =>
server.listen({ port }, resolve),
)
logger.info(`lamassu-server listening on port ${port}`)
}
module.exports = { run }

View file

@ -1,11 +0,0 @@
const { AsyncLocalStorage } = require('async_hooks')
const asyncLocalStorage = new AsyncLocalStorage()
const defaultStore = () => {
const store = new Map()
store.set('schema', 'public')
store.set('defaultSchema', 'ERROR_SCHEMA')
return store
}
module.exports = { asyncLocalStorage, defaultStore }
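Note: this deleted module was the heart of the AsyncLocalStorage plumbing: entry points wrapped their work in `asyncLocalStorage.run(store, ...)` so the db layer could read the active schema without threading parameters. A self-contained sketch of the pattern, for context:

```js
// Context propagation via AsyncLocalStorage: any callee in the async
// call chain started by run() sees the same store, with no arguments.
const { AsyncLocalStorage } = require('async_hooks')
const als = new AsyncLocalStorage()

const currentSchema = () => als.getStore()?.get('schema')

als.run(new Map([['schema', 'public']]), () => {
  setImmediate(() => console.log(currentSchema())) // prints 'public'
})
```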

View file

@@ -50,7 +50,7 @@ const BINARIES = {
defaultUrlHash: 'd89c2afd78183f3ee815adcccdff02098be0c982633889e7b1e9c9656fbef219',
defaultDir: 'dashcore-18.1.0/bin',
url: 'https://github.com/dashpay/dash/releases/download/v21.1.1/dashcore-21.1.1-x86_64-linux-gnu.tar.gz',
dir: 'dashcore-21.1.1/bin'
dir: 'dashcore-21.1.1/bin',
urlHash: 'c3157d4a82a3cb7c904a68e827bd1e629854fefcc0dcaf1de4343a810a190bf5',
},
LTC: {

View file

@@ -52,6 +52,9 @@ const mapValuesWithKey = _.mapValues.convert({cap: false})
function convertBigNumFields (obj) {
const convert = (value, key) => {
if (_.includes(key, [ 'cryptoAtoms', 'receivedCryptoAtoms', 'fiat', 'fixedFee' ])) {
// BACKWARDS_COMPATIBILITY 10.1
// bills before 10.2 don't have fixedFee
if (key === 'fixedFee' && !value) return new BN(0).toString()
return value.toString()
}

View file

@@ -21,7 +21,7 @@ function createCashboxBatch (deviceId, cashboxCount) {
return db.tx(t => {
const batchId = uuid.v4()
const q1 = t.none(sql, [batchId, deviceId])
const q1 = t.one(sql, [batchId, deviceId])
const q2 = t.none(sql2, [batchId, deviceId])
const q3 = t.none(sql3, [batchId, deviceId])
return t.batch([q1, q2, q3])
@@ -133,7 +133,7 @@ function getMachineUnbatchedBills (deviceId) {
function getBatchById (id) {
const sql = `
SELECT cb.id, cb.device_id, cb.created, cb.operation_type, cb.bill_count_override, cb.performed_by, json_agg(b.*) AS bills
FROM cashbox_batches AS cb
FROM cash_unit_operation AS cb
LEFT JOIN bills AS b ON cb.id = b.cashbox_batch_id
WHERE cb.id = $1
GROUP BY cb.id

View file

@@ -38,12 +38,12 @@ const settings = {
wallets_LTC_exchange: 'mock-exchange',
wallets_LTC_zeroConf: 'mock-zero-conf',
wallets_DASH_active: true,
wallets_DASH_ticker: 'coinbase',
wallets_DASH_ticker: 'binance',
wallets_DASH_wallet: 'mock-wallet',
wallets_DASH_exchange: 'mock-exchange',
wallets_DASH_zeroConf: 'mock-zero-conf',
wallets_ZEC_active: true,
wallets_ZEC_ticker: 'coinbase',
wallets_ZEC_ticker: 'binance',
wallets_ZEC_wallet: 'mock-wallet',
wallets_ZEC_exchange: 'mock-exchange',
wallets_ZEC_zeroConf: 'mock-zero-conf',

View file

@@ -1,8 +0,0 @@
const { asyncLocalStorage, defaultStore } = require('./async-storage')
const computeSchema = (req, res, next) => {
const store = defaultStore()
return asyncLocalStorage.run(store, () => next())
}
module.exports = computeSchema

View file

@@ -1,477 +0,0 @@
const _ = require('lodash/fp')
const uuid = require('uuid')
const { COINS } = require('@lamassu/coins')
const { scopedValue } = require('./admin/config-manager')
const GLOBAL = 'global'
const ALL_CRYPTOS = _.values(COINS).sort()
const ALL_CRYPTOS_STRING = 'ALL_COINS'
const ALL_MACHINES = 'ALL_MACHINES'
const GLOBAL_SCOPE = {
crypto: ALL_CRYPTOS,
machine: GLOBAL
}
function getConfigFields (codes, config) {
const stringfiedGlobalScope = JSON.stringify(GLOBAL_SCOPE)
const fields = config
.filter(i => codes.includes(i.fieldLocator.code))
.map(f => {
const crypto = Array.isArray(f.fieldLocator.fieldScope.crypto)
? f.fieldLocator.fieldScope.crypto.sort()
: f.fieldLocator.fieldScope.crypto === GLOBAL
? ALL_CRYPTOS
: [f.fieldLocator.fieldScope.crypto]
const machine = f.fieldLocator.fieldScope.machine
return {
code: f.fieldLocator.code,
scope: {
crypto,
machine
},
value: f.fieldValue.value
}
})
.filter(f => f.value != null)
const grouped = _.chain(fields)
.groupBy(f => JSON.stringify(f.scope))
.value()
return {
global: grouped[stringfiedGlobalScope] || [],
scoped:
_.entries(
_.chain(grouped)
.omit([stringfiedGlobalScope])
.value()
).map(f => {
const fallbackValues =
_.difference(codes, f[1].map(v => v.code))
.map(v => ({
code: v,
scope: JSON.parse(f[0]),
value: scopedValue(f[0].crypto, f[0].machine, v, config)
}))
.filter(f => f.value != null)
return {
scope: JSON.parse(f[0]),
values: f[1].concat(fallbackValues)
}
}) || []
}
}
function migrateCommissions (config) {
const areArraysEquals = (arr1, arr2) => Array.isArray(arr1) && Array.isArray(arr2) && _.isEmpty(_.xor(arr1, arr2))
const getMachine = _.get('scope.machine')
const getCrypto = _.get('scope.crypto')
const flattenCoins = _.compose(_.flatten, _.map(getCrypto))
const diffAllCryptos = _.compose(_.difference(ALL_CRYPTOS))
const codes = {
minimumTx: 'minimumTx',
cashInFee: 'fixedFee',
cashInCommission: 'cashIn',
cashOutCommission: 'cashOut'
}
const { global, scoped } = getConfigFields(_.keys(codes), config)
const defaultCashOutCommissions = { code: 'cashOutCommission', value: 0, scope: global[0].scope }
const isCashOutDisabled =
_.isEmpty(_.filter(commissionElement => commissionElement.code === 'cashOutCommission', global))
const globalWithDefaults =
isCashOutDisabled ? _.concat(global, defaultCashOutCommissions) : global
const machineAndCryptoScoped = scoped.filter(
f => f.scope.machine !== GLOBAL_SCOPE.machine && f.scope.crypto.length === 1
)
const cryptoScoped = scoped.filter(
f =>
f.scope.machine === GLOBAL_SCOPE.machine &&
!areArraysEquals(f.scope.crypto, GLOBAL_SCOPE.crypto)
)
const machineScoped = scoped.filter(
f =>
f.scope.machine !== GLOBAL_SCOPE.machine &&
areArraysEquals(f.scope.crypto, GLOBAL_SCOPE.crypto)
)
const withCryptoScoped = machineAndCryptoScoped.concat(cryptoScoped)
const filteredMachineScoped = _.map(it => {
const filterByMachine = _.filter(_.includes(getMachine(it)))
const unrepeatedCryptos = _.compose(
diffAllCryptos,
flattenCoins,
filterByMachine
)(withCryptoScoped)
return _.set('scope.crypto', unrepeatedCryptos)(it)
})(machineScoped)
const allCommissionsOverrides = withCryptoScoped.concat(filteredMachineScoped)
return {
..._.fromPairs(globalWithDefaults.map(f => [`commissions_${codes[f.code]}`, f.value])),
...(allCommissionsOverrides.length > 0 && {
commissions_overrides: allCommissionsOverrides.map(s => ({
..._.fromPairs(s.values.map(f => [codes[f.code], f.value])),
machine: s.scope.machine === GLOBAL ? ALL_MACHINES : s.scope.machine,
cryptoCurrencies: areArraysEquals(s.scope.crypto, ALL_CRYPTOS) ? [ALL_CRYPTOS_STRING] : s.scope.crypto,
id: uuid.v4()
}))
})
}
}
function migrateLocales (config) {
const codes = {
country: 'country',
fiatCurrency: 'fiatCurrency',
machineLanguages: 'languages',
cryptoCurrencies: 'cryptoCurrencies',
timezone: 'timezone'
}
const { global, scoped } = getConfigFields(_.keys(codes), config)
return {
..._.fromPairs(global.map(f => [`locale_${codes[f.code]}`, f.value])),
...(scoped.length > 0 && {
locale_overrides: scoped.map(s => ({
..._.fromPairs(s.values.map(f => [codes[f.code], f.value])),
machine: s.scope.machine,
id: uuid.v4()
}))
})
}
}
function migrateCashOut (config) {
const globalCodes = {
fudgeFactorActive: 'fudgeFactorActive'
}
const scopedCodes = {
cashOutEnabled: 'active',
topCashOutDenomination: 'top',
bottomCashOutDenomination: 'bottom',
zeroConfLimit: 'zeroConfLimit'
}
const { global } = getConfigFields(_.keys(globalCodes), config)
const { scoped } = getConfigFields(_.keys(scopedCodes), config)
return {
..._.fromPairs(
global.map(f => [`cashOut_${globalCodes[f.code]}`, f.value])
),
..._.fromPairs(
_.flatten(
scoped.map(s => {
const fields = s.values.map(f => [
`cashOut_${f.scope.machine}_${scopedCodes[f.code]}`,
f.value
])
fields.push([`cashOut_${s.scope.machine}_id`, s.scope.machine])
return fields
})
)
)
}
}
function migrateNotifications (config) {
const globalCodes = {
notificationsEmailEnabled: 'email_active',
notificationsSMSEnabled: 'sms_active',
cashOutCassette1AlertThreshold: 'fiatBalanceCassette1',
cashOutCassette2AlertThreshold: 'fiatBalanceCassette2',
cryptoAlertThreshold: 'cryptoLowBalance'
}
const machineScopedCodes = {
cashOutCassette1AlertThreshold: 'cassette1',
cashOutCassette2AlertThreshold: 'cassette2'
}
const cryptoScopedCodes = {
cryptoAlertThreshold: 'lowBalance'
}
const { global } = getConfigFields(_.keys(globalCodes), config)
const machineScoped = getConfigFields(
_.keys(machineScopedCodes),
config
).scoped.filter(f => f.scope.crypto === GLOBAL && f.scope.machine !== GLOBAL)
const cryptoScoped = getConfigFields(
_.keys(cryptoScopedCodes),
config
).scoped.filter(f => f.scope.crypto !== GLOBAL && f.scope.machine === GLOBAL)
return {
..._.fromPairs(
global.map(f => [`notifications_${globalCodes[f.code]}`, f.value])
),
notifications_email_balance: true,
notifications_email_transactions: true,
notifications_email_compliance: true,
notifications_email_errors: true,
notifications_sms_balance: true,
notifications_sms_transactions: true,
notifications_sms_compliance: true,
notifications_sms_errors: true,
...(machineScoped.length > 0 && {
notifications_fiatBalanceOverrides: machineScoped.map(s => ({
..._.fromPairs(
s.values.map(f => [machineScopedCodes[f.code], f.value])
),
machine: s.scope.machine,
id: uuid.v4()
}))
}),
...(cryptoScoped.length > 0 && {
notifications_cryptoBalanceOverrides: cryptoScoped.map(s => ({
..._.fromPairs(s.values.map(f => [cryptoScopedCodes[f.code], f.value])),
cryptoCurrency: s.scope.crypto,
id: uuid.v4()
}))
})
}
}
function migrateWallet (config) {
const codes = {
ticker: 'ticker',
wallet: 'wallet',
exchange: 'exchange',
zeroConf: 'zeroConf'
}
const { scoped } = getConfigFields(_.keys(codes), config)
return {
...(scoped.length > 0 &&
_.fromPairs(
_.flatten(
scoped.map(s =>
s.values.map(f => [
`wallets_${f.scope.crypto}_${codes[f.code]}`,
f.value
])
)
)
))
}
}
function migrateOperatorInfo (config) {
const codes = {
operatorInfoActive: 'active',
operatorInfoEmail: 'email',
operatorInfoName: 'name',
operatorInfoPhone: 'phone',
operatorInfoWebsite: 'website',
operatorInfoCompanyNumber: 'companyNumber'
}
const { global } = getConfigFields(_.keys(codes), config)
return {
..._.fromPairs(global.map(f => [`operatorInfo_${codes[f.code]}`, f.value]))
}
}
function migrateReceiptPrinting (config) {
const codes = {
receiptPrintingActive: 'active'
}
const { global } = getConfigFields(_.keys(codes), config)
return {
..._.fromPairs(global.map(f => [`receipt_${codes[f.code]}`, f.value])),
receipt_operatorWebsite: true,
receipt_operatorEmail: true,
receipt_operatorPhone: true,
receipt_companyRegistration: true,
receipt_machineLocation: true,
receipt_customerNameOrPhoneNumber: true,
receipt_exchangeRate: true,
receipt_addressQRCode: true
}
}
function migrateCoinATMRadar (config) {
const codes = ['coinAtmRadarActive', 'coinAtmRadarShowRates']
const { global } = getConfigFields(codes, config)
const coinAtmRadar = _.fromPairs(global.map(f => [f.code, f.value]))
return {
coinAtmRadar_active: coinAtmRadar.coinAtmRadarActive,
coinAtmRadar_commissions: coinAtmRadar.coinAtmRadarShowRates,
coinAtmRadar_limitsAndVerification: coinAtmRadar.coinAtmRadarShowRates
}
}
function migrateTermsAndConditions (config) {
const codes = {
termsScreenActive: 'active',
termsScreenTitle: 'title',
termsScreenText: 'text',
termsAcceptButtonText: 'acceptButtonText',
termsCancelButtonText: 'cancelButtonText'
}
const { global } = getConfigFields(_.keys(codes), config)
return {
..._.fromPairs(
global.map(f => [`termsConditions_${codes[f.code]}`, f.value])
)
}
}
function migrateComplianceTriggers (config) {
const suspensionDays = 1
const triggerTypes = {
amount: 'txAmount',
velocity: 'txVelocity',
volume: 'txVolume',
consecutiveDays: 'consecutiveDays'
}
const requirements = {
sms: 'sms',
idData: 'idCardData',
idPhoto: 'idCardPhoto',
facePhoto: 'facephoto',
sanctions: 'sanctions',
suspend: 'suspend'
}
function createTrigger (
requirement,
threshold,
suspensionDays
) {
const triggerConfig = {
id: uuid.v4(),
direction: 'both',
threshold,
thresholdDays: 1,
triggerType: triggerTypes.volume,
requirement
}
if (requirement !== 'suspend') return triggerConfig
return _.assign(triggerConfig, { suspensionDays })
}
const codes = [
'smsVerificationActive',
'smsVerificationThreshold',
'idCardDataVerificationActive',
'idCardDataVerificationThreshold',
'idCardPhotoVerificationActive',
'idCardPhotoVerificationThreshold',
'frontCameraVerificationActive',
'frontCameraVerificationThreshold',
'sanctionsVerificationActive',
'sanctionsVerificationThreshold',
'hardLimitVerificationActive',
'hardLimitVerificationThreshold',
'rejectAddressReuseActive'
]
const global = _.fromPairs(
getConfigFields(codes, config).global.map(f => [f.code, f.value])
)
const triggers = []
if (global.smsVerificationActive && _.isNumber(global.smsVerificationThreshold)) {
triggers.push(
createTrigger(requirements.sms, global.smsVerificationThreshold)
)
}
if (global.idCardDataVerificationActive && _.isNumber(global.idCardDataVerificationThreshold)) {
triggers.push(
createTrigger(requirements.idData, global.idCardDataVerificationThreshold)
)
}
if (global.idCardPhotoVerificationActive && _.isNumber(global.idCardPhotoVerificationThreshold)) {
triggers.push(
createTrigger(requirements.idPhoto, global.idCardPhotoVerificationThreshold)
)
}
if (global.frontCameraVerificationActive && _.isNumber(global.frontCameraVerificationThreshold)) {
triggers.push(
createTrigger(requirements.facePhoto, global.frontCameraVerificationThreshold)
)
}
if (global.sanctionsVerificationActive && _.isNumber(global.sanctionsVerificationThreshold)) {
triggers.push(
createTrigger(requirements.sanctions, global.sanctionsVerificationThreshold)
)
}
if (global.hardLimitVerificationActive && _.isNumber(global.hardLimitVerificationThreshold)) {
triggers.push(
createTrigger(requirements.suspend, global.hardLimitVerificationThreshold, suspensionDays)
)
}
return {
triggers,
['compliance_rejectAddressReuse']: global.rejectAddressReuseActive
}
}
function migrateConfig (config) {
return {
...migrateCommissions(config),
...migrateLocales(config),
...migrateCashOut(config),
...migrateNotifications(config),
...migrateWallet(config),
...migrateOperatorInfo(config),
...migrateReceiptPrinting(config),
...migrateCoinATMRadar(config),
...migrateTermsAndConditions(config),
...migrateComplianceTriggers(config)
}
}
function migrateAccounts (accounts) {
const accountArray = [
'bitgo',
'bitstamp',
'blockcypher',
'infura',
'itbit',
'kraken',
'mailgun',
'twilio'
]
const services = _.keyBy('code', accounts)
const serviceFields = _.mapValues(({ fields }) => _.keyBy('code', fields))(services)
const allAccounts = _.mapValues(_.mapValues(_.get('value')))(serviceFields)
return _.pick(accountArray)(allAccounts)
}
function migrate (config, accounts) {
return {
config: migrateConfig(config),
accounts: migrateAccounts(accounts)
}
}
module.exports = { migrate }

View file

@@ -5,81 +5,12 @@ const _ = require('lodash/fp')
const { PSQL_URL } = require('./constants')
const logger = require('./logger')
const eventBus = require('./event-bus')
const { asyncLocalStorage, defaultStore } = require('./async-storage')
const DATABASE_NOT_REACHABLE = 'Database not reachable.'
const stripDefaultDbFuncs = dbCtx => {
return {
ctx: dbCtx.ctx,
query: dbCtx.$query,
result: dbCtx.$result,
many: dbCtx.$many,
oneOrNone: dbCtx.$oneOrNone,
one: dbCtx.$one,
none: dbCtx.$none,
any: dbCtx.$any,
manyOrNone: dbCtx.$manyOrNone,
tx: dbCtx.$tx,
task: dbCtx.$task,
batch: dbCtx.batch,
multi: dbCtx.$multi,
connect: dbCtx.connect
}
}
const _tx = (obj, opts, cb) => {
return obj.tx(opts, t => {
return cb(stripDefaultDbFuncs(t))
})
}
const _task = (obj, opts, cb) => {
return obj.task(opts, t => {
return cb(stripDefaultDbFuncs(t))
})
}
const getSchema = () => {
const store = asyncLocalStorage.getStore() ?? defaultStore()
return asyncLocalStorage.run(store, () => store.get('schema'))
}
const getDefaultSchema = () => 'ERROR_SCHEMA'
const searchPathWrapper = (t, cb) => {
return t.none('SET search_path TO $1:name', [getSchema()])
.then(cb.bind(t, t))
.catch(logger.error)
.finally(() => t.none('SET search_path TO $1:name', [getDefaultSchema()]))
}
const pgp = Pgp({
pgNative: true,
schema: 'ERROR_SCHEMA',
extend (obj, dbContext) {
obj.__taskEx = function (cb, throwOnError = true) {
const args = pgp.utils.taskArgs(arguments)
const schema = getSchema()
if (!schema && throwOnError) {
return Promise.reject(new Error('No schema selected, cannot complete query'))
} else if (!schema) {
return Promise.resolve('No schema selected, cannot complete query')
}
return obj.task.call(this, args.options, t => searchPathWrapper(t, cb))
}
obj.$query = (query, values, qrm, throwOnError) => obj.__taskEx(t => t.query(query, values, qrm), throwOnError)
obj.$result = (query, variables, cb, thisArg) => obj.__taskEx(t => t.result(query, variables, cb, thisArg))
obj.$many = (query, variables) => obj.__taskEx(t => t.many(query, variables))
obj.$manyOrNone = (query, variables) => obj.__taskEx(t => t.manyOrNone(query, variables))
obj.$oneOrNone = (query, variables) => obj.__taskEx(t => t.oneOrNone(query, variables))
obj.$one = (query, variables) => obj.__taskEx(t => t.one(query, variables))
obj.$none = (query, variables) => obj.__taskEx(t => t.none(query, variables))
obj.$any = (query, variables) => obj.__taskEx(t => t.any(query, variables))
obj.$multi = (query, variables) => obj.__taskEx(t => t.multi(query, variables))
// when opts is not defined "cb" occupies the "opts" spot of the arguments
obj.$tx = (opts, cb) => typeof opts === 'function' ? _tx(obj, {}, opts) : _tx(obj, opts, cb)
obj.$task = (opts, cb) => typeof opts === 'function' ? _task(obj, {}, opts) : _task(obj, opts, cb)
},
schema: 'public',
error: (err, e) => {
if (e.cn) logger.error(DATABASE_NOT_REACHABLE)
else if (e.query) {
@@ -90,7 +21,7 @@ const pgp = Pgp({
}
})
const db = stripDefaultDbFuncs(pgp(PSQL_URL))
const db = pgp(PSQL_URL)
eventBus.subscribe('log', args => {
if (process.env.SKIP_SERVER_LOGS) return
@@ -104,14 +35,10 @@ eventBus.subscribe('log', args => {
const sql = `insert into server_logs
(id, device_id, message, log_level, meta) values ($1, $2, $3, $4, $5) returning *`
// need to set AsyncLocalStorage (ALS) for this function as well
// because this module is imported before ALS is set up on app.js
const store = defaultStore()
asyncLocalStorage.run(store, () => {
db.one(sql, [uuid.v4(), '', msgToSave, level, meta])
.then(_.mapKeys(_.camelCase))
.catch(_.noop)
})
})
module.exports = db
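Note: the per-task `SET search_path` juggling removed above is replaced by pg-promise's initialization-time `schema` option (the `schema: 'public'` line added to the `Pgp({...})` call), which pins the search path on every new connection. A minimal sketch of that option in isolation:

```js
// The schema initialization option sets the search_path for every new
// connection, replacing the removed $-prefixed task wrappers.
const Pgp = require('pg-promise')
const pgp = Pgp({ schema: 'public' })
const db = pgp(process.env.PSQL_URL)

db.one('select 1 as ok').then(r => console.log(r.ok))
```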

View file

@@ -3,7 +3,7 @@ const ph = require('./plugin-helper')
function sendMessage (settings, rec) {
return Promise.resolve()
.then(() => {
const pluginCode = settings.config.notifications_thirdParty_email
const pluginCode = settings.config.notifications_thirdParty_email || 'mailgun'
const plugin = ph.load(ph.EMAIL, pluginCode)
const account = settings.accounts[pluginCode]
@@ -14,7 +14,7 @@
function sendCustomerMessage (settings, rec) {
return Promise.resolve()
.then(() => {
const pluginCode = settings.config.notifications_thirdParty_email
const pluginCode = settings.config.notifications_thirdParty_email || 'mailgun'
const plugin = ph.load(ph.EMAIL, pluginCode)
const account = settings.accounts[pluginCode]

View file

@@ -1,27 +1,27 @@
const logger = require('../logger')
const https = require('https')
const { ApolloServer } = require('apollo-server-express')
const { ApolloServer } = require('@apollo/server')
const devMode = !!require('minimist')(process.argv.slice(2)).dev
module.exports = new ApolloServer({
typeDefs: require('./types'),
resolvers: require('./resolvers'),
context: ({ req, res }) => ({
const context = ({ req, res }) => ({
deviceId: req.deviceId, /* lib/middlewares/populateDeviceId.js */
deviceName: req.deviceName, /* lib/middlewares/authorize.js */
operatorId: res.locals.operatorId, /* lib/middlewares/operatorId.js */
pid: req.query.pid,
settings: req.settings, /* lib/middlewares/populateSettings.js */
}),
uploads: false,
playground: false,
})
const graphQLServer = new ApolloServer({
typeDefs: require('./types'),
resolvers: require('./resolvers'),
introspection: false,
formatError: error => {
logger.error(error)
return error
},
debug: devMode,
includeStacktraceInErrorResponses: devMode,
logger
})
module.exports = { graphQLServer, context }
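Note: Apollo Server 4 takes neither a `context` function in its constructor nor an Express app, which is why `context` is now exported alongside the server; the two get joined at mount time via `expressMiddleware` (presumably where the routes are loaded, not shown here). A minimal sketch of that wiring, assuming an existing Express `app`:

```js
// Mounting an Apollo Server 4 instance on Express; graphQLServer and
// context are the exports above, the mount path is assumed.
const express = require('express')
const { expressMiddleware } = require('@apollo/server/express4')

async function mountGraphql (app, graphQLServer, context) {
  await graphQLServer.start() // required before expressMiddleware
  app.use('/graphql', express.json(), expressMiddleware(graphQLServer, { context }))
}
```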

View file

@@ -1,4 +1,5 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
module.exports = gql`
type Coin {
cryptoCode: String!

View file

@@ -21,7 +21,18 @@ const logger = new winston.Logger({
})
],
rewriters: [
(...[,, meta]) => meta instanceof Error ? { message: meta.message, stack: meta.stack, meta } : meta
(...[,, meta]) => {
if (meta.isAxiosError) {
return {
message: meta.message,
status: meta.response?.status,
data: meta.response?.data,
url: meta.config?.url,
method: meta.config?.method
}
}
return meta instanceof Error ? { message: meta.message, stack: meta.stack, meta } : meta
}
],
exitOnError: false
})

View file

@@ -13,7 +13,7 @@ const dbm = require('./postgresql_interface')
const configManager = require('./new-config-manager')
const notifierUtils = require('./notifier/utils')
const notifierQueries = require('./notifier/queries')
const { ApolloError } = require('apollo-server-errors');
const { GraphQLError } = require('graphql');
const { loadLatestConfig } = require('./new-settings-loader')
const logger = require('./logger')
@@ -154,7 +154,7 @@ function getMachine (machineId, config) {
const sql = `${MACHINE_WITH_CALCULATED_FIELD_SQL} WHERE d.device_id = $1`
const queryMachine = db.oneOrNone(sql, [machineId]).then(r => {
if (r === null) throw new ApolloError('Resource doesn\'t exist', 'NOT_FOUND')
if (r === null) throw new GraphQLError('Resource doesn\'t exist', { extensions: { code: 'NOT_FOUND' } })
else return toMachineObject(r)
})

View file

@@ -1,10 +0,0 @@
const { asyncLocalStorage, defaultStore } = require('../async-storage')
const computeSchema = (req, res, next) => {
const store = defaultStore()
asyncLocalStorage.run(store, () => {
next()
})
}
module.exports = computeSchema

View file

@@ -1,8 +1,5 @@
const _ = require('lodash/fp')
const crypto = require('crypto')
const logger = require('../logger')
const IS_STRESS_TESTING = process.env.LAMASSU_STRESS_TESTING === "YES"
function sha256 (buf) {
@@ -14,9 +11,8 @@
}
const populateDeviceId = function (req, res, next) {
let deviceId = _.isFunction(req.connection.getPeerCertificate)
? sha256(req.connection.getPeerCertificate()?.raw)
: null
const peerCert = req.socket.getPeerCertificate ? req.socket.getPeerCertificate() : null
let deviceId = peerCert?.raw ? sha256(peerCert.raw) : null
if (!deviceId && IS_STRESS_TESTING)
deviceId = req.headers.device_id
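Note: `req.connection` is deprecated in current Node, so the middleware now reads the client certificate from `req.socket`; the device ID is the SHA-256 of the certificate's raw DER bytes (hex digest assumed), with the `device_id` header honored only under stress testing. A condensed sketch of the lookup:

```js
// Derive a stable device ID from the TLS client certificate; requires
// the HTTPS server to be created with requestCert: true.
const crypto = require('crypto')

function deviceIdFromRequest (req) {
  const peerCert = req.socket.getPeerCertificate ? req.socket.getPeerCertificate() : null
  if (!peerCert?.raw) return null // no client certificate presented
  return crypto.createHash('sha256').update(peerCert.raw).digest('hex')
}
```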

View file

@@ -4,22 +4,24 @@ const path = require('path')
const express = require('express')
const https = require('https')
const serveStatic = require('serve-static')
const cors = require('cors')
const helmet = require('helmet')
const nocache = require('nocache')
const cookieParser = require('cookie-parser')
const { graphqlUploadExpress } = require('graphql-upload')
const { ApolloServer } = require('apollo-server-express')
const { ApolloServer } = require('@apollo/server')
const { expressMiddleware } = require('@apollo/server/express4')
const { ApolloServerPluginLandingPageDisabled } = require('@apollo/server/plugin/disabled')
const { ApolloServerPluginLandingPageLocalDefault } = require('@apollo/server/plugin/landingPage/default')
const { mergeResolvers } = require('@graphql-tools/merge')
const { makeExecutableSchema } = require('@graphql-tools/schema')
require('../environment-helper')
const { asyncLocalStorage, defaultStore } = require('../async-storage')
const logger = require('../logger')
const exchange = require('../exchange')
const { AuthDirective } = require('./graphql/directives')
const { authDirectiveTransformer } = require('./graphql/directives')
const { typeDefs, resolvers } = require('./graphql/schema')
const findOperatorId = require('../middlewares/operatorId')
const computeSchema = require('../compute-schema')
const { USER_SESSIONS_CLEAR_INTERVAL } = require('../constants')
const { session, cleanUserSessions, buildApolloContext } = require('./middlewares')
@@ -28,6 +30,7 @@ const devMode = require('minimist')(process.argv.slice(2)).dev
const HOSTNAME = process.env.HOSTNAME
const KEY_PATH = process.env.KEY_PATH
const CERT_PATH = process.env.CERT_PATH
const CA_PATH = process.env.CA_PATH
const ID_PHOTO_CARD_DIR = process.env.ID_PHOTO_CARD_DIR
const FRONT_CAMERA_DIR = process.env.FRONT_CAMERA_DIR
const OPERATOR_DATA_DIR = process.env.OPERATOR_DATA_DIR
@@ -37,64 +40,76 @@ if (!HOSTNAME) {
process.exit(1)
}
const app = express()
const loadRoutes = async () => {
const app = express()
app.use(helmet())
app.use(compression())
app.use(nocache())
app.use(cookieParser())
app.use(express.json())
app.use(express.urlencoded({ extended: true })) // support encoded bodies
app.use(express.static(path.resolve(__dirname, '..', '..', 'public')))
app.use(cleanUserSessions(USER_SESSIONS_CLEAR_INTERVAL))
app.use(computeSchema)
app.use(findOperatorId)
app.use(session)
app.use(graphqlUploadExpress())
app.use(helmet())
app.use(compression())
app.use(nocache())
app.use(cookieParser())
app.use(express.json())
app.use(express.urlencoded({ extended: true })) // support encoded bodies
app.use(express.static(path.resolve(__dirname, '..', '..', 'public')))
app.use(cleanUserSessions(USER_SESSIONS_CLEAR_INTERVAL))
app.use(findOperatorId)
app.use(session)
const apolloServer = new ApolloServer({
// Dynamic import for graphql-upload since it's not a CommonJS module
const { default: graphqlUploadExpress } = await import('graphql-upload/graphqlUploadExpress.mjs')
const { default: GraphQLUpload } = await import('graphql-upload/GraphQLUpload.mjs')
app.use(graphqlUploadExpress())
const schema = makeExecutableSchema({
typeDefs,
resolvers,
uploads: false,
schemaDirectives: {
auth: AuthDirective
},
playground: false,
resolvers: mergeResolvers(resolvers, { Upload: GraphQLUpload }),
})
const schemaWithDirectives = authDirectiveTransformer(schema)
const apolloServer = new ApolloServer({
schema: schemaWithDirectives,
csrfPrevention: false,
introspection: false,
formatError: error => {
const exception = error?.extensions?.exception
logger.error(error, JSON.stringify(exception || {}))
return error
formatError: (formattedError, error) => {
logger.error(error, JSON.stringify(error?.extensions || {}))
return formattedError
},
context: async (obj) => buildApolloContext(obj)
})
plugins: [
devMode
? ApolloServerPluginLandingPageLocalDefault()
: ApolloServerPluginLandingPageDisabled()
]
})
apolloServer.applyMiddleware({
app,
cors: {
credentials: true,
origin: devMode && 'https://localhost:3001'
}
})
await apolloServer.start();
// cors on app for /api/register endpoint.
app.use(cors({ credentials: true, origin: devMode && 'https://localhost:3001' }))
app.use(
'/graphql',
express.json(),
expressMiddleware(apolloServer, {
context: async ({ req, res }) => buildApolloContext({ req, res })
})
);
app.use('/id-card-photo', serveStatic(ID_PHOTO_CARD_DIR, { index: false }))
app.use('/front-camera-photo', serveStatic(FRONT_CAMERA_DIR, { index: false }))
app.use('/operator-data', serveStatic(OPERATOR_DATA_DIR, { index: false }))
// Everything not on graphql or api/register is redirected to the front-end
app.get('*', (req, res) => res.sendFile(path.resolve(__dirname, '..', '..', 'public', 'index.html')))
app.use('/id-card-photo', serveStatic(ID_PHOTO_CARD_DIR, { index: false }))
app.use('/front-camera-photo', serveStatic(FRONT_CAMERA_DIR, { index: false }))
app.use('/operator-data', serveStatic(OPERATOR_DATA_DIR, { index: false }))
// Everything not on graphql or api/register is redirected to the front-end
app.get('*', (req, res) => res.sendFile(path.resolve(__dirname, '..', '..', 'public', 'index.html')))
return app
}
const certOptions = {
key: fs.readFileSync(KEY_PATH),
cert: fs.readFileSync(CERT_PATH)
cert: fs.readFileSync(CERT_PATH),
ca: fs.readFileSync(CA_PATH)
}
function run () {
const store = defaultStore()
asyncLocalStorage.run(store, () => {
async function run () {
const app = await loadRoutes()
const serverPort = devMode ? 8070 : 443
const serverLog = `lamassu-admin-server listening on port ${serverPort}`
@ -104,7 +119,6 @@ function run () {
const webServer = https.createServer(certOptions, app)
webServer.listen(serverPort, () => logger.info(serverLog))
})
}
module.exports = { run }
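
For reference, a minimal sketch of the Apollo Server 4 wiring this file moves to; the typeDefs/resolvers below are stand-ins, not the real schema. The two ordering constraints are that the ESM-only graphql-upload package must come in through a dynamic import() from CommonJS, and that server.start() must resolve before expressMiddleware is mounted:

const express = require('express')
const { ApolloServer } = require('@apollo/server')
const { expressMiddleware } = require('@apollo/server/express4')
const { makeExecutableSchema } = require('@graphql-tools/schema')

const typeDefs = `type Query { ping: String }`
const resolvers = { Query: { ping: () => 'pong' } }

const start = async () => {
  const app = express()
  // graphql-upload ships as ESM only, hence the dynamic import from CJS
  const { default: graphqlUploadExpress } = await import('graphql-upload/graphqlUploadExpress.mjs')
  app.use(graphqlUploadExpress())
  const server = new ApolloServer({ schema: makeExecutableSchema({ typeDefs, resolvers }) })
  await server.start() // must complete before the middleware is mounted
  app.use('/graphql', express.json(), expressMiddleware(server, {
    context: async ({ req, res }) => ({ req, res })
  }))
  return app
}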

View file

@ -3,8 +3,8 @@ const _ = require('lodash/fp')
const { ALL } = require('../../plugins/common/ccxt')
const { BTC, BCH, DASH, ETH, LTC, USDT, ZEC, XMR, LN, TRX, USDT_TRON } = COINS
const { bitpay, coinbase, itbit, bitstamp, kraken, binanceus, cex, binance, bitfinex } = ALL
const { BTC, BCH, DASH, ETH, LTC, USDT, ZEC, XMR, LN, TRX, USDT_TRON, USDC } = COINS
const { bitpay, itbit, bitstamp, kraken, binanceus, cex, binance, bitfinex } = ALL
const TICKER = 'ticker'
const WALLET = 'wallet'
@ -26,14 +26,13 @@ const ALL_ACCOUNTS = [
{ code: 'bitpay', display: 'Bitpay', class: TICKER, cryptos: bitpay.CRYPTO },
{ code: 'kraken', display: 'Kraken', class: TICKER, cryptos: kraken.CRYPTO },
{ code: 'bitstamp', display: 'Bitstamp', class: TICKER, cryptos: bitstamp.CRYPTO },
{ code: 'coinbase', display: 'Coinbase', class: TICKER, cryptos: coinbase.CRYPTO },
{ code: 'itbit', display: 'itBit', class: TICKER, cryptos: itbit.CRYPTO },
{ code: 'mock-ticker', display: 'Mock (Caution!)', class: TICKER, cryptos: ALL_CRYPTOS, dev: true },
{ code: 'bitcoind', display: 'bitcoind', class: WALLET, cryptos: [BTC] },
{ code: 'no-layer2', display: 'No Layer 2', class: LAYER_2, cryptos: ALL_CRYPTOS },
{ code: 'infura', display: 'Infura/Alchemy', class: WALLET, cryptos: [ETH, USDT] },
{ code: 'infura', display: 'Infura/Alchemy', class: WALLET, cryptos: [ETH, USDT, USDC] },
{ code: 'trongrid', display: 'Trongrid', class: WALLET, cryptos: [TRX, USDT_TRON] },
{ code: 'geth', display: 'geth (deprecated)', class: WALLET, cryptos: [ETH, USDT] },
{ code: 'geth', display: 'geth (deprecated)', class: WALLET, cryptos: [ETH, USDT, USDC] },
{ code: 'zcashd', display: 'zcashd', class: WALLET, cryptos: [ZEC] },
{ code: 'litecoind', display: 'litecoind', class: WALLET, cryptos: [LTC] },
{ code: 'dashd', display: 'dashd', class: WALLET, cryptos: [DASH] },
@ -61,8 +60,8 @@ const ALL_ACCOUNTS = [
{ code: 'none', display: 'None', class: ZERO_CONF, cryptos: ALL_CRYPTOS },
{ code: 'blockcypher', display: 'Blockcypher', class: ZERO_CONF, cryptos: [BTC] },
{ code: 'mock-zero-conf', display: 'Mock 0-conf', class: ZERO_CONF, cryptos: ALL_CRYPTOS, dev: true },
{ code: 'scorechain', display: 'Scorechain', class: WALLET_SCORING, cryptos: [BTC, ETH, LTC, BCH, DASH, USDT, USDT_TRON, TRX] },
{ code: 'elliptic', display: 'Elliptic', class: WALLET_SCORING, cryptos: [BTC, ETH, LTC, BCH, USDT, USDT_TRON, TRX, ZEC] },
{ code: 'scorechain', display: 'Scorechain', class: WALLET_SCORING, cryptos: [BTC, ETH, LTC, BCH, DASH, USDT, USDC, USDT_TRON, TRX] },
{ code: 'elliptic', display: 'Elliptic', class: WALLET_SCORING, cryptos: [BTC, ETH, LTC, BCH, USDT, USDC, USDT_TRON, TRX, ZEC] },
{ code: 'mock-scoring', display: 'Mock scoring', class: WALLET_SCORING, cryptos: ALL_CRYPTOS, dev: true },
{ code: 'sumsub', display: 'Sumsub', class: COMPLIANCE },
{ code: 'mock-compliance', display: 'Mock Compliance', class: COMPLIANCE, dev: true },

View file

@ -4,30 +4,29 @@ const { CASH_OUT_TRANSACTION_STATES } = require('../cash-out/cash-out-helper')
function transaction () {
const sql = `SELECT DISTINCT * FROM (
SELECT 'type' AS type, 'Cash In' AS value UNION
SELECT 'type' AS type, 'Cash Out' AS value UNION
SELECT 'machine' AS type, name AS value FROM devices d INNER JOIN cash_in_txs t ON d.device_id = t.device_id UNION
SELECT 'machine' AS type, name AS value FROM devices d INNER JOIN cash_out_txs t ON d.device_id = t.device_id UNION
SELECT 'customer' AS type, concat(id_card_data::json->>'firstName', ' ', id_card_data::json->>'lastName') AS value
SELECT 'type' AS type, NULL AS label, 'Cash In' AS value UNION
SELECT 'type' AS type, NULL AS label, 'Cash Out' AS value UNION
SELECT 'machine' AS type, name AS label, d.device_id AS value FROM devices d INNER JOIN cash_in_txs t ON d.device_id = t.device_id UNION
SELECT 'machine' AS type, name AS label, d.device_id AS value FROM devices d INNER JOIN cash_out_txs t ON d.device_id = t.device_id UNION
SELECT 'customer' AS type, NULL AS label, concat(id_card_data::json->>'firstName', ' ', id_card_data::json->>'lastName') AS value
FROM customers c INNER JOIN cash_in_txs t ON c.id = t.customer_id
WHERE c.id_card_data::json->>'firstName' IS NOT NULL or c.id_card_data::json->>'lastName' IS NOT NULL UNION
SELECT 'customer' AS type, concat(id_card_data::json->>'firstName', ' ', id_card_data::json->>'lastName') AS value
SELECT 'customer' AS type, NULL AS label, concat(id_card_data::json->>'firstName', ' ', id_card_data::json->>'lastName') AS value
FROM customers c INNER JOIN cash_out_txs t ON c.id = t.customer_id
WHERE c.id_card_data::json->>'firstName' IS NOT NULL or c.id_card_data::json->>'lastName' IS NOT NULL UNION
SELECT 'fiat' AS type, fiat_code AS value FROM cash_in_txs UNION
SELECT 'fiat' AS type, fiat_code AS value FROM cash_out_txs UNION
SELECT 'crypto' AS type, crypto_code AS value FROM cash_in_txs UNION
SELECT 'crypto' AS type, crypto_code AS value FROM cash_out_txs UNION
SELECT 'address' AS type, to_address AS value FROM cash_in_txs UNION
SELECT 'address' AS type, to_address AS value FROM cash_out_txs UNION
SELECT 'status' AS type, ${cashInTx.TRANSACTION_STATES} AS value FROM cash_in_txs UNION
SELECT 'status' AS type, ${CASH_OUT_TRANSACTION_STATES} AS value FROM cash_out_txs UNION
SELECT 'sweep status' AS type, CASE WHEN swept THEN 'Swept' WHEN NOT swept THEN 'Unswept' END AS value FROM cash_out_txs
SELECT 'fiat' AS type, NULL AS label, fiat_code AS value FROM cash_in_txs UNION
SELECT 'fiat' AS type, NULL AS label, fiat_code AS value FROM cash_out_txs UNION
SELECT 'crypto' AS type, NULL AS label, crypto_code AS value FROM cash_in_txs UNION
SELECT 'crypto' AS type, NULL AS label, crypto_code AS value FROM cash_out_txs UNION
SELECT 'address' AS type, NULL AS label, to_address AS value FROM cash_in_txs UNION
SELECT 'address' AS type, NULL AS label, to_address AS value FROM cash_out_txs UNION
SELECT 'status' AS type, NULL AS label, ${cashInTx.TRANSACTION_STATES} AS value FROM cash_in_txs UNION
SELECT 'status' AS type, NULL AS label, ${CASH_OUT_TRANSACTION_STATES} AS value FROM cash_out_txs UNION
SELECT 'sweep status' AS type, NULL AS label, CASE WHEN swept THEN 'Swept' WHEN NOT swept THEN 'Unswept' END AS value FROM cash_out_txs
) f`
return db.any(sql)
}
function customer () {
const sql = `SELECT DISTINCT * FROM (
SELECT 'phone' AS type, phone AS value FROM customers WHERE phone IS NOT NULL UNION
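
The added label column exists so a filter can display one string while filtering on another: machine filters now carry the stable device_id as value and the human-readable name as label. A hypothetical consumer of the resulting Filter rows:

// Sketch: render label when present (machines), fall back to value
const toOption = f => ({ group: f.type, value: f.value, text: f.label ?? f.value })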

View file

@ -1,24 +0,0 @@
const express = require('express')
const { ApolloServer } = require('apollo-server-express')
require('../environment-helper')
const { typeDefs, resolvers } = require('./graphql/schema')
const logger = require('../logger')
const app = express()
const server = new ApolloServer({
typeDefs,
resolvers
})
server.applyMiddleware({ app })
app.use(express.json())
function run () {
const serverLog = `lamassu-admin-server listening on port ${9010}${server.graphqlPath}`
app.listen(9010, () => logger.info(serverLog))
}
module.exports = { run }

View file

@ -1,40 +1,49 @@
const _ = require('lodash/fp')
const { SchemaDirectiveVisitor, AuthenticationError } = require('apollo-server-express')
const { mapSchema, getDirective, MapperKind } = require('@graphql-tools/utils')
const { defaultFieldResolver } = require('graphql')
class AuthDirective extends SchemaDirectiveVisitor {
visitObject (type) {
this.ensureFieldsWrapped(type)
type._requiredAuthRole = this.args.requires
const { AuthenticationError } = require('../errors')
function authDirectiveTransformer(schema, directiveName = 'auth') {
return mapSchema(schema, {
// For object types
[MapperKind.OBJECT_TYPE]: (objectType) => {
const directive = getDirective(schema, objectType, directiveName)?.[0]
if (directive) {
const requiredAuthRole = directive.requires
objectType._requiredAuthRole = requiredAuthRole
}
return objectType
},
// For field definitions
[MapperKind.OBJECT_FIELD]: (fieldConfig, _fieldName, typeName) => {
const directive = getDirective(schema, fieldConfig, directiveName)?.[0]
if (directive) {
const requiredAuthRole = directive.requires
fieldConfig._requiredAuthRole = requiredAuthRole
}
visitFieldDefinition (field, details) {
this.ensureFieldsWrapped(details.objectType)
field._requiredAuthRole = this.args.requires
}
// Get the parent object type
const objectType = schema.getType(typeName)
ensureFieldsWrapped (objectType) {
if (objectType._authFieldsWrapped) return
objectType._authFieldsWrapped = true
const fields = objectType.getFields()
_.forEach(fieldName => {
const field = fields[fieldName]
const { resolve = defaultFieldResolver } = field
field.resolve = function (root, args, context, info) {
const requiredRoles = field._requiredAuthRole ? field._requiredAuthRole : objectType._requiredAuthRole
// Apply auth check to the field's resolver
const { resolve = defaultFieldResolver } = fieldConfig
fieldConfig.resolve = function (root, args, context, info) {
const requiredRoles = fieldConfig._requiredAuthRole || objectType._requiredAuthRole
if (!requiredRoles) return resolve.apply(this, [root, args, context, info])
const user = context.req.session.user
if (!user || !_.includes(_.upperCase(user.role), requiredRoles)) throw new AuthenticationError('You do not have permission to access this resource!')
if (!user || !_.includes(_.upperCase(user.role), requiredRoles)) {
throw new AuthenticationError('You do not have permission to access this resource!')
}
return resolve.apply(this, [root, args, context, info])
}
}, _.keys(fields))
return fieldConfig
}
})
}
module.exports = AuthDirective
module.exports = authDirectiveTransformer
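
For context, roughly how a transformer like this gets wired up; the @auth SDL declaration here is an assumption, included only to make the sketch self-contained. Unlike SchemaDirectiveVisitor, the transformer runs over an already-built schema, so it is applied after makeExecutableSchema (as the server.js hunk above does):

const { makeExecutableSchema } = require('@graphql-tools/schema')
const authDirectiveTransformer = require('./auth') // path assumed

const typeDefs = `
  directive @auth(requires: [String]) on OBJECT | FIELD_DEFINITION
  type Query {
    machines: [String] @auth(requires: ["USER", "SUPERUSER"])
  }
`
const resolvers = { Query: { machines: () => [] } }
const schema = authDirectiveTransformer(makeExecutableSchema({ typeDefs, resolvers }))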

View file

@ -1,3 +1,3 @@
const AuthDirective = require('./auth')
const authDirectiveTransformer = require('./auth')
module.exports = { AuthDirective }
module.exports = { authDirectiveTransformer }

View file

@ -0,0 +1,71 @@
const { GraphQLError } = require('graphql')
const { ApolloServerErrorCode } = require('@apollo/server/errors')
class AuthenticationError extends GraphQLError {
constructor() {
super('Authentication failed', {
extensions: {
code: 'UNAUTHENTICATED'
}
})
}
}
class InvalidCredentialsError extends GraphQLError {
constructor() {
super('Invalid credentials', {
extensions: {
code: 'INVALID_CREDENTIALS'
}
})
}
}
class UserAlreadyExistsError extends GraphQLError {
constructor() {
super('User already exists', {
extensions: {
code: 'USER_ALREADY_EXISTS'
}
})
}
}
class InvalidTwoFactorError extends GraphQLError {
constructor() {
super('Invalid two-factor code', {
extensions: {
code: 'INVALID_TWO_FACTOR_CODE'
}
})
}
}
class InvalidUrlError extends GraphQLError {
constructor() {
super('Invalid URL token', {
extensions: {
code: 'INVALID_URL_TOKEN'
}
})
}
}
class UserInputError extends GraphQLError {
constructor() {
super('User input error', {
extensions: {
code: ApolloServerErrorCode.BAD_USER_INPUT
}
})
}
}
module.exports = {
AuthenticationError,
InvalidCredentialsError,
UserAlreadyExistsError,
InvalidTwoFactorError,
InvalidUrlError,
UserInputError
}
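
A sketch of how these classes surface to the client; the resolver and the checkCredentials helper are hypothetical:

const { InvalidCredentialsError } = require('./errors')

const resolvers = {
  Mutation: {
    login: async (_root, { username, password }) => {
      const user = await checkCredentials(username, password) // hypothetical helper
      if (!user) throw new InvalidCredentialsError()
      return user
    }
  }
}

// The response then carries a machine-readable code alongside the message:
// { "errors": [{ "message": "Invalid credentials",
//                "extensions": { "code": "INVALID_CREDENTIALS" } }] }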

View file

@ -1,37 +0,0 @@
const { ApolloError, AuthenticationError } = require('apollo-server-express')
class InvalidCredentialsError extends ApolloError {
constructor(message) {
super(message, 'INVALID_CREDENTIALS')
Object.defineProperty(this, 'name', { value: 'InvalidCredentialsError' })
}
}
class UserAlreadyExistsError extends ApolloError {
constructor(message) {
super(message, 'USER_ALREADY_EXISTS')
Object.defineProperty(this, 'name', { value: 'UserAlreadyExistsError' })
}
}
class InvalidTwoFactorError extends ApolloError {
constructor(message) {
super(message, 'INVALID_TWO_FACTOR_CODE')
Object.defineProperty(this, 'name', { value: 'InvalidTwoFactorError' })
}
}
class InvalidUrlError extends ApolloError {
constructor(message) {
super(message, 'INVALID_URL_TOKEN')
Object.defineProperty(this, 'name', { value: 'InvalidUrlError' })
}
}
module.exports = {
AuthenticationError,
InvalidCredentialsError,
UserAlreadyExistsError,
InvalidTwoFactorError,
InvalidUrlError
}

View file

@ -8,7 +8,7 @@ const loginHelper = require('../../services/login')
const T = require('../../../time')
const users = require('../../../users')
const sessionManager = require('../../../session-manager')
const authErrors = require('../errors/authentication')
const authErrors = require('../errors')
const credentials = require('../../../hardware-credentials')
const REMEMBER_ME_AGE = 90 * T.day

View file

@ -1,13 +1,9 @@
const { GraphQLDateTime } = require('graphql-iso-date')
const { GraphQLJSON, GraphQLJSONObject } = require('graphql-type-json')
const { GraphQLUpload } = require('graphql-upload')
GraphQLDateTime.name = 'Date'
const { DateTimeISOResolver, JSONResolver, JSONObjectResolver } = require('graphql-scalars')
const resolvers = {
JSON: GraphQLJSON,
JSONObject: GraphQLJSONObject,
Date: GraphQLDateTime,
UploadGQL: GraphQLUpload
JSON: JSONResolver,
JSONObject: JSONObjectResolver,
DateTimeISO: DateTimeISOResolver
}
module.exports = resolvers
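
These resolver maps only take effect for scalars the SDL declares, so they pair with the scalar JSON / JSONObject / DateTimeISO declarations elsewhere in the schema. A minimal self-contained pairing, under that assumption:

const { makeExecutableSchema } = require('@graphql-tools/schema')
const scalarResolvers = require('./scalars') // hypothetical path to the map above

const typeDefs = `
  scalar JSON
  scalar JSONObject
  scalar DateTimeISO
  type Query { now: DateTimeISO }
`
const schema = makeExecutableSchema({
  typeDefs,
  resolvers: [scalarResolvers, { Query: { now: () => new Date() } }]
})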

View file

@ -19,10 +19,10 @@ const resolvers = {
isAnonymous: parent => (parent.customerId === anonymous.uuid)
},
Query: {
transactions: (...[, { from, until, limit, offset, deviceId, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status, swept, excludeTestingCustomers }]) =>
transactions.batch(from, until, limit, offset, deviceId, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status, swept, excludeTestingCustomers),
transactionsCsv: (...[, { from, until, limit, offset, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status, swept, timezone, excludeTestingCustomers, simplified }]) =>
transactions.batch(from, until, limit, offset, null, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status, swept, excludeTestingCustomers, simplified)
transactions: (...[, { from, until, limit, offset, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status, swept, excludeTestingCustomers }]) =>
transactions.batch(from, until, limit, offset, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status, swept, excludeTestingCustomers),
transactionsCsv: (...[, { from, until, limit, offset, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status, swept, timezone, excludeTestingCustomers, simplified }]) =>
transactions.batch(from, until, limit, offset, null, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status, swept, excludeTestingCustomers, simplified)
.then(data => parseAsync(logDateFormat(timezone, data, ['created', 'sendTime', 'publishedAt']))),
transactionCsv: (...[, { id, txClass, timezone }]) =>
transactions.getTx(id, txClass).then(data =>

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type Bill {
@ -6,7 +6,7 @@ const typeDef = gql`
fiat: Int
fiatCode: String
deviceId: ID
created: Date
created: DateTimeISO
cashUnitOperationId: ID
}

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type Blacklist {

View file

@ -1,10 +1,10 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type CashboxBatch {
id: ID
deviceId: ID
created: Date
created: DateTimeISO
operationType: String
customBillCount: Int
performedBy: String
@ -14,7 +14,7 @@ const typeDef = gql`
type Query {
cashboxBatches: [CashboxBatch] @auth
cashboxBatchesCsv(from: Date, until: Date, timezone: String): String @auth
cashboxBatchesCsv(from: DateTimeISO, until: DateTimeISO, timezone: String): String @auth
}
type Mutation {

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type Country {

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type Currency {

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
@ -33,7 +33,7 @@ const typeDef = gql`
customerId: ID
infoRequestId: ID
override: String
overrideAt: Date
overrideAt: DateTimeISO
overrideBy: ID
customerData: JSON
customInfoRequest: CustomInfoRequest

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type Customer {
@ -6,10 +6,10 @@ const typeDef = gql`
authorizedOverride: String
daysSuspended: Int
isSuspended: Boolean
newPhoto: UploadGQL
newPhoto: Upload
photoType: String
frontCameraPath: String
frontCameraAt: Date
frontCameraAt: DateTimeISO
frontCameraOverride: String
phone: String
email: String
@ -17,19 +17,19 @@ const typeDef = gql`
smsOverride: String
idCardData: JSONObject
idCardDataOverride: String
idCardDataExpiration: Date
idCardPhoto: UploadGQL
idCardDataExpiration: DateTimeISO
idCardPhoto: Upload
idCardPhotoPath: String
idCardPhotoOverride: String
idCardPhotoAt: Date
idCardPhotoAt: DateTimeISO
usSsn: String
usSsnOverride: String
sanctions: Boolean
sanctionsAt: Date
sanctionsAt: DateTimeISO
sanctionsOverride: String
totalTxs: Int
totalSpent: String
lastActive: Date
lastActive: DateTimeISO
lastTxFiat: String
lastTxFiatCode: String
lastTxClass: String
@ -53,28 +53,28 @@ const typeDef = gql`
smsOverride: String
idCardData: JSONObject
idCardDataOverride: String
idCardDataExpiration: Date
idCardDataExpiration: DateTimeISO
idCardPhotoPath: String
idCardPhotoOverride: String
usSsn: String
usSsnOverride: String
sanctions: Boolean
sanctionsAt: Date
sanctionsAt: DateTimeISO
sanctionsOverride: String
totalTxs: Int
totalSpent: String
lastActive: Date
lastActive: DateTimeISO
lastTxFiat: String
lastTxFiatCode: String
lastTxClass: String
suspendedUntil: Date
suspendedUntil: DateTimeISO
subscriberInfo: Boolean
phoneOverride: String
}
input CustomerEdit {
idCardData: JSONObject
idCardPhoto: UploadGQL
idCardPhoto: Upload
usSsn: String
subscriberInfo: JSONObject
}
@ -82,8 +82,8 @@ const typeDef = gql`
type CustomerNote {
id: ID
customerId: ID
created: Date
lastEditedAt: Date
created: DateTimeISO
lastEditedAt: DateTimeISO
lastEditedBy: ID
title: String
content: String
@ -108,7 +108,7 @@ const typeDef = gql`
removeCustomField(customerId: ID!, fieldId: ID!): Boolean @auth
editCustomer(customerId: ID!, customerEdit: CustomerEdit): Customer @auth
deleteEditedData(customerId: ID!, customerEdit: CustomerEdit): Customer @auth
replacePhoto(customerId: ID!, photoType: String, newPhoto: UploadGQL): Customer @auth
replacePhoto(customerId: ID!, photoType: String, newPhoto: Upload): Customer @auth
createCustomerNote(customerId: ID!, title: String!, content: String!): Boolean @auth
editCustomerNote(noteId: ID!, newContent: String!): Boolean @auth
deleteCustomerNote(noteId: ID!): Boolean @auth

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type CoinFunds {

View file

@ -1,25 +1,25 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type MachineLog {
id: ID!
logLevel: String!
timestamp: Date!
timestamp: DateTimeISO!
message: String!
}
type ServerLog {
id: ID!
logLevel: String!
timestamp: Date!
timestamp: DateTimeISO!
message: String
}
type Query {
machineLogs(deviceId: ID!, from: Date, until: Date, limit: Int, offset: Int): [MachineLog] @auth
machineLogsCsv(deviceId: ID!, from: Date, until: Date, limit: Int, offset: Int, timezone: String): String @auth
serverLogs(from: Date, until: Date, limit: Int, offset: Int): [ServerLog] @auth
serverLogsCsv(from: Date, until: Date, limit: Int, offset: Int, timezone: String): String @auth
machineLogs(deviceId: ID!, from: DateTimeISO, until: DateTimeISO, limit: Int, offset: Int): [MachineLog] @auth
machineLogsCsv(deviceId: ID!, from: DateTimeISO, until: DateTimeISO, limit: Int, offset: Int, timezone: String): String @auth
serverLogs(from: DateTimeISO, until: DateTimeISO, limit: Int, offset: Int): [ServerLog] @auth
serverLogsCsv(from: DateTimeISO, until: DateTimeISO, limit: Int, offset: Int, timezone: String): String @auth
}
`

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type IndividualDiscount {

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type MachineStatus {
@ -10,8 +10,8 @@ const typeDef = gql`
name: String!
deviceId: ID!
paired: Boolean!
lastPing: Date
pairedAt: Date
lastPing: DateTimeISO
pairedAt: DateTimeISO
diagnostics: Diagnostics
version: String
model: String
@ -26,9 +26,9 @@ const typeDef = gql`
}
type Diagnostics {
timestamp: Date
frontTimestamp: Date
scanTimestamp: Date
timestamp: DateTimeISO
frontTimestamp: DateTimeISO
scanTimestamp: DateTimeISO
}
type CashUnits {
@ -64,8 +64,8 @@ const typeDef = gql`
deviceId: ID!
name: String
model: String
paired: Date!
unpaired: Date!
paired: DateTimeISO!
unpaired: DateTimeISO!
}
type MachineEvent {
@ -73,9 +73,9 @@ const typeDef = gql`
deviceId: String
eventType: String
note: String
created: Date
created: DateTimeISO
age: Float
deviceTime: Date
deviceTime: DateTimeISO
}
enum MachineAction {

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type Query {

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type Notification {
@ -6,7 +6,7 @@ const typeDef = gql`
type: String
detail: JSON
message: String
created: Date
created: DateTimeISO
read: Boolean
valid: Boolean
}

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type Mutation {

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type Rate {

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type SanctionMatches {

View file

@ -1,10 +1,10 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
scalar JSON
scalar JSONObject
scalar Date
scalar UploadGQL
scalar DateTimeISO
scalar Upload
`
module.exports = typeDef

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type Query {

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type SMSNotice {

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type ProcessStatus {

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type Transaction {
@ -14,12 +14,12 @@ const typeDef = gql`
txHash: String
phone: String
error: String
created: Date
created: DateTimeISO
send: Boolean
sendConfirmed: Boolean
dispense: Boolean
timedout: Boolean
sendTime: Date
sendTime: DateTimeISO
errorCode: String
operatorCompleted: Boolean
sendPending: Boolean
@ -35,7 +35,7 @@ const typeDef = gql`
customerPhone: String
customerEmail: String
customerIdCardDataNumber: String
customerIdCardDataExpiration: Date
customerIdCardDataExpiration: DateTimeISO
customerIdCardData: JSONObject
customerName: String
customerFrontCameraPath: String
@ -44,9 +44,9 @@ const typeDef = gql`
machineName: String
discount: Int
txCustomerPhotoPath: String
txCustomerPhotoAt: Date
txCustomerPhotoAt: DateTimeISO
batched: Boolean
batchTime: Date
batchTime: DateTimeISO
batchError: String
walletScore: Int
profit: String
@ -56,11 +56,12 @@ const typeDef = gql`
type Filter {
type: String
value: String
label: String
}
type Query {
transactions(from: Date, until: Date, limit: Int, offset: Int, deviceId: ID, txClass: String, machineName: String, customerName: String, fiatCode: String, cryptoCode: String, toAddress: String, status: String, swept: Boolean, excludeTestingCustomers: Boolean): [Transaction] @auth
transactionsCsv(from: Date, until: Date, limit: Int, offset: Int, txClass: String, machineName: String, customerName: String, fiatCode: String, cryptoCode: String, toAddress: String, status: String, swept: Boolean, timezone: String, excludeTestingCustomers: Boolean, simplified: Boolean): String @auth
transactions(from: DateTimeISO, until: DateTimeISO, limit: Int, offset: Int, txClass: String, deviceId: String, customerName: String, fiatCode: String, cryptoCode: String, toAddress: String, status: String, swept: Boolean, excludeTestingCustomers: Boolean): [Transaction] @auth
transactionsCsv(from: DateTimeISO, until: DateTimeISO, limit: Int, offset: Int, txClass: String, deviceId: String, customerName: String, fiatCode: String, cryptoCode: String, toAddress: String, status: String, swept: Boolean, timezone: String, excludeTestingCustomers: Boolean, simplified: Boolean): String @auth
transactionCsv(id: ID, txClass: String, timezone: String): String @auth
txAssociatedDataCsv(id: ID, txClass: String, timezone: String): String @auth
transactionFilters: [Filter] @auth

View file

@ -45,7 +45,7 @@ const typeDef = `
type UserSession {
sid: String!
sess: JSONObject!
expire: Date!
expire: DateTimeISO!
}
type User {
@ -53,8 +53,8 @@ const typeDef = `
username: String
role: String
enabled: Boolean
created: Date
last_accessed: Date
created: DateTimeISO
last_accessed: DateTimeISO
last_accessed_from: String
last_accessed_address: String
}
@ -68,14 +68,14 @@ const typeDef = `
type ResetToken {
token: String
user_id: ID
expire: Date
expire: DateTimeISO
}
type RegistrationToken {
token: String
username: String
role: String
expire: Date
expire: DateTimeISO
}
type Query {

View file

@ -1,4 +1,4 @@
const { gql } = require('apollo-server-express')
const gql = require('graphql-tag')
const typeDef = gql`
type Query {

View file

@ -1,21 +1,18 @@
const { asyncLocalStorage } = require('../../async-storage')
const db = require('../../db')
const { USER_SESSIONS_TABLE_NAME } = require('../../constants')
const logger = require('../../logger')
const schemaCache = {}
let schemaCache = Date.now()
const cleanUserSessions = (cleanInterval) => (req, res, next) => {
const schema = asyncLocalStorage.getStore() ? asyncLocalStorage.getStore().get('schema') : null
const now = Date.now()
if (!schema) return next()
if (schema && schemaCache.schema + cleanInterval > now) return next()
if (schemaCache + cleanInterval > now) return next()
logger.debug(`Clearing expired sessions for schema ${schema}`)
logger.debug(`Clearing expired sessions for schema 'public'`)
return db.none('DELETE FROM $1^ WHERE expire < to_timestamp($2 / 1000.0)', [USER_SESSIONS_TABLE_NAME, now])
.then(() => {
schemaCache.schema = now
schemaCache = now
return next()
})
.catch(next)

View file

@ -1,7 +1,7 @@
const { AuthenticationError } = require('apollo-server-express')
const base64 = require('base-64')
const users = require('../../users')
const { AuthenticationError } = require('../graphql/errors')
const buildApolloContext = async ({ req, res }) => {
if (!req.session.user) return { req, res }

View file

@ -18,8 +18,7 @@ router.use('*', async (req, res, next) => getOperatorId('authentication').then(o
cookie: {
httpOnly: true,
secure: true,
sameSite: true,
maxAge: 60 * 10 * 1000 // 10 minutes
sameSite: true
}
})(req, res, next))
)
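
Dropping maxAge is what makes this a temporary cookie: a Set-Cookie header with no Max-Age or Expires attribute produces a session cookie, which the browser discards when it closes rather than at a fixed deadline.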

View file

@ -1,5 +1,5 @@
const machineLoader = require('../../machine-loader')
const { UserInputError } = require('apollo-server-express')
const { UserInputError } = require('../graphql/errors')
function getMachine (machineId) {
return machineLoader.getMachines()

View file

@ -11,19 +11,6 @@ const { REDEEMABLE_AGE, CASH_OUT_TRANSACTION_STATES } = require('../../cash-out/
const NUM_RESULTS = 1000
function addNames (txs) {
return machineLoader.getMachineNames()
.then(machines => {
const addName = tx => {
const machine = _.find(['deviceId', tx.deviceId], machines)
const name = machine ? machine.name : 'Unpaired'
return _.set('machineName', name, tx)
}
return _.map(addName, txs)
})
}
function addProfits (txs) {
return _.map(it => {
const profit = getProfit(it).toString()
@ -33,14 +20,31 @@ function addProfits (txs) {
const camelize = _.mapKeys(_.camelCase)
const DEVICE_NAME_QUERY = `
CASE
WHEN ud.name IS NOT NULL THEN ud.name || ' (unpaired)'
WHEN d.name IS NOT NULL THEN d.name
ELSE 'Unpaired'
END AS machine_name
`
const DEVICE_NAME_JOINS = `
LEFT JOIN devices d ON txs.device_id = d.device_id
LEFT JOIN (
SELECT device_id, name, unpaired, paired
FROM unpaired_devices
) ud ON txs.device_id = ud.device_id
AND ud.unpaired >= txs.created
AND (txs.created >= ud.paired)
`
function batch (
from = new Date(0).toISOString(),
until = new Date().toISOString(),
limit = null,
offset = 0,
id = null,
txClass = null,
machineName = null,
deviceId = null,
customerName = null,
fiatCode = null,
cryptoCode = null,
@ -61,8 +65,7 @@ function batch (
k
)
)),
addProfits,
addNames
addProfits
)
const cashInSql = `SELECT 'cashIn' AS tx_class, txs.*,
@ -77,21 +80,20 @@ function batch (
txs.tx_customer_photo_at AS tx_customer_photo_at,
txs.tx_customer_photo_path AS tx_customer_photo_path,
((NOT txs.send_confirmed) AND (txs.created <= now() - interval $1)) AS expired,
tb.error_message AS batch_error
tb.error_message AS batch_error,
${DEVICE_NAME_QUERY}
FROM (SELECT *, ${cashInTx.TRANSACTION_STATES} AS txStatus FROM cash_in_txs) AS txs
LEFT OUTER JOIN customers c ON txs.customer_id = c.id
LEFT JOIN devices d ON txs.device_id = d.device_id
${DEVICE_NAME_JOINS}
LEFT OUTER JOIN transaction_batches tb ON txs.batch_id = tb.id
WHERE txs.created >= $2 AND txs.created <= $3 ${
id !== null ? `AND txs.device_id = $6` : ``
}
AND ($7 is null or $7 = 'Cash In')
AND ($8 is null or d.name = $8)
AND ($9 is null or concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') = $9)
AND ($10 is null or txs.fiat_code = $10)
AND ($11 is null or txs.crypto_code = $11)
AND ($12 is null or txs.to_address = $12)
AND ($13 is null or txs.txStatus = $13)
WHERE txs.created >= $2 AND txs.created <= $3
AND ($6 is null or $6 = 'Cash In')
AND ($7 is null or txs.device_id = $7)
AND ($8 is null or concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') = $8)
AND ($9 is null or txs.fiat_code = $9)
AND ($10 is null or txs.crypto_code = $10)
AND ($11 is null or txs.to_address = $11)
AND ($12 is null or txs.txStatus = $12)
${excludeTestingCustomers ? `AND c.is_test_customer is false` : ``}
${isCsvExport && !simplified ? '' : 'AND (error IS NOT null OR tb.error_message IS NOT null OR fiat > 0)'}
ORDER BY created DESC limit $4 offset $5`
@ -109,23 +111,22 @@ function batch (
c.id_card_photo_path AS customer_id_card_photo_path,
txs.tx_customer_photo_at AS tx_customer_photo_at,
txs.tx_customer_photo_path AS tx_customer_photo_path,
(NOT txs.dispense AND extract(epoch FROM (now() - greatest(txs.created, txs.confirmed_at))) >= $1) AS expired
(NOT txs.dispense AND extract(epoch FROM (now() - greatest(txs.created, txs.confirmed_at))) >= $1) AS expired,
${DEVICE_NAME_QUERY}
FROM (SELECT *, ${CASH_OUT_TRANSACTION_STATES} AS txStatus FROM cash_out_txs) txs
INNER JOIN cash_out_actions actions ON txs.id = actions.tx_id
AND actions.action = 'provisionAddress'
LEFT OUTER JOIN customers c ON txs.customer_id = c.id
LEFT JOIN devices d ON txs.device_id = d.device_id
WHERE txs.created >= $2 AND txs.created <= $3 ${
id !== null ? `AND txs.device_id = $6` : ``
}
AND ($7 is null or $7 = 'Cash Out')
AND ($8 is null or d.name = $8)
AND ($9 is null or concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') = $9)
AND ($10 is null or txs.fiat_code = $10)
AND ($11 is null or txs.crypto_code = $11)
AND ($12 is null or txs.to_address = $12)
AND ($13 is null or txs.txStatus = $13)
AND ($14 is null or txs.swept = $14)
${DEVICE_NAME_JOINS}
WHERE txs.created >= $2 AND txs.created <= $3
AND ($6 is null or $6 = 'Cash Out')
AND ($7 is null or txs.device_id = $7)
AND ($8 is null or concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') = $8)
AND ($9 is null or txs.fiat_code = $9)
AND ($10 is null or txs.crypto_code = $10)
AND ($11 is null or txs.to_address = $11)
AND ($12 is null or txs.txStatus = $12)
AND ($13 is null or txs.swept = $13)
${excludeTestingCustomers ? `AND c.is_test_customer is false` : ``}
${isCsvExport ? '' : 'AND fiat > 0'}
ORDER BY created DESC limit $4 offset $5`
@ -141,13 +142,13 @@ function batch (
}
if (hasCashInOnlyFilters) {
promises = [db.any(cashInSql, [cashInTx.PENDING_INTERVAL, from, until, limit, offset, id, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status])]
promises = [db.any(cashInSql, [cashInTx.PENDING_INTERVAL, from, until, limit, offset, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status])]
} else if (hasCashOutOnlyFilters) {
promises = [db.any(cashOutSql, [REDEEMABLE_AGE, from, until, limit, offset, id, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status, swept])]
promises = [db.any(cashOutSql, [REDEEMABLE_AGE, from, until, limit, offset, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status, swept])]
} else {
promises = [
db.any(cashInSql, [cashInTx.PENDING_INTERVAL, from, until, limit, offset, id, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status]),
db.any(cashOutSql, [REDEEMABLE_AGE, from, until, limit, offset, id, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status, swept])
db.any(cashInSql, [cashInTx.PENDING_INTERVAL, from, until, limit, offset, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status]),
db.any(cashOutSql, [REDEEMABLE_AGE, from, until, limit, offset, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status, swept])
]
}
@ -249,7 +250,7 @@ const getStatus = it => {
function getCustomerTransactionsBatch (ids) {
const packager = _.flow(it => {
return it
}, _.flatten, _.orderBy(_.property('created'), ['desc']), _.map(camelize), addNames)
}, _.flatten, _.orderBy(_.property('created'), ['desc']), _.map(camelize))
const cashInSql = `SELECT 'cashIn' AS tx_class, txs.*,
c.phone AS customer_phone,
@ -261,9 +262,11 @@ function getCustomerTransactionsBatch (ids) {
c.front_camera_path AS customer_front_camera_path,
c.id_card_photo_path AS customer_id_card_photo_path,
((NOT txs.send_confirmed) AND (txs.created <= now() - interval $2)) AS expired,
tb.error_message AS batch_error
tb.error_message AS batch_error,
${DEVICE_NAME_QUERY}
FROM cash_in_txs AS txs
LEFT OUTER JOIN customers c ON txs.customer_id = c.id
${DEVICE_NAME_JOINS}
LEFT OUTER JOIN transaction_batches tb ON txs.batch_id = tb.id
WHERE c.id IN ($1^)
ORDER BY created DESC limit $3`
@ -279,11 +282,13 @@ function getCustomerTransactionsBatch (ids) {
c.name AS customer_name,
c.front_camera_path AS customer_front_camera_path,
c.id_card_photo_path AS customer_id_card_photo_path,
(NOT txs.dispense AND extract(epoch FROM (now() - greatest(txs.created, txs.confirmed_at))) >= $3) AS expired
(NOT txs.dispense AND extract(epoch FROM (now() - greatest(txs.created, txs.confirmed_at))) >= $3) AS expired,
${DEVICE_NAME_QUERY}
FROM cash_out_txs txs
INNER JOIN cash_out_actions actions ON txs.id = actions.tx_id
AND actions.action = 'provisionAddress'
LEFT OUTER JOIN customers c ON txs.customer_id = c.id
${DEVICE_NAME_JOINS}
WHERE c.id IN ($1^)
ORDER BY created DESC limit $2`
return Promise.all([
@ -297,7 +302,7 @@ function getCustomerTransactionsBatch (ids) {
}
function single (txId) {
const packager = _.flow(_.compact, _.map(camelize), addNames)
const packager = _.flow(_.compact, _.map(camelize))
const cashInSql = `SELECT 'cashIn' AS tx_class, txs.*,
c.phone AS customer_phone,
@ -309,9 +314,11 @@ function single (txId) {
c.front_camera_path AS customer_front_camera_path,
c.id_card_photo_path AS customer_id_card_photo_path,
((NOT txs.send_confirmed) AND (txs.created <= now() - interval $1)) AS expired,
tb.error_message AS batch_error
tb.error_message AS batch_error,
${DEVICE_NAME_QUERY}
FROM cash_in_txs AS txs
LEFT OUTER JOIN customers c ON txs.customer_id = c.id
${DEVICE_NAME_JOINS}
LEFT OUTER JOIN transaction_batches tb ON txs.batch_id = tb.id
WHERE id=$2`
@ -325,13 +332,14 @@ function single (txId) {
c.id_card_data AS customer_id_card_data,
c.name AS customer_name,
c.front_camera_path AS customer_front_camera_path,
c.id_card_photo_path AS customer_id_card_photo_path,
(NOT txs.dispense AND extract(epoch FROM (now() - greatest(txs.created, txs.confirmed_at))) >= $2) AS expired
(NOT txs.dispense AND extract(epoch FROM (now() - greatest(txs.created, txs.confirmed_at))) >= $2) AS expired,
${DEVICE_NAME_QUERY}
FROM cash_out_txs txs
INNER JOIN cash_out_actions actions ON txs.id = actions.tx_id
AND actions.action = 'provisionAddress'
LEFT OUTER JOIN customers c ON txs.customer_id = c.id
${DEVICE_NAME_JOINS}
WHERE id=$1`
return Promise.all([
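
The net effect of the two fragments above is that machine names are now resolved in SQL instead of by the removed addNames post-processing pass. A standalone sketch of the resolution order (same tables as above; a name recorded in a matching unpaired window wins, then the currently paired name, then the 'Unpaired' fallback):

const sql = `
  SELECT txs.id,
         CASE
           WHEN ud.name IS NOT NULL THEN ud.name || ' (unpaired)'
           WHEN d.name IS NOT NULL THEN d.name
           ELSE 'Unpaired'
         END AS machine_name
  FROM cash_in_txs txs
  LEFT JOIN devices d ON txs.device_id = d.device_id
  LEFT JOIN unpaired_devices ud ON txs.device_id = ud.device_id
    AND ud.unpaired >= txs.created AND txs.created >= ud.paired`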

View file

@ -2,12 +2,9 @@ const crypto = require('crypto')
const _ = require('lodash/fp')
const db = require('./db')
const migration = require('./config-migration')
const { asyncLocalStorage } = require('./async-storage')
const { getOperatorId } = require('./operator')
const { getTermsConditions, setTermsConditions } = require('./new-config-manager')
const OLD_SETTINGS_LOADER_SCHEMA_VERSION = 1
const NEW_SETTINGS_LOADER_SCHEMA_VERSION = 2
const PASSWORD_FILLED = 'PASSWORD_FILLED'
const SECRET_FIELDS = [
@ -59,10 +56,14 @@ const addTermsHash = configs => {
const notifyReload = (dbOrTx, operatorId) =>
dbOrTx.none(
'NOTIFY $1:name, $2',
['reload', JSON.stringify({ schema: asyncLocalStorage.getStore().get('schema'), operatorId })]
['reload', JSON.stringify({ operatorId })]
)
function saveAccounts (accounts) {
if (!accounts) {
return Promise.resolve()
}
const accountsSql = `UPDATE user_config SET data = $1, valid = TRUE, schema_version = $2 WHERE type = 'accounts';
INSERT INTO user_config (type, data, valid, schema_version)
SELECT 'accounts', $1, TRUE, $2 WHERE 'accounts' NOT IN (SELECT type FROM user_config)`
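
This NOTIFY is the sending half of the reload channel; the LISTEN half lives in poller.js (further down). A minimal sketch of the round trip with pg-promise, where reload() stands in for the poller's handler:

// sender: any process that just saved new settings
db.none('NOTIFY $1:name, $2', ['reload', JSON.stringify({ operatorId })])

// listener: a direct (non-pooled) connection, so the subscription
// is not lost when a pooled client gets recycled
db.connect({ direct: true }).then(sco => {
  sco.client.on('notification', () => reload())
  return sco.none('LISTEN $1:name', 'reload')
})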

View file

@ -456,13 +456,6 @@ function plugins (settings, deviceId) {
.catch(logger.error)
}
function pong () {
return db.none(`UPDATE server_events SET created=now() WHERE event_type=$1;
INSERT INTO server_events (event_type) SELECT $1
WHERE NOT EXISTS (SELECT 1 FROM server_events WHERE event_type=$1);`, ['ping'])
.catch(logger.error)
}
/*
* Trader functions
*/
@ -935,7 +928,6 @@ function plugins (settings, deviceId) {
getPhoneCode,
getEmailCode,
executeTrades,
pong,
clearOldLogs,
notifyConfirmation,
sweepHd,

View file

@ -12,7 +12,7 @@ const binance = require('../exchange/binance')
const bitfinex = require('../exchange/bitfinex')
const logger = require('../../logger')
const { BTC, BCH, DASH, ETH, LTC, ZEC, USDT, TRX, USDT_TRON, LN } = COINS
const { BTC, BCH, DASH, ETH, LTC, ZEC, USDT, TRX, USDT_TRON, LN, USDC } = COINS
const ALL = {
cex: cex,
@ -21,11 +21,6 @@ const ALL = {
bitstamp: bitstamp,
itbit: itbit,
bitpay: bitpay,
coinbase: {
CRYPTO: [BTC, ETH, LTC, DASH, ZEC, BCH, USDT, USDT_TRON, TRX, LN],
FIAT: 'ALL_CURRENCIES',
DEFAULT_FIAT_MARKET: 'EUR'
},
binance: binance,
bitfinex: bitfinex
}

View file

@ -4,8 +4,8 @@ const _ = require('lodash/fp')
const { ORDER_TYPES } = require('./consts')
const ORDER_TYPE = ORDER_TYPES.MARKET
const { BTC, BCH, DASH, ETH, LTC, ZEC, USDT, USDT_TRON, LN } = COINS
const CRYPTO = [BTC, ETH, LTC, DASH, ZEC, BCH, USDT, USDT_TRON, LN]
const { BTC, BCH, DASH, ETH, LTC, ZEC, USDT, USDT_TRON, LN, USDC } = COINS
const CRYPTO = [BTC, ETH, LTC, DASH, ZEC, BCH, USDT, USDT_TRON, LN, USDC]
const FIAT = ['USD']
const DEFAULT_FIAT_MARKET = 'USD'
const REQUIRED_CONFIG_FIELDS = ['apiKey', 'privateKey', 'currencyMarket']

View file

@ -4,8 +4,8 @@ const _ = require('lodash/fp')
const { ORDER_TYPES } = require('./consts')
const ORDER_TYPE = ORDER_TYPES.MARKET
const { BTC, ETH, LTC, BCH, USDT, LN } = COINS
const CRYPTO = [BTC, ETH, LTC, BCH, USDT, LN]
const { BTC, ETH, LTC, BCH, USDT, LN, USDC } = COINS
const CRYPTO = [BTC, ETH, LTC, BCH, USDT, LN, USDC]
const FIAT = ['USD', 'EUR']
const DEFAULT_FIAT_MARKET = 'EUR'
const AMOUNT_PRECISION = 8

View file

@ -4,8 +4,8 @@ const _ = require('lodash/fp')
const { ORDER_TYPES } = require('./consts')
const ORDER_TYPE = ORDER_TYPES.MARKET
const { BTC, ETH, LTC, BCH, USDT, LN } = COINS
const CRYPTO = [BTC, ETH, LTC, BCH, USDT, LN]
const { BTC, ETH, LTC, BCH, USDT, LN, USDC } = COINS
const CRYPTO = [BTC, ETH, LTC, BCH, USDT, LN, USDC]
const FIAT = ['USD', 'EUR']
const DEFAULT_FIAT_MARKET = 'EUR'
const AMOUNT_PRECISION = 8

View file

@ -4,8 +4,8 @@ const { ORDER_TYPES } = require('./consts')
const { COINS } = require('@lamassu/coins')
const ORDER_TYPE = ORDER_TYPES.MARKET
const { BTC, BCH, DASH, ETH, LTC, ZEC, XMR, USDT, TRX, USDT_TRON, LN } = COINS
const CRYPTO = [BTC, ETH, LTC, DASH, ZEC, BCH, XMR, USDT, TRX, USDT_TRON, LN]
const { BTC, BCH, DASH, ETH, LTC, ZEC, XMR, USDT, TRX, USDT_TRON, LN, USDC } = COINS
const CRYPTO = [BTC, ETH, LTC, DASH, ZEC, BCH, XMR, USDT, TRX, USDT_TRON, LN, USDC]
const FIAT = ['USD', 'EUR']
const DEFAULT_FIAT_MARKET = 'EUR'
const AMOUNT_PRECISION = 6

View file

@ -161,7 +161,7 @@ function generateErc20Tx (_toAddress, wallet, amount, includesFee, cryptoCode) {
.then(([gas, txCount, baseFeePerGas]) => {
lastUsedNonces[fromAddress] = txCount
const maxPriorityFeePerGas = new BN(web3.utils.toWei('2.5', 'gwei')) // web3 default value
const maxPriorityFeePerGas = new BN(web3.utils.toWei('1.0', 'gwei')) // lowered from web3's 2.5 gwei default
const maxFeePerGas = new BN(2).times(baseFeePerGas).plus(maxPriorityFeePerGas)
if (includesFee && (toSend.isNegative() || toSend.isZero())) {
@ -219,13 +219,11 @@ function generateTx (_toAddress, wallet, amount, includesFee, cryptoCode, txId)
.then(([gas, gasPrice, txCount, baseFeePerGas]) => {
lastUsedNonces[fromAddress] = txCount
const maxPriorityFeePerGas = new BN(web3.utils.toWei('2.5', 'gwei')) // web3 default value
const neededPriority = new BN(web3.utils.toWei('2.0', 'gwei'))
const maxFeePerGas = baseFeePerGas.plus(neededPriority)
const newGasPrice = BN.minimum(maxFeePerGas, baseFeePerGas.plus(maxPriorityFeePerGas))
const maxPriorityFeePerGas = new BN(web3.utils.toWei('1.0', 'gwei')) // lowered from web3's 2.5 gwei default
const maxFeePerGas = baseFeePerGas.times(2).plus(maxPriorityFeePerGas)
const toSend = includesFee
? new BN(amount).minus(newGasPrice.times(gas))
? new BN(amount).minus(maxFeePerGas.times(gas))
: amount
const rawTx = {
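
A worked example of the new ceiling, with illustrative numbers: at baseFeePerGas = 20 gwei and the 1 gwei tip above, maxFeePerGas = 2 * 20 + 1 = 41 gwei, so a 21000-gas send with includesFee reserves 41 gwei * 21000 = 861000 gwei = 0.000861 ETH and sends amount minus that. Deducting the ceiling rather than the old baseFee-plus-tip estimate means the send cannot underfund its own fee; under EIP-1559 any gap between maxFeePerGas and the effective gas price is simply not charged.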

View file

@ -8,16 +8,13 @@ const T = require('./time')
const logger = require('./logger')
const cashOutTx = require('./cash-out/cash-out-tx')
const cashInTx = require('./cash-in/cash-in-tx')
const customers = require('./customers')
const sanctionsUpdater = require('./ofac/update')
const sanctions = require('./ofac/index')
const coinAtmRadar = require('./coinatmradar/coinatmradar')
const configManager = require('./new-config-manager')
const complianceTriggers = require('./compliance-triggers')
const { asyncLocalStorage, defaultStore } = require('./async-storage')
const settingsLoader = require('./new-settings-loader')
const NodeCache = require('node-cache')
const util = require('util')
const db = require('./db')
const processBatches = require('./tx-batching-processing')
@ -26,7 +23,6 @@ const LIVE_INCOMING_TX_INTERVAL = 5 * T.seconds
const UNNOTIFIED_INTERVAL = 10 * T.seconds
const SWEEP_HD_INTERVAL = 5 * T.minute
const TRADE_INTERVAL = 60 * T.seconds
const PONG_INTERVAL = 10 * T.seconds
const LOGS_CLEAR_INTERVAL = 1 * T.day
const SANCTIONS_INITIAL_DOWNLOAD_INTERVAL = 5 * T.minutes
const SANCTIONS_UPDATE_INTERVAL = 1 * T.day
@ -56,17 +52,11 @@ const SLOW_QUEUE = new Queue({
interval: SLOW_QUEUE_WAIT
})
// Fix for asyncLocalStorage store being lost due to callback-based queue
FAST_QUEUE.enqueue = util.promisify(FAST_QUEUE.enqueue)
SLOW_QUEUE.enqueue = util.promisify(SLOW_QUEUE.enqueue)
const QUEUE = {
FAST: FAST_QUEUE,
SLOW: SLOW_QUEUE
}
const schemaCallbacks = new Map()
const cachedVariables = new NodeCache({
stdTTL: CACHE_ENTRY_TTL,
checkperiod: CACHE_ENTRY_TTL,
@ -78,31 +68,25 @@ cachedVariables.on('expired', (key, val) => {
if (!val.isReloading) {
// since val is passed by reference we don't need to do cachedVariables.set()
val.isReloading = true
return reload(key)
return reload()
}
})
db.connect({ direct: true }).then(sco => {
sco.client.on('notification', data => {
const parsedData = JSON.parse(data.payload)
return reload(parsedData.schema)
sco.client.on('notification', () => {
return reload()
})
return sco.none('LISTEN $1:name', 'reload')
}).catch(console.error)
function reload (schema) {
const store = defaultStore()
store.set('schema', schema)
// set asyncLocalStorage so settingsLoader loads settings for the right schema
return asyncLocalStorage.run(store, () => {
function reload () {
return settingsLoader.loadLatest()
.then(settings => {
const pi = plugins(settings)
cachedVariables.set(schema, { settings, pi, isReloading: false })
logger.debug(`Settings for schema '${schema}' reloaded in poller`)
cachedVariables.set('public', { settings, pi, isReloading: false })
logger.debug(`Settings for schema 'public' reloaded in poller`)
return updateAndLoadSanctions()
})
})
}
function pi () { return cachedVariables.get('public').pi }
@ -205,26 +189,12 @@ const cleanOldFailedQRScans = () => {
})
}
// function checkExternalCompliance (settings) {
// return customers.checkExternalCompliance(settings)
// }
function initializeEachSchema (schemas = ['public']) {
// for each schema set "thread variables" and do polling
return _.forEach(schema => {
const store = defaultStore()
store.set('schema', schema)
return asyncLocalStorage.run(store, () => {
function setup () {
return settingsLoader.loadLatest().then(settings => {
// prevent inadvertently clearing the array without clearing timeouts
if (schemaCallbacks.has(schema)) throw new Error(`The schema "${schema}" cannot be initialized twice on poller`)
const pi = plugins(settings)
cachedVariables.set(schema, { settings, pi, isReloading: false })
schemaCallbacks.set(schema, [])
return doPolling(schema)
})
cachedVariables.set('public', { settings, pi, isReloading: false })
return doPolling()
}).catch(console.error)
}, schemas)
}
function recursiveTimeout (func, timeout, ...vars) {
@ -246,25 +216,12 @@ function recursiveTimeout (func, timeout, ...vars) {
}, timeout)
}
function addToQueue (func, interval, schema, queue, ...vars) {
function addToQueue (func, interval, queue, ...vars) {
recursiveTimeout(func, interval, ...vars)
// return schemaCallbacks.get(schema).push(setInterval(() => {
// return queue.enqueue().then(() => {
// // get plugins or settings from the cache every time func is run
// const loadVariables = vars.length > 0 && typeof vars[0] === 'function'
// if (loadVariables) {
// const funcVars = [...vars]
// funcVars[0] = vars[0]()
// return func(...funcVars)
// }
// return func(...vars)
// }).catch(console.error)
// }, interval))
}
function doPolling (schema) {
function doPolling () {
pi().executeTrades()
pi().pong()
pi().clearOldLogs()
cashOutTx.monitorLiveIncoming(settings())
cashOutTx.monitorStaleIncoming(settings())
@ -272,40 +229,23 @@ function doPolling (schema) {
pi().sweepHd()
notifier.checkNotification(pi())
updateCoinAtmRadar()
// checkExternalCompliance(settings())
addToQueue(pi().getRawRates, TICKER_RATES_INTERVAL, schema, QUEUE.FAST)
addToQueue(pi().executeTrades, TRADE_INTERVAL, schema, QUEUE.FAST)
addToQueue(cashOutTx.monitorLiveIncoming, LIVE_INCOMING_TX_INTERVAL, schema, QUEUE.FAST, settings)
addToQueue(cashOutTx.monitorStaleIncoming, INCOMING_TX_INTERVAL, schema, QUEUE.FAST, settings)
addToQueue(cashOutTx.monitorUnnotified, UNNOTIFIED_INTERVAL, schema, QUEUE.FAST, settings)
addToQueue(cashInTx.monitorPending, PENDING_INTERVAL, schema, QUEUE.FAST, settings)
addToQueue(processBatches, UNNOTIFIED_INTERVAL, schema, QUEUE.FAST, settings, TRANSACTION_BATCH_LIFECYCLE)
addToQueue(pi().sweepHd, SWEEP_HD_INTERVAL, schema, QUEUE.FAST, settings)
addToQueue(pi().pong, PONG_INTERVAL, schema, QUEUE.FAST)
addToQueue(pi().clearOldLogs, LOGS_CLEAR_INTERVAL, schema, QUEUE.SLOW)
addToQueue(notifier.checkNotification, CHECK_NOTIFICATION_INTERVAL, schema, QUEUE.FAST, pi)
addToQueue(initialSanctionsDownload, SANCTIONS_INITIAL_DOWNLOAD_INTERVAL, schema, QUEUE.SLOW)
addToQueue(updateAndLoadSanctions, SANCTIONS_UPDATE_INTERVAL, schema, QUEUE.SLOW)
addToQueue(updateCoinAtmRadar, RADAR_UPDATE_INTERVAL, schema, QUEUE.SLOW)
addToQueue(pi().pruneMachinesHeartbeat, PRUNE_MACHINES_HEARTBEAT, schema, QUEUE.SLOW, settings)
addToQueue(cleanOldFailedQRScans, FAILED_SCANS_INTERVAL, schema, QUEUE.SLOW, settings)
addToQueue(cleanOldFailedPDF417Scans, FAILED_SCANS_INTERVAL, schema, QUEUE.SLOW, settings)
// addToQueue(checkExternalCompliance, EXTERNAL_COMPLIANCE_INTERVAL, schema, QUEUE.SLOW, settings)
addToQueue(pi().getRawRates, TICKER_RATES_INTERVAL, QUEUE.FAST)
addToQueue(pi().executeTrades, TRADE_INTERVAL, QUEUE.FAST)
addToQueue(cashOutTx.monitorLiveIncoming, LIVE_INCOMING_TX_INTERVAL, QUEUE.FAST, settings)
addToQueue(cashOutTx.monitorStaleIncoming, INCOMING_TX_INTERVAL, QUEUE.FAST, settings)
addToQueue(cashOutTx.monitorUnnotified, UNNOTIFIED_INTERVAL, QUEUE.FAST, settings)
addToQueue(cashInTx.monitorPending, PENDING_INTERVAL, QUEUE.FAST, settings)
addToQueue(processBatches, UNNOTIFIED_INTERVAL, QUEUE.FAST, settings, TRANSACTION_BATCH_LIFECYCLE)
addToQueue(pi().sweepHd, SWEEP_HD_INTERVAL, QUEUE.FAST, settings)
addToQueue(pi().clearOldLogs, LOGS_CLEAR_INTERVAL, QUEUE.SLOW)
addToQueue(notifier.checkNotification, CHECK_NOTIFICATION_INTERVAL, QUEUE.FAST, pi)
addToQueue(initialSanctionsDownload, SANCTIONS_INITIAL_DOWNLOAD_INTERVAL, QUEUE.SLOW)
addToQueue(updateAndLoadSanctions, SANCTIONS_UPDATE_INTERVAL, QUEUE.SLOW)
addToQueue(updateCoinAtmRadar, RADAR_UPDATE_INTERVAL, QUEUE.SLOW)
addToQueue(pi().pruneMachinesHeartbeat, PRUNE_MACHINES_HEARTBEAT, QUEUE.SLOW, settings)
addToQueue(cleanOldFailedQRScans, FAILED_SCANS_INTERVAL, QUEUE.SLOW, settings)
addToQueue(cleanOldFailedPDF417Scans, FAILED_SCANS_INTERVAL, QUEUE.SLOW, settings)
}
function setup (schemasToAdd = [], schemasToRemove = []) {
// clear callback array for each schema in schemasToRemove and clear cached variables
_.forEach(schema => {
const callbacks = schemaCallbacks.get(schema)
_.forEach(clearInterval, callbacks)
schemaCallbacks.delete(schema)
cachedVariables.del(schema)
}, schemasToRemove)
return initializeEachSchema(schemasToAdd)
}
const getActiveSchemas = () => Array.from(schemaCallbacks.keys())
module.exports = { setup, reload, getActiveSchemas }
module.exports = { setup, reload }
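
A condensed sketch of the refresh cycle this file now relies on: settings live in a single NodeCache entry, expiry of that entry triggers a reload, and the Postgres 'reload' notification forces one immediately. loadLatestSettings() stands in for settingsLoader.loadLatest(), and useClones: false is assumed, matching the by-reference comment above:

const NodeCache = require('node-cache')
const cache = new NodeCache({ stdTTL: 60, checkperiod: 60, useClones: false })

cache.on('expired', (key, val) => {
  if (!val.isReloading) {
    val.isReloading = true // val is held by reference; no cache.set() needed
    reload()
  }
})

function reload () {
  return loadLatestSettings()
    .then(settings => cache.set('public', { settings, isReloading: false }))
}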

View file

@ -1,5 +1,4 @@
const express = require('express')
const argv = require('minimist')(process.argv.slice(2))
const compression = require('compression')
const helmet = require('helmet')
const morgan = require('morgan')
@ -9,7 +8,6 @@ const logger = require('./logger')
const addRWBytes = require('./middlewares/addRWBytes')
const authorize = require('./middlewares/authorize')
const computeSchema = require('./middlewares/compute-schema')
const errorHandler = require('./middlewares/errorHandler')
const filterOldRequests = require('./middlewares/filterOldRequests')
const findOperatorId = require('./middlewares/operatorId')
@ -35,11 +33,14 @@ const verifyPromoCodeRoutes = require('./routes/verifyPromoCodeRoutes')
const probeRoutes = require('./routes/probeLnRoutes')
const failedQRScansRoutes = require('./routes/failedQRScans')
const graphQLServer = require('./graphql/server')
const { graphQLServer, context } = require('./graphql/server')
const app = express()
const { expressMiddleware } = require('@apollo/server/express4')
const configRequiredRoutes = [
const loadRoutes = async () => {
const app = express()
const configRequiredRoutes = [
'/poll',
'/terms_conditions',
'/event',
@ -48,71 +49,80 @@ const configRequiredRoutes = [
'/tx',
'/verify_promo_code',
'/graphql'
]
]
// middleware setup
app.use(addRWBytes())
app.use(compression({ threshold: 500 }))
app.use(helmet())
app.use(nocache())
app.use(express.json({ limit: '2mb' }))
// middleware setup
app.use(addRWBytes())
app.use(compression({ threshold: 500 }))
app.use(helmet())
app.use(nocache())
app.use(express.json({ limit: '2mb' }))
morgan.token('bytesRead', (_req, res) => res.bytesRead)
morgan.token('bytesWritten', (_req, res) => res.bytesWritten)
app.use(morgan(':method :url :status :response-time ms -- :bytesRead/:bytesWritten B', { stream: logger.stream }))
morgan.token('bytesRead', (_req, res) => res.bytesRead)
morgan.token('bytesWritten', (_req, res) => res.bytesWritten)
app.use(morgan(':method :url :status :response-time ms -- :bytesRead/:bytesWritten B', { stream: logger.stream }))
app.use('/robots.txt', (req, res) => {
app.use('/robots.txt', (req, res) => {
res.type('text/plain')
res.send("User-agent: *\nDisallow: /")
})
})
app.get('/', (req, res) => {
app.get('/', (req, res) => {
res.sendStatus(404)
})
})
// app /pair and /ca routes
app.use('/', pairingRoutes)
// app /pair and /ca routes
app.use('/', pairingRoutes)
app.use(findOperatorId)
app.use(populateDeviceId)
app.use(computeSchema)
app.use(authorize)
app.use(configRequiredRoutes, populateSettings)
app.use(filterOldRequests)
app.use(findOperatorId)
app.use(populateDeviceId)
app.use(authorize)
app.use(configRequiredRoutes, populateSettings)
app.use(filterOldRequests)
// other app routes
app.use('/graphql', recordPing)
app.use('/poll', pollingRoutes)
app.use('/terms_conditions', termsAndConditionsRoutes)
app.use('/state', stateRoutes)
app.use('/cashbox', cashboxRoutes)
// other app routes
app.use('/graphql', recordPing)
app.use('/poll', pollingRoutes)
app.use('/terms_conditions', termsAndConditionsRoutes)
app.use('/state', stateRoutes)
app.use('/cashbox', cashboxRoutes)
app.use('/network', performanceRoutes)
app.use('/diagnostics', diagnosticsRoutes)
app.use('/failedqrscans', failedQRScansRoutes)
app.use('/network', performanceRoutes)
app.use('/diagnostics', diagnosticsRoutes)
app.use('/failedqrscans', failedQRScansRoutes)
app.use('/verify_user', verifyUserRoutes)
app.use('/verify_transaction', verifyTxRoutes)
app.use('/verify_promo_code', verifyPromoCodeRoutes)
app.use('/verify_user', verifyUserRoutes)
app.use('/verify_transaction', verifyTxRoutes)
app.use('/verify_promo_code', verifyPromoCodeRoutes)
// BACKWARDS_COMPATIBILITY 9.0
// machines before 9.0 still use the phone_code route
app.use('/phone_code', phoneCodeRoutes)
// BACKWARDS_COMPATIBILITY 9.0
// machines before 9.0 still use the phone_code route
app.use('/phone_code', phoneCodeRoutes)
app.use('/customer', customerRoutes)
app.use('/customer', customerRoutes)
app.use('/tx', txRoutes)
app.use('/tx', txRoutes)
app.use('/logs', logsRoutes)
app.use('/units', unitsRoutes)
app.use('/logs', logsRoutes)
app.use('/units', unitsRoutes)
app.use('/probe', probeRoutes)
app.use('/probe', probeRoutes)
graphQLServer.applyMiddleware({ app })
await graphQLServer.start()
app.use('/graphql',
express.json(),
expressMiddleware(graphQLServer, {
context,
}),
);
app.use(errorHandler)
app.use((req, res) => {
app.use(errorHandler)
app.use((req, res) => {
res.status(404).json({ error: 'No such route' })
})
})
module.exports = { app }
return app
}
module.exports = { loadRoutes }
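
Because loadRoutes is now async (Apollo must finish start() before its middleware mounts), callers await it before binding a listener. A minimal sketch of the assumed bootstrap on the caller's side:

const { loadRoutes } = require('./app') // hypothetical path

async function run () {
  const app = await loadRoutes()
  // bind however the caller already does, e.g. https.createServer(certOptions, app)
  app.listen(3000)
}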

View file

@ -28,8 +28,7 @@ function cashboxRemoval (req, res, next) {
return cashbox.createCashboxBatch(req.deviceId, machine.cashbox)
.then(batch => Promise.all([
cashbox.getBatchById(batch.id),
getMachineName(batch.device_id),
setMachine({ deviceId: req.deviceId, action: 'emptyCashInBills' }, operatorId)
getMachineName(batch.device_id)
]))
})
.then(([batch, machineName]) => res.status(200).send({ batch: _.merge(batch, { machineName }), status: 'OK' }))

View file

@ -19,7 +19,7 @@ const loadOrUpdateSanctions = () => {
sanctionStatus.timestamp = Date.now()
})
.catch(e => {
logger.error('Couldn\'t load OFAC sanction list!')
logger.error('Couldn\'t load OFAC sanction list!', e)
})
}

View file

@ -1,25 +1,15 @@
const db = require('./db')
const migrateTools = require('./migrate-tools')
// This migration was updated on v10.2
// it's from before 7.5 and we update one major version at a time
// Data migration was removed, keeping only the schema update
exports.up = function (next) {
return migrateTools.migrateNames()
.then(updateSql => {
const sql = [
'alter table devices add column name text',
updateSql,
'alter table devices alter column name set not null'
]
return db.multi(sql, next)
})
.catch(() => {
const sql = [
'alter table devices add column name text',
'alter table devices alter column name set not null'
]
return db.multi(sql, next)
})
}
exports.down = function (next) {

View file

@ -1,34 +1,9 @@
const db = require('./db')
const machineLoader = require('../lib/machine-loader')
const { migrationSaveConfig, saveAccounts, loadLatest } = require('../lib/new-settings-loader')
const { migrate } = require('../lib/config-migration')
const _ = require('lodash/fp')
const OLD_SETTINGS_LOADER_SCHEMA_VERSION = 1
// This migration was actually a config update
// it's from before 7.5 and we update one major version at a time
// v10.2 is good enough to deprecate it
// file still has to exist so that the migration tool doesn't throw an error
module.exports.up = function (next) {
function migrateConfig (settings) {
const newSettings = migrate(settings.config, settings.accounts)
return Promise.all([
migrationSaveConfig(newSettings.config),
saveAccounts(newSettings.accounts)
])
.then(() => next())
}
loadLatest(OLD_SETTINGS_LOADER_SCHEMA_VERSION)
.then(settings => _.isEmpty(settings.config)
? next()
: migrateConfig(settings)
)
.catch(err => {
if (err.message === 'lamassu-server is not configured') {
return next()
}
console.log(err.message)
return next(err)
})
next()
}
module.exports.down = function (next) {

View file

@ -0,0 +1,12 @@
const db = require('./db')
exports.up = next => db.multi([
'DROP TABLE aggregated_machine_pings;',
'DROP TABLE cash_in_refills;',
'DROP TABLE cash_out_refills;',
'DROP TABLE customer_compliance_persistence;',
'DROP TABLE compliance_overrides_persistence;',
'DROP TABLE server_events;',
], next)
exports.down = next => next()

View file

@ -0,0 +1,10 @@
const db = require('./db')
exports.up = next => db.multi([
'ALTER TABLE bills ADD CONSTRAINT cash_in_txs_id FOREIGN KEY (cash_in_txs_id) REFERENCES cash_in_txs(id);',
'CREATE INDEX bills_cash_in_txs_id_idx ON bills USING btree (cash_in_txs_id);',
`CREATE INDEX bills_null_cashbox_batch_id_idx ON bills (cash_in_txs_id) WHERE cashbox_batch_id IS NULL AND destination_unit = 'cashbox';`,
'CREATE INDEX cash_in_txs_device_id_idx ON cash_in_txs USING btree (device_id);'
], next)
exports.down = next => next()
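
The WHERE clause makes bills_null_cashbox_batch_id_idx a partial index: it only covers bills still sitting in the cashbox without a batch, so it stays small, and Postgres will only consider it for queries that repeat the same predicate. A hypothetical lookup it would serve, assuming the project's pg-promise-style db helper; the query itself is illustrative, not taken from the codebase:

const db = require('./db')

// Matches the index predicate exactly, so the planner can use the index
const unbatchedCashboxBills = () => db.any(
  `SELECT id, cash_in_txs_id FROM bills
   WHERE cashbox_batch_id IS NULL AND destination_unit = 'cashbox'`
)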

View file

@ -0,0 +1,11 @@
const db = require('./db')
exports.up = next => db.multi([
'ALTER TABLE public.blacklist DROP CONSTRAINT IF EXISTS blacklist_pkey;',
'ALTER TABLE public.blacklist ADD PRIMARY KEY (address);',
'DROP INDEX IF EXISTS blacklist_temp_address_key;',
'CREATE UNIQUE INDEX blacklist_address_idx ON public.blacklist USING btree (address);',
], next)
exports.down = next => next()

View file

@ -1,16 +0,0 @@
const pgp = require('pg-promise')()
const _ = require('lodash/fp')
const settingsLoader = require('../lib/admin/settings-loader')
const machineLoader = require('../lib/machine-loader')
module.exports = {migrateNames}
function migrateNames () {
const cs = new pgp.helpers.ColumnSet(['?device_id', 'name'], {table: 'devices'})
return settingsLoader.loadLatestConfig(false)
.then(config => machineLoader.getMachineNames(config))
.then(_.map(r => ({device_id: r.deviceId, name: r.name})))
.then(data => pgp.helpers.update(data, cs) + ' WHERE t.device_id=v.device_id')
}

View file

@ -1 +0,0 @@
nodejs 22

File diff suppressed because it is too large

View file

@ -5,7 +5,7 @@
"type": "module",
"dependencies": {
"@apollo/react-hooks": "^3.1.3",
"@lamassu/coins": "v1.5.3",
"@lamassu/coins": "v1.6.1",
"@material-ui/core": "4.12.4",
"@material-ui/icons": "4.11.2",
"@material-ui/lab": "^4.0.0-alpha.61",
@ -18,7 +18,6 @@
"apollo-link-http": "^1.5.17",
"apollo-upload-client": "^13.0.0",
"axios": "0.21.1",
"base-64": "^1.0.0",
"bignumber.js": "9.0.0",
"classnames": "2.2.6",
"countries-and-timezones": "^2.4.0",

View file

@ -38,10 +38,10 @@ const SearchBox = memo(
classes={{ option: classes.autocomplete }}
value={filters}
options={options}
getOptionLabel={it => it.value}
getOptionLabel={it => it.label || it.value}
renderOption={it => (
<div className={classes.item}>
<P className={classes.itemLabel}>{it.value}</P>
<P className={classes.itemLabel}>{it.label || it.value}</P>
<P className={classes.itemType}>{it.type}</P>
</div>
)}
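
The getOptionLabel fallback means an option shows its label when one is present and degrades to the raw value otherwise. Illustrative option shapes (assumed, not from the codebase):

// An option with a human-readable label renders the label;
// one without it falls back to the raw value
const options = [
  { type: 'customer', value: '1b2e4a0c', label: 'John Doe' },
  { type: 'address', value: 'bc1qxy2kgdygjrsqtzq2n0yrf2493p83kkfjhx0wlh' }
]
const getOptionLabel = it => it.label || it.value
options.map(getOptionLabel) // ['John Doe', 'bc1qxy2kgdygjrsqtzq2n0yrf2493p83kkfjhx0wlh']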

View file

@ -32,7 +32,7 @@ const SearchFilter = ({
<Chip
key={idx}
classes={chipClasses}
label={`${onlyFirstToUpper(f.type)}: ${f.value}`}
label={`${onlyFirstToUpper(f.type)}: ${f.label || f.value}`}
onDelete={() => onFilterDelete(f)}
deleteIcon={<CloseIcon className={classes.button} />}
/>

View file

@ -27,11 +27,16 @@ const BooleanCell = ({ name }) => {
const BooleanPropertiesTable = memo(
({ title, disabled, data, elements, save, forcedEditing = false }) => {
const initialValues = R.fromPairs(
elements.map(it => [it.name, data[it.name]?.toString() ?? null])
elements.map(it => [it.name, data[it.name]?.toString() ?? 'false'])
)
const validationSchema = R.fromPairs(
elements.map(it => [it.name, Yup.boolean().required()])
const validationSchema = Yup.object().shape(
R.fromPairs(
elements.map(it => [
it.name,
Yup.mixed().oneOf(['true', 'false', true, false]).required()
])
)
)
const [editing, setEditing] = useState(forcedEditing)
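
Two things change here: the validation becomes a real Yup object schema instead of a bare map of validators, and it accepts both the stringified values kept in form state and genuine booleans. A standalone sketch of what passes and fails (field name illustrative):

import * as Yup from 'yup'

const schema = Yup.object().shape({
  enabled: Yup.mixed().oneOf(['true', 'false', true, false]).required()
})

schema.isValidSync({ enabled: 'true' }) // true, stringified form value
schema.isValidSync({ enabled: false })  // true, real boolean
schema.isValidSync({ enabled: 'yes' })  // false, rejected by oneOf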

View file

@ -53,7 +53,7 @@ const Td = ({
[classes.size]: !header,
[classes.bold]: !header && bold
}
return <div className={classnames(className, classNames)}>{children}</div>
return <div data-cy={`td-${header}`} className={classnames(className, classNames)}>{children}</div>
}
const Th = ({ children, ...props }) => {

View file

@ -37,8 +37,8 @@ const MACHINE_LOGS = gql`
query machineLogsCsv(
$deviceId: ID!
$limit: Int
$from: Date
$until: Date
$from: DateTimeISO
$until: DateTimeISO
$timezone: String
) {
machineLogsCsv(
@ -52,7 +52,6 @@ const MACHINE_LOGS = gql`
`
const createCsv = async ({ machineLogsCsv }) => {
console.log(machineLogsCsv)
const machineLogs = new Blob([machineLogsCsv], {
type: 'text/plain;charset=utf-8'
})
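
The Date to DateTimeISO switch (repeated in the transactions query further down) means the from/until variables carry full ISO-8601 timestamps rather than date-only strings. Illustrative variables, assuming DateTimeISO follows the usual graphql-scalars semantics:

// Before (Date):        from: '2025-04-01'
// After (DateTimeISO):  full timestamps with timezone designator
const variables = {
  deviceId: 'abc123',
  limit: 500,
  from: '2025-04-01T00:00:00.000Z',
  until: '2025-04-15T23:59:59.999Z',
  timezone: 'Europe/Lisbon'
}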

View file

@ -53,6 +53,7 @@ const Row = ({
return (
<div className={classes.rowWrapper}>
<div
data-cy={id}
className={classnames({ [classes.before]: expanded && index !== 0 })}>
<Tr
size={size}

View file

@ -59,8 +59,8 @@ const DAY_OPTIONS = R.map(
const GET_TRANSACTIONS = gql`
query transactions(
$from: Date
$until: Date
$from: DateTimeISO
$until: DateTimeISO
$excludeTestingCustomers: Boolean
) {
transactions(

View file

@ -11,7 +11,7 @@ import CloseIcon from 'src/styling/icons/action/close/zodiac.svg?react'
import ReverseSettingsIcon from 'src/styling/icons/circle buttons/settings/white.svg?react'
import SettingsIcon from 'src/styling/icons/circle buttons/settings/zodiac.svg?react'
import { Link, Button, IconButton } from 'src/components/buttons'
import { Link, Button, IconButton, SupportLinkButton } from 'src/components/buttons'
import { Switch } from 'src/components/inputs'
import { fromNamespace, toNamespace } from 'src/utils/config'
@ -275,10 +275,13 @@ const Blacklist = () => {
<Label2>{rejectAddressReuse ? 'On' : 'Off'}</Label2>
<HelpTooltip width={304}>
<P>
This option requires a user to scan a fresh wallet address if
they attempt to scan one that had been previously used for a
transaction in your network.
For details about rejecting address reuse, please read the
relevant knowledgebase article:
</P>
<SupportLinkButton
link="https://support.lamassu.is/hc/en-us/articles/360033622211-Reject-Address-Reuse"
label="Reject Address Reuse"
/>
</HelpTooltip>
</Box>
<Link color="primary" onClick={() => setShowModal(true)}>

View file

@ -135,7 +135,7 @@ const Commissions = ({ name: SCREEN_KEY }) => {
/>
<SupportLinkButton
link="https://support.lamassu.is/hc/en-us/articles/360061558352-Commissions-and-Profit-Calculations"
label="SCommissions and Profit Calculations"
label="Commissions and Profit Calculations"
bottomSpace="1"
/>
</HelpTooltip>

View file

@ -147,19 +147,25 @@ const Wizard = ({
onSubmit={onContinue}
initialValues={stepOptions.initialValues}
validationSchema={stepOptions.schema}>
{({ errors }) => (
<Form className={classes.form}>
<stepOptions.Component
selectedValues={selectedValues}
customInfoRequirementOptions={customInfoRequirementOptions}
errors={errors}
{...stepOptions.props}
/>
<div className={classes.submit}>
{error && <ErrorMessage>Failed to save</ErrorMessage>}
{Object.keys(errors).length > 0 && (
<ErrorMessage>{Object.values(errors)[0]}</ErrorMessage>
)}
<Button className={classes.button} type="submit">
{isLastStep ? 'Add Data' : 'Next'}
</Button>
</div>
</Form>
)}
</Formik>
</Modal>
</>
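
Formik's errors render-prop value is a map from field name to message, so the added block surfaces the first pending message next to the submit button. A tiny sketch of what it evaluates (message text borrowed from the date validation below):

const errors = { dateOfBirth: 'Date must be in format YYYY-MM-DD' }
const banner = Object.keys(errors).length > 0 && Object.values(errors)[0]
// banner === 'Date must be in format YYYY-MM-DD'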

View file

@ -453,14 +453,16 @@ const customerDataSchemas = {
documentNumber: Yup.string().required(),
dateOfBirth: Yup.string()
.test({
test: val => isValid(parse(new Date(), 'yyyy-MM-dd', val))
test: val => isValid(parse(new Date(), 'yyyy-MM-dd', val)),
message: 'Date must be in format YYYY-MM-DD'
})
.required(),
gender: Yup.string().required(),
country: Yup.string().required(),
expirationDate: Yup.string()
.test({
test: val => isValid(parse(new Date(), 'yyyy-MM-dd', val))
test: val => isValid(parse(new Date(), 'yyyy-MM-dd', val)),
message: 'Date must be in format YYYY-MM-DD'
})
.required()
}),
@ -543,9 +545,12 @@ const tryFormatDate = rawDate => {
}
const formatDates = values => {
R.forEach(elem => {
values[elem] = tryFormatDate(values[elem])
})(['dateOfBirth', 'expirationDate'])
R.map(
elem =>
(values[elem] = format('yyyyMMdd')(
parse(new Date(), 'yyyy-MM-dd', values[elem])
))
)(['dateOfBirth', 'expirationDate'])
return values
}
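
The argument order in these calls (new Date() first, the input string last) is the data-last, curried style of date-fns/fp, not a bug relative to plain date-fns. A standalone sketch, assuming date-fns/fp is what's imported here:

import { format, isValid, parse } from 'date-fns/fp'

const raw = '1990-01-31'
// fp order: reference date, format string, then the input
const parsed = parse(new Date(), 'yyyy-MM-dd', raw)
isValid(parsed)            // true
format('yyyyMMdd')(parsed) // '19900131'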

View file

@ -8,6 +8,7 @@ import { HelpTooltip } from 'src/components/Tooltip'
import Section from 'src/components/layout/Section'
import TitleSection from 'src/components/layout/TitleSection'
import { P } from 'src/components/typography'
import _schemas from 'src/pages/Services/schemas'
import Wizard from 'src/pages/Wallet/Wizard'
import { WalletSchema } from 'src/pages/Wallet/helper'
@ -68,6 +69,12 @@ const SAVE_CONFIG = gql`
}
`
const GET_MARKETS = gql`
query getMarkets {
getMarkets
}
`
const FiatCurrencyChangeAlert = ({ open, close, save }) => {
const classes = useStyles()
@ -107,6 +114,9 @@ const Locales = ({ name: SCREEN_KEY }) => {
const [isEditingDefault, setEditingDefault] = useState(false)
const [isEditingOverrides, setEditingOverrides] = useState(false)
const { data } = useQuery(GET_DATA)
const { data: marketsData } = useQuery(GET_MARKETS)
const schemas = _schemas(marketsData?.getMarkets)
const [saveConfig] = useMutation(SAVE_CONFIG, {
onCompleted: () => setWizard(false),
refetchQueries: () => ['getData'],
@ -234,6 +244,7 @@ const Locales = ({ name: SCREEN_KEY }) => {
</Section>
{wizard && (
<Wizard
schemas={schemas}
coin={R.find(R.propEq('code', wizard))(cryptoCurrencies)}
onClose={() => setWizard(false)}
save={wizardSave}

Some files were not shown because too many files have changed in this diff