Merge pull request #1814 from RafaelTaranto/feat/add-unpaired-machine-names

LAM-1362 feat: add unpaired machine names
Authored by Rafael Taranto on 2025-03-28 11:09:41 +00:00, committed by GitHub
commit 59da215788
21 changed files with 109 additions and 2381 deletions
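In short: transaction rows no longer get their machine name attached in JavaScript (the old addNames helper); the SQL now resolves it, including machines that have since been unpaired. A plain-JS illustration of that fallback follows — sample names are made up, and the real change is the DEVICE_NAME_QUERY/DEVICE_NAME_JOINS SQL added to transactions.js further down.

// Plain-JS sketch of the name fallback this PR pushes into SQL.
// Sample names are hypothetical; see DEVICE_NAME_QUERY below for the actual CASE expression.
const machineName = (pairedName, unpairedName) => {
  if (unpairedName) return `${unpairedName} (unpaired)`
  if (pairedName) return pairedName
  return 'Unpaired'
}

console.log(machineName('Lobby BATM', null)) // 'Lobby BATM'
console.log(machineName(null, 'Lobby BATM')) // 'Lobby BATM (unpaired)'
console.log(machineName(null, null))         // 'Unpaired'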


@ -1,67 +0,0 @@
const _ = require('lodash/fp')
module.exports = {
unscoped,
cryptoScoped,
machineScoped,
scoped,
scopedValue,
all
}
function matchesValue (crypto, machine, instance) {
return instance.fieldLocator.fieldScope.crypto === crypto &&
instance.fieldLocator.fieldScope.machine === machine
}
function permutations (crypto, machine) {
return _.uniq([
[crypto, machine],
[crypto, 'global'],
['global', machine],
['global', 'global']
])
}
function fallbackValue (crypto, machine, instances) {
const notNil = _.negate(_.isNil)
const pickValue = arr => _.find(instance => matchesValue(arr[0], arr[1], instance), instances)
const fallbackRec = _.find(notNil, _.map(pickValue, permutations(crypto, machine)))
return fallbackRec && fallbackRec.fieldValue.value
}
function scopedValue (crypto, machine, fieldCode, config) {
const allScopes = config.filter(_.pathEq(['fieldLocator', 'code'], fieldCode))
return fallbackValue(crypto, machine, allScopes)
}
function generalScoped (crypto, machine, config) {
const localScopedValue = key =>
scopedValue(crypto, machine, key, config)
const keys = _.uniq(_.map(r => r.fieldLocator.code, config))
const keyedValues = keys.map(localScopedValue)
return _.zipObject(keys, keyedValues)
}
function machineScoped (machine, config) {
return generalScoped('global', machine, config)
}
function unscoped (config) {
return generalScoped('global', 'global', config)
}
function cryptoScoped (crypto, config) {
return generalScoped(crypto, 'global', config)
}
function scoped (crypto, machine, config) {
return generalScoped(crypto, machine, config)
}
function all (code, config) {
return _.uniq(_.map('fieldValue.value', _.filter(i => i.fieldLocator.code === code, config)))
}
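
A hypothetical usage sketch of the scope fallback implemented by the file above (removed in this PR): the most specific (crypto, machine) value wins, then (crypto, 'global'), then ('global', machine), then ('global', 'global'). The values and machine id below are made up.

const configManager = require('./config-manager') // the module shown above

const config = [
  { fieldLocator: { code: 'cashInCommission', fieldScope: { crypto: 'global', machine: 'global' } },
    fieldValue: { value: 8 } },
  { fieldLocator: { code: 'cashInCommission', fieldScope: { crypto: 'BTC', machine: 'global' } },
    fieldValue: { value: 5 } }
]

// BTC has a crypto-scoped override; ETH falls back to the global value.
console.log(configManager.scopedValue('BTC', 'machine-1', 'cashInCommission', config)) // 5
console.log(configManager.scopedValue('ETH', 'machine-1', 'cashInCommission', config)) // 8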


@ -1,191 +0,0 @@
const _ = require('lodash/fp')
const db = require('../db')
const configManager = require('./config-manager')
const logger = require('../logger')
const schema = require('./lamassu-schema.json')
const REMOVED_FIELDS = ['crossRefVerificationActive', 'crossRefVerificationThreshold']
const SETTINGS_LOADER_SCHEMA_VERSION = 1
function allScopes (cryptoScopes, machineScopes) {
const scopes = []
cryptoScopes.forEach(c => {
machineScopes.forEach(m => scopes.push([c, m]))
})
return scopes
}
function allCryptoScopes (cryptos, cryptoScope) {
const cryptoScopes = []
if (cryptoScope === 'global' || cryptoScope === 'both') cryptoScopes.push('global')
if (cryptoScope === 'specific' || cryptoScope === 'both') cryptos.forEach(r => cryptoScopes.push(r))
return cryptoScopes
}
function allMachineScopes (machineList, machineScope) {
const machineScopes = []
if (machineScope === 'global' || machineScope === 'both') machineScopes.push('global')
if (machineScope === 'specific' || machineScope === 'both') machineList.forEach(r => machineScopes.push(r))
return machineScopes
}
function satisfiesRequire (config, cryptos, machineList, field, anyFields, allFields) {
const fieldCode = field.code
const scopes = allScopes(
allCryptoScopes(cryptos, field.cryptoScope),
allMachineScopes(machineList, field.machineScope)
)
return scopes.every(scope => {
const isAnyEnabled = () => _.some(refField => {
return isScopeEnabled(config, cryptos, machineList, refField, scope)
}, anyFields)
const areAllEnabled = () => _.every(refField => {
return isScopeEnabled(config, cryptos, machineList, refField, scope)
}, allFields)
const isBlank = _.isNil(configManager.scopedValue(scope[0], scope[1], fieldCode, config))
const isRequired = (_.isEmpty(anyFields) || isAnyEnabled()) &&
(_.isEmpty(allFields) || areAllEnabled())
const hasDefault = !_.isNil(_.get('default', field))
const isValid = !isRequired || !isBlank || hasDefault
return isValid
})
}
function isScopeEnabled (config, cryptos, machineList, refField, scope) {
const [cryptoScope, machineScope] = scope
const candidateCryptoScopes = cryptoScope === 'global'
? allCryptoScopes(cryptos, refField.cryptoScope)
: [cryptoScope]
const candidateMachineScopes = machineScope === 'global'
? allMachineScopes(machineList, refField.machineScope)
: [ machineScope ]
const allRefCandidateScopes = allScopes(candidateCryptoScopes, candidateMachineScopes)
const getFallbackValue = scope => configManager.scopedValue(scope[0], scope[1], refField.code, config)
const values = allRefCandidateScopes.map(getFallbackValue)
return values.some(r => r)
}
function getCryptos (config, machineList) {
const scopes = allScopes(['global'], allMachineScopes(machineList, 'both'))
const scoped = scope => configManager.scopedValue(scope[0], scope[1], 'cryptoCurrencies', config)
return scopes.reduce((acc, scope) => _.union(acc, scoped(scope)), [])
}
function getGroup (fieldCode) {
return _.find(group => _.includes(fieldCode, group.fields), schema.groups)
}
function getField (fieldCode) {
const group = getGroup(fieldCode)
return getGroupField(group, fieldCode)
}
function getGroupField (group, fieldCode) {
const field = _.find(_.matchesProperty('code', fieldCode), schema.fields)
return _.merge(_.pick(['cryptoScope', 'machineScope'], group), field)
}
// Note: We can't use machine-loader because it relies on settings-loader,
// which relies on this
function getMachines () {
return db.any('select device_id from devices')
}
function fetchMachines () {
return getMachines()
.then(machineList => machineList.map(r => r.device_id))
}
function validateFieldParameter (value, validator) {
switch (validator.code) {
case 'required':
return true // We don't validate this here
case 'min':
return value >= validator.min
case 'max':
return value <= validator.max
default:
throw new Error('Unknown validation type: ' + validator.code)
}
}
function ensureConstraints (config) {
const pickField = fieldCode => schema.fields.find(r => r.code === fieldCode)
return Promise.resolve()
.then(() => {
config.every(fieldInstance => {
const fieldCode = fieldInstance.fieldLocator.code
if (_.includes(fieldCode, REMOVED_FIELDS)) return
const field = pickField(fieldCode)
if (!field) {
logger.warn('No such field: %s, %j', fieldCode, fieldInstance.fieldLocator.fieldScope)
return
}
const fieldValue = fieldInstance.fieldValue
const isValid = field.fieldValidation
.every(validator => validateFieldParameter(fieldValue.value, validator))
if (isValid) return true
throw new Error('Invalid config value')
})
})
}
function validateRequires (config) {
return fetchMachines()
.then(machineList => {
const cryptos = getCryptos(config, machineList)
return schema.groups.filter(group => {
return group.fields.some(fieldCode => {
const field = getGroupField(group, fieldCode)
if (!field.fieldValidation.find(r => r.code === 'required')) return false
const refFieldsAny = _.map(_.partial(getField, group), field.enabledIfAny)
const refFieldsAll = _.map(_.partial(getField, group), field.enabledIfAll)
const isInvalid = !satisfiesRequire(config, cryptos, machineList, field, refFieldsAny, refFieldsAll)
return isInvalid
})
})
})
.then(arr => arr.map(r => r.code))
}
function validate (config) {
return Promise.resolve()
.then(() => ensureConstraints(config))
.then(() => validateRequires(config))
.then(arr => {
if (arr.length === 0) return config
throw new Error('Invalid configuration:' + arr)
})
}
module.exports = {
SETTINGS_LOADER_SCHEMA_VERSION,
validate,
ensureConstraints,
validateRequires
}


@ -1,230 +0,0 @@
const _ = require('lodash/fp')
const devMode = require('minimist')(process.argv.slice(2)).dev
const currencies = require('../new-admin/config/data/currencies.json')
const languageRec = require('../new-admin/config/data/languages.json')
const countries = require('../new-admin/config/data/countries.json')
const machineLoader = require('../machine-loader')
const configManager = require('./config-manager')
const db = require('../db')
const settingsLoader = require('./settings-loader')
const configValidate = require('./config-validate')
const jsonSchema = require('./lamassu-schema.json')
function fetchSchema () {
return _.cloneDeep(jsonSchema)
}
function fetchConfig () {
const sql = `select data from user_config where type=$1 and schema_version=$2
order by id desc limit 1`
return db.oneOrNone(sql, ['config', configValidate.SETTINGS_LOADER_SCHEMA_VERSION])
.then(row => row ? row.data.config : [])
}
function allScopes (cryptoScopes, machineScopes) {
const scopes = []
cryptoScopes.forEach(c => {
machineScopes.forEach(m => scopes.push([c, m]))
})
return scopes
}
function allMachineScopes (machineList, machineScope) {
const machineScopes = []
if (machineScope === 'global' || machineScope === 'both') machineScopes.push('global')
if (machineScope === 'specific' || machineScope === 'both') machineList.forEach(r => machineScopes.push(r))
return machineScopes
}
function getCryptos (config, machineList) {
const scopes = allScopes(['global'], allMachineScopes(machineList, 'both'))
const scoped = scope => configManager.scopedValue(scope[0], scope[1], 'cryptoCurrencies', config)
return scopes.reduce((acc, scope) => _.union(acc, scoped(scope)), [])
}
function getGroup (schema, fieldCode) {
return schema.groups.find(group => group.fields.find(_.isEqual(fieldCode)))
}
function getField (schema, group, fieldCode) {
if (!group) group = getGroup(schema, fieldCode)
const field = schema.fields.find(r => r.code === fieldCode)
return _.merge(_.pick(['cryptoScope', 'machineScope'], group), field)
}
const fetchMachines = () => machineLoader.getMachines()
.then(machineList => machineList.map(r => r.deviceId))
function validateCurrentConfig () {
return fetchConfig()
.then(configValidate.validateRequires)
}
const decorateEnabledIf = _.curry((schemaFields, schemaField) => {
const code = schemaField.fieldLocator.code
const field = _.find(f => f.code === code, schemaFields)
return _.assign(schemaField, {
fieldEnabledIfAny: field.enabledIfAny || [],
fieldEnabledIfAll: field.enabledIfAll || []
})
})
function fetchConfigGroup (code) {
const fieldLocatorCodeEq = _.matchesProperty(['fieldLocator', 'code'])
return Promise.all([fetchSchema(), fetchData(), fetchConfig(), fetchMachines()])
.then(([schema, data, config, machineList]) => {
const groupSchema = schema.groups.find(r => r.code === code)
if (!groupSchema) throw new Error('No such group schema: ' + code)
const schemaFields = groupSchema.fields
.map(_.curry(getField)(schema, groupSchema))
.map(f => _.assign(f, {
fieldEnabledIfAny: f.enabledIfAny || [],
fieldEnabledIfAll: f.enabledIfAll || []
}))
const candidateFields = [
schemaFields.map(_.get('requiredIf')),
schemaFields.map(_.get('enabledIfAny')),
schemaFields.map(_.get('enabledIfAll')),
groupSchema.fields,
'fiatCurrency'
]
const smush = _.flow(_.flattenDeep, _.compact, _.uniq)
const configFields = smush(candidateFields)
// Expand this to check against full schema
const fieldValidator = field => !_.isNil(_.get('fieldLocator.fieldScope.crypto', field))
const reducer = (acc, configField) => {
return acc.concat(config.filter(fieldLocatorCodeEq(configField)))
}
const reducedFields = _.filter(fieldValidator, configFields.reduce(reducer, []))
const values = _.map(decorateEnabledIf(schema.fields), reducedFields)
groupSchema.fields = undefined
groupSchema.entries = schemaFields
const selectedCryptos = _.defaultTo([], getCryptos(config, machineList))
return {
schema: groupSchema,
values,
selectedCryptos,
data
}
})
}
function massageCurrencies (currencies) {
const convert = r => ({
code: r['Alphabetic Code'],
display: r['Currency']
})
const top5Codes = ['USD', 'EUR', 'GBP', 'CAD', 'AUD']
const mapped = _.map(convert, currencies)
const codeToRec = code => _.find(_.matchesProperty('code', code), mapped)
const top5 = _.map(codeToRec, top5Codes)
const raw = _.uniqBy(_.get('code'), _.concat(top5, mapped))
return raw.filter(r => r.code !== '' && r.code[0] !== 'X' && r.display.indexOf('(') === -1)
}
const mapLanguage = lang => {
const arr = lang.split('-')
const code = arr[0]
const country = arr[1]
const langNameArr = languageRec.lang[code]
if (!langNameArr) return null
const langName = langNameArr[0]
if (!country) return {code: lang, display: langName}
return {code: lang, display: `${langName} [${country}]`}
}
const supportedLanguages = languageRec.supported
const languages = supportedLanguages.map(mapLanguage).filter(r => r)
const ALL_CRYPTOS = ['BTC', 'ETH', 'LTC', 'DASH', 'ZEC', 'BCH']
const filterAccounts = (data, isDevMode) => {
const notAllowed = ['mock-ticker', 'mock-wallet', 'mock-exchange', 'mock-sms', 'mock-id-verify', 'mock-zero-conf']
const filterOut = o => _.includes(o.code, notAllowed)
return isDevMode ? data : {...data, accounts: _.filter(a => !filterOut(a), data.accounts)}
}
function fetchData () {
return machineLoader.getMachineNames()
.then(machineList => ({
currencies: massageCurrencies(currencies),
cryptoCurrencies: [
{crypto: 'BTC', display: 'Bitcoin'},
{crypto: 'ETH', display: 'Ethereum'},
{crypto: 'LTC', display: 'Litecoin'},
{crypto: 'DASH', display: 'Dash'},
{crypto: 'ZEC', display: 'Zcash'},
{crypto: 'BCH', display: 'Bitcoin Cash'}
],
languages: languages,
countries,
accounts: [
{code: 'bitpay', display: 'Bitpay', class: 'ticker', cryptos: ['BTC', 'BCH']},
{code: 'kraken', display: 'Kraken', class: 'ticker', cryptos: ['BTC', 'ETH', 'LTC', 'DASH', 'ZEC', 'BCH']},
{code: 'bitstamp', display: 'Bitstamp', class: 'ticker', cryptos: ['BTC', 'ETH', 'LTC', 'BCH']},
{code: 'coinbase', display: 'Coinbase', class: 'ticker', cryptos: ['BTC', 'ETH', 'LTC', 'BCH', 'ZEC', 'DASH']},
{code: 'itbit', display: 'itBit', class: 'ticker', cryptos: ['BTC', 'ETH']},
{code: 'mock-ticker', display: 'Mock (Caution!)', class: 'ticker', cryptos: ALL_CRYPTOS},
{code: 'bitcoind', display: 'bitcoind', class: 'wallet', cryptos: ['BTC']},
{code: 'no-layer2', display: 'No Layer 2', class: 'layer2', cryptos: ALL_CRYPTOS},
{code: 'infura', display: 'Infura', class: 'wallet', cryptos: ['ETH']},
{code: 'geth', display: 'geth', class: 'wallet', cryptos: ['ETH']},
{code: 'zcashd', display: 'zcashd', class: 'wallet', cryptos: ['ZEC']},
{code: 'litecoind', display: 'litecoind', class: 'wallet', cryptos: ['LTC']},
{code: 'dashd', display: 'dashd', class: 'wallet', cryptos: ['DASH']},
{code: 'bitcoincashd', display: 'bitcoincashd', class: 'wallet', cryptos: ['BCH']},
{code: 'bitgo', display: 'BitGo', class: 'wallet', cryptos: ['BTC', 'ZEC', 'LTC', 'BCH', 'DASH']},
{code: 'bitstamp', display: 'Bitstamp', class: 'exchange', cryptos: ['BTC', 'ETH', 'LTC', 'BCH']},
{code: 'itbit', display: 'itBit', class: 'exchange', cryptos: ['BTC', 'ETH']},
{code: 'kraken', display: 'Kraken', class: 'exchange', cryptos: ['BTC', 'ETH', 'LTC', 'DASH', 'ZEC', 'BCH']},
{code: 'mock-wallet', display: 'Mock (Caution!)', class: 'wallet', cryptos: ALL_CRYPTOS},
{code: 'no-exchange', display: 'No exchange', class: 'exchange', cryptos: ALL_CRYPTOS},
{code: 'mock-exchange', display: 'Mock exchange', class: 'exchange', cryptos: ALL_CRYPTOS},
{code: 'mock-sms', display: 'Mock SMS', class: 'sms'},
{code: 'mock-id-verify', display: 'Mock ID verifier', class: 'idVerifier'},
{code: 'twilio', display: 'Twilio', class: 'sms'},
{code: 'mailgun', display: 'Mailgun', class: 'email'},
{code: 'all-zero-conf', display: 'Always 0-conf', class: 'zeroConf', cryptos: ['BTC', 'ZEC', 'LTC', 'DASH', 'BCH']},
{code: 'no-zero-conf', display: 'Always 1-conf', class: 'zeroConf', cryptos: ALL_CRYPTOS},
{code: 'blockcypher', display: 'Blockcypher', class: 'zeroConf', cryptos: ['BTC']},
{code: 'mock-zero-conf', display: 'Mock 0-conf', class: 'zeroConf', cryptos: ['BTC', 'ZEC', 'LTC', 'DASH', 'BCH', 'ETH']}
],
machines: machineList.map(machine => ({machine: machine.deviceId, display: machine.name}))
}))
.then((data) => {
return filterAccounts(data, devMode)
})
}
function saveConfigGroup (results) {
if (results.values.length === 0) return fetchConfigGroup(results.groupCode)
return settingsLoader.modifyConfig(results.values)
.then(() => fetchConfigGroup(results.groupCode))
}
module.exports = {
fetchConfigGroup,
saveConfigGroup,
validateCurrentConfig,
fetchConfig,
filterAccounts
}

File diff suppressed because it is too large


@ -1,250 +0,0 @@
const path = require('path')
const fs = require('fs')
const _ = require('lodash/fp')
const argv = require('minimist')(process.argv.slice(2))
const pify = require('pify')
const pgp = require('pg-promise')()
const db = require('../db')
const configValidate = require('./config-validate')
const schema = require('./lamassu-schema.json')
let settingsCache
function loadFixture () {
const fixture = argv.fixture
const machine = argv.machine
if (fixture && !machine) throw new Error('Missing --machine parameter for --fixture')
const fixturePath = fixture => path.resolve(__dirname, '..', 'test', 'fixtures', fixture + '.json')
const promise = fixture
? pify(fs.readFile)(fixturePath(fixture)).then(JSON.parse)
: Promise.resolve([])
return promise
.then(values => _.map(v => {
return (v.fieldLocator.fieldScope.machine === 'machine')
? _.set('fieldLocator.fieldScope.machine', machine, v)
: v
}, values))
}
function isEquivalentField (a, b) {
return _.isEqual(
[a.fieldLocator.code, a.fieldLocator.fieldScope],
[b.fieldLocator.code, b.fieldLocator.fieldScope]
)
}
// b overrides a
function mergeValues (a, b) {
return _.reject(r => _.isNil(r.fieldValue), _.unionWith(isEquivalentField, b, a))
}
function load (versionId) {
if (!versionId) throw new Error('versionId is required')
return Promise.all([loadConfig(versionId), loadAccounts()])
.then(([config, accounts]) => ({
config,
accounts
}))
}
function loadLatest (filterSchemaVersion = true) {
return Promise.all([loadLatestConfig(filterSchemaVersion), loadAccounts(filterSchemaVersion)])
.then(([config, accounts]) => ({
config,
accounts
}))
}
function loadConfig (versionId) {
if (argv.fixture) return loadFixture()
const sql = `select data
from user_config
where id=$1 and type=$2 and schema_version=$3
and valid`
return db.one(sql, [versionId, 'config', configValidate.SETTINGS_LOADER_SCHEMA_VERSION])
.then(row => row.data.config)
.then(configValidate.validate)
.catch(err => {
if (err.name === 'QueryResultError') {
throw new Error('No such config version: ' + versionId)
}
throw err
})
}
function loadLatestConfig (filterSchemaVersion = true) {
if (argv.fixture) return loadFixture()
const sql = `select id, valid, data
from user_config
where type=$1 ${filterSchemaVersion ? 'and schema_version=$2' : ''}
and valid
order by id desc
limit 1`
return db.oneOrNone(sql, ['config', configValidate.SETTINGS_LOADER_SCHEMA_VERSION])
.then(row => row.data.config)
.then(configValidate.validate)
.catch(err => {
if (err.name === 'QueryResultError') {
throw new Error('lamassu-server is not configured')
}
throw err
})
}
function loadRecentConfig () {
if (argv.fixture) return loadFixture()
const sql = `select id, data
from user_config
where type=$1 and schema_version=$2
order by id desc
limit 1`
return db.one(sql, ['config', configValidate.SETTINGS_LOADER_SCHEMA_VERSION])
.then(row => row.data.config)
}
function loadAccounts (filterSchemaVersion = true) {
const toFields = fieldArr => _.fromPairs(_.map(r => [r.code, r.value], fieldArr))
const toPairs = r => [r.code, toFields(r.fields)]
return db.oneOrNone(`select data from user_config where type=$1 ${filterSchemaVersion ? 'and schema_version=$2' : ''}`, ['accounts', configValidate.SETTINGS_LOADER_SCHEMA_VERSION])
.then(function (data) {
if (!data) return {}
return _.fromPairs(_.map(toPairs, data.data.accounts))
})
}
function settings () {
return settingsCache
}
function save (config) {
const sql = 'insert into user_config (type, data, valid) values ($1, $2, $3)'
return configValidate.validate(config)
.then(() => db.none(sql, ['config', {config}, true]))
.catch(() => db.none(sql, ['config', {config}, false]))
}
function configAddField (scope, fieldCode, fieldType, fieldClass, value) {
return {
fieldLocator: {
fieldScope: {
crypto: scope.crypto,
machine: scope.machine
},
code: fieldCode,
fieldType,
fieldClass
},
fieldValue: {fieldType, value}
}
}
function configDeleteField (scope, fieldCode) {
return {
fieldLocator: {
fieldScope: {
crypto: scope.crypto,
machine: scope.machine
},
code: fieldCode
},
fieldValue: null
}
}
function populateScopes (schema) {
const scopeLookup = {}
_.forEach(r => {
const scope = {
cryptoScope: r.cryptoScope,
machineScope: r.machineScope
}
_.forEach(field => { scopeLookup[field] = scope }, r.fields)
}, schema.groups)
return _.map(r => _.assign(scopeLookup[r.code], r), schema.fields)
}
function cryptoDefaultOverride (cryptoCode, code, defaultValue) {
if (cryptoCode === 'ETH' && code === 'zeroConf') {
return 'no-zero-conf'
}
return defaultValue
}
function cryptoCodeDefaults (schema, cryptoCode) {
const scope = {crypto: cryptoCode, machine: 'global'}
const schemaEntries = populateScopes(schema)
const hasCryptoSpecificDefault = r => r.cryptoScope === 'specific' && !_.isNil(r.default)
const cryptoSpecificFields = _.filter(hasCryptoSpecificDefault, schemaEntries)
return _.map(r => {
const defaultValue = cryptoDefaultOverride(cryptoCode, r.code, r.default)
return configAddField(scope, r.code, r.fieldType, r.fieldClass, defaultValue)
}, cryptoSpecificFields)
}
const uniqCompact = _.flow(_.compact, _.uniq)
function addCryptoDefaults (oldConfig, newFields) {
const cryptoCodeEntries = _.filter(v => v.fieldLocator.code === 'cryptoCurrencies', newFields)
const cryptoCodes = _.flatMap(_.get('fieldValue.value'), cryptoCodeEntries)
const uniqueCryptoCodes = uniqCompact(cryptoCodes)
const mapDefaults = cryptoCode => cryptoCodeDefaults(schema, cryptoCode)
const defaults = _.flatMap(mapDefaults, uniqueCryptoCodes)
return mergeValues(defaults, oldConfig)
}
function modifyConfig (newFields) {
const TransactionMode = pgp.txMode.TransactionMode
const isolationLevel = pgp.txMode.isolationLevel
const mode = new TransactionMode({ tiLevel: isolationLevel.serializable })
function transaction (t) {
return loadRecentConfig()
.then(oldConfig => {
const oldConfigWithDefaults = addCryptoDefaults(oldConfig, newFields)
const doSave = _.flow(mergeValues, save)
return doSave(oldConfigWithDefaults, newFields)
})
}
return db.tx({ mode }, transaction)
}
module.exports = {
settings,
loadConfig,
loadRecentConfig,
load,
loadLatest,
loadLatestConfig,
save,
loadFixture,
mergeValues,
modifyConfig,
configAddField,
configDeleteField
}


@ -1,477 +0,0 @@
const _ = require('lodash/fp')
const uuid = require('uuid')
const { COINS } = require('@lamassu/coins')
const { scopedValue } = require('./admin/config-manager')
const GLOBAL = 'global'
const ALL_CRYPTOS = _.values(COINS).sort()
const ALL_CRYPTOS_STRING = 'ALL_COINS'
const ALL_MACHINES = 'ALL_MACHINES'
const GLOBAL_SCOPE = {
crypto: ALL_CRYPTOS,
machine: GLOBAL
}
function getConfigFields (codes, config) {
const stringfiedGlobalScope = JSON.stringify(GLOBAL_SCOPE)
const fields = config
.filter(i => codes.includes(i.fieldLocator.code))
.map(f => {
const crypto = Array.isArray(f.fieldLocator.fieldScope.crypto)
? f.fieldLocator.fieldScope.crypto.sort()
: f.fieldLocator.fieldScope.crypto === GLOBAL
? ALL_CRYPTOS
: [f.fieldLocator.fieldScope.crypto]
const machine = f.fieldLocator.fieldScope.machine
return {
code: f.fieldLocator.code,
scope: {
crypto,
machine
},
value: f.fieldValue.value
}
})
.filter(f => f.value != null)
const grouped = _.chain(fields)
.groupBy(f => JSON.stringify(f.scope))
.value()
return {
global: grouped[stringfiedGlobalScope] || [],
scoped:
_.entries(
_.chain(grouped)
.omit([stringfiedGlobalScope])
.value()
).map(f => {
const fallbackValues =
_.difference(codes, f[1].map(v => v.code))
.map(v => ({
code: v,
scope: JSON.parse(f[0]),
value: scopedValue(f[0].crypto, f[0].machine, v, config)
}))
.filter(f => f.value != null)
return {
scope: JSON.parse(f[0]),
values: f[1].concat(fallbackValues)
}
}) || []
}
}
function migrateCommissions (config) {
const areArraysEquals = (arr1, arr2) => Array.isArray(arr1) && Array.isArray(arr2) && _.isEmpty(_.xor(arr1, arr2))
const getMachine = _.get('scope.machine')
const getCrypto = _.get('scope.crypto')
const flattenCoins = _.compose(_.flatten, _.map(getCrypto))
const diffAllCryptos = _.compose(_.difference(ALL_CRYPTOS))
const codes = {
minimumTx: 'minimumTx',
cashInFee: 'fixedFee',
cashInCommission: 'cashIn',
cashOutCommission: 'cashOut'
}
const { global, scoped } = getConfigFields(_.keys(codes), config)
const defaultCashOutCommissions = { code: 'cashOutCommission', value: 0, scope: global[0].scope }
const isCashOutDisabled =
_.isEmpty(_.filter(commissionElement => commissionElement.code === 'cashOutCommission', global))
const globalWithDefaults =
isCashOutDisabled ? _.concat(global, defaultCashOutCommissions) : global
const machineAndCryptoScoped = scoped.filter(
f => f.scope.machine !== GLOBAL_SCOPE.machine && f.scope.crypto.length === 1
)
const cryptoScoped = scoped.filter(
f =>
f.scope.machine === GLOBAL_SCOPE.machine &&
!areArraysEquals(f.scope.crypto, GLOBAL_SCOPE.crypto)
)
const machineScoped = scoped.filter(
f =>
f.scope.machine !== GLOBAL_SCOPE.machine &&
areArraysEquals(f.scope.crypto, GLOBAL_SCOPE.crypto)
)
const withCryptoScoped = machineAndCryptoScoped.concat(cryptoScoped)
const filteredMachineScoped = _.map(it => {
const filterByMachine = _.filter(_.includes(getMachine(it)))
const unrepeatedCryptos = _.compose(
diffAllCryptos,
flattenCoins,
filterByMachine
)(withCryptoScoped)
return _.set('scope.crypto', unrepeatedCryptos)(it)
})(machineScoped)
const allCommissionsOverrides = withCryptoScoped.concat(filteredMachineScoped)
return {
..._.fromPairs(globalWithDefaults.map(f => [`commissions_${codes[f.code]}`, f.value])),
...(allCommissionsOverrides.length > 0 && {
commissions_overrides: allCommissionsOverrides.map(s => ({
..._.fromPairs(s.values.map(f => [codes[f.code], f.value])),
machine: s.scope.machine === GLOBAL ? ALL_MACHINES : s.scope.machine,
cryptoCurrencies: areArraysEquals(s.scope.crypto, ALL_CRYPTOS) ? [ALL_CRYPTOS_STRING] : s.scope.crypto,
id: uuid.v4()
}))
})
}
}
function migrateLocales (config) {
const codes = {
country: 'country',
fiatCurrency: 'fiatCurrency',
machineLanguages: 'languages',
cryptoCurrencies: 'cryptoCurrencies',
timezone: 'timezone'
}
const { global, scoped } = getConfigFields(_.keys(codes), config)
return {
..._.fromPairs(global.map(f => [`locale_${codes[f.code]}`, f.value])),
...(scoped.length > 0 && {
locale_overrides: scoped.map(s => ({
..._.fromPairs(s.values.map(f => [codes[f.code], f.value])),
machine: s.scope.machine,
id: uuid.v4()
}))
})
}
}
function migrateCashOut (config) {
const globalCodes = {
fudgeFactorActive: 'fudgeFactorActive'
}
const scopedCodes = {
cashOutEnabled: 'active',
topCashOutDenomination: 'top',
bottomCashOutDenomination: 'bottom',
zeroConfLimit: 'zeroConfLimit'
}
const { global } = getConfigFields(_.keys(globalCodes), config)
const { scoped } = getConfigFields(_.keys(scopedCodes), config)
return {
..._.fromPairs(
global.map(f => [`cashOut_${globalCodes[f.code]}`, f.value])
),
..._.fromPairs(
_.flatten(
scoped.map(s => {
const fields = s.values.map(f => [
`cashOut_${f.scope.machine}_${scopedCodes[f.code]}`,
f.value
])
fields.push([`cashOut_${s.scope.machine}_id`, s.scope.machine])
return fields
})
)
)
}
}
function migrateNotifications (config) {
const globalCodes = {
notificationsEmailEnabled: 'email_active',
notificationsSMSEnabled: 'sms_active',
cashOutCassette1AlertThreshold: 'fiatBalanceCassette1',
cashOutCassette2AlertThreshold: 'fiatBalanceCassette2',
cryptoAlertThreshold: 'cryptoLowBalance'
}
const machineScopedCodes = {
cashOutCassette1AlertThreshold: 'cassette1',
cashOutCassette2AlertThreshold: 'cassette2'
}
const cryptoScopedCodes = {
cryptoAlertThreshold: 'lowBalance'
}
const { global } = getConfigFields(_.keys(globalCodes), config)
const machineScoped = getConfigFields(
_.keys(machineScopedCodes),
config
).scoped.filter(f => f.scope.crypto === GLOBAL && f.scope.machine !== GLOBAL)
const cryptoScoped = getConfigFields(
_.keys(cryptoScopedCodes),
config
).scoped.filter(f => f.scope.crypto !== GLOBAL && f.scope.machine === GLOBAL)
return {
..._.fromPairs(
global.map(f => [`notifications_${globalCodes[f.code]}`, f.value])
),
notifications_email_balance: true,
notifications_email_transactions: true,
notifications_email_compliance: true,
notifications_email_errors: true,
notifications_sms_balance: true,
notifications_sms_transactions: true,
notifications_sms_compliance: true,
notifications_sms_errors: true,
...(machineScoped.length > 0 && {
notifications_fiatBalanceOverrides: machineScoped.map(s => ({
..._.fromPairs(
s.values.map(f => [machineScopedCodes[f.code], f.value])
),
machine: s.scope.machine,
id: uuid.v4()
}))
}),
...(cryptoScoped.length > 0 && {
notifications_cryptoBalanceOverrides: cryptoScoped.map(s => ({
..._.fromPairs(s.values.map(f => [cryptoScopedCodes[f.code], f.value])),
cryptoCurrency: s.scope.crypto,
id: uuid.v4()
}))
})
}
}
function migrateWallet (config) {
const codes = {
ticker: 'ticker',
wallet: 'wallet',
exchange: 'exchange',
zeroConf: 'zeroConf'
}
const { scoped } = getConfigFields(_.keys(codes), config)
return {
...(scoped.length > 0 &&
_.fromPairs(
_.flatten(
scoped.map(s =>
s.values.map(f => [
`wallets_${f.scope.crypto}_${codes[f.code]}`,
f.value
])
)
)
))
}
}
function migrateOperatorInfo (config) {
const codes = {
operatorInfoActive: 'active',
operatorInfoEmail: 'email',
operatorInfoName: 'name',
operatorInfoPhone: 'phone',
operatorInfoWebsite: 'website',
operatorInfoCompanyNumber: 'companyNumber'
}
const { global } = getConfigFields(_.keys(codes), config)
return {
..._.fromPairs(global.map(f => [`operatorInfo_${codes[f.code]}`, f.value]))
}
}
function migrateReceiptPrinting (config) {
const codes = {
receiptPrintingActive: 'active'
}
const { global } = getConfigFields(_.keys(codes), config)
return {
..._.fromPairs(global.map(f => [`receipt_${codes[f.code]}`, f.value])),
receipt_operatorWebsite: true,
receipt_operatorEmail: true,
receipt_operatorPhone: true,
receipt_companyRegistration: true,
receipt_machineLocation: true,
receipt_customerNameOrPhoneNumber: true,
receipt_exchangeRate: true,
receipt_addressQRCode: true
}
}
function migrateCoinATMRadar (config) {
const codes = ['coinAtmRadarActive', 'coinAtmRadarShowRates']
const { global } = getConfigFields(codes, config)
const coinAtmRadar = _.fromPairs(global.map(f => [f.code, f.value]))
return {
coinAtmRadar_active: coinAtmRadar.coinAtmRadarActive,
coinAtmRadar_commissions: coinAtmRadar.coinAtmRadarShowRates,
coinAtmRadar_limitsAndVerification: coinAtmRadar.coinAtmRadarShowRates
}
}
function migrateTermsAndConditions (config) {
const codes = {
termsScreenActive: 'active',
termsScreenTitle: 'title',
termsScreenText: 'text',
termsAcceptButtonText: 'acceptButtonText',
termsCancelButtonText: 'cancelButtonText'
}
const { global } = getConfigFields(_.keys(codes), config)
return {
..._.fromPairs(
global.map(f => [`termsConditions_${codes[f.code]}`, f.value])
)
}
}
function migrateComplianceTriggers (config) {
const suspensionDays = 1
const triggerTypes = {
amount: 'txAmount',
velocity: 'txVelocity',
volume: 'txVolume',
consecutiveDays: 'consecutiveDays'
}
const requirements = {
sms: 'sms',
idData: 'idCardData',
idPhoto: 'idCardPhoto',
facePhoto: 'facephoto',
sanctions: 'sanctions',
suspend: 'suspend'
}
function createTrigger (
requirement,
threshold,
suspensionDays
) {
const triggerConfig = {
id: uuid.v4(),
direction: 'both',
threshold,
thresholdDays: 1,
triggerType: triggerTypes.volume,
requirement
}
if (!requirement === 'suspend') return triggerConfig
return _.assign(triggerConfig, { suspensionDays })
}
const codes = [
'smsVerificationActive',
'smsVerificationThreshold',
'idCardDataVerificationActive',
'idCardDataVerificationThreshold',
'idCardPhotoVerificationActive',
'idCardPhotoVerificationThreshold',
'frontCameraVerificationActive',
'frontCameraVerificationThreshold',
'sanctionsVerificationActive',
'sanctionsVerificationThreshold',
'hardLimitVerificationActive',
'hardLimitVerificationThreshold',
'rejectAddressReuseActive'
]
const global = _.fromPairs(
getConfigFields(codes, config).global.map(f => [f.code, f.value])
)
const triggers = []
if (global.smsVerificationActive && _.isNumber(global.smsVerificationThreshold)) {
triggers.push(
createTrigger(requirements.sms, global.smsVerificationThreshold)
)
}
if (global.idCardDataVerificationActive && _.isNumber(global.idCardDataVerificationThreshold)) {
triggers.push(
createTrigger(requirements.idData, global.idCardDataVerificationThreshold)
)
}
if (global.idCardPhotoVerificationActive && _.isNumber(global.idCardPhotoVerificationThreshold)) {
triggers.push(
createTrigger(requirements.idPhoto, global.idCardPhotoVerificationThreshold)
)
}
if (global.frontCameraVerificationActive && _.isNumber(global.frontCameraVerificationThreshold)) {
triggers.push(
createTrigger(requirements.facePhoto, global.frontCameraVerificationThreshold)
)
}
if (global.sanctionsVerificationActive && _.isNumber(global.sanctionsVerificationThreshold)) {
triggers.push(
createTrigger(requirements.sanctions, global.sanctionsVerificationThreshold)
)
}
if (global.hardLimitVerificationActive && _.isNumber(global.hardLimitVerificationThreshold)) {
triggers.push(
createTrigger(requirements.suspend, global.hardLimitVerificationThreshold, suspensionDays)
)
}
return {
triggers,
['compliance_rejectAddressReuse']: global.rejectAddressReuseActive
}
}
function migrateConfig (config) {
return {
...migrateCommissions(config),
...migrateLocales(config),
...migrateCashOut(config),
...migrateNotifications(config),
...migrateWallet(config),
...migrateOperatorInfo(config),
...migrateReceiptPrinting(config),
...migrateCoinATMRadar(config),
...migrateTermsAndConditions(config),
...migrateComplianceTriggers(config)
}
}
function migrateAccounts (accounts) {
const accountArray = [
'bitgo',
'bitstamp',
'blockcypher',
'infura',
'itbit',
'kraken',
'mailgun',
'twilio'
]
const services = _.keyBy('code', accounts)
const serviceFields = _.mapValues(({ fields }) => _.keyBy('code', fields))(services)
const allAccounts = _.mapValues(_.mapValues(_.get('value')))(serviceFields)
return _.pick(accountArray)(allAccounts)
}
function migrate (config, accounts) {
return {
config: migrateConfig(config),
accounts: migrateAccounts(accounts)
}
}
module.exports = { migrate }


@ -4,30 +4,29 @@ const { CASH_OUT_TRANSACTION_STATES } = require('../cash-out/cash-out-helper')
function transaction () {
const sql = `SELECT DISTINCT * FROM (
- SELECT 'type' AS type, 'Cash In' AS value UNION
- SELECT 'type' AS type, 'Cash Out' AS value UNION
- SELECT 'machine' AS type, name AS value FROM devices d INNER JOIN cash_in_txs t ON d.device_id = t.device_id UNION
- SELECT 'machine' AS type, name AS value FROM devices d INNER JOIN cash_out_txs t ON d.device_id = t.device_id UNION
- SELECT 'customer' AS type, concat(id_card_data::json->>'firstName', ' ', id_card_data::json->>'lastName') AS value
+ SELECT 'type' AS type, NULL AS label, 'Cash In' AS value UNION
+ SELECT 'type' AS type, NULL AS label, 'Cash Out' AS value UNION
+ SELECT 'machine' AS type, name AS label, d.device_id AS value FROM devices d INNER JOIN cash_in_txs t ON d.device_id = t.device_id UNION
+ SELECT 'machine' AS type, name AS label, d.device_id AS value FROM devices d INNER JOIN cash_out_txs t ON d.device_id = t.device_id UNION
+ SELECT 'customer' AS type, NULL AS label, concat(id_card_data::json->>'firstName', ' ', id_card_data::json->>'lastName') AS value
FROM customers c INNER JOIN cash_in_txs t ON c.id = t.customer_id
WHERE c.id_card_data::json->>'firstName' IS NOT NULL or c.id_card_data::json->>'lastName' IS NOT NULL UNION
- SELECT 'customer' AS type, concat(id_card_data::json->>'firstName', ' ', id_card_data::json->>'lastName') AS value
+ SELECT 'customer' AS type, NULL AS label, concat(id_card_data::json->>'firstName', ' ', id_card_data::json->>'lastName') AS value
FROM customers c INNER JOIN cash_out_txs t ON c.id = t.customer_id
WHERE c.id_card_data::json->>'firstName' IS NOT NULL or c.id_card_data::json->>'lastName' IS NOT NULL UNION
- SELECT 'fiat' AS type, fiat_code AS value FROM cash_in_txs UNION
- SELECT 'fiat' AS type, fiat_code AS value FROM cash_out_txs UNION
- SELECT 'crypto' AS type, crypto_code AS value FROM cash_in_txs UNION
- SELECT 'crypto' AS type, crypto_code AS value FROM cash_out_txs UNION
- SELECT 'address' AS type, to_address AS value FROM cash_in_txs UNION
- SELECT 'address' AS type, to_address AS value FROM cash_out_txs UNION
- SELECT 'status' AS type, ${cashInTx.TRANSACTION_STATES} AS value FROM cash_in_txs UNION
- SELECT 'status' AS type, ${CASH_OUT_TRANSACTION_STATES} AS value FROM cash_out_txs UNION
- SELECT 'sweep status' AS type, CASE WHEN swept THEN 'Swept' WHEN NOT swept THEN 'Unswept' END AS value FROM cash_out_txs
+ SELECT 'fiat' AS type, NULL AS label, fiat_code AS value FROM cash_in_txs UNION
+ SELECT 'fiat' AS type, NULL AS label, fiat_code AS value FROM cash_out_txs UNION
+ SELECT 'crypto' AS type, NULL AS label, crypto_code AS value FROM cash_in_txs UNION
+ SELECT 'crypto' AS type, NULL AS label, crypto_code AS value FROM cash_out_txs UNION
+ SELECT 'address' AS type, NULL AS label, to_address AS value FROM cash_in_txs UNION
+ SELECT 'address' AS type, NULL AS label, to_address AS value FROM cash_out_txs UNION
+ SELECT 'status' AS type, NULL AS label, ${cashInTx.TRANSACTION_STATES} AS value FROM cash_in_txs UNION
+ SELECT 'status' AS type, NULL AS label, ${CASH_OUT_TRANSACTION_STATES} AS value FROM cash_out_txs UNION
+ SELECT 'sweep status' AS type, NULL AS label, CASE WHEN swept THEN 'Swept' WHEN NOT swept THEN 'Unswept' END AS value FROM cash_out_txs
) f`
return db.any(sql)
}
function customer () {
const sql = `SELECT DISTINCT * FROM (
SELECT 'phone' AS type, phone AS value FROM customers WHERE phone IS NOT NULL UNION


@ -19,10 +19,10 @@ const resolvers = {
isAnonymous: parent => (parent.customerId === anonymous.uuid)
},
Query: {
- transactions: (...[, { from, until, limit, offset, deviceId, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status, swept, excludeTestingCustomers }]) =>
-   transactions.batch(from, until, limit, offset, deviceId, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status, swept, excludeTestingCustomers),
- transactionsCsv: (...[, { from, until, limit, offset, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status, swept, timezone, excludeTestingCustomers, simplified }]) =>
-   transactions.batch(from, until, limit, offset, null, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status, swept, excludeTestingCustomers, simplified)
+ transactions: (...[, { from, until, limit, offset, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status, swept, excludeTestingCustomers }]) =>
+   transactions.batch(from, until, limit, offset, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status, swept, excludeTestingCustomers),
+ transactionsCsv: (...[, { from, until, limit, offset, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status, swept, timezone, excludeTestingCustomers, simplified }]) =>
+   transactions.batch(from, until, limit, offset, null, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status, swept, excludeTestingCustomers, simplified)
.then(data => parseAsync(logDateFormat(timezone, data, ['created', 'sendTime', 'publishedAt']))),
transactionCsv: (...[, { id, txClass, timezone }]) =>
transactions.getTx(id, txClass).then(data =>


@ -56,11 +56,12 @@ const typeDef = gql`
type Filter {
type: String
value: String
+ label: String
}
type Query {
- transactions(from: Date, until: Date, limit: Int, offset: Int, deviceId: ID, txClass: String, machineName: String, customerName: String, fiatCode: String, cryptoCode: String, toAddress: String, status: String, swept: Boolean, excludeTestingCustomers: Boolean): [Transaction] @auth
- transactionsCsv(from: Date, until: Date, limit: Int, offset: Int, txClass: String, machineName: String, customerName: String, fiatCode: String, cryptoCode: String, toAddress: String, status: String, swept: Boolean, timezone: String, excludeTestingCustomers: Boolean, simplified: Boolean): String @auth
+ transactions(from: Date, until: Date, limit: Int, offset: Int, txClass: String, deviceId: String, customerName: String, fiatCode: String, cryptoCode: String, toAddress: String, status: String, swept: Boolean, excludeTestingCustomers: Boolean): [Transaction] @auth
+ transactionsCsv(from: Date, until: Date, limit: Int, offset: Int, txClass: String, deviceId: String, customerName: String, fiatCode: String, cryptoCode: String, toAddress: String, status: String, swept: Boolean, timezone: String, excludeTestingCustomers: Boolean, simplified: Boolean): String @auth
transactionCsv(id: ID, txClass: String, timezone: String): String @auth
txAssociatedDataCsv(id: ID, txClass: String, timezone: String): String @auth
transactionFilters: [Filter] @auth


@ -1,5 +1,4 @@
const { AuthenticationError } = require('apollo-server-express')
- const base64 = require('base-64')
const users = require('../../users')
const buildApolloContext = async ({ req, res }) => {


@ -11,19 +11,6 @@ const { REDEEMABLE_AGE, CASH_OUT_TRANSACTION_STATES } = require('../../cash-out/
const NUM_RESULTS = 1000
- function addNames (txs) {
-   return machineLoader.getMachineNames()
-     .then(machines => {
-       const addName = tx => {
-         const machine = _.find(['deviceId', tx.deviceId], machines)
-         const name = machine ? machine.name : 'Unpaired'
-         return _.set('machineName', name, tx)
-       }
-       return _.map(addName, txs)
-     })
- }
function addProfits (txs) {
return _.map(it => {
const profit = getProfit(it).toString()
@ -33,14 +20,31 @@ function addProfits (txs) {
const camelize = _.mapKeys(_.camelCase)
+ const DEVICE_NAME_QUERY = `
+   CASE
+     WHEN ud.name IS NOT NULL THEN ud.name || ' (unpaired)'
+     WHEN d.name IS NOT NULL THEN d.name
+     ELSE 'Unpaired'
+   END AS machine_name
+ `
+ const DEVICE_NAME_JOINS = `
+   LEFT JOIN devices d ON txs.device_id = d.device_id
+   LEFT JOIN (
+     SELECT device_id, name, unpaired, paired
+     FROM unpaired_devices
+   ) ud ON txs.device_id = ud.device_id
+     AND ud.unpaired >= txs.created
+     AND (txs.created >= ud.paired)
+ `
function batch (
from = new Date(0).toISOString(),
until = new Date().toISOString(),
limit = null,
offset = 0,
- id = null,
txClass = null,
- machineName = null,
+ deviceId = null,
customerName = null,
fiatCode = null,
cryptoCode = null,
@ -61,8 +65,7 @@ function batch (
k
)
)),
- addProfits,
- addNames
+ addProfits
)
const cashInSql = `SELECT 'cashIn' AS tx_class, txs.*,
@ -77,21 +80,20 @@ function batch (
txs.tx_customer_photo_at AS tx_customer_photo_at,
txs.tx_customer_photo_path AS tx_customer_photo_path,
((NOT txs.send_confirmed) AND (txs.created <= now() - interval $1)) AS expired,
- tb.error_message AS batch_error
+ tb.error_message AS batch_error,
+ ${DEVICE_NAME_QUERY}
FROM (SELECT *, ${cashInTx.TRANSACTION_STATES} AS txStatus FROM cash_in_txs) AS txs
LEFT OUTER JOIN customers c ON txs.customer_id = c.id
- LEFT JOIN devices d ON txs.device_id = d.device_id
+ ${DEVICE_NAME_JOINS}
LEFT OUTER JOIN transaction_batches tb ON txs.batch_id = tb.id
- WHERE txs.created >= $2 AND txs.created <= $3 ${
-   id !== null ? `AND txs.device_id = $6` : ``
- }
- AND ($7 is null or $7 = 'Cash In')
- AND ($8 is null or d.name = $8)
- AND ($9 is null or concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') = $9)
- AND ($10 is null or txs.fiat_code = $10)
- AND ($11 is null or txs.crypto_code = $11)
- AND ($12 is null or txs.to_address = $12)
- AND ($13 is null or txs.txStatus = $13)
+ WHERE txs.created >= $2 AND txs.created <= $3
+ AND ($6 is null or $6 = 'Cash In')
+ AND ($7 is null or txs.device_id = $7)
+ AND ($8 is null or concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') = $8)
+ AND ($9 is null or txs.fiat_code = $9)
+ AND ($10 is null or txs.crypto_code = $10)
+ AND ($11 is null or txs.to_address = $11)
+ AND ($12 is null or txs.txStatus = $12)
${excludeTestingCustomers ? `AND c.is_test_customer is false` : ``}
${isCsvExport && !simplified ? '' : 'AND (error IS NOT null OR tb.error_message IS NOT null OR fiat > 0)'}
ORDER BY created DESC limit $4 offset $5`
@ -109,23 +111,22 @@ function batch (
c.id_card_photo_path AS customer_id_card_photo_path,
txs.tx_customer_photo_at AS tx_customer_photo_at,
txs.tx_customer_photo_path AS tx_customer_photo_path,
- (NOT txs.dispense AND extract(epoch FROM (now() - greatest(txs.created, txs.confirmed_at))) >= $1) AS expired
+ (NOT txs.dispense AND extract(epoch FROM (now() - greatest(txs.created, txs.confirmed_at))) >= $1) AS expired,
+ ${DEVICE_NAME_QUERY}
FROM (SELECT *, ${CASH_OUT_TRANSACTION_STATES} AS txStatus FROM cash_out_txs) txs
INNER JOIN cash_out_actions actions ON txs.id = actions.tx_id
AND actions.action = 'provisionAddress'
LEFT OUTER JOIN customers c ON txs.customer_id = c.id
- LEFT JOIN devices d ON txs.device_id = d.device_id
+ ${DEVICE_NAME_JOINS}
- WHERE txs.created >= $2 AND txs.created <= $3 ${
-   id !== null ? `AND txs.device_id = $6` : ``
- }
- AND ($7 is null or $7 = 'Cash Out')
- AND ($8 is null or d.name = $8)
- AND ($9 is null or concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') = $9)
- AND ($10 is null or txs.fiat_code = $10)
- AND ($11 is null or txs.crypto_code = $11)
- AND ($12 is null or txs.to_address = $12)
- AND ($13 is null or txs.txStatus = $13)
- AND ($14 is null or txs.swept = $14)
+ WHERE txs.created >= $2 AND txs.created <= $3
+ AND ($6 is null or $6 = 'Cash Out')
+ AND ($7 is null or txs.device_id = $7)
+ AND ($8 is null or concat(c.id_card_data::json->>'firstName', ' ', c.id_card_data::json->>'lastName') = $8)
+ AND ($9 is null or txs.fiat_code = $9)
+ AND ($10 is null or txs.crypto_code = $10)
+ AND ($11 is null or txs.to_address = $11)
+ AND ($12 is null or txs.txStatus = $12)
+ AND ($13 is null or txs.swept = $13)
${excludeTestingCustomers ? `AND c.is_test_customer is false` : ``}
${isCsvExport ? '' : 'AND fiat > 0'}
ORDER BY created DESC limit $4 offset $5`
@ -141,13 +142,13 @@ function batch (
}
if (hasCashInOnlyFilters) {
- promises = [db.any(cashInSql, [cashInTx.PENDING_INTERVAL, from, until, limit, offset, id, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status])]
+ promises = [db.any(cashInSql, [cashInTx.PENDING_INTERVAL, from, until, limit, offset, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status])]
} else if (hasCashOutOnlyFilters) {
- promises = [db.any(cashOutSql, [REDEEMABLE_AGE, from, until, limit, offset, id, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status, swept])]
+ promises = [db.any(cashOutSql, [REDEEMABLE_AGE, from, until, limit, offset, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status, swept])]
} else {
promises = [
- db.any(cashInSql, [cashInTx.PENDING_INTERVAL, from, until, limit, offset, id, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status]),
- db.any(cashOutSql, [REDEEMABLE_AGE, from, until, limit, offset, id, txClass, machineName, customerName, fiatCode, cryptoCode, toAddress, status, swept])
+ db.any(cashInSql, [cashInTx.PENDING_INTERVAL, from, until, limit, offset, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status]),
+ db.any(cashOutSql, [REDEEMABLE_AGE, from, until, limit, offset, txClass, deviceId, customerName, fiatCode, cryptoCode, toAddress, status, swept])
]
}
@ -249,7 +250,7 @@ const getStatus = it => {
function getCustomerTransactionsBatch (ids) {
const packager = _.flow(it => {
return it
- }, _.flatten, _.orderBy(_.property('created'), ['desc']), _.map(camelize), addNames)
+ }, _.flatten, _.orderBy(_.property('created'), ['desc']), _.map(camelize))
const cashInSql = `SELECT 'cashIn' AS tx_class, txs.*,
c.phone AS customer_phone,
@ -261,9 +262,11 @@ function getCustomerTransactionsBatch (ids) {
c.front_camera_path AS customer_front_camera_path,
c.id_card_photo_path AS customer_id_card_photo_path,
((NOT txs.send_confirmed) AND (txs.created <= now() - interval $2)) AS expired,
- tb.error_message AS batch_error
+ tb.error_message AS batch_error,
+ ${DEVICE_NAME_QUERY}
FROM cash_in_txs AS txs
LEFT OUTER JOIN customers c ON txs.customer_id = c.id
+ ${DEVICE_NAME_JOINS}
LEFT OUTER JOIN transaction_batches tb ON txs.batch_id = tb.id
WHERE c.id IN ($1^)
ORDER BY created DESC limit $3`
@ -279,11 +282,13 @@ function getCustomerTransactionsBatch (ids) {
c.name AS customer_name,
c.front_camera_path AS customer_front_camera_path,
c.id_card_photo_path AS customer_id_card_photo_path,
- (NOT txs.dispense AND extract(epoch FROM (now() - greatest(txs.created, txs.confirmed_at))) >= $3) AS expired
+ (NOT txs.dispense AND extract(epoch FROM (now() - greatest(txs.created, txs.confirmed_at))) >= $3) AS expired,
+ ${DEVICE_NAME_QUERY}
FROM cash_out_txs txs
INNER JOIN cash_out_actions actions ON txs.id = actions.tx_id
AND actions.action = 'provisionAddress'
LEFT OUTER JOIN customers c ON txs.customer_id = c.id
+ ${DEVICE_NAME_JOINS}
WHERE c.id IN ($1^)
ORDER BY created DESC limit $2`
return Promise.all([
@ -297,7 +302,7 @@ function getCustomerTransactionsBatch (ids) {
}
function single (txId) {
- const packager = _.flow(_.compact, _.map(camelize), addNames)
+ const packager = _.flow(_.compact, _.map(camelize))
const cashInSql = `SELECT 'cashIn' AS tx_class, txs.*,
c.phone AS customer_phone,
@ -309,9 +314,11 @@ function single (txId) {
c.front_camera_path AS customer_front_camera_path,
c.id_card_photo_path AS customer_id_card_photo_path,
((NOT txs.send_confirmed) AND (txs.created <= now() - interval $1)) AS expired,
- tb.error_message AS batch_error
+ tb.error_message AS batch_error,
+ ${DEVICE_NAME_QUERY}
FROM cash_in_txs AS txs
LEFT OUTER JOIN customers c ON txs.customer_id = c.id
+ ${DEVICE_NAME_JOINS}
LEFT OUTER JOIN transaction_batches tb ON txs.batch_id = tb.id
WHERE id=$2`
@ -325,13 +332,14 @@ function single (txId) {
c.id_card_data AS customer_id_card_data,
c.name AS customer_name,
c.front_camera_path AS customer_front_camera_path,
c.id_card_photo_path AS customer_id_card_photo_path,
- (NOT txs.dispense AND extract(epoch FROM (now() - greatest(txs.created, txs.confirmed_at))) >= $2) AS expired
+ (NOT txs.dispense AND extract(epoch FROM (now() - greatest(txs.created, txs.confirmed_at))) >= $2) AS expired,
+ ${DEVICE_NAME_QUERY}
FROM cash_out_txs txs
INNER JOIN cash_out_actions actions ON txs.id = actions.tx_id
AND actions.action = 'provisionAddress'
LEFT OUTER JOIN customers c ON txs.customer_id = c.id
+ ${DEVICE_NAME_JOINS}
WHERE id=$1`
return Promise.all([


@ -1,25 +1,15 @@
const db = require('./db')
- const migrateTools = require('./migrate-tools')
+ // This migration was updated on v10.2
+ // it's from before 7.5 and we update one major version at a time
+ // Data migration was removed, keeping only the schema update
exports.up = function (next) {
- return migrateTools.migrateNames()
-   .then(updateSql => {
-     const sql = [
-       'alter table devices add column name text',
-       updateSql,
-       'alter table devices alter column name set not null'
-     ]
-     return db.multi(sql, next)
-   })
-   .catch(() => {
-     const sql = [
-       'alter table devices add column name text',
-       'alter table devices alter column name set not null'
-     ]
-     return db.multi(sql, next)
-   })
+ const sql = [
+   'alter table devices add column name text',
+   'alter table devices alter column name set not null'
+ ]
+ return db.multi(sql, next)
}
exports.down = function (next) {


@ -1,34 +1,9 @@
- const db = require('./db')
- const machineLoader = require('../lib/machine-loader')
- const { migrationSaveConfig, saveAccounts, loadLatest } = require('../lib/new-settings-loader')
- const { migrate } = require('../lib/config-migration')
- const _ = require('lodash/fp')
- const OLD_SETTINGS_LOADER_SCHEMA_VERSION = 1
+ // This migration was actually a config update
+ // it's from before 7.5 and we update one major version at a time
+ // v10.2 is good enough to deprecate it
+ // file still has to exist so that the migration tool doesn't throw an error
module.exports.up = function (next) {
- function migrateConfig (settings) {
-   const newSettings = migrate(settings.config, settings.accounts)
-   return Promise.all([
-     migrationSaveConfig(newSettings.config),
-     saveAccounts(newSettings.accounts)
-   ])
-     .then(() => next())
- }
- loadLatest(OLD_SETTINGS_LOADER_SCHEMA_VERSION)
-   .then(settings => _.isEmpty(settings.config)
-     ? next()
-     : migrateConfig(settings)
-   )
-   .catch(err => {
-     if (err.message === 'lamassu-server is not configured') {
-       return next()
-     }
-     console.log(err.message)
-     return next(err)
-   })
+ next()
}
module.exports.down = function (next) {


@ -1,16 +0,0 @@
const pgp = require('pg-promise')()
const _ = require('lodash/fp')
const settingsLoader = require('../lib/admin/settings-loader')
const machineLoader = require('../lib/machine-loader')
module.exports = {migrateNames}
function migrateNames () {
const cs = new pgp.helpers.ColumnSet(['?device_id', 'name'], {table: 'devices'})
return settingsLoader.loadLatestConfig(false)
.then(config => machineLoader.getMachineNames(config))
.then(_.map(r => ({device_id: r.deviceId, name: r.name})))
.then(data => pgp.helpers.update(data, cs) + ' WHERE t.device_id=v.device_id')
}


@ -18,7 +18,6 @@
"apollo-link-http": "^1.5.17", "apollo-link-http": "^1.5.17",
"apollo-upload-client": "^13.0.0", "apollo-upload-client": "^13.0.0",
"axios": "0.21.1", "axios": "0.21.1",
"base-64": "^1.0.0",
"bignumber.js": "9.0.0", "bignumber.js": "9.0.0",
"classnames": "2.2.6", "classnames": "2.2.6",
"countries-and-timezones": "^2.4.0", "countries-and-timezones": "^2.4.0",


@ -38,10 +38,10 @@ const SearchBox = memo(
classes={{ option: classes.autocomplete }}
value={filters}
options={options}
- getOptionLabel={it => it.value}
+ getOptionLabel={it => it.label || it.value}
renderOption={it => (
<div className={classes.item}>
- <P className={classes.itemLabel}>{it.value}</P>
+ <P className={classes.itemLabel}>{it.label || it.value}</P>
<P className={classes.itemType}>{it.type}</P>
</div>
)}


@ -32,7 +32,7 @@ const SearchFilter = ({
<Chip
key={idx}
classes={chipClasses}
- label={`${onlyFirstToUpper(f.type)}: ${f.value}`}
+ label={`${onlyFirstToUpper(f.type)}: ${f.label || f.value}`}
onDelete={() => onFilterDelete(f)}
deleteIcon={<CloseIcon className={classes.button} />}
/>


@ -52,7 +52,6 @@ const MACHINE_LOGS = gql`
`
const createCsv = async ({ machineLogsCsv }) => {
- console.log(machineLogsCsv)
const machineLogs = new Blob([machineLogsCsv], {
type: 'text/plain;charset=utf-8'
})


@ -20,7 +20,7 @@ const useStyles = makeStyles(mainStyles)
const NUM_LOG_RESULTS = 5
const GET_TRANSACTIONS = gql`
- query transactions($limit: Int, $from: Date, $until: Date, $deviceId: ID) {
+ query transactions($limit: Int, $from: Date, $until: Date, $deviceId: String) {
transactions(
limit: $limit
from: $from


@ -61,6 +61,7 @@ const GET_TRANSACTION_FILTERS = gql`
transactionFilters {
type
value
+ label
}
}
`
@ -71,7 +72,7 @@ const GET_TRANSACTIONS = gql`
$from: Date
$until: Date
$txClass: String
- $machineName: String
+ $deviceId: String
$customerName: String
$fiatCode: String
$cryptoCode: String
@ -84,7 +85,7 @@ const GET_TRANSACTIONS = gql`
from: $from
until: $until
txClass: $txClass
- machineName: $machineName
+ deviceId: $deviceId
customerName: $customerName
fiatCode: $fiatCode
cryptoCode: $cryptoCode
@ -265,13 +266,13 @@ const Transactions = () => {
setVariables({
limit: NUM_LOG_RESULTS,
txClass: filtersObject.type,
- machineName: filtersObject.machine,
+ deviceId: filtersObject.machine,
customerName: filtersObject.customer,
fiatCode: filtersObject.fiat,
cryptoCode: filtersObject.crypto,
toAddress: filtersObject.address,
status: filtersObject.status,
- swept: filtersObject.swept === 'Swept'
+ swept: filtersObject.swept && filtersObject.swept === 'Swept'
})
refetch && refetch()
@ -289,13 +290,13 @@ const Transactions = () => {
setVariables({
limit: NUM_LOG_RESULTS,
txClass: filtersObject.type,
- machineName: filtersObject.machine,
+ deviceId: filtersObject.machine,
customerName: filtersObject.customer,
fiatCode: filtersObject.fiat,
cryptoCode: filtersObject.crypto,
toAddress: filtersObject.address,
status: filtersObject.status,
- swept: filtersObject.swept === 'Swept'
+ swept: filtersObject.swept && filtersObject.swept === 'Swept'
})
refetch && refetch()
@ -308,13 +309,13 @@ const Transactions = () => {
setVariables({
limit: NUM_LOG_RESULTS,
txClass: filtersObject.type,
- machineName: filtersObject.machine,
+ deviceId: filtersObject.machine,
customerName: filtersObject.customer,
fiatCode: filtersObject.fiat,
cryptoCode: filtersObject.crypto,
toAddress: filtersObject.address,
status: filtersObject.status,
- swept: filtersObject.swept === 'Swept'
+ swept: filtersObject.swept && filtersObject.swept === 'Swept'
})
refetch && refetch()


@ -25,7 +25,6 @@
"apollo-server-express": "2.25.1", "apollo-server-express": "2.25.1",
"argon2": "0.28.2", "argon2": "0.28.2",
"axios": "0.21.1", "axios": "0.21.1",
"base-64": "^1.0.0",
"base-x": "3.0.9", "base-x": "3.0.9",
"base64url": "^3.0.1", "base64url": "^3.0.1",
"bchaddrjs": "^0.3.0", "bchaddrjs": "^0.3.0",