Merge branch 'dev' into feat-dockerize-l-s

commit a1f2485b19
Rafael Taranto, 2022-04-28 17:14:30 +01:00 (committed by GitHub)
54 changed files with 819 additions and 272 deletions

View file

@ -1,7 +1,7 @@
#!/usr/bin/env node #!/usr/bin/env node
const path = require('path') const path = require('path')
require('dotenv').config({ path: path.resolve(__dirname, '../.env') }) require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
var pgp = require('pg-promise')() var pgp = require('pg-promise')()
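
The same dotenv change is applied across the CLI tools below. A minimal standalone sketch of the pattern, assuming `os` is required alongside `path` (the new expression relies on `os.homedir()`):

const os = require('os')
const path = require('path')

// Production installs keep the env file under ~/.lamassu/.env;
// development keeps reading it from the repository root.
const envPath = process.env.NODE_ENV === 'production'
  ? path.resolve(os.homedir(), '.lamassu', '.env')
  : path.resolve(__dirname, '../.env')

require('dotenv').config({ path: envPath })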

View file

@ -3,7 +3,7 @@
'use strict' 'use strict'
const path = require('path') const path = require('path')
require('dotenv').config({ path: path.resolve(__dirname, '../.env') }) require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
const setEnvVariable = require('../tools/set-env-var') const setEnvVariable = require('../tools/set-env-var')

View file

@ -1,7 +1,7 @@
#!/usr/bin/env node #!/usr/bin/env node
const path = require('path') const path = require('path')
require('dotenv').config({ path: path.resolve(__dirname, '../.env') }) require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
const hdkey = require('ethereumjs-wallet/hdkey') const hdkey = require('ethereumjs-wallet/hdkey')
const hkdf = require('futoin-hkdf') const hkdf = require('futoin-hkdf')
const db = require('../lib/db') const db = require('../lib/db')

View file

@ -3,6 +3,10 @@ const _ = require('lodash/fp')
const path = require('path') const path = require('path')
require('dotenv').config({ path: path.resolve(__dirname, '../.env') }) require('dotenv').config({ path: path.resolve(__dirname, '../.env') })
const _ = require('lodash/fp')
const path = require('path')
require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
const db = require('../lib/db') const db = require('../lib/db')
const migrate = require('../lib/migrate') const migrate = require('../lib/migrate')
const { asyncLocalStorage, defaultStore } = require('../lib/async-storage') const { asyncLocalStorage, defaultStore } = require('../lib/async-storage')

View file

@ -2,7 +2,7 @@
const fs = require('fs') const fs = require('fs')
const path = require('path') const path = require('path')
require('dotenv').config({ path: path.resolve(__dirname, '../.env') }) require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
const MNEMONIC_PATH = process.env.MNEMONIC_PATH const MNEMONIC_PATH = process.env.MNEMONIC_PATH

View file

@ -5,7 +5,7 @@
const setEnvVariable = require('../tools/set-env-var') const setEnvVariable = require('../tools/set-env-var')
const path = require('path') const path = require('path')
require('dotenv').config({ path: path.resolve(__dirname, '../.env') }) require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
if (!process.env.OFAC_SOURCES_NAMES && !process.env.OFAC_SOURCES_URLS) { if (!process.env.OFAC_SOURCES_NAMES && !process.env.OFAC_SOURCES_URLS) {
setEnvVariable('OFAC_SOURCES_NAMES', 'sdn_advanced,cons_advanced') setEnvVariable('OFAC_SOURCES_NAMES', 'sdn_advanced,cons_advanced')

View file

@ -4,7 +4,7 @@ const fs = require('fs')
const hkdf = require('futoin-hkdf') const hkdf = require('futoin-hkdf')
const path = require('path') const path = require('path')
require('dotenv').config({ path: path.resolve(__dirname, '../.env') }) require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
const mnemonicHelpers = require('../lib/mnemonic-helpers') const mnemonicHelpers = require('../lib/mnemonic-helpers')

View file

@ -1,14 +1,14 @@
#!/usr/bin/env node #!/usr/bin/env node
const path = require('path') const path = require('path')
require('dotenv').config({ path: path.resolve(__dirname, '../.env') }) require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
const { asyncLocalStorage, defaultStore } = require('../lib/async-storage') const { asyncLocalStorage, defaultStore } = require('../lib/async-storage')
const userManagement = require('../lib/new-admin/graphql/modules/userManagement') const userManagement = require('../lib/new-admin/graphql/modules/userManagement')
const authErrors = require('../lib/new-admin/graphql/errors/authentication') const authErrors = require('../lib/new-admin/graphql/errors/authentication')
const name = process.argv[2] const name = process.argv[2]
const role = process.argv[3] const role = process.argv[3]
const domain = process.env.LAMASSU_ADMIN_SERVER_IP || process.env.HOSTNAME const domain = process.env.HOSTNAME
if (!domain) { if (!domain) {
console.error('No hostname configured in the environment') console.error('No hostname configured in the environment')

View file

@ -9,7 +9,7 @@ const mnemonicHelpers = require('../lib/mnemonic-helpers')
const setEnvVariable = require('../tools/set-env-var') const setEnvVariable = require('../tools/set-env-var')
const path = require('path') const path = require('path')
require('dotenv').config({ path: path.resolve(__dirname, '../.env') }) require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
if (!process.env.MNEMONIC_PATH && process.env.SEED_PATH) { if (!process.env.MNEMONIC_PATH && process.env.SEED_PATH) {
const seed = fs.readFileSync(process.env.SEED_PATH, 'utf8').trim() const seed = fs.readFileSync(process.env.SEED_PATH, 'utf8').trim()

View file

@ -5,7 +5,7 @@
const pgp = require('pg-promise')() const pgp = require('pg-promise')()
const path = require('path') const path = require('path')
require('dotenv').config({ path: path.resolve(__dirname, '../.env') }) require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
const { PSQL_URL } = require('../lib/constants') const { PSQL_URL } = require('../lib/constants')

View file

@ -1,7 +1,7 @@
#!/usr/bin/env node #!/usr/bin/env node
const path = require('path') const path = require('path')
require('dotenv').config({ path: path.resolve(__dirname, '../.env') }) require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
const login = require('../lib/admin/login') const login = require('../lib/admin/login')

View file

@ -8,7 +8,7 @@ const os = require('os')
const bip39 = require('bip39') const bip39 = require('bip39')
const setEnvVariable = require('../tools/set-env-var') const setEnvVariable = require('../tools/set-env-var')
require('dotenv').config({ path: path.resolve(__dirname, '../.env') }) require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
if (process.env.MNEMONIC_PATH && !process.env.SEED_PATH) { if (process.env.MNEMONIC_PATH && !process.env.SEED_PATH) {
const mnemonic = fs.readFileSync(process.env.MNEMONIC_PATH, 'utf8') const mnemonic = fs.readFileSync(process.env.MNEMONIC_PATH, 'utf8')

View file

@ -4,7 +4,7 @@ const http = require('http')
const https = require('https') const https = require('https')
const argv = require('minimist')(process.argv.slice(2)) const argv = require('minimist')(process.argv.slice(2))
require('dotenv').config({ path: path.resolve(__dirname, '../.env') }) require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
const { asyncLocalStorage, defaultStore } = require('./async-storage') const { asyncLocalStorage, defaultStore } = require('./async-storage')
const routes = require('./routes') const routes = require('./routes')
@ -82,8 +82,6 @@ function startServer (settings) {
: https.createServer(httpsServerOptions, routes.app) : https.createServer(httpsServerOptions, routes.app)
const port = argv.port || 3000 const port = argv.port || 3000
const localPort = 3030
const localServer = http.createServer(routes.localApp)
if (devMode) logger.info('In dev mode') if (devMode) logger.info('In dev mode')
@ -91,10 +89,6 @@ function startServer (settings) {
logger.info('lamassu-server listening on port ' + logger.info('lamassu-server listening on port ' +
port + ' ' + (devMode ? '(http)' : '(https)')) port + ' ' + (devMode ? '(http)' : '(https)'))
}) })
localServer.listen(localPort, 'localhost', () => {
logger.info('lamassu-server listening on local port ' + localPort)
})
}) })
} }

View file

@ -31,14 +31,14 @@ function updateCore (coinRec, isCurrentlyRunning) {
common.logger.info(`changetype already defined, skipping...`) common.logger.info(`changetype already defined, skipping...`)
} else { } else {
common.logger.info(`Enabling bech32 change addresses in config file..`) common.logger.info(`Enabling bech32 change addresses in config file..`)
common.es(`echo -e "\nchangetype=bech32" >> /mnt/blockchains/bitcoin/bitcoin.conf`) common.es(`echo "\nchangetype=bech32" >> /mnt/blockchains/bitcoin/bitcoin.conf`)
} }
if (common.es(`grep "listenonion=" /mnt/blockchains/bitcoin/bitcoin.conf || true`)) { if (common.es(`grep "listenonion=" /mnt/blockchains/bitcoin/bitcoin.conf || true`)) {
common.logger.info(`listenonion already defined, skipping...`) common.logger.info(`listenonion already defined, skipping...`)
} else { } else {
common.logger.info(`Setting 'listenonion=0' in config file...`) common.logger.info(`Setting 'listenonion=0' in config file...`)
common.es(`echo -e "\nlistenonion=0" >> /mnt/blockchains/bitcoin/bitcoin.conf`) common.es(`echo "\nlistenonion=0" >> /mnt/blockchains/bitcoin/bitcoin.conf`)
} }
if (isCurrentlyRunning) { if (isCurrentlyRunning) {
@ -63,5 +63,6 @@ changetype=bech32
walletrbf=1 walletrbf=1
bind=0.0.0.0:8332 bind=0.0.0.0:8332
rpcport=8333 rpcport=8333
listenonion=0` listenonion=0
`
} }
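
The grep-then-append idiom above (also used in the Dash and Litecoin scripts below) reads as the following standalone sketch; execSync and appendFileSync stand in for the project's common.es shell helper:

const { execSync } = require('child_process')
const fs = require('fs')

function ensureConfigLine (confPath, key, value) {
  // grep exits non-zero when the key is absent; `|| true` keeps execSync from throwing
  const found = execSync(`grep "${key}=" ${confPath} || true`, { encoding: 'utf-8' }).trim()
  if (found) return // already defined, skip
  fs.appendFileSync(confPath, `\n${key}=${value}`)
}

// e.g. ensureConfigLine('/mnt/blockchains/bitcoin/bitcoin.conf', 'listenonion', '0')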

View file

@ -46,5 +46,6 @@ keypool=10000
prune=4000 prune=4000
daemon=0 daemon=0
bind=0.0.0.0:8335 bind=0.0.0.0:8335
rpcport=8336` rpcport=8336
`
} }

View file

@ -23,18 +23,16 @@ module.exports = {
const BINARIES = { const BINARIES = {
BTC: { BTC: {
defaultUrl: 'https://bitcoincore.org/bin/bitcoin-core-0.20.1/bitcoin-0.20.1-x86_64-linux-gnu.tar.gz',
defaultDir: 'bitcoin-0.20.1/bin',
url: 'https://bitcoincore.org/bin/bitcoin-core-22.0/bitcoin-22.0-x86_64-linux-gnu.tar.gz', url: 'https://bitcoincore.org/bin/bitcoin-core-22.0/bitcoin-22.0-x86_64-linux-gnu.tar.gz',
dir: 'bitcoin-22.0/bin' dir: 'bitcoin-22.0/bin'
}, },
ETH: { ETH: {
url: 'https://gethstore.blob.core.windows.net/builds/geth-linux-amd64-1.10.15-8be800ff.tar.gz', url: 'https://gethstore.blob.core.windows.net/builds/geth-linux-amd64-1.10.17-25c9b49f.tar.gz',
dir: 'geth-linux-amd64-1.10.15-8be800ff' dir: 'geth-linux-amd64-1.10.17-25c9b49f'
}, },
ZEC: { ZEC: {
url: 'https://z.cash/downloads/zcash-4.6.0-1-linux64-debian-stretch.tar.gz', url: 'https://z.cash/downloads/zcash-4.6.0-2-linux64-debian-bullseye.tar.gz',
dir: 'zcash-4.6.0-1/bin' dir: 'zcash-4.6.0-2/bin'
}, },
DASH: { DASH: {
url: 'https://github.com/dashpay/dash/releases/download/v0.17.0.3/dashcore-0.17.0.3-x86_64-linux-gnu.tar.gz', url: 'https://github.com/dashpay/dash/releases/download/v0.17.0.3/dashcore-0.17.0.3-x86_64-linux-gnu.tar.gz',
@ -56,7 +54,7 @@ const BINARIES = {
} }
} }
const coinsUpdateDependent = ['BTC'] const coinsUpdateDependent = []
function firewall (ports) { function firewall (ports) {
if (!ports || ports.length === 0) throw new Error('No ports supplied') if (!ports || ports.length === 0) throw new Error('No ports supplied')

View file

@ -34,7 +34,7 @@ function updateCore (coinRec, isCurrentlyRunning) {
common.logger.info(`enablecoinjoin already defined, skipping...`) common.logger.info(`enablecoinjoin already defined, skipping...`)
} else { } else {
common.logger.info(`Enabling CoinJoin in config file...`) common.logger.info(`Enabling CoinJoin in config file...`)
common.es(`echo -e "\nenablecoinjoin=1" >> /mnt/blockchains/dash/dash.conf`) common.es(`echo "\nenablecoinjoin=1" >> /mnt/blockchains/dash/dash.conf`)
} }
if (common.es(`grep "privatesendautostart=" /mnt/blockchains/dash/dash.conf || true`)) { if (common.es(`grep "privatesendautostart=" /mnt/blockchains/dash/dash.conf || true`)) {
@ -44,14 +44,14 @@ function updateCore (coinRec, isCurrentlyRunning) {
common.logger.info(`coinjoinautostart already defined, skipping...`) common.logger.info(`coinjoinautostart already defined, skipping...`)
} else { } else {
common.logger.info(`Enabling CoinJoin AutoStart in config file...`) common.logger.info(`Enabling CoinJoin AutoStart in config file...`)
common.es(`echo -e "\ncoinjoinautostart=1" >> /mnt/blockchains/dash/dash.conf`) common.es(`echo "\ncoinjoinautostart=1" >> /mnt/blockchains/dash/dash.conf`)
} }
if (common.es(`grep "litemode=" /mnt/blockchains/dash/dash.conf || true`)) { if (common.es(`grep "litemode=" /mnt/blockchains/dash/dash.conf || true`)) {
common.logger.info(`Switching from 'LiteMode' to 'DisableGovernance'...`) common.logger.info(`Switching from 'LiteMode' to 'DisableGovernance'...`)
common.es(`sed -i 's/litemode/disablegovernance/g' /mnt/blockchains/dash/dash.conf`) common.es(`sed -i 's/litemode/disablegovernance/g' /mnt/blockchains/dash/dash.conf`)
} else { } else {
common.es(`echo -e "\ndisablegovernance already defined, skipping..."`) common.es(`echo "\ndisablegovernance already defined, skipping..."`)
} }
if (isCurrentlyRunning) { if (isCurrentlyRunning) {
@ -71,5 +71,6 @@ disablegovernance=1
prune=4000 prune=4000
txindex=0 txindex=0
enablecoinjoin=1 enablecoinjoin=1
coinjoinautostart=1` coinjoinautostart=1
`
} }

View file

@ -31,7 +31,7 @@ function updateCore (coinRec, isCurrentlyRunning) {
common.logger.info(`changetype already defined, skipping...`) common.logger.info(`changetype already defined, skipping...`)
} else { } else {
common.logger.info(`Enabling bech32 change addresses in config file..`) common.logger.info(`Enabling bech32 change addresses in config file..`)
common.es(`echo -e "\nchangetype=bech32" >> /mnt/blockchains/litecoin/litecoin.conf`) common.es(`echo "\nchangetype=bech32" >> /mnt/blockchains/litecoin/litecoin.conf`)
} }
if (isCurrentlyRunning) { if (isCurrentlyRunning) {
@ -52,5 +52,6 @@ keypool=10000
prune=4000 prune=4000
daemon=0 daemon=0
addresstype=p2sh-segwit addresstype=p2sh-segwit
changetype=bech32` changetype=bech32
`
} }

View file

@ -49,5 +49,6 @@ addnode=mainnet.z.cash
rpcuser=lamassuserver rpcuser=lamassuserver
rpcpassword=${common.randomPass()} rpcpassword=${common.randomPass()}
dbcache=500 dbcache=500
keypool=10000` keypool=10000
`
} }

View file

@ -2,13 +2,14 @@ const _ = require('lodash/fp')
const pgp = require('pg-promise')() const pgp = require('pg-promise')()
const helper = require('./cash-out-helper') const helper = require('./cash-out-helper')
const { anonymousCustomer } = require('../constants')
const toDb = helper.toDb const toDb = helper.toDb
const toObj = helper.toObj const toObj = helper.toObj
const UPDATEABLE_FIELDS = ['txHash', 'txVersion', 'status', 'dispense', 'dispenseConfirmed', const UPDATEABLE_FIELDS = ['txHash', 'txVersion', 'status', 'dispense', 'dispenseConfirmed',
'notified', 'redeem', 'phone', 'error', 'swept', 'publishedAt', 'confirmedAt', 'errorCode', 'notified', 'redeem', 'phone', 'error', 'swept', 'publishedAt', 'confirmedAt', 'errorCode',
'receivedCryptoAtoms', 'walletScore' ] 'receivedCryptoAtoms', 'walletScore', 'customerId' ]
module.exports = {upsert, update, insert} module.exports = {upsert, update, insert}
@ -52,7 +53,15 @@ function diff (oldTx, newTx) {
// We never null out an existing field // We never null out an existing field
if (oldTx && _.isNil(newTx[fieldKey])) return if (oldTx && _.isNil(newTx[fieldKey])) return
updatedTx[fieldKey] = newTx[fieldKey] switch (fieldKey) {
case 'customerId':
if (oldTx.customerId === anonymousCustomer.uuid) {
return updatedTx['customerId'] = newTx['customerId']
}
return
default:
return updatedTx[fieldKey] = newTx[fieldKey]
}
}) })
return updatedTx return updatedTx
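
An illustrative sketch of the customerId rule introduced above: a transaction keeps its customer unless it is still attributed to the anonymous customer. ANONYMOUS_UUID is a placeholder for anonymousCustomer.uuid from ../constants:

const _ = require('lodash/fp')

const ANONYMOUS_UUID = '00000000-0000-0000-0000-000000000000' // placeholder, not the real value

function mergeCustomerId (oldTx, newTx) {
  if (_.isNil(newTx.customerId)) return {}
  // only a transaction still marked anonymous may be claimed by a real customer
  return oldTx.customerId === ANONYMOUS_UUID
    ? { customerId: newTx.customerId }
    : {}
}

// mergeCustomerId({ customerId: ANONYMOUS_UUID }, { customerId: 'real-id' })  => { customerId: 'real-id' }
// mergeCustomerId({ customerId: 'real-id' }, { customerId: 'other-id' })      => {}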

View file

@ -72,22 +72,25 @@ function getBillsByBatchId (id) {
function logFormatter (data) { function logFormatter (data) {
return _.map( return _.map(
it => { it => {
const bills = _.filter(
ite => !(_.isNil(ite) || _.isNil(ite.fiat_code) || _.isNil(ite.fiat) || _.isNaN(ite.fiat)),
it.bills
)
return { return {
id: it.id, id: it.id,
deviceId: it.deviceId, deviceId: it.deviceId,
created: it.created, created: it.created,
operationType: it.operationType, operationType: it.operationType,
performedBy: it.performedBy, billCount: _.size(bills),
billCount: _.size(it.bills),
fiatTotals: _.reduce( fiatTotals: _.reduce(
(acc, value) => { (acc, value) => {
acc[value.fiat_code] = (acc[value.fiat_code] || 0) + value.fiat acc[value.fiat_code] = (acc[value.fiat_code] || 0) + value.fiat
return acc return acc
}, },
{}, {},
it.bills bills
), ),
billsByDenomination: _.countBy(ite => `${ite.fiat} ${ite.fiat_code}`, it.bills) billsByDenomination: _.countBy(ite => `${ite.fiat} ${ite.fiat_code}`, bills)
} }
}, },
data data
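
A small worked example of the bill filtering above (values illustrative): entries with a missing or NaN fiat amount no longer skew the batch totals.

const _ = require('lodash/fp')

const bills = [
  { fiat: 20, fiat_code: 'EUR' },
  { fiat: 10, fiat_code: 'EUR' },
  { fiat: NaN, fiat_code: 'EUR' },
  null
]

const valid = _.filter(
  b => !(_.isNil(b) || _.isNil(b.fiat_code) || _.isNil(b.fiat) || _.isNaN(b.fiat)),
  bills
)
// valid has 2 entries, so billCount = 2,
// fiatTotals = { EUR: 30 }, billsByDenomination = { '20 EUR': 1, '10 EUR': 1 }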

lib/graphql/resolvers.js (new file, 276 lines)
View file

@ -0,0 +1,276 @@
const _ = require('lodash/fp')
const nmd = require('nano-markdown')
const { accounts: accountsConfig, countries, languages } = require('../new-admin/config')
const plugins = require('../plugins')
const configManager = require('../new-config-manager')
const { batchGetCustomInfoRequest, getCustomInfoRequests } = require('../new-admin/services/customInfoRequests')
const state = require('../middlewares/state')
const VERSION = require('../../package.json').version
const urlsToPing = [
`us.archive.ubuntu.com`,
`uk.archive.ubuntu.com`,
`za.archive.ubuntu.com`,
`cn.archive.ubuntu.com`
]
const speedtestFiles = [
{
url: 'https://github.com/lamassu/speed-test-assets/raw/main/python-defaults_2.7.18-3.tar.gz',
size: 44668
}
]
const addSmthInfo = (dstField, srcFields) => smth =>
smth && smth.active ? _.set(dstField, _.pick(srcFields, smth)) : _.identity
const addOperatorInfo = addSmthInfo(
'operatorInfo',
['name', 'phone', 'email', 'website', 'companyNumber']
)
const addReceiptInfo = addSmthInfo(
'receiptInfo',
[
'sms',
'operatorWebsite',
'operatorEmail',
'operatorPhone',
'companyNumber',
'machineLocation',
'customerNameOrPhoneNumber',
'exchangeRate',
'addressQRCode',
]
)
/* TODO: Simplify this. */
const buildTriggers = (allTriggers) => {
const normalTriggers = []
const customTriggers = _.filter(o => {
if (_.isEmpty(o.customInfoRequestId) || _.isNil(o.customInfoRequestId)) normalTriggers.push(o)
return !_.isNil(o.customInfoRequestId) && !_.isEmpty(o.customInfoRequestId)
}, allTriggers)
return _.flow(
_.map(_.get('customInfoRequestId')),
batchGetCustomInfoRequest
)(customTriggers)
.then(res => {
res.forEach((details, index) => {
// make sure we aren't attaching the details to the wrong trigger
if (customTriggers[index].customInfoRequestId !== details.id) return
customTriggers[index] = { ...customTriggers[index], customInfoRequest: details }
})
return [...normalTriggers, ...customTriggers]
})
}
const staticConfig = ({ currentConfigVersion, deviceId, deviceName, pq, settings, }) => {
const massageCoins = _.map(_.pick([
'batchable',
'cashInCommission',
'cashInFee',
'cashOutCommission',
'cryptoCode',
'cryptoNetwork',
'cryptoUnits',
'display',
'minimumTx'
]))
const staticConf = _.flow(
_.pick([
'areThereAvailablePromoCodes',
'coins',
'configVersion',
'timezone'
]),
_.update('coins', massageCoins),
_.set('serverVersion', VERSION),
)(pq)
return Promise.all([
!!configManager.getCompliance(settings.config).enablePaperWalletOnly,
configManager.getTriggersAutomation(getCustomInfoRequests(true), settings.config),
buildTriggers(configManager.getTriggers(settings.config)),
configManager.getWalletSettings('BTC', settings.config).layer2 !== 'no-layer2',
configManager.getLocale(deviceId, settings.config),
configManager.getOperatorInfo(settings.config),
configManager.getReceipt(settings.config),
!!configManager.getCashOut(deviceId, settings.config).active,
])
.then(([
enablePaperWalletOnly,
triggersAutomation,
triggers,
hasLightning,
localeInfo,
operatorInfo,
receiptInfo,
twoWayMode,
]) =>
(currentConfigVersion && currentConfigVersion >= staticConf.configVersion) ?
null :
_.flow(
_.assign({
enablePaperWalletOnly,
triggersAutomation,
triggers,
hasLightning,
localeInfo: {
country: localeInfo.country,
languages: localeInfo.languages,
fiatCode: localeInfo.fiatCurrency
},
machineInfo: { deviceId, deviceName },
twoWayMode,
speedtestFiles,
urlsToPing,
}),
_.update('triggersAutomation', _.mapValues(_.eq('Automatic'))),
addOperatorInfo(operatorInfo),
addReceiptInfo(receiptInfo)
)(staticConf))
}
const setZeroConfLimit = config => coin =>
_.set(
'zeroConfLimit',
configManager.getWalletSettings(coin.cryptoCode, config).zeroConfLimit,
coin
)
const dynamicConfig = ({ deviceId, operatorId, pid, pq, settings, }) => {
const massageCassettes = cassettes =>
cassettes ?
_.flow(
cassettes => _.set('physical', _.get('cassettes', cassettes), cassettes),
cassettes => _.set('virtual', _.get('virtualCassettes', cassettes), cassettes),
_.unset('cassettes'),
_.unset('virtualCassettes')
)(cassettes) :
null
state.pids = _.update(operatorId, _.set(deviceId, { pid, ts: Date.now() }), state.pids)
return _.flow(
_.pick(['balances', 'cassettes', 'coins', 'rates']),
_.update('cassettes', massageCassettes),
/* [{ cryptoCode, rates }, ...] => [[cryptoCode, rates], ...] */
_.update('coins', _.map(({ cryptoCode, rates }) => [cryptoCode, rates])),
/* [{ cryptoCode: balance }, ...] => [[cryptoCode, { balance }], ...] */
_.update('balances', _.flow(
_.toPairs,
_.map(([cryptoCode, balance]) => [cryptoCode, { balance }])
)),
/* Group the separate objects by cryptoCode */
/* { balances, coins, rates } => { cryptoCode: { balance, ask, bid, cashIn, cashOut }, ... } */
({ balances, cassettes, coins, rates }) => ({
cassettes,
coins: _.flow(
_.reduce(
(ret, [cryptoCode, obj]) => _.update(cryptoCode, _.assign(obj), ret),
rates
),
/* { cryptoCode: { balance, ask, bid, cashIn, cashOut }, ... } => [[cryptoCode, { balance, ask, bid, cashIn, cashOut }], ...] */
_.toPairs,
/* [[cryptoCode, { balance, ask, bid, cashIn, cashOut }], ...] => [{ cryptoCode, balance, ask, bid, cashIn, cashOut }, ...] */
_.map(([cryptoCode, obj]) => _.set('cryptoCode', cryptoCode, obj))
)(_.concat(balances, coins))
}),
_.update('coins', _.map(setZeroConfLimit(settings.config))),
_.set('reboot', !!pid && state.reboots?.[operatorId]?.[deviceId] === pid),
_.set('shutdown', !!pid && state.shutdowns?.[operatorId]?.[deviceId] === pid),
_.set('restartServices', !!pid && state.restartServicesMap?.[operatorId]?.[deviceId] === pid),
)(pq)
}
const configs = (parent, { currentConfigVersion }, { deviceId, deviceName, operatorId, pid, settings }, info) =>
plugins(settings, deviceId)
.pollQueries()
.then(pq => ({
static: staticConfig({
currentConfigVersion,
deviceId,
deviceName,
pq,
settings,
}),
dynamic: dynamicConfig({
deviceId,
operatorId,
pid,
pq,
settings,
}),
}))
const massageTerms = terms => (terms.active && terms.text) ? ({
delay: Boolean(terms.delay),
title: terms.title,
text: nmd(terms.text),
accept: terms.acceptButtonText,
cancel: terms.cancelButtonText,
}) : null
/*
* The type of the result of `configManager.getTermsConditions()` is more or
* less `Maybe (Maybe Hash, Maybe TC)`. Each case has a specific meaning to the
* machine:
*
* Nothing => Nothing
* There are no T&C or they've been removed/disabled.
*
* Just (Nothing, _) => Nothing
* Shouldn't happen! Treated as if there were no T&C.
*
* Just (Just hash, Nothing) => Nothing
* May happen (after `massageTerms`) if T&C are disabled.
*
* Just (Just hash, Just tc) => Just (hash, Just tc) or Just (hash, Nothing)
* `tc` is sent depending on whether the `hash` differs from `currentHash` or
* not.
*/
const terms = (parent, { currentConfigVersion, currentHash }, { deviceId, settings }, info) => {
const isNone = x => _.isNil(x) || _.isEmpty(x)
let latestTerms = configManager.getTermsConditions(settings.config)
if (isNone(latestTerms)) return null
const hash = latestTerms.hash
if (!_.isString(hash)) return null
latestTerms = massageTerms(latestTerms)
if (isNone(latestTerms)) return null
const isHashNew = hash !== currentHash
const text = isHashNew ? latestTerms.text : null
return plugins(settings, deviceId)
.fetchCurrentConfigVersion()
.catch(() => null)
.then(configVersion => isHashNew || _.isNil(currentConfigVersion) || currentConfigVersion < configVersion)
.then(isVersionNew => isVersionNew ? _.omit(['text'], latestTerms) : null)
.then(details => ({ hash, details, text }))
}
module.exports = {
Query: {
configs,
terms,
}
}
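
Worked examples of what the terms resolver above returns (hashes and text are illustrative):

// currentHash matches the stored hash and currentConfigVersion is up to date:
//   { hash: 'abc123', details: null, text: null }    // nothing new for the machine
// currentHash differs from the stored hash:
//   { hash: 'def456', details: { delay, title, accept, cancel }, text: '<rendered T&C markdown>' }
// T&C disabled or missing in config:
//   null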

lib/graphql/server.js (new file, 27 lines)
View file

@ -0,0 +1,27 @@
const logger = require('../logger')
const https = require('https')
const { ApolloServer } = require('apollo-server-express')
const devMode = !!require('minimist')(process.argv.slice(2)).dev
module.exports = new ApolloServer({
typeDefs: require('./types'),
resolvers: require('./resolvers'),
context: ({ req, res }) => ({
deviceId: req.deviceId, /* lib/middlewares/populateDeviceId.js */
deviceName: req.deviceName, /* lib/middlewares/authorize.js */
operatorId: res.locals.operatorId, /* lib/middlewares/operatorId.js */
pid: req.query.pid,
settings: req.settings, /* lib/middlewares/populateSettings.js */
}),
uploads: false,
playground: false,
introspection: false,
formatError: error => {
logger.error(error)
return error
},
debug: devMode,
logger
})

lib/graphql/types.js (new file, 155 lines)
View file

@ -0,0 +1,155 @@
const { gql } = require('apollo-server-express')
module.exports = gql`
type Coin {
cryptoCode: String!
display: String!
minimumTx: String!
cashInFee: String!
cashInCommission: String!
cashOutCommission: String!
cryptoNetwork: Boolean!
cryptoUnits: String!
batchable: Boolean!
}
type LocaleInfo {
country: String!
fiatCode: String!
languages: [String!]!
}
type OperatorInfo {
name: String!
phone: String!
email: String!
website: String!
companyNumber: String!
}
type MachineInfo {
deviceId: String!
deviceName: String
}
type ReceiptInfo {
sms: Boolean!
operatorWebsite: Boolean!
operatorEmail: Boolean!
operatorPhone: Boolean!
companyNumber: Boolean!
machineLocation: Boolean!
customerNameOrPhoneNumber: Boolean!
exchangeRate: Boolean!
addressQRCode: Boolean!
}
type SpeedtestFile {
url: String!
size: Int!
}
# True if automatic, False otherwise
type TriggersAutomation {
sanctions: Boolean!
idCardPhoto: Boolean!
idCardData: Boolean!
facephoto: Boolean!
usSsn: Boolean!
}
type Trigger {
id: String!
customInfoRequestId: String!
direction: String!
requirement: String!
triggerType: String!
suspensionDays: Int
threshold: Int
thresholdDays: Int
}
type TermsDetails {
delay: Boolean!
title: String!
accept: String!
cancel: String!
}
type Terms {
hash: String!
text: String
details: TermsDetails
}
type StaticConfig {
configVersion: Int!
areThereAvailablePromoCodes: Boolean!
coins: [Coin!]!
enablePaperWalletOnly: Boolean!
hasLightning: Boolean!
serverVersion: String!
timezone: Int!
twoWayMode: Boolean!
localeInfo: LocaleInfo!
operatorInfo: OperatorInfo
machineInfo: MachineInfo!
receiptInfo: ReceiptInfo
speedtestFiles: [SpeedtestFile!]!
urlsToPing: [String!]!
triggersAutomation: TriggersAutomation!
triggers: [Trigger!]!
}
type DynamicCoinValues {
# NOTE: Doesn't seem to be used anywhere outside of lib/plugins.js.
# However, it can be used to generate the cache key, if we ever move to an
# actual caching mechanism.
#timestamp: String!
cryptoCode: String!
balance: String!
# Raw rates
ask: String!
bid: String!
# Rates with commissions applied
cashIn: String!
cashOut: String!
zeroConfLimit: Int!
}
type PhysicalCassette {
denomination: Int!
count: Int!
}
type Cassettes {
physical: [PhysicalCassette!]!
virtual: [Int!]!
}
type DynamicConfig {
cassettes: Cassettes
coins: [DynamicCoinValues!]!
reboot: Boolean!
shutdown: Boolean!
restartServices: Boolean!
}
type Configs {
static: StaticConfig
dynamic: DynamicConfig!
}
type Query {
configs(currentConfigVersion: Int): Configs!
terms(currentHash: String, currentConfigVersion: Int): Terms
}
`
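
An example of the kind of query a machine could now issue against this schema (the field selection is illustrative, not taken from the machine code):

const { gql } = require('apollo-server-express')

const CONFIGS_QUERY = gql`
  query Configs($currentConfigVersion: Int) {
    configs(currentConfigVersion: $currentConfigVersion) {
      static {
        configVersion
        serverVersion
        twoWayMode
        coins { cryptoCode display minimumTx }
      }
      dynamic {
        reboot
        shutdown
        restartServices
        coins { cryptoCode balance cashIn cashOut zeroConfLimit }
      }
    }
  }
`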

View file

@ -209,6 +209,7 @@ function setMachine (rec, operatorId) {
} }
function updateNetworkPerformance (deviceId, data) { function updateNetworkPerformance (deviceId, data) {
if (_.isEmpty(data)) return Promise.resolve(true)
const downloadSpeed = _.head(data) const downloadSpeed = _.head(data)
const dbData = { const dbData = {
device_id: deviceId, device_id: deviceId,
@ -224,6 +225,7 @@ function updateNetworkPerformance (deviceId, data) {
} }
function updateNetworkHeartbeat (deviceId, data) { function updateNetworkHeartbeat (deviceId, data) {
if (_.isEmpty(data)) return Promise.resolve(true)
const avgResponseTime = _.meanBy(e => _.toNumber(e.averageResponseTime), data) const avgResponseTime = _.meanBy(e => _.toNumber(e.averageResponseTime), data)
const avgPacketLoss = _.meanBy(e => _.toNumber(e.packetLoss), data) const avgPacketLoss = _.meanBy(e => _.toNumber(e.packetLoss), data)
const dbData = { const dbData = {

View file

@ -11,7 +11,6 @@ function sha256 (buf) {
} }
const populateDeviceId = function (req, res, next) { const populateDeviceId = function (req, res, next) {
logger.info(`DEBUG LOG - Method: ${req.method} Path: ${req.path}`)
const deviceId = _.isFunction(req.connection.getPeerCertificate) const deviceId = _.isFunction(req.connection.getPeerCertificate)
? sha256(req.connection.getPeerCertificate().raw) ? sha256(req.connection.getPeerCertificate().raw)
: null : null

View file

@ -0,0 +1,7 @@
const plugins = require('../plugins')
module.exports = (req, res, next) =>
plugins(req.settings, req.deviceId)
.recordPing(req.deviceTime, req.query.version, req.query.model)
.then(() => next())
.catch(() => next())

View file

@ -1,7 +1,9 @@
const _ = require('lodash/fp') const _ = require('lodash/fp')
const fs = require('fs') const fs = require('fs')
const os = require('os')
const makeDir = require('make-dir') const makeDir = require('make-dir')
const path = require('path') const path = require('path')
const cp = require('child_process')
const load = require('./options-loader') const load = require('./options-loader')
const logger = require('./logger') const logger = require('./logger')
@ -53,61 +55,13 @@ function updateOptionBasepath (result, optionName) {
} }
async function run () { async function run () {
// load defaults
const defaultOpts = require('../lamassu-default')
// load current opts // load current opts
const options = load() const options = load().opts
const currentOpts = options.opts const shouldMigrate = !fs.existsSync(process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env'))
// check if there are new options to add // write the resulting .env
let result = _.mergeAll([defaultOpts, currentOpts])
// get all the options
// that ends with "Path" suffix
logger.info(`Detected lamassu-server basepath: ${currentBasePath}`)
_.each(_.wrap(updateOptionBasepath, result),
[
'seedPath',
'caPath',
'certPath',
'keyPath',
'lamassuCaPath'
])
const shouldMigrate = !_.isEqual(result, currentOpts) || _.has('lamassuServerPath', result)
// write the resulting lamassu.json
if (shouldMigrate) { if (shouldMigrate) {
// remove old lamassuServerPath config const postgresPw = new RegExp(':(\\w*)@').exec(options.postgresql)[1]
result = _.omit('lamassuServerPath', result) cp.spawnSync('node', ['tools/build-prod-env.js', '--db-password', postgresPw, '--hostname', options.hostname], { cwd: currentBasePath, encoding: 'utf-8' })
// find keys for which values
// have been changed
const differentValue = _.wrap(_.filter, key => !_.isEqual(result[key], currentOpts[key]))
// output affected options
const newOpts = _.pick(_.union(
// find change keys
differentValue(_.keys(result)),
// find new opts
_.difference(_.keys(result), _.keys(currentOpts))
), result)
logger.info('Updating options', newOpts)
// store new lamassu.json file
fs.writeFileSync(options.path, JSON.stringify(result, null, ' '))
} }
// get all the new options
// that ends with "Dir" suffix
mapKeyValuesDeep((v, k) => {
if (_.endsWith('Dir', k)) {
const path = _.attempt(() => makeDir.sync(v))
if (_.isError(path)) {
logger.error(`while creating folder ${v}`, path)
}
}
}, result)
} }
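
Illustrative only: how the database password is pulled from the existing connection string above before tools/build-prod-env.js is spawned (URL is made up):

const url = 'postgres://lamassu_pg:S3cr3tPw@localhost:5432/lamassu' // example value
const pw = new RegExp(':(\\w*)@').exec(url)[1] // => 'S3cr3tPw'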

View file

@ -12,7 +12,7 @@ const { graphqlUploadExpress } = require('graphql-upload')
const { ApolloServer } = require('apollo-server-express') const { ApolloServer } = require('apollo-server-express')
const _ = require('lodash/fp') const _ = require('lodash/fp')
require('dotenv').config({ path: path.resolve(__dirname, '../../.env') }) require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
const { asyncLocalStorage, defaultStore } = require('../async-storage') const { asyncLocalStorage, defaultStore } = require('../async-storage')
const logger = require('../logger') const logger = require('../logger')

View file

@ -2,7 +2,7 @@ const express = require('express')
const path = require('path') const path = require('path')
const { ApolloServer } = require('apollo-server-express') const { ApolloServer } = require('apollo-server-express')
require('dotenv').config({ path: path.resolve(__dirname, '../../.env') }) require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
const { typeDefs, resolvers } = require('./graphql/schema') const { typeDefs, resolvers } = require('./graphql/schema')
const logger = require('../logger') const logger = require('../logger')

View file

@ -1,5 +1,4 @@
const _ = require('lodash/fp') const _ = require('lodash/fp')
const { getCustomInfoRequests } = require('./new-admin/services/customInfoRequests')
const namespaces = { const namespaces = {
ADVANCED: 'advanced', ADVANCED: 'advanced',
@ -21,6 +20,7 @@ const filter = namespace => _.pickBy((value, key) => _.startsWith(`${namespace}_
const strip = key => _.mapKeys(stripl(`${key}_`)) const strip = key => _.mapKeys(stripl(`${key}_`))
const fromNamespace = _.curry((key, config) => _.compose(strip(key), filter(key))(config)) const fromNamespace = _.curry((key, config) => _.compose(strip(key), filter(key))(config))
const toNamespace = _.curry((ns, config) => _.mapKeys(key => `${ns}_${key}`, config))
const getCommissions = (cryptoCode, deviceId, config) => { const getCommissions = (cryptoCode, deviceId, config) => {
const commissions = fromNamespace(namespaces.COMMISSIONS)(config) const commissions = fromNamespace(namespaces.COMMISSIONS)(config)
@ -48,7 +48,6 @@ const getCommissions = (cryptoCode, deviceId, config) => {
const getLocale = (deviceId, it) => { const getLocale = (deviceId, it) => {
const locale = fromNamespace(namespaces.LOCALE)(it) const locale = fromNamespace(namespaces.LOCALE)(it)
const filter = _.matches({ machine: deviceId }) const filter = _.matches({ machine: deviceId })
return _.omit('overrides', _.assignAll([locale, ..._.filter(filter)(locale.overrides)])) return _.omit('overrides', _.assignAll([locale, ..._.filter(filter)(locale.overrides)]))
} }
@ -117,8 +116,9 @@ const getGlobalNotifications = config => getNotifications(null, null, config)
const getTriggers = _.get('triggers') const getTriggers = _.get('triggers')
const getTriggersAutomation = config => { /* `customInfoRequests` is the result of a call to `getCustomInfoRequests` */
return getCustomInfoRequests(true) const getTriggersAutomation = (customInfoRequests, config) => {
return customInfoRequests
.then(infoRequests => { .then(infoRequests => {
const defaultAutomation = _.get('triggersConfig_automation')(config) const defaultAutomation = _.get('triggersConfig_automation')(config)
const requirements = { const requirements = {
@ -155,6 +155,8 @@ const getCryptoUnits = (crypto, config) => {
return getWalletSettings(crypto, config).cryptoUnits return getWalletSettings(crypto, config).cryptoUnits
} }
const setTermsConditions = toNamespace(namespaces.TERMS_CONDITIONS)
module.exports = { module.exports = {
getWalletSettings, getWalletSettings,
getCashInSettings, getCashInSettings,
@ -174,5 +176,6 @@ module.exports = {
getGlobalCashOut, getGlobalCashOut,
getCashOut, getCashOut,
getCryptosFromWalletNamespace, getCryptosFromWalletNamespace,
getCryptoUnits getCryptoUnits,
setTermsConditions,
} }
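
A small worked example of the namespace helpers this module is built on, assuming namespaces.TERMS_CONDITIONS is the string 'termsConditions':

// toNamespace('termsConditions', { text: 'Be nice.', hash: 'abc' })
//   => { termsConditions_text: 'Be nice.', termsConditions_hash: 'abc' }
// fromNamespace('termsConditions', { termsConditions_text: 'Be nice.', locale_country: 'PT' })
//   => { text: 'Be nice.' }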

View file

@ -1,8 +1,11 @@
const crypto = require('crypto')
const _ = require('lodash/fp') const _ = require('lodash/fp')
const db = require('./db') const db = require('./db')
const migration = require('./config-migration') const migration = require('./config-migration')
const { asyncLocalStorage } = require('./async-storage') const { asyncLocalStorage } = require('./async-storage')
const { getOperatorId } = require('./operator') const { getOperatorId } = require('./operator')
const { getTermsConditions, setTermsConditions } = require('./new-config-manager')
const OLD_SETTINGS_LOADER_SCHEMA_VERSION = 1 const OLD_SETTINGS_LOADER_SCHEMA_VERSION = 1
const NEW_SETTINGS_LOADER_SCHEMA_VERSION = 2 const NEW_SETTINGS_LOADER_SCHEMA_VERSION = 2
@ -23,6 +26,29 @@ const SECRET_FIELDS = [
'twilio.authToken' 'twilio.authToken'
] ]
/*
* JSON.stringify isn't necessarily deterministic so this function may compute
* different hashes for the same object.
*/
const md5hash = text =>
crypto
.createHash('MD5')
.update(text)
.digest('hex')
const addTermsHash = configs => {
const terms = _.omit(['hash'], getTermsConditions(configs))
return _.isEmpty(terms) ?
configs :
_.flow(
_.get('text'),
md5hash,
hash => _.set('hash', hash, terms),
setTermsConditions,
_.assign(configs),
)(terms)
}
const accountsSql = `update user_config set data = $2, valid = $3, schema_version = $4 where type = $1; const accountsSql = `update user_config set data = $2, valid = $3, schema_version = $4 where type = $1;
insert into user_config (type, data, valid, schema_version) insert into user_config (type, data, valid, schema_version)
select $1, $2, $3, $4 where $1 not in (select type from user_config)` select $1, $2, $3, $4 where $1 not in (select type from user_config)`
@ -74,7 +100,7 @@ const configSql = 'insert into user_config (type, data, valid, schema_version) v
function saveConfig (config) { function saveConfig (config) {
return Promise.all([loadLatestConfigOrNone(), getOperatorId('middleware')]) return Promise.all([loadLatestConfigOrNone(), getOperatorId('middleware')])
.then(([currentConfig, operatorId]) => { .then(([currentConfig, operatorId]) => {
const newConfig = _.assign(currentConfig, config) const newConfig = addTermsHash(_.assign(currentConfig, config))
return db.tx(t => { return db.tx(t => {
return t.none(configSql, ['config', { config: newConfig }, true, NEW_SETTINGS_LOADER_SCHEMA_VERSION]) return t.none(configSql, ['config', { config: newConfig }, true, NEW_SETTINGS_LOADER_SCHEMA_VERSION])
.then(() => t.none('NOTIFY $1:name, $2', ['reload', JSON.stringify({ schema: asyncLocalStorage.getStore().get('schema'), operatorId })])) .then(() => t.none('NOTIFY $1:name, $2', ['reload', JSON.stringify({ schema: asyncLocalStorage.getStore().get('schema'), operatorId })]))

View file

@ -4,7 +4,7 @@ const os = require('os')
const argv = require('minimist')(process.argv.slice(2)) const argv = require('minimist')(process.argv.slice(2))
const _ = require('lodash/fp') const _ = require('lodash/fp')
require('dotenv').config({ path: path.resolve(__dirname, '../.env') }) require('dotenv').config({ path: process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env') })
const DATABASE = process.env.LAMASSU_DB ?? 'PROD' const DATABASE = process.env.LAMASSU_DB ?? 'PROD'
const dbMapping = psqlConf => ({ const dbMapping = psqlConf => ({

View file

@ -25,6 +25,7 @@ const customers = require('./customers')
const commissionMath = require('./commission-math') const commissionMath = require('./commission-math')
const loyalty = require('./loyalty') const loyalty = require('./loyalty')
const transactionBatching = require('./tx-batching') const transactionBatching = require('./tx-batching')
const state = require('./middlewares/state')
const { CASSETTE_MAX_CAPACITY, CASH_OUT_DISPENSE_READY, CONFIRMATION_CODE } = require('./constants') const { CASSETTE_MAX_CAPACITY, CASH_OUT_DISPENSE_READY, CONFIRMATION_CODE } = require('./constants')
@ -39,7 +40,6 @@ const mapValuesWithKey = _.mapValues.convert({
const TRADE_TTL = 2 * T.minutes const TRADE_TTL = 2 * T.minutes
const STALE_TICKER = 3 * T.minutes const STALE_TICKER = 3 * T.minutes
const STALE_BALANCE = 3 * T.minutes const STALE_BALANCE = 3 * T.minutes
const PONG_TTL = '1 week'
const tradesQueues = {} const tradesQueues = {}
function plugins (settings, deviceId) { function plugins (settings, deviceId) {
@ -206,8 +206,7 @@ function plugins (settings, deviceId) {
} }
function mapCoinSettings (coinParams) { function mapCoinSettings (coinParams) {
const cryptoCode = coinParams[0] const [ cryptoCode, cryptoNetwork ] = coinParams
const cryptoNetwork = coinParams[1]
const commissions = configManager.getCommissions(cryptoCode, deviceId, settings.config) const commissions = configManager.getCommissions(cryptoCode, deviceId, settings.config)
const minimumTx = new BN(commissions.minimumTx) const minimumTx = new BN(commissions.minimumTx)
const cashInFee = new BN(commissions.fixedFee) const cashInFee = new BN(commissions.fixedFee)
@ -228,56 +227,57 @@ function plugins (settings, deviceId) {
} }
} }
function pollQueries (serialNumber, deviceTime, deviceRec, machineVersion, machineModel) { function pollQueries () {
const localeConfig = configManager.getLocale(deviceId, settings.config) const localeConfig = configManager.getLocale(deviceId, settings.config)
const fiatCode = localeConfig.fiatCurrency const fiatCode = localeConfig.fiatCurrency
const cryptoCodes = localeConfig.cryptoCurrencies const cryptoCodes = localeConfig.cryptoCurrencies
const timezone = millisecondsToMinutes(getTimezoneOffset(localeConfig.timezone))
const tickerPromises = cryptoCodes.map(c => ticker.getRates(settings, fiatCode, c)) const tickerPromises = cryptoCodes.map(c => ticker.getRates(settings, fiatCode, c))
const balancePromises = cryptoCodes.map(c => fiatBalance(fiatCode, c)) const balancePromises = cryptoCodes.map(c => fiatBalance(fiatCode, c))
const testnetPromises = cryptoCodes.map(c => wallet.cryptoNetwork(settings, c)) const networkPromises = cryptoCodes.map(c => wallet.cryptoNetwork(settings, c))
const pingPromise = recordPing(deviceTime, machineVersion, machineModel)
const currentConfigVersionPromise = fetchCurrentConfigVersion()
const currentAvailablePromoCodes = loyalty.getNumberOfAvailablePromoCodes()
const supportsBatchingPromise = cryptoCodes.map(c => wallet.supportsBatching(settings, c)) const supportsBatchingPromise = cryptoCodes.map(c => wallet.supportsBatching(settings, c))
const promises = [ return Promise.all([
buildAvailableCassettes(), buildAvailableCassettes(),
pingPromise, fetchCurrentConfigVersion(),
currentConfigVersionPromise, millisecondsToMinutes(getTimezoneOffset(localeConfig.timezone)),
timezone loyalty.getNumberOfAvailablePromoCodes(),
].concat( Promise.all(supportsBatchingPromise),
supportsBatchingPromise, Promise.all(tickerPromises),
tickerPromises, Promise.all(balancePromises),
balancePromises, Promise.all(networkPromises)
testnetPromises, ])
currentAvailablePromoCodes .then(([
) cassettes,
configVersion,
timezone,
numberOfAvailablePromoCodes,
batchableCoins,
tickers,
balances,
networks
]) => {
const coinsWithoutRate = _.flow(
_.zip(cryptoCodes),
_.map(mapCoinSettings)
)(networks)
return Promise.all(promises) const coins = _.flow(
.then(arr => { _.map(it => ({ batchable: it })),
const cassettes = arr[0] _.zipWith(
const configVersion = arr[2] _.assign,
const tz = arr[3] _.zipWith(_.assign, coinsWithoutRate, tickers)
const cryptoCodesCount = cryptoCodes.length )
const batchableCoinsRes = arr.slice(4, cryptoCodesCount + 4) )(batchableCoins)
const batchableCoins = batchableCoinsRes.map(it => ({ batchable: it }))
const tickers = arr.slice(cryptoCodesCount + 4, 2 * cryptoCodesCount + 4)
const balances = arr.slice(2 * cryptoCodesCount + 4, 3 * cryptoCodesCount + 4)
const testNets = arr.slice(3 * cryptoCodesCount + 4, arr.length - 1)
const coinParams = _.zip(cryptoCodes, testNets)
const coinsWithoutRate = _.map(mapCoinSettings, coinParams)
const areThereAvailablePromoCodes = arr[arr.length - 1] > 0
return { return {
cassettes, cassettes,
rates: buildRates(tickers), rates: buildRates(tickers),
balances: buildBalances(balances), balances: buildBalances(balances),
coins: _.zipWith(_.assign, _.zipWith(_.assign, coinsWithoutRate, tickers), batchableCoins), coins,
configVersion, configVersion,
areThereAvailablePromoCodes, areThereAvailablePromoCodes: numberOfAvailablePromoCodes > 0,
timezone: tz timezone
} }
}) })
} }
@ -365,12 +365,12 @@ function plugins (settings, deviceId) {
const rate = rawRate.div(cashInCommission) const rate = rawRate.div(cashInCommission)
const lowBalanceMargin = new BN(1.05) const lowBalanceMargin = new BN(0.95)
const cryptoRec = coinUtils.getCryptoCurrency(cryptoCode) const cryptoRec = coinUtils.getCryptoCurrency(cryptoCode)
const unitScale = cryptoRec.unitScale const unitScale = cryptoRec.unitScale
const shiftedRate = rate.shiftedBy(-unitScale) const shiftedRate = rate.shiftedBy(-unitScale)
const fiatTransferBalance = balance.times(shiftedRate).div(lowBalanceMargin) const fiatTransferBalance = balance.times(shiftedRate).times(lowBalanceMargin)
return { return {
timestamp: balanceRec.timestamp, timestamp: balanceRec.timestamp,
@ -850,6 +850,7 @@ function plugins (settings, deviceId) {
return { return {
getRates, getRates,
recordPing,
buildRates, buildRates,
getRawRates, getRawRates,
buildRatesNoCommission, buildRatesNoCommission,
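
Worked example of the lowBalanceMargin change above, for a crypto balance worth 1000 fiat units at the commission-adjusted rate:

// before: fiatTransferBalance = 1000 / 1.05 ≈ 952.38
// after:  fiatTransferBalance = 1000 * 0.95 = 950.00
// The 5% safety margin is now applied multiplicatively, yielding a slightly more conservative figure.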

View file

@ -39,11 +39,11 @@ function fetch (account = {}, method, params) {
return r.data.result return r.data.result
}) })
.catch(err => { .catch(err => {
throw new Error(_.join(' ', [ throw new Error(JSON.stringify({
'json-rpc::axios error:', responseMessage: _.get('message', err),
JSON.stringify(_.get('message', err, '')), message: _.get('response.data.error.message', err),
JSON.stringify(_.get('response.data.error', err, '')) code: _.get('response.data.error.code', err)
])) }))
}) })
} }
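
Illustrative shape of the error thrown above (values are made up); the per-coin errorHandle helpers added below JSON.parse this message and switch on code:

// new Error(JSON.stringify({
//   responseMessage: 'Request failed with status code 500',
//   message: 'Insufficient funds',
//   code: -6
// }))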

View file

@ -6,8 +6,18 @@ const { getRate } = require('../../../lib/forex')
const RETRIES = 2 const RETRIES = 2
const tickerObjects = {}
function ticker (fiatCode, cryptoCode, tickerName) { function ticker (fiatCode, cryptoCode, tickerName) {
const ticker = new ccxt[tickerName]({ timeout: 3000 }) if (!tickerObjects[tickerName]) {
tickerObjects[tickerName] = new ccxt[tickerName]({
timeout: 3000,
enableRateLimit: false,
})
}
const ticker = tickerObjects[tickerName]
if (verifyFiatSupport(fiatCode, tickerName)) { if (verifyFiatSupport(fiatCode, tickerName)) {
return getCurrencyRates(ticker, fiatCode, cryptoCode) return getCurrencyRates(ticker, fiatCode, cryptoCode)
} }

View file

@ -14,6 +14,16 @@ function fetch (method, params) {
return jsonRpc.fetch(rpcConfig, method, params) return jsonRpc.fetch(rpcConfig, method, params)
} }
function errorHandle (e) {
const err = JSON.parse(e.message)
switch (err.code) {
case -6:
throw new E.InsufficientFundsError()
default:
throw e
}
}
function checkCryptoCode (cryptoCode) { function checkCryptoCode (cryptoCode) {
if (cryptoCode !== 'BCH') return Promise.reject(new Error('Unsupported crypto: ' + cryptoCode)) if (cryptoCode !== 'BCH') return Promise.reject(new Error('Unsupported crypto: ' + cryptoCode))
return Promise.resolve() return Promise.resolve()
@ -50,10 +60,7 @@ function sendCoins (account, tx, settings, operatorId) {
txid: pickedObj.txid txid: pickedObj.txid
} }
}) })
.catch(err => { .catch(errorHandle)
if (err.code === -6) throw new E.InsufficientFundsError()
throw err
})
} }
function newAddress (account, info, tx, settings, operatorId) { function newAddress (account, info, tx, settings, operatorId) {

View file

@ -17,21 +17,55 @@ function fetch (method, params) {
return jsonRpc.fetch(rpcConfig, method, params) return jsonRpc.fetch(rpcConfig, method, params)
} }
function errorHandle (e) {
const err = JSON.parse(e.message)
switch (err.code) {
case -4:
return loadWallet()
case -5:
return logger.error(`${err}`)
case -6:
throw new E.InsufficientFundsError()
case -18:
return createWallet()
case -35:
// Wallet is already loaded, just return
return
default:
throw e
}
}
function checkCryptoCode (cryptoCode) { function checkCryptoCode (cryptoCode) {
if (cryptoCode !== 'BTC') return Promise.reject(new Error('Unsupported crypto: ' + cryptoCode)) if (cryptoCode !== 'BTC') return Promise.reject(new Error('Unsupported crypto: ' + cryptoCode))
return Promise.resolve() return Promise.resolve().then(loadWallet)
}
function createWallet () {
return fetch('createwallet', ['wallet'])
.then(loadWallet)
}
function loadWallet () {
return fetch('loadwallet', ['wallet', true])
// Catching the error here to suppress error code -35
// This improves UX on the initial wallet load and serves as error sink
// for wallet creation/loading related issues before actual business logic runs
.catch(errorHandle)
} }
function accountBalance (cryptoCode) { function accountBalance (cryptoCode) {
return checkCryptoCode(cryptoCode) return checkCryptoCode(cryptoCode)
.then(() => fetch('getwalletinfo')) .then(() => fetch('getwalletinfo'))
.then(({ balance }) => new BN(balance).shiftedBy(unitScale).decimalPlaces(0)) .then(({ balance }) => new BN(balance).shiftedBy(unitScale).decimalPlaces(0))
.catch(errorHandle)
} }
function accountUnconfirmedBalance (cryptoCode) { function accountUnconfirmedBalance (cryptoCode) {
return checkCryptoCode(cryptoCode) return checkCryptoCode(cryptoCode)
.then(() => fetch('getwalletinfo')) .then(() => fetch('getwalletinfo'))
.then(({ unconfirmed_balance: balance }) => new BN(balance).shiftedBy(unitScale).decimalPlaces(0)) .then(({ unconfirmed_balance: balance }) => new BN(balance).shiftedBy(unitScale).decimalPlaces(0))
.catch(errorHandle)
} }
// We want a balance that includes all spends (0 conf) but only deposits that // We want a balance that includes all spends (0 conf) but only deposits that
@ -75,10 +109,7 @@ function sendCoins (account, tx, settings, operatorId, feeMultiplier) {
txid: pickedObj.txid txid: pickedObj.txid
} }
}) })
.catch(err => { .catch(errorHandle)
if (err.code === -6) throw new E.InsufficientFundsError()
throw err
})
} }
function sendCoinsBatch (account, txs, cryptoCode, feeMultiplier) { function sendCoinsBatch (account, txs, cryptoCode, feeMultiplier) {
@ -98,20 +129,19 @@ function sendCoinsBatch (account, txs, cryptoCode, feeMultiplier) {
fee: new BN(pickedObj.fee).abs().shiftedBy(unitScale).decimalPlaces(0), fee: new BN(pickedObj.fee).abs().shiftedBy(unitScale).decimalPlaces(0),
txid: pickedObj.txid txid: pickedObj.txid
})) }))
.catch(err => { .catch(errorHandle)
if (err.code === -6) throw new E.InsufficientFundsError()
throw err
})
} }
function newAddress (account, info, tx, settings, operatorId) { function newAddress (account, info, tx, settings, operatorId) {
return checkCryptoCode(info.cryptoCode) return checkCryptoCode(info.cryptoCode)
.then(() => fetch('getnewaddress')) .then(() => fetch('getnewaddress'))
.catch(errorHandle)
} }
function addressBalance (address, confs) { function addressBalance (address, confs) {
return fetch('getreceivedbyaddress', [address, confs]) return fetch('getreceivedbyaddress', [address, confs])
.then(r => new BN(r).shiftedBy(unitScale).decimalPlaces(0)) .then(r => new BN(r).shiftedBy(unitScale).decimalPlaces(0))
.catch(errorHandle)
} }
function confirmedBalance (address, cryptoCode) { function confirmedBalance (address, cryptoCode) {
@ -156,6 +186,7 @@ function newFunding (account, cryptoCode, settings, operatorId) {
fundingConfirmedBalance, fundingConfirmedBalance,
fundingAddress fundingAddress
})) }))
.catch(errorHandle)
} }
function cryptoNetwork (account, cryptoCode, settings, operatorId) { function cryptoNetwork (account, cryptoCode, settings, operatorId) {
@ -169,7 +200,7 @@ function fetchRBF (txId) {
return [txId, res['bip125-replaceable']] return [txId, res['bip125-replaceable']]
}) })
.catch(err => { .catch(err => {
if (err.code === -5) logger.error(`${err.message}`) errorHandle(err)
return [txId, true] return [txId, true]
}) })
} }
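
For reference, a summary of what the new errorHandle/loadWallet path above does with each bitcoind RPC error code (meanings follow the usual Bitcoin Core conventions):

//  -4  generic wallet error      -> loadWallet() retries the load
//  -5  invalid address or key    -> logged via logger.error
//  -6  insufficient funds        -> E.InsufficientFundsError
// -18  wallet does not exist     -> createWallet(), then loadWallet()
// -35  wallet already loaded     -> treated as success (no-op)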

View file

@ -15,6 +15,16 @@ function fetch (method, params) {
return jsonRpc.fetch(rpcConfig, method, params) return jsonRpc.fetch(rpcConfig, method, params)
} }
function errorHandle (e) {
const err = JSON.parse(e.message)
switch (err.code) {
case -6:
throw new E.InsufficientFundsError()
default:
throw e
}
}
function checkCryptoCode (cryptoCode) { function checkCryptoCode (cryptoCode) {
if (cryptoCode !== 'DASH') return Promise.reject(new Error('Unsupported crypto: ' + cryptoCode)) if (cryptoCode !== 'DASH') return Promise.reject(new Error('Unsupported crypto: ' + cryptoCode))
return Promise.resolve() return Promise.resolve()
@ -52,10 +62,7 @@ function sendCoins (account, tx, settings, operatorId) {
txid: pickedObj.txid txid: pickedObj.txid
} }
}) })
.catch(err => { .catch(errorHandle)
if (err.code === -6) throw new E.InsufficientFundsError()
throw err
})
} }
function newAddress (account, info, tx, settings, operatorId) { function newAddress (account, info, tx, settings, operatorId) {

View file

@ -15,6 +15,16 @@ function fetch (method, params) {
return jsonRpc.fetch(rpcConfig, method, params) return jsonRpc.fetch(rpcConfig, method, params)
} }
function errorHandle (e) {
const err = JSON.parse(e.message)
switch (err.code) {
case -6:
throw new E.InsufficientFundsError()
default:
throw e
}
}
function checkCryptoCode (cryptoCode) { function checkCryptoCode (cryptoCode) {
if (cryptoCode !== 'LTC') return Promise.reject(new Error('Unsupported crypto: ' + cryptoCode)) if (cryptoCode !== 'LTC') return Promise.reject(new Error('Unsupported crypto: ' + cryptoCode))
return Promise.resolve() return Promise.resolve()
@ -52,10 +62,7 @@ function sendCoins (account, tx, settings, operatorId) {
txid: pickedObj.txid txid: pickedObj.txid
} }
}) })
.catch(err => { .catch(errorHandle)
if (err.code === -6) throw new E.InsufficientFundsError()
throw err
})
} }
function newAddress (account, info, tx, settings, operatorId) { function newAddress (account, info, tx, settings, operatorId) {

View file

@ -16,6 +16,16 @@ function fetch (method, params) {
return jsonRpc.fetch(rpcConfig, method, params) return jsonRpc.fetch(rpcConfig, method, params)
} }
function errorHandle (e) {
const err = JSON.parse(e.message)
switch (err.code) {
case -6:
throw new E.InsufficientFundsError()
default:
throw e
}
}
function checkCryptoCode (cryptoCode) { function checkCryptoCode (cryptoCode) {
if (cryptoCode !== 'ZEC') return Promise.reject(new Error('Unsupported crypto: ' + cryptoCode)) if (cryptoCode !== 'ZEC') return Promise.reject(new Error('Unsupported crypto: ' + cryptoCode))
return Promise.resolve() return Promise.resolve()
@ -78,10 +88,7 @@ function sendCoins (account, tx, settings, operatorId) {
txid: pickedObj.txid txid: pickedObj.txid
} }
}) })
.catch(err => { .catch(errorHandle)
if (err.code === -6) throw new E.InsufficientFundsError()
throw err
})
} }
function newAddress (account, info, tx, settings, operatorId) { function newAddress (account, info, tx, settings, operatorId) {

View file

@@ -33,6 +33,7 @@ const SANCTIONS_UPDATE_INTERVAL = 1 * T.day
const RADAR_UPDATE_INTERVAL = 5 * T.minutes
const PRUNE_MACHINES_HEARTBEAT = 1 * T.day
const TRANSACTION_BATCH_LIFECYCLE = 20 * T.minutes
+const TICKER_RATES_INTERVAL = 59 * T.seconds
const CHECK_NOTIFICATION_INTERVAL = 20 * T.seconds
const PENDING_INTERVAL = 10 * T.seconds
@@ -178,6 +179,7 @@ function doPolling (schema) {
  notifier.checkNotification(pi())
  updateCoinAtmRadar()
+  addToQueue(pi().getRawRates, TICKER_RATES_INTERVAL, schema, QUEUE.FAST)
  addToQueue(pi().executeTrades, TRADE_INTERVAL, schema, QUEUE.FAST)
  addToQueue(cashOutTx.monitorLiveIncoming, LIVE_INCOMING_TX_INTERVAL, schema, QUEUE.FAST, settings, false, coinFilter)
  addToQueue(cashOutTx.monitorStaleIncoming, INCOMING_TX_INTERVAL, schema, QUEUE.FAST, settings, false, coinFilter)

View file

@@ -14,6 +14,7 @@ const computeSchema = require('./middlewares/compute-schema')
const findOperatorId = require('./middlewares/operatorId')
const populateDeviceId = require('./middlewares/populateDeviceId')
const populateSettings = require('./middlewares/populateSettings')
+const recordPing = require('./middlewares/recordPing')
const cashboxRoutes = require('./routes/cashboxRoutes')
const customerRoutes = require('./routes/customerRoutes')
@@ -29,6 +30,8 @@ const verifyUserRoutes = require('./routes/verifyUserRoutes')
const verifyTxRoutes = require('./routes/verifyTxRoutes')
const verifyPromoCodeRoutes = require('./routes/verifyPromoCodeRoutes')
+const graphQLServer = require('./graphql/server')
const app = express()
const configRequiredRoutes = [
@@ -38,7 +41,8 @@ const configRequiredRoutes = [
  '/phone_code',
  '/customer',
  '/tx',
-  '/verify_promo_code'
+  '/verify_promo_code',
+  '/graphql'
]
const devMode = argv.dev || process.env.HTTP
@@ -55,11 +59,12 @@ app.use('/', pairingRoutes)
app.use(findOperatorId)
app.use(populateDeviceId)
app.use(computeSchema)
-if (!devMode) app.use(authorize)
+app.use(authorize)
app.use(configRequiredRoutes, populateSettings)
app.use(filterOldRequests)
// other app routes
+app.use('/graphql', recordPing)
app.use('/poll', pollingRoutes)
app.use('/terms_conditions', termsAndConditionsRoutes)
app.use('/state', stateRoutes)
@@ -78,6 +83,8 @@ app.use('/tx', txRoutes)
app.use('/logs', logsRoutes)
+graphQLServer.applyMiddleware({ app })
app.use(errorHandler)
app.use((req, res) => {
  res.status(404).json({ error: 'No such route' })

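Note: with this change, requests to /graphql pass through findOperatorId, populateDeviceId, computeSchema, authorize (now applied even in dev mode), populateSettings (because '/graphql' was added to configRequiredRoutes) and filterOldRequests, then hit recordPing before the Apollo middleware. The recordPing middleware itself is not shown in this excerpt; a purely hypothetical sketch of its likely shape, with the helper name being an assumption:

// Hypothetical shape of ./middlewares/recordPing — the real file isn't in this diff.
// Presumably it stamps a "last seen" time for the calling machine and passes control on.
const recordPingSketch = (req, res, next) => {
  const { deviceId, deviceTime } = req              // populated by earlier middlewares
  machines.recordPing(deviceId, deviceTime)         // assumed helper; name is illustrative
    .then(() => next())
    .catch(next)
}
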
View file

@@ -10,7 +10,7 @@ const plugins = require('../plugins')
const semver = require('semver')
const state = require('../middlewares/state')
const version = require('../../package.json').version
-const customRequestQueries = require('../new-admin/services/customInfoRequests')
+const { batchGetCustomInfoRequest, getCustomInfoRequests } = require('../new-admin/services/customInfoRequests')
const urlsToPing = [
  `us.archive.ubuntu.com`,
@@ -45,7 +45,7 @@ const buildTriggers = (allTriggers) => {
    return !_.isNil(o.customInfoRequestId) && !_.isEmpty(o.customInfoRequestId)
  }, allTriggers)
-  return _.flow([_.map(_.get('customInfoRequestId')), customRequestQueries.batchGetCustomInfoRequest])(customTriggers)
+  return _.flow([_.map(_.get('customInfoRequestId')), batchGetCustomInfoRequest])(customTriggers)
    .then(res => {
      res.forEach((details, index) => {
        // make sure we aren't attaching the details to the wrong trigger
@@ -61,7 +61,6 @@ function poll (req, res, next) {
  const machineModel = req.query.model
  const deviceId = req.deviceId
  const deviceTime = req.deviceTime
-  const serialNumber = req.query.sn
  const pid = req.query.pid
  const settings = req.settings
  const operatorId = res.locals.operatorId
@@ -73,9 +72,6 @@ function poll (req, res, next) {
  const pi = plugins(settings, deviceId)
  const hasLightning = checkHasLightning(settings)
-  const triggersAutomationPromise = configManager.getTriggersAutomation(settings.config)
-  const triggersPromise = buildTriggers(configManager.getTriggers(settings.config))
  const operatorInfo = configManager.getOperatorInfo(settings.config)
  const machineInfo = { deviceId: req.deviceId, deviceName: req.deviceName }
  const cashOutConfig = configManager.getCashOut(deviceId, settings.config)
@@ -85,10 +81,13 @@ function poll (req, res, next) {
  state.pids = _.update(operatorId, _.set(deviceId, { pid, ts: Date.now() }), state.pids)
-  return Promise.all([pi.pollQueries(serialNumber, deviceTime, req.query, machineVersion, machineModel), triggersPromise, triggersAutomationPromise])
-    .then(([results, triggers, triggersAutomation]) => {
-      const cassettes = results.cassettes
+  return Promise.all([
+    pi.recordPing(deviceTime, machineVersion, machineModel),
+    pi.pollQueries(),
+    buildTriggers(configManager.getTriggers(settings.config)),
+    configManager.getTriggersAutomation(getCustomInfoRequests(true), settings.config),
+  ])
+    .then(([_pingRes, results, triggers, triggersAutomation]) => {
      const reboot = pid && state.reboots?.[operatorId]?.[deviceId] === pid
      const shutdown = pid && state.shutdowns?.[operatorId]?.[deviceId] === pid
      const restartServices = pid && state.restartServicesMap?.[operatorId]?.[deviceId] === pid
@@ -110,7 +109,6 @@ function poll (req, res, next) {
        receiptPrintingActive: receipt.active,
        smsReceiptActive: receipt.sms,
        enablePaperWalletOnly,
-        cassettes,
        twoWayMode: cashOutConfig.active,
        zeroConfLimits,
        reboot,

View file

@@ -7,7 +7,7 @@ const ccxt = require('./plugins/ticker/ccxt')
const mockTicker = require('./plugins/ticker/mock-ticker')
const bitpay = require('./plugins/ticker/bitpay')
-const FETCH_INTERVAL = 60000
+const FETCH_INTERVAL = 58000
function _getRates (settings, fiatCode, cryptoCode) {
  return Promise.resolve()

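Note: FETCH_INTERVAL drops from 60 s to 58 s while the new polling entry queues getRawRates every 59 s (TICKER_RATES_INTERVAL), so each scheduled poll should find the cached rates just expired and fetch fresh ones. The caching code itself is not part of this diff; a purely illustrative sketch of the pattern the two intervals imply:

// Illustrative only — the real cache implementation isn't shown here.
// Intent implied by the constants: rates are considered fresh for 58 s and
// polled every 59 s, so every queue tick refreshes rather than reusing a warm entry.
let cache = { ts: 0, rates: null }

function getRates (settings, fiatCode, cryptoCode) {
  const now = Date.now()
  if (cache.rates && now - cache.ts < FETCH_INTERVAL) return Promise.resolve(cache.rates)
  return _getRates(settings, fiatCode, cryptoCode)
    .then(rates => {
      cache = { ts: now, rates }      // remember when we last fetched
      return rates
    })
}
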
View file

@@ -0,0 +1,11 @@
+const { saveConfig } = require('../lib/new-settings-loader')
+
+exports.up = function (next) {
+  return saveConfig({})
+    .then(next)
+    .catch(next)
+}
+
+exports.down = function (next) {
+  next()
+}

View file

@@ -2,4 +2,4 @@ SKIP_PREFLIGHT_CHECK=true
HTTPS=true
REACT_APP_TYPE_CHECK_SANCTUARY=false
PORT=3001
-REACT_APP_BUILD_TARGET=LAMASSU
+REACT_APP_BUILD_TARGET=PAZUZ

View file

@@ -1,7 +1,7 @@
import { useLazyQuery } from '@apollo/react-hooks'
import { makeStyles, ClickAwayListener } from '@material-ui/core'
import classnames from 'classnames'
-import { format } from 'date-fns/fp'
+import { format, set } from 'date-fns/fp'
import FileSaver from 'file-saver'
import * as R from 'ramda'
import React, { useState, useCallback } from 'react'
@@ -280,7 +280,15 @@ const LogsDownloaderPopover = ({
        )}
      </div>
      <DateRangePicker
-        maxDate={new Date()}
+        maxDate={set(
+          {
+            hours: 23,
+            minutes: 59,
+            seconds: 59,
+            milliseconds: 999
+          },
+          new Date()
+        )}
        onRangeChange={handleRangeChange}
      />
    </div>

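Note: maxDate is now pinned to the very end of the current day, so selecting "today" as the end of a log range is no longer rejected as being after maxDate. A quick standalone check of the date-fns/fp call used above (the sample timestamp is illustrative):

// date-fns/fp `set` takes the values first and the date last, matching the JSX above.
import { set } from 'date-fns/fp'

const endOfToday = set(
  { hours: 23, minutes: 59, seconds: 59, milliseconds: 999 },
  new Date('2022-04-28T10:15:00')
)
// endOfToday -> 2022-04-28T23:59:59.999, i.e. the same calendar day, last millisecond
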
View file

@@ -178,9 +178,7 @@ const Calendar = ({ minDate, maxDate, handleSelect, ...props }) => {
        {R.range(1, 8).map((row, key) => (
          <tr key={key}>
            {getRow(currentDisplayedMonth, row).map((day, key) => (
-              <td
-                key={key}
-                onClick={() => handleSelect(day, minDate, maxDate)}>
+              <td key={key} onClick={() => handleSelect(day)}>
                <Tile
                  isDisabled={
                    (maxDate && isAfter(maxDate, day)) ||

View file

@@ -1,11 +1,6 @@
import { makeStyles } from '@material-ui/core/styles'
import classnames from 'classnames'
-import {
-  differenceInDays,
-  differenceInMonths,
-  isSameMonth,
-  set
-} from 'date-fns/fp'
+import { compareAsc, differenceInDays, set } from 'date-fns/fp'
import React, { useState, useEffect } from 'react'
import Calendar from './Calendar'
@@ -29,27 +24,22 @@ const DateRangePicker = ({ minDate, maxDate, className, onRangeChange }) => {
  const classes = useStyles()
-  const handleSelect = (day, minDate, maxDate) => {
+  const handleSelect = day => {
    if (
-      (maxDate && differenceInDays(maxDate, day) > 0) ||
+      (maxDate && compareAsc(maxDate, day) > 0) ||
      (minDate && differenceInDays(day, minDate) > 0)
    )
      return
-    if (from && !to && differenceInDays(day, from) > 0) {
-      setTo(from)
-      setFrom(day)
-      return
-    }
-    if (
-      from &&
-      !to &&
-      (isSameMonth(from, day) || differenceInMonths(from, day) > 0)
-    ) {
-      setTo(
-        set({ hours: 23, minutes: 59, seconds: 59, milliseconds: 999 }, day)
-      )
+    if (from && !to) {
+      if (differenceInDays(from, day) >= 0) {
+        setTo(
+          set({ hours: 23, minutes: 59, seconds: 59, milliseconds: 999 }, day)
+        )
+      } else {
+        setTo(from)
+        setFrom(day)
+      }
      return
    }

package-lock.json (generated)
View file

@@ -1,6 +1,6 @@
{
  "name": "lamassu-server",
-  "version": "8.0.0-beta.3",
+  "version": "8.1.0-beta.0",
  "lockfileVersion": 1,
  "requires": true,
  "dependencies": {

View file

@@ -2,7 +2,7 @@
  "name": "lamassu-server",
  "description": "bitcoin atm client server protocol module",
  "keywords": [],
-  "version": "8.0.0-beta.3",
+  "version": "8.1.0-beta.0",
  "license": "Unlicense",
  "author": "Lamassu (https://lamassu.is)",
  "dependencies": {

View file

@@ -13,7 +13,7 @@ if (!_.isEqual(_.intersection(_.keys(argv), requiredParams), requiredParams)) {
  process.exit(2)
}
-fs.copyFileSync(path.resolve(__dirname, '../.sample.env'), path.resolve(__dirname, '../.env'))
+fs.copyFileSync(path.resolve(__dirname, '../.sample.env'), process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env'))
setEnvVariable('POSTGRES_USER', 'lamassu_pg')
setEnvVariable('POSTGRES_PASSWORD', `${argv['db-password']}`)

View file

@@ -3,7 +3,8 @@ const os = require('os')
const path = require('path')
const setEnvVariable = (key, value) => {
-  const ENV_VARIABLES = fs.readFileSync(path.resolve(__dirname, '../.env'), 'utf-8').split(os.EOL)
+  const ENV_PATH = process.env.NODE_ENV === 'production' ? path.resolve(os.homedir(), '.lamassu', '.env') : path.resolve(__dirname, '../.env')
+  const ENV_VARIABLES = fs.readFileSync(ENV_PATH, 'utf-8').split(os.EOL)
  const target = ENV_VARIABLES.indexOf(ENV_VARIABLES.find(line => line.match(new RegExp(`^${key}=`))))
  if (target < 0) {
@@ -18,7 +19,7 @@ const setEnvVariable = (key, value) => {
    ENV_VARIABLES.splice(target, 1, `${key}=${value}`)
  }
-  fs.writeFileSync(path.resolve(__dirname, '../.env'), ENV_VARIABLES.join(os.EOL))
+  fs.writeFileSync(ENV_PATH, ENV_VARIABLES.join(os.EOL))
}
module.exports = setEnvVariable
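
Note: with ENV_PATH factored out, setEnvVariable reads and rewrites the same file in both branches — the repo-local .env in development and, in production, the .env under the operator's home directory (e.g. ~/.lamassu/.env). A brief usage sketch; the key and value here are illustrative, not taken from this diff:

// Sketch only — assumes it is run from the tools directory alongside set-env-var.js.
const setEnvVariable = require('./set-env-var')

// Replaces an existing EXAMPLE_KEY= line in place, or appends one if it is missing.
setEnvVariable('EXAMPLE_KEY', 'example-value')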