Merge pull request #873 from chaotixkilla/feat-integrate-hedging-and-accounting-with-pazuz-admin

Integrate hedging and accounting with pazuz-related screens
Rafael Taranto 2021-11-17 20:34:20 +00:00 committed by GitHub
commit d138b26903
40 changed files with 761 additions and 386 deletions

View file

@ -64,8 +64,7 @@ function loadSanctions (settings) {
function startServer (settings) {
return Promise.resolve()
.then(() => {
poller.start(settings)
poller.setup(['public'])
const httpsServerOptions = {
key: fs.readFileSync(options.keyPath),
cert: fs.readFileSync(options.certPath),

lib/compute-schema.js (new file, 8 additions)
View file

@ -0,0 +1,8 @@
const { asyncLocalStorage, defaultStore } = require('./async-storage')
const computeSchema = (req, res, next) => {
const store = defaultStore()
return asyncLocalStorage.run(store, () => next())
}
module.exports = computeSchema
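
Editor's note: the ./async-storage module itself is not part of this diff. A minimal sketch of the interface that compute-schema.js and db.js (below) appear to rely on, assuming Node's built-in AsyncLocalStorage and a Map-backed store whose fallback schema is 'public' (the fallback value is an assumption, not shown in this commit):

const { AsyncLocalStorage } = require('async_hooks')
const asyncLocalStorage = new AsyncLocalStorage()
// A plain Map works as the per-request store; store.get('schema') /
// store.set('schema', ...) is all the rest of the code needs.
const defaultStore = () => new Map([['schema', 'public']])
module.exports = { asyncLocalStorage, defaultStore }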

View file

@ -1,3 +1,5 @@
const T = require('./time')
const anonymousCustomer = {
uuid: '47ac1184-8102-11e7-9079-8f13a7117867',
name: 'anonymous'
@ -8,6 +10,8 @@ const cassetteMaxCapacity = 500
const AUTHENTICATOR_ISSUER_ENTITY = 'Lamassu'
const AUTH_TOKEN_EXPIRATION_TIME = '30 minutes'
const REGISTRATION_TOKEN_EXPIRATION_TIME = '30 minutes'
const USER_SESSIONS_TABLE_NAME = 'user_sessions'
const USER_SESSIONS_CLEAR_INTERVAL = 1 * T.hour
const AUTOMATIC = 'automatic'
const MANUAL = 'manual'
@ -19,5 +23,7 @@ module.exports = {
AUTH_TOKEN_EXPIRATION_TIME,
REGISTRATION_TOKEN_EXPIRATION_TIME,
AUTOMATIC,
MANUAL
MANUAL,
USER_SESSIONS_TABLE_NAME,
USER_SESSIONS_CLEAR_INTERVAL
}

View file

@ -21,7 +21,8 @@ const stripDefaultDbFuncs = dbCtx => {
tx: dbCtx.$tx,
task: dbCtx.$task,
batch: dbCtx.batch,
multi: dbCtx.$multi
multi: dbCtx.$multi,
connect: dbCtx.connect
}
}
@ -37,7 +38,10 @@ const _task = (obj, opts, cb) => {
})
}
const getSchema = () => 'public'
const getSchema = () => {
const store = asyncLocalStorage.getStore() ?? defaultStore()
return asyncLocalStorage.run(store, () => store.get('schema'))
}
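
Editor's note: getSchema now resolves the active schema from AsyncLocalStorage instead of returning a hard-coded 'public'. Code that needs queries to run under a particular schema wraps them the same way the poller and the compute-schema middleware do; a sketch using the same asyncLocalStorage / defaultStore imports as above, with the schema name purely illustrative:

const store = defaultStore()
store.set('schema', 'operator_abc')
asyncLocalStorage.run(store, () => {
  // any db call made inside this callback resolves getSchema() to 'operator_abc'
  return db.any('SELECT device_id FROM devices')
})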
const getDefaultSchema = () => 'ERROR_SCHEMA'
const searchPathWrapper = (t, cb) => {

View file

@ -157,18 +157,37 @@ function unpair (rec) {
}
function reboot (rec) {
return axios.post(`http://localhost:3030/reboot?device_id=${rec.deviceId}`)
return db.none('NOTIFY $1:name, $2', ['poller', JSON.stringify(
{
type: 'machineAction',
action: 'reboot',
value: _.pick(['deviceId', 'operatorId', 'action'], rec)
}
)])
}
function shutdown (rec) {
return axios.post(`http://localhost:3030/shutdown?device_id=${rec.deviceId}`)
return db.none('NOTIFY $1:name, $2', ['poller', JSON.stringify(
{
type: 'machineAction',
action: 'shutdown',
value: _.pick(['deviceId', 'operatorId', 'action'], rec)
}
)])
}
function restartServices (rec) {
return axios.post(`http://localhost:3030/restartServices?device_id=${rec.deviceId}`)
return db.none('NOTIFY $1:name, $2', ['poller', JSON.stringify(
{
type: 'machineAction',
action: 'restartServices',
value: _.pick(['deviceId', 'operatorId', 'action'], rec)
}
)])
}
function setMachine (rec) {
function setMachine (rec, operatorId) {
rec.operatorId = operatorId
switch (rec.action) {
case 'rename': return renameMachine(rec)
case 'emptyCashInBills': return emptyCashInBills(rec)
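
Editor's note: pg-promise's :name filter quotes the channel identifier, so the three rewritten actions above all emit a NOTIFY on the poller channel instead of hitting the old localhost HTTP endpoints. The payload is plain JSON of this shape (values illustrative):

// Roughly: NOTIFY "poller", '<JSON.stringify(payload)>'
const payload = {
  type: 'machineAction',
  action: 'reboot', // or 'shutdown' / 'restartServices'
  value: { deviceId: '<device-id>', operatorId: '<operator-id>', action: 'reboot' }
}

The poller further down in this commit parses data.payload and routes on type and action.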

View file

@ -16,9 +16,12 @@ const options = require('../options')
const users = require('../users')
const logger = require('../logger')
const session = require('./middlewares/session')
const { AuthDirective } = require('./graphql/directives')
const { typeDefs, resolvers } = require('./graphql/schema')
const findOperatorId = require('../middlewares/operatorId')
const computeSchema = require('../compute-schema')
const { USER_SESSIONS_CLEAR_INTERVAL } = require('../constants')
const { session, cleanUserSessions, buildApolloContext } = require('./middlewares')
const devMode = require('minimist')(process.argv.slice(2)).dev
const idPhotoCardBasedir = _.get('idPhotoCardDir', options)
@ -32,6 +35,7 @@ if (!hostname) {
}
const app = express()
app.use(helmet())
app.use(compression())
app.use(nocache())
@ -39,6 +43,9 @@ app.use(cookieParser())
app.use(express.json())
app.use(express.urlencoded({ extended: true })) // support encoded bodies
app.use(express.static(path.resolve(__dirname, '..', '..', 'public')))
app.use(cleanUserSessions(USER_SESSIONS_CLEAR_INTERVAL))
app.use(computeSchema)
app.use(findOperatorId)
app.use(session)
const apolloServer = new ApolloServer({
@ -53,27 +60,7 @@ const apolloServer = new ApolloServer({
logger.error(error)
return error
},
context: async ({ req, res }) => {
if (!req.session.user) return { req }
const user = await users.verifyAndUpdateUser(
req.session.user.id,
req.headers['user-agent'] || 'Unknown',
req.ip
)
if (!user || !user.enabled) throw new AuthenticationError('Authentication failed')
req.session.ua = req.headers['user-agent'] || 'Unknown'
req.session.ipAddress = req.ip
req.session.lastUsed = new Date(Date.now()).toISOString()
req.session.user.id = user.id
req.session.user.role = user.role
res.set('role', user.role)
res.set('Access-Control-Expose-Headers', 'role')
return { req }
}
context: async (obj) => buildApolloContext(obj)
})
apolloServer.applyMiddleware({

View file

@ -30,8 +30,7 @@ const authenticateUser = (username, password) => {
const destroySessionIfSameUser = (context, user) => {
const sessionUser = getUserFromCookie(context)
if (sessionUser && user.id === sessionUser.id)
context.req.session.destroy()
if (sessionUser && user.id === sessionUser.id) { context.req.session.destroy() }
}
const destroySessionIfBeingUsed = (sessID, context) => {
@ -45,7 +44,7 @@ const getUserFromCookie = context => {
}
const getLamassuCookie = context => {
return context.req.cookies && context.req.cookies.lid
return context.req.cookies && context.req.cookies.lamassu_sid
}
const initializeSession = (context, user, rememberMe) => {
@ -60,7 +59,7 @@ const executeProtectedAction = (code, id, context, action) => {
if (user.role !== 'superuser') {
return action()
}
return confirm2FA(code, context)
.then(() => action())
})

View file

@ -13,7 +13,7 @@ const resolvers = {
},
Mutation: {
setCustomer: (root, { customerId, customerInput }, context, info) => {
const token = !!context.req.cookies.lid && context.req.session.user.id
const token = !!context.req.cookies.lamassu_sid && context.req.session.user.id
if (customerId === anonymous.uuid) return customers.getCustomerById(customerId)
return customers.updateCustomer(customerId, customerInput, token)
}

View file

@ -18,7 +18,7 @@ const resolvers = {
machine: (...[, { deviceId }]) => machineLoader.getMachine(deviceId)
},
Mutation: {
machineAction: (...[, { deviceId, action, cashbox, cassette1, cassette2, newName }]) => machineAction({ deviceId, action, cashbox, cassette1, cassette2, newName })
machineAction: (...[, { deviceId, action, cashbox, cassette1, cassette2, newName }, context]) => machineAction({ deviceId, action, cashbox, cassette1, cassette2, newName }, context)
}
}

View file

@ -1,11 +1,5 @@
const got = require('got')
const logger = require('../../../logger')
const settingsLoader = require('../../../new-settings-loader')
const notify = () => got.post('http://localhost:3030/dbChange')
.catch(e => logger.error('lamassu-server not responding'))
const resolvers = {
Query: {
accounts: () => settingsLoader.showAccounts(),
@ -14,10 +8,7 @@ const resolvers = {
Mutation: {
saveAccounts: (...[, { accounts }]) => settingsLoader.saveAccounts(accounts),
// resetAccounts: (...[, { schemaVersion }]) => settingsLoader.resetAccounts(schemaVersion),
saveConfig: (...[, { config }]) => settingsLoader.saveConfig(config).then(it => {
notify()
return it
}),
saveConfig: (...[, { config }]) => settingsLoader.saveConfig(config),
// resetConfig: (...[, { schemaVersion }]) => settingsLoader.resetConfig(schemaVersion),
// migrateConfigAndAccounts: () => settingsLoader.migrate()
}

View file

@ -0,0 +1,24 @@
const { asyncLocalStorage } = require('../../async-storage')
const db = require('../../db')
const { USER_SESSIONS_TABLE_NAME } = require('../../constants')
const logger = require('../../logger')
const schemaCache = {}
const cleanUserSessions = (cleanInterval) => (req, res, next) => {
const schema = asyncLocalStorage.getStore() ? asyncLocalStorage.getStore().get('schema') : null
const now = Date.now()
if (!schema) return next()
if (schema && schemaCache.schema + cleanInterval > now) return next()
logger.debug(`Clearing expired sessions for schema ${schema}`)
return db.none('DELETE FROM $1^ WHERE expire < to_timestamp($2 / 1000.0)', [USER_SESSIONS_TABLE_NAME, now])
.then(() => {
schemaCache.schema = now
return next()
})
.catch(next)
}
module.exports = cleanUserSessions
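
Editor's note: app.js in this same commit mounts this middleware per request, and the schemaCache timestamp throttles the DELETE to at most one run per clean interval per process:

app.use(cleanUserSessions(USER_SESSIONS_CLEAR_INTERVAL)) // 1 * T.hour, per constants.js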

View file

@ -0,0 +1,29 @@
const { AuthenticationError } = require('apollo-server-express')
const base64 = require('base-64')
const users = require('../../users')
const buildApolloContext = async ({ req, res }) => {
if (!req.session.user) return { req, res }
const user = await users.verifyAndUpdateUser(
req.session.user.id,
req.headers['user-agent'] || 'Unknown',
req.ip
)
if (!user || !user.enabled) throw new AuthenticationError('Authentication failed')
req.session.ua = req.headers['user-agent'] || 'Unknown'
req.session.ipAddress = req.ip
req.session.lastUsed = new Date(Date.now()).toISOString()
req.session.user.id = user.id
req.session.user.username = user.username
req.session.user.role = user.role
res.set('lamassu_role', user.role)
res.cookie('pazuz_operatoridentifier', base64.encode(user.username))
res.set('Access-Control-Expose-Headers', 'lamassu_role')
return { req, res }
}
module.exports = buildApolloContext
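
Editor's note: the pazuz_operatoridentifier cookie carries the base64-encoded username for the pazuz admin screens. The consuming side is not part of this diff, but decoding it is just the reverse of the encode above (illustrative, assumes cookie-parser is in place):

const base64 = require('base-64')
const username = base64.decode(req.cookies.pazuz_operatoridentifier)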

View file

@ -0,0 +1,9 @@
const cleanUserSessions = require('./cleanUserSessions')
const buildApolloContext = require('./context')
const session = require('./session')
module.exports = {
cleanUserSessions,
buildApolloContext,
session
}

View file

@ -3,10 +3,11 @@ const express = require('express')
const router = express.Router()
const hkdf = require('futoin-hkdf')
const session = require('express-session')
const pgSession = require('connect-pg-simple')(session)
const PgSession = require('connect-pg-simple')(session)
const mnemonicHelpers = require('../../mnemonic-helpers')
const db = require('../../db')
const options = require('../../options')
const { USER_SESSIONS_TABLE_NAME } = require('../../constants')
const getSecret = () => {
const mnemonic = fs.readFileSync(options.mnemonicPath, 'utf8')
@ -20,11 +21,11 @@ const getSecret = () => {
const hostname = options.hostname
router.use('*', session({
store: new pgSession({
store: new PgSession({
pgPromise: db,
tableName: 'user_sessions'
tableName: USER_SESSIONS_TABLE_NAME
}),
name: 'lid',
name: 'lamassu_sid',
secret: getSecret(),
resave: false,
saveUninitialized: false,

View file

@ -6,13 +6,14 @@ function getMachine (machineId) {
.then(machines => machines.find(({ deviceId }) => deviceId === machineId))
}
function machineAction ({ deviceId, action, cashbox, cassette1, cassette2, newName }) {
function machineAction ({ deviceId, action, cashbox, cassette1, cassette2, newName }, context) {
const operatorId = context.res.locals.operatorId
return getMachine(deviceId)
.then(machine => {
if (!machine) throw new UserInputError(`machine:${deviceId} not found`, { deviceId })
return machine
})
.then(machineLoader.setMachine({ deviceId, action, cashbox, cassettes: [cassette1, cassette2], newName }))
.then(machineLoader.setMachine({ deviceId, action, cashbox, cassettes: [cassette1, cassette2], newName }, operatorId))
.then(getMachine(deviceId))
}

View file

@ -3,6 +3,7 @@ const pify = require('pify')
const readFile = pify(fs.readFile)
const crypto = require('crypto')
const baseX = require('base-x')
const { NIL } = require('uuid')
const options = require('../../options')
const db = require('../../db')
@ -19,7 +20,7 @@ function totem (name) {
return readFile(caPath)
.then(data => {
const caHash = crypto.createHash('sha256').update(data).digest()
const token = crypto.randomBytes(32)
const token = Buffer.concat([crypto.randomBytes(32), NIL])
const hexToken = token.toString('hex')
const caHexToken = crypto.createHash('sha256').update(hexToken).digest('hex')
const buf = Buffer.concat([caHash, token, Buffer.from(options.hostname)])

View file

@ -1,6 +1,7 @@
const _ = require('lodash/fp')
const db = require('./db')
const migration = require('./config-migration')
const { asyncLocalStorage } = require('./async-storage')
const OLD_SETTINGS_LOADER_SCHEMA_VERSION = 1
const NEW_SETTINGS_LOADER_SCHEMA_VERSION = 2
@ -73,7 +74,10 @@ function saveConfig (config) {
return loadLatestConfigOrNone()
.then(currentConfig => {
const newConfig = _.assign(currentConfig, config)
return db.none(configSql, ['config', { config: newConfig }, true, NEW_SETTINGS_LOADER_SCHEMA_VERSION])
return db.tx(t => {
return t.none(configSql, ['config', { config: newConfig }, true, NEW_SETTINGS_LOADER_SCHEMA_VERSION])
.then(() => t.none('NOTIFY $1:name, $2', ['poller', JSON.stringify({ type: 'reload', schema: asyncLocalStorage.getStore().get('schema') })]))
}).catch(console.error)
})
}

View file

@ -6,6 +6,8 @@ const _ = require('lodash/fp')
require('dotenv').config()
const DATABASE = process.env.LAMASSU_DB ?? 'DEV'
const dbMapping = psqlConf => ({
STRESS_TEST: _.replace('lamassu', 'lamassu_stress', psqlConf),
RELEASE: _.replace('lamassu', 'lamassu_release', psqlConf),
@ -39,7 +41,7 @@ function load () {
opts: JSON.parse(fs.readFileSync(globalConfigPath))
}
config.opts.postgresql = dbMapping(config.opts.postgresql)[process.env.LAMASSU_DB]
config.opts.postgresql = dbMapping(config.opts.postgresql)[DATABASE]
return config
} catch (_) {
@ -50,7 +52,7 @@ function load () {
opts: JSON.parse(fs.readFileSync(homeConfigPath))
}
config.opts.postgresql = dbMapping(config.opts.postgresql)[process.env.LAMASSU_DB]
config.opts.postgresql = dbMapping(config.opts.postgresql)[DATABASE]
return config
} catch (_) {

View file

@ -1,5 +1,5 @@
const _ = require('lodash/fp')
const Queue = require('queue-promise')
const plugins = require('./plugins')
const notifier = require('./notifier')
const T = require('./time')
@ -11,6 +11,12 @@ const sanctions = require('./ofac/index')
const coinAtmRadar = require('./coinatmradar/coinatmradar')
const configManager = require('./new-config-manager')
const complianceTriggers = require('./compliance-triggers')
const { asyncLocalStorage, defaultStore } = require('./async-storage')
const settingsLoader = require('./new-settings-loader')
const NodeCache = require('node-cache')
const util = require('util')
const db = require('./db')
const state = require('./middlewares/state')
const INCOMING_TX_INTERVAL = 30 * T.seconds
const LIVE_INCOMING_TX_INTERVAL = 5 * T.seconds
@ -24,25 +30,106 @@ const LOGS_CLEAR_INTERVAL = 1 * T.day
const SANCTIONS_INITIAL_DOWNLOAD_INTERVAL = 5 * T.minutes
const SANCTIONS_UPDATE_INTERVAL = 1 * T.week
const RADAR_UPDATE_INTERVAL = 5 * T.minutes
const PRUNE_MACHINES_HEARBEAT = 1 * T.day
const PRUNE_MACHINES_HEARTBEAT = 1 * T.day
const CHECK_NOTIFICATION_INTERVAL = 20 * T.seconds
const PENDING_INTERVAL = 10 * T.seconds
const CACHE_ENTRY_TTL = 3600 // seconds
const coinFilter = ['ETH']
const FAST_QUEUE_WAIT = 1 * T.seconds
const SLOW_QUEUE_WAIT = 10 * T.seconds
let _pi, _settings
const FAST_QUEUE = new Queue({
concurrent: 600,
interval: FAST_QUEUE_WAIT
})
function reload (__settings) {
_settings = __settings
_pi = plugins(_settings)
logger.debug('settings reloaded in poller')
updateAndLoadSanctions()
const SLOW_QUEUE = new Queue({
concurrent: 10,
interval: SLOW_QUEUE_WAIT
})
// Fix for asyncLocalStorage store being lost due to callback-based queue
FAST_QUEUE.enqueue = util.promisify(FAST_QUEUE.enqueue)
SLOW_QUEUE.enqueue = util.promisify(SLOW_QUEUE.enqueue)
const QUEUE = {
FAST: FAST_QUEUE,
SLOW: SLOW_QUEUE
}
function pi () { return _pi }
function settings () { return _settings }
const coinFilter = ['ETH']
const schemaCallbacks = new Map()
const cachedVariables = new NodeCache({
stdTTL: CACHE_ENTRY_TTL,
checkperiod: CACHE_ENTRY_TTL,
deleteOnExpire: false,
useClones: false // pass values by reference instead of cloning
})
cachedVariables.on('expired', (key, val) => {
if (!val.isReloading) {
// since val is passed by reference we don't need to do cachedVariables.set()
val.isReloading = true
return reload(key)
}
})
db.connect({ direct: true }).then(sco => {
sco.client.on('notification', data => {
const parsedData = JSON.parse(data.payload)
switch (parsedData.type) {
case 'reload':
return reload(parsedData.schema)
case 'machineAction':
return machineAction(parsedData.action, parsedData.value)
default:
break
}
})
return sco.none('LISTEN $1:name', 'poller')
}).catch(console.error)
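
Editor's note: with this direct pg-promise connection held open, any session on the same database can drive the handler above. The two producers added in this commit are saveConfig (type 'reload') and the admin machine actions (type 'machineAction'); forcing a settings reload by hand would look like:

db.none('NOTIFY $1:name, $2', ['poller', JSON.stringify({ type: 'reload', schema: 'public' })])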
function reload (schema) {
const store = defaultStore()
store.set('schema', schema)
// set asyncLocalStorage so settingsLoader loads settings for the right schema
return asyncLocalStorage.run(store, () => {
return settingsLoader.loadLatest().then(settings => {
const pi = plugins(settings)
cachedVariables.set(schema, { settings, pi, isReloading: false })
logger.debug(`Settings for schema '${schema}' reloaded in poller`)
return updateAndLoadSanctions()
})
})
}
function machineAction (type, value) {
const deviceId = value.deviceId
const operatorId = value.operatorId
const pid = state.pids?.[operatorId]?.[deviceId]?.pid
switch (type) {
case 'reboot':
logger.debug(`Rebooting machine '${deviceId}' from operator ${operatorId}`)
state.reboots[operatorId] = { [deviceId]: pid }
break
case 'shutdown':
logger.debug(`Shutting down machine '${deviceId}' from operator ${operatorId}`)
state.shutdowns[operatorId] = { [deviceId]: pid }
break
case 'restartServices':
logger.debug(`Restarting services of machine '${deviceId}' from operator ${operatorId}`)
state.restartServicesMap[operatorId] = { [deviceId]: pid }
break
default:
break
}
}
function pi () { return cachedVariables.get(asyncLocalStorage.getStore().get('schema')).pi }
function settings () { return cachedVariables.get(asyncLocalStorage.getStore().get('schema')).settings }
function initialSanctionsDownload () {
const structs = sanctions.getStructs()
@ -70,9 +157,40 @@ function updateCoinAtmRadar () {
.then(rates => coinAtmRadar.update(rates, settings()))
}
function start (__settings) {
reload(__settings)
function initializeEachSchema (schemas = ['public']) {
// for each schema set "thread variables" and do polling
return _.forEach(schema => {
const store = defaultStore()
store.set('schema', schema)
return asyncLocalStorage.run(store, () => {
return settingsLoader.loadLatest().then(settings => {
// prevent inadvertently clearing the array without clearing timeouts
if (schemaCallbacks.has(schema)) throw new Error(`The schema "${schema}" cannot be initialized twice on poller`)
const pi = plugins(settings)
cachedVariables.set(schema, { settings, pi, isReloading: false })
schemaCallbacks.set(schema, [])
return doPolling(schema)
})
}).catch(console.error)
}, schemas)
}
function addToQueue (func, interval, schema, queue, ...vars) {
return schemaCallbacks.get(schema).push(setInterval(() => {
return queue.enqueue().then(() => {
// get plugins or settings from the cache every time func is run
const loadVariables = vars.length > 0 && typeof vars[0] === 'function'
if (loadVariables) {
const funcVars = [...vars]
funcVars[0] = vars[0]()
return func(...funcVars)
}
return func(...vars)
}).catch(console.error)
}, interval))
}
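
Editor's note on the calling convention this creates: passing the settings or pi getter without invoking it defers the cache lookup to every tick, so a reload picked up via NOTIFY is visible to jobs that are already registered. Invoking the getter at registration time would freeze the value instead:

// Re-reads settings from cachedVariables on each interval tick:
addToQueue(cashInTx.monitorPending, PENDING_INTERVAL, schema, QUEUE.FAST, settings)
// Would capture the settings object once, when the job is registered:
addToQueue(cashInTx.monitorPending, PENDING_INTERVAL, schema, QUEUE.FAST, settings())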
function doPolling (schema) {
pi().executeTrades()
pi().pong()
pi().clearOldLogs()
@ -87,23 +205,37 @@ function start (__settings) {
notifier.checkNotification(pi())
updateCoinAtmRadar()
setInterval(() => pi().executeTrades(), TRADE_INTERVAL)
setInterval(() => cashOutTx.monitorLiveIncoming(settings(), false, coinFilter), LIVE_INCOMING_TX_INTERVAL)
setInterval(() => cashOutTx.monitorStaleIncoming(settings(), false, coinFilter), INCOMING_TX_INTERVAL)
addToQueue(pi().executeTrades, TRADE_INTERVAL, schema, QUEUE.FAST)
addToQueue(cashOutTx.monitorLiveIncoming, LIVE_INCOMING_TX_INTERVAL, schema, QUEUE.FAST, settings, false, coinFilter)
addToQueue(cashOutTx.monitorStaleIncoming, INCOMING_TX_INTERVAL, schema, QUEUE.FAST, settings, false, coinFilter)
if (!_.isEmpty(coinFilter)) {
setInterval(() => cashOutTx.monitorLiveIncoming(settings(), true, coinFilter), LIVE_INCOMING_TX_INTERVAL_FILTER)
setInterval(() => cashOutTx.monitorStaleIncoming(settings(), true, coinFilter), INCOMING_TX_INTERVAL_FILTER)
addToQueue(cashOutTx.monitorLiveIncoming, LIVE_INCOMING_TX_INTERVAL_FILTER, schema, QUEUE.FAST, settings, true, coinFilter)
addToQueue(cashOutTx.monitorStaleIncoming, INCOMING_TX_INTERVAL_FILTER, schema, QUEUE.FAST, settings, true, coinFilter)
}
setInterval(() => cashOutTx.monitorUnnotified(settings()), UNNOTIFIED_INTERVAL)
setInterval(() => cashInTx.monitorPending(settings()), PENDING_INTERVAL)
setInterval(() => pi().sweepHd(), SWEEP_HD_INTERVAL)
setInterval(() => pi().pong(), PONG_INTERVAL)
setInterval(() => pi().clearOldLogs(), LOGS_CLEAR_INTERVAL)
setInterval(() => notifier.checkNotification(pi()), CHECK_NOTIFICATION_INTERVAL)
setInterval(initialSanctionsDownload, SANCTIONS_INITIAL_DOWNLOAD_INTERVAL)
setInterval(updateAndLoadSanctions, SANCTIONS_UPDATE_INTERVAL)
setInterval(updateCoinAtmRadar, RADAR_UPDATE_INTERVAL)
setInterval(() => pi().pruneMachinesHeartbeat(), PRUNE_MACHINES_HEARBEAT)
addToQueue(cashOutTx.monitorUnnotified, UNNOTIFIED_INTERVAL, schema, QUEUE.FAST, settings)
addToQueue(cashInTx.monitorPending, PENDING_INTERVAL, schema, QUEUE.FAST, settings)
addToQueue(pi().sweepHd, SWEEP_HD_INTERVAL, schema, QUEUE.FAST, settings)
addToQueue(pi().pong, PONG_INTERVAL, schema, QUEUE.FAST)
addToQueue(pi().clearOldLogs, LOGS_CLEAR_INTERVAL, schema, QUEUE.SLOW)
addToQueue(notifier.checkNotification, CHECK_NOTIFICATION_INTERVAL, schema, QUEUE.FAST, pi)
addToQueue(initialSanctionsDownload, SANCTIONS_INITIAL_DOWNLOAD_INTERVAL, schema, QUEUE.SLOW)
addToQueue(updateAndLoadSanctions, SANCTIONS_UPDATE_INTERVAL, schema, QUEUE.SLOW)
addToQueue(updateCoinAtmRadar, RADAR_UPDATE_INTERVAL, schema, QUEUE.SLOW)
addToQueue(pi().pruneMachinesHeartbeat(), PRUNE_MACHINES_HEARTBEAT, schema, QUEUE.SLOW, settings)
}
module.exports = { start, reload }
function setup (schemasToAdd = [], schemasToRemove = []) {
// clear callback array for each schema in schemasToRemove and clear cached variables
_.forEach(schema => {
const callbacks = schemaCallbacks.get(schema)
_.forEach(clearInterval, callbacks)
schemaCallbacks.delete(schema)
cachedVariables.del(schema)
}, schemasToRemove)
return initializeEachSchema(schemasToAdd)
}
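
Editor's note: startServer in the first hunk now boots the poller with poller.setup(['public']) instead of poller.start(settings). The same entry point can later add or retire per-operator schemas; the schema name below is purely illustrative:

poller.setup(['operator_4f2c'])       // begin polling a newly provisioned schema
poller.setup([], ['operator_4f2c'])   // tear down its intervals and cached settings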
const getActiveSchemas = () => Array.from(schemaCallbacks.keys())
module.exports = { setup, reload, getActiveSchemas }

View file

@ -25,15 +25,12 @@ const phoneCodeRoutes = require('./routes/phoneCodeRoutes')
const pollingRoutes = require('./routes/pollingRoutes')
const stateRoutes = require('./routes/stateRoutes')
const termsAndConditionsRoutes = require('./routes/termsAndConditionsRoutes')
const txRoutes = require('./routes/txRoutes')
const { router: txRoutes } = require('./routes/txRoutes')
const verifyUserRoutes = require('./routes/verifyUserRoutes')
const verifyTxRoutes = require('./routes/verifyTxRoutes')
const verifyPromoCodeRoutes = require('./routes/verifyPromoCodeRoutes')
const localAppRoutes = require('./routes/localAppRoutes')
const app = express()
const localApp = express()
const configRequiredRoutes = [
'/poll',
@ -87,7 +84,4 @@ app.use((req, res) => {
res.status(404).json({ error: 'No such route' })
})
// localapp routes
localApp.use('/', localAppRoutes)
module.exports = { app, localApp }
module.exports = { app }

View file

@ -8,6 +8,7 @@ const { getCashInSettings } = require('../new-config-manager')
const { AUTOMATIC } = require('../constants.js')
function notifyCashboxRemoval (req, res, next) {
const operatorId = res.locals.operatorId
return Promise.all([getMachine(req.deviceId), loadLatestConfig()])
.then(([machine, config]) => {
const cashInSettings = getCashInSettings(config)
@ -15,7 +16,7 @@ function notifyCashboxRemoval (req, res, next) {
return res.status(200).send({ status: 'OK' })
}
return cashbox.createCashboxBatch(req.deviceId, machine.cashbox)
.then(() => setMachine({ deviceId: req.deviceId, action: 'emptyCashInBills' }))
.then(() => setMachine({ deviceId: req.deviceId, action: 'emptyCashInBills' }, operatorId))
.then(() => res.status(200).send({ status: 'OK' }))
})
.catch(next)

View file

@ -1,48 +0,0 @@
const express = require('express')
const router = express.Router()
const state = require('../middlewares/state')
router.get('/pid', (req, res) => {
const deviceId = req.query.device_id
const pidRec = state.pids[deviceId]
res.json(pidRec)
})
router.post('/reboot', (req, res) => {
const deviceId = req.query.device_id
const pid = state.pids[deviceId] && state.pids[deviceId].pid
if (!deviceId || !pid) {
return res.sendStatus(400)
}
state.reboots[deviceId] = pid
res.sendStatus(200)
})
router.post('/shutdown', (req, res) => {
const deviceId = req.query.device_id
const pid = state.pids[deviceId] && state.pids[deviceId].pid
if (!deviceId || !pid) {
return res.sendStatus(400)
}
state.shutdowns[deviceId] = pid
res.sendStatus(200)
})
router.post('/restartServices', (req, res) => {
const deviceId = req.query.device_id
const pid = state.pids[deviceId] && state.pids[deviceId].pid
if (!deviceId || !pid) {
return res.sendStatus(400)
}
state.restartServicesMap[deviceId] = pid
res.sendStatus(200)
})
module.exports = router

View file

@ -31,6 +31,7 @@ function poll (req, res, next) {
const serialNumber = req.query.sn
const pid = req.query.pid
const settings = req.settings
const operatorId = res.locals.operatorId
const localeConfig = configManager.getLocale(deviceId, settings.config)
const zeroConfLimits = _.reduce((acc, cryptoCode) => {
acc[cryptoCode] = configManager.getWalletSettings(cryptoCode, settings.config).zeroConfLimit
@ -48,15 +49,15 @@ function poll (req, res, next) {
const receipt = configManager.getReceipt(settings.config)
const terms = configManager.getTermsConditions(settings.config)
state.pids[deviceId] = { pid, ts: Date.now() }
state.pids[operatorId] = { [deviceId]: { pid, ts: Date.now() } }
return pi.pollQueries(serialNumber, deviceTime, req.query, machineVersion, machineModel)
.then(results => {
const cassettes = results.cassettes
const reboot = pid && state.reboots[deviceId] && state.reboots[deviceId] === pid
const shutdown = pid && state.shutdowns[deviceId] && state.shutdowns[deviceId] === pid
const restartServices = pid && state.restartServicesMap[deviceId] && state.restartServicesMap[deviceId] === pid
const reboot = pid && state.reboots?.[operatorId]?.[deviceId] === pid
const shutdown = pid && state.shutdowns?.[operatorId]?.[deviceId] === pid
const restartServices = pid && state.restartServicesMap?.[operatorId]?.[deviceId] === pid
const langs = localeConfig.languages
const locale = {

View file

@ -66,4 +66,4 @@ router.post('/', postTx)
router.get('/:id', getTx)
router.get('/', getPhoneTx)
module.exports = router
module.exports = { postTx, getTx, getPhoneTx, router }

View file

@ -59,7 +59,7 @@ function verifyAndUpdateUser (id, ua, ip) {
.then(user => {
if (!user) return null
const sql2 = `UPDATE users SET last_accessed=now(), last_accessed_from=$1, last_accessed_address=$2 WHERE id=$3 RETURNING id, role, enabled`
const sql2 = `UPDATE users SET last_accessed=now(), last_accessed_from=$1, last_accessed_address=$2 WHERE id=$3 RETURNING id, username, role, enabled`
return db.one(sql2, [ua, ip, id])
})
.then(user => user)