diff --git a/lib/ofac/matching_tests.js b/lib/ofac/matching_tests.js
deleted file mode 100644
index e69de29b..00000000
diff --git a/lib/ofac/parsing.js b/lib/ofac/parsing.js
index bb7864e0..7c712637 100644
--- a/lib/ofac/parsing.js
+++ b/lib/ofac/parsing.js
@@ -132,7 +132,7 @@ function promiseParseDocument (source) {
   xml.on('error', err => {
     xml.pause()
-    const message = `Error while parsing OFAC data source file (${source}): ${err.message()}`
+    const message = `Error while parsing OFAC data source file (${source}): ${err.message}`
     reject(new Error(message))
   })
diff --git a/lib/ofac/parsing_tests.js b/lib/ofac/parsing_tests.js
deleted file mode 100644
index e69de29b..00000000
diff --git a/package.json b/package.json
index 3bb19736..fb45262b 100644
--- a/package.json
+++ b/package.json
@@ -78,7 +78,7 @@
   },
   "scripts": {
     "start": "node bin/lamassu-server",
-    "test": "mocha $(find . -path ./node_modules -prune -o -name '*_tests.js')"
+    "test": "mocha --recursive tests"
   },
   "devDependencies": {
     "ava": "^0.19.1",
diff --git a/tests/ofac/matching.js b/tests/ofac/matching.js
new file mode 100644
index 00000000..f73e82d4
--- /dev/null
+++ b/tests/ofac/matching.js
@@ -0,0 +1,10 @@
+const assert = require('assert')
+const parser = require('../../lib/ofac/matching')
+
+describe('OFAC', function () {
+  describe('Matching', function () {
+
+    it('should ...')
+
+  })
+})
diff --git a/tests/ofac/parsing.js b/tests/ofac/parsing.js
new file mode 100644
index 00000000..8920a246
--- /dev/null
+++ b/tests/ofac/parsing.js
@@ -0,0 +1,168 @@
+const assert = require('assert')
+const parser = require('../../lib/ofac/parsing')
+
+const fs = require('fs')
+const path = require('path')
+const util = require('util')
+const _ = require('lodash/fp')
+
+const randomTmpFileName = () => path.join('/tmp', Math.random().toString())
+
+const writeFile = util.promisify(fs.writeFile)
+
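+// Write each XML fixture to its own temp file and resolve with the file paths;
+// the tests below hand that array of paths straight to parser.parse.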
+function makeDataFiles (contents) {
+  const fileNames = _.map(randomTmpFileName, Array(contents.length))
+  const pairs = _.zip(fileNames, contents)
+  return Promise.all(_.map(_.spread(writeFile), pairs))
+    .then(() => fileNames)
+}
+
+
+const mapLines = _.flow(_.map, _.join(''))
+
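+// NamePartTypeID codes from the OFAC SDN Advanced schema, keyed by the
+// name-part role used in these fixtures.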
+const partIds = new Map([
+  ['lastName', 1520],
+  ['firstName', 1521],
+  ['middleName', 1522],
+  ['maidenName', 1523],
+  ['patronymic', 91708],
+  ['matronymic', 91709],
+  ['nickname', 1528]
+])
+
+const getId = part => partIds.get(part.partName)
+
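+// Builders for a minimal SDN Advanced document. Element and attribute names
+// follow the OFAC SDN Advanced schema, but the fixtures keep only the fields
+// the parser is expected to read, so they are much simpler than the real feed.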
+const makePart = part => '' +
+  '\n\t\t\t\t\t<DocumentedNamePart>' +
+  `\n\t\t\t\t\t\t<NamePartValue NamePartGroupID="${getId(part)}">` +
+  part.value +
+  '</NamePartValue>' +
+  '\n\t\t\t\t\t</DocumentedNamePart>'
+
+const makeAlias = alias => '' +
+  '\n\t\t\t<Alias>' +
+  `\n\t\t\t\t<DocumentedName ID="${alias.id}">` +
+  mapLines(makePart, alias.parts) +
+  '\n\t\t\t\t</DocumentedName>' +
+  '\n\t\t\t</Alias>'
+
+const makePartGroup = part => '' +
+  '\n\t\t\t\t<MasterNamePartGroup>' +
+  `\n\t\t\t\t\t<NamePartGroup ID="${getId(part)}" NamePartTypeID="${getId(part)}"/>` +
+  '\n\t\t\t\t</MasterNamePartGroup>'
+
+const makePartGroups = alias => mapLines(makePartGroup, alias.parts)
+
+const makeBirthDate = birthDate => '' +
+  '\n\t\t<Feature FeatureTypeID="8">' +
+  '\n\t\t\t<FeatureVersion>' +
+  '\n\t\t\t\t<DatePeriod>' +
+  '\n\t\t\t\t\t<Start>' +
+  '\n\t\t\t\t\t\t<From>' +
+  `\n\t\t\t\t\t\t\t<Year>${birthDate.start.year}</Year>` +
+  `\n\t\t\t\t\t\t\t<Month>${birthDate.start.month}</Month>` +
+  `\n\t\t\t\t\t\t\t<Day>${birthDate.start.day}</Day>` +
+  '\n\t\t\t\t\t\t</From>' +
+  '\n\t\t\t\t\t\t<To>' +
+  `\n\t\t\t\t\t\t\t<Year>${birthDate.start.year}</Year>` +
+  `\n\t\t\t\t\t\t\t<Month>${birthDate.start.month}</Month>` +
+  `\n\t\t\t\t\t\t\t<Day>${birthDate.start.day}</Day>` +
+  '\n\t\t\t\t\t\t</To>' +
+  '\n\t\t\t\t\t</Start>' +
+  '\n\t\t\t\t\t<End>' +
+  '\n\t\t\t\t\t\t<From>' +
+  `\n\t\t\t\t\t\t\t<Year>${birthDate.end.year}</Year>` +
+  `\n\t\t\t\t\t\t\t<Month>${birthDate.end.month}</Month>` +
+  `\n\t\t\t\t\t\t\t<Day>${birthDate.end.day}</Day>` +
+  '\n\t\t\t\t\t\t</From>' +
+  '\n\t\t\t\t\t\t<To>' +
+  `\n\t\t\t\t\t\t\t<Year>${birthDate.end.year}</Year>` +
+  `\n\t\t\t\t\t\t\t<Month>${birthDate.end.month}</Month>` +
+  `\n\t\t\t\t\t\t\t<Day>${birthDate.end.day}</Day>` +
+  '\n\t\t\t\t\t\t</To>' +
+  '\n\t\t\t\t\t</End>' +
+  '\n\t\t\t\t</DatePeriod>' +
+  '\n\t\t\t</FeatureVersion>' +
+  '\n\t\t</Feature>'
+
+const makeProfile = profile => '' +
+  `\n\t<Profile ID="${profile.id}">` +
+  '\n\t\t<Identity>' +
+  mapLines(makeAlias, profile.aliases) +
+  '\n\t\t\t<NamePartGroups>' +
+  mapLines(makePartGroups, profile.aliases) +
+  '\n\t\t\t</NamePartGroups>' +
+  '\n\t\t</Identity>' +
+  mapLines(makeBirthDate, profile.birthDates) +
+  '\n\t</Profile>'
+
+const makeXml = profiles => '' +
+  '\n<Sanctions>' +
+  mapLines(makeProfile, profiles) +
+  '\n</Sanctions>'
+
+
+describe('OFAC', function () {
+  describe('Parsing', function () {
+
+    // To detect botched downloads
+    it('should fail on malformed XML', function () {
+      // A deliberately mismatched closing tag makes the document malformed.
+      const xml = '<Sanctions><DistinctParty></Sanctions>'
+      return makeDataFiles([xml]).then(parser.parse)
+        .catch(error => {
+          assert.ok(error instanceof Error)
+          return true
+        })
+        .then(ret => {
+          assert.equal(ret, true)
+        })
+    })
+
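+    // One individual with two name parts (John Doe) and one birth-date range;
+    // the size assertions below describe the indexes parser.parse builds over it.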
+    it('should return the expected structs', function () {
+      const xml = makeXml([{
+        id: '1', aliases: [{
+          id: '1',
+          parts: [
+            {partName: 'firstName', value: 'John'},
+            {partName: 'lastName', value: 'Doe'}]
+        }],
+        birthDates: [{
+          start: {year: 1955, month: 10, day: 5},
+          end: {year: 1955, month: 10, day: 5}
+        }]
+      }])
+      return makeDataFiles([xml]).then(parser.parse)
+        .then(structs => {
+          const {individuals} = structs
+          assert.ok(Array.isArray(individuals))
+          assert.equal(individuals.length, 1)
+
+          const {individualsMap} = structs
+          assert.ok(individualsMap instanceof Map)
+          assert.equal(individualsMap.size, 1)
+
+          const {aliasToIndividual} = structs
+          assert.ok(aliasToIndividual instanceof Map)
+          assert.equal(aliasToIndividual.size, 1)
+
+          const {phoneticMap} = structs
+          assert.ok(phoneticMap instanceof Map)
+          assert.equal(phoneticMap.size, 3)
+
+          const {wordList} = structs
+          assert.ok(Array.isArray(wordList))
+          assert.equal(wordList.length, 2)
+        })
+    })
+
+    it('should be able to parse multiple sources')
+
+    it('should remove duplicates from multiple sources')
+
+  })
+})