Draft

Commits (30)
c3388ce  Add action to retrieve region code by name (solocommand, Nov 1, 2022)
59736e9  Create SMG import service (solocommand, Nov 1, 2022)
1cb9f75  Support upserting records (solocommand, Nov 1, 2022)
e8e84a7  Add additional profession mappings (solocommand, Nov 2, 2022)
fb927ec  Remove additional terms (solocommand, Nov 2, 2022)
ed9fffb  Add mappings for renamed/altered terms (solocommand, Nov 2, 2022)
1410dc6  Set dates (solocommand, Nov 2, 2022)
700f29a  Ensure IdentityX questions & answers are ObjectIds (solocommand, Nov 17, 2022)
a71bd04  Add new constructor to ObjectId classes (solocommand, Nov 17, 2022)
cdc8c60  Add segment to fix previously written oids (solocommand, Nov 17, 2022)
38bb710  Update validation for Dr. Bicuspid (Shinsina, Jan 19, 2023)
a085660  Trim region and country names prior to corrections (Shinsina, Jan 20, 2023)
b057410  Merge pull request #1 from Shinsina/dr-bicuspid (solocommand, Jan 30, 2023)
babfdd7  Additional validation for custom fields (Shinsina, Jan 31, 2023)
b8e7c5e  Account for empty old answer values (Shinsina, Jan 31, 2023)
009159a  Merge pull request #2 from Shinsina/additional-adjustments (solocommand, Jan 31, 2023)
46eb2c0  Allow Educator and Student Profession (Shinsina, Jan 31, 2023)
4773c34  Point Oral to Oral & Maxillofacial Surgeon (Shinsina, Jan 31, 2023)
27c316a  Merge pull request #3 from Shinsina/school-professions (solocommand, Jan 31, 2023)
e9d10e0  add email to log (solocommand, Mar 10, 2023)
fbd041d  Update validation for Aunt Minnie (Shinsina, Apr 6, 2023)
c0b82a6  Validate on '1' vs 'TRUE' (Shinsina, Apr 6, 2023)
15f034f  Validation updates (solocommand, Apr 6, 2023)
2c5e2ff  Merge branch 'smg-import' into aunt-minnie (Shinsina, Apr 6, 2023)
f95b45c  Trim the regionName prior to filtering (Shinsina, Apr 7, 2023)
a0ce942  Strip trailing periods off normalized email (Shinsina, Apr 7, 2023)
c4718cd  Merge pull request #4 from Shinsina/aunt-minnie (solocommand, Apr 7, 2023)
d58f1c8  Add additional region mappings (solocommand, Sep 12, 2023)
543ee0a  Validation exceptions for Aunt Minnie Europe (Shinsina, Oct 11, 2023)
1bfb128  Merge pull request #5 from Shinsina/ame-imports (solocommand, Oct 16, 2023)
4 changes: 4 additions & 0 deletions .gitignore
@@ -87,3 +87,7 @@ typings/
# DynamoDB Local files
.dynamodb/
.DS_Store

services/import/data/**/*.csv
dump
scripts/download.sh
14 changes: 14 additions & 0 deletions docker-compose.yml
@@ -206,6 +206,20 @@ services:
ports:
- "12011:80"

import:
<<: *node
working_dir: /identity-x/services/import
command: ["node", "."]
environment:
<<: *env
INTERNAL_PORT: 80
EXTERNAL_PORT: 12012
depends_on:
- application
- locale
ports:
- "12012:80"

volumes:
mongodb: {}
yarn-cache: {}
9 changes: 9 additions & 0 deletions services/import/.eslintrc.js
@@ -0,0 +1,9 @@
module.exports = {
extends: 'airbnb-base',
plugins: [
'import'
],
rules: {
'no-underscore-dangle': [ 'error', { allow: ['_id'] } ],
},
};
Empty file added services/import/data/.gitignore
9 changes: 9 additions & 0 deletions services/import/gulpfile.js
@@ -0,0 +1,9 @@
const gulpfile = require('../../gulpfile');

gulpfile({
entry: 'src/index.js',
lintPaths: ['src/**/*.js'],
watchPaths: [
'src/**/*.js',
],
});
34 changes: 34 additions & 0 deletions services/import/package.json
@@ -0,0 +1,34 @@
{
"name": "@identity-x/import-service",
"version": "1.11.2",
"description": "The IdentityX import service.",
"main": "src/index.js",
"author": "Josh Worden <[email protected]>",
"repository": "https://github.com/base-cms/id-me/tree/master/services/import",
"license": "MIT",
"private": true,
"scripts": {
"dev": "gulp",
"lint": "eslint --ext .js --max-warnings 5 ./",
"test": "yarn lint"
},
"dependencies": {
"@base-cms/env": "^1.0.0",
"@base-cms/object-path": "^1.9.0",
"@identity-x/service-clients": "^1.2.0",
"@identity-x/utils": "^1.2.0",
"@parameter1/mongodb": "^1.6.0",
"async": "^3.2.4",
"csv2json": "^2.0.2",
"inquirer": "^7.3.3",
"validator": "^11.0.0"
},
"devDependencies": {
"eslint": "^5.16.0",
"eslint-config-airbnb-base": "^13.1.0",
"eslint-plugin-import": "^2.17.2",
"gulp": "^4.0.2",
"gulp-cached": "^1.1.1",
"gulp-eslint": "^5.0.0"
}
}
37 changes: 37 additions & 0 deletions services/import/src/batch.js
@@ -0,0 +1,37 @@
const { log } = console;

const batch = async ({
name,
totalCount,
limit,
page = 1,
handler = () => {},
retriever = () => {},
} = {}) => {
if (!totalCount) return;
const pages = Math.ceil(totalCount / limit);
const skip = (page - 1) * limit;
log(`Handling batch ${page} of ${pages} (L/S ${limit}/${skip}) for '${name}'`);

const results = await retriever({
name,
pages,
page,
limit,
skip,
});

await handler({ results, name });
if (page < pages) {
await batch({
name,
totalCount,
limit,
page: page + 1,
handler,
retriever,
});
}
};

module.exports = batch;
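
For orientation, here is a minimal sketch of how this batching helper might be driven; the record array, page size, and handler below are illustrative and not part of the PR:

const batch = require('./batch');

const records = [/* e.g. rows parsed from a CSV */];
const limit = 50;

batch({
  name: 'example',
  totalCount: records.length,
  limit,
  // retriever receives { name, pages, page, limit, skip } and returns one page of work
  retriever: ({ skip }) => records.slice(skip, skip + limit),
  // handler receives { results, name }; batch() recurses page by page until page === pages
  handler: async ({ results }) => console.log(`handling ${results.length} records`),
}).catch((e) => { console.error(e); process.exit(1); });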
10 changes: 10 additions & 0 deletions services/import/src/env.js
@@ -0,0 +1,10 @@
const {
cleanEnv,
validators,
} = require('@base-cms/env');

const { nonemptystr } = validators;

module.exports = cleanEnv(process.env, {
MONGO_DSN: nonemptystr({ desc: 'The MongoDB DSN to connect to.' }),
});
51 changes: 51 additions & 0 deletions services/import/src/fix-object-ids.js
@@ -0,0 +1,51 @@
const { ObjectId } = require('@parameter1/mongodb');
const { iterateCursor } = require('@parameter1/mongodb/utils');

const batch = require('./batch');
const client = require('./mongodb');

module.exports = async (appId, limit = 10) => {
const applicationId = new ObjectId(appId);
const collection = await client.collection({ dbName: 'identity-x', name: 'app-users' });
const projection = { customBooleanFieldAnswers: 1, customSelectFieldAnswers: 1 };
const query = {
applicationId,
$or: [
{ 'customBooleanFieldAnswers.0._id': { $type: 'string' } },
{ 'customSelectFieldAnswers.0._id': { $type: 'string' } },
],
};

await batch({
name: 'fix-ids',
totalCount: await collection.countDocuments(query),
limit,
// Explicitly skipping `sort` since we're modifying what we're querying against!
retriever: () => collection.find(query, { limit, projection }),
handler: async ({ results }) => {
const ops = [];
await iterateCursor(results, async (user) => {
ops.push({
updateOne: {
filter: { _id: user._id },
update: {
$set: {
customBooleanFieldAnswers: user.customBooleanFieldAnswers.map(({ _id, value }) => ({
_id: new ObjectId(_id),
value,
})),
customSelectFieldAnswers: user.customSelectFieldAnswers.map(({ _id, values }) => ({
_id: new ObjectId(_id),
values: (values || []).map(id => new ObjectId(id)),
})),
},
},
},
});
});
return collection.bulkWrite(ops);
},
});

return client.close();
};
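
As a rough before/after of what this segment repairs, assuming the document shape implied by the query and $set above (the ids and app id below are examples only):

const fixObjectIds = require('./fix-object-ids');

// Before: answers written with string ids, matched by the `$type: 'string'` query above, e.g.
//   { customSelectFieldAnswers: [{ _id: '5349b4ddd2781d08c09890f3', values: ['5349b4ddd2781d08c09890f4'] }] }
// After the bulkWrite, the same answers hold real ObjectId instances:
//   { customSelectFieldAnswers: [{ _id: ObjectId('5349b4ddd2781d08c09890f3'), values: [ObjectId('5349b4ddd2781d08c09890f4')] }] }
fixObjectIds('629bac8439347cfce3861789', 100) // example app id (the Lab Pulse default from index.js) and batch size
  .then(() => console.log('ObjectId fix complete'))
  .catch(console.error);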
101 changes: 101 additions & 0 deletions services/import/src/index.js
@@ -0,0 +1,101 @@
const fs = require('fs');
const { join } = require('path');
const inquirer = require('inquirer');
const { applicationService } = require('@identity-x/service-clients');
const parseCSV = require('./parse-csv');
const validate = require('./validate');
const upsert = require('./upsert');
const fixObjectIds = require('./fix-object-ids');

const { log } = console;

process.on('unhandledRejection', (e) => {
log(e);
throw e;
});

const findFilesIn = (path, ext = 'csv', arr = []) => {
const pattern = new RegExp(`.${ext}$`, 'i');
let found = arr || [];
fs.readdirSync(path).forEach((file) => {
const filePath = `${path}/${file}`;
if (fs.statSync(filePath).isDirectory()) {
found = findFilesIn(filePath, ext, found);
} else if (pattern.test(file)) {
found.push(join(filePath));
}
});
return found;
};

(async () => {
const {
appId,
file,
limit,
errorOnBadAnswer,
fixObjectIdValues,
} = await inquirer.prompt([
{
type: 'input',
name: 'orgId',
message: 'What organization should be imported into?',
default: '627aa459dfa0e102fdc93122', // SMG
},
{
type: 'list',
name: 'appId',
message: 'What application should be imported into?',
choices: async (ans) => {
const apps = await applicationService.request('listForOrg', { id: ans.orgId, fields: { name: 1 } });
return apps.map(app => ({ name: app.name, value: app._id }));
},
default: '629bac8439347cfce3861789', // Lab Pulse
},
{
type: 'confirm',
name: 'fixObjectIdValues',
message: 'Should existing question/answer values be converted to ObjectIds?',
default: false,
},
{
type: 'list',
name: 'file',
message: 'Which file should be imported?',
choices: () => {
const path = join(__dirname, '../data');
return findFilesIn(path);
},
},
{
type: 'number',
name: 'limit',
message: 'How many users should be created/validated at once?',
default: 100,
},
{
type: 'confirm',
name: 'errorOnBadAnswer',
message: 'Should the record be skipped if a bad answer value is found?',
default: true,
},
]);

if (fixObjectIdValues) {
log(`Fixing existing ObjectId values for ${appId}...`);
await fixObjectIds(appId, limit);
log('Done!');
process.exit(0);
}

try {
log(`Importing records from ${file} to ${appId}!`);
const records = await parseCSV(file);
const validated = await validate(records, appId, limit, errorOnBadAnswer);
await upsert(validated, appId, limit);
log('Import complete!');
} catch (e) {
log('Encountered error!', e);
process.exit(1);
}
})().catch((e) => { throw e; });
4 changes: 4 additions & 0 deletions services/import/src/mongodb.js
@@ -0,0 +1,4 @@
const Client = require('@parameter1/mongodb/client');
const { MONGO_DSN } = require('./env');

module.exports = new Client({ url: MONGO_DSN });
11 changes: 11 additions & 0 deletions services/import/src/parse-csv.js
@@ -0,0 +1,11 @@
const csv2json = require('csv2json');
const fs = require('fs');

module.exports = filename => new Promise(async (resolve, reject) => {
const strings = [];
fs.createReadStream(filename)
.pipe(csv2json({}))
.on('data', e => strings.push(e.toString()))
.on('error', reject)
.on('end', () => resolve(JSON.parse(strings.join(''))));
});
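
A quick standalone usage sketch (the file path and column names are hypothetical); csv2json keys each row by the CSV header row, so the promise resolves to an array of plain objects:

const parseCSV = require('./parse-csv');

(async () => {
  // For a file whose header row is `email,first_name`, this resolves to something like
  // [{ email: 'jane@example.com', first_name: 'Jane' }, ...]
  const records = await parseCSV(`${__dirname}/../data/example.csv`);
  console.log(`Parsed ${records.length} rows`);
})().catch((e) => { throw e; });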
71 changes: 71 additions & 0 deletions services/import/src/upsert.js
@@ -0,0 +1,71 @@
const { ObjectId } = require('@parameter1/mongodb');
const batch = require('./batch');
const client = require('./mongodb');

const { log } = console;
const now = new Date('2022-01-01');

module.exports = async (records = [], appId, limit = 10) => {
log('Upserting ', records.length, appId, limit);

const applicationId = new ObjectId(appId);
const collection = await client.collection({ dbName: 'identity-x', name: 'app-users' });

await batch({
name: 'upsert',
totalCount: records.length,
limit,
retriever: ({ skip }) => records.slice(skip, skip + limit),
handler: async ({ results }) => {
const ops = results.reduce((arr, user) => {
const {
_id,
email,
verified,
externalId,
customBooleanFieldAnswers,
...payload
} = user;
const insertDefaults = {
verified,
customBooleanFieldAnswers,
createdAt: now,
updatedAt: now,
};
const filter = { applicationId, email, ...(_id && { _id }) };
const $addToSet = {
...(externalId && { externalIds: externalId }),
};
return [
...arr,
{
// Upsert the user
updateOne: {
filter,
update: {
...(Object.keys($addToSet).length && { $addToSet }),
$setOnInsert: { ...insertDefaults, ...filter },
$set: { ...payload, _importedAt: now },
},
upsert: !_id,
},
},
{
// Set the boolean answers if they haven't already been set in IdentityX
updateOne: {
filter: {
applicationId,
email,
'customBooleanFieldAnswers.0': { $exists: false },
},
update: { $set: { customBooleanFieldAnswers } },
},
},
];
}, []);
return collection.bulkWrite(ops);
},
});

return client.close();
};
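
Illustrative only: for a single brand-new input record, the reducer above would hand bulkWrite two operations roughly like the following (the email, external id, and field values are invented, and the app id is the example default from index.js):

const { ObjectId } = require('@parameter1/mongodb');

const applicationId = new ObjectId('629bac8439347cfce3861789');
const now = new Date('2022-01-01');
const email = 'jane@example.com';

const exampleOps = [
  {
    // 1) Upsert the user, keyed on application + email.
    updateOne: {
      filter: { applicationId, email },
      update: {
        $addToSet: { externalIds: 'abc123' },
        $setOnInsert: {
          verified: true,
          customBooleanFieldAnswers: [],
          createdAt: now,
          updatedAt: now,
          applicationId,
          email,
        },
        $set: { givenName: 'Jane', _importedAt: now },
      },
      upsert: true,
    },
  },
  {
    // 2) Only write boolean answers when the user has none in IdentityX yet.
    updateOne: {
      filter: { applicationId, email, 'customBooleanFieldAnswers.0': { $exists: false } },
      update: { $set: { customBooleanFieldAnswers: [] } },
    },
  },
];

module.exports = exampleOps;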