Joel's Thoughts

Bulk User Registration From CSV - MEAN.JS

December 16, 2016

I have this requirement on our single page application where an admin can bulk import users from a CSV file. The app was built on the top of MEAN.js framework.

The first step is to install the csv-parse module.

npm i csv-parse --save

Next step is to create the bare functions needed. On my admin controller (admin.server.controller.js), I created the parseCsv function which is going to be exposed on the Express route so an admin user can upload the csv file.

var path = require('path'),
    fs = require('fs'),
    Parse = require('csv-parse'),
    shortid = require('shortid'),
    mongoose = require('mongoose'),   
    multer = require('multer'),
    config = require(path.resolve('./config/config')),
    User = mongoose.model('User'),
    errorHandler = require(path.resolve('./modules/core/server/controllers/errors.server.controller'));


exports.parseCsv = function(req, res, next) {
    var upload = multer(config.uploads.bulkUsersCsvUpload).single('csvData');
    upload(req, res, function(uploadError) {
        if (uploadError) {
            return res.status(400).send({
                message: 'Error occurred while uploading csv.'
            });
        } else {
            parseFile(req, res, next);
        }
    });
};


..... more code

On the Express route, the exposed parseCsv function from the admin controller is used something like this,

 app.route('/api/users/csv').post(adminPolicy.isAllowed, admin.parseCsv);

If you notice, inside parseCsv there’s a function named parseFile. This is the function responsible for parsing the users’ data from the csv and saving it into the MongoDB database.

Next is to write the parseFile function. It looks like this:


...more code before

/**
 * Parses the uploaded CSV file (req.file.path), collects every row that
 * carries the required columns, creates a User per row via createUser,
 * and answers the HTTP request with the outcome.
 *
 * Required CSV columns: email, firstName, lastName, roles.
 *
 * @param {Object} req - Express request; req.file.path is set by multer.
 * @param {Object} res - Express response.
 * @param {Function} next - Express next middleware (unused here; kept
 *     for signature consistency with the other handlers).
 */
var parseFile = function(req, res, next) {
    var filePath = req.file.path,
        users = [],
        columns = true; // let csv-parse take column names from the first row

    // Invoked once per parsed CSV row; keeps only rows that contain
    // every required field.
    function onNewRecord(record) {
        var isValid = (record.hasOwnProperty('email') &&
            record.hasOwnProperty('firstName') && record.hasOwnProperty('lastName') &&
            record.hasOwnProperty('roles'));
        if (isValid) {
            users.push(record);
        }
    }

    // Reports a parse failure to the client.
    function onParseError(error) {
        return res.status(400).send({
            message: 'Error occurred while parsing csv.'
        });
    }

    // Invoked after the whole file has been read: persist every
    // collected record and respond once all createUser promises settle.
    function done(linesRead) {
        // Native Promise.all is sufficient here — no need to
        // require('bluebird') inside the handler on every request.
        var promises = users.map(function(user) {
            return createUser(user);
        });

        return Promise.all(promises).then(function(data) {
            return res.status(200).send({
                message: 'Successfully created ' + data.length + ' users from CSV.'
            });
        }).catch(function(err) {
            return res.status(400).send({
                message: 'The user creation is incomplete.'
            });
        });
    }

    return parseCSVFile(filePath, columns, onNewRecord, onParseError, done);
};


...more code


As you see, parseFile is composed of 3 main functions namely:

  1. onNewRecord - called every time the parser finishes reading a single row in the csv file. Its main job is to store the recorded data into the users array to be retrieved later.

  2. onParseError - responsible for informing the user in case a CSV parsing error occurs.

  3. done - called when all the contents of the csv file are read and recorded. Its job is to retrieve each record stored inside the users array and save them individually into MongoDB using the createUser function. Each call returns a promise object that is stored into the promises array.

The collected promises are then provisioned to Bluebird‘s Promise.all function which determines the point in time when all promises are completed.

Finally, parseFile invokes the function parseCSVFile.


/**
 * Streams a CSV file from disk through csv-parse.
 *
 * @param {string} sourceFilePath - Path of the CSV file to read.
 * @param {boolean|Array} columns - csv-parse `columns` option; `true`
 *     makes the first row supply the property names of each record.
 * @param {Function} onNewRecord - Called with each parsed record.
 * @param {Function} handleError - Called with any read or parse error.
 * @param {Function} done - Called with the number of lines read once
 *     parsing has finished.
 */
var parseCSVFile = function(sourceFilePath, columns, onNewRecord, handleError, done) {
    var source = fs.createReadStream(sourceFilePath),
        linesRead = 0,
        parser = Parse({
            delimiter: ',',
            columns: columns
        });

    parser.on('readable', function() {
        var record;
        while (record = parser.read()) {
            linesRead++;
            onNewRecord(record);
        }
    });

    parser.on('error', function(error) {
        handleError(error);
    });

    parser.on('end', function() {
        done(linesRead);
    });

    // A missing or unreadable file emits 'error' on the read stream,
    // not on the parser; without this handler the process would crash
    // with an uncaught exception instead of answering the request.
    source.on('error', function(error) {
        handleError(error);
    });

    source.pipe(parser);
};


If you notice, parseCSVFile is actually doing the task of csv parsing using the csv-parse module.









  • About
  • Search
  • Resume
  • Powered by Jekyll using the Trio theme