Scenario: The standard CSV import does not support importing Item Receipts into NetSuite. The client requested an alternative way to import Item Receipts into NetSuite from a CSV file.
Solution: We will create a custom record in NetSuite. It will have one custom field to upload the CSV file, and another custom field that shows the execution status of the file.
Using a Map/Reduce script we will create an Item Receipt (IR) for each transaction in the CSV file. When the client saves an entry in the custom record, a User Event script will trigger the Map/Reduce script.
Custom Record:

User Event Script
/**
 * @NApiVersion 2.1
 * @NScriptType UserEventScript
 */
define(['N/record', 'N/task'],
    /**
     * Kicks off the Item Receipt CSV import: when the custom import record is
     * saved, submits the Map/Reduce script with the uploaded file ID and the
     * record ID as parameters, then marks the record IN PROGRESS.
     *
     * @param {record} record
     * @param {task} task
     */
    (record, task) => {
        /**
         * afterSubmit entry point.
         *
         * @param {Object} scriptContext
         * @param {Record} scriptContext.newRecord - The saved custom record.
         * @param {string} scriptContext.type - Trigger type (create/edit/xedit/delete).
         */
        const afterSubmit = (scriptContext) => {
            try {
                // Guard: the record.submitFields() call below re-fires this
                // user event with type XEDIT. Without this check the script
                // would loop forever, re-submitting the Map/Reduce task on
                // every status update. Deletions have nothing to import.
                if (scriptContext.type === scriptContext.UserEventType.XEDIT ||
                    scriptContext.type === scriptContext.UserEventType.DELETE) {
                    return;
                }
                const currentRec = scriptContext.newRecord;
                const id = currentRec.id;
                log.debug("Record id", id);
                const fileId = currentRec.getValue({
                    fieldId: 'custrecord_jj_file_upload'
                });
                log.debug("file_id", fileId);
                // Hand the CSV off to the Map/Reduce script for processing.
                const mrTask = task.create({
                    taskType: task.TaskType.MAP_REDUCE,
                    scriptId: 'customscript_jj_mr_ir',
                    deploymentId: 'customdeploy_jj_mr_ir',
                    params: { 'custscript_jj_file_id': fileId, 'custscript_jj_rec_id': id }
                });
                const scriptTaskId = mrTask.submit();
                log.debug("MR task submitted", scriptTaskId);
                // Flag the record so users can see the import is running.
                record.submitFields({
                    type: 'customrecord_jj_ir_cust_import',
                    id: id,
                    values: {
                        'custrecord_jj_import_status': 'IN PROGRESS'
                    }
                });
            } catch (e) {
                // Log at error level so failures surface in the execution log
                // (the original logged at debug, hiding real failures).
                log.error("afterSubmit error", e);
            }
        };
        return { afterSubmit };
    });
Map/Reduce Script:
/**
* @NApiVersion 2.1
* @NScriptType MapReduceScript
*/
define(['N/error', 'N/file', 'N/record', 'N/search','N/runtime','/SuiteScripts/papaparse.min.js'],
/**
* @param{error} error
* @param{file} file
* @param{record} record
* @param{search} search
*/
(error, file, record, search,runtime,papa) => {
/**
 * Loads the uploaded CSV file, parses it with PapaParse, and returns one
 * object per CSV row (keyed by header column names). Every row is tagged
 * with the import custom record's internal ID so later stages can update
 * that record's status.
 *
 * @param {Object} inputContext
 * @returns {Array<Object>} Parsed CSV rows, each with an added
 *     `customRecInternalId` property.
 * @throws Rethrows any load/parse error so the Map/Reduce run fails visibly.
 */
const getInputData = (inputContext) => {
    try {
        const script = runtime.getCurrentScript();
        const fileId = script.getParameter('custscript_jj_file_id');
        const recId = script.getParameter('custscript_jj_rec_id');
        log.debug("file_id", fileId);
        log.debug("Record Id", recId);
        const uploadFile = file.load({ id: fileId });
        const fileContents = uploadFile.getContents();
        // header:true keys each row by its CSV column name;
        // skipEmptyLines avoids producing empty trailing rows.
        const parsed = papa.parse(fileContents, {
            header: true,
            skipEmptyLines: true
        });
        return parsed.data.map((row) => {
            row.customRecInternalId = recId;
            return row;
        });
    } catch (e) {
        // The original swallowed the error and returned undefined, which
        // silently produced no input and left the custom record stuck at
        // IN PROGRESS. Log at error level and rethrow instead.
        log.error("getInputData error", e);
        throw e;
    }
};
/**
 * Groups CSV rows by their source transaction's "Document Number" so that
 * all lines belonging to one transaction are handled together in a single
 * reduce invocation.
 *
 * @param {Object} mapContext
 * @param {string} mapContext.value - One JSON-serialized CSV row.
 */
const map = (mapContext) => {
    try {
        log.debug("***In Map ***");
        const row = JSON.parse(mapContext.value);
        log.debug("mapResult", row);
        const createdFrom = row["Document Number"];
        log.debug("Created From Document Number", createdFrom);
        // Key by source document so reduce receives the whole transaction.
        mapContext.write({
            key: createdFrom,
            value: row
        });
    } catch (e) {
        // console.log is unavailable in server-side SuiteScript, so the
        // original catch handler itself threw; use N/log instead.
        log.error("error@Map", e);
    }
};
/**
 * Reduce: builds one Item Receipt per source transaction.
 *
 * The reduce key is the source document number; the values are all CSV rows
 * for that transaction (the first row supplies the source type and internal
 * ID). The source transaction (Purchase Order / Return Authorization /
 * Transfer Order) is transformed into an Item Receipt in dynamic mode, and
 * any receipt line whose item does not appear in the CSV is marked
 * "do not receive" before the receipt is saved.
 *
 * On success writes {key:'success', value:rows}; on failure writes
 * {key:'error', value:{...rows, error}} for the summarize stage.
 *
 * @param {Object} reduceContext
 * @param {string} reduceContext.key - Source document number from the map stage.
 * @param {Array<string>} reduceContext.values - JSON-serialized CSV rows for
 *     this transaction.
 */
const reduce = (reduceContext) => {
try {
log.debug("***In Reduce***");
log.debug("Key",reduceContext.key)
// Deserialize every CSV row belonging to this transaction.
let result=reduceContext.values.map(JSON.parse);
log.debug("Value",result)
// Map the human-readable CSV "Type" column to the record type id that
// record.transform() expects. NOTE(review): any other Type value falls
// through unchanged and will make transform() throw — confirm the CSV can
// only contain these three types.
let createdType=result[0]["Type"];
if(createdType.toLowerCase()==='purchase order'){
createdType='purchaseorder'
}
else if(createdType.toLowerCase()==='return authorization'){
createdType='returnauthorization'
}
else if(createdType.toLowerCase()==='transfer order'){
createdType='transferorder'
}
// Transform the source transaction into an Item Receipt; dynamic mode is
// required for the selectLine/setCurrentSublistValue/commitLine calls below.
var IRrecord=record.transform({
fromType:createdType,
fromId: result[0]["Created From Internal ID"],
toType: record.Type.ITEM_RECEIPT,isDynamic: true
});
let line_count=IRrecord.getLineCount({
sublistId: 'item'
});
log.debug("Line Count",line_count)
// Walk every receipt line; flag counts how many CSV rows reference the
// line's item. Lines with no match are unchecked so they are not received.
for(var i=0;i<line_count;i++){
var flag=0;
IRrecord.selectLine({
sublistId: 'item',
line:i
});
var item = IRrecord.getCurrentSublistValue({
sublistId: 'item',
fieldId: 'item'
});
// NOTE(review): comparison is strict (===) between the sublist value and
// the CSV "Item Internal ID" string — confirm both sides are strings.
for(var j=0; j<result.length;j++){
if(item===result[j]["Item Internal ID"]){
flag=flag+1;
}
}
// Item not present in the CSV: mark the line as not received.
// Matched lines are left untouched (selectLine without commitLine).
if(flag <= 0){
IRrecord.setCurrentSublistValue({
sublistId: 'item',
fieldId: 'itemreceive',
value: false,
});
IRrecord.commitLine({
sublistId: 'item'
});
}
}
var IRNum = IRrecord.save()
log.debug('IFNum', IRNum);
// Report success to summarize, carrying the rows (incl. customRecInternalId).
reduceContext.write({
key: 'success',
value: result
})
}catch (e) {
// NOTE(review): logged at debug level — errors will not stand out in the
// execution log; consider log.error.
log.debug("error@reduce",e)
let result=reduceContext.values.map(JSON.parse);
// Spread turns the rows array into an object keyed "0","1",... plus the
// error; summarize reads parseSummary[0] and parseSummary.error.message.
reduceContext.write({
key: 'error',
value: {...result, 'error': e}
});
}
}
/**
 * Builds an error-report CSV for any transactions that failed in the reduce
 * stage, saves it to the File Cabinet, attaches it to the import custom
 * record, and marks the record COMPLETE.
 *
 * @param {Object} summaryContext
 * @param {Iterator} summaryContext.output - key/value pairs written by
 *     reduce ('success' or 'error' keys; JSON-serialized row data that
 *     carries customRecInternalId from getInputData).
 */
const summarize = (summaryContext) => {
    try {
        const header = ["Created From Type", "Created From ID", "Error Description"];
        let csvFileData = header.toString() + '\r\n';
        let recID = '';
        let fileID = '';
        let errorCount = 0;
        summaryContext.output.iterator().each(function (key, value) {
            const parsed = JSON.parse(value);
            log.debug("Summary parse", parsed);
            // Every result carries the custom record's internal ID (tagged in
            // getInputData); remember it so we can update the record below.
            recID = parsed[0]["customRecInternalId"];
            if (key === 'error') {
                errorCount += 1;
                const recType = parsed[0]["Type"];
                const recInternal = parsed[0]["Created From Internal ID"];
                // Strip ALL commas so the message stays in one CSV cell — the
                // original .replace(',', " ") only removed the first comma.
                // Guard: serialized errors may lack a .message property.
                const rawMessage = (parsed.error && parsed.error.message) || 'Unknown error';
                csvFileData += recType + ',' + recInternal + ',' + String(rawMessage).replace(/,/g, " ") + '\r\n';
            }
            return true;
        });
        if (!recID) {
            // No output at all (e.g. getInputData failed) — nothing to update;
            // the original would call submitFields with an undefined id here.
            return;
        }
        const values = {
            'custrecord_jj_import_status': 'COMPLETE'
        };
        if (errorCount > 0) {
            // NOTE(review): folder 5850 is hard-coded — confirm it exists in
            // the target account.
            const fileObj = file.create({
                name: 'ERROR-' + recID + '-' + Math.floor(Date.now() / 1000) + '.csv',
                fileType: file.Type.CSV,
                folder: 5850,
                contents: csvFileData
            });
            fileID = fileObj.save();
            values['custrecord_jj_error_file'] = fileID;
        }
        record.submitFields({
            type: 'customrecord_jj_ir_cust_import',
            id: recID,
            values: values
        });
    } catch (e) {
        // Error level so failures surface in the execution log.
        log.error("error@summarize", e);
    }
};
return {getInputData,map, reduce, summarize}
});