I have working code that reads a CSV file from S3, groups every 25 rows into a DynamoDB BatchWriteItem request, and sends it. The BatchWrite often returns success but with UnprocessedItems containing some of the items (not all 25). Resubmitting those may also fail, partially or completely. I want to implement exponential backoff for the subsequent requests, but every library I have found assumes the retried task is identical each time. In my case, the items in a retry may or may not be the same as in the previous request.
I am not very familiar with Node.js. Is there a library or pattern for implementing retried tasks whose context changes between attempts?
I am using AWS Lambda, so I cannot rely on global variables.
Here is my helper function that writes to DDB with one retry:
// batchwrite to DDB
var AWS = require("aws-sdk");
var util = require("util");
var dynamodb = new AWS.DynamoDB(); // low-level client, so batchWriteItem is available

function batchWriteDDB(params) {
    dynamodb.batchWriteItem(params, function(err, data) {
        if (err) {
            console.error("Batchwrite failed: " + err, err.stack);
        } else {
            var unprocessed = data.UnprocessedItems;
            if (Object.keys(unprocessed).length === 0) {
                console.log("Processed all items.");
            } else {
                // some unprocessed items, do it again
                console.warn("Batchwrite did not complete: " + util.inspect(unprocessed, { showHidden: false, depth: null }));
                console.log("Retry batchwriting...");
                var params2 = { RequestItems: data.UnprocessedItems };
                dynamodb.batchWriteItem(params2, function(error, data2) {
                    if (error) {
                        console.error("Retry failed: " + error, error.stack);
                    } else {
                        var unprocessed2 = data2.UnprocessedItems;
                        if (Object.keys(unprocessed2).length === 0) {
                            console.log("Retry processed all items.");
                        } else {
                            console.error("Failed AGAIN to complete: " + util.inspect(unprocessed2, { showHidden: false, depth: null }));
                        }
                    }
                });
            }
        }
    });
}
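
For reference, this is the kind of recursive retry I am trying to build, written as a minimal sketch (untested): each attempt resubmits only the UnprocessedItems from the previous response and backs off exponentially with setTimeout. The function name batchWriteWithBackoff and the constants MAX_RETRIES and BASE_DELAY_MS are placeholders I made up, not from any library.

var AWS = require("aws-sdk");
var util = require("util");
var dynamodb = new AWS.DynamoDB();

var MAX_RETRIES = 5;      // assumed cap on retry attempts
var BASE_DELAY_MS = 100;  // assumed base delay for the exponential backoff

function batchWriteWithBackoff(requestItems, attempt, callback) {
    dynamodb.batchWriteItem({ RequestItems: requestItems }, function(err, data) {
        if (err) {
            return callback(err);
        }
        var unprocessed = data.UnprocessedItems;
        if (Object.keys(unprocessed).length === 0) {
            return callback(null); // everything was written
        }
        if (attempt >= MAX_RETRIES) {
            return callback(new Error("Gave up with unprocessed items: " +
                util.inspect(unprocessed, { showHidden: false, depth: null })));
        }
        // back off exponentially, then retry only the unprocessed items
        var delay = BASE_DELAY_MS * Math.pow(2, attempt);
        setTimeout(function() {
            batchWriteWithBackoff(unprocessed, attempt + 1, callback);
        }, delay);
    });
}

A call would then look like batchWriteWithBackoff(params.RequestItems, 0, done), where done is a callback that tells the Lambda handler the batch finished (or gave up). Is there an existing library that covers this, or is hand-rolling it like this the usual approach?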