I have a Lambda function triggered by S3 upload. I want to read the contents of the uploaded file by using a readStreamer.
const shouldUseXray = false
export async function handleSplitSpreadsheet (event, context) {
context.callbackWaitsForEmptyEventLoop = false
if (shouldUseXray) {
const { captureAsyncFunc } = require('../../../lib/xray_promise')
await captureAsyncFunc('handleSplitSpreadsheet', () => {
log('event', { event })
processLargeSpreadsheet(event, context)
})
} else {
processLargeSpreadsheet(event, context)
}
}
export async function processLargeSpreadsheet (event, context) {
await Promise.all(event.Records.map(async (s3Record) => {
const bucketName = s3Record.s3.bucket.name
const s3ObjectMeta = s3Record.s3.object
const s3ObjectKey = s3ObjectMeta.key
const s3ObjectSize = s3ObjectMeta.size
try {
const s3ObjectStream = s3.getObject({
Bucket: bucketName,
Key: s3ObjectKey,
}).createReadStream()
s3ObjectStream.setEncoding('utf8')
s3ObjectStream.on('open', function (data) {
log('Open:', data)
})
s3ObjectStream.on('error', function (data) {
log('Error: ', data)
})
s3ObjectStream.on('data', function (data) {
log('Data: ', data)
})
s3ObjectStream.on('end', function (data) {
log('End: ', data)
})
} catch (err) {
log('Error: ', err)
}
}))
}
If I invoke this function locally using sls invoke --path event.json, being event.json the exact event that s3 produces, the file is printed on console.
If I upload a file to S3, nothing is printed. Even if I wrap it all in a try/catch no error is catched.
serverless.yml allows getObject for s3.
iamRoleStatements:
- Effect: "Allow"
Action:
- "s3:GetObject"
- "s3:PutObject"
- "s3:DeleteObject"
Resource: "arn:aws:s3:::my-s3-bucketr-foo"