
I have a Lambda function that takes a file as input and uploads it to AWS S3, but for some reason it does not work when deployed. Images uploaded through the deployed Lambda function do not load properly ("Picture is not viewable"), and the same happens for documents and other files: when opening them, they appear corrupted. In addition, uploading anything above 1 MB returns either an internal server error or a "request too big" error.

This is the Node.js script:

import { S3Client } from "@aws-sdk/client-s3";
import { Upload } from "@aws-sdk/lib-storage";
import * as crypto from "crypto";
import { middyfy } from "@libs/lambda";
import { _200, _400, _500 } from "@libs/Response";
import * as dotenv from "dotenv";
import { Handler } from "aws-lambda";
const multipart = require("aws-lambda-multipart-parser");

dotenv.config();

const region = process.env.AWS_REGION || "it is hidden not empty ";
// Initialize AWS S3 client using environment variables for credentials
const s3Client = new S3Client({
  region: region,
});

// Function to generate a random filename
const generateRandomFileName = () => {
  return crypto.randomBytes(8).toString("hex");
};

// Function to upload file data to S3 and return the URL
const uploadFileAndGetUrl = async (fileContent: Buffer | string, contentType: string) => {
  const randomFileName = generateRandomFileName();
  const Key = `uploads/${randomFileName}`;

  try {
    const upload = new Upload({
      client: s3Client,
      params: {
        Bucket: process.env.AWSBucket,
        Key,
        Body: fileContent,
        ContentType: contentType,
        // ACL: "public-read",
      },
    });

    const result = await upload.done();
    const fileUrl = `https://${process.env.AWSBucket}.s3.${region}.amazonaws.com/${result.Key}`;
    return fileUrl;
  } catch (error) {
    console.error("Error uploading file to S3:", error);
    throw new Error("File upload failed");
  }
};

export const uploadFile: Handler = async (event) => {
  try {
    const result = multipart.parse(event, true);

    if (!result.file || !result.file.contentType || !result.file.content) {
      return _400("Invalid request format.");
    }
    const contentType = result.file.contentType;
    const fileContent = result.file.content;

    const fileUrl = await uploadFileAndGetUrl(fileContent, contentType);

    return _200({ url: fileUrl });
  } catch (error) {
    console.error("Error handling file upload:", error);
    // Error properties are not enumerable, so spreading the error adds
    // nothing; include the message explicitly instead
    return _500({ message: "Internal server error.", error: error.message });
  }
};

export const main = middyfy(uploadFile);
And this is the function definition:

import { handlerPath } from "@libs/handler-resolver";

export default {
  handler: `${handlerPath(__dirname)}/handler.main`,
  timeout: 30,
  memorySize: 512,
  events: [
    {
      http: {
        method: "post",
        path: "/file/upload",
        cors: true,
      },
    },
  ],
};
And this is serverless.ts:


import type { AWS } from "@serverless/typescript";
import * as functions from "@functions/index";
import DynamoDBResources from "./serverless/DynamodbResources";

const DynamoTableNames = () => {
  const tableNames: { [key: string]: { Ref: string } } = {};
  Object.keys(DynamoDBResources).forEach((tableName) => {
    tableNames[tableName] = { Ref: tableName };
  });
  return tableNames;
};

const serverlessConfiguration: AWS = {
  service: "procurpal",
  useDotenv: true,
  frameworkVersion: "3",
  plugins: ["serverless-esbuild", "serverless-webpack", "serverless-offline"],
  provider: {
    name: "aws",
    runtime: "nodejs20.x",
    profile: "it is hidden not empty ",
    
    region: "ap-south-1",
    iamManagedPolicies: [
      "arn:aws:iam::aws:policy/AmazonDynamoDBFullAccess",
      "arn:aws:iam::aws:policy/AmazonS3FullAccess",
    ],
    apiGateway: {
      minimumCompressionSize: 1024,
      shouldStartNameWithService: true,
    },
    environment: {
      AWS_NODEJS_CONNECTION_REUSE_ENABLED: "1",
      NODE_OPTIONS: "--enable-source-maps --stack-trace-limit=1000",
      region: "${self:provider.region}",
      ...DynamoTableNames(),
      AWSBucket: "${self:custom.bucketName}",
      BaseURL: "${self:custom.BaseURL}",
    },
  },
  // import the function via paths
  functions: { ...functions },
  package: { individually: true },
  custom: {
    esbuild: {
      bundle: true,
      minify: false,
      sourcemap: true,
      exclude: ["aws-sdk"],
      target: "node20",
      define: { "require.resolve": undefined },
      platform: "node",
      concurrency: 10,
    },
    bucketName: "it is hidden not empty ",
    webpack: {
      webpackConfig: "./webpack.config.js",
      includeModules: true,
    },
    BaseURL: "it is hidden not empty ",
    WSSBaseURL: "it is hidden not empty ",
  },
  resources: {
    Resources: {
      ...DynamoDBResources,
    },
  },
};

module.exports = serverlessConfiguration;

Is there any way to solve this issue? I am using the Serverless Framework.

2 Comments
  • Nice question. #1 Create a reproducible sample without Dynamo and keep it simple. #2 Just to try, upload the same file using a simpler approach like gist.github.com/jrichardsz/… #3 Where does the file come from? Postman? Another application? (Commented May 30, 2024 at 12:44)
  • Also check this: stackoverflow.com/a/62819124/3957754 (Commented May 30, 2024 at 12:47)

1 Answer

This works for me. First, enable binary media types in API Gateway, so the multipart request body reaches the Lambda base64-encoded instead of being corrupted by re-encoding as UTF-8 text:

provider:
  apiGateway:
    binaryMediaTypes:
      - '*/*'
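
Since the question's config is written in TypeScript rather than YAML, the equivalent change in serverless.ts would look roughly like this (a sketch based on the question's existing provider block; binaryMediaTypes is the only new key):

  provider: {
    // ...
    apiGateway: {
      minimumCompressionSize: 1024,
      shouldStartNameWithService: true,
      // Treat every content type as binary so API Gateway base64-encodes
      // multipart bodies instead of mangling them as UTF-8 text
      binaryMediaTypes: ["*/*"],
    },
  },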

Then use the lambda-multipart-parser library to parse the multipart event:

import parser from 'lambda-multipart-parser'
import { S3Client, PutObjectCommand, ObjectCannedACL } from '@aws-sdk/client-s3'
import { v4 } from 'uuid'

// userId is assumed to come from the caller, e.g. an auth context
export async function handleUpload(event, userId) {
  const files = await parser.parse(event)

  return await uploadS3(userId, files.files[0])
}

export async function uploadS3(userId, file) {
  const { filename, encoding, contentType, content } = file
  const bucketName = process.env.STORAGE_NAME
  const imageId = v4() // random uuid used as the object name
  // Extract the file extension from the original filename
  const fileExtension = filename.split('.').pop()
  const filePath = `public/${userId}/${imageId}.${fileExtension}`
  const url = `https://${bucketName}.s3.amazonaws.com/${filePath}`

  const params = {
    Bucket: bucketName,
    Key: filePath,
    Body: content,
    ContentType: contentType,
    ContentDisposition: `inline; filename="${imageId}.${fileExtension}"`,
    ContentEncoding: encoding,
    ACL: ObjectCannedACL.public_read, // 'public-read'
  }

  console.dir({ upload_s3: params })

  const s3Client = new S3Client()
  const command = new PutObjectCommand(params)

  return await s3Client
    .send(command)
    .then(res => ({ imageId, filePath, url }))
    .catch(error => {
      console.error({ error_upload_file: error.message })
      throw error
    })
}
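
To verify the fix end to end, you can hit the deployed endpoint with a plain multipart request. A minimal sketch using the fetch/FormData built into Node 18+; the API Gateway URL and filename below are hypothetical placeholders:

// test-upload.mjs — run with: node test-upload.mjs (Node 18+)
import { readFile } from 'node:fs/promises'

const buffer = await readFile('./picture.png')
const form = new FormData()
// 'file' matches the field name the question's handler reads (result.file)
form.append('file', new Blob([buffer], { type: 'image/png' }), 'picture.png')

// Substitute your deployed stage URL here
const res = await fetch(
  'https://<api-id>.execute-api.ap-south-1.amazonaws.com/dev/file/upload',
  { method: 'POST', body: form }
)
console.log(res.status, await res.json())

If files round-trip correctly here but larger uploads still fail, keep in mind that API Gateway caps request payloads at 10 MB, Lambda at 6 MB for synchronous invocations, and base64 encoding inflates the body by roughly a third.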