
AWS Lambda reaches memory limit

I use this Lambda function to generate thumbnails on the fly, but I get the following error:

REPORT RequestId: 9369f148-2a85-11e7-a571-5f1e1818669e Duration: 188.18 ms Billed Duration: 200 ms Memory Size: 1536 MB Max Memory Used: 1536 MB 

AND...

RequestId: 9369f148-2a85-11e7-a571-5f1e1818669e Process exited before completing request

So I think I am hitting the maximum memory limit. Without the function "uploadRecentImage()" it works, but if I add a new size to imgVariants[] I also hit the memory limit. I think the way the function handles the imgVariants (the each loop) causes this, but I don't know how to improve it. I would be grateful for any help.

Here is my function:

// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm').subClass({
  imageMagick: true
}); // use ImageMagick
var util = require('util');

// configuration as code - add, modify, remove array elements as desired
var imgVariants = [
  {
    "SIZE": "Large1",
    "POSTFIX": "-l",
    "MAX_WIDTH": 6000,
    "MAX_HEIGHT": 6000,
    "SIZING_QUALITY": 75,
    "INTERLACE": "Line"
  },
  {
    "SIZE": "Large1",
    "POSTFIX": "-l",
    "MAX_WIDTH": 1280,
    "MAX_HEIGHT": 1280,
    "SIZING_QUALITY": 75,
    "INTERLACE": "Line"
  },
  {
    "SIZE": "Large1",
    "POSTFIX": "-l",
    "MAX_WIDTH": 500,
    "MAX_HEIGHT": 500,
    "SIZING_QUALITY": 75,
    "INTERLACE": "Line"
  },
  {
    "SIZE": "Large1",
    "POSTFIX": "-l",
    "MAX_WIDTH": 100,
    "MAX_HEIGHT": 100,
    "SIZING_QUALITY": 75,
    "INTERLACE": "Line"
  }
];
var DST_BUCKET_POSTFIX = "resized";



// get reference to S3 client
var s3 = new AWS.S3();

exports.handler = function (event, context) {
  // Read options from the event.
  console.log("Reading options from event:\n", util.inspect(event, {
    depth: 5
  }));
  var srcBucket = event.Records[0].s3.bucket.name;
  // Object key may have spaces or unicode non-ASCII characters.
  var srcKey = decodeURIComponent(event.Records[0].s3.object.key.replace(/\+/g, " "));
  // derive the file name and extension
  var srcFile = srcKey.match(/(.+)\.([^.]+)/);

  var srcName = srcFile[1];
  var scrExt = srcFile[2];
  // set the destination bucket
  var dstBucket = srcBucket + DST_BUCKET_POSTFIX;


  // make sure that source and destination are different buckets.
  if (srcBucket === dstBucket) {
    console.error("Destination bucket must be different from source bucket.");
    return;
  }

  if (!scrExt) {
    console.error('unable to derive file type extension from file key ' + srcKey);
    return;
  }

  if (scrExt != "jpg" && scrExt != "png") {
    console.log('skipping non-supported file type ' + srcKey + ' (must be jpg or png)');
    return;
  }

  function processImage(data, options, callback) {
    gm(data.Body).size(function (err, size) {

      var scalingFactor = Math.min(
        options.MAX_WIDTH / size.width,
        options.MAX_HEIGHT / size.height
      );
      var width = scalingFactor * size.width;
      var height = scalingFactor * size.height;

      this.resize(width, height)
        .quality(options.SIZING_QUALITY || 75)
        .interlace(options.INTERLACE || 'None')
        .toBuffer(scrExt, function (err, buffer) {
          if (err) {
            callback(err);

          } else {
            uploadImage(data.ContentType, buffer, options, callback);
            uploadRecentImage(data.ContentType, buffer, options, callback);
          }
        });
    });
  }

  function uploadImage(contentType, data, options, callback) {
    // Upload the transformed image to the destination S3 bucket.
    s3.putObject({
        Bucket: dstBucket,
        Key: options.MAX_WIDTH + '/' + srcName + '.' + scrExt,
        Body: data,
        ContentType: contentType
      },
      callback);
  }


  function uploadRecentImage(contentType, data, options, callback) {
    if(options.MAX_WIDTH == 500){
         s3.putObject({
            Bucket: dstBucket,
            Key: 'recent_optimized.' + scrExt,
            Body: data,
            ContentType: contentType
          },
          callback);
    }
    if(options.MAX_WIDTH == 100){
           s3.putObject({
            Bucket: dstBucket,
            Key: 'recent_thumb.' + scrExt,
            Body: data,
            ContentType: contentType
          },
          callback);
     }
  }


  // Download the image from S3 and process for each requested image variant.
  async.waterfall(
    [
      function download(next) {
          // Download the image from S3 into a buffer.
          s3.getObject({
              Bucket: srcBucket,
              Key: srcKey
            },
            next);
      },
      function processImages(data, next) {
          async.each(imgVariants, function (variant, next) {
            processImage(data, variant, next);
          }, next);

      }

    ],
    function (err) {
      if (err) {
        console.error(
          'Unable to resize ' + srcBucket + '/' + srcKey +
          ' and upload to ' + dstBucket +
          ' due to an error: ' + err
        );
      } else {
        console.log(
          'Successfully resized ' + srcBucket + '/' + srcKey +
          ' and uploaded to ' + dstBucket
        );
      }

      context.done();
    }
  );
};
  1. You can limit the number of parallel processImage calls:

Replace async.each(imgVariants,

with async.eachLimit(imgVariants, 2,

so that no more than two images are processed in parallel (see the sketch below).
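A minimal sketch of the changed processImages step in the waterfall (only the async call changes; everything else stays as in the code above):

      function processImages(data, next) {
          // eachLimit runs at most 2 variants at a time, so fewer
          // resized image buffers are held in memory simultaneously
          async.eachLimit(imgVariants, 2, function (variant, next) {
            processImage(data, variant, next);
          }, next);
      }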

  2. The script has a bug:

uploadImage(data.ContentType, buffer, options, callback);
uploadRecentImage(data.ContentType, buffer, options, callback);

This will call callback twice, which is not allowed. Only call the callback once (see the sketch below).
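One possible fix, as a sketch: run both uploads through async.parallel and hand the single callback to it. This assumes uploadRecentImage is also changed to invoke its callback even when it skips the upload, otherwise the parallel call never finishes for the other variants.

            // inside processImage's toBuffer handler, instead of the two direct calls:
            async.parallel([
              function (done) {
                uploadImage(data.ContentType, buffer, options, done);
              },
              function (done) {
                // uploadRecentImage must call done() even when it has nothing to upload
                uploadRecentImage(data.ContentType, buffer, options, done);
              }
            ], callback); // callback fires exactly once, after both uploads complete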

  3. The script has another bug: event.Records[0] means only the first record is processed. If you upload multiple images at the same time, some of them will be missed (see the sketch below).
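A sketch of looping over all records instead of only the first one; processRecord is a hypothetical helper that wraps the existing download/resize/upload logic for a single object:

exports.handler = function (event, context) {
  // handle every record in the event, not just event.Records[0]
  async.each(event.Records, function (record, done) {
    var srcBucket = record.s3.bucket.name;
    var srcKey = decodeURIComponent(record.s3.object.key.replace(/\+/g, " "));
    // run the existing waterfall for this one object and call done(err) when finished
    processRecord(srcBucket, srcKey, done);
  }, function (err) {
    if (err) {
      console.error('Failed to process one or more records: ' + err);
    }
    context.done();
  });
};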
