
Node.js - How to convert chunks of data to a new Buffer?

In Node.js, I have chunks of data from a file upload that was saved in parts. I'd like to combine them with new Buffer() and then upload the result to Amazon S3.

This would work if there were only one chunk, but with multiple chunks I cannot figure out how to do new Buffer().
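For joining the chunks, Node's Buffer.concat takes an array of Buffer objects and returns a single contiguous Buffer. A minimal sketch of the idea, assuming each chunk is itself a Buffer (fileReadStream and chunks are placeholder names):

var chunks = [];

fileReadStream.on('data', function (chunk) {
    // each chunk emitted by the stream is a Buffer
    chunks.push(chunk);
});

fileReadStream.on('end', function () {
    // Buffer.concat joins the collected chunks into one contiguous Buffer
    var fileBuffer = Buffer.concat(chunks);
});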

Currently my solution is to write the chunks into a real file on my own server and then send the path of that file to Amazon S3.

How can I skip the file-creation step and send the buffer to Amazon S3 directly?
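For reference, the aws-sdk S3 client's putObject accepts a Buffer as the Body, so a combined Buffer can be sent without creating a file first. A minimal sketch, with placeholder credentials, bucket, and key:

var AWS = require('aws-sdk');

var s3 = new AWS.S3({ accessKeyId: 'YOUR_KEY', secretAccessKey: 'YOUR_SECRET' });

s3.putObject({
    Bucket: 'my-bucket',        // placeholder bucket name
    Key: 'uploads/file.bin',    // placeholder object key
    Body: fileBuffer            // the Buffer built from the chunks above
}, function (err, data) {
    if (err) return console.error('Upload error:', err);
    console.log('Uploaded, ETag:', data.ETag);
});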

I guess you need to use the streaming-s3 module:

var streamingS3 = require('streaming-s3');
var uploadFile = function (fileReadStream, awsHeader, cb) {

    //set options for the streaming module
    var options = {
        concurrentParts: 2,
        waitTime: 20000,
        retries: 2,
        maxPartSize: 10 * 1024 * 1024
    };
    // create the uploader; `aws` is assumed to be an object holding your credentials
    var uploader = new streamingS3(fileReadStream, aws.accessKey, aws.secretKey, awsHeader, options);
    // start uploading (important if no callback is provided)
    uploader.begin();

    // progress event: fires as bytes are read from the source stream
    uploader.on('data', function (bytesRead) {
        console.log(bytesRead, ' bytes read.');
    });

    uploader.on('part', function (number) {
        console.log('Part ', number, ' uploaded.');
    });

    // All parts uploaded, but upload not yet acknowledged.
    uploader.on('uploaded', function (stats) {
        console.log('Upload stats: ', stats);
    });

    // Upload complete and acknowledged by S3.
    uploader.on('finished', function (response, stats) {
        console.log(response);
        cb(null, response);
    });

    uploader.on('error', function (err) {
        console.log('Upload error: ', err);
        cb(err);
    });

};
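A hypothetical call site for the function above; the credentials object, file path, and header values are assumptions, not part of the original answer:

var fs = require('fs');

// streaming-s3 receives the credentials positionally inside uploadFile,
// so `aws` just needs to exist in the enclosing scope
var aws = { accessKey: 'YOUR_KEY', secretKey: 'YOUR_SECRET' };

var awsHeader = {
    Bucket: 'my-bucket',                     // placeholder bucket name
    Key: 'uploads/file.bin',                 // placeholder object key
    ContentType: 'application/octet-stream'
};

uploadFile(fs.createReadStream('/tmp/upload.part'), awsHeader, function (err, response) {
    if (err) return console.error('Upload failed:', err);
    console.log('Upload finished:', response);
});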
