I am working with Azure Functions in Node.js. Originally I wanted to download an image as a buffer, add it to my zip file (with adm-zip), and then upload it again. However, the image could not be displayed after unzipping. To download the image I used createReadStream,
then allocated a buffer from the stream, added it to the zip, saved the zip, and uploaded it with createBlobFromText.
. Everything else here works fine, I tried adding a test.txt
and that was added normally. So there must be a problem with my image stream.
So I tried just downloading the image as a buffer and directly uploading it again to my storage. As a result, the image is broken and cannot be opened. To download the image I use createReadStream.
Then I allocate a buffer from the stream and upload that buffer with createBlockBlobFromText.
So my question is, am I handling streams with images wrong?
Is there a better way to get the image and continue working with it?
Thanks!
Here is my code with the zip:
// azure-storage SDK for blob access; adm-zip for in-memory zip editing.
const storage = require('azure-storage');
// NOTE(review): credentials are hard-coded placeholders — in production move
// these to Function App settings / environment variables.
const STORAGE_ACCOUNT_NAME = 'something';
const ACCOUNT_ACCESS_KEY = 'somekey';
let AdmZip = require('adm-zip');
// Shared blob service client used by the handler and helper functions below.
const blobService = storage.createBlobService(STORAGE_ACCOUNT_NAME, ACCOUNT_ACCESS_KEY);
module.exports = function (context, req) {
context.log('JavaScript HTTP trigger function processed a request.');
var container = 'container';
var file = 'powerpoint.pptx';
var data = [],
dataLen = 0;
var stream = blobService.createReadStream(container, file);
stream.on('data', (chunk) => {
context.log(`Received ${chunk.length} bytes of data.`);
data.push(chunk);
dataLen += chunk.length;
});
stream.on('end', () => {
getImageAsBuffer(container, 'test-image.png').then((imgBuf) => {
var buf = Buffer.alloc(dataLen);
for (var i = 0, len = data.length, pos = 0; i < len; i++) {
data[i].copy(buf, pos);
pos += data[i].length;
}
var zip = new AdmZip(buf);
zip.deleteFile("/ppt/media/image1.png");
zip.addFile("/ppt/media/image1.png", Buffer.alloc(imgBuf.dataLen, imgBuf.data));
var powerpoint = zip.toBuffer();
uploadRemoteFile(powerpoint, container, 'new-powerpoint.pptx').then((res) => {
context.log('res', res);
context.done();
})
})
});
};
/**
 * Uploads a binary Buffer to blob storage.
 *
 * BUG FIX: createBlockBlobFromText treats the payload as text, which corrupts
 * binary content (images, pptx). Wrap the buffer in a PassThrough stream and
 * use createBlockBlobFromStream so the bytes are uploaded verbatim.
 *
 * @param {Buffer} buffer         content to upload
 * @param {string} containerName  target container
 * @param {string} filename       blob name
 * @returns {Promise<{message: string}>} resolves on successful upload
 */
function uploadRemoteFile(buffer, containerName, filename) {
    // Local require keeps this fix self-contained.
    const { PassThrough } = require('stream');
    return new Promise((resolve, reject) => {
        const bufferStream = new PassThrough({
            highWaterMark: buffer.length
        });
        bufferStream.end(buffer);
        blobService.createBlockBlobFromStream(containerName, filename, bufferStream, buffer.length, function (err) {
            if (err) {
                reject(err);
            } else {
                resolve({
                    message: 'resolved successfully'
                });
            }
        });
    });
}
/**
 * Downloads a blob and resolves with its raw chunks.
 *
 * Return shape is kept as { data: Buffer[], dataLen: number } so the existing
 * caller (which Buffer.concat's the chunks itself) keeps working.
 *
 * @param {string} container  source container
 * @param {string} file       blob name
 * @returns {Promise<{data: Buffer[], dataLen: number}>}
 */
function getImageAsBuffer(container, file) {
    return new Promise((resolve, reject) => {
        var imgData = [],
            imgLen = 0;
        var imgStream = blobService.createReadStream(container, file);
        imgStream.on('data', (chunk) => {
            imgData.push(chunk);
            imgLen += chunk.length;
        });
        // BUG FIX: without an 'error' handler the promise never settles on a
        // failed download and the function invocation hangs until timeout.
        imgStream.on('error', reject);
        imgStream.on('end', () => {
            resolve({
                "data": imgData,
                "dataLen": imgLen
            });
        });
    });
}
One issue I see in your code is that you're using the createBlockBlobFromText
method to upload binary content; that method treats the payload as text and can corrupt binary data such as images. You will need to use the createBlockBlobFromStream
method instead.
You can do something like the following:
const stream = require('stream');
/**
 * Uploads a Buffer as a block blob by piping it through a PassThrough stream,
 * so the SDK handles the payload as binary rather than text.
 */
function uploadRemoteFile(buffer, containerName, filename) {
    return new Promise((resolve, reject) => {
        // One-shot readable stream sized to the whole payload.
        const bufferStream = new stream.PassThrough({ highWaterMark: buffer.length });
        bufferStream.end(buffer);
        const onUploaded = (err) => {
            if (err) {
                reject(err);
                return;
            }
            resolve({ message: 'resolved successfully' });
        };
        blobService.createBlockBlobFromStream(containerName, filename, bufferStream, buffer.length, onUploaded);
    });
}
The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address. For any questions, please contact: yoyou2525@163.com.