
How to upload an image to Google Cloud Storage from an image url in Node?

Given an image URL, how can I upload that image to Google Cloud Storage for image processing with Node.js?

It's a two-step process:

  • Download the file locally using request or fetch.
  • Upload it to GCS with the official library.

var fs = require('fs');
var gcloud = require('gcloud');

// Authenticating on a per-API-basis. You don't need to do this if you auth on a
// global basis (see Authentication section above).
var gcs = gcloud.storage({
  projectId: 'my-project',
  keyFilename: '/path/to/keyfile.json'
});

// Create a new bucket.
gcs.createBucket('my-new-bucket', function(err, bucket) {
  if (!err) {
    // "my-new-bucket" was successfully created.
  }
});

// Reference an existing bucket.
var bucket = gcs.bucket('my-existing-bucket');

var localReadStream = fs.createReadStream('/photos/zoo/zebra.jpg');
var remoteWriteStream = bucket.file('zebra.jpg').createWriteStream();

localReadStream.pipe(remoteWriteStream)
  .on('error', function(err) {})
  .on('finish', function() {
    // The file upload is complete.
  });

If you would like to save the file as a JPEG image, you will need to configure remoteWriteStream with custom metadata:

var image = bucket.file('zebra.jpg');
localReadStream.pipe(image.createWriteStream({
    metadata: {
      contentType: 'image/jpeg',
      metadata: {
        custom: 'metadata'
      }
    }
}))

I found this while digging through this documentation.
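For step 1 (downloading the remote image to a local file before piping it to GCS), a minimal sketch, assuming Node 18+ for the built-in fetch; the helper name and paths are placeholders:

const fs = require('fs/promises');

// Hypothetical helper: fetch the image at `url` and write it to `localPath`.
async function downloadImage(url, localPath) {
  const res = await fetch(url);
  if (!res.ok) {
    throw new Error(`Download failed: ${res.status} ${res.statusText}`);
  }
  // Buffer the response body and write it to disk.
  const buffer = Buffer.from(await res.arrayBuffer());
  await fs.writeFile(localPath, buffer);
}

// e.g. await downloadImage('https://example.com/zebra.jpg', '/photos/zoo/zebra.jpg');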

To add onto Yevgen Safronov's answer, we can pipe the request into the write stream without explicitly downloading the image into the local file system.

const request = require('request');
const storage = require('@google-cloud/storage')();

function saveToStorage(attachmentUrl, bucketName, objectName) {
  const req = request(attachmentUrl);
  req.pause();
  req.on('response', res => {

    // Don't set up the pipe to the write stream unless the status is ok.
    // See https://stackoverflow.com/a/26163128/2669960 for details.
    if (res.statusCode !== 200) {
      return;
    }

    const writeStream = storage.bucket(bucketName).file(objectName)
      .createWriteStream({

        // Tweak the config options as desired.
        gzip: true,
        public: true,
        metadata: {
          contentType: res.headers['content-type']
        }
      });
    req.pipe(writeStream)
      .on('finish', () => console.log('saved'))
      .on('error', err => {
        writeStream.end();
        console.error(err);
      });

    // Resume only when the pipe is set up.
    req.resume();
  });
  req.on('error', err => console.error(err));
}
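Note that the request package has since been deprecated; a similar sketch that streams the download straight into Cloud Storage without it, assuming Node 18+ (built-in fetch and stream/promises) and the current @google-cloud/storage client:

const { Storage } = require('@google-cloud/storage');
const { pipeline } = require('stream/promises');
const { Readable } = require('stream');

const storage = new Storage();

async function saveToStorage(attachmentUrl, bucketName, objectName) {
  const res = await fetch(attachmentUrl);

  // Only upload if the download succeeded.
  if (!res.ok) {
    throw new Error(`Request to ${attachmentUrl} failed with status ${res.status}`);
  }

  const writeStream = storage.bucket(bucketName).file(objectName).createWriteStream({
    gzip: true,
    metadata: { contentType: res.headers.get('content-type') },
  });

  // Convert the web ReadableStream into a Node stream and pipe it to Cloud Storage.
  await pipeline(Readable.fromWeb(res.body), writeStream);
}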

In case you are handling image uploads from a remote URL, and following the latest library shown in the Google docs: instead of saving the image to disk first, we can send it directly to storage.

function sendUploadUrlToGCS(req, res, next) {
  if (!req.body.url) {
    return next();
  }

  var gcsname = Date.now() + '_name.jpg';
  var file = bucket.file(gcsname);

  return request({url: <remote-image-url>, encoding: null}, function(err, response, buffer) {
    req.file = {};
    var stream = file.createWriteStream({
      metadata: {
        contentType: response.headers['content-type']
      }
    });

    stream.on('error', function(err) {
       req.file.cloudStorageError = err;
       console.log(err);
       next(err);
    });

    stream.on('finish', function() {
      req.file.cloudStorageObject = gcsname;
      req.file.cloudStoragePublicUrl = getPublicUrl(gcsname);
      next();
    });

    stream.end(buffer);
  });
}
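The snippet above assumes that bucket and a getPublicUrl helper are defined elsewhere in the app; a minimal sketch of that helper, assuming the object is publicly readable and bucket is the same Bucket instance used in sendUploadUrlToGCS:

// Hypothetical helper: builds the public URL for an object in the bucket.
function getPublicUrl(filename) {
  return `https://storage.googleapis.com/${bucket.name}/${filename}`;
}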

I used the request library and the storage library to do it. The code below is in TypeScript. Regards

import * as gcs from '@google-cloud/storage';
import {Storage} from '@google-cloud/storage';
import request from 'request';

private _storage: Storage;

constructor() {
    // example of json path: ../../config/google-cloud/google-storage.json
    this._storage = new gcs.Storage({keyFilename: 'JSON Config Path'});
}

public saveFileFromUrl(path: string): Promise<string> {
    return new Promise<any>((resolve, reject) => {
        request({url: path, encoding: null}, (err, res, buffer) => {
            if (err || res.statusCode !== 200) {
                return reject(err);
            }
            const bucketName = 'bucket_name';
            const destination = `bucket location and file name`; // example: 'test/image.jpg'
            const file = this._storage.bucket(bucketName).file(destination);
            // put the image public
            file.save(buffer, {public: true, gzip: true}).then(data => {
                resolve(`${bucketName}/${destination}`)
            }).catch(err => {
                reject(err);
            });
        });
    })
}
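A hypothetical usage of the method above, assuming the snippet lives in a class named ImageStorageService (the class name is an assumption):

const service = new ImageStorageService();

service.saveFileFromUrl('https://example.com/image.jpg')
    .then(location => console.log(`Saved as ${location}`)) // e.g. "bucket_name/test/image.jpg"
    .catch(err => console.error(err));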

Leaving my solution here for people who want to use:

  • firebase admin SDK
  • axios and async await
/**
 * TODO(developer): Uncomment the following lines before running the sample.
 * specify an existing bucket.
 * specify any url pointing to an file.
 */
// const bucketName = 'liist-prod-nodejs-backend';
// const url = "https://images.unsplash.com/photo-1601191906024-54b4e490abae?crop=entropy&cs=tinysrgb&fit=crop&fm=jpg&h=800&ixlib=rb-1.2.1&q=80&w=800";

// 1. load required packages
const axios = require('axios');
const crypto = require('crypto');
const httpAdapter = require('axios/lib/adapters/http');

// 2. setup firebase admin SDK + storage bucket
const admin = require('firebase-admin');
const serviceAccount = require('path/to/key.json');
const app = admin.initializeApp({ 
  credential: admin.credential.cert(serviceAccount),
  storageBucket: bucketName
});
const storage = app.storage();
const bucket = storage.bucket(bucketName);

// 3. helper function
function randomToken(size = 20) { // maxsize is 128
  return crypto.randomBytes(64).toString('hex').substr(0, size)
}

// 4. async function to actually upload image from link to firebase storage bucket
async function uploadToStorage(bucket, url) {
  // define filename, folder and access token
  const accessToken = randomToken();
  const bucketName = bucket.name;
  // note: naive extension detection; URLs with query strings (like the sample above) need stripping first
  const fileEnding = url.split('.').pop();
  const folder = 'defaultFolder';
  const filename = `myTargetFile`;
  const fullPath = `${folder}/${filename}.${fileEnding}`;
  const fullPathUrlEncoded = `${folder}%2F${filename}.${fileEnding}`;

  // axios request to get file stream
  const axiosResponse = await axios.get(url, { responseType: 'stream', adapter: httpAdapter });
  if (axiosResponse.status !== 200) {
    throw new Error(`axios request to ${url} failed.`);
  } 
  // create file + write stream (=> tweak options if needed)
  const file = bucket.file(fullPath);
  const output = file.createWriteStream({
    gzip: true,
    // if public is true, the file can be found here: `https://storage.googleapis.com/${bucketName}/${fullPath}`;
    public: false, // media token needed, more restricted and secure
    metadata: {
      contentType: axiosResponse.headers['content-type'],
      metadata: {
        firebaseStorageDownloadTokens: accessToken, // define access token
      },
    }
  });
  // wrap the stream in a promise
  // => resolves to the public url
  const stream = axiosResponse.data;
  const streamPromise = new Promise(function (resolve, reject) {
    stream.on('data', (chunk) => {
      output.write(chunk); // chunk is already a Buffer, no conversion needed
    });
    stream.on('end', () => {
      output.end();
      const publicUrl = `https://firebasestorage.googleapis.com/v0/b/${bucketName}/o/${fullPathUrlEncoded}?alt=media&token=${accessToken}`;
      resolve(publicUrl);
    });
    stream.on('error', (err) => {
      output.end();
      reject(err);
    })
  });
  return await streamPromise;
}

// 5. upload to storage
console.log("uploading file to storage ...");
(async () => {
  const publicUrl = await uploadToStorage(bucket, url);
  console.log(publicUrl);
})(); 
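As a design note, the manual 'data'/'end'/'error' wiring inside streamPromise can be replaced with stream.pipeline; a sketch assuming Node 15+ (for stream/promises), reusing the stream and output variables from above and computing publicUrl up front (it only depends on bucketName, fullPathUrlEncoded and accessToken, which are all known before the upload):

const { pipeline } = require('stream/promises');

// Inside uploadToStorage: pipe the axios response stream straight into the
// Cloud Storage write stream; pipeline resolves once the upload finishes and
// rejects on errors from either side.
const publicUrl = `https://firebasestorage.googleapis.com/v0/b/${bucketName}/o/${fullPathUrlEncoded}?alt=media&token=${accessToken}`;
await pipeline(stream, output);
return publicUrl;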

utility.js

 // google cloud storage
 let fs                = require('fs');
 const { Storage }     = require('@google-cloud/storage');
 var credentials       = require('../../yourProjectCredentials.json');
 const storage         = new Storage({credentials: credentials});

 const bucketName      = 'pictures';

 const uuidV1          = require('uuid/v1');
 var dir               = './images';



/**
 * Store Image to GCP Bucket
 * @param { picture }
 * @returns { picture_url }
 */
class ApplicationUtility{

    constructor(){}

    /**
     * Store Image to GCP Bucket
     * @param { picture }
     * @returns { picture_url }
     */

    async storeImageTocloud (picture) {

        let fileNamePic = uuidV1();
        let path2 = fileNamePic + "_picture.jpg";
        let path = dir + "/" + path2;
        var bitmap = Buffer.from(picture, 'base64');
        // writeFileSync takes no callback; it throws on error
        fs.writeFileSync(path, bitmap, { flag: 'w' });

        // await the upload so the URL is not returned before the object exists
        await storage
            .bucket(bucketName)
            .upload(path);
        console.log(`${fileNamePic} uploaded to ${bucketName}.`);

        let url = `https://storage.googleapis.com/${bucketName}/${path2}`;
        return url;

    }

}


module.exports = ApplicationUtility;
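Note that the URL returned by storeImageTocloud only resolves if the object is publicly readable; a minimal sketch that makes the uploaded object public (assuming uniform bucket-level access is not enforced on the bucket), placed after the awaited upload inside storeImageTocloud:

// Make the uploaded object publicly readable so the returned URL works.
await storage.bucket(bucketName).file(path2).makePublic();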

index.js

  const ImagesStorage              = require('./utility');
  const imagesStorage              = new ImagesStorage();

  // call (inside an async request handler where `body` is available)
  let picture = body.pic;
  let url = await imagesStorage.storeImageTocloud(picture);
  console.log(url);
