JavaScript: Upload entire directory tree to S3 using the AWS SDK in Node.js

Disclaimer: this page is a translation of a popular StackOverflow question, provided under the CC BY-SA 4.0 license. If you use or share it, you must likewise follow CC BY-SA and attribute it to the original authors (not me). Original: http://stackoverflow.com/questions/27670051/

Upload entire directory tree to S3 using AWS sdk in node js

javascript, node.js, amazon-web-services, amazon-s3, aws-sdk

Question by LifeQuery

I currently upload single objects to S3 like so:

var options = {
        Bucket: bucket,            // destination bucket
        Key: s3Path,               // destination object key
        Body: body,
        ACL: s3FilePermissions
};

S3.putObject(options, function (err, data) {
    //console.log(data);
});

But when I have a large resources folder, for example, I use the AWS CLI tool.
I was wondering: is there a native way to do the same thing with the AWS SDK (upload entire folders to S3)?

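For reference, the AWS CLI invocation this refers to is typically a one-liner along these lines (bucket name and paths are placeholders):

aws s3 sync ./resources s3://your-bucket/resources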

Answer by Jim Chertkov

Old-school recursive way I whipped up in a hurry. Only uses core Node modules and the standard AWS SDK.

var AWS = require('aws-sdk');
var path = require("path");
var fs = require('fs');

const uploadDir = function(s3Path, bucketName) {

    let s3 = new AWS.S3();

    // Recursively walk the tree, invoking callback for every regular file.
    function walkSync(currentDirPath, callback) {
        fs.readdirSync(currentDirPath).forEach(function (name) {
            var filePath = path.join(currentDirPath, name);
            var stat = fs.statSync(filePath);
            if (stat.isFile()) {
                callback(filePath, stat);
            } else if (stat.isDirectory()) {
                walkSync(filePath, callback);
            }
        });
    }

    walkSync(s3Path, function(filePath, stat) {
        // Strip the local root (plus its trailing separator) to form the key.
        // Assumes s3Path has no trailing slash; note that on Windows,
        // path.join puts backslashes in the key.
        let bucketPath = filePath.substring(s3Path.length + 1);
        let params = {Bucket: bucketName, Key: bucketPath, Body: fs.readFileSync(filePath)};
        s3.putObject(params, function(err, data) {
            if (err) {
                console.log(err);
            } else {
                console.log('Successfully uploaded ' + bucketPath + ' to ' + bucketName);
            }
        });
    });
};

uploadDir("path to your folder", "your bucket name");

Special thanks to Ali from this post for helping get the filenames.

Answer by PersianIronwood

Here is a cleaned-up, debugged, working version of @Jim's solution:

function uploadArtifactsToS3() {
  // Assumes fs, path, a config object, and a configured AWS.S3 client (s3)
  // are already in scope.
  const artifactFolder = `logs/${config.log}/test-results`;
  const testResultsPath = './test-results';

  const walkSync = (currentDirPath, callback) => {
    fs.readdirSync(currentDirPath).forEach((name) => {
      const filePath = path.join(currentDirPath, name);
      const stat = fs.statSync(filePath);
      if (stat.isFile()) {
        callback(filePath, stat);
      } else if (stat.isDirectory()) {
        walkSync(filePath, callback);
      }
    });
  };

  walkSync(testResultsPath, async (filePath) => {
    // path.join drops the leading './', so length - 1 removes the
    // 'test-results/' prefix and keeps the key relative.
    let bucketPath = filePath.substring(testResultsPath.length - 1);
    let params = {
      Bucket: process.env.SOURCE_BUCKET,
      Key: `${artifactFolder}/${bucketPath}`,
      Body: fs.readFileSync(filePath)
    };
    try {
      await s3.putObject(params).promise();
      console.log(`Successfully uploaded ${bucketPath} to s3 bucket`);
    } catch (error) {
      console.error(`error in uploading ${bucketPath} to s3 bucket`);
      throw new Error(`error in uploading ${bucketPath} to s3 bucket`);
    }
  });
}
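
One caveat with the version above: walkSync invokes the async callback without awaiting it, so uploadArtifactsToS3 returns before the uploads actually finish. A minimal sketch (not part of the original answer) that collects the upload promises so a caller can await completion, assuming the same walkSync helper and setup as above:

async function uploadArtifactsToS3AndWait() {
  const artifactFolder = `logs/${config.log}/test-results`;
  const testResultsPath = './test-results';
  const uploads = [];

  // walkSync is the synchronous directory walker from the snippet above,
  // assumed hoisted to shared scope.
  walkSync(testResultsPath, (filePath) => {
    const bucketPath = filePath.substring(testResultsPath.length - 1);
    uploads.push(s3.putObject({
      Bucket: process.env.SOURCE_BUCKET,
      Key: `${artifactFolder}/${bucketPath}`,
      Body: fs.readFileSync(filePath)
    }).promise());
  });

  // Resolves once every upload has finished; rejects on the first failure.
  await Promise.all(uploads);
}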

Answer by Ryan Kelley

I was just contemplating this problem the other day, and was thinking something like this:

...
var async = require('async'),
    fs = require('fs'),
    path = require("path");

var directoryName = './test',
    directoryPath = path.resolve(directoryName);

// Note: readdirSync is not recursive, so this handles a flat folder only.
var files = fs.readdirSync(directoryPath);
async.map(files, function (f, cb) {
    var filePath = path.join(directoryPath, f);

    var options = {
        Bucket: bucket,
        Key: s3Path + '/' + f, // append the filename so each file gets its own key
        Body: fs.readFileSync(filePath),
        ACL: s3FilePermissions
    };

    S3.putObject(options, cb);

}, function (err, results) {
    if (err) console.error(err);
    console.log(results);
});
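
A possible refinement, not in the original answer: for folders with many files, async.mapLimit from the same async library caps how many uploads run at once instead of firing them all simultaneously:

// Same idea as above, but with at most 10 concurrent uploads in flight.
async.mapLimit(files, 10, function (f, cb) {
    var filePath = path.join(directoryPath, f);
    S3.putObject({
        Bucket: bucket,
        Key: s3Path + '/' + f,
        Body: fs.readFileSync(filePath),
        ACL: s3FilePermissions
    }, cb);
}, function (err, results) {
    if (err) console.error(err);
    console.log(results);
});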

Answer by unboundev

You might try node-s3-client.

UPDATE: Available on npm here

From the sync a directory to s3 docs:

UPDATE: Added client initialization code.

// node-s3-client is published on npm as "s3"
var s3 = require('s3');

var client = s3.createClient({
    maxAsyncS3: 20,     // this is the default
    s3RetryCount: 3,    // this is the default
    s3RetryDelay: 1000, // this is the default
    multipartUploadThreshold: 20971520, // this is the default (20 MB)
    multipartUploadSize: 15728640, // this is the default (15 MB)
    s3Options: {
      accessKeyId: "YOUR ACCESS KEY",
      secretAccessKey: "YOUR SECRET ACCESS KEY"
    }
});

var params = {
  localDir: "some/local/dir",
  deleteRemoved: true, // default false, whether to remove s3 objects
                       // that have no corresponding local file.

  s3Params: {
    Bucket: "s3 bucket name",
    Prefix: "some/remote/dir/",
    // other options supported by putObject, except Body and ContentLength.
    // See: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#putObject-property
  },
};
var uploader = client.uploadDir(params);
uploader.on('error', function(err) {
  console.error("unable to sync:", err.stack);
});
uploader.on('progress', function() {
  console.log("progress", uploader.progressAmount, uploader.progressTotal);
});
uploader.on('end', function() {
  console.log("done uploading");
});

Answer by Camilo Gomez

This works for me (you'll need to add the walk-sync package):

// Assumed setup for this snippet: the walk-sync package from npm, the core
// fs/path modules, plus a configured AWS.S3 client (s3) and a Logger in scope.
const fs = require('fs');
const Path = require('path');
const walkSync = require('walk-sync');

async function asyncForEach(array, callback) {
  for (let index = 0; index < array.length; index++) {
    await callback(array[index], index, array);
  }
}

const syncS3Directory = async (s3Path, endpoint) => {
  // walk-sync returns file paths relative to s3Path; directories are skipped.
  await asyncForEach(walkSync(s3Path, {directories: false}), async (file) => {
    const filePath = Path.join(s3Path, file);
    const fileContent = fs.readFileSync(filePath);
    const params = {
      Bucket: endpoint,
      Key: file,
      Body: fileContent,
      ContentType: "text/html",
    };
    let s3Upload = await s3.upload(params).promise();
    s3Upload ? undefined : Logger.error("Error synchronizing the bucket");
  });

  console.log("S3 bucket synchronized!");
};
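
A hypothetical invocation of the function above (bucket name and directory are placeholders; AWS credentials are assumed to come from the environment):

syncS3Directory("./build", "my-bucket-name")
  .then(() => console.log("done"))
  .catch((err) => console.error(err));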