aws: const correctness

This commit is contained in:
Girish Ramakrishnan
2021-06-23 14:30:00 -07:00
parent 02263e8921
commit 5129465e59
2 changed files with 16 additions and 16 deletions
+15 -13
View File
@@ -25,7 +25,7 @@ exports = module.exports = {
const assert = require('assert'),
async = require('async'),
AWS = require('aws-sdk'),
AwsSdk = require('aws-sdk'),
BoxError = require('../boxerror.js'),
chunk = require('lodash.chunk'),
constants = require('../constants.js'),
@@ -38,15 +38,17 @@ const assert = require('assert'),
S3BlockReadStream = require('s3-block-read-stream'),
_ = require('underscore');
// Mutable handle to the AWS SDK module. Kept as `let` (while the require
// bindings stay const) so tests can swap the whole SDK in and out — this is
// the point of the "const correctness" change in this commit.
let aws = AwsSdk;

// test only
var originalAWS;

// Replace the SDK with a mock for tests. Saves the current handle so
// mockRestore() can undo the injection.
// Note: extraction of this diff had merged the removed lines
// (`originalAWS = AWS; AWS = mock;`) with the added ones; only the
// post-commit assignments below are correct.
function mockInject(mock) {
    originalAWS = aws;
    aws = mock;
}

// Restore the real SDK saved by the last mockInject() call. test only.
function mockRestore() {
    aws = originalAWS;
}
function S3_NOT_FOUND(error) {
@@ -111,13 +113,13 @@ function upload(apiConfig, backupFilePath, sourceStream, callback) {
getS3Config(apiConfig, function (error, credentials) {
if (error) return callback(error);
var params = {
const params = {
Bucket: apiConfig.bucket,
Key: backupFilePath,
Body: sourceStream
};
var s3 = new AWS.S3(credentials);
const s3 = new aws.S3(credentials);
// s3.upload automatically does a multi-part upload. we set queueSize to 3 to reduce memory usage
// uploader will buffer at most queueSize * partSize bytes into memory at any given time.
@@ -146,7 +148,7 @@ function exists(apiConfig, backupFilePath, callback) {
getS3Config(apiConfig, function (error, credentials) {
if (error) return callback(error);
const s3 = new AWS.S3(_.omit(credentials, 'retryDelayOptions', 'maxRetries'));
const s3 = new aws.S3(_.omit(credentials, 'retryDelayOptions', 'maxRetries'));
if (!backupFilePath.endsWith('/')) { // check for file
const params = {
@@ -190,7 +192,7 @@ function download(apiConfig, backupFilePath, callback) {
Key: backupFilePath
};
var s3 = new AWS.S3(credentials);
var s3 = new aws.S3(credentials);
var ps = new PassThrough();
var multipartDownload = new S3BlockReadStream(s3, params, { blockSize: 64 * 1024 * 1024 /*, logCallback: debug */ });
@@ -220,7 +222,7 @@ function listDir(apiConfig, dir, batchSize, iteratorCallback, callback) {
getS3Config(apiConfig, function (error, credentials) {
if (error) return callback(error);
var s3 = new AWS.S3(credentials);
var s3 = new aws.S3(credentials);
var listParams = {
Bucket: apiConfig.bucket,
Prefix: dir,
@@ -277,7 +279,7 @@ function copy(apiConfig, oldFilePath, newFilePath) {
getS3Config(apiConfig, function (error, credentials) {
if (error) return iteratorCallback(error);
var s3 = new AWS.S3(credentials);
var s3 = new aws.S3(credentials);
var relativePath = path.relative(oldFilePath, entry.fullPath);
function done(error) {
@@ -406,7 +408,7 @@ function remove(apiConfig, filename, callback) {
getS3Config(apiConfig, function (error, credentials) {
if (error) return callback(error);
var s3 = new AWS.S3(credentials);
var s3 = new aws.S3(credentials);
var deleteParams = {
Bucket: apiConfig.bucket,
@@ -434,7 +436,7 @@ function removeDir(apiConfig, pathPrefix) {
getS3Config(apiConfig, function (error, credentials) {
if (error) return process.nextTick(() => events.emit('done', error));
var s3 = new AWS.S3(credentials);
var s3 = new aws.S3(credentials);
listDir(apiConfig, pathPrefix, 1000, function listDirIterator(entries, done) {
total += entries.length;
@@ -503,7 +505,7 @@ function testConfig(apiConfig, callback) {
Body: 'testcontent'
};
var s3 = new AWS.S3(_.omit(credentials, 'retryDelayOptions', 'maxRetries'));
var s3 = new aws.S3(_.omit(credentials, 'retryDelayOptions', 'maxRetries'));
s3.putObject(params, function (error) {
if (error) return callback(new BoxError(BoxError.EXTERNAL_ERROR, `Error put object cloudron-testfile. Message: ${error.message} HTTP Code: ${error.code}`));