'use strict';

// Storage backend for Google Cloud Storage. All functions follow the common
// Cloudron storage API shape (callback or EventEmitter based).
exports = module.exports = {
    upload: upload,
    download: download,
    downloadDir: downloadDir,
    copy: copy,
    remove: remove,
    removeDir: removeDir,

    backupDone: backupDone,

    testConfig: testConfig,

    // Used to mock GCS
    _mockInject: mockInject,
    _mockRestore: mockRestore
};
var assert = require ( 'assert' ) ,
GCS = require ( '@google-cloud/storage' ) ,
BackupsError = require ( '../backups.js' ) . BackupsError ,
2017-10-29 11:10:50 +01:00
chunk = require ( 'lodash.chunk' ) ,
2017-09-17 17:51:00 +02:00
debug = require ( 'debug' ) ( 'box:storage/gcs' ) ,
once = require ( 'once' ) ,
PassThrough = require ( 'stream' ) . PassThrough ,
2017-10-29 11:10:50 +01:00
EventEmitter = require ( 'events' ) ,
mkdirp = require ( 'mkdirp' ) ,
fs = require ( 'fs' ) ,
2017-09-17 17:51:00 +02:00
path = require ( 'path' ) ,
2017-10-29 11:10:50 +01:00
async = require ( 'async' ) ;
2017-09-17 17:51:00 +02:00
// test only: allow the test suite to swap out the GCS client module
var originalGCS;

function mockInject(mockedModule) {
    originalGCS = GCS;
    GCS = mockedModule;
}

function mockRestore() {
    GCS = originalGCS;
}
// internal only
// Builds the credentials object passed to the GCS client from a backup config.
// Only whitelisted fields are copied so stray config keys never reach the SDK.
function getBackupCredentials(backupConfig) {
    assert.strictEqual(typeof backupConfig, 'object');

    var result = {
        provider: backupConfig.provider,
        projectId: backupConfig.projectId,
        keyFilename: backupConfig.keyFilename
    };

    var creds = backupConfig.credentials;
    if (creds) {
        result.credentials = {
            client_email: creds.client_email,
            private_key: creds.private_key
        };
    }

    return result;
}
// Returns a GCS Bucket handle for the configured bucket.
function getBucket(apiConfig) {
    return GCS(getBackupCredentials(apiConfig)).bucket(apiConfig.bucket);
}
2017-10-29 11:10:50 +01:00
// storage api
function upload ( apiConfig , backupFilePath , sourceStream , callback ) {
2017-09-17 17:51:00 +02:00
assert . strictEqual ( typeof apiConfig , 'object' ) ;
2017-10-29 11:10:50 +01:00
assert . strictEqual ( typeof backupFilePath , 'string' ) ;
assert . strictEqual ( typeof sourceStream , 'object' ) ;
assert . strictEqual ( typeof callback , 'function' ) ;
function done ( error ) {
if ( error ) {
debug ( '[%s] upload: gcp upload error.' , backupFilePath , error ) ;
return callback ( new BackupsError ( BackupsError . EXTERNAL _ERROR , 'Error uploading ${backupFilePath}. Message: ${error.message} HTTP Code: ${error.code}' ) ) ;
}
2017-09-17 17:51:00 +02:00
2017-10-29 11:10:50 +01:00
callback ( null ) ;
}
2017-09-17 17:51:00 +02:00
2017-10-29 11:10:50 +01:00
return sourceStream . pipe (
getBucket ( apiConfig )
. file ( backupFilePath )
. createWriteStream ( { resumable : false } )
. on ( 'finish' , done )
. on ( 'error' , function ( e ) {
if ( e ) done ( e ) ;
} )
) ;
2017-09-17 17:51:00 +02:00
}
2017-10-29 11:10:50 +01:00
// Opens a read stream for the GCS object at backupFilePath.
// The stream is handed back through callback(null, stream); GCS errors are
// re-emitted on it as BackupsError (NOT_FOUND for HTTP 404, EXTERNAL_ERROR otherwise).
function download(apiConfig, backupFilePath, callback) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof backupFilePath, 'string');
    assert.strictEqual(typeof callback, 'function');

    var file = getBucket(apiConfig).file(backupFilePath);

    // pipe through a PassThrough so raw GCS errors can be translated before the caller sees them
    var ps = new PassThrough();

    file.createReadStream()
        .on('error', function (error) {
            if (error && error.code == 404) return ps.emit('error', new BackupsError(BackupsError.NOT_FOUND));

            debug('[%s] download: gcp stream error.', backupFilePath, error);
            ps.emit('error', new BackupsError(BackupsError.EXTERNAL_ERROR, error));
        })
        .pipe(ps);

    callback(null, ps);
}
2017-09-17 17:51:00 +02:00
2017-10-29 11:10:50 +01:00
// Pages through all objects under the backupFilePath prefix, invoking
// iteratorCallback(batch, done) per page. With batchSize 1 the iterator
// receives individual file objects; otherwise arrays of up to batchSize files.
// callback(error) fires once iteration finishes or fails.
function listDir(apiConfig, backupFilePath, batchSize, iteratorCallback, callback) {
    var bucket = getBucket(apiConfig);

    var query = { prefix: backupFilePath, maxResults: batchSize, autoPaginate: false };

    async.forever(function listAndDownload(foreverCallback) {
        bucket.getFiles(query, function (error, files, nextQuery) {
            if (error) {
                debug('remove: Failed to list %s. Not fatal.', error);
                return foreverCallback(error);
            }

            var arr = batchSize === 1 ? files : chunk(files, batchSize);
            if (arr.length === 0) return foreverCallback(new Error('Done')); // sentinel: normal termination

            debug('emitting ' + arr.length + ' files found');

            iteratorCallback(arr, function (error) {
                if (error) return foreverCallback(error);
                if (arr.length < batchSize) return foreverCallback(new Error('Done')); // short page => last page

                query = nextQuery; // continue from the pagination token
                foreverCallback();
            });
        });
    }, function (error) {
        // async.forever only stops via an "error"; map the Done sentinel back to success
        if (error.message === 'Done') return callback(null);

        callback(error);
    });
}
2017-10-29 11:10:50 +01:00
// Downloads every object under the backupFilePath prefix into destDir,
// preserving relative paths. Returns an EventEmitter that emits
// 'progress' (string) and finally 'done' (error or null).
function downloadDir(apiConfig, backupFilePath, destDir) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof backupFilePath, 'string');
    assert.strictEqual(typeof destDir, 'string');

    var events = new EventEmitter();
    var total = 0;

    function downloadFile(file, iteratorCallback) {
        var relativePath = path.relative(backupFilePath, file.name);

        // template literal (was a single-quoted string, so ${...} never interpolated)
        events.emit('progress', `Downloading ${relativePath}`);

        // 'error' and 'finish' can both fire on destStream; once() prevents a double callback
        var iteratorDone = once(iteratorCallback);

        mkdirp(path.dirname(path.join(destDir, relativePath)), function (error) {
            if (error) return iteratorDone(new BackupsError(BackupsError.EXTERNAL_ERROR, error.message));

            download(apiConfig, file.name, function (error, sourceStream) {
                if (error) return iteratorDone(error);

                var destStream = fs.createWriteStream(path.join(destDir, relativePath));

                destStream.on('open', function () {
                    sourceStream.pipe(destStream);
                });

                // sourceStream emits BackupsError already (see download()); without this
                // handler an emitted stream error would crash the process
                sourceStream.on('error', iteratorDone);

                destStream.on('error', function (error) {
                    iteratorDone(new BackupsError(BackupsError.EXTERNAL_ERROR, error.message));
                });

                destStream.on('finish', iteratorDone);
            });
        });
    }

    const concurrency = 10, batchSize = 1;

    listDir(apiConfig, backupFilePath, batchSize, function (objects, done) {
        total += objects.length;
        async.eachLimit(objects, concurrency, downloadFile, done);
    }, function (error) {
        events.emit('progress', `Downloaded ${total} files`);
        events.emit('done', error);
    });

    return events;
}
2017-10-29 11:10:50 +01:00
// Server-side copies every object under oldFilePath to the same relative
// location under newFilePath. Returns an EventEmitter emitting 'progress'
// (string) and finally 'done' (error or null).
function copy(apiConfig, oldFilePath, newFilePath) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof oldFilePath, 'string');
    assert.strictEqual(typeof newFilePath, 'string');

    var events = new EventEmitter(), retryCount = 0;

    function copyFile(file, iteratorCallback) {
        var relativePath = path.relative(oldFilePath, file.name);

        // template literal (was a single-quoted string, so ${...} never interpolated)
        events.emit('progress', `Copying (multipart) ${relativePath}`);

        file.copy(path.join(newFilePath, relativePath), function (error, newFile, apiResponse) {
            if (error && error.code == 404) return iteratorCallback(new BackupsError(BackupsError.NOT_FOUND, 'Old backup not found'));

            if (error) {
                debug('copyBackup: gcs copy error', error);
                return iteratorCallback(new BackupsError(BackupsError.EXTERNAL_ERROR, error.message));
            }

            iteratorCallback(null);
        });
    }

    const batchSize = 1;
    var total = 0, concurrency = 4;

    listDir(apiConfig, oldFilePath, batchSize, function (files, done) {
        total += files.length;

        // adaptive concurrency: ramp up on clean batches, back off otherwise.
        // NOTE(review): retryCount is never incremented anywhere visible here,
        // so the back-off branch looks unreachable — confirm against callers.
        if (retryCount === 0) concurrency = Math.min(concurrency + 1, 10);
        else concurrency = Math.max(concurrency - 1, 5);
        events.emit('progress', `${retryCount} errors. concurrency set to ${concurrency}`);
        retryCount = 0;

        async.eachLimit(files, concurrency, copyFile, done);
    }, function (error) {
        events.emit('progress', `Copied ${total} files`);
        events.emit('done', error);
    });

    return events;
}
// Deletes a single object. Best-effort by design: delete failures are only
// logged and callback always receives null.
function remove(apiConfig, filename, callback) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof filename, 'string');
    assert.strictEqual(typeof callback, 'function');

    var file = getBucket(apiConfig).file(filename);

    file.delete(function (error) {
        if (error) debug('removeBackups: Unable to remove %s (%s). Not fatal.', filename, error.message);
        else debug('removeBackups: Deleted: %s', filename);

        callback(null);
    });
}
2017-10-29 11:10:50 +01:00
// Deletes every object under the pathPrefix. Returns an EventEmitter
// emitting 'progress' (string) and finally 'done' (error or null).
function removeDir(apiConfig, pathPrefix) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof pathPrefix, 'string');

    var events = new EventEmitter(), retryCount = 0;

    const batchSize = 1;
    var total = 0, concurrency = 4;

    // BUGFIX: was listDir(apiConfig, oldFilePath, ...) — oldFilePath is not
    // defined in this function (copy/paste from copy()); use pathPrefix.
    listDir(apiConfig, pathPrefix, batchSize, function (files, done) {
        total += files.length;

        // adaptive concurrency: ramp up on clean batches, back off otherwise.
        // NOTE(review): retryCount is never incremented anywhere visible here — confirm.
        if (retryCount === 0) concurrency = Math.min(concurrency + 1, 10);
        else concurrency = Math.max(concurrency - 1, 5);
        events.emit('progress', `${retryCount} errors. concurrency set to ${concurrency}`);
        retryCount = 0;

        // listDir with batchSize 1 yields GCS File objects, but remove()
        // asserts a string filename — pass file.name, not the file object
        async.eachLimit(files, concurrency, function (file, iteratorCallback) {
            remove(apiConfig, file.name, iteratorCallback);
        }, done);
    }, function (error) {
        events.emit('progress', `Deleted ${total} files`);
        events.emit('done', error);
    });

    return events;
}
// Validates the backup config shape, then verifies the credentials actually
// work by uploading and deleting a small test object.
// callback(error) — BAD_FIELD for config problems, EXTERNAL_ERROR for GCS failures.
function testConfig(apiConfig, callback) {
    assert.strictEqual(typeof apiConfig, 'object');
    assert.strictEqual(typeof callback, 'function');

    if (typeof apiConfig.projectId !== 'string') return callback(new BackupsError(BackupsError.BAD_FIELD, 'projectId must be a string'));

    // either a key file path or inline credentials must be provided
    if (typeof apiConfig.keyFilename !== 'string') {
        if (typeof apiConfig.credentials !== 'object') return callback(new BackupsError(BackupsError.BAD_FIELD, 'credentials must be an object'));
        if (typeof apiConfig.credentials.client_email !== 'string') return callback(new BackupsError(BackupsError.BAD_FIELD, 'credentials.client_email must be a string'));
        if (typeof apiConfig.credentials.private_key !== 'string') return callback(new BackupsError(BackupsError.BAD_FIELD, 'credentials.private_key must be a string'));
    }

    if (typeof apiConfig.bucket !== 'string') return callback(new BackupsError(BackupsError.BAD_FIELD, 'bucket must be a string'));
    if (typeof apiConfig.prefix !== 'string') return callback(new BackupsError(BackupsError.BAD_FIELD, 'prefix must be a string'));

    callback = once(callback); // 'error' and 'finish' can both fire

    // attempt to upload and delete a file with new credentials
    var bucket = getBucket(apiConfig);
    var testFile = bucket.file(path.join(apiConfig.prefix, 'cloudron-testfile'));

    // BUGFIX: success/error were previously observed on the source PassThrough
    // ('end' fires once the data is *read*, before the upload completes, and
    // upload errors never surface there). Listen on the GCS write stream:
    // 'finish' means the upload is done, 'error' carries upload failures.
    var uploadStream = testFile.createWriteStream({ resumable: false })
        .on('error', function (error) {
            debug('failed uploading cloudron-testfile', error);
            callback(new BackupsError(BackupsError.EXTERNAL_ERROR, error.message));
        })
        .on('finish', function () {
            debug('uploaded cloudron-testfile');

            testFile.delete(function (error) {
                if (error) return callback(new BackupsError(BackupsError.EXTERNAL_ERROR, error.message));

                debug('deleted cloudron-testfile');
                callback();
            });
        });

    var testfileStream = new PassThrough();
    testfileStream.end("testfilecontents");
    testfileStream.pipe(uploadStream);
}
// GCS requires no finalization step after a backup run; validate the
// arguments and acknowledge immediately.
function backupDone(backupId, appBackupIds, callback) {
    assert.strictEqual(typeof backupId, 'string');
    assert(Array.isArray(appBackupIds));
    assert.strictEqual(typeof callback, 'function');

    callback();
}