rsync: first try with one file. if that works, go faster
This works better if encryption fails: instead of downloading many files and erroring non-stop, we fail fast on the first file.
This commit is contained in:
@@ -210,7 +210,7 @@ async function downloadDir(backupConfig, backupFilePath, dataLayout, progressCal
|
||||
const [mkdirError] = await safe(fs.promises.mkdir(path.dirname(destFilePath), { recursive: true }));
|
||||
if (mkdirError) throw new BoxError(BoxError.FS_ERROR, mkdirError.message);
|
||||
|
||||
await promiseRetry({ times: 5, interval: 20000 }, async function () {
|
||||
await promiseRetry({ times: 3, interval: 20000 }, async function () {
|
||||
const [downloadError, sourceStream] = await safe(storage.api(backupConfig.provider).download(backupConfig, entry.fullPath));
|
||||
if (downloadError) {
|
||||
progressCallback({ message: `Download ${entry.fullPath} to ${destFilePath} errored: ${downloadError.message}` });
|
||||
@@ -250,7 +250,7 @@ async function downloadDir(backupConfig, backupFilePath, dataLayout, progressCal
|
||||
const concurrency = backupConfig.limits?.downloadConcurrency || (backupConfig.provider === 's3' ? 30 : 10);
|
||||
let marker = null;
|
||||
while (true) {
|
||||
const batch = await storage.api(backupConfig.provider).listDir(backupConfig, backupFilePath, 1000, marker);
|
||||
const batch = await storage.api(backupConfig.provider).listDir(backupConfig, backupFilePath, marker === null ? 1 : 1000, marker); // try with one file first. if that works out, we continue faster
|
||||
await async.eachLimit(batch.entries, concurrency, downloadFile);
|
||||
if (!batch.marker) break;
|
||||
marker = batch.marker;
|
||||
|
||||
Reference in New Issue
Block a user