test: add EnsureFileSizeStream test
This commit is contained in:
@@ -1,11 +1,5 @@
|
||||
'use strict';
|
||||
|
||||
exports = module.exports = {
|
||||
getBackupFilePath,
|
||||
download,
|
||||
upload
|
||||
};
|
||||
|
||||
const assert = require('assert'),
|
||||
BoxError = require('../boxerror.js'),
|
||||
DataLayout = require('../datalayout.js'),
|
||||
@@ -272,3 +266,12 @@ async function upload(backupConfig, remotePath, dataLayout, progressCallback) {
|
||||
await tarPack(dataLayout, backupConfig.encryption, uploader, progressCallback);
|
||||
});
|
||||
}
|
||||
|
||||
// Public API of this backup format module.
// _EnsureFileSizeStream is not part of the API proper; it is exposed
// under a leading underscore solely so the unit tests can exercise it.
exports = module.exports = {
    getBackupFilePath,
    download,
    upload,

    // exported for testing
    _EnsureFileSizeStream: EnsureFileSizeStream
};
|
||||
|
||||
@@ -4,8 +4,6 @@ const assert = require('assert'),
|
||||
BoxError = require('./boxerror.js'),
|
||||
crypto = require('crypto'),
|
||||
debug = require('debug')('box:hush'),
|
||||
fs = require('fs'),
|
||||
ProgressStream = require('./progress-stream.js'),
|
||||
TransformStream = require('stream').Transform;
|
||||
|
||||
class EncryptStream extends TransformStream {
|
||||
|
||||
@@ -8,11 +8,14 @@
|
||||
|
||||
const common = require('./common.js'),
|
||||
DataLayout = require('../datalayout.js'),
|
||||
EnsureFileSizeStream = require('../backupformat/tgz.js')._EnsureFileSizeStream,
|
||||
expect = require('expect.js'),
|
||||
fs = require('fs'),
|
||||
os = require('os'),
|
||||
path = require('path'),
|
||||
rsync = require('../backupformat/rsync.js');
|
||||
rsync = require('../backupformat/rsync.js'),
|
||||
safe = require('safetydance'),
|
||||
stream = require('node:stream/promises');
|
||||
|
||||
describe('backuptask', function () {
|
||||
const { setup, cleanup, createTree } = common;
|
||||
@@ -20,6 +23,47 @@ describe('backuptask', function () {
|
||||
before(setup);
|
||||
after(cleanup);
|
||||
|
||||
describe('EnsureFileSizeStream', function () {
    const name = 'eberswalde.txt';
    // use the platform temp dir instead of a hard-coded /tmp
    const fixturePath = path.join(os.tmpdir(), name);
    const outPath = path.join(os.tmpdir(), 'out.txt');
    const data = Buffer.from('This file has 22 bytes');

    before(function () {
        fs.writeFileSync(fixturePath, data);
    });

    after(function () {
        fs.rmSync(fixturePath);
        fs.rmSync(outPath, { force: true }); // pipeline output written by the tests below
    });

    // Pipe the fixture through an EnsureFileSizeStream advertising `size`
    // bytes; returns [pipelineError, outputBuffer].
    async function pipeThrough(size) {
        const efs = new EnsureFileSizeStream({ name, size });
        const ins = fs.createReadStream(fixturePath);
        const outs = fs.createWriteStream(outPath);
        const [error] = await safe(stream.pipeline(ins, efs, outs));
        return [error, fs.readFileSync(outPath)];
    }

    it('correct size', async function () {
        const [error, out] = await pipeThrough(22);
        expect(error).to.be(null);
        expect(out).to.eql(data);
    });

    it('overflow', async function () {
        // input is larger than advertised: output must be truncated to `size`
        const [error, out] = await pipeThrough(20);
        expect(error).to.be(null);
        expect(out).to.eql(data.subarray(0, 20));
    });

    it('underflow', async function () {
        // input is smaller than advertised: output must be zero-padded to `size`
        const [error, out] = await pipeThrough(30);
        expect(error).to.be(null);
        expect(out).to.eql(Buffer.concat([data, Buffer.alloc(8)]));
    });
});
|
||||
|
||||
describe('fs meta data', function () {
|
||||
let tmpdir;
|
||||
before(function () {
|
||||
|
||||
Reference in New Issue
Block a user