🐎 Improve performance

C-3PO 2018-07-09 19:10:19 +02:00
parent 6bb8ccea22
commit 3a3fa68213
Signed by: c3po
GPG key ID: 62993C4BB4D86F24
7 changed files with 20 additions and 27 deletions

View file

@@ -13,30 +13,19 @@ export default function saveResponse(
 		}
 		//Remember file size
-		const headerLength = Number(response.headers['content-length']);
+		//const headerLength = Number(response.headers['content-length']);
 		const writeStream = fs.createWriteStream(filePath);
-		//If we receive a part of the response, write it to disk
-		let totalLength = 0;
-		response.on('data', (chunk: Buffer) => {
-			totalLength += chunk.length;
-			//Exit early if we received too much data
-			if (totalLength > headerLength) {
-				return reject(`Expected length ${headerLength} but received at least ${totalLength}.`);
-			}
-			//Write chunk to disk
-			writeStream.write(chunk);
-		});
+		response.pipe(writeStream);
 		//If we finished reading response, check for correctness, then return it
 		response.on('end', () => {
 			//Check that length is correct
-			if (totalLength !== headerLength) {
+			/*if (totalLength !== headerLength) {
 				return reject(`Expected length ${headerLength} but received ${totalLength}.`);
-			}
+			}*/
 			//wait until everything is written to disk, then return file name
 			writeStream.end(() => {

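Note: response.pipe(writeStream) also handles backpressure, pausing the response while the disk catches up instead of buffering unwritten chunks in memory. A minimal sketch of the piped approach, assuming an http.IncomingMessage; the saveToDisk name and listener wiring are illustrative, not part of this commit:

import * as fs from 'fs';
import * as http from 'http';

//Illustrative helper: stream a response to disk. pipe() pauses the response
//whenever the write stream's internal buffer is full.
function saveToDisk(response: http.IncomingMessage, filePath: string): Promise<void> {
	return new Promise((resolve, reject) => {
		const writeStream = fs.createWriteStream(filePath);
		response.pipe(writeStream);
		writeStream.on('finish', () => resolve());
		response.on('error', reject);
		writeStream.on('error', reject);
	});
}
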
View file

@@ -4,7 +4,7 @@ export default function getDecryptor(decryptionKeys: [number, number, number]) {
 	let [key0, key1, key2] = decryptionKeys;
 	return (encryptedChunk: Buffer) => {
-		const decryptedChunk = Buffer.alloc(encryptedChunk.length);
+		//const decryptedChunk = Buffer.alloc(encryptedChunk.length);
 		for (let i = 0; i < encryptedChunk.length; i += 1) {
 			//read byte
@@ -19,9 +19,11 @@ export default function getDecryptor(decryptionKeys: [number, number, number]) {
 			[key0, key1, key2] = updateKeys([key0, key1, key2], curChar);
 			//write byte
-			decryptedChunk.writeUInt8(curChar, i);
+			//decryptedChunk.writeUInt8(curChar, i);
+			encryptedChunk.writeUInt8(curChar, i);
 		}
-		return decryptedChunk;
+		//return decryptedChunk;
+		return encryptedChunk;
 	};
 }

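Note: the decryptor now overwrites the incoming chunk instead of allocating a new Buffer, saving one allocation and copy per chunk; this is only safe as long as no other consumer still needs the ciphertext. A minimal sketch of the in-place pattern, using a stand-in XOR cipher since the real key schedule (updateKeys) is outside this commit:

//Stand-in XOR cipher illustrating in-place decryption.
function decryptInPlace(chunk: Buffer, key: number): Buffer {
	for (let i = 0; i < chunk.length; i += 1) {
		chunk.writeUInt8(chunk.readUInt8(i) ^ key, i); //overwrite the ciphertext byte
	}
	return chunk; //same Buffer instance, no extra allocation
}
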
View file

@@ -1,10 +1,7 @@
 //Similar to extractFile.ts, but instead of receiving and returning an ArrayBuffer, works with Node.js streams.
-import * as fs from 'fs';
-import * as stream from 'stream';
-import * as zlib from 'zlib';
 import { ISsnFileEntry } from '../interfaces/ISsnFileEntry';
-import decryptStream from './streams/decryptStream';
+import decryptTransform from './streams/decryptTransform';
 import readBytesFromStream from './streams/readBytesFromStream';
 /** Extracts the file with the given metadata from the stream.
@@ -16,8 +13,8 @@ export default async function extractFileAsStream(file: ISsnFileEntry, inputStre
 	//pipe into decryption if file is encrypted
 	if (file.decryptionKeys !== undefined) {
-		const decryptTransform = decryptStream(file.decryptionKeys);
-		curStream = curStream.pipe(decryptTransform);
+		const transform = decryptTransform(file.decryptionKeys);
+		curStream = curStream.pipe(transform);
 		//skip encryption header
 		await readBytesFromStream(curStream, 12);
 	}

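Note: the rename from decryptStream to decryptTransform suggests the decryption step is a factory returning a stream.Transform. A sketch of what such a factory might look like, assuming it wraps a per-chunk decryptor like getDecryptor above (makeDecryptTransform is a hypothetical name):

import * as stream from 'stream';

//Hypothetical factory wrapping a per-chunk decryptor in a Transform stream;
//parameter types are inferred from the TransformOptions contextual typing.
function makeDecryptTransform(decrypt: (chunk: Buffer) => Buffer): stream.Transform {
	return new stream.Transform({
		transform(chunk, encoding, callback) {
			callback(null, decrypt(chunk)); //push decrypted bytes downstream
		},
	});
}
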
View file

@@ -12,6 +12,7 @@ export default function arrayBufferToStream(arrayBuffer: ArrayBuffer, offset: nu
 	let position = offset;
 	const endPosition = (length !== undefined) ? (offset + length) : arrayBuffer.byteLength;
+	const byteArray = new Uint8Array(arrayBuffer);
 	const outStream = new stream.Readable({
 		read(size) {
 			const chunkSize = Math.min(size || BUFFER_SIZE, endPosition - position); //TODO: we can probably remove BUFFER_SIZE
@@ -24,7 +25,12 @@ export default function arrayBufferToStream(arrayBuffer: ArrayBuffer, offset: nu
 			}
 			//Write chunk to stream
-			const chunk = Buffer.from(arrayBuffer, position, chunkSize);
+			const chunk = Buffer.allocUnsafe(chunkSize);
+			for (let i = 0; i < chunkSize; i += 1) {
+				chunk.writeUInt8(byteArray[position + i], i);
+			}
+			//chunk.fill(arrayBuffer, position);
+			//const chunk = Buffer.from(arrayBuffer, position, chunkSize);
 			position += chunk.length;
 			needMoreData = this.push(chunk);
 		} while (needMoreData);

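Note: Buffer.from(arrayBuffer, position, chunkSize) creates a view that shares memory with the ArrayBuffer rather than a copy, so every pushed chunk aliased the same underlying bytes; the replacement copies each chunk into its own Buffer. If the byte-wise loop ever becomes a bottleneck, the copy can also be done in one call, since Buffer.from(typedArray), unlike Buffer.from(arrayBuffer, ...), copies its input (copyChunk is a hypothetical helper):

//Hypothetical one-call alternative to the byte-wise copy loop:
//subarray() allocates nothing, and Buffer.from(Uint8Array) copies the bytes.
function copyChunk(byteArray: Uint8Array, position: number, chunkSize: number): Buffer {
	return Buffer.from(byteArray.subarray(position, position + chunkSize));
}
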
View file

@@ -18,7 +18,7 @@ function createFileStream(disks: string[], index: number, offset: number, length
 /** Takes a list of ReadableStreams (the disks), as well as the offset and length, and returns a stream for just one file. */
 async function getConcatenatedStream(disks: string[], { diskStart, offset, length }: IGetFileFromDisksOptions): Promise<stream.Readable> {
 	let curDiskIndex = diskStart;
-	let curDisk: fs.ReadStream = createFileStream(disks, diskStart, offset);
+	let curDisk: fs.ReadStream = createFileStream(disks, diskStart, offset, length);
 	let totalRead = 0;
 	//Create new stream that concatenates disks until storedSize is reached, then ends the stream.

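Note: forwarding length presumably lets the underlying fs.createReadStream stop at the end of the file's data instead of reading the remainder of the disk. A sketch of createFileStream under that assumption (its real body is not part of this commit); the 'end' option is an inclusive byte index, hence the -1:

import * as fs from 'fs';

//Assumed shape of createFileStream: read one disk starting at 'offset',
//optionally stopping after 'length' bytes.
function createFileStream(disks: string[], index: number, offset: number, length?: number): fs.ReadStream {
	return fs.createReadStream(disks[index], {
		start: offset,
		end: (length !== undefined) ? (offset + length - 1) : undefined,
	});
}
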
View file

@@ -4,7 +4,6 @@ import * as stream from 'stream';
 export default function performDiffing(sourceFile: string, diffStream: stream.Readable, targetFile: string): Promise<void> {
 	return new Promise((resolve, reject) => {
-		//const sourceStream = fs.createReadStream(sourceFile);
 		const targetStream = fs.createWriteStream(targetFile);
 		//spawn xdelta3 process, set up listeners
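Note: the deleted line was already commented out; xdelta3 reads the source file by path (via -s), so no source read stream is needed. A sketch of the spawn step the last comment refers to, assuming a standard xdelta3 binary on PATH that takes the delta on stdin and writes the target to stdout:

import { spawn } from 'child_process';
import * as fs from 'fs';
import * as stream from 'stream';

//Illustrative version of the xdelta3 step: -d decodes, -s names the source
//file; with no input/output arguments, xdelta3 uses stdin and stdout.
function applyXdelta3(sourceFile: string, diffStream: stream.Readable, targetFile: string): Promise<void> {
	return new Promise((resolve, reject) => {
		const targetStream = fs.createWriteStream(targetFile);
		const xdelta = spawn('xdelta3', ['-d', '-s', sourceFile]);
		diffStream.pipe(xdelta.stdin);
		xdelta.stdout.pipe(targetStream);
		xdelta.on('error', reject); //e.g. binary not found
		targetStream.on('finish', () => resolve());
	});
}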