🐎 Improve performance

parent 6bb8ccea22
commit 3a3fa68213

7 changed files with 20 additions and 27 deletions
@@ -13,30 +13,19 @@ export default function saveResponse(
 	}
 
 	//Remember file size
-	const headerLength = Number(response.headers['content-length']);
+	//const headerLength = Number(response.headers['content-length']);
 
 	const writeStream = fs.createWriteStream(filePath);
 
 	//If we receive a part of the response, write it to disk
-	let totalLength = 0;
-	response.on('data', (chunk: Buffer) => {
-		totalLength += chunk.length;
-
-		//Exit early if we received too much data
-		if (totalLength > headerLength) {
-			return reject(`Expected length ${headerLength} but received at least ${totalLength}.`);
-		}
-
-		//Write chunk to disk
-		writeStream.write(chunk);
-	});
+	response.pipe(writeStream);
 
 	//If we finished reading response, check for correctness, then return it
 	response.on('end', () => {
 		//Check that length is correct
-		if (totalLength !== headerLength) {
+		/*if (totalLength !== headerLength) {
 			return reject(`Expected length ${headerLength} but received ${totalLength}.`);
-		}
+		}*/
 
 		//wait until everything is written to disk, then return file name
 		writeStream.end(() => {
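Note on this hunk: the manual 'data' handler that counted bytes and wrote each chunk by hand is replaced with a single response.pipe(writeStream), which lets Node handle chunking and backpressure; the content-length check is commented out along with it. A minimal sketch of the resulting pattern follows. The function name saveToDisk and the resolve/reject wiring are illustrative assumptions, since the surrounding Promise is not shown in the hunk.

import * as fs from 'fs';
import * as http from 'http';

//Sketch only: stream an HTTP response straight to disk via pipe().
function saveToDisk(response: http.IncomingMessage, filePath: string): Promise<string> {
	return new Promise((resolve, reject) => {
		const writeStream = fs.createWriteStream(filePath);
		response.pipe(writeStream); //pipe() handles chunking and backpressure
		response.on('error', reject);
		writeStream.on('finish', () => resolve(filePath)); //all data flushed to disk
	});
}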
@@ -4,7 +4,7 @@ export default function getDecryptor(decryptionKeys: [number, number, number]) {
 	let [key0, key1, key2] = decryptionKeys;
 
 	return (encryptedChunk: Buffer) => {
-		const decryptedChunk = Buffer.alloc(encryptedChunk.length);
+		//const decryptedChunk = Buffer.alloc(encryptedChunk.length);
 
 		for (let i = 0; i < encryptedChunk.length; i += 1) {
 			//read byte
@@ -19,9 +19,11 @@ export default function getDecryptor(decryptionKeys: [number, number, number]) {
 			[key0, key1, key2] = updateKeys([key0, key1, key2], curChar);
 
 			//write byte
-			decryptedChunk.writeUInt8(curChar, i);
+			//decryptedChunk.writeUInt8(curChar, i);
+			encryptedChunk.writeUInt8(curChar, i);
 		}
 
-		return decryptedChunk;
+		//return decryptedChunk;
+		return encryptedChunk;
 	};
 }
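Note on this hunk: instead of allocating a fresh decryptedChunk for every chunk, the decryptor now overwrites encryptedChunk in place and returns it, which saves one Buffer allocation and copy per chunk. A minimal sketch of the in-place pattern, with a placeholder decryptByte callback standing in for the project's updateKeys-based cipher:

//Sketch only: transform each byte in place rather than allocating a new Buffer per chunk.
//decryptByte is a hypothetical stand-in for the real key-stream cipher.
function getInPlaceDecryptor(decryptByte: (b: number) => number) {
	return (chunk: Buffer): Buffer => {
		for (let i = 0; i < chunk.length; i += 1) {
			chunk.writeUInt8(decryptByte(chunk.readUInt8(i)), i); //overwrite byte in place
		}
		return chunk; //same Buffer, now decrypted
	};
}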
@@ -1,10 +1,7 @@
-//Similar to extractFile.ts, but instead of receiving and returning an ArrayBuffer, works with Node.js streams.
-
-import * as fs from 'fs';
 import * as stream from 'stream';
 import * as zlib from 'zlib';
 import { ISsnFileEntry } from '../interfaces/ISsnFileEntry';
-import decryptStream from './streams/decryptStream';
+import decryptTransform from './streams/decryptTransform';
 import readBytesFromStream from './streams/readBytesFromStream';
 
 /** Extracts the file with the given metadata from the stream.
@@ -16,8 +13,8 @@ export default async function extractFileAsStream(file: ISsnFileEntry, inputStre
 
 	//pipe into decryption if file is encrypted
 	if (file.decryptionKeys !== undefined) {
-		const decryptTransform = decryptStream(file.decryptionKeys);
-		curStream = curStream.pipe(decryptTransform);
+		const transform = decryptTransform(file.decryptionKeys);
+		curStream = curStream.pipe(transform);
 		//skip encryption header
 		await readBytesFromStream(curStream, 12);
 	}
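Note on this hunk: the import is renamed to decryptTransform and the local variable to transform (so it no longer shadows the import), and the 12-byte encryption header is still skipped with readBytesFromStream. That helper's implementation is not shown here; one plausible way to consume a fixed number of bytes from a readable stream, for illustration only:

import * as stream from 'stream';

//Illustrative only: resolve with `count` bytes once the stream has buffered them.
//The project's readBytesFromStream helper may be implemented differently.
function readBytes(input: stream.Readable, count: number): Promise<Buffer> {
	return new Promise((resolve, reject) => {
		const tryRead = () => {
			const chunk = input.read(count); //returns null until `count` bytes are available
			if (chunk !== null) {
				input.removeListener('readable', tryRead);
				resolve(chunk as Buffer);
			}
		};
		input.on('readable', tryRead);
		input.on('error', reject);
	});
}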
@@ -12,6 +12,7 @@ export default function arrayBufferToStream(arrayBuffer: ArrayBuffer, offset: nu
 
 	let position = offset;
 	const endPosition = (length !== undefined) ? (offset + length) : arrayBuffer.byteLength;
+	const byteArray = new Uint8Array(arrayBuffer);
 	const outStream = new stream.Readable({
 		read(size) {
 			const chunkSize = Math.min(size || BUFFER_SIZE, endPosition - position); //TODO: we can probably remove BUFFER_SIZE
@@ -24,7 +25,12 @@ export default function arrayBufferToStream(arrayBuffer: ArrayBuffer, offset: nu
 			}
 
 			//Write chunk to stream
-			const chunk = Buffer.from(arrayBuffer, position, chunkSize);
+			const chunk = Buffer.allocUnsafe(chunkSize);
+			for (let i = 0; i < chunkSize; i += 1) {
+				chunk.writeUInt8(byteArray[position + i], i);
+			}
+			//chunk.fill(arrayBuffer, position);
+			//const chunk = Buffer.from(arrayBuffer, position, chunkSize);
 			position += chunk.length;
 			needMoreData = this.push(chunk);
 		} while (needMoreData);
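Context for this hunk: Buffer.from(arrayBuffer, byteOffset, length) returns a Buffer that shares memory with the ArrayBuffer, so pushing such views downstream would let consumers (for example the in-place decryptor above) mutate the source data. The new code therefore copies each chunk into a fresh Buffer. A sketch of an equivalent copy, assuming byteArray is the Uint8Array view added in the first hunk of this file:

//Sketch only: copy chunkSize bytes starting at `position` out of the backing Uint8Array.
//Buffer.from(TypedArray) copies the data; subarray() by itself is only a view.
function copyChunk(byteArray: Uint8Array, position: number, chunkSize: number): Buffer {
	return Buffer.from(byteArray.subarray(position, position + chunkSize));
}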
@@ -18,7 +18,7 @@ function createFileStream(disks: string[], index: number, offset: number, length
 /** Takes a list of ReadableStreams (the disks), as well as the offset and length, and returns a stream for just one file. */
 async function getConcatenatedStream(disks: string[], { diskStart, offset, length }: IGetFileFromDisksOptions): Promise<stream.Readable> {
 	let curDiskIndex = diskStart;
-	let curDisk: fs.ReadStream = createFileStream(disks, diskStart, offset);
+	let curDisk: fs.ReadStream = createFileStream(disks, diskStart, offset, length);
 	let totalRead = 0;
 
 	//Create new stream that concatenates disks until storedSize is reached, then ends the stream.
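Note on this hunk: forwarding length to createFileStream presumably lets the helper bound the underlying read instead of streaming the disk to its end. The project's implementation of createFileStream is not shown in this hunk; a sketch of how such a helper might map offset and length onto fs.createReadStream options:

import * as fs from 'fs';

//Sketch only: open one disk file and restrict the read range when a length is known.
//fs.createReadStream's `end` option is inclusive, hence the -1.
function createFileStream(disks: string[], index: number, offset: number, length?: number): fs.ReadStream {
	return fs.createReadStream(disks[index], {
		start: offset,
		end: length !== undefined ? offset + length - 1 : undefined,
	});
}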
@@ -4,7 +4,6 @@ import * as stream from 'stream';
 
 export default function performDiffing(sourceFile: string, diffStream: stream.Readable, targetFile: string): Promise<void> {
	return new Promise((resolve, reject) => {
-		//const sourceStream = fs.createReadStream(sourceFile);
 		const targetStream = fs.createWriteStream(targetFile);
 
 		//spawn xdelta3 process, set up listeners
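Note on this hunk: the deleted line was a commented-out leftover; xdelta3 opens the source file itself, so no Node-side sourceStream is needed. A rough sketch of the spawn-and-pipe wiring the "spawn xdelta3 process" comment refers to. The exact arguments the project passes are not shown in this hunk, so the flags below (-d decode, -c stdout, -s source, delta on stdin) are an assumption:

import { spawn } from 'child_process';
import * as fs from 'fs';
import * as stream from 'stream';

//Rough sketch: apply a delta with xdelta3, assuming it reads the delta from stdin
//and writes the patched file to stdout with the flags shown.
function applyDelta(sourceFile: string, diffStream: stream.Readable, targetFile: string): Promise<void> {
	return new Promise((resolve, reject) => {
		const xdelta = spawn('xdelta3', ['-d', '-c', '-s', sourceFile]);
		diffStream.pipe(xdelta.stdin);
		xdelta.stdout.pipe(fs.createWriteStream(targetFile)).on('finish', () => resolve());
		xdelta.on('error', reject);
	});
}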