🗑 Remove streamSetMaxLength
parent ac8bb8b236
commit 285078fbe6

4 changed files with 11 additions and 48 deletions
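In place of the removed streamSetMaxLength wrapper, the desired byte count is now passed straight to getFileFromDisks through the renamed length option. A minimal sketch of the resulting call shape, mirroring the getPatch changes below (imports omitted; the wrapper function is purely illustrative, not part of the repository):

```ts
// Sketch of the call shape after this commit (identifiers taken from the diff below;
// this helper function is illustrative, not repository code).
async function readStoredFile(diskFilenames: string[], file: ISsnFileEntry): Promise<stream.Readable> {
  const fileStream = await getFileFromDisks(diskFilenames, {
    diskStart: file.diskNumberStart, // disk on which the stored file begins
    offset: file.offset,             // offset of the local file header on that disk
    length: file.compressedSize,     // stored size, excluding the local file header
  });
  // No streamSetMaxLength wrapper any more: the returned stream already ends after `length` bytes.
  return extractFileStream(file, fileStream, true);
}
```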
@@ -4,7 +4,6 @@ import * as stream from 'stream';
 import * as zlib from 'zlib';
 import { ISsnFileEntry } from '../interfaces/ISsnFileEntry';
 import decryptStream from './streams/decryptStream';
-import streamSetMaxLength from './streams/streamSetMaxLength';
 
 /** Extracts the file with the given metadata from the stream.
  * The stream must already start at the .zip's local file header
@@ -13,8 +12,6 @@ import streamSetMaxLength from './streams/streamSetMaxLength';
 export default function extractFileStream(file: ISsnFileEntry, inputStream: stream.Readable, skipDecompression: boolean = false): stream.Readable {
   let curStream = inputStream;
 
-  //curStream = streamSetMaxLength(curStream, file.compressedSize);
-
   //pipe into decryption if file is encrypted
   if (file.decryptionKeys !== undefined) {
     const decryptTransform = decryptStream(file.decryptionKeys);
@@ -39,7 +39,7 @@ export default async function getPatch(product: Product, from: number, to: numbe
   //Extract newly added files
   fileEntries.filter((file) => file.diffType === SsnDiffType.NewFile).forEach(async (file) => {
     try {
-      const fileStream = await getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, storedSize: file.compressedSize });
+      const fileStream = await getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, length: file.compressedSize });
       const fileContents = extractFileStream(file, fileStream, true);
       console.debug(file.name, file.compressedSize, await streamToArrayBuffer(fileContents));
       //TODO: need to write to disk
@@ -51,7 +51,7 @@ export default async function getPatch(product: Product, from: number, to: numbe
   //Extract changed files
   fileEntries.filter((file) => file.diffType === SsnDiffType.Changed).forEach(async (file) => {
     try {
-      const fileStream = await getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, storedSize: file.compressedSize });
+      const fileStream = await getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, length: file.compressedSize });
       const fileContents = extractFileStream(file, fileStream, true);
       console.debug(file.name, file.compressedSize, await streamToArrayBuffer(fileContents));
       //TODO: need to apply diffing, then write to disk
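getPatch collects each extracted stream with streamToArrayBuffer, which is not shown in this diff. A minimal sketch of what such a helper presumably does (an assumption for illustration, not the repository's implementation):

```ts
import * as stream from 'stream';

/** Collects every chunk of a readable stream and resolves with one ArrayBuffer (assumed helper, for illustration). */
function streamToArrayBuffer(input: stream.Readable): Promise<ArrayBuffer> {
  return new Promise((resolve, reject) => {
    const chunks: Buffer[] = [];
    input.on('data', (chunk: Buffer) => chunks.push(chunk));
    input.on('error', reject);
    input.on('end', () => {
      const buf = Buffer.concat(chunks);
      // Slice so the result covers exactly the collected bytes, not the whole underlying buffer.
      resolve(buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength));
    });
  });
}
```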
@@ -8,7 +8,7 @@ interface IGetFileFromDisksOptions {
   /** Offset into the start of the disk where the local file header starts. */
   offset: number;
   /** Length of the stored file (compressed size + optional 12 byte encryption header), but excluding the length of the local file header. */
-  storedSize: number;
+  length: number;
 }
 
 function createFileStream(disks: string[], index: number, offset: number, length: number = Infinity): fs.ReadStream {
@@ -16,7 +16,7 @@ function createFileStream(disks: string[], index: number, offset: number, length
 }
 
 /** Takes a list of ReadableStreams (the disks), as well as the offset and length, and returns a stream for just one file. */
-async function getFullStream(disks: string[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions): Promise<stream.Readable> {
+async function getFullStream(disks: string[], { diskStart, offset, length }: IGetFileFromDisksOptions): Promise<stream.Readable> {
   let curDiskIndex = diskStart;
   let curDisk: fs.ReadStream = createFileStream(disks, diskStart, offset);
   let totalRead = 0;
@@ -27,7 +27,7 @@ async function getFullStream(disks: string[], { diskStart, offset, storedSize }:
   const onData = (chunk: Buffer) => {
     totalRead += chunk.length;
     //If we've reached the end, we can stop reading after this chunk
-    const readTooManyBytes = totalRead - storedSize;
+    const readTooManyBytes = totalRead - length;
     if (readTooManyBytes >= 0) {
       //We can still write the whole chunk before ending the stream
       if (readTooManyBytes === 0) {
@@ -52,10 +52,10 @@ async function getFullStream(disks: string[], { diskStart, offset, storedSize }:
   const onEnd = () => {
     curDiskIndex += 1;
     //End if we are at end of file or end of disks
-    if (curDiskIndex >= disks.length || totalRead >= storedSize) {
+    if (curDiskIndex >= disks.length || totalRead >= length) {
       outputStream.end();
     } else {
-      curDisk = createFileStream(disks, curDiskIndex, 0, storedSize - totalRead);
+      curDisk = createFileStream(disks, curDiskIndex, 0, length - totalRead);
       //set up new listeners for data and end
       curDisk.on('data', onData);
       curDisk.on('end', onEnd);
@@ -70,12 +70,12 @@ async function getFullStream(disks: string[], { diskStart, offset, storedSize }:
   return outputStream;
 }
 
-export default async function getFileFromDisks(disks: string[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions): Promise<stream.Readable> {
+export default async function getFileFromDisks(disks: string[], { diskStart, offset, length }: IGetFileFromDisksOptions): Promise<stream.Readable> {
   //read local file header
-  const headerStream = await getFullStream(disks, { diskStart, offset, storedSize: 30 });
+  const headerStream = await getFullStream(disks, { diskStart, offset, length: 30 });
   const localFileHeaderLength = await readLocalFileHeader(headerStream);
-  headerStream.destroy(); //TODO
+  headerStream.destroy(); //TODO: is this the best way to close/destroy the stream?
 
   //read actual file
-  return getFullStream(disks, { diskStart, offset: offset + localFileHeaderLength, storedSize });
+  return getFullStream(disks, { diskStart, offset: offset + localFileHeaderLength, length });
 }
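The hunks above only rename storedSize to length; the code that trims the final chunk and switches to the next disk sits between them and is not visible in this diff. A self-contained sketch of the same spanning-read idea, assuming each disk is a plain file on disk (simplified, without the repository's error handling or backpressure logic):

```ts
import * as fs from 'fs';
import * as stream from 'stream';

// Simplified illustration of what getFullStream does above: emit exactly `length` bytes,
// starting at `offset` in disks[diskStart] and continuing onto the following disks.
// Names mirror the diff; the chunk-trimming details are assumed, not copied from the repository.
function readAcrossDisks(disks: string[], diskStart: number, offset: number, length: number): stream.Readable {
  const output = new stream.PassThrough();
  let diskIndex = diskStart;
  let totalRead = 0;

  const openNext = (startOffset: number) => {
    const disk = fs.createReadStream(disks[diskIndex], { start: startOffset });
    disk.on('data', (chunk: Buffer) => {
      const remaining = length - totalRead;
      if (chunk.length >= remaining) {
        // Final chunk: forward only the bytes still needed, then stop reading this disk.
        output.end(chunk.slice(0, remaining));
        totalRead = length;
        disk.destroy();
      } else {
        totalRead += chunk.length;
        output.write(chunk); // backpressure handling omitted for brevity
      }
    });
    disk.on('end', () => {
      if (totalRead >= length) { return; } // already finished via the final chunk
      diskIndex += 1;
      if (diskIndex >= disks.length) {
        output.end(); // ran out of disks before reaching `length`
      } else {
        openNext(0); // the next disk is read from its beginning
      }
    });
    disk.on('error', (err) => output.destroy(err));
  };

  openNext(offset);
  return output;
}
```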
@@ -1,34 +0,0 @@
-import * as stream from 'stream';
-
-/** Takes the given ReadableStream and returns a ReadableStream with the same contents but that terminates after the given length. */
-export default function streamSetMaxLength(inputStream: stream.Readable, maxLength: number): stream.Readable {
-  if (maxLength <= 0) {
-    throw new RangeError('maxLength is out of bounds.');
-  }
-
-  let remaining = maxLength;
-
-  const outStream = new stream.Readable({
-    read(size) {
-      //If no size is provided, just pass through all remaining bytes
-      if (size === undefined) {
-        this.push(inputStream.read(remaining));
-        remaining = 0;
-        //End is reached, terminate stream
-        this.push(null);
-      } else {
-        //Otherwise, pass through however many bytes we can
-        const clampedSize = Math.min(size, remaining);
-        this.push(inputStream.read(clampedSize));
-        remaining -= clampedSize;
-
-        //If end is reached, terminate stream
-        if (remaining <= 0) {
-          this.push(null);
-        }
-      }
-    },
-  });
-
-  return outStream;
-}
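For comparison only: the deleted helper capped a stream's length by overriding read() on a fresh Readable and pulling from the wrapped stream. The same cap is commonly written as a byte-counting Transform; a minimal sketch of that alternative (not part of this commit or repository):

```ts
import * as stream from 'stream';

/** Passes through at most maxLength bytes, then ends the readable side (comparison sketch, not project code). */
function maxLengthTransform(maxLength: number): stream.Transform {
  let remaining = maxLength;
  return new stream.Transform({
    transform(chunk: Buffer, _encoding, callback) {
      if (remaining > 0) {
        // Forward only as many bytes as the cap still allows.
        const slice = chunk.length > remaining ? chunk.slice(0, remaining) : chunk;
        remaining -= slice.length;
        this.push(slice);
        if (remaining <= 0) {
          this.push(null); // cap reached: end the readable side
        }
      }
      callback(); // further input is silently dropped once the cap is hit
    },
  });
}

// Usage example: inputStream.pipe(maxLengthTransform(file.compressedSize))
```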