diff --git a/src/ssn/extractFileStream.ts b/src/ssn/extractFileStream.ts
index ed8a7c1..20848e8 100644
--- a/src/ssn/extractFileStream.ts
+++ b/src/ssn/extractFileStream.ts
@@ -4,7 +4,6 @@ import * as stream from 'stream';
 import * as zlib from 'zlib';
 import { ISsnFileEntry } from '../interfaces/ISsnFileEntry';
 import decryptStream from './streams/decryptStream';
-import streamSetMaxLength from './streams/streamSetMaxLength';
 
 /** Extracts the file with the given metadata from the stream.
  * The stream must already start at the .zip's local file header
@@ -13,8 +12,6 @@ import streamSetMaxLength from './streams/streamSetMaxLength';
 export default function extractFileStream(file: ISsnFileEntry, inputStream: stream.Readable, skipDecompression: boolean = false): stream.Readable {
     let curStream = inputStream;
 
-    //curStream = streamSetMaxLength(curStream, file.compressedSize);
-
     //pipe into decryption if file is encrypted
     if (file.decryptionKeys !== undefined) {
         const decryptTransform = decryptStream(file.decryptionKeys);
diff --git a/src/ssn/getPatch.ts b/src/ssn/getPatch.ts
index ed5e808..78595ab 100644
--- a/src/ssn/getPatch.ts
+++ b/src/ssn/getPatch.ts
@@ -39,7 +39,7 @@ export default async function getPatch(product: Product, from: number, to: numbe
     //Extract newly added files
     fileEntries.filter((file) => file.diffType === SsnDiffType.NewFile).forEach(async (file) => {
         try {
-            const fileStream = await getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, storedSize: file.compressedSize });
+            const fileStream = await getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, length: file.compressedSize });
             const fileContents = extractFileStream(file, fileStream, true);
             console.debug(file.name, file.compressedSize, await streamToArrayBuffer(fileContents));
             //TODO: need to write to disk
@@ -51,7 +51,7 @@ export default async function getPatch(product: Product, from: number, to: numbe
     //Extract changed files
     fileEntries.filter((file) => file.diffType === SsnDiffType.Changed).forEach(async (file) => {
         try {
-            const fileStream = await getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, storedSize: file.compressedSize });
+            const fileStream = await getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, length: file.compressedSize });
             const fileContents = extractFileStream(file, fileStream, true);
             console.debug(file.name, file.compressedSize, await streamToArrayBuffer(fileContents));
             //TODO: need to apply diffing, then write to disk
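Both call sites above buffer the extracted stream with streamToArrayBuffer before the remaining TODOs take over. That helper lives elsewhere in the repo and is not part of this patch; a minimal sketch of what it presumably does (concatenate the stream's chunks into a single ArrayBuffer), assuming only Node's standard stream and buffer APIs:

import * as stream from 'stream';

/** Sketch only: collects all chunks of a stream into one ArrayBuffer. */
function streamToArrayBuffer(input: stream.Readable): Promise<ArrayBuffer> {
    return new Promise((resolve, reject) => {
        const chunks: Buffer[] = [];
        input.on('data', (chunk: Buffer) => { chunks.push(chunk); });
        input.on('error', reject);
        input.on('end', () => {
            const merged = Buffer.concat(chunks);
            //Copy into a standalone ArrayBuffer instead of returning a view into Node's buffer pool
            resolve(merged.buffer.slice(merged.byteOffset, merged.byteOffset + merged.byteLength));
        });
    });
}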
diff --git a/src/ssn/streams/getFileFromDisks.ts b/src/ssn/streams/getFileFromDisks.ts
index 5eb7201..a6a50e8 100644
--- a/src/ssn/streams/getFileFromDisks.ts
+++ b/src/ssn/streams/getFileFromDisks.ts
@@ -8,7 +8,7 @@ interface IGetFileFromDisksOptions {
     /** Offset into the start of the disk where the local file header starts. */
     offset: number;
     /** Length of the stored file (compressed size + optional 12 byte encryption header), but excluding the length of the local file header. */
-    storedSize: number;
+    length: number;
 }
 
 function createFileStream(disks: string[], index: number, offset: number, length: number = Infinity): fs.ReadStream {
@@ -16,7 +16,7 @@
 }
 /** Takes a list of ReadableStreams (the disks), as well as the offset and length, and returns a stream for just one file.
 */
-async function getFullStream(disks: string[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions): Promise<stream.Readable> {
+async function getFullStream(disks: string[], { diskStart, offset, length }: IGetFileFromDisksOptions): Promise<stream.Readable> {
     let curDiskIndex = diskStart;
     let curDisk: fs.ReadStream = createFileStream(disks, diskStart, offset);
     let totalRead = 0;
@@ -27,7 +27,7 @@ async function getFullStream(disks: string[], { diskStart, offset, storedSize }:
     const onData = (chunk: Buffer) => {
         totalRead += chunk.length;
         //If we've reached the end, we can stop reading after this chunk
-        const readTooManyBytes = totalRead - storedSize;
+        const readTooManyBytes = totalRead - length;
         if (readTooManyBytes >= 0) {
             //We can still write the whole chunk before ending the stream
             if (readTooManyBytes === 0) {
@@ -52,10 +52,10 @@ async function getFullStream(disks: string[], { diskStart, offset, storedSize }:
     const onEnd = () => {
         curDiskIndex += 1;
         //End if we are at end of file or end of disks
-        if (curDiskIndex >= disks.length || totalRead >= storedSize) {
+        if (curDiskIndex >= disks.length || totalRead >= length) {
             outputStream.end();
         } else {
-            curDisk = createFileStream(disks, curDiskIndex, 0, storedSize - totalRead);
+            curDisk = createFileStream(disks, curDiskIndex, 0, length - totalRead);
             //set up new listeners for data and end
             curDisk.on('data', onData);
             curDisk.on('end', onEnd);
@@ -70,12 +70,12 @@ async function getFullStream(disks: string[], { diskStart, offset, storedSize }:
     return outputStream;
 }
 
-export default async function getFileFromDisks(disks: string[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions): Promise<stream.Readable> {
+export default async function getFileFromDisks(disks: string[], { diskStart, offset, length }: IGetFileFromDisksOptions): Promise<stream.Readable> {
     //read local file header
-    const headerStream = await getFullStream(disks, { diskStart, offset, storedSize: 30 });
+    const headerStream = await getFullStream(disks, { diskStart, offset, length: 30 });
     const localFileHeaderLength = await readLocalFileHeader(headerStream);
-    headerStream.destroy(); //TODO
+    headerStream.destroy(); //TODO: is this the best way to close/destroy the stream?
 
     //read actual file
-    return getFullStream(disks, { diskStart, offset: offset + localFileHeaderLength, storedSize });
+    return getFullStream(disks, { diskStart, offset: offset + localFileHeaderLength, length });
 }
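The hunks above only show the lines surrounding each rename, so the rest of onData, the branch that trims a chunk overshooting the file boundary, stays elided. Judging from the visible code, the complete handler plausibly works along these lines (a sketch of the elided logic, not the file's verbatim code):

const onData = (chunk: Buffer) => {
    totalRead += chunk.length;
    //If we've reached the end, we can stop reading after this chunk
    const readTooManyBytes = totalRead - length;
    if (readTooManyBytes >= 0) {
        if (readTooManyBytes === 0) {
            //Chunk ends exactly on the file boundary, so it can be written whole
            outputStream.write(chunk);
        } else {
            //Chunk runs past the boundary, so write only the bytes that belong to the file
            outputStream.write(chunk.slice(0, chunk.length - readTooManyBytes));
        }
        outputStream.end();
    } else {
        //Still inside the file, pass the whole chunk through
        outputStream.write(chunk);
    }
};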
diff --git a/src/ssn/streams/streamSetMaxLength.ts b/src/ssn/streams/streamSetMaxLength.ts
deleted file mode 100644
index 3b8eb69..0000000
--- a/src/ssn/streams/streamSetMaxLength.ts
+++ /dev/null
@@ -1,34 +0,0 @@
-import * as stream from 'stream';
-
-/** Takes the given ReadableStream and returns a ReadableStream with the same contents but that terminates after the given length.
-*/
-export default function streamSetMaxLength(inputStream: stream.Readable, maxLength: number): stream.Readable {
-    if (maxLength <= 0) {
-        throw new RangeError('maxLength is out of bounds.');
-    }
-
-    let remaining = maxLength;
-
-    const outStream = new stream.Readable({
-        read(size) {
-            //If no size is provided, just pass through all remaining bytes
-            if (size === undefined) {
-                this.push(inputStream.read(remaining));
-                remaining = 0;
-                //End is reached, terminate stream
-                this.push(null);
-            } else {
-                //Otherwise, pass through however many bytes we can
-                const clampedSize = Math.min(size, remaining);
-                this.push(inputStream.read(clampedSize));
-                remaining -= clampedSize;
-
-                //If end is reached, terminate stream
-                if (remaining <= 0) {
-                    this.push(null);
-                }
-            }
-        },
-    });
-    return outStream;
-}
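Folding the length limit into getFullStream instead of keeping this helper also sidesteps a subtle problem in the deleted code: stream.Readable.read() returns null whenever the internal buffer happens to be empty, so this.push(inputStream.read(remaining)) could push null and end the output stream before maxLength bytes were ever delivered. If a standalone limiter is needed again, a Transform-based variant avoids that pitfall because it only sees chunks that actually arrived. A sketch (hypothetical name, standard stream module only):

import * as stream from 'stream';

/** Sketch of a safer limiter: passes through at most maxLength bytes, then ends. */
function limitStreamLength(input: stream.Readable, maxLength: number): stream.Readable {
    if (maxLength <= 0) {
        throw new RangeError('maxLength is out of bounds.');
    }
    let remaining = maxLength;
    const limiter = new stream.Transform({
        transform(chunk: Buffer, _encoding, callback) {
            if (remaining > 0) {
                //Forward at most `remaining` bytes of this chunk
                const slice = chunk.length <= remaining ? chunk : chunk.slice(0, remaining);
                remaining -= slice.length;
                this.push(slice);
                if (remaining === 0) {
                    this.push(null); //limit reached, end the readable side
                    remaining = -1; //guard so null is only pushed once
                }
            }
            callback();
        },
    });
    return input.pipe(limiter);
}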