From 4d8a631db4586ef2f0bdc6ba3eaae4860ef2f392 Mon Sep 17 00:00:00 2001
From: C-3PO
Date: Sun, 8 Jul 2018 20:18:04 +0200
Subject: [PATCH] =?UTF-8?q?=E2=99=BB=20Read=20local=20file=20header=20sepa?=
 =?UTF-8?q?rately?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/ssn/getManifest.ts                 |  2 +-
 src/ssn/getSolidpkg.ts                 |  2 +-
 src/ssn/streams/getFileFromDisks.ts    | 30 +++++++++++++++-----------
 src/ssn/streams/readLocalFileHeader.ts |  4 ++--
 4 files changed, 22 insertions(+), 16 deletions(-)

diff --git a/src/ssn/getManifest.ts b/src/ssn/getManifest.ts
index 0e81153..c669274 100644
--- a/src/ssn/getManifest.ts
+++ b/src/ssn/getManifest.ts
@@ -36,7 +36,7 @@ export default async function getManifest(product: Product): Promise
   const stream = arrayBufferToStream(ssnFile, firstFile.offset);
 
   //Extract manifest.xml file
-  await readLocalFileHeader(stream);
+  await readLocalFileHeader(stream, true);
   const patchmanifestStream = extractFileStream(firstFile, stream);
 
   //Convert ArrayBuffer to string
diff --git a/src/ssn/getSolidpkg.ts b/src/ssn/getSolidpkg.ts
index ed6f512..96ebb4f 100644
--- a/src/ssn/getSolidpkg.ts
+++ b/src/ssn/getSolidpkg.ts
@@ -41,7 +41,7 @@ export default async function getSolidpkg(product: Product, from: number, to: nu
   const stream = arrayBufferToStream(ssnFile, firstFile.offset);
 
   //Extract metafile.solid file
-  await readLocalFileHeader(stream);
+  await readLocalFileHeader(stream, true);
   const solidFileStream = extractFileStream(firstFile, stream);
   const solidFileArrayBuffer = await streamToArrayBuffer(solidFileStream);
   const solidContents = parseBencode(new DataView(solidFileArrayBuffer)) as ISolid;
diff --git a/src/ssn/streams/getFileFromDisks.ts b/src/ssn/streams/getFileFromDisks.ts
index 7371585..5eb7201 100644
--- a/src/ssn/streams/getFileFromDisks.ts
+++ b/src/ssn/streams/getFileFromDisks.ts
@@ -11,15 +11,14 @@ interface IGetFileFromDisksOptions {
   storedSize: number;
 }
 
-function getStream(disks: string[], index: number, offset: number, length: number = Infinity) {
+function createFileStream(disks: string[], index: number, offset: number, length: number = Infinity): fs.ReadStream {
   return fs.createReadStream(disks[index], { start: offset, end: offset + length - 1 });
 }
 
 /** Takes a list of ReadableStreams (the disks), as well as the offset and length, and returns a stream for just one file. */
-export default async function getFileFromDisks(disks: string[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions): Promise<stream.Readable> {
+async function getFullStream(disks: string[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions): Promise<stream.Readable> {
   let curDiskIndex = diskStart;
-  let curDisk = getStream(disks, diskStart, offset);
-  let localFileHeaderLength = 0;
+  let curDisk: fs.ReadStream = createFileStream(disks, diskStart, offset);
   let totalRead = 0;
 
   //Create new stream that concatenates disks until storedSize is reached, then ends the stream.
@@ -28,8 +27,8 @@ export default async function getFileFromDisks(disks: string[], { diskStart, off
   const onData = (chunk: Buffer) => {
     totalRead += chunk.length;
     //If we've reached the end, we can stop reading after this chunk
-    const readTooManyBytes = totalRead - (localFileHeaderLength + storedSize);
-    if (localFileHeaderLength !== 0 && readTooManyBytes >= 0) {
+    const readTooManyBytes = totalRead - storedSize;
+    if (readTooManyBytes >= 0) {
       //We can still write the whole chunk before ending the stream
       if (readTooManyBytes === 0) {
         outputStream.end(chunk);
@@ -40,6 +39,7 @@ export default async function getFileFromDisks(disks: string[], { diskStart, off
         const shortenedChunk = Buffer.alloc(shortenedLength, chunk);
         outputStream.end(shortenedChunk);
         curDisk.close();
+        curDisk.off('data', onData);
       }
     } else {
       //Nowhere near the end, so just write normally
@@ -52,10 +52,10 @@ export default async function getFileFromDisks(disks: string[], { diskStart, off
   const onEnd = () => {
     curDiskIndex += 1;
     //End if we are at end of file or end of disks
-    if (curDiskIndex >= disks.length || (localFileHeaderLength !== 0 && totalRead >= localFileHeaderLength + storedSize)) {
+    if (curDiskIndex >= disks.length || totalRead >= storedSize) {
       outputStream.end();
     } else {
-      curDisk = getStream(disks, curDiskIndex, 0, (localFileHeaderLength === 0) ? Infinity : localFileHeaderLength + storedSize - totalRead);
+      curDisk = createFileStream(disks, curDiskIndex, 0, storedSize - totalRead);
       //set up new listeners for data and end
       curDisk.on('data', onData);
       curDisk.on('end', onEnd);
@@ -67,9 +67,15 @@ export default async function getFileFromDisks(disks: string[], { diskStart, off
   curDisk.on('end', onEnd);
   curDisk.on('error', onError);
 
-  //Read local file header
-  localFileHeaderLength = await readLocalFileHeader(outputStream);
-  //now that local file header has been read, we will restrict length of stream to storedSize
-
   return outputStream;
 }
+
+export default async function getFileFromDisks(disks: string[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions): Promise<stream.Readable> {
+  //read local file header
+  const headerStream = await getFullStream(disks, { diskStart, offset, storedSize: 30 });
+  const localFileHeaderLength = await readLocalFileHeader(headerStream);
+  headerStream.destroy(); //TODO
+
+  //read actual file
+  return getFullStream(disks, { diskStart, offset: offset + localFileHeaderLength, storedSize });
+}
diff --git a/src/ssn/streams/readLocalFileHeader.ts b/src/ssn/streams/readLocalFileHeader.ts
index b47530b..ad48d69 100644
--- a/src/ssn/streams/readLocalFileHeader.ts
+++ b/src/ssn/streams/readLocalFileHeader.ts
@@ -16,7 +16,7 @@ function waitReadableLength(inputStream: stream.Readable, minLength: number): Pr
  * each stored file, and advances the stream accordingly.
  * Returns length of the local file header.
  */
-export default async function readLocalFileHeader(inputStream: stream.Readable): Promise<number> {
+export default async function readLocalFileHeader(inputStream: stream.Readable, skipAdditionalLength = false): Promise<number> {
   let localFileHeader: Buffer = inputStream.read(30);
   if (localFileHeader === null) {
     //need to wait until data is ready for reading
@@ -37,7 +37,7 @@ export default async function readLocalFileHeader(inputStream: stream.Readable):
 
   //skip local file name and extra field
   const additionalLength = localFilenameSize + localExtraSize;
-  if (additionalLength > 0) {
+  if (skipAdditionalLength && additionalLength > 0) {
     await waitReadableLength(inputStream, additionalLength);
     const tmpChunk = inputStream.read(additionalLength);
     if (tmpChunk === null) {
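
The refactoring relies on the fact that a ZIP local file header begins with a fixed 30-byte prefix that records the file name length (a uint16 at offset 26) and the extra field length (a uint16 at offset 28), so the full header length can be computed before any file data is read. Below is a minimal single-disk sketch of that two-pass read; it uses only plain Node.js fs APIs, and the names readStoredFile, readBytes and localFileHeaderLength are illustrative helpers, not part of this codebase.

//Simplified sketch (single disk, hypothetical helpers); not the project's implementation.
import * as fs from 'fs';

//Computes the total length of a ZIP local file header from its fixed 30-byte prefix.
//The file name length is a uint16 at offset 26, the extra field length a uint16 at offset 28.
function localFileHeaderLength(fixedPart: Buffer): number {
  const fileNameLength = fixedPart.readUInt16LE(26);
  const extraFieldLength = fixedPart.readUInt16LE(28);
  return 30 + fileNameLength + extraFieldLength;
}

//Reads `length` bytes starting at `start` from the given file.
function readBytes(filePath: string, start: number, length: number): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    const chunks: Buffer[] = [];
    fs.createReadStream(filePath, { start, end: start + length - 1 })
      .on('data', (chunk) => chunks.push(chunk as Buffer))
      .on('end', () => resolve(Buffer.concat(chunks)))
      .on('error', reject);
  });
}

//Two-pass read, mirroring the new getFileFromDisks: first fetch only the 30-byte
//header to learn its full length, then read the stored file data that follows it.
async function readStoredFile(diskPath: string, offset: number, storedSize: number): Promise<Buffer> {
  const fixedPart = await readBytes(diskPath, offset, 30);
  const headerLength = localFileHeaderLength(fixedPart);
  return readBytes(diskPath, offset + headerLength, storedSize);
}

Reading the header in its own pass is what lets the second stream start at offset + localFileHeaderLength, so the disk-concatenation logic in getFullStream only ever has to count storedSize bytes instead of tracking the header length on the fly.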