diff --git a/src/interfaces/ISsnFileEntry.ts b/src/interfaces/ISsnFileEntry.ts
index e822214..73672a0 100644
--- a/src/interfaces/ISsnFileEntry.ts
+++ b/src/interfaces/ISsnFileEntry.ts
@@ -24,7 +24,7 @@ interface ISsnFileEntry {
 	name: string;
 	/** Uncompressed size */
 	size: number;
-	/** Compressed size */
+	/** Stored size (size of compressed data + 12 byte encryption header if applicable) */
 	compressedSize: number;
 	/** Decryption keys needed to decrypt the file */
 	decryptionKeys: [number, number, number] | undefined;
diff --git a/src/ssn/extractFileStream.ts b/src/ssn/extractFileStream.ts
index dcec088..929aa04 100644
--- a/src/ssn/extractFileStream.ts
+++ b/src/ssn/extractFileStream.ts
@@ -1,11 +1,9 @@
 //Similar to extractFile.ts, but instead of receiving and returning an ArrayBuffer, works with Node.js streams.
 
-import * as fs from 'fs';
 import * as stream from 'stream';
 import * as zlib from 'zlib';
 import { ISsnFileEntry } from '../interfaces/ISsnFileEntry';
 import decryptStream from './streams/decryptStream';
-import streamSetMaxLength from './streams/streamSetMaxLength';
 
 /** Extracts the file with the given metadata from the stream.
  * The stream must already start at the .zip's local file header
@@ -34,10 +32,6 @@ export default function extractFileStream(file: ISsnFileEntry, inputStream: stre
 
 	let curStream = inputStream;
 
-	//set max length (including random 12 byte encryption header)
-	const maxLength = streamSetMaxLength(curStream, file.compressedSize);
-	curStream = maxLength;
-
 	//pipe into decryption if file is encrypted
 	if (file.decryptionKeys !== undefined) {
 		const decryptTransform = decryptStream([...file.decryptionKeys] as [number, number, number]);
diff --git a/src/ssn/getManifest.ts b/src/ssn/getManifest.ts
index e0e1abb..ec6dc1f 100644
--- a/src/ssn/getManifest.ts
+++ b/src/ssn/getManifest.ts
@@ -32,7 +32,7 @@ export default async function getManifest(product: Product): Promise
 		throw new Error(`Expected .patchmanifest to contain a file called manifest.xml but it is called "${firstFile.name}".`);
 	}
 
-	const stream = arrayBufferToStream(ssnFile, firstFile.offset);
+	const stream = arrayBufferToStream(ssnFile, firstFile.offset, firstFile.compressedSize);
 
 	//Extract manifest.xml file
 	const patchmanifestStream = extractFileStream(firstFile, stream);
diff --git a/src/ssn/getSolidpkg.ts b/src/ssn/getSolidpkg.ts
index b52851a..8c87882 100644
--- a/src/ssn/getSolidpkg.ts
+++ b/src/ssn/getSolidpkg.ts
@@ -2,9 +2,11 @@ import getUrlContents from '../cdn/getUrlContents';
 import { Product } from '../interfaces/ISettings';
 import ISolid from '../interfaces/ISolidFile';
 import ISolidSimple from '../interfaces/ISolidSimple';
-import extractFile from './extractFile';
+import extractFileStream from './extractFileStream';
 import parseBencode from './reader/bencodeParser';
 import readSsnFile from './reader/readSsnFile';
+import arrayBufferToStream from './streams/arrayBufferToStream';
+import streamToArrayBuffer from './streams/streamToArrayBuffer';
 import verifyProductName from './verify/verifyProductName';
 import verifySolidpkg from './verify/verifySolidpkg';
 
@@ -35,9 +37,12 @@ export default async function getSolidpkg(product: Product, from: number, to: nu
 		throw new Error(`Expected .solidpkg to contain a file called metafile.solid but it is called "${firstFile.name}".`);
 	}
 
+	const stream = arrayBufferToStream(ssnFile, firstFile.offset, firstFile.compressedSize);
+
 	//Extract metafile.solid file
-	const solidFile = await extractFile(firstFile, [new DataView(ssnFile)]);
-	const solidContents = parseBencode(new DataView(solidFile)) as ISolid;
+	const solidFileStream = extractFileStream(firstFile, stream);
+	const solidFileArrayBuffer = await streamToArrayBuffer(solidFileStream);
+	const solidContents = parseBencode(new DataView(solidFileArrayBuffer)) as ISolid;
 
 	//Verify metafile.solid for correctness
 	verifySolidpkg(solidContents, { product, from, to });
diff --git a/src/ssn/reader/bencodeParser.ts b/src/ssn/reader/bencodeParser.ts
index 2d9a0e2..3747d68 100644
--- a/src/ssn/reader/bencodeParser.ts
+++ b/src/ssn/reader/bencodeParser.ts
@@ -4,7 +4,7 @@ import { TextDecoder } from 'util';
 
-const Decoder = new TextDecoder('utf-8');
+const decoder = new TextDecoder('utf-8');
 
 /** Takes a Bencoded-encoded file, parses it at the given starting position and returns a JSON object, or rejects on error. */
 function bpParse(dv: DataView, posIn: number = 0): { obj: any, pos: number } {
@@ -16,15 +16,15 @@ function bpParse(dv: DataView, posIn: number = 0): { obj: any, pos: number } {
 			obj = {};
 			do {
 				//read key
-				const out1 = bpParse(dv, pos);
-				pos = out1.pos;
-				if (typeof out1.obj !== 'string') {
-					throw new Error(`Expected dictionary key to be string but it is "${typeof out1.obj}".`);
+				const outKey = bpParse(dv, pos);
+				pos = outKey.pos;
+				if (typeof outKey.obj !== 'string') {
+					throw new Error(`Expected dictionary key to be string but it is "${typeof outKey.obj}".`);
 				}
 				//read value
-				const out2 = bpParse(dv, pos);
-				pos = out2.pos;
-				obj[out1.obj] = out2.obj;
+				const outValue = bpParse(dv, pos);
+				pos = outValue.pos;
+				obj[outKey.obj] = outValue.obj;
 			} while (dv.getUint8(pos) !== 0x65); //'e' - end
 			pos += 1;
 			break;
@@ -68,7 +68,7 @@ function bpParse(dv: DataView, posIn: number = 0): { obj: any, pos: number } {
 				curChar = dv.getUint8(pos);
 				pos += 1;
 			}
 			//read string
-			obj = Decoder.decode(new DataView(dv.buffer, pos, strLen));
+			obj = decoder.decode(new DataView(dv.buffer, pos, strLen));
 			pos += strLen;
 			break;
 		}
diff --git a/src/ssn/streams/arrayBufferToStream.ts b/src/ssn/streams/arrayBufferToStream.ts
index e961ebd..5b9cf2b 100644
--- a/src/ssn/streams/arrayBufferToStream.ts
+++ b/src/ssn/streams/arrayBufferToStream.ts
@@ -2,19 +2,23 @@ import * as stream from 'stream';
 
 const BUFFER_SIZE = 16 * 1024;
 
-export default function arrayBufferToStream(arrayBuffer: ArrayBuffer, offset = 0): stream.Readable {
-	if (offset < 0 || offset + 1 >= arrayBuffer.byteLength) {
+export default function arrayBufferToStream(arrayBuffer: ArrayBuffer, offset: number = 0, length?: number): stream.Readable {
+	if (offset < 0 || offset >= arrayBuffer.byteLength - 1) {
+		throw new RangeError('Could not convert ArrayBuffer to ReadableStream; out of bounds.');
+	}
+	if (length !== undefined && offset + length >= arrayBuffer.byteLength - 1) {
 		throw new RangeError('Could not convert ArrayBuffer to ReadableStream; out of bounds.');
 	}
 
 	let position = offset;
+	const endPosition = (length !== undefined) ? offset + length : arrayBuffer.byteLength;
 
 	const outStream = new stream.Readable({
 		read(size) {
-			const chunkSize = Math.min(size || BUFFER_SIZE, arrayBuffer.byteLength - position); //TODO: we can probably remove BUFFER_SIZE
+			const chunkSize = Math.min(size || BUFFER_SIZE, endPosition - position); //TODO: we can probably remove BUFFER_SIZE
 
 			let needMoreData: boolean;
 			do {
 				//If end is reached
-				if (position + 1 >= arrayBuffer.byteLength) {
+				if (position >= endPosition - 1) {
 					this.push(null);
 					return;
 				}
diff --git a/src/ssn/streams/streamSetMaxLength.ts b/src/ssn/streams/streamSetMaxLength.ts
deleted file mode 100644
index 3b8eb69..0000000
--- a/src/ssn/streams/streamSetMaxLength.ts
+++ /dev/null
@@ -1,34 +0,0 @@
-import * as stream from 'stream';
-
-/** Takes the given ReadableStream and returns a ReadableStream with the same contents but that terminates after the given length. */
-export default function streamSetMaxLength(inputStream: stream.Readable, maxLength: number): stream.Readable {
-	if (maxLength <= 0) {
-		throw new RangeError('maxLength is out of bounds.');
-	}
-
-	let remaining = maxLength;
-
-	const outStream = new stream.Readable({
-		read(size) {
-			//If no size is provided, just pass through all remaining bytes
-			if (size === undefined) {
-				this.push(inputStream.read(remaining));
-				remaining = 0;
-				//End is reached, terminate stream
-				this.push(null);
-			} else {
-				//Otherwise, pass through however many bytes we can
-				const clampedSize = Math.min(size, remaining);
-				this.push(inputStream.read(clampedSize));
-				remaining -= clampedSize;
-
-				//If end is reached, terminate stream
-				if (remaining <= 0) {
-					this.push(null);
-				}
-			}
-		},
-	});
-
-	return outStream;
-}
diff --git a/src/ssn/streams/streamToArrayBuffer.ts b/src/ssn/streams/streamToArrayBuffer.ts
new file mode 100644
index 0000000..8589655
--- /dev/null
+++ b/src/ssn/streams/streamToArrayBuffer.ts
@@ -0,0 +1,25 @@
+import * as stream from 'stream';
+
+export default function streamToArrayBuffer(inputStream: stream.Readable): Promise<ArrayBuffer> {
+	return new Promise((resolve, reject) => {
+		const chunks: Buffer[] = [];
+		let totalSize = 0;
+
+		//Collect incoming chunks and track the total size
+		inputStream.on('data', (chunk: Buffer) => {
+			chunks.push(chunk);
+			totalSize += chunk.length;
+		});
+
+		//Concatenate chunks and resolve with a standalone ArrayBuffer (Buffer.concat may reuse Node's internal buffer pool, so copy out only our bytes)
+		inputStream.on('end', () => {
+			const outBuffer = Buffer.concat(chunks, totalSize);
+			resolve(outBuffer.buffer.slice(outBuffer.byteOffset, outBuffer.byteOffset + outBuffer.byteLength) as ArrayBuffer);
+		});
+
+		//Exit on error
+		inputStream.on('error', (error) => {
+			reject(error);
+		});
+	});
+}
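
Usage sketch (not part of the diff): a minimal example of how the changed pieces fit together once this patch is applied, mirroring what getManifest.ts and getSolidpkg.ts now do. The helper name extractToArrayBuffer and its parameters are illustrative only; it assumes the file entry comes from readSsnFile and that its offset field points at the .zip local file header, as in the callers above.

import { ISsnFileEntry } from '../interfaces/ISsnFileEntry';
import extractFileStream from './extractFileStream';
import arrayBufferToStream from './streams/arrayBufferToStream';
import streamToArrayBuffer from './streams/streamToArrayBuffer';

/** Illustrative helper: extracts a single file from an .ssn archive held in memory. */
async function extractToArrayBuffer(ssnFile: ArrayBuffer, fileEntry: ISsnFileEntry): Promise<ArrayBuffer> {
	//Limit the stream to the stored size (compressed data plus the 12-byte encryption header,
	//if the file is encrypted); this replaces the removed streamSetMaxLength() wrapper.
	const inputStream = arrayBufferToStream(ssnFile, fileEntry.offset, fileEntry.compressedSize);

	//Decrypt (if decryptionKeys is set) and inflate the stored data.
	const extractedStream = extractFileStream(fileEntry, inputStream);

	//Collect the resulting stream back into an ArrayBuffer, e.g. to feed parseBencode().
	return streamToArrayBuffer(extractedStream);
}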