Update solidpkg to use streams instead of buffers

C-3PO 2018-07-05 18:36:37 +02:00
parent 28c0fdecf4
commit 5c915776aa
Signed by: c3po
GPG key ID: 62993C4BB4D86F24
8 changed files with 52 additions and 58 deletions

interfaces/ISsnFileEntry.ts

@@ -24,7 +24,7 @@ interface ISsnFileEntry {
 	name: string;
 	/** Uncompressed size */
 	size: number;
-	/** Compressed size */
+	/** Stored size (size of compressed data + 12 byte encryption header if applicable) */
 	compressedSize: number;
 	/** Decryption keys needed to decrypt the file */
 	decryptionKeys: [number, number, number] | undefined;

extractFileStream.ts

@ -1,11 +1,9 @@
//Similar to extractFile.ts, but instead of receiving and returning an ArrayBuffer, works with Node.js streams. //Similar to extractFile.ts, but instead of receiving and returning an ArrayBuffer, works with Node.js streams.
import * as fs from 'fs';
import * as stream from 'stream'; import * as stream from 'stream';
import * as zlib from 'zlib'; import * as zlib from 'zlib';
import { ISsnFileEntry } from '../interfaces/ISsnFileEntry'; import { ISsnFileEntry } from '../interfaces/ISsnFileEntry';
import decryptStream from './streams/decryptStream'; import decryptStream from './streams/decryptStream';
import streamSetMaxLength from './streams/streamSetMaxLength';
/** Extracts the file with the given metadata from the stream. /** Extracts the file with the given metadata from the stream.
* The stream must already start at the .zip's local file header * The stream must already start at the .zip's local file header
@@ -34,10 +32,6 @@ export default function extractFileStream(file: ISsnFileEntry, inputStream: stream.Readable
 	let curStream = inputStream;

-	//set max length (including random 12 byte encryption header)
-	const maxLength = streamSetMaxLength(curStream, file.compressedSize);
-	curStream = maxLength;
-
 	//pipe into decryption if file is encrypted
 	if (file.decryptionKeys !== undefined) {
 		const decryptTransform = decryptStream([...file.decryptionKeys] as [number, number, number]);
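With the clamp gone, extractFileStream assumes its input already ends after file.compressedSize bytes; the callers updated below guarantee this by passing the length to arrayBufferToStream at creation time. A minimal sketch of the new calling convention (import paths and the declared values are illustrative, not part of the commit):

import { ISsnFileEntry } from '../interfaces/ISsnFileEntry';
import extractFileStream from './extractFileStream';
import arrayBufferToStream from './streams/arrayBufferToStream';

declare const ssnFile: ArrayBuffer;  //the full .ssn archive
declare const entry: ISsnFileEntry;  //one entry parsed out of it

//The stream is bounded at creation, so no streamSetMaxLength wrapper is needed.
const bounded = arrayBufferToStream(ssnFile, entry.offset, entry.compressedSize);
const extracted = extractFileStream(entry, bounded);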

getManifest.ts

@@ -32,7 +32,7 @@ export default async function getManifest(product: Product): Promise<IManifest>
 		throw new Error(`Expected .patchmanifest to contain a file called manifest.xml but it is called "${firstFile.name}".`);
 	}

-	const stream = arrayBufferToStream(ssnFile, firstFile.offset);
+	const stream = arrayBufferToStream(ssnFile, firstFile.offset, firstFile.compressedSize);

 	//Extract manifest.xml file
 	const patchmanifestStream = extractFileStream(firstFile, stream);

getSolidpkg.ts

@@ -2,9 +2,11 @@ import getUrlContents from '../cdn/getUrlContents';
 import { Product } from '../interfaces/ISettings';
 import ISolid from '../interfaces/ISolidFile';
 import ISolidSimple from '../interfaces/ISolidSimple';
-import extractFile from './extractFile';
+import extractFileStream from './extractFileStream';
 import parseBencode from './reader/bencodeParser';
 import readSsnFile from './reader/readSsnFile';
+import arrayBufferToStream from './streams/arrayBufferToStream';
+import streamToArrayBuffer from './streams/streamToArrayBuffer';
 import verifyProductName from './verify/verifyProductName';
 import verifySolidpkg from './verify/verifySolidpkg';
@@ -35,9 +37,12 @@ export default async function getSolidpkg(product: Product, from: number, to: number)
 		throw new Error(`Expected .solidpkg to contain a file called metafile.solid but it is called "${firstFile.name}".`);
 	}

+	const stream = arrayBufferToStream(ssnFile, firstFile.offset, firstFile.compressedSize);
+
 	//Extract metafile.solid file
-	const solidFile = await extractFile(firstFile, [new DataView(ssnFile)]);
-	const solidContents = parseBencode(new DataView(solidFile)) as ISolid;
+	const solidFileStream = extractFileStream(firstFile, stream);
+	const solidFileArrayBuffer = await streamToArrayBuffer(solidFileStream);
+	const solidContents = parseBencode(new DataView(solidFileArrayBuffer)) as ISolid;

 	//Verify metafile.solid for correctness
 	verifySolidpkg(solidContents, { product, from, to });
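getManifest.ts above performs the same three steps (bounded stream, extract, collect), so the pattern could be factored into a shared helper. A hypothetical sketch, not part of this commit; imports assume the helper lives next to extractFileStream.ts:

import { ISsnFileEntry } from '../interfaces/ISsnFileEntry';
import extractFileStream from './extractFileStream';
import arrayBufferToStream from './streams/arrayBufferToStream';
import streamToArrayBuffer from './streams/streamToArrayBuffer';

/** Hypothetical convenience wrapper: extracts a single entry from an
 *  in-memory .ssn archive and resolves with its decompressed bytes. */
async function extractFileFromBuffer(ssnFile: ArrayBuffer, file: ISsnFileEntry): Promise<ArrayBuffer> {
	const input = arrayBufferToStream(ssnFile, file.offset, file.compressedSize);
	const output = extractFileStream(file, input);
	return streamToArrayBuffer(output);
}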

reader/bencodeParser.ts

@@ -4,7 +4,7 @@
 import { TextDecoder } from 'util';

-const Decoder = new TextDecoder('utf-8');
+const decoder = new TextDecoder('utf-8');

 /** Takes a Bencoded-encoded file, parses it at the given starting position and returns a JSON object, or rejects on error. */
 function bpParse(dv: DataView, posIn: number = 0): { obj: any, pos: number } {
@@ -16,15 +16,15 @@ function bpParse(dv: DataView, posIn: number = 0): { obj: any, pos: number } {
 			obj = {};
 			do {
 				//read key
-				const out1 = bpParse(dv, pos);
-				pos = out1.pos;
-				if (typeof out1.obj !== 'string') {
-					throw new Error(`Expected dictionary key to be string but it is "${typeof out1.obj}".`);
+				const outKey = bpParse(dv, pos);
+				pos = outKey.pos;
+				if (typeof outKey.obj !== 'string') {
+					throw new Error(`Expected dictionary key to be string but it is "${typeof outKey.obj}".`);
 				}
 				//read value
-				const out2 = bpParse(dv, pos);
-				pos = out2.pos;
-				obj[out1.obj] = out2.obj;
+				const outValue = bpParse(dv, pos);
+				pos = outValue.pos;
+				obj[outKey.obj] = outValue.obj;
 			} while (dv.getUint8(pos) !== 0x65); //'e' - end
 			pos += 1;
 			break;
@@ -68,7 +68,7 @@ function bpParse(dv: DataView, posIn: number = 0): { obj: any, pos: number } {
 				curChar = dv.getUint8(pos); pos += 1;
 			}
 			//read string
-			obj = Decoder.decode(new DataView(dv.buffer, pos, strLen));
+			obj = decoder.decode(new DataView(dv.buffer, pos, strLen));
 			pos += strLen;
 			break;
 		}
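For reference, a bencode dictionary is the byte 'd' followed by alternating key/value pairs and a terminating 'e' (0x65), which is exactly what the do/while loop above scans for. A small usage sketch, assuming the module's default export (imported elsewhere in this commit as parseBencode) wraps bpParse:

import { TextEncoder } from 'util';
import parseBencode from './reader/bencodeParser'; //path as used in getSolidpkg.ts

//'d3:key5:valuee' is the bencode encoding of { key: 'value' }:
//'d' opens the dictionary, '3:key' and '5:value' are length-prefixed
//strings, and the final 'e' closes it.
const input = new TextEncoder().encode('d3:key5:valuee');
const result = parseBencode(new DataView(input.buffer));
//result is { key: 'value' }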

streams/arrayBufferToStream.ts

@@ -2,19 +2,23 @@ import * as stream from 'stream';

 const BUFFER_SIZE = 16 * 1024;

-export default function arrayBufferToStream(arrayBuffer: ArrayBuffer, offset = 0): stream.Readable {
-	if (offset < 0 || offset + 1 >= arrayBuffer.byteLength) {
+export default function arrayBufferToStream(arrayBuffer: ArrayBuffer, offset: number = 0, length?: number): stream.Readable {
+	if (offset < 0 || offset >= arrayBuffer.byteLength - 1) {
+		throw new RangeError('Could not convert ArrayBuffer to ReadableStream; out of bounds.');
+	}
+	if (length !== undefined && offset + length >= arrayBuffer.byteLength - 1) {
 		throw new RangeError('Could not convert ArrayBuffer to ReadableStream; out of bounds.');
 	}

 	let position = offset;
+	const endPosition = (length !== undefined) ? offset + length : arrayBuffer.byteLength;

 	const outStream = new stream.Readable({
 		read(size) {
-			const chunkSize = Math.min(size || BUFFER_SIZE, arrayBuffer.byteLength - position); //TODO: we can probably remove BUFFER_SIZE
+			const chunkSize = Math.min(size || BUFFER_SIZE, endPosition - position); //TODO: we can probably remove BUFFER_SIZE
 			let needMoreData: boolean;

 			do {
 				//If end is reached
-				if (position + 1 >= arrayBuffer.byteLength) {
+				if (position >= endPosition - 1) {
 					this.push(null);
 					return;
 				}
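A minimal usage sketch of the new optional length parameter, assuming a 100-byte buffer: the stream now ends after the requested slice instead of running to the end of the buffer.

import arrayBufferToStream from './streams/arrayBufferToStream'; //path assumed

const buf = new ArrayBuffer(100);

//Stream over bytes 10..59 only (offset 10, length 50).
const slice = arrayBufferToStream(buf, 10, 50);
slice.on('data', (chunk: Buffer) => console.log(chunk.length)); //50 bytes in total
slice.on('end', () => console.log('done'));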

streams/streamSetMaxLength.ts

@@ -1,34 +0,0 @@
-import * as stream from 'stream';
-
-/** Takes the given ReadableStream and returns a ReadableStream with the same contents but that terminates after the given length. */
-export default function streamSetMaxLength(inputStream: stream.Readable, maxLength: number): stream.Readable {
-	if (maxLength <= 0) {
-		throw new RangeError('maxLength is out of bounds.');
-	}
-
-	let remaining = maxLength;
-
-	const outStream = new stream.Readable({
-		read(size) {
-			//If no size is provided, just pass through all remaining bytes
-			if (size === undefined) {
-				this.push(inputStream.read(remaining));
-				remaining = 0;
-				//End is reached, terminate stream
-				this.push(null);
-			} else {
-				//Otherwise, pass through however many bytes we can
-				const clampedSize = Math.min(size, remaining);
-				this.push(inputStream.read(clampedSize));
-				remaining -= clampedSize;
-				//If end is reached, terminate stream
-				if (remaining <= 0) {
-					this.push(null);
-				}
-			}
-		},
-	});
-
-	return outStream;
-}

streams/streamToArrayBuffer.ts

@@ -0,0 +1,25 @@
+import * as stream from 'stream';
+
+export default function streamToArrayBuffer(inputStream: stream.Readable): Promise<ArrayBuffer> {
+	return new Promise((resolve, reject) => {
+		const chunks: Buffer[] = [];
+		let totalSize = 0;
+
+		//Collect chunks as they arrive
+		inputStream.on('data', (chunk: Buffer) => {
+			chunks.push(chunk);
+			totalSize += chunk.length;
+		});
+
+		//Concatenate chunks into the final buffer
+		inputStream.on('end', () => {
+			const outBuffer = Buffer.concat(chunks, totalSize);
+			resolve(outBuffer.buffer as ArrayBuffer);
+		});
+
+		//Exit on error
+		inputStream.on('error', (error) => {
+			reject(error);
+		});
+	});
+}
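A minimal usage sketch, draining an arbitrary Readable into a single ArrayBuffer (the file name is illustrative):

import * as fs from 'fs';
import streamToArrayBuffer from './streams/streamToArrayBuffer'; //path assumed

async function example(): Promise<void> {
	const ab = await streamToArrayBuffer(fs.createReadStream('metafile.solid'));
	console.log(ab.byteLength);
}

One caveat worth noting: for small totals, Buffer.concat allocates from Node's internal buffer pool, so outBuffer.buffer can be larger than totalSize and start at a nonzero byteOffset. A stricter version would resolve with outBuffer.buffer.slice(outBuffer.byteOffset, outBuffer.byteOffset + totalSize).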