🐛 Fix skipping of encryption header

C-3PO 2018-07-08 20:54:59 +02:00
parent 4192fc6e19
commit 4c9270d59c
Signed by: c3po
GPG key ID: 62993C4BB4D86F24
6 changed files with 26 additions and 18 deletions
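
Background on the bug: Node's stream.Readable.read(size) does not block; it returns null when fewer than size bytes are currently buffered. The old code called curStream.read(12) exactly once to skip the 12-byte encryption header, so if those bytes had not arrived yet, the call returned null, nothing was skipped, and the header bytes were later fed into decompression. The fix makes extractFileStream asynchronous: when the first read returns null, it waits until enough data is buffered and retries. A self-contained sketch of that pattern (skipEncryptionHeader is a hypothetical name; the actual change is in the first diff below):

import * as stream from 'stream';
import waitReadableLength from './streams/waitReadableLength';

async function skipEncryptionHeader(curStream: stream.Readable): Promise<void> {
    const header = curStream.read(12); //returns null if fewer than 12 bytes are buffered
    if (header === null) {
        //poll until the 12 header bytes are available, then retry the read
        await waitReadableLength(curStream, 12);
        curStream.read(12);
    }
}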


@@ -4,12 +4,13 @@ import * as stream from 'stream';
 import * as zlib from 'zlib';
 import { ISsnFileEntry } from '../interfaces/ISsnFileEntry';
 import decryptStream from './streams/decryptStream';
+import waitReadableLength from './streams/waitReadableLength';
 
 /** Extracts the file with the given metadata from the stream.
  * The stream must already start at the .zip's local file header
  * and must transparently span across multiple disks if necessary.
  */
-export default function extractFileStream(file: ISsnFileEntry, inputStream: stream.Readable, skipDecompression: boolean = false): stream.Readable {
+export default async function extractFileStream(file: ISsnFileEntry, inputStream: stream.Readable, skipDecompression: boolean = false): Promise<stream.Readable> {
     let curStream = inputStream;
 
     //pipe into decryption if file is encrypted
@@ -17,7 +18,12 @@ export default function extractFileStream(file: ISsnFileEntry, inputStream: stre
         const decryptTransform = decryptStream(file.decryptionKeys);
         curStream = curStream.pipe(decryptTransform);
         //skip encryption header
-        curStream.read(12);
+        const encryptionHeader = curStream.read(12);
+        if (encryptionHeader === null) {
+            //need to wait until data is ready for reading
+            await waitReadableLength(curStream, 12);
+            curStream.read(12);
+        }
     }
 
     if (!skipDecompression) {
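
Because extractFileStream now returns a Promise<stream.Readable>, every caller has to await it; the remaining hunks in this commit update those call sites accordingly.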


@@ -37,7 +37,7 @@ export default async function getManifest(product: Product): Promise<IManifest>
 
     //Extract manifest.xml file
     await readLocalFileHeader(stream, true);
-    const patchmanifestStream = extractFileStream(firstFile, stream);
+    const patchmanifestStream = await extractFileStream(firstFile, stream);
 
     //Convert ArrayBuffer to string
     const patchmanifestXml = await streamToString(patchmanifestStream);


@@ -40,7 +40,7 @@ export default async function getPatch(product: Product, from: number, to: numbe
     fileEntries.filter((file) => file.diffType === SsnDiffType.NewFile).forEach(async (file) => {
         try {
             const fileStream = await getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, length: file.compressedSize });
-            const fileContents = extractFileStream(file, fileStream, true);
+            const fileContents = await extractFileStream(file, fileStream, true);
             console.debug(file.name, file.compressedSize, await streamToArrayBuffer(fileContents));
             //TODO: need to write to disk
         } catch (error) {
@@ -52,7 +52,7 @@ export default async function getPatch(product: Product, from: number, to: numbe
     fileEntries.filter((file) => file.diffType === SsnDiffType.Changed).forEach(async (file) => {
         try {
             const fileStream = await getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, length: file.compressedSize });
-            const fileContents = extractFileStream(file, fileStream, true);
+            const fileContents = await extractFileStream(file, fileStream, true);
             console.debug(file.name, file.compressedSize, await streamToArrayBuffer(fileContents));
             //TODO: need to apply diffing, then write to disk
         } catch (error) {


@@ -42,7 +42,7 @@ export default async function getSolidpkg(product: Product, from: number, to: nu
 
     //Extract metafile.solid file
     await readLocalFileHeader(stream, true);
-    const solidFileStream = extractFileStream(firstFile, stream);
+    const solidFileStream = await extractFileStream(firstFile, stream);
     const solidFileArrayBuffer = await streamToArrayBuffer(solidFileStream);
     const solidContents = parseBencode(new DataView(solidFileArrayBuffer.buffer)) as ISolid;
 


@@ -1,16 +1,5 @@
 import * as stream from 'stream';
-
-/** Returns a promise that resolves as soon as the given stream has the given number of bytes ready for reading. */
-function waitReadableLength(inputStream: stream.Readable, minLength: number): Promise<void> {
-    return new Promise((resolve) => {
-        const interval = setInterval(() => {
-            if (inputStream.readableLength >= minLength) {
-                clearInterval(interval);
-                resolve();
-            }
-        }, 100);
-    });
-}
+import waitReadableLength from './waitReadableLength';
 
 /** Reads the local file header, which is included before
  * each stored file, and advances the stream accordingly.


@@ -0,0 +1,13 @@
+import * as stream from 'stream';
+
+/** Returns a promise that resolves as soon as the given stream has the given number of bytes ready for reading. */
+export default function waitReadableLength(inputStream: stream.Readable, minLength: number): Promise<void> {
+    return new Promise((resolve) => {
+        const interval = setInterval(() => {
+            if (inputStream.readableLength >= minLength) {
+                clearInterval(interval);
+                resolve();
+            }
+        }, 100);
+    });
+}
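
A design note on the new helper (not part of this commit): polling readableLength on a 100 ms timer is simple and dependency-free, but it adds up to 100 ms of latency per wait. An event-driven variant could resolve as soon as data arrives, for example (waitReadableLengthOnEvent is a hypothetical name):

import * as stream from 'stream';

//resolves once the stream has buffered at least minLength bytes, driven by 'readable' events
function waitReadableLengthOnEvent(inputStream: stream.Readable, minLength: number): Promise<void> {
    return new Promise((resolve) => {
        const check = () => {
            if (inputStream.readableLength >= minLength) {
                inputStream.removeListener('readable', check);
                resolve();
            }
        };
        inputStream.on('readable', check);
        check(); //the data may already be buffered
    });
}

With either approach, the promise never settles if the stream ends with fewer than minLength bytes buffered, so callers that cannot rule that out may also want to listen for 'end' or add a timeout.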