🚧 Only skip decompression for disks

C-3PO 2018-07-08 19:38:25 +02:00
parent 4e41a46006
commit a6de035101
Signed by: c3po
GPG key ID: 62993C4BB4D86F24
2 changed files with 12 additions and 10 deletions


@@ -10,7 +10,7 @@ import streamSetMaxLength from './streams/streamSetMaxLength';
  * The stream must already start at the .zip's local file header
  * and must transparently span across multiple disks if necessary.
  */
-export default function extractFileStream(file: ISsnFileEntry, inputStream: stream.Readable): stream.Readable {
+export default function extractFileStream(file: ISsnFileEntry, inputStream: stream.Readable, skipDecompression: boolean = false): stream.Readable {
 	let curStream = inputStream;
 
 	curStream = streamSetMaxLength(curStream, file.compressedSize);
@@ -21,13 +21,15 @@ export default function extractFileStream(file: ISsnFileEntry, inputStream: stre
 		curStream = curStream.pipe(decryptTransform);
 	}
 
-	/*//pipe into decompression
-	const decompressTransform = zlib.createInflateRaw();
-	decompressTransform.on('error', (error) => {
-		//TODO: need to throw error sync, not async
-		throw new Error(`Error during decompression of "${file.name}": ${error.message}`);
-	});
-	curStream = curStream.pipe(decompressTransform);*/
+	if (!skipDecompression) {
+		//pipe into decompression
+		const decompressTransform = zlib.createInflateRaw();
+		decompressTransform.on('error', (error) => {
+			//TODO: need to throw error sync, not async
+			throw new Error(`Error during decompression of "${file.name}": ${error.message}`);
+		});
+		curStream = curStream.pipe(decompressTransform);
+	}
 
 	return curStream;
 }
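
The TODO above still stands: throwing from inside the 'error' handler happens asynchronously, so no caller can catch it. A minimal sketch of one alternative, not part of this commit and using a hypothetical helper name, is to collect the inflated stream behind a promise so the zlib error surfaces as a rejection:

import * as stream from 'stream';
import * as zlib from 'zlib';

// Hypothetical helper (not in the repository): inflate a raw-deflate stream
// and report zlib errors as a promise rejection instead of an async throw.
function inflateToBuffer(input: stream.Readable, name: string): Promise<Buffer> {
	return new Promise<Buffer>((resolve, reject) => {
		const inflate = zlib.createInflateRaw();
		const chunks: Buffer[] = [];
		inflate.on('data', (chunk: Buffer) => chunks.push(chunk));
		inflate.on('end', () => resolve(Buffer.concat(chunks)));
		inflate.on('error', (error: Error) => {
			reject(new Error(`Error during decompression of "${name}": ${error.message}`));
		});
		input.on('error', reject);
		input.pipe(inflate);
	});
}

A caller could then await inflateToBuffer(...) inside an existing try/catch instead of relying on the throw inside the event handler.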


@@ -40,7 +40,7 @@ export default async function getPatch(product: Product, from: number, to: numbe
 	fileEntries.filter((file) => file.diffType === SsnDiffType.NewFile).forEach(async (file) => {
 		try {
 			const fileStream = await getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, storedSize: file.compressedSize });
-			const fileContents = extractFileStream(file, fileStream);
+			const fileContents = extractFileStream(file, fileStream, true);
 			console.debug(await streamToArrayBuffer(fileContents));
 			//TODO: need to write to disk
 		} catch (error) {
@@ -52,7 +52,7 @@ export default async function getPatch(product: Product, from: number, to: numbe
 	fileEntries.filter((file) => file.diffType === SsnDiffType.Changed).forEach(async (file) => {
 		try {
 			const fileStream = await getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, storedSize: file.compressedSize });
-			const fileContents = extractFileStream(file, fileStream);
+			const fileContents = extractFileStream(file, fileStream, true);
 			console.debug(await streamToArrayBuffer(fileContents));
 			//TODO: need to apply diffing, then write to disk
 		} catch (error) {
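
Both loops above still carry a write-to-disk TODO. A sketch under the same assumptions (hypothetical helper, not in the repository): since extractFileStream(file, fileStream, true) now yields the stored bytes unchanged, they could be piped straight into a file instead of being buffered through streamToArrayBuffer:

import * as fs from 'fs';
import * as stream from 'stream';

// Hypothetical helper for the write-to-disk TODO: stream the extracted
// contents into a file and settle the promise when the write completes.
function writeStreamToFile(contents: stream.Readable, outPath: string): Promise<void> {
	return new Promise<void>((resolve, reject) => {
		const out = fs.createWriteStream(outPath);
		contents.on('error', reject);
		out.on('error', reject);
		out.on('finish', () => resolve());
		contents.pipe(out);
	});
}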