🐛 Fix end of file reached error
parent 5c915776aa, commit 68348b3673

6 changed files with 42 additions and 5 deletions
@@ -24,7 +24,7 @@ interface ISsnFileEntry {
   name: string;
   /** Uncompressed size */
   size: number;
-  /** Stored size (size of compressed data + 12 byte encryption header if applicable) */
+  /** Stored size (size of compressed data + 12 byte encryption header if applicable). Does not include the local ZIP file header, which is slightly longer than 30 bytes. */
   compressedSize: number;
   /** Decryption keys needed to decrypt the file */
   decryptionKeys: [number, number, number] | undefined;
@@ -4,6 +4,7 @@ import * as stream from 'stream';
 import * as zlib from 'zlib';
 import { ISsnFileEntry } from '../interfaces/ISsnFileEntry';
 import decryptStream from './streams/decryptStream';
+import streamSetMaxLength from './streams/streamSetMaxLength';

 /** Extracts the file with the given metadata from the stream.
  * The stream must already start at the .zip's local file header
@@ -32,9 +33,11 @@ export default function extractFileStream(file: ISsnFileEntry, inputStream: stre

   let curStream = inputStream;

+  curStream = streamSetMaxLength(curStream, file.compressedSize);
+
   //pipe into decryption if file is encrypted
   if (file.decryptionKeys !== undefined) {
-    const decryptTransform = decryptStream([...file.decryptionKeys] as [number, number, number]);
+    const decryptTransform = decryptStream(file.decryptionKeys);
     curStream = curStream.pipe(decryptTransform);
   }

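For orientation, a hedged sketch of the capped pipeline this hunk sets up. The inflate step is an assumption based on the zlib import above; the decryption branch and the rest of extractFileStream are outside this hunk and are omitted here.

import * as stream from 'stream';
import * as zlib from 'zlib';
import streamSetMaxLength from './streams/streamSetMaxLength';

// Sketch only: cap the raw entry stream at compressedSize, then inflate.
// zlib.createInflateRaw() is assumed, not taken from this diff.
function inflateEntry(input: stream.Readable, compressedSize: number): stream.Readable {
  const capped = streamSetMaxLength(input, compressedSize);
  return capped.pipe(zlib.createInflateRaw());
}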
@@ -32,7 +32,7 @@ export default async function getManifest(product: Product): Promise<IManifest>
     throw new Error(`Expected .patchmanifest to contain a file called manifest.xml but it is called "${firstFile.name}".`);
   }

-  const stream = arrayBufferToStream(ssnFile, firstFile.offset, firstFile.compressedSize);
+  const stream = arrayBufferToStream(ssnFile, firstFile.offset);

   //Extract manifest.xml file
   const patchmanifestStream = extractFileStream(firstFile, stream);
@@ -37,7 +37,7 @@ export default async function getSolidpkg(product: Product, from: number, to: nu
     throw new Error(`Expected .solidpkg to contain a file called metafile.solid but it is called "${firstFile.name}".`);
   }

-  const stream = arrayBufferToStream(ssnFile, firstFile.offset, firstFile.compressedSize);
+  const stream = arrayBufferToStream(ssnFile, firstFile.offset);

   //Extract metafile.solid file
   const solidFileStream = extractFileStream(firstFile, stream);
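Side note (illustration only, not part of the commit): per the updated ISsnFileEntry comment, compressedSize covers only the stored data, while the stream handed to extractFileStream starts at the local ZIP file header (30 fixed bytes plus variable-length file name and extra fields). Capping the stream at compressedSize up front therefore cut off the tail of the compressed data, which is presumably what triggered the "end of file reached" error; the cap is now applied inside extractFileStream via streamSetMaxLength instead. A rough sketch with made-up numbers:

// Hypothetical values, for illustration only; none of these come from the repository.
const offset = 0x1000;           // start of the entry's local file header
const compressedSize = 5000;     // stored data only, per ISsnFileEntry
const localHeaderSize = 30 + 16; // 30 fixed header bytes + e.g. 16 bytes of file name/extra field

const oldStreamEnd = offset + compressedSize;                    // where the old cap ended the stream
const actualDataEnd = offset + localHeaderSize + compressedSize; // where the stored data really ends
console.log(actualDataEnd - oldStreamEnd);                       // 46 bytes cut off, hence the premature end of file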
@@ -11,7 +11,7 @@ export default function arrayBufferToStream(arrayBuffer: ArrayBuffer, offset: nu
   }

   let position = offset;
-  const endPosition = (length !== undefined) ? offset + length : arrayBuffer.byteLength;
+  const endPosition = (length !== undefined) ? (offset + length) : arrayBuffer.byteLength;
   const outStream = new stream.Readable({
     read(size) {
       const chunkSize = Math.min(size || BUFFER_SIZE, endPosition - position); //TODO: we can probably remove BUFFER_SIZE
src/ssn/streams/streamSetMaxLength.ts (new file, 34 lines)
@@ -0,0 +1,34 @@
+import * as stream from 'stream';
+
+/** Takes the given ReadableStream and returns a ReadableStream with the same contents but that terminates after the given length. */
+export default function streamSetMaxLength(inputStream: stream.Readable, maxLength: number): stream.Readable {
+  if (maxLength <= 0) {
+    throw new RangeError('maxLength is out of bounds.');
+  }
+
+  let remaining = maxLength;
+
+  const outStream = new stream.Readable({
+    read(size) {
+      //If no size is provided, just pass through all remaining bytes
+      if (size === undefined) {
+        this.push(inputStream.read(remaining));
+        remaining = 0;
+        //End is reached, terminate stream
+        this.push(null);
+      } else {
+        //Otherwise, pass through however many bytes we can
+        const clampedSize = Math.min(size, remaining);
+        this.push(inputStream.read(clampedSize));
+        remaining -= clampedSize;
+
+        //If end is reached, terminate stream
+        if (remaining <= 0) {
+          this.push(null);
+        }
+      }
+    },
+  });
+
+  return outStream;
+}
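A minimal usage sketch for the new helper (hypothetical, not part of the commit; it assumes the source stream already has its data buffered, since the read callback pulls from inputStream synchronously):

import * as stream from 'stream';
import streamSetMaxLength from './streamSetMaxLength';

// Source stream with 10 bytes already buffered.
const source = new stream.PassThrough();
source.end(Buffer.from('0123456789'));

// Cap it at 4 bytes: the wrapped stream pushes '0123' and then terminates.
const capped = streamSetMaxLength(source, 4);
capped.on('data', (chunk) => process.stdout.write(chunk));
capped.on('end', () => console.log(' <- capped stream ended'));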