♻ Read local file header separately

This commit is contained in:
C-3PO 2018-07-08 20:18:04 +02:00
parent 843314e475
commit 4d8a631db4
Signed by: c3po
GPG key ID: 62993C4BB4D86F24
4 changed files with 22 additions and 16 deletions

View file

@@ -36,7 +36,7 @@ export default async function getManifest(product: Product): Promise<IManifest>
const stream = arrayBufferToStream(ssnFile, firstFile.offset);
//Extract manifest.xml file
await readLocalFileHeader(stream);
await readLocalFileHeader(stream, true);
const patchmanifestStream = extractFileStream(firstFile, stream);
//Convert ArrayBuffer to string

View file

@@ -41,7 +41,7 @@ export default async function getSolidpkg(product: Product, from: number, to: nu
const stream = arrayBufferToStream(ssnFile, firstFile.offset);
//Extract metafile.solid file
await readLocalFileHeader(stream);
await readLocalFileHeader(stream, skipAdditionalLength);
const solidFileStream = extractFileStream(firstFile, stream);
const solidFileArrayBuffer = await streamToArrayBuffer(solidFileStream);
const solidContents = parseBencode(new DataView(solidFileArrayBuffer)) as ISolid;

View file

@@ -11,15 +11,14 @@ interface IGetFileFromDisksOptions {
storedSize: number;
}
function getStream(disks: string[], index: number, offset: number, length: number = Infinity) {
function createFileStream(disks: string[], index: number, offset: number, length: number = Infinity): fs.ReadStream {
return fs.createReadStream(disks[index], { start: offset, end: offset + length - 1 });
}
/** Takes a list of ReadableStreams (the disks), as well as the offset and length, and returns a stream for just one file. */
export default async function getFileFromDisks(disks: string[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions): Promise<stream.Readable> {
async function getFullStream(disks: string[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions): Promise<stream.Readable> {
let curDiskIndex = diskStart;
let curDisk = getStream(disks, diskStart, offset);
let localFileHeaderLength = 0;
let curDisk: fs.ReadStream = createFileStream(disks, diskStart, offset);
let totalRead = 0;
//Create new stream that concatenates disks until storedSize is reached, then ends the stream.
@@ -28,8 +27,8 @@ export default async function getFileFromDisks(disks: string[], { diskStart, off
const onData = (chunk: Buffer) => {
totalRead += chunk.length;
//If we've reached the end, we can stop reading after this chunk
const readTooManyBytes = totalRead - (localFileHeaderLength + storedSize);
if (localFileHeaderLength !== 0 && readTooManyBytes >= 0) {
const readTooManyBytes = totalRead - storedSize;
if (readTooManyBytes >= 0) {
//We can still write the whole chunk before ending the stream
if (readTooManyBytes === 0) {
outputStream.end(chunk);
@@ -40,6 +39,7 @@ export default async function getFileFromDisks(disks: string[], { diskStart, off
const shortenedChunk = Buffer.alloc(shortenedLength, chunk);
outputStream.end(shortenedChunk);
curDisk.close();
curDisk.off('data', onData);
}
} else {
//Nowhere near the end, so just write normally
@@ -52,10 +52,10 @@ export default async function getFileFromDisks(disks: string[], { diskStart, off
const onEnd = () => {
curDiskIndex += 1;
//End if we are at end of file or end of disks
if (curDiskIndex >= disks.length || (localFileHeaderLength !== 0 && totalRead >= localFileHeaderLength + storedSize)) {
if (curDiskIndex >= disks.length || totalRead >= storedSize) {
outputStream.end();
} else {
curDisk = getStream(disks, curDiskIndex, 0, (localFileHeaderLength === 0) ? Infinity : localFileHeaderLength + storedSize - totalRead);
curDisk = createFileStream(disks, curDiskIndex, 0, storedSize - totalRead);
//set up new listeners for data and end
curDisk.on('data', onData);
curDisk.on('end', onEnd);
@@ -67,9 +67,15 @@ export default async function getFileFromDisks(disks: string[], { diskStart, off
curDisk.on('end', onEnd);
curDisk.on('error', onError);
//Read local file header
localFileHeaderLength = await readLocalFileHeader(outputStream);
//now that local file header has been read, we will restrict length of stream to storedSize
return outputStream;
}
export default async function getFileFromDisks(disks: string[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions): Promise<stream.Readable> {
//read local file header
const headerStream = await getFullStream(disks, { diskStart, offset, storedSize: 30 });
const localFileHeaderLength = await readLocalFileHeader(headerStream);
headerStream.destroy(); //TODO
//read actual file
return getFullStream(disks, { diskStart, offset: offset + localFileHeaderLength, storedSize });
}

View file

@@ -16,7 +16,7 @@ function waitReadableLength(inputStream: stream.Readable, minLength: number): Pr
* each stored file, and advances the stream accordingly.
* Returns length of the local file header.
*/
export default async function readLocalFileHeader(inputStream: stream.Readable): Promise<number> {
export default async function readLocalFileHeader(inputStream: stream.Readable, skipAdditionalLength = false): Promise<number> {
let localFileHeader: Buffer = inputStream.read(30);
if (localFileHeader === null) {
//need to wait until data is ready for reading
@@ -37,7 +37,7 @@ export default async function readLocalFileHeader(inputStream: stream.Readable):
//skip local file name and extra field
const additionalLength = localFilenameSize + localExtraSize;
if (additionalLength > 0) {
if (skipAdditionalLength && additionalLength > 0) {
await waitReadableLength(inputStream, additionalLength);
const tmpChunk = inputStream.read(additionalLength);
if (tmpChunk === null) {