♻ Some refactoring to prepare for patch processing

This commit is contained in:
C-3PO 2018-07-05 20:50:23 +02:00
parent 60fe546d31
commit f849744ac8
Signed by: c3po
GPG key ID: 62993C4BB4D86F24
8 changed files with 57 additions and 35 deletions

View file

@@ -3,7 +3,7 @@ import * as http from 'http';
import saveResponse from './funcs/saveResponse';
/** Downloads the given URL and saves it to disk. Throws error if download fails. */
export default function downloadUrlContents({ host, path }: {host: string, path: string}): Promise<fs.ReadStream> {
export default function downloadUrlContents({ host, path }: {host: string, path: string}): Promise<string> {
return new Promise((resolve, reject) => {
//Create HTTP request
const request = http.request({

View file

@@ -2,7 +2,7 @@ import * as fs from 'fs';
import * as http from 'http';
export default function saveResponse(
resolve: (fileName: fs.ReadStream) => void,
resolve: (fileName: string) => void,
reject: (reason: string) => void,
response: http.IncomingMessage,
) {
@@ -57,7 +57,8 @@ export default function saveResponse(
//Return file reader
//TODO: need to automatically delete file once it is no longer used
//TODO: need to provide methods to seek through file
const stream = fs.createReadStream(tempFileName, { encoding: 'binary' }); //TODO: we may need to remove encoding since mentioning encoding automatically switches to string format
return resolve(stream);
return resolve(tempFileName);
//const stream = fs.createReadStream(tempFileName, { encoding: 'binary' }); //TODO: we may need to remove encoding since mentioning encoding automatically switches to string format
//return resolve(stream);
});
}

View file

@@ -11,26 +11,6 @@ import streamSetMaxLength from './streams/streamSetMaxLength';
* and must transparently span across multiple disks if necessary.
*/
export default function extractFileStream(file: ISsnFileEntry, inputStream: stream.Readable): stream.Readable {
const localFileHeader: Buffer = inputStream.read(30);
//Local file header signature
const magic = localFileHeader.readUInt32LE(0);
if (magic !== 0x04034B50) {
throw new Error(`Local file header had wrong magic; expected 0x04034B50 but got 0x${magic.toString(16).padStart(8, '0')}.`);
}
//All fields in the local file header are copies of the central file header, so we can skip them.
//FIXME: Maybe we should actually read these fields to verify that they are identical?
//skip 22 bytes
const localFilenameSize = localFileHeader.readUInt16LE(26);
const localExtraSize = localFileHeader.readUInt16LE(28);
//skip local file name and extra field
if (localFilenameSize + localExtraSize > 0) {
inputStream.read(localFilenameSize + localExtraSize);
}
//-------------------------------------------------
let curStream = inputStream;
curStream = streamSetMaxLength(curStream, file.compressedSize);

View file

@@ -6,6 +6,7 @@ import extractFileStream from './extractFileStream';
import parsePatchmanifest from './reader/parsePatchmanifest';
import readSsnFile from './reader/readSsnFile';
import arrayBufferToStream from './streams/arrayBufferToStream';
import readLocalFileHeader from './streams/readLocalFileHeader';
import streamToString from './streams/streamToString';
import verifyPatchmanifest from './verify/verifyPatchmanifest';
import verifyProductName from './verify/verifyProductName';
@@ -35,6 +36,7 @@ export default async function getManifest(product: Product): Promise<IManifest>
const stream = arrayBufferToStream(ssnFile, firstFile.offset);
//Extract manifest.xml file
readLocalFileHeader(stream);
const patchmanifestStream = extractFileStream(firstFile, stream);
//Convert ArrayBuffer to string

View file

@@ -1,8 +1,12 @@
import downloadUrlContents from '../cdn/downloadUrlContents';
import getUrlContents from '../cdn/getUrlContents';
import { Product } from '../interfaces/ISettings';
import { SsnDiffType } from '../interfaces/ISsnFileEntry';
import extractFileStream from './extractFileStream';
import getSolidpkg from './getSolidpkg';
import readSsnFile from './reader/readSsnFile';
import getFileFromDisks from './streams/getFileFromDisks';
import streamToArrayBuffer from './streams/streamToArrayBuffer';
import verifyPatch from './verify/verifyPatch';
export default async function getPatch(product: Product, from: number, to: number) {
@@ -27,27 +31,29 @@ export default async function getPatch(product: Product, from: number, to: numbe
//Then we need to wait for disks to finish download before we can extract individual files
//TODO: we can optimize this to already extract some files as soon as their relevant parts are downloaded
await Promise.all(diskFiles);
const diskFilenames = await Promise.all(diskFiles);
//const dvArray = bufferArray.map((buffer) => new DataView(buffer));
//TODO: Verify that downloaded files match the hash in `solidpkg.pieces`
//Extract newly added files
/*fileEntries.filter((file) => file.diffType === SsnDiffType.NewFile).forEach(async (file) => {
const fileContents = await extractFile(file, diskFiles);
console.debug(new Uint8Array(fileContents));
//TODO
fileEntries.filter((file) => file.diffType === SsnDiffType.NewFile).forEach(async (file) => {
const fileStream = getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, storedSize: file.compressedSize });
const fileContents = extractFileStream(file, fileStream);
console.debug(await streamToArrayBuffer(fileContents));
//TODO: need to write to disk
});
//Extract changed files
fileEntries.filter((file) => file.diffType === SsnDiffType.Changed).forEach(async (file) => {
const fileContents = await extractFile(file, diskFiles);
console.debug(new Uint8Array(fileContents));
//TODO
const fileStream = getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, storedSize: file.compressedSize });
const fileContents = extractFileStream(file, fileStream);
console.debug(await streamToArrayBuffer(fileContents));
//TODO: need to apply diffing, then write to disk
});
//Need to delete deleted files
fileEntries.filter((file) => file.diffType === SsnDiffType.Deleted).forEach((file) => {
//TODO
});*/
//TODO: need to delete file
});
}

View file

@@ -6,6 +6,7 @@ import extractFileStream from './extractFileStream';
import parseBencode from './reader/bencodeParser';
import readSsnFile from './reader/readSsnFile';
import arrayBufferToStream from './streams/arrayBufferToStream';
import readLocalFileHeader from './streams/readLocalFileHeader';
import streamToArrayBuffer from './streams/streamToArrayBuffer';
import verifyProductName from './verify/verifyProductName';
import verifySolidpkg from './verify/verifySolidpkg';
@@ -40,6 +41,7 @@ export default async function getSolidpkg(product: Product, from: number, to: nu
const stream = arrayBufferToStream(ssnFile, firstFile.offset);
//Extract metafile.solid file
readLocalFileHeader(stream);
const solidFileStream = extractFileStream(firstFile, stream);
const solidFileArrayBuffer = await streamToArrayBuffer(solidFileStream);
const solidContents = parseBencode(new DataView(solidFileArrayBuffer)) as ISolid;

View file

@@ -1,3 +1,4 @@
import * as fs from 'fs';
import * as stream from 'stream';
interface IGetFileFromDisksOptions {
@@ -10,6 +11,14 @@ interface IGetFileFromDisksOptions {
}
/** Takes a list of disk file names, as well as the offset and length, and returns a stream for just one file. */
export default function getFileFromDisks(disks: stream.Readable[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions) {
export default function getFileFromDisks(disks: string[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions): stream.Readable {
//Open one read stream per disk file name; the selected file's payload starts on disk `diskStart` at `offset` and spans `storedSize` bytes — TODO confirm once implemented.
const diskStreams = disks.map((fileName) => fs.createReadStream(fileName));
//TODO: Can local file header also be spread across multiple disks, or only the payload?
//Read local file header
//...
//Create new stream that concatenates disks until storedSize is reached, then ends the stream.
//NOTE(review): stub — a bare `new stream.Readable()` has no _read() implementation and will
//error if consumed; the concatenation/slicing logic is not implemented yet.
const outputStream = new stream.Readable();
//...
return outputStream;
}

View file

@@ -0,0 +1,22 @@
import * as stream from 'stream';
/** Reads the local file header, which is included before each stored file, and advances the stream accordingly. */
/**
 * Reads the ZIP local file header (fixed 30 bytes plus variable-length file
 * name and extra field) that precedes each stored file, and advances the
 * stream past it so the caller is positioned at the file's payload.
 * @param inputStream Readable positioned at the start of a local file header;
 *   the 30 header bytes must already be buffered.
 * @throws Error if fewer than 30 bytes are available or the magic is wrong.
 */
export default function readLocalFileHeader(inputStream: stream.Readable) {
  //read(30) returns null when fewer than 30 bytes are buffered; guard so we
  //fail with a clear message instead of a TypeError on readUInt32LE below.
  const localFileHeader: Buffer | null = inputStream.read(30);
  if (localFileHeader === null || localFileHeader.length < 30) {
    throw new Error('Unexpected end of stream while reading local file header.');
  }
  //Local file header signature
  const magic = localFileHeader.readUInt32LE(0);
  if (magic !== 0x04034B50) {
    throw new Error(`Local file header had wrong magic; expected 0x04034B50 but got 0x${magic.toString(16).padStart(8, '0')}.`);
  }
  //All fields in the local file header are copies of the central file header, so we can skip them.
  //FIXME: Maybe we should actually read these fields to verify that they are identical?
  //skip 22 bytes
  const localFilenameSize = localFileHeader.readUInt16LE(26);
  const localExtraSize = localFileHeader.readUInt16LE(28);
  //skip local file name and extra field
  if (localFilenameSize + localExtraSize > 0) {
    //NOTE(review): this read() may also return null if the bytes are not yet
    //buffered; callers currently provide fully-buffered streams — TODO confirm.
    inputStream.read(localFilenameSize + localExtraSize);
  }
}