♻ Some refactoring to prepare for patch processing
parent 60fe546d31
commit f849744ac8

8 changed files with 57 additions and 35 deletions
src/cdn/downloadUrlContents.ts
@@ -3,7 +3,7 @@ import * as http from 'http';
 import saveResponse from './funcs/saveResponse';
 
 /** Downloads the given URL and saves it to disk. Throws error if download fails. */
-export default function downloadUrlContents({ host, path }: {host: string, path: string}): Promise<fs.ReadStream> {
+export default function downloadUrlContents({ host, path }: {host: string, path: string}): Promise<string> {
     return new Promise((resolve, reject) => {
         //Create HTTP request
         const request = http.request({
src/cdn/funcs/saveResponse.ts
@@ -2,7 +2,7 @@ import * as fs from 'fs';
 import * as http from 'http';
 
 export default function saveResponse(
-    resolve: (fileName: fs.ReadStream) => void,
+    resolve: (fileName: string) => void,
     reject: (reason: string) => void,
     response: http.IncomingMessage,
 ) {
@@ -57,7 +57,8 @@ export default function saveResponse(
         //Return file reader
         //TODO: need to automatically delete file once it is no longer used
         //TODO: need to provide methods to seek through file
-        const stream = fs.createReadStream(tempFileName, { encoding: 'binary' }); //TODO: we may need to remove encoding since mentioning encoding automatically switches to string format
-        return resolve(stream);
+        return resolve(tempFileName);
+        //const stream = fs.createReadStream(tempFileName, { encoding: 'binary' }); //TODO: we may need to remove encoding since mentioning encoding automatically switches to string format
+        //return resolve(stream);
     });
 }
src/ssn/extractFileStream.ts
@@ -11,26 +11,6 @@ import streamSetMaxLength from './streams/streamSetMaxLength';
  * and must transparently span across multiple disks if necessary.
  */
 export default function extractFileStream(file: ISsnFileEntry, inputStream: stream.Readable): stream.Readable {
-    const localFileHeader: Buffer = inputStream.read(30);
-
-    //Local file header signature
-    const magic = localFileHeader.readUInt32LE(0);
-    if (magic !== 0x04034B50) {
-        throw new Error(`Local file header had wrong magic; expected 0x04034B50 but got 0x${magic.toString(16).padStart(8, '0')}.`);
-    }
-    //All fields in the local file header are copies of the central file header, so we can skip them.
-    //FIXME: Maybe we should actually read these fields to verify that they are identical?
-    //skip 22 bytes
-    const localFilenameSize = localFileHeader.readUInt16LE(26);
-    const localExtraSize = localFileHeader.readUInt16LE(28);
-
-    //skip local file name and extra field
-    if (localFilenameSize + localExtraSize > 0) {
-        inputStream.read(localFilenameSize + localExtraSize);
-    }
-
-    //-------------------------------------------------
-
     let curStream = inputStream;
 
     curStream = streamSetMaxLength(curStream, file.compressedSize);
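The twenty deleted lines are not lost: they move unchanged into the new src/ssn/streams/readLocalFileHeader.ts at the bottom of this commit, so extractFileStream now expects the stream to already be positioned at the compressed payload. A sketch of the resulting calling convention — readEntry is a hypothetical wrapper, and the ISsnFileEntry import path is assumed from the other imports in this commit:

import * as stream from 'stream';
import { ISsnFileEntry } from '../interfaces/ISsnFileEntry';
import extractFileStream from './extractFileStream';
import readLocalFileHeader from './streams/readLocalFileHeader';

function readEntry(file: ISsnFileEntry, input: stream.Readable): stream.Readable {
    //Consume the 30-byte local file header plus file name and extra field...
    readLocalFileHeader(input);
    //...so extraction starts directly at the stored payload.
    return extractFileStream(file, input);
}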
src/ssn/getManifest.ts
@@ -6,6 +6,7 @@ import extractFileStream from './extractFileStream';
 import parsePatchmanifest from './reader/parsePatchmanifest';
 import readSsnFile from './reader/readSsnFile';
 import arrayBufferToStream from './streams/arrayBufferToStream';
+import readLocalFileHeader from './streams/readLocalFileHeader';
 import streamToString from './streams/streamToString';
 import verifyPatchmanifest from './verify/verifyPatchmanifest';
 import verifyProductName from './verify/verifyProductName';
@@ -35,6 +36,7 @@ export default async function getManifest(product: Product): Promise<IManifest>
     const stream = arrayBufferToStream(ssnFile, firstFile.offset);
 
     //Extract manifest.xml file
+    readLocalFileHeader(stream);
     const patchmanifestStream = extractFileStream(firstFile, stream);
 
     //Convert ArrayBuffer to string
src/ssn/getPatch.ts
@@ -1,8 +1,12 @@
 import downloadUrlContents from '../cdn/downloadUrlContents';
 import getUrlContents from '../cdn/getUrlContents';
 import { Product } from '../interfaces/ISettings';
+import { SsnDiffType } from '../interfaces/ISsnFileEntry';
+import extractFileStream from './extractFileStream';
 import getSolidpkg from './getSolidpkg';
 import readSsnFile from './reader/readSsnFile';
+import getFileFromDisks from './streams/getFileFromDisks';
+import streamToArrayBuffer from './streams/streamToArrayBuffer';
 import verifyPatch from './verify/verifyPatch';
 
 export default async function getPatch(product: Product, from: number, to: number) {
@@ -27,27 +31,29 @@ export default async function getPatch(product: Product, from: number, to: number) {
 
     //Then we need to wait for disks to finish download before we can extract individual files
     //TODO: we can optimize this to already extract some files as soon as their relevant parts are downloaded
-    await Promise.all(diskFiles);
+    const diskFilenames = await Promise.all(diskFiles);
     //const dvArray = bufferArray.map((buffer) => new DataView(buffer));
 
     //TODO: Verify that downloaded files match the hash in `solidpkg.pieces`
 
     //Extract newly added files
-    /*fileEntries.filter((file) => file.diffType === SsnDiffType.NewFile).forEach(async (file) => {
-        const fileContents = await extractFile(file, diskFiles);
-        console.debug(new Uint8Array(fileContents));
-        //TODO
+    fileEntries.filter((file) => file.diffType === SsnDiffType.NewFile).forEach(async (file) => {
+        const fileStream = getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, storedSize: file.compressedSize });
+        const fileContents = extractFileStream(file, fileStream);
+        console.debug(await streamToArrayBuffer(fileContents));
+        //TODO: need to write to disk
     });
 
     //Extract changed files
     fileEntries.filter((file) => file.diffType === SsnDiffType.Changed).forEach(async (file) => {
-        const fileContents = await extractFile(file, diskFiles);
-        console.debug(new Uint8Array(fileContents));
-        //TODO
+        const fileStream = getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, storedSize: file.compressedSize });
+        const fileContents = extractFileStream(file, fileStream);
+        console.debug(await streamToArrayBuffer(fileContents));
+        //TODO: need to apply diffing, then write to disk
     });
 
     //Need to delete deleted files
     fileEntries.filter((file) => file.diffType === SsnDiffType.Deleted).forEach((file) => {
-        //TODO
-    });*/
+        //TODO: need to delete file
+    });
 }
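Both forEach blocks still buffer the extracted file with streamToArrayBuffer and only log it; the "need to write to disk" TODO could later pipe the stream straight to its target path instead of buffering it in memory. A hedged sketch of that follow-up — writeEntryToDisk and targetPath are hypothetical names, not part of this commit:

import * as fs from 'fs';
import * as stream from 'stream';
import { promisify } from 'util';

const pipelineAsync = promisify(stream.pipeline);

//Stream the extracted file contents to disk without holding the whole file in memory.
async function writeEntryToDisk(fileContents: stream.Readable, targetPath: string): Promise<void> {
    await pipelineAsync(fileContents, fs.createWriteStream(targetPath));
}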
src/ssn/getSolidpkg.ts
@@ -6,6 +6,7 @@ import extractFileStream from './extractFileStream';
 import parseBencode from './reader/bencodeParser';
 import readSsnFile from './reader/readSsnFile';
 import arrayBufferToStream from './streams/arrayBufferToStream';
+import readLocalFileHeader from './streams/readLocalFileHeader';
 import streamToArrayBuffer from './streams/streamToArrayBuffer';
 import verifyProductName from './verify/verifyProductName';
 import verifySolidpkg from './verify/verifySolidpkg';
@@ -40,6 +41,7 @@ export default async function getSolidpkg(product: Product, from: number, to: number) {
     const stream = arrayBufferToStream(ssnFile, firstFile.offset);
 
     //Extract metafile.solid file
+    readLocalFileHeader(stream);
     const solidFileStream = extractFileStream(firstFile, stream);
     const solidFileArrayBuffer = await streamToArrayBuffer(solidFileStream);
     const solidContents = parseBencode(new DataView(solidFileArrayBuffer)) as ISolid;
src/ssn/streams/getFileFromDisks.ts
@@ -1,3 +1,4 @@
+import * as fs from 'fs';
 import * as stream from 'stream';
 
 interface IGetFileFromDisksOptions {
@@ -10,6 +11,14 @@ interface IGetFileFromDisksOptions {
 }
 
 /** Takes a list of ReadableStreams (the disks), as well as the offset and length, and returns a stream for just one file. */
-export default function getFileFromDisks(disks: stream.Readable[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions) {
+export default function getFileFromDisks(disks: string[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions): stream.Readable {
+    const diskStreams = disks.map((fileName) => fs.createReadStream(fileName));
+    //TODO: Can local file header also be spread across multiple disks, or only the payload?
+    //Read local file header
     //...
+
+    //Create new stream that concatenates disks until storedSize is reached, then ends the stream.
+    const outputStream = new stream.Readable();
+    //...
+    return outputStream;
 }
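The function body is still a stub (note the "//..." placeholders), and the doc comment above it still describes the old stream.Readable[] parameter even though the signature now takes file names. One possible shape for the concatenation the comments describe — illustrative only, ignoring backpressure and assuming each disk is a regular file; concatDisks is a hypothetical name:

import * as fs from 'fs';
import * as stream from 'stream';

function concatDisks(disks: string[], diskStart: number, offset: number, storedSize: number): stream.Readable {
    const output = new stream.PassThrough();
    let remaining = storedSize;

    function pump(index: number, start: number): void {
        if (remaining <= 0 || index >= disks.length) {
            output.end();
            return;
        }
        const input = fs.createReadStream(disks[index], { start });
        input.on('data', (chunk: Buffer) => {
            if (chunk.length >= remaining) {
                //Final chunk: emit only the bytes that belong to this file, then end.
                output.write(chunk.slice(0, remaining));
                remaining = 0;
                input.destroy();
                output.end();
            } else {
                remaining -= chunk.length;
                output.write(chunk);
            }
        });
        //Disk exhausted but file not complete: continue at the start of the next disk.
        input.on('end', () => pump(index + 1, 0));
        input.on('error', (err) => output.destroy(err));
    }

    pump(diskStart, offset);
    return output;
}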
src/ssn/streams/readLocalFileHeader.ts (new file, 22 lines)
@@ -0,0 +1,22 @@
+import * as stream from 'stream';
+
+/** Reads the local file header, which is included before each stored file, and advances the stream accordingly. */
+export default function readLocalFileHeader(inputStream: stream.Readable) {
+    const localFileHeader: Buffer = inputStream.read(30);
+
+    //Local file header signature
+    const magic = localFileHeader.readUInt32LE(0);
+    if (magic !== 0x04034B50) {
+        throw new Error(`Local file header had wrong magic; expected 0x04034B50 but got 0x${magic.toString(16).padStart(8, '0')}.`);
+    }
+    //All fields in the local file header are copies of the central file header, so we can skip them.
+    //FIXME: Maybe we should actually read these fields to verify that they are identical?
+    //skip 22 bytes
+    const localFilenameSize = localFileHeader.readUInt16LE(26);
+    const localExtraSize = localFileHeader.readUInt16LE(28);
+
+    //skip local file name and extra field
+    if (localFilenameSize + localExtraSize > 0) {
+        inputStream.read(localFilenameSize + localExtraSize);
+    }
+}
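For reference, the offsets used above follow the standard ZIP local file header layout: bytes 0-3 hold the 0x04034B50 signature, bytes 4-25 duplicate the central-directory fields, and bytes 26-27 / 28-29 hold the file name and extra field lengths. A small self-contained check of the helper — the test data is made up, and the import path assumes the check lives next to the new file:

import * as stream from 'stream';
import readLocalFileHeader from './readLocalFileHeader';

//Build a minimal header: 30 fixed bytes, a 3-byte file name ('a.b'), no extra field.
const header = Buffer.alloc(33);
header.writeUInt32LE(0x04034B50, 0); //signature
header.writeUInt16LE(3, 26); //file name length
header.writeUInt16LE(0, 28); //extra field length
header.write('a.b', 30);

const input = new stream.PassThrough();
input.end(header);
input.once('readable', () => {
    readLocalFileHeader(input);
    //30 header bytes + 3 file name bytes were consumed, so nothing remains.
    console.assert(input.read() === null, 'entire 33-byte header should be consumed');
});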