diff --git a/src/cdn/downloadUrlContents.ts b/src/cdn/downloadUrlContents.ts
index 1cbbd63..053587b 100644
--- a/src/cdn/downloadUrlContents.ts
+++ b/src/cdn/downloadUrlContents.ts
@@ -3,7 +3,7 @@ import * as http from 'http';
 import saveResponse from './funcs/saveResponse';
 
 /** Downloads the given URL and saves it to disk. Throws error if download fails. */
-export default function downloadUrlContents({ host, path }: {host: string, path: string}): Promise<fs.ReadStream> {
+export default function downloadUrlContents({ host, path }: {host: string, path: string}): Promise<string> {
   return new Promise((resolve, reject) => {
     //Create HTTP request
     const request = http.request({
diff --git a/src/cdn/funcs/saveResponse.ts b/src/cdn/funcs/saveResponse.ts
index f6933d9..2509d77 100644
--- a/src/cdn/funcs/saveResponse.ts
+++ b/src/cdn/funcs/saveResponse.ts
@@ -2,7 +2,7 @@ import * as fs from 'fs';
 import * as http from 'http';
 
 export default function saveResponse(
-  resolve: (fileName: fs.ReadStream) => void,
+  resolve: (fileName: string) => void,
   reject: (reason: string) => void,
   response: http.IncomingMessage,
 ) {
@@ -57,7 +57,8 @@ export default function saveResponse(
     //Return file reader
     //TODO: need to automatically delete file once it is no longer used
     //TODO: need to provide methods to seek through file
-    const stream = fs.createReadStream(tempFileName, { encoding: 'binary' }); //TODO: we may need to remove encoding since mentioning encoding automatically switches to string format
-    return resolve(stream);
+    return resolve(tempFileName);
+    //const stream = fs.createReadStream(tempFileName, { encoding: 'binary' }); //TODO: we may need to remove encoding since mentioning encoding automatically switches to string format
+    //return resolve(stream);
   });
 }
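
Side note on the change above: downloadUrlContents() now resolves with the temporary file's name instead of an open fs.ReadStream, so each caller decides when to open the file and when to delete it. A minimal sketch of that calling pattern, addressing the first TODO in saveResponse (the withDownloadedFile helper and its consume callback are illustrative names, not part of this diff):

import * as fs from 'fs';
import downloadUrlContents from './downloadUrlContents';

/** Hypothetical helper: opens the downloaded temp file, hands it to a consumer, then deletes it. */
async function withDownloadedFile(host: string, path: string, consume: (contents: fs.ReadStream) => Promise<void>): Promise<void> {
  const fileName = await downloadUrlContents({ host, path });
  try {
    //No encoding is passed, so the stream yields Buffer chunks rather than strings (see the commented-out TODO above).
    await consume(fs.createReadStream(fileName));
  } finally {
    //Delete the temp file once it is no longer used.
    await fs.promises.unlink(fileName);
  }
}
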
diff --git a/src/ssn/extractFileStream.ts b/src/ssn/extractFileStream.ts
index 53fd088..a1eb4de 100644
--- a/src/ssn/extractFileStream.ts
+++ b/src/ssn/extractFileStream.ts
@@ -11,26 +11,6 @@ import streamSetMaxLength from './streams/streamSetMaxLength';
  * and must transparently span across multiple disks if necessary.
  */
 export default function extractFileStream(file: ISsnFileEntry, inputStream: stream.Readable): stream.Readable {
-  const localFileHeader: Buffer = inputStream.read(30);
-
-  //Local file header signature
-  const magic = localFileHeader.readUInt32LE(0);
-  if (magic !== 0x04034B50) {
-    throw new Error(`Local file header had wrong magic; expected 0x04034B50 but got 0x${magic.toString(16).padStart(8, '0')}.`);
-  }
-  //All fields in the local file header are copies of the central file header, so we can skip them.
-  //FIXME: Maybe we should actually read these fields to verify that they are identical?
-  //skip 22 bytes
-  const localFilenameSize = localFileHeader.readUInt16LE(26);
-  const localExtraSize = localFileHeader.readUInt16LE(28);
-
-  //skip local file name and extra field
-  if (localFilenameSize + localExtraSize > 0) {
-    inputStream.read(localFilenameSize + localExtraSize);
-  }
-
-  //-------------------------------------------------
-
   let curStream = inputStream;
 
   curStream = streamSetMaxLength(curStream, file.compressedSize);
diff --git a/src/ssn/getManifest.ts b/src/ssn/getManifest.ts
index e0e1abb..8bd5859 100644
--- a/src/ssn/getManifest.ts
+++ b/src/ssn/getManifest.ts
@@ -6,6 +6,7 @@ import extractFileStream from './extractFileStream';
 import parsePatchmanifest from './reader/parsePatchmanifest';
 import readSsnFile from './reader/readSsnFile';
 import arrayBufferToStream from './streams/arrayBufferToStream';
+import readLocalFileHeader from './streams/readLocalFileHeader';
 import streamToString from './streams/streamToString';
 import verifyPatchmanifest from './verify/verifyPatchmanifest';
 import verifyProductName from './verify/verifyProductName';
@@ -35,6 +36,7 @@ export default async function getManifest(product: Product): Promise
   const stream = arrayBufferToStream(ssnFile, firstFile.offset);
 
   //Extract manifest.xml file
+  readLocalFileHeader(stream);
   const patchmanifestStream = extractFileStream(firstFile, stream);
 
   //Convert ArrayBuffer to string
diff --git a/src/ssn/getPatch.ts b/src/ssn/getPatch.ts
index 58cfff8..7b96eff 100644
--- a/src/ssn/getPatch.ts
+++ b/src/ssn/getPatch.ts
@@ -1,8 +1,12 @@
 import downloadUrlContents from '../cdn/downloadUrlContents';
 import getUrlContents from '../cdn/getUrlContents';
 import { Product } from '../interfaces/ISettings';
+import { SsnDiffType } from '../interfaces/ISsnFileEntry';
+import extractFileStream from './extractFileStream';
 import getSolidpkg from './getSolidpkg';
 import readSsnFile from './reader/readSsnFile';
+import getFileFromDisks from './streams/getFileFromDisks';
+import streamToArrayBuffer from './streams/streamToArrayBuffer';
 import verifyPatch from './verify/verifyPatch';
 
 export default async function getPatch(product: Product, from: number, to: number) {
@@ -27,27 +31,29 @@ export default async function getPatch(product: Product, from: number, to: numbe
   //Then we need to wait for disks to finish download before we can extract individual files
   //TODO: we can optimize this to already extract some files as soon as their relevant parts are downloaded
-  await Promise.all(diskFiles);
+  const diskFilenames = await Promise.all(diskFiles);
   //const dvArray = bufferArray.map((buffer) => new DataView(buffer));
 
   //TODO: Verify that downloaded files match the hash in `solidpkg.pieces`
 
   //Extract newly added files
-  /*fileEntries.filter((file) => file.diffType === SsnDiffType.NewFile).forEach(async (file) => {
-    const fileContents = await extractFile(file, diskFiles);
-    console.debug(new Uint8Array(fileContents));
-    //TODO
+  fileEntries.filter((file) => file.diffType === SsnDiffType.NewFile).forEach(async (file) => {
+    const fileStream = getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, storedSize: file.compressedSize });
+    const fileContents = extractFileStream(file, fileStream);
+    console.debug(await streamToArrayBuffer(fileContents));
+    //TODO: need to write to disk
   });
 
   //Extract changed files
   fileEntries.filter((file) => file.diffType === SsnDiffType.Changed).forEach(async (file) => {
-    const fileContents = await extractFile(file, diskFiles);
-    console.debug(new Uint8Array(fileContents));
-    //TODO
+    const fileStream = getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, storedSize: file.compressedSize });
+    const fileContents = extractFileStream(file, fileStream);
+    console.debug(await streamToArrayBuffer(fileContents));
+    //TODO: need to apply diffing, then write to disk
   });
 
   //Need to delete deleted files
   fileEntries.filter((file) => file.diffType === SsnDiffType.Deleted).forEach((file) => {
-    //TODO
-  });*/
+    //TODO: need to delete file
+  });
 }
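
The three reworked loops above still buffer each extracted file via streamToArrayBuffer() and only log it. One way the two "write to disk" TODOs could later be resolved is to pipe the extracted stream straight into a write stream; a sketch under the assumption that a target path is known (writeExtractedFile and targetPath are hypothetical, and the diffing step for changed files is not covered here):

import * as fs from 'fs';
import * as stream from 'stream';
import { promisify } from 'util';

const pipelinePromise = promisify(stream.pipeline);

/** Hypothetical sketch: stream an extracted file to disk instead of buffering it in memory. */
async function writeExtractedFile(fileContents: stream.Readable, targetPath: string): Promise<void> {
  //stream.pipeline() handles backpressure and destroys both streams if either errors.
  await pipelinePromise(fileContents, fs.createWriteStream(targetPath));
}

Inside the NewFile loop this would replace the console.debug() call with something like `await writeExtractedFile(fileContents, targetPath);`.
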
diff --git a/src/ssn/getSolidpkg.ts b/src/ssn/getSolidpkg.ts
index e5cbcd2..c266133 100644
--- a/src/ssn/getSolidpkg.ts
+++ b/src/ssn/getSolidpkg.ts
@@ -6,6 +6,7 @@ import extractFileStream from './extractFileStream';
 import parseBencode from './reader/bencodeParser';
 import readSsnFile from './reader/readSsnFile';
 import arrayBufferToStream from './streams/arrayBufferToStream';
+import readLocalFileHeader from './streams/readLocalFileHeader';
 import streamToArrayBuffer from './streams/streamToArrayBuffer';
 import verifyProductName from './verify/verifyProductName';
 import verifySolidpkg from './verify/verifySolidpkg';
@@ -40,6 +41,7 @@ export default async function getSolidpkg(product: Product, from: number, to: nu
   const stream = arrayBufferToStream(ssnFile, firstFile.offset);
 
   //Extract metafile.solid file
+  readLocalFileHeader(stream);
   const solidFileStream = extractFileStream(firstFile, stream);
   const solidFileArrayBuffer = await streamToArrayBuffer(solidFileStream);
   const solidContents = parseBencode(new DataView(solidFileArrayBuffer)) as ISolid;
diff --git a/src/ssn/streams/getFileFromDisks.ts b/src/ssn/streams/getFileFromDisks.ts
index d4ed99c..1d85ed8 100644
--- a/src/ssn/streams/getFileFromDisks.ts
+++ b/src/ssn/streams/getFileFromDisks.ts
@@ -1,3 +1,4 @@
+import * as fs from 'fs';
 import * as stream from 'stream';
 
 interface IGetFileFromDisksOptions {
@@ -10,6 +11,14 @@ interface IGetFileFromDisksOptions {
 }
 
 /** Takes a list of ReadableStreams (the disks), as well as the offset and length, and returns a stream for just one file. */
-export default function getFileFromDisks(disks: stream.Readable[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions) {
+export default function getFileFromDisks(disks: string[], { diskStart, offset, storedSize }: IGetFileFromDisksOptions): stream.Readable {
+  const diskStreams = disks.map((fileName) => fs.createReadStream(fileName));
+  //TODO: Can local file header also be spread across multiple disks, or only the payload?
+  //Read local file header
+  //...
+
+  //Create new stream that concatenates disks until storedSize is reached, then ends the stream.
+  const outputStream = new stream.Readable();
+  //...
+  return outputStream;
 }
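
getFileFromDisks() is still a stub: the returned stream.Readable has no _read() implementation yet, so reading from it would fail. One possible shape for the concatenation logic, assuming the stored payload may span disks but always starts at `offset` on disk `diskStart` (getFileFromDisksSketch is a hypothetical name, not code from this commit):

import * as fs from 'fs';
import * as stream from 'stream';

/** Illustrative sketch: concatenate the disk files from (diskStart, offset) until storedSize bytes have been emitted. */
function getFileFromDisksSketch(disks: string[], { diskStart, offset, storedSize }: { diskStart: number, offset: number, storedSize: number }): stream.Readable {
  const output = new stream.PassThrough();
  let remaining = storedSize;
  let diskIndex = diskStart;
  let start = offset;

  const readNextDisk = () => {
    //Open the current disk at the current offset; subsequent disks are read from the beginning.
    const disk = fs.createReadStream(disks[diskIndex], { start });
    disk.on('data', (chunk: Buffer) => {
      if (chunk.length >= remaining) {
        //Last chunk: emit only the bytes that belong to this file, then end the output.
        output.end(chunk.slice(0, remaining));
        remaining = 0;
        disk.destroy();
      } else {
        remaining -= chunk.length;
        output.write(chunk);
      }
    });
    disk.on('end', () => {
      //Disk exhausted but file incomplete: continue with the next disk.
      diskIndex += 1;
      start = 0;
      if (remaining > 0) { readNextDisk(); }
    });
    disk.on('error', (error) => output.destroy(error));
  };
  readNextDisk();

  return output;
}

Opening each disk lazily (instead of the eager diskStreams map in the stub) avoids holding file descriptors for disks the file never touches, and a PassThrough spares us from implementing _read() by hand. The sketch ignores backpressure on output.write(), which a production version should handle.
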
diff --git a/src/ssn/streams/readLocalFileHeader.ts b/src/ssn/streams/readLocalFileHeader.ts
new file mode 100644
index 0000000..04b6849
--- /dev/null
+++ b/src/ssn/streams/readLocalFileHeader.ts
@@ -0,0 +1,22 @@
+import * as stream from 'stream';
+
+/** Reads the local file header, which is included before each stored file, and advances the stream accordingly. */
+export default function readLocalFileHeader(inputStream: stream.Readable) {
+  const localFileHeader: Buffer = inputStream.read(30);
+
+  //Local file header signature
+  const magic = localFileHeader.readUInt32LE(0);
+  if (magic !== 0x04034B50) {
+    throw new Error(`Local file header had wrong magic; expected 0x04034B50 but got 0x${magic.toString(16).padStart(8, '0')}.`);
+  }
+  //All fields in the local file header are copies of the central file header, so we can skip them.
+  //FIXME: Maybe we should actually read these fields to verify that they are identical?
+  //skip 22 bytes
+  const localFilenameSize = localFileHeader.readUInt16LE(26);
+  const localExtraSize = localFileHeader.readUInt16LE(28);
+
+  //skip local file name and extra field
+  if (localFilenameSize + localExtraSize > 0) {
+    inputStream.read(localFilenameSize + localExtraSize);
+  }
+}
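
One caveat on the extracted helper: stream.Readable#read(size) returns null when fewer than `size` bytes are currently buffered, so readLocalFileHeader() as written relies on its callers handing it fully buffered streams, which holds for the arrayBufferToStream() streams in getManifest and getSolidpkg above. If it is ever pointed at a lazily filled stream (e.g. disk read streams), a waiting variant would be needed; a hypothetical sketch (readBytes is not part of this diff):

import * as stream from 'stream';

/** Hypothetical helper: resolves once `size` bytes can be read, instead of returning null like read() does. */
function readBytes(inputStream: stream.Readable, size: number): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    inputStream.once('error', reject);
    const tryRead = () => {
      const chunk: Buffer | null = inputStream.read(size);
      if (chunk !== null) {
        resolve(chunk);
      } else {
        //Not enough data buffered yet; try again when more arrives.
        inputStream.once('readable', tryRead);
      }
    };
    tryRead();
  });
}

Note that once the stream has ended, read(size) may return the remaining bytes even if fewer than `size`, so a production version should also check chunk.length.
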