🐎 Use patcher-installer to install from previous patch as well

This commit is contained in:
C-3PO 2018-09-14 04:23:11 +02:00
parent 1fb738505e
commit 2cd936e6c2
Signed by: c3po
GPG key ID: 62993C4BB4D86F24
4 changed files with 22 additions and 122 deletions

View file

@@ -5,14 +5,11 @@ import createDirRecursively from '../cdn/funcs/createDirRecursively';
import getUrlContents from '../cdn/getUrlContents';
import { Product } from '../interfaces/ISettings';
import { SsnDiffType } from '../interfaces/ISsnFileEntry';
import extractFileAsStream from './extractFileAsStream';
import getSolidpkg from './getSolidpkg';
import launch from './patcher-installer/launch';
import readSsnFile from './reader/readSsnFile';
import getFileFromDisks from './streams/getFileFromDisks';
import verifyPatch from './verify/verifyPatch';
import verifyProductName from './verify/verifyProductName';
import performDiffing from './xdelta3/performDiffing';
interface IGetPatchArgs {
/** The product that should be patched. */
@@ -90,7 +87,7 @@ export default async function getPatch({ product, from, to, sourceDirectory, tar
const outputStream = fs.createWriteStream(outputName);
//start installation
await launch(diskFilenames[file.diskNumberStart], file.offset, file.compressedSize, file.decryptionKeys, outputStream);
await launch(diskFilenames[file.diskNumberStart], file.offset, file.compressedSize, file.decryptionKeys, undefined, outputStream);
outputStream.close();
} catch (error) {
@@ -101,15 +98,16 @@ export default async function getPatch({ product, from, to, sourceDirectory, tar
//Extract changed files
fileEntries.filter((file) => file.diffType === SsnDiffType.Changed).forEach(async (file) => {
try {
//extract file
const fileStream = await getFileFromDisks(diskFilenames, { diskStart: file.diskNumberStart, offset: file.offset, length: file.compressedSize });
const fileContents = await extractFileAsStream(file, fileStream);
//need to apply xdelta3 diffing, then write to disk
const sourceFile = path.join(sourceDir, file.name);
const outputName = path.join(targetDir, file.name);
const outputNameTemp = `${outputName}.tmp`;
await performDiffing(sourceFile, fileContents, outputNameTemp);
const outputNameTemp = path.join(targetDir, `${file.name}.tmp`);
//create file write stream
await createDirRecursively(path.dirname(outputNameTemp));
const outputStream = fs.createWriteStream(outputNameTemp);
//start installation
await launch(diskFilenames[file.diskNumberStart], file.offset, file.compressedSize, file.decryptionKeys, sourceFile, outputStream);
//clean up: delete source file if necessary, and remove .tmp file extension
if (sourceDir === targetDir) {
@@ -132,6 +130,7 @@ export default async function getPatch({ product, from, to, sourceDirectory, tar
}
} catch (error) {
console.error(`Could not extract file "${file.name}"`, error);
//TODO: need to delete .tmp file
}
});

View file

@@ -4,7 +4,14 @@ import * as path from 'path';
const processPath = path.join(__dirname, '../../lib/patcher-installer');
export default function launchProcess(diskFile: string, offset: number, compressedSize: number, decryptionKeys: [number, number, number] | undefined, outputStream: fs.WriteStream) {
export default function launchProcess(
diskFile: string,
offset: number,
compressedSize: number,
decryptionKeys: [number, number, number] | undefined,
previousFile: string | undefined,
outputStream: fs.WriteStream,
) {
return new Promise((resolve, reject) => {
const parameters = [
'--disk', diskFile,
@@ -14,6 +21,10 @@ export default function launchProcess(diskFile: string, offset: number, compress
if (decryptionKeys !== undefined) {
parameters.push('--keys', decryptionKeys.join(','));
}
if (previousFile !== undefined) {
parameters.push(previousFile);
}
//Bug fix: `{ cwd: '.' }` was previously passed as the second argument to
//Array.prototype.map (the `thisArg`), so spawn never received it as options.
//It belongs as the third argument of childProcess.spawn.
const spawnedProcess = childProcess.spawn(processPath, parameters.map((value) => value.toString()), { cwd: '.' });
spawnedProcess.stdout.pipe(outputStream);

View file

@@ -1,81 +0,0 @@
import * as fs from 'fs';
import * as stream from 'stream';
import readLocalFileHeader from './readLocalFileHeader';
/** Describes where a stored file is located within the sequence of split disk files. */
interface IGetFileFromDisksOptions {
/** Number of the disk where the local file header starts */
diskStart: number;
/** Offset into the start of the disk where the local file header starts. */
offset: number;
/** Length of the stored file (compressed size + optional 12 byte encryption header), but excluding the length of the local file header. */
length: number;
}
/**
 * Opens a read stream over one disk file, beginning at `startOffset` and
 * covering at most `byteCount` bytes (defaults to the remainder of the file).
 */
function createFileStream(diskPaths: string[], diskIndex: number, startOffset: number, byteCount: number = Infinity): fs.ReadStream {
  //createReadStream's `end` is inclusive, hence the -1
  const lastByte = startOffset + byteCount - 1;
  return fs.createReadStream(diskPaths[diskIndex], { start: startOffset, end: lastByte });
}
/** Takes a list of ReadableStreams (the disks), as well as the offset and length, and returns a stream for just one file. */
/**
 * Takes a list of disk file names, plus the starting disk, offset and total length,
 * and returns a single readable stream that spans disk boundaries: when one disk
 * ends before `length` bytes were emitted, reading continues on the next disk.
 * The returned stream ends exactly after `length` bytes (or when disks run out).
 */
async function getConcatenatedStream(disks: string[], { diskStart, offset, length }: IGetFileFromDisksOptions): Promise<stream.Readable> {
  let curDiskIndex = diskStart;
  let curDisk: fs.ReadStream = createFileStream(disks, diskStart, offset, length);
  let totalRead = 0;
  //Create new stream that concatenates disks until `length` is reached, then ends the stream.
  const outputStream = new stream.PassThrough();
  const onData = (chunk: Buffer) => {
    totalRead += chunk.length;
    //If we've reached the end, we can stop reading after this chunk
    const excessBytes = totalRead - length;
    if (excessBytes >= 0) {
      //emit the final chunk (truncated if we read past `length`), then stop reading
      const finalChunk = (excessBytes === 0) ? chunk : chunk.subarray(0, chunk.length - excessBytes);
      outputStream.end(finalChunk);
      //detach in both cases so a lingering read cannot write after end
      curDisk.off('data', onData);
      curDisk.close();
    } else {
      //Nowhere near the end, so just write normally
      outputStream.write(chunk);
    }
  };
  const onError = (error: Error) => {
    //Bug fix: propagate read failures to the consumer; previously this only
    //logged, leaving the output stream hanging (never erroring, never ending).
    outputStream.destroy(error);
  };
  const onEnd = () => {
    curDiskIndex += 1;
    //End if we are at end of file or end of disks
    if (curDiskIndex >= disks.length || totalRead >= length) {
      outputStream.end();
    } else {
      //continue with the next disk, reading only the bytes still missing
      curDisk = createFileStream(disks, curDiskIndex, 0, length - totalRead);
      //set up new listeners for data, end and error
      curDisk.on('data', onData);
      curDisk.on('end', onEnd);
      curDisk.on('error', onError);
    }
  };
  curDisk.on('data', onData);
  curDisk.on('end', onEnd);
  curDisk.on('error', onError);
  return outputStream;
}
/**
 * Returns a readable stream for a single stored file, given the disks it is
 * spread across. First reads the 30-byte local file header to determine its
 * total size, then streams `length` bytes starting right after that header.
 */
export default async function getFileFromDisks(disks: string[], { diskStart, offset, length }: IGetFileFromDisksOptions): Promise<stream.Readable> {
  //parse the local file header to learn how many bytes it occupies
  const headerStream = await getConcatenatedStream(disks, { diskStart, offset, length: 30 });
  const headerLength = await readLocalFileHeader(headerStream);
  headerStream.destroy(); //TODO: is this the best way to close/destroy the stream?
  //the stored file's payload begins immediately after the local file header
  const payloadOffset = offset + headerLength;
  return getConcatenatedStream(disks, { diskStart, offset: payloadOffset, length });
}

View file

@@ -1,29 +0,0 @@
import * as childProcess from 'child_process';
import * as fs from 'fs';
import * as stream from 'stream';
/**
 * Applies an xdelta3 diff: spawns `xdelta3 -d -s sourceFile`, pipes the diff
 * stream into its stdin and writes the reconstructed output to `targetFile`.
 * Resolves once xdelta3 exited successfully AND the target file is fully
 * flushed; rejects with an Error on spawn failure, stderr output, or a
 * non-zero exit code.
 */
export default function performDiffing(sourceFile: string, diffStream: stream.Readable, targetFile: string): Promise<void> {
  return new Promise((resolve, reject) => {
    const targetStream = fs.createWriteStream(targetFile);
    //spawn xdelta3 process, set up listeners
    //(named `xdelta` so we do not shadow Node's global `process` object)
    const xdelta = childProcess.spawn('xdelta3', ['-d', '-s', sourceFile]);
    diffStream.pipe(xdelta.stdin);
    xdelta.stdout.pipe(targetStream);
    xdelta.stderr.on('data', (chunk) => {
      //reject with a real Error instead of a plain string
      reject(new Error(`Error during xdelta3: ${chunk}`));
    });
    xdelta.on('error', (error) => {
      //e.g. the xdelta3 binary is missing or not executable
      reject(new Error(`Error during xdelta3: ${error}`));
    });
    //'close' (unlike 'exit') fires only after the child's stdio streams have
    //ended, so stdout has been fully handed to targetStream by this point
    xdelta.on('close', (code, signal) => {
      if (code === 0) {
        //resolve only once the target file stream has flushed everything
        if (targetStream.writableFinished) {
          resolve();
        } else {
          targetStream.once('finish', () => resolve());
        }
      } else {
        reject(new Error(`xdelta3 exited with code "${code}" due to signal "${signal}".`));
      }
    });
  });
}