♻️ Only use fat-arrow functions when immediately returning result

C-3PO 2018-11-16 00:43:50 +01:00
parent 872b87a4e0
commit 9aff99ae86
Signed by: c3po
GPG key ID: 62993C4BB4D86F24
19 changed files with 59 additions and 61 deletions
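
A minimal sketch of the convention this commit applies (identifiers below are illustrative, not taken from the changed files): a fat-arrow function is kept only where its body is a single returned expression; every other callback becomes a function expression.

    // Hypothetical illustration of the rule; names are not from the repository.
    const double = (n: number) => n * 2; // immediately returns a result: keep the fat arrow

    // Body does more than return a value: use a function expression instead.
    const delayed = new Promise<number>(function(resolve) {
      setTimeout(function() { resolve(double(21)); }, 100);
    });

    delayed.then((n) => console.log(n)); // single returned expression again: fat arrow stays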

View file

@@ -5,7 +5,7 @@ import * as childProcess from 'child_process';
  * Takes as input the host domain name, the path and the file size
  */
 export default function downloadWithCurl({ host, path, tempFileName }: {host: string, path: string, tempFileName: string}): Promise<string> {
-  return new Promise((resolve, reject) => {
+  return new Promise(function(resolve, reject) {
     const url = `http://${host}${(path.substr(0, 1) === '/' ? '' : '/')}${path}`;
     const parameters: string[] = [
@@ -21,11 +21,11 @@ export default function downloadWithCurl({ host, path, tempFileName }: {host: st
     const spawnedProcess = childProcess.spawn('curl', parameters);
     spawnedProcess.stderr.setEncoding('utf8');
-    spawnedProcess.stderr.on('data', (error) => {
+    spawnedProcess.stderr.on('data', function(error) {
       reject(`Error in process:\n> curl ${parameters.join(' ')}\n${error}`);
     });
-    spawnedProcess.on('exit', (code) => {
+    spawnedProcess.on('exit', function(code) {
       if (code === 0) {
         resolve(tempFileName);
       } else {

View file

@@ -2,9 +2,9 @@ import * as fs from 'fs';
 /** Checks if this file already exists on the local disk, so we don't need to download it again. */
 export default function checkLocalCache(fileName: string, size: number): Promise<boolean> {
-  return new Promise((resolve, reject) => {
+  return new Promise(function(resolve, reject) {
     //Check if file already exists
-    fs.exists(fileName, (exists) => {
+    fs.exists(fileName, function(exists) {
       if (exists) {
         const fileStats = fs.statSync(fileName);
         //check if file size matches
@@ -12,7 +12,7 @@ export default function checkLocalCache(fileName: string, size: number): Promise
           resolve(true);
         } else {
           //delete file so we can overwrite it
-          fs.unlink(fileName, (error) => {
+          fs.unlink(fileName, function(error) {
            if (error) {
              reject(error);
            }

View file

@@ -3,29 +3,29 @@ import * as path from 'path';
 /** Recursively creates the given directory. */
 export default function createDirRecursively(folderName: string): Promise<void> {
-  return new Promise((resolve, reject) => {
+  return new Promise(function(resolve, reject) {
     //Try to create directory
-    fs.mkdir(folderName, (error) => {
+    fs.mkdir(folderName, function(error) {
       //If it fails, we first need to create parent directory
       if (error) {
         switch (error.code) {
           case 'ENOENT': //parent does not exist
             //Create parent
             const parentFolder = path.dirname(folderName);
-            createDirRecursively(parentFolder).then(() => {
+            createDirRecursively(parentFolder).then(function() {
               //Then try again
               try {
                 resolve(createDirRecursively(folderName));
               } catch (error) {
                 reject(error);
               }
-            }).catch((parentError) => {
+            }).catch(function(parentError) {
               reject(parentError);
             });
             break;
           case 'EEXIST': { //already exists (either as file or directory)
-            fs.stat(folderName, (statError, stats) => {
+            fs.stat(folderName, function(statError, stats) {
              if (statError) {
                reject(statError);
              }

View file

@@ -22,7 +22,7 @@ export default function handleResponse(
   //If we receive a part of the response, store it
   const chunkList: Buffer[] = [];
   let totalLength = 0;
-  response.on('data', (chunk: Buffer) => {
+  response.on('data', function(chunk: Buffer) {
     totalLength += chunk.length;
     //Exit early if we received too much data
@@ -35,7 +35,7 @@ export default function handleResponse(
   });
   //If we finished reading response, check for correctness, then return it
-  response.on('end', () => {
+  response.on('end', function() {
     //Check that length is correct
     if (totalLength !== headerLength) {
       return reject(`Expected length ${headerLength} but received ${totalLength}.`);

View file

@@ -31,23 +31,23 @@ import * as dns from 'dns';
 import { IDnsResult } from '../../interfaces/IDnsResult';
 //TODO: send e-mail with the error
-const assert = (condition: boolean) => { if (!condition) { console.warn('Assert failed'); } };
+const assert = function(condition: boolean) { if (!condition) { console.warn('Assert failed'); } };
 /** Looks up the given domain and returns a list of IP addresses, along with their time-to-live */
 async function resolveDns(domain: string): Promise<IDnsResult[]> {
-  return new Promise((resolve) => {
+  return new Promise(function(resolve) {
     //check given string for correctness to prevent injection attacks
     if (domain.match(/^[a-z0-9]+(?:[-.]{1}[a-z0-9]+)*\.[a-z]{2,3}$/) === null) { return resolve([]); }
     //Check Level3/North_America separately
     if (domain !== 'cdn-patch.swtor.com') {
-      dns.resolve4(domain, { ttl: true }, (err, result) => {
-        return resolve(result.map(({ address, ttl }) => ({ address, ttl, type: 'level3-us' as IDnsResult['type'] })));
-      });
+      dns.resolve4(domain, { ttl: true }, (err, result) => (
+        resolve(result.map(({ address, ttl }) => ({ address, ttl, type: 'level3-us' as IDnsResult['type'] })))
+      ));
     } else {
       //Use bash so we get more information.
       //Also do plenty of asserts to ensure that overall CDN structure has stayed unchanged, and TODO send e-mail if it's different (max. once per hour)
-      exec('dig +noall +answer "cdn-patch.swtor.com"', { timeout: 10000 }, (error, stdout) => {
+      exec('dig +noall +answer "cdn-patch.swtor.com"', { timeout: 10000 }, function(error, stdout) {
        //check for error
        assert(!error);
        if (error) {

View file

@@ -18,7 +18,7 @@ export default function saveResponse(
   response.pipe(writeStream);
   //Wait until everything is written to disk, then return file name
-  writeStream.on('close', () => {
+  writeStream.on('close', function() {
     resolve(filePath);
   });
 }

View file

@@ -3,7 +3,7 @@ import handleResponse from './funcs/handleResponse';
 /** Downloads the given URL into memory and returns it as an ArrayBuffer. Throws error if download fails or file is too large to be handled in memory. */
 export default function getUrlContents({ host, path }: {host: string, path: string}): Promise<ArrayBuffer> {
-  return new Promise((resolve, reject) => {
+  return new Promise(function(resolve, reject) {
     //Create HTTP request
     const request = http.request({
       family: 4,
@@ -12,7 +12,7 @@ export default function getUrlContents({ host, path }: {host: string, path: stri
     }, handleResponse.bind(null, resolve, (reason: string) => { request.abort(); reject(reason); }));
     //In case of connection errors, exit early
-    request.on('error', (error) => {
+    request.on('error', function(error) {
       request.abort();
       reject(error);
     });

View file

@@ -24,7 +24,7 @@ async function heartbeatDns(domain: string) {
   //Update array with new information
   dnsResults.forEach(
-    ({ address, type }, index) => {
+    function({ address, type }, index) {
       //Calculate weight:
       //on cdn-patch.swtor.com: 3 if first, 2 if second, otherwise 1
       let weight = (index < 2) ? (3 - index) : 1;
@@ -60,14 +60,14 @@ async function heartbeatDns(domain: string) {
   //Decay weights - reduce them based on update frequency (-50% if full minute, but less if TTL was shorter than a minute)
   const decayFactor = 0.5 ** ((now - lastUpdate) / 60000);
   lastUpdate = now;
-  servers.forEach((server) => { server.weight *= decayFactor; });
+  servers.forEach(function(server) { server.weight *= decayFactor; });
   //Sort the array by weight
   servers.sort((a, b) => b.weight - a.weight);
   //Output current list
   let output = '';
-  servers.forEach((server) => {
+  servers.forEach(function(server) {
     //set colors based on server type, see https://en.wikipedia.org/wiki/ANSI_escape_code#Colors
     //bright color if seen within last 5 minutes
     if (now - server.lastSeen < 300000) { output += '\x1b[1m'; } else { output += '\x1b[0m'; }
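
A quick worked example of the decay step in the hunk above, assuming weights are meant to halve once per full minute between updates:

    // Assumed values for illustration only.
    const lastUpdate = Date.now() - 30000; // previous update 30 seconds ago
    const now = Date.now();
    const decayFactor = 0.5 ** ((now - lastUpdate) / 60000); // = 0.5 ** 0.5 ≈ 0.707

So after a 30-second gap each server keeps about 71% of its weight, and after a full 60-second gap exactly 50%.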

View file

@@ -3,7 +3,7 @@ import updateKeys from './lib/updateKeys';
 export default function getDecryptor(decryptionKeys: [number, number, number]) {
   let [key0, key1, key2] = decryptionKeys;
-  return (chunk: Buffer) => {
+  return function(chunk: Buffer) {
     for (let i = 0; i < chunk.length; i += 1) {
       //read byte
       let curChar = chunk.readUInt8(i);

View file

@@ -1,5 +1,5 @@
 //Karatsuba algorithm - https://stackoverflow.com/questions/31087787/javascript-32bit-integer-multiply-simulation
-const int32Mul = (x: number, y: number) => {
+export default function int32Mul(x: number, y: number) {
   // We use B = 2 and m = 16, because it will make sure that we only do multiplications with
   // 16 Bit per factor so that the result must have less than 32 Bit in total (which fits well
   // into a double).
@@ -13,6 +13,4 @@ const int32Mul = (x: number, y: number) => {
   // z1 + z0. We can discard z2 completely as it only contains a value out of our relevant bounds.
   // Both z1 and z0 are 32 Bit, but we shift out the top 16 Bit of z1.
   return (((x1 * y0 + x0 * y1) << 16) + (x0 * y0)) | 0;
-};
-export default int32Mul;
+}
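
For context, the hunk above elides the middle of int32Mul. Based on the Karatsuba comments and the final return statement, the elided part presumably just splits each factor into 16-bit halves; a hedged reconstruction follows, where the four helper constants are an assumption and only the signature and the return line appear in the diff:

    // Sketch only - the x1/x0/y1/y0 derivation is assumed, not shown in the diff.
    export default function int32Mul(x: number, y: number): number {
      const x1 = x >>> 16;   // high 16 bits of x
      const x0 = x & 0xFFFF; // low 16 bits of x
      const y1 = y >>> 16;   // high 16 bits of y
      const y0 = y & 0xFFFF; // low 16 bits of y
      // z2 = x1*y1 lies entirely above bit 32 and is discarded; z1 = cross terms, z0 = x0*y0.
      return (((x1 * y0 + x0 * y1) << 16) + (x0 * y0)) | 0;
    }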

View file

@@ -21,7 +21,7 @@ export default async function extractFileAsStream(file: ISsnFileEntry, inputStre
   //pipe into decompression
   const decompressTransform = zlib.createInflateRaw();
-  decompressTransform.on('error', (error) => {
+  decompressTransform.on('error', function(error) {
     //TODO: need to throw error sync, not async
     throw new Error(`Error during decompression of "${file.name}": ${error.message}`);
   });

View file

@@ -79,7 +79,7 @@ export default async function installPatch({ product, from, to, sourceDirectory,
   //--------------------------------------------------------------------------------------------------------------
   //Perform the patching
-  const extractAdded = async (file: ISsnFileEntry) => {
+  const extractAdded = async function(file: ISsnFileEntry) {
     try {
       //create file write stream
       const outputName = path.join(targetDir, file.name);
@@ -89,7 +89,7 @@ export default async function installPatch({ product, from, to, sourceDirectory,
       //start installation
       await launch(diskFilenames[file.diskNumberStart], file.offset, file.compressedSize, file.decryptionKeys, undefined, outputNameTemp);
-      fs.rename(outputNameTemp, outputName, (renameError) => {
+      fs.rename(outputNameTemp, outputName, function(renameError) {
        if (renameError) {
          throw new Error(`Could not rename output file "${outputNameTemp}": ${renameError.message}`);
        }
@@ -99,7 +99,7 @@ export default async function installPatch({ product, from, to, sourceDirectory,
     }
   };
-  const extractChanged = async (file: ISsnFileEntry) => {
+  const extractChanged = async function(file: ISsnFileEntry) {
     try {
       const sourceFile = path.join(sourceDir, file.name);
       const outputName = path.join(targetDir, file.name);
@@ -113,18 +113,18 @@ export default async function installPatch({ product, from, to, sourceDirectory,
       //clean up: delete source file if necessary, and remove .tmp file extension
       if (sourceDir === targetDir) {
-        fs.unlink(sourceFile, (deleteError) => {
+        fs.unlink(sourceFile, function(deleteError) {
          if (deleteError) {
            throw new Error(`Could not delete old source file "${sourceFile}": ${deleteError.message}`);
          }
-          fs.rename(outputNameTemp, outputName, (renameError) => {
+          fs.rename(outputNameTemp, outputName, function(renameError) {
            if (renameError) {
              throw new Error(`Could not rename output file "${outputNameTemp}": ${renameError.message}`);
            }
          });
        });
      } else {
-        fs.rename(outputNameTemp, outputName, (renameError) => {
+        fs.rename(outputNameTemp, outputName, function(renameError) {
          if (renameError) {
            throw new Error(`Could not rename output file "${outputNameTemp}": ${renameError.message}`);
          }
@@ -136,10 +136,10 @@ export default async function installPatch({ product, from, to, sourceDirectory,
     }
   };
-  const deleteRemoved = async (file: ISsnFileEntry) => {
+  const deleteRemoved = async function(file: ISsnFileEntry) {
     //delete file
     const fileName = path.join(targetDir, file.name);
-    fs.unlink(fileName, (error) => {
+    fs.unlink(fileName, function(error) {
       if (error) {
         console.error(`Could not delete removed file "${file.name}"`, error);
       }
@@ -147,12 +147,12 @@ export default async function installPatch({ product, from, to, sourceDirectory,
     //TODO: delete folder (and parent folders) that are empty
   };
-  const copyUnchanged = async (file: ISsnFileEntry) => {
+  const copyUnchanged = async function(file: ISsnFileEntry) {
     //copy file
     const sourceName = path.join(sourceDir, file.name);
     const targetName = path.join(targetDir, file.name);
     await createDirRecursively(path.dirname(targetName));
-    fs.copyFile(sourceName, targetName, (error) => {
+    fs.copyFile(sourceName, targetName, function(error) {
       if (error) {
         console.error(`Could not copy unchanged file "${file.name}"`, error);
       }

View file

@@ -12,7 +12,7 @@ export default function launchProcess(
   previousFile: string | undefined,
   output: string | fs.WriteStream,
 ) {
-  return new Promise((resolve, reject) => {
+  return new Promise(function(resolve, reject) {
     const parameters = [
       '--disk', diskFile,
       '--offset', offset,
@@ -32,7 +32,7 @@ export default function launchProcess(
     if (typeof output === 'string') {
       spawnedProcess.stdout.setEncoding('utf8');
-      spawnedProcess.stdout.on('data', (chunk) => {
+      spawnedProcess.stdout.on('data', function(chunk) {
        console.log(chunk);
      });
    } else {
@@ -41,7 +41,7 @@ export default function launchProcess(
     spawnedProcess.stdout.on('end', resolve);
     spawnedProcess.stderr.setEncoding('utf8');
-    spawnedProcess.stderr.on('data', (error) => {
+    spawnedProcess.stderr.on('data', function(error) {
       reject(`Error in process:\n> ${processPath} ${parameters.join(' ')}\n${error}`);
     });
   });

View file

@@ -1,10 +1,10 @@
 export default function taskManager(tasks: Array<() => Promise<void>>, maxConcurrentTasks: number): Promise<any[]> {
-  return new Promise((resolve, reject) => {
+  return new Promise(function(resolve, reject) {
     const returnValues = Array(tasks.length);
     const remainingTasks = tasks;
     let currentlyRunningTasks = 0;
-    const startNewTask = () => {
+    const startNewTask = function() {
       //Exit if we completed all tasks
       if (remainingTasks.length === 0) {
         if (currentlyRunningTasks === 0) {
@@ -15,11 +15,11 @@ export default function taskManager(tasks: Array<() => Promise<void>>, maxConcur
       const curTask = remainingTasks.pop() as () => Promise<void>;
       const curTaskIndex = remainingTasks.length;
       currentlyRunningTasks += 1;
-      curTask().then((...result) => {
+      curTask().then(function(...result) {
         returnValues[curTaskIndex] = result;
         currentlyRunningTasks -= 1;
         return startNewTask();
-      }).catch((error) => {
+      }).catch(function(error) {
         reject(error);
       });
     }
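
As a usage note for the hunk above: taskManager drains the task array from the back and keeps at most maxConcurrentTasks promises in flight at once. A minimal sketch of calling it, with made-up tasks and an assumed import path:

    import taskManager from './taskManager'; // path assumed for illustration

    const wait = (ms: number) => new Promise<void>(function(resolve) { setTimeout(resolve, ms); });
    // Five dummy tasks; each logs, then resolves after 100 ms.
    const tasks = [1, 2, 3, 4, 5].map((id) => function() {
      console.log(`task ${id} started`);
      return wait(100);
    });
    // Run at most two tasks concurrently; the returned promise settles once every task has finished.
    taskManager(tasks, 2).then((results) => console.log(`done, ${results.length} results`));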

View file

@@ -12,7 +12,7 @@ export default function parsePatchManifest(manifestFile: any): IManifest {
   //<Releases><Release><Id>0</Id><SHA1>53678f8057e52896a8145dca5c188ab3f24fa55f</SHA1></Release></Releases>
   const Releases = PatchManifest.elements[7];
-  Releases.elements.forEach((Release: any) => {
+  Releases.elements.forEach(function(Release: any) {
     const id = Release.elements[0].elements[0].text;
     const sha1 = Release.elements[1].elements[0].text;
     out.releases[id] = { sha1, from: [], to: [] };
@@ -20,7 +20,7 @@ export default function parsePatchManifest(manifestFile: any): IManifest {
   //<ReleaseUpdatePaths><ReleaseUpdatePath><From>287</From><To>288</To><ExtraDataItem>...</ExtraDataItem></ReleaseUpdatePath></ReleaseUpdatePaths
   const ReleaseUpdatePaths = PatchManifest.elements[8];
-  ReleaseUpdatePaths.elements.forEach((ReleaseUpdatePath: any) => {
+  ReleaseUpdatePaths.elements.forEach(function(ReleaseUpdatePath: any) {
     const from = ReleaseUpdatePath.elements[0].elements[0].text;
     const to = ReleaseUpdatePath.elements[1].elements[0].text;
     //Release -1 does not exist but is a valid "from", e.g. for -1to0

View file

@@ -2,8 +2,8 @@ import * as stream from 'stream';
 /** Returns a promise that resolves as soon as the given stream has the given number of bytes ready for reading. */
 function waitReadableLength(inputStream: stream.Readable, minLength: number): Promise<void> {
-  return new Promise((resolve) => {
-    const interval = setInterval(() => {
+  return new Promise(function(resolve) {
+    const interval = setInterval(function() {
       if (inputStream.readableLength >= minLength) {
         clearInterval(interval);
         resolve();

View file

@@ -1,24 +1,24 @@
 import * as stream from 'stream';
 export default function streamToArrayBuffer(inputStream: stream.Readable): Promise<Buffer> {
-  return new Promise((resolve, reject) => {
+  return new Promise(function(resolve, reject) {
     const chunks: Buffer[] = [];
     let totalSize = 0;
     //Convert chunks to string
-    inputStream.on('data', (chunk: Buffer) => {
+    inputStream.on('data', function(chunk: Buffer) {
       chunks.push(chunk);
       totalSize += chunk.length;
     });
     //Output final string
-    inputStream.on('end', () => {
+    inputStream.on('end', function() {
       const outBuffer = Buffer.concat(chunks, totalSize);
       resolve(outBuffer);
     });
     //Exit on error
-    inputStream.on('error', (error) => {
+    inputStream.on('error', function(error) {
       reject(error);
     });
   });

View file

@@ -3,22 +3,22 @@ import * as stream from 'stream';
 export default function streamToString(inputStream: stream.Readable): Promise<string> {
   const decoder = new TextDecoder('utf-8');
-  return new Promise((resolve, reject) => {
+  return new Promise(function(resolve, reject) {
     const stringParts: string[] = [];
     //Convert chunks to string
-    inputStream.on('data', (chunk: Buffer) => {
+    inputStream.on('data', function(chunk: Buffer) {
       stringParts.push(decoder.decode(chunk, { stream: true }));
     });
     //Output final string
-    inputStream.on('end', () => {
+    inputStream.on('end', function() {
       stringParts.push(decoder.decode());
       resolve(stringParts.join(''));
     });
     //Exit on error
-    inputStream.on('error', (error) => {
+    inputStream.on('error', function(error) {
       reject(error);
     });
   });

View file

@@ -38,7 +38,7 @@ export default function verifyPatch(fileEntries: ISsnFileEntry[], product: Produ
   //Patches from -1 must not have diff type NewFile
   if (from === -1) {
-    fileEntries.filter((file) => file.diffType !== SsnDiffType.NewFile).forEach((file) => {
+    fileEntries.filter((file) => file.diffType !== SsnDiffType.NewFile).forEach(function(file) {
      throw new Error(`Patches from -1 must be included in full, but this patch had a file "${file.name}" with diff type ${file.diffType}.`);
    });
  }