♻ Move skipping of encryption header out of decryption

This commit is contained in:
C-3PO 2018-07-08 20:45:19 +02:00
parent 6dc6aa8844
commit 4192fc6e19
Signed by: c3po
GPG key ID: 62993C4BB4D86F24
6 changed files with 10 additions and 118 deletions

View file

@ -24,7 +24,8 @@ export default function getDecryptor(decryptionKeys: [number, number, number]) {
let position = 0;
return (encryptedChunk: Buffer) => {
const decryptedChunk = Buffer.alloc(encryptedChunk.length - Math.max(12 - position, 0));
const decryptedChunk = Buffer.alloc(encryptedChunk.length);
//const decryptedChunk = Buffer.alloc(encryptedChunk.length - Math.max(12 - position, 0));
for (let i = 0; i < encryptedChunk.length; i += 1) {
//read byte
@ -39,13 +40,14 @@ export default function getDecryptor(decryptionKeys: [number, number, number]) {
[key0, key1, key2] = updateKeys([key0, key1, key2], curChar);
//write byte
if (position + i < 12) {
/*if (position + i < 12) {
//do nothing
} else if (position + i >= 12 && position < 12) {
decryptedChunk.writeUInt8(curChar, position + i - 12);
} else {
decryptedChunk.writeUInt8(curChar, i);
}
}*/
decryptedChunk.writeUInt8(curChar, i);
}
position += encryptedChunk.length;

View file

@ -1,23 +0,0 @@
import updateKeys from './lib/updateKeys';
/**
 * Decrypts a full file buffer in one pass.
 * @param dv DataView over the encrypted bytes.
 * @param length Number of encrypted bytes, including the 12-byte encryption header.
 * @param keys The three running decryption keys; key2 drives the keystream byte.
 * @returns DataView over the decrypted payload (length - 12 bytes).
 */
export default function decryptFile(dv: DataView, length: number, [key0, key1, key2]: [number, number, number]) {
  //Output excludes the 12-byte random encryption header at the start of the input.
  const dvOut = new DataView(new ArrayBuffer(length - 12));
  for (let offset = 0; offset < length; offset += 1) {
    //Derive the keystream byte from key2 and decrypt the current byte.
    const keyPart = (key2 | 2) & 0xFFFF;
    const curChar = dv.getUint8(offset) ^ (((keyPart * (keyPart ^ 1)) >>> 8) & 0xFF);
    //Only bytes past the 12-byte header are part of the payload.
    if (offset >= 12) {
      dvOut.setUint8(offset - 12, curChar);
    }
    //Advance the key state with the decrypted byte.
    [key0, key1, key2] = updateKeys([key0, key1, key2], curChar);
  }
  return dvOut;
}

View file

@ -1,43 +0,0 @@
import * as zlib from 'zlib';
import { ISsnFileEntry } from '../interfaces/ISsnFileEntry';
import decryptFile from './decryption/decryptFile';
import ByteReader from './extractFileByteReader';
/** Extracts the given file from the given DataView array and returns it as an ArrayBuffer.
* Will throw an error when end of final DataView is reached.
*/
export default async function extractFile(file: ISsnFileEntry, dvArray: DataView[]): Promise<ArrayBuffer> {
//Use ByteReader for reading a uint8 and seeking forward across DataView boundaries
const byteReader = new ByteReader(dvArray, file.diskNumberStart, file.offset);
//Local file header signature
if (byteReader.readUint32() !== 0x04034B50) {
throw new Error('Local file header had wrong magic');
}
//All fields in the local file header are copies of the central file header, so we can skip them.
//FIXME: Maybe we should actually read these fields to verify that they are identical?
byteReader.seek(22);
const localFilenameSize = byteReader.readUint16();
const localExtraSize = byteReader.readUint16();
byteReader.seek(localFilenameSize + localExtraSize);
//Extract actual file contents
let dvFinal = byteReader.extractDv(file.compressedSize);
//Decrypt file if necessary
if (file.decryptionKeys !== undefined) {
dvFinal = decryptFile(dvFinal, file.compressedSize, file.decryptionKeys);
}
//Decompress file
const decompressedBuffer: Buffer = await new Promise((resolve, reject) => {
zlib.inflateRaw(dvFinal, (error, result) => {
if (error !== null) {
return reject(error);
}
resolve(result);
});
}) as Buffer;
return decompressedBuffer.buffer as ArrayBuffer;
}

View file

@ -1,46 +0,0 @@
/**
 * Sequential byte reader over an array of DataViews (the disks), transparently
 * crossing DataView boundaries as it advances.
 *
 * Fixes over the previous version:
 * - Reading the very last byte of the final DataView no longer throws: the
 *   reader only advances/validates its position lazily, right before a read,
 *   instead of eagerly after each read.
 * - seek() can now cross more than one DataView boundary (the old single `if`
 *   broke when the skip spanned multiple disks), and seeking exactly to the
 *   end no longer throws as long as nothing more is read.
 */
export default class ByteReader {
  private dvArray: DataView[];
  private dvIndex = 0;
  private pos = 0;

  constructor(dvArray: DataView[], startDvIndex: number, offset: number) {
    this.dvArray = dvArray;
    this.dvIndex = startDvIndex;
    this.pos = offset;
  }

  /** Reads one byte and returns it. Throws when reading past the final DataView. */
  public readUint8(): number {
    this.normalize();
    if (this.dvIndex >= this.dvArray.length) {
      throw new Error('Tried to read beyond DataView boundary in extractFile');
    }
    const curByte = this.dvArray[this.dvIndex].getUint8(this.pos);
    this.pos += 1;
    return curByte;
  }

  /** Reads two bytes as an unsigned little-endian 16-bit integer and returns it. */
  public readUint16(): number {
    return this.readUint8() | (this.readUint8() << 8);
  }

  /** Reads four bytes as an unsigned little-endian 32-bit integer and returns it. */
  public readUint32(): number {
    return (this.readUint8() | (this.readUint8() << 8) | (this.readUint8() << 16) | (this.readUint8() << 24)) >>> 0;
  }

  /** Seeks the given number of bytes forward, without reading the bytes. */
  public seek(numBytes: number): void {
    //Boundary handling is deferred to the next read, so seeking exactly to the
    //end of the last DataView is legal.
    this.pos += numBytes;
  }

  /** Reads the given amount of bytes, and returns it as a DataView. */
  public extractDv(length: number): DataView {
    const dv = new DataView(new ArrayBuffer(length));
    for (let i = 0; i < length; i += 1) {
      dv.setUint8(i, this.readUint8()); //TODO: refactor this so it is more optimized
    }
    return dv;
  }

  /** Moves pos/dvIndex forward across as many DataView boundaries as needed. */
  private normalize(): void {
    while (this.dvIndex < this.dvArray.length && this.pos >= this.dvArray[this.dvIndex].byteLength) {
      this.pos -= this.dvArray[this.dvIndex].byteLength;
      this.dvIndex += 1;
    }
  }
}

View file

@ -16,6 +16,8 @@ export default function extractFileStream(file: ISsnFileEntry, inputStream: stre
if (file.decryptionKeys !== undefined) {
const decryptTransform = decryptStream(file.decryptionKeys);
curStream = curStream.pipe(decryptTransform);
//skip encryption header
curStream.read(12);
}
if (!skipDecompression) {

View file

@ -16,7 +16,7 @@ function createFileStream(disks: string[], index: number, offset: number, length
}
/** Takes a list of ReadableStreams (the disks), as well as the offset and length, and returns a stream for just one file. */
async function getFullStream(disks: string[], { diskStart, offset, length }: IGetFileFromDisksOptions): Promise<stream.Readable> {
async function getConcatenatedStream(disks: string[], { diskStart, offset, length }: IGetFileFromDisksOptions): Promise<stream.Readable> {
let curDiskIndex = diskStart;
let curDisk: fs.ReadStream = createFileStream(disks, diskStart, offset);
let totalRead = 0;
@ -72,10 +72,10 @@ async function getFullStream(disks: string[], { diskStart, offset, length }: IGe
export default async function getFileFromDisks(disks: string[], { diskStart, offset, length }: IGetFileFromDisksOptions): Promise<stream.Readable> {
//read local file header
const headerStream = await getFullStream(disks, { diskStart, offset, length: 30 });
const headerStream = await getConcatenatedStream(disks, { diskStart, offset, length: 30 });
const localFileHeaderLength = await readLocalFileHeader(headerStream);
headerStream.destroy(); //TODO: is this the best way to close/destroy the stream?
//read actual file
return getFullStream(disks, { diskStart, offset: offset + localFileHeaderLength, length });
return getConcatenatedStream(disks, { diskStart, offset: offset + localFileHeaderLength, length });
}