🐎 Restrict download speed to increase perf
parent eb11b6414a
commit 44b8e8b275

2 changed files with 2 additions and 2 deletions
@@ -11,7 +11,7 @@ export default function downloadWithCurl({ host, path, tempFileName }: {host: st
   const parameters: string[] = [
     //...
     '--silent',
-    '--limit-rate', '30m', //maximum speed of 30 MB/s = 240 MBit/s
+    '--limit-rate', '20m', //maximum speed of 20 MB/s = 160 MBit/s
     '--speed-limit', String(Math.round(100 * 1024 * 1024 / 15)), //abort if less than 100 MB in 15 seconds
     '--speed-time', '15',
     '--output', tempFileName,
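For context, the snippet below is a minimal sketch of how such a parameter list can be handed to curl from Node.js. The helper name, URL layout, and promise wrapper are assumptions for illustration, not this repository's actual downloadWithCurl implementation.

import { spawn } from 'child_process';

//Hypothetical wrapper: spawn curl with the rate-limiting flags shown in the diff above
function runCurl(host: string, path: string, tempFileName: string): Promise<void> {
  const parameters: string[] = [
    '--silent',
    '--limit-rate', '20m',                                        //cap the transfer at 20 MB/s
    '--speed-limit', String(Math.round(100 * 1024 * 1024 / 15)),  //abort if slower than ~100 MB per 15 s on average
    '--speed-time', '15',                                         //window (in seconds) for the --speed-limit check
    '--output', tempFileName,
    `https://${host}${path}`,                                     //assumed URL layout
  ];
  return new Promise((resolve, reject) => {
    const child = spawn('curl', parameters);
    child.on('error', reject);
    child.on('close', code => code === 0 ? resolve() : reject(new Error(`curl exited with code ${code}`)));
  });
}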
@@ -72,7 +72,7 @@ export default async function getPatch({ product, from, to, sourceDirectory, tar
 
   //Then we need to wait for disks to finish download before we can extract individual files
   //TODO: we can optimize this to already extract some files as soon as their relevant parts are downloaded
-  const diskFilenames: string[] = await taskManager(diskFiles, 3); //max. of 3 concurrent downloads
+  const diskFilenames: string[] = await taskManager(diskFiles, 2); //max. of 2 concurrent downloads
 
   //TODO: Verify that downloaded files match the hash in `solidpkg.pieces`
 
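The taskManager helper itself is not part of this diff; as a rough sketch of the concurrency-limiting idea (not the project's actual implementation), a limit-N task runner in TypeScript could look like the following, assuming each task is a function returning a promise:

//Hypothetical concurrency limiter: runs at most `limit` tasks at a time
async function taskManager<T>(tasks: (() => Promise<T>)[], limit: number): Promise<T[]> {
  const results: T[] = new Array(tasks.length);
  let next = 0;
  //Each worker keeps pulling the next unstarted task until none remain
  const worker = async (): Promise<void> => {
    while (next < tasks.length) {
      const index = next++;
      results[index] = await tasks[index]();
    }
  };
  //With limit = 2, at most two downloads are in flight at once
  await Promise.all(Array.from({ length: Math.min(limit, tasks.length) }, worker));
  return results;
}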