diff --git a/src/cdn/downloadWithCurl.ts b/src/cdn/downloadWithCurl.ts index 22d42f5..0d9dc6b 100644 --- a/src/cdn/downloadWithCurl.ts +++ b/src/cdn/downloadWithCurl.ts @@ -11,7 +11,7 @@ export default function downloadWithCurl({ host, path, tempFileName }: {host: st const parameters: string[] = [ //... '--silent', - '--limit-rate', '30m', //maximum speed of 30 MB/s = 240 MBit/s + '--limit-rate', '20m', //maximum speed of 20 MB/s = 160 MBit/s '--speed-limit', String(Math.round(100 * 1024 * 1024 / 15)), //abort if less than 100 MB in 15 seconds '--speed-time', '15', '--output', tempFileName, diff --git a/src/ssn/getPatch.ts b/src/ssn/getPatch.ts index 4e210d7..2be23d2 100644 --- a/src/ssn/getPatch.ts +++ b/src/ssn/getPatch.ts @@ -72,7 +72,7 @@ export default async function getPatch({ product, from, to, sourceDirectory, tar //Then we need to wait for disks to finish download before we can extract individual files //TODO: we can optimize this to already extract some files as soon as their relevant parts are downloaded - const diskFilenames: string[] = await taskManager(diskFiles, 3); //max. of 3 concurrent downloads + const diskFilenames: string[] = await taskManager(diskFiles, 2); //max. of 2 concurrent downloads //TODO: Verify that downloaded files match the hash in `solidpkg.pieces`