in gh-actions-artifact-client/src/download-http-client.js [75:196]
async downloadIndividualFileToStream(
httpClientIndex,
artifactLocation,
outputStream
) {
let retryCount = 0
const retryLimit = config_variables.getRetryLimit()
const headers = utils.getDownloadHeaders('application/json', true, true)
// a single GET request is used to download a file
const makeDownloadRequest = async () => {
const client = this.downloadHttpManager.getClient(httpClientIndex)
return await client.get(artifactLocation, headers)
}
// Increments the current retry count and then checks if the retry limit has been reached
// If there have been too many retries, fail so the download stops. If a retryAfterValue is
// provided, it is used as the wait time before the next attempt; otherwise exponential backoff is used
const backOff = async retryAfterValue => {
retryCount++
if (retryCount > retryLimit) {
throw new Error(
`Retry limit has been reached. Unable to download ${artifactLocation}`
)
} else {
this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex)
if (retryAfterValue) {
// Back off by waiting the specified time denoted by the retry-after header
console.error(
`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`
)
await utils.sleep(retryAfterValue)
} else {
// Back off using an exponential value that depends on the retry count
const backoffTime =
utils.getExponentialRetryTimeInMilliseconds(retryCount)
console.error(
`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`
)
await utils.sleep(backoffTime)
}
console.error(
`Finished backoff for retry #${retryCount}, continuing with download`
)
}
}
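// partBuffer holds the full response body for the current attempt; it is reused
// across retries when the expected content-length has not changed, and partIndex
// tracks how many bytes have been copied into it so far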
let partBuffer
let partIndex = 0
// keep trying to download the file until the retry limit has been reached
while (retryCount <= retryLimit) {
let response
try {
response = await makeDownloadRequest()
} catch (error) {
// if an error is caught, it is usually indicative of a timeout so retry the download
console.error(
'An error occurred while attempting to download a file',
error
)
// increment the retryCount and use exponential backoff to wait before making the next request
await backOff()
continue
}
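// forceRetry is set when a success status code is received but the response
// body cannot be fully read, so the request should be attempted again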
let forceRetry = false
if (utils.isSuccessStatusCode(response.message.statusCode)) {
try {
const partSize = parseInt(response.message.headers['content-length'], 10)
if (!partBuffer || partBuffer.length !== partSize) {
partBuffer = Buffer.alloc(partSize)
}
partIndex = 0
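// read the response body chunk by chunk into partBuffer; resolve once the
// stream closes, or reject if the stream emits an error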
await new Promise((resolve, reject) => {
response.message
.on('error', error => {
console.error(
`An error occurred while attempting to read the response stream`,
error
)
reject(error)
})
.on('data', chunk => {
chunk.copy(partBuffer, partIndex)
partIndex += chunk.length
})
.on('close', () => {
resolve()
})
})
if (partSize !== partIndex) {
console.error(
`Did not receive the full file. Received ${partIndex} bytes but expected ${partSize} bytes`
)
}
break
} catch (error) {
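// copying the response stream into the buffer failed; retry the request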
forceRetry = true
}
}
if (
forceRetry ||
utils.isRetryableStatusCode(response.message.statusCode)
) {
console.error(
`A ${response.message.statusCode} response code has been received while attempting to download an artifact`
)
// if a throttled status code is received, try to get the retry-after header value; otherwise fall back to the standard exponential backoff
if (utils.isThrottledStatusCode(response.message.statusCode)) {
await backOff(
utils.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)
)
} else {
await backOff()
}
} else {
// Some unexpected response code, fail immediately and stop the download
utils.displayHttpDiagnostics(response)
throw new Error(
`Unexpected http ${response.message.statusCode} during download for ${artifactLocation}`
)
}
}
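// the loop only exits via break once the file has been fully buffered,
// so partBuffer now contains the complete file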
outputStream.write(partBuffer)
}
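// Example usage (sketch): the class name, constructor arguments, and
// artifactLocation shown here are assumptions for illustration and may
// differ in this client; run inside an async function.
//
//   const fs = require('fs')
//   const client = new DownloadHttpClient()
//   const out = fs.createWriteStream('artifact-file.bin')
//   await client.downloadIndividualFileToStream(0, artifactLocation, out)
//   out.end()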