in lib/crawler/http_utils/response.rb [147:184]
def consume_http_entity(max_response_size:, request_timeout:)
  stream = http_entity.content
  check_content_encoding

  response_buffer = create_response_buffer(max_response_size)
  chunk = Java::byte[1024].new # reusable 1 KB JRuby byte[] buffer for chunked reads

  loop do
    received_bytes = stream.read(chunk)
    break if received_bytes.negative? # a negative count signals end of stream

    # Enforce the response size limit before buffering the new chunk
    total_downloaded = response_buffer.length + received_bytes
    if max_response_size && total_downloaded >= max_response_size
      raise Crawler::HttpUtils::ResponseTooLarge, <<~ERROR.squish
        Failed to fetch the response from
      ERROR
    end

    response_buffer.append(chunk, 0, received_bytes)

    # Abort if the overall request has exceeded its time budget
    raise Crawler::HttpUtils::RequestTimeout, url if request_timeout && time_since_request_start > request_timeout
  end

  response_buffer.to_byte_array
ensure
  stream.close
end
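
# A minimal plain-Ruby sketch (not part of response.rb) of the same technique:
# a bounded, wall-clock-limited streaming read. The names here (read_capped,
# ResponseTooLarge, RequestTimeout as local classes) and the 1 KB chunk size
# are illustrative assumptions; the real method above runs on JRuby and reads
# from an Apache HttpClient entity stream instead of a Ruby IO object.
require 'stringio'

ResponseTooLarge = Class.new(StandardError)
RequestTimeout   = Class.new(StandardError)

def read_capped(io, max_bytes:, timeout:)
  started = Process.clock_gettime(Process::CLOCK_MONOTONIC)
  buffer = +''
  while (chunk = io.read(1024))                  # read in 1 KB chunks until EOF
    # Check the size cap before appending, mirroring the method above
    raise ResponseTooLarge if buffer.bytesize + chunk.bytesize >= max_bytes
    buffer << chunk
    elapsed = Process.clock_gettime(Process::CLOCK_MONOTONIC) - started
    raise RequestTimeout if elapsed > timeout    # enforce a per-response time budget
  end
  buffer
ensure
  io.close
end

# Usage: read at most 10 KB within 5 seconds from an in-memory stream.
puts read_capped(StringIO.new('x' * 2048), max_bytes: 10_240, timeout: 5).bytesize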