in lib/crawler/http_client.rb [82:118]
def head(url, headers: nil)
  raise ArgumentError, 'Need a Crawler URL object!' unless url.is_a?(Crawler::Data::URL)

  check_connection_pool_stats!

  # Build the HEAD request and apply any caller-supplied headers
  start_time = Time.now
  http_head = HttpHead.new(url.to_s)
  headers&.each do |key, value|
    http_head.set_header(key, value)
  end

  # Execute the request and capture timing around the call
  apache_response = client.execute(http_head)
  end_time = Time.now

  # Wrap the Apache response together with the request timing metadata
  Crawler::HttpUtils::Response.new(
    apache_response:,
    url:,
    request_start_time: start_time,
    request_end_time: end_time
  )
rescue Java::JavaNet::SocketTimeoutException => e
  raise SocketTimeout, e
rescue Java::OrgApacheHttpConn::ConnectTimeoutException => e
  raise ConnectTimeout, e
rescue Java::JavaxNetSsl::SSLException => e
  raise SslException.for_java_error(e)
rescue Java::OrgApacheHcCore5Http::NoHttpResponseException => e
  raise NoHttpResponseError.for_proxy_host(
    error: e,
    proxy_host: config.http_proxy_host
  )
rescue Java::JavaLang::Exception => e
  # Catch-all: surface any other JVM-level failure as a crawler error
  raise BaseErrorFromJava, e
end
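
For context, a minimal usage sketch of this method might look like the following. The receiver name `http_client`, the `Crawler::Data::URL.parse` constructor, and having `SocketTimeout`/`ConnectTimeout` in scope are assumptions inferred from this excerpt, not confirmed API.

  # Hypothetical usage sketch; names marked "assumed" are not confirmed by this file.
  url = Crawler::Data::URL.parse('https://example.com/robots.txt') # assumed constructor
  begin
    # Returns a Crawler::HttpUtils::Response carrying request timing metadata
    response = http_client.head(url, headers: { 'Accept' => '*/*' })
  rescue SocketTimeout, ConnectTimeout => e
    # #head re-raises the underlying Java timeout exceptions as these crawler errors
    warn "HEAD request timed out: #{e.message}"
  end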