in spec/support/faux/faux_crawl.rb [51:73]
# Builds a faux crawl fixture: trailing options are split off the +sites+
# argument list, crawl configuration is captured into ivars, and the faux
# sites are booted via +start_sites+.
def initialize(*sites)
  @options = sites.extract_options!
  @sites = configure_sites(*sites)

  # Identity and queue backend for the crawl (lazy defaults via block form).
  @crawl_id = options.fetch(:crawl_id) { BSON::ObjectId.new.to_s }
  @url_queue = options.fetch(:url_queue) { enterprise_search? ? :esqueues_me : :memory_only }

  @user_agent = options.fetch(:user_agent) { 'Faux Crawler' }
  @auth = options[:auth]
  @url = options.fetch(:url) { Settings.faux_url }

  # Seed/sitemap URLs are normalized to absolute form; the `||` (rather than
  # fetch) also replaces an explicitly-nil value with the default.
  @seed_urls = coerce_to_absolute_urls(options[:seed_urls] || ["#{@url}/"])
  @sitemap_urls = coerce_to_absolute_urls(options[:sitemap_urls] || [])

  # Every seed URL's site is allowed for crawling.
  @domain_allowlist = seed_urls.map { |seed| Crawler::Data::URL.parse(seed).site }

  @content_extraction = options.fetch(:content_extraction) { { enabled: false, mime_types: [] } }
  @default_encoding = options[:default_encoding]

  # Only the recognized timeout keys are kept; nil entries are dropped.
  timeout_keys = %i[connect_timeout socket_timeout request_timeout]
  @timeouts = options.fetch(:timeouts) { {} }.slice(*timeout_keys).compact

  @results = ResultsCollection.new
  @expect_success = options.fetch(:expect_success, true)

  start_sites
end