spec/integration/robots_txt_spec.rb [32:42]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
  end

  # Crawl the site as 'Elastic Crawler' and verify exactly which URLs were fetched.
  # NOTE(review): the example name says disallow rules are respected, yet
  # '/sekret-stuff' is expected with a 200 — presumably the fixture's robots.txt
  # disallows it for a *different* User-Agent; confirm against the site setup.
  it 'should respect robots.txt disallow rules for matching User-Agent' do
    expected_results = [
      mock_response(url: 'http://127.0.0.1:9393/', status_code: 200),
      mock_response(url: 'http://127.0.0.1:9393/hey', status_code: 200),
      mock_response(url: 'http://127.0.0.1:9393/sekret-stuff', status_code: 200)
    ]

    results = FauxCrawl.run(site, user_agent: 'Elastic Crawler')

    expect(results).to have_only_these_results(expected_results)
  end
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



spec/integration/robots_txt_spec.rb [57:67]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    end

    # Crawl the site as 'Elastic Crawler' and verify exactly which URLs were fetched.
    # NOTE(review): the example name says disallow rules are respected, yet
    # '/sekret-stuff' is expected with a 200 — presumably the fixture's robots.txt
    # disallows it for a *different* User-Agent; confirm against the site setup.
    it 'should respect robots.txt disallow rules for matching User-Agent' do
      expected_results = [
        mock_response(url: 'http://127.0.0.1:9393/', status_code: 200),
        mock_response(url: 'http://127.0.0.1:9393/hey', status_code: 200),
        mock_response(url: 'http://127.0.0.1:9393/sekret-stuff', status_code: 200)
      ]

      results = FauxCrawl.run(site, user_agent: 'Elastic Crawler')

      expect(results).to have_only_these_results(expected_results)
    end
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



