output_crawl_result

in lib/crawler/coordinator.rb [540:571]


    # Writes one crawl result into the configured output sink.
    #
    # If the sink is locked (Errors::SinkLockedError), the write is retried
    # up to SINK_LOCK_MAX_RETRIES times, sleeping SINK_LOCK_RETRY_INTERVAL
    # between attempts. When retries are exhausted — or any other
    # StandardError is raised — the error is logged and reported to the
    # sink via +sink.failure+ instead of being propagated to the caller.
    #
    # Returns the sink's outcome Hash on success, or the result of
    # +sink.failure+ on any failure path.
    def output_crawl_result(crawl_result)
      attempts = 0
      begin
        outcome = sink.write(crawl_result)

        # The sink contract requires a Hash outcome; anything else indicates
        # a sink implementation bug and is surfaced as an ArgumentError
        # (caught by the StandardError handler below).
        unless outcome.is_a?(Hash)
          error = "Expected to return an outcome object from the sink, returned #{outcome.inspect} instead"
          raise ArgumentError, error
        end

        outcome
      rescue Errors::SinkLockedError
        attempts += 1
        if attempts < SINK_LOCK_MAX_RETRIES
          interruptible_sleep(SINK_LOCK_RETRY_INTERVAL)
          retry
        end

        # Retries exhausted: record the dropped result and move on.
        message = <<~LOG.squish
          Sink lock couldn't be acquired after #{attempts} attempts, so crawl result for URL
          [#{crawl_result.url}] was dropped.
        LOG
        system_logger.warn(message)
        sink.failure(message)
      rescue StandardError => e
        message = "Unexpected exception while sending crawl results to the output sink: #{e}"
        system_logger.fatal(message)
        sink.failure(message)
      end
    end