in lib/crawler/data/url_queue/memory_only.rb [38:63]
def check_queue_size!
  current_items = memory_queue.length

  # Hard limit: alert, then refuse to accept new URLs.
  if current_items >= memory_size_limit
    maybe_threshold_alert(
      <<~LOG
        In-memory URL queue is full (#{current_items} items).
        New URLs will not be added to it until there is more space available.
        This may lead to missing pages in your search index.
      LOG
    )
    raise Crawler::Data::UrlQueue::QueueFullError,
          "Too many items in URL queue: #{current_items} >= #{memory_size_limit}"
  end

  return unless current_items >= warning_threshold(memory_size_limit)

  # Soft limit: warn before the queue actually fills up.
  maybe_threshold_alert(
    <<~LOG
      In-memory URL queue is almost full (#{current_items} of #{memory_size_limit} items).
      If we hit the limit of #{memory_size_limit} items,
      the crawler will be forced to start dropping new URLs,
      which may lead to missing pages in your search index.
    LOG
  )
end