feat: introduce sidekiq_job_timeout & sidekiq_batch_size
ninoseki committed Nov 4, 2024
1 parent 8ba57bf · commit 92c2969
Showing 4 changed files with 23 additions and 10 deletions.
8 changes: 8 additions & 0 deletions lib/miteru/config.rb
@@ -28,6 +28,8 @@ class Config < Anyway::Config
       sentry_trace_sample_rate: 0.25,
       sidekiq_redis_url: nil,
       sidekiq_job_retry: 0,
+      sidekiq_batch_size: 50,
+      sidekiq_job_timeout: 600,
       cache_redis_url: nil,
       cache_ex: nil,
       cache_prefix: "miteru:cache",
@@ -53,6 +55,12 @@ class Config < Anyway::Config
     # @!attribute [r] sidekiq_job_retry
     # @return [Integer]
 
+    # @!attribute [r] sidekiq_batch_size
+    # @return [Integer]
+
+    # @!attribute [r] sidekiq_job_timeout
+    # @return [Integer]
+
     # @!attribute [r] cache_redis_url
     # @return [String, nil]
 
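A minimal, self-contained sketch of the Anyway::Config pattern the new settings plug into. The class name, config_name, and MITERU_* environment mapping below are assumptions for illustration, not taken from this commit:

    require "anyway_config"

    # Hypothetical stand-in for Miteru::Config, showing how the new defaults
    # and their environment overrides behave under Anyway::Config.
    class ExampleConfig < Anyway::Config
      config_name :miteru  # assumed prefix => MITERU_* environment variables

      attr_config(
        sidekiq_batch_size: 50,   # jobs pushed to Redis per bulk enqueue
        sidekiq_job_timeout: 600  # per-job crawl budget, in seconds
      )
    end

    config = ExampleConfig.new
    config.sidekiq_batch_size   # => 50, or MITERU_SIDEKIQ_BATCH_SIZE if set
    config.sidekiq_job_timeout  # => 600, or MITERU_SIDEKIQ_JOB_TIMEOUT if set
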
6 changes: 2 additions & 4 deletions lib/miteru/orchestrator.rb
@@ -13,10 +13,8 @@ def call
     end
 
     def sidekiq_call
-      non_cached_websites.each do |website|
-        Jobs::CrawleJob.perform_async(website.url, website.source)
-        logger.info("Website:#{website.truncated_url} crawler job queued.") if verbose?
-      end
+      array_of_args = non_cached_websites.map { |website| [website.url, website.source] }
+      Jobs::CrawleJob.perform_bulk(array_of_args, batch_size: Miteru.config.sidekiq_batch_size)
     end
 
     def parallel_call
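
perform_bulk (Sidekiq 6.3+) enqueues the whole argument list in Redis pipelines of batch_size jobs, replacing the previous one-perform_async-per-website loop. A hedged sketch of roughly what that call expands to, using Sidekiq's public push_bulk API and a hypothetical job class:

    require "sidekiq"

    # Hypothetical job standing in for Miteru::Jobs::CrawleJob.
    class CrawlJob
      include Sidekiq::Job

      def perform(url, source)
        # crawl the website...
      end
    end

    array_of_args = [
      ["https://example.com/kit", "feed_a"],  # illustrative URL/source pairs
      ["https://example.org/kit", "feed_b"]
    ]

    # Push jobs in slices so a large website list costs a handful of Redis
    # round-trips instead of one per job; CrawlJob.perform_bulk(array_of_args,
    # batch_size: 50) is the shorthand for the same loop.
    array_of_args.each_slice(50) do |slice|  # 50 mirrors the sidekiq_batch_size default
      Sidekiq::Client.push_bulk("class" => CrawlJob, "args" => slice)
    end
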
5 changes: 4 additions & 1 deletion lib/miteru/sidekiq/application.rb
@@ -6,7 +6,10 @@
 
 Sidekiq.configure_server do |config|
   config.redis = {url: Miteru.config.sidekiq_redis_url.to_s}
-  config.default_job_options = {"retry" => Miteru.config.sidekiq_job_retry}
+  config.default_job_options = {
+    retry: Miteru.config.sidekiq_job_retry,
+    expires_in: 0.second
+  }
 end
 
 Sidekiq.configure_client do |config|
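
default_job_options sets the baseline options for jobs pushed from the Sidekiq server process; a job class can still override them with its own sidekiq_options. A short illustration with a hypothetical job class, not part of this commit:

    require "sidekiq"

    class ExampleJob
      include Sidekiq::Job
      # Per-job options win over config.default_job_options,
      # e.g. retrying three times despite the global retry default.
      sidekiq_options retry: 3

      def perform(url)
        # ...
      end
    end
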
14 changes: 9 additions & 5 deletions lib/miteru/sidekiq/jobs.rb
@@ -1,6 +1,7 @@
 # frozen_string_literal: true
 
 require "sidekiq"
+require "timeout"
 
 module Miteru
   module Jobs
@@ -14,12 +15,15 @@ class CrawleJob
       #
       def perform(url, source)
         website = Miteru::Website.new(url, source:)
+
         with_db_connection do
-          result = Crawler.result(website)
-          if result.success?
-            Miteru.logger.info("Crawler:#{website.truncated_url} succeeded.")
-          else
-            Miteru.logger.info("Crawler:#{website.truncated_url} failed - #{result.failure}.")
+          Timeout.timeout(Miteru.config.sidekiq_job_timeout) do
+            result = Crawler.result(website)
+            if result.success?
+              Miteru.logger.info("Crawler:#{website.truncated_url} succeeded.")
+            else
+              Miteru.logger.info("Crawler:#{website.truncated_url} failed - #{result.failure}.")
+            end
           end
         end
       end
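
Timeout.timeout(n) raises Timeout::Error if its block runs longer than n seconds, so a hung crawl now fails the job instead of holding a Sidekiq thread indefinitely, and with sidekiq_job_retry defaulting to 0 it is not retried. The guard in isolation, with an illustrative 2-second budget standing in for sidekiq_job_timeout:

    require "timeout"

    begin
      Timeout.timeout(2) do
        sleep 5  # stand-in for a crawl that hangs
      end
    rescue Timeout::Error => e
      puts "crawl aborted: #{e.message}"  # the block was interrupted after 2 seconds
    end
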
