# typed: true
Rails.application.configure do
  # GoodJob configuration - applies to all environments (including test)
  config.good_job.inline_execution_respects_schedule = true
  config.good_job.active_record_parent_class = "ReduxApplicationRecord"
  config.good_job.retry_on_unhandled_error = true
  config.good_job.smaller_number_is_higher_priority = true

  # job cleanup config - retain the last 24 hours of jobs
  config.good_job.preserve_job_records = true
  config.good_job.cleanup_preserved_jobs_before_seconds_ago = 1.day
  config.good_job.cleanup_interval_jobs = 100_000
  config.good_job.cleanup_interval_seconds = 4.hours

  config.good_job.logger = Logger.new(STDOUT)
  config.good_job.logger.level = :info
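
  # The dedicated "worker" environment executes jobs in-process on async
  # threads; every other environment only enqueues jobs and relies on an
  # external `good_job` worker process to execute them.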
  if Rails.env == "worker"
    config.good_job.execution_mode = :async
    config.good_job.on_thread_error = ->(exception) do
      Rails.logger.error("GoodJob exception: #{exception}")
    end
  else
    config.good_job.execution_mode = :external
  end

  config.good_job.enable_cron = true
  config.good_job.cron_graceful_restart_period = 2.minutes
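
  # Recurring scraper jobs. Cron expressions use the standard five-field
  # syntax ("*/2 * * * *" = every two minutes, "*/1 * * * *" = every minute).
  # Because smaller_number_is_higher_priority is enabled above, priority -20
  # runs ahead of the default priority of 0.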
  config.good_job.cron = {
    inkbunny_latest_posts: { # each recurring job must have a unique key
      cron: "*/2 * * * *",
      class: "Domain::Inkbunny::Job::LatestPostsJob",
      args: [{}],
      set: {
        queue: "manual",
        priority: -20,
      },
      description: "Inkbunny, enqueue latest posts",
    },
    fa_browse_page_job: {
      cron: "*/1 * * * *",
      class: "Domain::Fa::Job::BrowsePageJob",
      args: [{}],
      set: {
        queue: "manual",
        priority: -20,
      },
      description: "FurAffinity, scan browse page",
    },
    e621_posts_index_job: {
      cron: "*/1 * * * *",
      class: "Domain::E621::Job::PostsIndexJob",
      args: [{}],
      set: {
        queue: "manual",
        priority: -20,
      },
      description: "e621, index posts",
    },
  }
end
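
# Customize the GoodJob dashboard: allow its fonts and styles to load from
# cdnjs.cloudflare.com, start queue-aware metrics collection, and expose the
# app's view helpers to the jobs controller.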
ActiveSupport.on_load(:good_job_application_controller) do
  T.bind(self, T.class_of(ActionController::Base))

  content_security_policy do |policy|
    policy.font_src :self, :https, :data, "cdnjs.cloudflare.com"
    policy.style_src :self, :https, "cdnjs.cloudflare.com"
    policy.style_src_elem :self, :https, "cdnjs.cloudflare.com"
  end

  Scraper::Metrics::GoodJobMetricsWithQueues.start

  class GoodJob::JobsController < GoodJob::ApplicationController
    helper Domain::PostsHelper
    helper Domain::UsersHelper
    helper Domain::PostGroupsHelper
    helper Domain::DomainModelHelper
    helper GoodJobHelper
  end
end
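
# Give each execution a destroyable log-line collection and remember the most
# recently created execution so Scraper::JobBase can reference the execution
# it is currently running under.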
ActiveSupport.on_load(:good_job_base_record) do
  class GoodJob::Execution < GoodJob::BaseRecord
    has_one :log_lines_collection,
            class_name: "::GoodJobExecutionLogLinesCollection",
            dependent: :destroy,
            inverse_of: :good_job_execution

    after_create { Scraper::JobBase.last_good_job_execution = self }
  end
end