redux-scraper/spec/spec_util.rb

# typed: strict
require "rspec/mocks"
class SpecUtil
extend T::Sig
include HasColorLogger
extend RSpec::Mocks::ExampleMethods
extend RSpec::Matchers
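
  # Random uppercase ASCII string, handy for unique test identifiers.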
  sig { params(length: Integer).returns(String) }
def self.random_string(length = 8)
(0...length).map { (65 + rand(26)).chr }.join
end
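
  # Builds a verified double of Scraper::CurlHttpPerformer that expects a
  # single request and answers with a canned response. Illustrative usage
  # (method and URL are placeholders):
  #
  #   performer = SpecUtil.mock_http_performer(:get, "https://example.com/")
  #   # performer.do_request(...) then returns the stubbed Response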
sig do
params(
expected_method: Symbol,
expected_url: String,
request_headers: T::Hash[String, String],
response_code: Integer,
response_time_ms: Integer,
response_headers: T::Hash[String, String],
response_body: String,
).returns(Scraper::CurlHttpPerformer)
end
def self.mock_http_performer(
expected_method,
expected_url,
request_headers: {},
response_code: 200,
response_time_ms: 15,
response_headers: { "content-type" => "text/plain" },
response_body: "http body"
)
mock = instance_double("Scraper::CurlHttpPerformer")
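    # A verified double won't pass is_a? checks on its own, so stub them for
    # callers that type-check the performer.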
allow(mock).to receive(:is_a?).with(String).and_return(false)
allow(mock).to receive(:is_a?).with(Scraper::CurlHttpPerformer).and_return(
true,
)
allow(mock).to receive(:do_request).with(
Scraper::CurlHttpPerformer::Request.new(
http_method:
T.must(
Scraper::CurlHttpPerformer::Method.try_deserialize(
expected_method.to_s.upcase,
),
),
uri: Addressable::URI.parse(expected_url),
request_headers: request_headers,
),
).and_return(
Scraper::CurlHttpPerformer::Response.new(
uri: Addressable::URI.parse(expected_url),
response_code: response_code,
response_headers: response_headers,
response_time_ms: response_time_ms,
body: response_body,
performed_by: "direct",
),
)
mock
end
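
  # Reads a file from test/fixtures/files; pass mode: "rb" for binary fixtures.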
sig { params(path: String, mode: String).returns(String) }
def self.read_fixture_file(path, mode: "rt")
File.read(Rails.root.join("test/fixtures/files", path), mode: mode)
end
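
  # Returns unperformed GoodJob rows, oldest first, as hashes with
  # :job (the job class), :queue, :priority, :args, and :good_job (the row),
  # optionally filtered to a single job class.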
sig do
params(job_class: T.nilable(T.class_of(Scraper::JobBase))).returns(
T::Array[T.untyped],
)
end
def self.enqueued_jobs(job_class = nil)
GoodJob::Job
.where(performed_at: nil)
.order(created_at: :asc)
.all
.map do |job|
args =
::ActiveJob::Arguments.deserialize(job.serialized_params["arguments"])
{
job: T.must(job.job_class).constantize,
queue: job.queue_name,
priority: job.priority,
args:,
good_job: job,
}
end
.filter { |job| job_is_class(job_class, job) }
end
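
  # Returns just the first positional argument hash of each enqueued job,
  # optionally annotated with the GoodJob id, with :caused_by_job_id stripped
  # unless requested. Illustrative expectation (SomeJob is a placeholder):
  #
  #   expect(SpecUtil.enqueued_job_args(SomeJob)).to eq([{ url: "..." }])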
sig do
params(
job_class: T.nilable(T.class_of(Scraper::JobBase)),
include_job_id: T::Boolean,
include_caused_by_job: T::Boolean,
).returns(T::Array[T.untyped])
end
def self.enqueued_job_args(
job_class = nil,
include_job_id: false,
include_caused_by_job: false
)
enqueued_jobs(job_class).map do |job|
args = job[:args][0]
args.merge!(job_id: job[:good_job].id) if include_job_id
args.delete(:caused_by_job_id) unless include_caused_by_job
args
end
end
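
  # Destroys enqueued GoodJob rows, optionally scoped to one job class.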
sig do
params(job_class: T.nilable(T.class_of(Scraper::JobBase))).returns(
T::Array[T.untyped],
)
end
def self.clear_enqueued_jobs!(job_class = nil)
rel = GoodJob::Job
rel = rel.where(job_class: job_class.name) if job_class
rel.destroy_all
end
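
  # True when the enqueued job hash matches job_class: a class, an array of
  # classes, or nil to match any job.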
sig do
params(
job_class:
T.nilable(
T.any(
T.class_of(Scraper::JobBase),
T::Array[T.class_of(Scraper::JobBase)],
),
),
job: T.untyped,
).returns(T::Boolean)
end
def self.job_is_class(job_class, job)
if job_class.nil?
true
elsif job_class.is_a? Array
job_class.include? job[:job]
else
job_class == job[:job]
end
end
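
  # Drains the queue inline, performing jobs one at a time and re-raising
  # any handled error; limit caps how many jobs run. Illustrative usage
  # (SomeJob is a placeholder):
  #
  #   SomeJob.perform_later(url: "https://example.com/")
  #   SpecUtil.perform_jobs(limit: 1)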
sig { params(limit: T.nilable(Integer)).void }
def self.perform_jobs(limit: nil)
count = 0
loop do
break if limit && count >= limit
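      # Reaches into GoodJob's private job query to perform one job under an
      # advisory lock, mirroring what a real worker thread would do.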
job =
GoodJob::CurrentThread.within do
GoodJob::JobPerformer
.new("*")
.send(:job_query)
.perform_with_advisory_lock(lock_id: "1234") do |execution|
# GoodJob::CurrentThread.job = execution
end
end
break unless job
raise job.handled_error if job.handled_error
count += 1
end
end
end