Files
redux-scraper/app/jobs/domain/e621/job/scan_users_job.rb
2025-01-28 23:50:12 +00:00

49 lines
1.5 KiB
Ruby

# typed: strict
class Domain::E621::Job::ScanUsersJob < Domain::E621::Job::Base
  # Crawls e621's /users.json listing backwards ("page=b<id>" returns users
  # with ids below <id>), upserting each user into Domain::E621::User.
  # At most 10 pages (320 users each) are fetched per run; if anything was
  # seen, a follow-up job is deferred to continue from the lowest id reached.
  #
  # args[:after_e621_id] — optional String id to resume paging before.
  sig { override.params(args: T.untyped).void }
  def perform(args)
    after = T.let(args[:after_e621_id], T.nilable(String))
    seen_count = 0
    new_count = 0
    10.times do
      url = "https://e621.net/users.json?limit=320"
      url = "#{url}&page=b#{after}" if after
      response = http_client.get(url)
      unless response.status_code == 200
        fatal_error(
          "non 200 response for /users.json: #{response.status_code.to_s.underline}",
        )
      end
      page = JSON.parse(response.body)
      logger.info "saw #{page.size} users"
      break if page.empty?
      seen_count += page.size
      ReduxApplicationRecord.transaction do
        page.each do |row|
          record =
            Domain::E621::User.find_or_initialize_by(
              e621_user_id: row["id"],
            ) { |u| u.name = row["name"] }
          # Count before save! flips new_record? to false.
          new_count += 1 if record.new_record?
          record.save!
          # defer_job(Domain::E621::Job::ScanUserFavsJob, user: record) if new
        end
        logger.info "#{new_count} new users"
      end
      # Next page starts strictly before the smallest id we just saw.
      after = page.map { |row| row["id"] }.min.to_s
    end
    return unless seen_count > 0
    logger.info "scanning more users, after_e621_id: #{after}"
    defer_job(Domain::E621::Job::ScanUsersJob, after_e621_id: after)
  end
end