partial migration to tagged logs
1 Gemfile
@@ -150,6 +150,7 @@ gem "rack-cors"
gem "react_on_rails"
gem "sanitize", "~> 6.1"
gem "shakapacker", "~> 6.6"
gem "timeout"

group :development do
  gem "prettier_print"

@@ -604,6 +604,7 @@ DEPENDENCIES
  table_print
  tailwindcss-rails (~> 3.0)
  tapioca
  timeout
  turbo-rails
  tzinfo-data
  web-console
7 TODO.md
@@ -6,6 +6,11 @@
- [x] Attach logs to jobs, page to view jobs and their logs
- [ ] Standardize all the embeddings tables to use the same schema (item_id, embedding)
- [ ] Bluesky scraper
- [ ] Download favs / votes for E621 users
- [x] Download favs / votes for E621 users
- [ ] Automatically enqueue jobs for FA users to do incremental scans of profiles
- [ ] Fix FA posts that start with "Font size adjustment: smallerlarger"
- [ ] Convert logger .prefix=... into .tagged(...)
- [ ] `make_tag` should be smart about the objects it takes
- [ ] Convert all `state: string` attributes to enums in ActiveRecord models
- [ ] Create `belongs_to_log_entry` macro for ActiveRecord models
- [ ] Use StaticFileJobHelper for Domain::Fa::Job::ScanFileJob
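The "Convert logger .prefix=... into .tagged(...)" item is the work this commit starts; as a rough sketch of the pattern being migrated, with both lines taken from the hunks below:

# Before: a free-form prefix proc attached to the logger
logger.prefix =
  proc { "[e621_id #{post.e621_id.to_s.bold} / #{post.state&.bold}]" }

# After: structured tags pushed onto the (now tagged) logger
logger.push_tags(make_arg_tag(post))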
@@ -16,4 +16,14 @@ class Domain::E621::Job::Base < Scraper::JobBase
  def user_from_args
    T.cast(arguments[0][:user], T.nilable(Domain::User::E621User))
  end

  sig { returns(Domain::Post::E621Post) }
  def post_from_args!
    T.must(post_from_args)
  end

  sig { returns(T.nilable(Domain::Post::E621Post)) }
  def post_from_args
    T.cast(arguments[0][:post], T.nilable(Domain::Post::E621Post))
  end
end
@@ -4,19 +4,18 @@ class Domain::E621::Job::ScanPostJob < Domain::E621::Job::Base
  sig { override.params(args: T::Hash[Symbol, T.untyped]).returns(T.untyped) }
  def perform(args)
    post =
      T.let(args[:post] || raise("no post provided"), Domain::Post::E621Post)

    logger.prefix =
      proc { "[e621_id #{post.e621_id.to_s.bold} / #{post.state&.bold}]" }

    post = post_from_args!
    logger.push_tags(make_arg_tag(post))
    file = post.file
    logger.push_tags(make_arg_tag(file)) if file.present?

    if file.present? && file.state == "ok" && file.log_entry_id.present?
      logger.warn("Post #{post.e621_id} already has a file")
      logger.warn("post already has file with 'ok' state, skipping")
      return
    end

    logger.info("Scanning post #{post.e621_id}")
    logger.info("scanning post")

    response = http_client.get("https://e621.net/posts/#{post.e621_id}.json")
    post.scan_log_entry = response.log_entry
    post.last_submission_log_entry = response.log_entry
@@ -30,7 +29,7 @@ class Domain::E621::Job::ScanPostJob < Domain::E621::Job::Base
        "Error scanning post #{post.e621_id}: #{response.status_code}",
      )
    else
      logger.info("Post #{post.e621_id} scanned successfully")
      logger.info("scanned post")
    end

    post_json = JSON.parse(response.body)["post"]
@@ -42,9 +41,7 @@ class Domain::E621::Job::ScanPostJob < Domain::E621::Job::Base
    post.scan_log_entry = response.log_entry
    post.last_submission_log_entry = response.log_entry
  ensure
    if post
      post.save!
      file.save! if file
    end
    post.save! if post
    file.save! if file
  end
end
@@ -2,14 +2,22 @@
|
||||
class Domain::E621::Job::ScanUserFavsJob < Domain::E621::Job::Base
|
||||
MAX_PAGES_BEFORE_BREAK = 2400
|
||||
MAX_PER_PAGE = T.let(Rails.env.test? ? 4 : 320, Integer)
|
||||
include HasMeasureDuration
|
||||
|
||||
sig { override.params(args: T::Hash[Symbol, T.untyped]).returns(T.untyped) }
|
||||
def perform(args)
|
||||
user = user_from_args!
|
||||
if user.scanned_favs_status == "error" && !args[:force]
|
||||
logger.info("[user #{user.e621_id} has error status, skipping]")
|
||||
return
|
||||
logger.push_tags(make_arg_tag(user))
|
||||
logger.info("server indicates #{user.num_other_favs_cached} favs")
|
||||
|
||||
if user.scanned_favs_error?
|
||||
if force_scan?
|
||||
logger.info(
|
||||
"scanned favs status is error, but force scan is true, continuing",
|
||||
)
|
||||
else
|
||||
logger.warn("scanned favs status is error, skipping")
|
||||
return
|
||||
end
|
||||
end
|
||||
|
||||
last_e621_post_id = T.let(nil, T.nilable(Integer))
|
||||
@@ -17,30 +25,16 @@ class Domain::E621::Job::ScanUserFavsJob < Domain::E621::Job::Base
|
||||
post_ids = T.let([], T::Array[Integer])
|
||||
total_new_posts = 0
|
||||
|
||||
prefix = [
|
||||
"[e621 user id: #{user.e621_id&.to_s&.bold}]",
|
||||
"[username: #{user.name&.bold}]",
|
||||
].join(" ")
|
||||
|
||||
logger.info("#{prefix} [cached favs: #{user.num_other_favs_cached}]")
|
||||
|
||||
loop do
|
||||
breaker += 1
|
||||
if breaker > MAX_PAGES_BEFORE_BREAK
|
||||
logger.warn(
|
||||
"#{prefix} [breaker is too big] [last e621 post id: #{last_e621_post_id}]",
|
||||
)
|
||||
logger.error("breaker is too big (#{breaker})")
|
||||
break
|
||||
end
|
||||
|
||||
url =
|
||||
"https://e621.net/posts.json?tags=status:any+fav:#{user.url_name}+order:id_desc&limit=#{MAX_PER_PAGE}"
|
||||
if last_e621_post_id
|
||||
limiter = "before #{last_e621_post_id.to_s.bold}"
|
||||
url += "&page=b#{last_e621_post_id.to_s}"
|
||||
else
|
||||
limiter = "(none)"
|
||||
end
|
||||
url += "&page=b#{last_e621_post_id.to_s}" if last_e621_post_id
|
||||
response = http_client.get(url)
|
||||
|
||||
if response.status_code == 403 &&
|
||||
@@ -63,9 +57,7 @@ class Domain::E621::Job::ScanUserFavsJob < Domain::E621::Job::Base
|
||||
T::Array[T::Hash[String, T.untyped]],
|
||||
)
|
||||
if posts_json.empty?
|
||||
logger.info(
|
||||
"#{prefix} [limiter: #{limiter}] [req: #{breaker}] [no posts found] ",
|
||||
)
|
||||
logger.info("no posts found on page #{breaker}")
|
||||
break
|
||||
end
|
||||
|
||||
@@ -76,55 +68,49 @@ class Domain::E621::Job::ScanUserFavsJob < Domain::E621::Job::Base
|
||||
end
|
||||
.to_h
|
||||
|
||||
measure(
|
||||
"#{prefix} [finding favs: #{posts_json.size}] [req: #{breaker}]",
|
||||
) do
|
||||
e621_id_to_post_id = T.let({}, T::Hash[Integer, Integer])
|
||||
e621_post_id_to_post_json
|
||||
.keys
|
||||
.each_slice(1000) do |e621_post_id_slice|
|
||||
e621_id_to_post_id.merge!(
|
||||
Domain::E621::Post
|
||||
.where(e621_id: e621_post_id_slice)
|
||||
.pluck(:e621_id, :id)
|
||||
.to_h,
|
||||
)
|
||||
end
|
||||
missing_e621_ids =
|
||||
e621_post_id_to_post_json.keys - e621_id_to_post_id.keys
|
||||
logger.info "found #{posts_json.size} favs on page #{breaker}"
|
||||
e621_id_to_post_id = T.let({}, T::Hash[Integer, Integer])
|
||||
e621_post_id_to_post_json
|
||||
.keys
|
||||
.each_slice(1000) do |e621_post_id_slice|
|
||||
e621_id_to_post_id.merge!(
|
||||
Domain::E621::Post
|
||||
.where(e621_id: e621_post_id_slice)
|
||||
.pluck(:e621_id, :id)
|
||||
.to_h,
|
||||
)
|
||||
end
|
||||
missing_e621_ids =
|
||||
e621_post_id_to_post_json.keys - e621_id_to_post_id.keys
|
||||
|
||||
if missing_e621_ids.any?
|
||||
measure("#{prefix} [creating posts: #{missing_e621_ids.size}]") do
|
||||
missing_e621_ids.each do |e621_post_id|
|
||||
post_json = T.must(e621_post_id_to_post_json[e621_post_id])
|
||||
post =
|
||||
Domain::E621::TagUtil.initialize_or_update_post(
|
||||
post_json: post_json,
|
||||
caused_by_entry: causing_log_entry,
|
||||
)
|
||||
was_new = post.new_record?
|
||||
post.last_index_page ||= response.log_entry
|
||||
post.save!
|
||||
e621_id_to_post_id[e621_post_id] = T.must(post.id)
|
||||
if was_new
|
||||
logger.info(
|
||||
"#{prefix} [created post: e621 id #{post.e621_id} / id #{post.id}]",
|
||||
)
|
||||
total_new_posts += 1
|
||||
end
|
||||
end
|
||||
if missing_e621_ids.any?
|
||||
logger.info "creating #{missing_e621_ids.size} posts"
|
||||
missing_e621_ids.each do |e621_post_id|
|
||||
post_json = T.must(e621_post_id_to_post_json[e621_post_id])
|
||||
post =
|
||||
Domain::E621::TagUtil.initialize_or_update_post(
|
||||
post_json: post_json,
|
||||
caused_by_entry: causing_log_entry,
|
||||
)
|
||||
was_new = post.new_record?
|
||||
post.set_index_page_entry(response.log_entry)
|
||||
post.save!
|
||||
e621_id_to_post_id[e621_post_id] = T.must(post.id)
|
||||
if was_new
|
||||
logger.info("created post #{make_arg_tag(post).join(" ")}")
|
||||
total_new_posts += 1
|
||||
end
|
||||
end
|
||||
|
||||
post_ids.concat(e621_id_to_post_id.values)
|
||||
logger.info(
|
||||
"#{prefix} [req: #{breaker}] [total posts: #{post_ids.size}] [total created: #{total_new_posts}]",
|
||||
)
|
||||
end
|
||||
|
||||
post_ids.concat(e621_id_to_post_id.values)
|
||||
logger.info(
|
||||
"[total posts: #{post_ids.size}] [total created: #{total_new_posts}]",
|
||||
)
|
||||
|
||||
if posts_json.size < MAX_PER_PAGE
|
||||
logger.info(
|
||||
"#{prefix} [fewer than limit; breaking] [limiter: #{limiter}] [req: #{breaker}]",
|
||||
"number of posts #{posts_json.size} < MAX_PER_PAGE (#{MAX_PER_PAGE}), breaking",
|
||||
)
|
||||
break
|
||||
end
|
||||
@@ -132,37 +118,35 @@ class Domain::E621::Job::ScanUserFavsJob < Domain::E621::Job::Base
|
||||
last_e621_post_id = T.cast(T.must(posts_json.last)["id"].to_i, Integer)
|
||||
end
|
||||
|
||||
measure("#{prefix} [upserting favs: #{post_ids.size}]") do
|
||||
post_ids.each_slice(1000) do |slice|
|
||||
ReduxApplicationRecord.transaction do
|
||||
Domain::UserPostFav.upsert_all(
|
||||
slice.map { |post_id| { user_id: user.id, post_id: post_id } },
|
||||
unique_by: :index_domain_user_post_favs_on_user_id_and_post_id,
|
||||
)
|
||||
end
|
||||
logger.info "upserting #{post_ids.size} favs"
|
||||
post_ids.each_slice(1000) do |slice|
|
||||
ReduxApplicationRecord.transaction do
|
||||
Domain::UserPostFav.upsert_all(
|
||||
slice.map { |post_id| { user_id: user.id, post_id: post_id } },
|
||||
unique_by: :index_domain_user_post_favs_on_user_id_and_post_id,
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
logger.info(
|
||||
"#{prefix} " +
|
||||
[
|
||||
"[favs scanned: #{post_ids.size.to_s.bold}]",
|
||||
"[posts created: #{total_new_posts.to_s.bold}]",
|
||||
"[total requests: #{breaker}]",
|
||||
"[done]",
|
||||
].join(" "),
|
||||
[
|
||||
"[favs scanned: #{post_ids.size.to_s.bold}]",
|
||||
"[posts created: #{total_new_posts.to_s.bold}]",
|
||||
"[total requests: #{breaker}]",
|
||||
"done",
|
||||
].join(" "),
|
||||
)
|
||||
|
||||
user.scanned_favs_status = "ok"
|
||||
user.scanned_favs_ok!
|
||||
user.scanned_favs_at = DateTime.current
|
||||
user.save!
|
||||
rescue StandardError
|
||||
logger.error("error scanning user favs: #{user&.e621_id}")
|
||||
user = user_from_args
|
||||
if user
|
||||
user.scanned_favs_status = "error"
|
||||
user.save!
|
||||
end
|
||||
user.scanned_favs_error! if user
|
||||
raise
|
||||
ensure
|
||||
user.save! if user
|
||||
logger.pop_tags
|
||||
end
|
||||
end
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
# typed: strict
|
||||
class Domain::E621::Job::StaticFileJob < Domain::E621::Job::Base
|
||||
include Domain::StaticFileJobHelper
|
||||
queue_as :static_file
|
||||
|
||||
sig { override.params(args: T::Hash[Symbol, T.untyped]).returns(T.untyped) }
|
||||
def perform(args)
|
||||
post_file =
|
||||
if post_file = args[:post_file]
|
||||
T.cast(post_file, Domain::PostFile)
|
||||
file =
|
||||
if file = (args[:file] || args[:post_file])
|
||||
T.cast(file, Domain::PostFile)
|
||||
elsif (post = args[:post]) && post.is_a?(Domain::Post::E621Post)
|
||||
T.must(post.file)
|
||||
elsif (post = args[:post]) && post.is_a?(Domain::E621::Post)
|
||||
@@ -15,50 +16,12 @@ class Domain::E621::Job::StaticFileJob < Domain::E621::Job::Base
|
||||
raise("post with e621_id #{post.e621_id} not found")
|
||||
T.must(post.file)
|
||||
else
|
||||
fatal_error("post_file or post is required")
|
||||
fatal_error(":file or :post is required")
|
||||
end
|
||||
|
||||
post = T.cast(post_file.post, Domain::Post::E621Post)
|
||||
logger.prefix = proc { "[e621_id #{post.e621_id.to_s.bold}]" }
|
||||
|
||||
if post_file.state == "terminal_error"
|
||||
logger.error("post file is in a terminal error state, skipping")
|
||||
return
|
||||
end
|
||||
|
||||
if post_file.state == "ok" && post_file.log_entry_id.present?
|
||||
logger.warn("post file has already been downloaded, skipping")
|
||||
return
|
||||
end
|
||||
|
||||
file_url_str = post_file.url_str
|
||||
if file_url_str.blank?
|
||||
logger.warn("post file has no url, skipping")
|
||||
return
|
||||
end
|
||||
|
||||
response = http_client.get(file_url_str)
|
||||
post_file.log_entry = response.log_entry
|
||||
post_file.last_status_code = response.status_code
|
||||
|
||||
if response.status_code == 200
|
||||
post_file.state = "ok"
|
||||
logger.info "downloaded file"
|
||||
elsif response.status_code == 404
|
||||
post_file.state = "terminal_error"
|
||||
post_file.retry_count += 1
|
||||
logger.error("#{response.status_code}, not retrying download")
|
||||
else
|
||||
post_file.retry_count += 1
|
||||
if post_file.retry_count >= 3
|
||||
post_file.state = "terminal_error"
|
||||
logger.error("file has been retried 3 times, giving up")
|
||||
else
|
||||
post_file.state = "retryable_error"
|
||||
fatal_error("#{response.status_code}, will retry later")
|
||||
end
|
||||
end
|
||||
ensure
|
||||
post_file.save! if post_file
|
||||
logger.tagged(
|
||||
make_tag("post_id", file.post&.id),
|
||||
make_tag("state", file.state),
|
||||
) { download_post_file(file) }
|
||||
end
|
||||
end
|
||||
|
||||
@@ -15,11 +15,6 @@ class Domain::Fa::Job::Base < Scraper::JobBase

  protected

  sig { returns(T::Boolean) }
  def force_scan?
    !!arguments[0][:force_scan]
  end

  sig { params(build_post: T::Boolean).returns(Domain::Post::FaPost) }
  def post_from_args!(build_post: false)
    args = arguments[0]
@@ -22,12 +22,13 @@ class Domain::Fa::Job::FavsJob < Domain::Fa::Job::Base
    full_scan = !!args[:full_scan]
    @use_http_cache = !!args[:use_http_cache]

    logger.prefix = "[#{user.url_name&.bold} / #{user.state&.bold}]"
    logger.push_tags(make_arg_tag(user))
    return unless user_due_for_favs_scan?(user)

    max_page_number =
      T.let([((user.num_favorites || 0) + 1) / 48, 100].max, Integer)
    logger.info "[max page number] [#{max_page_number.to_s.bold}]"
    logger.info make_tag("user.num_favorites", user.num_favorites)
    logger.info make_tag("max favs page number", max_page_number)

    existing_faved_ids =
      T.let(Set.new(user.user_post_favs.pluck(:post_id)), T::Set[Integer])
@@ -45,7 +46,10 @@ class Domain::Fa::Job::FavsJob < Domain::Fa::Job::Base
        user.scanned_favs_at = Time.zone.now

        to_add += @seen_post_ids - existing_faved_ids
        logger.info "[partial scan] [add #{to_add.size.to_s.bold}]"
        logger.info format_tags(
          "partial scan",
          make_tag("add posts", to_add.size),
        )
        ReduxApplicationRecord.transaction do
          to_add.each_slice(1000) do |slice|
            Domain::UserPostFav.upsert_all(
@@ -55,7 +59,10 @@ class Domain::Fa::Job::FavsJob < Domain::Fa::Job::Base
          end
          user.save!
        end
        logger.info "[reached end of unobserved favs] [stopping scan]"
        logger.info format_tags(
          "reached end of unobserved favs",
          "stopping scan",
        )
        return
      end
    end
@@ -65,7 +72,10 @@ class Domain::Fa::Job::FavsJob < Domain::Fa::Job::Base
    end

    to_add = @seen_post_ids - existing_faved_ids
    logger.info "[calc change favs] [add #{to_add.size.to_s.bold}]"
    logger.info format_tags(
      "calc change favs",
      make_tag("add posts", to_add.size),
    )

    ReduxApplicationRecord.transaction do
      if to_add.any?
@@ -80,7 +90,10 @@ class Domain::Fa::Job::FavsJob < Domain::Fa::Job::Base
      user.scanned_favs_at = Time.zone.now
      user.save!
    end
    logger.info "[updated favs list] [posts: #{user.user_post_favs.count.to_s.bold}]"
    logger.info format_tags(
      "updated favs list",
      make_tag("add posts", user.user_post_favs.count),
    )
  ensure
    user.save! if user
  end
@@ -109,7 +122,7 @@ class Domain::Fa::Job::FavsJob < Domain::Fa::Job::Base
      user,
      response,
    )
    logger.error("account disabled / not found, abort")
    logger.error(format_tags("account disabled / not found", "aborting"))
    return :stop
  end

@@ -155,11 +168,11 @@ class Domain::Fa::Job::FavsJob < Domain::Fa::Job::Base
      @last_page_post_ids.add(id)
    end

    logger.info [
      "[page #{@page_number.to_s.bold}]",
      "[posts: #{submissions.length.to_s.bold}]",
      "[created: #{posts_to_create_hashes.size.to_s.bold}]",
    ].join(" ")
    logger.info format_tags(
      make_tag("page", @page_number),
      make_tag("posts", submissions.length),
      make_tag("created", posts_to_create_hashes.size),
    )

    ret
  end
@@ -1,52 +0,0 @@
|
||||
# typed: strict
|
||||
module Domain::Inkbunny::Job
|
||||
class FileJob < Base
|
||||
queue_as :static_file
|
||||
|
||||
sig { override.params(args: T::Hash[Symbol, T.untyped]).returns(T.untyped) }
|
||||
def perform(args)
|
||||
file = args[:file] || fatal_error("file is required")
|
||||
logger.prefix =
|
||||
proc do
|
||||
"[#{file.id.to_s.bold} / " + "#{file.ib_file_id.to_s.bold} / " +
|
||||
"#{file.state.to_s.bold}] "
|
||||
end
|
||||
|
||||
url_str = file.url_str
|
||||
if file.state == "error"
|
||||
retry_count = file.state_detail&.[]("error")&.[]("retry_count") || 0
|
||||
if retry_count >= 3
|
||||
logger.error("file has been retried 3 times, giving up")
|
||||
return
|
||||
end
|
||||
end
|
||||
|
||||
response = http_client.get(url_str)
|
||||
|
||||
if response.status_code != 200
|
||||
file.state = :error
|
||||
fe = (file.state_detail["error"] ||= {})
|
||||
fe["status_code"] = response.status_code
|
||||
fe["log_entry_id"] = response.log_entry.id
|
||||
fe["retry_count"] ||= 0
|
||||
fe["retry_count"] += 1
|
||||
file.save!
|
||||
|
||||
if response.status_code == 404
|
||||
logger.error("#{response.status_code}, not retrying download")
|
||||
else
|
||||
fatal_error("#{response.status_code}, will retry later")
|
||||
end
|
||||
|
||||
return
|
||||
end
|
||||
|
||||
file.state = :ok
|
||||
file.log_entry = response.log_entry
|
||||
file.blob_entry = response.log_entry.response
|
||||
file.state_detail.delete("error")
|
||||
file.save!
|
||||
logger.info "downloaded file"
|
||||
end
|
||||
end
|
||||
end
|
||||
19 app/jobs/domain/inkbunny/job/static_file_job.rb (new file)
@@ -0,0 +1,19 @@
# typed: strict
module Domain::Inkbunny::Job
  class StaticFileJob < Base
    include Domain::StaticFileJobHelper
    queue_as :static_file

    sig { override.params(args: T::Hash[Symbol, T.untyped]).returns(T.untyped) }
    def perform(args)
      file = T.cast(args[:file], Domain::PostFile::InkbunnyPostFile)

      logger.tagged(
        make_tag("file_class", file.class.name),
        make_tag("id", file.id),
        make_tag("ib_file_id", file.ib_id),
        make_tag("state", file.state),
      ) { download_post_file(file) }
    end
  end
end
@@ -3,8 +3,9 @@ module Domain::Inkbunny::Job
  class UpdatePoolJob < Base
    sig { override.params(args: T::Hash[Symbol, T.untyped]).returns(T.untyped) }
    def perform(args)
      pool = T.let(args[:pool], Domain::Inkbunny::Pool)
      logger.prefix = "[pool #{pool.ib_pool_id.to_s.bold}]"
      pool = T.let(args[:pool], Domain::PostGroup::InkbunnyPool)
      logger.prefix = "[pool #{pool.id.to_s.bold}]"
      logger.push_tags(make_tag("pool", pool.id))

      if pool.deep_update_log_entry.present?
        logger.info("skipping, already deep updated")
@@ -24,7 +25,7 @@ module Domain::Inkbunny::Job
      url =
        ApiSearchPageProcessor.build_api_search_url(
          pool_id: pool.ib_pool_id,
          pool_id: pool.ib_id,
          rid: rid,
          page: page,
        )
@@ -66,6 +67,8 @@ module Domain::Inkbunny::Job
          { ib_post_ids: posts_to_update.map(&:ib_id) },
        )
      end
    ensure
      logger.pop_tags
    end
  end
end
@@ -5,7 +5,7 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
|
||||
ib_post_ids = args[:ib_post_ids]
|
||||
|
||||
missing_pool_post_ib_ids = T::Set[Integer].new
|
||||
pools_to_update = T::Set[Domain::Inkbunny::Pool].new
|
||||
pools_to_update = T::Set[Domain::PostGroup::InkbunnyPool].new
|
||||
|
||||
if ib_post_ids.empty?
|
||||
logger.info "empty ib_post_ids"
|
||||
@@ -49,7 +49,7 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
|
||||
params(
|
||||
ib_post_ids_chunk: T::Array[Integer],
|
||||
missing_pool_post_ib_ids: T::Set[Integer],
|
||||
pools_to_update: T::Set[Domain::Inkbunny::Pool],
|
||||
pools_to_update: T::Set[Domain::PostGroup::InkbunnyPool],
|
||||
).void
|
||||
end
|
||||
def process_ib_post_ids(
|
||||
@@ -67,7 +67,7 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
|
||||
logger.info("api_submissions page has #{submission_jsons.size} posts")
|
||||
|
||||
submission_jsons.each do |submission_json|
|
||||
Domain::Inkbunny::Post.transaction do
|
||||
Domain::Post::InkbunnyPost.transaction do
|
||||
deep_update_post_from_submission_json(
|
||||
submission_json,
|
||||
response.log_entry,
|
||||
@@ -85,7 +85,7 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
|
||||
submission_json: T::Hash[String, T.untyped],
|
||||
log_entry: HttpLogEntry,
|
||||
missing_pool_post_ib_ids: T::Set[Integer],
|
||||
pools_to_update: T::Set[Domain::Inkbunny::Pool],
|
||||
pools_to_update: T::Set[Domain::PostGroup::InkbunnyPool],
|
||||
).void
|
||||
end
|
||||
def deep_update_post_from_submission_json(
|
||||
@@ -94,14 +94,14 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
|
||||
missing_pool_post_ib_ids,
|
||||
pools_to_update
|
||||
)
|
||||
logger.prefix = "ib_post_id #{submission_json["submission_id"].to_s.bold}"
|
||||
logger.prefix = "ib_id #{submission_json["submission_id"].to_s.bold}"
|
||||
logger.info "update post #{submission_json["submission_id"].to_s.bold}"
|
||||
|
||||
post =
|
||||
Domain::Inkbunny::Post.includes(:pools).find_by!(
|
||||
ib_post_id: submission_json["submission_id"],
|
||||
Domain::Post::InkbunnyPost.includes(:pools).find_by!(
|
||||
ib_id: submission_json["submission_id"],
|
||||
)
|
||||
logger.info "deep update post #{post.ib_post_id.to_s.bold}"
|
||||
logger.info "deep update post #{post.ib_id.to_s.bold}"
|
||||
post.deep_updated_at = Time.zone.now
|
||||
post.description = submission_json["description"]
|
||||
post.writing = submission_json["writing"]
|
||||
@@ -128,73 +128,86 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
|
||||
|
||||
if submission_json["user_icon_url_large"]
|
||||
user = T.must(post.creator)
|
||||
user.avatar_url_str = submission_json["user_icon_url_large"]
|
||||
if user.avatar_url_str_changed?
|
||||
avatar = user.avatar
|
||||
avatar_url_str = submission_json["user_icon_url_large"]
|
||||
if !avatar || avatar.url_str != avatar_url_str
|
||||
avatar = user.build_avatar
|
||||
avatar.url_str = avatar_url_str
|
||||
user.deep_update_log_entry = log_entry
|
||||
logger.info "avatar url changed, enqueuing download for user #{user.name}"
|
||||
defer_job(Domain::Inkbunny::Job::UserAvatarJob, { user: user })
|
||||
logger.info "avatar url changed, enqueuing download for avatar #{user.name}"
|
||||
avatar.enqueue_job_after_save(
|
||||
Domain::Inkbunny::Job::UserAvatarJob,
|
||||
{ avatar: avatar, caused_by_entry: log_entry },
|
||||
)
|
||||
end
|
||||
user.save!
|
||||
end
|
||||
|
||||
post_files_by_md5 = post.files.index_by(&:md5_initial)
|
||||
post_files_by_ib_id = post.files.index_by(&:ib_id)
|
||||
file_jsons = submission_json["files"] || fatal_error("no files[] array")
|
||||
post.save!
|
||||
|
||||
file_jsons.each do |file_json|
|
||||
file_jsons.each_with_index do |file_json, index|
|
||||
ib_file_id = file_json["file_id"]&.to_i
|
||||
if ib_file_id.blank?
|
||||
logger.error "files[#{index}] for post #{post.ib_id.to_s.bold} has no ib_id, skipping"
|
||||
next
|
||||
end
|
||||
next if post_files_by_ib_id[ib_file_id]
|
||||
md5_initial = file_json["initial_file_md5"]
|
||||
next if post_files_by_md5[md5_initial]
|
||||
|
||||
# We create all files, even those with null MD5 sums (which also do not have
|
||||
# a valid download URL), so that post.files.count will be accurate and match
|
||||
# pagecount.
|
||||
file =
|
||||
post.files.create(
|
||||
{
|
||||
state: md5_initial.present? ? "ok" : "error",
|
||||
ib_file_id: file_json["file_id"]&.to_i,
|
||||
ib_created_at: Time.parse(file_json["create_datetime"]),
|
||||
file_order: file_json["submission_file_order"]&.to_i,
|
||||
ib_detail_raw: file_json,
|
||||
file_name: file_json["file_name"],
|
||||
url_str: file_json["file_url_full"],
|
||||
md5_initial: md5_initial,
|
||||
md5_full: file_json["full_file_md5"],
|
||||
md5s: {
|
||||
initial_file_md5: md5_initial,
|
||||
full_file_md5: file_json["full_file_md5"],
|
||||
large_file_md5: file_json["large_file_md5"],
|
||||
small_file_md5: file_json["small_file_md5"],
|
||||
thumbnail_md5: file_json["thumbnail_md5"],
|
||||
},
|
||||
},
|
||||
)
|
||||
post.files.create do |file|
|
||||
md5_initial.present? ? file.state_ok! : file.state_terminal_error!
|
||||
file.ib_id = ib_file_id
|
||||
file.ib_created_at = Time.zone.parse(file_json["create_datetime"])
|
||||
file.file_order = file_json["submission_file_order"]&.to_i
|
||||
file.ib_detail_raw = file_json
|
||||
file.file_name = file_json["file_name"]
|
||||
file.url_str = file_json["file_url_full"]
|
||||
file.md5_initial = md5_initial
|
||||
file.md5_full = file_json["full_file_md5"]
|
||||
file.md5s = {
|
||||
initial_file_md5: md5_initial,
|
||||
full_file_md5: file_json["full_file_md5"],
|
||||
large_file_md5: file_json["large_file_md5"],
|
||||
small_file_md5: file_json["small_file_md5"],
|
||||
thumbnail_md5: file_json["thumbnail_md5"],
|
||||
}
|
||||
end
|
||||
|
||||
if file.state == "error"
|
||||
logger.error "file #{file.ib_file_id.to_s.bold} is poorly formed, skipping enqueue"
|
||||
if file.state_terminal_error?
|
||||
logger.error "file #{file.ib_id.to_s.bold} is poorly formed, skipping enqueue"
|
||||
next
|
||||
end
|
||||
|
||||
if file.invalid?
|
||||
logger.error "file #{file.ib_file_id.to_s.bold} (ib_post_id #{post.ib_post_id.to_s.bold}) is invalid: #{file.errors.full_messages.join(", ")}"
|
||||
logger.error "file #{file.ib_id.to_s.bold} (ib_id #{post.ib_id.to_s.bold}) is invalid: #{file.errors.full_messages.join(", ")}"
|
||||
fatal_error(
|
||||
"file #{file.ib_file_id.to_s.bold} is invalid: #{file.errors.full_messages.join(", ")}",
|
||||
"file #{file.ib_id.to_s.bold} is invalid: #{file.errors.full_messages.join(", ")}",
|
||||
)
|
||||
end
|
||||
|
||||
logger.info "[ib_post_id #{post.ib_post_id.to_s.bold}] " +
|
||||
"new file #{file.ib_file_id.to_s.bold} - #{file.file_name&.black&.bold}"
|
||||
logger.info "[post ib_id #{post.ib_id.to_s.bold}] " +
|
||||
"new file #{file.ib_id.to_s.bold} - #{file.file_name&.black&.bold}"
|
||||
|
||||
defer_job(Domain::Inkbunny::Job::FileJob, { file: file }, { priority: 1 })
|
||||
defer_job(
|
||||
Domain::Inkbunny::Job::StaticFileJob,
|
||||
{ file: file },
|
||||
{ priority: 1 },
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
sig do
|
||||
params(
|
||||
post: Domain::Inkbunny::Post,
|
||||
post: Domain::Post::InkbunnyPost,
|
||||
pools_json: T::Array[T::Hash[String, T.untyped]],
|
||||
missing_pool_post_ib_ids: T::Set[Integer],
|
||||
pools_to_update: T::Set[Domain::Inkbunny::Pool],
|
||||
pools_to_update: T::Set[Domain::PostGroup::InkbunnyPool],
|
||||
).void
|
||||
end
|
||||
def update_submission_pools(
|
||||
@@ -209,25 +222,25 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
|
||||
submission_left_submission_id
|
||||
submission_right_submission_id
|
||||
].map do |key|
|
||||
ib_post_id = pool_json[key]&.to_i
|
||||
next nil if ib_post_id.blank?
|
||||
ib_id = pool_json[key]&.to_i
|
||||
next nil if ib_id.blank?
|
||||
p =
|
||||
Domain::Inkbunny::Post.find_or_initialize_by(
|
||||
ib_post_id: ib_post_id,
|
||||
Domain::Post::InkbunnyPost.find_or_initialize_by(
|
||||
ib_id: ib_id,
|
||||
) do |p|
|
||||
p.creator = post.creator
|
||||
p.state_detail = { "created_from" => "pool_mention" }
|
||||
# p.state_detail = { "created_from" => "pool_mention" }
|
||||
end
|
||||
if p.new_record?
|
||||
missing_pool_post_ib_ids.add(ib_post_id)
|
||||
missing_pool_post_ib_ids.add(ib_id)
|
||||
p.save!
|
||||
end
|
||||
p
|
||||
end
|
||||
|
||||
pool =
|
||||
Domain::Inkbunny::Pool.find_or_initialize_by(
|
||||
ib_pool_id: pool_json["pool_id"],
|
||||
Domain::PostGroup::InkbunnyPool.find_or_initialize_by(
|
||||
ib_id: pool_json["pool_id"],
|
||||
)
|
||||
pools_to_update.add(pool) if pool.deep_update_log_entry_id.blank?
|
||||
pool.count = pool_json["count"]&.to_i
|
||||
@@ -235,7 +248,7 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
|
||||
pool.description = pool_json["description"]
|
||||
pool.save!
|
||||
|
||||
pool_join = post.pool_joins.find_or_initialize_by(pool: pool)
|
||||
pool_join = post.post_group_joins.find_or_initialize_by(group: pool)
|
||||
pool_join.left_post = left_post
|
||||
pool_join.right_post = right_post
|
||||
pool_join.save!
|
||||
|
||||
@@ -19,7 +19,6 @@ module Domain::Inkbunny::Job

      response = http_client.get(url_str)
      self.first_log_entry ||= response.log_entry

      avatar.last_log_entry = response.log_entry

      case response.status_code
@@ -4,7 +4,10 @@ module Domain::Inkbunny::Job
    sig { override.params(args: T::Hash[Symbol, T.untyped]).returns(T.untyped) }
    def perform(args)
      user = user_from_args!
      logger.prefix = "[#{user.name&.bold} / #{user.ib_id.to_s.bold}]"
      logger.push_tags(
        make_tag("id", user.id),
        make_tag("user_ib_id", user.ib_id),
      )

      if user.scanned_gallery_at&.after?(1.week.ago)
        logger.warn(
@@ -70,6 +73,8 @@ module Domain::Inkbunny::Job
          { ib_post_ids: processor.changed_posts.map(&:ib_id) },
        )
      end
    ensure
      logger.pop_tags
    end
  end
end
55 app/jobs/domain/static_file_job_helper.rb (new file)
@@ -0,0 +1,55 @@
# typed: strict
module Domain::StaticFileJobHelper
  extend T::Sig
  extend T::Helpers
  include HasColorLogger
  abstract!
  requires_ancestor { Scraper::JobBase }

  MAX_RETRIES = 3

  sig { params(post_file: Domain::PostFile).void }
  def download_post_file(post_file)
    if post_file.state_terminal_error?
      logger.error("post file is in a terminal error state, skipping")
      return
    end

    if post_file.state_ok? && post_file.log_entry_id.present?
      logger.warn("post file has already been downloaded, skipping")
      return
    end

    file_url_str = post_file.url_str
    if file_url_str.blank?
      logger.warn("post file has no url, skipping")
      return
    end

    response = http_client.get(file_url_str)
    post_file.log_entry = response.log_entry
    post_file.last_status_code = response.status_code

    if response.status_code == 200
      post_file.state_ok!
      logger.info("downloaded file")
    elsif response.status_code == 404
      post_file.state_terminal_error!
      post_file.retry_count += 1
      logger.error("#{response.status_code}, not retrying download")
    else
      post_file.retry_count += 1
      if post_file.retry_count > MAX_RETRIES
        post_file.state_terminal_error!
        logger.error(
          "file has been retried #{post_file.retry_count} times, giving up",
        )
      else
        post_file.state_retryable_error!
        fatal_error("#{response.status_code}, will retry later")
      end
    end
  ensure
    post_file.save! if post_file
  end
end
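download_post_file leans on the new Domain::PostFile state enum introduced further down in this commit; a small sketch of the helpers that enum generates (method names per the regenerated RBI, shown on a hypothetical record):

file = Domain::PostFile.new
file.state = :retryable_error        # the enum setter accepts symbols or strings
file.state_retryable_error?          # => true
file.state_terminal_error!           # persists state = "terminal_error" via update!
Domain::PostFile.state_ok            # scope: files whose state is "ok"
Domain::PostFile.not_state_pending   # negated scope, also generated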
@@ -94,6 +94,11 @@ class Scraper::JobBase < ApplicationJob
  def perform(args)
  end

  sig { returns(T::Boolean) }
  def force_scan?
    !!arguments[0][:force_scan]
  end

  # The log entry that caused this job to be enqueued.
  sig { returns(T.nilable(HttpLogEntry)) }
  def caused_by_entry
@@ -1,6 +1,7 @@
# typed: strict
class ColorLogger < Logger
  extend T::Sig
  include ActiveSupport::TaggedLogging

  @quiet =
    T.let(Concurrent::ThreadLocalVar.new { 0 }, Concurrent::ThreadLocalVar)
@@ -68,9 +69,13 @@ class ColorLogger < Logger
    @log_lines = T.let([], T::Array[String])

    this = self
    this.define_singleton_method(:prefix=) do |prefix|
      this.instance_variable_set(:@prefix, prefix)
    end

    self.formatter =
      proc do |severity, datetime, progname, msg|
        prefix = this.prefix
        prefix = this.instance_variable_get(:@prefix)
        prefix = prefix.call if prefix.is_a?(Proc)

        line = [prefix, msg].reject(&:blank?).join(" ")
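Including ActiveSupport::TaggedLogging is what gives ColorLogger the tagged / push_tags / pop_tags calls used in the jobs above; a minimal sketch of the call pattern (exact rendering still depends on ColorLogger's own formatter):

logger.push_tags("user.id: 7")     # tags apply to every line until popped
logger.info("scanning favs")       # => [user.id: 7] scanning favs
logger.pop_tags

logger.tagged("post.id: 42") do    # block form pops the tag automatically
  logger.info("downloaded file")   # => [post.id: 42] downloaded file
end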
@@ -9,18 +9,27 @@ class Domain::E621::TagUtil
    params(
      post_json: T::Hash[String, T.untyped],
      caused_by_entry: T.nilable(ReduxApplicationRecord),
      force_update: T::Boolean,
    ).returns(Domain::Post::E621Post)
  end
  def self.initialize_or_update_post(post_json:, caused_by_entry: nil)
  def self.initialize_or_update_post(
    post_json:,
    caused_by_entry: nil,
    force_update: false
  )
    # create all posts that don't already exist
    e621_id = T.cast(post_json["id"], Integer)
    e621_post = Domain::Post::E621Post.find_or_initialize_by(e621_id: e621_id)

    e621_updated_at = post_json["updated_at"]
    return e621_post if e621_post.e621_updated_at == e621_updated_at
    e621_updated_at = Time.zone.parse(post_json["updated_at"])
    if !force_update && (e621_post.e621_updated_at == e621_updated_at)
      logger.info(
        "post #{e621_post.id} / #{e621_post.e621_id} updated at #{e621_updated_at.to_s.bold} is unchanged",
      )
      return e621_post
    end

    e621_post.e621_updated_at = post_json["updated_at"]
    e621_post.index_page_ids ||= []
    e621_post.e621_updated_at = e621_updated_at
    e621_post.caused_by_entry_id = caused_by_entry.id if caused_by_entry

    e621_md5 = T.cast(post_json["file"]["md5"], String)
@@ -36,6 +45,9 @@ class Domain::E621::TagUtil
    file =
      e621_post.file ||
        e621_post.build_file do |new_file|
          logger.info(
            "building file for post #{e621_post.id} / #{e621_post.e621_id}",
          )
          new_file = T.cast(new_file, Domain::PostFile)
          new_file.enqueue_job_after_save(
            Domain::E621::Job::StaticFileJob,
@@ -35,6 +35,7 @@ class Domain::E621::Task::FixE621PostMissingFiles
|
||||
Domain::E621::TagUtil.initialize_or_update_post(
|
||||
post_json: post_json,
|
||||
caused_by_entry: post.last_index_page,
|
||||
force_update: true,
|
||||
)
|
||||
unless post_updated.id == post.id
|
||||
logger.error(
|
||||
@@ -47,6 +48,11 @@ class Domain::E621::Task::FixE621PostMissingFiles
|
||||
"[fixed missing file][post id: #{post.id}][post e621_id: #{post.e621_id}]",
|
||||
)
|
||||
|
||||
if !post_updated.file
|
||||
logger.error("post #{post.id} has no file")
|
||||
binding.pry
|
||||
end
|
||||
|
||||
post_updated.save!
|
||||
end
|
||||
end
|
||||
|
||||
@@ -21,6 +21,47 @@ module HasColorLogger
    end
  end

  sig { params(tag_name: String, tag_value: T.untyped).returns(String) }
  def make_tag(tag_name, tag_value)
    tag_value_str = tag_value ? tag_value.to_s.bold : "(nil)".italic
    "#{tag_name}: #{tag_value_str}"
  end

  sig { params(tags: String).returns(String) }
  def format_tags(*tags)
    tags.map { |tag| "[#{tag}]" }.join(" ")
  end

  sig { params(arg: T.untyped).returns(T::Array[String]) }
  def make_arg_tag(arg)
    tags = []

    if arg.is_a?(Domain::User)
      prefix_and_attr = arg.class.param_prefix_and_attribute
      tags << make_tag("user.id", arg.id)
      tags << make_tag(
        "user.#{prefix_and_attr[0]}",
        arg.send(prefix_and_attr[1]),
      )
    end

    if arg.is_a?(Domain::Post)
      prefix_and_attr = arg.class.param_prefix_and_attribute
      tags << make_tag("post.id", arg.id)
      tags << make_tag(
        "post.#{prefix_and_attr[0]}",
        arg.send(prefix_and_attr[1]),
      )
    end

    if arg.is_a?(Domain::PostFile)
      tags << make_tag("post_file.id", arg.id)
      tags << make_tag("post_file.post_id", arg.post_id)
    end

    tags
  end

  # by default, write to stdout
  extend ActiveSupport::Concern
  included { include HasColorLogger[$stdout] }
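Taken together, make_tag, format_tags, and make_arg_tag compose like this; a sketch assuming a post record, with output shown without the bold/italic escapes:

tag = make_tag("page", 3)                      # => "page: 3"
logger.info format_tags("partial scan", tag)   # => "[partial scan] [page: 3]"

# make_arg_tag returns an array of tags suited to push_tags:
logger.push_tags(make_arg_tag(post))           # e.g. ["post.id: 42", "post.<prefix>: <value>"]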
@@ -46,6 +46,19 @@ class Scraper::CurlHttpPerformer
    ).returns(Response)
  end
  def do_request(method, url, request_headers)
    Timeout.timeout(60) { do_request_impl(method, url, request_headers) }
  end

  private

  sig do
    params(
      method: Symbol,
      url: String,
      request_headers: T::Hash[String, String],
    ).returns(Response)
  end
  def do_request_impl(method, url, request_headers)
    t, curl = get_curl
    start_at = Time.now
    curl.url = Addressable::URI.encode url
@@ -99,8 +112,6 @@ class Scraper::CurlHttpPerformer
    )
  end

  private

  sig { returns([Thread, Curl::Easy]) }
  def get_curl
    t = Thread.current
||||
|
||||
@@ -35,11 +35,12 @@ class Domain::Post::InkbunnyPost < Domain::Post
|
||||
attr_json :submission_type, :string
|
||||
attr_json :title, :string
|
||||
attr_json :writing, :string
|
||||
attr_json :description, :string
|
||||
attr_json :num_views, :integer
|
||||
attr_json :num_files, :integer
|
||||
attr_json :num_favs, :integer
|
||||
attr_json :num_comments, :integer
|
||||
attr_json :keywords, :string, array: true
|
||||
attr_json :keywords, ActiveModel::Type::Value.new
|
||||
attr_json :last_file_updated_at, :datetime
|
||||
attr_json :deep_update_log_entry_id, :integer
|
||||
attr_json :shallow_update_log_entry_id, :integer
|
||||
|
||||
@@ -16,6 +16,15 @@ class Domain::PostFile < ReduxApplicationRecord
  attr_json :last_status_code, :integer
  attr_json :retry_count, :integer

  enum :state,
       {
         pending: "pending",
         ok: "ok",
         retryable_error: "retryable_error",
         terminal_error: "terminal_error",
       },
       prefix: "state"

  validates :state,
            inclusion: {
              in: %w[pending ok retryable_error terminal_error],
@@ -8,4 +8,9 @@ class Domain::PostFile::InkbunnyPostFile < Domain::PostFile
|
||||
attr_json :md5_full, :string
|
||||
attr_json :md5s, ActiveModel::Type::Value.new
|
||||
attr_json :file_order, :integer
|
||||
|
||||
sig { returns(Integer) }
|
||||
def retry_count
|
||||
super || 0
|
||||
end
|
||||
end
|
||||
|
||||
@@ -1,5 +1,14 @@
|
||||
# typed: strict
|
||||
class Domain::PostGroup::InkbunnyPool < Domain::PostGroup
|
||||
attr_json :ib_id, :integer
|
||||
attr_json :deep_update_log_entry_id, :integer
|
||||
attr_json :count, :integer
|
||||
attr_json :name, :string
|
||||
attr_json :description, :string
|
||||
|
||||
validates :ib_id, presence: true
|
||||
|
||||
belongs_to :deep_update_log_entry,
|
||||
class_name: "::HttpLogEntry",
|
||||
optional: true
|
||||
end
|
||||
|
||||
@@ -8,6 +8,8 @@ class Domain::User::E621User < Domain::User
|
||||
attr_json :scanned_favs_at, :datetime
|
||||
attr_json :registered_at, :datetime
|
||||
|
||||
enum :scanned_favs_status, { ok: "ok", error: "error" }, prefix: :scanned_favs
|
||||
|
||||
has_many :uploaded_posts,
|
||||
class_name: "::Domain::Post::E621Post",
|
||||
foreign_key: :uploader_user_id,
|
||||
|
||||
@@ -27,6 +27,11 @@ class Domain::PostPolicy < ApplicationPolicy
|
||||
user&.admin? || false
|
||||
end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def view_faved_by?
|
||||
user&.admin? || false
|
||||
end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def user_favorite_posts?
|
||||
true
|
||||
|
||||
@@ -36,7 +36,7 @@ namespace :ib do
|
||||
.where(blob_entry_sha256: nil)
|
||||
.where("url_str <> ?", "")
|
||||
.find_each do |file|
|
||||
Domain::Inkbunny::Job::FileJob.new.perform(file: file)
|
||||
Domain::Inkbunny::Job::StaticFileJob.new.perform(file: file)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
# typed: true
|
||||
|
||||
# DO NOT EDIT MANUALLY
|
||||
# This is an autogenerated file for dynamic methods in `Domain::Inkbunny::Job::FileJob`.
|
||||
# Please instead update this file by running `bin/tapioca dsl Domain::Inkbunny::Job::FileJob`.
|
||||
# This is an autogenerated file for dynamic methods in `Domain::Inkbunny::Job::StaticFileJob`.
|
||||
# Please instead update this file by running `bin/tapioca dsl Domain::Inkbunny::Job::StaticFileJob`.
|
||||
|
||||
|
||||
class Domain::Inkbunny::Job::FileJob
|
||||
class Domain::Inkbunny::Job::StaticFileJob
|
||||
sig { returns(ColorLogger) }
|
||||
def logger; end
|
||||
|
||||
@@ -16,8 +16,8 @@ class Domain::Inkbunny::Job::FileJob
|
||||
sig do
|
||||
params(
|
||||
args: T::Hash[::Symbol, T.untyped],
|
||||
block: T.nilable(T.proc.params(job: Domain::Inkbunny::Job::FileJob).void)
|
||||
).returns(T.any(Domain::Inkbunny::Job::FileJob, FalseClass))
|
||||
block: T.nilable(T.proc.params(job: Domain::Inkbunny::Job::StaticFileJob).void)
|
||||
).returns(T.any(Domain::Inkbunny::Job::StaticFileJob, FalseClass))
|
||||
end
|
||||
def perform_later(args, &block); end
|
||||
|
||||
57
sorbet/rbi/dsl/domain/post/inkbunny_post.rbi
generated
57
sorbet/rbi/dsl/domain/post/inkbunny_post.rbi
generated
@@ -969,6 +969,51 @@ class Domain::Post::InkbunnyPost
|
||||
sig { void }
|
||||
def deep_updated_at_will_change!; end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def description; end
|
||||
|
||||
sig { params(value: T.nilable(::String)).returns(T.nilable(::String)) }
|
||||
def description=(value); end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def description?; end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def description_before_last_save; end
|
||||
|
||||
sig { returns(T.untyped) }
|
||||
def description_before_type_cast; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def description_came_from_user?; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def description_change; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def description_change_to_be_saved; end
|
||||
|
||||
sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
|
||||
def description_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def description_in_database; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def description_previous_change; end
|
||||
|
||||
sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
|
||||
def description_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def description_previously_was; end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def description_was; end
|
||||
|
||||
sig { void }
|
||||
def description_will_change!; end
|
||||
|
||||
sig { returns(T.untyped) }
|
||||
def ib_detail_raw; end
|
||||
|
||||
@@ -1628,6 +1673,9 @@ class Domain::Post::InkbunnyPost
|
||||
sig { void }
|
||||
def restore_deep_updated_at!; end
|
||||
|
||||
sig { void }
|
||||
def restore_description!; end
|
||||
|
||||
sig { void }
|
||||
def restore_ib_detail_raw!; end
|
||||
|
||||
@@ -1712,6 +1760,12 @@ class Domain::Post::InkbunnyPost
|
||||
sig { returns(T::Boolean) }
|
||||
def saved_change_to_deep_updated_at?; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def saved_change_to_description; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def saved_change_to_description?; end
|
||||
|
||||
sig { returns(T.nilable([T.untyped, T.untyped])) }
|
||||
def saved_change_to_ib_detail_raw; end
|
||||
|
||||
@@ -2188,6 +2242,9 @@ class Domain::Post::InkbunnyPost
|
||||
sig { returns(T::Boolean) }
|
||||
def will_save_change_to_deep_updated_at?; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def will_save_change_to_description?; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def will_save_change_to_ib_detail_raw?; end
|
||||
|
||||
|
||||
94
sorbet/rbi/dsl/domain/post_file.rbi
generated
94
sorbet/rbi/dsl/domain/post_file.rbi
generated
@@ -8,6 +8,7 @@
|
||||
class Domain::PostFile
|
||||
include GeneratedAssociationMethods
|
||||
include GeneratedAttributeMethods
|
||||
include EnumMethodsModule
|
||||
extend CommonRelationMethods
|
||||
extend GeneratedRelationMethods
|
||||
|
||||
@@ -51,6 +52,9 @@ class Domain::PostFile
|
||||
).returns(::Domain::PostFile)
|
||||
end
|
||||
def new(attributes = nil, &block); end
|
||||
|
||||
sig { returns(T::Hash[T.any(String, Symbol), String]) }
|
||||
def states; end
|
||||
end
|
||||
|
||||
module CommonRelationMethods
|
||||
@@ -410,6 +414,32 @@ class Domain::PostFile
|
||||
def third_to_last!; end
|
||||
end
|
||||
|
||||
module EnumMethodsModule
|
||||
sig { void }
|
||||
def state_ok!; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def state_ok?; end
|
||||
|
||||
sig { void }
|
||||
def state_pending!; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def state_pending?; end
|
||||
|
||||
sig { void }
|
||||
def state_retryable_error!; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def state_retryable_error?; end
|
||||
|
||||
sig { void }
|
||||
def state_terminal_error!; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def state_terminal_error?; end
|
||||
end
|
||||
|
||||
module GeneratedAssociationMethods
|
||||
sig { returns(T.nilable(::BlobEntry)) }
|
||||
def blob; end
|
||||
@@ -566,6 +596,18 @@ class Domain::PostFile
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def none(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def not_state_ok(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def not_state_pending(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def not_state_retryable_error(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def not_state_terminal_error(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def null_relation?(*args, &blk); end
|
||||
|
||||
@@ -625,6 +667,18 @@ class Domain::PostFile
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def select(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def state_ok(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def state_pending(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def state_retryable_error(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def state_terminal_error(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def strict_loading(*args, &blk); end
|
||||
|
||||
@@ -1246,7 +1300,7 @@ class Domain::PostFile
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def state; end
|
||||
|
||||
sig { params(value: T.nilable(::String)).returns(T.nilable(::String)) }
|
||||
sig { params(value: T.nilable(T.any(::String, ::Symbol))).returns(T.nilable(T.any(::String, ::Symbol))) }
|
||||
def state=(value); end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
@@ -1267,7 +1321,12 @@ class Domain::PostFile
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def state_change_to_be_saved; end
|
||||
|
||||
sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
|
||||
sig do
|
||||
params(
|
||||
from: T.nilable(T.any(::String, ::Symbol)),
|
||||
to: T.nilable(T.any(::String, ::Symbol))
|
||||
).returns(T::Boolean)
|
||||
end
|
||||
def state_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
@@ -1276,7 +1335,12 @@ class Domain::PostFile
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def state_previous_change; end
|
||||
|
||||
sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
|
||||
sig do
|
||||
params(
|
||||
from: T.nilable(T.any(::String, ::Symbol)),
|
||||
to: T.nilable(T.any(::String, ::Symbol))
|
||||
).returns(T::Boolean)
|
||||
end
|
||||
def state_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
@@ -1549,6 +1613,18 @@ class Domain::PostFile
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def none(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def not_state_ok(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def not_state_pending(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def not_state_retryable_error(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def not_state_terminal_error(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def null_relation?(*args, &blk); end
|
||||
|
||||
@@ -1608,6 +1684,18 @@ class Domain::PostFile
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def select(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def state_ok(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def state_pending(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def state_retryable_error(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def state_terminal_error(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
|
||||
def strict_loading(*args, &blk); end
|
||||
|
||||
|
||||
@@ -8,6 +8,7 @@
|
||||
class Domain::PostFile::InkbunnyPostFile
|
||||
include GeneratedAssociationMethods
|
||||
include GeneratedAttributeMethods
|
||||
include EnumMethodsModule
|
||||
extend CommonRelationMethods
|
||||
extend GeneratedRelationMethods
|
||||
|
||||
@@ -51,6 +52,9 @@ class Domain::PostFile::InkbunnyPostFile
|
||||
).returns(::Domain::PostFile::InkbunnyPostFile)
|
||||
end
|
||||
def new(attributes = nil, &block); end
|
||||
|
||||
sig { returns(T::Hash[T.any(String, Symbol), String]) }
|
||||
def states; end
|
||||
end
|
||||
|
||||
module CommonRelationMethods
|
||||
@@ -447,6 +451,8 @@ class Domain::PostFile::InkbunnyPostFile
|
||||
def third_to_last!; end
|
||||
end
|
||||
|
||||
module EnumMethodsModule; end
|
||||
|
||||
module GeneratedAssociationMethods
|
||||
sig { returns(T.nilable(::BlobEntry)) }
|
||||
def blob; end
|
||||
@@ -585,6 +591,18 @@ class Domain::PostFile::InkbunnyPostFile
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
|
||||
def none(*args, &blk); end
|
||||
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def not_state_ok(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def not_state_pending(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def not_state_retryable_error(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def not_state_terminal_error(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def null_relation?(*args, &blk); end

@@ -644,6 +662,18 @@ class Domain::PostFile::InkbunnyPostFile

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def select(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def state_ok(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def state_pending(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def state_retryable_error(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def state_terminal_error(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def strict_loading(*args, &blk); end

@@ -1707,7 +1737,7 @@ class Domain::PostFile::InkbunnyPostFile

sig { returns(T.nilable(::String)) }
def state; end

sig { params(value: T.nilable(::String)).returns(T.nilable(::String)) }
sig { params(value: T.nilable(T.any(::String, ::Symbol))).returns(T.nilable(T.any(::String, ::Symbol))) }
def state=(value); end

sig { returns(T::Boolean) }

@@ -1728,7 +1758,12 @@ class Domain::PostFile::InkbunnyPostFile

sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def state_change_to_be_saved; end

sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
sig do
  params(
    from: T.nilable(T.any(::String, ::Symbol)),
    to: T.nilable(T.any(::String, ::Symbol))
  ).returns(T::Boolean)
end
def state_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end

sig { returns(T.nilable(::String)) }

@@ -1737,7 +1772,12 @@ class Domain::PostFile::InkbunnyPostFile

sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def state_previous_change; end

sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
sig do
  params(
    from: T.nilable(T.any(::String, ::Symbol)),
    to: T.nilable(T.any(::String, ::Symbol))
  ).returns(T::Boolean)
end
def state_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end

sig { returns(T.nilable(::String)) }

@@ -2034,6 +2074,18 @@ class Domain::PostFile::InkbunnyPostFile

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def none(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def not_state_ok(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def not_state_pending(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def not_state_retryable_error(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def not_state_terminal_error(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def null_relation?(*args, &blk); end

@@ -2093,6 +2145,18 @@ class Domain::PostFile::InkbunnyPostFile

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def select(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def state_ok(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def state_pending(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def state_retryable_error(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def state_terminal_error(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def strict_loading(*args, &blk); end
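The new state_ok / not_state_ok scopes, the enum bang and predicate helpers, and the symbol-accepting state= writer above are what the Sorbet DSL RBI generator emits once the column is declared as a Rails enum. The model change itself is not part of this hunk, so the declaration below is only a sketch of what would produce these signatures; the value mapping and options are assumptions:

# Sketch only - the actual model edit is not shown in this diff.
class Domain::PostFile::InkbunnyPostFile
  enum :state,
       { pending: "pending", ok: "ok", retryable_error: "retryable_error", terminal_error: "terminal_error" },
       prefix: true
end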
255
sorbet/rbi/dsl/domain/post_group/inkbunny_pool.rbi
generated
@@ -448,6 +448,27 @@ class Domain::PostGroup::InkbunnyPool

end

module GeneratedAssociationMethods
sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def build_deep_update_log_entry(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def create_deep_update_log_entry(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def create_deep_update_log_entry!(*args, &blk); end

sig { returns(T.nilable(::HttpLogEntry)) }
def deep_update_log_entry; end

sig { params(value: T.nilable(::HttpLogEntry)).void }
def deep_update_log_entry=(value); end

sig { returns(T::Boolean) }
def deep_update_log_entry_changed?; end

sig { returns(T::Boolean) }
def deep_update_log_entry_previously_changed?; end

sig { returns(T::Array[T.untyped]) }
def post_group_join_ids; end

@@ -475,6 +496,12 @@ class Domain::PostGroup::InkbunnyPool

sig { params(value: T::Enumerable[::Domain::Post]).void }
def posts=(value); end

sig { returns(T.nilable(::HttpLogEntry)) }
def reload_deep_update_log_entry; end

sig { void }
def reset_deep_update_log_entry; end
end

module GeneratedAssociationRelationMethods

@@ -641,6 +668,51 @@ class Domain::PostGroup::InkbunnyPool

end

module GeneratedAttributeMethods
sig { returns(T.nilable(::Integer)) }
def count; end

sig { params(value: T.nilable(::Integer)).returns(T.nilable(::Integer)) }
def count=(value); end

sig { returns(T::Boolean) }
def count?; end

sig { returns(T.nilable(::Integer)) }
def count_before_last_save; end

sig { returns(T.untyped) }
def count_before_type_cast; end

sig { returns(T::Boolean) }
def count_came_from_user?; end

sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def count_change; end

sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def count_change_to_be_saved; end

sig { params(from: T.nilable(::Integer), to: T.nilable(::Integer)).returns(T::Boolean) }
def count_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end

sig { returns(T.nilable(::Integer)) }
def count_in_database; end

sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def count_previous_change; end

sig { params(from: T.nilable(::Integer), to: T.nilable(::Integer)).returns(T::Boolean) }
def count_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end

sig { returns(T.nilable(::Integer)) }
def count_previously_was; end

sig { returns(T.nilable(::Integer)) }
def count_was; end

sig { void }
def count_will_change!; end

sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def created_at; end

@@ -696,6 +768,96 @@ class Domain::PostGroup::InkbunnyPool

sig { void }
def created_at_will_change!; end

sig { returns(T.nilable(::Integer)) }
def deep_update_log_entry_id; end

sig { params(value: T.nilable(::Integer)).returns(T.nilable(::Integer)) }
def deep_update_log_entry_id=(value); end

sig { returns(T::Boolean) }
def deep_update_log_entry_id?; end

sig { returns(T.nilable(::Integer)) }
def deep_update_log_entry_id_before_last_save; end

sig { returns(T.untyped) }
def deep_update_log_entry_id_before_type_cast; end

sig { returns(T::Boolean) }
def deep_update_log_entry_id_came_from_user?; end

sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def deep_update_log_entry_id_change; end

sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def deep_update_log_entry_id_change_to_be_saved; end

sig { params(from: T.nilable(::Integer), to: T.nilable(::Integer)).returns(T::Boolean) }
def deep_update_log_entry_id_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end

sig { returns(T.nilable(::Integer)) }
def deep_update_log_entry_id_in_database; end

sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def deep_update_log_entry_id_previous_change; end

sig { params(from: T.nilable(::Integer), to: T.nilable(::Integer)).returns(T::Boolean) }
def deep_update_log_entry_id_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end

sig { returns(T.nilable(::Integer)) }
def deep_update_log_entry_id_previously_was; end

sig { returns(T.nilable(::Integer)) }
def deep_update_log_entry_id_was; end

sig { void }
def deep_update_log_entry_id_will_change!; end

sig { returns(T.nilable(::String)) }
def description; end

sig { params(value: T.nilable(::String)).returns(T.nilable(::String)) }
def description=(value); end

sig { returns(T::Boolean) }
def description?; end

sig { returns(T.nilable(::String)) }
def description_before_last_save; end

sig { returns(T.untyped) }
def description_before_type_cast; end

sig { returns(T::Boolean) }
def description_came_from_user?; end

sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def description_change; end

sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def description_change_to_be_saved; end

sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
def description_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end

sig { returns(T.nilable(::String)) }
def description_in_database; end

sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def description_previous_change; end

sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
def description_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end

sig { returns(T.nilable(::String)) }
def description_previously_was; end

sig { returns(T.nilable(::String)) }
def description_was; end

sig { void }
def description_will_change!; end

sig { returns(T.nilable(::Integer)) }
def ib_id; end

@@ -876,9 +1038,63 @@ class Domain::PostGroup::InkbunnyPool

sig { void }
def json_attributes_will_change!; end

sig { returns(T.nilable(::String)) }
def name; end

sig { params(value: T.nilable(::String)).returns(T.nilable(::String)) }
def name=(value); end

sig { returns(T::Boolean) }
def name?; end

sig { returns(T.nilable(::String)) }
def name_before_last_save; end

sig { returns(T.untyped) }
def name_before_type_cast; end

sig { returns(T::Boolean) }
def name_came_from_user?; end

sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def name_change; end

sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def name_change_to_be_saved; end

sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
def name_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end

sig { returns(T.nilable(::String)) }
def name_in_database; end

sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def name_previous_change; end

sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
def name_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end

sig { returns(T.nilable(::String)) }
def name_previously_was; end

sig { returns(T.nilable(::String)) }
def name_was; end

sig { void }
def name_will_change!; end

sig { void }
def restore_count!; end

sig { void }
def restore_created_at!; end

sig { void }
def restore_deep_update_log_entry_id!; end

sig { void }
def restore_description!; end

sig { void }
def restore_ib_id!; end

@@ -891,18 +1107,39 @@ class Domain::PostGroup::InkbunnyPool

sig { void }
def restore_json_attributes!; end

sig { void }
def restore_name!; end

sig { void }
def restore_type!; end

sig { void }
def restore_updated_at!; end

sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def saved_change_to_count; end

sig { returns(T::Boolean) }
def saved_change_to_count?; end

sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def saved_change_to_created_at; end

sig { returns(T::Boolean) }
def saved_change_to_created_at?; end

sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def saved_change_to_deep_update_log_entry_id; end

sig { returns(T::Boolean) }
def saved_change_to_deep_update_log_entry_id?; end

sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def saved_change_to_description; end

sig { returns(T::Boolean) }
def saved_change_to_description?; end

sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def saved_change_to_ib_id; end

@@ -927,6 +1164,12 @@ class Domain::PostGroup::InkbunnyPool

sig { returns(T::Boolean) }
def saved_change_to_json_attributes?; end

sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def saved_change_to_name; end

sig { returns(T::Boolean) }
def saved_change_to_name?; end

sig { returns(T.nilable([T.untyped, T.untyped])) }
def saved_change_to_type; end

@@ -1039,9 +1282,18 @@ class Domain::PostGroup::InkbunnyPool

sig { void }
def updated_at_will_change!; end

sig { returns(T::Boolean) }
def will_save_change_to_count?; end

sig { returns(T::Boolean) }
def will_save_change_to_created_at?; end

sig { returns(T::Boolean) }
def will_save_change_to_deep_update_log_entry_id?; end

sig { returns(T::Boolean) }
def will_save_change_to_description?; end

sig { returns(T::Boolean) }
def will_save_change_to_ib_id?; end

@@ -1054,6 +1306,9 @@ class Domain::PostGroup::InkbunnyPool

sig { returns(T::Boolean) }
def will_save_change_to_json_attributes?; end

sig { returns(T::Boolean) }
def will_save_change_to_name?; end

sig { returns(T::Boolean) }
def will_save_change_to_type?; end
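The build_ / create_ / reload_ / reset_ helpers for deep_update_log_entry in GeneratedAssociationMethods above are the standard methods Rails produces for a belongs_to association. The model itself is not part of this diff, so the declaration below is only a sketch; the option names are assumptions (the nilable return types suggest the association is optional):

# Sketch only - the actual model edit is not shown in this diff.
class Domain::PostGroup::InkbunnyPool
  belongs_to :deep_update_log_entry, class_name: "HttpLogEntry", optional: true
end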
58
sorbet/rbi/dsl/domain/user/e621_user.rbi
generated
@@ -8,6 +8,7 @@

class Domain::User::E621User
include GeneratedAssociationMethods
include GeneratedAttributeMethods
include EnumMethodsModule
extend CommonRelationMethods
extend GeneratedRelationMethods

@@ -51,6 +52,9 @@ class Domain::User::E621User

).returns(::Domain::User::E621User)
end
def new(attributes = nil, &block); end

sig { returns(T::Hash[T.any(String, Symbol), String]) }
def scanned_favs_statuses; end
end

module CommonRelationMethods

@@ -442,6 +446,20 @@ class Domain::User::E621User

def third_to_last!; end
end

module EnumMethodsModule
sig { void }
def scanned_favs_error!; end

sig { returns(T::Boolean) }
def scanned_favs_error?; end

sig { void }
def scanned_favs_ok!; end

sig { returns(T::Boolean) }
def scanned_favs_ok?; end
end

module GeneratedAssociationMethods
sig { returns(T.nilable(::Domain::UserAvatar)) }
def avatar; end

@@ -692,6 +710,12 @@ class Domain::User::E621User

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def none(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def not_scanned_favs_error(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def not_scanned_favs_ok(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def null_relation?(*args, &blk); end

@@ -748,6 +772,12 @@ class Domain::User::E621User

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def rewhere(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def scanned_favs_error(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def scanned_favs_ok(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def select(*args, &blk); end

@@ -1447,7 +1477,7 @@ class Domain::User::E621User

sig { returns(T.nilable(::String)) }
def scanned_favs_status; end

sig { params(value: T.nilable(::String)).returns(T.nilable(::String)) }
sig { params(value: T.nilable(T.any(::String, ::Symbol))).returns(T.nilable(T.any(::String, ::Symbol))) }
def scanned_favs_status=(value); end

sig { returns(T::Boolean) }

@@ -1468,7 +1498,12 @@ class Domain::User::E621User

sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def scanned_favs_status_change_to_be_saved; end

sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
sig do
  params(
    from: T.nilable(T.any(::String, ::Symbol)),
    to: T.nilable(T.any(::String, ::Symbol))
  ).returns(T::Boolean)
end
def scanned_favs_status_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end

sig { returns(T.nilable(::String)) }

@@ -1477,7 +1512,12 @@ class Domain::User::E621User

sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def scanned_favs_status_previous_change; end

sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
sig do
  params(
    from: T.nilable(T.any(::String, ::Symbol)),
    to: T.nilable(T.any(::String, ::Symbol))
  ).returns(T::Boolean)
end
def scanned_favs_status_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end

sig { returns(T.nilable(::String)) }

@@ -1705,6 +1745,12 @@ class Domain::User::E621User

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def none(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def not_scanned_favs_error(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def not_scanned_favs_ok(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def null_relation?(*args, &blk); end

@@ -1761,6 +1807,12 @@ class Domain::User::E621User

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def rewhere(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def scanned_favs_error(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def scanned_favs_ok(*args, &blk); end

sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def select(*args, &blk); end
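The EnumMethodsModule helpers (scanned_favs_ok!, scanned_favs_error?), the scanned_favs_ok / not_scanned_favs_error scopes, and the class-level scanned_favs_statuses mapping above correspond to a string-backed enum on scanned_favs_status. The model change is not shown in this hunk, so the declaration and usage below are only a sketch; the exact value mapping is an assumption:

# Sketch only - the actual model edit is not shown in this diff.
class Domain::User::E621User
  enum :scanned_favs_status, { ok: "ok", error: "error" }, prefix: :scanned_favs
end

user = Domain::User::E621User.first            # any persisted record
user.scanned_favs_ok!                          # writes scanned_favs_status = "ok" and saves
Domain::User::E621User.not_scanned_favs_error.count
Domain::User::E621User.scanned_favs_statuses   # => roughly { "ok" => "ok", "error" => "error" }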
2
sorbet/rbi/gems/timeout@0.4.3.rbi
generated
@@ -1,4 +1,4 @@

# typed: strict
# typed: true

# DO NOT EDIT MANUALLY
# This is an autogenerated file for types exported from the `timeout` gem.
9
spec/factories/domain/post_group/inkbunny_pool.rb
Normal file
@@ -0,0 +1,9 @@

# typed: false
FactoryBot.define do
  factory :domain_post_group_inkbunny_pool,
          class: "Domain::PostGroup::InkbunnyPool" do
    sequence(:ib_id) { |n| n }
    name { "Pool #{ib_id}" }
    description { "Description for Pool #{ib_id}" }
  end
end
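The new factory is exercised by the UpdatePoolJob and UpdatePostsJob specs further down; a minimal standalone use looks like this (the values shown are illustrative):

pool = create(:domain_post_group_inkbunny_pool, ib_id: 83_746)
pool.name                               # => "Pool 83746", from the name block, unless overridden
build(:domain_post_group_inkbunny_pool) # unsaved instance with the next sequenced ib_id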
@@ -28,7 +28,7 @@ module PerformJobHelpers

when String, Regexp
expect(ret.message).to match(should_raise), bt_printer
else
expect(ret).to be_a(Exception), bt_printer
expect(ret).to be_a(Exception), ret.nil? ? "no exception" : bt_printer
end

ret
@@ -97,30 +97,17 @@ describe Domain::E621::Job::StaticFileJob do

content_type: "text/html",
contents: "test",
},
{
uri:
"https://static1.e621.net/data/c0/fa/c0fa5293f1d1440c2d3f2c3e027d3c36.jpg",
status_code: 500,
content_type: "text/html",
contents: "test",
},
{
uri:
"https://static1.e621.net/data/c0/fa/c0fa5293f1d1440c2d3f2c3e027d3c36.jpg",
status_code: 500,
content_type: "text/html",
contents: "test",
},
],
] * 4,
)

perform_now({ post_file: post_file }, should_raise: /will retry later/)
perform_now({ post_file: post_file }, should_raise: /will retry later/)
perform_now({ post_file: post_file }, should_raise: /will retry later/)
perform_now({ post_file: post_file })
post_file.reload
expect(post_file.state).to eq("terminal_error")
expect(post_file.log_entry).to eq(mock_log_entries[2])
expect(post_file.log_entry).to eq(mock_log_entries[3])
expect(post_file.last_status_code).to eq(500)
expect(post_file.retry_count).to eq(3)
expect(post_file.retry_count).to eq(4)
end
end
@@ -1,197 +0,0 @@

# typed: false
require "rails_helper"

module FileJobSpec
AN_IMAGE_SHA256 =
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
AN_IMAGE_MD5 = "d41d8cd98f00b204e9800998ecf8427e"
AN_IMAGE_PATH = "domain/e621/job/an-image.png"
AN_IMAGE_URL = "https://static1.e621.net/file/foo.png"
end

describe Domain::Inkbunny::Job::FileJob do
let(:http_client_mock) { instance_double("::Scraper::HttpClient") }
before { Scraper::ClientFactory.http_client_mock = http_client_mock }
let(:file) do
Domain::Inkbunny::File.create!(
{
ib_file_id: 12_345,
md5_initial: FileJobSpec::AN_IMAGE_MD5,
md5_full: FileJobSpec::AN_IMAGE_MD5,
url_str: FileJobSpec::AN_IMAGE_URL,
file_name: "foo.png",
ib_created_at: Time.now,
file_order: 1,
md5s: {
initial_file_md5: FileJobSpec::AN_IMAGE_MD5,
},
post:
Domain::Inkbunny::Post.create!(
{
ib_post_id: 67_891,
creator:
Domain::Inkbunny::User.create!(
{ ib_user_id: 12_345, name: "TheUser" },
),
},
),
},
)
end

describe "#perform" do
it "downloads the file if url_str is present" do
hle = create(:http_log_entry)
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
uri: FileJobSpec::AN_IMAGE_URL,
status_code: 200,
content_type: "image/png",
contents: SpecUtil.read_fixture_file(FileJobSpec::AN_IMAGE_PATH),
caused_by_entry: hle,
},
],
)
perform_now({ file: file, caused_by_entry: hle })

file.reload
expect(file.log_entry.response).to eq(file.blob_entry)
expect(file.blob_entry.sha256_hex).to eq(FileJobSpec::AN_IMAGE_SHA256)
end

it "marks the post as errored if the download fails" do
hles =
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
uri: FileJobSpec::AN_IMAGE_URL,
status_code: 404,
content_type: "text/html",
contents: "not found",
},
],
)

perform_now({ file: file })

file.reload
expect(file.state).to eq("error")
expect(file.blob_entry).to be_nil
expect(file.state_detail["error"]).to eq(
{
"status_code" => 404,
"log_entry_id" => hles[0].id,
"retry_count" => 1,
},
)
end

it "recovers from a failed download" do
hles =
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
uri: FileJobSpec::AN_IMAGE_URL,
status_code: 500,
content_type: "text/html",
contents: "not found",
},
{
uri: FileJobSpec::AN_IMAGE_URL,
status_code: 200,
content_type: "image/png",
contents: SpecUtil.read_fixture_file(FileJobSpec::AN_IMAGE_PATH),
},
],
)

perform_now({ file: file }, should_raise: true)
file.reload
expect(file.blob_entry).to be_nil

perform_now({ file: file })
file.reload
expect(file.blob_entry).not_to be_nil
expect(file.blob_entry.sha256_hex).to eq(FileJobSpec::AN_IMAGE_SHA256)
end

it "throws on a non-404 error in order to retry later" do
num_retries = 3
hles =
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
uri: FileJobSpec::AN_IMAGE_URL,
status_code: 500,
content_type: "text/html",
contents: "not found",
},
] * num_retries,
)

num_retries.times.map do |retry_num|
perform_now({ file: file }, should_raise: true)
file.reload
expect(file.state).to eq("error")
expect(file.blob_entry).to be_nil
expect(file.state_detail["error"]).to eq(
{
"status_code" => 500,
"log_entry_id" => hles[retry_num].id,
"retry_count" => retry_num + 1,
},
)
end

# the last retry should not throw, but simply bail out early
perform_now({ file: file })
file.reload
expect(file.state).to eq("error")
expect(file.blob_entry).to be_nil
end

it "fails if file argument is missing" do
expect { perform_now({}) }.to raise_error(/file is required/)
end

it "retries a file in error state that hasn't hit retry limit" do
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
# First attempt fails
{
uri: FileJobSpec::AN_IMAGE_URL,
status_code: 500,
content_type: "text/html",
contents: "error",
},
{
uri: FileJobSpec::AN_IMAGE_URL,
status_code: 200,
content_type: "image/png",
contents: SpecUtil.read_fixture_file(FileJobSpec::AN_IMAGE_PATH),
},
],
)

perform_now({ file: file }, should_raise: true)
file.reload
expect(file.state).to eq("error")
expect(file.state_detail["error"]["retry_count"]).to eq(1)

# Second attempt succeeds
perform_now({ file: file })
file.reload
expect(file.state).to eq("ok")
expect(file.blob_entry).not_to be_nil
expect(file.blob_entry.sha256_hex).to eq(FileJobSpec::AN_IMAGE_SHA256)
expect(file.state_detail["error"]).to be_nil
expect(file.state_detail).not_to have_key("retry_count")
end
end
end
156
spec/jobs/domain/inkbunny/job/static_file_job_spec.rb
Normal file
@@ -0,0 +1,156 @@

# typed: false
require "rails_helper"

module FileJobSpec
AN_IMAGE_SHA256 =
"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
AN_IMAGE_MD5 = "d41d8cd98f00b204e9800998ecf8427e"
AN_IMAGE_PATH = "domain/e621/job/an-image.png"
AN_IMAGE_URL = "https://inkbunny.net/file/foo.png"
end

describe Domain::Inkbunny::Job::StaticFileJob do
let(:http_client_mock) { instance_double("::Scraper::HttpClient") }
before { Scraper::ClientFactory.http_client_mock = http_client_mock }
let(:file) do
Domain::PostFile::InkbunnyPostFile.create!(
{
ib_id: 12_345,
md5_initial: FileJobSpec::AN_IMAGE_MD5,
md5_full: FileJobSpec::AN_IMAGE_MD5,
url_str: FileJobSpec::AN_IMAGE_URL,
file_name: "foo.png",
ib_created_at: Time.now,
file_order: 1,
md5s: {
initial_file_md5: FileJobSpec::AN_IMAGE_MD5,
},
post:
Domain::Post::InkbunnyPost.create!(
ib_id: 67_891,
creator:
Domain::User::InkbunnyUser.create!(
ib_id: 12_345,
name: "TheUser",
),
),
},
)
end

describe "#perform" do
it "downloads the file if url_str is present" do
caused_by_entry = create(:http_log_entry)
log_entries =
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
uri: FileJobSpec::AN_IMAGE_URL,
status_code: 200,
content_type: "image/png",
contents: SpecUtil.read_fixture_file(FileJobSpec::AN_IMAGE_PATH),
caused_by_entry:,
},
],
)
perform_now({ file:, caused_by_entry: })

file.reload
expect(file.log_entry).to eq(log_entries[0])
expect(file.state).to eq("ok")
end

it "marks the post as errored if the download fails" do
log_entries =
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
uri: FileJobSpec::AN_IMAGE_URL,
status_code: 404,
content_type: "text/html",
contents: "not found",
},
],
)

perform_now({ file: file })

file.reload
expect(file.state).to eq("terminal_error")
expect(file.retry_count).to eq(1)
expect(file.last_status_code).to eq(404)
expect(file.log_entry).to eq(log_entries[0])
end

it "recovers from a failed download" do
log_entries =
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
uri: FileJobSpec::AN_IMAGE_URL,
status_code: 500,
content_type: "text/html",
contents: "not found",
},
{
uri: FileJobSpec::AN_IMAGE_URL,
status_code: 200,
content_type: "image/png",
contents: SpecUtil.read_fixture_file(FileJobSpec::AN_IMAGE_PATH),
},
],
)

perform_now({ file: file }, should_raise: true)
file.reload
expect(file.log_entry).to eq(log_entries[0])
expect(file.state).to eq("retryable_error")
expect(file.retry_count).to eq(1)
expect(file.last_status_code).to eq(500)

perform_now({ file: file })
file.reload
expect(file.log_entry).to eq(log_entries[1])
expect(file.state).to eq("ok")
expect(file.retry_count).to eq(1)
expect(file.last_status_code).to eq(200)
end

it "throws on a non-404 error in order to retry later" do
log_entries =
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
uri: FileJobSpec::AN_IMAGE_URL,
status_code: 500,
content_type: "text/html",
contents: "not found",
},
] * 4,
)

3.times.map do |retry_num|
perform_now({ file: file }, should_raise: true)
file.reload
expect(file.state).to eq("retryable_error")
expect(file.retry_count).to eq(retry_num + 1)
expect(file.last_status_code).to eq(500)
expect(file.log_entry).to eq(log_entries[retry_num])
end

# the last retry should not throw, but simply bail out early
perform_now({ file: file })
file.reload
expect(file.state).to eq("terminal_error")
expect(file.log_entry).to eq(log_entries[3])
end

it "fails if file argument is missing" do
expect { perform_now({}) }.to raise_error(/Expected type/)
end
end
end
@@ -8,7 +8,7 @@ RSpec.describe Domain::Inkbunny::Job::UpdatePoolJob do

describe "#perform" do
it "processes all posts in a pool and enqueues update jobs for posts needing updates" do
pool = create(:domain_inkbunny_pool, ib_pool_id: 83_746)
pool = create(:domain_post_group_inkbunny_pool, ib_id: 83_746)
first_page_json =
JSON.parse(
SpecUtil.read_fixture_file(

@@ -88,7 +88,7 @@ RSpec.describe Domain::Inkbunny::Job::UpdatePoolJob do

end

it "skips if already deep updated" do
pool = create(:domain_inkbunny_pool, ib_pool_id: 83_746)
pool = create(:domain_post_group_inkbunny_pool, ib_id: 83_746)
log_entry = create(:http_log_entry)
pool.deep_update_log_entry = log_entry
pool.save!
@@ -28,20 +28,20 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

end

let!(:user_thendyart) do
Domain::Inkbunny::User.create!(ib_user_id: 941_565, name: "ThendyArt")
Domain::User::InkbunnyUser.create!(ib_id: 941_565, name: "ThendyArt")
end

let!(:user_seff) do
Domain::Inkbunny::User.create!(ib_user_id: 229_331, name: "Seff")
Domain::User::InkbunnyUser.create!(ib_id: 229_331, name: "Seff")
end

let!(:user_soulcentinel) do
Domain::Inkbunny::User.create!(ib_user_id: 349_747, name: "SoulCentinel")
Domain::User::InkbunnyUser.create!(ib_id: 349_747, name: "SoulCentinel")
end

let!(:post_3104202) do
Domain::Inkbunny::Post.create!(
ib_post_id: 3_104_202,
Domain::Post::InkbunnyPost.create!(
ib_id: 3_104_202,
creator: user_thendyart,
title: "Phantom Touch - Page 25",
posted_at: Time.parse("2023-08-27 21:31:40.365597+02"),

@@ -53,8 +53,8 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

end

let!(:post_3104200) do
Domain::Inkbunny::Post.create!(
ib_post_id: 3_104_200,
Domain::Post::InkbunnyPost.create!(
ib_id: 3_104_200,
creator: user_seff,
title: "Camp Pines Sketch Dump (Aug 2023)",
posted_at: Time.parse("2023-08-27 21:30:59.308046+02"),

@@ -66,8 +66,8 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

end

let!(:post_3104197) do
Domain::Inkbunny::Post.create!(
ib_post_id: 3_104_197,
Domain::Post::InkbunnyPost.create!(
ib_id: 3_104_197,
creator: user_soulcentinel,
title: "Comm - BJ bird",
posted_at: Time.parse("2023-08-27 21:29:37.995264+02"),

@@ -104,19 +104,19 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

).to match(
array_including(
hash_including(
pool: Domain::Inkbunny::Pool.find_by(ib_pool_id: 71_061),
pool: Domain::PostGroup::InkbunnyPool.find_by(ib_id: 71_061),
caused_by_entry: log_entries[0],
),
hash_including(
pool: Domain::Inkbunny::Pool.find_by(ib_pool_id: 35_628),
pool: Domain::PostGroup::InkbunnyPool.find_by(ib_id: 35_628),
caused_by_entry: log_entries[0],
),
hash_including(
pool: Domain::Inkbunny::Pool.find_by(ib_pool_id: 35_045),
pool: Domain::PostGroup::InkbunnyPool.find_by(ib_id: 35_045),
caused_by_entry: log_entries[0],
),
hash_including(
pool: Domain::Inkbunny::Pool.find_by(ib_pool_id: 83_746),
pool: Domain::PostGroup::InkbunnyPool.find_by(ib_id: 83_746),
caused_by_entry: log_entries[0],
),
),

@@ -150,13 +150,13 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

# Check user details were updated
user_seff.reload
expect(user_seff.avatar_url_str).to eq(
expect(user_seff.avatar.url_str).to eq(
"https://us.ib.metapix.net/usericons/large/176/176443_Seff_seffpfp.png",
)

# Check files were created
expect(post_3104200.files.count).to eq(4)
file_4652528 = post_3104200.files.find_by!(ib_file_id: 4_652_528)
file_4652528 = post_3104200.files.find_by!(ib_id: 4_652_528)
expect(file_4652528.file_order).to eq(0)
expect(file_4652528.file_name).to eq("4652528_Seff_aug23sketches7-1.png")
expect(file_4652528.url_str).to eq(

@@ -166,11 +166,9 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

expect(file_4652528.md5_full).to eq("07946e8d485664704b316cb218805367")

# Check file jobs were enqueued
file_jobs = SpecUtil.enqueued_jobs(Domain::Inkbunny::Job::FileJob)
file_jobs = SpecUtil.enqueued_jobs(Domain::Inkbunny::Job::StaticFileJob)
expect(file_jobs.length).to eq(6)
expect(
file_jobs.map { |job| job[:args][0][:file].ib_file_id }.sort,
).to eq(
expect(file_jobs.map { |job| job[:args][0][:file].ib_id }.sort).to eq(
[4_652_528, 4_652_530, 4_652_531, 4_652_534, 4_652_535, 4_652_537],
)
file_jobs.each do |job|

@@ -181,7 +179,9 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

it "enqueues avatar jobs" do
perform_now({ ib_post_ids: ib_post_ids, caused_by_entry: nil })
user_soulcentinel.reload
expect(user_soulcentinel.avatar_url_str).to eq(
user_seff.reload
user_thendyart.reload
expect(user_soulcentinel.avatar.url_str).to eq(
"https://us.ib.metapix.net/usericons/large/208/208598_SoulCentinel_fireb.gif",
)

@@ -193,9 +193,9 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

SpecUtil.enqueued_job_args(Domain::Inkbunny::Job::UserAvatarJob),
).to match(
[
{ user: user_soulcentinel, caused_by_entry: log_entries[0] },
{ user: user_seff, caused_by_entry: log_entries[0] },
{ user: user_thendyart, caused_by_entry: log_entries[0] },
{ avatar: user_soulcentinel.avatar, caused_by_entry: log_entries[0] },
{ avatar: user_seff.avatar, caused_by_entry: log_entries[0] },
{ avatar: user_thendyart.avatar, caused_by_entry: log_entries[0] },
],
)
end

@@ -205,9 +205,9 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

# Post 3104202 should be in "Phantom Touch" pool
post_3104202.reload
expect(post_3104202.pool_joins.count).to eq(1)
expect(post_3104202.post_group_joins.count).to eq(1)
phantom_touch_pool = post_3104202.pools.first
expect(phantom_touch_pool.ib_pool_id).to eq(83_746)
expect(phantom_touch_pool.ib_id).to eq(83_746)
expect(phantom_touch_pool.name).to eq("Phantom Touch | Ongoing")
expect(phantom_touch_pool.description).to eq(
"18+ M/F Adult Comic\n\n(New Page Every Sunday)",

@@ -216,16 +216,16 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

# Post 3104197 should be in 3 pools
post_3104197.reload
expect(post_3104197.pool_joins.count).to eq(3)
pool_ids = post_3104197.pools.pluck(:ib_pool_id).sort
expect(post_3104197.post_group_joins.count).to eq(3)
pool_ids = post_3104197.pools.pluck(:ib_id).sort
expect(pool_ids).to eq([35_045, 35_628, 71_061])
animation_pool = post_3104197.pools.find_by(ib_pool_id: 71_061)
animation_pool = post_3104197.pools.find_by(ib_id: 71_061)
expect(animation_pool.name).to eq("Animation")
expect(animation_pool.description).to eq("It moves!")

# Post 3104200 should have no pools
post_3104200.reload
expect(post_3104200.pool_joins.count).to eq(0)
expect(post_3104200.post_group_joins.count).to eq(0)
expect(post_3104200.pools).to be_empty
end

@@ -235,16 +235,16 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

# Check post 3104197's pool joins
post_3104197.reload
animation_pool_join =
post_3104197.pool_joins.find_by(
pool: Domain::Inkbunny::Pool.find_by!(ib_pool_id: 71_061),
post_3104197.post_group_joins.find_by(
group: Domain::PostGroup::InkbunnyPool.find_by!(ib_id: 71_061),
)
expect(animation_pool_join.left_post.ib_post_id).to eq(3_082_162)
expect(animation_pool_join.left_post.ib_id).to eq(3_082_162)
expect(animation_pool_join.right_post).to be_nil

# Check post 3104202's pool joins
post_3104202.reload
phantom_touch_pool_join = post_3104202.pool_joins.first
expect(phantom_touch_pool_join.left_post.ib_post_id).to eq(3_098_688)
phantom_touch_pool_join = post_3104202.post_group_joins.first
expect(phantom_touch_pool_join.left_post.ib_id).to eq(3_098_688)
expect(phantom_touch_pool_join.right_post).to be_nil
end

@@ -296,12 +296,12 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

end

let!(:user_zzreg) do
Domain::Inkbunny::User.create!(ib_user_id: 110_036, name: "zzreg")
Domain::User::InkbunnyUser.create!(ib_id: 110_036, name: "zzreg")
end

let!(:post_1047334) do
Domain::Inkbunny::Post.create!(
ib_post_id: 1_047_334,
Domain::Post::InkbunnyPost.create!(
ib_id: 1_047_334,
creator: user_zzreg,
title: "New Submission",
posted_at: Time.parse("2016-03-13 22:18:52.32319+01"),

@@ -318,7 +318,7 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

post_1047334.reload
expect(post_1047334.files.count).to eq(1)
file_1445274 = post_1047334.files.find_by!(ib_file_id: 1_445_274)
file_1445274 = post_1047334.files.find_by!(ib_id: 1_445_274)
expect(file_1445274.md5_initial).to eq("0127e88651e73140718f3b8f7f2037d5")
expect(file_1445274.md5_full).to eq("aa0e22f86a9c345ead2bd711a1c91986")
expect(file_1445274.file_name).to eq(

@@ -330,7 +330,7 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

post_1047334.reload
expect(post_1047334.files.count).to eq(2)
expect(post_1047334.last_file_updated_at).to eq(
expect(post_1047334.last_file_updated_at).to be_within(1.second).of(
Time.parse("2023-09-14 19:07:45.735562+02"),
)

@@ -342,7 +342,7 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

)

# New file should be created
file_4680214 = post_1047334.files.find_by!(ib_file_id: 4_680_214)
file_4680214 = post_1047334.files.find_by!(ib_id: 4_680_214)
expect(file_4680214.attributes).to include(
"md5_initial" => "9fbfbdf3cc6d8b3538b7edbfe36bde8c",
"md5_full" => "d2e30d953f4785e22c3d9c722249a974",

@@ -351,11 +351,11 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

)

# Check file jobs were enqueued for both updates
file_jobs = SpecUtil.enqueued_jobs(Domain::Inkbunny::Job::FileJob)
file_jobs = SpecUtil.enqueued_jobs(Domain::Inkbunny::Job::StaticFileJob)
expect(file_jobs.length).to eq(2)
expect(
file_jobs.map { |job| job[:args][0][:file].ib_file_id }.sort,
).to eq([1_445_274, 4_680_214])
expect(file_jobs.map { |job| job[:args][0][:file].ib_id }.sort).to eq(
[1_445_274, 4_680_214],
)
expect(file_jobs[0][:args][0][:caused_by_entry]).to eq(log_entries[0])
expect(file_jobs[1][:args][0][:caused_by_entry]).to eq(log_entries[1])
end

@@ -384,13 +384,13 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

end

let!(:user_friar) do
create(:domain_inkbunny_user, ib_user_id: 1664, name: "Friar")
create(:domain_user_inkbunny_user, ib_id: 1664, name: "Friar")
end

let!(:post_2637105) do
create(
:domain_inkbunny_post,
ib_post_id: 2_637_105,
:domain_post_inkbunny_post,
ib_id: 2_637_105,
creator: user_friar,
num_files: 5,
)

@@ -413,7 +413,7 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

# Check that files were created, even those with null MD5 sums
expect(post_2637105.files.count).to eq(5)

file_3897070 = post_2637105.files.find_by!(ib_file_id: 3_897_070)
file_3897070 = post_2637105.files.find_by!(ib_id: 3_897_070)
expect(file_3897070.url_str).to eq(
"https://tx.ib.metapix.net/files/full/3897/3897070_Friar_ffrbb4.png",
)

@@ -421,14 +421,14 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do

"md5_initial" => "",
"md5_full" => "",
)
expect(file_3897070.state).to eq("error")
expect(file_3897070.state).to eq("terminal_error")

# Check file jobs were enqueued for valid files
file_jobs = SpecUtil.enqueued_jobs(Domain::Inkbunny::Job::FileJob)
file_jobs = SpecUtil.enqueued_jobs(Domain::Inkbunny::Job::StaticFileJob)
expect(file_jobs.length).to eq(4)
expect(
file_jobs.map { |job| job[:args][0][:file].ib_file_id }.sort,
).to eq([3_897_058, 3_897_061, 3_897_065, 3_897_067])
expect(file_jobs.map { |job| job[:args][0][:file].ib_id }.sort).to eq(
[3_897_058, 3_897_061, 3_897_065, 3_897_067],
)
file_jobs.each do |job|
expect(job[:args][0][:caused_by_entry]).to eq(log_entries[0])
end
@@ -40,8 +40,8 @@ RSpec.describe Domain::Inkbunny::Job::UserGalleryJob do

expect { perform_now(args) }.to(
change(Domain::Post::InkbunnyPost, :count)
.by(0)
.and(change(Domain::Inkbunny::File, :count).by(0))
.and(change(Domain::Inkbunny::User, :count).by(0)),
.and(change(Domain::PostFile::InkbunnyPostFile, :count).by(0))
.and(change(Domain::User::InkbunnyUser, :count).by(0)),
)
end
@@ -36,10 +36,8 @@ RSpec.describe Domain::User::E621User, type: :model do

user.scanned_favs_status = "error"
expect(user).to be_valid

user.scanned_favs_status = "invalid"
expect(user).not_to be_valid
expect(user.errors[:scanned_favs_status]).to include(
"is not included in the list",
expect do user.scanned_favs_status = "invalid" end.to raise_error(
ArgumentError,
)
end
end