Fixes for migration script

This commit is contained in:
Dylan Knutson
2025-02-06 18:41:14 +00:00
parent 4bb0eae722
commit 9a462713b6
41 changed files with 1667 additions and 951 deletions

View File

@@ -10,7 +10,8 @@ class Domain::Fa::Job::Base < Scraper::JobBase
sig { params(args: T.untyped).void }
def initialize(*args)
super(*T.unsafe(args))
@user = T.let(nil, T.nilable(Domain::Fa::User))
@user_old = T.let(nil, T.nilable(Domain::Fa::User))
@user = T.let(nil, T.nilable(Domain::User::FaUser))
@created_user = T.let(false, T::Boolean)
@posts_enqueued_for_scan = T.let(Set.new, T::Set[Integer])
@force_scan = T.let(false, T::Boolean)
@@ -26,17 +27,26 @@ class Domain::Fa::Job::Base < Scraper::JobBase
sig do
params(
args: T.untyped,
set_user: T::Boolean,
build_user: T::Boolean,
require_user_exists: T::Boolean,
).returns(T.nilable(Domain::Fa::User))
).returns(T.nilable(Domain::User::FaUser))
end
def init_from_args!(args, build_user: true, require_user_exists: false)
def init_from_args!(
args,
set_user: true,
build_user: true,
require_user_exists: false
)
@force_scan = !!args[:force_scan]
if build_user
@user = find_or_build_user_from_args(args)
else
@user = find_user_from_args(args)
if set_user
if build_user
@user = find_or_build_user_from_args(args)
else
@user = find_user_from_args(args)
end
@user_old = Domain::Fa::User.find_by(url_name: @user.url_name) if @user
end
logger.prefix =
@@ -55,46 +65,46 @@ class Domain::Fa::Job::Base < Scraper::JobBase
@user
end
sig { params(args: T.untyped).returns(Domain::Fa::User) }
sig { params(args: T.untyped).returns(Domain::User::FaUser) }
def find_or_build_user_from_args(args)
find_user_from_args(args) ||
begin
url_name = Domain::Fa::User.name_to_url_name(args[:url_name])
user = Domain::Fa::User.new
user = Domain::User::FaUser.new
user.url_name = url_name
user.name = url_name
user.state_detail ||= {}
if cle = causing_log_entry
user.state_detail["first_seen_entry"] = cle.id
end
user
end
end
sig { params(args: T.untyped).returns(T.nilable(Domain::Fa::User)) }
sig { params(args: T.untyped).returns(T.nilable(Domain::User::FaUser)) }
def find_user_from_args(args)
args[:user] ||
begin
if args[:url_name].blank?
fatal_error("arg 'url_name' is required if arg 'user' is nil")
end
url_name = Domain::Fa::User.name_to_url_name(args[:url_name])
Domain::Fa::User.find_by(url_name: url_name)
end
user = args[:user]
if user.is_a?(Domain::User::FaUser)
return user
elsif user.is_a?(Domain::Fa::User)
return Domain::User::FaUser.find_by(url_name: user.url_name)
end
if args[:url_name].blank?
fatal_error("arg 'url_name' is required if arg 'user' is nil")
end
url_name = Domain::Fa::User.name_to_url_name(args[:url_name])
Domain::User::FaUser.find_by(url_name: url_name)
end
sig { params(scan_type: Symbol).returns(T::Boolean) }
def user_due_for_scan?(scan_type)
sig { returns(T::Boolean) }
def user_due_for_favs_scan?
raise("user is nil") unless @user
unless @user.scan_due?(scan_type)
unless @user.due_for_favs_scan?
if @force_scan
logger.warn(
"scanned #{@user.scanned_ago_in_words(scan_type).bold} - force scanning",
"scanned #{DateHelper.time_ago_in_words(@user.scanned_favs_at).bold} ago - force scanning",
)
return true
else
logger.warn(
"scanned #{@user.scanned_ago_in_words(scan_type).bold} - skipping",
"scanned #{DateHelper.time_ago_in_words(@user.scanned_favs_at).bold} ago - skipping",
)
return false
end
@@ -151,12 +161,14 @@ class Domain::Fa::Job::Base < Scraper::JobBase
min_fa_id = [max_fa_id - continue_for, 0].max
fa_ids_to_manually_enqueue = Set.new(min_fa_id..max_fa_id)
fa_ids_to_manually_enqueue.subtract(fa_ids)
# existing =
# Domain::Post::FaPost.where(fa_id:
# "fa_id >= ? AND fa_id <= ?",
# min_fa_id,
# max_fa_id,
# ).pluck(:fa_id)
existing =
Domain::Fa::Post.where(
"fa_id >= ? AND fa_id <= ?",
min_fa_id,
max_fa_id,
).pluck(:fa_id)
Domain::Post::FaPost.where(fa_id: min_fa_id..max_fa_id).pluck(:fa_id)
fa_ids_to_manually_enqueue.subtract(existing)
end
@@ -164,7 +176,7 @@ class Domain::Fa::Job::Base < Scraper::JobBase
listing_page_stats = ListingsPageScanStats.new(0, 0, false)
submissions.each do |submission|
post = Domain::Fa::Post.find_or_initialize_by(fa_id: submission.id)
post = Domain::Post::FaPost.find_or_initialize_by(fa_id: submission.id)
listing_page_stats.last_was_new = post.new_record?
listing_page_stats.new_seen += 1 if post.new_record?
listing_page_stats.total_seen += 1
@@ -195,7 +207,7 @@ class Domain::Fa::Job::Base < Scraper::JobBase
fa_ids_to_manually_enqueue.to_a.sort.reverse.each do |fa_id|
if create_unseen_posts
# when filling gaps, only enqueue if the post wasn't found
post = Domain::Fa::Post.find_or_initialize_by(fa_id: fa_id)
post = Domain::Post::FaPost.find_or_initialize_by(fa_id: fa_id)
if post.new_record?
post.save!
enqueue_post_scan(post, enqueue_posts_pri)
@@ -212,7 +224,11 @@ class Domain::Fa::Job::Base < Scraper::JobBase
end
sig do
params(job_type: Symbol, post: Domain::Fa::Post, submission: T.untyped).void
params(
job_type: Symbol,
post: Domain::Post::FaPost,
submission: T.untyped,
).void
end
def update_and_save_post_from_listings_page(job_type, post, submission)
if job_type == :browse_page
@@ -224,14 +240,14 @@ class Domain::Fa::Job::Base < Scraper::JobBase
end
post.creator ||=
Domain::Fa::User.find_or_build_from_submission_parser(submission)
Domain::User::FaUser.find_or_build_from_submission_parser(submission)
post.title = submission.title || fatal_error("blank title")
post.save!
end
sig do
params(
user: Domain::Fa::User,
user: Domain::User::FaUser,
enqueue_page_scan: T::Boolean,
enqueue_gallery_scan: T::Boolean,
enqueue_favs_scan: T::Boolean,
@@ -317,27 +333,115 @@ class Domain::Fa::Job::Base < Scraper::JobBase
end
end
sig { params(post: Domain::Fa::Post, enqueue_pri: T.nilable(Symbol)).void }
sig do
params(post: Domain::Post::FaPost, enqueue_pri: T.nilable(Symbol)).void
end
def enqueue_post_scan(post, enqueue_pri = nil)
enqueue_pri = self.class.normalize_enqueue_pri(enqueue_pri)
if @posts_enqueued_for_scan.add?(T.must(post.fa_id))
fa_id_str = (post.fa_id || "(nil)").to_s.bold
if !post.scanned?
if !post.scanned_at.present?
logger.info "enqueue post scan for fa_id #{fa_id_str}"
defer_job(
Domain::Fa::Job::ScanPostJob,
{ post: post },
{ priority: enqueue_pri },
)
elsif !post.have_file?
elsif (post_file = post.file) && post_file.url_str.present? &&
post_file.log_entry.nil?
logger.info "enqueue file scan for fa_id #{fa_id_str}"
defer_job(
Domain::Fa::Job::ScanFileJob,
{ post: post },
{ post_file: },
{ priority: enqueue_pri },
)
end
end
end
sig do
params(fa_ids: T::Array[Integer]).returns(T::Array[Domain::Post::FaPost])
end
def find_or_create_posts_by_fa_ids(fa_ids)
posts = Domain::Post::FaPost.where(fa_id: fa_ids).to_a
missing_post_fa_ids = fa_ids - posts.map(&:fa_id)
ReduxApplicationRecord.transaction do
missing_post_fa_ids.each do |fa_id|
post = Domain::Post::FaPost.create!(fa_id: fa_id)
defer_job(Domain::Fa::Job::ScanPostJob, { post: post })
posts << post
end
end
posts = posts.index_by(&:fa_id)
fa_ids.map { |fa_id| posts[fa_id] }
end
sig do
params(
recent_users: T::Array[Domain::Fa::Parser::UserPageHelper::RecentUser],
).returns(T::Array[Domain::User::FaUser])
end
def find_or_create_users_by_recent_users(recent_users)
users =
Domain::User::FaUser.where(url_name: recent_users.map(&:url_name)).to_a
missing_recent_users =
recent_users.reject do |recent_user|
users.any? { |u| u.url_name == recent_user.url_name }
end
ReduxApplicationRecord.transaction do
missing_recent_users.each do |recent_user|
user =
Domain::User::FaUser.create!(
url_name: recent_user.url_name,
name: recent_user.name,
)
defer_job(Domain::Fa::Job::UserPageJob, { user: user })
users << user
end
end
users_by_url_name =
T.cast(users.index_by(&:url_name), T::Hash[String, Domain::User::FaUser])
# return user models in the same order as the input
recent_users.map { |name| T.must(users_by_url_name[name.url_name]) }
end
sig do
params(
user: Domain::User::FaUser,
page: Domain::Fa::Parser::Page,
response: Scraper::HttpClient::Response,
).void
end
def update_user_fields_from_page(user, page, response)
user_page = page.user_page
user.name = user_page.name
user.registered_at = user_page.registered_since
user.num_pageviews = user_page.num_pageviews
user.num_submissions = user_page.num_submissions
user.num_comments_recieved = user_page.num_comments_recieved
user.num_comments_given = user_page.num_comments_given
user.num_journals = user_page.num_journals
user.num_favorites = user_page.num_favorites
user.profile_html =
user_page.profile_html.encode("UTF-8", invalid: :replace, undef: :replace)
user.last_user_page_id = response.log_entry.id
user.scanned_page_at = Time.current
avatar = user.avatar || Domain::UserAvatar.new(user: user)
uri = Addressable::URI.parse(user_page.profile_thumb_url)
uri.scheme ||= "https"
avatar.url_str = uri.to_s
if avatar.changed?
avatar.state = "pending"
avatar.save!
defer_job(Domain::Fa::Job::UserAvatarJob, { avatar: })
end
end
end

View File

@@ -33,17 +33,14 @@ class Domain::Fa::Job::FavsJob < Domain::Fa::Job::Base
user = T.must(user)
logger.prefix = "[#{user.url_name&.bold} / #{user.state&.bold}]"
return unless user_due_for_scan?(:favs)
return unless user_due_for_favs_scan?
max_page_number =
T.let([((user.num_favorites || 0) + 1) / 48, 100].max, Integer)
logger.info "[max page number] [#{max_page_number.to_s.bold}]"
existing_faved_ids =
T.let(
Set.new(user.fav_post_joins.active.pluck(:post_id)),
T::Set[Integer],
)
T.let(Set.new(user.user_post_favs.pluck(:post_id)), T::Set[Integer])
to_add = T.let(Set.new, T::Set[Integer])
@@ -58,13 +55,12 @@ class Domain::Fa::Job::FavsJob < Domain::Fa::Job::Base
user.scanned_favs_at = Time.zone.now
to_add += @seen_post_ids - existing_faved_ids
logger.info "[partial scan] [add #{to_add.size.to_s.bold}] [remove none]"
logger.info "[partial scan] [add #{to_add.size.to_s.bold}]"
ReduxApplicationRecord.transaction do
to_add.each_slice(1000) do |slice|
user.fav_post_joins.upsert_all(
slice.map { |id| { post_id: id, removed: false } },
unique_by: :index_domain_fa_favs_on_user_id_and_post_id,
update_only: [:removed],
Domain::UserPostFav.upsert_all(
slice.map { |id| { user_id: user.id, post_id: id } },
unique_by: %i[user_id post_id],
)
end
user.save!
@@ -78,25 +74,15 @@ class Domain::Fa::Job::FavsJob < Domain::Fa::Job::Base
@page_number += 1
end
to_remove = existing_faved_ids - @seen_post_ids
to_add = @seen_post_ids - existing_faved_ids
logger.info "[calc change favs] [add #{to_add.size.to_s.bold}] [remove #{to_remove.size.to_s.bold}]"
logger.info "[calc change favs] [add #{to_add.size.to_s.bold}]"
ReduxApplicationRecord.transaction do
if to_remove.any?
user
.fav_post_joins
.active
.where(post_id: to_remove)
.update_all(removed: true)
end
if to_add.any?
to_add.each_slice(1000) do |slice|
user.fav_post_joins.upsert_all(
slice.map { |id| { post_id: id, removed: false } },
unique_by: :index_domain_fa_favs_on_user_id_and_post_id,
update_only: [:removed],
Domain::UserPostFav.upsert_all(
slice.map { |id| { user_id: user.id, post_id: id } },
unique_by: %i[user_id post_id],
)
end
end
@@ -104,12 +90,12 @@ class Domain::Fa::Job::FavsJob < Domain::Fa::Job::Base
user.scanned_favs_at = Time.zone.now
user.save!
end
logger.info "[updated favs list] [posts: #{user.fav_post_joins.count.to_s.bold}]"
logger.info "[updated favs list] [posts: #{user.user_post_favs.count.to_s.bold}]"
end
private
sig { params(user: Domain::Fa::User).returns(T.nilable(Symbol)) }
sig { params(user: Domain::User::FaUser).returns(T.nilable(Symbol)) }
def scan_page(user:)
ret = nil
@@ -144,7 +130,7 @@ class Domain::Fa::Job::FavsJob < Domain::Fa::Job::Base
posts_to_create_hashes = []
existing_fa_id_to_post_id =
Domain::Fa::Post
Domain::Post::FaPost
.where(fa_id: submissions.map(&:id))
.pluck(:fa_id, :id)
.to_h
@@ -152,7 +138,7 @@ class Domain::Fa::Job::FavsJob < Domain::Fa::Job::Base
created_posts =
submissions.map do |submission_parser_helper|
post =
Domain::Fa::Post.find_or_initialize_by_submission_parser(
Domain::Post::FaPost.find_or_initialize_by_submission_parser(
submission_parser_helper,
first_seen_log_entry: response.log_entry,
)

View File

@@ -1,48 +0,0 @@
# typed: true
# Scans the FurAffinity home page ("/") for recently posted submissions and
# enqueues scan jobs for new/unseen posts via the shared listings-page helper
# inherited from Domain::Fa::Job::Base.
class Domain::Fa::Job::HomePageJob < Domain::Fa::Job::Base
queue_as :fa_browse_page
sig { params(args: T.untyped).void }
def initialize(*args)
super(*T.unsafe(args))
# How far back (in fa_id units) to back-fill gaps behind the newest post;
# may be overridden per-run via args[:continue_for] in #perform.
@continue_for = T.let(1024, Integer)
# Running totals accumulated by #scan_home_page and reported at the end.
@total_num_new_posts_seen = T.let(0, Integer)
@total_num_posts_seen = T.let(0, Integer)
end
# Job entry point. args[:continue_for] (optional Integer) overrides the
# default gap back-fill window before the single home-page scan runs.
def perform(args)
if cf = args[:continue_for]
@continue_for = cf
end
scan_home_page
logger.info(
"finished, #{@total_num_new_posts_seen.to_s.bold} new, #{@total_num_posts_seen.to_s.bold} total posts",
)
end
private
# Fetches the home page, aborts the job via fatal_error on any non-200
# response, then delegates parsing/enqueueing to the base-class helper and
# folds its stats into the running totals.
def scan_home_page
url = "https://www.furaffinity.net/"
response = http_client.get(url)
if response.status_code != 200
fatal_error(
"non 200 response for /: #{response.status_code.to_s.underline}",
)
end
page = Domain::Fa::Parser::Page.new(response.body)
listing_page_stats =
update_and_enqueue_posts_from_listings_page(
:browse_page,
page,
enqueue_posts_pri: :high,
page_desc: "HomePage",
continue_for: @continue_for,
)
@total_num_new_posts_seen += listing_page_stats.new_seen
@total_num_posts_seen += listing_page_stats.total_seen
end
end

View File

@@ -5,11 +5,13 @@ class Domain::Fa::Job::ScanFileJob < Domain::Fa::Job::Base
sig { override.params(args: T::Hash[Symbol, T.untyped]).void }
def initialize(*args)
super(*T.unsafe(args))
@post = T.let(nil, T.nilable(Domain::Fa::Post))
@file = T.let(nil, T.nilable(Domain::PostFile))
@post = T.let(nil, T.nilable(T.any(Domain::Fa::Post, Domain::Post::FaPost)))
end
sig { override.params(args: T::Hash[Symbol, T.untyped]).void }
def perform(args)
@file = args[:post_file]
@post = args[:post]
@force_scan = !!args[:force_scan]
@@ -21,27 +23,43 @@ class Domain::Fa::Job::ScanFileJob < Domain::Fa::Job::Base
return
end
post = T.cast(@post, Domain::Fa::Post)
post =
if @post.is_a?(Domain::Fa::Post)
Domain::Post::FaPost.find_by!(fa_id: @post.fa_id)
elsif @post.is_a?(Domain::Post::FaPost)
@post
else
fatal_error("invalid post model: #{@post.class}")
raise
end
file = post.file
if file.nil?
logger.error "no file model - fa_id: #{post.fa_id}, enqueue scan"
defer_job(Domain::Fa::Job::ScanPostJob, { post: post })
return
end
logger.prefix = "[fa_id #{post.fa_id.to_s.bold} / #{post.state&.bold}]"
if post.state == "removed" && post.file_uri.nil?
if file.state == "error" && file.url_str.nil?
logger.error "removed and has no file, skipping"
return
end
if !post.scanned?
if !post.scanned_at.present?
logger.error "has not been scanned yet, doing so first"
enqueue_post_scan(post)
return
end
if post.have_file?
if file.log_entry.present?
logger.warn("already have file")
return
end
file_uri_host = post.file_uri&.host
file_url_str = T.must(file.url_str)
file_uri = Addressable::URI.parse(file_url_str)
file_uri_host = file_uri&.host
if file_uri_host
is_unresolvable_host = false
is_unresolvable_host ||= file_uri_host == "d9.facdn.net"
@@ -51,22 +69,22 @@ class Domain::Fa::Job::ScanFileJob < Domain::Fa::Job::Base
if is_unresolvable_host
logger.error("host is #{file_uri_host}, which will not resolve")
post.state = :file_error
post.state = "error"
post.scan_file_error = "unresolvable host"
post.save!
return
end
end
if post.state == "file_error" && !@force_scan
logger.warn("state == file_error, skipping")
if file.state == "error" && !@force_scan
logger.warn("state == error, skipping")
return
end
response = http_client.get(post.file_uri.to_s)
response = http_client.get(file_url_str)
if response.status_code == 404
post.state = :file_error
post.state = "error"
post.scan_file_error = "404"
post.save!
logger.error "404, aborting"
@@ -92,7 +110,8 @@ class Domain::Fa::Job::ScanFileJob < Domain::Fa::Job::Base
end
logger.debug "#{HexUtil.humansize(T.must(response.log_entry.response&.size))} / #{response.log_entry.content_type} / #{response.log_entry.response_time_ms} ms"
post.file = response.log_entry
post.save!
file.state = "ok"
file.log_entry = response.log_entry
file.save!
end
end

View File

@@ -8,21 +8,27 @@ class Domain::Fa::Job::ScanPostJob < Domain::Fa::Job::Base
T.cast(
args[:post] ||
begin
Domain::Fa::Post.find_or_initialize_by(fa_id: args[:fa_id])
Domain::Post::FaPost.find_or_initialize_by(fa_id: args[:fa_id])
end,
Domain::Fa::Post,
Domain::Post::FaPost,
)
logger.prefix =
proc { "[fa_id #{post.fa_id.to_s.bold} / #{post.state&.bold}]" }
scan_post(post) if (post.state == "ok" && !post.scanned?) || force_scan?
if (post.state == "ok" && !post.scanned_at.present?) || force_scan?
ReduxApplicationRecord.transaction { scan_post(post) }
end
if (post.state == "ok" && post.file_uri && post.file.nil?) || force_scan?
file = post.file
if (
file.present? && file.state == "ok" && file.url_str.present? &&
!file.log_entry.present?
) || force_scan?
logger.info("enqueue file job (#{self.priority})")
defer_job(
Domain::Fa::Job::ScanFileJob,
{ post: post },
{ post_file: post.file },
{ priority: self.priority },
)
end
@@ -42,12 +48,12 @@ class Domain::Fa::Job::ScanPostJob < Domain::Fa::Job::Base
private
sig { params(post: Domain::Fa::Post).void }
sig { params(post: Domain::Post::FaPost).void }
def scan_post(post)
response =
http_client.get("https://www.furaffinity.net/view/#{post.fa_id}/")
if response.status_code == 404
post.state = :scan_error
post.state = "scan_error"
post.save!
return
end
@@ -61,7 +67,7 @@ class Domain::Fa::Job::ScanPostJob < Domain::Fa::Job::Base
page = Domain::Fa::Parser::Page.new(response.body, require_logged_in: false)
if page.submission_not_found?
logger.error("post was removed")
post.state = :removed
post.state = "removed"
post.save!
return
end
@@ -72,7 +78,7 @@ class Domain::Fa::Job::ScanPostJob < Domain::Fa::Job::Base
if response.body =~
/The page you are trying to reach is currently pending deletion/
logger.error("post is pending deletion")
post.state = :removed
post.state = "removed"
post.save!
return
else
@@ -86,10 +92,13 @@ class Domain::Fa::Job::ScanPostJob < Domain::Fa::Job::Base
raise("id mismatch: #{submission.id} != #{post.fa_id}")
end
post.save!
post.last_submission_page = first_log_entry
post.title = submission.title
post.creator =
Domain::Fa::User.find_or_build_from_submission_parser(submission)
creator =
Domain::User::FaUser.find_or_build_from_submission_parser(submission)
creator.save!
post.create_primary_user_post_creation!(user: creator)
post.category = submission.category
post.description =
submission.description_html.encode(
@@ -98,7 +107,9 @@ class Domain::Fa::Job::ScanPostJob < Domain::Fa::Job::Base
undef: :replace,
)
post.keywords = submission.keywords_array || []
post.file_uri = submission.full_res_img
uri = Addressable::URI.parse(submission.full_res_img)
uri.scheme = "https" if uri.scheme.blank?
post.files.create!(url_str: uri.to_s)
post.theme = submission.theme
post.species = submission.species
post.gender = submission.gender

View File

@@ -1,18 +0,0 @@
# typed: true
module Domain::Fa::Job
# Utility for bulk-resolving FA submission ids to post records, creating
# (and enqueueing a scan for) any that do not exist yet.
class ScanPostUtils
# Returns post records for +fa_ids+, in the same order as the input.
# Posts missing from the database are created with only their fa_id set,
# and a ScanPostJob is enqueued for each newly created post.
# +caused_by_entry+ is threaded through to the enqueued job for
# provenance tracking (may be nil).
# NOTE(review): creations are not wrapped in a transaction here, so a
# failure partway through leaves earlier posts created — confirm whether
# that is intentional.
def self.find_or_create_by_fa_ids(fa_ids, caused_by_entry: nil)
posts = Domain::Fa::Post.where(fa_id: fa_ids).to_a
missing = fa_ids - posts.map(&:fa_id)
missing.each do |fa_id|
post = Domain::Fa::Post.create!(fa_id: fa_id)
Domain::Fa::Job::ScanPostJob.perform_later(
{ post: post, caused_by_entry: caused_by_entry }
)
posts << post
end
# Re-index by fa_id so the result preserves the caller's input order
# (a missing id maps to nil).
posts = posts.index_by(&:fa_id)
fa_ids.map { |fa_id| posts[fa_id] }
end
end
end

View File

@@ -13,18 +13,16 @@ class Domain::Fa::Job::ScanUserUtils
sig do
params(
user: Domain::Fa::User,
user: Domain::User::FaUser,
response: Scraper::HttpClient::Response,
).returns(T::Boolean)
end
def self.user_disabled_or_not_found?(user, response)
if DISABLED_PAGE_PATTERNS.any? { |pattern| response.body =~ pattern }
user.state = :scan_error
user.state_detail ||= {}
user.state_detail[
"scan_error"
] = "account disabled or not found, see last_scanned_page_id"
user.state_detail["last_scanned_page_id"] = response.log_entry.id
user.state = "error"
user.page_scan_error =
"account disabled or not found, see last_user_page_id"
user.last_user_page_id = response.log_entry.id
user.save!
true
else
@@ -32,106 +30,74 @@ class Domain::Fa::Job::ScanUserUtils
end
end
module DisabledOrNotFoundResult
class Fatal < T::Struct
const :message, String
end
class Stop < T::Struct
const :message, String
end
class Ok < T::Struct
const :page, Domain::Fa::Parser::Page
end
end
sig do
params(
user: Domain::Fa::User,
user: Domain::User::FaUser,
response: Scraper::HttpClient::Response,
).returns(T::Array[T.untyped])
).returns(
T.any(
DisabledOrNotFoundResult::Fatal,
DisabledOrNotFoundResult::Stop,
DisabledOrNotFoundResult::Ok,
),
)
end
def self.check_disabled_or_not_found(user, response)
if response.status_code != 200
return [
:fatal,
{
return(
DisabledOrNotFoundResult::Fatal.new(
message:
"http #{response.status_code}, log entry #{response.log_entry.id}",
}
]
)
)
end
page = Domain::Fa::Parser::Page.new(response.body, require_logged_in: false)
return :ok, { page: page } if page.probably_user_page?
if page.probably_user_page?
return DisabledOrNotFoundResult::Ok.new(page: page)
end
if response.body =~ /has voluntarily disabled access/
user.state = :scan_error
user.state_detail = {
scan_error:
"(user scan) user has disabled account, see last_user_page_id",
last_user_page_id: response.log_entry.id,
}
user.state = "ok"
user.is_disabled = true
user.last_user_page_id = response.log_entry.id
user.save!
try_name = /User "(.+)" has voluntarily disabled/.match(response.body)
user.name ||= try_name && try_name[1] || user.url_name
user.save!
return :stop, { message: "account disabled" }
return DisabledOrNotFoundResult::Stop.new(message: "account disabled")
end
if response.body =~ /This user cannot be found./ ||
response.body =~
/The page you are trying to reach is currently pending deletion/
user.state = :scan_error
user.state_detail = {
scan_error: "(user scan) user was not found, see last_user_page_id",
last_user_page_id: response.log_entry.id,
}
user.state = "error"
user.page_scan_error = "account not found, see last_user_page_id"
user.last_user_page_id = response.log_entry.id
user.save!
user.name ||= user.url_name
user.save!
return :stop, { message: "account not found" }
return DisabledOrNotFoundResult::Stop.new(message: "account not found")
end
return [
:fatal,
{ message: "not a user page - log entry #{response.log_entry.id}" }
]
end
sig do
params(
user: Domain::Fa::User,
page: Domain::Fa::Parser::Page,
response: Scraper::HttpClient::Response,
).void
end
def self.update_user_fields_from_page(user, page, response)
user_page = page.user_page
user.name = user_page.name
user.registered_at = user_page.registered_since
user.num_pageviews = user_page.num_pageviews
user.num_submissions = user_page.num_submissions
user.num_comments_recieved = user_page.num_comments_recieved
user.num_comments_given = user_page.num_comments_given
user.num_journals = user_page.num_journals
user.num_favorites = user_page.num_favorites
user.profile_html =
user_page.profile_html.encode("UTF-8", invalid: :replace, undef: :replace)
user.log_entry_detail["last_user_page_id"] = response.log_entry.id
avatar = user.ensure_avatar!
avatar.file_uri = user_page.profile_thumb_url
if avatar.changed?
avatar.save!
Domain::Fa::Job::UserAvatarJob.perform_later(
{ user: user, caused_by_entry: response.log_entry },
return(
DisabledOrNotFoundResult::Fatal.new(
message: "not a user page - log entry #{response.log_entry.id}",
)
end
end
sig do
params(
names: T::Array[Domain::Fa::User],
caused_by_entry: T.nilable(HttpLogEntry),
).returns(T::Array[Domain::Fa::User])
end
def self.find_or_create_by_names(names, caused_by_entry: nil)
users = Domain::Fa::User.where(url_name: names.map(&:url_name)).to_a
missing =
names.reject { |name| users.any? { |u| u.url_name == name.url_name } }
missing.each do |name|
user = Domain::Fa::User.create!(url_name: name.url_name, name: name.name)
Domain::Fa::Job::UserPageJob.perform_later(
{ user: user, caused_by_entry: caused_by_entry },
)
users << user
end
users
)
end
end

View File

@@ -1,43 +1,45 @@
# typed: true
# typed: strict
class Domain::Fa::Job::UserAvatarJob < Domain::Fa::Job::Base
extend T::Sig
queue_as :static_file
sig { override.params(args: T.untyped).void }
def perform(args)
init_from_args!(args, build_user: false)
user = @user || raise("user must exist")
avatar = user.ensure_avatar!
init_from_args!(args, build_user: false, set_user: false)
avatar =
args[:avatar] ||
begin
user = @user || raise("user must exist")
user.avatar || raise("user must have an avatar")
end
user = avatar.user
logger.prefix =
proc do
"[avatar #{avatar.id.to_s.bold} / user #{user.url_name.to_s.bold}]"
end
if avatar.state == "ok" && avatar.log_entry.present? && !force_scan?
logger.info("already have avatar downloaded, skipping")
return
end
response =
http_client.get("https://a.furaffinity.net/0/#{user.url_name}.gif")
avatar.state_detail["log_entries"] ||= [avatar.log_entry&.id].compact
avatar.state_detail["log_entries"] << response.log_entry.id
avatar.log_entry = response.log_entry
avatar.last_log_entry = response.log_entry
avatar.downloaded_at = response.log_entry.created_at
case response.status_code
when 200
avatar.state = :ok
avatar.downloaded_file_at = response.log_entry.created_at
avatar.file = response.log_entry.response
avatar.state = "ok"
avatar.log_entry = response.log_entry
logger.info("downloaded avatar")
when 404
avatar.state = :file_not_found
if avatar.file_sha256.blank?
avatar.downloaded_file_at = response.log_entry.created_at
avatar.file = response.log_entry.response
logger.info("avatar 404, and no previous file")
else
logger.info("avatar 404, and previous file")
end
avatar.state = "file_404"
avatar.error_message = "http status #{response.status_code}"
else
avatar.state = :download_error
avatar.state_detail[
"download_error"
] = "http status #{response.status_code}"
avatar.state = "http_error"
avatar.error_message = "http status #{response.status_code}"
fatal_error(
"http #{response.status_code}, log entry #{response.log_entry.id}",
)

View File

@@ -31,7 +31,7 @@ class Domain::Fa::Job::UserFollowsJob < Domain::Fa::Job::Base
end
# buggy (sentinel) user
return if user.id == 117_552 && user.url_name == "click here"
return if user.url_name == "click here"
while true
break if scan_follows_page(user) == :break
@@ -40,23 +40,21 @@ class Domain::Fa::Job::UserFollowsJob < Domain::Fa::Job::Base
@page_number += 1
end
existing_followed_ids = Set.new(user.follower_joins.pluck(:followed_id))
to_remove = existing_followed_ids - @scanned_followed_ids
existing_followed_ids = Set.new(user.user_user_follows_from.pluck(:to_id))
to_add = @scanned_followed_ids - existing_followed_ids
logger.info "[calc changed follows] [add #{to_add.size.to_s.bold}] [remove #{to_remove.size.to_s.bold}]"
logger.info "[calc changed follows] [add #{to_add.size.to_s.bold}]"
ReduxApplicationRecord.transaction do
if to_remove.any?
user.follower_joins.where(followed_id: to_remove).delete_all
end
if to_add.any?
user.follower_joins.insert_all!(to_add.map { |id| { followed_id: id } })
to_add.each_slice(1000) do |slice|
Domain::UserUserFollow.insert_all!(
slice.map { |id| { from_id: user.id, to_id: id } },
)
end
user.scanned_follows_at = Time.current
user.save!
end
logger.info "[updated follows list] [users: #{user.follows.count.to_s.bold}]"
logger.info "[updated following users list] [users: #{user.followed_users.count.to_s.bold}]"
if @created_user
logger.info("user was new record, enqueue page scan job")
@@ -66,7 +64,7 @@ class Domain::Fa::Job::UserFollowsJob < Domain::Fa::Job::Base
private
sig { params(user: Domain::Fa::User).returns(T.nilable(Symbol)) }
sig { params(user: Domain::User::FaUser).returns(T.nilable(Symbol)) }
def scan_follows_page(user)
ret = nil
@@ -92,7 +90,7 @@ class Domain::Fa::Job::UserFollowsJob < Domain::Fa::Job::Base
return :break
end
if user_list.last.url_name == @last_in_user_list
if user_list.last&.url_name == @last_in_user_list
logger.info(
"page #{@page_number.to_s.bold} saw same user as last page, break",
)
@@ -102,62 +100,49 @@ class Domain::Fa::Job::UserFollowsJob < Domain::Fa::Job::Base
# the last page will have < 200 users, we know we're at the end
ret = :break if user_list.length < USERS_PER_FULL_PAGE
@last_in_user_list = user_list.last.url_name
@last_in_user_list = user_list.last&.url_name
@total_follows_seen += user_list.length
existing_url_name_to_id =
Domain::Fa::User
.where(url_name: user_list.map(&:url_name))
.pluck(:id, :url_name)
.map { |id, url_name| [url_name, id] }
.to_h
T.cast(
Domain::User::FaUser
.where(url_name: user_list.map(&:url_name))
.pluck(:id, :url_name)
.map { |id, url_name| [url_name, id] }
.to_h,
T::Hash[String, Integer],
)
users_to_create_hashes =
users_to_create =
user_list
.reject { |user| existing_url_name_to_id[user.url_name] }
.map do |user|
{
url_name: user.url_name,
name: user.name,
state_detail: {
"first_seen_entry" => response.log_entry.id,
},
}
Domain::User::FaUser.new(url_name: user.url_name, name: user.name)
end
created_user_ids =
Domain::Fa::User
.upsert_all(
users_to_create_hashes,
unique_by: :url_name,
update_only: :url_name,
returning: %i[id url_name],
)
.map { |row| row["id"] } unless users_to_create_hashes.empty?
users_to_create.map do |user|
user.save!
T.cast(user.id, Integer)
end
logger.info [
"[page #{@page_number.to_s.bold}]",
"[users: #{user_list.length.to_s.bold}]",
"[created: #{users_to_create_hashes.size.to_s.bold}]",
"[created: #{users_to_create.size.to_s.bold}]",
].join(" ")
enqueue_new_user_pagescan_jobs(users_to_create_hashes)
followed_user_ids =
(created_user_ids || []) + existing_url_name_to_id.values
enqueue_new_user_pagescan_jobs(users_to_create)
followed_user_ids = created_user_ids + existing_url_name_to_id.values
followed_user_ids.each { |user_id| @scanned_followed_ids.add(user_id) }
ret
end
sig { params(user_hashes: T::Array[T::Hash[Symbol, T.untyped]]).void }
def enqueue_new_user_pagescan_jobs(user_hashes)
sig { params(users: T::Array[Domain::User::FaUser]).void }
def enqueue_new_user_pagescan_jobs(users)
bulk_enqueue_jobs do
user_hashes.each do |user_hash|
defer_job(
Domain::Fa::Job::UserPageJob,
{ url_name: user_hash[:url_name] },
)
end
users.each { |user| defer_job(Domain::Fa::Job::UserPageJob, { user: }) }
end
end
end

View File

@@ -60,7 +60,7 @@ class Domain::Fa::Job::UserGalleryJob < Domain::Fa::Job::Base
break if scan_folder(user, folder) == :break
end
user.log_entry_detail["last_gallery_page_id"] = first_log_entry&.id
user.last_gallery_page_id = first_log_entry&.id
user.scanned_gallery_at = Time.current
user.save!
end
@@ -68,7 +68,9 @@ class Domain::Fa::Job::UserGalleryJob < Domain::Fa::Job::Base
private
sig do
params(user: Domain::Fa::User, folder: Folder).returns(T.nilable(Symbol))
params(user: Domain::User::FaUser, folder: Folder).returns(
T.nilable(Symbol),
)
end
def scan_folder(user, folder)
page_number = 1

View File

@@ -1,8 +1,9 @@
# typed: true
# typed: strict
module Domain::Fa::Job
class UserIncrementalJob < Base
queue_as :fa_user_page
sig { override.params(args: T::Hash[Symbol, T.untyped]).void }
def perform(args)
init_from_args!(args)
user = T.must(@user)
@@ -22,7 +23,7 @@ module Domain::Fa::Job
if !user.due_for_incremental_scan? && !@force_scan
logger.warn(
"scanned #{user.time_ago_for_incremental_scan.bold}, skipping",
"scanned #{time_ago_in_words(user.scanned_incremental_at).bold}, skipping",
)
return
end
@@ -30,32 +31,29 @@ module Domain::Fa::Job
response =
http_client.get("https://www.furaffinity.net/user/#{user.url_name}/")
ret, opts =
ret =
Domain::Fa::Job::ScanUserUtils.check_disabled_or_not_found(
user,
response,
)
case ret
when :ok
page = opts[:page]
when :stop
logger.error(opts[:message])
when ScanUserUtils::DisabledOrNotFoundResult::Ok
page = ret.page
when ScanUserUtils::DisabledOrNotFoundResult::Stop
logger.error(ret.message)
return
when ScanUserUtils::DisabledOrNotFoundResult::Fatal
fatal_error(ret.message)
return
when :fatal
fatal_error(opts[:message])
end
Domain::Fa::Job::ScanUserUtils.update_user_fields_from_page(
user,
page,
response,
)
update_user_fields_from_page(user, page, response)
check_favs(user, page.user_page.recent_fav_fa_ids)
check_watchers(user, page.user_page.recent_watchers)
check_watching(user, page.user_page.recent_watching)
user.scanned_page_at = Time.current
user.scanned_incremental_at = Time.current
user.save!
logger.info "completed page scan"
@@ -68,38 +66,44 @@ module Domain::Fa::Job
end
end
sig do
params(
user: Domain::User::FaUser,
recent_fav_fa_ids: T::Array[Integer],
).void
end
def check_favs(user, recent_fav_fa_ids)
recent_fav_posts =
Domain::Fa::Job::ScanPostUtils.find_or_create_by_fa_ids(
recent_fav_fa_ids,
caused_by_entry: causing_log_entry,
)
recent_fav_posts = find_or_create_posts_by_fa_ids(recent_fav_fa_ids)
recent_fav_post_ids = recent_fav_posts.map(&:id)
existing_fav_post_ids =
user.fav_post_joins.where(post_id: recent_fav_post_ids).pluck(:post_id)
user.user_post_favs.where(post_id: recent_fav_post_ids).pluck(:post_id)
missing_fav_post_ids = recent_fav_post_ids - existing_fav_post_ids
if missing_fav_post_ids.empty?
logger.info("no new favs for user")
user.scanned_favs_at = Time.now
user.scanned_favs_at = Time.current
return
end
num_missing = missing_fav_post_ids.size
if num_missing >= 0
logger.info("add #{num_missing.to_s.bold} new favs for user")
user.fav_post_joins.insert_all!(
missing_fav_post_ids.map { |post_id| { post_id: post_id } },
if missing_fav_post_ids.any?
logger.info(
"add #{missing_fav_post_ids.size.to_s.bold} new favs for user",
)
Domain::UserPostFav.insert_all!(
missing_fav_post_ids.map do |post_id|
{ user_id: user.id, post_id: post_id }
end,
)
end
if missing_fav_post_ids.include? recent_fav_post_ids.last
logger.info(
"last fav is new (#{num_missing.to_s.bold} missing), enqueue full favs scan",
"last fav is new (#{missing_fav_post_ids.size.to_s.bold} missing), enqueue full favs scan",
)
defer_job(Domain::Fa::Job::FavsJob, { user: user })
else
user.scanned_favs_at = Time.now
user.scanned_favs_at = Time.current
end
end
@@ -107,53 +111,82 @@ module Domain::Fa::Job
# nor enqueue a full follows scan job
# TODO - may be useful to have a separate 'scan full followed by' job
# to handle users who are watched by a large number of others
def check_watchers(user, recent_watchers)
recent_models =
Domain::Fa::Job::ScanUserUtils.find_or_create_by_names(recent_watchers)
existing =
user
.followed_joins
.where(follower_id: recent_models.map(&:id))
.pluck(:follower_id)
missing = recent_models.reject { |w| existing.include? w.id }
if missing.empty?
sig do
params(
user: Domain::User::FaUser,
recent_watched_by:
T::Array[Domain::Fa::Parser::UserPageHelper::RecentUser],
).void
end
def check_watchers(user, recent_watched_by)
recent_watched_by_ids =
find_or_create_users_by_recent_users(recent_watched_by).map do |m|
T.must(m.id)
end
known_watcher_ids =
T.cast(
user
.user_user_follows_to
.where(from_id: recent_watched_by_ids)
.pluck(:from_id),
T::Array[Integer],
)
missing_watcher_ids = recent_watched_by_ids - known_watcher_ids
if missing_watcher_ids.empty?
logger.info("no new watchers")
return
end
num_missing = missing.size
user.followed_joins.insert_all!(
missing.map { |watcher| { follower_id: watcher.id } },
num_missing = missing_watcher_ids.size
Domain::UserUserFollow.insert_all!(
missing_watcher_ids.map do |watcher_id|
{ from_id: watcher_id, to_id: user.id }
end,
)
logger.info("added #{num_missing.to_s.bold} new watchers")
end
def check_watching(user, recent_watching)
recent_models =
Domain::Fa::Job::ScanUserUtils.find_or_create_by_names(recent_watching)
existing =
user
.follower_joins
.where(followed_id: recent_models.map(&:id))
.pluck(:followed_id)
missing = recent_models.reject { |w| existing.include? w.id }
if missing.empty?
sig do
params(
user: Domain::User::FaUser,
recent_watched:
T::Array[Domain::Fa::Parser::UserPageHelper::RecentUser],
).void
end
def check_watching(user, recent_watched)
recent_watched_users =
find_or_create_users_by_recent_users(recent_watched)
raise("invariant") unless recent_watched_users.size == recent_watched.size
recent_watched_user_ids = recent_watched_users.map { |m| T.must(m.id) }
known_watched_users =
user.followed_users.where(id: recent_watched_user_ids).to_a
missing_watched_users = recent_watched_users - known_watched_users
if missing_watched_users.empty?
logger.info("no new users watched")
user.scanned_follows_at = Time.now
user.scanned_follows_at = Time.current
return
end
num_missing = missing.size
user.follower_joins.insert_all!(
missing.map { |watcher| { followed_id: watcher.id } },
num_missing = missing_watched_users.size
Domain::UserUserFollow.insert_all!(
missing_watched_users.map do |watched_user|
{ from_id: user.id, to_id: watched_user.id }
end,
)
logger.info("added #{num_missing.to_s.bold} new users watched")
if missing.any? { |w| w.url_name == recent_watching.last.url_name }
last_watched_user = recent_watched_users.last
if last_watched_user && missing_watched_users.include?(last_watched_user)
logger.info("last user watched is new, enqueue full follows scan")
defer_job(Domain::Fa::Job::UserFollowsJob, { user: user })
defer_job(Domain::Fa::Job::UserFollowsJob, { user: })
else
user.scanned_follows_at = Time.now
logger.info(
"last user watched was known, no need for full follows scan",
)
user.scanned_follows_at = Time.current
end
end
end

View File

@@ -1,7 +1,9 @@
# typed: true
# typed: strict
class Domain::Fa::Job::UserPageJob < Domain::Fa::Job::Base
ScanUserUtils = Domain::Fa::Job::ScanUserUtils
queue_as :fa_user_page
sig { override.params(args: T::Hash[Symbol, T.untyped]).void }
def perform(args)
init_from_args!(args)
user = T.must(@user)
@@ -19,24 +21,20 @@ class Domain::Fa::Job::UserPageJob < Domain::Fa::Job::Base
response =
http_client.get("https://www.furaffinity.net/user/#{user.url_name}/")
ret, opts =
ret =
Domain::Fa::Job::ScanUserUtils.check_disabled_or_not_found(user, response)
case ret
when :ok
page = opts[:page]
when :stop
logger.error(opts[:message])
when ScanUserUtils::DisabledOrNotFoundResult::Ok
page = ret.page
when ScanUserUtils::DisabledOrNotFoundResult::Stop
logger.error(ret.message)
return
when ScanUserUtils::DisabledOrNotFoundResult::Fatal
fatal_error(ret.message)
return
when :fatal
fatal_error(opts[:message])
end
Domain::Fa::Job::ScanUserUtils.update_user_fields_from_page(
user,
page,
response,
)
user.scanned_page_at = Time.current
update_user_fields_from_page(user, page, response)
user.save!
logger.info "completed page scan"
ensure

View File

@@ -186,6 +186,7 @@ class Domain::Fa::Parser::Page < Domain::Fa::Parser::Base
end
end
sig { returns(Domain::Fa::Parser::UserPageHelper) }
def user_page
@user_page ||= Domain::Fa::Parser::UserPageHelper.new(@page, @page_version)
end
@@ -221,6 +222,7 @@ class Domain::Fa::Parser::Page < Domain::Fa::Parser::Base
)
end
sig { returns(T::Array[Domain::Fa::Parser::UserListParserHelper::User]) }
def user_list
@user_list ||= Domain::Fa::Parser::UserListParserHelper.user_list(@page)
end

View File

@@ -1,4 +1,4 @@
# typed: false
# typed: true
class Domain::Fa::Parser::UserPageHelper < Domain::Fa::Parser::Base
VERSION_0 = Domain::Fa::Parser::Page::VERSION_0
VERSION_1 = Domain::Fa::Parser::Page::VERSION_1
@@ -158,6 +158,7 @@ class Domain::Fa::Parser::UserPageHelper < Domain::Fa::Parser::Base
@num_favorites ||= stat_value(:nfav, 2)
end
sig { returns(T::Array[Integer]) }
def recent_fav_fa_ids
@recent_favs ||=
case @page_version
@@ -168,7 +169,7 @@ class Domain::Fa::Parser::UserPageHelper < Domain::Fa::Parser::Base
&.css("figure a")
&.map do |elem|
href = elem["href"]
%r{/view/(\d+)}.match(href)[1]&.to_i ||
%r{/view/(\d+)}.match(href)&.[](1)&.to_i ||
raise("invalid url: #{href}")
end || []
else
@@ -176,23 +177,30 @@ class Domain::Fa::Parser::UserPageHelper < Domain::Fa::Parser::Base
end
end
RecentUser =
Struct.new(:name, :url_name) do
def to_a
[name, url_name]
end
end
class RecentUser < T::Struct
extend T::Sig
const :name, String
const :url_name, String
sig { returns(T::Array[String]) }
def to_a
[name, url_name]
end
end
sig { returns(T::Array[RecentUser]) }
def recent_watchers
@recent_watchers ||= recent_users_for_section("Recent Watchers")
end
sig { returns(T::Array[RecentUser]) }
def recent_watching
@recent_watching ||= recent_users_for_section("Recently Watched")
end
private
sig { params(section_name: String).returns(T::Array[RecentUser]) }
def recent_users_for_section(section_name)
case @page_version
when VERSION_2
@@ -211,7 +219,7 @@ class Domain::Fa::Parser::UserPageHelper < Domain::Fa::Parser::Base
url_name =
%r{/user/(.+)/}.match(href)&.[](1) || raise("invalid url: #{href}")
name = link_elem.css(".artist_name").first.text.strip
RecentUser.new(name, url_name)
RecentUser.new(name:, url_name:)
end
else
unimplemented_version!
@@ -219,12 +227,11 @@ class Domain::Fa::Parser::UserPageHelper < Domain::Fa::Parser::Base
end
def stat_value(legacy_name, redux_idx)
legacy_map =
if false # old mode?
{ pvs: 2, subs: 5, crec: 8, cgiv: 11, njr: 14, nfav: 17 }
else
{ pvs: 2, subs: 6, crec: 10, cgiv: 14, njr: 18, nfav: 22 }
end
legacy_map = # if false # old mode?
# { pvs: 2, subs: 5, crec: 8, cgiv: 11, njr: 14, nfav: 17 }
# else
{ pvs: 2, subs: 6, crec: 10, cgiv: 14, njr: 18, nfav: 22 }
# end
value =
case @page_version

View File

@@ -183,7 +183,7 @@ class Domain::MigrateToDomain
ReduxApplicationRecord.transaction do
models =
migrate_batch(Domain::User::InkbunnyUser, batch) do |old_user|
initialize_inkbunny_user(old_user)
initialize_inkbunny_user_from(old_user)
end
migrate_batch(Domain::UserAvatar, models.filter(&:avatar)) do |user|
@@ -281,7 +281,7 @@ class Domain::MigrateToDomain
sig do
params(old_user: Domain::Inkbunny::User).returns(Domain::User::InkbunnyUser)
end
def initialize_inkbunny_user(old_user)
def initialize_inkbunny_user_from(old_user)
new_user = Domain::User::InkbunnyUser.new
new_user.ib_id = old_user.ib_user_id
new_user.name = old_user.name
@@ -291,12 +291,22 @@ class Domain::MigrateToDomain
new_user.shallow_update_log_entry_id = old_user.shallow_update_log_entry_id
new_user.created_at = old_user.created_at
if old_avatar_log_entry = old_user.avatar_log_entry
if avatar_url_str = old_user.avatar_url_str
new_avatar = Domain::UserAvatar.new
new_avatar.log_entry_id = old_avatar_log_entry.id
new_avatar.url_str = old_avatar_log_entry.uri_str
new_avatar.log_entry_id = old_user.avatar_file_log_entry_id
new_avatar.url_str = avatar_url_str
new_avatar.downloaded_at = old_user.avatar_downloaded_at
new_avatar.state =
old_avatar_log_entry.status_code == 200 ? "ok" : "error"
case old_user.avatar_state
when "ok"
old_user.avatar_file_log_entry_id.present? ? "ok" : "pending"
when "not_found"
new_avatar.error_message = old_user.avatar_state
"file_404"
else
new_avatar.error_message = old_user.avatar_state
"http_error"
end
new_user.avatar = new_avatar
end
@@ -411,6 +421,7 @@ class Domain::MigrateToDomain
sig { params(old_user: Domain::Fa::User).returns(Domain::User::FaUser) }
def initialize_fa_user_from(old_user)
new_user = Domain::User::FaUser.new
new_user.state = old_user.state
new_user.url_name = old_user.url_name
new_user.name = old_user.name
new_user.full_name = old_user.full_name
@@ -425,6 +436,9 @@ class Domain::MigrateToDomain
new_user.num_favorites = old_user.num_favorites
new_user.scanned_gallery_at = old_user.scanned_gallery_at
new_user.scanned_page_at = old_user.scanned_page_at
new_user.scanned_follows_at = old_user.scanned_follows_at
new_user.scanned_favs_at = old_user.scanned_favs_at
new_user.scanned_incremental_at = old_user.scanned_incremental_at
new_user.registered_at = old_user.registered_at
if old_avatar = old_user.avatar
@@ -434,10 +448,13 @@ class Domain::MigrateToDomain
new_avatar.state =
case old_avatar.state
when "ok"
"ok"
old_avatar.log_entry_id.present? ? "ok" : "pending"
when "file_not_found"
new_avatar.error_message = old_avatar.state
"file_404"
else
new_avatar.error_message = old_avatar.state
"error"
"http_error"
end
new_user.avatar = new_avatar
end
@@ -526,7 +543,7 @@ class Domain::MigrateToDomain
if new_user_ids.size != old_user.follows.count
logger.error(
"followers mismatch for #{user.name}: (#{user.following_users.count} != #{old_user.follows.count})",
"followers mismatch for #{user.name}: (#{user.followed_users.count} != #{old_user.follows.count})",
)
else
user.migrated_followed_users_at = Time.current

View File

@@ -44,7 +44,8 @@ class Domain::Post < ReduxApplicationRecord
has_one :creator,
class_name: klass.name,
through: :primary_user_post_creation,
source: :user
source: :user,
inverse_of: :posts
end
has_many :user_post_creations,

View File

@@ -42,4 +42,26 @@ class Domain::Post::FaPost < Domain::Post
def to_param
"fa/#{self.fa_id}" if self.fa_id.present?
end
sig do
params(
submission: Domain::Fa::Parser::ListedSubmissionParserHelper,
first_seen_log_entry: T.nilable(HttpLogEntry),
).returns(Domain::Post::FaPost)
end
def self.find_or_initialize_by_submission_parser(
submission,
first_seen_log_entry: nil
)
creator =
Domain::User::FaUser.find_or_create_by!(
{ url_name: submission.artist_url_name },
) { |user| user.name = submission.artist }
Domain::Post::FaPost.find_or_initialize_by(fa_id: submission.id) do |post|
post.creator = creator
post.title = submission.title
post.first_seen_entry = first_seen_log_entry
end
end
end

View File

@@ -45,7 +45,7 @@ class Domain::User < ReduxApplicationRecord
has_many :posts, through: :user_post_creations, source: :post
has_many :faved_posts, through: :user_post_favs, source: :post
has_many :following_users, through: :user_user_follows_from, source: :to
has_many :followed_users, through: :user_user_follows_from, source: :to
has_many :followed_by_users, through: :user_user_follows_to, source: :from
sig { params(klass: T.class_of(Domain::Post)).void }
@@ -66,7 +66,7 @@ class Domain::User < ReduxApplicationRecord
sig { params(klass: T.class_of(Domain::User)).void }
def self.has_followed_users!(klass)
has_many :following_users,
has_many :followed_users,
through: :user_user_follows_from,
source: :to,
class_name: klass.name

View File

@@ -1,5 +1,6 @@
# typed: strict
class Domain::User::FaUser < Domain::User
attr_json :state, :string
attr_json :name, :string
attr_json :url_name, :string
attr_json :full_name, :string
@@ -14,19 +15,91 @@ class Domain::User::FaUser < Domain::User
attr_json :num_favorites, :integer
attr_json :scanned_gallery_at, :datetime
attr_json :scanned_page_at, :datetime
attr_json :scanned_follows_at, :datetime
attr_json :scanned_favs_at, :datetime
attr_json :scanned_incremental_at, :datetime
attr_json :registered_at, :datetime
attr_json :migrated_followed_users_at, :datetime
validates :name, presence: true
validates :url_name, presence: true
attr_json :last_user_page_id, :integer
attr_json :last_gallery_page_id, :integer
attr_json :page_scan_error, :string
attr_json :is_disabled, :boolean
has_followed_users! Domain::User::FaUser
has_followed_by_users! Domain::User::FaUser
has_created_posts! Domain::Post::FaPost
has_faved_posts! Domain::Post::FaPost
belongs_to :last_user_page_log_entry,
foreign_key: :last_user_page_id,
class_name: "::HttpLogEntry",
optional: true
belongs_to :last_gallery_page_log_entry,
foreign_key: :last_gallery_page_id,
class_name: "::HttpLogEntry",
optional: true
validates :name, presence: true
validates :url_name, presence: true
validates :state, presence: true, inclusion: { in: %w[ok error] }
after_initialize { self.state ||= "ok" if new_record? }
sig { override.returns(T.nilable(String)) }
def to_param
"fa/#{url_name}" if url_name.present?
end
sig { returns(T::Boolean) }
def due_for_favs_scan?
due_for_scan?(scanned_favs_at, 1.month.ago)
end
sig { returns(T::Boolean) }
def due_for_follows_scan?
due_for_scan?(scanned_follows_at, 1.month.ago)
end
sig { returns(T::Boolean) }
def due_for_page_scan?
due_for_scan?(scanned_page_at, 1.month.ago)
end
sig { returns(T::Boolean) }
def due_for_gallery_scan?
due_for_scan?(scanned_gallery_at, 1.year.ago)
end
sig { returns(T::Boolean) }
def due_for_incremental_scan?
due_for_scan?(scanned_incremental_at, 1.month.ago)
end
sig do
params(
submission_parser:
T.any(
Domain::Fa::Parser::ListedSubmissionParserHelper,
Domain::Fa::Parser::SubmissionParserHelper,
),
).returns(Domain::User::FaUser)
end
def self.find_or_build_from_submission_parser(submission_parser)
find_or_initialize_by(url_name: submission_parser.artist_url_name) do |user|
user.name = submission_parser.artist
end
end
private
sig do
params(
at: T.nilable(ActiveSupport::TimeWithZone),
ago: ActiveSupport::TimeWithZone,
).returns(T::Boolean)
end
def due_for_scan?(at, ago)
at ? at < ago : true
end
end

View File

@@ -8,9 +8,17 @@ class Domain::UserAvatar < ReduxApplicationRecord
attr_json :state, :string
attr_json :downloaded_at, :datetime
attr_json :error_message, :string
attr_json :last_log_entry_id, :integer
attr_json :log_entry_id, :integer
belongs_to :last_log_entry, class_name: "::HttpLogEntry", optional: true
belongs_to :log_entry, class_name: "::HttpLogEntry", optional: true
validates :state, presence: true, inclusion: { in: %w[ok error] }
validates :state,
presence: true,
inclusion: {
in: %w[pending ok file_404 http_error],
}
after_initialize { self.state ||= "pending" if new_record? }
end

View File

@@ -1,27 +0,0 @@
# typed: true
# DO NOT EDIT MANUALLY
# This is an autogenerated file for dynamic methods in `Domain::Fa::Job::HomePageJob`.
# Please instead update this file by running `bin/tapioca dsl Domain::Fa::Job::HomePageJob`.
class Domain::Fa::Job::HomePageJob
sig { returns(ColorLogger) }
def logger; end
class << self
sig { returns(ColorLogger) }
def logger; end
sig do
params(
args: T.untyped,
block: T.nilable(T.proc.params(job: Domain::Fa::Job::HomePageJob).void)
).returns(T.any(Domain::Fa::Job::HomePageJob, FalseClass))
end
def perform_later(args, &block); end
sig { params(args: T.untyped).returns(T.untyped) }
def perform_now(args); end
end
end

View File

@@ -21,7 +21,7 @@ class Domain::Fa::Job::UserAvatarJob
end
def perform_later(args, &block); end
sig { params(args: T.untyped).returns(T.untyped) }
sig { params(args: T.untyped).void }
def perform_now(args); end
end
end

View File

@@ -469,18 +469,18 @@ class Domain::User
def followed_by_users=(value); end
sig { returns(T::Array[T.untyped]) }
def following_user_ids; end
def followed_user_ids; end
sig { params(ids: T::Array[T.untyped]).returns(T::Array[T.untyped]) }
def following_user_ids=(ids); end
def followed_user_ids=(ids); end
# This method is created by ActiveRecord on the `Domain::User` class because it declared `has_many :following_users, through: :user_user_follows_from`.
# This method is created by ActiveRecord on the `Domain::User` class because it declared `has_many :followed_users, through: :user_user_follows_from`.
# 🔗 [Rails guide for `has_many_through` association](https://guides.rubyonrails.org/association_basics.html#the-has-many-through-association)
sig { returns(::Domain::User::PrivateCollectionProxy) }
def following_users; end
def followed_users; end
sig { params(value: T::Enumerable[::Domain::User]).void }
def following_users=(value); end
def followed_users=(value); end
sig { returns(T::Array[T.untyped]) }
def post_ids; end

View File

@@ -501,18 +501,18 @@ class Domain::User::E621User
def followed_by_users=(value); end
sig { returns(T::Array[T.untyped]) }
def following_user_ids; end
def followed_user_ids; end
sig { params(ids: T::Array[T.untyped]).returns(T::Array[T.untyped]) }
def following_user_ids=(ids); end
def followed_user_ids=(ids); end
# This method is created by ActiveRecord on the `Domain::User` class because it declared `has_many :following_users, through: :user_user_follows_from`.
# This method is created by ActiveRecord on the `Domain::User` class because it declared `has_many :followed_users, through: :user_user_follows_from`.
# 🔗 [Rails guide for `has_many_through` association](https://guides.rubyonrails.org/association_basics.html#the-has-many-through-association)
sig { returns(::Domain::User::PrivateCollectionProxy) }
def following_users; end
def followed_users; end
sig { params(value: T::Enumerable[::Domain::User]).void }
def following_users=(value); end
def followed_users=(value); end
sig { returns(T::Array[T.untyped]) }
def post_ids; end

View File

@@ -466,12 +466,30 @@ class Domain::User::FaUser
sig { params(args: T.untyped, blk: T.untyped).returns(::Domain::UserAvatar) }
def build_avatar(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def build_last_gallery_page_log_entry(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def build_last_user_page_log_entry(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(::Domain::UserAvatar) }
def create_avatar(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(::Domain::UserAvatar) }
def create_avatar!(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def create_last_gallery_page_log_entry(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def create_last_gallery_page_log_entry!(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def create_last_user_page_log_entry(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def create_last_user_page_log_entry!(*args, &blk); end
sig { returns(T::Array[T.untyped]) }
def faved_post_ids; end
@@ -501,18 +519,42 @@ class Domain::User::FaUser
def followed_by_users=(value); end
sig { returns(T::Array[T.untyped]) }
def following_user_ids; end
def followed_user_ids; end
sig { params(ids: T::Array[T.untyped]).returns(T::Array[T.untyped]) }
def following_user_ids=(ids); end
def followed_user_ids=(ids); end
# This method is created by ActiveRecord on the `Domain::User::FaUser` class because it declared `has_many :following_users, through: :user_user_follows_from`.
# This method is created by ActiveRecord on the `Domain::User::FaUser` class because it declared `has_many :followed_users, through: :user_user_follows_from`.
# 🔗 [Rails guide for `has_many_through` association](https://guides.rubyonrails.org/association_basics.html#the-has-many-through-association)
sig { returns(::Domain::User::FaUser::PrivateCollectionProxy) }
def following_users; end
def followed_users; end
sig { params(value: T::Enumerable[::Domain::User::FaUser]).void }
def following_users=(value); end
def followed_users=(value); end
sig { returns(T.nilable(::HttpLogEntry)) }
def last_gallery_page_log_entry; end
sig { params(value: T.nilable(::HttpLogEntry)).void }
def last_gallery_page_log_entry=(value); end
sig { returns(T::Boolean) }
def last_gallery_page_log_entry_changed?; end
sig { returns(T::Boolean) }
def last_gallery_page_log_entry_previously_changed?; end
sig { returns(T.nilable(::HttpLogEntry)) }
def last_user_page_log_entry; end
sig { params(value: T.nilable(::HttpLogEntry)).void }
def last_user_page_log_entry=(value); end
sig { returns(T::Boolean) }
def last_user_page_log_entry_changed?; end
sig { returns(T::Boolean) }
def last_user_page_log_entry_previously_changed?; end
sig { returns(T::Array[T.untyped]) }
def post_ids; end
@@ -531,9 +573,21 @@ class Domain::User::FaUser
sig { returns(T.nilable(::Domain::UserAvatar)) }
def reload_avatar; end
sig { returns(T.nilable(::HttpLogEntry)) }
def reload_last_gallery_page_log_entry; end
sig { returns(T.nilable(::HttpLogEntry)) }
def reload_last_user_page_log_entry; end
sig { void }
def reset_avatar; end
sig { void }
def reset_last_gallery_page_log_entry; end
sig { void }
def reset_last_user_page_log_entry; end
sig { returns(T::Array[T.untyped]) }
def user_post_creation_ids; end
@@ -971,6 +1025,51 @@ class Domain::User::FaUser
sig { void }
def id_will_change!; end
sig { returns(T.nilable(T::Boolean)) }
def is_disabled; end
sig { params(value: T.nilable(T::Boolean)).returns(T.nilable(T::Boolean)) }
def is_disabled=(value); end
sig { returns(T::Boolean) }
def is_disabled?; end
sig { returns(T.nilable(T::Boolean)) }
def is_disabled_before_last_save; end
sig { returns(T.untyped) }
def is_disabled_before_type_cast; end
sig { returns(T::Boolean) }
def is_disabled_came_from_user?; end
sig { returns(T.nilable([T.nilable(T::Boolean), T.nilable(T::Boolean)])) }
def is_disabled_change; end
sig { returns(T.nilable([T.nilable(T::Boolean), T.nilable(T::Boolean)])) }
def is_disabled_change_to_be_saved; end
sig { params(from: T.nilable(T::Boolean), to: T.nilable(T::Boolean)).returns(T::Boolean) }
def is_disabled_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(T::Boolean)) }
def is_disabled_in_database; end
sig { returns(T.nilable([T.nilable(T::Boolean), T.nilable(T::Boolean)])) }
def is_disabled_previous_change; end
sig { params(from: T.nilable(T::Boolean), to: T.nilable(T::Boolean)).returns(T::Boolean) }
def is_disabled_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(T::Boolean)) }
def is_disabled_previously_was; end
sig { returns(T.nilable(T::Boolean)) }
def is_disabled_was; end
sig { void }
def is_disabled_will_change!; end
sig { returns(T.untyped) }
def json_attributes; end
@@ -1016,6 +1115,96 @@ class Domain::User::FaUser
sig { void }
def json_attributes_will_change!; end
sig { returns(T.nilable(::Integer)) }
def last_gallery_page_id; end
sig { params(value: T.nilable(::Integer)).returns(T.nilable(::Integer)) }
def last_gallery_page_id=(value); end
sig { returns(T::Boolean) }
def last_gallery_page_id?; end
sig { returns(T.nilable(::Integer)) }
def last_gallery_page_id_before_last_save; end
sig { returns(T.untyped) }
def last_gallery_page_id_before_type_cast; end
sig { returns(T::Boolean) }
def last_gallery_page_id_came_from_user?; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def last_gallery_page_id_change; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def last_gallery_page_id_change_to_be_saved; end
sig { params(from: T.nilable(::Integer), to: T.nilable(::Integer)).returns(T::Boolean) }
def last_gallery_page_id_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::Integer)) }
def last_gallery_page_id_in_database; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def last_gallery_page_id_previous_change; end
sig { params(from: T.nilable(::Integer), to: T.nilable(::Integer)).returns(T::Boolean) }
def last_gallery_page_id_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::Integer)) }
def last_gallery_page_id_previously_was; end
sig { returns(T.nilable(::Integer)) }
def last_gallery_page_id_was; end
sig { void }
def last_gallery_page_id_will_change!; end
sig { returns(T.nilable(::Integer)) }
def last_user_page_id; end
sig { params(value: T.nilable(::Integer)).returns(T.nilable(::Integer)) }
def last_user_page_id=(value); end
sig { returns(T::Boolean) }
def last_user_page_id?; end
sig { returns(T.nilable(::Integer)) }
def last_user_page_id_before_last_save; end
sig { returns(T.untyped) }
def last_user_page_id_before_type_cast; end
sig { returns(T::Boolean) }
def last_user_page_id_came_from_user?; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def last_user_page_id_change; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def last_user_page_id_change_to_be_saved; end
sig { params(from: T.nilable(::Integer), to: T.nilable(::Integer)).returns(T::Boolean) }
def last_user_page_id_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::Integer)) }
def last_user_page_id_in_database; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def last_user_page_id_previous_change; end
sig { params(from: T.nilable(::Integer), to: T.nilable(::Integer)).returns(T::Boolean) }
def last_user_page_id_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::Integer)) }
def last_user_page_id_previously_was; end
sig { returns(T.nilable(::Integer)) }
def last_user_page_id_was; end
sig { void }
def last_user_page_id_will_change!; end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def migrated_followed_users_at; end
@@ -1486,6 +1675,51 @@ class Domain::User::FaUser
sig { void }
def num_submissions_will_change!; end
sig { returns(T.nilable(::String)) }
def page_scan_error; end
sig { params(value: T.nilable(::String)).returns(T.nilable(::String)) }
def page_scan_error=(value); end
sig { returns(T::Boolean) }
def page_scan_error?; end
sig { returns(T.nilable(::String)) }
def page_scan_error_before_last_save; end
sig { returns(T.untyped) }
def page_scan_error_before_type_cast; end
sig { returns(T::Boolean) }
def page_scan_error_came_from_user?; end
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def page_scan_error_change; end
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def page_scan_error_change_to_be_saved; end
sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
def page_scan_error_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::String)) }
def page_scan_error_in_database; end
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def page_scan_error_previous_change; end
sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
def page_scan_error_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::String)) }
def page_scan_error_previously_was; end
sig { returns(T.nilable(::String)) }
def page_scan_error_was; end
sig { void }
def page_scan_error_will_change!; end
sig { returns(T.nilable(::String)) }
def profile_html; end
@@ -1601,9 +1835,18 @@ class Domain::User::FaUser
sig { void }
def restore_id_value!; end
sig { void }
def restore_is_disabled!; end
sig { void }
def restore_json_attributes!; end
sig { void }
def restore_last_gallery_page_id!; end
sig { void }
def restore_last_user_page_id!; end
sig { void }
def restore_migrated_followed_users_at!; end
@@ -1634,18 +1877,33 @@ class Domain::User::FaUser
sig { void }
def restore_num_submissions!; end
sig { void }
def restore_page_scan_error!; end
sig { void }
def restore_profile_html!; end
sig { void }
def restore_registered_at!; end
sig { void }
def restore_scanned_favs_at!; end
sig { void }
def restore_scanned_follows_at!; end
sig { void }
def restore_scanned_gallery_at!; end
sig { void }
def restore_scanned_incremental_at!; end
sig { void }
def restore_scanned_page_at!; end
sig { void }
def restore_state!; end
sig { void }
def restore_type!; end
@@ -1685,12 +1943,30 @@ class Domain::User::FaUser
sig { returns(T::Boolean) }
def saved_change_to_id_value?; end
sig { returns(T.nilable([T.nilable(T::Boolean), T.nilable(T::Boolean)])) }
def saved_change_to_is_disabled; end
sig { returns(T::Boolean) }
def saved_change_to_is_disabled?; end
sig { returns(T.nilable([T.untyped, T.untyped])) }
def saved_change_to_json_attributes; end
sig { returns(T::Boolean) }
def saved_change_to_json_attributes?; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def saved_change_to_last_gallery_page_id; end
sig { returns(T::Boolean) }
def saved_change_to_last_gallery_page_id?; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def saved_change_to_last_user_page_id; end
sig { returns(T::Boolean) }
def saved_change_to_last_user_page_id?; end
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def saved_change_to_migrated_followed_users_at; end
@@ -1751,6 +2027,12 @@ class Domain::User::FaUser
sig { returns(T::Boolean) }
def saved_change_to_num_submissions?; end
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def saved_change_to_page_scan_error; end
sig { returns(T::Boolean) }
def saved_change_to_page_scan_error?; end
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def saved_change_to_profile_html; end
@@ -1763,18 +2045,42 @@ class Domain::User::FaUser
sig { returns(T::Boolean) }
def saved_change_to_registered_at?; end
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def saved_change_to_scanned_favs_at; end
sig { returns(T::Boolean) }
def saved_change_to_scanned_favs_at?; end
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def saved_change_to_scanned_follows_at; end
sig { returns(T::Boolean) }
def saved_change_to_scanned_follows_at?; end
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def saved_change_to_scanned_gallery_at; end
sig { returns(T::Boolean) }
def saved_change_to_scanned_gallery_at?; end
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def saved_change_to_scanned_incremental_at; end
sig { returns(T::Boolean) }
def saved_change_to_scanned_incremental_at?; end
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def saved_change_to_scanned_page_at; end
sig { returns(T::Boolean) }
def saved_change_to_scanned_page_at?; end
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def saved_change_to_state; end
sig { returns(T::Boolean) }
def saved_change_to_state?; end
sig { returns(T.nilable([T.untyped, T.untyped])) }
def saved_change_to_type; end
@@ -1793,6 +2099,116 @@ class Domain::User::FaUser
sig { returns(T::Boolean) }
def saved_change_to_url_name?; end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_favs_at; end
sig { params(value: T.nilable(::ActiveSupport::TimeWithZone)).returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_favs_at=(value); end
sig { returns(T::Boolean) }
def scanned_favs_at?; end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_favs_at_before_last_save; end
sig { returns(T.untyped) }
def scanned_favs_at_before_type_cast; end
sig { returns(T::Boolean) }
def scanned_favs_at_came_from_user?; end
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def scanned_favs_at_change; end
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def scanned_favs_at_change_to_be_saved; end
sig do
params(
from: T.nilable(::ActiveSupport::TimeWithZone),
to: T.nilable(::ActiveSupport::TimeWithZone)
).returns(T::Boolean)
end
def scanned_favs_at_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_favs_at_in_database; end
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def scanned_favs_at_previous_change; end
sig do
params(
from: T.nilable(::ActiveSupport::TimeWithZone),
to: T.nilable(::ActiveSupport::TimeWithZone)
).returns(T::Boolean)
end
def scanned_favs_at_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_favs_at_previously_was; end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_favs_at_was; end
sig { void }
def scanned_favs_at_will_change!; end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_follows_at; end
sig { params(value: T.nilable(::ActiveSupport::TimeWithZone)).returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_follows_at=(value); end
sig { returns(T::Boolean) }
def scanned_follows_at?; end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_follows_at_before_last_save; end
sig { returns(T.untyped) }
def scanned_follows_at_before_type_cast; end
sig { returns(T::Boolean) }
def scanned_follows_at_came_from_user?; end
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def scanned_follows_at_change; end
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def scanned_follows_at_change_to_be_saved; end
sig do
params(
from: T.nilable(::ActiveSupport::TimeWithZone),
to: T.nilable(::ActiveSupport::TimeWithZone)
).returns(T::Boolean)
end
def scanned_follows_at_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_follows_at_in_database; end
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def scanned_follows_at_previous_change; end
sig do
params(
from: T.nilable(::ActiveSupport::TimeWithZone),
to: T.nilable(::ActiveSupport::TimeWithZone)
).returns(T::Boolean)
end
def scanned_follows_at_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_follows_at_previously_was; end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_follows_at_was; end
sig { void }
def scanned_follows_at_will_change!; end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_gallery_at; end
@@ -1848,6 +2264,61 @@ class Domain::User::FaUser
sig { void }
def scanned_gallery_at_will_change!; end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_incremental_at; end
sig { params(value: T.nilable(::ActiveSupport::TimeWithZone)).returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_incremental_at=(value); end
sig { returns(T::Boolean) }
def scanned_incremental_at?; end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_incremental_at_before_last_save; end
sig { returns(T.untyped) }
def scanned_incremental_at_before_type_cast; end
sig { returns(T::Boolean) }
def scanned_incremental_at_came_from_user?; end
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def scanned_incremental_at_change; end
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def scanned_incremental_at_change_to_be_saved; end
sig do
params(
from: T.nilable(::ActiveSupport::TimeWithZone),
to: T.nilable(::ActiveSupport::TimeWithZone)
).returns(T::Boolean)
end
def scanned_incremental_at_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_incremental_at_in_database; end
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
def scanned_incremental_at_previous_change; end
sig do
params(
from: T.nilable(::ActiveSupport::TimeWithZone),
to: T.nilable(::ActiveSupport::TimeWithZone)
).returns(T::Boolean)
end
def scanned_incremental_at_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_incremental_at_previously_was; end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_incremental_at_was; end
sig { void }
def scanned_incremental_at_will_change!; end
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
def scanned_page_at; end
@@ -1903,6 +2374,51 @@ class Domain::User::FaUser
sig { void }
def scanned_page_at_will_change!; end
sig { returns(T.nilable(::String)) }
def state; end
sig { params(value: T.nilable(::String)).returns(T.nilable(::String)) }
def state=(value); end
sig { returns(T::Boolean) }
def state?; end
sig { returns(T.nilable(::String)) }
def state_before_last_save; end
sig { returns(T.untyped) }
def state_before_type_cast; end
sig { returns(T::Boolean) }
def state_came_from_user?; end
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def state_change; end
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def state_change_to_be_saved; end
sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
def state_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::String)) }
def state_in_database; end
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def state_previous_change; end
sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
def state_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::String)) }
def state_previously_was; end
sig { returns(T.nilable(::String)) }
def state_was; end
sig { void }
def state_will_change!; end
sig { returns(T.untyped) }
def type; end
@@ -2063,9 +2579,18 @@ class Domain::User::FaUser
sig { returns(T::Boolean) }
def will_save_change_to_id_value?; end
sig { returns(T::Boolean) }
def will_save_change_to_is_disabled?; end
sig { returns(T::Boolean) }
def will_save_change_to_json_attributes?; end
sig { returns(T::Boolean) }
def will_save_change_to_last_gallery_page_id?; end
sig { returns(T::Boolean) }
def will_save_change_to_last_user_page_id?; end
sig { returns(T::Boolean) }
def will_save_change_to_migrated_followed_users_at?; end
@@ -2096,18 +2621,33 @@ class Domain::User::FaUser
sig { returns(T::Boolean) }
def will_save_change_to_num_submissions?; end
sig { returns(T::Boolean) }
def will_save_change_to_page_scan_error?; end
sig { returns(T::Boolean) }
def will_save_change_to_profile_html?; end
sig { returns(T::Boolean) }
def will_save_change_to_registered_at?; end
sig { returns(T::Boolean) }
def will_save_change_to_scanned_favs_at?; end
sig { returns(T::Boolean) }
def will_save_change_to_scanned_follows_at?; end
sig { returns(T::Boolean) }
def will_save_change_to_scanned_gallery_at?; end
sig { returns(T::Boolean) }
def will_save_change_to_scanned_incremental_at?; end
sig { returns(T::Boolean) }
def will_save_change_to_scanned_page_at?; end
sig { returns(T::Boolean) }
def will_save_change_to_state?; end
sig { returns(T::Boolean) }
def will_save_change_to_type?; end

View File

@@ -536,18 +536,18 @@ class Domain::User::InkbunnyUser
def followed_by_users=(value); end
sig { returns(T::Array[T.untyped]) }
def following_user_ids; end
def followed_user_ids; end
sig { params(ids: T::Array[T.untyped]).returns(T::Array[T.untyped]) }
def following_user_ids=(ids); end
def followed_user_ids=(ids); end
# This method is created by ActiveRecord on the `Domain::User` class because it declared `has_many :following_users, through: :user_user_follows_from`.
# This method is created by ActiveRecord on the `Domain::User` class because it declared `has_many :followed_users, through: :user_user_follows_from`.
# 🔗 [Rails guide for `has_many_through` association](https://guides.rubyonrails.org/association_basics.html#the-has-many-through-association)
sig { returns(::Domain::User::PrivateCollectionProxy) }
def following_users; end
def followed_users; end
sig { params(value: T::Enumerable[::Domain::User]).void }
def following_users=(value); end
def followed_users=(value); end
sig { returns(T::Array[T.untyped]) }
def post_ids; end

View File

@@ -427,12 +427,21 @@ class Domain::UserAvatar
end
module GeneratedAssociationMethods
sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def build_last_log_entry(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def build_log_entry(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(::Domain::User) }
def build_user(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def create_last_log_entry(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def create_last_log_entry!(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def create_log_entry(*args, &blk); end
@@ -445,6 +454,18 @@ class Domain::UserAvatar
sig { params(args: T.untyped, blk: T.untyped).returns(::Domain::User) }
def create_user!(*args, &blk); end
sig { returns(T.nilable(::HttpLogEntry)) }
def last_log_entry; end
sig { params(value: T.nilable(::HttpLogEntry)).void }
def last_log_entry=(value); end
sig { returns(T::Boolean) }
def last_log_entry_changed?; end
sig { returns(T::Boolean) }
def last_log_entry_previously_changed?; end
sig { returns(T.nilable(::HttpLogEntry)) }
def log_entry; end
@@ -457,12 +478,18 @@ class Domain::UserAvatar
sig { returns(T::Boolean) }
def log_entry_previously_changed?; end
sig { returns(T.nilable(::HttpLogEntry)) }
def reload_last_log_entry; end
sig { returns(T.nilable(::HttpLogEntry)) }
def reload_log_entry; end
sig { returns(T.nilable(::Domain::User)) }
def reload_user; end
sig { void }
def reset_last_log_entry; end
sig { void }
def reset_log_entry; end
@@ -917,6 +944,51 @@ class Domain::UserAvatar
sig { void }
def json_attributes_will_change!; end
sig { returns(T.nilable(::Integer)) }
def last_log_entry_id; end
sig { params(value: T.nilable(::Integer)).returns(T.nilable(::Integer)) }
def last_log_entry_id=(value); end
sig { returns(T::Boolean) }
def last_log_entry_id?; end
sig { returns(T.nilable(::Integer)) }
def last_log_entry_id_before_last_save; end
sig { returns(T.untyped) }
def last_log_entry_id_before_type_cast; end
sig { returns(T::Boolean) }
def last_log_entry_id_came_from_user?; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def last_log_entry_id_change; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def last_log_entry_id_change_to_be_saved; end
sig { params(from: T.nilable(::Integer), to: T.nilable(::Integer)).returns(T::Boolean) }
def last_log_entry_id_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::Integer)) }
def last_log_entry_id_in_database; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def last_log_entry_id_previous_change; end
sig { params(from: T.nilable(::Integer), to: T.nilable(::Integer)).returns(T::Boolean) }
def last_log_entry_id_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::Integer)) }
def last_log_entry_id_previously_was; end
sig { returns(T.nilable(::Integer)) }
def last_log_entry_id_was; end
sig { void }
def last_log_entry_id_will_change!; end
sig { returns(T.nilable(::Integer)) }
def log_entry_id; end
@@ -980,6 +1052,9 @@ class Domain::UserAvatar
sig { void }
def restore_json_attributes!; end
sig { void }
def restore_last_log_entry_id!; end
sig { void }
def restore_log_entry_id!; end
@@ -1031,6 +1106,12 @@ class Domain::UserAvatar
sig { returns(T::Boolean) }
def saved_change_to_json_attributes?; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def saved_change_to_last_log_entry_id; end
sig { returns(T::Boolean) }
def saved_change_to_last_log_entry_id?; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def saved_change_to_log_entry_id; end
@@ -1269,6 +1350,9 @@ class Domain::UserAvatar
sig { returns(T::Boolean) }
def will_save_change_to_json_attributes?; end
sig { returns(T::Boolean) }
def will_save_change_to_last_log_entry_id?; end
sig { returns(T::Boolean) }
def will_save_change_to_log_entry_id?; end

View File

@@ -0,0 +1,9 @@
# typed: false
FactoryBot.define do
factory :domain_user_avatar, class: "Domain::UserAvatar" do
user { create(:domain_user_fa_user) }
state { "ok" }
url_str { "https://a.furaffinity.net/0/meesh.gif" }
log_entry { create(:http_log_entry) }
end
end

View File

@@ -1,11 +1,21 @@
# typed: false
# typed: true
module PerformJobHelpers
extend T::Sig
extend T::Helpers
abstract!
# TODO - migrate all the calls to perform_now to use this
sig do
params(
params: T::Hash[Symbol, T.untyped],
should_raise: T.any(T::Boolean, T.class_of(Exception), String, Regexp),
).returns(T.untyped)
end
def perform_now(params, should_raise: false)
ret = described_class.perform_now(params)
bt_printer =
proc do
Kernel.proc do
bt = ret.backtrace.reject { |l| l =~ %r{/gems/} }[0..10]
"!> " + ret.message[0..100] + "\n" + bt.join("\n")
end
@@ -13,7 +23,7 @@ module PerformJobHelpers
case should_raise
when false
expect(ret).to_not be_a(Exception), bt_printer
when Exception
when Class
expect(ret).to be_a(should_raise), bt_printer
when String, Regexp
expect(ret.message).to match(should_raise), bt_printer
@@ -75,7 +85,7 @@ module PerformJobHelpers
{
receive: :next_message,
with:
proc do |sequence|
Kernel.proc do |sequence|
[{ caused_by_entry: sequence[1][:return].log_entry }]
end,
return: Scraper::GalleryDlClient::FinishEvent.new,
@@ -83,4 +93,32 @@ module PerformJobHelpers
],
)
end
sig { abstract.returns(T.class_of(ApplicationJob)) }
def described_class
end
sig { abstract.params(args: T.untyped).returns(T.untyped) }
def expect(args)
end
sig { abstract.params(args: T.untyped).returns(T.untyped) }
def be_a(args)
end
sig { abstract.params(args: T.untyped).returns(T.untyped) }
def match(args)
end
sig { abstract.params(args: T.untyped).returns(T.untyped) }
def receive(args)
end
sig { abstract.params(args: T.untyped).returns(T.untyped) }
def allow(args)
end
sig { abstract.params(args: T.untyped).returns(T.untyped) }
def instance_double(args)
end
end

View File

@@ -6,19 +6,21 @@ describe Domain::Fa::Job::BrowsePageJob do
before { Scraper::ClientFactory.http_client_mock = http_client_mock }
shared_context "user and post getters" do
let(:user) { proc { Domain::Fa::User.find_by(url_name: "ruby69r") } }
let(:post) { proc { Domain::Fa::Post.find_by(fa_id: 51_509_268) } }
let(:find_creator) do
proc { Domain::User::FaUser.find_by(url_name: "ruby69r") }
end
let(:find_post) { proc { Domain::Post::FaPost.find_by(fa_id: 51_509_268) } }
before do
expect(post.call).to be_nil
expect(user.call).to be_nil
expect(find_post.call).to be_nil
expect(find_creator.call).to be_nil
end
end
shared_context "create user and post" do
before do
creator =
Domain::Fa::User.create!({ name: "Ruby_69r", url_name: "ruby69r" })
Domain::Fa::Post.create!({ fa_id: 51_509_268, creator: creator })
Domain::User::FaUser.create!({ name: "Ruby_69r", url_name: "ruby69r" })
Domain::Post::FaPost.create!({ fa_id: 51_509_268, creator: creator })
end
end
@@ -27,7 +29,7 @@ describe Domain::Fa::Job::BrowsePageJob do
it "enqueues post scan job" do
expect(
SpecUtil.enqueued_job_args(Domain::Fa::Job::ScanPostJob),
).to match([{ post: post.call, caused_by_entry: log_entries[0] }])
).to match([{ post: find_post.call, caused_by_entry: log_entries[0] }])
end
end
@@ -44,14 +46,17 @@ describe Domain::Fa::Job::BrowsePageJob do
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanFileJob)).to match(
[
including(
args: [{ post: post.call, caused_by_entry: log_entries[0] }],
args: [
{
post_file: find_post.call.file,
caused_by_entry: log_entries[0],
},
],
),
],
)
end
end
unless expect_to_enqueue
else
it "does not enqueue post scan job" do
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanFileJob)).to eq([])
end
@@ -64,7 +69,9 @@ describe Domain::Fa::Job::BrowsePageJob do
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserPageJob)).to match(
[
including(
args: [{ user: user.call, caused_by_entry: log_entries[0] }],
args: [
{ user: find_creator.call, caused_by_entry: log_entries[0] },
],
),
],
)
@@ -86,7 +93,9 @@ describe Domain::Fa::Job::BrowsePageJob do
).to match(
[
including(
args: [{ user: user.call, caused_by_entry: log_entries[0] }],
args: [
{ user: find_creator.call, caused_by_entry: log_entries[0] },
],
),
],
)
@@ -214,23 +223,27 @@ describe Domain::Fa::Job::BrowsePageJob do
end
it "creates a new post" do
expect do perform_now({}) end.to change(Domain::Fa::Post, :count).by(1)
expect do perform_now({}) end.to change(Domain::Post::FaPost, :count).by(
1,
)
end
it "creates a new user" do
expect do perform_now({}) end.to change(Domain::Fa::User, :count).by(1)
expect do perform_now({}) end.to change(Domain::User::FaUser, :count).by(
1,
)
end
it "creates a post with the right attributes" do
perform_now({})
expect(post.call.state).to eq("ok")
expect(post.call.title).to eq("reminder YCH AUCTION")
expect(post.call.creator).to eq(user.call)
expect(find_post.call.state).to eq("ok")
expect(find_post.call.title).to eq("reminder YCH AUCTION")
expect(find_post.call.creator).to eq(find_creator.call)
end
it "creates a user with the right attributes" do
perform_now({})
expect(user.call.name).to eq("Ruby_69r")
expect(find_creator.call.name).to eq("Ruby_69r")
end
end
@@ -267,19 +280,19 @@ describe Domain::Fa::Job::BrowsePageJob do
it "enqueues scan post jobs" do
perform_now({})
post1 = Domain::Fa::Post.find_by(fa_id: 51_509_268)
post2 = Domain::Fa::Post.find_by(fa_id: 51_509_267)
post3 = Domain::Fa::Post.find_by(fa_id: 51_509_266)
post1 = Domain::Post::FaPost.find_by(fa_id: 51_509_268)
post2 = Domain::Post::FaPost.find_by(fa_id: 51_509_267)
post3 = Domain::Post::FaPost.find_by(fa_id: 51_509_266)
expect(post1).to_not be_nil
expect(post1.creator).to eq(user.call)
expect(post1.creator).to eq(find_creator.call)
expect(post2).to_not be_nil
expect(post2.creator).to be_nil
expect(post2.title).to be_nil
expect(post3).to_not be_nil
expect(post3.creator).to eq(user.call)
expect(post3.creator).to eq(find_creator.call)
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanPostJob)).to match(
[
@@ -322,9 +335,11 @@ describe Domain::Fa::Job::BrowsePageJob do
context "and post page scanned" do
before do
found_post = post.call
found_post.file_url_str = "http://www.example.com/img.jpg"
found_post.save!
post = find_post.call
post.scanned_at = 1.hour.ago
file = post.build_file(url_str: "http://www.example.com/img.jpg")
file.save!
post.save!
perform_now({})
end
@@ -336,12 +351,10 @@ describe Domain::Fa::Job::BrowsePageJob do
context "and post file scanned" do
before do
found_post = post.call
found_post.file_url_str = "http://www.example.com/foo.txt"
file = build(:http_log_entry, uri: found_post.file_uri)
file.save!
found_post.file = file
found_post.save!
post = find_post.call
create(:domain_post_file, post: post)
post.scanned_at = 1.hour.ago
post.save!
perform_now({})
end
@@ -353,7 +366,7 @@ describe Domain::Fa::Job::BrowsePageJob do
context "and user gallery already scanned" do
before do
creator = user.call
creator = find_creator.call
creator.scanned_gallery_at = 1.hour.ago
creator.save!
perform_now({})
@@ -367,7 +380,7 @@ describe Domain::Fa::Job::BrowsePageJob do
context "and user page already scanned" do
before do
creator = user.call
creator = find_creator.call
creator.scanned_page_at = 1.hour.ago
creator.save!
perform_now({})

View File

@@ -14,13 +14,15 @@ describe Domain::Fa::Job::FavsJob do
end
shared_context "user exists" do
let!(:user) { Domain::Fa::User.create!(name: "zzreg", url_name: "zzreg") }
let!(:user) do
Domain::User::FaUser.create!(name: "zzreg", url_name: "zzreg")
end
end
context "the user does not yet exist" do
it "fails the job" do
perform_now({ url_name: "zzreg" }, should_raise: /user does not exist/)
expect(Domain::Fa::User.find_by url_name: "zzreg").to be_nil
expect(Domain::User::FaUser.find_by url_name: "zzreg").to be_nil
end
it "enqueues a page scan job" do
perform_now({ url_name: "zzreg" }, should_raise: true)
@@ -32,7 +34,7 @@ describe Domain::Fa::Job::FavsJob do
it "does not create any new posts" do
expect do
perform_now({ url_name: "zzreg" }, should_raise: true)
end.not_to change(Domain::Fa::Post, :count)
end.not_to change(Domain::Post::FaPost, :count)
end
end
@@ -53,7 +55,7 @@ describe Domain::Fa::Job::FavsJob do
it "records no favs for the user" do
perform_now({ url_name: "zzreg" })
expect(user.fav_posts).to eq([])
expect(user.faved_posts).to eq([])
end
end
@@ -109,21 +111,21 @@ describe Domain::Fa::Job::FavsJob do
it "records favs for the user" do
expect do perform_now({ url_name: "zzreg" }) end.to change(
Domain::Fa::Post,
Domain::Post::FaPost,
:count,
).by(5)
posts = Domain::Fa::Post.where(fa_id: fa_ids)
expect(user.active_fav_posts).to match_array(posts)
posts = Domain::Post::FaPost.where(fa_id: fa_ids)
expect(user.faved_posts).to match_array(posts)
end
it "creates missing users" do
expect(Domain::Fa::User.find_by(url_name: "sepulte")).to be_nil
expect(Domain::User::FaUser.find_by(url_name: "sepulte")).to be_nil
expect do perform_now({ url_name: "zzreg" }) end.to change(
Domain::Fa::User,
Domain::User::FaUser,
:count,
).by(5)
post = Domain::Fa::Post.find_by(fa_id: 52_106_426)
post = Domain::Post::FaPost.find_by(fa_id: 52_106_426)
expect(post).not_to be_nil
expect(post.creator.url_name).to eq("sepulte")
end
@@ -136,29 +138,29 @@ describe Domain::Fa::Job::FavsJob do
it "enqueues post scans" do
expect do perform_now({ url_name: "zzreg" }) end.to change(
Domain::Fa::Post,
Domain::Post::FaPost,
:count,
).by(5)
expect(SpecUtil.enqueued_job_args(Domain::Fa::Job::ScanPostJob)).to match(
array_including(
{
post: Domain::Fa::Post.find_by(fa_id: 52_106_426),
post: Domain::Post::FaPost.find_by(fa_id: 52_106_426),
caused_by_entry: @log_entries[0],
},
{
post: Domain::Fa::Post.find_by(fa_id: 36_755_337),
post: Domain::Post::FaPost.find_by(fa_id: 36_755_337),
caused_by_entry: @log_entries[0],
},
{
post: Domain::Fa::Post.find_by(fa_id: 40_769_488),
post: Domain::Post::FaPost.find_by(fa_id: 40_769_488),
caused_by_entry: @log_entries[0],
},
{
post: Domain::Fa::Post.find_by(fa_id: 20_808_448),
post: Domain::Post::FaPost.find_by(fa_id: 20_808_448),
caused_by_entry: @log_entries[0],
},
{
post: Domain::Fa::Post.find_by(fa_id: 20_585_829),
post: Domain::Post::FaPost.find_by(fa_id: 20_585_829),
caused_by_entry: @log_entries[0],
},
),
@@ -166,35 +168,26 @@ describe Domain::Fa::Job::FavsJob do
end
context "the user model already has favs recorded" do
let(:old_post) { Domain::Fa::Post.create(fa_id: 12_345, creator: user) }
before { user.fav_posts << old_post }
it "removes favs no longer present" do
perform_now({ url_name: "zzreg" })
user.reload
expect(user.active_fav_posts).not_to include(old_post)
end
it "adds favs newly present" do
perform_now({ url_name: "zzreg" })
posts = Domain::Fa::Post.where(fa_id: fa_ids)
expect(user.active_fav_posts).to match_array(posts)
posts = Domain::Post::FaPost.where(fa_id: fa_ids)
expect(user.faved_posts).to match_array(posts)
end
it "creates new FA post models and enqueues scans" do
p1 = Domain::Fa::Post.create!(fa_id: fa_ids[0], creator: user)
p1 = Domain::Post::FaPost.create!(fa_id: fa_ids[0], creator: user)
expect do perform_now({ url_name: "zzreg" }) end.to change(
Domain::Fa::Post,
Domain::Post::FaPost,
:count,
).by(4)
user.reload
expect(user.active_fav_posts).to match_array(
Domain::Fa::Post.where(fa_id: fa_ids),
expect(user.faved_posts).to match_array(
Domain::Post::FaPost.where(fa_id: fa_ids),
)
p1.reload
expect(p1.faved_by).to eq([user])
expect(p1.faving_users).to eq([user])
end
end
@@ -213,7 +206,7 @@ describe Domain::Fa::Job::FavsJob do
)
perform_now({ url_name: "zzreg" })
user.reload
expect(user.fav_posts.count).to eq(5)
expect(user.faved_posts.count).to eq(5)
expect(user.scanned_favs_at).to be_within(1.second).of(Time.now)
end
@@ -224,7 +217,7 @@ describe Domain::Fa::Job::FavsJob do
)
perform_now({ url_name: "zzreg", full_scan: true })
user.reload
expect(user.fav_posts.count).to eq(5)
expect(user.faved_posts.count).to eq(5)
expect(user.scanned_favs_at).to be_within(1.second).of(Time.now)
end
end
@@ -272,40 +265,40 @@ describe Domain::Fa::Job::FavsJob do
before do
# Create some existing favs that would be on page 2 and 3
existing_posts =
Domain::Fa::Post.create!(
Domain::Post::FaPost.create!(
[
{ fa_id: fa_ids_page2[0], creator: user },
{ fa_id: fa_ids_page2[1], creator: user },
{ fa_id: fa_ids_page3[0], creator: user },
],
)
user.fav_posts << existing_posts
user.faved_posts << existing_posts
user.update!(scanned_favs_at: 2.years.ago)
end
it "stops scanning when no new favs are found and adds posts from scanned pages" do
# Should only create posts from page 1 since those are the only new ones
expect do perform_now({ url_name: "zzreg" }) end.to change(
Domain::Fa::Post,
Domain::Post::FaPost,
:count,
).by(2)
# Should have added the new posts from page 1 to user's favs
user.reload
expect(
user.active_fav_posts.where(fa_id: fa_ids_page1).pluck(:fa_id),
).to eq(fa_ids_page1)
user.faved_posts.where(fa_id: fa_ids_page1).pluck(:fa_id),
).to match_array(fa_ids_page1)
# Should still have the existing favs
expect(
user
.active_fav_posts
.faved_posts
.where(fa_id: fa_ids_page2 + fa_ids_page3)
.pluck(:fa_id),
).to eq(fa_ids_page2 + fa_ids_page3)
).to match_array(fa_ids_page2 + fa_ids_page3)
# Should have updated scanned_favs_at
expect(user.scanned_favs_at).to be_within(1.second).of(Time.now)
expect(user.scanned_favs_at).to be_within(1.second).of(Time.current)
end
end
end
@@ -338,11 +331,11 @@ describe Domain::Fa::Job::FavsJob do
it "records favs for the user" do
expect do perform_now({ url_name: "zzreg" }) end.to change(
Domain::Fa::Post,
Domain::Post::FaPost,
:count,
).by(85)
expect(user.fav_posts.count).to eq(85)
expect(user.faved_posts.count).to eq(85)
end
end
end

View File

@@ -1,230 +0,0 @@
# typed: false
require "rails_helper"
describe Domain::Fa::Job::HomePageJob do
let(:default_args) { { continue_for: 4 } }
let(:http_client_mock) { instance_double("::Scraper::HttpClient") }
before { Scraper::ClientFactory.http_client_mock = http_client_mock }
shared_context "user and post getters" do
let(:user) do
proc { Domain::Fa::User.find_by(url_name: "lemontastictobster") }
end
let(:post) { proc { Domain::Fa::Post.find_by(fa_id: 52_807_274) } }
before do
expect(post.call).to be_nil
expect(user.call).to be_nil
end
end
shared_context "create user and post" do
before do
creator =
Domain::Fa::User.create!(
{ name: "LemontasticTobster", url_name: "lemontastictobster" },
)
Domain::Fa::Post.create!({ fa_id: 52_807_274, creator: creator })
end
end
shared_examples "enqueue post scan" do |expect_to_enqueue|
if expect_to_enqueue
it "enqueues post scan job" do
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanPostJob)).to match(
[
including(
args: [{ post: post.call, caused_by_entry: log_entries[0] }],
),
],
)
end
end
unless expect_to_enqueue
it "does not enqueue post scan job" do
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanPostJob)).to eq([])
end
end
end
shared_examples "enqueue file scan" do |expect_to_enqueue|
if expect_to_enqueue
it "enqueues file scan job" do
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanFileJob)).to match(
[
including(
args: [{ post: post.call, caused_by_entry: log_entries[0] }],
),
],
)
end
end
unless expect_to_enqueue
it "does not enqueue post scan job" do
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanFileJob)).to eq([])
end
end
end
shared_examples "enqueue user page scan" do |expect_to_enqueue|
if expect_to_enqueue
it "enqueues user page job" do
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserPageJob)).to match(
[
including(
args: [{ user: user.call, caused_by_entry: log_entries[0] }],
),
],
)
end
end
unless expect_to_enqueue
it "does not enqueue user page job" do
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserPageJob)).to eq([])
end
end
end
shared_examples "enqueue user gallery scan" do |expect_to_enqueue|
if expect_to_enqueue
it "enqueues user gallery job" do
expect(
SpecUtil.enqueued_jobs(Domain::Fa::Job::UserGalleryJob),
).to match(
[
including(
args: [{ user: user.call, caused_by_entry: log_entries[0] }],
),
],
)
end
end
unless expect_to_enqueue
it "does not enqueue user gallery job" do
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserGalleryJob)).to eq(
[],
)
end
end
end
it "enqueues one" do
expect do
ret = described_class.perform_later({})
expect(ret).not_to be(Exception)
end.to change(GoodJob::Job, :count).by(1)
end
it "does not enqueue more than one" do
expect do
described_class.perform_later({})
described_class.perform_later({})
end.to change(GoodJob::Job, :count).by(1)
end
it "can be enqueued in a bulk GoodJob batch" do
expect do
GoodJob::Bulk.enqueue do
described_class.perform_later({})
described_class.perform_later({})
end
end.to change(GoodJob::Job, :count).by(1)
end
context "with one unseen post" do
include_context "user and post getters"
let! :log_entries do
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
uri: "https://www.furaffinity.net/",
status_code: 200,
content_type: "text/html",
contents:
SpecUtil.read_fixture_file("domain/fa/job/home_page.html"),
caused_by_entry_idx: nil,
},
],
)
end
it "creates a new post" do
expect do perform_now(default_args) end.to change(
Domain::Fa::Post,
:count,
).by(1)
end
it "creates a new user" do
expect do perform_now(default_args) end.to change(
Domain::Fa::User,
:count,
).by(1)
end
it "creates a post with the right attributes" do
perform_now(default_args)
expect(post.call.state).to eq("ok")
expect(post.call.title).to eq("FREE OC Animatronic Raffle! (READ DESC)")
expect(post.call.creator).to eq(user.call)
end
it "creates a user with the right attributes" do
perform_now(default_args)
expect(user.call.name).to eq("LemontasticTobster")
end
it "enqueues post scan job" do
perform_now(default_args)
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanPostJob).length).to eq(
5,
)
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanPostJob)).to match(
[
including(args: [{ post: post, caused_by_entry: log_entries[0] }]),
including(
args: [{ fa_id: 52_807_273, caused_by_entry: log_entries[0] }],
),
including(
args: [{ fa_id: 52_807_272, caused_by_entry: log_entries[0] }],
),
including(
args: [{ fa_id: 52_807_271, caused_by_entry: log_entries[0] }],
),
including(
args: [{ fa_id: 52_807_270, caused_by_entry: log_entries[0] }],
),
],
)
end
it "does not enqueue if a post already exists" do
Domain::Fa::Post.create!(
{ fa_id: 52_807_272, creator: create(:domain_fa_user) },
)
perform_now(default_args)
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanPostJob).length).to eq(
4,
)
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanPostJob)).to match(
[
including(args: [{ post: post, caused_by_entry: log_entries[0] }]),
including(
args: [{ fa_id: 52_807_273, caused_by_entry: log_entries[0] }],
),
# note that 52807272 not enqueued
including(
args: [{ fa_id: 52_807_271, caused_by_entry: log_entries[0] }],
),
including(
args: [{ fa_id: 52_807_270, caused_by_entry: log_entries[0] }],
),
],
)
end
end
end

View File

@@ -17,7 +17,7 @@ describe Domain::Fa::Job::ScanPostJob do
end
shared_context "post 59_714_213 already exists" do
let!(:post) { Domain::Fa::Post.create!(fa_id: 59_714_213) }
let!(:post) { Domain::Post::FaPost.create!(fa_id: 59_714_213) }
end
shared_context "post with no keywords fixture" do
@@ -56,13 +56,13 @@ describe Domain::Fa::Job::ScanPostJob do
shared_context "creator is creeps" do
let!(:creator) do
Domain::Fa::User.create!(name: "-creeps", url_name: "-creeps")
Domain::User::FaUser.create!(name: "-creeps", url_name: "-creeps")
end
end
shared_context "creator is lizardlars" do
let!(:creator) do
Domain::Fa::User.create!(name: "lizardlars", url_name: "lizardlars")
Domain::User::FaUser.create!(name: "lizardlars", url_name: "lizardlars")
end
end
@@ -74,7 +74,7 @@ describe Domain::Fa::Job::ScanPostJob do
it "creates a new post with correct attributes" do
perform_now({ fa_id: 59_714_213 })
post = Domain::Fa::Post.find_by(fa_id: 59_714_213)
post = Domain::Post::FaPost.find_by(fa_id: 59_714_213)
expect(post).not_to be_nil
expect(post.state).to eq("ok")
expect(post.last_submission_page).to eq(@log_entries.first)
@@ -90,7 +90,7 @@ describe Domain::Fa::Job::ScanPostJob do
expect(post.num_favorites).to eq(0)
expect(post.num_comments).to eq(0)
expect(post.num_views).to eq(3)
expect(post.file_uri.to_s).to eq(
expect(post.file.url_str).to eq(
"https://d.furaffinity.net/art/-creeps/1738343855/1738343855.-creeps_slayerlr.jpg",
)
expect(post.posted_at).to be_within(1.second).of(
@@ -111,7 +111,11 @@ describe Domain::Fa::Job::ScanPostJob do
end.to change(post, :title)
.from("old title")
.to("Slayer")
.and(change(post, :scanned?).from(false).to(true))
.and(
change(post, :scanned_at).from(nil).to(
be_within(1.second).of(DateTime.current),
),
)
.and(not_change(post, :state))
end
end
@@ -119,9 +123,11 @@ describe Domain::Fa::Job::ScanPostJob do
it "enqueues a file scan job" do
perform_now({ fa_id: 59_714_213 })
post = Domain::Fa::Post.find_by(fa_id: 59_714_213)
post = Domain::Post::FaPost.find_by(fa_id: 59_714_213)
expect(SpecUtil.enqueued_job_args(Domain::Fa::Job::ScanFileJob)).to match(
array_including({ post: post, caused_by_entry: @log_entries.first }),
array_including(
{ post_file: post.file, caused_by_entry: @log_entries.first },
),
)
end
end
@@ -131,39 +137,17 @@ describe Domain::Fa::Job::ScanPostJob do
include_context "creator is creeps"
let(:client_mock_config) { [] }
before do
post.file_url_str = "https://example.com/image.jpg"
post.build_file(url_str: "https://example.com/image.jpg")
post.scanned_at = 1.day.ago
post.file.save!
post.save!
expect(post.scanned?).to be(true)
end
it "does not perform a new scan" do
expect do
perform_now({ post: post })
post.reload
end.to not_change(post, :scanned?)
end
context "and force_scan is true" do
include_context "post with no keywords fixture"
it "performs a new scan" do
old_scanned_at = 1.day.ago
post.update!(state: "ok", scanned_at: old_scanned_at)
expect do
perform_now({ post: post, force_scan: true })
post.reload
end.to change(post, :file_url_str)
.from("https://example.com/image.jpg")
.to(
"https://d.furaffinity.net/art/-creeps/1738343855/1738343855.-creeps_slayerlr.jpg",
)
.and(
change(post, :scanned_at).from(
be_within(1.second).of(old_scanned_at),
).to(be_within(1.second).of(DateTime.current)),
)
end
end.to not_change(post, :scanned_at)
end
end
@@ -174,7 +158,7 @@ describe Domain::Fa::Job::ScanPostJob do
it "updates the post attributes" do
fa_id = 59_723_907
perform_now({ fa_id: fa_id })
post = Domain::Fa::Post.find_by(fa_id: fa_id)
post = Domain::Post::FaPost.find_by(fa_id: fa_id)
expect(post.state).to eq("ok")
expect(post.title).to eq(
"Oh No! Stuck With Her Step Bro | Chapter 2 | Page 8",

View File

@@ -1,10 +1,17 @@
# typed: false
require "rails_helper"
describe Domain::Fa::Job::UserAvatarJob do
let(:user) { create(:domain_fa_user, url_name: "meesh", name: "Meesh") }
let(:user) { create(:domain_user_fa_user, url_name: "meesh", name: "Meesh") }
let(:http_client_mock) { instance_double("::Scraper::HttpClient") }
before { Scraper::ClientFactory.http_client_mock = http_client_mock }
let!(:avatar) do
create(
:domain_user_avatar,
log_entry: nil,
user: create(:domain_user_fa_user, url_name: "meesh", name: "Meesh"),
)
end
let(:avatar_fixture_file) do
SpecUtil.read_fixture_file(
@@ -75,53 +82,41 @@ describe Domain::Fa::Job::UserAvatarJob do
end
end
it "throws when the user does not exist" do
perform_now({ url_name: "meesh" }, should_raise: /user must exist/)
end
context "the user model exists" do
context "the server response is 200" do
include_context "avatar file found"
it "sets the file and the right state" do
perform_now({ user: user })
user.reload
avatar = user.avatar
perform_now({ avatar: })
avatar.reload
expect(avatar).not_to be_nil
expect(avatar.state).to eq("ok")
expect(avatar.log_entry).to eq(@log_entries[0])
expect(HexUtil.bin2hex avatar.file_sha256).to eq(
"ebbafc07555df0a0656a9b32ec9b95723c62c5246937dc8434924d9241d1b570",
)
expect(avatar.downloaded_file_at).to be_within(1.seconds).of(Time.now)
expect(avatar.downloaded_at).to be_within(1.seconds).of(Time.now)
end
context "the avatar model has a file already" do
let(:avatar) { user.ensure_avatar! }
it "sets file to the new file" do
avatar.state = :ok
avatar.file =
first_log_entry =
create(
:blob_entry,
content: avatar_fixture_file_2,
content_type: "image/gif",
:http_log_entry,
response:
create(
:blob_entry,
content: avatar_fixture_file_2,
content_type: "image/gif",
),
)
first_log_entry = create(:http_log_entry, response: avatar.file)
avatar.log_entry = first_log_entry
avatar.save!
perform_now({ user: user })
perform_now({ avatar:, force_scan: true })
avatar.reload
expect(avatar.state).to eq("ok")
expect(avatar.state_detail["log_entries"]).to eq(
[first_log_entry.id, @log_entries[0].id],
)
expect(avatar.log_entry).to eq(@log_entries[0])
expect(HexUtil.bin2hex avatar.file_sha256).to eq(
"ebbafc07555df0a0656a9b32ec9b95723c62c5246937dc8434924d9241d1b570",
)
expect(avatar.downloaded_file_at).to be_within(1.seconds).of(Time.now)
expect(avatar.downloaded_at).to be_within(1.seconds).of(Time.now)
end
end
end
@@ -130,43 +125,39 @@ describe Domain::Fa::Job::UserAvatarJob do
include_context "avatar file not found"
it "sets the file and the right state" do
perform_now({ user: user })
user.reload
avatar = user.avatar
perform_now({ avatar: })
avatar.reload
expect(avatar).not_to be_nil
expect(avatar.log_entry).to eq(@log_entries[0])
expect(avatar.state_detail["log_entries"]).to eq([avatar.log_entry.id])
expect(avatar.file).to be_present
expect(HexUtil.bin2hex avatar.file_sha256).to eq(
"9080fd4e7e23920eb2dccfe2d86903fc3e748eebb2e5aa8c657bbf6f3d941cdc",
)
expect(avatar.downloaded_file_at).to be_within(1.seconds).of(Time.now)
expect(avatar.state).to eq("file_not_found")
expect(avatar.last_log_entry).to eq(@log_entries[0])
expect(avatar.log_entry).to be_nil
expect(avatar.downloaded_at).to be_within(1.seconds).of(Time.now)
expect(avatar.state).to eq("file_404")
end
context "a previous successful avatar was downloaded" do
let(:avatar) { user.ensure_avatar! }
it "does not overwrite the file" do
avatar.state = :ok
avatar.file =
log_entry =
create(
:blob_entry,
content: avatar_fixture_file,
content_type: "image/gif",
:http_log_entry,
response:
create(
:blob_entry,
content: avatar_fixture_file,
content_type: "image/gif",
),
)
log_entry = create(:http_log_entry, response: avatar.file)
avatar.log_entry = log_entry
avatar.last_log_entry = log_entry
avatar.save!
perform_now({ user: user })
perform_now({ avatar:, force_scan: true })
avatar.reload
expect(avatar.state).to eq("file_not_found")
expect(avatar.file).to eq(log_entry.response)
expect(avatar.log_entry).to eq(@log_entries[0])
expect(avatar.state_detail["log_entries"]).to eq(
[log_entry.id, @log_entries[0].id],
)
expect(avatar.state).to eq("file_404")
expect(avatar.log_entry).to eq(log_entry)
expect(avatar.last_log_entry).to eq(@log_entries[0])
end
end
end
@@ -175,16 +166,15 @@ describe Domain::Fa::Job::UserAvatarJob do
include_context "avatar file is a server error"
it "has a file and the right state" do
expect { perform_now({ user: user }) }.to raise_error(
expect { perform_now({ avatar: }) }.to raise_error(
/http 500, log entry.+/,
)
avatar = user.avatar.reload
avatar.reload
expect(avatar).not_to be_nil
expect(avatar.log_entry).to eq(@log_entries[0])
expect(avatar.state_detail["log_entries"]).to eq([avatar.log_entry.id])
expect(avatar.file).to be_nil
expect(avatar.state).to eq("download_error")
expect(avatar.state_detail["download_error"]).to eq("http status 500")
expect(avatar.log_entry).to be_nil
expect(avatar.last_log_entry).to eq(@log_entries[0])
expect(avatar.state).to eq("http_error")
expect(avatar.error_message).to eq("http status 500")
end
end
end

View File

@@ -41,14 +41,16 @@ describe Domain::Fa::Job::UserFollowsJob do
end
shared_context "zzreg user exists" do
let!(:user) { Domain::Fa::User.create!(name: "Zzreg", url_name: "zzreg") }
let!(:user) do
Domain::User::FaUser.create!(name: "Zzreg", url_name: "zzreg")
end
end
shared_examples "zzreg follow creation" do
it "creates the right follows" do
perform_now({ user: user })
user.reload
expect(user.follows.length).to eq(FOLLOWS_ON_ZZREG_PAGE)
expect(user.followed_users.length).to eq(FOLLOWS_ON_ZZREG_PAGE)
expect(user.scanned_follows_at).to_not be_nil
end
end
@@ -56,14 +58,14 @@ describe Domain::Fa::Job::UserFollowsJob do
context "performed with a user that doesn't exist yet" do
it "creates the scanned user and followed users" do
expect do perform_now({ url_name: "zzreg" }) end.to change(
Domain::Fa::User,
Domain::User::FaUser,
:count,
).by(FOLLOWS_ON_ZZREG_PAGE + 1)
end
it "enqueues a user page job" do
perform_now({ url_name: "zzreg" })
zzreg = Domain::Fa::User.find_by(url_name: "zzreg")
zzreg = Domain::User::FaUser.find_by!(url_name: "zzreg")
expect(zzreg).to_not be_nil
expect(
SpecUtil
@@ -78,14 +80,14 @@ describe Domain::Fa::Job::UserFollowsJob do
it "can be performed by url_name" do
expect do perform_now({ url_name: "zzreg" }) end.to change(
Domain::Fa::User,
Domain::User::FaUser,
:count,
).by(FOLLOWS_ON_ZZREG_PAGE)
end
it "can be performed by direct post object" do
expect do perform_now({ user: user }) end.to change(
Domain::Fa::User,
Domain::User::FaUser,
:count,
).by(FOLLOWS_ON_ZZREG_PAGE)
end
@@ -112,7 +114,7 @@ describe Domain::Fa::Job::UserFollowsJob do
include_context "zzreg user exists"
let!(:followed) do
Domain::Fa::User.create!(
Domain::User::FaUser.create!(
# name in html is Agi_Type01, it's intentionally changed here
name: "AGI_Type01",
url_name: "agitype01",
@@ -124,7 +126,7 @@ describe Domain::Fa::Job::UserFollowsJob do
original_updated_at = followed.updated_at
expect do perform_now({ user: user }) end.to change(
Domain::Fa::User,
Domain::User::FaUser,
:count,
).by(FOLLOWS_ON_ZZREG_PAGE - 1)
@@ -134,20 +136,16 @@ describe Domain::Fa::Job::UserFollowsJob do
# the upsert should only create new user models, not modify
# existing ones, even for casing changes
expect(followed.name).to eq("AGI_Type01")
expect(followed.state_detail).to eq({})
# newly created users should have the right 'first_seen_entry' id
accelo_user = Domain::Fa::User.find_by url_name: "accelo"
accelo_user = Domain::User::FaUser.find_by!(url_name: "accelo")
expect(accelo_user).to_not be_nil
expect(accelo_user.state_detail["first_seen_entry"]).to eq(
@zzreg_mock_log_entries[0].id,
)
end
it "newly inserted users have a name associated with them" do
perform_now({ user: user })
user = Domain::Fa::User.find_by(url_name: "accelo")
user = Domain::User::FaUser.find_by!(url_name: "accelo")
expect(user).to_not be_nil
expect(user.name).to eq("Accelo")
end
@@ -160,11 +158,11 @@ describe Domain::Fa::Job::UserFollowsJob do
# this is a user that is not in the watchlist
let!(:smaz_user) do
Domain::Fa::User.create!(name: "Smaz", url_name: "smaz")
Domain::User::FaUser.create!(name: "Smaz", url_name: "smaz")
end
# this is a user already in the watchlist
let!(:agi_type01_user) do
Domain::Fa::User.create!(
Domain::User::FaUser.create!(
name: "Agi_Type01",
url_name: "agitype01",
num_submissions: 10,
@@ -172,37 +170,36 @@ describe Domain::Fa::Job::UserFollowsJob do
end
it "correctly adds and removes follows" do
follow_1 = Domain::Fa::Follow.create!(follower: user, followed: smaz_user)
follow_2 =
Domain::Fa::Follow.create!(follower: user, followed: agi_type01_user)
expect(user.follows.length).to eq(2)
Domain::UserUserFollow.create!(from: user, to: smaz_user)
Domain::UserUserFollow.create!(from: user, to: agi_type01_user)
expect(user.followed_users.length).to eq(2)
perform_now({ user: user })
user.reload
expect(user.follows.length).to eq(FOLLOWS_ON_ZZREG_PAGE)
expect(user.follows).to_not include(smaz_user)
expect(user.follows).to include(agi_type01_user)
expect(user.followed_users.length).to eq(11)
expect(user.followed_users).to include(smaz_user)
expect(user.followed_users).to include(agi_type01_user)
# correct user page jobs should be enqueued
expect(
SpecUtil
.enqueued_jobs(Domain::Fa::Job::UserPageJob)
.find { |job| job[:args][0][:user] == smaz_user },
).to be_nil
.enqueued_job_args(Domain::Fa::Job::UserPageJob)
.map { |args| args[:user] },
).not_to include(smaz_user)
expect(
SpecUtil
.enqueued_jobs(Domain::Fa::Job::UserPageJob)
.find { |job| job[:args][0][:user] == agi_type01_user },
).to be_nil
.enqueued_job_args(Domain::Fa::Job::UserPageJob)
.map { |args| args[:user] },
).not_to include(agi_type01_user)
# newly created users are enqueued by url name
accelo = Domain::User::FaUser.find_by!(url_name: "accelo")
expect(
SpecUtil
.enqueued_jobs(Domain::Fa::Job::UserPageJob)
.find { |job| job[:args][0][:url_name] == "accelo" },
).to_not be_nil
.enqueued_job_args(Domain::Fa::Job::UserPageJob)
.map { |args| args[:user] },
).to include(accelo)
end
it "does not enqueue a job if the user is not new" do
@@ -211,13 +208,5 @@ describe Domain::Fa::Job::UserFollowsJob do
FOLLOWS_ON_ZZREG_PAGE - 1,
)
end
it "does not enqueue jobs already in the queue" do
Domain::Fa::Job::UserPageJob.perform_later({ url_name: "accelo" })
perform_now({ user: user })
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserPageJob).length).to eq(
FOLLOWS_ON_ZZREG_PAGE - 1,
)
end
end
end

View File

@@ -12,6 +12,10 @@ describe Domain::Fa::Job::UserIncrementalJob do
)
end
let!(:meesh) do
Domain::User::FaUser.create!(name: "meesh", url_name: "meesh")
end
context "scanning a normal user" do
let(:client_mock_config) do
[
@@ -26,39 +30,68 @@ describe Domain::Fa::Job::UserIncrementalJob do
end
it "updates the user model" do
perform_now({ url_name: "meesh" })
meesh = Domain::Fa::User.find_by(url_name: "meesh")
expect(meesh).to_not be_nil
expect(
SpecUtil.enqueued_jobs(Domain::Fa::Job::UserAvatarJob).length,
).to be(1)
perform_now({ user: meesh })
meesh.reload
expect(meesh.name).to eq("Meesh")
end
it "enqueues avatar jobs" do
perform_now({ user: meesh })
expect(
SpecUtil
.enqueued_job_args(Domain::Fa::Job::UserAvatarJob)
.map { |args| args[:avatar] },
).to eq([meesh.avatar])
end
it "enqueues page scan jobs" do
perform_now({ user: meesh })
# 12 new watchers, 12 new watched
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserPageJob).length).to be(
24,
)
end
it "enqueues follows scan jobs" do
perform_now({ user: meesh })
# new watch in last position, so enqueue scan
expect(
SpecUtil.enqueued_jobs(Domain::Fa::Job::UserFollowsJob).length,
).to be(1)
SpecUtil
.enqueued_job_args(Domain::Fa::Job::UserFollowsJob)
.map { |args| args[:user] },
).to eq([meesh])
expect(meesh.scanned_follows_at).to be_nil
end
it "enqueues post scan jobs" do
perform_now({ user: meesh })
# 20 newly seen faved posts
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanPostJob).length).to be(
20,
)
end
it "enqueues favs scan jobs" do
# new fav in last position, so should enqueue scan
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::FavsJob).length).to be(1)
perform_now({ user: meesh })
expect(
SpecUtil
.enqueued_job_args(Domain::Fa::Job::FavsJob)
.map { |args| args[:user] },
).to eq([meesh])
expect(meesh.scanned_favs_at).to be_nil
end
it "incrementally adds new watchers and favs" do
meesh = Domain::Fa::User.create!(name: "Meesh", url_name: "meesh")
celeste = Domain::Fa::User.create!(name: "Celeste~", url_name: "celeste~")
post_51594821 = Domain::Fa::Post.create!(fa_id: 51_594_821)
celeste =
Domain::User::FaUser.create!(name: "Celeste~", url_name: "celeste~")
post_51594821 = Domain::Post::FaPost.create!(fa_id: 51_594_821)
meesh.follows << celeste
meesh.fav_posts << post_51594821
meesh.scanned_favs_at = 1.day.ago
meesh.scanned_follows_at = 1.day.ago
meesh.followed_users << celeste
meesh.faved_posts << post_51594821
meesh.save!
perform_now({ url_name: "meesh" })
meesh.reload

View File

@@ -27,14 +27,17 @@ describe Domain::Fa::Job::UserPageJob do
it "succeeds" do
perform_now({ url_name: "meesh" })
user = Domain::Fa::User.find_by(url_name: "meesh")
user = Domain::User::FaUser.find_by(url_name: "meesh")
expect(user).to_not be_nil
expect(user.avatar.file_uri.to_s).to eq(
avatar = user.avatar
expect(avatar).to_not be_nil
expect(avatar.url_str).to eq(
"https://a.furaffinity.net/1635789297/meesh.gif",
)
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserAvatarJob)).to match(
[including(args: [{ user: user, caused_by_entry: @log_entries[0] }])],
)
expect(avatar.state).to eq("pending")
expect(
SpecUtil.enqueued_job_args(Domain::Fa::Job::UserAvatarJob),
).to match([{ avatar: avatar, caused_by_entry: @log_entries[0] }])
end
end
@@ -53,9 +56,9 @@ describe Domain::Fa::Job::UserPageJob do
it "records the right fav count" do
perform_now({ url_name: "marsdust" })
user = Domain::Fa::User.find_by(url_name: "marsdust")
user = Domain::User::FaUser.find_by(url_name: "marsdust")
expect(user).to_not be_nil
expect(user.avatar.file_uri.to_s).to eq(
expect(user.avatar.url_str).to eq(
"https://a.furaffinity.net/1424255659/marsdust.gif",
)
expect(user.num_favorites).to eq(0)

View File

@@ -5,7 +5,7 @@ require_relative "./shared_examples"
RSpec.describe Domain::MigrateToDomain do
include_context "migrate_to_domain"
def expect_users_match(old_user, new_user)
def expect_e621_users_match(old_user, new_user)
expect(new_user).to have_attributes(
e621_id: old_user.e621_user_id,
name: old_user.name,
@@ -16,7 +16,7 @@ RSpec.describe Domain::MigrateToDomain do
)
end
def expect_posts_match(old_post, new_post)
def expect_e621_posts_match(old_post, new_post)
expect(new_post).to have_attributes(
state: old_post.state,
e621_id: old_post.e621_id,
@@ -59,7 +59,7 @@ RSpec.describe Domain::MigrateToDomain do
).by(1)
new_user = Domain::User::E621User.find_by(e621_id: old_user.e621_user_id)
expect_users_match(old_user, new_user)
expect_e621_users_match(old_user, new_user)
end
it "skips users that already exist in the new table" do
@@ -105,7 +105,7 @@ RSpec.describe Domain::MigrateToDomain do
([old_user] + additional_users).each do |old_user|
new_user =
Domain::User::E621User.find_by(e621_id: old_user.e621_user_id)
expect_users_match(old_user, new_user)
expect_e621_users_match(old_user, new_user)
end
end
end
@@ -143,7 +143,7 @@ RSpec.describe Domain::MigrateToDomain do
).by(1)
new_post = Domain::Post::E621Post.find_by(e621_id: old_post.e621_id)
expect_posts_match(old_post, new_post)
expect_e621_posts_match(old_post, new_post)
end
it "skips posts that already exist in the new table" do
@@ -203,7 +203,7 @@ RSpec.describe Domain::MigrateToDomain do
# Verify all posts were migrated correctly
([old_post] + additional_posts).each do |old_post|
new_post = Domain::Post::E621Post.find_by(e621_id: old_post.e621_id)
expect_posts_match(old_post, new_post)
expect_e621_posts_match(old_post, new_post)
end
end
end

View File

@@ -19,16 +19,41 @@ RSpec.describe Domain::MigrateToDomain do
num_comments_given: old_user.num_comments_given,
num_journals: old_user.num_journals,
num_favorites: old_user.num_favorites,
scanned_gallery_at: be_within(1.second).of(old_user.scanned_gallery_at),
scanned_page_at: be_within(1.second).of(old_user.scanned_page_at),
registered_at: be_within(1.second).of(old_user.registered_at),
scanned_gallery_at:
match_presence_or_be_within(1.second, of: old_user.scanned_gallery_at),
scanned_page_at:
match_presence_or_be_within(1.second, of: old_user.scanned_page_at),
scanned_follows_at:
match_presence_or_be_within(1.second, of: old_user.scanned_follows_at),
scanned_favs_at:
match_presence_or_be_within(1.second, of: old_user.scanned_favs_at),
scanned_incremental_at:
match_presence_or_be_within(
1.second,
of: old_user.scanned_incremental_at,
),
registered_at:
match_presence_or_be_within(1.second, of: old_user.registered_at),
)
if old_user.avatar.present?
old_avatar = old_user.avatar
expected_state = old_avatar.state
expected_state =
case old_avatar.state
when "ok"
old_avatar.file.present? ? "ok" : "pending"
when "download_error"
"http_error"
when "file_not_found"
"file_404"
else
old_avatar.state
end
expect(new_user.avatar).to have_attributes(
log_entry_id: old_user.avatar.log_entry_id,
url_str: old_user.avatar.file_url_str,
state: old_user.avatar.state == "ok" ? "ok" : "error",
state: expected_state,
)
if old_user.avatar.state != "ok"
expect(new_user.avatar.error_message).to eq(old_user.avatar.state)
@@ -182,6 +207,42 @@ RSpec.describe Domain::MigrateToDomain do
expect(new_user.avatar.state).to eq("ok")
end
it "handles users with avatars that have no file" do
avatar =
create(
:domain_fa_user_avatar,
user: old_user,
file: nil,
log_entry: nil,
)
old_user.avatar = avatar
old_user.save!
migrator.migrate_fa_users
new_user = Domain::User::FaUser.find_by(url_name: old_user.url_name)
expect_fa_users_match(old_user, new_user)
expect(new_user.avatar).to be_present
expect(new_user.avatar.url_str).to eq(avatar.file_url_str)
expect(new_user.avatar.state).to eq("pending")
end
it "handles users with 404 avatars" do
avatar =
create(:domain_fa_user_avatar, state: "file_not_found", user: old_user)
old_user.avatar = avatar
old_user.save!
migrator.migrate_fa_users
new_user = Domain::User::FaUser.find_by(url_name: old_user.url_name)
expect_fa_users_match(old_user, new_user)
expect(new_user.avatar).to be_present
expect(new_user.avatar.url_str).to eq(avatar.file_url_str)
expect(new_user.avatar.state).to eq("file_404")
expect(new_user.avatar.error_message).to eq("file_not_found")
end
it "handles users with errored avatars" do
avatar =
create(:domain_fa_user_avatar, state: "download_error", user: old_user)
@@ -194,7 +255,7 @@ RSpec.describe Domain::MigrateToDomain do
expect_fa_users_match(old_user, new_user)
expect(new_user.avatar).to be_present
expect(new_user.avatar.url_str).to eq(avatar.file_url_str)
expect(new_user.avatar.state).to eq("error")
expect(new_user.avatar.state).to eq("http_error")
expect(new_user.avatar.error_message).to eq("download_error")
end
end
@@ -460,9 +521,9 @@ RSpec.describe Domain::MigrateToDomain do
:count,
).by(3)
expect(new_user.following_users.count).to eq(3)
expect(new_user.followed_users.count).to eq(3)
new_user.reload
expect(new_user.following_users.pluck(:url_name)).to contain_exactly(
expect(new_user.followed_users.pluck(:url_name)).to contain_exactly(
"followeduser0",
"followeduser1",
"followeduser2",
@@ -488,7 +549,7 @@ RSpec.describe Domain::MigrateToDomain do
:count,
).by(2)
expect(new_user.following_users.count).to eq(2)
expect(new_user.followed_users.count).to eq(2)
expect(new_user.migrated_followed_users_at).to be_nil
end
end

View File

@@ -17,11 +17,27 @@ RSpec.describe Domain::MigrateToDomain do
created_at: be_within(1.second).of(old_user.created_at),
)
if old_user.avatar_log_entry.present?
if old_user.avatar_url_str.present?
expected_state =
case old_user.avatar_state
when "ok"
old_user.avatar_log_entry.nil? ? "pending" : "ok"
when "not_found"
"file_404"
when "error"
"http_error"
else
raise("Unknown avatar state: #{old_user.avatar_state}")
end
expect(new_user.avatar).to have_attributes(
log_entry_id: old_user.avatar_log_entry.id,
url_str: old_user.avatar_log_entry.uri_str,
state: old_user.avatar_log_entry.status_code == 200 ? "ok" : "error",
url_str: old_user.avatar_url_str,
state: expected_state,
downloaded_at:
match_presence_or_be_within(
1.second,
of: old_user.avatar_downloaded_at,
),
)
else
expect(new_user.avatar).to be_nil
@@ -102,6 +118,11 @@ RSpec.describe Domain::MigrateToDomain do
scanned_gallery_at: Time.current,
deep_update_log_entry: create(:http_log_entry),
shallow_update_log_entry: create(:http_log_entry),
avatar_url_str: "https://example.com/avatar.jpg",
avatar_log_entry:
create(:http_log_entry, uri_str: "https://example.com/avatar.jpg"),
avatar_state: "ok",
avatar_downloaded_at: Time.current,
)
end
@@ -163,6 +184,20 @@ RSpec.describe Domain::MigrateToDomain do
end
end
it "handles users with no avatar" do
old_user.avatar_url_str = nil
old_user.avatar_log_entry = nil
old_user.avatar_state = "ok"
old_user.avatar_downloaded_at = nil
old_user.save!
migrator.migrate_inkbunny_users
new_user = Domain::User::InkbunnyUser.find_by(ib_id: old_user.ib_user_id)
expect_inkbunny_users_match(old_user, new_user)
expect(new_user.avatar).to be_nil
end
it "handles users with avatars" do
avatar_log_entry =
create(
@@ -170,7 +205,9 @@ RSpec.describe Domain::MigrateToDomain do
uri_str: "https://example.com/avatar.jpg",
status_code: 200,
)
old_user.avatar_url_str = avatar_log_entry.uri_str
old_user.avatar_log_entry = avatar_log_entry
old_user.avatar_state = "ok"
old_user.save!
migrator.migrate_inkbunny_users
@@ -182,7 +219,7 @@ RSpec.describe Domain::MigrateToDomain do
expect(new_user.avatar.state).to eq("ok")
end
it "handles users with errored avatars" do
it "handles users with 404 avatars" do
avatar_log_entry =
create(
:http_log_entry,
@@ -190,6 +227,8 @@ RSpec.describe Domain::MigrateToDomain do
status_code: 404,
)
old_user.avatar_log_entry = avatar_log_entry
old_user.avatar_url_str = avatar_log_entry.uri_str
old_user.avatar_state = "not_found"
old_user.save!
migrator.migrate_inkbunny_users
@@ -198,7 +237,30 @@ RSpec.describe Domain::MigrateToDomain do
expect_inkbunny_users_match(old_user, new_user)
expect(new_user.avatar).to be_present
expect(new_user.avatar.url_str).to eq(avatar_log_entry.uri_str)
expect(new_user.avatar.state).to eq("error")
expect(new_user.avatar.state).to eq("file_404")
expect(new_user.avatar.error_message).to eq("not_found")
end
it "handles users with errored avatars" do
avatar_log_entry =
create(
:http_log_entry,
uri_str: "https://example.com/avatar.jpg",
status_code: 500,
)
old_user.avatar_url_str = avatar_log_entry.uri_str
old_user.avatar_log_entry = avatar_log_entry
old_user.avatar_state = "error"
old_user.save!
migrator.migrate_inkbunny_users
new_user = Domain::User::InkbunnyUser.find_by(ib_id: old_user.ib_user_id)
expect_inkbunny_users_match(old_user, new_user)
expect(new_user.avatar).to be_present
expect(new_user.avatar.url_str).to eq(avatar_log_entry.uri_str)
expect(new_user.avatar.state).to eq("http_error")
expect(new_user.avatar.error_message).to eq("error")
end
end

View File

@@ -4,4 +4,8 @@ require "rails_helper"
RSpec.shared_context "migrate_to_domain" do
# sink to /dev/null
let(:migrator) { Domain::MigrateToDomain.new(File.open("/dev/null", "w")) }
def match_presence_or_be_within(duration, of:)
of.nil? ? be_nil : be_within(duration).of(of)
end
end