add incremental user page (fa) job
This commit is contained in:
19
app/jobs/domain/fa/job/scan_post_utils.rb
Normal file
19
app/jobs/domain/fa/job/scan_post_utils.rb
Normal file
@@ -0,0 +1,19 @@
|
||||
module Domain::Fa::Job
|
||||
class ScanPostUtils
|
||||
def self.find_or_create_by_fa_ids(fa_ids, caused_by_entry: nil)
|
||||
posts = Domain::Fa::Post.where(fa_id: fa_ids).to_a
|
||||
missing = fa_ids - posts.map(&:fa_id)
|
||||
missing.each do |fa_id|
|
||||
post = Domain::Fa::Post.create!(fa_id: fa_id)
|
||||
Domain::Fa::Job::ScanPostJob.perform_later({
|
||||
post: post, caused_by_entry: caused_by_entry,
|
||||
})
|
||||
posts << post
|
||||
end
|
||||
posts = posts.index_by(&:fa_id)
|
||||
fa_ids.map do |fa_id|
|
||||
posts[fa_id]
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
86
app/jobs/domain/fa/job/scan_user_utils.rb
Normal file
86
app/jobs/domain/fa/job/scan_user_utils.rb
Normal file
@@ -0,0 +1,86 @@
|
||||
module Domain::Fa::Job
|
||||
class ScanUserUtils
|
||||
def self.check_disabled_or_not_found(user, response)
|
||||
if response.status_code != 200
|
||||
return [:fatal, {
|
||||
message: "http #{response.status_code}, log entry #{response.log_entry.id}",
|
||||
}]
|
||||
end
|
||||
|
||||
page = Domain::Fa::Parser::Page.new(response.body, require_logged_in: false)
|
||||
if page.probably_user_page?
|
||||
return [:ok, { page: page }]
|
||||
end
|
||||
|
||||
if response.body =~ /has voluntarily disabled access/
|
||||
user.state = :scan_error
|
||||
user.state_detail = {
|
||||
scan_error: "(user scan) user has disabled account, see last_user_page_id",
|
||||
last_user_page_id: response.log_entry.id,
|
||||
}
|
||||
try_name = /User "(.+)" has voluntarily disabled/.match(response.body)
|
||||
user.name ||= try_name && try_name[1] || user.url_name
|
||||
user.save!
|
||||
return [:stop, {
|
||||
message: "account disabled",
|
||||
}]
|
||||
end
|
||||
|
||||
if response.body =~ /This user cannot be found./ || response.body =~ /The page you are trying to reach is currently pending deletion/
|
||||
user.state = :scan_error
|
||||
user.state_detail = {
|
||||
scan_error: "(user scan) user was not found, see last_user_page_id",
|
||||
last_user_page_id: response.log_entry.id,
|
||||
}
|
||||
user.name ||= user.url_name
|
||||
user.save!
|
||||
return [:stop, {
|
||||
message: "account not found",
|
||||
}]
|
||||
end
|
||||
|
||||
return [:fatal, {
|
||||
message: "not a user page - log entry #{response.log_entry.id}",
|
||||
}]
|
||||
end
|
||||
|
||||
def self.update_user_fields_from_page(user, page, response)
|
||||
user_page = page.user_page
|
||||
user.name = user_page.name
|
||||
user.registered_at = user_page.registered_since
|
||||
user.num_pageviews = user_page.num_pageviews
|
||||
user.num_submissions = user_page.num_submissions
|
||||
user.num_comments_recieved = user_page.num_comments_recieved
|
||||
user.num_comments_given = user_page.num_comments_given
|
||||
user.num_journals = user_page.num_journals
|
||||
user.num_favorites = user_page.num_favorites
|
||||
user.profile_html = user_page.profile_html.encode("UTF-8", :invalid => :replace, :undef => :replace)
|
||||
user.log_entry_detail["last_user_page_id"] = response.log_entry.id
|
||||
|
||||
avatar = user.avatar_or_create
|
||||
user.avatar.file_uri = user_page.profile_thumb_url
|
||||
if user.avatar.changed?
|
||||
user.avatar.save!
|
||||
Domain::Fa::Job::UserAvatarJob.perform_later({
|
||||
user: user,
|
||||
caused_by_entry: response.log_entry,
|
||||
})
|
||||
end
|
||||
end
|
||||
|
||||
# names is an array of structs - [Struct(:name, :url_name)]
|
||||
def self.find_or_create_by_names(names, caused_by_entry: nil)
|
||||
users = Domain::Fa::User.where(url_name: names.map(&:url_name)).to_a
|
||||
missing = names.reject { |name| users.any? { |u| u.url_name == name.url_name } }
|
||||
missing.each do |name|
|
||||
user = Domain::Fa::User.create!(url_name: name.url_name, name: name.name)
|
||||
Domain::Fa::Job::UserPageJob.perform_later({
|
||||
user: user,
|
||||
caused_by_entry: caused_by_entry,
|
||||
})
|
||||
users << user
|
||||
end
|
||||
users
|
||||
end
|
||||
end
|
||||
end
|
||||
143
app/jobs/domain/fa/job/user_incremental_job.rb
Normal file
143
app/jobs/domain/fa/job/user_incremental_job.rb
Normal file
@@ -0,0 +1,143 @@
|
||||
module Domain::Fa::Job
|
||||
class UserIncrementalJob < Base
|
||||
queue_as :fa_user_page
|
||||
ignore_signature_args :caused_by_entry
|
||||
|
||||
def perform(args)
|
||||
init_from_args!(args)
|
||||
# buggy (sentinal) user
|
||||
return if @user.id == 117552 && @user.url_name == "click here"
|
||||
logger.prefix = proc { "[ #{@user.id.to_s.bold} / #{@user.url_name.bold} ]" }
|
||||
|
||||
# this is similar to a user page job, and will update the user page
|
||||
# however, it will incrementally update user favs & follows / following:
|
||||
# - favs: look at the 'favorites' section and add new favs
|
||||
# checking that we can see at least one already recorded fav.
|
||||
# if not, enqueue a full favs scan job.
|
||||
# - follows / following: look at the 'watchers' / 'watching' section,
|
||||
# and add new follows.
|
||||
|
||||
if !@user.due_for_incremental_scan? && !@force_scan
|
||||
logger.warn("scanned #{@user.time_ago_for_incremental_scan.bold}, skipping")
|
||||
return
|
||||
end
|
||||
|
||||
response = http_client.get(
|
||||
"https://www.furaffinity.net/user/#{@user.url_name}/",
|
||||
caused_by_entry: @caused_by_entry,
|
||||
)
|
||||
@log_entry = response.log_entry
|
||||
|
||||
ret, opts = Domain::Fa::Job::ScanUserUtils.
|
||||
check_disabled_or_not_found(@user, response)
|
||||
case ret
|
||||
when :ok
|
||||
page = opts[:page]
|
||||
when :stop
|
||||
logger.error(opts[:message])
|
||||
return
|
||||
when :fatal
|
||||
fatal_error(opts[:message])
|
||||
end
|
||||
|
||||
Domain::Fa::Job::ScanUserUtils.
|
||||
update_user_fields_from_page(@user, page, response)
|
||||
|
||||
check_favs(@user, page.user_page.recent_fav_fa_ids)
|
||||
check_watchers(@user, page.user_page.recent_watchers)
|
||||
check_watching(@user, page.user_page.recent_watching)
|
||||
|
||||
@user.scanned_page_at = Time.now
|
||||
@user.scanned_incremental_at = Time.now
|
||||
@user.save!
|
||||
logger.info "completed page scan"
|
||||
ensure
|
||||
enqueue_jobs_from_found_links(
|
||||
response.log_entry,
|
||||
suppress_jobs: [{
|
||||
job: self.class,
|
||||
url_name: @user.url_name,
|
||||
}],
|
||||
) if response && response.status_code == 200
|
||||
end
|
||||
|
||||
def check_favs(user, recent_fav_fa_ids)
|
||||
recent_fav_posts = Domain::Fa::Job::ScanPostUtils.
|
||||
find_or_create_by_fa_ids(recent_fav_fa_ids, caused_by_entry: @log_entry)
|
||||
recent_fav_post_ids = recent_fav_posts.map(&:id)
|
||||
|
||||
existing_fav_post_ids = user.fav_post_joins.where(post_id: recent_fav_post_ids).pluck(:post_id)
|
||||
missing_fav_post_ids = recent_fav_post_ids - existing_fav_post_ids
|
||||
if missing_fav_post_ids.empty?
|
||||
logger.info("no new favs for user")
|
||||
@user.scanned_favs_at = Time.now
|
||||
return
|
||||
end
|
||||
|
||||
num_missing = missing_fav_post_ids.size
|
||||
if num_missing >= 0
|
||||
logger.info("add #{num_missing.to_s.bold} new favs for user")
|
||||
@user.fav_post_joins.insert_all!(missing_fav_post_ids.map do |post_id|
|
||||
{ post_id: post_id }
|
||||
end)
|
||||
end
|
||||
|
||||
if missing_fav_post_ids.include? recent_fav_post_ids.last
|
||||
logger.info("last fav is new (#{num_missing.to_s.bold} missing), enqueue full favs scan")
|
||||
defer_job(Domain::Fa::Job::FavsJob, {
|
||||
user: user,
|
||||
caused_by_entry: @log_entry,
|
||||
})
|
||||
else
|
||||
@user.scanned_favs_at = Time.now
|
||||
end
|
||||
end
|
||||
|
||||
# who watches this user - does not update scanned_follows_at timestamp
|
||||
# nor enqueue a full follows scan job
|
||||
# TODO - may be useful to have a separate 'scan full followed by' job
|
||||
# to handle users who are watched by a large number of others
|
||||
def check_watchers(user, recent_watchers)
|
||||
recent_models = Domain::Fa::Job::ScanUserUtils.find_or_create_by_names(recent_watchers)
|
||||
existing = user.followed_joins.where(follower_id: recent_models.map(&:id)).pluck(:follower_id)
|
||||
missing = recent_models.reject { |w| existing.include? w.id }
|
||||
if missing.empty?
|
||||
logger.info("no new watchers")
|
||||
return
|
||||
end
|
||||
|
||||
num_missing = missing.size
|
||||
user.followed_joins.insert_all!(missing.map do |watcher|
|
||||
{ follower_id: watcher.id }
|
||||
end)
|
||||
logger.info("added #{num_missing.to_s.bold} new watchers")
|
||||
end
|
||||
|
||||
def check_watching(user, recent_watching)
|
||||
recent_models = Domain::Fa::Job::ScanUserUtils.find_or_create_by_names(recent_watching)
|
||||
existing = user.follower_joins.where(followed_id: recent_models.map(&:id)).pluck(:followed_id)
|
||||
missing = recent_models.reject { |w| existing.include? w.id }
|
||||
if missing.empty?
|
||||
logger.info("no new users watched")
|
||||
@user.scanned_follows_at = Time.now
|
||||
return
|
||||
end
|
||||
|
||||
num_missing = missing.size
|
||||
user.follower_joins.insert_all!(missing.map do |watcher|
|
||||
{ followed_id: watcher.id }
|
||||
end)
|
||||
logger.info("added #{num_missing.to_s.bold} new users watched")
|
||||
|
||||
if missing.any? { |w| w.url_name == recent_watching.last.url_name }
|
||||
logger.info("last user watched is new, enqueue full follows scan")
|
||||
Domain::Fa::Job::UserFollowsJob.perform_later({
|
||||
user: user,
|
||||
caused_by_entry: @log_entry,
|
||||
})
|
||||
else
|
||||
@user.scanned_follows_at = Time.now
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -18,65 +18,21 @@ class Domain::Fa::Job::UserPageJob < Domain::Fa::Job::Base
|
||||
caused_by_entry: @caused_by_entry,
|
||||
)
|
||||
|
||||
if response.status_code != 200
|
||||
fatal_error("http #{response.status_code}, log entry #{response.log_entry.id}")
|
||||
ret, opts = Domain::Fa::Job::ScanUserUtils.
|
||||
check_disabled_or_not_found(@user, response)
|
||||
case ret
|
||||
when :ok
|
||||
page = opts[:page]
|
||||
when :stop
|
||||
logger.error(opts[:message])
|
||||
return
|
||||
when :fatal
|
||||
fatal_error(opts[:message])
|
||||
end
|
||||
|
||||
page = Domain::Fa::Parser::Page.new(response.body, require_logged_in: false)
|
||||
if !page.probably_user_page?
|
||||
if response.body =~ /has voluntarily disabled access/
|
||||
@user.state = :scan_error
|
||||
@user.state_detail = {
|
||||
scan_error: "(user scan) user has disabled account, see last_user_page_id",
|
||||
last_user_page_id: response.log_entry.id,
|
||||
}
|
||||
try_name = /User "(.+)" has voluntarily disabled/.match(response.body)
|
||||
@user.name ||= try_name && try_name[1] || @user.url_name
|
||||
@user.save!
|
||||
logger.error("account disabled")
|
||||
return
|
||||
end
|
||||
|
||||
if response.body =~ /This user cannot be found./ || response.body =~ /The page you are trying to reach is currently pending deletion/
|
||||
@user.state = :scan_error
|
||||
@user.state_detail = {
|
||||
scan_error: "(user scan) user was not found, see last_user_page_id",
|
||||
last_user_page_id: response.log_entry.id,
|
||||
}
|
||||
@user.name ||= @user.url_name
|
||||
@user.save!
|
||||
logger.error("account not found")
|
||||
return
|
||||
end
|
||||
|
||||
fatal_error("not a user page - log entry #{response.log_entry.id}")
|
||||
end
|
||||
|
||||
# page.require_logged_in!
|
||||
user_page = page.user_page
|
||||
|
||||
@user.name = user_page.name
|
||||
@user.registered_at = user_page.registered_since
|
||||
@user.num_pageviews = user_page.num_pageviews
|
||||
@user.num_submissions = user_page.num_submissions
|
||||
@user.num_comments_recieved = user_page.num_comments_recieved
|
||||
@user.num_comments_given = user_page.num_comments_given
|
||||
@user.num_journals = user_page.num_journals
|
||||
@user.num_favorites = user_page.num_favorites
|
||||
@user.profile_html = user_page.profile_html.encode("UTF-8", :invalid => :replace, :undef => :replace)
|
||||
Domain::Fa::Job::ScanUserUtils.
|
||||
update_user_fields_from_page(@user, page, response)
|
||||
@user.scanned_page_at = Time.now
|
||||
@user.log_entry_detail["last_user_page_id"] = response.log_entry.id
|
||||
|
||||
avatar = @user.avatar_or_create
|
||||
@user.avatar.file_uri = user_page.profile_thumb_url
|
||||
if @user.avatar.changed?
|
||||
@user.avatar.save!
|
||||
Domain::Fa::Job::UserAvatarJob.perform_later({
|
||||
user: @user,
|
||||
caused_by_entry: response.log_entry,
|
||||
})
|
||||
end
|
||||
|
||||
@user.save!
|
||||
logger.info "completed page scan"
|
||||
ensure
|
||||
|
||||
@@ -138,8 +138,55 @@ class Domain::Fa::Parser::UserPageHelper < Domain::Fa::Parser::Base
|
||||
@num_favorites ||= stat_value(:nfav, 2)
|
||||
end
|
||||
|
||||
def recent_fav_fa_ids
|
||||
@recent_favs ||= case @page_version
|
||||
when VERSION_2
|
||||
@elem.css("#gallery-latest-favorites").first&.css("figure a")&.map do |elem|
|
||||
href = elem["href"]
|
||||
/\/view\/(\d+)/.match(href)[1]&.to_i || raise(
|
||||
"invalid url: #{href}"
|
||||
)
|
||||
end
|
||||
else unimplemented_version!
|
||||
end
|
||||
end
|
||||
|
||||
RecentUser = Struct.new(:name, :url_name) do
|
||||
def to_a
|
||||
[name, url_name]
|
||||
end
|
||||
end
|
||||
|
||||
def recent_watchers
|
||||
@recent_watchers ||= recent_users_for_section("Recent Watchers")
|
||||
end
|
||||
|
||||
def recent_watching
|
||||
@recent_watching ||= recent_users_for_section("Recently Watched")
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def recent_users_for_section(section_name)
|
||||
case @page_version
|
||||
when VERSION_2
|
||||
section_elem = @elem.css(".userpage-section-left").find do |elem|
|
||||
elem.css(".section-header h2")&.first&.text&.strip == section_name
|
||||
end
|
||||
|
||||
section_elem = section_elem.css(".section-body").first
|
||||
section_elem.css("a").map do |link_elem|
|
||||
href = link_elem["href"]
|
||||
url_name = /\/user\/(.+)\//.match(href)&.[](1) || raise(
|
||||
"invalid url: #{href}"
|
||||
)
|
||||
name = link_elem.css(".artist_name").first.text.strip
|
||||
RecentUser.new(name, url_name)
|
||||
end
|
||||
else unimplemented_version!
|
||||
end
|
||||
end
|
||||
|
||||
def stat_value(legacy_name, redux_idx)
|
||||
legacy_map = if false # old mode?
|
||||
{
|
||||
|
||||
@@ -31,22 +31,28 @@ class Domain::Fa::UserEnqueuer
|
||||
rows.each do |user|
|
||||
types = []
|
||||
if user.state == "ok"
|
||||
if user.due_for_favs_scan?
|
||||
Domain::Fa::Job::FavsJob.perform_later({ user: user })
|
||||
types << "favs"
|
||||
end
|
||||
if user.due_for_page_scan?
|
||||
Domain::Fa::Job::UserPageJob.perform_later({ user: user })
|
||||
types << "page"
|
||||
if user.due_for_favs_scan? || user.due_for_page_scan? || user.due_for_follows_scan?
|
||||
Domain::Fa::Job::UserIncrementalJob.perform_later({ user: user })
|
||||
types << "incremental"
|
||||
end
|
||||
|
||||
# if user.due_for_favs_scan?
|
||||
# Domain::Fa::Job::FavsJob.perform_later({ user: user })
|
||||
# types << "favs"
|
||||
# end
|
||||
# if user.due_for_page_scan?
|
||||
# Domain::Fa::Job::UserPageJob.perform_later({ user: user })
|
||||
# types << "page"
|
||||
# end
|
||||
|
||||
if user.due_for_gallery_scan?
|
||||
Domain::Fa::Job::UserGalleryJob.perform_later({ user: user })
|
||||
types << "gallery"
|
||||
end
|
||||
if user.due_for_follows_scan?
|
||||
Domain::Fa::Job::UserFollowsJob.perform_later({ user: user })
|
||||
types << "follows"
|
||||
end
|
||||
# if user.due_for_follows_scan?
|
||||
# Domain::Fa::Job::UserFollowsJob.perform_later({ user: user })
|
||||
# types << "follows"
|
||||
# end
|
||||
end
|
||||
|
||||
avatar = user.avatar_or_create
|
||||
|
||||
@@ -108,32 +108,51 @@ class Domain::Fa::User < ReduxApplicationRecord
|
||||
:gallery => 1.year,
|
||||
:follows => 1.month,
|
||||
:favs => 1.month,
|
||||
:incremental => 1.month,
|
||||
}
|
||||
|
||||
SCAN_FIELD_TYPES = {
|
||||
:page => :column,
|
||||
:gallery => :column,
|
||||
:follows => :column,
|
||||
:favs => :column,
|
||||
:incremental => :state_detail,
|
||||
}
|
||||
|
||||
SCAN_TYPES.keys.each do |scan_type|
|
||||
define_method(:"due_for_#{scan_type}_scan?") do
|
||||
scan_due?(scan_type)
|
||||
end
|
||||
|
||||
define_method(:"time_ago_for_#{scan_type}_scan") do
|
||||
scanned_ago_in_words(scan_type)
|
||||
end
|
||||
|
||||
if SCAN_FIELD_TYPES[scan_type] == :state_detail
|
||||
define_method(:"scanned_#{scan_type}_at") do
|
||||
get_scanned_at_value(scan_type)
|
||||
end
|
||||
|
||||
define_method(:"scanned_#{scan_type}_at=") do |value|
|
||||
set_scanned_at_value(scan_type, value)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
DATE_HELPER = Class.new.extend(ActionView::Helpers::DateHelper)
|
||||
|
||||
def scanned_ago_in_words(scan_type)
|
||||
time = self.send(:"scanned_#{scan_type}_at")
|
||||
if time.nil?
|
||||
"never"
|
||||
if (timestamp = get_scanned_at_value(scan_type))
|
||||
DATE_HELPER.time_ago_in_words(timestamp) + " ago"
|
||||
else
|
||||
DATE_HELPER.time_ago_in_words(time) + " ago"
|
||||
"never"
|
||||
end
|
||||
end
|
||||
|
||||
def scan_due?(scan_type)
|
||||
duration = SCAN_TYPES[scan_type] || raise("invalid scan type '#{scan_type}'")
|
||||
timestamp = self.send(:"scanned_#{scan_type}_at")
|
||||
timestamp.nil? || timestamp < duration.ago
|
||||
timestamp = get_scanned_at_value(scan_type)
|
||||
timestamp.nil? || timestamp <= duration.ago
|
||||
end
|
||||
|
||||
def take_posts_from(other_user)
|
||||
@@ -243,4 +262,25 @@ class Domain::Fa::User < ReduxApplicationRecord
|
||||
joins(:disco).
|
||||
merge(disco_query.reselect(:user_id))
|
||||
end
|
||||
|
||||
def get_scanned_at_value(scan_type)
|
||||
case SCAN_FIELD_TYPES[scan_type]
|
||||
when :column
|
||||
send(:"scanned_#{scan_type}_at")
|
||||
when :state_detail
|
||||
str = state_detail["scanned_#{scan_type}_at"]
|
||||
Time.parse(str) if str
|
||||
else raise("invalid scan type '#{scan_type}'")
|
||||
end
|
||||
end
|
||||
|
||||
def set_scanned_at_value(scan_type, value)
|
||||
case SCAN_FIELD_TYPES[scan_type]
|
||||
when :column
|
||||
send(:"scanned_#{scan_type}_at=", value)
|
||||
when :state_detail
|
||||
state_detail["scanned_#{scan_type}_at"] = value.iso8601
|
||||
else raise("invalid scan type '#{scan_type}'")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -6,7 +6,8 @@ module SpecHelpers
|
||||
case should_raise
|
||||
when false
|
||||
expect(ret).to_not be_a(Exception), proc {
|
||||
"!> " + ret.message + "\n" + ret.backtrace[0..10].join("\n")
|
||||
bt = ret.backtrace.reject { |l| l =~ /\/gems\// }[0..10]
|
||||
"!> " + ret.message[0..100] + "\n" + bt.join("\n")
|
||||
}
|
||||
when Exception
|
||||
expect(ret).to be_a(should_raise)
|
||||
|
||||
67
spec/jobs/domain/fa/job/user_incremental_job_spec.rb
Normal file
67
spec/jobs/domain/fa/job/user_incremental_job_spec.rb
Normal file
@@ -0,0 +1,67 @@
|
||||
require "rails_helper"
|
||||
|
||||
describe Domain::Fa::Job::UserIncrementalJob do
|
||||
let(:http_client_mock) { instance_double("::Scraper::HttpClient") }
|
||||
before do
|
||||
Scraper::ClientFactory.http_client_mock = http_client_mock
|
||||
@log_entries = SpecUtil.init_http_client_mock(
|
||||
http_client_mock, client_mock_config
|
||||
)
|
||||
end
|
||||
|
||||
context "scanning a normal user" do
|
||||
let(:client_mock_config) do
|
||||
[
|
||||
{
|
||||
uri: "https://www.furaffinity.net/user/meesh/",
|
||||
status_code: 200,
|
||||
content_type: "text/html",
|
||||
contents: SpecUtil.read_fixture_file("domain/fa/job/user_page_meesh.html"),
|
||||
},
|
||||
]
|
||||
end
|
||||
|
||||
it "updates the user model" do
|
||||
perform_now({ url_name: "meesh" })
|
||||
meesh = Domain::Fa::User.find_by(url_name: "meesh")
|
||||
expect(meesh).to_not be_nil
|
||||
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserAvatarJob).length).to be(1)
|
||||
|
||||
# 12 new watchers, 12 new watched
|
||||
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserPageJob).length).to be(24)
|
||||
# new watch in last position, so enqueue scan
|
||||
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserFollowsJob).length).to be(1)
|
||||
expect(meesh.scanned_follows_at).to be_nil
|
||||
|
||||
# 20 newly seen faved posts
|
||||
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanPostJob).length).to be(20)
|
||||
# new fav in last position, so should enqueue scan
|
||||
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::FavsJob).length).to be(1)
|
||||
expect(meesh.scanned_favs_at).to be_nil
|
||||
end
|
||||
|
||||
it "incrementally adds new watchers and favs" do
|
||||
meesh = Domain::Fa::User.create!(name: "Meesh", url_name: "meesh")
|
||||
celeste = Domain::Fa::User.create!(name: "Celeste~", url_name: "celeste~")
|
||||
post_51594821 = Domain::Fa::Post.create!(fa_id: 51594821)
|
||||
|
||||
meesh.follows << celeste
|
||||
meesh.fav_posts << post_51594821
|
||||
|
||||
perform_now({ url_name: "meesh" })
|
||||
meesh.reload
|
||||
|
||||
# 12 new watchers, 11 new watched
|
||||
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserPageJob).length).to be(23)
|
||||
# No new watch in last position, can skip scan
|
||||
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::UserFollowsJob)).to be_empty
|
||||
expect(meesh.scanned_follows_at).to be_within(1.second).of(Time.now)
|
||||
|
||||
# 19 newly seen faved posts
|
||||
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::ScanPostJob).length).to be(19)
|
||||
# No new fav in last position, so don't enqueue scan
|
||||
expect(SpecUtil.enqueued_jobs(Domain::Fa::Job::FavsJob)).to be_empty
|
||||
expect(meesh.scanned_favs_at).to be_within(1.second).of(Time.now)
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -312,6 +312,50 @@ describe Domain::Fa::Parser::Page do
|
||||
assert_equal "//t.furaffinity.net/52807274@200-1688750295.jpg", first_listing.thumb_path
|
||||
end
|
||||
|
||||
it "has the right recent favorites and users" do
|
||||
parser = get_parser("user_page_tenzing.html")
|
||||
assert_page_type parser, :probably_user_page?
|
||||
up = parser.user_page
|
||||
assert_equal up.recent_fav_fa_ids.length, 20
|
||||
assert_equal up.recent_fav_fa_ids, [
|
||||
53255068, 53140782, 31417552, 53050580,
|
||||
53050352, 52961865, 52916591, 34833220,
|
||||
49050098, 52875656, 52826012, 52807234,
|
||||
35678896, 52773671, 52750513, 38721736,
|
||||
28430551, 50976208, 51785387, 52449028,
|
||||
]
|
||||
|
||||
assert_equal up.recent_watchers.map(&:to_a), [
|
||||
["Boxstuff", "boxstuff"],
|
||||
["Shuffl3", "shuffl3"],
|
||||
["Fervidus", "fervidus"],
|
||||
["SargentTNT", "sargenttnt"],
|
||||
["asitanneko", "asitanneko"],
|
||||
["overfuck", "overfuck"],
|
||||
["LuciaTheCelestialVixen", "luciathecelestialvixen"],
|
||||
["goodcabinet", "goodcabinet"],
|
||||
["Lethal_Dose1", "lethaldose1"],
|
||||
["Kingtiger2101", "kingtiger2101"],
|
||||
["Nilla_Arts", "nillaarts"],
|
||||
["Riku_Anita", "rikuanita"],
|
||||
]
|
||||
|
||||
assert_equal up.recent_watching.map(&:to_a), [
|
||||
["Seyorrol", "seyorrol"],
|
||||
["Glopossum", "glopossum"],
|
||||
["Jeniak", "jeniak"],
|
||||
["rajii", "rajii"],
|
||||
["Kenjomik", "kenjomik"],
|
||||
["fluff-kevlar", "fluff-kevlar"],
|
||||
["Fisk", "fisk"],
|
||||
["Crinz", "crinz"],
|
||||
["Tabuley", "tabuley"],
|
||||
["Braeburned", "braeburned"],
|
||||
["knifeDragon", "knifedragon"],
|
||||
["LotusGarden", "lotusgarden"],
|
||||
]
|
||||
end
|
||||
|
||||
def get_parser(file, require_logged_in: true)
|
||||
path = File.join("domain/fa/parser/redux", file)
|
||||
contents = SpecUtil.read_fixture_file(path) || raise("Couldn't open #{path}")
|
||||
|
||||
@@ -13,28 +13,28 @@ describe Domain::Fa::UserEnqueuer do
|
||||
)
|
||||
|
||||
get_enqueued_users = proc do
|
||||
SpecUtil.enqueued_jobs(Domain::Fa::Job::UserFollowsJob).map do |job|
|
||||
SpecUtil.enqueued_jobs(Domain::Fa::Job::UserIncrementalJob).map do |job|
|
||||
job[:args][0][:user]
|
||||
end
|
||||
end
|
||||
|
||||
enqueuer.run_once
|
||||
expect(get_enqueued_users.call.length).to eq(5)
|
||||
expect(get_enqueued_users.call).to eq(users[0...5])
|
||||
expect(SpecUtil.enqueued_jobs.length).to eq(25)
|
||||
SpecUtil.shift_jobs(Domain::Fa::Job::UserFollowsJob)
|
||||
# expect(get_enqueued_users.call.length).to eq(5)
|
||||
# expect(get_enqueued_users.call).to eq(users[0...5])
|
||||
# expect(SpecUtil.enqueued_jobs.length).to eq(15)
|
||||
# SpecUtil.shift_jobs(Domain::Fa::Job::UserIncrementalJob)
|
||||
|
||||
enqueuer.run_once
|
||||
expect(get_enqueued_users.call).to eq(users[1...5])
|
||||
SpecUtil.shift_jobs(Domain::Fa::Job::UserFollowsJob)
|
||||
# enqueuer.run_once
|
||||
# expect(get_enqueued_users.call).to eq(users[1...5])
|
||||
# SpecUtil.shift_jobs(Domain::Fa::Job::UserIncrementalJob)
|
||||
|
||||
enqueuer.run_once
|
||||
expect(get_enqueued_users.call).to eq(users[2...7])
|
||||
SpecUtil.shift_jobs(Domain::Fa::Job::UserFollowsJob, 3)
|
||||
# enqueuer.run_once
|
||||
# expect(get_enqueued_users.call).to eq(users[2...7])
|
||||
# SpecUtil.shift_jobs(Domain::Fa::Job::UserIncrementalJob, 3)
|
||||
|
||||
expect do
|
||||
enqueuer.run_once
|
||||
end.to raise_exception(StopIteration)
|
||||
expect(get_enqueued_users.call).to eq(users[5...7])
|
||||
# expect do
|
||||
# enqueuer.run_once
|
||||
# end.to raise_exception(StopIteration)
|
||||
# expect(get_enqueued_users.call).to eq(users[5...7])
|
||||
end
|
||||
end
|
||||
|
||||
@@ -78,4 +78,40 @@ describe Domain::Fa::User do
|
||||
# should be able to destroy the user
|
||||
expect(user1.destroy).to be_truthy
|
||||
end
|
||||
|
||||
it "can deal with scanned_at values in state_detail" do
|
||||
user = SpecUtil.create_domain_fa_user(name: "Foo", url_name: "foo")
|
||||
|
||||
expect(user.scanned_incremental_at).to be_nil
|
||||
expect(user.due_for_incremental_scan?).to be_truthy
|
||||
expect(user.time_ago_for_incremental_scan).to eq("never")
|
||||
|
||||
expect(user.scanned_follows_at).to be_nil
|
||||
expect(user.due_for_follows_scan?).to be_truthy
|
||||
expect(user.time_ago_for_follows_scan).to eq("never")
|
||||
|
||||
incremental_at = Time.parse 1.day.ago.iso8601
|
||||
follows_at = Time.parse 2.days.ago.iso8601
|
||||
user.scanned_incremental_at = incremental_at
|
||||
user.scanned_follows_at = follows_at
|
||||
user.save!
|
||||
|
||||
expect(user.scanned_incremental_at).to eq(incremental_at)
|
||||
expect(user.due_for_incremental_scan?).to be_falsey
|
||||
expect(user.time_ago_for_incremental_scan).to eq("1 day ago")
|
||||
|
||||
expect(user.scanned_follows_at).to eq(follows_at)
|
||||
expect(user.due_for_follows_scan?).to be_falsey
|
||||
expect(user.time_ago_for_follows_scan).to eq("2 days ago")
|
||||
|
||||
# truthy if a scan has not happened in a long time
|
||||
incremental_at = Time.parse 1.year.ago.iso8601
|
||||
follows_at = Time.parse 2.years.ago.iso8601
|
||||
user.scanned_incremental_at = incremental_at
|
||||
user.scanned_follows_at = follows_at
|
||||
user.save!
|
||||
|
||||
expect(user.due_for_incremental_scan?).to be_truthy
|
||||
expect(user.due_for_follows_scan?).to be_truthy
|
||||
end
|
||||
end
|
||||
|
||||
1382
test/fixtures/files/domain/fa/parser/redux/user_page_tenzing.html
vendored
Normal file
1382
test/fixtures/files/domain/fa/parser/redux/user_page_tenzing.html
vendored
Normal file
File diff suppressed because one or more lines are too long
Reference in New Issue
Block a user