Compare commits
7 Commits
dymk--grap ... dymk--sofu
| Author | SHA1 | Date |
|---|---|---|
| | 7072bbd910 | |
| | 8f81468fc0 | |
| | 6c33c35a12 | |
| | de4874c886 | |
| | dc6965ab7b | |
| | 49fd8ccd48 | |
| | 6f8afdd2a6 | |
@@ -74,7 +74,7 @@ RUN git config --system core.pager "delta" && \
     git config --system delta.navigate true && \
     git config --system delta.dark true && \
     git config --system delta.side-by-side true && \
-    git config --system merge.conflictstyle "zdiff3" \
+    git config --system merge.conflictstyle "zdiff3" && \
+    git config --system core.editor "cursor --wait"

 # Install native gems
@@ -4,4 +4,5 @@ RUN apt-get update && apt-get install -y \
     postgresql-17-pgvector \
     && rm -rf /var/lib/apt/lists/*

+COPY create-tablespaces.bash /docker-entrypoint-initdb.d/00-create-tablespaces.bash
 RUN echo "CREATE EXTENSION pgvector;" >> /docker-entrypoint-initdb.d/01-pgvector.sql
9 .devcontainer/create-tablespaces.bash Executable file
@@ -0,0 +1,9 @@
#!/bin/bash -ex

mkdir -p /tablespaces/mirai
chown postgres:postgres /tablespaces/mirai
chmod 750 /tablespaces/mirai
psql -v ON_ERROR_STOP=1 \
  --username "$POSTGRES_USER" \
  --dbname "$POSTGRES_DB" \
  -c "CREATE TABLESPACE mirai LOCATION '/tablespaces/mirai'"
@@ -24,6 +24,7 @@ services:
     restart: unless-stopped
     volumes:
       - postgres-data:/var/lib/postgresql/data
+      - postgres-data-tablespaces:/tablespaces
       - ./create-db-user.sql:/docker-entrypoint-initdb.d/create-db-user.sql
     environment:
       POSTGRES_USER: postgres
@@ -66,6 +67,7 @@ services:

 volumes:
   postgres-data:
+  postgres-data-tablespaces:
   devcontainer-redux-gem-cache:
   devcontainer-redux-blob-files:
   devcontainer-redux-grafana-data:
1 Gemfile
@@ -135,6 +135,7 @@ gem "disco"
 gem "faiss"
 gem "neighbor"
 gem "progressbar"
+gem "attr_json"

 group :production, :staging do
   gem "rails_semantic_logger", "~> 4.17"
Gemfile.lock
@@ -95,6 +95,8 @@ GEM
       tzinfo (~> 2.0, >= 2.0.5)
     addressable (2.8.7)
       public_suffix (>= 2.0.2, < 7.0)
+    attr_json (2.5.0)
+      activerecord (>= 6.0.0, < 8.1)
     base64 (0.2.0)
     bcrypt (3.1.20)
     benchmark (0.4.0)
@@ -533,6 +535,7 @@ PLATFORMS

 DEPENDENCIES
   addressable
+  attr_json
   bootsnap
   capybara
   colorize
50 Rakefile
@@ -37,27 +37,6 @@ task periodic_tasks: %i[environment set_logger_stdout] do
     end
   end

-  Thread.new do
-    loop do
-      Rake::Task["fa:browse_page_job"].execute
-      Rake::Task["fa:home_page_job"].execute
-      Rake::Task["e621:posts_index_job"].execute
-      puts "enqueue periodic jobs"
-      sleep 1.minute
-    end
-  end
-
-  Thread.new do
-    loop do
-      puts "enqueue inkbunny latest posts"
-      Domain::Inkbunny::Job::LatestPostsJob.set(
-        queue: "inkbunny",
-        priority: -20,
-      ).perform_later({})
-      sleep 2.minutes
-    end
-  end
-
   loop { sleep 10 }
 end

@@ -85,6 +64,7 @@ task good_job: %i[environment set_ar_stdout set_logger_stdout] do
     "GOOD_JOB_MAX_CACHE" => "10000",
     "GOOD_JOB_QUEUE_SELECT_LIMIT" => "4096",
     "GOOD_JOB_MAX_THREADS" => "4",
+    "GOOD_JOB_ENABLE_CRON" => "1",
     "GOOD_JOB_QUEUES" =>
       ENV["GOOD_JOB_QUEUES"] ||
         %w[manual:4 fa_post,e621:2 *:6].reject(&:nil?).join(";"),
@@ -100,34 +80,6 @@ task good_job: %i[environment set_ar_stdout set_logger_stdout] do
   exec(cmd)
 end

-task recompute_job_signatures: :environment do
-  ActiveRecord::Base.logger = Logger.new(STDOUT)
-  ActiveRecord::Base.logger.level = :error
-
-  start_at = ENV["start_at"]&.to_i || 0
-  count = 0
-  destroyed = 0
-  puts "# jobs: #{Delayed::Job.count}"
-  Delayed::Job.find_each(start: start_at) do |job|
-    job.set_signature
-    unless job.save
-      job.destroy
-      destroyed += 1
-    end
-    count += 1
-    if count % 50 == 0
-      puts "processed #{count}, destroyed #{destroyed} - last id: #{job.id}"
-    end
-  end
-end
-
-task workoff_failed_jobs: %i[environment set_ar_stdout set_logger_stdout] do
-  worker = Delayed::Worker.new
-  Delayed::Job
-    .where("last_error is not null and attempts <= 2")
-    .find_each(batch_size: 1) { |job| worker.run(job) }
-end
-
 task :reverse_csv do
   file = ENV["file"] || raise("need 'file' (file path)")
   in_csv = CSV.parse(File.open(file, "r+"), headers: true)
@@ -1,7 +1,19 @@
# typed: strict
class Domain::E621::Job::Base < Scraper::JobBase
  queue_as :e621

  sig { override.returns(Symbol) }
  def self.http_factory_method
    :get_e621_http_client
  end

  sig { returns(Domain::E621::User) }
  def user_from_args!
    T.must(user_from_args)
  end

  sig { returns(T.nilable(Domain::E621::User)) }
  def user_from_args
    T.cast(arguments[0][:user], T.nilable(Domain::E621::User))
  end
end
@@ -4,13 +4,8 @@ class Domain::E621::Job::PostsIndexJob < Domain::E621::Job::Base

   sig { override.params(args: T::Hash[Symbol, T.untyped]).void }
   def perform(args)
-    response =
-      http_client.get(
-        "https://e621.net/posts.json",
-        caused_by_entry: causing_log_entry,
-      )
+    response = http_client.get("https://e621.net/posts.json")
     log_entry = response.log_entry
     self.first_log_entry ||= log_entry

     if response.status_code != 200
       fatal_error(
82 app/jobs/domain/e621/job/scan_post_favs_job.rb Normal file
@@ -0,0 +1,82 @@
# typed: strict
class Domain::E621::Job::ScanPostFavsJob < Domain::E621::Job::Base
  MAX_PAGES = T.let(200, Integer)
  MAX_USERS_PER_PAGE = T.let(Rails.env.test? ? 10 : 320, Integer)
  MAX_USERS_PER_SLICE = 1000

  class UserRow < T::Struct
    const :e621_id, Integer
    const :name, String
    const :num_other_favs, Integer
  end

  sig { override.params(args: T.untyped).void }
  def perform(args)
    post = T.cast(args[:post], Domain::E621::Post)
    page = 1
    breaker = 0
    total_created_users = 0
    logger.info("scanning post #{post.e621_id} favs")
    loop do
      break if breaker > MAX_PAGES
      logger.info("requesting page #{page}")
      url =
        "https://e621.net/posts/#{post.e621_id}/favorites?limit=#{MAX_USERS_PER_PAGE}&page=#{page}"
      response = http_client.get(url)
      if response.status_code != 200
        fatal_error("#{response.status_code} - bailing")
      end

      e621_id_to_user_row = T.let({}, T::Hash[Integer, UserRow])
      html = T.cast(Nokogiri.HTML(response.body), Nokogiri::HTML4::Document)
      rows = html.css("tbody tr")
      rows.each do |row_elem|
        user_member_elem = row_elem.css("td:first-child a")&.first
        e621_user_id = user_member_elem["href"].split("/").last.to_i
        e621_id_to_user_row[e621_user_id] = UserRow.new(
          e621_id: e621_user_id,
          name: user_member_elem.text,
          num_other_favs: row_elem.css("td:last-child").text.to_i,
        )
      end

      ReduxApplicationRecord.transaction do
        e621_id_to_user =
          T.cast(
            Domain::E621::User.where(
              e621_user_id: e621_id_to_user_row.keys,
            ).index_by(&:e621_user_id),
            T::Hash[Integer, Domain::E621::User],
          )
        e621_id_to_user_row.values.each do |user_row|
          user =
            e621_id_to_user[user_row.e621_id] ||
              Domain::E621::User.new(
                e621_user_id: user_row.e621_id,
                name: user_row.name,
              )
          user.num_other_favs_cached = user_row.num_other_favs
          total_created_users += 1 if user.new_record?
          user.save!
        end

        logger.info(
          "[update user fav counts: #{e621_id_to_user_row.size}] [created users: #{total_created_users}]",
        )
      end

      if rows.count < MAX_USERS_PER_PAGE
        logger.info(
          "[no more users faving post, bailing] [total users: #{total_created_users}]",
        )
        break
      end

      page += 1
      breaker += 1
    end

    post.scanned_post_favs_at = DateTime.current
    post.save!
  end
end
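The `CollectPostFavsTask` added later in this diff drives this job synchronously with `perform_now(post: post)`. A minimal sketch of that call pattern, assuming a post that has not been scanned yet (the id is illustrative):

```ruby
# Scan the favorites list of a single post inline; skip posts already scanned.
post = Domain::E621::Post.find_by!(e621_id: 12345) # illustrative id
unless post.scanned_post_favs_at
  Domain::E621::Job::ScanPostFavsJob.perform_now(post: post)
end
```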
@@ -20,13 +20,8 @@ class Domain::E621::Job::ScanPostJob < Domain::E621::Job::Base
   end

   logger.info("Scanning post #{post.e621_id}")
-  response =
-    http_client.get(
-      "https://e621.net/posts/#{post.e621_id}.json",
-      caused_by_entry: causing_log_entry,
-    )
+  response = http_client.get("https://e621.net/posts/#{post.e621_id}.json")
   log_entry = response.log_entry
   self.first_log_entry ||= log_entry
   if response.status_code != 200
     post.state_detail["scan_log_entry_id"] = log_entry.id
     post.state = :scan_error
168 app/jobs/domain/e621/job/scan_user_favs_job.rb Normal file
@@ -0,0 +1,168 @@
# typed: strict
class Domain::E621::Job::ScanUserFavsJob < Domain::E621::Job::Base
  MAX_PAGES_BEFORE_BREAK = 2400
  MAX_PER_PAGE = T.let(Rails.env.test? ? 4 : 320, Integer)
  include HasMeasureDuration

  sig { override.params(args: T.untyped).void }
  def perform(args)
    user = user_from_args!
    if user.scanned_favs_status == "error" && !args[:force]
      logger.info("[user #{user.e621_user_id} has error status, skipping]")
      return
    end

    last_e621_post_id = T.let(nil, T.nilable(Integer))
    breaker = T.let(0, Integer)
    post_ids = T.let([], T::Array[Integer])
    total_new_posts = 0

    prefix = [
      "[e621 user id: #{user.e621_user_id&.to_s&.bold}]",
      "[username: #{user.name&.bold}]",
    ].join(" ")

    logger.info("#{prefix} [cached favs: #{user.num_other_favs_cached}]")

    loop do
      breaker += 1
      if breaker > MAX_PAGES_BEFORE_BREAK
        logger.warn(
          "#{prefix} [breaker is too big] [last e621 post id: #{last_e621_post_id}]",
        )
        break
      end

      url =
        "https://e621.net/posts.json?tags=status:any+fav:#{user.url_name}+order:id_desc&limit=#{MAX_PER_PAGE}"
      if last_e621_post_id
        limiter = "before #{last_e621_post_id.to_s.bold}"
        url += "&page=b#{last_e621_post_id.to_s}"
      else
        limiter = "(none)"
      end
      response = http_client.get(url)

      if response.status_code == 403 &&
           response.body.include?("This users favorites are hidden")
        user.favs_are_hidden = true
        user.scanned_favs_at = Time.current
        user.save!
        break
      end

      if response.status_code != 200
        fatal_error(
          "non 200 response for /favorites.json: #{response.status_code.to_s.underline}",
        )
      end

      posts_json =
        T.cast(
          JSON.parse(response.body)["posts"],
          T::Array[T::Hash[String, T.untyped]],
        )
      if posts_json.empty?
        logger.info(
          "#{prefix} [limiter: #{limiter}] [req: #{breaker}] [no posts found] ",
        )
        break
      end

      e621_post_id_to_post_json =
        posts_json
          .map do |post_json|
            [T.cast(post_json["id"].to_i, Integer), post_json]
          end
          .to_h

      measure(
        "#{prefix} [finding favs: #{posts_json.size}] [req: #{breaker}]",
      ) do
        e621_id_to_post_id = T.let({}, T::Hash[Integer, Integer])
        e621_post_id_to_post_json
          .keys
          .each_slice(1000) do |e621_post_id_slice|
            e621_id_to_post_id.merge!(
              Domain::E621::Post
                .where(e621_id: e621_post_id_slice)
                .pluck(:e621_id, :id)
                .to_h,
            )
          end
        missing_e621_ids =
          e621_post_id_to_post_json.keys - e621_id_to_post_id.keys

        if missing_e621_ids.any?
          measure("#{prefix} [creating posts: #{missing_e621_ids.size}]") do
            missing_e621_ids.each do |e621_post_id|
              post_json = T.must(e621_post_id_to_post_json[e621_post_id])
              post =
                Domain::E621::TagUtil.initialize_or_update_post(
                  post_json: post_json,
                  caused_by_entry: causing_log_entry,
                )
              was_new = post.new_record?
              post.save!
              e621_id_to_post_id[e621_post_id] = T.must(post.id)
              if was_new
                logger.info(
                  "#{prefix} [created post: e621 id #{post.e621_id} / id #{post.id}]",
                )
                total_new_posts += 1
                defer_job(Domain::E621::Job::StaticFileJob, post: post)
              end
            end
          end
        end

        post_ids.concat(e621_id_to_post_id.values)
        logger.info(
          "#{prefix} [req: #{breaker}] [total posts: #{post_ids.size}] [total created: #{total_new_posts}]",
        )
      end

      if posts_json.size < MAX_PER_PAGE
        logger.info(
          "#{prefix} [fewer than limit; breaking] [limiter: #{limiter}] [req: #{breaker}]",
        )
        break
      end

      last_e621_post_id = T.cast(T.must(posts_json.last)["id"].to_i, Integer)
    end

    measure("#{prefix} [upserting favs: #{post_ids.size}]") do
      post_ids.each_slice(1000) do |slice|
        ReduxApplicationRecord.transaction do
          Domain::E621::Fav.upsert_all(
            slice.map { |post_id| { user_id: user.id, post_id: post_id } },
            unique_by: :index_domain_e621_favs_on_user_id_and_post_id,
          )
        end
      end
    end

    logger.info(
      "#{prefix} " +
        [
          "[favs scanned: #{post_ids.size.to_s.bold}]",
          "[posts created: #{total_new_posts.to_s.bold}]",
          "[total requests: #{breaker}]",
          "[done]",
        ].join(" "),
    )

    user.scanned_favs_status = "ok"
    user.scanned_favs_at = Time.current
    user.save!
  rescue StandardError
    logger.error("error scanning user favs: #{user&.e621_user_id}")
    user = user_from_args
    if user
      user.scanned_favs_status = "error"
      user.save!
    end
    raise
  end
end
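The `page=b#{id}` parameter is e621's keyset pagination ("posts before this id"), so each iteration requests the next older window. A hedged sketch of the URL sequence the loop produces (usernames and ids are illustrative):

```ruby
# First request has no limiter; later pages are keyed by the oldest id seen.
"https://e621.net/posts.json?tags=status:any+fav:some_user+order:id_desc&limit=320"
"https://e621.net/posts.json?tags=status:any+fav:some_user+order:id_desc&limit=320&page=b4187001"
"https://e621.net/posts.json?tags=status:any+fav:some_user+order:id_desc&limit=320&page=b4102944"
```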
48 app/jobs/domain/e621/job/scan_users_job.rb Normal file
@@ -0,0 +1,48 @@
# typed: strict
class Domain::E621::Job::ScanUsersJob < Domain::E621::Job::Base
  sig { override.params(args: T.untyped).void }
  def perform(args)
    after = T.let(args[:after_e621_id], T.nilable(String))
    breaker = 0
    num_seen_users = 0
    num_new_users = 0
    loop do
      breaker += 1
      break if breaker > 10

      url = "https://e621.net/users.json?limit=320"
      url += "&page=b#{after}" if after
      response = http_client.get(url)

      if response.status_code != 200
        fatal_error(
          "non 200 response for /users.json: #{response.status_code.to_s.underline}",
        )
      end

      users_json = JSON.parse(response.body)
      logger.info "saw #{users_json.size} users"
      break if users_json.empty?
      num_seen_users += users_json.size
      ReduxApplicationRecord.transaction do
        users_json.each do |user_json|
          user =
            Domain::E621::User.find_or_initialize_by(
              e621_user_id: user_json["id"],
            ) { |user| user.name = user_json["name"] }
          is_new = user.new_record?
          num_new_users += 1 if is_new
          user.save!
          # defer_job(Domain::E621::Job::ScanUserFavsJob, user: user) if is_new
        end
        logger.info "#{num_new_users} new users"
      end
      after = users_json.map { |user_json| user_json["id"] }.min.to_s
    end

    if num_seen_users > 0
      logger.info "scanning more users, after_e621_id: #{after}"
      defer_job(Domain::E621::Job::ScanUsersJob, after_e621_id: after)
    end
  end
end
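A hedged sketch of kicking off the full user scan; each run walks at most 10 pages of 320 users, then re-enqueues itself via `defer_job` with the smallest `after_e621_id` seen, until a page comes back empty:

```ruby
# Start from the newest users; the job defers its own continuation.
Domain::E621::Job::ScanUsersJob.perform_later({})
```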
@@ -24,8 +24,7 @@ class Domain::E621::Job::StaticFileJob < Domain::E621::Job::Base
     end
   end

-  response = http_client.get(file_url_str, caused_by_entry: causing_log_entry)
-  self.first_log_entry ||= response.log_entry
+  response = http_client.get(file_url_str)

   if response.status_code != 200
     post.state = :file_error
@@ -38,9 +38,7 @@ class Domain::Fa::Job::BrowsePageJob < Domain::Fa::Job::Base
     url = "https://www.furaffinity.net/browse/#{@page_number}/"
   end

-  response = http_client.get(url, caused_by_entry: causing_log_entry)
-  log_entry = response.log_entry
-  self.first_log_entry ||= log_entry
+  response = http_client.get(url)

   if response.status_code != 200
     fatal_error(
@@ -119,13 +119,7 @@ class Domain::Fa::Job::FavsJob < Domain::Fa::Job::Base
   else
     "https://www.furaffinity.net/favorites/#{user.url_name}/"
   end
-  response =
-    http_client.get(
-      url,
-      caused_by_entry: causing_log_entry,
-      use_http_cache: @use_http_cache,
-    )
-  self.first_log_entry ||= response.log_entry
+  response = http_client.get(url, use_http_cache: @use_http_cache)
   if response.status_code != 200
     fatal_error(
       "http #{response.status_code.to_s.red.bold}, " +
@@ -24,9 +24,7 @@ class Domain::Fa::Job::HomePageJob < Domain::Fa::Job::Base

   def scan_home_page
     url = "https://www.furaffinity.net/"
-    response = http_client.get(url, caused_by_entry: causing_log_entry)
-    log_entry = response.log_entry
-    self.first_log_entry ||= log_entry
+    response = http_client.get(url)

     if response.status_code != 200
       fatal_error(
@@ -63,9 +63,7 @@ class Domain::Fa::Job::ScanFileJob < Domain::Fa::Job::Base
     return
   end

-  response =
-    http_client.get(post.file_uri.to_s, caused_by_entry: causing_log_entry)
-  self.first_log_entry ||= response.log_entry
+  response = http_client.get(post.file_uri.to_s)

   if response.status_code == 404
     post.state_detail["404_count"] ||= 0
@@ -59,11 +59,7 @@ class Domain::Fa::Job::ScanPostJob < Domain::Fa::Job::Base
   sig { params(post: Domain::Fa::Post).void }
   def scan_post(post)
-    response =
-      http_client.get(
-        "https://www.furaffinity.net/view/#{post.fa_id}/",
-        caused_by_entry: causing_log_entry,
-      )
-    self.first_log_entry ||= response.log_entry
+    response =
+      http_client.get("https://www.furaffinity.net/view/#{post.fa_id}/")
     if response.status_code != 200
       fatal_error(
         "error scanning fa_id #{post.fa_id}: #{response.status_code}, log entry #{response.log_entry.id}",
@@ -12,11 +12,7 @@ class Domain::Fa::Job::UserAvatarJob < Domain::Fa::Job::Base
   end

-  response =
-    http_client.get(
-      "https://a.furaffinity.net/0/#{user.url_name}.gif",
-      caused_by_entry: @caused_by_entry,
-    )
-  self.first_log_entry ||= response.log_entry
+  response =
+    http_client.get("https://a.furaffinity.net/0/#{user.url_name}.gif")

   avatar.state_detail["log_entries"] ||= [avatar.log_entry&.id].compact
   avatar.state_detail["log_entries"] << response.log_entry.id
@@ -76,8 +76,7 @@ class Domain::Fa::Job::UserFollowsJob < Domain::Fa::Job::Base
   else
     "https://www.furaffinity.net/watchlist/by/#{user.url_name}/"
   end
-  response = http_client.get(url, caused_by_entry: causing_log_entry)
-  self.first_log_entry ||= response.log_entry
+  response = http_client.get(url)
   if response.status_code != 200
     fatal_error(
       "http #{response.status_code.to_s.red.bold}, " +
@@ -86,7 +86,7 @@ class Domain::Fa::Job::UserGalleryJob < Domain::Fa::Job::Base
   folder_href = "/" + folder_href unless folder_href.start_with?("/")
   page_url =
     "https://www.furaffinity.net#{folder_href}#{page_number}?perpage=72"
-  response = http_client.get(page_url, caused_by_entry: causing_log_entry)
+  response = http_client.get(page_url)
   log_entry = response.log_entry

   if response.status_code == 200
@@ -28,11 +28,7 @@ module Domain::Fa::Job
   end

-  response =
-    http_client.get(
-      "https://www.furaffinity.net/user/#{user.url_name}/",
-      caused_by_entry: @caused_by_entry,
-    )
-  @log_entry = response.log_entry
+  response =
+    http_client.get("https://www.furaffinity.net/user/#{user.url_name}/")

   ret, opts =
     Domain::Fa::Job::ScanUserUtils.check_disabled_or_not_found(
@@ -17,10 +17,7 @@ class Domain::Fa::Job::UserPageJob < Domain::Fa::Job::Base
   end

-  response =
-    http_client.get(
-      "https://www.furaffinity.net/user/#{user.url_name}/",
-      caused_by_entry: causing_log_entry,
-    )
+  response =
+    http_client.get("https://www.furaffinity.net/user/#{user.url_name}/")

   ret, opts =
     Domain::Fa::Job::ScanUserUtils.check_disabled_or_not_found(user, response)
@@ -1,9 +1,22 @@
-# typed: true
+# typed: strict
 class Domain::Inkbunny::Job::Base < Scraper::JobBase
   extend T::Sig

   discard_on ActiveJob::DeserializationError
   queue_as :inkbunny

   sig { override.returns(Symbol) }
   def self.http_factory_method
     :get_inkbunny_http_client
   end

   sig { returns(T.nilable(Domain::Inkbunny::User)) }
   def user_from_args
     T.cast(arguments[0][:user], T.nilable(Domain::Inkbunny::User))
   end

   sig { returns(Domain::Inkbunny::User) }
   def user_from_args!
     user_from_args || raise("user must exist")
   end
 end
@@ -1,11 +1,11 @@
-# typed: true
+# typed: strict
 module Domain::Inkbunny::Job
   class FileJob < Base
     queue_as :static_file

     sig { override.params(args: T.untyped).void }
     def perform(args)
       file = args[:file] || fatal_error("file is required")
       caused_by_entry = args[:caused_by_entry]
       logger.prefix =
         proc do
           "[#{file.id.to_s.bold} / " + "#{file.ib_file_id.to_s.bold} / " +
@@ -21,7 +21,7 @@ module Domain::Inkbunny::Job
       end
     end

-    response = http_client.get(url_str, caused_by_entry: caused_by_entry)
+    response = http_client.get(url_str)

     if response.status_code != 200
       file.state = :error
@@ -16,9 +16,7 @@ module Domain::Inkbunny::Job
   end

   url = ApiSearchPageProcessor.build_api_search_url(rid: rid, page: page)
-  response = http_client.post(url, caused_by_entry: causing_log_entry)
-  log_entry = response.log_entry
-  self.first_log_entry ||= log_entry
+  response = http_client.post(url)

   if response.status_code != 200
     fatal_error("api_search failed: #{response.status_code}")
@@ -27,7 +25,7 @@ module Domain::Inkbunny::Job
   result =
     processor.process!(
       JSON.parse(response.body),
-      caused_by_entry: log_entry,
+      caused_by_entry: response.log_entry,
     )
   num_new_posts = T.cast(result[:num_new_posts], Integer)
   logger.info(
@@ -28,9 +28,7 @@ module Domain::Inkbunny::Job
     rid: rid,
     page: page,
   )
-  response = http_client.post(url, caused_by_entry: causing_log_entry)
-  log_entry = response.log_entry
-  self.first_log_entry ||= log_entry
+  response = http_client.post(url)
   pool.deep_update_log_entry = causing_log_entry

   if response.status_code != 200
@@ -40,7 +38,7 @@ module Domain::Inkbunny::Job
   result =
     processor.process!(
       JSON.parse(response.body),
-      caused_by_entry: log_entry,
+      caused_by_entry: response.log_entry,
     )

   rid ||= T.cast(result[:rid], String)
@@ -58,9 +58,7 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
     pools_to_update
   )
   url = build_api_submissions_url(ib_post_ids_chunk)
-  response = http_client.get(url, caused_by_entry: causing_log_entry)
-  log_entry = response.log_entry
-  self.first_log_entry ||= log_entry
+  response = http_client.get(url)
   if response.status_code != 200
     fatal_error("api_submissions failed: #{response.status_code}")
   end
@@ -72,7 +70,7 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
   Domain::Inkbunny::Post.transaction do
     deep_update_post_from_submission_json(
       submission_json,
-      log_entry,
+      response.log_entry,
       missing_pool_post_ib_ids,
       pools_to_update,
     )
@@ -1,49 +1,53 @@
-# typed: true
+# typed: strict
 module Domain::Inkbunny::Job
   class UserAvatarJob < Base
     queue_as :static_file

     sig { params(args: T.untyped).void }
     def initialize(*args)
       super(*T.unsafe(args))
     end

     sig { override.params(args: T.untyped).void }
     def perform(args)
-      @user = args[:user] || raise("user must exist")
-      @caused_by_entry = args[:caused_by_entry]
+      user = user_from_args!

       logger.prefix =
-        proc do
-          "[user #{@user.name.to_s.bold} / #{@user.ib_user_id.to_s.bold}]"
-        end
+        proc { "[user #{user.name.to_s.bold} / #{user.ib_user_id.to_s.bold}]" }

-      if @user.avatar_url_str.blank?
+      avatar_url_str = user.avatar_url_str
+      if avatar_url_str.blank?
         logger.warn("user has no avatar_url_str")
         return
       end

-      response =
-        http_client.get(@user.avatar_url_str, caused_by_entry: @caused_by_entry)
+      response = http_client.get(avatar_url_str)
       self.first_log_entry ||= response.log_entry

-      @user.avatar_state_detail ||= {}
-      @user.avatar_state_detail["log_entries"] ||= [
-        @user.avatar_file_log_entry_id,
+      user.avatar_state_detail ||= {}
+      user.avatar_state_detail["log_entries"] ||= [
+        user.avatar_file_log_entry_id,
       ].compact
-      @user.avatar_state_detail["log_entries"] << response.log_entry.id
-      @user.avatar_log_entry = response.log_entry
+      user.avatar_state_detail["log_entries"] << response.log_entry.id
+      user.avatar_log_entry = response.log_entry

       case response.status_code
       when 200
-        @user.avatar_state = :ok
-        @user.avatar_state_detail.delete("download_error")
-        @user.avatar_downloaded_at = response.log_entry.created_at
-        @user.avatar_file_sha256 = response.log_entry.response_sha256
+        user.avatar_state = :ok
+        user.avatar_state_detail.delete("download_error")
+        user.avatar_downloaded_at = response.log_entry.created_at
+        user.avatar_file_sha256 = response.log_entry.response_sha256
         logger.info("downloaded avatar")
       when 404
-        @user.avatar_state = :not_found
+        user.avatar_state = :not_found
         logger.info("avatar 404")
       else
-        @user.avatar_state = :error
-        @user.avatar_state_detail[
+        user.avatar_state = :error
+        user.avatar_state_detail[
           "download_error"
         ] = "http status #{response.status_code}"
-        if @user.avatar_file_sha256.blank?
-          @user.avatar_downloaded_at = response.log_entry.created_at
+        if user.avatar_file_sha256.blank?
+          user.avatar_downloaded_at = response.log_entry.created_at
           logger.info("avatar error, and no previous file")
         else
          logger.info("avatar error, keeping previous file")
@@ -53,7 +57,7 @@ module Domain::Inkbunny::Job
       )
     end
   ensure
-    @user.save! if @user
+    user.save! if user
   end
 end
 end
@@ -1,10 +1,9 @@
-# typed: false
+# typed: true
 module Domain::Inkbunny::Job
   class UserGalleryJob < Base
     def perform(args)
-      user = args[:user] || raise("user must exist")
-      caused_by_entry = args[:caused_by_entry]
-      logger.prefix = "[#{user.name.bold} / #{user.ib_user_id.to_s.bold}]"
+      user = user_from_args!
+      logger.prefix = "[#{user.name&.bold} / #{user.ib_user_id.to_s.bold}]"

       if user.scanned_gallery_at&.after?(1.week.ago)
         logger.warn(
@@ -14,13 +13,12 @@ module Domain::Inkbunny::Job
       end

       processor = ApiSearchPageProcessor.new
-      first_log_entry = nil
-      rid = nil
-      page = 1
-      loop_count = 0
-      max_loop_count = 2000
+      rid = T.let(nil, T.nilable(String))
+      page = T.let(1, Integer)
+      loop_count = T.let(0, Integer)
+      max_loop_count = T.let(2000, Integer)

-      while true
+      loop do
         loop_count += 1
         raise("loop_count: #{loop_count}") if loop_count > max_loop_count

@@ -31,12 +29,7 @@ module Domain::Inkbunny::Job
           page: page,
         )

-        response =
-          http_client.post(
-            url,
-            caused_by_entry: first_log_entry || caused_by_entry,
-          )
-        first_log_entry ||= response.log_entry
+        response = http_client.post(url)
         if response.status_code != 200
           fatal_error("api_search failed: #{response.status_code}")
         end
@@ -61,22 +54,18 @@ module Domain::Inkbunny::Job
           logger.info("[no new posts, stopping]")
           break
         end
-        rid = result[:rid] || raise("no rid")
-        break if result[:num_pages] <= page
+        rid = T.cast(result[:rid], String)
+        break if T.cast(result[:num_pages], Integer) <= page
         page += 1
       end

       logger.info("[total new posts: #{result[:num_total_new_posts]}]")
       user.scanned_gallery_at = Time.current
       user.save!

       if processor.changed_posts.any?
         defer_job(
           Domain::Inkbunny::Job::UpdatePostsJob,
-          {
-            ib_post_ids: processor.changed_posts.map(&:ib_post_id),
-            caused_by_entry: first_log_entry,
-          },
+          { ib_post_ids: processor.changed_posts.map(&:ib_post_id) },
         )
       end
     end
20 app/jobs/domain/sofurry/job/job_base.rb Normal file
@@ -0,0 +1,20 @@
# typed: strict
class Domain::Sofurry::Job::JobBase < Scraper::JobBase
  queue_as :sofurry
  discard_on ActiveJob::DeserializationError

  sig { override.returns(Symbol) }
  def self.http_factory_method
    :get_sofurry_http_client
  end

  sig { returns(T.nilable(Domain::Sofurry::User)) }
  def user_from_args
    T.cast(arguments[0][:user], T.nilable(Domain::Sofurry::User))
  end

  sig { returns(Domain::Sofurry::User) }
  def user_from_args!
    T.must(user_from_args)
  end
end
18 app/jobs/domain/sofurry/job/scan_user_job.rb Normal file
@@ -0,0 +1,18 @@
# typed: strict
class Domain::Sofurry::Job::ScanUserJob < Domain::Sofurry::Job::JobBase
  sig { override.params(args: T.untyped).void }
  def perform(args)
    user = user_from_args!
    unless user.due_for_gallery_scan?
      logger.warn(
        "Skipping gallery scan for #{user.url_name} because it's not due",
      )
      return
    end

    logger.info("Scanning gallery for #{user.url_name}")
    page_url = "https://#{user.url_name}.sofurry.com"
    response = http_client.get(page_url)
    logger.info("response status: #{response.log_entry.status_code}")
  end
end
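A hedged console sketch of driving the new Sofurry scan. The attribute names follow the `create_sofurry_tables` migration later in this diff, but the actual seeding flow for users isn't shown, so treat the values as illustrative:

```ruby
# Hypothetical: create (or find) a Sofurry user record and enqueue a scan.
user =
  Domain::Sofurry::User.find_or_create_by!(
    sofurry_id: "1234",        # illustrative
    url_name: "someartist",    # illustrative
  )
Domain::Sofurry::Job::ScanUserJob.perform_later(user: user)
```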
@@ -15,8 +15,7 @@ class Domain::Twitter::Job::MediaJob < Domain::Twitter::Job::TwitterJobBase
     return
   end

-  response =
-    http_client.get(@media.url_str, caused_by_entry: @caused_by_entry)
+  response = http_client.get(@media.url_str)

   logger.debug "#{HexUtil.humansize(T.must(response.log_entry.response&.size))} / " +
     "#{response.log_entry.content_type} / " +
@@ -11,6 +11,63 @@ class Scraper::JobBase < ApplicationJob

   DeferredJob = Struct.new(:job_class, :params, :set_args)

+  class WrappedHttpClient
+    extend T::Sig
+
+    sig { params(job: Scraper::JobBase, http_client: Scraper::HttpClient).void }
+    def initialize(job, http_client)
+      @job = job
+      @http_client = http_client
+    end
+
+    sig do
+      params(url: String, use_http_cache: T::Boolean).returns(
+        Scraper::HttpClient::Response,
+      )
+    end
+    def get(url, use_http_cache: false)
+      around_request(
+        proc do
+          @http_client.get(
+            url,
+            caused_by_entry: @job.causing_log_entry,
+            use_http_cache: use_http_cache,
+          )
+        end,
+      )
+    end
+
+    sig do
+      params(url: String, use_http_cache: T::Boolean).returns(
+        Scraper::HttpClient::Response,
+      )
+    end
+    def post(url, use_http_cache: false)
+      around_request(
+        proc do
+          @http_client.post(
+            url,
+            caused_by_entry: @job.causing_log_entry,
+            use_http_cache: use_http_cache,
+          )
+        end,
+      )
+    end
+
+    private
+
+    sig do
+      params(proc: T.proc.returns(Scraper::HttpClient::Response)).returns(
+        Scraper::HttpClient::Response,
+      )
+    end
+    def around_request(proc)
+      response = proc.call
+      @job.first_log_entry ||= response.log_entry
+      response
+    end
+  end
+
   sig { params(args: T.untyped).void }
   def initialize(*args)
     super(*T.unsafe(args))
@@ -24,9 +81,10 @@ class Scraper::JobBase < ApplicationJob
   def self.http_factory_method
   end

-  sig { returns(Scraper::HttpClient) }
+  sig { returns(WrappedHttpClient) }
   def http_client
     @http_client ||= Scraper::ClientFactory.send(self.class.http_factory_method)
+    WrappedHttpClient.new(self, @http_client)
   end

   sig { returns(Scraper::GalleryDlClient) }
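This wrapper is what lets the job hunks above drop their explicit `caused_by_entry:` and `self.first_log_entry ||=` lines. A minimal sketch of the resulting call site inside any `Scraper::JobBase` subclass (the URL is illustrative):

```ruby
# The wrapper injects caused_by_entry: causing_log_entry on every request
# and records the first log entry via around_request.
response = http_client.get("https://example.com/resource.json")
response.log_entry  # the HttpLogEntry for this request
first_log_entry     # already set by WrappedHttpClient#around_request
```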
@@ -1,9 +1,16 @@
-# typed: false
-module Domain::E621::TagUtil
+# typed: strict
+class Domain::E621::TagUtil
   extend T::Sig
   include HasColorLogger

   TagAndCategory = Struct.new(:name, :category)

+  sig do
+    params(
+      post_json: T::Hash[String, T.untyped],
+      caused_by_entry: T.nilable(ReduxApplicationRecord),
+    ).returns(Domain::E621::Post)
+  end
   def self.initialize_or_update_post(post_json:, caused_by_entry: nil)
     # create all posts that don't already exist
     e621_id = post_json["id"]
42 app/lib/domain/e621/task/collect_post_favs_task.rb Normal file
@@ -0,0 +1,42 @@
# typed: strict
class Domain::E621::Task::CollectPostFavsTask
  extend T::Sig
  include HasColorLogger

  sig { params(max_page: Integer, query: String).void }
  def run(max_page:, query:)
    page = 1
    while page <= max_page
      url =
        "https://e621.net/posts.json?tags=#{query.gsub(" ", "+")}&limit=320&page=#{page}"
      logger.info "GET page=#{page}, max=#{max_page}, url=#{url}"
      page += 1
      post_jsons =
        T.cast(
          HTTP
            .headers(
              "User-Agent" => "FurryArchiver",
              "Accept" => "application/json",
            )
            .get(url)
            .parse["posts"],
          T::Array[T::Hash[String, T.untyped]],
        )

      post_jsons.each do |post_json|
        post =
          Domain::E621::TagUtil.initialize_or_update_post(post_json: post_json)
        if post.nil?
          logger.warn "post not found: #{post_json["id"]}"
          next
        end
        post.save! if post.changed?
        raise if post.new_record?
        next if post.scanned_post_favs_at
        Domain::E621::Job::ScanPostFavsJob.perform_now(post: post)
      end
    end
  end
end
@@ -40,6 +40,14 @@ class Scraper::ClientFactory
     end
   end

+  def self.get_sofurry_http_client
+    if Rails.env.test?
+      @http_client_mock || raise("no http client mock set")
+    else
+      _http_client_impl(:sofurry, Scraper::SofurryHttpClientConfig)
+    end
+  end
+
   def self.get_e621_http_client
     if Rails.env.test?
       @http_client_mock || raise("no http client mock set")
@@ -230,8 +230,11 @@ class Scraper::HttpClient
     total_time_ms: total_time_ms,
   )

-  if response_code == 524 || response_code == 502 || response_code == 503 ||
-       response_code == 403
+  if response_code == 403 &&
+       response_body.include?("This users favorites are hidden")
+    logger.warn("403 - favorites are hidden")
+  elsif response_code == 524 || response_code == 502 ||
+        response_code == 503 || response_code == 403
     logger.error("HTTP #{response_code} - sleeping to not throttle server")
     sleep 15
   end
@@ -6,7 +6,7 @@ class Scraper::HttpClientConfig

   include HasColorLogger

-  sig { abstract.returns(T.nilable(T::Array[T.untyped])) }
+  sig { abstract.returns(T.nilable(T::Array[T::Hash[Symbol, T.untyped]])) }
   def cookies
   end
34 app/lib/scraper/sofurry_http_client_config.rb Normal file
@@ -0,0 +1,34 @@
# typed: strict
class Scraper::SofurryHttpClientConfig < Scraper::HttpClientConfig
  sig { override.returns(T.nilable(T::Array[T.untyped])) }
  def cookies
    php_cookie = GlobalState.get("sofurry-cookie-php")
    raise "PHPSESSID cookie is not set" if php_cookie.nil? || php_cookie.empty?

    [
      {
        domain: ".sofurry.com",
        cookies: [{ name: "PHPSESSID", value: php_cookie, path: "/" }],
      },
    ]
  end

  sig { override.returns(T::Array[[String, Numeric]]) }
  def ratelimit
    [
      ["sofurry.com", 0.25],
      ["*.sofurry.com", 0.25],
      ["*.sofurryfiles.com", 0.25],
    ]
  end

  sig { override.returns(T::Array[String]) }
  def allowed_domains
    %w[sofurry.com *.sofurry.com *.sofurryfiles.com]
  end

  sig { override.returns(Integer) }
  def redirect_limit
    3
  end
end
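`cookies` reads the PHPSESSID session out of `GlobalState`, so the value has to be seeded before the first Sofurry job runs. A hedged sketch, assuming `GlobalState` exposes a `set` counterpart to the `get` used above (the setter is not shown in this diff):

```ruby
# Hypothetical seeding step: paste a logged-in PHPSESSID value into
# GlobalState so Scraper::SofurryHttpClientConfig#cookies can find it.
GlobalState.set("sofurry-cookie-php", "abc123sessionvalue") # assumed API
```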
@@ -2,7 +2,7 @@
 class Scraper::TwitterHttpClientConfig < Scraper::HttpClientConfig
   DEFAULT_ALLOWED_DOMAINS = %w[*.twimg.com ipinfo.io]

-  sig { override.returns(T::Array[String]) }
+  sig { override.returns(T.nilable(T::Array[T::Hash[Symbol, T.untyped]])) }
   def cookies
     []
   end
6 app/models/domain/e621/fav.rb Normal file
@@ -0,0 +1,6 @@
# typed: strict
class Domain::E621::Fav < ReduxApplicationRecord
  self.table_name = "domain_e621_favs"
  belongs_to :user, class_name: "Domain::E621::User", inverse_of: :favs
  belongs_to :post, class_name: "Domain::E621::Post", inverse_of: :favs
end
@@ -1,5 +1,6 @@
 # typed: strict
 class Domain::E621::Post < ReduxApplicationRecord
+  include AttrJson::Record
   self.table_name = "domain_e621_posts"

   include HasIndexedPost
@@ -36,6 +37,17 @@ class Domain::E621::Post < ReduxApplicationRecord
            foreign_key: :e621_id,
            optional: true

+  has_many :favs, class_name: "Domain::E621::Fav", inverse_of: :post
+  has_many :faving_users,
+           class_name: "Domain::E621::User",
+           through: :favs,
+           source: :user
+
+  # When was the post's /posts/<post_id>/favorites pages scanned?
+  # Used to identify users with a significant number of favorites, setting
+  # their `num_other_favs_cached` attribute
+  attr_json :scanned_post_favs_at, :datetime
+
   sig { returns(String) }
   def to_param
     self.e621_id.to_s
29 app/models/domain/e621/user.rb Normal file
@@ -0,0 +1,29 @@
# typed: strict
class Domain::E621::User < ReduxApplicationRecord
  self.table_name = "domain_e621_users"
  include AttrJson::Record
  include AttrJson::Record::QueryScopes

  json_attributes_scope :scanned_favs_at

  validates_inclusion_of :scanned_favs_status,
                         in: %w[ok error],
                         if: :scanned_favs_status?

  has_many :favs, class_name: "Domain::E621::Fav", inverse_of: :user
  attr_json :favs_are_hidden, :boolean

  # number of favorites that the user has, derived from scraped html
  # on /posts/<post_id>/favorites?page=<n>
  # Used to find users with a significant number of favorites
  attr_json :num_other_favs_cached, :integer
  attr_json :scanned_favs_status, :string
  attr_json :scanned_favs_at, :datetime

  sig { returns(T.nilable(::String)) }
  def url_name
    if name = self.name
      name.gsub(" ", "_")
    end
  end
end
8 app/models/domain/sofurry/fav.rb Normal file
@@ -0,0 +1,8 @@
# typed: strict
class Domain::Sofurry::Fav < ReduxApplicationRecord
  self.table_name = "domain_sofurry_favs"
  self.primary_key = %i[post_id user_id]

  belongs_to :post, class_name: "Domain::Sofurry::Post"
  belongs_to :user, class_name: "Domain::Sofurry::User"
end
5 app/models/domain/sofurry/file.rb Normal file
@@ -0,0 +1,5 @@
# typed: strict
class Domain::Sofurry::File < ReduxApplicationRecord
  self.table_name = "domain_sofurry_files"
  belongs_to :post, class_name: "Domain::Sofurry::Post", inverse_of: :file
end
31 app/models/domain/sofurry/post.rb Normal file
@@ -0,0 +1,31 @@
# typed: strict
class Domain::Sofurry::Post < ReduxApplicationRecord
  self.table_name = "domain_sofurry_posts"
  include HasIndexedPost
  include AttrJson::Record

  belongs_to :creator, class_name: "Domain::Sofurry::User", inverse_of: :posts
  has_one :file, class_name: "Domain::Sofurry::File", inverse_of: :post

  validates :sofurry_id, presence: true, uniqueness: true
  validates :title, presence: true
  validates :description, presence: true
  validates :content_level,
            presence: true,
            inclusion: {
              in: %w[clean adult extreme],
            }
  # when saving from json, 0=story, 1=art, 2=music, 3=journal, 4=photo
  validates :post_type,
            presence: true,
            inclusion: {
              in: %w[story art music journal photo],
            }

  attr_json :posted_at, :datetime
  attr_json :content_level, :string
  attr_json :title, :string
  attr_json :tags_array, :string, array: true
  attr_json :description, :string
  attr_json :post_type, :string
end
31 app/models/domain/sofurry/user.rb Normal file
@@ -0,0 +1,31 @@
# typed: strict
class Domain::Sofurry::User < ReduxApplicationRecord
  include AttrJson::Record
  self.table_name = "domain_sofurry_users"
  validates :sofurry_id, presence: true, uniqueness: true
  validates :url_name, presence: true, uniqueness: true

  belongs_to :avatar_file, class_name: "::BlobEntry"
  belongs_to :avatar_log_entry, class_name: "::HttpLogEntry"
  has_many :posts, class_name: "Domain::Sofurry::Post", inverse_of: :creator

  attr_json :submission_count_cached, :integer
  attr_json :registered_at, :datetime # or Jan 1, 1970 if "long long ago"
  attr_json :profile_description, :string
  attr_json :gallery_folder_ids, :integer, array: true

  attr_json :scanned_gallery_at, :datetime
  attr_json :scanned_profile_at, :datetime

  sig { returns(T::Boolean) }
  def due_for_gallery_scan?
    sga = scanned_gallery_at
    sga.nil? || sga < 1.month.ago
  end

  sig { returns(T::Boolean) }
  def due_for_profile_scan?
    sga = scanned_profile_at
    sga.nil? || sga < 1.month.ago
  end
end
@@ -11,6 +11,20 @@ class ReduxApplicationRecord < ActiveRecord::Base
   after_update { observe(:update) }
   after_destroy { observe(:destroy) }

+  sig { params(attr_name: Symbol).void }
+  def self.json_attributes_scope(attr_name)
+    scope :"where_#{attr_name}",
+          ->(value) do
+            if value.nil? || value == :null
+              where("json_attributes->>'#{attr_name}' IS NULL")
+            elsif value == :not_null
+              where("json_attributes->>'#{attr_name}' IS NOT NULL")
+            else
+              where("json_attributes->>'#{attr_name}' = ?", value)
+            end
+          end
+  end
+
   private

   ACTIVE_RECORD_COUNTER =
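`json_attributes_scope` generates a `where_<attr>` scope with `:null` / `:not_null` sentinels for querying the jsonb column; the `e621:scan_user_favs_descending` rake task later in this diff uses it like so:

```ruby
Domain::E621::User.where_scanned_favs_at(:null)      # attribute absent / NULL
Domain::E621::User.where_scanned_favs_at(:not_null)  # attribute present
```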
@@ -26,6 +26,8 @@
   <%= render "good_job/arguments/domain_inkbunny_file", file: value %>
 <% when Domain::E621::Post %>
   <%= render "good_job/arguments/domain_e621_post", post: value %>
+<% when Domain::E621::User %>
+  <%= render "good_job/arguments/domain_e621_user", user: value %>
 <% else %>
   <div class="text-truncate">
     <code class="small" title="<%= value.inspect %>"
49 app/views/good_job/arguments/_domain_e621_user.html.erb Normal file
@@ -0,0 +1,49 @@
<div class="d-flex align-items-center gap-2">
  <%= link_to "https://e621.net/users/#{user.e621_user_id}",
              class: "badge bg-primary",
              target: "_blank" do %>
    <i class="fa-solid fa-paw me-1"></i>Domain::E621::User #<%= user.id %>
  <% end %>

  <div class="d-flex align-items-center ms-auto gap-2">
    <% if user.scanned_favs_status.present? %>
      <span
        class="badge <%= user.scanned_favs_status == "ok" ? "bg-success" : "bg-warning text-dark" %>"
      >
        <i
          class="<%= if user.scanned_favs_status == "ok"
            "fa-solid fa-check"
          else
            "fa-solid fa-exclamation-triangle"
          end %> me-1"
        ></i>
        <%= user.scanned_favs_status %>
      </span>
    <% end %>

    <span class="badge bg-secondary">
      <i class="fa-solid fa-user me-1"></i><%= user.name %>
    </span>

    <% if user.num_other_favs_cached.present? %>
      <span class="badge bg-info text-dark">
        <i class="fa-solid fa-heart me-1"></i><%= user.num_other_favs_cached %>
        favs
      </span>
    <% end %>
    <% if user.scanned_favs_at.present? %>
      <span
        class="badge bg-light text-dark"
        title="<%= time_ago_in_words(user.scanned_favs_at) %> ago"
      >
        <i class="fa-regular fa-clock me-1"></i
        ><%= user.scanned_favs_at.strftime("%Y-%m-%d %H:%M:%S") %>
      </span>
    <% end %>
    <% if user.favs_are_hidden %>
      <span class="badge bg-danger">
        <i class="fa-solid fa-eye-slash me-1"></i>Hidden
      </span>
    <% end %>
  </div>
</div>
@@ -17,6 +17,8 @@ module ReduxScraper
   class Application < Rails::Application
     config.session_store :cookie_store, key: "_refurrer_session"
     config.assets.precompile << "good_job_custom.css"
+    config.active_record.schema_format = :sql

     # Initialize configuration defaults for originally generated Rails version.
     config.load_defaults 7.0
@@ -27,7 +27,7 @@ Rails.application.configure do

   config.cache_store = :memory_store
   config.public_file_server.headers = {
-    "Cache-Control" => "public, max-age=#{2.days.to_i}"
+    "Cache-Control" => "public, max-age=#{2.days.to_i}",
   }
 else
   config.action_controller.perform_caching = false
@@ -69,4 +69,7 @@ Rails.application.configure do

   # Uncomment if you wish to allow Action Cable access from any origin.
   config.action_cable.disable_request_forgery_protection = true
+
+  # Do not dump schema after migrations.
+  config.active_record.dump_schema_after_migration = false
 end
@@ -23,6 +23,51 @@ Rails.application.configure do
   else
     config.good_job.execution_mode = :external
   end

+  config.good_job.enable_cron = true
+  config.good_job.cron_graceful_restart_period = 2.minutes
+  config.good_job.cron = {
+    inkbunny_latest_posts: { # each recurring job must have a unique key
+      cron: "*/2 * * * *",
+      class: "Domain::Inkbunny::Job::LatestPostsJob",
+      args: [{}],
+      set: {
+        queue: "manual",
+        priority: -20,
+      },
+      description: "Inkbunny, enqueue latest posts",
+    },
+    fa_browse_page_job: {
+      cron: "*/1 * * * *",
+      class: "Domain::Fa::Job::BrowsePageJob",
+      args: [{}],
+      set: {
+        queue: "manual",
+        priority: -20,
+      },
+      description: "FurAffinity, scan browse page",
+    },
+    fa_home_page_job: {
+      cron: "*/1 * * * *",
+      class: "Domain::Fa::Job::HomePageJob",
+      args: [{}],
+      set: {
+        queue: "manual",
+        priority: -20,
+      },
+      description: "FurAffinity, scan home page",
+    },
+    e621_posts_index_job: {
+      cron: "*/1 * * * *",
+      class: "Domain::E621::Job::PostsIndexJob",
+      args: [{}],
+      set: {
+        queue: "manual",
+        priority: -20,
+      },
+      description: "e621, index posts",
+    },
+  }
 end

 ActiveSupport.on_load(:good_job_application_controller) do
5 db/migrate/20250127165909_drop_delayed_jobs.rb Normal file
@@ -0,0 +1,5 @@
class DropDelayedJobs < ActiveRecord::Migration[7.2]
  def change
    drop_table :delayed_jobs
  end
end
31 db/migrate/20250127184150_create_domain_e621_users.rb Normal file
@@ -0,0 +1,31 @@
class CreateDomainE621Users < ActiveRecord::Migration[7.2]
  def change
    up_only { execute <<~SQL }
      SET DEFAULT_TABLESPACE = mirai;
    SQL

    create_table :domain_e621_users do |t|
      t.integer :e621_user_id, null: false, index: { unique: true }
      t.datetime :scanned_favs_at
      t.string :name, null: false
      t.timestamps
    end

    create_table :domain_e621_favs do |t|
      t.references :user,
                   null: false,
                   index: false,
                   foreign_key: {
                     to_table: :domain_e621_users,
                   }
      t.references :post,
                   null: false,
                   foreign_key: {
                     to_table: :domain_e621_posts,
                   }

      t.index %i[user_id post_id], unique: true
      t.timestamps
    end
  end
end
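`SET DEFAULT_TABLESPACE = mirai` makes every table created later in the migration land in the tablespace set up by the new devcontainer script. A hedged console check (in Postgres, `pg_tables.tablespace` is NULL for tables in the default tablespace):

```ruby
# Verify the new tables were placed in the "mirai" tablespace.
ReduxApplicationRecord.connection.select_rows(<<~SQL)
  SELECT tablename, tablespace
  FROM pg_tables
  WHERE tablename IN ('domain_e621_users', 'domain_e621_favs')
SQL
```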
6 db/migrate/20250128041904_add_data_to_e621_users.rb Normal file
@@ -0,0 +1,6 @@
class AddDataToE621Users < ActiveRecord::Migration[7.2]
  def change
    add_column :domain_e621_users, :json_attributes, :jsonb, default: {}
    add_index :domain_e621_users, :json_attributes, using: :gin
  end
end
6 db/migrate/20250128142856_add_data_to_e621_posts.rb Normal file
@@ -0,0 +1,6 @@
class AddDataToE621Posts < ActiveRecord::Migration[7.2]
  def change
    add_column :domain_e621_posts, :json_attributes, :jsonb, default: {}
    add_index :domain_e621_posts, :json_attributes, using: :gin
  end
end
59 db/migrate/20250128235159_create_sofurry_tables.rb Normal file
@@ -0,0 +1,59 @@
class CreateSofurryTables < ActiveRecord::Migration[7.2]
  def change
    up_only { execute <<~SQL }
      SET DEFAULT_TABLESPACE = mirai;
    SQL

    create_table :domain_sofurry_users do |t|
      t.jsonb :json_attributes, default: {}
      t.string :sofurry_id, null: false, index: { unique: true }
      t.string :url_name, null: false, index: { unique: true }
      t.string :avatar_url_str
      t.binary :avatar_file_sha256
      t.references :avatar_log_entry,
                   null: true,
                   foreign_key: {
                     to_table: :http_log_entries,
                   }
      t.timestamps
    end

    create_table :domain_sofurry_posts do |t|
      t.jsonb :json_attributes, default: {}
      t.references :sofurry_creator,
                   null: false,
                   foreign_key: {
                     to_table: :domain_sofurry_users,
                   }
      t.string :sofurry_id, null: false, index: { unique: true }
      t.timestamps
    end

    create_table :domain_sofurry_files do |t|
      t.jsonb :json_attributes, default: {}
      t.string :sofurry_id, null: false, index: { unique: true }
      t.references :post,
                   null: false,
                   foreign_key: {
                     to_table: :domain_sofurry_posts,
                   }
      t.timestamps
    end

    create_table :domain_sofurry_favs, id: false do |t|
      t.references :post,
                   null: false,
                   index: false,
                   foreign_key: {
                     to_table: :domain_sofurry_posts,
                   }
      t.references :user,
                   null: false,
                   foreign_key: {
                     to_table: :domain_sofurry_users,
                   }
      t.index %i[post_id user_id], unique: true
      t.timestamps
    end
  end
end
7 db/migrate/20250129000149_add_sofurry_postable_type.rb Normal file
@@ -0,0 +1,7 @@
class AddSofurryPostableType < ActiveRecord::Migration[7.2]
  def change
    up_only { execute <<~SQL }
      ALTER TYPE postable_type ADD VALUE IF NOT EXISTS 'Domain::Sofurry::Post';
    SQL
  end
end
@@ -0,0 +1,6 @@
class RelaxE621PostsMd5UniqueConstraint < ActiveRecord::Migration[7.2]
  def change
    remove_index :domain_e621_posts, :md5, unique: true
    add_index :domain_e621_posts, :md5, unique: false
  end
end
@@ -0,0 +1,26 @@
# typed: strict
class MigrateE621ScannedFavsAtToJsonColumn < ActiveRecord::Migration[7.2]
  extend T::Sig

  sig { void }
  def change
    reversible do |dir|
      dir.up do
        execute <<~SQL
          UPDATE domain_e621_users
          SET json_attributes = jsonb_set(json_attributes, '{scanned_favs_at}', to_json(scanned_favs_at)::jsonb)
          WHERE scanned_favs_at IS NOT NULL
        SQL
        remove_column :domain_e621_users, :scanned_favs_at
      end
      dir.down do
        add_column :domain_e621_users, :scanned_favs_at, :datetime
        execute <<~SQL
          UPDATE domain_e621_users
          SET scanned_favs_at = (json_attributes->>'scanned_favs_at')::text::timestamp
          WHERE json_attributes->>'scanned_favs_at' IS NOT NULL
        SQL
      end
    end
  end
end
1866 db/schema.rb generated (file diff suppressed because it is too large)
8186 db/structure.sql Normal file (file diff suppressed because it is too large)
@@ -1,19 +1,53 @@
 namespace :e621 do
   desc "import e621 data from csv"
   task import_csv: :environment do |t, args|
     start_at = ENV["start_at"]&.to_i
     limit = ENV["limit"]&.to_i
     csv_path = ENV["csv"] || raise("must supply `csv`")

     Domain::E621::CsvPostImporter.new(
       csv_path: csv_path,
       start_at: start_at,
       limit: limit,
     ).run
   end

   desc "run a single e621 posts index job"
   task posts_index_job: :environment do
     Domain::E621::Job::PostsIndexJob.set(priority: -10).perform_later({})
   end

+  desc "scan e621 user favs"
+  task scan_user_favs: :environment do
+    while user =
+            Domain::E621::User
+              .where(scanned_favs_at: nil)
+              .order("RANDOM()")
+              .take
+      Domain::E621::Job::ScanUserFavsJob.perform_now(user: user)
+    end
+  end
+
+  desc "scan e621 user favs, descending by num_other_favs_cached"
+  task scan_user_favs_descending: :environment do
+    # total number of favs:
+    # ReduxApplicationRecord.connection.execute(
+    #   "SELECT SUM((json_attributes->>'num_other_favs_cached')::text::int) as total_favs
+    #   FROM domain_e621_users WHERE json_attributes->>'num_other_favs_cached' IS NOT NULL",
+    # ).first["total_favs"]
+    user_query =
+      lambda do
+        Domain::E621::User
+          .where_scanned_favs_at(:null)
+          .where("json_attributes->>'num_other_favs_cached' is not null")
+          .order(Arel.sql "json_attributes->>'num_other_favs_cached' DESC")
+          .first
+      end
+
+    while user = user_query.call
+      Domain::E621::Job::ScanUserFavsJob.perform_now(user: user)
+    end
+  end
+
+  desc "Gather cached user fav counts based on post fav lists"
+  task collect_post_favs: :environment do
+    max_page = (ENV["MAX_PAGE"] || 1).to_i
+    default_query = "status:any order:favcount"
+    query = nil
+    while query.blank?
+      print "query (#{default_query})> "
+      query = $stdin.gets&.chomp || default_query
+    end
+
+    Domain::E621::Task::CollectPostFavsTask.new.run(max_page:, query:)
+  end
 end
3
sorbet/rbi/dsl/application_controller.rbi
generated
3
sorbet/rbi/dsl/application_controller.rbi
generated
@@ -17,6 +17,9 @@ class ApplicationController
|
||||
include ::Turbo::FramesHelper
|
||||
include ::Turbo::IncludesHelper
|
||||
include ::Turbo::StreamsHelper
|
||||
include ::ActionView::Helpers::DateHelper
|
||||
include ::ActionView::Helpers::SanitizeHelper
|
||||
include ::ActionView::Helpers::RenderingHelper
|
||||
include ::ActionView::Helpers::CaptureHelper
|
||||
include ::ActionView::Helpers::OutputSafetyHelper
|
||||
include ::ActionView::Helpers::TagHelper
|
||||
|
||||
37
sorbet/rbi/dsl/attr_json/model.rbi
generated
Normal file
37
sorbet/rbi/dsl/attr_json/model.rbi
generated
Normal file
@@ -0,0 +1,37 @@
|
||||
# typed: true
|
||||
|
||||
# DO NOT EDIT MANUALLY
|
||||
# This is an autogenerated file for dynamic methods in `AttrJson::Model`.
|
||||
# Please instead update this file by running `bin/tapioca dsl AttrJson::Model`.
|
||||
|
||||
|
||||
module AttrJson::Model
|
||||
include GeneratedInstanceMethods
|
||||
|
||||
mixes_in_class_methods ::ActiveModel::Validations::ClassMethods
|
||||
mixes_in_class_methods ::ActiveModel::Conversion::ClassMethods
|
||||
mixes_in_class_methods GeneratedClassMethods
|
||||
|
||||
module GeneratedClassMethods
|
||||
def __callbacks; end
|
||||
def __callbacks=(value); end
|
||||
def __callbacks?; end
|
||||
def _validators; end
|
||||
def _validators=(value); end
|
||||
def _validators?; end
|
||||
def attr_json_registry; end
|
||||
def attr_json_registry=(value); end
|
||||
def attr_json_registry?; end
|
||||
def param_delimiter; end
|
||||
def param_delimiter=(value); end
|
||||
def param_delimiter?; end
|
||||
end
|
||||
|
||||
module GeneratedInstanceMethods
|
||||
def __callbacks; end
|
||||
def __callbacks?; end
|
||||
def _validators; end
|
||||
def _validators?; end
|
||||
def param_delimiter=(value); end
|
||||
end
|
||||
end
|
||||
3
sorbet/rbi/dsl/devise_controller.rbi
generated
3
sorbet/rbi/dsl/devise_controller.rbi
generated
@@ -14,6 +14,9 @@ class DeviseController
|
||||
include ::Turbo::FramesHelper
|
||||
include ::Turbo::IncludesHelper
|
||||
include ::Turbo::StreamsHelper
|
||||
include ::ActionView::Helpers::DateHelper
|
||||
include ::ActionView::Helpers::SanitizeHelper
|
||||
include ::ActionView::Helpers::RenderingHelper
|
||||
include ::ActionView::Helpers::CaptureHelper
|
||||
include ::ActionView::Helpers::OutputSafetyHelper
|
||||
include ::ActionView::Helpers::TagHelper
|
||||
|
||||
1282
sorbet/rbi/dsl/domain/e621/fav.rbi
generated
Normal file
1282
sorbet/rbi/dsl/domain/e621/fav.rbi
generated
Normal file
File diff suppressed because it is too large
Load Diff
27
sorbet/rbi/dsl/domain/e621/job/scan_post_favs_job.rbi
generated
Normal file
27
sorbet/rbi/dsl/domain/e621/job/scan_post_favs_job.rbi
generated
Normal file
@@ -0,0 +1,27 @@
|
||||
# typed: true
|
||||
|
||||
# DO NOT EDIT MANUALLY
|
||||
# This is an autogenerated file for dynamic methods in `Domain::E621::Job::ScanPostFavsJob`.
|
||||
# Please instead update this file by running `bin/tapioca dsl Domain::E621::Job::ScanPostFavsJob`.
|
||||
|
||||
|
||||
class Domain::E621::Job::ScanPostFavsJob
|
||||
sig { returns(ColorLogger) }
|
||||
def logger; end
|
||||
|
||||
class << self
|
||||
sig { returns(ColorLogger) }
|
||||
def logger; end
|
||||
|
||||
sig do
|
||||
params(
|
||||
args: T.untyped,
|
||||
block: T.nilable(T.proc.params(job: Domain::E621::Job::ScanPostFavsJob).void)
|
||||
).returns(T.any(Domain::E621::Job::ScanPostFavsJob, FalseClass))
|
||||
end
|
||||
def perform_later(args, &block); end
|
||||
|
||||
sig { params(args: T.untyped).void }
|
||||
def perform_now(args); end
|
||||
end
|
||||
end
|
||||
27
sorbet/rbi/dsl/domain/e621/job/scan_user_favs_job.rbi
generated
Normal file
27
sorbet/rbi/dsl/domain/e621/job/scan_user_favs_job.rbi
generated
Normal file
@@ -0,0 +1,27 @@
|
||||
# typed: true
|
||||
|
||||
# DO NOT EDIT MANUALLY
|
||||
# This is an autogenerated file for dynamic methods in `Domain::E621::Job::ScanUserFavsJob`.
|
||||
# Please instead update this file by running `bin/tapioca dsl Domain::E621::Job::ScanUserFavsJob`.
|
||||
|
||||
|
||||
class Domain::E621::Job::ScanUserFavsJob
|
||||
sig { returns(ColorLogger) }
|
||||
def logger; end
|
||||
|
||||
class << self
|
||||
sig { returns(ColorLogger) }
|
||||
def logger; end
|
||||
|
||||
sig do
|
||||
params(
|
||||
args: T.untyped,
|
||||
block: T.nilable(T.proc.params(job: Domain::E621::Job::ScanUserFavsJob).void)
|
||||
).returns(T.any(Domain::E621::Job::ScanUserFavsJob, FalseClass))
|
||||
end
|
||||
def perform_later(args, &block); end
|
||||
|
||||
sig { params(args: T.untyped).void }
|
||||
def perform_now(args); end
|
||||
end
|
||||
end
|
||||
27
sorbet/rbi/dsl/domain/e621/job/scan_users_job.rbi
generated
Normal file
27
sorbet/rbi/dsl/domain/e621/job/scan_users_job.rbi
generated
Normal file
@@ -0,0 +1,27 @@
|
||||
# typed: true
|
||||
|
||||
# DO NOT EDIT MANUALLY
|
||||
# This is an autogenerated file for dynamic methods in `Domain::E621::Job::ScanUsersJob`.
|
||||
# Please instead update this file by running `bin/tapioca dsl Domain::E621::Job::ScanUsersJob`.
|
||||
|
||||
|
||||
class Domain::E621::Job::ScanUsersJob
|
||||
sig { returns(ColorLogger) }
|
||||
def logger; end
|
||||
|
||||
class << self
|
||||
sig { returns(ColorLogger) }
|
||||
def logger; end
|
||||
|
||||
sig do
|
||||
params(
|
||||
args: T.untyped,
|
||||
block: T.nilable(T.proc.params(job: Domain::E621::Job::ScanUsersJob).void)
|
||||
).returns(T.any(Domain::E621::Job::ScanUsersJob, FalseClass))
|
||||
end
|
||||
def perform_later(args, &block); end
|
||||
|
||||
sig { params(args: T.untyped).void }
|
||||
def perform_now(args); end
|
||||
end
|
||||
end
|
||||
167
sorbet/rbi/dsl/domain/e621/post.rbi
generated
167
sorbet/rbi/dsl/domain/e621/post.rbi
generated
@@ -12,12 +12,27 @@ class Domain::E621::Post
|
||||
extend CommonRelationMethods
|
||||
extend GeneratedRelationMethods
|
||||
|
||||
sig { returns(T::Array[Symbol]) }
|
||||
def attr_json_registry; end
|
||||
|
||||
private
|
||||
|
||||
sig { returns(NilClass) }
|
||||
def to_ary; end
|
||||
|
||||
class << self
|
||||
sig { params(name: Symbol, type: Symbol, options: T.nilable(T::Hash[Symbol, T.untyped])).void }
|
||||
def attr_json(name, type, options = nil); end
|
||||
|
||||
sig do
|
||||
params(
|
||||
default_container_attribute: T.nilable(Symbol),
|
||||
bad_cast: T.nilable(Symbol),
|
||||
unknown_key: T.nilable(Symbol)
|
||||
).void
|
||||
end
|
||||
def attr_json_config(default_container_attribute: nil, bad_cast: nil, unknown_key: nil); end
|
||||
|
||||
sig do
|
||||
params(
|
||||
attributes: T.untyped,
|
||||
@@ -472,6 +487,34 @@ class Domain::E621::Post
|
||||
sig { params(args: T.untyped, blk: T.untyped).returns(::Domain::E621::Post) }
|
||||
def create_parent_e621!(*args, &blk); end
|
||||
|
||||
sig { returns(T::Array[T.untyped]) }
|
||||
def fav_ids; end
|
||||
|
||||
sig { params(ids: T::Array[T.untyped]).returns(T::Array[T.untyped]) }
|
||||
def fav_ids=(ids); end
|
||||
|
||||
sig { returns(T::Array[T.untyped]) }
|
||||
def faving_user_ids; end
|
||||
|
||||
sig { params(ids: T::Array[T.untyped]).returns(T::Array[T.untyped]) }
|
||||
def faving_user_ids=(ids); end
|
||||
|
||||
# This method is created by ActiveRecord on the `Domain::E621::Post` class because it declared `has_many :faving_users, through: :favs`.
|
||||
# 🔗 [Rails guide for `has_many_through` association](https://guides.rubyonrails.org/association_basics.html#the-has-many-through-association)
|
||||
sig { returns(::Domain::E621::User::PrivateCollectionProxy) }
|
||||
def faving_users; end
|
||||
|
||||
sig { params(value: T::Enumerable[::Domain::E621::User]).void }
|
||||
def faving_users=(value); end
|
||||
|
||||
# This method is created by ActiveRecord on the `Domain::E621::Post` class because it declared `has_many :favs`.
|
||||
# 🔗 [Rails guide for `has_many` association](https://guides.rubyonrails.org/association_basics.html#the-has-many-association)
|
||||
sig { returns(::Domain::E621::Fav::PrivateCollectionProxy) }
|
||||
def favs; end
|
||||
|
||||
sig { params(value: T::Enumerable[::Domain::E621::Fav]).void }
|
||||
def favs=(value); end
|
||||
|
||||
sig { returns(T.nilable(::HttpLogEntry)) }
|
||||
def file; end
|
||||
|
||||
@@ -1257,6 +1300,51 @@ class Domain::E621::Post
|
||||
sig { void }
|
||||
def id_will_change!; end
|
||||
|
||||
sig { returns(T.untyped) }
|
||||
def json_attributes; end
|
||||
|
||||
sig { params(value: T.untyped).returns(T.untyped) }
|
||||
def json_attributes=(value); end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def json_attributes?; end
|
||||
|
||||
sig { returns(T.untyped) }
|
||||
def json_attributes_before_last_save; end
|
||||
|
||||
sig { returns(T.untyped) }
|
||||
def json_attributes_before_type_cast; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def json_attributes_came_from_user?; end
|
||||
|
||||
sig { returns(T.nilable([T.untyped, T.untyped])) }
|
||||
def json_attributes_change; end
|
||||
|
||||
sig { returns(T.nilable([T.untyped, T.untyped])) }
|
||||
def json_attributes_change_to_be_saved; end
|
||||
|
||||
sig { params(from: T.untyped, to: T.untyped).returns(T::Boolean) }
|
||||
def json_attributes_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
|
||||
|
||||
sig { returns(T.untyped) }
|
||||
def json_attributes_in_database; end
|
||||
|
||||
sig { returns(T.nilable([T.untyped, T.untyped])) }
|
||||
def json_attributes_previous_change; end
|
||||
|
||||
sig { params(from: T.untyped, to: T.untyped).returns(T::Boolean) }
|
||||
def json_attributes_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
|
||||
|
||||
sig { returns(T.untyped) }
|
||||
def json_attributes_previously_was; end
|
||||
|
||||
sig { returns(T.untyped) }
|
||||
def json_attributes_was; end
|
||||
|
||||
sig { void }
|
||||
def json_attributes_will_change!; end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def md5; end
|
||||
|
||||
@@ -1629,6 +1717,9 @@ class Domain::E621::Post
|
||||
sig { void }
|
||||
def restore_id_value!; end
|
||||
|
||||
sig { void }
|
||||
def restore_json_attributes!; end
|
||||
|
||||
sig { void }
|
||||
def restore_md5!; end
|
||||
|
||||
@@ -1650,6 +1741,9 @@ class Domain::E621::Post
|
||||
sig { void }
|
||||
def restore_rating!; end
|
||||
|
||||
sig { void }
|
||||
def restore_scanned_post_favs_at!; end
|
||||
|
||||
sig { void }
|
||||
def restore_score!; end
|
||||
|
||||
@@ -1740,6 +1834,12 @@ class Domain::E621::Post
|
||||
sig { returns(T::Boolean) }
|
||||
def saved_change_to_id_value?; end
|
||||
|
||||
sig { returns(T.nilable([T.untyped, T.untyped])) }
|
||||
def saved_change_to_json_attributes; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def saved_change_to_json_attributes?; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def saved_change_to_md5; end
|
||||
|
||||
@@ -1782,6 +1882,12 @@ class Domain::E621::Post
|
||||
sig { returns(T::Boolean) }
|
||||
def saved_change_to_rating?; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
|
||||
def saved_change_to_scanned_post_favs_at; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def saved_change_to_scanned_post_favs_at?; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
|
||||
def saved_change_to_score; end
|
||||
|
||||
@@ -1830,6 +1936,61 @@ class Domain::E621::Post
|
||||
sig { returns(T::Boolean) }
|
||||
def saved_change_to_updated_at?; end
|
||||
|
||||
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
|
||||
def scanned_post_favs_at; end
|
||||
|
||||
sig { params(value: T.nilable(::ActiveSupport::TimeWithZone)).returns(T.nilable(::ActiveSupport::TimeWithZone)) }
|
||||
def scanned_post_favs_at=(value); end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def scanned_post_favs_at?; end
|
||||
|
||||
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
|
||||
def scanned_post_favs_at_before_last_save; end
|
||||
|
||||
sig { returns(T.untyped) }
|
||||
def scanned_post_favs_at_before_type_cast; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def scanned_post_favs_at_came_from_user?; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
|
||||
def scanned_post_favs_at_change; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
|
||||
def scanned_post_favs_at_change_to_be_saved; end
|
||||
|
||||
sig do
|
||||
params(
|
||||
from: T.nilable(::ActiveSupport::TimeWithZone),
|
||||
to: T.nilable(::ActiveSupport::TimeWithZone)
|
||||
).returns(T::Boolean)
|
||||
end
|
||||
def scanned_post_favs_at_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
|
||||
|
||||
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
|
||||
def scanned_post_favs_at_in_database; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::ActiveSupport::TimeWithZone), T.nilable(::ActiveSupport::TimeWithZone)])) }
|
||||
def scanned_post_favs_at_previous_change; end
|
||||
|
||||
sig do
|
||||
params(
|
||||
from: T.nilable(::ActiveSupport::TimeWithZone),
|
||||
to: T.nilable(::ActiveSupport::TimeWithZone)
|
||||
).returns(T::Boolean)
|
||||
end
|
||||
def scanned_post_favs_at_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
|
||||
|
||||
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
|
||||
def scanned_post_favs_at_previously_was; end
|
||||
|
||||
sig { returns(T.nilable(::ActiveSupport::TimeWithZone)) }
|
||||
def scanned_post_favs_at_was; end
|
||||
|
||||
sig { void }
|
||||
def scanned_post_favs_at_will_change!; end
|
||||
|
||||
sig { returns(T.nilable(::Integer)) }
|
||||
def score; end
|
||||
|
||||
@@ -2247,6 +2408,9 @@ class Domain::E621::Post
|
||||
sig { returns(T::Boolean) }
|
||||
def will_save_change_to_id_value?; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def will_save_change_to_json_attributes?; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def will_save_change_to_md5?; end
|
||||
|
||||
@@ -2268,6 +2432,9 @@ class Domain::E621::Post
|
||||
sig { returns(T::Boolean) }
|
||||
def will_save_change_to_rating?; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def will_save_change_to_scanned_post_favs_at?; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def will_save_change_to_score?; end
|
||||
|
||||
|
||||
16
sorbet/rbi/dsl/domain/e621/tag_util.rbi
generated
Normal file
16
sorbet/rbi/dsl/domain/e621/tag_util.rbi
generated
Normal file
@@ -0,0 +1,16 @@
|
||||
# typed: true
|
||||
|
||||
# DO NOT EDIT MANUALLY
|
||||
# This is an autogenerated file for dynamic methods in `Domain::E621::TagUtil`.
|
||||
# Please instead update this file by running `bin/tapioca dsl Domain::E621::TagUtil`.
|
||||
|
||||
|
||||
class Domain::E621::TagUtil
|
||||
sig { returns(ColorLogger) }
|
||||
def logger; end
|
||||
|
||||
class << self
|
||||
sig { returns(ColorLogger) }
|
||||
def logger; end
|
||||
end
|
||||
end
|
||||
16
sorbet/rbi/dsl/domain/e621/task/collect_post_favs_task.rbi
generated
Normal file
16
sorbet/rbi/dsl/domain/e621/task/collect_post_favs_task.rbi
generated
Normal file
@@ -0,0 +1,16 @@
|
||||
# typed: true
|
||||
|
||||
# DO NOT EDIT MANUALLY
|
||||
# This is an autogenerated file for dynamic methods in `Domain::E621::Task::CollectPostFavsTask`.
|
||||
# Please instead update this file by running `bin/tapioca dsl Domain::E621::Task::CollectPostFavsTask`.
|
||||
|
||||
|
||||
class Domain::E621::Task::CollectPostFavsTask
|
||||
sig { returns(ColorLogger) }
|
||||
def logger; end
|
||||
|
||||
class << self
|
||||
sig { returns(ColorLogger) }
|
||||
def logger; end
|
||||
end
|
||||
end
|
||||
1586
sorbet/rbi/dsl/domain/e621/user.rbi
generated
Normal file
1586
sorbet/rbi/dsl/domain/e621/user.rbi
generated
Normal file
File diff suppressed because it is too large
Load Diff
2
sorbet/rbi/dsl/domain/inkbunny/job/file_job.rbi
generated
2
sorbet/rbi/dsl/domain/inkbunny/job/file_job.rbi
generated
@@ -21,7 +21,7 @@ class Domain::Inkbunny::Job::FileJob
|
||||
end
|
||||
def perform_later(args, &block); end
|
||||
|
||||
sig { params(args: T.untyped).returns(T.untyped) }
|
||||
sig { params(args: T.untyped).void }
|
||||
def perform_now(args); end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -21,7 +21,7 @@ class Domain::Inkbunny::Job::UserAvatarJob
|
||||
end
|
||||
def perform_later(args, &block); end
|
||||
|
||||
sig { params(args: T.untyped).returns(T.untyped) }
|
||||
sig { params(args: T.untyped).void }
|
||||
def perform_now(args); end
|
||||
end
|
||||
end
|
||||
|
||||
1222
sorbet/rbi/dsl/domain/sofurry/fav.rbi
generated
Normal file
1222
sorbet/rbi/dsl/domain/sofurry/fav.rbi
generated
Normal file
File diff suppressed because it is too large
Load Diff
1344
sorbet/rbi/dsl/domain/sofurry/file.rbi
generated
Normal file
1344
sorbet/rbi/dsl/domain/sofurry/file.rbi
generated
Normal file
File diff suppressed because it is too large
Load Diff
1363
sorbet/rbi/dsl/domain/sofurry/post.rbi
generated
Normal file
1363
sorbet/rbi/dsl/domain/sofurry/post.rbi
generated
Normal file
File diff suppressed because it is too large
Load Diff
1943
sorbet/rbi/dsl/domain/sofurry/user.rbi
generated
Normal file
1943
sorbet/rbi/dsl/domain/sofurry/user.rbi
generated
Normal file
File diff suppressed because it is too large
Load Diff
174
sorbet/rbi/dsl/flat_sst_entry.rbi
generated
174
sorbet/rbi/dsl/flat_sst_entry.rbi
generated
@@ -6,6 +6,7 @@
|
||||
|
||||
|
||||
class FlatSstEntry
|
||||
include GeneratedAttributeMethods
|
||||
extend CommonRelationMethods
|
||||
extend GeneratedRelationMethods
|
||||
|
||||
@@ -525,6 +526,179 @@ class FlatSstEntry
|
||||
def without(*args, &blk); end
|
||||
end
|
||||
|
||||
module GeneratedAttributeMethods
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def contents; end
|
||||
|
||||
sig { params(value: T.nilable(::String)).returns(T.nilable(::String)) }
|
||||
def contents=(value); end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def contents?; end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def contents_before_last_save; end
|
||||
|
||||
sig { returns(T.untyped) }
|
||||
def contents_before_type_cast; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def contents_came_from_user?; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def contents_change; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def contents_change_to_be_saved; end
|
||||
|
||||
sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
|
||||
def contents_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def contents_in_database; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def contents_previous_change; end
|
||||
|
||||
sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
|
||||
def contents_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def contents_previously_was; end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def contents_was; end
|
||||
|
||||
sig { void }
|
||||
def contents_will_change!; end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def id; end
|
||||
|
||||
sig { params(value: T.nilable(::String)).returns(T.nilable(::String)) }
|
||||
def id=(value); end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def id?; end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def id_before_last_save; end
|
||||
|
||||
sig { returns(T.untyped) }
|
||||
def id_before_type_cast; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def id_came_from_user?; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def id_change; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def id_change_to_be_saved; end
|
||||
|
||||
sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
|
||||
def id_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def id_in_database; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def id_previous_change; end
|
||||
|
||||
sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
|
||||
def id_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def id_previously_was; end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def id_was; end
|
||||
|
||||
sig { void }
|
||||
def id_will_change!; end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def key; end
|
||||
|
||||
sig { params(value: T.nilable(::String)).returns(T.nilable(::String)) }
|
||||
def key=(value); end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def key?; end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def key_before_last_save; end
|
||||
|
||||
sig { returns(T.untyped) }
|
||||
def key_before_type_cast; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def key_came_from_user?; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def key_change; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def key_change_to_be_saved; end
|
||||
|
||||
sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
|
||||
def key_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def key_in_database; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def key_previous_change; end
|
||||
|
||||
sig { params(from: T.nilable(::String), to: T.nilable(::String)).returns(T::Boolean) }
|
||||
def key_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def key_previously_was; end
|
||||
|
||||
sig { returns(T.nilable(::String)) }
|
||||
def key_was; end
|
||||
|
||||
sig { void }
|
||||
def key_will_change!; end
|
||||
|
||||
sig { void }
|
||||
def restore_contents!; end
|
||||
|
||||
sig { void }
|
||||
def restore_id!; end
|
||||
|
||||
sig { void }
|
||||
def restore_key!; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def saved_change_to_contents; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def saved_change_to_contents?; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def saved_change_to_id; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def saved_change_to_id?; end
|
||||
|
||||
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
|
||||
def saved_change_to_key; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def saved_change_to_key?; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def will_save_change_to_contents?; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def will_save_change_to_id?; end
|
||||
|
||||
sig { returns(T::Boolean) }
|
||||
def will_save_change_to_key?; end
|
||||
end
|
||||
|
||||
module GeneratedRelationMethods
|
||||
sig { returns(PrivateRelation) }
|
||||
def all; end
|
||||
|
||||
3
sorbet/rbi/dsl/rails/application_controller.rbi
generated
3
sorbet/rbi/dsl/rails/application_controller.rbi
generated
@@ -17,6 +17,9 @@ class Rails::ApplicationController
|
||||
include ::Turbo::FramesHelper
|
||||
include ::Turbo::IncludesHelper
|
||||
include ::Turbo::StreamsHelper
|
||||
include ::ActionView::Helpers::DateHelper
|
||||
include ::ActionView::Helpers::SanitizeHelper
|
||||
include ::ActionView::Helpers::RenderingHelper
|
||||
include ::ActionView::Helpers::CaptureHelper
|
||||
include ::ActionView::Helpers::OutputSafetyHelper
|
||||
include ::ActionView::Helpers::TagHelper
|
||||
|
||||
@@ -17,6 +17,9 @@ class Rails::Conductor::BaseController
|
||||
include ::Turbo::FramesHelper
|
||||
include ::Turbo::IncludesHelper
|
||||
include ::Turbo::StreamsHelper
|
||||
include ::ActionView::Helpers::DateHelper
|
||||
include ::ActionView::Helpers::SanitizeHelper
|
||||
include ::ActionView::Helpers::RenderingHelper
|
||||
include ::ActionView::Helpers::CaptureHelper
|
||||
include ::ActionView::Helpers::OutputSafetyHelper
|
||||
include ::ActionView::Helpers::TagHelper
|
||||
|
||||
3
sorbet/rbi/dsl/rails/health_controller.rbi
generated
3
sorbet/rbi/dsl/rails/health_controller.rbi
generated
@@ -17,6 +17,9 @@ class Rails::HealthController
|
||||
include ::Turbo::FramesHelper
|
||||
include ::Turbo::IncludesHelper
|
||||
include ::Turbo::StreamsHelper
|
||||
include ::ActionView::Helpers::DateHelper
|
||||
include ::ActionView::Helpers::SanitizeHelper
|
||||
include ::ActionView::Helpers::RenderingHelper
|
||||
include ::ActionView::Helpers::CaptureHelper
|
||||
include ::ActionView::Helpers::OutputSafetyHelper
|
||||
include ::ActionView::Helpers::TagHelper
|
||||
|
||||
1327
sorbet/rbi/gems/attr_json@2.5.0.rbi
generated
Normal file
1327
sorbet/rbi/gems/attr_json@2.5.0.rbi
generated
Normal file
File diff suppressed because it is too large
Load Diff
105
sorbet/tapioca/compilers/attr_json_dsl.rb
Normal file
105
sorbet/tapioca/compilers/attr_json_dsl.rb
Normal file
@@ -0,0 +1,105 @@
|
||||
# typed: strict
|
||||
module Tapioca::Compilers
|
||||
class AttrJsonDsl < Tapioca::Dsl::Compiler
|
||||
extend T::Sig
|
||||
|
||||
ConstantType = type_member { { fixed: T.class_of(::AttrJson::Record) } }
|
||||
|
||||
sig { override.returns(T::Enumerable[Module]) }
|
||||
def self.gather_constants
|
||||
all_classes.select { |c| c < ::AttrJson::Record }
|
||||
end
|
||||
|
||||
sig { override.void }
|
||||
def decorate
|
||||
root.create_path(constant) do |klass|
|
||||
klass.create_method(
|
||||
"attr_json_config",
|
||||
parameters: [
|
||||
create_kw_opt_param(
|
||||
"default_container_attribute",
|
||||
type: "T.nilable(Symbol)",
|
||||
default: "nil",
|
||||
),
|
||||
create_kw_opt_param(
|
||||
"bad_cast",
|
||||
type: "T.nilable(Symbol)",
|
||||
default: "nil",
|
||||
),
|
||||
create_kw_opt_param(
|
||||
"unknown_key",
|
||||
type: "T.nilable(Symbol)",
|
||||
default: "nil",
|
||||
),
|
||||
],
|
||||
return_type: "void",
|
||||
class_method: true,
|
||||
)
|
||||
klass.create_method(
|
||||
"attr_json",
|
||||
parameters: [
|
||||
create_param("name", type: "Symbol"),
|
||||
create_param("type", type: "Symbol"),
|
||||
create_opt_param(
|
||||
"options",
|
||||
type: "T.nilable(T::Hash[Symbol, T.untyped])",
|
||||
default: "nil",
|
||||
),
|
||||
],
|
||||
return_type: "void",
|
||||
class_method: true,
|
||||
)
|
||||
klass.create_method(
|
||||
"attr_json_registry",
|
||||
return_type: "T::Array[Symbol]",
|
||||
)
|
||||
attribute_names =
|
||||
T.cast(
|
||||
T.unsafe(constant).attr_json_registry.attribute_names,
|
||||
T::Array[Symbol],
|
||||
)
|
||||
# attribute_names.each do |attribute_name|
|
||||
# attr_type =
|
||||
# T.cast(
|
||||
# T
|
||||
# .unsafe(constant)
|
||||
# .attr_json_registry
|
||||
# .type_for_attribute(attribute_name),
|
||||
# ActiveModel::Type::Value,
|
||||
# )
|
||||
|
||||
# type_name =
|
||||
# case attr_type.type
|
||||
# when :boolean
|
||||
# "T::Boolean"
|
||||
# when :big_integer
|
||||
# "Integer"
|
||||
# when :binary
|
||||
# "String"
|
||||
# when :date
|
||||
# "Date"
|
||||
# when :datetime
|
||||
# "DateTime"
|
||||
# when :decimal
|
||||
# "Decimal"
|
||||
# when :float
|
||||
# "Float"
|
||||
# when :immutable_string
|
||||
# "String"
|
||||
# when :integer
|
||||
# "Integer"
|
||||
# when :string
|
||||
# "String"
|
||||
# else
|
||||
# raise("Unknown type: #{attr_type.type}")
|
||||
# end
|
||||
|
||||
# # klass.create_method(
|
||||
# # attribute_name.to_s,
|
||||
# # return_type: "T.nilable(#{type_name})",
|
||||
# # )
|
||||
# end
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -2,6 +2,7 @@
|
||||
# frozen_string_literal: true
|
||||
|
||||
require "./app/lib/has_color_logger"
|
||||
require "./app/lib/domain/e621/tag_util"
|
||||
require "./spec/helpers/debug_helpers"
|
||||
require "./spec/helpers/http_client_mock_helpers"
|
||||
require "./spec/helpers/perform_job_helpers"
|
||||
|
||||
8
spec/factories/domain/e621/users.rb
Normal file
8
spec/factories/domain/e621/users.rb
Normal file
@@ -0,0 +1,8 @@
|
||||
# typed: false
|
||||
FactoryBot.define do
|
||||
factory :domain_e621_user, class: "Domain::E621::User" do
|
||||
sequence(:e621_user_id) { |n| n }
|
||||
sequence(:name) { |n| "user#{n}" }
|
||||
scanned_favs_at { nil }
|
||||
end
|
||||
end
|
||||
122
spec/jobs/domain/e621/job/scan_post_favs_job_spec.rb
Normal file
122
spec/jobs/domain/e621/job/scan_post_favs_job_spec.rb
Normal file
@@ -0,0 +1,122 @@
|
||||
# typed: false
|
||||
require "rails_helper"
|
||||
|
||||
describe Domain::E621::Job::ScanPostFavsJob do
|
||||
let(:http_client_mock) { instance_double("::Scraper::HttpClient") }
|
||||
before { Scraper::ClientFactory.http_client_mock = http_client_mock }
|
||||
before do
|
||||
e621_user_ids = [
|
||||
454_589,
|
||||
1_535_298,
|
||||
956_950,
|
||||
413_725,
|
||||
372_696,
|
||||
940_693,
|
||||
2_055_406,
|
||||
1_775_383,
|
||||
954_593,
|
||||
]
|
||||
e621_user_ids.each do |e621_user_id|
|
||||
create(:domain_e621_user, e621_user_id: e621_user_id)
|
||||
end
|
||||
end
|
||||
|
||||
it "scans users who favorited the post" do
|
||||
post = create(:domain_e621_post, e621_id: 4_005_902)
|
||||
HttpClientMockHelpers.init_http_client_mock(
|
||||
http_client_mock,
|
||||
[
|
||||
{
|
||||
uri: "https://e621.net/posts/4005902/favorites?limit=10&page=1",
|
||||
status_code: 200,
|
||||
content_type: "text/html",
|
||||
contents:
|
||||
SpecUtil.read_fixture_file("domain/e621/job/post_favs_0.html"),
|
||||
},
|
||||
{
|
||||
uri: "https://e621.net/posts/4005902/favorites?limit=10&page=2",
|
||||
status_code: 200,
|
||||
content_type: "text/html",
|
||||
contents:
|
||||
SpecUtil.read_fixture_file("domain/e621/job/post_favs_1.html"),
|
||||
caused_by_entry_idx: 0,
|
||||
},
|
||||
],
|
||||
)
|
||||
|
||||
expect(post.scanned_post_favs_at).to be_nil
|
||||
perform_now({ post: post })
|
||||
post.reload
|
||||
|
||||
# Verify post was marked as scanned
|
||||
expect(post.scanned_post_favs_at).to be_within(1.second).of(
|
||||
DateTime.current,
|
||||
)
|
||||
|
||||
# Verify users were created and updated with correct fav counts
|
||||
# First page users
|
||||
users_page1 =
|
||||
Domain::E621::User.where(
|
||||
e621_user_id: [454_589, 1_535_298, 956_950, 413_725, 372_696],
|
||||
)
|
||||
expect(users_page1.count).to eq(5)
|
||||
|
||||
# Check specific user fav counts from first page
|
||||
expect(
|
||||
users_page1.find_by(e621_user_id: 454_589).num_other_favs_cached,
|
||||
).to eq(765)
|
||||
expect(
|
||||
users_page1.find_by(e621_user_id: 1_535_298).num_other_favs_cached,
|
||||
).to eq(330)
|
||||
expect(
|
||||
users_page1.find_by(e621_user_id: 956_950).num_other_favs_cached,
|
||||
).to eq(24)
|
||||
expect(
|
||||
users_page1.find_by(e621_user_id: 413_725).num_other_favs_cached,
|
||||
).to eq(2529)
|
||||
expect(
|
||||
users_page1.find_by(e621_user_id: 372_696).num_other_favs_cached,
|
||||
).to eq(88)
|
||||
|
||||
# Second page users
|
||||
users_page2 =
|
||||
Domain::E621::User.where(
|
||||
e621_user_id: [940_693, 2_055_406, 1_775_383, 954_593],
|
||||
)
|
||||
expect(users_page2.count).to eq(4)
|
||||
|
||||
# Check specific user fav counts from second page
|
||||
expect(
|
||||
users_page2.find_by(e621_user_id: 940_693).num_other_favs_cached,
|
||||
).to eq(25_685)
|
||||
expect(
|
||||
users_page2.find_by(e621_user_id: 2_055_406).num_other_favs_cached,
|
||||
).to eq(37)
|
||||
expect(
|
||||
users_page2.find_by(e621_user_id: 1_775_383).num_other_favs_cached,
|
||||
).to eq(497)
|
||||
expect(
|
||||
users_page2.find_by(e621_user_id: 954_593).num_other_favs_cached,
|
||||
).to eq(70)
|
||||
end
|
||||
|
||||
it "handles error responses" do
|
||||
post = create(:domain_e621_post, e621_id: 4_005_902)
|
||||
HttpClientMockHelpers.init_http_client_mock(
|
||||
http_client_mock,
|
||||
[
|
||||
{
|
||||
uri: "https://e621.net/posts/4005902/favorites?limit=10&page=1",
|
||||
status_code: 404,
|
||||
content_type: "text/html",
|
||||
contents: "Not Found",
|
||||
},
|
||||
],
|
||||
)
|
||||
|
||||
expect { perform_now({ post: post }) }.to raise_error(/404 - bailing/)
|
||||
|
||||
post.reload
|
||||
expect(post.scanned_post_favs_at).to be_nil
|
||||
end
|
||||
end
|
||||
126
spec/jobs/domain/e621/job/scan_user_favs_job_spec.rb
Normal file
126
spec/jobs/domain/e621/job/scan_user_favs_job_spec.rb
Normal file
@@ -0,0 +1,126 @@
|
||||
require "rails_helper"
|
||||
|
||||
RSpec.describe Domain::E621::Job::ScanUserFavsJob do
|
||||
let(:http_client_mock) { instance_double("::Scraper::HttpClient") }
|
||||
before { Scraper::ClientFactory.http_client_mock = http_client_mock }
|
||||
|
||||
let(:user) { create(:domain_e621_user, e621_user_id: 123_456) }
|
||||
let(:job) { described_class.new }
|
||||
|
||||
let!(:log_entries) do
|
||||
HttpClientMockHelpers.init_http_client_mock(
|
||||
http_client_mock,
|
||||
[
|
||||
{
|
||||
uri:
|
||||
"https://e621.net/posts.json?tags=status:any+fav:#{user.url_name}+order:id_desc&limit=4",
|
||||
content_type: "application/json",
|
||||
contents:
|
||||
File.read("test/fixtures/files/domain/e621/job/favorites_0.json"),
|
||||
},
|
||||
{
|
||||
uri:
|
||||
"https://e621.net/posts.json?tags=status:any+fav:#{user.url_name}+order:id_desc&limit=4&page=b5129881",
|
||||
content_type: "application/json",
|
||||
contents: { posts: [] }.to_json,
|
||||
caused_by_entry_idx: 0,
|
||||
},
|
||||
],
|
||||
)
|
||||
end
|
||||
|
||||
describe "#perform" do
|
||||
it "scans user favorites and creates fav records" do
|
||||
expect do perform_now({ user: user }) end.to change {
|
||||
user.scanned_favs_status
|
||||
}.from(nil).to("ok")
|
||||
|
||||
# Verify the posts were created
|
||||
expect(Domain::E621::Post.count).to eq(5)
|
||||
expect(Domain::E621::Fav.count).to eq(5)
|
||||
|
||||
# Verify StaticFileJob was enqueued for each new post
|
||||
static_file_jobs =
|
||||
SpecUtil.enqueued_job_args(Domain::E621::Job::StaticFileJob)
|
||||
expect(static_file_jobs.size).to eq(5)
|
||||
expect(
|
||||
static_file_jobs.map { |args| args[:post].e621_id },
|
||||
).to match_array(Domain::E621::Post.pluck(:e621_id))
|
||||
|
||||
# Verify specific post details from fixture
|
||||
post = Domain::E621::Post.find_by(e621_id: 5_212_363)
|
||||
expect(post).to be_present
|
||||
expect(post.file_url_str).to eq(
|
||||
"https://static1.e621.net/data/87/18/8718995a7dd49f24dfae9ffc042c9578.png",
|
||||
)
|
||||
|
||||
# Verify fav relationship
|
||||
fav = Domain::E621::Fav.find_by(user: user, post: post)
|
||||
expect(fav).to be_present
|
||||
end
|
||||
|
||||
it "updates scanned_favs_at timestamp" do
|
||||
expect { perform_now({ user: user }) }.to change {
|
||||
user.reload.scanned_favs_at
|
||||
}.from(nil).to be_within(1.second).of(Time.current)
|
||||
end
|
||||
|
||||
context "when API returns error" do
|
||||
let!(:log_entries) do
|
||||
HttpClientMockHelpers.init_http_client_mock(
|
||||
http_client_mock,
|
||||
[
|
||||
{
|
||||
method: :get,
|
||||
uri:
|
||||
"https://e621.net/posts.json?tags=status:any+fav:#{user.url_name}+order:id_desc&limit=4",
|
||||
content_type: "application/json",
|
||||
status_code: 404,
|
||||
contents: "Not Found",
|
||||
},
|
||||
],
|
||||
)
|
||||
end
|
||||
|
||||
it "handles HTTP errors appropriately" do
|
||||
expect { perform_now({ user: user }) }.to raise_error(
|
||||
/non 200 response/,
|
||||
).and change { user.scanned_favs_status }.from(nil).to("error")
|
||||
end
|
||||
end
|
||||
|
||||
context "when favorites are hidden" do
|
||||
let!(:log_entries) do
|
||||
HttpClientMockHelpers.init_http_client_mock(
|
||||
http_client_mock,
|
||||
[
|
||||
{
|
||||
method: :get,
|
||||
uri:
|
||||
"https://e621.net/posts.json?tags=status:any+fav:#{user.url_name}+order:id_desc&limit=4",
|
||||
content_type: "application/json",
|
||||
status_code: 403,
|
||||
contents:
|
||||
File.read(
|
||||
"test/fixtures/files/domain/e621/job/favorites_hidden.json",
|
||||
),
|
||||
},
|
||||
],
|
||||
)
|
||||
end
|
||||
|
||||
it "handles hidden favorites appropriately" do
|
||||
expect { perform_now({ user: user }) }.to change {
|
||||
user.reload.favs_are_hidden
|
||||
}.from(nil).to(true).and change { user.reload.scanned_favs_at }.from(
|
||||
nil,
|
||||
).to(be_within(1.second).of(Time.current)).and change {
|
||||
user.reload.scanned_favs_status
|
||||
}.from(nil).to("ok")
|
||||
|
||||
# Should not create any favs
|
||||
expect(Domain::E621::Fav.count).to eq(0)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
154
spec/jobs/domain/e621/job/scan_users_job_spec.rb
Normal file
154
spec/jobs/domain/e621/job/scan_users_job_spec.rb
Normal file
@@ -0,0 +1,154 @@
|
||||
# typed: false
|
||||
require "rails_helper"
|
||||
|
||||
describe Domain::E621::Job::ScanUsersJob do
|
||||
let(:http_client_mock) { instance_double("::Scraper::HttpClient") }
|
||||
before { Scraper::ClientFactory.http_client_mock = http_client_mock }
|
||||
|
||||
let!(:log_entries) do
|
||||
HttpClientMockHelpers.init_http_client_mock(
|
||||
http_client_mock,
|
||||
[
|
||||
{
|
||||
uri: "https://e621.net/users.json?limit=320",
|
||||
content_type: "application/json",
|
||||
contents:
|
||||
File.read("test/fixtures/files/domain/e621/job/users_index_0.json"),
|
||||
},
|
||||
{
|
||||
uri: "https://e621.net/users.json?limit=320&page=b2089164",
|
||||
content_type: "application/json",
|
||||
contents:
|
||||
File.read(
|
||||
"test/fixtures/files/domain/e621/job/users_index_empty.json",
|
||||
),
|
||||
caused_by_entry_idx: 0,
|
||||
},
|
||||
],
|
||||
)
|
||||
end
|
||||
|
||||
it "creates users from the API response" do
|
||||
expect { perform_now({}) }.to change(Domain::E621::User, :count).by(12)
|
||||
|
||||
user = Domain::E621::User.find_by(e621_user_id: 2_089_238)
|
||||
expect(user).not_to be_nil
|
||||
expect(user.name).to eq("chongzi")
|
||||
|
||||
user = Domain::E621::User.find_by(e621_user_id: 2_089_235)
|
||||
expect(user).not_to be_nil
|
||||
expect(user.name).to eq("dhqobc")
|
||||
end
|
||||
|
||||
# it "enqueues scan user favs jobs for new users" do
|
||||
# perform_now({})
|
||||
|
||||
# user1 = Domain::E621::User.find_by(e621_user_id: 2_089_238)
|
||||
# user2 = Domain::E621::User.find_by(e621_user_id: 2_089_237)
|
||||
|
||||
# expect(SpecUtil.enqueued_jobs(Domain::E621::Job::ScanUserFavsJob)).to match(
|
||||
# array_including(
|
||||
# including(args: [{ user: user1, caused_by_entry: log_entries[0] }]),
|
||||
# including(args: [{ user: user2, caused_by_entry: log_entries[0] }]),
|
||||
# ),
|
||||
# )
|
||||
# end
|
||||
|
||||
context "when user already exists" do
|
||||
let!(:existing_user) do
|
||||
Domain::E621::User.create!(e621_user_id: 2_089_238, name: "chongzi")
|
||||
end
|
||||
|
||||
it "does not create duplicate users" do
|
||||
expect { perform_now({}) }.to change(Domain::E621::User, :count).by(11)
|
||||
end
|
||||
|
||||
# it "does not enqueue scan favs job for existing users" do
|
||||
# perform_now({})
|
||||
|
||||
# new_user = Domain::E621::User.find_by(e621_user_id: 2_089_237)
|
||||
|
||||
# expect(
|
||||
# SpecUtil.enqueued_jobs(Domain::E621::Job::ScanUserFavsJob),
|
||||
# ).not_to include(
|
||||
# including(
|
||||
# args: [{ user: existing_user, caused_by_entry: log_entries[0] }],
|
||||
# ),
|
||||
# )
|
||||
# expect(
|
||||
# SpecUtil.enqueued_jobs(Domain::E621::Job::ScanUserFavsJob),
|
||||
# ).to include(
|
||||
# including(args: [{ user: new_user, caused_by_entry: log_entries[0] }]),
|
||||
# )
|
||||
# end
|
||||
end
|
||||
|
||||
context "with pagination" do
|
||||
let(:log_entries) do
|
||||
HttpClientMockHelpers.init_http_client_mock(
|
||||
http_client_mock,
|
||||
[
|
||||
{
|
||||
uri: "https://e621.net/users.json?limit=320",
|
||||
content_type: "application/json",
|
||||
contents:
|
||||
File.read(
|
||||
"test/fixtures/files/domain/e621/job/users_index_0.json",
|
||||
),
|
||||
},
|
||||
{
|
||||
uri: "https://e621.net/users.json?limit=320&page=b2089164",
|
||||
content_type: "application/json",
|
||||
contents:
|
||||
File.read(
|
||||
"test/fixtures/files/domain/e621/job/users_index_1.json",
|
||||
),
|
||||
caused_by_entry_idx: 0,
|
||||
},
|
||||
{
|
||||
uri: "https://e621.net/users.json?limit=320&page=b2089089",
|
||||
content_type: "application/json",
|
||||
contents:
|
||||
File.read(
|
||||
"test/fixtures/files/domain/e621/job/users_index_empty.json",
|
||||
),
|
||||
caused_by_entry_idx: 0,
|
||||
},
|
||||
],
|
||||
)
|
||||
end
|
||||
|
||||
it "follows pagination and creates all users" do
|
||||
expect { perform_now({}) }.to change(Domain::E621::User, :count).by(23)
|
||||
|
||||
# First page
|
||||
expect(Domain::E621::User.exists?(e621_user_id: 2_089_238)).to be true
|
||||
expect(Domain::E621::User.exists?(e621_user_id: 2_089_235)).to be true
|
||||
|
||||
# Second page
|
||||
expect(Domain::E621::User.exists?(e621_user_id: 2_089_163)).to be true
|
||||
expect(Domain::E621::User.exists?(e621_user_id: 2_089_091)).to be true
|
||||
end
|
||||
end
|
||||
|
||||
context "when API returns error" do
|
||||
let(:log_entries) do
|
||||
HttpClientMockHelpers.init_http_client_mock(
|
||||
http_client_mock,
|
||||
[
|
||||
{
|
||||
method: :get,
|
||||
uri: "https://e621.net/users.json?limit=320",
|
||||
content_type: "application/json",
|
||||
status_code: 500,
|
||||
contents: "Internal Server Error",
|
||||
},
|
||||
],
|
||||
)
|
||||
end
|
||||
|
||||
it "raises an error" do
|
||||
expect { perform_now({}) }.to raise_error(/non 200 response/)
|
||||
end
|
||||
end
|
||||
end
|
||||
790
test/fixtures/files/domain/e621/job/favorites_0.json
vendored
Normal file
790
test/fixtures/files/domain/e621/job/favorites_0.json
vendored
Normal file
@@ -0,0 +1,790 @@
|
||||
{
|
||||
"posts": [
|
||||
{
|
||||
"id": 5212363,
|
||||
"created_at": "2024-11-29T12:01:58.991-05:00",
|
||||
"updated_at": "2025-01-27T12:56:03.283-05:00",
|
||||
"file": {
|
||||
"width": 2400,
|
||||
"height": 1330,
|
||||
"ext": "png",
|
||||
"size": 2529196,
|
||||
"md5": "8718995a7dd49f24dfae9ffc042c9578",
|
||||
"url": "https://static1.e621.net/data/87/18/8718995a7dd49f24dfae9ffc042c9578.png"
|
||||
},
|
||||
"preview": {
|
||||
"width": 150,
|
||||
"height": 83,
|
||||
"url": "https://static1.e621.net/data/preview/87/18/8718995a7dd49f24dfae9ffc042c9578.jpg"
|
||||
},
|
||||
"sample": {
|
||||
"has": true,
|
||||
"height": 471,
|
||||
"width": 850,
|
||||
"url": "https://static1.e621.net/data/sample/87/18/8718995a7dd49f24dfae9ffc042c9578.jpg",
|
||||
"alternates": {}
|
||||
},
|
||||
"score": { "up": 1312, "down": -26, "total": 1286 },
|
||||
"tags": {
|
||||
"general": [
|
||||
"4_toes",
|
||||
"5_fingers",
|
||||
"bed",
|
||||
"bed_covers",
|
||||
"bedding",
|
||||
"bestiality",
|
||||
"bodily_fluids",
|
||||
"brown_body",
|
||||
"brown_fur",
|
||||
"butt",
|
||||
"clenched_teeth",
|
||||
"countershade_torso",
|
||||
"countershading",
|
||||
"cum",
|
||||
"cum_inside",
|
||||
"cum_on_body",
|
||||
"cum_on_ceiling_fan",
|
||||
"cum_on_fan_blades",
|
||||
"cum_on_ground",
|
||||
"cum_on_photo",
|
||||
"cum_on_pillow",
|
||||
"cum_on_wall",
|
||||
"dildo",
|
||||
"doggystyle",
|
||||
"duo",
|
||||
"eyes_closed",
|
||||
"feet",
|
||||
"female",
|
||||
"female_human",
|
||||
"female_on_bottom",
|
||||
"female_on_feral",
|
||||
"female_penetrated",
|
||||
"feral",
|
||||
"feral_on_top",
|
||||
"feral_penetrating",
|
||||
"feral_penetrating_female",
|
||||
"feral_penetrating_human",
|
||||
"fingers",
|
||||
"from_behind_position",
|
||||
"fur",
|
||||
"furniture",
|
||||
"genital_fluids",
|
||||
"genitals",
|
||||
"hair",
|
||||
"huge_penetration",
|
||||
"human_on_bottom",
|
||||
"human_on_feral",
|
||||
"human_penetrated",
|
||||
"inside",
|
||||
"interspecies",
|
||||
"larger_feral",
|
||||
"larger_male",
|
||||
"leaking_cum",
|
||||
"long_hair",
|
||||
"male",
|
||||
"male/female",
|
||||
"male_feral",
|
||||
"male_on_human",
|
||||
"male_on_top",
|
||||
"male_penetrating",
|
||||
"male_penetrating_female",
|
||||
"male_penetrating_human",
|
||||
"mating_grip",
|
||||
"mounting",
|
||||
"nude",
|
||||
"nude_female",
|
||||
"nude_human",
|
||||
"on_bed",
|
||||
"on_bottom",
|
||||
"on_top",
|
||||
"penetration",
|
||||
"penile",
|
||||
"penile_penetration",
|
||||
"penis_in_pussy",
|
||||
"pillow",
|
||||
"red_hair",
|
||||
"sex",
|
||||
"sex_toy",
|
||||
"size_difference",
|
||||
"smaller_female",
|
||||
"smaller_human",
|
||||
"tail",
|
||||
"teeth",
|
||||
"text",
|
||||
"toes",
|
||||
"vaginal",
|
||||
"vaginal_penetration"
|
||||
],
|
||||
"artist": ["dark_violet"],
|
||||
"contributor": [],
|
||||
"copyright": [],
|
||||
"character": ["dex_(jelomaus)", "jess_(dark_violet)"],
|
||||
"species": [
|
||||
"canid",
|
||||
"canine",
|
||||
"canis",
|
||||
"domestic_dog",
|
||||
"german_shepherd",
|
||||
"herding_dog",
|
||||
"human",
|
||||
"mammal",
|
||||
"pastoral_dog"
|
||||
],
|
||||
"invalid": [],
|
||||
"meta": [
|
||||
"2024",
|
||||
"comic",
|
||||
"digital_media_(artwork)",
|
||||
"english_text",
|
||||
"hi_res",
|
||||
"story",
|
||||
"story_in_description"
|
||||
],
|
||||
"lore": []
|
||||
},
|
||||
"locked_tags": [],
|
||||
"change_seq": 63286178,
|
||||
"flags": {
|
||||
"pending": false,
|
||||
"flagged": false,
|
||||
"note_locked": false,
|
||||
"status_locked": false,
|
||||
"rating_locked": true,
|
||||
"deleted": false
|
||||
},
|
||||
"rating": "e",
|
||||
"fav_count": 2559,
|
||||
"sources": [
|
||||
"https://www.furaffinity.net/user/jelomaus/",
|
||||
"https://www.furaffinity.net/view/59000590/",
|
||||
"https://d.furaffinity.net/art/jelomaus/1732899601/1732899601.jelomaus_dex_x_jess_5.png",
|
||||
"https://d.furaffinity.net/art/darkviolet/1732901227/1732901227.darkviolet_dex_x_jess_5.png",
|
||||
"https://www.furaffinity.net/view/59000870/",
|
||||
"https://www.furaffinity.net/user/darkviolet/"
|
||||
],
|
||||
"pools": [44693],
|
||||
"relationships": {
|
||||
"parent_id": null,
|
||||
"has_children": false,
|
||||
"has_active_children": false,
|
||||
"children": []
|
||||
},
|
||||
"approver_id": null,
|
||||
"uploader_id": 589819,
|
||||
"description": "h2.Doggy Day Care [5/8]\n\nYou're ignoring the chat again. They're yelling about what's going on, laughing, amazed, stunned, shocked... every kind of reaction you could expect, and all of it layers upon layers of lies. What would they say if they thought no-one else was listening...?\n\nYou go back to what you wanted from the beginning. She's moaning, gasping, shouting expletives into the mattress, rocked with each of the dog's savage, needy thrusts. Hah, he must be wanting it badly - he hasn't had release in a while, even if his last load is still dripping down the wall. That must have been... how long ago?\n\nYou glance at the time - wait, has it been almost six hours already? Your hard drive must be half full by now. You should stop soon. You [i]need[/i] to study, after all.\n\nYou should stop soon, give this girl [i]some[/i] privacy.\n\nYou should really stop soon.\n\nReally...",
|
||||
"comment_count": 5,
|
||||
"is_favorited": true,
|
||||
"has_notes": false,
|
||||
"duration": null
|
||||
},
|
||||
{
|
||||
"id": 5214461,
|
||||
"created_at": "2024-11-30T11:37:53.545-05:00",
|
||||
"updated_at": "2025-01-27T12:56:12.390-05:00",
|
||||
"file": {
|
||||
"width": 2400,
|
||||
"height": 1330,
|
||||
"ext": "png",
|
||||
"size": 2275665,
|
||||
"md5": "7beea9cb1fead46303f2bb83c6718c64",
|
||||
"url": "https://static1.e621.net/data/7b/ee/7beea9cb1fead46303f2bb83c6718c64.png"
|
||||
},
|
||||
"preview": {
|
||||
"width": 150,
|
||||
"height": 83,
|
||||
"url": "https://static1.e621.net/data/preview/7b/ee/7beea9cb1fead46303f2bb83c6718c64.jpg"
|
||||
},
|
||||
"sample": {
|
||||
"has": true,
|
||||
"height": 471,
|
||||
"width": 850,
|
||||
"url": "https://static1.e621.net/data/sample/7b/ee/7beea9cb1fead46303f2bb83c6718c64.jpg",
|
||||
"alternates": {}
|
||||
},
|
||||
"score": { "up": 1302, "down": -19, "total": 1283 },
|
||||
"tags": {
|
||||
"general": [
|
||||
"5_toes",
|
||||
"animal_genitalia",
|
||||
"balls",
|
||||
"bed",
|
||||
"bed_covers",
|
||||
"bedding",
|
||||
"bestiality",
|
||||
"big_balls",
|
||||
"bodily_fluids",
|
||||
"brown_body",
|
||||
"brown_fur",
|
||||
"butt",
|
||||
"countershade_torso",
|
||||
"countershading",
|
||||
"cum",
|
||||
"cum_in_pussy",
|
||||
"cum_inside",
|
||||
"cum_on_ground",
|
||||
"cum_on_wall",
|
||||
"duo",
|
||||
"faceless_character",
|
||||
"faceless_female",
|
||||
"faceless_human",
|
||||
"feet",
|
||||
"female",
|
||||
"female_human",
|
||||
"female_on_bottom",
|
||||
"female_on_feral",
|
||||
"female_penetrated",
|
||||
"feral",
|
||||
"feral_on_top",
|
||||
"feral_penetrating",
|
||||
"feral_penetrating_female",
|
||||
"feral_penetrating_human",
|
||||
"fur",
|
||||
"furniture",
|
||||
"genital_fluids",
|
||||
"genitals",
|
||||
"huge_penetration",
|
||||
"human_on_bottom",
|
||||
"human_on_feral",
|
||||
"human_penetrated",
|
||||
"inside",
|
||||
"interspecies",
|
||||
"lying",
|
||||
"lying_on_ground",
|
||||
"male",
|
||||
"male/female",
|
||||
"male_feral",
|
||||
"male_on_human",
|
||||
"male_on_top",
|
||||
"male_penetrating",
|
||||
"male_penetrating_female",
|
||||
"male_penetrating_human",
|
||||
"on_back",
|
||||
"on_bottom",
|
||||
"on_ground",
|
||||
"on_top",
|
||||
"penetration",
|
||||
"penile",
|
||||
"penile_penetration",
|
||||
"penis_in_pussy",
|
||||
"pillow",
|
||||
"profanity",
|
||||
"sex",
|
||||
"tail",
|
||||
"text",
|
||||
"throbbing",
|
||||
"throbbing_balls",
|
||||
"toes",
|
||||
"vaginal",
|
||||
"vaginal_penetration",
|
||||
"voyeur",
|
||||
"voyeur_pov"
|
||||
],
|
||||
"artist": ["dark_violet"],
|
||||
"contributor": [],
|
||||
"copyright": [],
|
||||
"character": ["dex_(jelomaus)", "jess_(dark_violet)"],
|
||||
"species": [
|
||||
"canid",
|
||||
"canine",
|
||||
"canis",
|
||||
"domestic_dog",
|
||||
"german_shepherd",
|
||||
"herding_dog",
|
||||
"human",
|
||||
"mammal",
|
||||
"pastoral_dog"
|
||||
],
|
||||
"invalid": [],
|
||||
"meta": [
|
||||
"2024",
|
||||
"comic",
|
||||
"digital_media_(artwork)",
|
||||
"english_text",
|
||||
"hi_res",
|
||||
"story",
|
||||
"story_in_description"
|
||||
],
|
||||
"lore": []
|
||||
},
|
||||
"locked_tags": [],
|
||||
"change_seq": 63286621,
|
||||
"flags": {
|
||||
"pending": false,
|
||||
"flagged": false,
|
||||
"note_locked": false,
|
||||
"status_locked": false,
|
||||
"rating_locked": false,
|
||||
"deleted": false
|
||||
},
|
||||
"rating": "e",
|
||||
"fav_count": 2491,
|
||||
"sources": [
|
||||
"https://www.furaffinity.net/view/59011846/",
|
||||
"https://www.furaffinity.net/user/jelomaus/",
|
||||
"https://d.furaffinity.net/art/jelomaus/1732984558/1732984558.jelomaus_dex_x_jess_6.png",
|
||||
"https://d.furaffinity.net/art/darkviolet/1732984575/1732984575.darkviolet_dex_x_jess_6.png",
|
||||
"https://www.furaffinity.net/view/59011850/",
|
||||
"https://www.furaffinity.net/user/darkviolet/"
|
||||
],
|
||||
"pools": [44693],
|
||||
"relationships": {
|
||||
"parent_id": null,
|
||||
"has_children": false,
|
||||
"has_active_children": false,
|
||||
"children": []
|
||||
},
|
||||
"approver_id": null,
|
||||
"uploader_id": 589819,
|
||||
"description": "h2.Doggy Day Care [6/8]\n\n\nYou gave in, and ordered in pizza about an hour ago. You weren't gonna do any groceries today, and you certainly weren't gonna leave long enough to cook anything. Honestly, you're just thankful that your computer screen can't be seen from the door. What would that delivery guy say, if he'd seen...?\n\nSo you lounge back, one foot on the desk, a half-eaten slice of Spicy Vegetarian dangling from one hand, strings of mozzarella swaying like the strands of his cum from the bed frame...\n\nGod, the way his hips pound into her...\n\nYou swallow idly. The stream is still going; it had several hundred viewers at last count. You stopped checking the chat, too. There was a beauty here, and everyone there seemed to caught up in what other's thought of their opinions to say that, even if they did think it.\n\nSo you watch, those swinging balls, the flourishing tail, the way her toes curl and her legs lock around his perfectly toned waist... his elegant crouch, his body wound around hers, his muscled hips slamming into her rear over and over and over as the cheap microphone catches her voice gasping sweet everythings at him...\n\nYou don't even notice where your other hand is until it's reached your crotch.",
"comment_count": 12,
"is_favorited": true,
"has_notes": false,
"duration": null
},
{
"id": 5306537,
"created_at": "2025-01-14T01:01:28.128-05:00",
"updated_at": "2025-01-27T15:02:24.688-05:00",
"file": {
"width": 1800,
"height": 2700,
"ext": "png",
"size": 5326105,
"md5": "73859965f57aa05c59dbb977fe333f8c",
"url": "https://static1.e621.net/data/73/85/73859965f57aa05c59dbb977fe333f8c.png"
},
"preview": {
"width": 100,
"height": 150,
"url": "https://static1.e621.net/data/preview/73/85/73859965f57aa05c59dbb977fe333f8c.jpg"
},
"sample": {
"has": true,
"height": 1275,
"width": 850,
"url": "https://static1.e621.net/data/sample/73/85/73859965f57aa05c59dbb977fe333f8c.jpg",
"alternates": {}
},
"score": { "up": 1575, "down": -10, "total": 1565 },
"tags": {
"general": [
"animal_genitalia",
"animal_penis",
"anthro",
"balls",
"black_lips",
"black_nose",
"black_penis",
"blush",
"bodily_fluids",
"brown_body",
"brown_fur",
"canine_genitalia",
"canine_penis",
"circumcised",
"circumcision_scar",
"close-up",
"cum",
"cum_in_mouth",
"cum_inside",
"cum_on_tongue",
"cumshot",
"cumshot_in_mouth",
"dialogue",
"drinking",
"drinking_cum",
"dripping",
"drooling",
"ejaculation",
"erection",
"eye_scar",
"faceless_character",
"faceless_male",
"facial_scar",
"fellatio",
"fingers",
"floppy_ears",
"fluffy",
"foreskin",
"fur",
"genital_fluids",
"genital_scar",
"genitals",
"group",
"hair",
"handjob",
"heart_eyes",
"heart_symbol",
"humanoid_genitalia",
"humanoid_penis",
"knot",
"lips",
"locker",
"locker_room",
"male",
"male/male",
"male_penetrating",
"motion_lines",
"multicolored_body",
"muscular",
"muscular_anthro",
"muscular_male",
"nipples",
"nude",
"onomatopoeia",
"open_mouth",
"oral",
"oral_penetration",
"orange_body",
"orange_fur",
"orgasm",
"penetration",
"penile",
"penis",
"penis_in_mouth",
"penis_on_shoulder",
"penis_on_tongue",
"pink_penis",
"pink_tongue",
"precum",
"profanity",
"red_penis",
"retracted_foreskin",
"saliva",
"saliva_drip",
"saliva_string",
"scar",
"sex",
"sound_effects",
"striped_body",
"stripes",
"teal_eyes",
"teeth",
"text",
"tongue",
"tongue_out",
"tongue_out_blowjob",
"tuft",
"vein",
"veiny_penis",
"yellow_body",
"yellow_fur"
],
"artist": ["notafurrytho"],
"contributor": [],
"copyright": [],
"character": ["kodii"],
"species": [
"bird_dog",
"canid",
"canine",
"canis",
"domestic_dog",
"felid",
"golden_retriever",
"hunting_dog",
"mammal",
"pantherine",
"retriever",
"tiger"
],
"invalid": [],
"meta": [
"2:3",
"absurd_res",
"comic",
"digital_media_(artwork)",
"english_text",
"hi_res"
],
"lore": []
},
"locked_tags": [],
"change_seq": 63782947,
"flags": {
"pending": false,
"flagged": false,
"note_locked": false,
"status_locked": false,
"rating_locked": true,
"deleted": false
},
"rating": "e",
"fav_count": 3308,
"sources": [
"https://www.furaffinity.net/view/59513157/",
"https://bsky.app/profile/notafurrytho.bsky.social/post/3lfolbnlzlk2w",
"https://twitter.com/Not_A_Furry_Tho/status/1879046402304794667"
],
"pools": [],
"relationships": {
"parent_id": null,
"has_children": false,
"has_active_children": false,
"children": []
},
"approver_id": 147515,
"uploader_id": 1210277,
"description": "",
"comment_count": 12,
"is_favorited": true,
"has_notes": false,
"duration": null
},
{
"id": 2518409,
"created_at": "2020-12-06T18:33:25.092-05:00",
"updated_at": "2025-01-24T03:08:20.945-05:00",
"file": {
"width": 900,
"height": 800,
"ext": "png",
"size": 190294,
"md5": "240584fc545f44c3fe615e1f941246c0",
"url": "https://static1.e621.net/data/24/05/240584fc545f44c3fe615e1f941246c0.png"
},
"preview": {
"width": 150,
"height": 133,
"url": "https://static1.e621.net/data/preview/24/05/240584fc545f44c3fe615e1f941246c0.jpg"
},
"sample": {
"has": true,
"height": 755,
"width": 850,
"url": "https://static1.e621.net/data/sample/24/05/240584fc545f44c3fe615e1f941246c0.jpg",
"alternates": {}
},
"score": { "up": 153, "down": -1, "total": 152 },
"tags": {
"general": [
"animal_genitalia",
"animal_penis",
"anus",
"balls",
"black_claws",
"black_nose",
"black_pawpads",
"bodily_fluids",
"butt",
"canine_genitalia",
"canine_penis",
"claws",
"cum",
"dewclaw",
"ejaculation",
"erection",
"feral",
"fur",
"genital_fluids",
"genitals",
"green_eyes",
"hands-free",
"knot",
"looking_at_viewer",
"looking_back",
"male",
"multicolored_body",
"multicolored_fur",
"pawpads",
"penis",
"pink_anus",
"pink_penis",
"pink_tongue",
"presenting",
"presenting_anus",
"presenting_hindquarters",
"raised_tail",
"rear_view",
"simple_background",
"solo",
"spreading",
"tail",
"tail_anus",
"tan_body",
"tan_fur",
"tongue",
"tongue_out"
],
"artist": ["curdog"],
"contributor": [],
"copyright": [],
"character": [],
"species": [
"canid",
"canine",
"canis",
"coyote",
"coywolf",
"hybrid",
"mammal",
"wolf"
],
"invalid": [],
"meta": ["2016", "digital_media_(artwork)", "flat_colors"],
"lore": []
},
"locked_tags": [],
"change_seq": 60731551,
"flags": {
"pending": false,
"flagged": false,
"note_locked": false,
"status_locked": false,
"rating_locked": false,
"deleted": false
},
"rating": "e",
"fav_count": 294,
"sources": ["https://www.furaffinity.net/view/21177374/"],
"pools": [],
"relationships": {
"parent_id": null,
"has_children": false,
"has_active_children": false,
"children": []
},
"approver_id": 169756,
"uploader_id": 656332,
"description": "",
"comment_count": 0,
"is_favorited": true,
"has_notes": false,
"duration": null
},
{
"id": 5129881,
"created_at": "2024-10-21T11:48:47.402-04:00",
"updated_at": "2025-01-27T13:20:01.601-05:00",
"file": {
"width": 4096,
"height": 2130,
"ext": "jpg",
"size": 830854,
"md5": "aea8644225157c991ef20e9b67e0d0ea",
"url": "https://static1.e621.net/data/ae/a8/aea8644225157c991ef20e9b67e0d0ea.jpg"
},
"preview": {
"width": 150,
"height": 78,
"url": "https://static1.e621.net/data/preview/ae/a8/aea8644225157c991ef20e9b67e0d0ea.jpg"
},
"sample": {
"has": true,
"height": 442,
"width": 850,
"url": "https://static1.e621.net/data/sample/ae/a8/aea8644225157c991ef20e9b67e0d0ea.jpg",
"alternates": {}
},
"score": { "up": 1118, "down": -27, "total": 1091 },
"tags": {
"general": [
"anal",
"anal_penetration",
"anthro",
"anthro_on_feral",
"anthro_penetrating",
"anthro_penetrating_feral",
"antlers",
"anus",
"backpack",
"backsack",
"balls",
"bestiality",
"bottomwear",
"brown_hair",
"butt",
"clothed",
"clothing",
"clothing_around_one_leg",
"duo",
"erection",
"face_lick",
"feral",
"feral_penetrated",
"fully_clothed",
"genitals",
"hair",
"happy",
"happy_sex",
"horn",
"humanoid_genitalia",
"humanoid_penis",
"interspecies",
"licking",
"lying",
"male",
"male/male",
"male_on_anthro",
"male_on_feral",
"male_penetrated",
"male_penetrating",
"male_penetrating_male",
"on_back",
"on_side",
"pants",
"partially_clothed",
"penetration",
"penile",
"penile_penetration",
"penis",
"penis_in_ass",
"perineum",
"sex",
"sweater",
"tongue",
"tongue_out",
"topwear",
"underwear",
"underwear_around_one_leg",
"underwear_down"
],
"artist": ["kapri"],
"contributor": [],
"copyright": [],
"character": [],
"species": [
"canid",
"canine",
"canis",
"deer",
"domestic_dog",
"mammal"
],
"invalid": [],
"meta": ["absurd_res", "hi_res"],
"lore": []
},
"locked_tags": [],
"change_seq": 62900406,
"flags": {
"pending": false,
"flagged": false,
"note_locked": false,
"status_locked": false,
"rating_locked": true,
"deleted": false
},
"rating": "e",
"fav_count": 2248,
"sources": [
"https://pbs.twimg.com/media/GaaXoZaW0AA_ron?format=jpg\u0026name=orig",
"https://x.com/kaprileak/status/1848331867302301987/photo/2"
],
"pools": [],
"relationships": {
"parent_id": 5129880,
"has_children": false,
"has_active_children": false,
"children": []
},
"approver_id": 147515,
"uploader_id": 1314348,
"description": "‼️Content warning: Feral‼️\n\nBack from college, better than the dorm",
"comment_count": 4,
"is_favorited": true,
"has_notes": false,
"duration": null
}
]
}
1
test/fixtures/files/domain/e621/job/favorites_empty.json
vendored
Normal file
@@ -0,0 +1 @@
{ "posts": [] }
1
test/fixtures/files/domain/e621/job/favorites_hidden.json
vendored
Normal file
@@ -0,0 +1 @@
{ "success": false, "reason": "Access Denied: This users favorites are hidden" }
300
test/fixtures/files/domain/e621/job/post_favs_0.html
vendored
Normal file
File diff suppressed because one or more lines are too long
253
test/fixtures/files/domain/e621/job/post_favs_1.html
vendored
Normal file
File diff suppressed because one or more lines are too long
182
test/fixtures/files/domain/e621/job/users_index_0.json
vendored
Normal file
@@ -0,0 +1,182 @@
[
{
"id": 2089238,
"created_at": "2024-12-06T22:19:57.484-05:00",
"name": "chongzi",
"level": 20,
"base_upload_limit": 10,
"post_upload_count": 0,
"post_update_count": 0,
"note_update_count": 0,
"is_banned": false,
"can_approve_posts": false,
"can_upload_free": false,
"level_string": "Member",
"avatar_id": null
},
{
"id": 2089237,
"created_at": "2024-12-06T22:19:43.658-05:00",
"name": "sirisak2515",
"level": 20,
"base_upload_limit": 10,
"post_upload_count": 0,
"post_update_count": 0,
"note_update_count": 0,
"is_banned": false,
"can_approve_posts": false,
"can_upload_free": false,
"level_string": "Member",
"avatar_id": null
},
{
"id": 2089236,
"created_at": "2024-12-06T22:19:33.045-05:00",
"name": "Dwarfman402",
"level": 20,
"base_upload_limit": 10,
"post_upload_count": 0,
"post_update_count": 0,
"note_update_count": 0,
"is_banned": false,
"can_approve_posts": false,
"can_upload_free": false,
"level_string": "Member",
"avatar_id": null
},
{
"id": 2089235,
"created_at": "2024-12-06T22:19:15.354-05:00",
"name": "dhqobc",
"level": 20,
"base_upload_limit": 10,
"post_upload_count": 0,
"post_update_count": 0,
"note_update_count": 0,
"is_banned": false,
"can_approve_posts": false,
"can_upload_free": false,
"level_string": "Member",
"avatar_id": null
},
{
"id": 2089234,
"created_at": "2024-12-06T22:18:09.481-05:00",
"name": "otto11146",
"level": 20,
"base_upload_limit": 10,
"post_upload_count": 0,
"post_update_count": 0,
"note_update_count": 0,
"is_banned": false,
"can_approve_posts": false,
"can_upload_free": false,
"level_string": "Member",
"avatar_id": null
},
{
"id": 2089233,
"created_at": "2024-12-06T22:16:36.778-05:00",
"name": "Jermet",
"level": 20,
"base_upload_limit": 10,
"post_upload_count": 0,
"post_update_count": 0,
"note_update_count": 0,
"is_banned": false,
"can_approve_posts": false,
"can_upload_free": false,
"level_string": "Member",
"avatar_id": null
},
{
"id": 2089232,
"created_at": "2024-12-06T22:15:41.661-05:00",
"name": "memesforum",
"level": 20,
"base_upload_limit": 10,
"post_upload_count": 0,
"post_update_count": 0,
"note_update_count": 0,
"is_banned": false,
"can_approve_posts": false,
"can_upload_free": false,
"level_string": "Member",
"avatar_id": null
},
{
"id": 2089231,
"created_at": "2024-12-06T22:12:54.677-05:00",
"name": "Tofu-",
"level": 20,
"base_upload_limit": 10,
"post_upload_count": 0,
"post_update_count": 0,
"note_update_count": 0,
"is_banned": false,
"can_approve_posts": false,
"can_upload_free": false,
"level_string": "Member",
"avatar_id": null
},
{
"id": 2089230,
"created_at": "2024-12-06T22:12:44.995-05:00",
"name": "Bozoyoudontzs",
"level": 20,
"base_upload_limit": 10,
"post_upload_count": 0,
"post_update_count": 0,
"note_update_count": 0,
"is_banned": false,
"can_approve_posts": false,
"can_upload_free": false,
"level_string": "Member",
"avatar_id": null
},
{
"id": 2089229,
"created_at": "2024-12-06T22:12:10.035-05:00",
"name": "dirtcat",
"level": 20,
"base_upload_limit": 10,
"post_upload_count": 0,
"post_update_count": 0,
"note_update_count": 0,
"is_banned": false,
"can_approve_posts": false,
"can_upload_free": false,
"level_string": "Member",
"avatar_id": null
},
{
"id": 2089228,
"created_at": "2024-12-06T22:11:54.511-05:00",
"name": "Luantbj32",
"level": 20,
"base_upload_limit": 10,
"post_upload_count": 0,
"post_update_count": 0,
"note_update_count": 0,
"is_banned": false,
"can_approve_posts": false,
"can_upload_free": false,
"level_string": "Member",
"avatar_id": null
},
{
"id": 2089164,
"created_at": "2024-12-06T20:53:19.640-05:00",
"name": "Cosmefulanito09",
"level": 20,
"base_upload_limit": 10,
"post_upload_count": 0,
"post_update_count": 0,
"note_update_count": 0,
"is_banned": false,
"can_approve_posts": false,
"can_upload_free": false,
"level_string": "Member",
"avatar_id": null
}
]
Some files were not shown because too many files have changed in this diff