create inkbunny

This commit is contained in:
Dylan Knutson
2023-09-14 18:15:59 -07:00
parent 6263660341
commit 404baccbdf
19 changed files with 2461 additions and 45 deletions

View File

@@ -12,6 +12,7 @@ Rake.application.rake_require "metrics"
Rake.application.rake_require "fa"
Rake.application.rake_require "e621"
Rake.application.rake_require "twitter"
Rake.application.rake_require "ib"
task :set_ar_stdout => :environment do
ActiveRecord::Base.logger = Logger.new($stdout)

View File

@@ -1,5 +1,6 @@
class Domain::Inkbunny::Job::Base < Scraper::JobBase
discard_on ActiveJob::DeserializationError
queue_as :inkbunny
def self.http_factory_method
:get_inkbunny_http_client

View File

@@ -0,0 +1,53 @@
module Domain::Inkbunny::Job
  # Downloads the binary payload for one Domain::Inkbunny::File record and
  # stores the resulting HTTP log entry / blob entry on the model.
  #
  # args:
  #   :file            - the Domain::Inkbunny::File to download (required)
  #   :caused_by_entry - optional HttpLogEntry that triggered this job
  #
  # A file already in the "error" state is retried at most three times;
  # after that the job bails out without making another request.
  class FileJob < Base
    def perform(args)
      file = args[:file] || fatal_error("file is required")
      caused_by = args[:caused_by_entry]

      # Tag every log line with "[<id> / <ib_file_id> / <state>] ".
      logger.prefix = proc {
        "[" + [file.id, file.ib_file_id, file.state]
          .map { |part| part.to_s.bold }
          .join(" / ") + "] "
      }

      # Give up permanently once a failing file has exhausted its retries.
      if file.state == "error"
        attempts = file.state_detail&.dig("error", "retry_count") || 0
        if attempts >= 3
          logger.error("file has been retried 3 times, giving up")
          return
        end
      end

      response = http_client.get(file.url_str, caused_by_entry: caused_by)

      unless response.status_code == 200
        record_failure(file, response)
        return
      end

      file.state = :ok
      file.log_entry = response.log_entry
      file.blob_entry = response.log_entry.response
      file.save!
      logger.info "downloaded file"
    end

    private

    # Persists error bookkeeping for a failed download, then either logs
    # (404: permanent, no retry) or raises so the job is retried later.
    def record_failure(file, response)
      file.state = :error
      error_detail = (file.state_detail["error"] ||= {})
      error_detail["status_code"] = response.status_code
      error_detail["log_entry_id"] = response.log_entry.id
      error_detail["retry_count"] = (error_detail["retry_count"] || 0) + 1
      file.save!
      if response.status_code == 404
        logger.error("#{response.status_code}, not retrying download")
      else
        fatal_error("#{response.status_code}, will retry later")
      end
    end
  end
end

View File

@@ -1,10 +1,162 @@
module Domain::Inkbunny::Job
  # Polls Inkbunny's api_search endpoint for the newest submissions,
  # creating/updating Post, User and File models, and enqueues a FileJob
  # download for each newly-seen file.
  class LatestPostsJob < Base
    queue_as :inkbunny_latest_posts

    def perform(args)
      # NOTE(review): the result of this first GET is unused — `response` is
      # never read and `url` is immediately reassigned below. Presumably
      # leftover code; confirm and delete.
      url = "https://inkbunny.net/api_submissions.php"
      response = http_client.get(url, caused_by_entry: @first_browse_page_entry || @caused_by_entry)
      # Newest-first search; keyword/title/description matching disabled.
      url = "https://inkbunny.net/api_search.php?orderby=create_datetime&keywords=no&title=no&description=no"
      @api_search_response = http_client.post(url, caused_by_entry: @first_browse_page_entry || @caused_by_entry)
      if @api_search_response.status_code != 200
        fatal_error("api_search failed: #{@api_search_response.status_code}")
      end
      api_search_json = JSON.parse(@api_search_response.body)
      handle_search_response(api_search_json)
    end

    private

    # Creates any missing Post (and creator User) rows for the submissions in
    # the search response, shallow-updates every post, then fetches full
    # details (api_submissions) for only the posts whose files appear changed.
    def handle_search_response(api_search_json)
      ib_submission_jsons = api_search_json["submissions"]
      ib_submission_ids = ib_submission_jsons.map { |j| j["submission_id"]&.to_i }
      # Preload all posts referenced by this page, keyed by Inkbunny post id.
      @ib_post_id_to_model = Domain::Inkbunny::Post
        .where(ib_post_id: ib_submission_ids)
        .includes(:files, :creator)
        .index_by(&:ib_post_id)
      new_posts = []
      # NOTE(review): `users` is never appended to (users are saved eagerly
      # via user.save! below), so the users pass in the transaction is a
      # no-op — confirm which of the two save paths is intended.
      users = []
      ib_submission_jsons.each do |submission_json|
        ib_post_id = submission_json["submission_id"]&.to_i
        # Only build models for posts we have never seen before.
        unless @ib_post_id_to_model[ib_post_id]
          post = Domain::Inkbunny::Post.new({
            ib_post_id: ib_post_id,
          })
          # The block only runs when the user record is new, seeding the name.
          user = Domain::Inkbunny::User.find_or_initialize_by({
            ib_user_id: submission_json["user_id"].to_i,
          }) do |user|
            user.name = submission_json["username"]
          end
          user.save!
          post.creator = user
          new_posts << post
          @ib_post_id_to_model[ib_post_id] = post
        end
      end
      Domain::Inkbunny::Post.transaction do
        users.select do |user|
          user.new_record? || user.changed?
        end.each(&:save!)
        new_posts.each(&:save!)
      end
      # do shallow updates of all posts
      needs_deep_update_posts = []
      Domain::Inkbunny::Post.transaction do
        ib_submission_jsons.each do |submission_json|
          needs_deep_update, post = shallow_update_post_from_submission_json(submission_json)
          needs_deep_update_posts << post if needs_deep_update
        end
      end
      # TODO - check condition for needing a deep update
      # Such as:
      # - Never been deep updated before
      # - Number of files changed
      # - Latest file updated timestamp changed
      # - Don't have a user avatar yet
      if needs_deep_update_posts.any?
        # One batched api_submissions call for every post needing details.
        ids_list = needs_deep_update_posts.map(&:ib_post_id).join(",")
        url = "https://inkbunny.net/api_submissions.php?" +
          "submission_ids=#{ids_list}" +
          "&show_description=yes&show_writing=yes&show_pools=yes"
        @api_submissions_response = http_client.get(
          url,
          caused_by_entry: @api_search_response.log_entry,
        )
        if @api_submissions_response.status_code != 200
          fatal_error("api_submissions failed: #{@api_submissions_response.status_code}")
        end
        api_submissions_json = JSON.parse(@api_submissions_response.body)
        # One transaction per post so a single bad submission does not roll
        # back the whole batch.
        api_submissions_json["submissions"].each do |submission_json|
          Domain::Inkbunny::Post.transaction do
            deep_update_post_from_submission_json(submission_json)
          end
        end
      end
    end

    # Copies the cheap, always-present search-result fields onto the post.
    # Returns [needs_deep_update, post]; a deep update is requested when the
    # last-file-updated timestamp or file count no longer matches local state.
    def shallow_update_post_from_submission_json(json)
      post = post_for_json(json)
      post.shallow_updated_at = Time.now
      post.title = json["title"]
      post.posted_at = Time.parse json["create_datetime"]
      post.last_file_updated_at = Time.parse json["last_file_update_datetime"]
      post.num_files = json["pagecount"]&.to_i
      post.rating = json["rating_id"]&.to_i
      post.submission_type = json["submission_type_id"]&.to_i
      post.ib_detail_raw = json
      # Dirty-tracking (_changed?) must be read before save! resets it.
      needs_deep_update = post.last_file_updated_at_changed? ||
        post.num_files_changed? ||
        post.files.count != post.num_files
      post.save!
      [needs_deep_update, post]
    end

    # Applies the full api_submissions payload: description, creator avatar
    # URL, and per-file records. Files with an unseen md5_initial are created
    # and a FileJob download is enqueued for each.
    def deep_update_post_from_submission_json(submission_json)
      post = post_for_json(submission_json)
      logger.info "deep update post #{post.ib_post_id.to_s.bold}"
      post.deep_updated_at = Time.now
      post.description = submission_json["description"]
      # TODO - enqueue avatar download job if needed
      if submission_json["user_icon_url_large"]
        post.creator.avatar_url_str = submission_json["user_icon_url_large"]
        post.creator.save! if post.creator.changed?
      end
      # Existing files are deduplicated on the md5 of the original upload.
      post_files_by_md5 = post.files.index_by(&:md5_initial)
      file_jsons = submission_json["files"] || fatal_error("no files[] array")
      file_jsons.each do |file_json|
        md5_initial = file_json["initial_file_md5"]
        next if post_files_by_md5[md5_initial]
        md5_full = file_json["full_file_md5"]
        file = post.files.create!({
          ib_file_id: file_json["file_id"]&.to_i,
          ib_created_at: Time.parse(file_json["create_datetime"]),
          file_order: file_json["submission_file_order"]&.to_i,
          ib_detail_raw: file_json,
          file_name: file_json["file_name"],
          url_str: file_json["file_url_full"],
          md5_initial: md5_initial,
          md5_full: md5_full,
          md5s: {
            "initial_file_md5": md5_initial,
            "full_file_md5": file_json["full_file_md5"],
            "large_file_md5": file_json["large_file_md5"],
            "small_file_md5": file_json["small_file_md5"],
            "thumbnail_md5": file_json["thumbnail_md5"],
          },
        })
        logger.info "[post #{post.ib_post_id.to_s.bold}]: " +
          "new file #{file.ib_file_id.to_s.bold} - #{file.file_name.black.bold}"
        defer_job(Domain::Inkbunny::Job::FileJob, {
          file: file,
          caused_by_entry: @api_submissions_response.log_entry,
        })
      end
      post.save!
    end

    # Looks up the preloaded post model for a submission hash; both a missing
    # submission_id and an unknown post id are treated as fatal.
    def post_for_json(submission_json)
      post_id = submission_json["submission_id"]&.to_i || fatal_error(
        "submission_id not found in submission_json: #{submission_json.keys.join(", ")}"
      )
      @ib_post_id_to_model[post_id] || fatal_error(
        "post not found for ib_post_id #{post_id}"
      )
    end
  end
end

View File

@@ -1,3 +1,28 @@
# One downloadable file belonging to an Inkbunny post. Tracks the source
# URL, the Inkbunny-reported MD5 digests, and the HTTP log / blob entries
# captured when the file is downloaded (see Domain::Inkbunny::Job::FileJob).
class Domain::Inkbunny::File < ReduxApplicationRecord
  self.table_name = "domain_inkbunny_files"

  belongs_to :post,
    class_name: "::Domain::Inkbunny::Post",
    inverse_of: :files

  # Downloaded bytes; absent until a download has succeeded.
  belongs_to :blob_entry,
    class_name: "::BlobEntryP",
    foreign_key: :blob_entry_sha256,
    optional: true

  # HTTP request/response record of the successful download, if any.
  belongs_to :log_entry,
    class_name: "::HttpLogEntry",
    optional: true

  # :error is set when a download fails; details (status code, retry count,
  # log entry id) are kept under state_detail["error"].
  enum :state, %i[ok error]

  after_initialize do
    # Defaults for freshly-built records: healthy state, empty detail hash.
    self.state ||= :ok
    self.state_detail ||= {}
  end

  validates_presence_of(%i[
    ib_file_id file_name url_str
    ib_created_at file_order
    md5_initial md5_full md5s
  ])
end

View File

@@ -5,7 +5,30 @@ class Domain::Inkbunny::Post < ReduxApplicationRecord
class_name: "::Domain::Inkbunny::User",
inverse_of: :posts
has_many :files,
class_name: "::Domain::Inkbunny::File",
inverse_of: :post
enum :state, %i[ok error]
enum :rating, %i[general mature adult]
enum :submission_type, %i[
unknown
picture_pinup
sketch
picture_series
comic
portfolio
flash_animation
flash_interactive
video_feature
video_animation
music_single
music_album
writing_document
character_sheet
photography
]
after_initialize do
self.state ||= :ok
self.state_detail ||= {}

View File

@@ -6,6 +6,7 @@ class Domain::Inkbunny::User < ReduxApplicationRecord
inverse_of: :creator,
foreign_key: :creator_id
validates_presence_of :ib_user_id, :name
enum :state, %i[ok error]
after_initialize do
self.state ||= :ok

View File

@@ -13,27 +13,37 @@ class CreateInkbunnyInitialModels < ActiveRecord::Migration[7.0]
t.integer :state, null: false
t.json :state_detail
t.string :name, null: false, unique: true
t.bigint :ib_user_id
t.timestamps
end
t.string :name, null: false
t.bigint :ib_user_id, null: false
t.index :ib_user_id, unique: true
# profile avatar (and log entry if there was one)
t.string :avatar_url_str
t.binary :avatar_file_sha256
t.bigint :avatar_file_log_entry_id
create_table :domain_inkbunny_user_avatars do |t|
t.references :user, null: false
t.timestamps
end
create_table :domain_inkbunny_posts do |t|
t.integer :state, null: false
t.json :state_detail
t.jsonb :state_detail
t.datetime :shallow_updated_at
t.datetime :deep_updated_at
t.references :creator
t.bigint :ib_post_id
t.string :title
t.string :description
t.string :writing
t.timestamp :posted_at
t.timestamp :last_file_updated_at
t.integer :rating
# submission type e.g. sketch, pinup, music
t.integer :ib_type
t.integer :submission_type
t.integer :num_views
t.integer :num_files
t.jsonb :ib_detail_raw
@@ -41,17 +51,26 @@ class CreateInkbunnyInitialModels < ActiveRecord::Migration[7.0]
end
create_table :domain_inkbunny_files do |t|
t.references :post, null: false
t.integer :ordinal, null: false
t.integer :state, null: false
t.jsonb :state_detail, null: false, default: {}
t.string :name
# no index on :post as we include it in the [:post, :file_order] idx
t.references :post, null: false, index: false
t.references :log_entry, index: false
t.binary :blob_entry_sha256
t.integer :file_order, null: false
t.index [:post_id, :file_order]
t.bigint :ib_file_id
t.jsonb :ib_detail_raw
t.datetime :ib_created_at
t.string :file_name
t.string :url_str
t.string :md5_initial
t.string :md5_full
t.jsonb :md5s
t.datetime :posted_at
t.jsonb :ib_detail_raw
t.timestamps
end

40
db/schema.rb generated
View File

@@ -862,18 +862,23 @@ ActiveRecord::Schema[7.0].define(version: 2023_08_26_003811) do
end
create_table "domain_inkbunny_files", force: :cascade do |t|
t.integer "state", null: false
t.jsonb "state_detail", default: {}, null: false
t.bigint "post_id", null: false
t.integer "ordinal", null: false
t.string "name"
t.bigint "log_entry_id"
t.binary "blob_entry_sha256"
t.integer "file_order", null: false
t.bigint "ib_file_id"
t.jsonb "ib_detail_raw"
t.datetime "ib_created_at"
t.string "file_name"
t.string "url_str"
t.string "md5_initial"
t.string "md5_full"
t.jsonb "md5s"
t.datetime "posted_at"
t.jsonb "ib_detail_raw"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["post_id"], name: "index_domain_inkbunny_files_on_post_id"
t.index ["post_id", "file_order"], name: "index_domain_inkbunny_files_on_post_id_and_file_order"
end
create_table "domain_inkbunny_follows", id: false, force: :cascade do |t|
@@ -909,12 +914,20 @@ ActiveRecord::Schema[7.0].define(version: 2023_08_26_003811) do
create_table "domain_inkbunny_posts", force: :cascade do |t|
t.integer "state", null: false
t.json "state_detail"
t.jsonb "state_detail"
t.datetime "shallow_updated_at"
t.datetime "deep_updated_at"
t.bigint "creator_id"
t.bigint "ib_post_id"
t.string "title"
t.string "description"
t.string "writing"
t.datetime "posted_at", precision: nil
t.datetime "last_file_updated_at", precision: nil
t.integer "rating"
t.integer "ib_type"
t.integer "submission_type"
t.integer "num_views"
t.integer "num_files"
t.jsonb "ib_detail_raw"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
@@ -931,20 +944,17 @@ ActiveRecord::Schema[7.0].define(version: 2023_08_26_003811) do
t.datetime "updated_at", null: false
end
create_table "domain_inkbunny_user_avatars", force: :cascade do |t|
t.bigint "user_id", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["user_id"], name: "index_domain_inkbunny_user_avatars_on_user_id"
end
create_table "domain_inkbunny_users", force: :cascade do |t|
t.integer "state", null: false
t.json "state_detail"
t.string "name", null: false
t.bigint "ib_user_id"
t.bigint "ib_user_id", null: false
t.string "avatar_url_str"
t.binary "avatar_file_sha256"
t.bigint "avatar_file_log_entry_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["ib_user_id"], name: "index_domain_inkbunny_users_on_ib_user_id", unique: true
end
create_table "domain_twitter_medias", id: false, force: :cascade do |t|

8
rake/ib.rake Normal file
View File

@@ -0,0 +1,8 @@
# Rake tasks for the Inkbunny ("ib") scraper domain.
namespace :ib do
  # Fix: the description previously said "e621 posts index job" — a
  # copy-paste leftover from rake/e621.rake; this task enqueues the
  # Inkbunny latest-posts job.
  desc "run a single inkbunny latest posts job"
  task :latest_posts_job => :environment do
    # Negative priority (-10) so a manually-triggered run jumps the queue.
    Domain::Inkbunny::Job::LatestPostsJob.
      set(priority: -10).
      perform_later({})
  end
end

View File

@@ -0,0 +1,142 @@
require "rails_helper"
# Specs for Domain::Inkbunny::Job::FileJob: success path, error bookkeeping,
# recovery after failure, and the bounded-retry bail-out.
describe Domain::Inkbunny::Job::FileJob do
  # Fixture image and its digests. NOTE(review): the fixture path and URL
  # reuse e621 test assets — presumably fine since only the bytes matter
  # here; confirm. Also note constants defined inside a describe block are
  # defined on Object and leak across spec files.
  AN_IMAGE_SHA256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
  AN_IMAGE_MD5 = "d41d8cd98f00b204e9800998ecf8427e"
  AN_IMAGE_PATH = "domain/e621/job/an-image.png"
  AN_IMAGE_URL = "https://static1.e621.net/file/foo.png"

  let(:http_client_mock) { instance_double("::Scraper::HttpClient") }

  before do
    # Route the job's HTTP client factory through the mock.
    Scraper::ClientFactory.http_client_mock = http_client_mock
  end

  # A persisted file (with parent post and creator) pointing at the mock URL.
  let(:file) do
    Domain::Inkbunny::File.create!({
      ib_file_id: 12345,
      md5_initial: AN_IMAGE_MD5,
      md5_full: AN_IMAGE_MD5,
      url_str: AN_IMAGE_URL,
      file_name: "foo.png",
      ib_created_at: Time.now,
      file_order: 1,
      md5s: { "initial_file_md5": AN_IMAGE_MD5 },
      post: Domain::Inkbunny::Post.create!({
        ib_post_id: 67891,
        creator: Domain::Inkbunny::User.create!({
          ib_user_id: 12345,
          name: "TheUser",
        }),
      }),
    })
  end

  describe "#perform" do
    it "downloads the file if url_str is present" do
      hle = SpecUtil.create_http_log_entry
      SpecUtil.init_http_client_mock(
        http_client_mock, [
          {
            uri: AN_IMAGE_URL,
            status_code: 200,
            content_type: "image/png",
            contents: SpecUtil.read_fixture_file(AN_IMAGE_PATH),
            caused_by_entry: hle,
          },
        ]
      )
      perform_now({ file: file, caused_by_entry: hle })
      file.reload
      # The stored blob must be exactly the logged response body.
      expect(file.log_entry.response).to eq(file.blob_entry)
      expect(file.blob_entry.sha256_hex).to eq(AN_IMAGE_SHA256)
    end

    it "marks the post as errored if the download fails" do
      hles = SpecUtil.init_http_client_mock(
        http_client_mock, [
          {
            uri: AN_IMAGE_URL,
            status_code: 404,
            content_type: "text/html",
            contents: "not found",
          },
        ]
      )
      # 404 is permanent: the job records the error but does not raise.
      perform_now({ file: file })
      file.reload
      expect(file.state).to eq("error")
      expect(file.blob_entry).to be_nil
      # Error bookkeeping lives under state_detail["error"].
      expect(file.state_detail["error"]).to eq(
        {
          "status_code" => 404,
          "log_entry_id" => hles[0].id,
          "retry_count" => 1,
        }
      )
    end

    it "recovers from a failed download" do
      hles = SpecUtil.init_http_client_mock(
        http_client_mock, [
          {
            uri: AN_IMAGE_URL,
            status_code: 500,
            content_type: "text/html",
            contents: "not found",
          },
          {
            uri: AN_IMAGE_URL,
            status_code: 200,
            content_type: "image/png",
            contents: SpecUtil.read_fixture_file(AN_IMAGE_PATH),
          },
        ]
      )
      # First attempt: 500 raises (to trigger a retry) and stores no blob.
      perform_now({ file: file }, should_raise: true)
      file.reload
      expect(file.blob_entry).to be_nil
      # Second attempt succeeds despite the prior error state.
      perform_now({ file: file })
      file.reload
      expect(file.blob_entry).not_to be_nil
      expect(file.blob_entry.sha256_hex).to eq(AN_IMAGE_SHA256)
    end

    it "throws on a non-404 error in order to retry later" do
      num_retries = 3
      hles = SpecUtil.init_http_client_mock(
        http_client_mock, [
          {
            uri: AN_IMAGE_URL,
            status_code: 500,
            content_type: "text/html",
            contents: "not found",
          },
        ] * num_retries
      )
      # Each failed attempt raises and increments retry_count.
      num_retries.times.map do |retry_num|
        perform_now({ file: file }, should_raise: true)
        file.reload
        expect(file.state).to eq("error")
        expect(file.blob_entry).to be_nil
        expect(file.state_detail["error"]).to eq(
          {
            "status_code" => 500,
            "log_entry_id" => hles[retry_num].id,
            "retry_count" => retry_num + 1,
          }
        )
      end
      # the last retry should not throw, but simply bail out early
      perform_now({ file: file })
      file.reload
      expect(file.state).to eq("error")
      expect(file.blob_entry).to be_nil
    end
  end
end

View File

@@ -0,0 +1,186 @@
require "rails_helper"
# Specs for Domain::Inkbunny::Job::LatestPostsJob: initial ingestion of a
# search page, idempotent re-runs, and detection of changed files.
describe Domain::Inkbunny::Job::LatestPostsJob do
  let(:http_client_mock) { instance_double("::Scraper::HttpClient") }

  before do
    # Route the job's HTTP client factory through the mock.
    Scraper::ClientFactory.http_client_mock = http_client_mock
  end

  # The exact search URL the job POSTs to.
  let(:api_search_url) {
    "https://inkbunny.net/api_search.php?orderby=create_datetime&keywords=no&title=no&description=no"
  }
  # Batched detail request for the three posts in api_search.json.
  let(:api_submissions_url) {
    "https://inkbunny.net/api_submissions.php" +
      "?submission_ids=3104202,3104200,3104197&" +
      "show_description=yes&show_writing=yes&show_pools=yes"
  }
  # Detail request for the single post in the 1047334 fixtures.
  let(:api_submissions_1047334_url) {
    "https://inkbunny.net/api_submissions.php" +
      "?submission_ids=1047334&" +
      "show_description=yes&show_writing=yes&show_pools=yes"
  }

  context "the files do not change in the response" do
    # Scripted responses: search → submissions detail → identical search
    # again (the second search should be a no-op).
    let! :log_entries do
      SpecUtil.init_http_client_mock(
        http_client_mock, [
          {
            method: :post,
            uri: api_search_url,
            content_type: "application/json",
            contents: SpecUtil.read_fixture_file("domain/inkbunny/job/api_search.json"),
          },
          {
            method: :post,
            uri: api_submissions_url,
            content_type: "application/json",
            contents: SpecUtil.read_fixture_file("domain/inkbunny/job/api_submissions.json"),
            caused_by_entry_idx: 0,
          },
          # same as the first, should not update or touch any posts
          {
            method: :post,
            uri: api_search_url,
            content_type: "application/json",
            contents: SpecUtil.read_fixture_file("domain/inkbunny/job/api_search.json"),
          },
        ]
      )
    end

    it "creates posts" do
      # First run: 3 posts / 3 users, and 6 files (pagecounts 1 + 4 + 1).
      expect do
        perform_now({})
      end.to(
        change(Domain::Inkbunny::Post, :count).by(3).
          and(change(Domain::Inkbunny::File, :count).by(6)).
          and(change(Domain::Inkbunny::User, :count).by(3))
      )
      user_thendyart = Domain::Inkbunny::User.find_by!(ib_user_id: 941565)
      expect(user_thendyart.name).to eq("ThendyArt")
      user_seff = Domain::Inkbunny::User.find_by!(ib_user_id: 229331)
      expect(user_seff.name).to eq("Seff")
      # Avatar URL comes from the deep (api_submissions) response.
      expect(user_seff.avatar_url_str).to eq("https://us.ib.metapix.net/usericons/large/176/176443_Seff_seffpfp.png")
      post_3104202 = Domain::Inkbunny::Post.find_by!(ib_post_id: 3104202)
      expect(post_3104202.title).to eq("Phantom Touch - Page 25")
      expect(post_3104202.posted_at).to eq(Time.parse("2023-08-27 21:31:40.365597+02"))
      expect(post_3104202.creator).to eq(user_thendyart)
      expect(post_3104202.last_file_updated_at).to eq(Time.parse("2023-08-27 21:30:06.222262+02"))
      expect(post_3104202.num_files).to eq(1)
      expect(post_3104202.rating).to eq("adult")
      expect(post_3104202.submission_type).to eq("comic")
      expect(post_3104202.shallow_updated_at).to be_within(1.second).of(Time.now)
      expect(post_3104202.deep_updated_at).to be_within(1.second).of(Time.now)
      expect(post_3104202.files.count).to eq(1)
      file_4652537 = post_3104202.files.first
      expect(file_4652537.ib_file_id).to eq(4652537)
      expect(file_4652537.file_order).to eq(0)
      expect(file_4652537.md5_initial).to eq("fbeb553c483a346108beeada93d90086")
      expect(file_4652537.md5_full).to eq("15eea2648c8afaee1fef970befb28b24")
      expect(file_4652537.url_str).to eq("https://us.ib.metapix.net/files/full/4652/4652537_ThendyArt_pt_pg_25.jpg")
      post_3104200 = Domain::Inkbunny::Post.find_by!(ib_post_id: 3104200)
      expect(post_3104200.creator).to eq(user_seff)
      expect(post_3104200.title).to eq("Camp Pines Sketch Dump (Aug 2023)")
      expect(post_3104200.description).to match(/Not sure how canon/)
      expect(post_3104200.num_files).to eq(4)
      # should enqueue file download jobs as all are new
      file_jobs = SpecUtil.enqueued_jobs(Domain::Inkbunny::Job::FileJob)
      expect(file_jobs.length).to eq(6)
      expect(file_jobs.map { |job|
        job[:args][0][:file].ib_file_id
      }.sort).to eq([
        4652528, 4652530, 4652531,
        4652534, 4652535, 4652537,
      ])
      # Each download job must be attributed to the api_submissions response.
      file_jobs.each do |job|
        expect(
          job[:args][0][:caused_by_entry]
        ).to eq(log_entries[1])
      end
      # perform another scan, nothing should change
      expect do
        perform_now({})
      end.to(
        change(Domain::Inkbunny::Post, :count).by(0).
          and(change(Domain::Inkbunny::File, :count).by(0)).
          and(change(Domain::Inkbunny::User, :count).by(0))
      )
      # No additional FileJobs enqueued by the idempotent second run.
      expect(
        SpecUtil.enqueued_jobs(Domain::Inkbunny::Job::FileJob).length
      ).to eq(file_jobs.length)
    end
  end

  context "an existing file changed in the response" do
    # Two full scan cycles: "before" fixtures, then "after" fixtures in
    # which post 1047334 gained a new file.
    let! :log_entries do
      SpecUtil.init_http_client_mock(
        http_client_mock, [
          {
            method: :post,
            uri: api_search_url,
            content_type: "application/json",
            contents: SpecUtil.read_fixture_file("domain/inkbunny/job/api_search_1047334_before.json"),
          },
          {
            method: :post,
            uri: api_submissions_1047334_url,
            content_type: "application/json",
            contents: SpecUtil.read_fixture_file("domain/inkbunny/job/api_submissions_1047334_before.json"),
            caused_by_entry_idx: 0,
          },
          {
            method: :post,
            uri: api_search_url,
            content_type: "application/json",
            contents: SpecUtil.read_fixture_file("domain/inkbunny/job/api_search_1047334_after.json"),
          },
          {
            method: :post,
            uri: api_submissions_1047334_url,
            content_type: "application/json",
            contents: SpecUtil.read_fixture_file("domain/inkbunny/job/api_submissions_1047334_after.json"),
            caused_by_entry_idx: 2,
          },
        ]
      )
    end

    it "updates posts and files" do
      # First scan ingests the original single file and enqueues its download.
      perform_now({})
      post_1047334 = Domain::Inkbunny::Post.find_by!(ib_post_id: 1047334)
      file_1445274 = post_1047334.files.find_by!(ib_file_id: 1445274)
      expect(file_1445274.md5_initial).to eq("0127e88651e73140718f3b8f7f2037d5")
      expect(file_1445274.md5_full).to eq("aa0e22f86a9c345ead2bd711a1c91986")
      file_jobs = SpecUtil.enqueued_jobs(Domain::Inkbunny::Job::FileJob)
      expect(file_jobs.size).to eq(1)
      SpecUtil.clear_enqueued_jobs!(Domain::Inkbunny::Job::FileJob)
      # second perform should create the new file
      expect do
        perform_now({})
      end.to(
        change(Domain::Inkbunny::Post, :count).by(0).
          and(change(Domain::Inkbunny::File, :count).by(1)).
          and(change(Domain::Inkbunny::User, :count).by(0))
      )
      post_1047334.reload
      expect(post_1047334.files.count).to eq(2)
      file_4680214 = post_1047334.files.find_by!(ib_file_id: 4680214)
      expect(file_4680214.ib_file_id).to eq(4680214)
      expect(file_4680214.md5_initial).to eq("9fbfbdf3cc6d8b3538b7edbfe36bde8c")
      expect(file_4680214.md5_full).to eq("d2e30d953f4785e22c3d9c722249a974")
      # Only the newly-added file gets a download job.
      file_jobs = SpecUtil.enqueued_jobs(Domain::Inkbunny::Job::FileJob)
      expect(file_jobs.size).to eq(1)
    end
  end
end

View File

@@ -107,7 +107,7 @@ class SpecUtil
uri: request[:uri],
contents: request[:contents],
content_type: request[:content_type],
status_code: request[:status_code],
status_code: request[:status_code] || 200,
)
log_entry.save!
log_entries << log_entry
@@ -119,7 +119,8 @@ class SpecUtil
caused_by_entry = request[:caused_by_entry]
end
expect(http_client_mock).to(receive(:get).
method = request[:method] || :get
expect(http_client_mock).to(receive(method).
with(
log_entry.uri.to_s,
{ caused_by_entry: caused_by_entry }
@@ -138,7 +139,7 @@ class SpecUtil
File.read(Rails.root.join("test/fixtures/files", path), mode: mode)
end
def self.enqueued_jobs(job_klass = nil)
def self.enqueued_jobs(job_class = nil)
GoodJob::Job.order(created_at: :asc).all.map do |job|
{
job: job.job_class.constantize,
@@ -148,24 +149,32 @@ class SpecUtil
good_job: job,
}
end.filter do |job|
job_is_klass(job_klass, job)
job_is_class(job_class, job)
end
end
def self.shift_jobs(job_klass = nil, by = 1)
def self.clear_enqueued_jobs!(job_class = nil)
rel = GoodJob::Job
if job_class
rel = rel.where(job_class: job_class.name)
end
rel.destroy_all
end
def self.shift_jobs(job_class = nil, by = 1)
by.times do
job = enqueued_jobs.find { |job| job_is_klass(job_klass, job) }
job = enqueued_jobs.find { |job| job_is_class(job_class, job) }
job[:good_job].destroy if job
end
end
def self.job_is_klass(job_klass, job)
if job_klass.nil?
def self.job_is_class(job_class, job)
if job_class.nil?
true
elsif job_klass.is_a? Array
job_klass.include? job[:job]
elsif job_class.is_a? Array
job_class.include? job[:job]
else
job_klass == job[:job]
job_class == job[:job]
end
end

View File

@@ -0,0 +1,136 @@
{
"sid": "n958z4aO2Iy7WP4bj8Zi8pU-Pm",
"results_count_all": "18000",
"results_count_thispage": 3,
"pages_count": 600,
"page": 1,
"user_location": "",
"search_params": [],
"submissions": [
{
"submission_id": "3104202",
"hidden": "f",
"username": "ThendyArt",
"user_id": "941565",
"create_datetime": "2023-08-27 21:31:40.365597+02",
"create_datetime_usertime": "27 Aug 2023 21:31 CEST",
"last_file_update_datetime": "2023-08-27 21:30:06.222262+02",
"last_file_update_datetime_usertime": "27 Aug 2023 21:30 CEST",
"thumbnail_url_huge_noncustom": "https://us.ib.metapix.net/files/preview/4652/4652537_ThendyArt_pt_pg_25.jpg",
"thumbnail_url_large_noncustom": "https://us.ib.metapix.net/thumbnails/large/4652/4652537_ThendyArt_pt_pg_25_noncustom.jpg",
"thumbnail_url_medium_noncustom": "https://us.ib.metapix.net/thumbnails/medium/4652/4652537_ThendyArt_pt_pg_25_noncustom.jpg",
"thumb_medium_noncustom_x": "93",
"thumb_medium_noncustom_y": "120",
"thumb_large_noncustom_x": "155",
"thumb_large_noncustom_y": "200",
"thumb_huge_noncustom_x": "232",
"thumb_huge_noncustom_y": "300",
"file_name": "4652537_ThendyArt_pt_pg_25.jpg",
"title": "Phantom Touch - Page 25",
"deleted": "f",
"public": "t",
"mimetype": "image/jpeg",
"pagecount": "1",
"rating_id": "2",
"rating_name": "Adult",
"file_url_full": "https://us.ib.metapix.net/files/full/4652/4652537_ThendyArt_pt_pg_25.jpg",
"file_url_screen": "https://us.ib.metapix.net/files/screen/4652/4652537_ThendyArt_pt_pg_25.jpg",
"file_url_preview": "https://us.ib.metapix.net/files/preview/4652/4652537_ThendyArt_pt_pg_25.jpg",
"submission_type_id": "4",
"type_name": "Comic",
"friends_only": "f",
"guest_block": "f",
"scraps": "f"
},
{
"submission_id": "3104200",
"hidden": "f",
"username": "Seff",
"user_id": "229331",
"create_datetime": "2023-08-27 21:30:59.308046+02",
"create_datetime_usertime": "27 Aug 2023 21:30 CEST",
"last_file_update_datetime": "2023-08-27 21:26:14.049+02",
"last_file_update_datetime_usertime": "27 Aug 2023 21:26 CEST",
"thumbnail_url_huge_noncustom": "https://us.ib.metapix.net/files/preview/4652/4652528_Seff_aug23sketches7-1.jpg",
"thumbnail_url_large_noncustom": "https://us.ib.metapix.net/thumbnails/large/4652/4652528_Seff_aug23sketches7-1_noncustom.jpg",
"thumbnail_url_medium_noncustom": "https://us.ib.metapix.net/thumbnails/medium/4652/4652528_Seff_aug23sketches7-1_noncustom.jpg",
"thumb_medium_noncustom_x": "96",
"thumb_medium_noncustom_y": "120",
"thumb_large_noncustom_x": "160",
"thumb_large_noncustom_y": "200",
"thumb_huge_noncustom_x": "240",
"thumb_huge_noncustom_y": "300",
"file_name": "4652528_Seff_aug23sketches7-1.png",
"title": "Camp Pines Sketch Dump (Aug 2023)",
"deleted": "f",
"public": "t",
"mimetype": "image/png",
"pagecount": "4",
"rating_id": "2",
"rating_name": "Adult",
"file_url_full": "https://us.ib.metapix.net/files/full/4652/4652528_Seff_aug23sketches7-1.png",
"file_url_screen": "https://us.ib.metapix.net/files/screen/4652/4652528_Seff_aug23sketches7-1.png",
"file_url_preview": "https://us.ib.metapix.net/files/preview/4652/4652528_Seff_aug23sketches7-1.jpg",
"submission_type_id": "1",
"type_name": "Picture/Pinup",
"friends_only": "f",
"guest_block": "t",
"scraps": "t",
"latest_file_name": "4652535_Seff_aug23sketches6-1.png",
"latest_mimetype": "image/png",
"latest_thumbnail_url_huge_noncustom": "https://us.ib.metapix.net/files/preview/4652/4652535_Seff_aug23sketches6-1.jpg",
"latest_thumbnail_url_large_noncustom": "https://us.ib.metapix.net/thumbnails/large/4652/4652535_Seff_aug23sketches6-1_noncustom.jpg",
"latest_thumbnail_url_medium_noncustom": "https://us.ib.metapix.net/thumbnails/medium/4652/4652535_Seff_aug23sketches6-1_noncustom.jpg",
"latest_thumb_medium_noncustom_x": "80",
"latest_thumb_medium_noncustom_y": "120",
"latest_thumb_large_noncustom_x": "133",
"latest_thumb_large_noncustom_y": "200",
"latest_thumb_huge_noncustom_x": "200",
"latest_thumb_huge_noncustom_y": "300"
},
{
"submission_id": "3104197",
"hidden": "f",
"username": "SoulCentinel",
"user_id": "349747",
"create_datetime": "2023-08-27 21:29:37.995264+02",
"create_datetime_usertime": "27 Aug 2023 21:29 CEST",
"last_file_update_datetime": "2023-08-27 21:24:23.653306+02",
"last_file_update_datetime_usertime": "27 Aug 2023 21:24 CEST",
"thumbnail_url_huge": "https://us.ib.metapix.net/thumbnails/huge/4652/4652530_SoulCentinel_c_bj_bird.jpg",
"thumbnail_url_large": "https://us.ib.metapix.net/thumbnails/large/4652/4652530_SoulCentinel_c_bj_bird.jpg",
"thumbnail_url_medium": "https://us.ib.metapix.net/thumbnails/medium/4652/4652530_SoulCentinel_c_bj_bird.jpg",
"thumb_huge_x": "289",
"thumb_huge_y": "300",
"thumb_large_x": "193",
"thumb_large_y": "200",
"thumb_medium_x": "116",
"thumb_medium_y": "120",
"thumbnail_url_huge_noncustom": "https://us.ib.metapix.net/files/preview/4652/4652530_SoulCentinel_c_bj_bird.gif",
"thumbnail_url_large_noncustom": "https://us.ib.metapix.net/thumbnails/large/4652/4652530_SoulCentinel_c_bj_bird_noncustom.jpg",
"thumbnail_url_medium_noncustom": "https://us.ib.metapix.net/thumbnails/medium/4652/4652530_SoulCentinel_c_bj_bird_noncustom.jpg",
"thumb_medium_noncustom_x": "120",
"thumb_medium_noncustom_y": "93",
"thumb_large_noncustom_x": "200",
"thumb_large_noncustom_y": "154",
"thumb_huge_noncustom_x": "300",
"thumb_huge_noncustom_y": "231",
"file_name": "4652530_SoulCentinel_c_bj_bird.gif",
"title": "Comm - BJ bird",
"deleted": "f",
"public": "t",
"mimetype": "image/gif",
"pagecount": "1",
"rating_id": "2",
"rating_name": "Adult",
"file_url_full": "https://us.ib.metapix.net/files/full/4652/4652530_SoulCentinel_c_bj_bird.gif",
"file_url_screen": "https://us.ib.metapix.net/files/screen/4652/4652530_SoulCentinel_c_bj_bird.gif",
"file_url_preview": "https://us.ib.metapix.net/files/preview/4652/4652530_SoulCentinel_c_bj_bird.gif",
"submission_type_id": "1",
"type_name": "Picture/Pinup",
"friends_only": "f",
"guest_block": "f",
"scraps": "f"
}
]
}

View File

@@ -0,0 +1,46 @@
{
"sid": "n958z4aO2Iy7WP4bj8Zi8pU-Pm",
"results_count_all": "1",
"results_count_thispage": 1,
"pages_count": 1,
"page": 1,
"user_location": "",
"search_params": [],
"submissions": [
{
"submission_id": "1047334",
"hidden": "f",
"username": "zzreg",
"user_id": "110036",
"create_datetime": "2016-03-13 22:18:52.32319+01",
"create_datetime_usertime": "13 Mar 2016 22:18 CET",
"last_file_update_datetime": "2023-09-14 19:07:45.735562+02",
"last_file_update_datetime_usertime": "14 Sep 2023 19:07 CEST",
"thumbnail_url_huge_noncustom": "https://inkbunny.net/private_files/preview/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped.jpg",
"thumbnail_url_large_noncustom": "https://inkbunny.net/private_thumbnails/large/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped_noncustom.jpg",
"thumbnail_url_medium_noncustom": "https://inkbunny.net/private_thumbnails/medium/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped_noncustom.jpg",
"thumb_medium_noncustom_x": "120",
"thumb_medium_noncustom_y": "90",
"thumb_large_noncustom_x": "200",
"thumb_large_noncustom_y": "150",
"thumb_huge_noncustom_x": "300",
"thumb_huge_noncustom_y": "225",
"file_name": "4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped.jpg",
"title": "New Submission",
"deleted": "f",
"public": "f",
"mimetype": "image/jpeg",
"pagecount": "1",
"rating_id": "0",
"rating_name": "General",
"file_url_full": "https://inkbunny.net/private_files/full/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped.jpg",
"file_url_screen": "https://inkbunny.net/private_files/screen/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped.jpg",
"file_url_preview": "https://inkbunny.net/private_files/preview/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped.jpg",
"submission_type_id": "1",
"type_name": "Picture/Pinup",
"friends_only": "f",
"guest_block": "f",
"scraps": "f"
}
]
}

View File

@@ -0,0 +1,46 @@
{
"sid": "n958z4aO2Iy7WP4bj8Zi8pU-Pm",
"results_count_all": "1",
"results_count_thispage": 1,
"pages_count": 1,
"page": 1,
"user_location": "",
"search_params": [],
"submissions": [
{
"submission_id": "1047334",
"hidden": "f",
"username": "zzreg",
"user_id": "110036",
"create_datetime": "2016-03-13 22:18:52.32319+01",
"create_datetime_usertime": "13 Mar 2016 22:18 CET",
"last_file_update_datetime": "2016-03-13 22:18:52.32319+01",
"last_file_update_datetime_usertime": "13 Mar 2016 22:18 CET",
"thumbnail_url_huge_noncustom": "https://inkbunny.net/private_files/full/1445/1445274_zzreg_sname-yellow-small-border.png",
"thumbnail_url_large_noncustom": "https://inkbunny.net/private_files/full/1445/1445274_zzreg_sname-yellow-small-border.png",
"thumbnail_url_medium_noncustom": "https://inkbunny.net/private_files/full/1445/1445274_zzreg_sname-yellow-small-border.png",
"thumb_medium_noncustom_x": "120",
"thumb_medium_noncustom_y": "90",
"thumb_large_noncustom_x": "200",
"thumb_large_noncustom_y": "150",
"thumb_huge_noncustom_x": "300",
"thumb_huge_noncustom_y": "225",
"file_name": "1445274_zzreg_sname-yellow-small-border.png",
"title": "New Submission",
"deleted": "f",
"public": "f",
"mimetype": "image/jpeg",
"pagecount": "1",
"rating_id": "0",
"rating_name": "General",
"file_url_full": "https://inkbunny.net/private_files/full/1445/1445274_zzreg_sname-yellow-small-border.png",
"file_url_screen": "https://inkbunny.net/private_files/full/1445/1445274_zzreg_sname-yellow-small-border.png",
"file_url_preview": "https://inkbunny.net/private_files/full/1445/1445274_zzreg_sname-yellow-small-border.png",
"submission_type_id": "1",
"type_name": "Picture/Pinup",
"friends_only": "f",
"guest_block": "f",
"scraps": "f"
}
]
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,90 @@
{
"sid": "n958z4aO2Iy7WP4bj8Zi8pU-Pm",
"results_count": 1,
"user_location": "",
"submissions": [
{
"submission_id": "1047334",
"keywords": [],
"hidden": "f",
"scraps": "f",
"favorite": "f",
"favorites_count": "0",
"create_datetime": "2016-03-13 22:18:52.32319+01",
"create_datetime_usertime": "13 Mar 2016 22:18 CET",
"last_file_update_datetime": "2023-09-14 19:07:45.735562+02",
"last_file_update_datetime_usertime": "14 Sep 2023 19:07 CEST",
"username": "zzreg",
"user_id": "110036",
"user_icon_file_name": null,
"user_icon_url_large": null,
"user_icon_url_medium": null,
"user_icon_url_small": null,
"file_name": "4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped.jpg",
"file_url_full": "https://inkbunny.net/private_files/full/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped.jpg",
"file_url_screen": "https://inkbunny.net/private_files/screen/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped.jpg",
"file_url_preview": "https://inkbunny.net/private_files/preview/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped.jpg",
"thumbnail_url_huge_noncustom": "https://inkbunny.net/private_files/preview/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped.jpg",
"thumbnail_url_large_noncustom": "https://inkbunny.net/private_thumbnails/large/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped_noncustom.jpg",
"thumbnail_url_medium_noncustom": "https://inkbunny.net/private_thumbnails/medium/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped_noncustom.jpg",
"thumb_medium_noncustom_x": "120",
"thumb_medium_noncustom_y": "90",
"thumb_large_noncustom_x": "200",
"thumb_large_noncustom_y": "150",
"thumb_huge_noncustom_x": "300",
"thumb_huge_noncustom_y": "225",
"files": [
{
"file_id": "4680214",
"file_name": "4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped.jpg",
"file_url_full": "https://inkbunny.net/private_files/full/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped.jpg",
"file_url_screen": "https://inkbunny.net/private_files/screen/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped.jpg",
"file_url_preview": "https://inkbunny.net/private_files/preview/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped.jpg",
"mimetype": "image/jpeg",
"submission_id": "1047334",
"user_id": "110036",
"submission_file_order": "0",
"full_size_x": "1200",
"full_size_y": "900",
"screen_size_x": "920",
"screen_size_y": "690",
"preview_size_x": "300",
"preview_size_y": "225",
"initial_file_md5": "9fbfbdf3cc6d8b3538b7edbfe36bde8c",
"full_file_md5": "d2e30d953f4785e22c3d9c722249a974",
"large_file_md5": "ec7904b8f3ed2421934950d30048aca2",
"small_file_md5": "96a29ea29298cdaf1995f4363e00abc9",
"thumbnail_md5": "426c341368c5170194a40657ff42a865",
"deleted": "f",
"create_datetime": "2023-09-14 19:07:45.735562+02",
"create_datetime_usertime": "14 Sep 2023 19:07 CEST",
"thumbnail_url_huge_noncustom": "https://inkbunny.net/private_files/preview/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped.jpg",
"thumbnail_url_large_noncustom": "https://inkbunny.net/private_thumbnails/large/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped_noncustom.jpg",
"thumbnail_url_medium_noncustom": "https://inkbunny.net/private_thumbnails/medium/4680/4680214_zzreg_how-to-photograph-snakes-15-1200x900-cropped_noncustom.jpg",
"thumb_medium_noncustom_x": "120",
"thumb_medium_noncustom_y": "90",
"thumb_large_noncustom_x": "200",
"thumb_large_noncustom_y": "150",
"thumb_huge_noncustom_x": "300",
"thumb_huge_noncustom_y": "225"
}
],
"pools": [],
"pools_count": 0,
"title": "New Submission",
"deleted": "f",
"public": "f",
"mimetype": "image/jpeg",
"pagecount": "1",
"rating_id": "0",
"rating_name": "General",
"ratings": [],
"submission_type_id": "1",
"type_name": "Picture/Pinup",
"guest_block": "f",
"friends_only": "f",
"comments_count": "0",
"views": "0"
}
]
}

View File

@@ -0,0 +1,90 @@
{
"sid": "n958z4aO2Iy7WP4bj8Zi8pU-Pm",
"results_count": 1,
"user_location": "",
"submissions": [
{
"submission_id": "1047334",
"keywords": [],
"hidden": "f",
"scraps": "f",
"favorite": "f",
"favorites_count": "0",
"create_datetime": "2016-03-13 22:18:52.32319+01",
"create_datetime_usertime": "13 Mar 2016 22:18 CET",
"last_file_update_datetime": "2016-03-13 22:18:52.413705+01",
"last_file_update_datetime_usertime": "13 Mar 2016 22:18 CET",
"username": "zzreg",
"user_id": "110036",
"user_icon_file_name": null,
"user_icon_url_large": null,
"user_icon_url_medium": null,
"user_icon_url_small": null,
"file_name": "1445274_zzreg_sname-yellow-small-border.png",
"file_url_full": "https://inkbunny.net/private_files/full/1445/1445274_zzreg_sname-yellow-small-border.png",
"file_url_screen": "https://inkbunny.net/private_files/screen/1445/1445274_zzreg_sname-yellow-small-border.png",
"file_url_preview": "https://inkbunny.net/private_files/preview/1445/1445274_zzreg_sname-yellow-small-border.jpg",
"thumbnail_url_huge_noncustom": "https://inkbunny.net/private_files/preview/1445/1445274_zzreg_sname-yellow-small-border.jpg",
"thumbnail_url_large_noncustom": "https://inkbunny.net/private_thumbnails/large/1445/1445274_zzreg_sname-yellow-small-border_noncustom.jpg",
"thumbnail_url_medium_noncustom": "https://inkbunny.net/private_thumbnails/medium/1445/1445274_zzreg_sname-yellow-small-border_noncustom.jpg",
"thumb_medium_noncustom_x": "100",
"thumb_medium_noncustom_y": "83",
"thumb_large_noncustom_x": "100",
"thumb_large_noncustom_y": "83",
"thumb_huge_noncustom_x": "100",
"thumb_huge_noncustom_y": "83",
"files": [
{
"file_id": "1445274",
"file_name": "1445274_zzreg_sname-yellow-small-border.png",
"file_url_full": "https://inkbunny.net/private_files/full/1445/1445274_zzreg_sname-yellow-small-border.png",
"file_url_screen": "https://inkbunny.net/private_files/screen/1445/1445274_zzreg_sname-yellow-small-border.png",
"file_url_preview": "https://inkbunny.net/private_files/preview/1445/1445274_zzreg_sname-yellow-small-border.jpg",
"mimetype": "image/png",
"submission_id": "1047334",
"user_id": "110036",
"submission_file_order": "0",
"full_size_x": "100",
"full_size_y": "83",
"screen_size_x": "100",
"screen_size_y": "83",
"preview_size_x": "100",
"preview_size_y": "83",
"initial_file_md5": "0127e88651e73140718f3b8f7f2037d5",
"full_file_md5": "aa0e22f86a9c345ead2bd711a1c91986",
"large_file_md5": "dc89f7a6cf6a75c07f7f33ca2c624335",
"small_file_md5": "859121421566fdac28413bbd100a8df0",
"thumbnail_md5": "44abb3745cc1743376d5e6ce31f591c9",
"deleted": "f",
"create_datetime": "2016-03-13 22:18:52.413705+01",
"create_datetime_usertime": "13 Mar 2016 22:18 CET",
"thumbnail_url_huge_noncustom": "https://inkbunny.net/private_files/preview/1445/1445274_zzreg_sname-yellow-small-border.jpg",
"thumbnail_url_large_noncustom": "https://inkbunny.net/private_thumbnails/large/1445/1445274_zzreg_sname-yellow-small-border_noncustom.jpg",
"thumbnail_url_medium_noncustom": "https://inkbunny.net/private_thumbnails/medium/1445/1445274_zzreg_sname-yellow-small-border_noncustom.jpg",
"thumb_medium_noncustom_x": "100",
"thumb_medium_noncustom_y": "83",
"thumb_large_noncustom_x": "100",
"thumb_large_noncustom_y": "83",
"thumb_huge_noncustom_x": "100",
"thumb_huge_noncustom_y": "83"
}
],
"pools": [],
"pools_count": 0,
"title": "New Submission",
"deleted": "f",
"public": "f",
"mimetype": "image/png",
"pagecount": "1",
"rating_id": "0",
"rating_name": "General",
"ratings": [],
"submission_type_id": "1",
"type_name": "Picture/Pinup",
"guest_block": "f",
"friends_only": "f",
"comments_count": "0",
"views": "0"
}
]
}