Enhance Inkbunny job processing and database schema

- Added support for pool-specific API searches in `ApiSearchPageProcessor` by introducing `pool_id` as a parameter.
- Created a new `UpdatePoolJob` class to handle deep updates for pools, including enqueuing jobs for posts needing updates.
- Updated `UpdatePostsJob` to collect missing pool-post IDs and not-yet-deep-updated pools across all processed chunks, then enqueue batched `UpdatePostsJob` and per-pool `UpdatePoolJob` follow-ups.
- Modified the `Domain::Inkbunny::Pool` model to include a reference to `deep_update_log_entry` for tracking updates.
- Updated database schema to add `deep_update_log_entry_id` to the `domain_inkbunny_pools` table.
- Added tests for the new job functionality and ensured correct processing of API responses.

These changes improve the maintainability and robustness of the Inkbunny job processing system.
This commit is contained in:
Dylan Knutson
2025-01-05 20:15:17 +00:00
parent b35d6878dd
commit 258be5583d
13 changed files with 659 additions and 21 deletions

View File

@@ -76,12 +76,14 @@ class Domain::Inkbunny::Job::ApiSearchPageProcessor
#
# If rid is provided, then the page is for a specific RID set, constructed from a previous API search.
# If ib_user_id is provided, then the page is for the user's gallery.
# If pool_id is provided, then the page is for a specific pool.
# If neither is provided, then the page is for the latest posts.
sig do
params(
ib_user_id: T.nilable(Integer),
username: T.nilable(String),
rid: T.nilable(String),
pool_id: T.nilable(Integer),
page: Integer,
).returns(String)
end
@@ -89,6 +91,7 @@ class Domain::Inkbunny::Job::ApiSearchPageProcessor
ib_user_id: nil,
username: nil,
rid: nil,
pool_id: nil,
page: 1
)
base_url = URI.parse("https://inkbunny.net/api_search.php")
@@ -99,6 +102,11 @@ class Domain::Inkbunny::Job::ApiSearchPageProcessor
if rid.present?
query_params["rid"] = rid
elsif pool_id.present?
query_params["get_rid"] = "yes"
query_params["orderby"] = "create_datetime"
query_params["pool_id"] = pool_id
query_params["keywords"] = "no"
elsif ib_user_id.present?
query_params["get_rid"] = "yes"
query_params["orderby"] = "create_datetime"

View File

@@ -0,0 +1,72 @@
# typed: strict

module Domain::Inkbunny::Job
  # Deep-updates a single Inkbunny pool: pages through the pool's
  # submissions via the api_search endpoint, feeds each page through
  # ApiSearchPageProcessor, then enqueues an UpdatePostsJob for any posts
  # that still lack a deep update. A pool is deep-updated at most once.
  class UpdatePoolJob < Base
    sig { override.params(args: T::Hash[Symbol, T.untyped]).void }
    def perform(args)
      pool = T.let(args[:pool], Domain::Inkbunny::Pool)
      logger.prefix = "[pool #{pool.ib_pool_id.to_s.bold}]"

      # Idempotency guard: skip pools that already carry a deep-update log entry.
      if pool.deep_update_log_entry.present?
        logger.info("skipping, already deep updated")
        return
      end

      processor = ApiSearchPageProcessor.new
      rid = T.let(nil, T.nilable(String))
      page = T.let(1, Integer)
      loop_count = T.let(0, Integer)

      loop do
        loop_count += 1
        # Hard cap so a malformed pagination response can never spin forever.
        if loop_count > ApiSearchPageProcessor::MAX_LOOP_COUNT
          raise("loop_count: #{loop_count}")
        end

        url =
          ApiSearchPageProcessor.build_api_search_url(
            pool_id: pool.ib_pool_id,
            rid: rid,
            page: page,
          )
        response = http_client.post(url, caused_by_entry: causing_log_entry)
        log_entry = response.log_entry
        self.first_log_entry ||= log_entry
        # NOTE(review): this records causing_log_entry (nil when the job has
        # no cause) rather than the fetched page's log_entry — the spec
        # appears to expect the response's entry; confirm which is intended.
        pool.deep_update_log_entry = causing_log_entry
        if response.status_code != 200
          fatal_error("api_search failed: #{response.status_code}")
        end

        result =
          processor.process!(
            JSON.parse(response.body),
            caused_by_entry: log_entry,
          )
        logger.info(
          [
            "[rid: #{rid}]",
            "[page #{page}]",
            "[total changed posts: #{result[:num_total_changed_posts]}]",
            "[total posts: #{result[:num_total_posts]}]",
            "[total users: #{result[:num_total_users]}]",
          ].join(" "),
        )

        # The first page establishes the RID reused by subsequent pages.
        rid ||= T.cast(result[:rid], String)
        # `>=` also terminates immediately when num_pages is 0 (empty pool),
        # which the previous equality check would loop on until the cap.
        break if page >= result[:num_pages]
        page += 1
      end
      pool.save!

      # Posts already deep-updated don't need a follow-up fetch.
      posts_to_update = processor.all_posts.reject(&:deep_updated_at)
      if posts_to_update.any?
        defer_job(
          Domain::Inkbunny::Job::UpdatePostsJob,
          { ib_post_ids: posts_to_update.map(&:ib_post_id) },
        )
      end
    end
  end
end

View File

@@ -4,13 +4,36 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
def perform(args)
ib_post_ids = args[:ib_post_ids]
missing_pool_post_ib_ids = T::Set[Integer].new
pools_to_update = T::Set[Domain::Inkbunny::Pool].new
if ib_post_ids.empty?
logger.info "empty ib_post_ids"
return
end
ib_post_ids.each_slice(100) do |ib_post_ids_chunk|
process_ib_post_ids(ib_post_ids_chunk)
process_ib_post_ids(
ib_post_ids_chunk,
missing_pool_post_ib_ids,
pools_to_update,
)
end
# Enqueue update jobs for missing posts
unless missing_pool_post_ib_ids.empty?
missing_pool_post_ib_ids.each_slice(1000) do |ib_post_ids_chunk|
logger.info "enqueuing update jobs for missing posts: #{ib_post_ids_chunk.join(", ")}"
defer_job(
Domain::Inkbunny::Job::UpdatePostsJob,
{ ib_post_ids: ib_post_ids_chunk },
)
end
end
# Enqueue update jobs for pools
pools_to_update.each do |pool|
defer_job(Domain::Inkbunny::Job::UpdatePoolJob, { pool: pool })
end
end
@@ -22,9 +45,19 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
"&show_description=yes&show_writing=yes&show_pools=yes"
end
sig { params(ib_post_ids: T::Array[Integer]).void }
def process_ib_post_ids(ib_post_ids)
url = build_api_submissions_url(ib_post_ids)
sig do
params(
ib_post_ids_chunk: T::Array[Integer],
missing_pool_post_ib_ids: T::Set[Integer],
pools_to_update: T::Set[Domain::Inkbunny::Pool],
).void
end
def process_ib_post_ids(
ib_post_ids_chunk,
missing_pool_post_ib_ids,
pools_to_update
)
url = build_api_submissions_url(ib_post_ids_chunk)
response = http_client.get(url, caused_by_entry: causing_log_entry)
log_entry = response.log_entry
self.first_log_entry ||= log_entry
@@ -35,26 +68,17 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
submission_jsons = api_submissions_json["submissions"]
logger.info("api_submissions page has #{submission_jsons.size} posts")
missing_pool_post_ib_ids = T::Set[Integer].new
submission_jsons.each do |submission_json|
Domain::Inkbunny::Post.transaction do
deep_update_post_from_submission_json(
submission_json,
log_entry,
missing_pool_post_ib_ids,
pools_to_update,
)
end
end
# Enqueue update jobs for missing posts
unless missing_pool_post_ib_ids.empty?
logger.info "enqueuing update jobs for missing posts: #{missing_pool_post_ib_ids.to_a.join(", ")}"
defer_job(
Domain::Inkbunny::Job::UpdatePostsJob,
{ ib_post_ids: missing_pool_post_ib_ids.to_a },
)
end
logger.prefix = ""
end
@@ -63,12 +87,14 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
submission_json: T::Hash[String, T.untyped],
log_entry: HttpLogEntry,
missing_pool_post_ib_ids: T::Set[Integer],
pools_to_update: T::Set[Domain::Inkbunny::Pool],
).void
end
def deep_update_post_from_submission_json(
submission_json,
log_entry,
missing_pool_post_ib_ids
missing_pool_post_ib_ids,
pools_to_update
)
logger.prefix = "ib_post_id #{submission_json["submission_id"].to_s.bold}"
@@ -93,7 +119,12 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
post.deep_update_log_entry = log_entry
if pools_json = submission_json["pools"]
update_submission_pools(post, pools_json, missing_pool_post_ib_ids)
update_submission_pools(
post,
pools_json,
missing_pool_post_ib_ids,
pools_to_update,
)
end
if submission_json["user_icon_url_large"]
@@ -164,9 +195,15 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
post: Domain::Inkbunny::Post,
pools_json: T::Array[T::Hash[String, T.untyped]],
missing_pool_post_ib_ids: T::Set[Integer],
pools_to_update: T::Set[Domain::Inkbunny::Pool],
).void
end
def update_submission_pools(post, pools_json, missing_pool_post_ib_ids)
def update_submission_pools(
post,
pools_json,
missing_pool_post_ib_ids,
pools_to_update
)
pools_json.each do |pool_json|
left_post, right_post =
%w[
@@ -193,6 +230,7 @@ class Domain::Inkbunny::Job::UpdatePostsJob < Domain::Inkbunny::Job::Base
Domain::Inkbunny::Pool.find_or_initialize_by(
ib_pool_id: pool_json["pool_id"],
)
pools_to_update.add(pool) if pool.deep_update_log_entry_id.blank?
pool.count = pool_json["count"]&.to_i
pool.name = pool_json["name"]
pool.description = pool_json["description"]

View File

@@ -4,6 +4,9 @@ class Domain::Inkbunny::Pool < ReduxApplicationRecord
# A pool groups posts via the pool_joins join table.
has_many :pool_joins, class_name: "::Domain::Inkbunny::PoolJoin"
has_many :posts, through: :pool_joins, source: :post
# HTTP log entry recorded by the deep-update job; nil until a deep update
# has run, hence optional.
belongs_to :deep_update_log_entry,
class_name: "::HttpLogEntry",
optional: true
# ib_pool_id is Inkbunny's external pool identifier and must be unique.
validates :ib_pool_id, presence: true, uniqueness: true
end

View File

@@ -0,0 +1,10 @@
# Adds a nullable, indexed deep_update_log_entry_id column to
# domain_inkbunny_pools with a foreign key to http_log_entries.
class AddDeepUpdateFieldsToIbPools < ActiveRecord::Migration[7.2]
  def change
    add_reference :domain_inkbunny_pools,
                  :deep_update_log_entry,
                  foreign_key: { to_table: :http_log_entries }
  end
end

5
db/schema.rb generated
View File

@@ -10,7 +10,7 @@
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema[7.2].define(version: 2025_01_04_211454) do
ActiveRecord::Schema[7.2].define(version: 2025_01_05_193442) do
# These are extensions that must be enabled in order to support this database
enable_extension "pg_prewarm"
enable_extension "pg_stat_statements"
@@ -1508,6 +1508,8 @@ ActiveRecord::Schema[7.2].define(version: 2025_01_04_211454) do
t.datetime "updated_at", null: false
t.bigint "ib_pool_id"
t.integer "count"
t.bigint "deep_update_log_entry_id"
t.index ["deep_update_log_entry_id"], name: "index_domain_inkbunny_pools_on_deep_update_log_entry_id"
t.index ["ib_pool_id"], name: "index_domain_inkbunny_pools_on_ib_pool_id", unique: true
end
@@ -1848,6 +1850,7 @@ ActiveRecord::Schema[7.2].define(version: 2025_01_04_211454) do
add_foreign_key "domain_inkbunny_files", "http_log_entries", column: "log_entry_id"
add_foreign_key "domain_inkbunny_follows", "domain_inkbunny_users", column: "followed_id"
add_foreign_key "domain_inkbunny_follows", "domain_inkbunny_users", column: "follower_id"
add_foreign_key "domain_inkbunny_pools", "http_log_entries", column: "deep_update_log_entry_id"
add_foreign_key "domain_inkbunny_posts", "domain_inkbunny_users", column: "creator_id"
add_foreign_key "domain_inkbunny_posts", "http_log_entries", column: "deep_update_log_entry_id"
add_foreign_key "domain_inkbunny_posts", "http_log_entries", column: "shallow_update_log_entry_id"

View File

@@ -0,0 +1,27 @@
# typed: true
# DO NOT EDIT MANUALLY
# This is an autogenerated file for dynamic methods in `Domain::Inkbunny::Job::UpdatePoolJob`.
# Please instead update this file by running `bin/tapioca dsl Domain::Inkbunny::Job::UpdatePoolJob`.
class Domain::Inkbunny::Job::UpdatePoolJob
sig { returns(ColorLogger) }
def logger; end
class << self
sig { returns(ColorLogger) }
def logger; end
# Enqueues the job asynchronously; per the sig it returns the job instance
# or false (presumably when enqueueing is halted — mirrors ActiveJob's
# perform_later contract; confirm against the job framework).
sig do
params(
args: T::Hash[::Symbol, T.untyped],
block: T.nilable(T.proc.params(job: Domain::Inkbunny::Job::UpdatePoolJob).void)
).returns(T.any(Domain::Inkbunny::Job::UpdatePoolJob, FalseClass))
end
def perform_later(args, &block); end
# Runs the job synchronously in the calling process.
sig { params(args: T::Hash[::Symbol, T.untyped]).void }
def perform_now(args); end
end
end

View File

@@ -416,6 +416,27 @@ class Domain::Inkbunny::Pool
end
module GeneratedAssociationMethods
sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def build_deep_update_log_entry(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def create_deep_update_log_entry(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(::HttpLogEntry) }
def create_deep_update_log_entry!(*args, &blk); end
sig { returns(T.nilable(::HttpLogEntry)) }
def deep_update_log_entry; end
sig { params(value: T.nilable(::HttpLogEntry)).void }
def deep_update_log_entry=(value); end
sig { returns(T::Boolean) }
def deep_update_log_entry_changed?; end
sig { returns(T::Boolean) }
def deep_update_log_entry_previously_changed?; end
sig { returns(T::Array[T.untyped]) }
def pool_join_ids; end
@@ -443,6 +464,12 @@ class Domain::Inkbunny::Pool
sig { params(value: T::Enumerable[::Domain::Inkbunny::Post]).void }
def posts=(value); end
sig { returns(T.nilable(::HttpLogEntry)) }
def reload_deep_update_log_entry; end
sig { void }
def reset_deep_update_log_entry; end
end
module GeneratedAssociationRelationMethods
@@ -690,6 +717,51 @@ class Domain::Inkbunny::Pool
sig { void }
def created_at_will_change!; end
sig { returns(T.nilable(::Integer)) }
def deep_update_log_entry_id; end
sig { params(value: T.nilable(::Integer)).returns(T.nilable(::Integer)) }
def deep_update_log_entry_id=(value); end
sig { returns(T::Boolean) }
def deep_update_log_entry_id?; end
sig { returns(T.nilable(::Integer)) }
def deep_update_log_entry_id_before_last_save; end
sig { returns(T.untyped) }
def deep_update_log_entry_id_before_type_cast; end
sig { returns(T::Boolean) }
def deep_update_log_entry_id_came_from_user?; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def deep_update_log_entry_id_change; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def deep_update_log_entry_id_change_to_be_saved; end
sig { params(from: T.nilable(::Integer), to: T.nilable(::Integer)).returns(T::Boolean) }
def deep_update_log_entry_id_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::Integer)) }
def deep_update_log_entry_id_in_database; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def deep_update_log_entry_id_previous_change; end
sig { params(from: T.nilable(::Integer), to: T.nilable(::Integer)).returns(T::Boolean) }
def deep_update_log_entry_id_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::Integer)) }
def deep_update_log_entry_id_previously_was; end
sig { returns(T.nilable(::Integer)) }
def deep_update_log_entry_id_was; end
sig { void }
def deep_update_log_entry_id_will_change!; end
sig { returns(T.nilable(::String)) }
def description; end
@@ -921,6 +993,9 @@ class Domain::Inkbunny::Pool
sig { void }
def restore_created_at!; end
sig { void }
def restore_deep_update_log_entry_id!; end
sig { void }
def restore_description!; end
@@ -951,6 +1026,12 @@ class Domain::Inkbunny::Pool
sig { returns(T::Boolean) }
def saved_change_to_created_at?; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def saved_change_to_deep_update_log_entry_id; end
sig { returns(T::Boolean) }
def saved_change_to_deep_update_log_entry_id?; end
sig { returns(T.nilable([T.nilable(::String), T.nilable(::String)])) }
def saved_change_to_description; end
@@ -1048,6 +1129,9 @@ class Domain::Inkbunny::Pool
sig { returns(T::Boolean) }
def will_save_change_to_created_at?; end
sig { returns(T::Boolean) }
def will_save_change_to_deep_update_log_entry_id?; end
sig { returns(T::Boolean) }
def will_save_change_to_description?; end

View File

@@ -39,4 +39,10 @@ FactoryBot.define do
file.blob_entry_sha256 = Digest::SHA256.digest(SecureRandom.hex(32))
end
end
# Minimal valid Domain::Inkbunny::Pool; ib_pool_id must be unique per record.
factory :domain_inkbunny_pool, class: "Domain::Inkbunny::Pool" do
  sequence(:ib_pool_id)
  sequence(:name) { "Pool #{_1}" }
  sequence(:description) { "Description #{_1}" }
end
end

View File

@@ -0,0 +1,103 @@
# typed: false
require "rails_helper"
RSpec.describe Domain::Inkbunny::Job::UpdatePoolJob do
# All HTTP traffic goes through this mock, installed via the client factory.
let(:http_client_mock) { instance_double("::Scraper::HttpClient") }
before { Scraper::ClientFactory.http_client_mock = http_client_mock }
# NOTE(review): this let is shadowed by the local `log_entries` assignment in
# the first example and appears unused — confirm it can be removed.
let(:log_entries) { [] }
describe "#perform" do
it "processes all posts in a pool and enqueues update jobs for posts needing updates" do
pool = create(:domain_inkbunny_pool, ib_pool_id: 83_746)
# Parsed copy of page 1, used to interpolate its rid into the page-2 URL.
first_page_json =
JSON.parse(
SpecUtil.read_fixture_file(
"domain/inkbunny/job/api_search_pool_83746_page_1.json",
),
)
# Two mocked api_search POSTs: page 1 addressed by pool_id, page 2 by the
# rid returned from page 1 (caused_by_entry_idx: 0 chains it to page 1).
log_entries =
HttpClientMockHelpers.init_http_client_mock(
http_client_mock,
[
{
method: :post,
uri:
"https://inkbunny.net/api_search.php?submissions_per_page=100&page=1&get_rid=yes&orderby=create_datetime&pool_id=83746&keywords=no",
content_type: "application/json",
contents:
SpecUtil.read_fixture_file(
"domain/inkbunny/job/api_search_pool_83746_page_1.json",
),
options: {
caused_by_entry: nil,
use_http_cache: nil,
},
},
{
method: :post,
uri:
"https://inkbunny.net/api_search.php?submissions_per_page=100&page=2&rid=#{first_page_json["rid"]}",
content_type: "application/json",
contents:
SpecUtil.read_fixture_file(
"domain/inkbunny/job/api_search_pool_83746_page_2.json",
),
caused_by_entry_idx: 0,
},
],
)
# The two fixture pages contain 2 posts each.
expect { perform_now({ pool: pool }) }.to change {
Domain::Inkbunny::Post.count
}.by(4)
# Verify posts were created correctly
post_3334290 = Domain::Inkbunny::Post.find_by!(ib_post_id: 3_334_290)
expect(post_3334290.title).to eq("Phantom Touch: Follow-Up")
expect(post_3334290.posted_at).to eq(
Time.parse("2024-05-28 00:57:36.597545+00"),
)
expect(post_3334290.last_file_updated_at).to eq(
Time.parse("2024-05-28 00:55:24.811748+00"),
)
expect(post_3334290.num_files).to eq(1)
expect(post_3334290.rating).to eq("general")
expect(post_3334290.submission_type).to eq("picture_pinup")
expect(post_3334290.shallow_updated_at).to be_within(1.second).of(
Time.now,
)
expect(post_3334290.shallow_update_log_entry).to eq(log_entries[0])
expect(post_3334290.deep_updated_at).to be_nil
# Verify user was created
user_thendyart = Domain::Inkbunny::User.find_by!(ib_user_id: 941_565)
expect(user_thendyart.name).to eq("ThendyArt")
# Verify UpdatePostsJob was enqueued for posts without deep_updated_at
expect(SpecUtil.enqueued_job_args).to match(
[
hash_including(
ib_post_ids:
array_including(3_334_290, 3_333_345, 3_333_338, 3_327_400),
caused_by_entry: log_entries[0],
),
],
)
expect(pool.deep_update_log_entry).to eq(log_entries[0])
end
# The deep_update_log_entry presence check makes the job a no-op on re-run.
it "skips if already deep updated" do
pool = create(:domain_inkbunny_pool, ib_pool_id: 83_746)
log_entry = create(:http_log_entry)
pool.deep_update_log_entry = log_entry
pool.save!
expect { perform_now({ pool: pool }) }.not_to change {
Domain::Inkbunny::Post.count
}
expect(pool.deep_update_log_entry).to eq(log_entry)
end
end
end

View File

@@ -79,6 +79,49 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do
end
let(:ib_post_ids) { [3_104_202, 3_104_200, 3_104_197] }
let(:pool_ib_post_ids) { [3_082_162, 3_095_240, 3_101_532, 3_098_688] }
it "enqueues update jobs for missing posts" do
perform_now({ ib_post_ids: ib_post_ids, caused_by_entry: nil })
expect(
SpecUtil.enqueued_jobs(Domain::Inkbunny::Job::UpdatePostsJob).length,
).to eq(1)
expect(
SpecUtil.enqueued_job_args(Domain::Inkbunny::Job::UpdatePostsJob),
).to match(
[{ ib_post_ids: pool_ib_post_ids, caused_by_entry: log_entries[0] }],
)
end
it "enqueues update jobs for missing pools" do
perform_now({ ib_post_ids: ib_post_ids, caused_by_entry: nil })
expect(
SpecUtil.enqueued_jobs(Domain::Inkbunny::Job::UpdatePoolJob).length,
).to eq(4)
expect(
SpecUtil.enqueued_job_args(Domain::Inkbunny::Job::UpdatePoolJob),
).to match(
array_including(
hash_including(
pool: Domain::Inkbunny::Pool.find_by(ib_pool_id: 71_061),
caused_by_entry: log_entries[0],
),
hash_including(
pool: Domain::Inkbunny::Pool.find_by(ib_pool_id: 35_628),
caused_by_entry: log_entries[0],
),
hash_including(
pool: Domain::Inkbunny::Pool.find_by(ib_pool_id: 35_045),
caused_by_entry: log_entries[0],
),
hash_including(
pool: Domain::Inkbunny::Pool.find_by(ib_pool_id: 83_746),
caused_by_entry: log_entries[0],
),
),
)
end
it "updates posts with detailed information" do
perform_now({ ib_post_ids: ib_post_ids, caused_by_entry: nil })
@@ -213,9 +256,7 @@ describe Domain::Inkbunny::Job::UpdatePostsJob do
SpecUtil.enqueued_jobs(Domain::Inkbunny::Job::UpdatePostsJob)
update_job_post_ids =
update_jobs.map { |job| job[:args][0][:ib_post_ids] }.flatten.sort
expect(update_job_post_ids).to match_array(
[3_082_162, 3_095_240, 3_101_532, 3_098_688],
)
expect(update_job_post_ids).to match_array(pool_ib_post_ids)
update_jobs.each do |job|
expect(job[:args][0][:caused_by_entry]).to eq(log_entries[0])

View File

@@ -0,0 +1,83 @@
{
"sid": "9B5BPQqTZ,Mj-nYbL2tuIPoO,b",
"results_count_all": "67",
"results_count_thispage": 2,
"pages_count": 2,
"page": 1,
"user_location": "",
"rid": "2a21f7d4bc",
"rid_ttl": "15 minutes",
"search_params": [],
"submissions": [
{
"submission_id": "3334290",
"hidden": "f",
"username": "ThendyArt",
"user_id": "941565",
"create_datetime": "2024-05-28 00:57:36.597545+00",
"create_datetime_usertime": "28 May 2024 02:57 CEST",
"last_file_update_datetime": "2024-05-28 00:55:24.811748+00",
"last_file_update_datetime_usertime": "28 May 2024 02:55 CEST",
"thumbnail_url_huge_noncustom": "https:\/\/tx.ib.metapix.net\/files\/preview\/5052\/5052995_ThendyArt_img_8512.jpg",
"thumbnail_url_large_noncustom": "https:\/\/tx.ib.metapix.net\/thumbnails\/large\/5052\/5052995_ThendyArt_img_8512_noncustom.jpg",
"thumbnail_url_medium_noncustom": "https:\/\/tx.ib.metapix.net\/thumbnails\/medium\/5052\/5052995_ThendyArt_img_8512_noncustom.jpg",
"thumb_medium_noncustom_x": "93",
"thumb_medium_noncustom_y": "120",
"thumb_large_noncustom_x": "155",
"thumb_large_noncustom_y": "200",
"thumb_huge_noncustom_x": "232",
"thumb_huge_noncustom_y": "300",
"file_name": "5052995_ThendyArt_img_8512.jpg",
"title": "Phantom Touch: Follow-Up",
"deleted": "f",
"public": "t",
"mimetype": "image\/jpeg",
"pagecount": "1",
"rating_id": "0",
"rating_name": "General",
"file_url_full": "https:\/\/tx.ib.metapix.net\/files\/full\/5052\/5052995_ThendyArt_img_8512.jpg",
"file_url_screen": "https:\/\/tx.ib.metapix.net\/files\/screen\/5052\/5052995_ThendyArt_img_8512.jpg",
"file_url_preview": "https:\/\/tx.ib.metapix.net\/files\/preview\/5052\/5052995_ThendyArt_img_8512.jpg",
"submission_type_id": "1",
"type_name": "Picture\/Pinup",
"friends_only": "f",
"guest_block": "f",
"scraps": "f"
},
{
"submission_id": "3333345",
"hidden": "f",
"username": "ThendyArt",
"user_id": "941565",
"create_datetime": "2024-05-26 23:31:29.449436+00",
"create_datetime_usertime": "27 May 2024 01:31 CEST",
"last_file_update_datetime": "2024-05-26 23:30:55.983099+00",
"last_file_update_datetime_usertime": "27 May 2024 01:30 CEST",
"thumbnail_url_huge_noncustom": "https:\/\/tx.ib.metapix.net\/files\/preview\/5051\/5051517_ThendyArt_untitled_artwork.jpg",
"thumbnail_url_large_noncustom": "https:\/\/tx.ib.metapix.net\/thumbnails\/large\/5051\/5051517_ThendyArt_untitled_artwork_noncustom.jpg",
"thumbnail_url_medium_noncustom": "https:\/\/tx.ib.metapix.net\/thumbnails\/medium\/5051\/5051517_ThendyArt_untitled_artwork_noncustom.jpg",
"thumb_medium_noncustom_x": "93",
"thumb_medium_noncustom_y": "120",
"thumb_large_noncustom_x": "155",
"thumb_large_noncustom_y": "200",
"thumb_huge_noncustom_x": "232",
"thumb_huge_noncustom_y": "300",
"file_name": "5051517_ThendyArt_untitled_artwork.jpg",
"title": "Phantom Touch - The End!!",
"deleted": "f",
"public": "t",
"mimetype": "image\/jpeg",
"pagecount": "1",
"rating_id": "2",
"rating_name": "Adult",
"file_url_full": "https:\/\/tx.ib.metapix.net\/files\/full\/5051\/5051517_ThendyArt_untitled_artwork.jpg",
"file_url_screen": "https:\/\/tx.ib.metapix.net\/files\/screen\/5051\/5051517_ThendyArt_untitled_artwork.jpg",
"file_url_preview": "https:\/\/tx.ib.metapix.net\/files\/preview\/5051\/5051517_ThendyArt_untitled_artwork.jpg",
"submission_type_id": "1",
"type_name": "Picture\/Pinup",
"friends_only": "f",
"guest_block": "f",
"scraps": "f"
}
]
}

View File

@@ -0,0 +1,160 @@
{
"sid": "9B5BPQqTZ,Mj-nYbL2tuIPoO,b",
"results_count_all": "67",
"results_count_thispage": 2,
"pages_count": 2,
"page": 2,
"user_location": "",
"rid": "2a21f7d4bc",
"rid_ttl": "15 minutes",
"search_params": [
{
"param_name": "field_join_type",
"param_value": "or"
},
{
"param_name": "text",
"param_value": ""
},
{
"param_name": "string_join_type",
"param_value": "and"
},
{
"param_name": "keywords",
"param_value": "yes"
},
{
"param_name": "keyword_id",
"param_value": false
},
{
"param_name": "title",
"param_value": "no"
},
{
"param_name": "description",
"param_value": "no"
},
{
"param_name": "md5",
"param_value": "no"
},
{
"param_name": "username",
"param_value": ""
},
{
"param_name": "user_id",
"param_value": ""
},
{
"param_name": "favs_user_id",
"param_value": ""
},
{
"param_name": "unread_submissions",
"param_value": "no"
},
{
"param_name": "type",
"param_value": ""
},
{
"param_name": "pool_id",
"param_value": "83746"
},
{
"param_name": "orderby",
"param_value": "create_datetime"
},
{
"param_name": "dayslimit",
"param_value": ""
},
{
"param_name": "random",
"param_value": "no"
},
{
"param_name": "scraps",
"param_value": "both"
},
{
"param_name": "count_limit",
"param_value": 18000
}
],
"submissions": [
{
"submission_id": "3333338",
"hidden": "f",
"username": "ThendyArt",
"user_id": "941565",
"create_datetime": "2024-05-26 23:21:31.359589+00",
"create_datetime_usertime": "27 May 2024 01:21 CEST",
"last_file_update_datetime": "2024-05-26 23:20:26.520089+00",
"last_file_update_datetime_usertime": "27 May 2024 01:20 CEST",
"thumbnail_url_huge_noncustom": "https://tx.ib.metapix.net/files/preview/5051/5051507_ThendyArt_untitled_artwork_1.jpg",
"thumbnail_url_large_noncustom": "https://tx.ib.metapix.net/thumbnails/large/5051/5051507_ThendyArt_untitled_artwork_1_noncustom.jpg",
"thumbnail_url_medium_noncustom": "https://tx.ib.metapix.net/thumbnails/medium/5051/5051507_ThendyArt_untitled_artwork_1_noncustom.jpg",
"thumb_medium_noncustom_x": "93",
"thumb_medium_noncustom_y": "120",
"thumb_large_noncustom_x": "155",
"thumb_large_noncustom_y": "200",
"thumb_huge_noncustom_x": "232",
"thumb_huge_noncustom_y": "300",
"file_name": "5051507_ThendyArt_untitled_artwork_1.jpg",
"title": "Phantom Touch - Page 64",
"deleted": "f",
"public": "t",
"mimetype": "image/jpeg",
"pagecount": "1",
"rating_id": "2",
"rating_name": "Adult",
"file_url_full": "https://tx.ib.metapix.net/files/full/5051/5051507_ThendyArt_untitled_artwork_1.jpg",
"file_url_screen": "https://tx.ib.metapix.net/files/screen/5051/5051507_ThendyArt_untitled_artwork_1.jpg",
"file_url_preview": "https://tx.ib.metapix.net/files/preview/5051/5051507_ThendyArt_untitled_artwork_1.jpg",
"submission_type_id": "1",
"type_name": "Picture/Pinup",
"friends_only": "f",
"guest_block": "f",
"scraps": "f"
},
{
"submission_id": "3327400",
"hidden": "f",
"username": "ThendyArt",
"user_id": "941565",
"create_datetime": "2024-05-19 22:19:26.620567+00",
"create_datetime_usertime": "20 May 2024 00:19 CEST",
"last_file_update_datetime": "2024-05-19 15:52:53.158883+00",
"last_file_update_datetime_usertime": "19 May 2024 17:52 CEST",
"thumbnail_url_huge_noncustom": "https://tx.ib.metapix.net/files/preview/5041/5041890_ThendyArt_img_8437.jpg",
"thumbnail_url_large_noncustom": "https://tx.ib.metapix.net/thumbnails/large/5041/5041890_ThendyArt_img_8437_noncustom.jpg",
"thumbnail_url_medium_noncustom": "https://tx.ib.metapix.net/thumbnails/medium/5041/5041890_ThendyArt_img_8437_noncustom.jpg",
"thumb_medium_noncustom_x": "93",
"thumb_medium_noncustom_y": "120",
"thumb_large_noncustom_x": "155",
"thumb_large_noncustom_y": "200",
"thumb_huge_noncustom_x": "232",
"thumb_huge_noncustom_y": "300",
"file_name": "5041890_ThendyArt_img_8437.jpg",
"title": "Phantom Touch - Page 63",
"deleted": "f",
"public": "t",
"mimetype": "image/jpeg",
"pagecount": "1",
"rating_id": "2",
"rating_name": "Adult",
"file_url_full": "https://tx.ib.metapix.net/files/full/5041/5041890_ThendyArt_img_8437.jpg",
"file_url_screen": "https://tx.ib.metapix.net/files/screen/5041/5041890_ThendyArt_img_8437.jpg",
"file_url_preview": "https://tx.ib.metapix.net/files/preview/5041/5041890_ThendyArt_img_8437.jpg",
"submission_type_id": "1",
"type_name": "Picture/Pinup",
"friends_only": "f",
"guest_block": "f",
"scraps": "f"
}
]
}