add e621 posts job

This commit is contained in:
Dylan Knutson
2023-08-22 15:09:39 -07:00
parent bab5ff1e34
commit 3ccc08d0c7
17 changed files with 931 additions and 17 deletions

View File

@@ -48,6 +48,7 @@ task :periodic_tasks => [:environment, :set_logger_stdout] do
loop do
Rake::Task["fa:browse_page_job"].execute
Rake::Task["fa:home_page_job"].execute
# Rake::Task["e621:posts_index_job"].execute
puts "emitted browse page and home page job"
sleep 1.minute
end

View File

@@ -0,0 +1,7 @@
# Common base class for all e621 scraper jobs. Jobs whose serialized
# arguments can no longer be deserialized (e.g. the referenced record was
# deleted) are discarded instead of retried.
class Domain::E621::Job::Base < Scraper::JobBase
  discard_on ActiveJob::DeserializationError

  # Names the Scraper::ClientFactory method used to build this job's
  # HTTP client.
  def self.http_factory_method
    :get_e621_http_client
  end
end

View File

@@ -0,0 +1,90 @@
module Domain::E621::Job
  # Fetches the latest page of https://e621.net/posts.json and creates or
  # updates local Domain::E621::Post rows from it. Newly created posts get
  # a StaticFileJob deferred to download their file.
  class PostsIndexJob < Base
    queue_as :e621
    ignore_signature_args :caused_by_entry

    # args:
    #   :caused_by_entry - optional HTTP log entry that triggered this run.
    def perform(args)
      @caused_by_entry = args[:caused_by_entry]
      response = http_client.get(
        "https://e621.net/posts.json",
        caused_by_entry: @caused_by_entry,
      )
      if response.status_code != 200
        fatal_error("non 200 response for /posts.json: #{response.status_code.to_s.underline}")
      end
      @log_entry = response.log_entry
      json = JSON.parse(response.body)
      if json["posts"].nil?
        # fixed: a stray "}" previously trailed this message
        fatal_error("no posts in response: HLE #{@log_entry.id}")
      end
      # Preload every post referenced by this page with a single query.
      e621_id_to_post = Domain::E621::Post.where(
        e621_id: json["posts"].map { |post_json| post_json["id"] },
      ).index_by(&:e621_id)
      @num_updated = 0
      @num_created = 0
      json["posts"].each do |post_json|
        e621_id = post_json["id"]
        post = e621_id_to_post[e621_id] || begin
          @num_created += 1
          Domain::E621::Post.new({ e621_id: e621_id })
        end
        logger.prefix = proc { "[e621_id #{post.e621_id.to_s.bold}]" }
        @num_updated += 1 if update_post!(post, post_json)
      end
      logger.prefix = nil
      logger.info("#{@num_updated} updated, #{@num_created} created")
    end

    private

    # Applies post_json to post and saves it. Returns false without
    # touching the record when e621's updated_at timestamp is unchanged;
    # returns true after a successful save. Calls fatal_error when an
    # already-stored md5 disagrees with e621's.
    def update_post!(post, post_json)
      e621_updated_at = post_json["updated_at"]
      return false if post.state_detail["e621_updated_at"] == e621_updated_at
      # fixed: reuse the local instead of re-reading post_json["updated_at"]
      post.state_detail["e621_updated_at"] = e621_updated_at
      post.state_detail["index_page_ids"] ||= []
      post.state_detail["index_page_ids"] << @log_entry.id
      post.state_detail["last_index_page_id"] = @log_entry.id
      e621_md5 = post_json["file"]["md5"]
      if post.md5 && post.md5 != e621_md5
        # A known md5 changing means the remote file was replaced; abort
        # rather than silently overwriting local state.
        fatal_error("md5 mismatch for post")
      else
        post.md5 = e621_md5
      end
      post.file_url_str ||= post_json["file"]["url"]
      post.description = post_json["description"]
      post.rating = post_json["rating"]
      post.score = post_json["score"]["total"]
      post.score_up = post_json["score"]["up"]
      post.score_down = post_json["score"]["down"]
      post.num_favorites = post_json["fav_count"]
      post.num_comments = post_json["comment_count"]
      post.change_seq = post_json["change_seq"]
      post.parent_e621_id = post_json["relationships"]["parent_id"]
      # Keep only the names of flags whose value is truthy.
      post.flags_array = post_json["flags"].to_a.select(&:second).map(&:first)
      post.pools_array = post_json["pools"]
      post.sources_array = post_json["sources"]
      post.tags_array = post_json["tags"]
      post.artists_array = post_json["tags"]["artist"]
      is_new = post.new_record?
      post.save!
      # Only posts seen for the first time trigger a file download.
      defer_job(Domain::E621::Job::StaticFileJob, {
        post: post,
        caused_by_entry: @log_entry,
      }) if is_new
      true
    end
  end
end

View File

@@ -0,0 +1,9 @@
module Domain::E621::Job
# Placeholder job for re-scanning a single e621 post (StaticFileJob
# enqueues it when a post lacks a file_url_str). The perform body is not
# yet implemented.
class ScanPostJob < Base
queue_as :e621
ignore_signature_args :caused_by_entry
# No-op for now: accepts the post and its originating log entry but does
# no work.
def perform(post:, caused_by_entry:)
end
end
end

View File

@@ -0,0 +1,61 @@
module Domain::E621::Job
  # Downloads a post's static file and records the resulting HTTP log
  # entry on the post. Failures are tracked in
  # post.state_detail["file_error"]; after MAX_FILE_RETRIES failed
  # attempts the job bails out early instead of retrying.
  class StaticFileJob < Base
    queue_as :e621
    ignore_signature_args :caused_by_entry

    # Download attempts allowed before giving up on a post's file.
    MAX_FILE_RETRIES = 3

    # args:
    #   :post            - required Domain::E621::Post
    #   :caused_by_entry - optional HTTP log entry that triggered this job
    def perform(args)
      post = args[:post] || fatal_error("post is required")
      caused_by_entry = args[:caused_by_entry]
      logger.prefix = proc { "[e621_id #{post.e621_id.to_s.bold}]" }
      file_url_str = post.file_url_str
      if file_url_str.blank?
        # Nothing to download; hand off to ScanPostJob to re-fetch the
        # post's metadata (and hopefully a file URL).
        logger.warn("post has no file_url_str, enqueueing for scan")
        defer_job(Domain::E621::Job::ScanPostJob, {
          post: post,
          caused_by_entry: caused_by_entry,
        })
        return
      end
      if post.state == "file_error"
        # was: chained &.[]("file_error")&.[]("retry_count") — Hash#dig
        # is the idiomatic equivalent.
        retry_count = post.state_detail&.dig("file_error", "retry_count") || 0
        if retry_count >= MAX_FILE_RETRIES
          logger.error("file has been retried 3 times, giving up")
          return
        end
      end
      response = http_client.get(
        file_url_str,
        caused_by_entry: caused_by_entry,
      )
      if response.status_code != 200
        # Record the failure so the retry budget above can be enforced.
        post.state = :file_error
        fe = (post.state_detail["file_error"] ||= {})
        fe["status_code"] = response.status_code
        fe["log_entry_id"] = response.log_entry.id
        fe["retry_count"] ||= 0
        fe["retry_count"] += 1
        post.save!
        if response.status_code == 404
          # Permanently gone — retrying won't help.
          logger.error("#{response.status_code}, not retrying download")
        else
          # Transient failure: raise so the job system retries later.
          fatal_error("#{response.status_code}, will retry later")
        end
        return
      end
      post.state = :ok
      post.file = response.log_entry
      post.save!
      logger.info "downloaded file"
    end
  end
end

View File

@@ -63,21 +63,36 @@ class ColorLogger
def self.klass_name_from_instance(instance)
klass_name = instance.class.name.dup
klass_name.delete_prefix!("Domain::")
if klass_name.start_with?("Fa::Scraper::")
klass_name.delete_prefix!("Fa::Scraper::")
klass_name = "Fa::#{klass_name}"
end
if klass_name.start_with?("Fa::Job::")
klass_name.delete_prefix!("Fa::Job::")
klass_name.delete_suffix!("Job")
klass_name = "Fa::#{klass_name}"
end
if klass_name.start_with?("Twitter::Job::")
klass_name.delete_prefix!("Twitter::Job::")
klass_name.delete_suffix!("Job")
klass_name = "Twitter::#{klass_name}"
prefixes = [
{
if_prefix: "Fa::Scraper::",
replace_with: "Fa::",
},
{
if_prefix: "Fa::Job::",
replace_with: "Fa::",
delete_suffix: "Job",
},
{
if_prefix: "Twitter::Job::",
replace_with: "Twitter::",
delete_suffix: "Job",
},
{
if_prefix: "E621::Job::",
replace_with: "E621::",
delete_suffix: "Job",
},
].each do |conf|
if klass_name.start_with?(conf[:if_prefix])
klass_name.delete_prefix!(conf[:if_prefix])
if (suffix = conf[:delete_suffix])
klass_name.delete_suffix!(suffix)
end
klass_name = conf[:replace_with] + klass_name
break
end
end
klass_name

View File

@@ -14,7 +14,7 @@ class Metrics::Client
def initialize(default_tags: {})
unless REPORT
logger.warn "!!! not reporting for this environment !!!"
logger.warn "not reporting metrics for '#{Rails.env.to_s.bold}' environment"
return
end

View File

@@ -39,6 +39,14 @@ class Scraper::ClientFactory
end
end
# Returns the HTTP client e621 jobs should use. Test and development
# environments must register a mock via http_client_mock=; other
# environments get a real client built from Scraper::E621HttpClientConfig.
def self.get_e621_http_client
  mockable_env = Rails.env.test? || Rails.env.development?
  return _http_client_impl(:e621, Scraper::E621HttpClientConfig) unless mockable_env
  @http_client_mock || raise("no http client mock set")
end
def self._gallery_dl_client_impl
@gallery_dl_clients.value ||= begin
proxy_config = Rails.application.config.x.proxy || raise("no proxy config")

View File

@@ -0,0 +1,26 @@
# HTTP client configuration for scraping e621.net.
class Scraper::E621HttpClientConfig < Scraper::HttpClientConfig
  # Hosts this client is permitted to request. Frozen: mutable constants
  # invite accidental global mutation.
  DEFAULT_ALLOWED_DOMAINS = [
    "e621.net",
    "*.e621.net",
  ].freeze

  # e621 requires no session cookies for the endpoints we hit.
  def cookies
    []
  end

  # Per-host rate limits; "*" is the fallback for unlisted hosts.
  # NOTE(review): "facdn.net" is FurAffinity's CDN, not an e621 host —
  # this looks copied from the FA config; confirm it belongs here.
  def ratelimit
    [
      ["static1.e621.net", 1],
      ["facdn.net", 2],
      ["*", 0.25],
    ]
  end

  def allowed_domains
    DEFAULT_ALLOWED_DOMAINS
  end

  # Follow at most two redirects per request.
  def redirect_limit
    2
  end
end

View File

@@ -34,6 +34,10 @@ class BlobEntryP < ReduxApplicationRecord
find_by(sha256: sha256) || raise("blob #{HexUtil.bin2hex(sha256)} does not exist")
end
# Hex-encoded form of the stored sha256 digest, or nil when absent.
def sha256_hex
  return unless sha256
  HexUtil.bin2hex(sha256)
end
def contents
@contents ||= begin
contents_raw = self.read_attribute(:contents)

View File

@@ -45,8 +45,13 @@ worker_configs = [
workers: 3,
},
{
name: "twitter / fa galleries",
queues: [:twitter_timeline_tweets, :fa_user_page, :fa_user_gallery],
name: "twitter / fa galleries / e621",
queues: [
:twitter_timeline_tweets,
:fa_user_page,
:fa_user_gallery,
:e621,
],
workers: PRIORITIZE_STATIC ? 1 : 2,
},
{

View File

@@ -24,4 +24,11 @@ namespace :e621 do
start_at: start_at,
).run
end
desc "run a single e621 posts index job"
task :posts_index_job => :environment do
  # Negative priority so a manually-triggered index run jumps the queue.
  configured_job = Domain::E621::Job::PostsIndexJob.set(priority: -10)
  configured_job.perform_later({})
end
end

View File

@@ -0,0 +1 @@

View File

@@ -0,0 +1,41 @@
require "rails_helper"
# Exercises Domain::E621::Job::PostsIndexJob against a canned
# /posts.json fixture served through a mocked HTTP client. The fixture
# contains 5 posts (see the count expectations below).
describe Domain::E621::Job::PostsIndexJob do
let(:http_client_mock) { instance_double("::Scraper::HttpClient") }
before do
# Route the factory-built client to the mock and register the fixture
# response for the index endpoint.
Scraper::ClientFactory.http_client_mock = http_client_mock
SpecUtil.init_http_client_mock(
http_client_mock, [
{
uri: "https://e621.net/posts.json",
status_code: 200,
content_type: "text/html",
contents: SpecUtil.read_fixture_file("domain/e621/job/posts_index_1.json"),
caused_by_entry_idx: nil,
},
]
)
end
describe "#perform" do
it "creates new posts" do
# All 5 fixture posts are new, so 5 rows are created.
expect do
perform_now({})
end.to change(Domain::E621::Post, :count).by(5)
end
it "updates existing posts" do
# Pre-create one of the fixture posts (matching e621_id and md5) so the
# job updates it in place instead of creating it.
Domain::E621::Post.create!({
e621_id: 4247443,
md5: "1c6169aa51668681e9697a48144d7c78",
})
expect do
perform_now({})
end.to change(Domain::E621::Post, :count).by(4)
# The pre-existing post should have absorbed the fixture's file URL.
post = Domain::E621::Post.find_by(e621_id: 4247443)
expect(post.file_url_str).to eq(
"https://static1.e621.net/data/1c/61/1c6169aa51668681e9697a48144d7c78.jpg"
)
end
end
end

View File

@@ -0,0 +1,159 @@
require "rails_helper"
# Specs for Domain::E621::Job::StaticFileJob: successful download,
# scan-job fallback when no file URL exists, error bookkeeping, recovery
# after a failed download, and the 3-attempt retry budget.
describe Domain::E621::Job::StaticFileJob do
# NOTE(review): these are the SHA-256/MD5 digests of empty input, so the
# fixture file appears to be zero bytes — confirm that's intended.
# NOTE(review): constants defined inside a describe block leak to the
# global namespace in RSpec; consider let or stub_const.
AN_IMAGE_SHA256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
AN_IMAGE_MD5 = "d41d8cd98f00b204e9800998ecf8427e"
AN_IMAGE_PATH = "domain/e621/job/an-image.png"
let(:http_client_mock) { instance_double("::Scraper::HttpClient") }
before do
Scraper::ClientFactory.http_client_mock = http_client_mock
end
describe "#perform" do
it "enqueues a post scan job if it doesn't have a file url" do
hle = SpecUtil.create_http_log_entry
# Post deliberately created without file_url_str.
post = Domain::E621::Post.create!({
e621_id: 12345,
md5: AN_IMAGE_MD5,
})
perform_now({ post: post, caused_by_entry: hle })
# The job should defer a ScanPostJob carrying the same post and cause.
expect(SpecUtil.enqueued_jobs(Domain::E621::Job::ScanPostJob)).to match(
[
including(args: [{
post: post,
caused_by_entry: hle,
}]),
]
)
end
it "downloads the file if file_url_str is present" do
hle = SpecUtil.create_http_log_entry
SpecUtil.init_http_client_mock(
http_client_mock, [
{
uri: "https://static1.e621.net/file/foo.png",
status_code: 200,
content_type: "image/png",
contents: SpecUtil.read_fixture_file(AN_IMAGE_PATH),
caused_by_entry: hle,
},
]
)
post = Domain::E621::Post.create!({
e621_id: 12345,
md5: AN_IMAGE_MD5,
file_url_str: "https://static1.e621.net/file/foo.png",
})
perform_now({ post: post, caused_by_entry: hle })
post.reload
# The downloaded response log entry is attached as the post's file.
expect(post.file).not_to be_nil
expect(post.file.response.sha256_hex).to eq(AN_IMAGE_SHA256)
end
it "marks the post as errored if the download fails" do
# A 404 is terminal: the job records the failure but does not raise.
hles = SpecUtil.init_http_client_mock(
http_client_mock, [
{
uri: "https://static1.e621.net/file/foo.png",
status_code: 404,
content_type: "text/html",
contents: "not found",
},
]
)
post = Domain::E621::Post.create!({
e621_id: 12345,
md5: AN_IMAGE_MD5,
file_url_str: "https://static1.e621.net/file/foo.png",
})
perform_now({ post: post })
post.reload
expect(post.state).to eq("file_error")
expect(post.file).to be_nil
# Failure metadata captures status, originating log entry, and count.
expect(post.state_detail["file_error"]).to eq(
{
"status_code" => 404,
"log_entry_id" => hles[0].id,
"retry_count" => 1,
}
)
end
it "recovers from a failed download" do
# First request 500s (job raises for retry), second succeeds.
hles = SpecUtil.init_http_client_mock(
http_client_mock, [
{
uri: "https://static1.e621.net/file/foo.png",
status_code: 500,
content_type: "text/html",
contents: "not found",
},
{
uri: "https://static1.e621.net/file/foo.png",
status_code: 200,
content_type: "image/png",
contents: SpecUtil.read_fixture_file(AN_IMAGE_PATH),
},
]
)
post = Domain::E621::Post.create!({
e621_id: 12345,
md5: AN_IMAGE_MD5,
file_url_str: "https://static1.e621.net/file/foo.png",
})
perform_now({ post: post }, should_raise: true)
post.reload
expect(post.file).to be_nil
# Second attempt succeeds and clears the error state.
perform_now({ post: post })
post.reload
expect(post.file).not_to be_nil
expect(post.file.response.sha256_hex).to eq(AN_IMAGE_SHA256)
end
it "throws on a non-404 error in order to retry later" do
num_retries = 3
# Three consecutive 500s exhaust the retry budget.
hles = SpecUtil.init_http_client_mock(
http_client_mock, [
{
uri: "https://static1.e621.net/file/foo.png",
status_code: 500,
content_type: "text/html",
contents: "not found",
},
] * num_retries
)
post = Domain::E621::Post.create!({
e621_id: 12345,
md5: AN_IMAGE_MD5,
file_url_str: "https://static1.e621.net/file/foo.png",
})
num_retries.times.map do |retry_num|
perform_now({ post: post }, should_raise: true)
post.reload
expect(post.state).to eq("file_error")
expect(post.file).to be_nil
# retry_count increments once per failed attempt.
expect(post.state_detail["file_error"]).to eq(
{
"status_code" => 500,
"log_entry_id" => hles[retry_num].id,
"retry_count" => retry_num + 1,
}
)
end
# the last retry should not throw, but simply bail out early
perform_now({ post: post })
post.reload
expect(post.state).to eq("file_error")
expect(post.file).to be_nil
end
end
end

View File

View File

@@ -0,0 +1,480 @@
{
"posts": [
{
"id": 4247444,
"created_at": "2023-08-22T00:05:26.192-04:00",
"updated_at": "2023-08-22T00:05:26.192-04:00",
"file": {
"width": 843,
"height": 519,
"ext": "jpg",
"size": 118587,
"md5": "d752b8d7c0f66af2ba929229c2c88c36",
"url": "https://static1.e621.net/data/d7/52/d752b8d7c0f66af2ba929229c2c88c36.jpg"
},
"preview": {
"width": 150,
"height": 92,
"url": "https://static1.e621.net/data/preview/d7/52/d752b8d7c0f66af2ba929229c2c88c36.jpg"
},
"sample": {
"has": false,
"height": 519,
"width": 843,
"url": "https://static1.e621.net/data/d7/52/d752b8d7c0f66af2ba929229c2c88c36.jpg",
"alternates": {}
},
"score": { "up": 2, "down": 0, "total": 2 },
"tags": {
"general": [
"ambiguous_gender",
"anthro",
"bangs",
"black_hair",
"black_nails",
"clothed",
"clothing",
"colored_nails",
"ear_piercing",
"facial_markings",
"frown",
"hair",
"head_markings",
"markings",
"mole_(marking)",
"nails",
"open_frown",
"open_mouth",
"piercing",
"snout",
"solo",
"style_parody",
"toony",
"topwear"
],
"species": ["didelphid", "mammal", "marsupial"],
"character": ["fan_character"],
"copyright": ["clone_high"],
"artist": ["octo-risotto"],
"invalid": [],
"lore": [],
"meta": ["greyscale", "monochrome"]
},
"locked_tags": [],
"change_seq": 50329141,
"flags": {
"pending": true,
"flagged": false,
"note_locked": false,
"status_locked": false,
"rating_locked": false,
"deleted": false
},
"rating": "s",
"fav_count": 0,
"sources": [
"https://www.furaffinity.net/view/38365651/",
"https://d.furaffinity.net/art/octo-risotto/1600973407/1600973023.octo-risotto_expressing_yourself.jpg"
],
"pools": [],
"relationships": {
"parent_id": null,
"has_children": false,
"has_active_children": false,
"children": []
},
"approver_id": null,
"uploader_id": 1472475,
"description": "",
"comment_count": 0,
"is_favorited": false,
"has_notes": false,
"duration": null
},
{
"id": 4247443,
"created_at": "2023-08-22T00:04:34.469-04:00",
"updated_at": "2023-08-22T00:07:03.503-04:00",
"file": {
"width": 2048,
"height": 2732,
"ext": "jpg",
"size": 660805,
"md5": "1c6169aa51668681e9697a48144d7c78",
"url": "https://static1.e621.net/data/1c/61/1c6169aa51668681e9697a48144d7c78.jpg"
},
"preview": {
"width": 112,
"height": 150,
"url": "https://static1.e621.net/data/preview/1c/61/1c6169aa51668681e9697a48144d7c78.jpg"
},
"sample": {
"has": true,
"height": 1133,
"width": 850,
"url": "https://static1.e621.net/data/sample/1c/61/1c6169aa51668681e9697a48144d7c78.jpg",
"alternates": {}
},
"score": { "up": 3, "down": 0, "total": 3 },
"tags": {
"general": [
"alcohol",
"anthro",
"ball",
"balls",
"beach",
"beach_ball",
"beer",
"beverage",
"big_butt",
"blush",
"bodily_fluids",
"butt",
"drunk",
"duo",
"erection",
"genital_fluids",
"genitals",
"girly",
"huge_butt",
"inflatable",
"looking_at_viewer",
"looking_back",
"male",
"male/male",
"nude",
"open_mouth",
"outside",
"penis",
"presenting",
"presenting_hindquarters",
"raised_tail",
"sand",
"seaside",
"sex",
"sky",
"smile",
"spreading",
"substance_intoxication",
"tail",
"thick_thighs",
"tongue",
"water",
"wide_hips"
],
"species": ["mammal", "procyonid", "raccoon"],
"character": ["rocket_raccoon", "shanukk"],
"copyright": ["guardians_of_the_galaxy", "marvel"],
"artist": ["link6432"],
"invalid": [],
"lore": [],
"meta": ["absurd_res", "digital_media_(artwork)", "hi_res"]
},
"locked_tags": [],
"change_seq": 50329135,
"flags": {
"pending": true,
"flagged": false,
"note_locked": false,
"status_locked": false,
"rating_locked": false,
"deleted": false
},
"rating": "e",
"fav_count": 3,
"sources": ["https://www.furaffinity.net/view/53380301/"],
"pools": [],
"relationships": {
"parent_id": null,
"has_children": false,
"has_active_children": false,
"children": []
},
"approver_id": null,
"uploader_id": 39821,
"description": "Shanukk teaches Rocket which is the appropriate outfit to go to the beach, but it seems that rocket has been interested in something else that he wants to try, what will it be?\r\n\r\nych from Link6432",
"comment_count": 0,
"is_favorited": false,
"has_notes": false,
"duration": null
},
{
"id": 4247442,
"created_at": "2023-08-22T00:04:26.055-04:00",
"updated_at": "2023-08-22T00:05:50.878-04:00",
"file": {
"width": 3166,
"height": 3495,
"ext": "png",
"size": 5732193,
"md5": "94b786013357a6e060d795b27fdcb77c",
"url": "https://static1.e621.net/data/94/b7/94b786013357a6e060d795b27fdcb77c.png"
},
"preview": {
"width": 135,
"height": 150,
"url": "https://static1.e621.net/data/preview/94/b7/94b786013357a6e060d795b27fdcb77c.jpg"
},
"sample": {
"has": true,
"height": 938,
"width": 850,
"url": "https://static1.e621.net/data/sample/94/b7/94b786013357a6e060d795b27fdcb77c.jpg",
"alternates": {}
},
"score": { "up": 0, "down": 0, "total": 0 },
"tags": {
"general": [
"anthro",
"chair",
"clothed",
"clothing",
"eyes_closed",
"female",
"fully_clothed",
"furniture",
"green_body",
"simple_background",
"sitting",
"sleeping",
"solo",
"tongue",
"tongue_out",
"white_background"
],
"species": ["kobold"],
"character": ["lemon_(tohupo)"],
"copyright": [],
"artist": ["tohupo"],
"invalid": [],
"lore": [],
"meta": [
"2023",
"absurd_res",
"dated",
"digital_media_(artwork)",
"hi_res",
"signature"
]
},
"locked_tags": [],
"change_seq": 50329136,
"flags": {
"pending": false,
"flagged": false,
"note_locked": false,
"status_locked": false,
"rating_locked": false,
"deleted": false
},
"rating": "s",
"fav_count": 1,
"sources": [
"https://itaku.ee/images/503482",
"https://itaku.ee/api/media/gallery_imgs/IMG_2330_cObeny4.png"
],
"pools": [],
"relationships": {
"parent_id": null,
"has_children": false,
"has_active_children": false,
"children": []
},
"approver_id": null,
"uploader_id": 167841,
"description": "",
"comment_count": 0,
"is_favorited": false,
"has_notes": false,
"duration": null
},
{
"id": 4247441,
"created_at": "2023-08-22T00:04:21.229-04:00",
"updated_at": "2023-08-22T00:08:16.034-04:00",
"file": {
"width": 1000,
"height": 1351,
"ext": "png",
"size": 864447,
"md5": "be5b0d4a1cc5169c288da8020ce00862",
"url": "https://static1.e621.net/data/be/5b/be5b0d4a1cc5169c288da8020ce00862.png"
},
"preview": {
"width": 111,
"height": 150,
"url": "https://static1.e621.net/data/preview/be/5b/be5b0d4a1cc5169c288da8020ce00862.jpg"
},
"sample": {
"has": true,
"height": 1148,
"width": 850,
"url": "https://static1.e621.net/data/sample/be/5b/be5b0d4a1cc5169c288da8020ce00862.jpg",
"alternates": {}
},
"score": { "up": 2, "down": 0, "total": 2 },
"tags": {
"general": [
"anthro",
"ball",
"beach",
"beach_ball",
"black_body",
"black_fur",
"breasts",
"female",
"fluffy",
"fluffy_tail",
"front_view",
"fur",
"genitals",
"inflatable",
"looking_at_viewer",
"nipples",
"nude",
"open_mouth",
"orange_body",
"orange_fur",
"outside",
"pink_nipples",
"pokeball",
"pussy",
"seaside",
"sky",
"smile",
"solo",
"tail",
"tongue",
"tongue_out"
],
"species": [
"arcanine",
"canid",
"generation_1_pokemon",
"mammal",
"pokemon_(species)"
],
"character": [],
"copyright": ["nintendo", "pokemon"],
"artist": ["pig_(artist)"],
"invalid": [],
"lore": [],
"meta": ["hi_res"]
},
"locked_tags": [],
"change_seq": 50329131,
"flags": {
"pending": true,
"flagged": false,
"note_locked": false,
"status_locked": false,
"rating_locked": false,
"deleted": false
},
"rating": "e",
"fav_count": 8,
"sources": [
"https://www.furaffinity.net/view/53388454/",
"https://d.furaffinity.net/art/pig/1692676252/1692676252.pig_17_resize.png"
],
"pools": [],
"relationships": {
"parent_id": null,
"has_children": false,
"has_active_children": false,
"children": []
},
"approver_id": null,
"uploader_id": 1314348,
"description": "",
"comment_count": 0,
"is_favorited": false,
"has_notes": false,
"duration": null
},
{
"id": 4247440,
"created_at": "2023-08-22T00:04:05.859-04:00",
"updated_at": "2023-08-22T00:04:05.859-04:00",
"file": {
"width": 3500,
"height": 3200,
"ext": "png",
"size": 6694936,
"md5": "29a591ce7de9ceee0dd70c2d1fb09bdf",
"url": "https://static1.e621.net/data/29/a5/29a591ce7de9ceee0dd70c2d1fb09bdf.png"
},
"preview": {
"width": 150,
"height": 137,
"url": "https://static1.e621.net/data/preview/29/a5/29a591ce7de9ceee0dd70c2d1fb09bdf.jpg"
},
"sample": {
"has": true,
"height": 777,
"width": 850,
"url": "https://static1.e621.net/data/sample/29/a5/29a591ce7de9ceee0dd70c2d1fb09bdf.jpg",
"alternates": {}
},
"score": { "up": 0, "down": -1, "total": -1 },
"tags": {
"general": [
"anthro",
"anus",
"birth",
"blush",
"bodily_fluids",
"butt",
"clothed",
"clothing",
"egg",
"female",
"fur",
"genital_fluids",
"genitals",
"green_eyes",
"hair",
"open_mouth",
"oviposition",
"pussy",
"solo",
"tail"
],
"species": ["deer", "faun_(spyro)", "mammal"],
"character": ["elora"],
"copyright": ["activision", "spyro_the_dragon"],
"artist": ["heartlessangel3d"],
"invalid": [],
"lore": [],
"meta": ["absurd_res", "hi_res"]
},
"locked_tags": [],
"change_seq": 50329128,
"flags": {
"pending": true,
"flagged": false,
"note_locked": false,
"status_locked": false,
"rating_locked": false,
"deleted": false
},
"rating": "e",
"fav_count": 0,
"sources": ["https://inkbunny.net/s/2954143?override=true"],
"pools": [],
"relationships": {
"parent_id": null,
"has_children": false,
"has_active_children": false,
"children": []
},
"approver_id": null,
"uploader_id": 95495,
"description": "Posting for the artist\r\n\r\nI like to think this is what happened after I drew Spyro knotting that fluffy butt of hers.\r\n\r\n----------\r\n\r\nFurAffinity - https://www.furaffinity.net/user/heartlessangel3d\r\nDeviantArt - https://www.deviantart.com/heartlessangel3d\r\nTwitter - https://twitter.com/3HeartlessAngel",
"comment_count": 0,
"is_favorited": false,
"has_notes": false,
"duration": null
}
]
}