migration script

This commit is contained in:
Dylan Knutson
2025-02-05 03:46:16 +00:00
parent 428cb0a491
commit aea94c98cd
24 changed files with 1929 additions and 373 deletions

View File

@@ -94,11 +94,11 @@ task :reverse_csv do
end
task migrate_domain: :environment do
Domain::MigrateToDomain.new.migrate_e621_users
Domain::MigrateToDomain.new.migrate_e621_posts
Domain::MigrateToDomain.new.migrate_fa_users
# Domain::MigrateToDomain.new.migrate_e621_users
# Domain::MigrateToDomain.new.migrate_e621_posts
# Domain::MigrateToDomain.new.migrate_fa_users
Domain::MigrateToDomain.new.migrate_fa_posts
Domain::MigrateToDomain.new.migrate_e621_users_favs
Domain::MigrateToDomain.new.migrate_fa_users_favs
Domain::MigrateToDomain.new.migrate_fa_users_followers
# Domain::MigrateToDomain.new.migrate_e621_users_favs
# Domain::MigrateToDomain.new.migrate_fa_users_favs
# Domain::MigrateToDomain.new.migrate_fa_users_followers
end

View File

@@ -52,7 +52,7 @@ class Domain::Fa::Job::UserFollowsJob < Domain::Fa::Job::Base
if to_add.any?
user.follower_joins.insert_all!(to_add.map { |id| { followed_id: id } })
end
user.scanned_follows_at = Time.now
user.scanned_follows_at = Time.current
user.save!
end

View File

@@ -61,7 +61,7 @@ class Domain::Fa::Job::UserGalleryJob < Domain::Fa::Job::Base
end
user.log_entry_detail["last_gallery_page_id"] = first_log_entry&.id
user.scanned_gallery_at = Time.now
user.scanned_gallery_at = Time.current
user.save!
end

View File

@@ -55,8 +55,8 @@ module Domain::Fa::Job
check_watchers(user, page.user_page.recent_watchers)
check_watching(user, page.user_page.recent_watching)
user.scanned_page_at = Time.now
user.scanned_incremental_at = Time.now
user.scanned_page_at = Time.current
user.scanned_incremental_at = Time.current
user.save!
logger.info "completed page scan"
ensure

View File

@@ -36,7 +36,7 @@ class Domain::Fa::Job::UserPageJob < Domain::Fa::Job::Base
page,
response,
)
user.scanned_page_at = Time.now
user.scanned_page_at = Time.current
user.save!
logger.info "completed page scan"
ensure

View File

@@ -5,224 +5,245 @@ class Domain::MigrateToDomain
sig { void }
def migrate_e621_users
Domain::E621::User
.joins(
"LEFT JOIN domain_users ON domain_e621_users.e621_user_id =
(domain_users.json_attributes->>'e621_id')::integer
AND domain_users.type = 'Domain::User::E621User'",
logger.info "migrating e621 users"
query =
Domain::E621::User.where.not(
e621_user_id: Domain::User::E621User.select(:e621_id),
)
.where("domain_users.id IS NULL")
.find_in_batches do |batch|
ReduxApplicationRecord.transaction do
batch.each { |user| migrate_e621_user(user) }
end
pb =
ProgressBar.create(
throttle_rate: 0.2,
total: query.count,
format: "%t: %c/%C %B %p%% %a %e",
output: $stderr,
)
query.find_in_batches(batch_size: 10_000) do |batch|
migrate_batch(Domain::User::E621User, batch) do |old_model|
initialize_e621_user_from(old_model)
end
pb.progress = [pb.progress + batch.size, pb.total].min
end
end
sig { void }
def migrate_e621_posts
Domain::E621::Post
.joins(
logger.info "migrating e621 posts"
query =
Domain::E621::Post.joins(
"LEFT JOIN domain_posts ON domain_e621_posts.e621_id =
(domain_posts.json_attributes->>'e621_id')::integer
AND domain_posts.type = 'Domain::Post::E621Post'",
).where("domain_posts.id IS NULL")
pb =
ProgressBar.create(
throttle_rate: 0.2,
total: query.count,
format: "%t: %c/%C %B %p%% %a %e",
output: $stderr,
)
.where("domain_posts.id IS NULL")
.find_in_batches do |batch|
ReduxApplicationRecord.transaction do
batch.each { |post| migrate_e621_post(post) }
end
query.find_in_batches(batch_size: 10_000) do |batch|
migrate_batch(Domain::Post::E621Post, batch) do |old_model|
initialize_e621_post_from(old_model)
end
pb.progress = [pb.progress + batch.size, pb.total].min
end
end
sig { void }
def migrate_fa_users
Domain::Fa::User
.joins(
logger.info "migrating fa users"
query =
Domain::Fa::User.joins(
"LEFT JOIN domain_users ON domain_fa_users.url_name =
domain_users.json_attributes->>'url_name'
AND domain_users.type = 'Domain::User::FaUser'",
).where("domain_users.id IS NULL")
pb =
ProgressBar.create(
throttle_rate: 0.2,
total: query.count,
format: "%t: %c/%C %B %p%% %a %e",
output: $stderr,
)
.where("domain_users.id IS NULL")
.find_in_batches do |batch|
ReduxApplicationRecord.transaction do
batch.each { |user| migrate_fa_user(user) }
end
query.find_in_batches(batch_size: 10_000) do |batch|
migrate_batch(Domain::User::FaUser, batch) do |old_user|
initialize_fa_user_from(old_user)
end
pb.progress = [pb.progress + batch.size, pb.total].min
end
end
sig { void }
def migrate_fa_posts
Domain::Fa::Post
.joins(
"LEFT JOIN domain_posts ON domain_fa_posts.fa_id =
(domain_posts.json_attributes->>'fa_id')::integer
AND domain_posts.type = 'Domain::Post::FaPost'",
logger.info "migrating fa posts"
query =
Domain::Fa::Post.where.not(fa_id: Domain::Post::FaPost.select(:fa_id))
pb =
ProgressBar.create(
throttle_rate: 0.2,
total: query.count,
format: "%t: %c/%C %B %p%% %a %e",
output: $stderr,
)
.where("domain_posts.id IS NULL")
query
.includes(:creator, :file)
.find_in_batches do |batch|
.find_in_batches(batch_size: 10_000) do |batch|
ReduxApplicationRecord.transaction do
batch.each { |post| migrate_fa_post(post) }
end
end
end
sig { params(user: Domain::Fa::User).void }
def migrate_fa_user(user)
user =
Domain::User::FaUser.find_or_initialize_by(
url_name: user.url_name,
) { |new_user| new_user.name = user.name }
if user.new_record?
logger.info("migrated fa user #{user.url_name}")
user.save!
end
end
sig { params(post: Domain::Fa::Post).void }
def migrate_fa_post(post)
post =
Domain::Post::FaPost.find_or_initialize_by(
fa_id: post.fa_id,
) do |new_post|
new_post.state = post.state
new_post.title = post.title
new_post.fa_id = post.fa_id
new_post.category = post.category
new_post.theme = post.theme
new_post.species = post.species
new_post.gender = post.gender
new_post.description = post.description
new_post.keywords = post.keywords
new_post.num_favorites = post.num_favorites
new_post.num_comments = post.num_comments
new_post.num_views = post.num_views
new_post.posted_at = post.posted_at
new_post.scanned_at = post.scanned_at
new_post.scan_file_error = post.scan_file_error
new_post.last_user_page_id = post.last_user_page_id
new_post.last_submission_page_id = post.last_submission_page_id
new_post.first_browse_page_id = post.first_browse_page_id
new_post.first_gallery_page_id = post.first_gallery_page_id
new_post.first_seen_entry_id = post.first_seen_entry_id
new_post.created_at = post.created_at
if post.creator.present?
new_post.creator =
Domain::User::FaUser.find_by!(url_name: post.creator&.url_name)
end
if post.file.present?
new_post.file =
Domain::PostFile.find_or_create_by(
log_entry: post.file,
) do |new_file|
new_file.log_entry = post.file
new_file.url_str = post.file_url_str
new_file.state = post.state
initialized_models =
migrate_batch(Domain::Post::FaPost, batch) do |old_post|
initialize_fa_post_from(old_post)
end
migrate_batch(
Domain::PostFile,
initialized_models.filter(&:file),
) do |post|
file = T.must(post.file)
file.post_id = post.id
file
end
migrate_batch(
Domain::UserPostCreation,
initialized_models.filter(&:user_post_creation),
) do |post|
user_post_creation = T.must(post.user_post_creation)
user_post_creation.post_id = post.id
user_post_creation
end
end
pb.progress = [pb.progress + batch.size, pb.total].min
end
if post.new_record?
logger.info("migrated fa post #{post.fa_id}")
post.save!
end
end
sig { params(user: Domain::E621::User).void }
def migrate_e621_user(user)
user =
Domain::User::E621User.find_or_initialize_by(
e621_id: user.e621_user_id,
) do |new_user|
new_user.name = user.name
new_user.favs_are_hidden = user.favs_are_hidden
new_user.num_other_favs_cached = user.num_other_favs_cached
new_user.scanned_favs_status = user.scanned_favs_status
new_user.scanned_favs_at = user.scanned_favs_at
end
if user.new_record?
logger.info("migrated e621 user #{user.name}")
user.save!
end
end
sig { params(post: Domain::E621::Post).void }
def migrate_e621_post(post)
post =
Domain::Post::E621Post.find_or_initialize_by(
e621_id: post.e621_id,
) do |new_post|
new_post.state = post.state
new_post.e621_id = post.e621_id
new_post.scanned_post_favs_at = post.scanned_post_favs_at
new_post.rating = post.rating
new_post.tags_array = post.tags_array
new_post.flags_array = post.flags_array
new_post.pools_array = post.pools_array
new_post.sources_array = post.sources_array
new_post.artists_array = post.artists_array
new_post.e621_updated_at = post.e621_updated_at
new_post.last_index_page_id = post.last_index_page_id
new_post.caused_by_entry_id = post.caused_by_entry_id
new_post.scan_log_entry_id = post.scan_log_entry_id
new_post.index_page_ids = post.index_page_ids
new_post.prev_md5s = post.prev_md5s
new_post.scan_error = post.scan_error
new_post.file_error = post.file_error
new_post.created_at = post.created_at
# TODO - migrate parent posts
end
if post.new_record?
logger.info("migrated e621 post #{post.e621_id}")
post.save!
end
end
sig { void }
def migrate_e621_users_favs
logger.info "migrating e621 users favs"
Domain::User::E621User
.where_migrated_user_favs_at("is null")
.where(migrated_user_favs_at: nil)
.find_each { |user| migrate_e621_user_favs(user) }
end
sig { params(user: Domain::User::E621User).void }
def migrate_e621_user_favs(user)
user_e621_id = user.e621_id
old_user = Domain::E621::User.find_by!(e621_user_id: user_e621_id)
old_post_e621_ids = old_user.faved_posts.pluck(:e621_id)
new_post_ids =
Domain::Post::E621Post.where(e621_id: old_post_e621_ids).pluck(:id)
Domain::UserPostFav.upsert_all(
new_post_ids.map { |post_id| { user_id: user.id, post_id: } },
unique_by: %i[user_id post_id],
)
if user.faved_posts.count != old_user.faved_posts.count
logger.error(
"favs mismatch for #{user.name}: (#{user.faved_posts.count} != #{old_user.faved_posts.count})",
)
else
user.migrated_user_favs_at = Time.current
user.save!
logger.info("migrated e621 user favs #{user.name} (#{new_post_ids.size})")
end
end
sig { void }
def migrate_fa_users_favs
logger.info "migrating fa users favs"
Domain::User::FaUser
.where_migrated_user_favs_at("is null")
.where(migrated_user_favs_at: nil)
.find_each { |user| migrate_fa_user_favs(user) }
end
sig { void }
# Backfills followed-user joins for every FA user that has not yet been
# migrated (migrated_followed_users_at is still nil). Each user is handed
# off to migrate_fa_user_followed_users, which stamps the timestamp on
# success so the pass is resumable.
def migrate_fa_users_followed_users
logger.info "migrating fa followed users"
pending = Domain::User::FaUser.where(migrated_followed_users_at: nil)
pending.find_each do |user|
migrate_fa_user_followed_users(user)
end
end
private
sig { params(old_user: Domain::E621::User).returns(Domain::User::E621User) }
# Builds an UNSAVED Domain::User::E621User mirroring the legacy record.
# Note the column rename: legacy `e621_user_id` maps to the new `e621_id`.
# Persistence happens later in bulk via migrate_batch.
def initialize_e621_user_from(old_user)
Domain::User::E621User.new.tap do |user|
user.e621_id = old_user.e621_user_id
user.name = old_user.name
user.favs_are_hidden = old_user.favs_are_hidden
user.num_other_favs_cached = old_user.num_other_favs_cached
user.scanned_favs_status = old_user.scanned_favs_status
user.scanned_favs_at = old_user.scanned_favs_at
end
end
sig { params(old_post: Domain::E621::Post).returns(Domain::Post::E621Post) }
# Maps a legacy Domain::E621::Post onto an UNSAVED Domain::Post::E621Post.
# Scalar and array attributes are copied one-to-one; the record is not
# persisted here — callers bulk-insert the batch via migrate_batch.
def initialize_e621_post_from(old_post)
new_post = Domain::Post::E621Post.new
new_post.state = old_post.state
new_post.e621_id = old_post.e621_id
new_post.scanned_post_favs_at = old_post.scanned_post_favs_at
new_post.rating = old_post.rating
new_post.tags_array = old_post.tags_array
new_post.flags_array = old_post.flags_array
new_post.pools_array = old_post.pools_array
new_post.sources_array = old_post.sources_array
new_post.artists_array = old_post.artists_array
new_post.e621_updated_at = old_post.e621_updated_at
new_post.last_index_page_id = old_post.last_index_page_id
new_post.caused_by_entry_id = old_post.caused_by_entry_id
new_post.scan_log_entry_id = old_post.scan_log_entry_id
new_post.index_page_ids = old_post.index_page_ids
new_post.prev_md5s = old_post.prev_md5s
new_post.scan_error = old_post.scan_error
new_post.file_error = old_post.file_error
# Preserve the original creation timestamp rather than letting Rails
# stamp the migration time.
new_post.created_at = old_post.created_at
# Parent linkage is carried by the e621-side id, not the new table's
# primary key (the parent_post association on Domain::Post::E621Post
# joins on e621_id), so no parent lookup is needed during migration.
new_post.parent_post_e621_id = old_post.parent_e621_id
new_post
end
sig { params(old_user: Domain::Fa::User).returns(Domain::User::FaUser) }
# Builds an UNSAVED Domain::User::FaUser by copying the legacy FA user's
# attributes one-to-one (attribute names are identical on both models,
# including the existing "recieved" spelling). Persisted later in bulk.
def initialize_fa_user_from(old_user)
new_user = Domain::User::FaUser.new
%i[
url_name
name
full_name
artist_type
mood
profile_html
num_pageviews
num_submissions
num_comments_recieved
num_comments_given
num_journals
num_favorites
scanned_gallery_at
scanned_page_at
registered_at
].each do |attr|
new_user.public_send(:"#{attr}=", old_user.public_send(attr))
end
new_user
end
sig { params(post: Domain::Fa::Post).returns(Domain::Post::FaPost) }
# Maps a legacy Domain::Fa::Post onto an UNSAVED Domain::Post::FaPost,
# including its creator association and (when present) an unsaved
# Domain::PostFile. Callers bulk-insert the batch via migrate_batch.
def initialize_fa_post_from(post)
new_post = Domain::Post::FaPost.new
new_post.state = post.state
new_post.title = post.title
new_post.fa_id = post.fa_id
new_post.category = post.category
new_post.theme = post.theme
new_post.species = post.species
new_post.gender = post.gender
new_post.description = post.description
new_post.keywords = post.keywords
new_post.num_favorites = post.num_favorites
new_post.num_comments = post.num_comments
new_post.num_views = post.num_views
new_post.posted_at = post.posted_at
new_post.scanned_at = post.scanned_at
new_post.scan_file_error = post.scan_file_error
new_post.last_user_page_id = post.last_user_page_id
new_post.last_submission_page_id = post.last_submission_page_id
new_post.first_browse_page_id = post.first_browse_page_id
new_post.first_gallery_page_id = post.first_gallery_page_id
new_post.first_seen_entry_id = post.first_seen_entry_id
# Preserve the original creation timestamp.
new_post.created_at = post.created_at
if post.creator.present?
# find_by! raises if the creator was not already migrated — this pass
# assumes FA users are migrated before FA posts (see the rake task
# ordering); a missing user aborts the batch rather than silently
# dropping the association.
new_post.creator =
Domain::User::FaUser.find_by!(url_name: post.creator&.url_name)
end
if post.file.present?
# Reuse an existing PostFile for this log entry if one exists;
# otherwise build a new (unsaved) one. post_id is wired up later in
# migrate_fa_posts after the post row has an id.
new_post.file =
Domain::PostFile.find_or_initialize_by(
log_entry: post.file,
) do |new_file|
new_file.log_entry = post.file
new_file.url_str = post.file_url_str
new_file.state = post.state
end
end
new_post
end
sig { params(user: Domain::User::FaUser).void }
def migrate_fa_user_favs(user)
user_url_name = user.url_name
@@ -244,19 +265,11 @@ class Domain::MigrateToDomain
else
user.migrated_user_favs_at = Time.current
user.save!
logger.info("migrated fa user favs #{user.name} (#{new_post_ids.size})")
end
end
sig { void }
def migrate_fa_users_followers
Domain::User::FaUser
.where_migrated_followers_at("is null")
.find_each { |user| migrate_fa_user_followers(user) }
end
sig { params(user: Domain::User::FaUser).void }
def migrate_fa_user_followers(user)
def migrate_fa_user_followed_users(user)
user_url_name = user.url_name
old_user = Domain::Fa::User.find_by!(url_name: user_url_name)
followed_user_url_names = old_user.follows.pluck(:url_name)
@@ -275,11 +288,79 @@ class Domain::MigrateToDomain
"followers mismatch for #{user.name}: (#{user.following_users.count} != #{old_user.follows.count})",
)
else
user.migrated_followers_at = Time.current
user.migrated_followed_users_at = Time.current
user.save!
logger.info(
"migrated fa user followers #{user.name} (#{new_user_ids.size})",
)
end
end
sig { params(new_user: Domain::User::E621User).void }
# Copies one user's favorites from the legacy e621 tables into
# Domain::UserPostFav. Upserts in 10k slices to bound statement size,
# then verifies the new fav count matches the legacy count before
# stamping migrated_user_favs_at (so a mismatch leaves the user eligible
# for a re-run).
def migrate_e621_user_favs(new_user)
user_e621_id = new_user.e621_id
# Raises if the legacy user row is missing — favs migration assumes the
# user migration pass already ran.
old_user = Domain::E621::User.find_by!(e621_user_id: user_e621_id)
old_post_e621_ids = old_user.faved_posts.pluck(:e621_id)
# Translate legacy e621 post ids to new-table primary keys. Posts not
# yet migrated are silently dropped here, which the count check below
# will surface as a mismatch.
new_post_ids =
Domain::Post::E621Post.where(e621_id: old_post_e621_ids).pluck(:id)
new_post_ids.each_slice(10_000) do |post_ids|
# upsert_all makes the pass idempotent across retries.
Domain::UserPostFav.upsert_all(
post_ids.map { |post_id| { user_id: new_user.id, post_id: } },
unique_by: %i[user_id post_id],
)
end
if new_user.faved_posts.count != old_user.faved_posts.count
logger.error(
"favs mismatch for #{new_user.name}: (#{new_user.faved_posts.count} != #{old_user.faved_posts.count})",
)
else
new_user.migrated_user_favs_at = Time.current
new_user.save!
logger.info(
"migrated e621 user favs #{new_user.name} (#{new_post_ids.size})",
)
end
end
sig do
params(
klass: T.class_of(ActiveRecord::Base),
attributes: T::Hash[String, T.untyped],
).returns(T::Hash[String, T.untyped])
end
# Strips attr_json-backed attribute names (which live inside the
# json_attributes column, not as real columns) plus id/timestamps from an
# attributes hash, leaving only keys safe to pass to insert_all!.
def clean_attributes(klass, attributes)
excluded = T.unsafe(klass).attr_json_registry.attribute_names.map(&:to_s)
excluded += %w[id created_at updated_at]
attributes.reject { |key, _value| excluded.include?(key) }
end
sig do
type_parameters(:Old, :New)
.params(
klass: T.class_of(ActiveRecord::Base),
batch: T::Array[T.all(T.type_parameter(:Old), ActiveRecord::Base)],
model_mapper:
T
.proc
.params(record: T.all(T.type_parameter(:Old), ActiveRecord::Base))
.returns(T.all(T.type_parameter(:New), ActiveRecord::Base)),
)
.returns(T::Array[T.type_parameter(:New)])
end
# Bulk-migrates a batch: maps each legacy record to an unsaved new model,
# inserts all rows in one insert_all! inside a transaction, then writes the
# database-assigned ids back onto the in-memory models so callers can wire
# up dependent rows (e.g. PostFile.post_id). Skips per-record validations
# and callbacks by design — insert_all! writes raw rows.
def migrate_batch(klass, batch, &model_mapper)
attributes = []
models = []
klass.transaction do
batch.each do |record|
model = model_mapper.call(record)
models << model
# clean_attributes drops attr_json names and id/timestamps so the
# hash matches real columns for insert_all!.
attributes << clean_attributes(klass, model.attributes)
end
# insert_all! raises on any constraint violation, rolling back the
# whole batch.
returned = klass.insert_all!(attributes)
# NOTE(review): assumes the RETURNING rows come back in the same order
# as the input attributes array — holds for Postgres single-statement
# inserts, but worth confirming if the adapter changes.
returned.zip(models).each { |hash, model| model.id = hash["id"] }
end
models
end
end

View File

@@ -10,29 +10,21 @@ class Arel::Visitors::ToSql
def visit_Arel_Attributes_Attribute(o, collector)
join_name = o.relation.table_alias || o.relation.name
ar_table = o.relation.instance_variable_get("@klass")
if ar_table && ar_table < AttrJsonRecordAliases
registry =
if ar_table &&
attribute_def =
AttrJsonRecordAliases::ImplHelper.get_json_attr_def(
ar_table,
o.name,
)
attr_type =
T.cast(
T.unsafe(ar_table).attr_json_registry,
AttrJson::AttributeDefinition::Registry,
attribute_def.type.type,
T.any(Symbol, ActiveModel::Type::Value),
)
attribute_def =
T.cast(
registry[o.name.to_sym],
T.nilable(AttrJson::AttributeDefinition),
)
if attribute_def
attr_type =
T.cast(
attribute_def.type.type,
T.any(Symbol, ActiveModel::Type::Value),
)
attr_type_cast = ar_table.json_attribute_type_cast(attr_type)
column = "json_attributes->>'#{o.name}'"
str =
"((#{quote_table_name(join_name)}.#{column})#{attr_type_cast})"
return collector << str
end
attr_type_cast = ar_table.json_attribute_type_cast(attr_type)
column = "#{quote_column_name("json_attributes")}->>'#{o.name}'"
str = "((#{quote_table_name(join_name)}.#{column})#{attr_type_cast})"
return collector << str
end
collector << quote_table_name(join_name) << "." <<
@@ -147,38 +139,39 @@ module AttrJsonRecordAliases
end
end
sig do
params(
attr_name: Symbol,
type: T.any(Symbol, ActiveModel::Type::Value),
).returns(String)
end
def json_attribute_expression(attr_name, type)
"json_attributes->>'#{attr_name}'"
end
# sig do
# params(
# attr_name: Symbol,
# type: T.any(Symbol, ActiveModel::Type::Value),
# ).returns(String)
# end
# def json_attribute_expression(attr_name, type)
# adapter_class = T.unsafe(self).adapter_class
# "#{adapter_class.quote_table_name(self.table_name)}.#{adapter_class.quote_column_name("json_attributes")}->>'#{attr_name}'"
# end
sig do
params(
attr_name: Symbol,
type: T.any(Symbol, ActiveModel::Type::Value),
).void
end
def json_attributes_scope(attr_name, type)
attribute_expression = json_attribute_expression(attr_name, type)
db_type = json_attribute_type_cast(type)
scope :"where_#{attr_name}",
->(expr, *binds) do
where("(#{attribute_expression}#{db_type}) #{expr}", binds)
end
# sig do
# params(
# attr_name: Symbol,
# type: T.any(Symbol, ActiveModel::Type::Value),
# ).void
# end
# def json_attributes_scope(attr_name, type)
# attribute_expression = json_attribute_expression(attr_name, type)
# db_type = json_attribute_type_cast(type)
# scope :"where_#{attr_name}",
# ->(expr, *binds) do
# where("((#{attribute_expression})#{db_type}) #{expr}", binds)
# end
scope :"order_#{attr_name}",
->(dir) do
unless [:asc, :desc, nil].include?(dir)
raise("invalid direction: #{dir}")
end
order(Arel.sql("#{attribute_expression} #{dir}"))
end
end
# scope :"order_#{attr_name}",
# ->(dir) do
# unless [:asc, :desc, nil].include?(dir)
# raise("invalid direction: #{dir}")
# end
# order(Arel.sql("#{attribute_expression} #{dir}"))
# end
# end
sig do
params(
@@ -189,7 +182,7 @@ module AttrJsonRecordAliases
end
def attr_json_scoped(attr_name, type, **options)
T.unsafe(self).attr_json(attr_name, type, **options)
json_attributes_scope(attr_name, type)
# json_attributes_scope(attr_name, type)
end
end

View File

@@ -264,7 +264,7 @@ class Domain::Fa::User < ReduxApplicationRecord
send(:"scanned_#{scan_type}_at")
when :state_detail
str = state_detail["scanned_#{scan_type}_at"]
Time.parse(str) if str
Time.zone.parse(str) if str
else
raise("invalid scan type '#{scan_type}'")
end

View File

@@ -21,7 +21,7 @@ class Domain::Post::E621Post < Domain::Post
attr_json_scoped :artists_array, :string, array: true
attr_json_scoped :e621_updated_at, :datetime
attr_json_scoped :parent_post_id, :integer
attr_json_scoped :parent_post_e621_id, :integer
attr_json_scoped :last_index_page_id, :integer
attr_json_scoped :caused_by_entry_id, :integer
attr_json_scoped :scan_log_entry_id, :integer
@@ -31,7 +31,11 @@ class Domain::Post::E621Post < Domain::Post
attr_json_scoped :file_error, FileError.to_type
attr_json_scoped :uploader_user_id, :integer
belongs_to :parent_post, class_name: "Domain::Post::E621Post", optional: true
belongs_to :parent_post,
class_name: "Domain::Post::E621Post",
foreign_key: :parent_post_e621_id,
primary_key: :e621_id,
optional: true
belongs_to :uploader_user,
class_name: "::Domain::User::E621User",
inverse_of: :uploaded_posts,

View File

@@ -44,7 +44,7 @@ class Domain::Post::FaPost < Domain::Post
inverse_of: :post,
dependent: :destroy
after_initialize { self.state ||= "ok" }
after_initialize { self.state ||= "ok" if self.new_record? }
validates :state, inclusion: { in: %w[ok removed scan_error file_error] }
validates :fa_id, presence: true

View File

@@ -18,5 +18,5 @@ class Domain::User::E621User < Domain::User
if: :scanned_favs_status?,
}
validates :e621_id, presence: true
validates :name, presence: true
validates :name, length: { minimum: 1 }, allow_nil: false
end

View File

@@ -7,7 +7,20 @@ class Domain::User::FaUser < Domain::User
attr_json_scoped :name, :string
attr_json_scoped :url_name, :string
attr_json_scoped :migrated_followers_at, :datetime
attr_json_scoped :full_name, :string
attr_json_scoped :artist_type, :string
attr_json_scoped :mood, :string
attr_json_scoped :profile_html, :string
attr_json_scoped :num_pageviews, :integer
attr_json_scoped :num_submissions, :integer
attr_json_scoped :num_comments_recieved, :integer
attr_json_scoped :num_comments_given, :integer
attr_json_scoped :num_journals, :integer
attr_json_scoped :num_favorites, :integer
attr_json_scoped :scanned_gallery_at, :datetime
attr_json_scoped :scanned_page_at, :datetime
attr_json_scoped :registered_at, :datetime
attr_json_scoped :migrated_followed_users_at, :datetime
validates :name, presence: true
validates :url_name, presence: true

View File

@@ -10,6 +10,7 @@ unless Rails.env.test?
# This reports stats per request like HTTP status and timings
Rails.application.middleware.unshift PrometheusExporter::Middleware
PrometheusExporter::Client.default.logger.level = Logger::ERROR
PrometheusExporter::Instrumentation::ActiveRecord.start(
custom_labels: {
type: "puma_single_mode",

View File

@@ -19,6 +19,8 @@ class CreateUnifiedDomainTables < ActiveRecord::Migration[7.2]
sig { void }
def change
up_only { execute "SET DEFAULT_TABLESPACE = mirai" }
reversible do |dir|
dir.up do
execute "CREATE TYPE domain_post_type AS ENUM (#{POST_TYPES.map { |t| "'#{t}'" }.join(", ")})"

View File

@@ -2659,8 +2659,10 @@ CREATE SEQUENCE public.domain_inkbunny_users_id_seq
ALTER SEQUENCE public.domain_inkbunny_users_id_seq OWNED BY public.domain_inkbunny_users.id;
SET default_tablespace = mirai;
--
-- Name: domain_post_files; Type: TABLE; Schema: public; Owner: -
-- Name: domain_post_files; Type: TABLE; Schema: public; Owner: -; Tablespace: mirai
--
CREATE TABLE public.domain_post_files (
@@ -2693,7 +2695,7 @@ ALTER SEQUENCE public.domain_post_files_id_seq OWNED BY public.domain_post_files
--
-- Name: domain_posts; Type: TABLE; Schema: public; Owner: -
-- Name: domain_posts; Type: TABLE; Schema: public; Owner: -; Tablespace: mirai
--
CREATE TABLE public.domain_posts (
@@ -2724,8 +2726,6 @@ CREATE SEQUENCE public.domain_posts_id_seq
ALTER SEQUENCE public.domain_posts_id_seq OWNED BY public.domain_posts.id;
SET default_tablespace = mirai;
--
-- Name: domain_twitter_medias; Type: TABLE; Schema: public; Owner: -; Tablespace: mirai
--
@@ -2855,10 +2855,8 @@ CREATE SEQUENCE public.domain_twitter_users_id_seq
ALTER SEQUENCE public.domain_twitter_users_id_seq OWNED BY public.domain_twitter_users.id;
SET default_tablespace = '';
--
-- Name: domain_user_avatars; Type: TABLE; Schema: public; Owner: -
-- Name: domain_user_avatars; Type: TABLE; Schema: public; Owner: -; Tablespace: mirai
--
CREATE TABLE public.domain_user_avatars (
@@ -2891,7 +2889,7 @@ ALTER SEQUENCE public.domain_user_avatars_id_seq OWNED BY public.domain_user_ava
--
-- Name: domain_user_post_creations; Type: TABLE; Schema: public; Owner: -
-- Name: domain_user_post_creations; Type: TABLE; Schema: public; Owner: -; Tablespace: mirai
--
CREATE TABLE public.domain_user_post_creations (
@@ -2901,7 +2899,7 @@ CREATE TABLE public.domain_user_post_creations (
--
-- Name: domain_user_post_favs; Type: TABLE; Schema: public; Owner: -
-- Name: domain_user_post_favs; Type: TABLE; Schema: public; Owner: -; Tablespace: mirai
--
CREATE TABLE public.domain_user_post_favs (
@@ -2912,7 +2910,7 @@ CREATE TABLE public.domain_user_post_favs (
--
-- Name: domain_user_user_follows; Type: TABLE; Schema: public; Owner: -
-- Name: domain_user_user_follows; Type: TABLE; Schema: public; Owner: -; Tablespace: mirai
--
CREATE TABLE public.domain_user_user_follows (
@@ -2922,7 +2920,7 @@ CREATE TABLE public.domain_user_user_follows (
--
-- Name: domain_users; Type: TABLE; Schema: public; Owner: -
-- Name: domain_users; Type: TABLE; Schema: public; Owner: -; Tablespace: mirai
--
CREATE TABLE public.domain_users (
@@ -2953,6 +2951,8 @@ CREATE SEQUENCE public.domain_users_id_seq
ALTER SEQUENCE public.domain_users_id_seq OWNED BY public.domain_users.id;
SET default_tablespace = '';
--
-- Name: flat_sst_entries; Type: TABLE; Schema: public; Owner: -
--
@@ -5150,8 +5150,10 @@ ALTER TABLE ONLY public.domain_inkbunny_users
ADD CONSTRAINT domain_inkbunny_users_pkey PRIMARY KEY (id);
SET default_tablespace = mirai;
--
-- Name: domain_post_files domain_post_files_pkey; Type: CONSTRAINT; Schema: public; Owner: -
-- Name: domain_post_files domain_post_files_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace: mirai
--
ALTER TABLE ONLY public.domain_post_files
@@ -5159,15 +5161,13 @@ ALTER TABLE ONLY public.domain_post_files
--
-- Name: domain_posts domain_posts_pkey; Type: CONSTRAINT; Schema: public; Owner: -
-- Name: domain_posts domain_posts_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace: mirai
--
ALTER TABLE ONLY public.domain_posts
ADD CONSTRAINT domain_posts_pkey PRIMARY KEY (id);
SET default_tablespace = mirai;
--
-- Name: domain_twitter_tweets domain_twitter_tweets_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace: mirai
--
@@ -5192,10 +5192,8 @@ ALTER TABLE ONLY public.domain_twitter_users
ADD CONSTRAINT domain_twitter_users_pkey PRIMARY KEY (id);
SET default_tablespace = '';
--
-- Name: domain_user_avatars domain_user_avatars_pkey; Type: CONSTRAINT; Schema: public; Owner: -
-- Name: domain_user_avatars domain_user_avatars_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace: mirai
--
ALTER TABLE ONLY public.domain_user_avatars
@@ -5203,13 +5201,15 @@ ALTER TABLE ONLY public.domain_user_avatars
--
-- Name: domain_users domain_users_pkey; Type: CONSTRAINT; Schema: public; Owner: -
-- Name: domain_users domain_users_pkey; Type: CONSTRAINT; Schema: public; Owner: -; Tablespace: mirai
--
ALTER TABLE ONLY public.domain_users
ADD CONSTRAINT domain_users_pkey PRIMARY KEY (id);
SET default_tablespace = '';
--
-- Name: global_states global_states_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
@@ -5350,50 +5350,50 @@ CREATE INDEX domain_fa_users_name_idx ON public.domain_fa_users USING gist (name
CREATE INDEX domain_fa_users_url_name_idx ON public.domain_fa_users USING gist (url_name public.gist_trgm_ops);
SET default_tablespace = '';
--
-- Name: idx_domain_e621_posts_on_e621_id; Type: INDEX; Schema: public; Owner: -
-- Name: idx_domain_e621_posts_on_e621_id; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE UNIQUE INDEX idx_domain_e621_posts_on_e621_id ON public.domain_posts USING btree ((((json_attributes ->> 'e621_id'::text))::integer)) WHERE (type = 'Domain::Post::E621Post'::public.domain_post_type);
--
-- Name: idx_domain_e621_posts_on_uploader_user_id; Type: INDEX; Schema: public; Owner: -
-- Name: idx_domain_e621_posts_on_uploader_user_id; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE UNIQUE INDEX idx_domain_e621_posts_on_uploader_user_id ON public.domain_posts USING btree ((((json_attributes ->> 'uploader_user_id'::text))::integer)) WHERE (type = 'Domain::Post::E621Post'::public.domain_post_type);
--
-- Name: idx_domain_e621_users_on_e621_id; Type: INDEX; Schema: public; Owner: -
-- Name: idx_domain_e621_users_on_e621_id; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE UNIQUE INDEX idx_domain_e621_users_on_e621_id ON public.domain_users USING btree ((((json_attributes ->> 'e621_id'::text))::integer)) WHERE (type = 'Domain::User::E621User'::public.domain_user_type);
--
-- Name: idx_domain_fa_posts_on_fa_id; Type: INDEX; Schema: public; Owner: -
-- Name: idx_domain_fa_posts_on_fa_id; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE UNIQUE INDEX idx_domain_fa_posts_on_fa_id ON public.domain_posts USING btree ((((json_attributes ->> 'fa_id'::text))::integer)) WHERE (type = 'Domain::Post::FaPost'::public.domain_post_type);
--
-- Name: idx_domain_fa_users_on_url_name; Type: INDEX; Schema: public; Owner: -
-- Name: idx_domain_fa_users_on_url_name; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE UNIQUE INDEX idx_domain_fa_users_on_url_name ON public.domain_users USING btree (((json_attributes ->> 'url_name'::text))) WHERE (type = 'Domain::User::FaUser'::public.domain_user_type);
--
-- Name: idx_domain_users_on_migrated_user_favs_at; Type: INDEX; Schema: public; Owner: -
-- Name: idx_domain_users_on_migrated_user_favs_at; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE INDEX idx_domain_users_on_migrated_user_favs_at ON public.domain_users USING btree (((json_attributes ->> 'migrated_user_favs_at'::text)));
SET default_tablespace = '';
--
-- Name: idx_on_good_job_execution_id_685ddb5560; Type: INDEX; Schema: public; Owner: -
--
@@ -6671,29 +6671,29 @@ CREATE UNIQUE INDEX index_domain_inkbunny_users_on_ib_user_id ON public.domain_i
CREATE INDEX index_domain_inkbunny_users_on_shallow_update_log_entry_id ON public.domain_inkbunny_users USING btree (shallow_update_log_entry_id);
SET default_tablespace = mirai;
--
-- Name: index_domain_post_files_on_log_entry_id; Type: INDEX; Schema: public; Owner: -
-- Name: index_domain_post_files_on_log_entry_id; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE INDEX index_domain_post_files_on_log_entry_id ON public.domain_post_files USING btree (log_entry_id);
--
-- Name: index_domain_post_files_on_post_id; Type: INDEX; Schema: public; Owner: -
-- Name: index_domain_post_files_on_post_id; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE INDEX index_domain_post_files_on_post_id ON public.domain_post_files USING btree (post_id);
--
-- Name: index_domain_posts_on_type; Type: INDEX; Schema: public; Owner: -
-- Name: index_domain_posts_on_type; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE INDEX index_domain_posts_on_type ON public.domain_posts USING btree (type);
SET default_tablespace = mirai;
--
-- Name: index_domain_twitter_medias_on_file_id; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
@@ -6750,71 +6750,71 @@ CREATE UNIQUE INDEX index_domain_twitter_users_on_name ON public.domain_twitter_
CREATE UNIQUE INDEX index_domain_twitter_users_on_tw_id ON public.domain_twitter_users USING btree (tw_id);
SET default_tablespace = '';
--
-- Name: index_domain_user_avatars_on_type; Type: INDEX; Schema: public; Owner: -
-- Name: index_domain_user_avatars_on_type; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE INDEX index_domain_user_avatars_on_type ON public.domain_user_avatars USING btree (type);
--
-- Name: index_domain_user_avatars_on_user_id; Type: INDEX; Schema: public; Owner: -
-- Name: index_domain_user_avatars_on_user_id; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE INDEX index_domain_user_avatars_on_user_id ON public.domain_user_avatars USING btree (user_id);
--
-- Name: index_domain_user_post_creations_on_post_id_and_user_id; Type: INDEX; Schema: public; Owner: -
-- Name: index_domain_user_post_creations_on_post_id_and_user_id; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE INDEX index_domain_user_post_creations_on_post_id_and_user_id ON public.domain_user_post_creations USING btree (post_id, user_id);
--
-- Name: index_domain_user_post_creations_on_user_id_and_post_id; Type: INDEX; Schema: public; Owner: -
-- Name: index_domain_user_post_creations_on_user_id_and_post_id; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE UNIQUE INDEX index_domain_user_post_creations_on_user_id_and_post_id ON public.domain_user_post_creations USING btree (user_id, post_id);
--
-- Name: index_domain_user_post_favs_on_post_id_and_user_id; Type: INDEX; Schema: public; Owner: -
-- Name: index_domain_user_post_favs_on_post_id_and_user_id; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE INDEX index_domain_user_post_favs_on_post_id_and_user_id ON public.domain_user_post_favs USING btree (post_id, user_id);
--
-- Name: index_domain_user_post_favs_on_user_id_and_post_id; Type: INDEX; Schema: public; Owner: -
-- Name: index_domain_user_post_favs_on_user_id_and_post_id; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE UNIQUE INDEX index_domain_user_post_favs_on_user_id_and_post_id ON public.domain_user_post_favs USING btree (user_id, post_id);
--
-- Name: index_domain_user_user_follows_on_from_id_and_to_id; Type: INDEX; Schema: public; Owner: -
-- Name: index_domain_user_user_follows_on_from_id_and_to_id; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE UNIQUE INDEX index_domain_user_user_follows_on_from_id_and_to_id ON public.domain_user_user_follows USING btree (from_id, to_id);
--
-- Name: index_domain_user_user_follows_on_to_id_and_from_id; Type: INDEX; Schema: public; Owner: -
-- Name: index_domain_user_user_follows_on_to_id_and_from_id; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE INDEX index_domain_user_user_follows_on_to_id_and_from_id ON public.domain_user_user_follows USING btree (to_id, from_id);
--
-- Name: index_domain_users_on_type; Type: INDEX; Schema: public; Owner: -
-- Name: index_domain_users_on_type; Type: INDEX; Schema: public; Owner: -; Tablespace: mirai
--
CREATE INDEX index_domain_users_on_type ON public.domain_users USING btree (type);
SET default_tablespace = '';
--
-- Name: index_e621_posts_on_scanned_post_favs_at; Type: INDEX; Schema: public; Owner: -
--

View File

@@ -20,8 +20,8 @@ namespace :e621 do
task scan_user_favs: :environment do
while user =
Domain::E621::User
.where_scanned_favs_at("is null")
.where_num_other_favs_cached("< ?", 200)
.where(scanned_favs_at: nil)
.where(num_other_favs_cached: ..200)
.order("RANDOM()")
.take
Domain::E621::Job::ScanUserFavsJob.perform_now(user: user)
@@ -33,11 +33,11 @@ namespace :e621 do
user_query =
lambda do
Domain::E621::User
.where_scanned_favs_status("is null")
.or(Domain::E621::User.where_scanned_favs_status("<> ?", "error"))
.where_scanned_favs_at("is null")
.where_num_other_favs_cached("is not null")
.order_num_other_favs_cached(:desc)
.where(scanned_favs_status: nil)
.or(Domain::E621::User.where.not(scanned_favs_status: "error"))
.where(scanned_favs_at: nil)
.where.not(num_other_favs_cached: nil)
.order(num_other_favs_cached: :desc)
end
while user = user_query.call.first
Domain::E621::Job::ScanUserFavsJob.perform_now(user: user)
@@ -83,11 +83,20 @@ namespace :e621 do
# puts Domain::Post::E621Post.where(e621_id: 5_350_363).explain.inspect
# puts Domain::Post::FaPost.where(fa_id: 52_801_830).explain.inspect
# puts Domain::Fa::Post.where(fa_id: 52_801_830).explain.inspect
puts Domain::Post::FaPost
.select(:fa_id)
.where(fa_id: 52_801_830)
.explain
.inspect
# puts Domain::Post::FaPost
# .select(:fa_id)
# .where(fa_id: 52_801_830)
# .explain
# .inspect
query = Domain::E621::Post.joins(domain_posts: :e621_id)
# "LEFT JOIN domain_posts ON domain_e621_posts.e621_id =
# (domain_posts.json_attributes->>'e621_id')::integer
# AND domain_posts.type = 'Domain::Post::E621Post'",
# ).where("domain_posts.id IS NULL")
puts query.explain.inspect
# puts Domain::Fa::Post
# .joins(
# "

View File

@@ -30,34 +30,34 @@ class Domain::Fa::User
sig { returns(ColorLogger) }
def logger; end
sig { returns(T.nilable(Time)) }
sig { returns(T.nilable(ActiveSupport::TimeWithZone)) }
def scanned_favs_at; end
sig { params(value: T.nilable(Time)).void }
sig { params(value: T.nilable(ActiveSupport::TimeWithZone)).void }
def scanned_favs_at=(value); end
sig { returns(T.nilable(Time)) }
sig { returns(T.nilable(ActiveSupport::TimeWithZone)) }
def scanned_follows_at; end
sig { params(value: T.nilable(Time)).void }
sig { params(value: T.nilable(ActiveSupport::TimeWithZone)).void }
def scanned_follows_at=(value); end
sig { returns(T.nilable(Time)) }
sig { returns(T.nilable(ActiveSupport::TimeWithZone)) }
def scanned_gallery_at; end
sig { params(value: T.nilable(Time)).void }
sig { params(value: T.nilable(ActiveSupport::TimeWithZone)).void }
def scanned_gallery_at=(value); end
sig { returns(T.nilable(Time)) }
sig { returns(T.nilable(ActiveSupport::TimeWithZone)) }
def scanned_incremental_at; end
sig { params(value: T.nilable(Time)).void }
sig { params(value: T.nilable(ActiveSupport::TimeWithZone)).void }
def scanned_incremental_at=(value); end
sig { returns(T.nilable(Time)) }
sig { returns(T.nilable(ActiveSupport::TimeWithZone)) }
def scanned_page_at; end
sig { params(value: T.nilable(Time)).void }
sig { params(value: T.nilable(ActiveSupport::TimeWithZone)).void }
def scanned_page_at=(value); end
sig { returns(String) }

View File

@@ -721,7 +721,7 @@ class Domain::Post::E621Post
def order_last_index_page_id(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def order_parent_post_id(*args, &blk); end
def order_parent_post_e621_id(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def order_pools_array(*args, &blk); end
@@ -821,7 +821,7 @@ class Domain::Post::E621Post
def where_last_index_page_id(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def where_parent_post_id(*args, &blk); end
def where_parent_post_e621_id(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateAssociationRelation) }
def where_pools_array(*args, &blk); end
@@ -1425,49 +1425,49 @@ class Domain::Post::E621Post
def last_index_page_id_will_change!; end
sig { returns(T.nilable(::Integer)) }
def parent_post_id; end
def parent_post_e621_id; end
sig { params(value: T.nilable(::Integer)).returns(T.nilable(::Integer)) }
def parent_post_id=(value); end
def parent_post_e621_id=(value); end
sig { returns(T::Boolean) }
def parent_post_id?; end
def parent_post_e621_id?; end
sig { returns(T.nilable(::Integer)) }
def parent_post_id_before_last_save; end
def parent_post_e621_id_before_last_save; end
sig { returns(T.untyped) }
def parent_post_id_before_type_cast; end
def parent_post_e621_id_before_type_cast; end
sig { returns(T::Boolean) }
def parent_post_id_came_from_user?; end
def parent_post_e621_id_came_from_user?; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def parent_post_id_change; end
def parent_post_e621_id_change; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def parent_post_id_change_to_be_saved; end
def parent_post_e621_id_change_to_be_saved; end
sig { params(from: T.nilable(::Integer), to: T.nilable(::Integer)).returns(T::Boolean) }
def parent_post_id_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
def parent_post_e621_id_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::Integer)) }
def parent_post_id_in_database; end
def parent_post_e621_id_in_database; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def parent_post_id_previous_change; end
def parent_post_e621_id_previous_change; end
sig { params(from: T.nilable(::Integer), to: T.nilable(::Integer)).returns(T::Boolean) }
def parent_post_id_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
def parent_post_e621_id_previously_changed?(from: T.unsafe(nil), to: T.unsafe(nil)); end
sig { returns(T.nilable(::Integer)) }
def parent_post_id_previously_was; end
def parent_post_e621_id_previously_was; end
sig { returns(T.nilable(::Integer)) }
def parent_post_id_was; end
def parent_post_e621_id_was; end
sig { void }
def parent_post_id_will_change!; end
def parent_post_e621_id_will_change!; end
sig { returns(T.untyped) }
def pools_array; end
@@ -1641,7 +1641,7 @@ class Domain::Post::E621Post
def restore_last_index_page_id!; end
sig { void }
def restore_parent_post_id!; end
def restore_parent_post_e621_id!; end
sig { void }
def restore_pools_array!; end
@@ -1752,10 +1752,10 @@ class Domain::Post::E621Post
def saved_change_to_last_index_page_id?; end
sig { returns(T.nilable([T.nilable(::Integer), T.nilable(::Integer)])) }
def saved_change_to_parent_post_id; end
def saved_change_to_parent_post_e621_id; end
sig { returns(T::Boolean) }
def saved_change_to_parent_post_id?; end
def saved_change_to_parent_post_e621_id?; end
sig { returns(T.nilable([T.untyped, T.untyped])) }
def saved_change_to_pools_array; end
@@ -2291,7 +2291,7 @@ class Domain::Post::E621Post
def will_save_change_to_last_index_page_id?; end
sig { returns(T::Boolean) }
def will_save_change_to_parent_post_id?; end
def will_save_change_to_parent_post_e621_id?; end
sig { returns(T::Boolean) }
def will_save_change_to_pools_array?; end
@@ -2446,7 +2446,7 @@ class Domain::Post::E621Post
def order_last_index_page_id(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def order_parent_post_id(*args, &blk); end
def order_parent_post_e621_id(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def order_pools_array(*args, &blk); end
@@ -2546,7 +2546,7 @@ class Domain::Post::E621Post
def where_last_index_page_id(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def where_parent_post_id(*args, &blk); end
def where_parent_post_e621_id(*args, &blk); end
sig { params(args: T.untyped, blk: T.untyped).returns(PrivateRelation) }
def where_pools_array(*args, &blk); end

File diff suppressed because it is too large Load Diff

View File

@@ -24,11 +24,16 @@ module Tapioca::Compilers
)
klass.create_method(
"scanned_#{scan_type}_at",
return_type: "T.nilable(Time)",
return_type: "T.nilable(ActiveSupport::TimeWithZone)",
)
klass.create_method(
"scanned_#{scan_type}_at=",
parameters: [create_param("value", type: "T.nilable(Time)")],
parameters: [
create_param(
"value",
type: "T.nilable(ActiveSupport::TimeWithZone)",
),
],
return_type: "void",
)
end

View File

@@ -1,7 +1,7 @@
# typed: false
FactoryBot.define do
factory :blob_entry do
transient { content { "test content" } }
transient { content { "test content #{SecureRandom.alphanumeric(10)}" } }
content_type { "text/plain" }
sha256 { Digest::SHA256.digest(content) }

View File

@@ -0,0 +1,509 @@
# typed: false
require "rails_helper"
# Specs for Domain::MigrateToDomain — the one-shot migrator that copies rows
# from the legacy per-site tables (Domain::E621::User/Post, Domain::Fa::User/Post)
# into the unified STI tables (Domain::User::*, Domain::Post::*).
#
# Each migrate_* method is expected to:
#   * create a new-table row for every legacy row not yet migrated,
#   * skip legacy rows whose key (e621_id / url_name / fa_id) already exists,
#   * work across batch boundaries when multiple rows are pending.
RSpec.describe Domain::MigrateToDomain do
  let(:migrator) { described_class.new }

  # Asserts that a migrated Domain::User::E621User carries over every attribute
  # from its legacy Domain::E621::User source row.
  def expect_users_match(old_user, new_user)
    expect(new_user).to have_attributes(
      e621_id: old_user.e621_user_id,
      name: old_user.name,
      favs_are_hidden: old_user.favs_are_hidden,
      num_other_favs_cached: old_user.num_other_favs_cached,
      scanned_favs_status: old_user.scanned_favs_status,
      # Persisted timestamps lose sub-second precision across the copy, so
      # compare with a tolerance rather than exact equality.
      scanned_favs_at: be_within(1.second).of(old_user.scanned_favs_at),
    )
  end

  # Asserts that a migrated Domain::User::FaUser matches its legacy
  # Domain::Fa::User source row.
  def expect_fa_users_match(old_user, new_user)
    expect(new_user).to have_attributes(
      url_name: old_user.url_name,
      name: old_user.name,
      full_name: old_user.full_name,
      artist_type: old_user.artist_type,
      mood: old_user.mood,
      profile_html: old_user.profile_html,
      num_pageviews: old_user.num_pageviews,
      num_submissions: old_user.num_submissions,
      # NOTE: "recieved" [sic] is the legacy column's spelling; keep it.
      num_comments_recieved: old_user.num_comments_recieved,
      num_comments_given: old_user.num_comments_given,
      num_journals: old_user.num_journals,
      num_favorites: old_user.num_favorites,
      scanned_gallery_at: be_within(1.second).of(old_user.scanned_gallery_at),
      scanned_page_at: be_within(1.second).of(old_user.scanned_page_at),
      registered_at: be_within(1.second).of(old_user.registered_at),
    )
  end

  # Asserts that a migrated Domain::Post::E621Post matches its legacy
  # Domain::E621::Post source row. Note the column rename:
  # legacy parent_e621_id -> new parent_post_e621_id.
  def expect_posts_match(old_post, new_post)
    expect(new_post).to have_attributes(
      state: old_post.state,
      e621_id: old_post.e621_id,
      # FIX: use be_within like every other persisted timestamp in this file;
      # exact Time equality is flaky once values round-trip through the DB.
      scanned_post_favs_at:
        be_within(1.second).of(old_post.scanned_post_favs_at),
      rating: old_post.rating,
      tags_array: old_post.tags_array,
      flags_array: old_post.flags_array,
      pools_array: old_post.pools_array,
      sources_array: old_post.sources_array,
      artists_array: old_post.artists_array,
      e621_updated_at: be_within(1.second).of(old_post.e621_updated_at),
      last_index_page_id: old_post.last_index_page_id,
      caused_by_entry_id: old_post.caused_by_entry_id,
      scan_log_entry_id: old_post.scan_log_entry_id,
      index_page_ids: old_post.index_page_ids,
      prev_md5s: old_post.prev_md5s,
      scan_error: old_post.scan_error,
      file_error: old_post.file_error,
      created_at: be_within(1.second).of(old_post.created_at),
      parent_post_e621_id: old_post.parent_e621_id,
    )
  end

  # Asserts that a migrated Domain::Post::FaPost matches its legacy
  # Domain::Fa::Post source row, including the optional creator and file
  # associations (both may legitimately be absent).
  def expect_fa_posts_match(old_post, new_post)
    expect(new_post).to have_attributes(
      state: old_post.state,
      title: old_post.title,
      fa_id: old_post.fa_id,
      category: old_post.category,
      theme: old_post.theme,
      species: old_post.species,
      gender: old_post.gender,
      description: old_post.description,
      keywords: old_post.keywords,
      num_favorites: old_post.num_favorites,
      num_comments: old_post.num_comments,
      num_views: old_post.num_views,
      posted_at: be_within(1.second).of(old_post.posted_at),
      scanned_at: be_within(1.second).of(old_post.scanned_at),
      scan_file_error: old_post.scan_file_error,
      last_user_page_id: old_post.last_user_page_id,
      last_submission_page_id: old_post.last_submission_page_id,
      first_browse_page_id: old_post.first_browse_page_id,
      first_gallery_page_id: old_post.first_gallery_page_id,
      first_seen_entry_id: old_post.first_seen_entry_id,
      created_at: be_within(1.second).of(old_post.created_at),
    )
    # Creator is matched by url_name: the migrator links the new post to the
    # already-migrated Domain::User::FaUser, not to the legacy user row.
    if old_post.creator.present?
      expect(new_post.creator).to have_attributes(
        url_name: old_post.creator.url_name,
      )
    else
      expect(new_post.creator).to be_nil
    end
    # The legacy post stores the file as an HTTP log entry + URL string; the
    # new schema wraps those in a post-file record.
    if old_post.file.present?
      expect(new_post.file).to have_attributes(
        log_entry: old_post.file,
        url_str: old_post.file_url_str,
        state: old_post.state,
      )
    else
      expect(new_post.file).to be_nil
    end
  end

  describe "#migrate_e621_users" do
    let!(:old_user) do
      Domain::E621::User.create!(
        e621_user_id: 123,
        name: "test_user",
        favs_are_hidden: true,
        num_other_favs_cached: 42,
        scanned_favs_status: "ok",
        scanned_favs_at: Time.current,
      )
    end

    it "migrates users that don't exist in the new table" do
      expect { migrator.migrate_e621_users }.to change(
        Domain::User::E621User,
        :count,
      ).by(1)

      new_user = Domain::User::E621User.find_by(e621_id: old_user.e621_user_id)
      expect_users_match(old_user, new_user)
    end

    it "skips users that already exist in the new table" do
      # Create a user in the new table first
      Domain::User::E621User.create!(
        e621_id: old_user.e621_user_id,
        name: old_user.name,
      )

      expect { migrator.migrate_e621_users }.not_to change(
        Domain::User::E621User,
        :count,
      )
    end

    it "handles multiple users in batches" do
      # Create a few more old users
      additional_users =
        2.times.map do |i|
          Domain::E621::User.create!(
            e621_user_id: 456 + i,
            name: "test_user_#{i}",
            favs_are_hidden: false,
            num_other_favs_cached: i,
            scanned_favs_status: "ok",
            scanned_favs_at: Time.current,
          )
        end

      expect { migrator.migrate_e621_users }.to change(
        Domain::User::E621User,
        :count,
      ).by(3)

      expect(Domain::User::E621User.count).to eq(3)
      expect(Domain::User::E621User.pluck(:e621_id)).to contain_exactly(
        123,
        456,
        457,
      )

      # Verify all users were migrated correctly
      ([old_user] + additional_users).each do |old_user|
        new_user =
          Domain::User::E621User.find_by(e621_id: old_user.e621_user_id)
        expect_users_match(old_user, new_user)
      end
    end
  end

  describe "#migrate_e621_posts" do
    let!(:old_post) do
      Domain::E621::Post.create!(
        e621_id: 123,
        state: "ok",
        rating: "s",
        tags_array: {
          "general" => %w[tag1 tag2],
        },
        flags_array: ["flag1"],
        pools_array: ["pool1"],
        sources_array: ["source1"],
        artists_array: ["artist1"],
        e621_updated_at: Time.current,
        last_index_page_id: 1,
        caused_by_entry_id: 2,
        scan_log_entry_id: 3,
        index_page_ids: [1, 2, 3],
        prev_md5s: ["md5_1"],
        scan_error: nil,
        file_error: nil,
        parent_e621_id: nil,
        scanned_post_favs_at: Time.current,
      )
    end

    it "migrates posts that don't exist in the new table" do
      expect { migrator.migrate_e621_posts }.to change(
        Domain::Post::E621Post,
        :count,
      ).by(1)

      new_post = Domain::Post::E621Post.find_by(e621_id: old_post.e621_id)
      expect_posts_match(old_post, new_post)
    end

    it "skips posts that already exist in the new table" do
      # Create a post in the new table first
      Domain::Post::E621Post.create!(
        e621_id: old_post.e621_id,
        state: "ok",
        rating: "q",
      )

      expect { migrator.migrate_e621_posts }.not_to change(
        Domain::Post::E621Post,
        :count,
      )
    end

    it "handles multiple posts in batches" do
      # Create a few more old posts
      additional_posts =
        2.times.map do |i|
          Domain::E621::Post.create!(
            e621_id: 456 + i,
            state: "ok",
            rating: "q",
            tags_array: {
              "general" => ["tag#{i}"],
            },
            flags_array: ["flag#{i}"],
            pools_array: ["pool#{i}"],
            sources_array: ["source#{i}"],
            artists_array: ["artist#{i}"],
            e621_updated_at: Time.current,
            last_index_page_id: i,
            caused_by_entry_id: i + 1,
            scan_log_entry_id: i + 2,
            index_page_ids: [i],
            prev_md5s: ["md5_#{i}"],
            scan_error: nil,
            file_error: nil,
            parent_e621_id: nil,
            scanned_post_favs_at: Time.current,
          )
        end

      expect { migrator.migrate_e621_posts }.to change(
        Domain::Post::E621Post,
        :count,
      ).by(3)

      expect(Domain::Post::E621Post.count).to eq(3)
      expect(Domain::Post::E621Post.pluck(:e621_id)).to contain_exactly(
        123,
        456,
        457,
      )

      # Verify all posts were migrated correctly
      ([old_post] + additional_posts).each do |old_post|
        new_post = Domain::Post::E621Post.find_by(e621_id: old_post.e621_id)
        expect_posts_match(old_post, new_post)
      end
    end
  end

  describe "#migrate_fa_users" do
    let!(:old_user) do
      Domain::Fa::User.create!(
        url_name: "testuser",
        name: "Test_User",
        full_name: "Test User Full Name",
        artist_type: "artist",
        mood: "happy",
        profile_html: "<p>Test profile</p>",
        num_pageviews: 1000,
        num_submissions: 50,
        num_comments_recieved: 200,
        num_comments_given: 150,
        num_journals: 10,
        num_favorites: 300,
        scanned_gallery_at: Time.current,
        scanned_page_at: Time.current,
        registered_at: 1.year.ago,
      )
    end

    it "migrates users that don't exist in the new table" do
      expect { migrator.migrate_fa_users }.to change(
        Domain::User::FaUser,
        :count,
      ).by(1)

      new_user = Domain::User::FaUser.find_by(url_name: old_user.url_name)
      expect_fa_users_match(old_user, new_user)
    end

    it "skips users that already exist in the new table" do
      # Create a user in the new table first
      Domain::User::FaUser.create!(
        url_name: old_user.url_name,
        name: old_user.name,
      )

      expect { migrator.migrate_fa_users }.not_to change(
        Domain::User::FaUser,
        :count,
      )
    end

    it "handles multiple users in batches" do
      # Create a few more old users
      additional_users =
        2.times.map do |i|
          Domain::Fa::User.create!(
            url_name: "testuser#{i}",
            name: "Test_User_#{i}",
            full_name: "Test User #{i} Full Name",
            artist_type: "artist",
            mood: "happy",
            profile_html: "<p>Test profile #{i}</p>",
            num_pageviews: 1000 + i,
            num_submissions: 50 + i,
            num_comments_recieved: 200 + i,
            num_comments_given: 150 + i,
            num_journals: 10 + i,
            num_favorites: 300 + i,
            scanned_gallery_at: Time.current,
            scanned_page_at: Time.current,
            registered_at: i.days.ago,
          )
        end

      expect { migrator.migrate_fa_users }.to change(
        Domain::User::FaUser,
        :count,
      ).by(3)

      expect(Domain::User::FaUser.count).to eq(3)
      expect(Domain::User::FaUser.pluck(:url_name)).to contain_exactly(
        "testuser",
        "testuser0",
        "testuser1",
      )

      # Verify all users were migrated correctly
      ([old_user] + additional_users).each do |old_user|
        new_user = Domain::User::FaUser.find_by(url_name: old_user.url_name)
        expect_fa_users_match(old_user, new_user)
      end
    end
  end

  describe "#migrate_fa_posts" do
    # The legacy creator and its already-migrated counterpart; fa posts are
    # linked to the NEW user row by url_name during migration.
    let!(:creator) do
      Domain::Fa::User.create!(url_name: "artist1", name: "Artist 1")
    end
    let!(:new_creator) do
      Domain::User::FaUser.create!(url_name: "artist1", name: "Artist 1")
    end
    let!(:old_post) do
      Domain::Fa::Post.create!(
        fa_id: 123,
        state: "ok",
        title: "Test Post",
        category: "artwork",
        theme: "abstract",
        species: "canine",
        gender: "male",
        description: "Test description",
        keywords: %w[test art],
        num_favorites: 42,
        num_comments: 10,
        num_views: 100,
        posted_at: Time.current,
        scanned_at: Time.current,
        scan_file_error: nil,
        last_user_page_id: 1,
        last_submission_page_id: 2,
        first_browse_page_id: 3,
        first_gallery_page_id: 4,
        first_seen_entry_id: 5,
        creator: creator,
        file_url_str: "https://example.com/image.jpg",
        file: create(:http_log_entry),
      )
    end

    it "migrates posts that don't exist in the new table" do
      expect { migrator.migrate_fa_posts }.to change(
        Domain::Post::FaPost,
        :count,
      ).by(1)

      new_post = Domain::Post::FaPost.find_by(fa_id: old_post.fa_id)
      expect_fa_posts_match(old_post, new_post)
    end

    it "skips posts that already exist in the new table" do
      # Create a post in the new table first
      Domain::Post::FaPost.create!(fa_id: old_post.fa_id, state: "ok")

      expect { migrator.migrate_fa_posts }.not_to change(
        Domain::Post::FaPost,
        :count,
      )
    end

    it "handles multiple posts in batches" do
      # Create a few more old posts
      additional_posts =
        2.times.map do |i|
          Domain::Fa::Post.create!(
            fa_id: 456 + i,
            state: "ok",
            title: "Test Post #{i}",
            category: "artwork",
            theme: "abstract",
            species: "canine",
            gender: "male",
            description: "Test description #{i}",
            keywords: ["test#{i}", "art"],
            num_favorites: 42 + i,
            num_comments: 10 + i,
            num_views: 100 + i,
            posted_at: Time.current,
            scanned_at: Time.current,
            scan_file_error: nil,
            last_user_page_id: i + 1,
            last_submission_page_id: i + 2,
            first_browse_page_id: i + 3,
            first_gallery_page_id: i + 4,
            first_seen_entry_id: i + 5,
            creator: creator,
            file_url_str: "https://example.com/image_#{i}.jpg",
            file: create(:http_log_entry),
          )
        end

      expect { migrator.migrate_fa_posts }.to change(
        Domain::Post::FaPost,
        :count,
      ).by(3)

      expect(Domain::Post::FaPost.count).to eq(3)
      expect(Domain::Post::FaPost.pluck(:fa_id)).to contain_exactly(
        123,
        456,
        457,
      )

      # Verify all posts were migrated correctly
      ([old_post] + additional_posts).each do |old_post|
        new_post = Domain::Post::FaPost.find_by(fa_id: old_post.fa_id)
        expect_fa_posts_match(old_post, new_post)
      end
    end

    it "handles posts without creators" do
      post_without_creator =
        Domain::Fa::Post.create!(
          fa_id: 789,
          state: "ok",
          title: "No Creator Post",
          category: "artwork",
          posted_at: Time.current,
          scanned_at: Time.current,
        )

      # by(2): the let! old_post plus the creator-less post above.
      expect { migrator.migrate_fa_posts }.to change(
        Domain::Post::FaPost,
        :count,
      ).by(2)

      new_post = Domain::Post::FaPost.find_by(fa_id: post_without_creator.fa_id)
      expect_fa_posts_match(post_without_creator, new_post)
    end

    it "handles posts without files" do
      post_without_file =
        Domain::Fa::Post.create!(
          fa_id: 789,
          state: "ok",
          title: "No File Post",
          category: "artwork",
          posted_at: Time.current,
          scanned_at: Time.current,
          creator: creator,
        )

      # by(2): the let! old_post plus the file-less post above.
      expect { migrator.migrate_fa_posts }.to change(
        Domain::Post::FaPost,
        :count,
      ).by(2)

      new_post = Domain::Post::FaPost.find_by(fa_id: post_without_file.fa_id)
      expect_fa_posts_match(post_without_file, new_post)
    end
  end
end

View File

@@ -56,19 +56,26 @@ RSpec.describe Domain::Post::E621Post, type: :model do
post2 = create(:domain_post_e621_post, scan_error: "an error")
post3 = create(:domain_post_e621_post, scan_error: nil)
expect(described_class.where_scan_error("is not null")).to match_array(
[post2],
)
expect(described_class.where_scan_error("is null")).to match_array(
expect(described_class.where.not(scan_error: nil)).to match_array([post2])
expect(described_class.where(scan_error: nil)).to match_array(
[post1, post3],
)
post2.scan_error = nil
post2.save!
expect(described_class.where_scan_error("is null")).to match_array(
expect(described_class.where(scan_error: nil)).to match_array(
[post1, post2, post3],
)
end
it "can be counted with generated scopes" do
create(:domain_post_e621_post)
post2 = create(:domain_post_e621_post)
create(:domain_post_e621_post, parent_post: post2)
expect(described_class.where(parent_post_e621_id: nil).count).to eq(2)
expect(described_class.where(parent_post: nil).count).to eq(2)
end
end
describe "associations" do

View File

@@ -18,7 +18,12 @@ RSpec.describe Domain::User::E621User, type: :model do
it "requires name" do
user.name = nil
expect(user).not_to be_valid
expect(user.errors[:name]).to include("can't be blank")
expect(user.errors[:name][0]).to include("is too short")
end
it "allows a whitespace name" do
user.name = " "
expect(user).to be_valid
end
it "validates scanned_favs_status inclusion" do