redux-scraper/app/helpers/domain/fa/posts_helper.rb
Dylan Knutson 0700adaa55 Enhance PostsHelper and View Logic for Improved Post Metadata Display
- Updated `PostsHelper` to enforce strict typing and added new methods for guessing HTTP log entries related to scanned posts and file downloads.
- Refactored the `post_state_string` method to handle unknown states more gracefully.
- Modified the view template to replace the old scanned and file description logic with links to log entries, providing clearer metadata about post actions.
- Removed deprecated tests related to the old description methods and added new tests for the updated functionality.

These changes improve the clarity and usability of post metadata in the application.
2025-01-20 18:00:08 +00:00
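
For illustration only, the new helper methods might surface in the post view roughly as follows. This is a hypothetical sketch, not the actual template from the commit; it assumes `HttpLogEntry` records expose a `created_at` timestamp and have a RESTful route, and that `post` is the `Domain::Fa::Post` being rendered.

<%# Hypothetical usage of the helpers defined below %>
<% if (entry = guess_scanned_http_log_entry(post)) %>
  <%= link_to "scanned #{time_ago_in_words(entry.created_at)} ago", entry %>
<% end %>
<% if (entry = guess_file_downloaded_http_log_entry(post)) %>
  <%= link_to "file downloaded #{time_ago_in_words(entry.created_at)} ago", entry %>
<% end %>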

# typed: strict
module Domain::Fa::PostsHelper
  extend T::Sig

  include ActionView::Helpers::DateHelper
  include ActionView::Helpers::SanitizeHelper
  include ActionView::Helpers::RenderingHelper
  include ActionView::Helpers::TagHelper
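
  # Short, human-readable state label for a post: "file" once the file has
  # been downloaded, "scanned" once the page has been scanned, otherwise the
  # raw state column (or "unknown" when it is unset).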
  sig { params(post: Domain::Fa::Post).returns(String) }
  def post_state_string(post)
    if post.have_file?
      "file"
    elsif post.scanned?
      "scanned"
    else
      post.state || "unknown"
    end
  end
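
  # Returns a "(page N)" suffix for titles when the requested page is past
  # the first one; nil otherwise.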
  sig do
    params(
      params: T.any(ActionController::Parameters, T::Hash[T.untyped, T.untyped]),
    ).returns(T.nilable(String))
  end
  def page_str(params)
    if (params[:page] || 1).to_i > 1
      "(page #{params[:page]})"
    else
      nil
    end
  end
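
  # Best-effort guess at the HTTP log entry captured when this post's page
  # was scanned, looked up by the canonical FA view URL.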
  sig { params(post: Domain::Fa::Post).returns(T.nilable(HttpLogEntry)) }
  def guess_scanned_http_log_entry(post)
    HttpLogEntry.find_all_by_uri(
      "https://www.furaffinity.net/view/#{post.fa_id}",
    ).first
  end
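
  # Best-effort guess at the HTTP log entry captured when this post's file
  # was downloaded; nil when the post has no file URI.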
  sig { params(post: Domain::Fa::Post).returns(T.nilable(HttpLogEntry)) }
  def guess_file_downloaded_http_log_entry(post)
    if (uri = post.file_uri)
      HttpLogEntry.find_all_by_uri(uri).first
    end
  end
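
  # Sanitizes an FA post description down to a small whitelist of tags, keeps
  # only furaffinity.net links, and rewrites links to known posts and users
  # as rendered inline-link partials.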
  sig { params(html: String).returns(String) }
  def fa_post_description_sanitized(html)
    fa_post_id_to_node = {}
    fa_user_url_name_to_node = {}
    sanitizer =
      Sanitize.new(
        elements: %w[br img b i span strong],
        attributes: {
          "span" => %w[style],
        },
        css: {
          properties: %w[font-size color],
        },
        transformers: [
          Kernel.lambda do |env|
            # Only allow and transform FA links
            if env[:node_name] == "a"
              node = env[:node]
              # By default, assume the host is www.furaffinity.net
              href = node["href"]&.downcase || ""
              href = "//" + href if href.match?(/^(www\.)?furaffinity\.net/)
              uri = URI.parse(href)
              uri.host ||= "www.furaffinity.net"
              path = uri.path
              fa_host_matcher = /^(www\.)?furaffinity\.net$/
              fa_post_matcher = %r{^/view/(\d+)/?$}
              fa_user_matcher = %r{^/user/(\w+)/?$}
              if fa_host_matcher.match?(uri.host) && path
                if (match = path.match(fa_post_matcher))
                  fa_id = match[1].to_i
                  fa_post_id_to_node[fa_id] = node
                  next { node_whitelist: [node] }
                elsif (match = path.match(fa_user_matcher))
                  fa_url_name = match[1]
                  fa_user_url_name_to_node[fa_url_name] = node
                  next { node_whitelist: [node] }
                end
              end
              # Don't allow any other links
              node.replace(node.children)
            end
          end,
        ],
      )
    fragment = Nokogiri::HTML5.fragment(sanitizer.send(:preprocess, html))
    sanitizer.node!(fragment)
    if fa_post_id_to_node.any?
      # Batch load the referenced posts by fa_id so their titles can be shown
      posts_by_id =
        Domain::Fa::Post.where(fa_id: fa_post_id_to_node.keys).index_by(&:fa_id)
      # Replace the link text with post titles if available
      fa_post_id_to_node.each do |fa_id, node|
        if (post = posts_by_id[fa_id])
          node.replace(
            Nokogiri::HTML5.fragment(
              render(
                partial: "domain/fa/posts/description_inline_link_fa_post",
                locals: {
                  post: post,
                },
              ),
            ),
          )
        else
          node.replace(node.children)
        end
      end
    end
    if fa_user_url_name_to_node.any?
      # Batch load the referenced users (and their avatars) by url_name
      users_by_url_name =
        Domain::Fa::User
          .where(url_name: fa_user_url_name_to_node.keys)
          .includes(:avatar)
          .index_by(&:url_name)
      # Replace the link text with user names if available
      fa_user_url_name_to_node.each do |fa_url_name, node|
        if (user = users_by_url_name[fa_url_name])
          node.replace(
            Nokogiri::HTML5.fragment(
              render(
                partial: "domain/fa/posts/description_inline_link_fa_user",
                locals: {
                  user: user,
                },
              ),
            ),
          )
        else
          node.replace(node.children)
        end
      end
    end
    raw fragment.to_html(preserve_newline: true)
  end
end