api improvements, color logger spec
@@ -3,7 +3,7 @@ class ApplicationController < ActionController::Base
   API_TOKENS = {
     "a4eb03ac-b33c-439c-9b51-a834d1c5cf48" => "dymk",
-    "56cc81fe-8c00-4436-8981-4580eab00e66" => "targus",
+    "56cc81fe-8c00-4436-8981-4580eab00e66" => "taargus",
     "a36f0d68-5262-4b62-9e2d-dfe648d70f35" => "vilk",
     "9c38727f-f11d-41de-b775-0effd86d520c" => "xjal",
     "e38c568f-a24d-4f26-87f0-dfcd898a359d" => "fyacin",
@@ -9,7 +9,12 @@ class Domain::Fa::ApiController < ApplicationController
     jobs_async = GoodJob::Job.
       select(:id, :queue_name, :serialized_params).
       where(queue_name: "manual", finished_at: nil).
-      load_async
+      where(
+        [
+          "(serialized_params->'exception_executions' = '{}')",
+          "(serialized_params->'exception_executions' is null)",
+        ].join(" OR ")
+      ).load_async

     users_async = Domain::Fa::User.
       where(url_name: url_names).
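Note (not part of the commit): the added where clause filters on GoodJob's serialized_params JSONB column so that only manual-queue jobs with no recorded exception executions are counted. A minimal standalone sketch, assuming a stock good_job install:

    # Sketch only - same shape as the hunk above, outside the controller.
    jobs = GoodJob::Job.
      where(queue_name: "manual", finished_at: nil).
      where("serialized_params->'exception_executions' = '{}' OR " \
            "serialized_params->'exception_executions' IS NULL")

    # load_async (Rails 7+) starts the query on a background thread pool;
    # the records are awaited the first time the relation is enumerated.
    jobs.load_async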
@@ -172,25 +177,28 @@ class Domain::Fa::ApiController < ApplicationController
       return
     end

    ReduxApplicationRecord.connection.execute("SET ivfflat.probes = 32")
    all_similar_users = similar_users_list(user.similar_users_by_followed)

    not_followed_similar_users = nil
    if exclude_url_name
-      exclude_user = Domain::Fa::User.find_by(url_name: exclude_url_name)
-      not_followed_similar_users = if exclude_user.nil?
+      exclude_folowed_by_user = Domain::Fa::User.find_by(url_name: exclude_url_name)
+      not_followed_similar_users = if exclude_folowed_by_user.nil?
        # TODO - enqueue a manual UserFollowsJob for this user and have client
        # re-try the request later
        {
          error: "user '#{exclude_url_name}' not found",
          error_type: "exclude_user_not_found",
        }
-      elsif exclude_user.scanned_follows_at.nil?
+      elsif exclude_folowed_by_user.scanned_follows_at.nil?
        {
          error: "user '#{exclude_url_name}' followers list hasn't been scanned",
          error_type: "exclude_user_not_scanned",
        }
      else
-        similar_users_list(user.similar_users_by_followed(exclude_user))
+        similar_users_list(user.similar_users_by_followed(
+          exclude_followed_by: exclude_folowed_by_user,
+        ))
      end
    end
@@ -207,12 +215,19 @@ class Domain::Fa::ApiController < ApplicationController
    # TODO - go re-scan all user pages and extract user thumbnail
    profile_thumb_url = nil
    begin
-      profile_page_response = get_best_user_page_http_log_entry_for(user.url_name)
+      profile_page_response = get_best_user_page_http_log_entry_for(user)
      if profile_page_response
        parser = Domain::Fa::Parser::Page.new(profile_page_response.contents, require_logged_in: false)
        profile_thumb_url = parser.user_page.profile_thumb_url
      else
-        if user.scanned_page_at.nil?
+        if user.due_for_follows_scan?
+          Domain::Fa::Job::UserFollowsJob.set({
+            priority: -20,
+          }).perform_later({
+            user: user,
+          })
+        end
+        if user.due_for_page_scan?
          Domain::Fa::Job::UserPageJob.set({
            priority: -20,
          }).perform_later({
@@ -233,7 +248,7 @@ class Domain::Fa::ApiController < ApplicationController
      end
    end

-  def get_best_user_page_http_log_entry_for(url_name)
+  def get_best_user_page_http_log_entry_for(user)
    for_path = proc { |uri_path|
      HttpLogEntry.where(
        uri_scheme: "https",
@@ -242,9 +257,15 @@ class Domain::Fa::ApiController < ApplicationController
      ).order(created_at: :desc).first&.response
    }

+    for_hle_id = proc { |hle_id|
+      hle_id && HttpLogEntry.find_by(id: hle_id)&.response
+    }
+
    # older versions don't end in a trailing slash
-    for_path.call("/user/#{url_name}/") ||
-      for_path.call("/user/#{url_name}")
+    hle_id = user.log_entry_detail && user.log_entry_detail["last_user_page_id"]
+    for_hle_id.call(hle_id) ||
+      for_path.call("/user/#{user.url_name}/") ||
+      for_path.call("/user/#{user.url_name}")
  end

  def defer_post_scan(post, fa_id)
@@ -1,43 +1,44 @@
 class ColorLogger
   @quiet = Concurrent::ThreadLocalVar.new { 0 }

-  def self.quiet(&block)
+  def self.quiet
     @quiet.value += 1
-    block.call
+    yield
   ensure
     @quiet.value -= 1
   end

-  def self.make(sink, instance)
-    # clean up common class names
-    klass_name = instance.class.name.dup
-    klass_name.delete_prefix!("Domain::")
-    if klass_name.start_with?("Fa::Scraper::")
-      klass_name.delete_prefix!("Fa::Scraper::")
-      klass_name = "Fa::#{klass_name}"
+  def self.unquiet
+    saved = @quiet.value
+    @quiet.value = 0
+    yield
+  ensure
+    @quiet.value = saved
+  end
+
+  def self.make(sink, instance = nil)
+    if instance
+      klass_name = klass_name_from_instance(instance)
+    else
+      klass_name = "(Anonymous)"
     end

-    if klass_name.start_with?("Fa::Job::")
-      klass_name.delete_prefix!("Fa::Job::")
-      klass_name.delete_suffix!("Job")
-      klass_name = "Fa::#{klass_name}"
-    end
-
-    if klass_name.start_with?("Twitter::Job::")
-      klass_name.delete_prefix!("Twitter::Job::")
-      klass_name.delete_suffix!("Job")
-      klass_name = "Twitter::#{klass_name}"
-    end
-
-    Logger.new(sink).tap do |logger|
+    logger = Logger.new(sink).tap do |logger|
       # logger.level = Logger::ERROR if Rails.env.test?

       logger.instance_variable_set("@logger_prefix", "")
+      logger.instance_variable_set("@sink", sink)

       def logger.prefix=(p)
         @logger_prefix = p
       end
       def logger.prefix
         @logger_prefix || ""
       end
+      def logger.sink
+        @sink
+      end

       logger.formatter = proc do |severity, datetime, progname, msg|
         color = case severity
@@ -59,5 +60,30 @@ class ColorLogger
         end
       end
     end
+
+    ActiveSupport::TaggedLogging.new(logger)
   end
+
+  def self.klass_name_from_instance(instance)
+    klass_name = instance.class.name.dup
+    klass_name.delete_prefix!("Domain::")
+    if klass_name.start_with?("Fa::Scraper::")
+      klass_name.delete_prefix!("Fa::Scraper::")
+      klass_name = "Fa::#{klass_name}"
+    end
+
+    if klass_name.start_with?("Fa::Job::")
+      klass_name.delete_prefix!("Fa::Job::")
+      klass_name.delete_suffix!("Job")
+      klass_name = "Fa::#{klass_name}"
+    end
+
+    if klass_name.start_with?("Twitter::Job::")
+      klass_name.delete_prefix!("Twitter::Job::")
+      klass_name.delete_suffix!("Job")
+      klass_name = "Twitter::#{klass_name}"
+    end
+
+    klass_name
+  end
 end
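A minimal usage sketch of the quiet/unquiet pair added above (not part of the commit; the logger and block bodies are hypothetical). The counter is a Concurrent::ThreadLocalVar, so suppression is scoped per thread and the two helpers can nest:

    logger = ColorLogger.make($stdout)   # no instance, so the prefix is "(Anonymous)"

    ColorLogger.quiet do
      logger.info("suppressed while the quiet depth is above zero")

      ColorLogger.unquiet do
        # unquiet saves the current depth and resets it to zero for the block
        logger.info("emitted even inside an enclosing quiet block")
      end
    end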
@@ -25,8 +25,8 @@ class Domain::Fa::FactorCalculator

   def write_factors
     total = 0
-    for_followed_width = Domain::Fa::UserFactor.columns_hash["for_followed"].sql_type_metadata.limit
-    for_follower_width = Domain::Fa::UserFactor.columns_hash["for_follower"].sql_type_metadata.limit
+    for_followed_width = Domain::Fa::UserFactor.native_factor_width("for_followed")
+    for_follower_width = Domain::Fa::UserFactor.native_factor_width("for_follower")

     measure("#{"for_followed".bold} - done") do
       write_factors_col(:item_ids, :item_factors, :for_followed)
@@ -1,11 +1,31 @@
 require "active_support/concern"

 module HasColorLogger
-  extend ActiveSupport::Concern
-
-  included do
-    def logger
-      @logger ||= ColorLogger.make($stdout, self)
-    end
-  end
+  def self.[](sink)
+    Module.new do
+      extend ActiveSupport::Concern
+
+      @__color_logger_sink = sink
+      def self.name
+        "HasColorLogger[#{@__color_logger_sink.inspect}]"
+      end
+
+      included do
+        @__color_logger_sink = sink
+
+        def logger
+          @logger ||= ColorLogger.make(
+            self.class.instance_variable_get("@__color_logger_sink"),
+            self
+          )
+        end
+      end
+    end
+  end
+
+  # by default, write to stdout
+  extend ActiveSupport::Concern
+  included do
+    include HasColorLogger[$stdout]
+  end
 end
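The rework above turns HasColorLogger into a parameterized concern: HasColorLogger[sink] builds an anonymous module that remembers its sink, while a bare include keeps the stdout default. A small sketch (not part of the commit; the class name and sink are placeholders, mirroring the spec below):

    captured = StringIO.new

    worker = Class.new do
      def self.name
        "Worker"
      end

      include HasColorLogger[captured]   # sink is injected at include time
    end.new

    worker.logger.info("hello")   # log line is prefixed with [Worker] and written to `captured`
    worker.logger.sink            # => captured (exposed by the new def logger.sink)

    # A bare `include HasColorLogger` is equivalent to include HasColorLogger[$stdout].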
@@ -90,30 +90,45 @@ class Domain::Fa::User < ReduxApplicationRecord
     name.delete("_").delete("!").downcase
   end

-  # users similar to this one, based on who this user follows
-  def similar_users_by_follower
-    similar_users_by(:for_follower, nil)
+  # TODO - write method for getting suggested users to follow
+  # based on this user
+  # something like:
+  # UserFactor.nearest_neighbors(
+  #   :for_followed,
+  #   self.disco.for_follows,
+  #   # should this be euclidean? idk, need to test
+  #   distance: "inner_product"
+  # )
+  # exclude self.follows.pluck(:followed_id)
+
+  # find users similar to 'self' based on who 'self' follows
+  def similar_users_by_follower(exclude_followed_by: nil)
+    similar_users_by(:for_follower, exclude_followed_by)
   end

-  # users similar to this one, based on who follows this user
-  def similar_users_by_followed(exclude_already_followed = nil)
-    similar_users_by(:for_followed, exclude_already_followed)
+  # find users similar to 'self one based on who follows 'self'
+  def similar_users_by_followed(exclude_followed_by: nil)
+    similar_users_by(:for_followed, exclude_followed_by)
   end

   private

-  def similar_users_by(factor_col, exclude_already_followed)
+  def similar_users_by(factor_col, exclude_followed_by)
     query = self.
       disco.
       nearest_neighbors(factor_col, distance: "euclidean")

-    if exclude_already_followed
-      query = query.where.not(user_id: exclude_already_followed.follows.select(:followed_id))
+    if exclude_followed_by
+      query = query.where.not(user_id: exclude_followed_by.follows.select(:followed_id))
     end

     users_from_disco_query(query)
   end

   def users_from_disco_query(disco_query)
     Domain::Fa::User.
-      select("domain_fa_users.*", query.select_values.last).
+      select("domain_fa_users.*", disco_query.select_values.last).
       joins(:disco).
-      merge(query.reselect(:user_id))
+      merge(disco_query.reselect(:user_id))
   end
 end
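With the keyword form above, the exclusion user is passed explicitly instead of positionally. A small sketch of the intended call site (not part of the commit; the url_names are placeholders):

    artist = Domain::Fa::User.find_by(url_name: "some_artist")
    viewer = Domain::Fa::User.find_by(url_name: "some_viewer")

    # users similar to `artist` (based on who follows them), minus anyone
    # `viewer` already follows
    suggestions = artist.similar_users_by_followed(exclude_followed_by: viewer)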
@@ -3,7 +3,11 @@ class Domain::Fa::UserFactor < ReduxApplicationRecord

   belongs_to :user, class_name: "::Domain::Fa::User"

-  FACTORS_WIDTHS = 8
+  FACTORS_WIDTHS = 16
   has_neighbors :for_follower
   has_neighbors :for_followed
+
+  def self.native_factor_width(column)
+    self.columns_hash[column.to_s].sql_type_metadata.limit
+  end
 end
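native_factor_width reads the declared width straight out of the column metadata, which is what FactorCalculator's write_factors now calls instead of digging through columns_hash directly. A sketch (not part of the commit; the "vector width" reading of limit is an assumption about the has_neighbors-backed column):

    # Returns the limit recorded for the column's SQL type, i.e. the declared
    # factor width of the column behind has_neighbors.
    Domain::Fa::UserFactor.native_factor_width(:for_followed)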
spec/lib/color_logger_spec.rb (new file, 78 lines)
@@ -0,0 +1,78 @@
+require "rails_helper"
+
+describe ColorLogger do
+  let(:sink) { StringIO.new }
+  let(:logger) { ColorLogger.make(sink) }
+  let(:prefix) { "[#{"(Anonymous)".light_blue}] " }
+  let(:read_sink) {
+    proc {
+      sink.rewind
+      sink_contents = sink.read
+      sink.rewind
+      sink.truncate(0)
+      sink_contents
+    }
+  }
+
+  it "logs contents with color", quiet: false do
+    logger.info("foo")
+    expect(read_sink.call).to eq("#{prefix} foo\n")
+
+    line1 = "bar!"
+    logger.info(line1)
+    line2 = "#{"yes".red}, #{"no".blue}"
+    logger.info(line2)
+
+    expect(read_sink.call).to eq(
+      [
+        "#{prefix} #{line1}\n",
+        "#{prefix} #{line2}\n",
+      ].join("")
+    )
+  end
+
+  it "respects the 'quiet' wrapper", quiet: false do
+    ColorLogger.quiet do
+      logger.info("don't log me")
+    end
+    logger.info("but do log this")
+    expect(read_sink.call).to eq("#{prefix} but do log this\n")
+  end
+
+  it "by default, rspec logs are quiet" do
+    logger.info("don't log me")
+    expect(read_sink.call).to eq("")
+  end
+
+  it "uses stdout by default when included" do
+    inst = Class.new do
+      def self.name
+        "TestClass"
+      end
+      include HasColorLogger
+    end.new
+    expect(inst.logger.sink).to be($stdout)
+  end
+
+  it "can have other sink injected when including HasColorLogger" do
+    s = sink
+    inst = Class.new do
+      def self.name
+        "TestClass"
+      end
+      include HasColorLogger[s]
+    end.new
+    expect(inst.logger.sink).to be(sink)
+
+    # quiet by default
+    inst.logger.info("don't log")
+    expect(read_sink.call).to eq("")
+
+    # unquiet works
+    ColorLogger.unquiet do
+      inst.logger.info("foo bar")
+      # right class name is used
+      expect(read_sink.call).to eq("[#{"TestClass".light_blue}] foo bar\n")
+    end
+  end
+end
@@ -24,7 +24,7 @@ describe Domain::Fa::PostEnqueuer do
     )
   end

-  it "works" do
+  it "enqueues posts" do
    post_fa_ids = posts.map(&:fa_id)

    enqueuer.run_once
@@ -55,11 +55,11 @@ describe Domain::Fa::PostEnqueuer do

    # post [1] should be filtered out
    enqueuer.run_once
-    expect(enqueued_fa_ids.call).to eq([
-      post_fa_ids[1],
+    expect(enqueued_fa_ids.call[0]).to eq(post_fa_ids[1])
+    expect(enqueued_fa_ids.call[1..].shuffle).to contain_exactly(
      post_fa_ids[0],
      post_fa_ids[2],
      post_fa_ids[3],
-    ])
+    )
  end
end
@@ -11,7 +11,7 @@
 // @grant unsafeWindow
 // @connect scraper.local
 // ==/UserScript==
-'use strict';
+"use strict";

 function fa() {
   function setupNavbar() {
@@ -34,7 +34,7 @@ function fa() {
|
||||
|
||||
function makeLargeStatusNode(opts = {}) {
|
||||
if (opts.type == null) {
|
||||
opts.type = 'span'
|
||||
opts.type = "span";
|
||||
}
|
||||
if (opts.smaller == null) {
|
||||
opts.smaller = true;
|
||||
@@ -44,7 +44,8 @@ function fa() {
|
||||
}
|
||||
|
||||
const statusNode = document.createElement(opts.type);
|
||||
statusNode.style.cssText = "margin-left: 5px; color: #b7b7b7!important; display: inline";
|
||||
statusNode.style.cssText =
|
||||
"margin-left: 5px; color: #b7b7b7!important; display: inline";
|
||||
if (opts.smaller) {
|
||||
statusNode.style.fontSize = "80%";
|
||||
}
|
||||
@@ -83,14 +84,13 @@ function fa() {
|
||||
figure.querySelector("b").style.height = "auto";
|
||||
statusNode.style.cssText = "position:relative;bottom:2px;";
|
||||
captionLabel.after(statusNode);
|
||||
}
|
||||
else if (caption) {
|
||||
} else if (caption) {
|
||||
statusNode.style.cssText = "position:relative;bottom:2px;";
|
||||
caption.appendChild(statusNode);
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
const figcaption = document.createElement("div");
|
||||
figcaption.style.cssText = "display: block !important; position: absolute; bottom: 0px; font-size: 10px; width:100%";
|
||||
figcaption.style.cssText =
|
||||
"display: block !important; position: absolute; bottom: 0px; font-size: 10px; width:100%";
|
||||
figcaption.appendChild(statusNode);
|
||||
figure.appendChild(figcaption);
|
||||
figure.style.height = `calc(${figure.style.height} + 20px)`;
|
||||
@@ -99,168 +99,192 @@ function fa() {
|
||||
return { faId, statusNode };
|
||||
});
|
||||
|
||||
const featuredElem = [...document.querySelectorAll(".userpage-featured-title h2")]
|
||||
.map(elem => {
|
||||
const featuredElem = [
|
||||
...document.querySelectorAll(".userpage-featured-title h2"),
|
||||
]
|
||||
.map((elem) => {
|
||||
// skip if it's a dynamically changing preview, `submissionDataElems` handles those
|
||||
if (elem.classList.contains("preview_title")) { return null; }
|
||||
if (elem.classList.contains("preview_title")) {
|
||||
return null;
|
||||
}
|
||||
const link = elem.querySelector("a");
|
||||
if (!link) { return null; }
|
||||
if (!link) {
|
||||
return null;
|
||||
}
|
||||
const faId = faIdFromViewHref(link.href);
|
||||
if (!faId) { return null; }
|
||||
if (!faId) {
|
||||
return null;
|
||||
}
|
||||
const statusNode = makeLargeStatusNode();
|
||||
elem.appendChild(statusNode);
|
||||
|
||||
return {
|
||||
faId,
|
||||
statusNode
|
||||
statusNode,
|
||||
};
|
||||
}).filter(isNotNull);
|
||||
})
|
||||
.filter(isNotNull);
|
||||
|
||||
let submissionDataElems = []
|
||||
let submissionDataElems = [];
|
||||
if (unsafeWindow.submission_data) {
|
||||
submissionDataElems = Object.entries(unsafeWindow.submission_data)
|
||||
.map(([fa_id, _]) => {
|
||||
const statusNode = makeLargeStatusNode()
|
||||
submissionDataElems = Object.entries(unsafeWindow.submission_data).map(
|
||||
([fa_id, _]) => {
|
||||
const statusNode = makeLargeStatusNode();
|
||||
return {
|
||||
faId: fa_id,
|
||||
statusNode
|
||||
}
|
||||
});
|
||||
statusNode,
|
||||
};
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// watch for changes to preview user and swap out status node when that happens
|
||||
[...document.querySelectorAll("h2.preview_title")].forEach((previewTitleNode) => {
|
||||
const swapOutNode = document.createElement("span");
|
||||
swapOutNode.classList = "swapper";
|
||||
previewTitleNode.appendChild(swapOutNode);
|
||||
const previewTitleLink = previewTitleNode.querySelector("a");
|
||||
[...document.querySelectorAll("h2.preview_title")].forEach(
|
||||
(previewTitleNode) => {
|
||||
const swapOutNode = document.createElement("span");
|
||||
swapOutNode.classList = "swapper";
|
||||
previewTitleNode.appendChild(swapOutNode);
|
||||
const previewTitleLink = previewTitleNode.querySelector("a");
|
||||
|
||||
const observerCb = () => {
|
||||
const previewFaId = faIdFromViewHref(previewTitleLink.href);
|
||||
swapOutNode.innerHTML = "";
|
||||
const currentSubmissionElem = submissionDataElems.find(({ faId }) => faId == previewFaId)
|
||||
if (currentSubmissionElem) {
|
||||
swapOutNode.appendChild(currentSubmissionElem.statusNode);
|
||||
}
|
||||
};
|
||||
const observer = new MutationObserver(observerCb);
|
||||
observer.observe(previewTitleLink, { childList: true, subtree: true });
|
||||
observerCb();
|
||||
});
|
||||
const observerCb = () => {
|
||||
const previewFaId = faIdFromViewHref(previewTitleLink.href);
|
||||
swapOutNode.innerHTML = "";
|
||||
const currentSubmissionElem = submissionDataElems.find(
|
||||
({ faId }) => faId == previewFaId
|
||||
);
|
||||
if (currentSubmissionElem) {
|
||||
swapOutNode.appendChild(currentSubmissionElem.statusNode);
|
||||
}
|
||||
};
|
||||
const observer = new MutationObserver(observerCb);
|
||||
observer.observe(previewTitleLink, { childList: true, subtree: true });
|
||||
observerCb();
|
||||
}
|
||||
);
|
||||
|
||||
// /view/<faId>/ page elements
|
||||
let primaryViewPageElems = []
|
||||
let primaryViewPageElems = [];
|
||||
let faId = faIdFromViewHref(window.location.href);
|
||||
if (faId) {
|
||||
primaryViewPageElems = [...document.querySelectorAll(".submission-title h2")]
|
||||
.map(elem => {
|
||||
elem.querySelector("p").style.display = "inline";
|
||||
const statusNode = makeLargeStatusNode({ type: "h3" })
|
||||
elem.appendChild(statusNode);
|
||||
return {
|
||||
faId,
|
||||
statusNode
|
||||
}
|
||||
});
|
||||
primaryViewPageElems = [
|
||||
...document.querySelectorAll(".submission-title h2"),
|
||||
].map((elem) => {
|
||||
elem.querySelector("p").style.display = "inline";
|
||||
const statusNode = makeLargeStatusNode({ type: "h3" });
|
||||
elem.appendChild(statusNode);
|
||||
return {
|
||||
faId,
|
||||
statusNode,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
return [...galleryFigures, ...submissionDataElems, ...featuredElem, ...primaryViewPageElems];
|
||||
return [
|
||||
...galleryFigures,
|
||||
...submissionDataElems,
|
||||
...featuredElem,
|
||||
...primaryViewPageElems,
|
||||
];
|
||||
}
|
||||
|
||||
function gatherUserElements() {
|
||||
// if on a gallery / browse page, the creator links from those posts
|
||||
const userSubmissionLinks = galleryFigureElements().map((figure) => {
|
||||
const userLinkElem = [...figure.querySelectorAll("figcaption a")]
|
||||
.map(elem => ({
|
||||
elem: elem,
|
||||
urlName: urlNameFromUserHref(elem.href)
|
||||
}))
|
||||
.filter(({ elem, urlName }) => urlName != null)[0];
|
||||
const userSubmissionLinks = galleryFigureElements()
|
||||
.map((figure) => {
|
||||
const userLinkElem = [...figure.querySelectorAll("figcaption a")]
|
||||
.map((elem) => ({
|
||||
elem: elem,
|
||||
urlName: urlNameFromUserHref(elem.href),
|
||||
}))
|
||||
.filter(({ elem, urlName }) => urlName != null)[0];
|
||||
|
||||
if (userLinkElem == null) { return null; }
|
||||
figure.querySelector("u").style.cssText = "display: block";
|
||||
if (userLinkElem == null) {
|
||||
return null;
|
||||
}
|
||||
figure.querySelector("u").style.cssText = "display: block";
|
||||
|
||||
const statusNode = document.createElement("span");;
|
||||
statusNode.style.cssText = "margin-left: 2px; color: #c1c1c1";
|
||||
statusNode.innerHTML = "(...)";
|
||||
userLinkElem.elem.after(statusNode);
|
||||
const statusNode = document.createElement("span");
|
||||
statusNode.style.cssText = "margin-left: 2px; color: #c1c1c1";
|
||||
statusNode.innerHTML = "(...)";
|
||||
userLinkElem.elem.after(statusNode);
|
||||
|
||||
return {
|
||||
urlName: userLinkElem.urlName,
|
||||
shouldEnqueue: true,
|
||||
statusNode,
|
||||
}
|
||||
}).filter(isNotNull);
|
||||
return {
|
||||
urlName: userLinkElem.urlName,
|
||||
shouldEnqueue: true,
|
||||
statusNode,
|
||||
};
|
||||
})
|
||||
.filter(isNotNull);
|
||||
|
||||
// if on a /user/ page, the primary username element
|
||||
const userPageUrlName = urlNameFromUserHref(window.location.href);
|
||||
let userPageMain = userPageUrlName ? [
|
||||
...document.querySelectorAll("h1 username"),
|
||||
].map(elem => {
|
||||
const statusNode = document.createElement("span");
|
||||
statusNode.innerHTML = "(...)";
|
||||
elem.after(statusNode);
|
||||
return {
|
||||
urlName: userPageUrlName,
|
||||
shouldEnqueue: true,
|
||||
statusNode,
|
||||
}
|
||||
}) : [];
|
||||
let userPageMain = userPageUrlName
|
||||
? [...document.querySelectorAll("h1 username")].map((elem) => {
|
||||
const statusNode = document.createElement("span");
|
||||
statusNode.innerHTML = "(...)";
|
||||
elem.after(statusNode);
|
||||
return {
|
||||
urlName: userPageUrlName,
|
||||
shouldEnqueue: true,
|
||||
statusNode,
|
||||
};
|
||||
})
|
||||
: [];
|
||||
|
||||
// comments made by users on posts or user pages
|
||||
let userComments = [
|
||||
...document.querySelectorAll("comment-username a"),
|
||||
].map(elem => {
|
||||
let userComments = [...document.querySelectorAll("comment-username a")].map(
|
||||
(elem) => {
|
||||
const statusNode = makeLargeStatusNode();
|
||||
elem.after(statusNode);
|
||||
return {
|
||||
urlName: urlNameFromUserHref(elem.href),
|
||||
shouldEnqueue: true,
|
||||
statusNode,
|
||||
};
|
||||
}
|
||||
);
|
||||
|
||||
// users mentioned in post descriptions or user bios
|
||||
let iconUsernames = [
|
||||
...document.querySelectorAll("a.iconusername, a.linkusername"),
|
||||
].map((elem) => {
|
||||
const statusNode = makeLargeStatusNode();
|
||||
elem.after(statusNode);
|
||||
return {
|
||||
urlName: urlNameFromUserHref(elem.href),
|
||||
shouldEnqueue: false,
|
||||
statusNode,
|
||||
}
|
||||
});
|
||||
|
||||
// users mentioned in post descriptions or user bios
|
||||
let iconUsernames = [
|
||||
...document.querySelectorAll("a.iconusername, a.linkusername")
|
||||
].map(elem => {
|
||||
const statusNode = makeLargeStatusNode()
|
||||
elem.after(statusNode);
|
||||
return {
|
||||
urlName: urlNameFromUserHref(elem.href),
|
||||
shouldEnqueue: true,
|
||||
statusNode,
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
// watcher / watching lists on user page
|
||||
let watchersAndWatchList = [
|
||||
...document.querySelectorAll("a > span.artist_name")
|
||||
].map(elem => {
|
||||
...document.querySelectorAll("a > span.artist_name"),
|
||||
].map((elem) => {
|
||||
const link = elem.parentNode;
|
||||
const statusNode = makeLargeStatusNode();
|
||||
link.after(statusNode);
|
||||
return {
|
||||
urlName: urlNameFromUserHref(link.href),
|
||||
shouldEnqueue: false,
|
||||
shouldEnqueue: true,
|
||||
statusNode,
|
||||
};
|
||||
})
|
||||
|
||||
});
|
||||
|
||||
// users mentioned in the "hero" sections on a url page
|
||||
let submissionDataElems = []
|
||||
let submissionDataElems = [];
|
||||
|
||||
if (unsafeWindow.submission_data) {
|
||||
submissionDataElems = Object.entries(unsafeWindow.submission_data)
|
||||
.map(([_, { lower }]) => {
|
||||
submissionDataElems = Object.entries(unsafeWindow.submission_data).map(
|
||||
([_, { lower }]) => {
|
||||
const statusNode = makeLargeStatusNode();
|
||||
return {
|
||||
urlName: lower,
|
||||
shouldEnqueue: true,
|
||||
statusNode
|
||||
}
|
||||
});
|
||||
statusNode,
|
||||
};
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// watch for changes to preview user and swap out status node when that happens
|
||||
@@ -273,7 +297,9 @@ function fa() {
|
||||
const observerCb = () => {
|
||||
const previewUrlName = urlNameFromUserHref(previewUser.href);
|
||||
swapOutNode.innerHTML = "";
|
||||
const currentSubmissionElem = submissionDataElems.find(({ urlName }) => urlName == previewUrlName)
|
||||
const currentSubmissionElem = submissionDataElems.find(
|
||||
({ urlName }) => urlName == previewUrlName
|
||||
);
|
||||
if (currentSubmissionElem) {
|
||||
swapOutNode.appendChild(currentSubmissionElem.statusNode);
|
||||
}
|
||||
@@ -284,34 +310,37 @@ function fa() {
|
||||
}
|
||||
|
||||
// on a /view/ page, the name of the user
|
||||
const submissionContainerUserLinks = [...document.querySelectorAll(".submission-id-sub-container a")]
|
||||
.map(elem => {
|
||||
const statusNode = makeLargeStatusNode({ smaller: false });
|
||||
elem.after(statusNode);
|
||||
return {
|
||||
urlName: urlNameFromUserHref(elem.href),
|
||||
shouldEnqueue: true,
|
||||
statusNode
|
||||
};
|
||||
});
|
||||
const submissionContainerUserLinks = [
|
||||
...document.querySelectorAll(".submission-id-sub-container a"),
|
||||
].map((elem) => {
|
||||
const statusNode = makeLargeStatusNode({ smaller: false });
|
||||
elem.after(statusNode);
|
||||
return {
|
||||
urlName: urlNameFromUserHref(elem.href),
|
||||
shouldEnqueue: true,
|
||||
statusNode,
|
||||
};
|
||||
});
|
||||
|
||||
// on a /watchlist/by/ or /watchlist/to page
|
||||
const watchListUserLinks = [...document.querySelectorAll(".watch-list-items.watch-row a")]
|
||||
.map(elem => {
|
||||
const statusNode = makeLargeStatusNode({
|
||||
smaller: false, style: {
|
||||
display: "block",
|
||||
'margin-bottom': "5px",
|
||||
'font-size': "50%",
|
||||
}
|
||||
});
|
||||
elem.parentNode.appendChild(statusNode);
|
||||
return {
|
||||
urlName: urlNameFromUserHref(elem.href),
|
||||
shouldEnqueue: true,
|
||||
statusNode
|
||||
};
|
||||
const watchListUserLinks = [
|
||||
...document.querySelectorAll(".watch-list-items.watch-row a"),
|
||||
].map((elem) => {
|
||||
const statusNode = makeLargeStatusNode({
|
||||
smaller: false,
|
||||
style: {
|
||||
display: "block",
|
||||
"margin-bottom": "5px",
|
||||
"font-size": "50%",
|
||||
},
|
||||
});
|
||||
elem.parentNode.appendChild(statusNode);
|
||||
return {
|
||||
urlName: urlNameFromUserHref(elem.href),
|
||||
shouldEnqueue: true,
|
||||
statusNode,
|
||||
};
|
||||
});
|
||||
|
||||
return [
|
||||
...userSubmissionLinks,
|
||||
@@ -337,14 +366,16 @@ function fa() {
|
||||
|
||||
const userPageRegex = /^\/(user|gallery|scraps|favorites|journals)\/.+/;
|
||||
const match = url.pathname.match(userPageRegex);
|
||||
if (!match) { return null; }
|
||||
if (!match) {
|
||||
return null;
|
||||
}
|
||||
return url.pathname.split("/")[2];
|
||||
}
|
||||
|
||||
function faIdFromViewHref(href) {
|
||||
const viewPageRegex = /\/(view|full)\/(\d+)/;
|
||||
const match = href.match(viewPageRegex);
|
||||
const faId = match && match[2] || null;
|
||||
const faId = (match && match[2]) || null;
|
||||
if (faId) {
|
||||
return parseInt(faId);
|
||||
}
|
||||
@@ -356,8 +387,8 @@ function fa() {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (styleOpts['border-collapse'] == null) {
|
||||
styleOpts['border-collapse'] = "collapse";
|
||||
if (styleOpts["border-collapse"] == null) {
|
||||
styleOpts["border-collapse"] = "collapse";
|
||||
}
|
||||
|
||||
let table = document.createElement("table");
|
||||
@@ -371,8 +402,7 @@ function fa() {
|
||||
stats.each(({ name, value, sep, nameAlign, valueAlign }) => {
|
||||
if (name == "" && value == "") {
|
||||
tbody.innerHTML += `<tr><td>---</td><td>---</td></tr>`;
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
if (sep == null) {
|
||||
sep = ":";
|
||||
}
|
||||
@@ -398,11 +428,11 @@ function fa() {
|
||||
case 1:
|
||||
case 2:
|
||||
case 3:
|
||||
return { 'line-height': '1.0em', 'font-size': "1.0em" };
|
||||
return { "line-height": "1.0em", "font-size": "1.0em" };
|
||||
case 4:
|
||||
return { 'line-height': '0.9em', 'font-size': "0.8em" };
|
||||
return { "line-height": "0.9em", "font-size": "0.8em" };
|
||||
default:
|
||||
return { 'line-height': "0.9em", 'font-size': "0.6em" }
|
||||
return { "line-height": "0.9em", "font-size": "0.6em" };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -421,20 +451,18 @@ function fa() {
|
||||
navbarPageStatsNode,
|
||||
navbarEnqueueNode,
|
||||
navbarLiveQueueNode,
|
||||
navbarLiveEntityNode
|
||||
].forEach(node => {
|
||||
navbarLiveEntityNode,
|
||||
].forEach((node) => {
|
||||
node.style.display = "flex";
|
||||
node.style.marginRight = "5px";
|
||||
});
|
||||
|
||||
[
|
||||
navbarPageStatsNode,
|
||||
navbarEnqueueNode,
|
||||
navbarLiveQueueNode
|
||||
].forEach(node => {
|
||||
node.style.paddingRight = "5px";
|
||||
node.style.borderRight = "1px solid #d7d7d7";
|
||||
});
|
||||
[navbarPageStatsNode, navbarEnqueueNode, navbarLiveQueueNode].forEach(
|
||||
(node) => {
|
||||
node.style.paddingRight = "5px";
|
||||
node.style.borderRight = "1px solid #d7d7d7";
|
||||
}
|
||||
);
|
||||
|
||||
navbarNode.append(navbarPageStatsNode);
|
||||
navbarNode.append(navbarEnqueueNode);
|
||||
@@ -443,18 +471,18 @@ function fa() {
|
||||
|
||||
const userElements = gatherUserElements();
|
||||
const urlNames = [...new Set(userElements.map(({ urlName }) => urlName))];
|
||||
const urlNamesToEnqueue = [...new Set(userElements
|
||||
.filter(({ shouldEnqueue }) => shouldEnqueue)
|
||||
.map(({ urlName }) => urlName))
|
||||
const urlNamesToEnqueue = [
|
||||
...new Set(
|
||||
userElements
|
||||
.filter(({ shouldEnqueue }) => shouldEnqueue)
|
||||
.map(({ urlName }) => urlName)
|
||||
),
|
||||
];
|
||||
|
||||
const postElements = gatherPostElements();
|
||||
const faIds = [...new Set(postElements.map(({ faId }) => faId))];
|
||||
|
||||
function renderLiveQueueStats({
|
||||
livePostsStats,
|
||||
liveQueueStats,
|
||||
}) {
|
||||
function renderLiveQueueStats({ livePostsStats, liveQueueStats }) {
|
||||
let elemsCountsNode = document.createElement("div");
|
||||
elemsCountsNode.style.width = "100%";
|
||||
elemsCountsNode.style.height = "100%";
|
||||
@@ -466,37 +494,44 @@ function fa() {
|
||||
navbarLiveQueueNode.appendChild(elemsCountsNode);
|
||||
|
||||
const postsStatsTable = renderTable(livePostsStats, {
|
||||
...optsForNumRows(livePostsStats.length), width: "auto"
|
||||
...optsForNumRows(livePostsStats.length),
|
||||
width: "auto",
|
||||
});
|
||||
|
||||
const queueStatsTable = renderTable(liveQueueStats, {
|
||||
...optsForNumRows(liveQueueStats.length), width: "auto"
|
||||
...optsForNumRows(liveQueueStats.length),
|
||||
width: "auto",
|
||||
});
|
||||
|
||||
|
||||
postsStatsTable && elemsCountsNode.appendChild(postsStatsTable);
|
||||
queueStatsTable && elemsCountsNode.appendChild(queueStatsTable);
|
||||
}
|
||||
|
||||
function renderLiveEntityStats(liveEntityStats) {
|
||||
const liveEntityStatsTable = renderTable(liveEntityStats, {
|
||||
...optsForNumRows(liveEntityStats.length), width: "auto"
|
||||
...optsForNumRows(liveEntityStats.length),
|
||||
width: "auto",
|
||||
});
|
||||
navbarLiveEntityNode.innerHTML = "";
|
||||
liveEntityStatsTable && navbarLiveEntityNode.appendChild(liveEntityStatsTable);
|
||||
liveEntityStatsTable &&
|
||||
navbarLiveEntityNode.appendChild(liveEntityStatsTable);
|
||||
}
|
||||
|
||||
let completedEnqueue = false;
|
||||
|
||||
GM_xmlhttpRequest({
|
||||
url: 'http://scraper.local:3000/api/fa/enqueue_objects',
|
||||
url: "http://scraper.local:3000/api/fa/enqueue_objects",
|
||||
method: "POST",
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Content-Type': 'application/json'
|
||||
Accept: "application/json",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
data: JSON.stringify({ fa_ids: faIds, url_names: urlNames, url_names_to_enqueue: urlNamesToEnqueue }),
|
||||
onload: response => {
|
||||
data: JSON.stringify({
|
||||
fa_ids: faIds,
|
||||
url_names: urlNames,
|
||||
url_names_to_enqueue: urlNamesToEnqueue,
|
||||
}),
|
||||
onload: (response) => {
|
||||
console.log("response: ", response);
|
||||
completedEnqueue = true;
|
||||
|
||||
@@ -507,17 +542,19 @@ function fa() {
|
||||
} else {
|
||||
navbarLiveQueueNode.innerHTML = `<b>${response.status} enqueing</b>`;
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
function handleEnqueueResponse(jsonResponse) {
|
||||
navbarEnqueueNode.innerHTML = "";
|
||||
|
||||
const enqueueStats = Object
|
||||
.entries(jsonResponse)
|
||||
.map(([name, value]) => ({ name: name.split("_").join(" "), value }));
|
||||
const enqueueStats = Object.entries(jsonResponse).map(([name, value]) => ({
|
||||
name: name.split("_").join(" "),
|
||||
value,
|
||||
}));
|
||||
const enqueueStatsTable = renderTable(enqueueStats, {
|
||||
...optsForNumRows(enqueueStats.length), width: "auto"
|
||||
...optsForNumRows(enqueueStats.length),
|
||||
width: "auto",
|
||||
});
|
||||
|
||||
enqueueStatsTable && navbarEnqueueNode.append(enqueueStatsTable);
|
||||
@@ -525,14 +562,18 @@ function fa() {
|
||||
|
||||
function pollLiveStats() {
|
||||
GM_xmlhttpRequest({
|
||||
url: 'http://scraper.local:3000/api/fa/object_statuses',
|
||||
url: "http://scraper.local:3000/api/fa/object_statuses",
|
||||
method: "POST",
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Content-Type': 'application/json'
|
||||
Accept: "application/json",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
data: JSON.stringify({ fa_ids: faIds, url_names: urlNames, url_names_to_enqueue: urlNamesToEnqueue }),
|
||||
onload: response => {
|
||||
data: JSON.stringify({
|
||||
fa_ids: faIds,
|
||||
url_names: urlNames,
|
||||
url_names_to_enqueue: urlNamesToEnqueue,
|
||||
}),
|
||||
onload: (response) => {
|
||||
console.log("response: ", response);
|
||||
if (response.status === 200) {
|
||||
const jsonResponse = JSON.parse(response.response);
|
||||
@@ -540,8 +581,7 @@ function fa() {
|
||||
const keepPolling = handleLiveStatsResponse(jsonResponse);
|
||||
if (!completedEnqueue || keepPolling) {
|
||||
setTimeout(() => pollLiveStats(), 2500);
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
console.log("reached terminal state");
|
||||
}
|
||||
} else {
|
||||
@@ -561,23 +601,35 @@ function fa() {
|
||||
|
||||
for (const [gotFaId, postInfo] of Object.entries(jsonResponse.posts)) {
|
||||
allTerminalState = allTerminalState && postInfo.terminal_state;
|
||||
postElements.filter(({ faId }) => faId == gotFaId).forEach(({ statusNode }) => {
|
||||
statusNode.innerHTML = postInfo.state;
|
||||
postElements
|
||||
.filter(({ faId }) => faId == gotFaId)
|
||||
.forEach(({ statusNode }) => {
|
||||
statusNode.innerHTML = postInfo.state;
|
||||
|
||||
switch (postInfo.state) {
|
||||
case "not_seen": numNotSeenPosts += 1; break;
|
||||
case "ok": numOkPosts += 1; break;
|
||||
case "scanned_post": numScannedPosts += 1; break;
|
||||
case "have_file": numHaveFile += 1; break;
|
||||
}
|
||||
});
|
||||
switch (postInfo.state) {
|
||||
case "not_seen":
|
||||
numNotSeenPosts += 1;
|
||||
break;
|
||||
case "ok":
|
||||
numOkPosts += 1;
|
||||
break;
|
||||
case "scanned_post":
|
||||
numScannedPosts += 1;
|
||||
break;
|
||||
case "have_file":
|
||||
numHaveFile += 1;
|
||||
break;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
for (const [gotUrlName, userInfo] of Object.entries(jsonResponse.users)) {
|
||||
allTerminalState = allTerminalState && userInfo.terminal_state;
|
||||
userElements.filter(({ urlName }) => urlName == gotUrlName).forEach(({ statusNode }) => {
|
||||
statusNode.innerHTML = userInfo.state;
|
||||
});
|
||||
userElements
|
||||
.filter(({ urlName }) => urlName == gotUrlName)
|
||||
.forEach(({ statusNode }) => {
|
||||
statusNode.innerHTML = userInfo.state;
|
||||
});
|
||||
}
|
||||
|
||||
const livePostsStats = [
|
||||
@@ -587,13 +639,13 @@ function fa() {
|
||||
{ name: "have file", value: numHaveFile },
|
||||
];
|
||||
|
||||
let liveQueueStats = Object
|
||||
.entries(jsonResponse.queues.depths)
|
||||
.map(([queue, depth]) => ({ name: queue, value: depth }));
|
||||
let liveQueueStats = Object.entries(jsonResponse.queues.depths).map(
|
||||
([queue, depth]) => ({ name: queue, value: depth })
|
||||
);
|
||||
|
||||
liveQueueStats = [
|
||||
{ name: "total depth", value: `${jsonResponse.queues.total_depth}` },
|
||||
...liveQueueStats
|
||||
...liveQueueStats,
|
||||
];
|
||||
|
||||
while (liveQueueStats.length < 4) {
|
||||
@@ -602,19 +654,21 @@ function fa() {
|
||||
|
||||
allTerminalState &&= jsonResponse.queues.total_depth == 0;
|
||||
|
||||
let liveEntityStats = [
|
||||
{ name: "no entity", value: "", sep: '' }
|
||||
];
|
||||
let liveEntityStats = [{ name: "no entity", value: "", sep: "" }];
|
||||
|
||||
const thisPageFaId = faIdFromViewHref(window.location.href);
|
||||
const pssCommon = { sep: '', valueAlign: 'left' };
|
||||
const pssCommon = { sep: "", valueAlign: "left" };
|
||||
if (thisPageFaId != null) {
|
||||
const postData = jsonResponse.posts[thisPageFaId];
|
||||
liveEntityStats = [
|
||||
{ name: 'link', value: `<a target="_blank" href="${postData.info_url}" style="text-decoration: underline dotted">${thisPageFaId}</a>`, ...pssCommon },
|
||||
{
|
||||
name: "link",
|
||||
value: `<a target="_blank" href="${postData.info_url}" style="text-decoration: underline dotted">${thisPageFaId}</a>`,
|
||||
...pssCommon,
|
||||
},
|
||||
{ name: `seen`, value: postData.seen_at, ...pssCommon },
|
||||
{ name: `scanned`, value: postData.scanned_at, ...pssCommon },
|
||||
{ name: `downloaded`, value: postData.downloaded_at, ...pssCommon }
|
||||
{ name: `downloaded`, value: postData.downloaded_at, ...pssCommon },
|
||||
];
|
||||
}
|
||||
|
||||
@@ -622,10 +676,14 @@ function fa() {
|
||||
if (thisPageUrlName != null) {
|
||||
const userData = jsonResponse.users[thisPageUrlName];
|
||||
liveEntityStats = [
|
||||
{ name: '', value: thisPageUrlName, ...pssCommon },
|
||||
{ name: 'first seen', value: userData.created_at, ...pssCommon },
|
||||
{ name: 'page scan', value: userData.scanned_page_at, ...pssCommon },
|
||||
{ name: 'gallery scan', value: userData.scanned_gallery_at, ...pssCommon }
|
||||
{ name: "", value: thisPageUrlName, ...pssCommon },
|
||||
{ name: "first seen", value: userData.created_at, ...pssCommon },
|
||||
{ name: "page scan", value: userData.scanned_page_at, ...pssCommon },
|
||||
{
|
||||
name: "gallery scan",
|
||||
value: userData.scanned_gallery_at,
|
||||
...pssCommon,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
@@ -639,10 +697,13 @@ function fa() {
|
||||
}
|
||||
|
||||
// right off, can render the page stats table
|
||||
const pageStatsTable = renderTable([
|
||||
{ name: "page users", value: urlNames.length },
|
||||
{ name: "page posts", value: faIds.length }
|
||||
], { ...optsForNumRows(2), width: "auto" });
|
||||
const pageStatsTable = renderTable(
|
||||
[
|
||||
{ name: "page users", value: urlNames.length },
|
||||
{ name: "page posts", value: faIds.length },
|
||||
],
|
||||
{ ...optsForNumRows(2), width: "auto" }
|
||||
);
|
||||
navbarPageStatsNode.innerHTML = "";
|
||||
navbarPageStatsNode.append(pageStatsTable);
|
||||
|
||||
@@ -651,14 +712,14 @@ function fa() {
|
||||
{ name: "not seen", value: "---" },
|
||||
{ name: "ok", value: "---" },
|
||||
{ name: "scanned", value: "---" },
|
||||
{ name: "have file", value: "---" }
|
||||
{ name: "have file", value: "---" },
|
||||
],
|
||||
liveQueueStats: [
|
||||
{ name: "queue depths", value: "---" },
|
||||
{ name: "", value: "" },
|
||||
{ name: "", value: "" },
|
||||
{ name: "", value: "" },
|
||||
]
|
||||
],
|
||||
});
|
||||
|
||||
pollLiveStats();
|
||||
@@ -670,9 +731,9 @@ function twitter(mainNode) {
|
||||
}
|
||||
|
||||
let seenTweets = new Set();
|
||||
let ignoreUsers = new Set(['elonmusk']);
|
||||
let ignoreUsers = new Set(["elonmusk"]);
|
||||
let nameToStatusNodes = {};
|
||||
let nameToStatus = { 'Mangoyena': 'yes!' };
|
||||
let nameToStatus = { Mangoyena: "yes!" };
|
||||
|
||||
function observerCallback() {
|
||||
// update status nodes for all the user links
|
||||
@@ -682,14 +743,18 @@ function twitter(mainNode) {
|
||||
}
|
||||
return node;
|
||||
}
|
||||
[...document.querySelectorAll(
|
||||
"[data-testid=primaryColumn] a[role=link][tabindex='-1']:not(aria-hidden)"
|
||||
)]
|
||||
.filter(link =>
|
||||
link.hostname == 'twitter.com' &&
|
||||
!link.pathname.startsWith('/i/') &&
|
||||
!link.querySelector('img')
|
||||
).map(link => ({
|
||||
[
|
||||
...document.querySelectorAll(
|
||||
"[data-testid=primaryColumn] a[role=link][tabindex='-1']:not(aria-hidden)"
|
||||
),
|
||||
]
|
||||
.filter(
|
||||
(link) =>
|
||||
link.hostname == "twitter.com" &&
|
||||
!link.pathname.startsWith("/i/") &&
|
||||
!link.querySelector("img")
|
||||
)
|
||||
.map((link) => ({
|
||||
elem: link,
|
||||
name: link.pathname.split("/")[1],
|
||||
header: parentN(link, 7),
|
||||
@@ -698,48 +763,49 @@ function twitter(mainNode) {
|
||||
if (nameToStatusNodes[name] == null) {
|
||||
nameToStatusNodes[name] = [];
|
||||
}
|
||||
if (header.querySelector('.reduxStatusNode') == null) {
|
||||
const statusNode = document.createElement('span');
|
||||
if (header.querySelector(".reduxStatusNode") == null) {
|
||||
const statusNode = document.createElement("span");
|
||||
statusNode.style.cssText = "color:white; float: right";
|
||||
statusNode.classList = ['reduxStatusNode'];
|
||||
statusNode.classList = ["reduxStatusNode"];
|
||||
header.insertBefore(statusNode, header.children[1]);
|
||||
nameToStatusNodes[name].push(statusNode);
|
||||
}
|
||||
|
||||
const status = nameToStatus[name];
|
||||
nameToStatusNodes[name].forEach(statusNode => {
|
||||
nameToStatusNodes[name].forEach((statusNode) => {
|
||||
if (status != null) {
|
||||
statusNode.innerHTML = status;
|
||||
} else {
|
||||
statusNode.innerHTML = '(loading...)';
|
||||
statusNode.innerHTML = "(loading...)";
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
const tweets = [...document.querySelectorAll(
|
||||
"a[role=link]"
|
||||
)].filter(link =>
|
||||
link.hostname == 'twitter.com' &&
|
||||
link.pathname.includes("/status/")
|
||||
).map(link => {
|
||||
const parts = link.pathname.split("/");
|
||||
return {
|
||||
name: parts[1],
|
||||
tweetId: parts[3],
|
||||
};
|
||||
})
|
||||
const tweets = [...document.querySelectorAll("a[role=link]")]
|
||||
.filter(
|
||||
(link) =>
|
||||
link.hostname == "twitter.com" && link.pathname.includes("/status/")
|
||||
)
|
||||
.map((link) => {
|
||||
const parts = link.pathname.split("/");
|
||||
return {
|
||||
name: parts[1],
|
||||
tweetId: parts[3],
|
||||
};
|
||||
})
|
||||
.filter(({ name }) => !ignoreUsers.has(name))
|
||||
.map(JSON.stringify)
|
||||
.filter(onlyUnique);
|
||||
|
||||
let newTweets = [...tweets].filter(n => !seenTweets.has(n));
|
||||
let newTweets = [...tweets].filter((n) => !seenTweets.has(n));
|
||||
if (newTweets.length > 0) {
|
||||
newTweets
|
||||
.map(JSON.parse)
|
||||
.forEach(({ name, tweetId }) =>
|
||||
console.log('new tweet', `${name} / ${tweetId}`));
|
||||
console.log("new tweet", `${name} / ${tweetId}`)
|
||||
);
|
||||
|
||||
newTweets.forEach(tweet => seenTweets.add(tweet));
|
||||
newTweets.forEach((tweet) => seenTweets.add(tweet));
|
||||
enqueueNewTweets(newTweets);
|
||||
}
|
||||
}
|
||||
@@ -747,7 +813,7 @@ function twitter(mainNode) {
|
||||
const enqueuedTweets = new Set();
|
||||
let enqueueInProgress = false;
|
||||
function enqueueNewTweets(newTweets) {
|
||||
newTweets.forEach(tweet => enqueuedTweets.add(tweet));
|
||||
newTweets.forEach((tweet) => enqueuedTweets.add(tweet));
|
||||
|
||||
if (!enqueueInProgress && enqueuedTweets.size > 0) {
|
||||
enqueueInProgress = true;
|
||||
@@ -762,22 +828,20 @@ function twitter(mainNode) {
|
||||
console.log("enqueue tweets: ", tweets);
|
||||
|
||||
GM_xmlhttpRequest({
|
||||
url: 'http://scraper.local:3000/api/twitter/enqueue_objects',
|
||||
url: "http://scraper.local:3000/api/twitter/enqueue_objects",
|
||||
method: "POST",
|
||||
headers: {
|
||||
'Accept': 'application/json',
|
||||
'Content-Type': 'application/json'
|
||||
Accept: "application/json",
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
data: JSON.stringify({ names: tweets.map(({ name }) => name) }),
|
||||
onload: response => {
|
||||
|
||||
onload: (response) => {
|
||||
if (response.status === 200) {
|
||||
const jsonResponse = JSON.parse(response.response);
|
||||
console.log("redux json: ", jsonResponse);
|
||||
|
||||
enqueueInProgress = false;
|
||||
enqueueNewTweets([]);
|
||||
|
||||
} else {
|
||||
console.error("redux error: ", response.status, response);
|
||||
}
|
||||
@@ -794,18 +858,18 @@ function twitter(mainNode) {
|
||||
|
||||
observer.observe(mainNode, {
|
||||
childList: true,
|
||||
subtree: true
|
||||
subtree: true,
|
||||
});
|
||||
}
|
||||
startObserving();
|
||||
}
|
||||
|
||||
(function () {
|
||||
if (window.location.hostname == 'www.furaffinity.net') {
|
||||
if (window.location.hostname == "www.furaffinity.net") {
|
||||
fa();
|
||||
} else if (window.location.hostname == 'twitter.com') {
|
||||
twitter(document.querySelector('body'));
|
||||
} else if (window.location.hostname == "twitter.com") {
|
||||
twitter(document.querySelector("body"));
|
||||
} else {
|
||||
console.log("unhandled domain ", window.location.hostname);
|
||||
}
|
||||
})();
|
||||
})();
|
||||
|
||||
@@ -146,7 +146,7 @@ async function fa() {
        "Content-Type": "application/json",
      },
      onload: (response) => {
-        const json_error_codes = [403, 404, 501];
+        const json_error_codes = [403, 404, 500];
        if (response.status == 200) {
          let json = JSON.parse(response.response);
          populateRecommendations(json);