Compare: main...dymk--grap

220 commits. (The commit list rendered only abbreviated SHAs; the author, message, and date columns are empty.)
.cursorignore (new file, +7 lines)
@@ -0,0 +1,7 @@
# Add directories or file patterns to ignore during indexing (e.g. foo/ or *.csv)
config/database.yml
config/cookies/fa.yml
tmp
log
public
.bundle
.devcontainer/Dockerfile.devcontainer (new file, +99 lines)
@@ -0,0 +1,99 @@
FROM ruby:3.2.0 AS native-gems
RUN rm -f /etc/apt/apt.conf.d/docker-clean; \
    echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update && \
    apt-get install --no-install-recommends --no-install-suggests -y \
    cmake

WORKDIR /usr/src/app
RUN gem install bundler -v '2.4.5'
COPY gems gems
WORKDIR /usr/src/app/gems/xdiff-rb
RUN bundle install
RUN rake compile
WORKDIR /usr/src/app/gems/rb-bsdiff
RUN bundle install
RUN rake compile

# Primary image
FROM mcr.microsoft.com/devcontainers/ruby:1-3.2-bookworm

# apt caching & install packages
RUN rm -f /etc/apt/apt.conf.d/docker-clean; \
    echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update && \
    apt-get install --no-install-recommends --no-install-suggests -y \
    autoconf \
    build-essential \
    ca-certificates \
    curl \
    gnupg \
    iputils-ping \
    libblas-dev \
    libdb-dev \
    libffi-dev \
    libgdbm-dev \
    libgdbm6 \
    libgmp-dev \
    liblapack-dev \
    libncurses5-dev \
    libpq-dev \
    libreadline6-dev \
    libssl-dev \
    libvips42 \
    libyaml-dev \
    patch \
    rustc \
    uuid-dev \
    zlib1g-dev \
    watchman

# Install postgres 17 client
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    sudo install -d /usr/share/postgresql-common/pgdg && \
    curl -o /usr/share/postgresql-common/pgdg/apt.postgresql.org.asc --fail https://www.postgresql.org/media/keys/ACCC4CF8.asc && \
    sh -c 'echo "deb [signed-by=/usr/share/postgresql-common/pgdg/apt.postgresql.org.asc] https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' && \
    apt update && \
    apt-get install --no-install-recommends --no-install-suggests -y \
    postgresql-client-17

# Install & configure delta diff tool
RUN wget -O- https://github.com/dandavison/delta/releases/download/0.18.2/git-delta_0.18.2_amd64.deb > /tmp/git-delta.deb && \
    sudo dpkg -i /tmp/git-delta.deb && \
    rm /tmp/git-delta.deb

RUN git config --system core.pager "delta" && \
    git config --system interactive.diffFilter "delta --color-only" && \
    git config --system delta.navigate true && \
    git config --system delta.dark true && \
    git config --system delta.side-by-side true && \
    git config --system merge.conflictstyle "zdiff3" \
    git config --system core.editor "cursor --wait"

# Install native gems
COPY --from=native-gems /usr/src/app/gems/xdiff-rb /gems/xdiff-rb
COPY --from=native-gems /usr/src/app/gems/rb-bsdiff /gems/rb-bsdiff

ENV RAILS_ENV development

# [Optional] Uncomment this line to install additional gems.
RUN su vscode -c "gem install bundler -v '2.5.6'" && \
    su vscode -c "gem install rake -v '13.0.6'" && \
    su vscode -c "gem install ruby-lsp -v '0.22.1'"

# install exo
RUN su vscode -c "curl -sL https://exo.deref.io/install | bash"
ENV PATH "/home/vscode/.exo/bin:$PATH"

# install just (command runner)
RUN curl --proto '=https' --tlsv1.2 -sSf https://just.systems/install.sh | bash -s -- --to /usr/local/bin

RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && nvm install 18 && nvm use 18 && npm install -g yarn" 2>&1
ENV PATH /usr/local/share/nvm/current/bin:$PATH
.devcontainer/Dockerfile.postgres (new file, +7 lines)
@@ -0,0 +1,7 @@
FROM postgres:17

RUN apt-get update && apt-get install -y \
    postgresql-17-pgvector \
    && rm -rf /var/lib/apt/lists/*

RUN echo "CREATE EXTENSION pgvector;" >> /docker-entrypoint-initdb.d/01-pgvector.sql
.devcontainer/create-db-user.sql (new file, +5 lines)
@@ -0,0 +1,5 @@
CREATE USER vscode CREATEDB;
CREATE DATABASE vscode WITH OWNER vscode;

CREATE DATABASE redux_test WITH OWNER vscode;
CREATE DATABASE legacy_test WITH OWNER vscode;
.devcontainer/devcontainer.json (new file, +29 lines)
@@ -0,0 +1,29 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
// README at: https://github.com/devcontainers/templates/tree/main/src/ruby-rails-postgres
{
  "name": "Ruby on Rails & Postgres",
  "dockerComposeFile": "docker-compose.yml",
  "service": "app",
  "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
  "features": {
    "ghcr.io/meaningful-ooo/devcontainer-features/fish:1": {},
    "ghcr.io/nikobockerman/devcontainer-features/fish-persistent-data:2": {}
  },
  // Use 'forwardPorts' to make a list of ports inside the container available locally.
  // This can be used to network with other containers or the host.
  // "forwardPorts": [3000, 5432],
  // Use 'postCreateCommand' to run commands after the container is created.
  // "postCreateCommand": "bundle install && rake db:setup",
  "postCreateCommand": ".devcontainer/post-create.sh",
  "forwardPorts": [
    3000, // rails development
    3001, // rails staging
    9394, // prometheus exporter
    "pgadmin:8080", // pgadmin
    "grafana:3100", // grafana
    "prometheus:9090" // prometheus
  ]
  // Configure tool-specific properties.
  // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
  // "remoteUser": "root"
}
.devcontainer/docker-compose.yml (new file, +72 lines)
@@ -0,0 +1,72 @@
version: '3'

services:
  app:
    build:
      context: ..
      dockerfile: .devcontainer/Dockerfile.devcontainer
    volumes:
      - ../..:/workspaces:cached
      - ./fish-shell-conf-d:/home/vscode/.config/fish/conf.d
      - devcontainer-redux-gem-cache:/usr/local/rvm/gems
      - devcontainer-redux-blob-files:/mnt/blob_files_development
      - /tank/redux-data/blob_files_production:/mnt/blob_files_production
    environment:
      OTEL_EXPORTER_OTLP_ENDPOINT: http://otel-collector:4318
      OTEL_SERVICE_NAME: redux-scraper-dev
      OTEL_RESOURCE_ATTRIBUTES: application=redux-scraper-dev
    command: sleep infinity

  db:
    build:
      context: .
      dockerfile: Dockerfile.postgres
    restart: unless-stopped
    volumes:
      - postgres-data:/var/lib/postgresql/data
      - ./create-db-user.sql:/docker-entrypoint-initdb.d/create-db-user.sql
    environment:
      POSTGRES_USER: postgres
      POSTGRES_DB: postgres
      POSTGRES_PASSWORD: postgres

  pgadmin:
    image: dpage/pgadmin4:8.14.0
    restart: unless-stopped
    environment:
      PGADMIN_DEFAULT_EMAIL: admin@example.com
      PGADMIN_DEFAULT_PASSWORD: password
      PGADMIN_LISTEN_PORT: 8080
      PGADMIN_CONFIG_SERVER_MODE: 'False'
      PGADMIN_CONFIG_MASTER_PASSWORD_REQUIRED: 'False'

  prometheus:
    image: prom/prometheus
    command:
      - '--config.file=/etc/prometheus/prometheus.yml'
    restart: unless-stopped
    volumes:
      - ./prometheus:/etc/prometheus
      - devcontainer-redux-prometheus-data:/prometheus

  grafana:
    image: grafana/grafana
    restart: unless-stopped
    environment:
      - GF_SERVER_HTTP_PORT=3100
      - GF_USERS_ALLOW_SIGN_UP=false
      - GF_LOG_LEVEL=debug
      - GF_SERVER_ROOT_URL=http://localhost:3100/grafana/
      - GF_SERVER_SERVE_FROM_SUB_PATH=false
      - GF_AUTH_PROXY_ENABLED=true
      - GF_AUTH_PROXY_HEADER_NAME=X-WEBAUTH-USER
      - GF_AUTH_PROXY_HEADER_PROPERTY=username
    volumes:
      - devcontainer-redux-grafana-data:/var/lib/grafana

volumes:
  postgres-data:
  devcontainer-redux-gem-cache:
  devcontainer-redux-blob-files:
  devcontainer-redux-grafana-data:
  devcontainer-redux-prometheus-data:
.devcontainer/fish-shell-conf-d/just-completions.fish (new executable file, +1 line)
@@ -0,0 +1 @@
complete -f -c just -a (just --summary)
.devcontainer/fish-shell-conf-d/prompt.fish (new executable file, +17 lines)
@@ -0,0 +1,17 @@
function fish_prompt -d "Write out the prompt"
    # This shows up as USER@HOST /home/user/ >, with the directory colored
    # $USER and $hostname are set by fish, so you can just use them
    # instead of using `whoami` and `hostname`
    printf '%s %s%s> ' \
        (printf '%s%s%s' (set_color -d grey) $USER (set_color normal)) \
        (printf '%s%s%s' (set_color $fish_color_cwd) (prompt_pwd) (set_color normal)) \
        (fish_git_prompt)
end

function fish_right_prompt -d "Write out the right prompt"
    set_color red
    if [ $RAILS_ENV = "development" ]
        set_color -d green
    end
    printf '%s%s' (echo $RAILS_ENV) (set_color normal)
end
.devcontainer/fish-shell-conf-d/utils.fish (new executable file, +27 lines)
@@ -0,0 +1,27 @@
function blob-files-dir
    if [ $RAILS_ENV = "production" ]
        echo "/mnt/blob_files_production/v1"
        return 0
    else if [ $RAILS_ENV = "development" ]
        echo "/mnt/blob_files_development/v1"
        return 0
    else
        echo "unknown RAILS_ENV: $RAILS_ENV" >&2
        return 1
    end
end

function blob-file-path
    set -l file_name $argv[1]
    set -l prefix (blob-files-dir || return 1)
    set -l p0 (string sub -l 2 $file_name)
    set -l p1 (string sub -s 3 -l 2 $file_name)
    set -l p2 (string sub -s 5 -l 1 $file_name)
    printf "%s/%s/%s/%s/%s" $prefix $p0 $p1 $p2 $file_name
end

function blob-files-stats
    set -l files_dir (blob-files-dir || return 1)
    printf "apparent size: %s\n" (du -sh --apparent-size $files_dir)
    printf "actual size: %s\n" (du -sh $files_dir)
end
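`blob-file-path` shards blobs into nested directories keyed by the first five characters of the file name (two characters, then two, then one). For orientation, the same scheme as a minimal Ruby sketch; the `blob_file_path` helper name and the example file name are illustrative, not from this repo:

```ruby
# Mirrors the fish helper above: shard a blob by the first 5 chars of its name.
# `root` is assumed to be one of the /mnt/blob_files_*/v1 directories.
def blob_file_path(root, file_name)
  p0 = file_name[0, 2] # chars 1-2
  p1 = file_name[2, 2] # chars 3-4
  p2 = file_name[4, 1] # char 5
  File.join(root, p0, p1, p2, file_name)
end

blob_file_path("/mnt/blob_files_development/v1", "ab12cdeadbeef")
# => "/mnt/blob_files_development/v1/ab/12/c/ab12cdeadbeef"
```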
.devcontainer/install-extensions.sh (new executable file, +20 lines)
@@ -0,0 +1,20 @@
#!/bin/bash -ex

# EDITOR_BIN="$(ls /vscode/cursor-server/bin/*/*/bin/remote-cli/cursor | head -n 1)"

# detect if either cursor or code is available, and use the first detected one
EDITOR_BIN=cursor
# EDITOR_BIN=code

function install_extension() {
    $EDITOR_BIN --install-extension "$1"
}

install_extension Shopify.ruby-extensions-pack
install_extension dbaeumer.vscode-eslint
install_extension aliariff.vscode-erb-beautify
install_extension bradlc.vscode-tailwindcss
install_extension KoichiSasada.vscode-rdbg
install_extension qwtel.sqlite-viewer
install_extension esbenp.prettier-vscode
install_extension ms-azuretools.vscode-docker
.devcontainer/post-create.sh (new executable file, +22 lines)
@@ -0,0 +1,22 @@
#!/bin/bash -ex

function mkdir_and_chmod {
    sudo mkdir -p $1
    sudo chmod 777 $1
}

mkdir_and_chmod .devcontainer/data/prometheus
mkdir_and_chmod .devcontainer/data/grafana
echo "Path: $PATH"
echo "Ruby: $(which ruby)"
echo "Gem: $(which gem)"
echo "Bundler: $(which bundler)"
echo "Rake: $(which rake)"

bundle install --jobs $(getconf _NPROCESSORS_ONLN)
rbenv rehash

bin/rails yarn:install
yarn

yarn add --dev prettier @prettier/plugin-ruby
.devcontainer/prometheus/prometheus.yml (new file, +12 lines)
@@ -0,0 +1,12 @@
global:
  scrape_interval: 15s
  evaluation_interval: 15s

scrape_configs:
  - job_name: 'prometheus'
    static_configs:
      - targets: ['prometheus:9090']

  - job_name: 'rails'
    static_configs:
      - targets: ['app:9394']
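The `rails` job above scrapes the `prometheus_exporter` process that the Procfiles later in this diff start on port 9394. A minimal sketch of publishing a custom metric through that exporter, assuming the standard `prometheus_exporter` client API; the metric name and value are illustrative:

```ruby
require "prometheus_exporter/client"

# Ships metrics to the local prometheus_exporter collector process, which
# the 'rails' scrape job above reads from app:9394. The exporter is started
# with --prefix redux_, so this should surface as redux_queue_depth.
client = PrometheusExporter::Client.default
queue_depth = client.register(:gauge, "queue_depth", "jobs waiting to run")
queue_depth.observe(42)
```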
.dockerignore (new file, +14 lines)
@@ -0,0 +1,14 @@
.git
.gitignore
log
tmp
ext
build
node_modules
profiler
.bundle
.vscode
launch.json
settings.json
*.export
.devcontainer
.env-cmdrc (new file, +15 lines)
@@ -0,0 +1,15 @@
{
  "dev": {
    "RAILS_ENV": "development",
    "FOO_BAR": "baz"
  },
  "staging": {
    "RAILS_ENV": "staging"
  },
  "production": {
    "RAILS_ENV": "production"
  },
  "worker": {
    "RAILS_ENV": "worker"
  }
}
.github/dependabot.yml (new file, +12 lines)
@@ -0,0 +1,12 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for more information:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
# https://containers.dev/guide/dependabot

version: 2
updates:
  - package-ecosystem: "devcontainers"
    directory: "/"
    schedule:
      interval: weekly
.gitignore (24 changed lines)
@@ -4,6 +4,19 @@
# or operating system, you probably want to add a global ignore instead:
#   git config --global core.excludesfile '~/.gitignore_global'

build
tmp
core
*.bundle
lib/xdiff
ext/xdiff/Makefile
ext/xdiff/xdiff

# use yarn to manage node_modules
package-lock.json

*.notes.md

# Ignore bundler config.
/.bundle

@@ -36,3 +49,14 @@

/profiler/
/flamegraph.svg
/app/assets/builds/*
!/app/assets/builds/.keep

/public/packs
/public/packs-test
/node_modules
/yarn-error.log
yarn-debug.log*
.yarn-integrity
.DS_Store
*.export
.prettierrc (new file, +15 lines)
@@ -0,0 +1,15 @@
{
  "tabWidth": 2,
  "useTabs": false,
  "trailingComma": "all",
  "arrowParens": "always",
  "singleQuote": true,
  "plugins": [
    "prettier-plugin-tailwindcss",
    "@prettier/plugin-ruby",
    "@prettier/plugin-xml",
    "@4az/prettier-plugin-html-erb"
  ],
  "xmlQuoteAttributes": "double",
  "xmlWhitespaceSensitivity": "ignore"
}
.rubocop.yml (new file, +32 lines)
@@ -0,0 +1,32 @@
# The behavior of RuboCop can be controlled via the .rubocop.yml
# configuration file. It makes it possible to enable/disable
# certain cops (checks) and to alter their behavior if they accept
# any parameters. The file can be placed either in your home
# directory or in some project directory.
#
# RuboCop will start looking for the configuration file in the directory
# where the inspected file is and continue its way up to the root directory.
#
# See https://docs.rubocop.org/rubocop/configuration

inherit_mode:
  merge:
    - Exclude

AllCops:
  NewCops: disable

Metrics/MethodLength:
  Enabled: false

Metrics/ClassLength:
  Enabled: false

Metrics/BlockLength:
  Enabled: false

Style/Documentation:
  Enabled: false

Metrics/AbcSize:
  Enabled: false
@@ -1 +1 @@
3.2.0
system
.vscode/launch.json (new file, +11 lines)
@@ -0,0 +1,11 @@
{
  "version": "0.2.0",
  "configurations": [
    {
      "type": "rdbg",
      "name": "Attach rdbg",
      "request": "attach",
      "rdbgPath": "export GEM_HOME=/usr/local/rvm/gems/default && bundle exec rdbg"
    }
  ]
}
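This launch configuration attaches to an already-running Ruby process, so that process has to open a debug session first. A minimal sketch using the `debug` gem from the Gemfile; the script itself is illustrative, not from this repo:

```ruby
# Run the target process with a debug socket open but without pausing at
# startup, then use the "Attach rdbg" configuration above to connect.
require "debug/open_nonstop"

# ... application code; add `binding.break` where execution should stop.
```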
.vscode/settings.json (new file, +59 lines)
@@ -0,0 +1,59 @@
{
  "editor.formatOnSave": true,
  "workbench.editor.titleScrollbarSizing": "large",
  "window.title": "${activeEditorMedium}${separator}${rootName}${separator}${profileName}",
  "workbench.preferredDarkColorTheme": "Spinel",
  "workbench.preferredLightColorTheme": "Spinel Light",
  "rubyLsp.formatter": "syntax_tree",
  "files.associations": {
    ".env-cmdrc": "json"
  },
  "[ruby]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[erb]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[typescript]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[javascript]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[typescriptreact]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[javascriptreact]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[json]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[jsonc]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[xml]": {
    "editor.defaultFormatter": "esbenp.prettier-vscode"
  },
  "[dockerfile]": {
    "editor.defaultFormatter": "ms-azuretools.vscode-docker"
  },
  "tailwindCSS.includeLanguages": {
    "erb": "html",
    "typescript": "javascript"
  },
  "tailwindCSS.experimental.classRegex": [
    "\\bclass:\\s*'([^']*)'",
    "\\bclass:\\s*\"([^\"]*)\"",
    "[\"'`]([^\"'`]*).*?,?\\s?"
  ],
  "editor.quickSuggestions": {
    "other": "on",
    "comments": "off",
    "strings": "on"
  },
  "tailwindCSS.experimental.configFile": "config/tailwind.config.js",
  "sqliteViewer.maxFileSize": 4000,
  "files.insertFinalNewline": true,
  "files.trimTrailingWhitespace": true
}
Dockerfile (new file, +80 lines)
@@ -0,0 +1,80 @@
FROM ruby:3.2.6 AS native-gems
RUN rm -f /etc/apt/apt.conf.d/docker-clean; \
    echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update && \
    apt-get install --no-install-recommends --no-install-suggests -y \
    cmake

WORKDIR /usr/src/app
RUN gem install bundler -v '2.5.6'
COPY gems gems
WORKDIR /usr/src/app/gems/xdiff-rb
RUN bundle _2.5.6_ install
RUN rake compile
WORKDIR /usr/src/app/gems/rb-bsdiff
RUN bundle _2.5.6_ install
RUN rake compile

# Primary image
FROM ruby:3.2.6
USER root

# apt caching & install packages
RUN rm -f /etc/apt/apt.conf.d/docker-clean; \
    echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache

RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update && \
    apt-get install --no-install-recommends --no-install-suggests -y \
    libblas-dev liblapack-dev

# preinstall gems that take a long time to install
RUN MAKE="make -j12" gem install bundler -v '2.5.6' --verbose
RUN MAKE="make -j12" gem install faiss -v '0.3.2' --verbose
RUN MAKE="make -j12" gem install rails_live_reload -v '0.3.6' --verbose
RUN bundle config --global frozen 1

# set up nodejs 18.x deb repo
RUN mkdir -p /etc/apt/keyrings && \
    curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key \
    | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && \
    echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_18.x nodistro main" \
    | tee /etc/apt/sources.list.d/nodesource.list

RUN \
    --mount=type=cache,target=/var/cache/apt,sharing=locked \
    --mount=type=cache,target=/var/lib/apt,sharing=locked \
    apt-get update && \
    apt-get install --no-install-recommends --no-install-suggests -y \
    libvips42 ca-certificates curl gnupg nodejs libpq-dev

COPY --from=native-gems /usr/src/app/gems/xdiff-rb /gems/xdiff-rb
COPY --from=native-gems /usr/src/app/gems/rb-bsdiff /gems/rb-bsdiff

WORKDIR /usr/src/app
COPY Gemfile Gemfile.lock ./
RUN bundle _2.5.6_ install

# install js dependencies
COPY package.json yarn.lock ./
RUN npm install -g yarn
RUN rails yarn:install
RUN yarn

COPY . .

# precompile assets
RUN RAILS_ENV=production bin/rails assets:precompile
RUN mkdir -p tmp/pids

# create user with id=1000 gid=1000
RUN groupadd -g 1000 app && \
    useradd -m -d /home/app -s /bin/bash -u 1000 -g 1000 app
RUN chown -R app:app /usr/src/app
USER app
CMD /bin/bash
Gemfile (138 changed lines)
@@ -1,18 +1,18 @@
source "https://rubygems.org"
git_source(:github) { |repo| "https://github.com/#{repo}.git" }

ruby "3.2.0"
ruby "3.2.6"
# ruby "3.0.3"

# Bundle edge Rails instead: gem "rails", github: "rails/rails", branch: "main"
gem "rails", "~> 7.0.4", ">= 7.0.4.2"
gem "rails", "~> 7.2"

# The original asset pipeline for Rails [https://github.com/rails/sprockets-rails]
gem "sprockets-rails"

# Use sqlite3 as the database for Active Record
gem "sqlite3", "~> 1.4"
gem "pg"
gem "sqlite3", "~> 1.4"

gem "pry"
gem "pry-stack_explorer"
@@ -20,8 +20,8 @@ gem "pry-stack_explorer"
# Use the Puma web server [https://github.com/puma/puma]
gem "puma", "~> 5.0"

# Use JavaScript with ESM import maps [https://github.com/rails/importmap-rails]
gem "importmap-rails"
# # Use JavaScript with ESM import maps [https://github.com/rails/importmap-rails]
# gem "importmap-rails"

# Hotwire's SPA-like page accelerator [https://turbo.hotwired.dev]
gem "turbo-rails"
@@ -30,7 +30,7 @@ gem "turbo-rails"
gem "stimulus-rails"

# Build JSON APIs with ease [https://github.com/rails/jbuilder]
gem "jbuilder"
gem "jbuilder", "~> 2.13"

# Use Redis adapter to run Action Cable in production
# gem "redis", "~> 4.0"
@@ -42,7 +42,7 @@ gem "jbuilder"
# gem "bcrypt", "~> 3.1.7"

# Windows does not include zoneinfo files, so bundle the tzinfo-data gem
gem "tzinfo-data", platforms: %i[ mingw mswin x64_mingw jruby ]
gem "tzinfo-data", platforms: %i[mingw mswin x64_mingw jruby]

# Reduces boot times through caching; required in config/boot.rb
gem "bootsnap", require: false
@@ -53,60 +53,120 @@ gem "bootsnap", require: false
# Use Active Storage variants [https://guides.rubyonrails.org/active_storage_overview.html#transforming-images]
# gem "image_processing", "~> 1.2"

group :development, :test do
group :development, :test, :staging do
  # See https://guides.rubyonrails.org/debugging_rails_applications.html#debugging-with-the-debug-gem
  gem "debug", platforms: %i[ mri mingw x64_mingw ]
  gem "debug", "~> 1.10", platforms: %i[mri mingw x64_mingw]
end

group :development do
group :development, :staging do
  # Use console on exceptions pages [https://github.com/rails/web-console]
  gem "htmlbeautifier"
  gem "rufo", require: false
  gem "web-console"

  # Add speed badges [https://github.com/MiniProfiler/rack-mini-profiler]
  gem "rack-mini-profiler", require: ["enable_rails_patches", "rack-mini-profiler"]
  # Speed up commands on slow machines / big apps [https://github.com/rails/spring]
  # gem "spring"
end

group :log_watcher do
  gem "curses"
  gem "listen"
  gem "concurrent-ruby-ext", require: "concurrent"
  gem "concurrent-ruby-edge", require: "concurrent-edge"
  # Add speed badges [https://github.com/MiniProfiler/rack-mini-profiler]
  gem "memory_profiler"
  gem "rack-mini-profiler",
      "~> 3.3",
      require: %w[enable_rails_patches rack-mini-profiler]
  gem "stackprof"

  # temporary ref of rails_live_reload to a commit that adds ignore patterns
  # to the Listen gem
  gem "rails_live_reload",
      git: "https://github.com/railsjazz/rails_live_reload",
      ref: "dcd3b73904594e2c5134c2f6e05954f3937a8d29"
  # git: "https://github.com/mktakuya/rails_live_reload",
  # ref: "95d7ac7c03e8c702066ed3dc9cd70a965412e2d2"
  # gem "rails_live_reload", "0.4.0"
end

group :test do
  # Use system testing [https://guides.rubyonrails.org/testing.html#system-testing]
  gem "capybara"
  gem "rspec-rails", "~> 7.0"
  gem "rails-controller-testing"
  gem "selenium-webdriver"
  gem "webdrivers"
  gem "rspec-rails"
  gem "shoulda-matchers"
  gem "factory_bot_rails"
  gem "pundit-matchers", "~> 4.0"
  gem "db-query-matchers", "~> 0.14"
end

gem "xdiff", path: "../xdiff-rb"
group :test, :development do
  gem "parallel_tests"
end

gem "xdiff", path: "/gems/xdiff-rb"

# for legacy import
gem "diffy"
gem "rb-bsdiff", path: "../rb-bsdiff"
gem "rb-bsdiff", path: "/gems/rb-bsdiff"

gem "addressable"
gem "colorize"
gem "concurrent-ruby-edge", require: "concurrent-edge"
gem "concurrent-ruby-ext", require: "concurrent"
gem "curb"
gem "daemons"
gem "discard"
gem "good_job", "~> 4.6"
gem "http-cookie"
gem "http", "~> 5.2" # For proxying requests
gem "kaminari"
gem "nokogiri"
gem "pluck_each"
gem "ripcord"
gem "ruby-prof"
gem "ruby-prof-speedscope"
gem "ruby-vips"
gem "table_print"
gem "addressable"
gem "nokogiri"
gem "http-cookie"
gem "curb"
gem "kaminari"
gem "delayed_job_active_record"
# gem "delayed-web"
gem "delayed_job_web"
gem "colorize"
gem "daemons"
gem "delayed_job_worker_pool"
gem "ripcord"
gem "influxdb-client"
gem "discard"
# gem 'cli-ui'
# gem "paper_trail"
# gem "paper_trail-hashdiff"
# gem "hashdiff"
gem "zstd-ruby"

# gem "pghero", git: "https://github.com/dymk/pghero", ref: "e314f99"
gem "pghero", "~> 3.6"
gem "pg_query", ">= 2"

gem "disco"
gem "faiss"
gem "neighbor"
gem "progressbar"

group :production, :staging do
  gem "rails_semantic_logger", "~> 4.17"
end

group :production do
  gem "sd_notify"
end

gem "rack", "~> 2.2"
gem "rack-cors"
gem "react_on_rails"
gem "sanitize", "~> 6.1"
gem "shakapacker", "~> 6.6"

group :development do
  gem "prettier_print"
  gem "syntax_tree", "~> 6.2"
end

gem "cssbundling-rails", "~> 1.4"
gem "tailwindcss-rails", "~> 3.0"

# Authentication
gem "devise", "~> 4.9"

# Authorization
gem "pundit", "~> 2.4"

# Monitoring
gem "prometheus_exporter", "~> 2.2"

gem "sorbet-static-and-runtime"
gem "tapioca", require: false, group: %i[development test]
gem "rspec-sorbet", group: [:test]
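A note on the custom groups: `:staging` is added to the development/test groups and a `:log_watcher` group is introduced. Rails only requires the groups named by `Rails.groups`, so these take effect through the standard boot line sketched below (this repo's config/application.rb is not part of the diff; shown for orientation):

```ruby
# config/application.rb (standard Rails boilerplate)
require_relative "boot"
require "rails/all"

# Rails.groups expands to [:default, Rails.env], so booting with
# RAILS_ENV=staging requires the "group :development, :staging" gems above,
# while :log_watcher stays out unless a process opts into it explicitly.
Bundler.require(*Rails.groups)
```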
Gemfile.lock (697 changed lines)
@@ -1,148 +1,217 @@
GIT
  remote: https://github.com/railsjazz/rails_live_reload
  revision: dcd3b73904594e2c5134c2f6e05954f3937a8d29
  ref: dcd3b73904594e2c5134c2f6e05954f3937a8d29
  specs:
    rails_live_reload (0.3.6)
      listen
      nio4r
      railties
      websocket-driver

PATH
  remote: ../rb-bsdiff
  remote: /gems/rb-bsdiff
  specs:
    rb-bsdiff (0.1.0)

PATH
  remote: ../xdiff-rb
  remote: /gems/xdiff-rb
  specs:
    xdiff (0.0.1)

GEM
  remote: https://rubygems.org/
  specs:
    actioncable (7.0.4.2)
      actionpack (= 7.0.4.2)
      activesupport (= 7.0.4.2)
    actioncable (7.2.2.1)
      actionpack (= 7.2.2.1)
      activesupport (= 7.2.2.1)
      nio4r (~> 2.0)
      websocket-driver (>= 0.6.1)
    actionmailbox (7.0.4.2)
      actionpack (= 7.0.4.2)
      activejob (= 7.0.4.2)
      activerecord (= 7.0.4.2)
      activestorage (= 7.0.4.2)
      activesupport (= 7.0.4.2)
      mail (>= 2.7.1)
      net-imap
      net-pop
      net-smtp
    actionmailer (7.0.4.2)
      actionpack (= 7.0.4.2)
      actionview (= 7.0.4.2)
      activejob (= 7.0.4.2)
      activesupport (= 7.0.4.2)
      mail (~> 2.5, >= 2.5.4)
      net-imap
      net-pop
      net-smtp
      rails-dom-testing (~> 2.0)
    actionpack (7.0.4.2)
      actionview (= 7.0.4.2)
      activesupport (= 7.0.4.2)
      rack (~> 2.0, >= 2.2.0)
      zeitwerk (~> 2.6)
    actionmailbox (7.2.2.1)
      actionpack (= 7.2.2.1)
      activejob (= 7.2.2.1)
      activerecord (= 7.2.2.1)
      activestorage (= 7.2.2.1)
      activesupport (= 7.2.2.1)
      mail (>= 2.8.0)
    actionmailer (7.2.2.1)
      actionpack (= 7.2.2.1)
      actionview (= 7.2.2.1)
      activejob (= 7.2.2.1)
      activesupport (= 7.2.2.1)
      mail (>= 2.8.0)
      rails-dom-testing (~> 2.2)
    actionpack (7.2.2.1)
      actionview (= 7.2.2.1)
      activesupport (= 7.2.2.1)
      nokogiri (>= 1.8.5)
      racc
      rack (>= 2.2.4, < 3.2)
      rack-session (>= 1.0.1)
      rack-test (>= 0.6.3)
      rails-dom-testing (~> 2.0)
      rails-html-sanitizer (~> 1.0, >= 1.2.0)
    actiontext (7.0.4.2)
      actionpack (= 7.0.4.2)
      activerecord (= 7.0.4.2)
      activestorage (= 7.0.4.2)
      activesupport (= 7.0.4.2)
      rails-dom-testing (~> 2.2)
      rails-html-sanitizer (~> 1.6)
      useragent (~> 0.16)
    actiontext (7.2.2.1)
      actionpack (= 7.2.2.1)
      activerecord (= 7.2.2.1)
      activestorage (= 7.2.2.1)
      activesupport (= 7.2.2.1)
      globalid (>= 0.6.0)
      nokogiri (>= 1.8.5)
    actionview (7.0.4.2)
      activesupport (= 7.0.4.2)
    actionview (7.2.2.1)
      activesupport (= 7.2.2.1)
      builder (~> 3.1)
      erubi (~> 1.4)
      rails-dom-testing (~> 2.0)
      rails-html-sanitizer (~> 1.1, >= 1.2.0)
    activejob (7.0.4.2)
      activesupport (= 7.0.4.2)
      erubi (~> 1.11)
      rails-dom-testing (~> 2.2)
      rails-html-sanitizer (~> 1.6)
    activejob (7.2.2.1)
      activesupport (= 7.2.2.1)
      globalid (>= 0.3.6)
    activemodel (7.0.4.2)
      activesupport (= 7.0.4.2)
    activerecord (7.0.4.2)
      activemodel (= 7.0.4.2)
      activesupport (= 7.0.4.2)
    activestorage (7.0.4.2)
      actionpack (= 7.0.4.2)
      activejob (= 7.0.4.2)
      activerecord (= 7.0.4.2)
      activesupport (= 7.0.4.2)
    activemodel (7.2.2.1)
      activesupport (= 7.2.2.1)
    activerecord (7.2.2.1)
      activemodel (= 7.2.2.1)
      activesupport (= 7.2.2.1)
      timeout (>= 0.4.0)
    activestorage (7.2.2.1)
      actionpack (= 7.2.2.1)
      activejob (= 7.2.2.1)
      activerecord (= 7.2.2.1)
      activesupport (= 7.2.2.1)
      marcel (~> 1.0)
      mini_mime (>= 1.1.0)
    activesupport (7.0.4.2)
      concurrent-ruby (~> 1.0, >= 1.0.2)
    activesupport (7.2.2.1)
      base64
      benchmark (>= 0.3)
      bigdecimal
      concurrent-ruby (~> 1.0, >= 1.3.1)
      connection_pool (>= 2.2.5)
      drb
      i18n (>= 1.6, < 2)
      logger (>= 1.4.2)
      minitest (>= 5.1)
      tzinfo (~> 2.0)
    addressable (2.8.1)
      public_suffix (>= 2.0.2, < 6.0)
      securerandom (>= 0.3)
      tzinfo (~> 2.0, >= 2.0.5)
    addressable (2.8.7)
      public_suffix (>= 2.0.2, < 7.0)
    base64 (0.2.0)
    bcrypt (3.1.20)
    benchmark (0.4.0)
    bigdecimal (3.1.9)
    bindex (0.8.1)
    binding_of_caller (1.0.0)
      debug_inspector (>= 0.0.1)
    bootsnap (1.16.0)
    binding_of_caller (1.0.1)
      debug_inspector (>= 1.2.0)
    bootsnap (1.18.4)
      msgpack (~> 1.2)
    builder (3.2.4)
    capybara (3.38.0)
    builder (3.3.0)
    capybara (3.40.0)
      addressable
      matrix
      mini_mime (>= 0.1.3)
      nokogiri (~> 1.8)
      nokogiri (~> 1.11)
      rack (>= 1.6.0)
      rack-test (>= 0.6.3)
      regexp_parser (>= 1.5, < 3.0)
      xpath (~> 3.2)
    coderay (1.1.3)
    colorize (0.8.1)
    concurrent-ruby (1.2.0)
    concurrent-ruby-edge (0.7.0)
      concurrent-ruby (~> 1.2.0)
    concurrent-ruby-ext (1.2.0)
      concurrent-ruby (= 1.2.0)
    colorize (1.1.0)
    concurrent-ruby (1.3.4)
    concurrent-ruby-edge (0.7.1)
      concurrent-ruby (~> 1.3)
    concurrent-ruby-ext (1.3.4)
      concurrent-ruby (= 1.3.4)
    connection_pool (2.4.1)
    crass (1.0.6)
    curb (1.0.5)
    curses (1.4.4)
    daemons (1.4.1)
    date (3.3.3)
    debug (1.7.1)
      irb (>= 1.5.0)
      reline (>= 0.3.1)
    debug_inspector (1.1.0)
    delayed_job (4.1.11)
      activesupport (>= 3.0, < 8.0)
    delayed_job_active_record (4.1.7)
      activerecord (>= 3.0, < 8.0)
      delayed_job (>= 3.0, < 5)
    delayed_job_web (1.4.4)
      activerecord (> 3.0.0)
      delayed_job (> 2.0.3)
      rack-protection (>= 1.5.5)
      sinatra (>= 1.4.4)
    delayed_job_worker_pool (1.0.0)
      delayed_job (>= 3.0, < 4.2)
    diff-lcs (1.5.0)
    diffy (3.4.2)
    discard (1.2.1)
      activerecord (>= 4.2, < 8)
    domain_name (0.5.20190701)
      unf (>= 0.0.5, < 1.0.0)
    erubi (1.12.0)
    ffi (1.15.5)
    globalid (1.1.0)
      activesupport (>= 5.0)
    http-cookie (1.0.5)
      domain_name (~> 0.5)
    i18n (1.12.0)
      concurrent-ruby (~> 1.0)
    importmap-rails (1.1.5)
      actionpack (>= 6.0.0)
    cssbundling-rails (1.4.1)
      railties (>= 6.0.0)
    influxdb-client (2.9.0)
    io-console (0.6.0)
    irb (1.6.2)
      reline (>= 0.3.0)
    jbuilder (2.11.5)
    curb (1.0.6)
    daemons (1.4.1)
    date (3.4.1)
    db-query-matchers (0.14.0)
      activesupport (>= 4.0, < 8.1)
      rspec (>= 3.0)
    debug (1.10.0)
      irb (~> 1.10)
      reline (>= 0.3.8)
    debug_inspector (1.2.0)
    devise (4.9.4)
      bcrypt (~> 3.0)
      orm_adapter (~> 0.1)
      railties (>= 4.1.0)
      responders
      warden (~> 1.2.3)
    diff-lcs (1.5.1)
    diffy (3.4.3)
    discard (1.4.0)
      activerecord (>= 4.2, < 9.0)
    disco (0.5.1)
      libmf (>= 0.4)
      numo-narray (>= 0.9.2)
    domain_name (0.6.20240107)
    drb (2.2.1)
    erubi (1.13.1)
    et-orbi (1.2.11)
      tzinfo
    execjs (2.10.0)
    factory_bot (6.5.0)
      activesupport (>= 5.0.0)
    factory_bot_rails (6.4.4)
      factory_bot (~> 6.5)
      railties (>= 5.0.0)
    faiss (0.3.2)
      numo-narray
      rice (>= 4.0.2)
    ffi (1.17.1-aarch64-linux-gnu)
    ffi (1.17.1-aarch64-linux-musl)
    ffi (1.17.1-arm64-darwin)
    ffi (1.17.1-x86_64-darwin)
    ffi (1.17.1-x86_64-linux-gnu)
    ffi (1.17.1-x86_64-linux-musl)
    ffi-compiler (1.3.2)
      ffi (>= 1.15.5)
      rake
    fugit (1.11.1)
      et-orbi (~> 1, >= 1.2.11)
      raabro (~> 1.4)
    globalid (1.2.1)
      activesupport (>= 6.1)
    good_job (4.6.0)
      activejob (>= 6.1.0)
      activerecord (>= 6.1.0)
      concurrent-ruby (>= 1.3.1)
      fugit (>= 1.11.0)
      railties (>= 6.1.0)
      thor (>= 1.0.0)
    google-protobuf (4.29.2-aarch64-linux)
      bigdecimal
      rake (>= 13)
    google-protobuf (4.29.2-arm64-darwin)
      bigdecimal
      rake (>= 13)
    google-protobuf (4.29.2-x86_64-darwin)
      bigdecimal
      rake (>= 13)
    google-protobuf (4.29.2-x86_64-linux)
      bigdecimal
      rake (>= 13)
    htmlbeautifier (1.4.3)
    http (5.2.0)
      addressable (~> 2.8)
      base64 (~> 0.1)
      http-cookie (~> 1.0)
      http-form_data (~> 2.2)
      llhttp-ffi (~> 0.5.0)
    http-cookie (1.0.8)
      domain_name (~> 0.5)
    http-form_data (2.3.0)
    i18n (1.14.6)
      concurrent-ruby (~> 1.0)
    io-console (0.8.0)
    irb (1.14.3)
      rdoc (>= 4.0.0)
      reline (>= 0.4.2)
    jbuilder (2.13.0)
      actionview (>= 5.0.0)
      activesupport (>= 5.0.0)
    kaminari (1.2.2)
@@ -157,166 +226,310 @@ GEM
      activerecord
      kaminari-core (= 1.2.2)
    kaminari-core (1.2.2)
    listen (3.8.0)
    libmf (0.4.0)
      ffi
    listen (3.9.0)
      rb-fsevent (~> 0.10, >= 0.10.3)
      rb-inotify (~> 0.9, >= 0.9.10)
    loofah (2.19.1)
    llhttp-ffi (0.5.0)
      ffi-compiler (~> 1.0)
      rake (~> 13.0)
    logger (1.6.4)
    loofah (2.23.1)
      crass (~> 1.0.2)
      nokogiri (>= 1.5.9)
    mail (2.8.0.1)
      nokogiri (>= 1.12.0)
    mail (2.8.1)
      mini_mime (>= 0.1.1)
      net-imap
      net-pop
      net-smtp
    marcel (1.0.2)
    marcel (1.0.4)
    matrix (0.4.2)
    method_source (1.0.0)
    mini_mime (1.1.2)
    mini_portile2 (2.8.1)
    minitest (5.17.0)
    msgpack (1.6.0)
    mustermann (3.0.0)
      ruby2_keywords (~> 0.0.1)
    net-imap (0.3.4)
    memory_profiler (1.1.0)
    method_source (1.1.0)
    mini_mime (1.1.5)
    minitest (5.25.4)
    msgpack (1.7.5)
    neighbor (0.5.1)
      activerecord (>= 7)
    net-imap (0.5.4)
      date
      net-protocol
    net-pop (0.1.2)
      net-protocol
    net-protocol (0.2.1)
    net-protocol (0.2.2)
      timeout
    net-smtp (0.3.3)
    net-smtp (0.5.0)
      net-protocol
    nio4r (2.5.8)
    nokogiri (1.14.1)
      mini_portile2 (~> 2.8.0)
    netrc (0.11.0)
    nio4r (2.7.4)
    nokogiri (1.18.1-aarch64-linux-gnu)
      racc (~> 1.4)
    pg (1.4.5)
    pry (0.14.2)
    nokogiri (1.18.1-aarch64-linux-musl)
      racc (~> 1.4)
    nokogiri (1.18.1-arm64-darwin)
      racc (~> 1.4)
    nokogiri (1.18.1-x86_64-darwin)
      racc (~> 1.4)
    nokogiri (1.18.1-x86_64-linux-gnu)
      racc (~> 1.4)
    nokogiri (1.18.1-x86_64-linux-musl)
      racc (~> 1.4)
    numo-narray (0.9.2.1)
    orm_adapter (0.5.0)
    parallel (1.26.3)
    parallel_tests (4.7.2)
      parallel
    pg (1.5.9)
    pg_query (6.0.0)
      google-protobuf (>= 3.25.3)
    pghero (3.6.1)
      activerecord (>= 6.1)
    pluck_each (0.2.0)
      activerecord (> 3.2.0)
      activesupport (> 3.0.0)
    prettier_print (1.2.1)
    prism (1.3.0)
    progressbar (1.13.0)
    prometheus_exporter (2.2.0)
      webrick
    pry (0.15.2)
      coderay (~> 1.1)
      method_source (~> 1.0)
    pry-stack_explorer (0.6.1)
      binding_of_caller (~> 1.0)
      pry (~> 0.13)
    public_suffix (5.0.1)
    puma (5.6.5)
    psych (5.2.2)
      date
      stringio
    public_suffix (6.0.1)
    puma (5.6.9)
      nio4r (~> 2.0)
    racc (1.6.2)
    rack (2.2.6.2)
    rack-mini-profiler (3.0.0)
    pundit (2.4.0)
      activesupport (>= 3.0.0)
    pundit-matchers (4.0.0)
      rspec-core (~> 3.12)
      rspec-expectations (~> 3.12)
      rspec-mocks (~> 3.12)
      rspec-support (~> 3.12)
    raabro (1.4.0)
    racc (1.8.1)
    rack (2.2.10)
    rack-cors (2.0.2)
      rack (>= 2.0.0)
    rack-mini-profiler (3.3.1)
      rack (>= 1.2.0)
    rack-protection (3.0.5)
    rack-proxy (0.7.7)
      rack
    rack-test (2.0.2)
    rack-session (1.0.2)
      rack (< 3)
    rack-test (2.2.0)
      rack (>= 1.3)
    rails (7.0.4.2)
      actioncable (= 7.0.4.2)
      actionmailbox (= 7.0.4.2)
      actionmailer (= 7.0.4.2)
      actionpack (= 7.0.4.2)
      actiontext (= 7.0.4.2)
      actionview (= 7.0.4.2)
      activejob (= 7.0.4.2)
      activemodel (= 7.0.4.2)
      activerecord (= 7.0.4.2)
      activestorage (= 7.0.4.2)
      activesupport (= 7.0.4.2)
    rackup (1.0.1)
      rack (< 3)
      webrick
    rails (7.2.2.1)
      actioncable (= 7.2.2.1)
      actionmailbox (= 7.2.2.1)
      actionmailer (= 7.2.2.1)
      actionpack (= 7.2.2.1)
      actiontext (= 7.2.2.1)
      actionview (= 7.2.2.1)
      activejob (= 7.2.2.1)
      activemodel (= 7.2.2.1)
      activerecord (= 7.2.2.1)
      activestorage (= 7.2.2.1)
      activesupport (= 7.2.2.1)
      bundler (>= 1.15.0)
      railties (= 7.0.4.2)
    rails-dom-testing (2.0.3)
      activesupport (>= 4.2.0)
      railties (= 7.2.2.1)
    rails-controller-testing (1.0.5)
      actionpack (>= 5.0.1.rc1)
      actionview (>= 5.0.1.rc1)
      activesupport (>= 5.0.1.rc1)
    rails-dom-testing (2.2.0)
      activesupport (>= 5.0.0)
      minitest
      nokogiri (>= 1.6)
    rails-html-sanitizer (1.5.0)
      loofah (~> 2.19, >= 2.19.1)
    railties (7.0.4.2)
      actionpack (= 7.0.4.2)
      activesupport (= 7.0.4.2)
      method_source
    rails-html-sanitizer (1.6.2)
      loofah (~> 2.21)
      nokogiri (>= 1.15.7, != 1.16.7, != 1.16.6, != 1.16.5, != 1.16.4, != 1.16.3, != 1.16.2, != 1.16.1, != 1.16.0.rc1, != 1.16.0)
    rails_semantic_logger (4.17.0)
      rack
      railties (>= 5.1)
      semantic_logger (~> 4.16)
    railties (7.2.2.1)
      actionpack (= 7.2.2.1)
      activesupport (= 7.2.2.1)
      irb (~> 1.13)
      rackup (>= 1.0.0)
      rake (>= 12.2)
      thor (~> 1.0)
      zeitwerk (~> 2.5)
    rake (13.0.6)
      thor (~> 1.0, >= 1.2.2)
      zeitwerk (~> 2.6)
    rainbow (3.1.1)
    rake (13.2.1)
    rb-fsevent (0.11.2)
    rb-inotify (0.10.1)
    rb-inotify (0.11.1)
      ffi (~> 1.0)
    regexp_parser (2.6.2)
    reline (0.3.2)
    rbi (0.2.2)
      prism (~> 1.0)
      sorbet-runtime (>= 0.5.9204)
    rdoc (6.10.0)
      psych (>= 4.0.0)
    react_on_rails (14.0.5)
      addressable
      connection_pool
      execjs (~> 2.5)
      rails (>= 5.2)
      rainbow (~> 3.0)
    regexp_parser (2.10.0)
    reline (0.6.0)
      io-console (~> 0.5)
    rexml (3.2.5)
    responders (3.1.1)
      actionpack (>= 5.2)
      railties (>= 5.2)
    rexml (3.4.0)
    rice (4.3.3)
    ripcord (2.0.0)
    rspec-core (3.12.1)
      rspec-support (~> 3.12.0)
    rspec-expectations (3.12.2)
    rspec (3.13.0)
      rspec-core (~> 3.13.0)
      rspec-expectations (~> 3.13.0)
      rspec-mocks (~> 3.13.0)
    rspec-core (3.13.2)
      rspec-support (~> 3.13.0)
    rspec-expectations (3.13.3)
      diff-lcs (>= 1.2.0, < 2.0)
      rspec-support (~> 3.12.0)
    rspec-mocks (3.12.3)
      rspec-support (~> 3.13.0)
    rspec-mocks (3.13.2)
      diff-lcs (>= 1.2.0, < 2.0)
      rspec-support (~> 3.12.0)
    rspec-rails (6.0.1)
      actionpack (>= 6.1)
      activesupport (>= 6.1)
      railties (>= 6.1)
      rspec-core (~> 3.11)
      rspec-expectations (~> 3.11)
      rspec-mocks (~> 3.11)
      rspec-support (~> 3.11)
    rspec-support (3.12.0)
    ruby-prof (1.4.5)
      rspec-support (~> 3.13.0)
    rspec-rails (7.1.0)
      actionpack (>= 7.0)
      activesupport (>= 7.0)
      railties (>= 7.0)
      rspec-core (~> 3.13)
      rspec-expectations (~> 3.13)
      rspec-mocks (~> 3.13)
      rspec-support (~> 3.13)
    rspec-sorbet (1.9.2)
      sorbet-runtime
    rspec-support (3.13.2)
    ruby-prof (1.7.1)
    ruby-prof-speedscope (0.3.0)
      ruby-prof (~> 1.0)
    ruby2_keywords (0.0.5)
    ruby-vips (2.2.2)
      ffi (~> 1.12)
      logger
    rubyzip (2.3.2)
    selenium-webdriver (4.8.0)
    rufo (0.18.0)
    sanitize (6.1.3)
      crass (~> 1.0.2)
      nokogiri (>= 1.12.0)
    sd_notify (0.1.1)
    securerandom (0.4.1)
    selenium-webdriver (4.10.0)
      rexml (~> 3.2, >= 3.2.5)
      rubyzip (>= 1.2.2, < 3.0)
      websocket (~> 1.0)
    sinatra (3.0.5)
      mustermann (~> 3.0)
      rack (~> 2.2, >= 2.2.4)
      rack-protection (= 3.0.5)
      tilt (~> 2.0)
    sprockets (4.2.0)
    semantic_logger (4.16.1)
      concurrent-ruby (~> 1.0)
    semantic_range (3.1.0)
    shakapacker (6.6.0)
      activesupport (>= 5.2)
      rack-proxy (>= 0.6.1)
      railties (>= 5.2)
      semantic_range (>= 2.3.0)
    shoulda-matchers (6.4.0)
      activesupport (>= 5.2.0)
    sorbet (0.5.11711)
      sorbet-static (= 0.5.11711)
    sorbet-runtime (0.5.11711)
    sorbet-static (0.5.11711-aarch64-linux)
    sorbet-static (0.5.11711-universal-darwin)
    sorbet-static (0.5.11711-x86_64-linux)
    sorbet-static-and-runtime (0.5.11711)
      sorbet (= 0.5.11711)
      sorbet-runtime (= 0.5.11711)
    spoom (1.5.0)
      erubi (>= 1.10.0)
      prism (>= 0.28.0)
      sorbet-static-and-runtime (>= 0.5.10187)
      thor (>= 0.19.2)
    sprockets (4.2.1)
      concurrent-ruby (~> 1.0)
      rack (>= 2.2.4, < 4)
    sprockets-rails (3.4.2)
      actionpack (>= 5.2)
      activesupport (>= 5.2)
    sprockets-rails (3.5.2)
      actionpack (>= 6.1)
      activesupport (>= 6.1)
      sprockets (>= 3.0.0)
    sqlite3 (1.6.0)
      mini_portile2 (~> 2.8.0)
    stimulus-rails (1.2.1)
    sqlite3 (1.7.3-aarch64-linux)
    sqlite3 (1.7.3-arm64-darwin)
    sqlite3 (1.7.3-x86_64-darwin)
    sqlite3 (1.7.3-x86_64-linux)
    stackprof (0.2.26)
    stimulus-rails (1.3.4)
      railties (>= 6.0.0)
    stringio (3.1.2)
    syntax_tree (6.2.0)
      prettier_print (>= 1.2.0)
    table_print (1.5.7)
    thor (1.2.1)
    tilt (2.1.0)
    timeout (0.3.1)
    turbo-rails (1.3.3)
    tailwindcss-rails (3.1.0)
      railties (>= 7.0.0)
      tailwindcss-ruby
    tailwindcss-ruby (3.4.17-aarch64-linux)
    tailwindcss-ruby (3.4.17-arm64-darwin)
    tailwindcss-ruby (3.4.17-x86_64-darwin)
    tailwindcss-ruby (3.4.17-x86_64-linux)
    tapioca (0.16.5)
      bundler (>= 2.2.25)
      netrc (>= 0.11.0)
      parallel (>= 1.21.0)
      rbi (~> 0.2)
      sorbet-static-and-runtime (>= 0.5.11087)
      spoom (>= 1.2.0)
      thor (>= 1.2.0)
      yard-sorbet
    thor (1.3.2)
    timeout (0.4.3)
    turbo-rails (2.0.11)
      actionpack (>= 6.0.0)
      activejob (>= 6.0.0)
      railties (>= 6.0.0)
    tzinfo (2.0.6)
      concurrent-ruby (~> 1.0)
    unf (0.1.4)
      unf_ext
    unf_ext (0.0.8.2)
    web-console (4.2.0)
    useragent (0.16.11)
    warden (1.2.9)
      rack (>= 2.0.9)
    web-console (4.2.1)
      actionview (>= 6.0.0)
      activemodel (>= 6.0.0)
      bindex (>= 0.4.0)
      railties (>= 6.0.0)
    webdrivers (5.2.0)
    webdrivers (5.3.1)
      nokogiri (~> 1.6)
      rubyzip (>= 1.3.0)
      selenium-webdriver (~> 4.0)
    websocket (1.2.9)
    websocket-driver (0.7.5)
      selenium-webdriver (~> 4.0, < 4.11)
    webrick (1.9.1)
    websocket (1.2.11)
    websocket-driver (0.7.6)
      websocket-extensions (>= 0.1.0)
    websocket-extensions (0.1.5)
    xpath (3.2.0)
      nokogiri (~> 1.8)
    zeitwerk (2.6.6)
    yard (0.9.37)
    yard-sorbet (0.9.0)
      sorbet-runtime
      yard
    zeitwerk (2.7.1)
    zstd-ruby (1.5.6.6)

PLATFORMS
  ruby
  aarch64-linux
  aarch64-linux-gnu
  aarch64-linux-musl
  arm64-darwin
  universal-darwin
  x86_64-darwin
  x86_64-linux-gnu
  x86_64-linux-musl

DEPENDENCIES
  addressable
@@ -325,46 +538,78 @@ DEPENDENCIES
  colorize
  concurrent-ruby-edge
  concurrent-ruby-ext
  cssbundling-rails (~> 1.4)
  curb
  curses
  daemons
  debug
  delayed_job_active_record
  delayed_job_web
  delayed_job_worker_pool
  db-query-matchers (~> 0.14)
  debug (~> 1.10)
  devise (~> 4.9)
  diffy
  discard
  disco
  factory_bot_rails
  faiss
  good_job (~> 4.6)
  htmlbeautifier
  http (~> 5.2)
  http-cookie
  importmap-rails
  influxdb-client
  jbuilder
  jbuilder (~> 2.13)
  kaminari
  listen
  memory_profiler
  neighbor
  nokogiri
  parallel_tests
  pg
  pg_query (>= 2)
  pghero (~> 3.6)
  pluck_each
  prettier_print
  progressbar
  prometheus_exporter (~> 2.2)
  pry
  pry-stack_explorer
  puma (~> 5.0)
  rack-mini-profiler
  rails (~> 7.0.4, >= 7.0.4.2)
  pundit (~> 2.4)
  pundit-matchers (~> 4.0)
  rack (~> 2.2)
  rack-cors
  rack-mini-profiler (~> 3.3)
  rails (~> 7.2)
  rails-controller-testing
  rails_live_reload!
  rails_semantic_logger (~> 4.17)
  rb-bsdiff!
  react_on_rails
  ripcord
  rspec-rails
  rspec-rails (~> 7.0)
  rspec-sorbet
  ruby-prof
  ruby-prof-speedscope
  ruby-vips
  rufo
  sanitize (~> 6.1)
  sd_notify
  selenium-webdriver
  shakapacker (~> 6.6)
  shoulda-matchers
  sorbet-static-and-runtime
  sprockets-rails
  sqlite3 (~> 1.4)
  stackprof
  stimulus-rails
  syntax_tree (~> 6.2)
  table_print
  tailwindcss-rails (~> 3.0)
  tapioca
  turbo-rails
  tzinfo-data
  web-console
  webdrivers
  xdiff!
  zstd-ruby

RUBY VERSION
   ruby 3.2.0p0
   ruby 3.2.6p234

BUNDLED WITH
   2.4.6
   2.6.2
Procfile.dev (new file, +5 lines)
@@ -0,0 +1,5 @@
rails: RAILS_ENV=development bundle exec rails s -p 3000
wp-client: RAILS_ENV=development HMR=true ./bin/webpacker-dev-server
wp-server: RAILS_ENV=development HMR=true SERVER_BUNDLE_ONLY=yes ./bin/webpacker --watch
css: RAILS_ENV=development yarn "build:css[debug]" --watch
prometheus_exporter: RAILS_ENV=development bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "development"}'
Procfile.dev-static (new file, +9 lines)
@@ -0,0 +1,9 @@
# You can run these commands in separate shells
web: rails s -p 3000

# Next line runs a watch process with webpack to compile the changed files.
# When making frequent changes to client side assets, you will prefer building webpack assets
# upon saving rather than when you refresh your browser page.
# Note, if using React on Rails localization you will need to run
# `bundle exec rake react_on_rails:locale` before you run bin/webpacker
webpack: sh -c 'rm -rf public/packs/* || true && bin/webpacker -w'
Procfile.production (new file, +3 lines)
@@ -0,0 +1,3 @@
rails: RAILS_ENV=production bundle exec rails s -b 0.0.0.0 -p 3000
tail: tail -f log/production.log
prometheus_exporter: RAILS_ENV=production bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "production"}'
5
Procfile.staging
Normal file
@@ -0,0 +1,5 @@
rails: RAILS_ENV=staging ./bin/rails s -p 3001
wp-client: RAILS_ENV=development HMR=true ./bin/webpacker-dev-server
wp-server: RAILS_ENV=development HMR=true SERVER_BUNDLE_ONLY=yes ./bin/webpacker --watch
css: RAILS_ENV=development yarn "build:css[debug]" --watch
prometheus_exporter: RAILS_ENV=staging bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "staging"}'
3
Procfile.worker
Normal file
@@ -0,0 +1,3 @@
periodic_tasks: RAILS_ENV=worker bundle exec rake periodic_tasks
good_job: RAILS_ENV=worker bundle exec rake good_job
prometheus_exporter: RAILS_ENV=worker bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "worker"}'
@@ -7,8 +7,12 @@ Things you may want to cover:

* Ruby version

- 3.2.6

* System dependencies

- node 18.x

* Configuration

* Database creation
@@ -21,4 +25,6 @@ Things you may want to cover:

* Deployment instructions

- Build docker image with `docker build . -t redux-scraper-app`

* ...

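A possible way to run the image once built; the port, entrypoint, and environment here are assumptions, since the Dockerfile itself is not shown in this diff:

    docker run --rm -p 3000:3000 -e RAILS_ENV=production redux-scraper-app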
134
Rakefile
@@ -5,39 +5,102 @@ require "rake/testtask"
require_relative "config/application"

Rails.application.load_tasks
$LOAD_PATH << Rails.root.join("rake")
Rake.application.rake_require "sst"
Rake.application.rake_require "log_entry"
Rake.application.rake_require "worker"
Rake.application.rake_require "metrics"
Rake.application.rake_require "fa"
Rake.application.rake_require "e621"
Rake.application.rake_require "twitter"
Dir.glob(Rails.root.join("rake", "*.rake")).each { |rake_file| load rake_file }

task :set_ar_stdout => :environment do
  ActiveRecord::Base.logger = Logger.new(STDOUT)
task set_ar_stdout: :environment do
  ActiveRecord::Base.logger = Logger.new($stdout)
end

task :set_logger_stdout => :environment do
  Rails.logger = Logger.new(STDOUT)
  Rails.logger.formatter = proc do |severity, datetime, progname, msg|
    "#{severity}: #{msg}\n"
  end
task set_logger_stdout: :environment do
  Rails.logger = Logger.new($stdout)
  Rails.logger.formatter =
    proc { |severity, datetime, progname, msg| "#{severity}: #{msg}\n" }
  ActiveRecord::Base.logger = nil
  ActiveJob::Base.logger = nil
  GoodJob.logger = Rails.logger
end

task :pool_combined do
  ENV["RAILS_ENV"] = "production"
  proxies = ["direct", "proxy-1", "dedipath-1", "serverhost-1"]
  proxy = ENV["proxy"]
  raise("'proxy' must be set") unless proxy
  raise("'proxy' must be one of #{proxies}") unless proxies.include?(proxy)
  cmd = "bundle exec delayed_job_worker_pool pool_combined.rb"
  puts "$> #{cmd}"
task periodic_tasks: %i[environment set_logger_stdout] do
  Thread.new do
    loop do
      Rake::Task["pghero:capture_space_stats"].execute
      puts "logged space stats"
      sleep 6.hours
    end
  end

  Thread.new do
    loop do
      Rake::Task["pghero:capture_query_stats"].execute
      puts "logged query stats"
      sleep 5.minutes
    end
  end

  Thread.new do
    loop do
      Rake::Task["fa:browse_page_job"].execute
      Rake::Task["fa:home_page_job"].execute
      Rake::Task["e621:posts_index_job"].execute
      puts "enqueue periodic jobs"
      sleep 1.minute
    end
  end

  Thread.new do
    loop do
      puts "enqueue inkbunny latest posts"
      Domain::Inkbunny::Job::LatestPostsJob.set(
        queue: "inkbunny",
        priority: -20,
      ).perform_later({})
      sleep 2.minutes
    end
  end

  loop { sleep 10 }
end

namespace :db_sampler do
  task export: :environment do
    url_names = ENV["url_names"] || raise("need 'url_names' (comma-separated)")
    outfile = $stdout
    DbSampler.new(outfile).export(url_names.split(","))
  ensure
    outfile.close if outfile
  end

  task import: [:environment] do
    infile = $stdin
    DbSampler.new(infile).import
  ensure
    infile.close if infile
  end
end

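A plausible invocation of the sampler tasks above, reading and writing through the standard streams they bind; the task names come straight from the namespace, while the artist names and dump file name are illustrative only:

    bin/rails db_sampler:export url_names=someartist,otherartist > sample_dump.bin
    bin/rails db_sampler:import < sample_dump.bin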
task good_job: %i[environment set_ar_stdout set_logger_stdout] do
  env_hash = {
    "RAILS_ENV" => "worker",
    "GOOD_JOB_POLL_INTERVAL" => "5",
    "GOOD_JOB_MAX_CACHE" => "10000",
    "GOOD_JOB_QUEUE_SELECT_LIMIT" => "4096",
    "GOOD_JOB_MAX_THREADS" => "4",
    "GOOD_JOB_QUEUES" =>
      ENV["GOOD_JOB_QUEUES"] ||
        %w[manual:4 fa_post,e621:2 *:6].reject(&:nil?).join(";"),
  }

  env_hash.each do |key, value|
    ENV[key] = value
    puts "$> #{key.light_black.bold} = #{value.bold}"
  end

  cmd = "bundle exec good_job"
  puts "$> #{cmd.bold}"
  exec(cmd)
end

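The default queue string above expands to manual:4;fa_post,e621:2;*:6, which in GoodJob's documented queue grammar dedicates up to 4 threads to the manual queue, 2 shared between fa_post and e621, and 6 to everything else; since the task reads ENV["GOOD_JOB_QUEUES"] first, the split can be overridden at launch. A sketch (the queue values here are examples, not the project's defaults):

    GOOD_JOB_QUEUES="manual:2;*:8" bundle exec rake good_job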
task :recompute_job_signatures => :environment do
task recompute_job_signatures: :environment do
  ActiveRecord::Base.logger = Logger.new(STDOUT)
  ActiveRecord::Base.logger.level = :error

@@ -58,11 +121,22 @@ task :recompute_job_signatures => :environment do
  end
end

task :workoff_failed_jobs => [:environment, :set_ar_stdout, :set_logger_stdout] do
task workoff_failed_jobs: %i[environment set_ar_stdout set_logger_stdout] do
  worker = Delayed::Worker.new
  Delayed::Job.where(
    "last_error is not null and attempts <= 2"
  ).find_each(batch_size: 1) do |job|
    worker.run(job)
  end
  Delayed::Job
    .where("last_error is not null and attempts <= 2")
    .find_each(batch_size: 1) { |job| worker.run(job) }
end

task :reverse_csv do
  file = ENV["file"] || raise("need 'file' (file path)")
  in_csv = CSV.parse(File.open(file, "r+"), headers: true)
  out_csv =
    CSV.new(
      File.open("rev_" + file, "w"),
      write_headers: true,
      headers: in_csv.headers,
    )
  in_csv.reverse_each { |row| out_csv << row.map(&:second) }
  out_csv.close
end

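Usage sketch for the task above — the file name is illustrative; the task reads ENV["file"] and writes a row-reversed copy with a rev_ prefix:

    rake reverse_csv file=posts.csv   # writes rev_posts.csv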
9
TODO.md
Normal file
@@ -0,0 +1,9 @@
# Project TODO List

- [ ] Add bookmarking feature for posts across different domains
- [ ] Add search feature to search FA descriptions, tags, E621 descriptions, tags
- [x] Get inkbunny index scan job working
- [x] Attach logs to jobs, page to view jobs and their logs
- [ ] Standardize all the embeddings tables to use the same schema (item_id, embedding)
- [ ] Bluesky scraper
- [ ] Download favs / votes for E621 users
@@ -1,4 +1,5 @@
//= link_tree ../images
//= link_directory ../stylesheets .css
//= link_tree ../../javascript .js
//= link_tree ../../../vendor/javascript .js
//= link_tree ../builds
//= link good_job_custom.css

3
app/assets/images/arrow-top-right-on-square.svg
Normal file
@@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" class="w-6 h-6">
  <path stroke-linecap="round" stroke-linejoin="round" d="M13.5 6H5.25A2.25 2.25 0 003 8.25v10.5A2.25 2.25 0 005.25 21h10.5A2.25 2.25 0 0018 18.75V10.5m-10.5 6L21 3m0 0h-5.25M21 3v5.25" />
</svg>
Size: 326 B
BIN
app/assets/images/domain-icons/bsky.png
Normal file
Size: 7.6 KiB
BIN
app/assets/images/domain-icons/deviantart.png
Normal file
Size: 1.6 KiB
BIN
app/assets/images/domain-icons/e621.png
Normal file
Size: 5.0 KiB
BIN
app/assets/images/domain-icons/fa.png
Normal file
Size: 8.2 KiB
BIN
app/assets/images/domain-icons/inkbunny.png
Normal file
Size: 15 KiB
BIN
app/assets/images/domain-icons/itaku.png
Normal file
Size: 3.7 KiB
BIN
app/assets/images/domain-icons/newgrounds.png
Normal file
Size: 797 B
BIN
app/assets/images/domain-icons/patreon.png
Normal file
Size: 772 B
BIN
app/assets/images/domain-icons/pixiv.png
Normal file
Size: 678 B
BIN
app/assets/images/domain-icons/x-twitter.png
Normal file
Size: 4.1 KiB
3
app/assets/images/user-circle.svg
Normal file
@@ -0,0 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" class="w-6 h-6">
  <path stroke-linecap="round" stroke-linejoin="round" d="M15.75 6a3.75 3.75 0 11-7.5 0 3.75 3.75 0 017.5 0zM4.501 20.118a7.5 7.5 0 0114.998 0A17.933 17.933 0 0112 21.75c-2.676 0-5.216-.584-7.499-1.632z" />
</svg>
Size: 344 B
@@ -1,49 +0,0 @@
/*
 * This is a manifest file that'll be compiled into application.css, which will include all the files
 * listed below.
 *
 * Any CSS (and SCSS, if configured) file within this directory, lib/assets/stylesheets, or any plugin's
 * vendor/assets/stylesheets directory can be referenced here using a relative path.
 *
 * You're free to add application-wide styles to this file and they'll appear at the bottom of the
 * compiled file so the styles you add here take precedence over styles defined in any other CSS
 * files in this directory. Styles in this file should be added after the last require_* statement.
 * It is generally better to create a new file per style scope.
 *
 *= require_tree .
 *= require_self
 */
html,
body {
  height: 100%;
  margin: 0;
}

body {
  display: flex;
  flex-direction: column;
  margin: 0 2em;
}

.content-container {
  flex-grow: 1;
  margin: 1em 0;
  min-height: 512px;
  position: relative;
}

.image-container {
  height: 100%;
  width: 100%;
  margin: 0 auto;
  position: absolute;
}

.image-container .media {
  position: absolute;
  left: 50%;
  transform: translate(-50%);
  max-height: 100%;
  max-width: 100%;
  box-shadow: 0 0 5px 1px black;
}
41
app/assets/stylesheets/application.tailwind.css
Normal file
@@ -0,0 +1,41 @@
@tailwind base;
@tailwind components;
@tailwind utilities;

.animated-shadow-sky {
  @apply shadow-lg;
}

.sky-section {
  @apply divide-y divide-slate-300 overflow-hidden border border-slate-300 bg-slate-100 md:rounded-lg;
}

.section-header {
  @apply px-4 py-3 font-medium text-slate-900;
}

.sky-link {
  @apply text-sky-600 underline decoration-dotted transition-colors hover:text-sky-800;
}

.scroll-shadows {
  background:
    /* Shadow Cover TOP */
    linear-gradient(white 30%, rgba(255, 255, 255, 0)) center top,
    /* Shadow Cover BOTTOM */ linear-gradient(rgba(255, 255, 255, 0), white 70%)
      center bottom,
    /* Shadow TOP */
    linear-gradient(to bottom, rgba(0, 0, 0, 0.1), rgba(0, 0, 0, 0)) center
      top,
    /* Shadow BOTTOM */
    linear-gradient(to top, rgba(0, 0, 0, 0.1), rgba(0, 0, 0, 0)) center
      bottom;

  background-repeat: no-repeat;
  background-size:
    100% 20px,
    100% 20px,
    100% 10px,
    100% 10px;
  background-attachment: local, local, scroll, scroll;
}
87
app/assets/stylesheets/good_job_custom.css
Normal file
@@ -0,0 +1,87 @@
/* ANSI Colors */
.ansi-bold {
  font-weight: bold;
}
.ansi-black {
  color: #000000;
}
.ansi-red {
  color: #cd0000;
}
.ansi-green {
  color: #00cd00;
}
.ansi-yellow {
  color: #cdcd00;
}
.ansi-blue {
  color: #0000ee;
}
.ansi-magenta {
  color: #cd00cd;
}
.ansi-cyan {
  color: #00cdcd;
}
.ansi-white {
  color: #e5e5e5;
}

/* Bright variants */
.ansi-bright-black {
  color: #7f7f7f;
}
.ansi-bright-red {
  color: #ff0000;
}
.ansi-bright-green {
  color: #00ff00;
}
.ansi-bright-yellow {
  color: #ffff00;
}
.ansi-bright-blue {
  color: #5c5cff;
}
.ansi-bright-magenta {
  color: #ff00ff;
}
.ansi-bright-cyan {
  color: #00ffff;
}
.ansi-bright-white {
  color: #ffffff;
}

.log-uuid {
  max-width: 100px;
  overflow: hidden;
  /* white-space: nowrap; */
  text-overflow: ellipsis;
}

/* Log line container */
.log-line {
  font-family: monospace;
  font-size: 0.8rem;
  line-height: 1;
  margin: 2px 0;
  padding: 2px 4px;
  display: flex;
}

.log-line > span {
  display: inline-block;
  white-space: pre;
}

.good-job-execution-log {
  background: #3d3d3d;
}

.text-truncate-link {
  display: inline-block;
  max-width: 300px;
  overflow: hidden;
  text-overflow: ellipsis;
}
@@ -1,3 +1,4 @@
# typed: strict
module ApplicationCable
  class Channel < ActionCable::Channel::Base
  end

@@ -1,3 +1,4 @@
# typed: strict
module ApplicationCable
  class Connection < ActionCable::Connection::Base
  end

84
app/controllers/admin/proxy_controller.rb
Normal file
@@ -0,0 +1,84 @@
# typed: true
# frozen_string_literal: true
class Admin::ProxyController < ApplicationController
  before_action :authenticate_user!
  before_action :require_admin!
  skip_before_action :verify_authenticity_token, only: %i[grafana prometheus]

  def grafana
    fullpath =
      "http://grafana:3100#{request.fullpath.delete_prefix("/grafana")}"
    proxy_response(fullpath, "/grafana")
  end

  def prometheus
    fullpath = "http://prometheus:9090#{request.fullpath.delete_prefix("/prometheus")}"
    proxy_response(fullpath, "/prometheus")
  end

  private

  def require_admin!
    unless current_user&.admin?
      redirect_to root_path, alert: "You are not authorized to access this area"
    end
  end

  def grafana_proxy_headers
    { "X-WEBAUTH-USER" => "admin" }.merge(proxy_headers)
  end

  def proxy_headers
    {
      "X-Forwarded-Host" => request.host_with_port,
      "X-Forwarded-Proto" => request.ssl? ? "https" : "http",
      "X-Forwarded-For" => request.remote_ip,
      "Host" => request.host,
      "Connection" => request.headers["Connection"],
      "Upgrade" => request.headers["Upgrade"],
      "Accept" => request.headers["Accept"],
      "Cookie" => request.headers["Cookie"],
      "Content-Type" => request.headers["Content-Type"],
      "Content-Length" => request.headers["Content-Length"],
    }.merge
  end

  def websocket_request?
    request.headers["Connection"]&.include?("upgrade")
  end

  def proxy_response(fullpath, prefix)
    method = request.method.downcase.to_s
    if method == "post"
      response = HTTP.headers(grafana_proxy_headers).send(method, fullpath, body: request.raw_post)
    else
      response = HTTP.headers(grafana_proxy_headers).send(method, fullpath)
    end

    headers = response.headers.to_h

    # Handle redirects by rewriting the Location header
    if response.code.in?([301, 302, 303, 307, 308]) &&
        headers["Location"].present?
      location = headers["Location"]
      # Strip the host from absolute URLs
      location = location.gsub(%r{^https?://[^/]+}, "")
      # Add our prefix to relative URLs
      location = "#{prefix}#{location}" if location.start_with?("/")
      headers["Location"] = location
    end

    # Pass through the response with all headers
    response_headers = headers.except("Content-Type")

    render_args = {
      body: response.body.to_s,
      status: response.code,
      content_type: headers["Content-Type"],
      headers: response_headers,
    }
    render_args[:location] = headers["Location"] if headers["Location"]

    render render_args
  end
end

@@ -1,2 +1,38 @@
# typed: true
class ApplicationController < ActionController::Base
  extend T::Sig
  extend T::Helpers
  include Pundit::Authorization
  include Devise::Controllers::Helpers::ClassMethods

  before_action do
    if Rails.env.development? || Rails.env.staging?
      Rack::MiniProfiler.authorize_request
    end
  end

  before_action :authenticate_user!

  # Pundit authorization error handling
  rescue_from Pundit::NotAuthorizedError, with: :user_not_authorized

  protected

  def set_ivfflat_probes!
    ReduxApplicationRecord.connection.execute("SET ivfflat.max_probes = 10")
    ReduxApplicationRecord.connection.execute("SET ivfflat.probes = 10")
  end

  protected

  def prometheus_client
    PrometheusExporter::Client.default
  end

  private

  def user_not_authorized
    flash[:alert] = "You are not authorized to perform this action."
    redirect_back(fallback_location: root_path)
  end
end

81
app/controllers/blob_entries_controller.rb
Normal file
@@ -0,0 +1,81 @@
# typed: false
class BlobEntriesController < ApplicationController
  skip_before_action :authenticate_user!, only: [:show]

  def show
    thumb = params[:thumb]
    raise("invalid thumb #{thumb}") if thumb.present? && !thumb_params(thumb)

    expires_dur = 1.year
    response.headers["Expires"] = expires_dur.from_now.httpdate
    expires_in expires_dur, public: true

    sha256 = params[:sha256]
    etag = sha256
    etag += "-#{thumb}" if thumb
    return unless stale?(last_modified: Time.at(0), strong_etag: etag)

    # images, videos, etc
    blob_entry = BlobEntry.find(HexUtil.hex2bin(sha256))
    if helpers.is_send_data_content_type?(blob_entry.content_type)
      if !thumb.blank? &&
          helpers.is_thumbable_content_type?(blob_entry.content_type)
        filename = "thumb-#{thumb}-#{sha256}"
        filename = filename[..File.extname(filename).length]
        filename += ".jpeg"

        width, height = thumb_params(thumb)
        image =
          Vips::Image.thumbnail_buffer(
            blob_entry.contents,
            width,
            height: height,
          )
        resized_image_contents = image.jpegsave_buffer

        send_data(
          resized_image_contents,
          type: "image/jpg",
          disposition: "inline",
          filename: filename,
        )
      else
        ext = helpers.ext_for_content_type(blob_entry.content_type)
        ext = ".#{ext}" if ext
        send_data(
          blob_entry.contents,
          type: blob_entry.content_type,
          disposition: "inline",
          filename: "data#{ext}",
        )
      end
    elsif blob_entry.content_type =~ %r{text/plain}
      render plain: blob_entry.contents
    elsif blob_entry.content_type.starts_with? "text/html"
      render html: blob_entry.contents.html_safe
    elsif blob_entry.content_type.starts_with? "application/json"
      pretty_json = JSON.pretty_generate(JSON.parse blob_entry.contents)
      render html:
        "<html><body><pre>#{pretty_json}</pre></body></html>".html_safe
    else
      render plain: "no renderer for #{blob_entry.content_type}"
    end
  end

  private

  def thumb_params(thumb)
    case thumb
    when "32-avatar"
      [32, 32]
    when "64-avatar"
      [64, 64]
    when "tiny"
      [100, 100]
    when "small"
      [400, 300]
    when "medium"
      [800, 600]
    end
  end
end
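The thumb presets above map directly onto request parameters: a client asks for a blob by its hex sha256 and optionally names a preset, and the controller serves a resized JPEG with long-lived caching headers. A sketch of such a request — the path is an assumption, since the route definition is not in this diff; only the sha256 and thumb parameters come from the code above:

    curl 'https://example.host/blob_entries/<hex-sha256>?thumb=small'   # 400x300 JPEG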
6
app/controllers/domain/e621/posts_controller.rb
Normal file
@@ -0,0 +1,6 @@
# typed: true
class Domain::E621::PostsController < ApplicationController
  def show
    @post = Domain::E621::Post.find_by!(e621_id: params[:e621_id])
  end
end
@@ -1,26 +1,48 @@
# typed: true
class Domain::Fa::ApiController < ApplicationController
  skip_before_action :authenticate_user!
  before_action :validate_api_token!

  skip_before_action :verify_authenticity_token,
    only: %i[ enqueue_objects object_statuses ]
    only: %i[enqueue_objects object_statuses]

  skip_before_action :validate_api_token!, only: %i[search_user_names]

  def search_user_names
    name = params[:name]
    limit = (params[:limit] || 5).to_i.clamp(0, 15)
    users = users_for_name(name, limit: limit)
    if !Rails.env.production? && name == "error"
      render status: 500, json: { error: "an error!" }
    else
      render json: { users: users }
    end
  end

  def object_statuses
    fa_ids = (params[:fa_ids] || []).map(&:to_i)
    url_names = (params[:url_names] || [])

    jobs_async = Delayed::Backend::ActiveRecord::Job.
      select(:id, :queue, :handler).
      where(queue: "manual").
      load_async
    jobs_async =
      GoodJob::Job
        .select(:id, :queue_name, :serialized_params)
        .where(queue_name: "manual", finished_at: nil)
        .where(
          [
            "(serialized_params->'exception_executions' = '{}')",
            "(serialized_params->'exception_executions' is null)",
          ].join(" OR "),
        )
        .load_async

    users_async = Domain::Fa::User.
      where(url_name: url_names).
      load_async
    users_async = Domain::Fa::User.where(url_name: url_names).load_async

    fa_id_to_post = Domain::Fa::Post.
      includes(:file).
      where(fa_id: fa_ids).
      map do |post|
        [post.fa_id, post]
      end.to_h
    fa_id_to_post =
      Domain::Fa::Post
        .includes(:file)
        .where(fa_id: fa_ids)
        .map { |post| [post.fa_id, post] }
        .to_h

    posts_response = {}
    users_response = {}
@@ -28,19 +50,25 @@ class Domain::Fa::ApiController < ApplicationController
    fa_ids.each do |fa_id|
      post = fa_id_to_post[fa_id]

      post_response = {
        terminal_state: false,
        seen_at: time_ago_or_never(post&.created_at),
        scanned_at: "never",
        downloaded_at: "never",
      }
      post_response =
        T.let(
          {
            terminal_state: false,
            seen_at: time_ago_or_never(post&.created_at),
            scanned_at: "never",
            downloaded_at: "never",
          },
          T::Hash[Symbol, T.untyped],
        )

      if post
        post_response[:info_url] = domain_fa_post_url(fa_id: post.fa_id)
        post_response[:scanned_at] = time_ago_or_never(post.scanned_at)

        if post.file.present?
          post_response[:downloaded_at] = time_ago_or_never(post.file.created_at)
          post_response[:downloaded_at] = time_ago_or_never(
            post.file&.created_at,
          )
          post_response[:state] = "have_file"
          post_response[:terminal_state] = true
        elsif post.scanned?
@@ -55,9 +83,7 @@ class Domain::Fa::ApiController < ApplicationController
      posts_response[fa_id] = post_response
    end

    url_name_to_user = users_async.map do |user|
      [user.url_name, user]
    end.to_h
    url_name_to_user = users_async.map { |user| [user.url_name, user] }.to_h

    url_names.each do |url_name|
      user = url_name_to_user[url_name]
@@ -79,41 +105,41 @@ class Domain::Fa::ApiController < ApplicationController
          user_response[:terminal_state] = true
        end
      else
        user_response = {
          state: "not_seen",
          terminal_state: false,
        }
        user_response = { state: "not_seen", terminal_state: false }
      end
      users_response[url_name] = user_response
    end

    queue_depths = Hash.new do |hash, key|
      hash[key] = 0
    end
    queue_depths = Hash.new { |hash, key| hash[key] = 0 }

    jobs_async.each do |job|
      queue_depths[job.payload_object.job_data["job_class"]] += 1
      queue_depths[job.serialized_params["job_class"]] += 1
    end

    queue_depths = queue_depths.map do |key, value|
      [key.
        delete_prefix("Domain::Fa::Job::").
        split("::").
        last.
        underscore.
        delete_suffix("_job").
        gsub("_", " "),
      value]
    end.to_h
    queue_depths =
      queue_depths
        .map do |key, value|
          [
            key
              .delete_prefix("Domain::Fa::Job::")
              .split("::")
              .last
              .underscore
              .delete_suffix("_job")
              .gsub("_", " "),
            value,
          ]
        end
        .to_h

    render json: {
      posts: posts_response,
      users: users_response,
      queues: {
        total_depth: queue_depths.values.sum,
        depths: queue_depths,
      },
    }
      posts: posts_response,
      users: users_response,
      queues: {
        total_depth: queue_depths.values.sum,
        depths: queue_depths,
      },
    }
  end

  def enqueue_objects
@@ -123,13 +149,18 @@ class Domain::Fa::ApiController < ApplicationController
    url_names = (params[:url_names] || [])
    url_names_to_enqueue = Set.new(params[:url_names_to_enqueue] || [])

    fa_id_to_post = Domain::Fa::Post.includes(:file).where(fa_id: fa_ids).map do |post|
      [post.fa_id, post]
    end.to_h
    fa_id_to_post =
      Domain::Fa::Post
        .includes(:file)
        .where(fa_id: fa_ids)
        .map { |post| [post.fa_id, post] }
        .to_h

    url_name_to_user = Domain::Fa::User.where(url_name: url_names).map do |user|
      [user.url_name, user]
    end.to_h
    url_name_to_user =
      Domain::Fa::User
        .where(url_name: url_names)
        .map { |user| [user.url_name, user] }
        .to_h

    fa_ids.each do |fa_id|
      post = fa_id_to_post[fa_id]
@@ -144,42 +175,134 @@ class Domain::Fa::ApiController < ApplicationController
    enqueue_deferred!

    render json: {
      post_scans: @enqueue_counts[Domain::Fa::Job::ScanPostJob],
      post_files: @enqueue_counts[Domain::Fa::Job::ScanFileJob],
      user_pages: @enqueue_counts[Domain::Fa::Job::UserPageJob],
      user_galleries: @enqueue_counts[Domain::Fa::Job::UserGalleryJob],
    }
      post_scans: @enqueue_counts[Domain::Fa::Job::ScanPostJob],
      post_files: @enqueue_counts[Domain::Fa::Job::ScanFileJob],
      user_pages: @enqueue_counts[Domain::Fa::Job::UserPageJob],
      user_galleries: @enqueue_counts[Domain::Fa::Job::UserGalleryJob],
    }
  end

  def similar_users
    url_name = params[:url_name]
    exclude_url_name = params[:exclude_url_name]

    user = Domain::Fa::User.find_by(url_name: url_name)
    if user.nil?
      render status: 404,
        json: {
          error: "user '#{url_name}' not found",
          error_type: "user_not_found",
        }
      return
    end

    all_similar_users = helpers.similar_users_by_followed(user, limit: 10)

    if all_similar_users.nil?
      render status: 500,
        json: {
          error:
            "user '#{url_name}' has not had recommendations computed yet",
          error_type: "recs_not_computed",
        }
      return
    end

    all_similar_users = users_list_to_similar_list(all_similar_users)

    not_followed_similar_users = nil
    if exclude_url_name
      exclude_folowed_by_user =
        Domain::Fa::User.find_by(url_name: exclude_url_name)
      not_followed_similar_users =
        if exclude_folowed_by_user.nil?
          # TODO - enqueue a manual UserFollowsJob for this user and have client
          # re-try the request later
          {
            error: "user '#{exclude_url_name}' not found",
            error_type: "exclude_user_not_found",
          }
        elsif exclude_folowed_by_user.scanned_follows_at.nil?
          {
            error:
              "user '#{exclude_url_name}' followers list hasn't been scanned",
            error_type: "exclude_user_not_scanned",
          }
        else
          users_list_to_similar_list(
            helpers.similar_users_by_followed(
              user,
              limit: 10,
              exclude_followed_by: exclude_folowed_by_user,
            ),
          )
        end
    end

    render json: {
      all: all_similar_users,
      not_followed: not_followed_similar_users,
    }
  end

  private

  def get_best_user_page_http_log_entry_for(user)
    for_path =
      proc do |uri_path|
        HttpLogEntry
          .where(
            uri_scheme: "https",
            uri_host: "www.furaffinity.net",
            uri_path: uri_path,
          )
          .order(created_at: :desc)
          .first
          &.response
      end

    for_hle_id =
      proc { |hle_id| hle_id && HttpLogEntry.find_by(id: hle_id)&.response }

    # older versions don't end in a trailing slash
    hle_id = user.log_entry_detail && user.log_entry_detail["last_user_page_id"]
    for_hle_id.call(hle_id) || for_path.call("/user/#{user.url_name}/") ||
      for_path.call("/user/#{user.url_name}")
  end

  def defer_post_scan(post, fa_id)
    if !post || !post.scanned?
      defer_manual(Domain::Fa::Job::ScanPostJob, {
        fa_id: fa_id,
      }, -17)
      defer_manual(Domain::Fa::Job::ScanPostJob, { fa_id: fa_id }, -17)
    end

    if post && post.file_uri && !post.file.present?
      return defer_manual(Domain::Fa::Job::ScanFileJob, {
        post: post,
      }, -15, "static_file")
      return
      return(
        defer_manual(
          Domain::Fa::Job::ScanFileJob,
          { post: post },
          -15,
          "static_file",
        )
      )
    end
  end

  def defer_user_scan(user, url_name, highpri)
    if !user || user.due_for_page_scan?
      defer_manual(Domain::Fa::Job::UserPageJob, {
        url_name: url_name,
      }, highpri ? -16 : -6)
      defer_manual(
        Domain::Fa::Job::UserPageJob,
        { url_name: url_name },
        highpri ? -16 : -6,
      )
      return
    end

    if !user || user.due_for_gallery_scan?
      defer_manual(Domain::Fa::Job::UserGalleryJob, {
        url_name: url_name,
      }, highpri ? -14 : -4)
      defer_manual(
        Domain::Fa::Job::UserGalleryJob,
        { url_name: url_name },
        highpri ? -14 : -4,
      )
      return
    end

@@ -196,9 +319,11 @@ class Domain::Fa::ApiController < ApplicationController
  end

  def enqueue_deferred!
    while job = (@deferred_jobs || []).shift
      klass, args, priority, queue = job
      klass.set(priority: priority, queue: queue).perform_later(args)
    GoodJob::Bulk.enqueue do
      while job = (@deferred_jobs || []).shift
        klass, args, priority, queue = job
        klass.set(priority: priority, queue: queue).perform_later(args)
      end
    end
  end

@@ -209,4 +334,90 @@ class Domain::Fa::ApiController < ApplicationController
      "never"
    end
  end

  def users_for_name(name, limit: 10)
    users =
      Domain::Fa::User
        .where(
          [
            "(name ilike :name) OR (url_name ilike :name)",
            { name: "#{ReduxApplicationRecord.sanitize_sql_like(name)}%" },
          ],
        )
        .includes(:avatar)
        .select(:id, :state, :state_detail, :log_entry_detail, :name, :url_name)
        .select(
          "(SELECT COUNT(*) FROM domain_fa_posts WHERE creator_id = domain_fa_users.id) as num_posts",
        )
        .order(name: :asc)
        .limit(limit)

    users.map do |user|
      {
        id: user.id,
        name: user.name,
        url_name: user.url_name,
        thumb: helpers.fa_user_avatar_path(user, thumb: "64-avatar"),
        show_path: domain_fa_user_path(user.url_name),
        # `num_posts` is a manually added column, so we need to use T.unsafe to
        # access it
        num_posts: T.unsafe(user).num_posts,
      }
    end
  end

  def users_list_to_similar_list(users_list)
    users_list.map do |user|
      profile_thumb_url = user.avatar&.file_uri&.to_s
      profile_thumb_url ||
        begin
          profile_page_response = get_best_user_page_http_log_entry_for(user)
          if profile_page_response
            parser =
              Domain::Fa::Parser::Page.new(
                profile_page_response.contents,
                require_logged_in: false,
              )
            profile_thumb_url = parser.user_page.profile_thumb_url
          else
            if user.due_for_follows_scan?
              Domain::Fa::Job::UserFollowsJob.set(
                { priority: -20 },
              ).perform_later({ user: user })
            end
            if user.due_for_page_scan?
              Domain::Fa::Job::UserPageJob.set({ priority: -20 }).perform_later(
                { user: user },
              )
            end
          end
        rescue StandardError
          logger.error("error getting profile_thumb_url: #{$!.message}")
        end

      {
        name: user.name,
        url_name: user.url_name,
        profile_thumb_url: profile_thumb_url,
        url: "https://www.furaffinity.net/user/#{user.url_name}/",
      }
    end
  end

  API_TOKENS = {
    "a4eb03ac-b33c-439c-9b51-a834d1c5cf48" => "dymk",
    "56cc81fe-8c00-4436-8981-4580eab00e66" => "taargus",
    "9c38727f-f11d-41de-b775-0effd86d520c" => "xjal",
    "e38c568f-a24d-4f26-87f0-dfcd898a359d" => "fyacin",
    "41fa1144-d4cd-11ed-afa1-0242ac120002" => "soft_fox_lad",
    "9b3cf444-5913-4efb-9935-bf26501232ff" => "syfaro",
  }

  def validate_api_token!
    api_token = request.params[:api_token]
    api_user_name = API_TOKENS[api_token]
    return if api_user_name
    return if VpnOnlyRouteConstraint.new.matches?(request)
    render status: 403, json: { error: "not authenticated" }
  end
end

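A sketch of how a client authenticates against this controller: every request (outside VPN-only routes) carries an api_token query parameter that must match one of the API_TOKENS keys above, otherwise the JSON 403 from validate_api_token! is returned. The host, path, and token below are placeholders — only the parameter name and error shape come from the code:

    curl 'https://example.host/domain/fa/api/object_statuses?api_token=YOUR-TOKEN&fa_ids[]=12345'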
@@ -1,24 +1,37 @@
# typed: true

class Domain::Fa::PostsController < ApplicationController
  before_action :set_domain_fa_post,
    only: %i[ show scan_post scan_post ]
  before_action :set_ivfflat_probes!, only: %i[show]
  before_action :set_domain_fa_post, only: %i[show scan_post]

  skip_before_action :verify_authenticity_token,
    only: %i[ try_scan_post try_scan_posts ]
    only: %i[try_scan_post try_scan_posts]

  # GET /domain/fa/posts
  skip_before_action :authenticate_user!, only: %i[show index]

  # This action is always scoped to a user, so the :user_url_name parameter is required.
  # GET /domain/fa/users/:user_url_name/posts
  def index
    @posts = Domain::Fa::Post.
      includes(:creator, :file).
      page(params[:page]).
      per(50).
      order(fa_id: :desc).
      without_count
    @user = Domain::Fa::User.find_by!(url_name: params[:user_url_name])
    relation = policy_scope(@user.posts)
    @posts =
      relation
        .includes(:creator, :file)
        .order(fa_id: :desc)
        .page(params[:page])
        .per(50)
        .without_count
  end

  # GET /domain/fa/posts/1
  # GET /domain/fa/posts/:fa_id
  def show
  end

  # GET /domain/fa/posts/:fa_id/favorites
  def favorites
    @post = Domain::Fa::Post.find_by!(fa_id: params[:fa_id])
  end

  def scan_post
    if try_enqueue_post_scan(@post, @post.fa_id)
      redirect_to domain_fa_post_path(@post.fa_id), notice: "Enqueued for scan"
@@ -29,42 +42,40 @@ class Domain::Fa::PostsController < ApplicationController

  def try_scan_post
    fa_id = params[:fa_id]&.to_i || raise("need fa_id parameter")
    post = Domain::Fa::Post.find_by_fa_id(fa_id)
    post = Domain::Fa::Post.find_by(fa_id: fa_id)
    enqueued = try_enqueue_post_scan(post, fa_id)

    if post && post.file.present?
      state_string = "downloaded #{helpers.time_ago_in_words(post.file.created_at, include_seconds: true)} ago"
    if post && (file = post.file).present?
      state_string =
        "downloaded #{helpers.time_ago_in_words(file.created_at, include_seconds: true)} ago"
    elsif post && post.scanned?
      state_string = "scanned #{helpers.time_ago_in_words(post.scanned_at, include_seconds: true)} ago"
      state_string =
        "scanned #{helpers.time_ago_in_words(post.scanned_at, include_seconds: true)} ago"
    else
      state_string = []
      if !post
        state_string << "not seen"
      else
        state_string << "#{post.state}"
      end
      !post ? state_string << "not seen" : state_string << "#{post.state}"

      if enqueued
        state_string << "enqueued"
      end
      state_string << "enqueued" if enqueued

      state_string = state_string.join(", ")
    end

    render json: {
      enqueued: enqueued,
      title: post&.title,
      state: state_string,
      is_terminal_state: post&.scanned? && post&.file&.present? || false,
    }
      enqueued: enqueued,
      title: post&.title,
      state: state_string,
      is_terminal_state: post&.scanned? && post.file&.present? || false,
    }
  end

  def try_scan_posts
    Rails.logger.info "params: #{params.inspect}"
    fa_ids = params[:fa_ids].map(&:to_i)
    fa_id_to_post = Domain::Fa::Post.where(fa_id: fa_ids).map do |post|
      [post.fa_id, post]
    end.to_h
    fa_id_to_post =
      Domain::Fa::Post
        .where(fa_id: fa_ids)
        .map { |post| [post.fa_id, post] }
        .to_h

    response = {}

@@ -99,21 +110,19 @@ class Domain::Fa::PostsController < ApplicationController

    if !post || !post.scanned?
      Rails.logger.info "Enqueue scan #{fa_id}"
      Domain::Fa::Job::ScanPostJob.
        set(priority: -15, queue: "manual").
        perform_later({
          fa_id: fa_id,
        })
      Domain::Fa::Job::ScanPostJob.set(
        priority: -15,
        queue: "manual",
      ).perform_later({ fa_id: fa_id })
      return true
    end

    if post && post.file_uri && !post.file.present?
      Rails.logger.info "Enqueue file #{fa_id}"
      Domain::Fa::Job::ScanFileJob.
        set(priority: -15, queue: "manual").
        perform_later({
          post: post,
        })
      Domain::Fa::Job::ScanFileJob.set(
        priority: -15,
        queue: "manual",
      ).perform_later({ post: post })
      return true
    end

@@ -122,6 +131,9 @@ class Domain::Fa::PostsController < ApplicationController

  # Use callbacks to share common setup or constraints between actions.
  def set_domain_fa_post
    @post = Domain::Fa::Post.find_by_fa_id!(params[:fa_id])
    @post =
      Domain::Fa::Post.includes(:creator, file: :response).find_by!(
        fa_id: params[:fa_id],
      )
  end
end

@@ -1,71 +1,28 @@
# typed: true

class Domain::Fa::UsersController < ApplicationController
  before_action :set_domain_fa_user, only: %i[ show edit update destroy ]
  before_action :set_ivfflat_probes!, only: %i[show]
  before_action :set_user, only: %i[show]
  skip_before_action :authenticate_user!, only: %i[show]

  # GET /domain/fa/users or /domain/fa/users.json
  def index
    @domain_fa_users = Domain::Fa::User.page(params[:page])
    authorize Domain::Fa::User
    @users =
      policy_scope(Domain::Fa::User).includes({ avatar: [:file] }).page(
        params[:page],
      )
  end

  # GET /domain/fa/users/1 or /domain/fa/users/1.json
  def show
  end

  # GET /domain/fa/users/new
  def new
    @domain_fa_user = Domain::Fa::User.new
  end

  # GET /domain/fa/users/1/edit
  def edit
  end

  # POST /domain/fa/users or /domain/fa/users.json
  def create
    @domain_fa_user = Domain::Fa::User.new(domain_fa_user_params)

    respond_to do |format|
      if @domain_fa_user.save
        format.html { redirect_to domain_fa_user_url(@domain_fa_user), notice: "User was successfully created." }
        format.json { render :show, status: :created, location: @domain_fa_user }
      else
        format.html { render :new, status: :unprocessable_entity }
        format.json { render json: @domain_fa_user.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /domain/fa/users/1 or /domain/fa/users/1.json
  def update
    respond_to do |format|
      if @domain_fa_user.update(domain_fa_user_params)
        format.html { redirect_to domain_fa_user_url(@domain_fa_user), notice: "User was successfully updated." }
        format.json { render :show, status: :ok, location: @domain_fa_user }
      else
        format.html { render :edit, status: :unprocessable_entity }
        format.json { render json: @domain_fa_user.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /domain/fa/users/1 or /domain/fa/users/1.json
  def destroy
    @domain_fa_user.destroy

    respond_to do |format|
      format.html { redirect_to domain_fa_users_url, notice: "User was successfully destroyed." }
      format.json { head :no_content }
    end
    authorize @user
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_domain_fa_user
    @domain_fa_user = Domain::Fa::User.find(params[:id])
  end

  # Only allow a list of trusted parameters through.
  def domain_fa_user_params
    params.fetch(:domain_fa_user, {})
  def set_user
    @user = Domain::Fa::User.find_by(url_name: params[:url_name])
  end
end

19
app/controllers/domain/inkbunny/posts_controller.rb
Normal file
@@ -0,0 +1,19 @@
# typed: false
class Domain::Inkbunny::PostsController < ApplicationController
  skip_before_action :authenticate_user!, only: %i[show index]

  def index
    relation = Domain::Inkbunny::Post.includes(:creator, :files)

    if params[:user_id].present?
      @user = Domain::Inkbunny::User.find(params[:user_id])
      relation = relation.where(creator: @user)
    end

    @posts = relation.order(ib_post_id: :desc).page(params[:page]).per(50)
  end

  def show
    @post = Domain::Inkbunny::Post.find_by!(ib_post_id: params[:ib_post_id])
  end
end
6
app/controllers/domain/inkbunny/users_controller.rb
Normal file
@@ -0,0 +1,6 @@
# typed: true
class Domain::Inkbunny::UsersController < ApplicationController
  def show
    @user = Domain::Inkbunny::User.find_by(name: params[:name])
  end
end
@@ -1,14 +1,12 @@
# typed: true
class Domain::Twitter::ApiController < ApplicationController
  skip_before_action :verify_authenticity_token,
    only: %i[ enqueue_objects ]
  skip_before_action :verify_authenticity_token, only: %i[enqueue_objects]

  def enqueue_objects
    @enqueue_counts ||= Hash.new { |h, k| h[k] = 0 }

    names = (params[:names] || [])
    names.each do |name|
      defer_user_timeline_scan(name, true)
    end
    names.each { |name| defer_user_timeline_scan(name, true) }
    enqueue_deferred!
    render json: @enqueue_counts.to_json
  end
@@ -16,9 +14,11 @@ class Domain::Twitter::ApiController < ApplicationController
  private

  def defer_user_timeline_scan(name, highpri)
    defer_manual(Domain::Twitter::Job::UserTimelineTweetsJob, {
      name: name,
    }, highpri ? -16 : -6)
    defer_manual(
      Domain::Twitter::Job::UserTimelineTweetsJob,
      { name: name },
      highpri ? -16 : -6,
    )
  end

  def defer_manual(klass, args, priority, queue = nil)
@@ -31,9 +31,11 @@ class Domain::Twitter::ApiController < ApplicationController
  end

  def enqueue_deferred!
    while job = (@deferred_jobs || []).shift
      klass, args, priority, queue = job
      klass.set(priority: priority, queue: queue).perform_later(args)
    GoodJob::Bulk.enqueue do
      while job = (@deferred_jobs || []).shift
        klass, args, priority, queue = job
        klass.set(priority: priority, queue: queue).perform_later(args)
      end
    end
  end

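Note that both this controller and the FA API controller now wrap their drain loop in GoodJob::Bulk.enqueue, which is GoodJob's public bulk-enqueue API: perform_later calls made inside the block are buffered and inserted as a batch when the block exits, instead of issuing one database round trip per deferred job. The exact batching behavior depends on the good_job version pinned in the Gemfile.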
204
app/controllers/global_states_controller.rb
Normal file
@@ -0,0 +1,204 @@
# typed: false
class GlobalStatesController < ApplicationController
  before_action :set_global_state, only: %i[edit update destroy]
  after_action :verify_authorized

  FA_COOKIE_KEYS = %w[
    furaffinity-cookie-a
    furaffinity-cookie-b
    furaffinity-cookie-oaid
  ].freeze

  IB_COOKIE_KEYS = %w[inkbunny-username inkbunny-password inkbunny-sid].freeze

  def index
    authorize GlobalState
    @global_states = policy_scope(GlobalState).order(:key)
  end

  def new
    @global_state = GlobalState.new
    authorize @global_state
  end

  def create
    @global_state = GlobalState.new(global_state_params)
    authorize @global_state
    if @global_state.save
      redirect_to global_states_path,
        notice: "Global state was successfully created."
    else
      render :new, status: :unprocessable_entity
    end
  end

  def edit
    authorize @global_state
  end

  def update
    authorize @global_state
    if @global_state.update(global_state_params)
      redirect_to global_states_path,
        notice: "Global state was successfully updated."
    else
      render :edit, status: :unprocessable_entity
    end
  end

  def destroy
    authorize @global_state
    @global_state.destroy
    redirect_to global_states_path,
      notice: "Global state was successfully deleted."
  end

  def fa_cookies
    authorize GlobalState
    @fa_cookies =
      FA_COOKIE_KEYS.map do |key|
        GlobalState.find_by(key: key) ||
          GlobalState.new(key: key, value_type: :string)
      end
  end

  def edit_fa_cookies
    authorize GlobalState
    @fa_cookies =
      FA_COOKIE_KEYS.map do |key|
        GlobalState.find_by(key: key) ||
          GlobalState.new(key: key, value_type: :string)
      end
  end

  def update_fa_cookies
    authorize GlobalState

    begin
      ActiveRecord::Base.transaction do
        fa_cookies_params.each do |key, value|
          state = GlobalState.find_or_initialize_by(key: key)
          state.value = value
          state.value_type = :string
          state.save!
        end
      end

      redirect_to fa_cookies_global_states_path,
        notice: "FA cookies were successfully updated."
    rescue ActiveRecord::RecordInvalid => e
      @fa_cookies =
        FA_COOKIE_KEYS.map do |key|
          GlobalState.find_by(key: key) ||
            GlobalState.new(key: key, value_type: :string)
        end
      flash.now[:alert] = "Error updating FA cookies: #{e.message}"
      render :edit_fa_cookies, status: :unprocessable_entity
    end
  end

  def ib_cookies
    authorize GlobalState
    @ib_cookies =
      IB_COOKIE_KEYS.map do |key|
        GlobalState.find_by(key: key) ||
          GlobalState.new(key: key, value_type: :string)
      end
  end

  def edit_ib_cookies
    authorize GlobalState
    @ib_cookies =
      IB_COOKIE_KEYS
        .reject { |key| key == "inkbunny-sid" }
        .map do |key|
          GlobalState.find_by(key: key) ||
            GlobalState.new(key: key, value_type: :string)
        end
    @ib_sid = GlobalState.find_by(key: "inkbunny-sid")
  end

  def update_ib_cookies
    authorize GlobalState

    begin
      params_hash = params.require(:ib_cookies).permit(*IB_COOKIE_KEYS).to_h
      has_credentials =
        params_hash["inkbunny-username"].present? ||
          params_hash["inkbunny-password"].present?
      has_sid = params_hash["inkbunny-sid"].present?

      if has_credentials && has_sid
        raise ArgumentError,
          "Cannot set both credentials and session ID at the same time"
      end

      if !has_credentials && !has_sid
        raise ArgumentError, "Must set either credentials or session ID"
      end

      ActiveRecord::Base.transaction do
        if has_credentials
          # Update username and password
          %w[inkbunny-username inkbunny-password].each do |key|
            state = GlobalState.find_or_initialize_by(key: key)
            state.value = params_hash[key]
            state.value_type = :string
            state.save!
          end
        else
          # Update SID
          state = GlobalState.find_or_initialize_by(key: "inkbunny-sid")
          state.value = params_hash["inkbunny-sid"]
          state.value_type = :string
          state.save!
        end
      end

      redirect_to ib_cookies_global_states_path,
        notice: "Inkbunny credentials were successfully updated."
    rescue ArgumentError => e
      @ib_cookies =
        IB_COOKIE_KEYS
          .reject { |key| key == "inkbunny-sid" }
          .map do |key|
            GlobalState.find_by(key: key) ||
              GlobalState.new(key: key, value_type: :string)
          end
      @ib_sid = GlobalState.find_by(key: "inkbunny-sid")
      flash.now[:alert] = "Error updating Inkbunny credentials: #{e.message}"
      render :edit_ib_cookies, status: :unprocessable_entity
    rescue ActiveRecord::RecordInvalid => e
      @ib_cookies =
        IB_COOKIE_KEYS
          .reject { |key| key == "inkbunny-sid" }
          .map do |key|
            GlobalState.find_by(key: key) ||
              GlobalState.new(key: key, value_type: :string)
          end
      @ib_sid = GlobalState.find_by(key: "inkbunny-sid")
      flash.now[:alert] = "Error updating Inkbunny credentials: #{e.message}"
      render :edit_ib_cookies, status: :unprocessable_entity
    end
  end

  private

  def set_global_state
    @global_state = GlobalState.find(params[:id])
  end

  def global_state_params
    params.require(:global_state).permit(:key, :value, :value_type)
  end

  def fa_cookies_params
    params.require(:fa_cookies).permit(*FA_COOKIE_KEYS)
  end

  def ib_cookies_params
    params.require(:ib_cookies).permit(
      *IB_COOKIE_KEYS.reject { |key| key == "inkbunny-sid" },
    )
  end
end
13
app/controllers/indexed_posts_controller.rb
Normal file
@@ -0,0 +1,13 @@
# typed: false
class IndexedPostsController < ApplicationController
  def index
    @posts = IndexedPost.all
    active_sources = (params[:sources] || SourceHelper.all_source_names)
    unless SourceHelper.has_all_sources?(active_sources)
      postable_types = SourceHelper.source_names_to_class_names(active_sources)
      @posts =
        @posts.where(postable_type: postable_types) if postable_types.any?
    end
    @posts = @posts.order(created_at: :desc).page(params[:page]).per(50)
  end
end
@@ -1,3 +1,4 @@
|
||||
# typed: false
|
||||
class LogEntriesController < ApplicationController
|
||||
def index
|
||||
@uri_filter = Addressable::URI.parse(params[:filter]) if params[:filter]
|
||||
@@ -10,9 +11,11 @@ class LogEntriesController < ApplicationController
|
||||
|
||||
if @uri_filter.path.present?
|
||||
if @uri_filter.query.present?
|
||||
query = query.
|
||||
where("uri_path = ?", @uri_filter.path).
|
||||
where("uri_query like ?", @uri_filter.query + "%")
|
||||
query =
|
||||
query.where("uri_path = ?", @uri_filter.path).where(
|
||||
"uri_query like ?",
|
||||
@uri_filter.query + "%",
|
||||
)
|
||||
else
|
||||
query = query.where("uri_path like ?", @uri_filter.path + "%")
|
||||
end
|
||||
@@ -21,12 +24,14 @@ class LogEntriesController < ApplicationController
|
||||
query = HttpLogEntry
|
||||
end
|
||||
|
||||
@log_entries = query.
|
||||
page(params[:page]).
|
||||
per(50).
|
||||
includes(:response).
|
||||
order(id: :desc).
|
||||
without_count
    @log_entries =
      query
        .page(params[:page])
        .per(50)
        .joins(:response)
        .includes(:response)
        .order(id: :desc)
        .without_count

    formats.clear
    formats << :html
@@ -38,74 +43,40 @@ class LogEntriesController < ApplicationController
    @last_window_count = 0
    @last_window_bytes = 0
    @last_window_bytes_stored = 0
    @content_type_counts = Hash.new do |hash, key|
      hash[key] = {
        count: 0,
        bytes: 0,
        bytes_stored: 0,
      }
    end
    @content_type_counts =
      Hash.new do |hash, key|
        hash[key] = { count: 0, bytes: 0, bytes_stored: 0 }
      end

    @by_domain_counts = Hash.new do |hash, key|
      hash[key] = {
        count: 0,
        bytes: 0,
        bytes_stored: 0,
      }
    end
    @by_domain_counts =
      Hash.new do |hash, key|
        hash[key] = { count: 0, bytes: 0, bytes_stored: 0 }
      end

    HttpLogEntry.includes(:response).find_each(batch_size: 100, order: :desc) do |log_entry|
      break if log_entry.created_at < @time_window.ago
      @last_window_count += 1
      @last_window_bytes += log_entry.response.size
      @last_window_bytes_stored += log_entry.response.bytes_stored
      content_type = log_entry.content_type.split(";").first
    HttpLogEntry
      .joins(:response)
      .includes(:response)
      .select("http_log_entries.*, blob_entries_p.size")
      .find_each(batch_size: 100, order: :desc) do |log_entry|
        break if log_entry.created_at < @time_window.ago
        @last_window_count += 1
        @last_window_bytes += log_entry.response_size
        content_type = log_entry.content_type.split(";").first

        @content_type_counts[content_type][:count] += 1
        @content_type_counts[content_type][:bytes] += log_entry.response.size
        @content_type_counts[content_type][:bytes_stored] += log_entry.response.bytes_stored
        @content_type_counts[content_type][:count] += 1
        @content_type_counts[content_type][:bytes] += log_entry.response_size

        @by_domain_counts[log_entry.uri_host][:count] += 1
        @by_domain_counts[log_entry.uri_host][:bytes] += log_entry.response.size
        @by_domain_counts[log_entry.uri_host][:bytes_stored] += log_entry.response.bytes_stored
      end
        @by_domain_counts[log_entry.uri_host][:count] += 1
        @by_domain_counts[log_entry.uri_host][:bytes] += log_entry.response_size
      end
  end

  def show
    @log_entry = HttpLogEntry.includes(
      :caused_by_entry,
      :triggered_entries,
      response: :base,
    ).find(params[:id])
  end

  def contents
    expires_dur = 1.year
    response.headers["Expires"] = expires_dur.from_now.httpdate
    expires_in expires_dur, public: true

    log_entry = HttpLogEntry.find(params[:id])
    hex_sha256 = HexUtil.bin2hex(log_entry.response_sha256)
    return unless stale?(last_modified: Time.at(0), strong_etag: hex_sha256)

    # images, videos, etc
    entry_response = log_entry.response
    if helpers.is_send_data_content_type?(entry_response.content_type)
      send_data(
        entry_response.contents,
        type: entry_response.content_type,
        disposition: "inline",
        filename: log_entry.uri.path,
      )
    elsif entry_response.content_type =~ /text\/plain/
      render plain: entry_response.contents
    elsif entry_response.content_type.starts_with? "text/html"
      render html: entry_response.contents.html_safe
    elsif entry_response.content_type.starts_with? "application/json"
      pretty_json = JSON.pretty_generate(JSON.parse entry_response.contents)
      render html: "<html><body><pre>#{pretty_json}</pre></body></html>".html_safe
    else
      render plain: "no renderer for #{entry_response.content_type}"
    end
    @log_entry =
      HttpLogEntry.includes(
        :caused_by_entry,
        :triggered_entries,
        response: :base,
      ).find(params[:id])
  end
end
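A minimal sketch of the counter pattern used in the index action above: `Hash.new` with a block lazily initializes a zeroed stats record per key, so the loop can increment without nil checks (values here are illustrative):

```ruby
counts = Hash.new { |hash, key| hash[key] = { count: 0, bytes: 0, bytes_stored: 0 } }
counts["image/png"][:count] += 1
counts["image/png"][:bytes] += 2_048
counts["image/png"] # => { count: 1, bytes: 2048, bytes_stored: 0 }
```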
8  app/controllers/pages_controller.rb  Normal file
@@ -0,0 +1,8 @@
# typed: true
class PagesController < ApplicationController
  skip_before_action :authenticate_user!, only: [:root]

  def root
    render :root
  end
end
21  app/controllers/user_scripts_controller.rb  Normal file
@@ -0,0 +1,21 @@
# typed: true
class UserScriptsController < ApplicationController
  skip_before_action :authenticate_user!, only: [:get]

  def get
    expires_in 1.hour, public: true
    response.cache_control[:public] = false
    response.cache_control[:private] = true

    script = params[:script]
    case script
    when "furecs.user.js"
      send_file(
        Rails.root.join("user_scripts/furecs.user.js"),
        type: "application/json",
      )
    else
      render status: 404, plain: "not found"
    end
  end
end
65  app/controllers/users/registrations_controller.rb  Normal file
@@ -0,0 +1,65 @@
# typed: false
# frozen_string_literal: true

class Users::RegistrationsController < Devise::RegistrationsController
  # before_action :configure_sign_up_params, only: [:create]
  # before_action :configure_account_update_params, only: [:update]

  # GET /resource/sign_up
  def new
    flash[:alert] = "New registrations are currently disabled."
    redirect_to root_path
  end

  # POST /resource
  def create
    flash[:alert] = "New registrations are currently disabled."
    redirect_to root_path
  end

  # GET /resource/edit
  # def edit
  #   super
  # end

  # PUT /resource
  # def update
  #   super
  # end

  # DELETE /resource
  # def destroy
  #   super
  # end

  # GET /resource/cancel
  # Forces the session data which is usually expired after sign
  # in to be expired now. This is useful if the user wants to
  # cancel oauth signing in/up in the middle of the process,
  # removing all OAuth session data.
  # def cancel
  #   super
  # end

  # protected

  # If you have extra params to permit, append them to the sanitizer.
  # def configure_sign_up_params
  #   devise_parameter_sanitizer.permit(:sign_up, keys: [:attribute])
  # end

  # If you have extra params to permit, append them to the sanitizer.
  # def configure_account_update_params
  #   devise_parameter_sanitizer.permit(:account_update, keys: [:attribute])
  # end

  # The path used after sign up.
  # def after_sign_up_path_for(resource)
  #   super(resource)
  # end

  # The path used after sign up for inactive accounts.
  # def after_inactive_sign_up_path_for(resource)
  #   super(resource)
  # end
end
28  app/controllers/users/sessions_controller.rb  Normal file
@@ -0,0 +1,28 @@
# typed: strict
# frozen_string_literal: true

class Users::SessionsController < Devise::SessionsController
  # before_action :configure_sign_in_params, only: [:create]

  # GET /resource/sign_in
  # def new
  #   super
  # end

  # POST /resource/sign_in
  # def create
  #   super
  # end

  # DELETE /resource/sign_out
  # def destroy
  #   super
  # end

  # protected

  # If you have extra params to permit, append them to the sanitizer.
  # def configure_sign_in_params
  #   devise_parameter_sanitizer.permit(:sign_in, keys: [:attribute])
  # end
end
@@ -1,2 +1,3 @@
# typed: strict
module ApplicationHelper
end
96  app/helpers/domain/e621/posts_helper.rb  Normal file
@@ -0,0 +1,96 @@
# typed: false
module Domain::E621::PostsHelper
  def icon_asset_for_url(url)
    domain = extract_domain(url)
    return nil unless domain

    domain_patterns = {
      %w[*.e621.net e621.net] => "e621.png",
      %w[*.furaffinity.net furaffinity.net] => "fa.png",
      %w[*.bsky.app bsky.app] => "bsky.png",
      %w[*.itaku.ee itaku.ee] => "itaku.png",
      %w[*.deviantart.com deviantart.com *.wixmp.com] => "deviantart.png",
      %w[*.twitter.com twitter.com *.x.com x.com] => "x-twitter.png",
      %w[*.inkbunny.net inkbunny.net *.ib.metapix.net ib.metapix.net] =>
        "inkbunny.png",
      %w[*.newgrounds.com newgrounds.com] => "newgrounds.png",
      %w[*.patreon.com patreon.com] => "patreon.png",
      %w[*.pixiv.net pixiv.net *.pximg.net pximg.net] => "pixiv.png",
    }

    domain_patterns.each do |patterns, icon|
      patterns.each do |pattern|
        if File.fnmatch?(pattern, domain, File::FNM_PATHNAME)
          return asset_path("domain-icons/#{icon}")
        end
      end
    end
    nil
  end

  def tag_category_tw_class(category)
    case category.to_sym
    when :general
      "bg-blue-300" # Light blue
    when :artist
      "bg-indigo-300" # Light indigo
    when :copyright
      "bg-purple-300" # Light purple
    when :character
      "bg-green-300" # Light green
    when :species
      "bg-teal-300" # Light teal
    when :invalid
      "bg-slate-300" # Medium gray
    when :meta
      "bg-amber-300" # Light amber
    when :lore
      "bg-cyan-300" # Light cyan
    else
      "bg-white" # White (default)
    end
  end

  def tag_category_order
    %i[artist copyright character species general meta lore invalid]
  end

  def font_awesome_category_icon(category)
    case category.to_sym
    when :artist
      "fa-brush"
    when :species
      "fa-paw"
    when :character
      "fa-user"
    when :copyright
      "fa-copyright"
    when :general
      "fa-tag"
    when :lore
      "fa-book"
    when :meta
      "fa-info"
    when :invalid
      "fa-ban"
    end
  end

  def fa_post_for_source(source)
    uri = URI.parse(source)
    return unless %w[www.furaffinity.net furaffinity.net].include?(uri.host)
    fa_id = uri.path.match(%r{/view/(\d+)})[1]
    return unless fa_id
    Domain::Fa::Post.find_by(fa_id: fa_id)
  rescue StandardError
    nil
  end

  private

  def extract_domain(url)
    URI.parse(url).host
  rescue URI::InvalidURIError
    nil
  end
end
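Why each pattern group above lists both a wildcard and the bare apex domain: with `File.fnmatch?`, the pattern's literal leading dot never matches the apex, so `*.example.net` alone misses `example.net`. A quick check:

```ruby
File.fnmatch?("*.e621.net", "static1.e621.net", File::FNM_PATHNAME) # => true
File.fnmatch?("*.e621.net", "e621.net", File::FNM_PATHNAME)         # => false
```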
@@ -1,39 +1,155 @@
# typed: strict
module Domain::Fa::PostsHelper
  extend T::Sig

  include ActionView::Helpers::DateHelper
  include ActionView::Helpers::SanitizeHelper
  include ActionView::Helpers::RenderingHelper
  include ActionView::Helpers::TagHelper

  sig { params(post: Domain::Fa::Post).returns(String) }
  def post_state_string(post)
    if post.have_file?
      "file"
    elsif post.scanned?
      "scanned"
    else
      post.state
      post.state || "unknown"
    end
  end

  sig do
    params(
      params:
        T.any(ActionController::Parameters, T::Hash[T.untyped, T.untyped]),
    ).returns(T.nilable(String))
  end
  def page_str(params)
    if (params[:page] || 1).to_i > 1
      "(Page #{params[:page]})"
      "(page #{params[:page]})"
    else
      nil
    end
  end

  def scanned_and_file_description(post)
    parts = []
    if post.scanned?
      time_ago = if post.scanned_at
        time_ago_in_words(post.scanned_at)
  sig { params(post: Domain::Fa::Post).returns(T.nilable(HttpLogEntry)) }
  def guess_scanned_http_log_entry(post)
    HttpLogEntry.find_all_by_uri(
      "https://www.furaffinity.net/view/#{post.fa_id}",
    ).first
  end

  sig { params(post: Domain::Fa::Post).returns(T.nilable(HttpLogEntry)) }
  def guess_file_downloaded_http_log_entry(post)
    if (uri = post.file_uri)
      HttpLogEntry.find_all_by_uri(uri).first
    end
  end

  sig { params(html: String).returns(String) }
  def fa_post_description_sanitized(html)
    fa_post_id_to_node = {}
    fa_user_url_name_to_node = {}

    sanitizer =
      Sanitize.new(
        elements: %w[br img b i span strong],
        attributes: {
          "span" => %w[style],
        },
        css: {
          properties: %w[font-size color],
        },
        transformers: [
          Kernel.lambda do |env|
            # Only allow and transform FA links
            if env[:node_name] == "a"
              node = env[:node]

              # by default, assume the host is www.furaffinity.net
              href = node["href"]&.downcase || ""
              href = "//" + href if href.match?(/^(www\.)?furaffinity\.net/)
              uri = URI.parse(href)
              uri.host ||= "www.furaffinity.net"
              path = uri.path

              fa_host_matcher = /^(www\.)?furaffinity\.net$/
              fa_post_matcher = %r{^/view/(\d+)/?$}
              fa_user_matcher = %r{^/user/(\w+)/?$}

              if fa_host_matcher.match?(uri.host) && path
                if match = path.match(fa_post_matcher)
                  fa_id = match[1].to_i
                  fa_post_id_to_node[fa_id] = node
                  next { node_whitelist: [node] }
                elsif match = path.match(fa_user_matcher)
                  fa_url_name = match[1]
                  fa_user_url_name_to_node[fa_url_name] = node
                  next { node_whitelist: [node] }
                end
              end

              # Don't allow any other links
              node.replace(node.children)
            end
          end,
        ],
      )

    fragment = Nokogiri::HTML5.fragment(sanitizer.send(:preprocess, html))
    sanitizer.node!(fragment)

    if fa_post_id_to_node.any?
      # Batch load posts and their titles, ensuring fa_post_ids are strings
      posts_by_id =
        Domain::Fa::Post.where(fa_id: fa_post_id_to_node.keys).index_by(&:fa_id)

      # Replace the link text with post titles if available
      fa_post_id_to_node.each do |fa_id, node|
        if (post = posts_by_id[fa_id])
          node.replace(
            Nokogiri::HTML5.fragment(
              render(
                partial: "domain/fa/posts/description_inline_link_fa_post",
                locals: {
                  post: post,
                },
              ),
            ),
          )
        else
          "(unknown)"
          node.replace(node.children)
        end
        parts << "Scanned #{time_ago} ago"
      else
        parts << "Not scanned"
      end
    end
    if post.file
      parts << "file #{time_ago_in_words(post.file.created_at)} ago"
    else
      parts << "no file"

    if fa_user_url_name_to_node.any?
      # Batch load users and their names, ensuring fa_user_url_names are strings
      users_by_url_name =
        Domain::Fa::User
          .where(url_name: fa_user_url_name_to_node.keys)
          .includes(:avatar)
          .index_by(&:url_name)

      # Replace the link text with user names if available
      fa_user_url_name_to_node.each do |fa_url_name, node|
        if (user = users_by_url_name[fa_url_name])
          node.replace(
            Nokogiri::HTML5.fragment(
              render(
                partial: "domain/fa/posts/description_inline_link_fa_user",
                locals: {
                  user: user,
                },
              ),
            ),
          )
        else
          node.replace(node.children)
        end
      end
    end
    parts.join(", ")

    raw fragment.to_html(preserve_newline: true)
  end
end
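The helper above drives the Sanitize gem through a custom transformer. A hedged sketch of that contract (the sample HTML is made up; newer Sanitize releases also accept `:node_allowlist`, which the users helper below uses):

```ruby
require "sanitize"

# A transformer is called once per node with an env hash; it may vet a node
# and exempt it from later cleanup by returning it under :node_whitelist.
fa_links_only = lambda do |env|
  next unless env[:node_name] == "a"
  node = env[:node]
  next { node_whitelist: [node] } if node["href"].to_s.include?("furaffinity.net")
  node.replace(node.children) # drop the disallowed tag, keep its text
end

Sanitize.fragment(
  '<a href="https://www.furaffinity.net/view/1">ok</a> <a href="https://evil.example">gone</a>',
  transformers: [fa_links_only],
)
# => '<a href="https://www.furaffinity.net/view/1">ok</a> gone'
```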
@@ -1,2 +1,113 @@
# typed: false
module Domain::Fa::UsersHelper
  def avatar_url(sha256, thumb: "32-avatar")
    blob_path(HexUtil.bin2hex(sha256), format: "jpg", thumb: thumb)
  end

  def fa_user_avatar_path(user, thumb: nil)
    if (sha256 = user.avatar&.file_sha256)
      blob_path(HexUtil.bin2hex(sha256), format: "jpg", thumb: thumb)
    else
      # default / 'not found' avatar image
      # "/blobs/9080fd4e7e23920eb2dccfe2d86903fc3e748eebb2e5aa8c657bbf6f3d941cdc/contents.jpg"
      asset_path("user-circle.svg")
    end
  end

  def sanitized_fa_user_profile_html(html)
    # try to preload all the FA usernames in the profile
    maybe_url_names =
      Nokogiri
        .HTML(html)
        .css("a")
        .flat_map do |node|
          href = URI.parse(node["href"])
          right_host = href.host.nil? || href.host == "www.furaffinity.net"
          right_path = href.path =~ %r{/user/.+}
          if right_host && right_path
            [href]
          else
            []
          end
        end
        .map { |href| href.path.split("/")[2]&.downcase }

    preloaded_users =
      Domain::Fa::User
        .where(url_name: maybe_url_names)
        .select(:id, :state, :state_detail, :log_entry_detail, :url_name)
        .joins(:avatar)
        .includes(:avatar)
        .index_by(&:url_name)

    raw Sanitize.fragment(
      html,
      elements: %w[br img b i span strong],
      attributes: {
        "span" => %w[style],
        "a" => [],
      },
      css: {
        properties: %w[font-size color],
      },
      transformers:
        lambda do |env|
          return unless env[:node_name] == "a"
          node = env[:node]
          href = URI.parse(node["href"])
          unless href.host == nil || href.host == "www.furaffinity.net"
            return
          end
          return unless href.path =~ %r{/user/.+}
          url_name = href.path.split("/")[2]&.downcase
          Sanitize.node!(
            node,
            { elements: %w[a], attributes: { "a" => %w[href] } },
          )
          node["href"] = domain_fa_user_path(url_name)
          node["class"] = "text-slate-200 underline decoration-slate-200 " +
            "decoration-dashed decoration-dashed decoration-1"

          whitelist = [node]

          user =
            preloaded_users[url_name] ||
              Domain::Fa::User.find_by(url_name: url_name)
          if user
            img = Nokogiri::XML::Node.new("img", node.document)
            img["class"] = "inline w-5"
            img["src"] = fa_user_avatar_path(user, thumb: "32-avatar")
            node.prepend_child(img)
            whitelist << img
          end

          { node_allowlist: whitelist }
        end,
    )
  end

  def similar_users_by_followed(user, limit: 10, exclude_followed_by: nil)
    if user.disco.nil?
      nil
    else
      ReduxApplicationRecord.connection.execute("SET ivfflat.probes = 32")
      user.similar_users_by_followed(
        exclude_followed_by: exclude_followed_by,
      ).limit(limit)
    end
  end

  def fa_user_account_status(user)
    log_entry_id = user.log_entry_detail["last_user_page_id"]
    return "unknown" if log_entry_id.nil?
    log_entry = HttpLogEntry.find_by(id: log_entry_id)
    return "unknown" if log_entry.nil?
    parser =
      Domain::Fa::Parser::Page.new(
        log_entry.response.contents,
        require_logged_in: false,
      )
    return "unknown" unless parser.probably_user_page?
    parser.user_page.account_status
  end
end
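Both description sanitizers above use the same preload-then-index idiom: gather all candidate keys first, load the matching records in one query, and key them with `index_by` for O(1) lookup while rewriting each anchor. A sketch (model and values assumed from this diff):

```ruby
users = Domain::Fa::User.where(url_name: %w[alice bob]).index_by(&:url_name)
users["alice"] # => a Domain::Fa::User or nil; no extra query per link
```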
128  app/helpers/good_job_helper.rb  Normal file
@@ -0,0 +1,128 @@
# typed: strict
# frozen_string_literal: true

module GoodJobHelper
  extend T::Sig
  extend T::Helpers
  extend self

  class AnsiSegment < T::Struct
    const :text, String
    const :class_names, T::Array[String]
  end

  # ANSI escape code pattern
  ANSI_PATTERN = /\e\[([0-9;]*)m/
  UUID_REGEX = /[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/

  sig { params(text: String).returns(T::Array[AnsiSegment]) }
  def parse_ansi(text)
    segments = []
    current_classes = T::Array[String].new

    # Split the text into parts based on ANSI codes
    parts = text.split(ANSI_PATTERN)

    # Process each part and its corresponding ANSI codes
    parts.each_with_index do |part, index|
      if index.even?
        # This is text content
        segments << AnsiSegment.new(
          text: part,
          class_names: current_classes.dup,
        )
      else
        # This is an ANSI code
        codes = part.split(";").map(&:to_i)
        if codes == [0]
          current_classes.clear
        else
          codes.each do |code|
            class_name = ansi_code_to_class(code)
            current_classes << class_name if class_name
          end
        end
      end
    end

    # go through segments and detect UUIDs, splitting the segment at the uuid
    # and adding them to the segments array. Should result in a <before>, <uuid>,
    # <after> tuple.
    segments.flat_map do |segment|
      if segment.text.match?(UUID_REGEX)
        idx = segment.text.index(UUID_REGEX)
        [
          AnsiSegment.new(
            text: segment.text[0...idx],
            class_names: segment.class_names,
          ),
          AnsiSegment.new(
            text: segment.text[idx...idx + 36],
            class_names: ["log-uuid"],
          ),
          AnsiSegment.new(
            text: segment.text[idx + 36..],
            class_names: segment.class_names,
          ),
        ]
      else
        [segment]
      end
    end
  end

  sig { params(job: GoodJob::Job).returns(T::Hash[String, T.untyped]) }
  def arguments_for_job(job)
    deserialized =
      T.cast(
        ActiveJob::Arguments.deserialize(job.serialized_params).to_h,
        T::Hash[String, T.untyped],
      )
    args = deserialized["arguments"].first
    args.sort_by { |key, _| key.to_s }.to_h
  end

  private

  sig { params(code: Integer).returns(T.nilable(String)) }
  def ansi_code_to_class(code)
    case code
    when 1
      "ansi-bold"
    when 30
      "ansi-black"
    when 31
      "ansi-red"
    when 32
      "ansi-green"
    when 33
      "ansi-yellow"
    when 34
      "ansi-blue"
    when 35
      "ansi-magenta"
    when 36
      "ansi-cyan"
    when 37
      "ansi-white"
    when 90
      "ansi-bright-black"
    when 91
      "ansi-bright-red"
    when 92
      "ansi-bright-green"
    when 93
      "ansi-bright-yellow"
    when 94
      "ansi-bright-blue"
    when 95
      "ansi-bright-magenta"
    when 96
      "ansi-bright-cyan"
    when 97
      "ansi-bright-white"
    else
      nil
    end
  end
end
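Expected behavior of `GoodJobHelper.parse_ansi`, callable directly thanks to `extend self` (the log line is made up):

```ruby
segments = GoodJobHelper.parse_ansi("\e[31merror\e[0m done")
segments.map { |s| [s.text, s.class_names] }
# => [["", []], ["error", ["ansi-red"]], [" done", []]]
# The leading empty segment comes from String#split seeing no text before the
# first escape code; "\e[0m" resets the class stack for the tail segment.
```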
22  app/helpers/indexable_posts_helper.rb  Normal file
@@ -0,0 +1,22 @@
# typed: false
module IndexablePostsHelper
  def show_path(indexed_post)
    case indexed_post.postable_type
    when "Domain::Fa::Post"
      # need to use the helper here because the postable is not loaded
      Rails.application.routes.url_helpers.domain_fa_post_path(
        indexed_post.postable,
      )
    when "Domain::E621::Post"
      Rails.application.routes.url_helpers.domain_e621_post_path(
        indexed_post.postable,
      )
    when "Domain::Inkbunny::Post"
      Rails.application.routes.url_helpers.domain_inkbunny_post_path(
        indexed_post.postable,
      )
    else
      raise("Unsupported postable type: #{indexed_post.postable_type}")
    end
  end
end
@@ -1,3 +1,4 @@
# typed: true
module LogEntriesHelper
  def is_send_data_content_type?(content_type)
    is_renderable_image_type?(content_type) ||
@@ -5,22 +6,49 @@ module LogEntriesHelper
      is_flash_content_type?(content_type)
  end

  def path_iterative_parts(uri_path)
    path_parts = uri_path.split("/")
    (1...path_parts.length).map do |i|
      [
        path_parts[i],
        path_parts[0..i].join("/") + (i == path_parts.length - 1 ? "" : "/"),
      ]
    end
  end

  def ext_for_content_type(content_type)
    case content_type
    when "image/jpeg"
      "jpeg"
    when "image/jpg"
      "jpg"
    when "image/png"
      "png"
    when "image/gif"
      "gif"
    when "video/webm"
      "webm"
    else
      nil
    end
  end

  def is_renderable_image_type?(content_type)
    [
      "image/jpeg",
      "image/jpg",
      "image/png",
      "image/gif",
    ].any? { |ct| content_type.starts_with?(ct) }
    %w[image/jpeg image/jpg image/png image/gif].any? do |ct|
      content_type.starts_with?(ct)
    end
  end

  def is_thumbable_content_type?(content_type)
    %w[video/webm].any? { |ct| content_type.starts_with?(ct) } ||
      is_renderable_image_type?(content_type)
  end

  def is_renderable_video_type?(content_type)
    [
      "video/mp4",
    ].any? { |ct| content_type.starts_with?(ct) }
    %w[video/mp4 video/webm].any? { |ct| content_type.starts_with?(ct) }
  end

  def is_flash_content_type?(content_type)
    content_type =~ /application\/x-shockwave-flash/
    content_type =~ %r{application/x-shockwave-flash}
  end
end
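What the new `path_iterative_parts` yields for a typical URI path: one `[segment, cumulative-prefix]` pair per component, suitable for breadcrumb links (sample path is illustrative):

```ruby
path_iterative_parts("/view/12345/thumb")
# => [["view", "/view/"], ["12345", "/view/12345/"], ["thumb", "/view/12345/thumb"]]
```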
22  app/helpers/source_helper.rb  Normal file
@@ -0,0 +1,22 @@
# typed: true
module SourceHelper
  def self.source_name_to_class_name
    {
      "furaffinity" => "Domain::Fa::Post",
      "e621" => "Domain::E621::Post",
      "inkbunny" => "Domain::Inkbunny::Post",
    }
  end

  def self.all_source_names
    source_name_to_class_name.keys
  end

  def self.source_names_to_class_names(list)
    list.map { |source| source_name_to_class_name[source] }.compact
  end

  def self.has_all_sources?(list)
    list.sort == all_source_names.sort
  end
end
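Usage sketch for `SourceHelper` (all class-level methods, per the file above):

```ruby
SourceHelper.source_names_to_class_names(%w[furaffinity e621])
# => ["Domain::Fa::Post", "Domain::E621::Post"]
SourceHelper.source_names_to_class_names(%w[furaffinity bogus])
# => ["Domain::Fa::Post"]  (unknown names are compacted away)
SourceHelper.has_all_sources?(%w[inkbunny e621 furaffinity]) # => true (order-insensitive)
```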
@@ -1,3 +0,0 @@
// Configure your import map in config/importmap.rb. Read more: https://github.com/rails/importmap-rails
import "@hotwired/turbo-rails"
import "controllers"
70  app/javascript/bundles/Main/components/Icon.tsx  Normal file
@@ -0,0 +1,70 @@
import * as React from 'react';

interface PropTypes {
  type: 'magnifying-glass' | 'exclamation-circle' | 'spinner';
  className?: string;
}
export default function Icon(props: PropTypes) {
  const { type } = props;
  const className = `w-6 h-6 pointer-events-none absolute
    transform top-1/2 -translate-y-1/2 ${props.className}`;

  switch (type) {
    case 'magnifying-glass':
      return (
        <svg
          xmlns="http://www.w3.org/2000/svg"
          fill="none"
          viewBox="0 0 24 24"
          strokeWidth={'2'}
          className={className}
        >
          <path
            fill="none"
            d="M21 21l-5.197-5.197m0 0A7.5 7.5 0 105.196 5.196a7.5 7.5 0 0010.607 10.607z"
          />
        </svg>
      );

    case 'spinner':
      return (
        <svg
          version="1.1"
          fill="none"
          xmlns="http://www.w3.org/2000/svg"
          viewBox="6 6 38 38"
          className={className}
        >
          <path d="M25.251,6.461c-10.318,0-18.683,8.365-18.683,18.683h4.068c0-8.071,6.543-14.615,14.615-14.615V6.461z">
            <animateTransform
              attributeType="xml"
              attributeName="transform"
              type="rotate"
              from="0 25 25"
              to="360 25 25"
              dur="0.6s"
              repeatCount="indefinite"
            />
          </path>
        </svg>
      );

    case 'exclamation-circle':
      return (
        <svg
          xmlns="http://www.w3.org/2000/svg"
          fill="none"
          viewBox="0 0 24 24"
          strokeWidth="1.5"
          stroke="currentColor"
          className={`h-6 w-6 ${className}`}
        >
          <path
            strokeLinecap="round"
            strokeLinejoin="round"
            d="M12 9v3.75m9-.75a9 9 0 11-18 0 9 9 0 0118 0zm-9 3.75h.008v.008H12v-.008z"
          />
        </svg>
      );
  }
}
70  app/javascript/bundles/Main/components/ListItem.tsx  Normal file
@@ -0,0 +1,70 @@
import * as React from 'react';
import Icon from './Icon';

const COMMON_LIST_ELEM_CLASSES = `
  w-full p-2
  text-xl font-light
  border-inherit
  group-focus-within:border-slate-300
`;

interface PropTypes {
  value: string;
  subtext?: string;
  thumb?: string;
  isLast: boolean;
  selected: boolean;
  href?: string;
  style: 'item' | 'info' | 'error';
}

export default function ListItem({
  value,
  thumb,
  isLast,
  selected,
  style,
  href,
  subtext,
}: PropTypes) {
  const iconClassName = ['ml-2'];
  const textClassName = [
    COMMON_LIST_ELEM_CLASSES,
    'relative flex items-center justify-between',
    'border-t-0',
    isLast && 'rounded-b-lg',
    style === 'item' && selected && 'bg-slate-700 text-slate-100',
    style === 'info' && 'text-slate-500 italic',
    style === 'error' && 'text-red-500',
    'hover:bg-slate-600 hover:text-slate-200',
  ].filter(Boolean);

  return (
    <a
      className="relative block"
      onPointerUp={() => {
        if (href) {
          window.location.href = href;
        }
      }}
      href={href}
    >
      {style === 'error' && (
        <Icon type="exclamation-circle" className={iconClassName.join(' ')} />
      )}
      <div className={textClassName.join(' ')}>
        <div className="inline-block w-8">
          {thumb && (
            <img src={thumb} alt="thumbnail" className="inline w-full" />
          )}
        </div>
        <div className="inline-block flex-grow pl-1">{value}</div>
        {subtext && (
          <div className="vertical-align-middle float-right inline-block pl-1 text-sm italic text-slate-500">
            {subtext}
          </div>
        )}
      </div>
    </a>
  );
}
144  app/javascript/bundles/Main/components/UserMenu.tsx  Normal file
@@ -0,0 +1,144 @@
import * as React from 'react';
import { useRef, useEffect, useState } from 'react';

interface UserMenuProps {
  userEmail: string;
  userRole?: 'admin' | 'moderator';
  editProfilePath: string;
  signOutPath: string;
  csrfToken: string;
  globalStatesPath: string;
  goodJobPath: string;
  grafanaPath: string;
  prometheusPath: string;
}

export const UserMenu: React.FC<UserMenuProps> = ({
  userEmail,
  userRole,
  editProfilePath,
  signOutPath,
  csrfToken,
  globalStatesPath,
  goodJobPath,
  grafanaPath,
  prometheusPath,
}) => {
  const [isOpen, setIsOpen] = useState(false);
  const menuRef = useRef<HTMLDivElement>(null);

  useEffect(() => {
    const handleClickOutside = (event: MouseEvent) => {
      if (menuRef.current && !menuRef.current.contains(event.target as Node)) {
        setIsOpen(false);
      }
    };

    document.addEventListener('mousedown', handleClickOutside);
    return () => document.removeEventListener('mousedown', handleClickOutside);
  }, []);

  const handleSignOut = (e: React.FormEvent) => {
    e.preventDefault();
    const form = document.createElement('form');
    form.method = 'POST';
    form.action = signOutPath;
    form.style.display = 'none';

    const methodInput = document.createElement('input');
    methodInput.type = 'hidden';
    methodInput.name = '_method';
    methodInput.value = 'delete';

    const csrfInput = document.createElement('input');
    csrfInput.type = 'hidden';
    csrfInput.name = 'authenticity_token';
    csrfInput.value = csrfToken;

    form.appendChild(methodInput);
    form.appendChild(csrfInput);
    document.body.appendChild(form);
    form.submit();
  };

  return (
    <div className="relative" ref={menuRef}>
      <button
        className="flex items-center space-x-2 text-slate-600 hover:text-slate-900 focus:outline-none"
        onClick={() => setIsOpen(!isOpen)}
      >
        <i className="fas fa-user-circle text-2xl" />
        <i className="fas fa-chevron-down text-xs" />
      </button>

      <div
        className={`absolute right-0 z-50 mt-2 w-48 rounded-md bg-white py-1 shadow-lg ring-1 ring-black ring-opacity-5 transition-all duration-200 ${
          isOpen ? 'visible opacity-100' : 'invisible opacity-0'
        }`}
      >
        <div className="border-b border-slate-200 px-4 py-2 text-sm text-slate-700">
          <div className="font-medium">{userEmail}</div>
          {userRole === 'admin' && (
            <span className="inline-flex items-center rounded-full bg-red-100 px-2 py-0.5 text-xs font-medium text-red-800">
              Admin
            </span>
          )}
          {userRole === 'moderator' && (
            <span className="inline-flex items-center rounded-full bg-blue-100 px-2 py-0.5 text-xs font-medium text-blue-800">
              Mod
            </span>
          )}
        </div>

        {userRole === 'admin' && (
          <>
            <a
              href={globalStatesPath}
              className="flex w-full items-center px-4 py-2 text-sm text-slate-700 hover:bg-slate-100"
            >
              <i className="fas fa-cogs mr-2 w-5" />
              <span>Global State</span>
            </a>
            <a
              href={goodJobPath}
              className="flex w-full items-center px-4 py-2 text-sm text-slate-700 hover:bg-slate-100"
            >
              <i className="fas fa-tasks mr-2 w-5" />
              <span>Jobs Queue</span>
            </a>
            <a
              href={grafanaPath}
              className="flex w-full items-center px-4 py-2 text-sm text-slate-700 hover:bg-slate-100"
            >
              <i className="fas fa-chart-line mr-2 w-5" />
              <span>Grafana</span>
            </a>
            <a
              href={prometheusPath}
              className="flex w-full items-center px-4 py-2 text-sm text-slate-700 hover:bg-slate-100"
            >
              <i className="fas fa-chart-bar mr-2 w-5" />
              <span>Prometheus</span>
            </a>
          </>
        )}

        <a
          href={editProfilePath}
          className="flex w-full items-center px-4 py-2 text-sm text-slate-700 hover:bg-slate-100"
        >
          <i className="fas fa-cog mr-2 w-5" />
          <span>Edit Profile</span>
        </a>

        <button
          onClick={handleSignOut}
          className="flex w-full items-center px-4 py-2 text-left text-sm text-slate-700 hover:bg-slate-100"
        >
          <i className="fas fa-sign-out-alt mr-2 w-5" />
          <span>Sign Out</span>
        </button>
      </div>
    </div>
  );
};
307  app/javascript/bundles/Main/components/UserSearchBar.tsx  Normal file
@@ -0,0 +1,307 @@
import { debounce, isEmpty } from 'lodash';
import * as React from 'react';
import { useCallback, useRef, useState } from 'react';
import Icon from './Icon';
import ListItem from './ListItem';
import Trie, { TrieNode } from '../lib/Trie';

// 1. Group related constants
const CONFIG = {
  HOST: '',
  LOG: false,
} as const;

const STYLES = {
  LIST_ELEM_CLASSNAME: [
    'w-full p-2 pl-8 text-xl font-light border-slate-300 border-2',
    'group-focus-within:border-slate-400',
  ],
  SVG_BASE_CLASSNAME: `stroke-slate-500 fill-slate-500`,
  SVG_FOCUSABLE_CLASSNAME: `stroke-slate-500 fill-slate-500 group-focus-within:stroke-slate-800 group-focus-within:fill-slate-800`,
  INPUT_CLASSNAME: `text-slate-500 group-focus-within:text-slate-800 placeholder-slate-500 group-focus-within:placeholder-slate-800 placeholder:font-extralight`,
} as const;

// 2. Simplify logging
const log = {
  info: (...args: any[]) => CONFIG.LOG && console.log(...args),
  error: (...args: any[]) => CONFIG.LOG && console.error(...args),
};

interface PropTypes {
  isServerRendered?: boolean;
}

interface User {
  id: number;
  name: string;
  url_name: string;
  thumb?: string;
  show_path: string;
  num_posts: number;
}

interface ServerResponse {
  users: User[];
}

type TrieValue = [number, string];
type TrieType = Trie<TrieValue>;
type TrieNodeType = TrieNode<TrieValue>;

export default function UserSearchBar({ isServerRendered }: PropTypes) {
  isServerRendered = !!isServerRendered;
  const [pendingRequest, setPendingRequest] = useState<AbortController | null>(
    null,
  );
  const [state, setState] = useState({
    userName: '',
    userList: [] as ServerResponse['users'],
    selectedIdx: null as number | null,
    errorMessage: null as string | null,
    typingSettled: true,
    isFocused: isServerRendered ? false : true,
  });

  const inputRef = useRef(null);

  const clearResults = useCallback(() => {
    setState((s) => ({
      ...s,
      userList: [],
      errorMessage: null,
      selectedIdx: null,
    }));
  }, []);

  const cancelPendingRequest = useCallback(async () => {
    if (pendingRequest) {
      setPendingRequest(null);
      pendingRequest.abort();
    }
  }, [pendingRequest, setPendingRequest]);

  const sendSearchRequest = useCallback(
    (userName) => {
      cancelPendingRequest();

      const controller = new AbortController();
      setPendingRequest(controller);

      async function sendRequest() {
        try {
          let req = await fetch(
            `${CONFIG.HOST}/api/fa/search_user_names?name=${userName}`,
            {
              signal: controller.signal,
            },
          );

          setPendingRequest(null);
          setState((s) => ({
            ...s,
            errorMessage: null,
          }));

          if (req.status != 200) {
            const error_json = await req.json();
            setState((s) => ({
              ...s,
              errorMessage: `error loading users: ${error_json.error || JSON.stringify(error_json)}`,
            }));
          } else {
            let gotUserList = await req.json();
            setState((s) => ({
              ...s,
              userList: gotUserList.users,
            }));
          }
        } catch (err) {
          if (!err.message.includes('aborted')) {
            log.error('error loading user trie: ', err);
            setState((s) => ({
              ...s,
              errorMessage: `error loading users: ` + err.message,
            }));
          }
        }
      }
      sendRequest();

      return () => controller.abort();
    },
    [cancelPendingRequest, setPendingRequest],
  );

  const searchForUser = useCallback(
    (userName: string) => {
      setState((s) => ({ ...s, userName }));
      if (isEmpty(userName)) {
        clearResults();
      } else {
        sendSearchRequest(userName);
      }
    },
    [clearResults, sendSearchRequest],
  );

  const searchForUserDebounced = useCallback(
    debounce(async (userName) => {
      log.info('sending search for ', userName);
      setState((s) => ({ ...s, typingSettled: true }));
      searchForUser(userName);
    }, 250),
    [searchForUser],
  );

  function invokeIdx(idx) {
    const user = state.userList[idx];
    if (user) {
      log.info('selecting user: ', user);
      setState((s) => ({ ...s, userName: user.name }));
      inputRef.current.value = user.name;
      window.location.href = user.show_path;
    }
  }

  function invokeSelected() {
    if (state.selectedIdx != null) {
      invokeIdx(state.selectedIdx);
    }
  }

  const visibility = {
    error: state.isFocused && !isEmpty(state.errorMessage),
    info:
      state.isFocused &&
      !isEmpty(state.userName) &&
      !pendingRequest &&
      state.typingSettled &&
      state.userList.length === 0,
    items: !isEmpty(state.userName) && state.userList.length > 0,
  };
  const anyShown = Object.values(visibility).some(Boolean);

  function UserSearchBarItems() {
    return (
      <div
        className={`${anyShown || 'border-b-0'} divide-y divide-inherit rounded-b-lg border border-t-0 border-inherit`}
      >
        {visibility.error ? (
          <ListItem
            key="error"
            isLast={!visibility.info && state.userList.length == 0}
            selected={false}
            style="error"
            value={state.errorMessage}
          />
        ) : null}
        {visibility.info ? (
          <ListItem
            key="info"
            isLast={!visibility.items}
            selected={false}
            style="info"
            value="No users found"
          />
        ) : null}
        {visibility.items
          ? state.userList.map(({ name, thumb, show_path, num_posts }, idx) => (
              <ListItem
                key={'name-' + name}
                isLast={idx == state.userList.length - 1}
                selected={idx == state.selectedIdx}
                style="item"
                value={name}
                thumb={thumb}
                href={show_path}
                subtext={`${num_posts.toString()} posts`}
              />
            ))
          : null}
      </div>
    );
  }

  const keyHandlers = {
    Tab: (shiftKey: boolean) =>
      shiftKey ? selectPrevListElem() : selectNextListElem(),
    ArrowDown: () => selectNextListElem(),
    ArrowUp: () => selectPrevListElem(),
    Enter: () => invokeSelected(),
  };

  function onSearchInputKeyDown(event: React.KeyboardEvent) {
    const handler = keyHandlers[event.code];
    if (handler) {
      event.preventDefault();
      handler(event.shiftKey);
    }
  }

  function selectNextListElem() {
    setNewIdxTruncated(state.selectedIdx == null ? 0 : state.selectedIdx + 1);
  }

  function selectPrevListElem() {
    setNewIdxTruncated(state.selectedIdx == null ? -1 : state.selectedIdx - 1);
  }

  function setNewIdxTruncated(newIdx) {
    if (state.userList.length == 0) {
      newIdx = null;
    } else {
      if (newIdx >= state.userList.length) {
        newIdx = 0;
      } else if (newIdx < 0) {
        newIdx = state.userList.length - 1;
      }
    }
    setState((s) => ({ ...s, selectedIdx: newIdx }));
  }

  return (
    <div
      className={[
        'group mx-auto w-full p-2 transition-colors duration-1000 sm:rounded-xl',
        'focus-within:border-slate-400 sm:max-w-md',
        'border-slate-300 bg-slate-50 p-2 shadow-lg',
      ].join(' ')}
    >
      <label className={`relative block ${STYLES.INPUT_CLASSNAME}`}>
        <Icon
          type="magnifying-glass"
          className={`ml-2 ${STYLES.SVG_FOCUSABLE_CLASSNAME}`}
        />
        {pendingRequest && (
          <Icon
            type="spinner"
            className={`right-2 ${STYLES.SVG_BASE_CLASSNAME}`}
          />
        )}
        <input
          autoFocus
          className={[
            STYLES.LIST_ELEM_CLASSNAME,
            STYLES.INPUT_CLASSNAME,
            'rounded-lg outline-none',
            'bg-slate-50 placeholder:italic',
            anyShown && 'rounded-b-none',
          ]
            .filter(Boolean)
            .join(' ')}
          placeholder="Search FurAffinity Users?!?"
          defaultValue={state.userName}
          onChange={(e) => {
            setState((s) => ({ ...s, typingSettled: false }));
            searchForUserDebounced(e.target.value);
          }}
          onKeyDown={onSearchInputKeyDown}
          onFocus={() => setState((s) => ({ ...s, isFocused: true }))}
          onBlur={() => setState((s) => ({ ...s, isFocused: false }))}
          ref={inputRef}
        />
      </label>
      <UserSearchBarItems />
    </div>
  );
}
@@ -0,0 +1,6 @@
import * as React from 'react';
import UserSearchBar from './UserSearchBar';

export default function (props) {
  return <UserSearchBar {...props} isServerRendered={true} />;
}
91  app/javascript/bundles/Main/lib/Trie.ts  Normal file
@@ -0,0 +1,91 @@
interface SerializedTrie<T> {
  // terminal node?
  t: 1 | 0;
  // value of the node
  v: T;
  // optional children
  c?: { [s: string]: SerializedTrie<T> };
}

export class TrieNode<T> {
  public terminal: boolean;
  public value: T;
  public children: Map<string, TrieNode<T>>;
  public serialized: SerializedTrie<T>;

  constructor(ser: SerializedTrie<T>) {
    this.terminal = ser.t == 1;
    this.value = ser.v;
    this.children = new Map();
    this.serialized = ser;

    if (ser.c != null) {
      for (const [key, value] of Object.entries(ser.c)) {
        this.children.set(key, new TrieNode(value));
      }
    }
  }
}

export default class Trie<T> {
  public root: TrieNode<T>;
  constructor(ser: SerializedTrie<T>) {
    this.root = new TrieNode(ser);
  }

  public nodeForPrefix(key: string): {
    chain: string[];
    node: TrieNode<T> | null;
  } {
    let chain = [];
    let node = this.root;
    let remaining = key;
    while (node && remaining.length > 0) {
      let exactChild = null;
      console.log('remaining: ', remaining);

      for (const [childKey, child] of node.children.entries()) {
        if (remaining.startsWith(childKey)) {
          console.log('exact match for: ', childKey);
          exactChild = child;
          chain.push(childKey);
          remaining = remaining.slice(childKey.length);
          break;
        }
      }

      // if an exact match was found, continue iterating
      if (exactChild) {
        node = exactChild;
        continue;
      }

      console.log('looking for partial match for ', remaining);
      for (const [childKey, child] of node.children.entries()) {
        const startsWith = childKey.startsWith(remaining);
        console.log(
          'test ',
          childKey,
          ' against ',
          remaining,
          ': ',
          startsWith,
          ' ',
          child.serialized,
        );
        if (startsWith) {
          console.log('partial match for: ', remaining, ': ', child.serialized);
          chain.push(childKey);
          return { chain, node: child };
        }
      }

      console.log('did not find partial, bailing!');
      return { chain, node: null };
    }

    // // return remaining.length === 0 && node && node.terminal ? node : null;
    console.log('returning child ', node, ' for remaining ', remaining);
    return { chain, node };
  }
}
@@ -1,9 +0,0 @@
import { Application } from "@hotwired/stimulus"

const application = Application.start()

// Configure Stimulus development experience
application.debug = false
window.Stimulus = application

export { application }
@@ -1,7 +0,0 @@
import { Controller } from "@hotwired/stimulus"

export default class extends Controller {
  connect() {
    this.element.textContent = "Hello World!"
  }
}
@@ -1,11 +0,0 @@
// Import and register all your controllers from the importmap under controllers/*

import { application } from "controllers/application"

// Eager load all controllers defined in the import map under controllers/**/*_controller
import { eagerLoadControllersFrom } from "@hotwired/stimulus-loading"
eagerLoadControllersFrom("controllers", application)

// Lazy load controllers as they appear in the DOM (remember not to preload controllers in import map!)
// import { lazyLoadControllersFrom } from "@hotwired/stimulus-loading"
// lazyLoadControllersFrom("controllers", application)
10  app/javascript/packs/application-bundle.js  Normal file
@@ -0,0 +1,10 @@
import ReactOnRails from 'react-on-rails';

import UserSearchBar from '../bundles/Main/components/UserSearchBar';
import { UserMenu } from '../bundles/Main/components/UserMenu';

// This is how react_on_rails can see the components in the browser.
ReactOnRails.register({
  UserSearchBar,
  UserMenu,
});
9  app/javascript/packs/server-bundle.js  Normal file
@@ -0,0 +1,9 @@
import ReactOnRails from 'react-on-rails';

import UserSearchBar from '../bundles/Main/components/UserSearchBarServer';
import { UserMenu } from '../bundles/Main/components/UserMenu';
// This is how react_on_rails can see the UserSearchBar in the browser.
ReactOnRails.register({
  UserMenu,
  UserSearchBar,
});
140  app/javascript/server/buildUsersTrie.js  Normal file
@@ -0,0 +1,140 @@
function buildUsersTrie(users) {
  const rootNode = new trie();
  users.forEach(([id, name]) => {
    rootNode.insert(name.toLowerCase(), [id, name]);
  });
  return JSON.stringify(rootNode.serialize());
}
class trie_node {
  constructor() {
    this.terminal = false;
    this.children = new Map();
  }
  serialize() {
    const { terminal, value, children } = this;
    let mapped = {};
    let numChildren = 0;
    Object.keys(Object.fromEntries(children)).forEach((childKey) => {
      numChildren += 1;
      mapped[childKey] = children.get(childKey).serialize();
    });
    return {
      t: this.terminal ? 1 : 0,
      v: value,
      c: numChildren > 0 ? mapped : undefined,
    };
  }
}
class trie {
  constructor() {
    this.root = new trie_node();
    this.elements = 0;
  }
  serialize() {
    return this.root.serialize();
  }
  get length() {
    return this.elements;
  }
  get(key) {
    const node = this.getNode(key);
    if (node) {
      return node.value;
    }
    return null;
  }
  contains(key) {
    const node = this.getNode(key);
    return !!node;
  }
  insert(key, value) {
    let node = this.root;
    let remaining = key;
    while (remaining.length > 0) {
      let child = null;
      for (const childKey of node.children.keys()) {
        const prefix = this.commonPrefix(remaining, childKey);
        if (!prefix.length) {
          continue;
        }
        if (prefix.length === childKey.length) {
          // enter child node
          child = node.children.get(childKey);
          remaining = remaining.slice(childKey.length);
          break;
        } else {
          // split the child
          child = new trie_node();
          child.children.set(childKey.slice(prefix.length), node.children.get(childKey));
          node.children.delete(childKey);
          node.children.set(prefix, child);
          remaining = remaining.slice(prefix.length);
          break;
        }
      }
      if (!child && remaining.length) {
        child = new trie_node();
        node.children.set(remaining, child);
        remaining = "";
      }
      node = child;
    }
    if (!node.terminal) {
      node.terminal = true;
      this.elements += 1;
    }
    node.value = value;
  }
  remove(key) {
    const node = this.getNode(key);
    if (node) {
      node.terminal = false;
      this.elements -= 1;
    }
  }
  map(prefix, func) {
    const mapped = [];
    const node = this.getNode(prefix);
    const stack = [];
    if (node) {
      stack.push([prefix, node]);
    }
    while (stack.length) {
      const [key, node] = stack.pop();
      if (node.terminal) {
        mapped.push(func(key, node.value));
      }
      for (const c of node.children.keys()) {
        stack.push([key + c, node.children.get(c)]);
      }
    }
    return mapped;
  }
  getNode(key) {
    let node = this.root;
    let remaining = key;
    while (node && remaining.length > 0) {
      let child = null;
      for (let i = 1; i <= remaining.length; i += 1) {
        child = node.children.get(remaining.slice(0, i));
        if (child) {
          remaining = remaining.slice(i);
          break;
        }
      }
      node = child;
    }
    return remaining.length === 0 && node && node.terminal ? node : null;
  }
  commonPrefix(a, b) {
    const shortest = Math.min(a.length, b.length);
    let i = 0;
    for (; i < shortest; i += 1) {
      if (a[i] !== b[i]) {
        break;
      }
    }
    return a.slice(0, i);
  }
}
163  app/javascript/server/buildUsersTrie.ts  Normal file
@@ -0,0 +1,163 @@
type UserRow = [number, string];

function buildUsersTrie(users: UserRow[]): string {
  const rootNode = new trie<[number, string]>();
  users.forEach(([id, name]) => {
    rootNode.insert(name.toLowerCase(), [id, name]);
  });
  return JSON.stringify(rootNode.serialize());
}

class trie_node<T> {
  public terminal: boolean;
  public value: T;
  public children: Map<string, trie_node<T>>;

  constructor() {
    this.terminal = false;
    this.children = new Map();
  }

  public serialize(): Object {
    const { terminal, value, children } = this;
    let mapped = {};
    let numChildren = 0;
    Object.keys(Object.fromEntries(children)).forEach((childKey) => {
      numChildren += 1;
      mapped[childKey] = children.get(childKey).serialize();
    });
    return {
      t: this.terminal ? 1 : 0,
      v: value,
      c: numChildren > 0 ? mapped : undefined,
    };
  }
}

class trie<T> {
  public root: trie_node<T>;
  public elements: number;

  constructor() {
    this.root = new trie_node<T>();
    this.elements = 0;
  }

  public serialize(): Object {
    return this.root.serialize();
  }

  public get length(): number {
    return this.elements;
  }

  public get(key: string): T | null {
    const node = this.getNode(key);
    if (node) {
      return node.value;
    }
    return null;
  }

  public contains(key: string): boolean {
    const node = this.getNode(key);
    return !!node;
  }

  public insert(key: string, value: T): void {
    let node = this.root;
    let remaining = key;
    while (remaining.length > 0) {
      let child: trie_node<T> = null;
      for (const childKey of node.children.keys()) {
        const prefix = this.commonPrefix(remaining, childKey);
        if (!prefix.length) {
          continue;
        }
        if (prefix.length === childKey.length) {
          // enter child node
          child = node.children.get(childKey);
          remaining = remaining.slice(childKey.length);
          break;
        } else {
          // split the child
          child = new trie_node<T>();
          child.children.set(
            childKey.slice(prefix.length),
            node.children.get(childKey)
          );
          node.children.delete(childKey);
          node.children.set(prefix, child);
          remaining = remaining.slice(prefix.length);
          break;
        }
      }
      if (!child && remaining.length) {
        child = new trie_node<T>();
        node.children.set(remaining, child);
        remaining = "";
      }
      node = child;
    }
    if (!node.terminal) {
      node.terminal = true;
      this.elements += 1;
    }
    node.value = value;
  }

  public remove(key: string): void {
    const node = this.getNode(key);
    if (node) {
      node.terminal = false;
      this.elements -= 1;
    }
  }

  public map<U>(prefix: string, func: (key: string, value: T) => U): U[] {
    const mapped = [];
    const node = this.getNode(prefix);
    const stack: [string, trie_node<T>][] = [];
    if (node) {
      stack.push([prefix, node]);
    }
    while (stack.length) {
      const [key, node] = stack.pop();
      if (node.terminal) {
        mapped.push(func(key, node.value));
      }
      for (const c of node.children.keys()) {
        stack.push([key + c, node.children.get(c)]);
      }
    }
    return mapped;
  }

  private getNode(key: string): trie_node<T> | null {
    let node = this.root;
    let remaining = key;
    while (node && remaining.length > 0) {
      let child = null;
      for (let i = 1; i <= remaining.length; i += 1) {
        child = node.children.get(remaining.slice(0, i));
        if (child) {
          remaining = remaining.slice(i);
          break;
        }
      }
      node = child;
    }
    return remaining.length === 0 && node && node.terminal ? node : null;
  }

  private commonPrefix(a: string, b: string): string {
    const shortest = Math.min(a.length, b.length);
    let i = 0;
    for (; i < shortest; i += 1) {
      if (a[i] !== b[i]) {
        break;
      }
    }
    return a.slice(0, i);
  }
}
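Both buildUsersTrie variants implement a radix (compressed) trie: insert walks edge labels, and when a new key shares only part of an existing edge, the edge is split at the common prefix. A language-shifted Ruby sketch of just that splitting rule (names are illustrative):

```ruby
def common_prefix(a, b)
  len = 0
  len += 1 while len < a.length && len < b.length && a[len] == b[len]
  a[0...len]
end

common_prefix("foobar", "fox") # => "fo"
# Inserting "fox" under an existing "foobar" edge therefore splits it into
# "fo" -> ["obar", "x"], keeping every stored key reachable.
```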
@@ -1,7 +1,29 @@
# typed: strict
class ApplicationJob < ActiveJob::Base
  # Automatically retry jobs that encountered a deadlock
  # retry_on ActiveRecord::Deadlocked
  extend T::Sig
  extend T::Helpers

  # Most jobs are safe to ignore if the underlying records are no longer available
  # discard_on ActiveJob::DeserializationError
  abstract!

  include GoodJob::ActiveJobExtensions::Concurrency
  include HasColorLogger

  retry_on(
    StandardError,
    wait: :polynomially_longer,
    attempts: :unlimited,
  ) do |job, exception|
    job.logger.error(
      "error: #{exception.message}\n#{exception.backtrace.join("\n")}",
    )
  end

  @ignore_signature_args = T.let([], T.nilable(T::Array[Symbol]))

  sig { params(args: Symbol).returns(T::Array[Symbol]) }
  def self.ignore_signature_args(*args)
    @ignore_signature_args ||= []
    @ignore_signature_args.concat(args)
    @ignore_signature_args
  end
end
7  app/jobs/domain/e621/job/base.rb  Normal file
@@ -0,0 +1,7 @@
# typed: strict
class Domain::E621::Job::Base < Scraper::JobBase
  sig { override.returns(Symbol) }
  def self.http_factory_method
    :get_e621_http_client
  end
end
56  app/jobs/domain/e621/job/posts_index_job.rb  Normal file
@@ -0,0 +1,56 @@
# typed: strict
class Domain::E621::Job::PostsIndexJob < Domain::E621::Job::Base
  queue_as :e621

  sig { override.params(args: T::Hash[Symbol, T.untyped]).void }
  def perform(args)
    response =
      http_client.get(
        "https://e621.net/posts.json",
        caused_by_entry: causing_log_entry,
      )
    log_entry = response.log_entry
    self.first_log_entry ||= log_entry

    if response.status_code != 200
      fatal_error(
        "non 200 response for /posts.json: #{response.status_code.to_s.underline}",
      )
    end

    json = JSON.parse(response.body)
    if json["posts"].nil?
      fatal_error("no posts in response, hle #{log_entry.id}")
    end

    e621_posts =
      json["posts"].map do |post_json|
        Domain::E621::TagUtil.initialize_or_update_post(
          post_json: post_json,
          caused_by_entry: causing_log_entry,
        )
      end

    created_posts = []
    updated_posts = []
    seen_posts = []

    e621_posts.each do |e621_post|
      created_posts << e621_post if e621_post.new_record?
      updated_posts << e621_post if e621_post.changed?
      seen_posts << e621_post
      e621_post.save!
    end

    (created_posts + updated_posts).uniq.each do |post|
      logger.info(
        "[e621_id: #{post.e621_id.to_s.bold}] enqueueing static file job",
      )
      defer_job(Domain::E621::Job::StaticFileJob, { post: post })
    end

    logger.info(
      "#{updated_posts.count} updated, #{created_posts.count} created, #{seen_posts.count} seen",
    )
  end
end

app/jobs/domain/e621/job/scan_post_job.rb (new file, +54)
@@ -0,0 +1,54 @@
# typed: strict
class Domain::E621::Job::ScanPostJob < Domain::E621::Job::Base
  queue_as :e621

  sig { override.params(args: T::Hash[Symbol, T.untyped]).void }
  def perform(args)
    post = T.let(args[:post] || raise("no post provided"), Domain::E621::Post)

    logger.prefix =
      proc { "[e621_id #{post.e621_id.to_s.bold} / #{post.state&.bold}]" }

    if post.file.present?
      logger.warn("Post #{post.e621_id} already has a file")
      return
    end

    if post.file_url_str.present?
      logger.error("Post #{post.e621_id} already has a file URL")
      return
    end

    logger.info("Scanning post #{post.e621_id}")
    response =
      http_client.get(
        "https://e621.net/posts/#{post.e621_id}.json",
        caused_by_entry: causing_log_entry,
      )
    log_entry = response.log_entry
    self.first_log_entry ||= log_entry
    if response.status_code != 200
      post.state_detail["scan_log_entry_id"] = log_entry.id
      post.state = :scan_error
      post.state_detail[
        "scan_error"
      ] = "Error scanning post #{post.e621_id}: #{response.status_code}"
      post.save!
      fatal_error(
        "Error scanning post #{post.e621_id}: #{response.status_code}",
      )
    end

    post_json = JSON.parse(response.body)["post"]
    post =
      Domain::E621::TagUtil.initialize_or_update_post(
        post_json: post_json,
        caused_by_entry: log_entry,
      )
    post.save!

    unless post.file.present?
      defer_job(Domain::E621::Job::StaticFileJob, { post: post })
    end
  end
end
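
Taken together, the three e621 jobs move a post through a small state lifecycle. A sketch of the transitions visible in these diffs (not an authoritative state machine):

  # nil / fresh  -> :ok          after a successful scan and file download
  # any          -> :scan_error  on a non-200 from /posts/<id>.json
  # any          -> :file_error  on a non-200 file download (retried, see below)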

app/jobs/domain/e621/job/static_file_job.rb (new file, +53)
@@ -0,0 +1,53 @@
# typed: strict
class Domain::E621::Job::StaticFileJob < Domain::E621::Job::Base
  queue_as :static_file

  sig { override.params(args: T::Hash[Symbol, T.untyped]).void }
  def perform(args)
    post =
      T.let(args[:post] || fatal_error("post is required"), Domain::E621::Post)

    logger.prefix = proc { "[e621_id #{post.e621_id.to_s.bold}]" }

    file_url_str = post.file_url_str
    if file_url_str.blank?
      logger.warn("post has no file_url_str, enqueueing for scan")
      defer_job(Domain::E621::Job::ScanPostJob, { post: post })
      return
    end

    if post.state == "file_error"
      retry_count = post.state_detail&.[]("file_error")&.[]("retry_count") || 0
      if retry_count >= 3
        logger.error("file has been retried 3 times, giving up")
        return
      end
    end

    response = http_client.get(file_url_str, caused_by_entry: causing_log_entry)
    self.first_log_entry ||= response.log_entry

    if response.status_code != 200
      post.state = :file_error
      fe = (post.state_detail["file_error"] ||= {})
      fe["status_code"] = response.status_code
      fe["log_entry_id"] = response.log_entry.id
      fe["retry_count"] ||= 0
      fe["retry_count"] += 1
      post.save!

      if response.status_code == 404
        logger.error("#{response.status_code}, not retrying download")
      else
        fatal_error("#{response.status_code}, will retry later")
      end

      return
    end

    post.state = :ok
    post.file = response.log_entry
    post.save!
    logger.info "downloaded file"
  end
end
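
The failure bookkeeping nests under state_detail["file_error"] and gives up after 3 retries. An illustrative shape after a failed download (keys taken from the code above; values invented for the example):

  post.state_detail
  # => {
  #   "file_error" => {
  #     "status_code"  => 503,    # last non-200 response
  #     "log_entry_id" => 12345,  # HttpLogEntry recording the attempt
  #     "retry_count"  => 2,      # gives up once this reaches 3
  #   },
  # }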

app/jobs/domain/fa/job/base.rb (new file, +340)
@@ -0,0 +1,340 @@
# typed: strict
class Domain::Fa::Job::Base < Scraper::JobBase
  discard_on ActiveJob::DeserializationError

  sig { override.returns(Symbol) }
  def self.http_factory_method
    :get_fa_http_client
  end

  sig { params(args: T.untyped).void }
  def initialize(*args)
    super(*T.unsafe(args))
    @force_scan = T.let(false, T::Boolean)
    @user = T.let(nil, T.nilable(Domain::Fa::User))
    @created_user = T.let(false, T::Boolean)
    @posts_enqueued_for_scan = T.let(Set.new, T::Set[Integer])
  end

  protected

  sig do
    params(
      args: T.untyped,
      build_user: T::Boolean,
      require_user_exists: T::Boolean,
    ).returns(T.nilable(Domain::Fa::User))
  end
  def init_from_args!(args, build_user: true, require_user_exists: false)
    @force_scan = !!args[:force_scan]

    if build_user
      @user = find_or_build_user_from_args(args)
    else
      @user = find_user_from_args(args)
    end

    logger.prefix =
      "[user #{(@user&.url_name || @user&.name || args[:url_name])&.bold} / #{@user&.state&.bold}]"

    return nil unless @user
    if @user.new_record?
      if require_user_exists
        fatal_error("user must already exist")
      else
        @user.save!
        @created_user = true
      end
    end

    @user
  end

  sig { params(args: T.untyped).returns(Domain::Fa::User) }
  def find_or_build_user_from_args(args)
    find_user_from_args(args) ||
      begin
        url_name = Domain::Fa::User.name_to_url_name(args[:url_name])
        user = Domain::Fa::User.new
        user.url_name = url_name
        user.name = url_name
        user.state_detail ||= {}
        if cle = causing_log_entry
          user.state_detail["first_seen_entry"] = cle.id
        end
        user
      end
  end

  sig { params(args: T.untyped).returns(T.nilable(Domain::Fa::User)) }
  def find_user_from_args(args)
    args[:user] ||
      begin
        if args[:url_name].blank?
          fatal_error("arg 'url_name' is required if arg 'user' is nil")
        end
        url_name = Domain::Fa::User.name_to_url_name(args[:url_name])
        Domain::Fa::User.find_by(url_name: url_name)
      end
  end

  sig { params(scan_type: Symbol).returns(T::Boolean) }
  def user_due_for_scan?(scan_type)
    raise("user is nil") unless @user
    unless @user.scan_due?(scan_type)
      if @force_scan
        logger.warn(
          "scanned #{@user.scanned_ago_in_words(scan_type).bold} - force scanning",
        )
        return true
      else
        logger.warn(
          "scanned #{@user.scanned_ago_in_words(scan_type).bold} - skipping",
        )
        return false
      end
    end

    return true
  end

  ListingsPageScanStats = Struct.new(:new_seen, :total_seen, :last_was_new)

  sig do
    params(
      job_type: Symbol,
      page: T.untyped,
      enqueue_posts_pri: Symbol,
      enqueue_page_scan: T::Boolean,
      enqueue_gallery_scan: T::Boolean,
      page_desc: T.nilable(String),
      fill_id_gaps: T::Boolean,
      continue_for: T.nilable(Integer),
    ).returns(ListingsPageScanStats)
  end
  def update_and_enqueue_posts_from_listings_page(
    job_type,
    page,
    enqueue_posts_pri:,
    enqueue_page_scan: true,
    enqueue_gallery_scan: true,
    page_desc: nil,
    fill_id_gaps: false,
    continue_for: nil
  )
    fatal_error("not a listings page") unless page.probably_listings_page?
    submissions = page.submissions_parsed

    fa_ids_to_manually_enqueue = Set.new
    fa_ids = Set.new(submissions.map(&:id))

    create_unseen_posts = false

    if fill_id_gaps && submissions.any?
      create_unseen_posts = true
      max_fa_id, min_fa_id = fa_ids.max, fa_ids.min
      # sanity check so we don't enqueue too many post jobs
      if max_fa_id - min_fa_id <= 250
        (min_fa_id..max_fa_id).each do |fa_id|
          fa_ids_to_manually_enqueue << fa_id unless fa_ids.include?(fa_id)
        end
      end
    end

    if continue_for && submissions.any?
      max_fa_id = fa_ids.max
      min_fa_id = [max_fa_id - continue_for, 0].max
      fa_ids_to_manually_enqueue = Set.new(min_fa_id..max_fa_id)
      fa_ids_to_manually_enqueue.subtract(fa_ids)
      existing =
        Domain::Fa::Post.where(
          "fa_id >= ? AND fa_id <= ?",
          min_fa_id,
          max_fa_id,
        ).pluck(:fa_id)
      fa_ids_to_manually_enqueue.subtract(existing)
    end

    page_desc = (page_desc ? "page #{page_desc.to_s.bold}" : "page")

    listing_page_stats = ListingsPageScanStats.new(0, 0, false)
    submissions.each do |submission|
      post = Domain::Fa::Post.find_or_initialize_by(fa_id: submission.id)
      listing_page_stats.last_was_new = post.new_record?
      listing_page_stats.new_seen += 1 if post.new_record?
      listing_page_stats.total_seen += 1

      update_and_save_post_from_listings_page(job_type, post, submission)
      if post.creator
        enqueue_user_scan(
          T.must(post.creator),
          enqueue_page_scan: enqueue_page_scan,
          enqueue_gallery_scan: enqueue_gallery_scan,
        )
      end

      case post.state&.to_sym
      when :ok
        enqueue_post_scan(post, enqueue_posts_pri)
      when :removed
        logger.info "(todo) removed post seen in listing page, enqueue scan for fa_id #{post.fa_id}"
      when :scan_error
        logger.info "(todo) scan_error'd post seen in listing page for fa_id #{post.fa_id}"
      when :file_error
        logger.info "(todo) file_error'd post seen in listing page for fa_id #{post.fa_id}"
      else
        logger.info "unknown post state `#{post.state}` for fa_id #{post.fa_id}"
      end
    end

    fa_ids_to_manually_enqueue.to_a.sort.reverse.each do |fa_id|
      if create_unseen_posts
        # when filling gaps, only enqueue if the post wasn't found
        post = Domain::Fa::Post.find_or_initialize_by(fa_id: fa_id)
        if post.new_record?
          post.save!
          enqueue_post_scan(post, enqueue_posts_pri)
        end
      else
        enqueue_fa_id_scan(fa_id, enqueue_posts_pri)
      end
    end

    logger.info "#{page_desc} has #{submissions.count.to_s.bold} posts, " +
      "#{listing_page_stats.new_seen.to_s.bold} new"

    listing_page_stats
  end

  sig do
    params(job_type: Symbol, post: Domain::Fa::Post, submission: T.untyped).void
  end
  def update_and_save_post_from_listings_page(job_type, post, submission)
    if job_type == :browse_page
      post.log_entry_detail["first_browse_page_id"] ||= causing_log_entry&.id
    elsif job_type == :gallery_page
      post.log_entry_detail["first_gallery_page_id"] ||= causing_log_entry&.id
    else
      fatal_error("unhandled job_type: #{job_type}")
    end

    post.creator ||=
      Domain::Fa::User.find_or_build_from_submission_parser(submission)
    post.title = submission.title || fatal_error("blank title")
    post.thumbnail_uri =
      submission.thumb_path || fatal_error("blank thumb_path")
    post.save!
  end

  sig do
    params(
      user: Domain::Fa::User,
      enqueue_page_scan: T::Boolean,
      enqueue_gallery_scan: T::Boolean,
      enqueue_favs_scan: T::Boolean,
    ).void
  end
  def enqueue_user_scan(
    user,
    enqueue_page_scan: true,
    enqueue_gallery_scan: true,
    enqueue_favs_scan: true
  )
    # NOTE: these are method-local (the removed FaJobBase used @instance
    # variables here), so the dedup sets reset on every enqueue_user_scan call.
    users_enqueued_for_page_scan ||= Set.new
    users_enqueued_for_gallery_scan ||= Set.new
    users_enqueued_for_favs_scan ||= Set.new

    args =
      if user.persisted?
        { user: user }
      else
        unless user.url_name
          logger.warn "user does not have a url name and is not persisted, skipping (#{user.name})"
          return
        end

        { url_name: user.url_name }
      end

    if enqueue_page_scan && users_enqueued_for_page_scan.add?(user.url_name)
      if user.due_for_page_scan?
        logger.info(
          "enqueue user page job for #{T.must(user.url_name).bold}, " +
            "last scanned #{time_ago_in_words(user.scanned_page_at)}",
        )
        defer_job(Domain::Fa::Job::UserPageJob, args)
      end
    end

    if enqueue_gallery_scan &&
        users_enqueued_for_gallery_scan.add?(user.url_name)
      if user.due_for_gallery_scan?
        logger.info(
          "enqueue user gallery job for #{T.must(user.url_name).bold}, " +
            "last scanned #{time_ago_in_words(user.scanned_gallery_at)}",
        )
        defer_job(Domain::Fa::Job::UserGalleryJob, args)
      end
    end

    if enqueue_favs_scan && users_enqueued_for_favs_scan.add?(user.url_name)
      if user.due_for_favs_scan?
        logger.info(
          "enqueue user favs job for #{T.must(user.url_name).bold}, " +
            "last scanned #{time_ago_in_words(user.scanned_favs_at)}",
        )
        defer_job(Domain::Fa::Job::FavsJob, args)
      end
    end
  end

  sig { params(enqueue_pri: T.nilable(Symbol)).returns(Integer) }
  def self.normalize_enqueue_pri(enqueue_pri)
    case enqueue_pri
    when :low
      -5
    when :high
      -15
    else
      -10
    end
  end

  sig { params(fa_id: Integer, enqueue_pri: T.nilable(Symbol)).void }
  def enqueue_fa_id_scan(fa_id, enqueue_pri = nil)
    enqueue_pri = self.class.normalize_enqueue_pri(enqueue_pri)

    if @posts_enqueued_for_scan.add?(fa_id)
      logger.info "enqueue post scan for fa_id #{fa_id}"
      defer_job(
        Domain::Fa::Job::ScanPostJob,
        { fa_id: fa_id },
        { priority: enqueue_pri },
      )
    end
  end

  sig { params(post: Domain::Fa::Post, enqueue_pri: T.nilable(Symbol)).void }
  def enqueue_post_scan(post, enqueue_pri = nil)
    enqueue_pri = self.class.normalize_enqueue_pri(enqueue_pri)

    if @posts_enqueued_for_scan.add?(T.must(post.fa_id))
      fa_id_str = (post.fa_id || "(nil)").to_s.bold
      if !post.scanned?
        logger.info "enqueue post scan for fa_id #{fa_id_str}"
        defer_job(
          Domain::Fa::Job::ScanPostJob,
          { post: post },
          { priority: enqueue_pri },
        )
      elsif !post.have_file?
        logger.info "enqueue file scan for fa_id #{fa_id_str}"
        defer_job(
          Domain::Fa::Job::ScanFileJob,
          { post: post },
          { priority: enqueue_pri },
        )
      end
    end
  end
end
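
normalize_enqueue_pri maps the symbolic priorities to integers. Worked examples (the "lower value runs sooner" reading is an assumption based on the usual GoodJob/ActiveJob priority convention, not stated in this diff):

  Domain::Fa::Job::Base.normalize_enqueue_pri(:high) # => -15, runs soonest
  Domain::Fa::Job::Base.normalize_enqueue_pri(nil)   # => -10, the default
  Domain::Fa::Job::Base.normalize_enqueue_pri(:low)  # => -5, last of the three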

@@ -1,66 +1,62 @@
class Domain::Fa::Job::BrowsePageJob < Domain::Fa::Job::FaJobBase
# typed: strict
class Domain::Fa::Job::BrowsePageJob < Domain::Fa::Job::Base
  queue_as :fa_browse_page
  ignore_signature_args [:caused_by_entry]

  sig { params(args: T.untyped).void }
  def initialize(*args)
    super(*T.unsafe(args))
    @page_number = T.let(1, Integer)
    @total_num_new_posts_seen = T.let(0, Integer)
    @total_num_posts_seen = T.let(0, Integer)
  end

  sig { override.params(args: T::Hash[Symbol, T.untyped]).void }
  def perform(args)
    if self.class.ran_recently?
      logger.warn("skipping browse page job, ran too recently")
      return
    end
    self.class.mark_ran!

    @caused_by_entry = args[:caused_by_entry]
    @first_browse_page_entry = nil

    @page_number = 1
    @total_num_new_posts_seen = 0
    @total_num_posts_seen = 0

    while true
      break unless scan_browse_page
      break if @page_number > 150
      @page_number += 1
    end

    logger.info("finished, #{@total_num_new_posts_seen.to_s.bold} new, #{@total_num_posts_seen.to_s.bold} total posts across #{@page_number.to_s.bold} pages")
  end

  def self.ran_recently?
    @@last_ran_at ||= nil
    return false unless @@last_ran_at
    @@last_ran_at > 1.minute.ago
  end

  def self.mark_ran!
    @@last_ran_at = Time.now
    logger.info(
      [
        "[finished]",
        "[total new: #{@total_num_new_posts_seen.to_s.bold}]",
        "[total seen: #{@total_num_posts_seen.to_s.bold}]",
        "[pages: #{@page_number.to_s.bold}]",
      ].join(" "),
    )
  end

  private

  sig { returns(T::Boolean) }
  def scan_browse_page
    if @page_number == 1
      url = "https://www.furaffinity.net/browse/"
    else
      url = "https://www.furaffinity.net/browse/#{@page_number}"
      url = "https://www.furaffinity.net/browse/#{@page_number}/"
    end

    response = http_client.get(url, caused_by_entry: @first_browse_page_entry || @caused_by_entry)
    response = http_client.get(url, caused_by_entry: causing_log_entry)
    log_entry = response.log_entry
    @first_browse_page_entry ||= log_entry
    self.first_log_entry ||= log_entry

    if response.status_code != 200
      fatal_error("non 200 response for /browse: #{response.status_code.to_s.underline}")
      fatal_error(
        "non 200 response for /browse: #{response.status_code.to_s.underline}",
      )
    end
    page = Domain::Fa::Parser::Page.new(response.body)
    listing_page_stats = update_and_enqueue_posts_from_listings_page(
      :browse_page, page, log_entry,
      enqueue_posts_pri: :high,
      page_desc: "Browse@#{@page_number}",
    )

    if listing_page_stats.total_seen == 0
      fatal_error("0 posts on browse page - log entry #{log_entry.id.to_s.bold}")
    end
    page = Domain::Fa::Parser::Page.new(response.body)
    listing_page_stats =
      update_and_enqueue_posts_from_listings_page(
        :browse_page,
        page,
        enqueue_posts_pri: :high,
        page_desc: "Browse@#{@page_number}",
        fill_id_gaps: true,
      )

    @total_num_new_posts_seen += listing_page_stats.new_seen
    @total_num_posts_seen += listing_page_stats.total_seen

@@ -1,167 +0,0 @@
class Domain::Fa::Job::FaJobBase < Scraper::JobBase
  discard_on ActiveJob::DeserializationError

  def self.build_http_client
    @@fa_base_http_client ||= begin
      Scraper::FaHttpClient.new
    end
  end

  protected

  def http_client
    @http_client ||= self.class.build_http_client
  end

  def find_or_intitialize_user_from_args(args, caused_by_entry: nil)
    args[:user] || begin
      url_name = args[:url_name]&.downcase
      fatal_error("arg 'url_name' is required if arg 'user' is nil") if url_name.blank?
      Domain::Fa::User.find_or_create_by(url_name: url_name) do |user|
        user.state_detail ||= {}
        user.state_detail["first_seen_entry"] = caused_by_entry.id if caused_by_entry
      end
    end
  end

  ListingsPageScanStats = Struct.new(
    :new_seen,
    :total_seen,
    :last_was_new,
  )

  def update_and_enqueue_posts_from_listings_page(
    job_type,
    page,
    caused_by_entry,
    enqueue_posts_pri:,
    enqueue_page_scan: true,
    enqueue_gallery_scan: true,
    page_desc: nil
  )
    fatal_error("not a listings page") unless page.probably_listings_page?
    submissions = page.submissions_parsed

    page_desc = if page_desc
      "page #{page_desc.to_s.bold}"
    else
      "page"
    end

    listing_page_stats = ListingsPageScanStats.new(0, 0, false)
    submissions.each do |submission|
      post = Domain::Fa::Post.find_or_initialize_by(fa_id: submission.id)
      listing_page_stats.last_was_new = post.new_record?
      listing_page_stats.new_seen += 1 if post.new_record?
      listing_page_stats.total_seen += 1

      update_post_from_listings_page(job_type, post, submission, caused_by_entry)
      enqueue_user_scan(
        post.creator,
        caused_by_entry,
        enqueue_page_scan: enqueue_page_scan,
        enqueue_gallery_scan: enqueue_gallery_scan,
      ) if post.creator

      case post.state.to_sym
      when :ok
        enqueue_post_scan(post, caused_by_entry, enqueue_posts_pri)
      when :removed
        logger.info "(todo) removed post seen in listing page, enqueue scan for fa_id #{post.fa_id}"
      when :scan_error
        logger.info "(todo) scan_error'd post seen in listing page for fa_id #{post.fa_id}"
      when :file_error
        logger.info "(todo) file_error'd post seen in listing page for fa_id #{post.fa_id}"
      else
        logger.info "unknown post state `#{post.state}` for fa_id #{post.fa_id}"
      end
    end

    logger.info "#{page_desc} has #{submissions.count.to_s.bold} posts, " +
      "#{listing_page_stats.new_seen.to_s.bold} new"

    listing_page_stats
  end

  def update_post_from_listings_page(job_type, post, submission, caused_by_entry)
    if job_type == :browse_page
      post.log_entry_detail["first_browse_page_id"] ||= caused_by_entry.id
    elsif job_type == :gallery_page
      post.log_entry_detail["first_gallery_page_id"] ||= caused_by_entry.id
    else
      fatal_error("unhandled job_type: #{job_type}")
    end

    post.creator ||= Domain::Fa::User.find_or_build_from_submission_parser(submission)
    post.title = submission.title || fatal_error("blank title")
    post.thumbnail_uri = submission.thumb_path || fatal_error("blank thumb_path")
    post.save!
  end

  def enqueue_user_scan(
    user,
    caused_by_entry,
    enqueue_page_scan: true,
    enqueue_gallery_scan: true
  )
    @users_enqueued_for_page_scan ||= Set.new
    @users_enqueued_for_gallery_scan ||= Set.new

    args = if user.persisted?
      { user: user }
    else
      unless user.url_name
        logger.warn "user does not have a url name and is not persisted, skipping (#{user.name})"
        return
      end

      { url_name: user.url_name }
    end.merge({ caused_by_entry: caused_by_entry })

    if enqueue_page_scan && @users_enqueued_for_page_scan.add?(user.url_name)
      if user.due_for_page_scan?
        logger.info(
          "enqueue user page job for #{user.url_name.bold}, " +
          "last scanned #{time_ago_in_words(user.scanned_page_at)}"
        )
        enqueue_job(Domain::Fa::Job::UserPageJob, args)
      end
    end

    if enqueue_gallery_scan && @users_enqueued_for_gallery_scan.add?(user.url_name)
      if user.due_for_gallery_scan?
        logger.info(
          "enqueue user gallery job for #{user.url_name.bold}, " +
          "last scanned #{time_ago_in_words(user.scanned_gallery_at)}"
        )
        enqueue_job(Domain::Fa::Job::UserGalleryJob, args)
      end
    end
  end

  def enqueue_post_scan(post, caused_by_entry, enqueue_pri)
    enqueue_pri = case enqueue_pri
      when :low then -5
      when :high then -15
      else -10
    end

    @posts_enqueued_for_scan ||= Set.new
    if @posts_enqueued_for_scan.add?(post.fa_id)
      fa_id_str = (post.fa_id || "(nil)").to_s.bold
      if !post.scanned?
        logger.info "enqueue post scan for fa_id #{fa_id_str}"
        enqueue_job(Domain::Fa::Job::ScanPostJob, {
          post: post,
          caused_by_entry: caused_by_entry,
        }, { priority: enqueue_pri })
      elsif !post.have_file?
        logger.info "enqueue file scan for fa_id #{fa_id_str}"
        enqueue_job(Domain::Fa::Job::ScanFileJob, {
          post: post,
          caused_by_entry: caused_by_entry,
        }, { priority: enqueue_pri })
      end
    end
  end
end
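
The deleted base class above was replaced by Domain::Fa::Job::Base earlier in this section. The correspondences visible across the two diffs, for reference:

  # Domain::Fa::Job::FaJobBase             -> Domain::Fa::Job::Base (# typed: strict)
  # find_or_intitialize_user_from_args     -> find_or_build_user_from_args / find_user_from_args
  # enqueue_job(...)                       -> defer_job(...)
  # caused_by_entry threaded through args  -> causing_log_entry / first_log_entry on the job
  # update_post_from_listings_page         -> update_and_save_post_from_listings_page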

app/jobs/domain/fa/job/favs_job.rb (new file, +205)
@@ -0,0 +1,205 @@
# typed: strict
class Domain::Fa::Job::FavsJob < Domain::Fa::Job::Base
  include HasBulkEnqueueJobs
  queue_as :fa_user_favs

  USERS_PER_FULL_PAGE = T.let(Rails.env.test? ? 9 : 190, Integer)

  sig { params(args: T.untyped).void }
  def initialize(*args)
    super(*T.unsafe(args))
    @seen_post_ids = T.let(Set.new, T::Set[Integer])
    @page_id = T.let(nil, T.nilable(String))
    @page_number = T.let(0, Integer)
    @total_items_seen = T.let(0, Integer)
    @first_job_entry = T.let(nil, T.nilable(HttpLogEntry))
    @full_scan = T.let(false, T::Boolean)
    @force_scan = T.let(false, T::Boolean)
    @last_page_post_ids = T.let(Set.new, T::Set[Integer])
    @use_http_cache = T.let(false, T::Boolean)
  end

  sig { override.params(args: T::Hash[Symbol, T.untyped]).void }
  def perform(args)
    @first_job_entry = nil
    user = init_from_args!(args, build_user: false)
    @full_scan = !!args[:full_scan]
    @use_http_cache = !!args[:use_http_cache]
    user ||
      begin
        defer_job(Domain::Fa::Job::UserPageJob, { url_name: args[:url_name] })
        fatal_error("user does not exist: #{args}")
      end
    user = T.must(user)

    logger.prefix = "[#{user.url_name&.bold} / #{user.state&.bold}]"
    return unless user_due_for_scan?(:favs)

    max_page_number =
      T.let([((user.num_favorites || 0) + 1) / 48, 100].max, Integer)
    logger.info "[max page number] [#{max_page_number.to_s.bold}]"

    existing_faved_ids =
      T.let(
        Set.new(user.fav_post_joins.active.pluck(:post_id)),
        T::Set[Integer],
      )

    to_add = T.let(Set.new, T::Set[Integer])

    while true
      ret = scan_page(user: user)
      break if ret == :break
      return if ret == :stop

      if !@full_scan
        new_favs = @last_page_post_ids - existing_faved_ids
        if new_favs.empty?
          user.scanned_favs_at = Time.zone.now

          to_add += @seen_post_ids - existing_faved_ids
          logger.info "[partial scan] [add #{to_add.size.to_s.bold}] [remove none]"
          ReduxApplicationRecord.transaction do
            to_add.each_slice(1000) do |slice|
              user.fav_post_joins.upsert_all(
                slice.map { |id| { post_id: id, removed: false } },
                unique_by: :index_domain_fa_favs_on_user_id_and_post_id,
                update_only: [:removed],
              )
            end
            user.save!
          end
          logger.info "[reached end of unobserved favs] [stopping scan]"
          return
        end
      end

      break if @page_number > max_page_number
      @page_number += 1
    end

    to_remove = existing_faved_ids - @seen_post_ids
    to_add = @seen_post_ids - existing_faved_ids
    logger.info "[calc change favs] [add #{to_add.size.to_s.bold}] [remove #{to_remove.size.to_s.bold}]"

    ReduxApplicationRecord.transaction do
      if to_remove.any?
        user
          .fav_post_joins
          .active
          .where(post_id: to_remove)
          .update_all(removed: true)
      end

      if to_add.any?
        to_add.each_slice(1000) do |slice|
          user.fav_post_joins.upsert_all(
            slice.map { |id| { post_id: id, removed: false } },
            unique_by: :index_domain_fa_favs_on_user_id_and_post_id,
            update_only: [:removed],
          )
        end
      end

      user.scanned_favs_at = Time.zone.now
      user.save!
    end
    logger.info "[updated favs list] [posts: #{user.fav_post_joins.count.to_s.bold}]"
  end

  private

  sig { params(user: Domain::Fa::User).returns(T.nilable(Symbol)) }
  def scan_page(user:)
    ret = nil

    url =
      if @page_id
        "https://www.furaffinity.net/favorites/#{user.url_name}/#{@page_id}/next"
      else
        "https://www.furaffinity.net/favorites/#{user.url_name}/"
      end
    response =
      http_client.get(
        url,
        caused_by_entry: causing_log_entry,
        use_http_cache: @use_http_cache,
      )
    self.first_log_entry ||= response.log_entry
    if response.status_code != 200
      fatal_error(
        "http #{response.status_code.to_s.red.bold}, " +
          "log entry #{response.log_entry.id.to_s.bold}",
      )
    end

    if Domain::Fa::Job::ScanUserUtils.user_disabled_or_not_found?(
      user,
      response,
    )
      logger.error("account disabled / not found, abort")
      return :stop
    end

    page = Domain::Fa::Parser::Page.new(response.body)
    fatal_error("not a favs listing page") unless page.probably_listings_page?
    submissions = page.submissions_parsed
    @page_id = page.favorites_next_button_id
    ret = :break if @page_id.nil?
    @total_items_seen += submissions.length

    posts_to_create_hashes = []
    existing_fa_id_to_post_id =
      Domain::Fa::Post
        .where(fa_id: submissions.map(&:id))
        .pluck(:fa_id, :id)
        .to_h

    posts_to_create_hashes =
      submissions
        .reject { |submission| existing_fa_id_to_post_id[submission.id] }
        .map do |submission|
          Domain::Fa::Post.hash_from_submission_parser_helper(
            submission,
            first_seen_log_entry: response.log_entry,
          )
        end

    created_post_ids = []
    created_post_ids =
      Domain::Fa::Post
        .insert_all!(posts_to_create_hashes, returning: %i[id fa_id])
        .map { |row| row["id"] } unless posts_to_create_hashes.empty?

    enqueue_new_post_scan_jobs(
      posts_to_create_hashes.map { |hash| hash[:fa_id] },
    )

    @last_page_post_ids = Set.new
    created_post_ids.each do |id|
      @seen_post_ids.add(id)
      @last_page_post_ids.add(id)
    end
    existing_fa_id_to_post_id.values.each do |id|
      @seen_post_ids.add(id)
      @last_page_post_ids.add(id)
    end

    logger.info [
      "[page #{@page_number.to_s.bold}]",
      "[posts: #{submissions.length.to_s.bold}]",
      "[created: #{posts_to_create_hashes.size.to_s.bold}]",
    ].join(" ")

    ret
  end

  sig { params(fa_ids: T::Array[Integer]).void }
  def enqueue_new_post_scan_jobs(fa_ids)
    bulk_enqueue_jobs do
      fa_ids.each do |fa_id|
        defer_job(Domain::Fa::Job::ScanPostJob, { fa_id: fa_id })
      end
    end
  end
end
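
The reconciliation at the end of perform is plain set arithmetic over post ids. In miniature (values invented for illustration):

  existing = Set[1, 2, 3]   # currently-active fav joins
  seen     = Set[2, 3, 4]   # ids observed during this scan
  existing - seen           # => #<Set: {1}>, marked removed: true
  seen - existing           # => #<Set: {4}>, upserted with removed: false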