Compare commits
322 Commits
fa94d90474
...
dymk--perc
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
aad67622fc | ||
|
|
32b9d606e7 | ||
|
|
a88382d54d | ||
|
|
c9d967fd74 | ||
|
|
70fb486cff | ||
|
|
87e1d50ae2 | ||
|
|
59a0f8a349 | ||
|
|
259ace9862 | ||
|
|
67de25a2c2 | ||
|
|
fdffd40277 | ||
|
|
6e4cb797fb | ||
|
|
f969ceb371 | ||
|
|
6b395d63d4 | ||
|
|
b080ac896f | ||
|
|
04661a8505 | ||
|
|
111a22ff8a | ||
|
|
24e6d0cf66 | ||
|
|
c0ddef96f0 | ||
|
|
720a2ab1b8 | ||
|
|
1a84b885f2 | ||
|
|
e49fe33dc6 | ||
|
|
ac50c47865 | ||
|
|
df9c42656c | ||
|
|
23ff88e595 | ||
|
|
db67ba23bc | ||
|
|
3bf1cb13ef | ||
|
|
e1e2f1d472 | ||
|
|
f87c75186f | ||
|
|
0dabfa42e5 | ||
|
|
7843f0faa5 | ||
|
|
f5f05c9267 | ||
|
|
ad3d564d58 | ||
|
|
7437586dda | ||
|
|
74bafc027a | ||
|
|
06fc36c4db | ||
|
|
ed525ee142 | ||
|
|
ec7cd52a76 | ||
|
|
0223a8ef1c | ||
|
|
b16b2009b0 | ||
|
|
bfbbf5d7d4 | ||
|
|
8c2593b414 | ||
|
|
41a8dab3d3 | ||
|
|
79159b2d31 | ||
|
|
1647ba574c | ||
|
|
97ab826f14 | ||
|
|
c7047ef8aa | ||
|
|
4dbdb68514 | ||
|
|
41324f019f | ||
|
|
eb5ecb956d | ||
|
|
c555c043a9 | ||
|
|
ccd5404a10 | ||
|
|
2faa485a35 | ||
|
|
3ea8dbfe83 | ||
|
|
1801d475e7 | ||
|
|
a2813ca125 | ||
|
|
b470d1a669 | ||
|
|
2e1922c68f | ||
|
|
8fb884c92c | ||
|
|
2700ef0f99 | ||
|
|
36bd296c1a | ||
|
|
50d875982a | ||
|
|
fe0711c7d9 | ||
|
|
eeb1511e52 | ||
|
|
18d304842e | ||
|
|
93b0de6073 | ||
|
|
784682bb44 | ||
|
|
4a1858f057 | ||
|
|
32e927dcce | ||
|
|
27253ff50b | ||
|
|
cfb8d6e714 | ||
|
|
ab52ad7ebf | ||
|
|
c1b3887c58 | ||
|
|
e375570a0f | ||
|
|
a31aabaab2 | ||
|
|
8c86c02ffc | ||
|
|
1133837ed0 | ||
|
|
cf506b735a | ||
|
|
049f83660c | ||
|
|
fb9e36f527 | ||
|
|
1f7a45cea2 | ||
|
|
aef521ea7e | ||
|
|
13c2d3cbed | ||
|
|
ff579c1a30 | ||
|
|
6c253818ff | ||
|
|
c2cbe78fd1 | ||
|
|
512119ebb4 | ||
|
|
af15c6feff | ||
|
|
cf5feb366a | ||
|
|
1761c89dc5 | ||
|
|
9a462713b6 | ||
|
|
4bb0eae722 | ||
|
|
35ba1db97e | ||
|
|
aea94c98cd | ||
|
|
428cb0a491 | ||
|
|
b01f54cc4f | ||
|
|
acbdf72e8e | ||
|
|
fc8e74d2fb | ||
|
|
bcd845759e | ||
|
|
c4f0a73cfd | ||
|
|
507e6ee715 | ||
|
|
5c14d26f5f | ||
|
|
4d5784b630 | ||
|
|
8f81468fc0 | ||
|
|
6c33c35a12 | ||
|
|
de4874c886 | ||
|
|
dc6965ab7b | ||
|
|
49fd8ccd48 | ||
|
|
6f8afdd2a6 | ||
|
|
2d4f672b6a | ||
|
|
0700adaa55 | ||
|
|
557258ff9f | ||
|
|
64efbee162 | ||
|
|
828f52fe81 | ||
|
|
73ff4ee472 | ||
|
|
f14c73a152 | ||
|
|
2789cf2c7f | ||
|
|
3f56df3af3 | ||
|
|
80ee303503 | ||
|
|
f5748cd005 | ||
|
|
f0502f500d | ||
|
|
4d6c67b5a1 | ||
|
|
fcf98c8067 | ||
|
|
9f0f6877d9 | ||
|
|
d6afdf424b | ||
|
|
4af584fffd | ||
|
|
ed299a404d | ||
|
|
48337c08bc | ||
|
|
a9d639b66d | ||
|
|
e931897c6c | ||
|
|
3a878deeec | ||
|
|
e89dca1fa4 | ||
|
|
1243a2f1f5 | ||
|
|
17cd07bb91 | ||
|
|
69ea16daf6 | ||
|
|
2d68b7bc15 | ||
|
|
077b7b9876 | ||
|
|
8e9e720695 | ||
|
|
a9bccb00e2 | ||
|
|
fa235a2310 | ||
|
|
f1c91f1119 | ||
|
|
1f3fa0074e | ||
|
|
37e269321f | ||
|
|
999e67db35 | ||
|
|
60d7e2920a | ||
|
|
c226eb20ed | ||
|
|
4b09b926a0 | ||
|
|
97dff5abf9 | ||
|
|
44778f6541 | ||
|
|
c9858ee354 | ||
|
|
28ab0cc023 | ||
|
|
fbc3a53c25 | ||
|
|
eb5a6d3190 | ||
|
|
af119ed683 | ||
|
|
b639ec2618 | ||
|
|
cdf064bfdf | ||
|
|
a60284c0d4 | ||
|
|
56fa72619a | ||
|
|
efccf79f64 | ||
|
|
e1b3fa4401 | ||
|
|
9f67a525b7 | ||
|
|
50af3d90d8 | ||
|
|
e7a584bc57 | ||
|
|
b1d06df6d2 | ||
|
|
6d5f494c64 | ||
|
|
3c41cd5b7d | ||
|
|
6b4e11e907 | ||
|
|
c70240b143 | ||
|
|
9c7a83eb4e | ||
|
|
22af93ada7 | ||
|
|
8d4f30ba43 | ||
|
|
9349a5466c | ||
|
|
ced01f1b9e | ||
|
|
2ce6dc7b96 | ||
|
|
6922c07b8c | ||
|
|
985c2c2347 | ||
|
|
c63e1b8cb2 | ||
|
|
8ac13f0602 | ||
|
|
2e36e08828 | ||
|
|
16fab739b5 | ||
|
|
8051c86bb4 | ||
|
|
3eb9be47bc | ||
|
|
2ee31f4e74 | ||
|
|
9e58ee067b | ||
|
|
1b59b44435 | ||
|
|
955a3021ae | ||
|
|
b97b82b1d8 | ||
|
|
4a31bd99e8 | ||
|
|
ca22face6c | ||
|
|
4f880fd419 | ||
|
|
7ee7b57965 | ||
|
|
ebfea0ab7c | ||
|
|
6436fe8fa6 | ||
|
|
9a3742abf1 | ||
|
|
0a980259dc | ||
|
|
fea167459d | ||
|
|
2a5e236a7f | ||
|
|
1fa22351d5 | ||
|
|
01f8d0b962 | ||
|
|
85dec62850 | ||
|
|
3ab0fa4fa3 | ||
|
|
5b94a0a7de | ||
|
|
2e0c2fdf51 | ||
|
|
ea5a2a7d6c | ||
|
|
d358cdbd7f | ||
|
|
bd0fad859e | ||
|
|
0e744bbdbe | ||
|
|
531cd1bb43 | ||
|
|
552532a95c | ||
|
|
ad78d41f06 | ||
|
|
93e389855a | ||
|
|
6ec902a859 | ||
|
|
fb78c3a27d | ||
|
|
6620633f22 | ||
|
|
3f5b0eadc6 | ||
|
|
657713192b | ||
|
|
173a4f2c78 | ||
|
|
401a730226 | ||
|
|
5988152835 | ||
|
|
7e33f70f19 | ||
|
|
b8cadb9855 | ||
|
|
8751ce4856 | ||
|
|
0977ac4343 | ||
|
|
09f1db712d | ||
|
|
3263e8aca8 | ||
|
|
03804c8cf1 | ||
|
|
031b8f965d | ||
|
|
7276ef6cbd | ||
|
|
fab12a4fe4 | ||
|
|
7229900eaa | ||
|
|
5ad6e89889 | ||
|
|
1cddb94af6 | ||
|
|
4f4c7fabc7 | ||
|
|
d16b613f33 | ||
|
|
3ae55422e0 | ||
|
|
3a9478e0f4 | ||
|
|
c424b7dacd | ||
|
|
ff8e539579 | ||
|
|
2833dc806f | ||
|
|
9423a50bc3 | ||
|
|
67c28cb8d2 | ||
|
|
5b508060ff | ||
|
|
c7a2a3481a | ||
|
|
df712f65db | ||
|
|
c34faef0dc | ||
|
|
37ad4b2ea8 | ||
|
|
17d2a87089 | ||
|
|
99ee3aaa91 | ||
|
|
c3d8c7afa7 | ||
|
|
d7f3cd4074 | ||
|
|
dbbe6788e8 | ||
|
|
aa1eaef5fd | ||
|
|
bb1e760d2e | ||
|
|
254367eb62 | ||
|
|
cc1fb9847f | ||
|
|
32fe41ff04 | ||
|
|
3f0d845472 | ||
|
|
7758927865 | ||
|
|
158fb9b478 | ||
|
|
75503e2a99 | ||
|
|
dc4c1b1df9 | ||
|
|
b8163f9e77 | ||
|
|
5505e7089e | ||
|
|
84866c0f6a | ||
|
|
df43a77fe2 | ||
|
|
15fc61c0d0 | ||
|
|
fde45e9704 | ||
|
|
3e62f9949c | ||
|
|
e99daf4b59 | ||
|
|
35aa025778 | ||
|
|
ab13af43af | ||
|
|
e57b0f4fc9 | ||
|
|
db4ea55b28 | ||
|
|
230bd5757d | ||
|
|
f317aa273e | ||
|
|
18fff3bc07 | ||
|
|
ca33644f84 | ||
|
|
3dc43530f8 | ||
|
|
f1e40a405f | ||
|
|
57083dc74c | ||
|
|
5c1318d768 | ||
|
|
71f54ae5e7 | ||
|
|
d19255a2c9 | ||
|
|
6b4c3c2294 | ||
|
|
4c774faafd | ||
|
|
450a5844eb | ||
|
|
0d4511cbcf | ||
|
|
a0d52575f3 | ||
|
|
9468e570d9 | ||
|
|
c2997f4d5f | ||
|
|
96b0804a0f | ||
|
|
9d5f1138d3 | ||
|
|
1a912103f1 | ||
|
|
6d2eff0849 | ||
|
|
369c79f8df | ||
|
|
8d85f7ebe1 | ||
|
|
a413b31a2c | ||
|
|
effb21b7cc | ||
|
|
3e6e1bf20b | ||
|
|
ff9aa66a4c | ||
|
|
d4dfa7309c | ||
|
|
c587aabbbe | ||
|
|
c63f1dffcb | ||
|
|
3c45545eab | ||
|
|
13c9ff0e8c | ||
|
|
db4c244196 | ||
|
|
67181ce78a | ||
|
|
798b2e43cb | ||
|
|
43848c3dd4 | ||
|
|
ff017290ec | ||
|
|
fcf635f96c | ||
|
|
b9673e9585 | ||
|
|
8ce85c6ef0 | ||
|
|
4a8f4f241b | ||
|
|
31d78ad0b9 | ||
|
|
b1a5496f09 | ||
|
|
18545bbfd8 | ||
|
|
24e52357be | ||
|
|
29cdb1669c | ||
|
|
2941b6a91d | ||
|
|
5a34130044 | ||
|
|
edc4940ba2 | ||
|
|
c2e3ce669e |
7
.cursorignore
Normal file
@@ -0,0 +1,7 @@
|
||||
# Add directories or file patterns to ignore during indexing (e.g. foo/ or *.csv)
|
||||
config/database.yml
|
||||
config/cookies/fa.yml
|
||||
tmp
|
||||
log
|
||||
public
|
||||
.bundle
|
||||
103
.devcontainer/Dockerfile.devcontainer
Normal file
@@ -0,0 +1,103 @@
|
||||
FROM ruby:3.2.0 AS native-gems
|
||||
RUN rm -f /etc/apt/apt.conf.d/docker-clean; \
|
||||
echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
|
||||
RUN \
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && \
|
||||
apt-get install --no-install-recommends --no-install-suggests -y \
|
||||
cmake
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
RUN gem install bundler -v '2.4.5'
|
||||
COPY gems gems
|
||||
WORKDIR /usr/src/app/gems/xdiff-rb
|
||||
RUN bundle install
|
||||
RUN rake compile
|
||||
WORKDIR /usr/src/app/gems/rb-bsdiff
|
||||
RUN bundle install
|
||||
RUN rake compile
|
||||
|
||||
# Primary image
|
||||
FROM mcr.microsoft.com/devcontainers/ruby:1-3.2-bookworm
|
||||
|
||||
# apt caching & install packages
|
||||
RUN rm -f /etc/apt/apt.conf.d/docker-clean; \
|
||||
echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
|
||||
RUN \
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && \
|
||||
apt-get install --no-install-recommends --no-install-suggests -y \
|
||||
autoconf \
|
||||
build-essential \
|
||||
ca-certificates \
|
||||
curl \
|
||||
gnupg \
|
||||
iputils-ping \
|
||||
libblas-dev \
|
||||
libdb-dev \
|
||||
libffi-dev \
|
||||
libgdbm-dev \
|
||||
libgdbm6 \
|
||||
libgmp-dev \
|
||||
liblapack-dev \
|
||||
libncurses5-dev \
|
||||
libpq-dev \
|
||||
libreadline6-dev \
|
||||
libssl-dev \
|
||||
libvips42 \
|
||||
libyaml-dev \
|
||||
patch \
|
||||
uuid-dev \
|
||||
zlib1g-dev \
|
||||
watchman \
|
||||
ffmpeg \
|
||||
ffmpegthumbnailer \
|
||||
abiword \
|
||||
pdftohtml \
|
||||
libreoffice
|
||||
|
||||
# Install postgres 15 client
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
sudo install -d /usr/share/postgresql-common/pgdg && \
|
||||
curl -o /usr/share/postgresql-common/pgdg/apt.postgresql.org.asc --fail https://www.postgresql.org/media/keys/ACCC4CF8.asc && \
|
||||
sh -c 'echo "deb [signed-by=/usr/share/postgresql-common/pgdg/apt.postgresql.org.asc] https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' && \
|
||||
apt update && \
|
||||
apt-get install --no-install-recommends --no-install-suggests -y \
|
||||
postgresql-client-15
|
||||
|
||||
# Install & configure delta diff tool
|
||||
RUN wget -O- https://github.com/dandavison/delta/releases/download/0.18.2/git-delta_0.18.2_amd64.deb > /tmp/git-delta.deb && \
|
||||
sudo dpkg -i /tmp/git-delta.deb && \
|
||||
rm /tmp/git-delta.deb
|
||||
|
||||
RUN git config --system core.pager "delta" && \
|
||||
git config --system interactive.diffFilter "delta --color-only" && \
|
||||
git config --system delta.navigate true && \
|
||||
git config --system delta.dark true && \
|
||||
git config --system delta.side-by-side true && \
|
||||
git config --system merge.conflictstyle "zdiff3" && \
|
||||
git config --system core.editor "cursor --wait"
|
||||
|
||||
# Install native gems
|
||||
COPY --from=native-gems /usr/src/app/gems/xdiff-rb /gems/xdiff-rb
|
||||
COPY --from=native-gems /usr/src/app/gems/rb-bsdiff /gems/rb-bsdiff
|
||||
|
||||
ENV RAILS_ENV development
|
||||
|
||||
# [Optional] Uncomment this line to install additional gems.
|
||||
RUN su vscode -c "gem install bundler -v '2.5.6'" && \
|
||||
su vscode -c "gem install rake -v '13.0.6'" && \
|
||||
su vscode -c "gem install ruby-lsp -v '0.22.1'"
|
||||
|
||||
# install exo
|
||||
RUN su vscode -c "curl -sL https://exo.deref.io/install | bash"
|
||||
ENV PATH "/home/vscode/.exo/bin:$PATH"
|
||||
|
||||
# install just (command runner)
|
||||
RUN curl --proto '=https' --tlsv1.2 -sSf https://just.systems/install.sh | bash -s -- --to /usr/local/bin
|
||||
|
||||
RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && nvm install 18 && nvm use 18 && npm install -g yarn" 2>&1
|
||||
ENV PATH /usr/local/share/nvm/current/bin:$PATH
|
||||
8
.devcontainer/Dockerfile.postgres
Normal file
@@ -0,0 +1,8 @@
|
||||
FROM postgres:15
|
||||
|
||||
RUN apt-get update && apt-get install -y \
|
||||
postgresql-15-pgvector \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
COPY create-tablespaces.bash /docker-entrypoint-initdb.d/00-create-tablespaces.bash
|
||||
RUN echo "CREATE EXTENSION pgvector;" >> /docker-entrypoint-initdb.d/01-pgvector.sql
|
||||
5
.devcontainer/create-db-user.sql
Normal file
@@ -0,0 +1,5 @@
|
||||
CREATE USER vscode CREATEDB;
|
||||
CREATE DATABASE vscode WITH OWNER vscode;
|
||||
|
||||
CREATE DATABASE redux_test WITH OWNER vscode;
|
||||
CREATE DATABASE legacy_test WITH OWNER vscode;
|
||||
9
.devcontainer/create-tablespaces.bash
Executable file
@@ -0,0 +1,9 @@
|
||||
#!/bin/bash -ex
|
||||
|
||||
mkdir -p /tablespaces/mirai
|
||||
chown postgres:postgres /tablespaces/mirai
|
||||
chmod 750 /tablespaces/mirai
|
||||
psql -v ON_ERROR_STOP=1 \
|
||||
--username "$POSTGRES_USER" \
|
||||
--dbname "$POSTGRES_DB" \
|
||||
-c "CREATE TABLESPACE mirai LOCATION '/tablespaces/mirai'"
|
||||
29
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,29 @@
|
||||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
|
||||
// README at: https://github.com/devcontainers/templates/tree/main/src/ruby-rails-postgres
|
||||
{
|
||||
"name": "Ruby on Rails & Postgres",
|
||||
"dockerComposeFile": "docker-compose.yml",
|
||||
"service": "app",
|
||||
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
|
||||
"features": {
|
||||
"ghcr.io/meaningful-ooo/devcontainer-features/fish:1": {},
|
||||
"ghcr.io/nikobockerman/devcontainer-features/fish-persistent-data:2": {}
|
||||
},
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
// This can be used to network with other containers or the host.
|
||||
// "forwardPorts": [3000, 5432],
|
||||
// Use 'postCreateCommand' to run commands after the container is created.
|
||||
// "postCreateCommand": "bundle install && rake db:setup",
|
||||
"postCreateCommand": ".devcontainer/post-create.sh",
|
||||
"forwardPorts": [
|
||||
3000, // rails development
|
||||
3001, // rails staging
|
||||
9394, // prometheus exporter
|
||||
"pgadmin:8080", // pgadmin
|
||||
"grafana:3100", // grafana
|
||||
"prometheus:9090" // prometheus
|
||||
]
|
||||
// Configure tool-specific properties.
|
||||
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
|
||||
// "remoteUser": "root"
|
||||
}
|
||||
74
.devcontainer/docker-compose.yml
Normal file
@@ -0,0 +1,74 @@
|
||||
version: '3'
|
||||
|
||||
services:
|
||||
app:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: .devcontainer/Dockerfile.devcontainer
|
||||
volumes:
|
||||
- ../..:/workspaces:cached
|
||||
- ./fish-shell-conf-d:/home/vscode/.config/fish/conf.d
|
||||
- devcontainer-redux-gem-cache:/usr/local/rvm/gems
|
||||
- devcontainer-redux-blob-files:/mnt/blob_files_development
|
||||
- /tank/redux-data/blob_files_production:/mnt/blob_files_production
|
||||
environment:
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT: http://otel-collector:4318
|
||||
OTEL_SERVICE_NAME: redux-scraper-dev
|
||||
OTEL_RESOURCE_ATTRIBUTES: application=redux-scraper-dev
|
||||
command: sleep infinity
|
||||
|
||||
db:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile.postgres
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- postgres-data:/var/lib/postgresql/data
|
||||
- postgres-data-tablespaces:/tablespaces
|
||||
- ./create-db-user.sql:/docker-entrypoint-initdb.d/create-db-user.sql
|
||||
environment:
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_DB: postgres
|
||||
POSTGRES_PASSWORD: postgres
|
||||
|
||||
pgadmin:
|
||||
image: dpage/pgadmin4:9
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
PGADMIN_DEFAULT_EMAIL: admin@example.com
|
||||
PGADMIN_DEFAULT_PASSWORD: password
|
||||
PGADMIN_LISTEN_PORT: 8080
|
||||
PGADMIN_CONFIG_SERVER_MODE: 'False'
|
||||
PGADMIN_CONFIG_MASTER_PASSWORD_REQUIRED: 'False'
|
||||
|
||||
prometheus:
|
||||
image: prom/prometheus
|
||||
command:
|
||||
- '--config.file=/etc/prometheus/prometheus.yml'
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- ./prometheus:/etc/prometheus
|
||||
- devcontainer-redux-prometheus-data:/prometheus
|
||||
|
||||
grafana:
|
||||
image: grafana/grafana
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
- GF_SERVER_HTTP_PORT=3100
|
||||
- GF_USERS_ALLOW_SIGN_UP=false
|
||||
- GF_LOG_LEVEL=debug
|
||||
- GF_SERVER_ROOT_URL=http://localhost:3100/grafana/
|
||||
- GF_SERVER_SERVE_FROM_SUB_PATH=false
|
||||
- GF_AUTH_PROXY_ENABLED=true
|
||||
- GF_AUTH_PROXY_HEADER_NAME=X-WEBAUTH-USER
|
||||
- GF_AUTH_PROXY_HEADER_PROPERTY=username
|
||||
volumes:
|
||||
- devcontainer-redux-grafana-data:/var/lib/grafana
|
||||
|
||||
volumes:
|
||||
postgres-data:
|
||||
postgres-data-tablespaces:
|
||||
devcontainer-redux-gem-cache:
|
||||
devcontainer-redux-blob-files:
|
||||
devcontainer-redux-grafana-data:
|
||||
devcontainer-redux-prometheus-data:
|
||||
1
.devcontainer/fish-shell-conf-d/just-completions.fish
Executable file
@@ -0,0 +1 @@
|
||||
complete -f -c just -a (just --summary)
|
||||
17
.devcontainer/fish-shell-conf-d/prompt.fish
Executable file
@@ -0,0 +1,17 @@
|
||||
function fish_prompt -d "Write out the prompt"
|
||||
# This shows up as USER@HOST /home/user/ >, with the directory colored
|
||||
# $USER and $hostname are set by fish, so you can just use them
|
||||
# instead of using `whoami` and `hostname`
|
||||
printf '%s %s%s> ' \
|
||||
(printf '%s%s%s' (set_color -d grey) $USER (set_color normal)) \
|
||||
(printf '%s%s%s' (set_color $fish_color_cwd) (prompt_pwd) (set_color normal)) \
|
||||
(fish_git_prompt)
|
||||
end
|
||||
|
||||
function fish_right_prompt -d "Write out the right prompt"
|
||||
set_color red
|
||||
if [ $RAILS_ENV = "development" ]
|
||||
set_color -d green
|
||||
end
|
||||
printf '%s%s' (echo $RAILS_ENV) (set_color normal)
|
||||
end
|
||||
1
.devcontainer/fish-shell-conf-d/rustup.fish
Normal file
@@ -0,0 +1 @@
|
||||
source "$HOME/.cargo/env.fish"
|
||||
27
.devcontainer/fish-shell-conf-d/utils.fish
Executable file
@@ -0,0 +1,27 @@
|
||||
function blob-files-dir
|
||||
if [ $RAILS_ENV = "production" ]
|
||||
echo "/mnt/blob_files_production/v1"
|
||||
return 0
|
||||
else if [ $RAILS_ENV = "development" ]
|
||||
echo "/mnt/blob_files_development/v1"
|
||||
return 0
|
||||
else
|
||||
echo "unknown RAILS_ENV: $RAILS_ENV" >&2
|
||||
return 1
|
||||
end
|
||||
end
|
||||
|
||||
function blob-file-path
|
||||
set -l file_name $argv[1]
|
||||
set -l prefix (blob-files-dir || return 1)
|
||||
set -l p0 (string sub -l 2 $file_name)
|
||||
set -l p1 (string sub -s 3 -l 2 $file_name)
|
||||
set -l p2 (string sub -s 5 -l 1 $file_name)
|
||||
printf "%s/%s/%s/%s/%s" $prefix $p0 $p1 $p2 $file_name
|
||||
end
|
||||
|
||||
function blob-files-stats
|
||||
set -l files_dir (blob-files-dir || return 1)
|
||||
printf "apparent size: %s\n" (du -sh --apparent-size $files_dir)
|
||||
printf "actual size: %s\n" (du -sh $files_dir)
|
||||
end
|
||||
22
.devcontainer/install-extensions.sh
Executable file
@@ -0,0 +1,22 @@
|
||||
#!/bin/bash -ex
|
||||
|
||||
# EDITOR_BIN="$(ls /vscode/cursor-server/bin/*/*/bin/remote-cli/cursor | head -n 1)"
|
||||
|
||||
# detect if either cursor or code is available, and use the first detected one
|
||||
EDITOR_BIN=cursor
|
||||
# EDITOR_BIN=code
|
||||
|
||||
function install_extension() {
|
||||
$EDITOR_BIN --install-extension "$1"
|
||||
}
|
||||
|
||||
install_extension Shopify.ruby-extensions-pack
|
||||
install_extension dbaeumer.vscode-eslint
|
||||
install_extension aliariff.vscode-erb-beautify
|
||||
install_extension bradlc.vscode-tailwindcss
|
||||
install_extension KoichiSasada.vscode-rdbg
|
||||
install_extension qwtel.sqlite-viewer
|
||||
install_extension esbenp.prettier-vscode
|
||||
install_extension ms-azuretools.vscode-docker
|
||||
install_extension 1YiB.rust-bundle
|
||||
install_extension rust-lang.rust-analyzer
|
||||
22
.devcontainer/post-create.sh
Executable file
@@ -0,0 +1,22 @@
|
||||
#!/bin/bash -ex
|
||||
|
||||
function mkdir_and_chmod {
|
||||
sudo mkdir -p $1
|
||||
sudo chmod 777 $1
|
||||
}
|
||||
|
||||
mkdir_and_chmod .devcontainer/data/prometheus
|
||||
mkdir_and_chmod .devcontainer/data/grafana
|
||||
echo "Path: $PATH"
|
||||
echo "Ruby: $(which ruby)"
|
||||
echo "Gem: $(which gem)"
|
||||
echo "Bundler: $(which bundler)"
|
||||
echo "Rake: $(which rake)"
|
||||
|
||||
bundle install --jobs $(getconf _NPROCESSORS_ONLN)
|
||||
rbenv rehash
|
||||
|
||||
bin/rails yarn:install
|
||||
yarn
|
||||
|
||||
yarn add --dev prettier @prettier/plugin-ruby
|
||||
12
.devcontainer/prometheus/prometheus.yml
Normal file
@@ -0,0 +1,12 @@
|
||||
global:
|
||||
scrape_interval: 15s
|
||||
evaluation_interval: 15s
|
||||
|
||||
scrape_configs:
|
||||
- job_name: 'prometheus'
|
||||
static_configs:
|
||||
- targets: ['prometheus:9090']
|
||||
|
||||
- job_name: 'rails'
|
||||
static_configs:
|
||||
- targets: ['app:9394']
|
||||
15
.dockerignore
Normal file
@@ -0,0 +1,15 @@
|
||||
.git
|
||||
.gitignore
|
||||
log
|
||||
tmp
|
||||
ext
|
||||
build
|
||||
node_modules
|
||||
profiler
|
||||
.bundle
|
||||
.vscode
|
||||
launch.json
|
||||
settings.json
|
||||
*.export
|
||||
.devcontainer
|
||||
user_scripts/dist
|
||||
15
.env-cmdrc
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"dev": {
|
||||
"RAILS_ENV": "development",
|
||||
"FOO_BAR": "baz"
|
||||
},
|
||||
"staging": {
|
||||
"RAILS_ENV": "staging"
|
||||
},
|
||||
"production": {
|
||||
"RAILS_ENV": "production"
|
||||
},
|
||||
"worker": {
|
||||
"RAILS_ENV": "worker"
|
||||
}
|
||||
}
|
||||
12
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
# To get started with Dependabot version updates, you'll need to specify which
|
||||
# package ecosystems to update and where the package manifests are located.
|
||||
# Please see the documentation for more information:
|
||||
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
|
||||
# https://containers.dev/guide/dependabot
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "devcontainers"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: weekly
|
||||
28
.gitignore
vendored
@@ -4,6 +4,21 @@
|
||||
# or operating system, you probably want to add a global ignore instead:
|
||||
# git config --global core.excludesfile '~/.gitignore_global'
|
||||
|
||||
build
|
||||
tmp
|
||||
core
|
||||
*.bundle
|
||||
lib/xdiff
|
||||
ext/xdiff/Makefile
|
||||
ext/xdiff/xdiff
|
||||
user_scripts/dist
|
||||
migrated_files.txt
|
||||
|
||||
# use yarn to manage node_modules
|
||||
package-lock.json
|
||||
|
||||
*.notes.md
|
||||
|
||||
# Ignore bundler config.
|
||||
/.bundle
|
||||
|
||||
@@ -35,4 +50,15 @@
|
||||
/config/master.key
|
||||
|
||||
/profiler/
|
||||
/flamegraph.svg
|
||||
/flamegraph.svg
|
||||
/app/assets/builds/*
|
||||
!/app/assets/builds/.keep
|
||||
|
||||
/public/packs
|
||||
/public/packs-test
|
||||
/node_modules
|
||||
/yarn-error.log
|
||||
yarn-debug.log*
|
||||
.yarn-integrity
|
||||
.DS_Store
|
||||
*.export
|
||||
|
||||
15
.prettierrc
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
"trailingComma": "all",
|
||||
"arrowParens": "always",
|
||||
"singleQuote": true,
|
||||
"plugins": [
|
||||
"prettier-plugin-tailwindcss",
|
||||
"@prettier/plugin-ruby",
|
||||
"@prettier/plugin-xml",
|
||||
"@4az/prettier-plugin-html-erb"
|
||||
],
|
||||
"xmlQuoteAttributes": "double",
|
||||
"xmlWhitespaceSensitivity": "ignore"
|
||||
}
|
||||
32
.rubocop.yml
Normal file
@@ -0,0 +1,32 @@
|
||||
# The behavior of RuboCop can be controlled via the .rubocop.yml
|
||||
# configuration file. It makes it possible to enable/disable
|
||||
# certain cops (checks) and to alter their behavior if they accept
|
||||
# any parameters. The file can be placed either in your home
|
||||
# directory or in some project directory.
|
||||
#
|
||||
# RuboCop will start looking for the configuration file in the directory
|
||||
# where the inspected file is and continue its way up to the root directory.
|
||||
#
|
||||
# See https://docs.rubocop.org/rubocop/configuration
|
||||
|
||||
inherit_mode:
|
||||
merge:
|
||||
- Exclude
|
||||
|
||||
AllCops:
|
||||
NewCops: disable
|
||||
|
||||
Metrics/MethodLength:
|
||||
Enabled: false
|
||||
|
||||
Metrics/ClassLength:
|
||||
Enabled: false
|
||||
|
||||
Metrics/BlockLength:
|
||||
Enabled: false
|
||||
|
||||
Style/Documentation:
|
||||
Enabled: false
|
||||
|
||||
Metrics/AbcSize:
|
||||
Enabled: false
|
||||
@@ -1 +1 @@
|
||||
3.2.0
|
||||
system
|
||||
11
.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "rdbg",
|
||||
"name": "Attach rdbg",
|
||||
"request": "attach",
|
||||
"rdbgPath": "export GEM_HOME=/usr/local/rvm/gems/default && bundle exec rdbg"
|
||||
}
|
||||
]
|
||||
}
|
||||
59
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
{
|
||||
"editor.formatOnSave": true,
|
||||
"workbench.editor.titleScrollbarSizing": "large",
|
||||
"window.title": "${activeEditorMedium}${separator}${rootName}${separator}${profileName}",
|
||||
"workbench.preferredDarkColorTheme": "Spinel",
|
||||
"workbench.preferredLightColorTheme": "Spinel Light",
|
||||
"rubyLsp.formatter": "syntax_tree",
|
||||
"files.associations": {
|
||||
".env-cmdrc": "json"
|
||||
},
|
||||
"[ruby]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[erb]": {
|
||||
"editor.defaultFormatter": "aliariff.vscode-erb-beautify"
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[javascript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[typescriptreact]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[javascriptreact]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[json]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[jsonc]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[xml]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[dockerfile]": {
|
||||
"editor.defaultFormatter": "ms-azuretools.vscode-docker"
|
||||
},
|
||||
"tailwindCSS.includeLanguages": {
|
||||
"erb": "html",
|
||||
"typescript": "javascript"
|
||||
},
|
||||
"tailwindCSS.experimental.classRegex": [
|
||||
"\\bclass:\\s*'([^']*)'",
|
||||
"\\bclass:\\s*\"([^\"]*)\"",
|
||||
"[\"'`]([^\"'`]*).*?,?\\s?"
|
||||
],
|
||||
"editor.quickSuggestions": {
|
||||
"other": "on",
|
||||
"comments": "off",
|
||||
"strings": "on"
|
||||
},
|
||||
"tailwindCSS.experimental.configFile": "config/tailwind.config.js",
|
||||
"sqliteViewer.maxFileSize": 4000,
|
||||
"files.insertFinalNewline": true,
|
||||
"files.trimTrailingWhitespace": true
|
||||
}
|
||||
94
Dockerfile
Normal file
@@ -0,0 +1,94 @@
|
||||
FROM ruby:3.2.6 AS native-gems
|
||||
RUN rm -f /etc/apt/apt.conf.d/docker-clean; \
|
||||
echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
|
||||
RUN \
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && \
|
||||
apt-get install --no-install-recommends --no-install-suggests -y \
|
||||
cmake
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
RUN gem install bundler -v '2.5.6'
|
||||
COPY gems gems
|
||||
WORKDIR /usr/src/app/gems/xdiff-rb
|
||||
RUN bundle _2.5.6_ install
|
||||
RUN rake compile
|
||||
WORKDIR /usr/src/app/gems/rb-bsdiff
|
||||
RUN bundle _2.5.6_ install
|
||||
RUN rake compile
|
||||
|
||||
# Primary image
|
||||
FROM ruby:3.2.6
|
||||
USER root
|
||||
|
||||
# apt caching & install packages
|
||||
RUN rm -f /etc/apt/apt.conf.d/docker-clean; \
|
||||
echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
|
||||
|
||||
RUN \
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && \
|
||||
apt-get install --no-install-recommends --no-install-suggests -y \
|
||||
libblas-dev liblapack-dev
|
||||
|
||||
# preinstall gems that take a long time to install
|
||||
RUN MAKE="make -j12" gem install bundler -v '2.5.6' --verbose
|
||||
RUN MAKE="make -j12" gem install rice -v '4.3.3' --verbose
|
||||
RUN MAKE="make -j12" gem install faiss -v '0.3.2' --verbose
|
||||
RUN MAKE="make -j12" gem install rails_live_reload -v '0.3.6' --verbose
|
||||
RUN bundle config --global frozen 1
|
||||
|
||||
# set up nodejs 18.x deb repo
|
||||
RUN mkdir -p /etc/apt/keyrings && \
|
||||
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key \
|
||||
| gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && \
|
||||
echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_18.x nodistro main" \
|
||||
| tee /etc/apt/sources.list.d/nodesource.list
|
||||
|
||||
RUN \
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && \
|
||||
apt-get install --no-install-recommends --no-install-suggests -y \
|
||||
libvips42 \
|
||||
ca-certificates \
|
||||
curl \
|
||||
gnupg \
|
||||
nodejs \
|
||||
libpq-dev \
|
||||
ffmpeg \
|
||||
ffmpegthumbnailer \
|
||||
abiword \
|
||||
pdftohtml \
|
||||
libreoffice
|
||||
|
||||
COPY --from=native-gems /usr/src/app/gems/xdiff-rb /gems/xdiff-rb
|
||||
COPY --from=native-gems /usr/src/app/gems/rb-bsdiff /gems/rb-bsdiff
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
COPY Gemfile Gemfile.lock ./
|
||||
RUN bundle _2.5.6_ install
|
||||
|
||||
# install js dependencies
|
||||
COPY package.json yarn.lock ./
|
||||
RUN npm install -g yarn
|
||||
RUN rails yarn:install
|
||||
RUN yarn
|
||||
|
||||
COPY . .
|
||||
|
||||
# precompile assets
|
||||
RUN RAILS_ENV=production bin/rails assets:precompile
|
||||
RUN mkdir -p tmp/pids
|
||||
|
||||
# build user scripts
|
||||
RUN yarn build:user-scripts
|
||||
|
||||
# create user with id=1000 gid=1000
|
||||
RUN groupadd -g 1000 app && \
|
||||
useradd -m -d /home/app -s /bin/bash -u 1000 -g 1000 app
|
||||
RUN chown -R app:app /usr/src/app
|
||||
USER app
|
||||
CMD /bin/bash
|
||||
151
Gemfile
@@ -1,27 +1,28 @@
|
||||
source "https://rubygems.org"
|
||||
git_source(:github) { |repo| "https://github.com/#{repo}.git" }
|
||||
|
||||
ruby "3.2.0"
|
||||
ruby "3.2.6"
|
||||
# ruby "3.0.3"
|
||||
|
||||
# Bundle edge Rails instead: gem "rails", github: "rails/rails", branch: "main"
|
||||
gem "rails", "~> 7.0.4", ">= 7.0.4.2"
|
||||
gem "rails", "~> 7.2"
|
||||
|
||||
# The original asset pipeline for Rails [https://github.com/rails/sprockets-rails]
|
||||
gem "sprockets-rails"
|
||||
|
||||
# Use sqlite3 as the database for Active Record
|
||||
gem "sqlite3", "~> 1.4"
|
||||
gem "pg"
|
||||
gem "sqlite3", "~> 1.4"
|
||||
|
||||
gem "pry"
|
||||
gem "pry-stack_explorer"
|
||||
|
||||
# Use the Puma web server [https://github.com/puma/puma]
|
||||
gem "puma", "~> 5.0"
|
||||
gem "thruster"
|
||||
|
||||
# Use JavaScript with ESM import maps [https://github.com/rails/importmap-rails]
|
||||
gem "importmap-rails"
|
||||
# # Use JavaScript with ESM import maps [https://github.com/rails/importmap-rails]
|
||||
# gem "importmap-rails"
|
||||
|
||||
# Hotwire's SPA-like page accelerator [https://turbo.hotwired.dev]
|
||||
gem "turbo-rails"
|
||||
@@ -30,7 +31,7 @@ gem "turbo-rails"
|
||||
gem "stimulus-rails"
|
||||
|
||||
# Build JSON APIs with ease [https://github.com/rails/jbuilder]
|
||||
gem "jbuilder"
|
||||
gem "jbuilder", "~> 2.13"
|
||||
|
||||
# Use Redis adapter to run Action Cable in production
|
||||
# gem "redis", "~> 4.0"
|
||||
@@ -42,7 +43,7 @@ gem "jbuilder"
|
||||
# gem "bcrypt", "~> 3.1.7"
|
||||
|
||||
# Windows does not include zoneinfo files, so bundle the tzinfo-data gem
|
||||
gem "tzinfo-data", platforms: %i[ mingw mswin x64_mingw jruby ]
|
||||
gem "tzinfo-data", platforms: %i[mingw mswin x64_mingw jruby]
|
||||
|
||||
# Reduces boot times through caching; required in config/boot.rb
|
||||
gem "bootsnap", require: false
|
||||
@@ -53,60 +54,132 @@ gem "bootsnap", require: false
|
||||
# Use Active Storage variants [https://guides.rubyonrails.org/active_storage_overview.html#transforming-images]
|
||||
# gem "image_processing", "~> 1.2"
|
||||
|
||||
group :development, :test do
|
||||
group :development, :test, :staging do
|
||||
# See https://guides.rubyonrails.org/debugging_rails_applications.html#debugging-with-the-debug-gem
|
||||
gem "debug", platforms: %i[ mri mingw x64_mingw ]
|
||||
gem "debug", "~> 1.10", platforms: %i[mri mingw x64_mingw]
|
||||
end
|
||||
|
||||
group :development do
|
||||
group :development, :staging do
|
||||
# Use console on exceptions pages [https://github.com/rails/web-console]
|
||||
gem "htmlbeautifier"
|
||||
gem "rufo", require: false
|
||||
gem "web-console"
|
||||
|
||||
# Add speed badges [https://github.com/MiniProfiler/rack-mini-profiler]
|
||||
gem "rack-mini-profiler", require: ["enable_rails_patches", "rack-mini-profiler"]
|
||||
# Speed up commands on slow machines / big apps [https://github.com/rails/spring]
|
||||
# gem "spring"
|
||||
end
|
||||
|
||||
group :log_watcher do
|
||||
gem "curses"
|
||||
gem "listen"
|
||||
gem "concurrent-ruby-ext", require: "concurrent"
|
||||
gem "concurrent-ruby-edge", require: "concurrent-edge"
|
||||
# Add speed badges [https://github.com/MiniProfiler/rack-mini-profiler]
|
||||
gem "memory_profiler"
|
||||
gem "rack-mini-profiler",
|
||||
"~> 3.3",
|
||||
require: %w[enable_rails_patches rack-mini-profiler]
|
||||
gem "stackprof"
|
||||
|
||||
# temporary ref of rails_live_reload to a commit that adds ignore patterns
|
||||
# to the Listen gem
|
||||
gem "rails_live_reload",
|
||||
git: "https://github.com/railsjazz/rails_live_reload",
|
||||
ref: "dcd3b73904594e2c5134c2f6e05954f3937a8d29"
|
||||
# git: "https://github.com/mktakuya/rails_live_reload",
|
||||
# ref: "95d7ac7c03e8c702066ed3dc9cd70a965412e2d2"
|
||||
# gem "rails_live_reload", "0.4.0"
|
||||
end
|
||||
|
||||
group :test do
|
||||
# Use system testing [https://guides.rubyonrails.org/testing.html#system-testing]
|
||||
gem "capybara"
|
||||
gem "rspec-rails", "~> 7.0"
|
||||
gem "rails-controller-testing"
|
||||
gem "selenium-webdriver"
|
||||
gem "webdrivers"
|
||||
gem "rspec-rails"
|
||||
gem "shoulda-matchers"
|
||||
gem "factory_bot_rails"
|
||||
gem "pundit-matchers", "~> 4.0"
|
||||
gem "db-query-matchers", "~> 0.14"
|
||||
end
|
||||
|
||||
gem "xdiff", path: "../xdiff-rb"
|
||||
group :test, :development do
|
||||
gem "parallel_tests"
|
||||
end
|
||||
|
||||
gem "xdiff", path: "/gems/xdiff-rb"
|
||||
|
||||
# for legacy import
|
||||
gem "diffy"
|
||||
gem "rb-bsdiff", path: "../rb-bsdiff"
|
||||
gem "rb-bsdiff", path: "/gems/rb-bsdiff"
|
||||
|
||||
gem "addressable"
|
||||
gem "colorize"
|
||||
gem "concurrent-ruby-edge", require: "concurrent-edge"
|
||||
gem "concurrent-ruby-ext", require: "concurrent"
|
||||
gem "curb"
|
||||
gem "daemons"
|
||||
gem "discard"
|
||||
gem "good_job", "~> 4.6"
|
||||
gem "http-cookie"
|
||||
gem "http", "~> 5.2" # For proxying requests
|
||||
gem "kaminari"
|
||||
gem "nokogiri"
|
||||
gem "pluck_each"
|
||||
gem "ripcord"
|
||||
gem "ruby-prof"
|
||||
gem "ruby-prof-speedscope"
|
||||
gem "ruby-vips"
|
||||
gem "dhash-vips"
|
||||
gem "ffmpeg", git: "https://github.com/instructure/ruby-ffmpeg", tag: "v6.1.2"
|
||||
gem "table_print"
|
||||
gem "addressable"
|
||||
gem "nokogiri"
|
||||
gem "http-cookie"
|
||||
gem "curb"
|
||||
gem "kaminari"
|
||||
gem "delayed_job_active_record"
|
||||
# gem "delayed-web"
|
||||
gem "delayed_job_web"
|
||||
gem "colorize"
|
||||
gem "daemons"
|
||||
gem "delayed_job_worker_pool"
|
||||
gem "ripcord"
|
||||
gem "influxdb-client"
|
||||
gem "discard"
|
||||
# gem 'cli-ui'
|
||||
# gem "paper_trail"
|
||||
# gem "paper_trail-hashdiff"
|
||||
# gem "hashdiff"
|
||||
gem "zstd-ruby"
|
||||
gem "rouge"
|
||||
gem "docx"
|
||||
gem "ruby-bbcode"
|
||||
gem "dtext_rb",
|
||||
git: "https://github.com/e621ng/dtext_rb",
|
||||
ref: "5ef8fd7a5205c832f4c18197911717e7d491494e"
|
||||
|
||||
# gem "pghero", git: "https://github.com/dymk/pghero", ref: "e314f99"
|
||||
gem "pghero", "~> 3.6"
|
||||
gem "pg_query", ">= 2"
|
||||
|
||||
gem "disco"
|
||||
gem "faiss"
|
||||
gem "neighbor"
|
||||
gem "progressbar"
|
||||
gem "attr_json"
|
||||
|
||||
group :production, :staging do
|
||||
gem "rails_semantic_logger", "~> 4.17"
|
||||
end
|
||||
|
||||
group :production do
|
||||
gem "sd_notify"
|
||||
gem "cloudflare-rails"
|
||||
end
|
||||
|
||||
gem "rack", "~> 2.2"
|
||||
gem "rack-cors"
|
||||
gem "react_on_rails"
|
||||
gem "sanitize", "~> 6.1"
|
||||
gem "shakapacker", "~> 6.6"
|
||||
gem "timeout"
|
||||
|
||||
group :development do
|
||||
gem "prettier_print"
|
||||
gem "syntax_tree", "~> 6.2"
|
||||
end
|
||||
|
||||
gem "cssbundling-rails", "~> 1.4"
|
||||
gem "tailwindcss-rails", "~> 3.0"
|
||||
|
||||
# Authentication
|
||||
gem "devise", "~> 4.9"
|
||||
|
||||
# Authorization
|
||||
gem "pundit", "~> 2.4"
|
||||
|
||||
# Monitoring
|
||||
gem "prometheus_exporter", "~> 2.2"
|
||||
|
||||
gem "sorbet-static-and-runtime"
|
||||
gem "tapioca", require: false
|
||||
gem "rspec-sorbet", group: [:test]
|
||||
gem "sorbet-struct-comparable"
|
||||
|
||||
745
Gemfile.lock
@@ -1,148 +1,244 @@
|
||||
GIT
|
||||
remote: https://github.com/e621ng/dtext_rb
|
||||
revision: 5ef8fd7a5205c832f4c18197911717e7d491494e
|
||||
ref: 5ef8fd7a5205c832f4c18197911717e7d491494e
|
||||
specs:
|
||||
dtext_rb (1.11.0)
|
||||
|
||||
GIT
|
||||
remote: https://github.com/instructure/ruby-ffmpeg
|
||||
revision: a3404b8fa275e2eb9549f074906461b0266a70ea
|
||||
tag: v6.1.2
|
||||
specs:
|
||||
ffmpeg (6.1.2)
|
||||
multi_json (~> 1.8)
|
||||
|
||||
GIT
|
||||
remote: https://github.com/railsjazz/rails_live_reload
|
||||
revision: dcd3b73904594e2c5134c2f6e05954f3937a8d29
|
||||
ref: dcd3b73904594e2c5134c2f6e05954f3937a8d29
|
||||
specs:
|
||||
rails_live_reload (0.3.6)
|
||||
listen
|
||||
nio4r
|
||||
railties
|
||||
websocket-driver
|
||||
|
||||
PATH
|
||||
remote: ../rb-bsdiff
|
||||
remote: /gems/rb-bsdiff
|
||||
specs:
|
||||
rb-bsdiff (0.1.0)
|
||||
|
||||
PATH
|
||||
remote: ../xdiff-rb
|
||||
remote: /gems/xdiff-rb
|
||||
specs:
|
||||
xdiff (0.0.1)
|
||||
|
||||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
actioncable (7.0.4.2)
|
||||
actionpack (= 7.0.4.2)
|
||||
activesupport (= 7.0.4.2)
|
||||
actioncable (7.2.2.1)
|
||||
actionpack (= 7.2.2.1)
|
||||
activesupport (= 7.2.2.1)
|
||||
nio4r (~> 2.0)
|
||||
websocket-driver (>= 0.6.1)
|
||||
actionmailbox (7.0.4.2)
|
||||
actionpack (= 7.0.4.2)
|
||||
activejob (= 7.0.4.2)
|
||||
activerecord (= 7.0.4.2)
|
||||
activestorage (= 7.0.4.2)
|
||||
activesupport (= 7.0.4.2)
|
||||
mail (>= 2.7.1)
|
||||
net-imap
|
||||
net-pop
|
||||
net-smtp
|
||||
actionmailer (7.0.4.2)
|
||||
actionpack (= 7.0.4.2)
|
||||
actionview (= 7.0.4.2)
|
||||
activejob (= 7.0.4.2)
|
||||
activesupport (= 7.0.4.2)
|
||||
mail (~> 2.5, >= 2.5.4)
|
||||
net-imap
|
||||
net-pop
|
||||
net-smtp
|
||||
rails-dom-testing (~> 2.0)
|
||||
actionpack (7.0.4.2)
|
||||
actionview (= 7.0.4.2)
|
||||
activesupport (= 7.0.4.2)
|
||||
rack (~> 2.0, >= 2.2.0)
|
||||
zeitwerk (~> 2.6)
|
||||
actionmailbox (7.2.2.1)
|
||||
actionpack (= 7.2.2.1)
|
||||
activejob (= 7.2.2.1)
|
||||
activerecord (= 7.2.2.1)
|
||||
activestorage (= 7.2.2.1)
|
||||
activesupport (= 7.2.2.1)
|
||||
mail (>= 2.8.0)
|
||||
actionmailer (7.2.2.1)
|
||||
actionpack (= 7.2.2.1)
|
||||
actionview (= 7.2.2.1)
|
||||
activejob (= 7.2.2.1)
|
||||
activesupport (= 7.2.2.1)
|
||||
mail (>= 2.8.0)
|
||||
rails-dom-testing (~> 2.2)
|
||||
actionpack (7.2.2.1)
|
||||
actionview (= 7.2.2.1)
|
||||
activesupport (= 7.2.2.1)
|
||||
nokogiri (>= 1.8.5)
|
||||
racc
|
||||
rack (>= 2.2.4, < 3.2)
|
||||
rack-session (>= 1.0.1)
|
||||
rack-test (>= 0.6.3)
|
||||
rails-dom-testing (~> 2.0)
|
||||
rails-html-sanitizer (~> 1.0, >= 1.2.0)
|
||||
actiontext (7.0.4.2)
|
||||
actionpack (= 7.0.4.2)
|
||||
activerecord (= 7.0.4.2)
|
||||
activestorage (= 7.0.4.2)
|
||||
activesupport (= 7.0.4.2)
|
||||
rails-dom-testing (~> 2.2)
|
||||
rails-html-sanitizer (~> 1.6)
|
||||
useragent (~> 0.16)
|
||||
actiontext (7.2.2.1)
|
||||
actionpack (= 7.2.2.1)
|
||||
activerecord (= 7.2.2.1)
|
||||
activestorage (= 7.2.2.1)
|
||||
activesupport (= 7.2.2.1)
|
||||
globalid (>= 0.6.0)
|
||||
nokogiri (>= 1.8.5)
|
||||
actionview (7.0.4.2)
|
||||
activesupport (= 7.0.4.2)
|
||||
actionview (7.2.2.1)
|
||||
activesupport (= 7.2.2.1)
|
||||
builder (~> 3.1)
|
||||
erubi (~> 1.4)
|
||||
rails-dom-testing (~> 2.0)
|
||||
rails-html-sanitizer (~> 1.1, >= 1.2.0)
|
||||
activejob (7.0.4.2)
|
||||
activesupport (= 7.0.4.2)
|
||||
erubi (~> 1.11)
|
||||
rails-dom-testing (~> 2.2)
|
||||
rails-html-sanitizer (~> 1.6)
|
||||
activejob (7.2.2.1)
|
||||
activesupport (= 7.2.2.1)
|
||||
globalid (>= 0.3.6)
|
||||
activemodel (7.0.4.2)
|
||||
activesupport (= 7.0.4.2)
|
||||
activerecord (7.0.4.2)
|
||||
activemodel (= 7.0.4.2)
|
||||
activesupport (= 7.0.4.2)
|
||||
activestorage (7.0.4.2)
|
||||
actionpack (= 7.0.4.2)
|
||||
activejob (= 7.0.4.2)
|
||||
activerecord (= 7.0.4.2)
|
||||
activesupport (= 7.0.4.2)
|
||||
activemodel (7.2.2.1)
|
||||
activesupport (= 7.2.2.1)
|
||||
activerecord (7.2.2.1)
|
||||
activemodel (= 7.2.2.1)
|
||||
activesupport (= 7.2.2.1)
|
||||
timeout (>= 0.4.0)
|
||||
activestorage (7.2.2.1)
|
||||
actionpack (= 7.2.2.1)
|
||||
activejob (= 7.2.2.1)
|
||||
activerecord (= 7.2.2.1)
|
||||
activesupport (= 7.2.2.1)
|
||||
marcel (~> 1.0)
|
||||
mini_mime (>= 1.1.0)
|
||||
activesupport (7.0.4.2)
|
||||
concurrent-ruby (~> 1.0, >= 1.0.2)
|
||||
activesupport (7.2.2.1)
|
||||
base64
|
||||
benchmark (>= 0.3)
|
||||
bigdecimal
|
||||
concurrent-ruby (~> 1.0, >= 1.3.1)
|
||||
connection_pool (>= 2.2.5)
|
||||
drb
|
||||
i18n (>= 1.6, < 2)
|
||||
logger (>= 1.4.2)
|
||||
minitest (>= 5.1)
|
||||
tzinfo (~> 2.0)
|
||||
addressable (2.8.1)
|
||||
public_suffix (>= 2.0.2, < 6.0)
|
||||
securerandom (>= 0.3)
|
||||
tzinfo (~> 2.0, >= 2.0.5)
|
||||
addressable (2.8.7)
|
||||
public_suffix (>= 2.0.2, < 7.0)
|
||||
attr_json (2.5.0)
|
||||
activerecord (>= 6.0.0, < 8.1)
|
||||
base64 (0.2.0)
|
||||
bcrypt (3.1.20)
|
||||
benchmark (0.4.0)
|
||||
bigdecimal (3.1.9)
|
||||
bindex (0.8.1)
|
||||
binding_of_caller (1.0.0)
|
||||
debug_inspector (>= 0.0.1)
|
||||
bootsnap (1.16.0)
|
||||
binding_of_caller (1.0.1)
|
||||
debug_inspector (>= 1.2.0)
|
||||
bootsnap (1.18.4)
|
||||
msgpack (~> 1.2)
|
||||
builder (3.2.4)
|
||||
capybara (3.38.0)
|
||||
builder (3.3.0)
|
||||
capybara (3.40.0)
|
||||
addressable
|
||||
matrix
|
||||
mini_mime (>= 0.1.3)
|
||||
nokogiri (~> 1.8)
|
||||
nokogiri (~> 1.11)
|
||||
rack (>= 1.6.0)
|
||||
rack-test (>= 0.6.3)
|
||||
regexp_parser (>= 1.5, < 3.0)
|
||||
xpath (~> 3.2)
|
||||
cloudflare-rails (6.2.0)
|
||||
actionpack (>= 7.1.0, < 8.1.0)
|
||||
activesupport (>= 7.1.0, < 8.1.0)
|
||||
railties (>= 7.1.0, < 8.1.0)
|
||||
zeitwerk (>= 2.5.0)
|
||||
coderay (1.1.3)
|
||||
colorize (0.8.1)
|
||||
concurrent-ruby (1.2.0)
|
||||
concurrent-ruby-edge (0.7.0)
|
||||
concurrent-ruby (~> 1.2.0)
|
||||
concurrent-ruby-ext (1.2.0)
|
||||
concurrent-ruby (= 1.2.0)
|
||||
colorize (1.1.0)
|
||||
concurrent-ruby (1.3.4)
|
||||
concurrent-ruby-edge (0.7.1)
|
||||
concurrent-ruby (~> 1.3)
|
||||
concurrent-ruby-ext (1.3.4)
|
||||
concurrent-ruby (= 1.3.4)
|
||||
connection_pool (2.4.1)
|
||||
crass (1.0.6)
|
||||
curb (1.0.5)
|
||||
curses (1.4.4)
|
||||
daemons (1.4.1)
|
||||
date (3.3.3)
|
||||
debug (1.7.1)
|
||||
irb (>= 1.5.0)
|
||||
reline (>= 0.3.1)
|
||||
debug_inspector (1.1.0)
|
||||
delayed_job (4.1.11)
|
||||
activesupport (>= 3.0, < 8.0)
|
||||
delayed_job_active_record (4.1.7)
|
||||
activerecord (>= 3.0, < 8.0)
|
||||
delayed_job (>= 3.0, < 5)
|
||||
delayed_job_web (1.4.4)
|
||||
activerecord (> 3.0.0)
|
||||
delayed_job (> 2.0.3)
|
||||
rack-protection (>= 1.5.5)
|
||||
sinatra (>= 1.4.4)
|
||||
delayed_job_worker_pool (1.0.0)
|
||||
delayed_job (>= 3.0, < 4.2)
|
||||
diff-lcs (1.5.0)
|
||||
diffy (3.4.2)
|
||||
discard (1.2.1)
|
||||
activerecord (>= 4.2, < 8)
|
||||
domain_name (0.5.20190701)
|
||||
unf (>= 0.0.5, < 1.0.0)
|
||||
erubi (1.12.0)
|
||||
ffi (1.15.5)
|
||||
globalid (1.1.0)
|
||||
activesupport (>= 5.0)
|
||||
http-cookie (1.0.5)
|
||||
domain_name (~> 0.5)
|
||||
i18n (1.12.0)
|
||||
concurrent-ruby (~> 1.0)
|
||||
importmap-rails (1.1.5)
|
||||
actionpack (>= 6.0.0)
|
||||
cssbundling-rails (1.4.1)
|
||||
railties (>= 6.0.0)
|
||||
influxdb-client (2.9.0)
|
||||
io-console (0.6.0)
|
||||
irb (1.6.2)
|
||||
reline (>= 0.3.0)
|
||||
jbuilder (2.11.5)
|
||||
curb (1.0.6)
|
||||
daemons (1.4.1)
|
||||
date (3.4.1)
|
||||
db-query-matchers (0.14.0)
|
||||
activesupport (>= 4.0, < 8.1)
|
||||
rspec (>= 3.0)
|
||||
debug (1.10.0)
|
||||
irb (~> 1.10)
|
||||
reline (>= 0.3.8)
|
||||
debug_inspector (1.2.0)
|
||||
devise (4.9.4)
|
||||
bcrypt (~> 3.0)
|
||||
orm_adapter (~> 0.1)
|
||||
railties (>= 4.1.0)
|
||||
responders
|
||||
warden (~> 1.2.3)
|
||||
dhash-vips (0.2.3.0)
|
||||
ruby-vips (~> 2.0, != 2.1.1, != 2.1.0)
|
||||
diff-lcs (1.5.1)
|
||||
diffy (3.4.3)
|
||||
discard (1.4.0)
|
||||
activerecord (>= 4.2, < 9.0)
|
||||
disco (0.5.1)
|
||||
libmf (>= 0.4)
|
||||
numo-narray (>= 0.9.2)
|
||||
docx (0.8.0)
|
||||
nokogiri (~> 1.13, >= 1.13.0)
|
||||
rubyzip (~> 2.0)
|
||||
domain_name (0.6.20240107)
|
||||
drb (2.2.1)
|
||||
erubi (1.13.1)
|
||||
et-orbi (1.2.11)
|
||||
tzinfo
|
||||
execjs (2.10.0)
|
||||
factory_bot (6.5.0)
|
||||
activesupport (>= 5.0.0)
|
||||
factory_bot_rails (6.4.4)
|
||||
factory_bot (~> 6.5)
|
||||
railties (>= 5.0.0)
|
||||
faiss (0.3.2)
|
||||
numo-narray
|
||||
rice (>= 4.0.2)
|
||||
ffi (1.17.1-aarch64-linux-gnu)
|
||||
ffi (1.17.1-aarch64-linux-musl)
|
||||
ffi (1.17.1-arm64-darwin)
|
||||
ffi (1.17.1-x86_64-darwin)
|
||||
ffi (1.17.1-x86_64-linux-gnu)
|
||||
ffi (1.17.1-x86_64-linux-musl)
|
||||
ffi-compiler (1.3.2)
|
||||
ffi (>= 1.15.5)
|
||||
rake
|
||||
fugit (1.11.1)
|
||||
et-orbi (~> 1, >= 1.2.11)
|
||||
raabro (~> 1.4)
|
||||
globalid (1.2.1)
|
||||
activesupport (>= 6.1)
|
||||
good_job (4.6.0)
|
||||
activejob (>= 6.1.0)
|
||||
activerecord (>= 6.1.0)
|
||||
concurrent-ruby (>= 1.3.1)
|
||||
fugit (>= 1.11.0)
|
||||
railties (>= 6.1.0)
|
||||
thor (>= 1.0.0)
|
||||
google-protobuf (4.29.2-aarch64-linux)
|
||||
bigdecimal
|
||||
rake (>= 13)
|
||||
google-protobuf (4.29.2-arm64-darwin)
|
||||
bigdecimal
|
||||
rake (>= 13)
|
||||
google-protobuf (4.29.2-x86_64-darwin)
|
||||
bigdecimal
|
||||
rake (>= 13)
|
||||
google-protobuf (4.29.2-x86_64-linux)
|
||||
bigdecimal
|
||||
rake (>= 13)
|
||||
htmlbeautifier (1.4.3)
|
||||
http (5.2.0)
|
||||
addressable (~> 2.8)
|
||||
base64 (~> 0.1)
|
||||
http-cookie (~> 1.0)
|
||||
http-form_data (~> 2.2)
|
||||
llhttp-ffi (~> 0.5.0)
|
||||
http-cookie (1.0.8)
|
||||
domain_name (~> 0.5)
|
||||
http-form_data (2.3.0)
|
||||
i18n (1.14.6)
|
||||
concurrent-ruby (~> 1.0)
|
||||
io-console (0.8.0)
|
||||
irb (1.14.3)
|
||||
rdoc (>= 4.0.0)
|
||||
reline (>= 0.4.2)
|
||||
jbuilder (2.13.0)
|
||||
actionview (>= 5.0.0)
|
||||
activesupport (>= 5.0.0)
|
||||
kaminari (1.2.2)
|
||||
@@ -157,214 +253,411 @@ GEM
|
||||
activerecord
|
||||
kaminari-core (= 1.2.2)
|
||||
kaminari-core (1.2.2)
|
||||
listen (3.8.0)
|
||||
libmf (0.4.0)
|
||||
ffi
|
||||
listen (3.9.0)
|
||||
rb-fsevent (~> 0.10, >= 0.10.3)
|
||||
rb-inotify (~> 0.9, >= 0.9.10)
|
||||
loofah (2.19.1)
|
||||
llhttp-ffi (0.5.0)
|
||||
ffi-compiler (~> 1.0)
|
||||
rake (~> 13.0)
|
||||
logger (1.6.4)
|
||||
loofah (2.23.1)
|
||||
crass (~> 1.0.2)
|
||||
nokogiri (>= 1.5.9)
|
||||
mail (2.8.0.1)
|
||||
nokogiri (>= 1.12.0)
|
||||
mail (2.8.1)
|
||||
mini_mime (>= 0.1.1)
|
||||
net-imap
|
||||
net-pop
|
||||
net-smtp
|
||||
marcel (1.0.2)
|
||||
marcel (1.0.4)
|
||||
matrix (0.4.2)
|
||||
method_source (1.0.0)
|
||||
mini_mime (1.1.2)
|
||||
mini_portile2 (2.8.1)
|
||||
minitest (5.17.0)
|
||||
msgpack (1.6.0)
|
||||
mustermann (3.0.0)
|
||||
ruby2_keywords (~> 0.0.1)
|
||||
net-imap (0.3.4)
|
||||
memory_profiler (1.1.0)
|
||||
method_source (1.1.0)
|
||||
mini_mime (1.1.5)
|
||||
minitest (5.25.4)
|
||||
msgpack (1.7.5)
|
||||
multi_json (1.15.0)
|
||||
neighbor (0.5.1)
|
||||
activerecord (>= 7)
|
||||
net-imap (0.5.4)
|
||||
date
|
||||
net-protocol
|
||||
net-pop (0.1.2)
|
||||
net-protocol
|
||||
net-protocol (0.2.1)
|
||||
net-protocol (0.2.2)
|
||||
timeout
|
||||
net-smtp (0.3.3)
|
||||
net-smtp (0.5.0)
|
||||
net-protocol
|
||||
nio4r (2.5.8)
|
||||
nokogiri (1.14.1)
|
||||
mini_portile2 (~> 2.8.0)
|
||||
netrc (0.11.0)
|
||||
nio4r (2.7.4)
|
||||
nokogiri (1.18.1-aarch64-linux-gnu)
|
||||
racc (~> 1.4)
|
||||
pg (1.4.5)
|
||||
pry (0.14.2)
|
||||
nokogiri (1.18.1-aarch64-linux-musl)
|
||||
racc (~> 1.4)
|
||||
nokogiri (1.18.1-arm64-darwin)
|
||||
racc (~> 1.4)
|
||||
nokogiri (1.18.1-x86_64-darwin)
|
||||
racc (~> 1.4)
|
||||
nokogiri (1.18.1-x86_64-linux-gnu)
|
||||
racc (~> 1.4)
|
||||
nokogiri (1.18.1-x86_64-linux-musl)
|
||||
racc (~> 1.4)
|
||||
numo-narray (0.9.2.1)
|
||||
orm_adapter (0.5.0)
|
||||
parallel (1.26.3)
|
||||
parallel_tests (4.7.2)
|
||||
parallel
|
||||
pg (1.5.9)
|
||||
pg_query (6.0.0)
|
||||
google-protobuf (>= 3.25.3)
|
||||
pghero (3.6.1)
|
||||
activerecord (>= 6.1)
|
||||
pluck_each (0.2.0)
|
||||
activerecord (> 3.2.0)
|
||||
activesupport (> 3.0.0)
|
||||
prettier_print (1.2.1)
|
||||
prism (1.3.0)
|
||||
progressbar (1.13.0)
|
||||
prometheus_exporter (2.2.0)
|
||||
webrick
|
||||
pry (0.15.2)
|
||||
coderay (~> 1.1)
|
||||
method_source (~> 1.0)
|
||||
pry-stack_explorer (0.6.1)
|
||||
binding_of_caller (~> 1.0)
|
||||
pry (~> 0.13)
|
||||
public_suffix (5.0.1)
|
||||
puma (5.6.5)
|
||||
psych (5.2.2)
|
||||
date
|
||||
stringio
|
||||
public_suffix (6.0.1)
|
||||
puma (5.6.9)
|
||||
nio4r (~> 2.0)
|
||||
racc (1.6.2)
|
||||
rack (2.2.6.2)
|
||||
rack-mini-profiler (3.0.0)
|
||||
pundit (2.4.0)
|
||||
activesupport (>= 3.0.0)
|
||||
pundit-matchers (4.0.0)
|
||||
rspec-core (~> 3.12)
|
||||
rspec-expectations (~> 3.12)
|
||||
rspec-mocks (~> 3.12)
|
||||
rspec-support (~> 3.12)
|
||||
raabro (1.4.0)
|
||||
racc (1.8.1)
|
||||
rack (2.2.10)
|
||||
rack-cors (2.0.2)
|
||||
rack (>= 2.0.0)
|
||||
rack-mini-profiler (3.3.1)
|
||||
rack (>= 1.2.0)
|
||||
rack-protection (3.0.5)
|
||||
rack-proxy (0.7.7)
|
||||
rack
|
||||
rack-test (2.0.2)
|
||||
rack-session (1.0.2)
|
||||
rack (< 3)
|
||||
rack-test (2.2.0)
|
||||
rack (>= 1.3)
|
||||
rails (7.0.4.2)
|
||||
actioncable (= 7.0.4.2)
|
||||
actionmailbox (= 7.0.4.2)
|
||||
actionmailer (= 7.0.4.2)
|
||||
actionpack (= 7.0.4.2)
|
||||
actiontext (= 7.0.4.2)
|
||||
actionview (= 7.0.4.2)
|
||||
activejob (= 7.0.4.2)
|
||||
activemodel (= 7.0.4.2)
|
||||
activerecord (= 7.0.4.2)
|
||||
activestorage (= 7.0.4.2)
|
||||
activesupport (= 7.0.4.2)
|
||||
rackup (1.0.1)
|
||||
rack (< 3)
|
||||
webrick
|
||||
rails (7.2.2.1)
|
||||
actioncable (= 7.2.2.1)
|
||||
actionmailbox (= 7.2.2.1)
|
||||
actionmailer (= 7.2.2.1)
|
||||
actionpack (= 7.2.2.1)
|
||||
actiontext (= 7.2.2.1)
|
||||
actionview (= 7.2.2.1)
|
||||
activejob (= 7.2.2.1)
|
||||
activemodel (= 7.2.2.1)
|
||||
activerecord (= 7.2.2.1)
|
||||
activestorage (= 7.2.2.1)
|
||||
activesupport (= 7.2.2.1)
|
||||
bundler (>= 1.15.0)
|
||||
railties (= 7.0.4.2)
|
||||
rails-dom-testing (2.0.3)
|
||||
activesupport (>= 4.2.0)
|
||||
railties (= 7.2.2.1)
|
||||
rails-controller-testing (1.0.5)
|
||||
actionpack (>= 5.0.1.rc1)
|
||||
actionview (>= 5.0.1.rc1)
|
||||
activesupport (>= 5.0.1.rc1)
|
||||
rails-dom-testing (2.2.0)
|
||||
activesupport (>= 5.0.0)
|
||||
minitest
|
||||
nokogiri (>= 1.6)
|
||||
rails-html-sanitizer (1.5.0)
|
||||
loofah (~> 2.19, >= 2.19.1)
|
||||
railties (7.0.4.2)
|
||||
actionpack (= 7.0.4.2)
|
||||
activesupport (= 7.0.4.2)
|
||||
method_source
|
||||
rails-html-sanitizer (1.6.2)
|
||||
loofah (~> 2.21)
|
||||
nokogiri (>= 1.15.7, != 1.16.7, != 1.16.6, != 1.16.5, != 1.16.4, != 1.16.3, != 1.16.2, != 1.16.1, != 1.16.0.rc1, != 1.16.0)
|
||||
rails_semantic_logger (4.17.0)
|
||||
rack
|
||||
railties (>= 5.1)
|
||||
semantic_logger (~> 4.16)
|
||||
railties (7.2.2.1)
|
||||
actionpack (= 7.2.2.1)
|
||||
activesupport (= 7.2.2.1)
|
||||
irb (~> 1.13)
|
||||
rackup (>= 1.0.0)
|
||||
rake (>= 12.2)
|
||||
thor (~> 1.0)
|
||||
zeitwerk (~> 2.5)
|
||||
rake (13.0.6)
|
||||
thor (~> 1.0, >= 1.2.2)
|
||||
zeitwerk (~> 2.6)
|
||||
rainbow (3.1.1)
|
||||
rake (13.2.1)
|
||||
rb-fsevent (0.11.2)
|
||||
rb-inotify (0.10.1)
|
||||
rb-inotify (0.11.1)
|
||||
ffi (~> 1.0)
|
||||
regexp_parser (2.6.2)
|
||||
reline (0.3.2)
|
||||
rbi (0.2.2)
|
||||
prism (~> 1.0)
|
||||
sorbet-runtime (>= 0.5.9204)
|
||||
rdoc (6.10.0)
|
||||
psych (>= 4.0.0)
|
||||
react_on_rails (14.0.5)
|
||||
addressable
|
||||
connection_pool
|
||||
execjs (~> 2.5)
|
||||
rails (>= 5.2)
|
||||
rainbow (~> 3.0)
|
||||
regexp_parser (2.10.0)
|
||||
reline (0.6.0)
|
||||
io-console (~> 0.5)
|
||||
rexml (3.2.5)
|
||||
responders (3.1.1)
|
||||
actionpack (>= 5.2)
|
||||
railties (>= 5.2)
|
||||
rexml (3.4.0)
|
||||
rice (4.3.3)
|
||||
ripcord (2.0.0)
|
||||
rspec-core (3.12.1)
|
||||
rspec-support (~> 3.12.0)
|
||||
rspec-expectations (3.12.2)
|
||||
rouge (4.5.1)
|
||||
rspec (3.13.0)
|
||||
rspec-core (~> 3.13.0)
|
||||
rspec-expectations (~> 3.13.0)
|
||||
rspec-mocks (~> 3.13.0)
|
||||
rspec-core (3.13.2)
|
||||
rspec-support (~> 3.13.0)
|
||||
rspec-expectations (3.13.3)
|
||||
diff-lcs (>= 1.2.0, < 2.0)
|
||||
rspec-support (~> 3.12.0)
|
||||
rspec-mocks (3.12.3)
|
||||
rspec-support (~> 3.13.0)
|
||||
rspec-mocks (3.13.2)
|
||||
diff-lcs (>= 1.2.0, < 2.0)
|
||||
rspec-support (~> 3.12.0)
|
||||
rspec-rails (6.0.1)
|
||||
actionpack (>= 6.1)
|
||||
activesupport (>= 6.1)
|
||||
railties (>= 6.1)
|
||||
rspec-core (~> 3.11)
|
||||
rspec-expectations (~> 3.11)
|
||||
rspec-mocks (~> 3.11)
|
||||
rspec-support (~> 3.11)
|
||||
rspec-support (3.12.0)
|
||||
ruby-prof (1.4.5)
|
||||
rspec-support (~> 3.13.0)
|
||||
rspec-rails (7.1.0)
|
||||
actionpack (>= 7.0)
|
||||
activesupport (>= 7.0)
|
||||
railties (>= 7.0)
|
||||
rspec-core (~> 3.13)
|
||||
rspec-expectations (~> 3.13)
|
||||
rspec-mocks (~> 3.13)
|
||||
rspec-support (~> 3.13)
|
||||
rspec-sorbet (1.9.2)
|
||||
sorbet-runtime
|
||||
rspec-support (3.13.2)
|
||||
ruby-bbcode (2.1.1)
|
||||
activesupport (>= 4.2.2)
|
||||
ruby-prof (1.7.1)
|
||||
ruby-prof-speedscope (0.3.0)
|
||||
ruby-prof (~> 1.0)
|
||||
ruby2_keywords (0.0.5)
|
||||
ruby-vips (2.2.2)
|
||||
ffi (~> 1.12)
|
||||
logger
|
||||
rubyzip (2.3.2)
|
||||
selenium-webdriver (4.8.0)
|
||||
rufo (0.18.0)
|
||||
sanitize (6.1.3)
|
||||
crass (~> 1.0.2)
|
||||
nokogiri (>= 1.12.0)
|
||||
sd_notify (0.1.1)
|
||||
securerandom (0.4.1)
|
||||
selenium-webdriver (4.10.0)
|
||||
rexml (~> 3.2, >= 3.2.5)
|
||||
rubyzip (>= 1.2.2, < 3.0)
|
||||
websocket (~> 1.0)
|
||||
sinatra (3.0.5)
|
||||
mustermann (~> 3.0)
|
||||
rack (~> 2.2, >= 2.2.4)
|
||||
rack-protection (= 3.0.5)
|
||||
tilt (~> 2.0)
|
||||
sprockets (4.2.0)
|
||||
semantic_logger (4.16.1)
|
||||
concurrent-ruby (~> 1.0)
|
||||
semantic_range (3.1.0)
|
||||
shakapacker (6.6.0)
|
||||
activesupport (>= 5.2)
|
||||
rack-proxy (>= 0.6.1)
|
||||
railties (>= 5.2)
|
||||
semantic_range (>= 2.3.0)
|
||||
shoulda-matchers (6.4.0)
|
||||
activesupport (>= 5.2.0)
|
||||
sorbet (0.5.11711)
|
||||
sorbet-static (= 0.5.11711)
|
||||
sorbet-runtime (0.5.11711)
|
||||
sorbet-static (0.5.11711-aarch64-linux)
|
||||
sorbet-static (0.5.11711-universal-darwin)
|
||||
sorbet-static (0.5.11711-x86_64-linux)
|
||||
sorbet-static-and-runtime (0.5.11711)
|
||||
sorbet (= 0.5.11711)
|
||||
sorbet-runtime (= 0.5.11711)
|
||||
sorbet-struct-comparable (1.3.0)
|
||||
sorbet-runtime (>= 0.5)
|
||||
spoom (1.5.0)
|
||||
erubi (>= 1.10.0)
|
||||
prism (>= 0.28.0)
|
||||
sorbet-static-and-runtime (>= 0.5.10187)
|
||||
thor (>= 0.19.2)
|
||||
sprockets (4.2.1)
|
||||
concurrent-ruby (~> 1.0)
|
||||
rack (>= 2.2.4, < 4)
|
||||
sprockets-rails (3.4.2)
|
||||
actionpack (>= 5.2)
|
||||
activesupport (>= 5.2)
|
||||
sprockets-rails (3.5.2)
|
||||
actionpack (>= 6.1)
|
||||
activesupport (>= 6.1)
|
||||
sprockets (>= 3.0.0)
|
||||
sqlite3 (1.6.0)
|
||||
mini_portile2 (~> 2.8.0)
|
||||
stimulus-rails (1.2.1)
|
||||
sqlite3 (1.7.3-aarch64-linux)
|
||||
sqlite3 (1.7.3-arm64-darwin)
|
||||
sqlite3 (1.7.3-x86_64-darwin)
|
||||
sqlite3 (1.7.3-x86_64-linux)
|
||||
stackprof (0.2.26)
|
||||
stimulus-rails (1.3.4)
|
||||
railties (>= 6.0.0)
|
||||
stringio (3.1.2)
|
||||
syntax_tree (6.2.0)
|
||||
prettier_print (>= 1.2.0)
|
||||
table_print (1.5.7)
|
||||
thor (1.2.1)
|
||||
tilt (2.1.0)
|
||||
timeout (0.3.1)
|
||||
turbo-rails (1.3.3)
|
||||
tailwindcss-rails (3.1.0)
|
||||
railties (>= 7.0.0)
|
||||
tailwindcss-ruby
|
||||
tailwindcss-ruby (3.4.17-aarch64-linux)
|
||||
tailwindcss-ruby (3.4.17-arm64-darwin)
|
||||
tailwindcss-ruby (3.4.17-x86_64-darwin)
|
||||
tailwindcss-ruby (3.4.17-x86_64-linux)
|
||||
tapioca (0.16.5)
|
||||
bundler (>= 2.2.25)
|
||||
netrc (>= 0.11.0)
|
||||
parallel (>= 1.21.0)
|
||||
rbi (~> 0.2)
|
||||
sorbet-static-and-runtime (>= 0.5.11087)
|
||||
spoom (>= 1.2.0)
|
||||
thor (>= 1.2.0)
|
||||
yard-sorbet
|
||||
thor (1.3.2)
|
||||
thruster (0.1.11-aarch64-linux)
|
||||
thruster (0.1.11-arm64-darwin)
|
||||
thruster (0.1.11-x86_64-darwin)
|
||||
thruster (0.1.11-x86_64-linux)
|
||||
timeout (0.4.3)
|
||||
turbo-rails (2.0.11)
|
||||
actionpack (>= 6.0.0)
|
||||
activejob (>= 6.0.0)
|
||||
railties (>= 6.0.0)
|
||||
tzinfo (2.0.6)
|
||||
concurrent-ruby (~> 1.0)
|
||||
unf (0.1.4)
|
||||
unf_ext
|
||||
unf_ext (0.0.8.2)
|
||||
web-console (4.2.0)
|
||||
useragent (0.16.11)
|
||||
warden (1.2.9)
|
||||
rack (>= 2.0.9)
|
||||
web-console (4.2.1)
|
||||
actionview (>= 6.0.0)
|
||||
activemodel (>= 6.0.0)
|
||||
bindex (>= 0.4.0)
|
||||
railties (>= 6.0.0)
|
||||
webdrivers (5.2.0)
|
||||
webdrivers (5.3.1)
|
||||
nokogiri (~> 1.6)
|
||||
rubyzip (>= 1.3.0)
|
||||
selenium-webdriver (~> 4.0)
|
||||
websocket (1.2.9)
|
||||
websocket-driver (0.7.5)
|
||||
selenium-webdriver (~> 4.0, < 4.11)
|
||||
webrick (1.9.1)
|
||||
websocket (1.2.11)
|
||||
websocket-driver (0.7.6)
|
||||
websocket-extensions (>= 0.1.0)
|
||||
websocket-extensions (0.1.5)
|
||||
xpath (3.2.0)
|
||||
nokogiri (~> 1.8)
|
||||
zeitwerk (2.6.6)
|
||||
yard (0.9.37)
|
||||
yard-sorbet (0.9.0)
|
||||
sorbet-runtime
|
||||
yard
|
||||
zeitwerk (2.7.1)
|
||||
zstd-ruby (1.5.6.6)
|
||||
|
||||
PLATFORMS
|
||||
ruby
|
||||
aarch64-linux
|
||||
aarch64-linux-gnu
|
||||
aarch64-linux-musl
|
||||
arm64-darwin
|
||||
universal-darwin
|
||||
x86_64-darwin
|
||||
x86_64-linux-gnu
|
||||
x86_64-linux-musl
|
||||
|
||||
DEPENDENCIES
|
||||
addressable
|
||||
attr_json
|
||||
bootsnap
|
||||
capybara
|
||||
cloudflare-rails
|
||||
colorize
|
||||
concurrent-ruby-edge
|
||||
concurrent-ruby-ext
|
||||
cssbundling-rails (~> 1.4)
|
||||
curb
|
||||
curses
|
||||
daemons
|
||||
debug
|
||||
delayed_job_active_record
|
||||
delayed_job_web
|
||||
delayed_job_worker_pool
|
||||
db-query-matchers (~> 0.14)
|
||||
debug (~> 1.10)
|
||||
devise (~> 4.9)
|
||||
dhash-vips
|
||||
diffy
|
||||
discard
|
||||
disco
|
||||
docx
|
||||
dtext_rb!
|
||||
factory_bot_rails
|
||||
faiss
|
||||
ffmpeg!
|
||||
good_job (~> 4.6)
|
||||
htmlbeautifier
|
||||
http (~> 5.2)
|
||||
http-cookie
|
||||
importmap-rails
|
||||
influxdb-client
|
||||
jbuilder
|
||||
jbuilder (~> 2.13)
|
||||
kaminari
|
||||
listen
|
||||
memory_profiler
|
||||
neighbor
|
||||
nokogiri
|
||||
parallel_tests
|
||||
pg
|
||||
pg_query (>= 2)
|
||||
pghero (~> 3.6)
|
||||
pluck_each
|
||||
prettier_print
|
||||
progressbar
|
||||
prometheus_exporter (~> 2.2)
|
||||
pry
|
||||
pry-stack_explorer
|
||||
puma (~> 5.0)
|
||||
rack-mini-profiler
|
||||
rails (~> 7.0.4, >= 7.0.4.2)
|
||||
pundit (~> 2.4)
|
||||
pundit-matchers (~> 4.0)
|
||||
rack (~> 2.2)
|
||||
rack-cors
|
||||
rack-mini-profiler (~> 3.3)
|
||||
rails (~> 7.2)
|
||||
rails-controller-testing
|
||||
rails_live_reload!
|
||||
rails_semantic_logger (~> 4.17)
|
||||
rb-bsdiff!
|
||||
react_on_rails
|
||||
ripcord
|
||||
rspec-rails
|
||||
rouge
|
||||
rspec-rails (~> 7.0)
|
||||
rspec-sorbet
|
||||
ruby-bbcode
|
||||
ruby-prof
|
||||
ruby-prof-speedscope
|
||||
ruby-vips
|
||||
rufo
|
||||
sanitize (~> 6.1)
|
||||
sd_notify
|
||||
selenium-webdriver
|
||||
shakapacker (~> 6.6)
|
||||
shoulda-matchers
|
||||
sorbet-static-and-runtime
|
||||
sorbet-struct-comparable
|
||||
sprockets-rails
|
||||
sqlite3 (~> 1.4)
|
||||
stackprof
|
||||
stimulus-rails
|
||||
syntax_tree (~> 6.2)
|
||||
table_print
|
||||
tailwindcss-rails (~> 3.0)
|
||||
tapioca
|
||||
thruster
|
||||
timeout
|
||||
turbo-rails
|
||||
tzinfo-data
|
||||
web-console
|
||||
webdrivers
|
||||
xdiff!
|
||||
zstd-ruby
|
||||
|
||||
RUBY VERSION
|
||||
ruby 3.2.0p0
|
||||
ruby 3.2.6p234
|
||||
|
||||
BUNDLED WITH
|
||||
2.4.6
|
||||
2.6.2
|
||||
|
||||
5
Procfile.dev
Normal file
@@ -0,0 +1,5 @@
|
||||
rails: RAILS_ENV=development HTTP_PORT=3000 TARGET_PORT=3003 rdbg --command --nonstop --open -- thrust ./bin/rails server -p 3003
|
||||
wp-client: RAILS_ENV=development HMR=true ./bin/webpacker-dev-server
|
||||
wp-server: RAILS_ENV=development HMR=true SERVER_BUNDLE_ONLY=yes ./bin/webpacker --watch
|
||||
css: tailwindcss -c ./config/tailwind.config.js -i ./app/assets/stylesheets/application.tailwind.css -o ./app/assets/builds/tailwind.css --watch
|
||||
prometheus_exporter: RAILS_ENV=development bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "development"}'
|
||||
9
Procfile.dev-static
Normal file
@@ -0,0 +1,9 @@
|
||||
# You can run these commands in separate shells
|
||||
web: rails s -p 3000
|
||||
|
||||
# Next line runs a watch process with webpack to compile the changed files.
|
||||
# When making frequent changes to client-side assets, you will prefer building webpack assets
|
||||
# upon saving rather than when you refresh your browser page.
|
||||
# Note, if using React on Rails localization you will need to run
|
||||
# `bundle exec rake react_on_rails:locale` before you run bin/webpacker
|
||||
webpack: sh -c 'rm -rf public/packs/* || true && bin/webpacker -w'
|
||||
3
Procfile.production
Normal file
@@ -0,0 +1,3 @@
|
||||
rails: RAILS_ENV=production HTTP_PORT=3000 TARGET_PORT=3003 thrust ./bin/rails server -p 3003
|
||||
tail: tail -f log/production.log
|
||||
prometheus_exporter: RAILS_ENV=production bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "production"}'
|
||||
5
Procfile.staging
Normal file
@@ -0,0 +1,5 @@
|
||||
rails: RAILS_ENV=staging HTTP_PORT=3001 TARGET_PORT=3002 bundle exec thrust ./bin/rails server -p 3002
|
||||
wp-client: RAILS_ENV=development HMR=true ./bin/webpacker-dev-server
|
||||
wp-server: RAILS_ENV=development HMR=true SERVER_BUNDLE_ONLY=yes ./bin/webpacker --watch
|
||||
css: RAILS_ENV=development yarn "build:css[debug]" --watch
|
||||
prometheus_exporter: RAILS_ENV=staging bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "staging"}'
|
||||
3
Procfile.worker
Normal file
@@ -0,0 +1,3 @@
|
||||
periodic_tasks: RAILS_ENV=worker bundle exec rake periodic_tasks
|
||||
good_job: RAILS_ENV=worker bundle exec rake good_job
|
||||
prometheus_exporter: RAILS_ENV=worker bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "worker"}'
|
||||
@@ -7,8 +7,12 @@ Things you may want to cover:
|
||||
|
||||
* Ruby version
|
||||
|
||||
- 3.2.6
|
||||
|
||||
* System dependencies
|
||||
|
||||
- node 18.x
|
||||
|
||||
* Configuration
|
||||
|
||||
* Database creation
|
||||
@@ -21,4 +25,6 @@ Things you may want to cover:
|
||||
|
||||
* Deployment instructions
|
||||
|
||||
- Build the Docker image with `docker build . -t redux-scraper-app`
|
||||
|
||||
* ...
|
||||
|
||||
639
Rakefile
@@ -5,64 +5,615 @@ require "rake/testtask"
|
||||
require_relative "config/application"
|
||||
|
||||
Rails.application.load_tasks
|
||||
$LOAD_PATH << Rails.root.join("rake")
|
||||
Rake.application.rake_require "sst"
|
||||
Rake.application.rake_require "log_entry"
|
||||
Rake.application.rake_require "worker"
|
||||
Rake.application.rake_require "metrics"
|
||||
Rake.application.rake_require "fa"
|
||||
Rake.application.rake_require "e621"
|
||||
Rake.application.rake_require "twitter"
|
||||
Dir.glob(Rails.root.join("rake", "*.rake")).each { |rake_file| load rake_file }
|
||||
|
||||
task :set_ar_stdout => :environment do
|
||||
ActiveRecord::Base.logger = Logger.new(STDOUT)
|
||||
task set_ar_stdout: :environment do
|
||||
ActiveRecord::Base.logger = Logger.new($stdout)
|
||||
end
|
||||
|
||||
task :set_logger_stdout => :environment do
|
||||
Rails.logger = Logger.new(STDOUT)
|
||||
Rails.logger.formatter = proc do |severity, datetime, progname, msg|
|
||||
"#{severity}: #{msg}\n"
|
||||
end
|
||||
task set_logger_stdout: :environment do
|
||||
Rails.logger = Logger.new($stdout)
|
||||
Rails.logger.formatter =
|
||||
proc { |severity, datetime, progname, msg| "#{severity}: #{msg}\n" }
|
||||
ActiveRecord::Base.logger = nil
|
||||
ActiveJob::Base.logger = nil
|
||||
GoodJob.logger = Rails.logger
|
||||
end
|
||||
|
||||
task :pool_combined do
|
||||
ENV["RAILS_ENV"] = "production"
|
||||
proxies = ["direct", "proxy-1", "dedipath-1", "serverhost-1"]
|
||||
proxy = ENV["proxy"]
|
||||
raise("'proxy' must be set") unless proxy
|
||||
raise("'proxy' must be one of #{proxies}") unless proxies.include?(proxy)
|
||||
cmd = "bundle exec delayed_job_worker_pool pool_combined.rb"
|
||||
puts "$> #{cmd}"
|
||||
task periodic_tasks: %i[environment set_logger_stdout] do
|
||||
Thread.new do
|
||||
loop do
|
||||
Rake::Task["pghero:capture_space_stats"].execute
|
||||
puts "logged space stats"
|
||||
sleep 6.hours
|
||||
end
|
||||
end
|
||||
|
||||
Thread.new do
|
||||
loop do
|
||||
Rake::Task["pghero:capture_query_stats"].execute
|
||||
puts "logged query stats"
|
||||
sleep 5.minutes
|
||||
end
|
||||
end
|
||||
|
||||
loop { sleep 10 }
|
||||
end
|
||||
|
||||
namespace :db_sampler do
|
||||
task export: :environment do
|
||||
url_names = ENV["url_names"] || raise("need 'url_names' (comma-separated)")
|
||||
outfile = $stdout
|
||||
DbSampler.new(outfile).export(url_names.split(","))
|
||||
ensure
|
||||
outfile.close if outfile
|
||||
end
|
||||
|
||||
task import: [:environment] do
|
||||
infile = $stdin
|
||||
DbSampler.new(infile).import
|
||||
ensure
|
||||
infile.close if infile
|
||||
end
|
||||
end
|
||||
|
||||
task good_job: %i[environment set_ar_stdout set_logger_stdout] do
|
||||
env_hash = {
|
||||
"RAILS_ENV" => "worker",
|
||||
"GOOD_JOB_POLL_INTERVAL" => "5",
|
||||
"GOOD_JOB_MAX_CACHE" => "10000",
|
||||
"GOOD_JOB_QUEUE_SELECT_LIMIT" => "4096",
|
||||
"GOOD_JOB_MAX_THREADS" => "4",
|
||||
"GOOD_JOB_ENABLE_CRON" => "1",
|
||||
"GOOD_JOB_QUEUES" =>
|
||||
ENV["GOOD_JOB_QUEUES"] ||
|
||||
%w[manual:4 fa_post,e621:2 *:6].join(";"),
|
||||
}
|
||||
|
||||
env_hash.each do |key, value|
|
||||
ENV[key] = value
|
||||
puts "$> #{key.light_black.bold} = #{value.bold}"
|
||||
end
|
||||
|
||||
cmd = "bundle exec good_job"
|
||||
puts "$> #{cmd.bold}"
|
||||
exec(cmd)
|
||||
end
|
||||
|
||||
task :recompute_job_signatures => :environment do
|
||||
ActiveRecord::Base.logger = Logger.new(STDOUT)
|
||||
ActiveRecord::Base.logger.level = :error
|
||||
task :reverse_csv do
|
||||
file = ENV["file"] || raise("need 'file' (file path)")
|
||||
in_csv = CSV.parse(File.open(file, "r+"), headers: true)
|
||||
out_csv =
|
||||
CSV.new(
|
||||
File.open("rev_" + file, "w"),
|
||||
write_headers: true,
|
||||
headers: in_csv.headers,
|
||||
)
|
||||
in_csv.reverse_each { |row| out_csv << row.map(&:second) }
|
||||
out_csv.close
|
||||
end
|
||||
|
||||
start_at = ENV["start_at"]&.to_i || 0
|
||||
count = 0
|
||||
destroyed = 0
|
||||
puts "# jobs: #{Delayed::Job.count}"
|
||||
Delayed::Job.find_each(start: start_at) do |job|
|
||||
job.set_signature
|
||||
unless job.save
|
||||
job.destroy
|
||||
destroyed += 1
|
||||
task migrate_to_domain: :environment do
|
||||
only_user = ENV["only_user"]
|
||||
allowed_domains = %w[e621 fa ib]
|
||||
only_domains = (ENV["only_domains"] || "").split(",")
|
||||
only_domains = allowed_domains if only_domains.empty?
|
||||
if (only_domains - allowed_domains).any?
|
||||
raise "only_domains must be a subset of #{allowed_domains.join(", ")}"
|
||||
end
|
||||
|
||||
migrator = Domain::MigrateToDomain.new
|
||||
|
||||
if only_domains.include?("e621")
|
||||
# migrator.migrate_e621_users(only_user: only_user)
|
||||
# migrator.migrate_e621_posts(only_user: only_user)
|
||||
migrator.migrate_e621_users_favs(only_user: only_user)
|
||||
end
|
||||
|
||||
if only_domains.include?("fa")
|
||||
# migrator.migrate_fa_users(only_user: only_user)
|
||||
# migrator.migrate_fa_posts(only_user: only_user)
|
||||
# migrator.migrate_fa_users_favs(only_user: only_user)
|
||||
migrator.migrate_fa_users_followed_users(only_user: only_user)
|
||||
end
|
||||
|
||||
if only_domains.include?("ib")
|
||||
migrator.migrate_inkbunny_users(only_user: only_user)
|
||||
migrator.migrate_inkbunny_posts(only_user: only_user)
|
||||
migrator.migrate_inkbunny_pools(only_user: nil) if only_user.nil?
|
||||
end
|
||||
end
|
||||
|
||||
task infer_last_submission_log_entries: :environment do
|
||||
only_fa_id = ENV["only_fa_id"]
|
||||
start = ENV["start_at"]&.to_i
|
||||
|
||||
if only_fa_id
|
||||
relation = Domain::Fa::Post.where(fa_id: only_fa_id)
|
||||
else
|
||||
relation =
|
||||
Domain::Fa::Post
|
||||
.where(state: :ok)
|
||||
.where(last_submission_page_id: nil)
|
||||
.or(Domain::Fa::Post.where(state: :ok).where(posted_at: nil))
|
||||
end
|
||||
|
||||
relation.find_each(batch_size: 10, start:) do |post|
|
||||
parts = ["[id: #{post.id}]", "[fa_id: #{post.fa_id}]"]
|
||||
|
||||
log_entry = post.guess_last_submission_page
|
||||
unless log_entry
|
||||
parts << "[no log entry]"
|
||||
next
|
||||
end
|
||||
count += 1
|
||||
if count % 50 == 0
|
||||
puts "processed #{count}, destroyed #{destroyed} - last id: #{job.id}"
|
||||
|
||||
contents = log_entry.response&.contents
|
||||
unless contents
|
||||
parts << "[no contents]"
|
||||
next
|
||||
end
|
||||
|
||||
parser = Domain::Fa::Parser::Page.new(contents)
|
||||
if parser.submission_not_found?
|
||||
parts << "[removed]"
|
||||
post.state = :removed
|
||||
else
|
||||
posted_at = parser.submission.posted_date
|
||||
post.posted_at ||= posted_at
|
||||
parts << "[posted at: #{posted_at}]"
|
||||
end
|
||||
|
||||
if post.last_submission_page_id.present? &&
|
||||
log_entry.id != post.last_submission_page_id
|
||||
parts << "[overwrite]"
|
||||
end
|
||||
post.last_submission_page_id = log_entry.id
|
||||
|
||||
parts << "[log entry: #{log_entry.id}]"
|
||||
parts << "[uri: #{log_entry.uri.to_s}]"
|
||||
post.save!
|
||||
rescue => e
|
||||
parts << "[error: #{e.message}]"
|
||||
ensure
|
||||
puts parts.join(" ")
|
||||
end
|
||||
end
|
||||
|
||||
task fix_fa_post_files: :environment do
|
||||
file_ids = ENV["file_ids"]&.split(",") || raise("need 'file_ids'")
|
||||
Domain::Fa::Post
|
||||
.where(file_id: file_ids)
|
||||
.find_each { |post| post.fix_file_by_uri! }
|
||||
end
|
||||
|
||||
task fix_fa_post_files_by_csv: :environment do
|
||||
require "csv"
|
||||
|
||||
csv_file = ENV["csv_file"] || raise("need 'csv_file'")
|
||||
CSV
|
||||
.open(csv_file, headers: true)
|
||||
.each do |row|
|
||||
id = row["id"].to_i
|
||||
post = Domain::Fa::Post.find(id)
|
||||
post.fix_file_by_uri!
|
||||
end
|
||||
end
|
||||
|
||||
task fix_buggy_fa_posts: :environment do
|
||||
post_fa_ids = %w[7704069 7704068 6432347 6432346].map(&:to_i)
|
||||
|
||||
require "uri"
|
||||
|
||||
post_fa_ids.each do |fa_id|
|
||||
post = Domain::Fa::Post.find_by(fa_id: fa_id)
|
||||
next unless post&.file
|
||||
post_file_url_str = Addressable::URI.parse(post.file_url_str).to_s
|
||||
file_url_str = Addressable::URI.parse(CGI.unescape(post.file.uri.to_s)).to_s
|
||||
hle = post.guess_last_submission_page
|
||||
|
||||
parser = Domain::Fa::Parser::Page.new(hle.response.contents)
|
||||
if parser.submission_not_found?
|
||||
post.file = nil
|
||||
post.save!
|
||||
puts "submission not found"
|
||||
else
|
||||
submission = parser.submission
|
||||
full_res_img = Addressable::URI.parse(submission.full_res_img)
|
||||
full_res_img.scheme = "https" if full_res_img.scheme.blank?
|
||||
matches = full_res_img.to_s == post.file_url_str
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
task :workoff_failed_jobs => [:environment, :set_ar_stdout, :set_logger_stdout] do
|
||||
worker = Delayed::Worker.new
|
||||
Delayed::Job.where(
|
||||
"last_error is not null and attempts <= 2"
|
||||
).find_each(batch_size: 1) do |job|
|
||||
worker.run(job)
|
||||
task enqueue_fa_posts_missing_files: %i[environment set_logger_stdout] do
|
||||
Domain::Post::FaPost
|
||||
.where(state: "ok")
|
||||
.where
|
||||
.missing(:file)
|
||||
.find_each(order: :desc) do |post|
|
||||
Domain::Fa::Job::ScanPostJob.perform_now(post:)
|
||||
end
|
||||
end
|
||||
|
||||
task fix_e621_post_files: :environment do
|
||||
query = Domain::Post::E621Post.where(state: "ok").where.missing(:files)
|
||||
limit = ENV["limit"]&.to_i
|
||||
puts "query: #{query.to_sql}"
|
||||
|
||||
query.find_each(batch_size: 10) do |post|
|
||||
Domain::E621::Task::FixE621PostMissingFiles.new.run(post)
|
||||
if limit
|
||||
limit -= 1
|
||||
if limit.zero?
|
||||
puts "limit reached"
|
||||
break
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
task fix_ok_e621_posts_missing_files: :environment do
|
||||
query = Domain::Post::E621Post.where(state: "ok").where.missing(:file)
|
||||
progress_bar =
|
||||
ProgressBar.create(total: query.count, format: "%t: %c/%C %B %p%% %a %e")
|
||||
query.find_each(batch_size: 10) do |post|
|
||||
Domain::E621::Job::ScanPostJob.perform_now(post:)
|
||||
progress_bar.progress = [progress_bar.progress + 1, progress_bar.total].min
|
||||
end
|
||||
end
|
||||
|
||||
task perform_good_jobs: :environment do
|
||||
job_class = ENV["job_class"]
|
||||
job_id = ENV["job_id"]
|
||||
limit = ENV["limit"]&.to_i
|
||||
|
||||
if !job_id.present? && !job_class.present?
|
||||
raise "need 'job_id' or 'job_class'"
|
||||
end
|
||||
|
||||
relation =
|
||||
if job_id
|
||||
job =
|
||||
GoodJob::Job.find_by(id: job_id) ||
|
||||
GoodJob::Execution.find_by(id: job_id)&.job
|
||||
if job.nil?
|
||||
puts "no job found with id #{job_id}"
|
||||
exit 1
|
||||
end
|
||||
puts "found job with id #{job.id}" if job.id != job_id
|
||||
GoodJob::Job.where(id: job.id)
|
||||
else
|
||||
GoodJob::Job.queued.where(job_class: job_class).order(created_at: :asc)
|
||||
end
|
||||
|
||||
relation.find_each(batch_size: 1) do |job|
|
||||
job = T.cast(job, GoodJob::Job)
|
||||
|
||||
# Get the actual job instance and deserialize arguments
|
||||
serialized_args = job.serialized_params["arguments"]
|
||||
if serialized_args.nil?
|
||||
puts "No arguments found for job #{job.id}"
|
||||
next
|
||||
end
|
||||
|
||||
deserialized_args = ActiveJob::Arguments.deserialize(serialized_args)
|
||||
job_instance = job.job_class.constantize.new
|
||||
job_instance.deserialize(job.serialized_params)
|
||||
|
||||
puts "Running job #{job.id} (#{job.job_class})"
|
||||
|
||||
# Create execution record
|
||||
execution =
|
||||
GoodJob::Execution.create!(
|
||||
active_job_id: job.active_job_id,
|
||||
job_class: job.job_class,
|
||||
queue_name: job.queue_name,
|
||||
serialized_params: job.serialized_params,
|
||||
scheduled_at: job.scheduled_at,
|
||||
created_at: Time.current,
|
||||
updated_at: Time.current,
|
||||
process_id: SecureRandom.uuid,
|
||||
)
|
||||
|
||||
start_time = Time.current
|
||||
|
||||
# Temporarily disable concurrency limits
|
||||
job_class = job.job_class.constantize
|
||||
old_config = job_class.good_job_concurrency_config
|
||||
job_class.good_job_concurrency_config = { total_limit: nil }
|
||||
|
||||
begin
|
||||
# Perform the job with deserialized arguments
|
||||
GoodJob::CurrentThread.job = job
|
||||
job.update!(performed_at: Time.current)
|
||||
job_instance.arguments = deserialized_args
|
||||
job_instance.perform_now
|
||||
|
||||
# Update execution and job records
|
||||
execution.update!(
|
||||
finished_at: Time.current,
|
||||
error: nil,
|
||||
error_event: nil,
|
||||
duration: Time.current - start_time,
|
||||
)
|
||||
job.update!(finished_at: Time.current)
|
||||
puts "Job completed successfully"
|
||||
rescue => e
|
||||
puts "Job failed: #{e.message}"
|
||||
# Update execution and job records with error
|
||||
execution.update!(
|
||||
finished_at: Time.current,
|
||||
error: e.message,
|
||||
error_event: "execution_failed",
|
||||
error_backtrace: e.backtrace,
|
||||
duration: Time.current - start_time,
|
||||
)
|
||||
job.update!(
|
||||
error: "#{e.class}: #{e.message}",
|
||||
error_event: "execution_failed",
|
||||
)
|
||||
raise e
|
||||
ensure
|
||||
job.update!(
|
||||
executions_count: GoodJob::Execution.where(active_job_id: job.id).count,
|
||||
)
|
||||
# Restore original concurrency config
|
||||
job_class.good_job_concurrency_config = old_config
|
||||
GoodJob::CurrentThread.job = nil
|
||||
end
|
||||
|
||||
if limit
|
||||
limit -= 1
|
||||
if limit.zero?
|
||||
puts "limit reached"
|
||||
break
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
task fix_removed_fa_posts: :environment do
|
||||
colorize_state = ->(state) do
|
||||
case state
|
||||
when "ok"
|
||||
"ok".green
|
||||
when "removed"
|
||||
"removed".red
|
||||
else
|
||||
state.to_s
|
||||
end.bold
|
||||
end
|
||||
|
||||
last_fa_id = ENV["start_at"]&.to_i
|
||||
while true
|
||||
query =
|
||||
Domain::Post::FaPost
|
||||
.where(state: "removed")
|
||||
.where.not(title: nil)
|
||||
.order(fa_id: :desc)
|
||||
query = query.where(fa_id: ...last_fa_id) if last_fa_id
|
||||
post = query.first
|
||||
break unless post
|
||||
last_fa_id = post.fa_id
|
||||
|
||||
puts "[before] [post.state: #{colorize_state.call(post.state)}] [post.file.id: #{post.file&.id}] [post.id: #{post.id}] [post.fa_id: #{post.fa_id}] [post.title: #{post.title}]"
|
||||
Domain::Fa::Job::ScanPostJob.perform_now(post: post, force_scan: true)
|
||||
post.reload
|
||||
puts "[after] [post.state: #{colorize_state.call(post.state)}] [post.file.id: #{post.file&.id}] [post.id: #{post.id}] [post.fa_id: #{post.fa_id}] [post.title: #{post.title}]"
|
||||
sleep 2
|
||||
end
|
||||
rescue => e
|
||||
puts "error: #{e.message}"
|
||||
binding.pry
|
||||
end
|
||||
|
||||
task fix_fa_user_avatars: :environment do
|
||||
new_users_missing_avatar =
|
||||
Domain::User::FaUser.where.missing(:avatar).select(:url_name)
|
||||
old_users_with_avatar =
|
||||
Domain::Fa::User
|
||||
.where(url_name: new_users_missing_avatar)
|
||||
.includes(:avatar)
|
||||
.filter(&:avatar)
|
||||
|
||||
old_users_with_avatar.each do |old_user|
|
||||
old_avatar = old_user.avatar
|
||||
new_user = Domain::User::FaUser.find_by(url_name: old_user.url_name)
|
||||
|
||||
if old_avatar.log_entry.nil?
|
||||
puts "enqueue fresh download for #{old_user.url_name}"
|
||||
new_avatar = Domain::UserAvatar.new
|
||||
new_user.avatar = new_avatar
|
||||
new_user.save!
|
||||
Domain::Fa::Job::UserAvatarJob.perform_now(avatar: new_avatar)
|
||||
new_avatar.reload
|
||||
|
||||
binding.pry
|
||||
next
|
||||
end
|
||||
|
||||
new_avatar = Domain::UserAvatar.new
|
||||
new_avatar.log_entry_id = old_avatar.log_entry_id
|
||||
new_avatar.last_log_entry_id = old_avatar.log_entry_id
|
||||
new_avatar.url_str = old_avatar.file_url_str
|
||||
new_avatar.downloaded_at = old_avatar.log_entry&.created_at
|
||||
new_avatar.state =
|
||||
case old_avatar.state
|
||||
when "ok"
|
||||
old_avatar.log_entry_id.present? ? "ok" : "pending"
|
||||
when "file_not_found"
|
||||
new_avatar.error_message = old_avatar.state
|
||||
"file_404"
|
||||
else
|
||||
new_avatar.error_message = old_avatar.state
|
||||
"http_error"
|
||||
end
|
||||
new_user.avatar = new_avatar
|
||||
new_user.save!
|
||||
puts "migrated #{old_user.url_name}"
|
||||
rescue => e
|
||||
puts "error: #{e.message}"
|
||||
binding.pry
|
||||
end
|
||||
end
|
||||
|
||||
task run_fa_user_avatar_jobs: :environment do
|
||||
avatars =
|
||||
Domain::UserAvatar
|
||||
.where(state: "pending")
|
||||
.joins(:user)
|
||||
.where(user: { type: Domain::User::FaUser.name })
|
||||
|
||||
puts "count: #{avatars.count}"
|
||||
|
||||
avatars.each do |avatar|
|
||||
Domain::Fa::Job::UserAvatarJob.perform_now(avatar:)
|
||||
avatar.reload
|
||||
puts "perform avatar job for #{avatar.user.url_name} - #{avatar.state.bold}"
|
||||
end
|
||||
end
|
||||
|
||||
task sample_migrated_favs: :environment do
|
||||
new_user = Domain::User::FaUser.where.not(migrated_user_favs_at: nil).last
|
||||
old_user = Domain::Fa::User.find_by(url_name: new_user.url_name)
|
||||
|
||||
puts "user: #{new_user.url_name}"
|
||||
puts "old fav count: #{old_user.fav_posts.count}"
|
||||
puts "new fav count: #{new_user.faved_posts.count}"
|
||||
end
|
||||
|
||||
task create_post_file_fingerprints: :environment do
|
||||
def migrate_posts_for_user(user)
|
||||
puts "migrating posts for #{user.to_param}"
|
||||
pb =
|
||||
ProgressBar.create(
|
||||
total: user.posts.count,
|
||||
format: "%t: %c/%C %B %p%% %a %e",
|
||||
)
|
||||
|
||||
user
|
||||
.posts
|
||||
.includes(:files)
|
||||
.find_in_batches(batch_size: 64) do |batch|
|
||||
ReduxApplicationRecord.transaction do
|
||||
batch.each { |post| migrate_post(post) }
|
||||
pb.progress = [pb.progress + 1, pb.total].min
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def migrate_post(post)
|
||||
puts "migrating #{post.id} / #{post.to_param} / '#{post.title_for_view}'"
|
||||
ColorLogger.quiet do
|
||||
post.files.each do |file|
|
||||
migrate_post_file(file)
|
||||
rescue StandardError => e
|
||||
puts "error: #{e.message}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def migrate_post_file(post_file)
|
||||
job = Domain::PostFileThumbnailJob.new
|
||||
ColorLogger.quiet do
|
||||
job.perform({ post_file: })
|
||||
rescue => e
|
||||
puts "error: #{e.message}"
|
||||
end
|
||||
end
|
||||
|
||||
if ENV["post_file_descending"].present?
|
||||
total = 49_783_962 # cache this value
|
||||
pb = ProgressBar.create(total:, format: "%t: %c/%C %B %p%% %a %e")
|
||||
i = 0
|
||||
Domain::PostFile
|
||||
.where(state: "ok")
|
||||
.includes(:blob)
|
||||
.find_each(
|
||||
order: :desc,
|
||||
batch_size: 32,
|
||||
start: ENV["start_at"],
|
||||
) do |post_file|
|
||||
i += 1
|
||||
if i % 100 == 0
|
||||
puts "migrating #{post_file.id} / #{post_file.post.title_for_view}"
|
||||
end
|
||||
migrate_post_file(post_file)
|
||||
pb.progress = [pb.progress + 1, pb.total].min
|
||||
end
|
||||
elsif ENV["posts_descending"].present?
|
||||
# total = Domain::Post.count
|
||||
total = 66_431_808 # cache this value
|
||||
pb = ProgressBar.create(total:, format: "%t: %c/%C %B %p%% %a %e")
|
||||
Domain::Post.find_each(order: :desc) do |post|
|
||||
migrate_post(post) unless post.is_a?(Domain::Post::InkbunnyPost)
|
||||
pb.progress = [pb.progress + 1, pb.total].min
|
||||
end
|
||||
elsif ENV["user"].present?
|
||||
for_user = ENV["user"] || raise("need 'user'")
|
||||
user = DomainController.find_model_from_param(Domain::User, for_user)
|
||||
raise "user '#{for_user}' not found" unless user
|
||||
migrate_posts_for_user(user)
|
||||
elsif ENV["users_descending"].present?
|
||||
# all users with posts, ordered by post count descending
|
||||
migrated_file = File.open("migrated_files.txt", "a+")
|
||||
migrated_file.seek(0)
|
||||
migrated_users = migrated_file.readlines.map(&:strip)
|
||||
users =
|
||||
Domain::User::FaUser.order(
|
||||
Arel.sql("json_attributes->>'num_watched_by' DESC NULLS LAST"),
|
||||
).pluck(:id)
|
||||
|
||||
users.each do |user_id|
|
||||
user = Domain::User::FaUser.find(user_id)
|
||||
next if migrated_users.include?(user.to_param)
|
||||
puts "migrating posts for #{user.to_param} (#{user.num_watched_by} watched by)"
|
||||
migrate_posts_for_user(user)
|
||||
migrated_file.write("#{user.to_param}\n")
|
||||
migrated_file.flush
|
||||
end
|
||||
migrated_file.close
|
||||
else
|
||||
raise "need 'user' or 'users_descending'"
|
||||
end
|
||||
end
|
||||
|
||||
task enqueue_pending_post_files: :environment do
|
||||
query = Domain::PostFile.where(state: "pending")
|
||||
puts "enqueueing #{query.count} pending post files"
|
||||
query.find_in_batches(batch_size: 100, start: ENV["start_at"]) do |batch|
|
||||
while (
|
||||
queue_size =
|
||||
GoodJob::Job.where(
|
||||
job_class: "Job::PostFileJob",
|
||||
performed_at: nil,
|
||||
scheduled_at: nil,
|
||||
error: nil,
|
||||
).count
|
||||
) > 100
|
||||
puts "queue size: #{queue_size}"
|
||||
sleep 10
|
||||
end
|
||||
batch.each do |post_file|
|
||||
Job::PostFileJob.set(priority: 10).perform_later(post_file:)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
task find_post_files_with_empty_response: :environment do
|
||||
query =
|
||||
Domain::PostFile
|
||||
.where(state: "ok", retry_count: 0)
|
||||
.joins(:log_entry)
|
||||
.where(http_log_entries: { response_sha256: BlobFile::EMPTY_FILE_SHA256 })
|
||||
|
||||
pb = ProgressBar.create(total: query.count, format: "%t: %c/%C %B %p%% %a %e")
|
||||
|
||||
query.find_each(batch_size: 10) do |post_file|
|
||||
# puts "post_file: #{post_file.id} / '#{post_file.post.to_param}'"
|
||||
post_file.state_pending!
|
||||
post_file.save!
|
||||
Job::PostFileJob.perform_now(post_file:)
|
||||
pb.progress = [pb.progress + 1, pb.total].min
|
||||
end
|
||||
end
|
||||
|
||||
41
TODO.md
Normal file
@@ -0,0 +1,41 @@
|
||||
# Project TODO List
|
||||
|
||||
- [ ] Add bookmarking feature for posts across different domains
|
||||
- [ ] Add search feature to search FA descriptions, tags, E621 descriptions, tags
|
||||
- [x] Get inkbunny index scan job working
|
||||
- [x] Attach logs to jobs, page to view jobs and their logs
|
||||
- [ ] Standardize all the embeddings tables to use the same schema (item_id, embedding)
|
||||
- [ ] Bluesky scraper
|
||||
- [x] Download favs / votes for E621 users
|
||||
- [ ] Automatically enqueue jobs for FA users to do incremental scans of profiles
|
||||
- [ ] Fix FA posts that start with "Font size adjustment: smallerlarger"
|
||||
- [ ] Convert logger .prefix=... into .tagged(...)
|
||||
- [x] `make_tag` should be smart about the objects it takes
|
||||
- [ ] Convert all `state: string` attributes to enums in ActiveRecord models (see the enum sketch after this list)
|
||||
- [ ] Create `belongs_to_log_entry` macro for ActiveRecord models (see the concern sketch after this list)
|
||||
- [x] Use StaticFileJobHelper for Domain::Fa::Job::ScanFileJob
|
||||
- [ ] Unify HTTP client configs for all domains, so the same job type can be used for different domains
|
||||
- [ ] put abstract `external_url_for_view` in a module
|
||||
- [ ] backfill descriptions on inkbunny posts
|
||||
- [ ] store deep update json on inkbunny posts
|
||||
- [x] limit number of users, or paginate for "users who favorited this post" page
|
||||
- [ ] manual good job runner does not indicate if the job threw an exception - check return value of #perform, maybe?
|
||||
- [ ] FA user favs job, when in incremental mode, should stop once all posts on the page are already known favs (e.g. pages with only 47 posts are not a false positive)
|
||||
- [x] Factor out FA listings page enqueue logic into common location; use in Gallery and Favs jobs
|
||||
- [ ] Add followers / following to FA user show page
|
||||
- [x] Parse E621 source url for inkbunny posts & users
|
||||
- [x] Parse E621 source url for fa users
|
||||
- [ ] Parse BBCode in post descriptions
|
||||
- example post with bbcode: https://refurrer.com/posts/ib/3452498
|
||||
- [ ] Show tags on fa posts, ib posts
|
||||
- [ ] Sofurry implementation
|
||||
- [ ] Make unified Static file job
|
||||
- [ ] Make unified Avatar file job
|
||||
- [ ] ko-fi domain icon
|
||||
- [ ] tumblr domain icon
|
||||
- [ ] Do PCA on user factors table to display a 2D plot of users
|
||||
- [ ] Use links found in descriptions to indicate re-scanning a post? (e.g. for comic next/prev links)
|
||||
- [ ] fix for IDs that have a dot in them - e.g. https://refurrer.com/users/fa@jakke.
|
||||
- [ ] Rich inline links to e621 e.g. https://refurrer.com/posts/fa@60070060
|
||||
- [ ] Find FaPost that have favs recorded but no scan / file, enqueue scan
|
||||
- [ ] Bunch of posts with empty responses: posts = Domain::Post.joins(files: :log_entry).where(files: { http_log_entries: { response_sha256: BlobFile::EMPTY_FILE_SHA256 }}).limit(10)
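A minimal sketch of what the "convert `state: string` to enums" item might look like on one of the post models. The model, column, and state names (pending/ok/removed) are assumptions taken from other files in this diff, not a confirmed migration plan; adjust per model before using.

# Hypothetical sketch for the "state: string -> enum" TODO item above.
class Domain::Post::FaPost < ReduxApplicationRecord
  # String-backed values keep existing rows and `where(state: "ok")` calls
  # valid; `prefix: true` generates state_ok?, state_removed!, and the
  # FaPost.state_ok scope, the same naming style as the `state_pending!`
  # call already used in the Rakefile.
  enum :state,
       { pending: "pending", ok: "ok", removed: "removed" },
       prefix: true,
       default: "pending"
end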
|
||||
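A possible shape for the `belongs_to_log_entry` macro item, written as a plain ActiveSupport::Concern. The `HttpLogEntry` class name is inferred from the `http_log_entries` table referenced in this diff, and the `last_log_entry` variant mirrors the `log_entry_id` / `last_log_entry_id` columns used in the avatar migration task; both are assumptions, not confirmed API.

# Hypothetical sketch for the `belongs_to_log_entry` macro TODO item.
module BelongsToLogEntry
  extend ActiveSupport::Concern

  class_methods do
    # belongs_to_log_entry                  -> belongs_to :log_entry
    # belongs_to_log_entry :last_log_entry  -> belongs_to :last_log_entry
    # Both associations point at HttpLogEntry and stay optional so records
    # that have not been scraped yet remain valid.
    def belongs_to_log_entry(name = :log_entry, optional: true)
      belongs_to name, class_name: "HttpLogEntry", optional: optional
    end
  end
end

# Usage sketch (model and columns assumed):
# class Domain::UserAvatar < ReduxApplicationRecord
#   include BelongsToLogEntry
#   belongs_to_log_entry
#   belongs_to_log_entry :last_log_entry
# end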
@@ -1,4 +1,5 @@
|
||||
//= link_tree ../images
|
||||
//= link_directory ../stylesheets .css
|
||||
//= link_tree ../../javascript .js
|
||||
//= link_tree ../../../vendor/javascript .js
|
||||
//= link_tree ../builds
|
||||
//= link good_job_custom.css
|
||||
|
||||
3
app/assets/images/arrow-top-right-on-square.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" class="w-6 h-6">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" d="M13.5 6H5.25A2.25 2.25 0 003 8.25v10.5A2.25 2.25 0 005.25 21h10.5A2.25 2.25 0 0018 18.75V10.5m-10.5 6L21 3m0 0h-5.25M21 3v5.25" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 326 B |
BIN
app/assets/images/domain-icons/bigcartel.png
Normal file
|
After Width: | Height: | Size: 1.8 KiB |
BIN
app/assets/images/domain-icons/boosty.png
Normal file
|
After Width: | Height: | Size: 14 KiB |
BIN
app/assets/images/domain-icons/bsky.png
Normal file
|
After Width: | Height: | Size: 7.6 KiB |
BIN
app/assets/images/domain-icons/carrd.png
Normal file
|
After Width: | Height: | Size: 5.1 KiB |
BIN
app/assets/images/domain-icons/deviantart.png
Normal file
|
After Width: | Height: | Size: 1.6 KiB |
BIN
app/assets/images/domain-icons/e621.png
Normal file
|
After Width: | Height: | Size: 5.0 KiB |
BIN
app/assets/images/domain-icons/fa.png
Normal file
|
After Width: | Height: | Size: 8.2 KiB |
BIN
app/assets/images/domain-icons/gumroad.png
Normal file
|
After Width: | Height: | Size: 3.7 KiB |
BIN
app/assets/images/domain-icons/inkbunny.png
Normal file
|
After Width: | Height: | Size: 15 KiB |
BIN
app/assets/images/domain-icons/itaku.png
Normal file
|
After Width: | Height: | Size: 3.7 KiB |
BIN
app/assets/images/domain-icons/itch-io.png
Normal file
|
After Width: | Height: | Size: 2.2 KiB |
BIN
app/assets/images/domain-icons/ko-fi.png
Normal file
|
After Width: | Height: | Size: 1.1 KiB |
BIN
app/assets/images/domain-icons/newgrounds.png
Normal file
|
After Width: | Height: | Size: 797 B |
BIN
app/assets/images/domain-icons/patreon.png
Normal file
|
After Width: | Height: | Size: 772 B |
BIN
app/assets/images/domain-icons/pixiv.png
Normal file
|
After Width: | Height: | Size: 678 B |
BIN
app/assets/images/domain-icons/redbubble.png
Normal file
|
After Width: | Height: | Size: 7.6 KiB |
1
app/assets/images/domain-icons/sorbet/rbi/dsl/.gitattributes
vendored
Normal file
@@ -0,0 +1 @@
|
||||
**/*.rbi linguist-generated=true
|
||||
23
app/assets/images/domain-icons/sorbet/rbi/dsl/active_support/callbacks.rbi
generated
Normal file
@@ -0,0 +1,23 @@
|
||||
# typed: true
|
||||
|
||||
# DO NOT EDIT MANUALLY
|
||||
# This is an autogenerated file for dynamic methods in `ActiveSupport::Callbacks`.
|
||||
# Please instead update this file by running `bin/tapioca dsl ActiveSupport::Callbacks`.
|
||||
|
||||
|
||||
module ActiveSupport::Callbacks
|
||||
include GeneratedInstanceMethods
|
||||
|
||||
mixes_in_class_methods GeneratedClassMethods
|
||||
|
||||
module GeneratedClassMethods
|
||||
def __callbacks; end
|
||||
def __callbacks=(value); end
|
||||
def __callbacks?; end
|
||||
end
|
||||
|
||||
module GeneratedInstanceMethods
|
||||
def __callbacks; end
|
||||
def __callbacks?; end
|
||||
end
|
||||
end
|
||||
BIN
app/assets/images/domain-icons/spreadshirt.png
Normal file
|
After Width: | Height: | Size: 15 KiB |
BIN
app/assets/images/domain-icons/subscribestar.png
Normal file
|
After Width: | Height: | Size: 3.8 KiB |
BIN
app/assets/images/domain-icons/telegram.png
Normal file
|
After Width: | Height: | Size: 9.3 KiB |
BIN
app/assets/images/domain-icons/tumblr.png
Normal file
|
After Width: | Height: | Size: 1.4 KiB |
BIN
app/assets/images/domain-icons/weasyl.png
Normal file
|
After Width: | Height: | Size: 11 KiB |
BIN
app/assets/images/domain-icons/x-twitter.png
Normal file
|
After Width: | Height: | Size: 4.1 KiB |
BIN
app/assets/images/furecs/furecs-screenshot-2.png
Normal file
|
After Width: | Height: | Size: 114 KiB |
BIN
app/assets/images/furecs/furecs-screenshot.png
Normal file
|
After Width: | Height: | Size: 123 KiB |
19
app/assets/images/generic-domain.svg
Normal file
@@ -0,0 +1,19 @@
|
||||
<svg
|
||||
width="16"
|
||||
height="16"
|
||||
viewBox="0 0 16 16"
|
||||
fill="none"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<!-- Background circle -->
|
||||
<circle cx="8" cy="8" r="7" fill="#E0E0E0" />
|
||||
|
||||
<!-- Stylized "www" text -->
|
||||
<path
|
||||
d="M4 8.5C4 6.5 5 5.5 6 5.5C7 5.5 8 6.5 8 8.5C8 6.5 9 5.5 10 5.5C11 5.5 12 6.5 12 8.5"
|
||||
stroke="#666666"
|
||||
stroke-width="1.5"
|
||||
stroke-linecap="round"
|
||||
fill="none"
|
||||
/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 414 B |
BIN
app/assets/images/refurrer-logo-icon.png
Normal file
|
After Width: | Height: | Size: 3.2 KiB |
BIN
app/assets/images/refurrer-logo-md.png
Normal file
|
After Width: | Height: | Size: 17 KiB |
BIN
app/assets/images/refurrer-logo.png
Normal file
|
After Width: | Height: | Size: 325 KiB |
3
app/assets/images/user-circle.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" class="w-6 h-6">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" d="M15.75 6a3.75 3.75 0 11-7.5 0 3.75 3.75 0 017.5 0zM4.501 20.118a7.5 7.5 0 0114.998 0A17.933 17.933 0 0112 21.75c-2.676 0-5.216-.584-7.499-1.632z" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 344 B |
@@ -1,49 +0,0 @@
|
||||
/*
|
||||
* This is a manifest file that'll be compiled into application.css, which will include all the files
|
||||
* listed below.
|
||||
*
|
||||
* Any CSS (and SCSS, if configured) file within this directory, lib/assets/stylesheets, or any plugin's
|
||||
* vendor/assets/stylesheets directory can be referenced here using a relative path.
|
||||
*
|
||||
* You're free to add application-wide styles to this file and they'll appear at the bottom of the
|
||||
* compiled file so the styles you add here take precedence over styles defined in any other CSS
|
||||
* files in this directory. Styles in this file should be added after the last require_* statement.
|
||||
* It is generally better to create a new file per style scope.
|
||||
*
|
||||
*= require_tree .
|
||||
*= require_self
|
||||
*/
|
||||
html,
|
||||
body {
|
||||
height: 100%;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
body {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
margin: 0 2em;
|
||||
}
|
||||
|
||||
.content-container {
|
||||
flex-grow: 1;
|
||||
margin: 1em 0;
|
||||
min-height: 512px;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.image-container {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
margin: 0 auto;
|
||||
position: absolute;
|
||||
}
|
||||
|
||||
.image-container .media {
|
||||
position: absolute;
|
||||
left: 50%;
|
||||
transform: translate(-50%);
|
||||
max-height: 100%;
|
||||
max-width: 100%;
|
||||
box-shadow: 0 0 5px 1px black;
|
||||
}
|
||||
61
app/assets/stylesheets/application.tailwind.css
Normal file
@@ -0,0 +1,61 @@
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
.animated-shadow-sky {
|
||||
@apply shadow-lg;
|
||||
}
|
||||
|
||||
.sky-section {
|
||||
@apply divide-y divide-slate-300 overflow-hidden border border-slate-300 bg-slate-100 sm:rounded-lg;
|
||||
}
|
||||
|
||||
.section-header {
|
||||
@apply px-4 py-3 font-medium text-slate-900;
|
||||
}
|
||||
|
||||
.sky-section-header {
|
||||
@apply px-4 py-3 font-medium text-slate-900;
|
||||
}
|
||||
|
||||
.sky-link {
|
||||
@apply text-sky-600 underline decoration-dotted transition-colors hover:text-sky-800;
|
||||
}
|
||||
|
||||
.blue-link {
|
||||
@apply text-blue-600 transition-colors hover:text-blue-800 hover:underline;
|
||||
}
|
||||
|
||||
.scroll-shadows {
|
||||
background:
|
||||
/* Shadow Cover TOP */
|
||||
linear-gradient(white 30%, rgba(255, 255, 255, 0)) center top,
|
||||
/* Shadow Cover BOTTOM */ linear-gradient(rgba(255, 255, 255, 0), white 70%)
|
||||
center bottom,
|
||||
/* Shadow TOP */
|
||||
linear-gradient(to bottom, rgba(0, 0, 0, 0.1), rgba(0, 0, 0, 0)) center
|
||||
top,
|
||||
/* Shadow BOTTOM */
|
||||
linear-gradient(to top, rgba(0, 0, 0, 0.1), rgba(0, 0, 0, 0)) center
|
||||
bottom;
|
||||
|
||||
background-repeat: no-repeat;
|
||||
background-size:
|
||||
100% 20px,
|
||||
100% 20px,
|
||||
100% 10px,
|
||||
100% 10px;
|
||||
background-attachment: local, local, scroll, scroll;
|
||||
}
|
||||
|
||||
.log-entry-table-header-cell {
|
||||
@apply border-b border-slate-200 bg-slate-50 px-2 py-1 text-xs font-medium uppercase tracking-wider text-slate-500;
|
||||
}
|
||||
|
||||
.log-entry-table-row-cell {
|
||||
@apply flex items-center border-b border-slate-200 px-2 py-1 text-sm group-hover:bg-slate-50;
|
||||
}
|
||||
|
||||
.rich-text-content blockquote {
|
||||
@apply my-4 border-s-4 border-gray-300 bg-slate-200 p-4 italic leading-relaxed;
|
||||
}
|
||||
131
app/assets/stylesheets/good_job_custom.css
Normal file
@@ -0,0 +1,131 @@
|
||||
/* ANSI Colors */
|
||||
.ansi-bold {
|
||||
font-weight: bold;
|
||||
}
|
||||
.ansi-black {
|
||||
color: #333333;
|
||||
}
|
||||
.ansi-red {
|
||||
color: #cd3333;
|
||||
}
|
||||
.ansi-green {
|
||||
color: #33cd33;
|
||||
}
|
||||
.ansi-yellow {
|
||||
color: #cdcd33;
|
||||
}
|
||||
.ansi-blue {
|
||||
color: #3333ee;
|
||||
}
|
||||
.ansi-magenta {
|
||||
color: #cd33cd;
|
||||
}
|
||||
.ansi-cyan {
|
||||
color: #33cdcd;
|
||||
}
|
||||
.ansi-white {
|
||||
color: #e5e5e5;
|
||||
}
|
||||
|
||||
/* Bright variants */
|
||||
.ansi-bright-black {
|
||||
color: #7f7f7f;
|
||||
}
|
||||
.ansi-bright-red {
|
||||
color: #990000;
|
||||
}
|
||||
.ansi-bright-green {
|
||||
color: #009900;
|
||||
}
|
||||
.ansi-bright-yellow {
|
||||
color: #999900;
|
||||
}
|
||||
.ansi-bright-blue {
|
||||
color: #5c5c99;
|
||||
}
|
||||
.ansi-bright-magenta {
|
||||
color: #990099;
|
||||
}
|
||||
.ansi-bright-cyan {
|
||||
color: #009999;
|
||||
}
|
||||
.ansi-bright-white {
|
||||
color: #999999;
|
||||
}
|
||||
|
||||
.log-uuid {
|
||||
min-width: 20px;
|
||||
max-width: 100px;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
|
||||
/* All log lines container */
|
||||
.good-job-log-lines {
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
/* Single log line container */
|
||||
.good-job-log-line {
|
||||
font-family: monospace;
|
||||
font-size: 0.8rem;
|
||||
line-height: 1;
|
||||
margin: 2px 0;
|
||||
padding: 2px 4px;
|
||||
display: flex;
|
||||
white-space: nowrap;
|
||||
width: max-content; /* Make width match the content width */
|
||||
}
|
||||
|
||||
.good-job-log-line:hover {
|
||||
background-color: #ccc;
|
||||
}
|
||||
|
||||
.good-job-log-line > span {
|
||||
display: inline-block;
|
||||
white-space: pre;
|
||||
}
|
||||
|
||||
.good-job-execution-log {
|
||||
color: #333;
|
||||
background: #f0f0f0;
|
||||
}
|
||||
|
||||
.text-truncate-link {
|
||||
display: inline-block;
|
||||
max-width: 300px;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
|
||||
.good-job-arg-name {
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.good-job-arg-grid {
|
||||
display: grid;
|
||||
grid-template-columns: auto 1fr;
|
||||
}
|
||||
|
||||
.good-job-arg-value,
|
||||
.good-job-arg-name {
|
||||
padding: 0.35em 0.4em;
|
||||
}
|
||||
|
||||
.good-job-arg-name,
|
||||
.good-job-arg-value {
|
||||
border-bottom: 1px solid #e0e0e0;
|
||||
}
|
||||
|
||||
.good-job-arg-row {
|
||||
display: contents;
|
||||
}
|
||||
|
||||
.good-job-arg-row:hover > * {
|
||||
background-color: #ccc;
|
||||
}
|
||||
|
||||
/* This ensures the last row doesn't have a bottom border */
|
||||
.good-job-arg-grid .good-job-arg-row:last-child * {
|
||||
border-bottom: none;
|
||||
}
|
||||
@@ -1,3 +1,4 @@
|
||||
# typed: strict
|
||||
module ApplicationCable
|
||||
class Channel < ActionCable::Channel::Base
|
||||
end
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# typed: strict
|
||||
module ApplicationCable
|
||||
class Connection < ActionCable::Connection::Base
|
||||
end
|
||||
|
||||
84
app/controllers/admin/proxy_controller.rb
Normal file
@@ -0,0 +1,84 @@
|
||||
# typed: true
|
||||
# frozen_string_literal: true
|
||||
class Admin::ProxyController < ApplicationController
|
||||
before_action :authenticate_user!
|
||||
before_action :require_admin!
|
||||
skip_before_action :verify_authenticity_token, only: %i[grafana prometheus]
|
||||
|
||||
def grafana
|
||||
fullpath =
|
||||
"http://grafana:3100#{request.fullpath.delete_prefix("/grafana")}"
|
||||
proxy_response(fullpath, "/grafana")
|
||||
end
|
||||
|
||||
def prometheus
|
||||
fullpath = "http://prometheus:9090#{request.fullpath.delete_prefix("/prometheus")}"
|
||||
proxy_response(fullpath, "/prometheus")
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def require_admin!
|
||||
unless current_user&.admin?
|
||||
redirect_to root_path, alert: "You are not authorized to access this area"
|
||||
end
|
||||
end
|
||||
|
||||
def grafana_proxy_headers
|
||||
{ "X-WEBAUTH-USER" => "admin" }.merge(proxy_headers)
|
||||
end
|
||||
|
||||
def proxy_headers
|
||||
{
|
||||
"X-Forwarded-Host" => request.host_with_port,
|
||||
"X-Forwarded-Proto" => request.ssl? ? "https" : "http",
|
||||
"X-Forwarded-For" => request.remote_ip,
|
||||
"Host" => request.host,
|
||||
"Connection" => request.headers["Connection"],
|
||||
"Upgrade" => request.headers["Upgrade"],
|
||||
"Accept" => request.headers["Accept"],
|
||||
"Cookie" => request.headers["Cookie"],
|
||||
"Content-Type" => request.headers["Content-Type"],
|
||||
"Content-Length" => request.headers["Content-Length"],
|
||||
}.merge
|
||||
end
|
||||
|
||||
def websocket_request?
|
||||
request.headers["Connection"]&.include?("upgrade")
|
||||
end
|
||||
|
||||
def proxy_response(fullpath, prefix)
|
||||
method = request.method.downcase.to_s
|
||||
if method == "post"
|
||||
response = HTTP.headers(grafana_proxy_headers).send(method, fullpath, body: request.raw_post)
|
||||
else
|
||||
response = HTTP.headers(grafana_proxy_headers).send(method, fullpath)
|
||||
end
|
||||
|
||||
headers = response.headers.to_h
|
||||
|
||||
# Handle redirects by rewriting the Location header
|
||||
if response.code.in?([301, 302, 303, 307, 308]) &&
|
||||
headers["Location"].present?
|
||||
location = headers["Location"]
|
||||
# Strip the host from absolute URLs
|
||||
location = location.gsub(%r{^https?://[^/]+}, "")
|
||||
# Add our prefix to relative URLs
|
||||
location = "#{prefix}#{location}" if location.start_with?("/")
|
||||
headers["Location"] = location
|
||||
end
|
||||
|
||||
# Pass through the response with all headers
|
||||
response_headers = headers.except("Content-Type")
|
||||
|
||||
render_args = {
|
||||
body: response.body.to_s,
|
||||
status: response.code,
|
||||
content_type: headers["Content-Type"],
|
||||
headers: response_headers,
|
||||
}
|
||||
render_args[:location] = headers["Location"] if headers["Location"]
|
||||
|
||||
render render_args
|
||||
end
|
||||
end
|
||||
@@ -1,2 +1,42 @@
|
||||
# typed: true
|
||||
class ApplicationController < ActionController::Base
|
||||
extend T::Sig
|
||||
extend T::Helpers
|
||||
include Pundit::Authorization
|
||||
include Devise::Controllers::Helpers::ClassMethods
|
||||
|
||||
sig { returns(T.nilable(IpAddressRole)) }
|
||||
def current_ip_address_role
|
||||
@current_ip_address_role ||= IpAddressRole.for_ip(request.remote_ip)
|
||||
end
|
||||
helper_method :current_ip_address_role
|
||||
|
||||
sig { returns(T.nilable(T.any(User, IpAddressRole))) }
|
||||
def pundit_user
|
||||
current_user || current_ip_address_role
|
||||
end
|
||||
|
||||
before_action do
|
||||
if Rails.env.development? || Rails.env.staging?
|
||||
Rack::MiniProfiler.authorize_request
|
||||
end
|
||||
end
|
||||
|
||||
before_action :authenticate_user!
|
||||
|
||||
# Pundit authorization error handling
|
||||
rescue_from Pundit::NotAuthorizedError, with: :user_not_authorized
|
||||
|
||||
protected
|
||||
|
||||
def prometheus_client
|
||||
PrometheusExporter::Client.default
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def user_not_authorized
|
||||
flash[:alert] = "You are not authorized to perform this action."
|
||||
redirect_back(fallback_location: root_path)
|
||||
end
|
||||
end
|
||||
|
||||
215
app/controllers/blob_entries_controller.rb
Normal file
@@ -0,0 +1,215 @@
|
||||
# typed: strict
|
||||
class BlobEntriesController < ApplicationController
|
||||
skip_before_action :authenticate_user!, only: [:show]
|
||||
|
||||
sig { void }
|
||||
def show
|
||||
thumb = params[:thumb]
|
||||
if thumb.present? && !thumb_params(thumb)
|
||||
raise ActionController::BadRequest.new("invalid thumbnail #{thumb}")
|
||||
end
|
||||
|
||||
if thumb.present?
|
||||
expires_dur = 1.week
|
||||
else
|
||||
expires_dur = 1.year
|
||||
end
|
||||
response.headers["Expires"] = expires_dur.from_now.httpdate
|
||||
expires_in expires_dur, public: true
|
||||
|
||||
unless stale?(
|
||||
last_modified: Time.at(0),
|
||||
strong_etag: strong_etag_for_request,
|
||||
)
|
||||
return
|
||||
end
|
||||
|
||||
sha256 = T.let(params[:sha256], String)
|
||||
raise ActionController::BadRequest.new("no file specified") if sha256.blank?
|
||||
|
||||
if show_blob_file(sha256, thumb)
|
||||
return
|
||||
elsif BlobFile.migrate_sha256!(sha256) && show_blob_file(sha256, thumb)
|
||||
return
|
||||
else
|
||||
raise ActiveRecord::RecordNotFound
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
sig { params(sha256: String, thumb: T.nilable(String)).returns(T::Boolean) }
|
||||
def show_blob_file(sha256, thumb)
|
||||
if thumb
|
||||
thumb_params = thumb_params(thumb)
|
||||
if thumb_params.nil?
|
||||
raise ActionController::BadRequest.new("invalid thumbnail: #{thumb}")
|
||||
end
|
||||
|
||||
# if the requested format is gif, and the thumbnail type is content-container, we want to
|
||||
# thumbnail the gif into another gif. Else, always thumbnail into a jpeg.
|
||||
file_ext = "jpeg"
|
||||
if params[:format] == "gif" && thumb == "content-container"
|
||||
file_ext = "gif"
|
||||
end
|
||||
|
||||
width, height = thumb_params
|
||||
filename = "thumb-#{sha256}-#{thumb}.#{file_ext}"
|
||||
cache_key = "vips:#{filename}"
|
||||
thumb_data =
|
||||
Rack::MiniProfiler.step("vips: load from cache") do
|
||||
Rails
|
||||
.cache
|
||||
.fetch(cache_key, expires_in: 1.day) do
|
||||
blob_file = BlobFile.find_by(sha256: HexUtil.hex2bin(sha256))
|
||||
if blob_file
|
||||
content_type =
|
||||
blob_file.content_type || "application/octet-stream"
|
||||
if helpers.is_renderable_video_type?(content_type)
|
||||
thumbnail_video_file(blob_file, width, height, file_ext)
|
||||
elsif helpers.is_renderable_image_type?(content_type)
|
||||
thumbnail_image_file(blob_file, width, height, file_ext)
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
if !thumb_data
|
||||
Rails.cache.delete(cache_key)
|
||||
return false
|
||||
end
|
||||
|
||||
send_data(
|
||||
thumb_data[0],
|
||||
type: thumb_data[1],
|
||||
disposition: "inline",
|
||||
filename: filename,
|
||||
)
|
||||
else
|
||||
blob_file = BlobFile.find_by(sha256: HexUtil.hex2bin(sha256))
|
||||
return false if !blob_file
|
||||
|
||||
content_type = blob_file.content_type || "application/octet-stream"
|
||||
send_file(
|
||||
blob_file.absolute_file_path,
|
||||
type: content_type,
|
||||
disposition: "inline",
|
||||
)
|
||||
end
|
||||
|
||||
return true
|
||||
end
|
||||
|
||||
sig do
|
||||
params(
|
||||
blob_file: BlobFile,
|
||||
width: Integer,
|
||||
height: Integer,
|
||||
thumb: String,
|
||||
).returns(T.nilable([String, String]))
|
||||
end
|
||||
def thumbnail_video_file(blob_file, width, height, thumb)
|
||||
video_file = blob_file.absolute_file_path
|
||||
temp_thumb_file = Tempfile.new(%w[video-thumb .png])
|
||||
process_result =
|
||||
system(
|
||||
"ffmpegthumbnailer",
|
||||
"-f", # overlay video strip indicator
|
||||
"-i",
|
||||
video_file,
|
||||
"-o",
|
||||
T.must(temp_thumb_file.path),
|
||||
"-s",
|
||||
"#{width}",
|
||||
"-c",
|
||||
"jpeg",
|
||||
)
|
||||
if !process_result
|
||||
temp_thumb_file.unlink
|
||||
return nil
|
||||
end
|
||||
|
||||
thumb_data_tmp = File.read(T.must(temp_thumb_file.path), mode: "rb")
|
||||
temp_thumb_file.unlink
|
||||
[thumb_data_tmp, "image/jpeg"]
|
||||
end
|
||||
|
||||
# Returns a tuple of the thumbnail data and the content type
|
||||
sig do
|
||||
params(
|
||||
blob_file: BlobFile,
|
||||
width: Integer,
|
||||
height: Integer,
|
||||
file_ext: String,
|
||||
).returns(T.nilable([String, String]))
|
||||
end
|
||||
def thumbnail_image_file(blob_file, width, height, file_ext)
|
||||
if file_ext == "gif"
|
||||
Rack::MiniProfiler.step("vips: load gif") do
|
||||
# Use libvips' gifload with n=-1 to load all frames
|
||||
image = Vips::Image.gifload(blob_file.absolute_file_path, n: -1)
|
||||
num_frames = image.get("n-pages")
|
||||
image_width, image_height = image.width, (image.height / num_frames)
|
||||
|
||||
if width >= image_width && height >= image_height
|
||||
logger.info("gif is already smaller than requested thumbnail size")
|
||||
return [
|
||||
File.read(blob_file.absolute_file_path, mode: "rb"),
|
||||
"image/gif"
|
||||
]
|
||||
end
|
||||
|
||||
Rack::MiniProfiler.step("vips: thumbnail gif") do
|
||||
image = image.thumbnail_image(width, height: height)
|
||||
image_buffer =
|
||||
image.gifsave_buffer(
|
||||
dither: 1,
|
||||
effort: 1,
|
||||
interframe_maxerror: 16,
|
||||
interpalette_maxerror: 10,
|
||||
interlace: true,
|
||||
)
|
||||
[image_buffer, "image/gif"]
|
||||
end
|
||||
end
|
||||
else
|
||||
# Original static image thumbnailing logic
|
||||
image_buffer =
|
||||
Rack::MiniProfiler.step("vips: load image") do
|
||||
T.unsafe(Vips::Image).thumbnail(
|
||||
blob_file.absolute_file_path,
|
||||
width,
|
||||
height: height,
|
||||
)
|
||||
end
|
||||
|
||||
Rack::MiniProfiler.step("vips: thumbnail image") do
|
||||
logger.info("rendering thumbnail as jpeg")
|
||||
[image_buffer.jpegsave_buffer(interlace: true, Q: 95), "image/jpeg"]
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
sig { params(thumb: String).returns(T.nilable([Integer, Integer])) }
|
||||
def thumb_params(thumb)
|
||||
case thumb
|
||||
when "32-avatar"
|
||||
[32, 32]
|
||||
when "64-avatar"
|
||||
[64, 64]
|
||||
when "tiny"
|
||||
[100, 100]
|
||||
when "small"
|
||||
[400, 300]
|
||||
when "medium"
|
||||
[800, 600]
|
||||
when "content-container"
|
||||
[768, 2048]
|
||||
end
|
||||
end
|
||||
|
||||
sig { returns(String) }
|
||||
def strong_etag_for_request
|
||||
[params[:sha256], params[:thumb], params[:format]].compact.join("-")
|
||||
end
|
||||
end
|
||||
@@ -1,26 +1,30 @@
|
||||
# typed: true
|
||||
class Domain::Fa::ApiController < ApplicationController
|
||||
skip_before_action :authenticate_user!
|
||||
before_action :validate_api_token!
|
||||
|
||||
skip_before_action :verify_authenticity_token,
|
||||
only: %i[ enqueue_objects object_statuses ]
|
||||
only: %i[enqueue_objects object_statuses similar_users]
|
||||
|
||||
skip_before_action :validate_api_token!,
|
||||
only: %i[object_statuses similar_users]
|
||||
|
||||
def object_statuses
|
||||
fa_ids = (params[:fa_ids] || []).map(&:to_i)
|
||||
url_names = (params[:url_names] || [])
|
||||
fa_ids = (params[:fa_ids] || []).reject(&:blank?).map(&:to_i)
|
||||
url_names = (params[:url_names] || []).reject(&:blank?)
|
||||
|
||||
jobs_async = Delayed::Backend::ActiveRecord::Job.
|
||||
select(:id, :queue, :handler).
|
||||
where(queue: "manual").
|
||||
load_async
|
||||
url_name_to_user =
|
||||
Domain::User::FaUser
|
||||
.where(url_name: url_names)
|
||||
.map { |user| [T.must(user.url_name), user] }
|
||||
.to_h
|
||||
|
||||
users_async = Domain::Fa::User.
|
||||
where(url_name: url_names).
|
||||
load_async
|
||||
|
||||
fa_id_to_post = Domain::Fa::Post.
|
||||
includes(:file).
|
||||
where(fa_id: fa_ids).
|
||||
map do |post|
|
||||
[post.fa_id, post]
|
||||
end.to_h
|
||||
fa_id_to_post =
|
||||
Domain::Post::FaPost
|
||||
.includes(:file)
|
||||
.where(fa_id: fa_ids)
|
||||
.map { |post| [T.must(post.fa_id), post] }
|
||||
.to_h
|
||||
|
||||
posts_response = {}
|
||||
users_response = {}
|
||||
@@ -28,92 +32,64 @@ class Domain::Fa::ApiController < ApplicationController
|
||||
fa_ids.each do |fa_id|
|
||||
post = fa_id_to_post[fa_id]
|
||||
|
||||
post_response = {
|
||||
terminal_state: false,
|
||||
seen_at: time_ago_or_never(post&.created_at),
|
||||
scanned_at: "never",
|
||||
downloaded_at: "never",
|
||||
}
|
||||
|
||||
if post
|
||||
post_response[:info_url] = domain_fa_post_url(fa_id: post.fa_id)
|
||||
post_response[:scanned_at] = time_ago_or_never(post.scanned_at)
|
||||
post_state =
|
||||
if post.file.present?
|
||||
"have_file"
|
||||
elsif post.scanned_at?
|
||||
"scanned_post"
|
||||
else
|
||||
post.state
|
||||
end
|
||||
|
||||
if post.file.present?
|
||||
post_response[:downloaded_at] = time_ago_or_never(post.file.created_at)
|
||||
post_response[:state] = "have_file"
|
||||
post_response[:terminal_state] = true
|
||||
elsif post.scanned?
|
||||
post_response[:state] = "scanned_post"
|
||||
else
|
||||
post_response[:state] = post.state
|
||||
end
|
||||
post_response = {
|
||||
state: post_state,
|
||||
seen_at: time_ago_or_never(post.created_at),
|
||||
object_url: request.base_url + helpers.domain_post_path(post),
|
||||
post_scan: {
|
||||
last_at: time_ago_or_never(post.scanned_at),
|
||||
due_for_scan: !post.scanned_at?,
|
||||
},
|
||||
file_scan: {
|
||||
last_at: time_ago_or_never(post.file&.created_at),
|
||||
due_for_scan: !post.file&.created_at?,
|
||||
},
|
||||
}
|
||||
else
|
||||
post_response[:state] = "not_seen"
|
||||
post_response = { state: "not_seen" }
|
||||
end
|
||||
|
||||
posts_response[fa_id] = post_response
|
||||
end
|
||||
|
||||
url_name_to_user = users_async.map do |user|
|
||||
[user.url_name, user]
|
||||
end.to_h
|
||||
|
||||
url_names.each do |url_name|
|
||||
user = url_name_to_user[url_name]
|
||||
|
||||
if user
|
||||
user_response = {
|
||||
created_at: time_ago_or_never(user.created_at),
|
||||
scanned_gallery_at: time_ago_or_never(user.scanned_gallery_at),
|
||||
scanned_page_at: time_ago_or_never(user.scanned_page_at),
|
||||
state: user.state,
|
||||
object_url: request.base_url + helpers.domain_user_path(user),
|
||||
page_scan: {
|
||||
last_at: time_ago_or_never(user.scanned_page_at),
|
||||
due_for_scan: user.page_scan.due?,
|
||||
},
|
||||
gallery_scan: {
|
||||
last_at: time_ago_or_never(user.gallery_scan.at),
|
||||
due_for_scan: user.gallery_scan.due?,
|
||||
},
|
||||
favs_scan: {
|
||||
last_at: time_ago_or_never(user.favs_scan.at),
|
||||
due_for_scan: user.favs_scan.due?,
|
||||
},
|
||||
}
|
||||
states = []
|
||||
states << "page" unless user.due_for_page_scan?
|
||||
states << "gallery" unless user.due_for_gallery_scan?
|
||||
states << "seen" if states.empty?
|
||||
|
||||
user_response[:state] = states.join(",")
|
||||
|
||||
if user.scanned_gallery_at && user.scanned_page_at
|
||||
user_response[:terminal_state] = true
|
||||
end
|
||||
else
|
||||
user_response = {
|
||||
state: "not_seen",
|
||||
terminal_state: false,
|
||||
}
|
||||
user_response = { state: "not_seen" }
|
||||
end
|
||||
users_response[url_name] = user_response
|
||||
end
|
||||
|
||||
queue_depths = Hash.new do |hash, key|
|
||||
hash[key] = 0
|
||||
end
|
||||
|
||||
jobs_async.each do |job|
|
||||
queue_depths[job.payload_object.job_data["job_class"]] += 1
|
||||
end
|
||||
|
||||
queue_depths = queue_depths.map do |key, value|
|
||||
[key.
|
||||
delete_prefix("Domain::Fa::Job::").
|
||||
split("::").
|
||||
last.
|
||||
underscore.
|
||||
delete_suffix("_job").
|
||||
gsub("_", " "),
|
||||
value]
|
||||
end.to_h
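To make the name munging above concrete, this is what the chain yields for one of the job classes that appears later in this diff (plain Ruby plus ActiveSupport's `underscore`; the class name is just an example):

```ruby
require "active_support/core_ext/string"

"Domain::Fa::Job::ScanPostJob"
  .delete_prefix("Domain::Fa::Job::") # => "ScanPostJob"
  .split("::").last                   # => "ScanPostJob"
  .underscore                         # => "scan_post_job"
  .delete_suffix("_job")              # => "scan_post"
  .gsub("_", " ")                     # => "scan post"
```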
|
||||
|
||||
render json: {
|
||||
posts: posts_response,
|
||||
users: users_response,
|
||||
queues: {
|
||||
total_depth: queue_depths.values.sum,
|
||||
depths: queue_depths,
|
||||
},
|
||||
}
|
||||
render json: { posts: posts_response, users: users_response }
|
||||
end
|
||||
|
||||
def enqueue_objects
|
||||
@@ -123,13 +99,18 @@ class Domain::Fa::ApiController < ApplicationController
|
||||
url_names = (params[:url_names] || [])
|
||||
url_names_to_enqueue = Set.new(params[:url_names_to_enqueue] || [])
|
||||
|
||||
fa_id_to_post = Domain::Fa::Post.includes(:file).where(fa_id: fa_ids).map do |post|
|
||||
[post.fa_id, post]
|
||||
end.to_h
|
||||
fa_id_to_post =
|
||||
Domain::Fa::Post
|
||||
.includes(:file)
|
||||
.where(fa_id: fa_ids)
|
||||
.map { |post| [post.fa_id, post] }
|
||||
.to_h
|
||||
|
||||
url_name_to_user = Domain::Fa::User.where(url_name: url_names).map do |user|
|
||||
[user.url_name, user]
|
||||
end.to_h
|
||||
url_name_to_user =
|
||||
Domain::Fa::User
|
||||
.where(url_name: url_names)
|
||||
.map { |user| [user.url_name, user] }
|
||||
.to_h
|
||||
|
||||
fa_ids.each do |fa_id|
|
||||
post = fa_id_to_post[fa_id]
|
||||
@@ -144,42 +125,48 @@ class Domain::Fa::ApiController < ApplicationController
|
||||
enqueue_deferred!
|
||||
|
||||
render json: {
|
||||
post_scans: @enqueue_counts[Domain::Fa::Job::ScanPostJob],
|
||||
post_files: @enqueue_counts[Domain::Fa::Job::ScanFileJob],
|
||||
user_pages: @enqueue_counts[Domain::Fa::Job::UserPageJob],
|
||||
user_galleries: @enqueue_counts[Domain::Fa::Job::UserGalleryJob],
|
||||
}
|
||||
post_scans: @enqueue_counts[Domain::Fa::Job::ScanPostJob],
|
||||
post_files: @enqueue_counts[Domain::Fa::Job::ScanFileJob],
|
||||
user_pages: @enqueue_counts[Domain::Fa::Job::UserPageJob],
|
||||
user_galleries: @enqueue_counts[Domain::Fa::Job::UserGalleryJob],
|
||||
}
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def defer_post_scan(post, fa_id)
|
||||
if !post || !post.scanned?
|
||||
defer_manual(Domain::Fa::Job::ScanPostJob, {
|
||||
fa_id: fa_id,
|
||||
}, -17)
|
||||
defer_manual(Domain::Fa::Job::ScanPostJob, { fa_id: fa_id }, -17)
|
||||
end
|
||||
|
||||
if post && post.file_uri && !post.file.present?
|
||||
return defer_manual(Domain::Fa::Job::ScanFileJob, {
|
||||
post: post,
|
||||
}, -15, "static_file")
|
||||
return
|
||||
return(
|
||||
defer_manual(
|
||||
Domain::Fa::Job::ScanFileJob,
|
||||
{ post: post },
|
||||
-15,
|
||||
"static_file",
|
||||
)
|
||||
)
|
||||
end
|
||||
end
|
||||
|
||||
def defer_user_scan(user, url_name, highpri)
|
||||
if !user || user.due_for_page_scan?
|
||||
defer_manual(Domain::Fa::Job::UserPageJob, {
|
||||
url_name: url_name,
|
||||
}, highpri ? -16 : -6)
|
||||
defer_manual(
|
||||
Domain::Fa::Job::UserPageJob,
|
||||
{ url_name: url_name },
|
||||
highpri ? -16 : -6,
|
||||
)
|
||||
return
|
||||
end
|
||||
|
||||
if !user || user.due_for_gallery_scan?
|
||||
defer_manual(Domain::Fa::Job::UserGalleryJob, {
|
||||
url_name: url_name,
|
||||
}, highpri ? -14 : -4)
|
||||
defer_manual(
|
||||
Domain::Fa::Job::UserGalleryJob,
|
||||
{ url_name: url_name },
|
||||
highpri ? -14 : -4,
|
||||
)
|
||||
return
|
||||
end
|
||||
|
||||
@@ -196,9 +183,11 @@ class Domain::Fa::ApiController < ApplicationController
|
||||
end
|
||||
|
||||
def enqueue_deferred!
|
||||
while job = (@deferred_jobs || []).shift
|
||||
klass, args, priority, queue = job
|
||||
klass.set(priority: priority, queue: queue).perform_later(args)
|
||||
GoodJob::Bulk.enqueue do
|
||||
while job = (@deferred_jobs || []).shift
|
||||
klass, args, priority, queue = job
|
||||
klass.set(priority: priority, queue: queue).perform_later(args)
|
||||
end
|
||||
end
|
||||
end
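The replacement wraps the drain loop in `GoodJob::Bulk.enqueue`, which buffers every `perform_later` fired inside the block and writes the jobs in one bulk insert rather than one row at a time. A minimal sketch of the pattern on its own, with an invented job class and arguments purely for illustration:

```ruby
# Assumes the good_job gem is installed and configured as the Active Job backend.
class ExampleScanJob < ApplicationJob
  def perform(fa_id:)
    Rails.logger.info("scanning #{fa_id}")
  end
end

# All jobs enqueued inside the block are inserted in a single bulk write.
GoodJob::Bulk.enqueue do
  100.times do |i|
    ExampleScanJob.set(priority: -10, queue: "manual").perform_later(fa_id: i)
  end
end
```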
|
||||
|
||||
@@ -209,4 +198,21 @@ class Domain::Fa::ApiController < ApplicationController
|
||||
"never"
|
||||
end
|
||||
end
|
||||
|
||||
API_TOKENS = {
|
||||
"a4eb03ac-b33c-439c-9b51-a834d1c5cf48" => "dymk",
|
||||
"56cc81fe-8c00-4436-8981-4580eab00e66" => "taargus",
|
||||
"9c38727f-f11d-41de-b775-0effd86d520c" => "xjal",
|
||||
"e38c568f-a24d-4f26-87f0-dfcd898a359d" => "fyacin",
|
||||
"41fa1144-d4cd-11ed-afa1-0242ac120002" => "soft_fox_lad",
|
||||
"9b3cf444-5913-4efb-9935-bf26501232ff" => "syfaro",
|
||||
}
|
||||
|
||||
def validate_api_token!
|
||||
api_token = request.params[:api_token]
|
||||
api_user_name = API_TOKENS[api_token]
|
||||
return if api_user_name
|
||||
return if VpnOnlyRouteConstraint.new.matches?(request)
|
||||
render status: 403, json: { error: "not authenticated" }
|
||||
end
|
||||
end
|
||||
|
||||
@@ -1,127 +0,0 @@
|
||||
class Domain::Fa::PostsController < ApplicationController
|
||||
before_action :set_domain_fa_post,
|
||||
only: %i[ show scan_post scan_post ]
|
||||
|
||||
skip_before_action :verify_authenticity_token,
|
||||
only: %i[ try_scan_post try_scan_posts ]
|
||||
|
||||
# GET /domain/fa/posts
|
||||
def index
|
||||
@posts = Domain::Fa::Post.
|
||||
includes(:creator, :file).
|
||||
page(params[:page]).
|
||||
per(50).
|
||||
order(fa_id: :desc).
|
||||
without_count
|
||||
end
|
||||
|
||||
# GET /domain/fa/posts/1
|
||||
def show
|
||||
end
|
||||
|
||||
def scan_post
|
||||
if try_enqueue_post_scan(@post, @post.fa_id)
|
||||
redirect_to domain_fa_post_path(@post.fa_id), notice: "Enqueued for scan"
|
||||
else
|
||||
redirect_to domain_fa_post_path(@post.fa_id), notice: "Already scanned"
|
||||
end
|
||||
end
|
||||
|
||||
def try_scan_post
|
||||
fa_id = params[:fa_id]&.to_i || raise("need fa_id parameter")
|
||||
post = Domain::Fa::Post.find_by_fa_id(fa_id)
|
||||
enqueued = try_enqueue_post_scan(post, fa_id)
|
||||
|
||||
if post && post.file.present?
|
||||
state_string = "downloaded #{helpers.time_ago_in_words(post.file.created_at, include_seconds: true)} ago"
|
||||
elsif post && post.scanned?
|
||||
state_string = "scanned #{helpers.time_ago_in_words(post.scanned_at, include_seconds: true)} ago"
|
||||
else
|
||||
state_string = []
|
||||
if !post
|
||||
state_string << "not seen"
|
||||
else
|
||||
state_string << "#{post.state}"
|
||||
end
|
||||
|
||||
if enqueued
|
||||
state_string << "enqueued"
|
||||
end
|
||||
|
||||
state_string = state_string.join(", ")
|
||||
end
|
||||
|
||||
render json: {
|
||||
enqueued: enqueued,
|
||||
title: post&.title,
|
||||
state: state_string,
|
||||
is_terminal_state: post&.scanned? && post&.file&.present? || false,
|
||||
}
|
||||
end
|
||||
|
||||
def try_scan_posts
|
||||
Rails.logger.info "params: #{params.inspect}"
|
||||
fa_ids = params[:fa_ids].map(&:to_i)
|
||||
fa_id_to_post = Domain::Fa::Post.where(fa_id: fa_ids).map do |post|
|
||||
[post.fa_id, post]
|
||||
end.to_h
|
||||
|
||||
response = {}
|
||||
|
||||
fa_ids.each do |fa_id|
|
||||
post = fa_id_to_post[fa_id]
|
||||
if post.nil?
|
||||
state = "not_seen"
|
||||
elsif post.file.present?
|
||||
state = "have_file"
|
||||
elsif post.scanned?
|
||||
state = "scanned"
|
||||
else
|
||||
state = "state_#{post.state}"
|
||||
end
|
||||
|
||||
response[fa_id] = {
|
||||
state: state,
|
||||
enqueued: try_enqueue_post_scan(post, fa_id),
|
||||
}
|
||||
end
|
||||
render json: response
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def try_enqueue_post_scan(post, fa_id)
|
||||
@@already_enqueued_fa_ids ||= Set.new
|
||||
unless @@already_enqueued_fa_ids.add?(fa_id)
|
||||
Rails.logger.info "Already enqueued #{fa_id}, skipping"
|
||||
return false
|
||||
end
|
||||
|
||||
if !post || !post.scanned?
|
||||
Rails.logger.info "Enqueue scan #{fa_id}"
|
||||
Domain::Fa::Job::ScanPostJob.
|
||||
set(priority: -15, queue: "manual").
|
||||
perform_later({
|
||||
fa_id: fa_id,
|
||||
})
|
||||
return true
|
||||
end
|
||||
|
||||
if post && post.file_uri && !post.file.present?
|
||||
Rails.logger.info "Enqueue file #{fa_id}"
|
||||
Domain::Fa::Job::ScanFileJob.
|
||||
set(priority: -15, queue: "manual").
|
||||
perform_later({
|
||||
post: post,
|
||||
})
|
||||
return true
|
||||
end
|
||||
|
||||
false
|
||||
end
|
||||
|
||||
# Use callbacks to share common setup or constraints between actions.
|
||||
def set_domain_fa_post
|
||||
@post = Domain::Fa::Post.find_by_fa_id!(params[:fa_id])
|
||||
end
|
||||
end
|
||||
@@ -1,71 +0,0 @@
|
||||
class Domain::Fa::UsersController < ApplicationController
|
||||
before_action :set_domain_fa_user, only: %i[ show edit update destroy ]
|
||||
|
||||
# GET /domain/fa/users or /domain/fa/users.json
|
||||
def index
|
||||
@domain_fa_users = Domain::Fa::User.page(params[:page])
|
||||
end
|
||||
|
||||
# GET /domain/fa/users/1 or /domain/fa/users/1.json
|
||||
def show
|
||||
end
|
||||
|
||||
# GET /domain/fa/users/new
|
||||
def new
|
||||
@domain_fa_user = Domain::Fa::User.new
|
||||
end
|
||||
|
||||
# GET /domain/fa/users/1/edit
|
||||
def edit
|
||||
end
|
||||
|
||||
# POST /domain/fa/users or /domain/fa/users.json
|
||||
def create
|
||||
@domain_fa_user = Domain::Fa::User.new(domain_fa_user_params)
|
||||
|
||||
respond_to do |format|
|
||||
if @domain_fa_user.save
|
||||
format.html { redirect_to domain_fa_user_url(@domain_fa_user), notice: "User was successfully created." }
|
||||
format.json { render :show, status: :created, location: @domain_fa_user }
|
||||
else
|
||||
format.html { render :new, status: :unprocessable_entity }
|
||||
format.json { render json: @domain_fa_user.errors, status: :unprocessable_entity }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# PATCH/PUT /domain/fa/users/1 or /domain/fa/users/1.json
|
||||
def update
|
||||
respond_to do |format|
|
||||
if @domain_fa_user.update(domain_fa_user_params)
|
||||
format.html { redirect_to domain_fa_user_url(@domain_fa_user), notice: "User was successfully updated." }
|
||||
format.json { render :show, status: :ok, location: @domain_fa_user }
|
||||
else
|
||||
format.html { render :edit, status: :unprocessable_entity }
|
||||
format.json { render json: @domain_fa_user.errors, status: :unprocessable_entity }
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# DELETE /domain/fa/users/1 or /domain/fa/users/1.json
|
||||
def destroy
|
||||
@domain_fa_user.destroy
|
||||
|
||||
respond_to do |format|
|
||||
format.html { redirect_to domain_fa_users_url, notice: "User was successfully destroyed." }
|
||||
format.json { head :no_content }
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# Use callbacks to share common setup or constraints between actions.
|
||||
def set_domain_fa_user
|
||||
@domain_fa_user = Domain::Fa::User.find(params[:id])
|
||||
end
|
||||
|
||||
# Only allow a list of trusted parameters through.
|
||||
def domain_fa_user_params
|
||||
params.fetch(:domain_fa_user, {})
|
||||
end
|
||||
end
|
||||
26
app/controllers/domain/post_groups_controller.rb
Normal file
@@ -0,0 +1,26 @@
|
||||
# typed: true
|
||||
|
||||
class Domain::PostGroupsController < DomainController
|
||||
extend T::Sig
|
||||
extend T::Helpers
|
||||
|
||||
skip_before_action :authenticate_user!, only: %i[show]
|
||||
before_action :set_post_group!, only: %i[show]
|
||||
|
||||
# GET /pools/:id
|
||||
sig(:final) { void }
|
||||
def show
|
||||
authorize @post_group
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
sig { override.returns(DomainController::DomainParamConfig) }
|
||||
def self.param_config
|
||||
DomainController::DomainParamConfig.new(
|
||||
post_group_id_param: :id,
|
||||
post_id_param: :domain_post_id,
|
||||
user_id_param: :domain_user_id,
|
||||
)
|
||||
end
|
||||
end
|
||||
296
app/controllers/domain/posts_controller.rb
Normal file
@@ -0,0 +1,296 @@
|
||||
# typed: true
|
||||
|
||||
require "open-uri"
|
||||
require "tempfile"
|
||||
require "base64"
|
||||
|
||||
class Domain::PostsController < DomainController
|
||||
extend T::Sig
|
||||
extend T::Helpers
|
||||
|
||||
skip_before_action :authenticate_user!,
|
||||
only: %i[
|
||||
show
|
||||
index
|
||||
user_favorite_posts
|
||||
user_created_posts
|
||||
visual_search
|
||||
visual_results
|
||||
]
|
||||
before_action :set_post!, only: %i[show]
|
||||
before_action :set_user!, only: %i[user_favorite_posts user_created_posts]
|
||||
before_action :set_post_group!, only: %i[posts_in_group]
|
||||
|
||||
class PostsIndexViewConfig < T::ImmutableStruct
|
||||
include T::Struct::ActsAsComparable
|
||||
|
||||
const :show_domain_filters, T::Boolean
|
||||
const :show_creator_links, T::Boolean
|
||||
const :index_type_header, String
|
||||
end
|
||||
|
||||
sig { void }
|
||||
def initialize
|
||||
super
|
||||
@posts_index_view_config =
|
||||
PostsIndexViewConfig.new(
|
||||
show_domain_filters: false,
|
||||
show_creator_links: false,
|
||||
index_type_header: "all_posts",
|
||||
)
|
||||
end
|
||||
|
||||
# GET /posts
|
||||
sig(:final) { void }
|
||||
def index
|
||||
@posts_index_view_config =
|
||||
PostsIndexViewConfig.new(
|
||||
show_domain_filters: true,
|
||||
show_creator_links: true,
|
||||
index_type_header: "all_posts",
|
||||
)
|
||||
authorize Domain::Post
|
||||
@posts = posts_relation(Domain::Post.all).without_count
|
||||
active_sources = (params[:sources] || DomainSourceHelper.all_source_names)
|
||||
unless DomainSourceHelper.has_all_sources?(active_sources)
|
||||
postable_types =
|
||||
DomainSourceHelper.source_names_to_class_names(active_sources)
|
||||
@posts = @posts.where(type: postable_types) if postable_types.any?
|
||||
end
|
||||
end
|
||||
|
||||
# GET /posts/:id
|
||||
sig(:final) { void }
|
||||
def show
|
||||
authorize @post
|
||||
end
|
||||
|
||||
sig(:final) { void }
|
||||
def user_favorite_posts
|
||||
@posts_index_view_config =
|
||||
PostsIndexViewConfig.new(
|
||||
show_domain_filters: false,
|
||||
show_creator_links: true,
|
||||
index_type_header: "user_favorites",
|
||||
)
|
||||
|
||||
@user = T.must(@user)
|
||||
authorize @user
|
||||
@posts = posts_relation(@user.faved_posts)
|
||||
authorize @posts
|
||||
render :index
|
||||
end
|
||||
|
||||
sig(:final) { void }
|
||||
def user_created_posts
|
||||
@posts_index_view_config =
|
||||
PostsIndexViewConfig.new(
|
||||
show_domain_filters: false,
|
||||
show_creator_links: true,
|
||||
index_type_header: "user_created",
|
||||
)
|
||||
|
||||
@user = T.must(@user)
|
||||
authorize @user
|
||||
@posts = posts_relation(@user.posts)
|
||||
authorize @posts
|
||||
render :index
|
||||
end
|
||||
|
||||
sig(:final) { void }
|
||||
def posts_in_group
|
||||
@posts_index_view_config =
|
||||
PostsIndexViewConfig.new(
|
||||
show_domain_filters: false,
|
||||
show_creator_links: true,
|
||||
index_type_header: "posts_in_group",
|
||||
)
|
||||
|
||||
authorize @post_group
|
||||
@posts = posts_relation(T.must(@post_group).posts)
|
||||
render :index
|
||||
end
|
||||
|
||||
# GET /posts/visual_search
|
||||
sig(:final) { void }
|
||||
def visual_search
|
||||
authorize Domain::Post
|
||||
end
|
||||
|
||||
sig { params(content_type: T.nilable(String)).returns(T::Boolean) }
|
||||
def check_content_type!(content_type)
|
||||
return false unless content_type
|
||||
|
||||
ret =
|
||||
Domain::PostFile::Thumbnail::THUMBABLE_CONTENT_TYPES.any? do |type|
|
||||
content_type.match?(type)
|
||||
end
|
||||
|
||||
unless ret
|
||||
flash.now[:error] = "The uploaded file is not a valid image format."
|
||||
render :visual_search
|
||||
end
|
||||
|
||||
ret
|
||||
end
|
||||
|
||||
# POST /posts/visual_search
|
||||
sig(:final) { void }
|
||||
def visual_results
|
||||
authorize Domain::Post
|
||||
|
||||
# Process the uploaded image or URL
|
||||
image_result = process_image_input
|
||||
return unless image_result
|
||||
|
||||
image_path, content_type = image_result
|
||||
|
||||
# Create thumbnail for the view if possible
|
||||
@uploaded_image_data_uri = create_thumbnail(image_path, content_type)
|
||||
@uploaded_hash_value = generate_fingerprint(image_path)
|
||||
@uploaded_detail_hash_value = generate_detail_fingerprint(image_path)
|
||||
@post_file_fingerprints =
|
||||
find_similar_fingerprints(@uploaded_hash_value).to_a
|
||||
@post_file_fingerprints.sort! do |a, b|
|
||||
helpers.calculate_similarity_percentage(
|
||||
b.fingerprint_detail_value,
|
||||
@uploaded_detail_hash_value,
|
||||
) <=>
|
||||
helpers.calculate_similarity_percentage(
|
||||
a.fingerprint_detail_value,
|
||||
@uploaded_detail_hash_value,
|
||||
)
|
||||
end
|
||||
@post_file_fingerprints = @post_file_fingerprints.take(10)
|
||||
@posts = @post_file_fingerprints.map(&:post_file).compact.map(&:post)
|
||||
ensure
|
||||
# Clean up any temporary files
|
||||
if @temp_file
|
||||
@temp_file.unlink
|
||||
@temp_file = nil
|
||||
end
|
||||
end
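As an aside on the similarity sort above: the comparator orders fingerprints by `calculate_similarity_percentage` descending. An equivalent form, assuming that helper returns a plain numeric, is a `sort_by` with negation, which also calls the helper once per element instead of twice per comparison; this is a sketch, not part of the diff:

```ruby
@post_file_fingerprints =
  @post_file_fingerprints
    .sort_by do |fp|
      -helpers.calculate_similarity_percentage(
        fp.fingerprint_detail_value,
        @uploaded_detail_hash_value,
      )
    end
    .take(10)
```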
|
||||
|
||||
private
|
||||
|
||||
# Process the uploaded file or URL and return [image_path, content_type] or nil on failure
|
||||
sig { returns(T.nilable([String, String])) }
|
||||
def process_image_input
|
||||
if params[:image_file].present?
|
||||
process_uploaded_file
|
||||
elsif params[:image_url].present?
|
||||
process_image_url
|
||||
else
|
||||
flash.now[:error] = "Please upload an image or provide an image URL."
|
||||
render :visual_search
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
# Process an uploaded file and return [image_path, content_type] or nil on failure
|
||||
sig { returns(T.nilable([String, String])) }
|
||||
def process_uploaded_file
|
||||
image_file = params[:image_file]
|
||||
content_type = T.must(image_file.content_type)
|
||||
|
||||
return nil unless check_content_type!(content_type)
|
||||
|
||||
image_path = T.must(image_file.tempfile.path)
|
||||
[image_path, content_type]
|
||||
end
|
||||
|
||||
# Process an image URL and return [image_path, content_type] or nil on failure
|
||||
sig { returns(T.nilable([String, String])) }
|
||||
def process_image_url
|
||||
# Download the image to a temporary file
|
||||
image_url = params[:image_url]
|
||||
image_io = URI.open(image_url)
|
||||
|
||||
if image_io.nil?
|
||||
flash.now[:error] = "The URL does not point to a valid image format."
|
||||
render :visual_search
|
||||
return nil
|
||||
end
|
||||
|
||||
content_type = T.must(T.unsafe(image_io).content_type)
|
||||
return nil unless check_content_type!(content_type)
|
||||
|
||||
# Save to temp file
|
||||
extension = helpers.extension_for_content_type(content_type) || "jpg"
|
||||
@temp_file = Tempfile.new(["image", ".#{extension}"])
|
||||
@temp_file.binmode
|
||||
image_data = image_io.read
|
||||
@temp_file.write(image_data)
|
||||
@temp_file.close
|
||||
|
||||
image_path = T.must(@temp_file.path)
|
||||
[image_path, content_type]
|
||||
rescue StandardError => e
|
||||
Rails.logger.error("Error processing image URL: #{e.message}")
|
||||
flash.now[:error] = "Error downloading search image"
|
||||
render :visual_search
|
||||
nil
|
||||
end
|
||||
|
||||
# Create a thumbnail from the image and return the data URI
|
||||
sig do
|
||||
params(image_path: String, content_type: String).returns(T.nilable(String))
|
||||
end
|
||||
def create_thumbnail(image_path, content_type)
|
||||
helpers.create_image_thumbnail_data_uri(image_path, content_type)
|
||||
end
|
||||
|
||||
# Generate a fingerprint from the image path
|
||||
sig { params(image_path: String).returns(String) }
|
||||
def generate_fingerprint(image_path)
|
||||
# Use the new from_file_path method to create a fingerprint
|
||||
Domain::PostFile::BitFingerprint.from_file_path(image_path)
|
||||
end
|
||||
|
||||
# Generate a detail fingerprint from the image path
|
||||
sig { params(image_path: String).returns(String) }
|
||||
def generate_detail_fingerprint(image_path)
|
||||
Domain::PostFile::BitFingerprint.detail_from_file_path(image_path)
|
||||
end
|
||||
|
||||
# Find similar images based on the fingerprint
|
||||
sig { params(fingerprint_value: String).returns(ActiveRecord::Relation) }
|
||||
def find_similar_fingerprints(fingerprint_value)
|
||||
# Use the model's similar_to_fingerprint method directly
|
||||
|
||||
subquery = <<~SQL
|
||||
(
|
||||
select distinct on (post_file_id) *, (fingerprint_value <~> '#{ActiveRecord::Base.connection.quote_string(fingerprint_value)}') as distance
|
||||
from #{Domain::PostFile::BitFingerprint.table_name}
|
||||
order by post_file_id, distance asc
|
||||
) subquery
|
||||
SQL
|
||||
|
||||
Domain::PostFile::BitFingerprint
|
||||
.select("*")
|
||||
.from(subquery)
|
||||
.order("distance ASC")
|
||||
.limit(32)
|
||||
end
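On the query above: `DISTINCT ON (post_file_id)` keeps only the closest fingerprint per file before the outer `ORDER BY distance`, and `<~>` is assumed here to be pgvector's Hamming-distance operator for bit vectors (the fingerprints are bit strings), so a smaller distance means a closer match. A hedged variant of the same subquery that quotes the value through the adapter instead of `quote_string`:

```ruby
# Sketch only; fingerprint_value is the method argument above, and the table
# and column names are copied from the diff.
quoted = ActiveRecord::Base.connection.quote(fingerprint_value)

subquery = <<~SQL
  (
    SELECT DISTINCT ON (post_file_id)
           *, (fingerprint_value <~> #{quoted}) AS distance
    FROM #{Domain::PostFile::BitFingerprint.table_name}
    ORDER BY post_file_id, distance ASC
  ) subquery
SQL

Domain::PostFile::BitFingerprint
  .select("*")
  .from(subquery)
  .order("distance ASC")
  .limit(32)
```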
|
||||
|
||||
sig { override.returns(DomainController::DomainParamConfig) }
|
||||
def self.param_config
|
||||
DomainController::DomainParamConfig.new(
|
||||
post_id_param: :id,
|
||||
user_id_param: :domain_user_id,
|
||||
post_group_id_param: :domain_post_group_id,
|
||||
)
|
||||
end
|
||||
|
||||
sig(:final) do
|
||||
params(starting_relation: ActiveRecord::Relation).returns(
|
||||
T.all(ActiveRecord::Relation, Kaminari::ActiveRecordRelationMethods),
|
||||
)
|
||||
end
|
||||
def posts_relation(starting_relation)
|
||||
relation = starting_relation
|
||||
relation = T.unsafe(policy_scope(relation)).page(params[:page]).per(50)
|
||||
relation = relation.order(relation.klass.post_order_attribute => :desc)
|
||||
relation
|
||||
end
|
||||
end
|
||||
@@ -1,14 +1,12 @@
|
||||
# typed: true
|
||||
class Domain::Twitter::ApiController < ApplicationController
|
||||
skip_before_action :verify_authenticity_token,
|
||||
only: %i[ enqueue_objects ]
|
||||
skip_before_action :verify_authenticity_token, only: %i[enqueue_objects]
|
||||
|
||||
def enqueue_objects
|
||||
@enqueue_counts ||= Hash.new { |h, k| h[k] = 0 }
|
||||
|
||||
names = (params[:names] || [])
|
||||
names.each do |name|
|
||||
defer_user_timeline_scan(name, true)
|
||||
end
|
||||
names.each { |name| defer_user_timeline_scan(name, true) }
|
||||
enqueue_deferred!
|
||||
render json: @enqueue_counts.to_json
|
||||
end
|
||||
@@ -16,9 +14,11 @@ class Domain::Twitter::ApiController < ApplicationController
|
||||
private
|
||||
|
||||
def defer_user_timeline_scan(name, highpri)
|
||||
defer_manual(Domain::Twitter::Job::UserTimelineTweetsJob, {
|
||||
name: name,
|
||||
}, highpri ? -16 : -6)
|
||||
defer_manual(
|
||||
Domain::Twitter::Job::UserTimelineTweetsJob,
|
||||
{ name: name },
|
||||
highpri ? -16 : -6,
|
||||
)
|
||||
end
|
||||
|
||||
def defer_manual(klass, args, priority, queue = nil)
|
||||
@@ -31,9 +31,11 @@ class Domain::Twitter::ApiController < ApplicationController
|
||||
end
|
||||
|
||||
def enqueue_deferred!
|
||||
while job = (@deferred_jobs || []).shift
|
||||
klass, args, priority, queue = job
|
||||
klass.set(priority: priority, queue: queue).perform_later(args)
|
||||
GoodJob::Bulk.enqueue do
|
||||
while job = (@deferred_jobs || []).shift
|
||||
klass, args, priority, queue = job
|
||||
klass.set(priority: priority, queue: queue).perform_later(args)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
|
||||
253
app/controllers/domain/users_controller.rb
Normal file
@@ -0,0 +1,253 @@
|
||||
# typed: true
|
||||
class Domain::UsersController < DomainController
|
||||
extend T::Sig
|
||||
extend T::Helpers
|
||||
|
||||
before_action :set_user!, only: %i[show followed_by following]
|
||||
before_action :set_post!, only: %i[users_faving_post]
|
||||
skip_before_action :authenticate_user!,
|
||||
only: %i[
|
||||
show
|
||||
search_by_name
|
||||
users_faving_post
|
||||
similar_users
|
||||
]
|
||||
|
||||
# GET /users
|
||||
sig(:final) { void }
|
||||
def index
|
||||
authorize Domain::User
|
||||
@users = policy_scope(Domain::User).order(created_at: :desc)
|
||||
end
|
||||
|
||||
sig(:final) { void }
|
||||
def followed_by
|
||||
@user = T.must(@user)
|
||||
authorize @user
|
||||
@users =
|
||||
@user
|
||||
.followed_by_users
|
||||
.includes(avatar: :log_entry)
|
||||
.page(params[:page])
|
||||
.per(50)
|
||||
@index_type = :followed_by
|
||||
render :index
|
||||
end
|
||||
|
||||
sig(:final) { void }
|
||||
def following
|
||||
@user = T.must(@user)
|
||||
authorize @user
|
||||
@users =
|
||||
@user
|
||||
.followed_users
|
||||
.includes(avatar: :log_entry)
|
||||
.page(params[:page])
|
||||
.per(50)
|
||||
@index_type = :following
|
||||
render :index
|
||||
end
|
||||
|
||||
sig(:final) { void }
|
||||
def users_faving_post
|
||||
@post = T.must(@post)
|
||||
authorize @post
|
||||
@users =
|
||||
T
|
||||
.unsafe(@post)
|
||||
.faving_users
|
||||
.includes(avatar: :log_entry)
|
||||
.page(params[:page])
|
||||
.per(50)
|
||||
|
||||
@index_type = :users_faving_post
|
||||
render :index
|
||||
end
|
||||
|
||||
# GET /users/:id
|
||||
sig(:final) { void }
|
||||
def show
|
||||
authorize @user
|
||||
end
|
||||
|
||||
sig(:final) { void }
|
||||
def search_by_name
|
||||
authorize Domain::User
|
||||
name = params[:name]&.downcase
|
||||
name = ReduxApplicationRecord.sanitize_sql_like(name)
|
||||
@user_search_names =
|
||||
Domain::UserSearchName
|
||||
.select("domain_user_search_names.*, domain_users.*")
|
||||
.select("levenshtein(name, '#{name}') as distance")
|
||||
.select(
|
||||
"(SELECT COUNT(*) FROM domain_user_post_creations dupc WHERE dupc.user_id = domain_users.id) as num_posts",
|
||||
)
|
||||
.joins(:user)
|
||||
.where(
|
||||
"(name ilike ?) OR (similarity(dmetaphone(name), dmetaphone(?)) > 0.8)",
|
||||
"%#{name}%",
|
||||
name,
|
||||
)
|
||||
.where(
|
||||
"NOT EXISTS (
|
||||
SELECT 1
|
||||
FROM domain_user_search_names dns2
|
||||
WHERE dns2.user_id = domain_user_search_names.user_id
|
||||
AND levenshtein(dns2.name, ?) < levenshtein(domain_user_search_names.name, ?)
|
||||
)",
|
||||
name,
|
||||
name,
|
||||
)
|
||||
.order("distance ASC")
|
||||
.limit(10)
|
||||
end
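The `levenshtein` and `dmetaphone` functions come from PostgreSQL's fuzzystrmatch extension and `similarity` from pg_trgm, so this query only works once both extensions are enabled. A minimal migration sketch (file name, timestamp, and Rails version are placeholders):

```ruby
# db/migrate/20xx_enable_fuzzy_search_extensions.rb
class EnableFuzzySearchExtensions < ActiveRecord::Migration[7.0]
  def change
    enable_extension "fuzzystrmatch" # levenshtein(), dmetaphone()
    enable_extension "pg_trgm"       # similarity(), trigram support for ILIKE
  end
end
```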
|
||||
|
||||
sig { void }
|
||||
def similar_users
|
||||
url_name = params[:url_name]
|
||||
exclude_url_name = params[:exclude_url_name]
|
||||
|
||||
user = Domain::User::FaUser.find_by(url_name: url_name)
|
||||
if user.nil?
|
||||
render status: 404,
|
||||
json: {
|
||||
error: "user '#{url_name}' not found",
|
||||
error_type: "user_not_found",
|
||||
}
|
||||
return
|
||||
end
|
||||
|
||||
all_similar_users =
|
||||
users_similar_to_by_followers(user, limit: 10).map do |u|
|
||||
user_to_similarity_entry(u)
|
||||
end
|
||||
|
||||
if all_similar_users.nil?
|
||||
render status: 500,
|
||||
json: {
|
||||
error:
|
||||
"user '#{url_name}' has not had recommendations computed yet",
|
||||
error_type: "recs_not_computed",
|
||||
}
|
||||
return
|
||||
end
|
||||
|
||||
not_followed_similar_users = nil
|
||||
if exclude_url_name
|
||||
exclude_followed_by =
|
||||
Domain::User::FaUser.find_by(url_name: exclude_url_name)
|
||||
if exclude_followed_by.nil?
|
||||
render status: 500,
|
||||
json: {
|
||||
error: "user '#{exclude_url_name}' not found",
|
||||
error_type: "exclude_user_not_found",
|
||||
}
|
||||
return
|
||||
elsif exclude_followed_by.scanned_follows_at.nil?
|
||||
render status: 500,
|
||||
json: {
|
||||
error:
|
||||
"user '#{exclude_url_name}' followers list hasn't been scanned",
|
||||
error_type: "exclude_user_not_scanned",
|
||||
}
|
||||
return
|
||||
else
|
||||
not_followed_similar_users =
|
||||
users_similar_to_by_followers(
|
||||
user,
|
||||
limit: 10,
|
||||
exclude_followed_by: exclude_followed_by,
|
||||
).map { |u| user_to_similarity_entry(u) }
|
||||
end
|
||||
end
|
||||
|
||||
render json: {
|
||||
all: all_similar_users,
|
||||
not_followed: not_followed_similar_users,
|
||||
}
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
sig { override.returns(DomainController::DomainParamConfig) }
|
||||
def self.param_config
|
||||
DomainController::DomainParamConfig.new(
|
||||
user_id_param: :id,
|
||||
post_id_param: :domain_post_id,
|
||||
post_group_id_param: :domain_post_group_id,
|
||||
)
|
||||
end
|
||||
|
||||
sig { params(user: Domain::User::FaUser).returns(T::Hash[Symbol, T.untyped]) }
|
||||
def user_to_similarity_entry(user)
|
||||
profile_thumb_url = user.avatar&.log_entry&.uri_str
|
||||
profile_thumb_url ||=
|
||||
begin
|
||||
pp_log_entry = get_best_user_page_http_log_entry_for(user)
|
||||
if pp_log_entry && (response_bytes = pp_log_entry.response_bytes)
|
||||
parser =
|
||||
Domain::Fa::Parser::Page.new(
|
||||
response_bytes,
|
||||
require_logged_in: false,
|
||||
)
|
||||
parser.user_page.profile_thumb_url
|
||||
end
|
||||
rescue StandardError
|
||||
logger.error("error getting profile_thumb_url: #{$!.message}")
|
||||
end || "https://a.furaffinity.net/0/#{user.url_name}.gif"
|
||||
|
||||
{
|
||||
name: user.name,
|
||||
url_name: user.url_name,
|
||||
profile_thumb_url: profile_thumb_url,
|
||||
external_url: "https://www.furaffinity.net/user/#{user.url_name}/",
|
||||
refurrer_url: request.base_url + helpers.domain_user_path(user),
|
||||
}
|
||||
end
|
||||
|
||||
sig { params(user: Domain::User::FaUser).returns(T.nilable(HttpLogEntry)) }
|
||||
def get_best_user_page_http_log_entry_for(user)
|
||||
for_path =
|
||||
proc do |uri_path|
|
||||
HttpLogEntry
|
||||
.where(
|
||||
uri_scheme: "https",
|
||||
uri_host: "www.furaffinity.net",
|
||||
uri_path: uri_path,
|
||||
)
|
||||
.order(created_at: :desc)
|
||||
.first
|
||||
end
|
||||
|
||||
# older versions don't end in a trailing slash
|
||||
user.last_user_page_log_entry || for_path.call("/user/#{user.url_name}/") ||
|
||||
for_path.call("/user/#{user.url_name}")
|
||||
end
|
||||
|
||||
sig do
|
||||
params(
|
||||
user: Domain::User::FaUser,
|
||||
limit: Integer,
|
||||
exclude_followed_by: T.nilable(Domain::User::FaUser),
|
||||
).returns(T::Array[Domain::User::FaUser])
|
||||
end
|
||||
def users_similar_to_by_followers(user, limit: 10, exclude_followed_by: nil)
|
||||
factors = Domain::Factors::UserUserFollowToFactors.find_by(user: user)
|
||||
return [] if factors.nil?
|
||||
|
||||
relation =
|
||||
Domain::NeighborFinder
|
||||
.find_neighbors(factors)
|
||||
.limit(limit)
|
||||
.includes(:user)
|
||||
|
||||
if exclude_followed_by
|
||||
relation =
|
||||
relation.where.not(
|
||||
user_id: exclude_followed_by.followed_users.select(:to_id),
|
||||
)
|
||||
end
|
||||
|
||||
relation.map(&:user)
|
||||
end
|
||||
end
|
||||
73
app/controllers/domain_controller.rb
Normal file
@@ -0,0 +1,73 @@
|
||||
# typed: strict
|
||||
class DomainController < ApplicationController
|
||||
extend T::Sig
|
||||
extend T::Helpers
|
||||
abstract!
|
||||
|
||||
class DomainParamConfig < T::ImmutableStruct
|
||||
include T::Struct::ActsAsComparable
|
||||
|
||||
const :post_id_param, Symbol
|
||||
const :user_id_param, Symbol
|
||||
const :post_group_id_param, Symbol
|
||||
end
|
||||
|
||||
sig { void }
|
||||
def initialize
|
||||
super
|
||||
@post = T.let(nil, T.nilable(Domain::Post))
|
||||
@user = T.let(nil, T.nilable(Domain::User))
|
||||
@post_group = T.let(nil, T.nilable(Domain::PostGroup))
|
||||
end
|
||||
|
||||
protected
|
||||
|
||||
sig { abstract.returns(DomainParamConfig) }
|
||||
def self.param_config
|
||||
end
|
||||
|
||||
sig(:final) { void }
|
||||
def set_post!
|
||||
@post =
|
||||
self.class.find_model_from_param(
|
||||
Domain::Post,
|
||||
params[self.class.param_config.post_id_param],
|
||||
) || raise(ActiveRecord::RecordNotFound)
|
||||
end
|
||||
|
||||
sig(:final) { void }
|
||||
def set_user!
|
||||
@user =
|
||||
self.class.find_model_from_param(
|
||||
Domain::User,
|
||||
params[self.class.param_config.user_id_param],
|
||||
) || raise(ActiveRecord::RecordNotFound)
|
||||
end
|
||||
|
||||
sig(:final) { void }
|
||||
def set_post_group!
|
||||
@post_group =
|
||||
self.class.find_model_from_param(
|
||||
Domain::PostGroup,
|
||||
params[self.class.param_config.post_group_id_param],
|
||||
) || raise(ActiveRecord::RecordNotFound)
|
||||
end
|
||||
|
||||
public
|
||||
|
||||
sig(:final) do
|
||||
type_parameters(:Klass)
|
||||
.params(
|
||||
klass:
|
||||
T.all(
|
||||
T.class_of(ReduxApplicationRecord),
|
||||
HasCompositeToParam::ClassMethods[T.type_parameter(:Klass)],
|
||||
),
|
||||
param: T.nilable(String),
|
||||
)
|
||||
.returns(T.nilable(T.type_parameter(:Klass)))
|
||||
end
|
||||
def self.find_model_from_param(klass, param)
|
||||
klass.find_by_param(param)
|
||||
end
|
||||
end
|
||||
204
app/controllers/global_states_controller.rb
Normal file
@@ -0,0 +1,204 @@
|
||||
# typed: false
|
||||
class GlobalStatesController < ApplicationController
|
||||
before_action :set_global_state, only: %i[edit update destroy]
|
||||
after_action :verify_authorized
|
||||
|
||||
FA_COOKIE_KEYS = %w[
|
||||
furaffinity-cookie-a
|
||||
furaffinity-cookie-b
|
||||
furaffinity-cookie-oaid
|
||||
].freeze
|
||||
|
||||
IB_COOKIE_KEYS = %w[inkbunny-username inkbunny-password inkbunny-sid].freeze
|
||||
|
||||
def index
|
||||
authorize GlobalState
|
||||
@global_states = policy_scope(GlobalState).order(:key)
|
||||
end
|
||||
|
||||
def new
|
||||
@global_state = GlobalState.new
|
||||
authorize @global_state
|
||||
end
|
||||
|
||||
def create
|
||||
@global_state = GlobalState.new(global_state_params)
|
||||
authorize @global_state
|
||||
if @global_state.save
|
||||
redirect_to global_states_path,
|
||||
notice: "Global state was successfully created."
|
||||
else
|
||||
render :new, status: :unprocessable_entity
|
||||
end
|
||||
end
|
||||
|
||||
def edit
|
||||
authorize @global_state
|
||||
end
|
||||
|
||||
def update
|
||||
authorize @global_state
|
||||
if @global_state.update(global_state_params)
|
||||
redirect_to global_states_path,
|
||||
notice: "Global state was successfully updated."
|
||||
else
|
||||
render :edit, status: :unprocessable_entity
|
||||
end
|
||||
end
|
||||
|
||||
def destroy
|
||||
authorize @global_state
|
||||
@global_state.destroy
|
||||
redirect_to global_states_path,
|
||||
notice: "Global state was successfully deleted."
|
||||
end
|
||||
|
||||
def fa_cookies
|
||||
authorize GlobalState
|
||||
@fa_cookies =
|
||||
FA_COOKIE_KEYS.map do |key|
|
||||
GlobalState.find_by(key: key) ||
|
||||
GlobalState.new(key: key, value_type: :string)
|
||||
end
|
||||
end
|
||||
|
||||
def edit_fa_cookies
|
||||
authorize GlobalState
|
||||
@fa_cookies =
|
||||
FA_COOKIE_KEYS.map do |key|
|
||||
GlobalState.find_by(key: key) ||
|
||||
GlobalState.new(key: key, value_type: :string)
|
||||
end
|
||||
end
|
||||
|
||||
def update_fa_cookies
|
||||
authorize GlobalState
|
||||
|
||||
begin
|
||||
ActiveRecord::Base.transaction do
|
||||
fa_cookies_params.each do |key, value|
|
||||
state = GlobalState.find_or_initialize_by(key: key)
|
||||
state.value = value
|
||||
state.value_type = :string
|
||||
state.save!
|
||||
end
|
||||
end
|
||||
|
||||
redirect_to fa_cookies_global_states_path,
|
||||
notice: "FA cookies were successfully updated."
|
||||
rescue ActiveRecord::RecordInvalid => e
|
||||
@fa_cookies =
|
||||
FA_COOKIE_KEYS.map do |key|
|
||||
GlobalState.find_by(key: key) ||
|
||||
GlobalState.new(key: key, value_type: :string)
|
||||
end
|
||||
flash.now[:alert] = "Error updating FA cookies: #{e.message}"
|
||||
render :edit_fa_cookies, status: :unprocessable_entity
|
||||
end
|
||||
end
|
||||
|
||||
def ib_cookies
|
||||
authorize GlobalState
|
||||
@ib_cookies =
|
||||
IB_COOKIE_KEYS.map do |key|
|
||||
GlobalState.find_by(key: key) ||
|
||||
GlobalState.new(key: key, value_type: :string)
|
||||
end
|
||||
end
|
||||
|
||||
def edit_ib_cookies
|
||||
authorize GlobalState
|
||||
@ib_cookies =
|
||||
IB_COOKIE_KEYS
|
||||
.reject { |key| key == "inkbunny-sid" }
|
||||
.map do |key|
|
||||
GlobalState.find_by(key: key) ||
|
||||
GlobalState.new(key: key, value_type: :string)
|
||||
end
|
||||
@ib_sid = GlobalState.find_by(key: "inkbunny-sid")
|
||||
end
|
||||
|
||||
def update_ib_cookies
|
||||
authorize GlobalState
|
||||
|
||||
begin
|
||||
params_hash = params.require(:ib_cookies).permit(*IB_COOKIE_KEYS).to_h
|
||||
has_credentials =
|
||||
params_hash["inkbunny-username"].present? ||
|
||||
params_hash["inkbunny-password"].present?
|
||||
has_sid = params_hash["inkbunny-sid"].present?
|
||||
|
||||
if has_credentials && has_sid
|
||||
raise ArgumentError,
|
||||
"Cannot set both credentials and session ID at the same time"
|
||||
end
|
||||
|
||||
if !has_credentials && !has_sid
|
||||
raise ArgumentError, "Must set either credentials or session ID"
|
||||
end
|
||||
|
||||
ActiveRecord::Base.transaction do
|
||||
if has_credentials
|
||||
# Update username and password
|
||||
%w[inkbunny-username inkbunny-password].each do |key|
|
||||
state = GlobalState.find_or_initialize_by(key: key)
|
||||
state.value = params_hash[key]
|
||||
state.value_type = :string
|
||||
state.save!
|
||||
end
|
||||
else
|
||||
# Update SID
|
||||
state = GlobalState.find_or_initialize_by(key: "inkbunny-sid")
|
||||
state.value = params_hash["inkbunny-sid"]
|
||||
state.value_type = :string
|
||||
state.save!
|
||||
end
|
||||
end
|
||||
|
||||
redirect_to ib_cookies_global_states_path,
|
||||
notice: "Inkbunny credentials were successfully updated."
|
||||
rescue ArgumentError => e
|
||||
@ib_cookies =
|
||||
IB_COOKIE_KEYS
|
||||
.reject { |key| key == "inkbunny-sid" }
|
||||
.map do |key|
|
||||
GlobalState.find_by(key: key) ||
|
||||
GlobalState.new(key: key, value_type: :string)
|
||||
end
|
||||
@ib_sid = GlobalState.find_by(key: "inkbunny-sid")
|
||||
flash.now[:alert] = "Error updating Inkbunny credentials: #{e.message}"
|
||||
render :edit_ib_cookies, status: :unprocessable_entity
|
||||
rescue ActiveRecord::RecordInvalid => e
|
||||
@ib_cookies =
|
||||
IB_COOKIE_KEYS
|
||||
.reject { |key| key == "inkbunny-sid" }
|
||||
.map do |key|
|
||||
GlobalState.find_by(key: key) ||
|
||||
GlobalState.new(key: key, value_type: :string)
|
||||
end
|
||||
@ib_sid = GlobalState.find_by(key: "inkbunny-sid")
|
||||
flash.now[:alert] = "Error updating Inkbunny credentials: #{e.message}"
|
||||
render :edit_ib_cookies, status: :unprocessable_entity
|
||||
end
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
def set_global_state
|
||||
@global_state = GlobalState.find(params[:id])
|
||||
end
|
||||
|
||||
def global_state_params
|
||||
params.require(:global_state).permit(:key, :value, :value_type)
|
||||
end
|
||||
|
||||
def fa_cookies_params
|
||||
params.require(:fa_cookies).permit(*FA_COOKIE_KEYS)
|
||||
end
|
||||
|
||||
def ib_cookies_params
|
||||
params.require(:ib_cookies).permit(
|
||||
*IB_COOKIE_KEYS.reject { |key| key == "inkbunny-sid" },
|
||||
)
|
||||
end
|
||||
end
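`edit_ib_cookies` and both rescue branches above rebuild the same `@ib_cookies`/`@ib_sid` pair; one way to keep them in sync would be a small private helper like the sketch below, built only from the lines above and not part of the diff:

```ruby
def load_ib_cookie_states
  @ib_cookies =
    IB_COOKIE_KEYS
      .reject { |key| key == "inkbunny-sid" }
      .map do |key|
        GlobalState.find_by(key: key) ||
          GlobalState.new(key: key, value_type: :string)
      end
  @ib_sid = GlobalState.find_by(key: "inkbunny-sid")
end
```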
|
||||
@@ -1,3 +1,4 @@
|
||||
# typed: false
|
||||
class LogEntriesController < ApplicationController
|
||||
def index
|
||||
@uri_filter = Addressable::URI.parse(params[:filter]) if params[:filter]
|
||||
@@ -10,9 +11,11 @@ class LogEntriesController < ApplicationController
|
||||
|
||||
if @uri_filter.path.present?
|
||||
if @uri_filter.query.present?
|
||||
query = query.
|
||||
where("uri_path = ?", @uri_filter.path).
|
||||
where("uri_query like ?", @uri_filter.query + "%")
|
||||
query =
|
||||
query.where("uri_path = ?", @uri_filter.path).where(
|
||||
"uri_query like ?",
|
||||
@uri_filter.query + "%",
|
||||
)
|
||||
else
|
||||
query = query.where("uri_path like ?", @uri_filter.path + "%")
|
||||
end
|
||||
@@ -21,12 +24,14 @@ class LogEntriesController < ApplicationController
|
||||
query = HttpLogEntry
|
||||
end
|
||||
|
||||
@log_entries = query.
|
||||
page(params[:page]).
|
||||
per(50).
|
||||
includes(:response).
|
||||
order(id: :desc).
|
||||
without_count
|
||||
@log_entries =
|
||||
query
|
||||
.page(params[:page])
|
||||
.per(50)
|
||||
.joins(:response)
|
||||
.includes(:response)
|
||||
.order(id: :desc)
|
||||
.without_count
|
||||
|
||||
formats.clear
|
||||
formats << :html
|
||||
@@ -38,74 +43,40 @@ class LogEntriesController < ApplicationController
|
||||
@last_window_count = 0
|
||||
@last_window_bytes = 0
|
||||
@last_window_bytes_stored = 0
|
||||
@content_type_counts = Hash.new do |hash, key|
|
||||
hash[key] = {
|
||||
count: 0,
|
||||
bytes: 0,
|
||||
bytes_stored: 0,
|
||||
}
|
||||
end
|
||||
@content_type_counts =
|
||||
Hash.new do |hash, key|
|
||||
hash[key] = { count: 0, bytes: 0, bytes_stored: 0 }
|
||||
end
|
||||
|
||||
@by_domain_counts = Hash.new do |hash, key|
|
||||
hash[key] = {
|
||||
count: 0,
|
||||
bytes: 0,
|
||||
bytes_stored: 0,
|
||||
}
|
||||
end
|
||||
@by_domain_counts =
|
||||
Hash.new do |hash, key|
|
||||
hash[key] = { count: 0, bytes: 0, bytes_stored: 0 }
|
||||
end
|
||||
|
||||
HttpLogEntry.includes(:response).find_each(batch_size: 100, order: :desc) do |log_entry|
|
||||
break if log_entry.created_at < @time_window.ago
|
||||
@last_window_count += 1
|
||||
@last_window_bytes += log_entry.response.size
|
||||
@last_window_bytes_stored += log_entry.response.bytes_stored
|
||||
content_type = log_entry.content_type.split(";").first
|
||||
HttpLogEntry
|
||||
.joins(:response)
|
||||
.includes(:response)
|
||||
.select("http_log_entries.*, blob_files.size_bytes")
|
||||
.find_each(batch_size: 100, order: :desc) do |log_entry|
|
||||
break if log_entry.created_at < @time_window.ago
|
||||
@last_window_count += 1
|
||||
@last_window_bytes += log_entry.response_size
|
||||
content_type = log_entry.content_type.split(";").first
|
||||
|
||||
@content_type_counts[content_type][:count] += 1
|
||||
@content_type_counts[content_type][:bytes] += log_entry.response.size
|
||||
@content_type_counts[content_type][:bytes_stored] += log_entry.response.bytes_stored
|
||||
@content_type_counts[content_type][:count] += 1
|
||||
@content_type_counts[content_type][:bytes] += log_entry.response_size
|
||||
|
||||
@by_domain_counts[log_entry.uri_host][:count] += 1
|
||||
@by_domain_counts[log_entry.uri_host][:bytes] += log_entry.response.size
|
||||
@by_domain_counts[log_entry.uri_host][:bytes_stored] += log_entry.response.bytes_stored
|
||||
end
|
||||
@by_domain_counts[log_entry.uri_host][:count] += 1
|
||||
@by_domain_counts[log_entry.uri_host][:bytes] += log_entry.response_size
|
||||
end
|
||||
end
|
||||
|
||||
def show
|
||||
@log_entry = HttpLogEntry.includes(
|
||||
:caused_by_entry,
|
||||
:triggered_entries,
|
||||
response: :base,
|
||||
).find(params[:id])
|
||||
end
|
||||
|
||||
def contents
|
||||
expires_dur = 1.year
|
||||
response.headers["Expires"] = expires_dur.from_now.httpdate
|
||||
expires_in expires_dur, public: true
|
||||
|
||||
log_entry = HttpLogEntry.find(params[:id])
|
||||
hex_sha256 = HexUtil.bin2hex(log_entry.response_sha256)
|
||||
return unless stale?(last_modified: Time.at(0), strong_etag: hex_sha256)
|
||||
|
||||
# images, videos, etc
|
||||
entry_response = log_entry.response
|
||||
if helpers.is_send_data_content_type?(entry_response.content_type)
|
||||
send_data(
|
||||
entry_response.contents,
|
||||
type: entry_response.content_type,
|
||||
disposition: "inline",
|
||||
filename: log_entry.uri.path,
|
||||
)
|
||||
elsif entry_response.content_type =~ /text\/plain/
|
||||
render plain: entry_response.contents
|
||||
elsif entry_response.content_type.starts_with? "text/html"
|
||||
render html: entry_response.contents.html_safe
|
||||
elsif entry_response.content_type.starts_with? "application/json"
|
||||
pretty_json = JSON.pretty_generate(JSON.parse entry_response.contents)
|
||||
render html: "<html><body><pre>#{pretty_json}</pre></body></html>".html_safe
|
||||
else
|
||||
render plain: "no renderer for #{entry_response.content_type}"
|
||||
end
|
||||
@log_entry =
|
||||
HttpLogEntry.includes(
|
||||
:caused_by_entry,
|
||||
:triggered_entries,
|
||||
:response,
|
||||
).find(params[:id])
|
||||
end
|
||||
end
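Both the removed `contents` action above and the thumbnail code at the top of this diff lean on Rails' conditional-GET helpers. For reference, the caching pattern in isolation; the controller name is a placeholder, while the models and helpers are the ones from the removed action:

```ruby
class CachedResponsesController < ApplicationController
  def show
    expires_in 1.year, public: true # long-lived Cache-Control header

    log_entry = HttpLogEntry.find(params[:id])
    etag = HexUtil.bin2hex(log_entry.response_sha256)

    # stale? sets the ETag header; when the client's If-None-Match already
    # matches it renders a 304 and returns false, so we skip the body.
    return unless stale?(last_modified: Time.at(0), strong_etag: etag)

    send_data log_entry.response.contents,
              type: log_entry.response.content_type,
              disposition: "inline"
  end
end
```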
|
||||
|
||||
12
app/controllers/pages_controller.rb
Normal file
@@ -0,0 +1,12 @@
# typed: true
class PagesController < ApplicationController
  skip_before_action :authenticate_user!, only: %i[root furecs_user_script]

  def root
    render :root
  end

  def furecs_user_script
    render :furecs_user_script
  end
end

85
app/controllers/state/ip_address_roles_controller.rb
Normal file
@@ -0,0 +1,85 @@
|
||||
# typed: true
|
||||
class State::IpAddressRolesController < ApplicationController
|
||||
before_action :set_ip_address_role, only: %i[edit update destroy toggle]
|
||||
before_action :authorize_ip_address_roles
|
||||
|
||||
# GET /state/ip_address_roles
|
||||
def index
|
||||
@ip_address_roles = IpAddressRole.all.order(created_at: :desc)
|
||||
end
|
||||
|
||||
# GET /state/ip_address_roles/new
|
||||
def new
|
||||
@ip_address_role = IpAddressRole.new
|
||||
end
|
||||
|
||||
# GET /state/ip_address_roles/1/edit
|
||||
def edit
|
||||
end
|
||||
|
||||
# POST /state/ip_address_roles
|
||||
def create
|
||||
@ip_address_role = IpAddressRole.new(ip_address_role_params)
|
||||
|
||||
if @ip_address_role.save
|
||||
redirect_to state_ip_address_roles_path,
|
||||
notice: "IP address role was successfully created."
|
||||
else
|
||||
render :new
|
||||
end
|
||||
end
|
||||
|
||||
# PATCH/PUT /state/ip_address_roles/1
|
||||
def update
|
||||
if @ip_address_role.update(ip_address_role_params)
|
||||
redirect_to state_ip_address_roles_path,
|
||||
notice: "IP address role was successfully updated."
|
||||
else
|
||||
render :edit
|
||||
end
|
||||
end
|
||||
|
||||
# DELETE /state/ip_address_roles/1
|
||||
def destroy
|
||||
@ip_address_role.destroy
|
||||
redirect_to state_ip_address_roles_path,
|
||||
notice: "IP address role was successfully deleted."
|
||||
end
|
||||
|
||||
def toggle
|
||||
@ip_address_role.update!(active: !@ip_address_role.active)
|
||||
redirect_to state_ip_address_roles_path
|
||||
rescue ActiveRecord::RecordInvalid => e
|
||||
redirect_to state_ip_address_roles_path,
|
||||
alert: "Failed to update status: #{e.message}"
|
||||
end
|
||||
|
||||
private
|
||||
|
||||
# Use callbacks to share common setup or constraints between actions
|
||||
def set_ip_address_role
|
||||
@ip_address_role = IpAddressRole.find(params[:id])
|
||||
end
|
||||
|
||||
# Only allow a list of trusted parameters through
|
||||
def ip_address_role_params
|
||||
params.require(:ip_address_role).permit(
|
||||
:ip_address,
|
||||
:role,
|
||||
:description,
|
||||
:active,
|
||||
)
|
||||
end
|
||||
|
||||
# Authorize all actions based on the current action
|
||||
def authorize_ip_address_roles
|
||||
case action_name.to_sym
|
||||
when :index, :new, :edit
|
||||
authorize IpAddressRole, policy_class: State::IpAddressRolePolicy
|
||||
when :create
|
||||
authorize IpAddressRole, policy_class: State::IpAddressRolePolicy
|
||||
when :update, :destroy, :toggle
|
||||
authorize @ip_address_role, policy_class: State::IpAddressRolePolicy
|
||||
end
|
||||
end
|
||||
end
|
||||
24
app/controllers/user_scripts_controller.rb
Normal file
@@ -0,0 +1,24 @@
|
||||
# typed: true
|
||||
class UserScriptsController < ApplicationController
|
||||
skip_before_action :authenticate_user!, only: [:get]
|
||||
skip_before_action :verify_authenticity_token, only: [:get]
|
||||
|
||||
ALLOWED_SCRIPTS = %w[object_statuses.user.js furecs.user.js].freeze
|
||||
|
||||
def get
|
||||
expires_in 1.hour, public: true
|
||||
response.cache_control[:public] = true
|
||||
response.cache_control[:private] = false
|
||||
|
||||
script = params[:script]
|
||||
unless ALLOWED_SCRIPTS.include?(script)
|
||||
render status: 404, plain: "not found"
|
||||
return
|
||||
end
|
||||
|
||||
send_file(
|
||||
Rails.root.join("user_scripts/dist/#{script}"),
|
||||
type: "application/javascript",
|
||||
)
|
||||
end
|
||||
end
|
||||
65
app/controllers/users/registrations_controller.rb
Normal file
@@ -0,0 +1,65 @@
|
||||
# typed: false
|
||||
# frozen_string_literal: true
|
||||
|
||||
class Users::RegistrationsController < Devise::RegistrationsController
|
||||
# before_action :configure_sign_up_params, only: [:create]
|
||||
# before_action :configure_account_update_params, only: [:update]
|
||||
|
||||
# GET /resource/sign_up
|
||||
def new
|
||||
flash[:alert] = "New registrations are currently disabled."
|
||||
redirect_to root_path
|
||||
end
|
||||
|
||||
# POST /resource
|
||||
def create
|
||||
flash[:alert] = "New registrations are currently disabled."
|
||||
redirect_to root_path
|
||||
end
|
||||
|
||||
# GET /resource/edit
|
||||
# def edit
|
||||
# super
|
||||
# end
|
||||
|
||||
# PUT /resource
|
||||
# def update
|
||||
# super
|
||||
# end
|
||||
|
||||
# DELETE /resource
|
||||
# def destroy
|
||||
# super
|
||||
# end
|
||||
|
||||
# GET /resource/cancel
|
||||
# Forces the session data which is usually expired after sign
|
||||
# in to be expired now. This is useful if the user wants to
|
||||
# cancel oauth signing in/up in the middle of the process,
|
||||
# removing all OAuth session data.
|
||||
# def cancel
|
||||
# super
|
||||
# end
|
||||
|
||||
# protected
|
||||
|
||||
# If you have extra params to permit, append them to the sanitizer.
|
||||
# def configure_sign_up_params
|
||||
# devise_parameter_sanitizer.permit(:sign_up, keys: [:attribute])
|
||||
# end
|
||||
|
||||
# If you have extra params to permit, append them to the sanitizer.
|
||||
# def configure_account_update_params
|
||||
# devise_parameter_sanitizer.permit(:account_update, keys: [:attribute])
|
||||
# end
|
||||
|
||||
# The path used after sign up.
|
||||
# def after_sign_up_path_for(resource)
|
||||
# super(resource)
|
||||
# end
|
||||
|
||||
# The path used after sign up for inactive accounts.
|
||||
# def after_inactive_sign_up_path_for(resource)
|
||||
# super(resource)
|
||||
# end
|
||||
end
|
||||
28
app/controllers/users/sessions_controller.rb
Normal file
@@ -0,0 +1,28 @@
|
||||
# typed: strict
|
||||
# frozen_string_literal: true
|
||||
|
||||
class Users::SessionsController < Devise::SessionsController
|
||||
# before_action :configure_sign_in_params, only: [:create]
|
||||
|
||||
# GET /resource/sign_in
|
||||
# def new
|
||||
# super
|
||||
# end
|
||||
|
||||
# POST /resource/sign_in
|
||||
# def create
|
||||
# super
|
||||
# end
|
||||
|
||||
# DELETE /resource/sign_out
|
||||
# def destroy
|
||||
# super
|
||||
# end
|
||||
|
||||
# protected
|
||||
|
||||
# If you have extra params to permit, append them to the sanitizer.
|
||||
# def configure_sign_in_params
|
||||
# devise_parameter_sanitizer.permit(:sign_in, keys: [:attribute])
|
||||
# end
|
||||
end
|
||||
@@ -1,2 +1,3 @@
# typed: strict
module ApplicationHelper
end

348
app/helpers/domain/descriptions_helper.rb
Normal file
@@ -0,0 +1,348 @@
|
||||
# typed: strict
|
||||
# frozen_string_literal: true
|
||||
require "dtext"
|
||||
|
||||
module Domain::DescriptionsHelper
|
||||
extend T::Sig
|
||||
extend T::Helpers
|
||||
include HelpersInterface
|
||||
include Domain::PostsHelper
|
||||
include Domain::DomainsHelper
|
||||
include Domain::UsersHelper
|
||||
requires_ancestor { Object }
|
||||
|
||||
abstract!
|
||||
|
||||
sig do
|
||||
params(assumed_host: String, url_string: String).returns(
|
||||
T.nilable(Addressable::URI),
|
||||
)
|
||||
end
|
||||
def try_parse_uri(assumed_host, url_string)
|
||||
extracted = URI.extract(url_string).first || url_string
|
||||
|
||||
# if the url string starts with a slash, add the assumed host to it
|
||||
extracted = assumed_host + extracted if extracted.starts_with?("/")
|
||||
|
||||
# if the url string doesn't have a protocol, add https:// to it
|
||||
unless extracted.starts_with?("http") && extracted.include?("://")
|
||||
extracted = "https://" + extracted
|
||||
end
|
||||
|
||||
uri = Addressable::URI.parse(extracted)
|
||||
uri.host ||= assumed_host
|
||||
uri.scheme ||= "https"
|
||||
uri
|
||||
rescue Addressable::URI::InvalidURIError
|
||||
nil
|
||||
end
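A quick console-style illustration of `try_parse_uri` with the sort of fragments that appear in descriptions; the inputs are invented, and the results follow directly from the method above:

```ruby
try_parse_uri("www.furaffinity.net", "/user/somefox")
# => Addressable::URI for "https://www.furaffinity.net/user/somefox"

try_parse_uri("www.furaffinity.net", "e621.net/posts/12345")
# => Addressable::URI for "https://e621.net/posts/12345"

try_parse_uri("www.furaffinity.net", "https://inkbunny.net/s/12345")
# => Addressable::URI for "https://inkbunny.net/s/12345"
```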
|
||||
|
||||
sig { params(text: String, url: String).returns(T::Boolean) }
|
||||
def text_same_as_url?(text, url)
|
||||
text = text.strip.downcase
|
||||
url = url.strip.downcase
|
||||
["", "http://", "https://"].any? { |prefix| "#{prefix}#{text}" == url }
|
||||
end
|
||||
|
||||
sig { params(model: HasDescriptionHtmlForView).returns(T.nilable(String)) }
|
||||
def description_section_class_for_model(model)
|
||||
case model
|
||||
when Domain::Post::FaPost, Domain::User::FaUser
|
||||
"bg-slate-700 p-4 text-slate-200 text-sm"
|
||||
when Domain::Post::E621Post, Domain::User::E621User
|
||||
"bg-slate-700 p-4 text-slate-200 text-sm"
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
WEAK_URL_MATCHER_REGEX =
|
||||
%r{(http(s)?:\/\/.)?(www\.)?[-a-zA-Z0-9@:%._\+~#=]{2,256}\.[a-z]{2,6}\b([-a-zA-Z0-9@:%_\+.~#?&//=]*)}
|
||||
|
||||
ALLOWED_INFERRED_URL_DOMAINS =
|
||||
T.let(
|
||||
%w[furaffinity.net inkbunny.net e621.net]
|
||||
.flat_map { |domain| [domain, "www.#{domain}"] }
|
||||
.freeze,
|
||||
T::Array[String],
|
||||
)
|
||||
|
||||
sig { params(model: HasDescriptionHtmlForView).returns(T.nilable(String)) }
|
||||
def sanitize_description_html(model)
|
||||
html = model.description_html_for_view
|
||||
return nil if html.blank?
|
||||
|
||||
case model
|
||||
when Domain::Post::E621Post
|
||||
dtext_result = DText.parse(html)
|
||||
return nil if dtext_result.blank?
|
||||
html = dtext_result[0]
|
||||
else
|
||||
# profiles often contain bbcode, so first re-parse that
|
||||
# for some reason, lots of duplicate <br> tags appear as well
|
||||
html = html.gsub("<br>", "").strip
|
||||
html = try_convert_bbcode_to_html(html)
|
||||
end
|
||||
|
||||
replacements = {}
|
||||
|
||||
# Transform bare text that is not contained within an anchor tag into an anchor tag
|
||||
text_link_transformer =
|
||||
lambda do |env|
|
||||
node = T.cast(env[:node], Nokogiri::XML::Node)
|
||||
return if env[:is_allowlisted]
|
||||
|
||||
next unless node.text?
|
||||
next unless node.ancestors("a").empty?
|
||||
next unless (node_text = T.cast(node.text, T.nilable(String)))
|
||||
next unless (match = node_text.match(WEAK_URL_MATCHER_REGEX))
|
||||
next unless (url_text = match[0])
|
||||
unless (
|
||||
uri =
|
||||
try_parse_uri(model.description_html_base_domain, url_text)
|
||||
)
|
||||
next
|
||||
end
|
||||
unless ALLOWED_PLAIN_TEXT_URL_DOMAINS.any? { |domain|
|
||||
url_matches_domain?(domain, uri.host)
|
||||
}
|
||||
next
|
||||
end
|
||||
|
||||
before, after = node.text.split(url_text, 2)
|
||||
new_node = "#{before}<a href=\"#{url_text}\">#{url_text}</a>#{after}"
|
||||
node.replace(new_node)
|
||||
end
|
||||
|
||||
tag_class_and_style_transformer =
|
||||
lambda do |env|
|
||||
node = T.cast(env[:node], Nokogiri::XML::Node)
|
||||
node_name = T.cast(env[:node_name], String)
|
||||
return if env[:is_allowlisted] || !node.element?
|
||||
|
||||
# Convert bbcode_center class to text-align: center style
|
||||
# and remove all other styling
|
||||
add_node_styles = []
|
||||
if node["class"]&.include?("bbcode_center")
|
||||
add_node_styles << "text-align: center"
|
||||
end
|
||||
|
||||
node.name = "div" if node_name == "code"
|
||||
node.remove_attribute("class")
|
||||
# add to original styles
|
||||
node["style"] = (node["style"] || "")
|
||||
.split(";")
|
||||
.map(&:strip)
|
||||
.concat(add_node_styles)
|
||||
.map { |s| s + ";" }
|
||||
.join(" ")
|
||||
end
|
||||
|
||||
link_to_model_link_transformer =
|
||||
lambda do |env|
|
||||
node = T.cast(env[:node], Nokogiri::XML::Node)
|
||||
node_name = T.cast(env[:node_name], String)
|
||||
next if env[:is_allowlisted] || !node.element?
|
||||
|
||||
# Only allow and transform FA links
|
||||
if node_name == "a"
|
||||
href_str = node["href"]&.downcase || ""
|
||||
url = try_parse_uri(model.description_html_base_domain, href_str)
|
||||
next { node_whitelist: [] } if url.nil?
|
||||
|
||||
found_link = link_for_source(url.to_s)
|
||||
if found_link.present? && (found_model = found_link.model)
|
||||
partial, locals =
|
||||
case found_model
|
||||
when Domain::Post
|
||||
[
|
||||
"domain/has_description_html/inline_link_domain_post",
|
||||
{
|
||||
post: found_model,
|
||||
link_text: node.text,
|
||||
visual_style: "description-section-link",
|
||||
},
|
||||
]
|
||||
when Domain::User
|
||||
[
|
||||
"domain/has_description_html/inline_link_domain_user",
|
||||
{
|
||||
user: found_model,
|
||||
link_text: node.text,
|
||||
visual_style: "description-section-link",
|
||||
},
|
||||
]
|
||||
else
|
||||
raise "Unknown model type: #{found_link.model.class}"
|
||||
end
|
||||
|
||||
replacements[node] = Nokogiri::HTML5.fragment(
|
||||
render(partial:, locals:),
|
||||
)
|
||||
next { node_whitelist: [node] }
|
||||
else
|
||||
if ALLOWED_EXTERNAL_LINK_DOMAINS.any? { |domain|
|
||||
url_matches_domain?(domain, url.host)
|
||||
}
|
||||
if node.text.blank? || text_same_as_url?(node.text, url.to_s)
|
||||
title = title_for_url(url.to_s)
|
||||
else
|
||||
title = node.text
|
||||
end
|
||||
|
||||
replacements[node] = Nokogiri::HTML5.fragment(
|
||||
render(
|
||||
partial: "domain/has_description_html/inline_link_external",
|
||||
locals: {
|
||||
url: url.to_s,
|
||||
title:,
|
||||
icon_path: icon_path_for_domain(url.host),
|
||||
},
|
||||
),
|
||||
)
|
||||
next { node_whitelist: [node] }
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
disallowed_link_transformer =
|
||||
lambda do |env|
|
||||
node = T.cast(env[:node], Nokogiri::XML::Node)
|
||||
node_name = T.cast(env[:node_name], String)
|
||||
return if env[:is_allowlisted] || !node.element?
|
||||
|
||||
if node_name == "a"
|
||||
# by the time we're here, we know this is not a valid link node,
|
||||
# and it should be replaced with its text
|
||||
node.replace(node.inner_html)
|
||||
end
|
||||
end
|
||||
|
||||
sanitizer =
|
||||
Sanitize.new(
|
||||
elements: %w[a code div br img b i span strong hr p],
|
||||
attributes: {
|
||||
"a" => %w[href class],
|
||||
:all => %w[class style],
|
||||
},
|
||||
css: {
|
||||
properties: %w[font-size color text-align class],
|
||||
},
|
||||
transformers: [
|
||||
text_link_transformer,
|
||||
tag_class_and_style_transformer,
|
||||
link_to_model_link_transformer,
|
||||
disallowed_link_transformer,
|
||||
],
|
||||
)
|
||||
|
||||
fragment = Nokogiri::HTML5.fragment(sanitizer.send(:preprocess, html))
|
||||
sanitizer.node!(fragment)
|
||||
replacements.each { |node, replacement| node.replace(replacement) }
|
||||
raw fragment.to_html(preserve_newline: true)
|
||||
rescue StandardError
|
||||
raise if Rails.env == "staging" || Rails.env.test? || Rails.env.development?
|
||||
# if anything goes wrong in production, bail out and don't display anything
|
||||
"(error generating description)"
|
||||
end
|
||||
|
||||
sig { params(visual_style: String).returns(String) }
|
||||
def link_classes_for_visual_style(visual_style)
|
||||
case visual_style
|
||||
when "sky-link"
|
||||
"blue-link truncate"
|
||||
when "description-section-link"
|
||||
[
|
||||
"text-sky-200 border-slate-200",
|
||||
"border border-transparent hover:border-slate-300 hover:text-sky-800 hover:bg-slate-100",
|
||||
"rounded-md px-1 transition-all",
|
||||
"inline-flex items-center align-bottom",
|
||||
].join(" ")
|
||||
else
|
||||
"blue-link"
|
||||
end
|
||||
end
|
||||
|
||||
sig do
|
||||
params(user: Domain::User, visual_style: String, icon_size: String).returns(
|
||||
T::Hash[Symbol, T.untyped],
|
||||
)
|
||||
end
|
||||
def props_for_user_hover_preview(user, visual_style, icon_size)
|
||||
cache_key = [
|
||||
user,
|
||||
policy(user),
|
||||
"popover_inline_link_domain_user",
|
||||
icon_size,
|
||||
]
|
||||
Rails
|
||||
.cache
|
||||
.fetch(cache_key) do
|
||||
num_posts =
|
||||
user.has_created_posts? ? user.user_post_creations.count : nil
|
||||
registered_at = domain_user_registered_at_string_for_view(user)
|
||||
num_followed_by =
|
||||
user.has_followed_by_users? ? user.user_user_follows_to.count : nil
|
||||
num_followed =
|
||||
user.has_followed_users? ? user.user_user_follows_from.count : nil
|
||||
avatar_thumb_size = icon_size == "large" ? "64-avatar" : "32-avatar"
|
||||
|
||||
{
|
||||
iconSize: icon_size,
|
||||
linkText: user.name_for_view,
|
||||
userId: user.to_param,
|
||||
userName: user.name_for_view,
|
||||
userPath: domain_user_path(user),
|
||||
userSmallAvatarPath:
|
||||
domain_user_avatar_img_src_path(
|
||||
user.avatar,
|
||||
thumb: avatar_thumb_size,
|
||||
),
|
||||
userAvatarPath: domain_user_avatar_img_src_path(user.avatar),
|
||||
userAvatarAlt: "View #{user.name_for_view}'s profile",
|
||||
userDomainIcon: domain_model_icon_path(user),
|
||||
userNumPosts: num_posts,
|
||||
userRegisteredAt: registered_at,
|
||||
userNumFollowedBy: num_followed_by,
|
||||
userNumFollowed: num_followed,
|
||||
}
|
||||
end
|
||||
.then do |props|
|
||||
props[:visualStyle] = visual_style
|
||||
props
|
||||
end
|
||||
end
|
||||
|
||||
sig do
|
||||
params(post: Domain::Post, link_text: String, visual_style: String).returns(
|
||||
T::Hash[Symbol, T.untyped],
|
||||
)
|
||||
end
|
||||
def props_for_post_hover_preview(post, link_text, visual_style)
|
||||
cache_key = [post, policy(post), "popover_inline_link_domain_post"]
|
||||
Rails
|
||||
.cache
|
||||
.fetch(cache_key) do
|
||||
{
|
||||
linkText: link_text,
|
||||
postId: post.to_param,
|
||||
postTitle: post.title,
|
||||
postPath: Rails.application.routes.url_helpers.domain_post_path(post),
|
||||
postThumbnailPath: thumbnail_for_post_path(post),
|
||||
postThumbnailAlt: "View on #{domain_name_for_model(post)}",
|
||||
postDomainIcon: domain_model_icon_path(post),
|
||||
}.then do |props|
|
||||
if creator = post.primary_creator_for_view
|
||||
props[:creatorName] = creator.name_for_view
|
||||
props[:creatorAvatarPath] = user_avatar_path_for_view(creator)
|
||||
end
|
||||
props
|
||||
end
|
||||
end
|
||||
.then do |props|
|
||||
props[:visualStyle] = visual_style
|
||||
props
|
||||
end
|
||||
end
|
||||
end
|
||||
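For orientation, a minimal usage sketch of how the two helpers above might pair up at a call site. The `post` variable and the surrounding `content_tag` call are assumptions for illustration, not code from this change; any model implementing `HasDescriptionHtmlForView` would work the same way.

    # Hypothetical view/helper call site.
    if (description = sanitize_description_html(post))
      content_tag(
        :div,
        description,
        class: description_section_class_for_model(post),
      )
    end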
app/helpers/domain/domain_model_helper.rb (new file, 46 lines)
@@ -0,0 +1,46 @@
# typed: strict
module Domain::DomainModelHelper
  extend T::Sig
  extend T::Helpers
  include HelpersInterface
  abstract!

  HasDomainTypeType =
    T.type_alias { T.any(HasDomainType, HasDomainType::ClassMethods) }

  sig { params(model: HasDomainTypeType).returns(String) }
  def domain_name_for_model(model)
    case model.domain_type
    when Domain::DomainType::Fa
      "FurAffinity"
    when Domain::DomainType::E621
      "E621"
    when Domain::DomainType::Inkbunny
      "Inkbunny"
    end
  end

  sig { params(model: HasDomainTypeType).returns(String) }
  def domain_abbreviation_for_model(model)
    case model.domain_type
    when Domain::DomainType::Fa
      "FA"
    when Domain::DomainType::E621
      "E621"
    when Domain::DomainType::Inkbunny
      "IB"
    end
  end

  sig { params(model: Domain::Post).returns(String) }
  def title_for_post_model(model)
    case model
    when Domain::Post::FaPost
      model.title
    when Domain::Post::E621Post
      model.title
    when Domain::Post::InkbunnyPost
      model.title
    end || "(unknown)"
  end
end
app/helpers/domain/domain_type.rb (new file, 9 lines)
@@ -0,0 +1,9 @@
# typed: strict
# Enum represents the domain of a post or user, e.g. "FurAffinity", "E621", "Inkbunny"
class Domain::DomainType < T::Enum
  enums do
    Fa = new
    E621 = new
    Inkbunny = new
  end
end
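A brief, hedged note on how these enum values round-trip: this is standard Sorbet `T::Enum` behavior rather than anything added in this diff, and the serialized strings below assume Sorbet's default of downcasing the constant name.

    Domain::DomainType::Fa.serialize            # => "fa"
    Domain::DomainType.deserialize("inkbunny")  # => Domain::DomainType::Inkbunny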
app/helpers/domain/domains_helper.rb (new file, 133 lines)
@@ -0,0 +1,133 @@
# typed: strict
module Domain::DomainsHelper
  extend T::Sig
  extend T::Helpers
  include HelpersInterface
  abstract!

  # If a URL is detected in plain text and is one of these domains,
  # it will be converted to an anchor tag.
  ALLOWED_PLAIN_TEXT_URL_DOMAINS = %w[
    e621.net
    furaffinity.net
    inkbunny.net
  ].freeze

  # If an existing anchor tag points at one of these domains,
  # it will be kept and rendered as an external link.
  ALLOWED_EXTERNAL_LINK_DOMAINS =
    T.let(
      (
        %w[
          archiveofourown.org
          behance.net
          bigcartel.com
          boosty.to
          bsky.app
          carrd.co
          deviantart.com
          discord.gg
          dribbble.com
          e621.net
          facebook.com
          furaffinity.net
          gumroad.com
          hipolink.me
          inkbunny.net
          itch.io
          instagram.com
          ko-fi.com
          livejournal.com
          mstdn.social
          patreon.com
          pinterest.com
          pixiv.net
          redbubble.com
          spreadshirt.com
          spreadshirt.de
          t.me
          tumblr.com
          twitch.tv
          twitter.com
          vimeo.com
          weasyl.com
          x.com
          youtube.com
        ] + ALLOWED_PLAIN_TEXT_URL_DOMAINS
      ).freeze,
      T::Array[String],
    )

  DOMAIN_TO_ICON_PATH =
    T.let(
      {
        "bigcartel.com" => "bigcartel.png",
        "boosty.to" => "boosty.png",
        "bsky.app" => "bsky.png",
        "carrd.co" => "carrd.png",
        "deviantart.com" => "deviantart.png",
        "e621.net" => "e621.png",
        "furaffinity.net" => "fa.png",
        "ib.metapix.net" => "inkbunny.png",
        "inkbunny.net" => "inkbunny.png",
        "itaku.ee" => "itaku.png",
        "ko-fi.com" => "ko-fi.png",
        "newgrounds.com" => "newgrounds.png",
        "patreon.com" => "patreon.png",
        "pixiv.net" => "pixiv.png",
        "redbubble.com" => "redbubble.png",
        "spreadshirt.com" => "spreadshirt.png",
        "spreadshirt.de" => "spreadshirt.png",
        "subscribestar.com" => "subscribestar.png",
        "subscribestar.adult" => "subscribestar.png",
        "gumroad.com" => "gumroad.png",
        "itch.io" => "itch-io.png",
        "t.me" => "telegram.png",
        "tumblr.com" => "tumblr.png",
        "twitter.com" => "x-twitter.png",
        "weasyl.com" => "weasyl.png",
        "wixmp.com" => "deviantart.png",
        "x.com" => "x-twitter.png",
      }.freeze,
      T::Hash[String, String],
    )

  DOMAIN_TITLE_MAPPERS =
    T.let(
      [
        [%r{://t.me/([^/]+)}, ->(match) { match[1] }],
        [%r{://bsky.app/profile/([^/]+)}, ->(match) { match[1] }],
        [%r{://(.*\.)?x.com/([^/]+)}, ->(match) { match[2] }],
        [%r{://(.*\.)?twitter.com/([^/]+)}, ->(match) { match[2] }],
        [%r{://(.*\.)?patreon.com/([^/]+)}, ->(match) { match[2] }],
        [%r{://(.*\.)?furaffinity.net/user/([^/]+)}, ->(match) { match[2] }],
      ],
      T::Array[[Regexp, T.proc.params(match: MatchData).returns(String)]],
    )

  sig { params(domain: String, host: String).returns(T::Boolean) }
  def url_matches_domain?(domain, host)
    host == domain || host.end_with?(".#{domain}")
  end

  sig { params(domain: String).returns(T.nilable(String)) }
  def icon_path_for_domain(domain)
    DOMAIN_TO_ICON_PATH.each do |test_domain, icon|
      if url_matches_domain?(test_domain, domain)
        return asset_path("domain-icons/#{icon}")
      end
    end
    nil
  end

  sig { params(url: String).returns(String) }
  def title_for_url(url)
    url = url.to_s
    DOMAIN_TITLE_MAPPERS.each do |mapper|
      if (match = mapper[0].match(url)) && (group = mapper[1].call(match))
        return group
      end
    end
    url
  end
end
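For orientation, a hedged sketch of what the two lookup helpers above produce; the values are hand-derived from the constants and regexes in this file, not captured from a running app.

    url_matches_domain?("furaffinity.net", "www.furaffinity.net")  # => true
    url_matches_domain?("furaffinity.net", "notfuraffinity.net")   # => false (no "." boundary)

    title_for_url("https://twitter.com/some_artist")  # => "some_artist"
    title_for_url("https://t.me/some_channel")        # => "some_channel"
    title_for_url("https://example.com/page")         # => "https://example.com/page" (no mapper matches, falls through)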
app/helpers/domain/e621/posts_helper.rb (new file, 96 lines)
@@ -0,0 +1,96 @@
# typed: false
module Domain::E621::PostsHelper
  def icon_asset_for_url(url)
    domain = extract_domain(url)
    return nil unless domain

    domain_patterns = {
      %w[*.e621.net e621.net] => "e621.png",
      %w[*.furaffinity.net furaffinity.net] => "fa.png",
      %w[*.bsky.app bsky.app] => "bsky.png",
      %w[*.itaku.ee itaku.ee] => "itaku.png",
      %w[*.deviantart.com deviantart.com *.wixmp.com] => "deviantart.png",
      %w[*.twitter.com twitter.com *.x.com x.com] => "x-twitter.png",
      %w[*.inkbunny.net inkbunny.net *.ib.metapix.net ib.metapix.net] =>
        "inkbunny.png",
      %w[*.newgrounds.com newgrounds.com] => "newgrounds.png",
      %w[*.patreon.com patreon.com] => "patreon.png",
      %w[*.pixiv.net pixiv.net *.pximg.net pximg.net] => "pixiv.png",
    }

    domain_patterns.each do |patterns, icon|
      patterns.each do |pattern|
        if File.fnmatch?(pattern, domain, File::FNM_PATHNAME)
          return asset_path("domain-icons/#{icon}")
        end
      end
    end
    nil
  end

  def tag_category_tw_class(category)
    case category.to_sym
    when :general
      "bg-blue-300" # Light blue
    when :artist
      "bg-indigo-300" # Light indigo
    when :copyright
      "bg-purple-300" # Light purple
    when :character
      "bg-green-300" # Light green
    when :species
      "bg-teal-300" # Light teal
    when :invalid
      "bg-slate-300" # Medium gray
    when :meta
      "bg-amber-300" # Light amber
    when :lore
      "bg-cyan-300" # Light cyan
    else
      "bg-white" # White (default)
    end
  end

  def tag_category_order
    %i[artist copyright character species general meta lore invalid]
  end

  def font_awesome_category_icon(category)
    case category.to_sym
    when :artist
      "fa-brush"
    when :species
      "fa-paw"
    when :character
      "fa-user"
    when :copyright
      "fa-copyright"
    when :general
      "fa-tag"
    when :lore
      "fa-book"
    when :meta
      "fa-info"
    when :invalid
      "fa-ban"
    end
  end

  def fa_post_for_source(source)
    uri = URI.parse(source)
    return unless %w[www.furaffinity.net furaffinity.net].include?(uri.host)
    # String#[] with a capture group returns nil (instead of raising) when the
    # path is not a /view/<id> URL, so the guard below actually takes effect
    fa_id = uri.path[%r{/view/(\d+)}, 1]
    return unless fa_id
    Domain::Fa::Post.find_by(fa_id: fa_id)
  rescue StandardError
    nil
  end

  private

  def extract_domain(url)
    URI.parse(url).host
  rescue URI::InvalidURIError
    nil
  end
end
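A small, hedged sketch of the mappings the E621 helpers above imply; the results are hand-derived from the code rather than captured output, and `asset_path` resolution depends on the asset pipeline.

    tag_category_tw_class(:artist)     # => "bg-indigo-300"
    tag_category_tw_class("species")   # => "bg-teal-300" (category.to_sym accepts strings)
    font_awesome_category_icon(:lore)  # => "fa-book"
    # File.fnmatch? with FNM_PATHNAME lets "*.e621.net" match a single subdomain level:
    icon_asset_for_url("https://static1.e621.net/data/sample.png")
    # => asset_path("domain-icons/e621.png")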
@@ -1,39 +1,165 @@
# typed: strict
module Domain::Fa::PostsHelper
  extend T::Sig

  include ActionView::Helpers::DateHelper
  include ActionView::Helpers::SanitizeHelper
  include ActionView::Helpers::RenderingHelper
  include ActionView::Helpers::TagHelper

  sig { params(post: Domain::Fa::Post).returns(String) }
  def post_state_string(post)
    if post.have_file?
      "file"
    elsif post.scanned?
      "scanned"
    else
      post.state || "unknown"
    end
  end

  sig do
    params(
      params:
        T.any(ActionController::Parameters, T::Hash[T.untyped, T.untyped]),
    ).returns(T.nilable(String))
  end
  def page_str(params)
    if (params[:page] || 1).to_i > 1
      "(page #{params[:page]})"
    else
      nil
    end
  end

  sig { params(post: Domain::Fa::Post).returns(T.nilable(HttpLogEntry)) }
  def guess_scanned_http_log_entry(post)
    HttpLogEntry.find_all_by_uri(
      "https://www.furaffinity.net/view/#{post.fa_id}",
    ).first
  end

  sig { params(post: Domain::Fa::Post).returns(T.nilable(HttpLogEntry)) }
  def guess_file_downloaded_http_log_entry(post)
    if (uri = post.file_uri)
      HttpLogEntry.find_all_by_uri(uri).first
    end
  end

  sig { params(html: String).returns(String) }
  def fa_post_description_sanitized(html)
    fa_post_id_to_node = {}
    fa_user_url_name_to_node = {}

    sanitizer =
      Sanitize.new(
        elements: %w[br img b i span strong],
        attributes: {
          "span" => %w[style],
        },
        css: {
          properties: %w[font-size color],
        },
        transformers: [
          Kernel.lambda do |env|
            # Only allow and transform FA links
            if env[:node_name] == "a"
              node = env[:node]

              # by default, assume the host is www.furaffinity.net
              href = node["href"]&.downcase || ""
              href = "//" + href if href.match?(/^(www\.)?furaffinity\.net/)
              uri =
                begin
                  URI.parse(href)
                rescue URI::InvalidURIError
                  nil
                end

              valid_type = !uri.is_a?(URI::MailTo)
              next { node_whitelist: [node] } if uri.nil? || !valid_type

              uri.host ||= "www.furaffinity.net"
              uri.scheme ||= "https"
              path = uri.path

              fa_host_matcher = /^(www\.)?furaffinity\.net$/
              fa_post_matcher = %r{^/view/(\d+)/?$}
              fa_user_matcher = %r{^/user/(\w+)/?$}

              if fa_host_matcher.match?(uri.host) && path
                if match = path.match(fa_post_matcher)
                  fa_id = match[1].to_i
                  fa_post_id_to_node[fa_id] = node
                  next { node_whitelist: [node] }
                elsif match = path.match(fa_user_matcher)
                  fa_url_name = match[1]
                  fa_user_url_name_to_node[fa_url_name] = node
                  next { node_whitelist: [node] }
                end
              end

              # Don't allow any other links
              node.replace(node.children)
            end
          end,
        ],
      )

    fragment = Nokogiri::HTML5.fragment(sanitizer.send(:preprocess, html))
    sanitizer.node!(fragment)

    if fa_post_id_to_node.any?
      # Batch load posts and their titles
      posts_by_id =
        Domain::Fa::Post.where(fa_id: fa_post_id_to_node.keys).index_by(&:fa_id)

      # Replace the link text with post titles if available
      fa_post_id_to_node.each do |fa_id, node|
        if (post = posts_by_id[fa_id])
          node.replace(
            Nokogiri::HTML5.fragment(
              render(
                partial: "domain/fa/posts/description_inline_link_fa_post",
                locals: {
                  post: post,
                },
              ),
            ),
          )
        else
          node.replace(node.children)
        end
      end
    end

    if fa_user_url_name_to_node.any?
      # Batch load users and their names
      users_by_url_name =
        Domain::Fa::User
          .where(url_name: fa_user_url_name_to_node.keys)
          .includes(:avatar)
          .index_by(&:url_name)

      # Replace the link text with user names if available
      fa_user_url_name_to_node.each do |fa_url_name, node|
        if (user = users_by_url_name[fa_url_name])
          node.replace(
            Nokogiri::HTML5.fragment(
              render(
                partial: "domain/fa/posts/description_inline_link_fa_user",
                locals: {
                  user: user,
                },
              ),
            ),
          )
        else
          node.replace(node.children)
        end
      end
    end

    raw fragment.to_html(preserve_newline: true)
  end
end
@@ -1,2 +1,131 @@
# typed: false
module Domain::Fa::UsersHelper
  extend T::Sig

  def avatar_url(sha256, thumb: "32-avatar")
    blob_path(HexUtil.bin2hex(sha256), format: "jpg", thumb: thumb)
  end

  def fa_user_avatar_path(user, thumb: nil)
    if (sha256 = user.avatar&.file_sha256)
      blob_path(HexUtil.bin2hex(sha256), format: "jpg", thumb: thumb)
    else
      # default / 'not found' avatar image
      # "/blobs/9080fd4e7e23920eb2dccfe2d86903fc3e748eebb2e5aa8c657bbf6f3d941cdc/contents.jpg"
      asset_path("user-circle.svg")
    end
  end

  def sanitized_fa_user_profile_html(html)
    # try to preload all the FA usernames in the profile
    maybe_url_names =
      Nokogiri
        .HTML(html)
        .css("a")
        .flat_map do |node|
          href = URI.parse(node["href"])
          right_host = href.host.nil? || href.host == "www.furaffinity.net"
          right_path = href.path =~ %r{/user/.+}
          if right_host && right_path
            [href]
          else
            []
          end
        end
        .map { |href| href.path.split("/")[2]&.downcase }

    preloaded_users =
      Domain::Fa::User
        .where(url_name: maybe_url_names)
        .select(:id, :state, :state_detail, :log_entry_detail, :url_name)
        .joins(:avatar)
        .includes(:avatar)
        .index_by(&:url_name)

    raw Sanitize.fragment(
      html,
      elements: %w[br img b i span strong],
      attributes: {
        "span" => %w[style],
        "a" => [],
      },
      css: {
        properties: %w[font-size color],
      },
      transformers:
        lambda do |env|
          return unless env[:node_name] == "a"
          node = env[:node]
          href = URI.parse(node["href"])
          unless href.host.nil? || href.host == "www.furaffinity.net"
            return
          end
          return unless href.path =~ %r{/user/.+}
          url_name = href.path.split("/")[2]&.downcase
          Sanitize.node!(
            node,
            { elements: %w[a], attributes: { "a" => %w[href] } },
          )
          node["href"] = domain_fa_user_path(url_name)
          node["class"] = "text-slate-200 underline decoration-slate-200 " +
            "decoration-dashed decoration-1"

          whitelist = [node]

          user =
            preloaded_users[url_name] ||
              Domain::Fa::User.find_by(url_name: url_name)
          if user
            img = Nokogiri::XML::Node.new("img", node.document)
            img["class"] = "inline w-5"
            img["src"] = fa_user_avatar_path(user, thumb: "32-avatar")
            node.prepend_child(img)
            whitelist << img
          end

          { node_allowlist: whitelist }
        end,
    )
  end

  # TODO - remove this once we've migrated similarity scores to new user model
  sig do
    params(
      user: Domain::User::FaUser,
      limit: Integer,
      exclude_followed_by: T.nilable(Domain::User::FaUser),
    ).returns(T::Array[Domain::User::FaUser])
  end
  def similar_users_by_followed(user, limit: 10, exclude_followed_by: nil)
    factors = Domain::Factors::UserUserFollowToFactors.find_by(user: user)
    return [] if factors.nil?

    relation =
      Domain::NeighborFinder
        .find_neighbors(factors)
        .limit(limit)
        .includes(:user)
    if exclude_followed_by
      relation =
        relation.where.not(
          user_id: exclude_followed_by.followed_users.select(:to_id),
        )
    end

    relation.map { |factor| factor.user }
  end

  def fa_user_account_status(user)
    log_entry_id = user.log_entry_detail["last_user_page_id"]
    return "unknown" if log_entry_id.nil?
    log_entry = HttpLogEntry.find_by(id: log_entry_id)
    return "unknown" if log_entry.nil?
    parser =
      Domain::Fa::Parser::Page.new(
        log_entry.response.contents,
        require_logged_in: false,
      )
    return "unknown" unless parser.probably_user_page?
    parser.user_page.account_status
  end
end