Compare commits
646 Commits
dymk--grap
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3174032ac3 | ||
|
|
4f7217abf0 | ||
|
|
fa94d90474 | ||
|
|
7f7728366b | ||
|
|
1905575d19 | ||
|
|
3021bc4a97 | ||
|
|
deff73d73a | ||
|
|
9e0e8a8d3c | ||
|
|
8bd6c4b2ae | ||
|
|
6381067235 | ||
|
|
9b13bec296 | ||
|
|
d1f791d598 | ||
|
|
7243635238 | ||
|
|
a76b10634e | ||
|
|
7bca1452e4 | ||
|
|
26d82fca77 | ||
|
|
d2789f47dc | ||
|
|
4830a4ce54 | ||
|
|
7f521b30e9 | ||
|
|
a68e5b0112 | ||
|
|
8376dfc662 | ||
|
|
cb3b52bf41 | ||
|
|
8e98a5ee4b | ||
|
|
a8f258d5ef | ||
|
|
15ea73a350 | ||
|
|
6bf64cf8c6 | ||
|
|
c5dc181187 | ||
|
|
b6e3912ccb | ||
|
|
73f6f77596 | ||
|
|
cdcd574d02 | ||
|
|
8d6953c758 | ||
|
|
558c4f940e | ||
|
|
c1b63275e8 | ||
|
|
fd97d145cb | ||
|
|
87fda1a475 | ||
|
|
cbb08ba8c0 | ||
|
|
130d77419a | ||
|
|
598993abaf | ||
|
|
d06347a771 | ||
|
|
0fd4d13673 | ||
|
|
df02fd3077 | ||
|
|
5b12e28fb7 | ||
|
|
64a65d1490 | ||
|
|
a1fab9e645 | ||
|
|
1e46e42352 | ||
|
|
43876ef7c4 | ||
|
|
4d456ee73d | ||
|
|
b6e2e5e502 | ||
|
|
2acf31c70a | ||
|
|
3c83ed3ba7 | ||
|
|
1058a53d18 | ||
|
|
5646e388be | ||
|
|
c1310c6dcc | ||
|
|
62f14d10d4 | ||
|
|
2a8d631b29 | ||
|
|
90d2cce076 | ||
|
|
db6f2ce92e | ||
|
|
ca937eb2bc | ||
|
|
981bea5016 | ||
|
|
66e97ba5c7 | ||
|
|
7d07a18a80 | ||
|
|
e9ac97be29 | ||
|
|
cfffe50541 | ||
|
|
1d248c1f23 | ||
|
|
e1933104b3 | ||
|
|
419a1503f2 | ||
|
|
9a113fe2be | ||
|
|
c78dd401c7 | ||
|
|
b33a267a83 | ||
|
|
6bb0b255fb | ||
|
|
1357eb9095 | ||
|
|
dea2071662 | ||
|
|
6df6f63060 | ||
|
|
420a44a27d | ||
|
|
2de7f85a99 | ||
|
|
171ddd430b | ||
|
|
ad0675a9aa | ||
|
|
d08c896d97 | ||
|
|
127dd9be51 | ||
|
|
390f0939b0 | ||
|
|
40c6d44100 | ||
|
|
ded26741a8 | ||
|
|
eba4b58666 | ||
|
|
5c71fc6b15 | ||
|
|
f2f8a9c34a | ||
|
|
36ceae80fe | ||
|
|
5f5a54d68f | ||
|
|
e30e20b033 | ||
|
|
608044e8fb | ||
|
|
9efeedd1ff | ||
|
|
3512c3f32e | ||
|
|
e9f3b0e822 | ||
|
|
6b8fce7ddc | ||
|
|
65b0c519aa | ||
|
|
e9601c4719 | ||
|
|
ceadf699e8 | ||
|
|
51bcfd246f | ||
|
|
fe2f6e8b90 | ||
|
|
e78baa6594 | ||
|
|
ff18b5f75c | ||
|
|
24a59d50f2 | ||
|
|
baed10db21 | ||
|
|
6bf85456d1 | ||
|
|
a935e226ba | ||
|
|
1a02767051 | ||
|
|
45cac0b1b8 | ||
|
|
089a91918c | ||
|
|
b2b8341780 | ||
|
|
49c6f574a0 | ||
|
|
ccc032ca9f | ||
|
|
e31e912de8 | ||
|
|
ec7c5f4d8d | ||
|
|
9941529101 | ||
|
|
aeabfc5150 | ||
|
|
5e0e0ce8ac | ||
|
|
da3087793e | ||
|
|
99d310cdc4 | ||
|
|
7f5a8ccc12 | ||
|
|
be5bafd400 | ||
|
|
9ffb8b3f5a | ||
|
|
87653af566 | ||
|
|
156a6775d5 | ||
|
|
83ae4ebd45 | ||
|
|
d899413d7c | ||
|
|
0fe7040935 | ||
|
|
2be92ac365 | ||
|
|
210b3b05c7 | ||
|
|
3d35a8b3b9 | ||
|
|
544f6764d4 | ||
|
|
67339142dd | ||
|
|
d892e00471 | ||
|
|
d85d04ea53 | ||
|
|
6fa15fdafc | ||
|
|
611b20c146 | ||
|
|
a497fa4adf | ||
|
|
35fe54ccc7 | ||
|
|
a3898e8dba | ||
|
|
68776f74c6 | ||
|
|
b0356111b6 | ||
|
|
9f33f26b2b | ||
|
|
4e2bd344fa | ||
|
|
55dfc81436 | ||
|
|
118a0c58c2 | ||
|
|
e598529639 | ||
|
|
eefcd9eb93 | ||
|
|
7aaf471f09 | ||
|
|
335c1a3c6d | ||
|
|
154c9787d6 | ||
|
|
1bab697a43 | ||
|
|
0ecada567d | ||
|
|
3ad3517138 | ||
|
|
4e94a8911c | ||
|
|
8bcdd9b451 | ||
|
|
6d8499c7bb | ||
|
|
636642152a | ||
|
|
faefcf92de | ||
|
|
7c25988943 | ||
|
|
ace981c7b6 | ||
|
|
d8143c000c | ||
|
|
6cee2e4ea4 | ||
|
|
beeb38c111 | ||
|
|
f7f17ee3fe | ||
|
|
672b7eb5d8 | ||
|
|
4ed1c558b9 | ||
|
|
c43d1ca197 | ||
|
|
1f44ec2fa2 | ||
|
|
8b2ee14ef7 | ||
|
|
53a9498386 | ||
|
|
bc4143ae12 | ||
|
|
ca4729f7d1 | ||
|
|
3d2599a4ab | ||
|
|
072bbb849e | ||
|
|
56ed78faaf | ||
|
|
bd6246d29a | ||
|
|
5aeee4fe14 | ||
|
|
f11a5782e1 | ||
|
|
dca8ba4566 | ||
|
|
3356bddd60 | ||
|
|
5b67f2ad9a | ||
|
|
dffdef51cd | ||
|
|
d86612ee2e | ||
|
|
0e92d9a7e1 | ||
|
|
211d5eb62c | ||
|
|
bbca0db010 | ||
|
|
af4d84ccb1 | ||
|
|
457a4e4609 | ||
|
|
3ea2428814 | ||
|
|
430247a3ad | ||
|
|
19fc98e4ef | ||
|
|
ad229fbd4e | ||
|
|
8333a1bb3f | ||
|
|
fb436e1b75 | ||
|
|
c96b1d9cc1 | ||
|
|
e027dc9bc4 | ||
|
|
572c61cebb | ||
|
|
7134d20d71 | ||
|
|
a3d1fd7b82 | ||
|
|
7095816b19 | ||
|
|
f63d8cabe7 | ||
|
|
1470a21bbe | ||
|
|
931e736bbf | ||
|
|
538293d5c0 | ||
|
|
295020a10f | ||
|
|
8d65c2514a | ||
|
|
fe51855ebd | ||
|
|
ff8eff72a6 | ||
|
|
b1de28d80f | ||
|
|
fd61616c03 | ||
|
|
538d3919d9 | ||
|
|
537821a1a9 | ||
|
|
e4d2f7d862 | ||
|
|
df88a06e88 | ||
|
|
d925a4b682 | ||
|
|
598ffe8dbf | ||
|
|
ca7315c8ae | ||
|
|
0c5f6e84cb | ||
|
|
8e00a7578c | ||
|
|
e67eb94e44 | ||
|
|
c27b0c952d | ||
|
|
878ca2fe32 | ||
|
|
13ad697026 | ||
|
|
c832d53535 | ||
|
|
7a02282893 | ||
|
|
c37e3c1024 | ||
|
|
acc2f9a240 | ||
|
|
9f1fc93267 | ||
|
|
0e262f99b9 | ||
|
|
5f81edea92 | ||
|
|
163418c8cc | ||
|
|
aad0cb045d | ||
|
|
2a8d618a84 | ||
|
|
91a16e12a1 | ||
|
|
6c086ac9cc | ||
|
|
542e38b35a | ||
|
|
e5c37e4c9d | ||
|
|
58b8321333 | ||
|
|
d14dc6d3c1 | ||
|
|
c5b4374def | ||
|
|
9a4b1b891d | ||
|
|
7f57d4363f | ||
|
|
878cef79a6 | ||
|
|
69c845c135 | ||
|
|
1828a2a722 | ||
|
|
9187e66b80 | ||
|
|
19fdeee765 | ||
|
|
75f9ece42c | ||
|
|
7a378c0dd5 | ||
|
|
033470f9d9 | ||
|
|
4a6296efcc | ||
|
|
fd930f944a | ||
|
|
b8e0058e98 | ||
|
|
fc45188026 | ||
|
|
fada9ce179 | ||
|
|
2547702c9a | ||
|
|
12975d30f0 | ||
|
|
5b6536bfe6 | ||
|
|
dd950aeec5 | ||
|
|
61f6e390aa | ||
|
|
64f793d3dd | ||
|
|
1c0cca006f | ||
|
|
24725e50eb | ||
|
|
f68e5ed2ba | ||
|
|
b72bf18a7d | ||
|
|
60d4f3c008 | ||
|
|
bbb60d6590 | ||
|
|
1344d170f8 | ||
|
|
67170f248e | ||
|
|
94541b6b97 | ||
|
|
1a1186f36f | ||
|
|
fbed316b97 | ||
|
|
2c18b33dfe | ||
|
|
4d15a1f3c3 | ||
|
|
ffa5cdd76c | ||
|
|
8f76ce0373 | ||
|
|
df3887b56d | ||
|
|
de043393c2 | ||
|
|
cb2be5b9c3 | ||
|
|
5a20e4af98 | ||
|
|
4f8a5cfcff | ||
|
|
be36c74bbd | ||
|
|
e7fb708dc9 | ||
|
|
a2460a5327 | ||
|
|
fe5276d3ef | ||
|
|
84915a9b98 | ||
|
|
1e15e9f467 | ||
|
|
b3725e7d33 | ||
|
|
1d7204b4ee | ||
|
|
ae8984be60 | ||
|
|
11922c9d8a | ||
|
|
0427ac43c7 | ||
|
|
f79e37f256 | ||
|
|
1c051ce5b0 | ||
|
|
2f778daf54 | ||
|
|
101d383f95 | ||
|
|
967f78366e | ||
|
|
5a9efa798d | ||
|
|
306c174a01 | ||
|
|
66d1e06b4b | ||
|
|
ca5649a28f | ||
|
|
b3c33958e6 | ||
|
|
3b6ff33ec7 | ||
|
|
7b28bce448 | ||
|
|
609a4d1fd1 | ||
|
|
5ebd924780 | ||
|
|
c74cbfe4e0 | ||
|
|
2deeb2bd78 | ||
|
|
308232e01d | ||
|
|
3a06181db8 | ||
|
|
e3b2463cbe | ||
|
|
c20e7a0b7e | ||
|
|
e1c21fb2df | ||
|
|
70c65ffdbd | ||
|
|
5cb24a8065 | ||
|
|
85c4c7e75c | ||
|
|
79a6fee997 | ||
|
|
5a771fa130 | ||
|
|
b5d9d2de7f | ||
|
|
c527a05705 | ||
|
|
a2091e1555 | ||
|
|
7ee3b30180 | ||
|
|
9d2bde629a | ||
|
|
dc98e30f47 | ||
|
|
94533b6e45 | ||
|
|
7d0d94d761 | ||
|
|
dab811d784 | ||
|
|
305ddd2cb6 | ||
|
|
ed30a8c5bd | ||
|
|
55f806c5b4 | ||
|
|
a209c64149 | ||
|
|
99809041a5 | ||
|
|
1ef786fa8e | ||
|
|
7453db4ba5 | ||
|
|
0d32ef2802 | ||
|
|
f845d06267 | ||
|
|
0040bc45a2 | ||
|
|
70debe9995 | ||
|
|
994e60789d | ||
|
|
4ec27ab968 | ||
|
|
495e7dc8dd | ||
|
|
c51bd682f5 | ||
|
|
5b6a9c5737 | ||
|
|
6dbee335a7 | ||
|
|
7b58e27532 | ||
|
|
fae01bbe05 | ||
|
|
a98d9c49ee | ||
|
|
01b48b0c96 | ||
|
|
c6a7b9d49a | ||
|
|
9256d78bf5 | ||
|
|
23188f948f | ||
|
|
171b2a72c2 | ||
|
|
da422ea3aa | ||
|
|
8fdb78e8f3 | ||
|
|
93d304cdf8 | ||
|
|
954c825c9a | ||
|
|
7f0762318e | ||
|
|
585cd1b293 | ||
|
|
7048b3002d | ||
|
|
398abf48a7 | ||
|
|
87993562eb | ||
|
|
fbd146484f | ||
|
|
a1ad0f71ab | ||
|
|
79a20afcff | ||
|
|
b452e04af4 | ||
|
|
d45b6047b6 | ||
|
|
99fe3d951c | ||
|
|
ff4c374453 | ||
|
|
a296688123 | ||
|
|
5ad0828aa8 | ||
|
|
d32d583196 | ||
|
|
3490e28cb4 | ||
|
|
d4ffac3496 | ||
|
|
b1cefa763f | ||
|
|
a732c975a9 | ||
|
|
f0c8fc720f | ||
|
|
8ddad8258c | ||
|
|
9a2fac1433 | ||
|
|
42f45bf8c0 | ||
|
|
de598e863c | ||
|
|
cd60166d16 | ||
|
|
78c358d31f | ||
|
|
00d90312dc | ||
|
|
091b0ef02d | ||
|
|
d237250c4b | ||
|
|
1971bc876d | ||
|
|
e358b6dc5b | ||
|
|
23b1f0dfd3 | ||
|
|
9a779c1add | ||
|
|
8181628bb0 | ||
|
|
6b1956c029 | ||
|
|
4eb4f17208 | ||
|
|
790a283e54 | ||
|
|
d8eadfd416 | ||
|
|
44209ac89b | ||
|
|
25a86d6464 | ||
|
|
ca924afd49 | ||
|
|
abed08f5a0 | ||
|
|
3285e56c10 | ||
|
|
1d7a373d73 | ||
|
|
5737e5790e | ||
|
|
97f7e50d61 | ||
|
|
fc0fbfc46e | ||
|
|
85075fbf68 | ||
|
|
e8f5b2ee92 | ||
|
|
a09ddfa32b | ||
|
|
0718dd9cea | ||
|
|
51be64abba | ||
|
|
34337f6f57 | ||
|
|
54a8cd2fff | ||
|
|
8fc32e64e9 | ||
|
|
9c38bfce13 | ||
|
|
369fa71007 | ||
|
|
be8e52c792 | ||
|
|
dd14f1d521 | ||
|
|
dadd235204 | ||
|
|
e028e905d3 | ||
|
|
0f657d0b59 | ||
|
|
ea20ece8e2 | ||
|
|
2a9302ced9 | ||
|
|
b62f7094f4 | ||
|
|
cf5c4d28b6 | ||
|
|
50e8c059e6 | ||
|
|
e71cb9bdb2 | ||
|
|
c53665c689 | ||
|
|
a26fda0c44 | ||
|
|
61f7faa8e4 | ||
|
|
99f14b93ed | ||
|
|
870c2beb4e | ||
|
|
5defef912f | ||
|
|
7578b85e4b | ||
|
|
172f47b649 | ||
|
|
9746cefa51 | ||
|
|
1cbc94b43c | ||
|
|
e2e75cdc3d | ||
|
|
4d3a8e9fc6 | ||
|
|
d8ffd9bde1 | ||
|
|
b1cdca1513 | ||
|
|
2f9e02233b | ||
|
|
8c7305c720 | ||
|
|
26196534fd | ||
|
|
f96c5d853e | ||
|
|
258be5583d | ||
|
|
b35d6878dd | ||
|
|
304b9bd5d0 | ||
|
|
02f40215e9 | ||
|
|
13f078eb34 | ||
|
|
72191c1d93 | ||
|
|
2a934490ba | ||
|
|
513267327d | ||
|
|
6a5815d186 | ||
|
|
025b4ac8e4 | ||
|
|
76c701a97f | ||
|
|
8817d21e0f | ||
|
|
0a651bab9d | ||
|
|
fdd1200396 | ||
|
|
3a14c6946f | ||
|
|
5c304ae7e9 | ||
|
|
f079fc0f98 | ||
|
|
ea4b36a1d8 | ||
|
|
7339c1153c | ||
|
|
20aa7871ea | ||
|
|
fcd9a7fbab | ||
|
|
db174a0661 | ||
|
|
fc8b75f014 | ||
|
|
93259d2676 | ||
|
|
cc3482d62a | ||
|
|
e781ed8f43 | ||
|
|
c7e7bceb74 | ||
|
|
02a9fd4b34 | ||
|
|
52498b3cc2 | ||
|
|
15c11b2b89 | ||
|
|
2681502c3e | ||
|
|
7bc3ed07ee | ||
|
|
9e8f2651db | ||
|
|
ec26e425c6 | ||
|
|
32173b50d8 | ||
|
|
553d0bea8d | ||
|
|
3cfa166b4a | ||
|
|
ca914dbe25 | ||
|
|
e47b2997c8 | ||
|
|
b1b8a6c780 | ||
|
|
652463a273 | ||
|
|
7e02771431 | ||
|
|
7a7cc84b94 | ||
|
|
18b9bf531c | ||
|
|
7e19e78073 | ||
|
|
5c1807711b | ||
|
|
d6ff5f2ebf | ||
|
|
23e7eea815 | ||
|
|
874a92d1b5 | ||
|
|
2381df7d10 | ||
|
|
355da6932b | ||
|
|
1c86e73df9 | ||
|
|
9391a2cfe1 | ||
|
|
9265b55876 | ||
|
|
bded4ba2bf | ||
|
|
432c13be42 | ||
|
|
b5cae62f0e | ||
|
|
a83b790386 | ||
|
|
b672b7ed68 | ||
|
|
dba9fc1d4e | ||
|
|
29f671b41f | ||
|
|
a15e79a231 | ||
|
|
eca9b2e5cb | ||
|
|
f2074a8544 | ||
|
|
276a4adc0c | ||
|
|
0715fa86e7 | ||
|
|
9de3c1b5e8 | ||
|
|
aad2f29e96 | ||
|
|
752b8c3b2c | ||
|
|
404baccbdf | ||
|
|
6263660341 | ||
|
|
75c2969c5e | ||
|
|
4492e7bb74 | ||
|
|
3dbde5ba95 | ||
|
|
a12e4d718d | ||
|
|
8b80a5e3cf | ||
|
|
984614e04b | ||
|
|
fb8db13ba5 | ||
|
|
cdde756529 | ||
|
|
b3784b24d4 | ||
|
|
3f516938c2 | ||
|
|
ca8e69e9bc | ||
|
|
7c019a4074 | ||
|
|
6d94274bef | ||
|
|
72f78b6499 | ||
|
|
db258633cb | ||
|
|
1e249e0ece | ||
|
|
3ccc08d0c7 | ||
|
|
bab5ff1e34 | ||
|
|
48cfa13427 | ||
|
|
d21bacb9a3 | ||
|
|
fc48506a68 | ||
|
|
fcb19bcd02 | ||
|
|
fb1cc88b6f | ||
|
|
870329c086 | ||
|
|
33d0e02203 | ||
|
|
52dd497d4c | ||
|
|
a4247ff8b0 | ||
|
|
1ed1b99475 | ||
|
|
0b0acd60f2 | ||
|
|
7824deac2d | ||
|
|
b7a14b7c58 | ||
|
|
4ce8c197c3 | ||
|
|
5315534317 | ||
|
|
14c728e6cd | ||
|
|
1a7b6c7c3c | ||
|
|
904f62ca4f | ||
|
|
bdfd057cc8 | ||
|
|
31d461d9eb | ||
|
|
9134c3cba5 | ||
|
|
63994d5a62 | ||
|
|
cb2270aab0 | ||
|
|
bdf60b6d7c | ||
|
|
4d2afe1371 | ||
|
|
53aa6a6e73 | ||
|
|
79c2b9728e | ||
|
|
cb3aaadd29 | ||
|
|
e1c10b150c | ||
|
|
b608d3ed21 | ||
|
|
026d78bc29 | ||
|
|
be5165f66d | ||
|
|
18a29fb57c | ||
|
|
52320955b3 | ||
|
|
1e4a3905f5 | ||
|
|
8e39f5c2f3 | ||
|
|
7cee795ea2 | ||
|
|
c4d386457c | ||
|
|
8935cb4f5f | ||
|
|
00c6dacef1 | ||
|
|
57c87d2b3e | ||
|
|
5155195cf8 | ||
|
|
869aa611ba | ||
|
|
ddd47f7520 | ||
|
|
a1c287304b | ||
|
|
dfa6202b5e | ||
|
|
aedc9f4fcd | ||
|
|
f4fadd84d0 | ||
|
|
000e040caa | ||
|
|
94d49db716 | ||
|
|
7db5d052d9 | ||
|
|
5419b761f3 | ||
|
|
c16578f2aa | ||
|
|
a8a619e114 | ||
|
|
b8ac3a9d24 | ||
|
|
1be5811e14 | ||
|
|
2f7fbf29fe | ||
|
|
dd4da3598f | ||
|
|
28df3e953a | ||
|
|
149f0c5905 | ||
|
|
baf50fbc4d | ||
|
|
c387143134 | ||
|
|
579cdf53c6 | ||
|
|
5a7b0d1e6e | ||
|
|
4e62fd4291 | ||
|
|
0406cf40c6 | ||
|
|
29fc1d3bc6 | ||
|
|
53a87dd829 | ||
|
|
813657a60b | ||
|
|
67d1ce9f38 | ||
|
|
96aff2e233 | ||
|
|
43a508cf94 | ||
|
|
188b1673a4 | ||
|
|
0e8d3d633f | ||
|
|
e3c778316d | ||
|
|
5dd51475db | ||
|
|
eae637c3b6 | ||
|
|
75962d6960 | ||
|
|
b28ccc1125 | ||
|
|
09bee4911d | ||
|
|
b1eb25f961 | ||
|
|
0a83f4f1bd | ||
|
|
10cbee3c0d | ||
|
|
745df8965e | ||
|
|
af6c359b8b | ||
|
|
c498bdff4f | ||
|
|
091926f9be | ||
|
|
b33028eb6e | ||
|
|
2fe46ae778 | ||
|
|
246e95b385 | ||
|
|
7108f33a01 | ||
|
|
92d79a9f9d | ||
|
|
69f1d19ef2 | ||
|
|
d19acdeacd | ||
|
|
a2c3262d1e | ||
|
|
f41bafb53b | ||
|
|
564e6e8860 | ||
|
|
f808c736cd | ||
|
|
9f2c82edce | ||
|
|
b7b2e368b8 | ||
|
|
a856cbecf1 | ||
|
|
72de85405c | ||
|
|
ba89049897 | ||
|
|
9dab7cdeb5 | ||
|
|
6321b7ebb3 | ||
|
|
c8d44a33d7 | ||
|
|
08fe22e51f | ||
|
|
690eccd970 | ||
|
|
a09f3281eb | ||
|
|
41bcabcf4a | ||
|
|
3f349bb000 | ||
|
|
44662be0e4 | ||
|
|
3dbd800c58 | ||
|
|
9e824b38a3 | ||
|
|
48c99c6fb0 | ||
|
|
deb7415650 | ||
|
|
df570589d4 |
32
.cursorignore
Normal file
@@ -0,0 +1,32 @@
|
||||
# Add directories or file patterns to ignore during indexing (e.g. foo/ or *.csv)
|
||||
config/database.yml
|
||||
config/cookies/fa.yml
|
||||
tmp
|
||||
log
|
||||
public
|
||||
.bundle
|
||||
gems
|
||||
|
||||
# Generated/build artifacts
|
||||
node_modules
|
||||
user_scripts/dist
|
||||
app/assets/builds
|
||||
vendor/javascript
|
||||
|
||||
# Sorbet generated files
|
||||
sorbet/tapioca
|
||||
sorbet/rbi/gems
|
||||
sorbet/rbi/annotations
|
||||
sorbet/rbi/dsl
|
||||
|
||||
# Configuration files with secrets
|
||||
config/credentials.yml.enc
|
||||
config/master.key
|
||||
|
||||
# Lock files
|
||||
yarn.lock
|
||||
Gemfile.lock
|
||||
|
||||
# Documentation
|
||||
TODO.md
|
||||
*.notes.md
|
||||
278
.cursorrules
Normal file
@@ -0,0 +1,278 @@
|
||||
# How to use this codebase
|
||||
|
||||
- Run `bin/tapioca dsl` after changing a model or concern.
|
||||
- Run `bin/tapioca gems` after changing the Gemfile.
|
||||
- Run `srb tc` after making changes to Ruby files to ensure the codebase is typechecked.
|
||||
- Run `bin/rspec <path_to_spec_file>` after a spec file is modified.
|
||||
- Run `tapioca dsl` if models or concerns are modified.
|
||||
- Run `bin/rspec <path_to_spec_file>` to run tests for a single file.
|
||||
- There are no view-specific tests, so if a view changes then run the controller tests instead.
|
||||
- For instance, if you modify `app/models/domain/post.rb`, run `bin/rspec spec/models/domain/post_spec.rb`. If you modify `app/views/domain/users/index.html.erb`, run `bin/rspec spec/controllers/domain/users_controller_spec.rb`.
|
||||
- At the end of a long series of changes, run `just test`.
|
||||
- If specs are failing, then fix the failures, and rerun with `bin/rspec <path_to_spec_file>`.
|
||||
- If you need to add logging to a Job to debug it, set `quiet: false` on the spec you are debugging.
|
||||
- Fish shell is used for development, not bash.
|
||||
- When running scratch commands, use `bin/rails runner`, not `bin/rails console`.
|
||||
|
||||
# Typescript Development
|
||||
|
||||
- React is the only frontend framework used.
|
||||
- Styling is done with Tailwind CSS and FontAwesome.
|
||||
- Put new typescript files in `app/javascript/bundles/Main/components/`
|
||||
|
||||
# HTTP Mocking in Job Specs
|
||||
|
||||
When writing specs for jobs that make HTTP requests, use `HttpClientMockHelpers.init_with()` instead of manually creating doubles:
|
||||
|
||||
```ruby
|
||||
# CORRECT: Use HttpClientMockHelpers.init_with
|
||||
let(:client_mock_config) do
|
||||
[
|
||||
{
|
||||
uri: "https://example.com/api/first-endpoint",
|
||||
status_code: 200,
|
||||
content_type: "application/json",
|
||||
contents: first_response_body,
|
||||
},
|
||||
{
|
||||
uri: "https://example.com/api/second-endpoint",
|
||||
status_code: 200,
|
||||
content_type: "application/json",
|
||||
contents: second_response_body,
|
||||
caused_by_entry: :any, # Use this for chained requests
|
||||
},
|
||||
]
|
||||
end
|
||||
|
||||
before { @log_entries = HttpClientMockHelpers.init_with(client_mock_config) }
|
||||
|
||||
# WRONG: Don't create doubles manually
|
||||
expect(http_client_mock).to receive(:get).and_return(
|
||||
double(status_code: 200, body: response_body, log_entry: double),
|
||||
)
|
||||
|
||||
# WRONG: Don't use the old init_http_client_mock method
|
||||
@log_entries =
|
||||
HttpClientMockHelpers.init_http_client_mock(
|
||||
http_client_mock,
|
||||
client_mock_config,
|
||||
)
|
||||
```
|
||||
|
||||
This pattern:
|
||||
|
||||
- Uses the preferred `init_with` helper method
|
||||
- Automatically uses the global `http_client_mock` from `spec_helper.rb`
|
||||
- Creates real HttpLogEntry objects that can be serialized by ActiveJob
|
||||
- Follows the established codebase pattern
|
||||
- Avoids "Unsupported argument type: RSpec::Mocks::Double" errors
|
||||
- Use `caused_by_entry: :any` for HTTP requests that are chained (where one request's log entry becomes the `caused_by_entry` for the next request)
|
||||
- No need to manually set up `http_client_mock` - it's handled globally in `spec_helper.rb`
|
||||
|
||||
# Job Enqueuing Verification in Specs
|
||||
|
||||
Use `SpecUtil.enqueued_job_args()` instead of mocking `perform_later`:
|
||||
|
||||
```ruby
|
||||
# CORRECT: Test actual job enqueuing
|
||||
enqueued_jobs = SpecUtil.enqueued_job_args(SomeJob)
|
||||
expect(enqueued_jobs).to contain_exactly(hash_including(user: user))
|
||||
expect(enqueued_jobs).to be_empty # For no jobs
|
||||
|
||||
# WRONG: Don't mock perform_later (breaks with .set chaining)
|
||||
expect(SomeJob).to receive(:perform_later)
|
||||
```
|
||||
|
||||
Benefits: More robust, tests actual behavior, no cleanup needed (tests run in transactions).
|
||||
|
||||
# Testing Jobs
|
||||
|
||||
When writing specs for jobs e.g. Domain::Site::SomethingJob, do not invoke `job.perform(...)` directly, always use `perform_now(...)` (defined in spec/helpers/perform_job_helpers.rb)
|
||||
|
||||
# === BACKLOG.MD GUIDELINES START ===
|
||||
|
||||
# Instructions for the usage of Backlog.md CLI Tool
|
||||
|
||||
## 1. Source of Truth
|
||||
|
||||
- Tasks live under **`backlog/tasks/`** (drafts under **`backlog/drafts/`**).
|
||||
- Every implementation decision starts with reading the corresponding Markdown task file.
|
||||
- Project documentation is in **`backlog/docs/`**.
|
||||
- Project decisions are in **`backlog/decisions/`**.
|
||||
|
||||
## 2. Defining Tasks
|
||||
|
||||
### **Title**
|
||||
|
||||
Use a clear brief title that summarizes the task.
|
||||
|
||||
### **Description**: (The **"why"**)
|
||||
|
||||
Provide a concise summary of the task purpose and its goal. Do not add implementation details here. It
|
||||
should explain the purpose and context of the task. Code snippets should be avoided.
|
||||
|
||||
### **Acceptance Criteria**: (The **"what"**)
|
||||
|
||||
List specific, measurable outcomes that define what means to reach the goal from the description. Use checkboxes (`- [ ]`) for tracking.
|
||||
When defining `## Acceptance Criteria` for a task, focus on **outcomes, behaviors, and verifiable requirements** rather
|
||||
than step-by-step implementation details.
|
||||
Acceptance Criteria (AC) define _what_ conditions must be met for the task to be considered complete.
|
||||
They should be testable and confirm that the core purpose of the task is achieved.
|
||||
**Key Principles for Good ACs:**
|
||||
|
||||
- **Outcome-Oriented:** Focus on the result, not the method.
|
||||
- **Testable/Verifiable:** Each criterion should be something that can be objectively tested or verified.
|
||||
- **Clear and Concise:** Unambiguous language.
|
||||
- **Complete:** Collectively, ACs should cover the scope of the task.
|
||||
- **User-Focused (where applicable):** Frame ACs from the perspective of the end-user or the system's external behavior.
|
||||
- _Good Example:_ "- [ ] User can successfully log in with valid credentials."
|
||||
- _Good Example:_ "- [ ] System processes 1000 requests per second without errors."
|
||||
- _Bad Example (Implementation Step):_ "- [ ] Add a new function `handleLogin()` in `auth.ts`."
|
||||
|
||||
### Task file
|
||||
|
||||
Once a task is created it will be stored in `backlog/tasks/` directory as a Markdown file with the format
|
||||
`task-<id> - <title>.md` (e.g. `task-42 - Add GraphQL resolver.md`).
|
||||
|
||||
### Additional task requirements
|
||||
|
||||
- Tasks must be **atomic** and **testable**. If a task is too large, break it down into smaller subtasks.
|
||||
Each task should represent a single unit of work that can be completed in a single PR.
|
||||
|
||||
- **Never** reference tasks that are to be done in the future or that are not yet created. You can only reference
|
||||
previous
|
||||
tasks (id < current task id).
|
||||
|
||||
- When creating multiple tasks, ensure they are **independent** and they do not depend on future tasks.
|
||||
Example of wrong tasks splitting: task 1: "Add API endpoint for user data", task 2: "Define the user model and DB
|
||||
schema".
|
||||
Example of correct tasks splitting: task 1: "Add system for handling API requests", task 2: "Add user model and DB
|
||||
schema", task 3: "Add API endpoint for user data".
|
||||
|
||||
## 3. Recommended Task Anatomy
|
||||
|
||||
```markdown
|
||||
# task‑42 - Add GraphQL resolver
|
||||
|
||||
## Description (the why)
|
||||
|
||||
Short, imperative explanation of the goal of the task and why it is needed.
|
||||
|
||||
## Acceptance Criteria (the what)
|
||||
|
||||
- [ ] Resolver returns correct data for happy path
|
||||
- [ ] Error response matches REST
|
||||
- [ ] P95 latency ≤ 50 ms under 100 RPS
|
||||
|
||||
## Implementation Plan (the how)
|
||||
|
||||
1. Research existing GraphQL resolver patterns
|
||||
2. Implement basic resolver with error handling
|
||||
3. Add performance monitoring
|
||||
4. Write unit and integration tests
|
||||
5. Benchmark performance under load
|
||||
|
||||
## Implementation Notes (only added after working on the task)
|
||||
|
||||
- Approach taken
|
||||
- Features implemented or modified
|
||||
- Technical decisions and trade-offs
|
||||
- Modified or added files
|
||||
```
|
||||
|
||||
## 6. Implementing Tasks
|
||||
|
||||
Mandatory sections for every task:
|
||||
|
||||
- **Implementation Plan**: (The **"how"**) Outline the steps to achieve the task. Because the implementation details may
|
||||
change after the task is created, **the implementation notes must be added only after putting the task in progress**
|
||||
and before starting working on the task.
|
||||
- **Implementation Notes**: Document your approach, decisions, challenges, and any deviations from the plan. This
|
||||
section is added after you are done working on the task. It should summarize what you did and why you did it. Keep it
|
||||
concise but informative.
|
||||
|
||||
**IMPORTANT**: Do not implement anything else that deviates from the **Acceptance Criteria**. If you need to
|
||||
implement something that is not in the AC, update the AC first and then implement it or create a new task for it.
|
||||
|
||||
## 2. Typical Workflow
|
||||
|
||||
```bash
|
||||
# 1 Identify work
|
||||
backlog task list -s "To Do" --plain
|
||||
|
||||
# 2 Read details & documentation
|
||||
backlog task 42 --plain
|
||||
# Read also all documentation files in `backlog/docs/` directory.
|
||||
# Read also all decision files in `backlog/decisions/` directory.
|
||||
|
||||
# 3 Start work: assign yourself & move column
|
||||
backlog task edit 42 -a @{yourself} -s "In Progress"
|
||||
|
||||
# 4 Add implementation plan before starting
|
||||
backlog task edit 42 --plan "1. Analyze current implementation\n2. Identify bottlenecks\n3. Refactor in phases"
|
||||
|
||||
# 5 Break work down if needed by creating subtasks or additional tasks
|
||||
backlog task create "Refactor DB layer" -p 42 -a @{yourself} -d "Description" --ac "Tests pass,Performance improved"
|
||||
|
||||
# 6 Complete and mark Done
|
||||
backlog task edit 42 -s Done --notes "Implemented GraphQL resolver with error handling and performance monitoring"
|
||||
```
|
||||
|
||||
### 7. Final Steps Before Marking a Task as Done
|
||||
|
||||
Always ensure you have:
|
||||
|
||||
1. ✅ Marked all acceptance criteria as completed (change `- [ ]` to `- [x]`)
|
||||
2. ✅ Added an `## Implementation Notes` section documenting your approach
|
||||
3. ✅ Run all tests and linting checks
|
||||
4. ✅ Updated relevant documentation
|
||||
|
||||
## 8. Definition of Done (DoD)
|
||||
|
||||
A task is **Done** only when **ALL** of the following are complete:
|
||||
|
||||
1. **Acceptance criteria** checklist in the task file is fully checked (all `- [ ]` changed to `- [x]`).
|
||||
2. **Implementation plan** was followed or deviations were documented in Implementation Notes.
|
||||
3. **Automated tests** (unit + integration) cover new logic.
|
||||
4. **Static analysis**: linter & formatter succeed.
|
||||
5. **Documentation**:
|
||||
- All relevant docs updated (any relevant README file, backlog/docs, backlog/decisions, etc.).
|
||||
- Task file **MUST** have an `## Implementation Notes` section added summarising:
|
||||
- Approach taken
|
||||
- Features implemented or modified
|
||||
- Technical decisions and trade-offs
|
||||
- Modified or added files
|
||||
6. **Review**: self review code.
|
||||
7. **Task hygiene**: status set to **Done** via CLI (`backlog task edit <id> -s Done`).
|
||||
8. **No regressions**: performance, security and licence checks green.
|
||||
|
||||
⚠️ **IMPORTANT**: Never mark a task as Done without completing ALL items above.
|
||||
|
||||
## 9. Handy CLI Commands
|
||||
|
||||
| Purpose | Command |
|
||||
| ---------------- | ---------------------------------------------------------------------- |
|
||||
| Create task | `backlog task create "Add OAuth"` |
|
||||
| Create with desc | `backlog task create "Feature" -d "Enables users to use this feature"` |
|
||||
| Create with AC | `backlog task create "Feature" --ac "Must work,Must be tested"` |
|
||||
| Create with deps | `backlog task create "Feature" --dep task-1,task-2` |
|
||||
| Create sub task | `backlog task create -p 14 "Add Google auth"` |
|
||||
| List tasks | `backlog task list --plain` |
|
||||
| View detail | `backlog task 7 --plain` |
|
||||
| Edit | `backlog task edit 7 -a @{yourself} -l auth,backend` |
|
||||
| Add plan | `backlog task edit 7 --plan "Implementation approach"` |
|
||||
| Add AC | `backlog task edit 7 --ac "New criterion,Another one"` |
|
||||
| Add deps | `backlog task edit 7 --dep task-1,task-2` |
|
||||
| Add notes | `backlog task edit 7 --notes "We added this and that feature because"` |
|
||||
| Mark as done | `backlog task edit 7 -s "Done"` |
|
||||
| Archive | `backlog task archive 7` |
|
||||
| Draft flow | `backlog draft create "Spike GraphQL"` → `backlog draft promote 3.1` |
|
||||
| Demote to draft | `backlog task demote <task-id>` |
|
||||
|
||||
## 10. Tips for AI Agents
|
||||
|
||||
- **Always use `--plain` flag** when listing or viewing tasks for AI-friendly text output instead of using Backlog.md
|
||||
interactive UI.
|
||||
- When users mention to create a task, they mean to create a task using Backlog.md CLI tool.
|
||||
|
||||
# === BACKLOG.MD GUIDELINES END ===
|
||||
147
.devcontainer/Dockerfile.devcontainer
Normal file
@@ -0,0 +1,147 @@
|
||||
# Primary image
|
||||
FROM mcr.microsoft.com/devcontainers/base:debian-12
|
||||
|
||||
# apt caching & install packages
|
||||
RUN rm -f /etc/apt/apt.conf.d/docker-clean && \
|
||||
echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
|
||||
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && \
|
||||
apt-get install --no-install-recommends --no-install-suggests -qqy \
|
||||
abiword \
|
||||
autoconf \
|
||||
build-essential \
|
||||
ca-certificates \
|
||||
curl \
|
||||
ffmpeg \
|
||||
ffmpegthumbnailer \
|
||||
file \
|
||||
gnupg \
|
||||
iputils-ping \
|
||||
libblas-dev \
|
||||
libdb-dev \
|
||||
libffi-dev \
|
||||
libgdbm-dev \
|
||||
libgdbm6 \
|
||||
libgmp-dev \
|
||||
libicu-dev \
|
||||
liblapack-dev \
|
||||
libncurses5-dev \
|
||||
libpq-dev \
|
||||
libreadline6-dev \
|
||||
libreoffice \
|
||||
libsqlite3-dev \
|
||||
libssl-dev \
|
||||
libyaml-dev \
|
||||
patch \
|
||||
pdftohtml \
|
||||
pkg-config \
|
||||
rustc \
|
||||
uuid-dev \
|
||||
watchman \
|
||||
zlib1g-dev
|
||||
|
||||
# Install vips dependencies
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && \
|
||||
apt-get install --no-install-recommends --no-install-suggests -qqy \
|
||||
automake \
|
||||
gtk-doc-tools \
|
||||
gobject-introspection \
|
||||
libgirepository1.0-dev \
|
||||
libglib2.0-dev \
|
||||
libexpat1-dev \
|
||||
libjpeg-dev \
|
||||
libpng-dev \
|
||||
libtiff5-dev \
|
||||
libwebp-dev \
|
||||
libheif-dev \
|
||||
libexif-dev \
|
||||
liblcms2-dev \
|
||||
libxml2-dev \
|
||||
libfftw3-dev \
|
||||
liborc-0.4-dev \
|
||||
libcgif-dev \
|
||||
libjxl-dev \
|
||||
libopenjp2-7-dev \
|
||||
meson \
|
||||
ninja-build
|
||||
|
||||
|
||||
# Install imagemagick from source
|
||||
RUN cd /tmp && \
|
||||
wget -qO- https://imagemagick.org/archive/releases/ImageMagick-7.1.2-1.tar.xz | tar -xJ && \
|
||||
cd ImageMagick-7.1.2-1 && \
|
||||
./configure && \
|
||||
make -j$(nproc) && \
|
||||
make install && \
|
||||
ldconfig && \
|
||||
cd / && \
|
||||
rm -rf /tmp/ImageMagick-7.1.2-1*
|
||||
|
||||
# Install vips from source
|
||||
RUN cd /tmp && \
|
||||
wget -qO- https://github.com/libvips/libvips/releases/download/v8.17.1/vips-8.17.1.tar.xz | tar -xJ && \
|
||||
cd vips-8.17.1 && \
|
||||
meson setup build --prefix=/usr/local -Dcgif=enabled && \
|
||||
cd build && \
|
||||
ninja && \
|
||||
ninja install && \
|
||||
ldconfig && \
|
||||
cd / && \
|
||||
rm -rf /tmp/vips-8.17.1*
|
||||
|
||||
# Install postgres 15 client
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
sudo install -d /usr/share/postgresql-common/pgdg && \
|
||||
curl -o /usr/share/postgresql-common/pgdg/apt.postgresql.org.asc --fail https://www.postgresql.org/media/keys/ACCC4CF8.asc && \
|
||||
sh -c 'echo "deb [signed-by=/usr/share/postgresql-common/pgdg/apt.postgresql.org.asc] https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' && \
|
||||
sudo apt update && \
|
||||
sudo apt-get install --no-install-recommends --no-install-suggests -qqy \
|
||||
postgresql-client-17
|
||||
|
||||
# Install & configure delta diff tool
|
||||
RUN wget -O- https://github.com/dandavison/delta/releases/download/0.18.2/git-delta_0.18.2_amd64.deb > /tmp/git-delta.deb && \
|
||||
sudo dpkg -i /tmp/git-delta.deb && \
|
||||
rm /tmp/git-delta.deb
|
||||
|
||||
RUN git config --system core.pager "delta" && \
|
||||
git config --system interactive.diffFilter "delta --color-only" && \
|
||||
git config --system delta.navigate "true" && \
|
||||
git config --system delta.dark "true" && \
|
||||
git config --system delta.side-by-side "true" && \
|
||||
git config --system merge.conflictstyle "zdiff3" && \
|
||||
git config --system core.editor "cursor --wait" && \
|
||||
git config --system diff.algorithm "histogram" && \
|
||||
git config --system diff.colorMoved "default"
|
||||
|
||||
# Install ruby
|
||||
USER vscode
|
||||
RUN git clone https://github.com/rbenv/rbenv.git ~/.rbenv
|
||||
ENV PATH="/home/vscode/.rbenv/bin:/home/vscode/.rbenv/shims:$PATH"
|
||||
RUN echo 'eval "$(rbenv init - --no-rehash bash)"' >> ~/.bashrc
|
||||
RUN git clone https://github.com/rbenv/ruby-build.git "$(rbenv root)"/plugins/ruby-build
|
||||
RUN rbenv install 3.4.4 && \
|
||||
rbenv global 3.4.4
|
||||
|
||||
ENV RAILS_ENV development
|
||||
|
||||
# Pre install gems to speed up container startup
|
||||
USER root
|
||||
RUN mkdir -p /tmp/bundle-install-cache && \
|
||||
chown -R vscode:vscode /tmp/bundle-install-cache
|
||||
WORKDIR /tmp/bundle-install-cache
|
||||
USER vscode
|
||||
COPY Gemfile.lock Gemfile ./
|
||||
COPY gems/has_aux_table ./gems/has_aux_table
|
||||
RUN BUNDLE_FROZEN=true MAKE="make -j$(nproc)" bundle install --jobs $(nproc)
|
||||
|
||||
# install exo
|
||||
RUN curl -sL https://exo.deref.io/install | bash
|
||||
ENV PATH "/home/vscode/.exo/bin:$PATH"
|
||||
|
||||
# install just (command runner)
|
||||
RUN curl --proto '=https' --tlsv1.2 -sSf https://just.systems/install.sh | sudo bash -s -- --to /usr/local/bin
|
||||
7
.devcontainer/Dockerfile.postgres
Normal file
@@ -0,0 +1,7 @@
|
||||
FROM postgres:17
|
||||
|
||||
RUN apt-get update && apt-get install -y \
|
||||
postgresql-17-pgvector \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN echo "CREATE EXTENSION vector;" >> /docker-entrypoint-initdb.d/01-vector.sql
|
||||
5
.devcontainer/create-db-user.sql
Normal file
@@ -0,0 +1,5 @@
|
||||
CREATE USER vscode CREATEDB;
|
||||
CREATE DATABASE vscode WITH OWNER vscode;
|
||||
|
||||
CREATE DATABASE redux_test WITH OWNER vscode;
|
||||
CREATE DATABASE legacy_test WITH OWNER vscode;
|
||||
50
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,50 @@
|
||||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the
|
||||
// README at: https://github.com/devcontainers/templates/tree/main/src/ruby-rails-postgres
|
||||
{
|
||||
"name": "Ruby on Rails & Postgres",
|
||||
"dockerComposeFile": "docker-compose.yml",
|
||||
"service": "app",
|
||||
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
|
||||
"features": {
|
||||
"ghcr.io/meaningful-ooo/devcontainer-features/fish:1": {},
|
||||
"ghcr.io/nikobockerman/devcontainer-features/fish-persistent-data:2": {},
|
||||
"ghcr.io/devcontainers-extra/features/npm-package:1": {
|
||||
"package": "backlog.md"
|
||||
}
|
||||
},
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"extensions": [
|
||||
"Shopify.ruby-extensions-pack",
|
||||
"dbaeumer.vscode-eslint",
|
||||
"aliariff.vscode-erb-beautify",
|
||||
"bradlc.vscode-tailwindcss",
|
||||
"KoichiSasada.vscode-rdbg",
|
||||
"qwtel.sqlite-viewer",
|
||||
"esbenp.prettier-vscode",
|
||||
"ms-azuretools.vscode-docker",
|
||||
"1YiB.rust-bundle",
|
||||
"rust-lang.rust-analyzer",
|
||||
"saoudrizwan.claude-dev",
|
||||
"ritwickdey.LiveServer"
|
||||
]
|
||||
}
|
||||
},
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
// This can be used to network with other containers or the host.
|
||||
// "forwardPorts": [3000, 5432],
|
||||
// Use 'postCreateCommand' to run commands after the container is created.
|
||||
// "postCreateCommand": "bundle install && rake db:setup",
|
||||
"postCreateCommand": ".devcontainer/post-create.sh",
|
||||
"forwardPorts": [
|
||||
3000, // rails
|
||||
3001, // thrust
|
||||
9394, // prometheus exporter
|
||||
"pgadmin:8080", // pgadmin
|
||||
"grafana:3100", // grafana
|
||||
"prometheus:9090" // prometheus
|
||||
]
|
||||
// Configure tool-specific properties.
|
||||
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
|
||||
// "remoteUser": "root"
|
||||
}
|
||||
115
.devcontainer/docker-compose.yml
Normal file
@@ -0,0 +1,115 @@
|
||||
version: '3'
|
||||
|
||||
services:
|
||||
app:
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: .devcontainer/Dockerfile.devcontainer
|
||||
volumes:
|
||||
- ../..:/workspaces:cached
|
||||
- ./fish-shell-conf-d:/home/vscode/.config/fish/conf.d
|
||||
- devcontainer-redux-gem-cache:/usr/local/rvm/gems
|
||||
- devcontainer-redux-blob-files:/mnt/blob_files_development
|
||||
- /tank/redux-data/blob_files_production:/mnt/blob_files_production
|
||||
environment:
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT: http://otel-collector:4318
|
||||
OTEL_SERVICE_NAME: redux-scraper-dev
|
||||
OTEL_RESOURCE_ATTRIBUTES: application=redux-scraper-dev
|
||||
command: sleep infinity
|
||||
|
||||
db:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile.postgres
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- postgres-17-data:/var/lib/postgresql/data
|
||||
- ./create-db-user.sql:/docker-entrypoint-initdb.d/create-db-user.sql
|
||||
environment:
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_DB: postgres
|
||||
POSTGRES_PASSWORD: postgres
|
||||
|
||||
pgadmin:
|
||||
image: dpage/pgadmin4:9
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
PGADMIN_DEFAULT_EMAIL: admin@example.com
|
||||
PGADMIN_DEFAULT_PASSWORD: password
|
||||
PGADMIN_LISTEN_PORT: 8080
|
||||
PGADMIN_CONFIG_SERVER_MODE: 'False'
|
||||
PGADMIN_CONFIG_MASTER_PASSWORD_REQUIRED: 'False'
|
||||
|
||||
prometheus:
|
||||
image: prom/prometheus
|
||||
command:
|
||||
- '--config.file=/etc/prometheus/prometheus.yml'
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- ./prometheus:/etc/prometheus
|
||||
- devcontainer-redux-prometheus-data:/prometheus
|
||||
|
||||
grafana:
|
||||
image: grafana/grafana
|
||||
restart: unless-stopped
|
||||
environment:
|
||||
- GF_SERVER_HTTP_PORT=3100
|
||||
- GF_USERS_ALLOW_SIGN_UP=false
|
||||
- GF_LOG_LEVEL=debug
|
||||
- GF_SERVER_ROOT_URL=http://localhost:3100/grafana/
|
||||
- GF_SERVER_SERVE_FROM_SUB_PATH=false
|
||||
- GF_AUTH_PROXY_ENABLED=true
|
||||
- GF_AUTH_PROXY_HEADER_NAME=X-WEBAUTH-USER
|
||||
- GF_AUTH_PROXY_HEADER_PROPERTY=username
|
||||
volumes:
|
||||
- devcontainer-redux-grafana-data:/var/lib/grafana
|
||||
|
||||
airvpn-netherlands-proxy:
|
||||
image: qmcgaw/gluetun
|
||||
cap_add:
|
||||
- NET_ADMIN
|
||||
devices:
|
||||
- /dev/net/tun:/dev/net/tun
|
||||
environment:
|
||||
- HTTPPROXY=on
|
||||
- SHADOWSOCKS=on
|
||||
- HTTPPROXY_LOG=on
|
||||
- VPN_SERVICE_PROVIDER=airvpn
|
||||
- VPN_TYPE=wireguard
|
||||
- WIREGUARD_PRIVATE_KEY=INLA6x1gUVLRPKcCBgRmfpJBCXhOpyq3SvRd5EvCE08=
|
||||
- WIREGUARD_PRESHARED_KEY=DR6CBW9yG5y+D+qpo8TZCizo5WKOooC/UFBdWk6lGEg=
|
||||
- WIREGUARD_ADDRESSES=10.165.87.232,fd7d:76ee:e68f:a993:4d1b:a77a:b471:a606
|
||||
- SERVER_COUNTRIES=Netherlands
|
||||
|
||||
airvpn-san-jose-proxy:
|
||||
image: qmcgaw/gluetun
|
||||
cap_add:
|
||||
- NET_ADMIN
|
||||
devices:
|
||||
- /dev/net/tun:/dev/net/tun
|
||||
environment:
|
||||
- HTTPPROXY=on
|
||||
- SHADOWSOCKS=on
|
||||
- HTTPPROXY_LOG=on
|
||||
- VPN_SERVICE_PROVIDER=airvpn
|
||||
- VPN_TYPE=wireguard
|
||||
- WIREGUARD_PRIVATE_KEY=INLA6x1gUVLRPKcCBgRmfpJBCXhOpyq3SvRd5EvCE08=
|
||||
- WIREGUARD_PRESHARED_KEY=DR6CBW9yG5y+D+qpo8TZCizo5WKOooC/UFBdWk6lGEg=
|
||||
- WIREGUARD_ADDRESSES=10.165.87.232/32,fd7d:76ee:e68f:a993:4d1b:a77a:b471:a606/128
|
||||
- SERVER_CITIES="San Jose California, Fremont California"
|
||||
|
||||
tor:
|
||||
image: dockurr/tor
|
||||
volumes:
|
||||
- devcontainer-redux-tor-config:/etc/tor
|
||||
- devcontainer-redux-tor-data:/var/lib/tor
|
||||
restart: always
|
||||
|
||||
volumes:
|
||||
postgres-17-data:
|
||||
devcontainer-redux-gem-cache:
|
||||
devcontainer-redux-blob-files:
|
||||
devcontainer-redux-grafana-data:
|
||||
devcontainer-redux-prometheus-data:
|
||||
devcontainer-redux-tor-config:
|
||||
devcontainer-redux-tor-data:
|
||||
10
.devcontainer/fish-shell-conf-d/bash-in-agent.fish
Normal file
@@ -0,0 +1,10 @@
|
||||
# Agent detection - only activate minimal mode for actual agents
|
||||
if test -n "$npm_config_yes"; or test -n "$CI"; or not status --is-interactive
|
||||
set -gx AGENT_MODE true
|
||||
else
|
||||
set -gx AGENT_MODE false
|
||||
end
|
||||
|
||||
if test $AGENT_MODE = true
|
||||
# /usr/bin/bash -l
|
||||
end
|
||||
1
.devcontainer/fish-shell-conf-d/binstubs.fish
Normal file
@@ -0,0 +1 @@
|
||||
set -gx PATH "/workspaces/redux-scraper/bin" $PATH
|
||||
1
.devcontainer/fish-shell-conf-d/just-completions.fish
Executable file
@@ -0,0 +1 @@
|
||||
complete -f -c just -a (just --summary)
|
||||
17
.devcontainer/fish-shell-conf-d/prompt.fish
Executable file
@@ -0,0 +1,17 @@
|
||||
function fish_prompt -d "Write out the prompt"
|
||||
# This shows up as USER@HOST /home/user/ >, with the directory colored
|
||||
# $USER and $hostname are set by fish, so you can just use them
|
||||
# instead of using `whoami` and `hostname`
|
||||
printf '%s %s%s> ' \
|
||||
(printf '%s%s%s' (set_color -d grey) $USER (set_color normal)) \
|
||||
(printf '%s%s%s' (set_color $fish_color_cwd) (prompt_pwd) (set_color normal)) \
|
||||
(fish_git_prompt)
|
||||
end
|
||||
|
||||
function fish_right_prompt -d "Write out the right prompt"
|
||||
set_color red
|
||||
if [ $RAILS_ENV = "development" ]
|
||||
set_color -d green
|
||||
end
|
||||
printf '%s%s' (echo $RAILS_ENV) (set_color normal)
|
||||
end
|
||||
1
.devcontainer/fish-shell-conf-d/rbenv.fish
Normal file
@@ -0,0 +1 @@
|
||||
status --is-interactive; and rbenv init - --no-rehash fish | source
|
||||
37
.devcontainer/fish-shell-conf-d/utils.fish
Executable file
@@ -0,0 +1,37 @@
|
||||
function blob-files-dir
|
||||
if [ $RAILS_ENV = "production" ]
|
||||
echo "/mnt/blob_files_production/v1"
|
||||
return 0
|
||||
else if [ $RAILS_ENV = "development" ]
|
||||
echo "/mnt/blob_files_development/v1"
|
||||
return 0
|
||||
else
|
||||
echo "unknown RAILS_ENV: $RAILS_ENV" >&2
|
||||
return 1
|
||||
end
|
||||
end
|
||||
|
||||
function blob-file-path
|
||||
set -l file_name $argv[1]
|
||||
set -l prefix (blob-files-dir || return 1)
|
||||
set -l p0 (string sub -l 2 $file_name)
|
||||
set -l p1 (string sub -s 3 -l 2 $file_name)
|
||||
set -l p2 (string sub -s 5 -l 1 $file_name)
|
||||
printf "%s/%s/%s/%s/%s" $prefix $p0 $p1 $p2 $file_name
|
||||
end
|
||||
|
||||
function blob-files-stats
|
||||
set -l files_dir (blob-files-dir || return 1)
|
||||
printf "apparent size: %s\n" (du -sh --apparent-size $files_dir)
|
||||
printf "actual size: %s\n" (du -sh $files_dir)
|
||||
end
|
||||
|
||||
function curl-fa-onion
|
||||
curl \
|
||||
--socks5-hostname tor:9050 \
|
||||
--compressed \
|
||||
-A "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:128.0) Gecko/20100101 Firefox/128.0" \
|
||||
-H "Accept-Encoding: gzip, deflate" \
|
||||
-H "Connection: keep-alive" \
|
||||
"http://g6jy5jkx466lrqojcngbnksugrcfxsl562bzuikrka5rv7srgguqbjid.onion/$argv[1]"
|
||||
end
|
||||
22
.devcontainer/install-extensions.sh
Executable file
@@ -0,0 +1,22 @@
|
||||
#!/bin/bash -ex
|
||||
|
||||
# EDITOR_BIN="$(ls /vscode/cursor-server/bin/*/*/bin/remote-cli/cursor | head -n 1)"
|
||||
|
||||
# detect if either cursor or code is available, and use the first detected one
|
||||
EDITOR_BIN=cursor
|
||||
# EDITOR_BIN=code
|
||||
|
||||
function install_extension() {
|
||||
$EDITOR_BIN --install-extension "$1"
|
||||
}
|
||||
|
||||
install_extension Shopify.ruby-extensions-pack
|
||||
install_extension dbaeumer.vscode-eslint
|
||||
install_extension aliariff.vscode-erb-beautify
|
||||
install_extension bradlc.vscode-tailwindcss
|
||||
install_extension KoichiSasada.vscode-rdbg
|
||||
install_extension qwtel.sqlite-viewer
|
||||
install_extension esbenp.prettier-vscode
|
||||
install_extension ms-azuretools.vscode-docker
|
||||
install_extension 1YiB.rust-bundle
|
||||
install_extension rust-lang.rust-analyzer
|
||||
22
.devcontainer/post-create.sh
Executable file
@@ -0,0 +1,22 @@
|
||||
#!/bin/bash -ex
|
||||
|
||||
function mkdir_and_chmod {
|
||||
sudo mkdir -p $1
|
||||
sudo chmod 777 $1
|
||||
}
|
||||
|
||||
mkdir_and_chmod .devcontainer/data/prometheus
|
||||
mkdir_and_chmod .devcontainer/data/grafana
|
||||
echo "Path: $PATH"
|
||||
echo "Ruby: $(which ruby)"
|
||||
echo "Gem: $(which gem)"
|
||||
echo "Bundler: $(which bundler)"
|
||||
echo "Rake: $(which rake)"
|
||||
|
||||
bundle install --jobs $(getconf _NPROCESSORS_ONLN)
|
||||
rbenv rehash
|
||||
|
||||
bin/rails yarn:install
|
||||
yarn
|
||||
|
||||
yarn add --dev prettier @prettier/plugin-ruby
|
||||
12
.devcontainer/prometheus/prometheus.yml
Normal file
@@ -0,0 +1,12 @@
|
||||
global:
|
||||
scrape_interval: 15s
|
||||
evaluation_interval: 15s
|
||||
|
||||
scrape_configs:
|
||||
- job_name: 'prometheus'
|
||||
static_configs:
|
||||
- targets: ['prometheus:9090']
|
||||
|
||||
- job_name: 'rails'
|
||||
static_configs:
|
||||
- targets: ['app:9394']
|
||||
89
.dockerignore
Normal file
@@ -0,0 +1,89 @@
|
||||
.git
|
||||
.gitignore
|
||||
log
|
||||
tmp
|
||||
ext
|
||||
build
|
||||
node_modules
|
||||
profiler
|
||||
.bundle
|
||||
.vscode
|
||||
launch.json
|
||||
settings.json
|
||||
*.export
|
||||
.devcontainer
|
||||
user_scripts/dist
|
||||
backlog
|
||||
|
||||
# Test directories (not needed in production)
|
||||
spec
|
||||
test
|
||||
|
||||
# Development and CI/CD files
|
||||
.github
|
||||
.ruby-lsp
|
||||
.aider*
|
||||
.cursorignore
|
||||
.cursorrules
|
||||
.rspec
|
||||
.rspec_parallel
|
||||
.rubocop.yml
|
||||
.prettierrc
|
||||
TODO.md
|
||||
*.notes.md
|
||||
things-to-fix.notes.md
|
||||
mf-fitter-commands.notes.md
|
||||
.aiderignore
|
||||
|
||||
# Sorbet type checking files (not needed in production)
|
||||
sorbet
|
||||
|
||||
# Storage directory (contains uploaded files/cache)
|
||||
storage
|
||||
|
||||
# Development database files
|
||||
db/*.sqlite3
|
||||
db/*.sqlite3-*
|
||||
|
||||
# Core dump files
|
||||
core
|
||||
|
||||
# Yarn/npm cache and lock files that might conflict
|
||||
yarn-error.log
|
||||
yarn-debug.log*
|
||||
.yarn-integrity
|
||||
package-lock.json
|
||||
|
||||
# OS specific files
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# Editor specific files
|
||||
*.swp
|
||||
*.swo
|
||||
*~
|
||||
|
||||
# Local environment files
|
||||
.env.local
|
||||
.env.*.local
|
||||
|
||||
# Compiled assets (will be rebuilt in Docker)
|
||||
public/assets
|
||||
public/packs
|
||||
public/packs-test
|
||||
app/assets/builds/*
|
||||
|
||||
# Flame graph files
|
||||
flamegraph.svg
|
||||
|
||||
# Procfile variants (only need production one)
|
||||
Procfile.dev
|
||||
Procfile.dev-static
|
||||
Procfile.staging
|
||||
Procfile.worker
|
||||
|
||||
# Development scripts
|
||||
justfile
|
||||
|
||||
# Documentation
|
||||
README.md
|
||||
15
.env-cmdrc
Normal file
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"dev": {
|
||||
"RAILS_ENV": "development",
|
||||
"FOO_BAR": "baz"
|
||||
},
|
||||
"staging": {
|
||||
"RAILS_ENV": "staging"
|
||||
},
|
||||
"production": {
|
||||
"RAILS_ENV": "production"
|
||||
},
|
||||
"worker": {
|
||||
"RAILS_ENV": "worker"
|
||||
}
|
||||
}
|
||||
12
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
# To get started with Dependabot version updates, you'll need to specify which
|
||||
# package ecosystems to update and where the package manifests are located.
|
||||
# Please see the documentation for more information:
|
||||
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
|
||||
# https://containers.dev/guide/dependabot
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
- package-ecosystem: "devcontainers"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: weekly
|
||||
24
.gitignore
vendored
@@ -4,6 +4,18 @@
|
||||
# or operating system, you probably want to add a global ignore instead:
|
||||
# git config --global core.excludesfile '~/.gitignore_global'
|
||||
|
||||
build
|
||||
tmp
|
||||
core
|
||||
*.bundle
|
||||
user_scripts/dist
|
||||
migrated_files.txt
|
||||
|
||||
# use yarn to manage node_modules
|
||||
package-lock.json
|
||||
|
||||
*.notes.md
|
||||
*.txt
|
||||
# Ignore bundler config.
|
||||
/.bundle
|
||||
|
||||
@@ -36,3 +48,15 @@
|
||||
|
||||
/profiler/
|
||||
/flamegraph.svg
|
||||
/app/assets/builds/*
|
||||
!/app/assets/builds/.keep
|
||||
|
||||
/public/packs
|
||||
/public/packs-test
|
||||
/node_modules
|
||||
/yarn-error.log
|
||||
yarn-debug.log*
|
||||
.yarn-integrity
|
||||
.DS_Store
|
||||
*.export
|
||||
.aider*
|
||||
|
||||
3
.gitmodules
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
[submodule "gems/has_aux_table"]
|
||||
path = gems/has_aux_table
|
||||
url = ssh://git@git.dy.mk:2221/dymk/has_aux_table.git
|
||||
27
.prettierrc
Normal file
@@ -0,0 +1,27 @@
|
||||
{
|
||||
"tabWidth": 2,
|
||||
"useTabs": false,
|
||||
"trailingComma": "all",
|
||||
"arrowParens": "always",
|
||||
"singleQuote": true,
|
||||
"semi": true,
|
||||
"bracketSpacing": true,
|
||||
"bracketSameLine": false,
|
||||
"printWidth": 80,
|
||||
"plugins": [
|
||||
"prettier-plugin-tailwindcss",
|
||||
"@prettier/plugin-ruby",
|
||||
"@prettier/plugin-xml",
|
||||
"@4az/prettier-plugin-html-erb"
|
||||
],
|
||||
"xmlQuoteAttributes": "double",
|
||||
"xmlWhitespaceSensitivity": "ignore",
|
||||
"overrides": [
|
||||
{
|
||||
"files": ["*.ts", "*.tsx"],
|
||||
"options": {
|
||||
"parser": "typescript"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
2
.rspec_parallel
Normal file
@@ -0,0 +1,2 @@
|
||||
--format progress
|
||||
--format ParallelTests::RSpec::RuntimeLogger --out tmp/parallel_runtime_rspec.log
|
||||
32
.rubocop.yml
Normal file
@@ -0,0 +1,32 @@
|
||||
# The behavior of RuboCop can be controlled via the .rubocop.yml
|
||||
# configuration file. It makes it possible to enable/disable
|
||||
# certain cops (checks) and to alter their behavior if they accept
|
||||
# any parameters. The file can be placed either in your home
|
||||
# directory or in some project directory.
|
||||
#
|
||||
# RuboCop will start looking for the configuration file in the directory
|
||||
# where the inspected file is and continue its way up to the root directory.
|
||||
#
|
||||
# See https://docs.rubocop.org/rubocop/configuration
|
||||
|
||||
inherit_mode:
|
||||
merge:
|
||||
- Exclude
|
||||
|
||||
AllCops:
|
||||
NewCops: disable
|
||||
|
||||
Metrics/MethodLength:
|
||||
Enabled: false
|
||||
|
||||
Metrics/ClassLength:
|
||||
Enabled: false
|
||||
|
||||
Metrics/BlockLength:
|
||||
Enabled: false
|
||||
|
||||
Style/Documentation:
|
||||
Enabled: false
|
||||
|
||||
Metrics/AbcSize:
|
||||
Enabled: false
|
||||
@@ -1 +1 @@
|
||||
3.2.0
|
||||
3.4.4
|
||||
|
||||
10
.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "rdbg",
|
||||
"name": "rdbg - attach",
|
||||
"request": "attach"
|
||||
}
|
||||
]
|
||||
}
|
||||
67
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
{
|
||||
"editor.formatOnSave": true,
|
||||
"workbench.editor.titleScrollbarSizing": "large",
|
||||
"window.title": "${activeEditorMedium}${separator}${rootName}${separator}${profileName}",
|
||||
"workbench.preferredDarkColorTheme": "Spinel",
|
||||
"workbench.preferredLightColorTheme": "Spinel Light",
|
||||
"rubyLsp.formatter": "syntax_tree",
|
||||
"rubyLsp.featureFlags": {
|
||||
"fullTestDiscovery": true
|
||||
},
|
||||
"rubyLsp.addonSettings": {
|
||||
"Ruby LSP RSpec": {
|
||||
"debug": true
|
||||
}
|
||||
},
|
||||
"files.associations": {
|
||||
".env-cmdrc": "json"
|
||||
},
|
||||
"[ruby]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[erb]": {
|
||||
"editor.defaultFormatter": "aliariff.vscode-erb-beautify"
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[javascript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[typescriptreact]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[javascriptreact]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[json]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[jsonc]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[xml]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[dockerfile]": {
|
||||
"editor.defaultFormatter": "ms-azuretools.vscode-docker"
|
||||
},
|
||||
"tailwindCSS.includeLanguages": {
|
||||
"erb": "html",
|
||||
"typescript": "javascript"
|
||||
},
|
||||
"tailwindCSS.experimental.classRegex": [
|
||||
"\\bclass:\\s*'([^']*)'",
|
||||
"\\bclass:\\s*\"([^\"]*)\"",
|
||||
"[\"'`]([^\"'`]*).*?,?\\s?"
|
||||
],
|
||||
"editor.quickSuggestions": {
|
||||
"other": "on",
|
||||
"comments": "off",
|
||||
"strings": "on"
|
||||
},
|
||||
"tailwindCSS.experimental.configFile": "config/tailwind.config.js",
|
||||
"sqliteViewer.maxFileSize": 4000,
|
||||
"files.insertFinalNewline": true,
|
||||
"files.trimTrailingWhitespace": true
|
||||
}
|
||||
121
Dockerfile
Normal file
@@ -0,0 +1,121 @@
|
||||
# Primary image
|
||||
FROM ruby:3.4.4
|
||||
USER root
|
||||
|
||||
# apt caching & install packages
|
||||
RUN rm -f /etc/apt/apt.conf.d/docker-clean; \
|
||||
echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache
|
||||
|
||||
RUN \
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && \
|
||||
apt-get install --no-install-recommends --no-install-suggests -y \
|
||||
libblas-dev liblapack-dev
|
||||
|
||||
# preinstall gems that take a long time to install
|
||||
RUN MAKE="make -j12" gem install bundler -v '2.6.7'
|
||||
RUN MAKE="make -j12" gem install rice -v '4.3.3'
|
||||
RUN MAKE="make -j12" gem install faiss -v '0.3.2'
|
||||
RUN MAKE="make -j12" gem install rails_live_reload -v '0.3.6'
|
||||
RUN bundle config --global frozen 1
|
||||
|
||||
# set up nodejs 18.x deb repo
|
||||
RUN mkdir -p /etc/apt/keyrings && \
|
||||
curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key \
|
||||
| gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && \
|
||||
echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_18.x nodistro main" \
|
||||
| tee /etc/apt/sources.list.d/nodesource.list
|
||||
|
||||
RUN \
|
||||
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && \
|
||||
apt-get install --no-install-recommends --no-install-suggests -y \
|
||||
ca-certificates \
|
||||
curl \
|
||||
gnupg \
|
||||
nodejs \
|
||||
libpq-dev \
|
||||
ffmpeg \
|
||||
ffmpegthumbnailer \
|
||||
abiword \
|
||||
pdftohtml \
|
||||
libreoffice
|
||||
|
||||
# Install vips dependencies
|
||||
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||
apt-get update && \
|
||||
apt-get install --no-install-recommends --no-install-suggests -qqy \
|
||||
automake \
|
||||
gtk-doc-tools \
|
||||
gobject-introspection \
|
||||
libgirepository1.0-dev \
|
||||
libglib2.0-dev \
|
||||
libexpat1-dev \
|
||||
libjpeg-dev \
|
||||
libpng-dev \
|
||||
libtiff5-dev \
|
||||
libwebp-dev \
|
||||
libheif-dev \
|
||||
libexif-dev \
|
||||
liblcms2-dev \
|
||||
libxml2-dev \
|
||||
libfftw3-dev \
|
||||
liborc-0.4-dev \
|
||||
libcgif-dev \
|
||||
libjxl-dev \
|
||||
libopenjp2-7-dev \
|
||||
meson \
|
||||
ninja-build
|
||||
|
||||
# Install imagemagick from source
|
||||
RUN cd /tmp && \
|
||||
wget -qO- https://imagemagick.org/archive/releases/ImageMagick-7.1.2-1.tar.xz | tar -xJ && \
|
||||
cd ImageMagick-7.1.2-1 && \
|
||||
./configure && \
|
||||
make -j$(nproc) && \
|
||||
make install && \
|
||||
ldconfig && \
|
||||
cd / && \
|
||||
rm -rf /tmp/ImageMagick-7.1.2-1*
|
||||
|
||||
# Install vips from source
|
||||
RUN cd /tmp && \
|
||||
wget -qO- https://github.com/libvips/libvips/releases/download/v8.17.1/vips-8.17.1.tar.xz | tar -xJ && \
|
||||
cd vips-8.17.1 && \
|
||||
meson setup build --prefix=/usr/local -Dcgif=enabled && \
|
||||
cd build && \
|
||||
ninja && \
|
||||
ninja install && \
|
||||
ldconfig && \
|
||||
cd / && \
|
||||
rm -rf /tmp/vips-8.17.1*
|
||||
|
||||
WORKDIR /usr/src/app
|
||||
COPY Gemfile Gemfile.lock ./
|
||||
COPY gems/has_aux_table ./gems/has_aux_table
|
||||
RUN ls -lah gems && BUNDLE_FROZEN=true MAKE="make -j$(nproc)" bundle install --jobs $(nproc)
|
||||
|
||||
# install js dependencies
|
||||
COPY package.json yarn.lock ./
|
||||
RUN npm install -g yarn
|
||||
RUN rails yarn:install
|
||||
RUN yarn
|
||||
|
||||
COPY . .
|
||||
|
||||
# precompile assets
|
||||
RUN RAILS_ENV=production bin/rails assets:precompile
|
||||
RUN mkdir -p tmp/pids
|
||||
|
||||
# build user scripts
|
||||
RUN yarn build:user-scripts
|
||||
|
||||
# create user with id=1000 gid=1000
|
||||
RUN groupadd -g 1000 app && \
|
||||
useradd -m -d /home/app -s /bin/bash -u 1000 -g 1000 app
|
||||
RUN chown -R app:app /usr/src/app
|
||||
USER app
|
||||
CMD /bin/bash
|
||||
171
Gemfile
@@ -1,17 +1,16 @@
|
||||
source "https://rubygems.org"
|
||||
git_source(:github) { |repo| "https://github.com/#{repo}.git" }
|
||||
|
||||
ruby "3.2.0"
|
||||
# ruby "3.0.3"
|
||||
ruby "~> 3"
|
||||
gem "bundler", "~> 2.6.7"
|
||||
|
||||
# Bundle edge Rails instead: gem "rails", github: "rails/rails", branch: "main"
|
||||
gem "rails", "~> 7.0.4", ">= 7.0.4.2"
|
||||
gem "rails", "~> 7.2"
|
||||
gem "has_aux_table", path: "gems/has_aux_table"
|
||||
|
||||
# The original asset pipeline for Rails [https://github.com/rails/sprockets-rails]
|
||||
gem "sprockets-rails"
|
||||
|
||||
# Use sqlite3 as the database for Active Record
|
||||
gem "sqlite3", "~> 1.4"
|
||||
gem "pg"
|
||||
|
||||
gem "pry"
|
||||
@@ -19,9 +18,10 @@ gem "pry-stack_explorer"
|
||||
|
||||
# Use the Puma web server [https://github.com/puma/puma]
|
||||
gem "puma", "~> 5.0"
|
||||
gem "thruster"
|
||||
|
||||
# Use JavaScript with ESM import maps [https://github.com/rails/importmap-rails]
|
||||
gem "importmap-rails"
|
||||
# # Use JavaScript with ESM import maps [https://github.com/rails/importmap-rails]
|
||||
# gem "importmap-rails"
|
||||
|
||||
# Hotwire's SPA-like page accelerator [https://turbo.hotwired.dev]
|
||||
gem "turbo-rails"
|
||||
@@ -30,7 +30,7 @@ gem "turbo-rails"
|
||||
gem "stimulus-rails"
|
||||
|
||||
# Build JSON APIs with ease [https://github.com/rails/jbuilder]
|
||||
gem "jbuilder"
|
||||
gem "jbuilder", "~> 2.13"
|
||||
|
||||
# Use Redis adapter to run Action Cable in production
|
||||
# gem "redis", "~> 4.0"
|
||||
@@ -42,7 +42,7 @@ gem "jbuilder"
|
||||
# gem "bcrypt", "~> 3.1.7"
|
||||
|
||||
# Windows does not include zoneinfo files, so bundle the tzinfo-data gem
|
||||
gem "tzinfo-data", platforms: %i[ mingw mswin x64_mingw jruby ]
|
||||
gem "tzinfo-data", platforms: %i[mingw mswin x64_mingw jruby]
|
||||
|
||||
# Reduces boot times through caching; required in config/boot.rb
|
||||
gem "bootsnap", require: false
|
||||
@@ -53,60 +53,141 @@ gem "bootsnap", require: false
|
||||
# Use Active Storage variants [https://guides.rubyonrails.org/active_storage_overview.html#transforming-images]
|
||||
# gem "image_processing", "~> 1.2"
|
||||
|
||||
group :development, :test do
|
||||
group :development, :test, :staging do
|
||||
# See https://guides.rubyonrails.org/debugging_rails_applications.html#debugging-with-the-debug-gem
|
||||
gem "debug", platforms: %i[ mri mingw x64_mingw ]
|
||||
gem "debug", "~> 1.11", platforms: %i[mri mingw x64_mingw], require: false
|
||||
end
|
||||
|
||||
group :development do
|
||||
group :development, :staging do
|
||||
# Use console on exceptions pages [https://github.com/rails/web-console]
|
||||
gem "htmlbeautifier"
|
||||
gem "rufo", require: false
|
||||
gem "web-console"
|
||||
|
||||
# Add speed badges [https://github.com/MiniProfiler/rack-mini-profiler]
|
||||
gem "rack-mini-profiler", require: ["enable_rails_patches", "rack-mini-profiler"]
|
||||
# Speed up commands on slow machines / big apps [https://github.com/rails/spring]
|
||||
# gem "spring"
|
||||
end
|
||||
gem "spring"
|
||||
gem "spring-commands-rspec"
|
||||
|
||||
group :log_watcher do
|
||||
gem "curses"
|
||||
gem "listen"
|
||||
gem "concurrent-ruby-ext", require: "concurrent"
|
||||
gem "concurrent-ruby-edge", require: "concurrent-edge"
|
||||
# Add speed badges [https://github.com/MiniProfiler/rack-mini-profiler]
|
||||
gem "memory_profiler"
|
||||
gem "rack-mini-profiler",
|
||||
"~> 3.3",
|
||||
require: %w[enable_rails_patches rack-mini-profiler]
|
||||
gem "stackprof"
|
||||
|
||||
# temporary ref of rails_live_reload to a commit that adds ignore patterns
|
||||
# to the Listen gem
|
||||
gem "rails_live_reload",
|
||||
git: "https://github.com/railsjazz/rails_live_reload",
|
||||
ref: "dcd3b73904594e2c5134c2f6e05954f3937a8d29"
|
||||
# git: "https://github.com/mktakuya/rails_live_reload",
|
||||
# ref: "95d7ac7c03e8c702066ed3dc9cd70a965412e2d2"
|
||||
# gem "rails_live_reload", "0.4.0"
|
||||
end
|
||||
|
||||
group :test do
|
||||
# Use system testing [https://guides.rubyonrails.org/testing.html#system-testing]
|
||||
gem "capybara"
|
||||
gem "rspec-rails", "~> 7.0"
|
||||
gem "rails-controller-testing"
|
||||
gem "selenium-webdriver"
|
||||
gem "webdrivers"
|
||||
gem "rspec-rails"
|
||||
gem "shoulda-matchers"
|
||||
gem "factory_bot_rails"
|
||||
gem "pundit-matchers", "~> 4.0"
|
||||
gem "db-query-matchers", "~> 0.14"
|
||||
end
|
||||
|
||||
gem "xdiff", path: "../xdiff-rb"
|
||||
|
||||
# for legacy import
|
||||
gem "diffy"
|
||||
gem "rb-bsdiff", path: "../rb-bsdiff"
|
||||
group :test, :development do
|
||||
gem "parallel_tests"
|
||||
gem "spring-commands-parallel-tests"
|
||||
end
|
||||
|
||||
gem "addressable"
|
||||
gem "colorize"
|
||||
gem "concurrent-ruby-edge", require: "concurrent-edge"
|
||||
gem "concurrent-ruby-ext", require: "concurrent"
|
||||
gem "curb"
|
||||
gem "daemons"
|
||||
gem "discard"
|
||||
gem "good_job", "~> 4.6"
|
||||
gem "http-cookie"
|
||||
gem "http", "~> 5.2" # For proxying requests
|
||||
gem "kaminari"
|
||||
gem "nokogiri"
|
||||
gem "pluck_each"
|
||||
gem "ripcord"
|
||||
gem "ruby-prof"
|
||||
gem "ruby-prof-speedscope"
|
||||
gem "ruby-vips"
|
||||
gem "dhash-vips"
|
||||
gem "ffmpeg", git: "https://github.com/instructure/ruby-ffmpeg", tag: "v6.1.2"
|
||||
gem "table_print"
|
||||
gem "addressable"
|
||||
gem "nokogiri"
|
||||
gem "http-cookie"
|
||||
gem "curb"
|
||||
gem "kaminari"
|
||||
gem "delayed_job_active_record"
|
||||
# gem "delayed-web"
|
||||
gem "delayed_job_web"
|
||||
gem "colorize"
|
||||
gem "daemons"
|
||||
gem "delayed_job_worker_pool"
|
||||
gem "ripcord"
|
||||
gem "influxdb-client"
|
||||
gem "discard"
|
||||
# gem 'cli-ui'
|
||||
# gem "paper_trail"
|
||||
# gem "paper_trail-hashdiff"
|
||||
# gem "hashdiff"
|
||||
gem "zstd-ruby"
|
||||
gem "rouge"
|
||||
gem "docx"
|
||||
gem "ruby-bbcode"
|
||||
gem "dtext_rb",
|
||||
git: "https://github.com/e621ng/dtext_rb",
|
||||
ref: "5ef8fd7a5205c832f4c18197911717e7d491494e"
|
||||
gem "charlock_holmes"
|
||||
|
||||
# Telegram Bot API
|
||||
gem "telegram-bot-ruby"
|
||||
|
||||
# gem "pghero", git: "https://github.com/dymk/pghero", ref: "e314f99"
|
||||
gem "pghero", "~> 3.6"
|
||||
gem "pg_query", ">= 2"
|
||||
|
||||
gem "disco"
|
||||
gem "faiss"
|
||||
gem "neighbor"
|
||||
gem "progressbar"
|
||||
gem "attr_json"
|
||||
|
||||
group :production, :staging do
|
||||
gem "rails_semantic_logger", "~> 4.17"
|
||||
gem "cloudflare-rails"
|
||||
end
|
||||
|
||||
group :production do
|
||||
gem "sd_notify"
|
||||
end
|
||||
|
||||
gem "rack", "~> 2.2"
|
||||
gem "rack-cors"
|
||||
gem "react_on_rails"
|
||||
gem "sanitize", "~> 6.1"
|
||||
gem "shakapacker", "~> 6.6"
|
||||
gem "timeout"
|
||||
|
||||
group :development do
|
||||
gem "prettier_print"
|
||||
gem "syntax_tree", "~> 6.2"
|
||||
gem "unicode_plot" # For terminal-based data visualization (Ruby API)
|
||||
gem "rumale" # Professional machine learning library for Ruby
|
||||
gem "ruby-lsp-rspec", require: false
|
||||
end
|
||||
|
||||
gem "cssbundling-rails", "~> 1.4"
|
||||
gem "tailwindcss-rails", "~> 3.0"
|
||||
|
||||
# Authentication
|
||||
gem "devise", "~> 4.9"
|
||||
|
||||
# Authorization
|
||||
gem "pundit", "~> 2.4"
|
||||
|
||||
# Monitoring
|
||||
gem "prometheus_exporter", "~> 2.2"
|
||||
|
||||
SORBET_VERSION = "0.5.12221"
|
||||
gem "sorbet", SORBET_VERSION, group: :development
|
||||
gem "sorbet-runtime", SORBET_VERSION
|
||||
gem "tapioca", "0.16.6", require: false, group: %i[development test]
|
||||
gem "rspec-sorbet", group: [:test]
|
||||
gem "sorbet-struct-comparable"
|
||||
|
||||
gem "skyfall", "~> 0.6.0"
|
||||
|
||||
gem "didkit", "~> 0.2.3"
|
||||
|
||||
929
Gemfile.lock
5
Procfile.dev
Normal file
@@ -0,0 +1,5 @@
|
||||
rails: RAILS_ENV=development HTTP_PORT=3001 thrust ./bin/rails server
|
||||
wp-client: RAILS_ENV=development HMR=true ./bin/webpacker-dev-server
|
||||
wp-server: RAILS_ENV=development HMR=true SERVER_BUNDLE_ONLY=yes ./bin/webpacker --watch
|
||||
css: tailwindcss -c ./config/tailwind.config.js -i ./app/assets/stylesheets/application.tailwind.css -o ./app/assets/builds/tailwind.css --watch
|
||||
prometheus_exporter: RAILS_ENV=development bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "development"}'
|
||||
9
Procfile.dev-static
Normal file
@@ -0,0 +1,9 @@
|
||||
# You can run these commands in separate shells
|
||||
web: rails s -p 3000
|
||||
|
||||
# Next line runs a watch process with webpack to compile the changed files.
|
||||
# When making frequent changes to client side assets, you will prefer building webpack assets
|
||||
# upon saving rather than when you refresh your browser page.
|
||||
# Note, if using React on Rails localization you will need to run
|
||||
# `bundle exec rake react_on_rails:locale` before you run bin/webpacker
|
||||
webpack: sh -c 'rm -rf public/packs/* || true && bin/webpacker -w'
|
||||
3
Procfile.production
Normal file
@@ -0,0 +1,3 @@
|
||||
rails: RAILS_ENV=production HTTP_PORT=3000 TARGET_PORT=3003 thrust ./bin/rails server -p 3003
|
||||
tail: tail -f log/production.log
|
||||
prometheus_exporter: RAILS_ENV=production bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "production"}'
|
||||
5
Procfile.staging
Normal file
@@ -0,0 +1,5 @@
|
||||
rails: RAILS_ENV=staging HTTP_PORT=3001 bundle exec thrust ./bin/rails server
|
||||
wp-client: RAILS_ENV=development HMR=true ./bin/webpacker-dev-server
|
||||
wp-server: RAILS_ENV=development HMR=true SERVER_BUNDLE_ONLY=yes ./bin/webpacker --watch
|
||||
css: RAILS_ENV=development yarn "build:css[debug]" --watch
|
||||
prometheus-exporter: RAILS_ENV=staging bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "staging"}'
|
||||
3
Procfile.worker
Normal file
@@ -0,0 +1,3 @@
|
||||
periodic_tasks: RAILS_ENV=worker bundle exec rake periodic_tasks
|
||||
good_job: RAILS_ENV=worker bundle exec rake good_job
|
||||
prometheus_exporter: RAILS_ENV=worker bundle exec prometheus_exporter --bind 0.0.0.0 --prefix redux_ --label '{"environment": "worker"}'
|
||||
32
README.md
@@ -1,3 +1,29 @@
|
||||
# Redux Scraper
|
||||
|
||||
A Ruby on Rails application for scraping and managing various content sources.
|
||||
|
||||
## Setup
|
||||
|
||||
This application is configured for optimal development and testing performance:
|
||||
|
||||
### Performance Optimizations
|
||||
|
||||
- **Bootsnap**: Accelerates gem loading and caching for faster boot times
|
||||
- **Spring**: Preloads the Rails application for faster command execution
|
||||
|
||||
#### Rails Boot Performance
|
||||
|
||||
- Development boot time: ~270ms (87% faster than without optimization)
|
||||
- Test environment startup: ~211ms (29% faster than without optimization)
|
||||
|
||||
To use Spring-optimized commands:
|
||||
```bash
|
||||
# Use bin/ executables for Spring acceleration
|
||||
bin/rails console
|
||||
bin/rails runner "puts 'Hello'"
|
||||
bin/rspec spec/
|
||||
```
|
||||
|
||||
# README
|
||||
|
||||
This README would normally document whatever steps are necessary to get the
|
||||
@@ -7,8 +33,12 @@ Things you may want to cover:
|
||||
|
||||
* Ruby version
|
||||
|
||||
- 3.2.6
|
||||
|
||||
* System dependencies
|
||||
|
||||
- node 18.x
|
||||
|
||||
* Configuration
|
||||
|
||||
* Database creation
|
||||
@@ -21,4 +51,6 @@ Things you may want to cover:
|
||||
|
||||
* Deployment instructions
|
||||
|
||||
- Build docker image with `docker build . -t redux-scraper-app`
|
||||
|
||||
* ...
|
||||
|
||||
414
Rakefile
@@ -5,64 +5,390 @@ require "rake/testtask"
|
||||
require_relative "config/application"
|
||||
|
||||
Rails.application.load_tasks
|
||||
$LOAD_PATH << Rails.root.join("rake")
|
||||
Rake.application.rake_require "sst"
|
||||
Rake.application.rake_require "log_entry"
|
||||
Rake.application.rake_require "worker"
|
||||
Rake.application.rake_require "metrics"
|
||||
Rake.application.rake_require "fa"
|
||||
Rake.application.rake_require "e621"
|
||||
Rake.application.rake_require "twitter"
|
||||
Dir.glob(Rails.root.join("rake", "*.rake")).each { |rake_file| load rake_file }
|
||||
|
||||
task :set_ar_stdout => :environment do
|
||||
ActiveRecord::Base.logger = Logger.new(STDOUT)
|
||||
task set_ar_stdout: :environment do
|
||||
ActiveRecord::Base.logger = Logger.new($stdout)
|
||||
end
|
||||
|
||||
task :set_logger_stdout => :environment do
|
||||
Rails.logger = Logger.new(STDOUT)
|
||||
Rails.logger.formatter = proc do |severity, datetime, progname, msg|
|
||||
"#{severity}: #{msg}\n"
|
||||
end
|
||||
task set_logger_stdout: :environment do
|
||||
Rails.logger = Logger.new($stdout)
|
||||
Rails.logger.formatter =
|
||||
proc { |severity, datetime, progname, msg| "#{severity}: #{msg}\n" }
|
||||
ActiveRecord::Base.logger = nil
|
||||
ActiveJob::Base.logger = nil
|
||||
GoodJob.logger = Rails.logger
|
||||
end
|
||||
|
||||
task :pool_combined do
|
||||
ENV["RAILS_ENV"] = "production"
|
||||
proxies = ["direct", "proxy-1", "dedipath-1", "serverhost-1"]
|
||||
proxy = ENV["proxy"]
|
||||
raise("'proxy' must be set") unless proxy
|
||||
raise("'proxy' must be one of #{proxies}") unless proxies.include?(proxy)
|
||||
cmd = "bundle exec delayed_job_worker_pool pool_combined.rb"
|
||||
puts "$> #{cmd}"
|
||||
task periodic_tasks: %i[environment set_logger_stdout] do
|
||||
Thread.new do
|
||||
loop do
|
||||
Rake::Task["pghero:capture_space_stats"].execute
|
||||
puts "logged space stats"
|
||||
sleep 6.hours
|
||||
end
|
||||
end
|
||||
|
||||
Thread.new do
|
||||
loop do
|
||||
Rake::Task["pghero:capture_query_stats"].execute
|
||||
puts "logged query stats"
|
||||
sleep 5.minutes
|
||||
end
|
||||
end
|
||||
|
||||
loop { sleep 10 }
|
||||
end
|
||||
|
||||
# TODO - migrate to Domain::Post / Domain::User
|
||||
# namespace :db_sampler do
|
||||
# task export: :environment do
|
||||
# url_names = ENV["url_names"] || raise("need 'url_names' (comma-separated)")
|
||||
# outfile = $stdout
|
||||
# DbSampler.new(outfile).export(url_names.split(","))
|
||||
# ensure
|
||||
# outfile.close if outfile
|
||||
# end
|
||||
|
||||
# task import: [:environment] do
|
||||
# infile = $stdin
|
||||
# DbSampler.new(infile).import
|
||||
# ensure
|
||||
# infile.close if infile
|
||||
# end
|
||||
# end
|
||||
|
||||
task good_job: %i[environment set_ar_stdout set_logger_stdout] do
|
||||
env_hash = {
|
||||
"RAILS_ENV" => "worker",
|
||||
"GOOD_JOB_POLL_INTERVAL" => "5",
|
||||
"GOOD_JOB_MAX_CACHE" => "10000",
|
||||
"GOOD_JOB_QUEUE_SELECT_LIMIT" => "4096",
|
||||
"GOOD_JOB_MAX_THREADS" => "4",
|
||||
"GOOD_JOB_ENABLE_CRON" => "1",
|
||||
"GOOD_JOB_QUEUES" =>
|
||||
ENV["GOOD_JOB_QUEUES"] ||
|
||||
%w[manual:4 fa_post,e621:2 *:6].reject(&:nil?).join(";"),
|
||||
}
|
||||
|
||||
env_hash.each do |key, value|
|
||||
ENV[key] = value
|
||||
puts "$> #{key.light_black.bold} = #{value.bold}"
|
||||
end
|
||||
|
||||
cmd = "bundle exec good_job"
|
||||
puts "$> #{cmd.bold}"
|
||||
exec(cmd)
|
||||
end
|
||||
|
||||
task :recompute_job_signatures => :environment do
|
||||
ActiveRecord::Base.logger = Logger.new(STDOUT)
|
||||
ActiveRecord::Base.logger.level = :error
|
||||
task :reverse_csv do
|
||||
file = ENV["file"] || raise("need 'file' (file path)")
|
||||
in_csv = CSV.parse(File.open(file, "r+"), headers: true)
|
||||
out_csv =
|
||||
CSV.new(
|
||||
File.open("rev_" + file, "w"),
|
||||
write_headers: true,
|
||||
headers: in_csv.headers,
|
||||
)
|
||||
in_csv.reverse_each { |row| out_csv << row.map(&:second) }
|
||||
out_csv.close
|
||||
end
|
||||
|
||||
start_at = ENV["start_at"]&.to_i || 0
|
||||
count = 0
|
||||
destroyed = 0
|
||||
puts "# jobs: #{Delayed::Job.count}"
|
||||
Delayed::Job.find_each(start: start_at) do |job|
|
||||
job.set_signature
|
||||
unless job.save
|
||||
job.destroy
|
||||
destroyed += 1
|
||||
task enqueue_fa_posts_missing_files: %i[environment set_logger_stdout] do
|
||||
Domain::Post::FaPost
|
||||
.where(state: "ok")
|
||||
.where
|
||||
.missing(:file)
|
||||
.find_each(order: :desc) do |post|
|
||||
Domain::Fa::Job::ScanPostJob.perform_now(post:)
|
||||
end
|
||||
end
|
||||
|
||||
task fix_e621_post_files: :environment do
|
||||
query = Domain::Post::E621Post.where(state: "ok").where.missing(:files)
|
||||
limit = ENV["limit"]&.to_i
|
||||
puts "query: #{query.to_sql}"
|
||||
|
||||
query.find_each(batch_size: 10) do |post|
|
||||
Domain::E621::Task::FixE621PostMissingFiles.new.run(post)
|
||||
if limit
|
||||
limit -= 1
|
||||
if limit.zero?
|
||||
puts "limit reached"
|
||||
break
|
||||
end
|
||||
count += 1
|
||||
if count % 50 == 0
|
||||
puts "processed #{count}, destroyed #{destroyed} - last id: #{job.id}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
task :workoff_failed_jobs => [:environment, :set_ar_stdout, :set_logger_stdout] do
|
||||
worker = Delayed::Worker.new
|
||||
Delayed::Job.where(
|
||||
"last_error is not null and attempts <= 2"
|
||||
).find_each(batch_size: 1) do |job|
|
||||
worker.run(job)
|
||||
task fix_ok_e621_posts_missing_files: :environment do
|
||||
query = Domain::Post::E621Post.where(state: "ok").where.missing(:file)
|
||||
progress_bar =
|
||||
ProgressBar.create(total: query.count, format: "%t: %c/%C %B %p%% %a %e")
|
||||
query.find_each(batch_size: 10) do |post|
|
||||
Domain::E621::Job::ScanPostJob.perform_now(post:)
|
||||
progress_bar.progress = [progress_bar.progress + 1, progress_bar.total].min
|
||||
end
|
||||
end
|
||||
|
||||
task perform_good_jobs: :environment do
|
||||
job_class = ENV["job_class"]
|
||||
job_id = ENV["job_id"]
|
||||
limit = ENV["limit"]&.to_i
|
||||
|
||||
if !job_id.present? && !job_class.present?
|
||||
raise "need 'job_id' or 'job_class'"
|
||||
end
|
||||
|
||||
relation =
|
||||
if job_id
|
||||
job =
|
||||
GoodJob::Job.find_by(id: job_id) ||
|
||||
GoodJob::Execution.find_by(id: job_id)&.job
|
||||
if job.nil?
|
||||
puts "no job found with id #{job_id}"
|
||||
exit 1
|
||||
end
|
||||
puts "found job with id #{job.id}" if job.id != job_id
|
||||
GoodJob::Job.where(id: job.id)
|
||||
else
|
||||
GoodJob::Job.queued.where(job_class: job_class).order(created_at: :asc)
|
||||
end
|
||||
|
||||
relation.find_each(batch_size: 1) do |job|
|
||||
job = T.cast(job, GoodJob::Job)
|
||||
|
||||
# Get the actual job instance and deserialize arguments
|
||||
serialized_args = job.serialized_params["arguments"]
|
||||
if serialized_args.nil?
|
||||
puts "No arguments found for job #{job.id}"
|
||||
next
|
||||
end
|
||||
|
||||
deserialized_args = ActiveJob::Arguments.deserialize(serialized_args)
|
||||
job_instance = job.job_class.constantize.new
|
||||
job_instance.deserialize(job.serialized_params)
|
||||
|
||||
puts "Running job #{job.id} (#{job.job_class})"
|
||||
|
||||
# Create execution record
|
||||
execution =
|
||||
GoodJob::Execution.create!(
|
||||
active_job_id: job.active_job_id,
|
||||
job_class: job.job_class,
|
||||
queue_name: job.queue_name,
|
||||
serialized_params: job.serialized_params,
|
||||
scheduled_at: job.scheduled_at,
|
||||
created_at: Time.now,
|
||||
updated_at: Time.now,
|
||||
process_id: SecureRandom.uuid,
|
||||
)
|
||||
|
||||
start_time = Time.now
|
||||
|
||||
# Temporarily disable concurrency limits
|
||||
job_class = job.job_class.constantize
|
||||
old_config = job_class.good_job_concurrency_config
|
||||
job_class.good_job_concurrency_config = { total_limit: nil }
|
||||
|
||||
begin
|
||||
# Perform the job with deserialized arguments
|
||||
GoodJob::CurrentThread.job = job
|
||||
job.update!(performed_at: Time.now)
|
||||
job_instance.arguments = deserialized_args
|
||||
job_instance.perform_now
|
||||
|
||||
# Update execution and job records
|
||||
execution.update!(
|
||||
finished_at: Time.now,
|
||||
error: nil,
|
||||
error_event: nil,
|
||||
duration: Time.now - start_time,
|
||||
)
|
||||
job.update!(finished_at: Time.now)
|
||||
puts "Job completed successfully"
|
||||
rescue => e
|
||||
puts "Job failed: #{e.message}"
|
||||
# Update execution and job records with error
|
||||
execution.update!(
|
||||
finished_at: Time.now,
|
||||
error: e.message,
|
||||
error_event: "execution_failed",
|
||||
error_backtrace: e.backtrace,
|
||||
duration: Time.now - start_time,
|
||||
)
|
||||
job.update!(
|
||||
error: "#{e.class}: #{e.message}",
|
||||
error_event: "execution_failed",
|
||||
)
|
||||
raise e
|
||||
ensure
|
||||
job.update!(
|
||||
executions_count: GoodJob::Execution.where(active_job_id: job.id).count,
|
||||
)
|
||||
# Restore original concurrency config
|
||||
job_class.good_job_concurrency_config = old_config
|
||||
GoodJob::CurrentThread.job = nil
|
||||
end
|
||||
|
||||
if limit
|
||||
limit -= 1
|
||||
if limit.zero?
|
||||
puts "limit reached"
|
||||
break
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
task fix_removed_fa_posts: :environment do
|
||||
colorize_state = ->(state) do
|
||||
case state
|
||||
when "ok"
|
||||
"ok".green
|
||||
when "removed"
|
||||
"removed".red
|
||||
else
|
||||
state.to_s
|
||||
end.bold
|
||||
end
|
||||
|
||||
last_fa_id = ENV["start_at"]&.to_i
|
||||
while true
|
||||
query =
|
||||
Domain::Post::FaPost
|
||||
.where(state: "removed")
|
||||
.where.not(title: nil)
|
||||
.order(fa_id: :desc)
|
||||
query = query.where(fa_id: ...last_fa_id) if last_fa_id
|
||||
post = query.first
|
||||
break unless post
|
||||
last_fa_id = post.fa_id
|
||||
|
||||
puts "[before] [post.state: #{colorize_state.call(post.state)}] [post.file.id: #{post.file&.id}] [post.id: #{post.id}] [post.fa_id: #{post.fa_id}] [post.title: #{post.title}]"
|
||||
Domain::Fa::Job::ScanPostJob.perform_now(post: post, force_scan: true)
|
||||
post.reload
|
||||
puts "[after] [post.state: #{colorize_state.call(post.state)}] [post.file.id: #{post.file&.id}] [post.id: #{post.id}] [post.fa_id: #{post.fa_id}] [post.title: #{post.title}]"
|
||||
sleep 2
|
||||
end
|
||||
rescue => e
|
||||
puts "error: #{e.message}"
|
||||
binding.pry
|
||||
end
|
||||
|
||||
task run_fa_user_avatar_jobs: :environment do
|
||||
avatars =
|
||||
Domain::UserAvatar
|
||||
.where(state: "pending")
|
||||
.joins(:user)
|
||||
.where(user: { type: Domain::User::FaUser.name })
|
||||
|
||||
puts "count: #{avatars.count}"
|
||||
|
||||
avatars.each do |avatar|
|
||||
Domain::Fa::Job::UserAvatarJob.perform_now(avatar:)
|
||||
avatar.reload
|
||||
puts "perform avatar job for #{avatar.user.url_name} - #{avatar.state.bold}"
|
||||
end
|
||||
end
|
||||
|
||||
task create_post_file_fingerprints: %i[environment set_logger_stdout] do
|
||||
task = Tasks::CreatePostFileFingerprintsTask.new
|
||||
|
||||
mode =
|
||||
if ENV["post_file_descending"].present?
|
||||
Tasks::CreatePostFileFingerprintsTask::Mode::PostFileDescending
|
||||
elsif ENV["posts_descending"].present?
|
||||
Tasks::CreatePostFileFingerprintsTask::Mode::PostsDescending
|
||||
elsif ENV["user"].present?
|
||||
Tasks::CreatePostFileFingerprintsTask::Mode::User
|
||||
elsif ENV["users_descending"].present?
|
||||
Tasks::CreatePostFileFingerprintsTask::Mode::UsersDescending
|
||||
else
|
||||
raise "need one of: post_file_descending, posts_descending, user, users_descending"
|
||||
end
|
||||
|
||||
task.run(mode: mode, user_param: ENV["user"], start_at: ENV["start_at"])
|
||||
end
|
||||
|
||||
task enqueue_pending_post_files: :environment do
|
||||
query = Domain::PostFile.where(state: "pending")
|
||||
puts "enqueueing #{query.count} pending post files"
|
||||
query.find_in_batches(batch_size: 100, start: ENV["start_at"]) do |batch|
|
||||
while (
|
||||
queue_size =
|
||||
GoodJob::Job.where(
|
||||
job_class: "Job::PostFileJob",
|
||||
performed_at: nil,
|
||||
scheduled_at: nil,
|
||||
error: nil,
|
||||
).count
|
||||
) > 100
|
||||
puts "queue size: #{queue_size}"
|
||||
sleep 10
|
||||
end
|
||||
batch.each do |post_file|
|
||||
Job::PostFileJob.set(priority: 10).perform_later(post_file:)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
task find_post_files_with_empty_response: :environment do
|
||||
query =
|
||||
Domain::PostFile
|
||||
.where(state: "ok", retry_count: 0)
|
||||
.joins(:log_entry)
|
||||
.where(http_log_entries: { response_sha256: BlobFile::EMPTY_FILE_SHA256 })
|
||||
|
||||
pb = ProgressBar.create(total: query.count, format: "%t: %c/%C %B %p%% %a %e")
|
||||
|
||||
query.find_each(batch_size: 10) do |post_file|
|
||||
# puts "post_file: #{post_file.id} / '#{post_file.post.to_param}'"
|
||||
post_file.state_pending!
|
||||
post_file.save!
|
||||
Job::PostFileJob.perform_now(post_file:)
|
||||
pb.progress = [pb.progress + 1, pb.total].min
|
||||
end
|
||||
end
|
||||
|
||||
desc "Enqueue pending post file jobs"
|
||||
task enqueue_pending_post_file_jobs: :environment do
|
||||
Tasks::EnqueueDuePostFileJobsTask.new.run
|
||||
end
|
||||
|
||||
desc "Compute null counter caches for all users"
|
||||
task compute_null_user_counter_caches: :environment do
|
||||
counter_caches = {
|
||||
user_post_creations_count: :user_post_creations,
|
||||
user_post_favs_count: :user_post_favs,
|
||||
user_user_follows_from_count: :user_user_follows_from,
|
||||
user_user_follows_to_count: :user_user_follows_to,
|
||||
}
|
||||
|
||||
query =
|
||||
Domain::User.where(
|
||||
counter_caches.map { |col, _| "(\"#{col}\" IS NULL)" }.join(" OR "),
|
||||
)
|
||||
total = query.count
|
||||
query = query.select(:id, *counter_caches.keys)
|
||||
|
||||
puts "computing #{counter_caches.keys.join(", ")} for #{total} users"
|
||||
pb = ProgressBar.create(total:, format: "%t: %c/%C %B %p%% %a %e")
|
||||
|
||||
query.find_in_batches(batch_size: 32) do |batch|
|
||||
ReduxApplicationRecord.transaction do
|
||||
batch.each do |user|
|
||||
nil_caches =
|
||||
counter_caches.keys.filter { |cache| user.send(cache).nil? }
|
||||
Domain::User.reset_counters(
|
||||
user.id,
|
||||
*nil_caches.map { |col| counter_caches[col] },
|
||||
)
|
||||
pb.progress = [pb.progress + 1, total].min
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
puts "set proc title to #{ARGV.first}"
|
||||
Process.setproctitle(ARGV.first) if $0.split("/").last == "rake"
|
||||
|
||||
47
TODO.md
Normal file
@@ -0,0 +1,47 @@
|
||||
# Project TODO List
|
||||
|
||||
- [ ] Add bookmarking feature for posts across different domains
|
||||
- [ ] Add search feature to search FA descriptions, tags, E621 descriptions, tags
|
||||
- [x] Get inkbunny index scan job working
|
||||
- [x] Attach logs to jobs, page to view jobs and their logs
|
||||
- [ ] Standardize all the embeddings tables to use the same schema (item_id, embedding)
|
||||
- [ ] Bluesky scraper
|
||||
- [x] Download favs / votes for E621 users
|
||||
- [ ] Automatically enqueue jobs for FA users to do incremental scans of profiles
|
||||
- [ ] Fix FA posts that start with "Font size adjustment: smallerlarger"
|
||||
- [ ] Convert logger .prefix=... into .tagged(...)
|
||||
- [x] `make_tag` should be smart about the objects it takes
|
||||
- [ ] Convert all `state: string` attributes to enums in ActiveRecord models
|
||||
- [ ] Create `belongs_to_log_entry` macro for ActiveRecord models
|
||||
- [x] Use StaticFileJobHelper for Domain::Fa::Job::ScanFileJob
|
||||
- [ ] Unify HTTP client configs for all domains, so the same job type can be used for different domains
|
||||
- [ ] put abstract `external_url_for_view` in a module
|
||||
- [ ] backfill descriptions on inkbunny posts
|
||||
- [ ] store deep update json on inkbunny posts
|
||||
- [x] limit number of users, or paginate for "users who favorited this post" page
|
||||
- [ ] manual good job runner does not indicate if the job threw an exception - check return value of #perform, maybe?
|
||||
- [ ] FA user favs job should stop when in incremental mode when all posts on the page are already known favs (e.g. pages with only 47 posts are not a false positive)
|
||||
- [x] Factor out FA listings page enqueue logic into common location; use in Gallery and Favs jobs
|
||||
- [ ] Add followers / following to FA user show page
|
||||
- [x] Parse E621 source url for inkbunny posts & users
|
||||
- [x] Parse E621 source url for fa users
|
||||
- [ ] Parse BBCode in post descriptions
|
||||
- example post with bbcode: https://refurrer.com/posts/ib/3452498
|
||||
- [ ] Show tags on fa posts, ib posts
|
||||
- [ ] Sofurry implmentation
|
||||
- [ ] Make unified Static file job
|
||||
- [ ] Make unified Avatar file job
|
||||
- [ ] ko-fi domain icon
|
||||
- [ ] tumblr domain icon
|
||||
- [ ] Do PCA on user factors table to display a 2D plot of users
|
||||
- [ ] Use links found in descriptions to indicate re-scanning a post? (e.g. for comic next/prev links)
|
||||
- [ ] fix for IDs that have a dot in them - e.g. https://refurrer.com/users/fa@jakke.
|
||||
- [ ] Rich inline links to e621 e.g. https://refurrer.com/posts/fa@60070060
|
||||
- [ ] Find FaPost that have favs recorded but no scan / file, enqueue scan
|
||||
- [x] Bunch of posts with empty responses: posts = Domain::Post.joins(files: :log_entry).where(files: { http_log_entries: { response_sha256: BlobFile::EMPTY_FILE_SHA256 }}).limit(10)
|
||||
- [ ] Create GlobalState entries for last FA id on browse page, periodic scan to scan from the newest FA ID to the stored one
|
||||
- [ ] GlobalState entries for long running backfill jobs, automatically restart them if they fail
|
||||
- [ ] Flag to pass to jobs to log HTTP requests / responses to a directory, HTTP mock helper to read from that directory
|
||||
- [ ] fix IP address incorrect for Cloudflare proxied requests
|
||||
- [ ] SOCKS5 proxy for additional workers
|
||||
- [ ] Backup FA scraper using foxbot & g6jy5jkx466lrqojcngbnksugrcfxsl562bzuikrka5rv7srgguqbjid.onion
|
||||
@@ -1,4 +1,5 @@
|
||||
//= link_tree ../images
|
||||
//= link_directory ../stylesheets .css
|
||||
//= link_tree ../../javascript .js
|
||||
//= link_tree ../../../vendor/javascript .js
|
||||
//= link_tree ../builds
|
||||
//= link good_job_custom.css
|
||||
|
||||
3
app/assets/images/arrow-top-right-on-square.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" class="w-6 h-6">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" d="M13.5 6H5.25A2.25 2.25 0 003 8.25v10.5A2.25 2.25 0 005.25 21h10.5A2.25 2.25 0 0018 18.75V10.5m-10.5 6L21 3m0 0h-5.25M21 3v5.25" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 326 B |
BIN
app/assets/images/domain-icons/aethy.png
Normal file
|
After Width: | Height: | Size: 28 KiB |
BIN
app/assets/images/domain-icons/bigcartel.png
Normal file
|
After Width: | Height: | Size: 1.8 KiB |
BIN
app/assets/images/domain-icons/bluesky.png
Normal file
|
After Width: | Height: | Size: 7.6 KiB |
BIN
app/assets/images/domain-icons/boosty.png
Normal file
|
After Width: | Height: | Size: 14 KiB |
BIN
app/assets/images/domain-icons/bsky.png
Normal file
|
After Width: | Height: | Size: 7.6 KiB |
BIN
app/assets/images/domain-icons/carrd.png
Normal file
|
After Width: | Height: | Size: 5.1 KiB |
BIN
app/assets/images/domain-icons/deviantart.png
Normal file
|
After Width: | Height: | Size: 1.6 KiB |
BIN
app/assets/images/domain-icons/e621.png
Normal file
|
After Width: | Height: | Size: 5.0 KiB |
BIN
app/assets/images/domain-icons/fa.png
Normal file
|
After Width: | Height: | Size: 8.2 KiB |
BIN
app/assets/images/domain-icons/gumroad.png
Normal file
|
After Width: | Height: | Size: 3.7 KiB |
BIN
app/assets/images/domain-icons/inkbunny.png
Normal file
|
After Width: | Height: | Size: 15 KiB |
BIN
app/assets/images/domain-icons/itaku.png
Normal file
|
After Width: | Height: | Size: 3.7 KiB |
BIN
app/assets/images/domain-icons/itch-io.png
Normal file
|
After Width: | Height: | Size: 2.2 KiB |
BIN
app/assets/images/domain-icons/ko-fi.png
Normal file
|
After Width: | Height: | Size: 1.1 KiB |
BIN
app/assets/images/domain-icons/linktree.png
Normal file
|
After Width: | Height: | Size: 1.3 KiB |
BIN
app/assets/images/domain-icons/newgrounds.png
Normal file
|
After Width: | Height: | Size: 797 B |
BIN
app/assets/images/domain-icons/patreon.png
Normal file
|
After Width: | Height: | Size: 772 B |
BIN
app/assets/images/domain-icons/pixiv.png
Normal file
|
After Width: | Height: | Size: 678 B |
BIN
app/assets/images/domain-icons/redbubble.png
Normal file
|
After Width: | Height: | Size: 7.6 KiB |
BIN
app/assets/images/domain-icons/sofurry.png
Normal file
|
After Width: | Height: | Size: 18 KiB |
1
app/assets/images/domain-icons/sorbet/rbi/dsl/.gitattributes
vendored
Normal file
@@ -0,0 +1 @@
|
||||
**/*.rbi linguist-generated=true
|
||||
23
app/assets/images/domain-icons/sorbet/rbi/dsl/active_support/callbacks.rbi
generated
Normal file
@@ -0,0 +1,23 @@
|
||||
# typed: true
|
||||
|
||||
# DO NOT EDIT MANUALLY
|
||||
# This is an autogenerated file for dynamic methods in `ActiveSupport::Callbacks`.
|
||||
# Please instead update this file by running `bin/tapioca dsl ActiveSupport::Callbacks`.
|
||||
|
||||
|
||||
module ActiveSupport::Callbacks
|
||||
include GeneratedInstanceMethods
|
||||
|
||||
mixes_in_class_methods GeneratedClassMethods
|
||||
|
||||
module GeneratedClassMethods
|
||||
def __callbacks; end
|
||||
def __callbacks=(value); end
|
||||
def __callbacks?; end
|
||||
end
|
||||
|
||||
module GeneratedInstanceMethods
|
||||
def __callbacks; end
|
||||
def __callbacks?; end
|
||||
end
|
||||
end
|
||||
BIN
app/assets/images/domain-icons/spreadshirt.png
Normal file
|
After Width: | Height: | Size: 15 KiB |
BIN
app/assets/images/domain-icons/subscribestar.png
Normal file
|
After Width: | Height: | Size: 3.8 KiB |
BIN
app/assets/images/domain-icons/telegram.png
Normal file
|
After Width: | Height: | Size: 9.3 KiB |
BIN
app/assets/images/domain-icons/trello.png
Normal file
|
After Width: | Height: | Size: 537 B |
BIN
app/assets/images/domain-icons/tumblr.png
Normal file
|
After Width: | Height: | Size: 1.4 KiB |
BIN
app/assets/images/domain-icons/weasyl.png
Normal file
|
After Width: | Height: | Size: 11 KiB |
BIN
app/assets/images/domain-icons/x-twitter.png
Normal file
|
After Width: | Height: | Size: 4.1 KiB |
BIN
app/assets/images/furecs/furecs-screenshot-2.png
Normal file
|
After Width: | Height: | Size: 114 KiB |
BIN
app/assets/images/furecs/furecs-screenshot.png
Normal file
|
After Width: | Height: | Size: 123 KiB |
19
app/assets/images/generic-domain.svg
Normal file
@@ -0,0 +1,19 @@
|
||||
<svg
|
||||
width="16"
|
||||
height="16"
|
||||
viewBox="0 0 16 16"
|
||||
fill="none"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<!-- Background circle -->
|
||||
<circle cx="8" cy="8" r="7" fill="#E0E0E0" />
|
||||
|
||||
<!-- Stylized "www" text -->
|
||||
<path
|
||||
d="M4 8.5C4 6.5 5 5.5 6 5.5C7 5.5 8 6.5 8 8.5C8 6.5 9 5.5 10 5.5C11 5.5 12 6.5 12 8.5"
|
||||
stroke="#666666"
|
||||
stroke-width="1.5"
|
||||
stroke-linecap="round"
|
||||
fill="none"
|
||||
/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 414 B |
BIN
app/assets/images/refurrer-logo-icon.png
Normal file
|
After Width: | Height: | Size: 3.2 KiB |
BIN
app/assets/images/refurrer-logo-md.png
Normal file
|
After Width: | Height: | Size: 17 KiB |
BIN
app/assets/images/refurrer-logo.png
Normal file
|
After Width: | Height: | Size: 325 KiB |
3
app/assets/images/user-circle.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="1.5" stroke="currentColor" class="w-6 h-6">
|
||||
<path stroke-linecap="round" stroke-linejoin="round" d="M15.75 6a3.75 3.75 0 11-7.5 0 3.75 3.75 0 017.5 0zM4.501 20.118a7.5 7.5 0 0114.998 0A17.933 17.933 0 0112 21.75c-2.676 0-5.216-.584-7.499-1.632z" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 344 B |
@@ -1,49 +0,0 @@
|
||||
/*
|
||||
* This is a manifest file that'll be compiled into application.css, which will include all the files
|
||||
* listed below.
|
||||
*
|
||||
* Any CSS (and SCSS, if configured) file within this directory, lib/assets/stylesheets, or any plugin's
|
||||
* vendor/assets/stylesheets directory can be referenced here using a relative path.
|
||||
*
|
||||
* You're free to add application-wide styles to this file and they'll appear at the bottom of the
|
||||
* compiled file so the styles you add here take precedence over styles defined in any other CSS
|
||||
* files in this directory. Styles in this file should be added after the last require_* statement.
|
||||
* It is generally better to create a new file per style scope.
|
||||
*
|
||||
*= require_tree .
|
||||
*= require_self
|
||||
*/
|
||||
html,
|
||||
body {
|
||||
height: 100%;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
body {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
margin: 0 2em;
|
||||
}
|
||||
|
||||
.content-container {
|
||||
flex-grow: 1;
|
||||
margin: 1em 0;
|
||||
min-height: 512px;
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.image-container {
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
margin: 0 auto;
|
||||
position: absolute;
|
||||
}
|
||||
|
||||
.image-container .media {
|
||||
position: absolute;
|
||||
left: 50%;
|
||||
transform: translate(-50%);
|
||||
max-height: 100%;
|
||||
max-width: 100%;
|
||||
box-shadow: 0 0 5px 1px black;
|
||||
}
|
||||
61
app/assets/stylesheets/application.tailwind.css
Normal file
@@ -0,0 +1,61 @@
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
.animated-shadow-sky {
|
||||
@apply shadow-lg;
|
||||
}
|
||||
|
||||
.sky-section {
|
||||
@apply divide-y divide-slate-300 overflow-hidden border border-slate-300 bg-slate-100 sm:rounded-lg;
|
||||
}
|
||||
|
||||
.section-header {
|
||||
@apply px-4 py-3 font-medium text-slate-900;
|
||||
}
|
||||
|
||||
.sky-section-header {
|
||||
@apply px-4 py-3 font-medium text-slate-900;
|
||||
}
|
||||
|
||||
.sky-link {
|
||||
@apply text-sky-600 underline decoration-dotted transition-colors hover:text-sky-800;
|
||||
}
|
||||
|
||||
.blue-link {
|
||||
@apply text-blue-600 transition-colors hover:text-blue-800 hover:underline;
|
||||
}
|
||||
|
||||
.scroll-shadows {
|
||||
background:
|
||||
/* Shadow Cover TOP */
|
||||
linear-gradient(white 30%, rgba(255, 255, 255, 0)) center top,
|
||||
/* Shadow Cover BOTTOM */ linear-gradient(rgba(255, 255, 255, 0), white 70%)
|
||||
center bottom,
|
||||
/* Shadow TOP */
|
||||
linear-gradient(to bottom, rgba(0, 0, 0, 0.1), rgba(0, 0, 0, 0)) center
|
||||
top,
|
||||
/* Shadow BOTTOM */
|
||||
linear-gradient(to top, rgba(0, 0, 0, 0.1), rgba(0, 0, 0, 0)) center
|
||||
bottom;
|
||||
|
||||
background-repeat: no-repeat;
|
||||
background-size:
|
||||
100% 20px,
|
||||
100% 20px,
|
||||
100% 10px,
|
||||
100% 10px;
|
||||
background-attachment: local, local, scroll, scroll;
|
||||
}
|
||||
|
||||
.log-entry-table-header-cell {
|
||||
@apply bg-slate-50 py-1 text-xs font-medium uppercase tracking-wider text-slate-500;
|
||||
}
|
||||
|
||||
.log-entry-table-row-cell {
|
||||
@apply flex items-center py-1 text-sm;
|
||||
}
|
||||
|
||||
.rich-text-content blockquote {
|
||||
@apply my-4 border-s-4 border-gray-300 bg-slate-200 p-4 italic leading-relaxed;
|
||||
}
|
||||
131
app/assets/stylesheets/good_job_custom.css
Normal file
@@ -0,0 +1,131 @@
|
||||
/* ANSI Colors */
/* Classes mapping ANSI terminal escape codes to CSS for rendered job logs. */
.ansi-bold {
  font-weight: bold;
}
.ansi-black {
  color: #333333;
}
.ansi-red {
  color: #cd3333;
}
.ansi-green {
  color: #33cd33;
}
.ansi-yellow {
  color: #cdcd33;
}
.ansi-blue {
  color: #3333ee;
}
.ansi-magenta {
  color: #cd33cd;
}
.ansi-cyan {
  color: #33cdcd;
}
.ansi-white {
  color: #e5e5e5;
}

/* Bright variants */
.ansi-bright-black {
  color: #7f7f7f;
}
.ansi-bright-red {
  color: #990000;
}
.ansi-bright-green {
  color: #009900;
}
.ansi-bright-yellow {
  color: #999900;
}
.ansi-bright-blue {
  color: #5c5c99;
}
.ansi-bright-magenta {
  color: #990099;
}
.ansi-bright-cyan {
  color: #009999;
}
.ansi-bright-white {
  color: #999999;
}

/* UUID column in log lines; truncated with an ellipsis when narrow. */
.log-uuid {
  min-width: 20px;
  max-width: 100px;
  overflow: hidden;
  text-overflow: ellipsis;
}

/* All log lines container */
.good-job-log-lines {
  overflow-x: auto;
}

/* Single log line container */
.good-job-log-line {
  font-family: monospace;
  font-size: 0.8rem;
  line-height: 1;
  margin: 2px 0;
  padding: 2px 4px;
  display: flex;
  white-space: nowrap;
  width: max-content; /* Make width match the content width */
}

/* Row highlight on hover. */
.good-job-log-line:hover {
  background-color: #ccc;
}

/* Preserve whitespace inside each log-line segment. */
.good-job-log-line > span {
  display: inline-block;
  white-space: pre;
}

/* Container for a single job execution's log output. */
.good-job-execution-log {
  color: #333;
  background: #f0f0f0;
}

/* Link text truncated with an ellipsis past 300px. */
.text-truncate-link {
  display: inline-block;
  max-width: 300px;
  overflow: hidden;
  text-overflow: ellipsis;
}

/* Argument-name cells never wrap. */
.good-job-arg-name {
  white-space: nowrap;
}

/* Two-column name/value grid for job arguments. */
.good-job-arg-grid {
  display: grid;
  grid-template-columns: auto 1fr;
}

.good-job-arg-value,
.good-job-arg-name {
  padding: 0.35em 0.4em;
}

.good-job-arg-name,
.good-job-arg-value {
  border-bottom: 1px solid #e0e0e0;
}

/* A "row" whose cells participate directly in the parent grid. */
.good-job-arg-row {
  display: contents;
}

/* Highlight every cell of the hovered row. */
.good-job-arg-row:hover > * {
  background-color: #ccc;
}

/* This ensures the last row doesn't have a bottom border */
.good-job-arg-grid .good-job-arg-row:last-child * {
  border-bottom: none;
}
|
||||
@@ -1,3 +1,4 @@
|
||||
# typed: strict
|
||||
module ApplicationCable
|
||||
class Channel < ActionCable::Channel::Base
|
||||
end
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# typed: strict
|
||||
module ApplicationCable
|
||||
class Connection < ActionCable::Connection::Base
|
||||
end
|
||||
|
||||
84
app/controllers/admin/proxy_controller.rb
Normal file
@@ -0,0 +1,84 @@
|
||||
# typed: true
|
||||
# frozen_string_literal: true
|
||||
# Reverse-proxies authenticated admin traffic to the internal Grafana and
# Prometheus instances, rewriting redirect Location headers so the upstream
# services appear to live under /grafana and /prometheus respectively.
class Admin::ProxyController < ApplicationController
  before_action :authenticate_user!
  before_action :require_admin!
  # CSRF tokens cannot be supplied by the proxied UIs, so skip verification
  # for the proxy endpoints only.
  skip_before_action :verify_authenticity_token, only: %i[grafana prometheus]

  # Proxies /grafana/* to the internal Grafana host.
  def grafana
    fullpath =
      "http://grafana:3100#{request.fullpath.delete_prefix("/grafana")}"
    proxy_response(fullpath, "/grafana")
  end

  # Proxies /prometheus/* to the internal Prometheus host.
  def prometheus
    fullpath = "http://prometheus:9090#{request.fullpath.delete_prefix("/prometheus")}"
    proxy_response(fullpath, "/prometheus")
  end

  private

  # Rejects non-admin users before any proxying happens.
  def require_admin!
    unless current_user&.admin?
      redirect_to root_path, alert: "You are not authorized to access this area"
    end
  end

  # Headers sent upstream. Grafana runs with auth-proxy login enabled, so
  # X-WEBAUTH-USER selects the Grafana account ("admin" for anyone who
  # passed require_admin!).
  def grafana_proxy_headers
    { "X-WEBAUTH-USER" => "admin" }.merge(proxy_headers)
  end

  # Standard forwarding headers copied from the inbound request.
  # (A trailing zero-argument `.merge` — a no-op that allocated a needless
  # hash copy — was removed here.)
  def proxy_headers
    {
      "X-Forwarded-Host" => request.host_with_port,
      "X-Forwarded-Proto" => request.ssl? ? "https" : "http",
      "X-Forwarded-For" => request.remote_ip,
      "Host" => request.host,
      "Connection" => request.headers["Connection"],
      "Upgrade" => request.headers["Upgrade"],
      "Accept" => request.headers["Accept"],
      "Cookie" => request.headers["Cookie"],
      "Content-Type" => request.headers["Content-Type"],
      "Content-Length" => request.headers["Content-Length"],
    }
  end

  # True when the inbound request is attempting a WebSocket upgrade.
  def websocket_request?
    request.headers["Connection"]&.include?("upgrade")
  end

  # Fetches +fullpath+ from the upstream service using the same HTTP method
  # as the inbound request, then replays the upstream response to the
  # client, rewriting redirects so they stay under +prefix+.
  def proxy_response(fullpath, prefix)
    method = request.method.downcase.to_s
    response =
      if method == "post"
        HTTP.headers(grafana_proxy_headers).send(method, fullpath, body: request.raw_post)
      else
        HTTP.headers(grafana_proxy_headers).send(method, fullpath)
      end

    headers = response.headers.to_h

    # Handle redirects by rewriting the Location header
    if response.code.in?([301, 302, 303, 307, 308]) &&
         headers["Location"].present?
      location = headers["Location"]
      # Strip the host from absolute URLs
      location = location.gsub(%r{^https?://[^/]+}, "")
      # Add our prefix to relative URLs
      location = "#{prefix}#{location}" if location.start_with?("/")
      headers["Location"] = location
    end

    # Pass through the response with all headers; Content-Type is supplied
    # via render's dedicated option rather than the raw header hash.
    response_headers = headers.except("Content-Type")

    render_args = {
      body: response.body.to_s,
      status: response.code,
      content_type: headers["Content-Type"],
      headers: response_headers,
    }
    render_args[:location] = headers["Location"] if headers["Location"]

    render render_args
  end
end
|
||||
@@ -1,2 +1,42 @@
|
||||
# typed: true
|
||||
# Base controller: wires up Sorbet sigs, Pundit authorization, Devise
# authentication, IP-based roles, and per-request profiling.
class ApplicationController < ActionController::Base
  extend T::Sig
  extend T::Helpers
  include Pundit::Authorization
  # NOTE(review): including a ClassMethods module as *instance* methods is
  # unusual — confirm this was not meant to be `extend`.
  include Devise::Controllers::Helpers::ClassMethods

  # Role granted to the requesting IP address, if any (memoized per request).
  sig { returns(T.nilable(IpAddressRole)) }
  def current_ip_address_role
    @current_ip_address_role ||= IpAddressRole.for_ip(request.remote_ip)
  end
  helper_method :current_ip_address_role

  # Pundit consults the signed-in user first, falling back to any IP-based
  # role for anonymous requests.
  sig { returns(T.nilable(T.any(User, IpAddressRole))) }
  def pundit_user
    current_user || current_ip_address_role
  end

  before_action do
    # MiniProfiler is only authorized outside production environments.
    if Rails.env.development? || Rails.env.staging?
      Rack::MiniProfiler.authorize_request
    end
  end

  before_action :authenticate_user!

  # Pundit authorization error handling
  rescue_from Pundit::NotAuthorizedError, with: :user_not_authorized

  protected

  # Shared client for pushing metrics to the Prometheus exporter.
  def prometheus_client
    PrometheusExporter::Client.default
  end

  private

  # Redirects back (or to root) with a flash when Pundit denies an action.
  def user_not_authorized
    flash[:alert] = "You are not authorized to perform this action."
    redirect_back(fallback_location: root_path)
  end
end
|
||||
|
||||
246
app/controllers/blob_entries_controller.rb
Normal file
@@ -0,0 +1,246 @@
|
||||
# typed: strict
|
||||
# Serves raw blob files and on-the-fly generated thumbnails, addressed by
# SHA-256 content hash. Because content is immutable per hash, responses use
# aggressive HTTP caching (Expires header plus a strong ETag).
class BlobEntriesController < ApplicationController
  skip_before_action :authenticate_user!, only: [:show]

  # GET /blob_entries/:sha256(.:format)?thumb=SIZE
  # Renders either the original blob or a thumbnail variant.
  sig { void }
  def show
    thumb = params[:thumb]
    if thumb.present? && !thumb_params(thumb)
      raise ActionController::BadRequest.new("invalid thumbnail #{thumb}")
    end

    # Thumbnails can be regenerated cheaply, so they expire sooner than the
    # immutable original blobs.
    if thumb.present?
      expires_dur = 1.week
    else
      expires_dur = 1.year
    end
    response.headers["Expires"] = expires_dur.from_now.httpdate
    expires_in expires_dur, public: true

    # Content is fully identified by the ETag (hash + variant + format);
    # last_modified is a constant placeholder.
    unless stale?(
             last_modified: Time.at(0),
             strong_etag: strong_etag_for_request,
           )
      return
    end

    sha256 = T.let(params[:sha256], String)
    raise ActionController::BadRequest.new("no file specified") if sha256.blank?

    raise ActiveRecord::RecordNotFound unless show_blob_file(sha256, thumb)
  end

  private

  # Sends either the original blob (thumb.nil?) or a generated thumbnail.
  # Returns false when the blob is unknown so the caller can 404.
  sig { params(sha256: String, thumb: T.nilable(String)).returns(T::Boolean) }
  def show_blob_file(sha256, thumb)
    if thumb
      # Renamed from `thumb_params` to avoid shadowing the method of the
      # same name.
      dimensions = thumb_params(thumb)
      if dimensions.nil?
        raise ActionController::BadRequest.new("invalid thumbnail: #{thumb}")
      end

      # if the requested format is gif, and the thumbnail type is
      # content-container, we want to thumbnail the gif into another gif.
      # Else, always thumbnail into a jpeg.
      file_ext = "jpeg"
      if params[:format] == "gif" && thumb == "content-container"
        file_ext = "gif"
      end

      # content-container may be pre-thumbnailed, see if the file is on the disk
      if thumb == "content-container" && file_ext == "jpeg"
        thumbnail_path =
          Domain::PostFile::Thumbnail.absolute_file_path(
            sha256,
            "content_container",
            0,
          )
        if File.exist?(thumbnail_path)
          send_file(thumbnail_path, type: "image/jpeg", disposition: "inline")
          return true
        end
      end

      width, height = dimensions
      filename = "thumb-#{sha256}-#{thumb}.#{file_ext}"
      # BUGFIX: the cache key previously contained no interpolation (a broken
      # "#(...)" literal), so every thumbnail request shared one cache entry.
      # `filename` uniquely encodes hash, variant, and output format.
      cache_key = "vips:#{filename}"
      thumb_data =
        Rack::MiniProfiler.step("vips: load from cache") do
          Rails
            .cache
            .fetch(cache_key, expires_in: 1.day) do
              blob_file = BlobFile.find_by(sha256: HexUtil.hex2bin(sha256))
              if blob_file
                content_type =
                  blob_file.content_type || "application/octet-stream"
                if helpers.is_renderable_video_type?(content_type)
                  thumbnail_video_file(blob_file, width, height, file_ext)
                elsif helpers.is_renderable_image_type?(content_type)
                  thumbnail_image_file(blob_file, width, height, file_ext)
                end
              end
            end
        end

      # A nil result means the blob is missing or un-thumbnailable; drop the
      # cached nil so a later upload can succeed.
      if !thumb_data
        Rails.cache.delete(cache_key)
        return false
      end

      send_data(
        thumb_data[0],
        type: thumb_data[1],
        disposition: "inline",
        filename: filename,
      )
    else
      blob_file = BlobFile.find_by(sha256: HexUtil.hex2bin(sha256))
      return false if !blob_file

      content_type = blob_file.content_type || "application/octet-stream"
      send_file(
        blob_file.absolute_file_path,
        type: content_type,
        disposition: "inline",
      )
    end

    return true
  end

  # Extracts a single preview frame from a video via ffmpegthumbnailer.
  # Returns [jpeg_bytes, "image/jpeg"], or nil if the external tool fails.
  # NOTE(review): `height` and `thumb` are currently unused here (the tool
  # only takes a single -s size); kept for signature parity with
  # thumbnail_image_file.
  sig do
    params(
      blob_file: BlobFile,
      width: Integer,
      height: Integer,
      thumb: String,
    ).returns(T.nilable([String, String]))
  end
  def thumbnail_video_file(blob_file, width, height, thumb)
    video_file = blob_file.absolute_file_path
    temp_thumb_file = Tempfile.new(%w[video-thumb .png])
    process_result =
      system(
        "ffmpegthumbnailer",
        "-f", # overlay video strip indicator
        "-i",
        video_file,
        "-o",
        T.must(temp_thumb_file.path),
        "-s",
        "#{width}",
        "-c",
        "jpeg",
      )
    if !process_result
      temp_thumb_file.unlink
      return nil
    end

    thumb_data_tmp = File.read(T.must(temp_thumb_file.path), mode: "rb")
    temp_thumb_file.unlink
    [thumb_data_tmp, "image/jpeg"]
  end

  # Returns a tuple of the thumbnail data and the content type
  sig do
    params(
      blob_file: BlobFile,
      width: Integer,
      height: Integer,
      file_ext: String,
    ).returns(T.nilable([String, String]))
  end
  def thumbnail_image_file(blob_file, width, height, file_ext)
    blob_file_path = blob_file.absolute_file_path

    if file_ext == "gif"
      VipsUtil.try_load_gif(
        blob_file_path,
        load_gif: -> do
          Rack::MiniProfiler.step("vips: load gif") do
            # Use libvips' gifload with n=-1 to load all frames
            image = Vips::Image.gifload(blob_file_path, n: -1)
            num_frames = image.get("n-pages")
            # Frames are stacked vertically, so per-frame height is
            # total height divided by the frame count.
            image_width, image_height = image.width, (image.height / num_frames)

            if width >= image_width && height >= image_height
              logger.info(
                "gif is already smaller than requested thumbnail size",
              )
              return File.binread(blob_file_path), "image/gif"
            end

            Rack::MiniProfiler.step("vips: thumbnail gif") do
              image = image.thumbnail_image(width, height: height)
              image_buffer =
                image.gifsave_buffer(
                  dither: 1,
                  effort: 1,
                  interframe_maxerror: 16,
                  interpalette_maxerror: 10,
                  interlace: true,
                )
              [image_buffer, "image/gif"]
            end
          end
        end,
        on_load_failed: ->(detected_content_type) do
          # The file claimed to be a gif but isn't; retry as the sniffed type.
          case detected_content_type
          when %r{image/png}
            thumbnail_image_file(blob_file, width, height, "png")
          when %r{image/jpeg}, %r{image/jpg}
            thumbnail_image_file(blob_file, width, height, "jpeg")
          else
            raise
          end
        end,
      )
    else
      # Original static image thumbnailing logic
      image_buffer =
        Rack::MiniProfiler.step("vips: load image") do
          T.unsafe(Vips::Image).thumbnail(
            blob_file.absolute_file_path,
            width,
            height: height,
          )
        end

      Rack::MiniProfiler.step("vips: thumbnail image") do
        logger.info("rendering thumbnail as jpeg")
        [
          T.let(image_buffer.jpegsave_buffer(interlace: true, Q: 95), String),
          "image/jpeg",
        ]
      end
    end
  end

  # Maps a thumbnail variant name to [width, height]; nil when unknown.
  sig { params(thumb: String).returns(T.nilable([Integer, Integer])) }
  def thumb_params(thumb)
    case thumb
    when "32-avatar"
      [32, 32]
    when "64-avatar"
      [64, 64]
    when "tiny"
      [100, 100]
    when "small"
      [400, 300]
    when "medium"
      [800, 600]
    when "content-container"
      [768, 2048]
    end
  end

  # ETag covering everything that can change the response bytes.
  sig { returns(String) }
  def strong_etag_for_request
    [params[:sha256], params[:thumb], params[:format]].compact.join("-")
  end
end
|
||||
@@ -1,212 +0,0 @@
|
||||
# JSON API used by browser tooling to inspect and enqueue FurAffinity scan
# jobs for posts and users.
class Domain::Fa::ApiController < ApplicationController
  skip_before_action :verify_authenticity_token,
    only: %i[ enqueue_objects object_statuses ]

  # Reports scan/download state for the given fa_ids and url_names, plus the
  # current depth of the "manual" queue grouped by job class.
  def object_statuses
    fa_ids = (params[:fa_ids] || []).map(&:to_i)
    url_names = (params[:url_names] || [])

    # Kick off the independent queries concurrently with load_async.
    jobs_async = Delayed::Backend::ActiveRecord::Job.
      select(:id, :queue, :handler).
      where(queue: "manual").
      load_async

    users_async = Domain::Fa::User.
      where(url_name: url_names).
      load_async

    fa_id_to_post = Domain::Fa::Post.
      includes(:file).
      where(fa_id: fa_ids).
      map do |post|
        [post.fa_id, post]
      end.to_h

    posts_response = {}
    users_response = {}

    fa_ids.each do |fa_id|
      post = fa_id_to_post[fa_id]

      post_response = {
        terminal_state: false,
        seen_at: time_ago_or_never(post&.created_at),
        scanned_at: "never",
        downloaded_at: "never",
      }

      if post
        post_response[:info_url] = domain_fa_post_url(fa_id: post.fa_id)
        post_response[:scanned_at] = time_ago_or_never(post.scanned_at)

        # "have_file" is terminal; nothing more to do for this post.
        if post.file.present?
          post_response[:downloaded_at] = time_ago_or_never(post.file.created_at)
          post_response[:state] = "have_file"
          post_response[:terminal_state] = true
        elsif post.scanned?
          post_response[:state] = "scanned_post"
        else
          post_response[:state] = post.state
        end
      else
        post_response[:state] = "not_seen"
      end

      posts_response[fa_id] = post_response
    end

    url_name_to_user = users_async.map do |user|
      [user.url_name, user]
    end.to_h

    url_names.each do |url_name|
      user = url_name_to_user[url_name]

      if user
        user_response = {
          created_at: time_ago_or_never(user.created_at),
          scanned_gallery_at: time_ago_or_never(user.scanned_gallery_at),
          scanned_page_at: time_ago_or_never(user.scanned_page_at),
        }
        # States list which scans are already satisfied (not due).
        states = []
        states << "page" unless user.due_for_page_scan?
        states << "gallery" unless user.due_for_gallery_scan?
        states << "seen" if states.empty?

        user_response[:state] = states.join(",")

        if user.scanned_gallery_at && user.scanned_page_at
          user_response[:terminal_state] = true
        end
      else
        user_response = {
          state: "not_seen",
          terminal_state: false,
        }
      end
      users_response[url_name] = user_response
    end

    queue_depths = Hash.new do |hash, key|
      hash[key] = 0
    end

    jobs_async.each do |job|
      queue_depths[job.payload_object.job_data["job_class"]] += 1
    end

    # "Domain::Fa::Job::ScanPostJob" -> "scan post" for display.
    queue_depths = queue_depths.map do |key, value|
      [key.
        delete_prefix("Domain::Fa::Job::").
        split("::").
        last.
        underscore.
        delete_suffix("_job").
        gsub("_", " "),
        value]
    end.to_h

    render json: {
      posts: posts_response,
      users: users_response,
      queues: {
        total_depth: queue_depths.values.sum,
        depths: queue_depths,
      },
    }
  end

  # Enqueues scan jobs for any of the given posts/users that still need
  # them, then returns per-job-class enqueue counts.
  def enqueue_objects
    @enqueue_counts ||= Hash.new { |h, k| h[k] = 0 }

    fa_ids = (params[:fa_ids] || []).map(&:to_i)
    url_names = (params[:url_names] || [])
    url_names_to_enqueue = Set.new(params[:url_names_to_enqueue] || [])

    fa_id_to_post = Domain::Fa::Post.includes(:file).where(fa_id: fa_ids).map do |post|
      [post.fa_id, post]
    end.to_h

    url_name_to_user = Domain::Fa::User.where(url_name: url_names).map do |user|
      [user.url_name, user]
    end.to_h

    fa_ids.each do |fa_id|
      post = fa_id_to_post[fa_id]
      defer_post_scan(post, fa_id)
    end

    url_names.each do |url_name|
      user = url_name_to_user[url_name]
      defer_user_scan(user, url_name, url_names_to_enqueue.include?(url_name))
    end

    enqueue_deferred!

    render json: {
      post_scans: @enqueue_counts[Domain::Fa::Job::ScanPostJob],
      post_files: @enqueue_counts[Domain::Fa::Job::ScanFileJob],
      user_pages: @enqueue_counts[Domain::Fa::Job::UserPageJob],
      user_galleries: @enqueue_counts[Domain::Fa::Job::UserGalleryJob],
    }
  end

  private

  # Defers a metadata scan and/or file download for fa_id as needed.
  # (An unreachable second `return` after the ScanFileJob deferral was
  # removed.)
  def defer_post_scan(post, fa_id)
    if !post || !post.scanned?
      defer_manual(Domain::Fa::Job::ScanPostJob, {
        fa_id: fa_id,
      }, -17)
    end

    if post && post.file_uri && !post.file.present?
      defer_manual(Domain::Fa::Job::ScanFileJob, {
        post: post,
      }, -15, "static_file")
    end
  end

  # Defers page and/or gallery scans for a user; highpri requests jump the
  # queue via a lower (more urgent) priority value.
  def defer_user_scan(user, url_name, highpri)
    if !user || user.due_for_page_scan?
      defer_manual(Domain::Fa::Job::UserPageJob, {
        url_name: url_name,
      }, highpri ? -16 : -6)
      return
    end

    if !user || user.due_for_gallery_scan?
      defer_manual(Domain::Fa::Job::UserGalleryJob, {
        url_name: url_name,
      }, highpri ? -14 : -4)
      return
    end

    false
  end

  # Records a job to enqueue later, deduping identical (klass, args,
  # priority) triples within this request.
  def defer_manual(klass, args, priority, queue = "manual")
    # BUGFIX: this dedupe set was previously a class variable (@@), which
    # persisted for the process lifetime and was shared across threads —
    # a job could never be re-enqueued until the server restarted. An
    # instance variable scopes the dedupe to a single request, matching
    # @deferred_jobs / @enqueue_counts.
    @enqueue_deduper ||= Set.new
    return unless @enqueue_deduper.add?([klass, args, priority])

    @deferred_jobs ||= []
    @deferred_jobs << [klass, args, priority, queue]
    @enqueue_counts[klass] += 1
  end

  # Drains the deferred job list, enqueueing each via ActiveJob.
  def enqueue_deferred!
    while job = (@deferred_jobs || []).shift
      klass, args, priority, queue = job
      klass.set(priority: priority, queue: queue).perform_later(args)
    end
  end

  # "3 minutes ago"-style string, or "never" for nil timestamps.
  def time_ago_or_never(time)
    if time
      helpers.time_ago_in_words(time, include_seconds: true) + " ago"
    else
      "never"
    end
  end
end
|
||||
@@ -1,127 +0,0 @@
|
||||
# Browsing and scan-enqueueing endpoints for FurAffinity posts.
class Domain::Fa::PostsController < ApplicationController
  # BUGFIX: the only: list previously contained `scan_post` twice.
  before_action :set_domain_fa_post,
    only: %i[ show scan_post ]

  skip_before_action :verify_authenticity_token,
    only: %i[ try_scan_post try_scan_posts ]

  # GET /domain/fa/posts
  def index
    @posts = Domain::Fa::Post.
      includes(:creator, :file).
      page(params[:page]).
      per(50).
      order(fa_id: :desc).
      without_count
  end

  # GET /domain/fa/posts/1
  def show
  end

  # Enqueues a scan for the current post and redirects back to it.
  def scan_post
    if try_enqueue_post_scan(@post, @post.fa_id)
      redirect_to domain_fa_post_path(@post.fa_id), notice: "Enqueued for scan"
    else
      redirect_to domain_fa_post_path(@post.fa_id), notice: "Already scanned"
    end
  end

  # JSON endpoint: enqueue a scan for one fa_id and report its state.
  def try_scan_post
    fa_id = params[:fa_id]&.to_i || raise("need fa_id parameter")
    post = Domain::Fa::Post.find_by_fa_id(fa_id)
    enqueued = try_enqueue_post_scan(post, fa_id)

    if post && post.file.present?
      state_string = "downloaded #{helpers.time_ago_in_words(post.file.created_at, include_seconds: true)} ago"
    elsif post && post.scanned?
      state_string = "scanned #{helpers.time_ago_in_words(post.scanned_at, include_seconds: true)} ago"
    else
      state_string = []
      if !post
        state_string << "not seen"
      else
        state_string << "#{post.state}"
      end

      if enqueued
        state_string << "enqueued"
      end

      state_string = state_string.join(", ")
    end

    render json: {
      enqueued: enqueued,
      title: post&.title,
      state: state_string,
      is_terminal_state: post&.scanned? && post&.file&.present? || false,
    }
  end

  # JSON endpoint: bulk version of try_scan_post for many fa_ids.
  def try_scan_posts
    Rails.logger.info "params: #{params.inspect}"
    fa_ids = params[:fa_ids].map(&:to_i)
    fa_id_to_post = Domain::Fa::Post.where(fa_id: fa_ids).map do |post|
      [post.fa_id, post]
    end.to_h

    response = {}

    fa_ids.each do |fa_id|
      post = fa_id_to_post[fa_id]
      if post.nil?
        state = "not_seen"
      elsif post.file.present?
        state = "have_file"
      elsif post.scanned?
        state = "scanned"
      else
        state = "state_#{post.state}"
      end

      response[fa_id] = {
        state: state,
        enqueued: try_enqueue_post_scan(post, fa_id),
      }
    end
    render json: response
  end

  private

  # Enqueues a post scan or file download if needed; returns true when a
  # job was enqueued.
  def try_enqueue_post_scan(post, fa_id)
    # BUGFIX: this dedupe set was previously a class variable (@@), which
    # persisted across all requests for the process lifetime and was shared
    # between threads — an fa_id could never be re-enqueued until restart.
    # An instance variable scopes the dedupe to one request.
    @already_enqueued_fa_ids ||= Set.new
    unless @already_enqueued_fa_ids.add?(fa_id)
      Rails.logger.info "Already enqueued #{fa_id}, skipping"
      return false
    end

    if !post || !post.scanned?
      Rails.logger.info "Enqueue scan #{fa_id}"
      Domain::Fa::Job::ScanPostJob.
        set(priority: -15, queue: "manual").
        perform_later({
          fa_id: fa_id,
        })
      return true
    end

    if post && post.file_uri && !post.file.present?
      Rails.logger.info "Enqueue file #{fa_id}"
      Domain::Fa::Job::ScanFileJob.
        set(priority: -15, queue: "manual").
        perform_later({
          post: post,
        })
      return true
    end

    false
  end

  # Use callbacks to share common setup or constraints between actions.
  def set_domain_fa_post
    @post = Domain::Fa::Post.find_by_fa_id!(params[:fa_id])
  end
end
|
||||
@@ -1,71 +0,0 @@
|
||||
# Standard CRUD controller for FurAffinity users.
class Domain::Fa::UsersController < ApplicationController
  before_action :set_domain_fa_user, only: %i[show edit update destroy]

  # GET /domain/fa/users or /domain/fa/users.json
  def index
    @domain_fa_users = Domain::Fa::User.page(params[:page])
  end

  # GET /domain/fa/users/1 or /domain/fa/users/1.json
  def show
  end

  # GET /domain/fa/users/new
  def new
    @domain_fa_user = Domain::Fa::User.new
  end

  # GET /domain/fa/users/1/edit
  def edit
  end

  # POST /domain/fa/users or /domain/fa/users.json
  def create
    @domain_fa_user = Domain::Fa::User.new(domain_fa_user_params)
    saved = @domain_fa_user.save

    respond_to do |format|
      unless saved
        format.html { render :new, status: :unprocessable_entity }
        format.json { render json: @domain_fa_user.errors, status: :unprocessable_entity }
        next
      end

      format.html { redirect_to domain_fa_user_url(@domain_fa_user), notice: "User was successfully created." }
      format.json { render :show, status: :created, location: @domain_fa_user }
    end
  end

  # PATCH/PUT /domain/fa/users/1 or /domain/fa/users/1.json
  def update
    updated = @domain_fa_user.update(domain_fa_user_params)

    respond_to do |format|
      unless updated
        format.html { render :edit, status: :unprocessable_entity }
        format.json { render json: @domain_fa_user.errors, status: :unprocessable_entity }
        next
      end

      format.html { redirect_to domain_fa_user_url(@domain_fa_user), notice: "User was successfully updated." }
      format.json { render :show, status: :ok, location: @domain_fa_user }
    end
  end

  # DELETE /domain/fa/users/1 or /domain/fa/users/1.json
  def destroy
    @domain_fa_user.destroy

    respond_to do |format|
      format.html { redirect_to domain_fa_users_url, notice: "User was successfully destroyed." }
      format.json { head :no_content }
    end
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_domain_fa_user
    @domain_fa_user = Domain::Fa::User.find(params[:id])
  end

  # Only allow a list of trusted parameters through.
  def domain_fa_user_params
    params.fetch(:domain_fa_user, {})
  end
end
|
||||
26
app/controllers/domain/post_groups_controller.rb
Normal file
@@ -0,0 +1,26 @@
|
||||
# typed: true
|
||||
|
||||
# Read-only controller for post groups ("pools").
class Domain::PostGroupsController < DomainController
  extend T::Sig
  extend T::Helpers

  skip_before_action :authenticate_user!, only: %i[show]
  before_action :set_post_group!, only: %i[show]

  # GET /pools/:id
  sig(:final) { void }
  def show
    authorize @post_group
  end

  private

  # Tells DomainController which request params carry the domain object ids.
  # NOTE(review): `private` does not affect singleton methods such as this
  # `def self.param_config` — confirm the intended visibility
  # (`private_class_method` would be needed to actually hide it).
  sig { override.returns(DomainController::DomainParamConfig) }
  def self.param_config
    DomainController::DomainParamConfig.new(
      post_group_id_param: :id,
      post_id_param: :domain_post_id,
      user_id_param: :domain_user_id,
    )
  end
end
|
||||
254
app/controllers/domain/posts_controller.rb
Normal file
@@ -0,0 +1,254 @@
|
||||
# typed: true
|
||||
|
||||
require "open-uri"
|
||||
require "tempfile"
|
||||
require "base64"
|
||||
|
||||
class Domain::PostsController < DomainController
|
||||
extend T::Sig
|
||||
extend T::Helpers
|
||||
|
||||
skip_before_action :authenticate_user!,
|
||||
only: %i[
|
||||
show
|
||||
index
|
||||
user_favorite_posts
|
||||
user_created_posts
|
||||
visual_search
|
||||
visual_results
|
||||
]
|
||||
before_action :set_post!, only: %i[show]
|
||||
before_action :set_user!, only: %i[user_created_posts]
|
||||
before_action :set_post_group!, only: %i[posts_in_group]
|
||||
|
||||
class PostsIndexViewConfig < T::ImmutableStruct
|
||||
include T::Struct::ActsAsComparable
|
||||
|
||||
const :show_domain_filters, T::Boolean
|
||||
const :show_creator_links, T::Boolean
|
||||
const :index_type_header, String
|
||||
end
|
||||
|
||||
sig { void }
|
||||
def initialize
|
||||
super
|
||||
@posts_index_view_config =
|
||||
PostsIndexViewConfig.new(
|
||||
show_domain_filters: false,
|
||||
show_creator_links: false,
|
||||
index_type_header: "all_posts",
|
||||
)
|
||||
end
|
||||
|
||||
# GET /posts
|
||||
sig(:final) { void }
|
||||
def index
|
||||
@posts_index_view_config =
|
||||
PostsIndexViewConfig.new(
|
||||
show_domain_filters: true,
|
||||
show_creator_links: true,
|
||||
index_type_header: "all_posts",
|
||||
)
|
||||
authorize Domain::Post
|
||||
@posts = posts_relation(Domain::Post.all).without_count
|
||||
active_sources = (params[:sources] || DomainSourceHelper.all_source_names)
|
||||
unless DomainSourceHelper.has_all_sources?(active_sources)
|
||||
postable_types =
|
||||
DomainSourceHelper.source_names_to_class_names(active_sources)
|
||||
@posts = @posts.where(type: postable_types) if postable_types.any?
|
||||
end
|
||||
end
|
||||
|
||||
# GET /posts/:id
|
||||
sig(:final) { void }
|
||||
def show
|
||||
authorize @post
|
||||
end
|
||||
|
||||
sig(:final) { void }
|
||||
def user_created_posts
|
||||
@posts_index_view_config =
|
||||
PostsIndexViewConfig.new(
|
||||
show_domain_filters: false,
|
||||
show_creator_links: false,
|
||||
index_type_header: "user_created",
|
||||
)
|
||||
|
||||
@user = T.must(@user)
|
||||
authorize @user
|
||||
@posts = posts_relation(@user.posts)
|
||||
authorize @posts
|
||||
render :index
|
||||
end
|
||||
|
||||
sig(:final) { void }
|
||||
def posts_in_group
|
||||
@posts_index_view_config =
|
||||
PostsIndexViewConfig.new(
|
||||
show_domain_filters: false,
|
||||
show_creator_links: true,
|
||||
index_type_header: "posts_in_group",
|
||||
)
|
||||
|
||||
authorize @post_group
|
||||
@posts = posts_relation(T.must(@post_group).posts)
|
||||
render :index
|
||||
end
|
||||
|
||||
# GET /posts/visual_search
|
||||
sig(:final) { void }
|
||||
def visual_search
|
||||
authorize Domain::Post
|
||||
end
|
||||
|
||||
sig { params(content_type: T.nilable(String)).returns(T::Boolean) }
|
||||
def check_content_type!(content_type)
|
||||
return false unless content_type
|
||||
|
||||
ret =
|
||||
Domain::PostFile::Thumbnail::THUMBABLE_CONTENT_TYPES.any? do |type|
|
||||
content_type.match?(type)
|
||||
end
|
||||
|
||||
unless ret
|
||||
flash.now[:error] = "The uploaded file is not a valid image format."
|
||||
render :visual_search
|
||||
end
|
||||
|
||||
ret
|
||||
end
|
||||
|
||||
# POST /posts/visual_search
#
# Reverse-image search: fingerprints the uploaded image (or image at the
# given URL), finds the nearest stored fingerprints, and splits the ten
# closest into good (>= 80% similarity) and bad matches for the view.
sig(:final) { void }
def visual_results
  authorize Domain::Post

  # Process the uploaded image or URL
  file_result = process_image_input
  # process_image_input renders an error view itself when it returns nil.
  return unless file_result
  file_path, content_type = file_result

  # Create thumbnail for the view if possible
  tmp_dir = Dir.mktmpdir("visual-search")
  thumbs_and_fingerprints =
    helpers.generate_fingerprints(file_path, content_type, tmp_dir)
  first_thumb_and_fingerprint = thumbs_and_fingerprints&.first
  if thumbs_and_fingerprints.nil? || first_thumb_and_fingerprint.nil?
    flash.now[:error] = "Error generating fingerprints"
    render :visual_search
    return
  end
  logger.info("generated #{thumbs_and_fingerprints.length} thumbs")

  # Inline data URI so the results page can display the query image
  # without persisting it anywhere.
  @uploaded_image_data_uri =
    helpers.create_image_thumbnail_data_uri(
      first_thumb_and_fingerprint.thumb_path,
      "image/jpeg",
    )
  @uploaded_detail_hash_value = first_thumb_and_fingerprint.detail_fingerprint
  before = Time.now

  similar_fingerprints =
    helpers.find_similar_fingerprints(
      thumbs_and_fingerprints.map(&:to_fingerprint_and_detail),
    ).take(10)

  # Wall-clock duration of the similarity search, shown in the view.
  @time_taken = Time.now - before

  @matches = similar_fingerprints
  @good_matches =
    similar_fingerprints.select { |f| f.similarity_percentage >= 80 }
  @bad_matches =
    similar_fingerprints.select { |f| f.similarity_percentage < 80 }

  # When any good matches exist, show only those.
  @matches = @good_matches if @good_matches.any?
ensure
  # Clean up any temporary files
  FileUtils.rm_rf(tmp_dir) if tmp_dir
end
|
||||
|
||||
private
|
||||
|
||||
# Process the uploaded file or URL and return [image_path, content_type] or nil on failure
#
# Dispatches on which input the form supplied: an uploaded file takes
# precedence over a URL. When neither is present, renders the form with
# a flash error and returns nil.
sig { returns(T.nilable([String, String])) }
def process_image_input
  return process_uploaded_file if params[:image_file].present?
  return process_image_url if params[:image_url].present?

  flash.now[:error] = "Please upload an image or provide an image URL."
  render :visual_search
  nil
end
|
||||
|
||||
# Process an uploaded file and return [image_path, content_type] or nil on failure
#
# Validates the upload's MIME type (check_content_type! renders the error
# view on rejection) and returns the path of the Rack-provided tempfile.
sig { returns(T.nilable([String, String])) }
def process_uploaded_file
  upload = params[:image_file]
  mime_type = T.must(upload.content_type)
  return nil unless check_content_type!(mime_type)

  [T.must(upload.tempfile.path), mime_type]
end
|
||||
|
||||
# Process an image URL and return [image_path, content_type] or nil on failure
#
# Downloads the image at params[:image_url] to a Tempfile. The Tempfile is
# kept in an ivar (not unlinked) because the caller reads the file by path.
sig { returns(T.nilable([String, String])) }
def process_image_url
  image_url = params[:image_url]

  # Only fetch over HTTP(S). open-uri also understands other schemes
  # (e.g. ftp://), which must never be reachable from user-supplied
  # input (SSRF). Note: URI.open raises on failure rather than
  # returning nil, so the old `image_io.nil?` check was dead code.
  uri =
    begin
      URI.parse(image_url.to_s)
    rescue URI::InvalidURIError
      nil
    end
  unless uri.is_a?(URI::HTTP) # URI::HTTPS is a subclass of URI::HTTP
    flash.now[:error] = "The URL does not point to a valid image format."
    render :visual_search
    return nil
  end

  image_io = uri.open

  content_type = T.must(T.unsafe(image_io).content_type)
  return nil unless check_content_type!(content_type)

  # Save to temp file
  extension = helpers.extension_for_content_type(content_type) || "jpg"
  @temp_file = Tempfile.new(["image", ".#{extension}"])
  @temp_file.binmode
  @temp_file.write(image_io.read)
  @temp_file.close

  [T.must(@temp_file.path), content_type]
rescue StandardError => e
  Rails.logger.error("Error processing image URL: #{e.message}")
  flash.now[:error] = "Error downloading search image"
  render :visual_search
  nil
end
|
||||
|
||||
# Route-parameter names consumed by DomainController's set_*! helpers.
# This controller is routed on :id for posts.
sig { override.returns(DomainController::DomainParamConfig) }
def self.param_config
  DomainController::DomainParamConfig.new(
    user_id_param: :domain_user_id,
    post_group_id_param: :domain_post_group_id,
    post_id_param: :id,
  )
end
|
||||
|
||||
# Applies the Post policy scope and pagination (50/page) to
# +starting_relation+, and — unless +skip_ordering+ — a newest-first
# ordering on posted_at with NULLs sorted last.
sig(:final) do
  params(
    starting_relation: ActiveRecord::Relation,
    skip_ordering: T::Boolean,
  ).returns(
    T.all(ActiveRecord::Relation, Kaminari::ActiveRecordRelationMethods),
  )
end
def posts_relation(starting_relation, skip_ordering: false)
  scoped =
    T.unsafe(policy_scope(starting_relation)).page(params[:page]).per(50)
  skip_ordering ? scoped : scoped.order("posted_at DESC NULLS LAST")
end
|
||||
end
|
||||
@@ -1,14 +1,12 @@
|
||||
# typed: true
|
||||
class Domain::Twitter::ApiController < ApplicationController
|
||||
skip_before_action :verify_authenticity_token,
|
||||
only: %i[ enqueue_objects ]
|
||||
skip_before_action :verify_authenticity_token, only: %i[enqueue_objects]
|
||||
|
||||
def enqueue_objects
|
||||
@enqueue_counts ||= Hash.new { |h, k| h[k] = 0 }
|
||||
|
||||
names = (params[:names] || [])
|
||||
names.each do |name|
|
||||
defer_user_timeline_scan(name, true)
|
||||
end
|
||||
names.each { |name| defer_user_timeline_scan(name, true) }
|
||||
enqueue_deferred!
|
||||
render json: @enqueue_counts.to_json
|
||||
end
|
||||
@@ -16,9 +14,11 @@ class Domain::Twitter::ApiController < ApplicationController
|
||||
private
|
||||
|
||||
def defer_user_timeline_scan(name, highpri)
|
||||
defer_manual(Domain::Twitter::Job::UserTimelineTweetsJob, {
|
||||
name: name,
|
||||
}, highpri ? -16 : -6)
|
||||
defer_manual(
|
||||
Domain::Twitter::Job::UserTimelineTweetsJob,
|
||||
{ name: name },
|
||||
highpri ? -16 : -6,
|
||||
)
|
||||
end
|
||||
|
||||
def defer_manual(klass, args, priority, queue = nil)
|
||||
@@ -31,11 +31,13 @@ class Domain::Twitter::ApiController < ApplicationController
|
||||
end
|
||||
|
||||
def enqueue_deferred!
|
||||
GoodJob::Bulk.enqueue do
|
||||
while job = (@deferred_jobs || []).shift
|
||||
klass, args, priority, queue = job
|
||||
klass.set(priority: priority, queue: queue).perform_later(args)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def time_ago_or_never(time)
|
||||
if time
|
||||
|
||||
120
app/controllers/domain/user_job_events_controller.rb
Normal file
@@ -0,0 +1,120 @@
|
||||
# typed: strict

# Views and maintenance actions for a user's job events
# (Domain::UserJobEvent::AddTrackedObject records).
class Domain::UserJobEventsController < DomainController
  extend T::Sig

  before_action :set_user!

  # GET — overview of tracked-object kinds for @user: every known kind,
  # per-kind row counts, and the most recent requested_at per kind.
  sig { void }
  def tracked_objects_kinds
    @kinds =
      T.let(
        Domain::UserJobEvent::AddTrackedObject.kinds.keys.map(&:to_s),
        T.nilable(T::Array[String]),
      )

    # reorder(nil) drops any default scope ordering so GROUP BY is valid.
    @kind_counts =
      T.let(
        @user&.add_tracked_objects&.reorder(nil)&.group(:kind)&.count,
        T.nilable(T::Hash[String, Integer]),
      )

    @kinds_most_at =
      T.let(
        @user
          &.add_tracked_objects
          &.reorder(nil)
          &.group(:kind)
          &.maximum(:requested_at),
        T.nilable(T::Hash[String, Time]),
      )
  end

  # GET — tracked objects of kind params[:kind], newest request first.
  sig { void }
  def tracked_objects
    set_and_validate_kind!
    @tracked_objects =
      T.let(
        T
          .must(@user)
          .add_tracked_objects
          .includes(:log_entry)
          .where(kind: @kind)
          .sort_by { |a| -a.requested_at.to_i },
        T.untyped,
      )
  end

  # POST — synchronously backfills favs scans for a FurAffinity user.
  sig { void }
  def backfill_scan_job
    set_and_validate_kind!
    @user = T.must(@user)
    return unless require_fa_user!

    case @kind
    when "favs"
      now = Time.now
      stats = Domain::Fa::BackfillTrackedObjectUserFavs.new(user: @user).run
      flash[
        :success
      ] = "Backfilled #{@user.url_name} favs, #{stats.total_created} favs scans created, #{stats.total_favs} favs, loaded #{stats.total_hles} logs, took #{helpers.distance_of_time_in_words_to_now(now, include_seconds: true)}"
    else
      # Mirror enqueue_scan_job: surface unimplemented kinds instead of
      # silently redirecting with no feedback.
      flash[:error] = "Unimplemented kind: #{@kind}"
    end

    redirect_to tracked_objects_domain_user_job_events_path(@user, kind: @kind)
  end

  # POST — enqueues an asynchronous favs scan job for a FurAffinity user.
  sig { void }
  def enqueue_scan_job
    set_and_validate_kind!
    @user = T.must(@user)
    return unless require_fa_user!

    case @kind
    when "favs"
      flash[:success] = "Enqueued scan job for #{@user.url_name} favs"
      Domain::Fa::Job::FavsJob.set(queue: "manual").perform_later(
        user: @user,
        force_scan: true,
      )
    else
      flash[:error] = "Unimplemented kind: #{@kind}"
    end

    redirect_to tracked_objects_domain_user_job_events_path(@user, kind: @kind)
  end

  private

  sig { override.returns(DomainController::DomainParamConfig) }
  def self.param_config
    DomainController::DomainParamConfig.new(
      user_id_param: :domain_user_id,
      post_id_param: :domain_post_id,
      post_group_id_param: :domain_post_group_id,
    )
  end

  # Shared guard for the scan actions, which only support FurAffinity
  # users. On any other user type: flash an error, redirect back to the
  # tracked-objects listing, and return false so the caller can bail.
  sig { returns(T::Boolean) }
  def require_fa_user!
    return true if @user.is_a?(Domain::User::FaUser)

    flash[:error] = "This user is not a FurAffinity user"
    redirect_to tracked_objects_domain_user_job_events_path(
                  @user,
                  kind: @kind,
                )
    false
  end

  # Loads params[:kind] into @kind; 404s when missing or not a known
  # AddTrackedObject kind.
  sig { void }
  def set_and_validate_kind!
    @kind = T.let(params[:kind], T.nilable(String))
    raise ActionController::RoutingError, "Not Found" if @kind.blank?
    unless Domain::UserJobEvent::AddTrackedObject.kinds.include?(@kind)
      raise ActionController::RoutingError, "Not Found"
    end
  end
end
|
||||
29
app/controllers/domain/user_post_favs_controller.rb
Normal file
@@ -0,0 +1,29 @@
|
||||
# typed: true
# frozen_string_literal: true

# Lists the posts a user has favorited.
class Domain::UserPostFavsController < DomainController
  before_action :set_user!, only: %i[favorites]

  # Route-parameter names consumed by DomainController's set_*! helpers.
  # Declared with the override sig for consistency with the sibling
  # controllers — the parent declares this method abstract with a sig.
  sig { override.returns(DomainController::DomainParamConfig) }
  def self.param_config
    DomainParamConfig.new(
      post_id_param: :domain_post_id,
      user_id_param: :domain_user_id,
      post_group_id_param: :domain_post_group_id,
    )
  end

  # GET — paginated (50/page) list of @user's post favs with posts
  # eager-loaded, rendered via the shared posts index view config.
  sig { void }
  def favorites
    @posts_index_view_config =
      Domain::PostsController::PostsIndexViewConfig.new(
        show_domain_filters: false,
        show_creator_links: true,
        index_type_header: "user_favorites",
      )
    user = T.cast(@user, Domain::User)
    @user_post_favs =
      user.user_post_favs.includes(:post).page(params[:page]).per(50)
    authorize @user_post_favs
    render :favorites
  end
end
|
||||
290
app/controllers/domain/users_controller.rb
Normal file
@@ -0,0 +1,290 @@
|
||||
# typed: true

# Browsing, search, similarity, and monitoring actions for Domain users.
class Domain::UsersController < DomainController
  extend T::Sig
  extend T::Helpers

  before_action :set_user!,
                only: %i[show followed_by following monitor_bluesky_user]
  before_action :set_post!, only: %i[users_faving_post]
  skip_before_action :authenticate_user!,
                     only: %i[
                       show
                       search_by_name
                       users_faving_post
                       similar_users
                     ]

  # GET /users
  sig(:final) { void }
  def index
    authorize Domain::User
    @users = policy_scope(Domain::User).order(created_at: :desc)
  end

  # GET — users who follow @user, paginated 50/page.
  sig(:final) { void }
  def followed_by
    @user = T.must(@user)
    authorize @user
    @users =
      @user
        .followed_by_users
        .includes(avatar: :log_entry)
        .page(params[:page])
        .per(50)
    @index_type = :followed_by
    render :index
  end

  # GET — users @user follows, paginated 50/page.
  sig(:final) { void }
  def following
    @user = T.must(@user)
    authorize @user
    @users =
      @user
        .followed_users
        .includes(avatar: :log_entry)
        .page(params[:page])
        .per(50)
    @index_type = :following
    render :index
  end

  # GET — users who favorited @post, paginated 50/page.
  sig(:final) { void }
  def users_faving_post
    @post = T.must(@post)
    authorize @post
    @users =
      T
        .unsafe(@post)
        .faving_users
        .includes(avatar: :log_entry)
        .page(params[:page])
        .per(50)

    @index_type = :users_faving_post
    render :index
  end

  # GET /users/:id
  sig(:final) { void }
  def show
    authorize @user
  end

  # GET — fuzzy user-name search. Bluesky DIDs get a prefix search;
  # everything else uses ilike + dmetaphone similarity, deduplicated to
  # each user's closest search name by levenshtein distance.
  sig(:final) { void }
  def search_by_name
    authorize Domain::User
    name = params[:name]&.downcase
    # sanitize_sql_like only escapes LIKE wildcards (%, _); it does NOT
    # make `name` safe for direct interpolation into SQL. All uses below
    # therefore go through bind parameters / sanitize_sql_array.
    name = ReduxApplicationRecord.sanitize_sql_like(name)

    if name.starts_with?("did:plc:") || name.starts_with?("did:pkh:")
      @user_search_names =
        Domain::UserSearchName
          .select(
            "domain_user_search_names.*, domain_users.*, domain_users_bluesky_aux.did",
          )
          .select(
            ReduxApplicationRecord.sanitize_sql_array(
              [
                "levenshtein(domain_users_bluesky_aux.did, ?) as distance",
                name,
              ],
            ),
          )
          .where(
            user: Domain::User::BlueskyUser.where("did LIKE ?", "#{name}%"),
          )
          .joins(:user)
          .limit(10)
      return
    end

    @user_search_names =
      Domain::UserSearchName
        .select("domain_user_search_names.*, domain_users.*")
        .select(
          ReduxApplicationRecord.sanitize_sql_array(
            ["levenshtein(name, ?) as distance", name],
          ),
        )
        .select(
          "(SELECT COUNT(*) FROM domain_user_post_creations dupc WHERE dupc.user_id = domain_users.id) as num_posts",
        )
        .joins(:user)
        .where(
          "(name ilike ?) OR (similarity(dmetaphone(name), dmetaphone(?)) > 0.8)",
          "%#{name}%",
          name,
        )
        .where(
          "NOT EXISTS (
            SELECT 1
            FROM domain_user_search_names dns2
            WHERE dns2.user_id = domain_user_search_names.user_id
              AND levenshtein(dns2.name, ?) < levenshtein(domain_user_search_names.name, ?)
          )",
          name,
          name,
        )
        .order("distance ASC")
        .limit(10)
  end

  # GET — JSON API: users similar to :url_name by follower overlap,
  # optionally also a list excluding users already followed by
  # :exclude_url_name.
  sig { void }
  def similar_users
    url_name = params[:url_name]
    exclude_url_name = params[:exclude_url_name]

    user = Domain::User::FaUser.find_by(url_name: url_name)
    if user.nil?
      render status: 404,
             json: {
               error: "user '#{url_name}' not found",
               error_type: "user_not_found",
             }
      return
    end

    all_similar_users =
      users_similar_to_by_followers(user, limit: 10).map do |u|
        user_to_similarity_entry(u)
      end

    # NOTE(review): `.map` never returns nil, so this branch appears
    # unreachable; the factors-missing case currently yields []. Confirm
    # whether `.empty?` was intended before changing it.
    if all_similar_users.nil?
      render status: 500,
             json: {
               error:
                 "user '#{url_name}' has not had recommendations computed yet",
               error_type: "recs_not_computed",
             }
      return
    end

    not_followed_similar_users = nil
    if exclude_url_name
      exclude_followed_by =
        Domain::User::FaUser.find_by(url_name: exclude_url_name)
      if exclude_followed_by.nil?
        render status: 500,
               json: {
                 error: "user '#{exclude_url_name}' not found",
                 error_type: "exclude_user_not_found",
               }
        return
      elsif exclude_followed_by.scanned_follows_at.nil?
        render status: 500,
               json: {
                 error:
                   "user '#{exclude_url_name}' followers list hasn't been scanned",
                 error_type: "exclude_user_not_scanned",
               }
        return
      else
        not_followed_similar_users =
          users_similar_to_by_followers(
            user,
            limit: 10,
            exclude_followed_by: exclude_followed_by,
          ).map { |u| user_to_similarity_entry(u) }
      end
    end

    render json: {
             all: all_similar_users,
             not_followed: not_followed_similar_users,
           }
  end

  # POST — starts monitoring a Bluesky user and kicks off initial scans.
  sig { void }
  def monitor_bluesky_user
    user = T.cast(@user, Domain::User::BlueskyUser)
    authorize user
    monitor = Domain::Bluesky::MonitoredObject.build_for_user(user)
    if monitor.save
      Domain::Bluesky::Job::ScanUserJob.perform_later(user:)
      Domain::Bluesky::Job::ScanPostsJob.perform_later(user:)
      flash[:notice] = "User is now being monitored"
    else
      flash[
        :alert
      ] = "Error monitoring user: #{monitor.errors.full_messages.join(", ")}"
    end
    redirect_to domain_user_path(user)
  end

  private

  sig { override.returns(DomainController::DomainParamConfig) }
  def self.param_config
    DomainController::DomainParamConfig.new(
      user_id_param: :id,
      post_id_param: :domain_post_id,
      post_group_id_param: :domain_post_group_id,
    )
  end

  # TODO - make a typed ImmutableStruct for the return type
  #
  # Builds the JSON entry for one similar user. The avatar thumbnail is
  # resolved in order: stored avatar log entry, then a parse of the user's
  # cached profile page, then FA's default-avatar URL as a last resort.
  sig { params(user: Domain::User::FaUser).returns(T::Hash[Symbol, T.untyped]) }
  def user_to_similarity_entry(user)
    profile_thumb_url = user.avatar&.log_entry&.uri_str
    profile_thumb_url ||=
      begin
        pp_log_entry = get_best_user_page_http_log_entry_for(user)
        if pp_log_entry
          parser =
            Domain::Fa::Parser::Page.from_log_entry(
              pp_log_entry,
              require_logged_in: false,
            )
          parser.user_page.profile_thumb_url
        end
      rescue StandardError
        logger.error("error getting profile_thumb_url: #{$!.message}")
      end || "https://a.furaffinity.net/0/#{user.url_name}.gif"

    {
      name: user.name,
      url_name: user.url_name,
      profile_thumb_url: profile_thumb_url,
      external_url: "https://www.furaffinity.net/user/#{user.url_name}/",
      refurrer_url: request.base_url + helpers.domain_user_path(user),
    }
  end

  # Finds the most recent cached HTTP log entry for the user's FA profile
  # page, trying the user's own pointer first, then both URL spellings.
  sig { params(user: Domain::User::FaUser).returns(T.nilable(HttpLogEntry)) }
  def get_best_user_page_http_log_entry_for(user)
    for_path =
      proc do |uri_path|
        HttpLogEntry
          .where(
            uri_scheme: "https",
            uri_host: "www.furaffinity.net",
            uri_path: uri_path,
          )
          .order(created_at: :desc)
          .first
      end

    # older versions don't end in a trailing slash
    user.last_user_page_log_entry || for_path.call("/user/#{user.url_name}/") ||
      for_path.call("/user/#{user.url_name}")
  end

  # Nearest-neighbor lookup on follow-graph factor vectors. Returns []
  # when the user has no computed factors.
  sig do
    params(
      user: Domain::User::FaUser,
      limit: Integer,
      exclude_followed_by: T.nilable(Domain::User::FaUser),
    ).returns(T::Array[Domain::User::FaUser])
  end
  def users_similar_to_by_followers(user, limit: 10, exclude_followed_by: nil)
    factors = Domain::Factors::UserUserFollowToFactors.find_by(user: user)
    return [] if factors.nil?

    relation =
      Domain::NeighborFinder
        .find_neighbors(factors)
        .limit(limit)
        .includes(:user)

    if exclude_followed_by
      relation =
        relation.where.not(
          user_id: exclude_followed_by.followed_users.select(:to_id),
        )
    end

    relation.map(&:user)
  end
end
|
||||
73
app/controllers/domain_controller.rb
Normal file
@@ -0,0 +1,73 @@
|
||||
# typed: strict

# Abstract base for controllers that operate on Domain models. Provides
# before_action helpers (set_post!, set_user!, set_post_group!) that load
# records from request params; each subclass declares which param names to
# use via its param_config.
class DomainController < ApplicationController
  extend T::Sig
  extend T::Helpers
  abstract!

  # Per-controller mapping from model kind to the request-param name that
  # carries its id (e.g. :id vs :domain_user_id, depending on routing).
  class DomainParamConfig < T::ImmutableStruct
    include T::Struct::ActsAsComparable

    const :post_id_param, Symbol
    const :user_id_param, Symbol
    const :post_group_id_param, Symbol
  end

  sig { void }
  def initialize
    super
    # Pre-declare nilable ivars so `# typed: strict` ivar checks pass and
    # views can safely read them before any set_*! runs.
    @post = T.let(nil, T.nilable(Domain::Post))
    @user = T.let(nil, T.nilable(Domain::User))
    @post_group = T.let(nil, T.nilable(Domain::PostGroup))
  end

  protected

  # Subclasses must declare which request params identify their models.
  sig { abstract.returns(DomainParamConfig) }
  def self.param_config
  end

  # before_action helper: loads @post from the configured param, raising
  # ActiveRecord::RecordNotFound (404) when it does not resolve.
  sig(:final) { void }
  def set_post!
    @post =
      self.class.find_model_from_param(
        Domain::Post,
        params[self.class.param_config.post_id_param],
      ) || raise(ActiveRecord::RecordNotFound)
  end

  # before_action helper: loads @user from the configured param or 404s.
  sig(:final) { void }
  def set_user!
    @user =
      self.class.find_model_from_param(
        Domain::User,
        params[self.class.param_config.user_id_param],
      ) || raise(ActiveRecord::RecordNotFound)
  end

  # before_action helper: loads @post_group from the configured param or 404s.
  sig(:final) { void }
  def set_post_group!
    @post_group =
      self.class.find_model_from_param(
        Domain::PostGroup,
        params[self.class.param_config.post_group_id_param],
      ) || raise(ActiveRecord::RecordNotFound)
  end

  public

  # Resolves a composite to_param value to a record of +klass+, or nil.
  # The T.all intersection requires klass to be both an AR model class and
  # one providing HasCompositeToParam's find_by_param, so the return type
  # is inferred as an instance of the concrete class passed in.
  sig(:final) do
    type_parameters(:Klass)
      .params(
        klass:
          T.all(
            T.class_of(ReduxApplicationRecord),
            HasCompositeToParam::ClassMethods[T.type_parameter(:Klass)],
          ),
        param: T.nilable(String),
      )
      .returns(T.nilable(T.type_parameter(:Klass)))
  end
  def self.find_model_from_param(klass, param)
    klass.find_by_param(param)
  end
end
|
||||
254
app/controllers/global_states_controller.rb
Normal file
@@ -0,0 +1,254 @@
|
||||
# typed: false

# CRUD for GlobalState key/value rows plus dedicated form flows for the
# FurAffinity cookies, Inkbunny credentials, and Telegram bot token.
class GlobalStatesController < ApplicationController
  before_action :set_global_state, only: %i[edit update destroy]
  after_action :verify_authorized

  FA_COOKIE_KEYS = %w[
    furaffinity-cookie-a
    furaffinity-cookie-b
    furaffinity-cookie-oaid
  ].freeze

  IB_COOKIE_KEYS = %w[inkbunny-username inkbunny-password inkbunny-sid].freeze

  TELEGRAM_KEYS = %w[telegram-bot-token].freeze

  def index
    authorize GlobalState
    @global_states = policy_scope(GlobalState).order(:key)
  end

  def new
    @global_state = GlobalState.new
    authorize @global_state
  end

  def create
    @global_state = GlobalState.new(global_state_params)
    authorize @global_state
    if @global_state.save
      redirect_to global_states_path,
                  notice: "Global state was successfully created."
    else
      render :new, status: :unprocessable_entity
    end
  end

  def edit
    authorize @global_state
  end

  def update
    authorize @global_state
    if @global_state.update(global_state_params)
      redirect_to global_states_path,
                  notice: "Global state was successfully updated."
    else
      render :edit, status: :unprocessable_entity
    end
  end

  def destroy
    authorize @global_state
    @global_state.destroy
    redirect_to global_states_path,
                notice: "Global state was successfully deleted."
  end

  def fa_cookies
    authorize GlobalState
    @fa_cookies = states_for(FA_COOKIE_KEYS)
  end

  def edit_fa_cookies
    authorize GlobalState
    @fa_cookies = states_for(FA_COOKIE_KEYS)
  end

  def update_fa_cookies
    authorize GlobalState

    save_string_states!(fa_cookies_params)
    redirect_to fa_cookies_global_states_path,
                notice: "FA cookies were successfully updated."
  rescue ActiveRecord::RecordInvalid => e
    @fa_cookies = states_for(FA_COOKIE_KEYS)
    flash.now[:alert] = "Error updating FA cookies: #{e.message}"
    render :edit_fa_cookies, status: :unprocessable_entity
  end

  def ib_cookies
    authorize GlobalState
    @ib_cookies = states_for(IB_COOKIE_KEYS)
  end

  def edit_ib_cookies
    authorize GlobalState
    load_ib_form_state
  end

  # Inkbunny accepts either username+password or a session ID, never both.
  def update_ib_cookies
    authorize GlobalState

    params_hash = params.require(:ib_cookies).permit(*IB_COOKIE_KEYS).to_h
    has_credentials =
      params_hash["inkbunny-username"].present? ||
        params_hash["inkbunny-password"].present?
    has_sid = params_hash["inkbunny-sid"].present?

    if has_credentials && has_sid
      raise ArgumentError,
            "Cannot set both credentials and session ID at the same time"
    end

    if !has_credentials && !has_sid
      raise ArgumentError, "Must set either credentials or session ID"
    end

    if has_credentials
      # Write both keys even when one is blank, matching the form's
      # semantics of replacing the stored credential pair.
      save_string_states!(
        {
          "inkbunny-username" => params_hash["inkbunny-username"],
          "inkbunny-password" => params_hash["inkbunny-password"],
        },
      )
    else
      save_string_states!({ "inkbunny-sid" => params_hash["inkbunny-sid"] })
    end

    redirect_to ib_cookies_global_states_path,
                notice: "Inkbunny credentials were successfully updated."
  rescue ArgumentError, ActiveRecord::RecordInvalid => e
    # Both failure modes re-render the same form with the same message.
    load_ib_form_state
    flash.now[:alert] = "Error updating Inkbunny credentials: #{e.message}"
    render :edit_ib_cookies, status: :unprocessable_entity
  end

  def telegram_config
    authorize GlobalState
    @telegram_config = states_for(TELEGRAM_KEYS)
  end

  def edit_telegram_config
    authorize GlobalState
    @telegram_config = states_for(TELEGRAM_KEYS)
  end

  def update_telegram_config
    authorize GlobalState

    save_string_states!(telegram_config_params)
    redirect_to telegram_config_global_states_path,
                notice: "Telegram bot configuration was successfully updated."
  rescue ActiveRecord::RecordInvalid => e
    @telegram_config = states_for(TELEGRAM_KEYS)
    flash.now[:alert] = "Error updating Telegram bot configuration: #{e.message}"
    render :edit_telegram_config, status: :unprocessable_entity
  end

  private

  def set_global_state
    @global_state = GlobalState.find(params[:id])
  end

  def global_state_params
    params.require(:global_state).permit(:key, :value, :value_type)
  end

  def fa_cookies_params
    params.require(:fa_cookies).permit(*FA_COOKIE_KEYS)
  end

  def ib_cookies_params
    params.require(:ib_cookies).permit(
      *IB_COOKIE_KEYS.reject { |key| key == "inkbunny-sid" },
    )
  end

  def telegram_config_params
    params.require(:telegram_config).permit(*TELEGRAM_KEYS)
  end

  # One GlobalState per key: the persisted row when present, otherwise an
  # unsaved string-typed placeholder so the form can render a blank field.
  def states_for(keys)
    keys.map do |key|
      GlobalState.find_by(key: key) ||
        GlobalState.new(key: key, value_type: :string)
    end
  end

  # Form state for the Inkbunny screens: credential fields plus the
  # separately-displayed session ID row.
  def load_ib_form_state
    @ib_cookies =
      states_for(IB_COOKIE_KEYS.reject { |key| key == "inkbunny-sid" })
    @ib_sid = GlobalState.find_by(key: "inkbunny-sid")
  end

  # Upserts each key/value pair as a string-typed GlobalState inside one
  # transaction; raises ActiveRecord::RecordInvalid on validation failure.
  def save_string_states!(pairs)
    ActiveRecord::Base.transaction do
      pairs.each do |key, value|
        state = GlobalState.find_or_initialize_by(key: key)
        state.value = value
        state.value_type = :string
        state.save!
      end
    end
  end
end
|
||||
@@ -1,3 +1,4 @@
|
||||
# typed: false
|
||||
class LogEntriesController < ApplicationController
|
||||
def index
|
||||
@uri_filter = Addressable::URI.parse(params[:filter]) if params[:filter]
|
||||
@@ -10,9 +11,11 @@ class LogEntriesController < ApplicationController
|
||||
|
||||
if @uri_filter.path.present?
|
||||
if @uri_filter.query.present?
|
||||
query = query.
|
||||
where("uri_path = ?", @uri_filter.path).
|
||||
where("uri_query like ?", @uri_filter.query + "%")
|
||||
query =
|
||||
query.where("uri_path = ?", @uri_filter.path).where(
|
||||
"uri_query like ?",
|
||||
@uri_filter.query + "%",
|
||||
)
|
||||
else
|
||||
query = query.where("uri_path like ?", @uri_filter.path + "%")
|
||||
end
|
||||
@@ -21,12 +24,14 @@ class LogEntriesController < ApplicationController
|
||||
query = HttpLogEntry
|
||||
end
|
||||
|
||||
@log_entries = query.
|
||||
page(params[:page]).
|
||||
per(50).
|
||||
includes(:response).
|
||||
order(id: :desc).
|
||||
without_count
|
||||
@log_entries =
|
||||
query
|
||||
.page(params[:page])
|
||||
.per(50)
|
||||
.joins(:response)
|
||||
.includes(:response)
|
||||
.order(id: :desc)
|
||||
.without_count
|
||||
|
||||
formats.clear
|
||||
formats << :html
|
||||
@@ -38,74 +43,40 @@ class LogEntriesController < ApplicationController
|
||||
@last_window_count = 0
|
||||
@last_window_bytes = 0
|
||||
@last_window_bytes_stored = 0
|
||||
@content_type_counts = Hash.new do |hash, key|
|
||||
hash[key] = {
|
||||
count: 0,
|
||||
bytes: 0,
|
||||
bytes_stored: 0,
|
||||
}
|
||||
@content_type_counts =
|
||||
Hash.new do |hash, key|
|
||||
hash[key] = { count: 0, bytes: 0, bytes_stored: 0 }
|
||||
end
|
||||
|
||||
@by_domain_counts = Hash.new do |hash, key|
|
||||
hash[key] = {
|
||||
count: 0,
|
||||
bytes: 0,
|
||||
bytes_stored: 0,
|
||||
}
|
||||
@by_domain_counts =
|
||||
Hash.new do |hash, key|
|
||||
hash[key] = { count: 0, bytes: 0, bytes_stored: 0 }
|
||||
end
|
||||
|
||||
HttpLogEntry.includes(:response).find_each(batch_size: 100, order: :desc) do |log_entry|
|
||||
HttpLogEntry
|
||||
.joins(:response)
|
||||
.includes(:response)
|
||||
.select("http_log_entries.*, blob_files.size_bytes")
|
||||
.find_each(batch_size: 100, order: :desc) do |log_entry|
|
||||
break if log_entry.created_at < @time_window.ago
|
||||
@last_window_count += 1
|
||||
@last_window_bytes += log_entry.response.size
|
||||
@last_window_bytes_stored += log_entry.response.bytes_stored
|
||||
@last_window_bytes += log_entry.response_size
|
||||
content_type = log_entry.content_type.split(";").first
|
||||
|
||||
@content_type_counts[content_type][:count] += 1
|
||||
@content_type_counts[content_type][:bytes] += log_entry.response.size
|
||||
@content_type_counts[content_type][:bytes_stored] += log_entry.response.bytes_stored
|
||||
@content_type_counts[content_type][:bytes] += log_entry.response_size
|
||||
|
||||
@by_domain_counts[log_entry.uri_host][:count] += 1
|
||||
@by_domain_counts[log_entry.uri_host][:bytes] += log_entry.response.size
|
||||
@by_domain_counts[log_entry.uri_host][:bytes_stored] += log_entry.response.bytes_stored
|
||||
@by_domain_counts[log_entry.uri_host][:bytes] += log_entry.response_size
|
||||
end
|
||||
end
|
||||
|
||||
def show
|
||||
@log_entry = HttpLogEntry.includes(
|
||||
@log_entry =
|
||||
HttpLogEntry.includes(
|
||||
:caused_by_entry,
|
||||
:triggered_entries,
|
||||
response: :base,
|
||||
:response,
|
||||
).find(params[:id])
|
||||
end
|
||||
|
||||
def contents
|
||||
expires_dur = 1.year
|
||||
response.headers["Expires"] = expires_dur.from_now.httpdate
|
||||
expires_in expires_dur, public: true
|
||||
|
||||
log_entry = HttpLogEntry.find(params[:id])
|
||||
hex_sha256 = HexUtil.bin2hex(log_entry.response_sha256)
|
||||
return unless stale?(last_modified: Time.at(0), strong_etag: hex_sha256)
|
||||
|
||||
# images, videos, etc
|
||||
entry_response = log_entry.response
|
||||
if helpers.is_send_data_content_type?(entry_response.content_type)
|
||||
send_data(
|
||||
entry_response.contents,
|
||||
type: entry_response.content_type,
|
||||
disposition: "inline",
|
||||
filename: log_entry.uri.path,
|
||||
)
|
||||
elsif entry_response.content_type =~ /text\/plain/
|
||||
render plain: entry_response.contents
|
||||
elsif entry_response.content_type.starts_with? "text/html"
|
||||
render html: entry_response.contents.html_safe
|
||||
elsif entry_response.content_type.starts_with? "application/json"
|
||||
pretty_json = JSON.pretty_generate(JSON.parse entry_response.contents)
|
||||
render html: "<html><body><pre>#{pretty_json}</pre></body></html>".html_safe
|
||||
else
|
||||
render plain: "no renderer for #{entry_response.content_type}"
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
12
app/controllers/pages_controller.rb
Normal file
@@ -0,0 +1,12 @@
|
||||
# typed: true

# Static, unauthenticated pages.
class PagesController < ApplicationController
  skip_before_action :authenticate_user!, only: %i[root furecs_user_script]

  # GET — landing page.
  def root
    render(:root)
  end

  # GET — the FuRecs userscript page.
  def furecs_user_script
    render(:furecs_user_script)
  end
end
|
||||