mirror of
https://github.com/bookwyrm-social/bookwyrm.git
synced 2024-06-13 10:49:23 +00:00
Compare commits
1099 commits
Author | SHA1 | Date | |
---|---|---|---|
|
3545a1c3b6 | ||
|
4d3d5d15d0 | ||
|
d90e8e56d5 | ||
|
eca246fc61 | ||
|
aa2312e8af | ||
|
61d9e0c260 | ||
|
44eedd09d9 | ||
|
46544451d4 | ||
|
4e987a0e66 | ||
|
332286cdff | ||
|
e4035c6da6 | ||
|
e6ee169c3e | ||
|
29f852b57e | ||
|
acae063652 | ||
|
c32f9faaa0 | ||
|
e7f95ef4c2 | ||
|
a6c2ce15dd | ||
|
7604d0acdb | ||
|
77832cbec7 | ||
|
de67c73237 | ||
|
f38622fdc9 | ||
|
051dab77bb | ||
|
2896219e88 | ||
|
03ac846b5d | ||
|
39c2a0feae | ||
|
22986a08f0 | ||
|
f6bbe673ca | ||
|
f324a3cd1d | ||
|
039160e004 | ||
|
a1ff5a478e | ||
|
1cb86197d5 | ||
|
2537886b4d | ||
|
1474c0d3aa | ||
|
e46bc2e9a1 | ||
|
01b37026eb | ||
|
9ebda3fbe8 | ||
|
b6174d9101 | ||
|
1303f539c3 | ||
|
624115bf11 | ||
|
224fae7a87 | ||
|
869bc5a376 | ||
|
d80a0146bd | ||
|
e1fd57a1d6 | ||
|
1f8ba4df3e | ||
|
c11725a5c8 | ||
|
309147bd98 | ||
|
1276112214 | ||
|
e9325b8798 | ||
|
e0a14ea2ba | ||
|
69c273486c | ||
|
ffb3549e06 | ||
|
16e1b17a33 | ||
|
3dfbc44c9a | ||
|
23bf089004 | ||
|
b5ef9f6241 | ||
|
4fa823e8df | ||
|
cfcb873235 | ||
|
0007c86a2c | ||
|
984d7fb7d8 | ||
|
92a94d2fdc | ||
|
0d621b68e0 | ||
|
47fdad9c87 | ||
|
3349817a0b | ||
|
45bd67cb04 | ||
|
2f4010b93b | ||
|
c4b21ee258 | ||
|
ad830dd885 | ||
|
366c647585 | ||
|
4f58b11330 | ||
|
609bc15406 | ||
|
c42db40a63 | ||
|
3aefbb548e | ||
|
baea105c18 | ||
|
c73d1fff6a | ||
|
3d183a393f | ||
|
f24fdf73b5 | ||
|
839ab2fafd | ||
|
637f19b208 | ||
|
031223104f | ||
|
6684d60526 | ||
|
cca58023ed | ||
|
bf5c08dbf3 | ||
|
be872ed672 | ||
|
70f803a1f6 | ||
|
4304cd4a79 | ||
|
8733369605 | ||
|
df78cc64a6 | ||
|
f844abcad9 | ||
|
21a39f8170 | ||
|
c3c46144fe | ||
|
d48d312c0a | ||
|
501fb45528 | ||
|
7d581759da | ||
|
d5a536ae36 | ||
|
26f92db5d8 | ||
|
5686c5ae5d | ||
|
9d9e64399c | ||
|
b6aba44e42 | ||
|
3ffbb242a4 | ||
|
af0bd90c15 | ||
|
73630331d1 | ||
|
ca6dbcb483 | ||
|
e1c54b2933 | ||
|
439cb3ccaa | ||
|
321397a349 | ||
|
464a0298c6 | ||
|
0501ce39cd | ||
|
4d5a30d953 | ||
|
5cfe7eca6f | ||
|
5082806b82 | ||
|
d1d91f0c2b | ||
|
ea0ade955b | ||
|
f085d3d0fe | ||
|
4bbdd0b2d0 | ||
|
d5fb21f330 | ||
|
f28800af7f | ||
|
cb3fd0cfc1 | ||
|
72ed878eeb | ||
|
f666951934 | ||
|
fcd0087589 | ||
|
ffee29d8e2 | ||
|
75bc4f8cb0 | ||
|
e7ae0fdf93 | ||
|
5d597f1ca9 | ||
|
0ac9d12d1c | ||
|
e74de94640 | ||
|
1464d09a43 | ||
|
2272e7a326 | ||
|
2bbe3d4c32 | ||
|
bb5d8152f1 | ||
|
dabf7c6e10 | ||
|
cdbc1d172c | ||
|
3133a47b7c | ||
|
c6ca547d58 | ||
|
797d5cb508 | ||
|
699d637bae | ||
|
9afd0ebb54 | ||
|
9685ae5a0a | ||
|
98600440d8 | ||
|
ed2e9e5ea8 | ||
|
ef57c0bc8b | ||
|
145c67dd21 | ||
|
6a67943408 | ||
|
9dfa218ba5 | ||
|
bf52eeaa9e | ||
|
011e4a27a6 | ||
|
7192449b21 | ||
|
d9bf848cfa | ||
|
bd95bcd50b | ||
|
f721289b1d | ||
|
a51402241b | ||
|
e0decbfd1d | ||
|
aee8dc16af | ||
|
5bd66cb3f7 | ||
|
ab7b0893e0 | ||
|
471233c1dc | ||
|
073f62d5bb | ||
|
a770689245 | ||
|
69f464418d | ||
|
f11c80162a | ||
|
7c2fa746ae | ||
|
03587dfdc7 | ||
|
dd27684d4b | ||
|
caebebeb37 | ||
|
592914dc91 | ||
|
2915133223 | ||
|
2d2ccd51df | ||
|
4a690e675a | ||
|
fb82c7a579 | ||
|
6f191acb27 | ||
|
7fb079cb43 | ||
|
7066e2815b | ||
|
e04cd79ff8 | ||
|
5e123972e8 | ||
|
b3753ab6da | ||
|
b8995bd4b1 | ||
|
769d9726e5 | ||
|
36222afa79 | ||
|
0795b4d171 | ||
|
2de35f3fc7 | ||
|
bac52eef3e | ||
|
8bbac458a6 | ||
|
5b71e94888 | ||
|
a914a44fba | ||
|
8e088a6d53 | ||
|
b508b4cd33 | ||
|
886d6ec9f7 | ||
|
21f75da75e | ||
|
20db968315 | ||
|
c3d25c59c5 | ||
|
3cde6dbe5a | ||
|
682bb3b62f | ||
|
b5b9eddaf0 | ||
|
ab430e0208 | ||
|
e13e4237f4 | ||
|
762786839c | ||
|
4ca52c0b38 | ||
|
6a87713f9f | ||
|
d08147c6d9 | ||
|
f423834bd0 | ||
|
d304ceb437 | ||
|
47afe34d97 | ||
|
4d23edddca | ||
|
68cb94daf2 | ||
|
864304f128 | ||
|
7690247ab4 | ||
|
3367b20965 | ||
|
748418590f | ||
|
ccf2b16d73 | ||
|
3be227fc86 | ||
|
a6dc5bd13f | ||
|
518da3b9cf | ||
|
2cf7ed477d | ||
|
cceccd1ecf | ||
|
beb49af514 | ||
|
90bd893568 | ||
|
e2c9ea3cd2 | ||
|
4b9fe0af0c | ||
|
1b9e0546e6 | ||
|
8cf52e0a77 | ||
|
0282e20b89 | ||
|
4e20e43037 | ||
|
383e6533e1 | ||
|
74fdd9a85a | ||
|
6af0a08838 | ||
|
12b469a0d6 | ||
|
288743b686 | ||
|
a3465e6154 | ||
|
3ba528ecdd | ||
|
304c47863b | ||
|
b68a4cc392 | ||
|
6dfb5000cc | ||
|
8d018b872f | ||
|
9e7b040b73 | ||
|
09c3d9c0dc | ||
|
dd9d68c97d | ||
|
d138395c75 | ||
|
91fe4ad535 | ||
|
9fa09d5ebe | ||
|
eadb0e640f | ||
|
be140d5e5a | ||
|
22c4155c7c | ||
|
498dc35d99 | ||
|
0f5a3e9163 | ||
|
da2636fa29 | ||
|
c1520da56d | ||
|
fee3fdd5a8 | ||
|
c944824ac7 | ||
|
4312e9bba0 | ||
|
39da471f79 | ||
|
570017d3b0 | ||
|
3652ac8100 | ||
|
f8fd76cff0 | ||
|
206ed9f7fb | ||
|
218171e9bc | ||
|
50b811d9aa | ||
|
1ae9870862 | ||
|
db97d76a24 | ||
|
354388cc8f | ||
|
2c59908ddd | ||
|
6a70eadba8 | ||
|
ec52460f02 | ||
|
1fabe51261 | ||
|
e6b6bd648d | ||
|
9d7965780d | ||
|
333fb03c2c | ||
|
8f537ef56a | ||
|
6163e1a6be | ||
|
dd1999eb8e | ||
|
4c0d5ede86 | ||
|
1c587c5e53 | ||
|
ddd13a3e2e | ||
|
7469f1f4ca | ||
|
363cb79951 | ||
|
46a158d701 | ||
|
8773caa26b | ||
|
89d8537e1b | ||
|
71f527eb1b | ||
|
4a9d69e169 | ||
|
d97747078e | ||
|
db629255db | ||
|
6ac38564e2 | ||
|
6c9ca0bf19 | ||
|
6b1ffbc634 | ||
|
748c934986 | ||
|
f7580c59a5 | ||
|
4e2b8af147 | ||
|
48f8ee57a6 | ||
|
faf45cf956 | ||
|
a1ac9494b2 | ||
|
6d5752fb4e | ||
|
37aa7ad2f6 | ||
|
e0667c6a03 | ||
|
103da863c4 | ||
|
fa66284000 | ||
|
0f0420ce04 | ||
|
438d88d8d4 | ||
|
5f2f321ed5 | ||
|
45cc3dc979 | ||
|
9c5f6c527b | ||
|
efa29b269c | ||
|
2ba7dff845 | ||
|
21a8570035 | ||
|
ef6fd608fa | ||
|
b05621005e | ||
|
3675a4cf3f | ||
|
5f7be848fc | ||
|
f96ddaa3e1 | ||
|
adff3c4251 | ||
|
765fc1e43d | ||
|
c106b2a988 | ||
|
2c231acebe | ||
|
a3e05254b5 | ||
|
582e97e4a5 | ||
|
0d619f7eb4 | ||
|
2bb9a85591 | ||
|
6add81cf15 | ||
|
629acbaa19 | ||
|
940274b1c2 | ||
|
accb3273f1 | ||
|
8ac873419f | ||
|
31babdfa51 | ||
|
80ad36e75b | ||
|
500e4eb4f5 | ||
|
82f9aa9da4 | ||
|
2d4b11aaee | ||
|
193aeff4d2 | ||
|
c4596544a3 | ||
|
30ba8d37dc | ||
|
eb6bea013f | ||
|
646b27b7a7 | ||
|
ea9d3f8ba1 | ||
|
290ee997b3 | ||
|
ad56024ffe | ||
|
f7b4d9ea50 | ||
|
6cb3b97144 | ||
|
a563275308 | ||
|
ddc35a7a52 | ||
|
26c37de2d4 | ||
|
fd0b1d90b0 | ||
|
dd5c314bd5 | ||
|
a59dcfc890 | ||
|
8e2649ba3b | ||
|
d73141792d | ||
|
469172947b | ||
|
833f26fd0e | ||
|
fb5fae4251 | ||
|
c22f189c86 | ||
|
61a6ee29d8 | ||
|
a585321ef9 | ||
|
45d6f1f890 | ||
|
b990d9ccd8 | ||
|
ea7f3c297e | ||
|
d640e4ac96 | ||
|
ddbda3ab9c | ||
|
76a3874662 | ||
|
8144507893 | ||
|
70adf878e8 | ||
|
5ef104b802 | ||
|
d4d2734dab | ||
|
62cc6c298f | ||
|
cbd08127ef | ||
|
eb13eb9882 | ||
|
9a487b0442 | ||
|
854eb36618 | ||
|
b04ebe397b | ||
|
5d13bf8e49 | ||
|
6dc95a82d6 | ||
|
1a682753c0 | ||
|
a4599d0374 | ||
|
83ff880603 | ||
|
ce18d343e8 | ||
|
93cab480d6 | ||
|
1966f1d9a3 | ||
|
f267fc3235 | ||
|
6cd2c91135 | ||
|
c2622a510c | ||
|
ebcc81dd73 | ||
|
30c9ec9611 | ||
|
51cb70d344 | ||
|
9acb5f66fe | ||
|
ae5950f187 | ||
|
766a2163dd | ||
|
db8c686dd3 | ||
|
597378bb78 | ||
|
9c3e6384f8 | ||
|
01db77a745 | ||
|
d287581620 | ||
|
193a1c7d54 | ||
|
8be9e91d21 | ||
|
f36af42f41 | ||
|
5509941aa4 | ||
|
d6f7f76c4d | ||
|
381490e31d | ||
|
addfee0607 | ||
|
2a85378456 | ||
|
d9a640c809 | ||
|
0756c5ac5c | ||
|
913a19c8f0 | ||
|
e2249f2515 | ||
|
f72ada4780 | ||
|
86d79f537a | ||
|
fb16806afe | ||
|
ffeca9f908 | ||
|
45d33c37ea | ||
|
ca79cb1ca7 | ||
|
5647477ba7 | ||
|
4711b3bc19 | ||
|
0d908b594c | ||
|
0e3936cb61 | ||
|
09b2dea995 | ||
|
3754718916 | ||
|
9b3f4933ac | ||
|
47cdc14bc0 | ||
|
430e4eb90d | ||
|
b728bb4323 | ||
|
a4172214d1 | ||
|
fb36958444 | ||
|
44d21d1ba4 | ||
|
bd3acdbf31 | ||
|
4a4046a704 | ||
|
7cca199a11 | ||
|
1649457372 | ||
|
7fcadb1d4d | ||
|
5c0e159d43 | ||
|
000e5e6145 | ||
|
8bb5a664c5 | ||
|
e032e5491d | ||
|
4bfa1ca5b8 | ||
|
13374917f3 | ||
|
799f842115 | ||
|
aa67f598dd | ||
|
9d502f5ee2 | ||
|
198c0037c6 | ||
|
e5d292919c | ||
|
029b438355 | ||
|
dd72013225 | ||
|
5d09c54e57 | ||
|
aac8aa1adf | ||
|
0f6e567b21 | ||
|
c65e165aeb | ||
|
979162da10 | ||
|
b27ed847d5 | ||
|
d93da4e86d | ||
|
8fd05004ea | ||
|
5384e4c470 | ||
|
b7ba6f1a36 | ||
|
7f55495287 | ||
|
31a78a5c9e | ||
|
193a36390b | ||
|
cf1afefc84 | ||
|
b8bf3d5bd9 | ||
|
58f149d889 | ||
|
90cc28986e | ||
|
d6eb390cee | ||
|
b5805accac | ||
|
bbfbd1e97a | ||
|
9bcb5b80ea | ||
|
8df408e07e | ||
|
bcb3a343d4 | ||
|
416a6caf2d | ||
|
44ef928c3c | ||
|
e4d688665c | ||
|
0299f2e235 | ||
|
c997d2d44a | ||
|
e322d3cae1 | ||
|
48904fc60b | ||
|
99a9a64708 | ||
|
065e15e4db | ||
|
72c1c6ee3d | ||
|
0276c15948 | ||
|
c6dea2523c | ||
|
6ba7418121 | ||
|
8ed4a997f8 | ||
|
2c9ebba5d7 | ||
|
7c2de92df3 | ||
|
b6325da9ab | ||
|
179dbd75aa | ||
|
b022b5a1b7 | ||
|
c2742b4d80 | ||
|
cfe42305be | ||
|
d828ba0bc6 | ||
|
6933f70af3 | ||
|
d94b27b723 | ||
|
3d9f339bd5 | ||
|
1d5cc83347 | ||
|
d8018cb937 | ||
|
4da96d937e | ||
|
446854ccf0 | ||
|
f011f2bce9 | ||
|
ff1f239a57 | ||
|
6aaff28c13 | ||
|
aaea1b1b9e | ||
|
8dbfba17d6 | ||
|
2ba0e3d7ff | ||
|
a7fcd898c2 | ||
|
97757fa1ee | ||
|
a56ba0ce1c | ||
|
8ddafafa84 | ||
|
d620bd7350 | ||
|
68f54cf5a4 | ||
|
f4da9fbf34 | ||
|
bf81192d73 | ||
|
e144ce19fa | ||
|
bd920a4630 | ||
|
7684101f15 | ||
|
06568aab88 | ||
|
5bf27d4fb2 | ||
|
1a7a843dea | ||
|
62f985edb8 | ||
|
54ec5e2ae0 | ||
|
63530294d4 | ||
|
da4214ad61 | ||
|
01d4381898 | ||
|
ab9cea1742 | ||
|
b81170c149 | ||
|
a884825b3c | ||
|
bbc78f03ae | ||
|
d5762f1d52 | ||
|
891b72c79c | ||
|
ddf94f8714 | ||
|
43324cf43a | ||
|
1bedcdaebd | ||
|
f3fc5f6179 | ||
|
99a9dbe5f4 | ||
|
be9d92b1c2 | ||
|
edfa6b18a1 | ||
|
fa80aa54a9 | ||
|
0e4c5ed439 | ||
|
c120fa8c87 | ||
|
2bb7652dfe | ||
|
e928027e16 | ||
|
dccac11527 | ||
|
ebcacfc6c5 | ||
|
44b14f4933 | ||
|
774b1095a3 | ||
|
0bb4b0d71d | ||
|
2248206a66 | ||
|
0a5e1048ce | ||
|
9ddd631549 | ||
|
1b958a9b31 | ||
|
282f7dd8d6 | ||
|
e152b625fa | ||
|
ee88c3b914 | ||
|
8663e204c7 | ||
|
e7a1572450 | ||
|
3f038b4d67 | ||
|
06d822d9e0 | ||
|
85d1760b97 | ||
|
716e64de68 | ||
|
15b7b7eaa7 | ||
|
d34b70cb7b | ||
|
ee6e3ed7eb | ||
|
2d185dfb8a | ||
|
27d99a0094 | ||
|
93a32f4e15 | ||
|
8d3c2d9bd2 | ||
|
7a6b60772c | ||
|
d2f06e804f | ||
|
a93519ec3e | ||
|
1190ea7e69 | ||
|
c17a2ec55b | ||
|
d3668e413d | ||
|
f353b49d36 | ||
|
47953c84d7 | ||
|
4de9907456 | ||
|
61caeed5a3 | ||
|
5e42afd85a | ||
|
d0c652f0f5 | ||
|
93a7dd9cf3 | ||
|
9e9e9a9f85 | ||
|
ff2bb513ed | ||
|
89b87db1c8 | ||
|
67822d3cb0 | ||
|
10e0f2224a | ||
|
7104e775d8 | ||
|
d682e55812 | ||
|
afad39bf80 | ||
|
954a02126e | ||
|
7d13cbb10b | ||
|
294788aa1a | ||
|
116a838eef | ||
|
f839038c8f | ||
|
285c513211 | ||
|
95ba38524b | ||
|
68f1a69b6a | ||
|
8c950237a4 | ||
|
e1217f2054 | ||
|
ae51dcec63 | ||
|
22554f85ad | ||
|
c1a7e4d9eb | ||
|
416bbd4d9e | ||
|
45fc10e3bf | ||
|
0502f6ba42 | ||
|
86fd62a09e | ||
|
2137737d9b | ||
|
621cfa7ed2 | ||
|
6f9c7f39fb | ||
|
c486b9c37e | ||
|
df43a8e2c5 | ||
|
941efb3f72 | ||
|
d2b2cc0521 | ||
|
853b5f28a4 | ||
|
935779b5e3 | ||
|
25a2615d5f | ||
|
50ac691126 | ||
|
4d35fd45df | ||
|
6f3b1b565f | ||
|
1952bb6ddc | ||
|
170d1fe205 | ||
|
737ac8e908 | ||
|
9752819bdb | ||
|
4b47646e28 | ||
|
5f619d7a39 | ||
|
777c8b4549 | ||
|
46d80d56a5 | ||
|
a9c605ea97 | ||
|
52a979da2d | ||
|
5592a8e08b | ||
|
f30555be0f | ||
|
f662e4e049 | ||
|
e29c93a1e9 | ||
|
ddec2dbaa9 | ||
|
b8fc5c9b7a | ||
|
8477d0b89d | ||
|
afb5c01947 | ||
|
2b6852e7a0 | ||
|
d05cf8e59b | ||
|
b6b55b2e65 | ||
|
c5e536aeaa | ||
|
07ef12ce8e | ||
|
0c846ca31f | ||
|
0a2efeb5aa | ||
|
6222088f15 | ||
|
fd1ebf5f71 | ||
|
11a726b40b | ||
|
c0a5e55f7f | ||
|
b34a491172 | ||
|
a27c652501 | ||
|
836127f369 | ||
|
20114b0059 | ||
|
c9e6dcc2d9 | ||
|
00bf2903bc | ||
|
698e74a496 | ||
|
695c67a714 | ||
|
abb6bcd199 | ||
|
4e16800b52 | ||
|
4a9d80268a | ||
|
781b01a007 | ||
|
1685ac1953 | ||
|
2237a7eb9e | ||
|
caa31de685 | ||
|
f88a0f8229 | ||
|
b78d51410b | ||
|
6392a8e01d | ||
|
912269303e | ||
|
abebf82042 | ||
|
25e8b259f7 | ||
|
3624763073 | ||
|
d55e0b6ba3 | ||
|
06923c64c1 | ||
|
3ade72b90d | ||
|
67f6c0a5a7 | ||
|
cd247a6689 | ||
|
b97dafc303 | ||
|
4d352faae3 | ||
|
f02faa1b74 | ||
|
1937177e1a | ||
|
3251ef0bf5 | ||
|
8afcb9b6d3 | ||
|
c02306a66b | ||
|
c066d11eb1 | ||
|
8f0f3e6ace | ||
|
f07d730e03 | ||
|
a4bfcb34d5 | ||
|
6667178703 | ||
|
c946e7dd82 | ||
|
0f79aea36f | ||
|
66f62566d6 | ||
|
97adf2f7fd | ||
|
0452e8698d | ||
|
16b7db4639 | ||
|
d7ba0e3a8a | ||
|
17d741039c | ||
|
0043329cc1 | ||
|
c3c22022f6 | ||
|
1778c56be0 | ||
|
55eb81dbf9 | ||
|
a7e427efc2 | ||
|
1798abfc3e | ||
|
34d5c557d8 | ||
|
3d123bc2f2 | ||
|
7cae5879c8 | ||
|
bcfd4d2efa | ||
|
e4ba09178f | ||
|
703a56940c | ||
|
3deddf6355 | ||
|
b3bfcf8665 | ||
|
458b258ad5 | ||
|
fcfe34f2f6 | ||
|
e34fe9a059 | ||
|
d4088ac854 | ||
|
33e179e44b | ||
|
2a08170fb5 | ||
|
088b9ab555 | ||
|
3e38fecd55 | ||
|
7cfdf235bc | ||
|
80a1180090 | ||
|
b05f2e99e8 | ||
|
4fd5e2094a | ||
|
9547edf845 | ||
|
d67903fd4b | ||
|
01a56540d0 | ||
|
c95f160216 | ||
|
fe4bc28f37 | ||
|
b69031c01a | ||
|
bab28a8fc9 | ||
|
ec2c5cb546 | ||
|
2c968e94cb | ||
|
fadf30b942 | ||
|
cc05cabcb5 | ||
|
ef582f1bc2 | ||
|
b75b5cb165 | ||
|
0a029e6e01 | ||
|
85b647b7ab | ||
|
1e495684af | ||
|
5b051631ec | ||
|
ce3885d4f6 | ||
|
1322a0c693 | ||
|
25fd7276ea | ||
|
af5f71f5ac | ||
|
05f8bd0d3c | ||
|
a5cf912ae8 | ||
|
a5ede835b2 | ||
|
d8ba1f4309 | ||
|
6e9f64262c | ||
|
2260e14868 | ||
|
688978369f | ||
|
b9851d665e | ||
|
a09b2ab45c | ||
|
bc870a305f | ||
|
c2196fb704 | ||
|
47e8f3c3e6 | ||
|
b0601a0958 | ||
|
4e999657cc | ||
|
d560a6baef | ||
|
e7ba6a3141 | ||
|
0a9ef9e047 | ||
|
4c526dfcaa | ||
|
dfa935bd72 | ||
|
1c9da7b84b | ||
|
5eae123668 | ||
|
567c103e59 | ||
|
e5f8e4babc | ||
|
0686926048 | ||
|
f6d8786179 | ||
|
3760e3b45c | ||
|
2e88e73509 | ||
|
0f2c0c034d | ||
|
767cd14639 | ||
|
8f8587f79d | ||
|
ff8e4597e5 | ||
|
0f8da5b738 | ||
|
c6aaa80c62 | ||
|
1e0fe6d7c8 | ||
|
5ed1441ddb | ||
|
d7adada29c | ||
|
2826e184d2 | ||
|
63b60ad62c | ||
|
185486c6fc | ||
|
53c8085207 | ||
|
a05942fe15 | ||
|
d9f6449767 | ||
|
15e82ece07 | ||
|
861d3b1500 | ||
|
518f87ef62 | ||
|
a166af9990 | ||
|
e76b44fc8f | ||
|
b7e7867b9b | ||
|
e8949bbffd | ||
|
27c40ccf20 | ||
|
66250e0dd8 | ||
|
0e43cc4274 | ||
|
013c726869 | ||
|
83ad45644b | ||
|
2dddb2e3da | ||
|
a901014e48 | ||
|
ae5c27f3bb | ||
|
f4a4b59a14 | ||
|
0be5cf31dc | ||
|
9d69f2fb3e | ||
|
73f1484025 | ||
|
455b0c82ea | ||
|
acafa0b417 | ||
|
3ca36fef4d | ||
|
173d0b77ac | ||
|
211b60bba2 | ||
|
d1bad521e9 | ||
|
8565367993 | ||
|
220cad8661 | ||
|
5d3883c9a0 | ||
|
1a733746f2 | ||
|
1a215e9b9e | ||
|
2f8cf941af | ||
|
3336fd0f11 | ||
|
f6fba19ac4 | ||
|
928d56b566 | ||
|
e37ed8ea5e | ||
|
c29ca5ad32 | ||
|
390f61ff3b | ||
|
61283b3d4f | ||
|
588ec80b4c | ||
|
6a81f91902 | ||
|
71dc05f894 | ||
|
30f025dbcd | ||
|
2920973961 | ||
|
f07d7b02f1 | ||
|
8736f2c6ef | ||
|
c721e17aa0 | ||
|
9cd599dee3 | ||
|
2293c1c5a8 | ||
|
bd26da351a | ||
|
fec6f39e4e | ||
|
308dfd1be0 | ||
|
0354e53eea | ||
|
9a30a3656a | ||
|
07aca2f62c | ||
|
1bda8a5d9d | ||
|
75f37d7361 | ||
|
d2c4785af1 | ||
|
eae06602a9 | ||
|
9c5b5d0ac1 | ||
|
c4d72829e9 | ||
|
c947360da8 | ||
|
2f2dae074b | ||
|
9d531fcb62 | ||
|
aae1d10eea | ||
|
3e78e398c0 | ||
|
7d1f3deaca | ||
|
ccf3a4c5c1 | ||
|
8b88de624d | ||
|
eee4e30e25 | ||
|
fbb9d75cc8 | ||
|
107f5b38ca | ||
|
ac4276f212 | ||
|
6778046906 | ||
|
fbb6c41035 | ||
|
815e788245 | ||
|
3bd20e3ff8 | ||
|
f39a1fd580 | ||
|
0818d5aabb | ||
|
0832a2fa8e | ||
|
c2a7b9a77b | ||
|
245ae35a81 | ||
|
d56b9f14a2 | ||
|
040dca0c31 | ||
|
a7e6919b96 | ||
|
61037cf38a | ||
|
7f3a8f27ab | ||
|
dd92c53410 | ||
|
fad1eb8952 | ||
|
c875b18e34 | ||
|
1841d196ff | ||
|
e783c90693 | ||
|
6a949c24e2 | ||
|
11f1a4662e | ||
|
4f6a235d77 | ||
|
a601be4708 | ||
|
65e3a31354 | ||
|
c6d23ba26a | ||
|
0bb0537215 | ||
|
ca3054fdac | ||
|
9056a5d9e7 | ||
|
a1b3fc1a79 | ||
|
d251f4201b | ||
|
aec99ba173 | ||
|
bf0225fe93 | ||
|
a4ccd45537 | ||
|
70679856bd | ||
|
c21d580fd8 | ||
|
98f5cd9393 | ||
|
637e0376d4 | ||
|
9ff28d97b1 | ||
|
76fc0dfcc4 | ||
|
603b2d9502 | ||
|
affaf3d0ba | ||
|
b3a519c082 | ||
|
ab146f652a | ||
|
ee1dd612fb | ||
|
aae8b1ac14 | ||
|
24d59315df | ||
|
d6a321dda9 | ||
|
120a0f1882 | ||
|
3a5ddf33a9 | ||
|
0e0d9d2d08 | ||
|
666f09c576 | ||
|
a4ea4082e6 | ||
|
70a5153271 | ||
|
45127c9648 | ||
|
1139d214a0 | ||
|
379db26300 | ||
|
912e92bacd | ||
|
bfb29c0d74 | ||
|
ad1ddf2bff | ||
|
490064cdf8 | ||
|
5c9b962639 | ||
|
097cd3ed72 | ||
|
a1df116c58 | ||
|
a6e5939ad2 | ||
|
d80cefd416 | ||
|
7211906697 | ||
|
3f205f1b10 | ||
|
858a93e98a | ||
|
f43d7f8c70 | ||
|
7bd9623b68 | ||
|
1985c2d284 | ||
|
575e1bac4c | ||
|
b77ae9e783 | ||
|
b92836ee7b | ||
|
4e025b5105 | ||
|
8fa89f5ece | ||
|
ac48457748 | ||
|
cbb027c56c | ||
|
6b39052fcc | ||
|
a3013c6224 | ||
|
21d9cb5fe5 | ||
|
a94a4732ec | ||
|
718939834a | ||
|
84834eb5d3 | ||
|
ed5471c7ab | ||
|
0cae89b2b6 | ||
|
f3b1b1d8e4 | ||
|
5f5886edea | ||
|
6f025af99f | ||
|
74f08323d1 | ||
|
fb3cb229e9 | ||
|
290b740392 | ||
|
c918617a6a | ||
|
fadbc76aaa | ||
|
d788105eeb | ||
|
dfcc61a3e1 | ||
|
e28562949b | ||
|
bd893e29de | ||
|
71e2486d01 | ||
|
2bbc9a16ad | ||
|
3619d8960b | ||
|
f520d1b7f8 | ||
|
a0b7112c9c | ||
|
98726585f6 | ||
|
8a8af4e909 | ||
|
123628c66a | ||
|
56a062d01f | ||
|
c7adb62831 | ||
|
a6676718cb | ||
|
e3261c6b88 | ||
|
43b34610a6 | ||
|
8e25ae34d6 | ||
|
843147e16b | ||
|
c450947eee | ||
|
279fa3851b | ||
|
03f21b0f35 | ||
|
c9dcd4f7ad | ||
|
ef85394a16 | ||
|
e112718d2d | ||
|
49758f2383 | ||
|
632e3844b9 | ||
|
41633090ba | ||
|
e9f26b7d50 | ||
|
1048638e30 | ||
|
7272ca2564 | ||
|
07b50a1453 | ||
|
77264493eb | ||
|
5272786fbb | ||
|
4e3513bd41 | ||
|
035ca6fec2 | ||
|
10f53d9809 | ||
|
d033848d3f | ||
|
22dc4cbcb8 | ||
|
5bcd294f47 | ||
|
5895524a25 | ||
|
d11bb17698 | ||
|
206698238d | ||
|
af3957d76b | ||
|
a8235fc3a2 | ||
|
912d0a0149 | ||
|
c89da1bd66 | ||
|
3709f5c7a9 | ||
|
6986fc9025 | ||
|
61453d48e6 | ||
|
b574a12fff | ||
|
4e09391b2e | ||
|
9092c9c80c | ||
|
b0f90d05f2 | ||
|
36c14655ec | ||
|
4b1012b185 | ||
|
29ac34cb92 | ||
|
f91fcd518a | ||
|
776c5526c8 | ||
|
7a93b5c315 | ||
|
78607a0c3e | ||
|
68c6a9e748 | ||
|
8053f49acc | ||
|
93bd66ad3e | ||
|
9c54030b61 | ||
|
011844b7ac | ||
|
359228127d | ||
|
ba7f0fce71 | ||
|
2a914f98b7 | ||
|
dfae27ca32 | ||
|
459b74294d | ||
|
7170e8972d | ||
|
42e78b14e9 | ||
|
2d4a42ceba | ||
|
63dbb6a291 | ||
|
ffd035f25a | ||
|
9dc3cdca5d | ||
|
e4677eb6fa | ||
|
4607d30cad | ||
|
18fcea35da | ||
|
35f1d043f4 | ||
|
7a6e249614 | ||
|
02bf018271 | ||
|
2fb771f0de | ||
|
4c9408d772 | ||
|
e5663f97c5 | ||
|
182a722a55 | ||
|
e7d8692836 | ||
|
0b14d3fddf | ||
|
55b6d63774 | ||
|
3526d9fd19 | ||
|
764bc0c204 | ||
|
27f025bc39 | ||
|
1fdf5a7a39 | ||
|
44b4b10eb8 | ||
|
9cddea11c7 | ||
|
7b59f38cc8 | ||
|
c009f6e4df | ||
|
56f38c178c | ||
|
d9a305a0f2 | ||
|
d640bc9b1c | ||
|
40632b2486 | ||
|
0b30373bd3 | ||
|
c216937dc4 | ||
|
1320108703 | ||
|
e8894b159f | ||
|
649c782782 | ||
|
d2d087dcb7 | ||
|
7ec56505ea | ||
|
cee2de41ca | ||
|
7f8279fe54 | ||
|
802a150c76 | ||
|
9c5d588630 | ||
|
050cd583df | ||
|
3b9828c0fc | ||
|
445cb60f2c | ||
|
b949259c57 | ||
|
3850183e95 | ||
|
8d4b69927b | ||
|
f6a6cb281b | ||
|
c92cdec36e | ||
|
45a1457a4f | ||
|
9cad11b2b3 | ||
|
c2022acf67 | ||
|
9ea5a3b89c | ||
|
cfe388896b | ||
|
6b0e51caf4 | ||
|
be31a4b576 | ||
|
72c292d2c3 | ||
|
fb74c1977e | ||
|
54285e4bcd | ||
|
b3edeca295 | ||
|
319be60c80 | ||
|
06fa1adc27 | ||
|
94c573b469 | ||
|
118b5bfda7 | ||
|
f9c75a43ae | ||
|
e909cbfd4a | ||
|
797d339132 | ||
|
def58a3e18 | ||
|
ac17876cb6 | ||
|
0495da07a4 | ||
|
b1962ef75d | ||
|
c77436fc12 | ||
|
6b6ed23e25 | ||
|
c878e11913 | ||
|
adcf9310a0 | ||
|
a12dc692ce | ||
|
e5e9e807ca | ||
|
fc599f8b9a | ||
|
e1f6110dc8 | ||
|
336c62bfc2 | ||
|
583d5b3bdb | ||
|
891a5d4dd8 | ||
|
51f445bc72 | ||
|
51bb4c6f5d | ||
|
225957ba8a | ||
|
54b8d2c3f3 | ||
|
6f27b5fd2e | ||
|
dbd5a02617 |
28
.env.example
28
.env.example
|
@ -8,7 +8,7 @@ USE_HTTPS=true
|
||||||
DOMAIN=your.domain.here
|
DOMAIN=your.domain.here
|
||||||
EMAIL=your@email.here
|
EMAIL=your@email.here
|
||||||
|
|
||||||
# Instance defualt language (see options at bookwyrm/settings.py "LANGUAGES"
|
# Instance default language (see options at bookwyrm/settings.py "LANGUAGES"
|
||||||
LANGUAGE_CODE="en-us"
|
LANGUAGE_CODE="en-us"
|
||||||
# Used for deciding which editions to prefer
|
# Used for deciding which editions to prefer
|
||||||
DEFAULT_LANGUAGE="English"
|
DEFAULT_LANGUAGE="English"
|
||||||
|
@ -16,6 +16,11 @@ DEFAULT_LANGUAGE="English"
|
||||||
## Leave unset to allow all hosts
|
## Leave unset to allow all hosts
|
||||||
# ALLOWED_HOSTS="localhost,127.0.0.1,[::1]"
|
# ALLOWED_HOSTS="localhost,127.0.0.1,[::1]"
|
||||||
|
|
||||||
|
# Specify when the site is served from a port that is not the default
|
||||||
|
# for the protocol (80 for HTTP or 443 for HTTPS).
|
||||||
|
# Probably only necessary in development.
|
||||||
|
# PORT=1333
|
||||||
|
|
||||||
MEDIA_ROOT=images/
|
MEDIA_ROOT=images/
|
||||||
|
|
||||||
# Database configuration
|
# Database configuration
|
||||||
|
@ -71,14 +76,20 @@ ENABLE_THUMBNAIL_GENERATION=true
|
||||||
USE_S3=false
|
USE_S3=false
|
||||||
AWS_ACCESS_KEY_ID=
|
AWS_ACCESS_KEY_ID=
|
||||||
AWS_SECRET_ACCESS_KEY=
|
AWS_SECRET_ACCESS_KEY=
|
||||||
|
# seconds for signed S3 urls to expire
|
||||||
|
# this is currently only used for user export files
|
||||||
|
S3_SIGNED_URL_EXPIRY=900
|
||||||
|
|
||||||
# Commented are example values if you use a non-AWS, S3-compatible service
|
# Commented are example values if you use a non-AWS, S3-compatible service
|
||||||
# AWS S3 should work with only AWS_STORAGE_BUCKET_NAME and AWS_S3_REGION_NAME
|
# AWS S3 should work with only AWS_STORAGE_BUCKET_NAME and AWS_S3_REGION_NAME
|
||||||
# non-AWS S3-compatible services will need AWS_STORAGE_BUCKET_NAME,
|
# non-AWS S3-compatible services will need AWS_STORAGE_BUCKET_NAME,
|
||||||
# along with both AWS_S3_CUSTOM_DOMAIN and AWS_S3_ENDPOINT_URL
|
# along with both AWS_S3_CUSTOM_DOMAIN and AWS_S3_ENDPOINT_URL.
|
||||||
|
# AWS_S3_URL_PROTOCOL must end in ":" and defaults to the same protocol as
|
||||||
|
# the BookWyrm instance ("http:" or "https:", based on USE_SSL).
|
||||||
|
|
||||||
# AWS_STORAGE_BUCKET_NAME= # "example-bucket-name"
|
# AWS_STORAGE_BUCKET_NAME= # "example-bucket-name"
|
||||||
# AWS_S3_CUSTOM_DOMAIN=None # "example-bucket-name.s3.fr-par.scw.cloud"
|
# AWS_S3_CUSTOM_DOMAIN=None # "example-bucket-name.s3.fr-par.scw.cloud"
|
||||||
|
# AWS_S3_URL_PROTOCOL=None # "http:"
|
||||||
# AWS_S3_REGION_NAME=None # "fr-par"
|
# AWS_S3_REGION_NAME=None # "fr-par"
|
||||||
# AWS_S3_ENDPOINT_URL=None # "https://s3.fr-par.scw.cloud"
|
# AWS_S3_ENDPOINT_URL=None # "https://s3.fr-par.scw.cloud"
|
||||||
|
|
||||||
|
@ -133,7 +144,14 @@ HTTP_X_FORWARDED_PROTO=false
|
||||||
TWO_FACTOR_LOGIN_VALIDITY_WINDOW=2
|
TWO_FACTOR_LOGIN_VALIDITY_WINDOW=2
|
||||||
TWO_FACTOR_LOGIN_MAX_SECONDS=60
|
TWO_FACTOR_LOGIN_MAX_SECONDS=60
|
||||||
|
|
||||||
# Additional hosts to allow in the Content-Security-Policy, "self" (should be DOMAIN)
|
# Additional hosts to allow in the Content-Security-Policy, "self" (should be
|
||||||
# and AWS_S3_CUSTOM_DOMAIN (if used) are added by default.
|
# DOMAIN with optionally ":" + PORT) and AWS_S3_CUSTOM_DOMAIN (if used) are
|
||||||
# Value should be a comma-separated list of host names.
|
# added by default. Value should be a comma-separated list of host names.
|
||||||
CSP_ADDITIONAL_HOSTS=
|
CSP_ADDITIONAL_HOSTS=
|
||||||
|
|
||||||
|
# Time before being logged out (in seconds)
|
||||||
|
# SESSION_COOKIE_AGE=2592000 # current default: 30 days
|
||||||
|
|
||||||
|
# Maximum allowed memory for file uploads (increase if users are having trouble
|
||||||
|
# uploading BookWyrm export files).
|
||||||
|
# DATA_UPLOAD_MAX_MEMORY_MiB=100
|
||||||
|
|
78
.github/pull_request_template.md
vendored
Normal file
78
.github/pull_request_template.md
vendored
Normal file
|
@ -0,0 +1,78 @@
|
||||||
|
<!--
|
||||||
|
Thanks for contributing!
|
||||||
|
|
||||||
|
Please ensure the name of your PR is written in imperative present tense. For example:
|
||||||
|
|
||||||
|
- "fix color contrast on submit buttons"
|
||||||
|
- "add 'favourite food' value to Author model"
|
||||||
|
|
||||||
|
To check (tick) a list item, replace the space between square brackets with an x, like this:
|
||||||
|
|
||||||
|
- [x] I have checked the box
|
||||||
|
|
||||||
|
You can find more information and tips for BookWyrm contributors at https://docs.joinbookwyrm.com/contributing.html
|
||||||
|
-->
|
||||||
|
|
||||||
|
## Are you finished?
|
||||||
|
|
||||||
|
### Linters
|
||||||
|
<!--
|
||||||
|
Please run linters on your code before submitting your PR.
|
||||||
|
If you miss this step it is likely that the GitHub task runners will fail.
|
||||||
|
-->
|
||||||
|
|
||||||
|
- [ ] I have checked my code with `black`, `pylint`, and `mypy`, or `./bw-dev formatters`
|
||||||
|
|
||||||
|
### Tests
|
||||||
|
<!-- Check one -->
|
||||||
|
|
||||||
|
- [ ] My changes do not need new tests
|
||||||
|
- [ ] All tests I have added are passing
|
||||||
|
- [ ] I have written tests but need help to make them pass
|
||||||
|
- [ ] I have not written tests and need help to write them
|
||||||
|
|
||||||
|
## What type of Pull Request is this?
|
||||||
|
<!-- Check all that apply -->
|
||||||
|
|
||||||
|
- [ ] Bug Fix
|
||||||
|
- [ ] Enhancement
|
||||||
|
- [ ] Plumbing / Internals / Dependencies
|
||||||
|
- [ ] Refactor
|
||||||
|
|
||||||
|
## Does this PR change settings or dependencies, or break something?
|
||||||
|
<!-- Check all that apply -->
|
||||||
|
|
||||||
|
- [ ] This PR changes or adds default settings, configuration, or .env values
|
||||||
|
- [ ] This PR changes or adds dependencies
|
||||||
|
- [ ] This PR introduces other breaking changes
|
||||||
|
|
||||||
|
### Details of breaking or configuration changes (if any of above checked)
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!--
|
||||||
|
Describe what your pull request does here.
|
||||||
|
|
||||||
|
For pull requests that relate or close an issue, please include them
|
||||||
|
below. We like to follow [Github's guidance on linking issues to pull requests](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue).
|
||||||
|
|
||||||
|
For example having the text: "closes #1234" would connect the current pull
|
||||||
|
request to issue 1234. And when we merge the pull request, Github will
|
||||||
|
automatically close the issue.
|
||||||
|
-->
|
||||||
|
|
||||||
|
- Related Issue #
|
||||||
|
- Closes #
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
<!--
|
||||||
|
Documentation for users, admins, and developers is an important way to keep the BookWyrm community welcoming and make Bookwyrm easy to use.
|
||||||
|
Our documentation is maintained in a separate repository at https://github.com/bookwyrm-social/documentation
|
||||||
|
-->
|
||||||
|
|
||||||
|
<!-- Check all that apply -->
|
||||||
|
|
||||||
|
- [ ] New or amended documentation will be required if this PR is merged
|
||||||
|
- [ ] I have created a matching pull request in the Documentation repository
|
||||||
|
- [ ] I intend to create a matching pull request in the Documentation repository after this PR is merged
|
||||||
|
|
26
.github/release.yml
vendored
Normal file
26
.github/release.yml
vendored
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
changelog:
|
||||||
|
exclude:
|
||||||
|
labels:
|
||||||
|
- ignore-for-release
|
||||||
|
categories:
|
||||||
|
- title: ‼️ Breaking Changes & New Settings ⚙️
|
||||||
|
labels:
|
||||||
|
- breaking-change
|
||||||
|
- config-change
|
||||||
|
- title: Updated Dependencies 🧸
|
||||||
|
labels:
|
||||||
|
- dependencies
|
||||||
|
- title: New Features 🎉
|
||||||
|
labels:
|
||||||
|
- enhancement
|
||||||
|
- title: Bug Fixes 🐛
|
||||||
|
labels:
|
||||||
|
- fix
|
||||||
|
- bug
|
||||||
|
- title: Internals/Plumbing 👩🔧
|
||||||
|
- plumbing
|
||||||
|
- tests
|
||||||
|
- deployment
|
||||||
|
- title: Other Changes
|
||||||
|
labels:
|
||||||
|
- "*"
|
17
.github/workflows/black.yml
vendored
17
.github/workflows/black.yml
vendored
|
@ -1,17 +0,0 @@
|
||||||
name: Python Formatting (run ./bw-dev black to fix)
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [ main ]
|
|
||||||
pull_request:
|
|
||||||
branches: [ main ]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
lint:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- uses: actions/setup-python@v4
|
|
||||||
- uses: psf/black@22.12.0
|
|
||||||
with:
|
|
||||||
version: 22.12.0
|
|
8
.github/workflows/codeql-analysis.yml
vendored
8
.github/workflows/codeql-analysis.yml
vendored
|
@ -36,11 +36,11 @@ jobs:
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
# Initializes the CodeQL tools for scanning.
|
# Initializes the CodeQL tools for scanning.
|
||||||
- name: Initialize CodeQL
|
- name: Initialize CodeQL
|
||||||
uses: github/codeql-action/init@v2
|
uses: github/codeql-action/init@v3
|
||||||
with:
|
with:
|
||||||
languages: ${{ matrix.language }}
|
languages: ${{ matrix.language }}
|
||||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||||
|
@ -51,7 +51,7 @@ jobs:
|
||||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||||
# If this step fails, then you should remove it and run the build manually (see below)
|
# If this step fails, then you should remove it and run the build manually (see below)
|
||||||
- name: Autobuild
|
- name: Autobuild
|
||||||
uses: github/codeql-action/autobuild@v2
|
uses: github/codeql-action/autobuild@v3
|
||||||
|
|
||||||
# ℹ️ Command-line programs to run using the OS shell.
|
# ℹ️ Command-line programs to run using the OS shell.
|
||||||
# 📚 https://git.io/JvXDl
|
# 📚 https://git.io/JvXDl
|
||||||
|
@ -65,4 +65,4 @@ jobs:
|
||||||
# make release
|
# make release
|
||||||
|
|
||||||
- name: Perform CodeQL Analysis
|
- name: Perform CodeQL Analysis
|
||||||
uses: github/codeql-action/analyze@v2
|
uses: github/codeql-action/analyze@v3
|
||||||
|
|
2
.github/workflows/curlylint.yaml
vendored
2
.github/workflows/curlylint.yaml
vendored
|
@ -10,7 +10,7 @@ jobs:
|
||||||
lint:
|
lint:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install curlylint
|
- name: Install curlylint
|
||||||
run: pip install curlylint
|
run: pip install curlylint
|
||||||
|
|
61
.github/workflows/django-tests.yml
vendored
61
.github/workflows/django-tests.yml
vendored
|
@ -1,61 +0,0 @@
|
||||||
name: Run Python Tests
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [ main ]
|
|
||||||
pull_request:
|
|
||||||
branches: [ main ]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
|
|
||||||
runs-on: ubuntu-20.04
|
|
||||||
services:
|
|
||||||
postgres:
|
|
||||||
image: postgres:13
|
|
||||||
env:
|
|
||||||
POSTGRES_USER: postgres
|
|
||||||
POSTGRES_PASSWORD: hunter2
|
|
||||||
options: >-
|
|
||||||
--health-cmd pg_isready
|
|
||||||
--health-interval 10s
|
|
||||||
--health-timeout 5s
|
|
||||||
--health-retries 5
|
|
||||||
ports:
|
|
||||||
- 5432:5432
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: Set up Python
|
|
||||||
uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
python-version: 3.9
|
|
||||||
- name: Install Dependencies
|
|
||||||
run: |
|
|
||||||
python -m pip install --upgrade pip
|
|
||||||
pip install -r requirements.txt
|
|
||||||
- name: Run Tests
|
|
||||||
env:
|
|
||||||
SECRET_KEY: beepbeep
|
|
||||||
DEBUG: false
|
|
||||||
USE_HTTPS: true
|
|
||||||
DOMAIN: your.domain.here
|
|
||||||
BOOKWYRM_DATABASE_BACKEND: postgres
|
|
||||||
MEDIA_ROOT: images/
|
|
||||||
POSTGRES_PASSWORD: hunter2
|
|
||||||
POSTGRES_USER: postgres
|
|
||||||
POSTGRES_DB: github_actions
|
|
||||||
POSTGRES_HOST: 127.0.0.1
|
|
||||||
CELERY_BROKER: ""
|
|
||||||
REDIS_BROKER_PORT: 6379
|
|
||||||
REDIS_BROKER_PASSWORD: beep
|
|
||||||
USE_DUMMY_CACHE: true
|
|
||||||
FLOWER_PORT: 8888
|
|
||||||
EMAIL_HOST: "smtp.mailgun.org"
|
|
||||||
EMAIL_PORT: 587
|
|
||||||
EMAIL_HOST_USER: ""
|
|
||||||
EMAIL_HOST_PASSWORD: ""
|
|
||||||
EMAIL_USE_TLS: true
|
|
||||||
ENABLE_PREVIEW_IMAGES: false
|
|
||||||
ENABLE_THUMBNAIL_GENERATION: true
|
|
||||||
HTTP_X_FORWARDED_PROTO: false
|
|
||||||
run: |
|
|
||||||
pytest -n 3
|
|
5
.github/workflows/lint-frontend.yaml
vendored
5
.github/workflows/lint-frontend.yaml
vendored
|
@ -19,10 +19,11 @@ jobs:
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it.
|
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it.
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install modules
|
- name: Install modules
|
||||||
run: npm install stylelint stylelint-config-recommended stylelint-config-standard stylelint-order eslint
|
# run: npm install stylelint stylelint-config-recommended stylelint-config-standard stylelint-order eslint
|
||||||
|
run: npm install eslint@^8.9.0
|
||||||
|
|
||||||
# See .stylelintignore for files that are not linted.
|
# See .stylelintignore for files that are not linted.
|
||||||
# - name: Run stylelint
|
# - name: Run stylelint
|
||||||
|
|
4
.github/workflows/prettier.yaml
vendored
4
.github/workflows/prettier.yaml
vendored
|
@ -14,10 +14,10 @@ jobs:
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it.
|
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it.
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
|
|
||||||
- name: Install modules
|
- name: Install modules
|
||||||
run: npm install prettier
|
run: npm install prettier@2.5.1
|
||||||
|
|
||||||
- name: Run Prettier
|
- name: Run Prettier
|
||||||
run: npx prettier --check bookwyrm/static/js/*.js
|
run: npx prettier --check bookwyrm/static/js/*.js
|
||||||
|
|
27
.github/workflows/pylint.yml
vendored
27
.github/workflows/pylint.yml
vendored
|
@ -1,27 +0,0 @@
|
||||||
name: Pylint
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches: [ main ]
|
|
||||||
pull_request:
|
|
||||||
branches: [ main ]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: Set up Python 3.9
|
|
||||||
uses: actions/setup-python@v4
|
|
||||||
with:
|
|
||||||
python-version: 3.9
|
|
||||||
- name: Install Dependencies
|
|
||||||
run: |
|
|
||||||
python -m pip install --upgrade pip
|
|
||||||
pip install -r requirements.txt
|
|
||||||
- name: Analysing the code with pylint
|
|
||||||
run: |
|
|
||||||
pylint bookwyrm/
|
|
||||||
|
|
99
.github/workflows/python.yml
vendored
Normal file
99
.github/workflows/python.yml
vendored
Normal file
|
@ -0,0 +1,99 @@
|
||||||
|
name: Python
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [ main ]
|
||||||
|
pull_request:
|
||||||
|
branches: [ main ]
|
||||||
|
|
||||||
|
# overrides for .env.example
|
||||||
|
env:
|
||||||
|
POSTGRES_HOST: 127.0.0.1
|
||||||
|
PGPORT: 5432
|
||||||
|
POSTGRES_USER: postgres
|
||||||
|
POSTGRES_PASSWORD: hunter2
|
||||||
|
POSTGRES_DB: github_actions
|
||||||
|
SECRET_KEY: beepbeep
|
||||||
|
EMAIL_HOST_USER: ""
|
||||||
|
EMAIL_HOST_PASSWORD: ""
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
pytest:
|
||||||
|
name: Tests (pytest)
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
services:
|
||||||
|
postgres:
|
||||||
|
image: postgres:13
|
||||||
|
env: # does not inherit from jobs.build.env
|
||||||
|
POSTGRES_USER: postgres
|
||||||
|
POSTGRES_PASSWORD: hunter2
|
||||||
|
options: >-
|
||||||
|
--health-cmd pg_isready
|
||||||
|
--health-interval 10s
|
||||||
|
--health-timeout 5s
|
||||||
|
--health-retries 5
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: Set up Python 3.11
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: 3.11
|
||||||
|
cache: pip
|
||||||
|
- name: Install Dependencies
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt
|
||||||
|
pip install pytest-github-actions-annotate-failures
|
||||||
|
- name: Set up .env
|
||||||
|
run: cp .env.example .env
|
||||||
|
- name: Check migrations up-to-date
|
||||||
|
run: python ./manage.py makemigrations --check
|
||||||
|
- name: Run Tests
|
||||||
|
run: pytest -n 3
|
||||||
|
|
||||||
|
pylint:
|
||||||
|
name: Linting (pylint)
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: Set up Python 3.11
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: 3.11
|
||||||
|
cache: pip
|
||||||
|
- name: Install Dependencies
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt
|
||||||
|
- name: Analyse code with pylint
|
||||||
|
run: pylint bookwyrm/
|
||||||
|
|
||||||
|
mypy:
|
||||||
|
name: Typing (mypy)
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- name: Set up Python 3.11
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: 3.11
|
||||||
|
cache: pip
|
||||||
|
- name: Install Dependencies
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt
|
||||||
|
- name: Set up .env
|
||||||
|
run: cp .env.example .env
|
||||||
|
- name: Analyse code with mypy
|
||||||
|
run: mypy bookwyrm celerywyrm
|
||||||
|
|
||||||
|
black:
|
||||||
|
name: Formatting (black; run ./bw-dev black to fix)
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-python@v5
|
||||||
|
- uses: psf/black@stable
|
||||||
|
with:
|
||||||
|
version: "22.*"
|
5
.gitignore
vendored
5
.gitignore
vendored
|
@ -16,6 +16,8 @@
|
||||||
# BookWyrm
|
# BookWyrm
|
||||||
.env
|
.env
|
||||||
/images/
|
/images/
|
||||||
|
/exports/
|
||||||
|
/static/
|
||||||
bookwyrm/static/css/bookwyrm.css
|
bookwyrm/static/css/bookwyrm.css
|
||||||
bookwyrm/static/css/themes/
|
bookwyrm/static/css/themes/
|
||||||
!bookwyrm/static/css/themes/bookwyrm-*.scss
|
!bookwyrm/static/css/themes/bookwyrm-*.scss
|
||||||
|
@ -36,3 +38,6 @@ nginx/default.conf
|
||||||
|
|
||||||
#macOS
|
#macOS
|
||||||
**/.DS_Store
|
**/.DS_Store
|
||||||
|
|
||||||
|
# Docker
|
||||||
|
docker-compose.override.yml
|
||||||
|
|
1
.prettierrc
Normal file
1
.prettierrc
Normal file
|
@ -0,0 +1 @@
|
||||||
|
'trailingComma': 'es5'
|
|
@ -1,4 +1,4 @@
|
||||||
FROM python:3.9
|
FROM python:3.11
|
||||||
|
|
||||||
ENV PYTHONUNBUFFERED 1
|
ENV PYTHONUNBUFFERED 1
|
||||||
|
|
||||||
|
|
334
FEDERATION.md
Normal file
334
FEDERATION.md
Normal file
|
@ -0,0 +1,334 @@
|
||||||
|
# Federation
|
||||||
|
|
||||||
|
BookWyrm uses the [ActivityPub](http://activitypub.rocks/) protocol to send and receive user activity between other BookWyrm instances and other services that implement ActivityPub. To handle book data, BookWyrm has a handful of extended Activity types which are not part of the standard, but are legible to other BookWyrm instances.
|
||||||
|
|
||||||
|
## Activities and Objects
|
||||||
|
|
||||||
|
### Users and relationships
|
||||||
|
User relationship interactions follow the standard ActivityPub spec.
|
||||||
|
|
||||||
|
- `Follow`: request to receive statuses from a user, and view their statuses that have followers-only privacy
|
||||||
|
- `Accept`: approves a `Follow` and finalizes the relationship
|
||||||
|
- `Reject`: denies a `Follow`
|
||||||
|
- `Block`: prevent users from seeing one another's statuses, and prevents the blocked user from viewing the actor's profile
|
||||||
|
- `Update`: updates a user's profile and settings
|
||||||
|
- `Delete`: deactivates a user
|
||||||
|
- `Undo`: reverses a `Block` or `Follow`
|
||||||
|
|
||||||
|
### Activities
|
||||||
|
- `Create/Status`: saves a new status in the database.
|
||||||
|
- `Delete/Status`: Removes a status
|
||||||
|
- `Like/Status`: Creates a favorite on the status
|
||||||
|
- `Announce/Status`: Boosts the status into the actor's timeline
|
||||||
|
- `Undo/*`,: Reverses an `Announce`, `Like`, or `Move`
|
||||||
|
- `Move/User`: Moves a user from one ActivityPub id to another.
|
||||||
|
|
||||||
|
### Collections
|
||||||
|
User's books and lists are represented by [`OrderedCollection`](https://www.w3.org/TR/activitystreams-vocabulary/#dfn-orderedcollection)
|
||||||
|
|
||||||
|
### Statuses
|
||||||
|
|
||||||
|
BookWyrm is focused on book reading activities - it is not a general-purpose messaging application. For this reason, BookWyrm only accepts status `Create` activities if they are:
|
||||||
|
|
||||||
|
- Direct messages (i.e., `Note`s with the privacy level `direct`, which mention a local user),
|
||||||
|
- Related to a book (of a custom status type that includes the field `inReplyToBook`),
|
||||||
|
- Replies to existing statuses saved in the database
|
||||||
|
|
||||||
|
All other statuses will be received by the instance inbox, but by design **will not be delivered to user inboxes or displayed to users**.
|
||||||
|
|
||||||
|
### Custom Object types
|
||||||
|
|
||||||
|
With the exception of `Note`, the following object types are used in Bookwyrm but are not currently provided with a custom JSON-LD `@context` extension IRI. This is likely to change in future to make them true deserialisable JSON-LD objects.
|
||||||
|
|
||||||
|
##### Note
|
||||||
|
|
||||||
|
Within BookWyrm a `Note` is constructed according to [the ActivityStreams vocabulary](https://www.w3.org/TR/activitystreams-vocabulary/#dfn-note), however `Note`s can only be created as direct messages or as replies to other statuses. As mentioned above, this also applies to incoming `Note`s.
|
||||||
|
|
||||||
|
##### Review
|
||||||
|
|
||||||
|
A `Review` is a status in response to a book (indicated by the `inReplyToBook` field), which has a title, body, and numerical rating between 0 (not rated) and 5.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"id": "https://example.net/user/library_lurker/review/2",
|
||||||
|
"type": "Review",
|
||||||
|
"published": "2023-06-30T21:43:46.013132+00:00",
|
||||||
|
"attributedTo": "https://example.net/user/library_lurker",
|
||||||
|
"content": "<p>This is an enjoyable book with great characters.</p>",
|
||||||
|
"to": ["https://example.net/user/library_lurker/followers"],
|
||||||
|
"cc": [],
|
||||||
|
"replies": {
|
||||||
|
"id": "https://example.net/user/library_lurker/review/2/replies",
|
||||||
|
"type": "OrderedCollection",
|
||||||
|
"totalItems": 0,
|
||||||
|
"first": "https://example.net/user/library_lurker/review/2/replies?page=1",
|
||||||
|
"last": "https://example.net/user/library_lurker/review/2/replies?page=1",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
},
|
||||||
|
"summary": "Spoilers ahead!",
|
||||||
|
"tag": [],
|
||||||
|
"attachment": [],
|
||||||
|
"sensitive": true,
|
||||||
|
"inReplyToBook": "https://example.net/book/1",
|
||||||
|
"name": "What a cracking read",
|
||||||
|
"rating": 4.5,
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
##### Comment
|
||||||
|
|
||||||
|
A `Comment` on a book mentions a book and has a message body, reading status, and progress indicator.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"id": "https://example.net/user/library_lurker/comment/9",
|
||||||
|
"type": "Comment",
|
||||||
|
"published": "2023-06-30T21:43:46.013132+00:00",
|
||||||
|
"attributedTo": "https://example.net/user/library_lurker",
|
||||||
|
"content": "<p>This is a very enjoyable book so far.</p>",
|
||||||
|
"to": ["https://example.net/user/library_lurker/followers"],
|
||||||
|
"cc": [],
|
||||||
|
"replies": {
|
||||||
|
"id": "https://example.net/user/library_lurker/comment/9/replies",
|
||||||
|
"type": "OrderedCollection",
|
||||||
|
"totalItems": 0,
|
||||||
|
"first": "https://example.net/user/library_lurker/comment/9/replies?page=1",
|
||||||
|
"last": "https://example.net/user/library_lurker/comment/9/replies?page=1",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
},
|
||||||
|
"summary": "Spoilers ahead!",
|
||||||
|
"tag": [],
|
||||||
|
"attachment": [],
|
||||||
|
"sensitive": true,
|
||||||
|
"inReplyToBook": "https://example.net/book/1",
|
||||||
|
"readingStatus": "reading",
|
||||||
|
"progress": 25,
|
||||||
|
"progressMode": "PG",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
##### Quotation
|
||||||
|
|
||||||
|
A quotation (aka "quote") has a message body, an excerpt from a book including position as a page number or percentage indicator, and mentions a book.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"id": "https://example.net/user/mouse/quotation/13",
|
||||||
|
"url": "https://example.net/user/mouse/quotation/13",
|
||||||
|
"inReplyTo": null,
|
||||||
|
"published": "2020-05-10T02:38:31.150343+00:00",
|
||||||
|
"attributedTo": "https://example.net/user/mouse",
|
||||||
|
"to": [
|
||||||
|
"https://www.w3.org/ns/activitystreams#Public"
|
||||||
|
],
|
||||||
|
"cc": [
|
||||||
|
"https://example.net/user/mouse/followers"
|
||||||
|
],
|
||||||
|
"sensitive": false,
|
||||||
|
"content": "I really like this quote",
|
||||||
|
"type": "Quotation",
|
||||||
|
"replies": {
|
||||||
|
"id": "https://example.net/user/mouse/quotation/13/replies",
|
||||||
|
"type": "Collection",
|
||||||
|
"first": {
|
||||||
|
"type": "CollectionPage",
|
||||||
|
"next": "https://example.net/user/mouse/quotation/13/replies?only_other_accounts=true&page=true",
|
||||||
|
"partOf": "https://example.net/user/mouse/quotation/13/replies",
|
||||||
|
"items": []
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"inReplyToBook": "https://example.net/book/1",
|
||||||
|
"quote": "To be or not to be, that is the question.",
|
||||||
|
"position": 50,
|
||||||
|
"positionMode": "PCT",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Custom Objects
|
||||||
|
|
||||||
|
##### Work
|
||||||
|
A particular book, a "work" in the [FRBR](https://en.wikipedia.org/wiki/Functional_Requirements_for_Bibliographic_Records) sense.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"id": "https://bookwyrm.social/book/5988",
|
||||||
|
"type": "Work",
|
||||||
|
"authors": [
|
||||||
|
"https://bookwyrm.social/author/417"
|
||||||
|
],
|
||||||
|
"first_published_date": null,
|
||||||
|
"published_date": null,
|
||||||
|
"title": "Piranesi",
|
||||||
|
"sort_title": null,
|
||||||
|
"subtitle": null,
|
||||||
|
"description": "**From the *New York Times* bestselling author of *Jonathan Strange & Mr. Norrell*, an intoxicating, hypnotic new novel set in a dreamlike alternative reality.",
|
||||||
|
"languages": [],
|
||||||
|
"series": null,
|
||||||
|
"series_number": null,
|
||||||
|
"subjects": [
|
||||||
|
"English literature"
|
||||||
|
],
|
||||||
|
"subject_places": [],
|
||||||
|
"openlibrary_key": "OL20893680W",
|
||||||
|
"librarything_key": null,
|
||||||
|
"goodreads_key": null,
|
||||||
|
"attachment": [
|
||||||
|
{
|
||||||
|
"url": "https://bookwyrm.social/images/covers/10226290-M.jpg",
|
||||||
|
"type": "Image"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"lccn": null,
|
||||||
|
"editions": [
|
||||||
|
"https://bookwyrm.social/book/5989"
|
||||||
|
],
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
##### Edition
|
||||||
|
A particular _manifestation_ of a Work, in the [FRBR](https://en.wikipedia.org/wiki/Functional_Requirements_for_Bibliographic_Records) sense.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"id": "https://bookwyrm.social/book/5989",
|
||||||
|
"lastEditedBy": "https://example.net/users/rat",
|
||||||
|
"type": "Edition",
|
||||||
|
"authors": [
|
||||||
|
"https://bookwyrm.social/author/417"
|
||||||
|
],
|
||||||
|
"first_published_date": null,
|
||||||
|
"published_date": "2020-09-15T00:00:00+00:00",
|
||||||
|
"title": "Piranesi",
|
||||||
|
"sort_title": null,
|
||||||
|
"subtitle": null,
|
||||||
|
"description": "Piranesi's house is no ordinary building; its rooms are infinite, its corridors endless, its walls are lined with thousands upon thousands of statues, each one different from all the others.",
|
||||||
|
"languages": [
|
||||||
|
"English"
|
||||||
|
],
|
||||||
|
"series": null,
|
||||||
|
"series_number": null,
|
||||||
|
"subjects": [],
|
||||||
|
"subject_places": [],
|
||||||
|
"openlibrary_key": "OL29486417M",
|
||||||
|
"librarything_key": null,
|
||||||
|
"goodreads_key": null,
|
||||||
|
"isfdb": null,
|
||||||
|
"attachment": [
|
||||||
|
{
|
||||||
|
"url": "https://bookwyrm.social/images/covers/50202953._SX318_.jpg",
|
||||||
|
"type": "Image"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"isbn_10": "1526622424",
|
||||||
|
"isbn_13": "9781526622426",
|
||||||
|
"oclc_number": null,
|
||||||
|
"asin": null,
|
||||||
|
"pages": 272,
|
||||||
|
"physical_format": null,
|
||||||
|
"publishers": [
|
||||||
|
"Bloomsbury Publishing Plc"
|
||||||
|
],
|
||||||
|
"work": "https://bookwyrm.social/book/5988",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Shelf
|
||||||
|
|
||||||
|
A user's book collection. By default, every user has a `to-read`, `reading`, `read`, and `stopped-reading` shelf which are used to track reading progress. Users may create an unlimited number of additional shelves with their own ids.
|
||||||
|
|
||||||
|
Example
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"id": "https://example.net/user/avid_reader/books/extraspecialbooks-5",
|
||||||
|
"type": "Shelf",
|
||||||
|
"totalItems": 0,
|
||||||
|
"first": "https://example.net/user/avid_reader/books/extraspecialbooks-5?page=1",
|
||||||
|
"last": "https://example.net/user/avid_reader/books/extraspecialbooks-5?page=1",
|
||||||
|
"name": "Extra special books",
|
||||||
|
"owner": "https://example.net/user/avid_reader",
|
||||||
|
"to": [
|
||||||
|
"https://www.w3.org/ns/activitystreams#Public"
|
||||||
|
],
|
||||||
|
"cc": [
|
||||||
|
"https://example.net/user/avid_reader/followers"
|
||||||
|
],
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### List
|
||||||
|
|
||||||
|
A collection of books that may have items contributed by users other than the one who created the list.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"id": "https://example.net/list/1",
|
||||||
|
"type": "BookList",
|
||||||
|
"totalItems": 0,
|
||||||
|
"first": "https://example.net/list/1?page=1",
|
||||||
|
"last": "https://example.net/list/1?page=1",
|
||||||
|
"name": "My cool list",
|
||||||
|
"owner": "https://example.net/user/avid_reader",
|
||||||
|
"to": [
|
||||||
|
"https://www.w3.org/ns/activitystreams#Public"
|
||||||
|
],
|
||||||
|
"cc": [
|
||||||
|
"https://example.net/user/avid_reader/followers"
|
||||||
|
],
|
||||||
|
"summary": "A list of books I like.",
|
||||||
|
"curation": "curated",
|
||||||
|
"@context": "https://www.w3.org/ns/activitystreams"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Activities
|
||||||
|
|
||||||
|
- `Create`: Adds a shelf or list to the database.
|
||||||
|
- `Delete`: Removes a shelf or list.
|
||||||
|
- `Add`: Adds a book to a shelf or list.
|
||||||
|
- `Remove`: Removes a book from a shelf or list.
|
||||||
|
|
||||||
|
## Alternative Serialization
|
||||||
|
Because BookWyrm uses custom object types that aren't listed in [the standard ActivityStreams Vocabulary](https://www.w3.org/TR/activitystreams-vocabulary), some statuses are transformed into standard types when sent to or viewed by non-BookWyrm services. `Review`s are converted into `Article`s, and `Comment`s and `Quotation`s are converted into `Note`s, with a link to the book and the cover image attached.
|
||||||
|
|
||||||
|
In future this may be done with [JSON-LD type arrays](https://www.w3.org/TR/json-ld/#specifying-the-type) instead.
|
||||||
|
|
||||||
|
## Other extensions
|
||||||
|
|
||||||
|
### Webfinger
|
||||||
|
|
||||||
|
Bookwyrm uses the [Webfinger](https://datatracker.ietf.org/doc/html/rfc7033) standard to identify and disambiguate fediverse actors. The [Webfinger documentation on the Mastodon project](https://docs.joinmastodon.org/spec/webfinger/) provides a good overview of how Webfinger is used.
|
||||||
|
|
||||||
|
### HTTP Signatures
|
||||||
|
|
||||||
|
Bookwyrm uses and requires HTTP signatures for all `POST` requests. `GET` requests are not signed by default, but if Bookwyrm receives a `403` response to a `GET` it will re-send the request, signed by the default server user. This usually will have a user id of `https://example.net/user/bookwyrm.instance.actor`
|
||||||
|
|
||||||
|
#### publicKey id
|
||||||
|
|
||||||
|
In older versions of Bookwyrm the `publicKey.id` was incorrectly listed in request headers as `https://example.net/user/username#main-key`. As of v0.6.3 the id is now listed correctly, as `https://example.net/user/username/#main-key`. In most ActivityPub implementations this will make no difference as the URL will usually resolve to the same place.
|
||||||
|
|
||||||
|
### NodeInfo
|
||||||
|
|
||||||
|
Bookwyrm uses the [NodeInfo](http://nodeinfo.diaspora.software/) standard to provide statistics and version information for each instance.
|
||||||
|
|
||||||
|
## Further Documentation
|
||||||
|
|
||||||
|
See [docs.joinbookwyrm.com/](https://docs.joinbookwyrm.com/) for more documentation.
|
|
@ -10,7 +10,6 @@ BookWyrm is a social network for tracking your reading, talking about books, wri
|
||||||
## Links
|
## Links
|
||||||
|
|
||||||
[![Mastodon Follow](https://img.shields.io/mastodon/follow/000146121?domain=https%3A%2F%2Ftech.lgbt&style=social)](https://tech.lgbt/@bookwyrm)
|
[![Mastodon Follow](https://img.shields.io/mastodon/follow/000146121?domain=https%3A%2F%2Ftech.lgbt&style=social)](https://tech.lgbt/@bookwyrm)
|
||||||
[![Twitter Follow](https://img.shields.io/twitter/follow/BookWyrmSocial?style=social)](https://twitter.com/BookWyrmSocial)
|
|
||||||
|
|
||||||
- [Project homepage](https://joinbookwyrm.com/)
|
- [Project homepage](https://joinbookwyrm.com/)
|
||||||
- [Support](https://patreon.com/bookwyrm)
|
- [Support](https://patreon.com/bookwyrm)
|
||||||
|
|
|
@ -4,7 +4,11 @@ import sys
|
||||||
|
|
||||||
from .base_activity import ActivityEncoder, Signature, naive_parse
|
from .base_activity import ActivityEncoder, Signature, naive_parse
|
||||||
from .base_activity import Link, Mention, Hashtag
|
from .base_activity import Link, Mention, Hashtag
|
||||||
from .base_activity import ActivitySerializerError, resolve_remote_id
|
from .base_activity import (
|
||||||
|
ActivitySerializerError,
|
||||||
|
resolve_remote_id,
|
||||||
|
get_representative,
|
||||||
|
)
|
||||||
from .image import Document, Image
|
from .image import Document, Image
|
||||||
from .note import Note, GeneratedNote, Article, Comment, Quotation
|
from .note import Note, GeneratedNote, Article, Comment, Quotation
|
||||||
from .note import Review, Rating
|
from .note import Review, Rating
|
||||||
|
@ -19,6 +23,7 @@ from .verbs import Create, Delete, Undo, Update
|
||||||
from .verbs import Follow, Accept, Reject, Block
|
from .verbs import Follow, Accept, Reject, Block
|
||||||
from .verbs import Add, Remove
|
from .verbs import Add, Remove
|
||||||
from .verbs import Announce, Like
|
from .verbs import Announce, Like
|
||||||
|
from .verbs import Move
|
||||||
|
|
||||||
# this creates a list of all the Activity types that we can serialize,
|
# this creates a list of all the Activity types that we can serialize,
|
||||||
# so when an Activity comes in from outside, we can check if it's known
|
# so when an Activity comes in from outside, we can check if it's known
|
||||||
|
|
|
@ -1,7 +1,10 @@
|
||||||
""" basics for an activitypub serializer """
|
""" basics for an activitypub serializer """
|
||||||
|
from __future__ import annotations
|
||||||
from dataclasses import dataclass, fields, MISSING
|
from dataclasses import dataclass, fields, MISSING
|
||||||
from json import JSONEncoder
|
from json import JSONEncoder
|
||||||
import logging
|
import logging
|
||||||
|
from typing import Optional, Union, TypeVar, overload, Any
|
||||||
|
|
||||||
import requests
|
import requests
|
||||||
|
|
||||||
from django.apps import apps
|
from django.apps import apps
|
||||||
|
@ -10,12 +13,16 @@ from django.utils.http import http_date
|
||||||
|
|
||||||
from bookwyrm import models
|
from bookwyrm import models
|
||||||
from bookwyrm.connectors import ConnectorException, get_data
|
from bookwyrm.connectors import ConnectorException, get_data
|
||||||
|
from bookwyrm.models import base_model
|
||||||
from bookwyrm.signatures import make_signature
|
from bookwyrm.signatures import make_signature
|
||||||
from bookwyrm.settings import DOMAIN, INSTANCE_ACTOR_USERNAME
|
from bookwyrm.settings import DOMAIN, INSTANCE_ACTOR_USERNAME
|
||||||
from bookwyrm.tasks import app, MEDIUM
|
from bookwyrm.tasks import app, MISC
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# pylint: disable=invalid-name
|
||||||
|
TBookWyrmModel = TypeVar("TBookWyrmModel", bound=base_model.BookWyrmModel)
|
||||||
|
|
||||||
|
|
||||||
class ActivitySerializerError(ValueError):
|
class ActivitySerializerError(ValueError):
|
||||||
"""routine problems serializing activitypub json"""
|
"""routine problems serializing activitypub json"""
|
||||||
|
@ -65,7 +72,13 @@ class ActivityObject:
|
||||||
id: str
|
id: str
|
||||||
type: str
|
type: str
|
||||||
|
|
||||||
def __init__(self, activity_objects=None, **kwargs):
|
def __init__(
|
||||||
|
self,
|
||||||
|
activity_objects: Optional[
|
||||||
|
dict[str, Union[str, list[str], ActivityObject, base_model.BookWyrmModel]]
|
||||||
|
] = None,
|
||||||
|
**kwargs: Any,
|
||||||
|
):
|
||||||
"""this lets you pass in an object with fields that aren't in the
|
"""this lets you pass in an object with fields that aren't in the
|
||||||
dataclass, which it ignores. Any field in the dataclass is required or
|
dataclass, which it ignores. Any field in the dataclass is required or
|
||||||
has a default value"""
|
has a default value"""
|
||||||
|
@ -101,13 +114,13 @@ class ActivityObject:
|
||||||
# pylint: disable=too-many-locals,too-many-branches,too-many-arguments
|
# pylint: disable=too-many-locals,too-many-branches,too-many-arguments
|
||||||
def to_model(
|
def to_model(
|
||||||
self,
|
self,
|
||||||
model=None,
|
model: Optional[type[TBookWyrmModel]] = None,
|
||||||
instance=None,
|
instance: Optional[TBookWyrmModel] = None,
|
||||||
allow_create=True,
|
allow_create: bool = True,
|
||||||
save=True,
|
save: bool = True,
|
||||||
overwrite=True,
|
overwrite: bool = True,
|
||||||
allow_external_connections=True,
|
allow_external_connections: bool = True,
|
||||||
):
|
) -> Optional[TBookWyrmModel]:
|
||||||
"""convert from an activity to a model instance. Args:
|
"""convert from an activity to a model instance. Args:
|
||||||
model: the django model that this object is being converted to
|
model: the django model that this object is being converted to
|
||||||
(will guess if not known)
|
(will guess if not known)
|
||||||
|
@ -127,7 +140,7 @@ class ActivityObject:
|
||||||
if (
|
if (
|
||||||
allow_create
|
allow_create
|
||||||
and hasattr(model, "ignore_activity")
|
and hasattr(model, "ignore_activity")
|
||||||
and model.ignore_activity(self)
|
and model.ignore_activity(self, allow_external_connections)
|
||||||
):
|
):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@ -224,7 +237,7 @@ class ActivityObject:
|
||||||
omit = kwargs.get("omit", ())
|
omit = kwargs.get("omit", ())
|
||||||
data = self.__dict__.copy()
|
data = self.__dict__.copy()
|
||||||
# recursively serialize
|
# recursively serialize
|
||||||
for (k, v) in data.items():
|
for k, v in data.items():
|
||||||
try:
|
try:
|
||||||
if issubclass(type(v), ActivityObject):
|
if issubclass(type(v), ActivityObject):
|
||||||
data[k] = v.serialize()
|
data[k] = v.serialize()
|
||||||
|
@ -237,11 +250,14 @@ class ActivityObject:
|
||||||
pass
|
pass
|
||||||
data = {k: v for (k, v) in data.items() if v is not None and k not in omit}
|
data = {k: v for (k, v) in data.items() if v is not None and k not in omit}
|
||||||
if "@context" not in omit:
|
if "@context" not in omit:
|
||||||
data["@context"] = "https://www.w3.org/ns/activitystreams"
|
data["@context"] = [
|
||||||
|
"https://www.w3.org/ns/activitystreams",
|
||||||
|
{"Hashtag": "as:Hashtag"},
|
||||||
|
]
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=MEDIUM, ignore_result=True)
|
@app.task(queue=MISC)
|
||||||
@transaction.atomic
|
@transaction.atomic
|
||||||
def set_related_field(
|
def set_related_field(
|
||||||
model_name, origin_model_name, related_field_name, related_remote_id, data
|
model_name, origin_model_name, related_field_name, related_remote_id, data
|
||||||
|
@ -296,14 +312,40 @@ def get_model_from_type(activity_type):
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=too-many-arguments
|
# pylint: disable=too-many-arguments
|
||||||
|
@overload
|
||||||
def resolve_remote_id(
|
def resolve_remote_id(
|
||||||
remote_id,
|
remote_id: str,
|
||||||
model=None,
|
model: type[TBookWyrmModel],
|
||||||
refresh=False,
|
refresh: bool = False,
|
||||||
save=True,
|
save: bool = True,
|
||||||
get_activity=False,
|
get_activity: bool = False,
|
||||||
allow_external_connections=True,
|
allow_external_connections: bool = True,
|
||||||
):
|
) -> TBookWyrmModel:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
# pylint: disable=too-many-arguments
|
||||||
|
@overload
|
||||||
|
def resolve_remote_id(
|
||||||
|
remote_id: str,
|
||||||
|
model: Optional[str] = None,
|
||||||
|
refresh: bool = False,
|
||||||
|
save: bool = True,
|
||||||
|
get_activity: bool = False,
|
||||||
|
allow_external_connections: bool = True,
|
||||||
|
) -> base_model.BookWyrmModel:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
# pylint: disable=too-many-arguments
|
||||||
|
def resolve_remote_id(
|
||||||
|
remote_id: str,
|
||||||
|
model: Optional[Union[str, type[base_model.BookWyrmModel]]] = None,
|
||||||
|
refresh: bool = False,
|
||||||
|
save: bool = True,
|
||||||
|
get_activity: bool = False,
|
||||||
|
allow_external_connections: bool = True,
|
||||||
|
) -> base_model.BookWyrmModel:
|
||||||
"""take a remote_id and return an instance, creating if necessary. Args:
|
"""take a remote_id and return an instance, creating if necessary. Args:
|
||||||
remote_id: the unique url for looking up the object in the db or by http
|
remote_id: the unique url for looking up the object in the db or by http
|
||||||
model: a string or object representing the model that corresponds to the object
|
model: a string or object representing the model that corresponds to the object
|
||||||
|
@ -358,19 +400,15 @@ def resolve_remote_id(
|
||||||
|
|
||||||
def get_representative():
|
def get_representative():
|
||||||
"""Get or create an actor representing the instance
|
"""Get or create an actor representing the instance
|
||||||
to sign requests to 'secure mastodon' servers"""
|
to sign outgoing HTTP GET requests"""
|
||||||
username = f"{INSTANCE_ACTOR_USERNAME}@{DOMAIN}"
|
return models.User.objects.get_or_create(
|
||||||
email = "bookwyrm@localhost"
|
username=f"{INSTANCE_ACTOR_USERNAME}@{DOMAIN}",
|
||||||
try:
|
defaults={
|
||||||
user = models.User.objects.get(username=username)
|
"email": "bookwyrm@localhost",
|
||||||
except models.User.DoesNotExist:
|
"local": True,
|
||||||
user = models.User.objects.create_user(
|
"localname": INSTANCE_ACTOR_USERNAME,
|
||||||
username=username,
|
},
|
||||||
email=email,
|
)[0]
|
||||||
local=True,
|
|
||||||
localname=INSTANCE_ACTOR_USERNAME,
|
|
||||||
)
|
|
||||||
return user
|
|
||||||
|
|
||||||
|
|
||||||
def get_activitypub_data(url):
|
def get_activitypub_data(url):
|
||||||
|
@ -384,10 +422,12 @@ def get_activitypub_data(url):
|
||||||
resp = requests.get(
|
resp = requests.get(
|
||||||
url,
|
url,
|
||||||
headers={
|
headers={
|
||||||
"Accept": "application/json; charset=utf-8",
|
# pylint: disable=line-too-long
|
||||||
|
"Accept": 'application/ld+json; profile="https://www.w3.org/ns/activitystreams"',
|
||||||
"Date": now,
|
"Date": now,
|
||||||
"Signature": make_signature("get", sender, url, now),
|
"Signature": make_signature("get", sender, url, now),
|
||||||
},
|
},
|
||||||
|
timeout=15,
|
||||||
)
|
)
|
||||||
except requests.RequestException:
|
except requests.RequestException:
|
||||||
raise ConnectorException()
|
raise ConnectorException()
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
""" book and author data """
|
""" book and author data """
|
||||||
from dataclasses import dataclass, field
|
from dataclasses import dataclass, field
|
||||||
from typing import List
|
from typing import Optional
|
||||||
|
|
||||||
from .base_activity import ActivityObject
|
from .base_activity import ActivityObject
|
||||||
from .image import Document
|
from .image import Document
|
||||||
|
@ -11,19 +11,17 @@ from .image import Document
|
||||||
class BookData(ActivityObject):
|
class BookData(ActivityObject):
|
||||||
"""shared fields for all book data and authors"""
|
"""shared fields for all book data and authors"""
|
||||||
|
|
||||||
openlibraryKey: str = None
|
openlibraryKey: Optional[str] = None
|
||||||
inventaireId: str = None
|
inventaireId: Optional[str] = None
|
||||||
librarythingKey: str = None
|
librarythingKey: Optional[str] = None
|
||||||
goodreadsKey: str = None
|
goodreadsKey: Optional[str] = None
|
||||||
bnfId: str = None
|
bnfId: Optional[str] = None
|
||||||
viaf: str = None
|
viaf: Optional[str] = None
|
||||||
wikidata: str = None
|
wikidata: Optional[str] = None
|
||||||
asin: str = None
|
asin: Optional[str] = None
|
||||||
aasin: str = None
|
aasin: Optional[str] = None
|
||||||
isfdb: str = None
|
isfdb: Optional[str] = None
|
||||||
lastEditedBy: str = None
|
lastEditedBy: Optional[str] = None
|
||||||
links: List[str] = field(default_factory=lambda: [])
|
|
||||||
fileLinks: List[str] = field(default_factory=lambda: [])
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=invalid-name
|
# pylint: disable=invalid-name
|
||||||
|
@ -35,17 +33,19 @@ class Book(BookData):
|
||||||
sortTitle: str = None
|
sortTitle: str = None
|
||||||
subtitle: str = None
|
subtitle: str = None
|
||||||
description: str = ""
|
description: str = ""
|
||||||
languages: List[str] = field(default_factory=lambda: [])
|
languages: list[str] = field(default_factory=list)
|
||||||
series: str = ""
|
series: str = ""
|
||||||
seriesNumber: str = ""
|
seriesNumber: str = ""
|
||||||
subjects: List[str] = field(default_factory=lambda: [])
|
subjects: list[str] = field(default_factory=list)
|
||||||
subjectPlaces: List[str] = field(default_factory=lambda: [])
|
subjectPlaces: list[str] = field(default_factory=list)
|
||||||
|
|
||||||
authors: List[str] = field(default_factory=lambda: [])
|
authors: list[str] = field(default_factory=list)
|
||||||
firstPublishedDate: str = ""
|
firstPublishedDate: str = ""
|
||||||
publishedDate: str = ""
|
publishedDate: str = ""
|
||||||
|
|
||||||
cover: Document = None
|
fileLinks: list[str] = field(default_factory=list)
|
||||||
|
|
||||||
|
cover: Optional[Document] = None
|
||||||
type: str = "Book"
|
type: str = "Book"
|
||||||
|
|
||||||
|
|
||||||
|
@ -58,10 +58,10 @@ class Edition(Book):
|
||||||
isbn10: str = ""
|
isbn10: str = ""
|
||||||
isbn13: str = ""
|
isbn13: str = ""
|
||||||
oclcNumber: str = ""
|
oclcNumber: str = ""
|
||||||
pages: int = None
|
pages: Optional[int] = None
|
||||||
physicalFormat: str = ""
|
physicalFormat: str = ""
|
||||||
physicalFormatDetail: str = ""
|
physicalFormatDetail: str = ""
|
||||||
publishers: List[str] = field(default_factory=lambda: [])
|
publishers: list[str] = field(default_factory=list)
|
||||||
editionRank: int = 0
|
editionRank: int = 0
|
||||||
|
|
||||||
type: str = "Edition"
|
type: str = "Edition"
|
||||||
|
@ -73,7 +73,7 @@ class Work(Book):
|
||||||
"""work instance of a book object"""
|
"""work instance of a book object"""
|
||||||
|
|
||||||
lccn: str = ""
|
lccn: str = ""
|
||||||
editions: List[str] = field(default_factory=lambda: [])
|
editions: list[str] = field(default_factory=list)
|
||||||
type: str = "Work"
|
type: str = "Work"
|
||||||
|
|
||||||
|
|
||||||
|
@ -83,12 +83,12 @@ class Author(BookData):
|
||||||
"""author of a book"""
|
"""author of a book"""
|
||||||
|
|
||||||
name: str
|
name: str
|
||||||
isni: str = None
|
isni: Optional[str] = None
|
||||||
viafId: str = None
|
viafId: Optional[str] = None
|
||||||
gutenbergId: str = None
|
gutenbergId: Optional[str] = None
|
||||||
born: str = None
|
born: Optional[str] = None
|
||||||
died: str = None
|
died: Optional[str] = None
|
||||||
aliases: List[str] = field(default_factory=lambda: [])
|
aliases: list[str] = field(default_factory=list)
|
||||||
bio: str = ""
|
bio: str = ""
|
||||||
wikipediaLink: str = ""
|
wikipediaLink: str = ""
|
||||||
type: str = "Author"
|
type: str = "Author"
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
""" actor serializer """
|
""" actor serializer """
|
||||||
from dataclasses import dataclass, field
|
from dataclasses import dataclass
|
||||||
from typing import Dict
|
from typing import Dict
|
||||||
|
|
||||||
from .base_activity import ActivityObject
|
from .base_activity import ActivityObject
|
||||||
|
@ -35,9 +35,11 @@ class Person(ActivityObject):
|
||||||
endpoints: Dict = None
|
endpoints: Dict = None
|
||||||
name: str = None
|
name: str = None
|
||||||
summary: str = None
|
summary: str = None
|
||||||
icon: Image = field(default_factory=lambda: {})
|
icon: Image = None
|
||||||
bookwyrmUser: bool = False
|
bookwyrmUser: bool = False
|
||||||
manuallyApprovesFollowers: str = False
|
manuallyApprovesFollowers: str = False
|
||||||
discoverable: str = False
|
discoverable: str = False
|
||||||
hideFollows: str = False
|
hideFollows: str = False
|
||||||
|
movedTo: str = None
|
||||||
|
alsoKnownAs: dict[str] = None
|
||||||
type: str = "Person"
|
type: str = "Person"
|
||||||
|
|
|
@ -171,9 +171,19 @@ class Reject(Verb):
|
||||||
type: str = "Reject"
|
type: str = "Reject"
|
||||||
|
|
||||||
def action(self, allow_external_connections=True):
|
def action(self, allow_external_connections=True):
|
||||||
"""reject a follow request"""
|
"""reject a follow or follow request"""
|
||||||
obj = self.object.to_model(save=False, allow_create=False)
|
|
||||||
obj.reject()
|
for model_name in ["UserFollowRequest", "UserFollows", None]:
|
||||||
|
model = apps.get_model(f"bookwyrm.{model_name}") if model_name else None
|
||||||
|
if obj := self.object.to_model(
|
||||||
|
model=model,
|
||||||
|
save=False,
|
||||||
|
allow_create=False,
|
||||||
|
allow_external_connections=allow_external_connections,
|
||||||
|
):
|
||||||
|
# Reject the first model that can be built.
|
||||||
|
obj.reject()
|
||||||
|
break
|
||||||
|
|
||||||
|
|
||||||
@dataclass(init=False)
|
@dataclass(init=False)
|
||||||
|
@ -231,3 +241,30 @@ class Announce(Verb):
|
||||||
def action(self, allow_external_connections=True):
|
def action(self, allow_external_connections=True):
|
||||||
"""boost"""
|
"""boost"""
|
||||||
self.to_model(allow_external_connections=allow_external_connections)
|
self.to_model(allow_external_connections=allow_external_connections)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(init=False)
|
||||||
|
class Move(Verb):
|
||||||
|
"""a user moving an object"""
|
||||||
|
|
||||||
|
object: str
|
||||||
|
type: str = "Move"
|
||||||
|
origin: str = None
|
||||||
|
target: str = None
|
||||||
|
|
||||||
|
def action(self, allow_external_connections=True):
|
||||||
|
"""move"""
|
||||||
|
|
||||||
|
object_is_user = resolve_remote_id(remote_id=self.object, model="User")
|
||||||
|
|
||||||
|
if object_is_user:
|
||||||
|
model = apps.get_model("bookwyrm.MoveUser")
|
||||||
|
|
||||||
|
self.to_model(
|
||||||
|
model=model,
|
||||||
|
save=True,
|
||||||
|
allow_external_connections=allow_external_connections,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# we might do something with this to move other objects at some point
|
||||||
|
pass
|
||||||
|
|
|
@ -8,7 +8,7 @@ from opentelemetry import trace
|
||||||
|
|
||||||
from bookwyrm import models
|
from bookwyrm import models
|
||||||
from bookwyrm.redis_store import RedisStore, r
|
from bookwyrm.redis_store import RedisStore, r
|
||||||
from bookwyrm.tasks import app, LOW, MEDIUM, HIGH
|
from bookwyrm.tasks import app, STREAMS, IMPORT_TRIGGERED
|
||||||
from bookwyrm.telemetry import open_telemetry
|
from bookwyrm.telemetry import open_telemetry
|
||||||
|
|
||||||
|
|
||||||
|
@ -38,11 +38,14 @@ class ActivityStream(RedisStore):
|
||||||
|
|
||||||
def add_status(self, status, increment_unread=False):
|
def add_status(self, status, increment_unread=False):
|
||||||
"""add a status to users' feeds"""
|
"""add a status to users' feeds"""
|
||||||
|
audience = self.get_audience(status)
|
||||||
# the pipeline contains all the add-to-stream activities
|
# the pipeline contains all the add-to-stream activities
|
||||||
pipeline = self.add_object_to_related_stores(status, execute=False)
|
pipeline = self.add_object_to_stores(
|
||||||
|
status, self.get_stores_for_users(audience), execute=False
|
||||||
|
)
|
||||||
|
|
||||||
if increment_unread:
|
if increment_unread:
|
||||||
for user_id in self.get_audience(status):
|
for user_id in audience:
|
||||||
# add to the unread status count
|
# add to the unread status count
|
||||||
pipeline.incr(self.unread_id(user_id))
|
pipeline.incr(self.unread_id(user_id))
|
||||||
# add to the unread status count for status type
|
# add to the unread status count for status type
|
||||||
|
@ -102,11 +105,18 @@ class ActivityStream(RedisStore):
|
||||||
"""go from zero to a timeline"""
|
"""go from zero to a timeline"""
|
||||||
self.populate_store(self.stream_id(user.id))
|
self.populate_store(self.stream_id(user.id))
|
||||||
|
|
||||||
|
@tracer.start_as_current_span("ActivityStream._get_audience")
|
||||||
def _get_audience(self, status): # pylint: disable=no-self-use
|
def _get_audience(self, status): # pylint: disable=no-self-use
|
||||||
"""given a status, what users should see it"""
|
"""given a status, what users should see it, excluding the author"""
|
||||||
# direct messages don't appeard in feeds, direct comments/reviews/etc do
|
trace.get_current_span().set_attribute("status_type", status.status_type)
|
||||||
|
trace.get_current_span().set_attribute("status_privacy", status.privacy)
|
||||||
|
trace.get_current_span().set_attribute(
|
||||||
|
"status_reply_parent_privacy",
|
||||||
|
status.reply_parent.privacy if status.reply_parent else status.privacy,
|
||||||
|
)
|
||||||
|
# direct messages don't appear in feeds, direct comments/reviews/etc do
|
||||||
if status.privacy == "direct" and status.status_type == "Note":
|
if status.privacy == "direct" and status.status_type == "Note":
|
||||||
return []
|
return models.User.objects.none()
|
||||||
|
|
||||||
# everybody who could plausibly see this status
|
# everybody who could plausibly see this status
|
||||||
audience = models.User.objects.filter(
|
audience = models.User.objects.filter(
|
||||||
|
@ -119,36 +129,38 @@ class ActivityStream(RedisStore):
|
||||||
# only visible to the poster and mentioned users
|
# only visible to the poster and mentioned users
|
||||||
if status.privacy == "direct":
|
if status.privacy == "direct":
|
||||||
audience = audience.filter(
|
audience = audience.filter(
|
||||||
Q(id=status.user.id) # if the user is the post's author
|
Q(id__in=status.mention_users.all()) # if the user is mentioned
|
||||||
| Q(id__in=status.mention_users.all()) # if the user is mentioned
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# don't show replies to statuses the user can't see
|
# don't show replies to statuses the user can't see
|
||||||
elif status.reply_parent and status.reply_parent.privacy == "followers":
|
elif status.reply_parent and status.reply_parent.privacy == "followers":
|
||||||
audience = audience.filter(
|
audience = audience.filter(
|
||||||
Q(id=status.user.id) # if the user is the post's author
|
Q(id=status.reply_parent.user.id) # if the user is the OG author
|
||||||
| Q(id=status.reply_parent.user.id) # if the user is the OG author
|
|
||||||
| (
|
| (
|
||||||
Q(following=status.user) & Q(following=status.reply_parent.user)
|
Q(following=status.user) & Q(following=status.reply_parent.user)
|
||||||
) # if the user is following both authors
|
) # if the user is following both authors
|
||||||
).distinct()
|
)
|
||||||
|
|
||||||
# only visible to the poster's followers and tagged users
|
# only visible to the poster's followers and tagged users
|
||||||
elif status.privacy == "followers":
|
elif status.privacy == "followers":
|
||||||
audience = audience.filter(
|
audience = audience.filter(
|
||||||
Q(id=status.user.id) # if the user is the post's author
|
Q(following=status.user) # if the user is following the author
|
||||||
| Q(following=status.user) # if the user is following the author
|
|
||||||
)
|
)
|
||||||
return audience.distinct()
|
return audience.distinct("id")
|
||||||
|
|
||||||
@tracer.start_as_current_span("ActivityStream.get_audience")
|
@tracer.start_as_current_span("ActivityStream.get_audience")
|
||||||
def get_audience(self, status):
|
def get_audience(self, status):
|
||||||
"""given a status, what users should see it"""
|
"""given a status, what users should see it"""
|
||||||
trace.get_current_span().set_attribute("stream_id", self.key)
|
trace.get_current_span().set_attribute("stream_id", self.key)
|
||||||
return [user.id for user in self._get_audience(status)]
|
audience = self._get_audience(status).values_list("id", flat=True)
|
||||||
|
status_author = models.User.objects.filter(
|
||||||
|
is_active=True, local=True, id=status.user.id
|
||||||
|
).values_list("id", flat=True)
|
||||||
|
return list(set(audience) | set(status_author))
|
||||||
|
|
||||||
def get_stores_for_object(self, obj):
|
def get_stores_for_users(self, user_ids):
|
||||||
return [self.stream_id(user_id) for user_id in self.get_audience(obj)]
|
"""convert a list of user ids into redis store ids"""
|
||||||
|
return [self.stream_id(user_id) for user_id in user_ids]
|
||||||
|
|
||||||
def get_statuses_for_user(self, user): # pylint: disable=no-self-use
|
def get_statuses_for_user(self, user): # pylint: disable=no-self-use
|
||||||
"""given a user, what statuses should they see on this stream"""
|
"""given a user, what statuses should they see on this stream"""
|
||||||
|
@ -171,13 +183,13 @@ class HomeStream(ActivityStream):
|
||||||
def get_audience(self, status):
|
def get_audience(self, status):
|
||||||
trace.get_current_span().set_attribute("stream_id", self.key)
|
trace.get_current_span().set_attribute("stream_id", self.key)
|
||||||
audience = super()._get_audience(status)
|
audience = super()._get_audience(status)
|
||||||
if not audience:
|
|
||||||
return []
|
|
||||||
# if the user is the post's author
|
|
||||||
ids_self = [user.id for user in audience.filter(Q(id=status.user.id))]
|
|
||||||
# if the user is following the author
|
# if the user is following the author
|
||||||
ids_following = [user.id for user in audience.filter(Q(following=status.user))]
|
audience = audience.filter(following=status.user).values_list("id", flat=True)
|
||||||
return ids_self + ids_following
|
# if the user is the post's author
|
||||||
|
status_author = models.User.objects.filter(
|
||||||
|
is_active=True, local=True, id=status.user.id
|
||||||
|
).values_list("id", flat=True)
|
||||||
|
return list(set(audience) | set(status_author))
|
||||||
|
|
||||||
def get_statuses_for_user(self, user):
|
def get_statuses_for_user(self, user):
|
||||||
return models.Status.privacy_filter(
|
return models.Status.privacy_filter(
|
||||||
|
@ -197,11 +209,11 @@ class LocalStream(ActivityStream):
|
||||||
|
|
||||||
key = "local"
|
key = "local"
|
||||||
|
|
||||||
def _get_audience(self, status):
|
def get_audience(self, status):
|
||||||
# this stream wants no part in non-public statuses
|
# this stream wants no part in non-public statuses
|
||||||
if status.privacy != "public" or not status.user.local:
|
if status.privacy != "public" or not status.user.local:
|
||||||
return []
|
return []
|
||||||
return super()._get_audience(status)
|
return super().get_audience(status)
|
||||||
|
|
||||||
def get_statuses_for_user(self, user):
|
def get_statuses_for_user(self, user):
|
||||||
# all public statuses by a local user
|
# all public statuses by a local user
|
||||||
|
@ -218,13 +230,6 @@ class BooksStream(ActivityStream):
|
||||||
|
|
||||||
def _get_audience(self, status):
|
def _get_audience(self, status):
|
||||||
"""anyone with the mentioned book on their shelves"""
|
"""anyone with the mentioned book on their shelves"""
|
||||||
# only show public statuses on the books feed,
|
|
||||||
# and only statuses that mention books
|
|
||||||
if status.privacy != "public" or not (
|
|
||||||
status.mention_books.exists() or hasattr(status, "book")
|
|
||||||
):
|
|
||||||
return []
|
|
||||||
|
|
||||||
work = (
|
work = (
|
||||||
status.book.parent_work
|
status.book.parent_work
|
||||||
if hasattr(status, "book")
|
if hasattr(status, "book")
|
||||||
|
@ -232,9 +237,17 @@ class BooksStream(ActivityStream):
|
||||||
)
|
)
|
||||||
|
|
||||||
audience = super()._get_audience(status)
|
audience = super()._get_audience(status)
|
||||||
if not audience:
|
return audience.filter(shelfbook__book__parent_work=work)
|
||||||
|
|
||||||
|
def get_audience(self, status):
|
||||||
|
# only show public statuses on the books feed,
|
||||||
|
# and only statuses that mention books
|
||||||
|
if status.privacy != "public" or not (
|
||||||
|
status.mention_books.exists() or hasattr(status, "book")
|
||||||
|
):
|
||||||
return []
|
return []
|
||||||
return audience.filter(shelfbook__book__parent_work=work).distinct()
|
|
||||||
|
return super().get_audience(status)
|
||||||
|
|
||||||
def get_statuses_for_user(self, user):
|
def get_statuses_for_user(self, user):
|
||||||
"""any public status that mentions the user's books"""
|
"""any public status that mentions the user's books"""
|
||||||
|
@ -312,10 +325,9 @@ def add_status_on_create(sender, instance, created, *args, **kwargs):
|
||||||
remove_status_task.delay(instance.id)
|
remove_status_task.delay(instance.id)
|
||||||
return
|
return
|
||||||
|
|
||||||
# To avoid creating a zillion unnecessary tasks caused by re-saving the model,
|
# We don't want to create multiple add_status_tasks for each status, and because
|
||||||
# check if it's actually ready to send before we go. We're trusting this was
|
# the transactions are atomic, on_commit won't run until the status is ready to add.
|
||||||
# set correctly by the inbox or view
|
if not created:
|
||||||
if not instance.ready:
|
|
||||||
return
|
return
|
||||||
|
|
||||||
# when creating new things, gotta wait on the transaction
|
# when creating new things, gotta wait on the transaction
|
||||||
|
@ -326,7 +338,11 @@ def add_status_on_create(sender, instance, created, *args, **kwargs):
|
||||||
|
|
||||||
def add_status_on_create_command(sender, instance, created):
|
def add_status_on_create_command(sender, instance, created):
|
||||||
"""runs this code only after the database commit completes"""
|
"""runs this code only after the database commit completes"""
|
||||||
priority = HIGH
|
# boosts trigger 'saves" twice, so don't bother duplicating the task
|
||||||
|
if sender == models.Boost and not created:
|
||||||
|
return
|
||||||
|
|
||||||
|
priority = STREAMS
|
||||||
# check if this is an old status, de-prioritize if so
|
# check if this is an old status, de-prioritize if so
|
||||||
# (this will happen if federation is very slow, or, more expectedly, on csv import)
|
# (this will happen if federation is very slow, or, more expectedly, on csv import)
|
||||||
if instance.published_date < timezone.now() - timedelta(
|
if instance.published_date < timezone.now() - timedelta(
|
||||||
|
@ -336,7 +352,7 @@ def add_status_on_create_command(sender, instance, created):
|
||||||
if instance.user.local:
|
if instance.user.local:
|
||||||
return
|
return
|
||||||
# an out of date remote status is a low priority but should be added
|
# an out of date remote status is a low priority but should be added
|
||||||
priority = LOW
|
priority = IMPORT_TRIGGERED
|
||||||
|
|
||||||
add_status_task.apply_async(
|
add_status_task.apply_async(
|
||||||
args=(instance.id,),
|
args=(instance.id,),
|
||||||
|
@ -480,7 +496,7 @@ def remove_statuses_on_unshelve(sender, instance, *args, **kwargs):
|
||||||
# ---- TASKS
|
# ---- TASKS
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=LOW, ignore_result=True)
|
@app.task(queue=STREAMS)
|
||||||
def add_book_statuses_task(user_id, book_id):
|
def add_book_statuses_task(user_id, book_id):
|
||||||
"""add statuses related to a book on shelve"""
|
"""add statuses related to a book on shelve"""
|
||||||
user = models.User.objects.get(id=user_id)
|
user = models.User.objects.get(id=user_id)
|
||||||
|
@ -488,7 +504,7 @@ def add_book_statuses_task(user_id, book_id):
|
||||||
BooksStream().add_book_statuses(user, book)
|
BooksStream().add_book_statuses(user, book)
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=LOW, ignore_result=True)
|
@app.task(queue=STREAMS)
|
||||||
def remove_book_statuses_task(user_id, book_id):
|
def remove_book_statuses_task(user_id, book_id):
|
||||||
"""remove statuses about a book from a user's books feed"""
|
"""remove statuses about a book from a user's books feed"""
|
||||||
user = models.User.objects.get(id=user_id)
|
user = models.User.objects.get(id=user_id)
|
||||||
|
@ -496,7 +512,7 @@ def remove_book_statuses_task(user_id, book_id):
|
||||||
BooksStream().remove_book_statuses(user, book)
|
BooksStream().remove_book_statuses(user, book)
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=MEDIUM, ignore_result=True)
|
@app.task(queue=STREAMS)
|
||||||
def populate_stream_task(stream, user_id):
|
def populate_stream_task(stream, user_id):
|
||||||
"""background task for populating an empty activitystream"""
|
"""background task for populating an empty activitystream"""
|
||||||
user = models.User.objects.get(id=user_id)
|
user = models.User.objects.get(id=user_id)
|
||||||
|
@ -504,7 +520,7 @@ def populate_stream_task(stream, user_id):
|
||||||
stream.populate_streams(user)
|
stream.populate_streams(user)
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=MEDIUM, ignore_result=True)
|
@app.task(queue=STREAMS)
|
||||||
def remove_status_task(status_ids):
|
def remove_status_task(status_ids):
|
||||||
"""remove a status from any stream it might be in"""
|
"""remove a status from any stream it might be in"""
|
||||||
# this can take an id or a list of ids
|
# this can take an id or a list of ids
|
||||||
|
@ -514,10 +530,12 @@ def remove_status_task(status_ids):
|
||||||
|
|
||||||
for stream in streams.values():
|
for stream in streams.values():
|
||||||
for status in statuses:
|
for status in statuses:
|
||||||
stream.remove_object_from_related_stores(status)
|
stream.remove_object_from_stores(
|
||||||
|
status, stream.get_stores_for_users(stream.get_audience(status))
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=HIGH, ignore_result=True)
|
@app.task(queue=STREAMS)
|
||||||
def add_status_task(status_id, increment_unread=False):
|
def add_status_task(status_id, increment_unread=False):
|
||||||
"""add a status to any stream it should be in"""
|
"""add a status to any stream it should be in"""
|
||||||
status = models.Status.objects.select_subclasses().get(id=status_id)
|
status = models.Status.objects.select_subclasses().get(id=status_id)
|
||||||
|
@ -529,7 +547,7 @@ def add_status_task(status_id, increment_unread=False):
|
||||||
stream.add_status(status, increment_unread=increment_unread)
|
stream.add_status(status, increment_unread=increment_unread)
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=MEDIUM, ignore_result=True)
|
@app.task(queue=STREAMS)
|
||||||
def remove_user_statuses_task(viewer_id, user_id, stream_list=None):
|
def remove_user_statuses_task(viewer_id, user_id, stream_list=None):
|
||||||
"""remove all statuses by a user from a viewer's stream"""
|
"""remove all statuses by a user from a viewer's stream"""
|
||||||
stream_list = [streams[s] for s in stream_list] if stream_list else streams.values()
|
stream_list = [streams[s] for s in stream_list] if stream_list else streams.values()
|
||||||
|
@ -539,7 +557,7 @@ def remove_user_statuses_task(viewer_id, user_id, stream_list=None):
|
||||||
stream.remove_user_statuses(viewer, user)
|
stream.remove_user_statuses(viewer, user)
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=MEDIUM, ignore_result=True)
|
@app.task(queue=STREAMS)
|
||||||
def add_user_statuses_task(viewer_id, user_id, stream_list=None):
|
def add_user_statuses_task(viewer_id, user_id, stream_list=None):
|
||||||
"""add all statuses by a user to a viewer's stream"""
|
"""add all statuses by a user to a viewer's stream"""
|
||||||
stream_list = [streams[s] for s in stream_list] if stream_list else streams.values()
|
stream_list = [streams[s] for s in stream_list] if stream_list else streams.values()
|
||||||
|
@ -549,7 +567,7 @@ def add_user_statuses_task(viewer_id, user_id, stream_list=None):
|
||||||
stream.add_user_statuses(viewer, user)
|
stream.add_user_statuses(viewer, user)
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=MEDIUM, ignore_result=True)
|
@app.task(queue=STREAMS)
|
||||||
def handle_boost_task(boost_id):
|
def handle_boost_task(boost_id):
|
||||||
"""remove the original post and other, earlier boosts"""
|
"""remove the original post and other, earlier boosts"""
|
||||||
instance = models.Status.objects.get(id=boost_id)
|
instance = models.Status.objects.get(id=boost_id)
|
||||||
|
@ -563,10 +581,10 @@ def handle_boost_task(boost_id):
|
||||||
|
|
||||||
for stream in streams.values():
|
for stream in streams.values():
|
||||||
# people who should see the boost (not people who see the original status)
|
# people who should see the boost (not people who see the original status)
|
||||||
audience = stream.get_stores_for_object(instance)
|
audience = stream.get_stores_for_users(stream.get_audience(instance))
|
||||||
stream.remove_object_from_related_stores(boosted, stores=audience)
|
stream.remove_object_from_stores(boosted, audience)
|
||||||
for status in old_versions:
|
for status in old_versions:
|
||||||
stream.remove_object_from_related_stores(status, stores=audience)
|
stream.remove_object_from_stores(status, audience)
|
||||||
|
|
||||||
|
|
||||||
def get_status_type(status):
|
def get_status_type(status):
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
"""Do further startup configuration and initialization"""
|
"""Do further startup configuration and initialization"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import urllib
|
import urllib
|
||||||
import logging
|
import logging
|
||||||
|
@ -14,16 +15,16 @@ def download_file(url, destination):
|
||||||
"""Downloads a file to the given path"""
|
"""Downloads a file to the given path"""
|
||||||
try:
|
try:
|
||||||
# Ensure our destination directory exists
|
# Ensure our destination directory exists
|
||||||
os.makedirs(os.path.dirname(destination))
|
os.makedirs(os.path.dirname(destination), exist_ok=True)
|
||||||
with urllib.request.urlopen(url) as stream:
|
with urllib.request.urlopen(url) as stream:
|
||||||
with open(destination, "b+w") as outfile:
|
with open(destination, "b+w") as outfile:
|
||||||
outfile.write(stream.read())
|
outfile.write(stream.read())
|
||||||
except (urllib.error.HTTPError, urllib.error.URLError):
|
except (urllib.error.HTTPError, urllib.error.URLError) as err:
|
||||||
logger.info("Failed to download file %s", url)
|
logger.error("Failed to download file %s: %s", url, err)
|
||||||
except OSError:
|
except OSError as err:
|
||||||
logger.info("Couldn't open font file %s for writing", destination)
|
logger.error("Couldn't open font file %s for writing: %s", destination, err)
|
||||||
except: # pylint: disable=bare-except
|
except Exception as err: # pylint:disable=broad-except
|
||||||
logger.info("Unknown error in file download")
|
logger.error("Unknown error in file download: %s", err)
|
||||||
|
|
||||||
|
|
||||||
class BookwyrmConfig(AppConfig):
|
class BookwyrmConfig(AppConfig):
|
||||||
|
@ -40,6 +41,7 @@ class BookwyrmConfig(AppConfig):
|
||||||
from bookwyrm.telemetry import open_telemetry
|
from bookwyrm.telemetry import open_telemetry
|
||||||
|
|
||||||
open_telemetry.instrumentDjango()
|
open_telemetry.instrumentDjango()
|
||||||
|
open_telemetry.instrumentPostgres()
|
||||||
|
|
||||||
if settings.ENABLE_PREVIEW_IMAGES and settings.FONTS:
|
if settings.ENABLE_PREVIEW_IMAGES and settings.FONTS:
|
||||||
# Download any fonts that we don't have yet
|
# Download any fonts that we don't have yet
|
||||||
|
|
|
@ -1,35 +1,69 @@
|
||||||
""" using a bookwyrm instance as a source of book data """
|
""" using a bookwyrm instance as a source of book data """
|
||||||
|
from __future__ import annotations
|
||||||
from dataclasses import asdict, dataclass
|
from dataclasses import asdict, dataclass
|
||||||
from functools import reduce
|
from functools import reduce
|
||||||
import operator
|
import operator
|
||||||
|
from typing import Optional, Union, Any, Literal, overload
|
||||||
|
|
||||||
from django.contrib.postgres.search import SearchRank, SearchQuery
|
from django.contrib.postgres.search import SearchRank, SearchQuery
|
||||||
from django.db.models import F, Q
|
from django.db.models import F, Q
|
||||||
|
from django.db.models.query import QuerySet
|
||||||
|
|
||||||
from bookwyrm import models
|
from bookwyrm import models
|
||||||
from bookwyrm import connectors
|
from bookwyrm import connectors
|
||||||
from bookwyrm.settings import MEDIA_FULL_URL
|
from bookwyrm.settings import MEDIA_FULL_URL
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def search(
|
||||||
|
query: str,
|
||||||
|
*,
|
||||||
|
min_confidence: float = 0,
|
||||||
|
filters: Optional[list[Any]] = None,
|
||||||
|
return_first: Literal[False],
|
||||||
|
) -> QuerySet[models.Edition]:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def search(
|
||||||
|
query: str,
|
||||||
|
*,
|
||||||
|
min_confidence: float = 0,
|
||||||
|
filters: Optional[list[Any]] = None,
|
||||||
|
return_first: Literal[True],
|
||||||
|
) -> Optional[models.Edition]:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=arguments-differ
|
# pylint: disable=arguments-differ
|
||||||
def search(query, min_confidence=0, filters=None, return_first=False):
|
def search(
|
||||||
|
query: str,
|
||||||
|
*,
|
||||||
|
min_confidence: float = 0,
|
||||||
|
filters: Optional[list[Any]] = None,
|
||||||
|
return_first: bool = False,
|
||||||
|
books: Optional[QuerySet[models.Edition]] = None,
|
||||||
|
) -> Union[Optional[models.Edition], QuerySet[models.Edition]]:
|
||||||
"""search your local database"""
|
"""search your local database"""
|
||||||
filters = filters or []
|
filters = filters or []
|
||||||
if not query:
|
if not query:
|
||||||
return []
|
return None if return_first else []
|
||||||
query = query.strip()
|
query = query.strip()
|
||||||
|
|
||||||
results = None
|
results = None
|
||||||
# first, try searching unique identifiers
|
# first, try searching unique identifiers
|
||||||
# unique identifiers never have spaces, title/author usually do
|
# unique identifiers never have spaces, title/author usually do
|
||||||
if not " " in query:
|
if not " " in query:
|
||||||
results = search_identifiers(query, *filters, return_first=return_first)
|
results = search_identifiers(
|
||||||
|
query, *filters, return_first=return_first, books=books
|
||||||
|
)
|
||||||
|
|
||||||
# if there were no identifier results...
|
# if there were no identifier results...
|
||||||
if not results:
|
if not results:
|
||||||
# then try searching title/author
|
# then try searching title/author
|
||||||
results = search_title_author(
|
results = search_title_author(
|
||||||
query, min_confidence, *filters, return_first=return_first
|
query, min_confidence, *filters, return_first=return_first, books=books
|
||||||
)
|
)
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
@ -66,8 +100,18 @@ def format_search_result(search_result):
|
||||||
).json()
|
).json()
|
||||||
|
|
||||||
|
|
||||||
def search_identifiers(query, *filters, return_first=False):
|
def search_identifiers(
|
||||||
"""tries remote_id, isbn; defined as dedupe fields on the model"""
|
query,
|
||||||
|
*filters,
|
||||||
|
return_first=False,
|
||||||
|
books=None,
|
||||||
|
) -> Union[Optional[models.Edition], QuerySet[models.Edition]]:
|
||||||
|
"""search Editions by deduplication fields
|
||||||
|
|
||||||
|
Best for cases when we can assume someone is searching for an exact match on
|
||||||
|
commonly unique data identifiers like isbn or specific library ids.
|
||||||
|
"""
|
||||||
|
books = books or models.Edition.objects
|
||||||
if connectors.maybe_isbn(query):
|
if connectors.maybe_isbn(query):
|
||||||
# Oh did you think the 'S' in ISBN stood for 'standard'?
|
# Oh did you think the 'S' in ISBN stood for 'standard'?
|
||||||
normalized_isbn = query.strip().upper().rjust(10, "0")
|
normalized_isbn = query.strip().upper().rjust(10, "0")
|
||||||
|
@ -78,7 +122,7 @@ def search_identifiers(query, *filters, return_first=False):
|
||||||
for f in models.Edition._meta.get_fields()
|
for f in models.Edition._meta.get_fields()
|
||||||
if hasattr(f, "deduplication_field") and f.deduplication_field
|
if hasattr(f, "deduplication_field") and f.deduplication_field
|
||||||
]
|
]
|
||||||
results = models.Edition.objects.filter(
|
results = books.filter(
|
||||||
*filters, reduce(operator.or_, (Q(**f) for f in or_filters))
|
*filters, reduce(operator.or_, (Q(**f) for f in or_filters))
|
||||||
).distinct()
|
).distinct()
|
||||||
|
|
||||||
|
@ -87,11 +131,18 @@ def search_identifiers(query, *filters, return_first=False):
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
def search_title_author(query, min_confidence, *filters, return_first=False):
|
def search_title_author(
|
||||||
|
query,
|
||||||
|
min_confidence,
|
||||||
|
*filters,
|
||||||
|
return_first=False,
|
||||||
|
books=None,
|
||||||
|
) -> QuerySet[models.Edition]:
|
||||||
"""searches for title and author"""
|
"""searches for title and author"""
|
||||||
|
books = books or models.Edition.objects
|
||||||
query = SearchQuery(query, config="simple") | SearchQuery(query, config="english")
|
query = SearchQuery(query, config="simple") | SearchQuery(query, config="english")
|
||||||
results = (
|
results = (
|
||||||
models.Edition.objects.filter(*filters, search_vector=query)
|
books.filter(*filters, search_vector=query)
|
||||||
.annotate(rank=SearchRank(F("search_vector"), query))
|
.annotate(rank=SearchRank(F("search_vector"), query))
|
||||||
.filter(rank__gt=min_confidence)
|
.filter(rank__gt=min_confidence)
|
||||||
.order_by("-rank")
|
.order_by("-rank")
|
||||||
|
@ -102,7 +153,7 @@ def search_title_author(query, min_confidence, *filters, return_first=False):
|
||||||
|
|
||||||
# filter out multiple editions of the same work
|
# filter out multiple editions of the same work
|
||||||
list_results = []
|
list_results = []
|
||||||
for work_id in set(editions_of_work[:30]):
|
for work_id in editions_of_work[:30]:
|
||||||
result = (
|
result = (
|
||||||
results.filter(parent_work=work_id)
|
results.filter(parent_work=work_id)
|
||||||
.order_by("-rank", "-edition_rank")
|
.order_by("-rank", "-edition_rank")
|
||||||
|
@ -122,11 +173,11 @@ class SearchResult:
|
||||||
title: str
|
title: str
|
||||||
key: str
|
key: str
|
||||||
connector: object
|
connector: object
|
||||||
view_link: str = None
|
view_link: Optional[str] = None
|
||||||
author: str = None
|
author: Optional[str] = None
|
||||||
year: str = None
|
year: Optional[str] = None
|
||||||
cover: str = None
|
cover: Optional[str] = None
|
||||||
confidence: int = 1
|
confidence: float = 1.0
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
# pylint: disable=consider-using-f-string
|
# pylint: disable=consider-using-f-string
|
||||||
|
|
|
@ -1,45 +1,57 @@
|
||||||
""" functionality outline for a book data connector """
|
""" functionality outline for a book data connector """
|
||||||
|
from __future__ import annotations
|
||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Optional, TypedDict, Any, Callable, Union, Iterator
|
||||||
from urllib.parse import quote_plus
|
from urllib.parse import quote_plus
|
||||||
import imghdr
|
|
||||||
|
# pylint: disable-next=deprecated-module
|
||||||
|
import imghdr # Deprecated in 3.11 for removal in 3.13; no good alternative yet
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
|
import asyncio
|
||||||
|
import requests
|
||||||
|
from requests.exceptions import RequestException
|
||||||
|
import aiohttp
|
||||||
|
|
||||||
from django.core.files.base import ContentFile
|
from django.core.files.base import ContentFile
|
||||||
from django.db import transaction
|
from django.db import transaction
|
||||||
import requests
|
|
||||||
from requests.exceptions import RequestException
|
|
||||||
|
|
||||||
from bookwyrm import activitypub, models, settings
|
from bookwyrm import activitypub, models, settings
|
||||||
|
from bookwyrm.settings import USER_AGENT
|
||||||
from .connector_manager import load_more_data, ConnectorException, raise_not_valid_url
|
from .connector_manager import load_more_data, ConnectorException, raise_not_valid_url
|
||||||
from .format_mappings import format_mappings
|
from .format_mappings import format_mappings
|
||||||
|
from ..book_search import SearchResult
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
JsonDict = dict[str, Any]
|
||||||
|
|
||||||
|
|
||||||
|
class ConnectorResults(TypedDict):
|
||||||
|
"""TypedDict for results returned by connector"""
|
||||||
|
|
||||||
|
connector: AbstractMinimalConnector
|
||||||
|
results: list[SearchResult]
|
||||||
|
|
||||||
|
|
||||||
class AbstractMinimalConnector(ABC):
|
class AbstractMinimalConnector(ABC):
|
||||||
"""just the bare bones, for other bookwyrm instances"""
|
"""just the bare bones, for other bookwyrm instances"""
|
||||||
|
|
||||||
def __init__(self, identifier):
|
def __init__(self, identifier: str):
|
||||||
# load connector settings
|
# load connector settings
|
||||||
info = models.Connector.objects.get(identifier=identifier)
|
info = models.Connector.objects.get(identifier=identifier)
|
||||||
self.connector = info
|
self.connector = info
|
||||||
|
|
||||||
# the things in the connector model to copy over
|
# the things in the connector model to copy over
|
||||||
self_fields = [
|
self.base_url = info.base_url
|
||||||
"base_url",
|
self.books_url = info.books_url
|
||||||
"books_url",
|
self.covers_url = info.covers_url
|
||||||
"covers_url",
|
self.search_url = info.search_url
|
||||||
"search_url",
|
self.isbn_search_url = info.isbn_search_url
|
||||||
"isbn_search_url",
|
self.name = info.name
|
||||||
"name",
|
self.identifier = info.identifier
|
||||||
"identifier",
|
|
||||||
]
|
|
||||||
for field in self_fields:
|
|
||||||
setattr(self, field, getattr(info, field))
|
|
||||||
|
|
||||||
def get_search_url(self, query):
|
def get_search_url(self, query: str) -> str:
|
||||||
"""format the query url"""
|
"""format the query url"""
|
||||||
# Check if the query resembles an ISBN
|
# Check if the query resembles an ISBN
|
||||||
if maybe_isbn(query) and self.isbn_search_url and self.isbn_search_url != "":
|
if maybe_isbn(query) and self.isbn_search_url and self.isbn_search_url != "":
|
||||||
|
@ -51,42 +63,91 @@ class AbstractMinimalConnector(ABC):
|
||||||
# searched as free text. This, instead, only searches isbn if it's isbn-y
|
# searched as free text. This, instead, only searches isbn if it's isbn-y
|
||||||
return f"{self.search_url}{quote_plus(query)}"
|
return f"{self.search_url}{quote_plus(query)}"
|
||||||
|
|
||||||
def process_search_response(self, query, data, min_confidence):
|
def process_search_response(
|
||||||
"""Format the search results based on the formt of the query"""
|
self, query: str, data: Any, min_confidence: float
|
||||||
|
) -> list[SearchResult]:
|
||||||
|
"""Format the search results based on the format of the query"""
|
||||||
if maybe_isbn(query):
|
if maybe_isbn(query):
|
||||||
return list(self.parse_isbn_search_data(data))[:10]
|
return list(self.parse_isbn_search_data(data))[:10]
|
||||||
return list(self.parse_search_data(data, min_confidence))[:10]
|
return list(self.parse_search_data(data, min_confidence))[:10]
|
||||||
|
|
||||||
|
async def get_results(
|
||||||
|
self,
|
||||||
|
session: aiohttp.ClientSession,
|
||||||
|
url: str,
|
||||||
|
min_confidence: float,
|
||||||
|
query: str,
|
||||||
|
) -> Optional[ConnectorResults]:
|
||||||
|
"""try this specific connector"""
|
||||||
|
# pylint: disable=line-too-long
|
||||||
|
headers = {
|
||||||
|
"Accept": (
|
||||||
|
'application/json, application/activity+json, application/ld+json; profile="https://www.w3.org/ns/activitystreams"; charset=utf-8'
|
||||||
|
),
|
||||||
|
"User-Agent": USER_AGENT,
|
||||||
|
}
|
||||||
|
params = {"min_confidence": min_confidence}
|
||||||
|
try:
|
||||||
|
async with session.get(url, headers=headers, params=params) as response:
|
||||||
|
if not response.ok:
|
||||||
|
logger.info("Unable to connect to %s: %s", url, response.reason)
|
||||||
|
return None
|
||||||
|
|
||||||
|
try:
|
||||||
|
raw_data = await response.json()
|
||||||
|
except aiohttp.client_exceptions.ContentTypeError as err:
|
||||||
|
logger.exception(err)
|
||||||
|
return None
|
||||||
|
|
||||||
|
return ConnectorResults(
|
||||||
|
connector=self,
|
||||||
|
results=self.process_search_response(
|
||||||
|
query, raw_data, min_confidence
|
||||||
|
),
|
||||||
|
)
|
||||||
|
except asyncio.TimeoutError:
|
||||||
|
logger.info("Connection timed out for url: %s", url)
|
||||||
|
except aiohttp.ClientError as err:
|
||||||
|
logger.info(err)
|
||||||
|
return None
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def get_or_create_book(self, remote_id):
|
def get_or_create_book(self, remote_id: str) -> Optional[models.Book]:
|
||||||
"""pull up a book record by whatever means possible"""
|
"""pull up a book record by whatever means possible"""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def parse_search_data(self, data, min_confidence):
|
def parse_search_data(
|
||||||
|
self, data: Any, min_confidence: float
|
||||||
|
) -> Iterator[SearchResult]:
|
||||||
"""turn the result json from a search into a list"""
|
"""turn the result json from a search into a list"""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def parse_isbn_search_data(self, data):
|
def parse_isbn_search_data(self, data: Any) -> Iterator[SearchResult]:
|
||||||
"""turn the result json from a search into a list"""
|
"""turn the result json from a search into a list"""
|
||||||
|
|
||||||
|
|
||||||
class AbstractConnector(AbstractMinimalConnector):
|
class AbstractConnector(AbstractMinimalConnector):
|
||||||
"""generic book data connector"""
|
"""generic book data connector"""
|
||||||
|
|
||||||
def __init__(self, identifier):
|
generated_remote_link_field = ""
|
||||||
|
|
||||||
|
def __init__(self, identifier: str):
|
||||||
super().__init__(identifier)
|
super().__init__(identifier)
|
||||||
# fields we want to look for in book data to copy over
|
# fields we want to look for in book data to copy over
|
||||||
# title we handle separately.
|
# title we handle separately.
|
||||||
self.book_mappings = []
|
self.book_mappings: list[Mapping] = []
|
||||||
|
self.author_mappings: list[Mapping] = []
|
||||||
|
|
||||||
def get_or_create_book(self, remote_id):
|
def get_or_create_book(self, remote_id: str) -> Optional[models.Book]:
|
||||||
"""translate arbitrary json into an Activitypub dataclass"""
|
"""translate arbitrary json into an Activitypub dataclass"""
|
||||||
# first, check if we have the origin_id saved
|
# first, check if we have the origin_id saved
|
||||||
existing = models.Edition.find_existing_by_remote_id(
|
existing = models.Edition.find_existing_by_remote_id(
|
||||||
remote_id
|
remote_id
|
||||||
) or models.Work.find_existing_by_remote_id(remote_id)
|
) or models.Work.find_existing_by_remote_id(remote_id)
|
||||||
if existing:
|
if existing:
|
||||||
if hasattr(existing, "default_edition"):
|
if hasattr(existing, "default_edition") and isinstance(
|
||||||
|
existing.default_edition, models.Edition
|
||||||
|
):
|
||||||
return existing.default_edition
|
return existing.default_edition
|
||||||
return existing
|
return existing
|
||||||
|
|
||||||
|
@ -118,6 +179,9 @@ class AbstractConnector(AbstractMinimalConnector):
|
||||||
)
|
)
|
||||||
# this will dedupe automatically
|
# this will dedupe automatically
|
||||||
work = work_activity.to_model(model=models.Work, overwrite=False)
|
work = work_activity.to_model(model=models.Work, overwrite=False)
|
||||||
|
if not work:
|
||||||
|
return None
|
||||||
|
|
||||||
for author in self.get_authors_from_data(work_data):
|
for author in self.get_authors_from_data(work_data):
|
||||||
work.authors.add(author)
|
work.authors.add(author)
|
||||||
|
|
||||||
|
@ -125,12 +189,21 @@ class AbstractConnector(AbstractMinimalConnector):
|
||||||
load_more_data.delay(self.connector.id, work.id)
|
load_more_data.delay(self.connector.id, work.id)
|
||||||
return edition
|
return edition
|
||||||
|
|
||||||
def get_book_data(self, remote_id): # pylint: disable=no-self-use
|
def get_book_data(self, remote_id: str) -> JsonDict: # pylint: disable=no-self-use
|
||||||
"""this allows connectors to override the default behavior"""
|
"""this allows connectors to override the default behavior"""
|
||||||
return get_data(remote_id)
|
return get_data(remote_id)
|
||||||
|
|
||||||
def create_edition_from_data(self, work, edition_data, instance=None):
|
def create_edition_from_data(
|
||||||
|
self,
|
||||||
|
work: models.Work,
|
||||||
|
edition_data: Union[str, JsonDict],
|
||||||
|
instance: Optional[models.Edition] = None,
|
||||||
|
) -> Optional[models.Edition]:
|
||||||
"""if we already have the work, we're ready"""
|
"""if we already have the work, we're ready"""
|
||||||
|
if isinstance(edition_data, str):
|
||||||
|
# We don't expect a string here
|
||||||
|
return None
|
||||||
|
|
||||||
mapped_data = dict_from_mappings(edition_data, self.book_mappings)
|
mapped_data = dict_from_mappings(edition_data, self.book_mappings)
|
||||||
mapped_data["work"] = work.remote_id
|
mapped_data["work"] = work.remote_id
|
||||||
edition_activity = activitypub.Edition(**mapped_data)
|
edition_activity = activitypub.Edition(**mapped_data)
|
||||||
|
@ -138,6 +211,9 @@ class AbstractConnector(AbstractMinimalConnector):
|
||||||
model=models.Edition, overwrite=False, instance=instance
|
model=models.Edition, overwrite=False, instance=instance
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if not edition:
|
||||||
|
return None
|
||||||
|
|
||||||
# if we're updating an existing instance, we don't need to load authors
|
# if we're updating an existing instance, we don't need to load authors
|
||||||
if instance:
|
if instance:
|
||||||
return edition
|
return edition
|
||||||
|
@ -154,7 +230,9 @@ class AbstractConnector(AbstractMinimalConnector):
|
||||||
|
|
||||||
return edition
|
return edition
|
||||||
|
|
||||||
def get_or_create_author(self, remote_id, instance=None):
|
def get_or_create_author(
|
||||||
|
self, remote_id: str, instance: Optional[models.Author] = None
|
||||||
|
) -> Optional[models.Author]:
|
||||||
"""load that author"""
|
"""load that author"""
|
||||||
if not instance:
|
if not instance:
|
||||||
existing = models.Author.find_existing_by_remote_id(remote_id)
|
existing = models.Author.find_existing_by_remote_id(remote_id)
|
||||||
|
@ -174,46 +252,51 @@ class AbstractConnector(AbstractMinimalConnector):
|
||||||
model=models.Author, overwrite=False, instance=instance
|
model=models.Author, overwrite=False, instance=instance
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_remote_id_from_model(self, obj):
|
def get_remote_id_from_model(self, obj: models.BookDataModel) -> Optional[str]:
|
||||||
"""given the data stored, how can we look this up"""
|
"""given the data stored, how can we look this up"""
|
||||||
return getattr(obj, getattr(self, "generated_remote_link_field"))
|
remote_id: Optional[str] = getattr(obj, self.generated_remote_link_field)
|
||||||
|
return remote_id
|
||||||
|
|
||||||
def update_author_from_remote(self, obj):
|
def update_author_from_remote(self, obj: models.Author) -> Optional[models.Author]:
|
||||||
"""load the remote data from this connector and add it to an existing author"""
|
"""load the remote data from this connector and add it to an existing author"""
|
||||||
remote_id = self.get_remote_id_from_model(obj)
|
remote_id = self.get_remote_id_from_model(obj)
|
||||||
|
if not remote_id:
|
||||||
|
return None
|
||||||
return self.get_or_create_author(remote_id, instance=obj)
|
return self.get_or_create_author(remote_id, instance=obj)
|
||||||
|
|
||||||
def update_book_from_remote(self, obj):
|
def update_book_from_remote(self, obj: models.Edition) -> Optional[models.Edition]:
|
||||||
"""load the remote data from this connector and add it to an existing book"""
|
"""load the remote data from this connector and add it to an existing book"""
|
||||||
remote_id = self.get_remote_id_from_model(obj)
|
remote_id = self.get_remote_id_from_model(obj)
|
||||||
|
if not remote_id:
|
||||||
|
return None
|
||||||
data = self.get_book_data(remote_id)
|
data = self.get_book_data(remote_id)
|
||||||
return self.create_edition_from_data(obj.parent_work, data, instance=obj)
|
return self.create_edition_from_data(obj.parent_work, data, instance=obj)
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def is_work_data(self, data):
|
def is_work_data(self, data: JsonDict) -> bool:
|
||||||
"""differentiate works and editions"""
|
"""differentiate works and editions"""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def get_edition_from_work_data(self, data):
|
def get_edition_from_work_data(self, data: JsonDict) -> JsonDict:
|
||||||
"""every work needs at least one edition"""
|
"""every work needs at least one edition"""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def get_work_from_edition_data(self, data):
|
def get_work_from_edition_data(self, data: JsonDict) -> JsonDict:
|
||||||
"""every edition needs a work"""
|
"""every edition needs a work"""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def get_authors_from_data(self, data):
|
def get_authors_from_data(self, data: JsonDict) -> Iterator[models.Author]:
|
||||||
"""load author data"""
|
"""load author data"""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def expand_book_data(self, book):
|
def expand_book_data(self, book: models.Book) -> None:
|
||||||
"""get more info on a book"""
|
"""get more info on a book"""
|
||||||
|
|
||||||
|
|
||||||
def dict_from_mappings(data, mappings):
|
def dict_from_mappings(data: JsonDict, mappings: list[Mapping]) -> JsonDict:
|
||||||
"""create a dict in Activitypub format, using mappings supplies by
|
"""create a dict in Activitypub format, using mappings supplies by
|
||||||
the subclass"""
|
the subclass"""
|
||||||
result = {}
|
result: JsonDict = {}
|
||||||
for mapping in mappings:
|
for mapping in mappings:
|
||||||
# sometimes there are multiple mappings for one field, don't
|
# sometimes there are multiple mappings for one field, don't
|
||||||
# overwrite earlier writes in that case
|
# overwrite earlier writes in that case
|
||||||
|
@ -223,7 +306,11 @@ def dict_from_mappings(data, mappings):
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
def get_data(url, params=None, timeout=settings.QUERY_TIMEOUT):
|
def get_data(
|
||||||
|
url: str,
|
||||||
|
params: Optional[dict[str, str]] = None,
|
||||||
|
timeout: int = settings.QUERY_TIMEOUT,
|
||||||
|
) -> JsonDict:
|
||||||
"""wrapper for request.get"""
|
"""wrapper for request.get"""
|
||||||
# check if the url is blocked
|
# check if the url is blocked
|
||||||
raise_not_valid_url(url)
|
raise_not_valid_url(url)
|
||||||
|
@ -256,10 +343,15 @@ def get_data(url, params=None, timeout=settings.QUERY_TIMEOUT):
|
||||||
logger.info(err)
|
logger.info(err)
|
||||||
raise ConnectorException(err)
|
raise ConnectorException(err)
|
||||||
|
|
||||||
|
if not isinstance(data, dict):
|
||||||
|
raise ConnectorException("Unexpected data format")
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
|
||||||
def get_image(url, timeout=10):
|
def get_image(
|
||||||
|
url: str, timeout: int = 10
|
||||||
|
) -> Union[tuple[ContentFile[bytes], str], tuple[None, None]]:
|
||||||
"""wrapper for requesting an image"""
|
"""wrapper for requesting an image"""
|
||||||
raise_not_valid_url(url)
|
raise_not_valid_url(url)
|
||||||
try:
|
try:
|
||||||
|
@ -289,14 +381,19 @@ def get_image(url, timeout=10):
|
||||||
class Mapping:
|
class Mapping:
|
||||||
"""associate a local database field with a field in an external dataset"""
|
"""associate a local database field with a field in an external dataset"""
|
||||||
|
|
||||||
def __init__(self, local_field, remote_field=None, formatter=None):
|
def __init__(
|
||||||
|
self,
|
||||||
|
local_field: str,
|
||||||
|
remote_field: Optional[str] = None,
|
||||||
|
formatter: Optional[Callable[[Any], Any]] = None,
|
||||||
|
):
|
||||||
noop = lambda x: x
|
noop = lambda x: x
|
||||||
|
|
||||||
self.local_field = local_field
|
self.local_field = local_field
|
||||||
self.remote_field = remote_field or local_field
|
self.remote_field = remote_field or local_field
|
||||||
self.formatter = formatter or noop
|
self.formatter = formatter or noop
|
||||||
|
|
||||||
def get_value(self, data):
|
def get_value(self, data: JsonDict) -> Optional[Any]:
|
||||||
"""pull a field from incoming json and return the formatted version"""
|
"""pull a field from incoming json and return the formatted version"""
|
||||||
value = data.get(self.remote_field)
|
value = data.get(self.remote_field)
|
||||||
if not value:
|
if not value:
|
||||||
|
@ -307,7 +404,7 @@ class Mapping:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def infer_physical_format(format_text):
|
def infer_physical_format(format_text: str) -> Optional[str]:
|
||||||
"""try to figure out what the standardized format is from the free value"""
|
"""try to figure out what the standardized format is from the free value"""
|
||||||
format_text = format_text.lower()
|
format_text = format_text.lower()
|
||||||
if format_text in format_mappings:
|
if format_text in format_mappings:
|
||||||
|
@ -320,8 +417,8 @@ def infer_physical_format(format_text):
|
||||||
return matches[0]
|
return matches[0]
|
||||||
|
|
||||||
|
|
||||||
def unique_physical_format(format_text):
|
def unique_physical_format(format_text: str) -> Optional[str]:
|
||||||
"""only store the format if it isn't diretly in the format mappings"""
|
"""only store the format if it isn't directly in the format mappings"""
|
||||||
format_text = format_text.lower()
|
format_text = format_text.lower()
|
||||||
if format_text in format_mappings:
|
if format_text in format_mappings:
|
||||||
# try a direct match, so saving this would be redundant
|
# try a direct match, so saving this would be redundant
|
||||||
|
@ -329,7 +426,7 @@ def unique_physical_format(format_text):
|
||||||
return format_text
|
return format_text
|
||||||
|
|
||||||
|
|
||||||
def maybe_isbn(query):
|
def maybe_isbn(query: str) -> bool:
|
||||||
"""check if a query looks like an isbn"""
|
"""check if a query looks like an isbn"""
|
||||||
isbn = re.sub(r"[\W_]", "", query) # removes filler characters
|
isbn = re.sub(r"[\W_]", "", query) # removes filler characters
|
||||||
# ISBNs must be numeric except an ISBN10 checkdigit can be 'X'
|
# ISBNs must be numeric except an ISBN10 checkdigit can be 'X'
|
||||||
|
|
|
@ -1,4 +1,7 @@
|
||||||
""" using another bookwyrm instance as a source of book data """
|
""" using another bookwyrm instance as a source of book data """
|
||||||
|
from __future__ import annotations
|
||||||
|
from typing import Any, Iterator
|
||||||
|
|
||||||
from bookwyrm import activitypub, models
|
from bookwyrm import activitypub, models
|
||||||
from bookwyrm.book_search import SearchResult
|
from bookwyrm.book_search import SearchResult
|
||||||
from .abstract_connector import AbstractMinimalConnector
|
from .abstract_connector import AbstractMinimalConnector
|
||||||
|
@ -7,15 +10,19 @@ from .abstract_connector import AbstractMinimalConnector
|
||||||
class Connector(AbstractMinimalConnector):
|
class Connector(AbstractMinimalConnector):
|
||||||
"""this is basically just for search"""
|
"""this is basically just for search"""
|
||||||
|
|
||||||
def get_or_create_book(self, remote_id):
|
def get_or_create_book(self, remote_id: str) -> models.Edition:
|
||||||
return activitypub.resolve_remote_id(remote_id, model=models.Edition)
|
return activitypub.resolve_remote_id(remote_id, model=models.Edition)
|
||||||
|
|
||||||
def parse_search_data(self, data, min_confidence):
|
def parse_search_data(
|
||||||
|
self, data: list[dict[str, Any]], min_confidence: float
|
||||||
|
) -> Iterator[SearchResult]:
|
||||||
for search_result in data:
|
for search_result in data:
|
||||||
search_result["connector"] = self
|
search_result["connector"] = self
|
||||||
yield SearchResult(**search_result)
|
yield SearchResult(**search_result)
|
||||||
|
|
||||||
def parse_isbn_search_data(self, data):
|
def parse_isbn_search_data(
|
||||||
|
self, data: list[dict[str, Any]]
|
||||||
|
) -> Iterator[SearchResult]:
|
||||||
for search_result in data:
|
for search_result in data:
|
||||||
search_result["connector"] = self
|
search_result["connector"] = self
|
||||||
yield SearchResult(**search_result)
|
yield SearchResult(**search_result)
|
||||||
|
|
|
@ -1,8 +1,11 @@
|
||||||
""" interface with whatever connectors the app has """
|
""" interface with whatever connectors the app has """
|
||||||
|
from __future__ import annotations
|
||||||
import asyncio
|
import asyncio
|
||||||
import importlib
|
import importlib
|
||||||
import ipaddress
|
import ipaddress
|
||||||
import logging
|
import logging
|
||||||
|
from asyncio import Future
|
||||||
|
from typing import Iterator, Any, Optional, Union, overload, Literal
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
import aiohttp
|
import aiohttp
|
||||||
|
@ -12,8 +15,10 @@ from django.db.models import signals
|
||||||
from requests import HTTPError
|
from requests import HTTPError
|
||||||
|
|
||||||
from bookwyrm import book_search, models
|
from bookwyrm import book_search, models
|
||||||
from bookwyrm.settings import SEARCH_TIMEOUT, USER_AGENT
|
from bookwyrm.book_search import SearchResult
|
||||||
from bookwyrm.tasks import app, LOW
|
from bookwyrm.connectors import abstract_connector
|
||||||
|
from bookwyrm.settings import SEARCH_TIMEOUT
|
||||||
|
from bookwyrm.tasks import app, CONNECTORS
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
@ -22,61 +27,46 @@ class ConnectorException(HTTPError):
|
||||||
"""when the connector can't do what was asked"""
|
"""when the connector can't do what was asked"""
|
||||||
|
|
||||||
|
|
||||||
async def get_results(session, url, min_confidence, query, connector):
|
async def async_connector_search(
|
||||||
"""try this specific connector"""
|
query: str,
|
||||||
# pylint: disable=line-too-long
|
items: list[tuple[str, abstract_connector.AbstractConnector]],
|
||||||
headers = {
|
min_confidence: float,
|
||||||
"Accept": (
|
) -> list[Optional[abstract_connector.ConnectorResults]]:
|
||||||
'application/json, application/activity+json, application/ld+json; profile="https://www.w3.org/ns/activitystreams"; charset=utf-8'
|
|
||||||
),
|
|
||||||
"User-Agent": USER_AGENT,
|
|
||||||
}
|
|
||||||
params = {"min_confidence": min_confidence}
|
|
||||||
try:
|
|
||||||
async with session.get(url, headers=headers, params=params) as response:
|
|
||||||
if not response.ok:
|
|
||||||
logger.info("Unable to connect to %s: %s", url, response.reason)
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
raw_data = await response.json()
|
|
||||||
except aiohttp.client_exceptions.ContentTypeError as err:
|
|
||||||
logger.exception(err)
|
|
||||||
return
|
|
||||||
|
|
||||||
return {
|
|
||||||
"connector": connector,
|
|
||||||
"results": connector.process_search_response(
|
|
||||||
query, raw_data, min_confidence
|
|
||||||
),
|
|
||||||
}
|
|
||||||
except asyncio.TimeoutError:
|
|
||||||
logger.info("Connection timed out for url: %s", url)
|
|
||||||
except aiohttp.ClientError as err:
|
|
||||||
logger.info(err)
|
|
||||||
|
|
||||||
|
|
||||||
async def async_connector_search(query, items, min_confidence):
|
|
||||||
"""Try a number of requests simultaneously"""
|
"""Try a number of requests simultaneously"""
|
||||||
timeout = aiohttp.ClientTimeout(total=SEARCH_TIMEOUT)
|
timeout = aiohttp.ClientTimeout(total=SEARCH_TIMEOUT)
|
||||||
async with aiohttp.ClientSession(timeout=timeout) as session:
|
async with aiohttp.ClientSession(timeout=timeout) as session:
|
||||||
tasks = []
|
tasks: list[Future[Optional[abstract_connector.ConnectorResults]]] = []
|
||||||
for url, connector in items:
|
for url, connector in items:
|
||||||
tasks.append(
|
tasks.append(
|
||||||
asyncio.ensure_future(
|
asyncio.ensure_future(
|
||||||
get_results(session, url, min_confidence, query, connector)
|
connector.get_results(session, url, min_confidence, query)
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
results = await asyncio.gather(*tasks)
|
results = await asyncio.gather(*tasks)
|
||||||
return results
|
return list(results)
|
||||||
|
|
||||||
|
|
||||||
def search(query, min_confidence=0.1, return_first=False):
|
@overload
|
||||||
"""find books based on arbitary keywords"""
|
def search(
|
||||||
|
query: str, *, min_confidence: float = 0.1, return_first: Literal[False]
|
||||||
|
) -> list[abstract_connector.ConnectorResults]:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def search(
|
||||||
|
query: str, *, min_confidence: float = 0.1, return_first: Literal[True]
|
||||||
|
) -> Optional[SearchResult]:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
def search(
|
||||||
|
query: str, *, min_confidence: float = 0.1, return_first: bool = False
|
||||||
|
) -> Union[list[abstract_connector.ConnectorResults], Optional[SearchResult]]:
|
||||||
|
"""find books based on arbitrary keywords"""
|
||||||
if not query:
|
if not query:
|
||||||
return []
|
return None if return_first else []
|
||||||
results = []
|
|
||||||
|
|
||||||
items = []
|
items = []
|
||||||
for connector in get_connectors():
|
for connector in get_connectors():
|
||||||
|
@ -91,8 +81,12 @@ def search(query, min_confidence=0.1, return_first=False):
|
||||||
items.append((url, connector))
|
items.append((url, connector))
|
||||||
|
|
||||||
# load as many results as we can
|
# load as many results as we can
|
||||||
results = asyncio.run(async_connector_search(query, items, min_confidence))
|
# failed requests will return None, so filter those out
|
||||||
results = [r for r in results if r]
|
results = [
|
||||||
|
r
|
||||||
|
for r in asyncio.run(async_connector_search(query, items, min_confidence))
|
||||||
|
if r
|
||||||
|
]
|
||||||
|
|
||||||
if return_first:
|
if return_first:
|
||||||
# find the best result from all the responses and return that
|
# find the best result from all the responses and return that
|
||||||
|
@ -100,11 +94,12 @@ def search(query, min_confidence=0.1, return_first=False):
|
||||||
all_results = sorted(all_results, key=lambda r: r.confidence, reverse=True)
|
all_results = sorted(all_results, key=lambda r: r.confidence, reverse=True)
|
||||||
return all_results[0] if all_results else None
|
return all_results[0] if all_results else None
|
||||||
|
|
||||||
# failed requests will return None, so filter those out
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
def first_search_result(query, min_confidence=0.1):
|
def first_search_result(
|
||||||
|
query: str, min_confidence: float = 0.1
|
||||||
|
) -> Union[models.Edition, SearchResult, None]:
|
||||||
"""search until you find a result that fits"""
|
"""search until you find a result that fits"""
|
||||||
# try local search first
|
# try local search first
|
||||||
result = book_search.search(query, min_confidence=min_confidence, return_first=True)
|
result = book_search.search(query, min_confidence=min_confidence, return_first=True)
|
||||||
|
@ -114,18 +109,20 @@ def first_search_result(query, min_confidence=0.1):
|
||||||
return search(query, min_confidence=min_confidence, return_first=True) or None
|
return search(query, min_confidence=min_confidence, return_first=True) or None
|
||||||
|
|
||||||
|
|
||||||
def get_connectors():
|
def get_connectors() -> Iterator[abstract_connector.AbstractConnector]:
|
||||||
"""load all connectors"""
|
"""load all connectors"""
|
||||||
for info in models.Connector.objects.filter(active=True).order_by("priority").all():
|
for info in models.Connector.objects.filter(active=True).order_by("priority").all():
|
||||||
yield load_connector(info)
|
yield load_connector(info)
|
||||||
|
|
||||||
|
|
||||||
def get_or_create_connector(remote_id):
|
def get_or_create_connector(remote_id: str) -> abstract_connector.AbstractConnector:
|
||||||
"""get the connector related to the object's server"""
|
"""get the connector related to the object's server"""
|
||||||
url = urlparse(remote_id)
|
url = urlparse(remote_id)
|
||||||
identifier = url.netloc
|
identifier = url.hostname
|
||||||
if not identifier:
|
if not identifier:
|
||||||
raise ValueError("Invalid remote id")
|
raise ValueError(f"Invalid remote id: {remote_id}")
|
||||||
|
|
||||||
|
base_url = f"{url.scheme}://{url.netloc}"
|
||||||
|
|
||||||
try:
|
try:
|
||||||
connector_info = models.Connector.objects.get(identifier=identifier)
|
connector_info = models.Connector.objects.get(identifier=identifier)
|
||||||
|
@ -133,58 +130,75 @@ def get_or_create_connector(remote_id):
|
||||||
connector_info = models.Connector.objects.create(
|
connector_info = models.Connector.objects.create(
|
||||||
identifier=identifier,
|
identifier=identifier,
|
||||||
connector_file="bookwyrm_connector",
|
connector_file="bookwyrm_connector",
|
||||||
base_url=f"https://{identifier}",
|
base_url=base_url,
|
||||||
books_url=f"https://{identifier}/book",
|
books_url=f"{base_url}/book",
|
||||||
covers_url=f"https://{identifier}/images/covers",
|
covers_url=f"{base_url}/images/covers",
|
||||||
search_url=f"https://{identifier}/search?q=",
|
search_url=f"{base_url}/search?q=",
|
||||||
priority=2,
|
priority=2,
|
||||||
)
|
)
|
||||||
|
|
||||||
return load_connector(connector_info)
|
return load_connector(connector_info)
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=LOW, ignore_result=True)
|
@app.task(queue=CONNECTORS)
|
||||||
def load_more_data(connector_id, book_id):
|
def load_more_data(connector_id: str, book_id: str) -> None:
|
||||||
"""background the work of getting all 10,000 editions of LoTR"""
|
"""background the work of getting all 10,000 editions of LoTR"""
|
||||||
connector_info = models.Connector.objects.get(id=connector_id)
|
connector_info = models.Connector.objects.get(id=connector_id)
|
||||||
connector = load_connector(connector_info)
|
connector = load_connector(connector_info)
|
||||||
book = models.Book.objects.select_subclasses().get(id=book_id)
|
book = models.Book.objects.select_subclasses().get( # type: ignore[no-untyped-call]
|
||||||
|
id=book_id
|
||||||
|
)
|
||||||
connector.expand_book_data(book)
|
connector.expand_book_data(book)
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=LOW, ignore_result=True)
|
@app.task(queue=CONNECTORS)
|
||||||
def create_edition_task(connector_id, work_id, data):
|
def create_edition_task(
|
||||||
|
connector_id: int, work_id: int, data: Union[str, abstract_connector.JsonDict]
|
||||||
|
) -> None:
|
||||||
"""separate task for each of the 10,000 editions of LoTR"""
|
"""separate task for each of the 10,000 editions of LoTR"""
|
||||||
connector_info = models.Connector.objects.get(id=connector_id)
|
connector_info = models.Connector.objects.get(id=connector_id)
|
||||||
connector = load_connector(connector_info)
|
connector = load_connector(connector_info)
|
||||||
work = models.Work.objects.select_subclasses().get(id=work_id)
|
work = models.Work.objects.select_subclasses().get( # type: ignore[no-untyped-call]
|
||||||
|
id=work_id
|
||||||
|
)
|
||||||
connector.create_edition_from_data(work, data)
|
connector.create_edition_from_data(work, data)
|
||||||
|
|
||||||
|
|
||||||
def load_connector(connector_info):
|
def load_connector(
|
||||||
|
connector_info: models.Connector,
|
||||||
|
) -> abstract_connector.AbstractConnector:
|
||||||
"""instantiate the connector class"""
|
"""instantiate the connector class"""
|
||||||
connector = importlib.import_module(
|
connector = importlib.import_module(
|
||||||
f"bookwyrm.connectors.{connector_info.connector_file}"
|
f"bookwyrm.connectors.{connector_info.connector_file}"
|
||||||
)
|
)
|
||||||
return connector.Connector(connector_info.identifier)
|
return connector.Connector(connector_info.identifier) # type: ignore[no-any-return]
|
||||||
|
|
||||||
|
|
||||||
@receiver(signals.post_save, sender="bookwyrm.FederatedServer")
|
@receiver(signals.post_save, sender="bookwyrm.FederatedServer")
|
||||||
# pylint: disable=unused-argument
|
# pylint: disable=unused-argument
|
||||||
def create_connector(sender, instance, created, *args, **kwargs):
|
def create_connector(
|
||||||
|
sender: Any,
|
||||||
|
instance: models.FederatedServer,
|
||||||
|
created: Any,
|
||||||
|
*args: Any,
|
||||||
|
**kwargs: Any,
|
||||||
|
) -> None:
|
||||||
"""create a connector to an external bookwyrm server"""
|
"""create a connector to an external bookwyrm server"""
|
||||||
if instance.application_type == "bookwyrm":
|
if instance.application_type == "bookwyrm":
|
||||||
get_or_create_connector(f"https://{instance.server_name}")
|
get_or_create_connector(f"https://{instance.server_name}")
|
||||||
|
|
||||||
|
|
||||||
def raise_not_valid_url(url):
|
def raise_not_valid_url(url: str) -> None:
|
||||||
"""do some basic reality checks on the url"""
|
"""do some basic reality checks on the url"""
|
||||||
parsed = urlparse(url)
|
parsed = urlparse(url)
|
||||||
if not parsed.scheme in ["http", "https"]:
|
if not parsed.scheme in ["http", "https"]:
|
||||||
raise ConnectorException("Invalid scheme: ", url)
|
raise ConnectorException("Invalid scheme: ", url)
|
||||||
|
|
||||||
|
if not parsed.hostname:
|
||||||
|
raise ConnectorException("Hostname missing: ", url)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
ipaddress.ip_address(parsed.netloc)
|
ipaddress.ip_address(parsed.hostname)
|
||||||
raise ConnectorException("Provided url is an IP address: ", url)
|
raise ConnectorException("Provided url is an IP address: ", url)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
# it's not an IP address, which is good
|
# it's not an IP address, which is good
|
||||||
|
|
|
@ -1,9 +1,10 @@
|
||||||
""" inventaire data connector """
|
""" inventaire data connector """
|
||||||
import re
|
import re
|
||||||
|
from typing import Any, Union, Optional, Iterator, Iterable
|
||||||
|
|
||||||
from bookwyrm import models
|
from bookwyrm import models
|
||||||
from bookwyrm.book_search import SearchResult
|
from bookwyrm.book_search import SearchResult
|
||||||
from .abstract_connector import AbstractConnector, Mapping
|
from .abstract_connector import AbstractConnector, Mapping, JsonDict
|
||||||
from .abstract_connector import get_data
|
from .abstract_connector import get_data
|
||||||
from .connector_manager import ConnectorException, create_edition_task
|
from .connector_manager import ConnectorException, create_edition_task
|
||||||
|
|
||||||
|
@ -13,7 +14,7 @@ class Connector(AbstractConnector):
|
||||||
|
|
||||||
generated_remote_link_field = "inventaire_id"
|
generated_remote_link_field = "inventaire_id"
|
||||||
|
|
||||||
def __init__(self, identifier):
|
def __init__(self, identifier: str):
|
||||||
super().__init__(identifier)
|
super().__init__(identifier)
|
||||||
|
|
||||||
get_first = lambda a: a[0]
|
get_first = lambda a: a[0]
|
||||||
|
@ -60,13 +61,13 @@ class Connector(AbstractConnector):
|
||||||
Mapping("died", remote_field="wdt:P570", formatter=get_first),
|
Mapping("died", remote_field="wdt:P570", formatter=get_first),
|
||||||
] + shared_mappings
|
] + shared_mappings
|
||||||
|
|
||||||
def get_remote_id(self, value):
|
def get_remote_id(self, value: str) -> str:
|
||||||
"""convert an id/uri into a url"""
|
"""convert an id/uri into a url"""
|
||||||
return f"{self.books_url}?action=by-uris&uris={value}"
|
return f"{self.books_url}?action=by-uris&uris={value}"
|
||||||
|
|
||||||
def get_book_data(self, remote_id):
|
def get_book_data(self, remote_id: str) -> JsonDict:
|
||||||
data = get_data(remote_id)
|
data = get_data(remote_id)
|
||||||
extracted = list(data.get("entities").values())
|
extracted = list(data.get("entities", {}).values())
|
||||||
try:
|
try:
|
||||||
data = extracted[0]
|
data = extracted[0]
|
||||||
except (KeyError, IndexError):
|
except (KeyError, IndexError):
|
||||||
|
@ -74,10 +75,16 @@ class Connector(AbstractConnector):
|
||||||
# flatten the data so that images, uri, and claims are on the same level
|
# flatten the data so that images, uri, and claims are on the same level
|
||||||
return {
|
return {
|
||||||
**data.get("claims", {}),
|
**data.get("claims", {}),
|
||||||
**{k: data.get(k) for k in ["uri", "image", "labels", "sitelinks", "type"]},
|
**{
|
||||||
|
k: data.get(k)
|
||||||
|
for k in ["uri", "image", "labels", "sitelinks", "type"]
|
||||||
|
if k in data
|
||||||
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
def parse_search_data(self, data, min_confidence):
|
def parse_search_data(
|
||||||
|
self, data: JsonDict, min_confidence: float
|
||||||
|
) -> Iterator[SearchResult]:
|
||||||
for search_result in data.get("results", []):
|
for search_result in data.get("results", []):
|
||||||
images = search_result.get("image")
|
images = search_result.get("image")
|
||||||
cover = f"{self.covers_url}/img/entities/{images[0]}" if images else None
|
cover = f"{self.covers_url}/img/entities/{images[0]}" if images else None
|
||||||
|
@ -96,8 +103,8 @@ class Connector(AbstractConnector):
|
||||||
connector=self,
|
connector=self,
|
||||||
)
|
)
|
||||||
|
|
||||||
def parse_isbn_search_data(self, data):
|
def parse_isbn_search_data(self, data: JsonDict) -> Iterator[SearchResult]:
|
||||||
"""got some daaaata"""
|
"""got some data"""
|
||||||
results = data.get("entities")
|
results = data.get("entities")
|
||||||
if not results:
|
if not results:
|
||||||
return
|
return
|
||||||
|
@ -114,35 +121,44 @@ class Connector(AbstractConnector):
|
||||||
connector=self,
|
connector=self,
|
||||||
)
|
)
|
||||||
|
|
||||||
def is_work_data(self, data):
|
def is_work_data(self, data: JsonDict) -> bool:
|
||||||
return data.get("type") == "work"
|
return data.get("type") == "work"
|
||||||
|
|
||||||
def load_edition_data(self, work_uri):
|
def load_edition_data(self, work_uri: str) -> JsonDict:
|
||||||
"""get a list of editions for a work"""
|
"""get a list of editions for a work"""
|
||||||
# pylint: disable=line-too-long
|
# pylint: disable=line-too-long
|
||||||
url = f"{self.books_url}?action=reverse-claims&property=wdt:P629&value={work_uri}&sort=true"
|
url = f"{self.books_url}?action=reverse-claims&property=wdt:P629&value={work_uri}&sort=true"
|
||||||
return get_data(url)
|
return get_data(url)
|
||||||
|
|
||||||
def get_edition_from_work_data(self, data):
|
def get_edition_from_work_data(self, data: JsonDict) -> JsonDict:
|
||||||
data = self.load_edition_data(data.get("uri"))
|
work_uri = data.get("uri")
|
||||||
|
if not work_uri:
|
||||||
|
raise ConnectorException("Invalid URI")
|
||||||
|
data = self.load_edition_data(work_uri)
|
||||||
try:
|
try:
|
||||||
uri = data.get("uris", [])[0]
|
uri = data.get("uris", [])[0]
|
||||||
except IndexError:
|
except IndexError:
|
||||||
raise ConnectorException("Invalid book data")
|
raise ConnectorException("Invalid book data")
|
||||||
return self.get_book_data(self.get_remote_id(uri))
|
return self.get_book_data(self.get_remote_id(uri))
|
||||||
|
|
||||||
def get_work_from_edition_data(self, data):
|
def get_work_from_edition_data(self, data: JsonDict) -> JsonDict:
|
||||||
uri = data.get("wdt:P629", [None])[0]
|
try:
|
||||||
|
uri = data.get("wdt:P629", [])[0]
|
||||||
|
except IndexError:
|
||||||
|
raise ConnectorException("Invalid book data")
|
||||||
|
|
||||||
if not uri:
|
if not uri:
|
||||||
raise ConnectorException("Invalid book data")
|
raise ConnectorException("Invalid book data")
|
||||||
return self.get_book_data(self.get_remote_id(uri))
|
return self.get_book_data(self.get_remote_id(uri))
|
||||||
|
|
||||||
def get_authors_from_data(self, data):
|
def get_authors_from_data(self, data: JsonDict) -> Iterator[models.Author]:
|
||||||
authors = data.get("wdt:P50", [])
|
authors = data.get("wdt:P50", [])
|
||||||
for author in authors:
|
for author in authors:
|
||||||
yield self.get_or_create_author(self.get_remote_id(author))
|
model = self.get_or_create_author(self.get_remote_id(author))
|
||||||
|
if model:
|
||||||
|
yield model
|
||||||
|
|
||||||
def expand_book_data(self, book):
|
def expand_book_data(self, book: models.Book) -> None:
|
||||||
work = book
|
work = book
|
||||||
# go from the edition to the work, if necessary
|
# go from the edition to the work, if necessary
|
||||||
if isinstance(book, models.Edition):
|
if isinstance(book, models.Edition):
|
||||||
|
@ -154,11 +170,16 @@ class Connector(AbstractConnector):
|
||||||
# who knows, man
|
# who knows, man
|
||||||
return
|
return
|
||||||
|
|
||||||
for edition_uri in edition_options.get("uris"):
|
for edition_uri in edition_options.get("uris", []):
|
||||||
remote_id = self.get_remote_id(edition_uri)
|
remote_id = self.get_remote_id(edition_uri)
|
||||||
create_edition_task.delay(self.connector.id, work.id, remote_id)
|
create_edition_task.delay(self.connector.id, work.id, remote_id)
|
||||||
|
|
||||||
def create_edition_from_data(self, work, edition_data, instance=None):
|
def create_edition_from_data(
|
||||||
|
self,
|
||||||
|
work: models.Work,
|
||||||
|
edition_data: Union[str, JsonDict],
|
||||||
|
instance: Optional[models.Edition] = None,
|
||||||
|
) -> Optional[models.Edition]:
|
||||||
"""pass in the url as data and then call the version in abstract connector"""
|
"""pass in the url as data and then call the version in abstract connector"""
|
||||||
if isinstance(edition_data, str):
|
if isinstance(edition_data, str):
|
||||||
try:
|
try:
|
||||||
|
@ -168,22 +189,26 @@ class Connector(AbstractConnector):
|
||||||
return None
|
return None
|
||||||
return super().create_edition_from_data(work, edition_data, instance=instance)
|
return super().create_edition_from_data(work, edition_data, instance=instance)
|
||||||
|
|
||||||
def get_cover_url(self, cover_blob, *_):
|
def get_cover_url(
|
||||||
|
self, cover_blob: Union[list[JsonDict], JsonDict], *_: Any
|
||||||
|
) -> Optional[str]:
|
||||||
"""format the relative cover url into an absolute one:
|
"""format the relative cover url into an absolute one:
|
||||||
{"url": "/img/entities/e794783f01b9d4f897a1ea9820b96e00d346994f"}
|
{"url": "/img/entities/e794783f01b9d4f897a1ea9820b96e00d346994f"}
|
||||||
"""
|
"""
|
||||||
# covers may or may not be a list
|
# covers may or may not be a list
|
||||||
if isinstance(cover_blob, list) and len(cover_blob) > 0:
|
if isinstance(cover_blob, list):
|
||||||
|
if len(cover_blob) == 0:
|
||||||
|
return None
|
||||||
cover_blob = cover_blob[0]
|
cover_blob = cover_blob[0]
|
||||||
cover_id = cover_blob.get("url")
|
cover_id = cover_blob.get("url")
|
||||||
if not cover_id:
|
if not isinstance(cover_id, str):
|
||||||
return None
|
return None
|
||||||
# cover may or may not be an absolute url already
|
# cover may or may not be an absolute url already
|
||||||
if re.match(r"^http", cover_id):
|
if re.match(r"^http", cover_id):
|
||||||
return cover_id
|
return cover_id
|
||||||
return f"{self.covers_url}{cover_id}"
|
return f"{self.covers_url}{cover_id}"
|
||||||
|
|
||||||
def resolve_keys(self, keys):
|
def resolve_keys(self, keys: Iterable[str]) -> list[str]:
|
||||||
"""cool, it's "wd:Q3156592" now what the heck does that mean"""
|
"""cool, it's "wd:Q3156592" now what the heck does that mean"""
|
||||||
results = []
|
results = []
|
||||||
for uri in keys:
|
for uri in keys:
|
||||||
|
@ -191,10 +216,10 @@ class Connector(AbstractConnector):
|
||||||
data = self.get_book_data(self.get_remote_id(uri))
|
data = self.get_book_data(self.get_remote_id(uri))
|
||||||
except ConnectorException:
|
except ConnectorException:
|
||||||
continue
|
continue
|
||||||
results.append(get_language_code(data.get("labels")))
|
results.append(get_language_code(data.get("labels", {})))
|
||||||
return results
|
return results
|
||||||
|
|
||||||
def get_description(self, links):
|
def get_description(self, links: JsonDict) -> str:
|
||||||
"""grab an extracted excerpt from wikipedia"""
|
"""grab an extracted excerpt from wikipedia"""
|
||||||
link = links.get("enwiki")
|
link = links.get("enwiki")
|
||||||
if not link:
|
if not link:
|
||||||
|
@ -204,15 +229,15 @@ class Connector(AbstractConnector):
|
||||||
data = get_data(url)
|
data = get_data(url)
|
||||||
except ConnectorException:
|
except ConnectorException:
|
||||||
return ""
|
return ""
|
||||||
return data.get("extract")
|
return str(data.get("extract", ""))
|
||||||
|
|
||||||
def get_remote_id_from_model(self, obj):
|
def get_remote_id_from_model(self, obj: models.BookDataModel) -> str:
|
||||||
"""use get_remote_id to figure out the link from a model obj"""
|
"""use get_remote_id to figure out the link from a model obj"""
|
||||||
remote_id_value = obj.inventaire_id
|
remote_id_value = obj.inventaire_id
|
||||||
return self.get_remote_id(remote_id_value)
|
return self.get_remote_id(remote_id_value)
|
||||||
|
|
||||||
|
|
||||||
def get_language_code(options, code="en"):
|
def get_language_code(options: JsonDict, code: str = "en") -> Any:
|
||||||
"""when there are a bunch of translation but we need a single field"""
|
"""when there are a bunch of translation but we need a single field"""
|
||||||
result = options.get(code)
|
result = options.get(code)
|
||||||
if result:
|
if result:
|
||||||
|
|
|
@ -1,9 +1,13 @@
|
||||||
""" openlibrary data connector """
|
""" openlibrary data connector """
|
||||||
import re
|
import re
|
||||||
|
from typing import Any, Optional, Union, Iterator, Iterable
|
||||||
|
|
||||||
|
from markdown import markdown
|
||||||
|
|
||||||
from bookwyrm import models
|
from bookwyrm import models
|
||||||
from bookwyrm.book_search import SearchResult
|
from bookwyrm.book_search import SearchResult
|
||||||
from .abstract_connector import AbstractConnector, Mapping
|
from bookwyrm.utils.sanitizer import clean
|
||||||
|
from .abstract_connector import AbstractConnector, Mapping, JsonDict
|
||||||
from .abstract_connector import get_data, infer_physical_format, unique_physical_format
|
from .abstract_connector import get_data, infer_physical_format, unique_physical_format
|
||||||
from .connector_manager import ConnectorException, create_edition_task
|
from .connector_manager import ConnectorException, create_edition_task
|
||||||
from .openlibrary_languages import languages
|
from .openlibrary_languages import languages
|
||||||
|
@ -14,7 +18,7 @@ class Connector(AbstractConnector):
|
||||||
|
|
||||||
generated_remote_link_field = "openlibrary_link"
|
generated_remote_link_field = "openlibrary_link"
|
||||||
|
|
||||||
def __init__(self, identifier):
|
def __init__(self, identifier: str):
|
||||||
super().__init__(identifier)
|
super().__init__(identifier)
|
||||||
|
|
||||||
get_first = lambda a, *args: a[0]
|
get_first = lambda a, *args: a[0]
|
||||||
|
@ -94,14 +98,14 @@ class Connector(AbstractConnector):
|
||||||
Mapping("inventaire_id", remote_field="links", formatter=get_inventaire_id),
|
Mapping("inventaire_id", remote_field="links", formatter=get_inventaire_id),
|
||||||
]
|
]
|
||||||
|
|
||||||
def get_book_data(self, remote_id):
|
def get_book_data(self, remote_id: str) -> JsonDict:
|
||||||
data = get_data(remote_id)
|
data = get_data(remote_id)
|
||||||
if data.get("type", {}).get("key") == "/type/redirect":
|
if data.get("type", {}).get("key") == "/type/redirect":
|
||||||
remote_id = self.base_url + data.get("location")
|
remote_id = self.base_url + data.get("location", "")
|
||||||
return get_data(remote_id)
|
return get_data(remote_id)
|
||||||
return data
|
return data
|
||||||
|
|
||||||
def get_remote_id_from_data(self, data):
|
def get_remote_id_from_data(self, data: JsonDict) -> str:
|
||||||
"""format a url from an openlibrary id field"""
|
"""format a url from an openlibrary id field"""
|
||||||
try:
|
try:
|
||||||
key = data["key"]
|
key = data["key"]
|
||||||
|
@ -109,10 +113,10 @@ class Connector(AbstractConnector):
|
||||||
raise ConnectorException("Invalid book data")
|
raise ConnectorException("Invalid book data")
|
||||||
return f"{self.books_url}{key}"
|
return f"{self.books_url}{key}"
|
||||||
|
|
||||||
def is_work_data(self, data):
|
def is_work_data(self, data: JsonDict) -> bool:
|
||||||
return bool(re.match(r"^[\/\w]+OL\d+W$", data["key"]))
|
return bool(re.match(r"^[\/\w]+OL\d+W$", data["key"]))
|
||||||
|
|
||||||
def get_edition_from_work_data(self, data):
|
def get_edition_from_work_data(self, data: JsonDict) -> JsonDict:
|
||||||
try:
|
try:
|
||||||
key = data["key"]
|
key = data["key"]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
|
@ -124,7 +128,7 @@ class Connector(AbstractConnector):
|
||||||
raise ConnectorException("No editions for work")
|
raise ConnectorException("No editions for work")
|
||||||
return edition
|
return edition
|
||||||
|
|
||||||
def get_work_from_edition_data(self, data):
|
def get_work_from_edition_data(self, data: JsonDict) -> JsonDict:
|
||||||
try:
|
try:
|
||||||
key = data["works"][0]["key"]
|
key = data["works"][0]["key"]
|
||||||
except (IndexError, KeyError):
|
except (IndexError, KeyError):
|
||||||
|
@ -132,7 +136,7 @@ class Connector(AbstractConnector):
|
||||||
url = f"{self.books_url}{key}"
|
url = f"{self.books_url}{key}"
|
||||||
return self.get_book_data(url)
|
return self.get_book_data(url)
|
||||||
|
|
||||||
def get_authors_from_data(self, data):
|
def get_authors_from_data(self, data: JsonDict) -> Iterator[models.Author]:
|
||||||
"""parse author json and load or create authors"""
|
"""parse author json and load or create authors"""
|
||||||
for author_blob in data.get("authors", []):
|
for author_blob in data.get("authors", []):
|
||||||
author_blob = author_blob.get("author", author_blob)
|
author_blob = author_blob.get("author", author_blob)
|
||||||
|
@ -144,7 +148,7 @@ class Connector(AbstractConnector):
|
||||||
continue
|
continue
|
||||||
yield author
|
yield author
|
||||||
|
|
||||||
def get_cover_url(self, cover_blob, size="L"):
|
def get_cover_url(self, cover_blob: list[str], size: str = "L") -> Optional[str]:
|
||||||
"""ask openlibrary for the cover"""
|
"""ask openlibrary for the cover"""
|
||||||
if not cover_blob:
|
if not cover_blob:
|
||||||
return None
|
return None
|
||||||
|
@ -152,8 +156,10 @@ class Connector(AbstractConnector):
|
||||||
image_name = f"{cover_id}-{size}.jpg"
|
image_name = f"{cover_id}-{size}.jpg"
|
||||||
return f"{self.covers_url}/b/id/{image_name}"
|
return f"{self.covers_url}/b/id/{image_name}"
|
||||||
|
|
||||||
def parse_search_data(self, data, min_confidence):
|
def parse_search_data(
|
||||||
for idx, search_result in enumerate(data.get("docs")):
|
self, data: JsonDict, min_confidence: float
|
||||||
|
) -> Iterator[SearchResult]:
|
||||||
|
for idx, search_result in enumerate(data.get("docs", [])):
|
||||||
# build the remote id from the openlibrary key
|
# build the remote id from the openlibrary key
|
||||||
key = self.books_url + search_result["key"]
|
key = self.books_url + search_result["key"]
|
||||||
author = search_result.get("author_name") or ["Unknown"]
|
author = search_result.get("author_name") or ["Unknown"]
|
||||||
|
@ -174,7 +180,7 @@ class Connector(AbstractConnector):
|
||||||
confidence=confidence,
|
confidence=confidence,
|
||||||
)
|
)
|
||||||
|
|
||||||
def parse_isbn_search_data(self, data):
|
def parse_isbn_search_data(self, data: JsonDict) -> Iterator[SearchResult]:
|
||||||
for search_result in list(data.values()):
|
for search_result in list(data.values()):
|
||||||
# build the remote id from the openlibrary key
|
# build the remote id from the openlibrary key
|
||||||
key = self.books_url + search_result["key"]
|
key = self.books_url + search_result["key"]
|
||||||
|
@ -188,12 +194,12 @@ class Connector(AbstractConnector):
|
||||||
year=search_result.get("publish_date"),
|
year=search_result.get("publish_date"),
|
||||||
)
|
)
|
||||||
|
|
||||||
def load_edition_data(self, olkey):
|
def load_edition_data(self, olkey: str) -> JsonDict:
|
||||||
"""query openlibrary for editions of a work"""
|
"""query openlibrary for editions of a work"""
|
||||||
url = f"{self.books_url}/works/{olkey}/editions"
|
url = f"{self.books_url}/works/{olkey}/editions"
|
||||||
return self.get_book_data(url)
|
return self.get_book_data(url)
|
||||||
|
|
||||||
def expand_book_data(self, book):
|
def expand_book_data(self, book: models.Book) -> None:
|
||||||
work = book
|
work = book
|
||||||
# go from the edition to the work, if necessary
|
# go from the edition to the work, if necessary
|
||||||
if isinstance(book, models.Edition):
|
if isinstance(book, models.Edition):
|
||||||
|
@ -206,14 +212,14 @@ class Connector(AbstractConnector):
|
||||||
# who knows, man
|
# who knows, man
|
||||||
return
|
return
|
||||||
|
|
||||||
for edition_data in edition_options.get("entries"):
|
for edition_data in edition_options.get("entries", []):
|
||||||
# does this edition have ANY interesting data?
|
# does this edition have ANY interesting data?
|
||||||
if ignore_edition(edition_data):
|
if ignore_edition(edition_data):
|
||||||
continue
|
continue
|
||||||
create_edition_task.delay(self.connector.id, work.id, edition_data)
|
create_edition_task.delay(self.connector.id, work.id, edition_data)
|
||||||
|
|
||||||
|
|
||||||
def ignore_edition(edition_data):
|
def ignore_edition(edition_data: JsonDict) -> bool:
|
||||||
"""don't load a million editions that have no metadata"""
|
"""don't load a million editions that have no metadata"""
|
||||||
# an isbn, we love to see it
|
# an isbn, we love to see it
|
||||||
if edition_data.get("isbn_13") or edition_data.get("isbn_10"):
|
if edition_data.get("isbn_13") or edition_data.get("isbn_10"):
|
||||||
|
@ -232,19 +238,30 @@ def ignore_edition(edition_data):
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
def get_description(description_blob):
|
def get_description(description_blob: Union[JsonDict, str]) -> str:
|
||||||
"""descriptions can be a string or a dict"""
|
"""descriptions can be a string or a dict"""
|
||||||
if isinstance(description_blob, dict):
|
if isinstance(description_blob, dict):
|
||||||
return description_blob.get("value")
|
description = markdown(description_blob.get("value", ""))
|
||||||
return description_blob
|
else:
|
||||||
|
description = markdown(description_blob)
|
||||||
|
|
||||||
|
if (
|
||||||
|
description.startswith("<p>")
|
||||||
|
and description.endswith("</p>")
|
||||||
|
and description.count("<p>") == 1
|
||||||
|
):
|
||||||
|
# If there is just one <p> tag and it is around the text remove it
|
||||||
|
return description[len("<p>") : -len("</p>")].strip()
|
||||||
|
|
||||||
|
return clean(description)
|
||||||
|
|
||||||
|
|
||||||
def get_openlibrary_key(key):
|
def get_openlibrary_key(key: str) -> str:
|
||||||
"""convert /books/OL27320736M into OL27320736M"""
|
"""convert /books/OL27320736M into OL27320736M"""
|
||||||
return key.split("/")[-1]
|
return key.split("/")[-1]
|
||||||
|
|
||||||
|
|
||||||
def get_languages(language_blob):
|
def get_languages(language_blob: Iterable[JsonDict]) -> list[Optional[str]]:
|
||||||
"""/language/eng -> English"""
|
"""/language/eng -> English"""
|
||||||
langs = []
|
langs = []
|
||||||
for lang in language_blob:
|
for lang in language_blob:
|
||||||
|
@ -252,14 +269,14 @@ def get_languages(language_blob):
|
||||||
return langs
|
return langs
|
||||||
|
|
||||||
|
|
||||||
def get_dict_field(blob, field_name):
|
def get_dict_field(blob: Optional[JsonDict], field_name: str) -> Optional[Any]:
|
||||||
"""extract the isni from the remote id data for the author"""
|
"""extract the isni from the remote id data for the author"""
|
||||||
if not blob or not isinstance(blob, dict):
|
if not blob or not isinstance(blob, dict):
|
||||||
return None
|
return None
|
||||||
return blob.get(field_name)
|
return blob.get(field_name)
|
||||||
|
|
||||||
|
|
||||||
def get_wikipedia_link(links):
|
def get_wikipedia_link(links: list[Any]) -> Optional[str]:
|
||||||
"""extract wikipedia links"""
|
"""extract wikipedia links"""
|
||||||
if not isinstance(links, list):
|
if not isinstance(links, list):
|
||||||
return None
|
return None
|
||||||
|
@ -272,7 +289,7 @@ def get_wikipedia_link(links):
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def get_inventaire_id(links):
|
def get_inventaire_id(links: list[Any]) -> Optional[str]:
|
||||||
"""extract and format inventaire ids"""
|
"""extract and format inventaire ids"""
|
||||||
if not isinstance(links, list):
|
if not isinstance(links, list):
|
||||||
return None
|
return None
|
||||||
|
@ -282,11 +299,13 @@ def get_inventaire_id(links):
|
||||||
continue
|
continue
|
||||||
if link.get("title") == "inventaire.io":
|
if link.get("title") == "inventaire.io":
|
||||||
iv_link = link.get("url")
|
iv_link = link.get("url")
|
||||||
|
if not isinstance(iv_link, str):
|
||||||
|
return None
|
||||||
return iv_link.split("/")[-1]
|
return iv_link.split("/")[-1]
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
def pick_default_edition(options):
|
def pick_default_edition(options: list[JsonDict]) -> Optional[JsonDict]:
|
||||||
"""favor physical copies with covers in english"""
|
"""favor physical copies with covers in english"""
|
||||||
if not options:
|
if not options:
|
||||||
return None
|
return None
|
||||||
|
|
|
@ -3,8 +3,8 @@ from django.core.mail import EmailMultiAlternatives
|
||||||
from django.template.loader import get_template
|
from django.template.loader import get_template
|
||||||
|
|
||||||
from bookwyrm import models, settings
|
from bookwyrm import models, settings
|
||||||
from bookwyrm.tasks import app, HIGH
|
from bookwyrm.tasks import app, EMAIL
|
||||||
from bookwyrm.settings import DOMAIN
|
from bookwyrm.settings import DOMAIN, BASE_URL
|
||||||
|
|
||||||
|
|
||||||
def email_data():
|
def email_data():
|
||||||
|
@ -14,6 +14,7 @@ def email_data():
|
||||||
"site_name": site.name,
|
"site_name": site.name,
|
||||||
"logo": site.logo_small_url,
|
"logo": site.logo_small_url,
|
||||||
"domain": DOMAIN,
|
"domain": DOMAIN,
|
||||||
|
"base_url": BASE_URL,
|
||||||
"user": None,
|
"user": None,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -75,7 +76,7 @@ def format_email(email_name, data):
|
||||||
return (subject, html_content, text_content)
|
return (subject, html_content, text_content)
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=HIGH, ignore_result=True)
|
@app.task(queue=EMAIL)
|
||||||
def send_email(recipient, subject, html_content, text_content):
|
def send_email(recipient, subject, html_content, text_content):
|
||||||
"""use a task to send the email"""
|
"""use a task to send the email"""
|
||||||
email = EmailMultiAlternatives(
|
email = EmailMultiAlternatives(
|
||||||
|
|
|
@ -15,7 +15,7 @@ from .custom_form import CustomForm, StyledForm
|
||||||
# pylint: disable=missing-class-docstring
|
# pylint: disable=missing-class-docstring
|
||||||
class ExpiryWidget(widgets.Select):
|
class ExpiryWidget(widgets.Select):
|
||||||
def value_from_datadict(self, data, files, name):
|
def value_from_datadict(self, data, files, name):
|
||||||
"""human-readable exiration time buckets"""
|
"""human-readable expiration time buckets"""
|
||||||
selected_string = super().value_from_datadict(data, files, name)
|
selected_string = super().value_from_datadict(data, files, name)
|
||||||
|
|
||||||
if selected_string == "day":
|
if selected_string == "day":
|
||||||
|
|
|
@ -15,6 +15,7 @@ class AuthorForm(CustomForm):
|
||||||
"aliases",
|
"aliases",
|
||||||
"bio",
|
"bio",
|
||||||
"wikipedia_link",
|
"wikipedia_link",
|
||||||
|
"wikidata",
|
||||||
"website",
|
"website",
|
||||||
"born",
|
"born",
|
||||||
"died",
|
"died",
|
||||||
|
@ -32,6 +33,7 @@ class AuthorForm(CustomForm):
|
||||||
"wikipedia_link": forms.TextInput(
|
"wikipedia_link": forms.TextInput(
|
||||||
attrs={"aria-describedby": "desc_wikipedia_link"}
|
attrs={"aria-describedby": "desc_wikipedia_link"}
|
||||||
),
|
),
|
||||||
|
"wikidata": forms.TextInput(attrs={"aria-describedby": "desc_wikidata"}),
|
||||||
"website": forms.TextInput(attrs={"aria-describedby": "desc_website"}),
|
"website": forms.TextInput(attrs={"aria-describedby": "desc_website"}),
|
||||||
"born": forms.SelectDateWidget(attrs={"aria-describedby": "desc_born"}),
|
"born": forms.SelectDateWidget(attrs={"aria-describedby": "desc_born"}),
|
||||||
"died": forms.SelectDateWidget(attrs={"aria-describedby": "desc_died"}),
|
"died": forms.SelectDateWidget(attrs={"aria-describedby": "desc_died"}),
|
||||||
|
|
|
@ -1,8 +1,9 @@
|
||||||
""" using django model forms """
|
""" using django model forms """
|
||||||
from django import forms
|
from django import forms
|
||||||
|
|
||||||
|
from file_resubmit.widgets import ResubmitImageWidget
|
||||||
|
|
||||||
from bookwyrm import models
|
from bookwyrm import models
|
||||||
from bookwyrm.models.fields import ClearableFileInputWithWarning
|
|
||||||
from .custom_form import CustomForm
|
from .custom_form import CustomForm
|
||||||
from .widgets import ArrayWidget, SelectDateWidget, Select
|
from .widgets import ArrayWidget, SelectDateWidget, Select
|
||||||
|
|
||||||
|
@ -20,6 +21,7 @@ class EditionForm(CustomForm):
|
||||||
model = models.Edition
|
model = models.Edition
|
||||||
fields = [
|
fields = [
|
||||||
"title",
|
"title",
|
||||||
|
"sort_title",
|
||||||
"subtitle",
|
"subtitle",
|
||||||
"description",
|
"description",
|
||||||
"series",
|
"series",
|
||||||
|
@ -45,6 +47,9 @@ class EditionForm(CustomForm):
|
||||||
]
|
]
|
||||||
widgets = {
|
widgets = {
|
||||||
"title": forms.TextInput(attrs={"aria-describedby": "desc_title"}),
|
"title": forms.TextInput(attrs={"aria-describedby": "desc_title"}),
|
||||||
|
"sort_title": forms.TextInput(
|
||||||
|
attrs={"aria-describedby": "desc_sort_title"}
|
||||||
|
),
|
||||||
"subtitle": forms.TextInput(attrs={"aria-describedby": "desc_subtitle"}),
|
"subtitle": forms.TextInput(attrs={"aria-describedby": "desc_subtitle"}),
|
||||||
"description": forms.Textarea(
|
"description": forms.Textarea(
|
||||||
attrs={"aria-describedby": "desc_description"}
|
attrs={"aria-describedby": "desc_description"}
|
||||||
|
@ -66,9 +71,7 @@ class EditionForm(CustomForm):
|
||||||
"published_date": SelectDateWidget(
|
"published_date": SelectDateWidget(
|
||||||
attrs={"aria-describedby": "desc_published_date"}
|
attrs={"aria-describedby": "desc_published_date"}
|
||||||
),
|
),
|
||||||
"cover": ClearableFileInputWithWarning(
|
"cover": ResubmitImageWidget(attrs={"aria-describedby": "desc_cover"}),
|
||||||
attrs={"aria-describedby": "desc_cover"}
|
|
||||||
),
|
|
||||||
"physical_format": Select(
|
"physical_format": Select(
|
||||||
attrs={"aria-describedby": "desc_physical_format"}
|
attrs={"aria-describedby": "desc_physical_format"}
|
||||||
),
|
),
|
||||||
|
@ -107,6 +110,7 @@ class EditionFromWorkForm(CustomForm):
|
||||||
model = models.Work
|
model = models.Work
|
||||||
fields = [
|
fields = [
|
||||||
"title",
|
"title",
|
||||||
|
"sort_title",
|
||||||
"subtitle",
|
"subtitle",
|
||||||
"authors",
|
"authors",
|
||||||
"description",
|
"description",
|
||||||
|
|
|
@ -70,6 +70,22 @@ class DeleteUserForm(CustomForm):
|
||||||
fields = ["password"]
|
fields = ["password"]
|
||||||
|
|
||||||
|
|
||||||
|
class MoveUserForm(CustomForm):
|
||||||
|
target = forms.CharField(widget=forms.TextInput)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = models.User
|
||||||
|
fields = ["password"]
|
||||||
|
|
||||||
|
|
||||||
|
class AliasUserForm(CustomForm):
|
||||||
|
username = forms.CharField(widget=forms.TextInput)
|
||||||
|
|
||||||
|
class Meta:
|
||||||
|
model = models.User
|
||||||
|
fields = ["password"]
|
||||||
|
|
||||||
|
|
||||||
class ChangePasswordForm(CustomForm):
|
class ChangePasswordForm(CustomForm):
|
||||||
current_password = forms.CharField(widget=forms.PasswordInput)
|
current_password = forms.CharField(widget=forms.PasswordInput)
|
||||||
confirm_password = forms.CharField(widget=forms.PasswordInput)
|
confirm_password = forms.CharField(widget=forms.PasswordInput)
|
||||||
|
|
|
@ -25,6 +25,10 @@ class ImportForm(forms.Form):
|
||||||
csv_file = forms.FileField()
|
csv_file = forms.FileField()
|
||||||
|
|
||||||
|
|
||||||
|
class ImportUserForm(forms.Form):
|
||||||
|
archive_file = forms.FileField()
|
||||||
|
|
||||||
|
|
||||||
class ShelfForm(CustomForm):
|
class ShelfForm(CustomForm):
|
||||||
class Meta:
|
class Meta:
|
||||||
model = models.Shelf
|
model = models.Shelf
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
""" using django model forms """
|
""" using django model forms """
|
||||||
|
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
from django.utils.translation import gettext_lazy as _
|
from django.utils.translation import gettext_lazy as _
|
||||||
|
@ -25,7 +26,7 @@ class FileLinkForm(CustomForm):
|
||||||
url = cleaned_data.get("url")
|
url = cleaned_data.get("url")
|
||||||
filetype = cleaned_data.get("filetype")
|
filetype = cleaned_data.get("filetype")
|
||||||
book = cleaned_data.get("book")
|
book = cleaned_data.get("book")
|
||||||
domain = urlparse(url).netloc
|
domain = urlparse(url).hostname
|
||||||
if models.LinkDomain.objects.filter(domain=domain).exists():
|
if models.LinkDomain.objects.filter(domain=domain).exists():
|
||||||
status = models.LinkDomain.objects.get(domain=domain).status
|
status = models.LinkDomain.objects.get(domain=domain).status
|
||||||
if status == "blocked":
|
if status == "blocked":
|
||||||
|
@ -37,10 +38,9 @@ class FileLinkForm(CustomForm):
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
if (
|
if (
|
||||||
not self.instance
|
models.FileLink.objects.filter(url=url, book=book, filetype=filetype)
|
||||||
and models.FileLink.objects.filter(
|
.exclude(pk=self.instance)
|
||||||
url=url, book=book, filetype=filetype
|
.exists()
|
||||||
).exists()
|
|
||||||
):
|
):
|
||||||
# pylint: disable=line-too-long
|
# pylint: disable=line-too-long
|
||||||
self.add_error(
|
self.add_error(
|
||||||
|
|
|
@ -24,7 +24,7 @@ class SortListForm(forms.Form):
|
||||||
sort_by = ChoiceField(
|
sort_by = ChoiceField(
|
||||||
choices=(
|
choices=(
|
||||||
("order", _("List Order")),
|
("order", _("List Order")),
|
||||||
("title", _("Book Title")),
|
("sort_title", _("Book Title")),
|
||||||
("rating", _("Rating")),
|
("rating", _("Rating")),
|
||||||
),
|
),
|
||||||
label=_("Sort By"),
|
label=_("Sort By"),
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
""" import classes """
|
""" import classes """
|
||||||
|
|
||||||
from .importer import Importer
|
from .importer import Importer
|
||||||
|
from .bookwyrm_import import BookwyrmImporter
|
||||||
from .calibre_import import CalibreImporter
|
from .calibre_import import CalibreImporter
|
||||||
from .goodreads_import import GoodreadsImporter
|
from .goodreads_import import GoodreadsImporter
|
||||||
from .librarything_import import LibrarythingImporter
|
from .librarything_import import LibrarythingImporter
|
||||||
|
|
24
bookwyrm/importers/bookwyrm_import.py
Normal file
24
bookwyrm/importers/bookwyrm_import.py
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
"""Import data from Bookwyrm export files"""
|
||||||
|
from django.http import QueryDict
|
||||||
|
|
||||||
|
from bookwyrm.models import User
|
||||||
|
from bookwyrm.models.bookwyrm_import_job import BookwyrmImportJob
|
||||||
|
|
||||||
|
|
||||||
|
class BookwyrmImporter:
|
||||||
|
"""Import a Bookwyrm User export file.
|
||||||
|
This is kind of a combination of an importer and a connector.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# pylint: disable=no-self-use
|
||||||
|
def process_import(
|
||||||
|
self, user: User, archive_file: bytes, settings: QueryDict
|
||||||
|
) -> BookwyrmImportJob:
|
||||||
|
"""import user data from a Bookwyrm export file"""
|
||||||
|
|
||||||
|
required = [k for k in settings if settings.get(k) == "on"]
|
||||||
|
|
||||||
|
job = BookwyrmImportJob.objects.create(
|
||||||
|
user=user, archive_file=archive_file, required=required
|
||||||
|
)
|
||||||
|
return job
|
|
@ -1,4 +1,6 @@
|
||||||
""" handle reading a csv from calibre """
|
""" handle reading a csv from calibre """
|
||||||
|
from typing import Any, Optional
|
||||||
|
|
||||||
from bookwyrm.models import Shelf
|
from bookwyrm.models import Shelf
|
||||||
|
|
||||||
from . import Importer
|
from . import Importer
|
||||||
|
@ -9,20 +11,15 @@ class CalibreImporter(Importer):
|
||||||
|
|
||||||
service = "Calibre"
|
service = "Calibre"
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args: Any, **kwargs: Any):
|
||||||
# Add timestamp to row_mappings_guesses for date_added to avoid
|
# Add timestamp to row_mappings_guesses for date_added to avoid
|
||||||
# integrity error
|
# integrity error
|
||||||
row_mappings_guesses = []
|
self.row_mappings_guesses = [
|
||||||
|
(field, mapping + (["timestamp"] if field == "date_added" else []))
|
||||||
for field, mapping in self.row_mappings_guesses:
|
for field, mapping in self.row_mappings_guesses
|
||||||
if field in ("date_added",):
|
]
|
||||||
row_mappings_guesses.append((field, mapping + ["timestamp"]))
|
|
||||||
else:
|
|
||||||
row_mappings_guesses.append((field, mapping))
|
|
||||||
|
|
||||||
self.row_mappings_guesses = row_mappings_guesses
|
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
|
|
||||||
def get_shelf(self, normalized_row):
|
def get_shelf(self, normalized_row: dict[str, Optional[str]]) -> Optional[str]:
|
||||||
# Calibre export does not indicate which shelf to use. Use a default one for now
|
# Calibre export does not indicate which shelf to use. Use a default one for now
|
||||||
return Shelf.TO_READ
|
return Shelf.TO_READ
|
||||||
|
|
|
@ -1,8 +1,10 @@
|
||||||
""" handle reading a csv from an external service, defaults are from Goodreads """
|
""" handle reading a csv from an external service, defaults are from Goodreads """
|
||||||
import csv
|
import csv
|
||||||
from datetime import timedelta
|
from datetime import timedelta
|
||||||
|
from typing import Iterable, Optional
|
||||||
|
|
||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
from bookwyrm.models import ImportJob, ImportItem, SiteSettings
|
from bookwyrm.models import ImportJob, ImportItem, SiteSettings, User
|
||||||
|
|
||||||
|
|
||||||
class Importer:
|
class Importer:
|
||||||
|
@ -35,19 +37,26 @@ class Importer:
|
||||||
}
|
}
|
||||||
|
|
||||||
# pylint: disable=too-many-locals
|
# pylint: disable=too-many-locals
|
||||||
def create_job(self, user, csv_file, include_reviews, privacy):
|
def create_job(
|
||||||
|
self, user: User, csv_file: Iterable[str], include_reviews: bool, privacy: str
|
||||||
|
) -> ImportJob:
|
||||||
"""check over a csv and creates a database entry for the job"""
|
"""check over a csv and creates a database entry for the job"""
|
||||||
csv_reader = csv.DictReader(csv_file, delimiter=self.delimiter)
|
csv_reader = csv.DictReader(csv_file, delimiter=self.delimiter)
|
||||||
rows = list(csv_reader)
|
rows = list(csv_reader)
|
||||||
if len(rows) < 1:
|
if len(rows) < 1:
|
||||||
raise ValueError("CSV file is empty")
|
raise ValueError("CSV file is empty")
|
||||||
rows = enumerate(rows)
|
|
||||||
|
mappings = (
|
||||||
|
self.create_row_mappings(list(fieldnames))
|
||||||
|
if (fieldnames := csv_reader.fieldnames)
|
||||||
|
else {}
|
||||||
|
)
|
||||||
|
|
||||||
job = ImportJob.objects.create(
|
job = ImportJob.objects.create(
|
||||||
user=user,
|
user=user,
|
||||||
include_reviews=include_reviews,
|
include_reviews=include_reviews,
|
||||||
privacy=privacy,
|
privacy=privacy,
|
||||||
mappings=self.create_row_mappings(csv_reader.fieldnames),
|
mappings=mappings,
|
||||||
source=self.service,
|
source=self.service,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -55,16 +64,20 @@ class Importer:
|
||||||
if enforce_limit and allowed_imports <= 0:
|
if enforce_limit and allowed_imports <= 0:
|
||||||
job.complete_job()
|
job.complete_job()
|
||||||
return job
|
return job
|
||||||
for index, entry in rows:
|
for index, entry in enumerate(rows):
|
||||||
if enforce_limit and index >= allowed_imports:
|
if enforce_limit and index >= allowed_imports:
|
||||||
break
|
break
|
||||||
self.create_item(job, index, entry)
|
self.create_item(job, index, entry)
|
||||||
return job
|
return job
|
||||||
|
|
||||||
def update_legacy_job(self, job):
|
def update_legacy_job(self, job: ImportJob) -> None:
|
||||||
"""patch up a job that was in the old format"""
|
"""patch up a job that was in the old format"""
|
||||||
items = job.items
|
items = job.items
|
||||||
headers = list(items.first().data.keys())
|
first_item = items.first()
|
||||||
|
if first_item is None:
|
||||||
|
return
|
||||||
|
|
||||||
|
headers = list(first_item.data.keys())
|
||||||
job.mappings = self.create_row_mappings(headers)
|
job.mappings = self.create_row_mappings(headers)
|
||||||
job.updated_date = timezone.now()
|
job.updated_date = timezone.now()
|
||||||
job.save()
|
job.save()
|
||||||
|
@ -75,24 +88,24 @@ class Importer:
|
||||||
item.normalized_data = normalized
|
item.normalized_data = normalized
|
||||||
item.save()
|
item.save()
|
||||||
|
|
||||||
def create_row_mappings(self, headers):
|
def create_row_mappings(self, headers: list[str]) -> dict[str, Optional[str]]:
|
||||||
"""guess what the headers mean"""
|
"""guess what the headers mean"""
|
||||||
mappings = {}
|
mappings = {}
|
||||||
for (key, guesses) in self.row_mappings_guesses:
|
for (key, guesses) in self.row_mappings_guesses:
|
||||||
value = [h for h in headers if h.lower() in guesses]
|
values = [h for h in headers if h.lower() in guesses]
|
||||||
value = value[0] if len(value) else None
|
value = values[0] if len(values) else None
|
||||||
if value:
|
if value:
|
||||||
headers.remove(value)
|
headers.remove(value)
|
||||||
mappings[key] = value
|
mappings[key] = value
|
||||||
return mappings
|
return mappings
|
||||||
|
|
||||||
def create_item(self, job, index, data):
|
def create_item(self, job: ImportJob, index: int, data: dict[str, str]) -> None:
|
||||||
"""creates and saves an import item"""
|
"""creates and saves an import item"""
|
||||||
normalized = self.normalize_row(data, job.mappings)
|
normalized = self.normalize_row(data, job.mappings)
|
||||||
normalized["shelf"] = self.get_shelf(normalized)
|
normalized["shelf"] = self.get_shelf(normalized)
|
||||||
ImportItem(job=job, index=index, data=data, normalized_data=normalized).save()
|
ImportItem(job=job, index=index, data=data, normalized_data=normalized).save()
|
||||||
|
|
||||||
def get_shelf(self, normalized_row):
|
def get_shelf(self, normalized_row: dict[str, Optional[str]]) -> Optional[str]:
|
||||||
"""determine which shelf to use"""
|
"""determine which shelf to use"""
|
||||||
shelf_name = normalized_row.get("shelf")
|
shelf_name = normalized_row.get("shelf")
|
||||||
if not shelf_name:
|
if not shelf_name:
|
||||||
|
@ -103,11 +116,15 @@ class Importer:
|
||||||
]
|
]
|
||||||
return shelf[0] if shelf else None
|
return shelf[0] if shelf else None
|
||||||
|
|
||||||
def normalize_row(self, entry, mappings): # pylint: disable=no-self-use
|
# pylint: disable=no-self-use
|
||||||
|
def normalize_row(
|
||||||
|
self, entry: dict[str, str], mappings: dict[str, Optional[str]]
|
||||||
|
) -> dict[str, Optional[str]]:
|
||||||
"""use the dataclass to create the formatted row of data"""
|
"""use the dataclass to create the formatted row of data"""
|
||||||
return {k: entry.get(v) for k, v in mappings.items()}
|
return {k: entry.get(v) if v else None for k, v in mappings.items()}
|
||||||
|
|
||||||
def get_import_limit(self, user): # pylint: disable=no-self-use
|
# pylint: disable=no-self-use
|
||||||
|
def get_import_limit(self, user: User) -> tuple[int, int]:
|
||||||
"""check if import limit is set and return how many imports are left"""
|
"""check if import limit is set and return how many imports are left"""
|
||||||
site_settings = SiteSettings.objects.get()
|
site_settings = SiteSettings.objects.get()
|
||||||
import_size_limit = site_settings.import_size_limit
|
import_size_limit = site_settings.import_size_limit
|
||||||
|
@ -125,7 +142,9 @@ class Importer:
|
||||||
allowed_imports = import_size_limit - imported_books
|
allowed_imports = import_size_limit - imported_books
|
||||||
return enforce_limit, allowed_imports
|
return enforce_limit, allowed_imports
|
||||||
|
|
||||||
def create_retry_job(self, user, original_job, items):
|
def create_retry_job(
|
||||||
|
self, user: User, original_job: ImportJob, items: list[ImportItem]
|
||||||
|
) -> ImportJob:
|
||||||
"""retry items that didn't import"""
|
"""retry items that didn't import"""
|
||||||
job = ImportJob.objects.create(
|
job = ImportJob.objects.create(
|
||||||
user=user,
|
user=user,
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
""" handle reading a tsv from librarything """
|
""" handle reading a tsv from librarything """
|
||||||
import re
|
import re
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
from bookwyrm.models import Shelf
|
from bookwyrm.models import Shelf
|
||||||
|
|
||||||
from . import Importer
|
from . import Importer
|
||||||
|
|
||||||
|
|
||||||
|
def _remove_brackets(value: Optional[str]) -> Optional[str]:
|
||||||
|
return re.sub(r"\[|\]", "", value) if value else None
|
||||||
|
|
||||||
|
|
||||||
class LibrarythingImporter(Importer):
|
class LibrarythingImporter(Importer):
|
||||||
"""csv downloads from librarything"""
|
"""csv downloads from librarything"""
|
||||||
|
|
||||||
|
@ -13,16 +18,19 @@ class LibrarythingImporter(Importer):
|
||||||
delimiter = "\t"
|
delimiter = "\t"
|
||||||
encoding = "ISO-8859-1"
|
encoding = "ISO-8859-1"
|
||||||
|
|
||||||
def normalize_row(self, entry, mappings): # pylint: disable=no-self-use
|
def normalize_row(
|
||||||
|
self, entry: dict[str, str], mappings: dict[str, Optional[str]]
|
||||||
|
) -> dict[str, Optional[str]]: # pylint: disable=no-self-use
|
||||||
"""use the dataclass to create the formatted row of data"""
|
"""use the dataclass to create the formatted row of data"""
|
||||||
remove_brackets = lambda v: re.sub(r"\[|\]", "", v) if v else None
|
normalized = {
|
||||||
normalized = {k: remove_brackets(entry.get(v)) for k, v in mappings.items()}
|
k: _remove_brackets(entry.get(v) if v else None)
|
||||||
isbn_13 = normalized.get("isbn_13")
|
for k, v in mappings.items()
|
||||||
isbn_13 = isbn_13.split(", ") if isbn_13 else []
|
}
|
||||||
normalized["isbn_13"] = isbn_13[1] if len(isbn_13) > 0 else None
|
isbn_13 = value.split(", ") if (value := normalized.get("isbn_13")) else []
|
||||||
|
normalized["isbn_13"] = isbn_13[1] if len(isbn_13) > 1 else None
|
||||||
return normalized
|
return normalized
|
||||||
|
|
||||||
def get_shelf(self, normalized_row):
|
def get_shelf(self, normalized_row: dict[str, Optional[str]]) -> Optional[str]:
|
||||||
if normalized_row["date_finished"]:
|
if normalized_row["date_finished"]:
|
||||||
return Shelf.READ_FINISHED
|
return Shelf.READ_FINISHED
|
||||||
if normalized_row["date_started"]:
|
if normalized_row["date_started"]:
|
||||||
|
|
|
@ -1,4 +1,6 @@
|
||||||
""" handle reading a csv from openlibrary"""
|
""" handle reading a csv from openlibrary"""
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
from . import Importer
|
from . import Importer
|
||||||
|
|
||||||
|
|
||||||
|
@ -7,7 +9,7 @@ class OpenLibraryImporter(Importer):
|
||||||
|
|
||||||
service = "OpenLibrary"
|
service = "OpenLibrary"
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args: Any, **kwargs: Any):
|
||||||
self.row_mappings_guesses.append(("openlibrary_key", ["edition id"]))
|
self.row_mappings_guesses.append(("openlibrary_key", ["edition id"]))
|
||||||
self.row_mappings_guesses.append(("openlibrary_work_key", ["work id"]))
|
self.row_mappings_guesses.append(("openlibrary_work_key", ["work id"]))
|
||||||
super().__init__(*args, **kwargs)
|
super().__init__(*args, **kwargs)
|
||||||
|
|
7904
bookwyrm/isbn/RangeMessage.xml
Normal file
7904
bookwyrm/isbn/RangeMessage.xml
Normal file
File diff suppressed because it is too large
Load diff
0
bookwyrm/isbn/__init__.py
Normal file
0
bookwyrm/isbn/__init__.py
Normal file
128
bookwyrm/isbn/isbn.py
Normal file
128
bookwyrm/isbn/isbn.py
Normal file
|
@ -0,0 +1,128 @@
|
||||||
|
""" Use the range message from isbn-international to hyphenate ISBNs """
|
||||||
|
import os
|
||||||
|
from typing import Optional
|
||||||
|
from xml.etree import ElementTree
|
||||||
|
from xml.etree.ElementTree import Element
|
||||||
|
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from bookwyrm import settings
|
||||||
|
|
||||||
|
|
||||||
|
def _get_rules(element: Element) -> list[Element]:
|
||||||
|
if (rules_el := element.find("Rules")) is not None:
|
||||||
|
return rules_el.findall("Rule")
|
||||||
|
return []
|
||||||
|
|
||||||
|
|
||||||
|
class IsbnHyphenator:
|
||||||
|
"""Class to manage the range message xml file and use it to hyphenate ISBNs"""
|
||||||
|
|
||||||
|
__range_message_url = "https://www.isbn-international.org/export_rangemessage.xml"
|
||||||
|
__range_file_path = os.path.join(
|
||||||
|
settings.BASE_DIR, "bookwyrm", "isbn", "RangeMessage.xml"
|
||||||
|
)
|
||||||
|
__element_tree = None
|
||||||
|
|
||||||
|
def update_range_message(self) -> None:
|
||||||
|
"""Download the range message xml file and save it locally"""
|
||||||
|
response = requests.get(self.__range_message_url, timeout=15)
|
||||||
|
with open(self.__range_file_path, "w", encoding="utf-8") as file:
|
||||||
|
file.write(response.text)
|
||||||
|
self.__element_tree = None
|
||||||
|
|
||||||
|
def hyphenate(self, isbn_13: Optional[str]) -> Optional[str]:
|
||||||
|
"""hyphenate the given ISBN-13 number using the range message"""
|
||||||
|
if isbn_13 is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if self.__element_tree is None:
|
||||||
|
self.__element_tree = ElementTree.parse(self.__range_file_path)
|
||||||
|
|
||||||
|
gs1_prefix = isbn_13[:3]
|
||||||
|
try:
|
||||||
|
reg_group = self.__find_reg_group(isbn_13, gs1_prefix)
|
||||||
|
except ValueError:
|
||||||
|
# if the reg groups are invalid, just return the original isbn
|
||||||
|
return isbn_13
|
||||||
|
|
||||||
|
if reg_group is None:
|
||||||
|
return isbn_13 # failed to hyphenate
|
||||||
|
|
||||||
|
registrant = self.__find_registrant(isbn_13, gs1_prefix, reg_group)
|
||||||
|
if registrant is None:
|
||||||
|
return isbn_13 # failed to hyphenate
|
||||||
|
|
||||||
|
publication = isbn_13[len(gs1_prefix) + len(reg_group) + len(registrant) : -1]
|
||||||
|
check_digit = isbn_13[-1:]
|
||||||
|
return "-".join((gs1_prefix, reg_group, registrant, publication, check_digit))
|
||||||
|
|
||||||
|
def __find_reg_group(self, isbn_13: str, gs1_prefix: str) -> Optional[str]:
|
||||||
|
if self.__element_tree is None:
|
||||||
|
self.__element_tree = ElementTree.parse(self.__range_file_path)
|
||||||
|
|
||||||
|
ucc_prefixes_el = self.__element_tree.find("EAN.UCCPrefixes")
|
||||||
|
if ucc_prefixes_el is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
for ean_ucc_el in ucc_prefixes_el.findall("EAN.UCC"):
|
||||||
|
if (
|
||||||
|
prefix_el := ean_ucc_el.find("Prefix")
|
||||||
|
) is not None and prefix_el.text == gs1_prefix:
|
||||||
|
for rule_el in _get_rules(ean_ucc_el):
|
||||||
|
length_el = rule_el.find("Length")
|
||||||
|
if length_el is None:
|
||||||
|
continue
|
||||||
|
length = int(text) if (text := length_el.text) else 0
|
||||||
|
if length == 0:
|
||||||
|
continue
|
||||||
|
|
||||||
|
range_el = rule_el.find("Range")
|
||||||
|
if range_el is None or range_el.text is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
reg_grp_range = [int(x[:length]) for x in range_el.text.split("-")]
|
||||||
|
reg_group = isbn_13[len(gs1_prefix) : len(gs1_prefix) + length]
|
||||||
|
if reg_grp_range[0] <= int(reg_group) <= reg_grp_range[1]:
|
||||||
|
return reg_group
|
||||||
|
return None
|
||||||
|
return None
|
||||||
|
|
||||||
|
def __find_registrant(
|
||||||
|
self, isbn_13: str, gs1_prefix: str, reg_group: str
|
||||||
|
) -> Optional[str]:
|
||||||
|
from_ind = len(gs1_prefix) + len(reg_group)
|
||||||
|
|
||||||
|
if self.__element_tree is None:
|
||||||
|
self.__element_tree = ElementTree.parse(self.__range_file_path)
|
||||||
|
|
||||||
|
reg_groups_el = self.__element_tree.find("RegistrationGroups")
|
||||||
|
if reg_groups_el is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
for group_el in reg_groups_el.findall("Group"):
|
||||||
|
if (
|
||||||
|
prefix_el := group_el.find("Prefix")
|
||||||
|
) is not None and prefix_el.text == "-".join((gs1_prefix, reg_group)):
|
||||||
|
for rule_el in _get_rules(group_el):
|
||||||
|
length_el = rule_el.find("Length")
|
||||||
|
if length_el is None:
|
||||||
|
continue
|
||||||
|
length = int(text) if (text := length_el.text) else 0
|
||||||
|
if length == 0:
|
||||||
|
continue
|
||||||
|
|
||||||
|
range_el = rule_el.find("Range")
|
||||||
|
if range_el is None or range_el.text is None:
|
||||||
|
continue
|
||||||
|
registrant_range = [
|
||||||
|
int(x[:length]) for x in range_el.text.split("-")
|
||||||
|
]
|
||||||
|
registrant = isbn_13[from_ind : from_ind + length]
|
||||||
|
if registrant_range[0] <= int(registrant) <= registrant_range[1]:
|
||||||
|
return registrant
|
||||||
|
return None
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
hyphenator_singleton = IsbnHyphenator()
|
|
@ -5,7 +5,7 @@ from django.db.models import signals, Count, Q
|
||||||
|
|
||||||
from bookwyrm import models
|
from bookwyrm import models
|
||||||
from bookwyrm.redis_store import RedisStore
|
from bookwyrm.redis_store import RedisStore
|
||||||
from bookwyrm.tasks import app, MEDIUM, HIGH
|
from bookwyrm.tasks import app, LISTS
|
||||||
|
|
||||||
|
|
||||||
class ListsStream(RedisStore):
|
class ListsStream(RedisStore):
|
||||||
|
@ -24,8 +24,7 @@ class ListsStream(RedisStore):
|
||||||
|
|
||||||
def add_list(self, book_list):
|
def add_list(self, book_list):
|
||||||
"""add a list to users' feeds"""
|
"""add a list to users' feeds"""
|
||||||
# the pipeline contains all the add-to-stream activities
|
self.add_object_to_stores(book_list, self.get_stores_for_object(book_list))
|
||||||
self.add_object_to_related_stores(book_list)
|
|
||||||
|
|
||||||
def add_user_lists(self, viewer, user):
|
def add_user_lists(self, viewer, user):
|
||||||
"""add a user's lists to another user's feed"""
|
"""add a user's lists to another user's feed"""
|
||||||
|
@ -86,18 +85,19 @@ class ListsStream(RedisStore):
|
||||||
if group:
|
if group:
|
||||||
audience = audience.filter(
|
audience = audience.filter(
|
||||||
Q(id=book_list.user.id) # if the user is the list's owner
|
Q(id=book_list.user.id) # if the user is the list's owner
|
||||||
| Q(following=book_list.user) # if the user is following the pwmer
|
| Q(following=book_list.user) # if the user is following the owner
|
||||||
# if a user is in the group
|
# if a user is in the group
|
||||||
| Q(memberships__group__id=book_list.group.id)
|
| Q(memberships__group__id=book_list.group.id)
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
audience = audience.filter(
|
audience = audience.filter(
|
||||||
Q(id=book_list.user.id) # if the user is the list's owner
|
Q(id=book_list.user.id) # if the user is the list's owner
|
||||||
| Q(following=book_list.user) # if the user is following the pwmer
|
| Q(following=book_list.user) # if the user is following the owner
|
||||||
)
|
)
|
||||||
return audience.distinct()
|
return audience.distinct()
|
||||||
|
|
||||||
def get_stores_for_object(self, obj):
|
def get_stores_for_object(self, obj):
|
||||||
|
"""the stores that an object belongs in"""
|
||||||
return [self.stream_id(u) for u in self.get_audience(obj)]
|
return [self.stream_id(u) for u in self.get_audience(obj)]
|
||||||
|
|
||||||
def get_lists_for_user(self, user): # pylint: disable=no-self-use
|
def get_lists_for_user(self, user): # pylint: disable=no-self-use
|
||||||
|
@ -217,14 +217,14 @@ def add_list_on_account_create_command(user_id):
|
||||||
|
|
||||||
|
|
||||||
# ---- TASKS
|
# ---- TASKS
|
||||||
@app.task(queue=MEDIUM, ignore_result=True)
|
@app.task(queue=LISTS)
|
||||||
def populate_lists_task(user_id):
|
def populate_lists_task(user_id):
|
||||||
"""background task for populating an empty list stream"""
|
"""background task for populating an empty list stream"""
|
||||||
user = models.User.objects.get(id=user_id)
|
user = models.User.objects.get(id=user_id)
|
||||||
ListsStream().populate_lists(user)
|
ListsStream().populate_lists(user)
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=MEDIUM, ignore_result=True)
|
@app.task(queue=LISTS)
|
||||||
def remove_list_task(list_id, re_add=False):
|
def remove_list_task(list_id, re_add=False):
|
||||||
"""remove a list from any stream it might be in"""
|
"""remove a list from any stream it might be in"""
|
||||||
stores = models.User.objects.filter(local=True, is_active=True).values_list(
|
stores = models.User.objects.filter(local=True, is_active=True).values_list(
|
||||||
|
@ -233,20 +233,20 @@ def remove_list_task(list_id, re_add=False):
|
||||||
|
|
||||||
# delete for every store
|
# delete for every store
|
||||||
stores = [ListsStream().stream_id(idx) for idx in stores]
|
stores = [ListsStream().stream_id(idx) for idx in stores]
|
||||||
ListsStream().remove_object_from_related_stores(list_id, stores=stores)
|
ListsStream().remove_object_from_stores(list_id, stores)
|
||||||
|
|
||||||
if re_add:
|
if re_add:
|
||||||
add_list_task.delay(list_id)
|
add_list_task.delay(list_id)
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=HIGH, ignore_result=True)
|
@app.task(queue=LISTS)
|
||||||
def add_list_task(list_id):
|
def add_list_task(list_id):
|
||||||
"""add a list to any stream it should be in"""
|
"""add a list to any stream it should be in"""
|
||||||
book_list = models.List.objects.get(id=list_id)
|
book_list = models.List.objects.get(id=list_id)
|
||||||
ListsStream().add_list(book_list)
|
ListsStream().add_list(book_list)
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=MEDIUM, ignore_result=True)
|
@app.task(queue=LISTS)
|
||||||
def remove_user_lists_task(viewer_id, user_id, exclude_privacy=None):
|
def remove_user_lists_task(viewer_id, user_id, exclude_privacy=None):
|
||||||
"""remove all lists by a user from a viewer's stream"""
|
"""remove all lists by a user from a viewer's stream"""
|
||||||
viewer = models.User.objects.get(id=viewer_id)
|
viewer = models.User.objects.get(id=viewer_id)
|
||||||
|
@ -254,7 +254,7 @@ def remove_user_lists_task(viewer_id, user_id, exclude_privacy=None):
|
||||||
ListsStream().remove_user_lists(viewer, user, exclude_privacy=exclude_privacy)
|
ListsStream().remove_user_lists(viewer, user, exclude_privacy=exclude_privacy)
|
||||||
|
|
||||||
|
|
||||||
@app.task(queue=MEDIUM, ignore_result=True)
|
@app.task(queue=LISTS)
|
||||||
def add_user_lists_task(viewer_id, user_id):
|
def add_user_lists_task(viewer_id, user_id):
|
||||||
"""add all lists by a user to a viewer's stream"""
|
"""add all lists by a user to a viewer's stream"""
|
||||||
viewer = models.User.objects.get(id=viewer_id)
|
viewer = models.User.objects.get(id=viewer_id)
|
||||||
|
|
|
@ -1,44 +1,14 @@
|
||||||
""" PROCEED WITH CAUTION: uses deduplication fields to permanently
|
""" PROCEED WITH CAUTION: uses deduplication fields to permanently
|
||||||
merge book data objects """
|
merge book data objects """
|
||||||
|
|
||||||
from django.core.management.base import BaseCommand
|
from django.core.management.base import BaseCommand
|
||||||
from django.db.models import Count
|
from django.db.models import Count
|
||||||
from bookwyrm import models
|
from bookwyrm import models
|
||||||
|
|
||||||
|
|
||||||
def update_related(canonical, obj):
|
def dedupe_model(model, dry_run=False):
|
||||||
"""update all the models with fk to the object being removed"""
|
|
||||||
# move related models to canonical
|
|
||||||
related_models = [
|
|
||||||
(r.remote_field.name, r.related_model) for r in canonical._meta.related_objects
|
|
||||||
]
|
|
||||||
for (related_field, related_model) in related_models:
|
|
||||||
related_objs = related_model.objects.filter(**{related_field: obj})
|
|
||||||
for related_obj in related_objs:
|
|
||||||
print("replacing in", related_model.__name__, related_field, related_obj.id)
|
|
||||||
try:
|
|
||||||
setattr(related_obj, related_field, canonical)
|
|
||||||
related_obj.save()
|
|
||||||
except TypeError:
|
|
||||||
getattr(related_obj, related_field).add(canonical)
|
|
||||||
getattr(related_obj, related_field).remove(obj)
|
|
||||||
|
|
||||||
|
|
||||||
def copy_data(canonical, obj):
|
|
||||||
"""try to get the most data possible"""
|
|
||||||
for data_field in obj._meta.get_fields():
|
|
||||||
if not hasattr(data_field, "activitypub_field"):
|
|
||||||
continue
|
|
||||||
data_value = getattr(obj, data_field.name)
|
|
||||||
if not data_value:
|
|
||||||
continue
|
|
||||||
if not getattr(canonical, data_field.name):
|
|
||||||
print("setting data field", data_field.name, data_value)
|
|
||||||
setattr(canonical, data_field.name, data_value)
|
|
||||||
canonical.save()
|
|
||||||
|
|
||||||
|
|
||||||
def dedupe_model(model):
|
|
||||||
"""combine duplicate editions and update related models"""
|
"""combine duplicate editions and update related models"""
|
||||||
|
print(f"deduplicating {model.__name__}:")
|
||||||
fields = model._meta.get_fields()
|
fields = model._meta.get_fields()
|
||||||
dedupe_fields = [
|
dedupe_fields = [
|
||||||
f for f in fields if hasattr(f, "deduplication_field") and f.deduplication_field
|
f for f in fields if hasattr(f, "deduplication_field") and f.deduplication_field
|
||||||
|
@ -47,33 +17,42 @@ def dedupe_model(model):
|
||||||
dupes = (
|
dupes = (
|
||||||
model.objects.values(field.name)
|
model.objects.values(field.name)
|
||||||
.annotate(Count(field.name))
|
.annotate(Count(field.name))
|
||||||
.filter(**{"%s__count__gt" % field.name: 1})
|
.filter(**{f"{field.name}__count__gt": 1})
|
||||||
|
.exclude(**{field.name: ""})
|
||||||
|
.exclude(**{f"{field.name}__isnull": True})
|
||||||
)
|
)
|
||||||
|
|
||||||
for dupe in dupes:
|
for dupe in dupes:
|
||||||
value = dupe[field.name]
|
value = dupe[field.name]
|
||||||
if not value or value == "":
|
|
||||||
continue
|
|
||||||
print("----------")
|
print("----------")
|
||||||
print(dupe)
|
|
||||||
objs = model.objects.filter(**{field.name: value}).order_by("id")
|
objs = model.objects.filter(**{field.name: value}).order_by("id")
|
||||||
canonical = objs.first()
|
canonical = objs.first()
|
||||||
print("keeping", canonical.remote_id)
|
action = "would merge" if dry_run else "merging"
|
||||||
|
print(
|
||||||
|
f"{action} into {model.__name__} {canonical.remote_id} based on {field.name} {value}:"
|
||||||
|
)
|
||||||
for obj in objs[1:]:
|
for obj in objs[1:]:
|
||||||
print(obj.remote_id)
|
print(f"- {obj.remote_id}")
|
||||||
copy_data(canonical, obj)
|
absorbed_fields = obj.merge_into(canonical, dry_run=dry_run)
|
||||||
update_related(canonical, obj)
|
print(f" absorbed fields: {absorbed_fields}")
|
||||||
# remove the outdated entry
|
|
||||||
obj.delete()
|
|
||||||
|
|
||||||
|
|
||||||
class Command(BaseCommand):
|
class Command(BaseCommand):
|
||||||
"""dedplucate allllll the book data models"""
|
"""deduplicate allllll the book data models"""
|
||||||
|
|
||||||
help = "merges duplicate book data"
|
help = "merges duplicate book data"
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
"""add the arguments for this command"""
|
||||||
|
parser.add_argument(
|
||||||
|
"--dry_run",
|
||||||
|
action="store_true",
|
||||||
|
help="don't actually merge, only print what would happen",
|
||||||
|
)
|
||||||
|
|
||||||
# pylint: disable=no-self-use,unused-argument
|
# pylint: disable=no-self-use,unused-argument
|
||||||
def handle(self, *args, **options):
|
def handle(self, *args, **options):
|
||||||
"""run deudplications"""
|
"""run deduplications"""
|
||||||
dedupe_model(models.Edition)
|
dedupe_model(models.Edition, dry_run=options["dry_run"])
|
||||||
dedupe_model(models.Work)
|
dedupe_model(models.Work, dry_run=options["dry_run"])
|
||||||
dedupe_model(models.Author)
|
dedupe_model(models.Author, dry_run=options["dry_run"])
|
||||||
|
|
43
bookwyrm/management/commands/erase_deleted_user_data.py
Normal file
43
bookwyrm/management/commands/erase_deleted_user_data.py
Normal file
|
@ -0,0 +1,43 @@
|
||||||
|
""" Erase any data stored about deleted users """
|
||||||
|
import sys
|
||||||
|
from django.core.management.base import BaseCommand, CommandError
|
||||||
|
from bookwyrm import models
|
||||||
|
from bookwyrm.models.user import erase_user_data
|
||||||
|
|
||||||
|
# pylint: disable=missing-function-docstring
|
||||||
|
class Command(BaseCommand):
|
||||||
|
"""command-line options"""
|
||||||
|
|
||||||
|
help = "Remove Two Factor Authorisation from user"
|
||||||
|
|
||||||
|
def add_arguments(self, parser): # pylint: disable=no-self-use
|
||||||
|
parser.add_argument(
|
||||||
|
"--dryrun",
|
||||||
|
action="store_true",
|
||||||
|
help="Preview users to be cleared without altering the database",
|
||||||
|
)
|
||||||
|
|
||||||
|
def handle(self, *args, **options): # pylint: disable=unused-argument
|
||||||
|
|
||||||
|
# Check for anything fishy
|
||||||
|
bad_state = models.User.objects.filter(is_deleted=True, is_active=True)
|
||||||
|
if bad_state.exists():
|
||||||
|
raise CommandError(
|
||||||
|
f"{bad_state.count()} user(s) marked as both active and deleted"
|
||||||
|
)
|
||||||
|
|
||||||
|
deleted_users = models.User.objects.filter(is_deleted=True)
|
||||||
|
self.stdout.write(f"Found {deleted_users.count()} deleted users")
|
||||||
|
if options["dryrun"]:
|
||||||
|
self.stdout.write("\n".join(u.username for u in deleted_users[:5]))
|
||||||
|
if deleted_users.count() > 5:
|
||||||
|
self.stdout.write("... and more")
|
||||||
|
sys.exit()
|
||||||
|
|
||||||
|
self.stdout.write("Erasing user data:")
|
||||||
|
for user_id in deleted_users.values_list("id", flat=True):
|
||||||
|
erase_user_data.delay(user_id)
|
||||||
|
self.stdout.write(".", ending="")
|
||||||
|
|
||||||
|
self.stdout.write("")
|
||||||
|
self.stdout.write("Tasks created successfully")
|
|
@ -1,54 +0,0 @@
|
||||||
""" Get your admin code to allow install """
|
|
||||||
from django.core.management.base import BaseCommand
|
|
||||||
|
|
||||||
from bookwyrm import models
|
|
||||||
from bookwyrm.settings import VERSION
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-self-use
|
|
||||||
class Command(BaseCommand):
|
|
||||||
"""command-line options"""
|
|
||||||
|
|
||||||
help = "What version is this?"
|
|
||||||
|
|
||||||
def add_arguments(self, parser):
|
|
||||||
"""specify which function to run"""
|
|
||||||
parser.add_argument(
|
|
||||||
"--current",
|
|
||||||
action="store_true",
|
|
||||||
help="Version stored in database",
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"--target",
|
|
||||||
action="store_true",
|
|
||||||
help="Version stored in settings",
|
|
||||||
)
|
|
||||||
parser.add_argument(
|
|
||||||
"--update",
|
|
||||||
action="store_true",
|
|
||||||
help="Update database version",
|
|
||||||
)
|
|
||||||
|
|
||||||
# pylint: disable=unused-argument
|
|
||||||
def handle(self, *args, **options):
|
|
||||||
"""execute init"""
|
|
||||||
site = models.SiteSettings.objects.get()
|
|
||||||
current = site.version or "0.0.1"
|
|
||||||
target = VERSION
|
|
||||||
if options.get("current"):
|
|
||||||
print(current)
|
|
||||||
return
|
|
||||||
|
|
||||||
if options.get("target"):
|
|
||||||
print(target)
|
|
||||||
return
|
|
||||||
|
|
||||||
if options.get("update"):
|
|
||||||
site.version = target
|
|
||||||
site.save()
|
|
||||||
return
|
|
||||||
|
|
||||||
if current != target:
|
|
||||||
print(f"{current}/{target}")
|
|
||||||
else:
|
|
||||||
print(current)
|
|
12
bookwyrm/management/commands/merge_authors.py
Normal file
12
bookwyrm/management/commands/merge_authors.py
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
""" PROCEED WITH CAUTION: uses deduplication fields to permanently
|
||||||
|
merge author data objects """
|
||||||
|
from bookwyrm import models
|
||||||
|
from bookwyrm.management.merge_command import MergeCommand
|
||||||
|
|
||||||
|
|
||||||
|
class Command(MergeCommand):
|
||||||
|
"""merges two authors by ID"""
|
||||||
|
|
||||||
|
help = "merges specified authors into one"
|
||||||
|
|
||||||
|
MODEL = models.Author
|
12
bookwyrm/management/commands/merge_editions.py
Normal file
12
bookwyrm/management/commands/merge_editions.py
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
""" PROCEED WITH CAUTION: uses deduplication fields to permanently
|
||||||
|
merge edition data objects """
|
||||||
|
from bookwyrm import models
|
||||||
|
from bookwyrm.management.merge_command import MergeCommand
|
||||||
|
|
||||||
|
|
||||||
|
class Command(MergeCommand):
|
||||||
|
"""merges two editions by ID"""
|
||||||
|
|
||||||
|
help = "merges specified editions into one"
|
||||||
|
|
||||||
|
MODEL = models.Edition
|
12
bookwyrm/management/commands/merge_works.py
Normal file
12
bookwyrm/management/commands/merge_works.py
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
""" PROCEED WITH CAUTION: uses deduplication fields to permanently
|
||||||
|
merge work data objects """
|
||||||
|
from bookwyrm import models
|
||||||
|
from bookwyrm.management.merge_command import MergeCommand
|
||||||
|
|
||||||
|
|
||||||
|
class Command(MergeCommand):
|
||||||
|
"""merges two works by ID"""
|
||||||
|
|
||||||
|
help = "merges specified works into one"
|
||||||
|
|
||||||
|
MODEL = models.Work
|
|
@ -33,10 +33,10 @@ def remove_editions():
|
||||||
|
|
||||||
|
|
||||||
class Command(BaseCommand):
|
class Command(BaseCommand):
|
||||||
"""dedplucate allllll the book data models"""
|
"""deduplicate allllll the book data models"""
|
||||||
|
|
||||||
help = "merges duplicate book data"
|
help = "merges duplicate book data"
|
||||||
# pylint: disable=no-self-use,unused-argument
|
# pylint: disable=no-self-use,unused-argument
|
||||||
def handle(self, *args, **options):
|
def handle(self, *args, **options):
|
||||||
"""run deudplications"""
|
"""run deduplications"""
|
||||||
remove_editions()
|
remove_editions()
|
||||||
|
|
21
bookwyrm/management/commands/repair_editions.py
Normal file
21
bookwyrm/management/commands/repair_editions.py
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
""" Repair editions with missing works """
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
from bookwyrm import models
|
||||||
|
|
||||||
|
|
||||||
|
class Command(BaseCommand):
|
||||||
|
"""command-line options"""
|
||||||
|
|
||||||
|
help = "Repairs an edition that is in a broken state"
|
||||||
|
|
||||||
|
# pylint: disable=unused-argument
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
"""Find and repair broken editions"""
|
||||||
|
# Find broken editions
|
||||||
|
editions = models.Edition.objects.filter(parent_work__isnull=True)
|
||||||
|
self.stdout.write(f"Repairing {editions.count()} edition(s):")
|
||||||
|
|
||||||
|
# Do repair
|
||||||
|
for edition in editions:
|
||||||
|
edition.repair()
|
||||||
|
self.stdout.write(".", ending="")
|
|
@ -9,7 +9,7 @@ class Command(BaseCommand):
|
||||||
|
|
||||||
# pylint: disable=unused-argument
|
# pylint: disable=unused-argument
|
||||||
def handle(self, *args, **options):
|
def handle(self, *args, **options):
|
||||||
"""reveoke nonessential low priority tasks"""
|
"""revoke nonessential low priority tasks"""
|
||||||
types = [
|
types = [
|
||||||
"bookwyrm.preview_images.generate_edition_preview_image_task",
|
"bookwyrm.preview_images.generate_edition_preview_image_task",
|
||||||
"bookwyrm.preview_images.generate_user_preview_image_task",
|
"bookwyrm.preview_images.generate_user_preview_image_task",
|
||||||
|
|
37
bookwyrm/management/merge_command.py
Normal file
37
bookwyrm/management/merge_command.py
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
from django.core.management.base import BaseCommand
|
||||||
|
|
||||||
|
|
||||||
|
class MergeCommand(BaseCommand):
|
||||||
|
"""base class for merge commands"""
|
||||||
|
|
||||||
|
def add_arguments(self, parser):
|
||||||
|
"""add the arguments for this command"""
|
||||||
|
parser.add_argument("--canonical", type=int, required=True)
|
||||||
|
parser.add_argument("--other", type=int, required=True)
|
||||||
|
parser.add_argument(
|
||||||
|
"--dry_run",
|
||||||
|
action="store_true",
|
||||||
|
help="don't actually merge, only print what would happen",
|
||||||
|
)
|
||||||
|
|
||||||
|
# pylint: disable=no-self-use,unused-argument
|
||||||
|
def handle(self, *args, **options):
|
||||||
|
"""merge the two objects"""
|
||||||
|
model = self.MODEL
|
||||||
|
|
||||||
|
try:
|
||||||
|
canonical = model.objects.get(id=options["canonical"])
|
||||||
|
except model.DoesNotExist:
|
||||||
|
print("canonical book doesn’t exist!")
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
other = model.objects.get(id=options["other"])
|
||||||
|
except model.DoesNotExist:
|
||||||
|
print("other book doesn’t exist!")
|
||||||
|
return
|
||||||
|
|
||||||
|
absorbed_fields = other.merge_into(canonical, dry_run=options["dry_run"])
|
||||||
|
|
||||||
|
action = "would be" if options["dry_run"] else "has been"
|
||||||
|
print(f"{other.remote_id} {action} merged into {canonical.remote_id}")
|
||||||
|
print(f"absorbed fields: {absorbed_fields}")
|
|
@ -1,3 +1,4 @@
|
||||||
""" look at all this nice middleware! """
|
""" look at all this nice middleware! """
|
||||||
from .timezone_middleware import TimezoneMiddleware
|
from .timezone_middleware import TimezoneMiddleware
|
||||||
from .ip_middleware import IPBlocklistMiddleware
|
from .ip_middleware import IPBlocklistMiddleware
|
||||||
|
from .file_too_big import FileTooBig
|
||||||
|
|
30
bookwyrm/middleware/file_too_big.py
Normal file
30
bookwyrm/middleware/file_too_big.py
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
"""Middleware to display a custom 413 error page"""
|
||||||
|
|
||||||
|
from django.http import HttpResponse
|
||||||
|
from django.shortcuts import render
|
||||||
|
from django.core.exceptions import RequestDataTooBig
|
||||||
|
|
||||||
|
|
||||||
|
class FileTooBig:
|
||||||
|
"""Middleware to display a custom page when a
|
||||||
|
RequestDataTooBig exception is thrown"""
|
||||||
|
|
||||||
|
def __init__(self, get_response):
|
||||||
|
"""boilerplate __init__ from Django docs"""
|
||||||
|
|
||||||
|
self.get_response = get_response
|
||||||
|
|
||||||
|
def __call__(self, request):
|
||||||
|
"""If RequestDataTooBig is thrown, render the 413 error page"""
|
||||||
|
|
||||||
|
try:
|
||||||
|
body = request.body # pylint: disable=unused-variable
|
||||||
|
|
||||||
|
except RequestDataTooBig:
|
||||||
|
|
||||||
|
rendered = render(request, "413.html")
|
||||||
|
response = HttpResponse(rendered)
|
||||||
|
return response
|
||||||
|
|
||||||
|
response = self.get_response(request)
|
||||||
|
return response
|
|
@ -1,5 +1,5 @@
|
||||||
""" Makes the app aware of the users timezone """
|
""" Makes the app aware of the users timezone """
|
||||||
import pytz
|
import zoneinfo
|
||||||
|
|
||||||
from django.utils import timezone
|
from django.utils import timezone
|
||||||
|
|
||||||
|
@ -12,9 +12,7 @@ class TimezoneMiddleware:
|
||||||
|
|
||||||
def __call__(self, request):
|
def __call__(self, request):
|
||||||
if request.user.is_authenticated:
|
if request.user.is_authenticated:
|
||||||
timezone.activate(pytz.timezone(request.user.preferred_timezone))
|
timezone.activate(zoneinfo.ZoneInfo(request.user.preferred_timezone))
|
||||||
else:
|
else:
|
||||||
timezone.activate(pytz.utc)
|
timezone.deactivate()
|
||||||
response = self.get_response(request)
|
return self.get_response(request)
|
||||||
timezone.deactivate()
|
|
||||||
return response
|
|
||||||
|
|
|
@ -1467,7 +1467,7 @@ class Migration(migrations.Migration):
|
||||||
(
|
(
|
||||||
"expiry",
|
"expiry",
|
||||||
models.DateTimeField(
|
models.DateTimeField(
|
||||||
default=bookwyrm.models.site.get_passowrd_reset_expiry
|
default=bookwyrm.models.site.get_password_reset_expiry
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
(
|
(
|
||||||
|
|
|
@ -6,7 +6,7 @@ from bookwyrm.connectors.abstract_connector import infer_physical_format
|
||||||
|
|
||||||
|
|
||||||
def infer_format(app_registry, schema_editor):
|
def infer_format(app_registry, schema_editor):
|
||||||
"""set the new phsyical format field based on existing format data"""
|
"""set the new physical format field based on existing format data"""
|
||||||
db_alias = schema_editor.connection.alias
|
db_alias = schema_editor.connection.alias
|
||||||
|
|
||||||
editions = (
|
editions = (
|
||||||
|
|
|
@ -5,7 +5,7 @@ from bookwyrm.settings import DOMAIN
|
||||||
|
|
||||||
|
|
||||||
def remove_self_connector(app_registry, schema_editor):
|
def remove_self_connector(app_registry, schema_editor):
|
||||||
"""set the new phsyical format field based on existing format data"""
|
"""set the new physical format field based on existing format data"""
|
||||||
db_alias = schema_editor.connection.alias
|
db_alias = schema_editor.connection.alias
|
||||||
app_registry.get_model("bookwyrm", "Connector").objects.using(db_alias).filter(
|
app_registry.get_model("bookwyrm", "Connector").objects.using(db_alias).filter(
|
||||||
connector_file="self_connector"
|
connector_file="self_connector"
|
||||||
|
|
|
@ -10,6 +10,7 @@ class Migration(migrations.Migration):
|
||||||
]
|
]
|
||||||
|
|
||||||
operations = [
|
operations = [
|
||||||
|
# The new timezones are "Factory" and "localtime"
|
||||||
migrations.AlterField(
|
migrations.AlterField(
|
||||||
model_name="user",
|
model_name="user",
|
||||||
name="preferred_timezone",
|
name="preferred_timezone",
|
||||||
|
|
51
bookwyrm/migrations/0179_populate_sort_title.py
Normal file
51
bookwyrm/migrations/0179_populate_sort_title.py
Normal file
|
@ -0,0 +1,51 @@
|
||||||
|
import re
|
||||||
|
from itertools import chain
|
||||||
|
|
||||||
|
from django.db import migrations, transaction
|
||||||
|
from django.db.models import Q
|
||||||
|
|
||||||
|
from bookwyrm.settings import LANGUAGE_ARTICLES
|
||||||
|
|
||||||
|
|
||||||
|
def set_sort_title(edition):
|
||||||
|
articles = chain(
|
||||||
|
*(LANGUAGE_ARTICLES.get(language, ()) for language in tuple(edition.languages))
|
||||||
|
)
|
||||||
|
edition.sort_title = re.sub(
|
||||||
|
f'^{" |^".join(articles)} ', "", str(edition.title).lower()
|
||||||
|
)
|
||||||
|
return edition
|
||||||
|
|
||||||
|
|
||||||
|
@transaction.atomic
|
||||||
|
def populate_sort_title(apps, schema_editor):
|
||||||
|
Edition = apps.get_model("bookwyrm", "Edition")
|
||||||
|
db_alias = schema_editor.connection.alias
|
||||||
|
editions_wo_sort_title = Edition.objects.using(db_alias).filter(
|
||||||
|
Q(sort_title__isnull=True) | Q(sort_title__exact="")
|
||||||
|
)
|
||||||
|
batch_size = 1000
|
||||||
|
start = 0
|
||||||
|
end = batch_size
|
||||||
|
while True:
|
||||||
|
batch = editions_wo_sort_title[start:end]
|
||||||
|
if not batch.exists():
|
||||||
|
break
|
||||||
|
Edition.objects.bulk_update(
|
||||||
|
(set_sort_title(edition) for edition in batch), ["sort_title"]
|
||||||
|
)
|
||||||
|
start = end
|
||||||
|
end += batch_size
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0178_auto_20230328_2132"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RunPython(
|
||||||
|
populate_sort_title, reverse_code=migrations.RunPython.noop
|
||||||
|
),
|
||||||
|
]
|
36
bookwyrm/migrations/0179_reportcomment_comment_type.py
Normal file
36
bookwyrm/migrations/0179_reportcomment_comment_type.py
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
# Generated by Django 3.2.18 on 2023-05-16 16:02
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0178_auto_20230328_2132"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="reportcomment",
|
||||||
|
name="action_type",
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
("comment", "Comment"),
|
||||||
|
("resolve", "Resolved report"),
|
||||||
|
("reopen", "Re-opened report"),
|
||||||
|
("message_reporter", "Messaged reporter"),
|
||||||
|
("message_offender", "Messaged reported user"),
|
||||||
|
("user_suspension", "Suspended user"),
|
||||||
|
("user_unsuspension", "Un-suspended user"),
|
||||||
|
("user_perms", "Changed user permission level"),
|
||||||
|
("user_deletion", "Deleted user account"),
|
||||||
|
("block_domain", "Blocked domain"),
|
||||||
|
("approve_domain", "Approved domain"),
|
||||||
|
("delete_item", "Deleted item"),
|
||||||
|
],
|
||||||
|
default="comment",
|
||||||
|
max_length=20,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.RenameModel("ReportComment", "ReportAction"),
|
||||||
|
]
|
17
bookwyrm/migrations/0180_alter_reportaction_options.py
Normal file
17
bookwyrm/migrations/0180_alter_reportaction_options.py
Normal file
|
@ -0,0 +1,17 @@
|
||||||
|
# Generated by Django 3.2.18 on 2023-06-21 22:01
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0179_reportcomment_comment_type"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterModelOptions(
|
||||||
|
name="reportaction",
|
||||||
|
options={"ordering": ("created_date",)},
|
||||||
|
),
|
||||||
|
]
|
44
bookwyrm/migrations/0180_alter_user_preferred_language.py
Normal file
44
bookwyrm/migrations/0180_alter_user_preferred_language.py
Normal file
|
@ -0,0 +1,44 @@
|
||||||
|
# Generated by Django 3.2.19 on 2023-07-23 19:33
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0179_populate_sort_title"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="user",
|
||||||
|
name="preferred_language",
|
||||||
|
field=models.CharField(
|
||||||
|
blank=True,
|
||||||
|
choices=[
|
||||||
|
("en-us", "English"),
|
||||||
|
("ca-es", "Català (Catalan)"),
|
||||||
|
("de-de", "Deutsch (German)"),
|
||||||
|
("eo-uy", "Esperanto (Esperanto)"),
|
||||||
|
("es-es", "Español (Spanish)"),
|
||||||
|
("eu-es", "Euskara (Basque)"),
|
||||||
|
("gl-es", "Galego (Galician)"),
|
||||||
|
("it-it", "Italiano (Italian)"),
|
||||||
|
("fi-fi", "Suomi (Finnish)"),
|
||||||
|
("fr-fr", "Français (French)"),
|
||||||
|
("lt-lt", "Lietuvių (Lithuanian)"),
|
||||||
|
("nl-nl", "Nederlands (Dutch)"),
|
||||||
|
("no-no", "Norsk (Norwegian)"),
|
||||||
|
("pl-pl", "Polski (Polish)"),
|
||||||
|
("pt-br", "Português do Brasil (Brazilian Portuguese)"),
|
||||||
|
("pt-pt", "Português Europeu (European Portuguese)"),
|
||||||
|
("ro-ro", "Română (Romanian)"),
|
||||||
|
("sv-se", "Svenska (Swedish)"),
|
||||||
|
("zh-hans", "简体中文 (Simplified Chinese)"),
|
||||||
|
("zh-hant", "繁體中文 (Traditional Chinese)"),
|
||||||
|
],
|
||||||
|
max_length=255,
|
||||||
|
null=True,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
13
bookwyrm/migrations/0181_merge_20230806_2302.py
Normal file
13
bookwyrm/migrations/0181_merge_20230806_2302.py
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
# Generated by Django 3.2.20 on 2023-08-06 23:02
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0180_alter_reportaction_options"),
|
||||||
|
("bookwyrm", "0180_alter_user_preferred_language"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = []
|
130
bookwyrm/migrations/0182_auto_20231027_1122.py
Normal file
130
bookwyrm/migrations/0182_auto_20231027_1122.py
Normal file
|
@ -0,0 +1,130 @@
|
||||||
|
# Generated by Django 3.2.20 on 2023-10-27 11:22
|
||||||
|
|
||||||
|
import bookwyrm.models.activitypub_mixin
|
||||||
|
import bookwyrm.models.fields
|
||||||
|
from django.conf import settings
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0181_merge_20230806_2302"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="user",
|
||||||
|
name="also_known_as",
|
||||||
|
field=bookwyrm.models.fields.ManyToManyField(to=settings.AUTH_USER_MODEL),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="user",
|
||||||
|
name="moved_to",
|
||||||
|
field=bookwyrm.models.fields.RemoteIdField(
|
||||||
|
max_length=255,
|
||||||
|
null=True,
|
||||||
|
validators=[bookwyrm.models.fields.validate_remote_id],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="notification",
|
||||||
|
name="notification_type",
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
("FAVORITE", "Favorite"),
|
||||||
|
("REPLY", "Reply"),
|
||||||
|
("MENTION", "Mention"),
|
||||||
|
("TAG", "Tag"),
|
||||||
|
("FOLLOW", "Follow"),
|
||||||
|
("FOLLOW_REQUEST", "Follow Request"),
|
||||||
|
("BOOST", "Boost"),
|
||||||
|
("IMPORT", "Import"),
|
||||||
|
("ADD", "Add"),
|
||||||
|
("REPORT", "Report"),
|
||||||
|
("LINK_DOMAIN", "Link Domain"),
|
||||||
|
("INVITE", "Invite"),
|
||||||
|
("ACCEPT", "Accept"),
|
||||||
|
("JOIN", "Join"),
|
||||||
|
("LEAVE", "Leave"),
|
||||||
|
("REMOVE", "Remove"),
|
||||||
|
("GROUP_PRIVACY", "Group Privacy"),
|
||||||
|
("GROUP_NAME", "Group Name"),
|
||||||
|
("GROUP_DESCRIPTION", "Group Description"),
|
||||||
|
("MOVE", "Move"),
|
||||||
|
],
|
||||||
|
max_length=255,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="Move",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"id",
|
||||||
|
models.AutoField(
|
||||||
|
auto_created=True,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
verbose_name="ID",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("created_date", models.DateTimeField(auto_now_add=True)),
|
||||||
|
("updated_date", models.DateTimeField(auto_now=True)),
|
||||||
|
(
|
||||||
|
"remote_id",
|
||||||
|
bookwyrm.models.fields.RemoteIdField(
|
||||||
|
max_length=255,
|
||||||
|
null=True,
|
||||||
|
validators=[bookwyrm.models.fields.validate_remote_id],
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("object", bookwyrm.models.fields.CharField(max_length=255)),
|
||||||
|
(
|
||||||
|
"origin",
|
||||||
|
bookwyrm.models.fields.CharField(
|
||||||
|
blank=True, default="", max_length=255, null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"user",
|
||||||
|
bookwyrm.models.fields.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.PROTECT,
|
||||||
|
to=settings.AUTH_USER_MODEL,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"abstract": False,
|
||||||
|
},
|
||||||
|
bases=(bookwyrm.models.activitypub_mixin.ActivityMixin, models.Model),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="MoveUser",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"move_ptr",
|
||||||
|
models.OneToOneField(
|
||||||
|
auto_created=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
parent_link=True,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
to="bookwyrm.move",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"target",
|
||||||
|
bookwyrm.models.fields.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.PROTECT,
|
||||||
|
related_name="move_target",
|
||||||
|
to=settings.AUTH_USER_MODEL,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"abstract": False,
|
||||||
|
},
|
||||||
|
bases=("bookwyrm.move",),
|
||||||
|
),
|
||||||
|
]
|
18
bookwyrm/migrations/0183_auto_20231105_1607.py
Normal file
18
bookwyrm/migrations/0183_auto_20231105_1607.py
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
# Generated by Django 3.2.20 on 2023-11-05 16:07
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0182_auto_20231027_1122"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="user",
|
||||||
|
name="is_deleted",
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
]
|
35
bookwyrm/migrations/0184_auto_20231106_0421.py
Normal file
35
bookwyrm/migrations/0184_auto_20231106_0421.py
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
# Generated by Django 3.2.20 on 2023-11-06 04:21
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
from bookwyrm.models import User
|
||||||
|
|
||||||
|
|
||||||
|
def update_deleted_users(apps, schema_editor):
|
||||||
|
"""Find all the users who are deleted, not just inactive, and set deleted"""
|
||||||
|
users = apps.get_model("bookwyrm", "User")
|
||||||
|
db_alias = schema_editor.connection.alias
|
||||||
|
users.objects.using(db_alias).filter(
|
||||||
|
is_active=False,
|
||||||
|
deactivation_reason__in=[
|
||||||
|
"self_deletion",
|
||||||
|
"moderator_deletion",
|
||||||
|
],
|
||||||
|
).update(is_deleted=True)
|
||||||
|
|
||||||
|
# differente rules for remote users
|
||||||
|
users.objects.using(db_alias).filter(is_active=False, local=False,).exclude(
|
||||||
|
deactivation_reason="moderator_deactivation",
|
||||||
|
).update(is_deleted=True)
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0183_auto_20231105_1607"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RunPython(
|
||||||
|
update_deleted_users, reverse_code=migrations.RunPython.noop
|
||||||
|
),
|
||||||
|
]
|
|
@ -0,0 +1,42 @@
|
||||||
|
# Generated by Django 3.2.20 on 2023-11-13 22:39
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0184_auto_20231106_0421"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="notification",
|
||||||
|
name="notification_type",
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
("FAVORITE", "Favorite"),
|
||||||
|
("BOOST", "Boost"),
|
||||||
|
("REPLY", "Reply"),
|
||||||
|
("MENTION", "Mention"),
|
||||||
|
("TAG", "Tag"),
|
||||||
|
("FOLLOW", "Follow"),
|
||||||
|
("FOLLOW_REQUEST", "Follow Request"),
|
||||||
|
("IMPORT", "Import"),
|
||||||
|
("ADD", "Add"),
|
||||||
|
("REPORT", "Report"),
|
||||||
|
("LINK_DOMAIN", "Link Domain"),
|
||||||
|
("INVITE", "Invite"),
|
||||||
|
("ACCEPT", "Accept"),
|
||||||
|
("JOIN", "Join"),
|
||||||
|
("LEAVE", "Leave"),
|
||||||
|
("REMOVE", "Remove"),
|
||||||
|
("GROUP_PRIVACY", "Group Privacy"),
|
||||||
|
("GROUP_NAME", "Group Name"),
|
||||||
|
("GROUP_DESCRIPTION", "Group Description"),
|
||||||
|
("MOVE", "Move"),
|
||||||
|
],
|
||||||
|
max_length=255,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
212
bookwyrm/migrations/0186_auto_20231116_0048.py
Normal file
212
bookwyrm/migrations/0186_auto_20231116_0048.py
Normal file
|
@ -0,0 +1,212 @@
|
||||||
|
# Generated by Django 3.2.20 on 2023-11-16 00:48
|
||||||
|
|
||||||
|
from django.conf import settings
|
||||||
|
import django.contrib.postgres.fields
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
import django.utils.timezone
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0185_alter_notification_notification_type"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="ParentJob",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"id",
|
||||||
|
models.AutoField(
|
||||||
|
auto_created=True,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
verbose_name="ID",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("task_id", models.UUIDField(blank=True, null=True, unique=True)),
|
||||||
|
(
|
||||||
|
"created_date",
|
||||||
|
models.DateTimeField(default=django.utils.timezone.now),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"updated_date",
|
||||||
|
models.DateTimeField(default=django.utils.timezone.now),
|
||||||
|
),
|
||||||
|
("complete", models.BooleanField(default=False)),
|
||||||
|
(
|
||||||
|
"status",
|
||||||
|
models.CharField(
|
||||||
|
choices=[
|
||||||
|
("pending", "Pending"),
|
||||||
|
("active", "Active"),
|
||||||
|
("complete", "Complete"),
|
||||||
|
("stopped", "Stopped"),
|
||||||
|
("failed", "Failed"),
|
||||||
|
],
|
||||||
|
default="pending",
|
||||||
|
max_length=50,
|
||||||
|
null=True,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"user",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
to=settings.AUTH_USER_MODEL,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"abstract": False,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="sitesettings",
|
||||||
|
name="user_import_time_limit",
|
||||||
|
field=models.IntegerField(default=48),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="notification",
|
||||||
|
name="notification_type",
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
("FAVORITE", "Favorite"),
|
||||||
|
("BOOST", "Boost"),
|
||||||
|
("REPLY", "Reply"),
|
||||||
|
("MENTION", "Mention"),
|
||||||
|
("TAG", "Tag"),
|
||||||
|
("FOLLOW", "Follow"),
|
||||||
|
("FOLLOW_REQUEST", "Follow Request"),
|
||||||
|
("IMPORT", "Import"),
|
||||||
|
("USER_IMPORT", "User Import"),
|
||||||
|
("USER_EXPORT", "User Export"),
|
||||||
|
("ADD", "Add"),
|
||||||
|
("REPORT", "Report"),
|
||||||
|
("LINK_DOMAIN", "Link Domain"),
|
||||||
|
("INVITE", "Invite"),
|
||||||
|
("ACCEPT", "Accept"),
|
||||||
|
("JOIN", "Join"),
|
||||||
|
("LEAVE", "Leave"),
|
||||||
|
("REMOVE", "Remove"),
|
||||||
|
("GROUP_PRIVACY", "Group Privacy"),
|
||||||
|
("GROUP_NAME", "Group Name"),
|
||||||
|
("GROUP_DESCRIPTION", "Group Description"),
|
||||||
|
("MOVE", "Move"),
|
||||||
|
],
|
||||||
|
max_length=255,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="BookwyrmExportJob",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"parentjob_ptr",
|
||||||
|
models.OneToOneField(
|
||||||
|
auto_created=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
parent_link=True,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
to="bookwyrm.parentjob",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("export_data", models.FileField(null=True, upload_to="")),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"abstract": False,
|
||||||
|
},
|
||||||
|
bases=("bookwyrm.parentjob",),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="BookwyrmImportJob",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"parentjob_ptr",
|
||||||
|
models.OneToOneField(
|
||||||
|
auto_created=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
parent_link=True,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
to="bookwyrm.parentjob",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("archive_file", models.FileField(blank=True, null=True, upload_to="")),
|
||||||
|
("import_data", models.JSONField(null=True)),
|
||||||
|
(
|
||||||
|
"required",
|
||||||
|
django.contrib.postgres.fields.ArrayField(
|
||||||
|
base_field=models.CharField(blank=True, max_length=50),
|
||||||
|
blank=True,
|
||||||
|
size=None,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"abstract": False,
|
||||||
|
},
|
||||||
|
bases=("bookwyrm.parentjob",),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="ChildJob",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"id",
|
||||||
|
models.AutoField(
|
||||||
|
auto_created=True,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
verbose_name="ID",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
("task_id", models.UUIDField(blank=True, null=True, unique=True)),
|
||||||
|
(
|
||||||
|
"created_date",
|
||||||
|
models.DateTimeField(default=django.utils.timezone.now),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"updated_date",
|
||||||
|
models.DateTimeField(default=django.utils.timezone.now),
|
||||||
|
),
|
||||||
|
("complete", models.BooleanField(default=False)),
|
||||||
|
(
|
||||||
|
"status",
|
||||||
|
models.CharField(
|
||||||
|
choices=[
|
||||||
|
("pending", "Pending"),
|
||||||
|
("active", "Active"),
|
||||||
|
("complete", "Complete"),
|
||||||
|
("stopped", "Stopped"),
|
||||||
|
("failed", "Failed"),
|
||||||
|
],
|
||||||
|
default="pending",
|
||||||
|
max_length=50,
|
||||||
|
null=True,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"parent_job",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="child_jobs",
|
||||||
|
to="bookwyrm.parentjob",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"abstract": False,
|
||||||
|
},
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="notification",
|
||||||
|
name="related_user_export",
|
||||||
|
field=models.ForeignKey(
|
||||||
|
null=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
to="bookwyrm.bookwyrmexportjob",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
48
bookwyrm/migrations/0186_invite_request_notification.py
Normal file
48
bookwyrm/migrations/0186_invite_request_notification.py
Normal file
|
@ -0,0 +1,48 @@
|
||||||
|
# Generated by Django 3.2.20 on 2023-11-14 10:02
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0185_alter_notification_notification_type"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="notification",
|
||||||
|
name="related_invite_requests",
|
||||||
|
field=models.ManyToManyField(to="bookwyrm.InviteRequest"),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="notification",
|
||||||
|
name="notification_type",
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
("FAVORITE", "Favorite"),
|
||||||
|
("BOOST", "Boost"),
|
||||||
|
("REPLY", "Reply"),
|
||||||
|
("MENTION", "Mention"),
|
||||||
|
("TAG", "Tag"),
|
||||||
|
("FOLLOW", "Follow"),
|
||||||
|
("FOLLOW_REQUEST", "Follow Request"),
|
||||||
|
("IMPORT", "Import"),
|
||||||
|
("ADD", "Add"),
|
||||||
|
("REPORT", "Report"),
|
||||||
|
("LINK_DOMAIN", "Link Domain"),
|
||||||
|
("INVITE_REQUEST", "Invite Request"),
|
||||||
|
("INVITE", "Invite"),
|
||||||
|
("ACCEPT", "Accept"),
|
||||||
|
("JOIN", "Join"),
|
||||||
|
("LEAVE", "Leave"),
|
||||||
|
("REMOVE", "Remove"),
|
||||||
|
("GROUP_PRIVACY", "Group Privacy"),
|
||||||
|
("GROUP_NAME", "Group Name"),
|
||||||
|
("GROUP_DESCRIPTION", "Group Description"),
|
||||||
|
("MOVE", "Move"),
|
||||||
|
],
|
||||||
|
max_length=255,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
54
bookwyrm/migrations/0187_partial_publication_dates.py
Normal file
54
bookwyrm/migrations/0187_partial_publication_dates.py
Normal file
|
@ -0,0 +1,54 @@
|
||||||
|
# Generated by Django 3.2.20 on 2023-11-09 16:57
|
||||||
|
|
||||||
|
import bookwyrm.models.fields
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0186_invite_request_notification"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="book",
|
||||||
|
name="first_published_date_precision",
|
||||||
|
field=models.CharField(
|
||||||
|
blank=True,
|
||||||
|
choices=[
|
||||||
|
("DAY", "Day prec."),
|
||||||
|
("MONTH", "Month prec."),
|
||||||
|
("YEAR", "Year prec."),
|
||||||
|
],
|
||||||
|
editable=False,
|
||||||
|
max_length=10,
|
||||||
|
null=True,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="book",
|
||||||
|
name="published_date_precision",
|
||||||
|
field=models.CharField(
|
||||||
|
blank=True,
|
||||||
|
choices=[
|
||||||
|
("DAY", "Day prec."),
|
||||||
|
("MONTH", "Month prec."),
|
||||||
|
("YEAR", "Year prec."),
|
||||||
|
],
|
||||||
|
editable=False,
|
||||||
|
max_length=10,
|
||||||
|
null=True,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="book",
|
||||||
|
name="first_published_date",
|
||||||
|
field=bookwyrm.models.fields.PartialDateField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="book",
|
||||||
|
name="published_date",
|
||||||
|
field=bookwyrm.models.fields.PartialDateField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
]
|
18
bookwyrm/migrations/0188_theme_loads.py
Normal file
18
bookwyrm/migrations/0188_theme_loads.py
Normal file
|
@ -0,0 +1,18 @@
|
||||||
|
# Generated by Django 3.2.23 on 2023-11-20 18:02
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0187_partial_publication_dates"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="theme",
|
||||||
|
name="loads",
|
||||||
|
field=models.BooleanField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
]
|
45
bookwyrm/migrations/0189_alter_user_preferred_language.py
Normal file
45
bookwyrm/migrations/0189_alter_user_preferred_language.py
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
# Generated by Django 3.2.23 on 2023-12-12 23:42
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0188_theme_loads"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="user",
|
||||||
|
name="preferred_language",
|
||||||
|
field=models.CharField(
|
||||||
|
blank=True,
|
||||||
|
choices=[
|
||||||
|
("en-us", "English"),
|
||||||
|
("ca-es", "Català (Catalan)"),
|
||||||
|
("de-de", "Deutsch (German)"),
|
||||||
|
("eo-uy", "Esperanto (Esperanto)"),
|
||||||
|
("es-es", "Español (Spanish)"),
|
||||||
|
("eu-es", "Euskara (Basque)"),
|
||||||
|
("gl-es", "Galego (Galician)"),
|
||||||
|
("it-it", "Italiano (Italian)"),
|
||||||
|
("fi-fi", "Suomi (Finnish)"),
|
||||||
|
("fr-fr", "Français (French)"),
|
||||||
|
("lt-lt", "Lietuvių (Lithuanian)"),
|
||||||
|
("nl-nl", "Nederlands (Dutch)"),
|
||||||
|
("no-no", "Norsk (Norwegian)"),
|
||||||
|
("pl-pl", "Polski (Polish)"),
|
||||||
|
("pt-br", "Português do Brasil (Brazilian Portuguese)"),
|
||||||
|
("pt-pt", "Português Europeu (European Portuguese)"),
|
||||||
|
("ro-ro", "Română (Romanian)"),
|
||||||
|
("sv-se", "Svenska (Swedish)"),
|
||||||
|
("uk-ua", "Українська (Ukrainian)"),
|
||||||
|
("zh-hans", "简体中文 (Simplified Chinese)"),
|
||||||
|
("zh-hant", "繁體中文 (Traditional Chinese)"),
|
||||||
|
],
|
||||||
|
max_length=255,
|
||||||
|
null=True,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
|
@ -0,0 +1,13 @@
|
||||||
|
# Generated by Django 3.2.23 on 2023-11-22 10:16
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0186_auto_20231116_0048"),
|
||||||
|
("bookwyrm", "0188_theme_loads"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = []
|
|
@ -0,0 +1,45 @@
|
||||||
|
# Generated by Django 3.2.23 on 2023-11-23 19:49
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0189_merge_0186_auto_20231116_0048_0188_theme_loads"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="notification",
|
||||||
|
name="notification_type",
|
||||||
|
field=models.CharField(
|
||||||
|
choices=[
|
||||||
|
("FAVORITE", "Favorite"),
|
||||||
|
("BOOST", "Boost"),
|
||||||
|
("REPLY", "Reply"),
|
||||||
|
("MENTION", "Mention"),
|
||||||
|
("TAG", "Tag"),
|
||||||
|
("FOLLOW", "Follow"),
|
||||||
|
("FOLLOW_REQUEST", "Follow Request"),
|
||||||
|
("IMPORT", "Import"),
|
||||||
|
("USER_IMPORT", "User Import"),
|
||||||
|
("USER_EXPORT", "User Export"),
|
||||||
|
("ADD", "Add"),
|
||||||
|
("REPORT", "Report"),
|
||||||
|
("LINK_DOMAIN", "Link Domain"),
|
||||||
|
("INVITE_REQUEST", "Invite Request"),
|
||||||
|
("INVITE", "Invite"),
|
||||||
|
("ACCEPT", "Accept"),
|
||||||
|
("JOIN", "Join"),
|
||||||
|
("LEAVE", "Leave"),
|
||||||
|
("REMOVE", "Remove"),
|
||||||
|
("GROUP_PRIVACY", "Group Privacy"),
|
||||||
|
("GROUP_NAME", "Group Name"),
|
||||||
|
("GROUP_DESCRIPTION", "Group Description"),
|
||||||
|
("MOVE", "Move"),
|
||||||
|
],
|
||||||
|
max_length=255,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
16
bookwyrm/migrations/0190_book_search_updates.py
Normal file
16
bookwyrm/migrations/0190_book_search_updates.py
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
# Generated by Django 3.2.20 on 2023-11-24 17:11
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0188_theme_loads"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RemoveIndex(
|
||||||
|
model_name="author",
|
||||||
|
name="bookwyrm_au_search__b050a8_gin",
|
||||||
|
),
|
||||||
|
]
|
13
bookwyrm/migrations/0191_merge_20240102_0326.py
Normal file
13
bookwyrm/migrations/0191_merge_20240102_0326.py
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
# Generated by Django 3.2.23 on 2024-01-02 03:26
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0189_alter_user_preferred_language"),
|
||||||
|
("bookwyrm", "0190_alter_notification_notification_type"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = []
|
|
@ -0,0 +1,76 @@
|
||||||
|
# Generated by Django 3.2.20 on 2023-11-25 00:47
|
||||||
|
|
||||||
|
from importlib import import_module
|
||||||
|
import re
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
import pgtrigger.compiler
|
||||||
|
import pgtrigger.migrations
|
||||||
|
|
||||||
|
trigger_migration = import_module("bookwyrm.migrations.0077_auto_20210623_2155")
|
||||||
|
|
||||||
|
# it's _very_ convenient for development that this migration be reversible
|
||||||
|
search_vector_trigger = trigger_migration.Migration.operations[4]
|
||||||
|
author_search_vector_trigger = trigger_migration.Migration.operations[5]
|
||||||
|
|
||||||
|
|
||||||
|
assert re.search(r"\bCREATE TRIGGER search_vector_trigger\b", search_vector_trigger.sql)
|
||||||
|
assert re.search(
|
||||||
|
r"\bCREATE TRIGGER author_search_vector_trigger\b",
|
||||||
|
author_search_vector_trigger.sql,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0190_book_search_updates"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
pgtrigger.migrations.AddTrigger(
|
||||||
|
model_name="book",
|
||||||
|
trigger=pgtrigger.compiler.Trigger(
|
||||||
|
name="update_search_vector_on_book_edit",
|
||||||
|
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||||
|
func="new.search_vector := setweight(coalesce(nullif(to_tsvector('english', new.title), ''), to_tsvector('simple', new.title)), 'A') || setweight(to_tsvector('english', coalesce(new.subtitle, '')), 'B') || (SELECT setweight(to_tsvector('simple', coalesce(array_to_string(array_agg(bookwyrm_author.name), ' '), '')), 'C') FROM bookwyrm_author LEFT JOIN bookwyrm_book_authors ON bookwyrm_author.id = bookwyrm_book_authors.author_id WHERE bookwyrm_book_authors.book_id = new.id ) || setweight(to_tsvector('english', coalesce(new.series, '')), 'D');RETURN NEW;",
|
||||||
|
hash="77d6399497c0a89b0bf09d296e33c396da63705c",
|
||||||
|
operation='INSERT OR UPDATE OF "title", "subtitle", "series", "search_vector"',
|
||||||
|
pgid="pgtrigger_update_search_vector_on_book_edit_bec58",
|
||||||
|
table="bookwyrm_book",
|
||||||
|
when="BEFORE",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
pgtrigger.migrations.AddTrigger(
|
||||||
|
model_name="author",
|
||||||
|
trigger=pgtrigger.compiler.Trigger(
|
||||||
|
name="reset_search_vector_on_author_edit",
|
||||||
|
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||||
|
func="WITH updated_books AS (SELECT book_id FROM bookwyrm_book_authors WHERE author_id = new.id ) UPDATE bookwyrm_book SET search_vector = '' FROM updated_books WHERE id = updated_books.book_id;RETURN NEW;",
|
||||||
|
hash="e7bbf08711ff3724c58f4d92fb7a082ffb3d7826",
|
||||||
|
operation='UPDATE OF "name"',
|
||||||
|
pgid="pgtrigger_reset_search_vector_on_author_edit_a447c",
|
||||||
|
table="bookwyrm_author",
|
||||||
|
when="AFTER",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.RunSQL(
|
||||||
|
sql="""DROP TRIGGER IF EXISTS search_vector_trigger ON bookwyrm_book;
|
||||||
|
DROP FUNCTION IF EXISTS book_trigger;
|
||||||
|
""",
|
||||||
|
reverse_sql=search_vector_trigger.sql,
|
||||||
|
),
|
||||||
|
migrations.RunSQL(
|
||||||
|
sql="""DROP TRIGGER IF EXISTS author_search_vector_trigger ON bookwyrm_author;
|
||||||
|
DROP FUNCTION IF EXISTS author_trigger;
|
||||||
|
""",
|
||||||
|
reverse_sql=author_search_vector_trigger.sql,
|
||||||
|
),
|
||||||
|
migrations.RunSQL(
|
||||||
|
# Recalculate book search vector for any missed author name changes
|
||||||
|
# due to bug in JOIN in the old trigger.
|
||||||
|
sql="UPDATE bookwyrm_book SET search_vector = NULL;",
|
||||||
|
reverse_sql=migrations.RunSQL.noop,
|
||||||
|
),
|
||||||
|
]
|
23
bookwyrm/migrations/0192_make_page_positions_text.py
Normal file
23
bookwyrm/migrations/0192_make_page_positions_text.py
Normal file
|
@ -0,0 +1,23 @@
|
||||||
|
# Generated by Django 3.2.23 on 2024-01-04 23:56
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0191_merge_20240102_0326"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="quotation",
|
||||||
|
name="endposition",
|
||||||
|
field=models.TextField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="quotation",
|
||||||
|
name="position",
|
||||||
|
field=models.TextField(blank=True, null=True),
|
||||||
|
),
|
||||||
|
]
|
|
@ -0,0 +1,18 @@
|
||||||
|
# Generated by Django 3.2.23 on 2024-01-02 19:36
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0191_merge_20240102_0326"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.RenameField(
|
||||||
|
model_name="sitesettings",
|
||||||
|
old_name="version",
|
||||||
|
new_name="available_version",
|
||||||
|
),
|
||||||
|
]
|
|
@ -0,0 +1,18 @@
|
||||||
|
# Generated by Django 3.2.23 on 2024-01-16 10:28
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0191_merge_20240102_0326"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="sitesettings",
|
||||||
|
name="user_exports_enabled",
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
]
|
92
bookwyrm/migrations/0193_auto_20240128_0249.py
Normal file
92
bookwyrm/migrations/0193_auto_20240128_0249.py
Normal file
|
@ -0,0 +1,92 @@
|
||||||
|
# Generated by Django 3.2.23 on 2024-01-28 02:49
|
||||||
|
|
||||||
|
import django.core.serializers.json
|
||||||
|
from django.db import migrations, models
|
||||||
|
import django.db.models.deletion
|
||||||
|
from django.core.files.storage import storages
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0192_sitesettings_user_exports_enabled"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="bookwyrmexportjob",
|
||||||
|
name="export_json",
|
||||||
|
field=models.JSONField(
|
||||||
|
encoder=django.core.serializers.json.DjangoJSONEncoder, null=True
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.AddField(
|
||||||
|
model_name="bookwyrmexportjob",
|
||||||
|
name="json_completed",
|
||||||
|
field=models.BooleanField(default=False),
|
||||||
|
),
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="bookwyrmexportjob",
|
||||||
|
name="export_data",
|
||||||
|
field=models.FileField(
|
||||||
|
null=True,
|
||||||
|
storage=storages["exports"],
|
||||||
|
upload_to="",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="AddFileToTar",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"childjob_ptr",
|
||||||
|
models.OneToOneField(
|
||||||
|
auto_created=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
parent_link=True,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
to="bookwyrm.childjob",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"parent_export_job",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
related_name="child_edition_export_jobs",
|
||||||
|
to="bookwyrm.bookwyrmexportjob",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"abstract": False,
|
||||||
|
},
|
||||||
|
bases=("bookwyrm.childjob",),
|
||||||
|
),
|
||||||
|
migrations.CreateModel(
|
||||||
|
name="AddBookToUserExportJob",
|
||||||
|
fields=[
|
||||||
|
(
|
||||||
|
"childjob_ptr",
|
||||||
|
models.OneToOneField(
|
||||||
|
auto_created=True,
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
parent_link=True,
|
||||||
|
primary_key=True,
|
||||||
|
serialize=False,
|
||||||
|
to="bookwyrm.childjob",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
"edition",
|
||||||
|
models.ForeignKey(
|
||||||
|
on_delete=django.db.models.deletion.CASCADE,
|
||||||
|
to="bookwyrm.edition",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
],
|
||||||
|
options={
|
||||||
|
"abstract": False,
|
||||||
|
},
|
||||||
|
bases=("bookwyrm.childjob",),
|
||||||
|
),
|
||||||
|
]
|
13
bookwyrm/migrations/0193_merge_20240203_1539.py
Normal file
13
bookwyrm/migrations/0193_merge_20240203_1539.py
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
# Generated by Django 3.2.23 on 2024-02-03 15:39
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0192_make_page_positions_text"),
|
||||||
|
("bookwyrm", "0192_sitesettings_user_exports_enabled"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = []
|
13
bookwyrm/migrations/0194_merge_20240203_1619.py
Normal file
13
bookwyrm/migrations/0194_merge_20240203_1619.py
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
# Generated by Django 3.2.23 on 2024-02-03 16:19
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0192_rename_version_sitesettings_available_version"),
|
||||||
|
("bookwyrm", "0193_merge_20240203_1539"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = []
|
46
bookwyrm/migrations/0195_alter_user_preferred_language.py
Normal file
46
bookwyrm/migrations/0195_alter_user_preferred_language.py
Normal file
|
@ -0,0 +1,46 @@
|
||||||
|
# Generated by Django 3.2.23 on 2024-02-21 00:45
|
||||||
|
|
||||||
|
from django.db import migrations, models
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0194_merge_20240203_1619"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AlterField(
|
||||||
|
model_name="user",
|
||||||
|
name="preferred_language",
|
||||||
|
field=models.CharField(
|
||||||
|
blank=True,
|
||||||
|
choices=[
|
||||||
|
("en-us", "English"),
|
||||||
|
("ca-es", "Català (Catalan)"),
|
||||||
|
("de-de", "Deutsch (German)"),
|
||||||
|
("eo-uy", "Esperanto (Esperanto)"),
|
||||||
|
("es-es", "Español (Spanish)"),
|
||||||
|
("eu-es", "Euskara (Basque)"),
|
||||||
|
("gl-es", "Galego (Galician)"),
|
||||||
|
("it-it", "Italiano (Italian)"),
|
||||||
|
("ko-kr", "한국어 (Korean)"),
|
||||||
|
("fi-fi", "Suomi (Finnish)"),
|
||||||
|
("fr-fr", "Français (French)"),
|
||||||
|
("lt-lt", "Lietuvių (Lithuanian)"),
|
||||||
|
("nl-nl", "Nederlands (Dutch)"),
|
||||||
|
("no-no", "Norsk (Norwegian)"),
|
||||||
|
("pl-pl", "Polski (Polish)"),
|
||||||
|
("pt-br", "Português do Brasil (Brazilian Portuguese)"),
|
||||||
|
("pt-pt", "Português Europeu (European Portuguese)"),
|
||||||
|
("ro-ro", "Română (Romanian)"),
|
||||||
|
("sv-se", "Svenska (Swedish)"),
|
||||||
|
("uk-ua", "Українська (Ukrainian)"),
|
||||||
|
("zh-hans", "简体中文 (Simplified Chinese)"),
|
||||||
|
("zh-hant", "繁體中文 (Traditional Chinese)"),
|
||||||
|
],
|
||||||
|
max_length=255,
|
||||||
|
null=True,
|
||||||
|
),
|
||||||
|
),
|
||||||
|
]
|
13
bookwyrm/migrations/0196_merge_20240318_1737.py
Normal file
13
bookwyrm/migrations/0196_merge_20240318_1737.py
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
# Generated by Django 3.2.23 on 2024-03-18 17:37
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0193_auto_20240128_0249"),
|
||||||
|
("bookwyrm", "0195_alter_user_preferred_language"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = []
|
13
bookwyrm/migrations/0196_merge_pr3134_into_main.py
Normal file
13
bookwyrm/migrations/0196_merge_pr3134_into_main.py
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
# Generated by Django 3.2.23 on 2024-03-18 00:48
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0191_migrate_search_vec_triggers_to_pgtriggers"),
|
||||||
|
("bookwyrm", "0195_alter_user_preferred_language"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = []
|
41
bookwyrm/migrations/0197_author_search_vector.py
Normal file
41
bookwyrm/migrations/0197_author_search_vector.py
Normal file
|
@ -0,0 +1,41 @@
|
||||||
|
# Generated by Django 3.2.25 on 2024-03-20 15:15
|
||||||
|
|
||||||
|
import django.contrib.postgres.indexes
|
||||||
|
from django.db import migrations
|
||||||
|
import pgtrigger.compiler
|
||||||
|
import pgtrigger.migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0196_merge_pr3134_into_main"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = [
|
||||||
|
migrations.AddIndex(
|
||||||
|
model_name="author",
|
||||||
|
index=django.contrib.postgres.indexes.GinIndex(
|
||||||
|
fields=["search_vector"], name="bookwyrm_au_search__b050a8_gin"
|
||||||
|
),
|
||||||
|
),
|
||||||
|
pgtrigger.migrations.AddTrigger(
|
||||||
|
model_name="author",
|
||||||
|
trigger=pgtrigger.compiler.Trigger(
|
||||||
|
name="update_search_vector_on_author_edit",
|
||||||
|
sql=pgtrigger.compiler.UpsertTriggerSql(
|
||||||
|
func="new.search_vector := setweight(to_tsvector('simple', new.name), 'A') || setweight(to_tsvector('simple', coalesce(array_to_string(new.aliases, ' '), '')), 'B');RETURN NEW;",
|
||||||
|
hash="b97919016236d74d0ade51a0769a173ea269da64",
|
||||||
|
operation='INSERT OR UPDATE OF "name", "aliases", "search_vector"',
|
||||||
|
pgid="pgtrigger_update_search_vector_on_author_edit_c61cb",
|
||||||
|
table="bookwyrm_author",
|
||||||
|
when="BEFORE",
|
||||||
|
),
|
||||||
|
),
|
||||||
|
),
|
||||||
|
migrations.RunSQL(
|
||||||
|
# Calculate search vector for all Authors.
|
||||||
|
sql="UPDATE bookwyrm_author SET search_vector = NULL;",
|
||||||
|
reverse_sql="UPDATE bookwyrm_author SET search_vector = NULL;",
|
||||||
|
),
|
||||||
|
]
|
13
bookwyrm/migrations/0197_merge_20240324_0235.py
Normal file
13
bookwyrm/migrations/0197_merge_20240324_0235.py
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
# Generated by Django 3.2.25 on 2024-03-24 02:35
|
||||||
|
|
||||||
|
from django.db import migrations
|
||||||
|
|
||||||
|
|
||||||
|
class Migration(migrations.Migration):
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
("bookwyrm", "0196_merge_20240318_1737"),
|
||||||
|
("bookwyrm", "0196_merge_pr3134_into_main"),
|
||||||
|
]
|
||||||
|
|
||||||
|
operations = []
|
48
bookwyrm/migrations/0197_mergedauthor_mergedbook.py
Normal file
48
bookwyrm/migrations/0197_mergedauthor_mergedbook.py
Normal file
|
@ -0,0 +1,48 @@
|
||||||
|
# Generated by Django 3.2.24 on 2024-02-28 21:30

from django.db import migrations, models
import django.db.models.deletion


def _tombstone_model(name, target):
    """Build the CreateModel operation for one merge-tombstone table.

    Each tombstone row carries an integer primary key ``deleted_id``
    (presumably the old id of a removed record — verify against the model
    code) and a PROTECTed ``merged_into`` FK to the surviving row, exposed
    on the target model via the ``absorbed`` reverse accessor.
    """
    return migrations.CreateModel(
        name=name,
        fields=[
            ("deleted_id", models.IntegerField(primary_key=True, serialize=False)),
            (
                "merged_into",
                models.ForeignKey(
                    on_delete=django.db.models.deletion.PROTECT,
                    related_name="absorbed",
                    to=target,
                ),
            ),
        ],
        options={"abstract": False},
    )


class Migration(migrations.Migration):
    """Create the MergedBook and MergedAuthor tombstone tables."""

    dependencies = [
        ("bookwyrm", "0196_merge_pr3134_into_main"),
    ]

    # Both tables have an identical shape; only the name and FK target differ.
    operations = [
        _tombstone_model("MergedBook", "bookwyrm.book"),
        _tombstone_model("MergedAuthor", "bookwyrm.author"),
    ]
|
|
@ -0,0 +1,23 @@
|
||||||
|
# Generated by Django 3.2.25 on 2024-03-26 11:37

import bookwyrm.models.bookwyrm_export_job
from django.db import migrations, models


class Migration(migrations.Migration):
    """Point the user-export file field at a runtime-selected storage.

    ``storage`` is passed as a callable (``select_exports_storage``) rather
    than a storage instance, so the backend is resolved each time the field
    is used instead of being frozen into this migration.
    """

    dependencies = [
        ("bookwyrm", "0197_merge_20240324_0235"),
    ]

    operations = [
        migrations.AlterField(
            model_name="bookwyrmexportjob",
            name="export_data",
            field=models.FileField(
                storage=bookwyrm.models.bookwyrm_export_job.select_exports_storage,
                upload_to="",
                null=True,
            ),
        ),
    ]
|
|
@ -0,0 +1,57 @@
|
||||||
|
# Generated by Django 3.2.25 on 2024-03-20 15:52

from django.db import migrations
import pgtrigger.compiler
import pgtrigger.migrations


class Migration(migrations.Migration):
    """Fold author name/alias data into the book search vector.

    Drops the old author/book search triggers, installs replacements that
    (a) blank affected books' search vectors when an author's name or
    aliases change and (b) rebuild a book's vector from title, subtitle,
    author names+aliases, and series, then forces a recalculation for every
    existing book.
    """

    dependencies = [
        ("bookwyrm", "0197_author_search_vector"),
    ]

    operations = [
        # Retire the previous trigger pair before installing replacements.
        pgtrigger.migrations.RemoveTrigger(
            model_name="author",
            name="reset_search_vector_on_author_edit",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="book",
            name="update_search_vector_on_book_edit",
        ),
        # After an author's name/aliases change, blank the search vector of
        # every book they wrote so the book trigger recomputes it.
        pgtrigger.migrations.AddTrigger(
            model_name="author",
            trigger=pgtrigger.compiler.Trigger(
                name="reset_book_search_vector_on_author_edit",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func="WITH updated_books AS (SELECT book_id FROM bookwyrm_book_authors WHERE author_id = new.id ) UPDATE bookwyrm_book SET search_vector = '' FROM updated_books WHERE id = updated_books.book_id;RETURN NEW;",
                    hash="68422c0f29879c5802b82159dde45297eff53e73",
                    operation='UPDATE OF "name", "aliases"',
                    pgid="pgtrigger_reset_book_search_vector_on_author_edit_a50c7",
                    table="bookwyrm_author",
                    when="AFTER",
                ),
            ),
        ),
        # Rebuild a book's search vector on edit, now weighting author
        # names and aliases at rank C between subtitle (B) and series (D).
        pgtrigger.migrations.AddTrigger(
            model_name="book",
            trigger=pgtrigger.compiler.Trigger(
                name="update_search_vector_on_book_edit",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func="WITH author_names AS (SELECT array_to_string(bookwyrm_author.name || bookwyrm_author.aliases, ' ') AS name_and_aliases FROM bookwyrm_author LEFT JOIN bookwyrm_book_authors ON bookwyrm_author.id = bookwyrm_book_authors.author_id WHERE bookwyrm_book_authors.book_id = new.id ) SELECT setweight(coalesce(nullif(to_tsvector('english', new.title), ''), to_tsvector('simple', new.title)), 'A') || setweight(to_tsvector('english', coalesce(new.subtitle, '')), 'B') || (SELECT setweight(to_tsvector('simple', coalesce(array_to_string(array_agg(name_and_aliases), ' '), '')), 'C') FROM author_names) || setweight(to_tsvector('english', coalesce(new.series, '')), 'D') INTO new.search_vector;RETURN NEW;",
                    hash="9324f5ca76a6f5e63931881d62d11da11f595b2c",
                    operation='INSERT OR UPDATE OF "title", "subtitle", "series", "search_vector"',
                    pgid="pgtrigger_update_search_vector_on_book_edit_bec58",
                    table="bookwyrm_book",
                    when="BEFORE",
                ),
            ),
        ),
        # Recalculate search vector for all Books because it now includes
        # Author aliases (NULLing the column fires the BEFORE trigger).
        migrations.RunSQL(
            sql="UPDATE bookwyrm_book SET search_vector = NULL;",
            reverse_sql="UPDATE bookwyrm_book SET search_vector = NULL;",
        ),
    ]
|
|
@ -0,0 +1,70 @@
|
||||||
|
# Generated by Django 4.2.11 on 2024-03-29 19:25

import bookwyrm.models.fields
from django.conf import settings
from django.db import migrations
import django.db.models.deletion


# (model_name, field name, related_name) for every user-to-user
# relationship field that gets redeclared below. All six fields share an
# otherwise identical definition.
_USER_RELATION_FIELDS = [
    ("userblocks", "user_object", "%(class)s_user_object"),
    ("userblocks", "user_subject", "%(class)s_user_subject"),
    ("userfollowrequest", "user_object", "%(class)s_user_object"),
    ("userfollowrequest", "user_subject", "%(class)s_user_subject"),
    ("userfollows", "user_object", "%(class)s_user_object"),
    ("userfollows", "user_subject", "%(class)s_user_subject"),
]


class Migration(migrations.Migration):
    """Redeclare user-relationship FKs with bookwyrm's ForeignKey field.

    Swaps the plain Django ForeignKey on the user_subject/user_object
    columns of UserBlocks, UserFollowRequest, and UserFollows for the
    project's ``bookwyrm.models.fields.ForeignKey`` wrapper, keeping
    PROTECT deletion and the per-class related names.
    """

    dependencies = [
        ("bookwyrm", "0198_book_search_vector_author_aliases"),
    ]

    operations = [
        migrations.AlterField(
            model_name=model,
            name=field_name,
            field=bookwyrm.models.fields.ForeignKey(
                on_delete=django.db.models.deletion.PROTECT,
                related_name=related,
                to=settings.AUTH_USER_MODEL,
            ),
        )
        for model, field_name, related in _USER_RELATION_FIELDS
    ]
|
13
bookwyrm/migrations/0199_merge_20240326_1217.py
Normal file
13
bookwyrm/migrations/0199_merge_20240326_1217.py
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
# Generated by Django 3.2.25 on 2024-03-26 12:17

from django.db import migrations


class Migration(migrations.Migration):
    """No-op merge point joining two divergent 0198 migration branches.

    Contains no operations; it exists solely so both parents are applied
    before later migrations.
    """

    dependencies = [
        ("bookwyrm", "0198_alter_bookwyrmexportjob_export_data"),
        ("bookwyrm", "0198_book_search_vector_author_aliases"),
    ]

    operations = []
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue