Mirror of https://github.com/bookwyrm-social/bookwyrm.git
Synced: 2024-05-16 15:33:15 +00:00

Compare commits (791 commits)
[The compare view's commit table did not survive the mirror: the Author, Date, and message columns are empty, leaving only abbreviated commit hashes. The listing spans 791 commits, from c4b21ee258 (first listed) through dbd5a02617 (last listed). The per-file diffs follow.]
.env.example (26 changes)

@@ -16,6 +16,11 @@ DEFAULT_LANGUAGE="English"
 ## Leave unset to allow all hosts
 # ALLOWED_HOSTS="localhost,127.0.0.1,[::1]"
 
+# Specify when the site is served from a port that is not the default
+# for the protocol (80 for HTTP or 443 for HTTPS).
+# Probably only necessary in development.
+# PORT=1333
+
 MEDIA_ROOT=images/
 
 # Database configuration

@@ -71,14 +76,20 @@ ENABLE_THUMBNAIL_GENERATION=true
 USE_S3=false
 AWS_ACCESS_KEY_ID=
 AWS_SECRET_ACCESS_KEY=
+# seconds for signed S3 urls to expire
+# this is currently only used for user export files
+S3_SIGNED_URL_EXPIRY=900
 
 # Commented are example values if you use a non-AWS, S3-compatible service
 # AWS S3 should work with only AWS_STORAGE_BUCKET_NAME and AWS_S3_REGION_NAME
 # non-AWS S3-compatible services will need AWS_STORAGE_BUCKET_NAME,
-# along with both AWS_S3_CUSTOM_DOMAIN and AWS_S3_ENDPOINT_URL
+# along with both AWS_S3_CUSTOM_DOMAIN and AWS_S3_ENDPOINT_URL.
+# AWS_S3_URL_PROTOCOL must end in ":" and defaults to the same protocol as
+# the BookWyrm instance ("http:" or "https:", based on USE_SSL).
 
 # AWS_STORAGE_BUCKET_NAME= # "example-bucket-name"
 # AWS_S3_CUSTOM_DOMAIN=None # "example-bucket-name.s3.fr-par.scw.cloud"
+# AWS_S3_URL_PROTOCOL=None # "http:"
 # AWS_S3_REGION_NAME=None # "fr-par"
 # AWS_S3_ENDPOINT_URL=None # "https://s3.fr-par.scw.cloud"
 

@@ -133,7 +144,14 @@ HTTP_X_FORWARDED_PROTO=false
 TWO_FACTOR_LOGIN_VALIDITY_WINDOW=2
 TWO_FACTOR_LOGIN_MAX_SECONDS=60
 
-# Additional hosts to allow in the Content-Security-Policy, "self" (should be DOMAIN)
-# and AWS_S3_CUSTOM_DOMAIN (if used) are added by default.
-# Value should be a comma-separated list of host names.
+# Additional hosts to allow in the Content-Security-Policy, "self" (should be
+# DOMAIN with optionally ":" + PORT) and AWS_S3_CUSTOM_DOMAIN (if used) are
+# added by default. Value should be a comma-separated list of host names.
 CSP_ADDITIONAL_HOSTS=
+
+# Time before being logged out (in seconds)
+# SESSION_COOKIE_AGE=2592000 # current default: 30 days
+
+# Maximum allowed memory for file uploads (increase if users are having trouble
+# uploading BookWyrm export files).
+# DATA_UPLOAD_MAX_MEMORY_MiB=100
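Several of these settings (ALLOWED_HOSTS, CSP_ADDITIONAL_HOSTS) are comma-separated lists. A minimal sketch of how such a variable can be parsed, assuming a hypothetical env_list helper (an illustration, not BookWyrm's actual settings code):

    import os

    def env_list(name: str, default: str = "") -> list[str]:
        """Split a comma-separated environment variable into host names."""
        raw = os.environ.get(name, default)
        return [host.strip() for host in raw.split(",") if host.strip()]

    # hypothetical usage mirroring the variables above
    csp_additional_hosts = env_list("CSP_ADDITIONAL_HOSTS")
    allowed_hosts = env_list("ALLOWED_HOSTS", default="localhost,127.0.0.1,[::1]")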
.github/workflows/black.yml (vendored, 17 changes; file deleted)

@@ -1,17 +0,0 @@
-name: Python Formatting (run ./bw-dev black to fix)
-
-on:
-  push:
-    branches: [ main ]
-  pull_request:
-    branches: [ main ]
-
-jobs:
-  lint:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-      - uses: actions/setup-python@v4
-      - uses: psf/black@22.12.0
-        with:
-          version: 22.12.0
.github/workflows/codeql-analysis.yml (vendored, 8 changes)

@@ -36,11 +36,11 @@ jobs:
 
     steps:
     - name: Checkout repository
-      uses: actions/checkout@v3
+      uses: actions/checkout@v4
 
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@v2
+      uses: github/codeql-action/init@v3
       with:
         languages: ${{ matrix.language }}
         # If you wish to specify custom queries, you can do so here or in a config file.

@@ -51,7 +51,7 @@ jobs:
     # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
     # If this step fails, then you should remove it and run the build manually (see below)
     - name: Autobuild
-      uses: github/codeql-action/autobuild@v2
+      uses: github/codeql-action/autobuild@v3
 
     # ℹ️ Command-line programs to run using the OS shell.
    # 📚 https://git.io/JvXDl

@@ -65,4 +65,4 @@ jobs:
     #   make release
 
     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v2
+      uses: github/codeql-action/analyze@v3
.github/workflows/curlylint.yaml (vendored, 2 changes)

@@ -10,7 +10,7 @@ jobs:
   lint:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
 
      - name: Install curlylint
        run: pip install curlylint
.github/workflows/django-tests.yml (vendored, 61 changes; file deleted)

@@ -1,61 +0,0 @@
-name: Run Python Tests
-on:
-  push:
-    branches: [ main ]
-  pull_request:
-    branches: [ main ]
-
-jobs:
-  build:
-
-    runs-on: ubuntu-20.04
-    services:
-      postgres:
-        image: postgres:13
-        env:
-          POSTGRES_USER: postgres
-          POSTGRES_PASSWORD: hunter2
-        options: >-
-          --health-cmd pg_isready
-          --health-interval 10s
-          --health-timeout 5s
-          --health-retries 5
-        ports:
-          - 5432:5432
-    steps:
-      - uses: actions/checkout@v3
-      - name: Set up Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: 3.9
-      - name: Install Dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install -r requirements.txt
-      - name: Run Tests
-        env:
-          SECRET_KEY: beepbeep
-          DEBUG: false
-          USE_HTTPS: true
-          DOMAIN: your.domain.here
-          BOOKWYRM_DATABASE_BACKEND: postgres
-          MEDIA_ROOT: images/
-          POSTGRES_PASSWORD: hunter2
-          POSTGRES_USER: postgres
-          POSTGRES_DB: github_actions
-          POSTGRES_HOST: 127.0.0.1
-          CELERY_BROKER: ""
-          REDIS_BROKER_PORT: 6379
-          REDIS_BROKER_PASSWORD: beep
-          USE_DUMMY_CACHE: true
-          FLOWER_PORT: 8888
-          EMAIL_HOST: "smtp.mailgun.org"
-          EMAIL_PORT: 587
-          EMAIL_HOST_USER: ""
-          EMAIL_HOST_PASSWORD: ""
-          EMAIL_USE_TLS: true
-          ENABLE_PREVIEW_IMAGES: false
-          ENABLE_THUMBNAIL_GENERATION: true
-          HTTP_X_FORWARDED_PROTO: false
-        run: |
-          pytest -n 3
.github/workflows/lint-frontend.yaml (vendored, 5 changes)

@@ -19,10 +19,11 @@ jobs:
 
     steps:
       # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it.
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
 
      - name: Install modules
-        run: npm install stylelint stylelint-config-recommended stylelint-config-standard stylelint-order eslint
+        # run: npm install stylelint stylelint-config-recommended stylelint-config-standard stylelint-order eslint
+        run: npm install eslint@^8.9.0
 
      # See .stylelintignore for files that are not linted.
      # - name: Run stylelint
.github/workflows/prettier.yaml (vendored, 2 changes)

@@ -14,7 +14,7 @@ jobs:
 
     steps:
       # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it.
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
 
      - name: Install modules
        run: npm install prettier@2.5.1
.github/workflows/pylint.yml (vendored, 27 changes; file deleted)

@@ -1,27 +0,0 @@
-name: Pylint
-
-on:
-  push:
-    branches: [ main ]
-  pull_request:
-    branches: [ main ]
-
-jobs:
-  build:
-
-    runs-on: ubuntu-latest
-
-    steps:
-      - uses: actions/checkout@v3
-      - name: Set up Python 3.9
-        uses: actions/setup-python@v4
-        with:
-          python-version: 3.9
-      - name: Install Dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install -r requirements.txt
-      - name: Analysing the code with pylint
-        run: |
-          pylint bookwyrm/
-
.github/workflows/python.yml (vendored; new file, 99 lines)

@@ -0,0 +1,99 @@
+name: Python
+on:
+  push:
+    branches: [ main ]
+  pull_request:
+    branches: [ main ]
+
+# overrides for .env.example
+env:
+  POSTGRES_HOST: 127.0.0.1
+  PGPORT: 5432
+  POSTGRES_USER: postgres
+  POSTGRES_PASSWORD: hunter2
+  POSTGRES_DB: github_actions
+  SECRET_KEY: beepbeep
+  EMAIL_HOST_USER: ""
+  EMAIL_HOST_PASSWORD: ""
+
+jobs:
+  pytest:
+    name: Tests (pytest)
+    runs-on: ubuntu-latest
+    services:
+      postgres:
+        image: postgres:13
+        env: # does not inherit from jobs.build.env
+          POSTGRES_USER: postgres
+          POSTGRES_PASSWORD: hunter2
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        ports:
+          - 5432:5432
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python 3.11
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.11
+          cache: pip
+      - name: Install Dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt
+          pip install pytest-github-actions-annotate-failures
+      - name: Set up .env
+        run: cp .env.example .env
+      - name: Check migrations up-to-date
+        run: python ./manage.py makemigrations --check
+      - name: Run Tests
+        run: pytest -n 3
+
+  pylint:
+    name: Linting (pylint)
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python 3.11
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.11
+          cache: pip
+      - name: Install Dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt
+      - name: Analyse code with pylint
+        run: pylint bookwyrm/
+
+  mypy:
+    name: Typing (mypy)
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - name: Set up Python 3.11
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.11
+          cache: pip
+      - name: Install Dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt
+      - name: Set up .env
+        run: cp .env.example .env
+      - name: Analyse code with mypy
+        run: mypy bookwyrm celerywyrm
+
+  black:
+    name: Formatting (black; run ./bw-dev black to fix)
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+      - uses: psf/black@stable
+        with:
+          version: "22.*"
.gitignore (vendored, 5 changes)

@@ -16,6 +16,8 @@
 # BookWyrm
 .env
 /images/
+/exports/
+/static/
 bookwyrm/static/css/bookwyrm.css
 bookwyrm/static/css/themes/
 !bookwyrm/static/css/themes/bookwyrm-*.scss

@@ -36,3 +38,6 @@ nginx/default.conf
 
 #macOS
 **/.DS_Store
+
+# Docker
+docker-compose.override.yml
.prettierrc (new file, 1 line)

@@ -0,0 +1 @@
+'trailingComma': 'es5'
[file name not captured in the mirror; the hunk is Dockerfile syntax]

@@ -1,4 +1,4 @@
-FROM python:3.9
+FROM python:3.11
 
 ENV PYTHONUNBUFFERED 1
 
[file name not captured in the mirror; federation documentation]

@@ -13,14 +13,15 @@ User relationship interactions follow the standard ActivityPub spec.
 - `Block`: prevent users from seeing one another's statuses, and prevents the blocked user from viewing the actor's profile
 - `Update`: updates a user's profile and settings
 - `Delete`: deactivates a user
-- `Undo`: reverses a `Follow` or `Block`
+- `Undo`: reverses a `Block` or `Follow`
 
 ### Activities
 - `Create/Status`: saves a new status in the database.
 - `Delete/Status`: Removes a status
 - `Like/Status`: Creates a favorite on the status
 - `Announce/Status`: Boosts the status into the actor's timeline
-- `Undo/*`,: Reverses a `Like` or `Announce`
+- `Undo/*`,: Reverses an `Announce`, `Like`, or `Move`
+- `Move/User`: Moves a user from one ActivityPub id to another.
 
 ### Collections
 User's books and lists are represented by [`OrderedCollection`](https://www.w3.org/TR/activitystreams-vocabulary/#dfn-orderedcollection)
[file name not captured in the mirror; project README]

@@ -10,7 +10,6 @@ BookWyrm is a social network for tracking your reading, talking about books, wri
 ## Links
 
 [![Mastodon Follow](https://img.shields.io/mastodon/follow/000146121?domain=https%3A%2F%2Ftech.lgbt&style=social)](https://tech.lgbt/@bookwyrm)
-[![Twitter Follow](https://img.shields.io/twitter/follow/BookWyrmSocial?style=social)](https://twitter.com/BookWyrmSocial)
 
 - [Project homepage](https://joinbookwyrm.com/)
 - [Support](https://patreon.com/bookwyrm)
[file name not captured in the mirror; activitypub package imports]

@@ -4,7 +4,11 @@ import sys
 
 from .base_activity import ActivityEncoder, Signature, naive_parse
 from .base_activity import Link, Mention, Hashtag
-from .base_activity import ActivitySerializerError, resolve_remote_id
+from .base_activity import (
+    ActivitySerializerError,
+    resolve_remote_id,
+    get_representative,
+)
 from .image import Document, Image
 from .note import Note, GeneratedNote, Article, Comment, Quotation
 from .note import Review, Rating

@@ -19,6 +23,7 @@ from .verbs import Create, Delete, Undo, Update
 from .verbs import Follow, Accept, Reject, Block
 from .verbs import Add, Remove
 from .verbs import Announce, Like
+from .verbs import Move
 
 # this creates a list of all the Activity types that we can serialize,
 # so when an Activity comes in from outside, we can check if it's known
[file name not captured in the mirror; the activitypub base serializer module]

@@ -1,7 +1,10 @@
 """ basics for an activitypub serializer """
+from __future__ import annotations
 from dataclasses import dataclass, fields, MISSING
 from json import JSONEncoder
 import logging
+from typing import Optional, Union, TypeVar, overload, Any
 
 import requests
 
 from django.apps import apps

@@ -10,12 +13,16 @@ from django.utils.http import http_date
 
 from bookwyrm import models
 from bookwyrm.connectors import ConnectorException, get_data
+from bookwyrm.models import base_model
 from bookwyrm.signatures import make_signature
 from bookwyrm.settings import DOMAIN, INSTANCE_ACTOR_USERNAME
 from bookwyrm.tasks import app, MISC
 
 logger = logging.getLogger(__name__)
 
+# pylint: disable=invalid-name
+TBookWyrmModel = TypeVar("TBookWyrmModel", bound=base_model.BookWyrmModel)
+
 
 class ActivitySerializerError(ValueError):
     """routine problems serializing activitypub json"""

@@ -65,7 +72,13 @@ class ActivityObject:
     id: str
     type: str
 
-    def __init__(self, activity_objects=None, **kwargs):
+    def __init__(
+        self,
+        activity_objects: Optional[
+            dict[str, Union[str, list[str], ActivityObject, base_model.BookWyrmModel]]
+        ] = None,
+        **kwargs: Any,
+    ):
         """this lets you pass in an object with fields that aren't in the
         dataclass, which it ignores. Any field in the dataclass is required or
         has a default value"""

@@ -101,13 +114,13 @@ class ActivityObject:
     # pylint: disable=too-many-locals,too-many-branches,too-many-arguments
     def to_model(
         self,
-        model=None,
-        instance=None,
-        allow_create=True,
-        save=True,
-        overwrite=True,
-        allow_external_connections=True,
-    ):
+        model: Optional[type[TBookWyrmModel]] = None,
+        instance: Optional[TBookWyrmModel] = None,
+        allow_create: bool = True,
+        save: bool = True,
+        overwrite: bool = True,
+        allow_external_connections: bool = True,
+    ) -> Optional[TBookWyrmModel]:
         """convert from an activity to a model instance. Args:
             model: the django model that this object is being converted to
                 (will guess if not known)

@@ -224,7 +237,7 @@ class ActivityObject:
         omit = kwargs.get("omit", ())
         data = self.__dict__.copy()
         # recursively serialize
-        for (k, v) in data.items():
+        for k, v in data.items():
             try:
                 if issubclass(type(v), ActivityObject):
                     data[k] = v.serialize()

@@ -296,14 +309,40 @@ def get_model_from_type(activity_type):
 
 
 # pylint: disable=too-many-arguments
+@overload
+def resolve_remote_id(
+    remote_id: str,
+    model: type[TBookWyrmModel],
+    refresh: bool = False,
+    save: bool = True,
+    get_activity: bool = False,
+    allow_external_connections: bool = True,
+) -> TBookWyrmModel:
+    ...
+
+
+# pylint: disable=too-many-arguments
+@overload
+def resolve_remote_id(
+    remote_id: str,
+    model: Optional[str] = None,
+    refresh: bool = False,
+    save: bool = True,
+    get_activity: bool = False,
+    allow_external_connections: bool = True,
+) -> base_model.BookWyrmModel:
+    ...
+
+
+# pylint: disable=too-many-arguments
 def resolve_remote_id(
-    remote_id,
-    model=None,
-    refresh=False,
-    save=True,
-    get_activity=False,
-    allow_external_connections=True,
-):
+    remote_id: str,
+    model: Optional[Union[str, type[base_model.BookWyrmModel]]] = None,
+    refresh: bool = False,
+    save: bool = True,
+    get_activity: bool = False,
+    allow_external_connections: bool = True,
+) -> base_model.BookWyrmModel:
     """take a remote_id and return an instance, creating if necessary. Args:
         remote_id: the unique url for looking up the object in the db or by http
         model: a string or object representing the model that corresponds to the object
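The two @overload stubs above let a type checker narrow resolve_remote_id's return type from its model argument. A self-contained sketch of the same typing pattern with illustrative names (not BookWyrm code; here the narrowing key is a Literal flag rather than a model class):

    from typing import Literal, Optional, Union, overload

    @overload
    def pick(values: list[str], first_only: Literal[True]) -> Optional[str]: ...
    @overload
    def pick(values: list[str], first_only: Literal[False]) -> list[str]: ...

    def pick(
        values: list[str], first_only: bool = False
    ) -> Union[Optional[str], list[str]]:
        """Return either the first value or the whole list; overloads tell the checker which."""
        if first_only:
            return values[0] if values else None
        return values

    # a type checker infers Optional[str] for the first call, list[str] for the second
    one = pick(["a", "b"], first_only=True)
    many = pick(["a", "b"], first_only=False)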
@@ -358,19 +397,15 @@ def resolve_remote_id(
 
 def get_representative():
     """Get or create an actor representing the instance
-    to sign requests to 'secure mastodon' servers"""
-    username = f"{INSTANCE_ACTOR_USERNAME}@{DOMAIN}"
-    email = "bookwyrm@localhost"
-    try:
-        user = models.User.objects.get(username=username)
-    except models.User.DoesNotExist:
-        user = models.User.objects.create_user(
-            username=username,
-            email=email,
+    to sign outgoing HTTP GET requests"""
+    return models.User.objects.get_or_create(
+        username=f"{INSTANCE_ACTOR_USERNAME}@{DOMAIN}",
+        defaults=dict(
+            email="bookwyrm@localhost",
             local=True,
             localname=INSTANCE_ACTOR_USERNAME,
-        )
-    return user
+        ),
+    )[0]
 
 
 def get_activitypub_data(url):

@@ -389,6 +424,7 @@ def get_activitypub_data(url):
             "Date": now,
             "Signature": make_signature("get", sender, url, now),
         },
+        timeout=15,
     )
     except requests.RequestException:
         raise ConnectorException()
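The get_representative() rewrite above swaps a try/except get-then-create for Django's get_or_create, which returns an (object, created) tuple, so indexing [0] keeps just the user. A toy, dict-backed sketch of that contract (plain Python, not Django; names are illustrative):

    # defaults only applies when the record is created, never on lookup
    _registry: dict[str, dict] = {}

    def get_or_create(username: str, defaults: dict) -> tuple[dict, bool]:
        if username in _registry:
            return _registry[username], False
        _registry[username] = {"username": username, **defaults}
        return _registry[username], True

    user, created = get_or_create("instance.actor@example.net", defaults={"local": True})
    assert created
    user2, created2 = get_or_create("instance.actor@example.net", defaults={"local": False})
    assert user is user2 and not created2  # second call ignores defaults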
[file name not captured in the mirror; activitypub book/author serializers]

@@ -1,6 +1,6 @@
 """ book and author data """
 from dataclasses import dataclass, field
-from typing import List
+from typing import Optional
 
 from .base_activity import ActivityObject
 from .image import Document

@@ -11,19 +11,17 @@ from .image import Document
 class BookData(ActivityObject):
     """shared fields for all book data and authors"""
 
-    openlibraryKey: str = None
-    inventaireId: str = None
-    librarythingKey: str = None
-    goodreadsKey: str = None
-    bnfId: str = None
-    viaf: str = None
-    wikidata: str = None
-    asin: str = None
-    aasin: str = None
-    isfdb: str = None
-    lastEditedBy: str = None
-    links: List[str] = field(default_factory=lambda: [])
-    fileLinks: List[str] = field(default_factory=lambda: [])
+    openlibraryKey: Optional[str] = None
+    inventaireId: Optional[str] = None
+    librarythingKey: Optional[str] = None
+    goodreadsKey: Optional[str] = None
+    bnfId: Optional[str] = None
+    viaf: Optional[str] = None
+    wikidata: Optional[str] = None
+    asin: Optional[str] = None
+    aasin: Optional[str] = None
+    isfdb: Optional[str] = None
+    lastEditedBy: Optional[str] = None
 
 
 # pylint: disable=invalid-name

@@ -35,17 +33,19 @@ class Book(BookData):
     sortTitle: str = None
     subtitle: str = None
     description: str = ""
-    languages: List[str] = field(default_factory=lambda: [])
+    languages: list[str] = field(default_factory=list)
     series: str = ""
     seriesNumber: str = ""
-    subjects: List[str] = field(default_factory=lambda: [])
-    subjectPlaces: List[str] = field(default_factory=lambda: [])
+    subjects: list[str] = field(default_factory=list)
+    subjectPlaces: list[str] = field(default_factory=list)
 
-    authors: List[str] = field(default_factory=lambda: [])
+    authors: list[str] = field(default_factory=list)
     firstPublishedDate: str = ""
     publishedDate: str = ""
 
-    cover: Document = None
+    fileLinks: list[str] = field(default_factory=list)
+
+    cover: Optional[Document] = None
     type: str = "Book"
 
 

@@ -58,10 +58,10 @@ class Edition(Book):
     isbn10: str = ""
     isbn13: str = ""
     oclcNumber: str = ""
-    pages: int = None
+    pages: Optional[int] = None
     physicalFormat: str = ""
     physicalFormatDetail: str = ""
-    publishers: List[str] = field(default_factory=lambda: [])
+    publishers: list[str] = field(default_factory=list)
     editionRank: int = 0
 
     type: str = "Edition"

@@ -73,7 +73,7 @@ class Work(Book):
     """work instance of a book object"""
 
     lccn: str = ""
-    editions: List[str] = field(default_factory=lambda: [])
+    editions: list[str] = field(default_factory=list)
     type: str = "Work"
 
 

@@ -83,12 +83,12 @@ class Author(BookData):
     """author of a book"""
 
     name: str
-    isni: str = None
-    viafId: str = None
-    gutenbergId: str = None
-    born: str = None
-    died: str = None
-    aliases: List[str] = field(default_factory=lambda: [])
+    isni: Optional[str] = None
+    viafId: Optional[str] = None
+    gutenbergId: Optional[str] = None
+    born: Optional[str] = None
+    died: Optional[str] = None
+    aliases: list[str] = field(default_factory=list)
     bio: str = ""
     wikipediaLink: str = ""
     type: str = "Author"
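The changes above replace bare str = None annotations with Optional[str] and swap default_factory=lambda: [] for the plainer default_factory=list. A small runnable sketch of why dataclasses need a factory for list defaults (illustrative class, not BookWyrm code):

    from dataclasses import dataclass, field
    from typing import Optional

    @dataclass
    class AuthorSketch:
        """Illustrative stand-in for the Author dataclass above."""
        name: str
        born: Optional[str] = None  # Optional[...] matches the = None default
        aliases: list[str] = field(default_factory=list)  # fresh list per instance

    a, b = AuthorSketch("N.K. Jemisin"), AuthorSketch("Ann Leckie")
    a.aliases.append("pen name")
    assert b.aliases == []  # default_factory prevents a shared mutable default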
[file name not captured in the mirror; activitypub actor serializer]

@@ -1,5 +1,5 @@
 """ actor serializer """
-from dataclasses import dataclass, field
+from dataclasses import dataclass
 from typing import Dict
 
 from .base_activity import ActivityObject

@@ -35,9 +35,11 @@ class Person(ActivityObject):
     endpoints: Dict = None
     name: str = None
     summary: str = None
-    icon: Image = field(default_factory=lambda: {})
+    icon: Image = None
     bookwyrmUser: bool = False
     manuallyApprovesFollowers: str = False
     discoverable: str = False
     hideFollows: str = False
+    movedTo: str = None
+    alsoKnownAs: dict[str] = None
     type: str = "Person"
[file name not captured in the mirror; activitypub verbs module]

@@ -171,9 +171,19 @@ class Reject(Verb):
     type: str = "Reject"
 
     def action(self, allow_external_connections=True):
-        """reject a follow request"""
-        obj = self.object.to_model(save=False, allow_create=False)
-        obj.reject()
+        """reject a follow or follow request"""
+
+        for model_name in ["UserFollowRequest", "UserFollows", None]:
+            model = apps.get_model(f"bookwyrm.{model_name}") if model_name else None
+            if obj := self.object.to_model(
+                model=model,
+                save=False,
+                allow_create=False,
+                allow_external_connections=allow_external_connections,
+            ):
+                # Reject the first model that can be built.
+                obj.reject()
+                break
 
 
 @dataclass(init=False)

@@ -231,3 +241,30 @@ class Announce(Verb):
     def action(self, allow_external_connections=True):
         """boost"""
         self.to_model(allow_external_connections=allow_external_connections)
+
+
+@dataclass(init=False)
+class Move(Verb):
+    """a user moving an object"""
+
+    object: str
+    type: str = "Move"
+    origin: str = None
+    target: str = None
+
+    def action(self, allow_external_connections=True):
+        """move"""
+
+        object_is_user = resolve_remote_id(remote_id=self.object, model="User")
+
+        if object_is_user:
+            model = apps.get_model("bookwyrm.MoveUser")
+
+            self.to_model(
+                model=model,
+                save=True,
+                allow_external_connections=allow_external_connections,
+            )
+        else:
+            # we might do something with this to move other objects at some point
+            pass
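Reject.action above tries UserFollowRequest, then UserFollows, then no model at all, and acts on the first object that deserializes, using an assignment expression. A standalone sketch of that first-match fallback loop (illustrative parsers, not BookWyrm code):

    def parse_int(text: str):
        try:
            return int(text)
        except ValueError:
            return None

    def parse_float(text: str):
        try:
            return float(text)
        except ValueError:
            return None

    # keep the first candidate that yields a value, then stop
    for parser in (parse_int, parse_float):
        if (value := parser("3.14")) is not None:
            print(f"parsed by {parser.__name__}: {value}")  # parsed by parse_float: 3.14
            break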
[file name not captured in the mirror; activity streams module]

@@ -112,7 +112,7 @@ class ActivityStream(RedisStore):
         trace.get_current_span().set_attribute("status_privacy", status.privacy)
         trace.get_current_span().set_attribute(
             "status_reply_parent_privacy",
-            status.reply_parent.privacy if status.reply_parent else None,
+            status.reply_parent.privacy if status.reply_parent else status.privacy,
         )
         # direct messages don't appear in feeds, direct comments/reviews/etc do
         if status.privacy == "direct" and status.status_type == "Note":

@@ -139,14 +139,14 @@ class ActivityStream(RedisStore):
                 | (
                     Q(following=status.user) & Q(following=status.reply_parent.user)
                 )  # if the user is following both authors
-            ).distinct()
+            )
 
         # only visible to the poster's followers and tagged users
         elif status.privacy == "followers":
             audience = audience.filter(
                 Q(following=status.user)  # if the user is following the author
             )
-        return audience.distinct()
+        return audience.distinct("id")
 
     @tracer.start_as_current_span("ActivityStream.get_audience")
     def get_audience(self, status):

@@ -156,7 +156,7 @@ class ActivityStream(RedisStore):
         status_author = models.User.objects.filter(
             is_active=True, local=True, id=status.user.id
         ).values_list("id", flat=True)
-        return list(set(list(audience) + list(status_author)))
+        return list(set(audience) | set(status_author))
 
     def get_stores_for_users(self, user_ids):
         """convert a list of user ids into redis store ids"""

@@ -183,15 +183,13 @@ class HomeStream(ActivityStream):
     def get_audience(self, status):
         trace.get_current_span().set_attribute("stream_id", self.key)
         audience = super()._get_audience(status)
-        if not audience:
-            return []
         # if the user is following the author
         audience = audience.filter(following=status.user).values_list("id", flat=True)
         # if the user is the post's author
         status_author = models.User.objects.filter(
             is_active=True, local=True, id=status.user.id
         ).values_list("id", flat=True)
-        return list(set(list(audience) + list(status_author)))
+        return list(set(audience) | set(status_author))
 
     def get_statuses_for_user(self, user):
         return models.Status.privacy_filter(

@@ -239,9 +237,7 @@ class BooksStream(ActivityStream):
         )
 
         audience = super()._get_audience(status)
-        if not audience:
-            return models.User.objects.none()
-        return audience.filter(shelfbook__book__parent_work=work).distinct()
+        return audience.filter(shelfbook__book__parent_work=work)
 
     def get_audience(self, status):
         # only show public statuses on the books feed,

@@ -329,10 +325,9 @@ def add_status_on_create(sender, instance, created, *args, **kwargs):
         remove_status_task.delay(instance.id)
         return
 
-    # To avoid creating a zillion unnecessary tasks caused by re-saving the model,
-    # check if it's actually ready to send before we go. We're trusting this was
-    # set correctly by the inbox or view
-    if not instance.ready:
+    # We don't want to create multiple add_status_tasks for each status, and because
+    # the transactions are atomic, on_commit won't run until the status is ready to add.
+    if not created:
         return
 
     # when creating new things, gotta wait on the transaction

@@ -343,6 +338,10 @@ def add_status_on_create(sender, instance, created, *args, **kwargs):
 
 def add_status_on_create_command(sender, instance, created):
     """runs this code only after the database commit completes"""
+    # boosts trigger 'saves" twice, so don't bother duplicating the task
+    if sender == models.Boost and not created:
+        return
+
     priority = STREAMS
     # check if this is an old status, de-prioritize if so
     # (this will happen if federation is very slow, or, more expectedly, on csv import)
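Both get_audience implementations above now merge ids with a set union instead of concatenating lists first. A toy check that the two spellings agree (plain Python lists standing in for the querysets):

    audience = [1, 2, 3]
    status_author = [3, 4]
    old_style = sorted(list(set(list(audience) + list(status_author))))
    new_style = sorted(list(set(audience) | set(status_author)))
    assert old_style == new_style == [1, 2, 3, 4]  # union skips the intermediate lists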
[file name not captured in the mirror; Django app startup module]

@@ -1,4 +1,5 @@
 """Do further startup configuration and initialization"""
+
 import os
 import urllib
 import logging

@@ -14,16 +15,16 @@ def download_file(url, destination):
     """Downloads a file to the given path"""
     try:
         # Ensure our destination directory exists
-        os.makedirs(os.path.dirname(destination))
+        os.makedirs(os.path.dirname(destination), exist_ok=True)
         with urllib.request.urlopen(url) as stream:
             with open(destination, "b+w") as outfile:
                 outfile.write(stream.read())
-    except (urllib.error.HTTPError, urllib.error.URLError):
-        logger.info("Failed to download file %s", url)
-    except OSError:
-        logger.info("Couldn't open font file %s for writing", destination)
-    except:  # pylint: disable=bare-except
-        logger.info("Unknown error in file download")
+    except (urllib.error.HTTPError, urllib.error.URLError) as err:
+        logger.error("Failed to download file %s: %s", url, err)
+    except OSError as err:
+        logger.error("Couldn't open font file %s for writing: %s", destination, err)
+    except Exception as err:  # pylint:disable=broad-except
+        logger.error("Unknown error in file download: %s", err)
 
 
 class BookwyrmConfig(AppConfig):
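The exist_ok=True change above matters because a bare os.makedirs raises once the directory exists, for example when a font is downloaded a second time. A runnable illustration:

    import os
    import tempfile

    target = os.path.join(tempfile.mkdtemp(), "fonts")
    os.makedirs(target)  # first call succeeds
    try:
        os.makedirs(target)  # second call raises FileExistsError...
    except FileExistsError:
        pass
    os.makedirs(target, exist_ok=True)  # ...while exist_ok=True is idempotent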
[file name not captured in the mirror; local book search module]

@@ -1,35 +1,69 @@
 """ using a bookwyrm instance as a source of book data """
+from __future__ import annotations
 from dataclasses import asdict, dataclass
 from functools import reduce
 import operator
+from typing import Optional, Union, Any, Literal, overload
 
 from django.contrib.postgres.search import SearchRank, SearchQuery
 from django.db.models import F, Q
+from django.db.models.query import QuerySet
 
 from bookwyrm import models
 from bookwyrm import connectors
 from bookwyrm.settings import MEDIA_FULL_URL
 
 
+@overload
+def search(
+    query: str,
+    *,
+    min_confidence: float = 0,
+    filters: Optional[list[Any]] = None,
+    return_first: Literal[False],
+) -> QuerySet[models.Edition]:
+    ...
+
+
+@overload
+def search(
+    query: str,
+    *,
+    min_confidence: float = 0,
+    filters: Optional[list[Any]] = None,
+    return_first: Literal[True],
+) -> Optional[models.Edition]:
+    ...
+
+
 # pylint: disable=arguments-differ
-def search(query, min_confidence=0, filters=None, return_first=False):
+def search(
+    query: str,
+    *,
+    min_confidence: float = 0,
+    filters: Optional[list[Any]] = None,
+    return_first: bool = False,
+    books: Optional[QuerySet[models.Edition]] = None,
+) -> Union[Optional[models.Edition], QuerySet[models.Edition]]:
     """search your local database"""
     filters = filters or []
     if not query:
-        return []
+        return None if return_first else []
     query = query.strip()
 
     results = None
     # first, try searching unique identifiers
     # unique identifiers never have spaces, title/author usually do
     if not " " in query:
-        results = search_identifiers(query, *filters, return_first=return_first)
+        results = search_identifiers(
+            query, *filters, return_first=return_first, books=books
+        )
 
     # if there were no identifier results...
     if not results:
         # then try searching title/author
         results = search_title_author(
-            query, min_confidence, *filters, return_first=return_first
+            query, min_confidence, *filters, return_first=return_first, books=books
         )
     return results

@@ -66,8 +100,18 @@ def format_search_result(search_result):
     ).json()
 
 
-def search_identifiers(query, *filters, return_first=False):
-    """tries remote_id, isbn; defined as dedupe fields on the model"""
+def search_identifiers(
+    query,
+    *filters,
+    return_first=False,
+    books=None,
+) -> Union[Optional[models.Edition], QuerySet[models.Edition]]:
+    """search Editions by deduplication fields
+
+    Best for cases when we can assume someone is searching for an exact match on
+    commonly unique data identifiers like isbn or specific library ids.
+    """
+    books = books or models.Edition.objects
     if connectors.maybe_isbn(query):
         # Oh did you think the 'S' in ISBN stood for 'standard'?
         normalized_isbn = query.strip().upper().rjust(10, "0")

@@ -78,7 +122,7 @@ def search_identifiers(query, *filters, return_first=False):
         for f in models.Edition._meta.get_fields()
         if hasattr(f, "deduplication_field") and f.deduplication_field
     ]
-    results = models.Edition.objects.filter(
+    results = books.filter(
         *filters, reduce(operator.or_, (Q(**f) for f in or_filters))
     ).distinct()

@@ -87,11 +131,18 @@ def search_identifiers(query, *filters, return_first=False):
     return results
 
 
-def search_title_author(query, min_confidence, *filters, return_first=False):
+def search_title_author(
+    query,
+    min_confidence,
+    *filters,
+    return_first=False,
+    books=None,
+) -> QuerySet[models.Edition]:
     """searches for title and author"""
+    books = books or models.Edition.objects
     query = SearchQuery(query, config="simple") | SearchQuery(query, config="english")
     results = (
-        models.Edition.objects.filter(*filters, search_vector=query)
+        books.filter(*filters, search_vector=query)
         .annotate(rank=SearchRank(F("search_vector"), query))
         .filter(rank__gt=min_confidence)
         .order_by("-rank")

@@ -102,7 +153,7 @@ def search_title_author(query, min_confidence, *filters, return_first=False):
 
     # filter out multiple editions of the same work
     list_results = []
-    for work_id in set(editions_of_work[:30]):
+    for work_id in editions_of_work[:30]:
         result = (
             results.filter(parent_work=work_id)
             .order_by("-rank", "-edition_rank")

@@ -122,11 +173,11 @@ class SearchResult:
     title: str
     key: str
     connector: object
-    view_link: str = None
-    author: str = None
-    year: str = None
-    cover: str = None
-    confidence: int = 1
+    view_link: Optional[str] = None
+    author: Optional[str] = None
+    year: Optional[str] = None
+    cover: Optional[str] = None
+    confidence: float = 1.0
 
     def __repr__(self):
         # pylint: disable=consider-using-f-string
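search_identifiers above normalizes ISBN-like queries by padding them to ten characters before matching. A standalone illustration of what that rjust call does:

    # Short ISBN-10s get leading zeros, so "316229296" and "0316229296"
    # match the same stored identifier.
    query = " 316229296 "
    normalized_isbn = query.strip().upper().rjust(10, "0")
    assert normalized_isbn == "0316229296"

    # queries already at or beyond ten characters are left unpadded
    assert "0316229296".rjust(10, "0") == "0316229296"
    assert "9780316229296".rjust(10, "0") == "9780316229296"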
[file name not captured in the mirror; abstract book data connector module]

@@ -1,7 +1,11 @@
 """ functionality outline for a book data connector """
+from __future__ import annotations
 from abc import ABC, abstractmethod
+from typing import Optional, TypedDict, Any, Callable, Union, Iterator
 from urllib.parse import quote_plus
-import imghdr
+
+# pylint: disable-next=deprecated-module
+import imghdr  # Deprecated in 3.11 for removal in 3.13; no good alternative yet
 import logging
 import re
 import asyncio

@@ -16,33 +20,38 @@ from bookwyrm import activitypub, models, settings
 from bookwyrm.settings import USER_AGENT
 from .connector_manager import load_more_data, ConnectorException, raise_not_valid_url
 from .format_mappings import format_mappings
+
+from ..book_search import SearchResult
 
 logger = logging.getLogger(__name__)
 
+JsonDict = dict[str, Any]
+
+
+class ConnectorResults(TypedDict):
+    """TypedDict for results returned by connector"""
+
+    connector: AbstractMinimalConnector
+    results: list[SearchResult]
+
 
 class AbstractMinimalConnector(ABC):
     """just the bare bones, for other bookwyrm instances"""
 
-    def __init__(self, identifier):
+    def __init__(self, identifier: str):
         # load connector settings
         info = models.Connector.objects.get(identifier=identifier)
         self.connector = info
 
-        # the things in the connector model to copy over
-        self_fields = [
-            "base_url",
-            "books_url",
-            "covers_url",
-            "search_url",
-            "isbn_search_url",
-            "name",
-            "identifier",
-        ]
-        for field in self_fields:
-            setattr(self, field, getattr(info, field))
+        self.base_url = info.base_url
+        self.books_url = info.books_url
+        self.covers_url = info.covers_url
+        self.search_url = info.search_url
+        self.isbn_search_url = info.isbn_search_url
+        self.name = info.name
+        self.identifier = info.identifier
 
-    def get_search_url(self, query):
+    def get_search_url(self, query: str) -> str:
         """format the query url"""
         # Check if the query resembles an ISBN
         if maybe_isbn(query) and self.isbn_search_url and self.isbn_search_url != "":

@@ -54,13 +63,21 @@ class AbstractMinimalConnector(ABC):
         # searched as free text. This, instead, only searches isbn if it's isbn-y
         return f"{self.search_url}{quote_plus(query)}"
 
-    def process_search_response(self, query, data, min_confidence):
+    def process_search_response(
+        self, query: str, data: Any, min_confidence: float
+    ) -> list[SearchResult]:
         """Format the search results based on the format of the query"""
         if maybe_isbn(query):
             return list(self.parse_isbn_search_data(data))[:10]
         return list(self.parse_search_data(data, min_confidence))[:10]
 
-    async def get_results(self, session, url, min_confidence, query):
+    async def get_results(
+        self,
+        session: aiohttp.ClientSession,
+        url: str,
+        min_confidence: float,
+        query: str,
+    ) -> Optional[ConnectorResults]:
         """try this specific connector"""
         # pylint: disable=line-too-long
         headers = {

@@ -74,55 +91,63 @@ class AbstractMinimalConnector(ABC):
             async with session.get(url, headers=headers, params=params) as response:
                 if not response.ok:
                     logger.info("Unable to connect to %s: %s", url, response.reason)
-                    return
+                    return None
 
                 try:
                     raw_data = await response.json()
                 except aiohttp.client_exceptions.ContentTypeError as err:
                     logger.exception(err)
-                    return
+                    return None
 
-                return {
-                    "connector": self,
-                    "results": self.process_search_response(
+                return ConnectorResults(
+                    connector=self,
+                    results=self.process_search_response(
                         query, raw_data, min_confidence
                     ),
-                }
+                )
         except asyncio.TimeoutError:
             logger.info("Connection timed out for url: %s", url)
         except aiohttp.ClientError as err:
             logger.info(err)
+        return None
 
     @abstractmethod
-    def get_or_create_book(self, remote_id):
+    def get_or_create_book(self, remote_id: str) -> Optional[models.Book]:
         """pull up a book record by whatever means possible"""
 
     @abstractmethod
-    def parse_search_data(self, data, min_confidence):
+    def parse_search_data(
+        self, data: Any, min_confidence: float
+    ) -> Iterator[SearchResult]:
        """turn the result json from a search into a list"""
 
     @abstractmethod
-    def parse_isbn_search_data(self, data):
+    def parse_isbn_search_data(self, data: Any) -> Iterator[SearchResult]:
        """turn the result json from a search into a list"""
 
 
 class AbstractConnector(AbstractMinimalConnector):
     """generic book data connector"""
 
-    def __init__(self, identifier):
+    generated_remote_link_field = ""
+
+    def __init__(self, identifier: str):
         super().__init__(identifier)
         # fields we want to look for in book data to copy over
         # title we handle separately.
-        self.book_mappings = []
+        self.book_mappings: list[Mapping] = []
+        self.author_mappings: list[Mapping] = []
 
-    def get_or_create_book(self, remote_id):
+    def get_or_create_book(self, remote_id: str) -> Optional[models.Book]:
         """translate arbitrary json into an Activitypub dataclass"""
         # first, check if we have the origin_id saved
         existing = models.Edition.find_existing_by_remote_id(
             remote_id
         ) or models.Work.find_existing_by_remote_id(remote_id)
         if existing:
-            if hasattr(existing, "default_edition"):
+            if hasattr(existing, "default_edition") and isinstance(
+                existing.default_edition, models.Edition
+            ):
                 return existing.default_edition
             return existing

@@ -154,6 +179,9 @@ class AbstractConnector(AbstractMinimalConnector):
         )
         # this will dedupe automatically
         work = work_activity.to_model(model=models.Work, overwrite=False)
+        if not work:
+            return None
 
         for author in self.get_authors_from_data(work_data):
             work.authors.add(author)

@@ -161,12 +189,21 @@ class AbstractConnector(AbstractMinimalConnector):
             load_more_data.delay(self.connector.id, work.id)
         return edition
 
-    def get_book_data(self, remote_id):  # pylint: disable=no-self-use
+    def get_book_data(self, remote_id: str) -> JsonDict:  # pylint: disable=no-self-use
         """this allows connectors to override the default behavior"""
         return get_data(remote_id)
 
-    def create_edition_from_data(self, work, edition_data, instance=None):
+    def create_edition_from_data(
+        self,
+        work: models.Work,
+        edition_data: Union[str, JsonDict],
+        instance: Optional[models.Edition] = None,
+    ) -> Optional[models.Edition]:
         """if we already have the work, we're ready"""
+        if isinstance(edition_data, str):
+            # We don't expect a string here
+            return None
+
         mapped_data = dict_from_mappings(edition_data, self.book_mappings)
         mapped_data["work"] = work.remote_id
         edition_activity = activitypub.Edition(**mapped_data)

@@ -174,6 +211,9 @@ class AbstractConnector(AbstractMinimalConnector):
             model=models.Edition, overwrite=False, instance=instance
         )
 
+        if not edition:
+            return None
+
         # if we're updating an existing instance, we don't need to load authors
         if instance:
             return edition

@@ -190,7 +230,9 @@ class AbstractConnector(AbstractMinimalConnector):
 
         return edition
 
-    def get_or_create_author(self, remote_id, instance=None):
+    def get_or_create_author(
+        self, remote_id: str, instance: Optional[models.Author] = None
+    ) -> Optional[models.Author]:
         """load that author"""
         if not instance:
             existing = models.Author.find_existing_by_remote_id(remote_id)

@@ -210,46 +252,51 @@ class AbstractConnector(AbstractMinimalConnector):
             model=models.Author, overwrite=False, instance=instance
         )
 
-    def get_remote_id_from_model(self, obj):
+    def get_remote_id_from_model(self, obj: models.BookDataModel) -> Optional[str]:
         """given the data stored, how can we look this up"""
-        return getattr(obj, getattr(self, "generated_remote_link_field"))
+        remote_id: Optional[str] = getattr(obj, self.generated_remote_link_field)
+        return remote_id
 
-    def update_author_from_remote(self, obj):
+    def update_author_from_remote(self, obj: models.Author) -> Optional[models.Author]:
         """load the remote data from this connector and add it to an existing author"""
         remote_id = self.get_remote_id_from_model(obj)
         if not remote_id:
             return None
         return self.get_or_create_author(remote_id, instance=obj)
 
-    def update_book_from_remote(self, obj):
+    def update_book_from_remote(self, obj: models.Edition) -> Optional[models.Edition]:
         """load the remote data from this connector and add it to an existing book"""
         remote_id = self.get_remote_id_from_model(obj)
         if not remote_id:
             return None
         data = self.get_book_data(remote_id)
         return self.create_edition_from_data(obj.parent_work, data, instance=obj)
 
     @abstractmethod
-    def is_work_data(self, data):
+    def is_work_data(self, data: JsonDict) -> bool:
         """differentiate works and editions"""
 
     @abstractmethod
-    def get_edition_from_work_data(self, data):
+    def get_edition_from_work_data(self, data: JsonDict) -> JsonDict:
         """every work needs at least one edition"""
 
     @abstractmethod
-    def get_work_from_edition_data(self, data):
+    def get_work_from_edition_data(self, data: JsonDict) -> JsonDict:
         """every edition needs a work"""
 
     @abstractmethod
-    def get_authors_from_data(self, data):
+    def get_authors_from_data(self, data: JsonDict) -> Iterator[models.Author]:
         """load author data"""
 
     @abstractmethod
-    def expand_book_data(self, book):
+    def expand_book_data(self, book: models.Book) -> None:
         """get more info on a book"""
 
 
-def dict_from_mappings(data, mappings):
+def dict_from_mappings(data: JsonDict, mappings: list[Mapping]) -> JsonDict:
     """create a dict in Activitypub format, using mappings supplies by
     the subclass"""
-    result = {}
+    result: JsonDict = {}
|
||||
for mapping in mappings:
|
||||
# sometimes there are multiple mappings for one field, don't
|
||||
# overwrite earlier writes in that case
|
||||
|
@ -259,7 +306,11 @@ def dict_from_mappings(data, mappings):
|
|||
return result
|
||||
|
||||
|
||||
def get_data(url, params=None, timeout=settings.QUERY_TIMEOUT):
|
||||
def get_data(
|
||||
url: str,
|
||||
params: Optional[dict[str, str]] = None,
|
||||
timeout: int = settings.QUERY_TIMEOUT,
|
||||
) -> JsonDict:
|
||||
"""wrapper for request.get"""
|
||||
# check if the url is blocked
|
||||
raise_not_valid_url(url)
|
||||
|
@ -292,10 +343,15 @@ def get_data(url, params=None, timeout=settings.QUERY_TIMEOUT):
|
|||
logger.info(err)
|
||||
raise ConnectorException(err)
|
||||
|
||||
if not isinstance(data, dict):
|
||||
raise ConnectorException("Unexpected data format")
|
||||
|
||||
return data
|
||||
|
||||
|
||||
def get_image(url, timeout=10):
|
||||
def get_image(
|
||||
url: str, timeout: int = 10
|
||||
) -> Union[tuple[ContentFile[bytes], str], tuple[None, None]]:
|
||||
"""wrapper for requesting an image"""
|
||||
raise_not_valid_url(url)
|
||||
try:
|
||||
|
@ -325,14 +381,19 @@ def get_image(url, timeout=10):
|
|||
class Mapping:
|
||||
"""associate a local database field with a field in an external dataset"""
|
||||
|
||||
def __init__(self, local_field, remote_field=None, formatter=None):
|
||||
def __init__(
|
||||
self,
|
||||
local_field: str,
|
||||
remote_field: Optional[str] = None,
|
||||
formatter: Optional[Callable[[Any], Any]] = None,
|
||||
):
|
||||
noop = lambda x: x
|
||||
|
||||
self.local_field = local_field
|
||||
self.remote_field = remote_field or local_field
|
||||
self.formatter = formatter or noop
|
||||
|
||||
def get_value(self, data):
|
||||
def get_value(self, data: JsonDict) -> Optional[Any]:
|
||||
"""pull a field from incoming json and return the formatted version"""
|
||||
value = data.get(self.remote_field)
|
||||
if not value:
|
||||
|
@ -343,7 +404,7 @@ class Mapping:
|
|||
return None
|
||||
|
||||
|
||||
def infer_physical_format(format_text):
|
||||
def infer_physical_format(format_text: str) -> Optional[str]:
|
||||
"""try to figure out what the standardized format is from the free value"""
|
||||
format_text = format_text.lower()
|
||||
if format_text in format_mappings:
|
||||
|
@ -356,7 +417,7 @@ def infer_physical_format(format_text):
|
|||
return matches[0]
|
||||
|
||||
|
||||
def unique_physical_format(format_text):
|
||||
def unique_physical_format(format_text: str) -> Optional[str]:
|
||||
"""only store the format if it isn't directly in the format mappings"""
|
||||
format_text = format_text.lower()
|
||||
if format_text in format_mappings:
|
||||
|
@ -365,7 +426,7 @@ def unique_physical_format(format_text):
|
|||
return format_text
|
||||
|
||||
|
||||
def maybe_isbn(query):
|
||||
def maybe_isbn(query: str) -> bool:
|
||||
"""check if a query looks like an isbn"""
|
||||
isbn = re.sub(r"[\W_]", "", query) # removes filler characters
|
||||
# ISBNs must be numeric except an ISBN10 checkdigit can be 'X'
|
||||
|
|
|
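
The Mapping hunks above only add annotations, but the pattern they annotate is easy to lose in diff form: each Mapping pairs a local field with a remote key and an optional formatter, and dict_from_mappings refuses to overwrite earlier writes. A minimal standalone sketch of that idea (FieldMapping and the sample data are invented for illustration, not bookwyrm's API):

    from typing import Any, Callable, Optional


    class FieldMapping:
        """sketch of the Mapping pattern: local field, remote field, optional formatter"""

        def __init__(
            self,
            local_field: str,
            remote_field: Optional[str] = None,
            formatter: Optional[Callable[[Any], Any]] = None,
        ):
            self.local_field = local_field
            self.remote_field = remote_field or local_field
            self.formatter = formatter or (lambda x: x)

        def get_value(self, data: dict[str, Any]) -> Optional[Any]:
            value = data.get(self.remote_field)
            return self.formatter(value) if value else None


    mappings = [
        FieldMapping("title"),
        FieldMapping("isbn_13", remote_field="isbn13"),
        FieldMapping("authors", formatter=lambda a: a[0]),
    ]
    remote = {"title": "Whose Body?", "isbn13": "9780061043574", "authors": ["Sayers"]}
    # later mappings must not overwrite earlier writes, mirroring dict_from_mappings
    result: dict[str, Any] = {}
    for mapping in mappings:
        if mapping.local_field not in result:
            result[mapping.local_field] = mapping.get_value(remote)
    print(result)  # {'title': 'Whose Body?', 'isbn_13': '9780061043574', 'authors': 'Sayers'}
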
@@ -1,4 +1,7 @@
 """ using another bookwyrm instance as a source of book data """
+from __future__ import annotations
+from typing import Any, Iterator
+
 from bookwyrm import activitypub, models
+from bookwyrm.book_search import SearchResult
 from .abstract_connector import AbstractMinimalConnector
@@ -7,15 +10,19 @@ from .abstract_connector import AbstractMinimalConnector
 class Connector(AbstractMinimalConnector):
     """this is basically just for search"""
 
-    def get_or_create_book(self, remote_id):
+    def get_or_create_book(self, remote_id: str) -> models.Edition:
         return activitypub.resolve_remote_id(remote_id, model=models.Edition)
 
-    def parse_search_data(self, data, min_confidence):
+    def parse_search_data(
+        self, data: list[dict[str, Any]], min_confidence: float
+    ) -> Iterator[SearchResult]:
         for search_result in data:
             search_result["connector"] = self
             yield SearchResult(**search_result)
 
-    def parse_isbn_search_data(self, data):
+    def parse_isbn_search_data(
+        self, data: list[dict[str, Any]]
+    ) -> Iterator[SearchResult]:
         for search_result in data:
             search_result["connector"] = self
             yield SearchResult(**search_result)
@@ -1,8 +1,11 @@
 """ interface with whatever connectors the app has """
+from __future__ import annotations
 import asyncio
 import importlib
 import ipaddress
 import logging
+from asyncio import Future
+from typing import Iterator, Any, Optional, Union, overload, Literal
 from urllib.parse import urlparse
 
 import aiohttp
@@ -12,6 +15,8 @@ from django.db.models import signals
 from requests import HTTPError
 
 from bookwyrm import book_search, models
+from bookwyrm.book_search import SearchResult
+from bookwyrm.connectors import abstract_connector
 from bookwyrm.settings import SEARCH_TIMEOUT
 from bookwyrm.tasks import app, CONNECTORS
@@ -22,11 +27,15 @@ class ConnectorException(HTTPError):
     """when the connector can't do what was asked"""
 
 
-async def async_connector_search(query, items, min_confidence):
+async def async_connector_search(
+    query: str,
+    items: list[tuple[str, abstract_connector.AbstractConnector]],
+    min_confidence: float,
+) -> list[Optional[abstract_connector.ConnectorResults]]:
     """Try a number of requests simultaneously"""
     timeout = aiohttp.ClientTimeout(total=SEARCH_TIMEOUT)
     async with aiohttp.ClientSession(timeout=timeout) as session:
-        tasks = []
+        tasks: list[Future[Optional[abstract_connector.ConnectorResults]]] = []
         for url, connector in items:
             tasks.append(
                 asyncio.ensure_future(
@@ -35,14 +44,29 @@ async def async_connector_search(query, items, min_confidence):
                 )
 
         results = await asyncio.gather(*tasks)
-        return results
+        return list(results)
 
 
-def search(query, min_confidence=0.1, return_first=False):
+@overload
+def search(
+    query: str, *, min_confidence: float = 0.1, return_first: Literal[False]
+) -> list[abstract_connector.ConnectorResults]:
+    ...
+
+
+@overload
+def search(
+    query: str, *, min_confidence: float = 0.1, return_first: Literal[True]
+) -> Optional[SearchResult]:
+    ...
+
+
+def search(
+    query: str, *, min_confidence: float = 0.1, return_first: bool = False
+) -> Union[list[abstract_connector.ConnectorResults], Optional[SearchResult]]:
     """find books based on arbitrary keywords"""
     if not query:
-        return []
-    results = []
+        return None if return_first else []
 
     items = []
     for connector in get_connectors():
@@ -57,8 +81,12 @@ def search(query, min_confidence=0.1, return_first=False):
         items.append((url, connector))
 
     # load as many results as we can
-    results = asyncio.run(async_connector_search(query, items, min_confidence))
-    results = [r for r in results if r]
+    # failed requests will return None, so filter those out
+    results = [
+        r
+        for r in asyncio.run(async_connector_search(query, items, min_confidence))
+        if r
+    ]
 
     if return_first:
         # find the best result from all the responses and return that
@@ -66,11 +94,12 @@ def search(query, min_confidence=0.1, return_first=False):
         all_results = sorted(all_results, key=lambda r: r.confidence, reverse=True)
         return all_results[0] if all_results else None
 
-    # failed requests will return None, so filter those out
     return results
 
 
-def first_search_result(query, min_confidence=0.1):
+def first_search_result(
+    query: str, min_confidence: float = 0.1
+) -> Union[models.Edition, SearchResult, None]:
     """search until you find a result that fits"""
     # try local search first
     result = book_search.search(query, min_confidence=min_confidence, return_first=True)
@@ -80,18 +109,20 @@ def first_search_result(query, min_confidence=0.1):
     return search(query, min_confidence=min_confidence, return_first=True) or None
 
 
-def get_connectors():
+def get_connectors() -> Iterator[abstract_connector.AbstractConnector]:
     """load all connectors"""
     for info in models.Connector.objects.filter(active=True).order_by("priority").all():
         yield load_connector(info)
 
 
-def get_or_create_connector(remote_id):
+def get_or_create_connector(remote_id: str) -> abstract_connector.AbstractConnector:
     """get the connector related to the object's server"""
     url = urlparse(remote_id)
-    identifier = url.netloc
+    identifier = url.hostname
     if not identifier:
-        raise ValueError("Invalid remote id")
+        raise ValueError(f"Invalid remote id: {remote_id}")
+
+    base_url = f"{url.scheme}://{url.netloc}"
 
     try:
         connector_info = models.Connector.objects.get(identifier=identifier)
@@ -99,10 +130,10 @@ def get_or_create_connector(remote_id):
         connector_info = models.Connector.objects.create(
             identifier=identifier,
             connector_file="bookwyrm_connector",
-            base_url=f"https://{identifier}",
-            books_url=f"https://{identifier}/book",
-            covers_url=f"https://{identifier}/images/covers",
-            search_url=f"https://{identifier}/search?q=",
+            base_url=base_url,
+            books_url=f"{base_url}/book",
+            covers_url=f"{base_url}/images/covers",
+            search_url=f"{base_url}/search?q=",
             priority=2,
         )
@@ -110,7 +141,7 @@ def get_or_create_connector(remote_id):
 
 
 @app.task(queue=CONNECTORS)
-def load_more_data(connector_id, book_id):
+def load_more_data(connector_id: str, book_id: str) -> None:
     """background the work of getting all 10,000 editions of LoTR"""
     connector_info = models.Connector.objects.get(id=connector_id)
     connector = load_connector(connector_info)
@@ -119,7 +150,9 @@ def load_more_data(connector_id, book_id):
 
 
 @app.task(queue=CONNECTORS)
-def create_edition_task(connector_id, work_id, data):
+def create_edition_task(
+    connector_id: int, work_id: int, data: Union[str, abstract_connector.JsonDict]
+) -> None:
     """separate task for each of the 10,000 editions of LoTR"""
     connector_info = models.Connector.objects.get(id=connector_id)
     connector = load_connector(connector_info)
@@ -127,30 +160,41 @@ def create_edition_task(connector_id, work_id, data):
     connector.create_edition_from_data(work, data)
 
 
-def load_connector(connector_info):
+def load_connector(
+    connector_info: models.Connector,
+) -> abstract_connector.AbstractConnector:
     """instantiate the connector class"""
     connector = importlib.import_module(
         f"bookwyrm.connectors.{connector_info.connector_file}"
     )
-    return connector.Connector(connector_info.identifier)
+    return connector.Connector(connector_info.identifier)  # type: ignore[no-any-return]
 
 
 @receiver(signals.post_save, sender="bookwyrm.FederatedServer")
 # pylint: disable=unused-argument
-def create_connector(sender, instance, created, *args, **kwargs):
+def create_connector(
+    sender: Any,
+    instance: models.FederatedServer,
+    created: Any,
+    *args: Any,
+    **kwargs: Any,
+) -> None:
     """create a connector to an external bookwyrm server"""
     if instance.application_type == "bookwyrm":
         get_or_create_connector(f"https://{instance.server_name}")
 
 
-def raise_not_valid_url(url):
+def raise_not_valid_url(url: str) -> None:
     """do some basic reality checks on the url"""
     parsed = urlparse(url)
     if not parsed.scheme in ["http", "https"]:
         raise ConnectorException("Invalid scheme: ", url)
 
+    if not parsed.hostname:
+        raise ConnectorException("Hostname missing: ", url)
+
     try:
-        ipaddress.ip_address(parsed.netloc)
+        ipaddress.ip_address(parsed.hostname)
         raise ConnectorException("Provided url is an IP address: ", url)
     except ValueError:
         # it's not an IP address, which is good
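
The new search() overloads are the main typing device in the connector_manager hunks: the declared return type depends on the runtime value of return_first. A standalone sketch of the same Literal-based narrowing (the toy find() function is invented, not bookwyrm's API):

    from typing import Literal, Optional, Union, overload


    @overload
    def find(query: str, *, first: Literal[True]) -> Optional[str]: ...
    @overload
    def find(query: str, *, first: Literal[False] = False) -> list[str]: ...


    def find(query: str, *, first: bool = False) -> Union[list[str], Optional[str]]:
        """toy lookup: one implementation, two statically distinguishable call shapes"""
        hits = [w for w in ("work", "edition", "author") if query in w]
        if first:
            return hits[0] if hits else None
        return hits


    exact: Optional[str] = find("ed", first=True)  # type checker sees Optional[str]
    many: list[str] = find("o")                    # type checker sees list[str]
    print(exact, many)
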
@@ -1,9 +1,10 @@
 """ inventaire data connector """
 import re
+from typing import Any, Union, Optional, Iterator, Iterable
 
 from bookwyrm import models
 from bookwyrm.book_search import SearchResult
-from .abstract_connector import AbstractConnector, Mapping
+from .abstract_connector import AbstractConnector, Mapping, JsonDict
 from .abstract_connector import get_data
 from .connector_manager import ConnectorException, create_edition_task
@@ -13,7 +14,7 @@ class Connector(AbstractConnector):
 
     generated_remote_link_field = "inventaire_id"
 
-    def __init__(self, identifier):
+    def __init__(self, identifier: str):
         super().__init__(identifier)
 
         get_first = lambda a: a[0]
@@ -60,13 +61,13 @@ class Connector(AbstractConnector):
             Mapping("died", remote_field="wdt:P570", formatter=get_first),
         ] + shared_mappings
 
-    def get_remote_id(self, value):
+    def get_remote_id(self, value: str) -> str:
         """convert an id/uri into a url"""
         return f"{self.books_url}?action=by-uris&uris={value}"
 
-    def get_book_data(self, remote_id):
+    def get_book_data(self, remote_id: str) -> JsonDict:
         data = get_data(remote_id)
-        extracted = list(data.get("entities").values())
+        extracted = list(data.get("entities", {}).values())
         try:
             data = extracted[0]
         except (KeyError, IndexError):
@@ -74,10 +75,16 @@ class Connector(AbstractConnector):
         # flatten the data so that images, uri, and claims are on the same level
         return {
             **data.get("claims", {}),
-            **{k: data.get(k) for k in ["uri", "image", "labels", "sitelinks", "type"]},
+            **{
+                k: data.get(k)
+                for k in ["uri", "image", "labels", "sitelinks", "type"]
+                if k in data
+            },
         }
 
-    def parse_search_data(self, data, min_confidence):
+    def parse_search_data(
+        self, data: JsonDict, min_confidence: float
+    ) -> Iterator[SearchResult]:
         for search_result in data.get("results", []):
             images = search_result.get("image")
             cover = f"{self.covers_url}/img/entities/{images[0]}" if images else None
@@ -96,7 +103,7 @@ class Connector(AbstractConnector):
                 connector=self,
             )
 
-    def parse_isbn_search_data(self, data):
+    def parse_isbn_search_data(self, data: JsonDict) -> Iterator[SearchResult]:
         """got some data"""
         results = data.get("entities")
         if not results:
@@ -114,35 +121,44 @@ class Connector(AbstractConnector):
                 connector=self,
             )
 
-    def is_work_data(self, data):
+    def is_work_data(self, data: JsonDict) -> bool:
         return data.get("type") == "work"
 
-    def load_edition_data(self, work_uri):
+    def load_edition_data(self, work_uri: str) -> JsonDict:
         """get a list of editions for a work"""
         # pylint: disable=line-too-long
         url = f"{self.books_url}?action=reverse-claims&property=wdt:P629&value={work_uri}&sort=true"
         return get_data(url)
 
-    def get_edition_from_work_data(self, data):
-        data = self.load_edition_data(data.get("uri"))
+    def get_edition_from_work_data(self, data: JsonDict) -> JsonDict:
+        work_uri = data.get("uri")
+        if not work_uri:
+            raise ConnectorException("Invalid URI")
+        data = self.load_edition_data(work_uri)
         try:
             uri = data.get("uris", [])[0]
         except IndexError:
             raise ConnectorException("Invalid book data")
         return self.get_book_data(self.get_remote_id(uri))
 
-    def get_work_from_edition_data(self, data):
-        uri = data.get("wdt:P629", [None])[0]
+    def get_work_from_edition_data(self, data: JsonDict) -> JsonDict:
+        try:
+            uri = data.get("wdt:P629", [])[0]
+        except IndexError:
+            raise ConnectorException("Invalid book data")
+
         if not uri:
             raise ConnectorException("Invalid book data")
         return self.get_book_data(self.get_remote_id(uri))
 
-    def get_authors_from_data(self, data):
+    def get_authors_from_data(self, data: JsonDict) -> Iterator[models.Author]:
         authors = data.get("wdt:P50", [])
         for author in authors:
-            yield self.get_or_create_author(self.get_remote_id(author))
+            model = self.get_or_create_author(self.get_remote_id(author))
+            if model:
+                yield model
 
-    def expand_book_data(self, book):
+    def expand_book_data(self, book: models.Book) -> None:
         work = book
         # go from the edition to the work, if necessary
         if isinstance(book, models.Edition):
@@ -154,11 +170,16 @@ class Connector(AbstractConnector):
             # who knows, man
             return
 
-        for edition_uri in edition_options.get("uris"):
+        for edition_uri in edition_options.get("uris", []):
             remote_id = self.get_remote_id(edition_uri)
             create_edition_task.delay(self.connector.id, work.id, remote_id)
 
-    def create_edition_from_data(self, work, edition_data, instance=None):
+    def create_edition_from_data(
+        self,
+        work: models.Work,
+        edition_data: Union[str, JsonDict],
+        instance: Optional[models.Edition] = None,
+    ) -> Optional[models.Edition]:
         """pass in the url as data and then call the version in abstract connector"""
         if isinstance(edition_data, str):
             try:
@@ -168,22 +189,26 @@ class Connector(AbstractConnector):
                 return None
         return super().create_edition_from_data(work, edition_data, instance=instance)
 
-    def get_cover_url(self, cover_blob, *_):
+    def get_cover_url(
+        self, cover_blob: Union[list[JsonDict], JsonDict], *_: Any
+    ) -> Optional[str]:
         """format the relative cover url into an absolute one:
         {"url": "/img/entities/e794783f01b9d4f897a1ea9820b96e00d346994f"}
         """
         # covers may or may not be a list
-        if isinstance(cover_blob, list) and len(cover_blob) > 0:
+        if isinstance(cover_blob, list):
+            if len(cover_blob) == 0:
+                return None
             cover_blob = cover_blob[0]
         cover_id = cover_blob.get("url")
-        if not cover_id:
+        if not isinstance(cover_id, str):
             return None
         # cover may or may not be an absolute url already
         if re.match(r"^http", cover_id):
             return cover_id
         return f"{self.covers_url}{cover_id}"
 
-    def resolve_keys(self, keys):
+    def resolve_keys(self, keys: Iterable[str]) -> list[str]:
         """cool, it's "wd:Q3156592" now what the heck does that mean"""
         results = []
         for uri in keys:
@@ -191,10 +216,10 @@ class Connector(AbstractConnector):
                 data = self.get_book_data(self.get_remote_id(uri))
             except ConnectorException:
                 continue
-            results.append(get_language_code(data.get("labels")))
+            results.append(get_language_code(data.get("labels", {})))
         return results
 
-    def get_description(self, links):
+    def get_description(self, links: JsonDict) -> str:
         """grab an extracted excerpt from wikipedia"""
         link = links.get("enwiki")
         if not link:
@@ -204,15 +229,15 @@ class Connector(AbstractConnector):
             data = get_data(url)
         except ConnectorException:
             return ""
-        return data.get("extract")
+        return data.get("extract", "")
 
-    def get_remote_id_from_model(self, obj):
+    def get_remote_id_from_model(self, obj: models.BookDataModel) -> str:
         """use get_remote_id to figure out the link from a model obj"""
         remote_id_value = obj.inventaire_id
         return self.get_remote_id(remote_id_value)
 
 
-def get_language_code(options, code="en"):
+def get_language_code(options: JsonDict, code: str = "en") -> Any:
     """when there are a bunch of translation but we need a single field"""
     result = options.get(code)
     if result:
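
One behavioral change worth isolating from the inventaire hunks: get_book_data now copies only top-level keys that actually exist, so absent keys no longer show up as None entries after flattening. A standalone sketch of the flattening (the sample entity data is invented):

    entity = {
        "uri": "wd:Q170583",
        "type": "work",
        "claims": {"wdt:P50": ["wd:Q34981"], "wdt:P577": ["1923"]},
        # note: no "image", "labels", or "sitelinks" key at all
    }

    flattened = {
        **entity.get("claims", {}),
        **{
            k: entity[k]
            for k in ["uri", "image", "labels", "sitelinks", "type"]
            if k in entity
        },
    }
    # claims, uri, and type end up on one level; missing keys are simply absent
    print(flattened)
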
@@ -1,9 +1,13 @@
 """ openlibrary data connector """
 import re
+from typing import Any, Optional, Union, Iterator, Iterable
+
+from markdown import markdown
 
 from bookwyrm import models
 from bookwyrm.book_search import SearchResult
-from .abstract_connector import AbstractConnector, Mapping
+from bookwyrm.utils.sanitizer import clean
+from .abstract_connector import AbstractConnector, Mapping, JsonDict
 from .abstract_connector import get_data, infer_physical_format, unique_physical_format
 from .connector_manager import ConnectorException, create_edition_task
 from .openlibrary_languages import languages
@@ -14,7 +18,7 @@ class Connector(AbstractConnector):
 
     generated_remote_link_field = "openlibrary_link"
 
-    def __init__(self, identifier):
+    def __init__(self, identifier: str):
         super().__init__(identifier)
 
         get_first = lambda a, *args: a[0]
@@ -94,14 +98,14 @@ class Connector(AbstractConnector):
         Mapping("inventaire_id", remote_field="links", formatter=get_inventaire_id),
     ]
 
-    def get_book_data(self, remote_id):
+    def get_book_data(self, remote_id: str) -> JsonDict:
         data = get_data(remote_id)
         if data.get("type", {}).get("key") == "/type/redirect":
-            remote_id = self.base_url + data.get("location")
+            remote_id = self.base_url + data.get("location", "")
             return get_data(remote_id)
         return data
 
-    def get_remote_id_from_data(self, data):
+    def get_remote_id_from_data(self, data: JsonDict) -> str:
         """format a url from an openlibrary id field"""
         try:
             key = data["key"]
@@ -109,10 +113,10 @@ class Connector(AbstractConnector):
             raise ConnectorException("Invalid book data")
         return f"{self.books_url}{key}"
 
-    def is_work_data(self, data):
+    def is_work_data(self, data: JsonDict) -> bool:
         return bool(re.match(r"^[\/\w]+OL\d+W$", data["key"]))
 
-    def get_edition_from_work_data(self, data):
+    def get_edition_from_work_data(self, data: JsonDict) -> JsonDict:
         try:
             key = data["key"]
         except KeyError:
@@ -124,7 +128,7 @@ class Connector(AbstractConnector):
             raise ConnectorException("No editions for work")
         return edition
 
-    def get_work_from_edition_data(self, data):
+    def get_work_from_edition_data(self, data: JsonDict) -> JsonDict:
         try:
             key = data["works"][0]["key"]
         except (IndexError, KeyError):
@@ -132,7 +136,7 @@ class Connector(AbstractConnector):
         url = f"{self.books_url}{key}"
         return self.get_book_data(url)
 
-    def get_authors_from_data(self, data):
+    def get_authors_from_data(self, data: JsonDict) -> Iterator[models.Author]:
         """parse author json and load or create authors"""
         for author_blob in data.get("authors", []):
             author_blob = author_blob.get("author", author_blob)
@@ -144,7 +148,7 @@ class Connector(AbstractConnector):
             continue
         yield author
 
-    def get_cover_url(self, cover_blob, size="L"):
+    def get_cover_url(self, cover_blob: list[str], size: str = "L") -> Optional[str]:
         """ask openlibrary for the cover"""
         if not cover_blob:
             return None
@@ -152,8 +156,10 @@ class Connector(AbstractConnector):
         image_name = f"{cover_id}-{size}.jpg"
         return f"{self.covers_url}/b/id/{image_name}"
 
-    def parse_search_data(self, data, min_confidence):
-        for idx, search_result in enumerate(data.get("docs")):
+    def parse_search_data(
+        self, data: JsonDict, min_confidence: float
+    ) -> Iterator[SearchResult]:
+        for idx, search_result in enumerate(data.get("docs", [])):
             # build the remote id from the openlibrary key
             key = self.books_url + search_result["key"]
             author = search_result.get("author_name") or ["Unknown"]
@@ -174,7 +180,7 @@ class Connector(AbstractConnector):
                 confidence=confidence,
             )
 
-    def parse_isbn_search_data(self, data):
+    def parse_isbn_search_data(self, data: JsonDict) -> Iterator[SearchResult]:
         for search_result in list(data.values()):
             # build the remote id from the openlibrary key
             key = self.books_url + search_result["key"]
@@ -188,12 +194,12 @@ class Connector(AbstractConnector):
                 year=search_result.get("publish_date"),
             )
 
-    def load_edition_data(self, olkey):
+    def load_edition_data(self, olkey: str) -> JsonDict:
         """query openlibrary for editions of a work"""
         url = f"{self.books_url}/works/{olkey}/editions"
         return self.get_book_data(url)
 
-    def expand_book_data(self, book):
+    def expand_book_data(self, book: models.Book) -> None:
         work = book
         # go from the edition to the work, if necessary
         if isinstance(book, models.Edition):
@@ -206,14 +212,14 @@ class Connector(AbstractConnector):
             # who knows, man
             return
 
-        for edition_data in edition_options.get("entries"):
+        for edition_data in edition_options.get("entries", []):
            # does this edition have ANY interesting data?
            if ignore_edition(edition_data):
                continue
            create_edition_task.delay(self.connector.id, work.id, edition_data)
 
 
-def ignore_edition(edition_data):
+def ignore_edition(edition_data: JsonDict) -> bool:
     """don't load a million editions that have no metadata"""
     # an isbn, we love to see it
     if edition_data.get("isbn_13") or edition_data.get("isbn_10"):
@@ -232,19 +238,30 @@ def ignore_edition(edition_data):
     return True
 
 
-def get_description(description_blob):
+def get_description(description_blob: Union[JsonDict, str]) -> str:
     """descriptions can be a string or a dict"""
     if isinstance(description_blob, dict):
-        return description_blob.get("value")
-    return description_blob
+        description = markdown(description_blob.get("value", ""))
+    else:
+        description = markdown(description_blob)
+
+    if (
+        description.startswith("<p>")
+        and description.endswith("</p>")
+        and description.count("<p>") == 1
+    ):
+        # If there is just one <p> tag and it is around the text remove it
+        return description[len("<p>") : -len("</p>")].strip()
+
+    return clean(description)
 
 
-def get_openlibrary_key(key):
+def get_openlibrary_key(key: str) -> str:
     """convert /books/OL27320736M into OL27320736M"""
     return key.split("/")[-1]
 
 
-def get_languages(language_blob):
+def get_languages(language_blob: Iterable[JsonDict]) -> list[Optional[str]]:
     """/language/eng -> English"""
     langs = []
     for lang in language_blob:
@@ -252,14 +269,14 @@ def get_languages(language_blob):
     return langs
 
 
-def get_dict_field(blob, field_name):
+def get_dict_field(blob: Optional[JsonDict], field_name: str) -> Optional[Any]:
     """extract the isni from the remote id data for the author"""
     if not blob or not isinstance(blob, dict):
         return None
     return blob.get(field_name)
 
 
-def get_wikipedia_link(links):
+def get_wikipedia_link(links: list[Any]) -> Optional[str]:
     """extract wikipedia links"""
     if not isinstance(links, list):
         return None
@@ -272,7 +289,7 @@ def get_wikipedia_link(links):
     return None
 
 
-def get_inventaire_id(links):
+def get_inventaire_id(links: list[Any]) -> Optional[str]:
     """extract and format inventaire ids"""
     if not isinstance(links, list):
         return None
@@ -282,11 +299,13 @@ def get_inventaire_id(links):
             continue
         if link.get("title") == "inventaire.io":
             iv_link = link.get("url")
+            if not isinstance(iv_link, str):
+                return None
             return iv_link.split("/")[-1]
     return None
 
 
-def pick_default_edition(options):
+def pick_default_edition(options: list[JsonDict]) -> Optional[JsonDict]:
     """favor physical copies with covers in english"""
     if not options:
         return None
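
The rewritten get_description above renders markdown and then unwraps a lone paragraph tag before sanitizing. The unwrapping check in isolation, as plain string logic with no markdown dependency (unwrap_single_paragraph is an illustrative name, not bookwyrm's):

    def unwrap_single_paragraph(description: str) -> str:
        """strip one wrapping <p>...</p> pair, mirroring the check in get_description"""
        if (
            description.startswith("<p>")
            and description.endswith("</p>")
            and description.count("<p>") == 1
        ):
            return description[len("<p>") : -len("</p>")].strip()
        return description


    print(unwrap_single_paragraph("<p>A single paragraph.</p>"))   # A single paragraph.
    print(unwrap_single_paragraph("<p>Two</p><p>paragraphs</p>"))  # unchanged
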
@@ -4,7 +4,7 @@ from django.template.loader import get_template
 
 from bookwyrm import models, settings
 from bookwyrm.tasks import app, EMAIL
-from bookwyrm.settings import DOMAIN
+from bookwyrm.settings import DOMAIN, BASE_URL
 
 
 def email_data():
@@ -14,6 +14,7 @@ def email_data():
         "site_name": site.name,
         "logo": site.logo_small_url,
         "domain": DOMAIN,
+        "base_url": BASE_URL,
         "user": None,
     }
@@ -15,6 +15,7 @@ class AuthorForm(CustomForm):
             "aliases",
             "bio",
             "wikipedia_link",
+            "wikidata",
             "website",
             "born",
             "died",
@@ -32,6 +33,7 @@ class AuthorForm(CustomForm):
             "wikipedia_link": forms.TextInput(
                 attrs={"aria-describedby": "desc_wikipedia_link"}
             ),
+            "wikidata": forms.TextInput(attrs={"aria-describedby": "desc_wikidata"}),
             "website": forms.TextInput(attrs={"aria-describedby": "desc_website"}),
             "born": forms.SelectDateWidget(attrs={"aria-describedby": "desc_born"}),
             "died": forms.SelectDateWidget(attrs={"aria-describedby": "desc_died"}),
@@ -1,8 +1,9 @@
 """ using django model forms """
 from django import forms
 
+from file_resubmit.widgets import ResubmitImageWidget
+
 from bookwyrm import models
-from bookwyrm.models.fields import ClearableFileInputWithWarning
 from .custom_form import CustomForm
 from .widgets import ArrayWidget, SelectDateWidget, Select
@@ -70,9 +71,7 @@ class EditionForm(CustomForm):
             "published_date": SelectDateWidget(
                 attrs={"aria-describedby": "desc_published_date"}
             ),
-            "cover": ClearableFileInputWithWarning(
-                attrs={"aria-describedby": "desc_cover"}
-            ),
+            "cover": ResubmitImageWidget(attrs={"aria-describedby": "desc_cover"}),
             "physical_format": Select(
                 attrs={"aria-describedby": "desc_physical_format"}
             ),
@@ -111,6 +110,7 @@ class EditionFromWorkForm(CustomForm):
         model = models.Work
         fields = [
             "title",
+            "sort_title",
             "subtitle",
             "authors",
             "description",
@@ -70,6 +70,22 @@ class DeleteUserForm(CustomForm):
         fields = ["password"]
 
 
+class MoveUserForm(CustomForm):
+    target = forms.CharField(widget=forms.TextInput)
+
+    class Meta:
+        model = models.User
+        fields = ["password"]
+
+
+class AliasUserForm(CustomForm):
+    username = forms.CharField(widget=forms.TextInput)
+
+    class Meta:
+        model = models.User
+        fields = ["password"]
+
+
 class ChangePasswordForm(CustomForm):
     current_password = forms.CharField(widget=forms.PasswordInput)
     confirm_password = forms.CharField(widget=forms.PasswordInput)
@@ -25,6 +25,10 @@ class ImportForm(forms.Form):
     csv_file = forms.FileField()
 
 
+class ImportUserForm(forms.Form):
+    archive_file = forms.FileField()
+
+
 class ShelfForm(CustomForm):
     class Meta:
         model = models.Shelf
@@ -1,4 +1,5 @@
 """ using django model forms """
+
 from urllib.parse import urlparse
 
 from django.utils.translation import gettext_lazy as _
@@ -25,7 +26,7 @@ class FileLinkForm(CustomForm):
         url = cleaned_data.get("url")
         filetype = cleaned_data.get("filetype")
         book = cleaned_data.get("book")
-        domain = urlparse(url).netloc
+        domain = urlparse(url).hostname
         if models.LinkDomain.objects.filter(domain=domain).exists():
             status = models.LinkDomain.objects.get(domain=domain).status
             if status == "blocked":
@@ -37,10 +38,9 @@ class FileLinkForm(CustomForm):
                 ),
             )
         if (
-            not self.instance
-            and models.FileLink.objects.filter(
-                url=url, book=book, filetype=filetype
-            ).exists()
+            models.FileLink.objects.filter(url=url, book=book, filetype=filetype)
+            .exclude(pk=self.instance)
+            .exists()
         ):
             # pylint: disable=line-too-long
             self.add_error(
@@ -1,6 +1,7 @@
 """ import classes """
 from .importer import Importer
+from .bookwyrm_import import BookwyrmImporter
 from .calibre_import import CalibreImporter
 from .goodreads_import import GoodreadsImporter
 from .librarything_import import LibrarythingImporter
bookwyrm/importers/bookwyrm_import.py (new file, 24 lines)
@@ -0,0 +1,24 @@
+"""Import data from Bookwyrm export files"""
+from django.http import QueryDict
+
+from bookwyrm.models import User
+from bookwyrm.models.bookwyrm_import_job import BookwyrmImportJob
+
+
+class BookwyrmImporter:
+    """Import a Bookwyrm User export file.
+    This is kind of a combination of an importer and a connector.
+    """
+
+    # pylint: disable=no-self-use
+    def process_import(
+        self, user: User, archive_file: bytes, settings: QueryDict
+    ) -> BookwyrmImportJob:
+        """import user data from a Bookwyrm export file"""
+
+        required = [k for k in settings if settings.get(k) == "on"]
+
+        job = BookwyrmImportJob.objects.create(
+            user=user, archive_file=archive_file, required=required
+        )
+        return job
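
In process_import above, required is derived from checkbox-style form values. With a plain dict standing in for Django's QueryDict, the filter is just (sample keys invented):

    form_settings = {"include_reviews": "on", "include_shelves": "on", "include_lists": ""}
    required = [k for k in form_settings if form_settings.get(k) == "on"]
    print(required)  # ['include_reviews', 'include_shelves']
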
@@ -1,4 +1,6 @@
 """ handle reading a csv from calibre """
+from typing import Any, Optional
+
 from bookwyrm.models import Shelf
 
 from . import Importer
@@ -9,7 +11,7 @@ class CalibreImporter(Importer):
 
     service = "Calibre"
 
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args: Any, **kwargs: Any):
         # Add timestamp to row_mappings_guesses for date_added to avoid
         # integrity error
         row_mappings_guesses = []
@@ -23,6 +25,6 @@ class CalibreImporter(Importer):
         self.row_mappings_guesses = row_mappings_guesses
         super().__init__(*args, **kwargs)
 
-    def get_shelf(self, normalized_row):
+    def get_shelf(self, normalized_row: dict[str, Optional[str]]) -> Optional[str]:
         # Calibre export does not indicate which shelf to use. Use a default one for now
         return Shelf.TO_READ
@@ -1,8 +1,10 @@
 """ handle reading a csv from an external service, defaults are from Goodreads """
 import csv
 from datetime import timedelta
+from typing import Iterable, Optional
 
 from django.utils import timezone
-from bookwyrm.models import ImportJob, ImportItem, SiteSettings
+from bookwyrm.models import ImportJob, ImportItem, SiteSettings, User
 
 
 class Importer:
@@ -35,19 +37,26 @@ class Importer:
     }
 
     # pylint: disable=too-many-locals
-    def create_job(self, user, csv_file, include_reviews, privacy):
+    def create_job(
+        self, user: User, csv_file: Iterable[str], include_reviews: bool, privacy: str
+    ) -> ImportJob:
         """check over a csv and creates a database entry for the job"""
         csv_reader = csv.DictReader(csv_file, delimiter=self.delimiter)
         rows = list(csv_reader)
         if len(rows) < 1:
             raise ValueError("CSV file is empty")
-        rows = enumerate(rows)
+
+        mappings = (
+            self.create_row_mappings(list(fieldnames))
+            if (fieldnames := csv_reader.fieldnames)
+            else {}
+        )
 
         job = ImportJob.objects.create(
             user=user,
             include_reviews=include_reviews,
             privacy=privacy,
-            mappings=self.create_row_mappings(csv_reader.fieldnames),
+            mappings=mappings,
             source=self.service,
         )
@@ -55,16 +64,20 @@ class Importer:
         if enforce_limit and allowed_imports <= 0:
             job.complete_job()
             return job
-        for index, entry in rows:
+        for index, entry in enumerate(rows):
             if enforce_limit and index >= allowed_imports:
                 break
             self.create_item(job, index, entry)
         return job
 
-    def update_legacy_job(self, job):
+    def update_legacy_job(self, job: ImportJob) -> None:
         """patch up a job that was in the old format"""
         items = job.items
-        headers = list(items.first().data.keys())
+        first_item = items.first()
+        if first_item is None:
+            return
+
+        headers = list(first_item.data.keys())
         job.mappings = self.create_row_mappings(headers)
         job.updated_date = timezone.now()
         job.save()
@@ -75,24 +88,24 @@ class Importer:
             item.normalized_data = normalized
             item.save()
 
-    def create_row_mappings(self, headers):
+    def create_row_mappings(self, headers: list[str]) -> dict[str, Optional[str]]:
         """guess what the headers mean"""
         mappings = {}
         for (key, guesses) in self.row_mappings_guesses:
-            value = [h for h in headers if h.lower() in guesses]
-            value = value[0] if len(value) else None
+            values = [h for h in headers if h.lower() in guesses]
+            value = values[0] if len(values) else None
             if value:
                 headers.remove(value)
             mappings[key] = value
         return mappings
 
-    def create_item(self, job, index, data):
+    def create_item(self, job: ImportJob, index: int, data: dict[str, str]) -> None:
         """creates and saves an import item"""
         normalized = self.normalize_row(data, job.mappings)
         normalized["shelf"] = self.get_shelf(normalized)
         ImportItem(job=job, index=index, data=data, normalized_data=normalized).save()
 
-    def get_shelf(self, normalized_row):
+    def get_shelf(self, normalized_row: dict[str, Optional[str]]) -> Optional[str]:
         """determine which shelf to use"""
         shelf_name = normalized_row.get("shelf")
         if not shelf_name:
@@ -103,11 +116,15 @@ class Importer:
         ]
         return shelf[0] if shelf else None
 
-    def normalize_row(self, entry, mappings):  # pylint: disable=no-self-use
+    # pylint: disable=no-self-use
+    def normalize_row(
+        self, entry: dict[str, str], mappings: dict[str, Optional[str]]
+    ) -> dict[str, Optional[str]]:
         """use the dataclass to create the formatted row of data"""
-        return {k: entry.get(v) for k, v in mappings.items()}
+        return {k: entry.get(v) if v else None for k, v in mappings.items()}
 
-    def get_import_limit(self, user):  # pylint: disable=no-self-use
+    # pylint: disable=no-self-use
+    def get_import_limit(self, user: User) -> tuple[int, int]:
         """check if import limit is set and return how many imports are left"""
         site_settings = SiteSettings.objects.get()
         import_size_limit = site_settings.import_size_limit
@@ -125,7 +142,9 @@ class Importer:
         allowed_imports = import_size_limit - imported_books
         return enforce_limit, allowed_imports
 
-    def create_retry_job(self, user, original_job, items):
+    def create_retry_job(
+        self, user: User, original_job: ImportJob, items: list[ImportItem]
+    ) -> ImportJob:
         """retry items that didn't import"""
         job = ImportJob.objects.create(
             user=user,
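
The create_job changes guard against csv_reader.fieldnames being None with a walrus expression before building row mappings. The same guard in isolation (mappings_for and the lowercasing stand-in for create_row_mappings are invented):

    import csv
    import io


    def mappings_for(csv_text: str) -> dict[str, str]:
        """sketch: only build row mappings when the csv actually has a header"""
        reader = csv.DictReader(io.StringIO(csv_text))
        return (
            {name.lower(): name for name in fieldnames}  # stand-in for create_row_mappings
            if (fieldnames := reader.fieldnames)
            else {}
        )


    print(mappings_for("Title,Author\nWhose Body?,Sayers"))
    print(mappings_for(""))  # no header -> {}
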
@@ -1,11 +1,16 @@
 """ handle reading a tsv from librarything """
 import re
+from typing import Optional
 
 from bookwyrm.models import Shelf
 
 from . import Importer
 
 
+def _remove_brackets(value: Optional[str]) -> Optional[str]:
+    return re.sub(r"\[|\]", "", value) if value else None
+
+
 class LibrarythingImporter(Importer):
     """csv downloads from librarything"""
@@ -13,16 +18,19 @@ class LibrarythingImporter(Importer):
     delimiter = "\t"
     encoding = "ISO-8859-1"
 
-    def normalize_row(self, entry, mappings):  # pylint: disable=no-self-use
+    def normalize_row(
+        self, entry: dict[str, str], mappings: dict[str, Optional[str]]
+    ) -> dict[str, Optional[str]]:  # pylint: disable=no-self-use
         """use the dataclass to create the formatted row of data"""
-        remove_brackets = lambda v: re.sub(r"\[|\]", "", v) if v else None
-        normalized = {k: remove_brackets(entry.get(v)) for k, v in mappings.items()}
-        isbn_13 = normalized.get("isbn_13")
-        isbn_13 = isbn_13.split(", ") if isbn_13 else []
+        normalized = {
+            k: _remove_brackets(entry.get(v) if v else None)
+            for k, v in mappings.items()
+        }
+        isbn_13 = value.split(", ") if (value := normalized.get("isbn_13")) else []
         normalized["isbn_13"] = isbn_13[1] if len(isbn_13) > 1 else None
         return normalized
 
-    def get_shelf(self, normalized_row):
+    def get_shelf(self, normalized_row: dict[str, Optional[str]]) -> Optional[str]:
         if normalized_row["date_finished"]:
             return Shelf.READ_FINISHED
         if normalized_row["date_started"]:
@@ -1,4 +1,6 @@
 """ handle reading a csv from openlibrary"""
+from typing import Any
+
 from . import Importer
@@ -7,7 +9,7 @@ class OpenLibraryImporter(Importer):
 
     service = "OpenLibrary"
 
-    def __init__(self, *args, **kwargs):
+    def __init__(self, *args: Any, **kwargs: Any):
         self.row_mappings_guesses.append(("openlibrary_key", ["edition id"]))
         self.row_mappings_guesses.append(("openlibrary_work_key", ["work id"]))
         super().__init__(*args, **kwargs)
bookwyrm/isbn/RangeMessage.xml (new file, 7904 lines)
File diff suppressed because it is too large.

bookwyrm/isbn/__init__.py (new file, empty)

bookwyrm/isbn/isbn.py (new file, 128 lines)
@@ -0,0 +1,128 @@
+""" Use the range message from isbn-international to hyphenate ISBNs """
+import os
+from typing import Optional
+from xml.etree import ElementTree
+from xml.etree.ElementTree import Element
+
+import requests
+
+from bookwyrm import settings
+
+
+def _get_rules(element: Element) -> list[Element]:
+    if (rules_el := element.find("Rules")) is not None:
+        return rules_el.findall("Rule")
+    return []
+
+
+class IsbnHyphenator:
+    """Class to manage the range message xml file and use it to hyphenate ISBNs"""
+
+    __range_message_url = "https://www.isbn-international.org/export_rangemessage.xml"
+    __range_file_path = os.path.join(
+        settings.BASE_DIR, "bookwyrm", "isbn", "RangeMessage.xml"
+    )
+    __element_tree = None
+
+    def update_range_message(self) -> None:
+        """Download the range message xml file and save it locally"""
+        response = requests.get(self.__range_message_url, timeout=15)
+        with open(self.__range_file_path, "w", encoding="utf-8") as file:
+            file.write(response.text)
+        self.__element_tree = None
+
+    def hyphenate(self, isbn_13: Optional[str]) -> Optional[str]:
+        """hyphenate the given ISBN-13 number using the range message"""
+        if isbn_13 is None:
+            return None
+
+        if self.__element_tree is None:
+            self.__element_tree = ElementTree.parse(self.__range_file_path)
+
+        gs1_prefix = isbn_13[:3]
+        try:
+            reg_group = self.__find_reg_group(isbn_13, gs1_prefix)
+        except ValueError:
+            # if the reg groups are invalid, just return the original isbn
+            return isbn_13
+
+        if reg_group is None:
+            return isbn_13  # failed to hyphenate
+
+        registrant = self.__find_registrant(isbn_13, gs1_prefix, reg_group)
+        if registrant is None:
+            return isbn_13  # failed to hyphenate
+
+        publication = isbn_13[len(gs1_prefix) + len(reg_group) + len(registrant) : -1]
+        check_digit = isbn_13[-1:]
+        return "-".join((gs1_prefix, reg_group, registrant, publication, check_digit))
+
+    def __find_reg_group(self, isbn_13: str, gs1_prefix: str) -> Optional[str]:
+        if self.__element_tree is None:
+            self.__element_tree = ElementTree.parse(self.__range_file_path)
+
+        ucc_prefixes_el = self.__element_tree.find("EAN.UCCPrefixes")
+        if ucc_prefixes_el is None:
+            return None
+
+        for ean_ucc_el in ucc_prefixes_el.findall("EAN.UCC"):
+            if (
+                prefix_el := ean_ucc_el.find("Prefix")
+            ) is not None and prefix_el.text == gs1_prefix:
+                for rule_el in _get_rules(ean_ucc_el):
+                    length_el = rule_el.find("Length")
+                    if length_el is None:
+                        continue
+                    length = int(text) if (text := length_el.text) else 0
+                    if length == 0:
+                        continue
+
+                    range_el = rule_el.find("Range")
+                    if range_el is None or range_el.text is None:
+                        continue
+
+                    reg_grp_range = [int(x[:length]) for x in range_el.text.split("-")]
+                    reg_group = isbn_13[len(gs1_prefix) : len(gs1_prefix) + length]
+                    if reg_grp_range[0] <= int(reg_group) <= reg_grp_range[1]:
+                        return reg_group
+                return None
+        return None
+
+    def __find_registrant(
+        self, isbn_13: str, gs1_prefix: str, reg_group: str
+    ) -> Optional[str]:
+        from_ind = len(gs1_prefix) + len(reg_group)
+
+        if self.__element_tree is None:
+            self.__element_tree = ElementTree.parse(self.__range_file_path)
+
+        reg_groups_el = self.__element_tree.find("RegistrationGroups")
+        if reg_groups_el is None:
+            return None
+
+        for group_el in reg_groups_el.findall("Group"):
+            if (
+                prefix_el := group_el.find("Prefix")
+            ) is not None and prefix_el.text == "-".join((gs1_prefix, reg_group)):
+                for rule_el in _get_rules(group_el):
+                    length_el = rule_el.find("Length")
+                    if length_el is None:
+                        continue
+                    length = int(text) if (text := length_el.text) else 0
+                    if length == 0:
+                        continue
+
+                    range_el = rule_el.find("Range")
+                    if range_el is None or range_el.text is None:
+                        continue
+                    registrant_range = [
+                        int(x[:length]) for x in range_el.text.split("-")
+                    ]
+                    registrant = isbn_13[from_ind : from_ind + length]
+                    if registrant_range[0] <= int(registrant) <= registrant_range[1]:
+                        return registrant
+                return None
+        return None
+
+
+hyphenator_singleton = IsbnHyphenator()
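
A possible usage sketch for the new hyphenator module, assuming it runs inside the bookwyrm project with RangeMessage.xml present (the example ISBN and output are illustrative; actual segmentation depends on the current range data):

    from bookwyrm.isbn.isbn import hyphenator_singleton

    # assumes RangeMessage.xml exists locally (or update_range_message() has been run)
    print(hyphenator_singleton.hyphenate("9780316066525"))  # e.g. 978-0-316-06652-5
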
@@ -1,13 +1,14 @@
 """ PROCEED WITH CAUTION: uses deduplication fields to permanently
 merge book data objects """
+
 from django.core.management.base import BaseCommand
 from django.db.models import Count
 from bookwyrm import models
-from bookwyrm.management.merge import merge_objects
 
 
-def dedupe_model(model):
+def dedupe_model(model, dry_run=False):
     """combine duplicate editions and update related models"""
     print(f"deduplicating {model.__name__}:")
     fields = model._meta.get_fields()
     dedupe_fields = [
         f for f in fields if hasattr(f, "deduplication_field") and f.deduplication_field
@@ -16,30 +17,42 @@ def dedupe_model(model):
         dupes = (
             model.objects.values(field.name)
             .annotate(Count(field.name))
-            .filter(**{"%s__count__gt" % field.name: 1})
+            .filter(**{f"{field.name}__count__gt": 1})
+            .exclude(**{field.name: ""})
+            .exclude(**{f"{field.name}__isnull": True})
         )
 
         for dupe in dupes:
             value = dupe[field.name]
-            if not value or value == "":
-                continue
-            print("----------")
-            print(dupe)
             objs = model.objects.filter(**{field.name: value}).order_by("id")
             canonical = objs.first()
-            print("keeping", canonical.remote_id)
+            action = "would merge" if dry_run else "merging"
+            print(
+                f"{action} into {model.__name__} {canonical.remote_id} based on {field.name} {value}:"
+            )
             for obj in objs[1:]:
-                print(obj.remote_id)
-                merge_objects(canonical, obj)
+                print(f"- {obj.remote_id}")
+                absorbed_fields = obj.merge_into(canonical, dry_run=dry_run)
+                print(f" absorbed fields: {absorbed_fields}")
 
 
 class Command(BaseCommand):
     """deduplicate allllll the book data models"""
 
     help = "merges duplicate book data"
 
+    def add_arguments(self, parser):
+        """add the arguments for this command"""
+        parser.add_argument(
+            "--dry_run",
+            action="store_true",
+            help="don't actually merge, only print what would happen",
+        )
+
     # pylint: disable=no-self-use,unused-argument
     def handle(self, *args, **options):
         """run deduplications"""
-        dedupe_model(models.Edition)
-        dedupe_model(models.Work)
-        dedupe_model(models.Author)
+        dedupe_model(models.Edition, dry_run=options["dry_run"])
+        dedupe_model(models.Work, dry_run=options["dry_run"])
+        dedupe_model(models.Author, dry_run=options["dry_run"])
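
dedupe_model now excludes empty and null values at the queryset level and keeps the lowest id as canonical. The same selection logic, sketched with plain Python instead of the Django ORM (the edition dicts are invented sample data):

    from collections import Counter

    editions = [
        {"id": 1, "isbn_13": "9780316066525"},
        {"id": 2, "isbn_13": "9780316066525"},
        {"id": 3, "isbn_13": ""},    # skipped, like .exclude(**{field: ""})
        {"id": 4, "isbn_13": None},  # skipped, like the __isnull exclude
    ]
    counts = Counter(e["isbn_13"] for e in editions if e["isbn_13"])
    for value, n in counts.items():
        if n <= 1:
            continue
        objs = sorted((e for e in editions if e["isbn_13"] == value), key=lambda e: e["id"])
        canonical, rest = objs[0], objs[1:]
        print(f"merging into {canonical['id']}:", [e["id"] for e in rest])
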
bookwyrm/management/commands/erase_deleted_user_data.py (new file, 43 lines)
@@ -0,0 +1,43 @@
+""" Erase any data stored about deleted users """
+import sys
+from django.core.management.base import BaseCommand, CommandError
+from bookwyrm import models
+from bookwyrm.models.user import erase_user_data
+
+
+# pylint: disable=missing-function-docstring
+class Command(BaseCommand):
+    """command-line options"""
+
+    help = "Remove Two Factor Authorisation from user"
+
+    def add_arguments(self, parser):  # pylint: disable=no-self-use
+        parser.add_argument(
+            "--dryrun",
+            action="store_true",
+            help="Preview users to be cleared without altering the database",
+        )
+
+    def handle(self, *args, **options):  # pylint: disable=unused-argument
+
+        # Check for anything fishy
+        bad_state = models.User.objects.filter(is_deleted=True, is_active=True)
+        if bad_state.exists():
+            raise CommandError(
+                f"{bad_state.count()} user(s) marked as both active and deleted"
+            )
+
+        deleted_users = models.User.objects.filter(is_deleted=True)
+        self.stdout.write(f"Found {deleted_users.count()} deleted users")
+        if options["dryrun"]:
+            self.stdout.write("\n".join(u.username for u in deleted_users[:5]))
+            if deleted_users.count() > 5:
+                self.stdout.write("... and more")
+            sys.exit()
+
+        self.stdout.write("Erasing user data:")
+        for user_id in deleted_users.values_list("id", flat=True):
+            erase_user_data.delay(user_id)
+            self.stdout.write(".", ending="")
+
+        self.stdout.write("")
+        self.stdout.write("Tasks created successfully")
@@ -1,54 +0,0 @@
-""" Get your admin code to allow install """
-from django.core.management.base import BaseCommand
-
-from bookwyrm import models
-from bookwyrm.settings import VERSION
-
-
-# pylint: disable=no-self-use
-class Command(BaseCommand):
-    """command-line options"""
-
-    help = "What version is this?"
-
-    def add_arguments(self, parser):
-        """specify which function to run"""
-        parser.add_argument(
-            "--current",
-            action="store_true",
-            help="Version stored in database",
-        )
-        parser.add_argument(
-            "--target",
-            action="store_true",
-            help="Version stored in settings",
-        )
-        parser.add_argument(
-            "--update",
-            action="store_true",
-            help="Update database version",
-        )
-
-    # pylint: disable=unused-argument
-    def handle(self, *args, **options):
-        """execute init"""
-        site = models.SiteSettings.objects.get()
-        current = site.version or "0.0.1"
-        target = VERSION
-        if options.get("current"):
-            print(current)
-            return
-
-        if options.get("target"):
-            print(target)
-            return
-
-        if options.get("update"):
-            site.version = target
-            site.save()
-            return
-
-        if current != target:
-            print(f"{current}/{target}")
-        else:
-            print(current)
21  bookwyrm/management/commands/repair_editions.py  Normal file
@ -0,0 +1,21 @@
""" Repair editions with missing works """
from django.core.management.base import BaseCommand
from bookwyrm import models


class Command(BaseCommand):
    """command-line options"""

    help = "Repairs an edition that is in a broken state"

    # pylint: disable=unused-argument
    def handle(self, *args, **options):
        """Find and repair broken editions"""
        # Find broken editions
        editions = models.Edition.objects.filter(parent_work__isnull=True)
        self.stdout.write(f"Repairing {editions.count()} edition(s):")

        # Do repair
        for edition in editions:
            edition.repair()
            self.stdout.write(".", ending="")

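This command takes no arguments; a minimal invocation sketch:

from django.core.management import call_command

# re-links every edition whose parent_work is missing
call_command("repair_editions")
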
@ -1,50 +0,0 @@ (file removed; the import removed in the next hunk identifies it as bookwyrm/management/merge.py)
from django.db.models import ManyToManyField


def update_related(canonical, obj):
    """update all the models with fk to the object being removed"""
    # move related models to canonical
    related_models = [
        (r.remote_field.name, r.related_model) for r in canonical._meta.related_objects
    ]
    for (related_field, related_model) in related_models:
        # Skip the ManyToMany fields that aren’t auto-created. These
        # should have a corresponding OneToMany field in the model for
        # the linking table anyway. If we update it through that model
        # instead then we won’t lose the extra fields in the linking
        # table.
        related_field_obj = related_model._meta.get_field(related_field)
        if isinstance(related_field_obj, ManyToManyField):
            through = related_field_obj.remote_field.through
            if not through._meta.auto_created:
                continue
        related_objs = related_model.objects.filter(**{related_field: obj})
        for related_obj in related_objs:
            print("replacing in", related_model.__name__, related_field, related_obj.id)
            try:
                setattr(related_obj, related_field, canonical)
                related_obj.save()
            except TypeError:
                getattr(related_obj, related_field).add(canonical)
                getattr(related_obj, related_field).remove(obj)


def copy_data(canonical, obj):
    """try to get the most data possible"""
    for data_field in obj._meta.get_fields():
        if not hasattr(data_field, "activitypub_field"):
            continue
        data_value = getattr(obj, data_field.name)
        if not data_value:
            continue
        if not getattr(canonical, data_field.name):
            print("setting data field", data_field.name, data_value)
            setattr(canonical, data_field.name, data_value)
            canonical.save()


def merge_objects(canonical, obj):
    copy_data(canonical, obj)
    update_related(canonical, obj)
    # remove the outdated entry
    obj.delete()

@ -1,4 +1,3 @@
-from bookwyrm.management.merge import merge_objects
 from django.core.management.base import BaseCommand


@ -9,6 +8,11 @@ class MergeCommand(BaseCommand):
         """add the arguments for this command"""
         parser.add_argument("--canonical", type=int, required=True)
         parser.add_argument("--other", type=int, required=True)
+        parser.add_argument(
+            "--dry_run",
+            action="store_true",
+            help="don't actually merge, only print what would happen",
+        )

     # pylint: disable=no-self-use,unused-argument
     def handle(self, *args, **options):

@ -26,4 +30,8 @@ class MergeCommand(BaseCommand):
             print("other book doesn’t exist!")
             return

-        merge_objects(canonical, other)
+        absorbed_fields = other.merge_into(canonical, dry_run=options["dry_run"])
+
+        action = "would be" if options["dry_run"] else "has been"
+        print(f"{other.remote_id} {action} merged into {canonical.remote_id}")
+        print(f"absorbed fields: {absorbed_fields}")

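The concrete subcommands built on MergeCommand are not part of this hunk; a usage sketch with a hypothetical subcommand name merge_book (the real names are not shown here):

from django.core.management import call_command

# "merge_book" is illustrative only; --canonical and --other are the
# database ids declared in add_arguments above
call_command("merge_book", canonical=1, other=2, dry_run=True)
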
@ -1,3 +1,4 @@ (bookwyrm/middleware/__init__.py, inferred from the new middleware file that follows)
 """ look at all this nice middleware! """
 from .timezone_middleware import TimezoneMiddleware
 from .ip_middleware import IPBlocklistMiddleware
+from .file_too_big import FileTooBig

30  bookwyrm/middleware/file_too_big.py  Normal file
@ -0,0 +1,30 @@
"""Middleware to display a custom 413 error page"""

from django.http import HttpResponse
from django.shortcuts import render
from django.core.exceptions import RequestDataTooBig


class FileTooBig:
    """Middleware to display a custom page when a
    RequestDataTooBig exception is thrown"""

    def __init__(self, get_response):
        """boilerplate __init__ from Django docs"""

        self.get_response = get_response

    def __call__(self, request):
        """If RequestDataTooBig is thrown, render the 413 error page"""

        try:
            body = request.body  # pylint: disable=unused-variable

        except RequestDataTooBig:

            rendered = render(request, "413.html")
            response = HttpResponse(rendered)
            return response

        response = self.get_response(request)
        return response

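The class only takes effect once it is listed in the middleware stack; a sketch of the expected settings.py entry (its position within the list is an assumption):

# bookwyrm/settings.py
MIDDLEWARE = [
    # ... Django's stock middleware ...
    "bookwyrm.middleware.FileTooBig",  # exported via the __init__.py change above
]
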
@ -45,5 +45,7 @@ class Migration(migrations.Migration): (bookwyrm/migrations/0179_populate_sort_title.py, inferred from the function name and the dependency cited later)
     ]

     operations = [
-        migrations.RunPython(populate_sort_title),
+        migrations.RunPython(
+            populate_sort_title, reverse_code=migrations.RunPython.noop
+        ),
     ]

36  bookwyrm/migrations/0179_reportcomment_comment_type.py  Normal file
@ -0,0 +1,36 @@
# Generated by Django 3.2.18 on 2023-05-16 16:02

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0178_auto_20230328_2132"),
    ]

    operations = [
        migrations.AddField(
            model_name="reportcomment",
            name="action_type",
            field=models.CharField(
                choices=[
                    ("comment", "Comment"),
                    ("resolve", "Resolved report"),
                    ("reopen", "Re-opened report"),
                    ("message_reporter", "Messaged reporter"),
                    ("message_offender", "Messaged reported user"),
                    ("user_suspension", "Suspended user"),
                    ("user_unsuspension", "Un-suspended user"),
                    ("user_perms", "Changed user permission level"),
                    ("user_deletion", "Deleted user account"),
                    ("block_domain", "Blocked domain"),
                    ("approve_domain", "Approved domain"),
                    ("delete_item", "Deleted item"),
                ],
                default="comment",
                max_length=20,
            ),
        ),
        migrations.RenameModel("ReportComment", "ReportAction"),
    ]

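After this migration the renamed model can be filtered on the new field; a small sketch (the filter value comes from the choices list above):

from bookwyrm import models

# all moderation actions that resolved a report
resolved = models.ReportAction.objects.filter(action_type="resolve")
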
17  bookwyrm/migrations/0180_alter_reportaction_options.py  Normal file
@ -0,0 +1,17 @@
# Generated by Django 3.2.18 on 2023-06-21 22:01

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0179_reportcomment_comment_type"),
    ]

    operations = [
        migrations.AlterModelOptions(
            name="reportaction",
            options={"ordering": ("created_date",)},
        ),
    ]

44  bookwyrm/migrations/0180_alter_user_preferred_language.py  Normal file
@ -0,0 +1,44 @@
# Generated by Django 3.2.19 on 2023-07-23 19:33

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0179_populate_sort_title"),
    ]

    operations = [
        migrations.AlterField(
            model_name="user",
            name="preferred_language",
            field=models.CharField(
                blank=True,
                choices=[
                    ("en-us", "English"),
                    ("ca-es", "Català (Catalan)"),
                    ("de-de", "Deutsch (German)"),
                    ("eo-uy", "Esperanto (Esperanto)"),
                    ("es-es", "Español (Spanish)"),
                    ("eu-es", "Euskara (Basque)"),
                    ("gl-es", "Galego (Galician)"),
                    ("it-it", "Italiano (Italian)"),
                    ("fi-fi", "Suomi (Finnish)"),
                    ("fr-fr", "Français (French)"),
                    ("lt-lt", "Lietuvių (Lithuanian)"),
                    ("nl-nl", "Nederlands (Dutch)"),
                    ("no-no", "Norsk (Norwegian)"),
                    ("pl-pl", "Polski (Polish)"),
                    ("pt-br", "Português do Brasil (Brazilian Portuguese)"),
                    ("pt-pt", "Português Europeu (European Portuguese)"),
                    ("ro-ro", "Română (Romanian)"),
                    ("sv-se", "Svenska (Swedish)"),
                    ("zh-hans", "简体中文 (Simplified Chinese)"),
                    ("zh-hant", "繁體中文 (Traditional Chinese)"),
                ],
                max_length=255,
                null=True,
            ),
        ),
    ]

13  bookwyrm/migrations/0181_merge_20230806_2302.py  Normal file
@ -0,0 +1,13 @@
# Generated by Django 3.2.20 on 2023-08-06 23:02

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0180_alter_reportaction_options"),
        ("bookwyrm", "0180_alter_user_preferred_language"),
    ]

    operations = []

130  bookwyrm/migrations/0182_auto_20231027_1122.py  Normal file
@ -0,0 +1,130 @@
# Generated by Django 3.2.20 on 2023-10-27 11:22

import bookwyrm.models.activitypub_mixin
import bookwyrm.models.fields
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0181_merge_20230806_2302"),
    ]

    operations = [
        migrations.AddField(
            model_name="user",
            name="also_known_as",
            field=bookwyrm.models.fields.ManyToManyField(to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name="user",
            name="moved_to",
            field=bookwyrm.models.fields.RemoteIdField(
                max_length=255,
                null=True,
                validators=[bookwyrm.models.fields.validate_remote_id],
            ),
        ),
        migrations.AlterField(
            model_name="notification",
            name="notification_type",
            field=models.CharField(
                choices=[
                    ("FAVORITE", "Favorite"),
                    ("REPLY", "Reply"),
                    ("MENTION", "Mention"),
                    ("TAG", "Tag"),
                    ("FOLLOW", "Follow"),
                    ("FOLLOW_REQUEST", "Follow Request"),
                    ("BOOST", "Boost"),
                    ("IMPORT", "Import"),
                    ("ADD", "Add"),
                    ("REPORT", "Report"),
                    ("LINK_DOMAIN", "Link Domain"),
                    ("INVITE", "Invite"),
                    ("ACCEPT", "Accept"),
                    ("JOIN", "Join"),
                    ("LEAVE", "Leave"),
                    ("REMOVE", "Remove"),
                    ("GROUP_PRIVACY", "Group Privacy"),
                    ("GROUP_NAME", "Group Name"),
                    ("GROUP_DESCRIPTION", "Group Description"),
                    ("MOVE", "Move"),
                ],
                max_length=255,
            ),
        ),
        migrations.CreateModel(
            name="Move",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("created_date", models.DateTimeField(auto_now_add=True)),
                ("updated_date", models.DateTimeField(auto_now=True)),
                (
                    "remote_id",
                    bookwyrm.models.fields.RemoteIdField(
                        max_length=255,
                        null=True,
                        validators=[bookwyrm.models.fields.validate_remote_id],
                    ),
                ),
                ("object", bookwyrm.models.fields.CharField(max_length=255)),
                (
                    "origin",
                    bookwyrm.models.fields.CharField(
                        blank=True, default="", max_length=255, null=True
                    ),
                ),
                (
                    "user",
                    bookwyrm.models.fields.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
            bases=(bookwyrm.models.activitypub_mixin.ActivityMixin, models.Model),
        ),
        migrations.CreateModel(
            name="MoveUser",
            fields=[
                (
                    "move_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="bookwyrm.move",
                    ),
                ),
                (
                    "target",
                    bookwyrm.models.fields.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        related_name="move_target",
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
            bases=("bookwyrm.move",),
        ),
    ]

18  bookwyrm/migrations/0183_auto_20231105_1607.py  Normal file
@ -0,0 +1,18 @@
# Generated by Django 3.2.20 on 2023-11-05 16:07

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0182_auto_20231027_1122"),
    ]

    operations = [
        migrations.AddField(
            model_name="user",
            name="is_deleted",
            field=models.BooleanField(default=False),
        ),
    ]

35  bookwyrm/migrations/0184_auto_20231106_0421.py  Normal file
@ -0,0 +1,35 @@
# Generated by Django 3.2.20 on 2023-11-06 04:21

from django.db import migrations
from bookwyrm.models import User


def update_deleted_users(apps, schema_editor):
    """Find all the users who are deleted, not just inactive, and set deleted"""
    users = apps.get_model("bookwyrm", "User")
    db_alias = schema_editor.connection.alias
    users.objects.using(db_alias).filter(
        is_active=False,
        deactivation_reason__in=[
            "self_deletion",
            "moderator_deletion",
        ],
    ).update(is_deleted=True)

    # different rules for remote users
    users.objects.using(db_alias).filter(is_active=False, local=False,).exclude(
        deactivation_reason="moderator_deactivation",
    ).update(is_deleted=True)


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0183_auto_20231105_1607"),
    ]

    operations = [
        migrations.RunPython(
            update_deleted_users, reverse_code=migrations.RunPython.noop
        ),
    ]

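Like most data migrations in this changeset, the forward function works on the historical model from apps.get_model rather than the direct bookwyrm.models import (which is in fact unused above), and pairs RunPython with a noop reverse so the migration stays reversible. A stripped-down sketch of that recurring shape, with illustrative names:

from django.db import migrations

def forwards(apps, schema_editor):
    # always the historical model state, never a live import
    user_model = apps.get_model("bookwyrm", "User")
    user_model.objects.filter(is_active=False).update(is_deleted=True)

class Migration(migrations.Migration):
    dependencies = [("bookwyrm", "0183_auto_20231105_1607")]
    operations = [
        migrations.RunPython(forwards, reverse_code=migrations.RunPython.noop),
    ]
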
@ -0,0 +1,42 @@ (new file; later migrations reference it as bookwyrm/migrations/0185_alter_notification_notification_type.py)
# Generated by Django 3.2.20 on 2023-11-13 22:39

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0184_auto_20231106_0421"),
    ]

    operations = [
        migrations.AlterField(
            model_name="notification",
            name="notification_type",
            field=models.CharField(
                choices=[
                    ("FAVORITE", "Favorite"),
                    ("BOOST", "Boost"),
                    ("REPLY", "Reply"),
                    ("MENTION", "Mention"),
                    ("TAG", "Tag"),
                    ("FOLLOW", "Follow"),
                    ("FOLLOW_REQUEST", "Follow Request"),
                    ("IMPORT", "Import"),
                    ("ADD", "Add"),
                    ("REPORT", "Report"),
                    ("LINK_DOMAIN", "Link Domain"),
                    ("INVITE", "Invite"),
                    ("ACCEPT", "Accept"),
                    ("JOIN", "Join"),
                    ("LEAVE", "Leave"),
                    ("REMOVE", "Remove"),
                    ("GROUP_PRIVACY", "Group Privacy"),
                    ("GROUP_NAME", "Group Name"),
                    ("GROUP_DESCRIPTION", "Group Description"),
                    ("MOVE", "Move"),
                ],
                max_length=255,
            ),
        ),
    ]

212  bookwyrm/migrations/0186_auto_20231116_0048.py  Normal file
@ -0,0 +1,212 @@
# Generated by Django 3.2.20 on 2023-11-16 00:48

from django.conf import settings
import django.contrib.postgres.fields
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0185_alter_notification_notification_type"),
    ]

    operations = [
        migrations.CreateModel(
            name="ParentJob",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("task_id", models.UUIDField(blank=True, null=True, unique=True)),
                (
                    "created_date",
                    models.DateTimeField(default=django.utils.timezone.now),
                ),
                (
                    "updated_date",
                    models.DateTimeField(default=django.utils.timezone.now),
                ),
                ("complete", models.BooleanField(default=False)),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("pending", "Pending"),
                            ("active", "Active"),
                            ("complete", "Complete"),
                            ("stopped", "Stopped"),
                            ("failed", "Failed"),
                        ],
                        default="pending",
                        max_length=50,
                        null=True,
                    ),
                ),
                (
                    "user",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
        migrations.AddField(
            model_name="sitesettings",
            name="user_import_time_limit",
            field=models.IntegerField(default=48),
        ),
        migrations.AlterField(
            model_name="notification",
            name="notification_type",
            field=models.CharField(
                choices=[
                    ("FAVORITE", "Favorite"),
                    ("BOOST", "Boost"),
                    ("REPLY", "Reply"),
                    ("MENTION", "Mention"),
                    ("TAG", "Tag"),
                    ("FOLLOW", "Follow"),
                    ("FOLLOW_REQUEST", "Follow Request"),
                    ("IMPORT", "Import"),
                    ("USER_IMPORT", "User Import"),
                    ("USER_EXPORT", "User Export"),
                    ("ADD", "Add"),
                    ("REPORT", "Report"),
                    ("LINK_DOMAIN", "Link Domain"),
                    ("INVITE", "Invite"),
                    ("ACCEPT", "Accept"),
                    ("JOIN", "Join"),
                    ("LEAVE", "Leave"),
                    ("REMOVE", "Remove"),
                    ("GROUP_PRIVACY", "Group Privacy"),
                    ("GROUP_NAME", "Group Name"),
                    ("GROUP_DESCRIPTION", "Group Description"),
                    ("MOVE", "Move"),
                ],
                max_length=255,
            ),
        ),
        migrations.CreateModel(
            name="BookwyrmExportJob",
            fields=[
                (
                    "parentjob_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="bookwyrm.parentjob",
                    ),
                ),
                ("export_data", models.FileField(null=True, upload_to="")),
            ],
            options={
                "abstract": False,
            },
            bases=("bookwyrm.parentjob",),
        ),
        migrations.CreateModel(
            name="BookwyrmImportJob",
            fields=[
                (
                    "parentjob_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="bookwyrm.parentjob",
                    ),
                ),
                ("archive_file", models.FileField(blank=True, null=True, upload_to="")),
                ("import_data", models.JSONField(null=True)),
                (
                    "required",
                    django.contrib.postgres.fields.ArrayField(
                        base_field=models.CharField(blank=True, max_length=50),
                        blank=True,
                        size=None,
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
            bases=("bookwyrm.parentjob",),
        ),
        migrations.CreateModel(
            name="ChildJob",
            fields=[
                (
                    "id",
                    models.AutoField(
                        auto_created=True,
                        primary_key=True,
                        serialize=False,
                        verbose_name="ID",
                    ),
                ),
                ("task_id", models.UUIDField(blank=True, null=True, unique=True)),
                (
                    "created_date",
                    models.DateTimeField(default=django.utils.timezone.now),
                ),
                (
                    "updated_date",
                    models.DateTimeField(default=django.utils.timezone.now),
                ),
                ("complete", models.BooleanField(default=False)),
                (
                    "status",
                    models.CharField(
                        choices=[
                            ("pending", "Pending"),
                            ("active", "Active"),
                            ("complete", "Complete"),
                            ("stopped", "Stopped"),
                            ("failed", "Failed"),
                        ],
                        default="pending",
                        max_length=50,
                        null=True,
                    ),
                ),
                (
                    "parent_job",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="child_jobs",
                        to="bookwyrm.parentjob",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
        migrations.AddField(
            model_name="notification",
            name="related_user_export",
            field=models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to="bookwyrm.bookwyrmexportjob",
            ),
        ),
    ]

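The ParentJob/ChildJob pair added here gives every long-running export a queryable job tree; a small sketch, assuming at least one export job already exists:

from bookwyrm import models

job = models.BookwyrmExportJob.objects.first()
if job is not None:
    # ChildJob rows point back through the related_name "child_jobs"
    pending = job.child_jobs.filter(status="pending").count()
    print(f"{pending} child job(s) still pending")
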
48  bookwyrm/migrations/0186_invite_request_notification.py  Normal file
@ -0,0 +1,48 @@
# Generated by Django 3.2.20 on 2023-11-14 10:02

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0185_alter_notification_notification_type"),
    ]

    operations = [
        migrations.AddField(
            model_name="notification",
            name="related_invite_requests",
            field=models.ManyToManyField(to="bookwyrm.InviteRequest"),
        ),
        migrations.AlterField(
            model_name="notification",
            name="notification_type",
            field=models.CharField(
                choices=[
                    ("FAVORITE", "Favorite"),
                    ("BOOST", "Boost"),
                    ("REPLY", "Reply"),
                    ("MENTION", "Mention"),
                    ("TAG", "Tag"),
                    ("FOLLOW", "Follow"),
                    ("FOLLOW_REQUEST", "Follow Request"),
                    ("IMPORT", "Import"),
                    ("ADD", "Add"),
                    ("REPORT", "Report"),
                    ("LINK_DOMAIN", "Link Domain"),
                    ("INVITE_REQUEST", "Invite Request"),
                    ("INVITE", "Invite"),
                    ("ACCEPT", "Accept"),
                    ("JOIN", "Join"),
                    ("LEAVE", "Leave"),
                    ("REMOVE", "Remove"),
                    ("GROUP_PRIVACY", "Group Privacy"),
                    ("GROUP_NAME", "Group Name"),
                    ("GROUP_DESCRIPTION", "Group Description"),
                    ("MOVE", "Move"),
                ],
                max_length=255,
            ),
        ),
    ]

54  bookwyrm/migrations/0187_partial_publication_dates.py  Normal file
@ -0,0 +1,54 @@
# Generated by Django 3.2.20 on 2023-11-09 16:57

import bookwyrm.models.fields
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0186_invite_request_notification"),
    ]

    operations = [
        migrations.AddField(
            model_name="book",
            name="first_published_date_precision",
            field=models.CharField(
                blank=True,
                choices=[
                    ("DAY", "Day prec."),
                    ("MONTH", "Month prec."),
                    ("YEAR", "Year prec."),
                ],
                editable=False,
                max_length=10,
                null=True,
            ),
        ),
        migrations.AddField(
            model_name="book",
            name="published_date_precision",
            field=models.CharField(
                blank=True,
                choices=[
                    ("DAY", "Day prec."),
                    ("MONTH", "Month prec."),
                    ("YEAR", "Year prec."),
                ],
                editable=False,
                max_length=10,
                null=True,
            ),
        ),
        migrations.AlterField(
            model_name="book",
            name="first_published_date",
            field=bookwyrm.models.fields.PartialDateField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name="book",
            name="published_date",
            field=bookwyrm.models.fields.PartialDateField(blank=True, null=True),
        ),
    ]

18  bookwyrm/migrations/0188_theme_loads.py  Normal file
@ -0,0 +1,18 @@
# Generated by Django 3.2.23 on 2023-11-20 18:02

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0187_partial_publication_dates"),
    ]

    operations = [
        migrations.AddField(
            model_name="theme",
            name="loads",
            field=models.BooleanField(blank=True, null=True),
        ),
    ]

45  bookwyrm/migrations/0189_alter_user_preferred_language.py  Normal file
@ -0,0 +1,45 @@
# Generated by Django 3.2.23 on 2023-12-12 23:42

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0188_theme_loads"),
    ]

    operations = [
        migrations.AlterField(
            model_name="user",
            name="preferred_language",
            field=models.CharField(
                blank=True,
                choices=[
                    ("en-us", "English"),
                    ("ca-es", "Català (Catalan)"),
                    ("de-de", "Deutsch (German)"),
                    ("eo-uy", "Esperanto (Esperanto)"),
                    ("es-es", "Español (Spanish)"),
                    ("eu-es", "Euskara (Basque)"),
                    ("gl-es", "Galego (Galician)"),
                    ("it-it", "Italiano (Italian)"),
                    ("fi-fi", "Suomi (Finnish)"),
                    ("fr-fr", "Français (French)"),
                    ("lt-lt", "Lietuvių (Lithuanian)"),
                    ("nl-nl", "Nederlands (Dutch)"),
                    ("no-no", "Norsk (Norwegian)"),
                    ("pl-pl", "Polski (Polish)"),
                    ("pt-br", "Português do Brasil (Brazilian Portuguese)"),
                    ("pt-pt", "Português Europeu (European Portuguese)"),
                    ("ro-ro", "Română (Romanian)"),
                    ("sv-se", "Svenska (Swedish)"),
                    ("uk-ua", "Українська (Ukrainian)"),
                    ("zh-hans", "简体中文 (Simplified Chinese)"),
                    ("zh-hant", "繁體中文 (Traditional Chinese)"),
                ],
                max_length=255,
                null=True,
            ),
        ),
    ]

@ -0,0 +1,13 @@ (new file; a later dependency identifies it as bookwyrm/migrations/0189_merge_0186_auto_20231116_0048_0188_theme_loads.py)
# Generated by Django 3.2.23 on 2023-11-22 10:16

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0186_auto_20231116_0048"),
        ("bookwyrm", "0188_theme_loads"),
    ]

    operations = []

@ -0,0 +1,45 @@ (new file; later dependencies identify it as bookwyrm/migrations/0190_alter_notification_notification_type.py)
# Generated by Django 3.2.23 on 2023-11-23 19:49

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0189_merge_0186_auto_20231116_0048_0188_theme_loads"),
    ]

    operations = [
        migrations.AlterField(
            model_name="notification",
            name="notification_type",
            field=models.CharField(
                choices=[
                    ("FAVORITE", "Favorite"),
                    ("BOOST", "Boost"),
                    ("REPLY", "Reply"),
                    ("MENTION", "Mention"),
                    ("TAG", "Tag"),
                    ("FOLLOW", "Follow"),
                    ("FOLLOW_REQUEST", "Follow Request"),
                    ("IMPORT", "Import"),
                    ("USER_IMPORT", "User Import"),
                    ("USER_EXPORT", "User Export"),
                    ("ADD", "Add"),
                    ("REPORT", "Report"),
                    ("LINK_DOMAIN", "Link Domain"),
                    ("INVITE_REQUEST", "Invite Request"),
                    ("INVITE", "Invite"),
                    ("ACCEPT", "Accept"),
                    ("JOIN", "Join"),
                    ("LEAVE", "Leave"),
                    ("REMOVE", "Remove"),
                    ("GROUP_PRIVACY", "Group Privacy"),
                    ("GROUP_NAME", "Group Name"),
                    ("GROUP_DESCRIPTION", "Group Description"),
                    ("MOVE", "Move"),
                ],
                max_length=255,
            ),
        ),
    ]

16  bookwyrm/migrations/0190_book_search_updates.py  Normal file
@ -0,0 +1,16 @@
# Generated by Django 3.2.20 on 2023-11-24 17:11

from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("bookwyrm", "0188_theme_loads"),
    ]

    operations = [
        migrations.RemoveIndex(
            model_name="author",
            name="bookwyrm_au_search__b050a8_gin",
        ),
    ]

13  bookwyrm/migrations/0191_merge_20240102_0326.py  Normal file
@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-01-02 03:26

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0189_alter_user_preferred_language"),
        ("bookwyrm", "0190_alter_notification_notification_type"),
    ]

    operations = []

@ -0,0 +1,76 @@ (new file; a later dependency identifies it as bookwyrm/migrations/0191_migrate_search_vec_triggers_to_pgtriggers.py)
# Generated by Django 3.2.20 on 2023-11-25 00:47

from importlib import import_module
import re

from django.db import migrations
import pgtrigger.compiler
import pgtrigger.migrations

trigger_migration = import_module("bookwyrm.migrations.0077_auto_20210623_2155")

# it's _very_ convenient for development that this migration be reversible
search_vector_trigger = trigger_migration.Migration.operations[4]
author_search_vector_trigger = trigger_migration.Migration.operations[5]


assert re.search(r"\bCREATE TRIGGER search_vector_trigger\b", search_vector_trigger.sql)
assert re.search(
    r"\bCREATE TRIGGER author_search_vector_trigger\b",
    author_search_vector_trigger.sql,
)


class Migration(migrations.Migration):
    dependencies = [
        ("bookwyrm", "0190_book_search_updates"),
    ]

    operations = [
        pgtrigger.migrations.AddTrigger(
            model_name="book",
            trigger=pgtrigger.compiler.Trigger(
                name="update_search_vector_on_book_edit",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func="new.search_vector := setweight(coalesce(nullif(to_tsvector('english', new.title), ''), to_tsvector('simple', new.title)), 'A') || setweight(to_tsvector('english', coalesce(new.subtitle, '')), 'B') || (SELECT setweight(to_tsvector('simple', coalesce(array_to_string(array_agg(bookwyrm_author.name), ' '), '')), 'C') FROM bookwyrm_author LEFT JOIN bookwyrm_book_authors ON bookwyrm_author.id = bookwyrm_book_authors.author_id WHERE bookwyrm_book_authors.book_id = new.id ) || setweight(to_tsvector('english', coalesce(new.series, '')), 'D');RETURN NEW;",
                    hash="77d6399497c0a89b0bf09d296e33c396da63705c",
                    operation='INSERT OR UPDATE OF "title", "subtitle", "series", "search_vector"',
                    pgid="pgtrigger_update_search_vector_on_book_edit_bec58",
                    table="bookwyrm_book",
                    when="BEFORE",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="author",
            trigger=pgtrigger.compiler.Trigger(
                name="reset_search_vector_on_author_edit",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func="WITH updated_books AS (SELECT book_id FROM bookwyrm_book_authors WHERE author_id = new.id ) UPDATE bookwyrm_book SET search_vector = '' FROM updated_books WHERE id = updated_books.book_id;RETURN NEW;",
                    hash="e7bbf08711ff3724c58f4d92fb7a082ffb3d7826",
                    operation='UPDATE OF "name"',
                    pgid="pgtrigger_reset_search_vector_on_author_edit_a447c",
                    table="bookwyrm_author",
                    when="AFTER",
                ),
            ),
        ),
        migrations.RunSQL(
            sql="""DROP TRIGGER IF EXISTS search_vector_trigger ON bookwyrm_book;
            DROP FUNCTION IF EXISTS book_trigger;
            """,
            reverse_sql=search_vector_trigger.sql,
        ),
        migrations.RunSQL(
            sql="""DROP TRIGGER IF EXISTS author_search_vector_trigger ON bookwyrm_author;
            DROP FUNCTION IF EXISTS author_trigger;
            """,
            reverse_sql=author_search_vector_trigger.sql,
        ),
        migrations.RunSQL(
            # Recalculate book search vector for any missed author name changes
            # due to bug in JOIN in the old trigger.
            sql="UPDATE bookwyrm_book SET search_vector = NULL;",
            reverse_sql=migrations.RunSQL.noop,
        ),
    ]

23  bookwyrm/migrations/0192_make_page_positions_text.py  Normal file
@ -0,0 +1,23 @@
# Generated by Django 3.2.23 on 2024-01-04 23:56

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0191_merge_20240102_0326"),
    ]

    operations = [
        migrations.AlterField(
            model_name="quotation",
            name="endposition",
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name="quotation",
            name="position",
            field=models.TextField(blank=True, null=True),
        ),
    ]

@ -0,0 +1,18 @@ (new file; a later dependency identifies it as bookwyrm/migrations/0192_rename_version_sitesettings_available_version.py)
# Generated by Django 3.2.23 on 2024-01-02 19:36

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0191_merge_20240102_0326"),
    ]

    operations = [
        migrations.RenameField(
            model_name="sitesettings",
            old_name="version",
            new_name="available_version",
        ),
    ]

@ -0,0 +1,18 @@ (new file; later dependencies identify it as bookwyrm/migrations/0192_sitesettings_user_exports_enabled.py)
# Generated by Django 3.2.23 on 2024-01-16 10:28

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0191_merge_20240102_0326"),
    ]

    operations = [
        migrations.AddField(
            model_name="sitesettings",
            name="user_exports_enabled",
            field=models.BooleanField(default=False),
        ),
    ]

92  bookwyrm/migrations/0193_auto_20240128_0249.py  Normal file
@ -0,0 +1,92 @@
# Generated by Django 3.2.23 on 2024-01-28 02:49

import bookwyrm.storage_backends
import django.core.serializers.json
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0192_sitesettings_user_exports_enabled"),
    ]

    operations = [
        migrations.AddField(
            model_name="bookwyrmexportjob",
            name="export_json",
            field=models.JSONField(
                encoder=django.core.serializers.json.DjangoJSONEncoder, null=True
            ),
        ),
        migrations.AddField(
            model_name="bookwyrmexportjob",
            name="json_completed",
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name="bookwyrmexportjob",
            name="export_data",
            field=models.FileField(
                null=True,
                storage=bookwyrm.storage_backends.ExportsFileStorage,
                upload_to="",
            ),
        ),
        migrations.CreateModel(
            name="AddFileToTar",
            fields=[
                (
                    "childjob_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="bookwyrm.childjob",
                    ),
                ),
                (
                    "parent_export_job",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        related_name="child_edition_export_jobs",
                        to="bookwyrm.bookwyrmexportjob",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
            bases=("bookwyrm.childjob",),
        ),
        migrations.CreateModel(
            name="AddBookToUserExportJob",
            fields=[
                (
                    "childjob_ptr",
                    models.OneToOneField(
                        auto_created=True,
                        on_delete=django.db.models.deletion.CASCADE,
                        parent_link=True,
                        primary_key=True,
                        serialize=False,
                        to="bookwyrm.childjob",
                    ),
                ),
                (
                    "edition",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="bookwyrm.edition",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
            bases=("bookwyrm.childjob",),
        ),
    ]

13  bookwyrm/migrations/0193_merge_20240203_1539.py  Normal file
@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-02-03 15:39

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0192_make_page_positions_text"),
        ("bookwyrm", "0192_sitesettings_user_exports_enabled"),
    ]

    operations = []

13  bookwyrm/migrations/0194_merge_20240203_1619.py  Normal file
@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-02-03 16:19

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0192_rename_version_sitesettings_available_version"),
        ("bookwyrm", "0193_merge_20240203_1539"),
    ]

    operations = []

46  bookwyrm/migrations/0195_alter_user_preferred_language.py  Normal file
@ -0,0 +1,46 @@
# Generated by Django 3.2.23 on 2024-02-21 00:45

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0194_merge_20240203_1619"),
    ]

    operations = [
        migrations.AlterField(
            model_name="user",
            name="preferred_language",
            field=models.CharField(
                blank=True,
                choices=[
                    ("en-us", "English"),
                    ("ca-es", "Català (Catalan)"),
                    ("de-de", "Deutsch (German)"),
                    ("eo-uy", "Esperanto (Esperanto)"),
                    ("es-es", "Español (Spanish)"),
                    ("eu-es", "Euskara (Basque)"),
                    ("gl-es", "Galego (Galician)"),
                    ("it-it", "Italiano (Italian)"),
                    ("ko-kr", "한국어 (Korean)"),
                    ("fi-fi", "Suomi (Finnish)"),
                    ("fr-fr", "Français (French)"),
                    ("lt-lt", "Lietuvių (Lithuanian)"),
                    ("nl-nl", "Nederlands (Dutch)"),
                    ("no-no", "Norsk (Norwegian)"),
                    ("pl-pl", "Polski (Polish)"),
                    ("pt-br", "Português do Brasil (Brazilian Portuguese)"),
                    ("pt-pt", "Português Europeu (European Portuguese)"),
                    ("ro-ro", "Română (Romanian)"),
                    ("sv-se", "Svenska (Swedish)"),
                    ("uk-ua", "Українська (Ukrainian)"),
                    ("zh-hans", "简体中文 (Simplified Chinese)"),
                    ("zh-hant", "繁體中文 (Traditional Chinese)"),
                ],
                max_length=255,
                null=True,
            ),
        ),
    ]

13  bookwyrm/migrations/0196_merge_20240318_1737.py  Normal file
@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-03-18 17:37

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0193_auto_20240128_0249"),
        ("bookwyrm", "0195_alter_user_preferred_language"),
    ]

    operations = []

13  bookwyrm/migrations/0196_merge_pr3134_into_main.py  Normal file
@ -0,0 +1,13 @@
# Generated by Django 3.2.23 on 2024-03-18 00:48

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0191_migrate_search_vec_triggers_to_pgtriggers"),
        ("bookwyrm", "0195_alter_user_preferred_language"),
    ]

    operations = []

41  bookwyrm/migrations/0197_author_search_vector.py  Normal file
@ -0,0 +1,41 @@
# Generated by Django 3.2.25 on 2024-03-20 15:15

import django.contrib.postgres.indexes
from django.db import migrations
import pgtrigger.compiler
import pgtrigger.migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0196_merge_pr3134_into_main"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="author",
            index=django.contrib.postgres.indexes.GinIndex(
                fields=["search_vector"], name="bookwyrm_au_search__b050a8_gin"
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="author",
            trigger=pgtrigger.compiler.Trigger(
                name="update_search_vector_on_author_edit",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func="new.search_vector := setweight(to_tsvector('simple', new.name), 'A') || setweight(to_tsvector('simple', coalesce(array_to_string(new.aliases, ' '), '')), 'B');RETURN NEW;",
                    hash="b97919016236d74d0ade51a0769a173ea269da64",
                    operation='INSERT OR UPDATE OF "name", "aliases", "search_vector"',
                    pgid="pgtrigger_update_search_vector_on_author_edit_c61cb",
                    table="bookwyrm_author",
                    when="BEFORE",
                ),
            ),
        ),
        migrations.RunSQL(
            # Calculate search vector for all Authors: blanking the column
            # fires the BEFORE UPDATE trigger above, which recomputes it.
            sql="UPDATE bookwyrm_author SET search_vector = NULL;",
            reverse_sql="UPDATE bookwyrm_author SET search_vector = NULL;",
        ),
    ]

13  bookwyrm/migrations/0197_merge_20240324_0235.py  Normal file
@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-03-24 02:35

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0196_merge_20240318_1737"),
        ("bookwyrm", "0196_merge_pr3134_into_main"),
    ]

    operations = []

48  bookwyrm/migrations/0197_mergedauthor_mergedbook.py  Normal file
@ -0,0 +1,48 @@
# Generated by Django 3.2.24 on 2024-02-28 21:30

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0196_merge_pr3134_into_main"),
    ]

    operations = [
        migrations.CreateModel(
            name="MergedBook",
            fields=[
                ("deleted_id", models.IntegerField(primary_key=True, serialize=False)),
                (
                    "merged_into",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        related_name="absorbed",
                        to="bookwyrm.book",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
        migrations.CreateModel(
            name="MergedAuthor",
            fields=[
                ("deleted_id", models.IntegerField(primary_key=True, serialize=False)),
                (
                    "merged_into",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.PROTECT,
                        related_name="absorbed",
                        to="bookwyrm.author",
                    ),
                ),
            ],
            options={
                "abstract": False,
            },
        ),
    ]

@ -0,0 +1,23 @@ (new file; a later dependency identifies it as bookwyrm/migrations/0198_alter_bookwyrmexportjob_export_data.py)
# Generated by Django 3.2.25 on 2024-03-26 11:37

import bookwyrm.models.bookwyrm_export_job
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0197_merge_20240324_0235"),
    ]

    operations = [
        migrations.AlterField(
            model_name="bookwyrmexportjob",
            name="export_data",
            field=models.FileField(
                null=True,
                storage=bookwyrm.models.bookwyrm_export_job.select_exports_storage,
                upload_to="",
            ),
        ),
    ]

@ -0,0 +1,57 @@ (new file; later dependencies identify it as bookwyrm/migrations/0198_book_search_vector_author_aliases.py)
# Generated by Django 3.2.25 on 2024-03-20 15:52

from django.db import migrations
import pgtrigger.compiler
import pgtrigger.migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0197_author_search_vector"),
    ]

    operations = [
        pgtrigger.migrations.RemoveTrigger(
            model_name="author",
            name="reset_search_vector_on_author_edit",
        ),
        pgtrigger.migrations.RemoveTrigger(
            model_name="book",
            name="update_search_vector_on_book_edit",
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="author",
            trigger=pgtrigger.compiler.Trigger(
                name="reset_book_search_vector_on_author_edit",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func="WITH updated_books AS (SELECT book_id FROM bookwyrm_book_authors WHERE author_id = new.id ) UPDATE bookwyrm_book SET search_vector = '' FROM updated_books WHERE id = updated_books.book_id;RETURN NEW;",
                    hash="68422c0f29879c5802b82159dde45297eff53e73",
                    operation='UPDATE OF "name", "aliases"',
                    pgid="pgtrigger_reset_book_search_vector_on_author_edit_a50c7",
                    table="bookwyrm_author",
                    when="AFTER",
                ),
            ),
        ),
        pgtrigger.migrations.AddTrigger(
            model_name="book",
            trigger=pgtrigger.compiler.Trigger(
                name="update_search_vector_on_book_edit",
                sql=pgtrigger.compiler.UpsertTriggerSql(
                    func="WITH author_names AS (SELECT array_to_string(bookwyrm_author.name || bookwyrm_author.aliases, ' ') AS name_and_aliases FROM bookwyrm_author LEFT JOIN bookwyrm_book_authors ON bookwyrm_author.id = bookwyrm_book_authors.author_id WHERE bookwyrm_book_authors.book_id = new.id ) SELECT setweight(coalesce(nullif(to_tsvector('english', new.title), ''), to_tsvector('simple', new.title)), 'A') || setweight(to_tsvector('english', coalesce(new.subtitle, '')), 'B') || (SELECT setweight(to_tsvector('simple', coalesce(array_to_string(array_agg(name_and_aliases), ' '), '')), 'C') FROM author_names) || setweight(to_tsvector('english', coalesce(new.series, '')), 'D') INTO new.search_vector;RETURN NEW;",
                    hash="9324f5ca76a6f5e63931881d62d11da11f595b2c",
                    operation='INSERT OR UPDATE OF "title", "subtitle", "series", "search_vector"',
                    pgid="pgtrigger_update_search_vector_on_book_edit_bec58",
                    table="bookwyrm_book",
                    when="BEFORE",
                ),
            ),
        ),
        migrations.RunSQL(
            # Recalculate search vector for all Books because it now includes
            # Author aliases.
            sql="UPDATE bookwyrm_book SET search_vector = NULL;",
            reverse_sql="UPDATE bookwyrm_book SET search_vector = NULL;",
        ),
    ]

13  bookwyrm/migrations/0199_merge_20240326_1217.py  Normal file
@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-03-26 12:17

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0198_alter_bookwyrmexportjob_export_data"),
        ("bookwyrm", "0198_book_search_vector_author_aliases"),
    ]

    operations = []

@ -0,0 +1,19 @@ (new file; a later dependency identifies it as bookwyrm/migrations/0199_status_bookwyrm_st_remote__06aeba_idx.py)
# Generated by Django 3.2.25 on 2024-04-02 19:53

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0198_book_search_vector_author_aliases"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="status",
            index=models.Index(
                fields=["remote_id"], name="bookwyrm_st_remote__06aeba_idx"
            ),
        ),
    ]

27  bookwyrm/migrations/0200_auto_20240327_1914.py  Normal file
@ -0,0 +1,27 @@
# Generated by Django 3.2.25 on 2024-03-27 19:14

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0199_merge_20240326_1217"),
    ]

    operations = [
        migrations.RemoveField(
            model_name="addfiletotar",
            name="childjob_ptr",
        ),
        migrations.RemoveField(
            model_name="addfiletotar",
            name="parent_export_job",
        ),
        migrations.DeleteModel(
            name="AddBookToUserExportJob",
        ),
        migrations.DeleteModel(
            name="AddFileToTar",
        ),
    ]

@ -0,0 +1,19 @@ (new file; a later dependency identifies it as bookwyrm/migrations/0200_status_bookwyrm_st_thread__cf064f_idx.py)
# Generated by Django 3.2.25 on 2024-04-03 19:05

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0199_status_bookwyrm_st_remote__06aeba_idx"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="status",
            index=models.Index(
                fields=["thread_id"], name="bookwyrm_st_thread__cf064f_idx"
            ),
        ),
    ]

@ -0,0 +1,19 @@ (new file; a later dependency identifies it as bookwyrm/migrations/0201_keypair_bookwyrm_ke_remote__472927_idx.py)
# Generated by Django 3.2.25 on 2024-04-03 19:10

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0200_status_bookwyrm_st_thread__cf064f_idx"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="keypair",
            index=models.Index(
                fields=["remote_id"], name="bookwyrm_ke_remote__472927_idx"
            ),
        ),
    ]

@ -0,0 +1,19 @@ (new file; a later dependency identifies it as bookwyrm/migrations/0202_user_bookwyrm_us_usernam_b2546d_idx.py)
# Generated by Django 3.2.25 on 2024-04-03 19:14

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0201_keypair_bookwyrm_ke_remote__472927_idx"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="user",
            index=models.Index(
                fields=["username"], name="bookwyrm_us_usernam_b2546d_idx"
            ),
        ),
    ]

@ -0,0 +1,19 @@ (new file; a later dependency identifies it as bookwyrm/migrations/0203_user_bookwyrm_us_is_acti_972dc4_idx.py)
# Generated by Django 3.2.25 on 2024-04-03 19:22

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0202_user_bookwyrm_us_usernam_b2546d_idx"),
    ]

    operations = [
        migrations.AddIndex(
            model_name="user",
            index=models.Index(
                fields=["is_active", "local"], name="bookwyrm_us_is_acti_972dc4_idx"
            ),
        ),
    ]

13  bookwyrm/migrations/0204_merge_20240409_1042.py  Normal file
@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-04-09 10:42

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0197_mergedauthor_mergedbook"),
        ("bookwyrm", "0203_user_bookwyrm_us_is_acti_972dc4_idx"),
    ]

    operations = []

13  bookwyrm/migrations/0205_merge_20240413_0232.py  Normal file
@ -0,0 +1,13 @@
# Generated by Django 3.2.25 on 2024-04-13 02:32

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ("bookwyrm", "0200_auto_20240327_1914"),
        ("bookwyrm", "0204_merge_20240409_1042"),
    ]

    operations = []

@ -20,19 +20,23 @@ from .readthrough import ReadThrough, ProgressUpdate, ProgressMode (bookwyrm/models/__init__.py, inferred from the relative imports)
 from .user import User, KeyPair
 from .annual_goal import AnnualGoal
 from .relationship import UserFollows, UserFollowRequest, UserBlocks
-from .report import Report, ReportComment
+from .report import Report, ReportAction
 from .federated_server import FederatedServer

 from .group import Group, GroupMember, GroupMemberInvitation

 from .import_job import ImportJob, ImportItem
+from .bookwyrm_import_job import BookwyrmImportJob
+from .bookwyrm_export_job import BookwyrmExportJob
+
+from .move import MoveUser

 from .site import SiteSettings, Theme, SiteInvite
 from .site import PasswordReset, InviteRequest
 from .announcement import Announcement
 from .antispam import EmailBlocklist, IPBlocklist, AutoMod, automod_task

-from .notification import Notification
+from .notification import Notification, NotificationType

 from .hashtag import Hashtag

@ -6,8 +6,9 @@ from functools import reduce
|
|||
import json
|
||||
import operator
|
||||
import logging
|
||||
from typing import List
|
||||
from typing import Any, Optional
|
||||
from uuid import uuid4
|
||||
from typing_extensions import Self
|
||||
|
||||
import aiohttp
|
||||
from Crypto.PublicKey import RSA
|
||||
|
@ -85,7 +86,7 @@ class ActivitypubMixin:
|
|||
super().__init__(*args, **kwargs)
|
||||
|
||||
@classmethod
|
||||
def find_existing_by_remote_id(cls, remote_id):
|
||||
def find_existing_by_remote_id(cls, remote_id: str) -> Self:
|
||||
"""look up a remote id in the db"""
|
||||
return cls.find_existing({"id": remote_id})
|
||||
|
||||
|
@ -137,7 +138,7 @@ class ActivitypubMixin:
|
|||
queue=queue,
|
||||
)
|
||||
|
||||
def get_recipients(self, software=None) -> List[str]:
|
||||
def get_recipients(self, software=None) -> list[str]:
|
||||
"""figure out which inbox urls to post to"""
|
||||
# first we have to figure out who should receive this activity
|
||||
privacy = self.privacy if hasattr(self, "privacy") else "public"
|
||||
|
@ -151,8 +152,9 @@ class ActivitypubMixin:
|
|||
# find anyone who's tagged in a status, for example
|
||||
mentions = self.recipients if hasattr(self, "recipients") else []
|
||||
|
||||
# we always send activities to explicitly mentioned users' inboxes
|
||||
recipients = [u.inbox for u in mentions or [] if not u.local]
|
||||
# we always send activities to explicitly mentioned users (using shared inboxes
|
||||
# where available to avoid duplicate submissions to a given instance)
|
||||
recipients = {u.shared_inbox or u.inbox for u in mentions if not u.local}
|
||||
|
||||
# unless it's a dm, all the followers should receive the activity
|
||||
if privacy != "direct":
|
||||
|
@ -172,18 +174,18 @@ class ActivitypubMixin:
|
|||
if user:
|
||||
queryset = queryset.filter(following=user)
|
||||
|
||||
# ideally, we will send to shared inboxes for efficiency
|
||||
shared_inboxes = (
|
||||
queryset.filter(shared_inbox__isnull=False)
|
||||
.values_list("shared_inbox", flat=True)
|
||||
.distinct()
|
||||
# as above, we prefer shared inboxes if available
|
||||
recipients.update(
|
||||
queryset.filter(shared_inbox__isnull=False).values_list(
|
||||
"shared_inbox", flat=True
|
||||
)
|
||||
)
|
||||
# but not everyone has a shared inbox
|
||||
inboxes = queryset.filter(shared_inbox__isnull=True).values_list(
|
||||
"inbox", flat=True
|
||||
recipients.update(
|
||||
queryset.filter(shared_inbox__isnull=True).values_list(
|
||||
"inbox", flat=True
|
||||
)
|
||||
)
|
||||
recipients += list(shared_inboxes) + list(inboxes)
|
||||
return list(set(recipients))
|
||||
return list(recipients)
|
||||
|
||||
def to_activity_dataclass(self):
|
||||
"""convert from a model to an activity"""
|
||||
|
@@ -198,7 +200,14 @@ class ActivitypubMixin:
 class ObjectMixin(ActivitypubMixin):
     """add this mixin for object models that are AP serializable"""
 
-    def save(self, *args, created=None, software=None, priority=BROADCAST, **kwargs):
+    def save(
+        self,
+        *args: Any,
+        created: Optional[bool] = None,
+        software: Any = None,
+        priority: str = BROADCAST,
+        **kwargs: Any,
+    ) -> None:
         """broadcast created/updated/deleted objects as appropriate"""
         broadcast = kwargs.get("broadcast", True)
         # this bonus kwarg would cause an error in the base save method
@@ -507,14 +516,14 @@ def unfurl_related_field(related_field, sort_field=None):
 
 
 @app.task(queue=BROADCAST)
-def broadcast_task(sender_id: int, activity: str, recipients: List[str]):
+def broadcast_task(sender_id: int, activity: str, recipients: list[str]):
     """the celery task for broadcast"""
     user_model = apps.get_model("bookwyrm.User", require_ready=True)
     sender = user_model.objects.select_related("key_pair").get(id=sender_id)
     asyncio.run(async_broadcast(recipients, sender, activity))
 
 
-async def async_broadcast(recipients: List[str], sender, data: str):
+async def async_broadcast(recipients: list[str], sender, data: str):
     """Send all the broadcasts simultaneously"""
     timeout = aiohttp.ClientTimeout(total=10)
     async with aiohttp.ClientSession(timeout=timeout) as session:
@@ -594,7 +603,7 @@ def to_ordered_collection_page(
     if activity_page.has_next():
         next_page = f"{remote_id}?page={activity_page.next_page_number()}"
     if activity_page.has_previous():
-        prev_page = f"{remote_id}?page=%d{activity_page.previous_page_number()}"
+        prev_page = f"{remote_id}?page={activity_page.previous_page_number()}"
     return activitypub.OrderedCollectionPage(
         id=f"{remote_id}?page={page}",
         partOf=remote_id,
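This fixes a real bug: a leftover printf-style %d inside the f-string was emitted literally, so previous-page links pointed at URLs like ?page=%d2. A quick illustration:

remote_id = "https://example.net/user/mouse/outbox"  # hypothetical value
page = 2
f"{remote_id}?page=%d{page}"  # before: '...outbox?page=%d2' (broken)
f"{remote_id}?page={page}"    # after:  '...outbox?page=2'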
bookwyrm/models/antispam.py
@@ -10,6 +10,7 @@ from django.utils.translation import gettext_lazy as _
 
 from bookwyrm.tasks import app, MISC
 from .base_model import BookWyrmModel
+from .notification import NotificationType
 from .user import User
@@ -80,7 +81,7 @@ def automod_task():
     with transaction.atomic():
         for admin in admins:
             notification, _ = notification_model.objects.get_or_create(
-                user=admin, notification_type=notification_model.REPORT, read=False
+                user=admin, notification_type=NotificationType.REPORT, read=False
             )
             notification.related_reports.set(reports)
bookwyrm/models/author.py
@@ -1,18 +1,25 @@
 """ database schema for info about authors """
+
 import re
-from django.contrib.postgres.indexes import GinIndex
+from typing import Tuple, Any
+
 from django.db import models
+from django.contrib.postgres.indexes import GinIndex
+import pgtrigger
 
 from bookwyrm import activitypub
-from bookwyrm.settings import DOMAIN
-from .book import BookDataModel
+from bookwyrm.settings import BASE_URL
+from bookwyrm.utils.db import format_trigger
+
+from .book import BookDataModel, MergedAuthor
 from . import fields
 
 
 class Author(BookDataModel):
     """basic biographic info"""
 
+    merged_model = MergedAuthor
+
     wikipedia_link = fields.CharField(
         max_length=255, blank=True, null=True, deduplication_field=True
     )
@@ -38,7 +45,7 @@ class Author(BookDataModel):
     )
     bio = fields.HtmlField(null=True, blank=True)
 
-    def save(self, *args, **kwargs):
+    def save(self, *args: Tuple[Any, ...], **kwargs: dict[str, Any]) -> None:
         """normalize isni format"""
         if self.isni:
             self.isni = re.sub(r"\s", "", self.isni)
@@ -63,11 +70,48 @@ class Author(BookDataModel):
     def get_remote_id(self):
         """editions and works both use "book" instead of model_name"""
-        return f"https://{DOMAIN}/author/{self.id}"
-
-    activity_serializer = activitypub.Author
+        return f"{BASE_URL}/author/{self.id}"
 
     class Meta:
-        """sets up postgres GIN index field"""
+        """sets up indexes and triggers"""
+
+        # pylint: disable=line-too-long
 
         indexes = (GinIndex(fields=["search_vector"]),)
+        triggers = [
+            pgtrigger.Trigger(
+                name="update_search_vector_on_author_edit",
+                when=pgtrigger.Before,
+                operation=pgtrigger.Insert
+                | pgtrigger.UpdateOf("name", "aliases", "search_vector"),
+                func=format_trigger(
+                    """new.search_vector :=
+                    -- author name, with priority A
+                    setweight(to_tsvector('simple', new.name), 'A') ||
+                    -- author aliases, with priority B
+                    setweight(to_tsvector('simple', coalesce(array_to_string(new.aliases, ' '), '')), 'B');
+                    RETURN new;
+                    """
+                ),
+            ),
+            pgtrigger.Trigger(
+                name="reset_book_search_vector_on_author_edit",
+                when=pgtrigger.After,
+                operation=pgtrigger.UpdateOf("name", "aliases"),
+                func=format_trigger(
+                    """WITH updated_books AS (
+                        SELECT book_id
+                        FROM bookwyrm_book_authors
+                        WHERE author_id = new.id
+                    )
+                    UPDATE bookwyrm_book
+                    SET search_vector = ''
+                    FROM updated_books
+                    WHERE id = updated_books.book_id;
+                    RETURN new;
+                    """
+                ),
+            ),
+        ]
+
+    activity_serializer = activitypub.Author
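The new triggers move search indexing into Postgres itself: author names get weight A and aliases weight B, so a hit on the canonical name outranks an alias hit. A hedged sketch of how a ranked query against the maintained column might look (this query is not part of the diff):

from django.contrib.postgres.search import SearchQuery, SearchRank
from django.db.models import F
from bookwyrm.models import Author

query = SearchQuery("ursula", config="simple")
results = (
    Author.objects.filter(search_vector=query)             # served by the GIN index
    .annotate(rank=SearchRank(F("search_vector"), query))  # name (A) beats alias (B)
    .order_by("-rank")
)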
bookwyrm/models/base_model.py
@@ -10,7 +10,7 @@ from django.http import Http404
 from django.utils.translation import gettext_lazy as _
 from django.utils.text import slugify
 
-from bookwyrm.settings import DOMAIN
+from bookwyrm.settings import BASE_URL
 from .fields import RemoteIdField
@@ -38,7 +38,7 @@ class BookWyrmModel(models.Model):
 
     def get_remote_id(self):
         """generate the url that resolves to the local object, without a slug"""
-        base_path = f"https://{DOMAIN}"
+        base_path = BASE_URL
         if hasattr(self, "user"):
             base_path = f"{base_path}{self.user.local_path}"
@@ -53,7 +53,7 @@ class BookWyrmModel(models.Model):
     @property
     def local_path(self):
         """how to link to this object in the local app, with a slug"""
-        local = self.get_remote_id().replace(f"https://{DOMAIN}", "")
+        local = self.get_remote_id().replace(BASE_URL, "")
 
         name = None
         if hasattr(self, "name_field"):
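Deriving ids from BASE_URL instead of f"https://{DOMAIN}" keeps the scheme (and any future subpath) in a single setting; local_path is then just the remote id with that prefix stripped:

BASE_URL = "https://bookwyrm.example"         # hypothetical setting value
remote_id = f"{BASE_URL}/author/42"
local_path = remote_id.replace(BASE_URL, "")  # -> "/author/42"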
bookwyrm/models/book.py
@@ -1,27 +1,33 @@
 """ database schema for books and shelves """
+
 from itertools import chain
 import re
+from typing import Any, Dict
+from typing_extensions import Self
 
 from django.contrib.postgres.search import SearchVectorField
 from django.contrib.postgres.indexes import GinIndex
 from django.core.cache import cache
 from django.db import models, transaction
-from django.db.models import Prefetch
+from django.db.models import Prefetch, ManyToManyField
 from django.dispatch import receiver
 from django.utils.translation import gettext_lazy as _
 from model_utils import FieldTracker
 from model_utils.managers import InheritanceManager
 from imagekit.models import ImageSpecField
+import pgtrigger
 
 from bookwyrm import activitypub
 from bookwyrm.isbn.isbn import hyphenator_singleton as hyphenator
 from bookwyrm.preview_images import generate_edition_preview_image_task
 from bookwyrm.settings import (
-    DOMAIN,
+    BASE_URL,
     DEFAULT_LANGUAGE,
     LANGUAGE_ARTICLES,
     ENABLE_PREVIEW_IMAGES,
     ENABLE_THUMBNAIL_GENERATION,
 )
+from bookwyrm.utils.db import format_trigger
 
 from .activitypub_mixin import OrderedCollectionPageMixin, ObjectMixin
 from .base_model import BookWyrmModel
@@ -90,7 +96,7 @@ class BookDataModel(ObjectMixin, BookWyrmModel):
 
         abstract = True
 
-    def save(self, *args, **kwargs):
+    def save(self, *args: Any, **kwargs: Any) -> None:
         """ensure that the remote_id is within this instance"""
         if self.id:
             self.remote_id = self.get_remote_id()
@@ -104,10 +110,115 @@ class BookDataModel(ObjectMixin, BookWyrmModel):
         """only send book data updates to other bookwyrm instances"""
         super().broadcast(activity, sender, software=software, **kwargs)
 
+    def merge_into(self, canonical: Self, dry_run=False) -> Dict[str, Any]:
+        """merge this entity into another entity"""
+        if canonical.id == self.id:
+            raise ValueError(f"Cannot merge {self} into itself")
+
+        absorbed_fields = canonical.absorb_data_from(self, dry_run=dry_run)
+
+        if dry_run:
+            return absorbed_fields
+
+        canonical.save()
+
+        self.merged_model.objects.create(deleted_id=self.id, merged_into=canonical)
+
+        # move related models to canonical
+        related_models = [
+            (r.remote_field.name, r.related_model) for r in self._meta.related_objects
+        ]
+        # pylint: disable=protected-access
+        for related_field, related_model in related_models:
+            # Skip the ManyToMany fields that aren’t auto-created. These
+            # should have a corresponding OneToMany field in the model for
+            # the linking table anyway. If we update it through that model
+            # instead then we won’t lose the extra fields in the linking
+            # table.
+            related_field_obj = related_model._meta.get_field(related_field)
+            if isinstance(related_field_obj, ManyToManyField):
+                through = related_field_obj.remote_field.through
+                if not through._meta.auto_created:
+                    continue
+            related_objs = related_model.objects.filter(**{related_field: self})
+            for related_obj in related_objs:
+                try:
+                    setattr(related_obj, related_field, canonical)
+                    related_obj.save()
+                except TypeError:
+                    getattr(related_obj, related_field).add(canonical)
+                    getattr(related_obj, related_field).remove(self)
+
+        self.delete()
+        return absorbed_fields
+
+    def absorb_data_from(self, other: Self, dry_run=False) -> Dict[str, Any]:
+        """fill empty fields with values from another entity"""
+        absorbed_fields = {}
+        for data_field in self._meta.get_fields():
+            if not hasattr(data_field, "activitypub_field"):
+                continue
+            canonical_value = getattr(self, data_field.name)
+            other_value = getattr(other, data_field.name)
+            if not other_value:
+                continue
+            if isinstance(data_field, fields.ArrayField):
+                if new_values := list(set(other_value) - set(canonical_value)):
+                    # append at the end (in no particular order)
+                    if not dry_run:
+                        setattr(self, data_field.name, canonical_value + new_values)
+                    absorbed_fields[data_field.name] = new_values
+            elif isinstance(data_field, fields.PartialDateField):
+                if (
+                    (not canonical_value)
+                    or (other_value.has_day and not canonical_value.has_day)
+                    or (other_value.has_month and not canonical_value.has_month)
+                ):
+                    if not dry_run:
+                        setattr(self, data_field.name, other_value)
+                    absorbed_fields[data_field.name] = other_value
+            else:
+                if not canonical_value:
+                    if not dry_run:
+                        setattr(self, data_field.name, other_value)
+                    absorbed_fields[data_field.name] = other_value
+        return absorbed_fields
+
+
+class MergedBookDataModel(models.Model):
+    """a BookDataModel instance that has been merged into another instance. kept
+    to be able to redirect old URLs"""
+
+    deleted_id = models.IntegerField(primary_key=True)
+
+    class Meta:
+        """abstract just like BookDataModel"""
+
+        abstract = True
+
+
+class MergedBook(MergedBookDataModel):
+    """a Book that has been merged into another one"""
+
+    merged_into = models.ForeignKey(
+        "Book", on_delete=models.PROTECT, related_name="absorbed"
+    )
+
+
+class MergedAuthor(MergedBookDataModel):
+    """an Author that has been merged into another one"""
+
+    merged_into = models.ForeignKey(
+        "Author", on_delete=models.PROTECT, related_name="absorbed"
+    )
+
 
 class Book(BookDataModel):
     """a generic book, which can mean either an edition or a work"""
 
+    merged_model = MergedBook
+
     connector = models.ForeignKey("Connector", on_delete=models.PROTECT, null=True)
 
     # book/work metadata
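A hedged usage sketch of the new merge API (ids invented): dry_run previews which fields the canonical record would absorb; a real run also re-points related rows, records a tombstone for URL redirects, and deletes the duplicate.

duplicate = Author.objects.get(id=999)  # hypothetical ids
canonical = Author.objects.get(id=123)

preview = duplicate.merge_into(canonical, dry_run=True)  # e.g. {"aliases": [...]}
duplicate.merge_into(canonical)  # absorbs data, moves FKs, creates
                                 # MergedAuthor(deleted_id=999), deletes the row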
@@ -133,8 +244,8 @@ class Book(BookDataModel):
     preview_image = models.ImageField(
         upload_to="previews/covers/", blank=True, null=True
     )
-    first_published_date = fields.DateTimeField(blank=True, null=True)
-    published_date = fields.DateTimeField(blank=True, null=True)
+    first_published_date = fields.PartialDateField(blank=True, null=True)
+    published_date = fields.PartialDateField(blank=True, null=True)
 
     objects = InheritanceManager()
     field_tracker = FieldTracker(fields=["authors", "title", "subtitle", "cover"])
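PartialDateField lets a book carry a year-only or year-month date instead of a fake January 1st timestamp, and absorb_data_from (above) uses its has_month/has_day flags to prefer the more precise value during a merge. An illustration of that precedence rule, with a hypothetical stand-in type and invented values:

from dataclasses import dataclass

@dataclass
class FakePartialDate:  # stand-in for the real partial date type
    has_month: bool
    has_day: bool

canonical_value = FakePartialDate(has_month=False, has_day=False)  # "1995"
other_value = FakePartialDate(has_month=True, has_day=True)        # "1995-03-04"

# the merge rule from absorb_data_from above:
prefer_other = (
    other_value.has_day and not canonical_value.has_day
) or (other_value.has_month and not canonical_value.has_month)
assert prefer_other  # the full date wins over the bare year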
@@ -188,9 +299,13 @@ class Book(BookDataModel):
         """properties of this edition, as a string"""
         items = [
             self.physical_format if hasattr(self, "physical_format") else None,
-            f"{self.languages[0]} language"
-            if self.languages and self.languages[0] and self.languages[0] != "English"
-            else None,
+            (
+                f"{self.languages[0]} language"
+                if self.languages
+                and self.languages[0]
+                and self.languages[0] != "English"
+                else None
+            ),
             str(self.published_date.year) if self.published_date else None,
             ", ".join(self.publishers) if hasattr(self, "publishers") else None,
         ]
@@ -199,21 +314,27 @@ class Book(BookDataModel):
     @property
     def alt_text(self):
         """image alt text"""
-        text = self.title
-        if self.edition_info:
-            text += f" ({self.edition_info})"
-        return text
+        author = f"{name}: " if (name := self.author_text) else ""
+        edition = f" ({info})" if (info := self.edition_info) else ""
+        return f"{author}{self.title}{edition}"
 
-    def save(self, *args, **kwargs):
+    def save(self, *args: Any, **kwargs: Any) -> None:
         """can't be abstract for query reasons, but you shouldn't USE it"""
-        if not isinstance(self, Edition) and not isinstance(self, Work):
+        if not isinstance(self, (Edition, Work)):
             raise ValueError("Books should be added as Editions or Works")
 
         return super().save(*args, **kwargs)
 
     def get_remote_id(self):
         """editions and works both use "book" instead of model_name"""
-        return f"https://{DOMAIN}/book/{self.id}"
+        return f"{BASE_URL}/book/{self.id}"
+
+    def guess_sort_title(self):
+        """Get a best-guess sort title for the current book"""
+        articles = chain(
+            *(LANGUAGE_ARTICLES.get(language, ()) for language in tuple(self.languages))
+        )
+        return re.sub(f'^{" |^".join(articles)} ', "", str(self.title).lower())
 
     def __repr__(self):
         # pylint: disable=consider-using-f-string
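With the walrus-operator rewrite, cover alt text now leads with the author and only appends edition details when they exist (values invented):

name, info, title = "N. K. Jemisin", "Paperback, 2015", "The Fifth Season"
author = f"{name}: " if name else ""
edition = f" ({info})" if info else ""
f"{author}{title}{edition}"  # -> 'N. K. Jemisin: The Fifth Season (Paperback, 2015)'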
@@ -224,9 +345,49 @@ class Book(BookDataModel):
         )
 
     class Meta:
-        """sets up postgres GIN index field"""
+        """set up indexes and triggers"""
+
+        # pylint: disable=line-too-long
 
         indexes = (GinIndex(fields=["search_vector"]),)
+        triggers = [
+            pgtrigger.Trigger(
+                name="update_search_vector_on_book_edit",
+                when=pgtrigger.Before,
+                operation=pgtrigger.Insert
+                | pgtrigger.UpdateOf("title", "subtitle", "series", "search_vector"),
+                func=format_trigger(
+                    """
+                    WITH author_names AS (
+                        SELECT array_to_string(bookwyrm_author.name || bookwyrm_author.aliases, ' ') AS name_and_aliases
+                        FROM bookwyrm_author
+                        LEFT JOIN bookwyrm_book_authors
+                        ON bookwyrm_author.id = bookwyrm_book_authors.author_id
+                        WHERE bookwyrm_book_authors.book_id = new.id
+                    )
+                    SELECT
+                        -- title, with priority A (parse in English, default to simple if empty)
+                        setweight(COALESCE(nullif(
+                            to_tsvector('english', new.title), ''),
+                            to_tsvector('simple', new.title)), 'A') ||
+
+                        -- subtitle, with priority B (always in English?)
+                        setweight(to_tsvector('english', COALESCE(new.subtitle, '')), 'B') ||
+
+                        -- list of authors names and aliases (with priority C)
+                        (SELECT setweight(to_tsvector('simple', COALESCE(array_to_string(ARRAY_AGG(name_and_aliases), ' '), '')), 'C')
+                         FROM author_names
+                        ) ||
+
+                        --- last: series name, with lowest priority
+                        setweight(to_tsvector('english', COALESCE(new.series, '')), 'D')
+
+                    INTO new.search_vector;
+                    RETURN new;
+                    """
+                ),
+            )
+        ]
 
 
 class Work(OrderedCollectionPageMixin, Book):
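Note how this trigger and the author triggers above cooperate: renaming an author runs reset_book_search_vector_on_author_edit, which sets each related book's search_vector to ''; since search_vector is in this trigger's UpdateOf list, that reset immediately re-fires update_search_vector_on_book_edit and rebuilds the vector with the fresh author names. From the ORM side, re-indexing is just a save (id invented):

author = Author.objects.get(id=1)
author.name = "Ursula K. Le Guin"
author.save()  # cascades: books' vectors reset to '' -> book trigger rebuilds them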
@@ -320,6 +481,11 @@ class Edition(Book):
     serialize_reverse_fields = [("file_links", "fileLinks", "-created_date")]
     deserialize_reverse_fields = [("file_links", "fileLinks")]
 
+    @property
+    def hyphenated_isbn13(self):
+        """generate the hyphenated version of the ISBN-13"""
+        return hyphenator.hyphenate(self.isbn_13)
+
     def get_rank(self):
         """calculate how complete the data is on this edition"""
         rank = 0
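The property delegates to the shared isbn hyphenator singleton, so templates can render the grouped form, e.g. (value invented):

edition.isbn_13            # "9780316066525"
edition.hyphenated_isbn13  # "978-0-316-06652-5"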
@@ -343,7 +509,7 @@ class Edition(Book):
         # max rank is 9
         return rank
 
-    def save(self, *args, **kwargs):
+    def save(self, *args: Any, **kwargs: Any) -> None:
         """set some fields on the edition object"""
         # calculate isbn 10/13
         if self.isbn_13 and self.isbn_13[:3] == "978" and not self.isbn_10:
@@ -353,9 +519,9 @@ class Edition(Book):
 
         # normalize isbn format
         if self.isbn_10:
-            self.isbn_10 = re.sub(r"[^0-9X]", "", self.isbn_10)
+            self.isbn_10 = normalize_isbn(self.isbn_10)
         if self.isbn_13:
-            self.isbn_13 = re.sub(r"[^0-9X]", "", self.isbn_13)
+            self.isbn_13 = normalize_isbn(self.isbn_13)
 
         # set rank
         self.edition_rank = self.get_rank()
@@ -367,19 +533,23 @@ class Edition(Book):
 
         # Create sort title by removing articles from title
         if self.sort_title in [None, ""]:
-            articles = chain(
-                *(
-                    LANGUAGE_ARTICLES.get(language, ())
-                    for language in tuple(self.languages)
-                )
-            )
-            self.sort_title = re.sub(
-                f'^{" |^".join(articles)} ', "", str(self.title).lower()
-            )
+            self.sort_title = self.guess_sort_title()
 
         return super().save(*args, **kwargs)
 
+    @transaction.atomic
+    def repair(self):
+        """If an edition is in a bad state (missing a work), let's fix that"""
+        # make sure it actually NEEDS repair
+        if self.parent_work:
+            return
+
+        new_work = Work.objects.create(title=self.title)
+        new_work.authors.set(self.authors.all())
+
+        self.parent_work = new_work
+        self.save(update_fields=["parent_work"], broadcast=False)
+
     @classmethod
     def viewer_aware_objects(cls, viewer):
         """annotate a book query with metadata related to the user"""
@@ -446,6 +616,11 @@ def isbn_13_to_10(isbn_13):
     return converted + str(checkdigit)
 
 
+def normalize_isbn(isbn):
+    """Remove unexpected characters from ISBN 10 or 13"""
+    return re.sub(r"[^0-9X]", "", isbn)
+
+
 # pylint: disable=unused-argument
 @receiver(models.signals.post_save, sender=Edition)
 def preview_image(instance, *args, **kwargs):
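The helper centralizes the regex the save method used twice. Note it keeps digits and an uppercase X (the ISBN-10 check character) and strips everything else, including a lowercase x:

normalize_isbn("978-0-316-06652-5")  # -> "9780316066525"
normalize_isbn("0-7475-3269-X")      # -> "074753269X"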
bookwyrm/models/bookwyrm_export_job.py (new file, 334 lines)
@@ -0,0 +1,334 @@
"""Export user account to tar.gz file for import into another Bookwyrm instance"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from boto3.session import Session as BotoSession
|
||||
from s3_tar import S3Tar
|
||||
|
||||
from django.db.models import BooleanField, FileField, JSONField
|
||||
from django.db.models import Q
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from django.core.files.base import ContentFile
|
||||
from django.utils.module_loading import import_string
|
||||
|
||||
from bookwyrm import settings, storage_backends
|
||||
|
||||
from bookwyrm.models import AnnualGoal, ReadThrough, ShelfBook, ListItem
|
||||
from bookwyrm.models import Review, Comment, Quotation
|
||||
from bookwyrm.models import Edition
|
||||
from bookwyrm.models import UserFollows, User, UserBlocks
|
||||
from bookwyrm.models.job import ParentJob
|
||||
from bookwyrm.tasks import app, IMPORTS
|
||||
from bookwyrm.utils.tar import BookwyrmTarFile
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BookwyrmAwsSession(BotoSession):
|
||||
"""a boto session that always uses settings.AWS_S3_ENDPOINT_URL"""
|
||||
|
||||
def client(self, *args, **kwargs): # pylint: disable=arguments-differ
|
||||
kwargs["endpoint_url"] = settings.AWS_S3_ENDPOINT_URL
|
||||
return super().client("s3", *args, **kwargs)
|
||||
|
||||
|
||||
def select_exports_storage():
|
||||
"""callable to allow for dependency on runtime configuration"""
|
||||
cls = import_string(settings.EXPORTS_STORAGE)
|
||||
return cls()
|
||||
|
||||
|
||||
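Passing select_exports_storage itself (not an instance) as a FileField's storage means Django resolves the class named in settings.EXPORTS_STORAGE at runtime, so the field works unchanged whether exports land on local disk or S3, and no storage instance gets baked into migrations. The same pattern in miniature (class path illustrative):

from django.utils.module_loading import import_string

def select_storage():
    return import_string("django.core.files.storage.FileSystemStorage")()

# models.FileField(null=True, storage=select_storage) re-evaluates per process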
class BookwyrmExportJob(ParentJob):
    """entry for a specific request to export a bookwyrm user"""

    export_data = FileField(null=True, storage=select_exports_storage)
    export_json = JSONField(null=True, encoder=DjangoJSONEncoder)
    json_completed = BooleanField(default=False)

    def start_job(self):
        """schedule the first task"""

        task = create_export_json_task.delay(job_id=self.id)
        self.task_id = task.id
        self.save(update_fields=["task_id"])
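A hedged sketch of kicking off an export (assuming the ParentJob base carries the user foreign key, as the tasks below imply; some_user is hypothetical):

job = BookwyrmExportJob.objects.create(user=some_user)
job.start_job()  # queues create_export_json_task, which chains create_archive_task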
@app.task(queue=IMPORTS)
def create_export_json_task(job_id):
    """create the JSON data for the export"""

    job = BookwyrmExportJob.objects.get(id=job_id)

    # don't start the job if it was stopped from the UI
    if job.complete:
        return

    try:
        job.set_status("active")

        # generate JSON structure
        job.export_json = export_json(job.user)
        job.save(update_fields=["export_json"])

        # create archive in separate task
        create_archive_task.delay(job_id=job.id)
    except Exception as err:  # pylint: disable=broad-except
        logger.exception(
            "create_export_json_task for %s failed with error: %s", job, err
        )
        job.set_status("failed")


def archive_file_location(file, directory="") -> str:
    """get the relative location of a file inside the archive"""
    return os.path.join(directory, file.name)


def add_file_to_s3_tar(s3_tar: S3Tar, storage, file, directory=""):
    """
    add file to S3Tar inside directory, keeping any directories under its
    storage location
    """
    s3_tar.add_file(
        os.path.join(storage.location, file.name),
        folder=os.path.dirname(archive_file_location(file, directory=directory)),
    )


@app.task(queue=IMPORTS)
def create_archive_task(job_id):
    """create the archive containing the JSON file and additional files"""

    job = BookwyrmExportJob.objects.get(id=job_id)

    # don't start the job if it was stopped from the UI
    if job.complete:
        return

    try:
        export_task_id = str(job.task_id)
        archive_filename = f"{export_task_id}.tar.gz"
        export_json_bytes = DjangoJSONEncoder().encode(job.export_json).encode("utf-8")

        user = job.user
        editions = get_books_for_user(user)

        if settings.USE_S3:
            # Storage for writing temporary files
            exports_storage = storage_backends.ExportsS3Storage()

            # Handle for creating the final archive
            s3_tar = S3Tar(
                exports_storage.bucket_name,
                os.path.join(exports_storage.location, archive_filename),
                session=BookwyrmAwsSession(),
            )

            # Save JSON file to a temporary location
            export_json_tmp_file = os.path.join(export_task_id, "archive.json")
            exports_storage.save(
                export_json_tmp_file,
                ContentFile(export_json_bytes),
            )
            s3_tar.add_file(
                os.path.join(exports_storage.location, export_json_tmp_file)
            )

            # Add images to TAR
            images_storage = storage_backends.ImagesStorage()

            if user.avatar:
                add_file_to_s3_tar(s3_tar, images_storage, user.avatar)

            for edition in editions:
                if edition.cover:
                    add_file_to_s3_tar(
                        s3_tar, images_storage, edition.cover, directory="images"
                    )

            # Create archive and store file name
            s3_tar.tar()
            job.export_data = archive_filename
            job.save(update_fields=["export_data"])

            # Delete temporary files
            exports_storage.delete(export_json_tmp_file)

        else:
            job.export_data = archive_filename
            with job.export_data.open("wb") as tar_file:
                with BookwyrmTarFile.open(mode="w:gz", fileobj=tar_file) as tar:
                    # save json file
                    tar.write_bytes(export_json_bytes)

                    # Add avatar image if present
                    if user.avatar:
                        tar.add_image(user.avatar)

                    for edition in editions:
                        if edition.cover:
                            tar.add_image(edition.cover, directory="images")
            job.save(update_fields=["export_data"])

        job.set_status("completed")

    except Exception as err:  # pylint: disable=broad-except
        logger.exception("create_archive_task for %s failed with error: %s", job, err)
        job.set_status("failed")
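Either branch produces a gzipped tar whose JSON document is archive.json (under a task-id folder in the S3 branch). A hedged sketch of inspecting a downloaded export locally (file name hypothetical):

import json
import tarfile

with tarfile.open("export.tar.gz", "r:gz") as tar:
    names = tar.getnames()
    json_name = next(n for n in names if n.endswith("archive.json"))
    data = json.load(tar.extractfile(json_name))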
def export_json(user: User):
    """create export JSON"""
    data = export_user(user)  # in the root of the JSON structure
    data["settings"] = export_settings(user)
    data["goals"] = export_goals(user)
    data["books"] = export_books(user)
    data["saved_lists"] = export_saved_lists(user)
    data["follows"] = export_follows(user)
    data["blocks"] = export_blocks(user)
    return data
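So the export document is the user's ActivityPub representation at the root, with non-AP extras grafted on. Roughly (shape only, values illustrative):

{
    "type": "Person",                 # ...plus the other user.to_activity() fields
    "icon": {"url": "avatars/..."},   # rewritten to the in-archive path
    "settings": {"show_goal": True},  # see export_settings
    "goals": [{"goal": 52, "year": 2023, "privacy": "public"}],
    "books": ["..."],                 # one entry per edition, see export_book
    "saved_lists": ["https://..."],   # remote ids
    "follows": ["https://..."],
    "blocks": ["https://..."],
}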
def export_user(user: User):
    """export user data"""
    data = user.to_activity()
    if user.avatar:
        data["icon"]["url"] = archive_file_location(user.avatar)
    else:
        data["icon"] = {}
    return data


def export_settings(user: User):
    """Additional settings - can't be serialized as AP"""
    vals = [
        "show_goal",
        "preferred_timezone",
        "default_post_privacy",
        "show_suggested_users",
    ]
    return {k: getattr(user, k) for k in vals}


def export_saved_lists(user: User):
    """add user saved lists to export JSON"""
    return [l.remote_id for l in user.saved_lists.all()]


def export_follows(user: User):
    """add user follows to export JSON"""
    follows = UserFollows.objects.filter(user_subject=user).distinct()
    following = User.objects.filter(userfollows_user_object__in=follows).distinct()
    return [f.remote_id for f in following]


def export_blocks(user: User):
    """add user blocks to export JSON"""
    blocks = UserBlocks.objects.filter(user_subject=user).distinct()
    blocking = User.objects.filter(userblocks_user_object__in=blocks).distinct()
    return [b.remote_id for b in blocking]


def export_goals(user: User):
    """add user reading goals to export JSON"""
    reading_goals = AnnualGoal.objects.filter(user=user).distinct()
    return [
        {"goal": goal.goal, "year": goal.year, "privacy": goal.privacy}
        for goal in reading_goals
    ]


def export_books(user: User):
    """add books to export JSON"""
    editions = get_books_for_user(user)
    return [export_book(user, edition) for edition in editions]


def export_book(user: User, edition: Edition):
    """add book to export JSON"""
    data = {}
    data["work"] = edition.parent_work.to_activity()
    data["edition"] = edition.to_activity()

    if edition.cover:
        data["edition"]["cover"]["url"] = archive_file_location(
            edition.cover, directory="images"
        )

    # authors
    data["authors"] = [author.to_activity() for author in edition.authors.all()]

    # Shelves this book is on
    # Every ShelfItem is this book so we don't bother serializing it
    shelf_books = (
        ShelfBook.objects.select_related("shelf")
        .filter(user=user, book=edition)
        .distinct()
    )
    data["shelves"] = [shelfbook.shelf.to_activity() for shelfbook in shelf_books]

    # Lists and ListItems
    # ListItems include "notes" and "approved" so we need them
    # even though we know it's this book
    list_items = ListItem.objects.filter(book=edition, user=user).distinct()

    data["lists"] = []
    for item in list_items:
        list_info = item.book_list.to_activity()
        list_info["privacy"] = item.book_list.privacy  # this isn't serialized so we add it
        list_info["list_item"] = item.to_activity()
        data["lists"].append(list_info)

    # Statuses
    # Can't use select_subclasses here because
    # we need to filter on the "book" value,
    # which is not available on an ordinary Status
    for status in ["comments", "quotations", "reviews"]:
        data[status] = []

    comments = Comment.objects.filter(user=user, book=edition).all()
    for status in comments:
        obj = status.to_activity()
        obj["progress"] = status.progress
        obj["progress_mode"] = status.progress_mode
        data["comments"].append(obj)

    quotes = Quotation.objects.filter(user=user, book=edition).all()
    for status in quotes:
        obj = status.to_activity()
        obj["position"] = status.position
        obj["endposition"] = status.endposition
        obj["position_mode"] = status.position_mode
        data["quotations"].append(obj)

    reviews = Review.objects.filter(user=user, book=edition).all()
    data["reviews"] = [status.to_activity() for status in reviews]

    # readthroughs can't be serialized to activity
    book_readthroughs = (
        ReadThrough.objects.filter(user=user, book=edition).distinct().values()
    )
    data["readthroughs"] = list(book_readthroughs)
    return data


def get_books_for_user(user):
    """Get all the books and editions related to a user"""

    editions = (
        Edition.objects.select_related("parent_work")
        .filter(
            Q(shelves__user=user)
            | Q(readthrough__user=user)
            | Q(review__user=user)
            | Q(list__user=user)
            | Q(comment__user=user)
            | Q(quotation__user=user)
        )
        .distinct()
    )

    return editions
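Putting the pieces together, an export archive contains roughly the following (exact prefixes depend on the storage backend and upload paths):

<task-id>.tar.gz
├── archive.json              # export_json() output
├── avatars/<avatar file>     # user.avatar at its storage path
└── images/
    └── covers/<cover files>  # one per exported edition with a cover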